code (string, 5..1.03M) | repo_name (string, 5..90) | path (string, 4..158) | license (15 classes) | size (int64, 5..1.03M) | n_ast_errors (int64, 0..53.9k) | ast_max_depth (int64, 2..4.17k) | n_whitespaces (int64, 0..365k) | n_ast_nodes (int64, 3..317k) | n_ast_terminals (int64, 1..171k) | n_ast_nonterminals (int64, 1..146k) | loc (int64, -1..37.3k) | cycloplexity (int64, -1..1.31k)
---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# htermination isDenormalized :: Float -> Bool #-}
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/full_haskell/Prelude_isDenormalized_1.hs | mit | 53 | 0 | 2 | 8 | 3 | 2 | 1 | 1 | 0 |
module Test.Entities.FooChild
( FooChild (..),
isChildOf,
table,
fooChildIdField,
fooChildFooIdField,
generate,
generateList,
withTables,
)
where
import Control.Monad.IO.Class (MonadIO (liftIO))
import Data.Function (on)
import Data.Int (Int32)
import qualified Data.List as List
import Data.Pool (Pool, withResource)
import qualified Hedgehog as HH
import qualified Hedgehog.Gen as Gen
import qualified Orville.PostgreSQL as Orville
import Orville.PostgreSQL.Connection (Connection)
import qualified Test.Entities.Foo as Foo
import qualified Test.PgGen as PgGen
import qualified Test.TestTable as TestTable
type FooChildId = Int32
data FooChild = FooChild
{ fooChildId :: FooChildId
, fooChildFooId :: Foo.FooId
}
deriving (Eq, Show)
isChildOf :: Foo.Foo -> FooChild -> Bool
isChildOf foo child =
Foo.fooId foo == fooChildFooId child
table :: Orville.TableDefinition (Orville.HasKey FooChildId) FooChild FooChild
table =
Orville.mkTableDefinition "foo_child" (Orville.primaryKey fooChildIdField) fooChildMarshaller
fooChildMarshaller :: Orville.SqlMarshaller FooChild FooChild
fooChildMarshaller =
FooChild
<$> Orville.marshallField fooChildId fooChildIdField
<*> Orville.marshallField fooChildFooId fooChildFooIdField
fooChildIdField :: Orville.FieldDefinition Orville.NotNull FooChildId
fooChildIdField =
Orville.integerField "id"
fooChildFooIdField :: Orville.FieldDefinition Orville.NotNull Foo.FooId
fooChildFooIdField =
Orville.integerField "foo_id"
generate :: [Foo.Foo] -> HH.Gen FooChild
generate foos =
FooChild
<$> PgGen.pgInt32
<*> Gen.element (Foo.fooId <$> foos)
generateList :: HH.Range Int -> [Foo.Foo] -> HH.Gen [FooChild]
generateList range foos =
fmap
(List.nubBy ((==) `on` fooChildId))
(Gen.list range $ generate foos)
withTables :: MonadIO m => Pool Connection -> Orville.Orville a -> m a
withTables pool operation =
liftIO $ do
withResource pool $ \connection -> do
TestTable.dropAndRecreateTableDef connection Foo.table
TestTable.dropAndRecreateTableDef connection table
Orville.runOrville pool operation
| flipstone/orville | orville-postgresql-libpq/test/Test/Entities/FooChild.hs | mit | 2,143 | 0 | 13 | 338 | 586 | 323 | 263 | 60 | 1 |
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Database.Persist.Sql.Orphan.PersistQuery
( deleteWhereCount
, updateWhereCount
, decorateSQLWithLimitOffset
) where
import Database.Persist hiding (updateField)
import Database.Persist.Sql.Util (
entityColumnNames, parseEntityValues, isIdField)
import Database.Persist.Sql.Types
import Database.Persist.Sql.Raw
import Database.Persist.Sql.Orphan.PersistStore (withRawQuery)
import Database.Persist.Sql.Util (dbIdColumns)
import qualified Data.Text as T
import Data.Text (Text)
import Data.Monoid (Monoid (..), (<>))
import Data.Int (Int64)
import Control.Monad.IO.Class
import Control.Monad.Trans.Reader (ReaderT, ask, withReaderT)
import Control.Exception (throwIO)
import qualified Data.Conduit.List as CL
import Data.Conduit
import Data.ByteString.Char8 (readInteger)
import Data.Maybe (isJust)
import Data.List (transpose, inits, find)
-- Orphan instance, kept here for modularity.
instance PersistQueryRead SqlBackend where
count filts = do
conn <- ask
let wher = if null filts
then ""
else filterClause False conn filts
let sql = mconcat
[ "SELECT COUNT(*) FROM "
, connEscapeName conn $ entityDB t
, wher
]
withRawQuery sql (getFiltsValues conn filts) $ do
mm <- CL.head
case mm of
Just [PersistInt64 i] -> return $ fromIntegral i
Just [PersistDouble i] -> return $ fromIntegral (truncate i :: Int64) -- gb oracle
Just [PersistByteString i] -> case readInteger i of -- gb mssql
Just (ret,"") -> return $ fromIntegral ret
xs -> error $ "invalid number i["++show i++"] xs[" ++ show xs ++ "]"
Just xs -> error $ "count:invalid sql return xs["++show xs++"] sql["++show sql++"]"
Nothing -> error $ "count:invalid sql returned nothing sql["++show sql++"]"
where
t = entityDef $ dummyFromFilts filts
selectSourceRes filts opts = do
conn <- ask
srcRes <- rawQueryRes (sql conn) (getFiltsValues conn filts)
return $ fmap ($= CL.mapM parse) srcRes
where
(limit, offset, orders) = limitOffsetOrder opts
parse vals = case parseEntityValues t vals of
Left s -> liftIO $ throwIO $ PersistMarshalError s
Right row -> return row
t = entityDef $ dummyFromFilts filts
wher conn = if null filts
then ""
else filterClause False conn filts
ord conn =
case map (orderClause False conn) orders of
[] -> ""
ords -> " ORDER BY " <> T.intercalate "," ords
cols = T.intercalate ", " . entityColumnNames t
sql conn = connLimitOffset conn (limit,offset) (not (null orders)) $ mconcat
[ "SELECT "
, cols conn
, " FROM "
, connEscapeName conn $ entityDB t
, wher conn
, ord conn
]
selectKeysRes filts opts = do
conn <- ask
srcRes <- rawQueryRes (sql conn) (getFiltsValues conn filts)
return $ fmap ($= CL.mapM parse) srcRes
where
t = entityDef $ dummyFromFilts filts
cols conn = T.intercalate "," $ dbIdColumns conn t
wher conn = if null filts
then ""
else filterClause False conn filts
sql conn = connLimitOffset conn (limit,offset) (not (null orders)) $ mconcat
[ "SELECT "
, cols conn
, " FROM "
, connEscapeName conn $ entityDB t
, wher conn
, ord conn
]
(limit, offset, orders) = limitOffsetOrder opts
ord conn =
case map (orderClause False conn) orders of
[] -> ""
ords -> " ORDER BY " <> T.intercalate "," ords
parse xs = do
keyvals <- case entityPrimary t of
Nothing ->
case xs of
[PersistInt64 x] -> return [PersistInt64 x]
[PersistDouble x] -> return [PersistInt64 (truncate x)] -- oracle returns Double
_ -> return xs
Just pdef ->
let pks = map fieldHaskell $ compositeFields pdef
keyvals = map snd $ filter (\(a, _) -> let ret=isJust (find (== a) pks) in ret) $ zip (map fieldHaskell $ entityFields t) xs
in return keyvals
case keyFromValues keyvals of
Right k -> return k
Left err -> error $ "selectKeysImpl: keyFromValues failed" <> show err
instance PersistQueryRead SqlReadBackend where
count filts = withReaderT persistBackend $ count filts
selectSourceRes filts opts = withReaderT persistBackend $ selectSourceRes filts opts
selectKeysRes filts opts = withReaderT persistBackend $ selectKeysRes filts opts
instance PersistQueryRead SqlWriteBackend where
count filts = withReaderT persistBackend $ count filts
selectSourceRes filts opts = withReaderT persistBackend $ selectSourceRes filts opts
selectKeysRes filts opts = withReaderT persistBackend $ selectKeysRes filts opts
instance PersistQueryWrite SqlBackend where
deleteWhere filts = do
_ <- deleteWhereCount filts
return ()
updateWhere filts upds = do
_ <- updateWhereCount filts upds
return ()
instance PersistQueryWrite SqlWriteBackend where
deleteWhere filts = withReaderT persistBackend $ deleteWhere filts
updateWhere filts upds = withReaderT persistBackend $ updateWhere filts upds
-- | Same as 'deleteWhere', but returns the number of rows affected.
--
-- @since 1.1.5
deleteWhereCount :: (PersistEntity val, MonadIO m, PersistEntityBackend val ~ SqlBackend, IsSqlBackend backend)
=> [Filter val]
-> ReaderT backend m Int64
deleteWhereCount filts = withReaderT persistBackend $ do
conn <- ask
let t = entityDef $ dummyFromFilts filts
let wher = if null filts
then ""
else filterClause False conn filts
sql = mconcat
[ "DELETE FROM "
, connEscapeName conn $ entityDB t
, wher
]
rawExecuteCount sql $ getFiltsValues conn filts
-- | Same as 'updateWhere', but returns the number of rows affected.
--
-- @since 1.1.5
updateWhereCount :: (PersistEntity val, MonadIO m, SqlBackend ~ PersistEntityBackend val, IsSqlBackend backend)
=> [Filter val]
-> [Update val]
-> ReaderT backend m Int64
updateWhereCount _ [] = return 0
updateWhereCount filts upds = withReaderT persistBackend $ do
conn <- ask
let wher = if null filts
then ""
else filterClause False conn filts
let sql = mconcat
[ "UPDATE "
, connEscapeName conn $ entityDB t
, " SET "
, T.intercalate "," $ map (go' conn . go) upds
, wher
]
let dat = map updatePersistValue upds `Data.Monoid.mappend`
getFiltsValues conn filts
rawExecuteCount sql dat
where
t = entityDef $ dummyFromFilts filts
go'' n Assign = n <> "=?"
go'' n Add = mconcat [n, "=", n, "+?"]
go'' n Subtract = mconcat [n, "=", n, "-?"]
go'' n Multiply = mconcat [n, "=", n, "*?"]
go'' n Divide = mconcat [n, "=", n, "/?"]
go'' _ (BackendSpecificUpdate up) = error $ T.unpack $ "BackendSpecificUpdate" `mappend` up `mappend` "not supported"
go' conn (x, pu) = go'' (connEscapeName conn x) pu
go x = (updateField x, updateUpdate x)
updateField (Update f _ _) = fieldName f
updateField _ = error "BackendUpdate not implemented"
fieldName :: forall record typ. (PersistEntity record, PersistEntityBackend record ~ SqlBackend) => EntityField record typ -> DBName
fieldName f = fieldDB $ persistFieldDef f
dummyFromFilts :: [Filter v] -> Maybe v
dummyFromFilts _ = Nothing
getFiltsValues :: forall val. (PersistEntity val, PersistEntityBackend val ~ SqlBackend)
=> SqlBackend -> [Filter val] -> [PersistValue]
getFiltsValues conn = snd . filterClauseHelper False False conn OrNullNo
data OrNull = OrNullYes | OrNullNo
filterClauseHelper :: (PersistEntity val, PersistEntityBackend val ~ SqlBackend)
=> Bool -- ^ include table name?
-> Bool -- ^ include WHERE?
-> SqlBackend
-> OrNull
-> [Filter val]
-> (Text, [PersistValue])
filterClauseHelper includeTable includeWhere conn orNull filters =
(if not (T.null sql) && includeWhere
then " WHERE " <> sql
else sql, vals)
where
(sql, vals) = combineAND filters
combineAND = combine " AND "
combine s fs =
(T.intercalate s $ map wrapP a, mconcat b)
where
(a, b) = unzip $ map go fs
wrapP x = T.concat ["(", x, ")"]
go (BackendFilter _) = error "BackendFilter not expected"
go (FilterAnd []) = ("1=1", [])
go (FilterAnd fs) = combineAND fs
go (FilterOr []) = ("1=0", [])
go (FilterOr fs) = combine " OR " fs
go (Filter field value pfilter) =
let t = entityDef $ dummyFromFilts [Filter field value pfilter]
in case (isIdField field, entityPrimary t, allVals) of
(True, Just pdef, PersistList ys:_) ->
if length (compositeFields pdef) /= length ys
then error $ "wrong number of entries in compositeFields vs PersistList allVals=" ++ show allVals
else
case (allVals, pfilter, isCompFilter pfilter) of
([PersistList xs], Eq, _) ->
let sqlcl=T.intercalate " and " (map (\a -> connEscapeName conn (fieldDB a) <> showSqlFilter pfilter <> "? ") (compositeFields pdef))
in (wrapSql sqlcl,xs)
([PersistList xs], Ne, _) ->
let sqlcl=T.intercalate " or " (map (\a -> connEscapeName conn (fieldDB a) <> showSqlFilter pfilter <> "? ") (compositeFields pdef))
in (wrapSql sqlcl,xs)
(_, In, _) ->
let xxs = transpose (map fromPersistList allVals)
sqls=map (\(a,xs) -> connEscapeName conn (fieldDB a) <> showSqlFilter pfilter <> "(" <> T.intercalate "," (replicate (length xs) " ?") <> ") ") (zip (compositeFields pdef) xxs)
in (wrapSql (T.intercalate " and " (map wrapSql sqls)), concat xxs)
(_, NotIn, _) ->
let xxs = transpose (map fromPersistList allVals)
sqls=map (\(a,xs) -> connEscapeName conn (fieldDB a) <> showSqlFilter pfilter <> "(" <> T.intercalate "," (replicate (length xs) " ?") <> ") ") (zip (compositeFields pdef) xxs)
in (wrapSql (T.intercalate " or " (map wrapSql sqls)), concat xxs)
([PersistList xs], _, True) ->
let zs = tail (inits (compositeFields pdef))
sql1 = map (\b -> wrapSql (T.intercalate " and " (map (\(i,a) -> sql2 (i==length b) a) (zip [1..] b)))) zs
sql2 islast a = connEscapeName conn (fieldDB a) <> (if islast then showSqlFilter pfilter else showSqlFilter Eq) <> "? "
sqlcl = T.intercalate " or " sql1
in (wrapSql sqlcl, concat (tail (inits xs)))
(_, BackendSpecificFilter _, _) -> error "unhandled type BackendSpecificFilter for composite/non id primary keys"
_ -> error $ "unhandled type/filter for composite/non id primary keys pfilter=" ++ show pfilter ++ " persistList="++show allVals
(True, Just pdef, []) ->
error $ "empty list given as filter value filter=" ++ show pfilter ++ " persistList=" ++ show allVals ++ " pdef=" ++ show pdef
(True, Just pdef, _) ->
error $ "unhandled error for composite/non id primary keys filter=" ++ show pfilter ++ " persistList=" ++ show allVals ++ " pdef=" ++ show pdef
_ -> case (isNull, pfilter, length notNullVals) of
(True, Eq, _) -> (name <> " IS NULL", [])
(True, Ne, _) -> (name <> " IS NOT NULL", [])
(False, Ne, _) -> (T.concat
[ "("
, name
, " IS NULL OR "
, name
, " <> "
, qmarks
, ")"
], notNullVals)
-- We use 1=2 (and below 1=1) to avoid using TRUE and FALSE, since
-- not all databases support those words directly.
(_, In, 0) -> ("1=2" <> orNullSuffix, [])
(False, In, _) -> (name <> " IN " <> qmarks <> orNullSuffix, allVals)
(True, In, _) -> (T.concat
[ "("
, name
, " IS NULL OR "
, name
, " IN "
, qmarks
, ")"
], notNullVals)
(False, NotIn, 0) -> ("1=1", [])
(True, NotIn, 0) -> (name <> " IS NOT NULL", [])
(False, NotIn, _) -> (T.concat
[ "("
, name
, " IS NULL OR "
, name
, " NOT IN "
, qmarks
, ")"
], notNullVals)
(True, NotIn, _) -> (T.concat
[ "("
, name
, " IS NOT NULL AND "
, name
, " NOT IN "
, qmarks
, ")"
], notNullVals)
_ -> (name <> showSqlFilter pfilter <> "?" <> orNullSuffix, allVals)
where
isCompFilter Lt = True
isCompFilter Le = True
isCompFilter Gt = True
isCompFilter Ge = True
isCompFilter _ = False
wrapSql sqlcl = "(" <> sqlcl <> ")"
fromPersistList (PersistList xs) = xs
fromPersistList other = error $ "expected PersistList but found " ++ show other
filterValueToPersistValues :: forall a. PersistField a => Either a [a] -> [PersistValue]
filterValueToPersistValues v = map toPersistValue $ either return id v
orNullSuffix =
case orNull of
OrNullYes -> mconcat [" OR ", name, " IS NULL"]
OrNullNo -> ""
isNull = any (== PersistNull) allVals
notNullVals = filter (/= PersistNull) allVals
allVals = filterValueToPersistValues value
tn = connEscapeName conn $ entityDB
$ entityDef $ dummyFromFilts [Filter field value pfilter]
name =
(if includeTable
then ((tn <> ".") <>)
else id)
$ connEscapeName conn $ fieldName field
qmarks = case value of
Left _ -> "?"
Right x ->
let x' = filter (/= PersistNull) $ map toPersistValue x
in "(" <> T.intercalate "," (map (const "?") x') <> ")"
showSqlFilter Eq = "="
showSqlFilter Ne = "<>"
showSqlFilter Gt = ">"
showSqlFilter Lt = "<"
showSqlFilter Ge = ">="
showSqlFilter Le = "<="
showSqlFilter In = " IN "
showSqlFilter NotIn = " NOT IN "
showSqlFilter (BackendSpecificFilter s) = s
updatePersistValue :: Update v -> PersistValue
updatePersistValue (Update _ v _) = toPersistValue v
updatePersistValue _ = error "BackendUpdate not implemented"
filterClause :: (PersistEntity val, PersistEntityBackend val ~ SqlBackend)
=> Bool -- ^ include table name?
-> SqlBackend
-> [Filter val]
-> Text
filterClause b c = fst . filterClauseHelper b True c OrNullNo
orderClause :: (PersistEntity val, PersistEntityBackend val ~ SqlBackend)
=> Bool -- ^ include the table name
-> SqlBackend
-> SelectOpt val
-> Text
orderClause includeTable conn o =
case o of
Asc x -> name x
Desc x -> name x <> " DESC"
_ -> error "orderClause: expected Asc or Desc, not limit or offset"
where
dummyFromOrder :: SelectOpt a -> Maybe a
dummyFromOrder _ = Nothing
tn = connEscapeName conn $ entityDB $ entityDef $ dummyFromOrder o
name :: (PersistEntityBackend record ~ SqlBackend, PersistEntity record)
=> EntityField record typ -> Text
name x =
(if includeTable
then ((tn <> ".") <>)
else id)
$ connEscapeName conn $ fieldName x
-- | Generates sql for limit and offset for postgres, sqlite and mysql.
decorateSQLWithLimitOffset::Text -> (Int,Int) -> Bool -> Text -> Text
decorateSQLWithLimitOffset nolimit (limit,offset) _ sql =
let
lim = case (limit, offset) of
(0, 0) -> ""
(0, _) -> T.cons ' ' nolimit
(_, _) -> " LIMIT " <> T.pack (show limit)
off = if offset == 0
then ""
else " OFFSET " <> T.pack (show offset)
in mconcat
[ sql
, lim
, off
]
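-- A hedged usage sketch (not part of the original module), showing the three
-- cases above with a hypothetical @nolimit@ text of "LIMIT -1":
--
-- >>> decorateSQLWithLimitOffset "LIMIT -1" (10, 5) False "SELECT * FROM t"
-- "SELECT * FROM t LIMIT 10 OFFSET 5"
-- >>> decorateSQLWithLimitOffset "LIMIT -1" (0, 5) False "SELECT * FROM t"
-- "SELECT * FROM t LIMIT -1 OFFSET 5"
-- >>> decorateSQLWithLimitOffset "LIMIT -1" (0, 0) False "SELECT * FROM t"
-- "SELECT * FROM t"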
| psibi/persistent | persistent/Database/Persist/Sql/Orphan/PersistQuery.hs | mit | 18,502 | 0 | 33 | 7,073 | 5,092 | 2,619 | 2,473 | 358 | 44 |
{-# LANGUAGE CPP #-}
module Data.String.Interpolate.Compat (
readMaybe
, module Language.Haskell.TH
#if !MIN_VERSION_template_haskell(2,8,0)
, reportError
#endif
) where
import Language.Haskell.TH
import Text.Read
#if !MIN_VERSION_base(4,6,0)
import qualified Text.ParserCombinators.ReadP as P
#endif
#if !MIN_VERSION_base(4,6,0)
-- | Parse a string using the 'Read' instance.
-- Succeeds if there is exactly one valid result.
-- A 'Left' value indicates a parse error.
readEither :: Read a => String -> Either String a
readEither s =
case [ x | (x,"") <- readPrec_to_S read' minPrec s ] of
[x] -> Right x
[] -> Left "Prelude.read: no parse"
_ -> Left "Prelude.read: ambiguous parse"
where
read' =
do x <- readPrec
lift P.skipSpaces
return x
-- | Parse a string using the 'Read' instance.
-- Succeeds if there is exactly one valid result.
readMaybe :: Read a => String -> Maybe a
readMaybe s = case readEither s of
Left _ -> Nothing
Right a -> Just a
#endif
#if !MIN_VERSION_template_haskell(2,8,0)
reportError :: String -> Q ()
reportError = report True
#endif
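-- A hedged usage sketch (not part of the original module): 'readMaybe' only
-- succeeds when the entire input parses unambiguously.
--
-- >>> readMaybe "42" :: Maybe Int
-- Just 42
-- >>> readMaybe "42abc" :: Maybe Int
-- Nothing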
| beni55/interpolate | src/Data/String/Interpolate/Compat.hs | mit | 1,160 | 0 | 10 | 265 | 251 | 136 | 115 | 24 | 3 |
module Rebase.Control.Concurrent.STM.TArray
(
module Control.Concurrent.STM.TArray
)
where
import Control.Concurrent.STM.TArray
| nikita-volkov/rebase | library/Rebase/Control/Concurrent/STM/TArray.hs | mit | 131 | 0 | 5 | 12 | 26 | 19 | 7 | 4 | 0 |
module Initialization where
import Control.Applicative ((<$>))
import Control.Monad (replicateM)
import Control.Monad.Random (getRandomR, Rand, StdGen)
import Types
import Vector
radius, angularVelocity :: Scalar
radius = 20
angularVelocity = 0.4
initialize :: Options -> Rand StdGen Model
initialize options = do
-- Build plane of rotation
n <- case (rotationAxis options) of
XAxis -> return $ Vector3 1 0 0
YAxis -> return $ Vector3 0 1 0
ZAxis -> return $ Vector3 0 0 1
RandomAxis -> do
let sub1 n = n - 1
nx <- (sub1 . (*2)) <$> getRandomR (0, 1 :: Scalar)
ny <- (sub1 . (*2)) <$> getRandomR (0, 1 :: Scalar)
nz <- (sub1 . (*2)) <$> getRandomR (0, 1 :: Scalar)
let nv = Vector3 nx ny nz
let norm = 1 / sqrt (nv `dot` nv)
return $ (*norm) <$> nv
-- Generate stars
replicateM (starCount options) $ do
let t = 1.9
m <- (\x -> (t*x-t/2)^3+1) <$> getRandomR (0, 1 :: Scalar)
rv <- genPosition
let p = rv
let v = (angularVelocity*) <$> (n `cross` rv)
return $ Star p v vzero m
-- Generate random position inside a radius
genPosition :: Rand StdGen Vec3
genPosition = do
rx <- getRandomR (-1, 1 :: Scalar)
ry <- getRandomR (-1, 1 :: Scalar)
rz <- getRandomR (-1, 1 :: Scalar)
let rv = (\x -> (2 * x - 1) * radius) <$> (Vector3 rx ry rz)
let r = sqrt (rv `dot` rv) -- Distance from center
if r > radius then genPosition else return rv
| tcsavage/gravitysim | src/Initialization.hs | mit | 1,540 | 0 | 19 | 465 | 640 | 339 | 301 | 38 | 4 |
{-#LANGUAGE DeriveDataTypeable #-}
{-#LANGUAGE BangPatterns#-}
module Control.Concurrent.HEP.Supervisor
( SupervisorMessage(..)
, SupervisorCommand
-- , spawnSupervisor
, procContinue
, procFinish
, procRestart
, procReshutdown
) where
import Control.Concurrent.HEP.Types
import Control.Concurrent.HEP.Mailbox
import Control.Exception
import Data.Typeable
import Control.Concurrent.HEP.Proc
import Control.Monad.Trans
{-spawnSupervisor:: (HEP HEPProcState-> HEP Pid) -> (Pid-> HEP HEPProcState ) -> HEP HEPProcState -> HEP Pid
spawnSupervisor forker sv worker = do
!pid <- forker worker
spawn (sv pid)
return pid
-}
procContinue:: MBox SupervisorCommand-> HEPState-> HEP ()
procContinue mbox mstate = do
liftIO $! sendMBox mbox $! ProcContinue mstate
procFinish:: MBox SupervisorCommand-> HEP ()
procFinish mbox = do
liftIO $! sendMBox mbox $! ProcFinish
procRestart:: MBox SupervisorCommand-> HEPState -> HEP ()
procRestart mbox mstate = do
liftIO $! sendMBox mbox $! ProcRestart mstate
procReshutdown:: MBox SupervisorCommand-> HEPState -> HEP ()
procReshutdown mbox mstate = do
liftIO $! sendMBox mbox $! ProcReshutdown mstate
| dambaev/hep | src/Control/Concurrent/HEP/Supervisor.hs | mit | 1,202 | 0 | 9 | 211 | 256 | 135 | 121 | 27 | 1 |
{-# LANGUAGE ScopedTypeVariables, MagicHash, ExistentialQuantification #-}
-- Copyright (c) Jean-Philippe Bernardy 2005-2007.
module Yi.Dynamic
(
Initializable(..),
toDyn, fromDynamic, dynamicValueA, emptyDV,
Typeable, Dynamic, DynamicValues
)
where
import Prelude ()
import Yi.Prelude
import GHC.Exts
import Data.Accessor
import Data.Maybe
import Data.Typeable
import Data.Map as M
-- ---------------------------------------------------------------------
-- | Class of values that can go in the extensible state component
--
-- | The default value. If a function tries to get a copy of the state, but the state
-- hasn't yet been created, 'initial' will be called to supply *some* value. The value
-- of initial will probably be something like Nothing, \[\], \"\", or 'Data.Sequence.empty' - compare
-- the 'mempty' of "Data.Monoid".
class (Typeable a) => Initializable a where
initial :: a
-- Unfortunately, this is not serializable: there is no way to recover a type from a TypeRep.
data Dynamic = forall a. Initializable a => Dynamic a
-- | An extensible record, indexed by type
type DynamicValues = M.Map String Dynamic
toDyn :: Initializable a => a -> Dynamic
toDyn = Dynamic
fromDynamic :: forall a. Typeable a => Dynamic -> Maybe a
fromDynamic (Dynamic b) = if typeOf (undefined :: a) == typeOf b then Just (unsafeCoerce# b) else Nothing
instance (Typeable a) => Initializable (Maybe a) where
initial = Nothing
-- | Accessor for a dynamic component
dynamicValueA :: Initializable a => Accessor DynamicValues a
dynamicValueA = accessor getDynamicValue setDynamicValue
where
setDynamicValue :: forall a. Initializable a => a -> DynamicValues -> DynamicValues
setDynamicValue v = M.insert (show $ typeOf (undefined::a)) (toDyn v)
getDynamicValue :: forall a. Initializable a => DynamicValues -> a
getDynamicValue dv = case M.lookup (show $ typeOf (undefined::a)) dv of
Nothing -> initial
Just x -> fromJust $ fromDynamic x
-- | The empty record
emptyDV :: DynamicValues
emptyDV = M.empty
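-- A hedged usage sketch (not part of the original module), assuming the
-- 'getVal' and 'setVal' accessors from the data-accessor package are in scope
-- via "Data.Accessor": each type with an 'Initializable' instance owns one
-- slot in the 'DynamicValues' map, and reading a slot that was never written
-- falls back to 'initial' (here 'Nothing' for 'Maybe Int').
exampleDynamicValues :: (Maybe Int, Maybe Int)
exampleDynamicValues =
  let dv = setVal dynamicValueA (Just (42 :: Int)) emptyDV
  in (getVal dynamicValueA emptyDV, getVal dynamicValueA dv)
  -- expected to evaluate to (Nothing, Just 42)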
| codemac/yi-editor | src/Yi/Dynamic.hs | gpl-2.0 | 2,119 | 0 | 12 | 425 | 445 | 245 | 200 | 33 | 2 |
module Main where
import System.Exit
import QuickBench (defaultMain)
main :: IO ()
main = defaultMain >>= maybe exitSuccess die
| simonmichael/quickbench | app/Main.hs | gpl-3.0 | 130 | 0 | 6 | 21 | 41 | 23 | 18 | 5 | 1 |
{- RJSONConvert
By Gregory W. Schwartz
Collects functions pertaining to converting the JSON output of R to the workable
tree. To create the input for this program from R:
@
library(data.tree)
library(jsonlite)
hc = hclust(dist(USArrests), "ave")
tree = as.Node(as.dendrogram(hc))
toJSON(as.list(tree, mode = "explicit", unname = TRUE))
@
-}
{-# LANGUAGE OverloadedStrings #-}
module RJSONConvert
( rJsonToTree
, decodeRJsonTree
, getRJsonTree
) where
-- Standard
import Data.Maybe
import qualified Data.Sequence as Seq
import Data.Tree
import Debug.Trace
-- Cabal
import qualified Data.Vector as V
import qualified Data.ByteString.Lazy.Char8 as C
import qualified Data.Text as T
import TextShow (showt)
import Math.TreeFun.Types
import Math.TreeFun.Tree
import Data.Aeson
import Data.Aeson.Types
-- Local
import Types
-- | Convert a R JSON format into the workable tree format
rJsonToTree :: Object -> Tree NodeLabel
rJsonToTree object =
Node { rootLabel = getNodeLabel object
, subForest = fmap rJsonToTree . getChildren $ object
}
-- | Get the NodeLabel of a node
getNodeLabel :: Object -> NodeLabel
getNodeLabel object = do
NodeLabel { nodeID = ""
, nodeLabels = getLabel object
}
-- | Get the label of the node
getLabel :: Object -> Labels
getLabel object = Seq.fromList
. V.toList
. either error id
. flip parseEither object $ \obj -> do
labels <- obj .: "name"
return labels
-- | Get the children of a node
getChildren :: Object -> [Object]
getChildren object = either (const []) id
. flip parseEither object $ \obj -> do
children <- obj .: "children"
return children
-- | Get the generic AST from the file
decodeRJsonTree :: C.ByteString -> Object
decodeRJsonTree contents = fromMaybe
(error "Input is not a JSON object")
(decode contents :: Maybe Object)
-- | Get the lineage tree from a generic AST
getRJsonTree :: Object -> Tree NodeLabel
getRJsonTree object = rJsonToTree object
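-- A hedged usage sketch (not part of the original module), assuming the label
-- entries under "name" decode as plain JSON strings: a hypothetical two-node
-- document in the same shape as the R output described above.
exampleTree :: Tree NodeLabel
exampleTree =
  getRJsonTree . decodeRJsonTree $
    "{\"name\": [\"root\"], \"children\": [{\"name\": [\"leaf\"]}]}"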
| GregorySchwartz/find-clumpiness | src/RJSONConvert.hs | gpl-3.0 | 2,187 | 0 | 10 | 593 | 396 | 221 | 175 | 44 | 1 |
{-# LANGUAGE BangPatterns #-}
module Main where
import Control.Concurrent.TokenBucket
import Control.Concurrent
import Control.Exception
import Control.Monad
import Data.Time.Clock.POSIX (getPOSIXTime)
import System.Exit
import Data.Word
getPosixTime :: IO Double
getPosixTime = fmap realToFrac getPOSIXTime
toInvRate :: Double -> Word64
toInvRate r = round (1e6 / r)
timeIO :: IO a -> IO (Double, a)
timeIO act = do
ts0 <- getPosixTime
res <- act
ts1 <- getPosixTime
dt <- evaluate (ts1-ts0)
return (dt,res)
timeIO_ :: IO a -> IO Double
timeIO_ = fmap fst . timeIO
main :: IO ()
main = runInUnboundThread $ do
putStrLn "testing tocket-bucket..."
!tb <- newTokenBucket
replicateM_ 3 $ do
check tb 10 10.0
check tb 20 20.0
check tb 50 50.0
check tb 100 100.0
check tb 200 200.0
check tb 500 500.0
check tb 1000 1000.0
putStrLn "============================================="
where
check :: TokenBucket -> Int -> Double -> IO ()
check tb n rate = do
-- threadDelay 100000
putStrLn $ "running "++show n++"+1 iterations with "++show rate++" Hz rate-limit..."
dt <- timeIO_ (replicateM_ (n+1) $ (tokenBucketWait tb 1 (toInvRate rate)))
let rate' = fromIntegral n/dt
unless (rate' <= rate) $ do
putStrLn $ "...FAILED! (effective rate was " ++ show rate' ++ " Hz)"
exitFailure
putStrLn $ "...PASSED (effective rate was " ++ show rate' ++ " Hz)"
| hvr/token-bucket | test-tb.hs | gpl-3.0 | 1,526 | 0 | 16 | 400 | 499 | 239 | 260 | 44 | 1 |
{- |
Module: SynthParams
Description: Parameters to control synth
In general, synth parameters specify patterns of sounds, and patterns of effects on those sounds. These are the synthesis parameters you can use with the default Dirt synth:
-}
module Sound.Tidal.MIDI.SynthParams (
sound,
grp,
accelerate,
bandf,
bandq,
begin,
channel,
coarse,
crush,
cut,
cutoff,
cutoffegint,
delay,
delayfeedback,
delaytime,
detune,
dry,
end,
gain,
hcutoff,
hresonance,
loop,
n,
nudge,
offset,
orbit,
pan,
resonance,
room,
shape,
size,
speed,
s,
unit,
velocity,
vowel
) where
import Sound.Tidal.Params
| kindohm/tidal-midi | Sound/Tidal/MIDI/SynthParams.hs | gpl-3.0 | 664 | 0 | 4 | 159 | 125 | 84 | 41 | 38 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.RegionURLMaps.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Returns the specified UrlMap resource. Gets a list of available URL maps
-- by making a list() request.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.regionUrlMaps.get@.
module Network.Google.Resource.Compute.RegionURLMaps.Get
(
-- * REST Resource
RegionURLMapsGetResource
-- * Creating a Request
, regionURLMapsGet
, RegionURLMapsGet
-- * Request Lenses
, rumgURLMap
, rumgProject
, rumgRegion
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.regionUrlMaps.get@ method which the
-- 'RegionURLMapsGet' request conforms to.
type RegionURLMapsGetResource =
"compute" :>
"v1" :>
"projects" :>
Capture "project" Text :>
"regions" :>
Capture "region" Text :>
"urlMaps" :>
Capture "urlMap" Text :>
QueryParam "alt" AltJSON :> Get '[JSON] URLMap
-- | Returns the specified UrlMap resource. Gets a list of available URL maps
-- by making a list() request.
--
-- /See:/ 'regionURLMapsGet' smart constructor.
data RegionURLMapsGet =
RegionURLMapsGet'
{ _rumgURLMap :: !Text
, _rumgProject :: !Text
, _rumgRegion :: !Text
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'RegionURLMapsGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rumgURLMap'
--
-- * 'rumgProject'
--
-- * 'rumgRegion'
regionURLMapsGet
:: Text -- ^ 'rumgURLMap'
-> Text -- ^ 'rumgProject'
-> Text -- ^ 'rumgRegion'
-> RegionURLMapsGet
regionURLMapsGet pRumgURLMap_ pRumgProject_ pRumgRegion_ =
RegionURLMapsGet'
{ _rumgURLMap = pRumgURLMap_
, _rumgProject = pRumgProject_
, _rumgRegion = pRumgRegion_
}
-- | Name of the UrlMap resource to return.
rumgURLMap :: Lens' RegionURLMapsGet Text
rumgURLMap
= lens _rumgURLMap (\ s a -> s{_rumgURLMap = a})
-- | Project ID for this request.
rumgProject :: Lens' RegionURLMapsGet Text
rumgProject
= lens _rumgProject (\ s a -> s{_rumgProject = a})
-- | Name of the region scoping this request.
rumgRegion :: Lens' RegionURLMapsGet Text
rumgRegion
= lens _rumgRegion (\ s a -> s{_rumgRegion = a})
instance GoogleRequest RegionURLMapsGet where
type Rs RegionURLMapsGet = URLMap
type Scopes RegionURLMapsGet =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/compute",
"https://www.googleapis.com/auth/compute.readonly"]
requestClient RegionURLMapsGet'{..}
= go _rumgProject _rumgRegion _rumgURLMap
(Just AltJSON)
computeService
where go
= buildClient
(Proxy :: Proxy RegionURLMapsGetResource)
mempty
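-- A hedged usage sketch (not part of the original module): the project,
-- region and URL-map names below are placeholders, and 'view' is assumed to
-- come from a lens library compatible with the 'Lens'' fields above.
exampleRequest :: RegionURLMapsGet
exampleRequest = regionURLMapsGet "example-url-map" "example-project" "us-central1"
-- view rumgRegion exampleRequest == "us-central1"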
| brendanhay/gogol | gogol-compute/gen/Network/Google/Resource/Compute/RegionURLMaps/Get.hs | mpl-2.0 | 3,764 | 0 | 16 | 898 | 468 | 280 | 188 | 77 | 1 |
module Common.HTTPHelper where
import Data.Text (Text)
import Data.Text.Encoding (encodeUtf8, decodeUtf8)
import Network.HTTP.Types.URI (urlEncode)
textUrlEncode :: Bool -> Text -> Text
textUrlEncode flag =
decodeUtf8 . urlEncode flag . encodeUtf8
| asvyazin/my-books.purs | server/my-books/Common/HTTPHelper.hs | mpl-2.0 | 254 | 0 | 7 | 35 | 74 | 43 | 31 | 7 | 1 |
module LongestCommonSubsequence where
lcs :: String -> String -> String
lcs "" _ = ""
lcs _ "" = ""
lcs [a] [b] = if a == b then [a] else ""
lcs [a] y = if elem a y then [a] else ""
lcs y [a] = if elem a y then [a] else ""
lcs x y |x1 == y1 = x1 : lcs x2 y2
|otherwise = let xl = lcs x2 y
yl = lcs x y2
in
if length xl > length yl then xl else yl
where (x1:x2) = x
(y1:y2) = y
--
| ice1000/OI-codes | codewars/101-200/longest-common-subsequence.hs | agpl-3.0 | 483 | 0 | 10 | 203 | 237 | 124 | 113 | 13 | 5 |
returnTest :: IO ()
returnTest = do
one <- return 1
let two = 2
putStrLn $ show (one + two)
| EricYT/Haskell | src/real_haskell/chapter-7/return2.hs | apache-2.0 | 105 | 0 | 10 | 34 | 52 | 24 | 28 | 5 | 1 |
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeFamilies #-}
-- Common utilities in writing program transformations.
module Language.K3.Transform.Common where
import Control.Monad.Identity
import Control.Monad.State
import Data.Tree
import Language.K3.Core.Annotation
import Language.K3.Core.Common
import Language.K3.Core.Declaration
import Language.K3.Core.Expression
import Language.K3.Core.Utils
import qualified Language.K3.Core.Constructor.Expression as EC
-- Configuration for many transformations at once
data TransformConfig = TransformConfig { optRefs :: Bool
, optMoves :: Bool
}
defaultTransConfig :: TransformConfig
defaultTransConfig = TransformConfig True True
noRefsTransConfig :: TransformConfig
noRefsTransConfig = defaultTransConfig { optRefs = False }
noMovesTransConfig :: TransformConfig
noMovesTransConfig = defaultTransConfig { optMoves = False }
-- | Substitute all occurrences of a variable with an expression in the specified target expression.
substituteImmutBinding :: Identifier -> K3 Expression -> K3 Expression -> K3 Expression
substituteImmutBinding i iExpr expr =
runIdentity $ biFoldMapTree pruneSubs rebuild [(i, iExpr)] EC.unit expr
where
pruneSubs subs (tag -> ELambda j) = return $ pruneBinding subs [j] [True]
pruneSubs subs (tag -> ELetIn j) = return $ pruneBinding subs [j] [False, True]
pruneSubs subs (tag -> EBindAs b) = return $ pruneBinding subs (bindingVariables b) [False, True]
pruneSubs subs (tag -> ECaseOf j) = return $ pruneBinding subs [j] [False, True, False]
pruneSubs subs n = return $ (subs, replicate (length $ children n) subs)
pruneBinding subs ids oldOrNew =
let newSubs = foldl removeAssoc subs ids
in (subs, map (\useNew -> if useNew then newSubs else subs) oldOrNew)
rebuild subs _ n@(tag -> EVariable j) = return $ maybe n id $ lookup j subs
rebuild _ _ n@(tag -> EConstant _) = return $ n
rebuild _ ch n@(tag -> ETuple) = return $ if null $ children n then n else replaceCh n ch
rebuild _ ch (Node t _) = return $ Node t ch
-- Renumber the uuids in a program
renumberUids :: K3 Declaration -> K3 Declaration
renumberUids p = evalState run 1
where
run = do
-- First modify declaration annotations
ds <- modifyTree (\n -> replace isDUID (DUID . UID) n) p
ds' <- mapExpression replaceAll ds
return ds'
replaceAll d = modifyTree (\n -> replace isEUID (EUID . UID) n) d
replace matcher constructor n = do
i <- get
put (i+1)
return $ (stripAnnot matcher n) @+ constructor i
stripAnnot :: Eq (Annotation a) => (Annotation a -> Bool) -> K3 a -> K3 a
stripAnnot f t = maybe t (t @-) $ t @~ f
-- Add missing spans in a program tree
addSpans :: String -> K3 Declaration -> K3 Declaration
addSpans spanName p = runIdentity $ do
ds <- modifyTree (return . add isDSpan (DSpan $ GeneratedSpan spanName)) p
mapExpression addExpr ds
where
addExpr n = modifyTree addSpanQual n
addSpanQual n = return $
add isESpan (ESpan $ GeneratedSpan spanName) n
-- add isEQualified EImmutable n
-- Don't add span if we already have it
add matcher anno n = maybe (n @+ anno) (const n) (n @~ matcher)
-- Clean up code generation with renumbering uids and adding spans
cleanGeneration :: String -> K3 Declaration -> K3 Declaration
cleanGeneration spanName = (addSpans spanName) . renumberUids
| yliu120/K3 | src/Language/K3/Transform/Common.hs | apache-2.0 | 3,610 | 0 | 15 | 834 | 1,064 | 559 | 505 | 60 | 10 |
{-# LANGUAGE FlexibleContexts,FlexibleInstances,GADTs,DataKinds#-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE ScopedTypeVariables #-}
{- | This Module essentially stubs out the subset
of Random Access Stack operations we need for efficient substitution environments
-}
module Resin.Environment.Class (
) where
| cartazio/resin | src/Resin/Environment/Class.hs | bsd-2-clause | 320 | 0 | 3 | 41 | 14 | 11 | 3 | 4 | 0 |
module Data.Minecraft.Snapshot192
( module Data.Minecraft.Snapshot192.Protocol
, module Data.Minecraft.Snapshot192.Version
) where
import Data.Minecraft.Snapshot192.Protocol
import Data.Minecraft.Snapshot192.Version
| oldmanmike/hs-minecraft-protocol | src/Data/Minecraft/Snapshot192.hs | bsd-3-clause | 223 | 0 | 5 | 21 | 39 | 28 | 11 | 5 | 0 |
{-# LANGUAGE FlexibleContexts, UndecidableInstances #-}
module Text.Roundtrip.Xml.Parser (
GenXmlParser, XmlParser, runXmlParser, runXmlParser', runXmlParser''
, WithPos, EventWithPos, eventWithPos, eventWithoutPos
, SourceName, Line, Column, ParseError
, EntityRenderer, defaultEntityRenderer
, runXmlParserString, runXmlParserText, runXmlParserLazyText
, runXmlParserByteString, runXmlParserLazyByteString
) where
import Prelude hiding ((*>),(<*))
import Control.Monad (unless, foldM)
import Control.Monad.State
import Control.Monad.Identity (Identity, runIdentity)
import Control.Exception (ErrorCall(..), SomeException, Exception, toException)
import qualified Data.Conduit as C
import qualified Data.Conduit.List as CL
import qualified Text.XML.Stream.Parse as CXP
import qualified Data.Map as Map
import Data.Map (Map)
import qualified Data.List as List
import Data.Typeable (Typeable)
import Data.Either (partitionEithers)
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as BSL
import qualified Data.Text as T
import qualified Data.Text.Lazy as TL
import qualified Debug.Trace
import qualified Text.PrettyPrint.HughesPJ as Pp
import Data.XML.Types
import Text.Roundtrip
import Text.Roundtrip.Parser
import Text.Roundtrip.Xml.ParserInternal
import Text.Roundtrip.Xml.Pretty
type EntityRenderer = T.Text -> Maybe T.Text
defaultEntityRenderer :: EntityRenderer
defaultEntityRenderer = const Nothing
type XmlParser a = GenXmlParser [RtEventWithPos] Identity a
type EventGen a = CXP.ParseSettings -> C.Conduit a (Either SomeException) Event
genEvents :: [a] -> EventGen a -> Either SomeException [Event]
genEvents items f =
CL.sourceList items C.=$= f CXP.def C.$$ CL.consume
-- Parsing a string/text/bytestring into a list of events is done via
-- enumerators. This is not optimal because the resulting list is too strict.
-- However, currently no other functions exist for such a conversion.
runXmlParserGen :: XmlParser a -> SourceName -> EntityRenderer -> [b] -> EventGen b -> (Either ParseError a)
runXmlParserGen p src er items gen =
case genEvents items gen of
Left err -> Left $ mkParseError (initialPos src) (show err)
Right events -> runXmlParser p src er events
runXmlParserString :: XmlParser a -> SourceName -> EntityRenderer -> String -> (Either ParseError a)
runXmlParserString p src e str = runXmlParserGen p src e [T.pack str] CXP.parseText'
runXmlParserText :: XmlParser a -> SourceName -> EntityRenderer -> T.Text -> (Either ParseError a)
runXmlParserText p src e t = runXmlParserGen p src e [t] CXP.parseText'
runXmlParserLazyText :: XmlParser a -> SourceName -> EntityRenderer -> TL.Text -> (Either ParseError a)
runXmlParserLazyText p src e t = runXmlParserGen p src e (TL.toChunks t) CXP.parseText'
runXmlParserByteString :: XmlParser a -> SourceName -> EntityRenderer -> BS.ByteString -> (Either ParseError a)
runXmlParserByteString p src e bs = runXmlParserGen p src e [bs] CXP.parseBytes
runXmlParserLazyByteString :: XmlParser a -> SourceName -> EntityRenderer -> BSL.ByteString -> (Either ParseError a)
runXmlParserLazyByteString p src e bs = runXmlParserGen p src e (BSL.toChunks bs) CXP.parseBytes
runXmlParser :: XmlParser a -> SourceName -> EntityRenderer -> [Event] -> (Either ParseError a)
runXmlParser p sourceName renderer events =
runXmlParser'' p sourceName renderer (map eventWithoutPos events)
runXmlParser' :: XmlParser a -> EntityRenderer -> [EventWithPos] -> (Either ParseError a)
runXmlParser' p renderer events = runXmlParser'' p src renderer events
where
src = case events of
[] -> ""
(e:_) -> sourceName (wp_pos e)
runXmlParser'' :: XmlParser a -> SourceName -> EntityRenderer -> [EventWithPos] -> (Either ParseError a)
runXmlParser'' p sourceName entityRenderer events =
let GenXmlParser q = xmlBeginDoc *> p <* xmlEndDoc
rtEvents = List.unfoldr (simplifyEvents entityRenderer) events
in runParser q Nothing sourceName rtEvents
simplifyEvents :: EntityRenderer -> [EventWithPos] -> Maybe (RtEventWithPos, [EventWithPos])
simplifyEvents renderEntity evs = go evs
where
go evs =
case evs of
[] -> Nothing
(WithPos EventBeginDocument pos : rest) -> Just (WithPos RtBeginDocument pos, rest)
(WithPos EventEndDocument pos : rest) -> Just (WithPos RtEndDocument pos, rest)
(WithPos (EventInstruction _) _ : rest) -> go rest
(WithPos (EventBeginDoctype _ _) _ : rest) -> go rest
(WithPos EventEndDoctype _ : rest) -> go rest
(WithPos (EventBeginElement n as) pos : rest) ->
let insertAttr :: Either T.Text AttrMap -> Attribute
-> Either T.Text AttrMap
insertAttr em (k, vs) =
case em of
Right m ->
case partitionEithers (map contentToText vs) of
((t:_), _) -> Left t
([], vs') -> Right ((k, T.concat vs') : m)
Left t -> Left t
in case Prelude.foldl insertAttr (Right []) as of
Right as' -> as' `seq` Just (WithPos (RtBeginElement n (reverse as')) pos, rest)
Left t -> Just (WithPos (RtInvalidEntity t) pos, [])
(WithPos (EventEndElement n) pos : rest) -> Just (WithPos (RtEndElement n) pos, rest)
(WithPos (EventContent c) pos : rest) ->
case contentToText c of
Left t -> Just (WithPos (RtInvalidEntity t) pos, [])
Right t ->
let (cs, rest') = splitContent rest
in case partitionEithers (map contentToText cs) of
((t:_), _) -> Just (WithPos (RtInvalidEntity t) pos, [])
([], ts) -> let text = T.strip $ t `T.append` T.concat ts
in if T.null text
then go rest'
else Just (WithPos (RtText text) pos, rest')
(WithPos (EventComment _) _ : rest) -> go rest
splitContent (WithPos (EventContent c) pos : rest) =
let (cs, rest') = splitContent rest
in (c:cs, rest')
splitContent l = ([], l)
contentToText c =
case c of
ContentText t -> Right t
ContentEntity t ->
case renderEntity t of
Just t' -> Right t'
Nothing -> Left t
instance (Monad m, Stream s m RtEventWithPos) => IsoFunctor (GenXmlParser s m) where
iso <$> (GenXmlParser p) = GenXmlParser $ parsecApply iso p
instance (Monad m, Stream s m RtEventWithPos) => ProductFunctor (GenXmlParser s m) where
(GenXmlParser p) <*> (GenXmlParser q) = GenXmlParser $ parsecConcat p q
instance (Monad m, Stream s m RtEventWithPos) => Alternative (GenXmlParser s m) where
GenXmlParser p <|> GenXmlParser q = GenXmlParser $ parsecAlternative1Lookahead p q
GenXmlParser p <||> GenXmlParser q = GenXmlParser $ parsecAlternativeInfLookahead p q
empty = GenXmlParser parsecEmpty
instance (Monad m, Stream s m RtEventWithPos) => Syntax (GenXmlParser s m) where
pure x = GenXmlParser (parsecPure x)
instance (Monad m, Stream s m RtEventWithPos) => XmlSyntax (GenXmlParser s m) where
xmlBeginDoc = GenXmlParser xmlParserBeginDoc
xmlEndDoc = GenXmlParser xmlParserEndDoc
xmlBeginElem = GenXmlParser . xmlParserBeginElem
xmlAttrValue = GenXmlParser . xmlParserAttrValue
xmlTextNotEmpty = GenXmlParser xmlParserTextNotEmpty
xmlEndElem = GenXmlParser . xmlParserEndElem
matchEvent :: (Show a, Monad m, Stream s m RtEventWithPos)
=> (RtEvent -> Maybe a) -> String -> PxParser s m a
matchEvent matcher desc =
do state <- getState
case state of
Just _ -> ("cannot match " ++ desc ++ " in state " ++ show state) `debug` parserZero
Nothing -> tokenPrim show (\_ t _ -> wp_pos t) debugMatcher
where
debugMatcher ev =
let res = matcher (wp_data ev)
in ("matching " ++ show ev ++ " against " ++ desc ++ ", result: " ++ show res) `debug` res
mkPxParser :: Monad m => String -> PxParser s m a -> PxParser s m a
mkPxParser msg p = (p <?> msg)
xmlParserBeginDoc :: (Monad m, Stream s m RtEventWithPos) => PxParser s m ()
xmlParserBeginDoc = mkPxParser "begin-document" $
let f RtBeginDocument = Just ()
f _ = Nothing
in matchEvent f "begin-document"
xmlParserEndDoc :: (Monad m, Stream s m RtEventWithPos) => PxParser s m ()
xmlParserEndDoc = mkPxParser "end-document" $
let f RtEndDocument = Just ()
f _ = Nothing
in matchEvent f "end-document"
xmlParserBeginElem :: (Monad m, Stream s m RtEventWithPos) => Name -> PxParser s m ()
xmlParserBeginElem name = mkPxParser ("<" ++ ppStr name ++ " ...>") $
do let f (RtBeginElement name' attrs) | name == name' = Just attrs
f _ = Nothing
attrs <- matchEvent f ("begin-element " ++ ppStr name)
unless (null attrs) (putStateDebug $ Just attrs)
return ()
xmlParserAttrValue :: Monad m => Name -> PxParser s m T.Text
xmlParserAttrValue name = mkPxParser ("attribute " ++ ppStr name) $
do state <- getState
case state of
Nothing -> parserZero
Just m ->
case List.break (\(x,_) -> x == name) m of
(prefix, (_, t) : suffix) ->
do let m' = prefix ++ suffix
if null m'
then putStateDebug Nothing
else putStateDebug (Just m')
return t
_ -> parserZero
xmlParserEndElem :: (Monad m, Stream s m RtEventWithPos) => Name -> PxParser s m ()
xmlParserEndElem name = mkPxParser ("</" ++ ppStr name ++ ">") $
let f (RtEndElement name') | name == name' = Just ()
f _ = Nothing
in matchEvent f ("end-element " ++ ppStr name)
xmlParserTextNotEmpty :: (Monad m, Stream s m RtEventWithPos) => PxParser s m T.Text
xmlParserTextNotEmpty = mkPxParser "text node" $
let f (RtText t) = Just t
f _ = Nothing
in matchEvent f "text node"
-- debug = Debug.Trace.trace
debug _ x = x
putStateDebug x = ("setting state to " ++ show x) `debug` putState x
| skogsbaer/roundtrip-xml | src/Text/Roundtrip/Xml/Parser.hs | bsd-3-clause | 10,363 | 0 | 25 | 2,678 | 3,401 | 1,737 | 1,664 | 187 | 19 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
module Tinfoil.Hash(
hash
, hashSHA256
) where
import Crypto.Hash.Algorithms (SHA256)
import qualified Crypto.Hash as Cryptonite
import qualified Data.ByteArray as BA
import Data.ByteString (ByteString)
import qualified Data.ByteString as BS
import P
import Tinfoil.Data.Hash
hash :: HashFunction -> ByteString -> Hash
hash SHA256 = hashSHA256
hashSHA256 :: ByteString -> Hash
hashSHA256 bs =
let dgst = Cryptonite.hash bs :: Cryptonite.Digest SHA256 in
Hash . BS.pack $ BA.unpack dgst
| ambiata/tinfoil | src/Tinfoil/Hash.hs | bsd-3-clause | 612 | 0 | 10 | 130 | 147 | 86 | 61 | 18 | 1 |
{-# LANGUAGE TypeSynonymInstances, FlexibleInstances, FlexibleContexts, ViewPatterns #-}
module Language.SPL.Data.Instruction where
import Language.SPL.Printer (Pretty,pretty,text,char,vsep,indent,fill,(<>),(<+>))
import Prelude hiding (length)
import Data.List ((\\))
import Data.Sequence (Seq,singleton,viewl,viewr,ViewL(..),ViewR(..),(><))
import Data.Foldable (toList)
type Comment = String
type Label = String
type Size = Int
type Offset = Int
data Register = PC | SP | MP | HP | RR | GP | R6 | R7
deriving (Show, Eq, Ord, Enum)
data Operation = ADD | SUB | MUL | DIV | MOD | NEG
| AND | OR | XOR | NOT
| EQ | NE | GT | LT | GE | LE
| LDC Integer | LDC' Label
| LDS Offset | LDL Offset | LDA Offset | LDR Register | LDH Offset
| STS Offset | STL Offset | STA Offset | STR Register | STH
| LDMS Size Offset | LDML Size Offset | LDMA Size Offset | LDMH Offset Size
| STMS Size Offset | STML Size Offset | STMA Size Offset | STMH Size
| BRA Label | BRT Label | BRF Label
| AJS Offset
| NOP
| HALT | TRAP Integer
| LDRR Register Register
| JSR | BSR | RET
| ANNOTE
| LDAA
| LDLA | LDSA
| LINK | UNLINK
| SWP | SWPR | SWPRR
deriving (Show, Eq)
-- Important lesson: use a separate type for such annotations!
data Instruction = Instruction [Label] Operation [Comment]
deriving (Show,Eq)
type Instructions = Seq Instruction
instruction :: Operation -> Instruction
instruction o = Instruction [] o []
-- Annotatable -----------------------------------------------------------------
class Annotatable a where
(#) :: Label -> a -> Instructions
(##) :: a -> Comment -> Instructions
instance Annotatable Operation where
l # o = singleton $ Instruction [l] o []
o ## c = singleton $ Instruction [] o [c]
instance Annotatable Instruction where
l # (Instruction ls o cs) = singleton $ Instruction (l:ls) o cs
(Instruction ls o cs) ## c = singleton $ Instruction ls o (c:cs)
instance Annotatable Instructions where
l # (viewl -> EmptyL) = l # NOP
l # (viewl -> i :< is) = l # i >< is
(viewr -> EmptyR) ## c = NOP ## c
(viewr -> is :> i) ## c = is >< i ## c
-- Pretty Printer --------------------------------------------------------------
tabstop :: Int
tabstop = 16
instance Pretty Operation where
pretty o = text $ show o \\ ['(', ')', '"', '"', '\''] -- For () around -1, "" around labels and ' of LDC'
instance Pretty Instruction where
pretty (Instruction [] o []) = indent tabstop (pretty o)
pretty (Instruction [l] o []) = fill tabstop (text l <> char ':') <>
pretty o
pretty (Instruction [] o [c]) = indent tabstop (fill tabstop (pretty o)) <>
char ';' <+> text c
pretty (Instruction [l] o [c]) = fill tabstop (text l <> char ':') <>
fill tabstop (pretty o) <>
char ';' <+> text c
pretty _ = error "multiple annotations in instruction not implemented"
instance Pretty Instructions where
pretty = vsep . map pretty . toList
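-- A hedged usage sketch (not part of the original module): (#) attaches a
-- label on the left, (##) attaches a comment on the right, and both also
-- work on whole instruction sequences thanks to the instances above.
exampleBlock :: Instructions
exampleBlock = "main" # (LDC 1 ## "push constant" >< RET ## "return")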
| timjs/spl-compiler | src/Language/SPL/Data/Instruction.hs | bsd-3-clause | 3,371 | 0 | 12 | 1,062 | 1,110 | 617 | 493 | 68 | 1 |
module PeerTrader.Types where
import Data.Text (Text)
type UserLogin = Text
| WraithM/peertrader-backend | src/PeerTrader/Types.hs | bsd-3-clause | 88 | 0 | 5 | 22 | 22 | 14 | 8 | 3 | 0 |
module Problem381 where
import Prime
import Modular
main :: IO ()
-- Compute (p-1)! + (p-2)! + (p-3)! + (p-4)! + (p-5)! mod p using Wilson's theorem:
-- (p-1)! = -1 mod p
-- (p-2)! * (p-1) = -1 mod p => (p-2)! * (-1) = -1 mod p => (p-2)! = 1 mod p
-- (p-3)! * (p-2) = 1 mod p => (p-3)! = inv (p-2)
-- (p-4)! = inv (p-2) * inv (p-3)
-- (p-5)! = inv (p-2) * inv (p-3) * inv (p-4)
-- Total: -1 + 1 + inv (p-2) + inv (p-2)*inv (p-3) + inv (p-2)*inv (p-3)*inv (p-4) mod p
main = print $ sum $ map getS ps
where
start = 5
end = 10 ^ 8
getS p = (`mod` p) $ sum $ scanl1
(\a b -> (a * b) `mod` p)
(map (`modinv` p) [p - 2, p - 3, p - 4])
ps = takeWhile (< end) $ dropWhile (< start) $ getPrimesUpto end
| adityagupta1089/Project-Euler-Haskell | src/problems/Problem381.hs | bsd-3-clause | 684 | 0 | 13 | 195 | 176 | 103 | 73 | 11 | 1 |
{-# LANGUAGE BangPatterns #-}
module Gamgine.Image.PNG.Internal.CRC (update_crc, crc) where
import Data.Word
import Data.Array.Unboxed
import Data.Bits
import qualified Gamgine.Image.PNG.Internal.LBS as LBS
import Gamgine.Image.PNG.Internal.LBS (LBS)
crc_table :: UArray Word32 Word32
crc_table = listArray (0,255) . map iterate_c $ [0..]
where
iterate_c = (!! 8) . iterate compute_c
compute_c c
| c .&. 1 == 1 = 0xedb88320 `xor` (c `shiftR` 1)
| otherwise = c `shiftR` 1
update_crc :: Word32 -> LBS -> Word32
update_crc !c bs
| LBS.null bs = c
| otherwise = let w = LBS.head bs
newcrc = (crc_table ! ((c `xor` fromIntegral w) .&. 0xff)) `xor` (c `shiftR` 8)
in
update_crc newcrc (LBS.tail bs)
crc :: LBS -> Word32
crc = (`xor` 0xffffffff) . update_crc 0xffffffff
--test = crc $ LB.replicate 10000000 128
| dan-t/Gamgine | Gamgine/Image/PNG/Internal/CRC.hs | bsd-3-clause | 951 | 0 | 17 | 272 | 313 | 177 | 136 | 21 | 1 |
-- -----------------------------------------
-- Parse.hs
-- -----------------------------------------
-- Try to parse strings like "46, 28, 0.29" or " 8, 9, 59.98"
--
-- ----------------------------------------
module Parse (getCoord) where
import Text.Parsec
import Text.Parsec.String
import Data.Geo.Swiss.Conversion
comma :: Parser Char
comma = char ','
digitOrDot :: Parser Char
digitOrDot = digit <|> char '.'
parseInt :: Parser Int
parseInt = read `fmap` many1 digit
parseFloat :: Parser Double
parseFloat = read `fmap` many1 digitOrDot
parseCoord :: Parser Degree
parseCoord = do
spaces
d <- parseInt
_ <- comma
spaces
m <- parseInt
_ <- comma
spaces
s <- parseFloat
return $ Deg d m s
getCoord :: String -> String -> Either String Degree
getCoord c s = case parse parseCoord c s of
Left e -> Left $ show e
Right coord -> Right coord
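-- A hedged usage sketch (not part of the original module): the first argument
-- is only a source name used in error messages, the second is the input.
exampleCoord :: Either String Degree
exampleCoord = getCoord "example" "46, 28, 0.29"
-- expected to evaluate to Right (Deg 46 28 0.29)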
| hansroland/ShowOnSwissMap | src/Parse.hs | bsd-3-clause | 903 | 0 | 9 | 195 | 251 | 128 | 123 | 27 | 2 |
module Main where
newtype Age = Age Int
type Age' = Int
newtype Speed = Speed {getSpeed :: Double}
newtype Forget a = Remember {value :: a}
data Tree a = Empty | Tree a [Tree a]
main :: IO ()
main = print "New-Type and haskell"
a :: Int
a = 5
b :: Age'
b = Age 5
c :: Age'
c = 5
d :: Speed
d = Speed 5
e :: Forget Char
e = Remember 'a'
| epsilonhalbe/01_types_and_typos | src/NewtypeExampleBad.hs | bsd-3-clause | 343 | 0 | 8 | 88 | 146 | 86 | 60 | 18 | 1 |
{-# LANGUAGE DataKinds, PolyKinds, TypeFamilies, GADTs, TypeOperators,
DefaultSignatures, ScopedTypeVariables, InstanceSigs,
MultiParamTypeClasses, FunctionalDependencies,
UndecidableInstances, TypeInType #-}
module ByHand2 where
import Prelude hiding ( Eq(..), Ord(..) )
import Data.Singletons
import Data.Proxy
data Nat = Zero | Succ Nat
class Eq a where
(==) :: a -> a -> Bool
(/=) :: a -> a -> Bool
infix 4 ==, /=
x == y = not (x /= y)
x /= y = not (x == y)
instance Eq Nat where
Zero == Zero = True
Zero == Succ _ = False
Succ _ == Zero = False
Succ x == Succ y = x == y
data instance Sing (b :: Bool) where
SFalse :: Sing 'False
STrue :: Sing 'True
data instance Sing (n :: Nat) where
SZero :: Sing 'Zero
SSucc :: Sing n -> Sing ('Succ n)
type family Not (x :: Bool) :: Bool where
Not 'True = 'False
Not 'False = 'True
sNot :: Sing b -> Sing (Not b)
sNot STrue = SFalse
sNot SFalse = STrue
infix 4 :==, :/=
class kproxy ~ 'Proxy => PEq (kproxy :: Proxy a) where
type (:==) (x :: a) (y :: a) :: Bool
type (:/=) (x :: a) (y :: a) :: Bool
type x :== y = Not (x :/= y)
type x :/= y = Not (x :== y)
instance PEq ('Proxy :: Proxy Nat) where
type 'Zero :== 'Zero = 'True
type 'Succ x :== 'Zero = 'False
type 'Zero :== 'Succ x = 'False
type 'Succ x :== 'Succ y = x :== y
class SEq a where
(%:==) :: Sing (x :: a) -> Sing (y :: a) -> Sing (x :== y)
(%:/=) :: Sing (x :: a) -> Sing (y :: a) -> Sing (x :/= y)
default (%:==) :: ((x :== y) ~ (Not (x :/= y))) => Sing (x :: a) -> Sing (y :: a) -> Sing (x :== y)
x %:== y = sNot (x %:/= y)
default (%:/=) :: ((x :/= y) ~ (Not (x :== y))) => Sing (x :: a) -> Sing (y :: a) -> Sing (x :/= y)
x %:/= y = sNot (x %:== y)
instance SEq Nat where
(%:==) :: forall (x :: Nat) (y :: Nat). Sing x -> Sing y -> Sing (x :== y)
SZero %:== SZero = STrue
SSucc _ %:== SZero = SFalse
SZero %:== SSucc _ = SFalse
SSucc x %:== SSucc y = x %:== y
instance Eq Ordering where
LT == LT = True
LT == EQ = False
LT == GT = False
EQ == LT = False
EQ == EQ = True
EQ == GT = False
GT == LT = False
GT == EQ = False
GT == GT = True
class Eq a => Ord a where
compare :: a -> a -> Ordering
(<) :: a -> a -> Bool
x < y = compare x y == LT
class (PEq kproxy, kproxy ~ 'Proxy) => POrd (kproxy :: Proxy a) where
type Compare (x :: a) (y :: a) :: Ordering
type (:<) (x :: a) (y :: a) :: Bool
type x :< y = Compare x y :== 'LT
instance Ord Nat where
compare Zero Zero = EQ
compare Zero (Succ _) = LT
compare (Succ _) Zero = GT
compare (Succ a) (Succ b) = compare a b
instance POrd ('Proxy :: Proxy Nat) where
type Compare 'Zero 'Zero = 'EQ
type Compare 'Zero ('Succ x) = 'LT
type Compare ('Succ x) 'Zero = 'GT
type Compare ('Succ x) ('Succ y) = Compare x y
data instance Sing (o :: Ordering) where
SLT :: Sing 'LT
SEQ :: Sing 'EQ
SGT :: Sing 'GT
instance PEq ('Proxy :: Proxy Ordering) where
type 'LT :== 'LT = 'True
type 'LT :== 'EQ = 'False
type 'LT :== 'GT = 'False
type 'EQ :== 'LT = 'False
type 'EQ :== 'EQ = 'True
type 'EQ :== 'GT = 'False
type 'GT :== 'LT = 'False
type 'GT :== 'EQ = 'False
type 'GT :== 'GT = 'True
instance SEq Ordering where
SLT %:== SLT = STrue
SLT %:== SEQ = SFalse
SLT %:== SGT = SFalse
SEQ %:== SLT = SFalse
SEQ %:== SEQ = STrue
SEQ %:== SGT = SFalse
SGT %:== SLT = SFalse
SGT %:== SEQ = SFalse
SGT %:== SGT = STrue
class SEq a => SOrd a where
sCompare :: Sing (x :: a) -> Sing (y :: a) -> Sing (Compare x y)
(%:<) :: Sing (x :: a) -> Sing (y :: a) -> Sing (x :< y)
default (%:<) :: ((x :< y) ~ (Compare x y :== 'LT)) => Sing (x :: a) -> Sing (y :: a) -> Sing (x :< y)
x %:< y = sCompare x y %:== SLT
instance SOrd Nat where
sCompare SZero SZero = SEQ
sCompare SZero (SSucc _) = SLT
sCompare (SSucc _) SZero = SGT
sCompare (SSucc x) (SSucc y) = sCompare x y
class Pointed a where
point :: a
class kproxy ~ 'Proxy => PPointed (kproxy :: Proxy a) where
type Point :: a
class kproxy ~ 'Proxy => SPointed (kproxy :: Proxy a) where
sPoint :: Sing (Point :: a)
instance Pointed Nat where
point = Zero
instance PPointed ('Proxy :: Proxy Nat) where
type Point = 'Zero
instance SPointed ('Proxy :: Proxy Nat) where
sPoint = SZero
--------------------------------
class FD a b | a -> b where
meth :: a -> a
l2r :: a -> b
instance FD Bool Nat where
meth = not
l2r False = Zero
l2r True = Succ Zero
t1 = meth True
t2 = l2r False
class (kp1 ~ 'Proxy, kp2 ~ 'Proxy) => PFD (kp1 :: Proxy a) (kp2 :: Proxy b) | a -> b where
type Meth (x :: a) :: a
type L2r (x :: a) :: b
instance PFD ('Proxy :: Proxy Bool) ('Proxy :: Proxy Nat) where
type Meth a = Not a
type L2r 'False = 'Zero
type L2r 'True = 'Succ 'Zero
type T1 = Meth 'True
type T2 = L2r 'False
class SFD a b | a -> b where
sMeth :: forall (x :: a). Sing x -> Sing (Meth x :: a)
sL2r :: forall (x :: a). Sing x -> Sing (L2r x :: b)
instance SFD Bool Nat where
sMeth x = sNot x
sL2r SFalse = SZero
sL2r STrue = SSucc SZero
sT1 = sMeth STrue
sT2 :: Sing T2
sT2 = sL2r SFalse
| int-index/singletons | tests/ByHand2.hs | bsd-3-clause | 5,150 | 17 | 14 | 1,392 | 2,665 | 1,380 | 1,285 | 158 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
-----------------------------------------------------------------------------
-- |
-- Module : Control.Spoon
-- Copyright : © 2009 Matt Morrow & Dan Peebles, © 2013 Liyang HU
-- License : see LICENSE
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : non-portable (Scoped Type Variables)
--
-- Two functions for catching pureish exceptions in pure values. This library
-- considers pureish to mean any error call or use of undefined, failed
-- pattern match, arithmetic exception, or array-bounds exception.
--
-----------------------------------------------------------------------------
module Control.Spoon
( Handles
, defaultHandles
, spoon
, spoonWithHandles
, teaspoon
, teaspoonWithHandles
) where
import Control.Exception
import Control.DeepSeq
import System.IO.Unsafe
type Handles a = [Handler (Maybe a)]
{-# INLINEABLE defaultHandles #-}
defaultHandles :: Handles a
defaultHandles =
[ Handler $ \(_ :: ArithException) -> return Nothing
, Handler $ \(_ :: ArrayException) -> return Nothing
, Handler $ \(_ :: ErrorCall) -> return Nothing
, Handler $ \(_ :: PatternMatchFail) -> return Nothing
, Handler $ \(x :: SomeException) -> throwIO x ]
-- | Evaluate a value to normal form and return Nothing if any exceptions are thrown during evaluation. For any error-free value, @spoon = Just@.
{-# INLINEABLE spoonWithHandles #-}
spoonWithHandles :: NFData a => Handles a -> a -> Maybe a
spoonWithHandles handles a = unsafePerformIO $
deepseq a (Just `fmap` return a) `catches` handles
-- | Evaluate a value to normal form and return Nothing if any exceptions are thrown during evaluation. For any error-free value, @spoon = Just@.
{-# INLINE spoon #-}
spoon :: NFData a => a -> Maybe a
spoon = spoonWithHandles defaultHandles
{-# INLINEABLE teaspoonWithHandles #-}
teaspoonWithHandles :: Handles a -> a -> Maybe a
teaspoonWithHandles handles a = unsafePerformIO $
(Just `fmap` evaluate a) `catches` handles
-- | Like 'spoon', but only evaluates to WHNF.
{-# INLINE teaspoon #-}
teaspoon :: a -> Maybe a
teaspoon = teaspoonWithHandles defaultHandles
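-- An illustrative usage sketch (not part of the original module): 'spoon'
-- maps a value whose evaluation raises a pureish exception to Nothing and
-- an error-free value to Just.  The name exampleSpoon is made up.
exampleSpoon :: (Maybe Int, Maybe Int)
exampleSpoon = (spoon (1 + 2), spoon (error "boom"))
-- exampleSpoon == (Just 3, Nothing)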
|
liyang/spoon
|
Control/Spoon.hs
|
bsd-3-clause
| 2,219 | 0 | 10 | 413 | 369 | 211 | 158 | 31 | 1 |
module Fibon.Run.SysTools (
size
, cabal
, diff
)
where
size :: String
size = "size"
cabal :: String
cabal = "cabal"
diff :: String
diff = "diff"
|
dmpots/fibon
|
tools/fibon-run/Fibon/Run/SysTools.hs
|
bsd-3-clause
| 157 | 0 | 4 | 39 | 48 | 30 | 18 | 10 | 1 |
module Main where
import qualified Test.ReadWrite as RW
main :: IO ()
main = RW.main
|
8c6794b6/dph-sndfile
|
example/rw.hs
|
bsd-3-clause
| 86 | 0 | 6 | 16 | 29 | 18 | 11 | 4 | 1 |
module Sound where
import qualified Graphics.UI.SDL.Mixer as Mix
import Graphics.UI.SDL.Audio
import Control.Monad
import Data.IORef
type SoundState = IORef [ Mix.Chunk ]
initSound :: [FilePath] -> IO SoundState
initSound soundFiles = do
Mix.openAudio 44100 AudioS16Sys 2 4096
sounds <- mapM Mix.loadWAV soundFiles
newIORef sounds
playSound :: SoundState -> Int -> IO ()
playSound sstate cid = do
sounds <- readIORef sstate
when (cid < length sounds)
(Mix.playChannel (-1) (sounds !! cid) 0 >> return ())
endSound :: SoundState -> IO ()
endSound sstate = Mix.closeAudio
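-- An illustrative sketch (not part of the original file): a hypothetical
-- caller; the sample file names below are made up.
demoSound :: IO ()
demoSound = do
  sstate <- initSound ["kick.wav", "snare.wav"]
  playSound sstate 0   -- play the first loaded chunk on any free channel
  endSound sstate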
|
timbod7/hbeat
|
Sound.hs
|
bsd-3-clause
| 604 | 0 | 12 | 118 | 215 | 111 | 104 | 18 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveAnyClass #-}
-- | Display attributes
--
-- Attributes have three components: a foreground color, a background
-- color, and a style mask. The simplest attribute is the default
-- attribute, or 'defAttr'. Attributes can be modified with
-- 'withForeColor', 'withBackColor', and 'withStyle', e.g.,
--
-- @
-- defAttr \`withForeColor\` red
-- @
--
-- 'Image' constructors often require an 'Attr' to indicate the
-- attributes used in the image, e.g.,
--
-- @
-- string (defAttr \`withForeColor\` red) "this text will be red"
-- @
--
-- The appearance of 'Image's using 'defAttr' is determined by the
-- terminal, so this is not something VTY can control. The user is free
-- to define the color scheme of the terminal as they see fit.
--
-- The value 'currentAttr' will keep the attributes of whatever was
-- output previously.
module Graphics.Vty.Attributes
( module Graphics.Vty.Attributes.Color
, Attr(..)
, FixedAttr(..)
, MaybeDefault(..)
, defAttr
, currentAttr
-- * Styles
, Style
, withStyle
, standout
, italic
, strikethrough
, underline
, reverseVideo
, blink
, dim
, bold
, defaultStyleMask
, styleMask
, hasStyle
-- * Setting attribute colors
, withForeColor
, withBackColor
-- * Setting hyperlinks
, withURL
)
where
import Control.DeepSeq
import Data.Bits
import Data.Text (Text)
import Data.Word
import GHC.Generics
import Graphics.Vty.Attributes.Color
-- | A display attribute defines the Color and Style of all the
-- characters rendered after the attribute is applied.
--
-- At most 256 colors, picked from a 240 and 16 color palette, are
-- possible for the background and foreground. The 240 colors and
-- 16 colors are points in different palettes. See Color for more
-- information.
data Attr = Attr
{ attrStyle :: !(MaybeDefault Style)
, attrForeColor :: !(MaybeDefault Color)
, attrBackColor :: !(MaybeDefault Color)
, attrURL :: !(MaybeDefault Text)
} deriving ( Eq, Show, Read, Generic, NFData )
-- This could be encoded into a single 32 bit word. The 32 bit word is
-- first divided into 4 groups of 8 bits where: The first group codes
-- what action should be taken with regards to the other groups.
-- XXYYZZ__
-- XX - style action
-- 00 => reset to default
-- 01 => unchanged
-- 10 => set
-- YY - foreground color action
-- 00 => reset to default
-- 01 => unchanged
-- 10 => set
-- ZZ - background color action
-- 00 => reset to default
-- 01 => unchanged
-- 10 => set
-- __ - unused
--
-- Next is the style flags
-- SURBDOI_
-- S - standout
-- U - underline
-- R - reverse video
-- B - blink
-- D - dim
-- O - bold
-- I - italic
-- _ - unused
--
-- Then the foreground color encoded into 8 bits.
-- Then the background color encoded into 8 bits.
-- | Specifies the display attributes such that the final style and
-- color values do not depend on the previously applied display
-- attribute. The display attributes can still depend on the terminal's
-- default colors (unfortunately).
data FixedAttr = FixedAttr
{ fixedStyle :: !Style
, fixedForeColor :: !(Maybe Color)
, fixedBackColor :: !(Maybe Color)
, fixedURL :: !(Maybe Text)
} deriving ( Eq, Show )
-- | The style and color attributes can either be the terminal defaults,
-- be equivalent to the previously applied style, or be set to a specific
-- value.
data MaybeDefault v = Default | KeepCurrent | SetTo !v
deriving (Eq, Read, Show)
instance (NFData v) => NFData (MaybeDefault v) where
rnf Default = ()
rnf KeepCurrent = ()
rnf (SetTo v) = rnf v
-- | Styles are represented as an 8 bit word. Each bit in the word is 1
-- if the style attribute assigned to that bit should be applied and 0
-- if the style attribute should not be applied.
type Style = Word8
-- | Valid style attributes include:
--
-- * standout
--
-- * underline
--
-- * reverseVideo
--
-- * blink
--
-- * dim
--
-- * bold/bright
--
-- * italic
--
-- * strikethrough (via the smxx/rmxx terminfo capabilities)
--
-- (The invisible, protect, and altcharset display attributes some
-- terminals support are not supported via VTY.)
standout, underline, reverseVideo, blink, dim, bold, italic, strikethrough :: Style
standout = 0x01
underline = 0x02
reverseVideo = 0x04
blink = 0x08
dim = 0x10
bold = 0x20
italic = 0x40
strikethrough = 0x80
defaultStyleMask :: Style
defaultStyleMask = 0x00
styleMask :: Attr -> Word8
styleMask attr
= case attrStyle attr of
Default -> 0
KeepCurrent -> 0
SetTo v -> v
-- | True if the given Style value has the specified Style set.
hasStyle :: Style -> Style -> Bool
hasStyle s bitMask = ( s .&. bitMask ) /= 0
-- | Set the foreground color of an `Attr'.
withForeColor :: Attr -> Color -> Attr
withForeColor attr c = attr { attrForeColor = SetTo c }
-- | Set the background color of an `Attr'.
withBackColor :: Attr -> Color -> Attr
withBackColor attr c = attr { attrBackColor = SetTo c }
-- | Add the given style attribute
withStyle :: Attr -> Style -> Attr
withStyle attr 0 = attr
withStyle attr styleFlag = attr { attrStyle = SetTo $ styleMask attr .|. styleFlag }
-- | Add a hyperlinked URL using the proposed [escape sequences for
-- hyperlinked
-- URLs](https://gist.github.com/egmontkob/eb114294efbcd5adb1944c9f3cb5feda).
-- These escape sequences are comparatively new and aren't widely
-- supported in terminal emulators yet, but most terminal emulators
-- that don't know about these sequences will ignore these sequences,
-- and therefore this should fall back sensibly. In some cases they
-- won't and this will result in garbage, so this is why hyperlinking is
-- disabled by default, in which case this combinator has no observable
-- effect. To enable it, enable 'Hyperlink' mode on your Vty output
-- interface.
withURL :: Attr -> Text -> Attr
withURL attr url = attr { attrURL = SetTo url }
-- | Sets the style, background color and foreground color to the
-- default values for the terminal. There is no easy way to determine
-- what the default background and foreground colors are.
defAttr :: Attr
defAttr = Attr Default Default Default Default
-- | Keeps the style, background color and foreground color that was
-- previously set. Used to override some part of the previous style.
--
-- EG: current_style `withForeColor` brightMagenta
--
-- Would be the currently applied style (be it underline, bold, etc) but
-- with the foreground color set to brightMagenta.
currentAttr :: Attr
currentAttr = Attr KeepCurrent KeepCurrent KeepCurrent KeepCurrent
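-- An illustrative sketch (not part of the original module): composing an
-- attribute from the combinators above and testing a style mask.  The
-- names exampleAttr and exampleHasBold are made up; 'red' and 'black'
-- come from Graphics.Vty.Attributes.Color.
exampleAttr :: Attr
exampleAttr = defAttr `withForeColor` red `withBackColor` black `withStyle` bold

exampleHasBold :: Bool
exampleHasBold = (bold .|. underline) `hasStyle` bold  -- True: the bold bit is set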
|
jtdaugherty/vty
|
src/Graphics/Vty/Attributes.hs
|
bsd-3-clause
| 6,859 | 0 | 11 | 1,557 | 840 | 530 | 310 | 101 | 3 |
import Universum
import Test.Hspec (hspec)
import Spec (spec)
import qualified Test.Pos.Core.Bi
import qualified Test.Pos.Core.Json
import qualified Test.Pos.Core.SafeCopy
import Test.Pos.Util.Tripping (runTests)
main :: IO ()
main = do
hspec spec
runTests
[ Test.Pos.Core.Bi.tests
, Test.Pos.Core.Json.tests
, Test.Pos.Core.SafeCopy.tests
]
|
input-output-hk/pos-haskell-prototype
|
core/test/test.hs
|
mit
| 426 | 0 | 9 | 121 | 109 | 69 | 40 | 14 | 1 |
{-# LANGUAGE Rank2Types, OverloadedStrings, CPP #-}
module Utils where
import Data.Generics hiding (typeOf)
import GHC
import GHC.SYB.Utils
-- ghcmod/Language/Haskell/GhcMod/Info.hs
listifySpans :: Typeable a => TypecheckedSource -> (Int, Int) -> [Located a]
listifySpans tcs lc = listifyStaged TypeChecker p tcs
where
p (L spn _) = isGoodSrcSpan spn && spn `spans` lc
-- ghcmod/Language/Haskell/GhcMod/Info.hs
listifyStaged :: Typeable r => Stage -> (r -> Bool) -> GenericQ [r]
listifyStaged s p = everythingStaged s (++) [] ([] `mkQ` (\x -> [x | p x]))
|
bfpg/bfpg-2015-08
|
demo/Utils.hs
|
bsd-2-clause
| 565 | 0 | 12 | 92 | 192 | 106 | 86 | 10 | 1 |
------------------------------------------------------------------------
-- |
-- Module : Main
-- Copyright : (c) Amy de Buitléir 2012-2016
-- License : BSD-style
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- Runs the QuickCheck tests.
--
------------------------------------------------------------------------
module Main where
import ALife.Creatur.Wain.ImageID.ActionQC (test)
import ALife.Creatur.Wain.ImageID.ExperimentQC (test)
import Test.Framework as TF (defaultMain, Test)
tests :: [TF.Test]
tests =
[
-- In increasing order of complexity
ALife.Creatur.Wain.ImageID.ActionQC.test,
ALife.Creatur.Wain.ImageID.ExperimentQC.test
]
main :: IO ()
main = defaultMain tests
|
mhwombat/numeral-wains
|
test/Main.hs
|
bsd-3-clause
| 766 | 0 | 6 | 117 | 109 | 76 | 33 | 11 | 1 |
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
--
-- Stg to C--: heap management functions
--
-- (c) The University of Glasgow 2004-2006
--
-----------------------------------------------------------------------------
module StgCmmHeap (
getVirtHp, setVirtHp, setRealHp,
getHpRelOffset,
entryHeapCheck, altHeapCheck, noEscapeHeapCheck, altHeapCheckReturnsTo,
heapStackCheckGen,
entryHeapCheck',
mkStaticClosureFields, mkStaticClosure,
allocDynClosure, allocDynClosureCmm, allocHeapClosure,
emitSetDynHdr
) where
#include "HsVersions.h"
import StgSyn
import CLabel
import StgCmmLayout
import StgCmmUtils
import StgCmmMonad
import StgCmmProf (profDynAlloc, dynProfHdr, staticProfHdr)
import StgCmmTicky
import StgCmmClosure
import StgCmmEnv
import MkGraph
import Hoopl
import SMRep
import Cmm
import CmmUtils
import CostCentre
import IdInfo( CafInfo(..), mayHaveCafRefs )
import Id ( Id )
import Module
import DynFlags
import FastString( mkFastString, fsLit )
#if __GLASGOW_HASKELL__ >= 709
import Prelude hiding ((<*>))
#endif
import Control.Monad (when)
import Data.Maybe (isJust)
-----------------------------------------------------------
-- Initialise dynamic heap objects
-----------------------------------------------------------
allocDynClosure
:: Maybe Id
-> CmmInfoTable
-> LambdaFormInfo
-> CmmExpr -- Cost Centre to stick in the object
-> CmmExpr -- Cost Centre to blame for this alloc
-- (usually the same; sometimes "OVERHEAD")
-> [(NonVoid StgArg, VirtualHpOffset)] -- Offsets from start of object
-- ie Info ptr has offset zero.
-- No void args in here
-> FCode CmmExpr -- returns Hp+n
allocDynClosureCmm
:: Maybe Id -> CmmInfoTable -> LambdaFormInfo -> CmmExpr -> CmmExpr
-> [(CmmExpr, ByteOff)]
-> FCode CmmExpr -- returns Hp+n
-- allocDynClosure allocates the thing in the heap,
-- and modifies the virtual Hp to account for this.
-- The second return value is the graph that sets the value of the
-- returned LocalReg, which should point to the closure after executing
-- the graph.
-- allocDynClosure returns an (Hp+8) CmmExpr, and hence the result is
-- only valid until Hp is changed. The caller should assign the
-- result to a LocalReg if it is required to remain live.
--
-- The reason we don't assign it to a LocalReg here is that the caller
-- is often about to call regIdInfo, which immediately assigns the
-- result of allocDynClosure to a new temp in order to add the tag.
-- So by not generating a LocalReg here we avoid a common source of
-- new temporaries and save some compile time. This can be quite
-- significant - see test T4801.
allocDynClosure mb_id info_tbl lf_info use_cc _blame_cc args_w_offsets = do
let (args, offsets) = unzip args_w_offsets
cmm_args <- mapM getArgAmode args -- No void args
allocDynClosureCmm mb_id info_tbl lf_info
use_cc _blame_cc (zip cmm_args offsets)
allocDynClosureCmm mb_id info_tbl lf_info use_cc _blame_cc amodes_w_offsets = do
-- SAY WHAT WE ARE ABOUT TO DO
let rep = cit_rep info_tbl
tickyDynAlloc mb_id rep lf_info
let info_ptr = CmmLit (CmmLabel (cit_lbl info_tbl))
allocHeapClosure rep info_ptr use_cc amodes_w_offsets
-- | Low-level heap object allocation.
allocHeapClosure
:: SMRep -- ^ representation of the object
-> CmmExpr -- ^ info pointer
-> CmmExpr -- ^ cost centre
-> [(CmmExpr,ByteOff)] -- ^ payload
-> FCode CmmExpr -- ^ returns the address of the object
allocHeapClosure rep info_ptr use_cc payload = do
profDynAlloc rep use_cc
virt_hp <- getVirtHp
-- Find the offset of the info-ptr word
let info_offset = virt_hp + 1
-- info_offset is the VirtualHpOffset of the first
-- word of the new object
-- Remember, virtHp points to last allocated word,
-- ie 1 *before* the info-ptr word of new object.
base <- getHpRelOffset info_offset
emitComment $ mkFastString "allocHeapClosure"
emitSetDynHdr base info_ptr use_cc
-- Fill in the fields
hpStore base payload
-- Bump the virtual heap pointer
dflags <- getDynFlags
setVirtHp (virt_hp + heapClosureSizeW dflags rep)
return base
emitSetDynHdr :: CmmExpr -> CmmExpr -> CmmExpr -> FCode ()
emitSetDynHdr base info_ptr ccs
= do dflags <- getDynFlags
hpStore base (zip (header dflags) [0, wORD_SIZE dflags ..])
where
header :: DynFlags -> [CmmExpr]
header dflags = [info_ptr] ++ dynProfHdr dflags ccs
-- ToDo: Parallel stuff
-- No ticky header
-- Store the item (expr,off) in base[off]
hpStore :: CmmExpr -> [(CmmExpr, ByteOff)] -> FCode ()
hpStore base vals = do
dflags <- getDynFlags
sequence_ $
[ emitStore (cmmOffsetB dflags base off) val | (val,off) <- vals ]
-----------------------------------------------------------
-- Layout of static closures
-----------------------------------------------------------
-- Make a static closure, adding on any extra padding needed for CAFs,
-- and adding a static link field if necessary.
mkStaticClosureFields
:: DynFlags
-> CmmInfoTable
-> CostCentreStack
-> CafInfo
-> [CmmLit] -- Payload
-> [CmmLit] -- The full closure
mkStaticClosureFields dflags info_tbl ccs caf_refs payload
= mkStaticClosure dflags info_lbl ccs payload padding
static_link_field saved_info_field
where
info_lbl = cit_lbl info_tbl
-- CAFs must have consistent layout, regardless of whether they
-- are actually updatable or not. The layout of a CAF is:
--
-- 3 saved_info
-- 2 static_link
-- 1 indirectee
-- 0 info ptr
--
-- the static_link and saved_info fields must always be in the
-- same place. So we use isThunkRep rather than closureUpdReqd
-- here:
is_caf = isThunkRep (cit_rep info_tbl)
padding
| is_caf && null payload = [mkIntCLit dflags 0]
| otherwise = []
static_link_field
| is_caf || staticClosureNeedsLink (mayHaveCafRefs caf_refs) info_tbl
= [static_link_value]
| otherwise
= []
saved_info_field
| is_caf = [mkIntCLit dflags 0]
| otherwise = []
-- For a static constructor which has NoCafRefs, we set the
-- static link field to a non-zero value so the garbage
-- collector will ignore it.
static_link_value
| mayHaveCafRefs caf_refs = mkIntCLit dflags 0
| otherwise = mkIntCLit dflags 3 -- No CAF refs
-- See Note [STATIC_LINK fields]
-- in rts/sm/Storage.h
mkStaticClosure :: DynFlags -> CLabel -> CostCentreStack -> [CmmLit]
-> [CmmLit] -> [CmmLit] -> [CmmLit] -> [CmmLit]
mkStaticClosure dflags info_lbl ccs payload padding static_link_field saved_info_field
= [CmmLabel info_lbl]
++ staticProfHdr dflags ccs
++ concatMap (padLitToWord dflags) payload
++ padding
++ static_link_field
++ saved_info_field
-- JD: Simon had elided this padding, but without it the C back end asserts
-- failure. Maybe it's a bad assertion, and this padding is indeed unnecessary?
padLitToWord :: DynFlags -> CmmLit -> [CmmLit]
padLitToWord dflags lit = lit : padding pad_length
where width = typeWidth (cmmLitType dflags lit)
pad_length = wORD_SIZE dflags - widthInBytes width :: Int
padding n | n <= 0 = []
| n `rem` 2 /= 0 = CmmInt 0 W8 : padding (n-1)
| n `rem` 4 /= 0 = CmmInt 0 W16 : padding (n-2)
| n `rem` 8 /= 0 = CmmInt 0 W32 : padding (n-4)
| otherwise = CmmInt 0 W64 : padding (n-8)
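-- A worked example (a hedged illustration, not in the original source):
-- with 8-byte words, a W8 literal leaves pad_length = 7, and
--   padding 7 = [CmmInt 0 W8, CmmInt 0 W16, CmmInt 0 W32]   -- 1 + 2 + 4 bytes
-- i.e. an odd deficit is filled byte-first, then with progressively
-- wider zero literals.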
-----------------------------------------------------------
-- Heap overflow checking
-----------------------------------------------------------
{- Note [Heap checks]
~~~~~~~~~~~~~~~~~~
Heap checks come in various forms. We provide the following entry
points to the runtime system, all of which use the native C-- entry
convention.
* gc() performs garbage collection and returns
nothing to its caller
* A series of canned entry points like
r = gc_1p( r )
where r is a pointer. This performs gc, and
then returns its argument r to its caller.
* A series of canned entry points like
gcfun_2p( f, x, y )
where f is a function closure of arity 2
This performs garbage collection, keeping alive the
three argument ptrs, and then tail-calls f(x,y)
These are used in the following circumstances
* entryHeapCheck: Function entry
(a) With a canned GC entry sequence
f( f_clo, x:ptr, y:ptr ) {
Hp = Hp+8
if Hp > HpLim goto L
...
L: HpAlloc = 8
jump gcfun_2p( f_clo, x, y ) }
Note the tail call to the garbage collector;
it should do no register shuffling
(b) No canned sequence
f( f_clo, x:ptr, y:ptr, ...etc... ) {
T: Hp = Hp+8
if Hp > HpLim goto L
...
L: HpAlloc = 8
call gc() -- Needs an info table
goto T }
* altHeapCheck: Immediately following an eval
Started as
case f x y of r { (p,q) -> rhs }
(a) With a canned sequence for the results of f
(which is the very common case since
all boxed cases return just one pointer
...
r = f( x, y )
K: -- K needs an info table
Hp = Hp+8
if Hp > HpLim goto L
...code for rhs...
L: r = gc_1p( r )
goto K }
Here, the info table needed by the call
to gc_1p should be the *same* as the
one for the call to f; the C-- optimiser
spots this sharing opportunity)
(b) No canned sequence for results of f
Note second info table
...
(r1,r2,r3) = call f( x, y )
K:
Hp = Hp+8
if Hp > HpLim goto L
...code for rhs...
L: call gc() -- Extra info table here
goto K
* generalHeapCheck: Anywhere else
e.g. entry to thunk
case branch *not* following eval,
or let-no-escape
Exactly the same as the previous case:
K: -- K needs an info table
Hp = Hp+8
if Hp > HpLim goto L
...
L: call gc()
goto K
-}
--------------------------------------------------------------
-- A heap/stack check at a function or thunk entry point.
entryHeapCheck :: ClosureInfo
-> Maybe LocalReg -- Function (closure environment)
-> Int -- Arity -- not same as len args b/c of voids
-> [LocalReg] -- Non-void args (empty for thunk)
-> FCode ()
-> FCode ()
entryHeapCheck cl_info nodeSet arity args code
= entryHeapCheck' is_fastf node arity args code
where
node = case nodeSet of
Just r -> CmmReg (CmmLocal r)
Nothing -> CmmLit (CmmLabel $ staticClosureLabel cl_info)
is_fastf = case closureFunInfo cl_info of
Just (_, ArgGen _) -> False
_otherwise -> True
-- | lower-level version for CmmParse
entryHeapCheck' :: Bool -- is a known function pattern
-> CmmExpr -- expression for the closure pointer
-> Int -- Arity -- not same as len args b/c of voids
-> [LocalReg] -- Non-void args (empty for thunk)
-> FCode ()
-> FCode ()
entryHeapCheck' is_fastf node arity args code
= do dflags <- getDynFlags
let is_thunk = arity == 0
args' = map (CmmReg . CmmLocal) args
stg_gc_fun = CmmReg (CmmGlobal GCFun)
stg_gc_enter1 = CmmReg (CmmGlobal GCEnter1)
{- Thunks: jump stg_gc_enter_1
Function (fast): call (NativeNode) stg_gc_fun(fun, args)
Function (slow): call (slow) stg_gc_fun(fun, args)
-}
gc_call upd
| is_thunk
= mkJump dflags NativeNodeCall stg_gc_enter1 [node] upd
| is_fastf
= mkJump dflags NativeNodeCall stg_gc_fun (node : args') upd
| otherwise
= mkJump dflags Slow stg_gc_fun (node : args') upd
updfr_sz <- getUpdFrameOff
loop_id <- newLabelC
emitLabel loop_id
heapCheck True True (gc_call updfr_sz <*> mkBranch loop_id) code
-- ------------------------------------------------------------
-- A heap/stack check in a case alternative
-- If there are multiple alts and we need to GC, but don't have a
-- continuation already (the scrut was simple), then we should
-- pre-generate the continuation. (if there are multiple alts it is
-- always a canned GC point).
-- altHeapCheck:
-- If we have a return continuation,
-- then if it is a canned GC pattern,
-- then we do mkJumpReturnsTo
-- else we do a normal call to stg_gc_noregs
-- else if it is a canned GC pattern,
-- then generate the continuation and do mkCallReturnsTo
-- else we do a normal call to stg_gc_noregs
altHeapCheck :: [LocalReg] -> FCode a -> FCode a
altHeapCheck regs code = altOrNoEscapeHeapCheck False regs code
altOrNoEscapeHeapCheck :: Bool -> [LocalReg] -> FCode a -> FCode a
altOrNoEscapeHeapCheck checkYield regs code = do
dflags <- getDynFlags
case cannedGCEntryPoint dflags regs of
Nothing -> genericGC checkYield code
Just gc -> do
lret <- newLabelC
let (off, _, copyin) = copyInOflow dflags NativeReturn (Young lret) regs []
lcont <- newLabelC
tscope <- getTickScope
emitOutOfLine lret (copyin <*> mkBranch lcont, tscope)
emitLabel lcont
cannedGCReturnsTo checkYield False gc regs lret off code
altHeapCheckReturnsTo :: [LocalReg] -> Label -> ByteOff -> FCode a -> FCode a
altHeapCheckReturnsTo regs lret off code
= do dflags <- getDynFlags
case cannedGCEntryPoint dflags regs of
Nothing -> genericGC False code
Just gc -> cannedGCReturnsTo False True gc regs lret off code
-- noEscapeHeapCheck is implemented identically to altHeapCheck (which
-- is more efficient), but cannot be optimized away in the non-allocating
-- case because it may occur in a loop
noEscapeHeapCheck :: [LocalReg] -> FCode a -> FCode a
noEscapeHeapCheck regs code = altOrNoEscapeHeapCheck True regs code
cannedGCReturnsTo :: Bool -> Bool -> CmmExpr -> [LocalReg] -> Label -> ByteOff
-> FCode a
-> FCode a
cannedGCReturnsTo checkYield cont_on_stack gc regs lret off code
= do dflags <- getDynFlags
updfr_sz <- getUpdFrameOff
heapCheck False checkYield (gc_call dflags gc updfr_sz) code
where
reg_exprs = map (CmmReg . CmmLocal) regs
-- Note [stg_gc arguments]
-- NB. we use the NativeReturn convention for passing arguments
-- to the canned heap-check routines, because we are in a case
-- alternative and hence the [LocalReg] was passed to us in the
-- NativeReturn convention.
gc_call dflags label sp
| cont_on_stack
= mkJumpReturnsTo dflags label NativeReturn reg_exprs lret off sp
| otherwise
= mkCallReturnsTo dflags label NativeReturn reg_exprs lret off sp []
genericGC :: Bool -> FCode a -> FCode a
genericGC checkYield code
= do updfr_sz <- getUpdFrameOff
lretry <- newLabelC
emitLabel lretry
call <- mkCall generic_gc (GC, GC) [] [] updfr_sz []
heapCheck False checkYield (call <*> mkBranch lretry) code
cannedGCEntryPoint :: DynFlags -> [LocalReg] -> Maybe CmmExpr
cannedGCEntryPoint dflags regs
= case map localRegType regs of
[] -> Just (mkGcLabel "stg_gc_noregs")
[ty]
| isGcPtrType ty -> Just (mkGcLabel "stg_gc_unpt_r1")
| isFloatType ty -> case width of
W32 -> Just (mkGcLabel "stg_gc_f1")
W64 -> Just (mkGcLabel "stg_gc_d1")
_ -> Nothing
| width == wordWidth dflags -> Just (mkGcLabel "stg_gc_unbx_r1")
| width == W64 -> Just (mkGcLabel "stg_gc_l1")
| otherwise -> Nothing
where
width = typeWidth ty
[ty1,ty2]
| isGcPtrType ty1
&& isGcPtrType ty2 -> Just (mkGcLabel "stg_gc_pp")
[ty1,ty2,ty3]
| isGcPtrType ty1
&& isGcPtrType ty2
&& isGcPtrType ty3 -> Just (mkGcLabel "stg_gc_ppp")
[ty1,ty2,ty3,ty4]
| isGcPtrType ty1
&& isGcPtrType ty2
&& isGcPtrType ty3
&& isGcPtrType ty4 -> Just (mkGcLabel "stg_gc_pppp")
_otherwise -> Nothing
-- Note [stg_gc arguments]
-- It might seem that we could avoid passing the arguments to the
-- stg_gc function, because they are already in the right registers.
-- While this is usually the case, it isn't always. Sometimes the
-- code generator has cleverly avoided the eval in a case, e.g. in
-- ffi/should_run/4221.hs we found
--
-- case a_r1mb of z
-- FunPtr x y -> ...
--
-- where a_r1mb is bound to a top-level constructor, and is known to be
-- evaluated. The codegen just assigns x, y and z, and continues;
-- R1 is never assigned.
--
-- So we'll have to rely on optimisations to eliminate these
-- assignments where possible.
-- | The generic GC procedure; no params, no results
generic_gc :: CmmExpr
generic_gc = mkGcLabel "stg_gc_noregs"
-- | Create a CLabel for calling a garbage collector entry point
mkGcLabel :: String -> CmmExpr
mkGcLabel s = CmmLit (CmmLabel (mkCmmCodeLabel rtsPackageKey (fsLit s)))
-------------------------------
heapCheck :: Bool -> Bool -> CmmAGraph -> FCode a -> FCode a
heapCheck checkStack checkYield do_gc code
= getHeapUsage $ \ hpHw ->
-- Emit heap checks, but be sure to do it lazily so
-- that the conditionals on hpHw don't cause a black hole
do { dflags <- getDynFlags
; let mb_alloc_bytes
| hpHw > 0 = Just (mkIntExpr dflags (hpHw * (wORD_SIZE dflags)))
| otherwise = Nothing
stk_hwm | checkStack = Just (CmmLit CmmHighStackMark)
| otherwise = Nothing
; codeOnly $ do_checks stk_hwm checkYield mb_alloc_bytes do_gc
; tickyAllocHeap True hpHw
; setRealHp hpHw
; code }
heapStackCheckGen :: Maybe CmmExpr -> Maybe CmmExpr -> FCode ()
heapStackCheckGen stk_hwm mb_bytes
= do updfr_sz <- getUpdFrameOff
lretry <- newLabelC
emitLabel lretry
call <- mkCall generic_gc (GC, GC) [] [] updfr_sz []
do_checks stk_hwm False mb_bytes (call <*> mkBranch lretry)
-- Note [Single stack check]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~
-- When compiling a function we can determine how much stack space it
-- will use. We therefore need to perform only a single stack check at
-- the beginning of a function to see if we have enough stack space.
--
-- The check boils down to comparing Sp-N with SpLim, where N is the
-- amount of stack space needed (see Note [Stack usage] below). *BUT*
-- at this stage of the pipeline we are not supposed to refer to Sp
-- itself, because the stack is not yet manifest, so we don't quite
-- know where Sp is pointing.
-- So instead of referring directly to Sp - as we used to do in the
-- past - the code generator uses (old + 0) in the stack check. That
-- is the address of the first word of the old area, so if we add N
-- we'll get the address of highest used word.
--
-- This makes the check robust. For example, while we need to perform
-- only one stack check for each function, we could in theory place
-- more stack checks later in the function. They would be redundant,
-- but not incorrect (in a sense that they should not change program
-- behaviour). We need to make sure however that a stack check
-- inserted after incrementing the stack pointer checks for a
-- respectively smaller stack space. This would not be the case if the
-- code generator produced direct references to Sp. By referencing
-- (old + 0) we make sure that we always check for a correct amount of
-- stack: when converting (old + 0) to Sp the stack layout phase takes
-- into account changes already made to stack pointer. The idea for
-- this change came from observations made while debugging #8275.
-- Note [Stack usage]
-- ~~~~~~~~~~~~~~~~~~
-- At the moment we convert from STG to Cmm we don't know N, the
-- number of bytes of stack that the function will use, so we use a
-- special late-bound CmmLit, namely
-- CmmHighStackMark
-- to stand for the number of bytes needed. When the stack is made
-- manifest, the number of bytes needed is calculated, and used to
-- replace occurrences of CmmHighStackMark
--
-- The (Maybe CmmExpr) passed to do_checks is usually
-- Just (CmmLit CmmHighStackMark)
-- but can also (in certain hand-written RTS functions)
-- Just (CmmLit 8) or some other fixed value
-- If it is Nothing, we don't generate a stack check at all.
do_checks :: Maybe CmmExpr -- Should we check the stack?
-- See Note [Stack usage]
-> Bool -- Should we check for preemption?
-> Maybe CmmExpr -- Heap headroom (bytes)
-> CmmAGraph -- What to do on failure
-> FCode ()
do_checks mb_stk_hwm checkYield mb_alloc_lit do_gc = do
dflags <- getDynFlags
gc_id <- newLabelC
let
Just alloc_lit = mb_alloc_lit
bump_hp = cmmOffsetExprB dflags (CmmReg hpReg) alloc_lit
-- Sp overflow if ((old + 0) - CmmHighStack < SpLim)
-- At the beginning of a function old + 0 = Sp
-- See Note [Single stack check]
sp_oflo sp_hwm =
CmmMachOp (mo_wordULt dflags)
[CmmMachOp (MO_Sub (typeWidth (cmmRegType dflags spReg)))
[CmmStackSlot Old 0, sp_hwm],
CmmReg spLimReg]
-- Hp overflow if (Hp > HpLim)
-- (Hp has been incremented by now)
-- HpLim points to the LAST WORD of valid allocation space.
hp_oflo = CmmMachOp (mo_wordUGt dflags)
[CmmReg hpReg, CmmReg (CmmGlobal HpLim)]
alloc_n = mkAssign (CmmGlobal HpAlloc) alloc_lit
case mb_stk_hwm of
Nothing -> return ()
Just stk_hwm -> tickyStackCheck >> (emit =<< mkCmmIfGoto (sp_oflo stk_hwm) gc_id)
-- Emit new label that might potentially be a header
-- of a self-recursive tail call.
-- See Note [Self-recursive loop header].
self_loop_info <- getSelfLoop
case self_loop_info of
Just (_, loop_header_id, _)
| checkYield && isJust mb_stk_hwm -> emitLabel loop_header_id
_otherwise -> return ()
if (isJust mb_alloc_lit)
then do
tickyHeapCheck
emitAssign hpReg bump_hp
emit =<< mkCmmIfThen hp_oflo (alloc_n <*> mkBranch gc_id)
else do
when (checkYield && not (gopt Opt_OmitYields dflags)) $ do
-- Yielding if HpLim == 0
let yielding = CmmMachOp (mo_wordEq dflags)
[CmmReg (CmmGlobal HpLim),
CmmLit (zeroCLit dflags)]
emit =<< mkCmmIfGoto yielding gc_id
tscope <- getTickScope
emitOutOfLine gc_id
(do_gc, tscope) -- this is expected to jump back somewhere
-- Test for stack pointer exhaustion, then
-- bump heap pointer, and test for heap exhaustion
-- Note that we don't move the heap pointer unless the
-- stack check succeeds. Otherwise we might end up
-- with slop at the end of the current block, which can
-- confuse the LDV profiler.
-- Note [Self-recursive loop header]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- Self-recursive loop header is required by loopification optimization (See
-- Note [Self-recursive tail calls] in StgCmmExpr). We emit it if:
--
-- 1. There is information about self-loop in the FCode environment. We don't
-- check the binder (first component of the self_loop_info) because we are
-- certain that if the self-loop info is present then we are compiling the
-- binder body. Reason: the only possible way to get here with the
-- self_loop_info present is from closureCodeBody.
--
-- 2. checkYield && isJust mb_stk_hwm. checkYield tells us that it is possible
-- to preempt the heap check (see #367 for motivation behind this check). It
-- is True for heap checks placed at the entry to a function and
-- let-no-escape heap checks but false for other heap checks (eg. in case
-- alternatives or created from hand-written high-level Cmm). The second
-- check (isJust mb_stk_hwm) is true for heap checks at the entry to a
-- function and some heap checks created in hand-written Cmm. Otherwise it
-- is Nothing. In other words the only situation when both conditions are
-- true is when compiling stack and heap checks at the entry to a
-- function. This is the only situation when we want to emit a self-loop
-- label.
|
TomMD/ghc
|
compiler/codeGen/StgCmmHeap.hs
|
bsd-3-clause
| 25,605 | 0 | 20 | 7,240 | 3,739 | 1,939 | 1,800 | 298 | 8 |
{-
ToDo [Oct 2013]
~~~~~~~~~~~~~~~
1. Nuke ForceSpecConstr for good (it is subsumed by GHC.Types.SPEC in ghc-prim)
2. Nuke NoSpecConstr
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[SpecConstr]{Specialise over constructors}
-}
{-# LANGUAGE CPP #-}
module SpecConstr(
specConstrProgram,
SpecConstrAnnotation(..)
) where
#include "HsVersions.h"
import GhcPrelude
import CoreSyn
import CoreSubst
import CoreUtils
import CoreUnfold ( couldBeSmallEnoughToInline )
import CoreFVs ( exprsFreeVarsList )
import CoreMonad
import Literal ( litIsLifted )
import HscTypes ( ModGuts(..) )
import WwLib ( isWorkerSmallEnough, mkWorkerArgs )
import DataCon
import Coercion hiding( substCo )
import Rules
import Type hiding ( substTy )
import TyCon ( tyConName )
import Id
import PprCore ( pprParendExpr )
import MkCore ( mkImpossibleExpr )
import Var
import VarEnv
import VarSet
import Name
import BasicTypes
import DynFlags ( DynFlags(..), GeneralFlag( Opt_SpecConstrKeen )
, gopt, hasPprDebug )
import Maybes ( orElse, catMaybes, isJust, isNothing )
import Demand
import GHC.Serialized ( deserializeWithData )
import Util
import Pair
import UniqSupply
import Outputable
import FastString
import UniqFM
import MonadUtils
import Control.Monad ( zipWithM )
import Data.List
import PrelNames ( specTyConName )
import Module
import TyCon ( TyCon )
import GHC.Exts( SpecConstrAnnotation(..) )
import Data.Ord( comparing )
{-
-----------------------------------------------------
Game plan
-----------------------------------------------------
Consider
drop n [] = []
drop 0 xs = []
drop n (x:xs) = drop (n-1) xs
After the first time round, we could pass n unboxed. This happens in
numerical code too. Here's what it looks like in Core:
drop n xs = case xs of
[] -> []
(y:ys) -> case n of
I# n# -> case n# of
0 -> []
_ -> drop (I# (n# -# 1#)) xs
Notice that the recursive call has an explicit constructor as argument.
Noticing this, we can make a specialised version of drop
RULE: drop (I# n#) xs ==> drop' n# xs
drop' n# xs = let n = I# n# in ...orig RHS...
Now the simplifier will apply the specialisation in the rhs of drop', giving
drop' n# xs = case xs of
[] -> []
(y:ys) -> case n# of
0 -> []
_ -> drop' (n# -# 1#) xs
Much better!
We'd also like to catch cases where a parameter is carried along unchanged,
but evaluated each time round the loop:
f i n = if i>0 || i>n then i else f (i*2) n
Here f isn't strict in n, but we'd like to avoid evaluating it each iteration.
In Core, by the time we've w/wd (f is strict in i) we get
f i# n = case i# ># 0 of
False -> I# i#
True -> case n of { I# n# ->
case i# ># n# of
False -> I# i#
True -> f (i# *# 2#) n
At the call to f, we see that the argument, n is known to be (I# n#),
and n is evaluated elsewhere in the body of f, so we can play the same
trick as above.
Note [Reboxing]
~~~~~~~~~~~~~~~
We must be careful not to allocate the same constructor twice. Consider
f p = (...(case p of (a,b) -> e)...p...,
...let t = (r,s) in ...t...(f t)...)
At the recursive call to f, we can see that t is a pair. But we do NOT want
to make a specialised copy:
f' a b = let p = (a,b) in (..., ...)
because now t is allocated by the caller, then r and s are passed to the
recursive call, which allocates the (r,s) pair again.
This happens if
(a) the argument p is used in other than a case-scrutinisation way.
(b) the argument to the call is not a 'fresh' tuple; you have to
look into its unfolding to see that it's a tuple
Hence the "OR" part of Note [Good arguments] below.
ALTERNATIVE 2: pass both boxed and unboxed versions. This no longer saves
allocation, but does perhaps save evals. In the RULE we'd have
something like
f (I# x#) = f' (I# x#) x#
If at the call site the (I# x) was an unfolding, then we'd have to
rely on CSE to eliminate the duplicate allocation.... This alternative
doesn't look attractive enough to pursue.
ALTERNATIVE 3: ignore the reboxing problem. The trouble is that
the conservative reboxing story prevents many useful functions from being
specialised. Example:
foo :: Maybe Int -> Int -> Int
foo (Just m) 0 = 0
foo x@(Just m) n = foo x (n-m)
Here the use of 'x' will clearly not require boxing in the specialised function.
The strictness analyser has the same problem, in fact. Example:
f p@(a,b) = ...
If we pass just 'a' and 'b' to the worker, it might need to rebox the
pair to create (a,b). A more sophisticated analysis might figure out
precisely the cases in which this could happen, but the strictness
analyser does no such analysis; it just passes 'a' and 'b', and hopes
for the best.
So my current choice is to make SpecConstr similarly aggressive, and
ignore the bad potential of reboxing.
Note [Good arguments]
~~~~~~~~~~~~~~~~~~~~~
So we look for
* A self-recursive function. Ignore mutual recursion for now,
because it's less common, and the code is simpler for self-recursion.
* EITHER
a) At a recursive call, one or more parameters is an explicit
constructor application
AND
That same parameter is scrutinised by a case somewhere in
the RHS of the function
OR
b) At a recursive call, one or more parameters has an unfolding
that is an explicit constructor application
AND
That same parameter is scrutinised by a case somewhere in
the RHS of the function
AND
Those are the only uses of the parameter (see Note [Reboxing])
What to abstract over
~~~~~~~~~~~~~~~~~~~~~
There's a bit of a complication with type arguments. If the call
site looks like
f p = ...f ((:) [a] x xs)...
then our specialised function look like
f_spec x xs = let p = (:) [a] x xs in ....as before....
This only makes sense if either
a) the type variable 'a' is in scope at the top of f, or
b) the type variable 'a' is an argument to f (and hence fs)
Actually, (a) may hold for value arguments too, in which case
we may not want to pass them. Suppose 'x' is in scope at f's
defn, but xs is not. Then we'd like
f_spec xs = let p = (:) [a] x xs in ....as before....
Similarly (b) may hold too. If x is already an argument at the
call, no need to pass it again.
Finally, if 'a' is not in scope at the call site, we could abstract
it as we do the term variables:
f_spec a x xs = let p = (:) [a] x xs in ...as before...
So the grand plan is:
* abstract the call site to a constructor-only pattern
e.g. C x (D (f p) (g q)) ==> C s1 (D s2 s3)
* Find the free variables of the abstracted pattern
* Pass these variables, less any that are in scope at
the fn defn. But see Note [Shadowing] below.
NOTICE that we only abstract over variables that are not in scope,
so we're in no danger of shadowing variables used in "higher up"
in f_spec's RHS.
Note [Shadowing]
~~~~~~~~~~~~~~~~
In this pass we gather up usage information that may mention variables
that are bound between the usage site and the definition site; or (more
seriously) may be bound to something different at the definition site.
For example:
f x = letrec g y v = let x = ...
in ...(g (a,b) x)...
Since 'x' is in scope at the call site, we may make a rewrite rule that
looks like
RULE forall a,b. g (a,b) x = ...
But this rule will never match, because it's really a different 'x' at
the call site -- and that difference will be manifest by the time the
simplifier gets to it. [A worry: the simplifier doesn't *guarantee*
no-shadowing, so perhaps it may not be distinct?]
Anyway, the rule isn't actually wrong, it's just not useful. One possibility
is to run deShadowBinds before running SpecConstr, but instead we run the
simplifier. That gives the simplest possible program for SpecConstr to
chew on; and it virtually guarantees no shadowing.
Note [Specialising for constant parameters]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This one is about specialising on a *constant* (but not necessarily
constructor) argument
foo :: Int -> (Int -> Int) -> Int
foo 0 f = 0
foo m f = foo (f m) (+1)
It produces
lvl_rmV :: GHC.Base.Int -> GHC.Base.Int
lvl_rmV =
\ (ds_dlk :: GHC.Base.Int) ->
case ds_dlk of wild_alH { GHC.Base.I# x_alG ->
GHC.Base.I# (GHC.Prim.+# x_alG 1)
T.$wfoo :: GHC.Prim.Int# -> (GHC.Base.Int -> GHC.Base.Int) ->
GHC.Prim.Int#
T.$wfoo =
\ (ww_sme :: GHC.Prim.Int#) (w_smg :: GHC.Base.Int -> GHC.Base.Int) ->
case ww_sme of ds_Xlw {
__DEFAULT ->
case w_smg (GHC.Base.I# ds_Xlw) of w1_Xmo { GHC.Base.I# ww1_Xmz ->
T.$wfoo ww1_Xmz lvl_rmV
};
0 -> 0
}
The recursive call has lvl_rmV as its argument, so we could create a specialised copy
with that argument baked in; that is, not passed at all. Now it can perhaps be inlined.
When is this worth it? Call the constant 'lvl'
- If 'lvl' has an unfolding that is a constructor, see if the corresponding
parameter is scrutinised anywhere in the body.
- If 'lvl' has an unfolding that is a inlinable function, see if the corresponding
parameter is applied (...to enough arguments...?)
Also do this if the function has RULES?
Also
Note [Specialising for lambda parameters]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
foo :: Int -> (Int -> Int) -> Int
foo 0 f = 0
foo m f = foo (f m) (\n -> n-m)
This is subtly different from the previous one in that we get an
explicit lambda as the argument:
T.$wfoo :: GHC.Prim.Int# -> (GHC.Base.Int -> GHC.Base.Int) ->
GHC.Prim.Int#
T.$wfoo =
\ (ww_sm8 :: GHC.Prim.Int#) (w_sma :: GHC.Base.Int -> GHC.Base.Int) ->
case ww_sm8 of ds_Xlr {
__DEFAULT ->
case w_sma (GHC.Base.I# ds_Xlr) of w1_Xmf { GHC.Base.I# ww1_Xmq ->
T.$wfoo
ww1_Xmq
(\ (n_ad3 :: GHC.Base.Int) ->
case n_ad3 of wild_alB { GHC.Base.I# x_alA ->
GHC.Base.I# (GHC.Prim.-# x_alA ds_Xlr)
})
};
0 -> 0
}
I wonder if SpecConstr couldn't be extended to handle this? After all,
lambda is a sort of constructor for functions and perhaps it already
has most of the necessary machinery?
Furthermore, there's an immediate win, because you don't need to allocate the lambda
at the call site; and if perchance it's called in the recursive call, then you
may avoid allocating it altogether. Just like for constructors.
Looks cool, but probably rare...but it might be easy to implement.
Note [SpecConstr for casts]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
data family T a :: *
data instance T Int = T Int
foo n = ...
where
go (T 0) = 0
go (T n) = go (T (n-1))
The recursive call ends up looking like
go (T (I# ...) `cast` g)
So we want to spot the constructor application inside the cast.
That's why we have the Cast case in argToPat
Note [Local recursive groups]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
For a *local* recursive group, we can see all the calls to the
function, so we seed the specialisation loop from the calls in the
body, not from the calls in the RHS. Consider:
bar m n = foo n (n,n) (n,n) (n,n) (n,n)
where
foo n p q r s
| n == 0 = m
| n > 3000 = case p of { (p1,p2) -> foo (n-1) (p2,p1) q r s }
| n > 2000 = case q of { (q1,q2) -> foo (n-1) p (q2,q1) r s }
| n > 1000 = case r of { (r1,r2) -> foo (n-1) p q (r2,r1) s }
| otherwise = case s of { (s1,s2) -> foo (n-1) p q r (s2,s1) }
If we start with the RHSs of 'foo', we get lots and lots of specialisations,
most of which are not needed. But if we start with the (single) call
in the rhs of 'bar' we get exactly one fully-specialised copy, and all
the recursive calls go to this fully-specialised copy. Indeed, the original
function is later collected as dead code. This is very important in
specialising the loops arising from stream fusion, for example in NDP where
we were getting literally hundreds of (mostly unused) specialisations of
a local function.
In a case like the above we end up never calling the original un-specialised
function. (Although we still leave its code around just in case.)
However, if we find any boring calls in the body, including *unsaturated*
ones, such as
letrec foo x y = ....foo...
in map foo xs
then we will end up calling the un-specialised function, so then we *should*
use the calls in the un-specialised RHS as seeds. We call these
"boring call patterns", and callsToPats reports if it finds any of these.
Note [Seeding top-level recursive groups]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This seeding is done in the binding for seed_calls in specRec.
1. If all the bindings in a top-level recursive group are local (not
exported), then all the calls are in the rest of the top-level
bindings. This means we can specialise with those call patterns
ONLY, and NOT with the RHSs of the recursive group (exactly like
Note [Local recursive groups])
2. But if any of the bindings are exported, the function may be called
with any old arguments, so (for lack of anything better) we specialise
based on
(a) the call patterns in the RHS
(b) the call patterns in the rest of the top-level bindings
NB: before Apr 15 we used (a) only, but Dimitrios had an example
where (b) was crucial, so I added that.
Adding (b) also improved nofib allocation results:
multiplier: 4% better
minimax: 2.8% better
Actually in case (2), instead of using the calls from the RHS, it
would be better to specialise in the importing module. We'd need to
add an INLINABLE pragma to the function, and then it can be
specialised in the importing scope, just as is done for type classes
in Specialise.specImports. This remains to be done (#10346).
Note [Top-level recursive groups]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
To get the call usage information from "the rest of the top level
bindings" (c.f. Note [Seeding top-level recursive groups]), we work
backwards through the top-level bindings so we see the usage before we
get to the binding of the function. Before we can collect the usage
though, we go through all the bindings and add them to the
environment. This is necessary because usage is only tracked for
functions in the environment. These two passes are called
'go' and 'goEnv'
in specConstrProgram. (Looks a bit revolting to me.)
Note [Do not specialise diverging functions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Specialising a function that just diverges is a waste of code.
Furthermore, it broke GHC (simpl014) thus:
{-# STR Sb #-}
f = \x. case x of (a,b) -> f x
If we specialise f we get
f = \x. case x of (a,b) -> fspec a b
But fspec doesn't have decent strictness info. As it happened,
(f x) :: IO t, so the state hack applied and we eta expanded fspec,
and hence f. But now f's strictness is less than its arity, which
breaks an invariant.
Note [Forcing specialisation]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
With stream fusion and in other similar cases, we want to fully
specialise some (but not necessarily all!) loops regardless of their
size and the number of specialisations.
We allow a library to do this, in one of two ways (one which is
deprecated):
1) Add a parameter of type GHC.Types.SPEC (from ghc-prim) to the loop body.
2) (Deprecated) Annotate a type with ForceSpecConstr from GHC.Exts,
and then add *that* type as a parameter to the loop body
The reason #2 is deprecated is because it requires GHCi, which isn't
available for things like a cross compiler using stage1.
Here's a (simplified) example from the `vector` package. You may bring
the special 'force specialization' type into scope by saying:
import GHC.Types (SPEC(..))
or by defining your own type (again, deprecated):
data SPEC = SPEC | SPEC2
{-# ANN type SPEC ForceSpecConstr #-}
(Note this is the exact same definition of GHC.Types.SPEC, just
without the annotation.)
After that, you say:
foldl :: (a -> b -> a) -> a -> Stream b -> a
{-# INLINE foldl #-}
foldl f z (Stream step s _) = foldl_loop SPEC z s
where
foldl_loop !sPEC z s = case step s of
Yield x s' -> foldl_loop sPEC (f z x) s'
                           Skip s'    -> foldl_loop sPEC z s'
Done -> z
SpecConstr will spot the SPEC parameter and always fully specialise
foldl_loop. Note that
* We have to prevent the SPEC argument from being removed by
w/w which is why (a) SPEC is a sum type, and (b) we have to seq on
the SPEC argument.
* And lastly, the SPEC argument is ultimately eliminated by
SpecConstr itself so there is no runtime overhead.
This is all quite ugly; we ought to come up with a better design.
ForceSpecConstr arguments are spotted in scExpr' and scTopBinds which then set
sc_force to True when calling specLoop. This flag does four things:
* Ignore specConstrThreshold, to specialise functions of arbitrary size
(see scTopBind)
* Ignore specConstrCount, to make arbitrary numbers of specialisations
(see specialise)
* Specialise even for arguments that are not scrutinised in the loop
(see argToPat; Trac #4488)
* Only specialise on recursive types a finite number of times
(see is_too_recursive; Trac #5550; Note [Limit recursive specialisation])
The flag holds only for specialising a single binding group, and NOT
for nested bindings. (So really it should be passed around explicitly
and not stored in ScEnv.) Trac #14379 turned out to be caused by
f SPEC x = let g1 x = ...
in ...
We force-specialise f (because of the SPEC), but that generates a specialised
copy of g1 (as well as the original). Alas g1 has a nested binding g2; and
in each copy of g1 we get an unspecialised and specialised copy of g2; and so
on. Result, exponential. So the force-spec flag now only applies to one
level of bindings at a time.
Mechanism for this one-level-only thing:
- Switch it on at the call to specRec, in scExpr and scTopBinds
- Switch it off when doing the RHSs;
this can be done very conveniently in decreaseSpecCount
What alternatives did I consider?
* Annotating the loop itself doesn't work because (a) it is local and
(b) it will be w/w'ed and having w/w propagating annotations somehow
doesn't seem like a good idea. The types of the loop arguments
really seem to be the most persistent thing.
* Annotating the types that make up the loop state doesn't work,
either, because (a) it would prevent us from using types like Either
or tuples here, (b) we don't want to restrict the set of types that
can be used in Stream states and (c) some types are fixed by the
user (e.g., the accumulator here) but we still want to specialise as
much as possible.
Alternatives to ForceSpecConstr
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Instead of giving the loop an extra argument of type SPEC, we
also considered *wrapping* arguments in SPEC, thus
data SPEC a = SPEC a | SPEC2
loop = \arg -> case arg of
SPEC state ->
case state of (x,y) -> ... loop (SPEC (x',y')) ...
                        SPEC2 -> error ...
The idea is that a SPEC argument says "specialise this argument
regardless of whether the function case-analyses it". But this
doesn't work well:
* SPEC must still be a sum type, else the strictness analyser
eliminates it
* But that means that 'loop' won't be strict in its real payload
This loss of strictness in turn screws up specialisation, because
we may end up with calls like
loop (SPEC (case z of (p,q) -> (q,p)))
Without the SPEC, if 'loop' were strict, the case would move out
and we'd see loop applied to a pair. But if 'loop' isn't strict
this doesn't look like a specialisable call.
Note [Limit recursive specialisation]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It is possible for ForceSpecConstr to cause an infinite loop of specialisation.
Because there is no limit on the number of specialisations, a recursive call with
a recursive constructor as an argument (for example, list cons) will generate
a specialisation for that constructor. If the resulting specialisation also
contains a recursive call with the constructor, this could proceed indefinitely.
For example, if ForceSpecConstr is on:
loop :: [Int] -> [Int] -> [Int]
loop z [] = z
loop z (x:xs) = loop (x:z) xs
this example will create a specialisation for the pattern
loop (a:b) c = loop' a b c
loop' a b [] = (a:b)
loop' a b (x:xs) = loop (x:(a:b)) xs
and a new pattern is found:
loop (a:(b:c)) d = loop'' a b c d
which can continue indefinitely.
Roman's suggestion to fix this was to stop after a couple of times on recursive types,
but still specialising on non-recursive types as much as possible.
To implement this, we count the number of times we have gone round the
"specialise recursively" loop ('go' in 'specRec'). Once have gone round
more than N times (controlled by -fspec-constr-recursive=N) we check
- If sc_force is off, and sc_count is (Just max) then we don't
need to do anything: trim_pats will limit the number of specs
- Otherwise check if any function has now got more than (sc_count env)
specialisations. If sc_count is "no limit" then we arbitrarily
choose 10 as the limit (ugh).
See Trac #5550. Also Trac #13623, where this test had become over-aggressive,
and we lost a wonderful specialisation that we really wanted!
Note [NoSpecConstr]
~~~~~~~~~~~~~~~~~~~
The ignoreDataCon stuff allows you to say
{-# ANN type T NoSpecConstr #-}
to mean "don't specialise on arguments of this type". It was added
before we had ForceSpecConstr. Lacking ForceSpecConstr we specialised
regardless of size; and then we needed a way to turn that *off*. Now
that we have ForceSpecConstr, this NoSpecConstr is probably redundant.
(Used only for PArray.)
-----------------------------------------------------
Stuff not yet handled
-----------------------------------------------------
Here are notes arising from Roman's work that I don't want to lose.
Example 1
~~~~~~~~~
data T a = T !a
foo :: Int -> T Int -> Int
foo 0 t = 0
foo x t | even x = case t of { T n -> foo (x-n) t }
| otherwise = foo (x-1) t
SpecConstr does no specialisation, because the second recursive call
looks like a boxed use of the argument. A pity.
$wfoo_sFw :: GHC.Prim.Int# -> T.T GHC.Base.Int -> GHC.Prim.Int#
$wfoo_sFw =
\ (ww_sFo [Just L] :: GHC.Prim.Int#) (w_sFq [Just L] :: T.T GHC.Base.Int) ->
case ww_sFo of ds_Xw6 [Just L] {
__DEFAULT ->
case GHC.Prim.remInt# ds_Xw6 2 of wild1_aEF [Dead Just A] {
__DEFAULT -> $wfoo_sFw (GHC.Prim.-# ds_Xw6 1) w_sFq;
0 ->
case w_sFq of wild_Xy [Just L] { T.T n_ad5 [Just U(L)] ->
case n_ad5 of wild1_aET [Just A] { GHC.Base.I# y_aES [Just L] ->
$wfoo_sFw (GHC.Prim.-# ds_Xw6 y_aES) wild_Xy
} } };
0 -> 0
Example 2
~~~~~~~~~
data a :*: b = !a :*: !b
data T a = T !a
foo :: (Int :*: T Int) -> Int
foo (0 :*: t) = 0
foo (x :*: t) | even x = case t of { T n -> foo ((x-n) :*: t) }
| otherwise = foo ((x-1) :*: t)
Very similar to the previous one, except that the parameters are now in
a strict tuple. Before SpecConstr, we have
$wfoo_sG3 :: GHC.Prim.Int# -> T.T GHC.Base.Int -> GHC.Prim.Int#
$wfoo_sG3 =
\ (ww_sFU [Just L] :: GHC.Prim.Int#) (ww_sFW [Just L] :: T.T
GHC.Base.Int) ->
case ww_sFU of ds_Xws [Just L] {
__DEFAULT ->
case GHC.Prim.remInt# ds_Xws 2 of wild1_aEZ [Dead Just A] {
__DEFAULT ->
case ww_sFW of tpl_B2 [Just L] { T.T a_sFo [Just A] ->
$wfoo_sG3 (GHC.Prim.-# ds_Xws 1) tpl_B2 -- $wfoo1
};
0 ->
case ww_sFW of wild_XB [Just A] { T.T n_ad7 [Just S(L)] ->
case n_ad7 of wild1_aFd [Just L] { GHC.Base.I# y_aFc [Just L] ->
$wfoo_sG3 (GHC.Prim.-# ds_Xws y_aFc) wild_XB -- $wfoo2
} } };
0 -> 0 }
We get two specialisations:
"SC:$wfoo1" [0] __forall {a_sFB :: GHC.Base.Int sc_sGC :: GHC.Prim.Int#}
Foo.$wfoo sc_sGC (Foo.T @ GHC.Base.Int a_sFB)
= Foo.$s$wfoo1 a_sFB sc_sGC ;
"SC:$wfoo2" [0] __forall {y_aFp :: GHC.Prim.Int# sc_sGC :: GHC.Prim.Int#}
Foo.$wfoo sc_sGC (Foo.T @ GHC.Base.Int (GHC.Base.I# y_aFp))
= Foo.$s$wfoo y_aFp sc_sGC ;
But perhaps the first one isn't good. After all, we know that tpl_B2 is
a T (I# x) really, because T is strict and Int has one constructor. (We can't
unbox the strict fields, because T is polymorphic!)
************************************************************************
* *
\subsection{Top level wrapper stuff}
* *
************************************************************************
-}
specConstrProgram :: ModGuts -> CoreM ModGuts
specConstrProgram guts
= do
dflags <- getDynFlags
us <- getUniqueSupplyM
annos <- getFirstAnnotations deserializeWithData guts
this_mod <- getModule
let binds' = reverse $ fst $ initUs us $ do
-- Note [Top-level recursive groups]
(env, binds) <- goEnv (initScEnv dflags this_mod annos)
(mg_binds guts)
-- binds is identical to (mg_binds guts), except that the
-- binders on the LHS have been replaced by extendBndr
-- (SPJ this seems like overkill; I don't think the binders
-- will change at all; and we don't substitute in the RHSs anyway!!)
go env nullUsage (reverse binds)
return (guts { mg_binds = binds' })
where
-- See Note [Top-level recursive groups]
goEnv env [] = return (env, [])
goEnv env (bind:binds) = do (env', bind') <- scTopBindEnv env bind
(env'', binds') <- goEnv env' binds
return (env'', bind' : binds')
-- Arg list of bindings is in reverse order
go _ _ [] = return []
go env usg (bind:binds) = do (usg', bind') <- scTopBind env usg bind
binds' <- go env usg' binds
return (bind' : binds')
{-
************************************************************************
* *
\subsection{Environment: goes downwards}
* *
************************************************************************
Note [Work-free values only in environment]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The sc_vals field keeps track of in-scope value bindings, so
that if we come across (case x of Just y ->...) we can reduce the
case from knowing that x is bound to a pair.
But only *work-free* values are ok here. For example if the envt had
x -> Just (expensive v)
then we do NOT want to expand to
let y = expensive v in ...
because the x-binding still exists and we've now duplicated (expensive v).
This seldom happens because let-bound constructor applications are
ANF-ised, but it can happen as a result of on-the-fly transformations in
SpecConstr itself. Here is Trac #7865:
let {
a'_shr =
case xs_af8 of _ {
[] -> acc_af6;
: ds_dgt [Dmd=<L,A>] ds_dgu [Dmd=<L,A>] ->
(expensive x_af7, x_af7
} } in
let {
ds_sht =
case a'_shr of _ { (p'_afd, q'_afe) ->
TSpecConstr_DoubleInline.recursive
(GHC.Types.: @ GHC.Types.Int x_af7 wild_X6) (q'_afe, p'_afd)
} } in
When processed knowing that xs_af8 was bound to a cons, we simplify to
a'_shr = (expensive x_af7, x_af7)
and we do NOT want to inline that at the occurrence of a'_shr in ds_sht.
(There are other occurrences of a'_shr.) No no no.
It would be possible to do some on-the-fly ANF-ising, so that a'_shr turned
into a work-free value again, thus
a1 = expensive x_af7
a'_shr = (a1, x_af7)
but that's more work, so until it's shown to be important I'm going to
leave it for now.
Note [Making SpecConstr keener]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this, in (perf/should_run/T9339)
last (filter odd [1..1000])
After optimisation, including SpecConstr, we get:
f :: Int# -> Int -> Int
f x y = case remInt# x 2# of
__DEFAULT -> case x of
__DEFAULT -> f (+# wild_Xp 1#) (I# x)
1000000# -> ...
0# -> case x of
__DEFAULT -> f (+# wild_Xp 1#) y
1000000# -> y
Not good! We build an (I# x) box every time around the loop.
SpecConstr (as described in the paper) does not specialise f, despite
the call (f ... (I# x)), because 'y' is not scrutinised in the body.
But it is much better to specialise f for the case where the argument
is of form (I# x); then we build the box only when returning y, which
is on the cold path.
Another example:
f x = ...(g x)....
Here 'x' is not scrutinised in f's body; but if we did specialise 'f'
then the call (g x) might allow 'g' to be specialised in turn.
So sc_keen controls whether or not we take account of whether the argument is
scrutinised in the body. True <=> ignore that, and specialise whenever
the function is applied to a data constructor.
-}
data ScEnv = SCE { sc_dflags :: DynFlags,
sc_module :: !Module,
sc_size :: Maybe Int, -- Size threshold
-- Nothing => no limit
sc_count :: Maybe Int, -- Max # of specialisations for any one fn
-- Nothing => no limit
-- See Note [Avoiding exponential blowup]
sc_recursive :: Int, -- Max # of specialisations over recursive type.
-- Stops ForceSpecConstr from diverging.
sc_keen :: Bool, -- Specialise on arguments that are known
-- constructors, even if they are not
-- scrutinised in the body. See
-- Note [Making SpecConstr keener]
sc_force :: Bool, -- Force specialisation?
-- See Note [Forcing specialisation]
sc_subst :: Subst, -- Current substitution
-- Maps InIds to OutExprs
sc_how_bound :: HowBoundEnv,
-- Binds interesting non-top-level variables
-- Domain is OutVars (*after* applying the substitution)
sc_vals :: ValueEnv,
-- Domain is OutIds (*after* applying the substitution)
-- Used even for top-level bindings (but not imported ones)
-- The range of the ValueEnv is *work-free* values
-- such as (\x. blah), or (Just v)
-- but NOT (Just (expensive v))
-- See Note [Work-free values only in environment]
sc_annotations :: UniqFM SpecConstrAnnotation
}
---------------------
type HowBoundEnv = VarEnv HowBound -- Domain is OutVars
---------------------
type ValueEnv = IdEnv Value -- Domain is OutIds
data Value = ConVal AltCon [CoreArg] -- _Saturated_ constructors
-- The AltCon is never DEFAULT
| LambdaVal -- Inlinable lambdas or PAPs
instance Outputable Value where
ppr (ConVal con args) = ppr con <+> interpp'SP args
ppr LambdaVal = text "<Lambda>"
---------------------
initScEnv :: DynFlags -> Module -> UniqFM SpecConstrAnnotation -> ScEnv
initScEnv dflags this_mod anns
= SCE { sc_dflags = dflags,
sc_module = this_mod,
sc_size = specConstrThreshold dflags,
sc_count = specConstrCount dflags,
sc_recursive = specConstrRecursive dflags,
sc_keen = gopt Opt_SpecConstrKeen dflags,
sc_force = False,
sc_subst = emptySubst,
sc_how_bound = emptyVarEnv,
sc_vals = emptyVarEnv,
sc_annotations = anns }
data HowBound = RecFun -- These are the recursive functions for which
-- we seek interesting call patterns
| RecArg -- These are those functions' arguments, or their sub-components;
-- we gather occurrence information for these
instance Outputable HowBound where
ppr RecFun = text "RecFun"
ppr RecArg = text "RecArg"
scForce :: ScEnv -> Bool -> ScEnv
scForce env b = env { sc_force = b }
lookupHowBound :: ScEnv -> Id -> Maybe HowBound
lookupHowBound env id = lookupVarEnv (sc_how_bound env) id
scSubstId :: ScEnv -> Id -> CoreExpr
scSubstId env v = lookupIdSubst (text "scSubstId") (sc_subst env) v
scSubstTy :: ScEnv -> Type -> Type
scSubstTy env ty = substTy (sc_subst env) ty
scSubstCo :: ScEnv -> Coercion -> Coercion
scSubstCo env co = substCo (sc_subst env) co
zapScSubst :: ScEnv -> ScEnv
zapScSubst env = env { sc_subst = zapSubstEnv (sc_subst env) }
extendScInScope :: ScEnv -> [Var] -> ScEnv
-- Bring the quantified variables into scope
extendScInScope env qvars = env { sc_subst = extendInScopeList (sc_subst env) qvars }
-- Extend the substitution
extendScSubst :: ScEnv -> Var -> OutExpr -> ScEnv
extendScSubst env var expr = env { sc_subst = extendSubst (sc_subst env) var expr }
extendScSubstList :: ScEnv -> [(Var,OutExpr)] -> ScEnv
extendScSubstList env prs = env { sc_subst = extendSubstList (sc_subst env) prs }
extendHowBound :: ScEnv -> [Var] -> HowBound -> ScEnv
extendHowBound env bndrs how_bound
= env { sc_how_bound = extendVarEnvList (sc_how_bound env)
[(bndr,how_bound) | bndr <- bndrs] }
extendBndrsWith :: HowBound -> ScEnv -> [Var] -> (ScEnv, [Var])
extendBndrsWith how_bound env bndrs
= (env { sc_subst = subst', sc_how_bound = hb_env' }, bndrs')
where
(subst', bndrs') = substBndrs (sc_subst env) bndrs
hb_env' = sc_how_bound env `extendVarEnvList`
[(bndr,how_bound) | bndr <- bndrs']
extendBndrWith :: HowBound -> ScEnv -> Var -> (ScEnv, Var)
extendBndrWith how_bound env bndr
= (env { sc_subst = subst', sc_how_bound = hb_env' }, bndr')
where
(subst', bndr') = substBndr (sc_subst env) bndr
hb_env' = extendVarEnv (sc_how_bound env) bndr' how_bound
extendRecBndrs :: ScEnv -> [Var] -> (ScEnv, [Var])
extendRecBndrs env bndrs = (env { sc_subst = subst' }, bndrs')
where
(subst', bndrs') = substRecBndrs (sc_subst env) bndrs
extendBndr :: ScEnv -> Var -> (ScEnv, Var)
extendBndr env bndr = (env { sc_subst = subst' }, bndr')
where
(subst', bndr') = substBndr (sc_subst env) bndr
extendValEnv :: ScEnv -> Id -> Maybe Value -> ScEnv
extendValEnv env _ Nothing = env
extendValEnv env id (Just cv)
| valueIsWorkFree cv -- Don't duplicate work!! Trac #7865
= env { sc_vals = extendVarEnv (sc_vals env) id cv }
extendValEnv env _ _ = env
extendCaseBndrs :: ScEnv -> OutExpr -> OutId -> AltCon -> [Var] -> (ScEnv, [Var])
-- When we encounter
-- case scrut of b
-- C x y -> ...
-- we want to bind b, to (C x y)
-- NB1: Extends only the sc_vals part of the envt
-- NB2: Kill the dead-ness info on the pattern binders x,y, since
-- they are potentially made alive by the [b -> C x y] binding
extendCaseBndrs env scrut case_bndr con alt_bndrs
= (env2, alt_bndrs')
where
live_case_bndr = not (isDeadBinder case_bndr)
env1 | Var v <- stripTicksTopE (const True) scrut
= extendValEnv env v cval
| otherwise = env -- See Note [Add scrutinee to ValueEnv too]
env2 | live_case_bndr = extendValEnv env1 case_bndr cval
| otherwise = env1
alt_bndrs' | case scrut of { Var {} -> True; _ -> live_case_bndr }
= map zap alt_bndrs
| otherwise
= alt_bndrs
cval = case con of
DEFAULT -> Nothing
LitAlt {} -> Just (ConVal con [])
DataAlt {} -> Just (ConVal con vanilla_args)
where
vanilla_args = map Type (tyConAppArgs (idType case_bndr)) ++
varsToCoreExprs alt_bndrs
zap v | isTyVar v = v -- See NB2 above
| otherwise = zapIdOccInfo v
decreaseSpecCount :: ScEnv -> Int -> ScEnv
-- See Note [Avoiding exponential blowup]
decreaseSpecCount env n_specs
= env { sc_force = False -- See Note [Forcing specialisation]
, sc_count = case sc_count env of
Nothing -> Nothing
Just n -> Just (n `div` (n_specs + 1)) }
-- The "+1" takes account of the original function;
-- See Note [Avoiding exponential blowup]
---------------------------------------------------
-- See Note [Forcing specialisation]
ignoreType :: ScEnv -> Type -> Bool
ignoreDataCon :: ScEnv -> DataCon -> Bool
forceSpecBndr :: ScEnv -> Var -> Bool
ignoreDataCon env dc = ignoreTyCon env (dataConTyCon dc)
ignoreType env ty
= case tyConAppTyCon_maybe ty of
Just tycon -> ignoreTyCon env tycon
_ -> False
ignoreTyCon :: ScEnv -> TyCon -> Bool
ignoreTyCon env tycon
= lookupUFM (sc_annotations env) tycon == Just NoSpecConstr
forceSpecBndr env var = forceSpecFunTy env . snd . splitForAllTys . varType $ var
forceSpecFunTy :: ScEnv -> Type -> Bool
forceSpecFunTy env = any (forceSpecArgTy env) . fst . splitFunTys
forceSpecArgTy :: ScEnv -> Type -> Bool
forceSpecArgTy env ty
| Just ty' <- coreView ty = forceSpecArgTy env ty'
forceSpecArgTy env ty
| Just (tycon, tys) <- splitTyConApp_maybe ty
, tycon /= funTyCon
= tyConName tycon == specTyConName
|| lookupUFM (sc_annotations env) tycon == Just ForceSpecConstr
|| any (forceSpecArgTy env) tys
forceSpecArgTy _ _ = False
{-
Note [Add scrutinee to ValueEnv too]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this:
case x of y
(a,b) -> case b of c
I# v -> ...(f y)...
By the time we get to the call (f y), the ValueEnv
will have a binding for y, and for c
y -> (a,b)
c -> I# v
BUT that's not enough! Looking at the call (f y) we
see that y is pair (a,b), but we also need to know what 'b' is.
So in extendCaseBndrs we must *also* add the binding
b -> I# v
else we lose a useful specialisation for f. This is necessary even
though the simplifier has systematically replaced uses of 'x' with 'y'
and 'b' with 'c' in the code. The use of 'b' in the ValueEnv came
from outside the case. See Trac #4908 for the live example.
Note [Avoiding exponential blowup]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The sc_count field of the ScEnv says how many times we are prepared to
duplicate a single function. But we must take care with recursive
specialisations. Consider
let $j1 = let $j2 = let $j3 = ...
in
...$j3...
in
...$j2...
in
...$j1...
If we specialise $j1 then in each specialisation (as well as the original)
we can specialise $j2, and similarly $j3. Even if we make just *one*
specialisation of each, because we also have the original we'll get 2^n
copies of $j3, which is not good.
So when recursively specialising we divide the sc_count by the number of
copies we are making at this level, including the original.
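A worked illustration of decreaseSpecCount (made-up numbers): if sc_count is
(Just 12) and we are about to make 2 specialisations of $j1 at this level,
then inside those copies we continue with
    sc_count = Just (12 `div` (2 + 1)) = Just 4
The "+1" accounts for the original, un-specialised copy of $j1, keeping the
total number of inner copies bounded.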
************************************************************************
* *
\subsection{Usage information: flows upwards}
* *
************************************************************************
-}
data ScUsage
= SCU {
scu_calls :: CallEnv, -- Calls
-- The functions are a subset of the
-- RecFuns in the ScEnv
scu_occs :: !(IdEnv ArgOcc) -- Information on argument occurrences
} -- The domain is OutIds
type CallEnv = IdEnv [Call]
data Call = Call Id [CoreArg] ValueEnv
-- The arguments of the call, together with the
-- env giving the constructor bindings at the call site
-- We keep the function mainly for debug output
instance Outputable ScUsage where
ppr (SCU { scu_calls = calls, scu_occs = occs })
= text "SCU" <+> braces (sep [ ptext (sLit "calls =") <+> ppr calls
, text "occs =" <+> ppr occs ])
instance Outputable Call where
ppr (Call fn args _) = ppr fn <+> fsep (map pprParendExpr args)
nullUsage :: ScUsage
nullUsage = SCU { scu_calls = emptyVarEnv, scu_occs = emptyVarEnv }
combineCalls :: CallEnv -> CallEnv -> CallEnv
combineCalls = plusVarEnv_C (++)
where
-- plus cs ds | length res > 1
-- = pprTrace "combineCalls" (vcat [ text "cs:" <+> ppr cs
-- , text "ds:" <+> ppr ds])
-- res
-- | otherwise = res
-- where
-- res = cs ++ ds
combineUsage :: ScUsage -> ScUsage -> ScUsage
combineUsage u1 u2 = SCU { scu_calls = combineCalls (scu_calls u1) (scu_calls u2),
scu_occs = plusVarEnv_C combineOcc (scu_occs u1) (scu_occs u2) }
combineUsages :: [ScUsage] -> ScUsage
combineUsages [] = nullUsage
combineUsages us = foldr1 combineUsage us
lookupOccs :: ScUsage -> [OutVar] -> (ScUsage, [ArgOcc])
lookupOccs (SCU { scu_calls = sc_calls, scu_occs = sc_occs }) bndrs
= (SCU {scu_calls = sc_calls, scu_occs = delVarEnvList sc_occs bndrs},
[lookupVarEnv sc_occs b `orElse` NoOcc | b <- bndrs])
data ArgOcc = NoOcc -- Doesn't occur at all; or a type argument
| UnkOcc -- Used in some unknown way
| ScrutOcc -- See Note [ScrutOcc]
(DataConEnv [ArgOcc]) -- How the sub-components are used
type DataConEnv a = UniqFM a -- Keyed by DataCon
{- Note [ScrutOcc]
~~~~~~~~~~~~~~~~~~~
An occurrence of ScrutOcc indicates that the thing, or a `cast` version of the thing,
is *only* taken apart or applied.
Functions, literal: ScrutOcc emptyUFM
Data constructors: ScrutOcc subs,
where (subs :: UniqFM [ArgOcc]) gives usage of the *pattern-bound* components,
The domain of the UniqFM is the Unique of the data constructor
The [ArgOcc] is the occurrences of the *pattern-bound* components
of the data structure. E.g.
data T a = forall b. MkT a b (b->a)
A pattern binds b, x::a, y::b, z::b->a, but not 'a'!
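A small illustration (hypothetical): for
    case x of (a, b) -> ...a...
the scrutinee x gets the occurrence
    ScrutOcc (unitUFM (,) [UnkOcc, NoOcc])
i.e. the first pattern-bound component is used in some unknown way and the
second is not used at all (cf. sc_alt, which builds
ScrutOcc (unitUFM dc arg_occs)).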
-}
instance Outputable ArgOcc where
ppr (ScrutOcc xs) = text "scrut-occ" <> ppr xs
ppr UnkOcc = text "unk-occ"
ppr NoOcc = text "no-occ"
evalScrutOcc :: ArgOcc
evalScrutOcc = ScrutOcc emptyUFM
-- Experimentally, this version of combineOcc makes ScrutOcc "win", so
-- that if the thing is scrutinised anywhere then we get to see that
-- in the overall result, even if it's also used in a boxed way
-- This might be too aggressive; see Note [Reboxing] Alternative 3
combineOcc :: ArgOcc -> ArgOcc -> ArgOcc
combineOcc NoOcc occ = occ
combineOcc occ NoOcc = occ
combineOcc (ScrutOcc xs) (ScrutOcc ys) = ScrutOcc (plusUFM_C combineOccs xs ys)
combineOcc UnkOcc (ScrutOcc ys) = ScrutOcc ys
combineOcc (ScrutOcc xs) UnkOcc = ScrutOcc xs
combineOcc UnkOcc UnkOcc = UnkOcc
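-- For example (illustrative only):
--   combineOcc UnkOcc (ScrutOcc (unitUFM dc [UnkOcc])) = ScrutOcc (unitUFM dc [UnkOcc])
-- so scrutinisation in just one case alternative is enough for the combined
-- occurrence to be a ScrutOcc.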
combineOccs :: [ArgOcc] -> [ArgOcc] -> [ArgOcc]
combineOccs xs ys = zipWithEqual "combineOccs" combineOcc xs ys
setScrutOcc :: ScEnv -> ScUsage -> OutExpr -> ArgOcc -> ScUsage
-- _Overwrite_ the occurrence info for the scrutinee, if the scrutinee
-- is a variable, and an interesting variable
setScrutOcc env usg (Cast e _) occ = setScrutOcc env usg e occ
setScrutOcc env usg (Tick _ e) occ = setScrutOcc env usg e occ
setScrutOcc env usg (Var v) occ
| Just RecArg <- lookupHowBound env v = usg { scu_occs = extendVarEnv (scu_occs usg) v occ }
| otherwise = usg
setScrutOcc _env usg _other _occ -- Catch-all
= usg
{-
************************************************************************
* *
\subsection{The main recursive function}
* *
************************************************************************
The main recursive function gathers up usage information, and
creates specialised versions of functions.
-}
scExpr, scExpr' :: ScEnv -> CoreExpr -> UniqSM (ScUsage, CoreExpr)
-- The unique supply is needed when we invent
-- a new name for the specialised function and its args
scExpr env e = scExpr' env e
scExpr' env (Var v) = case scSubstId env v of
Var v' -> return (mkVarUsage env v' [], Var v')
e' -> scExpr (zapScSubst env) e'
scExpr' env (Type t) = return (nullUsage, Type (scSubstTy env t))
scExpr' env (Coercion c) = return (nullUsage, Coercion (scSubstCo env c))
scExpr' _ e@(Lit {}) = return (nullUsage, e)
scExpr' env (Tick t e) = do (usg, e') <- scExpr env e
return (usg, Tick t e')
scExpr' env (Cast e co) = do (usg, e') <- scExpr env e
return (usg, mkCast e' (scSubstCo env co))
-- Important to use mkCast here
-- See Note [SpecConstr call patterns]
scExpr' env e@(App _ _) = scApp env (collectArgs e)
scExpr' env (Lam b e) = do let (env', b') = extendBndr env b
(usg, e') <- scExpr env' e
return (usg, Lam b' e')
scExpr' env (Case scrut b ty alts)
= do { (scrut_usg, scrut') <- scExpr env scrut
; case isValue (sc_vals env) scrut' of
Just (ConVal con args) -> sc_con_app con args scrut'
_other -> sc_vanilla scrut_usg scrut'
}
where
sc_con_app con args scrut' -- Known constructor; simplify
= do { let (_, bs, rhs) = findAlt con alts
`orElse` (DEFAULT, [], mkImpossibleExpr ty)
alt_env' = extendScSubstList env ((b,scrut') : bs `zip` trimConArgs con args)
; scExpr alt_env' rhs }
sc_vanilla scrut_usg scrut' -- Normal case
= do { let (alt_env,b') = extendBndrWith RecArg env b
-- Record RecArg for the components
; (alt_usgs, alt_occs, alts')
<- mapAndUnzip3M (sc_alt alt_env scrut' b') alts
; let scrut_occ = foldr combineOcc NoOcc alt_occs
scrut_usg' = setScrutOcc env scrut_usg scrut' scrut_occ
-- The combined usage of the scrutinee is given
-- by scrut_occ, which is passed to scScrut, which
-- in turn treats a bare-variable scrutinee specially
; return (foldr combineUsage scrut_usg' alt_usgs,
Case scrut' b' (scSubstTy env ty) alts') }
sc_alt env scrut' b' (con,bs,rhs)
= do { let (env1, bs1) = extendBndrsWith RecArg env bs
(env2, bs2) = extendCaseBndrs env1 scrut' b' con bs1
; (usg, rhs') <- scExpr env2 rhs
; let (usg', b_occ:arg_occs) = lookupOccs usg (b':bs2)
scrut_occ = case con of
DataAlt dc -> ScrutOcc (unitUFM dc arg_occs)
_ -> ScrutOcc emptyUFM
; return (usg', b_occ `combineOcc` scrut_occ, (con, bs2, rhs')) }
scExpr' env (Let (NonRec bndr rhs) body)
| isTyVar bndr -- Type-lets may be created by doBeta
= scExpr' (extendScSubst env bndr rhs) body
| otherwise
= do { let (body_env, bndr') = extendBndr env bndr
; rhs_info <- scRecRhs env (bndr',rhs)
; let body_env2 = extendHowBound body_env [bndr'] RecFun
-- Note [Local let bindings]
rhs' = ri_new_rhs rhs_info
body_env3 = extendValEnv body_env2 bndr' (isValue (sc_vals env) rhs')
; (body_usg, body') <- scExpr body_env3 body
-- NB: For non-recursive bindings we inherit sc_force flag from
-- the parent function (see Note [Forcing specialisation])
; (spec_usg, specs) <- specNonRec env body_usg rhs_info
; return (body_usg { scu_calls = scu_calls body_usg `delVarEnv` bndr' }
`combineUsage` spec_usg, -- Note [spec_usg includes rhs_usg]
mkLets [NonRec b r | (b,r) <- ruleInfoBinds rhs_info specs] body')
}
-- A *local* recursive group: see Note [Local recursive groups]
scExpr' env (Let (Rec prs) body)
= do { let (bndrs,rhss) = unzip prs
(rhs_env1,bndrs') = extendRecBndrs env bndrs
rhs_env2 = extendHowBound rhs_env1 bndrs' RecFun
force_spec = any (forceSpecBndr env) bndrs'
-- Note [Forcing specialisation]
; rhs_infos <- mapM (scRecRhs rhs_env2) (bndrs' `zip` rhss)
; (body_usg, body') <- scExpr rhs_env2 body
-- NB: start specLoop from body_usg
; (spec_usg, specs) <- specRec NotTopLevel (scForce rhs_env2 force_spec)
body_usg rhs_infos
-- Do not unconditionally generate specialisations from rhs_usgs
-- Instead use them only if we find an unspecialised call
-- See Note [Local recursive groups]
; let all_usg = spec_usg `combineUsage` body_usg -- Note [spec_usg includes rhs_usg]
bind' = Rec (concat (zipWith ruleInfoBinds rhs_infos specs))
; return (all_usg { scu_calls = scu_calls all_usg `delVarEnvList` bndrs' },
Let bind' body') }
{-
Note [Local let bindings]
~~~~~~~~~~~~~~~~~~~~~~~~~
It is not uncommon to find this
let $j = \x. <blah> in ...$j True...$j True...
Here $j is an arbitrary let-bound function, but it often comes up for
join points. We might like to specialise $j for its call patterns.
Notice the difference from a letrec, where we look for call patterns
in the *RHS* of the function. Here we look for call patterns in the
*body* of the let.
At one point I predicated this on the RHS mentioning the outer
recursive function, but that's not essential and might even be
harmful. I'm not sure.
-}
scApp :: ScEnv -> (InExpr, [InExpr]) -> UniqSM (ScUsage, CoreExpr)
scApp env (Var fn, args) -- Function is a variable
= ASSERT( not (null args) )
do { args_w_usgs <- mapM (scExpr env) args
; let (arg_usgs, args') = unzip args_w_usgs
arg_usg = combineUsages arg_usgs
; case scSubstId env fn of
fn'@(Lam {}) -> scExpr (zapScSubst env) (doBeta fn' args')
-- Do beta-reduction and try again
Var fn' -> return (arg_usg `combineUsage` mkVarUsage env fn' args',
mkApps (Var fn') args')
other_fn' -> return (arg_usg, mkApps other_fn' args') }
-- NB: doing this ignores any usage info from the substituted
-- function, but I don't think that matters. If it does
-- we can fix it.
where
doBeta :: OutExpr -> [OutExpr] -> OutExpr
-- ToDo: adjust for System IF
doBeta (Lam bndr body) (arg : args) = Let (NonRec bndr arg) (doBeta body args)
doBeta fn args = mkApps fn args
-- The function is almost always a variable, but not always.
-- In particular, if this pass follows float-in,
-- which it may, we can get
-- (let f = ...f... in f) arg1 arg2
scApp env (other_fn, args)
= do { (fn_usg, fn') <- scExpr env other_fn
; (arg_usgs, args') <- mapAndUnzipM (scExpr env) args
; return (combineUsages arg_usgs `combineUsage` fn_usg, mkApps fn' args') }
----------------------
mkVarUsage :: ScEnv -> Id -> [CoreExpr] -> ScUsage
mkVarUsage env fn args
= case lookupHowBound env fn of
Just RecFun -> SCU { scu_calls = unitVarEnv fn [Call fn args (sc_vals env)]
, scu_occs = emptyVarEnv }
Just RecArg -> SCU { scu_calls = emptyVarEnv
, scu_occs = unitVarEnv fn arg_occ }
Nothing -> nullUsage
where
-- I rather think we could use UnkOcc all the time
arg_occ | null args = UnkOcc
| otherwise = evalScrutOcc
----------------------
scTopBindEnv :: ScEnv -> CoreBind -> UniqSM (ScEnv, CoreBind)
scTopBindEnv env (Rec prs)
= do { let (rhs_env1,bndrs') = extendRecBndrs env bndrs
rhs_env2 = extendHowBound rhs_env1 bndrs RecFun
prs' = zip bndrs' rhss
; return (rhs_env2, Rec prs') }
where
(bndrs,rhss) = unzip prs
scTopBindEnv env (NonRec bndr rhs)
= do { let (env1, bndr') = extendBndr env bndr
env2 = extendValEnv env1 bndr' (isValue (sc_vals env) rhs)
; return (env2, NonRec bndr' rhs) }
----------------------
scTopBind :: ScEnv -> ScUsage -> CoreBind -> UniqSM (ScUsage, CoreBind)
{-
scTopBind _ usage _
| pprTrace "scTopBind_usage" (ppr (scu_calls usage)) False
= error "false"
-}
scTopBind env body_usage (Rec prs)
| Just threshold <- sc_size env
, not force_spec
, not (all (couldBeSmallEnoughToInline (sc_dflags env) threshold) rhss)
-- No specialisation
= -- pprTrace "scTopBind: nospec" (ppr bndrs) $
do { (rhs_usgs, rhss') <- mapAndUnzipM (scExpr env) rhss
; return (body_usage `combineUsage` combineUsages rhs_usgs, Rec (bndrs `zip` rhss')) }
| otherwise -- Do specialisation
= do { rhs_infos <- mapM (scRecRhs env) prs
; (spec_usage, specs) <- specRec TopLevel (scForce env force_spec)
body_usage rhs_infos
; return (body_usage `combineUsage` spec_usage,
Rec (concat (zipWith ruleInfoBinds rhs_infos specs))) }
where
(bndrs,rhss) = unzip prs
force_spec = any (forceSpecBndr env) bndrs
-- Note [Forcing specialisation]
scTopBind env usage (NonRec bndr rhs) -- Oddly, we don't seem to specialise top-level non-rec functions
= do { (rhs_usg', rhs') <- scExpr env rhs
; return (usage `combineUsage` rhs_usg', NonRec bndr rhs') }
----------------------
scRecRhs :: ScEnv -> (OutId, InExpr) -> UniqSM RhsInfo
scRecRhs env (bndr,rhs)
= do { let (arg_bndrs,body) = collectBinders rhs
(body_env, arg_bndrs') = extendBndrsWith RecArg env arg_bndrs
; (body_usg, body') <- scExpr body_env body
; let (rhs_usg, arg_occs) = lookupOccs body_usg arg_bndrs'
; return (RI { ri_rhs_usg = rhs_usg
, ri_fn = bndr, ri_new_rhs = mkLams arg_bndrs' body'
, ri_lam_bndrs = arg_bndrs, ri_lam_body = body
, ri_arg_occs = arg_occs }) }
-- The arg_occs says how the visible,
-- lambda-bound binders of the RHS are used
-- (including the TyVar binders)
-- Two pats are the same if they match both ways
----------------------
ruleInfoBinds :: RhsInfo -> SpecInfo -> [(Id,CoreExpr)]
ruleInfoBinds (RI { ri_fn = fn, ri_new_rhs = new_rhs })
(SI { si_specs = specs })
= [(id,rhs) | OS { os_id = id, os_rhs = rhs } <- specs] ++
-- First the specialised bindings
[(fn `addIdSpecialisations` rules, new_rhs)]
-- And now the original binding
where
rules = [r | OS { os_rule = r } <- specs]
{-
************************************************************************
* *
The specialiser itself
* *
************************************************************************
-}
data RhsInfo
= RI { ri_fn :: OutId -- The binder
, ri_new_rhs :: OutExpr -- The specialised RHS (in current envt)
, ri_rhs_usg :: ScUsage -- Usage info from specialising RHS
, ri_lam_bndrs :: [InVar] -- The *original* RHS (\xs.body)
, ri_lam_body :: InExpr -- Note [Specialise original body]
, ri_arg_occs :: [ArgOcc] -- Info on how the xs occur in body
}
data SpecInfo -- Info about specialisations for a particular Id
= SI { si_specs :: [OneSpec] -- The specialisations we have generated
, si_n_specs :: Int -- Length of si_specs; used for numbering them
, si_mb_unspec :: Maybe ScUsage -- Just cs => we have not yet used the calls
} -- arising in the *original* RHS as
-- seeds for new specialisations;
-- if you decide to do so, here is the
-- RHS usage (which has not yet been
-- unleashed)
-- Nothing => we have already done so
-- See Note [Local recursive groups]
-- See Note [spec_usg includes rhs_usg]
-- One specialisation: Rule plus definition
data OneSpec =
OS { os_pat :: CallPat -- Call pattern that generated this specialisation
, os_rule :: CoreRule -- Rule connecting original id with the specialisation
, os_id :: OutId -- Spec id
, os_rhs :: OutExpr } -- Spec rhs
noSpecInfo :: SpecInfo
noSpecInfo = SI { si_specs = [], si_n_specs = 0, si_mb_unspec = Nothing }
----------------------
specNonRec :: ScEnv
-> ScUsage -- Body usage
-> RhsInfo -- Structure info and usage info for un-specialised RHS
-> UniqSM (ScUsage, SpecInfo) -- Usage from RHSs (specialised and not)
-- plus details of specialisations
specNonRec env body_usg rhs_info
= specialise env (scu_calls body_usg) rhs_info
(noSpecInfo { si_mb_unspec = Just (ri_rhs_usg rhs_info) })
----------------------
specRec :: TopLevelFlag -> ScEnv
-> ScUsage -- Body usage
-> [RhsInfo] -- Structure info and usage info for un-specialised RHSs
-> UniqSM (ScUsage, [SpecInfo]) -- Usage from all RHSs (specialised and not)
-- plus details of specialisations
specRec top_lvl env body_usg rhs_infos
= go 1 seed_calls nullUsage init_spec_infos
where
(seed_calls, init_spec_infos) -- Note [Seeding top-level recursive groups]
| isTopLevel top_lvl
, any (isExportedId . ri_fn) rhs_infos -- Seed from body and RHSs
= (all_calls, [noSpecInfo | _ <- rhs_infos])
| otherwise -- Seed from body only
= (calls_in_body, [noSpecInfo { si_mb_unspec = Just (ri_rhs_usg ri) }
| ri <- rhs_infos])
calls_in_body = scu_calls body_usg
calls_in_rhss = foldr (combineCalls . scu_calls . ri_rhs_usg) emptyVarEnv rhs_infos
all_calls = calls_in_rhss `combineCalls` calls_in_body
-- Loop, specialising, until you get no new specialisations
go :: Int -- Which iteration of the "until no new specialisations"
-- loop we are on; first iteration is 1
-> CallEnv -- Seed calls
-- Two accumulating parameters:
-> ScUsage -- Usage from earlier specialisations
-> [SpecInfo] -- Details of specialisations so far
-> UniqSM (ScUsage, [SpecInfo])
go n_iter seed_calls usg_so_far spec_infos
| isEmptyVarEnv seed_calls
= -- pprTrace "specRec1" (vcat [ ppr (map ri_fn rhs_infos)
-- , ppr seed_calls
-- , ppr body_usg ]) $
return (usg_so_far, spec_infos)
-- Limit recursive specialisation
-- See Note [Limit recursive specialisation]
| n_iter > sc_recursive env -- Too many iterations of the 'go' loop
, sc_force env || isNothing (sc_count env)
-- If both of these are false, the sc_count
-- threshold will prevent non-termination
, any ((> the_limit) . si_n_specs) spec_infos
= -- pprTrace "specRec2" (ppr (map (map os_pat . si_specs) spec_infos)) $
return (usg_so_far, spec_infos)
| otherwise
= -- pprTrace "specRec3" (vcat [ text "bndrs" <+> ppr (map ri_fn rhs_infos)
-- , text "iteration" <+> int n_iter
-- , text "spec_infos" <+> ppr (map (map os_pat . si_specs) spec_infos)
-- ]) $
do { specs_w_usg <- zipWithM (specialise env seed_calls) rhs_infos spec_infos
; let (extra_usg_s, new_spec_infos) = unzip specs_w_usg
extra_usg = combineUsages extra_usg_s
all_usg = usg_so_far `combineUsage` extra_usg
; go (n_iter + 1) (scu_calls extra_usg) all_usg new_spec_infos }
-- See Note [Limit recursive specialisation]
the_limit = case sc_count env of
Nothing -> 10 -- Ugh!
Just max -> max
----------------------
specialise
:: ScEnv
-> CallEnv -- Info on newly-discovered calls to this function
-> RhsInfo
-> SpecInfo -- Original RHS plus patterns dealt with
-> UniqSM (ScUsage, SpecInfo) -- New specialised versions and their usage
-- See Note [spec_usg includes rhs_usg]
-- Note: this only generates *specialised* bindings
-- The original binding is added by ruleInfoBinds
--
-- Note: the rhs here is the optimised version of the original rhs
-- So when we make a specialised copy of the RHS, we're starting
-- from an RHS whose nested functions have been optimised already.
specialise env bind_calls (RI { ri_fn = fn, ri_lam_bndrs = arg_bndrs
, ri_lam_body = body, ri_arg_occs = arg_occs })
spec_info@(SI { si_specs = specs, si_n_specs = spec_count
, si_mb_unspec = mb_unspec })
| isBottomingId fn -- Note [Do not specialise diverging functions]
-- and do not generate specialisation seeds from its RHS
= -- pprTrace "specialise bot" (ppr fn) $
return (nullUsage, spec_info)
| isNeverActive (idInlineActivation fn) -- See Note [Transfer activation]
|| null arg_bndrs -- Only specialise functions
= -- pprTrace "specialise inactive" (ppr fn) $
case mb_unspec of -- Behave as if there was a single, boring call
Just rhs_usg -> return (rhs_usg, spec_info { si_mb_unspec = Nothing })
-- See Note [spec_usg includes rhs_usg]
Nothing -> return (nullUsage, spec_info)
| Just all_calls <- lookupVarEnv bind_calls fn
= -- pprTrace "specialise entry {" (ppr fn <+> ppr all_calls) $
do { (boring_call, new_pats) <- callsToNewPats env fn spec_info arg_occs all_calls
; let n_pats = length new_pats
-- ; if (not (null new_pats) || isJust mb_unspec) then
-- pprTrace "specialise" (vcat [ ppr fn <+> text "with" <+> int n_pats <+> text "good patterns"
-- , text "mb_unspec" <+> ppr (isJust mb_unspec)
-- , text "arg_occs" <+> ppr arg_occs
-- , text "good pats" <+> ppr new_pats]) $
-- return ()
-- else return ()
; let spec_env = decreaseSpecCount env n_pats
; (spec_usgs, new_specs) <- mapAndUnzipM (spec_one spec_env fn arg_bndrs body)
(new_pats `zip` [spec_count..])
-- See Note [Specialise original body]
; let spec_usg = combineUsages spec_usgs
-- If there were any boring calls among the seeds (= all_calls), then those
-- calls will call the un-specialised function. So we should use the seeds
-- from the _unspecialised_ function's RHS, which are in mb_unspec, by returning
-- them in new_usg.
(new_usg, mb_unspec')
= case mb_unspec of
Just rhs_usg | boring_call -> (spec_usg `combineUsage` rhs_usg, Nothing)
_ -> (spec_usg, mb_unspec)
-- ; pprTrace "specialise return }"
-- (vcat [ ppr fn
-- , text "boring_call:" <+> ppr boring_call
-- , text "new calls:" <+> ppr (scu_calls new_usg)]) $
-- return ()
; return (new_usg, SI { si_specs = new_specs ++ specs
, si_n_specs = spec_count + n_pats
, si_mb_unspec = mb_unspec' }) }
| otherwise -- No new seeds, so return nullUsage
= return (nullUsage, spec_info)
---------------------
spec_one :: ScEnv
-> OutId -- Function
-> [InVar] -- Lambda-binders of RHS; should match patterns
-> InExpr -- Body of the original function
-> (CallPat, Int)
-> UniqSM (ScUsage, OneSpec) -- Rule and binding
-- spec_one creates a specialised copy of the function, together
-- with a rule for using it. I'm very proud of how short this
-- function is, considering what it does :-).
{-
Example
In-scope: a, x::a
f = /\b \y::[(a,b)] -> ....f (b,c) ((:) (a,(b,c)) (x,v) (h w))...
[c::*, v::(b,c) are presumably bound by the (...) part]
==>
f_spec = /\ b c \ v::(b,c) hw::[(a,(b,c))] ->
(...entire body of f...) [b -> (b,c),
y -> ((:) (a,(b,c)) (x,v) hw)]
RULE: forall b::* c::*, -- Note, *not* forall a, x
v::(b,c),
hw::[(a,(b,c))] .
f (b,c) ((:) (a,(b,c)) (x,v) hw) = f_spec b c v hw
-}
spec_one env fn arg_bndrs body (call_pat@(qvars, pats), rule_number)
= do { spec_uniq <- getUniqueM
; let spec_env = extendScSubstList (extendScInScope env qvars)
(arg_bndrs `zip` pats)
fn_name = idName fn
fn_loc = nameSrcSpan fn_name
fn_occ = nameOccName fn_name
spec_occ = mkSpecOcc fn_occ
-- We use fn_occ rather than fn in the rule_name string
-- as we don't want the uniq to end up in the rule, and
-- hence in the ABI, as that can cause spurious ABI
-- changes (#4012).
rule_name = mkFastString ("SC:" ++ occNameString fn_occ ++ show rule_number)
spec_name = mkInternalName spec_uniq spec_occ fn_loc
-- ; pprTrace "{spec_one" (ppr (sc_count env) <+> ppr fn
-- <+> ppr pats <+> text "-->" <+> ppr spec_name) $
-- return ()
-- Specialise the body
; (spec_usg, spec_body) <- scExpr spec_env body
-- ; pprTrace "done spec_one}" (ppr fn) $
-- return ()
-- And build the results
; let (spec_lam_args, spec_call_args) = mkWorkerArgs (sc_dflags env)
qvars body_ty
-- Usual w/w hack to avoid generating
-- a spec_rhs of unlifted type and no args
spec_lam_args_str = handOutStrictnessInformation (fst (splitStrictSig spec_str)) spec_lam_args
-- Annotate the variables with the strictness information from
-- the function (see Note [Strictness information in worker binders])
spec_join_arity | isJoinId fn = Just (length spec_lam_args)
| otherwise = Nothing
spec_id = mkLocalIdOrCoVar spec_name
(mkLamTypes spec_lam_args body_ty)
-- See Note [Transfer strictness]
`setIdStrictness` spec_str
`setIdArity` count isId spec_lam_args
`asJoinId_maybe` spec_join_arity
spec_str = calcSpecStrictness fn spec_lam_args pats
-- Conditionally use result of new worker-wrapper transform
spec_rhs = mkLams spec_lam_args_str spec_body
body_ty = exprType spec_body
rule_rhs = mkVarApps (Var spec_id) spec_call_args
inline_act = idInlineActivation fn
this_mod = sc_module spec_env
rule = mkRule this_mod True {- Auto -} True {- Local -}
rule_name inline_act fn_name qvars pats rule_rhs
-- See Note [Transfer activation]
; return (spec_usg, OS { os_pat = call_pat, os_rule = rule
, os_id = spec_id
, os_rhs = spec_rhs }) }
-- See Note [Strictness information in worker binders]
handOutStrictnessInformation :: [Demand] -> [Var] -> [Var]
handOutStrictnessInformation = go
where
go _ [] = []
go [] vs = vs
go (d:dmds) (v:vs) | isId v = setIdDemandInfo v d : go dmds vs
go dmds (v:vs) = v : go dmds vs
calcSpecStrictness :: Id -- The original function
-> [Var] -> [CoreExpr] -- Call pattern
-> StrictSig -- Strictness of specialised thing
-- See Note [Transfer strictness]
calcSpecStrictness fn qvars pats
= mkClosedStrictSig spec_dmds topRes
where
spec_dmds = [ lookupVarEnv dmd_env qv `orElse` topDmd | qv <- qvars, isId qv ]
StrictSig (DmdType _ dmds _) = idStrictness fn
dmd_env = go emptyVarEnv dmds pats
go :: DmdEnv -> [Demand] -> [CoreExpr] -> DmdEnv
go env ds (Type {} : pats) = go env ds pats
go env ds (Coercion {} : pats) = go env ds pats
go env (d:ds) (pat : pats) = go (go_one env d pat) ds pats
go env _ _ = env
go_one :: DmdEnv -> Demand -> CoreExpr -> DmdEnv
go_one env d (Var v) = extendVarEnv_C bothDmd env v d
go_one env d e
| Just ds <- splitProdDmd_maybe d -- NB: d does not have to be strict
, (Var _, args) <- collectArgs e = go env ds args
go_one env _ _ = env
{-
Note [spec_usg includes rhs_usg]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In calls to 'specialise', the returned ScUsage must include the rhs_usg in
the passed-in SpecInfo, unless there are no calls at all to the function.
The caller can, indeed must, assume this. He should not combine in rhs_usg
himself, or he'll get rhs_usg twice -- and that can lead to an exponential
blowup of duplicates in the CallEnv. This is what gave rise to the massive
performance loss in Trac #8852.
Note [Specialise original body]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The RhsInfo for a binding keeps the *original* body of the binding. We
must specialise that, *not* the result of applying specExpr to the RHS
(which is also kept in RhsInfo). Otherwise we end up specialising a
specialised RHS, and that can lead directly to exponential behaviour.
Note [Transfer activation]
~~~~~~~~~~~~~~~~~~~~~~~~~~
This note is for SpecConstr, but exactly the same thing
happens in the overloading specialiser; see
Note [Auto-specialisation and RULES] in Specialise.
In which phase should the specialise-constructor rules be active?
Originally I made them always-active, but Manuel found that this
defeated some clever user-written rules. Then I made them active only
in Phase 0; after all, currently, the specConstr transformation is
only run after the simplifier has reached Phase 0, but that meant
that specialisations didn't fire inside wrappers; see test
simplCore/should_compile/spec-inline.
So now I just use the inline-activation of the parent Id, as the
activation for the specialisation RULE, just like the main specialiser;
This in turn means there is no point in specialising NOINLINE things,
so we test for that.
Note [Transfer strictness]
~~~~~~~~~~~~~~~~~~~~~~~~~~
We must transfer strictness information from the original function to
the specialised one. Suppose, for example
f has strictness SS
and a RULE f (a:as) b = f_spec a as b
Now we want f_spec to have strictness LLS, otherwise we'll use call-by-need
when calling f_spec instead of call-by-value. And that can result in
unbounded worsening in space (cf the classic foldl vs foldl')
See Trac #3437 for a good example.
The function calcSpecStrictness performs the calculation.
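A sketch of that calculation for the example above (demand notation is
approximate): the call-pattern arguments are [(a:as), b] and f's argument
demands are [S, S].  The S on the first argument carries no nested product
information, so nothing is recorded for 'a' or 'as' and they default to
topDmd (L); the S on the second argument transfers directly to 'b'.  Hence
the quantified term variables [a, as, b] of f_spec get demands [L, L, S],
i.e. the LLS signature we wanted.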
Note [Strictness information in worker binders]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
After having calculated the strictness annotation for the worker (see Note
[Transfer strictness] above), we also want to have this information attached to
the worker’s arguments, for the benefit of later passes. The function
handOutStrictnessInformation decomposes the strictness annotation calculated by
calcSpecStrictness and attaches them to the variables.
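For example (illustrative only): with demands [S, L] and worker binders
[@b, x, y] (a type variable followed by two Ids), handOutStrictnessInformation
yields [@b, x{S}, y{L}]; type variables are passed through unchanged and do
not consume a demand.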
************************************************************************
* *
\subsection{Argument analysis}
* *
************************************************************************
This code deals with analysing call-site arguments to see whether
they are constructor applications.
Note [Free type variables of the qvar types]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In a call (f @a x True) that we want to specialise, what variables should
we quantify over? Clearly over 'a' and 'x', but what about any type variables
free in x's type? In fact we don't need to worry about them because (f @a)
can only be a well-typed application if its type is compatible with x, so any
variables free in x's type must be free in (f @a), and hence either be gathered
via 'a' itself, or be in scope at f's defn. Hence we just take
(exprsFreeVars pats).
BUT phantom type synonyms can mess this reasoning up,
eg x::T b with type T b = Int
So we apply expandTypeSynonyms to the bound Ids.
See Trac # 5458. Yuk.
Note [SpecConstr call patterns]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
A "call pattern" that we collect is going to become the LHS of a RULE.
It's important that it doesn't have
e |> Refl
or
e |> g1 |> g2
because both of these will be optimised by Simplify.simplRule. In the
former case such optimisation is benign, because the rule will match more
terms; but in the latter we may lose a binding of 'g1' or 'g2', and
end up with a rule LHS that doesn't bind the template variables
(Trac #10602).
The simplifier eliminates such things, but SpecConstr itself constructs
new terms by substituting. So the 'mkCast' in the Cast case of scExpr
is very important!
Note [Choosing patterns]
~~~~~~~~~~~~~~~~~~~~~~~~
If we get lots of patterns we may not want to make a specialisation
for each of them (code bloat), so we choose as follows, implemented
by trim_pats.
* The flag -fspec-constr-count-N sets the sc_count field
of the ScEnv to (Just n). This limits the total number
of specialisations for a given function to N.
* -fno-spec-constr-count sets the sc_count field to Nothing,
which switches off the limit.
* The ghastly ForceSpecConstr trick also switches off the limit
for a particular function
* Otherwise we sort the patterns to choose the most general
ones first; more general => more widely applicable.
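A worked illustration (hypothetical patterns): given the call patterns
    f (x:xs)        -- pat_cons = 1
    f (True:xs)     -- pat_cons = 2
    f []            -- pat_cons = 1
trim_pats sorts by increasing pat_cons, so if only two more specialisations
are allowed we keep the two more general patterns, f (x:xs) and f [], and
discard f (True:xs).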
Note [SpecConstr and casts]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider (Trac #14270) a call like
let f = e
in ... f (K @(a |> co)) ...
where 'co' is a coercion variable not in scope at f's definition site.
If we aren't careful we'll get
let $sf a co = e (K @(a |> co))
RULE "SC:f" forall a co. f (K @(a |> co)) = $sf a co
f = e
in ...
But alas, when we match the call we won't bind 'co', because type-matching
(for good reasons) discards casts.
I don't know how to solve this, so for now I'm just discarding any
call patterns that
* Mentions a coercion variable
* That is not in scope at the binding of the function
I think this is very rare.
Note that this /also/ discards the call pattern if we have a cast in a
/term/, although in fact Rules.match does make a very flaky and
fragile attempt to match coercions. e.g. a call like
f (Maybe Age) (Nothing |> co) blah
where co :: Maybe Int ~ Maybe Age
will be discarded. It's extremely fragile to match on the form of a
coercion, so I think it's better just not to try. A more complicated
alternative would be to discard calls that mention coercion variables
only in kind-casts, but I'm doing the simple thing for now.
-}
type CallPat = ([Var], [CoreExpr]) -- Quantified variables and arguments
-- See Note [SpecConstr call patterns]
callsToNewPats :: ScEnv -> Id
-> SpecInfo
-> [ArgOcc] -> [Call]
-> UniqSM (Bool, [CallPat])
-- Result has no duplicate patterns,
-- nor ones mentioned in done_pats
-- Bool indicates that there was at least one boring pattern
callsToNewPats env fn spec_info@(SI { si_specs = done_specs }) bndr_occs calls
= do { mb_pats <- mapM (callToPats env bndr_occs) calls
; let have_boring_call = any isNothing mb_pats
good_pats :: [CallPat]
good_pats = catMaybes mb_pats
-- Remove patterns we have already done
new_pats = filterOut is_done good_pats
is_done p = any (samePat p . os_pat) done_specs
-- Remove duplicates
non_dups = nubBy samePat new_pats
-- Remove ones that have too many worker variables
small_pats = filterOut too_big non_dups
too_big (vars,_) = not (isWorkerSmallEnough (sc_dflags env) vars)
-- We are about to construct w/w pair in 'spec_one'.
-- Omit specialisation leading to high arity workers.
-- See Note [Limit w/w arity] in WwLib
-- Discard specialisations if there are too many of them
trimmed_pats = trim_pats env fn spec_info small_pats
-- ; pprTrace "callsToPats" (vcat [ text "calls to" <+> ppr fn <> colon <+> ppr calls
-- , text "done_specs:" <+> ppr (map os_pat done_specs)
-- , text "good_pats:" <+> ppr good_pats ]) $
-- return ()
; return (have_boring_call, trimmed_pats) }
trim_pats :: ScEnv -> Id -> SpecInfo -> [CallPat] -> [CallPat]
-- See Note [Choosing patterns]
trim_pats env fn (SI { si_n_specs = done_spec_count }) pats
| sc_force env
|| isNothing mb_scc
|| n_remaining >= n_pats
= -- pprTrace "trim_pats: no-trim" (ppr (sc_force env) $$ ppr mb_scc $$ ppr n_remaining $$ ppr n_pats)
pats -- No need to trim
| otherwise
= emit_trace $ -- Need to trim, so keep the best ones
take n_remaining sorted_pats
where
n_pats = length pats
spec_count' = n_pats + done_spec_count
n_remaining = max_specs - done_spec_count
mb_scc = sc_count env
Just max_specs = mb_scc
sorted_pats = map fst $
sortBy (comparing snd) $
[(pat, pat_cons pat) | pat <- pats]
-- Sort in order of increasing number of constructors
-- (i.e. decreasing generality) and pick the initial
-- segment of this list
pat_cons :: CallPat -> Int
-- How many data constructors or literals are in
-- the pattern. More data-cons => less general
pat_cons (qs, ps) = foldr ((+) . n_cons) 0 ps
where
q_set = mkVarSet qs
n_cons (Var v) | v `elemVarSet` q_set = 0
| otherwise = 1
n_cons (Cast e _) = n_cons e
n_cons (App e1 e2) = n_cons e1 + n_cons e2
n_cons (Lit {}) = 1
n_cons _ = 0
emit_trace result
| debugIsOn || hasPprDebug (sc_dflags env)
-- Suppress this scary message for ordinary users! Trac #5125
= pprTrace "SpecConstr" msg result
| otherwise
= result
msg = vcat [ sep [ text "Function" <+> quotes (ppr fn)
, nest 2 (text "has" <+>
speakNOf spec_count' (text "call pattern") <> comma <+>
text "but the limit is" <+> int max_specs) ]
, text "Use -fspec-constr-count=n to set the bound"
, text "done_spec_count =" <+> int done_spec_count
, text "Keeping " <+> int n_remaining <> text ", out of" <+> int n_pats
, text "Discarding:" <+> ppr (drop n_remaining sorted_pats) ]
callToPats :: ScEnv -> [ArgOcc] -> Call -> UniqSM (Maybe CallPat)
-- The [Var] is the variables to quantify over in the rule
-- Type variables come first, since they may scope
-- over the following term variables
-- The [CoreExpr] are the argument patterns for the rule
callToPats env bndr_occs call@(Call _ args con_env)
| args `ltLength` bndr_occs -- Check saturated
= return Nothing
| otherwise
= do { let in_scope = substInScope (sc_subst env)
; (interesting, pats) <- argsToPats env in_scope con_env args bndr_occs
; let pat_fvs = exprsFreeVarsList pats
-- To get determinism we need the list of free variables in
-- deterministic order. Otherwise we end up creating
-- lambdas with different argument orders. See
-- determinism/simplCore/should_compile/spec-inline-determ.hs
-- for an example. For explanation of determinism
-- considerations See Note [Unique Determinism] in Unique.
in_scope_vars = getInScopeVars in_scope
qvars = filterOut (`elemVarSet` in_scope_vars) pat_fvs
-- Quantify over variables that are not in scope
-- at the call site
-- See Note [Free type variables of the qvar types]
-- See Note [Shadowing] at the top
(ktvs, ids) = partition isTyVar qvars
qvars' = toposortTyVars ktvs ++ map sanitise ids
-- Order into kind variables, type variables, term variables
-- The kind of a type variable may mention a kind variable
-- and the type of a term variable may mention a type variable
sanitise id = id `setIdType` expandTypeSynonyms (idType id)
-- See Note [Free type variables of the qvar types]
bad_covars = filter isCoVar ids
-- See Note [SpecConstr and casts]
; -- pprTrace "callToPats" (ppr args $$ ppr bndr_occs) $
WARN( not (null bad_covars), text "SpecConstr: bad covars:" <+> ppr bad_covars
$$ ppr call )
if interesting && null bad_covars
then return (Just (qvars', pats))
else return Nothing }
-- argToPat takes an actual argument, and returns an abstracted
-- version, consisting of just the "constructor skeleton" of the
-- argument, with non-constructor sub-expression replaced by new
-- placeholder variables. For example:
-- C a (D (f x) (g y)) ==> C p1 (D p2 p3)
argToPat :: ScEnv
-> InScopeSet -- What's in scope at the fn defn site
-> ValueEnv -- ValueEnv at the call site
-> CoreArg -- A call arg (or component thereof)
-> ArgOcc
-> UniqSM (Bool, CoreArg)
-- Returns (interesting, pat),
-- where pat is the pattern derived from the argument
-- interesting=True if the pattern is non-trivial (not a variable or type)
-- E.g. x:xs --> (True, x:xs)
-- f xs --> (False, w) where w is a fresh wildcard
-- (f xs, 'c') --> (True, (w, 'c')) where w is a fresh wildcard
-- \x. x+y --> (True, \x. x+y)
-- lvl7 --> (True, lvl7) if lvl7 is bound
-- somewhere further out
argToPat _env _in_scope _val_env arg@(Type {}) _arg_occ
= return (False, arg)
argToPat env in_scope val_env (Tick _ arg) arg_occ
= argToPat env in_scope val_env arg arg_occ
-- Note [Notes in call patterns]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- Ignore Notes. In particular, we want to ignore any InlineMe notes
-- Perhaps we should not ignore profiling notes, but I'm going to
-- ride roughshod over them all for now.
--- See Note [Notes in RULE matching] in Rules
argToPat env in_scope val_env (Let _ arg) arg_occ
= argToPat env in_scope val_env arg arg_occ
-- See Note [Matching lets] in Rule.hs
-- Look through let expressions
-- e.g. f (let v = rhs in (v,w))
-- Here we can specialise for f (v,w)
-- because the rule-matcher will look through the let.
{- Disabled; see Note [Matching cases] in Rule.hs
argToPat env in_scope val_env (Case scrut _ _ [(_, _, rhs)]) arg_occ
| exprOkForSpeculation scrut -- See Note [Matching cases] in Rule.hs
= argToPat env in_scope val_env rhs arg_occ
-}
argToPat env in_scope val_env (Cast arg co) arg_occ
| not (ignoreType env ty2)
= do { (interesting, arg') <- argToPat env in_scope val_env arg arg_occ
; if not interesting then
wildCardPat ty2
else do
{ -- Make a wild-card pattern for the coercion
uniq <- getUniqueM
; let co_name = mkSysTvName uniq (fsLit "sg")
co_var = mkCoVar co_name (mkCoercionType Representational ty1 ty2)
; return (interesting, Cast arg' (mkCoVarCo co_var)) } }
where
Pair ty1 ty2 = coercionKind co
{- Disabling lambda specialisation for now
It's fragile, and the spec_loop can be infinite
argToPat in_scope val_env arg arg_occ
| is_value_lam arg
= return (True, arg)
where
is_value_lam (Lam v e) -- Spot a value lambda, even if
| isId v = True -- it is inside a type lambda
| otherwise = is_value_lam e
is_value_lam other = False
-}
-- Check for a constructor application
-- NB: this *precedes* the Var case, so that we catch nullary constrs
argToPat env in_scope val_env arg arg_occ
| Just (ConVal (DataAlt dc) args) <- isValue val_env arg
, not (ignoreDataCon env dc) -- See Note [NoSpecConstr]
, Just arg_occs <- mb_scrut dc
= do { let (ty_args, rest_args) = splitAtList (dataConUnivTyVars dc) args
; (_, args') <- argsToPats env in_scope val_env rest_args arg_occs
; return (True,
mkConApp dc (ty_args ++ args')) }
where
mb_scrut dc = case arg_occ of
ScrutOcc bs | Just occs <- lookupUFM bs dc
-> Just (occs) -- See Note [Reboxing]
_other | sc_force env || sc_keen env
-> Just (repeat UnkOcc)
| otherwise
-> Nothing
-- Check if the argument is a variable that
-- (a) is used in an interesting way in the function body
-- (b) we know what its value is
-- In that case it counts as "interesting"
argToPat env in_scope val_env (Var v) arg_occ
| sc_force env || case arg_occ of { UnkOcc -> False; _other -> True }, -- (a)
is_value, -- (b)
-- Ignoring sc_keen here to avoid gratuitously incurring Note [Reboxing]
-- So sc_keen focused just on f (I# x), where we have freshly-allocated
-- box that we can eliminate in the caller
not (ignoreType env (varType v))
= return (True, Var v)
where
is_value
| isLocalId v = v `elemInScopeSet` in_scope
&& isJust (lookupVarEnv val_env v)
-- Local variables have values in val_env
| otherwise = isValueUnfolding (idUnfolding v)
-- Imports have unfoldings
-- I'm really not sure what this comment means
-- And by not wild-carding we tend to get forall'd
-- variables that are in scope, which in turn can
-- expose the weakness in let-matching
-- See Note [Matching lets] in Rules
-- Check for a variable bound inside the function.
-- Don't make a wild-card, because we may usefully share
-- e.g. f a = let x = ... in f (x,x)
-- NB: this case follows the lambda and con-app cases!!
-- argToPat _in_scope _val_env (Var v) _arg_occ
-- = return (False, Var v)
-- SLPJ : disabling this to avoid proliferation of versions
-- also works badly when thinking about seeding the loop
-- from the body of the let
-- f x y = letrec g z = ... in g (x,y)
-- We don't want to specialise for that *particular* x,y
-- The default case: make a wild-card
-- We use this for coercions too
argToPat _env _in_scope _val_env arg _arg_occ
= wildCardPat (exprType arg)
wildCardPat :: Type -> UniqSM (Bool, CoreArg)
wildCardPat ty
= do { uniq <- getUniqueM
; let id = mkSysLocalOrCoVar (fsLit "sc") uniq ty
; return (False, varToCoreExpr id) }
argsToPats :: ScEnv -> InScopeSet -> ValueEnv
-> [CoreArg] -> [ArgOcc] -- Should be same length
-> UniqSM (Bool, [CoreArg])
argsToPats env in_scope val_env args occs
= do { stuff <- zipWithM (argToPat env in_scope val_env) args occs
; let (interesting_s, args') = unzip stuff
; return (or interesting_s, args') }
isValue :: ValueEnv -> CoreExpr -> Maybe Value
isValue _env (Lit lit)
| litIsLifted lit = Nothing
| otherwise = Just (ConVal (LitAlt lit) [])
isValue env (Var v)
| Just cval <- lookupVarEnv env v
= Just cval -- You might think we could look in the idUnfolding here
-- but that doesn't take account of which branch of a
-- case we are in, which is the whole point
| not (isLocalId v) && isCheapUnfolding unf
= isValue env (unfoldingTemplate unf)
where
unf = idUnfolding v
-- However we do want to consult the unfolding
-- as well, for let-bound constructors!
isValue env (Lam b e)
| isTyVar b = case isValue env e of
Just _ -> Just LambdaVal
Nothing -> Nothing
| otherwise = Just LambdaVal
isValue env (Tick t e)
| not (tickishIsCode t)
= isValue env e
isValue _env expr -- Maybe it's a constructor application
| (Var fun, args, _) <- collectArgsTicks (not . tickishIsCode) expr
= case isDataConWorkId_maybe fun of
Just con | args `lengthAtLeast` dataConRepArity con
-- Check saturated; might be > because the
-- arity excludes type args
-> Just (ConVal (DataAlt con) args)
_other | valArgCount args < idArity fun
-- Under-applied function
-> Just LambdaVal -- Partial application
_other -> Nothing
isValue _env _expr = Nothing
valueIsWorkFree :: Value -> Bool
valueIsWorkFree LambdaVal = True
valueIsWorkFree (ConVal _ args) = all exprIsWorkFree args
samePat :: CallPat -> CallPat -> Bool
samePat (vs1, as1) (vs2, as2)
= all2 same as1 as2
where
same (Var v1) (Var v2)
| v1 `elem` vs1 = v2 `elem` vs2
| v2 `elem` vs2 = False
| otherwise = v1 == v2
same (Lit l1) (Lit l2) = l1==l2
same (App f1 a1) (App f2 a2) = same f1 f2 && same a1 a2
same (Type {}) (Type {}) = True -- Note [Ignore type differences]
same (Coercion {}) (Coercion {}) = True
same (Tick _ e1) e2 = same e1 e2 -- Ignore casts and notes
same (Cast e1 _) e2 = same e1 e2
same e1 (Tick _ e2) = same e1 e2
same e1 (Cast e2 _) = same e1 e2
same e1 e2 = WARN( bad e1 || bad e2, ppr e1 $$ ppr e2)
False -- Let, lambda, case should not occur
bad (Case {}) = True
bad (Let {}) = True
bad (Lam {}) = True
bad _other = False
{-
Note [Ignore type differences]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We do not want to generate specialisations where the call patterns
differ only in their type arguments! Not only is it utterly useless,
but it also means that (with polymorphic recursion) we can generate
an infinite number of specialisations. Example is Data.Sequence.adjustTree,
I think.
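For instance (illustrative only), the call patterns
    f @Int  v
    f @Bool v
are treated as identical by samePat, because Type arguments always compare
equal; so we generate at most one specialisation rather than one per type
instantiation.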
-}
|
ezyang/ghc
|
compiler/specialise/SpecConstr.hs
|
bsd-3-clause
| 94,978 | 56 | 19 | 29,064 | 11,269 | 6,246 | 5,023 | -1 | -1 |
module Opaleye.Internal.Column where
import qualified Opaleye.Internal.HaskellDB.PrimQuery as HPQ
-- | The 'Num' and 'Fractional' instances for 'Column' 'a' are too
-- general. For example, they allow you to add two 'Column'
-- 'String's. This will be fixed in a subsequent release.
newtype Column a = Column HPQ.PrimExpr deriving Show
data Nullable a = Nullable
unColumn :: Column a -> HPQ.PrimExpr
unColumn (Column e) = e
{-# DEPRECATED unsafeCoerce "Use unsafeCoerceColumn instead" #-}
unsafeCoerce :: Column a -> Column b
unsafeCoerce = unsafeCoerceColumn
unsafeCoerceColumn :: Column a -> Column b
unsafeCoerceColumn (Column e) = Column e
unsafeCompositeField :: Column a -> String -> Column b
unsafeCompositeField (Column e) fieldName =
Column (HPQ.CompositeExpr e fieldName)
binOp :: HPQ.BinOp -> Column a -> Column b -> Column c
binOp op (Column e) (Column e') = Column (HPQ.BinExpr op e e')
unOp :: HPQ.UnOp -> Column a -> Column b
unOp op (Column e) = Column (HPQ.UnExpr op e)
-- For import order reasons we can't make the return type PGBool
unsafeCase_ :: [(Column pgBool, Column a)] -> Column a -> Column a
unsafeCase_ alts (Column otherwise_) = Column (HPQ.CaseExpr (unColumns alts) otherwise_)
where unColumns = map (\(Column e, Column e') -> (e, e'))
unsafeIfThenElse :: Column pgBool -> Column a -> Column a -> Column a
unsafeIfThenElse cond t f = unsafeCase_ [(cond, t)] f
unsafeGt :: Column a -> Column a -> Column pgBool
unsafeGt = binOp HPQ.OpGt
unsafeEq :: Column a -> Column a -> Column pgBool
unsafeEq = binOp HPQ.OpEq
class PGNum a where
pgFromInteger :: Integer -> Column a
instance PGNum a => Num (Column a) where
fromInteger = pgFromInteger
(*) = binOp HPQ.OpMul
(+) = binOp HPQ.OpPlus
(-) = binOp HPQ.OpMinus
abs = unOp HPQ.OpAbs
negate = unOp HPQ.OpNegate
-- We can't use Postgres's 'sign' function because it returns only a
-- numeric or a double
signum c = unsafeCase_ [(c `unsafeGt` 0, 1), (c `unsafeEq` 0, 0)] (-1)
class PGFractional a where
pgFromRational :: Rational -> Column a
instance (PGNum a, PGFractional a) => Fractional (Column a) where
fromRational = pgFromRational
(/) = binOp HPQ.OpDiv
|
benkolera/haskell-opaleye
|
src/Opaleye/Internal/Column.hs
|
bsd-3-clause
| 2,183 | 0 | 11 | 399 | 733 | 381 | 352 | 42 | 1 |
module HJS.Parser(parseProgram,lexProgram,lexFile,runLexer) where
import HJS.Parser.JavaScriptParser
import HJS.Parser.JavaScript
lexFile flags name = do
input <- readFile name
putStrLn $ show $ lexProgram input
|
disnet/jscheck
|
src/HJS/Parser.hs
|
bsd-3-clause
| 270 | 0 | 11 | 80 | 66 | 36 | 30 | 6 | 1 |
module Main where
import qualified System.IO as IO
import Data.Complex
import Data.Char (chr,ord)
data Scene = Scene { scaleX :: Double, scaleY :: Double, ulX :: Double, ulY :: Double }
-- ideally get real console support at some point...
lenX = 72
lenY = 20
clrScr = mapM_ (\_ -> putStrLn "") [1..lenY]
-- add more commands soon
parseCmd 'l' scn = scn { ulX = (ulX scn) - 6.0*(scaleX scn) }
parseCmd 'r' scn = scn { ulX = (ulX scn) + 6.0*(scaleX scn) }
parseCmd 'u' scn = scn { ulY = (ulY scn) - 3.0*(scaleY scn) }
parseCmd 'd' scn = scn { ulY = (ulY scn) + 3.0*(scaleY scn) }
parseCmd 'i' scn = Scene { ulX = (ulX scn) + (scaleX scn)*(fromIntegral lenX)/4.0,
ulY = (ulY scn) + (scaleY scn)*(fromIntegral lenY)/4.0,
scaleX = (scaleX scn) / 2.0,
scaleY = (scaleY scn) / 2.0 }
parseCmd 'o' scn = let newScX = (scaleX scn)*2.0
newScY = (scaleY scn)*2.0
in Scene { scaleX = newScX,
scaleY = newScY,
ulX = (ulX scn) - newScX*(fromIntegral lenX)/4.0,
ulY = (ulY scn) - newScY*(fromIntegral lenY)/4.0 }
parseCmd _ scn = scn
-- *************************************
-- the ASCII code for each point:
-- * Declare an infinite series of Mandelbrot
-- * iterations, and then count how many
-- * are calculated until the norm gets high,
-- * or we get to the maximum iterations.
-- *************************************
mseries loc = iterate (\pt -> pt * pt + loc) loc
mchar lst = chr $ ord '~' - length lst
lowNorm x = (rp*rp + ip*ip) < 4.0
where rp = realPart x
ip = imagPart x
maxRange = ord '~' - ord ' '
mpoint = mchar . (takeWhile lowNorm) . (take maxRange) . mseries
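-- A small worked example (not in the original source), following the
-- definitions above where ord '~' = 126, ord ' ' = 32 and maxRange = 94:
--   mpoint (0 :+ 0) -- never escapes, all 94 iterations kept:
--                   --   chr (126 - 94) = ' '   (inside the set -> blank)
--   mpoint (2 :+ 0) -- norm >= 4 at the very first element, nothing kept:
--                   --   chr (126 - 0)  = '~'   (escapes immediately)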
-- *************************************
-- to draw the scene, map mpoint over a
-- list of lists of points, which will create
-- a list of [Char]
-- *************************************
mline = map mpoint
points scn = let xcoords = take lenX $ iterate (\c -> c + (scaleX scn)) (ulX scn)
ycoords = take lenY $ iterate (\c -> c + (scaleY scn)) (ulY scn)
in [ [ x :+ y | x <- xcoords ] | y <- ycoords ]
drawScene scn = do
clrScr
mapM_ putStrLn $ map mline (points scn)
putStrLn "(l)eft, (r)ight, (u)p, (d)own Zoom (i)n or (o)ut (q)uit"
putStrLn ""
-- simple: draw the scene, parse a command, loop!
mainLoop scn = do
drawScene scn
cmd <- getChar
if cmd /= 'q'
then mainLoop $ parseCmd cmd scn
else return ()
-- just set up line buffering and get us into the main loop
main = do
IO.hSetBuffering IO.stdin IO.NoBuffering
mainLoop $ Scene 0.04 0.1 (-2.0) (-1.0)
|
waywardcode/small_programs
|
mandelbrot/mandel.hs
|
gpl-2.0
| 2,727 | 0 | 15 | 763 | 922 | 487 | 435 | 48 | 2 |
module GoToSymbolFunction_Pattern_CaretOnVariable where
test :: Int
test = let (seven, eight) = (7,8) in
s<caret>even + 1
|
charleso/intellij-haskforce
|
tests/gold/codeInsight/GoToSymbolFunction_Pattern_CaretOnVariable.hs
|
apache-2.0
| 129 | 0 | 9 | 24 | 50 | 28 | 22 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module ResponseHeaderSpec (main, spec) where
import Data.ByteString
import qualified Network.HTTP.Types as H
import Network.Wai.Handler.Warp.ResponseHeader
import Network.Wai.Handler.Warp.Response
import Network.Wai.Handler.Warp.Header
import Test.Hspec
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "composeHeader" $ do
it "composes a HTTP header" $
composeHeader H.http11 H.ok200 headers `shouldReturn` composedHeader
describe "addServer" $ do
it "adds Server if not exist" $ do
let hdrs = []
rspidxhdr = indexResponseHeader hdrs
addServer "MyServer" rspidxhdr hdrs `shouldBe` [("Server","MyServer")]
it "does not add Server if exists" $ do
let hdrs = [("Server","MyServer")]
rspidxhdr = indexResponseHeader hdrs
addServer "MyServer2" rspidxhdr hdrs `shouldBe` hdrs
it "does not add Server if empty" $ do
let hdrs = []
rspidxhdr = indexResponseHeader hdrs
addServer "" rspidxhdr hdrs `shouldBe` hdrs
it "deletes Server " $ do
let hdrs = [("Server","MyServer")]
rspidxhdr = indexResponseHeader hdrs
addServer "" rspidxhdr hdrs `shouldBe` []
headers :: H.ResponseHeaders
headers = [
("Date", "Mon, 13 Aug 2012 04:22:55 GMT")
, ("Content-Length", "151")
, ("Server", "Mighttpd/2.5.8")
, ("Last-Modified", "Fri, 22 Jun 2012 01:18:08 GMT")
, ("Content-Type", "text/html")
]
composedHeader :: ByteString
composedHeader = "HTTP/1.1 200 OK\r\nDate: Mon, 13 Aug 2012 04:22:55 GMT\r\nContent-Length: 151\r\nServer: Mighttpd/2.5.8\r\nLast-Modified: Fri, 22 Jun 2012 01:18:08 GMT\r\nContent-Type: text/html\r\n\r\n"
|
sordina/wai
|
warp/test/ResponseHeaderSpec.hs
|
bsd-2-clause
| 1,799 | 0 | 17 | 440 | 403 | 220 | 183 | 41 | 1 |
module Rules.Test (testRules) where
import System.Environment
import Base
import Expression
import Oracles.Setting
import Packages
import Settings
import Settings.Default
import Settings.Builders.RunTest
import Target
import Utilities
ghcConfigHsPath :: FilePath
ghcConfigHsPath = "testsuite/mk/ghc-config.hs"
ghcConfigProgPath :: FilePath
ghcConfigProgPath = "test/bin/ghc-config"
ghcConfigPath :: FilePath
ghcConfigPath = "test/ghcconfig"
-- TODO: clean up after testing
testRules :: Rules ()
testRules = do
root <- buildRootRules
    -- | Use the program shipped with the testsuite to generate the ghcconfig file.
root -/- ghcConfigProgPath ~> do
ghc <- builderPath $ Ghc CompileHs Stage0
createDirectory $ takeDirectory (root -/- ghcConfigProgPath)
cmd ghc [ghcConfigHsPath, "-o" , root -/- ghcConfigProgPath]
-- | TODO : Use input test compiler and not just stage2 compiler.
root -/- ghcConfigPath ~> do
ghcPath <- needFile Stage1 ghc
need [root -/- ghcConfigProgPath]
cmd [FileStdout $ root -/- ghcConfigPath] (root -/- ghcConfigProgPath)
[ghcPath]
root -/- timeoutPath ~> timeoutProgBuilder
"validate" ~> do
needTestBuilders
build $ target (vanillaContext Stage2 compiler) (Make "testsuite/tests") [] []
"test" ~> do
needTestBuilders
        -- TODO : Should we remove the previously generated config file?
-- Prepare Ghc configuration file for input compiler.
need [root -/- ghcConfigPath, root -/- timeoutPath]
-- TODO This approach doesn't work.
-- Set environment variables for test's Makefile.
env <- sequence
[ builderEnvironment "MAKE" $ Make ""
, builderEnvironment "TEST_HC" $ Ghc CompileHs Stage2
, AddEnv "TEST_HC_OPTS" <$> runTestGhcFlags ]
makePath <- builderPath $ Make ""
top <- topDirectory
ghcPath <- (top -/-) <$> builderPath (Ghc CompileHs Stage2)
ghcFlags <- runTestGhcFlags
checkPprPath <- (top -/-) <$> needFile Stage1 checkPpr
annotationsPath <- (top -/-) <$> needFile Stage1 checkApiAnnotations
-- Set environment variables for test's Makefile.
liftIO $ do
setEnv "MAKE" makePath
setEnv "TEST_HC" ghcPath
setEnv "TEST_HC_OPTS" ghcFlags
setEnv "CHECK_PPR" checkPprPath
setEnv "CHECK_API_ANNOTATIONS" annotationsPath
-- Execute the test target.
buildWithCmdOptions env $ target (vanillaContext Stage2 compiler) RunTest [] []
-- | Build extra programs and libraries required by testsuite
needTestsuitePackages :: Action ()
needTestsuitePackages = do
targets <- mapM (needFile Stage1) =<< testsuitePackages
libPath <- stageLibPath Stage1
iservPath <- needFile Stage1 iserv
need targets
    -- | We need to copy the iserv binary to lib/bin as this is where the
    -- | testsuite looks for iserv.
copyFile iservPath $ libPath -/- "bin/ghc-iserv"
-- | Build the timeout program.
-- See: https://github.com/ghc/ghc/blob/master/testsuite/timeout/Makefile#L23
timeoutProgBuilder :: Action ()
timeoutProgBuilder = do
root <- buildRoot
windows <- windowsHost
if windows
then do
prog <- programPath =<< programContext Stage1 timeout
copyFile prog (root -/- timeoutPath)
else do
python <- builderPath Python
copyFile "testsuite/timeout/timeout.py" (root -/- timeoutPath <.> "py")
let script = unlines
[ "#!/usr/bin/env sh"
, "exec " ++ python ++ " $0.py \"$@\"" ]
writeFile' (root -/- timeoutPath) script
makeExecutable (root -/- timeoutPath)
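-- For example (hypothetical path): if 'builderPath Python' returned
-- "/usr/bin/python3", the generated wrapper script would be
--
--   #!/usr/bin/env sh
--   exec /usr/bin/python3 $0.py "$@"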
needTestBuilders :: Action ()
needTestBuilders = do
needBuilder $ Ghc CompileHs Stage2
needBuilder $ GhcPkg Update Stage1
needBuilder Hpc
needBuilder $ Hsc2Hs Stage1
needTestsuitePackages
needFile :: Stage -> Package -> Action FilePath
needFile stage pkg
-- TODO (Alp): we might sometimes need more than vanilla!
-- This should therefore depend on what test ways
-- we are going to use, I suppose?
| isLibrary pkg = pkgConfFile (Context stage pkg profilingDynamic)
| otherwise = programPath =<< programContext stage pkg
|
snowleopard/shaking-up-ghc
|
src/Rules/Test.hs
|
bsd-3-clause
| 4,355 | 0 | 16 | 1,126 | 911 | 441 | 470 | 87 | 2 |
{-# LANGUAGE CPP #-}
-- ----------------------------------------------------------------------------
-- | Handle conversion of CmmData to LLVM code.
--
module LlvmCodeGen.Data (
genLlvmData, genData
) where
#include "HsVersions.h"
import GhcPrelude
import Llvm
import LlvmCodeGen.Base
import BlockId
import CLabel
import Cmm
import DynFlags
import Platform
import FastString
import Outputable
-- ----------------------------------------------------------------------------
-- * Constants
--
-- | The string appended to a variable name to create its structure type alias
structStr :: LMString
structStr = fsLit "_struct"
-- ----------------------------------------------------------------------------
-- * Top level
--
-- | Convert a CmmStatic section into equivalent LLVM code.
genLlvmData :: (Section, CmmStatics) -> LlvmM LlvmData
genLlvmData (sec, Statics lbl xs) = do
label <- strCLabel_llvm lbl
static <- mapM genData xs
lmsec <- llvmSection sec
let types = map getStatType static
strucTy = LMStruct types
tyAlias = LMAlias ((label `appendFS` structStr), strucTy)
struct = Just $ LMStaticStruc static tyAlias
link = if (externallyVisibleCLabel lbl)
then ExternallyVisible else Internal
align = case sec of
Section CString _ -> Just 1
_ -> Nothing
const = if isSecConstant sec then Constant else Global
varDef = LMGlobalVar label tyAlias link lmsec align const
globDef = LMGlobal varDef struct
return ([globDef], [tyAlias])
-- | Format the section type part of a Cmm Section
llvmSectionType :: Platform -> SectionType -> FastString
llvmSectionType p t = case t of
Text -> fsLit ".text"
ReadOnlyData -> case platformOS p of
OSMinGW32 -> fsLit ".rdata"
_ -> fsLit ".rodata"
RelocatableReadOnlyData -> case platformOS p of
OSMinGW32 -> fsLit ".rdata$rel.ro"
_ -> fsLit ".data.rel.ro"
ReadOnlyData16 -> case platformOS p of
OSMinGW32 -> fsLit ".rdata$cst16"
_ -> fsLit ".rodata.cst16"
Data -> fsLit ".data"
UninitialisedData -> fsLit ".bss"
CString -> case platformOS p of
OSMinGW32 -> fsLit ".rdata$str"
_ -> fsLit ".rodata.str"
(OtherSection _) -> panic "llvmSectionType: unknown section type"
-- | Format a Cmm Section into an LLVM section name
llvmSection :: Section -> LlvmM LMSection
llvmSection (Section t suffix) = do
dflags <- getDynFlags
let splitSect = gopt Opt_SplitSections dflags
platform = targetPlatform dflags
if not splitSect
then return Nothing
else do
lmsuffix <- strCLabel_llvm suffix
let result sep = Just (concatFS [llvmSectionType platform t
, fsLit sep, lmsuffix])
case platformOS platform of
OSMinGW32 -> return (result "$")
_ -> return (result ".")
-- ----------------------------------------------------------------------------
-- * Generate static data
--
-- | Handle static data
genData :: CmmStatic -> LlvmM LlvmStatic
genData (CmmString str) = do
let v = map (\x -> LMStaticLit $ LMIntLit (fromIntegral x) i8) str
ve = v ++ [LMStaticLit $ LMIntLit 0 i8]
return $ LMStaticArray ve (LMArray (length ve) i8)
genData (CmmUninitialised bytes)
= return $ LMUninitType (LMArray bytes i8)
genData (CmmStaticLit lit)
= genStaticLit lit
-- | Generate Llvm code for a static literal.
--
-- Will either generate the code or leave it unresolved if it is a 'CLabel'
-- which isn't yet known.
genStaticLit :: CmmLit -> LlvmM LlvmStatic
genStaticLit (CmmInt i w)
= return $ LMStaticLit (LMIntLit i (LMInt $ widthInBits w))
genStaticLit (CmmFloat r w)
= return $ LMStaticLit (LMFloatLit (fromRational r) (widthToLlvmFloat w))
genStaticLit (CmmVec ls)
= do sls <- mapM toLlvmLit ls
return $ LMStaticLit (LMVectorLit sls)
where
toLlvmLit :: CmmLit -> LlvmM LlvmLit
toLlvmLit lit = do
slit <- genStaticLit lit
case slit of
LMStaticLit llvmLit -> return llvmLit
_ -> panic "genStaticLit"
-- Leave unresolved, will fix later
genStaticLit cmm@(CmmLabel l) = do
var <- getGlobalPtr =<< strCLabel_llvm l
dflags <- getDynFlags
let ptr = LMStaticPointer var
lmty = cmmToLlvmType $ cmmLitType dflags cmm
return $ LMPtoI ptr lmty
genStaticLit (CmmLabelOff label off) = do
dflags <- getDynFlags
var <- genStaticLit (CmmLabel label)
let offset = LMStaticLit $ LMIntLit (toInteger off) (llvmWord dflags)
return $ LMAdd var offset
genStaticLit (CmmLabelDiffOff l1 l2 off) = do
dflags <- getDynFlags
var1 <- genStaticLit (CmmLabel l1)
var2 <- genStaticLit (CmmLabel l2)
let var = LMSub var1 var2
offset = LMStaticLit $ LMIntLit (toInteger off) (llvmWord dflags)
return $ LMAdd var offset
genStaticLit (CmmBlock b) = genStaticLit $ CmmLabel $ infoTblLbl b
genStaticLit (CmmHighStackMark)
= panic "genStaticLit: CmmHighStackMark unsupported!"
|
ezyang/ghc
|
compiler/llvmGen/LlvmCodeGen/Data.hs
|
bsd-3-clause
| 5,469 | 0 | 17 | 1,598 | 1,335 | 657 | 678 | 109 | 12 |
{-# LANGUAGE LambdaCase, DeriveFunctor #-}
module LambdaPi.Bound where
import Bound
import Control.Applicative
import Control.Monad
import Control.Monad.Gen
import Control.Monad.Reader
import qualified Data.Map as M
import Data.Maybe
import Prelude.Extras
data Expr a = Var a
| App (Expr a) (Expr a)
| Annot (Expr a) (Expr a)
| ETrue
| EFalse
| Bool
| Star
| Pi (Expr a) (Scope () Expr a)
| Lam (Scope () Expr a)
| C String
deriving(Functor, Eq)
instance Eq1 Expr where (==#) = (==)
instance Applicative Expr where
pure = return
(<*>) = ap
instance Monad Expr where
return = Var
Var a >>= f = f a
(App l r) >>= f = App (l >>= f) (r >>= f)
ETrue >>= _ = ETrue
EFalse >>= _ = EFalse
Bool >>= _ = Bool
Star >>= _ = Star
C s >>= _ = C s
Annot l r >>= f = Annot (l >>= f) (r >>= f)
Pi l s >>= f = Pi (l >>= f) (s >>>= f)
Lam e >>= f = Lam (e >>>= f)
type Val = Expr -- Represents normalized expressions
nf :: Expr a -> Val a
nf = \case
(Annot e t) -> nf e
(Lam e) -> Lam (toScope . nf . fromScope $ e)
(Pi l r) -> Pi (nf l) (toScope . nf . fromScope $ r)
(App l r) ->
case l of
Lam f -> nf (instantiate1 r f)
l' -> App l' (nf r)
e -> e
data Env = Env { localVars :: M.Map Int (Val Int)
, constants :: M.Map String (Val Int) }
type TyM = ReaderT Env (GenT Int Maybe)
unbind :: (MonadGen a m, Functor m, Monad f) => Scope () f a -> m (a, f a)
unbind scope = ((,) <*> flip instantiate1 scope . return) <$> gen
unbindWith :: Monad f => a -> Scope () f a -> f a
unbindWith = instantiate1 . return
inferType :: Expr Int -> TyM (Val Int)
inferType (Var i) = asks (M.lookup i . localVars) >>= maybe mzero return
inferType (C s) = asks (M.lookup s . constants) >>= maybe mzero return
inferType ETrue = return Bool
inferType EFalse = return Bool
inferType Bool = return Star
inferType Star = return Star
inferType (Lam _) = mzero -- We can only check lambdas
inferType (Annot e ty) = do
checkType ty Star
let v = nf ty
v <$ checkType e v
inferType (App f a) = do
ty <- inferType f
case ty of
Pi aTy body -> nf (App (Lam body) a) <$ checkType a aTy
_ -> mzero
inferType (Pi t s) = do
checkType t Star
(newVar, s') <- unbind s
local (\e -> e{localVars = M.insert newVar (nf t) $ localVars e}) $
Star <$ checkType s' Star
checkType :: Expr Int -> Val Int -> TyM ()
checkType (Lam s) (Pi t ts) = do
(newVar, s') <- unbind s
local (\e -> e{localVars = M.insert newVar (nf t) $ localVars e}) $
checkType s' (nf $ unbindWith newVar ts)
checkType e t = inferType e >>= guard . (== t)
lam :: Eq a => a -> Expr a -> Expr a
lam a = Lam . abstract1 a
pit :: Eq a => a -> Expr a -> Expr a -> Expr a
pit v t = Pi t . abstract1 v
hasType :: Expr Int -> Expr Int -> Bool
hasType e = isJust
. runGenT
. flip runReaderT (Env M.empty M.empty)
. checkType e
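-- A hedged usage sketch (not part of the original module), using the smart
-- constructors above; variables are plain Ints here:
--   hasType (lam 0 (Var 0)) (pit 0 Bool Bool)  -- expected True
--   hasType ETrue Bool                         -- expected True
--   hasType ETrue (pit 0 Bool Bool)            -- expected False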
|
jozefg/cooked-pi
|
src/LambdaPi/Bound.hs
|
mit
| 3,074 | 0 | 17 | 944 | 1,476 | 734 | 742 | 92 | 6 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeSynonymInstances #-}
module Game.MtG.Types where
import Control.Applicative
import Control.Monad.IO.Class
import Data.Data
import Control.Lens
import Control.Monad.State (StateT)
import Data.DeriveTH
import Data.IntMap (IntMap)
import Data.Sequence (Seq)
import Data.Set (Set)
import Data.Text (Text)
import Data.Word (Word8)
-- |
-- = Types for card attributes
data Layout = Normal | Split | Flip | DoubleFaced | TokenLayout | Plane
| Scheme | Phenomenon
deriving (Show, Eq, Data, Typeable)
type Name = Text
type ManaCost = [ManaSymbol]
data ManaSymbol = W | U | B | R | G | S | CL Word8 | X | Y | Z
| GW | WU | RW | WB | UB | GU | UR | BR | BG | RG
| W2 | U2 | B2 | R2 | G2 | WP | UP | BP | RP | GP | P
deriving (Show, Eq, Data, Typeable)
type ResolvedManaCost = [ResolvedManaSymbol]
data ResolvedManaSymbol = W' | U' | B' | R' | G' | CL'
deriving (Show, Read, Eq, Enum, Data, Typeable)
data ManaType = ManaAnyOneColor | ManaAnyColor | ManaThatColor
| ManaAnyCombination | ManaAnyCombinationOf [ManaSymbol]
| ManaSymbols [[ManaSymbol]] -- OrList ([ManaSymbol])
deriving (Show, Eq, Data, Typeable)
type CMC = Word8
data Color = White | Blue | Black | Red | Green
deriving (Show, Eq, Data, Typeable)
type TypeLine = Text
data Supertype = Basic | Legendary | Ongoing | Snow | World
deriving (Show, Eq, Data, Typeable)
data Type = Instant | Sorcery | Artifact | Creature | Enchantment
| Land | Planeswalker | Tribal
deriving (Show, Eq, Data, Typeable)
data Subtype = ArtifactType ArtifactType
| EnchantmentType EnchantmentType
| LandType LandType
| PlaneswalkerType PlaneswalkerType
| SpellType SpellType
| CreatureType CreatureType
deriving (Show, Eq, Data, Typeable)
data ArtifactType = Contraption | Equipment | Fortification
deriving (Show, Eq, Data, Typeable)
data EnchantmentType = Aura | Curse | Shrine
deriving (Show, Eq, Data, Typeable)
data LandType = BasicLand BasicLandType
| Desert | Gate | Lair | Locus | Mine | PowerPlant
| Tower | Urzas
deriving (Show, Eq, Data, Typeable)
data BasicLandType = Forest | Island | Mountain | Plains | Swamp
deriving (Show, Eq, Data, Typeable)
data PlaneswalkerType = Ajani | Ashiok | Bolas | Chandra | Domri | Elspeth
| Garruk | Gideon | Jace | Karn | Kiora | Koth | Liliana
| Nissa | Ral | Sarkhan | Sorin | Tamiyo | Tezzeret
| Tibalt | Venser | Vraska | Xenagos
deriving (Show, Eq, Data, Typeable)
data SpellType = Arcane | Trap
deriving (Show, Eq, Data, Typeable)
data CreatureType = Advisor | Ally | Angel | Anteater | Antelope | Ape
| Archer | Archon | Artificer | Assassin
| AssemblyWorker | Atog | Aurochs | Avatar | Badger
| Barbarian | Basilisk | Bat | Bear | Beast | Beeble
| Berserker | Bird | Blinkmoth | Boar | Bringer
| Brushwagg | Camarid | Camel | Caribou | Carrier | Cat
| Centaur | Cephalid | Chimera | Citizen | Cleric
| Cockatrice | Construct | Coward | Crab | Crocodile
| Cyclops | Dauthi | Demon | Deserter | Devil | Djinn
| Dragon | Drake | Dreadnought | Drone | Druid | Dryad
| Dwarf | Efreet | Elder | Eldrazi | Elemental | Elephant
| Elf | Elk | Eye | Faerie | Ferret | Fish | Flagbearer
| Fox | Frog | Fungus | Gargoyle | Germ | Giant | Gnome
| Goat | Goblin | God | Golem | Gorgon | Graveborn
| Gremlin | Griffin | Hag | Harpy | Hellion | Hippo
| Hippogriff | Homarid | Homunculus | Horror | Horse
| Hound | Human | Hydra | Hyena | Illusion | Imp
| Incarnation | Insect | Jellyfish | Juggernaut | Kavu
| Kirin | Kithkin | Knight | Kobold | Kor | Kraken
| Lammasu | Leech | Leviathan | Lhurgoyf | Licid | Lizard
| Manticore | Masticore | Mercenary | Merfolk | Metathran
| Minion | Minotaur | Monger | Mongoose | Monk | Moonfolk
| Mutant | Myr | Mystic | Nautilus | Nephilim | Nightmare
| Nightstalker | Ninja | Noggle | Nomad | Nymph | Octopus
| Ogre | Ooze | Orb | Orc | Orgg | Ouphe | Ox | Oyster
| Pegasus | Pentavite | Pest | Phelddagrif | Phoenix
| Pincher | Pirate | Plant | Praetor | Prism | Rabbit
| Rat | Rebel | Reflection | Rhino | Rigger | Rogue
| Sable | Salamander | Samurai | Sand | Saproling | Satyr
| Scarecrow | Scorpion | Scout | Serf | Serpent | Shade
| Shaman | Shapeshifter | Sheep | Siren | Skeleton
| Slith | Sliver | Slug | Snake | Soldier | Soltari
| Spawn | Specter | Spellshaper | Sphinx | Spider | Spike
| Spirit | Splinter | Sponge | Squid | Squirrel
| Starfish | Surrakar | Survivor | Tetravite | Thalakos
| Thopter | Thrull | Treefolk | Triskelavite | Troll
| Turtle | Unicorn | Vampire | Vedalken | Viashino
| Volver | Wall | Warrior | Weird | Werewolf | Whale
| Wizard | Wolf | Wolverine | Wombat | Worm | Wraith
| Wurm | Yeti | Zombie | Zubera
deriving (Show, Eq, Data, Typeable)
data Rarity = Common | Uncommon | Rare | MythicRare | BasicLandRarity
deriving (Show, Eq, Data, Typeable)
type RulesText = Text
type Flavor = Text
type Artist = Text
type CardNumber = Text
-- FIXME: Should parse this into our data type using Parsec
type Power = Text
-- FIXME: Should parse this into our data type using Parsec
type Toughness = Text
type Loyalty = Word8
type MultiverseID = Int
type ImageName = Text
type Watermark = Text
data Border = BlackBorder | WhiteBorder | SilverBorder
deriving (Show, Eq, Data, Typeable)
-- |
-- = Types for parsed abilities
data Cost = CMana ManaCost | CTap | CUntap | CLoyalty NumChange
| CEffect Effect
deriving (Show, Eq, Data, Typeable)
data ResolvedCost = CMana' ResolvedManaCost | CTap' | CUntap'
| CLoyalty' NumChange | CEffect' Effect
deriving (Show, Eq, Data, Typeable)
data Targets = Target CountRange [TargetMatch]
| NoTarget (Maybe CountRange) [TargetMatch]
deriving (Show, Eq, Data, Typeable)
-- FIXME: Support targeting zones ("target library" Circu, Dimir Lobotomist)
-- § 114.1 "targets are object(s), player(s), and/or zone(s)"
-- § 109.1 "an object is an ability on the stack, a card, a copy of a card,
-- a token, a spell, a permanent or an emblem"
--
-- However, an emblem is not a valid target
data TargetMatch = TMPermanent PermanentMatch | TMSpell SpellMatch
| TMCard CardMatch | TMPlayer PlayerMatch
| TMThis | TMEnchantedPermanent | TMEquippedCreature
| TMSacrificed PermanentTypeMatch | TMSacrificedCard
| TMIt | TMThey | TMTheRest
deriving (Show, Eq, Data, Typeable)
data SpellMatch = SpellMatch ColorMatch [PermanentTypeMatch]
deriving (Show, Eq, Data, Typeable)
-- TODO: CardMatch should probably also match colors at least
data CardMatch = TopCardsOfLibrary NumValue Zone
| CardMatch [PermanentTypeMatch] (Maybe Quality) (Maybe Zone)
deriving (Show, Eq, Data, Typeable)
data Quality = QPower CountRange | QToughness CountRange
| QCMC CountRange
deriving (Show, Eq, Data, Typeable)
-- TODO: Ability should be Non Ability ("with" vs. "without")
data PermanentMatch = PermanentMatch (Maybe BlockedStatus)
[CombatStatus] ColorMatch
NonToken PermanentTypeMatch [Ability] (Maybe Quality)
(Maybe Name) (Maybe OwnControl)
deriving (Show, Eq, Data, Typeable)
data ColorMatch = CMColors [Non Color] | CMMonocolored | CMMulticolored
deriving (Show, Eq, Data, Typeable)
data BlockedStatus = Blocked | Unblocked
deriving (Show, Eq, Data, Typeable)
data CombatStatus = Attacking | Blocking
deriving (Show, Eq, Data, Typeable)
data NonToken = NonToken | CardOrToken
deriving (Show, Eq, Data, Typeable)
data OwnControl = Own PlayerMatch | Control PlayerMatch
deriving (Show, Eq, Data, Typeable)
data PermanentTypeMatch = PermanentTypeMatch [Non Supertype] [Non Type]
[Non Subtype]
| PTMToken | PTMPermanent
deriving (Show, Eq, Data, Typeable)
data Non a = Non Bool a
deriving (Show, Eq, Data, Typeable)
data PermanentStatus = PermanentStatus
{ _tapStatus :: TapStatus
, _flipStatus :: FlipStatus
, _faceStatus :: FaceStatus
, _phaseStatus :: PhaseStatus
} deriving (Show, Eq, Data, Typeable)
data PermanentStatusMatch =
PermanentStatusMatch (Maybe TapStatus) (Maybe FlipStatus) (Maybe FaceStatus) (Maybe PhaseStatus)
deriving (Show, Eq, Data, Typeable)
data TapStatus = Tapped | Untapped
deriving (Show, Eq, Data, Typeable)
data FlipStatus = Flipped | Unflipped
deriving (Show, Eq, Data, Typeable)
data FaceStatus = FaceUp | FaceDown
deriving (Show, Eq, Data, Typeable)
data PhaseStatus = PhasedIn | PhasedOut
deriving (Show, Eq, Data, Typeable)
data CountRange = UpTo Count | Exactly Count | AtLeast Count
| OneOf [NumValue] | AnyNumber | Other
deriving (Show, Eq, Data, Typeable)
data Count = AnyCount NumValue | OtherCount NumValue
deriving (Show, Eq, Data, Typeable)
data NumValue = NumValue Word8 | NumValueX | All | NumVariable Calculation
| ThatMuch
deriving (Show, Eq, Data, Typeable)
data NumChange = Plus NumValue | Minus NumValue
deriving (Show, Eq, Data, Typeable)
-- FIXME: Actually parse calculations properly
type Calculation = Text
-- FIXME: Should this be parsed into possible counter types?
type CounterType = Text
type DurationOrTriggerEvent = Either Duration TriggerEvent
data Duration = DurationUntil TriggerEvent | DurationForAsLongAs TriggerEvent
| DurationDuring (Maybe PlayerMatch) (Maybe Next) Step
| DurationEachTurn -- FIXME: Perhaps "each turn" shouldn't be a duration?
| DurationEachCombat -- FIXME: Perhaps "each combat" shouldn't either?
deriving (Show, Eq, Data, Typeable)
-- FIXME: Should be ZoneMatch, and have an enumeration type Zone w/o fields
data Zone = Library Targets | TopOfLibrary Targets
| BottomOfLibrary Targets | Hand Targets
| Graveyard Targets
| Battlefield | Stack | ExileZone | Command | ZoneIt
deriving (Show, Eq, Data, Typeable)
data TriggerEvent = TEAt (Maybe PlayerMatch) (Maybe Next) Step
| TEThisETB | TEThisLTB
| TEThisETBOrDies | TEThisDies
| TEObjectETB PermanentMatch
| TEObjectLTB PermanentMatch
| TEOther Text -- FIXME: Make more value constr.
deriving (Show, Eq, Data, Typeable)
data Next = Next deriving (Show, Eq, Data, Typeable)
-- TODO: Prefix all constructors with PM?
data PlayerMatch = EachPlayer | You | PMPlayer | Players | Opponent | Opponents
| Controller TargetMatch | Owner TargetMatch | HisOrHer | Their
| ThatPlayer | ThosePlayers
deriving (Show, Eq, Data, Typeable)
data Step = UntapStep | Upkeep | DrawStep | PreCombatMain
| BeginningOfCombat | DeclareAttackers | DeclareBlockers
| CombatDamage | EndOfCombat | PostCombatMain
| End | Cleanup
deriving (Show, Eq, Bounded, Enum, Data, Typeable)
data Divided = Divided deriving (Show, Eq, Data, Typeable)
data FromAmong = FromAmong deriving (Show, Eq, Data, Typeable)
data CardOrder = AnyOrder | RandomOrder
deriving (Show, Eq, Data, Typeable)
data Effect =
-- One-shot effects
Choose Targets Targets (Maybe Zone)
| Destroy Targets
| Counter Targets
| Exile Targets Targets (Maybe FaceStatus) (Maybe DurationOrTriggerEvent)
-- who, what, from, from among, to, tap status, attached to, under control,
-- order, trigger event (for delayed zone change)
| ZoneChange Targets Targets (Maybe Zone) (Maybe FromAmong) Zone
(Maybe TapStatus) (Maybe Targets) (Maybe OwnControl) (Maybe CardOrder)
(Maybe TriggerEvent)
| RevealZone Targets Zone
| RevealCards Targets Targets (Maybe Zone)
| Tap Targets
| Untap Targets
| LoseLife Targets NumValue
| GainLife Targets NumValue
| PayLife NumValue
| AddAbilities Targets [Ability] (Maybe Duration)
| ModifyPT Targets NumChange NumChange (Maybe Duration)
| DealDamage Targets NumValue (Maybe Divided) Targets
| DrawCard Targets NumValue
| Sacrifice Targets Targets
| Discard Targets Targets
| Regenerate Targets
| GainControl Targets Targets (Maybe Duration)
| RemoveCounters CountRange (Maybe CounterType) Targets
| PutCounters CountRange (Maybe CounterType) Targets
| PutTokens Targets NumValue NumValue NumValue PermanentMatch
(Maybe [Ability])
| AddMana (Maybe CountRange) ManaType
-- who, which zone, for what
| SearchZone Targets Zone Targets
| ShuffleInto Targets Targets Zone
| Shuffle Targets Zone
-- what, by, except by, duration
| CantBeBlocked Targets (Maybe Targets) (Maybe Targets) (Maybe Duration)
-- what, whom, duration
| CantBlock Targets (Maybe Targets) (Maybe Duration)
-- what, whom, duration
| CanBlockAdditional Targets Targets (Maybe Duration)
-- what, whom
| CanBlockOnly Targets Targets
-- what, by whom (exactly), duration
| MustBeBlockedIfAble Targets (Maybe Targets) (Maybe Duration)
-- what, whom, duration
| AttackIfAble Targets (Maybe Targets) (Maybe Duration)
| CantBeRegenerated Targets (Maybe Duration)
| DoesntUntap Targets Duration (Maybe Duration)
| ETBTapStatus Targets TapStatus
| ETBWithCounters Targets CountRange (Maybe CounterType)
| GetEmblem [Ability]
-- TODO: PermanentStatusMatch for "tapped"
-- TODO: CombatStatus for "attacking" "blocking"
-- Keyword actions
| Monstrosity NumValue
| Scry NumValue
-- TODO: Parse "for each" multipliers, which can
-- be at the beginning (Curse of the Swine) or end
-- of the effect (Nemesis of Mortals)
-- Other effects
| ModalEffects CountRange [Effect]
| OptionalEffect PlayerMatch Effect
| OtherEffect Text
deriving (Show, Eq, Data, Typeable)
data Keyword = Deathtouch
| Defender
| DoubleStrike
| Enchant Targets
| Equip ([Cost])
| FirstStrike
| Flash
| Flying
| Haste
| Hexproof
| Indestructible
| Intimidate
| Landwalk PermanentTypeMatch
| Lifelink
| Protection (Either Quality PlayerMatch) -- FIXME: Color, etc.
| Reach
| Shroud
| Trample
| Vigilance
| Phasing
| Bestow ([Cost])
deriving (Show, Eq, Data, Typeable)
type TriggerCondition = Text -- TODO: should this be the same as AltCostCondition?
type ActivationInst = Text
type AltCostCondition = Text
data Ability = AdditionalCost ([Cost])
| AlternativeCost ([Cost]) (Maybe [AltCostCondition])
| KeywordAbility Keyword
| ActivatedAbility ([Cost]) [Effect] (Maybe ActivationInst)
| TriggeredAbility TriggerEvent [Effect] (Maybe [TriggerCondition])
| StaticAbility [Effect]
| SpellAbility [Effect]
deriving (Show, Eq, Data, Typeable)
type SetCode = Text
data Card = Card
{ _cardLayout :: Layout
, _cardTypeLine :: TypeLine
, _cardTypes :: [Type]
, _cardColors :: [Color]
, _cardMultiverseID :: MultiverseID
, _cardName :: Name
, _cardNames :: [Name]
, _cardSupertypes :: [Supertype]
, _cardSubtypes :: [Subtype]
, _cardCmc :: Maybe CMC
, _cardRarity :: Rarity
, _cardArtist :: Artist
, _cardPower :: Maybe Power
, _cardToughness :: Maybe Toughness
, _cardLoyalty :: Maybe Loyalty
, _cardManaCost :: Maybe ManaCost
, _cardRulesText :: Maybe RulesText
, _cardAbilities :: [Ability]
, _cardCardNumber :: CardNumber
, _cardVariations :: [MultiverseID]
, _cardImageName :: ImageName
, _cardWatermark :: Maybe Watermark
, _cardCardBorder :: Maybe Border
, _cardSetCode :: SetCode
} deriving (Show, Data, Typeable)
makeLenses ''PermanentStatus
makeFields ''Card
instance Eq Card where
c1 == c2 = (c1^.multiverseID) == (c2^.multiverseID)
instance Ord Card where
c1 `compare` c2 = (c1^.multiverseID) `compare` (c2^.multiverseID)
-- |
-- = Types for card sets
type SetName = Text
-- TODO: Should be UTCTime or something?
type SetRelease = Text
data SetType = Core | Expansion | Reprint | Box | Un | FromTheVault
| PremiumDeck | DuelDeck | Starter | Commander
| Planechase | Archenemy | Promo
deriving (Show, Eq)
type SetBlock = Text
-- Type for card set as parsed from JSON
-- FIXME: Rename to RawCardSet?
data CardSet' = CardSet'
{ _setName' :: SetName
, _code' :: SetCode
, _release' :: SetRelease
, _border' :: Border
, _setType' :: SetType
, _block' :: Maybe SetBlock
, _cards' :: [Card]
} deriving (Show)
makeLenses ''CardSet'
-- Type for card set as persisted
data CardSet = CardSet
{ _setName :: SetName
, _code :: SetCode
, _release :: SetRelease
, _border :: Border
, _setType :: SetType
, _block :: Maybe SetBlock
, _cardMultiverseIDs :: [MultiverseID]
} deriving (Show, Typeable)
makeLenses ''CardSet
-- |
-- = Types for the game engine
data Characteristics = Characteristics
{ _characteristicsName :: Name
, _characteristicsManaCost :: Maybe ManaCost
, _characteristicsColors :: [Color]
, _characteristicsTypes :: [Type]
, _characteristicsSubtypes :: [Subtype]
, _characteristicsSupertypes :: [Supertype]
, _characteristicsRulesText :: Maybe RulesText
, _characteristicsAbilities :: [Ability]
, _characteristicsPower :: Maybe Power
, _characteristicsToughness :: Maybe Toughness
, _characteristicsLoyalty :: Maybe Loyalty
} deriving (Show, Data, Typeable)
makeFields ''Characteristics
-- Object ID
type OId = Int
-- Player ID
type PId = Int
data OCard = OCard
{ _ocardOwner :: PId
, _ocardCard :: Card
} deriving (Data, Typeable)
type Timestamp = Integer
data Permanent = PCard
{ _pcardCard :: Card
, _pcardChars :: Characteristics
, _pcardOwner :: PId
, _pcardController :: PId
, _pcardPermanentStatus :: PermanentStatus
, _pcardSummoningSick :: Bool
, _pcardMarkedDamage :: Int
, _pcardLoyaltyAlreadyActivated :: Bool
, _pcardTimestamp :: Timestamp
-- TODO: Add more fields: activatedAbilityAlreadyActivated
}
| PToken
{ _ptokenCopyOfCard :: Maybe Card
, _ptokenChars :: Characteristics
, _ptokenOwner :: PId
, _ptokenController :: PId
, _ptokenPermanentStatus :: PermanentStatus
, _ptokenSummoningSick :: Bool
, _ptokenMarkedDamage :: Int
, _ptokenLoyaltyAlreadyActivated :: Bool
, _ptokenTimestamp :: Timestamp
-- TODO: Add more fields
}
deriving (Data, Typeable)
data Spell = Spell
{ _spellCard :: Card
, _spellChars :: Characteristics
, _spellOwner :: PId
, _spellController :: PId
-- TODO: Add more fields, i.e. modes, targets, value of X,
-- additional or alternative costs
} deriving (Data, Typeable)
data StackAbility = StackAbility
{ _stackabilityEffects :: [Effect]
  -- Should this be Maybe [Cost], or just an empty list for triggered abilities?
, _stackabilityActivationCost :: Maybe [Cost]
, _stackabilityTriggerCondition :: Maybe [TriggerCondition]
, _stackabilitySource :: OId
, _stackabilityOwner :: PId
, _stackabilityController :: PId
-- TODO: Add more fields, i.e. modes, targets, value of X
-- TODO: Copy source object into this as a way to keep
-- "last known information"
} deriving (Show, Data, Typeable)
data Emblem = Emblem
{ _emblemAbilities :: [Ability]
, _emblemOwner :: PId
, _emblemController :: PId
} deriving (Show, Data, Typeable)
-- TODO: Implement Copy (perhaps only of spells, since permanents could be
-- done within the Permanent type?) Or is there no reason to have a separate
-- type for copies, even of spells? Having no separate type would be useful
-- for TargetMatch.
--data Copy = Copy
makeFields ''OCard
makeFields ''Permanent
makeFields ''Spell
makeFields ''StackAbility
makeFields ''Emblem
-- makeFields ''Copy
instance Show OCard where
show oc = "OCard " ++ show (oc^.owner) ++ " - " ++ show (oc^.card.name)
instance Show Permanent where
show pe = "Permanent " ++ show (pe^.owner) ++ " - " ++ show (pe^.chars.name)
instance Show Spell where
show sp = "Spell " ++ show (sp^.owner) ++ " - " ++ show (sp^.chars.name)
data StackObject = OSpell Spell
| OStackAbility StackAbility
-- | OCopy FIXME
deriving (Show, Data, Typeable)
makePrisms ''StackObject
type LifeTotal = Int
type PoisonTotal = Word8
type HandSize = Int
data ManaPool = ManaPool
{ _whiteMana :: Int
, _blueMana :: Int
, _blackMana :: Int
, _redMana :: Int
, _greenMana :: Int
, _colorlessMana :: Int
} deriving (Show, Data, Typeable)
instance Each ManaPool ManaPool Int Int where
each f (ManaPool w u b r g c) =
ManaPool <$> f w <*> f u <*> f b <*> f r <*> f g <*> f c
makeLenses ''ManaPool
-- FIXME: This should be a product type with name, etc.
type PlayerInfo = Text
type AId = (OId, Int) -- activated ability id
data PriorityAction = PassPriority
| CastSpell OId
| ActivateAbility AId
| ActivateManaAbility AId
| ActivateLoyaltyAbility AId
| PlayLand OId
deriving (Show, Eq, Ord, Typeable)
data PlayerChoice = ChooseMulligan
| ChoosePriorityAction
| ChooseManaFromPool
| ChooseModes
| ChooseAlternativeCost
| ChooseAdditionalCosts
| ChooseVariableCost
| ChooseManaCost
| ChooseTarget
| ChooseDivision
| ChooseManaAbilityActivation
| ChooseAttackers
| ChooseBlockers
| ChooseBlockerOrder
deriving (Show, Eq, Ord, Typeable)
data SPlayerChoice (c :: PlayerChoice) where
SChooseMulligan :: SPlayerChoice 'ChooseMulligan
SChoosePriorityAction :: SPlayerChoice 'ChoosePriorityAction
SChooseManaFromPool :: SPlayerChoice 'ChooseManaFromPool
SChooseModes :: SPlayerChoice 'ChooseModes
SChooseManaAbilityActivation :: SPlayerChoice 'ChooseManaAbilityActivation
instance Show (SPlayerChoice c) where
show SChooseMulligan = "SChooseMulligan"
show SChoosePriorityAction = "SChoosePriorityAction"
show SChooseManaFromPool = "SChooseManaFromPool"
show SChooseModes = "SChooseModes"
show SChooseManaAbilityActivation = "SChooseManaAbilityActivation"
type family PlayerChoiceRequest (c :: PlayerChoice) :: * where
PlayerChoiceRequest 'ChooseMulligan = IntMap OCard
PlayerChoiceRequest 'ChoosePriorityAction = Set PriorityAction
PlayerChoiceRequest 'ChooseManaFromPool = ManaPool
PlayerChoiceRequest 'ChooseModes = (CountRange, [Effect])
PlayerChoiceRequest 'ChooseManaAbilityActivation = Set PriorityAction
type family PlayerChoiceResponse (c :: PlayerChoice) :: * where
PlayerChoiceResponse 'ChooseMulligan = Bool
PlayerChoiceResponse 'ChoosePriorityAction = PriorityAction
PlayerChoiceResponse 'ChooseManaFromPool = ResolvedManaSymbol
PlayerChoiceResponse 'ChooseModes = [Effect]
PlayerChoiceResponse 'ChooseManaAbilityActivation = Maybe PriorityAction
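-- A hedged illustration (hypothetical code, not part of this module): because
-- 'SPlayerChoice' is a GADT, matching on the singleton refines the type
-- families above, so a choice handler can return a differently typed answer
-- per kind of choice, e.g.
--
--   trivialChoice :: MonadIO m
--                 => SPlayerChoice c -> KGame -> PlayerChoiceRequest c
--                 -> m (PlayerChoiceResponse c)
--   trivialChoice SChooseMulligan     _ _hand = return False -- a Bool
--   trivialChoice SChooseManaFromPool _ _pool = return W'    -- a ResolvedManaSymbol
--   ...
--
-- (whether False here means "keep the hand" or "mulligan" is an assumption).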
data PlayerChoiceLog where
PlayerChoiceLog :: PId -> SPlayerChoice c -> PlayerChoiceResponse c ->
PlayerChoiceLog
instance Show PlayerChoiceLog where
show (PlayerChoiceLog p pc@SChooseMulligan a) = showPlayerChoiceLog p pc a
show (PlayerChoiceLog p pc@SChoosePriorityAction a) = showPlayerChoiceLog p pc a
show (PlayerChoiceLog p pc@SChooseManaFromPool a) = showPlayerChoiceLog p pc a
show (PlayerChoiceLog p pc@SChooseModes a) = showPlayerChoiceLog p pc a
show (PlayerChoiceLog p pc@SChooseManaAbilityActivation a) = showPlayerChoiceLog p pc a
showPlayerChoiceLog :: Show a => PId -> SPlayerChoice c -> a -> String
showPlayerChoiceLog p pc a = unwords [ "PlayerChoiceLog"
, show p
, show pc
, show a
]
-- data PlayerChoiceLog = forall c. PlayerChoiceLog PId (SPlayerChoice c) (PlayerChoiceResponse c)
-- player info known to a particular player
data KPlayer = KPlayerYou
{ _kplayeryouLibrarySize :: Int
, _kplayeryouHand :: IntMap OCard
, _kplayeryouGraveyard :: Seq (OId, OCard)
, _kplayeryouLife :: LifeTotal
, _kplayeryouPoison :: PoisonTotal
, _kplayeryouMaxHandSize :: HandSize
, _kplayeryouManaPool :: ManaPool
, _kplayeryouPlayerInfo :: PlayerInfo
}
| KPlayerOpponent
{ _kplayeropponentLibrarySize :: Int
, _kplayeropponentHandSize :: Int
, _kplayeropponentGraveyard :: Seq (OId, OCard)
, _kplayeropponentLife :: LifeTotal
, _kplayeropponentPoison :: PoisonTotal
, _kplayeropponentMaxHandSize :: HandSize
, _kplayeropponentManaPool :: ManaPool
, _kplayeropponentPlayerInfo :: PlayerInfo
}
deriving (Show, Typeable)
type TurnNumber = Int
type LandCount = Word8
data Relationships = Relationships
{ _attachedTo :: IntMap OId
, _exiledWith :: IntMap (Set OId)
-- TODO: Define more relationships, i.e. soulbond, haunt
} deriving (Show, Data, Typeable)
makeLenses ''Relationships
data KGame = KGame
{ _kgameChoiceLog :: Seq PlayerChoiceLog
, _kgameYou :: PId
, _kgamePlayers :: [KPlayer]
, _kgameBattlefield :: IntMap Permanent
, _kgameStack :: Seq (OId, StackObject)
, _kgameExile :: IntMap OCard
, _kgameCommandZone :: IntMap OCard
, _kgameTurnOrder :: Seq PId
, _kgameActivePlayer :: PId
, _kgamePriority :: Maybe PId
, _kgameTurn :: TurnNumber
, _kgameRemainingLandCount :: LandCount
, _kgameStep :: Step
, _kgameRelationships :: Relationships
} deriving (Show, Typeable)
data Player = Player
{ choiceFn :: (MonadIO m => SPlayerChoice c -> KGame ->
PlayerChoiceRequest c -> m (PlayerChoiceResponse c))
, _playerLibrary :: Seq (OId, OCard)
, _playerHand :: IntMap OCard
, _playerGraveyard :: Seq (OId, OCard)
, _playerLife :: LifeTotal
, _playerPoison :: PoisonTotal
, _playerMaxHandSize :: HandSize
, _playerManaPool :: ManaPool
, _playerPlayerInfo :: PlayerInfo
}
makeFields ''Player
makeFields ''KPlayer
data Game = Game
{ _gamePlayers :: [Player] -- FIXME: Should this be Seq?
, _gameBattlefield :: IntMap Permanent
, _gameStack :: Seq (OId, StackObject)
, _gameExile :: IntMap OCard
, _gameCommandZone :: IntMap OCard
, _gameTurnOrder :: Seq PId
, _gameActivePlayer :: PId
, _gamePriority :: Maybe PId
, _gameSuccessivePasses :: Set PId
, _gameMaxTimestamp :: Timestamp
, _gameTurn :: TurnNumber
, _gameRemainingLandCount :: LandCount
, _gameStep :: Step
, _gameRelationships :: Relationships
, _gameMaxOId :: OId
, _gameChoiceLog :: Seq PlayerChoiceLog
}
makeFields ''Game
makeFields ''KGame
type App = StateT Game IO
$( derive makeIs ''ResolvedManaSymbol)
$( derive makeIs ''Cost)
$( derive makeIs ''Targets)
$( derive makeIs ''Ability)
$( derive makeIs ''Effect)
|
ltoth/mtg
|
Game/MtG/Types.hs
|
mit
| 31,000 | 0 | 16 | 9,945 | 6,856 | 3,999 | 2,857 | -1 | -1 |
{-# LANGUAGE FlexibleContexts #-}
{-
Copyright (C) 2012 Kacper Bak <http://gsd.uwaterloo.ca>
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
-}
module Language.Clafer.Intermediate.Resolver where
import Control.Monad
import Control.Monad.State
import qualified Data.Map as Map
import Language.Clafer.Common
import Language.Clafer.ClaferArgs
import Language.Clafer.Intermediate.Intclafer
import Language.Clafer.Intermediate.ResolverName
import Language.Clafer.Intermediate.ResolverType
import Language.Clafer.Intermediate.ResolverInheritance
-- | Run the various resolvers
resolveModule :: ClaferArgs -> IModule -> Resolve (IModule, GEnv)
resolveModule args' declarations =
do
r <- resolveNModule $ nameModule (skip_resolver args') declarations
resolveNamesModule args' =<< (rom' $ rem' r)
where
rem' = if flatten_inheritance args' then resolveEModule else id
rom' = if skip_resolver args' then return . id else resolveOModule
-- | Name resolver
nameModule :: Bool -> IModule -> (IModule, GEnv)
nameModule skipResolver imodule = (imodule{_mDecls = decls'}, genv')
where
(decls', genv') = runState (mapM (nameElement skipResolver) $ _mDecls imodule) $ GEnv Map.empty 0 Map.empty []
nameElement :: MonadState GEnv m => Bool -> IElement -> m IElement
nameElement skipResolver x = case x of
IEClafer claf -> IEClafer `liftM` (nameClafer skipResolver claf)
IEConstraint isHard' pexp -> IEConstraint isHard' `liftM` (namePExp pexp)
IEGoal isMaximize' pexp -> IEGoal isMaximize' `liftM` (namePExp pexp)
nameClafer :: MonadState GEnv m => Bool -> IClafer -> m IClafer
nameClafer skipResolver claf = do
claf' <- if skipResolver then return claf{_uid = _ident claf} else (renameClafer (not skipResolver)) claf
elements' <- mapM (nameElement skipResolver) $ _elements claf
return $ claf' {_elements = elements'}
namePExp :: MonadState GEnv m => PExp -> m PExp
namePExp pexp@(PExp _ _ _ exp') = do
n <- gets expCount
modify (\e -> e {expCount = 1 + n})
exp'' <- nameIExp exp'
return $ pexp {_pid = concat [ "e", show n, "_"], Language.Clafer.Intermediate.Intclafer._exp = exp''}
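-- For example, an expression named while 'expCount' is n receives the pid
-- "e" ++ show n ++ "_", so pids take the form "e0_", "e1_", "e2_", ...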
nameIExp :: MonadState GEnv m => IExp -> m IExp
nameIExp x = case x of
IDeclPExp quant' decls' pexp -> do
decls'' <- mapM nameIDecl decls'
pexp' <- namePExp pexp
return $ IDeclPExp quant' decls'' pexp'
IFunExp op' pexps -> IFunExp op' `liftM` (mapM namePExp pexps)
_ -> return x
nameIDecl :: MonadState GEnv m => IDecl -> m IDecl
nameIDecl (IDecl isDisj' dels body') = IDecl isDisj' dels `liftM` (namePExp body')
-- -----------------------------------------------------------------------------
resolveNamesModule :: ClaferArgs -> (IModule, GEnv) -> Resolve (IModule, GEnv)
resolveNamesModule args' (declarations, genv') =
do
res <- foldM (flip ($)) declarations $ map (\f -> flip (curry f) genv') funs
return (res, genv')
where
funs :: [(IModule, GEnv) -> Resolve IModule]
funs
| skip_resolver args' = [return . analyzeModule, resolveTModule]
| otherwise = [ return . analyzeModule, resolveModuleNames, resolveTModule]
|
juodaspaulius/clafer-old-customBNFC
|
src/Language/Clafer/Intermediate/Resolver.hs
|
mit
| 4,072 | 0 | 14 | 681 | 992 | 518 | 474 | 56 | 3 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
module Hickory.Utils.Bvh where
import Control.Monad (void)
import Text.Megaparsec
import Text.Megaparsec.Char
import qualified Text.Megaparsec.Char.Lexer as L
import Data.Maybe
import Hickory.Utils.Parsing
import Data.Text (Text, pack)
sc :: Parser ()
sc = L.space (void spaceChar) empty empty
lexeme :: Parser a -> Parser a
lexeme = L.lexeme sc
symbol :: Text -> Parser Text
symbol = L.symbol sc
reserved :: Text -> Parser ()
reserved w = string w *> notFollowedBy alphaNumChar *> sc
identifier :: Parser Text
identifier = lexeme $ pack <$> ((:) <$> letterChar <*> many (alphaNumChar <|> char '.'))
bracket :: Parser a -> Parser a
bracket = between (symbol "{") (symbol "}")
number :: Parser Double
number = lexeme (signed anyNumber)
integer :: Parser Integer
integer = lexeme L.decimal
parseBVH :: Parser BVH
parseBVH = do
tree <- reserved "HIERARCHY" *> reserved "ROOT" *> parseJointBody
motion <- parseMotion
eof
return $ BVH tree motion
signed :: Num b => Parser b -> Parser b
signed p = do
n <- optional (symbol "-")
num <- p
return $ if isJust n then negate num else num
parseFrame :: Parser [Double]
parseFrame = do
let f = (do
n <- signed anyNumber
optional (satisfy (== ' '))
return n)
fs <- some f
eol
return fs
parseOffset :: Parser (Double,Double,Double)
parseOffset = reserved "OFFSET" *>
((,,) <$> number <*> number <*> number)
parseEndSite :: Parser Joint
parseEndSite = reserved "End Site" *>
(JointEnd <$> bracket parseOffset)
parseChannels :: Parser [Text]
parseChannels = do
reserved "CHANNELS"
n <- fromIntegral <$> integer
count n identifier
parseJointBody :: Parser Joint
parseJointBody = do
jname <- identifier
(off, chans, jnts) <- bracket $ do
off <- parseOffset
chans <- parseChannels
joints <- many ((reserved "JOINT" *> parseJointBody) <|> parseEndSite)
return (off, chans, joints)
return $ Joint chans jname off jnts
parseMotion :: Parser Motion
parseMotion = do
reserved "MOTION"
reserved "Frames:"
nFrames <- integer
reserved "Frame Time:"
fTime <- lexeme floating
allFrames <- some (parseFrame)
return $ Motion nFrames fTime allFrames
data BVH = BVH Joint Motion
deriving (Show)
data Motion = Motion {
numFrames :: Integer,
frameTime :: Double,
frames :: [[Double]]
}
deriving (Show)
type Offset = (Double, Double, Double)
data Joint = Joint {
channels :: [Text],
name :: Text,
offset :: Offset,
children :: [Joint]
}
| JointEnd Offset
deriving (Show)
loadBVH :: String -> IO BVH
loadBVH filePath = do
res <- parseFromFile parseBVH filePath
case res of
Left err -> error (show err)
Right anim -> return anim
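-- A hedged sketch of the kind of input this parser is meant to accept
-- (hypothetical file; assumes 'anyNumber'/'floating' from
-- Hickory.Utils.Parsing accept plain decimal literals such as "0.0"):
--
--   HIERARCHY
--   ROOT Hips
--   {
--       OFFSET 0.0 0.0 0.0
--       CHANNELS 3 Xposition Yposition Zposition
--       End Site
--       {
--           OFFSET 0.0 1.0 0.0
--       }
--   }
--   MOTION
--   Frames: 2
--   Frame Time: 0.0333333
--   0.0 0.0 0.0
--   0.1 0.2 0.3
--
-- On such a file, loadBVH would yield a 'BVH' with one root joint and a
-- 'Motion' holding two three-channel frames.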
|
asivitz/Hickory
|
Hickory/Utils/Bvh.hs
|
mit
| 3,160 | 0 | 17 | 950 | 1,010 | 513 | 497 | 97 | 2 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE Strict #-}
-----------------------------------------------------------------------------
-- |
-- Module : SmallGL.Shader
-- Copyright : (c) Artem Chirkin
-- License : MIT
--
-- Maintainer : Artem Chirkin <[email protected]>
-- Stability : experimental
-- Portability :
--
--
-----------------------------------------------------------------------------
module SmallGL.Shader
( ShaderProgram (), programId
, initShaders, attrLoc, unifLoc
) where
import GHCJS.Types (JSVal)
import Data.JSString (JSString, unpack')
import Control.Monad
import JavaScript.WebGL
import SmallGL.Helpers
import GHCJS.Marshal.Pure (pFromJSVal)
import Data.Maybe
import Data.Coerce
-- | Shader program definition
data ShaderProgram = ShaderProgram
    { programId :: WebGLProgram -- ^ id of a program to supply to glUseProgram
, attributesOf :: JSMap AttribProps -- ^ name of the attribute - location,type,count
, uniformsOf :: JSMap UniformProps -- ^ name of the uniform - location,type,count
}
newtype AttribProps = AttribProps JSVal
newtype UniformProps = UniformProps JSVal
foreign import javascript unsafe "$1.location" js_attrLoc :: AttribProps -> GLuint
--foreign import javascript unsafe "$1.type" js_attrType :: AttribProps -> GLenum
--foreign import javascript unsafe "$1.count" js_attrCount :: AttribProps -> GLint
foreign import javascript unsafe "$r = {}; $r.location = $1; $r.type = $2; $r.count = $3;"
js_attrProps :: GLuint -> GLenum -> GLint -> AttribProps
foreign import javascript unsafe "$1.location" js_unifLoc :: UniformProps -> WebGLUniformLocation
--foreign import javascript unsafe "$1.type" js_unifType :: UniformProps -> GLenum
--foreign import javascript unsafe "$1.count" js_unifCount :: UniformProps -> GLint
foreign import javascript unsafe "$r = {}; $r.location = $1; $r.type = $2; $r.count = $3;"
js_unifProps :: WebGLUniformLocation -> GLenum -> GLint -> UniformProps
foreign import javascript unsafe "$1 != null && $1.hasOwnProperty('location')" js_hasLoc :: JSVal -> Bool
newtype JSMap a = JSMap JSVal
jsMapFromList :: Coercible a JSVal => [(JSString, a)] -> IO (JSMap a)
jsMapFromList xs = do
mm <- js_emptyMap
foldM f mm xs
where
f m (name, val) = jssetMapVal name val m
jsIndexMap :: Coercible JSVal a => JSMap a -> JSString -> a
jsIndexMap = coerce . js_indexMap
foreign import javascript unsafe "$r = {};" js_emptyMap :: IO (JSMap a)
foreign import javascript unsafe "$r = $3; $r[$1] = $2;"
js_setMapVal :: JSString -> JSVal -> JSMap a -> IO (JSMap a)
jssetMapVal :: Coercible a JSVal => JSString -> a -> JSMap a -> IO (JSMap a)
jssetMapVal s v = js_setMapVal s (coerce v)
foreign import javascript unsafe "$r = $1[$2];"
js_indexMap :: JSMap a -> JSString -> JSVal
-- | Synonym for the GL enum that identifies a shader type
type ShaderType = GLenum
-- | return attribute location by name
attrLoc :: ShaderProgram -> JSString -> GLuint
attrLoc program name = let AttribProps p = attributesOf program `jsIndexMap` name
in if js_hasLoc p
then js_attrLoc $ AttribProps p
else error $ "Could not get attrib location: " ++ show name
-- | return uniform location by name
unifLoc :: ShaderProgram -> JSString -> WebGLUniformLocation
unifLoc program name = let UniformProps p = uniformsOf program `jsIndexMap` name
in if js_hasLoc p
then js_unifLoc $ UniformProps p
else error $ "Could not get uniform location: " ++ show name
-- | Initialize shader program by supplying a number of source codes.
-- One source code per shader.
initShaders :: WebGLRenderingContext -> [(ShaderType, JSString)] -> [(GLuint, JSString)] -> IO ShaderProgram
initShaders gl shtexts explicitLocs = do
-- create program
shaderProgram <- createProgram gl
-- attach all shaders
forM_ shtexts $ \(typ,text) -> do
shader <- getShader gl typ text
checkGLError gl $ "getShader " ++ show typ
attachShader gl shaderProgram shader
checkGLError gl $ "AttachShader gl " ++ show typ
-- bind attribute locations
forM_ explicitLocs $ \(loc,text) -> do
bindAttribLocation gl shaderProgram loc text
checkGLError gl $ "bindAttribLocation gl " ++ show (loc, text)
-- check program status
linkProgram gl shaderProgram
checkGLError gl "link shader program"
serror <- fromMaybe True . pFromJSVal <$> getProgramParameter gl shaderProgram gl_LINK_STATUS
unless serror $ do
putStrLn "Shader Program linking error "
logm <- getProgramInfoLog gl shaderProgram
checkGLError gl "GetShaderInfoLog gl "
putStrLn . unpack' $ logm
-- load attributes' information
attrCount <- fromMaybe (0::GLuint) . pFromJSVal <$>getProgramParameter gl shaderProgram gl_ACTIVE_ATTRIBUTES
-- putStrLn $ "Shader attributes: " ++ show attrCount
shaderAttribs <- (>>= jsMapFromList) $ sequence . flip map [0..attrCount-1] $ \i -> do
activeInfo <- getActiveAttrib gl shaderProgram i
checkGLError gl $ "GetActiveAttrib gl for getting shader attrib " ++ show i
aPos <- getAttribLocation gl shaderProgram (aiName activeInfo)
return (aiName activeInfo, js_attrProps (fromIntegral aPos) (aiType activeInfo) (aiSize activeInfo))
-- load uniforms' information
uniCount <- fromMaybe (0::GLuint) . pFromJSVal <$> getProgramParameter gl shaderProgram gl_ACTIVE_UNIFORMS
-- putStrLn $ "Shader uniforms: " ++ show attrCount
shaderUniforms <- (>>= jsMapFromList) $ sequence . flip map [0..uniCount-1] $ \i -> do
activeInfo <- getActiveUniform gl shaderProgram i
checkGLError gl $ "GetActiveUniform gl for getting shader uniform " ++ show i
uPos <- getUniformLocation gl shaderProgram (aiName activeInfo)
return (aiName activeInfo, js_unifProps uPos (aiType activeInfo) (aiSize activeInfo))
return ShaderProgram {
programId = shaderProgram,
attributesOf = shaderAttribs,
uniformsOf = shaderUniforms
}
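-- A hedged usage sketch (hypothetical shader sources; assumes the usual
-- gl_VERTEX_SHADER / gl_FRAGMENT_SHADER constants and 'useProgram' from
-- JavaScript.WebGL):
--
--   prog <- initShaders gl
--             [ (gl_VERTEX_SHADER,   vertexSrc)
--             , (gl_FRAGMENT_SHADER, fragmentSrc) ]
--             [ (0, "aVertexPosition") ]
--   useProgram gl (programId prog)
--   let posLoc = attrLoc prog "aVertexPosition"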
-- | Helper function to load shader
getShader :: WebGLRenderingContext -> ShaderType -> JSString-> IO WebGLShader
getShader gl t src = do
shaderId <- createShader gl t
checkGLError gl ("CreateShader gl type " ++ show t)
shaderSource gl shaderId src
checkGLError gl ("ShaderSource gl type " ++ show t)
compileShader gl shaderId
checkGLError gl ("CompileShader gl type" ++ show t)
serror <- fromMaybe True . pFromJSVal <$> getShaderParameter gl shaderId gl_COMPILE_STATUS
unless serror $ do
putStrLn $ "Error in shader of type " ++ show t
logm <- getShaderInfoLog gl shaderId
checkGLError gl "GetShaderInfoLog gl"
putStrLn . unpack' $ logm
putStrLn $ unpack' src
return shaderId
|
achirkin/qua-view
|
src/SmallGL/Shader.hs
|
mit
| 6,945 | 30 | 16 | 1,535 | 1,564 | 782 | 782 | 106 | 2 |
module SymVector where
import Data.IntMap.Strict as Map
import Control.Monad
import Data.Aeson
--this is the qc file
data Vec a = V (IntMap a)
{-@
data Vec a <dom :: Int -> Prop, rng :: Int -> a -> Prop>
= V {a :: i:Int<dom> -> a <rng i>}
@-}
instance (Show a) => Show (Vec a) where
show (V m) = show m
instance (FromJSON a) => FromJSON (Vec a) where
parseJSON o@(Object _) = V <$> parseJSON o
parseJSON _ = mzero
instance (ToJSON a) => ToJSON (Vec a) where
toJSON (V m) = toJSON m
{-@ emptyVec :: forall <p :: Int -> a -> Prop>. Vec <{\v -> 0 = 1}, p> a @-}
emptyVec :: Vec a
emptyVec = V (Map.empty)
{-@ mkVec :: x:a -> Vec <{\v -> 0=0}, {\i v-> v=x}> a @-}
mkVec :: a -> Vec a
mkVec x = undefined
{-@ getVec :: forall a <r :: x0: Int -> x1: a -> Prop, d :: x0: Int -> Prop>.
i: Int<d> ->
a: Vec<d, r> a ->
a<r i> @-}
getVec :: Int -> Vec a -> a
getVec i (V m) = let v = Map.lookup i m
f Nothing = error "Empty array!"
f (Just a) = a
in f v
{-@ setVec :: forall a <r :: x0: Int -> x1: a -> Prop, d :: x0: Int -> Prop>.
i: Int<d> ->
x: a<r i> ->
a: Vec <{v:Int<d> | v /= i }, r> a ->
Vec <d, r> a @-}
setVec :: Int -> a -> Vec a -> Vec a
setVec i v (V m) = let m' = Map.insert i v m
in V m'
data Vec2D a = V2D (Int -> Int -> a)
{-@
data Vec2D a <dom :: Int -> Int -> Prop, rng :: Int -> Int -> a -> Prop> = V2D (x:Int -> y:Int -> a<rng x y>)
@-}
{-@ emptyVec2D :: forall <p :: Int -> Int -> a -> Prop>. Vec2D <{\x y -> 0 = 1},p> a @-}
emptyVec2D :: Vec2D a
emptyVec2D = V2D $ \_ -> error "Empty Vec2D"
{-@ getVec2D :: forall a <r :: Int -> Int -> a -> Prop, d :: Int ->Int -> Prop>.
x:Int -> y:Int<d x> -> Vec2D <d,r> a -> a<r x y> @-}
getVec2D :: Int -> Int -> Vec2D a -> a
getVec2D x y (V2D f) = f x y
{-@ setVec2D :: forall a <r :: Int -> Int -> a -> Prop, d :: Int ->Int -> Prop>.
x:Int -> y:Int<d x> -> a:a<r x y> -> Vec2D <\i -> {j:Int<d i> | x = i => y /= j }, r> a -> Vec2D <d,r> a
@-}
setVec2D :: Int -> Int -> a -> Vec2D a -> Vec2D a
setVec2D x y v (V2D f) = V2D $ \i j -> if i == x && j == y then v else f i j
|
abakst/symmetry
|
checker/include/SymVectorQC.hs
|
mit
| 2,254 | 0 | 11 | 731 | 538 | 278 | 260 | 31 | 2 |
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
import Control.Applicative ((<|>))
import Control.Monad (when)
import Control.Monad.State (get, put, gets)
import Control.Monad.State (execStateT)
import Control.Lens ((%=), use)
import Data.List (intersperse)
import Data.Char (ord)
import Data.Monoid ((<>))
import qualified Data.Text as T
import qualified Data.List.PointedList as PL
import Numeric (showHex)
import System.Console.Docopt
import System.Environment (getArgs)
import Yi
import qualified Yi.Rope as R
import Yi.Command (shellCommandE, searchSources)
import Yi.Config.Simple.Types (ConfigM (..))
import qualified Yi.Keymap.Emacs as E
import Yi.Keymap.Emacs.Utils (findFileNewTab)
import Yi.Mode.Haskell
import Yi.Hoogle (hoogleRaw)
import Yi.TextCompletion (wordComplete)
import Yi.Utils (io)
import Yi.Layout (findDivider)
import Yi.Tab (tabLayout, tabDividerPositionA, tabFocus)
import Yi.Window (wkey)
import LayoutFix
help :: Docopt
help = [docopt|
Usage:
yi [<file> ...]
yi (-h|--help)
Options:
-h, --help Show usage
|]
main :: IO ()
main = do
args <- parseArgsOrExit help =<< getArgs
when (args `isPresent` (longOption "help") || args `isPresent` (shortOption 'h')) $
exitWithUsage help
let files = getAllArgs args (argument "file")
actions = intersperse (EditorA newTabE) (map (YiA . openNewFile) files)
cfg <- execStateT (runConfigM publish) (myConfig actions)
startEditor cfg Nothing
publish :: ConfigM ()
publish = do
publishAction "shellCommandE" shellCommandE
publishAction "cd" cd
publishAction "pwd" pwd
publishAction "searchSources" searchSources
publishAction "nextWinE" nextWinE
publishAction "acceptedInputsOtherWindow" acceptedInputsOtherWindow
publishAction "wordComplete" wordComplete
publishAction "ghciSend" ghciSend
publishAction "ghciLoadBuffer" ghciLoadBuffer
publishAction "ghciInferType" ghciInferType
publishAction "ghciSetProcessName" ghciSetProcessName
publishAction "ghciSetProcessArgs" ghciSetProcessArgs
publishAction "hoogle" hoogle
publishAction "hoogleSearch" hoogleSearch
myConfig :: [Action] -> Config
myConfig actions = defaultEmacsConfig
{ modeTable = fmap (configureIndent . configureModeline) (modeTable defaultEmacsConfig)
, defaultKm = myKeymapSet
, configCheckExternalChangesObsessively = False
, startActions =
(EditorA (do
e <- get
put e { maxStatusHeight = 30 }))
: actions
, configRegionStyle = Exclusive
, configUI = let cui = configUI defaultEmacsConfig
in cui { configTheme = updateTheme $ configTheme cui }
}
updateTheme :: Theme -> Theme
updateTheme t = t `override` \sup _ ->
sup { modelineAttributes = (modelineAttributes sup) { foreground = lightGrey }}
myKeymapSet :: KeymapSet
myKeymapSet = E.mkKeymap $ E.defKeymap `override` \sup _ ->
sup { E._eKeymap = overKeymap <|| E._eKeymap sup <|> myKeymap }
overKeymap :: Keymap
overKeymap = choice [ spec KEnter ?>>! doEnter
, spec KTab ?>>! doTab IncreaseCycle
, shift (spec KTab) ?>>! doTab DecreaseCycle
]
where doTab :: IndentBehaviour -> EditorM ()
doTab b = withCurrentBuffer $ do
r <- getSelectRegionB
if regionIsEmpty r then adjIndent b
else let d = if b == IncreaseCycle then 1 else -1
in shiftIndentOfRegionB d r
doEnter = newlineB >> adjIndent IncreaseCycle
myKeymap :: Keymap
myKeymap = choice [ ctrl (spec KPageDown) ?>>! previousTabE
, ctrl (spec KPageUp) ?>>! nextTabE
, meta (spec KDown) ?>>! nextWinE
, metaCh 's' ?>>! searchSources
, metaCh '`' ?>>! shellCommandE
, ctrl (metaCh 'a') ?>>! wordComplete
, ctrlCh 'x' ?>> ctrlX
]
where ctrlX = choice
[ char 'f' ?>>! findFileNewTab
, ctrlCh 'd' ?>>! deleteTabE
, ctrlCh 'k' ?>>! closeBufferAndWindowE
, ctrlCh 'p' ?>>! hoogle
, char 'l' ?>>! layoutManagersPrintMsgE
, ctrlCh 'l' ?>>! layoutManagersNextE
, char '.' ?>>! layoutManagerNextVariantE
, char ',' ?>>! layoutManagerPreviousVariantE
, char '-' ?>>! moveDivider False
, char '=' ?>>! moveDivider True
]
moveDivider :: Bool -> EditorM ()
moveDivider dir = do
tab <- use $ tabsA . PL.focus
let l = tabLayout tab
mbr = findDivider (Just . wkey $ tabFocus tab) l
clamp = min 0.9 . max 0.1
dt = if dir then 0.2 else (-0.2)
maybe (return ())
(\ref -> tabsA . PL.focus . tabDividerPositionA ref %= clamp . (+ dt))
mbr
hoogle :: YiM ()
hoogle = do
word <- withCurrentBuffer $ do
wordRegion <- regionOfB unitWord
readRegionB wordRegion
hoogleSearch word
hoogleSearch :: R.YiString -> YiM ()
hoogleSearch src = do
results <- io $ hoogleRaw src R.empty
let r = T.break (== ' ') . R.toText <$> results
let mx = maximum $ T.length . fst <$> r
let format (p, s) = (if p == "Did" then p
else T.justifyLeft mx ' ' p)
`T.append` s
printMsgs $ map format r
configureIndent :: AnyMode -> AnyMode
configureIndent = onMode $ \m ->
m { modeIndentSettings = IndentSettings { expandTabs = True
, shiftWidth = 2
, tabSize = 2
}
}
configureModeline :: AnyMode -> AnyMode
configureModeline = onMode $ \m -> m { modeModeLine = myModeLine }
where
myModeLine prefix = do
col <- curCol
pos <- pointB
ln <- curLn
p <- pointB
s <- sizeB
curChar <- readB
ro <-use readOnlyA
modeNm <- gets (withMode0 modeName)
unchanged <- gets isUnchangedBuffer
enc <- use encodingConverterNameA >>= return . \case
Nothing -> mempty
Just cn -> T.pack $ case R.unCn cn of
"UTF-8" -> "U"
other -> other
let pct | pos == 0 || s == 0 = " Top"
| pos == s = " Bot"
| otherwise = getPercent p s
changed = if unchanged then "-" else "*"
readOnly' = if ro then "%" else changed
hexxed = T.pack $ showHex (ord curChar) ""
hexChar = "0x" <> T.justifyRight 2 '0' hexxed
toT = T.pack . show
nm <- gets $ shortIdentString (length prefix)
return $ T.concat [ enc, readOnly', changed, " ", nm, " "
, pct, " ", hexChar, " ", T.justifyLeft 9 ' ' $
"(" <> toT ln <> "," <> toT col <> ")"
, " ", modeNm
]
|
mmn80/yi-static
|
src/Main.hs
|
mit
| 7,866 | 0 | 18 | 3,021 | 2,021 | 1,052 | 969 | -1 | -1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE QuasiQuotes #-}
module Main (main) where
import Blaze.ByteString.Builder.Char.Utf8 (fromLazyText)
import CMarkGFM (extAutolink, extTable, extStrikethrough, optSmart, commonmarkToHtml)
import qualified Data.ByteString.Char8 as S8
import Data.Text (Text)
import qualified Data.Text as T
import Data.Text.Encoding (decodeUtf8With)
import Data.Text.Encoding.Error (lenientDecode)
import qualified Data.Text.Lazy as TL
import Network.HTTP.Types (status200)
import Network.Wai (Middleware, Response,
pathInfo, responseBuilder)
import Text.Blaze.Html (preEscapedToHtml)
import Text.Blaze.Html.Renderer.Utf8 (renderHtmlBuilder)
import Text.Hamlet (defaultHamletSettings, shamlet)
import Text.Hamlet.RT (parseHamletRT,
renderHamletRT)
import Text.Lucius (luciusRT)
import WaiAppStatic.CmdLine (docroot, runCommandLine)
main :: IO ()
main = runCommandLine (shake . docroot)
shake :: FilePath -> Middleware
shake docroot app req respond
| any unsafe p = app req respond
| null p = app req respond
| ".hamlet" `T.isSuffixOf` l = hamlet pr >>= respond
| ".lucius" `T.isSuffixOf` l = lucius pr >>= respond
| ".markdown" `T.isSuffixOf` l = markdown' pr >>= respond
| ".md" `T.isSuffixOf` l = markdown' pr >>= respond
| otherwise = app req respond
where
p = pathInfo req
pr = T.intercalate "/" $ T.pack docroot : p
l = last p
unsafe :: Text -> Bool
unsafe s
| T.null s = False
| T.head s == '.' = True
| otherwise = T.any (== '/') s
readFileUtf8 :: Text -> IO String
readFileUtf8 fp = do
bs <- S8.readFile $ T.unpack fp
let t = decodeUtf8With lenientDecode bs
return $ T.unpack t
hamlet :: Text -> IO Response
hamlet fp = do
str <- readFileUtf8 fp
hrt <- parseHamletRT defaultHamletSettings str
html <- renderHamletRT hrt [] (error "No URLs allowed")
return $ responseBuilder status200 [("Content-Type", "text/html; charset=utf-8")] $ renderHtmlBuilder html
lucius :: Text -> IO Response
lucius fp = do
str <- readFileUtf8 fp
let text = either error id $ luciusRT (TL.pack str) []
return $ responseBuilder status200 [("Content-Type", "text/css; charset=utf-8")] $ fromLazyText text
markdown' :: Text -> IO Response
markdown' fp = do
bs <- S8.readFile $ T.unpack fp
let t = decodeUtf8With lenientDecode bs
html = commonmarkToHtml
[optSmart]
[extStrikethrough, extTable, extAutolink]
t
title = T.strip $ T.dropWhile (== '#') $ T.concat $ take 1 $ dropWhile T.null $ T.lines t
return $ responseBuilder status200 [("Content-Type", "text/html; charset=utf-8")] $ renderHtmlBuilder
[shamlet|
$doctype 5
<html>
<head>
<meta charset=utf-8>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>#{title}
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/css/bootstrap.min.css" integrity="sha384-BVYiiSIFeK1dGmJRAkycuHAHRg32OmUcww7on3RYdg4Va+PmSTsz/K68vbdEjh4u" crossorigin="anonymous">
<link rel="stylesheet" href="//cdnjs.cloudflare.com/ajax/libs/highlight.js/9.12.0/styles/zenburn.min.css">
<body>
<div .container>
<div .row>
<div .col-sm-2>
<div .col-sm-8>
<article>#{preEscapedToHtml html}
<script src="//cdnjs.cloudflare.com/ajax/libs/highlight.js/9.12.0/highlight.min.js">
<script src="//cdnjs.cloudflare.com/ajax/libs/highlight.js/9.12.0/languages/haskell.min.js">
<script src="//cdnjs.cloudflare.com/ajax/libs/highlight.js/9.12.0/languages/rust.min.js">
<script>hljs.initHighlightingOnLoad();
|]
|
snoyberg/servius
|
app/servius.hs
|
mit
| 4,508 | 0 | 16 | 1,393 | 914 | 479 | 435 | 70 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
{-@ LIQUID "--no-termination" @-}
module Class () where
import Language.Haskell.Liquid.Prelude
import Prelude hiding (sum, length, (!!), Functor(..))
import qualified Prelude as P
{-@ qualif Size(v:Int, xs:a): v = size xs @-}
{-@ qualif Size(v:Int, xs:MList a): v = size xs @-}
{-@ data MList a = Nil | Cons (hd::a) (tl::(MList a)) @-}
data MList a = Nil | Cons a (MList a)
{-@ (!!) :: xs:MList a -> {v:Nat | v < (size xs)} -> a @-}
(!!) :: MList a -> Int -> a
Nil !! i = liquidError "impossible"
(Cons x _) !! 0 = x
(Cons x xs) !! i = xs !! (i - 1)
{-@ class measure size :: forall a. a -> Int @-}
{-@ class Sized s where
size :: forall a. x:s a -> {v:Nat | v = size x}
@-}
class Sized s where
size :: s a -> Int
instance Sized MList where
{-@ instance measure size :: MList a -> Int
size (Nil) = 0
size (Cons x xs) = 1 + size xs
@-}
size = length
{-@ length :: xs:MList a -> {v:Nat | v = size xs} @-}
length :: MList a -> Int
length Nil = 0
length (Cons x xs) = 1 + length xs
{-@ bob :: xs:MList a -> {v:Nat | v = size xs} @-}
bob :: MList a -> Int
bob = length
{-@ class (Sized s) => Indexable s where
index :: forall a. x:s a -> {v:Nat | v < size x} -> a
@-}
class (Sized s) => Indexable s where
index :: s a -> Int -> a
instance Indexable MList where
index = (!!)
{-@ sum :: Indexable s => s Int -> Int @-}
sum :: Indexable s => s Int -> Int
sum xs = go max 0
where
max = size xs
go (d::Int) i
| i < max = index xs i + go (d-1) (i+1)
| otherwise = 0
{-@ sumMList :: MList Int -> Int @-}
sumMList :: MList Int -> Int
sumMList xs = go max 0
where
max = size xs
go (d::Int) i
| i < max = index xs i + go (d-1) (i+1)
| otherwise = 0
{-@ x :: {v:MList Int | (size v) = 3} @-}
x :: MList Int
x = 1 `Cons` (2 `Cons` (3 `Cons` Nil))
foo = liquidAssert $ size (Cons 1 Nil) == size [1]
|
santolucito/ives
|
tests/Class.hs
|
mit
| 1,953 | 0 | 11 | 548 | 576 | 309 | 267 | 38 | 1 |
module DiveIntoMonads where
import Control.Monad.Writer
import Control.Monad.State
monadReturn = return "WHAT" :: Maybe String
monadOnJust = Just 9 >>= \x -> return (x*10)
monadOnNothing = Nothing >>= \x -> return $ x ++ " that all!"
fooUgly, fooLessUgly, fooDoNotion :: Maybe String
fooUgly = Just 3 >>= (\x -> Just "!" >>= (\y -> Just (show x ++ y)))
fooLessUgly = Just 3 >>= (\x ->
Just "!" >>= (\y ->
Just (show x ++ y)))
fooDoNotion = do -- look how this one is similar to 'fooLessUgly'... just less ugly
x <- Just 3
y <- Just "!"
Just (show x ++ y)
justH :: Maybe Char
justH = do
    (x:xs) <- Just "hello" -- pattern matching works with do-notation!
    return x
-- This pattern matching will fail, so the Monad's 'fail' function will be called. But 'fail' is implemented as 'fail _ = Nothing', so we get Nothing from this call
wopwop :: Maybe Char
wopwop = do
(x:xs) <- Just ""
return x
-- Failures in the list monad are handled as the empty list '[]', just as failures in the Maybe monad are 'Nothing'
failListMonadOne = [] >>= \x -> ["bad","mad","rad"]
failListMonadTwo = [1,2,3] >>= \x -> []
listMonadExample = [1,2] >>= \n -> ['a','b'] >>= \ch -> return (n,ch)
-- Guards are used in conjunction with MonadPlus (Monad + Monoid typeclass) in lists to make list comprehensions work... Oh magic Haskell!
guardWorks = guard (5 > 2) >> return "cool" :: [String]
guardFails = guard (1 > 2) >> return "cool" :: [String]
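-- Illustrative (added) sketch: the same guard trick is what list comprehensions
-- desugar to, e.g. [x | x <- [1..50], '7' `elem` show x] is roughly:
sevensOnly :: [Int]
sevensOnly = [1..50] >>= \x -> guard ('7' `elem` show x) >> return x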
-- The Writer monad!
writerOne = runWriter (return 3 :: Writer String Int)
writerTwo = runWriter (return 3 :: Writer (Sum Int) Int)
logNumber :: Int -> Writer [String] Int
logNumber x = writer (x, ["Got number: " ++ show x])
multWithLog :: Writer [String] Int
multWithLog = do
a <- logNumber 3
b <- logNumber 5
tell ["Gonna multiply these two!"]
return (a*b)
-- gcd with logging
gcd' :: Int -> Int -> Writer [String] Int
gcd' a b
| b == 0 = do
tell ["Finished with " ++ show a]
return a
| otherwise = do
tell [show a ++ " mod " ++ show b ++ " = " ++ show (a `mod` b)]
gcd' b (a `mod` b)
gcdPrint a b = mapM_ putStrLn $ snd $ runWriter (gcd' a b)
-- -> '(->) r' Monad (also called the Reader monad)
addStuff :: Int -> Int
addStuff = do
a <- (*2)
b <- (+10)
return (a+b)
addMoreStuff :: Int -> Int
addMoreStuff = do
a <- (*2)
b <- (+5)
c <- (+ (-3))
return (a + b + c)
-- State Monad
-- Stack example without 'State' monad
type Stack = [Int]
pop :: Stack -> (Int,Stack)
pop (x:xs) = (x,xs)
push :: Int -> Stack -> ((),Stack)
push a xs = ((),a:xs)
stackManip :: Stack -> (Int, Stack)
stackManip stack = let
((),newStack1) = push 3 stack
(a ,newStack2) = pop newStack1
in pop newStack2
-- Stack example WITH state monad
statePop :: State Stack Int
statePop = state $ \(x:xs) -> (x,xs)
statePush :: Int -> State Stack ()
statePush a = state $ \xs -> ((),a:xs)
stackManipState :: State Stack Int
stackManipState = do
statePush 3
a <- statePop
statePop
-- MonadState in 'Control.Monad.State' allows to get and replace current state
stackyStack :: State Stack ()
stackyStack = do
stackNow <- get
if stackNow == [1,2,3]
then put [8,3,1]
else put [9,2,1]
-- Make a state example which holds current and previous value of some integer
plus, minus :: Int -> State Int Int
plus a = state $ \x -> (x, x+a)
minus a = state $ \x -> (x, x-a)
-- runState (return 0 >>= (\x -> state $ \y -> (x,x+y))) 5
-- WTF...?!?!?!?
-- How to write this without do-notation...? (one desugared sketch follows 'testState' below)
testState = do
plus 50
a <- minus 25
plus 100
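-- One way to write 'testState' without do notation (illustrative sketch;
-- '_a' plays the role of the unused 'a <- minus 25' binding above):
testStateDesugared :: State Int Int
testStateDesugared = plus 50 >> (minus 25 >>= \_a -> plus 100)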
-- This is pure witchcraft
powerset :: [a] -> [[a]]
powerset xs = filterM (\x -> [True, False]) xs
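-- Illustrative note: in the list monad, filterM keeps and drops each element in
-- every possible combination, so e.g. powerset [1,2] == [[1,2],[1],[2],[]].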
|
aquatir/remember_java_api
|
code-sample-haskell/hello_world/func_magic/monads.hs
|
mit
| 3,888 | 0 | 14 | 1,070 | 1,384 | 734 | 650 | 92 | 2 |
-----------------------------------------------------------------------------
--
-- Module : ParetoSearch
-- Copyright :
-- License : AllRightsReserved
--
-- Maintainer :
-- Stability :
-- Portability :
--
-- |
--
-----------------------------------------------------------------------------
{-# OPTIONS_GHC -F -pgmF htfpp #-}
module ParetoSearch (
paretoSet,
Coverage, covers, uses ,
Paretian, isObviouslyBetterThan, isWorthMergingWith,
htf_thisModulesTests
) where
-- parse
-- filter worse
-- evaluate options
-- create map
-- if is pareto
-- if is better than single element
-- remove single from map
-- remove elements containing it from pareto
-- add to pareto
-- do next
import Prelude
import qualified Data.Set as S
import qualified Data.Map as M
import Data.Monoid
import Data.Maybe(fromJust)
import Test.Framework
import Test.HUnit
class Paretian a where
-- isObviouslyBetterThan:: a -> a -> Ordering
isObviouslyBetterThan:: a -> a -> Bool
isWorthMergingWith:: a -> a -> Bool
class Coverage a where
covers:: a -> a -> Bool
uses:: a -> a -> Bool
-- TESTS -----------------
instance Paretian Int where
-- isObviouslyBetterThan a b = if a > (b * b) then GT else if a < b then LT else EQ
isObviouslyBetterThan a b = a > (b * b)
isWorthMergingWith a b = a < b
test_filter = do assertEqual (S.fromList ([3,4,5] :: [Int])) (filterMuchWorse (S.fromList ([1,2,3,4,5] :: [Int])))
instance Coverage Int where
covers a b = a > 2 * b
uses a b = False
test_makeOptions = do assertEqual expect (makeOptions $ S.fromList ([1,2,0,5] :: [Int])) where
expect = [(0, S.fromList [1,2,5]), (1, S.fromList [2, 5]), (2, S.fromList [5]), (5, S.empty)] :: [(Int, S.Set Int)]
instance Coverage a => Coverage (Sum a) where
a `covers` b = getSum a `covers` getSum b
a `uses` b = getSum a `uses` getSum b
test_spawnOptions = do assertEqual expect actual where
expect = [(Sum 1, S.fromList [Sum 2, Sum 3]), (Sum 2, S.fromList [Sum 3])] :: [(Sum Int, S.Set (Sum Int))]
actual = spawnOptions map (Sum 0, S.fromList [Sum 1, Sum 2, Sum 3]) where
map = M.fromList [(Sum 1, S.fromList [Sum 2, Sum 3, Sum 4]), (Sum 2, S.fromList [Sum 3, Sum 4])]
instance (Ord a, Paretian a) => Paretian (Sum a) where
isObviouslyBetterThan a b = isObviouslyBetterThan (getSum a) (getSum b)
isWorthMergingWith a b = (getSum a) < (getSum b)
test_paretoFinder = do assertEqual expected actual where
expected = S.fromList [Sum 5, Sum 3] :: S.Set (Sum Int)
actual = S.fromList $ paretoSet [Sum 1, Sum 2, Sum 3]
-- REAL CODE -------------
filterMuchWorse:: Paretian a => Ord a => S.Set a -> S.Set a
filterMuchWorse set = S.foldl' (\rest current -> S.filter (\x -> not (current `isObviouslyBetterThan` x)) rest) set set
makeOptions:: Paretian a => Ord a => S.Set a -> [(a, S.Set a)]
makeOptions set = map (\x -> (x, S.filter (\y -> x `isWorthMergingWith` y) set)) $ S.toList set
spawnOptions:: Coverage a => Monoid a => Ord a => M.Map a (S.Set a) -> (a, S.Set a) -> [(a, S.Set a)]
spawnOptions bindings test = map (createRecord test) $ S.toList $ S.filter (flip M.member bindings) $ snd test where
createRecord test t = let v = fst test <> t in (v, opts v) where
opts v = S.filter (\x -> not (v `covers` x)) $ S.intersection (snd test) (fromJust $ M.lookup t bindings)
findParetoHelper:: Paretian a => Coverage a => Monoid a => Ord a => (a, S.Set a) -> M.Map a (S.Set a) -> [(a, S.Set a)] -> [a] -> [a]
findParetoHelper current bindings remaining results = findPareto bindings (newAvailable remaining) newResults where
newAvailable available = (spawnOptions bindings current) ++ available
newResults = fst current : (filter (removeWorse $ fst current) results) where
removeWorse current x = not (current `isObviouslyBetterThan` x)
findPareto:: Paretian a => Coverage a => Monoid a => Ord a => M.Map a (S.Set a) -> [(a, S.Set a)] -> [a] -> [a]
findPareto _ [] results = results
findPareto bindings available results
| not $ isPareto (fst $ head available) results = findPareto bindings (tail available) results
| otherwise = findParetoHelper (head available) (M.filterWithKey removeWorse bindings) (tail available) results where
isPareto c currentPareto = all (\x -> not (x `isObviouslyBetterThan` c)) currentPareto
removeWorse = \x _ -> not ((fst $ (head available)) `isObviouslyBetterThan` x)
paretoSet:: Paretian a => Coverage a => Monoid a => Ord a => [a] -> [a]
paretoSet tests = findPareto (M.fromList opts) opts [] where
opts = makeOptions . filterMuchWorse $ S.fromList tests
|
Lewerow/TestSelector
|
src/ParetoSearch.hs
|
mit
| 4,546 | 0 | 15 | 840 | 1,879 | 992 | 887 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
import Crypto.Hash.MD5
import Data.List
import Data.ByteString.Base16
import qualified Data.ByteString.Char8 as B
key = "yzbqklnj"
isValid :: Int -> Int -> Bool
isValid n i = possibleResult == (B.take n $ getHashString i)
where possibleResult = B.replicate n '0'
showB :: Show a => a -> B.ByteString
showB = B.pack . show
getHashString :: Show a => a -> B.ByteString
getHashString = encode . hash . B.append key . showB
day4A = find (isValid 5) [1..]
day4B = find (isValid 6) [1..]
main = do
print day4A
print day4B
|
bruno-cadorette/AdventOfCode
|
Day 4/Day4.hs
|
mit
| 575 | 0 | 9 | 116 | 213 | 112 | 101 | 18 | 1 |
module Nondet where
import Data.Complex
import Test.QuickCheck
type Nondet a = [a]
sqrts :: Floating a => a -> Nondet a
sqrts x = [y, negate y]
where
y = sqrt x
-- Exercise Four: Apply the function on all input values and aggregate the results
bind :: (a -> Nondet b) -> (Nondet a -> Nondet b)
bind = undefined
-- Using # instead of * for composition to avoid ambiguities
(#) :: (b -> Nondet c) -> (a -> Nondet b) -> (a -> Nondet c)
g' # f' = bind g' . f'
-- Exercise Five: Provide minimal context for the given value
unit :: a -> Nondet a
unit = undefined
-- lift --- lifting functions
lift :: (a -> b) -> (a -> Nondet b)
lift f = unit . f
-- Solution to the quadratic equation a*x^2 + b*x + c = 0
-- delta = b^2 - 4*a*c
-- x = (-b +- sqrt delta) / (2*a)
solveQuadratic :: Floating a => a -> a -> a -> Nondet a
solveQuadratic a b c = lift (/(2*a)) # lift (subtract b) # sqrts $ delta
where
delta = b*b - 4*a*c
-- Exercise Six: Test that (for a given value x)
-- (a) f # unit = unit # f = f
-- (b) lift g # lift f = lift (g.f)
check_unit1, check_unit2 :: (Complex Float -> Nondet (Complex Float)) -> Complex Float -> Bool
check_unit1 f x = undefined
check_unit2 f x = undefined
test_unit1, test_unit2 :: IO ()
test_unit1 = quickCheck $ check_unit1 sqrts
test_unit2 = quickCheck $ check_unit2 sqrts
check_lift :: (Float -> Float) -> (Float -> Float) -> Float -> Bool
check_lift f g x = undefined
test_lift :: IO ()
test_lift = quickCheck $ check_lift (+2) (*3)
-- Exercise Ten(b): Rewrite the module to make Nondet instance of
-- the Monad typeclass
-- Note: You first need to make it an instance of Functor and Applicative
-- Exercise Twelve: Write the solution to the quadratic equation in do notation
|
PavelClaudiuStefan/FMI
|
An_3_Semestru_1/ProgramareDeclarativa/Extra/Laborator/Laborator 9/Nondet.hs
|
cc0-1.0
| 1,738 | 0 | 11 | 382 | 502 | 271 | 231 | 28 | 1 |
module BoardSpec where
import Test.Hspec
import Reversi.Coord
import Reversi.Board
import Reversi.Piece
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
context "node" $ do
let empty = node Nothing
piece p = node $ Just p
node x = (Coord 0 0, x)
it "should determine if occupied by piece" $ do
isOccupied empty `shouldBe` False
isOccupied (piece Black) `shouldBe` True
isOccupiedBy empty Black `shouldBe` False
isOccupiedBy (piece Black) Black `shouldBe` True
isOccupiedBy (piece White) Black `shouldBe` False
it "should swap occupied piece" $ do
swapNode empty `shouldBe` empty
swapNode (piece Black) `shouldBe` piece White
swapNode (piece White) `shouldBe` piece Black
context "standard board" $ do
let b = standardBoard
it "should have 64 nodes" $
length (getNodes b) `shouldBe` 64
it "should have 4 occupied nodes" $
length (getOccupiedNodes b) `shouldBe` 4
it "should have 60 unoccupied nodes" $
length (getUnoccupiedNodes b) `shouldBe` 60
it "should have correct piece placement" $ do
hasPiece b 3 3 White
hasPiece b 4 4 White
hasPiece b 3 4 Black
hasPiece b 4 3 Black
hasPiece :: Board -> Int -> Int -> Piece -> Expectation
hasPiece b x y p = snd (getNode b (Coord x y)) `shouldBe` Just p
|
mharrys/reversi
|
test/BoardSpec.hs
|
gpl-2.0
| 1,486 | 0 | 16 | 488 | 481 | 232 | 249 | 38 | 1 |
module Main where
import Paths_StatiGen
import System.FilePath
import System.Environment
import System.Directory
import System.Exit
import System.IO
import Control.Monad
import StatiGen.Util
import StatiGen.Build
-- Entry point of the program.
-- The matching function is called based on the given command-line arguments.
main :: IO ()
main = do
    args <- getArgs
    case args of
        ["create",site]          -> create site "default" >> exitWith ExitSuccess   -- Creates a new website project with the default template.
        ["create",site,template] -> create site template >> exitWith ExitSuccess    -- Creates a new website project with the given template.
        ["settemplate",template] -> setTemplate template >> exitWith ExitSuccess    -- Changes the template to the given template.
        ["addpage",name]         -> addPage name >> exitWith ExitSuccess            -- Adds a new page (.page and .conf file).
        ["build"]                -> buildSite >> exitWith ExitSuccess               -- Generates the HTML files.
        []                       -> buildSite >> exitWith ExitSuccess               -- Generates the HTML files.
-- Creates the directory structure for a new website project and copies all required files.
-- If a website with the given name already exists, the function aborts.
-- The function also aborts if the given template does not exist.
create :: FilePath -> String -> IO ()
create site template = do
existsDir <- doesDirectoryExist site
when existsDir $ do
hPutStrLn stderr $ "Eine Webite mit dem Namen " ++ site ++ " existiert bereits."
exitWith $ ExitFailure 2
templateDir <- getDataFileName "templates"
existsTemplate <- doesDirectoryExist (templateDir </> template)
when (existsTemplate == False) $ do
hPutStrLn stderr $ "Eine Template mit dem Namen " ++ template ++ " existiert nicht."
exitWith $ ExitFailure 3
createDirectoryIfMissing True site
createDirectoryIfMissing True (site</>"output")
templateContent <- liftM (filter (/=".") . map (makeRelative (templateDir </> template))) $ getDirectoryContentsRecursive (templateDir </> template)
forM_ templateContent $ \file -> do
let dest = site </> "src" </> file
createDirectoryIfMissing True $ takeDirectory dest
copyFile ((templateDir </> template) </> file) dest
emptyPage <- liftM (filter (/=".") . map (makeRelative (templateDir </> "emptyPage"))) $ getDirectoryContentsRecursive (templateDir </> "emptyPage")
forM_ emptyPage $ \file -> do
let dest = site </> "src" </> file
createDirectoryIfMissing True $ takeDirectory dest
copyFile ((templateDir </> "emptyPage") </> file) dest
writeFile (site</>"config.conf") "author: Dein Name\nyear: 2012\nwebsite_title: Titel der Webseite\nwebsite_slogan: Slogan der Webseite\ndescription: Beschreibung\nkeywords: Keywoerter"
|
pads-fhs/hawebgen
|
StatiGen.hs
|
gpl-2.0
| 2,985 | 0 | 16 | 636 | 641 | 316 | 325 | 44 | 6 |
module S.Head where
import S.Type
import S.Cache
import S.Reduce (here)
import qualified Data.Map.Strict as M
import qualified Control.Monad.State.Strict as S
-- | member of the set Q Q Q,
-- consequently, t has infinite head reduction
isq3 t = case t of
App {fun=xy, arg=z} | isq z -> case xy of
App {fun=x, arg=y} | isq x && isq y -> True
_ -> False
_ -> False
-- | member of the set Q.
-- where P = S/S, Q = M - P,
-- alternatively, t has some subterm with left depth > 1
isq t = let S: xs = spine t in case xs of
[] -> False
[x] -> isq x
_ -> True
-- | actual head reduction, no cache, no shortcuts
plain :: T -> [ T ]
plain t =
let f xs = unspine xs : case xs of
S : x : y : z : rest -> f $ spine x ++ z : app y z : rest
_ -> []
in f $ spine t
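-- The head step above implements S x y z ~> x z (y z) on spine form:
-- a spine S : x : y : z : rest is rewritten to spine x ++ [z, app y z] ++ rest
-- (read 'spine'/'unspine' as converting between a term and its head-plus-arguments list).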
normalform = last . plain
normal steps t = S.evalState ( normalize steps t )
$ Cache { m = M.empty, st = steps }
-- | aggressively head-normalize the given term.
-- return Just normalform, or Nothing if it could not be found
-- with the given recursion depth.
normalize :: Int -> T -> S.State Cache (Maybe T)
normalize steps t = do
S.modify $ \ c -> c { st = steps }
cached_fix norm steps t
cached_fix f _ t = do
c <- S.get
if st c < 0 then return Nothing else
case M.lookup t $ m c of
Just res -> return res
Nothing -> do
S.modify $ \ c -> c { st = pred $ st c }
res <- f ( cached_fix f undefined ) t
S.modify $ \ c -> c { m = M.insert t res $ m c }
return res
norm self t | isq3 t = return Nothing
norm self t = case t of
S -> return $ Just s
App {fun=f,arg=a} -> do
nf <- self f
case nf of
Nothing -> return Nothing
Just nf -> do
let t1 = app nf a
case here t1 of
[] -> return $ Just t1
t2 : _ -> self t2
|
jwaldmann/s
|
S/Head.hs
|
gpl-3.0
| 2,012 | 0 | 19 | 751 | 756 | 378 | 378 | 50 | 4 |
module Main where
import UI.HSCurses.Curses hiding (pi)
import Data.Convertible.Base
import Data.Convertible.Instances
import Data.Word
import Control.Concurrent
main :: IO ()
main = do
win <- initScr
startColor
wclear win
cursSet CursorInvisible
echo False
let Just fgColor = color "cyan"
initPair (Pair 1) fgColor (Color 0) >> attrSet attr0 (Pair 1)
mainDrawLoop win 0 20 0.05
mainDrawLoop :: Window -> Double -> Double -> Double -> IO ()
mainDrawLoop win offset amplitude hz = do
drawGraph win $ \x -> amplitude*sin(hz*pi*x + offset)
mvWAddStr win 0 0 $ "A: " ++ show amplitude ++ " Hz: " ++ show hz
refresh
input <- getCh
case input of
KeyChar 'd' -> mainDrawLoop win (offset-0.2) amplitude hz
KeyChar 'a' -> mainDrawLoop win (offset+0.2) amplitude hz
KeyChar 'w' -> mainDrawLoop win offset (amplitude+1) hz
KeyChar 's' -> mainDrawLoop win offset (amplitude-1) hz
KeyChar 'e' -> mainDrawLoop win offset amplitude (hz-0.002)
KeyChar 'r' -> mainDrawLoop win offset amplitude (hz+0.002)
KeyChar 'q' -> endWin
_ -> mainDrawLoop win offset amplitude hz
drawGraph :: Window -> (Double -> Double) -> IO ()
drawGraph win f = do
wclear win
(y, x) <- scrSize
let coords = map (\x -> (x, f x)) [0, 0.01..(realToFrac x)]
mapM_ (drawPoint . centerAndRound y) coords
where drawPoint (x, y) = mvAddCh y x (convert '#')
centerAndRound y (x', y') = (round x', round (y' + (realToFrac y)/2))
|
pmikkelsen/Haskell-sinewave
|
Main.hs
|
gpl-3.0
| 1,442 | 0 | 14 | 289 | 637 | 311 | 326 | 39 | 8 |
module SpacialGameMsg.SGModelMsg where
import System.Random
import Control.Monad.STM
import qualified Data.Map as Map
import qualified PureAgentsConc as PA
data SGState = Defector | Cooperator deriving (Eq, Show)
data SGMsg = NeighbourPayoff (SGState, Double) | NeighbourState SGState deriving (Eq, Show)
data SGAgentState = SIRSAgentState {
sgCurrState :: SGState,
sgPrevState :: SGState,
sgLocalPayoff :: Double,
sgBestPayoff :: (SGState, Double),
sgNeighbourFlag :: Int
} deriving (Show)
type SGEnvironment = ()
type SGAgent = PA.Agent SGMsg SGAgentState SGEnvironment
type SGTransformer = PA.AgentTransformer SGMsg SGAgentState SGEnvironment
type SGSimHandle = PA.SimHandle SGMsg SGAgentState SGEnvironment
bParam :: Double
bParam = 1.95
sParam :: Double
sParam = 0.0
pParam :: Double
pParam = 0.0
rParam :: Double
rParam = 1.0
sgTransformer :: SGTransformer
sgTransformer (a, _) PA.Start = broadCastLocalState a
sgTransformer (a, e) (PA.Dt dt) = return a
sgTransformer (a, e) (PA.Message (_,m)) = sgMsg a m
sgMsg :: SGAgent -> SGMsg -> STM SGAgent
sgMsg a (NeighbourState s) = sgStateMsg a s
sgMsg a (NeighbourPayoff p) = sgPayoffMsg a p
sgStateMsg :: SGAgent -> SGState -> STM SGAgent
sgStateMsg a s = do
if ( allNeighboursTicked a' ) then
broadCastLocalPayoff a'
else
return a'
where
a' = tickNeighbourFlag $ playGame a s
playGame :: SGAgent -> SGState -> SGAgent
playGame a s = a'
where
lp = sgLocalPayoff (PA.state a)
poIncrease = payoffWith a s
newLp = lp + poIncrease
a' = PA.updateState a (\s -> s { sgLocalPayoff = newLp })
broadCastLocalPayoff :: SGAgent -> STM SGAgent
broadCastLocalPayoff a = do
PA.broadcastMsgToNeighbours a (NeighbourPayoff (ls, lp))
return $ resetNeighbourFlag a
where
ls = sgCurrState (PA.state a)
lp = sgLocalPayoff (PA.state a)
sgPayoffMsg :: SGAgent -> (SGState, Double) -> STM SGAgent
sgPayoffMsg a p = if ( allNeighboursTicked a'' ) then
broadCastLocalState $ switchToBestPayoff a''
else
return a''
where
a' = comparePayoff a p
a'' = tickNeighbourFlag a'
comparePayoff :: SGAgent -> (SGState, Double) -> SGAgent
comparePayoff a p@(_, v)
| v > localV = PA.updateState a (\s -> s { sgBestPayoff = p } )
| otherwise = a
where
(_, localV) = sgBestPayoff (PA.state a)
switchToBestPayoff :: SGAgent -> SGAgent
switchToBestPayoff a = PA.updateState a (\s -> s { sgCurrState = bestState,
sgPrevState = oldState,
sgLocalPayoff = 0.0,
sgBestPayoff = (bestState, 0.0)} )
where
(bestState, _) = sgBestPayoff (PA.state a)
oldState = sgCurrState (PA.state a)
broadCastLocalState :: SGAgent -> STM SGAgent
broadCastLocalState a = do
PA.broadcastMsgToNeighbours a (NeighbourState ls)
return $ resetNeighbourFlag a
where
ls = sgCurrState (PA.state a)
-- NOTE: the first state is always the owning agent
payoffWith :: SGAgent -> SGState -> Double
payoffWith a s = payoff as s
where
as = sgCurrState (PA.state a)
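-- Pairwise payoff: the focal agent's state comes first, the neighbour's second.
-- With the parameters above (b > r > p = s) a defector meeting a cooperator
-- earns the temptation payoff bParam.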
payoff :: SGState -> SGState -> Double
payoff Defector Defector = pParam
payoff Cooperator Defector = sParam
payoff Defector Cooperator = bParam
payoff Cooperator Cooperator = rParam
allNeighboursTicked :: SGAgent -> Bool
allNeighboursTicked a = nf == 0
where
nf = (sgNeighbourFlag (PA.state a))
tickNeighbourFlag :: SGAgent -> SGAgent
tickNeighbourFlag a = PA.updateState a (\s -> s { sgNeighbourFlag = nf - 1 })
where
nf = (sgNeighbourFlag (PA.state a))
resetNeighbourFlag :: SGAgent -> SGAgent
resetNeighbourFlag a = PA.updateState a (\s -> s { sgNeighbourFlag = neighbourCount })
where
neighbourCount = Map.size (PA.neighbours a)
createRandomSGAgents :: StdGen -> (Int, Int) -> Double -> STM ([SGAgent], StdGen)
createRandomSGAgents gInit cells@(x,y) p = do
as <- mapM (\idx -> PA.createAgent idx (randStates !! idx) sgTransformer) [0..n-1]
let as' = map (\a -> PA.addNeighbours a (agentNeighbours a as cells) ) as
return (as', g')
where
n = x * y
(randStates, g') = createRandomStates gInit n p
createRandomStates :: StdGen -> Int -> Double -> ([SGAgentState], StdGen)
createRandomStates g 0 p = ([], g)
createRandomStates g n p = (rands, g'')
where
(randState, g') = randomAgentState g p
(ras, g'') = createRandomStates g' (n-1) p
rands = randState : ras
randomAgentState :: StdGen -> Double -> (SGAgentState, StdGen)
randomAgentState g p = (SIRSAgentState{ sgCurrState = s,
sgPrevState = s,
sgLocalPayoff = 0.0,
sgBestPayoff = (s, 0.0),
sgNeighbourFlag = 0}, g')
where
(isDefector, g') = randomThresh g p
(g'', _) = split g'
s = if isDefector then
Defector
else
Cooperator
randomThresh :: StdGen -> Double -> (Bool, StdGen)
randomThresh g p = (flag, g')
where
(thresh, g') = randomR(0.0, 1.0) g
flag = thresh <= p
agentNeighbours :: SGAgent -> [SGAgent] -> (Int, Int) -> [SGAgent]
agentNeighbours a as cells = filter (\a' -> any (==(agentToCell a' cells)) neighbourCells ) as
where
aCell = agentToCell a cells
neighbourCells = neighbours aCell
agentToCell :: SGAgent -> (Int, Int) -> (Int, Int)
agentToCell a (xCells, yCells) = (ax, ay)
where
aid = PA.agentId a
ax = mod aid yCells
ay = floor((fromIntegral aid) / (fromIntegral xCells))
neighbourhood :: [(Int, Int)]
neighbourhood = [topLeft, top, topRight,
left, center, right,
bottomLeft, bottom, bottomRight]
where
topLeft = (-1, -1)
top = (0, -1)
topRight = (1, -1)
left = (-1, 0)
center = (0, 0)
right = (1, 0)
bottomLeft = (-1, 1)
bottom = (0, 1)
bottomRight = (1, 1)
neighbours :: (Int, Int) -> [(Int, Int)]
neighbours (x,y) = map (\(x', y') -> (x+x', y+y')) neighbourhood
|
thalerjonathan/phd
|
public/ArtIterating/code/haskell/PureAgentsConc/src/SpacialGameMsg/SGModelMsg.hs
|
gpl-3.0
| 6,809 | 0 | 15 | 2,279 | 2,136 | 1,173 | 963 | 143 | 2 |
module TPL.Test.Value where
import Control.Applicative
import Test.QuickCheck
import TPL.Parse
import TPL.Value
instance Arbitrary TPLValue where
arbitrary = oneof [
nullValue
]
permutationsOf :: [a] -> Gen [a]
permutationsOf = listOf1 . oneof . map return
idString = do start <- oneof . map return $ idChar
rest <- permutationsOf $ idChar ++ ['0'..'9']
return $ start:rest
where idChar = '_' : ['a'..'z'] ++ ['A'..'Z']
nullValue = return Null
idValue = Id <$> idString
numValue = Number <$> arbitrary
strValue = String <$> arbitrary
boolValue = Boolean <$> arbitrary
opValue = Operator <$> permutationsOf operatorCharacters
natValue = Native <$> idString
patternList = List <$> listOf (frequency [(124, idValue), (1, patternList)])
|
TikhonJelvis/TPL
|
test/TPL/Test/Value.hs
|
gpl-3.0
| 832 | 0 | 10 | 208 | 260 | 141 | 119 | 22 | 1 |
module GnomeLookOrg.Data
( Meta(..)
, Content(..)
, Data(..)
, getData
) where
import Data.ByteString.Char8 (pack)
import Data.Maybe (catMaybes, listToMaybe)
import qualified Data.Text as T
import Network.Connection (TLSSettings(..))
import Network.HTTP.Conduit
import Text.XML (parseLBS_, def)
import Text.XML.Cursor
import GnomeLookOrg.Base
import GnomeLookOrg.Categories
import Utils
data Meta = Meta { contentMetaTotalItems :: Int } deriving Show
data Content = Content { contentId :: Int
, contentName :: T.Text
, contentVersion :: T.Text
, contentDescription :: T.Text
, contentPersonId :: T.Text
, contentScore :: Int
} deriving Show
data Data = Data { meta :: Meta
, contents :: [Content]
} deriving Show
dataUrl :: String
dataUrl = apiBaseUrl ++ "/content/data"
parseContent :: Cursor -> Maybe Content
parseContent cursor = do
id' <- readContent (cursor $/ laxElementContent "id")
name <- listToMaybe (cursor $/ laxElementContent "name")
version <- listToMaybe (cursor $/ laxElementContent "version")
description <- listToMaybe (cursor $/ laxElementContent "description")
personId <- listToMaybe (cursor $/ laxElementContent "personid")
score <- readContent (cursor $/ laxElementContent "score")
return Content { contentId = id'
, contentName = name
, contentVersion = version
, contentDescription = description
, contentPersonId = personId
, contentScore = score }
parseContents :: [Cursor] -> Maybe [Content]
parseContents cursors
| length cursors == length parsed = Just parsed
| otherwise = Nothing
where parsed = catMaybes $ fmap parseContent cursors
parseData :: Cursor -> Maybe Data
parseData cursor = do
totalItems <- readContent
(cursor $/ laxElement "meta" &/ laxElementContent "totalitems")
parsedContents <- parseContents (cursor $/ laxElement "data" &/ anyElement)
return Data { meta = Meta totalItems, contents = parsedContents }
getData :: Category -- ^ Category
-> Int -- ^ Page number, starting from 0
-> Int -- ^ Pagesize (amount of entries per page)
-> IO (Maybe Data)
getData (Category catId _) pageNum pageSize = do
initReq <- parseUrl dataUrl
let request = setQueryString [ ("categories", Just (pack $ show catId))
, ("page", Just (pack $ show pageNum))
, ("pagesize", Just (pack $ show pageSize))
] initReq
-- TODO: As far as I understand, reusing one manager
-- across the whole application would be better.
manager <- newManager (mkManagerSettings (TLSSettingsSimple True False False) Nothing)
reply <- httpLbs request manager
return . parseData . fromDocument . parseLBS_ def $ responseBody reply
|
jplatte/gtk-theme-manager
|
src/GnomeLookOrg/Data.hs
|
gpl-3.0
| 3,164 | 0 | 16 | 997 | 785 | 419 | 366 | 66 | 1 |
-- |This contains some simple frame renderers
module Effects where
import Elovalo
import Fractional
-- |Produces frame with a static intensity
singleIntensity :: RealFrac a => Elovalo -> a -> Frame
singleIntensity e intensity = fractionalToFrame e $
replicate (totalVoxels e) intensity
|
elovalo/helovalo
|
src/Effects.hs
|
gpl-3.0
| 318 | 0 | 8 | 74 | 60 | 32 | 28 | 6 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.IAM.GetAccountAuthorizationDetails
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieves information about all IAM users, groups, roles, and policies
-- in your account, including their relationships to one another. Use this
-- API to obtain a snapshot of the configuration of IAM permissions (users,
-- groups, roles, and policies) in your account.
--
-- You can optionally filter the results using the 'Filter' parameter. You
-- can paginate the results using the 'MaxItems' and 'Marker' parameters.
--
-- /See:/ <http://docs.aws.amazon.com/IAM/latest/APIReference/API_GetAccountAuthorizationDetails.html AWS API Reference> for GetAccountAuthorizationDetails.
module Network.AWS.IAM.GetAccountAuthorizationDetails
(
-- * Creating a Request
getAccountAuthorizationDetails
, GetAccountAuthorizationDetails
-- * Request Lenses
, gaadMarker
, gaadMaxItems
, gaadFilter
-- * Destructuring the Response
, getAccountAuthorizationDetailsResponse
, GetAccountAuthorizationDetailsResponse
-- * Response Lenses
, gaadrsRoleDetailList
, gaadrsGroupDetailList
, gaadrsUserDetailList
, gaadrsMarker
, gaadrsIsTruncated
, gaadrsPolicies
, gaadrsResponseStatus
) where
import Network.AWS.IAM.Types
import Network.AWS.IAM.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | /See:/ 'getAccountAuthorizationDetails' smart constructor.
data GetAccountAuthorizationDetails = GetAccountAuthorizationDetails'
{ _gaadMarker :: !(Maybe Text)
, _gaadMaxItems :: !(Maybe Nat)
, _gaadFilter :: !(Maybe [EntityType])
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'GetAccountAuthorizationDetails' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gaadMarker'
--
-- * 'gaadMaxItems'
--
-- * 'gaadFilter'
getAccountAuthorizationDetails
:: GetAccountAuthorizationDetails
getAccountAuthorizationDetails =
GetAccountAuthorizationDetails'
{ _gaadMarker = Nothing
, _gaadMaxItems = Nothing
, _gaadFilter = Nothing
}
-- | Use this parameter only when paginating results and only after you
-- receive a response indicating that the results are truncated. Set it to
-- the value of the 'Marker' element in the response you received to inform
-- the next call about where to start.
gaadMarker :: Lens' GetAccountAuthorizationDetails (Maybe Text)
gaadMarker = lens _gaadMarker (\ s a -> s{_gaadMarker = a});
-- | Use this only when paginating results to indicate the maximum number of
-- items you want in the response. If there are additional items beyond the
-- maximum you specify, the 'IsTruncated' response element is 'true'.
--
-- This parameter is optional. If you do not include it, it defaults to
-- 100. Note that IAM might return fewer results, even when there are more
-- results available. If this is the case, the 'IsTruncated' response
-- element returns 'true' and 'Marker' contains a value to include in the
-- subsequent call that tells the service where to continue from.
gaadMaxItems :: Lens' GetAccountAuthorizationDetails (Maybe Natural)
gaadMaxItems = lens _gaadMaxItems (\ s a -> s{_gaadMaxItems = a}) . mapping _Nat;
-- | A list of entity types (user, group, role, local managed policy, or AWS
-- managed policy) for filtering the results.
gaadFilter :: Lens' GetAccountAuthorizationDetails [EntityType]
gaadFilter = lens _gaadFilter (\ s a -> s{_gaadFilter = a}) . _Default . _Coerce;
instance AWSRequest GetAccountAuthorizationDetails
where
type Rs GetAccountAuthorizationDetails =
GetAccountAuthorizationDetailsResponse
request = postQuery iAM
response
= receiveXMLWrapper
"GetAccountAuthorizationDetailsResult"
(\ s h x ->
GetAccountAuthorizationDetailsResponse' <$>
(x .@? "RoleDetailList" .!@ mempty >>=
may (parseXMLList "member"))
<*>
(x .@? "GroupDetailList" .!@ mempty >>=
may (parseXMLList "member"))
<*>
(x .@? "UserDetailList" .!@ mempty >>=
may (parseXMLList "member"))
<*> (x .@? "Marker")
<*> (x .@? "IsTruncated")
<*>
(x .@? "Policies" .!@ mempty >>=
may (parseXMLList "member"))
<*> (pure (fromEnum s)))
instance ToHeaders GetAccountAuthorizationDetails
where
toHeaders = const mempty
instance ToPath GetAccountAuthorizationDetails where
toPath = const "/"
instance ToQuery GetAccountAuthorizationDetails where
toQuery GetAccountAuthorizationDetails'{..}
= mconcat
["Action" =:
("GetAccountAuthorizationDetails" :: ByteString),
"Version" =: ("2010-05-08" :: ByteString),
"Marker" =: _gaadMarker, "MaxItems" =: _gaadMaxItems,
"Filter" =:
toQuery (toQueryList "member" <$> _gaadFilter)]
-- | Contains the response to a successful GetAccountAuthorizationDetails
-- request.
--
-- /See:/ 'getAccountAuthorizationDetailsResponse' smart constructor.
data GetAccountAuthorizationDetailsResponse = GetAccountAuthorizationDetailsResponse'
{ _gaadrsRoleDetailList :: !(Maybe [RoleDetail])
, _gaadrsGroupDetailList :: !(Maybe [GroupDetail])
, _gaadrsUserDetailList :: !(Maybe [UserDetail])
, _gaadrsMarker :: !(Maybe Text)
, _gaadrsIsTruncated :: !(Maybe Bool)
, _gaadrsPolicies :: !(Maybe [ManagedPolicyDetail])
, _gaadrsResponseStatus :: !Int
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'GetAccountAuthorizationDetailsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gaadrsRoleDetailList'
--
-- * 'gaadrsGroupDetailList'
--
-- * 'gaadrsUserDetailList'
--
-- * 'gaadrsMarker'
--
-- * 'gaadrsIsTruncated'
--
-- * 'gaadrsPolicies'
--
-- * 'gaadrsResponseStatus'
getAccountAuthorizationDetailsResponse
:: Int -- ^ 'gaadrsResponseStatus'
-> GetAccountAuthorizationDetailsResponse
getAccountAuthorizationDetailsResponse pResponseStatus_ =
GetAccountAuthorizationDetailsResponse'
{ _gaadrsRoleDetailList = Nothing
, _gaadrsGroupDetailList = Nothing
, _gaadrsUserDetailList = Nothing
, _gaadrsMarker = Nothing
, _gaadrsIsTruncated = Nothing
, _gaadrsPolicies = Nothing
, _gaadrsResponseStatus = pResponseStatus_
}
-- | A list containing information about IAM roles.
gaadrsRoleDetailList :: Lens' GetAccountAuthorizationDetailsResponse [RoleDetail]
gaadrsRoleDetailList = lens _gaadrsRoleDetailList (\ s a -> s{_gaadrsRoleDetailList = a}) . _Default . _Coerce;
-- | A list containing information about IAM groups.
gaadrsGroupDetailList :: Lens' GetAccountAuthorizationDetailsResponse [GroupDetail]
gaadrsGroupDetailList = lens _gaadrsGroupDetailList (\ s a -> s{_gaadrsGroupDetailList = a}) . _Default . _Coerce;
-- | A list containing information about IAM users.
gaadrsUserDetailList :: Lens' GetAccountAuthorizationDetailsResponse [UserDetail]
gaadrsUserDetailList = lens _gaadrsUserDetailList (\ s a -> s{_gaadrsUserDetailList = a}) . _Default . _Coerce;
-- | When 'IsTruncated' is 'true', this element is present and contains the
-- value to use for the 'Marker' parameter in a subsequent pagination
-- request.
gaadrsMarker :: Lens' GetAccountAuthorizationDetailsResponse (Maybe Text)
gaadrsMarker = lens _gaadrsMarker (\ s a -> s{_gaadrsMarker = a});
-- | A flag that indicates whether there are more items to return. If your
-- results were truncated, you can make a subsequent pagination request
-- using the 'Marker' request parameter to retrieve more items. Note that
-- IAM might return fewer than the 'MaxItems' number of results even when
-- there are more results available. We recommend that you check
-- 'IsTruncated' after every call to ensure that you receive all of your
-- results.
gaadrsIsTruncated :: Lens' GetAccountAuthorizationDetailsResponse (Maybe Bool)
gaadrsIsTruncated = lens _gaadrsIsTruncated (\ s a -> s{_gaadrsIsTruncated = a});
-- | A list containing information about managed policies.
gaadrsPolicies :: Lens' GetAccountAuthorizationDetailsResponse [ManagedPolicyDetail]
gaadrsPolicies = lens _gaadrsPolicies (\ s a -> s{_gaadrsPolicies = a}) . _Default . _Coerce;
-- | The response status code.
gaadrsResponseStatus :: Lens' GetAccountAuthorizationDetailsResponse Int
gaadrsResponseStatus = lens _gaadrsResponseStatus (\ s a -> s{_gaadrsResponseStatus = a});
|
olorin/amazonka
|
amazonka-iam/gen/Network/AWS/IAM/GetAccountAuthorizationDetails.hs
|
mpl-2.0
| 9,507 | 0 | 20 | 1,958 | 1,269 | 750 | 519 | 138 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE LambdaCase #-}
-- Module : Khan.Model.EC2.VPC
-- Copyright : (c) 2018 Wire Swiss GmbH <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Wire Swiss GmbH <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
module Khan.Model.EC2.VPC
(
-- * API
resolve
, find
) where
import Khan.Internal
import Khan.Prelude hiding (find, min, max)
import Network.AWS.EC2 hiding (Instance)
-- | Turn a 'VpcRef' into a VPC ID. Might error out.
resolve
:: VpcRef
-> AWS Text
resolve (VpcId s) = pure s
resolve (VpcName s) = find s >>= \case
Nothing -> throwAWS "Couldn't find VPC called {}" [s]
Just v -> pure (vitVpcId v)
find
:: Text -- ^ VPC name
-> AWS (Maybe VpcItemType)
find name = do
say "Searching for VPC {}" [name]
vpcMay <$> sendCatch (DescribeVpcs [] [Filter "tag:Name" [name]])
where
vpcMay (Right x) = headMay . toList $ dvrVpcSet x
vpcMay (Left _) = Nothing
|
zinfra/khan
|
khan/src/Khan/Model/EC2/VPC.hs
|
mpl-2.0
| 1,437 | 0 | 13 | 374 | 261 | 144 | 117 | 28 | 2 |
import System.Directory
import System.Environment
checkArgs :: [String] -> IO Bool
checkArgs args
| length args /= 2 = return False
| otherwise = do dE <- doesDirectoryExist ( head args )
fE <- doesFileExist ( last args )
return $ dE && fE
main :: IO ()
main = do args <- getArgs
gArgs <- checkArgs args
print gArgs
|
JavierJF/TextInserter
|
src/Main.hs
|
agpl-3.0
| 426 | 0 | 11 | 169 | 140 | 64 | 76 | 12 | 1 |
{-# LANGUAGE TypeSynonymInstances #-}
module Network.UDP
( DataPacket(..)
, openBoundUDPPort
, openListeningUDPPort
, pingUDPPort
, sendUDPPacketTo
, recvUDPPacket
, recvUDPPacketFrom
) where
import qualified Data.ByteString as Strict (ByteString, concat, singleton)
import qualified Data.ByteString.Lazy as Lazy (ByteString, toChunks, fromChunks)
import Data.ByteString.Char8 (pack, unpack)
import Network.Socket hiding (sendTo, recv, recvFrom)
import Network.Socket.ByteString (sendTo, recv, recvFrom)
-- Type class for converting StringLike types to and from strict ByteStrings
class DataPacket a where
toStrictBS :: a -> Strict.ByteString
fromStrictBS :: Strict.ByteString -> a
instance DataPacket Strict.ByteString where
toStrictBS = id
{-# INLINE toStrictBS #-}
fromStrictBS = id
{-# INLINE fromStrictBS #-}
openBoundUDPPort :: String -> Int -> IO Socket
openBoundUDPPort uri port = do
s <- getUDPSocket
bindAddr <- inet_addr uri
let a = SockAddrInet (toEnum port) bindAddr
bindSocket s a
return s
pingUDPPort :: Socket -> SockAddr -> IO ()
pingUDPPort s a = sendTo s (Strict.singleton 0) a >> return ()
|
SoftwareHeritage/swh-web-ui
|
swh/web/tests/resources/contents/code/extensions/test.hs
|
agpl-3.0
| 1,136 | 0 | 12 | 175 | 304 | 170 | 134 | 31 | 1 |
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TupleSections #-}
module StaticConfig where
import Control.Concurrent (threadDelay)
import Control.Concurrent.MVar (MVar, modifyMVar, newMVar,
readMVar)
import qualified Control.Distributed.Process as DP
import qualified Control.Distributed.Process.Node as Node
import Control.Exception (throw)
import Control.Monad (replicateM)
import Control.Monad.Catch (bracket, finally)
import Control.Monad.IO.Class (liftIO)
import qualified Data.ByteString.Char8 as BSC8
import Data.Foldable (forM_)
import Data.Maybe (catMaybes)
import Data.Monoid ((<>))
import qualified Network.Socket as N
import qualified Network.Transport as NT
import qualified Network.Transport.TCP as NT
data Config = Config
{ transport :: NT.Transport
, rtable :: DP.RemoteTable
, localNodes :: [Node.LocalNode]
, remoteNodeIds :: [DP.NodeId]
, remotePids :: [DP.ProcessId]
}
initialize
:: N.HostName -> N.ServiceName -> DP.RemoteTable -> [String]
-> IO (MVar Config)
initialize host port rtable0 nodeAddrs = do
mTransport <- NT.createTransport host port (host,) NT.defaultTCPParameters
case mTransport of
Left err -> throw err
Right transport0 ->
let config = Config
{ transport = transport0
, rtable = rtable0
, localNodes = []
, remoteNodeIds = map go nodeAddrs
, remotePids = []
}
in newMVar config
where
go x = DP.NodeId (NT.EndPointAddress ("127.0.0.1:" <> BSC8.pack x <> ":0"))
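-- Query every configured remote node for the process registered under "WHERE_IS",
-- cache the discovered pids in the shared config, and return them.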
getRemotePids
:: MVar Config
-> DP.Process [DP.ProcessId]
getRemotePids config = do
nodes <- fmap remoteNodeIds (liftIO (readMVar config))
bracket
(mapM DP.monitorNode nodes)
(mapM DP.unmonitor)
$ \_ -> do
forM_ nodes $ \nid -> DP.whereisRemoteAsync nid "WHERE_IS"
pids <- catMaybes <$> replicateM (length nodes) (
DP.receiveWait
[ DP.match (\(DP.WhereIsReply "WHERE_IS" mPid) -> return mPid)
, DP.match (\DP.NodeMonitorNotification {} -> return Nothing)
])
liftIO (modifyMVar config $ \c -> return ( c { remotePids = pids }
, pids ))
newLocalNode
:: MVar Config
-> IO Node.LocalNode
newLocalNode config =
modifyMVar config $ \c -> do
localNode <- Node.newLocalNode (transport c) (rtable c)
return ( c { localNodes = localNode : localNodes c }
, localNode)
startProcess
:: MVar Config
-> (MVar Config -> DP.Process ())
-> IO ()
startProcess config proc = do
node <- newLocalNode config
Node.runProcess node $ do
pid <- DP.getSelfPid
DP.register "WHERE_IS" pid
liftIO (threadDelay 2000000)
_ <- getRemotePids config
proc config `finally` shutdownLogger
-- | Shut down the logger process.
-- Ensures that any pending messages are flushed before the process exits.
-- TODO: monitor the logger process to avoid deadlock if it has already died.
shutdownLogger
:: DP.Process ()
shutdownLogger = do
(sport,rport) <- DP.newChan
DP.nsend "logger" sport
DP.receiveChan rport
usage :: String -> IO ()
usage prog = putStrLn $ "usage: " ++ prog ++ " (master | remote) host port"
configMain :: String -> [String] -> (MVar Config -> DP.Process ()) -> IO ()
configMain programeName args app =
case args of
(host:port:peers) -> do
config <- initialize host port Node.initRemoteTable peers
startProcess config app
_ -> usage programeName
|
haroldcarr/learn-haskell-coq-ml-etc
|
haskell/topic/distributed/static-config/StaticConfig.hs
|
unlicense
| 3,866 | 0 | 22 | 1,156 | 1,068 | 563 | 505 | 93 | 2 |
import Safe (readMay)
displayAge maybeAge =
case maybeAge of
    Nothing  -> putStrLn "You provided an invalid year"
    Just age -> putStrLn $ "In 2020, you will be: " ++ show age
calcAge n = 2020 - n
main = do
putStrLn "Enter your birthyear: "
yearString <- getLine
  -- Instead of using fmap, we can extract the value out of the
  -- Maybe functor and apply the calcAge function directly.
  -- Do notation is available only for monads (special functors):
  -- the do block extracts the value out of the Maybe monad and,
  -- after manipulation, packages it back up as a monadic value.
let maybeAge = do
yearInteger <- readMay yearString
return $ calcAge yearInteger
displayAge maybeAge
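-- For comparison, the same computation written with fmap instead of do notation
-- (illustrative sketch; not called from 'main'):
calcAgeFromInput :: String -> Maybe Int
calcAgeFromInput yearString = fmap calcAge (readMay yearString)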
|
dongarerahul/edx-haskell
|
main-5.hs
|
apache-2.0
| 672 | 0 | 13 | 162 | 120 | 57 | 63 | -1 | -1 |
import Data.List
ans c@(c1:c2:c3:_) =
let r = [1..10] \\ c
a = filter (\x -> x+c1+c2 <= 20) r
in
if (length a) >= 4
then "YES"
else "NO"
main = do
c <- getContents
let i = map (map read) $ map words $ lines c :: [[Int]]
o = map ans i
mapM_ putStrLn o
|
a143753/AOJ
|
0060.hs
|
apache-2.0
| 287 | 0 | 14 | 94 | 171 | 87 | 84 | 12 | 2 |
{-
Copyrights (c) 2016. Samsung Electronics Ltd. All right reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
{-# LANGUAGE FlexibleContexts #-}
module Parse ( cocoonGrammar
, cfgGrammar) where
import Control.Applicative hiding (many,optional,Const)
import Text.Parsec hiding ((<|>))
import Text.Parsec.Expr
import Text.Parsec.Language
import qualified Text.Parsec.Token as T
import Data.Maybe
import Numeric
import Syntax
import Pos
import Util
reservedOpNames = ["?", "!", "|", "==", "=", ":=", "%", "+", "-", ".", "=>", "<=", "<=>", ">=", "<", ">", "!=", ">>", "<<"]
reservedNames = ["and",
"assume",
"bool",
"case",
"default",
"else",
"false",
"filter",
"fork",
"function",
"host",
"havoc",
"if",
"let",
"not",
"or",
"pkt",
"refine",
"role",
"send",
"struct",
"switch",
"then",
"true",
"typedef",
"uint"]
lexer = T.makeTokenParser (emptyDef {T.commentStart = "(*"
,T.commentEnd = "*)"
,T.nestedComments = True
,T.identStart = letter <|> char '_'
,T.identLetter = alphaNum <|> char '_'
,T.reservedOpNames = reservedOpNames
,T.reservedNames = reservedNames
,T.opLetter = oneOf ":%*+./=|"
,T.caseSensitive = True})
reservedOp = T.reservedOp lexer
reserved = T.reserved lexer
identifier = T.identifier lexer
--semiSep = T.semiSep lexer
--semiSep1 = T.semiSep1 lexer
colon = T.colon lexer
commaSep = T.commaSep lexer
commaSep1 = T.commaSep1 lexer
symbol = T.symbol lexer
semi = T.semi lexer
comma = T.comma lexer
braces = T.braces lexer
parens = T.parens lexer
angles = T.angles lexer
brackets = T.brackets lexer
natural = T.natural lexer
decimal = T.decimal lexer
--integer = T.integer lexer
whiteSpace = T.whiteSpace lexer
lexeme = T.lexeme lexer
dot = T.dot lexer
--stringLit = T.stringLiteral lexer
--charLit = T.charLiteral lexer
removeTabs = do s <- getInput
let s' = map (\c -> if c == '\t' then ' ' else c ) s
setInput s'
withPos x = (\s a e -> atPos a (s,e)) <$> getPosition <*> x <*> getPosition
data SpecItem = SpType TypeDef
| SpFunc Function
| SpRole Role
| SpAssume Assume
| SpNode Node
cocoonGrammar = Spec <$ removeTabs <*> ((optional whiteSpace) *> spec <* eof)
cfgGrammar = removeTabs *> ((optional whiteSpace) *> (many func) <* eof)
spec = (\r rs -> r:rs) <$> (withPos $ mkRefine [] <$> (many decl)) <*> (many refine)
mkRefine :: [String] -> [SpecItem] -> Refine
mkRefine targets items = Refine nopos targets types funcs roles assumes nodes
where types = mapMaybe (\i -> case i of
SpType t -> Just t
_ -> Nothing) items
funcs = mapMaybe (\i -> case i of
SpFunc f -> Just f
_ -> Nothing) items
roles = mapMaybe (\i -> case i of
SpRole r -> Just r
_ -> Nothing) items
assumes = mapMaybe (\i -> case i of
SpAssume a -> Just a
_ -> Nothing) items
nodes = mapMaybe (\i -> case i of
SpNode n -> Just n
_ -> Nothing) items
refine = withPos $ mkRefine <$ reserved "refine"
<*> (commaSep identifier)
<*> (braces $ many decl)
decl = (SpType <$> typeDef)
<|> (SpFunc <$> func)
<|> (SpRole <$> role)
<|> (SpAssume <$> assume)
<|> (SpNode <$> node)
typeDef = withPos $ (flip $ TypeDef nopos) <$ reserved "typedef" <*> typeSpec <*> identifier
func = withPos $ Function nopos <$ reserved "function"
<*> identifier
<*> (parens $ commaSep arg)
<*> (colon *> typeSpecSimple)
<*> (option (EBool nopos True) (reservedOp "|" *> expr))
<*> (optionMaybe (reservedOp "=" *> expr))
role = withPos $ Role nopos <$ reserved "role"
<*> identifier
<*> (brackets $ commaSep arg)
<*> (option (EBool nopos True) (reservedOp "|" *> expr))
<*> (option (EBool nopos True) (reservedOp "/" *> expr))
<*> (reservedOp "=" *> stat)
assume = withPos $ Assume nopos <$ reserved "assume" <*> (parens $ commaSep arg) <*> expr
node = withPos $ Node nopos <$> ((NodeSwitch <$ reserved "switch") <|> (NodeHost <$ reserved "host"))
<*> identifier
<*> (parens $ commaSep1 $ parens $ (,) <$> identifier <* comma <*> identifier)
arg = withPos $ flip (Field nopos) <$> typeSpecSimple <*> identifier
typeSpec = withPos $
arrType
<|> uintType
<|> boolType
<|> userType
<|> structType
typeSpecSimple = withPos $
arrType
<|> uintType
<|> boolType
<|> userType
uintType = TUInt nopos <$ reserved "uint" <*> (fromIntegral <$> angles decimal)
boolType = TBool nopos <$ reserved "bool"
userType = TUser nopos <$> identifier
arrType = brackets $ TArray nopos <$> typeSpecSimple <* semi <*> (fromIntegral <$> decimal)
structType = TStruct nopos <$ reserved "struct" <*> (braces $ commaSep1 arg)
expr = buildExpressionParser etable term
<?> "expression"
term = parens expr <|> term'
term' = withPos $
estruct
<|> ebuiltin
<|> eapply
<|> eloc
<|> eint
<|> ebool
<|> epacket
<|> evar
<|> edotvar
<|> econd
eapply = EApply nopos <$ isapply <*> identifier <*> (parens $ commaSep expr)
where isapply = try $ lookAhead $ identifier *> symbol "("
ebuiltin = EBuiltin nopos <$ isbuiltin <*> (identifier <* char '!') <*> (parens $ commaSep expr)
where isbuiltin = try $ lookAhead $ (identifier *> char '!') *> symbol "("
eloc = ELocation nopos <$ isloc <*> identifier <*> (brackets $ commaSep expr)
where isloc = try $ lookAhead $ identifier *> (brackets $ commaSep expr)
ebool = EBool nopos <$> ((True <$ reserved "true") <|> (False <$ reserved "false"))
epacket = EPacket nopos <$ reserved "pkt"
evar = EVar nopos <$> identifier
edotvar = EDotVar nopos <$ reservedOp "." <*> identifier
econd = (fmap uncurry (ECond nopos <$ reserved "case"))
<*> (braces $ (,) <$> (many $ (,) <$> expr <* colon <*> expr <* semi)
<*> (reserved "default" *> colon *> expr <* semi))
--eint = EInt nopos <$> (fromIntegral <$> decimal)
eint = lexeme eint'
estruct = EStruct nopos <$ isstruct <*> identifier <*> (braces $ commaSep1 expr)
where isstruct = try $ lookAhead $ identifier *> symbol "{"
eint' = (lookAhead $ char '\'' <|> digit) *> (do w <- width
v <- sradval
mkLit w v)
width = optionMaybe (try $ ((fmap fromIntegral parseDec) <* (lookAhead $ char '\'')))
sradval = ((try $ string "'b") *> parseBin)
<|> ((try $ string "'o") *> parseOct)
<|> ((try $ string "'d") *> parseDec)
<|> ((try $ string "'h") *> parseHex)
<|> parseDec
parseBin :: Stream s m Char => ParsecT s u m Integer
parseBin = readBin <$> (many1 $ (char '0') <|> (char '1'))
parseOct :: Stream s m Char => ParsecT s u m Integer
parseOct = (fst . head . readOct) <$> many1 octDigit
parseDec :: Stream s m Char => ParsecT s u m Integer
parseDec = (fst . head . readDec) <$> many1 digit
--parseSDec = (\m v -> m * v)
-- <$> (option 1 ((-1) <$ reservedOp "-"))
-- <*> ((fst . head . readDec) <$> many1 digit)
parseHex :: Stream s m Char => ParsecT s u m Integer
parseHex = (fst . head . readHex) <$> many1 hexDigit
mkLit :: Maybe Int -> Integer -> ParsecT s u m Expr
mkLit Nothing v = return $ EInt nopos (msb v + 1) v
mkLit (Just w) v | w == 0 = fail "Unsigned literals must have width >0"
| msb v < w = return $ EInt nopos w v
| otherwise = fail "Value exceeds specified width"
etable = [[postf $ choice [postSlice, postField]]
,[pref $ choice [prefix "not" Not]]
,[binary "%" Mod AssocLeft]
,[binary "+" Plus AssocLeft,
binary "-" Minus AssocLeft]
,[binary ">>" ShiftR AssocLeft,
binary "<<" ShiftL AssocLeft]
,[binary "++" Concat AssocLeft]
,[binary "==" Eq AssocLeft,
binary "!=" Neq AssocLeft,
binary "<" Lt AssocNone,
binary "<=" Lte AssocNone,
binary ">" Gt AssocNone,
binary ">=" Gte AssocNone]
,[binary "and" And AssocLeft]
,[binary "or" Or AssocLeft]
,[binary "=>" Impl AssocLeft]
]
pref p = Prefix . chainl1 p $ return (.)
postf p = Postfix . chainl1 p $ return (flip (.))
postField = (\f end e -> EField (fst $ pos e, end) e f) <$> field <*> getPosition
postSlice = try $ (\(h,l) end e -> ESlice (fst $ pos e, end) e h l) <$> slice <*> getPosition
slice = brackets $ (\h l -> (fromInteger h, fromInteger l)) <$> natural <*> (colon *> natural)
field = dot *> identifier
prefix n fun = (\start e -> EUnOp (start, snd $ pos e) fun e) <$> getPosition <* reservedOp n
binary n fun = Infix $ (\le re -> EBinOp (fst $ pos le, snd $ pos re) fun le re) <$ reservedOp n
stat = buildExpressionParser stable stat'
<?> "statement"
stat' = braces stat
<|> parens stat
<|> simpleStat
simpleStat = withPos $
stest
<|> site
<|> ssendnd
<|> ssend
<|> sset
<|> shavoc
<|> sassume
<|> slet
<|> sfork
stest = STest nopos <$ reserved "filter" <*> expr
ssendnd = SSendND nopos <$ reservedOp "?" <* reserved "send" <*> identifier <*> (brackets expr)
ssend = SSend nopos <$ reserved "send" <*> expr
sset = SSet nopos <$> expr <*> (reservedOp ":=" *> expr)
site = SITE nopos <$ reserved "if" <*> expr <*> (reserved "then" *> stat') <*> (optionMaybe $ reserved "else" *> stat')
shavoc = SHavoc nopos <$ reserved "havoc" <*> expr
sassume = SAssume nopos <$ reserved "assume" <*> expr
slet = SLet nopos <$ reserved "let" <*> typeSpec <*> identifier <*> (reservedOp "=" *> expr)
sfork = (\(vs,c) st -> SFork nopos vs c st) <$ reserved "fork" <*> (parens $ (,) <$> (commaSep1 arg) <*> (reservedOp "|" *> expr)) <*> stat'
stable = [ [sbinary ";" SSeq AssocRight]
, [sbinary "|" SPar AssocRight]
]
sbinary n fun = Infix $ (\l r -> fun (fst $ pos l, snd $ pos r) l r) <$ reservedOp n
|
ryzhyk/cocoon
|
cocoon/Parse.hs
|
apache-2.0
| 12,303 | 0 | 15 | 4,493 | 3,654 | 1,904 | 1,750 | 242 | 6 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FunctionalDependencies #-}
--------------------------------------------------------------------
-- |
-- Copyright : (c) Edward Kmett and Dan Doel 2012-2013
-- License : BSD2
-- Maintainer: Edward Kmett <[email protected]>
-- Stability : experimental
-- Portability: non-portable
--
--------------------------------------------------------------------
module Ermine.Syntax.Instance
( Instance(..)
, HasInstance(..)
) where
import Control.Applicative
import Control.Lens
import Data.Bifoldable
import Data.Bifunctor
import Data.Bitraversable
import Data.Foldable
import Data.Hashable
import Data.Hashable.Extras
import Data.Monoid
import Data.Typeable
import Data.Void
import Ermine.Syntax.Core
import Ermine.Syntax.Head
import Ermine.Syntax.Id
import Ermine.Syntax.Type
import GHC.Generics
------------------------------------------------------------------------------
-- Instance
------------------------------------------------------------------------------
-- instance Ord a => Ord [a]
-- Instance [ord (pure 0)] (Head ord 0 [star] [] [list (pure 0)]) (LamDict (Scope (Dict [AppDict (InstanceId (Head eq 0 [star] [] [list (pure 0))) (AppDict (Slot 0) (B ()))] ...)))
-- instance Category (:-)
-- Instance [] (Head category 0 [] [constraint] [con ":-" ...]) (Dict [] ...)
data Instance c = Instance
{ _instanceContext :: [Type Void Int]
, _instanceHead :: Head
, _instanceBody :: Core c Id
} deriving (Eq, Typeable, Generic, Show)
class HasInstance t c | t -> c where
instance_ :: Lens' t (Instance c)
instanceBody :: Lens' t (Core c Id)
instanceContext :: Lens' t [Type Void Int]
instanceHead :: Lens' t Head
instanceBody = instance_.instanceBody
instanceContext = instance_.instanceContext
instanceHead = instance_.instanceHead
makeLensesWith ?? ''Instance $ classyRules & createClass .~ False & lensClass .~ \_ -> Just (''HasInstance, 'instance_)
instance HasHead (Instance a) where
head_ = instanceHead
instance Hashable a => Hashable (Instance a) where
instance Hashable1 Instance where
instance Functor Instance where
fmap f (Instance c h b) = Instance c h $ first f b
instance Foldable Instance where
foldMap f (Instance _ _ b) = bifoldMap f (const mempty) b
instance Traversable Instance where
traverse f (Instance cxt hd b) = Instance cxt hd <$> bitraverse f pure b
|
PipocaQuemada/ermine
|
src/Ermine/Syntax/Instance.hs
|
bsd-2-clause
| 2,517 | 0 | 11 | 371 | 511 | 286 | 225 | 49 | 0 |
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE AllowAmbiguousTypes #-}
module System.Fuse.Box.FSTypes
(
FuseBox(..),
MonadIO,
MonadError,
MonadFuse,
MonadFSRead,
MonadFSWrite,
module System.IO,
module System.Posix.Types,
module Foreign.C.Error,
ByteString,
Exception,
fsError,
fsOkay,
fsErrorOrOkay,
fuseBoxMain,
roFuseBox,
rwFuseBox
) where
import Control.Exception.Base (Exception)
import Control.Monad.Except
import Control.Monad.IO.Class
import Data.ByteString (ByteString)
import Data.Either
import Foreign.C.Error
import System.Fuse
import System.Fuse.Box.Node
import System.IO
import System.Posix.Types
type FuseResult a = IO (Either Errno a)
type FuseCall = IO Errno
data FuseBox m fh = FuseBox
{
fsbxFhToNode :: fh -> m Node,
fsbxGetFileStat :: Node -> m FileStat,
fsbxReadSymbolicLink :: Node -> m Node,
fsbxCreateDevice :: Node -> EntryType -> FileMode -> DeviceID -> m (),
fsbxCreateDirectory :: Node -> FileMode -> m (),
fsbxRemoveLink :: Node -> m (),
fsbxRemoveDirectory :: Node -> m (),
fsbxCreateSymbolicLink :: Node -> Node -> m (),
fsbxRename :: Node -> Node -> m (),
fsbxCreateLink :: Node -> Node -> m (),
fsbxSetFileMode :: Node -> FileMode -> m (),
fsbxSetOwnerAndGroup :: Node -> UserID -> GroupID -> m (),
fsbxSetFileSize :: Node -> FileOffset -> m (),
fsbxOpen :: Node -> OpenMode -> OpenFileFlags -> m fh,
fsbxRead :: fh -> ByteCount -> FileOffset -> m ByteString,
fsbxWrite :: fh -> ByteString -> FileOffset -> m ByteCount,
fsbxGetFileSystemStats :: Node -> m FileSystemStats,
fsbxFlush :: fh -> m (),
fsbxRelease :: fh -> m (),
fsbxSynchronizeFile :: Node -> SyncType -> m (),
fsbxOpenDirectory :: Node -> m (),
fsbxReadDirectory :: Node -> m [(Node, FileStat)],
fsbxReleaseDirectory :: Node -> m (),
fsbxSynchronizeDirectory :: Node -> SyncType -> m (),
fsbxAccess :: Node -> Int -> m (),
fsbxInit :: m (),
fsbxDestroy :: m (),
fsbxSetFileTimes :: Node -> EpochTime -> EpochTime -> m ()
}
-- |Typeclass defining basic Fuse operations.
class (MonadIO m, MonadError Errno m) => MonadFuse m fh | m -> fh where
-- |Runs fuse and unpacks it into a 'FuseResult'.
runFuse :: m a -> FuseResult a
-- |Convert a file handle into a 'Node'.
mfFhToNode :: fh -> m Node
-- |Open the file at the given path and return the file handle to it.
mfOpen :: Node -> OpenMode -> OpenFileFlags -> m fh
-- |Flush the file, clearing any buffers.
mfFlush :: fh -> m ()
-- |Close the file handle.
mfRelease :: fh -> m ()
-- |Check file access permissions
mfAccess :: Node -> Int -> m ()
-- |Return the file stats for the file at the given node.
mfGetFileStat :: Node -> m FileStat
-- |Given a node anywhere on the file system, return details about the file system
mfGetFileSystemStats :: Node -> m FileSystemStats
-- |Initialize the file system
mfInit :: m ()
-- |Destroy the file system
mfDestroy :: m ()
-- |Typeclass defining read operations (regardless of ability to do write operations).
class (MonadFuse m fh) => MonadFSRead m fh | m -> fh where
-- |Perform a read from the file handle. It should read up to the given number of bytes
-- starting at the given offset.
mfRead :: fh -> ByteCount -> FileOffset -> m ByteString
-- |Return the target of the symbolic link at the given location.
mfReadSymbolicLink :: Node -> m Node
  -- |Opens a directory, specifically checking that the open operation is permitted.
mfOpenDirectory :: Node -> m ()
-- |Implements 'readdir(3)', returning the entire contents of the directory as a list of tuples.
mfReadDirectory :: Node -> m [(Node, FileStat)]
-- |Implements 'closedir(3)'.
mfReleaseDirectory :: Node -> m ()
-- |Typeclass defining write operations (regardless of ability to do read operations).
class (MonadFuse m fh, MonadFSRead m fh) => MonadFSWrite m fh | m -> fh where
-- |Implements 'createDevice' ('mknod(2)'), and also called for regular file creation.
mfCreateDevice :: Node -> EntryType -> FileMode -> DeviceID -> m ()
-- |Implements 'createDirectory' ('mkdir(2)')
mfCreateDirectory :: Node -> FileMode -> m ()
-- |Implements 'removeDirectory' ('rmdir(2)')
mfRemoveDirectory :: Node -> m ()
-- |Implements 'removeLink' ('unlink(2)')
mfRemoveLink :: Node -> m ()
-- |Implements 'createSymbolicLink' ('symlink(2)')
mfCreateSymbolicLink :: Node -> Node -> m ()
-- |Implements 'rename' ('rename(2)')
mfRename :: Node -> Node -> m ()
-- |Implements 'setFileMode' ('chmod(2)')
mfSetFileMode :: Node -> FileMode -> m ()
-- |Implements 'setOwnerAndGroup' ('chown(2)')
mfSetOwnerAndGroup :: Node -> UserID -> GroupID -> m ()
-- |Implements 'setFileSize' ('truncate(2)')
mfSetFileSize :: Node -> FileOffset -> m ()
-- |Implements 'setFileTimes' ('utime(2)')
mfSetFileTimes :: Node -> EpochTime -> EpochTime -> m ()
-- |Implements 'pwrite(2)'
mfWrite :: fh -> ByteString -> FileOffset -> m ByteCount
-- |Implements 'fsync(2)'
mfSynchronizeFile :: Node -> SyncType -> m ()
-- |Synchronize all the contents of the directory
mfSynchronizeDirectory :: Node -> SyncType -> m ()
fsError :: Errno -> FuseResult a
-- ^Convenience method for throwing an 'Errno' as a result.
fsError a = return (Left a)
fsOkay :: FuseResult a
-- ^Convenience method for generating an 'eOK' file system result.
fsOkay = fsError eOK
fsErrorOrOkay :: FuseResult a -> FuseCall
-- ^Returns the 'Errno' value, which may be 'eOK'. If the 'FuseResult'
-- successfully returns a value (including `()`), then this function
-- returns 'eOK'. If the result carries an error, that error value is returned.
fsErrorOrOkay action = do
result <- action
case result of
(Left e) -> return e
(Right _) -> return eOK
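-- A couple of illustrative calls:
--
-- > fsErrorOrOkay (fsError eNOENT)  -- yields eNOENT
-- > fsErrorOrOkay fsOkay            -- yields eOK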
denodeify :: (Node -> a) -> (FilePath -> a)
-- ^Converts a function that starts with a 'Node' to one that
-- starts with a 'FilePath'.
denodeify f = \fp -> f (nodeFromFilePath fp)
denodeify2 :: (Node -> Node -> a) -> (FilePath -> FilePath -> a)
-- ^Converts a function that starts with two 'Node' arguments to
-- a function that starts with two 'FilePath' arguments.
denodeify2 f = \fp1 fp2 -> f (nodeFromFilePath fp1) (nodeFromFilePath fp2)
notSup :: (MonadFuse m fh) => m a
-- ^Utility function for when a 'FuseBox' does not support an operation.
notSup = throwError eNOTSUP
roFuseBox :: (MonadFSRead m fh) => FuseBox m fh
-- ^Creates a 'FuseBox' which is read-only and executes in the given monad.
roFuseBox = FuseBox
{
fsbxCreateLink = \_ _ -> notSup,
fsbxFhToNode = mfFhToNode,
fsbxGetFileStat = mfGetFileStat,
fsbxReadSymbolicLink = mfReadSymbolicLink,
fsbxCreateDevice = \_ _ _ _ -> notSup,
fsbxCreateDirectory = \_ _ -> notSup,
fsbxRemoveLink = \_ -> notSup,
fsbxRemoveDirectory = \_ -> notSup,
fsbxCreateSymbolicLink = \_ _ -> notSup,
fsbxRename = \_ _ -> notSup,
fsbxSetFileMode = \_ _ -> notSup,
fsbxSetOwnerAndGroup = \_ _ _ -> notSup,
fsbxSetFileSize = \_ _ -> notSup,
fsbxSetFileTimes = \_ _ _ -> notSup,
fsbxOpen = mfOpen,
fsbxRead = mfRead,
fsbxWrite = \_ _ _ -> notSup,
fsbxGetFileSystemStats = mfGetFileSystemStats,
fsbxFlush = mfFlush,
fsbxRelease = mfRelease,
fsbxSynchronizeFile = \_ _ -> notSup,
fsbxOpenDirectory = mfOpenDirectory,
fsbxReadDirectory = mfReadDirectory,
fsbxReleaseDirectory = mfReleaseDirectory,
fsbxSynchronizeDirectory = \_ _ -> notSup,
fsbxAccess = mfAccess,
fsbxInit = mfInit,
fsbxDestroy = mfDestroy
}
rwFuseBox :: (MonadFSWrite m fh) => FuseBox m fh
-- ^Creates a 'FuseBox' which is read-write and executes in the given monad.
rwFuseBox = roFuseBox
{
fsbxCreateDevice = mfCreateDevice,
fsbxCreateDirectory = mfCreateDirectory,
fsbxRemoveLink = mfRemoveLink,
fsbxRemoveDirectory = mfRemoveDirectory,
fsbxCreateSymbolicLink = mfCreateSymbolicLink,
fsbxRename = mfRename,
fsbxSetFileMode = mfSetFileMode,
fsbxSetOwnerAndGroup = mfSetOwnerAndGroup,
fsbxSetFileSize = mfSetFileSize,
fsbxSetFileTimes = mfSetFileTimes,
fsbxWrite = mfWrite,
fsbxSynchronizeFile = mfSynchronizeFile,
fsbxSynchronizeDirectory = mfSynchronizeDirectory
}
fuseBoxMain :: (Exception e) => FuseBox m fh -> (e -> IO Errno) -> IO ()
-- ^Converts the 'FuseBox' into 'FuseOperations' and then calls 'fuseMain'.
fuseBoxMain box handler = do
ops <- boxToOps box
fuseMain ops handler
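-- A usage sketch (hedged): it assumes some monad @MyFS@ with a
-- @MonadFSRead MyFS Handle@ instance, which is not defined here, and
-- 'SomeException' from Control.Exception; the handler simply maps every
-- exception to 'eFAULT'.
--
-- > main :: IO ()
-- > main = fuseBoxMain (roFuseBox :: FuseBox MyFS Handle) handler
-- >   where handler (_ :: SomeException) = return eFAULT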
boxToOps :: FuseBox m fh -> IO (FuseOperations fh)
-- ^Converts the 'FuseBox' into 'FuseOperations'.
boxToOps box = do
return FuseOperations
{
}
|
RobertFischer/fusebox
|
shared/System/Fuse/Box/FSTypes.hs
|
bsd-3-clause
| 8,819 | 0 | 14 | 1,831 | 2,048 | 1,144 | 904 | 164 | 2 |
module D12Spec (main, spec) where
import Test.Hspec
import D12Lib
import qualified Text.Parsec as P
main :: IO ()
main = hspec spec
spec :: Spec
spec = parallel $ do
describe "instructionsP" $
it "parses messages" $ do
let tape = P.parse instructionsP "fixture" "cpy -1 a\ninc a\ncpy a b\n"
tape `shouldBe` Right
[ Cpy (Right (-1)) (Reg 'a')
, Inc (Reg 'a')
, Cpy (Left $ Reg 'a') (Reg 'b')
]
describe "terminated" $ do
it "is terminated when pos is past end of tape" $ do
let vm = VM { registers = [], tape = [Inc (Reg 'a')], pos = 1 }
terminated vm `shouldBe` True
it "is not terminated when pos is within tape range" $ do
let vm = VM { registers = [], tape = [Inc (Reg 'a')], pos = 0 }
terminated vm `shouldBe` False
describe "run" $ do
it "runs a calculation" $ do
let egInput = "cpy 41 a\ninc a\ninc a\ndec a\njnz a 2\ndec a\n"
let pRes = P.parse instructionsP "fixture" egInput
let tape = either (error . show) id pRes
let vm = newVM tape
let vm' = run vm
registers vm' `shouldBe` [('a', 42), ('b', 0), ('c', 0), ('d', 0)]
pos vm' `shouldBe` 6
|
wfleming/advent-of-code-2016
|
2016/test/D12Spec.hs
|
bsd-3-clause
| 1,323 | 0 | 22 | 478 | 462 | 237 | 225 | 31 | 1 |
{-# LANGUAGE OverloadedStrings, ScopedTypeVariables #-}
module Main where
import Control.Concurrent
import Control.Monad
import Control.Monad.Identity
import Control.Monad.IO.Class
import Control.Monad.Ref
import Control.Monad.Trans.Class
import Control.Monad.Trans.Maybe
import Data.Dependent.Sum (DSum (..))
import Data.IORef
import qualified Data.Map.Lazy as Map
import Data.Maybe
import Data.Monoid ((<>))
import qualified Data.Text as T
import qualified Data.Text.IO as TIO
import GHCJS.DOM hiding (runWebGUI)
import qualified GHCJS.DOM.Types as DOM
import GHCJS.DOM.Document
import GHCJS.DOM.Element
import GHCJS.DOM.Node
import Foreign.JavaScript.TH
import Graphics.UI.Gtk hiding (Widget, (:=>))
import Graphics.UI.Gtk.WebKit.Types hiding (Event, Widget, Text, Window)
import Graphics.UI.Gtk.WebKit.WebView
import Graphics.UI.Gtk.WebKit.WebSettings
import Graphics.UI.Gtk.WebKit.WebFrame
import Reflex
import Reflex.PerformEvent.Base
import Reflex.Dom hiding (Window)
import Reflex.Host.Class
import System.Directory
main :: IO ()
main = do
forkIO $ initGUI >> mainGUI
handleCommands Map.empty
postGUIAsync mainQuit
putStrLn "Killed the GUI thread."
where
handleCommands windowsByName = do
line <- putStrLn "Command?" >> getLine
case words line of
["create", windowName] -> do
case Map.lookup windowName windowsByName of
Just _ -> do
putStrLn $ "A window named " ++ windowName ++ " already exists."
handleCommands windowsByName
Nothing -> do
initialDisplayText <- putStrLn "Initial text?" >> TIO.getLine
window <- postGUISync $ startUpdatableTextWindow initialDisplayText
putStrLn $ "Created a window named: " ++ windowName
handleCommands $ Map.insert windowName window windowsByName
["update", windowName] -> do
case Map.lookup windowName windowsByName of
Just window -> do
newDisplayText <- putStrLn "New text?" >> TIO.getLine
result <- postGUISync $ trySetDisplayText window newDisplayText
if result
then do
putStrLn "Changed the text."
handleCommands windowsByName
else do
putStrLn "Unable to update the text."
handleCommands $ Map.delete windowName windowsByName
Nothing -> do
putStrLn $ "No window name " ++ windowName ++ " exists."
handleCommands windowsByName
["quit"] -> putStrLn "Goodbye."
_ -> do
putStrLn "Not a recognized command."
handleCommands windowsByName
data UpdatableTextWindow =
UpdatableTextWindow {
trySetDisplayText :: T.Text -> IO Bool
}
startUpdatableTextWindow :: T.Text -> IO UpdatableTextWindow
startUpdatableTextWindow initialDisplayText = do
webView <- webViewNew
do
webFrame <- webViewGetMainFrame webView
pwd <- getCurrentDirectory
webFrameLoadString webFrame "" Nothing $ "file://" <> pwd <> "/"
fireRef <- newIORef Nothing
(displayTextUpdated, displayTextUpdatedTriggerRef) <- runSpiderHost $ do
(ev, tr) <- newEventWithTriggerRef
void $ subscribeEvent ev
return (ev, tr)
_ <- webView `on` loadFinished $ \_ -> do
Just doc <- liftM (fmap DOM.castToHTMLDocument) $ webViewGetDomDocument webView
Just body <- getBody doc
(_, FireCommand fire) <-
withWebViewSingleton webView $ \sWebView ->
attachWidget' body sWebView $
updatableTextWidget initialDisplayText displayTextUpdated
writeIORef fireRef $ Just fire
_ <- webView `on` objectDestroy $ writeIORef fireRef Nothing
scrolledWindow <- scrolledWindowNew Nothing Nothing
scrolledWindow `containerAdd` webView
do
window <- windowNew
-- _ <- timeoutAddFull (yield >> return True) priorityHigh 10 -- not sure what this does; leaving it out for now
window `containerAdd` scrolledWindow
widgetShowAll window
return $
UpdatableTextWindow {
trySetDisplayText = \newDisplayText -> fmap isJust . runMaybeT $ do
fire <- MaybeT $ readIORef fireRef
displayTextUpdatedTrigger <- MaybeT $ readRef displayTextUpdatedTriggerRef
lift $ runSpiderHost $ fire [displayTextUpdatedTrigger :=> Identity newDisplayText] $ return ()
return ()
}
updatableTextWidget :: T.Text -> Event Spider T.Text -> Widget v ()
updatableTextWidget initialDisplayText displayTextUpdated =
el "div" $ do
displayText <- holdDyn initialDisplayText displayTextUpdated
dynText displayText
|
Rotaerk/windowTest
|
src/Main.hs
|
bsd-3-clause
| 4,619 | 0 | 24 | 1,075 | 1,124 | 573 | 551 | 112 | 7 |
-- | Compiling Ivory to ACL2.
module Ivory.Compile.ACL2
( compile
, var
, lit
) where
import qualified Language.ACL2 as A
import qualified Ivory.Language.Syntax.AST as I
import qualified Ivory.Language.Syntax.Type as I
import qualified Ivory.Language.Syntax.Names as I
import qualified Ivory.Compile.ACL2.Compile as M
import qualified Ivory.Compile.ACL2.CLL as M
import qualified Ivory.Compile.ACL2.Expr as M
-- | Compiles an Ivory module to ACL2.
compile :: I.Module -> [A.Expr]
compile = M.compile . cllModule
-- | Convert an Ivory module to CLL.
cllModule :: I.Module -> [M.Proc]
cllModule = map cllProc . procs
where
procs :: I.Module -> [I.Proc]
procs m = I.public (I.modProcs m) ++ I.private (I.modProcs m)
cllProc :: I.Proc -> M.Proc
cllProc p = M.Proc (I.procSym p) (map (var . I.tValue) $ I.procArgs p) Nothing requires ensures body
where
body = map cllStmt (I.procBody p)
requires :: [M.Expr]
requires = map (cllExpr . cond . I.getRequire) $ I.procRequires p
ensures :: [M.Expr -> M.Expr]
ensures = map (retval . cllExpr . cond . I.getEnsure ) $ I.procEnsures p
cond a = case a of
I.CondBool a -> a
I.CondDeref _ _ _ _ -> error $ "CondDeref not supported."
retval :: M.Expr -> M.Expr -> M.Expr
retval a ret = retval a
where
retval :: M.Expr -> M.Expr
retval a = case a of
M.Var "retval" -> ret
M.Var a -> M.Var a
M.Literal a -> M.Literal a
M.Deref a -> M.Deref $ retval a
M.Array a -> M.Array $ map retval a
M.Struct a -> M.Struct [ (a, retval b) | (a, b) <- a ]
M.ArrayIndex a b -> M.ArrayIndex (retval a) (retval b)
M.StructIndex a b -> M.StructIndex (retval a) b
M.Intrinsic a b -> M.Intrinsic a $ map retval b
cllStmt :: I.Stmt -> M.Stmt
cllStmt a = case a of
I.Comment _ -> M.Null
I.IfTE a b c -> M.If (cllExpr a) (cllStmts b) (cllStmts c)
I.Return a -> M.Return $ Just $ cllExpr $ I.tValue a
I.ReturnVoid -> M.Return Nothing
I.Assert a -> M.Assert $ cllExpr a
I.CompilerAssert a -> M.Assert $ cllExpr a
I.Assume a -> M.Assume $ cllExpr a
I.Local _ a b -> M.Let (var a) $ cllInit b
I.AllocRef _ a b -> M.Block [M.Alloc $ var a, M.Store (M.Var $ var a) (M.Var $ var b)]
I.Deref _ a b -> M.Let (var a) $ M.Deref $ cllExpr b
I.Store _ a b -> M.Store (cllExpr a) (cllExpr b)
I.Call _ Nothing fun args -> M.Call Nothing (var fun) $ map (cllExpr . I.tValue) args
I.Call _ (Just r) fun args -> M.Call (Just $ var r) (var fun) $ map (cllExpr . I.tValue) args
I.Loop i init incr' body -> M.Loop (var i) (cllExpr init) incr (cllExpr to) (cllStmts body)
where
(incr, to) = case incr' of
I.IncrTo a -> (True, a)
I.DecrTo a -> (False, a)
--I.Comment _ -> M.Null
I.RefCopy _ _ _ -> error $ "Unsupported Ivory statement: " ++ show a
I.Forever _ -> error $ "Unsupported Ivory statement: " ++ show a
I.Break -> error $ "Unsupported Ivory statement: " ++ show a
I.Assign _ _ _ -> error $ "Unsupported Ivory statement: " ++ show a
where
cllStmts :: [I.Stmt] -> [M.Stmt]
cllStmts = map cllStmt
cllInit :: I.Init -> M.Expr
cllInit a = case a of
I.InitZero -> M.Literal $ M.LitInteger 0
I.InitExpr _ b -> cllExpr b
I.InitArray a -> M.Array $ map cllInit a
I.InitStruct a -> M.Struct [ (n, cllInit v) | (n, v) <- a ]
cllExpr :: I.Expr -> M.Expr
cllExpr a = case a of
I.ExpSym a -> M.Var a
I.ExpVar a -> M.Var $ var a
I.ExpLit a -> M.Literal $ lit a
I.ExpOp op args -> M.Intrinsic (cllIntrinsic op) $ map cllExpr args
I.ExpIndex _ a _ b -> M.ArrayIndex (M.Deref $ cllExpr a) (cllExpr b)
I.ExpLabel _ a b -> M.StructIndex (M.Deref $ cllExpr a) b
I.ExpToIx a _ -> cllExpr a -- Is it ok to ignore the maximum bound?
I.ExpSafeCast _ a -> cllExpr a
_ -> M.Literal $ M.LitInteger 0
--error $ "Unsupported Ivory expression: " ++ show a
cllIntrinsic :: I.ExpOp -> M.Intrinsic
cllIntrinsic op = case op of
I.ExpEq _ -> M.Eq
I.ExpNeq _ -> M.Neq
I.ExpCond -> M.Cond
I.ExpGt False _ -> M.Gt
I.ExpGt True _ -> M.Ge
I.ExpLt False _ -> M.Lt
I.ExpLt True _ -> M.Le
I.ExpNot -> M.Not
I.ExpAnd -> M.And
I.ExpOr -> M.Or
I.ExpMul -> M.Mul
I.ExpMod -> M.Mod
I.ExpAdd -> M.Add
I.ExpSub -> M.Sub
I.ExpNegate -> M.Negate
I.ExpAbs -> M.Abs
I.ExpSignum -> M.Signum
a -> error $ "Unsupported intrinsic: " ++ show a
lit :: I.Literal -> M.Literal
lit a = case a of
I.LitInteger a -> M.LitInteger a
I.LitFloat a -> M.LitFloat a
I.LitDouble a -> M.LitDouble a
I.LitChar a -> M.LitChar a
I.LitBool a -> M.LitBool a
I.LitNull -> M.LitNull
I.LitString a -> M.LitString a
class GetVar a where var :: a -> M.Var
instance GetVar I.Var where
var a = case a of
I.VarName a -> a
I.VarInternal a -> a
I.VarLitName a -> a
instance GetVar I.Name where
var a = case a of
I.NameSym a -> a
I.NameVar a -> var a
|
tomahawkins/ivory-backend-acl2
|
src/Ivory/Compile/ACL2.hs
|
bsd-3-clause
| 5,213 | 0 | 16 | 1,511 | 2,245 | 1,111 | 1,134 | 121 | 19 |
{-# LANGUAGE Arrows #-}
{-# LANGUAGE RecordWildCards #-}
module Wires.Camera
( camera
, CameraEvents(..)
) where
import Control.Arrow
import Control.Lens
import Control.Wire
import Control.Wires.Extra
import Data.Monoid
import Linear
import qualified Linear as L
import Prelude hiding ((.), id)
import qualified SDL.Event as SDL
import qualified SDL.Input as SDL
import SDL.Lens
data CameraEvents =
CameraEvents {sdlEvent :: Event SDL.Event
,stepPhysics :: Event Double}
camera :: Wire IO CameraEvents (V3 Float, M44 Float)
camera = proc CameraEvents{..} -> do
let keyboardEvent =
filterJust $ fmap (preview (payload . _KeyboardEvent)) sdlEvent
keyEvent s =
filterE (\SDL.KeyboardEventData{..} ->
SDL.keysymScancode keyboardEventKeysym == s)
keyboardEvent
forward = keyEvent SDL.ScancodeW
strafeLeft = keyEvent SDL.ScancodeA
backward = keyEvent SDL.ScancodeS
strafeRight = keyEvent SDL.ScancodeD
mouseMoved =
filterJust $
fmap (preview (payload . _MouseMotionEvent .
to SDL.mouseMotionEventRelMotion))
sdlEvent
V2 yaw pitch <- scan initialOrientation -< fmap reorientate mouseMoved
forwardSpeed <- keySpeed -< forward
backwardSpeed <- fmap negate keySpeed -< backward
leftSpeed <- fmap negate keySpeed -< strafeLeft
rightSpeed <- keySpeed -< strafeRight
let zSpeed = 7 * (forwardSpeed + backwardSpeed)
xSpeed = 7 * (leftSpeed + rightSpeed)
orientation = axisAngle (V3 0 1 0) yaw * axisAngle (V3 1 0 0) pitch
forwardVector = rotate orientation (V3 0 0 (-1))
upVector = rotate orientation (V3 0 1 0)
rightVector = rotate (axisAngle (V3 0 1 0) yaw) (V3 1 0 0)
eyePosition
<- scan initialPosition
-< fmap (\dt currentPosition ->
currentPosition +
forwardVector ^* zSpeed +
rightVector ^* xSpeed)
stepPhysics
returnA -<
(eyePosition, lookAt eyePosition (eyePosition + forwardVector) upVector)
where
initialPosition = V3 500 10 (-400)
initialOrientation = V2 0 0
mouseSensitivity = 0.002
-- TODO Clamp pitch so you can't flip the camera
reorientate (L.V2 x y) yawPitch =
yawPitch + negate (V2 (fromIntegral x * mouseSensitivity) (fromIntegral y * mouseSensitivity))
keySpeed = proc e ->
hold 0 -< fmap getSum $
mconcat [ Sum 1 <$ e `motion` SDL.Pressed
, Sum 0 <$ e `motion` SDL.Released
]
motion e m =
filterE (\SDL.KeyboardEventData{..} -> keyboardEventKeyMotion == m) e
|
ocharles/hs-quake-3
|
Wires/Camera.hs
|
bsd-3-clause
| 2,691 | 2 | 18 | 754 | 802 | 418 | 384 | 67 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
module Protocol.ROC.PointTypes.PointType21 where
import Data.Binary.Get (getByteString,
getWord8,
getWord32le,
Get)
import Data.ByteString (ByteString)
import Data.Word (Word8,Word32)
import Prelude (($),
return,
Eq,
Read,
Show)
data PointType21 = PointType21 {
pointType21PointTypeDesc :: !PointType21PointTypeDesc
,pointType21TemplatePointer :: !PointType21TemplatePointer
,pointType21NumParameters :: !PointType21NumParameters
,pointType21DisplayNum :: !PointType21DisplayNum
} deriving (Read,Eq, Show)
type PointType21PointTypeDesc = ByteString
type PointType21TemplatePointer = Word32
type PointType21NumParameters = Word8
type PointType21DisplayNum = Word8
pointType21Parser :: Get PointType21
pointType21Parser = do
pointTypeDesc <- getByteString 20
templatePointer <- getWord32le
numParameters <- getWord8
displayNum <- getWord8
return $ PointType21 pointTypeDesc templatePointer numParameters displayNum
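-- A decoding sketch (illustrative): 'rawBytes' is an assumed lazy ByteString
-- holding one 26-byte record (20-byte description, 4-byte little-endian
-- template pointer, two single-byte fields), decoded with Data.Binary.Get.
--
-- > import Data.Binary.Get (runGet)
-- > pt = runGet pointType21Parser rawBytes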
|
plow-technologies/roc-translator
|
src/Protocol/ROC/PointTypes/PointType21.hs
|
bsd-3-clause
| 1,375 | 0 | 9 | 517 | 210 | 121 | 89 | 38 | 1 |
module CommonMarkTests (construct) where
import qualified CMark
import Prelude hiding (init)
import Test.Tasty
import Test.Tasty.HUnit
import qualified Data.Text as Strict
import qualified Data.Text.Lazy as Text
import qualified Data.Text.Lazy.IO as TextIO
import qualified ElmFormat.Render.Markdown
import qualified Parse.Markdown
data State = CollectingInput | CollectingOutput | None
deriving Eq
data ParseState = ParseState
{ path :: String
, input :: [String]
, output :: [String]
, state :: State
, siblings :: [TestTree]
, children :: [TestTree]
, example :: Int
}
init :: ParseState
init = ParseState
{ path = ""
, input = []
, output = []
, state = None
, siblings = []
, children = []
, example = 1
}
step :: ParseState -> Text.Text -> ParseState
step (ParseState path input output state siblings children example) line =
if state == None && fmap fst (Text.uncons line) == Just '#' then
-- validate input, output is []
ParseState
{ path = Text.unpack line
, input = []
, output = []
, state = None
, siblings =
if null children
then siblings
else testGroup path (reverse children) : siblings
, children = []
, example = example
}
else if Text.unpack line == "```````````````````````````````` example" then
-- validate input, output is [], state is None
ParseState
{ path = path
, input = []
, output = []
, state = CollectingInput
, siblings = siblings
, children = children
, example = example
}
else if Text.unpack line == "````````````````````````````````" then
-- validate state == CollectingOutput
ParseState
{ path = path
, input = []
, output = []
, state = None
, siblings = siblings
, children = makeTest example (concat $ reverse input) (unlines $ reverse input) (unlines $ reverse output) : children
, example = example + 1
}
else if state == CollectingInput && Text.unpack line == "." then
-- validate output == []
ParseState
{ path = path
, input = input
, output = []
, state = CollectingOutput
, siblings = siblings
, children = children
, example = example
}
else
case state of
None ->
ParseState path input output state siblings children example
CollectingInput ->
ParseState
{ path = path
, input = Text.unpack line : input
, output = []
, state = state
, siblings = siblings
, children = children
, example = example
}
CollectingOutput ->
ParseState
{ path = path
, input = input
, output = Text.unpack line : output
, state = state
, siblings = siblings
, children = children
, example = example
}
done :: ParseState -> [TestTree]
done (ParseState path input output state siblings children _) =
-- validate parse finished cleanly?
reverse $ testGroup path children : siblings
makeTest :: Int -> String -> String -> String -> TestTree
makeTest i name input output =
let
source = Strict.map (\c -> if c == '→' then '\t' else c) $ Strict.pack $ input
formatted = ElmFormat.Render.Markdown.formatMarkdown (const Nothing) (Parse.Markdown.parse $ Strict.unpack source)
-- specOutput = Strict.map (\c -> if c == '→' then '\t' else c) $ Strict.pack output
description = "formatted markdown should render the same as the original\n\n"
++ Strict.unpack source
++ "\n"
++ formatted
in
testCase ("Example " ++ show i ++ ": " ++ name) $
assertEqual description
(CMark.commonmarkToHtml [] $ source)
(CMark.commonmarkToHtml [] $ Strict.pack formatted)
construct :: IO TestTree
construct =
do
spec <- TextIO.readFile "tests/test-files/CommonMark/spec.txt"
return $
testGroup "CommonMark" $
done $ foldl step init (Text.lines spec)
|
avh4/elm-format
|
tests/CommonMarkTests.hs
|
bsd-3-clause
| 4,615 | 0 | 16 | 1,748 | 1,085 | 622 | 463 | 114 | 8 |
{-# LANGUAGE TypeFamilies,RankNTypes,ImpredicativeTypes,FlexibleContexts,DeriveDataTypeable #-}
{-| Provides a common interface for all backend types.
-}
module Language.GTL.Backend.All where
import Language.GTL.Expression
import Language.GTL.Backend
import Language.GTL.Backend.Scade
import Language.GTL.Backend.None
import Language.GTL.Types
import Data.Map
import Data.Typeable
import Control.Monad.Error (MonadError(..))
import Misc.ProgramOptions as Opts
-- | Essentially a `GTLBackend' with the parameters instantiated, thus eliminating
-- the type variable.
data AllBackend = AllBackend
{ allTypecheck :: MonadError String m => ModelInterface -> m ModelInterface
, allAliases :: Map String GTLType
, allCInterface :: CInterface
, allVerifyLocal :: Integer -> [TypedExpr String] -> Map String GTLType -> Map String (Maybe GTLConstant) -> Map String (GTLType, GTLConstant) -> Opts.Options -> String -> IO (Maybe Bool)
} deriving Typeable
instance Show AllBackend where
show _ = "AllBackend"
instance Eq AllBackend where
(==) _ _ = False
instance Ord AllBackend where
compare _ _ = LT
-- | Try to initialize a given backend with a name and arguments.
-- If it works, it'll return Just with the 'AllBackend' representation.
tryInit :: GTLBackend b => b -> String -> Opts.Options -> [String] -> IO (Maybe AllBackend)
tryInit be name opts args
| backendName be == name = do
dat <- initBackend be opts args
return $ Just $ AllBackend
{ allTypecheck = typeCheckInterface be dat
, allAliases = backendGetAliases be dat
, allCInterface = cInterface be dat
, allVerifyLocal = backendVerify be dat
}
| otherwise = return Nothing
-- | Returns the first result that is not 'Nothing' from a list of functions
-- by applying the arguments to them.
firstM :: Monad m => [x -> y -> z -> m (Maybe a)] -> x -> y -> z -> m (Maybe a)
firstM (x:xs) p q r = do
res <- x p q r
case res of
Nothing -> firstM xs p q r
Just rr -> return (Just rr)
firstM [] _ _ _ = return Nothing
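-- For example (with throwaway arguments), the second function is the first
-- one to produce a 'Just', so its result is returned:
--
-- > firstM [\_ _ _ -> return Nothing, \x _ _ -> return (Just x)] 'a' () ()
-- > -- evaluates to: return (Just 'a')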
-- | Try to initialize the correct backend for a given backend name and arguments.
initAllBackend :: String -- ^ The name of the backend
-> Opts.Options -- ^ Options for the whole program
-> [String] -- ^ The arguments with which to initialize the backend
-> IO (Maybe AllBackend)
initAllBackend = firstM [tryInit Scade,tryInit None]
|
hguenther/gtl
|
lib/Language/GTL/Backend/All.hs
|
bsd-3-clause
| 2,514 | 0 | 18 | 589 | 625 | 330 | 295 | 45 | 2 |
module BlackjackServer where
import BlackjackData
import Network.NetSpec
import Network.NetSpec.Json
import Control.Monad
-- cabal install random-shuffle
import System.Random.Shuffle (shuffleM)
shuffle :: [a] -> IO [a]
shuffle = shuffleM -- TODO: avoid depending on random-shuffle
newGame :: BlackjackState
newGame = BJ [[],[]] fullDeck
where fullDeck = [Card s v | s <- [minBound .. maxBound]
, v <- [minBound .. maxBound]]
deal :: Monad m => Int -> StateT BlackjackState m ()
deal 0 = stateT $ \(BJ [h1,h2] (c:cs)) -> ((), BJ [c:h1, h2] cs)
deal 1 = stateT $ \(BJ [h1,h2] (c:cs)) -> ((), BJ [h1, c:h2] cs)
deal _ = stateT $ \s -> ((), s)
bustOrStands :: [BlackjackClientMessage] -> BlackjackState -> Bool
bustOrStands cs s = any isBust (hands s) || all isStand cs
whoWins :: BlackjackState -> Handle -> Handle -> (Handle, Handle)
whoWins (BJ [h0, h1] _) p0 p1
| isBust h1 = (p0, p1)
| isBust h0 = (p1, p0)
| bestVal h1 > bestVal h0 = (p1, p0)
| otherwise = (p0, p1)
whoWins _ _ _ = error "Unexpected BlackjackState format"
bjSpec :: NetSpec [] BlackjackState
bjSpec = ServerSpec {
_ports = map PortNumber [5001, 5002]
, _begin = \[h0, h1] -> flip execStateT newGame $ do
h0 ! YouAre 0
h1 ! YouAre 1
s <- get
deck' <- liftIO $ shuffle $ deck s
put $ s { deck = deck' }
replicateM_ 2 (deal 0 >> deal 1)
, _loop = \hs -> stopIf bustOrStands .: runStateT $
forM (zip hs [0..]) $ \(h, n) -> do
curHand <- ((!! n) . hands) <$> get
opponentHand <- ((!! rem (n+1) 2) . hands) <$> get
h ! YourTurn curHand (last opponentHand) (length opponentHand)
Just choice <- receive h
when (isHit choice) (deal n)
return choice
, _end = \[h0, h1] s -> do
let (winner, loser) = whoWins s h0 h1
winner ! YouWin s
loser ! YouLose s
}
main :: IO ()
main = runSpec bjSpec
|
DanBurton/netspec
|
examples/BlackjackServer.hs
|
bsd-3-clause
| 1,970 | 0 | 19 | 555 | 877 | 459 | 418 | 49 | 1 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
module Glug.Types (
MovieDetails (..)
, MovieSubtitles (..)
, IMDbId
, ApiKey
, WordCount (..)
, WordRank (..)
, Subtitle (..)
) where
import qualified Data.Text as T
import qualified Data.Text.Lazy as TL
import qualified Data.Time.Clock as C
import Data.Aeson
import Data.Int (Int32)
import GHC.Generics
-- | Information gained after scraping a movie's subtitles.
data MovieSubtitles = MovieSubtitles {
imdbid :: TL.Text -- ^ The IMDb ID of the movie
, subtitles :: [Subtitle] -- ^ The subtitles found
} deriving (Eq, Show)
-- | A single subtitle line.
data Subtitle = Subtitle {
dialogue :: T.Text
, timestamp :: C.DiffTime
} deriving (Eq, Show)
instance ToJSON Subtitle where
toJSON (Subtitle d t) = object ["dialogue" .= d, "timestamp" .= ts]
where
ts :: Integer
ts = round t
-- | Details about a movie from The Movie Database
data MovieDetails = MovieDetails {
runtime :: Integer -- ^ The runtime of the movie, in seconds
, poster :: T.Text -- ^ The path to the movie poster in TMDb's server
, overview :: T.Text -- ^ An english description of the movie
} deriving (Eq, Show, Generic)
instance ToJSON MovieDetails
-- | An ID from IMDb
type IMDbId = String
-- | The Api key for The Movie Database
type ApiKey = String
-- | A record of a given word's appearances in the movie
data WordCount = WordCount {
text :: T.Text -- ^ The word
, freq :: Int32 -- ^ Number of occurrences of this word
, occurrences :: [C.DiffTime] -- ^ A List of times this word occurs
} deriving (Show, Eq)
-- | A combination of a specific word and its heuristic ranking
data WordRank = WordRank {
wordcount :: WordCount -- ^ The word
, heuristic :: Int32 -- ^ The heuristic statistic, bigger is better
} deriving (Show, Eq)
instance Ord WordRank where
compare w1 w2 = compare (heuristic w1) (heuristic w2)
|
robmcl4/Glug
|
src/Glug/Types.hs
|
bsd-3-clause
| 2,013 | 0 | 10 | 492 | 420 | 258 | 162 | 47 | 0 |
module Hint.GHC (
Message, module X
) where
import GHC as X hiding (Phase, GhcT, runGhcT)
import Control.Monad.Ghc as X (GhcT, runGhcT)
import HscTypes as X (SourceError, srcErrorMessages, GhcApiError)
import Outputable as X (PprStyle, SDoc, Outputable(ppr),
showSDoc, showSDocForUser, showSDocUnqual,
withPprStyle, defaultErrStyle)
import ErrUtils as X (mkLocMessage, pprErrMsgBagWithLoc, MsgDoc) -- we alias MsgDoc as Message below
import DriverPhases as X (Phase(Cpp), HscSource(HsSrcFile))
import StringBuffer as X (stringToStringBuffer)
import Lexer as X (P(..), ParseResult(..), mkPState)
import Parser as X (parseStmt, parseType)
import FastString as X (fsLit)
#if __GLASGOW_HASKELL__ >= 710
import DynFlags as X (xFlags, xopt, LogAction, FlagSpec(..))
#else
import DynFlags as X (xFlags, xopt, LogAction)
#endif
#if __GLASGOW_HASKELL__ >= 800
import DynFlags as X (WarnReason(NoReason))
#endif
import PprTyThing as X (pprTypeForUser)
import SrcLoc as X (mkRealSrcLoc)
#if __GLASGOW_HASKELL__ >= 708
import ConLike as X (ConLike(RealDataCon))
#endif
#if __GLASGOW_HASKELL__ >= 708
import DynFlags as X (addWay', Way(..), dynamicGhc)
#endif
type Message = MsgDoc
|
meditans/hint
|
src/Hint/GHC.hs
|
bsd-3-clause
| 1,232 | 0 | 6 | 214 | 300 | 212 | 88 | 18 | 0 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TupleSections #-}
module Stack.FileWatch
( fileWatch
, fileWatchPoll
, printExceptionStderr
) where
import Blaze.ByteString.Builder (toLazyByteString, copyByteString)
import Blaze.ByteString.Builder.Char.Utf8 (fromShow)
import Control.Concurrent.Async (race_)
import Control.Concurrent.STM
import Control.Exception (Exception)
import Control.Exception.Enclosed (tryAny)
import Control.Monad (forever, unless, when)
import qualified Data.ByteString.Lazy as L
import qualified Data.Map.Strict as Map
import Data.Monoid ((<>))
import Data.Set (Set)
import qualified Data.Set as Set
import Data.String (fromString)
import Data.Traversable (forM)
import Ignore
import Path
import System.FSNotify
import System.IO (stderr)
-- | Print an exception to stderr
printExceptionStderr :: Exception e => e -> IO ()
printExceptionStderr e =
L.hPut stderr $ toLazyByteString $ fromShow e <> copyByteString "\n"
fileWatch :: IO (Path Abs Dir)
-> ((Set (Path Abs File) -> IO ()) -> IO ())
-> IO ()
fileWatch = fileWatchConf defaultConfig
fileWatchPoll :: IO (Path Abs Dir)
-> ((Set (Path Abs File) -> IO ()) -> IO ())
-> IO ()
fileWatchPoll = fileWatchConf $ defaultConfig { confUsePolling = True }
-- | Run an action, watching for file changes
--
-- The action provided takes a callback that is used to set the files to be
-- watched. When any of those files are changed, we rerun the action again.
fileWatchConf :: WatchConfig
-> IO (Path Abs Dir)
-> ((Set (Path Abs File) -> IO ()) -> IO ())
-> IO ()
fileWatchConf cfg getProjectRoot inner = withManagerConf cfg $ \manager -> do
allFiles <- newTVarIO Set.empty
dirtyVar <- newTVarIO True
watchVar <- newTVarIO Map.empty
projRoot <- getProjectRoot
mChecker <- findIgnoreFiles [VCSGit, VCSMercurial, VCSDarcs] projRoot >>= buildChecker
(FileIgnoredChecker isFileIgnored) <-
case mChecker of
Left err ->
do putStrLn $ "Failed to parse VCS's ignore file: " ++ err
return $ FileIgnoredChecker (const False)
Right chk -> return chk
let onChange event = atomically $ do
files <- readTVar allFiles
when (eventPath event `Set.member` files) (writeTVar dirtyVar True)
setWatched :: Set (Path Abs File) -> IO ()
setWatched files = do
atomically $ writeTVar allFiles $ Set.map toFilePath files
watch0 <- readTVarIO watchVar
let actions = Map.mergeWithKey
keepListening
stopListening
startListening
watch0
newDirs
watch1 <- forM (Map.toList actions) $ \(k, mmv) -> do
mv <- mmv
return $
case mv of
Nothing -> Map.empty
Just v -> Map.singleton k v
atomically $ writeTVar watchVar $ Map.unions watch1
where
newDirs = Map.fromList $ map (, ())
$ Set.toList
$ Set.map parent files
keepListening _dir listen () = Just $ return $ Just listen
stopListening = Map.map $ \f -> do
() <- f
return Nothing
startListening = Map.mapWithKey $ \dir () -> do
let dir' = fromString $ toFilePath dir
listen <- watchDir manager dir' (not . isFileIgnored . eventPath) onChange
return $ Just listen
let watchInput = do
line <- getLine
unless (line == "quit") $ do
case line of
"help" -> do
putStrLn ""
putStrLn "help: display this help"
putStrLn "quit: exit"
putStrLn "build: force a rebuild"
putStrLn "watched: display watched directories"
"build" -> atomically $ writeTVar dirtyVar True
"watched" -> do
watch <- readTVarIO watchVar
mapM_ (putStrLn . toFilePath) (Map.keys watch)
"" -> atomically $ writeTVar dirtyVar True
_ -> putStrLn $ concat
[ "Unknown command: "
, show line
, ". Try 'help'"
]
watchInput
race_ watchInput $ forever $ do
atomically $ do
dirty <- readTVar dirtyVar
check dirty
eres <- tryAny $ inner setWatched
-- Clear dirtiness flag after the build to avoid an infinite
-- loop caused by the build itself triggering dirtiness. This
-- could be viewed as a bug, since files changed during the
-- build will not trigger an extra rebuild, but overall seems
-- like better behavior. See
-- https://github.com/commercialhaskell/stack/issues/822
atomically $ writeTVar dirtyVar False
case eres of
Left e -> printExceptionStderr e
Right () -> putStrLn "Success! Waiting for next file change."
putStrLn "Type help for available commands. Press enter to force a rebuild."
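-- A usage sketch (the names below are illustrative and not part of this
-- module): the inner action reports the files it depends on through the
-- callback and is re-run whenever one of them changes on disk.
--
-- > watchAndRebuild :: IO (Path Abs Dir) -> IO ()
-- > watchAndRebuild getProjectRoot = fileWatch getProjectRoot $ \setWatched -> do
-- >   files <- discoverInputFiles   -- hypothetical: IO (Set (Path Abs File))
-- >   setWatched files
-- >   runBuild                      -- hypothetical build action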
|
rrnewton/stack
|
src/Stack/FileWatch.hs
|
bsd-3-clause
| 5,390 | 0 | 25 | 1,889 | 1,307 | 649 | 658 | 112 | 8 |
-----------------------------------------------------------------------------
-- |
-- Module : Control.Concurrent.STM.TMVar
-- Copyright : (c) The University of Glasgow 2004
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : non-portable (requires STM)
--
-- TMVar: Transactional MVars, for use in the STM monad
--
-----------------------------------------------------------------------------
module Control.Concurrent.STM.TMVar (
-- * TVars
TMVar,
newTMVar,
newEmptyTMVar,
newTMVarIO,
newEmptyTMVarIO,
takeTMVar,
putTMVar,
readTMVar,
swapTMVar,
tryTakeTMVar,
tryPutTMVar,
isEmptyTMVar
) where
import GHC.Conc
newtype TMVar a = TMVar (TVar (Maybe a))
newTMVar :: a -> STM (TMVar a)
newTMVar a = do
t <- newTVar (Just a)
return (TMVar t)
newTMVarIO :: a -> IO (TMVar a)
newTMVarIO a = do
t <- newTVarIO (Just a)
return (TMVar t)
newEmptyTMVar :: STM (TMVar a)
newEmptyTMVar = do
t <- newTVar Nothing
return (TMVar t)
newEmptyTMVarIO :: IO (TMVar a)
newEmptyTMVarIO = do
t <- newTVarIO Nothing
return (TMVar t)
takeTMVar :: TMVar a -> STM a
takeTMVar (TMVar t) = do
m <- readTVar t
case m of
Nothing -> retry
Just a -> do writeTVar t Nothing; return a
tryTakeTMVar :: TMVar a -> STM (Maybe a)
tryTakeTMVar (TMVar t) = do
m <- readTVar t
case m of
Nothing -> return Nothing
Just a -> do writeTVar t Nothing; return (Just a)
putTMVar :: TMVar a -> a -> STM ()
putTMVar (TMVar t) a = do
m <- readTVar t
case m of
Nothing -> do writeTVar t (Just a); return ()
Just _ -> retry
tryPutTMVar :: TMVar a -> a -> STM Bool
tryPutTMVar (TMVar t) a = do
m <- readTVar t
case m of
Nothing -> do writeTVar t (Just a); return True
Just _ -> return False
readTMVar :: TMVar a -> STM a
readTMVar (TMVar t) = do
m <- readTVar t
case m of
Nothing -> retry
Just a -> return a
swapTMVar :: TMVar a -> a -> STM a
swapTMVar (TMVar t) new = do
m <- readTVar t
case m of
Nothing -> retry
Just old -> do writeTVar t (Just new); return old
isEmptyTMVar :: TMVar a -> STM Bool
isEmptyTMVar (TMVar t) = do
m <- readTVar t
case m of
Nothing -> return True
Just _ -> return False
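-- A small usage sketch (illustrative, not part of this module): a one-slot
-- mailbox; 'atomically' comes from GHC.Conc, imported above.
--
-- > example :: IO ()
-- > example = do
-- >   box <- newEmptyTMVarIO
-- >   atomically (putTMVar box "hello")
-- >   msg <- atomically (takeTMVar box)
-- >   putStrLn msg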
|
FranklinChen/hugs98-plus-Sep2006
|
packages/stm/Control/Concurrent/STM/TMVar.hs
|
bsd-3-clause
| 2,299 | 25 | 14 | 533 | 870 | 418 | 452 | 73 | 2 |
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.GHC
-- Copyright : Isaac Jones 2003-2007
-- License : BSD3
--
-- Maintainer : [email protected]
-- Portability : portable
--
-- This is a fairly large module. It contains most of the GHC-specific code for
-- configuring, building and installing packages. It also exports a function
-- for finding out what packages are already installed. Configuring involves
-- finding the @ghc@ and @ghc-pkg@ programs, finding what language extensions
-- this version of ghc supports and returning a 'Compiler' value.
--
-- 'getInstalledPackages' involves calling the @ghc-pkg@ program to find out
-- what packages are installed.
--
-- Building is somewhat complex as there is quite a bit of information to take
-- into account. We have to build libs and programs, possibly for profiling and
-- shared libs. We have to support building libraries that will be usable by
-- GHCi and also ghc's @-split-objs@ feature. We have to compile any C files
-- using ghc. Linking, especially for @split-objs@ is remarkably complex,
-- partly because there tend to be 1,000's of @.o@ files and this can often be
-- more than we can pass to the @ld@ or @ar@ programs in one go.
--
-- Installing for libs and exes involves finding the right files and copying
-- them to the right places. One of the more tricky things about this module is
-- remembering the layout of files in the build directory (which is not
-- explicitly documented) and thus what search dirs are used for various kinds
-- of files.
module Distribution.Simple.GHC (
getGhcInfo,
configure, getInstalledPackages, getPackageDBContents,
buildLib, buildExe,
replLib, replExe,
startInterpreter,
installLib, installExe,
libAbiHash,
hcPkgInfo,
registerPackage,
componentGhcOptions,
componentCcGhcOptions,
getLibDir,
isDynamic,
getGlobalPackageDB,
pkgRoot
) where
import qualified Distribution.Simple.GHC.IPI641 as IPI641
import qualified Distribution.Simple.GHC.IPI642 as IPI642
import qualified Distribution.Simple.GHC.Internal as Internal
import Distribution.Simple.GHC.ImplInfo
import Distribution.PackageDescription as PD
( PackageDescription(..), BuildInfo(..), Executable(..), Library(..)
, allExtensions, libModules, exeModules
, hcOptions, hcSharedOptions, hcProfOptions )
import Distribution.InstalledPackageInfo
( InstalledPackageInfo )
import qualified Distribution.InstalledPackageInfo as InstalledPackageInfo
( InstalledPackageInfo(..) )
import Distribution.Simple.PackageIndex (InstalledPackageIndex)
import qualified Distribution.Simple.PackageIndex as PackageIndex
import Distribution.Simple.LocalBuildInfo
( LocalBuildInfo(..), ComponentLocalBuildInfo(..)
, absoluteInstallDirs, depLibraryPaths )
import qualified Distribution.Simple.Hpc as Hpc
import Distribution.Simple.InstallDirs hiding ( absoluteInstallDirs )
import Distribution.Simple.BuildPaths
import Distribution.Simple.Utils
import Distribution.Package
( PackageName(..) )
import qualified Distribution.ModuleName as ModuleName
import Distribution.Simple.Program
( Program(..), ConfiguredProgram(..), ProgramConfiguration
, ProgramSearchPath
, rawSystemProgramStdout, rawSystemProgramStdoutConf
, getProgramInvocationOutput, requireProgramVersion, requireProgram
, userMaybeSpecifyPath, programPath, lookupProgram, addKnownProgram
, ghcProgram, ghcPkgProgram, haddockProgram, hsc2hsProgram, ldProgram )
import qualified Distribution.Simple.Program.HcPkg as HcPkg
import qualified Distribution.Simple.Program.Ar as Ar
import qualified Distribution.Simple.Program.Ld as Ld
import qualified Distribution.Simple.Program.Strip as Strip
import Distribution.Simple.Program.GHC
import Distribution.Simple.Setup
( toFlag, fromFlag, fromFlagOrDefault, configCoverage, configDistPref )
import qualified Distribution.Simple.Setup as Cabal
( Flag(..) )
import Distribution.Simple.Compiler
( CompilerFlavor(..), CompilerId(..), Compiler(..), compilerVersion
, PackageDB(..), PackageDBStack, AbiTag(..) )
import Distribution.Version
( Version(..), anyVersion, orLaterVersion )
import Distribution.System
( Platform(..), OS(..) )
import Distribution.Verbosity
import Distribution.Text
( display )
import Distribution.Utils.NubList
( NubListR, overNubListR, toNubListR )
import Language.Haskell.Extension (Extension(..), KnownExtension(..))
import Control.Monad ( unless, when )
import Data.Char ( isDigit, isSpace )
import Data.List
import qualified Data.Map as M ( fromList )
import Data.Maybe ( catMaybes )
#if __GLASGOW_HASKELL__ < 710
import Data.Monoid ( Monoid(..) )
#endif
import Data.Version ( showVersion )
import System.Directory
( doesFileExist, getAppUserDataDirectory, createDirectoryIfMissing )
import System.FilePath ( (</>), (<.>), takeExtension,
takeDirectory, replaceExtension,
splitExtension, isRelative )
import qualified System.Info
-- -----------------------------------------------------------------------------
-- Configuring
configure :: Verbosity -> Maybe FilePath -> Maybe FilePath
-> ProgramConfiguration
-> IO (Compiler, Maybe Platform, ProgramConfiguration)
configure verbosity hcPath hcPkgPath conf0 = do
(ghcProg, ghcVersion, conf1) <-
requireProgramVersion verbosity ghcProgram
(orLaterVersion (Version [6,4] []))
(userMaybeSpecifyPath "ghc" hcPath conf0)
let implInfo = ghcVersionImplInfo ghcVersion
-- This is slightly tricky, we have to configure ghc first, then we use the
-- location of ghc to help find ghc-pkg in the case that the user did not
-- specify the location of ghc-pkg directly:
(ghcPkgProg, ghcPkgVersion, conf2) <-
requireProgramVersion verbosity ghcPkgProgram {
programFindLocation = guessGhcPkgFromGhcPath ghcProg
}
anyVersion (userMaybeSpecifyPath "ghc-pkg" hcPkgPath conf1)
when (ghcVersion /= ghcPkgVersion) $ die $
"Version mismatch between ghc and ghc-pkg: "
++ programPath ghcProg ++ " is version " ++ display ghcVersion ++ " "
++ programPath ghcPkgProg ++ " is version " ++ display ghcPkgVersion
-- Likewise we try to find the matching hsc2hs and haddock programs.
let hsc2hsProgram' = hsc2hsProgram {
programFindLocation = guessHsc2hsFromGhcPath ghcProg
}
haddockProgram' = haddockProgram {
programFindLocation = guessHaddockFromGhcPath ghcProg
}
conf3 = addKnownProgram haddockProgram' $
addKnownProgram hsc2hsProgram' conf2
languages <- Internal.getLanguages verbosity implInfo ghcProg
extensions <- Internal.getExtensions verbosity implInfo ghcProg
ghcInfo <- Internal.getGhcInfo verbosity implInfo ghcProg
let ghcInfoMap = M.fromList ghcInfo
let comp = Compiler {
compilerId = CompilerId GHC ghcVersion,
compilerAbiTag = NoAbiTag,
compilerCompat = [],
compilerLanguages = languages,
compilerExtensions = extensions,
compilerProperties = ghcInfoMap
}
compPlatform = Internal.targetPlatform ghcInfo
-- configure gcc and ld
conf4 = Internal.configureToolchain implInfo ghcProg ghcInfoMap conf3
return (comp, compPlatform, conf4)
-- | Given something like /usr/local/bin/ghc-6.6.1(.exe) we try and find
-- the corresponding tool; e.g. if the tool is ghc-pkg, we try looking
-- for a versioned or unversioned ghc-pkg in the same dir, that is:
--
-- > /usr/local/bin/ghc-pkg-ghc-6.6.1(.exe)
-- > /usr/local/bin/ghc-pkg-6.6.1(.exe)
-- > /usr/local/bin/ghc-pkg(.exe)
--
guessToolFromGhcPath :: Program -> ConfiguredProgram
-> Verbosity -> ProgramSearchPath
-> IO (Maybe FilePath)
guessToolFromGhcPath tool ghcProg verbosity searchpath
= do let toolname = programName tool
path = programPath ghcProg
dir = takeDirectory path
versionSuffix = takeVersionSuffix (dropExeExtension path)
guessNormal = dir </> toolname <.> exeExtension
guessGhcVersioned = dir </> (toolname ++ "-ghc" ++ versionSuffix)
<.> exeExtension
guessVersioned = dir </> (toolname ++ versionSuffix)
<.> exeExtension
guesses | null versionSuffix = [guessNormal]
| otherwise = [guessGhcVersioned,
guessVersioned,
guessNormal]
info verbosity $ "looking for tool " ++ toolname
++ " near compiler in " ++ dir
exists <- mapM doesFileExist guesses
case [ file | (file, True) <- zip guesses exists ] of
-- If we can't find it near ghc, fall back to the usual
-- method.
[] -> programFindLocation tool verbosity searchpath
(fp:_) -> do info verbosity $ "found " ++ toolname ++ " in " ++ fp
return (Just fp)
where takeVersionSuffix :: FilePath -> String
takeVersionSuffix = takeWhileEndLE isSuffixChar
isSuffixChar :: Char -> Bool
isSuffixChar c = isDigit c || c == '.' || c == '-'
dropExeExtension :: FilePath -> FilePath
dropExeExtension filepath =
case splitExtension filepath of
(filepath', extension) | extension == exeExtension -> filepath'
| otherwise -> filepath
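-- A usage sketch (not part of the original code): with a configured @ghcProg@
-- whose 'programPath' is @/usr/local/bin/ghc-7.8.4@, probing for hsc2hs would
-- try hsc2hs-ghc-7.8.4, then hsc2hs-7.8.4, then hsc2hs in that directory,
-- before falling back to the ordinary program search:
--
-- > mbHsc2hs <- guessToolFromGhcPath hsc2hsProgram ghcProg verbosity searchpath
--
-- Here @mbHsc2hs@ and the concrete version number are illustrative only.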
-- | Given something like /usr/local/bin/ghc-6.6.1(.exe) we try and find a
-- corresponding ghc-pkg, we try looking for both a versioned and unversioned
-- ghc-pkg in the same dir, that is:
--
-- > /usr/local/bin/ghc-pkg-ghc-6.6.1(.exe)
-- > /usr/local/bin/ghc-pkg-6.6.1(.exe)
-- > /usr/local/bin/ghc-pkg(.exe)
--
guessGhcPkgFromGhcPath :: ConfiguredProgram
-> Verbosity -> ProgramSearchPath -> IO (Maybe FilePath)
guessGhcPkgFromGhcPath = guessToolFromGhcPath ghcPkgProgram
-- | Given something like /usr/local/bin/ghc-6.6.1(.exe) we try and find a
-- corresponding hsc2hs, we try looking for both a versioned and unversioned
-- hsc2hs in the same dir, that is:
--
-- > /usr/local/bin/hsc2hs-ghc-6.6.1(.exe)
-- > /usr/local/bin/hsc2hs-6.6.1(.exe)
-- > /usr/local/bin/hsc2hs(.exe)
--
guessHsc2hsFromGhcPath :: ConfiguredProgram
-> Verbosity -> ProgramSearchPath -> IO (Maybe FilePath)
guessHsc2hsFromGhcPath = guessToolFromGhcPath hsc2hsProgram
-- | Given something like /usr/local/bin/ghc-6.6.1(.exe) we try and find a
-- corresponding haddock, we try looking for both a versioned and unversioned
-- haddock in the same dir, that is:
--
-- > /usr/local/bin/haddock-ghc-6.6.1(.exe)
-- > /usr/local/bin/haddock-6.6.1(.exe)
-- > /usr/local/bin/haddock(.exe)
--
guessHaddockFromGhcPath :: ConfiguredProgram
-> Verbosity -> ProgramSearchPath -> IO (Maybe FilePath)
guessHaddockFromGhcPath = guessToolFromGhcPath haddockProgram
getGhcInfo :: Verbosity -> ConfiguredProgram -> IO [(String, String)]
getGhcInfo verbosity ghcProg = Internal.getGhcInfo verbosity implInfo ghcProg
where
Just version = programVersion ghcProg
implInfo = ghcVersionImplInfo version
-- | Given a single package DB, return all installed packages.
getPackageDBContents :: Verbosity -> PackageDB -> ProgramConfiguration
-> IO InstalledPackageIndex
getPackageDBContents verbosity packagedb conf = do
pkgss <- getInstalledPackages' verbosity [packagedb] conf
toPackageIndex verbosity pkgss conf
-- | Given a package DB stack, return all installed packages.
getInstalledPackages :: Verbosity -> Compiler -> PackageDBStack -> ProgramConfiguration
-> IO InstalledPackageIndex
getInstalledPackages verbosity comp packagedbs conf = do
checkPackageDbEnvVar
checkPackageDbStack comp packagedbs
pkgss <- getInstalledPackages' verbosity packagedbs conf
index <- toPackageIndex verbosity pkgss conf
return $! hackRtsPackage index
where
hackRtsPackage index =
case PackageIndex.lookupPackageName index (PackageName "rts") of
[(_,[rts])]
-> PackageIndex.insert (removeMingwIncludeDir rts) index
_ -> index -- No (or multiple) ghc rts package is registered!!
-- Feh, whatever, the ghc test suite does some crazy stuff.
-- | Given a list of @(PackageDB, InstalledPackageInfo)@ pairs, produce a
-- @PackageIndex@. Helper function used by 'getPackageDBContents' and
-- 'getInstalledPackages'.
toPackageIndex :: Verbosity
-> [(PackageDB, [InstalledPackageInfo])]
-> ProgramConfiguration
-> IO InstalledPackageIndex
toPackageIndex verbosity pkgss conf = do
-- On Windows, various fields have $topdir/foo rather than full
-- paths. We need to substitute the right value in so that when
-- we, for example, call gcc, we have proper paths to give it.
topDir <- getLibDir' verbosity ghcProg
let indices = [ PackageIndex.fromList (map (Internal.substTopDir topDir) pkgs)
| (_, pkgs) <- pkgss ]
return $! (mconcat indices)
where
Just ghcProg = lookupProgram ghcProgram conf
getLibDir :: Verbosity -> LocalBuildInfo -> IO FilePath
getLibDir verbosity lbi =
dropWhileEndLE isSpace `fmap`
rawSystemProgramStdoutConf verbosity ghcProgram
(withPrograms lbi) ["--print-libdir"]
getLibDir' :: Verbosity -> ConfiguredProgram -> IO FilePath
getLibDir' verbosity ghcProg =
dropWhileEndLE isSpace `fmap`
rawSystemProgramStdout verbosity ghcProg ["--print-libdir"]
-- | Return the 'FilePath' to the global GHC package database.
getGlobalPackageDB :: Verbosity -> ConfiguredProgram -> IO FilePath
getGlobalPackageDB verbosity ghcProg =
dropWhileEndLE isSpace `fmap`
rawSystemProgramStdout verbosity ghcProg ["--print-global-package-db"]
checkPackageDbEnvVar :: IO ()
checkPackageDbEnvVar =
Internal.checkPackageDbEnvVar "GHC" "GHC_PACKAGE_PATH"
checkPackageDbStack :: Compiler -> PackageDBStack -> IO ()
checkPackageDbStack comp =
let ghcVersion = compilerVersion comp
in if ghcVersion < Version [7,6] []
then checkPackageDbStackPre76
else checkPackageDbStackPost76
checkPackageDbStackPost76 :: PackageDBStack -> IO ()
checkPackageDbStackPost76 (GlobalPackageDB:rest)
| GlobalPackageDB `notElem` rest = return ()
checkPackageDbStackPost76 rest
| GlobalPackageDB `elem` rest =
die $ "If the global package db is specified, it must be "
++ "specified first and cannot be specified multiple times"
checkPackageDbStackPost76 _ = return ()
checkPackageDbStackPre76 :: PackageDBStack -> IO ()
checkPackageDbStackPre76 (GlobalPackageDB:rest)
| GlobalPackageDB `notElem` rest = return ()
checkPackageDbStackPre76 rest
| GlobalPackageDB `notElem` rest =
die $ "With current ghc versions the global package db is always used "
++ "and must be listed first. This ghc limitation is lifted in GHC 7.6,"
++ "see http://hackage.haskell.org/trac/ghc/ticket/5977"
checkPackageDbStackPre76 _ =
die $ "If the global package db is specified, it must be "
++ "specified first and cannot be specified multiple times"
-- GHC < 6.10 put "$topdir/include/mingw" in rts's installDirs. This
-- breaks when you want to use a different gcc, so we need to filter
-- it out.
removeMingwIncludeDir :: InstalledPackageInfo -> InstalledPackageInfo
removeMingwIncludeDir pkg =
let ids = InstalledPackageInfo.includeDirs pkg
ids' = filter (not . ("mingw" `isSuffixOf`)) ids
in pkg { InstalledPackageInfo.includeDirs = ids' }
-- | Get the packages from specific PackageDBs, not cumulative.
--
getInstalledPackages' :: Verbosity -> [PackageDB] -> ProgramConfiguration
-> IO [(PackageDB, [InstalledPackageInfo])]
getInstalledPackages' verbosity packagedbs conf
| ghcVersion >= Version [6,9] [] =
sequence
[ do pkgs <- HcPkg.dump (hcPkgInfo conf) verbosity packagedb
return (packagedb, pkgs)
| packagedb <- packagedbs ]
where
Just ghcProg = lookupProgram ghcProgram conf
Just ghcVersion = programVersion ghcProg
getInstalledPackages' verbosity packagedbs conf = do
str <- rawSystemProgramStdoutConf verbosity ghcPkgProgram conf ["list"]
let pkgFiles = [ init line | line <- lines str, last line == ':' ]
dbFile packagedb = case (packagedb, pkgFiles) of
(GlobalPackageDB, global:_) -> return $ Just global
(UserPackageDB, _global:user:_) -> return $ Just user
(UserPackageDB, _global:_) -> return $ Nothing
(SpecificPackageDB specific, _) -> return $ Just specific
_ -> die "cannot read ghc-pkg package listing"
pkgFiles' <- mapM dbFile packagedbs
sequence [ withFileContents file $ \content -> do
pkgs <- readPackages file content
return (db, pkgs)
| (db , Just file) <- zip packagedbs pkgFiles' ]
where
-- Depending on the version of ghc we use a different type's Read
-- instance to parse the package file and then convert.
-- It's a bit yuck. But that's what we get for using Read/Show.
readPackages
| ghcVersion >= Version [6,4,2] []
= \file content -> case reads content of
[(pkgs, _)] -> return (map IPI642.toCurrent pkgs)
_ -> failToRead file
| otherwise
= \file content -> case reads content of
[(pkgs, _)] -> return (map IPI641.toCurrent pkgs)
_ -> failToRead file
Just ghcProg = lookupProgram ghcProgram conf
Just ghcVersion = programVersion ghcProg
failToRead file = die $ "cannot read ghc package database " ++ file
-- -----------------------------------------------------------------------------
-- Building
-- | Build a library with GHC.
--
buildLib, replLib :: Verbosity -> Cabal.Flag (Maybe Int)
-> PackageDescription -> LocalBuildInfo
-> Library -> ComponentLocalBuildInfo -> IO ()
buildLib = buildOrReplLib False
replLib = buildOrReplLib True
buildOrReplLib :: Bool -> Verbosity -> Cabal.Flag (Maybe Int)
-> PackageDescription -> LocalBuildInfo
-> Library -> ComponentLocalBuildInfo -> IO ()
buildOrReplLib forRepl verbosity numJobs pkg_descr lbi lib clbi = do
let libName = componentLibraryName clbi
libTargetDir = buildDir lbi
whenVanillaLib forceVanilla =
when (forceVanilla || withVanillaLib lbi)
whenProfLib = when (withProfLib lbi)
whenSharedLib forceShared =
when (forceShared || withSharedLib lbi)
whenGHCiLib = when (withGHCiLib lbi && withVanillaLib lbi)
ifReplLib = when forRepl
comp = compiler lbi
ghcVersion = compilerVersion comp
implInfo = getImplInfo comp
(Platform _hostArch hostOS) = hostPlatform lbi
(ghcProg, _) <- requireProgram verbosity ghcProgram (withPrograms lbi)
let runGhcProg = runGHC verbosity ghcProg comp
libBi <- hackThreadedFlag verbosity
comp (withProfLib lbi) (libBuildInfo lib)
let isGhcDynamic = isDynamic comp
dynamicTooSupported = supportsDynamicToo comp
doingTH = EnableExtension TemplateHaskell `elem` allExtensions libBi
forceVanillaLib = doingTH && not isGhcDynamic
forceSharedLib = doingTH && isGhcDynamic
-- TH always needs default libs, even when building for profiling
-- Determine if program coverage should be enabled and if so, what
-- '-hpcdir' should be.
let isCoverageEnabled = fromFlag $ configCoverage $ configFlags lbi
-- Component name. Not 'libName' because that has the "HS" prefix
-- that GHC gives Haskell libraries.
cname = display $ PD.package $ localPkgDescr lbi
distPref = fromFlag $ configDistPref $ configFlags lbi
hpcdir way
| forRepl = mempty -- HPC is not supported in ghci
| isCoverageEnabled = toFlag $ Hpc.mixDir distPref way cname
| otherwise = mempty
createDirectoryIfMissingVerbose verbosity True libTargetDir
-- TODO: do we need to put hs-boot files into place for mutually recursive
-- modules?
let cObjs = map (`replaceExtension` objExtension) (cSources libBi)
baseOpts = componentGhcOptions verbosity lbi libBi clbi libTargetDir
vanillaOpts = baseOpts `mappend` mempty {
ghcOptMode = toFlag GhcModeMake,
ghcOptNumJobs = numJobs,
ghcOptInputModules = toNubListR $ libModules lib,
ghcOptHPCDir = hpcdir Hpc.Vanilla
}
profOpts = vanillaOpts `mappend` mempty {
ghcOptProfilingMode = toFlag True,
ghcOptProfilingAuto = Internal.profDetailLevelFlag True
(withProfLibDetail lbi),
ghcOptHiSuffix = toFlag "p_hi",
ghcOptObjSuffix = toFlag "p_o",
ghcOptExtra = toNubListR $ hcProfOptions GHC libBi,
ghcOptHPCDir = hpcdir Hpc.Prof
}
sharedOpts = vanillaOpts `mappend` mempty {
ghcOptDynLinkMode = toFlag GhcDynamicOnly,
ghcOptFPic = toFlag True,
ghcOptHiSuffix = toFlag "dyn_hi",
ghcOptObjSuffix = toFlag "dyn_o",
ghcOptExtra = toNubListR $ hcSharedOptions GHC libBi,
ghcOptHPCDir = hpcdir Hpc.Dyn
}
linkerOpts = mempty {
ghcOptLinkOptions = toNubListR $ PD.ldOptions libBi,
ghcOptLinkLibs = toNubListR $ extraLibs libBi,
ghcOptLinkLibPath = toNubListR $ extraLibDirs libBi,
ghcOptLinkFrameworks = toNubListR $ PD.frameworks libBi,
ghcOptInputFiles = toNubListR
[libTargetDir </> x | x <- cObjs]
}
replOpts = vanillaOpts {
ghcOptExtra = overNubListR
Internal.filterGhciFlags $
(ghcOptExtra vanillaOpts),
ghcOptNumJobs = mempty
}
`mappend` linkerOpts
`mappend` mempty {
ghcOptMode = toFlag GhcModeInteractive,
ghcOptOptimisation = toFlag GhcNoOptimisation
}
vanillaSharedOpts = vanillaOpts `mappend` mempty {
ghcOptDynLinkMode = toFlag GhcStaticAndDynamic,
ghcOptDynHiSuffix = toFlag "dyn_hi",
ghcOptDynObjSuffix = toFlag "dyn_o",
ghcOptHPCDir = hpcdir Hpc.Dyn
}
unless (forRepl || null (libModules lib)) $
do let vanilla = whenVanillaLib forceVanillaLib (runGhcProg vanillaOpts)
shared = whenSharedLib forceSharedLib (runGhcProg sharedOpts)
useDynToo = dynamicTooSupported &&
(forceVanillaLib || withVanillaLib lbi) &&
(forceSharedLib || withSharedLib lbi) &&
null (hcSharedOptions GHC libBi)
if useDynToo
then do
runGhcProg vanillaSharedOpts
case (hpcdir Hpc.Dyn, hpcdir Hpc.Vanilla) of
(Cabal.Flag dynDir, Cabal.Flag vanillaDir) -> do
-- When the vanilla and shared library builds are done
-- in one pass, only one set of HPC module interfaces
-- are generated. This set should suffice for both
-- static and dynamically linked executables. We copy
-- the modules interfaces so they are available under
-- both ways.
copyDirectoryRecursive verbosity dynDir vanillaDir
_ -> return ()
else if isGhcDynamic
then do shared; vanilla
else do vanilla; shared
whenProfLib (runGhcProg profOpts)
-- build any C sources
unless (null (cSources libBi)) $ do
info verbosity "Building C Sources..."
sequence_
[ do let baseCcOpts = Internal.componentCcGhcOptions verbosity implInfo
lbi libBi clbi libTargetDir filename
vanillaCcOpts = if isGhcDynamic
-- Dynamic GHC requires C sources to be built
-- with -fPIC for REPL to work. See #2207.
then baseCcOpts { ghcOptFPic = toFlag True }
else baseCcOpts
profCcOpts = vanillaCcOpts `mappend` mempty {
ghcOptProfilingMode = toFlag True,
ghcOptObjSuffix = toFlag "p_o"
}
sharedCcOpts = vanillaCcOpts `mappend` mempty {
ghcOptFPic = toFlag True,
ghcOptDynLinkMode = toFlag GhcDynamicOnly,
ghcOptObjSuffix = toFlag "dyn_o"
}
odir = fromFlag (ghcOptObjDir vanillaCcOpts)
createDirectoryIfMissingVerbose verbosity True odir
needsRecomp <- checkNeedsRecompilation filename vanillaCcOpts
when needsRecomp $ do
runGhcProg vanillaCcOpts
unless forRepl $
whenSharedLib forceSharedLib (runGhcProg sharedCcOpts)
unless forRepl $ whenProfLib (runGhcProg profCcOpts)
| filename <- cSources libBi]
-- TODO: problem here is we need the .c files built first, so we can load them
-- with ghci, but .c files can depend on .h files generated by ghc by ffi
-- exports.
ifReplLib $ do
when (null (libModules lib)) $ warn verbosity "No exposed modules"
ifReplLib (runGhcProg replOpts)
-- link:
unless forRepl $ do
info verbosity "Linking..."
let cProfObjs = map (`replaceExtension` ("p_" ++ objExtension))
(cSources libBi)
cSharedObjs = map (`replaceExtension` ("dyn_" ++ objExtension))
(cSources libBi)
cid = compilerId (compiler lbi)
vanillaLibFilePath = libTargetDir </> mkLibName libName
profileLibFilePath = libTargetDir </> mkProfLibName libName
sharedLibFilePath = libTargetDir </> mkSharedLibName cid libName
ghciLibFilePath = libTargetDir </> Internal.mkGHCiLibName libName
libInstallPath = libdir $ absoluteInstallDirs pkg_descr lbi NoCopyDest
sharedLibInstallPath = libInstallPath </> mkSharedLibName cid libName
stubObjs <- fmap catMaybes $ sequence
[ findFileWithExtension [objExtension] [libTargetDir]
(ModuleName.toFilePath x ++"_stub")
| ghcVersion < Version [7,2] [] -- ghc-7.2+ does not make _stub.o files
, x <- libModules lib ]
stubProfObjs <- fmap catMaybes $ sequence
[ findFileWithExtension ["p_" ++ objExtension] [libTargetDir]
(ModuleName.toFilePath x ++"_stub")
| ghcVersion < Version [7,2] [] -- ghc-7.2+ does not make _stub.o files
, x <- libModules lib ]
stubSharedObjs <- fmap catMaybes $ sequence
[ findFileWithExtension ["dyn_" ++ objExtension] [libTargetDir]
(ModuleName.toFilePath x ++"_stub")
| ghcVersion < Version [7,2] [] -- ghc-7.2+ does not make _stub.o files
, x <- libModules lib ]
hObjs <- Internal.getHaskellObjects implInfo lib lbi
libTargetDir objExtension True
hProfObjs <-
if (withProfLib lbi)
then Internal.getHaskellObjects implInfo lib lbi
libTargetDir ("p_" ++ objExtension) True
else return []
hSharedObjs <-
if (withSharedLib lbi)
then Internal.getHaskellObjects implInfo lib lbi
libTargetDir ("dyn_" ++ objExtension) False
else return []
unless (null hObjs && null cObjs && null stubObjs) $ do
rpaths <- getRPaths lbi clbi
let staticObjectFiles =
hObjs
++ map (libTargetDir </>) cObjs
++ stubObjs
profObjectFiles =
hProfObjs
++ map (libTargetDir </>) cProfObjs
++ stubProfObjs
ghciObjFiles =
hObjs
++ map (libTargetDir </>) cObjs
++ stubObjs
dynamicObjectFiles =
hSharedObjs
++ map (libTargetDir </>) cSharedObjs
++ stubSharedObjs
-- After the relocation lib is created we invoke ghc -shared
-- with the dependencies spelled out as -package arguments
-- and ghc invokes the linker with the proper library paths
ghcSharedLinkArgs =
mempty {
ghcOptShared = toFlag True,
ghcOptDynLinkMode = toFlag GhcDynamicOnly,
ghcOptInputFiles = toNubListR dynamicObjectFiles,
ghcOptOutputFile = toFlag sharedLibFilePath,
-- For dynamic libs, Mac OS/X needs to know the install location
-- at build time. This only applies to GHC < 7.8 - see the
-- discussion in #1660.
ghcOptDylibName = if (hostOS == OSX
&& ghcVersion < Version [7,8] [])
then toFlag sharedLibInstallPath
else mempty,
ghcOptNoAutoLinkPackages = toFlag True,
ghcOptPackageDBs = withPackageDB lbi,
ghcOptPackages = toNubListR $
Internal.mkGhcOptPackages clbi ,
ghcOptLinkLibs = toNubListR $ extraLibs libBi,
ghcOptLinkLibPath = toNubListR $ extraLibDirs libBi,
ghcOptLinkFrameworks = toNubListR $ PD.frameworks libBi,
ghcOptRPaths = rpaths
}
info verbosity (show (ghcOptPackages ghcSharedLinkArgs))
whenVanillaLib False $ do
Ar.createArLibArchive verbosity lbi vanillaLibFilePath staticObjectFiles
whenProfLib $ do
Ar.createArLibArchive verbosity lbi profileLibFilePath profObjectFiles
whenGHCiLib $ do
(ldProg, _) <- requireProgram verbosity ldProgram (withPrograms lbi)
Ld.combineObjectFiles verbosity ldProg
ghciLibFilePath ghciObjFiles
whenSharedLib False $
runGhcProg ghcSharedLinkArgs
-- | Start a REPL without loading any source files.
startInterpreter :: Verbosity -> ProgramConfiguration -> Compiler
-> PackageDBStack -> IO ()
startInterpreter verbosity conf comp packageDBs = do
let replOpts = mempty {
ghcOptMode = toFlag GhcModeInteractive,
ghcOptPackageDBs = packageDBs
}
checkPackageDbStack comp packageDBs
(ghcProg, _) <- requireProgram verbosity ghcProgram conf
runGHC verbosity ghcProg comp replOpts
-- | Build an executable with GHC.
--
buildExe, replExe :: Verbosity -> Cabal.Flag (Maybe Int)
-> PackageDescription -> LocalBuildInfo
-> Executable -> ComponentLocalBuildInfo -> IO ()
buildExe = buildOrReplExe False
replExe = buildOrReplExe True
buildOrReplExe :: Bool -> Verbosity -> Cabal.Flag (Maybe Int)
-> PackageDescription -> LocalBuildInfo
-> Executable -> ComponentLocalBuildInfo -> IO ()
buildOrReplExe forRepl verbosity numJobs _pkg_descr lbi
exe@Executable { exeName = exeName', modulePath = modPath } clbi = do
(ghcProg, _) <- requireProgram verbosity ghcProgram (withPrograms lbi)
let comp = compiler lbi
implInfo = getImplInfo comp
runGhcProg = runGHC verbosity ghcProg comp
exeBi <- hackThreadedFlag verbosity
comp (withProfExe lbi) (buildInfo exe)
-- exeNameReal, the name that GHC really uses (with .exe on Windows)
let exeNameReal = exeName' <.>
(if takeExtension exeName' /= ('.':exeExtension)
then exeExtension
else "")
let targetDir = (buildDir lbi) </> exeName'
let exeDir = targetDir </> (exeName' ++ "-tmp")
createDirectoryIfMissingVerbose verbosity True targetDir
createDirectoryIfMissingVerbose verbosity True exeDir
-- TODO: do we need to put hs-boot files into place for mutually recursive
-- modules? FIX: what about exeName.hi-boot?
-- Determine if program coverage should be enabled and if so, what
-- '-hpcdir' should be.
let isCoverageEnabled = fromFlag $ configCoverage $ configFlags lbi
distPref = fromFlag $ configDistPref $ configFlags lbi
hpcdir way
| forRepl = mempty -- HPC is not supported in ghci
| isCoverageEnabled = toFlag $ Hpc.mixDir distPref way exeName'
| otherwise = mempty
-- build executables
srcMainFile <- findFile (exeDir : hsSourceDirs exeBi) modPath
rpaths <- getRPaths lbi clbi
let isGhcDynamic = isDynamic comp
dynamicTooSupported = supportsDynamicToo comp
isHaskellMain = elem (takeExtension srcMainFile) [".hs", ".lhs"]
cSrcs = cSources exeBi ++ [srcMainFile | not isHaskellMain]
cObjs = map (`replaceExtension` objExtension) cSrcs
baseOpts = (componentGhcOptions verbosity lbi exeBi clbi exeDir)
`mappend` mempty {
ghcOptMode = toFlag GhcModeMake,
ghcOptInputFiles = toNubListR
[ srcMainFile | isHaskellMain],
ghcOptInputModules = toNubListR
[ m | not isHaskellMain, m <- exeModules exe]
}
staticOpts = baseOpts `mappend` mempty {
ghcOptDynLinkMode = toFlag GhcStaticOnly,
ghcOptHPCDir = hpcdir Hpc.Vanilla
}
profOpts = baseOpts `mappend` mempty {
ghcOptProfilingMode = toFlag True,
ghcOptProfilingAuto = Internal.profDetailLevelFlag False
(withProfExeDetail lbi),
ghcOptHiSuffix = toFlag "p_hi",
ghcOptObjSuffix = toFlag "p_o",
ghcOptExtra = toNubListR (hcProfOptions GHC exeBi),
ghcOptHPCDir = hpcdir Hpc.Prof
}
dynOpts = baseOpts `mappend` mempty {
ghcOptDynLinkMode = toFlag GhcDynamicOnly,
ghcOptHiSuffix = toFlag "dyn_hi",
ghcOptObjSuffix = toFlag "dyn_o",
ghcOptExtra = toNubListR $
hcSharedOptions GHC exeBi,
ghcOptHPCDir = hpcdir Hpc.Dyn
}
dynTooOpts = staticOpts `mappend` mempty {
ghcOptDynLinkMode = toFlag GhcStaticAndDynamic,
ghcOptDynHiSuffix = toFlag "dyn_hi",
ghcOptDynObjSuffix = toFlag "dyn_o",
ghcOptHPCDir = hpcdir Hpc.Dyn
}
linkerOpts = mempty {
ghcOptLinkOptions = toNubListR $ PD.ldOptions exeBi,
ghcOptLinkLibs = toNubListR $ extraLibs exeBi,
ghcOptLinkLibPath = toNubListR $ extraLibDirs exeBi,
ghcOptLinkFrameworks = toNubListR $ PD.frameworks exeBi,
ghcOptInputFiles = toNubListR
[exeDir </> x | x <- cObjs]
}
dynLinkerOpts = mempty {
ghcOptRPaths = rpaths
}
replOpts = baseOpts {
ghcOptExtra = overNubListR
Internal.filterGhciFlags
(ghcOptExtra baseOpts)
}
-- For a normal compile we do separate invocations of ghc, one
-- for compiling and one for linking. But for repl we have to do just
-- the one invocation, so that one has to include all the
-- linker stuff too, like -l flags and any .o files from C
-- files etc.
`mappend` linkerOpts
`mappend` mempty {
ghcOptMode = toFlag GhcModeInteractive,
ghcOptOptimisation = toFlag GhcNoOptimisation
}
commonOpts | withProfExe lbi = profOpts
| withDynExe lbi = dynOpts
| otherwise = staticOpts
compileOpts | useDynToo = dynTooOpts
| otherwise = commonOpts
withStaticExe = (not $ withProfExe lbi) && (not $ withDynExe lbi)
-- For building exe's that use TH with -prof or -dynamic we actually have
-- to build twice, once without -prof/-dynamic and then again with
-- -prof/-dynamic. This is because the code that TH needs to run at
-- compile time needs to be the vanilla ABI so it can be loaded up and run
-- by the compiler.
-- With dynamic-by-default GHC the TH object files loaded at compile-time
-- need to be .dyn_o instead of .o.
doingTH = EnableExtension TemplateHaskell `elem` allExtensions exeBi
-- Should we use -dynamic-too instead of compiling twice?
useDynToo = dynamicTooSupported && isGhcDynamic
&& doingTH && withStaticExe
&& null (hcSharedOptions GHC exeBi)
compileTHOpts | isGhcDynamic = dynOpts
| otherwise = staticOpts
compileForTH
| forRepl = False
| useDynToo = False
| isGhcDynamic = doingTH && (withProfExe lbi || withStaticExe)
| otherwise = doingTH && (withProfExe lbi || withDynExe lbi)
linkOpts = commonOpts `mappend`
linkerOpts `mappend`
mempty { ghcOptLinkNoHsMain = toFlag (not isHaskellMain) } `mappend`
(if withDynExe lbi then dynLinkerOpts else mempty)
-- Build static/dynamic object files for TH, if needed.
when compileForTH $
runGhcProg compileTHOpts { ghcOptNoLink = toFlag True
, ghcOptNumJobs = numJobs }
unless forRepl $
runGhcProg compileOpts { ghcOptNoLink = toFlag True
, ghcOptNumJobs = numJobs }
-- build any C sources
unless (null cSrcs) $ do
info verbosity "Building C Sources..."
sequence_
[ do let opts = (Internal.componentCcGhcOptions verbosity implInfo lbi exeBi
clbi exeDir filename) `mappend` mempty {
ghcOptDynLinkMode = toFlag (if withDynExe lbi
then GhcDynamicOnly
else GhcStaticOnly),
ghcOptProfilingMode = toFlag (withProfExe lbi)
}
odir = fromFlag (ghcOptObjDir opts)
createDirectoryIfMissingVerbose verbosity True odir
needsRecomp <- checkNeedsRecompilation filename opts
when needsRecomp $
runGhcProg opts
| filename <- cSrcs ]
-- TODO: problem here is we need the .c files built first, so we can load them
-- with ghci, but .c files can depend on .h files generated by ghc by ffi
-- exports.
when forRepl $ runGhcProg replOpts
-- link:
unless forRepl $ do
info verbosity "Linking..."
runGhcProg linkOpts { ghcOptOutputFile = toFlag (targetDir </> exeNameReal) }
-- | Returns True if the modification date of the given source file is newer than
-- the object file we last compiled for it, or if no object file exists yet.
checkNeedsRecompilation :: FilePath -> GhcOptions -> IO Bool
checkNeedsRecompilation filename opts = filename `moreRecentFile` oname
where oname = getObjectFileName filename opts
-- | Finds the object file name of the given source file
getObjectFileName :: FilePath -> GhcOptions -> FilePath
getObjectFileName filename opts = oname
where odir = fromFlag (ghcOptObjDir opts)
oext = fromFlagOrDefault "o" (ghcOptObjSuffix opts)
oname = odir </> replaceExtension filename oext
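-- An illustrative sketch (not part of the original code): assuming @opts@ has
-- @ghcOptObjDir = toFlag "dist/build"@ and no explicit 'ghcOptObjSuffix', a C
-- source maps to its object file as
--
-- > getObjectFileName "cbits/foo.c" opts  ==  "dist/build" </> "cbits/foo.o"
--
-- and 'checkNeedsRecompilation' merely compares the two files' modification
-- times via 'moreRecentFile'.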
-- | Calculate the RPATHs for the component we are building.
--
-- Calculates relative RPATHs when 'relocatable' is set.
getRPaths :: LocalBuildInfo
-> ComponentLocalBuildInfo -- ^ Component we are building
-> IO (NubListR FilePath)
getRPaths lbi clbi | supportRPaths hostOS = do
libraryPaths <- depLibraryPaths False (relocatable lbi) lbi clbi
let hostPref = case hostOS of
OSX -> "@loader_path"
_ -> "$ORIGIN"
relPath p = if isRelative p then hostPref </> p else p
rpaths = toNubListR (map relPath libraryPaths)
return rpaths
where
(Platform _ hostOS) = hostPlatform lbi
-- The list of RPath-supported operating systems below reflects the
-- platforms on which Cabal's RPATH handling is tested. It does _NOT_
-- reflect whether the OS supports RPATH.
-- E.g. when this comment was written, the *BSD operating systems were
-- untested with regards to Cabal RPATH handling, and were hence set to
-- 'False', while those operating systems themselves do support RPATH.
supportRPaths Linux = True
supportRPaths Windows = False
supportRPaths OSX = True
supportRPaths FreeBSD = False
supportRPaths OpenBSD = False
supportRPaths NetBSD = False
supportRPaths DragonFly = False
supportRPaths Solaris = False
supportRPaths AIX = False
supportRPaths HPUX = False
supportRPaths IRIX = False
supportRPaths HaLVM = False
supportRPaths IOS = False
supportRPaths Android = False
supportRPaths Ghcjs = False
supportRPaths Hurd = False
supportRPaths (OtherOS _) = False
-- Do _not_ add a default case so that we get a warning here when a new OS
-- is added.
getRPaths _ _ = return mempty
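-- A hedged example of the values produced above (inputs are made up): on
-- Linux, a library path that is relative to the binary is prefixed with
-- @$ORIGIN@, while absolute paths pass through unchanged:
--
-- > relPath "../lib/foo"   ==  "$ORIGIN" </> "../lib/foo"
-- > relPath "/usr/lib/ghc" ==  "/usr/lib/ghc"
--
-- (@relPath@ is the local helper bound inside 'getRPaths'.)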
-- | Filter the "-threaded" flag when profiling as it does not
-- work with ghc-6.8 and older.
hackThreadedFlag :: Verbosity -> Compiler -> Bool -> BuildInfo -> IO BuildInfo
hackThreadedFlag verbosity comp prof bi
| not mustFilterThreaded = return bi
| otherwise = do
warn verbosity $ "The ghc flag '-threaded' is not compatible with "
++ "profiling in ghc-6.8 and older. It will be disabled."
return bi { options = filterHcOptions (/= "-threaded") (options bi) }
where
mustFilterThreaded = prof && compilerVersion comp < Version [6, 10] []
&& "-threaded" `elem` hcOptions GHC bi
filterHcOptions p hcoptss =
[ (hc, if hc == GHC then filter p opts else opts)
| (hc, opts) <- hcoptss ]
-- | Extracts a String representing a hash of the ABI of a built
-- library. It can fail if the library has not yet been built.
--
libAbiHash :: Verbosity -> PackageDescription -> LocalBuildInfo
-> Library -> ComponentLocalBuildInfo -> IO String
libAbiHash verbosity _pkg_descr lbi lib clbi = do
libBi <- hackThreadedFlag verbosity
(compiler lbi) (withProfLib lbi) (libBuildInfo lib)
let
comp = compiler lbi
vanillaArgs =
(componentGhcOptions verbosity lbi libBi clbi (buildDir lbi))
`mappend` mempty {
ghcOptMode = toFlag GhcModeAbiHash,
ghcOptInputModules = toNubListR $ exposedModules lib
}
sharedArgs = vanillaArgs `mappend` mempty {
ghcOptDynLinkMode = toFlag GhcDynamicOnly,
ghcOptFPic = toFlag True,
ghcOptHiSuffix = toFlag "dyn_hi",
ghcOptObjSuffix = toFlag "dyn_o",
ghcOptExtra = toNubListR $ hcSharedOptions GHC libBi
}
profArgs = vanillaArgs `mappend` mempty {
ghcOptProfilingMode = toFlag True,
ghcOptProfilingAuto = Internal.profDetailLevelFlag True
(withProfLibDetail lbi),
ghcOptHiSuffix = toFlag "p_hi",
ghcOptObjSuffix = toFlag "p_o",
ghcOptExtra = toNubListR $ hcProfOptions GHC libBi
}
ghcArgs = if withVanillaLib lbi then vanillaArgs
else if withSharedLib lbi then sharedArgs
else if withProfLib lbi then profArgs
else error "libAbiHash: Can't find an enabled library way"
--
(ghcProg, _) <- requireProgram verbosity ghcProgram (withPrograms lbi)
hash <- getProgramInvocationOutput verbosity
(ghcInvocation ghcProg comp ghcArgs)
return (takeWhile (not . isSpace) hash)
componentGhcOptions :: Verbosity -> LocalBuildInfo
-> BuildInfo -> ComponentLocalBuildInfo -> FilePath
-> GhcOptions
componentGhcOptions = Internal.componentGhcOptions
componentCcGhcOptions :: Verbosity -> LocalBuildInfo
-> BuildInfo -> ComponentLocalBuildInfo
-> FilePath -> FilePath
-> GhcOptions
componentCcGhcOptions verbosity lbi =
Internal.componentCcGhcOptions verbosity implInfo lbi
where
comp = compiler lbi
implInfo = getImplInfo comp
-- -----------------------------------------------------------------------------
-- Installing
-- |Install executables for GHC.
installExe :: Verbosity
-> LocalBuildInfo
-> InstallDirs FilePath -- ^Where to copy the files to
-> FilePath -- ^Build location
-> (FilePath, FilePath) -- ^Executable (prefix,suffix)
-> PackageDescription
-> Executable
-> IO ()
installExe verbosity lbi installDirs buildPref
(progprefix, progsuffix) _pkg exe = do
let binDir = bindir installDirs
createDirectoryIfMissingVerbose verbosity True binDir
let exeFileName = exeName exe <.> exeExtension
fixedExeBaseName = progprefix ++ exeName exe ++ progsuffix
installBinary dest = do
installExecutableFile verbosity
(buildPref </> exeName exe </> exeFileName)
(dest <.> exeExtension)
when (stripExes lbi) $
Strip.stripExe verbosity (hostPlatform lbi) (withPrograms lbi)
(dest <.> exeExtension)
installBinary (binDir </> fixedExeBaseName)
-- |Install for ghc, .hi, .a and, if --with-ghci given, .o
installLib :: Verbosity
-> LocalBuildInfo
-> FilePath -- ^install location
-> FilePath -- ^install location for dynamic libraries
-> FilePath -- ^Build location
-> PackageDescription
-> Library
-> ComponentLocalBuildInfo
-> IO ()
installLib verbosity lbi targetDir dynlibTargetDir builtDir _pkg lib clbi = do
-- copy .hi files over:
whenVanilla $ copyModuleFiles "hi"
whenProf $ copyModuleFiles "p_hi"
whenShared $ copyModuleFiles "dyn_hi"
-- copy the built library files over:
whenVanilla $ installOrdinary builtDir targetDir vanillaLibName
whenProf $ installOrdinary builtDir targetDir profileLibName
whenGHCi $ installOrdinary builtDir targetDir ghciLibName
whenShared $ installShared builtDir dynlibTargetDir sharedLibName
where
install isShared srcDir dstDir name = do
let src = srcDir </> name
dst = dstDir </> name
createDirectoryIfMissingVerbose verbosity True dstDir
if isShared
then do when (stripLibs lbi) $ Strip.stripLib verbosity
(hostPlatform lbi) (withPrograms lbi) src
installExecutableFile verbosity src dst
else installOrdinaryFile verbosity src dst
installOrdinary = install False
installShared = install True
copyModuleFiles ext =
findModuleFiles [builtDir] [ext] (libModules lib)
>>= installOrdinaryFiles verbosity targetDir
cid = compilerId (compiler lbi)
libName = componentLibraryName clbi
vanillaLibName = mkLibName libName
profileLibName = mkProfLibName libName
ghciLibName = Internal.mkGHCiLibName libName
sharedLibName = (mkSharedLibName cid) libName
hasLib = not $ null (libModules lib)
&& null (cSources (libBuildInfo lib))
whenVanilla = when (hasLib && withVanillaLib lbi)
whenProf = when (hasLib && withProfLib lbi)
whenGHCi = when (hasLib && withGHCiLib lbi)
whenShared = when (hasLib && withSharedLib lbi)
-- -----------------------------------------------------------------------------
-- Registering
hcPkgInfo :: ProgramConfiguration -> HcPkg.HcPkgInfo
hcPkgInfo conf = HcPkg.HcPkgInfo { HcPkg.hcPkgProgram = ghcPkgProg
, HcPkg.noPkgDbStack = v < [6,9]
, HcPkg.noVerboseFlag = v < [6,11]
, HcPkg.flagPackageConf = v < [7,5]
, HcPkg.useSingleFileDb = v < [7,9]
}
where
v = versionBranch ver
Just ghcPkgProg = lookupProgram ghcPkgProgram conf
Just ver = programVersion ghcPkgProg
registerPackage
:: Verbosity
-> InstalledPackageInfo
-> PackageDescription
-> LocalBuildInfo
-> Bool
-> PackageDBStack
-> IO ()
registerPackage verbosity installedPkgInfo _pkg lbi _inplace packageDbs =
HcPkg.reregister (hcPkgInfo $ withPrograms lbi) verbosity
packageDbs (Right installedPkgInfo)
pkgRoot :: Verbosity -> LocalBuildInfo -> PackageDB -> IO FilePath
pkgRoot verbosity lbi = pkgRoot'
where
pkgRoot' GlobalPackageDB =
let Just ghcProg = lookupProgram ghcProgram (withPrograms lbi)
in fmap takeDirectory (getGlobalPackageDB verbosity ghcProg)
pkgRoot' UserPackageDB = do
appDir <- getAppUserDataDirectory "ghc"
let ver = compilerVersion (compiler lbi)
subdir = System.Info.arch ++ '-':System.Info.os
++ '-':showVersion ver
rootDir = appDir </> subdir
-- We must create the root directory for the user package database if it
-- does not yet exist. Otherwise '${pkgroot}' will resolve to a non-existent
-- directory at the time of 'ghc-pkg register', and registration will
-- fail.
createDirectoryIfMissing True rootDir
return rootDir
pkgRoot' (SpecificPackageDB fp) = return (takeDirectory fp)
-- -----------------------------------------------------------------------------
-- Utils
isDynamic :: Compiler -> Bool
isDynamic = Internal.ghcLookupProperty "GHC Dynamic"
supportsDynamicToo :: Compiler -> Bool
supportsDynamicToo = Internal.ghcLookupProperty "Support dynamic-too"
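-- Both predicates consult the property map captured from @ghc --info@ at
-- configure time. A rough sketch (the exact key/value strings are whatever
-- the compiler reports, shown here only for illustration):
--
-- > compilerProperties comp  -- e.g. fromList [("GHC Dynamic","YES"), ...]
-- > isDynamic comp           -- True when the "GHC Dynamic" property says so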
|
valderman/cabal
|
Cabal/Distribution/Simple/GHC.hs
|
bsd-3-clause
| 52,297 | 0 | 23 | 15,988 | 9,768 | 5,124 | 4,644 | 827 | 19 |
module Mire.Hint (ConfigVar, readConfigAsync, errorToString) where
import Mire.Prelude
import Mire.Plugin (Config (..))
import Language.Haskell.Interpreter
import qualified Data.Text as T
import Control.Concurrent.STM
import Control.Concurrent
import System.Directory
type ConfigVar = TMVar (Either InterpreterError Config)
getConfig :: IO (Either InterpreterError Config)
getConfig = do
dir <- getAppUserDataDirectory "mire"
runInterpreter (loadConfig dir)
readConfigAsync :: ConfigVar -> IO ()
readConfigAsync configVar = void $ forkIO (getConfig >>= atomically . putTMVar configVar)
-- Load basic config (just pluginPath for now).
loadConfig :: String -> Interpreter Config
loadConfig basePath = do
set [ searchPath := [basePath, basePath <> "/worlds"],
languageExtensions := [OverloadedStrings, NoImplicitPrelude, LambdaCase,
PackageImports, ViewPatterns, TupleSections, MultiWayIf,
BangPatterns] ]
loadModules ["Config"]
setTopLevelModules ["Config"]
-- unsafeSetGhcOption "-v" -- Does not work.
interpret "Config.config" (as :: Config)
errorToString :: InterpreterError -> Text
errorToString (UnknownError s) = "UnknownError " <> T.pack s
-- TODO WontCompile lists every error twice. Don't know why. For now fixed
-- with 'nub'.
errorToString (WontCompile ss) = T.concat $ nub (T.pack . (<> "\n") . ("GhcError " <>) . errMsg <$> ss)
errorToString (NotAllowed s) = "NotAllowed " <> T.pack s
errorToString (GhcException s) = "GhcException " <> T.pack s
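-- A usage sketch (not part of the original module): a caller typically
-- allocates the 'ConfigVar', starts the asynchronous read, and then blocks
-- until the interpreter is done:
--
-- > configVar <- newEmptyTMVarIO
-- > readConfigAsync configVar
-- > result <- atomically (takeTMVar configVar)
-- > either (putStrLn . T.unpack . errorToString) useConfig result
--
-- where @useConfig@ stands for a hypothetical consumer of the loaded 'Config'.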
|
ellej/mire
|
src/Mire/Hint.hs
|
bsd-3-clause
| 1,562 | 0 | 12 | 291 | 409 | 220 | 189 | 29 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.Test
-- Copyright : Thomas Tuegel 2010
--
-- Maintainer : [email protected]
-- Portability : portable
--
-- This is the entry point into testing a built package. It performs the
-- \"@.\/setup test@\" action. It runs test suites designated in the package
-- description and reports on the results.
{- All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of Isaac Jones nor the names of other
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -}
module Distribution.Simple.Test
( test
, runTests
, writeSimpleTestStub
, stubFilePath
, stubName
, PackageLog(..)
, TestSuiteLog(..)
, Case(..)
, suitePassed, suiteFailed, suiteError
) where
import Distribution.Compat.TempFile ( openTempFile )
import Distribution.ModuleName ( ModuleName )
import Distribution.Package
( PackageId )
import qualified Distribution.PackageDescription as PD
( PackageDescription(..), BuildInfo(buildable)
, TestSuite(..)
, TestSuiteInterface(..), testType, hasTests )
import Distribution.Simple.Build.PathsModule ( pkgPathEnvVar )
import Distribution.Simple.BuildPaths ( exeExtension )
import Distribution.Simple.Compiler ( Compiler(..), CompilerId )
import Distribution.Simple.Hpc
( markupPackage, markupTest, tixDir, tixFilePath )
import Distribution.Simple.InstallDirs
( fromPathTemplate, initialPathTemplateEnv, PathTemplateVariable(..)
, substPathTemplate , toPathTemplate, PathTemplate )
import qualified Distribution.Simple.LocalBuildInfo as LBI
( LocalBuildInfo(..) )
import Distribution.Simple.Setup ( TestFlags(..), TestShowDetails(..), fromFlag )
import Distribution.Simple.Utils ( die, notice )
import qualified Distribution.TestSuite as TestSuite
( Test, Result(..), ImpureTestable(..), TestOptions(..), Options(..) )
import Distribution.Text
import Distribution.Verbosity ( normal, Verbosity )
import Distribution.System ( buildPlatform, Platform )
import Control.Exception ( bracket )
import Control.Monad ( when, liftM, unless, filterM )
import Data.Char ( toUpper )
import Data.Monoid ( mempty )
import System.Directory
( createDirectoryIfMissing, doesDirectoryExist, doesFileExist
, getCurrentDirectory, getDirectoryContents, removeDirectoryRecursive
, removeFile )
import System.Environment ( getEnvironment )
import System.Exit ( ExitCode(..), exitFailure, exitWith )
import System.FilePath ( (</>), (<.>) )
import System.IO ( hClose, IOMode(..), openFile )
import System.Process ( runProcess, waitForProcess )
-- | Logs all test results for a package, broken down first by test suite and
-- then by test case.
data PackageLog = PackageLog
{ package :: PackageId
, compiler :: CompilerId
, platform :: Platform
, testSuites :: [TestSuiteLog]
}
deriving (Read, Show, Eq)
-- | A 'PackageLog' with package and platform information specified.
localPackageLog :: PD.PackageDescription -> LBI.LocalBuildInfo -> PackageLog
localPackageLog pkg_descr lbi = PackageLog
{ package = PD.package pkg_descr
, compiler = compilerId $ LBI.compiler lbi
, platform = buildPlatform
, testSuites = []
}
-- | Logs test suite results, itemized by test case.
data TestSuiteLog = TestSuiteLog
{ name :: String
, cases :: [Case]
, logFile :: FilePath -- path to human-readable log file
}
deriving (Read, Show, Eq)
data Case = Case
{ caseName :: String
, caseOptions :: TestSuite.Options
, caseResult :: TestSuite.Result
}
deriving (Read, Show, Eq)
getTestOptions :: TestSuite.Test -> TestSuiteLog -> IO TestSuite.Options
getTestOptions t l =
case filter ((== TestSuite.name t) . caseName) (cases l) of
(x:_) -> return $ caseOptions x
_ -> TestSuite.defaultOptions t
-- | From a 'TestSuiteLog', determine if the test suite passed.
suitePassed :: TestSuiteLog -> Bool
suitePassed = all (== TestSuite.Pass) . map caseResult . cases
-- | From a 'TestSuiteLog', determine if the test suite failed.
suiteFailed :: TestSuiteLog -> Bool
suiteFailed = any isFail . map caseResult . cases
where isFail (TestSuite.Fail _) = True
isFail _ = False
-- | From a 'TestSuiteLog', determine if the test suite encountered errors.
suiteError :: TestSuiteLog -> Bool
suiteError = any isError . map caseResult . cases
where isError (TestSuite.Error _) = True
isError _ = False
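-- A small worked example of the three predicates above (the log value is
-- hypothetical):
--
-- > let l = TestSuiteLog "t" [ Case "a" mempty TestSuite.Pass
-- >                          , Case "b" mempty (TestSuite.Fail "boom") ] ""
-- > suitePassed l  ==  False
-- > suiteFailed l  ==  True
-- > suiteError  l  ==  False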
-- | Run a test executable, logging the output and generating the appropriate
-- summary messages.
testController :: TestFlags
-- ^ flags Cabal was invoked with
-> PD.PackageDescription
-- ^ description of package the test suite belongs to
-> LBI.LocalBuildInfo
-- ^ information from the configure step
-> PD.TestSuite
-- ^ TestSuite being tested
-> (FilePath -> String)
-- ^ prepare standard input for test executable
-> FilePath -- ^ executable name
-> (ExitCode -> String -> TestSuiteLog)
-- ^ generator for the TestSuiteLog
-> (TestSuiteLog -> FilePath)
-- ^ generator for final human-readable log filename
-> IO TestSuiteLog
testController flags pkg_descr lbi suite preTest cmd postTest logNamer = do
let distPref = fromFlag $ testDistPref flags
verbosity = fromFlag $ testVerbosity flags
testLogDir = distPref </> "test"
options = map (testOption pkg_descr lbi suite) $ testOptions flags
pwd <- getCurrentDirectory
existingEnv <- getEnvironment
let dataDirPath = pwd </> PD.dataDir pkg_descr
shellEnv = Just $ (pkgPathEnvVar pkg_descr "datadir", dataDirPath)
: ("HPCTIXFILE", (</>) pwd
$ tixFilePath distPref $ PD.testName suite)
: existingEnv
bracket (openCabalTemp testLogDir) deleteIfExists $ \tempLog ->
bracket (openCabalTemp testLogDir) deleteIfExists $ \tempInput -> do
-- Check that the test executable exists.
exists <- doesFileExist cmd
unless exists $ die $ "Error: Could not find test program \"" ++ cmd
++ "\". Did you build the package first?"
-- Remove old .tix files if appropriate.
unless (fromFlag $ testKeepTix flags) $ do
let tDir = tixDir distPref $ PD.testName suite
exists <- doesDirectoryExist tDir
when exists $ removeDirectoryRecursive tDir
-- Create directory for HPC files.
createDirectoryIfMissing True $ tixDir distPref $ PD.testName suite
-- Write summary notices indicating start of test suite
notice verbosity $ summarizeSuiteStart $ PD.testName suite
-- Prepare standard input for test executable
appendFile tempInput $ preTest tempInput
-- Run test executable
exit <- do
hLog <- openFile tempLog AppendMode
hIn <- openFile tempInput ReadMode
-- these handles get closed by runProcess
proc <- runProcess cmd options Nothing shellEnv
(Just hIn) (Just hLog) (Just hLog)
waitForProcess proc
-- Generate TestSuiteLog from executable exit code and a machine-
-- readable test log
suiteLog <- fmap (postTest exit $!) $ readFile tempInput
-- Generate final log file name
let finalLogName = testLogDir </> logNamer suiteLog
suiteLog' = suiteLog { logFile = finalLogName }
-- Write summary notice to log file indicating start of test suite
appendFile (logFile suiteLog') $ summarizeSuiteStart $ PD.testName suite
-- Append contents of temporary log file to the final human-
-- readable log file
readFile tempLog >>= appendFile (logFile suiteLog')
-- Write end-of-suite summary notice to log file
appendFile (logFile suiteLog') $ summarizeSuiteFinish suiteLog'
-- Show the contents of the human-readable log file on the terminal
-- if there is a failure and/or detailed output is requested
let details = fromFlag $ testShowDetails flags
whenPrinting = when $ (details > Never)
&& (not (suitePassed suiteLog) || details == Always)
&& verbosity >= normal
whenPrinting $ readFile tempLog >>=
putStr . unlines . lines
-- Write summary notice to terminal indicating end of test suite
notice verbosity $ summarizeSuiteFinish suiteLog'
markupTest verbosity lbi distPref
(display $ PD.package pkg_descr) suite
return suiteLog'
where
deleteIfExists file = do
exists <- doesFileExist file
when exists $ removeFile file
openCabalTemp testLogDir = do
(f, h) <- openTempFile testLogDir $ "cabal-test-" <.> "log"
hClose h >> return f
-- |Perform the \"@.\/setup test@\" action.
test :: PD.PackageDescription -- ^information from the .cabal file
-> LBI.LocalBuildInfo -- ^information from the configure step
-> TestFlags -- ^flags sent to test
-> IO ()
test pkg_descr lbi flags = do
let verbosity = fromFlag $ testVerbosity flags
humanTemplate = fromFlag $ testHumanLog flags
machineTemplate = fromFlag $ testMachineLog flags
distPref = fromFlag $ testDistPref flags
testLogDir = distPref </> "test"
testNames = fromFlag $ testList flags
pkgTests = PD.testSuites pkg_descr
enabledTests = [ t | t <- pkgTests
, PD.testEnabled t
, PD.buildable (PD.testBuildInfo t) ]
doTest :: (PD.TestSuite, Maybe TestSuiteLog) -> IO TestSuiteLog
doTest (suite, mLog) = do
let testLogPath = testSuiteLogPath humanTemplate pkg_descr lbi
go pre cmd post = testController flags pkg_descr lbi suite
pre cmd post testLogPath
case PD.testInterface suite of
PD.TestSuiteExeV10 _ _ -> do
let cmd = LBI.buildDir lbi </> PD.testName suite
</> PD.testName suite <.> exeExtension
preTest _ = ""
postTest exit _ =
let r = case exit of
ExitSuccess -> TestSuite.Pass
ExitFailure c -> TestSuite.Fail
$ "exit code: " ++ show c
in TestSuiteLog
{ name = PD.testName suite
, cases = [Case (PD.testName suite) mempty r]
, logFile = ""
}
go preTest cmd postTest
PD.TestSuiteLibV09 _ _ -> do
let cmd = LBI.buildDir lbi </> stubName suite
</> stubName suite <.> exeExtension
oldLog = case mLog of
Nothing -> TestSuiteLog
{ name = PD.testName suite
, cases = []
, logFile = []
}
Just l -> l
preTest f = show $ oldLog { logFile = f }
postTest _ = read
go preTest cmd postTest
_ -> return TestSuiteLog
{ name = PD.testName suite
, cases = [Case (PD.testName suite) mempty
$ TestSuite.Error $ "No support for running "
++ "test suite type: "
++ show (disp $ PD.testType suite)]
, logFile = ""
}
when (not $ PD.hasTests pkg_descr) $ do
notice verbosity "Package has no test suites."
exitWith ExitSuccess
when (PD.hasTests pkg_descr && null enabledTests) $
die $ "No test suites enabled. Did you remember to configure with "
++ "\'--enable-tests\'?"
testsToRun <- case testNames of
[] -> return $ zip enabledTests $ repeat Nothing
names -> flip mapM names $ \tName ->
let testMap = zip enabledNames enabledTests
enabledNames = map PD.testName enabledTests
allNames = map PD.testName pkgTests
in case lookup tName testMap of
Just t -> return (t, Nothing)
_ | tName `elem` allNames ->
die $ "Package configured with test suite "
++ tName ++ " disabled."
| otherwise -> die $ "no such test: " ++ tName
createDirectoryIfMissing True testLogDir
-- Delete ordinary files from test log directory.
getDirectoryContents testLogDir
>>= filterM doesFileExist . map (testLogDir </>)
>>= mapM_ removeFile
let totalSuites = length testsToRun
notice verbosity $ "Running " ++ show totalSuites ++ " test suites..."
suites <- mapM doTest testsToRun
let packageLog = (localPackageLog pkg_descr lbi) { testSuites = suites }
packageLogFile = (</>) testLogDir
$ packageLogPath machineTemplate pkg_descr lbi
allOk <- summarizePackage verbosity packageLog
writeFile packageLogFile $ show packageLog
markupPackage verbosity lbi distPref (display $ PD.package pkg_descr)
$ map fst testsToRun
unless allOk exitFailure
-- | Print a summary to the console after all test suites have been run
-- indicating the number of successful test suites and cases. Returns 'True' if
-- all test suites passed and 'False' otherwise.
summarizePackage :: Verbosity -> PackageLog -> IO Bool
summarizePackage verbosity packageLog = do
let cases' = map caseResult $ concatMap cases $ testSuites packageLog
passedCases = length $ filter (== TestSuite.Pass) cases'
totalCases = length cases'
passedSuites = length $ filter suitePassed $ testSuites packageLog
totalSuites = length $ testSuites packageLog
notice verbosity $ show passedSuites ++ " of " ++ show totalSuites
++ " test suites (" ++ show passedCases ++ " of "
++ show totalCases ++ " test cases) passed."
return $! passedSuites == totalSuites
-- | Print a summary of a single test case's result to the console, suppressing
-- output for certain verbosity or test filter levels.
summarizeCase :: Verbosity -> TestShowDetails -> Case -> IO ()
summarizeCase verbosity details t =
when shouldPrint $ notice verbosity $ "Test case " ++ caseName t
++ ": " ++ show (caseResult t)
where shouldPrint = (details > Never) && (notPassed || details == Always)
notPassed = caseResult t /= TestSuite.Pass
-- | Print a summary of the test suite's results on the console, suppressing
-- output for certain verbosity or test filter levels.
summarizeSuiteFinish :: TestSuiteLog -> String
summarizeSuiteFinish testLog = unlines
[ "Test suite " ++ name testLog ++ ": " ++ resStr
, "Test suite logged to: " ++ logFile testLog
]
where resStr = map toUpper (resultString testLog)
summarizeSuiteStart :: String -> String
summarizeSuiteStart n = "Test suite " ++ n ++ ": RUNNING...\n"
resultString :: TestSuiteLog -> String
resultString l | suiteError l = "error"
| suiteFailed l = "fail"
| otherwise = "pass"
testSuiteLogPath :: PathTemplate
-> PD.PackageDescription
-> LBI.LocalBuildInfo
-> TestSuiteLog
-> FilePath
testSuiteLogPath template pkg_descr lbi testLog =
fromPathTemplate $ substPathTemplate env template
where
env = initialPathTemplateEnv
(PD.package pkg_descr) (compilerId $ LBI.compiler lbi)
++ [ (TestSuiteNameVar, toPathTemplate $ name testLog)
, (TestSuiteResultVar, result)
]
result = toPathTemplate $ resultString testLog
-- TODO: This is abusing the notion of a 'PathTemplate'. The result
-- isn't necessarily a path.
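-- An illustrative substitution (the template and suite name are made up): with
-- a hypothetical human-readable log template of @$test-suite.log@ and a suite
-- named @unit-tests@, the log lands in @unit-tests.log@; including @$result@
-- in the template would add "pass", "fail" or "error" as computed by
-- 'resultString'.
--
-- > testSuiteLogPath (toPathTemplate "$test-suite.log") pkg_descr lbi testLog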
testOption :: PD.PackageDescription
-> LBI.LocalBuildInfo
-> PD.TestSuite
-> PathTemplate
-> String
testOption pkg_descr lbi suite template =
fromPathTemplate $ substPathTemplate env template
where
env = initialPathTemplateEnv
(PD.package pkg_descr) (compilerId $ LBI.compiler lbi) ++
[(TestSuiteNameVar, toPathTemplate $ PD.testName suite)]
packageLogPath :: PathTemplate
-> PD.PackageDescription
-> LBI.LocalBuildInfo
-> FilePath
packageLogPath template pkg_descr lbi =
fromPathTemplate $ substPathTemplate env template
where
env = initialPathTemplateEnv
(PD.package pkg_descr) (compilerId $ LBI.compiler lbi)
-- | The filename of the source file for the stub executable associated with a
-- library 'TestSuite'.
stubFilePath :: PD.TestSuite -> FilePath
stubFilePath t = stubName t <.> "hs"
-- | The name of the stub executable associated with a library 'TestSuite'.
stubName :: PD.TestSuite -> FilePath
stubName t = PD.testName t ++ "Stub"
-- | Write the source file for a library 'TestSuite' stub executable.
writeSimpleTestStub :: PD.TestSuite -- ^ library 'TestSuite' for which a stub
-- is being created
-> FilePath -- ^ path to directory where stub source
-- should be located
-> IO ()
writeSimpleTestStub t dir = do
createDirectoryIfMissing True dir
let filename = dir </> stubFilePath t
PD.TestSuiteLibV09 _ m = PD.testInterface t
writeFile filename $ simpleTestStub m
-- | Source code for library test suite stub executable
simpleTestStub :: ModuleName -> String
simpleTestStub m = unlines
[ "module Main ( main ) where"
, "import Control.Monad ( liftM )"
, "import Distribution.Simple.Test ( runTests )"
, "import " ++ show (disp m) ++ " ( tests )"
, "main :: IO ()"
, "main = runTests tests"
]
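-- For example (module name illustrative), a library suite whose exported
-- tests live in @MyTests@ gets a stub consisting of the lines above with
-- @import MyTests ( tests )@ spliced in; 'writeSimpleTestStub' writes that
-- file to the directory the caller supplies, under the name given by
-- 'stubFilePath'.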
-- | The test runner used in library "TestSuite" stub executables. Runs a list
-- of 'Test's. An executable calling this function is meant to be invoked as
-- the child of a Cabal process during @.\/setup test@. A 'TestSuiteLog',
-- provided by Cabal, is read from the standard input; it supplies the name of
-- the test suite and the location of the machine-readable test suite log file.
-- Human-readable log information is written to the standard output for capture
-- by the calling Cabal process.
runTests :: [TestSuite.Test] -> IO ()
runTests tests = do
testLogIn <- liftM read getContents
let go :: TestSuite.Test -> IO Case
go t = do
o <- getTestOptions t testLogIn
r <- TestSuite.runM t o
let ret = Case
{ caseName = TestSuite.name t
, caseOptions = o
, caseResult = r
}
summarizeCase normal Always ret
return ret
cases' <- mapM go tests
let testLog = testLogIn { cases = cases'}
writeFile (logFile testLog) $ show testLog
when (suiteError testLog) $ exitWith $ ExitFailure 2
when (suiteFailed testLog) $ exitWith $ ExitFailure 1
exitWith ExitSuccess
|
alphaHeavy/cabal
|
Cabal/Distribution/Simple/Test.hs
|
bsd-3-clause
| 21,357 | 0 | 29 | 6,477 | 4,097 | 2,120 | 1,977 | 330 | 7 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[TcModule]{Typechecking a whole module}
https://ghc.haskell.org/trac/ghc/wiki/Commentary/Compiler/TypeChecker
-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NondecreasingIndentation #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
module TcRnDriver (
tcRnStmt, tcRnExpr, TcRnExprMode(..), tcRnType,
tcRnImportDecls,
tcRnLookupRdrName,
getModuleInterface,
tcRnDeclsi,
isGHCiMonad,
runTcInteractive, -- Used by GHC API clients (Trac #8878)
tcRnLookupName,
tcRnGetInfo,
tcRnModule, tcRnModuleTcRnM,
tcTopSrcDecls,
rnTopSrcDecls,
checkBootDecl, checkHiBootIface',
findExtraSigImports,
implicitRequirements,
checkUnitId,
mergeSignatures,
tcRnMergeSignatures,
instantiateSignature,
tcRnInstantiateSignature,
loadUnqualIfaces,
-- More private...
badReexportedBootThing,
checkBootDeclM,
missingBootThing,
) where
import GhcPrelude
import {-# SOURCE #-} TcSplice ( finishTH )
import RnSplice ( rnTopSpliceDecls, traceSplice, SpliceInfo(..) )
import IfaceEnv( externaliseName )
import TcHsType
import TcMatches
import Inst( deeplyInstantiate )
import TcUnify( checkConstraints )
import RnTypes
import RnExpr
import RnUtils ( HsDocContext(..) )
import RnFixity ( lookupFixityRn )
import MkId
import TidyPgm ( globaliseAndTidyId )
import TysWiredIn ( unitTy, mkListTy )
#if defined(GHCI)
import DynamicLoading ( loadPlugins )
import Plugins ( tcPlugin )
#endif
import DynFlags
import HsSyn
import IfaceSyn ( ShowSub(..), showToHeader )
import IfaceType( ShowForAllFlag(..) )
import PrelNames
import PrelInfo
import RdrName
import TcHsSyn
import TcExpr
import TcRnMonad
import TcRnExports
import TcEvidence
import qualified BooleanFormula as BF
import PprTyThing( pprTyThingInContext )
import MkIface( tyThingToIfaceDecl )
import Coercion( pprCoAxiom )
import CoreFVs( orphNamesOfFamInst )
import FamInst
import InstEnv
import FamInstEnv
import TcAnnotations
import TcBinds
import HeaderInfo ( mkPrelImports )
import TcDefaults
import TcEnv
import TcRules
import TcForeign
import TcInstDcls
import TcIface
import TcMType
import TcType
import TcSimplify
import TcTyClsDecls
import TcTypeable ( mkTypeableBinds )
import TcBackpack
import LoadIface
import RnNames
import RnEnv
import RnSource
import ErrUtils
import Id
import VarEnv
import Module
import UniqFM
import Name
import NameEnv
import NameSet
import Avail
import TyCon
import SrcLoc
import HscTypes
import ListSetOps
import Outputable
import ConLike
import DataCon
import Type
import Class
import BasicTypes hiding( SuccessFlag(..) )
import CoAxiom
import Annotations
import Data.List ( sortBy, sort )
import Data.Ord
import FastString
import Maybes
import Util
import Bag
import Inst (tcGetInsts)
import qualified GHC.LanguageExtensions as LangExt
import Data.Data ( Data )
import HsDumpAst
import qualified Data.Set as S
import Control.Monad
#include "HsVersions.h"
{-
************************************************************************
* *
Typecheck and rename a module
* *
************************************************************************
-}
-- | Top level entry point for typechecker and renamer
tcRnModule :: HscEnv
-> HscSource
-> Bool -- True <=> save renamed syntax
-> HsParsedModule
-> IO (Messages, Maybe TcGblEnv)
tcRnModule hsc_env hsc_src save_rn_syntax
parsedModule@HsParsedModule {hpm_module=L loc this_module}
| RealSrcSpan real_loc <- loc
= withTiming (pure dflags)
(text "Renamer/typechecker"<+>brackets (ppr this_mod))
(const ()) $
initTc hsc_env hsc_src save_rn_syntax this_mod real_loc $
withTcPlugins hsc_env $
tcRnModuleTcRnM hsc_env hsc_src parsedModule pair
| otherwise
= return ((emptyBag, unitBag err_msg), Nothing)
where
dflags = hsc_dflags hsc_env
err_msg = mkPlainErrMsg (hsc_dflags hsc_env) loc $
text "Module does not have a RealSrcSpan:" <+> ppr this_mod
this_pkg = thisPackage (hsc_dflags hsc_env)
pair :: (Module, SrcSpan)
pair@(this_mod,_)
| Just (L mod_loc mod) <- hsmodName this_module
= (mkModule this_pkg mod, mod_loc)
| otherwise -- 'module M where' is omitted
= (mAIN, srcLocSpan (srcSpanStart loc))
tcRnModuleTcRnM :: HscEnv
-> HscSource
-> HsParsedModule
-> (Module, SrcSpan)
-> TcRn TcGblEnv
-- Factored out separately from tcRnModule so that a Core plugin can
-- call the type checker directly
tcRnModuleTcRnM hsc_env hsc_src
(HsParsedModule {
hpm_module =
(L loc (HsModule maybe_mod export_ies
import_decls local_decls mod_deprec
maybe_doc_hdr)),
hpm_src_files = src_files
})
(this_mod, prel_imp_loc)
= setSrcSpan loc $
do { let { explicit_mod_hdr = isJust maybe_mod } ;
-- Load the hi-boot interface for this module, if any
-- We do this now so that the boot_names can be passed
-- to tcTyAndClassDecls, because the boot_names are
-- automatically considered to be loop breakers
tcg_env <- getGblEnv ;
boot_info <- tcHiBootIface hsc_src this_mod ;
setGblEnv (tcg_env { tcg_self_boot = boot_info }) $ do {
-- Deal with imports; first add implicit prelude
implicit_prelude <- xoptM LangExt.ImplicitPrelude;
let { prel_imports = mkPrelImports (moduleName this_mod) prel_imp_loc
implicit_prelude import_decls } ;
whenWOptM Opt_WarnImplicitPrelude $
when (notNull prel_imports) $
addWarn (Reason Opt_WarnImplicitPrelude) (implicitPreludeWarn) ;
-- TODO This is a little skeevy; maybe handle a bit more directly
let { simplifyImport (L _ idecl) = (fmap sl_fs (ideclPkgQual idecl), ideclName idecl) } ;
raw_sig_imports <- liftIO $ findExtraSigImports hsc_env hsc_src (moduleName this_mod) ;
raw_req_imports <- liftIO $
implicitRequirements hsc_env (map simplifyImport (prel_imports ++ import_decls)) ;
let { mkImport (Nothing, L _ mod_name) = noLoc $ (simpleImportDecl mod_name) {
ideclHiding = Just (False, noLoc [])
} ;
mkImport _ = panic "mkImport" } ;
let { all_imports = prel_imports ++ import_decls
++ map mkImport (raw_sig_imports ++ raw_req_imports) } ;
-- OK now finally rename the imports
tcg_env <- {-# SCC "tcRnImports" #-}
tcRnImports hsc_env all_imports ;
-- If the whole module is warned about or deprecated
-- (via mod_deprec) record that in tcg_warns. If we do thereby add
-- a WarnAll, it will override any subsequent deprecations added to tcg_warns
let { tcg_env1 = case mod_deprec of
Just (L _ txt) -> tcg_env { tcg_warns = WarnAll txt }
Nothing -> tcg_env
} ;
setGblEnv tcg_env1 $ do {
-- Rename and type check the declarations
traceRn "rn1a" empty ;
tcg_env <- if isHsBootOrSig hsc_src then
tcRnHsBootDecls hsc_src local_decls
else
{-# SCC "tcRnSrcDecls" #-}
tcRnSrcDecls explicit_mod_hdr local_decls ;
setGblEnv tcg_env $ do {
-- Process the export list
traceRn "rn4a: before exports" empty;
tcg_env <- tcRnExports explicit_mod_hdr export_ies tcg_env ;
traceRn "rn4b: after exports" empty ;
-- Check that main is exported (must be after tcRnExports)
checkMainExported tcg_env ;
-- Compare the hi-boot iface (if any) with the real thing
-- Must be done after processing the exports
tcg_env <- checkHiBootIface tcg_env boot_info ;
-- The new type env is already available to stuff slurped from
-- interface files, via TcEnv.setGlobalTypeEnv
-- It's important that this includes the stuff in checkHiBootIface,
-- because the latter might add new bindings for boot_dfuns,
-- which may be mentioned in imported unfoldings
-- Don't need to rename the Haddock documentation,
-- it's not parsed by GHC anymore.
tcg_env <- return (tcg_env { tcg_doc_hdr = maybe_doc_hdr }) ;
-- Report unused names
-- Do this /after/ type inference, so that when reporting
-- a function with no type signature we can give the
-- inferred type
reportUnusedNames export_ies tcg_env ;
-- add extra source files to tcg_dependent_files
addDependentFiles src_files ;
-- Dump output and return
tcDump tcg_env ;
return tcg_env
}}}}
implicitPreludeWarn :: SDoc
implicitPreludeWarn
= text "Module `Prelude' implicitly imported"
{-
************************************************************************
* *
Import declarations
* *
************************************************************************
-}
tcRnImports :: HscEnv -> [LImportDecl GhcPs] -> TcM TcGblEnv
tcRnImports hsc_env import_decls
= do { (rn_imports, rdr_env, imports, hpc_info) <- rnImports import_decls ;
; this_mod <- getModule
; let { dep_mods :: ModuleNameEnv (ModuleName, IsBootInterface)
; dep_mods = imp_dep_mods imports
-- We want instance declarations from all home-package
-- modules below this one, including boot modules, except
-- ourselves. The 'except ourselves' is so that we don't
-- get the instances from this module's hs-boot file. This
-- filtering also ensures that we don't see instances from
-- modules batch (@--make@) compiled before this one, but
-- which are not below this one.
; want_instances :: ModuleName -> Bool
; want_instances mod = mod `elemUFM` dep_mods
&& mod /= moduleName this_mod
; (home_insts, home_fam_insts) = hptInstances hsc_env
want_instances
} ;
-- Record boot-file info in the EPS, so that it's
-- visible to loadHiBootInterface in tcRnSrcDecls,
-- and any other incrementally-performed imports
; updateEps_ (\eps -> eps { eps_is_boot = dep_mods }) ;
-- Update the gbl env
; updGblEnv ( \ gbl ->
gbl {
tcg_rdr_env = tcg_rdr_env gbl `plusGlobalRdrEnv` rdr_env,
tcg_imports = tcg_imports gbl `plusImportAvails` imports,
tcg_rn_imports = rn_imports,
tcg_inst_env = extendInstEnvList (tcg_inst_env gbl) home_insts,
tcg_fam_inst_env = extendFamInstEnvList (tcg_fam_inst_env gbl)
home_fam_insts,
tcg_hpc = hpc_info
}) $ do {
; traceRn "rn1" (ppr (imp_dep_mods imports))
-- Fail if there are any errors so far
-- The error printing (if needed) takes advantage
-- of the tcg_env we have now set
-- ; traceIf (text "rdr_env: " <+> ppr rdr_env)
; failIfErrsM
-- Load any orphan-module (including orphan family
-- instance-module) interfaces, so that their rules and
-- instance decls will be found. But filter out a
-- self hs-boot: these instances will be checked when
-- we define them locally.
-- (We don't need to load non-orphan family instance
-- modules until we either try to use the instances they
-- define, or define our own family instances, at which
-- point we need to check them for consistency.)
; loadModuleInterfaces (text "Loading orphan modules")
(filter (/= this_mod) (imp_orphs imports))
-- Check type-family consistency between imports.
-- See Note [The type family instance consistency story]
; traceRn "rn1: checking family instance consistency {" empty
; let { dir_imp_mods = moduleEnvKeys
. imp_mods
$ imports }
; checkFamInstConsistency dir_imp_mods
; traceRn "rn1: } checking family instance consistency" empty
; getGblEnv } }
{-
************************************************************************
* *
Type-checking the top level of a module
* *
************************************************************************
-}
tcRnSrcDecls :: Bool -- False => no 'module M(..) where' header at all
-> [LHsDecl GhcPs] -- Declarations
-> TcM TcGblEnv
tcRnSrcDecls explicit_mod_hdr decls
= do { -- Do all the declarations
; ((tcg_env, tcl_env), lie) <- captureTopConstraints $
do { (tcg_env, tcl_env) <- tc_rn_src_decls decls
-- Check for the 'main' declaration
-- Must do this inside the captureTopConstraints
; tcg_env <- setEnvs (tcg_env, tcl_env) $
checkMain explicit_mod_hdr
; return (tcg_env, tcl_env) }
; setEnvs (tcg_env, tcl_env) $ do {
-- Simplify constraints
--
-- We do this after checkMain, so that we use the type info
-- that checkMain adds
--
-- We do it with both global and local env in scope:
-- * the global env exposes the instances to simplifyTop
-- * the local env exposes the local Ids to simplifyTop,
-- so that we get better error messages (monomorphism restriction)
; new_ev_binds <- {-# SCC "simplifyTop" #-}
simplifyTop lie
-- Emit Typeable bindings
; tcg_env <- mkTypeableBinds
-- Finalizers must run after constraints are simplified, or some types
-- might not be complete when using reify (see #12777).
; (tcg_env, tcl_env) <- setGblEnv tcg_env run_th_modfinalizers
; setEnvs (tcg_env, tcl_env) $ do {
; finishTH
; traceTc "Tc9" empty
; failIfErrsM -- Don't zonk if there have been errors
-- It's a waste of time; and we may get debug warnings
-- about strangely-typed TyCons!
; traceTc "Tc10" empty
-- Zonk the final code. This must be done last.
-- Even simplifyTop may do some unification.
-- This pass also warns about missing type signatures
; let { TcGblEnv { tcg_type_env = type_env,
tcg_binds = binds,
tcg_ev_binds = cur_ev_binds,
tcg_imp_specs = imp_specs,
tcg_rules = rules,
tcg_vects = vects,
tcg_fords = fords } = tcg_env
; all_ev_binds = cur_ev_binds `unionBags` new_ev_binds } ;
; (bind_env, ev_binds', binds', fords', imp_specs', rules', vects')
<- {-# SCC "zonkTopDecls" #-}
zonkTopDecls all_ev_binds binds rules vects
imp_specs fords ;
; traceTc "Tc11" empty
; let { final_type_env = plusTypeEnv type_env bind_env
; tcg_env' = tcg_env { tcg_binds = binds',
tcg_ev_binds = ev_binds',
tcg_imp_specs = imp_specs',
tcg_rules = rules',
tcg_vects = vects',
tcg_fords = fords' } } ;
; setGlobalTypeEnv tcg_env' final_type_env
}
} }
-- | Runs TH finalizers and renames and typechecks the top-level declarations
-- that they could introduce.
run_th_modfinalizers :: TcM (TcGblEnv, TcLclEnv)
run_th_modfinalizers = do
th_modfinalizers_var <- fmap tcg_th_modfinalizers getGblEnv
th_modfinalizers <- readTcRef th_modfinalizers_var
if null th_modfinalizers
then getEnvs
else do
writeTcRef th_modfinalizers_var []
(envs, lie) <- captureTopConstraints $ do
sequence_ th_modfinalizers
-- Finalizers can add top-level declarations with addTopDecls.
tc_rn_src_decls []
setEnvs envs $ do
-- Subsequent rounds of finalizers run after any new constraints are
-- simplified, or some types might not be complete when using reify
-- (see #12777).
new_ev_binds <- {-# SCC "simplifyTop2" #-}
simplifyTop lie
updGblEnv (\tcg_env ->
tcg_env { tcg_ev_binds = tcg_ev_binds tcg_env `unionBags` new_ev_binds }
)
-- addTopDecls can add declarations which add new finalizers.
run_th_modfinalizers
tc_rn_src_decls :: [LHsDecl GhcPs]
-> TcM (TcGblEnv, TcLclEnv)
-- Loops around dealing with each top level inter-splice group
-- in turn, until it's dealt with the entire module
tc_rn_src_decls ds
= {-# SCC "tc_rn_src_decls" #-}
do { (first_group, group_tail) <- findSplice ds
-- If ds is [] we get ([], Nothing)
-- Deal with decls up to, but not including, the first splice
; (tcg_env, rn_decls) <- rnTopSrcDecls first_group
-- rnTopSrcDecls fails if there are any errors
-- Get TH-generated top-level declarations and make sure they don't
-- contain any splices since we don't handle that at the moment
--
-- The plumbing here is a bit odd: see Trac #10853
; th_topdecls_var <- fmap tcg_th_topdecls getGblEnv
; th_ds <- readTcRef th_topdecls_var
; writeTcRef th_topdecls_var []
; (tcg_env, rn_decls) <-
if null th_ds
then return (tcg_env, rn_decls)
else do { (th_group, th_group_tail) <- findSplice th_ds
; case th_group_tail of
{ Nothing -> return () ;
; Just (SpliceDecl (L loc _) _, _)
-> setSrcSpan loc $
addErr (text "Declaration splices are not permitted inside top-level declarations added with addTopDecls")
} ;
-- Rename TH-generated top-level declarations
; (tcg_env, th_rn_decls) <- setGblEnv tcg_env $
rnTopSrcDecls th_group
-- Dump generated top-level declarations
; let msg = "top-level declarations added with addTopDecls"
; traceSplice $ SpliceInfo { spliceDescription = msg
, spliceIsDecl = True
, spliceSource = Nothing
, spliceGenerated = ppr th_rn_decls }
; return (tcg_env, appendGroups rn_decls th_rn_decls)
}
-- Type check all declarations
; (tcg_env, tcl_env) <- setGblEnv tcg_env $
tcTopSrcDecls rn_decls
-- If there is no splice, we're nearly done
; setEnvs (tcg_env, tcl_env) $
case group_tail of
{ Nothing -> return (tcg_env, tcl_env)
-- If there's a splice, we must carry on
; Just (SpliceDecl (L loc splice) _, rest_ds) ->
do { recordTopLevelSpliceLoc loc
-- Rename the splice expression, and get its supporting decls
; (spliced_decls, splice_fvs) <- checkNoErrs (rnTopSpliceDecls
splice)
-- Glue them on the front of the remaining decls and loop
; setGblEnv (tcg_env `addTcgDUs` usesOnly splice_fvs) $
tc_rn_src_decls (spliced_decls ++ rest_ds)
}
}
}
{-
************************************************************************
* *
Compiling hs-boot source files, and
comparing the hi-boot interface with the real thing
* *
************************************************************************
-}
tcRnHsBootDecls :: HscSource -> [LHsDecl GhcPs] -> TcM TcGblEnv
tcRnHsBootDecls hsc_src decls
= do { (first_group, group_tail) <- findSplice decls
-- Rename the declarations
; (tcg_env, HsGroup { hs_tyclds = tycl_decls
, hs_derivds = deriv_decls
, hs_fords = for_decls
, hs_defds = def_decls
, hs_ruleds = rule_decls
, hs_vects = vect_decls
, hs_annds = _
, hs_valds = ValBindsOut val_binds val_sigs })
<- rnTopSrcDecls first_group
-- The empty list is for extra dependencies coming from .hs-boot files
-- See Note [Extra dependencies from .hs-boot files] in RnSource
; (gbl_env, lie) <- captureTopConstraints $ setGblEnv tcg_env $ do {
-- Check for illegal declarations
; case group_tail of
Just (SpliceDecl d _, _) -> badBootDecl hsc_src "splice" d
Nothing -> return ()
; mapM_ (badBootDecl hsc_src "foreign") for_decls
; mapM_ (badBootDecl hsc_src "default") def_decls
; mapM_ (badBootDecl hsc_src "rule") rule_decls
; mapM_ (badBootDecl hsc_src "vect") vect_decls
-- Typecheck type/class/instance decls
; traceTc "Tc2 (boot)" empty
; (tcg_env, inst_infos, _deriv_binds)
<- tcTyClsInstDecls tycl_decls deriv_decls val_binds
; setGblEnv tcg_env $ do {
-- Emit Typeable bindings
; tcg_env <- mkTypeableBinds
; setGblEnv tcg_env $ do {
-- Typecheck value declarations
; traceTc "Tc5" empty
; val_ids <- tcHsBootSigs val_binds val_sigs
-- Wrap up
-- No simplification or zonking to do
; traceTc "Tc7a" empty
; gbl_env <- getGblEnv
-- Make the final type-env
-- Include the dfun_ids so that their type sigs
-- are written into the interface file.
; let { type_env0 = tcg_type_env gbl_env
; type_env1 = extendTypeEnvWithIds type_env0 val_ids
; type_env2 = extendTypeEnvWithIds type_env1 dfun_ids
; dfun_ids = map iDFunId inst_infos
}
; setGlobalTypeEnv gbl_env type_env2
}}}
; traceTc "boot" (ppr lie); return gbl_env }
badBootDecl :: HscSource -> String -> Located decl -> TcM ()
badBootDecl hsc_src what (L loc _)
= addErrAt loc (char 'A' <+> text what
<+> text "declaration is not (currently) allowed in a"
<+> (case hsc_src of
HsBootFile -> text "hs-boot"
HsigFile -> text "hsig"
_ -> panic "badBootDecl: should be an hsig or hs-boot file")
<+> text "file")
{-
Once we've typechecked the body of the module, we want to compare what
we've found (gathered in a TypeEnv) with the hi-boot details (if any).
-}
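-- A sketch of the kind of mismatch this comparison catches (the module and
-- constructor names here are invented for illustration): if A.hs-boot says
--
-- > data T = MkT Int
--
-- while A.hs defines
--
-- > data T = MkT Bool
--
-- then the boot TyThing and the one in the freshly built TypeEnv disagree,
-- and checkBootDeclM (below) reports the conflicting definitions.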
checkHiBootIface :: TcGblEnv -> SelfBootInfo -> TcM TcGblEnv
-- Compare the hi-boot file for this module (if there is one)
-- with the type environment we've just come up with
-- In the common case where there is no hi-boot file, the list
-- of boot_names is empty.
checkHiBootIface tcg_env boot_info
| NoSelfBoot <- boot_info -- Common case
= return tcg_env
| HsBootFile <- tcg_src tcg_env -- Current module is already a hs-boot file!
= return tcg_env
| SelfBoot { sb_mds = boot_details } <- boot_info
, TcGblEnv { tcg_binds = binds
, tcg_insts = local_insts
, tcg_type_env = local_type_env
, tcg_exports = local_exports } <- tcg_env
= do { -- This code is tricky, see Note [DFun knot-tying]
; let boot_dfuns = filter isDFunId (typeEnvIds (md_types boot_details))
type_env' = extendTypeEnvWithIds local_type_env boot_dfuns
-- Why the seq? Without, we will put a TypeEnv thunk in
-- tcg_type_env_var. That thunk will eventually get
-- forced if we are typechecking interfaces, but that
-- is no good if we are trying to typecheck the very
-- DFun we were going to put in.
-- TODO: Maybe setGlobalTypeEnv should be strict.
; tcg_env <- type_env' `seq` setGlobalTypeEnv tcg_env type_env'
; dfun_prs <- checkHiBootIface' local_insts type_env'
local_exports boot_details
; let dfun_binds = listToBag [ mkVarBind boot_dfun (nlHsVar dfun)
| (boot_dfun, dfun) <- dfun_prs ]
; return tcg_env { tcg_binds = binds `unionBags` dfun_binds } }
| otherwise = panic "checkHiBootIface: unreachable code"
-- Note [DFun knot-tying]
-- ~~~~~~~~~~~~~~~~~~~~~~
-- The 'SelfBootInfo' that is fed into 'checkHiBootIface' comes
-- from typechecking the hi-boot file that we are presently
-- implementing. Suppose we are typechecking the module A:
-- when we typecheck the hi-boot file, whenever we see an
-- identifier A.T, we knot-tie this identifier to the
-- *local* type environment (via if_rec_types.) The contract
-- then is that we don't *look* at 'SelfBootInfo' until
-- we've finished typechecking the module and updated the
-- type environment with the new tycons and ids.
--
-- This mostly works well, but there is one problem: DFuns!
-- In general, it's not possible to know a priori what an
-- hs-boot file named a DFun (see Note [DFun impedance matching]),
-- so we look at the ClsInsts from the boot file to figure out
-- what DFuns to add to the type environment. But we're not
-- allowed to poke the DFuns of the ClsInsts in the SelfBootInfo
-- until we've added the DFuns to the type environment. A
-- Gordian knot!
--
-- We cut the knot by a little trick: we first *unconditionally*
-- add all of the boot-declared DFuns to the type environment
-- (so that knot tying works, see Trac #4003), without the
-- actual bindings for them. Then, we compute the impedance
-- matching bindings, and add them to the environment.
--
-- There is one subtlety to doing this: we have to get the
-- DFuns from md_types, not md_insts, even though this involves
-- filtering a bunch of TyThings we don't care about. The
-- reason is only the TypeEnv in md_types has the actual
-- Id we want to add to the environment; the DFun fields
-- in md_insts are typechecking thunks that will attempt to
-- go through if_rec_types to lookup the real Id... but
-- that's what we're trying to setup right now.
checkHiBootIface' :: [ClsInst] -> TypeEnv -> [AvailInfo]
-> ModDetails -> TcM [(Id, Id)]
-- Variant which doesn't require a full TcGblEnv; you could get the
-- local components from another ModDetails.
--
-- Note [DFun impedance matching]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- We return a list of "impedance-matching" bindings for the dfuns
-- defined in the hs-boot file, such as
-- $fxEqT = $fEqT
-- We need these because the module and hi-boot file might differ in
-- the name they chose for the dfun: the name of a dfun is not
-- uniquely determined by its type; there might be multiple dfuns
-- which, individually, would map to the same name (in which case
-- we have to disambiguate them.) There's no way for the hi file
-- to know exactly what disambiguation to use... without looking
-- at the hi-boot file itself.
--
-- In fact, the names will always differ because we always pick names
-- prefixed with "$fx" for boot dfuns, and "$f" for real dfuns
-- (so that this impedance matching is always possible).
checkHiBootIface'
local_insts local_type_env local_exports
(ModDetails { md_insts = boot_insts, md_fam_insts = boot_fam_insts,
md_types = boot_type_env, md_exports = boot_exports })
= do { traceTc "checkHiBootIface" $ vcat
[ ppr boot_type_env, ppr boot_insts, ppr boot_exports]
-- Check the exports of the boot module, one by one
; mapM_ check_export boot_exports
-- Check for no family instances
; unless (null boot_fam_insts) $
panic ("TcRnDriver.checkHiBootIface: Cannot handle family " ++
"instances in boot files yet...")
-- FIXME: Why? The actual comparison is not hard, but what would
-- be the equivalent to the dfun bindings returned for class
-- instances? We can't easily equate tycons...
-- Check instance declarations
-- and generate an impedance-matching binding
; mb_dfun_prs <- mapM check_inst boot_insts
; failIfErrsM
; return (catMaybes mb_dfun_prs) }
where
check_export boot_avail -- boot_avail is exported by the boot iface
| name `elem` dfun_names = return ()
| isWiredInName name = return () -- No checking for wired-in names. In particular,
-- 'error' is handled by a rather gross hack
-- (see comments in GHC.Err.hs-boot)
-- Check that the actual module exports the same thing
| not (null missing_names)
= addErrAt (nameSrcSpan (head missing_names))
(missingBootThing True (head missing_names) "exported by")
-- If the boot module does not *define* the thing, we are done
-- (it simply re-exports it, and names match, so nothing further to do)
| isNothing mb_boot_thing = return ()
-- Check that the actual module also defines the thing, and
-- then compare the definitions
| Just real_thing <- lookupTypeEnv local_type_env name,
Just boot_thing <- mb_boot_thing
= checkBootDeclM True boot_thing real_thing
| otherwise
= addErrTc (missingBootThing True name "defined in")
where
name = availName boot_avail
mb_boot_thing = lookupTypeEnv boot_type_env name
missing_names = case lookupNameEnv local_export_env name of
Nothing -> [name]
Just avail -> availNames boot_avail `minusList` availNames avail
dfun_names = map getName boot_insts
local_export_env :: NameEnv AvailInfo
local_export_env = availsToNameEnv local_exports
check_inst :: ClsInst -> TcM (Maybe (Id, Id))
-- Returns a pair of the boot dfun in terms of the equivalent
-- real dfun. Delicate (like checkBootDecl) because it depends
-- on the types lining up precisely even to the ordering of
-- the type variables in the foralls.
check_inst boot_inst
= case [dfun | inst <- local_insts,
let dfun = instanceDFunId inst,
idType dfun `eqType` boot_dfun_ty ] of
[] -> do { traceTc "check_inst" $ vcat
[ text "local_insts" <+> vcat (map (ppr . idType . instanceDFunId) local_insts)
, text "boot_inst" <+> ppr boot_inst
, text "boot_dfun_ty" <+> ppr boot_dfun_ty
]
; addErrTc (instMisMatch True boot_inst)
; return Nothing }
(dfun:_) -> return (Just (local_boot_dfun, dfun))
where
local_boot_dfun = Id.mkExportedVanillaId boot_dfun_name (idType dfun)
-- Name from the /boot-file/ ClsInst, but type from the dfun
-- defined in /this module/. That ensures that the TyCon etc
-- inside the type are the ones defined in this module, not
-- the ones gotten from the hi-boot file, which may have
-- a lot less info (Trac #T8743, comment:10).
where
boot_dfun = instanceDFunId boot_inst
boot_dfun_ty = idType boot_dfun
boot_dfun_name = idName boot_dfun
-- In general, to perform these checks we have to
-- compare the TyThing from the .hi-boot file to the TyThing
-- in the current source file. We must be careful to allow alpha-renaming
-- where appropriate, and also the boot declaration is allowed to omit
-- constructors and class methods.
--
-- See rnfail055 for a good test of this stuff.
-- | Compares two things for equivalence between boot-file and normal code,
-- reporting an error if they don't match up.
checkBootDeclM :: Bool -- ^ True <=> an hs-boot file (could also be a sig)
-> TyThing -> TyThing -> TcM ()
checkBootDeclM is_boot boot_thing real_thing
= whenIsJust (checkBootDecl is_boot boot_thing real_thing) $ \ err ->
addErrAt span
(bootMisMatch is_boot err real_thing boot_thing)
where
-- Here we use the span of the boot thing or, if it doesn't have a sensible
-- span, that of the real thing.
span
| let span = nameSrcSpan (getName boot_thing)
, isGoodSrcSpan span
= span
| otherwise
= nameSrcSpan (getName real_thing)
-- | Compares the two things for equivalence between boot-file and normal
-- code. Returns @Nothing@ on success or @Just "some helpful info for user"@
-- failure. If the difference will be apparent to the user, @Just empty@ is
-- perfectly suitable.
checkBootDecl :: Bool -> TyThing -> TyThing -> Maybe SDoc
checkBootDecl _ (AnId id1) (AnId id2)
= ASSERT(id1 == id2)
check (idType id1 `eqType` idType id2)
(text "The two types are different")
checkBootDecl is_boot (ATyCon tc1) (ATyCon tc2)
= checkBootTyCon is_boot tc1 tc2
checkBootDecl _ (AConLike (RealDataCon dc1)) (AConLike (RealDataCon _))
= pprPanic "checkBootDecl" (ppr dc1)
checkBootDecl _ _ _ = Just empty -- probably shouldn't happen
-- | Combines two potential error messages
andThenCheck :: Maybe SDoc -> Maybe SDoc -> Maybe SDoc
Nothing `andThenCheck` msg = msg
msg `andThenCheck` Nothing = msg
Just d1 `andThenCheck` Just d2 = Just (d1 $$ d2)
infixr 0 `andThenCheck`
-- | If the test in the first parameter is True, succeed with @Nothing@;
-- otherwise, return the provided check
checkUnless :: Bool -> Maybe SDoc -> Maybe SDoc
checkUnless True _ = Nothing
checkUnless False k = k
-- | Run the check provided for every pair of elements in the lists.
-- The provided SDoc should name the element type, in the plural.
checkListBy :: (a -> a -> Maybe SDoc) -> [a] -> [a] -> SDoc
-> Maybe SDoc
checkListBy check_fun as bs whats = go [] as bs
where
herald = text "The" <+> whats <+> text "do not match"
go [] [] [] = Nothing
go docs [] [] = Just (hang (herald <> colon) 2 (vcat $ reverse docs))
go docs (x:xs) (y:ys) = case check_fun x y of
Just doc -> go (doc:docs) xs ys
Nothing -> go docs xs ys
go _ _ _ = Just (hang (herald <> colon)
2 (text "There are different numbers of" <+> whats))
-- | If the test in the first parameter is True, succeed with @Nothing@;
-- otherwise, fail with the given SDoc.
check :: Bool -> SDoc -> Maybe SDoc
check True _ = Nothing
check False doc = Just doc
-- | A more perspicuous name for @Nothing@, for @checkBootDecl@ and friends.
checkSuccess :: Maybe SDoc
checkSuccess = Nothing
----------------
checkBootTyCon :: Bool -> TyCon -> TyCon -> Maybe SDoc
checkBootTyCon is_boot tc1 tc2
| not (eqType (tyConKind tc1) (tyConKind tc2))
= Just $ text "The types have different kinds" -- First off, check the kind
| Just c1 <- tyConClass_maybe tc1
, Just c2 <- tyConClass_maybe tc2
, let (clas_tvs1, clas_fds1, sc_theta1, _, ats1, op_stuff1)
= classExtraBigSig c1
(clas_tvs2, clas_fds2, sc_theta2, _, ats2, op_stuff2)
= classExtraBigSig c2
, Just env <- eqVarBndrs emptyRnEnv2 clas_tvs1 clas_tvs2
= let
eqSig (id1, def_meth1) (id2, def_meth2)
= check (name1 == name2)
(text "The names" <+> pname1 <+> text "and" <+> pname2 <+>
text "are different") `andThenCheck`
check (eqTypeX env op_ty1 op_ty2)
(text "The types of" <+> pname1 <+>
text "are different") `andThenCheck`
if is_boot
then check (eqMaybeBy eqDM def_meth1 def_meth2)
(text "The default methods associated with" <+> pname1 <+>
text "are different")
else check (subDM op_ty1 def_meth1 def_meth2)
(text "The default methods associated with" <+> pname1 <+>
text "are not compatible")
where
name1 = idName id1
name2 = idName id2
pname1 = quotes (ppr name1)
pname2 = quotes (ppr name2)
(_, rho_ty1) = splitForAllTys (idType id1)
op_ty1 = funResultTy rho_ty1
(_, rho_ty2) = splitForAllTys (idType id2)
op_ty2 = funResultTy rho_ty2
eqAT (ATI tc1 def_ats1) (ATI tc2 def_ats2)
= checkBootTyCon is_boot tc1 tc2 `andThenCheck`
check (eqATDef def_ats1 def_ats2)
(text "The associated type defaults differ")
eqDM (_, VanillaDM) (_, VanillaDM) = True
eqDM (_, GenericDM t1) (_, GenericDM t2) = eqTypeX env t1 t2
eqDM _ _ = False
-- NB: first argument is from hsig, second is from real impl.
-- Order of pattern matching matters.
subDM _ Nothing _ = True
subDM _ _ Nothing = False
-- If the hsig wrote:
--
-- f :: a -> a
-- default f :: a -> a
--
-- this should be validly implementable using an old-fashioned
-- vanilla default method.
subDM t1 (Just (_, GenericDM t2)) (Just (_, VanillaDM))
= eqTypeX env t1 t2
-- This case can occur when merging signatures
subDM t1 (Just (_, VanillaDM)) (Just (_, GenericDM t2))
= eqTypeX env t1 t2
subDM _ (Just (_, VanillaDM)) (Just (_, VanillaDM)) = True
subDM _ (Just (_, GenericDM t1)) (Just (_, GenericDM t2))
= eqTypeX env t1 t2
-- Ignore the location of the defaults
eqATDef Nothing Nothing = True
eqATDef (Just (ty1, _loc1)) (Just (ty2, _loc2)) = eqTypeX env ty1 ty2
eqATDef _ _ = False
eqFD (as1,bs1) (as2,bs2) =
eqListBy (eqTypeX env) (mkTyVarTys as1) (mkTyVarTys as2) &&
eqListBy (eqTypeX env) (mkTyVarTys bs1) (mkTyVarTys bs2)
in
checkRoles roles1 roles2 `andThenCheck`
-- Checks kind of class
check (eqListBy eqFD clas_fds1 clas_fds2)
(text "The functional dependencies do not match") `andThenCheck`
checkUnless (isAbstractTyCon tc1) $
check (eqListBy (eqTypeX env) sc_theta1 sc_theta2)
(text "The class constraints do not match") `andThenCheck`
checkListBy eqSig op_stuff1 op_stuff2 (text "methods") `andThenCheck`
checkListBy eqAT ats1 ats2 (text "associated types") `andThenCheck`
check (classMinimalDef c1 `BF.implies` classMinimalDef c2)
(text "The MINIMAL pragmas are not compatible")
| Just syn_rhs1 <- synTyConRhs_maybe tc1
, Just syn_rhs2 <- synTyConRhs_maybe tc2
, Just env <- eqVarBndrs emptyRnEnv2 (tyConTyVars tc1) (tyConTyVars tc2)
= ASSERT(tc1 == tc2)
checkRoles roles1 roles2 `andThenCheck`
check (eqTypeX env syn_rhs1 syn_rhs2) empty -- nothing interesting to say
-- This allows abstract 'data T a' to be implemented using 'type T = ...'
-- and abstract 'class K a' to be implemented using 'type K = ...'
-- See Note [Synonyms implement abstract data]
| not is_boot -- don't support for hs-boot yet
, isAbstractTyCon tc1
, Just (tvs, ty) <- synTyConDefn_maybe tc2
, Just (tc2', args) <- tcSplitTyConApp_maybe ty
= checkSynAbsData tvs ty tc2' args
-- TODO: When it's a synonym implementing a class, we really
-- should check if the fundeps are satisfied, but
-- there is not an obvious way to do this for a constraint synonym.
-- So for now, let it all through (it won't cause segfaults, anyway).
-- Tracked at #12704.
| Just fam_flav1 <- famTyConFlav_maybe tc1
, Just fam_flav2 <- famTyConFlav_maybe tc2
= ASSERT(tc1 == tc2)
let eqFamFlav OpenSynFamilyTyCon OpenSynFamilyTyCon = True
eqFamFlav (DataFamilyTyCon {}) (DataFamilyTyCon {}) = True
-- This case only happens for hsig merging:
eqFamFlav AbstractClosedSynFamilyTyCon AbstractClosedSynFamilyTyCon = True
eqFamFlav AbstractClosedSynFamilyTyCon (ClosedSynFamilyTyCon {}) = True
eqFamFlav (ClosedSynFamilyTyCon {}) AbstractClosedSynFamilyTyCon = True
eqFamFlav (ClosedSynFamilyTyCon ax1) (ClosedSynFamilyTyCon ax2)
= eqClosedFamilyAx ax1 ax2
eqFamFlav (BuiltInSynFamTyCon {}) (BuiltInSynFamTyCon {}) = tc1 == tc2
eqFamFlav _ _ = False
injInfo1 = tyConInjectivityInfo tc1
injInfo2 = tyConInjectivityInfo tc2
in
-- check equality of roles, family flavours and injectivity annotations
-- (NB: Type family roles are always nominal. But the check is
-- harmless enough.)
checkRoles roles1 roles2 `andThenCheck`
check (eqFamFlav fam_flav1 fam_flav2)
(whenPprDebug $
text "Family flavours" <+> ppr fam_flav1 <+> text "and" <+> ppr fam_flav2 <+>
text "do not match") `andThenCheck`
check (injInfo1 == injInfo2) (text "Injectivities do not match")
| isAlgTyCon tc1 && isAlgTyCon tc2
, Just env <- eqVarBndrs emptyRnEnv2 (tyConTyVars tc1) (tyConTyVars tc2)
= ASSERT(tc1 == tc2)
checkRoles roles1 roles2 `andThenCheck`
check (eqListBy (eqTypeX env)
(tyConStupidTheta tc1) (tyConStupidTheta tc2))
(text "The datatype contexts do not match") `andThenCheck`
eqAlgRhs tc1 (algTyConRhs tc1) (algTyConRhs tc2)
| otherwise = Just empty -- two very different types -- should be obvious
where
roles1 = tyConRoles tc1 -- the abstract one
roles2 = tyConRoles tc2
roles_msg = text "The roles do not match." $$
(text "Roles on abstract types default to" <+>
quotes (text "representational") <+> text "in boot files.")
roles_subtype_msg = text "The roles are not compatible:" $$
text "Main module:" <+> ppr roles2 $$
text "Hsig file:" <+> ppr roles1
checkRoles r1 r2
| is_boot || isInjectiveTyCon tc1 Representational -- See Note [Role subtyping]
= check (r1 == r2) roles_msg
| otherwise = check (r2 `rolesSubtypeOf` r1) roles_subtype_msg
-- Note [Role subtyping]
-- ~~~~~~~~~~~~~~~~~~~~~
-- In the current formulation of roles, role subtyping is only OK if the
-- "abstract" TyCon was not representationally injective. Among the most
-- notable examples of non representationally injective TyCons are abstract
-- data, which can be implemented via newtypes (which are not
-- representationally injective). The key example is
-- in this example from #13140:
--
-- -- In an hsig file
-- data T a -- abstract!
-- type role T nominal
--
-- -- Elsewhere
-- foo :: Coercible (T a) (T b) => a -> b
-- foo x = x
--
-- We must NOT allow foo to typecheck, because instantiating
-- T with a concrete data type at phantom role would cause
-- Coercible (T a) (T b) to be provable. Fortunately, if T is not
-- representationally injective, we cannot make the inference that a ~N b if
-- T a ~R T b.
--
-- Unconditional role subtyping would be possible if we setup
-- an extra set of roles saying when we can project out coercions
-- (we call these proj-roles); then it would NOT be valid to instantiate T
-- with a data type at phantom since the proj-role subtyping check
-- would fail. See #13140 for more details.
--
-- One consequence of this is we get no role subtyping for non-abstract
-- data types in signatures. Suppose you have:
--
-- signature A where
-- type role T nominal
-- data T a = MkT
--
-- If you write this, we'll treat T as injective, and make inferences
-- like T a ~R T b ==> a ~N b (mkNthCo). But if we can
-- subsequently replace T with one at phantom role, we would then be able to
-- infer things like T Int ~R T Bool which is bad news.
--
-- We could allow role subtyping here if we didn't treat *any* data types
-- defined in signatures as injective. But this would be a bit surprising:
-- replacing a data type in a module with one in a signature could cause
-- your code to stop typechecking (whereas if you made the type abstract,
-- it is more understandable that the type checker knows less).
--
-- It would have been best if this was purely a question of defaults
-- (i.e., a user could explicitly ask for one behavior or another) but
-- the current role system isn't expressive enough to do this.
-- Having explicit proj-roles would solve this problem.
rolesSubtypeOf [] [] = True
-- NB: this relation is the OPPOSITE of the subroling relation
rolesSubtypeOf (x:xs) (y:ys) = x >= y && rolesSubtypeOf xs ys
rolesSubtypeOf _ _ = False
-- Note [Synonyms implement abstract data]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- An abstract data type or class can be implemented using a type synonym,
-- but ONLY if the type synonym is nullary and has no type family
-- applications. This arises from two properties of skolem abstract data:
--
-- For any T (with some number of parameters),
--
-- 1. T is a valid type (it is "curryable"), and
--
-- 2. T is valid in an instance head (no type families).
--
-- See also 'HowAbstract' and Note [Skolem abstract data].
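-- To make this concrete (the signature and module below are illustrative
-- only, and F is assumed to be a type family):
--
-- > signature H where          module H where
-- >   data T                     type T = Int     -- accepted: nullary, no families
--
-- > signature H where          module H where
-- >   data T a                   type T a = F a   -- rejected: parameterized and
-- >                                               -- mentions a type family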
-- | Given @type T tvs = ty@, where @ty@ decomposes into @tc2' args@,
-- check that this synonym is an acceptable implementation of @tc1@.
-- See Note [Synonyms implement abstract data]
checkSynAbsData :: [TyVar] -> Type -> TyCon -> [Type] -> Maybe SDoc
checkSynAbsData tvs ty tc2' args =
check (null (tcTyFamInsts ty))
(text "Illegal type family application in implementation of abstract data.")
`andThenCheck`
check (null tvs)
(text "Illegal parameterized type synonym in implementation of abstract data." $$
text "(Try eta reducing your type synonym so that it is nullary.)")
`andThenCheck`
-- Don't report roles errors unless the type synonym is nullary
checkUnless (not (null tvs)) $
ASSERT( null roles2 )
-- If we have something like:
--
-- signature H where
-- data T a
-- module H where
-- data K a b = ...
-- type T = K Int
--
-- we need to drop the first role of K when comparing!
checkRoles roles1 (drop (length args) (tyConRoles tc2'))
{-
-- Hypothetically, if we were to allow non-nullary type synonyms, here
-- is how you would check the roles
if length tvs == length roles1
then checkRoles roles1 roles2
else case tcSplitTyConApp_maybe ty of
Just (tc2', args) ->
checkRoles roles1 (drop (length args) (tyConRoles tc2') ++ roles2)
Nothing -> Just roles_msg
-}
eqAlgRhs _ AbstractTyCon _rhs2
= checkSuccess -- rhs2 is guaranteed to be injective, since it's an AlgTyCon
eqAlgRhs _ tc1@DataTyCon{} tc2@DataTyCon{} =
checkListBy eqCon (data_cons tc1) (data_cons tc2) (text "constructors")
eqAlgRhs _ tc1@NewTyCon{} tc2@NewTyCon{} =
eqCon (data_con tc1) (data_con tc2)
eqAlgRhs _ _ _ = Just (text "Cannot match a" <+> quotes (text "data") <+>
text "definition with a" <+> quotes (text "newtype") <+>
text "definition")
eqCon c1 c2
= check (name1 == name2)
(text "The names" <+> pname1 <+> text "and" <+> pname2 <+>
text "differ") `andThenCheck`
check (dataConIsInfix c1 == dataConIsInfix c2)
(text "The fixities of" <+> pname1 <+>
text "differ") `andThenCheck`
check (eqListBy eqHsBang (dataConImplBangs c1) (dataConImplBangs c2))
(text "The strictness annotations for" <+> pname1 <+>
text "differ") `andThenCheck`
check (map flSelector (dataConFieldLabels c1) == map flSelector (dataConFieldLabels c2))
(text "The record label lists for" <+> pname1 <+>
text "differ") `andThenCheck`
check (eqType (dataConUserType c1) (dataConUserType c2))
(text "The types for" <+> pname1 <+> text "differ")
where
name1 = dataConName c1
name2 = dataConName c2
pname1 = quotes (ppr name1)
pname2 = quotes (ppr name2)
eqClosedFamilyAx Nothing Nothing = True
eqClosedFamilyAx Nothing (Just _) = False
eqClosedFamilyAx (Just _) Nothing = False
eqClosedFamilyAx (Just (CoAxiom { co_ax_branches = branches1 }))
(Just (CoAxiom { co_ax_branches = branches2 }))
= numBranches branches1 == numBranches branches2
&& (and $ zipWith eqClosedFamilyBranch branch_list1 branch_list2)
where
branch_list1 = fromBranches branches1
branch_list2 = fromBranches branches2
eqClosedFamilyBranch (CoAxBranch { cab_tvs = tvs1, cab_cvs = cvs1
, cab_lhs = lhs1, cab_rhs = rhs1 })
(CoAxBranch { cab_tvs = tvs2, cab_cvs = cvs2
, cab_lhs = lhs2, cab_rhs = rhs2 })
| Just env1 <- eqVarBndrs emptyRnEnv2 tvs1 tvs2
, Just env <- eqVarBndrs env1 cvs1 cvs2
= eqListBy (eqTypeX env) lhs1 lhs2 &&
eqTypeX env rhs1 rhs2
| otherwise = False
emptyRnEnv2 :: RnEnv2
emptyRnEnv2 = mkRnEnv2 emptyInScopeSet
----------------
missingBootThing :: Bool -> Name -> String -> SDoc
missingBootThing is_boot name what
= quotes (ppr name) <+> text "is exported by the"
<+> (if is_boot then text "hs-boot" else text "hsig")
<+> text "file, but not"
<+> text what <+> text "the module"
badReexportedBootThing :: DynFlags -> Bool -> Name -> Name -> SDoc
badReexportedBootThing dflags is_boot name name'
= withPprStyle (mkUserStyle dflags alwaysQualify AllTheWay) $ vcat
[ text "The" <+> (if is_boot then text "hs-boot" else text "hsig")
<+> text "file (re)exports" <+> quotes (ppr name)
, text "but the implementing module exports a different identifier" <+> quotes (ppr name')
]
bootMisMatch :: Bool -> SDoc -> TyThing -> TyThing -> SDoc
bootMisMatch is_boot extra_info real_thing boot_thing
= pprBootMisMatch is_boot extra_info real_thing real_doc boot_doc
where
to_doc
= pprTyThingInContext $ showToHeader { ss_forall =
if is_boot
then ShowForAllMust
else ShowForAllWhen }
real_doc = to_doc real_thing
boot_doc = to_doc boot_thing
pprBootMisMatch :: Bool -> SDoc -> TyThing -> SDoc -> SDoc -> SDoc
pprBootMisMatch is_boot extra_info real_thing real_doc boot_doc
= vcat
[ ppr real_thing <+>
text "has conflicting definitions in the module",
text "and its" <+>
(if is_boot
then text "hs-boot file"
else text "hsig file"),
text "Main module:" <+> real_doc,
(if is_boot
then text "Boot file: "
else text "Hsig file: ")
<+> boot_doc,
extra_info
]
instMisMatch :: Bool -> ClsInst -> SDoc
instMisMatch is_boot inst
= hang (ppr inst)
2 (text "is defined in the" <+>
(if is_boot then text "hs-boot" else text "hsig")
<+> text "file, but not in the module itself")
{-
************************************************************************
* *
Type-checking the top level of a module (continued)
* *
************************************************************************
-}
rnTopSrcDecls :: HsGroup GhcPs -> TcM (TcGblEnv, HsGroup GhcRn)
-- Fails if there are any errors
rnTopSrcDecls group
= do { -- Rename the source decls
traceRn "rn12" empty ;
(tcg_env, rn_decls) <- checkNoErrs $ rnSrcDecls group ;
traceRn "rn13" empty ;
-- save the renamed syntax, if we want it
let { tcg_env'
| Just grp <- tcg_rn_decls tcg_env
= tcg_env{ tcg_rn_decls = Just (appendGroups grp rn_decls) }
| otherwise
= tcg_env };
-- Dump trace of renaming part
rnDump rn_decls ;
return (tcg_env', rn_decls)
}
tcTopSrcDecls :: HsGroup GhcRn -> TcM (TcGblEnv, TcLclEnv)
tcTopSrcDecls (HsGroup { hs_tyclds = tycl_decls,
hs_derivds = deriv_decls,
hs_fords = foreign_decls,
hs_defds = default_decls,
hs_annds = annotation_decls,
hs_ruleds = rule_decls,
hs_vects = vect_decls,
hs_valds = hs_val_binds@(ValBindsOut val_binds val_sigs) })
= do { -- Type-check the type and class decls, and all imported decls
-- The latter come in via tycl_decls
traceTc "Tc2 (src)" empty ;
-- Source-language instances, including derivings,
-- and import the supporting declarations
traceTc "Tc3" empty ;
(tcg_env, inst_infos, ValBindsOut deriv_binds deriv_sigs)
<- tcTyClsInstDecls tycl_decls deriv_decls val_binds ;
setGblEnv tcg_env $ do {
-- Generate Applicative/Monad proposal (AMP) warnings
traceTc "Tc3b" empty ;
-- Generate Semigroup/Monoid warnings
traceTc "Tc3c" empty ;
tcSemigroupWarnings ;
-- Foreign import declarations next.
traceTc "Tc4" empty ;
(fi_ids, fi_decls, fi_gres) <- tcForeignImports foreign_decls ;
tcExtendGlobalValEnv fi_ids $ do {
-- Default declarations
traceTc "Tc4a" empty ;
default_tys <- tcDefaults default_decls ;
updGblEnv (\gbl -> gbl { tcg_default = default_tys }) $ do {
-- Value declarations next.
-- It is important that we check the top-level value bindings
-- before the GHC-generated derived bindings, since the latter
-- may be defined in terms of the former. (For instance,
-- the bindings produced in a Data instance.)
traceTc "Tc5" empty ;
tc_envs <- tcTopBinds val_binds val_sigs;
setEnvs tc_envs $ do {
-- Now GHC-generated derived bindings, generics, and selectors
-- Do not generate warnings from compiler-generated code;
-- hence the use of discardWarnings
tc_envs@(tcg_env, tcl_env)
<- discardWarnings (tcTopBinds deriv_binds deriv_sigs) ;
setEnvs tc_envs $ do { -- Environment doesn't change now
-- Second pass over class and instance declarations,
-- now using the kind-checked decls
traceTc "Tc6" empty ;
inst_binds <- tcInstDecls2 (tyClGroupTyClDecls tycl_decls) inst_infos ;
-- Foreign exports
traceTc "Tc7" empty ;
(foe_binds, foe_decls, foe_gres) <- tcForeignExports foreign_decls ;
-- Annotations
annotations <- tcAnnotations annotation_decls ;
-- Rules
rules <- tcRules rule_decls ;
-- Vectorisation declarations
vects <- tcVectDecls vect_decls ;
-- Wrap up
traceTc "Tc7a" empty ;
let { all_binds = inst_binds `unionBags`
foe_binds
; fo_gres = fi_gres `unionBags` foe_gres
; fo_fvs = foldrBag (\gre fvs -> fvs `addOneFV` gre_name gre)
emptyFVs fo_gres
; sig_names = mkNameSet (collectHsValBinders hs_val_binds)
`minusNameSet` getTypeSigNames val_sigs
-- Extend the GblEnv with the (as yet un-zonked)
-- bindings, rules, foreign decls
; tcg_env' = tcg_env { tcg_binds = tcg_binds tcg_env `unionBags` all_binds
, tcg_sigs = tcg_sigs tcg_env `unionNameSet` sig_names
, tcg_rules = tcg_rules tcg_env
++ flattenRuleDecls rules
, tcg_vects = tcg_vects tcg_env ++ vects
, tcg_anns = tcg_anns tcg_env ++ annotations
, tcg_ann_env = extendAnnEnvList (tcg_ann_env tcg_env) annotations
, tcg_fords = tcg_fords tcg_env ++ foe_decls ++ fi_decls
, tcg_dus = tcg_dus tcg_env `plusDU` usesOnly fo_fvs } } ;
-- tcg_dus: see Note [Newtype constructor usage in foreign declarations]
-- See Note [Newtype constructor usage in foreign declarations]
addUsedGREs (bagToList fo_gres) ;
return (tcg_env', tcl_env)
}}}}}}
tcTopSrcDecls _ = panic "tcTopSrcDecls: ValBindsIn"
tcSemigroupWarnings :: TcM ()
tcSemigroupWarnings = do
traceTc "tcSemigroupWarnings" empty
let warnFlag = Opt_WarnSemigroup
tcPreludeClashWarn warnFlag sappendName
tcMissingParentClassWarn warnFlag monoidClassName semigroupClassName
-- | Warn on local definitions of names that would clash with future Prelude
-- elements.
--
-- A name clashes if the following criteria are met:
-- 1. It would be imported (unqualified) from Prelude
-- 2. It is locally defined in the current module
-- 3. It has the same literal name as the reference function
-- 4. It is not identical to the reference function
tcPreludeClashWarn :: WarningFlag
-> Name
-> TcM ()
tcPreludeClashWarn warnFlag name = do
{ warn <- woptM warnFlag
; when warn $ do
{ traceTc "tcPreludeClashWarn/wouldBeImported" empty
-- Is the name imported (unqualified) from Prelude? (Point 1 above)
; rnImports <- fmap (map unLoc . tcg_rn_imports) getGblEnv
-- (Note that this automatically handles -XNoImplicitPrelude, as Prelude
-- will not appear in rnImports automatically if it is set.)
-- Continue only if the name is imported from Prelude
; when (importedViaPrelude name rnImports) $ do
-- Handle 2.-4.
{ rdrElts <- fmap (concat . occEnvElts . tcg_rdr_env) getGblEnv
; let clashes :: GlobalRdrElt -> Bool
clashes x = isLocalDef && nameClashes && isNotInProperModule
where
isLocalDef = gre_lcl x
-- Names are identical ...
nameClashes = nameOccName (gre_name x) == nameOccName name
-- ... but not the actual definitions, because we don't want to
-- warn about a bad definition of e.g. <> in Data.Semigroup, which
-- is the (only) proper place where this should be defined
isNotInProperModule = gre_name x /= name
-- List of all offending definitions
clashingElts :: [GlobalRdrElt]
clashingElts = filter clashes rdrElts
; traceTc "tcPreludeClashWarn/prelude_functions"
(hang (ppr name) 4 (sep [ppr clashingElts]))
; let warn_msg x = addWarnAt (Reason warnFlag) (nameSrcSpan (gre_name x)) (hsep
[ text "Local definition of"
, (quotes . ppr . nameOccName . gre_name) x
, text "clashes with a future Prelude name." ]
$$
text "This will become an error in a future release." )
; mapM_ warn_msg clashingElts
}}}
where
-- Is the given name imported via Prelude?
--
-- Possible scenarios:
-- a) Prelude is imported implicitly, issue warnings.
-- b) Prelude is imported explicitly, but without mentioning the name in
-- question. Issue no warnings.
-- c) Prelude is imported hiding the name in question. Issue no warnings.
-- d) Qualified import of Prelude, no warnings.
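-- Sketching these scenarios for a clash on a name such as (<>) (the import
-- lines below are purely illustrative):
--
-- > {- no Prelude import written -}     -- (a) implicit import: warn
-- > import Prelude ((<>))               -- explicit, name mentioned: warn
-- > import Prelude (id)                 -- (b) explicit, name not mentioned: no warning
-- > import Prelude hiding ((<>))        -- (c) hidden: no warning
-- > import qualified Prelude            -- (d) qualified: no warning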
importedViaPrelude :: Name
-> [ImportDecl GhcRn]
-> Bool
importedViaPrelude name = any importViaPrelude
where
isPrelude :: ImportDecl GhcRn -> Bool
isPrelude imp = unLoc (ideclName imp) == pRELUDE_NAME
-- Implicit (Prelude) import?
isImplicit :: ImportDecl GhcRn -> Bool
isImplicit = ideclImplicit
-- Unqualified import?
isUnqualified :: ImportDecl GhcRn -> Bool
isUnqualified = not . ideclQualified
-- List of explicitly imported (or hidden) Names from a single import.
-- Nothing -> No explicit imports
-- Just (False, <names>) -> Explicit import list of <names>
-- Just (True , <names>) -> Explicit hiding of <names>
importListOf :: ImportDecl GhcRn -> Maybe (Bool, [Name])
importListOf = fmap toImportList . ideclHiding
where
toImportList (h, loc) = (h, map (ieName . unLoc) (unLoc loc))
isExplicit :: ImportDecl GhcRn -> Bool
isExplicit x = case importListOf x of
Nothing -> False
Just (False, explicit)
-> nameOccName name `elem` map nameOccName explicit
Just (True, hidden)
-> nameOccName name `notElem` map nameOccName hidden
-- Check whether the given name would be imported (unqualified) from
-- an import declaration.
importViaPrelude :: ImportDecl GhcRn -> Bool
importViaPrelude x = isPrelude x
&& isUnqualified x
&& (isImplicit x || isExplicit x)
-- Notation: is* is for classes the type is an instance of, should* for those
-- that it should also be an instance of based on the corresponding
-- is*.
tcMissingParentClassWarn :: WarningFlag
-> Name -- ^ Instances of this ...
-> Name -- ^ should also be instances of this
-> TcM ()
tcMissingParentClassWarn warnFlag isName shouldName
= do { warn <- woptM warnFlag
; when warn $ do
{ traceTc "tcMissingParentClassWarn" empty
; isClass' <- tcLookupClass_maybe isName
; shouldClass' <- tcLookupClass_maybe shouldName
; case (isClass', shouldClass') of
(Just isClass, Just shouldClass) -> do
{ localInstances <- tcGetInsts
; let isInstance m = is_cls m == isClass
isInsts = filter isInstance localInstances
; traceTc "tcMissingParentClassWarn/isInsts" (ppr isInsts)
; forM_ isInsts (checkShouldInst isClass shouldClass)
}
(is',should') ->
traceTc "tcMissingParentClassWarn/notIsShould"
(hang (ppr isName <> text "/" <> ppr shouldName) 2 (
(hsep [ quotes (text "Is"), text "lookup for"
, ppr isName
, text "resulted in", ppr is' ])
$$
(hsep [ quotes (text "Should"), text "lookup for"
, ppr shouldName
, text "resulted in", ppr should' ])))
}}
where
-- Check whether the desired superclass exists in a given environment.
checkShouldInst :: Class -- ^ Class of existing instance
-> Class -- ^ Class there should be an instance of
-> ClsInst -- ^ Existing instance
-> TcM ()
checkShouldInst isClass shouldClass isInst
= do { instEnv <- tcGetInstEnvs
; let (instanceMatches, shouldInsts, _)
= lookupInstEnv False instEnv shouldClass (is_tys isInst)
; traceTc "tcMissingParentClassWarn/checkShouldInst"
(hang (ppr isInst) 4
(sep [ppr instanceMatches, ppr shouldInsts]))
-- "<location>: Warning: <type> is an instance of <is> but not
-- <should>" e.g. "Foo is an instance of Monad but not Applicative"
; let instLoc = srcLocSpan . nameSrcLoc $ getName isInst
warnMsg (Just name:_) =
addWarnAt (Reason warnFlag) instLoc $
hsep [ (quotes . ppr . nameOccName) name
, text "is an instance of"
, (ppr . nameOccName . className) isClass
, text "but not"
, (ppr . nameOccName . className) shouldClass ]
<> text "."
$$
hsep [ text "This will become an error in"
, text "a future release." ]
warnMsg _ = pure ()
; when (null shouldInsts && null instanceMatches) $
warnMsg (is_tcs isInst)
}
tcLookupClass_maybe :: Name -> TcM (Maybe Class)
tcLookupClass_maybe name = tcLookupImported_maybe name >>= \case
Succeeded (ATyCon tc) | cls@(Just _) <- tyConClass_maybe tc -> pure cls
_else -> pure Nothing
---------------------------
tcTyClsInstDecls :: [TyClGroup GhcRn]
-> [LDerivDecl GhcRn]
-> [(RecFlag, LHsBinds GhcRn)]
-> TcM (TcGblEnv, -- The full inst env
[InstInfo GhcRn], -- Source-code instance decls to
-- process; contains all dfuns for
-- this module
HsValBinds GhcRn) -- Supporting bindings for derived
-- instances
tcTyClsInstDecls tycl_decls deriv_decls binds
= tcAddDataFamConPlaceholders (tycl_decls >>= group_instds) $
tcAddPatSynPlaceholders (getPatSynBinds binds) $
do { (tcg_env, inst_info, datafam_deriv_info)
<- tcTyAndClassDecls tycl_decls ;
; setGblEnv tcg_env $ do {
-- With the @TyClDecl@s and @InstDecl@s checked we're ready to
-- process the deriving clauses, including data family deriving
-- clauses discovered in @tcTyAndClassDecls@.
--
-- Careful to quit now in case there were instance errors, so that
-- the deriving errors don't pile up as well.
; failIfErrsM
; let tyclds = tycl_decls >>= group_tyclds
; (tcg_env', inst_info', val_binds)
<- tcInstDeclsDeriv datafam_deriv_info tyclds deriv_decls
; setGblEnv tcg_env' $ do {
failIfErrsM
; pure (tcg_env', inst_info' ++ inst_info, val_binds)
}}}
{- *********************************************************************
* *
Checking for 'main'
* *
************************************************************************
-}
checkMain :: Bool -- False => no 'module M(..) where' header at all
-> TcM TcGblEnv
-- If we are in module Main, check that 'main' is defined.
checkMain explicit_mod_hdr
= do { dflags <- getDynFlags
; tcg_env <- getGblEnv
; check_main dflags tcg_env explicit_mod_hdr }
check_main :: DynFlags -> TcGblEnv -> Bool -> TcM TcGblEnv
check_main dflags tcg_env explicit_mod_hdr
| mod /= main_mod
= traceTc "checkMain not" (ppr main_mod <+> ppr mod) >>
return tcg_env
| otherwise
= do { mb_main <- lookupGlobalOccRn_maybe main_fn
-- Check that 'main' is in scope
-- It might be imported from another module!
; case mb_main of {
Nothing -> do { traceTc "checkMain fail" (ppr main_mod <+> ppr main_fn)
; complain_no_main
; return tcg_env } ;
Just main_name -> do
{ traceTc "checkMain found" (ppr main_mod <+> ppr main_fn)
; let loc = srcLocSpan (getSrcLoc main_name)
; ioTyCon <- tcLookupTyCon ioTyConName
; res_ty <- newFlexiTyVarTy liftedTypeKind
; let io_ty = mkTyConApp ioTyCon [res_ty]
skol_info = SigSkol (FunSigCtxt main_name False) io_ty []
; (ev_binds, main_expr)
<- checkConstraints skol_info [] [] $
addErrCtxt mainCtxt $
tcMonoExpr (L loc (HsVar (L loc main_name)))
(mkCheckExpType io_ty)
-- See Note [Root-main Id]
-- Construct the binding
-- :Main.main :: IO res_ty = runMainIO res_ty main
; run_main_id <- tcLookupId runMainIOName
; let { root_main_name = mkExternalName rootMainKey rOOT_MAIN
(mkVarOccFS (fsLit "main"))
(getSrcSpan main_name)
; root_main_id = Id.mkExportedVanillaId root_main_name
(mkTyConApp ioTyCon [res_ty])
; co = mkWpTyApps [res_ty]
; rhs = mkHsDictLet ev_binds $
nlHsApp (mkLHsWrap co (nlHsVar run_main_id)) main_expr
; main_bind = mkVarBind root_main_id rhs }
; return (tcg_env { tcg_main = Just main_name,
tcg_binds = tcg_binds tcg_env
`snocBag` main_bind,
tcg_dus = tcg_dus tcg_env
`plusDU` usesOnly (unitFV main_name)
-- Record the use of 'main', so that we don't
-- complain about it being defined but not used
})
}}}
where
mod = tcg_mod tcg_env
main_mod = mainModIs dflags
main_fn = getMainFun dflags
interactive = ghcLink dflags == LinkInMemory
complain_no_main = checkTc (interactive && not explicit_mod_hdr) noMainMsg
-- In interactive mode, without an explicit module header, don't
-- worry about the absence of 'main'.
-- In other modes, fail altogether, so that we don't go on
-- and complain a second time when processing the export list.
mainCtxt = text "When checking the type of the" <+> pp_main_fn
noMainMsg = text "The" <+> pp_main_fn
<+> text "is not defined in module" <+> quotes (ppr main_mod)
pp_main_fn = ppMainFn main_fn
-- | Get the unqualified name of the function to use as the \"main\" for the main module.
-- Either returns the default name or the one configured on the command line with -main-is
getMainFun :: DynFlags -> RdrName
getMainFun dflags = case mainFunIs dflags of
Just fn -> mkRdrUnqual (mkVarOccFS (mkFastString fn))
Nothing -> main_RDR_Unqual
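-- For example (only a sketch of the flag's behaviour): compiling with
-- @-main-is MyApp.realMain@ leaves @"realMain"@ in 'mainFunIs', so
-- 'getMainFun' returns the unqualified name @realMain@; with no @-main-is@
-- flag it falls back to the ordinary @main@.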
-- If we are in module Main, check that 'main' is exported.
checkMainExported :: TcGblEnv -> TcM ()
checkMainExported tcg_env
= case tcg_main tcg_env of
Nothing -> return () -- not the main module
Just main_name ->
do { dflags <- getDynFlags
; let main_mod = mainModIs dflags
; checkTc (main_name `elem` concatMap availNames (tcg_exports tcg_env)) $
text "The" <+> ppMainFn (nameRdrName main_name) <+>
text "is not exported by module" <+> quotes (ppr main_mod) }
ppMainFn :: RdrName -> SDoc
ppMainFn main_fn
| rdrNameOcc main_fn == mainOcc
= text "IO action" <+> quotes (ppr main_fn)
| otherwise
= text "main IO action" <+> quotes (ppr main_fn)
mainOcc :: OccName
mainOcc = mkVarOccFS (fsLit "main")
{-
Note [Root-main Id]
~~~~~~~~~~~~~~~~~~~
The function that the RTS invokes is always :Main.main, which we call
root_main_id. (Because GHC allows the user to have a module not
called Main as the main module, we can't rely on the main function
being called "Main.main". That's why root_main_id has a fixed module
":Main".)
This is unusual: it's a LocalId whose Name has a Module from another
module. Tiresomely, we must filter it out again in MkIface, lest we
get two defns for 'main' in the interface file!
*********************************************************
* *
GHCi stuff
* *
*********************************************************
-}
runTcInteractive :: HscEnv -> TcRn a -> IO (Messages, Maybe a)
-- Initialise the tcg_inst_env with instances from all home modules.
-- This mimics the more selective call to hptInstances in tcRnImports
runTcInteractive hsc_env thing_inside
= initTcInteractive hsc_env $ withTcPlugins hsc_env $
do { traceTc "setInteractiveContext" $
vcat [ text "ic_tythings:" <+> vcat (map ppr (ic_tythings icxt))
, text "ic_insts:" <+> vcat (map (pprBndr LetBind . instanceDFunId) ic_insts)
, text "ic_rn_gbl_env (LocalDef)" <+>
vcat (map ppr [ local_gres | gres <- occEnvElts (ic_rn_gbl_env icxt)
, let local_gres = filter isLocalGRE gres
, not (null local_gres) ]) ]
; let getOrphans m mb_pkg = fmap (\iface -> mi_module iface
: dep_orphs (mi_deps iface))
(loadSrcInterface (text "runTcInteractive") m
False mb_pkg)
; orphs <- fmap concat . forM (ic_imports icxt) $ \i ->
case i of
IIModule n -> getOrphans n Nothing
IIDecl i ->
let mb_pkg = sl_fs <$> ideclPkgQual i in
getOrphans (unLoc (ideclName i)) mb_pkg
; let imports = emptyImportAvails {
imp_orphs = orphs
}
; (gbl_env, lcl_env) <- getEnvs
; let gbl_env' = gbl_env {
tcg_rdr_env = ic_rn_gbl_env icxt
, tcg_type_env = type_env
, tcg_inst_env = extendInstEnvList
(extendInstEnvList (tcg_inst_env gbl_env) ic_insts)
home_insts
, tcg_fam_inst_env = extendFamInstEnvList
(extendFamInstEnvList (tcg_fam_inst_env gbl_env)
ic_finsts)
home_fam_insts
, tcg_field_env = mkNameEnv con_fields
-- setting tcg_field_env is necessary
-- to make RecordWildCards work (test: ghci049)
, tcg_fix_env = ic_fix_env icxt
, tcg_default = ic_default icxt
-- must calculate imp_orphs of the ImportAvails
-- so that instance visibility is done correctly
, tcg_imports = imports
}
; lcl_env' <- tcExtendLocalTypeEnv lcl_env lcl_ids
; setEnvs (gbl_env', lcl_env') thing_inside }
where
(home_insts, home_fam_insts) = hptInstances hsc_env (\_ -> True)
icxt = hsc_IC hsc_env
(ic_insts, ic_finsts) = ic_instances icxt
(lcl_ids, top_ty_things) = partitionWith is_closed (ic_tythings icxt)
is_closed :: TyThing -> Either (Name, TcTyThing) TyThing
-- Put Ids with free type variables (always RuntimeUnks)
-- in the *local* type environment
-- See Note [Initialising the type environment for GHCi]
is_closed thing
| AnId id <- thing
, not (isTypeClosedLetBndr id)
= Left (idName id, ATcId { tct_id = id
, tct_info = NotLetBound })
| otherwise
= Right thing
type_env1 = mkTypeEnvWithImplicits top_ty_things
type_env = extendTypeEnvWithIds type_env1 (map instanceDFunId ic_insts)
-- Putting the dfuns in the type_env
-- is just to keep Core Lint happy
con_fields = [ (dataConName c, dataConFieldLabels c)
| ATyCon t <- top_ty_things
, c <- tyConDataCons t ]
{- Note [Initialising the type environment for GHCi]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Most of the Ids in ic_things, defined by the user in 'let' stmts,
have closed types. E.g.
ghci> let foo x y = x && not y
However the GHCi debugger creates top-level bindings for Ids whose
types have free RuntimeUnk skolem variables, standing for unknown
types. If we don't register these free TyVars as global TyVars then
the typechecker will try to quantify over them and fall over in
zonkQuantifiedTyVar. So we must add any free TyVars to the
typechecker's global TyVar set. That is most conveniently done by using
tcExtendLocalTypeEnv, which automatically extends the global TyVar
set.
We do this by splitting out the Ids with open types, using 'is_closed'
to do the partition. The top-level things go in the global TypeEnv;
the open, NotTopLevel, Ids, with free RuntimeUnk tyvars, go in the
local TypeEnv.
Note that we don't extend the local RdrEnv (tcl_rdr); all the in-scope
things are already in the interactive context's GlobalRdrEnv.
Extending the local RdrEnv isn't terrible, but it means there is an
entry for the same Name in both global and local RdrEnvs, and that
leads to duplicate "perhaps you meant..." suggestions (e.g. T5564).
We don't bother with the tcl_th_bndrs environment either.
-}
-- | The returned [Id] is the list of new Ids bound by this statement. It can
-- be used to extend the InteractiveContext via extendInteractiveContext.
--
-- The returned TypecheckedHsExpr is of type IO [ () ], a list of the bound
-- values, coerced to ().
tcRnStmt :: HscEnv -> GhciLStmt GhcPs
-> IO (Messages, Maybe ([Id], LHsExpr GhcTc, FixityEnv))
tcRnStmt hsc_env rdr_stmt
= runTcInteractive hsc_env $ do {
-- The real work is done here
((bound_ids, tc_expr), fix_env) <- tcUserStmt rdr_stmt ;
zonked_expr <- zonkTopLExpr tc_expr ;
zonked_ids <- zonkTopBndrs bound_ids ;
failIfErrsM ; -- we can't do the next step if there are levity polymorphism errors
-- test case: ghci/scripts/T13202{,a}
-- None of the Ids should be of unboxed type, because we
-- cast them all to HValues in the end!
mapM_ bad_unboxed (filter (isUnliftedType . idType) zonked_ids) ;
traceTc "tcs 1" empty ;
this_mod <- getModule ;
global_ids <- mapM (externaliseAndTidyId this_mod) zonked_ids ;
-- Note [Interactively-bound Ids in GHCi] in HscTypes
{- ---------------------------------------------
At one stage I removed any shadowed bindings from the type_env;
they are inaccessible but might, I suppose, cause a space leak if we leave them there.
However, with Template Haskell they aren't necessarily inaccessible. Consider this
GHCi session
Prelude> let f n = n * 2 :: Int
Prelude> fName <- runQ [| f |]
Prelude> $(return $ AppE fName (LitE (IntegerL 7)))
14
Prelude> let f n = n * 3 :: Int
Prelude> $(return $ AppE fName (LitE (IntegerL 7)))
In the last line we use 'fName', which resolves to the *first* 'f'
in scope. If we delete it from the type env, GHCi crashes because
it doesn't expect that.
Hence this code is commented out
-------------------------------------------------- -}
traceOptTcRn Opt_D_dump_tc
(vcat [text "Bound Ids" <+> pprWithCommas ppr global_ids,
text "Typechecked expr" <+> ppr zonked_expr]) ;
return (global_ids, zonked_expr, fix_env)
}
where
bad_unboxed id = addErr (sep [text "GHCi can't bind a variable of unlifted type:",
nest 2 (ppr id <+> dcolon <+> ppr (idType id))])
{-
--------------------------------------------------------------------------
Typechecking Stmts in GHCi
Here is the grand plan, implemented in tcUserStmt
What you type The IO [HValue] that hscStmt returns
------------- ------------------------------------
let pat = expr ==> let pat = expr in return [coerce HVal x, coerce HVal y, ...]
bindings: [x,y,...]
pat <- expr ==> expr >>= \ pat -> return [coerce HVal x, coerce HVal y, ...]
bindings: [x,y,...]
expr (of IO type) ==> expr >>= \ it -> return [coerce HVal it]
[NB: result not printed] bindings: [it]
expr (of non-IO type, ==> let it = expr in print it >> return [coerce HVal it]
result showable) bindings: [it]
expr (of non-IO type,
result not showable) ==> error
-}
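{- A hypothetical illustration of the table above (not from the original
   source): an entry like

       y <- readLn

   typed at the prompt falls into the "pat <- expr" row, so it is elaborated
   roughly as

       readLn >>= \ y -> return [coerce HVal y]

   binding the single Id y. The precise construction (including the ghciStep
   wrapper) is done by tcUserStmt and tcGhciStmts below.
-}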
-- | A plan is an attempt to lift some code into the IO monad.
type PlanResult = ([Id], LHsExpr GhcTc)
type Plan = TcM PlanResult
-- | Try the plans in order. If one fails (by raising an exn), try the next.
-- If one succeeds, take it.
runPlans :: [Plan] -> TcM PlanResult
runPlans [] = panic "runPlans"
runPlans [p] = p
runPlans (p:ps) = tryTcDiscardingErrs (runPlans ps) p
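-- Illustration (informal, not from the original source): given the argument
-- order of tryTcDiscardingErrs (recovery action first, main action second),
-- the recursion tries the head plan first and falls back to the remaining
-- plans only when it fails, discarding its errors; so runPlans [a, b, c]
-- behaves like trying a, then b, then c.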
-- | Typecheck (and 'lift') a stmt entered by the user in GHCi into the
-- GHCi 'environment'.
--
-- By 'lift' and 'environment' we mean that the code is changed to
-- execute properly in an IO monad. See Note [Interactively-bound Ids
-- in GHCi] in HscTypes for more details. We do this lifting by trying
-- different ways ('plans') of lifting the code into the IO monad and
-- type checking each plan until one succeeds.
tcUserStmt :: GhciLStmt GhcPs -> TcM (PlanResult, FixityEnv)
-- An expression typed at the prompt is treated very specially
tcUserStmt (L loc (BodyStmt expr _ _ _))
= do { (rn_expr, fvs) <- checkNoErrs (rnLExpr expr)
-- Don't try to typecheck if the renamer fails!
; ghciStep <- getGhciStepIO
; uniq <- newUnique
; interPrintName <- getInteractivePrintName
; let fresh_it = itName uniq loc
matches = [mkMatch (mkPrefixFunRhs (L loc fresh_it)) [] rn_expr
(noLoc emptyLocalBinds)]
-- [it = expr]
the_bind = L loc $ (mkTopFunBind FromSource (L loc fresh_it) matches) { bind_fvs = fvs }
-- Care here! In GHCi the expression might have
-- free variables, and they in turn may have free type variables
-- (if we are at a breakpoint, say). We must put those free vars
-- [let it = expr]
let_stmt = L loc $ LetStmt $ noLoc $ HsValBinds $
ValBindsOut [(NonRecursive,unitBag the_bind)] []
-- [it <- e]
bind_stmt = L loc $ BindStmt (L loc (VarPat (L loc fresh_it)))
(nlHsApp ghciStep rn_expr)
(mkRnSyntaxExpr bindIOName)
noSyntaxExpr
PlaceHolder
-- [; print it]
print_it = L loc $ BodyStmt (nlHsApp (nlHsVar interPrintName) (nlHsVar fresh_it))
(mkRnSyntaxExpr thenIOName)
noSyntaxExpr placeHolderType
-- The plans are:
-- A. [it <- e; print it] but not if it::()
-- B. [it <- e]
-- C. [let it = e; print it]
--
-- Ensure that type errors don't get deferred when type checking the
-- naked expression. Deferring type errors here is unhelpful because the
-- expression gets evaluated right away anyway. It also would potentially
-- emit two redundant type-error warnings, one from each plan.
; plan <- unsetGOptM Opt_DeferTypeErrors $
unsetGOptM Opt_DeferTypedHoles $ runPlans [
-- Plan A
do { stuff@([it_id], _) <- tcGhciStmts [bind_stmt, print_it]
; it_ty <- zonkTcType (idType it_id)
; when (isUnitTy $ it_ty) failM
; return stuff },
-- Plan B; a naked bind statement
tcGhciStmts [bind_stmt],
-- Plan C; check that the let-binding is typeable all by itself.
-- If not, fail; if so, try to print it.
-- The two-step process avoids getting two errors: one from
-- the expression itself, and one from the 'print it' part
-- This two-step story is very clunky, alas
do { _ <- checkNoErrs (tcGhciStmts [let_stmt])
--- checkNoErrs defeats the error recovery of let-bindings
; tcGhciStmts [let_stmt, print_it] } ]
; fix_env <- getFixityEnv
; return (plan, fix_env) }
tcUserStmt rdr_stmt@(L loc _)
= do { (([rn_stmt], fix_env), fvs) <- checkNoErrs $
rnStmts GhciStmtCtxt rnLExpr [rdr_stmt] $ \_ -> do
fix_env <- getFixityEnv
return (fix_env, emptyFVs)
-- Don't try to typecheck if the renamer fails!
; traceRn "tcRnStmt" (vcat [ppr rdr_stmt, ppr rn_stmt, ppr fvs])
; rnDump rn_stmt ;
; ghciStep <- getGhciStepIO
; let gi_stmt
| (L loc (BindStmt pat expr op1 op2 ty)) <- rn_stmt
= L loc $ BindStmt pat (nlHsApp ghciStep expr) op1 op2 ty
| otherwise = rn_stmt
; opt_pr_flag <- goptM Opt_PrintBindResult
; let print_result_plan
| opt_pr_flag -- The flag says "print result"
, [v] <- collectLStmtBinders gi_stmt -- One binder
= [mk_print_result_plan gi_stmt v]
| otherwise = []
-- The plans are:
-- [stmt; print v] if one binder and not v::()
-- [stmt] otherwise
; plan <- runPlans (print_result_plan ++ [tcGhciStmts [gi_stmt]])
; return (plan, fix_env) }
where
mk_print_result_plan stmt v
= do { stuff@([v_id], _) <- tcGhciStmts [stmt, print_v]
; v_ty <- zonkTcType (idType v_id)
; when (isUnitTy v_ty || not (isTauTy v_ty)) failM
; return stuff }
where
print_v = L loc $ BodyStmt (nlHsApp (nlHsVar printName) (nlHsVar v))
(mkRnSyntaxExpr thenIOName) noSyntaxExpr
placeHolderType
-- | Typecheck the statements given and then return the results of the
-- statement in the form 'IO [()]'.
tcGhciStmts :: [GhciLStmt GhcRn] -> TcM PlanResult
tcGhciStmts stmts
= do { ioTyCon <- tcLookupTyCon ioTyConName ;
ret_id <- tcLookupId returnIOName ; -- return @ IO
let {
ret_ty = mkListTy unitTy ;
io_ret_ty = mkTyConApp ioTyCon [ret_ty] ;
tc_io_stmts = tcStmtsAndThen GhciStmtCtxt tcDoStmt stmts
(mkCheckExpType io_ret_ty) ;
names = collectLStmtsBinders stmts ;
} ;
-- OK, we're ready to typecheck the stmts
traceTc "TcRnDriver.tcGhciStmts: tc stmts" empty ;
((tc_stmts, ids), lie) <- captureTopConstraints $
tc_io_stmts $ \ _ ->
mapM tcLookupId names ;
-- Look up the names right in the middle,
-- where they will all be in scope
-- Simplify the context
traceTc "TcRnDriver.tcGhciStmts: simplify ctxt" empty ;
const_binds <- checkNoErrs (simplifyInteractive lie) ;
-- checkNoErrs ensures that the plan fails if context redn fails
traceTc "TcRnDriver.tcGhciStmts: done" empty ;
let { -- mk_return builds the expression
-- returnIO @ [()] [coerce () x, .., coerce () z]
--
-- Despite the inconvenience of building the type applications etc,
-- this *has* to be done in type-annotated post-typecheck form
-- because we are going to return a list of *polymorphic* values
-- coerced to type (). If we built a *source* stmt
-- return [coerce x, ..., coerce z]
-- then the type checker would instantiate x..z, and we wouldn't
-- get their *polymorphic* values. (And we'd get ambiguity errs
-- if they were overloaded, since they aren't applied to anything.)
ret_expr = nlHsApp (nlHsTyApp ret_id [ret_ty])
(noLoc $ ExplicitList unitTy Nothing (map mk_item ids)) ;
mk_item id = let ty_args = [idType id, unitTy] in
nlHsApp (nlHsTyApp unsafeCoerceId
(map getRuntimeRep ty_args ++ ty_args))
(nlHsVar id) ;
stmts = tc_stmts ++ [noLoc (mkLastStmt ret_expr)]
} ;
return (ids, mkHsDictLet (EvBinds const_binds) $
noLoc (HsDo GhciStmtCtxt (noLoc stmts) io_ret_ty))
}
-- | Generate a typed ghciStepIO expression (ghciStep :: Ty a -> IO a)
getGhciStepIO :: TcM (LHsExpr GhcRn)
getGhciStepIO = do
ghciTy <- getGHCiMonad
a_tv <- newName (mkTyVarOccFS (fsLit "a"))
let ghciM = nlHsAppTy (nlHsTyVar ghciTy) (nlHsTyVar a_tv)
ioM = nlHsAppTy (nlHsTyVar ioTyConName) (nlHsTyVar a_tv)
step_ty = noLoc $ HsForAllTy { hst_bndrs = [noLoc $ UserTyVar (noLoc a_tv)]
, hst_body = nlHsFunTy ghciM ioM }
stepTy :: LHsSigWcType GhcRn
stepTy = mkEmptyWildCardBndrs (mkEmptyImplicitBndrs step_ty)
return (noLoc $ ExprWithTySig (nlHsVar ghciStepIoMName) stepTy)
isGHCiMonad :: HscEnv -> String -> IO (Messages, Maybe Name)
isGHCiMonad hsc_env ty
= runTcInteractive hsc_env $ do
rdrEnv <- getGlobalRdrEnv
let occIO = lookupOccEnv rdrEnv (mkOccName tcName ty)
case occIO of
Just [n] -> do
let name = gre_name n
ghciClass <- tcLookupClass ghciIoClassName
userTyCon <- tcLookupTyCon name
let userTy = mkTyConApp userTyCon []
_ <- tcLookupInstance ghciClass [userTy]
return name
Just _ -> failWithTc $ text "Ambiguous type!"
Nothing -> failWithTc $ text ("Can't find type:" ++ ty)
-- | How should we infer a type? See Note [TcRnExprMode]
data TcRnExprMode = TM_Inst -- ^ Instantiate the type fully (:type)
| TM_NoInst -- ^ Do not instantiate the type (:type +v)
| TM_Default -- ^ Default the type eagerly (:type +d)
-- | tcRnExpr just finds the type of an expression
tcRnExpr :: HscEnv
-> TcRnExprMode
-> LHsExpr GhcPs
-> IO (Messages, Maybe Type)
tcRnExpr hsc_env mode rdr_expr
= runTcInteractive hsc_env $
do {
(rn_expr, _fvs) <- rnLExpr rdr_expr ;
failIfErrsM ;
-- Now typecheck the expression, and generalise its type
-- it might have a rank-2 type (e.g. :t runST)
uniq <- newUnique ;
let { fresh_it = itName uniq (getLoc rdr_expr)
; orig = lexprCtOrigin rn_expr } ;
(tclvl, lie, res_ty)
<- pushLevelAndCaptureConstraints $
do { (_tc_expr, expr_ty) <- tcInferSigma rn_expr
; if inst
then snd <$> deeplyInstantiate orig expr_ty
else return expr_ty } ;
-- Generalise
((qtvs, dicts, _, _), lie_top) <- captureTopConstraints $
{-# SCC "simplifyInfer" #-}
simplifyInfer tclvl
infer_mode
[] {- No sig vars -}
[(fresh_it, res_ty)]
lie ;
-- Ignore the dictionary bindings
_ <- perhaps_disable_default_warnings $
simplifyInteractive lie_top ;
let { all_expr_ty = mkInvForAllTys qtvs (mkLamTypes dicts res_ty) } ;
ty <- zonkTcType all_expr_ty ;
-- We normalise type families, so that the type of an expression is the
-- same as of a bound expression (TcBinds.mkInferredPolyId). See Trac
-- #10321 for further discussion.
fam_envs <- tcGetFamInstEnvs ;
-- normaliseType returns a coercion which we discard, so the Role is
-- irrelevant
return (snd (normaliseType fam_envs Nominal ty))
}
where
-- See Note [TcRnExprMode]
(inst, infer_mode, perhaps_disable_default_warnings) = case mode of
TM_Inst -> (True, NoRestrictions, id)
TM_NoInst -> (False, NoRestrictions, id)
TM_Default -> (True, EagerDefaulting, unsetWOptM Opt_WarnTypeDefaults)
--------------------------
tcRnImportDecls :: HscEnv
-> [LImportDecl GhcPs]
-> IO (Messages, Maybe GlobalRdrEnv)
-- Find the new chunk of GlobalRdrEnv created by this list of import
-- decls. In contrast, tcRnImports *extends* the TcGblEnv.
tcRnImportDecls hsc_env import_decls
= runTcInteractive hsc_env $
do { gbl_env <- updGblEnv zap_rdr_env $
tcRnImports hsc_env import_decls
; return (tcg_rdr_env gbl_env) }
where
zap_rdr_env gbl_env = gbl_env { tcg_rdr_env = emptyGlobalRdrEnv }
-- tcRnType just finds the kind of a type
tcRnType :: HscEnv
-> Bool -- Normalise the returned type
-> LHsType GhcPs
-> IO (Messages, Maybe (Type, Kind))
tcRnType hsc_env normalise rdr_type
= runTcInteractive hsc_env $
setXOptM LangExt.PolyKinds $ -- See Note [Kind-generalise in tcRnType]
do { (HsWC { hswc_wcs = wcs, hswc_body = rn_type }, _fvs)
<- rnHsWcType GHCiCtx (mkHsWildCardBndrs rdr_type)
-- The type can have wild cards, but no implicit
-- generalisation; e.g. :kind (T _)
; failIfErrsM
-- Now kind-check the type
-- It can have any rank or kind
-- First bring into scope any wildcards
; traceTc "tcRnType" (vcat [ppr wcs, ppr rn_type])
; (ty, kind) <- solveEqualities $
tcWildCardBinders wcs $ \ _ ->
tcLHsTypeUnsaturated rn_type
-- Do kind generalisation; see Note [Kind-generalise in tcRnType]
; kvs <- kindGeneralize kind
; ty <- zonkTcTypeToType emptyZonkEnv ty
; ty' <- if normalise
then do { fam_envs <- tcGetFamInstEnvs
; let (_, ty')
= normaliseType fam_envs Nominal ty
; return ty' }
else return ty ;
; return (ty', mkInvForAllTys kvs (typeKind ty')) }
{- Note [TcRnExprMode]
~~~~~~~~~~~~~~~~~~~~~~
How should we infer a type when a user asks for the type of an expression e
at the GHCi prompt? We offer 3 different possibilities, described below. Each
considers this example, with -fprint-explicit-foralls enabled:
foo :: forall a f b. (Show a, Num b, Foldable f) => a -> f b -> String
:type{,-spec,-def} foo @Int
:type / TM_Inst
In this mode, we report the type that would be inferred if a variable
were assigned to expression e, without applying the monomorphism restriction.
This means we deeply instantiate the type and then regeneralize, as discussed
in #11376.
> :type foo @Int
forall {b} {f :: * -> *}. (Foldable f, Num b) => Int -> f b -> String
Note that the variables and constraints are reordered here, because this
is possible during regeneralization. Also note that the variables are
reported as Inferred instead of Specified.
:type +v / TM_NoInst
This mode is for the benefit of users using TypeApplications. It does no
instantiation whatsoever, sometimes meaning that class constraints are not
solved.
> :type +v foo @Int
forall f b. (Show Int, Num b, Foldable f) => Int -> f b -> String
Note that Show Int is still reported, because the solver never got a chance
to see it.
:type +d / TM_Default
This mode is for the benefit of users who wish to see instantiations of
generalized types, and in particular to instantiate Foldable and Traversable.
In this mode, any type variable that can be defaulted is defaulted. Because
GHCi uses -XExtendedDefaultRules, this means that Foldable and Traversable are
defaulted.
> :type +d foo @Int
Int -> [Integer] -> String
Note that this mode can sometimes lead to a type error, if a type variable is
used with a defaultable class but cannot actually be defaulted:
bar :: (Num a, Monoid a) => a -> a
> :type +d bar
** error **
The error arises because GHC tries to default a but cannot find a concrete
type in the defaulting list that is both Num and Monoid. (If this list is
modified to include an element that is both Num and Monoid, the defaulting
would succeed, of course.)
Note [Kind-generalise in tcRnType]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We switch on PolyKinds when kind-checking a user type, so that we will
kind-generalise the type, even when PolyKinds is not otherwise on.
This gives the right default behaviour at the GHCi prompt, where if
you say ":k T", and T has a polymorphic kind, you'd like to see that
polymorphism. Of course. If T isn't kind-polymorphic you won't get
anything unexpected, but the apparent *loss* of polymorphism, for
types that you know are polymorphic, is quite surprising. See Trac
#7688 for a discussion.
Note that the goal is to generalise the *kind of the type*, not
the type itself! Example:
ghci> data T m a = MkT (m a) -- T :: forall k. (k -> *) -> k -> *
ghci> :k T
We instantiate T to get (T kappa). We do not want to kind-generalise
that to forall k. T k! Rather we want to take its kind
T kappa :: (kappa -> *) -> kappa -> *
and now kind-generalise that kind, to forall k. (k->*) -> k -> *
(It was Trac #10122 that made me realise how wrong the previous
approach was.) -}
{-
************************************************************************
* *
tcRnDeclsi
* *
************************************************************************
tcRnDeclsi exists to allow class, data, and other declarations in GHCi.
-}
tcRnDeclsi :: HscEnv
-> [LHsDecl GhcPs]
-> IO (Messages, Maybe TcGblEnv)
tcRnDeclsi hsc_env local_decls
= runTcInteractive hsc_env $
tcRnSrcDecls False local_decls
externaliseAndTidyId :: Module -> Id -> TcM Id
externaliseAndTidyId this_mod id
= do { name' <- externaliseName this_mod (idName id)
; return (globaliseAndTidyId (setIdName id name')) }
{-
************************************************************************
* *
More GHCi stuff, to do with browsing and getting info
* *
************************************************************************
-}
-- | ASSUMES that the module is either in the 'HomePackageTable' or is
-- a package module with an interface on disk. If neither of these is
-- true, then the result will be an error indicating the interface
-- could not be found.
getModuleInterface :: HscEnv -> Module -> IO (Messages, Maybe ModIface)
getModuleInterface hsc_env mod
= runTcInteractive hsc_env $
loadModuleInterface (text "getModuleInterface") mod
tcRnLookupRdrName :: HscEnv -> Located RdrName
-> IO (Messages, Maybe [Name])
-- ^ Find all the Names that this RdrName could mean, in GHCi
tcRnLookupRdrName hsc_env (L loc rdr_name)
= runTcInteractive hsc_env $
setSrcSpan loc $
do { -- If the identifier is a constructor (begins with an
-- upper-case letter), then we need to consider both
-- constructor and type class identifiers.
let rdr_names = dataTcOccs rdr_name
; names_s <- mapM lookupInfoOccRn rdr_names
; let names = concat names_s
; when (null names) (addErrTc (text "Not in scope:" <+> quotes (ppr rdr_name)))
; return names }
tcRnLookupName :: HscEnv -> Name -> IO (Messages, Maybe TyThing)
tcRnLookupName hsc_env name
= runTcInteractive hsc_env $
tcRnLookupName' name
-- To look up a name we have to look in the local environment (tcl_lcl)
-- as well as the global environment, which is what tcLookup does.
-- But we also want a TyThing, so we have to convert:
tcRnLookupName' :: Name -> TcRn TyThing
tcRnLookupName' name = do
tcthing <- tcLookup name
case tcthing of
AGlobal thing -> return thing
ATcId{tct_id=id} -> return (AnId id)
_ -> panic "tcRnLookupName'"
tcRnGetInfo :: HscEnv
-> Name
-> IO ( Messages
, Maybe (TyThing, Fixity, [ClsInst], [FamInst], SDoc))
-- Used to implement :info in GHCi
--
-- Look up a RdrName and return all the TyThings it might be
-- A capitalised RdrName is given to us in the DataName namespace,
-- but we want to treat it as *both* a data constructor
-- *and* as a type or class constructor;
-- hence the call to dataTcOccs, and we return up to two results
tcRnGetInfo hsc_env name
= runTcInteractive hsc_env $
do { loadUnqualIfaces hsc_env (hsc_IC hsc_env)
-- Load the interface for all unqualified types and classes
-- That way we will find all the instance declarations
-- (Packages have no orphan modules, and we assume that
-- in the home package all relevant modules are loaded.)
; thing <- tcRnLookupName' name
; fixity <- lookupFixityRn name
; (cls_insts, fam_insts) <- lookupInsts thing
; let info = lookupKnownNameInfo name
; return (thing, fixity, cls_insts, fam_insts, info) }
-- Lookup all class and family instances for a type constructor.
--
-- This function filters all instances in the type environment, so there
-- is a lot of duplicated work if it is called many times in the same
-- type environment. If this becomes a problem, the NameEnv computed
-- in GHC.getNameToInstancesIndex could be cached in TcM and both functions
-- could be changed to consult that index.
lookupInsts :: TyThing -> TcM ([ClsInst],[FamInst])
lookupInsts (ATyCon tc)
= do { InstEnvs { ie_global = pkg_ie, ie_local = home_ie, ie_visible = vis_mods } <- tcGetInstEnvs
; (pkg_fie, home_fie) <- tcGetFamInstEnvs
-- Load all instances for all classes that are
-- in the type environment (which are all the ones
-- we've seen in any interface file so far)
-- Return only the instances relevant to the given thing, i.e.
-- the instances whose head contains the thing's name.
; let cls_insts =
[ ispec -- Search all
| ispec <- instEnvElts home_ie ++ instEnvElts pkg_ie
, instIsVisible vis_mods ispec
, tc_name `elemNameSet` orphNamesOfClsInst ispec ]
; let fam_insts =
[ fispec
| fispec <- famInstEnvElts home_fie ++ famInstEnvElts pkg_fie
, tc_name `elemNameSet` orphNamesOfFamInst fispec ]
; return (cls_insts, fam_insts) }
where
tc_name = tyConName tc
lookupInsts _ = return ([],[])
loadUnqualIfaces :: HscEnv -> InteractiveContext -> TcM ()
-- Load the interface for everything that is in scope unqualified
-- This is so that we can accurately report the instances for
-- something
loadUnqualIfaces hsc_env ictxt
= initIfaceTcRn $ do
mapM_ (loadSysInterface doc) (moduleSetElts (mkModuleSet unqual_mods))
where
this_pkg = thisPackage (hsc_dflags hsc_env)
unqual_mods = [ nameModule name
| gre <- globalRdrEnvElts (ic_rn_gbl_env ictxt)
, let name = gre_name gre
, nameIsFromExternalPackage this_pkg name
, isTcOcc (nameOccName name) -- Types and classes only
, unQualOK gre ] -- In scope unqualified
doc = text "Need interface for module whose export(s) are in scope unqualified"
{-
************************************************************************
* *
Debugging output
* *
************************************************************************
-}
rnDump :: (Outputable a, Data a) => a -> TcRn ()
-- Dump, with a banner, if -ddump-rn
rnDump rn = do { traceOptTcRn Opt_D_dump_rn (mkDumpDoc "Renamer" (ppr rn)) }
tcDump :: TcGblEnv -> TcRn ()
tcDump env
= do { dflags <- getDynFlags ;
-- Dump short output if -ddump-types or -ddump-tc
when (dopt Opt_D_dump_types dflags || dopt Opt_D_dump_tc dflags)
(printForUserTcRn short_dump) ;
-- Dump bindings if -ddump-tc
traceOptTcRn Opt_D_dump_tc (mkDumpDoc "Typechecker" full_dump);
-- Dump bindings as an hsSyn AST if -ddump-tc-ast
traceOptTcRn Opt_D_dump_tc_ast (mkDumpDoc "Typechecker" ast_dump)
}
where
short_dump = pprTcGblEnv env
full_dump = pprLHsBinds (tcg_binds env)
-- NB: foreign x-d's have undefined's in their types;
-- hence can't show the tc_fords
ast_dump = showAstData NoBlankSrcSpan (tcg_binds env)
-- It's unpleasant having both pprModGuts and pprModDetails here
pprTcGblEnv :: TcGblEnv -> SDoc
pprTcGblEnv (TcGblEnv { tcg_type_env = type_env,
tcg_insts = insts,
tcg_fam_insts = fam_insts,
tcg_rules = rules,
tcg_vects = vects,
tcg_imports = imports })
= vcat [ ppr_types type_env
, ppr_tycons fam_insts type_env
, ppr_insts insts
, ppr_fam_insts fam_insts
, vcat (map ppr rules)
, vcat (map ppr vects)
, text "Dependent modules:" <+>
pprUFM (imp_dep_mods imports) (ppr . sort)
, text "Dependent packages:" <+>
ppr (S.toList $ imp_dep_pkgs imports)]
where -- The use of sort is just to reduce unnecessary
-- wobbling in testsuite output
ppr_types :: TypeEnv -> SDoc
ppr_types type_env = getPprDebug $ \dbg ->
let
ids = [id | id <- typeEnvIds type_env, want_sig id]
want_sig id | dbg
= True
| otherwise
= isExternalName (idName id) &&
(not (isDerivedOccName (getOccName id)))
-- Top-level user-defined things have External names.
-- Suppress internally-generated things unless -dppr-debug
in
text "TYPE SIGNATURES" $$ nest 2 (ppr_sigs ids)
ppr_tycons :: [FamInst] -> TypeEnv -> SDoc
ppr_tycons fam_insts type_env = getPprDebug $ \dbg ->
let
fi_tycons = famInstsRepTyCons fam_insts
tycons = [tycon | tycon <- typeEnvTyCons type_env, want_tycon tycon]
want_tycon tycon | dbg = True
| otherwise = not (isImplicitTyCon tycon) &&
isExternalName (tyConName tycon) &&
not (tycon `elem` fi_tycons)
in
vcat [ text "TYPE CONSTRUCTORS"
, nest 2 (ppr_tydecls tycons)
, text "COERCION AXIOMS"
, nest 2 (vcat (map pprCoAxiom (typeEnvCoAxioms type_env))) ]
ppr_insts :: [ClsInst] -> SDoc
ppr_insts [] = empty
ppr_insts ispecs = text "INSTANCES" $$ nest 2 (pprInstances ispecs)
ppr_fam_insts :: [FamInst] -> SDoc
ppr_fam_insts [] = empty
ppr_fam_insts fam_insts =
text "FAMILY INSTANCES" $$ nest 2 (pprFamInsts fam_insts)
ppr_sigs :: [Var] -> SDoc
ppr_sigs ids
-- Print type signatures; sort by OccName
= vcat (map ppr_sig (sortBy (comparing getOccName) ids))
where
ppr_sig id = hang (ppr id <+> dcolon) 2 (ppr (tidyTopType (idType id)))
ppr_tydecls :: [TyCon] -> SDoc
ppr_tydecls tycons
-- Print type constructor info for debug purposes
-- Sort by OccName to reduce unnecessary changes
= vcat [ ppr (tyThingToIfaceDecl (ATyCon tc))
| tc <- sortBy (comparing getOccName) tycons ]
-- The Outputable instance for IfaceDecl uses
-- showToIface, which is what we want here, whereas
-- pprTyThing uses ShowSome.
{-
********************************************************************************
Type Checker Plugins
********************************************************************************
-}
withTcPlugins :: HscEnv -> TcM a -> TcM a
withTcPlugins hsc_env m =
do plugins <- liftIO (loadTcPlugins hsc_env)
case plugins of
[] -> m -- Common fast case
_ -> do ev_binds_var <- newTcEvBinds
(solvers,stops) <- unzip `fmap` mapM (startPlugin ev_binds_var) plugins
-- This ensures that tcPluginStop is called even if a type
-- error occurs during compilation (Fix of #10078)
eitherRes <- tryM $ do
updGblEnv (\e -> e { tcg_tc_plugins = solvers }) m
mapM_ (flip runTcPluginM ev_binds_var) stops
case eitherRes of
Left _ -> failM
Right res -> return res
where
startPlugin ev_binds_var (TcPlugin start solve stop) =
do s <- runTcPluginM start ev_binds_var
return (solve s, stop s)
loadTcPlugins :: HscEnv -> IO [TcPlugin]
#if !defined(GHCI)
loadTcPlugins _ = return []
#else
loadTcPlugins hsc_env =
do named_plugins <- loadPlugins hsc_env
return $ catMaybes $ map load_plugin named_plugins
where
load_plugin (_, plug, opts) = tcPlugin plug opts
#endif
|
ezyang/ghc
|
compiler/typecheck/TcRnDriver.hs
|
bsd-3-clause
| 113,232 | 20 | 29 | 37,138 | 18,431 | 9,698 | 8,733 | -1 | -1 |
{-# LANGUAGE RecordWildCards #-}
module Data.ComponentSystem.Terrain (
updateTerrain,
newTerrainState
) where
import Prelude hiding (Left)
import Data.ComponentSystem
import Data.Wizard.Model
import Data.FiniteDouble
import qualified Data.Set as S
type Left = Int
type Top = Int
type Width = Int
type Height = Int
type Square = ((Left, Top), (Width, Height))
pointsToSquares :: S.Set (Int, Int) -> [Square]
pointsToSquares s = case expand s of
Nothing -> []
Just (sq, s') -> sq : pointsToSquares s'
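-- Worked example (illustrative only, assuming the 10x10 seed square used by
-- expand' below): a horizontal run of grid points merges into one square,
--   pointsToSquares (S.fromList [(0,0), (10,0), (20,0)])
--     == [((0,0), (30,10))]
-- because expandRight keeps absorbing the point 10 pixels to the right until
-- none is found.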
expand :: S.Set (Int, Int) -> Maybe (Square, S.Set (Int, Int))
expand s = uncurry (expand') <$> (S.minView s)
expand' :: (Int, Int) -> S.Set (Int,Int) -> (Square, S.Set (Int, Int))
expand' cur s = expandSquare ((cur, (10,10))) s
expandSquare :: Square -> S.Set (Int, Int) -> (Square, S.Set(Int, Int))
expandSquare s = (uncurry expandRight) . expandDown s
expandRight :: Square -> S.Set (Int, Int) -> (Square, S.Set(Int, Int))
expandRight ((x,y) ,(w,h)) s =
let next = (x + w, y)
in if S.member next s
then expandRight ((x,y), (w + 10, h)) (S.delete next s)
else (((x,y),(w,h)), s)
expandDown :: Square -> S.Set (Int, Int) -> (Square, S.Set(Int, Int))
expandDown ((x,y) ,(w,h)) s =
let area = S.fromList [(x', y + h) | x' <- [x .. (x+w)]]
in if S.isSubsetOf area s
then expandDown ((x,y), (w , h + 10)) (S.difference s area)
else (((x,y),(w,h)), s)
addTerrainBlock :: Square -> GameState -> IO GameState
addTerrainBlock ((l,t), (w, h)) g@GameState{..} = do
i <- createId
let add = addComponent i
pure $ g { terrainSys = add () terrainSys
, positionSys = add (clampedCast (f l w),clampedCast (f t h)) positionSys
, boundSys = add (RectangleBound (clampedCast (fromIntegral w)) (clampedCast (fromIntegral h))) boundSys}
where f p s = (fromIntegral p) + (fromIntegral s / 2)
updateTerrain :: GameState -> IO GameState
updateTerrain g@GameState{..} = let Points ts dirty = terrainState
in if dirty
then
let pSys = clearSys (asMarker terrainSys) positionSys
bSys = clearSys (asMarker terrainSys) boundSys
g' = g { positionSys = pSys
, boundSys = bSys
, terrainSys = newSystem
, terrainState = Points ts False }
sqs = pointsToSquares ts
in do
putStrLn "doing dirty work"
foldr (\s m -> m >>= addTerrainBlock s) (pure g') sqs
else pure g
newTerrainState :: TerrainState
newTerrainState = Points (S.fromList [(x,y) | x <- [0 .. 1000], y <- [0 .. 500], mod x 10 == 0, mod y 10 == 0, gradient x < y ]) True
gradient :: Int -> Int
gradient x = (-200) + if x < 500
then 500 - (div x 2)
else div x 2
|
smobs/elblog
|
server/src/Data/ComponentSystem/Terrain.hs
|
bsd-3-clause
| 3,543 | 0 | 16 | 1,485 | 1,269 | 697 | 572 | 64 | 2 |
module Config.Command.Run
( Config(..)
, mkCfg
, opts
)
where
import State.Types ( State )
import Network.URI ( parseRelativeReference, URI )
import Config.GetOpt ( MkCfg, Opts, noArgs, Err, contentDirDesc )
import Config.Store ( storeOptDescr, ParseStore )
import System.Console.GetOpt
import qualified State.Mem ( new )
-- |The configurable settings for an instance of the comments server
data Config =
Config
{ cfgStore :: !(IO State) -- ^How state is stored
, cfgPort :: !Int -- ^What TCP port to listen on
, cfgLogDir :: !FilePath -- ^Where to put the log files
, cfgHostName :: !String -- ^The hostname used by Snap
, cfgContentDir :: !FilePath -- ^Where to look for the
-- documents to index and content
-- to serve. This is mapped to the
-- root of the Web server.
, cfgScanOnStart :: !Bool -- ^Whether to scan for updated
-- ids in the content directory
, cfgStaticDir :: !(Maybe FilePath) -- ^Other static content to serve
, cfgDefaultPage :: !(Maybe URI)
-- ^Where to redirect to if no URL is requested
, cfgRunAs :: !(Maybe String)
, cfgLogTo :: !(Maybe FilePath)
}
mkCfg :: MkCfg Config
mkCfg =
noArgs $
Config
{ cfgStore = State.Mem.new
, cfgPort = 3000
, cfgLogDir = "."
, cfgHostName = "localhost"
, cfgContentDir = "."
, cfgStaticDir = Nothing
, cfgScanOnStart = True
, cfgDefaultPage = Nothing
, cfgRunAs = Nothing
, cfgLogTo = Nothing
}
-- All of the defined options
opts :: [ParseStore (IO State)] -> Opts Config
opts stores =
[ storeOptDescr stores $ \st cfg -> cfg { cfgStore = st }
, Option "" ["no-scan"] (NoArg (soptScan False))
"Do not update the chapter database at startup"
, Option "" ["scan"] (NoArg (soptScan True))
"Scan for updated content at server startup (default)"
, Option "" ["static-dir"] (ReqArg soptStaticDir "DIR")
"Serve static files from this directory"
, contentDirDesc soptContentDir
, Option "p" ["port"] (ReqArg soptPort "PORT")
"Listen on this port"
, Option "" ["log-dir"] (ReqArg soptLogDir "DIR")
"Store the error and access logs in this directory"
, Option "" ["host"] (ReqArg soptHost "HOSTNAME")
"Use this as the Web server host (not for binding address)"
, Option "" ["default-page"] (ReqArg soptDefaultPage "URL")
"Where the server should redirect when the URL / is requested"
, Option "" ["run-as"] (ReqArg soptRunAs "USERNAME")
"If started as root, attempt to drop privileges by \n\
\changing to this user once the port is bound"
, Option "" ["log-to"] (ReqArg soptLogTo "LOGFILE")
"Write a binary log file as a backup in case of primary\n\
\store failure"
]
type Opt = Err (Config -> Config)
-- Turn an option string into something that updates the configuration
soptPort, soptLogDir, soptHost, soptContentDir, soptStaticDir,
soptDefaultPage, soptRunAs, soptLogTo :: String -> Opt
soptLogDir str = return $ \cfg -> cfg { cfgLogDir = str }
soptHost str = return $ \cfg -> cfg { cfgHostName = str }
soptContentDir str = return $ \cfg -> cfg { cfgContentDir = str }
soptStaticDir str = return $ \cfg -> cfg { cfgStaticDir = Just str }
soptRunAs str = return $ \cfg -> cfg { cfgRunAs = Just str }
soptLogTo str = return $ \cfg -> cfg { cfgLogTo = Just str }
soptPort str =
case reads str of
[(x, [])] -> return $ \cfg -> cfg { cfgPort = x }
_ -> fail $ "Bad port: " ++ show str
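-- Illustrative behaviour of the reads-based parse above (not part of the
-- original source): reads "3000" :: [(Int, String)] yields [(3000, "")], so
-- "--port 3000" is accepted, whereas "3000x" leaves a residue and fails with
-- the "Bad port" message.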
soptDefaultPage str =
case parseRelativeReference str of
Nothing -> fail $ "Not a valid URI: " ++ show str
Just u -> return $ \cfg ->
cfg { cfgDefaultPage = Just u }
soptScan :: Bool -> Opt
soptScan st = return $ \cfg -> cfg { cfgScanOnStart = st }
|
j3h/doc-review
|
src/Config/Command/Run.hs
|
bsd-3-clause
| 4,072 | 0 | 12 | 1,201 | 940 | 533 | 407 | 98 | 2 |
-- import Control.Monad (forM)
-- import Data.Either (isRight)
-- import Data.TicTacToe
-- import qualified Data.TicTacToe as T (Result (..))
-- import Test.Hspec
-- import Test.QuickCheck
-- instance Arbitrary GameState where
-- arbitrary = do
-- player <- arbitrary
-- board <- arbitrary
-- result <- arbitrary
-- oneof $ map return [GameState player board, GameOver result]
-- instance Arbitrary Player where
-- arbitrary = oneof $ map return [XX, OO]
-- instance Arbitrary T.Result where
-- arbitrary = do
-- player <- arbitrary
-- frequency [(2, return $ T.Result player), (1, return T.Draw)]
-- instance Arbitrary Board where
-- arbitrary = Board <$> mapM arbitraryRow [[1..3],[4..6],[7..9]]
-- where
-- arbitraryRow xs =
-- forM xs $ \x -> do
-- player <- arbitrary
-- oneof $ return <$> [Left x, Right player]
-- prop_move :: Int -> GameState -> Bool
-- prop_move x gs@(GameOver _) = move x gs == Left GameIsOver
-- prop_move x gs
-- | x < 1 = move x gs == Left PositionIsTooLow
-- | x > 9 = move x gs == Left PositionIsTooHigh
-- | otherwise = (move x gs == Left PositionTaken) || isRight (move x gs)
-- qc :: IO ()
-- qc = verboseCheck prop_move
-- -- doubleMark =
-- -- let board = [[Left 1, Right XX, Left 3]
-- -- ,[Right OO, Left 5, Left 6]
-- -- ,[Left 7, Right XX, Right OO]]
-- -- gs = GameState XX board
-- -- in it "should not be able to mark an already marked position" $ do
-- -- undefined
-- hs :: IO ()
-- hs = putStrLn "\nHspec tests not implemented"
main :: IO ()
main = putStrLn "test suite not implemented"
-- qc >> hs
-- hspec $ do
-- describe "move operation" $ do
-- it "should not mark an already marked position" $ do
-- xs <- forM [1..9] $ \x -> move x initialGameState
-- mapM_ (`shouldBe` Left PositionTaken) xs
|
rodamber/haskell-tic-tac-toe
|
test/Spec.hs
|
bsd-3-clause
| 1,968 | 0 | 6 | 549 | 67 | 57 | 10 | 2 | 1 |
module Data.SymReg(
module X
) where
import Data.SymReg.AST as X
import Data.SymReg.Parser as X
import Data.SymReg.Evolvable as X
|
Teaspot-Studio/genmus
|
src/Data/SymReg.hs
|
bsd-3-clause
| 136 | 0 | 4 | 24 | 36 | 26 | 10 | 5 | 0 |
module Idris.Imports where
import Idris.AbsSyntax
import Idris.Error
import Idris.Core.TT
import Paths_idris
import System.FilePath
import System.Directory
import Control.Monad.State.Strict
data IFileType = IDR FilePath | LIDR FilePath | IBC FilePath IFileType
deriving (Show, Eq)
srcPath :: FilePath -> FilePath
srcPath fp = let (n, ext) = splitExtension fp in
case ext of
".idr" -> fp
_ -> fp ++ ".idr"
lsrcPath :: FilePath -> FilePath
lsrcPath fp = let (n, ext) = splitExtension fp in
case ext of
".lidr" -> fp
_ -> fp ++ ".lidr"
-- Get name of byte compiled version of an import
ibcPath :: FilePath -> Bool -> FilePath -> FilePath
ibcPath ibcsd use_ibcsd fp = let (d_fp, n_fp) = splitFileName fp
d = if (not use_ibcsd) || ibcsd == ""
then d_fp
else d_fp </> ibcsd
n = dropExtension n_fp
in d </> n <.> "ibc"
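-- Hypothetical example of the path rewriting above (not in the original
-- source), with an ibc subdirectory of ".ibc":
--   ibcPath ".ibc" True  "src/Foo/Bar.idr" == "src/Foo/.ibc/Bar.ibc"
--   ibcPath ".ibc" False "src/Foo/Bar.idr" == "src/Foo/Bar.ibc"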
ibcPathWithFallback :: FilePath -> FilePath -> IO FilePath
ibcPathWithFallback ibcsd fp = do let ibcp = ibcPath ibcsd True fp
ibc <- doesFileExist ibcp
return (if ibc
then ibcp
else ibcPath ibcsd False fp)
ibcPathNoFallback :: FilePath -> FilePath -> FilePath
ibcPathNoFallback ibcsd fp = ibcPath ibcsd True fp
findImport :: [FilePath] -> FilePath -> FilePath -> Idris IFileType
findImport [] ibcsd fp = ierror . Msg $ "Can't find import " ++ fp
findImport (d:ds) ibcsd fp = do let fp_full = d </> fp
ibcp <- runIO $ ibcPathWithFallback ibcsd fp_full
let idrp = srcPath fp_full
let lidrp = lsrcPath fp_full
ibc <- runIO $ doesFileExist ibcp
idr <- runIO $ doesFileExist idrp
lidr <- runIO $ doesFileExist lidrp
-- when idr $ putStrLn $ idrp ++ " ok"
-- when lidr $ putStrLn $ lidrp ++ " ok"
-- when ibc $ putStrLn $ ibcp ++ " ok"
let isrc = if lidr
then LIDR lidrp
else IDR idrp
if ibc
then return (IBC ibcp isrc)
else if (idr || lidr)
then return isrc
else findImport ds ibcsd fp
-- find a specific filename somewhere in a path
findInPath :: [FilePath] -> FilePath -> IO FilePath
findInPath [] fp = fail $ "Can't find file " ++ fp
findInPath (d:ds) fp = do let p = d </> fp
e <- doesFileExist p
if e then return p else findInPath ds fp
|
DanielWaterworth/Idris-dev
|
src/Idris/Imports.hs
|
bsd-3-clause
| 3,137 | 0 | 13 | 1,462 | 729 | 368 | 361 | 57 | 4 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module LDrive.Tests.SPI where
import Ivory.Language
import Ivory.Stdlib
import Ivory.Tower
import Ivory.Tower.HAL.Bus.Interface
import Ivory.Tower.HAL.Bus.Sched
import Ivory.BSP.STM32.ClockConfig
import Ivory.BSP.STM32.Driver.SPI
import Ivory.BSP.STM32.Driver.UART
import Ivory.BSP.STM32.Peripheral.SPI
import LDrive.Platforms
import LDrive.LED
import LDrive.DRV8301
import LDrive.Types
import LDrive.Utils
app :: (e -> ClockConfig)
-> (e -> TestSPI)
-> (e -> TestUART)
-> (e -> ColoredLEDs)
-> Tower e ()
app tocc totestspi touart toleds = do
ldriveTowerDeps
spi <- fmap totestspi getEnv
leds <- fmap toleds getEnv
uart <- fmap touart getEnv
blink (Milliseconds 1000) [redLED leds]
(buffered_ostream, _istream, mon) <- uartTower tocc (testUARTPeriph uart) (testUARTPins uart) 115200
monitor "uart" mon
-- UART buffer transmits in buffers. We want to transmit byte-by-byte and let
-- this monitor manage periodically flushing a buffer.
ostream <- uartUnbuffer (buffered_ostream :: BackpressureTransmit UARTBuffer ('Stored IBool))
let devices = [ drv8301M0
, drv8301M1
]
(sreq, sready) <- spiTower tocc devices (testSPIPins spi)
initdone_sready <- channel
monitor "drv_enable" $ do
-- this just re-emits sready on initdone_sready, we could just use sready as well..
handler sready "init" $ do
e <- emitter (fst initdone_sready) 1
callback $ \t -> do
emit e t
(drvTask0, drvReq0) <- task "drv8301_m0"
drvTower drvReq0 (snd initdone_sready) (SPIDeviceHandle 0)
(drvTask1, drvReq1) <- task "drv8301_m1"
drvTower drvReq1 (snd initdone_sready) (SPIDeviceHandle 1)
schedule ("drv")
[drvTask0, drvTask1] sready sreq
periodic <- period (Milliseconds 500)
monitor "simplecontroller" $ do
handler periodic "periodic" $ do
o <- emitter ostream 64
callback $ \_ -> do
puts o "q"
|
sorki/odrive
|
src/LDrive/Tests/SPI.hs
|
bsd-3-clause
| 2,196 | 0 | 17 | 430 | 583 | 300 | 283 | 58 | 1 |
{- |
Module : ./Common/ConvertMixfixToken.hs
Description : generic conversion of mixfix tokens
Copyright : Christian Maeder and Uni Bremen 2004
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : experimental
Portability : portable
generic conversion of mixfix tokens
-}
module Common.ConvertMixfixToken
( convertMixfixToken
, AsAppl
) where
import Common.Id
import Common.Lexer
import Common.GlobalAnnotations
import Common.Result
-- * convert a literal to a term
type AsAppl a = Id -> [a] -> Range -> a
inc :: Int -> Range -> Range
inc n (Range p) =
Range (map (`incSourceColumn` n) p)
stripIdRange :: Id -> Id
stripIdRange (Id ts cs _) =
Id (map (\ t -> t { tokPos = nullRange }) ts) (map stripIdRange cs) nullRange
makeStringTerm :: Id -> Id -> AsAppl a -> Token -> a
makeStringTerm c f asAppl tok =
makeStrTerm (inc 1 sp) str
where
sp = tokPos tok
str = init (tail (tokStr tok))
makeStrTerm p l =
if null l then asAppl c [] p
else let (hd, tl) = splitString caslChar l
in asAppl f [asAppl (Id [Token ("'" ++ hd ++ "'") p]
[] nullRange) [] p,
makeStrTerm (inc (length hd) p) tl] p
makeNumberTerm :: Id -> AsAppl a -> Token -> a
makeNumberTerm f asAppl t@(Token n p) =
case n of
[] -> error "makeNumberTerm"
[_] -> asAppl (Id [t] [] nullRange) [] p
hd : tl -> asAppl f [asAppl (Id [Token [hd] p] [] nullRange) [] p,
makeNumberTerm f asAppl (Token tl
$ inc 1 p)] p
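-- Informal illustration (not from the original source): with f the operator
-- from the %number annotation, the digits of the token "123" are folded into
-- the right-nested application
--   f 1 (f 2 3)
-- i.e. asAppl f [asAppl "1" [], asAppl f [asAppl "2" [], asAppl "3" []]],
-- with the source position shifted one column per consumed digit.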
makeFraction :: Id -> Id -> AsAppl a -> Token -> a
makeFraction f d asAppl t@(Token s p) =
let (n, r) = span (/= '.') s
dotOffset = length n
in if null r then makeNumberTerm f asAppl t
else asAppl d [makeNumberTerm f asAppl (Token n p),
makeNumberTerm f asAppl $ Token (tail r)
$ inc (dotOffset + 1) p]
$ inc dotOffset p
makeSignedNumber :: Id -> AsAppl a -> Token -> a
makeSignedNumber f asAppl t@(Token n p) =
case n of
[] -> error "makeSignedNumber"
hd : tl ->
if hd == '-' || hd == '+' then
asAppl (Id [Token [hd] p, placeTok ] [] nullRange)
[makeNumberTerm f asAppl $ Token tl
$ inc 1 p] p
else makeNumberTerm f asAppl t
makeFloatTerm :: Id -> Id -> Id -> AsAppl a -> Token -> a
makeFloatTerm f d e asAppl t@(Token s p) =
let (m, r) = span (/= 'E') s
offset = length m
in if null r then makeFraction f d asAppl t
else asAppl e [makeFraction f d asAppl (Token m p),
makeSignedNumber f asAppl $ Token (tail r)
$ inc (offset + 1) p]
$ inc offset p
-- | convert a literal token to an application term
convertMixfixToken :: LiteralAnnos -> AsAppl a
-> (Token -> a) -> Token -> ([Diagnosis], a)
convertMixfixToken ga asAppl toTerm t = let
te = toTerm t
err s = ([Diag Error ("missing %" ++ s ++ " annotation") (tokPos t)], te)
in if isString t then case string_lit ga of
Nothing -> err "string"
Just (c, f) ->
([], makeStringTerm (stripIdRange c) (stripIdRange f) asAppl t)
else if isNumber t then case number_lit ga of
Nothing -> err "number"
Just f0 -> let f = stripIdRange f0 in
if isFloating t then case float_lit ga of
Nothing -> err "floating"
Just (d, e) ->
([], makeFloatTerm f (stripIdRange d) (stripIdRange e) asAppl t)
else ([], makeNumberTerm f asAppl t)
else ([], te)
|
spechub/Hets
|
Common/ConvertMixfixToken.hs
|
gpl-2.0
| 3,749 | 0 | 21 | 1,250 | 1,375 | 700 | 675 | 79 | 7 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.EC2.AssociateAddress
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Associates an Elastic IP address with an instance or a network
-- interface.
--
-- An Elastic IP address is for use in either the EC2-Classic platform or
-- in a VPC. For more information, see
-- <http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/elastic-ip-addresses-eip.html Elastic IP Addresses>
-- in the /Amazon Elastic Compute Cloud User Guide/.
--
-- [EC2-Classic, VPC in an EC2-VPC-only account] If the Elastic IP address
-- is already associated with a different instance, it is disassociated
-- from that instance and associated with the specified instance.
--
-- [VPC in an EC2-Classic account] If you don\'t specify a private IP
-- address, the Elastic IP address is associated with the primary IP
-- address. If the Elastic IP address is already associated with a
-- different instance or a network interface, you get an error unless you
-- allow reassociation.
--
-- This is an idempotent operation. If you perform the operation more than
-- once, Amazon EC2 doesn\'t return an error.
--
-- /See:/ <http://docs.aws.amazon.com/AWSEC2/latest/APIReference/ApiReference-query-AssociateAddress.html AWS API Reference> for AssociateAddress.
module Network.AWS.EC2.AssociateAddress
(
-- * Creating a Request
associateAddress
, AssociateAddress
-- * Request Lenses
, aasInstanceId
, aasAllocationId
, aasNetworkInterfaceId
, aasAllowReassociation
, aasPrivateIPAddress
, aasPublicIP
, aasDryRun
-- * Destructuring the Response
, associateAddressResponse
, AssociateAddressResponse
-- * Response Lenses
, arsAssociationId
, arsResponseStatus
) where
import Network.AWS.EC2.Types
import Network.AWS.EC2.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | /See:/ 'associateAddress' smart constructor.
data AssociateAddress = AssociateAddress'
{ _aasInstanceId :: !(Maybe Text)
, _aasAllocationId :: !(Maybe Text)
, _aasNetworkInterfaceId :: !(Maybe Text)
, _aasAllowReassociation :: !(Maybe Bool)
, _aasPrivateIPAddress :: !(Maybe Text)
, _aasPublicIP :: !(Maybe Text)
, _aasDryRun :: !(Maybe Bool)
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'AssociateAddress' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'aasInstanceId'
--
-- * 'aasAllocationId'
--
-- * 'aasNetworkInterfaceId'
--
-- * 'aasAllowReassociation'
--
-- * 'aasPrivateIPAddress'
--
-- * 'aasPublicIP'
--
-- * 'aasDryRun'
associateAddress
:: AssociateAddress
associateAddress =
AssociateAddress'
{ _aasInstanceId = Nothing
, _aasAllocationId = Nothing
, _aasNetworkInterfaceId = Nothing
, _aasAllowReassociation = Nothing
, _aasPrivateIPAddress = Nothing
, _aasPublicIP = Nothing
, _aasDryRun = Nothing
}
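-- A hypothetical usage sketch (not part of the generated module): start from
-- the defaults and override individual fields with the lenses (assuming
-- OverloadedStrings for the literals):
--
-- > import Control.Lens ((&), (?~))
-- >
-- > req :: AssociateAddress
-- > req = associateAddress
-- >         & aasInstanceId   ?~ "i-0123456789abcdef0"
-- >         & aasAllocationId ?~ "eipalloc-12345678"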
-- | The ID of the instance. This is required for EC2-Classic. For EC2-VPC,
-- you can specify either the instance ID or the network interface ID, but
-- not both. The operation fails if you specify an instance ID unless
-- exactly one network interface is attached.
aasInstanceId :: Lens' AssociateAddress (Maybe Text)
aasInstanceId = lens _aasInstanceId (\ s a -> s{_aasInstanceId = a});
-- | [EC2-VPC] The allocation ID. This is required for EC2-VPC.
aasAllocationId :: Lens' AssociateAddress (Maybe Text)
aasAllocationId = lens _aasAllocationId (\ s a -> s{_aasAllocationId = a});
-- | [EC2-VPC] The ID of the network interface. If the instance has more than
-- one network interface, you must specify a network interface ID.
aasNetworkInterfaceId :: Lens' AssociateAddress (Maybe Text)
aasNetworkInterfaceId = lens _aasNetworkInterfaceId (\ s a -> s{_aasNetworkInterfaceId = a});
-- | [EC2-VPC] Allows an Elastic IP address that is already associated with
-- an instance or network interface to be re-associated with the specified
-- instance or network interface. Otherwise, the operation fails.
--
-- Default: 'false'
aasAllowReassociation :: Lens' AssociateAddress (Maybe Bool)
aasAllowReassociation = lens _aasAllowReassociation (\ s a -> s{_aasAllowReassociation = a});
-- | [EC2-VPC] The primary or secondary private IP address to associate with
-- the Elastic IP address. If no private IP address is specified, the
-- Elastic IP address is associated with the primary private IP address.
aasPrivateIPAddress :: Lens' AssociateAddress (Maybe Text)
aasPrivateIPAddress = lens _aasPrivateIPAddress (\ s a -> s{_aasPrivateIPAddress = a});
-- | The Elastic IP address. This is required for EC2-Classic.
aasPublicIP :: Lens' AssociateAddress (Maybe Text)
aasPublicIP = lens _aasPublicIP (\ s a -> s{_aasPublicIP = a});
-- | Checks whether you have the required permissions for the action, without
-- actually making the request, and provides an error response. If you have
-- the required permissions, the error response is 'DryRunOperation'.
-- Otherwise, it is 'UnauthorizedOperation'.
aasDryRun :: Lens' AssociateAddress (Maybe Bool)
aasDryRun = lens _aasDryRun (\ s a -> s{_aasDryRun = a});
instance AWSRequest AssociateAddress where
type Rs AssociateAddress = AssociateAddressResponse
request = postQuery eC2
response
= receiveXML
(\ s h x ->
AssociateAddressResponse' <$>
(x .@? "associationId") <*> (pure (fromEnum s)))
instance ToHeaders AssociateAddress where
toHeaders = const mempty
instance ToPath AssociateAddress where
toPath = const "/"
instance ToQuery AssociateAddress where
toQuery AssociateAddress'{..}
= mconcat
["Action" =: ("AssociateAddress" :: ByteString),
"Version" =: ("2015-04-15" :: ByteString),
"InstanceId" =: _aasInstanceId,
"AllocationId" =: _aasAllocationId,
"NetworkInterfaceId" =: _aasNetworkInterfaceId,
"AllowReassociation" =: _aasAllowReassociation,
"PrivateIpAddress" =: _aasPrivateIPAddress,
"PublicIp" =: _aasPublicIP, "DryRun" =: _aasDryRun]
-- | /See:/ 'associateAddressResponse' smart constructor.
data AssociateAddressResponse = AssociateAddressResponse'
{ _arsAssociationId :: !(Maybe Text)
, _arsResponseStatus :: !Int
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'AssociateAddressResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'arsAssociationId'
--
-- * 'arsResponseStatus'
associateAddressResponse
:: Int -- ^ 'arsResponseStatus'
-> AssociateAddressResponse
associateAddressResponse pResponseStatus_ =
AssociateAddressResponse'
{ _arsAssociationId = Nothing
, _arsResponseStatus = pResponseStatus_
}
-- | [EC2-VPC] The ID that represents the association of the Elastic IP
-- address with an instance.
arsAssociationId :: Lens' AssociateAddressResponse (Maybe Text)
arsAssociationId = lens _arsAssociationId (\ s a -> s{_arsAssociationId = a});
-- | The response status code.
arsResponseStatus :: Lens' AssociateAddressResponse Int
arsResponseStatus = lens _arsResponseStatus (\ s a -> s{_arsResponseStatus = a});
|
fmapfmapfmap/amazonka
|
amazonka-ec2/gen/Network/AWS/EC2/AssociateAddress.hs
|
mpl-2.0
| 8,033 | 0 | 13 | 1,568 | 1,046 | 632 | 414 | 118 | 1 |