code (string, lengths 5 to 1.03M) | repo_name (string, lengths 5 to 90) | path (string, lengths 4 to 158) | license (15 classes) | size (int64, 5 to 1.03M) | n_ast_errors (int64, 0 to 53.9k) | ast_max_depth (int64, 2 to 4.17k) | n_whitespaces (int64, 0 to 365k) | n_ast_nodes (int64, 3 to 317k) | n_ast_terminals (int64, 1 to 171k) | n_ast_nonterminals (int64, 1 to 146k) | loc (int64, -1 to 37.3k) | cycloplexity (int64, -1 to 1.31k)
---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.CloudTrail.StartLogging
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Starts the recording of AWS API calls and log file delivery for a trail.
--
-- <http://docs.aws.amazon.com/awscloudtrail/latest/APIReference/API_StartLogging.html>
module Network.AWS.CloudTrail.StartLogging
(
-- * Request
StartLogging
-- ** Request constructor
, startLogging
-- ** Request lenses
, sl1Name
-- * Response
, StartLoggingResponse
-- ** Response constructor
, startLoggingResponse
) where
import Network.AWS.Data (Object)
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.CloudTrail.Types
import qualified GHC.Exts
newtype StartLogging = StartLogging
{ _sl1Name :: Text
} deriving (Eq, Ord, Read, Show, Monoid, IsString)
-- | 'StartLogging' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'sl1Name' @::@ 'Text'
--
startLogging :: Text -- ^ 'sl1Name'
-> StartLogging
startLogging p1 = StartLogging
{ _sl1Name = p1
}
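-- A hedged usage sketch: the request is typically built with 'startLogging'
-- and dispatched with a @send@-style function from the wider amazonka
-- library (an assumption about the surrounding API, not something defined
-- in this module):
--
-- @
-- startTrailLogging = send (startLogging "my-trail")
-- @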
-- | The name of the trail for which CloudTrail logs AWS API calls.
sl1Name :: Lens' StartLogging Text
sl1Name = lens _sl1Name (\s a -> s { _sl1Name = a })
data StartLoggingResponse = StartLoggingResponse
deriving (Eq, Ord, Read, Show, Generic)
-- | 'StartLoggingResponse' constructor.
startLoggingResponse :: StartLoggingResponse
startLoggingResponse = StartLoggingResponse
instance ToPath StartLogging where
toPath = const "/"
instance ToQuery StartLogging where
toQuery = const mempty
instance ToHeaders StartLogging
instance ToJSON StartLogging where
toJSON StartLogging{..} = object
[ "Name" .= _sl1Name
]
instance AWSRequest StartLogging where
type Sv StartLogging = CloudTrail
type Rs StartLogging = StartLoggingResponse
request = post "StartLogging"
response = nullResponse StartLoggingResponse
| romanb/amazonka | amazonka-cloudtrail/gen/Network/AWS/CloudTrail/StartLogging.hs | mpl-2.0 | 2,876 | 0 | 9 | 649 | 356 | 217 | 139 | 48 | 1 |
{-# LANGUAGE NoImplicitPrelude, MagicHash, TypeOperators,
DataKinds, TypeFamilies, FlexibleContexts, MultiParamTypeClasses #-}
-----------------------------------------------------------------------------
-- |
-- Module : Java.Net
-- Copyright : (c) Jyothsna Srinivas 2017
--
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
--
-- Bindings for Java Net utilities
--
-----------------------------------------------------------------------------
module Java.Net where
import GHC.Base
import GHC.Int
import Java.Array
import Java.Collections
import Java.IO
import Java.Primitive
-- Start java.net.URL
data {-# CLASS "java.net.URL" #-} URL = URL (Object# URL)
deriving Class
foreign import java unsafe getAuthority :: Java URL String
foreign import java unsafe getContent :: Java URL Object
-- foreign import java unsafe "getContent" getContent2 :: JClassArray -> Java URL Object
foreign import java unsafe getDefaultPort :: Java URL Int
foreign import java unsafe getFile :: Java URL String
foreign import java unsafe getHost :: Java URL String
foreign import java unsafe getPath :: Java URL String
foreign import java unsafe getPort :: Java URL Int
foreign import java unsafe getProtocol :: Java URL String
foreign import java unsafe getQuery :: Java URL String
foreign import java unsafe getRef :: Java URL String
foreign import java unsafe getUserInfo :: Java URL String
-- foreign import java unsafe getConnection :: Java URL URLConnection
-- foreign import java unsafe "getConnection" getConnection2 :: Proxy -> Java URL URLConnection
foreign import java unsafe openStream :: Java URL InputStream
foreign import java unsafe sameFile :: URL -> Java URL Bool
foreign import java unsafe set :: String -> String -> Int -> String -> String -> Java URL ()
foreign import java unsafe "set"
set2 :: String -> String -> Int -> String -> String -> String -> String -> String -> Java URL ()
foreign import java unsafe toExternalForm :: Java URL String
foreign import java unsafe toURI :: Java URL URI
foreign import java unsafe "toURI" toURIFile :: Java File URI
foreign import java unsafe "toURL" toURLFile :: Java File URL
-- TODO
-- End java.net.URL
-- Start java.net.URI
data {-# CLASS "java.net.URI" #-} URI = URI (Object# URI)
deriving Class
-- End java.net.URI
-- Start java.net.InetAddress
data {-# CLASS "java.net.InetAddress" #-} InetAddress = InetAddress (Object# InetAddress)
deriving Class
foreign import java unsafe getAddress :: (b <: InetAddress) => Java b JByteArray
foreign import java unsafe getCanonicalHostName :: (b <: InetAddress) => Java b String
foreign import java unsafe getHostAddress :: (b <: InetAddress) => Java b String
foreign import java unsafe getHostName :: (b <: InetAddress) => Java b String
foreign import java unsafe isAnyLocalAddress :: (b <: InetAddress) => Java b Bool
foreign import java unsafe isLinkLocalAddress :: (b <: InetAddress) => Java b Bool
foreign import java unsafe isLoopbackAddress :: (b <: InetAddress) => Java b Bool
foreign import java unsafe isMCGlobal :: (b <: InetAddress) => Java b Bool
foreign import java unsafe isMCLinkLocal :: (b <: InetAddress) => Java b Bool
foreign import java unsafe isMCNodeLocal :: (b <: InetAddress) => Java b Bool
foreign import java unsafe isMCOrgLocal :: (b <: InetAddress) => Java b Bool
foreign import java unsafe isMCSiteLocal :: (b <: InetAddress) => Java b Bool
foreign import java unsafe isMulticastAddress :: (b <: InetAddress) => Java b Bool
foreign import java unsafe isReachable :: (b <: InetAddress) => Int -> Java b Bool
foreign import java unsafe "isReachable"
isReachableNI :: (b <: InetAddress) => NetworkInterface -> Int -> Int -> Java b Bool
foreign import java unsafe isSiteLocalAddress :: (b <: InetAddress) => Java b Bool
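-- A hedged sketch using only the bindings above: query the host name and
-- probe reachability. The 2000 ms timeout is an arbitrary example value.
--
-- @
-- addressSummary :: (b <: InetAddress) => Java b (String, Bool)
-- addressSummary = do
--   name <- getHostName
--   ok   <- isReachable 2000
--   return (name, ok)
-- @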
-- End java.net.InetAddress
-- Start java.net.NetworkInterface
data {-# CLASS "java.net.NetworkInterface" #-} NetworkInterface = NetworkInterface (Object# NetworkInterface)
deriving Class
foreign import java unsafe getDisplayName :: Java NetworkInterface String
foreign import java unsafe getHardwareAddress :: Java NetworkInterface JByteArray
foreign import java unsafe getIndex :: Java NetworkInterface Int
foreign import java unsafe getInetAddresses :: Java NetworkInterface (Enumeration InetAddress)
foreign import java unsafe getInterfaceAddresses :: Java NetworkInterface (List InterfaceAddress)
foreign import java unsafe getMTU :: Java NetworkInterface Int
foreign import java unsafe getName :: Java NetworkInterface String
foreign import java unsafe getParent :: Java NetworkInterface NetworkInterface
foreign import java unsafe getSubInterfaces :: Java NetworkInterface (Enumeration NetworkInterface)
foreign import java unsafe isLoopback :: Java NetworkInterface Bool
foreign import java unsafe isPointToPoint :: Java NetworkInterface Bool
foreign import java unsafe isUp :: Java NetworkInterface Bool
foreign import java unsafe isVirtual :: Java NetworkInterface Bool
foreign import java unsafe supportsMulticast :: Java NetworkInterface Bool
-- End java.net.NetworkInterface
-- Start java.net.InterfaceAddress
data {-# CLASS "java.net.InterfaceAddress" #-} InterfaceAddress = InterfaceAddress (Object# InterfaceAddress)
deriving Class
foreign import java unsafe "getAddress" getAddressIA :: Java InterfaceAddress Bool
foreign import java unsafe getBroadcast :: Java InterfaceAddress Bool
foreign import java unsafe getNetworkPrefixLength :: Java InterfaceAddress Short
-- End java.net.InterfaceAddress
| pparkkin/eta | libraries/base/Java/Net.hs | bsd-3-clause | 5,665 | 63 | 10 | 882 | 1,366 | 721 | 645 | -1 | -1 |
{-# LANGUAGE GADTs, DataKinds, KindSignatures, TypeFamilies, TypeOperators,
UndecidableInstances, ScopedTypeVariables, FlexibleContexts #-}
module Semantic where
import Type
import Data.Proxy
import GHC.TypeLits
import qualified Reactive.Sodium.Denotational as D
import Reactive.Sodium.Denotational (S, C)
data List a = Nil | Cons a (List a)
type family Index (e :: List *) (i :: Nat) :: * where
Index (Cons a f) 0 = a
Index (Cons a f) i = Index f (i-1)
type family StreamType s :: * where
StreamType (Stream e a) = a
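-- A hedged worked example of how these type families reduce (illustration
-- only; @Env@ is not used elsewhere):
--
-- @
-- type Env = Cons (Stream Nil Int) (Cons (Stream Nil Bool) Nil)
-- -- Index Env 0              ~ Stream Nil Int
-- -- Index Env 1              ~ Stream Nil Bool
-- -- StreamType (Index Env 1) ~ Bool
-- @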
data Semantic (e :: List *) where
LetStream :: KnownType a => Stream e a -> Semantic (Cons (Stream e a) e) -> Semantic e
AssertEquals :: (KnownNat ref, KnownType (StreamType (Index e ref))) =>
Proxy (ref :: Nat) -> S (StreamType (Index e ref)) -> Semantic e -> Semantic e
End :: Semantic e
data Ref (e :: List *) a where
Ref :: Proxy (ref :: Nat) -> Ref e (Index e ref)
Lit :: a -> Ref e a
data Stream (e :: List *) a where
StreamRef :: KnownNat ref => Proxy (ref :: Nat) -> Stream e (StreamType (Index e ref))
MkStream :: KnownType a => S a -> Stream e a
Split :: KnownType a => Stream e [a] -> Stream e a
Defer :: KnownType a => Stream e a -> Stream e a
OrElse :: KnownType a => Stream e a -> Stream e a -> Stream e a
instance KnownType a => KnownType (Stream e a) where
typeOf _ = StreamT (typeOf (undefined :: a))
literalOf _ = Nothing
prev1 :: Proxy 0
prev1 = Proxy
prev2 :: Proxy 1
prev2 = Proxy
prev3 :: Proxy 2
prev3 = Proxy
data SemanticTest = SemanticTest String String (Semantic Nil)
| kevintvh/sodium | common-tests/Semantic.hs | bsd-3-clause | 1,625 | 0 | 12 | 400 | 654 | 346 | 308 | 38 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-incomplete-patterns #-}
{-# OPTIONS_HADDOCK show-extensions #-}
-- |
-- Module : Yi.Syntax.Strokes.Haskell
-- License : GPL-2
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- Produces 'Stroke's from a tree of tokens, used by some of the
-- Haskell modes.
module Yi.Syntax.Strokes.Haskell (getStrokes, tokenToAnnot) where
import Prelude hiding (any, error, exp)
import Data.Foldable (Foldable (foldMap), any)
import Data.Monoid (Endo (..), Monoid (mappend), (<>))
import Data.Traversable (Traversable (sequenceA))
import Yi.Debug (error, trace)
import Yi.Lexer.Alex (Posn (posnOfs), Stroke, Tok (tokPosn, tokT), tokToSpan)
import Yi.Lexer.Haskell
import Yi.String (showT)
import Yi.Style
import Yi.Syntax (Point, Span)
import Yi.Syntax.Haskell
import Yi.Syntax.Tree (subtrees)
-- TODO: (optimization) make sure we take the begin into account, so we
-- don't return useless strokes
getStrokes :: Point -> Point -> Point -> Tree TT -> [Stroke]
getStrokes point begin _end t0 = trace (showT t0) result
where result = appEndo (getStr tkDConst point begin _end t0) []
-- | Get strokes for a module declaration
getStrokeMod :: Point -> Point -> Point -> PModuleDecl TT -> Endo [Stroke]
getStrokeMod point begin _end tm@(PModuleDecl m na e w)
= pKW tm m <> getStr tkImport point begin _end na
<> getStrokes' e <> getStrokes' w
where getStrokes' = getStr tkDConst point begin _end
pKW b word | isErrN b = paintAtom errorStyle word
| otherwise = getStrokes' word
-- | Get strokes for Imports
getStrokeImp :: Point -> Point -> Point -> PImport TT -> Endo [Stroke]
getStrokeImp point begin _end imp@(PImport m qu na t t')
= pKW imp m <> paintQu qu
<> getStr tkImport point begin _end na <> paintAs t <> paintHi t'
where getStrokes' = getStr tkDConst point begin _end
paintAs (Opt (Just (Bin (PAtom n c) tw)))
= one ((fmap (const keywordStyle) . tokToSpan) n) <> com c
<> getStr tkImport point begin _end tw
paintAs a = getStrokes' a
paintQu (Opt (Just (PAtom n c))) = one ((fmap (const keywordStyle) . tokToSpan) n) <> com c
paintQu a = getStrokes' a
paintHi (TC (Bin (Bin (PAtom n c) tw) r)) = one ((fmap (const keywordStyle) . tokToSpan) n)
<> com c <> getStr tkImport point begin _end tw
<> getStrokes' r
paintHi a = getStrokes' a
pKW b word | isErrN b = paintAtom errorStyle word
| otherwise = getStrokes' word
-- | Get strokes for expressions and declarations
getStr :: (TT -> Endo [Stroke]) -> Point -> Point -> Point -> Exp TT
-> Endo [Stroke]
getStr tk point begin _end = getStrokes'
where getStrokes' :: Exp TT -> Endo [Stroke]
getStrokes' t@(PImport {}) = getStrokeImp point begin _end t
getStrokes' t@(PModuleDecl {}) = getStrokeMod point begin _end t
getStrokes' (PModule c m) = com c <> foldMap getStrokes' m
getStrokes' (PAtom t c) = tk t <> com c
getStrokes' (TS col ts') = tk col <> foldMap (getStr tkTConst point begin _end) ts'
getStrokes' (Modid t c) = tkImport t <> com c
getStrokes' (Paren (PAtom l c) g (PAtom r c'))
| isErr r = errStyle l <> getStrokesL g
-- left paren wasn't matched: paint it in red.
-- note that testing this on the "Paren" node actually forces the parsing of the
-- right paren, undermining online behaviour.
| posnOfs (tokPosn l) ==
point || posnOfs (tokPosn r) == point - 1
= pStyle hintStyle l <> com c <> getStrokesL g
<> pStyle hintStyle r <> com c'
| otherwise = tk l <> com c <> getStrokesL g
<> tk r <> com c'
getStrokes' (PError t _ c) = errStyle t <> com c
getStrokes' da@(PData kw na exp eq)
= pKW da kw <> getStrokes' na
<> getStrokes' exp <> getStrokes' eq
getStrokes' (PIn t l) = tk t <> getStrokesL l
getStrokes' (TC l) = getStr tkTConst point begin _end l
getStrokes' (DC (PAtom l c)) = tkDConst l <> com c
getStrokes' (DC r) = getStrokes' r -- do not color operator dc
getStrokes' g@(PGuard' t e t')
= pKW g t <> getStrokes' e <> getStrokes' t'
getStrokes' cl@(PClass e e' exp)
= pKW cl e <> getStrokes' e'
<> getStrokes' exp
getStrokes' t = foldMap getStrokes' (subtrees t) -- by default deal with subtrees
getStrokesL = foldMap getStrokes'
pKW b word | isErrN b = paintAtom errorStyle word
| otherwise = getStrokes' word
-- Stroke helpers follow
tokenToAnnot :: TT -> Maybe (Span String)
tokenToAnnot = sequenceA . tokToSpan . fmap tokenToText
ts :: TT -> Stroke
ts = tokenToStroke
pStyle :: StyleName -> TT -> Endo [Stroke]
pStyle style = one . modStroke style . ts
one :: Stroke -> Endo [Stroke]
one x = Endo (x :)
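-- A hedged note on the 'Endo' encoding used throughout this module: painters
-- are difference lists, so they compose with '<>' and the final stroke list
-- is recovered with 'appEndo' (as in 'getStrokes' above). For instance:
--
-- @
-- paintBoth :: Stroke -> Stroke -> [Stroke]
-- paintBoth a b = appEndo (one a <> one b) []   -- [a, b]
-- @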
paintAtom :: StyleName -> Exp TT -> Endo [Stroke]
paintAtom col (PAtom a c) = pStyle col a <> com c
paintAtom _ _ = error "wrong usage of paintAtom"
isErr :: TT -> Bool
isErr = isErrorTok . tokT
isErrN :: (Foldable v) => v TT -> Bool
isErrN = any isErr
--
-- || not $ null $ isError' t
errStyle :: TT -> Endo [Stroke]
errStyle = pStyle errorStyle
tokenToStroke :: TT -> Stroke
tokenToStroke = fmap tokenToStyle . tokToSpan
modStroke :: StyleName -> Stroke -> Stroke
modStroke f = fmap (f `mappend`)
com :: [TT] -> Endo [Stroke]
com = foldMap tkDConst
tk' :: (TT -> Bool) -> (TT -> Endo [Stroke]) -> TT -> Endo [Stroke]
tk' f s t | isErr t = errStyle t
| tokT t `elem` fmap Reserved [As, Qualified, Hiding]
= one $ (fmap (const variableStyle) . tokToSpan) t
| f t = s t
| otherwise = one (ts t)
tkTConst :: TT -> Endo [Stroke]
tkTConst = tk' (const False) (const (Endo id))
tkDConst :: TT -> Endo [Stroke]
tkDConst = tk' ((== ConsIdent) . tokT) (pStyle dataConstructorStyle)
tkImport :: TT -> Endo [Stroke]
tkImport = tk' ((== ConsIdent) . tokT) (pStyle importStyle)
| TOSPIO/yi | src/library/Yi/Syntax/Strokes/Haskell.hs | gpl-2.0 | 6,545 | 0 | 17 | 1,989 | 2,210 | 1,116 | 1,094 | 113 | 16 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleContexts #-}
-- | A MySQL backend for @persistent@.
module Database.Persist.MySQL
( withMySQLPool
, withMySQLConn
, createMySQLPool
, module Database.Persist.Sql
, MySQL.ConnectInfo(..)
, MySQLBase.SSLInfo(..)
, MySQL.defaultConnectInfo
, MySQLBase.defaultSSLInfo
, MySQLConf(..)
, mockMigration
) where
import Control.Arrow
import Control.Monad.Logger (MonadLogger, runNoLoggingT)
import Control.Monad.IO.Class (MonadIO (..))
import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.Error (ErrorT(..))
import Control.Monad.Trans.Reader (runReaderT)
import Control.Monad.Trans.Writer (runWriterT)
import Data.Aeson
import Data.Aeson.Types (modifyFailure)
import Data.ByteString (ByteString)
import Data.Either (partitionEithers)
import Data.Fixed (Pico)
import Data.Function (on)
import Data.IORef
import Data.List (find, intercalate, sort, groupBy)
import Data.Text (Text, pack)
import qualified Data.Text.IO as T
import Text.Read (readMaybe)
import System.Environment (getEnvironment)
import Data.Acquire (Acquire, mkAcquire, with)
import Data.Conduit
import qualified Blaze.ByteString.Builder.Char8 as BBB
import qualified Blaze.ByteString.Builder.ByteString as BBS
import qualified Data.Conduit.List as CL
import qualified Data.Map as Map
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Database.Persist.Sql
import Data.Int (Int64)
import qualified Database.MySQL.Simple as MySQL
import qualified Database.MySQL.Simple.Param as MySQL
import qualified Database.MySQL.Simple.Result as MySQL
import qualified Database.MySQL.Simple.Types as MySQL
import qualified Database.MySQL.Base as MySQLBase
import qualified Database.MySQL.Base.Types as MySQLBase
import Control.Monad.Trans.Control (MonadBaseControl)
import Control.Monad.Trans.Resource (runResourceT)
-- | Create a MySQL connection pool and run the given action.
-- The pool is properly released after the action finishes using
-- it. Note that you should not use the given 'ConnectionPool'
-- outside the action since it may already have been released.
withMySQLPool :: (MonadIO m, MonadLogger m, MonadBaseControl IO m) =>
MySQL.ConnectInfo
-- ^ Connection information.
-> Int
-- ^ Number of connections to be kept open in the pool.
-> (ConnectionPool -> m a)
-- ^ Action to be executed that uses the connection pool.
-> m a
withMySQLPool ci = withSqlPool $ open' ci
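-- A hedged usage sketch (@migrateAll@ and the use of 'runSqlPool' with a
-- migration are assumptions about the calling code, not part of this module):
--
-- @
-- main :: IO ()
-- main = runNoLoggingT $
--   withMySQLPool MySQL.defaultConnectInfo 10 $ \pool ->
--     runSqlPool (runMigration migrateAll) pool
-- @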
-- | Create a MySQL connection pool. Note that it's your
-- responsibility to properly close the connection pool when
-- unneeded. Use 'withMySQLPool' for automatic resource control.
createMySQLPool :: (MonadBaseControl IO m, MonadIO m, MonadLogger m) =>
MySQL.ConnectInfo
-- ^ Connection information.
-> Int
-- ^ Number of connections to be kept open in the pool.
-> m ConnectionPool
createMySQLPool ci = createSqlPool $ open' ci
-- | Same as 'withMySQLPool', but instead of opening a pool
-- of connections, only one connection is opened.
withMySQLConn :: (MonadBaseControl IO m, MonadIO m, MonadLogger m) =>
MySQL.ConnectInfo
-- ^ Connection information.
-> (SqlBackend -> m a)
-- ^ Action to be executed that uses the connection.
-> m a
withMySQLConn = withSqlConn . open'
-- | Internal function that opens a connection to the MySQL
-- server.
open' :: MySQL.ConnectInfo -> LogFunc -> IO SqlBackend
open' ci logFunc = do
conn <- MySQL.connect ci
MySQLBase.autocommit conn False -- disable autocommit!
smap <- newIORef $ Map.empty
return SqlBackend
{ connPrepare = prepare' conn
, connStmtMap = smap
, connInsertSql = insertSql'
, connInsertManySql = Nothing
, connClose = MySQL.close conn
, connMigrateSql = migrate' ci
, connBegin = const $ MySQL.execute_ conn "start transaction" >> return ()
, connCommit = const $ MySQL.commit conn
, connRollback = const $ MySQL.rollback conn
, connEscapeName = pack . escapeDBName
, connNoLimit = "LIMIT 18446744073709551615"
-- This noLimit is suggested by MySQL's own docs, see
-- <http://dev.mysql.com/doc/refman/5.5/en/select.html>
, connRDBMS = "mysql"
, connLimitOffset = decorateSQLWithLimitOffset "LIMIT 18446744073709551615"
, connLogFunc = logFunc
}
-- | Prepare a query. We don't support prepared statements, but
-- we'll do some client-side preprocessing here.
prepare' :: MySQL.Connection -> Text -> IO Statement
prepare' conn sql = do
let query = MySQL.Query (T.encodeUtf8 sql)
return Statement
{ stmtFinalize = return ()
, stmtReset = return ()
, stmtExecute = execute' conn query
, stmtQuery = withStmt' conn query
}
-- | SQL code to be executed when inserting an entity.
insertSql' :: EntityDef -> [PersistValue] -> InsertSqlResult
insertSql' ent vals =
let sql = pack $ concat
[ "INSERT INTO "
, escapeDBName $ entityDB ent
, "("
, intercalate "," $ map (escapeDBName . fieldDB) $ entityFields ent
, ") VALUES("
, intercalate "," (map (const "?") $ entityFields ent)
, ")"
]
in case entityPrimary ent of
Just _ -> ISRManyKeys sql vals
Nothing -> ISRInsertGet sql "SELECT LAST_INSERT_ID()"
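-- As a hedged illustration, for an entity mapped to table @person@ with
-- fields @name@ and @age@ (and a regular auto-increment key) this produces
-- roughly:
--
-- @
-- INSERT INTO `person`(`name`,`age`) VALUES(?,?)
-- @
--
-- followed by @SELECT LAST_INSERT_ID()@ to recover the generated key.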
-- | Execute a statement that doesn't return any results.
execute' :: MySQL.Connection -> MySQL.Query -> [PersistValue] -> IO Int64
execute' conn query vals = MySQL.execute conn query (map P vals)
-- | Execute a statement that does return results. The results
-- are fetched all at once and stored into memory.
withStmt' :: MonadIO m
=> MySQL.Connection
-> MySQL.Query
-> [PersistValue]
-> Acquire (Source m [PersistValue])
withStmt' conn query vals = do
result <- mkAcquire createResult MySQLBase.freeResult
return $ fetchRows result >>= CL.sourceList
where
createResult = do
-- Execute the query
formatted <- MySQL.formatQuery conn query (map P vals)
MySQLBase.query conn formatted
MySQLBase.storeResult conn
fetchRows result = liftIO $ do
-- Find out the type of the columns
fields <- MySQLBase.fetchFields result
let getters = [ maybe PersistNull (getGetter f f . Just) | f <- fields]
convert = use getters
where use (g:gs) (col:cols) =
let v = g col
vs = use gs cols
in v `seq` vs `seq` (v:vs)
use _ _ = []
-- Ready to go!
let go acc = do
row <- MySQLBase.fetchRow result
case row of
[] -> return (acc [])
_ -> let converted = convert row
in converted `seq` go (acc . (converted:))
go id
-- | @newtype@ around 'PersistValue' that supports the
-- 'MySQL.Param' type class.
newtype P = P PersistValue
instance MySQL.Param P where
render (P (PersistText t)) = MySQL.render t
render (P (PersistByteString bs)) = MySQL.render bs
render (P (PersistInt64 i)) = MySQL.render i
render (P (PersistDouble d)) = MySQL.render d
render (P (PersistBool b)) = MySQL.render b
render (P (PersistDay d)) = MySQL.render d
render (P (PersistTimeOfDay t)) = MySQL.render t
render (P (PersistUTCTime t)) = MySQL.render t
render (P PersistNull) = MySQL.render MySQL.Null
render (P (PersistList l)) = MySQL.render $ listToJSON l
render (P (PersistMap m)) = MySQL.render $ mapToJSON m
render (P (PersistRational r)) =
MySQL.Plain $ BBB.fromString $ show (fromRational r :: Pico)
    -- FIXME: too ambiguous; cannot select the precision without information about the field
render (P (PersistDbSpecific s)) = MySQL.Plain $ BBS.fromByteString s
render (P (PersistObjectId _)) =
error "Refusing to serialize a PersistObjectId to a MySQL value"
-- | @Getter a@ is a function that converts an incoming value
-- into a data type @a@.
type Getter a = MySQLBase.Field -> Maybe ByteString -> a
-- | Helper to construct 'Getter'@s@ using 'MySQL.Result'.
convertPV :: MySQL.Result a => (a -> b) -> Getter b
convertPV f = (f .) . MySQL.convert
-- | Get the corresponding @'Getter' 'PersistValue'@ depending on
-- the type of the column.
getGetter :: MySQLBase.Field -> Getter PersistValue
getGetter field = go (MySQLBase.fieldType field) (MySQLBase.fieldCharSet field)
where
-- Bool
go MySQLBase.Tiny _ = convertPV PersistBool
-- Int64
go MySQLBase.Int24 _ = convertPV PersistInt64
go MySQLBase.Short _ = convertPV PersistInt64
go MySQLBase.Long _ = convertPV PersistInt64
go MySQLBase.LongLong _ = convertPV PersistInt64
-- Double
go MySQLBase.Float _ = convertPV PersistDouble
go MySQLBase.Double _ = convertPV PersistDouble
go MySQLBase.Decimal _ = convertPV PersistDouble
go MySQLBase.NewDecimal _ = convertPV PersistDouble
-- ByteString and Text
-- The MySQL C client (and by extension the Haskell mysql package) doesn't distinguish between binary and non-binary string data at the type level.
-- (e.g. both BLOB and TEXT have the MySQLBase.Blob type).
-- Instead, the character set distinguishes them. Binary data uses character set number 63.
-- See https://dev.mysql.com/doc/refman/5.6/en/c-api-data-structures.html (Search for "63")
go MySQLBase.VarChar 63 = convertPV PersistByteString
go MySQLBase.VarString 63 = convertPV PersistByteString
go MySQLBase.String 63 = convertPV PersistByteString
go MySQLBase.VarChar _ = convertPV PersistText
go MySQLBase.VarString _ = convertPV PersistText
go MySQLBase.String _ = convertPV PersistText
go MySQLBase.Blob 63 = convertPV PersistByteString
go MySQLBase.TinyBlob 63 = convertPV PersistByteString
go MySQLBase.MediumBlob 63 = convertPV PersistByteString
go MySQLBase.LongBlob 63 = convertPV PersistByteString
go MySQLBase.Blob _ = convertPV PersistText
go MySQLBase.TinyBlob _ = convertPV PersistText
go MySQLBase.MediumBlob _ = convertPV PersistText
go MySQLBase.LongBlob _ = convertPV PersistText
-- Time-related
go MySQLBase.Time _ = convertPV PersistTimeOfDay
go MySQLBase.DateTime _ = convertPV PersistUTCTime
go MySQLBase.Timestamp _ = convertPV PersistUTCTime
go MySQLBase.Date _ = convertPV PersistDay
go MySQLBase.NewDate _ = convertPV PersistDay
go MySQLBase.Year _ = convertPV PersistDay
-- Null
go MySQLBase.Null _ = \_ _ -> PersistNull
-- Controversial conversions
go MySQLBase.Set _ = convertPV PersistText
go MySQLBase.Enum _ = convertPV PersistText
-- Conversion using PersistDbSpecific
go MySQLBase.Geometry _ = \_ m ->
case m of
Just g -> PersistDbSpecific g
Nothing -> error "Unexpected null in database specific value"
-- Unsupported
go other _ = error $ "MySQL.getGetter: type " ++
show other ++ " not supported."
----------------------------------------------------------------------
-- | Create the migration plan for the given 'PersistEntity'
-- @val@.
migrate' :: MySQL.ConnectInfo
-> [EntityDef]
-> (Text -> IO Statement)
-> EntityDef
-> IO (Either [Text] [(Bool, Text)])
migrate' connectInfo allDefs getter val = do
let name = entityDB val
(idClmn, old) <- getColumns connectInfo getter val
let (newcols, udefs, fdefs) = mkColumns allDefs val
let udspair = map udToPair udefs
case (idClmn, old, partitionEithers old) of
-- Nothing found, create everything
([], [], _) -> do
let uniques = flip concatMap udspair $ \(uname, ucols) ->
[ AlterTable name $
AddUniqueConstraint uname $
map (findTypeAndMaxLen name) ucols ]
let foreigns = do
Column { cName=cname, cReference=Just (refTblName, _a) } <- newcols
return $ AlterColumn name (refTblName, addReference allDefs (refName name cname) refTblName cname)
let foreignsAlt = map (\fdef -> let (childfields, parentfields) = unzip (map (\((_,b),(_,d)) -> (b,d)) (foreignFields fdef))
in AlterColumn name (foreignRefTableDBName fdef, AddReference (foreignRefTableDBName fdef) (foreignConstraintNameDBName fdef) childfields parentfields)) fdefs
return $ Right $ map showAlterDb $ (addTable newcols val): uniques ++ foreigns ++ foreignsAlt
-- No errors and something found, migrate
(_, _, ([], old')) -> do
let excludeForeignKeys (xs,ys) = (map (\c -> case cReference c of
Just (_,fk) -> case find (\f -> fk == foreignConstraintNameDBName f) fdefs of
Just _ -> c { cReference = Nothing }
Nothing -> c
Nothing -> c) xs,ys)
(acs, ats) = getAlters allDefs name (newcols, udspair) $ excludeForeignKeys $ partitionEithers old'
acs' = map (AlterColumn name) acs
ats' = map (AlterTable name) ats
return $ Right $ map showAlterDb $ acs' ++ ats'
-- Errors
(_, _, (errs, _)) -> return $ Left errs
where
findTypeAndMaxLen tblName col = let (col', ty) = findTypeOfColumn allDefs tblName col
(_, ml) = findMaxLenOfColumn allDefs tblName col
in (col', ty, ml)
addTable :: [Column] -> EntityDef -> AlterDB
addTable cols entity = AddTable $ concat
-- Lower case e: see Database.Persist.Sql.Migration
[ "CREATe TABLE "
, escapeDBName name
, "("
, idtxt
, if null cols then [] else ","
, intercalate "," $ map showColumn cols
, ")"
]
where
name = entityDB entity
idtxt = case entityPrimary entity of
Just pdef -> concat [" PRIMARY KEY (", intercalate "," $ map (escapeDBName . fieldDB) $ compositeFields pdef, ")"]
Nothing -> concat [escapeDBName $ fieldDB $ entityId entity, " BIGINT NOT NULL AUTO_INCREMENT PRIMARY KEY"]
-- | Find out the type of a column.
findTypeOfColumn :: [EntityDef] -> DBName -> DBName -> (DBName, FieldType)
findTypeOfColumn allDefs name col =
maybe (error $ "Could not find type of column " ++
show col ++ " on table " ++ show name ++
" (allDefs = " ++ show allDefs ++ ")")
((,) col) $ do
entDef <- find ((== name) . entityDB) allDefs
fieldDef <- find ((== col) . fieldDB) (entityFields entDef)
return (fieldType fieldDef)
-- | Find out the maxlen of a column (defaults to 200).
findMaxLenOfColumn :: [EntityDef] -> DBName -> DBName -> (DBName, Integer)
findMaxLenOfColumn allDefs name col =
maybe (col, 200)
((,) col) $ do
entDef <- find ((== name) . entityDB) allDefs
fieldDef <- find ((== col) . fieldDB) (entityFields entDef)
maxLenAttr <- find ((T.isPrefixOf "maxlen=") . T.toLower) (fieldAttrs fieldDef)
readMaybe . T.unpack . T.drop 7 $ maxLenAttr
-- | Helper for 'AddReference' that finds out which primary key columns to reference.
addReference :: [EntityDef] -> DBName -> DBName -> DBName -> AlterColumn
addReference allDefs fkeyname reftable cname = AddReference reftable fkeyname [cname] referencedColumns
where
referencedColumns = maybe (error $ "Could not find ID of entity " ++ show reftable
++ " (allDefs = " ++ show allDefs ++ ")")
id $ do
entDef <- find ((== reftable) . entityDB) allDefs
return $ map fieldDB $ entityKeyFields entDef
data AlterColumn = Change Column
| Add' Column
| Drop
| Default String
| NoDefault
| Update' String
-- | See the definition of the 'showAlter' function to see how these fields are used.
| AddReference
DBName -- ^ Referenced table
DBName -- ^ Foreign key name
[DBName] -- ^ Referencing columns
[DBName] -- ^ Referenced columns
| DropReference DBName
type AlterColumn' = (DBName, AlterColumn)
data AlterTable = AddUniqueConstraint DBName [(DBName, FieldType, Integer)]
| DropUniqueConstraint DBName
data AlterDB = AddTable String
| AlterColumn DBName AlterColumn'
| AlterTable DBName AlterTable
udToPair :: UniqueDef -> (DBName, [DBName])
udToPair ud = (uniqueDBName ud, map snd $ uniqueFields ud)
----------------------------------------------------------------------
-- | Returns all of the 'Column'@s@ in the given table currently
-- in the database.
getColumns :: MySQL.ConnectInfo
-> (Text -> IO Statement)
-> EntityDef
-> IO ( [Either Text (Either Column (DBName, [DBName]))] -- ID column
, [Either Text (Either Column (DBName, [DBName]))] -- everything else
)
getColumns connectInfo getter def = do
-- Find out ID column.
stmtIdClmn <- getter "SELECT COLUMN_NAME, \
\IS_NULLABLE, \
\DATA_TYPE, \
\COLUMN_DEFAULT \
\FROM INFORMATION_SCHEMA.COLUMNS \
\WHERE TABLE_SCHEMA = ? \
\AND TABLE_NAME = ? \
\AND COLUMN_NAME = ?"
inter1 <- with (stmtQuery stmtIdClmn vals) ($$ CL.consume)
ids <- runResourceT $ CL.sourceList inter1 $$ helperClmns -- avoid nested queries
-- Find out all columns.
stmtClmns <- getter "SELECT COLUMN_NAME, \
\IS_NULLABLE, \
\COLUMN_TYPE, \
\COLUMN_DEFAULT \
\FROM INFORMATION_SCHEMA.COLUMNS \
\WHERE TABLE_SCHEMA = ? \
\AND TABLE_NAME = ? \
\AND COLUMN_NAME <> ?"
inter2 <- with (stmtQuery stmtClmns vals) ($$ CL.consume)
cs <- runResourceT $ CL.sourceList inter2 $$ helperClmns -- avoid nested queries
-- Find out the constraints.
stmtCntrs <- getter "SELECT CONSTRAINT_NAME, \
\COLUMN_NAME \
\FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE \
\WHERE TABLE_SCHEMA = ? \
\AND TABLE_NAME = ? \
\AND COLUMN_NAME <> ? \
\AND CONSTRAINT_NAME <> 'PRIMARY' \
\AND REFERENCED_TABLE_SCHEMA IS NULL \
\ORDER BY CONSTRAINT_NAME, \
\COLUMN_NAME"
us <- with (stmtQuery stmtCntrs vals) ($$ helperCntrs)
-- Return both
return (ids, cs ++ us)
where
vals = [ PersistText $ pack $ MySQL.connectDatabase connectInfo
, PersistText $ unDBName $ entityDB def
, PersistText $ unDBName $ fieldDB $ entityId def ]
helperClmns = CL.mapM getIt =$ CL.consume
where
getIt = fmap (either Left (Right . Left)) .
liftIO .
getColumn connectInfo getter (entityDB def)
helperCntrs = do
let check [ PersistText cntrName
, PersistText clmnName] = return ( cntrName, clmnName )
check other = fail $ "helperCntrs: unexpected " ++ show other
rows <- mapM check =<< CL.consume
return $ map (Right . Right . (DBName . fst . head &&& map (DBName . snd)))
$ groupBy ((==) `on` fst) rows
-- | Get the information about a column in a table.
getColumn :: MySQL.ConnectInfo
-> (Text -> IO Statement)
-> DBName
-> [PersistValue]
-> IO (Either Text Column)
getColumn connectInfo getter tname [ PersistText cname
, PersistText null_
, PersistText type'
, default'] =
fmap (either (Left . pack) Right) $
runErrorT $ do
-- Default value
default_ <- case default' of
PersistNull -> return Nothing
PersistText t -> return (Just t)
PersistByteString bs ->
case T.decodeUtf8' bs of
Left exc -> fail $ "Invalid default column: " ++
show default' ++ " (error: " ++
show exc ++ ")"
Right t -> return (Just t)
_ -> fail $ "Invalid default column: " ++ show default'
-- Foreign key (if any)
stmt <- lift $ getter "SELECT REFERENCED_TABLE_NAME, \
\CONSTRAINT_NAME, \
\ORDINAL_POSITION \
\FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE \
\WHERE TABLE_SCHEMA = ? \
\AND TABLE_NAME = ? \
\AND COLUMN_NAME = ? \
\AND REFERENCED_TABLE_SCHEMA = ? \
\ORDER BY CONSTRAINT_NAME, \
\COLUMN_NAME"
let vars = [ PersistText $ pack $ MySQL.connectDatabase connectInfo
, PersistText $ unDBName $ tname
, PersistText cname
, PersistText $ pack $ MySQL.connectDatabase connectInfo ]
cntrs <- with (stmtQuery stmt vars) ($$ CL.consume)
ref <- case cntrs of
[] -> return Nothing
[[PersistText tab, PersistText ref, PersistInt64 pos]] ->
return $ if pos == 1 then Just (DBName tab, DBName ref) else Nothing
_ -> fail "MySQL.getColumn/getRef: never here"
-- Okay!
return Column
{ cName = DBName $ cname
, cNull = null_ == "YES"
, cSqlType = parseType type'
, cDefault = default_
, cDefaultConstraintName = Nothing
, cMaxLen = Nothing -- FIXME: maxLen
, cReference = ref
}
getColumn _ _ _ x =
return $ Left $ pack $ "Invalid result from INFORMATION_SCHEMA: " ++ show x
-- | Parse the type of column as returned by MySQL's
-- @INFORMATION_SCHEMA@ tables.
parseType :: Text -> SqlType
parseType "bigint(20)" = SqlInt64
parseType "decimal(32,20)" = SqlNumeric 32 20
{-
parseType "tinyint" = SqlBool
-- Ints
parseType "int" = SqlInt32
--parseType "short" = SqlInt32
--parseType "long" = SqlInt64
--parseType "longlong" = SqlInt64
--parseType "mediumint" = SqlInt32
parseType "bigint" = SqlInt64
-- Double
--parseType "float" = SqlReal
parseType "double" = SqlReal
--parseType "decimal" = SqlReal
--parseType "newdecimal" = SqlReal
-- Text
parseType "varchar" = SqlString
--parseType "varstring" = SqlString
--parseType "string" = SqlString
parseType "text" = SqlString
--parseType "tinytext" = SqlString
--parseType "mediumtext" = SqlString
--parseType "longtext" = SqlString
-- ByteString
parseType "varbinary" = SqlBlob
parseType "blob" = SqlBlob
--parseType "tinyblob" = SqlBlob
--parseType "mediumblob" = SqlBlob
--parseType "longblob" = SqlBlob
-- Time-related
parseType "time" = SqlTime
parseType "datetime" = SqlDayTime
--parseType "timestamp" = SqlDayTime
parseType "date" = SqlDay
--parseType "newdate" = SqlDay
--parseType "year" = SqlDay
-}
parseType b = SqlOther b
----------------------------------------------------------------------
-- | @getAlters allDefs tblName new old@ finds out what needs to
-- be changed from @old@ to become @new@.
getAlters :: [EntityDef]
-> DBName
-> ([Column], [(DBName, [DBName])])
-> ([Column], [(DBName, [DBName])])
-> ([AlterColumn'], [AlterTable])
getAlters allDefs tblName (c1, u1) (c2, u2) =
(getAltersC c1 c2, getAltersU u1 u2)
where
getAltersC [] old = concatMap dropColumn old
getAltersC (new:news) old =
let (alters, old') = findAlters tblName allDefs new old
in alters ++ getAltersC news old'
dropColumn col =
map ((,) (cName col)) $
[DropReference n | Just (_, n) <- [cReference col]] ++
[Drop]
getAltersU [] old = map (DropUniqueConstraint . fst) old
getAltersU ((name, cols):news) old =
case lookup name old of
Nothing ->
AddUniqueConstraint name (map findTypeAndMaxLen cols) : getAltersU news old
Just ocols ->
let old' = filter (\(x, _) -> x /= name) old
in if sort cols == ocols
then getAltersU news old'
else DropUniqueConstraint name
: AddUniqueConstraint name (map findTypeAndMaxLen cols)
: getAltersU news old'
where
findTypeAndMaxLen col = let (col', ty) = findTypeOfColumn allDefs tblName col
(_, ml) = findMaxLenOfColumn allDefs tblName col
in (col', ty, ml)
-- | @findAlters newColumn oldColumns@ finds out what needs to be
-- changed in the columns @oldColumns@ for @newColumn@ to be
-- supported.
findAlters :: DBName -> [EntityDef] -> Column -> [Column] -> ([AlterColumn'], [Column])
findAlters tblName allDefs col@(Column name isNull type_ def _defConstraintName maxLen ref) cols =
case filter ((name ==) . cName) cols of
    -- new foreign key that didn't exist before
[] -> case ref of
Nothing -> ([(name, Add' col)],[])
Just (tname, _b) -> let cnstr = [addReference allDefs (refName tblName name) tname name]
in (map ((,) tname) (Add' col : cnstr), cols)
Column _ isNull' type_' def' _defConstraintName' maxLen' ref':_ ->
let -- Foreign key
refDrop = case (ref == ref', ref') of
(False, Just (_, cname)) -> [(name, DropReference cname)]
_ -> []
refAdd = case (ref == ref', ref) of
(False, Just (tname, _cname)) -> [(tname, addReference allDefs (refName tblName name) tname name)]
_ -> []
-- Type and nullability
modType | showSqlType type_ maxLen False `ciEquals` showSqlType type_' maxLen' False && isNull == isNull' = []
| otherwise = [(name, Change col)]
-- Default value
modDef | def == def' = []
| otherwise = case def of
Nothing -> [(name, NoDefault)]
Just s -> [(name, Default $ T.unpack s)]
in ( refDrop ++ modType ++ modDef ++ refAdd
, filter ((name /=) . cName) cols )
where
ciEquals x y = T.toCaseFold (T.pack x) == T.toCaseFold (T.pack y)
----------------------------------------------------------------------
-- | Prints the part of a @CREATE TABLE@ statement about a given
-- column.
showColumn :: Column -> String
showColumn (Column n nu t def _defConstraintName maxLen ref) = concat
[ escapeDBName n
, " "
, showSqlType t maxLen True
, " "
, if nu then "NULL" else "NOT NULL"
, case def of
Nothing -> ""
Just s -> " DEFAULT " ++ T.unpack s
, case ref of
Nothing -> ""
Just (s, _) -> " REFERENCES " ++ escapeDBName s
]
-- | Renders an 'SqlType' in MySQL's format.
showSqlType :: SqlType
-> Maybe Integer -- ^ @maxlen@
-> Bool -- ^ include character set information?
-> String
showSqlType SqlBlob Nothing _ = "BLOB"
showSqlType SqlBlob (Just i) _ = "VARBINARY(" ++ show i ++ ")"
showSqlType SqlBool _ _ = "TINYINT(1)"
showSqlType SqlDay _ _ = "DATE"
showSqlType SqlDayTime _ _ = "DATETIME"
showSqlType SqlInt32 _ _ = "INT(11)"
showSqlType SqlInt64 _ _ = "BIGINT"
showSqlType SqlReal _ _ = "DOUBLE"
showSqlType (SqlNumeric s prec) _ _ = "NUMERIC(" ++ show s ++ "," ++ show prec ++ ")"
showSqlType SqlString Nothing True = "TEXT CHARACTER SET utf8"
showSqlType SqlString Nothing False = "TEXT"
showSqlType SqlString (Just i) True = "VARCHAR(" ++ show i ++ ") CHARACTER SET utf8"
showSqlType SqlString (Just i) False = "VARCHAR(" ++ show i ++ ")"
showSqlType SqlTime _ _ = "TIME"
showSqlType (SqlOther t) _ _ = T.unpack t
-- | Render an action that must be done on the database.
showAlterDb :: AlterDB -> (Bool, Text)
showAlterDb (AddTable s) = (False, pack s)
showAlterDb (AlterColumn t (c, ac)) =
(isUnsafe ac, pack $ showAlter t (c, ac))
where
isUnsafe Drop = True
isUnsafe _ = False
showAlterDb (AlterTable t at) = (False, pack $ showAlterTable t at)
-- | Render an action that must be done on a table.
showAlterTable :: DBName -> AlterTable -> String
showAlterTable table (AddUniqueConstraint cname cols) = concat
[ "ALTER TABLE "
, escapeDBName table
, " ADD CONSTRAINT "
, escapeDBName cname
, " UNIQUE("
, intercalate "," $ map escapeDBName' cols
, ")"
]
where
escapeDBName' (name, (FTTypeCon _ "Text" ), maxlen) = escapeDBName name ++ "(" ++ show maxlen ++ ")"
escapeDBName' (name, (FTTypeCon _ "String" ), maxlen) = escapeDBName name ++ "(" ++ show maxlen ++ ")"
escapeDBName' (name, (FTTypeCon _ "ByteString"), maxlen) = escapeDBName name ++ "(" ++ show maxlen ++ ")"
escapeDBName' (name, _ , _) = escapeDBName name
showAlterTable table (DropUniqueConstraint cname) = concat
[ "ALTER TABLE "
, escapeDBName table
, " DROP INDEX "
, escapeDBName cname
]
-- | Render an action that must be done on a column.
showAlter :: DBName -> AlterColumn' -> String
showAlter table (oldName, Change (Column n nu t def defConstraintName maxLen _ref)) =
concat
[ "ALTER TABLE "
, escapeDBName table
, " CHANGE "
, escapeDBName oldName
, " "
, showColumn (Column n nu t def defConstraintName maxLen Nothing)
]
showAlter table (_, Add' col) =
concat
[ "ALTER TABLE "
, escapeDBName table
, " ADD COLUMN "
, showColumn col
]
showAlter table (n, Drop) =
concat
[ "ALTER TABLE "
, escapeDBName table
, " DROP COLUMN "
, escapeDBName n
]
showAlter table (n, Default s) =
concat
[ "ALTER TABLE "
, escapeDBName table
, " ALTER COLUMN "
, escapeDBName n
, " SET DEFAULT "
, s
]
showAlter table (n, NoDefault) =
concat
[ "ALTER TABLE "
, escapeDBName table
, " ALTER COLUMN "
, escapeDBName n
, " DROP DEFAULT"
]
showAlter table (n, Update' s) =
concat
[ "UPDATE "
, escapeDBName table
, " SET "
, escapeDBName n
, "="
, s
, " WHERE "
, escapeDBName n
, " IS NULL"
]
showAlter table (_, AddReference reftable fkeyname t2 id2) = concat
[ "ALTER TABLE "
, escapeDBName table
, " ADD CONSTRAINT "
, escapeDBName fkeyname
, " FOREIGN KEY("
, intercalate "," $ map escapeDBName t2
, ") REFERENCES "
, escapeDBName reftable
, "("
, intercalate "," $ map escapeDBName id2
, ")"
]
showAlter table (_, DropReference cname) = concat
[ "ALTER TABLE "
, escapeDBName table
, " DROP FOREIGN KEY "
, escapeDBName cname
]
refName :: DBName -> DBName -> DBName
refName (DBName table) (DBName column) =
DBName $ T.concat [table, "_", column, "_fkey"]
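-- For example, @refName (DBName "person") (DBName "author_id")@ yields
-- @DBName "person_author_id_fkey"@.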
----------------------------------------------------------------------
-- | Escape a database name to be included on a query.
escapeDBName :: DBName -> String
escapeDBName (DBName s) = '`' : go (T.unpack s)
where
go ('`':xs) = '`' : '`' : go xs
go ( x :xs) = x : go xs
go "" = "`"
-- | Information required to connect to a MySQL database
-- using @persistent@'s generic facilities. These values are the
-- same as those given to 'withMySQLPool'.
data MySQLConf = MySQLConf
{ myConnInfo :: MySQL.ConnectInfo
-- ^ The connection information.
, myPoolSize :: Int
-- ^ How many connections should be held on the connection pool.
} deriving Show
instance FromJSON MySQLConf where
parseJSON v = modifyFailure ("Persistent: error loading MySQL conf: " ++) $
flip (withObject "MySQLConf") v $ \o -> do
database <- o .: "database"
host <- o .: "host"
port <- o .: "port"
path <- o .:? "path"
user <- o .: "user"
password <- o .: "password"
pool <- o .: "poolsize"
let ci = MySQL.defaultConnectInfo
{ MySQL.connectHost = host
, MySQL.connectPort = port
, MySQL.connectPath = case path of
Just p -> p
Nothing -> MySQL.connectPath MySQL.defaultConnectInfo
, MySQL.connectUser = user
, MySQL.connectPassword = password
, MySQL.connectDatabase = database
}
return $ MySQLConf ci pool
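-- A hedged example of the object shape this instance accepts (the values
-- are placeholders; @path@ may be omitted):
--
-- @
-- database: mydb
-- host: localhost
-- port: 3306
-- user: appuser
-- password: secret
-- poolsize: 10
-- @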
instance PersistConfig MySQLConf where
type PersistConfigBackend MySQLConf = SqlPersistT
type PersistConfigPool MySQLConf = ConnectionPool
createPoolConfig (MySQLConf cs size) = runNoLoggingT $ createMySQLPool cs size -- FIXME
runPool _ = runSqlPool
loadConfig = parseJSON
applyEnv conf = do
env <- getEnvironment
let maybeEnv old var = maybe old id $ lookup ("MYSQL_" ++ var) env
return conf
{ myConnInfo =
case myConnInfo conf of
MySQL.ConnectInfo
{ MySQL.connectHost = host
, MySQL.connectPort = port
, MySQL.connectPath = path
, MySQL.connectUser = user
, MySQL.connectPassword = password
, MySQL.connectDatabase = database
} -> (myConnInfo conf)
{ MySQL.connectHost = maybeEnv host "HOST"
, MySQL.connectPort = read $ maybeEnv (show port) "PORT"
, MySQL.connectPath = maybeEnv path "PATH"
, MySQL.connectUser = maybeEnv user "USER"
, MySQL.connectPassword = maybeEnv password "PASSWORD"
, MySQL.connectDatabase = maybeEnv database "DATABASE"
}
}
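-- The 'applyEnv' method above recognizes the overrides @MYSQL_HOST@,
-- @MYSQL_PORT@, @MYSQL_PATH@, @MYSQL_USER@, @MYSQL_PASSWORD@ and
-- @MYSQL_DATABASE@; as a hedged illustration (@my-app@ is a placeholder
-- executable):
--
-- @
-- MYSQL_HOST=db.internal MYSQL_PORT=3307 my-app
-- @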
mockMigrate :: MySQL.ConnectInfo
-> [EntityDef]
-> (Text -> IO Statement)
-> EntityDef
-> IO (Either [Text] [(Bool, Text)])
mockMigrate _connectInfo allDefs _getter val = do
let name = entityDB val
let (newcols, udefs, fdefs) = mkColumns allDefs val
let udspair = map udToPair udefs
case ([], [], partitionEithers []) of
-- Nothing found, create everything
([], [], _) -> do
let uniques = flip concatMap udspair $ \(uname, ucols) ->
[ AlterTable name $
AddUniqueConstraint uname $
map (findTypeAndMaxLen name) ucols ]
let foreigns = do
Column { cName=cname, cReference=Just (refTblName, _a) } <- newcols
return $ AlterColumn name (refTblName, addReference allDefs (refName name cname) refTblName cname)
let foreignsAlt = map (\fdef -> let (childfields, parentfields) = unzip (map (\((_,b),(_,d)) -> (b,d)) (foreignFields fdef))
in AlterColumn name (foreignRefTableDBName fdef, AddReference (foreignRefTableDBName fdef) (foreignConstraintNameDBName fdef) childfields parentfields)) fdefs
return $ Right $ map showAlterDb $ (addTable newcols val): uniques ++ foreigns ++ foreignsAlt
-- No errors and something found, migrate
(_, _, ([], old')) -> do
let excludeForeignKeys (xs,ys) = (map (\c -> case cReference c of
Just (_,fk) -> case find (\f -> fk == foreignConstraintNameDBName f) fdefs of
Just _ -> c { cReference = Nothing }
Nothing -> c
Nothing -> c) xs,ys)
(acs, ats) = getAlters allDefs name (newcols, udspair) $ excludeForeignKeys $ partitionEithers old'
acs' = map (AlterColumn name) acs
ats' = map (AlterTable name) ats
return $ Right $ map showAlterDb $ acs' ++ ats'
-- Errors
(_, _, (errs, _)) -> return $ Left errs
where
findTypeAndMaxLen tblName col = let (col', ty) = findTypeOfColumn allDefs tblName col
(_, ml) = findMaxLenOfColumn allDefs tblName col
in (col', ty, ml)
-- | Mock a migration even when the database is not present.
-- This function runs the migration against a stubbed backend and
-- prints the generated statements without connecting to an actual
-- database.
mockMigration :: Migration -> IO ()
mockMigration mig = do
smap <- newIORef $ Map.empty
let sqlbackend = SqlBackend { connPrepare = \_ -> do
return Statement
{ stmtFinalize = return ()
, stmtReset = return ()
, stmtExecute = undefined
, stmtQuery = \_ -> return $ return ()
},
connInsertManySql = Nothing,
connInsertSql = undefined,
connStmtMap = smap,
connClose = undefined,
connMigrateSql = mockMigrate undefined,
connBegin = undefined,
connCommit = undefined,
connRollback = undefined,
connEscapeName = undefined,
connNoLimit = undefined,
connRDBMS = undefined,
connLimitOffset = undefined,
connLogFunc = undefined}
result = runReaderT . runWriterT . runWriterT $ mig
resp <- result sqlbackend
mapM_ T.putStrLn $ map snd $ snd resp
| jasonzoladz/persistent | persistent-mysql/Database/Persist/MySQL.hs | mit | 40,047 | 12 | 37 | 13,525 | 9,453 | 4,963 | 4,490 | -1 | -1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="pt-BR">
<title>SOAP Support Add-on</title>
<maps>
<homeID>soap</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | kingthorin/zap-extensions | addOns/soap/src/main/javahelp/org/zaproxy/zap/extension/soap/resources/help_pt_BR/helpset_pt_BR.hs | apache-2.0 | 965 | 77 | 67 | 157 | 413 | 209 | 204 | -1 | -1 |
{-# LANGUAGE FlexibleContexts #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.ByteString.Lazy.Lens
-- Copyright : (C) 2012-2015 Edward Kmett
-- License : BSD-style (see the file LICENSE)
-- Maintainer : Edward Kmett <[email protected]>
-- Stability : experimental
-- Portability : non-portable
--
-- Lazy 'ByteString' lenses.
----------------------------------------------------------------------------
module Data.ByteString.Lazy.Lens
( packedBytes, unpackedBytes, bytes
, packedChars, unpackedChars, chars
) where
import Control.Lens
import Control.Lens.Internal.ByteString
import Data.ByteString.Lazy as Words
import Data.ByteString.Lazy.Char8 as Char8
import Data.Word (Word8)
import Data.Int (Int64)
-- $setup
-- >>> :set -XOverloadedStrings
-- >>> import Numeric.Lens
-- | 'Data.ByteString.Lazy.pack' (or 'Data.ByteString.Lazy.unpack') a list of bytes into a 'ByteString'.
--
-- @
-- 'packedBytes' ≡ 'from' 'unpackedBytes'
-- 'Data.ByteString.pack' x ≡ x '^.' 'packedBytes'
-- 'Data.ByteString.unpack' x ≡ x '^.' 'from' 'packedBytes'
-- @
--
-- >>> [104,101,108,108,111]^.packedBytes == Char8.pack "hello"
-- True
packedBytes :: Iso' [Word8] ByteString
packedBytes = iso Words.pack unpackLazy
{-# INLINE packedBytes #-}
-- | 'Data.ByteString.Lazy.unpack' (or 'Data.ByteString.Lazy.pack') a 'ByteString' into a list of bytes
--
-- @
-- 'unpackedBytes' ≡ 'from' 'packedBytes'
-- 'Data.ByteString.unpack' x ≡ x '^.' 'unpackedBytes'
-- 'Data.ByteString.pack' x ≡ x '^.' 'from' 'unpackedBytes'
-- @
--
-- >>> "hello"^.packedChars.unpackedBytes
-- [104,101,108,108,111]
unpackedBytes :: Iso' ByteString [Word8]
unpackedBytes = from packedBytes
{-# INLINE unpackedBytes #-}
-- | Traverse the individual bytes in a 'ByteString'.
--
-- This 'Traversal' walks each strict 'ByteString' chunk in a tree-like fashion
-- to enable zippers to seek to locations more quickly and to accelerate
-- many monoidal queries, but up to associativity (and constant factors) it is
-- equivalent to the much slower:
--
-- @
-- 'bytes' ≡ 'unpackedBytes' '.' 'traversed'
-- @
--
-- >>> anyOf bytes (== 0x80) (Char8.pack "hello")
-- False
--
-- Note that when just using this as a 'Setter', @'setting' 'Data.ByteString.Lazy.map'@
-- can be more efficient.
bytes :: IndexedTraversal' Int64 ByteString Word8
bytes = traversedLazy
{-# INLINE bytes #-}
-- | 'Data.ByteString.Lazy.Char8.pack' (or 'Data.ByteString.Lazy.Char8.unpack') a list of characters into a 'ByteString'.
--
-- When writing back to the 'ByteString' it is assumed that every 'Char'
-- lies between @'\x00'@ and @'\xff'@.
--
-- @
-- 'packedChars' ≡ 'from' 'unpackedChars'
-- 'Data.ByteString.Char8.pack' x ≡ x '^.' 'packedChars'
-- 'Data.ByteString.Char8.unpack' x ≡ x '^.' 'from' 'packedChars'
-- @
--
-- >>> "hello"^.packedChars.each.re (base 16 . enum).to (\x -> if Prelude.length x == 1 then '0':x else x)
-- "68656c6c6f"
packedChars :: Iso' String ByteString
packedChars = iso Char8.pack unpackLazy8
{-# INLINE packedChars #-}
-- | 'Data.ByteString.Lazy.Char8.unpack' (or 'Data.ByteString.Lazy.Char8.pack') a list of characters into a 'ByteString'
--
-- When writing back to the 'ByteString' it is assumed that every 'Char'
-- lies between @'\x00'@ and @'\xff'@.
--
-- @
-- 'unpackedChars' ≡ 'from' 'packedChars'
-- 'Data.ByteString.Char8.unpack' x ≡ x '^.' 'unpackedChars'
-- 'Data.ByteString.Char8.pack' x ≡ x '^.' 'from' 'unpackedChars'
-- @
--
-- >>> [104,101,108,108,111]^.packedBytes.unpackedChars
-- "hello"
unpackedChars :: Iso' ByteString String
unpackedChars = from packedChars
{-# INLINE unpackedChars #-}
-- | Traverse the individual bytes in a 'ByteString' as characters.
--
-- When writing back to the 'ByteString' it is assumed that every 'Char'
-- lies between @'\x00'@ and @'\xff'@.
--
-- This 'Traversal' walks each strict 'ByteString' chunk in a tree-like fashion
-- to enable zippers to seek to locations more quickly and to accelerate
-- many monoidal queries, but up to associativity (and constant factors) it is
-- equivalent to:
--
-- @
-- 'chars' = 'unpackedChars' '.' 'traversed'
-- @
--
-- >>> anyOf chars (== 'h') "hello"
-- True
chars :: IndexedTraversal' Int64 ByteString Char
chars = traversedLazy8
{-# INLINE chars #-}
| rpglover64/lens | src/Data/ByteString/Lazy/Lens.hs | bsd-3-clause | 4,308 | 0 | 6 | 622 | 296 | 213 | 83 | 28 | 1 |
module Foo where
moo = poo z z
where z = blerg True
blerg True = Nothing
{-@ poo :: x:Maybe a -> {v: Maybe a | v = x } -> Bool @-}
poo :: Maybe a -> Maybe a -> Bool
poo x y = True
| ssaavedra/liquidhaskell | tests/todo/maybe4.hs | bsd-3-clause | 208 | 0 | 7 | 76 | 62 | 32 | 30 | 6 | 1 |
{-# LANGUAGE BangPatterns #-}
-- | Get markdown templates.
module HL.Controller.Markdown where
import HL.Controller
import HL.Model.Markdown
import HL.View
import HL.View.Markdown
-- | Render a simple markdown page.
markdownPage :: [Route App] -> Text -> FilePath -> C (Html ())
markdownPage crumbs t name =
do content <- io (getMarkdown name)
lucid (markdownV crumbs t content)
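-- A hedged usage sketch (the handler name and file name are placeholders,
-- not definitions from this package):
--
-- @
-- getCommunityR :: C (Html ())
-- getCommunityR = markdownPage [] "Community" "community.md"
-- @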
| josefs/hl | src/HL/Controller/Markdown.hs | bsd-3-clause | 390 | 0 | 11 | 66 | 110 | 59 | 51 | 10 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
-----------------------------------------------------------------------------
-- |
-- Module : XMonad.Hooks.ScreenCorners
-- Copyright : (c) 2009 Nils Schweinsberg
-- License : BSD3-style (see LICENSE)
--
-- Maintainer : Nils Schweinsberg <[email protected]>
-- Stability : unstable
-- Portability : unportable
--
-- Run @X ()@ actions by touching the edge of your screen with your mouse.
--
-----------------------------------------------------------------------------
module XMonad.Hooks.ScreenCorners
(
-- * Usage
-- $usage
-- * Adding screen corners
ScreenCorner (..)
, addScreenCorner
, addScreenCorners
-- * Event hook
, screenCornerEventHook
) where
import Data.Monoid
import Data.List (find)
import XMonad
import XMonad.Util.XUtils (fi)
import qualified Data.Map as M
import qualified XMonad.Util.ExtensibleState as XS
data ScreenCorner = SCUpperLeft
| SCUpperRight
| SCLowerLeft
| SCLowerRight
deriving (Eq, Ord, Show)
--------------------------------------------------------------------------------
-- ExtensibleState modifications
--------------------------------------------------------------------------------
newtype ScreenCornerState = ScreenCornerState (M.Map Window (ScreenCorner, X ()))
deriving Typeable
instance ExtensionClass ScreenCornerState where
initialValue = ScreenCornerState M.empty
-- | Add a single @X ()@ action to a screen corner
addScreenCorner :: ScreenCorner -> X () -> X ()
addScreenCorner corner xF = do
ScreenCornerState m <- XS.get
(win,xFunc) <- case find (\(_,(sc,_)) -> sc == corner) (M.toList m) of
Just (w, (_,xF')) -> return (w, xF' >> xF) -- chain X actions
Nothing -> flip (,) xF `fmap` createWindowAt corner
XS.modify $ \(ScreenCornerState m') -> ScreenCornerState $ M.insert win (corner,xFunc) m'
-- | Add a list of @(ScreenCorner, X ())@ tuples
addScreenCorners :: [ (ScreenCorner, X ()) ] -> X ()
addScreenCorners = mapM_ (\(corner, xF) -> addScreenCorner corner xF)
--------------------------------------------------------------------------------
-- Xlib functions
--------------------------------------------------------------------------------
-- "Translate" a ScreenCorner to real (x,y) Positions
createWindowAt :: ScreenCorner -> X Window
createWindowAt SCUpperLeft = createWindowAt' 0 0
createWindowAt SCUpperRight = withDisplay $ \dpy ->
let w = displayWidth dpy (defaultScreen dpy) - 1
in createWindowAt' (fi w) 0
createWindowAt SCLowerLeft = withDisplay $ \dpy ->
let h = displayHeight dpy (defaultScreen dpy) - 1
in createWindowAt' 0 (fi h)
createWindowAt SCLowerRight = withDisplay $ \dpy ->
let w = displayWidth dpy (defaultScreen dpy) - 1
h = displayHeight dpy (defaultScreen dpy) - 1
in createWindowAt' (fi w) (fi h)
-- Create a new X window at a (x,y) Position
createWindowAt' :: Position -> Position -> X Window
createWindowAt' x y = withDisplay $ \dpy -> io $ do
rootw <- rootWindow dpy (defaultScreen dpy)
let
visual = defaultVisualOfScreen $ defaultScreenOfDisplay dpy
attrmask = cWOverrideRedirect
w <- allocaSetWindowAttributes $ \attributes -> do
set_override_redirect attributes True
createWindow dpy -- display
rootw -- parent window
x -- x
y -- y
1 -- width
1 -- height
0 -- border width
0 -- depth
inputOnly -- class
visual -- visual
attrmask -- valuemask
attributes -- attributes
-- we only need mouse entry events
selectInput dpy w enterWindowMask
mapWindow dpy w
sync dpy False
return w
--------------------------------------------------------------------------------
-- Event hook
--------------------------------------------------------------------------------
-- | Handle screen corner events
screenCornerEventHook :: Event -> X All
screenCornerEventHook CrossingEvent { ev_window = win } = do
ScreenCornerState m <- XS.get
case M.lookup win m of
Just (_, xF) -> xF
Nothing -> return ()
return (All True)
screenCornerEventHook _ = return (All True)
--------------------------------------------------------------------------------
-- $usage
--
-- This extension adds KDE-like screen corners to XMonad. By moving your cursor
-- into one of your screen corners you can trigger an @X ()@ action, for
-- example @"XMonad.Actions.GridSelect".goToSelected@ or
-- @"XMonad.Actions.CycleWS".nextWS@ etc.
--
-- To use it, import it on top of your @xmonad.hs@:
--
-- > import XMonad.Hooks.ScreenCorners
--
-- Then add your screen corners in your startup hook:
--
-- > myStartupHook = do
-- > ...
-- > addScreenCorner SCUpperRight (goToSelected defaultGSConfig { gs_cellwidth = 200})
-- > addScreenCorners [ (SCLowerRight, nextWS)
-- > , (SCLowerLeft, prevWS)
-- > ]
--
-- And finally wait for screen corner events in your event hook:
--
-- > myEventHook e = do
-- > ...
-- > screenCornerEventHook e
| adinapoli/xmonad-contrib | XMonad/Hooks/ScreenCorners.hs | bsd-3-clause | 5,455 | 0 | 15 | 1,385 | 959 | 526 | 433 | 75 | 2 |
-- Test the flag `force-no-intermediates` (issue #4114)
module Main (main) where
import T4114aSub
keep, nokeep :: [FilePath]
keep = ["T4114aSub.hi", "T4114aSub.o", "T4114a.hi", "T4114a.o"]
nokeep = [ ]
main :: IO ()
main = do
mapM_ assertNoKeep nokeep
mapM_ assertKeep keep
| ezyang/ghc | testsuite/tests/driver/T4114a.hs | bsd-3-clause | 285 | 0 | 7 | 50 | 78 | 45 | 33 | 9 | 1 |
{-# LANGUAGE Unsafe #-}
{-# LANGUAGE NoImplicitPrelude, MagicHash, UnboxedTuples #-}
{-# OPTIONS_HADDOCK hide #-}
-----------------------------------------------------------------------------
-- |
-- Module : GHC.Pack
-- Copyright : (c) The University of Glasgow 1997-2002
-- License : see libraries/base/LICENSE
--
-- Maintainer : [email protected]
-- Stability : internal
-- Portability : non-portable (GHC Extensions)
--
-- This module provides a small set of low-level functions for packing
-- and unpacking a chunk of bytes. Used by code emitted by the compiler
-- plus the prelude libraries.
--
-- The programmer level view of packed strings is provided by a GHC
-- system library PackedString.
--
-----------------------------------------------------------------------------
module GHC.Pack
(
-- (**) - emitted by compiler.
packCString#,
unpackCString,
unpackCString#,
unpackNBytes#,
unpackFoldrCString#, -- (**)
unpackAppendCString#, -- (**)
)
where
import GHC.Base
import GHC.List ( length )
import GHC.ST
import GHC.Ptr
data ByteArray ix = ByteArray ix ix ByteArray#
data MutableByteArray s ix = MutableByteArray ix ix (MutableByteArray# s)
unpackCString :: Ptr a -> [Char]
unpackCString a@(Ptr addr)
| a == nullPtr = []
| otherwise = unpackCString# addr
packCString# :: [Char] -> ByteArray#
packCString# str = case (packString str) of { ByteArray _ _ bytes -> bytes }
packString :: [Char] -> ByteArray Int
packString str = runST (packStringST str)
packStringST :: [Char] -> ST s (ByteArray Int)
packStringST str =
let len = length str in
packNBytesST len str
packNBytesST :: Int -> [Char] -> ST s (ByteArray Int)
packNBytesST (I# length#) str =
{-
allocate an array that will hold the string
(not forgetting the NUL byte at the end)
-}
new_ps_array (length# +# 1#) >>= \ ch_array ->
-- fill in packed string from "str"
fill_in ch_array 0# str >>
-- freeze the puppy:
freeze_ps_array ch_array length#
where
fill_in :: MutableByteArray s Int -> Int# -> [Char] -> ST s ()
fill_in arr_in# idx [] =
write_ps_array arr_in# idx (chr# 0#) >>
return ()
fill_in arr_in# idx (C# c : cs) =
write_ps_array arr_in# idx c >>
fill_in arr_in# (idx +# 1#) cs
-- (Very :-) ``Specialised'' versions of some CharArray things...
new_ps_array :: Int# -> ST s (MutableByteArray s Int)
write_ps_array :: MutableByteArray s Int -> Int# -> Char# -> ST s ()
freeze_ps_array :: MutableByteArray s Int -> Int# -> ST s (ByteArray Int)
new_ps_array size = ST $ \ s ->
case (newByteArray# size s) of { (# s2#, barr# #) ->
(# s2#, MutableByteArray bot bot barr# #) }
where
bot = errorWithoutStackTrace "new_ps_array"
write_ps_array (MutableByteArray _ _ barr#) n ch = ST $ \ s# ->
case writeCharArray# barr# n ch s# of { s2# ->
(# s2#, () #) }
-- same as unsafeFreezeByteArray
freeze_ps_array (MutableByteArray _ _ arr#) len# = ST $ \ s# ->
case unsafeFreezeByteArray# arr# s# of { (# s2#, frozen# #) ->
(# s2#, ByteArray 0 (I# len#) frozen# #) }
| tolysz/prepare-ghcjs | spec-lts8/base/GHC/Pack.hs | bsd-3-clause | 3,178 | 0 | 13 | 721 | 766 | 409 | 357 | 54 | 2 |
module NoPatternSynonyms where
pattern P :: G Int
| olsner/ghc | testsuite/tests/parser/should_fail/NoPatternSynonyms.hs | bsd-3-clause | 51 | 0 | 6 | 9 | 14 | 8 | 6 | -1 | -1 |
{-# LANGUAGE ExistentialQuantification #-}
module Boot where
import A
data Data = forall n. Class n => D n
| urbanslug/ghc | testsuite/tests/ghci/prog006/Boot2.hs | bsd-3-clause | 109 | 0 | 7 | 21 | 29 | 17 | 12 | 4 | 0 |
-- !!! deriving Enum on d. type with nullary constructors
module ShouldSucceed where
data AD = A | B | C | D deriving (Enum)
| wxwxwwxxx/ghc | testsuite/tests/deriving/should_compile/drv010.hs | bsd-3-clause | 126 | 0 | 6 | 26 | 28 | 18 | 10 | 2 | 0 |
-- !!! dcon hiding (in the presence of identically named tycon.)
-- (test contributed by Ross Paterson.)
module M where
import Mod132_B
foo = Foo
| hferreiro/replay | testsuite/tests/module/mod132.hs | bsd-3-clause | 147 | 0 | 4 | 26 | 14 | 10 | 4 | 3 | 1 |
-- Testing showInt, lightly.
module Main(main) where
import Numeric
showSignedInt :: Integral a => a -> String
showSignedInt x = showSigned (showInt) 0 x ""
main =
do
putStrLn (showInt (343023920121::Integer) [])
putStrLn (showInt (3430239::Int) [])
putStrLn (showInt (1212 :: Int) [])
putStrLn (showSignedInt (591125662431 `div` (517::Int)))
-- showInt just works over naturals, wrap it up inside
-- a use of Numeric.showSigned to show negative nums.
putStrLn (showSignedInt (-111::Int))
putStrLn (showInt (232189458241::Integer) [])
| beni55/ghcjs | test/pkg/base/Numeric/num002.hs | mit | 566 | 0 | 12 | 104 | 191 | 101 | 90 | 12 | 1 |
module Paths_haskell (
version,
getBinDir, getLibDir, getDataDir, getLibexecDir,
getDataFileName, getSysconfDir
) where
import qualified Control.Exception as Exception
import Data.Version (Version(..))
import System.Environment (getEnv)
import Prelude
catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a
catchIO = Exception.catch
version :: Version
version = Version [1,0] []
bindir, libdir, datadir, libexecdir, sysconfdir :: FilePath
bindir = "/Users/chilchenchi/.cabal/bin"
libdir = "/Users/chilchenchi/.cabal/lib/x86_64-osx-ghc-7.10.1/haske_HotUm2Ye6mjLmEWihcBSw6"
datadir = "/Users/chilchenchi/.cabal/share/x86_64-osx-ghc-7.10.1/haskell-1.0"
libexecdir = "/Users/chilchenchi/.cabal/libexec"
sysconfdir = "/Users/chilchenchi/.cabal/etc"
getBinDir, getLibDir, getDataDir, getLibexecDir, getSysconfDir :: IO FilePath
getBinDir = catchIO (getEnv "haskell_bindir") (\_ -> return bindir)
getLibDir = catchIO (getEnv "haskell_libdir") (\_ -> return libdir)
getDataDir = catchIO (getEnv "haskell_datadir") (\_ -> return datadir)
getLibexecDir = catchIO (getEnv "haskell_libexecdir") (\_ -> return libexecdir)
getSysconfDir = catchIO (getEnv "haskell_sysconfdir") (\_ -> return sysconfdir)
getDataFileName :: FilePath -> IO FilePath
getDataFileName name = do
dir <- getDataDir
return (dir ++ "/" ++ name)
| momo9/seven-lang | haskell/dist/build/autogen/Paths_haskell.hs | mit | 1,343 | 0 | 10 | 177 | 356 | 202 | 154 | 28 | 1 |
module HCraft.Math
( module Math
, fract
) where
import HCraft.Math.Matrix as Math
import HCraft.Math.Shapes as Math
import HCraft.Math.Vector as Math
-- |Fractional part of a number
fract :: RealFrac a => a -> a
fract x
= x - fromIntegral (floor x :: Int)
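-- Illustrative examples (evaluated at the default Double type):
--
-- >>> fract 3.75
-- 0.75
-- >>> fract (-1.25)
-- 0.75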
| nandor/hcraft | HCraft/Math.hs | mit | 267 | 0 | 8 | 55 | 80 | 49 | 31 | 9 | 1 |
module Main where
import System.Console.Style
import Control.Monad.Trans
import Data.Foldable
import Text.Printf
import Control.Monad.Trans.State.Strict
import System.IO (stdout)
ansiColors :: [Color]
ansiColors = [ DefaultColor
, Black
, Red
, Green
, Yellow
, Blue
, Magenta
, Cyan
, White
, DullBlack
, DullRed
, DullGreen
, DullYellow
, DullBlue
, DullMagenta
, DullCyan
, DullWhite
]
ansiColorsExample :: IO ()
ansiColorsExample = runWithStyle [] $
for_ ansiColors $ \c -> do
withStyle [BgColor c] $ liftIO $ printf "%-15s" $ show c
withStyle [FgColor c] $ liftIO $ printf " %-15s" $ show c
withStyle [BgColor c, Invert] $ liftIO $ printf " %-15s" $ show c
withStyle [FgColor c, Invert] $ liftIO $ printf " %-15s" $ show c
liftIO $ putChar '\n'
colors256Example :: IO ()
colors256Example = runWithStyle [] $
for_ [0..255] $ \c -> do
withStyle [BgColor $ Color256 c] $ liftIO $ printf "%02x" c
withStyle [FgColor $ Color256 c] $ liftIO $ printf " %02x" c
withStyle [BgColor $ Color256 c, Invert] $ liftIO $ printf " %02x" c
withStyle [FgColor $ Color256 c, Invert] $ liftIO $ printf " %02x" c
liftIO $ putChar '\n'
rgbExample :: IO ()
rgbExample = runWithStyle [] $
for_ [0,64..255] $ \r ->
for_ [0,64..255] $ \g ->
for_ [0,64..255] $ \b -> do
let c = RGB r g b
withStyle [BgColor c] $ liftIO $ printf "%-20s" $ show c
withStyle [FgColor c] $ liftIO $ printf " %-20s" $ show c
withStyle [BgColor c, Invert] $ liftIO $ printf " %-20s" $ show c
withStyle [FgColor c, Invert] $ liftIO $ printf " %-20s" $ show c
liftIO $ putChar '\n'
specialExample :: IO ()
specialExample = runWithStyle [] $
for_ [(Bold,NotBold),(Italic,NotItalic),(Under,NotUnder),(Invert,NotInvert),(Blink,NotBlink)] $ \(a, b) -> do
setStyle [a]
liftIO $ printf "%-20s" $ show a
setStyle [b]
liftIO $ printf " %-20s" $ show b
liftIO $ putChar '\n'
stackExample :: IO ()
stackExample = do
runWithStyle [] $ loop 0
liftIO $ putChar '\n'
where
loop 8 = pure ()
loop n = do
setStyle [Save, BgColor $ Color256 n]
liftIO $ putStr $ replicate (fromIntegral n) ' '
loop (n + 1)
liftIO $ putStr $ replicate (fromIntegral n) ' '
setStyle [Restore]
basicExample :: IO ()
basicExample = runWithStyle [FgColor Blue] $ do
withStyle [Bold] $ liftIO $ putStr "Bold Blue"
setStyle [Save, Italic, BgColor Red]
liftIO $ putStr "Italic Red"
setStyle [Restore]
setStyle [Under]
liftIO $ putStr "Under Blue"
setStyle [Reset]
liftIO $ putStrLn "Normal output"
setStyleCodeExample :: IO ()
setStyleCodeExample = runStyleT Term8 $ do
start <- setStyleCode [FgColor Green, Bold]
end <- setStyleCode [Reset]
liftIO $ putStrLn (start ++ "Green" ++ end)
applyStyleExample :: IO ()
applyStyleExample = runWithStyle [] $ do
changeStyle [FgColor Blue] -- No escape sequence generated
changeStyle [FgColor Red]
applyStyle -- Escape sequences generated
liftIO $ putStrLn "Red"
reduceExample :: IO ()
reduceExample = do
for_ [0..255] $ \c -> do
flip evalStateT (hDefaultStyle stdout Term256) $ withStyle [BgColor $ Color256 c] $ liftIO $ printf "%02x" c
flip evalStateT (hDefaultStyle stdout Term8) $ withStyle [BgColor $ Color256 c] $ liftIO $ printf "%02x" c
liftIO $ putChar '\n'
for_ [0,64..255] $ \r ->
for_ [0,64..255] $ \g ->
for_ [0,64..255] $ \b -> do
let c = RGB r g b
flip evalStateT (hDefaultStyle stdout TermRGB) $ withStyle [BgColor c] $ liftIO $ printf "%20s" $ show c
flip evalStateT (hDefaultStyle stdout Term256) $ withStyle [BgColor c] $ liftIO $ printf "%20s" $ show c
flip evalStateT (hDefaultStyle stdout Term8) $ withStyle [BgColor c] $ liftIO $ printf "%20s" $ show c
liftIO $ putChar '\n'
main :: IO ()
main = do
ansiColorsExample
colors256Example
rgbExample
specialExample
stackExample
basicExample
setStyleCodeExample
applyStyleExample
reduceExample
| minad/console-style | Example.hs | mit | 4,253 | 0 | 21 | 1,160 | 1,656 | 798 | 858 | 117 | 2 |
-- Knapsack non path tracking, lazy evaluation
module Main where
import System.Random
import System.Environment
import Data.Array
import Nilsson
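-- Note: 'sol' below is a self-referential lazy array; entry i is built from
-- entry (i-1), so the table is filled on demand when sol!n is forced.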
knapsack :: Int -> Weight -> Int -> [(Value,Weight)]
knapsack n wc s = ( sol!(n) )
where
sol :: Array Int [(Value,Weight)]
sol = array (1,n) ([(1, chx (z!!0) ) ] ++
-- ~~~~~~~~~~ Lazy, non path tracking functions
[(i, nchk (sol!(i-1)) (njnk wc (z!!(i-1)) (sol!(i-1))) ) | i<-[2..n] ])
chx :: (Value,Weight) -> [(Value,Weight)]
chx (v,w) = if w <= wc then [(v,w)] else zeroK
z :: [(Value,Weight)]
z = zip (take n (randomValues s)) (take n (randomWeights (s+1)))
randomValues seed = randomRs(1,20) (mkStdGen seed)
randomWeights seed = randomRs(5,25) (mkStdGen seed)
| jcsaenzcarrasco/MPhil-thesis | knp.hs | mit | 792 | 0 | 18 | 194 | 382 | 217 | 165 | 16 | 2 |
module Game.GBA.Boot
where
import Game.GBA.Monad
import Game.GBA.Register
bootForTest :: ProcessorMode -> GBA s ()
bootForTest mode = do
writeStatus statusB UserMode
writeStatus statusT mode
| jhance/gba-hs | src/Game/GBA/Boot.hs | mit | 201 | 0 | 7 | 34 | 59 | 31 | 28 | 7 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Oden.Parser.ParsePackageSpec where
import Test.Hspec
import Oden.Parser
import Oden.SourceInfo
import Oden.Syntax
import Oden.Assertions
src :: Line -> Column -> SourceInfo
src l c = SourceInfo (Position "<stdin>" l c)
spec :: Spec
spec = describe "parsePackage" $ do
it "parses package declaration" $
parsePackage "<stdin>" "package foo/bar"
`shouldSucceedWith`
Package (PackageDeclaration (src 1 1) ["foo", "bar"]) []
it "parses foreign imports" $
parsePackage "<stdin>" "package foo/bar\nimport foreign \"bar/baz\"\nimport foreign \"foo\"\nimport foreign \"github.com/foo/bar\""
`shouldSucceedWith`
Package
(PackageDeclaration (src 1 1) ["foo", "bar"])
[ ImportForeignDeclaration (src 2 1) "bar/baz"
, ImportForeignDeclaration (src 3 1) "foo"
, ImportForeignDeclaration (src 4 1) "github.com/foo/bar"
]
it "parses foreign github.com import" $
parsePackage "<stdin>" "package foo/bar\nimport foreign \"github.com/foo/bar\""
`shouldSucceedWith`
Package
(PackageDeclaration (src 1 1) ["foo", "bar"])
[ImportForeignDeclaration (src 2 1) "github.com/foo/bar"]
it "parses native imports" $
parsePackage "<stdin>" "package foo/bar\nimport bar/baz\nimport foo\nimport company/foo/bar"
`shouldSucceedWith`
Package
(PackageDeclaration (src 1 1) ["foo", "bar"])
[ ImportDeclaration (src 2 1) ["bar", "baz"]
, ImportDeclaration (src 3 1) ["foo"]
, ImportDeclaration (src 4 1) ["company", "foo", "bar"]
]
it "parses native and foreign and imports" $
parsePackage "<stdin>" "package foo/bar\nimport foo_bar/baz\nimport foreign \"foo\""
`shouldSucceedWith`
Package
(PackageDeclaration (src 1 1) ["foo", "bar"])
[ ImportDeclaration (src 2 1) ["foo_bar", "baz"]
, ImportForeignDeclaration (src 3 1) "foo"
]
it "fails to parse URL-like import name" $
shouldFail (parsePackage "<stdin>" "package foo\nimport github.com/mypkg")
it "fails to parse import name with dash" $
shouldFail (parsePackage "<stdin>" "package foo\nimport company/their-pkg")
| oden-lang/oden | test/Oden/Parser/ParsePackageSpec.hs | mit | 2,180 | 0 | 13 | 442 | 538 | 275 | 263 | 48 | 1 |
{-# OPTIONS_GHC -Wall #-}
-- Naive fibonacci - Only usable for very small n (<30)
naive_fib :: Integer -> Integer
naive_fib 0 = 0
naive_fib 1 = 1
naive_fib n = naive_fib (n-1) + naive_fib (n-2)
fibs1 :: [Integer]
fibs1 = map naive_fib [0..]
-- Memoized fibonacci - Never re-evaluate thunks
memoized_fib :: Int -> Integer
memoized_fib = (map fib [0..] !!)
where fib 0 = 0
fib 1 = 1
fib n = memoized_fib (n-1) + memoized_fib (n-2)
fibs2 :: [Integer]
fibs2 = map memoized_fib [0..]
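-- Illustrative example: the shared list inside 'memoized_fib' makes large
-- indices cheap, unlike 'naive_fib'.
--
-- >>> memoized_fib 50
-- 12586269025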
| vaibhav276/haskell_cs194_assignments | lazyness/Fibancci.hs | mit | 505 | 0 | 10 | 113 | 179 | 96 | 83 | 14 | 3 |
module Main where
import Char
import Stack
import Grammar
import Parser
| SamyNarrainen/SimpleGrammarParser | Haskell/Main.hs | mit | 73 | 0 | 3 | 12 | 16 | 11 | 5 | 5 | 0 |
module WorkerSpec
( main
, spec
) where
import SpecHelper
import Worker
import Data.Time.Duration
main :: IO ()
main = hspec spec
spec :: Spec
spec = withApp $
describe "archivableCommands" $ do
it "does not find recent commands" $ do
now <- liftIO getCurrentTime
token <- newToken
void $ runDB $ insert Command
{ commandToken = token
, commandRunning = True
, commandDescription = Nothing
, commandCreatedAt = now
}
results <- runDB $ archivableCommands 30
results `shouldBe` []
it "does not find old commands with recent output" $ do
now <- liftIO getCurrentTime
token <- newToken
runDB $ do
commandId <- insert Command
{ commandToken = token
, commandRunning = True
, commandDescription = Nothing
, commandCreatedAt = (35 :: Second) `priorTo` now
}
void $ insert Output
{ outputCommand = commandId
, outputContent = ""
, outputCreatedAt = now
}
results <- runDB $ archivableCommands 30
results `shouldBe` []
it "finds old commands with old or no output" $ do
now <- liftIO getCurrentTime
token1 <- newToken
token2 <- newToken
runDB $ do
void $ insert Command
{ commandToken = token1
, commandRunning = True
, commandDescription = Nothing
, commandCreatedAt = (45 :: Second) `priorTo` now
}
commandId <- insert Command
{ commandToken = token2
, commandRunning = True
, commandDescription = Nothing
, commandCreatedAt = (40 :: Second) `priorTo` now
}
void $ insert Output
{ outputCommand = commandId
, outputContent = ""
, outputCreatedAt = (35 :: Second) `priorTo` now
}
results <- runDB $ archivableCommands 30
length results `shouldBe` 2
| mrb/tee-io | test/WorkerSpec.hs | mit | 2,406 | 0 | 19 | 1,121 | 497 | 266 | 231 | 57 | 1 |
module Main where
import Test.Hspec
import qualified Shapes.Linear.TemplateSpec
main :: IO ()
main = hspec $ describe "TemplateSpec" Shapes.Linear.TemplateSpec.spec
| ublubu/shapes | shapes-math/test/Spec.hs | mit | 168 | 0 | 7 | 22 | 44 | 26 | 18 | 5 | 1 |
mult' :: (Num a) => a -> a -> a -> a
mult' x y z = x * y * z
mult'' :: (Num a) => a -> a -> a -> a
mult'' = \x -> \y -> \z -> x * y * z
main = do
print $ mult' 1 2 3
print $ mult'' 1 2 3
| fabioyamate/programming-in-haskell | ch04/ex06.hs | mit | 197 | 0 | 9 | 72 | 134 | 69 | 65 | 7 | 1 |
module GHCJS.DOM.IDBIndex (
) where
| manyoo/ghcjs-dom | ghcjs-dom-webkit/src/GHCJS/DOM/IDBIndex.hs | mit | 38 | 0 | 3 | 7 | 10 | 7 | 3 | 1 | 0 |
{-# LANGUAGE OverloadedStrings #-}
module WeatherSpec (spec) where
import Weather
import Test.Hspec
spec :: Spec
spec = do
describe "getEmoji" $ do
it "returns ⁉ on undefined" $ do
getEmoji "lol" `shouldBe` "\8265\65039"
it "returns ⁉ if code length is < 2" $ do
getEmoji "1" `shouldBe` "\8265\65039"
it "returns an emoji for valid codes" $ do
getEmoji "01" `shouldBe` "\9728\65039"
getEmoji "02" `shouldBe` "\9925\65039"
getEmoji "03" `shouldBe` "\9729\65039"
getEmoji "04" `shouldBe` "\9729\65039"
getEmoji "09" `shouldBe` "\128166"
getEmoji "10" `shouldBe` "\128166"
getEmoji "11" `shouldBe` "\9889\65039"
getEmoji "13" `shouldBe` "\10052\65039"
getEmoji "50" `shouldBe` "\9810\65038"
describe "urlBuilder" $ do
it "return an URL" $ do
urlBuilder "london" `shouldBe` "http://api.openweathermap.org/data/2.5/weather?q=london&units=metric"
| julienXX/weather | test/WeatherSpec.hs | mit | 1,031 | 0 | 14 | 293 | 232 | 114 | 118 | 24 | 1 |
module Oden.Assertions where
import Text.PrettyPrint.Leijen hiding ((<$>))
import Test.Hspec
isLeft :: Either a b -> Bool
isLeft (Left _) = True
isLeft _ = False
isRight :: Either a b -> Bool
isRight (Right _) = True
isRight _ = False
shouldSucceed :: (Eq a, Show a, Show e) => Either e a -> Expectation
shouldSucceed res = res `shouldSatisfy` isRight
shouldSucceedWith :: (Eq v, Show v, Show e) => Either e v -> v -> Expectation
(Left err) `shouldSucceedWith` _ = expectationFailure . show $ err
(Right value) `shouldSucceedWith` expected = value `shouldBe` expected
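-- Illustrative example: @Right 42 `shouldSucceedWith` (42 :: Int)@ is a
-- passing 'Expectation', while @Left "e" `shouldSucceedWith` x@ fails.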
shouldFail :: (Eq a, Show a, Show e) => Either e a -> Expectation
shouldFail res = res `shouldSatisfy` isLeft
shouldFailWith :: (Eq a, Show a, Eq e, Show e) => Either e a -> e -> Expectation
res `shouldFailWith` err = res `shouldSatisfy` (== Left err)
-- PRETTY PRINTING RESULTS
newtype PrettyWrapper a = PrettyWrapper a deriving (Eq)
instance Pretty a => Show (PrettyWrapper a) where
show (PrettyWrapper x) = displayS (renderPretty 0.4 100 (pretty x)) ""
shouldSucceed' :: (Eq a, Pretty a, Show e) => Either e a -> Expectation
shouldSucceed' = shouldSucceed . (PrettyWrapper <$>)
shouldSucceedWith' :: (Eq v, Pretty v, Show e) => Either e v -> v -> Expectation
shouldSucceedWith' res expected = (PrettyWrapper <$> res) `shouldSucceedWith` PrettyWrapper expected
shouldFail' :: (Eq a, Pretty a, Show e) => Either e a -> Expectation
shouldFail' = shouldFail . (PrettyWrapper <$>)
shouldFailWith' :: (Eq a, Pretty a, Eq e, Show e) => Either e a -> e -> Expectation
shouldFailWith' res expected = (PrettyWrapper <$> res) `shouldFailWith` expected
| AlbinTheander/oden | test/Oden/Assertions.hs | mit | 1,677 | 0 | 10 | 331 | 659 | 351 | 308 | 29 | 1 |
{-# LANGUAGE GeneralizedNewtypeDeriving, OverloadedStrings #-}
module GitHub.Types where
import Control.Applicative
import Control.Monad.RWS.Strict
import Data.ByteString.Char8 (ByteString, append, pack)
import Data.Text (Text)
import Network.HTTP.Conduit
data APILocation = GitHub | Enterprise String
githubConfig :: APILocation -> Maybe String -> Maybe (Request m)
githubConfig loc mk = do
req <- parseUrl endpoint
return $ addTokenHeader req
where
endpoint = case loc of
GitHub -> "https://api.github.com/"
Enterprise url -> url
addTokenHeader = case mk of
Nothing -> id
Just k -> \req -> req
{ requestHeaders = ("Authorization", append "token " (pack k)) : requestHeaders req
}
data RateLimit = RateLimit
{ rateLimitLimit :: !Int
, rateLimitRemaining :: !Int
}
data GitHubConfig m = GitHubConfig
{ manager :: Manager
, baseRequest :: Request m
}
newtype GitHub a = GitHubM { runGitHub :: RWST (GitHubConfig IO) () RateLimit IO a }
deriving (Functor, Applicative, Monad, MonadIO)
data PublicKey = PublicKey
{ publicKeyId :: Int
, publicKeyKey :: Text
}
data CurrentUserKey = CurrentUserKey
{ currentUserKeyId :: Int
, currentUserKeyKey :: Text
, currentUserKeyUrl :: Text
, currentUserKeyTitle :: Text
}
data NewUserKey = NewUserKey
{ newUserKeyTitle :: Text
, newUserKeyKey :: Text
}
data UserKeyPatch = UserKeyPatch
{ keyPatchTitle :: Text
, keyPatchKey :: Text
}
data EventsData
data OwnerName
data RepoName
data OrgName
data UserName
| SaneApp/github-api | src/GitHub/Types.hs | mit | 1,548 | 0 | 17 | 318 | 417 | 238 | 179 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
-- Task ventilator
-- Binds PUSH socket to tcp://localhost:5557
-- Sends batch of tasks to workers via that socket
module Main where
import Control.Monad
import qualified Data.ByteString.Char8 as BS
import System.ZMQ4.Monadic
import System.Random
main :: IO ()
main = runZMQ $ do
-- Socket to send messages on
sender <- socket Push
bind sender "tcp://*:5557"
-- Socket to send start of batch message on
sink <- socket Push
connect sink "tcp://localhost:5558"
liftIO $ do
putStrLn "Press Enter when the workers are ready: "
_ <- getLine
putStrLn "Sending tasks to workers..."
-- The first message is "0" and signals start of batch
send sink [] "0"
-- Send 100 tasks
total_msec <- fmap sum $
replicateM 100 $ do
-- Random workload from 1 to 100msecs
workload :: Int <- liftIO $ randomRIO (1, 100)
send sender [] $ BS.pack (show workload)
return workload
liftIO . putStrLn $ "Total expected cost: " ++ show total_msec ++ " msec"
| soscpd/bee | root/tests/zguide/examples/Haskell/taskvent.hs | mit | 1,170 | 0 | 15 | 338 | 230 | 113 | 117 | 24 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Tests.Text.XML.Expat.Mapping (
main, tests
) where
import Control.Error
import Data.ByteString (ByteString)
import Data.Maybe
import Test.Tasty
import Test.Tasty.HUnit
import Text.XML.Expat.Tree
main :: IO ()
main = defaultMain tests
treeOf :: ByteString -> NNode ByteString
treeOf = toNamespaced . toQualified . fromJust . hush . parse' defaultParseOptions
tests :: TestTree
tests =
testGroup "Text.XML.Expat.Mapping"
[ testCase "Basic test 1" (eChildren (treeOf "<foo />") @?= [])
  , testCase "Basic test 2" (eChildren (treeOf "<foo>Hi</foo>") @?= [Text "Hi"])
]
| tel/xml-mapping | tests/Tests/Text/XML/Expat/Mapping.hs | mit | 690 | 0 | 12 | 164 | 181 | 100 | 81 | 18 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell, CPP #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE PatternGuards #-}
-- | Static file serving for WAI.
module Network.Wai.Application.Static
( -- * WAI application
staticApp
-- ** Default Settings
, defaultWebAppSettings
, webAppSettingsWithLookup
, defaultFileServerSettings
, embeddedSettings
-- ** Settings
, StaticSettings
, ssLookupFile
, ssMkRedirect
, ssGetMimeType
, ssListing
, ssIndices
, ssMaxAge
, ssRedirectToIndex
, ssAddTrailingSlash
, ss404Handler
) where
import Prelude hiding (FilePath)
import qualified Network.Wai as W
import qualified Network.HTTP.Types as H
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as S8
import qualified Data.ByteString.Lazy as L
import Data.ByteString.Lazy.Char8 ()
import Control.Monad.IO.Class (liftIO)
import Blaze.ByteString.Builder (toByteString)
import Data.FileEmbed (embedFile)
import Data.Text (Text)
import qualified Data.Text as T
import Network.HTTP.Date (parseHTTPDate, epochTimeToHTTPDate, formatHTTPDate)
import WaiAppStatic.Types
import Util
import WaiAppStatic.Storage.Filesystem
import WaiAppStatic.Storage.Embedded
import Network.Mime (MimeType)
data StaticResponse =
-- | Just the etag hash or Nothing for no etag hash
Redirect Pieces (Maybe ByteString)
| RawRedirect ByteString
| NotFound
| FileResponse File H.ResponseHeaders
| NotModified
-- TODO: add file size
| SendContent MimeType L.ByteString
| WaiResponse W.Response
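-- | Like 'init', but total: returns @[]@ for an empty list.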
safeInit :: [a] -> [a]
safeInit [] = []
safeInit xs = init xs
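-- | Like 'filter', but the last element of the list is always kept.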
filterButLast :: (a -> Bool) -> [a] -> [a]
filterButLast _ [] = []
filterButLast _ [x] = [x]
filterButLast f (x:xs)
| f x = x : filterButLast f xs
| otherwise = filterButLast f xs
-- | Serve an appropriate response for a folder request.
serveFolder :: StaticSettings -> Pieces -> W.Request -> Folder -> IO StaticResponse
serveFolder StaticSettings {..} pieces req folder@Folder {..} =
case ssListing of
Just _ | Just path <- addTrailingSlash req, ssAddTrailingSlash ->
return $ RawRedirect path
Just listing -> do
-- directory listings turned on, display it
builder <- listing pieces folder
return $ WaiResponse $ W.responseBuilder H.status200
[ ("Content-Type", "text/html; charset=utf-8")
] builder
Nothing -> return $ WaiResponse $ W.responseLBS H.status403
[ ("Content-Type", "text/plain")
] "Directory listings disabled"
where
addTrailingSlash :: W.Request -> Maybe ByteString
addTrailingSlash req
| S8.null rp = Just "/"
| S8.last rp == '/' = Nothing
| otherwise = Just $ S8.snoc rp '/'
where
rp = W.rawPathInfo req
checkPieces :: StaticSettings
-> Pieces -- ^ parsed request
-> W.Request
-> IO StaticResponse
-- If we have any empty pieces in the middle of the requested path, generate a
-- redirect to get rid of them.
checkPieces _ pieces _ | any (T.null . fromPiece) $ safeInit pieces =
return $ Redirect (filterButLast (not . T.null . fromPiece) pieces) Nothing
checkPieces ss@StaticSettings {..} pieces req = do
res <- lookupResult
case res of
LRNotFound -> return NotFound
LRFile file -> serveFile ss req file
LRFolder folder -> serveFolder ss pieces req folder
where
lookupResult :: IO LookupResult
lookupResult = do
nonIndexResult <- ssLookupFile pieces
case nonIndexResult of
LRFile{} -> return nonIndexResult
_ -> do
indexResult <- lookupIndices (map (\ index -> pieces ++ [index]) ssIndices)
return $ case indexResult of
LRNotFound -> nonIndexResult
_ -> indexResult
lookupIndices :: [Pieces] -> IO LookupResult
lookupIndices (x : xs) = do
res <- ssLookupFile x
case res of
LRNotFound -> lookupIndices xs
_ -> return res
lookupIndices [] = return LRNotFound
serveFile :: StaticSettings -> W.Request -> File -> IO StaticResponse
serveFile StaticSettings {..} req file
-- First check etag values, if turned on
| ssUseHash = do
mHash <- fileGetHash file
case (mHash, lookup "if-none-match" $ W.requestHeaders req) of
-- if-none-match matches the actual hash, return a 304
(Just hash, Just lastHash) | hash == lastHash -> return NotModified
-- Didn't match, but we have a hash value. Send the file contents
-- with an ETag header.
--
-- Note: It would be arguably better to next check
-- if-modified-since and return a 304 if that indicates a match as
-- well. However, the circumstances under which such a situation
        -- could arise would be very anomalous, and should likely warrant a
-- new file being sent anyway.
(Just hash, _) -> respond [("ETag", hash)]
-- No hash value available, fall back to last modified support.
(Nothing, _) -> lastMod
-- etag turned off, so jump straight to last modified
| otherwise = lastMod
where
mLastSent = lookup "if-modified-since" (W.requestHeaders req) >>= parseHTTPDate
lastMod =
case (fmap epochTimeToHTTPDate $ fileGetModified file, mLastSent) of
-- File modified time is equal to the if-modified-since header,
-- return a 304.
--
-- Question: should the comparison be, date <= lastSent?
(Just mdate, Just lastSent)
| mdate == lastSent -> return NotModified
-- Did not match, but we have a new last-modified header
(Just mdate, _) -> respond [("last-modified", formatHTTPDate mdate)]
-- No modification time available
(Nothing, _) -> respond []
-- Send a file response with the additional weak headers provided.
respond headers = return $ FileResponse file $ cacheControl ssMaxAge headers
-- | Return a difference list of headers based on the specified MaxAge.
--
-- This function will return both Cache-Control and Expires headers, as
-- relevant.
cacheControl :: MaxAge -> (H.ResponseHeaders -> H.ResponseHeaders)
cacheControl maxage =
headerCacheControl . headerExpires
where
ccInt =
case maxage of
NoMaxAge -> Nothing
MaxAgeSeconds i -> Just i
MaxAgeForever -> Just oneYear
oneYear :: Int
oneYear = 60 * 60 * 24 * 365
headerCacheControl =
case ccInt of
Nothing -> id
Just i -> (:) ("Cache-Control", S8.append "public, max-age=" $ S8.pack $ show i)
headerExpires =
case maxage of
NoMaxAge -> id
MaxAgeSeconds _ -> id -- FIXME
MaxAgeForever -> (:) ("Expires", "Thu, 31 Dec 2037 23:55:55 GMT")
-- | Turn a @StaticSettings@ into a WAI application.
staticApp :: StaticSettings -> W.Application
staticApp set req = staticAppPieces set (W.pathInfo req) req
staticAppPieces :: StaticSettings -> [Text] -> W.Application
staticAppPieces _ _ req sendResponse
| notElem (W.requestMethod req) ["GET", "HEAD"] = sendResponse $ W.responseLBS
H.status405
[("Content-Type", "text/plain")]
"Only GET or HEAD is supported"
staticAppPieces _ [".hidden", "folder.png"] _ sendResponse = sendResponse $ W.responseLBS H.status200 [("Content-Type", "image/png")] $ L.fromChunks [$(embedFile "images/folder.png")]
staticAppPieces _ [".hidden", "haskell.png"] _ sendResponse = sendResponse $ W.responseLBS H.status200 [("Content-Type", "image/png")] $ L.fromChunks [$(embedFile "images/haskell.png")]
staticAppPieces ss rawPieces req sendResponse = liftIO $ do
case toPieces rawPieces of
Just pieces -> checkPieces ss pieces req >>= response
Nothing -> sendResponse $ W.responseLBS H.status403
[ ("Content-Type", "text/plain")
] "Forbidden"
where
response :: StaticResponse -> IO W.ResponseReceived
response (FileResponse file ch) = do
mimetype <- ssGetMimeType ss file
let filesize = fileGetSize file
let headers = ("Content-Type", mimetype)
-- Let Warp provide the content-length, since it takes
-- range requests into account
-- : ("Content-Length", S8.pack $ show filesize)
: ch
sendResponse $ fileToResponse file H.status200 headers
response NotModified =
sendResponse $ W.responseLBS H.status304 [] ""
response (SendContent mt lbs) = do
-- TODO: set caching headers
sendResponse $ W.responseLBS H.status200
[ ("Content-Type", mt)
-- TODO: set Content-Length
] lbs
response (Redirect pieces' mHash) = do
let loc = (ssMkRedirect ss) pieces' $ toByteString (H.encodePathSegments $ map fromPiece pieces')
let qString = case mHash of
Just hash -> replace "etag" (Just hash) (W.queryString req)
Nothing -> remove "etag" (W.queryString req)
sendResponse $ W.responseLBS H.status301
[ ("Content-Type", "text/plain")
, ("Location", S8.append loc $ H.renderQuery True qString)
] "Redirect"
response (RawRedirect path) =
sendResponse $ W.responseLBS H.status301
[ ("Content-Type", "text/plain")
, ("Location", path)
] "Redirect"
response NotFound = case (ss404Handler ss) of
Just app -> app req sendResponse
Nothing -> sendResponse $ W.responseLBS H.status404
[ ("Content-Type", "text/plain")
] "File not found"
response (WaiResponse r) = sendResponse r
| rgrinberg/wai | wai-app-static/Network/Wai/Application/Static.hs | mit | 10,080 | 0 | 21 | 2,834 | 2,348 | 1,226 | 1,122 | 190 | 10 |
{-# LANGUAGE OverloadedStrings, DeriveDataTypeable #-}
module Rievaulx.Web
( runServer
) where
import qualified Data.Aeson as Aeson
import qualified Data.Text as T
import Network.HTTP.Types (status200, status404)
import Network.Wai
import Network.Wai.Handler.Warp (run)
import Network.Wai.Middleware.Static (staticPolicy, addBase)
import Rievaulx (TernaryTree, insertMany, prefixTerms)
import Rievaulx.Sources (getRandomizedWords)
import System.Console.CmdArgs (Data, Typeable, (&=), typ, help, program, cmdArgs)
notFound :: Response
notFound = responseLBS status404 [("Content-Type", "text/html")]
"<h1>404 Not Found</h1>"
createApp :: TernaryTree Char -> Application
createApp tree = application
where application req = return $ case pathInfo req of
(_:word:[]) -> responseLBS status200 [("Content-Type", "application/json")]
(Aeson.encode . prefixTerms tree $ T.unpack word)
_ -> notFound
staticMiddleware :: Middleware
staticMiddleware = staticPolicy $ addBase "./public"
port :: Int
port = 3000
runServer :: IO ()
runServer = do
opts <- cmdArgs options
let filename = wordsFile opts
words <- getRandomizedWords filename
let completionCandidates = insertMany words
putStrLn $ "Rievaulx is serving \"" ++ filename ++ "\" on http://localhost:" ++ show port ++ "/ ..."
run port $ staticMiddleware $ createApp completionCandidates
data Options = Options
{ wordsFile :: String
} deriving (Show, Data, Typeable)
options :: Options
options = Options { wordsFile = "/usr/share/dict/words"
&= typ "FILE"
&= help "A file containing a list of words to use"
}
&= program "rievaulx"
| ryankask/rievaulx | src/Rievaulx/Web.hs | mit | 1,853 | 0 | 15 | 484 | 456 | 250 | 206 | 41 | 2 |
import Data.List
data Edge = Edge {label :: Char, from :: Int, to :: Int}
instance Show Edge where
show e = show (label e, from e, to e)
data RE = Plus RE RE
| Concat RE RE
| Closure RE
| Single Char
| Epsilon
| Empty deriving Eq
isPlus :: RE -> Bool
isPlus (Plus _ _) = True
isPlus _ = False
isEpsilon :: RE -> Bool
isEpsilon Epsilon = True
isEpsilon _ = False
isConcat :: RE -> Bool
isConcat (Concat _ _) = True
isConcat _ = False
isClosure :: RE -> Bool
isClosure (Closure _) = True
isClosure _ = False
isSingle :: RE -> Bool
isSingle (Single _) = True
isSingle _ = False
left :: RE -> RE
left (Plus a b) = a
left (Concat a b) = a
left k = k
right :: RE -> RE
right (Plus a b) = b
right (Concat a b) = b
right k = k
-- Minimal Read instance: a single character is parsed as 'Single'.
instance Read RE where
  readsPrec _ (c:rest) = [(Single c, rest)]
  readsPrec _ []       = []
instance Show RE where
show (Plus a b) = show a ++ " + " ++ show b
show (Concat a b) = sa ++ sb
where sa = if isPlus a then "(" ++ show a ++ ")" else show a
sb = if isPlus b then "(" ++ show b ++ ")" else show b
show (Closure k)
| isEpsilon k = show k ++ "*"
| isSingle k = show k ++ "*"
| otherwise = "(" ++ show k ++ ")*"
show (Single k) = [k]
show Epsilon = "%"
show Empty = ""
type Graph = [Edge]
makeGraph :: [(Char, Int, Int)] -> Graph
makeGraph edges = foldr (\(l,f,t) acc -> Edge {label = l, from = f, to = t}:acc) [] edges
reduce :: RE -> RE
reduce r = if go r == r then r else reduce (go r)
where go (Plus Empty Empty) = Empty
go (Plus a Empty) = go a
go (Plus Empty b) = go b
go (Plus a b) = Plus (go a) (go b)
go (Concat Empty _) = Empty
go (Concat _ Empty) = Empty
go (Concat Epsilon Epsilon) = Epsilon
go (Concat a Epsilon) = go a
go (Concat Epsilon b) = go b
go (Concat a b) = Concat (go a) (go b)
go (Closure Empty) = Empty
go (Closure Epsilon) = Epsilon
go (Closure k) = Closure (go k)
go (Single k) = Single k
go Epsilon = Epsilon
go Empty = Empty
removeEpsilon :: RE -> RE
removeEpsilon (Plus a b)
| a == Epsilon = b
| b == Epsilon = a
| a == Epsilon && b == Epsilon = Empty
| otherwise = (removeEpsilon a) `Plus` (removeEpsilon b)
removeEpsilon Epsilon = Empty
removeEpsilon k = k
-- a contain b
contain :: RE -> RE -> Bool
contain _ Empty = True
contain k@(Plus a b) r
| isPlus a = partial || ((left a) `Plus` ((right a) `Plus` b)) `contain` r
| otherwise = partial
where partial = a `contain` r || b `contain` r || k == r
contain k@(Concat a b) Epsilon = (a `contain` Epsilon && b `contain` Epsilon)
contain k@(Concat a b) r = k == r || (a `contain` Epsilon && b `contain` (removeEpsilon r)) || (a `contain` (removeEpsilon r) && b `contain` Epsilon)
contain a@(Closure k) r = a ==r || k `contain` (removeEpsilon r) && r `contain` Epsilon
contain (Single k) (Single l) = k == l
contain (Single k) _ = False
contain Epsilon Epsilon = True
contain Epsilon _ = False
contain Empty _ = False
moreReduce :: RE -> RE
moreReduce r = if go r' == r' then r' else moreReduce (go r')
where r' = reduce r
go r@(Plus a b)
| a `contain` b = a
| b `contain` a = b
| otherwise = (go a) `Plus` (go b)
go r@(Concat a b)
| a `contain` Epsilon && b `contain` a && isClosure b = b
| b `contain` Epsilon && a `contain` b && isClosure a = a
| otherwise = foldl (\x y -> (go x) `Concat` (go y)) (head rs) (tail rs)
where rs = concatListReduce (concatToList r)
go (Closure k)
| isPlus k && k `contain` Epsilon = Closure (removeEpsilon k)
| otherwise = Closure (go k)
go k = k
concatToList (Concat a b) = concatToList a ++ concatToList b
concatToList k = [k]
concatReduce r@(Concat a b)
| a `contain` Epsilon && b `contain` a && isClosure b = b
| b `contain` Epsilon && a `contain` b && isClosure a = a
| otherwise = r
concatListReduce [] = []
concatListReduce [a] = [a]
concatListReduce (a:b:l) = if rab == ab then a:concatListReduce (b:l) else rab:concatListReduce l
where ab = a `Concat` b
rab = concatReduce ab
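-- State elimination: 'toRe g i j' builds a regular expression for every path
-- from state i to state j, allowing states up to 'length g' as intermediates.
-- Illustrative example:
--
-- >>> show (moreReduce (toRe (makeGraph [('a',1,2)]) 1 2))
-- "a"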
toRe :: Graph -> Int -> Int -> RE
toRe g start end = go start end (length g)
where direct i j = filter (\x-> from x == i && to x == j) g
go i j 0 = if i == j then Plus Epsilon s else s
where s = foldl (\acc x -> Plus acc $ Single (label x)) Empty (direct i j)
        go i j k = Plus (go i j (k-1)) (Concat (Concat (go i k (k-1)) (Closure (go k k (k-1)))) (go k j (k-1)))
| jwvg0425/HaskellScratchPad | src/dfare.hs | mit | 4794 | 0 | 17 | 1561 | 2355 | 1213 | 1142 | -1 | -1 |
-----------------------------------------------------------------------------
-- |
-- Module : Data.Graph.Typed
-- Copyright : (c) Anton Lorenzen, Andrey Mokhov 2016-2022
-- License : MIT (see the file LICENSE)
-- Maintainer : [email protected], [email protected]
-- Stability : unstable
--
-- __Alga__ is a library for algebraic construction and manipulation of graphs
-- in Haskell. See <https://github.com/snowleopard/alga-paper this paper> for the
-- motivation behind the library, the underlying theory, and implementation details.
--
-- This module provides primitives for interoperability between this library and
-- the "Data.Graph" module of the containers library. It is for internal use only
-- and may be removed without notice at any point.
-----------------------------------------------------------------------------
module Data.Graph.Typed (
-- * Data type and construction
GraphKL(..), fromAdjacencyMap, fromAdjacencyIntMap,
-- * Basic algorithms
dfsForest, dfsForestFrom, dfs, topSort, scc
) where
import Data.Tree
import Data.Maybe
import Data.Foldable
import qualified Data.Graph as KL
import qualified Algebra.Graph.AdjacencyMap as AM
import qualified Algebra.Graph.NonEmpty.AdjacencyMap as NonEmpty
import qualified Algebra.Graph.AdjacencyIntMap as AIM
import qualified Data.Map.Strict as Map
import qualified Data.Set as Set
-- | 'GraphKL' encapsulates King-Launchbury graphs, which are implemented in
-- the "Data.Graph" module of the @containers@ library.
data GraphKL a = GraphKL {
-- | Array-based graph representation (King and Launchbury, 1995).
toGraphKL :: KL.Graph,
-- | A mapping of "Data.Graph.Vertex" to vertices of type @a@.
-- This is partial and may fail if the vertex is out of bounds.
fromVertexKL :: KL.Vertex -> a,
-- | A mapping from vertices of type @a@ to "Data.Graph.Vertex".
-- Returns 'Nothing' if the argument is not in the graph.
toVertexKL :: a -> Maybe KL.Vertex }
-- | Build 'GraphKL' from an 'AM.AdjacencyMap'. If @fromAdjacencyMap g == h@
-- then the following holds:
--
-- @
-- map ('fromVertexKL' h) ('Data.Graph.vertices' $ 'toGraphKL' h) == 'AM.vertexList' g
-- map (\\(x, y) -> ('fromVertexKL' h x, 'fromVertexKL' h y)) ('Data.Graph.edges' $ 'toGraphKL' h) == 'AM.edgeList' g
-- 'toGraphKL' (fromAdjacencyMap (1 * 2 + 3 * 1)) == 'array' (0,2) [(0,[1]), (1,[]), (2,[0])]
-- 'toGraphKL' (fromAdjacencyMap (1 * 2 + 2 * 1)) == 'array' (0,1) [(0,[1]), (1,[0])]
-- @
fromAdjacencyMap :: Ord a => AM.AdjacencyMap a -> GraphKL a
fromAdjacencyMap am = GraphKL
{ toGraphKL = g
, fromVertexKL = \u -> case r u of (_, v, _) -> v
, toVertexKL = t }
where
(g, r, t) = KL.graphFromEdges [ ((), x, ys) | (x, ys) <- AM.adjacencyList am ]
-- | Build 'GraphKL' from an 'AIM.AdjacencyIntMap'. If
-- @fromAdjacencyIntMap g == h@ then the following holds:
--
-- @
-- map ('fromVertexKL' h) ('Data.Graph.vertices' $ 'toGraphKL' h) == 'Data.IntSet.toAscList' ('Algebra.Graph.AdjacencyIntMap.vertexIntSet' g)
-- map (\\(x, y) -> ('fromVertexKL' h x, 'fromVertexKL' h y)) ('Data.Graph.edges' $ 'toGraphKL' h) == 'Algebra.Graph.AdjacencyIntMap.edgeList' g
-- 'toGraphKL' (fromAdjacencyIntMap (1 * 2 + 3 * 1)) == 'array' (0,2) [(0,[1]), (1,[]), (2,[0])]
-- 'toGraphKL' (fromAdjacencyIntMap (1 * 2 + 2 * 1)) == 'array' (0,1) [(0,[1]), (1,[0])]
-- @
fromAdjacencyIntMap :: AIM.AdjacencyIntMap -> GraphKL Int
fromAdjacencyIntMap aim = GraphKL
{ toGraphKL = g
, fromVertexKL = \x -> case r x of (_, v, _) -> v
, toVertexKL = t }
where
(g, r, t) = KL.graphFromEdges [ ((), x, ys) | (x, ys) <- AIM.adjacencyList aim ]
-- | Compute the /depth-first search/ forest of a graph.
--
-- In the following examples we will use the helper function:
--
-- @
-- (%) :: (GraphKL Int -> a) -> 'AM.AdjacencyMap' Int -> a
-- a % g = a $ 'fromAdjacencyMap' g
-- @
--
-- for greater clarity.
--
-- @
-- 'AM.forest' (dfsForest % 'AM.edge' 1 1) == 'AM.vertex' 1
-- 'AM.forest' (dfsForest % 'AM.edge' 1 2) == 'AM.edge' 1 2
-- 'AM.forest' (dfsForest % 'AM.edge' 2 1) == 'AM.vertices' [1, 2]
-- 'AM.isSubgraphOf' ('AM.forest' $ dfsForest % x) x == True
-- dfsForest % 'AM.forest' (dfsForest % x) == dfsForest % x
-- dfsForest % 'AM.vertices' vs == 'map' (\\v -> Node v []) ('Data.List.nub' $ 'Data.List.sort' vs)
-- 'AM.dfsForestFrom' ('AM.vertexList' x) % x == dfsForest % x
-- dfsForest % (3 * (1 + 4) * (1 + 5)) == [ Node { rootLabel = 1
-- , subForest = [ Node { rootLabel = 5
-- , subForest = [] }]}
-- , Node { rootLabel = 3
-- , subForest = [ Node { rootLabel = 4
-- , subForest = [] }]}]
-- @
dfsForest :: GraphKL a -> Forest a
dfsForest (GraphKL g r _) = fmap (fmap r) (KL.dff g)
-- | Compute the /depth-first search/ forest of a graph, searching from each of
-- the given vertices in order. Note that the resulting forest does not
-- necessarily span the whole graph, as some vertices may be unreachable.
--
-- In the following examples we will use the helper function:
--
-- @
-- (%) :: (GraphKL Int -> a) -> 'AM.AdjacencyMap' Int -> a
-- a % g = a $ 'fromAdjacencyMap' g
-- @
--
-- for greater clarity.
--
-- @
-- 'AM.forest' (dfsForestFrom [1] % 'AM.edge' 1 1) == 'AM.vertex' 1
-- 'AM.forest' (dfsForestFrom [1] % 'AM.edge' 1 2) == 'AM.edge' 1 2
-- 'AM.forest' (dfsForestFrom [2] % 'AM.edge' 1 2) == 'AM.vertex' 2
-- 'AM.forest' (dfsForestFrom [3] % 'AM.edge' 1 2) == 'AM.empty'
-- 'AM.forest' (dfsForestFrom [2, 1] % 'AM.edge' 1 2) == 'AM.vertices' [1, 2]
-- 'AM.isSubgraphOf' ('AM.forest' $ dfsForestFrom vs % x) x == True
-- dfsForestFrom ('AM.vertexList' x) % x == 'dfsForest' % x
-- dfsForestFrom vs % 'AM.vertices' vs == 'map' (\\v -> Node v []) ('Data.List.nub' vs)
-- dfsForestFrom [] % x == []
-- dfsForestFrom [1, 4] % (3 * (1 + 4) * (1 + 5)) == [ Node { rootLabel = 1
-- , subForest = [ Node { rootLabel = 5
-- , subForest = [] }
-- , Node { rootLabel = 4
-- , subForest = [] }]
-- @
dfsForestFrom :: [a] -> GraphKL a -> Forest a
dfsForestFrom vs (GraphKL g r t) = fmap (fmap r) (KL.dfs g (mapMaybe t vs))
-- | Compute the list of vertices visited by the /depth-first search/ in a
-- graph, when searching from each of the given vertices in order.
--
-- In the following examples we will use the helper function:
--
-- @
-- (%) :: (GraphKL Int -> a) -> 'AM.AdjacencyMap' Int -> a
-- a % g = a $ 'fromAdjacencyMap' g
-- @
--
-- for greater clarity.
--
-- @
-- dfs [1] % 'AM.edge' 1 1 == [1]
-- dfs [1] % 'AM.edge' 1 2 == [1,2]
-- dfs [2] % 'AM.edge' 1 2 == [2]
-- dfs [3] % 'AM.edge' 1 2 == []
-- dfs [1,2] % 'AM.edge' 1 2 == [1,2]
-- dfs [2,1] % 'AM.edge' 1 2 == [2,1]
-- dfs [] % x == []
-- dfs [1,4] % (3 * (1 + 4) * (1 + 5)) == [1,5,4]
-- 'AM.isSubgraphOf' ('AM.vertices' $ dfs vs x) x == True
-- @
dfs :: [a] -> GraphKL a -> [a]
dfs vs = concatMap flatten . dfsForestFrom vs
-- | Compute the /topological sort/ of a graph. Note that this function returns
-- a result even if the graph is cyclic.
--
-- In the following examples we will use the helper function:
--
-- @
-- (%) :: (GraphKL Int -> a) -> 'AM.AdjacencyMap' Int -> a
-- a % g = a $ 'fromAdjacencyMap' g
-- @
--
-- for greater clarity.
--
-- @
-- topSort % (1 * 2 + 3 * 1) == [3,1,2]
-- topSort % (1 * 2 + 2 * 1) == [1,2]
-- @
topSort :: GraphKL a -> [a]
topSort (GraphKL g r _) = map r (KL.topSort g)
scc :: Ord a => AM.AdjacencyMap a -> AM.AdjacencyMap (NonEmpty.AdjacencyMap a)
scc m = AM.gmap (component Map.!) $ removeSelfLoops $ AM.gmap (leader Map.!) m
where
GraphKL g decode _ = fromAdjacencyMap m
sccs = map toList (KL.scc g)
leader = Map.fromList [ (decode y, x) | x:xs <- sccs, y <- x:xs ]
component = Map.fromList [ (x, expand (x:xs)) | x:xs <- sccs ]
expand xs = fromJust $ NonEmpty.toNonEmpty $ AM.induce (`Set.member` s) m
where
s = Set.fromList (map decode xs)
removeSelfLoops :: Ord a => AM.AdjacencyMap a -> AM.AdjacencyMap a
removeSelfLoops m = foldr (\x -> AM.removeEdge x x) m (AM.vertexList m)
| snowleopard/alga | src/Data/Graph/Typed.hs | mit | 9,042 | 0 | 12 | 2,620 | 1,068 | 644 | 424 | 46 | 1 |
module Parser where
type Parser a = String -> Maybe (a, String)
char :: Parser Char
char [] = Nothing
char (x:xs) = Just (x, xs)
alt :: Parser a -> Parser a -> Parser a
(p `alt` q) xs = case (p xs) of
Nothing -> q xs
x -> x
(#) :: Parser a -> Parser b -> Parser (a, b)
(p # q) xs = case (p xs) of
Nothing -> Nothing
Just (x, ys) -> (case (q ys) of
Nothing -> Nothing
Just (y, zs) -> Just ((x, y), zs))
-- A parser that always succeeds with the given value as the result.
succeed :: a -> Parser a
succeed c xs = Just (c, xs)
-- Zero or more applications of p
many :: Parser a -> Parser [a]
many p = many1 p `alt` succeed []
-- One or more applications of p
many1 :: Parser a -> Parser [a]
many1 p = p # many p `build` (uncurry (:))
-- Apply a function to the result of applying the given parser.
build :: Parser a -> (a -> b) -> Parser b
build p f xs =
case (p xs) of
Just (x, ys) -> Just (f x, ys)
otherwise -> Nothing
-- Parse using p and keep the result if it satisfies the predicate f.
sat :: Parser a -> (a -> Bool) -> Parser a
(p `sat` f) xs = case (p xs) of
Nothing -> Nothing
Just (x, ys) -> case (f x) of
True -> Just (x, ys)
False -> Nothing
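-- Illustrative usage of the combinators above:
--
-- >>> (char `sat` (== 'M')) "MIU"
-- Just ('M',"IU")
-- >>> many (char `sat` (== 'I')) "IIU"
-- Just ("II","U")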
| iain-logan/MU-Puzzle | Parser.hs | mit | 1,392 | 5 | 14 | 503 | 570 | 299 | 271 | 32 | 3 |
module ACME.Yes.PreCure5.Profiles
( PreCure5(..)
, allPrecures
, introducesHerselfAs
, transformationPhraseOf
, metamorphoseOf
) where
import ACME.Yes.PreCure5.Class
import qualified Data.Set as S
data PreCure5 =
CureDream | CureRouge | CureLemonade | CureMint | CureAqua
deriving (Show, Bounded, Enum, Eq, Ord)
instance PreCure PreCure5 where
allPrecures = S.fromAscList [minBound..maxBound]
introducesHerselfAs CureDream = "大いなる希望の力、キュアドリーム!"
introducesHerselfAs CureRouge = "情熱の赤い炎、キュアルージュ!"
introducesHerselfAs CureLemonade = "はじけるレモンの香り、キュアレモネード!"
introducesHerselfAs CureMint = "安らぎの緑の大地、キュアミント!"
introducesHerselfAs CureAqua = "知性の青き泉、キュアアクア!"
transformationPhraseOf ps =
"プリキュア!メタモルフォーゼ!\n"
++ (unlines $ map introducesHerselfAs $ S.toAscList ps)
++ "希望の力と未来の光!\n"
++ "華麗に羽ばたく5つの心!\n"
++ "Yes!プリキュア5!\n"
metamorphoseOf :: S.Set PreCure5 -> String
metamorphoseOf = transformationPhraseOf
| igrep/yes-precure5-command | ACME/Yes/PreCure5/Profiles.hs | mit | 1,219 | 0 | 13 | 168 | 211 | 118 | 93 | 26 | 1 |
-- file: ch09/RecursiveContents.hs
module RecursiveContents (getRecursiveContents) where
import Control.Monad (forM)
import System.Directory (doesDirectoryExist, getDirectoryContents)
import System.FilePath ((</>))
getRecursiveContents :: FilePath -> IO [FilePath]
getRecursiveContents topdir = do
names <- getDirectoryContents topdir
let properNames = filter (`notElem` [".", ".."]) names
paths <- forM properNames $ \name -> do
let path = topdir </> name
isDirectory <- doesDirectoryExist path
if isDirectory
then getRecursiveContents path
else return [path]
return (concat paths)
simpleFind :: (FilePath -> Bool) -> FilePath -> IO [FilePath]
simpleFind p path = do
names <- getRecursiveContents path
return $ filter p names
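-- Illustrative usage:
--
-- > getRecursiveContents "." >>= mapM_ putStrLn
--
-- prints every file path below the current directory.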
| Numberartificial/workflow | haskell-first-principles/src/RW/CH9/RecursiveContents.hs | mit | 781 | 0 | 15 | 146 | 236 | 122 | 114 | 19 | 2 |
module Arbre.Mutation
(
applyMutation
)
where
import Arbre.Context
import Arbre.Expressions
applyMutation :: Expression -> Context -> Context
applyMutation (Mutation Define (Symdef sym) value) context =
bindPair Dyn context (sym, value)
applyMutation (Mutation Set (Symdef sym) value) context =
bindPair Dyn context (sym, value)
| blanu/arbre | Arbre/Mutation.hs | gpl-2.0 | 337 | 0 | 9 | 51 | 112 | 60 | 52 | 10 | 1 |
{-# LANGUAGE OverloadedStrings #-}
import Types
import Mario
import NeuralNetwork hiding (run)
import Emulator (saveAsFM2)
import Control.Concurrent
import Control.Exception
import Control.Monad
import Data.Binary
import qualified Data.ByteString.Lazy as BL
import qualified Data.ByteString as B
import Data.ByteString.Char8 (hPutStrLn)
import Network.Socket hiding (send, sendTo, recv, recvFrom)
import System.IO hiding (hPutStrLn)
import System.Directory
import System.Log.Logger
import System.Log.Formatter
import System.Log.Handler (setFormatter)
import System.Log.Handler.Simple
import System.Random
-- To test this, load into cabal repl and run "testMain", and start
-- the worker in another terminal cabal repl with "main".
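-- | Swap each genome in the population for its evaluated counterpart,
-- matching genomes on every field except the first.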
replaceGenomes :: Population -> [Genome] -> Population
replaceGenomes pop gns = map (\(a,b,c,d,gs) -> (a,b,c,d,replace gs gns)) pop
where
replace :: [Genome] -> [Genome] -> [Genome]
replace [] _ = []
replace g [] = g
replace (x:xs) gs = fndMatch x gs : replace xs gs
fndMatch :: Genome -> [Genome] -> Genome
fndMatch x [] = x
fndMatch x (g:gs)
| x `eq` g = g
| otherwise = fndMatch x gs
eq (Genome _ x y) (Genome _ x1 y1) = x == x1 && y == y1
runPopulation :: (Int, Population, [Float]) -> Int -> Socket -> IO b
runPopulation (gInnov,p0,gen) n sock = do
let genomes = concatMap (\(_,_,_,_,gs) -> gs) p0
genomes' <- run genomes sock
let p1 = replaceGenomes p0 genomes'
savePopulation ("./data/" ++ show n ++ ".bin") p1
let (gInnov',p2,gen') = stepNetwork p0 (gInnov,p1,gen) marioConfig
savePopulation ("./data/" ++ show (n+1) ++ ".bin") p2
joydata <- recordMario (fittestGenome p1)
saveAsFM2 ("./data/" ++ show n ++ ".fm2") joydata
runPopulation (gInnov',p2,gen') (n+1) sock
loadMaxPopulation :: IO (Int, Population)
loadMaxPopulation = loadMaxPop 1
where
loadMaxPop n = do
exists <- doesFileExist $ "./data/" ++ show n ++ ".bin"
if exists
then loadMaxPop (n+1)
else do
p0 <- loadPopulation $ "./data/" ++ show (n-1) ++ ".bin"
return (n-1,p0)
main :: IO ()
main = withSocketsDo $ do
logH <- fileHandler "Master.log" INFO >>= \lh -> return $
setFormatter lh (simpleLogFormatter "[$time : $prio] $msg")
updateGlobalLogger rootLoggerName (addHandler logH)
(num,p0) <- loadMaxPopulation
let genomes = concatMap (\(_,_,_,_,gs) -> gs) p0
let gInnov = maximum . map _innovation . concatMap _genes $ genomes
let gen = randomRs (0.0,1.0) $ mkStdGen 23
sock <- socket AF_INET Stream 0
setSocketOption sock ReuseAddr 1
bindSocket sock (SockAddrInet 3000 iNADDR_ANY)
listen sock 5
infoM rootLoggerName "Listening on port 3000"
runPopulation (gInnov,p0,gen) num sock
run :: [Genome] -> Socket -> IO [Genome]
run genomes sock = do
let ntodo = length genomes
ndone <- newMVar 0
todo <- newChan :: IO (Chan Genome)
done <- newChan :: IO (Chan Genome)
writeList2Chan todo genomes
sender ntodo ndone todo done sock
readNextN done ntodo
{- ntodo is the number of genomes to be evaluated and should not change
 - ndone is the number of genomes completed and goes from 0 to ntodo
- todo is a channel containing all the genomes yet to be done
- done is a channel containing all the genomes completed
-}
sender :: Int -> MVar Int -> Chan Genome -> Chan Genome -> Socket -> IO ()
sender ntodo ndone todo done sock0 = withSocketsDo $ loop sock0
where
loop :: Socket -> IO ()
loop sock = do
(sck,_) <- accept sock
infoM rootLoggerName "Connection accepted!"
sHandle <- socketToHandle sck ReadWriteMode
_ <- forkIO $ runLoop sHandle
isDone <- (== ntodo) <$> readMVar ndone
unless isDone (loop sock)
runLoop :: Handle -> IO ()
runLoop sHandle = do
isDone <- (== ntodo) <$> readMVar ndone
unless isDone $ do
g <- readChan todo
result <- try $ sendGenome sHandle g
case result :: Either SomeException Genome of
Right g' -> writeChan done g' >>
incMVar ndone >>
runLoop sHandle
Left _ -> writeChan todo g >>
runLoop sHandle
incMVar :: MVar Int -> IO ()
incMVar mv = do
i <- takeMVar mv
putMVar mv (i + 1)
sendGenome :: Handle -> Genome -> IO Genome
sendGenome sHandle g = do
let str = strictEncode g
gbytes = strictEncode $ B.length str
hPutStrLn sHandle gbytes
B.hPut sHandle str
putStrLn $ "sent " ++ show (B.length str)
rb <- B.hGetLine sHandle
let rbytes = strictDecode rb
resp <- B.hGet sHandle rbytes
putStrLn $ "received " ++ show rbytes
return $ strictDecode resp
strictEncode :: Binary a => a -> B.ByteString
strictEncode = BL.toStrict . encode
strictDecode :: Binary a => B.ByteString -> a
strictDecode = decode . BL.fromStrict
readNextN :: Chan a -> Int -> IO [a]
readNextN _ 0 = return []
readNextN ch n = readChan ch >>= \x -> (x:) <$> readNextN ch (n - 1)
| mdietz94/MAAX | app/src/Master.hs | gpl-2.0 | 4,952 | 0 | 17 | 1,139 | 1,818 | 911 | 907 | 121 | 4 |
{-# LANGUAGE QuasiQuotes, TypeFamilies, GeneralizedNewtypeDeriving,
TemplateHaskell, OverloadedStrings, GADTs, FlexibleContexts #-}
module Model.Fields where
import Control.Applicative (pure)
import Data.Aeson
import Data.Maybe (fromJust)
import Data.Text (pack, unpack)
import Network.URI (URI, uriToString, parseURI)
import Database.Persist
import Database.Persist.Sql (PersistFieldSql, sqlType)
uriToText uri = pack $ uriToString id uri ""
instance PersistField URI where
toPersistValue = PersistText . uriToText
fromPersistValue (PersistText v) = case (parseURI $ unpack v) of
Just uri -> Right uri
Nothing -> Left "Not a valid URI"
instance PersistFieldSql URI where
sqlType _ = SqlString
instance ToJSON URI where
toJSON = String . uriToText
instance FromJSON URI where
parseJSON (String u) = pure $ fromJust $ parseURI $ unpack u
parseJSON _ = fail "could not parse"
| bitraten/hands | src/Model/Fields.hs | gpl-3.0 | 963 | 0 | 9 | 205 | 246 | 131 | 115 | -1 | -1 |
module Hardware where
import HardwareTypes
import Data.IORef
initBoosters = do
controllerImpl <- newIORef (ControllerImpl "" Online 0 (map fromIntegral [1..]))
return $ ControllerMock controllerImpl
reportValue v = print ("reported: " ++ show v)
storeValue v = print ("stored: " ++ show v)
readTemperature (ControllerMock controllerImpl) = do
ControllerImpl _ st n ts <- readIORef controllerImpl
return (head ts)
askStatus _ = return Online
heatUpBoosters (ControllerMock controller) _ _ = do
ControllerImpl _ st n ts <- readIORef controller
writeIORef controller $ ControllerImpl "" st (n + 1) (drop 1 ts)
| graninas/Haskell-Algorithms | Tests/ArrowsAndFrees/Hardware.hs | gpl-3.0 | 633 | 0 | 13 | 116 | 229 | 109 | 120 | 15 | 1 |
module Handler.Experiment where
import Import
import Data.Time
import Crypto.PasswordStore
import Data.Text.Encoding
getExperimentR :: Text -> Handler Value
getExperimentR subjNumber = do
mexp <- runDB $ do
subj <- getBy $ UniqueSubject subjNumber
case subj of
Nothing -> return Nothing
Just (Entity sid _) -> do
question <- getBy $ UniqueQuestions sid
rs <- selectList [ RatingSubject ==. sid ] [ ]
return $ Just (subj, question, rs)
case mexp of
Just (subj, question, rs) -> return $ object
[ "subject" .= subj
, "questions" .= question
, "ratings" .= rs
]
_ -> return $ object [ "msg" .= ("No subject" :: Text) ]
postSubmitExperimentR :: Handler Value
postSubmitExperimentR = do
addHeader "Access-Control-Allow-Origin" "*"
experiment <- requireJsonBody :: Handler Experiment
let authinput = authentication experiment
user = username authinput
pass = password authinput
mauth <- runDB $ getBy $ UniqueAuthentication user
case mauth of
Nothing -> return $ object [ "success" .= False ]
Just (Entity _ auth) -> do
let valid = verifyPassword (encodeUtf8 pass)
(authenticationPassword auth)
case valid of
True -> do
          time <- liftIO $ fmap zonedTimeToUTC getZonedTime
_ <- runDB $ do
subjId <- insert $ subject experiment
_ <- insert $ constructQuestion subjId time (questions experiment)
let rs = map (constructRating subjId) (ratings experiment)
mapM insert rs
return $ object [ "success" .= True ]
False -> return $ object [ "success" .= False ]
constructQuestion :: SubjectId -> UTCTime -> QuestionsInput -> Questions
constructQuestion sid time input = Questions
sid time
(age input)
(sex input)
(question1 input)
(question2 input)
(question3 input)
(question4 input)
(question5 input)
(question6 input)
(question7 input)
(question8 input)
(question9 input)
(question10 input)
(question11 input)
(question12 input)
(remark1 input)
(remark2 input)
(remark3 input)
(remark4 input)
(remark5 input)
(remark6 input)
(remark7 input)
(remark8 input)
(remark9 input)
(remark10 input)
constructRating :: SubjectId -> RatingInput -> Rating
constructRating sid input = Rating
sid (sample input) (rating input) (position input) (repeats input) (practice input)
| sgillis/prestapi | src/Handler/Experiment.hs | gpl-3.0 | 2,742 | 0 | 26 | 894 | 838 | 410 | 428 | 74 | 3 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Games.TurnBasedMatches.Create
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Create a turn-based match.
--
-- /See:/ <https://developers.google.com/games/services/ Google Play Game Services API Reference> for @games.turnBasedMatches.create@.
module Network.Google.Resource.Games.TurnBasedMatches.Create
(
-- * REST Resource
TurnBasedMatchesCreateResource
-- * Creating a Request
, turnBasedMatchesCreate
, TurnBasedMatchesCreate
-- * Request Lenses
, tbmcConsistencyToken
, tbmcPayload
, tbmcLanguage
) where
import Network.Google.Games.Types
import Network.Google.Prelude
-- | A resource alias for @games.turnBasedMatches.create@ method which the
-- 'TurnBasedMatchesCreate' request conforms to.
type TurnBasedMatchesCreateResource =
"games" :>
"v1" :>
"turnbasedmatches" :>
"create" :>
QueryParam "consistencyToken" (Textual Int64) :>
QueryParam "language" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] TurnBasedMatchCreateRequest :>
Post '[JSON] TurnBasedMatch
-- | Create a turn-based match.
--
-- /See:/ 'turnBasedMatchesCreate' smart constructor.
data TurnBasedMatchesCreate = TurnBasedMatchesCreate'
{ _tbmcConsistencyToken :: !(Maybe (Textual Int64))
, _tbmcPayload :: !TurnBasedMatchCreateRequest
, _tbmcLanguage :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'TurnBasedMatchesCreate' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tbmcConsistencyToken'
--
-- * 'tbmcPayload'
--
-- * 'tbmcLanguage'
turnBasedMatchesCreate
:: TurnBasedMatchCreateRequest -- ^ 'tbmcPayload'
-> TurnBasedMatchesCreate
turnBasedMatchesCreate pTbmcPayload_ =
TurnBasedMatchesCreate'
{ _tbmcConsistencyToken = Nothing
, _tbmcPayload = pTbmcPayload_
, _tbmcLanguage = Nothing
}
-- | The last-seen mutation timestamp.
tbmcConsistencyToken :: Lens' TurnBasedMatchesCreate (Maybe Int64)
tbmcConsistencyToken
= lens _tbmcConsistencyToken
(\ s a -> s{_tbmcConsistencyToken = a})
. mapping _Coerce
-- | Multipart request metadata.
tbmcPayload :: Lens' TurnBasedMatchesCreate TurnBasedMatchCreateRequest
tbmcPayload
= lens _tbmcPayload (\ s a -> s{_tbmcPayload = a})
-- | The preferred language to use for strings returned by this method.
tbmcLanguage :: Lens' TurnBasedMatchesCreate (Maybe Text)
tbmcLanguage
= lens _tbmcLanguage (\ s a -> s{_tbmcLanguage = a})
instance GoogleRequest TurnBasedMatchesCreate where
type Rs TurnBasedMatchesCreate = TurnBasedMatch
type Scopes TurnBasedMatchesCreate =
'["https://www.googleapis.com/auth/games",
"https://www.googleapis.com/auth/plus.login"]
requestClient TurnBasedMatchesCreate'{..}
= go _tbmcConsistencyToken _tbmcLanguage
(Just AltJSON)
_tbmcPayload
gamesService
where go
= buildClient
(Proxy :: Proxy TurnBasedMatchesCreateResource)
mempty
| rueshyna/gogol | gogol-games/gen/Network/Google/Resource/Games/TurnBasedMatches/Create.hs | mpl-2.0 | 3,948 | 0 | 15 | 897 | 493 | 290 | 203 | 76 | 1 |
module Main where
import System.Environment(getArgs)
import Data.List(sort)
import Control.Monad(join)
main :: IO()
main = pLn $ (take 3 . reverse . sort . fmap read <$> getArgs :: IO[Int])
pLn :: (Show a) => IO[a] -> IO()
pLn x = join $ fmap f x where
f :: (Show a) => [a] -> IO()
f xs = sequence_ $ fmap (putStrLn . show) xs
| dkpsk/aoj | 0-1/app.hs | unlicense | 334 | 0 | 11 | 71 | 185 | 98 | 87 | 10 | 1 |
module Helpers.ListHelpers (allDistinct, cartesianProduct, concatReplicate, firstDifferences, reciprocalSum, runLengths, zipWithPadding) where
import Data.List (group)
import Data.Set (Set)
import qualified Data.Set as Set
-- concatReplicate is to replicate as concatMap is to map
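-- e.g. concatReplicate 3 "ab" == "ababab"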
concatReplicate :: Int -> [a] -> [a]
concatReplicate n list = take (n * length list) $ cycle list
-- reciprocalSum [2,5] = (1 % 2) + (1 % 5)
--                     = 7 % 10
reciprocalSum :: Integral a => [a] -> Rational
reciprocalSum = sum . map (recip . toRational)
runLengths :: Eq a => [a] -> [Int]
runLengths = map length . group
-- Inspired by:
-- http://stackoverflow.com/questions/22403029/how-to-zip-lists-with-different-length
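-- e.g. zipWithPadding 0 [1,2,3] [4] == [(1,4),(2,0),(3,0)]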
zipWithPadding :: a -> [a] -> [a] -> [(a, a)]
zipWithPadding pad (a:as) (b:bs) = (a, b) : zipWithPadding pad as bs
zipWithPadding pad as [] = zip as (repeat pad)
zipWithPadding pad [] bs = zip (repeat pad) bs
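-- e.g. firstDifferences [1,4,9,16] == [3,5,7]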
firstDifferences :: Integral a => [a] -> [a]
firstDifferences [] = []
firstDifferences ls'@(_:ls) = zipWith (-) ls ls'
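-- e.g. allDistinct [1,2,3] == True, allDistinct [1,2,1] == False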
allDistinct :: Ord a => [a] -> Bool
allDistinct = recurse Set.empty where
recurse seen [] = True
recurse seen (r:rs)
| r `Set.member` seen = False
| otherwise = recurse (r `Set.insert` seen) rs
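-- e.g. cartesianProduct 2 [0,1] == [[0,0],[1,0],[0,1],[1,1]]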
cartesianProduct :: Int -> [a] -> [[a]]
cartesianProduct 0 _ = []
cartesianProduct 1 elements = map (:[]) elements
cartesianProduct n elements = concatMap f $ cartesianProduct (n - 1) elements where
f es = map (:es) elements
-- minBy :: (a -> Int) -> [a] -> a
-- minBy f (a:as) = recurse a as where
-- recurse knownMin [] = knownMin
-- recurse knownMin (x:xs) = if f knownMin <= f x then recurse knownMin xs else recurse x xs
--
-- minByUniq :: (a -> Int) -> [a] -> a
-- minByUniq f (a:as) = recurse [a] as where
-- recurse [knownMin] [] = knownMin
-- recurse (x:x':xs) [] = error "Multiple minima" + show x + show x'
-- recurse knownMin (x:xs)
-- | f knownMin < f x = recurse knownMin xs
-- | f knownMin == f x = recurse (x:knownMin)
-- | f knownMin > f x = recurse x xs
--
-- allMin :: (a -> Int) -> [a] -> [a]
-- allMin _ [] = []
-- allMin f (a:as) = recurse [a] (f a) as where
-- recurse known _ [] = known
-- recurse known knownMin (x:xs)
-- | f x == knownMin = recurse (x : known) knownMin xs
-- | f x > knownMin = recurse known knownMin xs
-- | otherwise = recurse [x] (f x) xs
| peterokagey/haskellOEIS | src/Helpers/ListHelpers.hs | apache-2.0 | 2,410 | 0 | 10 | 566 | 571 | 318 | 253 | 27 | 2 |
{-# LANGUAGE
CPP
, ScopedTypeVariables
#-}
module Main where
import qualified Foreign.CUDA.Driver as CUDA
import System.IO.MMap (
mmapFilePtr
, munmapFilePtr
, Mode(..)
)
import Data.Maybe (catMaybes)
import Text.Printf (printf)
import System.Directory(doesFileExist)
import Foreign (Word8, plusPtr)
#define vis_size 64
#define vis_data_size 80
type RawPtr = CUDA.DevicePtr Word8
initialize :: IO (CUDA.Context, CUDA.Fun)
initialize = do
CUDA.initialise []
dev0 <- CUDA.device 0
ctx <- CUDA.create dev0 [CUDA.SchedAuto]
m <- CUDA.loadFile "grid_kernel.cubin"
f <- CUDA.getFun m "grid_kernel"
return (ctx, f)
main :: IO ()
main = let gridsize = 2048 * 2048 * vis_size in do
(ctx, grid_kernel) <- initialize
(grid_ptr_host, grid_rawsize, grid_offset, grid_size) <- mmapFilePtr "GPUBinned.dat" ReadWriteEx $ Just (0, gridsize)
(gcf_ptr_host, gcf_rawsize, gcf_offset, gcf_size) <- mmapFilePtr "GCF.dat" ReadOnly Nothing
CUDA.allocaArray gridsize $ \(grid_out :: RawPtr) -> do
CUDA.memset grid_out gridsize 0
CUDA.allocaArray gcf_size $ \(gcf_in :: RawPtr) -> do
CUDA.pokeArray gcf_size (plusPtr gcf_ptr_host gcf_offset) gcf_in
--
let processBin (up, vp) = let binfile = printf "bins/000000-%03d-%03d" up vp in do
doMe <- doesFileExist binfile
if doMe
then do
(data_ptr_host, data_rawsize, data_offset, data_size) <- mmapFilePtr binfile ReadOnly Nothing
data_in <- CUDA.mallocArray data_size :: IO RawPtr
CUDA.pokeArray data_size (plusPtr data_ptr_host data_offset) data_in
CUDA.launchKernel grid_kernel (16,16, 1) (8,8,1) 0 Nothing
[CUDA.IArg up, CUDA.IArg vp, CUDA.VArg data_in, CUDA.IArg(fromIntegral $ data_size `div` vis_data_size), CUDA.VArg gcf_in, CUDA.VArg grid_out]
munmapFilePtr data_ptr_host data_rawsize
return $ Just data_in
else return Nothing
resourcesToFree <- mapM processBin [(up, vp) | up <- [0..15], vp <- [0..15]]
CUDA.peekArray grid_size grid_out (plusPtr grid_ptr_host grid_offset)
munmapFilePtr grid_ptr_host grid_rawsize
munmapFilePtr gcf_ptr_host gcf_rawsize
-- Cleanup
mapM_ (CUDA.free) (catMaybes resourcesToFree)
CUDA.destroy ctx
| SKA-ScienceDataProcessor/RC | MS2/lib/DSL_Gridder/binner/RunGridder1.hs | apache-2.0 | 2,389 | 3 | 17 | 588 | 708 | 376 | 332 | 49 | 2 |
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : QAbstractFileEngine.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:36
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Enums.Core.QAbstractFileEngine (
FileFlag, FileFlags, eReadOwnerPerm, fReadOwnerPerm, eWriteOwnerPerm, fWriteOwnerPerm, eExeOwnerPerm, fExeOwnerPerm, eReadUserPerm, fReadUserPerm, eWriteUserPerm, fWriteUserPerm, eExeUserPerm, fExeUserPerm, eReadGroupPerm, fReadGroupPerm, eWriteGroupPerm, fWriteGroupPerm, eExeGroupPerm, fExeGroupPerm, eReadOtherPerm, fReadOtherPerm, eWriteOtherPerm, fWriteOtherPerm, eExeOtherPerm, fExeOtherPerm, eLinkType, fLinkType, fFileType, eDirectoryType, fDirectoryType, eBundleType, fBundleType, eHiddenFlag, fHiddenFlag, eLocalDiskFlag, fLocalDiskFlag, eExistsFlag, fExistsFlag, eRootFlag, fRootFlag, fRefresh, ePermsMask, fPermsMask, eTypesMask, fTypesMask, eFlagsMask, fFlagsMask, eFileInfoAll, fFileInfoAll
, FileName, eDefaultName, eBaseName, ePathName, eAbsoluteName, eAbsolutePathName, eLinkName, eCanonicalName, eCanonicalPathName, eBundleName
, FileOwner, eOwnerUser, eOwnerGroup
, FileTime, eCreationTime, eModificationTime, eAccessTime
, QAbstractFileEngineExtension, eAtEndExtension, eFastReadLineExtension
)
where
import Foreign.C.Types
import Qtc.Classes.Base
import Qtc.ClassTypes.Core (QObject, TQObject, qObjectFromPtr)
import Qtc.Core.Base (Qcs, connectSlot, qtc_connectSlot_int, wrapSlotHandler_int)
import Qtc.Enums.Base
import Qtc.Enums.Classes.Core
data CFileFlag a = CFileFlag a
type FileFlag = QEnum(CFileFlag Int)
ieFileFlag :: Int -> FileFlag
ieFileFlag x = QEnum (CFileFlag x)
instance QEnumC (CFileFlag Int) where
qEnum_toInt (QEnum (CFileFlag x)) = x
qEnum_fromInt x = QEnum (CFileFlag x)
withQEnumResult x
= do
ti <- x
return $ qEnum_fromInt $ fromIntegral ti
withQEnumListResult x
= do
til <- x
return $ map qEnum_fromInt til
instance Qcs (QObject c -> FileFlag -> IO ()) where
connectSlot _qsig_obj _qsig_nam _qslt_obj _qslt_nam _handler
= do
funptr <- wrapSlotHandler_int slotHandlerWrapper_int
stptr <- newStablePtr (Wrap _handler)
withObjectPtr _qsig_obj $ \cobj_sig ->
withCWString _qsig_nam $ \cstr_sig ->
withObjectPtr _qslt_obj $ \cobj_slt ->
withCWString _qslt_nam $ \cstr_slt ->
qtc_connectSlot_int cobj_sig cstr_sig cobj_slt cstr_slt (toCFunPtr funptr) (castStablePtrToPtr stptr)
return ()
where
slotHandlerWrapper_int :: Ptr fun -> Ptr () -> Ptr (TQObject c) -> CInt -> IO ()
slotHandlerWrapper_int funptr stptr qobjptr cint
= do qobj <- qObjectFromPtr qobjptr
let hint = fromCInt cint
if (objectIsNull qobj)
then do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
else _handler qobj (qEnum_fromInt hint)
return ()
data CFileFlags a = CFileFlags a
type FileFlags = QFlags(CFileFlags Int)
ifFileFlags :: Int -> FileFlags
ifFileFlags x = QFlags (CFileFlags x)
instance QFlagsC (CFileFlags Int) where
qFlags_toInt (QFlags (CFileFlags x)) = x
qFlags_fromInt x = QFlags (CFileFlags x)
withQFlagsResult x
= do
ti <- x
return $ qFlags_fromInt $ fromIntegral ti
withQFlagsListResult x
= do
til <- x
return $ map qFlags_fromInt til
instance Qcs (QObject c -> FileFlags -> IO ()) where
connectSlot _qsig_obj _qsig_nam _qslt_obj _qslt_nam _handler
= do
funptr <- wrapSlotHandler_int slotHandlerWrapper_int
stptr <- newStablePtr (Wrap _handler)
withObjectPtr _qsig_obj $ \cobj_sig ->
withCWString _qsig_nam $ \cstr_sig ->
withObjectPtr _qslt_obj $ \cobj_slt ->
withCWString _qslt_nam $ \cstr_slt ->
qtc_connectSlot_int cobj_sig cstr_sig cobj_slt cstr_slt (toCFunPtr funptr) (castStablePtrToPtr stptr)
return ()
where
slotHandlerWrapper_int :: Ptr fun -> Ptr () -> Ptr (TQObject c) -> CInt -> IO ()
slotHandlerWrapper_int funptr stptr qobjptr cint
= do qobj <- qObjectFromPtr qobjptr
let hint = fromCInt cint
if (objectIsNull qobj)
then do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
else _handler qobj (qFlags_fromInt hint)
return ()
eReadOwnerPerm :: FileFlag
eReadOwnerPerm
= ieFileFlag $ 16384
eWriteOwnerPerm :: FileFlag
eWriteOwnerPerm
= ieFileFlag $ 8192
eExeOwnerPerm :: FileFlag
eExeOwnerPerm
= ieFileFlag $ 4096
eReadUserPerm :: FileFlag
eReadUserPerm
= ieFileFlag $ 1024
eWriteUserPerm :: FileFlag
eWriteUserPerm
= ieFileFlag $ 512
eExeUserPerm :: FileFlag
eExeUserPerm
= ieFileFlag $ 256
eReadGroupPerm :: FileFlag
eReadGroupPerm
= ieFileFlag $ 64
eWriteGroupPerm :: FileFlag
eWriteGroupPerm
= ieFileFlag $ 32
eExeGroupPerm :: FileFlag
eExeGroupPerm
= ieFileFlag $ 16
eReadOtherPerm :: FileFlag
eReadOtherPerm
= ieFileFlag $ 4
eWriteOtherPerm :: FileFlag
eWriteOtherPerm
= ieFileFlag $ 2
eExeOtherPerm :: FileFlag
eExeOtherPerm
= ieFileFlag $ 1
eLinkType :: FileFlag
eLinkType
= ieFileFlag $ 65536
instance QeFileType FileFlag where
eFileType
= ieFileFlag $ 131072
eDirectoryType :: FileFlag
eDirectoryType
= ieFileFlag $ 262144
eBundleType :: FileFlag
eBundleType
= ieFileFlag $ 524288
eHiddenFlag :: FileFlag
eHiddenFlag
= ieFileFlag $ 1048576
eLocalDiskFlag :: FileFlag
eLocalDiskFlag
= ieFileFlag $ 2097152
eExistsFlag :: FileFlag
eExistsFlag
= ieFileFlag $ 4194304
eRootFlag :: FileFlag
eRootFlag
= ieFileFlag $ 8388608
instance QeRefresh FileFlag where
eRefresh
= ieFileFlag $ 16777216
ePermsMask :: FileFlag
ePermsMask
= ieFileFlag $ 65535
eTypesMask :: FileFlag
eTypesMask
= ieFileFlag $ 983040
eFlagsMask :: FileFlag
eFlagsMask
= ieFileFlag $ 267386880
eFileInfoAll :: FileFlag
eFileInfoAll
= ieFileFlag $ 268435455
fReadOwnerPerm :: FileFlags
fReadOwnerPerm
= ifFileFlags $ 16384
fWriteOwnerPerm :: FileFlags
fWriteOwnerPerm
= ifFileFlags $ 8192
fExeOwnerPerm :: FileFlags
fExeOwnerPerm
= ifFileFlags $ 4096
fReadUserPerm :: FileFlags
fReadUserPerm
= ifFileFlags $ 1024
fWriteUserPerm :: FileFlags
fWriteUserPerm
= ifFileFlags $ 512
fExeUserPerm :: FileFlags
fExeUserPerm
= ifFileFlags $ 256
fReadGroupPerm :: FileFlags
fReadGroupPerm
= ifFileFlags $ 64
fWriteGroupPerm :: FileFlags
fWriteGroupPerm
= ifFileFlags $ 32
fExeGroupPerm :: FileFlags
fExeGroupPerm
= ifFileFlags $ 16
fReadOtherPerm :: FileFlags
fReadOtherPerm
= ifFileFlags $ 4
fWriteOtherPerm :: FileFlags
fWriteOtherPerm
= ifFileFlags $ 2
fExeOtherPerm :: FileFlags
fExeOtherPerm
= ifFileFlags $ 1
fLinkType :: FileFlags
fLinkType
= ifFileFlags $ 65536
fFileType :: FileFlags
fFileType
= ifFileFlags $ 131072
fDirectoryType :: FileFlags
fDirectoryType
= ifFileFlags $ 262144
fBundleType :: FileFlags
fBundleType
= ifFileFlags $ 524288
fHiddenFlag :: FileFlags
fHiddenFlag
= ifFileFlags $ 1048576
fLocalDiskFlag :: FileFlags
fLocalDiskFlag
= ifFileFlags $ 2097152
fExistsFlag :: FileFlags
fExistsFlag
= ifFileFlags $ 4194304
fRootFlag :: FileFlags
fRootFlag
= ifFileFlags $ 8388608
fRefresh :: FileFlags
fRefresh
= ifFileFlags $ 16777216
fPermsMask :: FileFlags
fPermsMask
= ifFileFlags $ 65535
fTypesMask :: FileFlags
fTypesMask
= ifFileFlags $ 983040
fFlagsMask :: FileFlags
fFlagsMask
= ifFileFlags $ 267386880
fFileInfoAll :: FileFlags
fFileInfoAll
= ifFileFlags $ 268435455
data CFileName a = CFileName a
type FileName = QEnum(CFileName Int)
ieFileName :: Int -> FileName
ieFileName x = QEnum (CFileName x)
instance QEnumC (CFileName Int) where
qEnum_toInt (QEnum (CFileName x)) = x
qEnum_fromInt x = QEnum (CFileName x)
withQEnumResult x
= do
ti <- x
return $ qEnum_fromInt $ fromIntegral ti
withQEnumListResult x
= do
til <- x
return $ map qEnum_fromInt til
instance Qcs (QObject c -> FileName -> IO ()) where
connectSlot _qsig_obj _qsig_nam _qslt_obj _qslt_nam _handler
= do
funptr <- wrapSlotHandler_int slotHandlerWrapper_int
stptr <- newStablePtr (Wrap _handler)
withObjectPtr _qsig_obj $ \cobj_sig ->
withCWString _qsig_nam $ \cstr_sig ->
withObjectPtr _qslt_obj $ \cobj_slt ->
withCWString _qslt_nam $ \cstr_slt ->
qtc_connectSlot_int cobj_sig cstr_sig cobj_slt cstr_slt (toCFunPtr funptr) (castStablePtrToPtr stptr)
return ()
where
slotHandlerWrapper_int :: Ptr fun -> Ptr () -> Ptr (TQObject c) -> CInt -> IO ()
slotHandlerWrapper_int funptr stptr qobjptr cint
= do qobj <- qObjectFromPtr qobjptr
let hint = fromCInt cint
if (objectIsNull qobj)
then do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
else _handler qobj (qEnum_fromInt hint)
return ()
eDefaultName :: FileName
eDefaultName
= ieFileName $ 0
eBaseName :: FileName
eBaseName
= ieFileName $ 1
ePathName :: FileName
ePathName
= ieFileName $ 2
eAbsoluteName :: FileName
eAbsoluteName
= ieFileName $ 3
eAbsolutePathName :: FileName
eAbsolutePathName
= ieFileName $ 4
eLinkName :: FileName
eLinkName
= ieFileName $ 5
eCanonicalName :: FileName
eCanonicalName
= ieFileName $ 6
eCanonicalPathName :: FileName
eCanonicalPathName
= ieFileName $ 7
eBundleName :: FileName
eBundleName
= ieFileName $ 8
data CFileOwner a = CFileOwner a
type FileOwner = QEnum(CFileOwner Int)
ieFileOwner :: Int -> FileOwner
ieFileOwner x = QEnum (CFileOwner x)
instance QEnumC (CFileOwner Int) where
qEnum_toInt (QEnum (CFileOwner x)) = x
qEnum_fromInt x = QEnum (CFileOwner x)
withQEnumResult x
= do
ti <- x
return $ qEnum_fromInt $ fromIntegral ti
withQEnumListResult x
= do
til <- x
return $ map qEnum_fromInt til
instance Qcs (QObject c -> FileOwner -> IO ()) where
connectSlot _qsig_obj _qsig_nam _qslt_obj _qslt_nam _handler
= do
funptr <- wrapSlotHandler_int slotHandlerWrapper_int
stptr <- newStablePtr (Wrap _handler)
withObjectPtr _qsig_obj $ \cobj_sig ->
withCWString _qsig_nam $ \cstr_sig ->
withObjectPtr _qslt_obj $ \cobj_slt ->
withCWString _qslt_nam $ \cstr_slt ->
qtc_connectSlot_int cobj_sig cstr_sig cobj_slt cstr_slt (toCFunPtr funptr) (castStablePtrToPtr stptr)
return ()
where
slotHandlerWrapper_int :: Ptr fun -> Ptr () -> Ptr (TQObject c) -> CInt -> IO ()
slotHandlerWrapper_int funptr stptr qobjptr cint
= do qobj <- qObjectFromPtr qobjptr
let hint = fromCInt cint
if (objectIsNull qobj)
then do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
else _handler qobj (qEnum_fromInt hint)
return ()
eOwnerUser :: FileOwner
eOwnerUser
= ieFileOwner $ 0
eOwnerGroup :: FileOwner
eOwnerGroup
= ieFileOwner $ 1
data CFileTime a = CFileTime a
type FileTime = QEnum(CFileTime Int)
ieFileTime :: Int -> FileTime
ieFileTime x = QEnum (CFileTime x)
instance QEnumC (CFileTime Int) where
qEnum_toInt (QEnum (CFileTime x)) = x
qEnum_fromInt x = QEnum (CFileTime x)
withQEnumResult x
= do
ti <- x
return $ qEnum_fromInt $ fromIntegral ti
withQEnumListResult x
= do
til <- x
return $ map qEnum_fromInt til
instance Qcs (QObject c -> FileTime -> IO ()) where
connectSlot _qsig_obj _qsig_nam _qslt_obj _qslt_nam _handler
= do
funptr <- wrapSlotHandler_int slotHandlerWrapper_int
stptr <- newStablePtr (Wrap _handler)
withObjectPtr _qsig_obj $ \cobj_sig ->
withCWString _qsig_nam $ \cstr_sig ->
withObjectPtr _qslt_obj $ \cobj_slt ->
withCWString _qslt_nam $ \cstr_slt ->
qtc_connectSlot_int cobj_sig cstr_sig cobj_slt cstr_slt (toCFunPtr funptr) (castStablePtrToPtr stptr)
return ()
where
slotHandlerWrapper_int :: Ptr fun -> Ptr () -> Ptr (TQObject c) -> CInt -> IO ()
slotHandlerWrapper_int funptr stptr qobjptr cint
= do qobj <- qObjectFromPtr qobjptr
let hint = fromCInt cint
if (objectIsNull qobj)
then do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
else _handler qobj (qEnum_fromInt hint)
return ()
eCreationTime :: FileTime
eCreationTime
= ieFileTime $ 0
eModificationTime :: FileTime
eModificationTime
= ieFileTime $ 1
eAccessTime :: FileTime
eAccessTime
= ieFileTime $ 2
data CQAbstractFileEngineExtension a = CQAbstractFileEngineExtension a
type QAbstractFileEngineExtension = QEnum(CQAbstractFileEngineExtension Int)
ieQAbstractFileEngineExtension :: Int -> QAbstractFileEngineExtension
ieQAbstractFileEngineExtension x = QEnum (CQAbstractFileEngineExtension x)
instance QEnumC (CQAbstractFileEngineExtension Int) where
qEnum_toInt (QEnum (CQAbstractFileEngineExtension x)) = x
qEnum_fromInt x = QEnum (CQAbstractFileEngineExtension x)
withQEnumResult x
= do
ti <- x
return $ qEnum_fromInt $ fromIntegral ti
withQEnumListResult x
= do
til <- x
return $ map qEnum_fromInt til
instance Qcs (QObject c -> QAbstractFileEngineExtension -> IO ()) where
connectSlot _qsig_obj _qsig_nam _qslt_obj _qslt_nam _handler
= do
funptr <- wrapSlotHandler_int slotHandlerWrapper_int
stptr <- newStablePtr (Wrap _handler)
withObjectPtr _qsig_obj $ \cobj_sig ->
withCWString _qsig_nam $ \cstr_sig ->
withObjectPtr _qslt_obj $ \cobj_slt ->
withCWString _qslt_nam $ \cstr_slt ->
qtc_connectSlot_int cobj_sig cstr_sig cobj_slt cstr_slt (toCFunPtr funptr) (castStablePtrToPtr stptr)
return ()
where
slotHandlerWrapper_int :: Ptr fun -> Ptr () -> Ptr (TQObject c) -> CInt -> IO ()
slotHandlerWrapper_int funptr stptr qobjptr cint
= do qobj <- qObjectFromPtr qobjptr
let hint = fromCInt cint
if (objectIsNull qobj)
then do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
else _handler qobj (qEnum_fromInt hint)
return ()
eAtEndExtension :: QAbstractFileEngineExtension
eAtEndExtension
= ieQAbstractFileEngineExtension $ 0
eFastReadLineExtension :: QAbstractFileEngineExtension
eFastReadLineExtension
= ieQAbstractFileEngineExtension $ 1
| keera-studios/hsQt | Qtc/Enums/Core/QAbstractFileEngine.hs | bsd-2-clause | 15,120 | 18 | 22 | 3,215 | 4,137 | 2,096 | 2,041 | 433 | 1 |
{-# LANGUAGE FlexibleInstances, TypeFamilies, TypeOperators, DataKinds
, UndecidableInstances #-}
-- | Functions to replace the numerical functions from the Prelude.
-- See "Units" for the documentation of '(+)', '(-)', '(*)' and '(/)'.
--
-- This module includes Num, Fractional, Floating instances for '(:@)', which
-- implement 'fromInteger' and 'fromRational', to avoid having to use 'lit'
-- on every single numeric literal. This allows, for example:
--
-- > 5 * meter
--
-- It also means you can directly work with dimensionless quantities as with
-- regular numbers without having to constantly untag/retag them.
module Units.Prelude
( module Units
, module Units.TH
, (+), (-), (*), (/), sqrt
, Convert, convert
, Linear, Base, normalize
-- Re-export the rest of the Prelude
, module Prelude
) where
import Prelude hiding ((+),(-),(*),(/),sqrt)
import qualified Prelude
import Units
import Units.TH
import Units.Convert
(+) :: Num a => a :@ u -> a :@ u -> a :@ u
(+) = addU
infixl 6 +
(-) :: Num a => a :@ u -> a :@ u -> a :@ u
(-) = subU
infixl 6 -
(*) :: Num a => a :@ u -> a :@ v -> a :@ u*v
(*) = mulU
infixl 7 *
(/) :: Fractional a => a :@ u -> a :@ v -> a :@ u/v
(/) = divU
infixl 7 /
sqrt :: Floating a => a :@ u^2 -> a :@ u
sqrt = sqrtU
-- Ability to write units like “5 meter” or even “5 kilo*meter/hour”
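-- A numeric literal applied to a quantity goes through the instance below:
-- "5 meter" elaborates to (fromInteger 5 :: a :@ One) * meter.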
instance (Num a, t ~ (a :@ One*u)) => Num (a :@ u -> t) where
fromInteger = (*) . fromInteger
negate = error "negate on not fully applied number"
(+) = error "(+) on not fully applied number"
(*) = error "(*) on not fully applied number"
abs = error "abs on not fully applied number"
signum = error "signum on not fully applied number"
instance (Num a, u ~ One) => Num (a :@ u) where
fromInteger = lit . fromInteger
(+) = addU
(*) = mulU
  negate = lit . negate . unTag
  abs = lit . abs . unTag
  signum = lit . signum . unTag
instance (Fractional a, t ~ (a :@ One*u)) => Fractional (a :@ u -> t) where
fromRational = (*) . fromRational
(/) = error "(/) on not fully applied number"
recip = error "recip on not fully applied number"
instance (Fractional a, u ~ One) => Fractional (a :@ u) where
fromRational = lit . fromRational
(/) = divU
recip = lit . recip . unTag
instance (Floating a, u ~ One) => Floating (a :@ u) where
pi = lit pi
a ** b = lit $ unTag a ** unTag b
logBase a b = lit $ unTag a `logBase` unTag b
exp = lit . exp . unTag
sqrt = lit . Prelude.sqrt . unTag
log = lit . log . unTag
sin = lit . sin . unTag
tan = lit . tan . unTag
cos = lit . cos . unTag
asin = lit . asin . unTag
atan = lit . atan . unTag
acos = lit . acos . unTag
sinh = lit . sinh . unTag
tanh = lit . tanh . unTag
cosh = lit . cosh . unTag
asinh = lit . asinh . unTag
atanh = lit . atanh . unTag
acosh = lit . acosh . unTag
| haasn/units | src/Units/Prelude.hs | bsd-3-clause | 2,904 | 4 | 10 | 747 | 969 | 551 | 418 | 69 | 1 |
import Graphics.X11.Turtle
import Control.Monad
main :: IO ()
main = do
f <- openField
t <- newTurtle f
shape t "turtle"
replicateM_ 4 $ do
forward t 100
left t 90
replicateM_ 4 $ do
backward t 100
left t 90
silentundo t 4
left t 90
replicateM_ 4 $ do
backward t 100
left t 90
onkeypress f $ return . ('q' /=)
waitField f
| YoshikuniJujo/xturtle_haskell | tests/testSilentUndo.hs | bsd-3-clause | 345 | 0 | 10 | 89 | 170 | 72 | 98 | 20 | 1 |
module Function where
import Control.Monad (liftM2, (<=<))
import Data.Functor.Classes (liftEq2)
import qualified Data.Matrix as Mx
import qualified Data.Vector as Vec
import Library
data Function = Function
{ showFunction :: String
, runFunction :: Vector -> Either ComputeError Double
}
type FMatrix = Mx.Matrix Function
type FVector = Vec.Vector Function
instance Show Function where
show = take 100 . showFunction
instance Eq Function where
Function _ f == Function _ g = and $ zipWith (liftEq2 (==) closeTo) (map f args) (map g args)
where args = Vec.fromList <$> [[x, x + 10 .. x + 100] | x <- [1, 10 .. 100]]
instance Num Function where
f + g = concat ["(", showFunction f, "+", showFunction g, ")"] `Function` (\x -> liftM2 (+) (runFunction f x) (runFunction g x))
f * g = concat ["(", showFunction f, "*", showFunction g, ")"] `Function` (\x -> liftM2 (*) (runFunction f x) (runFunction g x))
negate f = concat ["-", "(", showFunction f, ")"] `Function` (return . negate <=< runFunction f)
abs f = compose (simpleFunc "abs" $ Right . abs) f
signum f = compose (simpleFunc "signum" $ Right . signum) f
fromInteger x = simpleFunc (show x) (Right . const (fromInteger x))
instance Fractional Function where
f / g = concat ["(", showFunction f, "/", showFunction g, ")"] `Function` (\x -> do
denominator <- runFunction g x
numerator <- runFunction f x
if nearZero denominator
then Left $ ArgumentOutOfRange "Division by zero"
else return $ numerator / denominator
)
fromRational x = simpleFunc (show x) (Right . const (fromRational x))
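-- Composition lifted to Function: compose f g runs g and feeds its result to f
-- as a one-element argument vector; compose2 does the same with two results.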
compose :: Function -> Function -> Function
compose f g = Function (show f ++ "(" ++ show g ++ ")") (\v -> runFunction f =<< (Vec.fromList . (: []) <$> runFunction g v))
compose2 :: Function -> Function -> Function -> Function
compose2 f g h = Function (show f ++ "(" ++ show g ++ "," ++ show h ++ ")") (\v ->
runFunction f =<< (do
gv <- runFunction g v
hv <- runFunction h v
return $ Vec.fromList [gv, hv]
))
simpleFunc :: String -> (Double -> Either ComputeError Double) -> Function
simpleFunc s f = Function s $ f . (Vec.! 0)
runMeshFunction :: Traversable t => Function -> t Vector -> Either ComputeError (t Double)
runMeshFunction = traverse . runFunction
runMeshFunctionSystem :: (Traversable v, Traversable t) => v Function -> t Vector -> Either ComputeError (v (t Double))
runMeshFunctionSystem fs xs = traverse (flip runMeshFunction xs) fs
runFunctionSystem :: Traversable t => t Function -> Vector -> Either ComputeError (t Double)
runFunctionSystem fs x = traverse (flip runFunction x) fs
| hrsrashid/nummet | lib/Function.hs | bsd-3-clause | 2,726 | 0 | 14 | 621 | 1,113 | 575 | 538 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE NoImplicitPrelude #-}
-- | News page view.
module HL.V.News where
import HL.V
import HL.V.Template
-- | News view.
newsV :: Html -> FromBlaze App
newsV inner =
template []
"News"
(\_ ->
container (do row (span12 (do h1 "News"))
inner))
| chrisdone/hl | src/HL/V/News.hs | bsd-3-clause | 354 | 0 | 18 | 116 | 89 | 48 | 41 | 12 | 1 |
{-# LANGUAGE PatternGuards #-}
module IRTS.CodegenJavaScript (codegenJavaScript, JSTarget(..)) where
import Idris.AbsSyntax hiding (TypeCase)
import IRTS.Bytecode
import IRTS.Lang
import IRTS.Simplified
import IRTS.CodegenCommon
import Core.TT
import Paths_idris
import Util.System
import Control.Arrow
import Data.Char
import Data.List
import Data.Maybe
import System.IO
import System.Directory
idrNamespace :: String
idrNamespace = "__IDR__"
idrRTNamespace :: String
idrRTNamespace = "__IDRRT__"
idrLTNamespace :: String
idrLTNamespace = "__IDRLT__"
data JSTarget = Node | JavaScript deriving Eq
data JSType = JSIntTy
| JSStringTy
| JSIntegerTy
| JSFloatTy
| JSCharTy
| JSPtrTy
| JSForgotTy
deriving Eq
data JSNum = JSInt Int
| JSFloat Double
| JSInteger Integer
data JS = JSRaw String
| JSFunction [String] JS
| JSType JSType
| JSSeq [JS]
| JSReturn JS
| JSApp JS [JS]
| JSNew String [JS]
| JSError String
| JSOp String JS JS
| JSProj JS String
| JSVar LVar
| JSNull
| JSThis
| JSTrue
| JSArray [JS]
| JSObject [(String, JS)]
| JSString String
| JSNum JSNum
| JSAssign JS JS
| JSAlloc String (Maybe JS)
| JSIndex JS JS
| JSCond [(JS, JS)]
| JSTernary JS JS JS
compileJS :: JS -> String
compileJS (JSRaw code) =
code
compileJS (JSFunction args body) =
"function("
++ intercalate "," args
++ "){\n"
++ compileJS body
++ "\n}"
compileJS (JSType ty)
| JSIntTy <- ty = idrRTNamespace ++ "Int"
| JSStringTy <- ty = idrRTNamespace ++ "String"
| JSIntegerTy <- ty = idrRTNamespace ++ "Integer"
| JSFloatTy <- ty = idrRTNamespace ++ "Float"
| JSCharTy <- ty = idrRTNamespace ++ "Char"
| JSPtrTy <- ty = idrRTNamespace ++ "Ptr"
| JSForgotTy <- ty = idrRTNamespace ++ "Forgot"
compileJS (JSSeq seq) =
intercalate ";\n" (map compileJS seq)
compileJS (JSReturn val) =
"return " ++ compileJS val
compileJS (JSApp lhs rhs)
| JSFunction {} <- lhs =
concat ["(", compileJS lhs, ")(", args, ")"]
| otherwise =
concat [compileJS lhs, "(", args, ")"]
where args :: String
args = intercalate "," $ map compileJS rhs
compileJS (JSNew name args) =
"new " ++ name ++ "(" ++ intercalate "," (map compileJS args) ++ ")"
compileJS (JSError exc) =
"(function(){throw '" ++ exc ++ "';})()"
compileJS (JSOp op lhs rhs) =
compileJS lhs ++ " " ++ op ++ " " ++ compileJS rhs
compileJS (JSProj obj field)
| JSFunction {} <- obj =
concat ["(", compileJS obj, ").", field]
| otherwise =
compileJS obj ++ '.' : field
compileJS (JSVar var) =
translateVariableName var
compileJS JSNull =
"null"
compileJS JSThis =
"this"
compileJS JSTrue =
"true"
compileJS (JSArray elems) =
"[" ++ intercalate "," (map compileJS elems) ++ "]"
compileJS (JSObject fields) =
"{" ++ intercalate ",\n" (map compileField fields) ++ "}"
where
compileField :: (String, JS) -> String
compileField (name, val) = '\'' : name ++ "' : " ++ compileJS val
compileJS (JSString str) =
show str
compileJS (JSNum num)
| JSInt i <- num = show i
| JSFloat f <- num = show f
| JSInteger i <- num = show i
compileJS (JSAssign lhs rhs) =
compileJS lhs ++ "=" ++ compileJS rhs
compileJS (JSAlloc name val) =
"var " ++ name ++ maybe "" ((" = " ++) . compileJS) val
compileJS (JSIndex lhs rhs) =
compileJS lhs ++ "[" ++ compileJS rhs ++ "]"
compileJS (JSCond branches) =
intercalate " else " $ map createIfBlock branches
where
createIfBlock (cond, e) =
"if (" ++ compileJS cond ++") {\n"
++ "return " ++ compileJS e
++ ";\n}"
compileJS (JSTernary cond true false) =
let c = compileJS cond
t = compileJS true
f = compileJS false in
"(" ++ c ++ ")?(" ++ t ++ "):(" ++ f ++ ")"
jsTailcall :: JS -> JS
jsTailcall call =
jsCall (idrRTNamespace ++ "tailcall") [
JSFunction [] (JSReturn call)
]
jsCall :: String -> [JS] -> JS
jsCall fun = JSApp (JSRaw fun)
jsMeth :: JS -> String -> [JS] -> JS
jsMeth obj meth =
JSApp (JSProj obj meth)
jsInstanceOf :: JS -> JS -> JS
jsInstanceOf = JSOp "instanceof"
jsEq :: JS -> JS -> JS
jsEq = JSOp "=="
jsAnd :: JS -> JS -> JS
jsAnd = JSOp "&&"
jsType :: JS
jsType = JSRaw $ idrRTNamespace ++ "Type"
jsCon :: JS
jsCon = JSRaw $ idrRTNamespace ++ "Con"
jsTag :: JS -> JS
jsTag obj = JSProj obj "tag"
jsTypeTag :: JS -> JS
jsTypeTag obj = JSProj obj "type"
jsBigInt :: JS -> JS
jsBigInt val =
JSApp (JSRaw $ idrRTNamespace ++ "bigInt") [val]
jsVar :: Int -> String
jsVar = ("__var_" ++) . show
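-- A let-binding is emitted as an immediately applied function:
-- (function(name){ return body; })(value)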
jsLet :: String -> JS -> JS -> JS
jsLet name value body =
JSApp (
JSFunction [name] (
JSReturn body
)
) [value]
codegenJavaScript
:: JSTarget
-> [(Name, SDecl)]
-> FilePath
-> OutputType
-> IO ()
codegenJavaScript target definitions filename outputType = do
let (header, runtime) = case target of
Node ->
("#!/usr/bin/env node\n", "-node")
JavaScript ->
("", "-browser")
path <- getDataDir
idrRuntime <- readFile $ path ++ "/js/Runtime-common.js"
tgtRuntime <- readFile $ concat [path, "/js/Runtime", runtime, ".js"]
writeFile filename $ intercalate "\n" $ [ header
, idrRuntime
, tgtRuntime
] ++ functions ++ [mainLoop]
setPermissions filename (emptyPermissions { readable = True
, executable = target == Node
, writable = True
})
where
def :: [(String, SDecl)]
def = map (first translateNamespace) definitions
functions :: [String]
functions = map (compileJS . translateDeclaration) def
mainLoop :: String
mainLoop = compileJS $
JSSeq [ JSAlloc "main" $ Just $ JSFunction [] (
jsTailcall $ jsCall "__IDR__runMain0" []
)
, jsCall "main" []
]
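-- Mangle an Idris identifier into a valid JavaScript identifier.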
translateIdentifier :: String -> String
translateIdentifier =
replaceReserved . concatMap replaceBadChars
where replaceBadChars :: Char -> String
replaceBadChars c
| ' ' <- c = "_"
| '_' <- c = "__"
| isDigit c = "_" ++ [c] ++ "_"
| not (isLetter c && isAscii c) = '_' : show (ord c)
| otherwise = [c]
replaceReserved s
| s `elem` reserved = '_' : s
| otherwise = s
reserved = [ "break"
, "case"
, "catch"
, "continue"
, "debugger"
, "default"
, "delete"
, "do"
, "else"
, "finally"
, "for"
, "function"
, "if"
, "in"
, "instanceof"
, "new"
, "return"
, "switch"
, "this"
, "throw"
, "try"
, "typeof"
, "var"
, "void"
, "while"
, "with"
, "class"
, "enum"
, "export"
, "extends"
, "import"
, "super"
, "implements"
, "interface"
, "let"
, "package"
, "private"
, "protected"
, "public"
, "static"
, "yield"
]
translateNamespace :: Name -> String
translateNamespace (UN _) = idrNamespace
translateNamespace (NS _ ns) = idrNamespace ++ concatMap translateIdentifier ns
translateNamespace (MN _ _) = idrNamespace
translateName :: Name -> String
translateName (UN name) = translateIdentifier name
translateName (NS name _) = translateName name
translateName (MN i name) = translateIdentifier name ++ show i
translateConstant :: Const -> JS
translateConstant (I i) = JSNum (JSInt i)
translateConstant (Fl f) = JSNum (JSFloat f)
translateConstant (Ch c) = JSString [c]
translateConstant (Str s) = JSString s
translateConstant (AType (ATInt ITNative)) = JSType JSIntTy
translateConstant StrType = JSType JSStringTy
translateConstant (AType (ATInt ITBig)) = JSType JSIntegerTy
translateConstant (AType ATFloat) = JSType JSFloatTy
translateConstant (AType (ATInt ITChar)) = JSType JSCharTy
translateConstant PtrType = JSType JSPtrTy
translateConstant Forgot = JSType JSForgotTy
translateConstant (BI i) = jsBigInt $ JSNum (JSInteger i)
translateConstant c =
JSError $ "Unimplemented Constant: " ++ show c
translateDeclaration :: (String, SDecl) -> JS
translateDeclaration (path, SFun name params stackSize body)
| (MN _ "APPLY") <- name
, (SLet var val next) <- body
, (SChkCase cvar cases) <- next =
let lvar = translateVariableName var
lookup = "[" ++ lvar ++ ".tag](fn0,arg0," ++ lvar ++ ")" in
JSSeq [ lookupTable [(var, "chk")] var cases
, jsDecl $ JSFunction ["fn0", "arg0"] (
JSSeq [ JSAlloc "__var_0" (Just $ JSRaw "fn0")
, JSReturn $ jsLet (translateVariableName var) (
translateExpression val
) (JSTernary (
(JSVar var `jsInstanceOf` jsCon) `jsAnd`
(hasProp lookupTableName (translateVariableName var))
) (JSRaw $
lookupTableName ++ lookup
) JSNull
)
]
)
]
| (MN _ "EVAL") <- name
, (SChkCase var cases) <- body =
JSSeq [ lookupTable [] var cases
, jsDecl $ JSFunction ["arg0"] (JSReturn $
JSTernary (
(JSRaw "arg0" `jsInstanceOf` jsCon) `jsAnd`
(hasProp lookupTableName "arg0")
) (JSRaw $ lookupTableName ++ "[arg0.tag](arg0)") (JSRaw "arg0")
)
]
| otherwise =
let fun = translateExpression body in
jsDecl $ jsFun fun
where
hasProp :: String -> String -> JS
hasProp table var =
jsMeth (JSRaw table) "hasOwnProperty" [JSRaw $ var ++ ".tag"]
caseFun :: [(LVar, String)] -> LVar -> SAlt -> JS
caseFun aux var cse =
jsFunAux aux (translateCase (Just (translateVariableName var)) cse)
getTag :: SAlt -> Maybe String
getTag (SConCase _ tag _ _ _) = Just $ show tag
getTag _ = Nothing
lookupTableName :: String
lookupTableName = idrLTNamespace ++ translateName name
lookupTable :: [(LVar, String)] -> LVar -> [SAlt] -> JS
lookupTable aux var cases =
JSAlloc lookupTableName $ Just (
JSObject $ catMaybes $ map (lookupEntry aux var) cases
)
where
lookupEntry :: [(LVar, String)] -> LVar -> SAlt -> Maybe (String, JS)
lookupEntry aux var alt = do
tag <- getTag alt
return (tag, caseFun aux var alt)
jsDecl :: JS -> JS
jsDecl = JSAlloc (path ++ translateName name) . Just
jsFun body = jsFunAux [] body
jsFunAux :: [(LVar, String)] -> JS -> JS
jsFunAux aux body =
JSFunction (p ++ map snd aux) (
JSSeq $
zipWith assignVar [0..] p ++
map allocVar [numP .. (numP + stackSize - 1)] ++
map assignAux aux ++
[JSReturn body]
)
where
numP :: Int
numP = length params
allocVar :: Int -> JS
allocVar n = JSAlloc (jsVar n) Nothing
assignVar :: Int -> String -> JS
assignVar n s = JSAlloc (jsVar n) (Just $ JSRaw s)
assignAux :: (LVar, String) -> JS
assignAux (var, val) = JSAssign (JSRaw $ translateVariableName var) (JSRaw val)
p :: [String]
p = map translateName params
translateVariableName :: LVar -> String
translateVariableName (Loc i) =
jsVar i
translateExpression :: SExp -> JS
translateExpression (SLet name value body) =
jsLet (translateVariableName name) (
translateExpression value
) (translateExpression body)
translateExpression (SConst cst) =
translateConstant cst
translateExpression (SV var) =
JSVar var
translateExpression (SApp tc name vars)
| False <- tc =
jsTailcall $ translateFunctionCall name vars
| True <- tc =
JSNew (idrRTNamespace ++ "Tailcall") [JSFunction [] (
JSReturn $ translateFunctionCall name vars
)]
where
translateFunctionCall name vars =
jsCall (translateNamespace name ++ translateName name) (map JSVar vars)
translateExpression (SOp op vars)
| LNoOp <- op = JSVar (last vars)
| (LZExt _ ITBig) <- op = jsBigInt $ JSVar (last vars)
| (LPlus (ATInt ITBig)) <- op
, (lhs:rhs:_) <- vars = invokeMeth lhs "add" [rhs]
| (LMinus (ATInt ITBig)) <- op
, (lhs:rhs:_) <- vars = invokeMeth lhs "minus" [rhs]
| (LTimes (ATInt ITBig)) <- op
, (lhs:rhs:_) <- vars = invokeMeth lhs "times" [rhs]
| (LSDiv (ATInt ITBig)) <- op
, (lhs:rhs:_) <- vars = invokeMeth lhs "divide" [rhs]
| (LSRem (ATInt ITBig)) <- op
, (lhs:rhs:_) <- vars = invokeMeth lhs "mod" [rhs]
| (LEq (ATInt ITBig)) <- op
, (lhs:rhs:_) <- vars = invokeMeth lhs "equals" [rhs]
| (LSLt (ATInt ITBig)) <- op
, (lhs:rhs:_) <- vars = invokeMeth lhs "lesser" [rhs]
| (LSLe (ATInt ITBig)) <- op
, (lhs:rhs:_) <- vars = invokeMeth lhs "lesserOrEquals" [rhs]
| (LSGt (ATInt ITBig)) <- op
, (lhs:rhs:_) <- vars = invokeMeth lhs "greater" [rhs]
| (LSGe (ATInt ITBig)) <- op
, (lhs:rhs:_) <- vars = invokeMeth lhs "greaterOrEquals" [rhs]
| (LPlus ATFloat) <- op
, (lhs:rhs:_) <- vars = translateBinaryOp "+" lhs rhs
| (LMinus ATFloat) <- op
, (lhs:rhs:_) <- vars = translateBinaryOp "-" lhs rhs
| (LTimes ATFloat) <- op
, (lhs:rhs:_) <- vars = translateBinaryOp "*" lhs rhs
| (LSDiv ATFloat) <- op
, (lhs:rhs:_) <- vars = translateBinaryOp "/" lhs rhs
| (LEq ATFloat) <- op
, (lhs:rhs:_) <- vars = translateBinaryOp "==" lhs rhs
| (LSLt ATFloat) <- op
, (lhs:rhs:_) <- vars = translateBinaryOp "<" lhs rhs
| (LSLe ATFloat) <- op
, (lhs:rhs:_) <- vars = translateBinaryOp "<=" lhs rhs
| (LSGt ATFloat) <- op
, (lhs:rhs:_) <- vars = translateBinaryOp ">" lhs rhs
| (LSGe ATFloat) <- op
, (lhs:rhs:_) <- vars = translateBinaryOp ">=" lhs rhs
| (LPlus _) <- op
, (lhs:rhs:_) <- vars = translateBinaryOp "+" lhs rhs
| (LMinus _) <- op
, (lhs:rhs:_) <- vars = translateBinaryOp "-" lhs rhs
| (LTimes _) <- op
, (lhs:rhs:_) <- vars = translateBinaryOp "*" lhs rhs
| (LSDiv _) <- op
, (lhs:rhs:_) <- vars = translateBinaryOp "/" lhs rhs
| (LSRem _) <- op
, (lhs:rhs:_) <- vars = translateBinaryOp "%" lhs rhs
| (LEq _) <- op
, (lhs:rhs:_) <- vars = translateBinaryOp "==" lhs rhs
| (LSLt _) <- op
, (lhs:rhs:_) <- vars = translateBinaryOp "<" lhs rhs
| (LSLe _) <- op
, (lhs:rhs:_) <- vars = translateBinaryOp "<=" lhs rhs
| (LSGt _) <- op
, (lhs:rhs:_) <- vars = translateBinaryOp ">" lhs rhs
| (LSGe _) <- op
, (lhs:rhs:_) <- vars = translateBinaryOp ">=" lhs rhs
| (LAnd _) <- op
, (lhs:rhs:_) <- vars = translateBinaryOp "&" lhs rhs
| (LOr _) <- op
, (lhs:rhs:_) <- vars = translateBinaryOp "|" lhs rhs
| (LXOr _) <- op
, (lhs:rhs:_) <- vars = translateBinaryOp "^" lhs rhs
| (LSHL _) <- op
, (lhs:rhs:_) <- vars = translateBinaryOp "<<" rhs lhs
| (LASHR _) <- op
, (lhs:rhs:_) <- vars = translateBinaryOp ">>" rhs lhs
| (LCompl _) <- op
, (arg:_) <- vars = JSRaw $ '~' : translateVariableName arg
| LStrConcat <- op
, (lhs:rhs:_) <- vars = translateBinaryOp "+" lhs rhs
| LStrEq <- op
, (lhs:rhs:_) <- vars = translateBinaryOp "==" lhs rhs
| LStrLt <- op
, (lhs:rhs:_) <- vars = translateBinaryOp "<" lhs rhs
| LStrLen <- op
, (arg:_) <- vars = JSProj (JSVar arg) "length"
| (LStrInt ITNative) <- op
, (arg:_) <- vars = jsCall "parseInt" [JSVar arg]
| (LIntStr ITNative) <- op
, (arg:_) <- vars = jsCall "String" [JSVar arg]
| (LSExt ITNative ITBig) <- op
, (arg:_) <- vars = jsBigInt $ JSVar arg
| (LTrunc ITBig ITNative) <- op
, (arg:_) <- vars = jsMeth (JSVar arg) "valueOf" []
| (LIntStr ITBig) <- op
, (arg:_) <- vars = jsMeth (JSVar arg) "toString" []
| (LStrInt ITBig) <- op
, (arg:_) <- vars = jsBigInt $ JSVar arg
| LFloatStr <- op
, (arg:_) <- vars = jsCall "String" [JSVar arg]
| LStrFloat <- op
, (arg:_) <- vars = jsCall "parseFloat" [JSVar arg]
| (LIntFloat ITNative) <- op
, (arg:_) <- vars = JSVar arg
| (LFloatInt ITNative) <- op
, (arg:_) <- vars = JSVar arg
| (LChInt ITNative) <- op
, (arg:_) <- vars = JSProj (JSVar arg) "charCodeAt(0)"
| (LIntCh ITNative) <- op
, (arg:_) <- vars = jsCall "String.fromCharCode" [JSVar arg]
| LFExp <- op
, (arg:_) <- vars = jsCall "Math.exp" [JSVar arg]
| LFLog <- op
, (arg:_) <- vars = jsCall "Math.log" [JSVar arg]
| LFSin <- op
, (arg:_) <- vars = jsCall "Math.sin" [JSVar arg]
| LFCos <- op
, (arg:_) <- vars = jsCall "Math.cos" [JSVar arg]
| LFTan <- op
, (arg:_) <- vars = jsCall "Math.tan" [JSVar arg]
| LFASin <- op
, (arg:_) <- vars = jsCall "Math.asin" [JSVar arg]
| LFACos <- op
, (arg:_) <- vars = jsCall "Math.acos" [JSVar arg]
| LFATan <- op
, (arg:_) <- vars = jsCall "Math.atan" [JSVar arg]
| LFSqrt <- op
, (arg:_) <- vars = jsCall "Math.sqrt" [JSVar arg]
| LFFloor <- op
, (arg:_) <- vars = jsCall "Math.floor" [JSVar arg]
| LFCeil <- op
, (arg:_) <- vars = jsCall "Math.ceil" [JSVar arg]
| LStrCons <- op
, (lhs:rhs:_) <- vars = translateBinaryOp "+" lhs rhs
| LStrHead <- op
, (arg:_) <- vars = JSIndex (JSVar arg) (JSRaw "0")
| LStrRev <- op
, (arg:_) <- vars = JSProj (JSVar arg) "split('').reverse().join('')"
| LStrIndex <- op
, (lhs:rhs:_) <- vars = JSIndex (JSVar lhs) (JSVar rhs)
| LStrTail <- op
, (arg:_) <- vars = let v = translateVariableName arg in
JSRaw $ v ++ ".substr(1," ++ v ++ ".length-1)"
where
translateBinaryOp :: String -> LVar -> LVar -> JS
translateBinaryOp f lhs rhs = JSOp f (JSVar lhs) (JSVar rhs)
invokeMeth :: LVar -> String -> [LVar] -> JS
invokeMeth obj meth args = jsMeth (JSVar obj) meth (map JSVar args)
translateExpression (SError msg) =
JSError msg
translateExpression (SForeign _ _ "putStr" [(FString, var)]) =
jsCall (idrRTNamespace ++ "print") [JSVar var]
translateExpression (SForeign _ _ fun args)
| "[]=" `isSuffixOf` fun
, (obj:idx:val:[]) <- args =
JSRaw $ concat [object obj, index idx, assign val]
| "[]" `isSuffixOf` fun
, (obj:idx:[]) <- args =
JSRaw $ object obj ++ index idx
| "[" `isPrefixOf` fun && "]=" `isSuffixOf` fun
, (obj:val:[]) <- args =
JSRaw $ concat [object obj, '[' : name ++ "]", assign val]
| "[" `isPrefixOf` fun && "]" `isSuffixOf` fun
, (obj:[]) <- args =
JSRaw $ object obj ++ '[' : name ++ "]"
| "." `isPrefixOf` fun, "=" `isSuffixOf` fun
, (obj:val:[]) <- args =
JSRaw $ concat [object obj, field, assign val]
| "." `isPrefixOf` fun
, (obj:[]) <- args =
JSRaw $ object obj ++ field
| "." `isPrefixOf` fun
, (obj:[(FUnit, _)]) <- args =
JSRaw $ concat [object obj, method, "()"]
| "." `isPrefixOf` fun
, (obj:as) <- args =
JSRaw $ concat [object obj, method, arguments as]
| "[]=" `isSuffixOf` fun
, (idx:val:[]) <- args =
JSRaw $ concat [array, index idx, assign val]
| "[]" `isSuffixOf` fun
, (idx:[]) <- args =
JSRaw $ array ++ index idx
| otherwise = JSRaw $ fun ++ arguments args
where
name = filter (`notElem` "[]=") fun
method = name
field = name
array = name
object o = translateVariableName (snd o)
index i = "[" ++ translateVariableName (snd i) ++ "]"
assign v = '=' : generateWrapper v
arguments as =
'(' : intercalate "," (map generateWrapper as) ++ ")"
generateWrapper (ffunc, name)
| FFunction <- ffunc =
idrRTNamespace ++ "ffiWrap(" ++ translateVariableName name ++ ")"
| FFunctionIO <- ffunc =
idrRTNamespace ++ "ffiWrap(" ++ translateVariableName name ++ ")"
generateWrapper (_, name) =
translateVariableName name
translateExpression patterncase
| (SChkCase var cases) <- patterncase = caseHelper var cases "chk"
| (SCase var cases) <- patterncase = caseHelper var cases "cse"
where
caseHelper var cases param =
JSApp (JSFunction [param] (
JSCond $ map (expandCase param . translateCaseCond param) cases
)) [JSVar var]
expandCase :: String -> (Cond, JS) -> (JS, JS)
expandCase _ (RawCond cond, branch) = (cond, branch)
expandCase _ (CaseCond DefaultCase, branch) = (JSTrue , branch)
expandCase var (CaseCond caseTy, branch)
| ConCase tag <- caseTy =
let checkCon = JSRaw var `jsInstanceOf` jsCon
checkTag = (JSRaw $ show tag) `jsEq` jsTag (JSRaw var) in
(checkCon `jsAnd` checkTag, branch)
| TypeCase ty <- caseTy =
let checkTy = JSRaw var `jsInstanceOf` jsType
checkTag = jsTypeTag (JSRaw var) `jsEq` JSType ty in
(checkTy `jsAnd` checkTag, branch)
translateExpression (SCon i name vars) =
JSNew (idrRTNamespace ++ "Con") [ JSRaw $ show i
, JSArray $ map JSVar vars
]
translateExpression (SUpdate var e) =
JSAssign (JSVar var) (translateExpression e)
translateExpression (SProj var i) =
JSIndex (JSProj (JSVar var) "vars") (JSRaw $ show i)
translateExpression SNothing = JSNull
translateExpression e =
JSError $ "Not yet implemented: " ++ filter (/= '\'') (show e)
data CaseType = ConCase Int
| TypeCase JSType
| DefaultCase
deriving Eq
data Cond = CaseCond CaseType
| RawCond JS
translateCaseCond :: String -> SAlt -> (Cond, JS)
translateCaseCond _ cse@(SDefaultCase _) =
(CaseCond DefaultCase, translateCase Nothing cse)
translateCaseCond var cse@(SConstCase ty _)
| StrType <- ty = matchHelper JSStringTy
| PtrType <- ty = matchHelper JSPtrTy
| Forgot <- ty = matchHelper JSForgotTy
| (AType ATFloat) <- ty = matchHelper JSFloatTy
| (AType (ATInt ITBig)) <- ty = matchHelper JSIntegerTy
| (AType (ATInt ITNative)) <- ty = matchHelper JSIntTy
| (AType (ATInt ITChar)) <- ty = matchHelper JSCharTy
where
matchHelper :: JSType -> (Cond, JS)
matchHelper ty = (CaseCond $ TypeCase ty, translateCase Nothing cse)
translateCaseCond var cse@(SConstCase cst@(BI _) _) =
let cond = jsMeth (JSRaw var) "equals" [translateConstant cst] in
(RawCond cond, translateCase Nothing cse)
translateCaseCond var cse@(SConstCase cst _) =
let cond = JSRaw var `jsEq` translateConstant cst in
(RawCond cond, translateCase Nothing cse)
translateCaseCond var cse@(SConCase _ tag _ _ _) =
(CaseCond $ ConCase tag, translateCase (Just var) cse)
translateCase :: Maybe String -> SAlt -> JS
translateCase _ (SDefaultCase e) = translateExpression e
translateCase _ (SConstCase _ e) = translateExpression e
translateCase (Just var) (SConCase a _ _ vars e) =
let params = map jsVar [a .. (a + length vars)] in
jsMeth (JSFunction params (JSReturn $ translateExpression e)) "apply" [
JSThis, JSProj (JSRaw var) "vars"
]
| christiaanb/Idris-dev | src/IRTS/CodegenJavaScript.hs | bsd-3-clause | 24,752 | 0 | 24 | 8,068 | 9,274 | 4,687 | 4,587 | 619 | 4 |
module BurntSushiSpec
( main
, spec
) where
import SpecHelper ()
import Test.Hspec
import Text.Toml.Parser
import Control.Monad (forM_)
import Data.Aeson hiding (json)
import Data.Aeson.Encode.Pretty
import System.FilePath
import System.FilePath.Glob (compile, globDir1)
import qualified Data.ByteString.Lazy as BS
import qualified Data.ByteString.Lazy.Char8 as C8
import qualified Data.Text as T
import qualified Data.Text.IO as T
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
let tests = "toml-test/tests"
validPaths <- runIO $ globDir1 (compile "*.toml") $ tests </> "valid"
invalidPaths <- runIO $ globDir1 (compile "*.toml") $ tests </> "invalid"
forM_ validPaths $ \fp -> do
toml <- runIO $ T.readFile fp
json <- runIO $ BS.readFile $ asJSON fp
it (makeRelative tests fp) $ do
let Right value = eitherDecode json :: Either String Value
eparsed = toJSON <$> parseToml fp toml
case eparsed of
Right parsed | parsed /= value ->
expectationFailure $ unlines
[ "Decoding mismatch"
, "Input TOML:"
, T.unpack toml
, "", "Expected JSON:"
, C8.unpack $ encodePretty value
, "", "Actual JSON:"
, C8.unpack $ encodePretty parsed
]
Left err ->
expectationFailure $ unlines
[ "Decoding error:"
, "Input TOML:"
, T.unpack toml
, "", "Expected JSON:"
, C8.unpack $ encodePretty value
, "", "Error:"
, err
]
_ -> return () -- success
forM_ invalidPaths $ \fp -> do
toml <- runIO $ T.readFile fp
it (makeRelative tests fp) $ do
let eparsed = toJSON <$> parseToml fp toml
case eparsed of
Right parsed ->
expectationFailure $ unlines
[ "Decoding success, expected failure"
, "Input TOML:"
, T.unpack toml
, "", "Unexpected JSON:"
, C8.unpack $ encodePretty parsed
]
Left _ -> return () -- success
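-- e.g. asJSON "tests/valid/foo.toml" == "tests/valid/foo.json"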
asJSON :: FilePath -> FilePath
asJSON = (<.> "json") . dropExtension
| pbrisbin/toml-parse | test/BurntSushiSpec.hs | bsd-3-clause | 2,583 | 0 | 22 | 1,104 | 623 | 327 | 296 | 63 | 4 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1993-1998
A ``lint'' pass to check for Core correctness
-}
{-# LANGUAGE CPP #-}
module CoreLint (
lintCoreBindings, lintUnfolding,
lintPassResult, lintInteractiveExpr, lintExpr,
lintAnnots,
-- ** Debug output
endPass, endPassIO,
dumpPassResult,
CoreLint.dumpIfSet,
) where
#include "HsVersions.h"
import GhcPrelude
import CoreSyn
import CoreFVs
import CoreUtils
import CoreStats ( coreBindsStats )
import CoreMonad
import Bag
import Literal
import DataCon
import TysWiredIn
import TysPrim
import TcType ( isFloatingTy )
import Var
import VarEnv
import VarSet
import Name
import Id
import IdInfo
import PprCore
import ErrUtils
import Coercion
import SrcLoc
import Kind
import Type
import RepType
import TyCoRep -- checks validity of types/coercions
import TyCon
import CoAxiom
import BasicTypes
import ErrUtils as Err
import ListSetOps
import PrelNames
import Outputable
import FastString
import Util
import InstEnv ( instanceDFunId )
import OptCoercion ( checkAxInstCo )
import UniqSupply
import CoreArity ( typeArity )
import Demand ( splitStrictSig, isBotRes )
import HscTypes
import DynFlags
import Control.Monad
import qualified Control.Monad.Fail as MonadFail
import MonadUtils
import Data.Foldable ( toList )
import Data.List.NonEmpty ( NonEmpty )
import Data.Maybe
import Pair
import qualified GHC.LanguageExtensions as LangExt
{-
Note [GHC Formalism]
~~~~~~~~~~~~~~~~~~~~
This file implements the type-checking algorithm for System FC, the "official"
name of the Core language. Type safety of FC is at the heart of the claim that
executables produced by GHC do not have segmentation faults. Thus, it is
useful to be able to reason about System FC independently of reading the code.
To this purpose, there is a document core-spec.pdf built in docs/core-spec that
contains a formalism of the types and functions dealt with here. If you change
just about anything in this file or you change other types/functions throughout
the Core language (all signposted to this note), you should update that
formalism. See docs/core-spec/README for more info about how to do so.
Note [check vs lint]
~~~~~~~~~~~~~~~~~~~~
This file implements both a type checking algorithm and also general sanity
checking. For example, the "sanity checking" checks for TyConApp on the left
of an AppTy, which should never happen. These sanity checks don't really
affect any notion of type soundness. Yet, it is convenient to do the sanity
checks at the same time as the type checks. So, we use the following naming
convention:
- Functions that begin with 'lint'... are involved in type checking. These
functions might also do some sanity checking.
- Functions that begin with 'check'... are *not* involved in type checking.
They exist only for sanity checking.
Issues surrounding variable naming, shadowing, and such are considered *not*
to be part of type checking, as the formalism omits these details.
Summary of checks
~~~~~~~~~~~~~~~~~
Checks that a set of core bindings is well-formed. The PprStyle and String
just control what we print in the event of an error. The Bool value
indicates whether we have done any specialisation yet (in which case we do
some extra checks).
We check for
(a) type errors
(b) Out-of-scope type variables
(c) Out-of-scope local variables
(d) Ill-kinded types
(e) Incorrect unsafe coercions
If we have done specialisation then we check that there are
(a) No top-level bindings of primitive (unboxed type)
Outstanding issues:
-- Things are *not* OK if:
--
-- * Unsaturated type app before specialisation has been done;
--
-- * Oversaturated type app after specialisation (eta reduction
-- may well be happening...);
Note [Linting function types]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
As described in Note [Representation of function types], all saturated
applications of funTyCon are represented with the FunTy constructor. We check
this invariant in lintType.
Note [Linting type lets]
~~~~~~~~~~~~~~~~~~~~~~~~
In the desugarer, it's very very convenient to be able to say (in effect)
let a = Type Int in <body>
That is, use a type let. See Note [Type let] in CoreSyn.
However, when linting <body> we need to remember that a=Int, else we might
reject a correct program. So we carry a type substitution (in this example
[a -> Int]) and apply this substitution before comparing types. The function
lintInTy :: Type -> LintM (Type, Kind)
returns a substituted type.
When we encounter a binder (like x::a) we must apply the substitution
to the type of the binding variable. lintBinders does this.
For Ids, the type-substituted Id is added to the in_scope set (which
itself is part of the TCvSubst we are carrying down), and when we
find an occurrence of an Id, we fetch it from the in-scope set.
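For example (an illustrative sketch, not taken from a real dump), when linting

    let a = Type Int in (\(x::a) -> x) e

we extend the substitution with [a -> Int], so the lambda binder x is seen at
type Int and the application to e (of type Int) type-checks.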
Note [Bad unsafe coercion]
~~~~~~~~~~~~~~~~~~~~~~~~~~
For discussion see https://ghc.haskell.org/trac/ghc/wiki/BadUnsafeCoercions
Linter introduces additional rules that checks improper coercion between
different types, called bad coercions. Following coercions are forbidden:
(a) coercions between boxed and unboxed values;
(b) coercions between unlifted values of the different sizes, here
active size is checked, i.e. size of the actual value but not
the space allocated for value;
(c) coercions between floating and integral boxed values, this check
is not yet supported for unboxed tuples, as no semantics were
specified for that;
(d) coercions from / to vector type
(e) If types are unboxed tuples then tuple (# A_1,..,A_n #) can be
coerced to (# B_1,..,B_m #) if n=m and for each pair A_i, B_i rules
      (a-e) hold.
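As an illustrative sketch (the binding is invented, not from a real program),
a (UnivCo UnsafeCoerceProv) coercion from Int# to Double#, as in

    ...(x |> UnsafeCo Int# Double#)...    where x :: Int#

is reported by the checks below (see validateCoercion, used when linting
UnivCo) as a coercion "between float and integral values".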
Note [Join points]
~~~~~~~~~~~~~~~~~~
We check the rules listed in Note [Invariants on join points] in CoreSyn. The
only one that causes any difficulty is the first: All occurrences must be tail
calls. To this end, along with the in-scope set, we remember in le_joins the
subset of in-scope Ids that are valid join ids. For example:
join j x = ... in
case e of
A -> jump j y -- good
B -> case (jump j z) of -- BAD
C -> join h = jump j w in ... -- good
D -> let x = jump j v in ... -- BAD
A join point remains valid in case branches, so when checking the A
branch, j is still valid. When we check the scrutinee of the inner
case, however, we set le_joins to empty, and catch the
error. Similarly, join points can occur free in RHSes of other join
points but not the RHSes of value bindings (thunks and functions).
************************************************************************
* *
Beginning and ending passes
* *
************************************************************************
These functions are not CoreM monad stuff, but they probably ought to
be, and it makes a convenient place for them. They print out
stuff before and after core passes, and do Core Lint when necessary.
-}
endPass :: CoreToDo -> CoreProgram -> [CoreRule] -> CoreM ()
endPass pass binds rules
= do { hsc_env <- getHscEnv
; print_unqual <- getPrintUnqualified
; liftIO $ endPassIO hsc_env print_unqual pass binds rules }
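-- A hedged usage sketch (the pass body 'doMyTransform' is hypothetical; the
-- ModGuts fields used are the real ones from HscTypes):
--
--   runMyPass :: CoreToDo -> ModGuts -> CoreM ModGuts
--   runMyPass pass guts
--     = do { let binds' = doMyTransform (mg_binds guts)
--          ; endPass pass binds' (mg_rules guts)  -- dump + lint the new program
--          ; return (guts { mg_binds = binds' }) }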
endPassIO :: HscEnv -> PrintUnqualified
-> CoreToDo -> CoreProgram -> [CoreRule] -> IO ()
-- Used by the IO-ish CorePrep too
endPassIO hsc_env print_unqual pass binds rules
= do { dumpPassResult dflags print_unqual mb_flag
(ppr pass) (pprPassDetails pass) binds rules
; lintPassResult hsc_env pass binds }
where
dflags = hsc_dflags hsc_env
mb_flag = case coreDumpFlag pass of
Just flag | dopt flag dflags -> Just flag
| dopt Opt_D_verbose_core2core dflags -> Just flag
_ -> Nothing
dumpIfSet :: DynFlags -> Bool -> CoreToDo -> SDoc -> SDoc -> IO ()
dumpIfSet dflags dump_me pass extra_info doc
= Err.dumpIfSet dflags dump_me (showSDoc dflags (ppr pass <+> extra_info)) doc
dumpPassResult :: DynFlags
-> PrintUnqualified
-> Maybe DumpFlag -- Just df => show details in a file whose
-- name is specified by df
-> SDoc -- Header
-> SDoc -- Extra info to appear after header
-> CoreProgram -> [CoreRule]
-> IO ()
dumpPassResult dflags unqual mb_flag hdr extra_info binds rules
= do { forM_ mb_flag $ \flag ->
Err.dumpSDoc dflags unqual flag (showSDoc dflags hdr) dump_doc
-- Report result size
-- This has the side effect of forcing the intermediate to be evaluated
-- if it's not already forced by a -ddump flag.
; Err.debugTraceMsg dflags 2 size_doc
}
where
size_doc = sep [text "Result size of" <+> hdr, nest 2 (equals <+> ppr (coreBindsStats binds))]
dump_doc = vcat [ nest 2 extra_info
, size_doc
, blankLine
, pprCoreBindingsWithSize binds
, ppUnless (null rules) pp_rules ]
pp_rules = vcat [ blankLine
, text "------ Local rules for imported ids --------"
, pprRules rules ]
coreDumpFlag :: CoreToDo -> Maybe DumpFlag
coreDumpFlag (CoreDoSimplify {}) = Just Opt_D_verbose_core2core
coreDumpFlag (CoreDoPluginPass {}) = Just Opt_D_verbose_core2core
coreDumpFlag CoreDoFloatInwards = Just Opt_D_verbose_core2core
coreDumpFlag (CoreDoFloatOutwards {}) = Just Opt_D_verbose_core2core
coreDumpFlag CoreLiberateCase = Just Opt_D_verbose_core2core
coreDumpFlag CoreDoStaticArgs = Just Opt_D_verbose_core2core
coreDumpFlag CoreDoCallArity = Just Opt_D_dump_call_arity
coreDumpFlag CoreDoExitify = Just Opt_D_dump_exitify
coreDumpFlag CoreDoStrictness = Just Opt_D_dump_stranal
coreDumpFlag CoreDoWorkerWrapper = Just Opt_D_dump_worker_wrapper
coreDumpFlag CoreDoSpecialising = Just Opt_D_dump_spec
coreDumpFlag CoreDoSpecConstr = Just Opt_D_dump_spec
coreDumpFlag CoreCSE = Just Opt_D_dump_cse
coreDumpFlag CoreDoVectorisation = Just Opt_D_dump_vect
coreDumpFlag CoreDesugar = Just Opt_D_dump_ds
coreDumpFlag CoreDesugarOpt = Just Opt_D_dump_ds
coreDumpFlag CoreTidy = Just Opt_D_dump_simpl
coreDumpFlag CorePrep = Just Opt_D_dump_prep
coreDumpFlag CoreOccurAnal = Just Opt_D_dump_occur_anal
coreDumpFlag CoreDoPrintCore = Nothing
coreDumpFlag (CoreDoRuleCheck {}) = Nothing
coreDumpFlag CoreDoNothing = Nothing
coreDumpFlag (CoreDoPasses {}) = Nothing
{-
************************************************************************
* *
Top-level interfaces
* *
************************************************************************
-}
lintPassResult :: HscEnv -> CoreToDo -> CoreProgram -> IO ()
lintPassResult hsc_env pass binds
| not (gopt Opt_DoCoreLinting dflags)
= return ()
| otherwise
= do { let (warns, errs) = lintCoreBindings dflags pass (interactiveInScope hsc_env) binds
; Err.showPass dflags ("Core Linted result of " ++ showPpr dflags pass)
; displayLintResults dflags pass warns errs binds }
where
dflags = hsc_dflags hsc_env
displayLintResults :: DynFlags -> CoreToDo
-> Bag Err.MsgDoc -> Bag Err.MsgDoc -> CoreProgram
-> IO ()
displayLintResults dflags pass warns errs binds
| not (isEmptyBag errs)
= do { putLogMsg dflags NoReason Err.SevDump noSrcSpan
(defaultDumpStyle dflags)
(vcat [ lint_banner "errors" (ppr pass), Err.pprMessageBag errs
, text "*** Offending Program ***"
, pprCoreBindings binds
, text "*** End of Offense ***" ])
; Err.ghcExit dflags 1 }
| not (isEmptyBag warns)
, not (hasNoDebugOutput dflags)
, showLintWarnings pass
-- If the Core linter encounters an error, output to stderr instead of
-- stdout (#13342)
= putLogMsg dflags NoReason Err.SevInfo noSrcSpan
(defaultDumpStyle dflags)
(lint_banner "warnings" (ppr pass) $$ Err.pprMessageBag (mapBag ($$ blankLine) warns))
| otherwise = return ()
where
lint_banner :: String -> SDoc -> SDoc
lint_banner string pass = text "*** Core Lint" <+> text string
<+> text ": in result of" <+> pass
<+> text "***"
showLintWarnings :: CoreToDo -> Bool
-- Disable Lint warnings on the first simplifier pass, because
-- there may be some INLINE knots still tied, which is tiresomely noisy
showLintWarnings (CoreDoSimplify _ (SimplMode { sm_phase = InitialPhase })) = False
showLintWarnings _ = True
lintInteractiveExpr :: String -> HscEnv -> CoreExpr -> IO ()
lintInteractiveExpr what hsc_env expr
| not (gopt Opt_DoCoreLinting dflags)
= return ()
| Just err <- lintExpr dflags (interactiveInScope hsc_env) expr
= do { display_lint_err err
; Err.ghcExit dflags 1 }
| otherwise
= return ()
where
dflags = hsc_dflags hsc_env
display_lint_err err
= do { putLogMsg dflags NoReason Err.SevDump
noSrcSpan (defaultDumpStyle dflags)
(vcat [ lint_banner "errors" (text what)
, err
, text "*** Offending Program ***"
, pprCoreExpr expr
, text "*** End of Offense ***" ])
; Err.ghcExit dflags 1 }
interactiveInScope :: HscEnv -> [Var]
-- In GHCi we may lint expressions, or bindings arising from 'deriving'
-- clauses, that mention variables bound in the interactive context.
-- These are Local things (see Note [Interactively-bound Ids in GHCi] in HscTypes).
-- So we have to tell Lint about them, lest it reports them as out of scope.
--
-- We do this by finding local-named things that may appear free in the interactive
-- context. This function is pretty revolting and quite possibly not quite right.
-- When we are not in GHCi, the interactive context (hsc_IC hsc_env) is empty
-- so this is a (cheap) no-op.
--
-- See Trac #8215 for an example
interactiveInScope hsc_env
= tyvars ++ ids
where
-- C.f. TcRnDriver.setInteractiveContext, Desugar.deSugarExpr
ictxt = hsc_IC hsc_env
(cls_insts, _fam_insts) = ic_instances ictxt
te1 = mkTypeEnvWithImplicits (ic_tythings ictxt)
te = extendTypeEnvWithIds te1 (map instanceDFunId cls_insts)
ids = typeEnvIds te
tyvars = tyCoVarsOfTypesList $ map idType ids
-- Why the type variables? How can the top level envt have free tyvars?
-- I think it's because of the GHCi debugger, which can bind variables
-- f :: [t] -> [t]
-- where t is a RuntimeUnk (see TcType)
lintCoreBindings :: DynFlags -> CoreToDo -> [Var] -> CoreProgram -> (Bag MsgDoc, Bag MsgDoc)
-- Returns (warnings, errors)
-- If you edit this function, you may need to update the GHC formalism
-- See Note [GHC Formalism]
lintCoreBindings dflags pass local_in_scope binds
= initL dflags flags in_scope_set $
addLoc TopLevelBindings $
lintLetBndrs TopLevel binders $
-- Put all the top-level binders in scope at the start
-- This is because transformation rules can bring something
-- into use 'unexpectedly'
do { checkL (null dups) (dupVars dups)
; checkL (null ext_dups) (dupExtVars ext_dups)
; mapM lint_bind binds }
where
in_scope_set = mkInScopeSet (mkVarSet local_in_scope)
flags = LF { lf_check_global_ids = check_globals
, lf_check_inline_loop_breakers = check_lbs
, lf_check_static_ptrs = check_static_ptrs }
-- See Note [Checking for global Ids]
check_globals = case pass of
CoreTidy -> False
CorePrep -> False
_ -> True
-- See Note [Checking for INLINE loop breakers]
check_lbs = case pass of
CoreDesugar -> False
CoreDesugarOpt -> False
_ -> True
-- See Note [Checking StaticPtrs]
check_static_ptrs | not (xopt LangExt.StaticPointers dflags) = AllowAnywhere
| otherwise = case pass of
CoreDoFloatOutwards _ -> AllowAtTopLevel
CoreTidy -> RejectEverywhere
CorePrep -> AllowAtTopLevel
_ -> AllowAnywhere
binders = bindersOfBinds binds
(_, dups) = removeDups compare binders
    -- ext_dups checks for names with different uniques
    -- but the same External name M.n.  We don't
-- allow this at top level:
-- M.n{r3} = ...
-- M.n{r29} = ...
-- because they both get the same linker symbol
ext_dups = snd (removeDups ord_ext (map Var.varName binders))
ord_ext n1 n2 | Just m1 <- nameModule_maybe n1
, Just m2 <- nameModule_maybe n2
= compare (m1, nameOccName n1) (m2, nameOccName n2)
| otherwise = LT
-- If you edit this function, you may need to update the GHC formalism
-- See Note [GHC Formalism]
lint_bind (Rec prs) = mapM_ (lintSingleBinding TopLevel Recursive) prs
lint_bind (NonRec bndr rhs) = lintSingleBinding TopLevel NonRecursive (bndr,rhs)
{-
************************************************************************
* *
\subsection[lintUnfolding]{lintUnfolding}
* *
************************************************************************
Note [Linting Unfoldings from Interfaces]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We use this to check all top-level unfoldings that come in from interfaces
(it is very painful to catch errors otherwise).
We do not need to call lintUnfolding on unfoldings that are nested within
top-level unfoldings; they are linted when we lint the top-level unfolding;
hence the `TopLevelFlag` on `tcPragExpr` in TcIface.
-}
lintUnfolding :: DynFlags
-> SrcLoc
-> VarSet -- Treat these as in scope
-> CoreExpr
-> Maybe MsgDoc -- Nothing => OK
lintUnfolding dflags locn vars expr
| isEmptyBag errs = Nothing
| otherwise = Just (pprMessageBag errs)
where
in_scope = mkInScopeSet vars
(_warns, errs) = initL dflags defaultLintFlags in_scope linter
linter = addLoc (ImportedUnfolding locn) $
lintCoreExpr expr
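-- A hedged usage sketch (dflags, loc, fvs and unf_rhs stand for values
-- supplied by the caller, e.g. when typechecking an interface file):
--
--   case lintUnfolding dflags loc fvs unf_rhs of
--     Nothing  -> return ()                            -- unfolding is well-typed
--     Just doc -> pprPanic "Interface unfolding failed Lint" doc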
lintExpr :: DynFlags
-> [Var] -- Treat these as in scope
-> CoreExpr
-> Maybe MsgDoc -- Nothing => OK
lintExpr dflags vars expr
| isEmptyBag errs = Nothing
| otherwise = Just (pprMessageBag errs)
where
in_scope = mkInScopeSet (mkVarSet vars)
(_warns, errs) = initL dflags defaultLintFlags in_scope linter
linter = addLoc TopLevelBindings $
lintCoreExpr expr
{-
************************************************************************
* *
\subsection[lintCoreBinding]{lintCoreBinding}
* *
************************************************************************
Check a core binding, returning the list of variables bound.
-}
lintSingleBinding :: TopLevelFlag -> RecFlag -> (Id, CoreExpr) -> LintM ()
-- If you edit this function, you may need to update the GHC formalism
-- See Note [GHC Formalism]
lintSingleBinding top_lvl_flag rec_flag (binder,rhs)
= addLoc (RhsOf binder) $
-- Check the rhs
do { ty <- lintRhs binder rhs
; binder_ty <- applySubstTy (idType binder)
; ensureEqTys binder_ty ty (mkRhsMsg binder (text "RHS") ty)
-- Check that it's not levity-polymorphic
-- Do this first, because otherwise isUnliftedType panics
-- Annoyingly, this duplicates the test in lintIdBdr,
-- because for non-rec lets we call lintSingleBinding first
; checkL (isJoinId binder || not (isTypeLevPoly binder_ty))
(badBndrTyMsg binder (text "levity-polymorphic"))
-- Check the let/app invariant
-- See Note [CoreSyn let/app invariant] in CoreSyn
; checkL ( isJoinId binder
|| not (isUnliftedType binder_ty)
|| (isNonRec rec_flag && exprOkForSpeculation rhs)
|| exprIsLiteralString rhs)
(badBndrTyMsg binder (text "unlifted"))
-- Check that if the binder is top-level or recursive, it's not
-- demanded. Primitive string literals are exempt as there is no
-- computation to perform, see Note [CoreSyn top-level string literals].
; checkL (not (isStrictId binder)
|| (isNonRec rec_flag && not (isTopLevel top_lvl_flag))
|| exprIsLiteralString rhs)
(mkStrictMsg binder)
-- Check that if the binder is at the top level and has type Addr#,
-- that it is a string literal, see
-- Note [CoreSyn top-level string literals].
; checkL (not (isTopLevel top_lvl_flag && binder_ty `eqType` addrPrimTy)
|| exprIsLiteralString rhs)
(mkTopNonLitStrMsg binder)
; flags <- getLintFlags
-- Check that a join-point binder has a valid type
-- NB: lintIdBinder has checked that it is not top-level bound
; case isJoinId_maybe binder of
Nothing -> return ()
Just arity -> checkL (isValidJoinPointType arity binder_ty)
(mkInvalidJoinPointMsg binder binder_ty)
; when (lf_check_inline_loop_breakers flags
&& isStableUnfolding (realIdUnfolding binder)
&& isStrongLoopBreaker (idOccInfo binder)
&& isInlinePragma (idInlinePragma binder))
(addWarnL (text "INLINE binder is (non-rule) loop breaker:" <+> ppr binder))
-- Only non-rule loop breakers inhibit inlining
-- Check whether arity and demand type are consistent (only if demand analysis
-- already happened)
--
-- Note (Apr 2014): this is actually ok. See Note [Demand analysis for trivial right-hand sides]
-- in DmdAnal. After eta-expansion in CorePrep the rhs is no longer trivial.
-- ; let dmdTy = idStrictness binder
-- ; checkL (case dmdTy of
-- StrictSig dmd_ty -> idArity binder >= dmdTypeDepth dmd_ty || exprIsTrivial rhs)
-- (mkArityMsg binder)
-- Check that the binder's arity is within the bounds imposed by
-- the type and the strictness signature. See Note [exprArity invariant]
-- and Note [Trimming arity]
; checkL (typeArity (idType binder) `lengthAtLeast` idArity binder)
(text "idArity" <+> ppr (idArity binder) <+>
text "exceeds typeArity" <+>
ppr (length (typeArity (idType binder))) <> colon <+>
ppr binder)
; case splitStrictSig (idStrictness binder) of
(demands, result_info) | isBotRes result_info ->
checkL (demands `lengthAtLeast` idArity binder)
(text "idArity" <+> ppr (idArity binder) <+>
text "exceeds arity imposed by the strictness signature" <+>
ppr (idStrictness binder) <> colon <+>
ppr binder)
_ -> return ()
; mapM_ (lintCoreRule binder binder_ty) (idCoreRules binder)
; addLoc (UnfoldingOf binder) $
lintIdUnfolding binder binder_ty (idUnfolding binder) }
-- We should check the unfolding, if any, but this is tricky because
-- the unfolding is a SimplifiableCoreExpr. Give up for now.
-- | Checks the RHS of bindings. It only differs from 'lintCoreExpr'
-- in that it doesn't reject occurrences of the function 'makeStatic' when they
-- appear at the top level and @lf_check_static_ptrs == AllowAtTopLevel@, and
-- for join points, it skips the outer lambdas that take arguments to the
-- join point.
--
-- See Note [Checking StaticPtrs].
lintRhs :: Id -> CoreExpr -> LintM OutType
lintRhs bndr rhs
| Just arity <- isJoinId_maybe bndr
= lint_join_lams arity arity True rhs
| AlwaysTailCalled arity <- tailCallInfo (idOccInfo bndr)
= lint_join_lams arity arity False rhs
where
lint_join_lams 0 _ _ rhs
= lintCoreExpr rhs
lint_join_lams n tot enforce (Lam var expr)
= addLoc (LambdaBodyOf var) $
lintBinder LambdaBind var $ \ var' ->
do { body_ty <- lint_join_lams (n-1) tot enforce expr
; return $ mkLamType var' body_ty }
lint_join_lams n tot True _other
= failWithL $ mkBadJoinArityMsg bndr tot (tot-n) rhs
lint_join_lams _ _ False rhs
= markAllJoinsBad $ lintCoreExpr rhs
-- Future join point, not yet eta-expanded
-- Body is not a tail position
-- Allow applications of the data constructor @StaticPtr@ at the top
-- but produce errors otherwise.
lintRhs _bndr rhs = fmap lf_check_static_ptrs getLintFlags >>= go
where
-- Allow occurrences of 'makeStatic' at the top-level but produce errors
-- otherwise.
go AllowAtTopLevel
| (binders0, rhs') <- collectTyBinders rhs
, Just (fun, t, info, e) <- collectMakeStaticArgs rhs'
= markAllJoinsBad $
foldr
-- imitate @lintCoreExpr (Lam ...)@
(\var loopBinders ->
addLoc (LambdaBodyOf var) $
lintBinder LambdaBind var $ \var' ->
do { body_ty <- loopBinders
; return $ mkLamType var' body_ty }
)
-- imitate @lintCoreExpr (App ...)@
(do fun_ty <- lintCoreExpr fun
addLoc (AnExpr rhs') $ lintCoreArgs fun_ty [Type t, info, e]
)
binders0
go _ = markAllJoinsBad $ lintCoreExpr rhs
lintIdUnfolding :: Id -> Type -> Unfolding -> LintM ()
lintIdUnfolding bndr bndr_ty (CoreUnfolding { uf_tmpl = rhs, uf_src = src })
| isStableSource src
= do { ty <- lintRhs bndr rhs
; ensureEqTys bndr_ty ty (mkRhsMsg bndr (text "unfolding") ty) }
lintIdUnfolding bndr bndr_ty (DFunUnfolding { df_con = con, df_bndrs = bndrs
, df_args = args })
= do { ty <- lintBinders LambdaBind bndrs $ \ bndrs' ->
do { res_ty <- lintCoreArgs (dataConRepType con) args
; return (mkLamTypes bndrs' res_ty) }
; ensureEqTys bndr_ty ty (mkRhsMsg bndr (text "dfun unfolding") ty) }
lintIdUnfolding _ _ _
= return () -- Do not Lint unstable unfoldings, because that leads
-- to exponential behaviour; c.f. CoreFVs.idUnfoldingVars
{-
Note [Checking for INLINE loop breakers]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's very suspicious if a strong loop breaker is marked INLINE.
However, the desugarer generates instance methods with INLINE pragmas
that form a mutually recursive group. Only after a round of
simplification are they unravelled. So we suppress the test for
the desugarer.
************************************************************************
* *
\subsection[lintCoreExpr]{lintCoreExpr}
* *
************************************************************************
-}
-- For OutType, OutKind, the substitution has been applied,
-- but has not been linted yet
type LintedType = Type -- Substitution applied, and type is linted
type LintedKind = Kind
lintCoreExpr :: CoreExpr -> LintM OutType
-- The returned type has the substitution from the monad
-- already applied to it:
-- lintCoreExpr e subst = exprType (subst e)
--
-- The returned "type" can be a kind, if the expression is (Type ty)
-- If you edit this function, you may need to update the GHC formalism
-- See Note [GHC Formalism]
lintCoreExpr (Var var)
= lintVarOcc var 0
lintCoreExpr (Lit lit)
= return (literalType lit)
lintCoreExpr (Cast expr co)
= do { expr_ty <- markAllJoinsBad $ lintCoreExpr expr
; co' <- applySubstCo co
; (_, k2, from_ty, to_ty, r) <- lintCoercion co'
; lintL (classifiesTypeWithValues k2)
(text "Target of cast not # or *:" <+> ppr co)
; lintRole co' Representational r
; ensureEqTys from_ty expr_ty (mkCastErr expr co' from_ty expr_ty)
; return to_ty }
lintCoreExpr (Tick tickish expr)
= do case tickish of
Breakpoint _ ids -> forM_ ids $ \id -> do
checkDeadIdOcc id
lookupIdInScope id
_ -> return ()
markAllJoinsBadIf block_joins $ lintCoreExpr expr
where
block_joins = not (tickish `tickishScopesLike` SoftScope)
-- TODO Consider whether this is the correct rule. It is consistent with
-- the simplifier's behaviour - cost-centre-scoped ticks become part of
-- the continuation, and thus they behave like part of an evaluation
-- context, but soft-scoped and non-scoped ticks simply wrap the result
-- (see Simplify.simplTick).
lintCoreExpr (Let (NonRec tv (Type ty)) body)
| isTyVar tv
= -- See Note [Linting type lets]
do { ty' <- applySubstTy ty
; lintTyBndr tv $ \ tv' ->
do { addLoc (RhsOf tv) $ lintTyKind tv' ty'
-- Now extend the substitution so we
-- take advantage of it in the body
; extendSubstL tv ty' $
addLoc (BodyOfLetRec [tv]) $
lintCoreExpr body } }
lintCoreExpr (Let (NonRec bndr rhs) body)
| isId bndr
= do { lintSingleBinding NotTopLevel NonRecursive (bndr,rhs)
; addLoc (BodyOfLetRec [bndr])
(lintIdBndr NotTopLevel LetBind bndr $ \_ ->
addGoodJoins [bndr] $
lintCoreExpr body) }
| otherwise
= failWithL (mkLetErr bndr rhs) -- Not quite accurate
lintCoreExpr e@(Let (Rec pairs) body)
= lintLetBndrs NotTopLevel bndrs $
addGoodJoins bndrs $
do { -- Check that the list of pairs is non-empty
checkL (not (null pairs)) (emptyRec e)
-- Check that there are no duplicated binders
; checkL (null dups) (dupVars dups)
-- Check that either all the binders are joins, or none
; checkL (all isJoinId bndrs || all (not . isJoinId) bndrs) $
mkInconsistentRecMsg bndrs
; mapM_ (lintSingleBinding NotTopLevel Recursive) pairs
; addLoc (BodyOfLetRec bndrs) (lintCoreExpr body) }
where
bndrs = map fst pairs
(_, dups) = removeDups compare bndrs
lintCoreExpr e@(App _ _)
= addLoc (AnExpr e) $
do { fun_ty <- lintCoreFun fun (length args)
; lintCoreArgs fun_ty args }
where
(fun, args) = collectArgs e
lintCoreExpr (Lam var expr)
= addLoc (LambdaBodyOf var) $
markAllJoinsBad $
lintBinder LambdaBind var $ \ var' ->
do { body_ty <- lintCoreExpr expr
; return $ mkLamType var' body_ty }
lintCoreExpr e@(Case scrut var alt_ty alts) =
-- Check the scrutinee
do { let scrut_diverges = exprIsBottom scrut
; scrut_ty <- markAllJoinsBad $ lintCoreExpr scrut
; (alt_ty, _) <- lintInTy alt_ty
; (var_ty, _) <- lintInTy (idType var)
-- We used to try to check whether a case expression with no
-- alternatives was legitimate, but this didn't work.
-- See Note [No alternatives lint check] for details.
-- See Note [Rules for floating-point comparisons] in PrelRules
; let isLitPat (LitAlt _, _ , _) = True
isLitPat _ = False
; checkL (not $ isFloatingTy scrut_ty && any isLitPat alts)
(ptext (sLit $ "Lint warning: Scrutinising floating-point " ++
"expression with literal pattern in case " ++
"analysis (see Trac #9238).")
$$ text "scrut" <+> ppr scrut)
; case tyConAppTyCon_maybe (idType var) of
Just tycon
| debugIsOn
, isAlgTyCon tycon
, not (isAbstractTyCon tycon)
, null (tyConDataCons tycon)
, not scrut_diverges
-> pprTrace "Lint warning: case binder's type has no constructors" (ppr var <+> ppr (idType var))
-- This can legitimately happen for type families
$ return ()
_otherwise -> return ()
-- Don't use lintIdBndr on var, because unboxed tuple is legitimate
; subst <- getTCvSubst
; ensureEqTys var_ty scrut_ty (mkScrutMsg var var_ty scrut_ty subst)
; lintIdBndr NotTopLevel CaseBind var $ \_ ->
do { -- Check the alternatives
mapM_ (lintCoreAlt scrut_ty alt_ty) alts
; checkCaseAlts e scrut_ty alts
; return alt_ty } }
-- This case can't happen; linting types in expressions gets routed through
-- lintCoreArgs
lintCoreExpr (Type ty)
= failWithL (text "Type found as expression" <+> ppr ty)
lintCoreExpr (Coercion co)
= do { (k1, k2, ty1, ty2, role) <- lintInCo co
; return (mkHeteroCoercionType role k1 k2 ty1 ty2) }
----------------------
lintVarOcc :: Var -> Int -- Number of arguments (type or value) being passed
-> LintM Type -- returns type of the *variable*
lintVarOcc var nargs
= do { checkL (isNonCoVarId var)
(text "Non term variable" <+> ppr var)
       -- Check that the type of the occurrence is the same
-- as the type of the binding site
; ty <- applySubstTy (idType var)
; var' <- lookupIdInScope var
; let ty' = idType var'
; ensureEqTys ty ty' $ mkBndrOccTypeMismatchMsg var' var ty' ty
-- Check for a nested occurrence of the StaticPtr constructor.
-- See Note [Checking StaticPtrs].
; lf <- getLintFlags
; when (nargs /= 0 && lf_check_static_ptrs lf /= AllowAnywhere) $
checkL (idName var /= makeStaticName) $
text "Found makeStatic nested in an expression"
; checkDeadIdOcc var
; checkJoinOcc var nargs
; return (idType var') }
lintCoreFun :: CoreExpr
-> Int -- Number of arguments (type or val) being passed
-> LintM Type -- Returns type of the *function*
lintCoreFun (Var var) nargs
= lintVarOcc var nargs
lintCoreFun (Lam var body) nargs
-- Act like lintCoreExpr of Lam, but *don't* call markAllJoinsBad; see
-- Note [Beta redexes]
| nargs /= 0
= addLoc (LambdaBodyOf var) $
lintBinder LambdaBind var $ \ var' ->
do { body_ty <- lintCoreFun body (nargs - 1)
; return $ mkLamType var' body_ty }
lintCoreFun expr nargs
= markAllJoinsBadIf (nargs /= 0) $
lintCoreExpr expr
------------------
checkDeadIdOcc :: Id -> LintM ()
-- Occurrences of an Id should never be dead....
-- except when we are checking a case pattern
checkDeadIdOcc id
| isDeadOcc (idOccInfo id)
= do { in_case <- inCasePat
; checkL in_case
(text "Occurrence of a dead Id" <+> ppr id) }
| otherwise
= return ()
------------------
checkJoinOcc :: Id -> JoinArity -> LintM ()
-- Check that if the occurrence is a JoinId, then so is the
-- binding site, and it's a valid join Id
checkJoinOcc var n_args
| Just join_arity_occ <- isJoinId_maybe var
= do { mb_join_arity_bndr <- lookupJoinId var
; case mb_join_arity_bndr of {
Nothing -> -- Binder is not a join point
addErrL (invalidJoinOcc var) ;
Just join_arity_bndr ->
do { checkL (join_arity_bndr == join_arity_occ) $
-- Arity differs at binding site and occurrence
mkJoinBndrOccMismatchMsg var join_arity_bndr join_arity_occ
; checkL (n_args == join_arity_occ) $
-- Arity doesn't match #args
mkBadJumpMsg var join_arity_occ n_args } } }
| otherwise
= return ()
{-
Note [No alternatives lint check]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Case expressions with no alternatives are odd beasts, and it would seem
like they would be worth looking at in the linter (cf Trac #10180). We
used to check two things:
* exprIsHNF is false: it would *seem* to be terribly wrong if
the scrutinee was already in head normal form.
* exprIsBottom is true: we should be able to see why GHC believes the
scrutinee is diverging for sure.
It was already known that the second test was not entirely reliable.
Unfortunately (Trac #13990), the first test turned out not to be reliable
either. Getting the checks right turns out to be somewhat complicated.
For example, suppose we have (comment 8)
data T a where
TInt :: T Int
absurdTBool :: T Bool -> a
absurdTBool v = case v of
data Foo = Foo !(T Bool)
absurdFoo :: Foo -> a
absurdFoo (Foo x) = absurdTBool x
GHC initially accepts the empty case because of the GADT conditions. But then
we inline absurdTBool, getting
absurdFoo (Foo x) = case x of
x is in normal form (because the Foo constructor is strict) but the
case is empty. To avoid this problem, GHC would have to recognize
that matching on Foo x is already absurd, which is not so easy.
More generally, we don't really know all the ways that GHC can
lose track of why an expression is bottom, so we shouldn't make too
much fuss when that happens.
Note [Beta redexes]
~~~~~~~~~~~~~~~~~~~
Consider:
join j @x y z = ... in
(\@x y z -> jump j @x y z) @t e1 e2
This is clearly ill-typed, since the jump is inside both an application and a
lambda, either of which is enough to disqualify it as a tail call (see Note
[Invariants on join points] in CoreSyn). However, strictly from a
lambda-calculus perspective, the term doesn't go wrong---after the two beta
reductions, the jump *is* a tail call and everything is fine.
Why would we want to allow this when we have let? One reason is that a compound
beta redex (that is, one with more than one argument) has different scoping
rules: naively reducing the above example using lets will capture any free
occurrence of y in e2. More fundamentally, type lets are tricky; many passes,
such as Float Out, tacitly assume that the incoming program's type lets have
all been dealt with by the simplifier. Thus we don't want to let-bind any types
in, say, CoreSubst.simpleOptPgm, which in some circumstances can run immediately
before Float Out.
All that said, currently CoreSubst.simpleOptPgm is the only thing using this
loophole, doing so to avoid re-traversing large functions (beta-reducing a type
lambda without introducing a type let requires a substitution). TODO: Improve
simpleOptPgm so that we can forget all this ever happened.
************************************************************************
* *
\subsection[lintCoreArgs]{lintCoreArgs}
* *
************************************************************************
The basic version of these functions checks that the argument is a
subtype of the required type, as one would expect.
-}
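-- As a rough illustration (not compiler output; 'e' is a placeholder for an
-- argument expression of type Int), linting the application (id @Int e)
-- threads the function type through lintCoreArg one argument at a time:
--
--   lintCoreArg (forall a. a -> a) (Type Int)  ~~>  Int -> Int
--   lintCoreArg (Int -> Int)       e           ~~>  Int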
lintCoreArgs :: OutType -> [CoreArg] -> LintM OutType
lintCoreArgs fun_ty args = foldM lintCoreArg fun_ty args
lintCoreArg :: OutType -> CoreArg -> LintM OutType
lintCoreArg fun_ty (Type arg_ty)
= do { checkL (not (isCoercionTy arg_ty))
(text "Unnecessary coercion-to-type injection:"
<+> ppr arg_ty)
; arg_ty' <- applySubstTy arg_ty
; lintTyApp fun_ty arg_ty' }
lintCoreArg fun_ty arg
= do { arg_ty <- markAllJoinsBad $ lintCoreExpr arg
-- See Note [Levity polymorphism invariants] in CoreSyn
; lintL (not (isTypeLevPoly arg_ty))
(text "Levity-polymorphic argument:" <+>
(ppr arg <+> dcolon <+> parens (ppr arg_ty <+> dcolon <+> ppr (typeKind arg_ty))))
-- check for levity polymorphism first, because otherwise isUnliftedType panics
; checkL (not (isUnliftedType arg_ty) || exprOkForSpeculation arg)
(mkLetAppMsg arg)
; lintValApp arg fun_ty arg_ty }
-----------------
lintAltBinders :: OutType -- Scrutinee type
-> OutType -- Constructor type
-> [OutVar] -- Binders
-> LintM ()
-- If you edit this function, you may need to update the GHC formalism
-- See Note [GHC Formalism]
lintAltBinders scrut_ty con_ty []
= ensureEqTys con_ty scrut_ty (mkBadPatMsg con_ty scrut_ty)
lintAltBinders scrut_ty con_ty (bndr:bndrs)
| isTyVar bndr
= do { con_ty' <- lintTyApp con_ty (mkTyVarTy bndr)
; lintAltBinders scrut_ty con_ty' bndrs }
| otherwise
= do { con_ty' <- lintValApp (Var bndr) con_ty (idType bndr)
; lintAltBinders scrut_ty con_ty' bndrs }
-----------------
lintTyApp :: OutType -> OutType -> LintM OutType
lintTyApp fun_ty arg_ty
| Just (tv,body_ty) <- splitForAllTy_maybe fun_ty
= do { lintTyKind tv arg_ty
; in_scope <- getInScope
-- substTy needs the set of tyvars in scope to avoid generating
-- uniques that are already in scope.
-- See Note [The substitution invariant] in TyCoRep
; return (substTyWithInScope in_scope [tv] [arg_ty] body_ty) }
| otherwise
= failWithL (mkTyAppMsg fun_ty arg_ty)
-----------------
lintValApp :: CoreExpr -> OutType -> OutType -> LintM OutType
lintValApp arg fun_ty arg_ty
| Just (arg,res) <- splitFunTy_maybe fun_ty
= do { ensureEqTys arg arg_ty err1
; return res }
| otherwise
= failWithL err2
where
err1 = mkAppMsg fun_ty arg_ty arg
err2 = mkNonFunAppMsg fun_ty arg_ty arg
lintTyKind :: OutTyVar -> OutType -> LintM ()
-- Both args have had substitution applied
-- If you edit this function, you may need to update the GHC formalism
-- See Note [GHC Formalism]
lintTyKind tyvar arg_ty
-- Arg type might be boxed for a function with an uncommitted
-- tyvar; notably this is used so that we can give
-- error :: forall a:*. String -> a
-- and then apply it to both boxed and unboxed types.
= do { arg_kind <- lintType arg_ty
; unless (arg_kind `eqType` tyvar_kind)
(addErrL (mkKindErrMsg tyvar arg_ty $$ (text "Linted Arg kind:" <+> ppr arg_kind))) }
where
tyvar_kind = tyVarKind tyvar
{-
************************************************************************
* *
\subsection[lintCoreAlts]{lintCoreAlts}
* *
************************************************************************
-}
checkCaseAlts :: CoreExpr -> OutType -> [CoreAlt] -> LintM ()
-- a) Check that the alts are non-empty
-- b1) Check that the DEFAULT comes first, if it exists
-- b2) Check that the others are in increasing order
-- c) Check that there's a default for infinite types
-- NB: Algebraic cases are not necessarily exhaustive, because
-- the simplifier correctly eliminates case that can't
-- possibly match.
checkCaseAlts e ty alts =
do { checkL (all non_deflt con_alts) (mkNonDefltMsg e)
; checkL (increasing_tag con_alts) (mkNonIncreasingAltsMsg e)
-- For types Int#, Word# with an infinite (well, large!) number of
-- possible values, there should usually be a DEFAULT case
-- But (see Note [Empty case alternatives] in CoreSyn) it's ok to
-- have *no* case alternatives.
-- In effect, this is a kind of partial test. I suppose it's possible
-- that we might *know* that 'x' was 1 or 2, in which case
-- case x of { 1 -> e1; 2 -> e2 }
-- would be fine.
; checkL (isJust maybe_deflt || not is_infinite_ty || null alts)
(nonExhaustiveAltsMsg e) }
where
(con_alts, maybe_deflt) = findDefault alts
-- Check that successive alternatives have increasing tags
increasing_tag (alt1 : rest@( alt2 : _)) = alt1 `ltAlt` alt2 && increasing_tag rest
increasing_tag _ = True
non_deflt (DEFAULT, _, _) = False
non_deflt _ = True
is_infinite_ty = case tyConAppTyCon_maybe ty of
Nothing -> False
Just tycon -> isPrimTyCon tycon
lintAltExpr :: CoreExpr -> OutType -> LintM ()
lintAltExpr expr ann_ty
= do { actual_ty <- lintCoreExpr expr
; ensureEqTys actual_ty ann_ty (mkCaseAltMsg expr actual_ty ann_ty) }
lintCoreAlt :: OutType -- Type of scrutinee
-> OutType -- Type of the alternative
-> CoreAlt
-> LintM ()
-- If you edit this function, you may need to update the GHC formalism
-- See Note [GHC Formalism]
lintCoreAlt _ alt_ty (DEFAULT, args, rhs) =
do { lintL (null args) (mkDefaultArgsMsg args)
; lintAltExpr rhs alt_ty }
lintCoreAlt scrut_ty alt_ty (LitAlt lit, args, rhs)
| litIsLifted lit
= failWithL integerScrutinisedMsg
| otherwise
= do { lintL (null args) (mkDefaultArgsMsg args)
; ensureEqTys lit_ty scrut_ty (mkBadPatMsg lit_ty scrut_ty)
; lintAltExpr rhs alt_ty }
where
lit_ty = literalType lit
lintCoreAlt scrut_ty alt_ty alt@(DataAlt con, args, rhs)
| isNewTyCon (dataConTyCon con)
= addErrL (mkNewTyDataConAltMsg scrut_ty alt)
| Just (tycon, tycon_arg_tys) <- splitTyConApp_maybe scrut_ty
= addLoc (CaseAlt alt) $ do
{ -- First instantiate the universally quantified
-- type variables of the data constructor
         -- We've already checked (in the guard above) that scrut_ty is a TyConApp
lintL (tycon == dataConTyCon con) (mkBadConMsg tycon con)
; let con_payload_ty = piResultTys (dataConRepType con) tycon_arg_tys
-- And now bring the new binders into scope
; lintBinders CasePatBind args $ \ args' -> do
{ addLoc (CasePat alt) (lintAltBinders scrut_ty con_payload_ty args')
; lintAltExpr rhs alt_ty } }
| otherwise -- Scrut-ty is wrong shape
= addErrL (mkBadAltMsg scrut_ty alt)
{-
************************************************************************
* *
\subsection[lint-types]{Types}
* *
************************************************************************
-}
-- When we lint binders, we (one at a time and in order):
-- 1. Lint var types or kinds (possibly substituting)
-- 2. Add the binder to the in scope set, and if its a coercion var,
-- we may extend the substitution to reflect its (possibly) new kind
lintBinders :: BindingSite -> [Var] -> ([Var] -> LintM a) -> LintM a
lintBinders _ [] linterF = linterF []
lintBinders site (var:vars) linterF = lintBinder site var $ \var' ->
lintBinders site vars $ \ vars' ->
linterF (var':vars')
-- If you edit this function, you may need to update the GHC formalism
-- See Note [GHC Formalism]
lintBinder :: BindingSite -> Var -> (Var -> LintM a) -> LintM a
lintBinder site var linterF
| isTyVar var = lintTyBndr var linterF
| isCoVar var = lintCoBndr var linterF
| otherwise = lintIdBndr NotTopLevel site var linterF
lintTyBndr :: InTyVar -> (OutTyVar -> LintM a) -> LintM a
lintTyBndr tv thing_inside
= do { subst <- getTCvSubst
; let (subst', tv') = substTyVarBndr subst tv
; lintKind (varType tv')
; updateTCvSubst subst' (thing_inside tv') }
lintCoBndr :: InCoVar -> (OutCoVar -> LintM a) -> LintM a
lintCoBndr cv thing_inside
= do { subst <- getTCvSubst
; let (subst', cv') = substCoVarBndr subst cv
; lintKind (varType cv')
; lintL (isCoercionType (varType cv'))
(text "CoVar with non-coercion type:" <+> pprTyVar cv)
; updateTCvSubst subst' (thing_inside cv') }
lintLetBndrs :: TopLevelFlag -> [Var] -> LintM a -> LintM a
lintLetBndrs top_lvl ids linterF
= go ids
where
go [] = linterF
go (id:ids) = lintIdBndr top_lvl LetBind id $ \_ ->
go ids
lintIdBndr :: TopLevelFlag -> BindingSite
-> InVar -> (OutVar -> LintM a) -> LintM a
-- Do substitution on the type of a binder and add the var with this
-- new type to the in-scope set of the second argument
-- ToDo: lint its rules
lintIdBndr top_lvl bind_site id linterF
= ASSERT2( isId id, ppr id )
do { flags <- getLintFlags
; checkL (not (lf_check_global_ids flags) || isLocalId id)
(text "Non-local Id binder" <+> ppr id)
-- See Note [Checking for global Ids]
-- Check that if the binder is nested, it is not marked as exported
; checkL (not (isExportedId id) || is_top_lvl)
(mkNonTopExportedMsg id)
-- Check that if the binder is nested, it does not have an external name
; checkL (not (isExternalName (Var.varName id)) || is_top_lvl)
(mkNonTopExternalNameMsg id)
; (ty, k) <- lintInTy (idType id)
-- See Note [Levity polymorphism invariants] in CoreSyn
; lintL (isJoinId id || not (isKindLevPoly k))
(text "Levity-polymorphic binder:" <+>
(ppr id <+> dcolon <+> parens (ppr ty <+> dcolon <+> ppr k)))
-- Check that a join-id is a not-top-level let-binding
; when (isJoinId id) $
checkL (not is_top_lvl && is_let_bind) $
mkBadJoinBindMsg id
; let id' = setIdType id ty
; addInScopeVar id' $ (linterF id') }
where
is_top_lvl = isTopLevel top_lvl
is_let_bind = case bind_site of
LetBind -> True
_ -> False
{-
%************************************************************************
%* *
Types
%* *
%************************************************************************
-}
lintInTy :: InType -> LintM (LintedType, LintedKind)
-- Types only, not kinds
-- Check the type, and apply the substitution to it
-- See Note [Linting type lets]
lintInTy ty
= addLoc (InType ty) $
do { ty' <- applySubstTy ty
; k <- lintType ty'
; lintKind k
; return (ty', k) }
checkTyCon :: TyCon -> LintM ()
checkTyCon tc
= checkL (not (isTcTyCon tc)) (text "Found TcTyCon:" <+> ppr tc)
-------------------
lintType :: OutType -> LintM LintedKind
-- The returned Kind has itself been linted
-- If you edit this function, you may need to update the GHC formalism
-- See Note [GHC Formalism]
lintType (TyVarTy tv)
= do { checkL (isTyVar tv) (mkBadTyVarMsg tv)
; lintTyCoVarInScope tv
; return (tyVarKind tv) }
-- We checked its kind when we added it to the envt
lintType ty@(AppTy t1 t2)
| TyConApp {} <- t1
= failWithL $ text "TyConApp to the left of AppTy:" <+> ppr ty
| otherwise
= do { k1 <- lintType t1
; k2 <- lintType t2
; lint_ty_app ty k1 [(t2,k2)] }
lintType ty@(TyConApp tc tys)
| Just ty' <- coreView ty
= lintType ty' -- Expand type synonyms, so that we do not bogusly complain
-- about un-saturated type synonyms
-- We should never see a saturated application of funTyCon; such applications
-- should be represented with the FunTy constructor. See Note [Linting
-- function types] and Note [Representation of function types].
| isFunTyCon tc
, tys `lengthIs` 4
= failWithL (hang (text "Saturated application of (->)") 2 (ppr ty))
| isTypeSynonymTyCon tc || isTypeFamilyTyCon tc
-- Also type synonyms and type families
, tys `lengthLessThan` tyConArity tc
= failWithL (hang (text "Un-saturated type application") 2 (ppr ty))
| otherwise
= do { checkTyCon tc
; ks <- mapM lintType tys
; lint_ty_app ty (tyConKind tc) (tys `zip` ks) }
-- arrows can relate *unlifted* kinds, so this has to be separate from
-- a dependent forall.
lintType ty@(FunTy t1 t2)
= do { k1 <- lintType t1
; k2 <- lintType t2
; lintArrow (text "type or kind" <+> quotes (ppr ty)) k1 k2 }
lintType t@(ForAllTy (TvBndr tv _vis) ty)
= do { lintL (isTyVar tv) (text "Covar bound in type:" <+> ppr t)
; lintTyBndr tv $ \tv' ->
do { k <- lintType ty
; lintL (not (tv' `elemVarSet` tyCoVarsOfType k))
(text "Variable escape in forall:" <+> ppr t)
; lintL (classifiesTypeWithValues k)
(text "Non-* and non-# kind in forall:" <+> ppr t)
; return k }}
lintType ty@(LitTy l) = lintTyLit l >> return (typeKind ty)
lintType (CastTy ty co)
= do { k1 <- lintType ty
; (k1', k2) <- lintStarCoercion co
; ensureEqTys k1 k1' (mkCastErr ty co k1' k1)
; return k2 }
lintType (CoercionTy co)
= do { (k1, k2, ty1, ty2, r) <- lintCoercion co
; return $ mkHeteroCoercionType r k1 k2 ty1 ty2 }
lintKind :: OutKind -> LintM ()
-- If you edit this function, you may need to update the GHC formalism
-- See Note [GHC Formalism]
lintKind k = do { sk <- lintType k
; unless (classifiesTypeWithValues sk)
(addErrL (hang (text "Ill-kinded kind:" <+> ppr k)
2 (text "has kind:" <+> ppr sk))) }
-- confirms that a type is really *
lintStar :: SDoc -> OutKind -> LintM ()
lintStar doc k
= lintL (classifiesTypeWithValues k)
(text "Non-*-like kind when *-like expected:" <+> ppr k $$
text "when checking" <+> doc)
lintArrow :: SDoc -> LintedKind -> LintedKind -> LintM LintedKind
-- If you edit this function, you may need to update the GHC formalism
-- See Note [GHC Formalism]
lintArrow what k1 k2 -- Eg lintArrow "type or kind `blah'" k1 k2
                     -- or lintArrow "coercion `blah'" k1 k2
= do { unless (classifiesTypeWithValues k1) (addErrL (msg (text "argument") k1))
; unless (classifiesTypeWithValues k2) (addErrL (msg (text "result") k2))
; return liftedTypeKind }
where
msg ar k
= vcat [ hang (text "Ill-kinded" <+> ar)
2 (text "in" <+> what)
, what <+> text "kind:" <+> ppr k ]
lint_ty_app :: Type -> LintedKind -> [(LintedType,LintedKind)] -> LintM LintedKind
lint_ty_app ty k tys
= lint_app (text "type" <+> quotes (ppr ty)) k tys
----------------
lint_co_app :: Coercion -> LintedKind -> [(LintedType,LintedKind)] -> LintM LintedKind
lint_co_app ty k tys
= lint_app (text "coercion" <+> quotes (ppr ty)) k tys
----------------
lintTyLit :: TyLit -> LintM ()
lintTyLit (NumTyLit n)
| n >= 0 = return ()
| otherwise = failWithL msg
where msg = text "Negative type literal:" <+> integer n
lintTyLit (StrTyLit _) = return ()
lint_app :: SDoc -> LintedKind -> [(LintedType,LintedKind)] -> LintM Kind
-- (lint_app d fun_kind arg_tys)
-- We have an application (f arg_ty1 .. arg_tyn),
-- where f :: fun_kind
-- Takes care of linting the OutTypes
-- If you edit this function, you may need to update the GHC formalism
-- See Note [GHC Formalism]
lint_app doc kfn kas
= do { in_scope <- getInScope
-- We need the in_scope set to satisfy the invariant in
-- Note [The substitution invariant] in TyCoRep
; foldlM (go_app in_scope) kfn kas }
where
fail_msg extra = vcat [ hang (text "Kind application error in") 2 doc
, nest 2 (text "Function kind =" <+> ppr kfn)
, nest 2 (text "Arg kinds =" <+> ppr kas)
, extra ]
go_app in_scope kfn tka
| Just kfn' <- coreView kfn
= go_app in_scope kfn' tka
go_app _ (FunTy kfa kfb) tka@(_,ka)
= do { unless (ka `eqType` kfa) $
addErrL (fail_msg (text "Fun:" <+> (ppr kfa $$ ppr tka)))
; return kfb }
go_app in_scope (ForAllTy (TvBndr kv _vis) kfn) tka@(ta,ka)
= do { let kv_kind = tyVarKind kv
; unless (ka `eqType` kv_kind) $
addErrL (fail_msg (text "Forall:" <+> (ppr kv $$ ppr kv_kind $$ ppr tka)))
; return (substTyWithInScope in_scope [kv] [ta] kfn) }
go_app _ kfn ka
= failWithL (fail_msg (text "Not a fun:" <+> (ppr kfn $$ ppr ka)))
{- *********************************************************************
* *
Linting rules
* *
********************************************************************* -}
lintCoreRule :: OutVar -> OutType -> CoreRule -> LintM ()
lintCoreRule _ _ (BuiltinRule {})
= return () -- Don't bother
lintCoreRule fun fun_ty rule@(Rule { ru_name = name, ru_bndrs = bndrs
, ru_args = args, ru_rhs = rhs })
= lintBinders LambdaBind bndrs $ \ _ ->
do { lhs_ty <- foldM lintCoreArg fun_ty args
; rhs_ty <- case isJoinId_maybe fun of
Just join_arity
-> do { checkL (args `lengthIs` join_arity) $
mkBadJoinPointRuleMsg fun join_arity rule
-- See Note [Rules for join points]
; lintCoreExpr rhs }
_ -> markAllJoinsBad $ lintCoreExpr rhs
; ensureEqTys lhs_ty rhs_ty $
(rule_doc <+> vcat [ text "lhs type:" <+> ppr lhs_ty
, text "rhs type:" <+> ppr rhs_ty ])
; let bad_bndrs = filter is_bad_bndr bndrs
; checkL (null bad_bndrs)
(rule_doc <+> text "unbound" <+> ppr bad_bndrs)
-- See Note [Linting rules]
}
where
rule_doc = text "Rule" <+> doubleQuotes (ftext name) <> colon
lhs_fvs = exprsFreeVars args
rhs_fvs = exprFreeVars rhs
is_bad_bndr :: Var -> Bool
-- See Note [Unbound RULE binders] in Rules
is_bad_bndr bndr = not (bndr `elemVarSet` lhs_fvs)
&& bndr `elemVarSet` rhs_fvs
&& isNothing (isReflCoVar_maybe bndr)
{- Note [Linting rules]
~~~~~~~~~~~~~~~~~~~~~~~
It's very bad if simplifying a rule means that one of the template
variables (ru_bndrs) that /is/ mentioned on the RHS becomes
not-mentioned in the LHS (ru_args). How can that happen? Well, in
Trac #10602, SpecConstr stupidly constructed a rule like
forall x,c1,c2.
f (x |> c1 |> c2) = ....
But simplExpr collapses those coercions into one. (Indeed in
Trac #10602, it collapsed to the identity and was removed altogether.)
We don't have a great story for what to do here, but at least
this check will nail it.
NB (Trac #11643): it's possible that a variable listed in the
binders becomes not-mentioned on both LHS and RHS. Here's a silly
example:
RULE forall x y. f (g x y) = g (x+1) (y-1)
And suppose worker/wrapper decides that 'x' is Absent. Then
we'll end up with
RULE forall x y. f ($gw y) = $gw (x+1)
This seems sufficiently obscure that there isn't enough payoff to
try to trim the forall'd binder list.
Note [Rules for join points]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
A join point cannot be partially applied. However, the left-hand side of a rule
for a join point is effectively a *pattern*, not a piece of code, so there's an
argument to be made for allowing a situation like this:
join $sj :: Int -> Int -> String
$sj n m = ...
j :: forall a. Eq a => a -> a -> String
{-# RULES "SPEC j" jump j @ Int $dEq = jump $sj #-}
j @a $dEq x y = ...
Applying this rule can't turn a well-typed program into an ill-typed one, so
conceivably we could allow it. But we can always eta-expand such an
"undersaturated" rule (see 'CoreArity.etaExpandToJoinPointRule'), and in fact
the simplifier would have to in order to deal with the RHS. So we take a
conservative view and don't allow undersaturated rules for join points. See
Note [Rules and join points] in OccurAnal for further discussion.
-}
{-
************************************************************************
* *
Linting coercions
* *
************************************************************************
-}
lintInCo :: InCoercion -> LintM (LintedKind, LintedKind, LintedType, LintedType, Role)
-- Check the coercion, and apply the substitution to it
-- See Note [Linting type lets]
lintInCo co
= addLoc (InCo co) $
do { co' <- applySubstCo co
; lintCoercion co' }
-- lints a coercion, confirming that its lh kind and its rh kind are both *
-- also ensures that the role is Nominal
lintStarCoercion :: OutCoercion -> LintM (LintedType, LintedType)
lintStarCoercion g
= do { (k1, k2, t1, t2, r) <- lintCoercion g
; lintStar (text "the kind of the left type in" <+> ppr g) k1
; lintStar (text "the kind of the right type in" <+> ppr g) k2
; lintRole g Nominal r
; return (t1, t2) }
lintCoercion :: OutCoercion -> LintM (LintedKind, LintedKind, LintedType, LintedType, Role)
-- Check the kind of a coercion term, returning the kind
-- Post-condition: the returned OutTypes are lint-free
--
-- If lintCoercion co = (k1, k2, s1, s2, r)
-- then co :: s1 ~r s2
-- s1 :: k2
-- s2 :: k2
-- If you edit this function, you may need to update the GHC formalism
-- See Note [GHC Formalism]
lintCoercion (Refl r ty)
= do { k <- lintType ty
; return (k, k, ty, ty, r) }
lintCoercion co@(TyConAppCo r tc cos)
| tc `hasKey` funTyConKey
, [_rep1,_rep2,_co1,_co2] <- cos
= do { failWithL (text "Saturated TyConAppCo (->):" <+> ppr co)
} -- All saturated TyConAppCos should be FunCos
| Just {} <- synTyConDefn_maybe tc
= failWithL (text "Synonym in TyConAppCo:" <+> ppr co)
| otherwise
= do { checkTyCon tc
; (k's, ks, ss, ts, rs) <- mapAndUnzip5M lintCoercion cos
; k' <- lint_co_app co (tyConKind tc) (ss `zip` k's)
; k <- lint_co_app co (tyConKind tc) (ts `zip` ks)
; _ <- zipWith3M lintRole cos (tyConRolesX r tc) rs
; return (k', k, mkTyConApp tc ss, mkTyConApp tc ts, r) }
lintCoercion co@(AppCo co1 co2)
| TyConAppCo {} <- co1
= failWithL (text "TyConAppCo to the left of AppCo:" <+> ppr co)
| Refl _ (TyConApp {}) <- co1
= failWithL (text "Refl (TyConApp ...) to the left of AppCo:" <+> ppr co)
| otherwise
= do { (k1, k2, s1, s2, r1) <- lintCoercion co1
; (k'1, k'2, t1, t2, r2) <- lintCoercion co2
; k3 <- lint_co_app co k1 [(t1,k'1)]
; k4 <- lint_co_app co k2 [(t2,k'2)]
; if r1 == Phantom
then lintL (r2 == Phantom || r2 == Nominal)
(text "Second argument in AppCo cannot be R:" $$
ppr co)
else lintRole co Nominal r2
; return (k3, k4, mkAppTy s1 t1, mkAppTy s2 t2, r1) }
----------
lintCoercion (ForAllCo tv1 kind_co co)
= do { (_, k2) <- lintStarCoercion kind_co
; let tv2 = setTyVarKind tv1 k2
; addInScopeVar tv1 $
do {
; (k3, k4, t1, t2, r) <- lintCoercion co
; in_scope <- getInScope
; let tyl = mkInvForAllTy tv1 t1
subst = mkTvSubst in_scope $
-- We need both the free vars of the `t2` and the
-- free vars of the range of the substitution in
-- scope. All the free vars of `t2` and `kind_co` should
-- already be in `in_scope`, because they've been
-- linted and `tv2` has the same unique as `tv1`.
-- See Note [The substitution invariant]
unitVarEnv tv1 (TyVarTy tv2 `mkCastTy` mkSymCo kind_co)
tyr = mkInvForAllTy tv2 $
substTy subst t2
; return (k3, k4, tyl, tyr, r) } }
lintCoercion co@(FunCo r co1 co2)
= do { (k1,k'1,s1,t1,r1) <- lintCoercion co1
; (k2,k'2,s2,t2,r2) <- lintCoercion co2
; k <- lintArrow (text "coercion" <+> quotes (ppr co)) k1 k2
; k' <- lintArrow (text "coercion" <+> quotes (ppr co)) k'1 k'2
; lintRole co1 r r1
; lintRole co2 r r2
; return (k, k', mkFunTy s1 s2, mkFunTy t1 t2, r) }
lintCoercion (CoVarCo cv)
| not (isCoVar cv)
= failWithL (hang (text "Bad CoVarCo:" <+> ppr cv)
2 (text "With offending type:" <+> ppr (varType cv)))
| otherwise
= do { lintTyCoVarInScope cv
; cv' <- lookupIdInScope cv
; lintUnliftedCoVar cv
; return $ coVarKindsTypesRole cv' }
-- See Note [Bad unsafe coercion]
lintCoercion co@(UnivCo prov r ty1 ty2)
= do { k1 <- lintType ty1
; k2 <- lintType ty2
; case prov of
UnsafeCoerceProv -> return () -- no extra checks
PhantomProv kco -> do { lintRole co Phantom r
; check_kinds kco k1 k2 }
ProofIrrelProv kco -> do { lintL (isCoercionTy ty1) $
mkBadProofIrrelMsg ty1 co
; lintL (isCoercionTy ty2) $
mkBadProofIrrelMsg ty2 co
; check_kinds kco k1 k2 }
PluginProv _ -> return () -- no extra checks
HoleProv h -> addErrL $
text "Unfilled coercion hole:" <+> ppr h
; when (r /= Phantom && classifiesTypeWithValues k1
&& classifiesTypeWithValues k2)
(checkTypes ty1 ty2)
; return (k1, k2, ty1, ty2, r) }
where
report s = hang (text $ "Unsafe coercion: " ++ s)
2 (vcat [ text "From:" <+> ppr ty1
, text " To:" <+> ppr ty2])
isUnBoxed :: PrimRep -> Bool
isUnBoxed = not . isGcPtrRep
-- see #9122 for discussion of these checks
checkTypes t1 t2
= do { checkWarnL (not lev_poly1)
(report "left-hand type is levity-polymorphic")
; checkWarnL (not lev_poly2)
(report "right-hand type is levity-polymorphic")
; when (not (lev_poly1 || lev_poly2)) $
do { checkWarnL (reps1 `equalLength` reps2)
(report "between values with different # of reps")
; zipWithM_ validateCoercion reps1 reps2 }}
where
lev_poly1 = isTypeLevPoly t1
lev_poly2 = isTypeLevPoly t2
-- don't look at these unless lev_poly1/2 are False
-- Otherwise, we get #13458
reps1 = typePrimRep t1
reps2 = typePrimRep t2
validateCoercion :: PrimRep -> PrimRep -> LintM ()
validateCoercion rep1 rep2
= do { dflags <- getDynFlags
; checkWarnL (isUnBoxed rep1 == isUnBoxed rep2)
(report "between unboxed and boxed value")
; checkWarnL (TyCon.primRepSizeB dflags rep1
== TyCon.primRepSizeB dflags rep2)
(report "between unboxed values of different size")
; let fl = liftM2 (==) (TyCon.primRepIsFloat rep1)
(TyCon.primRepIsFloat rep2)
; case fl of
Nothing -> addWarnL (report "between vector types")
Just False -> addWarnL (report "between float and integral values")
_ -> return ()
}
check_kinds kco k1 k2 = do { (k1', k2') <- lintStarCoercion kco
; ensureEqTys k1 k1' (mkBadUnivCoMsg CLeft co)
; ensureEqTys k2 k2' (mkBadUnivCoMsg CRight co) }
lintCoercion (SymCo co)
= do { (k1, k2, ty1, ty2, r) <- lintCoercion co
; return (k2, k1, ty2, ty1, r) }
lintCoercion co@(TransCo co1 co2)
= do { (k1a, _k1b, ty1a, ty1b, r1) <- lintCoercion co1
; (_k2a, k2b, ty2a, ty2b, r2) <- lintCoercion co2
; ensureEqTys ty1b ty2a
(hang (text "Trans coercion mis-match:" <+> ppr co)
2 (vcat [ppr ty1a, ppr ty1b, ppr ty2a, ppr ty2b]))
; lintRole co r1 r2
; return (k1a, k2b, ty1a, ty2b, r1) }
lintCoercion the_co@(NthCo n co)
= do { (_, _, s, t, r) <- lintCoercion co
; case (splitForAllTy_maybe s, splitForAllTy_maybe t) of
{ (Just (tv_s, _ty_s), Just (tv_t, _ty_t))
| n == 0
-> return (ks, kt, ts, tt, Nominal)
where
ts = tyVarKind tv_s
tt = tyVarKind tv_t
ks = typeKind ts
kt = typeKind tt
; _ -> case (splitTyConApp_maybe s, splitTyConApp_maybe t) of
{ (Just (tc_s, tys_s), Just (tc_t, tys_t))
| tc_s == tc_t
, isInjectiveTyCon tc_s r
-- see Note [NthCo and newtypes] in TyCoRep
, tys_s `equalLength` tys_t
, tys_s `lengthExceeds` n
-> return (ks, kt, ts, tt, tr)
where
ts = getNth tys_s n
tt = getNth tys_t n
tr = nthRole r tc_s n
ks = typeKind ts
kt = typeKind tt
; _ -> failWithL (hang (text "Bad getNth:")
2 (ppr the_co $$ ppr s $$ ppr t)) }}}
lintCoercion the_co@(LRCo lr co)
= do { (_,_,s,t,r) <- lintCoercion co
; lintRole co Nominal r
; case (splitAppTy_maybe s, splitAppTy_maybe t) of
(Just s_pr, Just t_pr)
-> return (ks_pick, kt_pick, s_pick, t_pick, Nominal)
where
s_pick = pickLR lr s_pr
t_pick = pickLR lr t_pr
ks_pick = typeKind s_pick
kt_pick = typeKind t_pick
_ -> failWithL (hang (text "Bad LRCo:")
2 (ppr the_co $$ ppr s $$ ppr t)) }
lintCoercion (InstCo co arg)
= do { (k3, k4, t1',t2', r) <- lintCoercion co
; (k1',k2',s1,s2, r') <- lintCoercion arg
; lintRole arg Nominal r'
; in_scope <- getInScope
; case (splitForAllTy_maybe t1', splitForAllTy_maybe t2') of
(Just (tv1,t1), Just (tv2,t2))
| k1' `eqType` tyVarKind tv1
, k2' `eqType` tyVarKind tv2
-> return (k3, k4,
substTyWithInScope in_scope [tv1] [s1] t1,
substTyWithInScope in_scope [tv2] [s2] t2, r)
| otherwise
-> failWithL (text "Kind mis-match in inst coercion")
_ -> failWithL (text "Bad argument of inst") }
lintCoercion co@(AxiomInstCo con ind cos)
= do { unless (0 <= ind && ind < numBranches (coAxiomBranches con))
(bad_ax (text "index out of range"))
; let CoAxBranch { cab_tvs = ktvs
, cab_cvs = cvs
, cab_roles = roles
, cab_lhs = lhs
, cab_rhs = rhs } = coAxiomNthBranch con ind
; unless (cos `equalLength` (ktvs ++ cvs)) $
bad_ax (text "lengths")
; subst <- getTCvSubst
; let empty_subst = zapTCvSubst subst
; (subst_l, subst_r) <- foldlM check_ki
(empty_subst, empty_subst)
(zip3 (ktvs ++ cvs) roles cos)
; let lhs' = substTys subst_l lhs
rhs' = substTy subst_r rhs
; case checkAxInstCo co of
Just bad_branch -> bad_ax $ text "inconsistent with" <+>
pprCoAxBranch con bad_branch
Nothing -> return ()
; let s2 = mkTyConApp (coAxiomTyCon con) lhs'
; return (typeKind s2, typeKind rhs', s2, rhs', coAxiomRole con) }
where
bad_ax what = addErrL (hang (text "Bad axiom application" <+> parens what)
2 (ppr co))
check_ki (subst_l, subst_r) (ktv, role, arg)
= do { (k', k'', s', t', r) <- lintCoercion arg
; lintRole arg role r
; let ktv_kind_l = substTy subst_l (tyVarKind ktv)
ktv_kind_r = substTy subst_r (tyVarKind ktv)
; unless (k' `eqType` ktv_kind_l)
(bad_ax (text "check_ki1" <+> vcat [ ppr co, ppr k', ppr ktv, ppr ktv_kind_l ] ))
; unless (k'' `eqType` ktv_kind_r)
(bad_ax (text "check_ki2" <+> vcat [ ppr co, ppr k'', ppr ktv, ppr ktv_kind_r ] ))
; return (extendTCvSubst subst_l ktv s',
extendTCvSubst subst_r ktv t') }
lintCoercion (CoherenceCo co1 co2)
= do { (_, k2, t1, t2, r) <- lintCoercion co1
; let lhsty = mkCastTy t1 co2
; k1' <- lintType lhsty
; return (k1', k2, lhsty, t2, r) }
lintCoercion (KindCo co)
= do { (k1, k2, _, _, _) <- lintCoercion co
; return (liftedTypeKind, liftedTypeKind, k1, k2, Nominal) }
lintCoercion (SubCo co')
= do { (k1,k2,s,t,r) <- lintCoercion co'
; lintRole co' Nominal r
; return (k1,k2,s,t,Representational) }
lintCoercion this@(AxiomRuleCo co cs)
= do { eqs <- mapM lintCoercion cs
; lintRoles 0 (coaxrAsmpRoles co) eqs
; case coaxrProves co [ Pair l r | (_,_,l,r,_) <- eqs ] of
Nothing -> err "Malformed use of AxiomRuleCo" [ ppr this ]
Just (Pair l r) ->
return (typeKind l, typeKind r, l, r, coaxrRole co) }
where
err m xs = failWithL $
hang (text m) 2 $ vcat (text "Rule:" <+> ppr (coaxrName co) : xs)
lintRoles n (e : es) ((_,_,_,_,r) : rs)
| e == r = lintRoles (n+1) es rs
| otherwise = err "Argument roles mismatch"
[ text "In argument:" <+> int (n+1)
, text "Expected:" <+> ppr e
, text "Found:" <+> ppr r ]
lintRoles _ [] [] = return ()
lintRoles n [] rs = err "Too many coercion arguments"
[ text "Expected:" <+> int n
, text "Provided:" <+> int (n + length rs) ]
lintRoles n es [] = err "Not enough coercion arguments"
[ text "Expected:" <+> int (n + length es)
, text "Provided:" <+> int n ]
----------
lintUnliftedCoVar :: CoVar -> LintM ()
lintUnliftedCoVar cv
= when (not (isUnliftedType (coVarKind cv))) $
failWithL (text "Bad lifted equality:" <+> ppr cv
<+> dcolon <+> ppr (coVarKind cv))
{-
************************************************************************
* *
\subsection[lint-monad]{The Lint monad}
* *
************************************************************************
-}
-- If you edit this type, you may need to update the GHC formalism
-- See Note [GHC Formalism]
data LintEnv
= LE { le_flags :: LintFlags -- Linting the result of this pass
, le_loc :: [LintLocInfo] -- Locations
, le_subst :: TCvSubst -- Current type substitution; we also use this
-- to keep track of all the variables in scope,
-- both Ids and TyVars
, le_joins :: IdSet -- Join points in scope that are valid
                                         -- A subset of the InScopeSet in le_subst
-- See Note [Join points]
, le_dynflags :: DynFlags -- DynamicFlags
}
data LintFlags
= LF { lf_check_global_ids :: Bool -- See Note [Checking for global Ids]
, lf_check_inline_loop_breakers :: Bool -- See Note [Checking for INLINE loop breakers]
, lf_check_static_ptrs :: StaticPtrCheck
-- ^ See Note [Checking StaticPtrs]
}
-- See Note [Checking StaticPtrs]
data StaticPtrCheck
= AllowAnywhere
-- ^ Allow 'makeStatic' to occur anywhere.
| AllowAtTopLevel
-- ^ Allow 'makeStatic' calls at the top-level only.
| RejectEverywhere
-- ^ Reject any 'makeStatic' occurrence.
deriving Eq
defaultLintFlags :: LintFlags
defaultLintFlags = LF { lf_check_global_ids = False
, lf_check_inline_loop_breakers = True
, lf_check_static_ptrs = AllowAnywhere
}
newtype LintM a =
LintM { unLintM ::
LintEnv ->
WarnsAndErrs -> -- Error and warning messages so far
(Maybe a, WarnsAndErrs) } -- Result and messages (if any)
type WarnsAndErrs = (Bag MsgDoc, Bag MsgDoc)
{- Note [Checking for global Ids]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Before CoreTidy, all locally-bound Ids must be LocalIds, even
top-level ones. See Note [Exported LocalIds] and Trac #9857.
Note [Checking StaticPtrs]
~~~~~~~~~~~~~~~~~~~~~~~~~~
See Note [Grand plan for static forms] in StaticPtrTable for an overview.
Every occurrence of the function 'makeStatic' should be moved to the
top level by the FloatOut pass. It's vital that we don't have nested
'makeStatic' occurrences after CorePrep, because we populate the Static
Pointer Table from the top-level bindings. See SimplCore Note [Grand
plan for static forms].
The linter checks that no occurrence is left behind, nested within an
expression. The check is enabled only after the FloatOut, CorePrep,
and CoreTidy passes and only if the module uses the StaticPointers
language extension. Checking more often doesn't help since the condition
doesn't hold until after the first FloatOut pass.
Note [Type substitution]
~~~~~~~~~~~~~~~~~~~~~~~~
Why do we need a type substitution? Consider
/\(a:*). \(x:a). /\(a:*). id a x
This is ill typed, because (renaming variables) it is really
/\(a:*). \(x:a). /\(b:*). id b x
Hence, when checking an application, we can't naively compare x's type
(at its binding site) with its expected type (at a use site). So we
rename type binders as we go, maintaining a substitution.
The same substitution also supports let-type, currently expressed as
(/\(a:*). body) ty
Here we substitute 'ty' for 'a' in 'body', on the fly.
-}
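{- An illustrative sketch of Note [Type substitution] in action (commentary
   only, not a real linter code path): when the linter meets the let-type
   form (/\(a:*). body) ty, it first applies the current substitution to the
   argument type and then extends the substitution for the binder before
   linting the body, so occurrences of 'a' are instantiated on the fly:

     lintLetTypeSketch :: TyVar -> InType -> CoreExpr -> LintM OutType
     lintLetTypeSketch a ty body
       = do { ty' <- applySubstTy ty     -- substitute in the argument first
            ; extendSubstL a ty' $       -- the body is linted with a := ty'
              lintCoreExpr body }

   'applySubstTy' and 'extendSubstL' are the combinators defined in the Lint
   monad below; 'lintLetTypeSketch' itself is hypothetical and only shows how
   they fit together.
-}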
instance Functor LintM where
fmap = liftM
instance Applicative LintM where
pure x = LintM $ \ _ errs -> (Just x, errs)
(<*>) = ap
instance Monad LintM where
fail = MonadFail.fail
m >>= k = LintM (\ env errs ->
let (res, errs') = unLintM m env errs in
case res of
Just r -> unLintM (k r) env errs'
Nothing -> (Nothing, errs'))
instance MonadFail.MonadFail LintM where
fail err = failWithL (text err)
instance HasDynFlags LintM where
getDynFlags = LintM (\ e errs -> (Just (le_dynflags e), errs))
data LintLocInfo
= RhsOf Id -- The variable bound
| LambdaBodyOf Id -- The lambda-binder
| UnfoldingOf Id -- Unfolding of a binder
| BodyOfLetRec [Id] -- One of the binders
| CaseAlt CoreAlt -- Case alternative
| CasePat CoreAlt -- The *pattern* of the case alternative
| AnExpr CoreExpr -- Some expression
| ImportedUnfolding SrcLoc -- Some imported unfolding (ToDo: say which)
| TopLevelBindings
| InType Type -- Inside a type
| InCo Coercion -- Inside a coercion
initL :: DynFlags -> LintFlags -> InScopeSet
-> LintM a -> WarnsAndErrs -- Errors and warnings
initL dflags flags in_scope m
= case unLintM m env (emptyBag, emptyBag) of
(_, errs) -> errs
where
env = LE { le_flags = flags
, le_subst = mkEmptyTCvSubst in_scope
, le_joins = emptyVarSet
, le_loc = []
, le_dynflags = dflags }
getLintFlags :: LintM LintFlags
getLintFlags = LintM $ \ env errs -> (Just (le_flags env), errs)
checkL :: Bool -> MsgDoc -> LintM ()
checkL True _ = return ()
checkL False msg = failWithL msg
-- like checkL, but relevant to type checking
lintL :: Bool -> MsgDoc -> LintM ()
lintL = checkL
checkWarnL :: Bool -> MsgDoc -> LintM ()
checkWarnL True _ = return ()
checkWarnL False msg = addWarnL msg
failWithL :: MsgDoc -> LintM a
failWithL msg = LintM $ \ env (warns,errs) ->
(Nothing, (warns, addMsg env errs msg))
addErrL :: MsgDoc -> LintM ()
addErrL msg = LintM $ \ env (warns,errs) ->
(Just (), (warns, addMsg env errs msg))
addWarnL :: MsgDoc -> LintM ()
addWarnL msg = LintM $ \ env (warns,errs) ->
(Just (), (addMsg env warns msg, errs))
addMsg :: LintEnv -> Bag MsgDoc -> MsgDoc -> Bag MsgDoc
addMsg env msgs msg
= ASSERT( notNull locs )
msgs `snocBag` mk_msg msg
where
locs = le_loc env
(loc, cxt1) = dumpLoc (head locs)
cxts = [snd (dumpLoc loc) | loc <- locs]
context = ifPprDebug (vcat (reverse cxts) $$ cxt1 $$
text "Substitution:" <+> ppr (le_subst env))
cxt1
mk_msg msg = mkLocMessage SevWarning (mkSrcSpan loc loc) (context $$ msg)
addLoc :: LintLocInfo -> LintM a -> LintM a
addLoc extra_loc m
= LintM $ \ env errs ->
unLintM m (env { le_loc = extra_loc : le_loc env }) errs
inCasePat :: LintM Bool -- A slight hack; see the unique call site
inCasePat = LintM $ \ env errs -> (Just (is_case_pat env), errs)
where
is_case_pat (LE { le_loc = CasePat {} : _ }) = True
is_case_pat _other = False
addInScopeVar :: Var -> LintM a -> LintM a
addInScopeVar var m
= LintM $ \ env errs ->
unLintM m (env { le_subst = extendTCvInScope (le_subst env) var
, le_joins = delVarSet (le_joins env) var
}) errs
extendSubstL :: TyVar -> Type -> LintM a -> LintM a
extendSubstL tv ty m
= LintM $ \ env errs ->
unLintM m (env { le_subst = Type.extendTvSubst (le_subst env) tv ty }) errs
updateTCvSubst :: TCvSubst -> LintM a -> LintM a
updateTCvSubst subst' m
= LintM $ \ env errs -> unLintM m (env { le_subst = subst' }) errs
markAllJoinsBad :: LintM a -> LintM a
markAllJoinsBad m
= LintM $ \ env errs -> unLintM m (env { le_joins = emptyVarSet }) errs
markAllJoinsBadIf :: Bool -> LintM a -> LintM a
markAllJoinsBadIf True m = markAllJoinsBad m
markAllJoinsBadIf False m = m
addGoodJoins :: [Var] -> LintM a -> LintM a
addGoodJoins vars thing_inside
| null join_ids
= thing_inside
| otherwise
= LintM $ \ env errs -> unLintM thing_inside (add_joins env) errs
where
add_joins env = env { le_joins = le_joins env `extendVarSetList` join_ids }
join_ids = filter isJoinId vars
getValidJoins :: LintM IdSet
getValidJoins = LintM (\ env errs -> (Just (le_joins env), errs))
getTCvSubst :: LintM TCvSubst
getTCvSubst = LintM (\ env errs -> (Just (le_subst env), errs))
getInScope :: LintM InScopeSet
getInScope = LintM (\ env errs -> (Just (getTCvInScope $ le_subst env), errs))
applySubstTy :: InType -> LintM OutType
applySubstTy ty = do { subst <- getTCvSubst; return (substTy subst ty) }
applySubstCo :: InCoercion -> LintM OutCoercion
applySubstCo co = do { subst <- getTCvSubst; return (substCo subst co) }
lookupIdInScope :: Id -> LintM Id
lookupIdInScope id
| not (mustHaveLocalBinding id)
= return id -- An imported Id
| otherwise
= do { subst <- getTCvSubst
; case lookupInScope (getTCvInScope subst) id of
Just v -> return v
Nothing -> do { addErrL out_of_scope
; return id } }
where
out_of_scope = pprBndr LetBind id <+> text "is out of scope"
lookupJoinId :: Id -> LintM (Maybe JoinArity)
-- Look up an Id which should be a join point, valid here
-- If so, return its arity, if not return Nothing
lookupJoinId id
= do { join_set <- getValidJoins
; case lookupVarSet join_set id of
Just id' -> return (isJoinId_maybe id')
Nothing -> return Nothing }
lintTyCoVarInScope :: Var -> LintM ()
lintTyCoVarInScope v = lintInScope (text "is out of scope") v
lintInScope :: SDoc -> Var -> LintM ()
lintInScope loc_msg var =
do { subst <- getTCvSubst
; lintL (not (mustHaveLocalBinding var) || (var `isInScope` subst))
(hsep [pprBndr LetBind var, loc_msg]) }
ensureEqTys :: OutType -> OutType -> MsgDoc -> LintM ()
-- check ty2 is subtype of ty1 (ie, has same structure but usage
-- annotations need only be consistent, not equal)
-- Assumes ty1,ty2 have already had the substitution applied
ensureEqTys ty1 ty2 msg = lintL (ty1 `eqType` ty2) msg
lintRole :: Outputable thing
=> thing -- where the role appeared
-> Role -- expected
-> Role -- actual
-> LintM ()
lintRole co r1 r2
= lintL (r1 == r2)
(text "Role incompatibility: expected" <+> ppr r1 <> comma <+>
text "got" <+> ppr r2 $$
text "in" <+> ppr co)
{-
************************************************************************
* *
\subsection{Error messages}
* *
************************************************************************
-}
dumpLoc :: LintLocInfo -> (SrcLoc, SDoc)
dumpLoc (RhsOf v)
= (getSrcLoc v, brackets (text "RHS of" <+> pp_binders [v]))
dumpLoc (LambdaBodyOf b)
= (getSrcLoc b, brackets (text "in body of lambda with binder" <+> pp_binder b))
dumpLoc (UnfoldingOf b)
= (getSrcLoc b, brackets (text "in the unfolding of" <+> pp_binder b))
dumpLoc (BodyOfLetRec [])
= (noSrcLoc, brackets (text "In body of a letrec with no binders"))
dumpLoc (BodyOfLetRec bs@(_:_))
= ( getSrcLoc (head bs), brackets (text "in body of letrec with binders" <+> pp_binders bs))
dumpLoc (AnExpr e)
= (noSrcLoc, text "In the expression:" <+> ppr e)
dumpLoc (CaseAlt (con, args, _))
= (noSrcLoc, text "In a case alternative:" <+> parens (ppr con <+> pp_binders args))
dumpLoc (CasePat (con, args, _))
= (noSrcLoc, text "In the pattern of a case alternative:" <+> parens (ppr con <+> pp_binders args))
dumpLoc (ImportedUnfolding locn)
= (locn, brackets (text "in an imported unfolding"))
dumpLoc TopLevelBindings
= (noSrcLoc, Outputable.empty)
dumpLoc (InType ty)
= (noSrcLoc, text "In the type" <+> quotes (ppr ty))
dumpLoc (InCo co)
= (noSrcLoc, text "In the coercion" <+> quotes (ppr co))
pp_binders :: [Var] -> SDoc
pp_binders bs = sep (punctuate comma (map pp_binder bs))
pp_binder :: Var -> SDoc
pp_binder b | isId b = hsep [ppr b, dcolon, ppr (idType b)]
| otherwise = hsep [ppr b, dcolon, ppr (tyVarKind b)]
------------------------------------------------------
-- Messages for case expressions
mkDefaultArgsMsg :: [Var] -> MsgDoc
mkDefaultArgsMsg args
= hang (text "DEFAULT case with binders")
4 (ppr args)
mkCaseAltMsg :: CoreExpr -> Type -> Type -> MsgDoc
mkCaseAltMsg e ty1 ty2
= hang (text "Type of case alternatives not the same as the annotation on case:")
4 (vcat [ text "Actual type:" <+> ppr ty1,
text "Annotation on case:" <+> ppr ty2,
text "Alt Rhs:" <+> ppr e ])
mkScrutMsg :: Id -> Type -> Type -> TCvSubst -> MsgDoc
mkScrutMsg var var_ty scrut_ty subst
= vcat [text "Result binder in case doesn't match scrutinee:" <+> ppr var,
text "Result binder type:" <+> ppr var_ty,--(idType var),
text "Scrutinee type:" <+> ppr scrut_ty,
hsep [text "Current TCv subst", ppr subst]]
mkNonDefltMsg, mkNonIncreasingAltsMsg :: CoreExpr -> MsgDoc
mkNonDefltMsg e
= hang (text "Case expression with DEFAULT not at the beginning") 4 (ppr e)
mkNonIncreasingAltsMsg e
= hang (text "Case expression with badly-ordered alternatives") 4 (ppr e)
nonExhaustiveAltsMsg :: CoreExpr -> MsgDoc
nonExhaustiveAltsMsg e
= hang (text "Case expression with non-exhaustive alternatives") 4 (ppr e)
mkBadConMsg :: TyCon -> DataCon -> MsgDoc
mkBadConMsg tycon datacon
= vcat [
text "In a case alternative, data constructor isn't in scrutinee type:",
text "Scrutinee type constructor:" <+> ppr tycon,
text "Data con:" <+> ppr datacon
]
mkBadPatMsg :: Type -> Type -> MsgDoc
mkBadPatMsg con_result_ty scrut_ty
= vcat [
text "In a case alternative, pattern result type doesn't match scrutinee type:",
text "Pattern result type:" <+> ppr con_result_ty,
text "Scrutinee type:" <+> ppr scrut_ty
]
integerScrutinisedMsg :: MsgDoc
integerScrutinisedMsg
= text "In a LitAlt, the literal is lifted (probably Integer)"
mkBadAltMsg :: Type -> CoreAlt -> MsgDoc
mkBadAltMsg scrut_ty alt
= vcat [ text "Data alternative when scrutinee is not a tycon application",
text "Scrutinee type:" <+> ppr scrut_ty,
text "Alternative:" <+> pprCoreAlt alt ]
mkNewTyDataConAltMsg :: Type -> CoreAlt -> MsgDoc
mkNewTyDataConAltMsg scrut_ty alt
= vcat [ text "Data alternative for newtype datacon",
text "Scrutinee type:" <+> ppr scrut_ty,
text "Alternative:" <+> pprCoreAlt alt ]
------------------------------------------------------
-- Other error messages
mkAppMsg :: Type -> Type -> CoreExpr -> MsgDoc
mkAppMsg fun_ty arg_ty arg
= vcat [text "Argument value doesn't match argument type:",
hang (text "Fun type:") 4 (ppr fun_ty),
hang (text "Arg type:") 4 (ppr arg_ty),
hang (text "Arg:") 4 (ppr arg)]
mkNonFunAppMsg :: Type -> Type -> CoreExpr -> MsgDoc
mkNonFunAppMsg fun_ty arg_ty arg
= vcat [text "Non-function type in function position",
hang (text "Fun type:") 4 (ppr fun_ty),
hang (text "Arg type:") 4 (ppr arg_ty),
hang (text "Arg:") 4 (ppr arg)]
mkLetErr :: TyVar -> CoreExpr -> MsgDoc
mkLetErr bndr rhs
= vcat [text "Bad `let' binding:",
hang (text "Variable:")
4 (ppr bndr <+> dcolon <+> ppr (varType bndr)),
hang (text "Rhs:")
4 (ppr rhs)]
mkTyAppMsg :: Type -> Type -> MsgDoc
mkTyAppMsg ty arg_ty
= vcat [text "Illegal type application:",
hang (text "Exp type:")
4 (ppr ty <+> dcolon <+> ppr (typeKind ty)),
hang (text "Arg type:")
4 (ppr arg_ty <+> dcolon <+> ppr (typeKind arg_ty))]
emptyRec :: CoreExpr -> MsgDoc
emptyRec e = hang (text "Empty Rec binding:") 2 (ppr e)
mkRhsMsg :: Id -> SDoc -> Type -> MsgDoc
mkRhsMsg binder what ty
= vcat
[hsep [text "The type of this binder doesn't match the type of its" <+> what <> colon,
ppr binder],
hsep [text "Binder's type:", ppr (idType binder)],
hsep [text "Rhs type:", ppr ty]]
mkLetAppMsg :: CoreExpr -> MsgDoc
mkLetAppMsg e
= hang (text "This argument does not satisfy the let/app invariant:")
2 (ppr e)
badBndrTyMsg :: Id -> SDoc -> MsgDoc
badBndrTyMsg binder what
= vcat [ text "The type of this binder is" <+> what <> colon <+> ppr binder
, text "Binder's type:" <+> ppr (idType binder) ]
mkStrictMsg :: Id -> MsgDoc
mkStrictMsg binder
= vcat [hsep [text "Recursive or top-level binder has strict demand info:",
ppr binder],
hsep [text "Binder's demand info:", ppr (idDemandInfo binder)]
]
mkNonTopExportedMsg :: Id -> MsgDoc
mkNonTopExportedMsg binder
= hsep [text "Non-top-level binder is marked as exported:", ppr binder]
mkNonTopExternalNameMsg :: Id -> MsgDoc
mkNonTopExternalNameMsg binder
= hsep [text "Non-top-level binder has an external name:", ppr binder]
mkTopNonLitStrMsg :: Id -> MsgDoc
mkTopNonLitStrMsg binder
= hsep [text "Top-level Addr# binder has a non-literal rhs:", ppr binder]
mkKindErrMsg :: TyVar -> Type -> MsgDoc
mkKindErrMsg tyvar arg_ty
= vcat [text "Kinds don't match in type application:",
hang (text "Type variable:")
4 (ppr tyvar <+> dcolon <+> ppr (tyVarKind tyvar)),
hang (text "Arg type:")
4 (ppr arg_ty <+> dcolon <+> ppr (typeKind arg_ty))]
{- Not needed now
mkArityMsg :: Id -> MsgDoc
mkArityMsg binder
= vcat [hsep [text "Demand type has",
ppr (dmdTypeDepth dmd_ty),
text "arguments, rhs has",
ppr (idArity binder),
text "arguments,",
ppr binder],
hsep [text "Binder's strictness signature:", ppr dmd_ty]
]
where (StrictSig dmd_ty) = idStrictness binder
-}
mkCastErr :: Outputable casted => casted -> Coercion -> Type -> Type -> MsgDoc
mkCastErr expr co from_ty expr_ty
= vcat [text "From-type of Cast differs from type of enclosed expression",
text "From-type:" <+> ppr from_ty,
text "Type of enclosed expr:" <+> ppr expr_ty,
text "Actual enclosed expr:" <+> ppr expr,
text "Coercion used in cast:" <+> ppr co
]
mkBadUnivCoMsg :: LeftOrRight -> Coercion -> SDoc
mkBadUnivCoMsg lr co
= text "Kind mismatch on the" <+> pprLeftOrRight lr <+>
text "side of a UnivCo:" <+> ppr co
mkBadProofIrrelMsg :: Type -> Coercion -> SDoc
mkBadProofIrrelMsg ty co
= hang (text "Found a non-coercion in a proof-irrelevance UnivCo:")
2 (vcat [ text "type:" <+> ppr ty
, text "co:" <+> ppr co ])
mkBadTyVarMsg :: Var -> SDoc
mkBadTyVarMsg tv
= text "Non-tyvar used in TyVarTy:"
<+> ppr tv <+> dcolon <+> ppr (varType tv)
mkBadJoinBindMsg :: Var -> SDoc
mkBadJoinBindMsg var
= vcat [ text "Bad join point binding:" <+> ppr var
, text "Join points can be bound only by a non-top-level let" ]
mkInvalidJoinPointMsg :: Var -> Type -> SDoc
mkInvalidJoinPointMsg var ty
= hang (text "Join point has invalid type:")
2 (ppr var <+> dcolon <+> ppr ty)
mkBadJoinArityMsg :: Var -> Int -> Int -> CoreExpr -> SDoc
mkBadJoinArityMsg var ar nlams rhs
= vcat [ text "Join point has too few lambdas",
text "Join var:" <+> ppr var,
text "Join arity:" <+> ppr ar,
text "Number of lambdas:" <+> ppr nlams,
text "Rhs = " <+> ppr rhs
]
invalidJoinOcc :: Var -> SDoc
invalidJoinOcc var
= vcat [ text "Invalid occurrence of a join variable:" <+> ppr var
, text "The binder is either not a join point, or not valid here" ]
mkBadJumpMsg :: Var -> Int -> Int -> SDoc
mkBadJumpMsg var ar nargs
= vcat [ text "Join point invoked with wrong number of arguments",
text "Join var:" <+> ppr var,
text "Join arity:" <+> ppr ar,
text "Number of arguments:" <+> int nargs ]
mkInconsistentRecMsg :: [Var] -> SDoc
mkInconsistentRecMsg bndrs
= vcat [ text "Recursive let binders mix values and join points",
text "Binders:" <+> hsep (map ppr_with_details bndrs) ]
where
ppr_with_details bndr = ppr bndr <> ppr (idDetails bndr)
mkJoinBndrOccMismatchMsg :: Var -> JoinArity -> JoinArity -> SDoc
mkJoinBndrOccMismatchMsg bndr join_arity_bndr join_arity_occ
= vcat [ text "Mismatch in join point arity between binder and occurrence"
, text "Var:" <+> ppr bndr
, text "Arity at binding site:" <+> ppr join_arity_bndr
, text "Arity at occurrence: " <+> ppr join_arity_occ ]
mkBndrOccTypeMismatchMsg :: Var -> Var -> OutType -> OutType -> SDoc
mkBndrOccTypeMismatchMsg bndr var bndr_ty var_ty
= vcat [ text "Mismatch in type between binder and occurrence"
, text "Var:" <+> ppr bndr
, text "Binder type:" <+> ppr bndr_ty
, text "Occurrence type:" <+> ppr var_ty
, text " Before subst:" <+> ppr (idType var) ]
mkBadJoinPointRuleMsg :: JoinId -> JoinArity -> CoreRule -> SDoc
mkBadJoinPointRuleMsg bndr join_arity rule
= vcat [ text "Join point has rule with wrong number of arguments"
, text "Var:" <+> ppr bndr
, text "Join arity:" <+> ppr join_arity
, text "Rule:" <+> ppr rule ]
pprLeftOrRight :: LeftOrRight -> MsgDoc
pprLeftOrRight CLeft = text "left"
pprLeftOrRight CRight = text "right"
dupVars :: [NonEmpty Var] -> MsgDoc
dupVars vars
= hang (text "Duplicate variables brought into scope")
2 (ppr (map toList vars))
dupExtVars :: [NonEmpty Name] -> MsgDoc
dupExtVars vars
= hang (text "Duplicate top-level variables with the same qualified name")
2 (ppr (map toList vars))
{-
************************************************************************
* *
\subsection{Annotation Linting}
* *
************************************************************************
-}
-- | This checks whether a pass correctly looks through debug
-- annotations (@SourceNote@). This works a bit differently from other
-- consistency checks: We check this by running the given task twice,
-- noting all differences between the results.
lintAnnots :: SDoc -> (ModGuts -> CoreM ModGuts) -> ModGuts -> CoreM ModGuts
lintAnnots pname pass guts = do
-- Run the pass as we normally would
dflags <- getDynFlags
when (gopt Opt_DoAnnotationLinting dflags) $
liftIO $ Err.showPass dflags "Annotation linting - first run"
nguts <- pass guts
-- If appropriate re-run it without debug annotations to make sure
-- that they made no difference.
when (gopt Opt_DoAnnotationLinting dflags) $ do
liftIO $ Err.showPass dflags "Annotation linting - second run"
nguts' <- withoutAnnots pass guts
-- Finally compare the resulting bindings
liftIO $ Err.showPass dflags "Annotation linting - comparison"
let binds = flattenBinds $ mg_binds nguts
binds' = flattenBinds $ mg_binds nguts'
(diffs,_) = diffBinds True (mkRnEnv2 emptyInScopeSet) binds binds'
when (not (null diffs)) $ CoreMonad.putMsg $ vcat
[ lint_banner "warning" pname
, text "Core changes with annotations:"
, withPprStyle (defaultDumpStyle dflags) $ nest 2 $ vcat diffs
]
-- Return actual new guts
return nguts
-- | Run the given pass without annotations. This means that we both
-- set the debugLevel setting to 0 in the environment and strip all
-- annotations from incoming modules.
withoutAnnots :: (ModGuts -> CoreM ModGuts) -> ModGuts -> CoreM ModGuts
withoutAnnots pass guts = do
-- Remove debug flag from environment.
dflags <- getDynFlags
let removeFlag env = env{ hsc_dflags = dflags{ debugLevel = 0} }
withoutFlag corem =
liftIO =<< runCoreM <$> fmap removeFlag getHscEnv <*> getRuleBase <*>
getUniqueSupplyM <*> getModule <*>
getVisibleOrphanMods <*>
getPrintUnqualified <*> getSrcSpanM <*>
pure corem
-- Nuke existing ticks in module.
-- TODO: Ticks in unfoldings. Maybe change unfolding so it removes
-- them in absence of debugLevel > 0.
let nukeTicks = stripTicksE (not . tickishIsCode)
nukeAnnotsBind :: CoreBind -> CoreBind
nukeAnnotsBind bind = case bind of
Rec bs -> Rec $ map (\(b,e) -> (b, nukeTicks e)) bs
NonRec b e -> NonRec b $ nukeTicks e
nukeAnnotsMod mg@ModGuts{mg_binds=binds}
= mg{mg_binds = map nukeAnnotsBind binds}
-- Perform pass with all changes applied
fmap fst $ withoutFlag $ pass (nukeAnnotsMod guts)
| ezyang/ghc | compiler/coreSyn/CoreLint.hs | bsd-3-clause | 100,021 | 360 | 21 | 29,002 | 19,680 | 10,328 | 9,352 | -1 | -1 |
Tried to remove Constructors from a Type that exposed all Constructors.
This does not work because other Constructors are not available for HsImport.
Thus, this operation can not be performed.
Example:
import Foo.Bar (Baz(..))
> hsimport --hiding -m Foo.Bar -s Baz -w A
The correct solution would be, assuming Constructors are A, B and C, to change the import to:
import Foo.Bar (Baz(B,C))
However, this is not possible for this program, so we abort the program execution.
| dan-t/hsimport | tests/goldenFiles/SymbolTest51.hs | bsd-3-clause | 481 | 21 | 8 | 82 | 201 | 97 | 104 | -1 | -1 |
{- |
Module : SAWScript.Crucible.JVM.Override
Description : Override matching and application for JVM
License : BSD3
Maintainer : atomb
Stability : provisional
-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE EmptyCase #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE ImplicitParams #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ParallelListComp #-}
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE ViewPatterns #-}
{-# OPTIONS_GHC -Wno-orphans #-} -- Pretty JVMVal
module SAWScript.Crucible.JVM.Override
( OverrideMatcher
, OverrideMatcher'(..)
, runOverrideMatcher
, setupValueSub
, osAsserts
, termSub
, learnCond
, matchArg
, methodSpecHandler
, valueToSC
, injectJVMVal
, decodeJVMVal
, doEntireArrayStore
, destVecTypedTerm
) where
import Control.Lens.At
import Control.Lens.Each
import Control.Lens.Fold
import Control.Lens.Getter
import Control.Lens.Lens
import Control.Lens.Setter
import Control.Exception as X
import Control.Monad.IO.Class (liftIO)
import Control.Monad
import Data.Either (partitionEithers)
import Data.Foldable (for_, traverse_)
import Data.List (tails)
import Data.Map (Map)
import qualified Data.Map as Map
import qualified Data.Set as Set
import Data.Void (absurd)
import qualified Prettyprinter as PP
-- cryptol
import qualified Cryptol.TypeCheck.AST as Cryptol
import qualified Cryptol.Eval.Type as Cryptol (TValue(..), evalType, evalValType)
-- what4
import qualified What4.BaseTypes as W4
import qualified What4.Interface as W4
import qualified What4.ProgramLoc as W4
import What4.LabeledPred (labeledPred)
-- crucible
import qualified Lang.Crucible.Backend as Crucible
import qualified Lang.Crucible.CFG.Core as Crucible ( TypeRepr(UnitRepr) )
import qualified Lang.Crucible.FunctionHandle as Crucible
import qualified Lang.Crucible.Simulator as Crucible
-- crucible-jvm
import qualified Lang.Crucible.JVM as CJ
-- parameterized-utils
import Data.Parameterized.Classes ((:~:)(..), testEquality)
import qualified Data.Parameterized.Context as Ctx
import Data.Parameterized.Some (Some(Some))
-- saw-core
import Verifier.SAW.SharedTerm
import Verifier.SAW.Prelude (scEq)
import Verifier.SAW.TypedAST
import Verifier.SAW.TypedTerm
import Verifier.SAW.Simulator.What4.ReturnTrip (toSC)
-- cryptol-saw-core
import qualified Verifier.SAW.Cryptol as Cryptol
import SAWScript.Crucible.Common
import SAWScript.Crucible.Common.MethodSpec (AllocIndex(..), PrePost(..))
import SAWScript.Crucible.Common.Override hiding (getSymInterface)
import qualified SAWScript.Crucible.Common.Override as Ov (getSymInterface)
import qualified SAWScript.Crucible.Common.MethodSpec as MS
import SAWScript.Crucible.JVM.MethodSpecIR
import SAWScript.Crucible.JVM.ResolveSetupValue
import SAWScript.Options
import SAWScript.Panic
import SAWScript.Utils (handleException)
-- jvm-parser
import qualified Language.JVM.Parser as J
-- A few convenient synonyms
type SetupValue = MS.SetupValue CJ.JVM
type CrucibleMethodSpecIR = MS.CrucibleMethodSpecIR CJ.JVM
type StateSpec = MS.StateSpec CJ.JVM
type SetupCondition = MS.SetupCondition CJ.JVM
type instance Pointer' CJ.JVM Sym = JVMRefVal
-- TODO: Improve?
ppJVMVal :: JVMVal -> PP.Doc ann
ppJVMVal = PP.viaShow
instance PP.Pretty JVMVal where
pretty = ppJVMVal
-- | Try to translate the spec\'s 'SetupValue' into a 'JVMVal', and build a
-- 'StructuralMismatch' failure that pretty-prints both the simulator value
-- and the resolved value.
mkStructuralMismatch ::
Options {- ^ output/verbosity options -} ->
JVMCrucibleContext ->
SharedContext {- ^ context for constructing SAW terms -} ->
CrucibleMethodSpecIR {- ^ for name and typing environments -} ->
JVMVal {- ^ the value from the simulator -} ->
SetupValue {- ^ the value from the spec -} ->
J.Type {- ^ the expected type -} ->
OverrideMatcher CJ.JVM w (OverrideFailureReason CJ.JVM)
mkStructuralMismatch opts cc sc spec jvmval setupval jty = do
setupTy <- typeOfSetupValueJVM cc spec setupval
setupJVal <- resolveSetupValueJVM opts cc sc spec setupval
pure $ StructuralMismatch
(ppJVMVal jvmval)
(ppJVMVal setupJVal)
(Just setupTy)
jty
------------------------------------------------------------------------
-- | This function is responsible for implementing the \"override\" behavior
-- of method specifications. The main work done in this function is to manage
-- the process of selecting between several different override
-- specifications that could apply. We want a proof to succeed if /any/
-- choice of method spec allows the proof to go through, which is a slightly
-- awkward thing to fit into the symbolic simulation framework.
--
-- The main work of determining the preconditions, postconditions, memory
-- updates and return value for a single specification is done by
-- the @methodSpecHandler_prestate@ and @methodSpecHandler_poststate@ functions.
--
-- In a first phase, we attempt to apply the precondition portion of each of
-- the given method specifications. Each one that might apply generates
-- a substitution for the setup variables and a collection of preconditions
-- that guard the specification. We use these preconditions to compute
-- a multiway symbolic branch, one for each override which might apply.
--
-- In the body of each of the individual branches, we compute the postcondition
-- actions of the corresponding method specification. This will update memory
-- and compute function return values, in addition to assuming postcondition
-- predicates.
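--
-- Schematically (an illustrative sketch, not the literal implementation),
-- the selection below behaves like an n-way symbolic branch whose final
-- default arm fails:
--
-- @
-- symbolicBranches
--   [ (precond_1, assume postconditions of spec_1 and return its value)
--   , (precond_2, assume postconditions of spec_2 and return its value)
--   , ...
--   , (true     , fail: no override specification applies)
--   ]
-- @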
methodSpecHandler ::
forall rtp args ret.
Options {- ^ output/verbosity options -} ->
SharedContext {- ^ context for constructing SAW terms -} ->
JVMCrucibleContext {- ^ context for interacting with Crucible -} ->
W4.ProgramLoc {- ^ Location of the call site for error reporting-} ->
[CrucibleMethodSpecIR] {- ^ specification for current function override -} ->
Crucible.FnHandle args ret {- ^ a handle for the function -} ->
Crucible.OverrideSim (SAWCruciblePersonality Sym) Sym CJ.JVM rtp args ret
(Crucible.RegValue Sym ret)
methodSpecHandler opts sc cc top_loc css h =
jccWithBackend cc $ \bak -> do
let sym = backendGetSym bak
Crucible.RegMap args <- Crucible.getOverrideArgs
-- First, run the precondition matcher phase. Collect together a list of the results.
-- For each override, this will either be an error message, or a matcher state and
-- a method spec.
prestates <-
do g0 <- Crucible.readGlobals
forM css $ \cs -> liftIO $
let initialFree =
Set.fromList (cs ^.. MS.csPreState. MS.csFreshVars . each . to tecExt . to ecVarIndex)
in runOverrideMatcher sym g0 Map.empty Map.empty initialFree (view MS.csLoc cs)
(do methodSpecHandler_prestate opts sc cc args cs
return cs)
-- Print a failure message if all overrides failed to match. Otherwise, collect
-- all the override states that might apply, and compute the conjunction of all
-- the preconditions. We'll use these to perform symbolic branches between the
-- various overrides.
branches <- case partitionEithers prestates of
(e, []) ->
fail $ show $
PP.vcat
[ "All overrides failed during structural matching:"
, PP.vcat (map (\x -> "*" <> PP.indent 2 (ppOverrideFailure x)) e)
]
(_, ss) -> liftIO $
forM ss $ \(cs,st) ->
do precond <- W4.andAllOf sym (folded.labeledPred) (st^.osAsserts)
return ( precond, cs, st )
-- Now use crucible's symbolic branching machinery to select between the branches.
-- Essentially, we are doing an n-way if statement on the precondition predicates
-- for each override, and selecting the first one whose preconditions hold.
--
-- Then, in the body of the branch, we run the poststate handler to update the
-- memory state, compute return values and compute postcondition predicates.
--
-- For each override branch that doesn't fail outright, we assume the relevant
-- postconditions, update the crucible global variable state, and return the
-- computed return value.
--
-- We add a final default branch that simply fails unless some previous override
-- branch has already succeeded.
let retTy = Crucible.handleReturnType h
Crucible.regValue <$> Crucible.callOverride h
(Crucible.mkOverride' "overrideBranches" retTy
(Crucible.symbolicBranches Crucible.emptyRegMap $
[ ( precond
, do g <- Crucible.readGlobals
res <- liftIO $ runOverrideMatcher sym g
(st^.setupValueSub)
(st^.termSub)
(st^.osFree)
(st^.osLocation)
(methodSpecHandler_poststate opts sc cc retTy cs)
case res of
Left (OF loc rsn) ->
-- TODO, better pretty printing for reasons
liftIO
$ Crucible.abortExecBecause
$ Crucible.AssertionFailure
$ Crucible.SimError loc
$ Crucible.AssertFailureSimError "assumed false" (show rsn)
Right (ret,st') ->
do liftIO $ forM_ (st'^.osAssumes) $ \asum ->
Crucible.addAssumption bak
$ Crucible.GenericAssumption (st^.osLocation) "override postcondition" asum
Crucible.writeGlobals (st'^.overrideGlobals)
Crucible.overrideReturn' (Crucible.RegEntry retTy ret)
, Just (W4.plSourceLoc (cs ^. MS.csLoc))
)
| (precond, cs, st) <- branches
] ++
[
let fnName = case branches of
(_, cs, _) : _ -> cs ^. MS.csMethod . jvmMethodName
_ -> "unknown function"
in
( W4.truePred sym
, liftIO $ Crucible.addFailedAssertion bak (Crucible.GenericSimError $ "no override specification applies for " ++ fnName)
, Just (W4.plSourceLoc top_loc)
)
]
))
(Crucible.RegMap args)
------------------------------------------------------------------------
-- | Use a method spec to override the behavior of a function.
-- This function computes the pre-state portion of the override,
-- which involves reading values from arguments and memory and computing
-- substitutions for the setup value variables, and computing precondition
-- predicates.
methodSpecHandler_prestate ::
forall ctx w.
Options {- ^ output/verbosity options -} ->
SharedContext {- ^ context for constructing SAW terms -} ->
JVMCrucibleContext {- ^ context for interacting with Crucible -} ->
Ctx.Assignment (Crucible.RegEntry Sym) ctx
{- ^ the arguments to the function -} ->
CrucibleMethodSpecIR {- ^ specification for current function override -} ->
OverrideMatcher CJ.JVM w ()
methodSpecHandler_prestate opts sc cc args cs =
do let expectedArgTypes = Map.elems (cs ^. MS.csArgBindings)
let aux ::
(J.Type, SetupValue) -> Crucible.AnyValue Sym ->
IO (JVMVal, J.Type, SetupValue)
aux (argTy, setupVal) val =
case decodeJVMVal argTy val of
Just val' -> return (val', argTy, setupVal)
Nothing -> fail "unexpected type"
-- todo: fail if list lengths mismatch
xs <- liftIO (zipWithM aux expectedArgTypes (assignmentToList args))
sequence_ [ matchArg opts sc cc cs PreState x y z | (x, y, z) <- xs]
learnCond opts sc cc cs PreState (cs ^. MS.csPreState)
-- | Use a method spec to override the behavior of a function.
-- This function computes the post-state portion of the override,
-- which involves writing values into memory, computing the return value,
-- and computing postcondition predicates.
methodSpecHandler_poststate ::
forall ret w.
Options {- ^ output/verbosity options -} ->
SharedContext {- ^ context for constructing SAW terms -} ->
JVMCrucibleContext {- ^ context for interacting with Crucible -} ->
Crucible.TypeRepr ret {- ^ type representation of function return value -} ->
CrucibleMethodSpecIR {- ^ specification for current function override -} ->
OverrideMatcher CJ.JVM w (Crucible.RegValue Sym ret)
methodSpecHandler_poststate opts sc cc retTy cs =
do executeCond opts sc cc cs (cs ^. MS.csPostState)
computeReturnValue opts cc sc cs retTy (cs ^. MS.csRetValue)
-- learn pre/post condition
learnCond ::
Options ->
SharedContext ->
JVMCrucibleContext ->
CrucibleMethodSpecIR ->
PrePost ->
StateSpec ->
OverrideMatcher CJ.JVM w ()
learnCond opts sc cc cs prepost ss =
do let loc = cs ^. MS.csLoc
matchPointsTos opts sc cc cs prepost (ss ^. MS.csPointsTos)
traverse_ (learnSetupCondition opts sc cc cs prepost) (ss ^. MS.csConditions)
enforceDisjointness cc loc ss
enforceCompleteSubstitution loc ss
-- | Verify that all of the fresh variables for the given
-- state spec have been "learned". If not, throws
-- 'AmbiguousVars' exception.
enforceCompleteSubstitution :: W4.ProgramLoc -> StateSpec -> OverrideMatcher CJ.JVM w ()
enforceCompleteSubstitution loc ss =
do sub <- OM (use termSub)
let -- predicate matches terms that are not covered by the computed
-- term substitution
isMissing tt = ecVarIndex (tecExt tt) `Map.notMember` sub
-- list of all terms not covered by substitution
missing = filter isMissing (view MS.csFreshVars ss)
unless (null missing) (failure loc (AmbiguousVars missing))
-- execute a pre/post condition
executeCond ::
Options ->
SharedContext ->
JVMCrucibleContext ->
CrucibleMethodSpecIR ->
StateSpec ->
OverrideMatcher CJ.JVM w ()
executeCond opts sc cc cs ss =
do refreshTerms sc ss
traverse_ (executeAllocation opts cc) (Map.assocs (ss ^. MS.csAllocs))
traverse_ (executePointsTo opts sc cc cs) (ss ^. MS.csPointsTos)
traverse_ (executeSetupCondition opts sc cc cs) (ss ^. MS.csConditions)
-- | Allocate fresh variables for all of the "fresh" vars
-- used in this phase and add them to the term substitution.
refreshTerms ::
SharedContext {- ^ shared context -} ->
StateSpec {- ^ current phase spec -} ->
OverrideMatcher CJ.JVM w ()
refreshTerms sc ss =
do extension <- Map.fromList <$> traverse freshenTerm (view MS.csFreshVars ss)
OM (termSub %= Map.union extension)
where
freshenTerm (TypedExtCns _cty ec) =
do ec' <- liftIO $ scFreshEC sc (toShortName (ecName ec)) (ecType ec)
new <- liftIO $ scExtCns sc ec'
return (ecVarIndex ec, new)
------------------------------------------------------------------------
-- | Generate assertions that all of the memory allocations matched by
-- an override's precondition are disjoint.
enforceDisjointness ::
JVMCrucibleContext -> W4.ProgramLoc -> StateSpec -> OverrideMatcher CJ.JVM w ()
enforceDisjointness cc loc ss =
do let sym = cc^.jccSym
sub <- OM (use setupValueSub)
let mems = Map.elems $ Map.intersectionWith (,) (view MS.csAllocs ss) sub
-- Ensure that all regions are disjoint from each other.
sequence_
[ do c <- liftIO $ W4.notPred sym =<< CJ.refIsEqual sym p q
addAssert c a
| let a = Crucible.SimError loc $
Crucible.AssertFailureSimError "Memory regions not disjoint" ""
, ((_ploc, _pty), p) : ps <- tails mems
, ((_qloc, _qty), q) <- ps
]
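-- A worked example of the comprehension above (illustrative only): with
-- three matched allocations p, q and r, drawing the first element of each
-- pair from successive 'tails' yields every unordered pair exactly once,
-- so the generated assertions are
--
-- > p /= q, p /= r, q /= r
--
-- i.e. n*(n-1)/2 disequalities for n allocations rather than all n^2
-- ordered pairs.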
------------------------------------------------------------------------
-- | For each points-to statement read the memory value through the
-- given pointer (lhs) and match the value against the given pattern
-- (rhs). Statements are processed in dependency order: a points-to
-- statement cannot be executed until bindings for any/all lhs
-- variables exist.
matchPointsTos ::
Options {- ^ saw script print out opts -} ->
SharedContext {- ^ term construction context -} ->
JVMCrucibleContext {- ^ simulator context -} ->
CrucibleMethodSpecIR ->
PrePost ->
[JVMPointsTo] {- ^ points-tos -} ->
OverrideMatcher CJ.JVM w ()
matchPointsTos opts sc cc spec prepost = go False []
where
go ::
Bool {- progress indicator -} ->
[JVMPointsTo] {- delayed conditions -} ->
[JVMPointsTo] {- queued conditions -} ->
OverrideMatcher CJ.JVM w ()
-- all conditions processed, success
go _ [] [] = return ()
-- not all conditions processed, no progress, failure
go False delayed [] = failure (spec ^. MS.csLoc) (AmbiguousPointsTos delayed)
-- not all conditions processed, progress made, resume delayed conditions
go True delayed [] = go False [] delayed
-- progress the next points-to in the work queue
go progress delayed (c:cs) =
do ready <- checkPointsTo c
if ready then
do learnPointsTo opts sc cc spec prepost c
go True delayed cs
else
do go progress (c:delayed) cs
-- determine if a precondition is ready to be checked
checkPointsTo :: JVMPointsTo -> OverrideMatcher CJ.JVM w Bool
checkPointsTo (JVMPointsToField _loc p _ _) = checkAllocIndex p
checkPointsTo (JVMPointsToStatic _loc _ _) = pure True
checkPointsTo (JVMPointsToElem _loc p _ _) = checkAllocIndex p
checkPointsTo (JVMPointsToArray _loc p _) = checkAllocIndex p
checkAllocIndex :: AllocIndex -> OverrideMatcher CJ.JVM w Bool
checkAllocIndex i =
do m <- OM (use setupValueSub)
return (Map.member i m)
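-- A small worked trace of the worklist above (illustrative; pt1 and pt2 are
-- hypothetical points-to statements, where pt2 mentions an allocation
-- variable that only becomes bound while matching pt1):
--
-- @
-- go False []    [pt1, pt2]   -- pt1 is ready: learn it
-- go True  []    [pt2]        -- pt2 is not ready yet: delay it
-- go True  [pt2] []           -- progress was made: requeue the delayed list
-- go False []    [pt2]        -- pt2 is ready now: learn it
-- go True  []    []           -- all conditions processed
-- @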
------------------------------------------------------------------------
computeReturnValue ::
Options {- ^ saw script debug and print options -} ->
JVMCrucibleContext {- ^ context of the crucible simulation -} ->
SharedContext {- ^ context for generating saw terms -} ->
CrucibleMethodSpecIR {- ^ method specification -} ->
Crucible.TypeRepr ret {- ^ representation of function return type -} ->
Maybe SetupValue {- ^ optional symbolic return value -} ->
OverrideMatcher CJ.JVM w (Crucible.RegValue Sym ret)
{- ^ concrete return value -}
computeReturnValue _opts _cc _sc spec ty Nothing =
case ty of
Crucible.UnitRepr -> return ()
_ -> failure (spec ^. MS.csLoc) (BadReturnSpecification (Some ty))
computeReturnValue opts cc sc spec ty (Just val) =
do val' <- resolveSetupValueJVM opts cc sc spec val
let fail_ = failure (spec ^. MS.csLoc) (BadReturnSpecification (Some ty))
case val' of
IVal i ->
case testEquality ty CJ.intRepr of
Just Refl -> return i
Nothing -> fail_
LVal l ->
case testEquality ty CJ.longRepr of
Just Refl -> return l
Nothing -> fail_
RVal r ->
case testEquality ty CJ.refRepr of
Just Refl -> return r
Nothing -> fail_
------------------------------------------------------------------------
-- | Assign the given pointer value to the given allocation index in
-- the current substitution. If there is already a binding for this
-- index, then add a pointer-equality constraint.
assignVar ::
JVMCrucibleContext {- ^ context for interacting with Crucible -} ->
W4.ProgramLoc ->
AllocIndex {- ^ variable index -} ->
JVMRefVal {- ^ concrete value -} ->
OverrideMatcher CJ.JVM w ()
assignVar cc loc var ref =
do old <- OM (setupValueSub . at var <<.= Just ref)
let sym = cc ^. jccSym
for_ old $ \ref' ->
do p <- liftIO (CJ.refIsEqual sym ref ref')
addAssert p (Crucible.SimError loc (Crucible.AssertFailureSimError "equality of aliased pointers" ""))
------------------------------------------------------------------------
assignTerm ::
SharedContext {- ^ context for constructing SAW terms -} ->
JVMCrucibleContext {- ^ context for interacting with Crucible -} ->
W4.ProgramLoc ->
PrePost ->
VarIndex {- ^ external constant index -} ->
Term {- ^ value -} ->
OverrideMatcher CJ.JVM w ()
assignTerm sc cc loc prepost var val =
do mb <- OM (use (termSub . at var))
case mb of
Nothing -> OM (termSub . at var ?= val)
Just old ->
matchTerm sc cc loc prepost val old
------------------------------------------------------------------------
-- | Match the value of a function argument with a symbolic 'SetupValue'.
matchArg ::
Options {- ^ saw script print out opts -} ->
SharedContext {- ^ context for constructing SAW terms -} ->
JVMCrucibleContext {- ^ context for interacting with Crucible -} ->
CrucibleMethodSpecIR {- ^ specification for current function override -} ->
PrePost ->
JVMVal {- ^ concrete simulation value -} ->
J.Type {- ^ expected memory type -} ->
SetupValue {- ^ expected specification value -} ->
OverrideMatcher CJ.JVM w ()
matchArg opts sc cc cs prepost actual expectedTy expected@(MS.SetupTerm expectedTT)
| TypedTermSchema (Cryptol.Forall [] [] tyexpr) <- ttType expectedTT
, Right tval <- Cryptol.evalType mempty tyexpr
= do sym <- Ov.getSymInterface
failMsg <- mkStructuralMismatch opts cc sc cs actual expected expectedTy
realTerm <- valueToSC sym (cs ^. MS.csLoc) failMsg tval actual
matchTerm sc cc (cs ^. MS.csLoc) prepost realTerm (ttTerm expectedTT)
matchArg opts sc cc cs prepost actual@(RVal ref) expectedTy setupval =
case setupval of
MS.SetupVar var ->
do assignVar cc (cs ^. MS.csLoc) var ref
MS.SetupNull () ->
do sym <- Ov.getSymInterface
p <- liftIO (CJ.refIsNull sym ref)
addAssert p (Crucible.SimError (cs ^. MS.csLoc) (Crucible.AssertFailureSimError ("null-equality " ++ stateCond prepost) ""))
MS.SetupGlobal empty _ -> absurd empty
_ -> failure (cs ^. MS.csLoc) =<<
mkStructuralMismatch opts cc sc cs actual setupval expectedTy
matchArg opts sc cc cs _prepost actual expectedTy expected =
failure (cs ^. MS.csLoc) =<<
mkStructuralMismatch opts cc sc cs actual expected expectedTy
------------------------------------------------------------------------
valueToSC ::
Sym ->
W4.ProgramLoc ->
OverrideFailureReason CJ.JVM ->
Cryptol.TValue ->
JVMVal ->
OverrideMatcher CJ.JVM w Term
valueToSC sym _ _ Cryptol.TVBit (IVal x) =
do b <- liftIO $ W4.bvIsNonzero sym x
-- TODO: assert that x is 0 or 1
st <- liftIO (sawCoreState sym)
liftIO (toSC sym st b)
valueToSC sym _ _ (Cryptol.TVSeq 8 Cryptol.TVBit) (IVal x) =
do st <- liftIO (sawCoreState sym)
liftIO (toSC sym st =<< W4.bvTrunc sym (W4.knownNat @8) x)
valueToSC sym _ _ (Cryptol.TVSeq 16 Cryptol.TVBit) (IVal x) =
do st <- liftIO (sawCoreState sym)
liftIO (toSC sym st =<< W4.bvTrunc sym (W4.knownNat @16) x)
valueToSC sym _ _ (Cryptol.TVSeq 32 Cryptol.TVBit) (IVal x) =
do st <- liftIO (sawCoreState sym)
liftIO (toSC sym st x)
valueToSC sym _ _ (Cryptol.TVSeq 64 Cryptol.TVBit) (LVal x) =
do st <- liftIO (sawCoreState sym)
liftIO (toSC sym st x)
valueToSC _sym loc failMsg _tval _val =
failure loc failMsg
------------------------------------------------------------------------
-- | NOTE: The two 'Term' arguments must have the same type.
matchTerm ::
SharedContext {- ^ context for constructing SAW terms -} ->
JVMCrucibleContext {- ^ context for interacting with Crucible -} ->
W4.ProgramLoc ->
PrePost ->
Term {- ^ exported concrete term -} ->
Term {- ^ expected specification term -} ->
OverrideMatcher CJ.JVM w ()
matchTerm _ _ _ _ real expect | real == expect = return ()
matchTerm sc cc loc prepost real expect =
do free <- OM (use osFree)
case unwrapTermF expect of
FTermF (ExtCns ec)
| Set.member (ecVarIndex ec) free ->
do assignTerm sc cc loc prepost (ecVarIndex ec) real
_ ->
do t <- liftIO $ scEq sc real expect
p <- liftIO $ resolveBoolTerm (cc ^. jccSym) t
addAssert p (Crucible.SimError loc (Crucible.AssertFailureSimError ("literal equality " ++ stateCond prepost) ""))
------------------------------------------------------------------------
-- | Use the current state to learn about variable assignments based on
-- preconditions for a procedure specification.
learnSetupCondition ::
Options ->
SharedContext ->
JVMCrucibleContext ->
CrucibleMethodSpecIR ->
PrePost ->
SetupCondition ->
OverrideMatcher CJ.JVM w ()
learnSetupCondition opts sc cc spec prepost (MS.SetupCond_Equal loc val1 val2) = learnEqual opts sc cc spec loc prepost val1 val2
learnSetupCondition _opts sc cc _ prepost (MS.SetupCond_Pred loc tm) = learnPred sc cc loc prepost (ttTerm tm)
learnSetupCondition _opts _ _ _ _ (MS.SetupCond_Ghost empty _ _ _) = absurd empty
------------------------------------------------------------------------
-- | Process a "points_to" statement from the precondition section of
-- the CrucibleSetup block. First, load the value from the address
-- indicated by 'ptr', and then match it against the pattern 'val'.
learnPointsTo ::
Options ->
SharedContext ->
JVMCrucibleContext ->
CrucibleMethodSpecIR ->
PrePost ->
JVMPointsTo ->
OverrideMatcher CJ.JVM w ()
learnPointsTo opts sc cc spec prepost pt =
jccWithBackend cc $ \bak -> do
let sym = backendGetSym bak
let tyenv = MS.csAllocations spec
let nameEnv = MS.csTypeNames spec
let jc = cc ^. jccJVMContext
globals <- OM (use overrideGlobals)
case pt of
JVMPointsToField loc ptr fid (Just val) ->
do ty <- typeOfSetupValue cc tyenv nameEnv val
rval <- resolveAllocIndexJVM ptr
dyn <- liftIO $ CJ.doFieldLoad bak globals rval fid
v <- liftIO $ projectJVMVal bak ty ("field load " ++ J.fieldIdName fid ++ ", " ++ show loc) dyn
matchArg opts sc cc spec prepost v ty val
JVMPointsToStatic loc fid (Just val) ->
do ty <- typeOfSetupValue cc tyenv nameEnv val
dyn <- liftIO $ CJ.doStaticFieldLoad bak jc globals fid
v <- liftIO $ projectJVMVal bak ty ("static field load " ++ J.fieldIdName fid ++ ", " ++ show loc) dyn
matchArg opts sc cc spec prepost v ty val
JVMPointsToElem loc ptr idx (Just val) ->
do ty <- typeOfSetupValue cc tyenv nameEnv val
rval <- resolveAllocIndexJVM ptr
dyn <- liftIO $ CJ.doArrayLoad bak globals rval idx
v <- liftIO $ projectJVMVal bak ty ("array load " ++ show idx ++ ", " ++ show loc) dyn
matchArg opts sc cc spec prepost v ty val
JVMPointsToArray loc ptr (Just tt) ->
do (len, ety) <-
case ttIsMono (ttType tt) of
Nothing -> fail "jvm_array_is: invalid polymorphic value"
Just cty ->
case Cryptol.tIsSeq cty of
Nothing -> fail "jvm_array_is: expected array type"
Just (lty, ety) ->
case Cryptol.tIsNum lty of
Nothing -> fail "jvm_array_is: expected finite-sized array"
Just len -> pure (len, ety)
jty <-
case toJVMType (Cryptol.evalValType mempty ety) of
Nothing -> fail "jvm_array_is: invalid element type"
Just jty -> pure jty
rval <- resolveAllocIndexJVM ptr
let tval = Cryptol.evalValType mempty ety
let
load idx =
do dyn <- liftIO $ CJ.doArrayLoad bak globals rval idx
let msg = "array load " ++ show idx ++ ", " ++ show loc
jval <- liftIO $ projectJVMVal bak jty msg dyn
let failMsg = StructuralMismatch (ppJVMVal jval) mempty (Just jty) jty -- REVISIT
valueToSC sym loc failMsg tval jval
when (len > toInteger (maxBound :: Int)) $ fail "jvm_array_is: array length too long"
ety_tm <- liftIO $ Cryptol.importType sc Cryptol.emptyEnv ety
ts <- traverse load [0 .. fromInteger len - 1]
realTerm <- liftIO $ scVector sc ety_tm ts
matchTerm sc cc loc prepost realTerm (ttTerm tt)
      -- If the right-hand-side is 'Nothing', this indicates a "modifies" declaration,
-- which should probably not appear in the pre-state section, and has no effect.
_ -> pure ()
------------------------------------------------------------------------
stateCond :: PrePost -> String
stateCond PreState = "precondition"
stateCond PostState = "postcondition"
-- | Process a "crucible_equal" statement from the precondition
-- section of the CrucibleSetup block.
learnEqual ::
Options ->
SharedContext ->
JVMCrucibleContext ->
CrucibleMethodSpecIR ->
W4.ProgramLoc ->
PrePost ->
SetupValue {- ^ first value to compare -} ->
SetupValue {- ^ second value to compare -} ->
OverrideMatcher CJ.JVM w ()
learnEqual opts sc cc spec loc prepost v1 v2 =
do val1 <- resolveSetupValueJVM opts cc sc spec v1
val2 <- resolveSetupValueJVM opts cc sc spec v2
p <- liftIO (equalValsPred cc val1 val2)
let name = "equality " ++ stateCond prepost
addAssert p (Crucible.SimError loc (Crucible.AssertFailureSimError name ""))
-- | Process a "crucible_precond" statement from the precondition
-- section of the CrucibleSetup block.
learnPred ::
SharedContext ->
JVMCrucibleContext ->
W4.ProgramLoc ->
PrePost ->
Term {- ^ the precondition to learn -} ->
OverrideMatcher CJ.JVM w ()
learnPred sc cc loc prepost t =
do s <- OM (use termSub)
u <- liftIO $ scInstantiateExt sc s t
p <- liftIO $ resolveBoolTerm (cc ^. jccSym) u
addAssert p (Crucible.SimError loc (Crucible.AssertFailureSimError (stateCond prepost) ""))
------------------------------------------------------------------------
-- TODO: replace (W4.ProgramLoc, J.Type) by some allocation datatype
-- that includes constructors for object allocations and array
-- allocations (with length).
-- | Perform an allocation as indicated by a 'crucible_alloc'
-- statement from the postcondition section.
executeAllocation ::
Options ->
JVMCrucibleContext ->
(AllocIndex, (W4.ProgramLoc, Allocation)) ->
OverrideMatcher CJ.JVM w ()
executeAllocation opts cc (var, (loc, alloc)) =
jccWithBackend cc $ \bak ->
do liftIO $ printOutLn opts Debug $ unwords ["executeAllocation:", show var, show alloc]
let jc = cc^.jccJVMContext
let halloc = cc^.jccHandleAllocator
globals <- OM (use overrideGlobals)
let mut = True -- allocate objects/arrays from post-state as mutable
(ptr, globals') <-
case alloc of
AllocObject cname ->
liftIO $ CJ.doAllocateObject bak halloc jc cname (const mut) globals
AllocArray len elemTy ->
liftIO $ CJ.doAllocateArray bak halloc jc len elemTy (const mut) globals
OM (overrideGlobals .= globals')
assignVar cc loc var ptr
------------------------------------------------------------------------
-- | Update the simulator state based on the postconditions from the
-- procedure specification.
executeSetupCondition ::
Options ->
SharedContext ->
JVMCrucibleContext ->
CrucibleMethodSpecIR ->
SetupCondition ->
OverrideMatcher CJ.JVM w ()
executeSetupCondition opts sc cc spec (MS.SetupCond_Equal _loc val1 val2) = executeEqual opts sc cc spec val1 val2
executeSetupCondition _opts sc cc _ (MS.SetupCond_Pred _loc tm) = executePred sc cc tm
executeSetupCondition _ _ _ _ (MS.SetupCond_Ghost empty _ _ _) = absurd empty
------------------------------------------------------------------------
-- | Process a "points_to" statement from the postcondition section of
-- the CrucibleSetup block. First we compute the value indicated by
-- 'val', and then write it to the address indicated by 'ptr'.
executePointsTo ::
Options ->
SharedContext ->
JVMCrucibleContext ->
CrucibleMethodSpecIR ->
JVMPointsTo ->
OverrideMatcher CJ.JVM w ()
executePointsTo opts sc cc spec pt =
jccWithBackend cc $ \bak -> do
let sym = backendGetSym bak
globals <- OM (use overrideGlobals)
let jc = cc ^. jccJVMContext
case pt of
JVMPointsToField _loc ptr fid val ->
do dyn <- maybe (pure CJ.unassignedJVMValue) (injectSetupValueJVM sym opts cc sc spec) val
rval <- resolveAllocIndexJVM ptr
globals' <- liftIO $ CJ.doFieldStore bak globals rval fid dyn
OM (overrideGlobals .= globals')
JVMPointsToStatic _loc fid val ->
do dyn <- maybe (pure CJ.unassignedJVMValue) (injectSetupValueJVM sym opts cc sc spec) val
globals' <- liftIO $ CJ.doStaticFieldStore bak jc globals fid dyn
OM (overrideGlobals .= globals')
JVMPointsToElem _loc ptr idx val ->
do dyn <- maybe (pure CJ.unassignedJVMValue) (injectSetupValueJVM sym opts cc sc spec) val
rval <- resolveAllocIndexJVM ptr
globals' <- liftIO $ CJ.doArrayStore bak globals rval idx dyn
OM (overrideGlobals .= globals')
JVMPointsToArray _loc ptr (Just tt) ->
do (_ety, tts) <-
liftIO (destVecTypedTerm sc tt) >>=
\case
Nothing -> fail "jvm_array_is: not a monomorphic sequence type"
Just x -> pure x
rval <- resolveAllocIndexJVM ptr
vs <- traverse (injectSetupValueJVM sym opts cc sc spec . MS.SetupTerm) tts
globals' <- liftIO $ doEntireArrayStore bak globals rval vs
OM (overrideGlobals .= globals')
JVMPointsToArray _loc ptr Nothing ->
case Map.lookup ptr (MS.csAllocations spec) of
Just (_, AllocArray len _) ->
do let vs = replicate len CJ.unassignedJVMValue
rval <- resolveAllocIndexJVM ptr
globals' <- liftIO $ doEntireArrayStore bak globals rval vs
OM (overrideGlobals .= globals')
_ -> panic "JVMSetup" ["executePointsTo", "expected array allocation"]
injectSetupValueJVM ::
Sym ->
Options ->
JVMCrucibleContext ->
SharedContext ->
CrucibleMethodSpecIR ->
SetupValue ->
OverrideMatcher CJ.JVM w (Crucible.RegValue Sym CJ.JVMValueType)
injectSetupValueJVM sym opts cc sc spec val =
injectJVMVal sym <$> resolveSetupValueJVM opts cc sc spec val
doEntireArrayStore ::
(Crucible.IsSymBackend sym bak) =>
bak ->
Crucible.SymGlobalState sym ->
Crucible.RegValue sym CJ.JVMRefType ->
[Crucible.RegValue sym CJ.JVMValueType] ->
IO (Crucible.SymGlobalState sym)
doEntireArrayStore bak glob ref vs = foldM store glob (zip [0..] vs)
where store g (i, v) = CJ.doArrayStore bak g ref i v
-- | Given a 'TypedTerm' with a vector type, return the element type
-- along with a list of its projected components. Return 'Nothing' if
-- the 'TypedTerm' does not have a vector type.
destVecTypedTerm :: SharedContext -> TypedTerm -> IO (Maybe (Cryptol.Type, [TypedTerm]))
destVecTypedTerm sc (TypedTerm ttp t) =
case asVec of
Nothing -> pure Nothing
Just (len, ety) ->
do len_tm <- scNat sc (fromInteger len)
ty_tm <- Cryptol.importType sc Cryptol.emptyEnv ety
idxs <- traverse (scNat sc) (map fromInteger [0 .. len-1])
ts <- traverse (scAt sc len_tm ty_tm t) idxs
pure $ Just (ety, map (TypedTerm (TypedTermSchema (Cryptol.tMono ety))) ts)
where
asVec =
do ty <- ttIsMono ttp
(n, a) <- Cryptol.tIsSeq ty
n' <- Cryptol.tIsNum n
Just (n', a)
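-- A minimal usage sketch (a hypothetical caller, mirroring the call in
-- 'executePointsTo' above): a term @tt@ of Cryptol type @[4][8]@
-- destructures into the element type @[8]@ and four projected components.
--
-- @
-- do mb <- destVecTypedTerm sc tt
--    case mb of
--      Nothing         -> fail "not a monomorphic sequence type"
--      Just (ety, tts) -> ...   -- ety is [8], length tts == 4
-- @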
------------------------------------------------------------------------
-- | Process a "crucible_equal" statement from the postcondition
-- section of the CrucibleSetup block.
executeEqual ::
Options ->
SharedContext ->
JVMCrucibleContext ->
CrucibleMethodSpecIR ->
SetupValue {- ^ first value to compare -} ->
SetupValue {- ^ second value to compare -} ->
OverrideMatcher CJ.JVM w ()
executeEqual opts sc cc spec v1 v2 =
do val1 <- resolveSetupValueJVM opts cc sc spec v1
val2 <- resolveSetupValueJVM opts cc sc spec v2
p <- liftIO (equalValsPred cc val1 val2)
addAssume p
-- | Process a "crucible_postcond" statement from the postcondition
-- section of the CrucibleSetup block.
executePred ::
SharedContext ->
JVMCrucibleContext ->
TypedTerm {- ^ the term to assert as a postcondition -} ->
OverrideMatcher CJ.JVM w ()
executePred sc cc tt =
do s <- OM (use termSub)
t <- liftIO $ scInstantiateExt sc s (ttTerm tt)
p <- liftIO $ resolveBoolTerm (cc ^. jccSym) t
addAssume p
------------------------------------------------------------------------
-- | Map the given substitution over all 'SetupTerm' constructors in
-- the given 'SetupValue'.
instantiateSetupValue ::
SharedContext ->
Map VarIndex Term ->
SetupValue ->
IO SetupValue
instantiateSetupValue sc s v =
case v of
MS.SetupVar _ -> return v
MS.SetupTerm tt -> MS.SetupTerm <$> doTerm tt
MS.SetupNull () -> return v
MS.SetupGlobal empty _ -> absurd empty
MS.SetupStruct empty _ _ -> absurd empty
MS.SetupArray empty _ -> absurd empty
MS.SetupElem empty _ _ -> absurd empty
MS.SetupField empty _ _ -> absurd empty
MS.SetupCast empty _ _ -> absurd empty
MS.SetupUnion empty _ _ -> absurd empty
MS.SetupGlobalInitializer empty _ -> absurd empty
where
doTerm (TypedTerm schema t) = TypedTerm schema <$> scInstantiateExt sc s t
------------------------------------------------------------------------
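-- | A descriptive note (added for clarity): look up the JVM reference
-- previously bound to the given allocation index in the prestate
-- substitution, and panic if the index is unresolved.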
resolveAllocIndexJVM :: AllocIndex -> OverrideMatcher CJ.JVM w JVMRefVal
resolveAllocIndexJVM i =
do m <- OM (use setupValueSub)
case Map.lookup i m of
Just rval -> pure rval
Nothing ->
panic "JVMSetup" ["resolveAllocIndexJVM", "Unresolved prestate variable:" ++ show i]
resolveSetupValueJVM ::
Options ->
JVMCrucibleContext ->
SharedContext ->
CrucibleMethodSpecIR ->
SetupValue ->
OverrideMatcher CJ.JVM w JVMVal
resolveSetupValueJVM opts cc sc spec sval =
do m <- OM (use setupValueSub)
s <- OM (use termSub)
let tyenv = MS.csAllocations spec
nameEnv = MS.csTypeNames spec
sval' <- liftIO $ instantiateSetupValue sc s sval
liftIO $ resolveSetupVal cc m tyenv nameEnv sval' `X.catch` handleException opts
typeOfSetupValueJVM ::
JVMCrucibleContext ->
CrucibleMethodSpecIR ->
SetupValue ->
OverrideMatcher CJ.JVM w J.Type
typeOfSetupValueJVM cc spec sval =
do let tyenv = MS.csAllocations spec
nameEnv = MS.csTypeNames spec
liftIO $ typeOfSetupValue cc tyenv nameEnv sval
injectJVMVal :: Sym -> JVMVal -> Crucible.RegValue Sym CJ.JVMValueType
injectJVMVal sym jv =
case jv of
RVal x -> Crucible.injectVariant sym W4.knownRepr CJ.tagR x
IVal x -> Crucible.injectVariant sym W4.knownRepr CJ.tagI x
LVal x -> Crucible.injectVariant sym W4.knownRepr CJ.tagL x
projectJVMVal :: OnlineSolver solver =>
Backend solver -> J.Type -> String -> Crucible.RegValue Sym CJ.JVMValueType -> IO JVMVal
projectJVMVal bak ty msg' v =
case ty of
J.BooleanType -> IVal <$> proj v CJ.tagI
J.ByteType -> IVal <$> proj v CJ.tagI
J.CharType -> IVal <$> proj v CJ.tagI
J.ShortType -> IVal <$> proj v CJ.tagI
J.IntType -> IVal <$> proj v CJ.tagI
J.LongType -> LVal <$> proj v CJ.tagL
J.FloatType -> err -- FIXME
J.DoubleType -> err -- FIXME
J.ArrayType{} -> RVal <$> proj v CJ.tagR
J.ClassType{} -> RVal <$> proj v CJ.tagR
where
proj ::
forall tp.
Crucible.RegValue Sym CJ.JVMValueType ->
Ctx.Index CJ.JVMValueCtx tp ->
IO (Crucible.RegValue Sym tp)
proj val idx = Crucible.readPartExpr bak (Crucible.unVB (val Ctx.! idx)) msg
msg = Crucible.GenericSimError $ "Ill-formed value for type " ++ show ty ++ " (" ++ msg' ++ ")"
err = Crucible.addFailedAssertion bak msg
decodeJVMVal :: J.Type -> Crucible.AnyValue Sym -> Maybe JVMVal
decodeJVMVal ty v =
case ty of
J.BooleanType -> go v CJ.intRepr IVal
J.ByteType -> go v CJ.intRepr IVal
J.CharType -> go v CJ.intRepr IVal
J.ShortType -> go v CJ.intRepr IVal
J.IntType -> go @CJ.JVMIntType v CJ.intRepr IVal
J.LongType -> go @CJ.JVMLongType v CJ.longRepr LVal
J.FloatType -> Nothing -- FIXME
J.DoubleType -> Nothing -- FIXME
J.ArrayType{} -> go @CJ.JVMRefType v CJ.refRepr RVal
J.ClassType{} -> go @CJ.JVMRefType v CJ.refRepr RVal
where
go ::
forall t.
Crucible.AnyValue Sym ->
Crucible.TypeRepr t ->
(Crucible.RegValue Sym t -> JVMVal) ->
Maybe JVMVal
go (Crucible.AnyValue repr rv) repr' k =
case testEquality repr repr' of
Just Refl -> Just (k rv)
Nothing -> Nothing
| GaloisInc/saw-script | src/SAWScript/Crucible/JVM/Override.hs | bsd-3-clause | 43,174 | 337 | 30 | 11,748 | 9,604 | 4,993 | 4,611 | -1 | -1 |
{-# LANGUAGE TemplateHaskell #-}
module Client.MenuFrameworkS where
import Control.Lens (makeLenses)
import qualified Data.Vector as V
import Client.MenuItem
import Types
makeLenses ''MenuFrameworkS
newMenuFrameworkS :: MenuFrameworkS
newMenuFrameworkS = MenuFrameworkS
{ _mfX = 0
, _mfY = 0
, _mfCursor = 0
, _mfNItems = 0
, _mfNSlots = 0
, _mfItems = V.empty
, _mfStatusBar = Nothing
, _mfCursorDraw = Nothing
}
| ksaveljev/hake-2 | src/Client/MenuFrameworkS.hs | bsd-3-clause | 521 | 0 | 7 | 167 | 103 | 65 | 38 | 17 | 1 |
module GeekBar.Layout (layout) where
import GeekBar.Node
import GeekBar.Props
import Control.Lens
import qualified Control.Monad.State.Strict as ST
-- | Layout nodes
-- Updates content{x,y} to their correct values based on the position of the
-- upper-left corner and the properties of the node.
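-- For example (illustrative), @layout (0, 0) root@ assigns absolute
-- coordinates to every node in the tree rooted at @root@.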
layout :: (Float, Float) -> Node -> Node
layout (x', y') (NLeaf p) = NLeaf (p & content . x .~ x'
& content . y .~ y')
layout (x', y') (NBranch p cs) =
alignWith $ case p ^. align of
AlignHor -> alignHor
AlignVer -> alignVer
AlignAbs -> alignAbs
where
-- | Align children with align function
alignWith :: (Node -> ST.State (Rect, Edge) Node) -> Node
alignWith a = let (cs', (r, _)) = ST.runState (mapM a cs) (Rect x' y' 0 0, Edge 0 0 0 0)
in NBranch (p & content .~ r) cs'
    -- | Horizontal alignment
-- State is (<all_content_rect>, <margin_edge>)
alignHor :: Node -> ST.State (Rect, Edge) Node
alignHor n = do
(r, m) <- ST.get
let -- x coordinate for contents of current node
x'' = r ^. x
+ r ^. width
+ max (m ^. right) (n ^. props.margin.left)
+ n ^. props.border.left
+ n ^. props.padding.left
          -- y coordinate for contents of current node
y'' = r ^. y
+ n ^. props.margin.top
+ n ^. props.border.top
+ n ^. props.padding.top
          -- layout current node with regard to the calculated coordinates
n'' = layout (x'', y'') n
          -- new width for content area
w = x''
- r ^. x
+ n'' ^. props.content.width
+ n'' ^. props.padding.right
+ n'' ^. props.border.right
          -- new height for content area
h = y''
- r ^. y
+ n'' ^. props.content.height
+ n'' ^. props.padding.bottom
+ n'' ^. props.border.bottom
+ n'' ^. props.margin.bottom
ST.put (r & width .~ w & height .~ max h (r ^. height)
,m & right .~ n'' ^. props.margin.right)
return n''
    -- | Vertical alignment
alignVer :: Node -> ST.State (Rect, Edge) Node
alignVer n = do
(r, m) <- ST.get
let x'' = r ^. x
+ n ^. props.margin.left
+ n ^. props.border.left
+ n ^. props.padding.left
y'' = r ^. y
+ r ^. height
+ max (m ^. bottom) (n ^. props.margin.top)
+ n ^. props.border.top
+ n ^. props.padding.top
n'' = layout (x'', y'') n
w = x''
- r ^. x
+ n'' ^. props.content.width
+ n'' ^. props.padding.right
+ n'' ^. props.border.right
+ n'' ^. props.margin.right
h = y''
- r ^. y
+ n'' ^. props.content.height
+ n'' ^. props.padding.bottom
+ n'' ^. props.border.bottom
ST.put (r & width .~ max w (r ^. width) & height .~ h
,m & bottom .~ n'' ^. props.margin.bottom)
return n''
    -- | Absolute alignment
alignAbs :: Node -> ST.State (Rect, Edge) Node
alignAbs n = do
(r, m) <- ST.get
let x'' = r ^. x
+ n ^. props.margin.left
+ n ^. props.border.left
+ n ^. props.padding.left
y'' = r ^. y
+ n ^. props.margin.top
+ n ^. props.border.top
+ n ^. props.padding.top
n'' = layout (x'', y'') n
w = x''
- r ^. x
+ n'' ^. props.content.width
+ n'' ^. props.padding.right
+ n'' ^. props.border.right
+ n'' ^. props.margin.right
h = y''
- r ^. y
+ n'' ^. props.content.height
+ n'' ^. props.padding.bottom
+ n'' ^. props.border.bottom
+ n'' ^. props.margin.bottom
ST.put (r & width .~ max w (r ^. width) & height .~ max h (r ^. height), m)
return n''
| aslpavel/geekbar | lib/GeekBar/Layout.hs | bsd-3-clause | 4,317 | 0 | 21 | 1,836 | 1,333 | 693 | 640 | -1 | -1 |
module TypedPerl.PerlRecs (
unionRec
) where
import qualified Data.Map as M
import TypedPerl.Types
unionRec :: Ord k => M.Map k PerlType -> PerlRecs k -> PerlRecs k
unionRec m r = r {recMap = (M.union (recMap r) m)}
| hiratara/TypedPerl | src/TypedPerl/PerlRecs.hs | bsd-3-clause | 221 | 0 | 10 | 43 | 91 | 49 | 42 | 6 | 1 |
module Main where
import qualified EFA.Example.Topology.Tripod.Given as TripodGiven
import qualified EFA.Flow.State.Absolute as StateEqSys
import qualified EFA.Flow.State.Quantity as StateFlow
import qualified EFA.Flow.Sequence.Absolute as EqSys
import qualified EFA.Flow.Draw as Draw
import EFA.Utility.Async (concurrentlyMany_)
import Data.Monoid (mempty)
main :: IO ()
main = do
let seqFlowGraph =
EqSys.solve TripodGiven.seqFlowGraph TripodGiven.equationSystem
stateFlowGraph =
StateFlow.graphFromCumResult $
StateFlow.fromSequenceFlowResult False seqFlowGraph
concurrentlyMany_ $ map Draw.xterm $
Draw.seqFlowGraph Draw.optionsDefault seqFlowGraph :
Draw.stateFlowGraph Draw.optionsDefault stateFlowGraph :
Draw.stateFlowGraph Draw.optionsDefault
(StateEqSys.solve stateFlowGraph mempty) :
[]
| energyflowanalysis/efa-2.1 | demo/stateFlow/Main.hs | bsd-3-clause | 883 | 0 | 13 | 159 | 195 | 113 | 82 | 21 | 1 |
module Day21 (part1,part2,test1,test2,testInstructions1, doInstruction, reverseDoInstruction, Instruction(..)) where
import Data.List
import Data.Maybe
import Text.Trifecta
import Control.Applicative
data Instruction = SwapPos Int Int
| SwapLet Char Char
| RotateR Int
| RotateL Int
| RotateRBase Char
| Reverse Int Int
| Move Int Int
deriving (Eq, Show)
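-- For example (illustrative): "swap position 4 with position 0" parses to
-- SwapPos 4 0, and "rotate based on position of letter b" to RotateRBase 'b'.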
int :: Parser Int
int = fromInteger <$> integer
parseInput :: String -> Result [Instruction]
parseInput = parseString (some (instructionParser <* skipOptional windowsNewLine)) mempty
where windowsNewLine = const () <$ skipOptional newline <*> skipOptional (char '\r')
instructionParser :: Parser Instruction
instructionParser = try moveParser
<|> try reverseParser
<|> try rotateRBaseParser
<|> try rotateRParser
<|> try rotateLParser
<|> try swapLetParser
<|> swapPosParser
moveParser :: Parser Instruction
moveParser = string "move position " *> (Move <$> int <*> secondPart)
where secondPart = string "to position " *> int
reverseParser :: Parser Instruction
reverseParser = string "reverse positions " *> (Reverse <$> int <*> secondPart)
where secondPart = string "through " *> int
rotateRBaseParser :: Parser Instruction
rotateRBaseParser = string "rotate based on position of letter " *> (RotateRBase <$> anyChar)
rotateRParser :: Parser Instruction
rotateRParser = string "rotate right " *> (RotateR <$> int <* (string "step" <* skipOptional (char 's')))
rotateLParser :: Parser Instruction
rotateLParser = string "rotate left " *> (RotateL <$> int <* (string "step" <* skipOptional (char 's')))
swapLetParser :: Parser Instruction
swapLetParser = string "swap letter " *> (SwapLet <$> anyChar <* whiteSpace <*> secondPart)
where
secondPart :: Parser Char
secondPart = string "with letter " *> anyChar
swapPosParser :: Parser Instruction
swapPosParser = string "swap position " *> (SwapPos <$> int <*> secondPart)
where
secondPart = string "with position " *> int
doInstruction :: String -> Instruction -> String
doInstruction s (SwapPos x y) = take (min x y) s
++ [larger]
++ take (max x y - min x y - 1) (drop (min x y+1) s)
++ [smaller]
++ drop (max x y + 1) s
where
smaller = s !! min x y
larger = s !! max x y
doInstruction s (SwapLet x y) = doInstruction s (SwapPos xi yi)
where
xi = fromJust $ elemIndex x s
yi = fromJust $ elemIndex y s
doInstruction s (Reverse x y) = take x s
++ reverse (take (y-x + 1) (drop x s))
++ drop (y+1) s
doInstruction s (RotateL x) = drop (x `mod` length s) s ++ take (x `mod` length s) s
doInstruction s (RotateR x) = reverse $ doInstruction (reverse s) (RotateL x)
doInstruction s (Move x y) = take y removed ++ [s !! x] ++ drop y removed
  where removed = take x s ++ drop (x+1) s
doInstruction s (RotateRBase c) = doInstruction s (RotateR rotateTimes)
where
ci = fromJust $ elemIndex c s
rotateTimes
| ci >= 4 = 2 + ci
| otherwise = 1 + ci
fromSuccess :: Result x -> x
fromSuccess (Success x) = x
fromSuccess (Failure x) = error (show x)
reverseDoInstruction :: Instruction -> String -> String
reverseDoInstruction (SwapPos x y) s = doInstruction s (SwapPos y x)
reverseDoInstruction (SwapLet x y) s = doInstruction s (SwapLet y x)
reverseDoInstruction (Reverse x y) s = doInstruction s (Reverse x y)
reverseDoInstruction (RotateL x) s = doInstruction s (RotateR x)
reverseDoInstruction (RotateR x) s = doInstruction s (RotateL x)
reverseDoInstruction (Move x y) s = doInstruction s (Move y x)
-- Non-deterministic for length 5!
reverseDoInstruction (RotateRBase c) s = fromJust
$ find (\a -> doInstruction a (RotateRBase c) == s)
$ map (doInstruction s . RotateR) [0..]
part1Solution = last . part1 input1 <$> readFile "./data/Day21.txt"
part2Solution = head . part2 input2 <$> readFile "./data/Day21.txt"
part1 l s = scanl' doInstruction l $ fromSuccess $ parseInput s
part2 l s = scanr reverseDoInstruction l $ fromSuccess $ parseInput s
test1 = "abcde"
test2 = "decab"
testInstructions1 = "swap position 4 with position 0\nswap letter d with letter b\nreverse positions 0 through 4\nrotate left 1 step\nmove position 1 to position 4\nmove position 3 to position 0\nrotate based on position of letter b\nrotate based on position of letter d"
input1 = "abcdefgh"
input2 = "fbgdceah"
| z0isch/aoc2016 | src/Day21.hs | bsd-3-clause | 4,864 | 2 | 27 | 1,309 | 1,577 | 789 | 788 | 92 | 1 |
module Web.XING
(
module Web.XING.Auth
, module Web.XING.API
, module Web.XING.Types
-- * Calls
, module Web.XING.Calls.IdCard
, module Web.XING.Calls.User
-- * Common used functions (re-exports)
, liftIO
, withManager
) where
import Web.XING.Auth
import Web.XING.API
import Web.XING.Types
import Web.XING.Calls.IdCard
import Web.XING.Calls.User
import Control.Monad.IO.Class (MonadIO (liftIO))
import Network.HTTP.Conduit (withManager)
| JanAhrens/xing-api-haskell | lib/Web/XING.hs | bsd-3-clause | 485 | 0 | 6 | 97 | 112 | 78 | 34 | 16 | 0 |
{-# OPTIONS_GHC -Wall -fwarn-tabs #-}
{-# LANGUAGE CPP, DeriveDataTypeable #-}
----------------------------------------------------------------
-- 2011.04.17
-- |
-- Module : Control.Concurrent.STM.TBMChan1
-- Copyright : Copyright (c) 2011 wren gayle romano
-- License : BSD
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : non-portable (GHC STM, DeriveDataTypeable)
--
-- A version of "Control.Concurrent.STM.TChan" where the queue is
-- bounded in length and closeable. This combines the abilities of
-- "Control.Concurrent.STM.TBChan" and "Control.Concurrent.STM.TMChan".
----------------------------------------------------------------
module Control.Concurrent.STM.TBMChan1
(
-- * The TBMChan type
TBMChan()
-- ** Creating TBMChans
, newTBMChan
, newTBMChanIO
-- I don't know how to define dupTBMChan with the correct semantics
-- ** Reading from TBMChans
, readTBMChan
, tryReadTBMChan
, peekTBMChan
, tryPeekTBMChan
-- ** Writing to TBMChans
, writeTBMChan
, tryWriteTBMChan
, unGetTBMChan
-- ** Closing TBMChans
, closeTBMChan
-- ** Predicates
, isClosedTBMChan
, isEmptyTBMChan
, isFullTBMChan
-- ** Other functionality
, freeSlotsTBMChan
) where
import Data.Typeable (Typeable)
import Control.Applicative ((<$>))
import Control.Monad.STM (STM, retry)
import Control.Concurrent.STM.TVar.Compat
import Control.Concurrent.STM.TChan.Compat -- N.B., GHC only
-- N.B., we need a Custom cabal build-type for this to work.
#ifdef __HADDOCK__
import Control.Monad.STM (atomically)
import System.IO.Unsafe (unsafePerformIO)
#endif
----------------------------------------------------------------
-- | @TBMChan@ is an abstract type representing a bounded closeable
-- FIFO channel.
data TBMChan a = TBMChan !(TVar Bool) !(TVar Int) !(TChan a)
deriving Typeable
-- | Build and return a new instance of @TBMChan@ with the given
-- capacity. /N.B./, we do not verify the capacity is positive, but
-- if it is non-positive then 'writeTBMChan' will always retry and
-- 'isFullTBMChan' will always be true.
newTBMChan :: Int -> STM (TBMChan a)
newTBMChan n = do
closed <- newTVar False
limit <- newTVar n
chan <- newTChan
return (TBMChan closed limit chan)
-- | @IO@ version of 'newTBMChan'. This is useful for creating
-- top-level @TBMChan@s using 'unsafePerformIO', because using
-- 'atomically' inside 'unsafePerformIO' isn't possible.
newTBMChanIO :: Int -> IO (TBMChan a)
newTBMChanIO n = do
closed <- newTVarIO False
limit <- newTVarIO n
chan <- newTChanIO
return (TBMChan closed limit chan)
-- | Read the next value from the @TBMChan@, retrying if the channel
-- is empty (and not closed). We return @Nothing@ immediately if
-- the channel is closed and empty.
readTBMChan :: TBMChan a -> STM (Maybe a)
readTBMChan (TBMChan closed limit chan) = do
b <- isEmptyTChan chan
b' <- readTVar closed
if b && b'
then return Nothing
else do
x <- readTChan chan
modifyTVar' limit (1 +)
return (Just x)
-- | A version of 'readTBMChan' which does not retry. Instead it
-- returns @Just Nothing@ if the channel is open but no value is
-- available; it still returns @Nothing@ if the channel is closed
-- and empty.
tryReadTBMChan :: TBMChan a -> STM (Maybe (Maybe a))
tryReadTBMChan (TBMChan closed limit chan) = do
b <- isEmptyTChan chan
b' <- readTVar closed
if b && b'
then return Nothing
else do
mx <- tryReadTChan chan
case mx of
Nothing -> return (Just Nothing)
Just _x -> do
modifyTVar' limit (1 +)
return (Just mx)
-- | Get the next value from the @TBMChan@ without removing it,
-- retrying if the channel is empty.
peekTBMChan :: TBMChan a -> STM (Maybe a)
peekTBMChan (TBMChan closed _limit chan) = do
b <- isEmptyTChan chan
b' <- readTVar closed
if b && b'
then return Nothing
else Just <$> peekTChan chan
-- | A version of 'peekTBMChan' which does not retry. Instead it
-- returns @Just Nothing@ if the channel is open but no value is
-- available; it still returns @Nothing@ if the channel is closed
-- and empty.
tryPeekTBMChan :: TBMChan a -> STM (Maybe (Maybe a))
tryPeekTBMChan (TBMChan closed _limit chan) = do
b <- isEmptyTChan chan
b' <- readTVar closed
if b && b'
then return Nothing
else Just <$> tryPeekTChan chan
-- | Write a value to a @TBMChan@, retrying if the channel is full.
-- If the channel is closed then the value is silently discarded.
-- Use 'isClosedTBMChan' to determine if the channel is closed
-- before writing, as needed.
writeTBMChan :: TBMChan a -> a -> STM ()
writeTBMChan self@(TBMChan closed limit chan) x = do
b <- readTVar closed
if b
then return () -- Discard silently
else do
b' <- isFullTBMChan self
if b'
then retry
else do
writeTChan chan x
modifyTVar' limit (subtract 1)
-- | A version of 'writeTBMChan' which does not retry. Returns @Just
-- True@ if the value was successfully written, @Just False@ if it
-- could not be written (but the channel was open), and @Nothing@
-- if it was discarded (i.e., the channel was closed).
tryWriteTBMChan :: TBMChan a -> a -> STM (Maybe Bool)
tryWriteTBMChan self@(TBMChan closed limit chan) x = do
b <- readTVar closed
if b
then return Nothing
else do
b' <- isFullTBMChan self
if b'
then return (Just False)
else do
writeTChan chan x
modifyTVar' limit (subtract 1)
return (Just True)
-- | Put a data item back onto a channel, where it will be the next
-- item read. If the channel is closed then the value is silently
-- discarded; you can use 'peekTBMChan' to circumvent this in certain
-- circumstances. /N.B./, this could allow the channel to temporarily
-- become longer than the specified limit, which is necessary to
-- ensure that the item is indeed the next one read.
unGetTBMChan :: TBMChan a -> a -> STM ()
unGetTBMChan (TBMChan closed limit chan) x = do
b <- readTVar closed
if b
then return () -- Discard silently
else do
unGetTChan chan x
modifyTVar' limit (subtract 1)
-- | Closes the @TBMChan@, preventing any further writes.
closeTBMChan :: TBMChan a -> STM ()
closeTBMChan (TBMChan closed _limit _chan) =
writeTVar closed True
-- | Returns @True@ if the supplied @TBMChan@ has been closed.
isClosedTBMChan :: TBMChan a -> STM Bool
isClosedTBMChan (TBMChan closed _limit _chan) =
readTVar closed
-- | Returns @True@ if the supplied @TBMChan@ is empty (i.e., has
-- no elements). /N.B./, a @TBMChan@ can be both ``empty'' and
-- ``full'' at the same time, if the initial limit was non-positive.
isEmptyTBMChan :: TBMChan a -> STM Bool
isEmptyTBMChan (TBMChan _closed _limit chan) =
isEmptyTChan chan
-- | Returns @True@ if the supplied @TBMChan@ is full (i.e., is
-- over its limit). /N.B./, a @TBMChan@ can be both ``empty'' and
-- ``full'' at the same time, if the initial limit was non-positive.
-- /N.B./, a @TBMChan@ may still be full after reading, if
-- 'unGetTBMChan' was used to go over the initial limit.
isFullTBMChan :: TBMChan a -> STM Bool
isFullTBMChan (TBMChan _closed limit _chan) = do
n <- readTVar limit
return $! n <= 0
-- | Return the exact number of free slots. The result can be
-- negative if the initial limit was negative or if 'unGetTBMChan'
-- was used to go over the initial limit.
freeSlotsTBMChan :: TBMChan a -> STM Int
freeSlotsTBMChan (TBMChan _closed limit _chan) =
readTVar limit
----------------------------------------------------------------
----------------------------------------------------------- fin.
| bitemyapp/stm-chans | test/bench/Control/Concurrent/STM/TBMChan1.hs | bsd-3-clause | 8,127 | 0 | 17 | 2,004 | 1,363 | 703 | 660 | 133 | 3 |
{-# LANGUAGE RecursiveDo, RankNTypes, LambdaCase #-}
module Mire.Reactive.Main where
import Mire.Prelude
import Mire.Plugin
import Mire.Data.Flow
import Mire.Reactive.Utils
import Mire.Reactive.STM
import Mire.Hint
import Mire.Pipes
import Reactive.Banana
import Reactive.Banana.Frameworks
import System.IO
import Network
import Control.Concurrent
import System.IO.Error
import qualified Data.Text as T
import qualified Data.Text.IO as T
import qualified Data.Text.Encoding as T
import qualified Data.ByteString.Char8 as BS
-- TODO replace setupMain with setupNetwork
setupNetwork :: ConfigVar -> Event Text -> Event Hotkey -> Event () -> MomentIO (Event Flow)
setupNetwork = setupMain
data Session = Session {
sessionClose :: IO (),
sessionHandle :: Handle
}
-- Loading and unloading of worlds, handling network connections.
setupMain :: ConfigVar -> Event Text -> Event Hotkey -> Event () -> MomentIO (Event Flow)
setupMain configVar eInputText eHotkey eInit = mdo
-- (Re)load config
(eConfigFail, eConfig) <- split <$> eTMVar configVar
bConfig <- stepper baseConfig eConfig
(eSession, fireSession) <- newEvent
bSession <- stepper Nothing eSession
-- Channel used to receive the network stream.
netChan <- liftIO newTChanIO
eNet <- eTChan netChan
(eFlowExtra, fireFlowExtra) <- newEvent
let simpleCommand cmd evt = (() <$ filterE (eq cmd) evt, filterE (neq cmd) evt)
-- Load config
(eReloadConfig, eNotReloadConfig) = simpleCommand "/config" eInputText
-- Load a world when no world is loaded. Otherwise the command is
-- treated as normal text.
(eWorld, eNotWorld) = split (f <$> bConfig <*> bWorld <@> eNotReloadConfig) where
f config world text =
case world of
Just _ -> Right text
Nothing -> case findWorldByActivator text config of
Nothing -> Right text
Just w -> Left w
-- Unload a world when a world is loaded. Otherwise the command is
-- treated as normal text.
(eDisconnect, eNotDisconnect) = split (f <$> bSession <@> eNotWorld) where
f h text = if text == "/dc" then Left h else Right text
eInputRest = eNotDisconnect
reactimate $ eReloadConfig $> readConfigAsync configVar
-- A world is unloaded when the connection closes.
let eWorldUnload = () <$ filterE isNothing eSession
-- The currently loaded world.
bWorld <- stepper Nothing eMaybeWorld
-- When the set of active plugins changes.
let eMaybeWorld = unionWith const (Just <$> eWorld) (Nothing <$ eWorldUnload)
eExtraPlugins = filterJust $ fmap (\case
FlowAction (ActionPlugins p) -> Just p
_ -> Nothing) eFlow'
ePlugins = activePlugins eConfig bConfig eMaybeWorld bWorld eExtraPlugins
-- Convert Net to Flow and buffer.
eNetFlow <- bufferOutput eNet
-- Input, hotkeys, output and manually added flow elements.
eFlow <- unions' [
FlowInput <$> eInputRest,
FlowHotkey <$> eHotkey,
eNetFlow,
const (flowInfo "Main" "Loading config...") <$> eReloadConfig,
const (flowInfo "Main" "Successfully loaded config.") <$> eConfig,
flowError "Main" . errorToString <$> eConfigFail,
flowInfo "Main" . ("World loaded: " <>) . worldName <$> eWorld,
const (flowInfo "Main" "World unloaded.") <$> eWorldUnload
]
eFlow' <- connectPluginNetwork eInit ePlugins eFlow >>= unions' . (:[eFlowExtra])
-- When a world loads, a connection is made and incoming data is sent
-- through the event network.
reactimate (tryConnectWorld netChan fireFlowExtra fireSession <$> eWorld)
-- Try to close the network connection manually.
reactimate (tryDisconnect fireFlowExtra <$> eDisconnect)
-- Filter data that needs to be sent to the server from the flow and try to
-- send it.
reactimate (((,) <$> bSession <@> eFlow') <$$> trySendInput)
return eFlow'
-- True when there was a world, but it does not exist anymore in the given config.
worldStoppedExisting :: Maybe World -> Config -> Bool
worldStoppedExisting mw c = maybe False (isNothing . flip findUpdatedWorld c) mw
-- Try to create a session.
-- - outChan: Channel to which the network data is sent.
-- - fireFlow: Handler used for showing messages (errors, warnings, info).
-- - fireSession: Handler used for creating or ending sessions.
-- - world: The currently active world.
tryConnectWorld :: TChan Net -> Handler Flow -> Handler (Maybe Session) -> World -> IO ()
tryConnectWorld outChan fireFlow fireSession w = do
let fireError = fireFlow . flowError "Main"
fireInfo = fireFlow . flowInfo "Main"
let showHost = worldHost w <> ":" <> tshow (worldPort w)
host = T.unpack (worldHost w)
port = PortNumber (toEnum $ worldPort w)
fireInfo ("Connecting to " <> showHost <> ".")
connected <- tryIOError (connectTo host port)
case connected of
Left _ -> do
fireError ("Could not connect to " <> showHost <> ".")
fireSession Nothing
Right h -> void $ forkIO $ do
fireInfo ("Connected to " <> showHost <> ".")
fireSession (Just (Session (hClose h) h))
runNetPipe h outChan
fireInfo ("Disconnected from " <> showHost <> ".")
fireSession Nothing
return ()
tryDisconnect :: Handler Flow -> Maybe Session -> IO ()
tryDisconnect fireFlow = maybe (return ()) doClose where
fireInfo = fireFlow . flowInfo "Main"
doClose session = do
fireInfo "Attempting to close connection."
sessionClose session
trySendInput :: (Maybe Session, Flow) -> IO ()
trySendInput (Just (Session _ h), FlowInput t) = hPutStrLnUtf8 h t -- T.hPutStrLn h t
trySendInput (Just (Session _ h), FlowMeta (MetaSend t)) = BS.hPutStr h t
trySendInput (Just (Session _ h), FlowAction (ActionSend t)) = hPutStrLnUtf8 h t -- T.hPutStrLn h t
trySendInput _ = return ()
hPutStrLnUtf8 h = BS.hPutStrLn h . T.encodeUtf8
| ellej/mire | src/Mire/Reactive/Main.hs | bsd-3-clause | 6,140 | 0 | 19 | 1,523 | 1,548 | 779 | 769 | -1 | -1 |
module BasicTest where
import qualified Config.Dyre as Dyre
import Config.Dyre.Relaunch
import Control.Monad
import System.IO
data Config = Config { message :: String, errorMsg :: Maybe String }
defaultConfig :: Config
defaultConfig = Config "Basic Test Version 1.0" Nothing
showError :: Config -> String -> Config
showError cfg msg = cfg { errorMsg = Just msg }
realMain (Config message (Just err)) = putStrLn "Compile Error"
realMain (Config message Nothing) = do
state <- restoreTextState 1
when (state < 3) $ relaunchWithTextState (state + 1) Nothing
putStrLn $ message ++ " - " ++ show state
basicTest = Dyre.wrapMain $ Dyre.defaultParams
{ Dyre.projectName = "basicTest"
, Dyre.realMain = realMain
, Dyre.showError = showError
, Dyre.statusOut = const . return $ ()
}
| bitemyapp/dyre | Tests/basic/BasicTest.hs | bsd-3-clause | 820 | 0 | 10 | 167 | 256 | 138 | 118 | 20 | 1 |
{-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE Strict #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Graphics.Vulkan.CommandBuffer where
import Graphics.Vulkan.Device( VkDevice(..)
)
import Graphics.Vulkan.Pass( VkFramebuffer(..)
, VkRenderPass(..)
)
import Data.Word( Word64
, Word32
)
import Foreign.Ptr( Ptr
, plusPtr
)
import Data.Int( Int32
)
import Data.Bits( Bits
, FiniteBits
)
import Foreign.Storable( Storable(..)
)
import Graphics.Vulkan.CommandPool( VkCommandPool(..)
)
import Data.Void( Void
)
import Graphics.Vulkan.Query( VkQueryPipelineStatisticFlags(..)
, VkQueryControlFlagBits(..)
, VkQueryControlFlags(..)
, VkQueryPipelineStatisticFlagBits(..)
)
import Graphics.Vulkan.Core( VkResult(..)
, VkBool32(..)
, VkFlags(..)
, VkStructureType(..)
)
-- ** VkCommandBufferLevel
newtype VkCommandBufferLevel = VkCommandBufferLevel Int32
deriving (Eq, Storable)
pattern VK_COMMAND_BUFFER_LEVEL_PRIMARY = VkCommandBufferLevel 0
pattern VK_COMMAND_BUFFER_LEVEL_SECONDARY = VkCommandBufferLevel 1
-- ** vkAllocateCommandBuffers
foreign import ccall "vkAllocateCommandBuffers" vkAllocateCommandBuffers ::
VkDevice ->
Ptr VkCommandBufferAllocateInfo ->
Ptr VkCommandBuffer -> IO VkResult
-- ** vkResetCommandBuffer
foreign import ccall "vkResetCommandBuffer" vkResetCommandBuffer ::
VkCommandBuffer -> VkCommandBufferResetFlags -> IO VkResult
-- ** vkFreeCommandBuffers
foreign import ccall "vkFreeCommandBuffers" vkFreeCommandBuffers ::
VkDevice -> VkCommandPool -> Word32 -> Ptr VkCommandBuffer -> IO ()
-- ** VkCommandBufferUsageFlags
newtype VkCommandBufferUsageFlagBits = VkCommandBufferUsageFlagBits VkFlags
deriving (Eq, Storable, Bits, FiniteBits)
-- | Alias for VkCommandBufferUsageFlagBits
type VkCommandBufferUsageFlags = VkCommandBufferUsageFlagBits
pattern VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT = VkCommandBufferUsageFlagBits 0x1
pattern VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT = VkCommandBufferUsageFlagBits 0x2
-- | Command buffer may be submitted/executed more than once simultaneously
pattern VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT = VkCommandBufferUsageFlagBits 0x4
data VkCommandBufferBeginInfo =
VkCommandBufferBeginInfo{ vkSType :: VkStructureType
, vkPNext :: Ptr Void
, vkFlags :: VkCommandBufferUsageFlags
, vkPInheritanceInfo :: Ptr VkCommandBufferInheritanceInfo
}
deriving (Eq)
instance Storable VkCommandBufferBeginInfo where
sizeOf ~_ = 32
alignment ~_ = 8
peek ptr = VkCommandBufferBeginInfo <$> peek (ptr `plusPtr` 0)
<*> peek (ptr `plusPtr` 8)
<*> peek (ptr `plusPtr` 16)
<*> peek (ptr `plusPtr` 24)
poke ptr poked = poke (ptr `plusPtr` 0) (vkSType (poked :: VkCommandBufferBeginInfo))
*> poke (ptr `plusPtr` 8) (vkPNext (poked :: VkCommandBufferBeginInfo))
*> poke (ptr `plusPtr` 16) (vkFlags (poked :: VkCommandBufferBeginInfo))
*> poke (ptr `plusPtr` 24) (vkPInheritanceInfo (poked :: VkCommandBufferBeginInfo))
data VkCommandBufferInheritanceInfo =
VkCommandBufferInheritanceInfo{ vkSType :: VkStructureType
, vkPNext :: Ptr Void
, vkRenderPass :: VkRenderPass
, vkSubpass :: Word32
, vkFramebuffer :: VkFramebuffer
, vkOcclusionQueryEnable :: VkBool32
, vkQueryFlags :: VkQueryControlFlags
, vkPipelineStatistics :: VkQueryPipelineStatisticFlags
}
deriving (Eq)
instance Storable VkCommandBufferInheritanceInfo where
sizeOf ~_ = 56
alignment ~_ = 8
peek ptr = VkCommandBufferInheritanceInfo <$> peek (ptr `plusPtr` 0)
<*> peek (ptr `plusPtr` 8)
<*> peek (ptr `plusPtr` 16)
<*> peek (ptr `plusPtr` 24)
<*> peek (ptr `plusPtr` 32)
<*> peek (ptr `plusPtr` 40)
<*> peek (ptr `plusPtr` 44)
<*> peek (ptr `plusPtr` 48)
poke ptr poked = poke (ptr `plusPtr` 0) (vkSType (poked :: VkCommandBufferInheritanceInfo))
*> poke (ptr `plusPtr` 8) (vkPNext (poked :: VkCommandBufferInheritanceInfo))
*> poke (ptr `plusPtr` 16) (vkRenderPass (poked :: VkCommandBufferInheritanceInfo))
*> poke (ptr `plusPtr` 24) (vkSubpass (poked :: VkCommandBufferInheritanceInfo))
*> poke (ptr `plusPtr` 32) (vkFramebuffer (poked :: VkCommandBufferInheritanceInfo))
*> poke (ptr `plusPtr` 40) (vkOcclusionQueryEnable (poked :: VkCommandBufferInheritanceInfo))
*> poke (ptr `plusPtr` 44) (vkQueryFlags (poked :: VkCommandBufferInheritanceInfo))
*> poke (ptr `plusPtr` 48) (vkPipelineStatistics (poked :: VkCommandBufferInheritanceInfo))
data VkCommandBuffer_T
type VkCommandBuffer = Ptr VkCommandBuffer_T
-- ** VkCommandBufferResetFlags
newtype VkCommandBufferResetFlagBits = VkCommandBufferResetFlagBits VkFlags
deriving (Eq, Storable, Bits, FiniteBits)
-- | Alias for VkCommandBufferResetFlagBits
type VkCommandBufferResetFlags = VkCommandBufferResetFlagBits
-- | Release resources owned by the buffer
pattern VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT = VkCommandBufferResetFlagBits 0x1
-- ** vkEndCommandBuffer
foreign import ccall "vkEndCommandBuffer" vkEndCommandBuffer ::
VkCommandBuffer -> IO VkResult
-- ** vkBeginCommandBuffer
foreign import ccall "vkBeginCommandBuffer" vkBeginCommandBuffer ::
VkCommandBuffer -> Ptr VkCommandBufferBeginInfo -> IO VkResult
data VkCommandBufferAllocateInfo =
VkCommandBufferAllocateInfo{ vkSType :: VkStructureType
, vkPNext :: Ptr Void
, vkCommandPool :: VkCommandPool
, vkLevel :: VkCommandBufferLevel
, vkCommandBufferCount :: Word32
}
deriving (Eq)
instance Storable VkCommandBufferAllocateInfo where
sizeOf ~_ = 32
alignment ~_ = 8
peek ptr = VkCommandBufferAllocateInfo <$> peek (ptr `plusPtr` 0)
<*> peek (ptr `plusPtr` 8)
<*> peek (ptr `plusPtr` 16)
<*> peek (ptr `plusPtr` 24)
<*> peek (ptr `plusPtr` 28)
poke ptr poked = poke (ptr `plusPtr` 0) (vkSType (poked :: VkCommandBufferAllocateInfo))
*> poke (ptr `plusPtr` 8) (vkPNext (poked :: VkCommandBufferAllocateInfo))
*> poke (ptr `plusPtr` 16) (vkCommandPool (poked :: VkCommandBufferAllocateInfo))
*> poke (ptr `plusPtr` 24) (vkLevel (poked :: VkCommandBufferAllocateInfo))
*> poke (ptr `plusPtr` 28) (vkCommandBufferCount (poked :: VkCommandBufferAllocateInfo))
| oldmanmike/vulkan | src/Graphics/Vulkan/CommandBuffer.hs | bsd-3-clause | 7,862 | 0 | 16 | 2,578 | 1,584 | 904 | 680 | -1 | -1 |
module Data.Geo.GPX.Lens.MagvarL where
import Data.Geo.GPX.Type.Degrees
import Data.Lens.Common
class MagvarL a where
magvarL :: Lens a (Maybe Degrees)
| tonymorris/geo-gpx | src/Data/Geo/GPX/Lens/MagvarL.hs | bsd-3-clause | 157 | 0 | 9 | 22 | 48 | 29 | 19 | 5 | 0 |
-- Chapter2Exercise1Part1.hs
module Chapter2Exercise1Part1 where
-- Exercise 1.
-- Rewrite parseNumber, without liftM, using
-- 1. do-notation
import Control.Monad
import System.Environment
import Text.ParserCombinators.Parsec hiding (spaces)
data LispVal = Atom String
| List [LispVal]
| DottedList [LispVal] LispVal -- (a b c ... . z)
| Number Integer
| String String
| Bool Bool
parseString :: Parser LispVal
parseString = do
char '"'
x <- many (noneOf "\"") -- 0 or more non-doublequote characters.
char '"'
return $ String x
parseAtom :: Parser LispVal
parseAtom = do
first <- letter <|> symbol
rest <- many (letter <|> digit <|> symbol)
let atom = first:rest
return $ case atom of
"#t" -> Bool True
"#f" -> Bool False
_ -> Atom atom
parseNumber :: Parser LispVal
parseNumber = liftM (Number . read) $ many1 digit
parseNumber' :: Parser LispVal
parseNumber' = do
digits <- many1 digit
return $ (Number . read) digits
parseExpr :: Parser LispVal
parseExpr = parseAtom
<|> parseString
<|> parseNumber'
symbol :: Parser Char
symbol = oneOf "!#$%&|*+-/:<=>?@^_~"
spaces :: Parser ()
spaces = skipMany1 space
readExpr :: String -> String
readExpr input = case parse parseExpr "lisp" input of
Left err -> "No match: " ++ show err
Right val -> "Found value"
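-- For example (illustrative): readExpr "123" and readExpr "#t" both yield
-- "Found value", while readExpr "(" yields a "No match: ..." message.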
main :: IO ()
main = do
(expr:_) <- getArgs
putStrLn (readExpr expr)
| EFulmer/haskell-scheme-wikibook | src/Exercises/Ch2/Pt1/Ex1Pt1.hs | bsd-3-clause | 1,604 | 0 | 11 | 497 | 440 | 222 | 218 | 47 | 3 |
module Physics.Falling.Collision.Detection.PlaneImplicitShapeCollisionDetector
(
collidePlaneImplicitShape
)
where
import Physics.Falling.Math.Transform
import Physics.Falling.Shape.ImplicitShape
import Physics.Falling.Shape.Plane
import Physics.Falling.Collision.Collision
collidePlaneImplicitShape :: (ImplicitShape g v, UnitVector v n) =>
Plane v n -> g -> Maybe (PartialCollisionDescr v n)
collidePlaneImplicitShape (Plane center planeNormal) other =
if d > 0.0 then
Just $ (cp1, cp2, n, d)
else
Nothing
where
invPlaneNormal = neg $ fromNormal planeNormal
deepestPoint = supportPoint other invPlaneNormal
n = planeNormal
d = (center &- deepestPoint) &. fromNormal planeNormal
cp1 = deepestPoint &+ fromNormal planeNormal &* d
cp2 = deepestPoint
| sebcrozet/falling | Physics/Falling/Collision/Detection/PlaneImplicitShapeCollisionDetector.hs | bsd-3-clause | 1,121 | 0 | 10 | 458 | 205 | 116 | 89 | 19 | 2 |
module TypeBug5 where
f :: [[String]]->[String]
f (x:xs) = x ++ filter (not(eqString unwords(concat x xs))) xs
eqString :: String -> String -> Bool
eqString = (==)
| roberth/uu-helium | test/typeerrors/Examples/TypeBug5.hs | gpl-3.0 | 167 | 0 | 12 | 30 | 88 | 49 | 39 | 5 | 1 |
alpha :: ((a, b), c) -> (a, (b, c))
alpha ((x, y), z) = (x, (y, z)) | hmemcpy/milewski-ctfp-pdf | src/content/3.6/code/haskell/snippet11.hs | gpl-3.0 | 67 | 0 | 7 | 16 | 66 | 41 | 25 | 2 | 1 |
-- %************************************************************************
-- %* *
-- The known-key names for Template Haskell
-- %* *
-- %************************************************************************
module THNames where
import PrelNames( mk_known_key_name )
import Module( Module, mkModuleNameFS, mkModule, thUnitId )
import Name( Name )
import OccName( tcName, clsName, dataName, varName )
import RdrName( RdrName, nameRdrName )
import Unique
import FastString
-- To add a name, do three things
--
-- 1) Allocate a key
-- 2) Make a "Name"
-- 3) Add the name to templateHaskellNames
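--
-- Illustrative sketch only (the name below is hypothetical and the key
-- number is chosen arbitrarily):
--
--    fooIdKey :: Unique
--    fooIdKey = mkPreludeMiscIdUnique 999          -- (1) allocate a key
--
--    fooName :: Name
--    fooName = libFun (fsLit "foo") fooIdKey       -- (2) make a Name
--
--    templateHaskellNames = [ ..., fooName, ... ]  -- (3) register it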
templateHaskellNames :: [Name]
-- The names that are implicitly mentioned by ``bracket''
-- Should stay in sync with the import list of DsMeta
templateHaskellNames = [
returnQName, bindQName, sequenceQName, newNameName, liftName,
mkNameName, mkNameG_vName, mkNameG_dName, mkNameG_tcName, mkNameLName,
mkNameSName,
liftStringName,
unTypeName,
unTypeQName,
unsafeTExpCoerceName,
-- Lit
charLName, stringLName, integerLName, intPrimLName, wordPrimLName,
floatPrimLName, doublePrimLName, rationalLName, stringPrimLName,
charPrimLName,
-- Pat
litPName, varPName, tupPName, unboxedTupPName, unboxedSumPName,
conPName, tildePName, bangPName, infixPName,
asPName, wildPName, recPName, listPName, sigPName, viewPName,
-- FieldPat
fieldPatName,
-- Match
matchName,
-- Clause
clauseName,
-- Exp
varEName, conEName, litEName, appEName, appTypeEName, infixEName,
infixAppName, sectionLName, sectionRName, lamEName, lamCaseEName,
tupEName, unboxedTupEName, unboxedSumEName,
condEName, multiIfEName, letEName, caseEName, doEName, compEName,
fromEName, fromThenEName, fromToEName, fromThenToEName,
listEName, sigEName, recConEName, recUpdEName, staticEName, unboundVarEName,
-- FieldExp
fieldExpName,
-- Body
guardedBName, normalBName,
-- Guard
normalGEName, patGEName,
-- Stmt
bindSName, letSName, noBindSName, parSName,
-- Dec
funDName, valDName, dataDName, newtypeDName, tySynDName,
classDName, instanceWithOverlapDName,
standaloneDerivWithStrategyDName, sigDName, forImpDName,
pragInlDName, pragSpecDName, pragSpecInlDName, pragSpecInstDName,
pragRuleDName, pragAnnDName, defaultSigDName,
dataFamilyDName, openTypeFamilyDName, closedTypeFamilyDName,
dataInstDName, newtypeInstDName, tySynInstDName,
infixLDName, infixRDName, infixNDName,
roleAnnotDName, patSynDName, patSynSigDName,
-- Cxt
cxtName,
-- SourceUnpackedness
noSourceUnpackednessName, sourceNoUnpackName, sourceUnpackName,
-- SourceStrictness
noSourceStrictnessName, sourceLazyName, sourceStrictName,
-- Con
normalCName, recCName, infixCName, forallCName, gadtCName, recGadtCName,
-- Bang
bangName,
-- BangType
bangTypeName,
-- VarBangType
varBangTypeName,
-- PatSynDir (for pattern synonyms)
unidirPatSynName, implBidirPatSynName, explBidirPatSynName,
-- PatSynArgs (for pattern synonyms)
prefixPatSynName, infixPatSynName, recordPatSynName,
-- Type
forallTName, varTName, conTName, appTName, equalityTName,
tupleTName, unboxedTupleTName, unboxedSumTName,
arrowTName, listTName, sigTName, litTName,
promotedTName, promotedTupleTName, promotedNilTName, promotedConsTName,
wildCardTName,
-- TyLit
numTyLitName, strTyLitName,
-- TyVarBndr
plainTVName, kindedTVName,
-- Role
nominalRName, representationalRName, phantomRName, inferRName,
-- Kind
varKName, conKName, tupleKName, arrowKName, listKName, appKName,
starKName, constraintKName,
-- FamilyResultSig
noSigName, kindSigName, tyVarSigName,
-- InjectivityAnn
injectivityAnnName,
-- Callconv
cCallName, stdCallName, cApiCallName, primCallName, javaScriptCallName,
-- Safety
unsafeName,
safeName,
interruptibleName,
-- Inline
noInlineDataConName, inlineDataConName, inlinableDataConName,
-- RuleMatch
conLikeDataConName, funLikeDataConName,
-- Phases
allPhasesDataConName, fromPhaseDataConName, beforePhaseDataConName,
-- Overlap
overlappableDataConName, overlappingDataConName, overlapsDataConName,
incoherentDataConName,
-- DerivStrategy
stockStrategyDataConName, anyclassStrategyDataConName,
newtypeStrategyDataConName,
-- TExp
tExpDataConName,
-- RuleBndr
ruleVarName, typedRuleVarName,
-- FunDep
funDepName,
-- FamFlavour
typeFamName, dataFamName,
-- TySynEqn
tySynEqnName,
-- AnnTarget
valueAnnotationName, typeAnnotationName, moduleAnnotationName,
-- DerivClause
derivClauseName,
-- The type classes
liftClassName,
-- And the tycons
qTyConName, nameTyConName, patTyConName, fieldPatTyConName, matchQTyConName,
clauseQTyConName, expQTyConName, fieldExpTyConName, predTyConName,
stmtQTyConName, decQTyConName, conQTyConName, bangTypeQTyConName,
varBangTypeQTyConName, typeQTyConName, expTyConName, decTyConName,
typeTyConName, tyVarBndrTyConName, matchTyConName, clauseTyConName,
patQTyConName, fieldPatQTyConName, fieldExpQTyConName, funDepTyConName,
predQTyConName, decsQTyConName, ruleBndrQTyConName, tySynEqnQTyConName,
roleTyConName, tExpTyConName, injAnnTyConName, kindTyConName,
overlapTyConName, derivClauseQTyConName, derivStrategyTyConName,
-- Quasiquoting
quoteDecName, quoteTypeName, quoteExpName, quotePatName]
thSyn, thLib, qqLib :: Module
thSyn = mkTHModule (fsLit "Language.Haskell.TH.Syntax")
thLib = mkTHModule (fsLit "Language.Haskell.TH.Lib")
qqLib = mkTHModule (fsLit "Language.Haskell.TH.Quote")
mkTHModule :: FastString -> Module
mkTHModule m = mkModule thUnitId (mkModuleNameFS m)
libFun, libTc, thFun, thTc, thCls, thCon, qqFun :: FastString -> Unique -> Name
libFun = mk_known_key_name OccName.varName thLib
libTc = mk_known_key_name OccName.tcName thLib
thFun = mk_known_key_name OccName.varName thSyn
thTc = mk_known_key_name OccName.tcName thSyn
thCls = mk_known_key_name OccName.clsName thSyn
thCon = mk_known_key_name OccName.dataName thSyn
qqFun = mk_known_key_name OccName.varName qqLib
-------------------- TH.Syntax -----------------------
liftClassName :: Name
liftClassName = thCls (fsLit "Lift") liftClassKey
qTyConName, nameTyConName, fieldExpTyConName, patTyConName,
fieldPatTyConName, expTyConName, decTyConName, typeTyConName,
tyVarBndrTyConName, matchTyConName, clauseTyConName, funDepTyConName,
predTyConName, tExpTyConName, injAnnTyConName, kindTyConName,
overlapTyConName, derivStrategyTyConName :: Name
qTyConName = thTc (fsLit "Q") qTyConKey
nameTyConName = thTc (fsLit "Name") nameTyConKey
fieldExpTyConName = thTc (fsLit "FieldExp") fieldExpTyConKey
patTyConName = thTc (fsLit "Pat") patTyConKey
fieldPatTyConName = thTc (fsLit "FieldPat") fieldPatTyConKey
expTyConName = thTc (fsLit "Exp") expTyConKey
decTyConName = thTc (fsLit "Dec") decTyConKey
typeTyConName = thTc (fsLit "Type") typeTyConKey
tyVarBndrTyConName = thTc (fsLit "TyVarBndr") tyVarBndrTyConKey
matchTyConName = thTc (fsLit "Match") matchTyConKey
clauseTyConName = thTc (fsLit "Clause") clauseTyConKey
funDepTyConName = thTc (fsLit "FunDep") funDepTyConKey
predTyConName = thTc (fsLit "Pred") predTyConKey
tExpTyConName = thTc (fsLit "TExp") tExpTyConKey
injAnnTyConName = thTc (fsLit "InjectivityAnn") injAnnTyConKey
kindTyConName = thTc (fsLit "Kind") kindTyConKey
overlapTyConName = thTc (fsLit "Overlap") overlapTyConKey
derivStrategyTyConName = thTc (fsLit "DerivStrategy") derivStrategyTyConKey
returnQName, bindQName, sequenceQName, newNameName, liftName,
mkNameName, mkNameG_vName, mkNameG_dName, mkNameG_tcName,
mkNameLName, mkNameSName, liftStringName, unTypeName, unTypeQName,
unsafeTExpCoerceName :: Name
returnQName = thFun (fsLit "returnQ") returnQIdKey
bindQName = thFun (fsLit "bindQ") bindQIdKey
sequenceQName = thFun (fsLit "sequenceQ") sequenceQIdKey
newNameName = thFun (fsLit "newName") newNameIdKey
liftName = thFun (fsLit "lift") liftIdKey
liftStringName = thFun (fsLit "liftString") liftStringIdKey
mkNameName = thFun (fsLit "mkName") mkNameIdKey
mkNameG_vName = thFun (fsLit "mkNameG_v") mkNameG_vIdKey
mkNameG_dName = thFun (fsLit "mkNameG_d") mkNameG_dIdKey
mkNameG_tcName = thFun (fsLit "mkNameG_tc") mkNameG_tcIdKey
mkNameLName = thFun (fsLit "mkNameL") mkNameLIdKey
mkNameSName = thFun (fsLit "mkNameS") mkNameSIdKey
unTypeName = thFun (fsLit "unType") unTypeIdKey
unTypeQName = thFun (fsLit "unTypeQ") unTypeQIdKey
unsafeTExpCoerceName = thFun (fsLit "unsafeTExpCoerce") unsafeTExpCoerceIdKey
-------------------- TH.Lib -----------------------
-- data Lit = ...
charLName, stringLName, integerLName, intPrimLName, wordPrimLName,
floatPrimLName, doublePrimLName, rationalLName, stringPrimLName,
charPrimLName :: Name
charLName = libFun (fsLit "charL") charLIdKey
stringLName = libFun (fsLit "stringL") stringLIdKey
integerLName = libFun (fsLit "integerL") integerLIdKey
intPrimLName = libFun (fsLit "intPrimL") intPrimLIdKey
wordPrimLName = libFun (fsLit "wordPrimL") wordPrimLIdKey
floatPrimLName = libFun (fsLit "floatPrimL") floatPrimLIdKey
doublePrimLName = libFun (fsLit "doublePrimL") doublePrimLIdKey
rationalLName = libFun (fsLit "rationalL") rationalLIdKey
stringPrimLName = libFun (fsLit "stringPrimL") stringPrimLIdKey
charPrimLName = libFun (fsLit "charPrimL") charPrimLIdKey
-- data Pat = ...
litPName, varPName, tupPName, unboxedTupPName, unboxedSumPName, conPName,
infixPName, tildePName, bangPName, asPName, wildPName, recPName, listPName,
sigPName, viewPName :: Name
litPName = libFun (fsLit "litP") litPIdKey
varPName = libFun (fsLit "varP") varPIdKey
tupPName = libFun (fsLit "tupP") tupPIdKey
unboxedTupPName = libFun (fsLit "unboxedTupP") unboxedTupPIdKey
unboxedSumPName = libFun (fsLit "unboxedSumP") unboxedSumPIdKey
conPName = libFun (fsLit "conP") conPIdKey
infixPName = libFun (fsLit "infixP") infixPIdKey
tildePName = libFun (fsLit "tildeP") tildePIdKey
bangPName = libFun (fsLit "bangP") bangPIdKey
asPName = libFun (fsLit "asP") asPIdKey
wildPName = libFun (fsLit "wildP") wildPIdKey
recPName = libFun (fsLit "recP") recPIdKey
listPName = libFun (fsLit "listP") listPIdKey
sigPName = libFun (fsLit "sigP") sigPIdKey
viewPName = libFun (fsLit "viewP") viewPIdKey
-- type FieldPat = ...
fieldPatName :: Name
fieldPatName = libFun (fsLit "fieldPat") fieldPatIdKey
-- data Match = ...
matchName :: Name
matchName = libFun (fsLit "match") matchIdKey
-- data Clause = ...
clauseName :: Name
clauseName = libFun (fsLit "clause") clauseIdKey
-- data Exp = ...
varEName, conEName, litEName, appEName, appTypeEName, infixEName, infixAppName,
sectionLName, sectionRName, lamEName, lamCaseEName, tupEName,
unboxedTupEName, unboxedSumEName, condEName, multiIfEName, letEName,
caseEName, doEName, compEName, staticEName, unboundVarEName :: Name
varEName = libFun (fsLit "varE") varEIdKey
conEName = libFun (fsLit "conE") conEIdKey
litEName = libFun (fsLit "litE") litEIdKey
appEName = libFun (fsLit "appE") appEIdKey
appTypeEName = libFun (fsLit "appTypeE") appTypeEIdKey
infixEName = libFun (fsLit "infixE") infixEIdKey
infixAppName = libFun (fsLit "infixApp") infixAppIdKey
sectionLName = libFun (fsLit "sectionL") sectionLIdKey
sectionRName = libFun (fsLit "sectionR") sectionRIdKey
lamEName = libFun (fsLit "lamE") lamEIdKey
lamCaseEName = libFun (fsLit "lamCaseE") lamCaseEIdKey
tupEName = libFun (fsLit "tupE") tupEIdKey
unboxedTupEName = libFun (fsLit "unboxedTupE") unboxedTupEIdKey
unboxedSumEName = libFun (fsLit "unboxedSumE") unboxedSumEIdKey
condEName = libFun (fsLit "condE") condEIdKey
multiIfEName = libFun (fsLit "multiIfE") multiIfEIdKey
letEName = libFun (fsLit "letE") letEIdKey
caseEName = libFun (fsLit "caseE") caseEIdKey
doEName = libFun (fsLit "doE") doEIdKey
compEName = libFun (fsLit "compE") compEIdKey
-- ArithSeq skips a level
fromEName, fromThenEName, fromToEName, fromThenToEName :: Name
fromEName = libFun (fsLit "fromE") fromEIdKey
fromThenEName = libFun (fsLit "fromThenE") fromThenEIdKey
fromToEName = libFun (fsLit "fromToE") fromToEIdKey
fromThenToEName = libFun (fsLit "fromThenToE") fromThenToEIdKey
-- end ArithSeq
listEName, sigEName, recConEName, recUpdEName :: Name
listEName = libFun (fsLit "listE") listEIdKey
sigEName = libFun (fsLit "sigE") sigEIdKey
recConEName = libFun (fsLit "recConE") recConEIdKey
recUpdEName = libFun (fsLit "recUpdE") recUpdEIdKey
staticEName = libFun (fsLit "staticE") staticEIdKey
unboundVarEName = libFun (fsLit "unboundVarE") unboundVarEIdKey
-- type FieldExp = ...
fieldExpName :: Name
fieldExpName = libFun (fsLit "fieldExp") fieldExpIdKey
-- data Body = ...
guardedBName, normalBName :: Name
guardedBName = libFun (fsLit "guardedB") guardedBIdKey
normalBName = libFun (fsLit "normalB") normalBIdKey
-- data Guard = ...
normalGEName, patGEName :: Name
normalGEName = libFun (fsLit "normalGE") normalGEIdKey
patGEName = libFun (fsLit "patGE") patGEIdKey
-- data Stmt = ...
bindSName, letSName, noBindSName, parSName :: Name
bindSName = libFun (fsLit "bindS") bindSIdKey
letSName = libFun (fsLit "letS") letSIdKey
noBindSName = libFun (fsLit "noBindS") noBindSIdKey
parSName = libFun (fsLit "parS") parSIdKey
-- data Dec = ...
funDName, valDName, dataDName, newtypeDName, tySynDName, classDName,
instanceWithOverlapDName, sigDName, forImpDName, pragInlDName,
pragSpecDName, pragSpecInlDName, pragSpecInstDName, pragRuleDName,
pragAnnDName, standaloneDerivWithStrategyDName, defaultSigDName,
dataInstDName, newtypeInstDName, tySynInstDName, dataFamilyDName,
openTypeFamilyDName, closedTypeFamilyDName, infixLDName, infixRDName,
infixNDName, roleAnnotDName, patSynDName, patSynSigDName :: Name
funDName = libFun (fsLit "funD") funDIdKey
valDName = libFun (fsLit "valD") valDIdKey
dataDName = libFun (fsLit "dataD") dataDIdKey
newtypeDName = libFun (fsLit "newtypeD") newtypeDIdKey
tySynDName = libFun (fsLit "tySynD") tySynDIdKey
classDName = libFun (fsLit "classD") classDIdKey
instanceWithOverlapDName
= libFun (fsLit "instanceWithOverlapD") instanceWithOverlapDIdKey
standaloneDerivWithStrategyDName = libFun
(fsLit "standaloneDerivWithStrategyD") standaloneDerivWithStrategyDIdKey
sigDName = libFun (fsLit "sigD") sigDIdKey
defaultSigDName = libFun (fsLit "defaultSigD") defaultSigDIdKey
forImpDName = libFun (fsLit "forImpD") forImpDIdKey
pragInlDName = libFun (fsLit "pragInlD") pragInlDIdKey
pragSpecDName = libFun (fsLit "pragSpecD") pragSpecDIdKey
pragSpecInlDName = libFun (fsLit "pragSpecInlD") pragSpecInlDIdKey
pragSpecInstDName = libFun (fsLit "pragSpecInstD") pragSpecInstDIdKey
pragRuleDName = libFun (fsLit "pragRuleD") pragRuleDIdKey
pragAnnDName = libFun (fsLit "pragAnnD") pragAnnDIdKey
dataInstDName = libFun (fsLit "dataInstD") dataInstDIdKey
newtypeInstDName = libFun (fsLit "newtypeInstD") newtypeInstDIdKey
tySynInstDName = libFun (fsLit "tySynInstD") tySynInstDIdKey
openTypeFamilyDName = libFun (fsLit "openTypeFamilyD") openTypeFamilyDIdKey
closedTypeFamilyDName= libFun (fsLit "closedTypeFamilyD") closedTypeFamilyDIdKey
dataFamilyDName = libFun (fsLit "dataFamilyD") dataFamilyDIdKey
infixLDName = libFun (fsLit "infixLD") infixLDIdKey
infixRDName = libFun (fsLit "infixRD") infixRDIdKey
infixNDName = libFun (fsLit "infixND") infixNDIdKey
roleAnnotDName = libFun (fsLit "roleAnnotD") roleAnnotDIdKey
patSynDName = libFun (fsLit "patSynD") patSynDIdKey
patSynSigDName = libFun (fsLit "patSynSigD") patSynSigDIdKey
-- type Ctxt = ...
cxtName :: Name
cxtName = libFun (fsLit "cxt") cxtIdKey
-- data SourceUnpackedness = ...
noSourceUnpackednessName, sourceNoUnpackName, sourceUnpackName :: Name
noSourceUnpackednessName = libFun (fsLit "noSourceUnpackedness") noSourceUnpackednessKey
sourceNoUnpackName = libFun (fsLit "sourceNoUnpack") sourceNoUnpackKey
sourceUnpackName = libFun (fsLit "sourceUnpack") sourceUnpackKey
-- data SourceStrictness = ...
noSourceStrictnessName, sourceLazyName, sourceStrictName :: Name
noSourceStrictnessName = libFun (fsLit "noSourceStrictness") noSourceStrictnessKey
sourceLazyName = libFun (fsLit "sourceLazy") sourceLazyKey
sourceStrictName = libFun (fsLit "sourceStrict") sourceStrictKey
-- data Con = ...
normalCName, recCName, infixCName, forallCName, gadtCName, recGadtCName :: Name
normalCName = libFun (fsLit "normalC" ) normalCIdKey
recCName = libFun (fsLit "recC" ) recCIdKey
infixCName = libFun (fsLit "infixC" ) infixCIdKey
forallCName = libFun (fsLit "forallC" ) forallCIdKey
gadtCName = libFun (fsLit "gadtC" ) gadtCIdKey
recGadtCName = libFun (fsLit "recGadtC") recGadtCIdKey
-- data Bang = ...
bangName :: Name
bangName = libFun (fsLit "bang") bangIdKey
-- type BangType = ...
bangTypeName :: Name
bangTypeName = libFun (fsLit "bangType") bangTKey
-- type VarBangType = ...
varBangTypeName :: Name
varBangTypeName = libFun (fsLit "varBangType") varBangTKey
-- data PatSynDir = ...
unidirPatSynName, implBidirPatSynName, explBidirPatSynName :: Name
unidirPatSynName = libFun (fsLit "unidir") unidirPatSynIdKey
implBidirPatSynName = libFun (fsLit "implBidir") implBidirPatSynIdKey
explBidirPatSynName = libFun (fsLit "explBidir") explBidirPatSynIdKey
-- data PatSynArgs = ...
prefixPatSynName, infixPatSynName, recordPatSynName :: Name
prefixPatSynName = libFun (fsLit "prefixPatSyn") prefixPatSynIdKey
infixPatSynName = libFun (fsLit "infixPatSyn") infixPatSynIdKey
recordPatSynName = libFun (fsLit "recordPatSyn") recordPatSynIdKey
-- data Type = ...
forallTName, varTName, conTName, tupleTName, unboxedTupleTName,
unboxedSumTName, arrowTName, listTName, appTName, sigTName, equalityTName,
litTName, promotedTName, promotedTupleTName, promotedNilTName,
promotedConsTName, wildCardTName :: Name
forallTName = libFun (fsLit "forallT") forallTIdKey
varTName = libFun (fsLit "varT") varTIdKey
conTName = libFun (fsLit "conT") conTIdKey
tupleTName = libFun (fsLit "tupleT") tupleTIdKey
unboxedTupleTName = libFun (fsLit "unboxedTupleT") unboxedTupleTIdKey
unboxedSumTName = libFun (fsLit "unboxedSumT") unboxedSumTIdKey
arrowTName = libFun (fsLit "arrowT") arrowTIdKey
listTName = libFun (fsLit "listT") listTIdKey
appTName = libFun (fsLit "appT") appTIdKey
sigTName = libFun (fsLit "sigT") sigTIdKey
equalityTName = libFun (fsLit "equalityT") equalityTIdKey
litTName = libFun (fsLit "litT") litTIdKey
promotedTName = libFun (fsLit "promotedT") promotedTIdKey
promotedTupleTName = libFun (fsLit "promotedTupleT") promotedTupleTIdKey
promotedNilTName = libFun (fsLit "promotedNilT") promotedNilTIdKey
promotedConsTName = libFun (fsLit "promotedConsT") promotedConsTIdKey
wildCardTName = libFun (fsLit "wildCardT") wildCardTIdKey
-- data TyLit = ...
numTyLitName, strTyLitName :: Name
numTyLitName = libFun (fsLit "numTyLit") numTyLitIdKey
strTyLitName = libFun (fsLit "strTyLit") strTyLitIdKey
-- data TyVarBndr = ...
plainTVName, kindedTVName :: Name
plainTVName = libFun (fsLit "plainTV") plainTVIdKey
kindedTVName = libFun (fsLit "kindedTV") kindedTVIdKey
-- data Role = ...
nominalRName, representationalRName, phantomRName, inferRName :: Name
nominalRName = libFun (fsLit "nominalR") nominalRIdKey
representationalRName = libFun (fsLit "representationalR") representationalRIdKey
phantomRName = libFun (fsLit "phantomR") phantomRIdKey
inferRName = libFun (fsLit "inferR") inferRIdKey
-- data Kind = ...
varKName, conKName, tupleKName, arrowKName, listKName, appKName,
starKName, constraintKName :: Name
varKName = libFun (fsLit "varK") varKIdKey
conKName = libFun (fsLit "conK") conKIdKey
tupleKName = libFun (fsLit "tupleK") tupleKIdKey
arrowKName = libFun (fsLit "arrowK") arrowKIdKey
listKName = libFun (fsLit "listK") listKIdKey
appKName = libFun (fsLit "appK") appKIdKey
starKName = libFun (fsLit "starK") starKIdKey
constraintKName = libFun (fsLit "constraintK") constraintKIdKey
-- data FamilyResultSig = ...
noSigName, kindSigName, tyVarSigName :: Name
noSigName = libFun (fsLit "noSig") noSigIdKey
kindSigName = libFun (fsLit "kindSig") kindSigIdKey
tyVarSigName = libFun (fsLit "tyVarSig") tyVarSigIdKey
-- data InjectivityAnn = ...
injectivityAnnName :: Name
injectivityAnnName = libFun (fsLit "injectivityAnn") injectivityAnnIdKey
-- data Callconv = ...
cCallName, stdCallName, cApiCallName, primCallName, javaScriptCallName :: Name
cCallName = libFun (fsLit "cCall") cCallIdKey
stdCallName = libFun (fsLit "stdCall") stdCallIdKey
cApiCallName = libFun (fsLit "cApi") cApiCallIdKey
primCallName = libFun (fsLit "prim") primCallIdKey
javaScriptCallName = libFun (fsLit "javaScript") javaScriptCallIdKey
-- data Safety = ...
unsafeName, safeName, interruptibleName :: Name
unsafeName = libFun (fsLit "unsafe") unsafeIdKey
safeName = libFun (fsLit "safe") safeIdKey
interruptibleName = libFun (fsLit "interruptible") interruptibleIdKey
-- newtype TExp a = ...
tExpDataConName :: Name
tExpDataConName = thCon (fsLit "TExp") tExpDataConKey
-- data RuleBndr = ...
ruleVarName, typedRuleVarName :: Name
ruleVarName = libFun (fsLit ("ruleVar")) ruleVarIdKey
typedRuleVarName = libFun (fsLit ("typedRuleVar")) typedRuleVarIdKey
-- data FunDep = ...
funDepName :: Name
funDepName = libFun (fsLit "funDep") funDepIdKey
-- data FamFlavour = ...
typeFamName, dataFamName :: Name
typeFamName = libFun (fsLit "typeFam") typeFamIdKey
dataFamName = libFun (fsLit "dataFam") dataFamIdKey
-- data TySynEqn = ...
tySynEqnName :: Name
tySynEqnName = libFun (fsLit "tySynEqn") tySynEqnIdKey
-- data AnnTarget = ...
valueAnnotationName, typeAnnotationName, moduleAnnotationName :: Name
valueAnnotationName = libFun (fsLit "valueAnnotation") valueAnnotationIdKey
typeAnnotationName = libFun (fsLit "typeAnnotation") typeAnnotationIdKey
moduleAnnotationName = libFun (fsLit "moduleAnnotation") moduleAnnotationIdKey
-- type DerivClause = ...
derivClauseName :: Name
derivClauseName = libFun (fsLit "derivClause") derivClauseIdKey
matchQTyConName, clauseQTyConName, expQTyConName, stmtQTyConName,
decQTyConName, conQTyConName, bangTypeQTyConName,
varBangTypeQTyConName, typeQTyConName, fieldExpQTyConName,
patQTyConName, fieldPatQTyConName, predQTyConName, decsQTyConName,
ruleBndrQTyConName, tySynEqnQTyConName, roleTyConName,
derivClauseQTyConName :: Name
matchQTyConName = libTc (fsLit "MatchQ") matchQTyConKey
clauseQTyConName = libTc (fsLit "ClauseQ") clauseQTyConKey
expQTyConName = libTc (fsLit "ExpQ") expQTyConKey
stmtQTyConName = libTc (fsLit "StmtQ") stmtQTyConKey
decQTyConName = libTc (fsLit "DecQ") decQTyConKey
decsQTyConName = libTc (fsLit "DecsQ") decsQTyConKey -- Q [Dec]
conQTyConName = libTc (fsLit "ConQ") conQTyConKey
bangTypeQTyConName = libTc (fsLit "BangTypeQ") bangTypeQTyConKey
varBangTypeQTyConName = libTc (fsLit "VarBangTypeQ") varBangTypeQTyConKey
typeQTyConName = libTc (fsLit "TypeQ") typeQTyConKey
fieldExpQTyConName = libTc (fsLit "FieldExpQ") fieldExpQTyConKey
patQTyConName = libTc (fsLit "PatQ") patQTyConKey
fieldPatQTyConName = libTc (fsLit "FieldPatQ") fieldPatQTyConKey
predQTyConName = libTc (fsLit "PredQ") predQTyConKey
ruleBndrQTyConName = libTc (fsLit "RuleBndrQ") ruleBndrQTyConKey
tySynEqnQTyConName = libTc (fsLit "TySynEqnQ") tySynEqnQTyConKey
roleTyConName = libTc (fsLit "Role") roleTyConKey
derivClauseQTyConName = libTc (fsLit "DerivClauseQ") derivClauseQTyConKey
-- quasiquoting
quoteExpName, quotePatName, quoteDecName, quoteTypeName :: Name
quoteExpName = qqFun (fsLit "quoteExp") quoteExpKey
quotePatName = qqFun (fsLit "quotePat") quotePatKey
quoteDecName = qqFun (fsLit "quoteDec") quoteDecKey
quoteTypeName = qqFun (fsLit "quoteType") quoteTypeKey
-- data Inline = ...
noInlineDataConName, inlineDataConName, inlinableDataConName :: Name
noInlineDataConName = thCon (fsLit "NoInline") noInlineDataConKey
inlineDataConName = thCon (fsLit "Inline") inlineDataConKey
inlinableDataConName = thCon (fsLit "Inlinable") inlinableDataConKey
-- data RuleMatch = ...
conLikeDataConName, funLikeDataConName :: Name
conLikeDataConName = thCon (fsLit "ConLike") conLikeDataConKey
funLikeDataConName = thCon (fsLit "FunLike") funLikeDataConKey
-- data Phases = ...
allPhasesDataConName, fromPhaseDataConName, beforePhaseDataConName :: Name
allPhasesDataConName = thCon (fsLit "AllPhases") allPhasesDataConKey
fromPhaseDataConName = thCon (fsLit "FromPhase") fromPhaseDataConKey
beforePhaseDataConName = thCon (fsLit "BeforePhase") beforePhaseDataConKey
-- data Overlap = ...
overlappableDataConName,
overlappingDataConName,
overlapsDataConName,
incoherentDataConName :: Name
overlappableDataConName = thCon (fsLit "Overlappable") overlappableDataConKey
overlappingDataConName = thCon (fsLit "Overlapping") overlappingDataConKey
overlapsDataConName = thCon (fsLit "Overlaps") overlapsDataConKey
incoherentDataConName = thCon (fsLit "Incoherent") incoherentDataConKey
-- data DerivStrategy = ...
stockStrategyDataConName, anyclassStrategyDataConName,
newtypeStrategyDataConName :: Name
stockStrategyDataConName = thCon (fsLit "StockStrategy") stockDataConKey
anyclassStrategyDataConName = thCon (fsLit "AnyclassStrategy") anyclassDataConKey
newtypeStrategyDataConName = thCon (fsLit "NewtypeStrategy") newtypeDataConKey
{- *********************************************************************
* *
Class keys
* *
********************************************************************* -}
-- ClassUniques available: 200-299
-- Check in PrelNames if you want to change this
liftClassKey :: Unique
liftClassKey = mkPreludeClassUnique 200
{- *********************************************************************
* *
TyCon keys
* *
********************************************************************* -}
-- TyConUniques available: 200-299
-- Check in PrelNames if you want to change this
expTyConKey, matchTyConKey, clauseTyConKey, qTyConKey, expQTyConKey,
decQTyConKey, patTyConKey, matchQTyConKey, clauseQTyConKey,
stmtQTyConKey, conQTyConKey, typeQTyConKey, typeTyConKey, tyVarBndrTyConKey,
decTyConKey, bangTypeQTyConKey, varBangTypeQTyConKey,
fieldExpTyConKey, fieldPatTyConKey, nameTyConKey, patQTyConKey,
fieldPatQTyConKey, fieldExpQTyConKey, funDepTyConKey, predTyConKey,
predQTyConKey, decsQTyConKey, ruleBndrQTyConKey, tySynEqnQTyConKey,
roleTyConKey, tExpTyConKey, injAnnTyConKey, kindTyConKey,
overlapTyConKey, derivClauseQTyConKey, derivStrategyTyConKey :: Unique
expTyConKey = mkPreludeTyConUnique 200
matchTyConKey = mkPreludeTyConUnique 201
clauseTyConKey = mkPreludeTyConUnique 202
qTyConKey = mkPreludeTyConUnique 203
expQTyConKey = mkPreludeTyConUnique 204
decQTyConKey = mkPreludeTyConUnique 205
patTyConKey = mkPreludeTyConUnique 206
matchQTyConKey = mkPreludeTyConUnique 207
clauseQTyConKey = mkPreludeTyConUnique 208
stmtQTyConKey = mkPreludeTyConUnique 209
conQTyConKey = mkPreludeTyConUnique 210
typeQTyConKey = mkPreludeTyConUnique 211
typeTyConKey = mkPreludeTyConUnique 212
decTyConKey = mkPreludeTyConUnique 213
bangTypeQTyConKey = mkPreludeTyConUnique 214
varBangTypeQTyConKey = mkPreludeTyConUnique 215
fieldExpTyConKey = mkPreludeTyConUnique 216
fieldPatTyConKey = mkPreludeTyConUnique 217
nameTyConKey = mkPreludeTyConUnique 218
patQTyConKey = mkPreludeTyConUnique 219
fieldPatQTyConKey = mkPreludeTyConUnique 220
fieldExpQTyConKey = mkPreludeTyConUnique 221
funDepTyConKey = mkPreludeTyConUnique 222
predTyConKey = mkPreludeTyConUnique 223
predQTyConKey = mkPreludeTyConUnique 224
tyVarBndrTyConKey = mkPreludeTyConUnique 225
decsQTyConKey = mkPreludeTyConUnique 226
ruleBndrQTyConKey = mkPreludeTyConUnique 227
tySynEqnQTyConKey = mkPreludeTyConUnique 228
roleTyConKey = mkPreludeTyConUnique 229
tExpTyConKey = mkPreludeTyConUnique 230
injAnnTyConKey = mkPreludeTyConUnique 231
kindTyConKey = mkPreludeTyConUnique 232
overlapTyConKey = mkPreludeTyConUnique 233
derivClauseQTyConKey = mkPreludeTyConUnique 234
derivStrategyTyConKey = mkPreludeTyConUnique 235
{- *********************************************************************
* *
DataCon keys
* *
********************************************************************* -}
-- DataConUniques available: 100-150
-- If you want to change this, make sure you check in PrelNames
-- data Inline = ...
noInlineDataConKey, inlineDataConKey, inlinableDataConKey :: Unique
noInlineDataConKey = mkPreludeDataConUnique 100
inlineDataConKey = mkPreludeDataConUnique 101
inlinableDataConKey = mkPreludeDataConUnique 102
-- data RuleMatch = ...
conLikeDataConKey, funLikeDataConKey :: Unique
conLikeDataConKey = mkPreludeDataConUnique 103
funLikeDataConKey = mkPreludeDataConUnique 104
-- data Phases = ...
allPhasesDataConKey, fromPhaseDataConKey, beforePhaseDataConKey :: Unique
allPhasesDataConKey = mkPreludeDataConUnique 105
fromPhaseDataConKey = mkPreludeDataConUnique 106
beforePhaseDataConKey = mkPreludeDataConUnique 107
-- newtype TExp a = ...
tExpDataConKey :: Unique
tExpDataConKey = mkPreludeDataConUnique 108
-- data Overlap = ...
overlappableDataConKey,
overlappingDataConKey,
overlapsDataConKey,
incoherentDataConKey :: Unique
overlappableDataConKey = mkPreludeDataConUnique 109
overlappingDataConKey = mkPreludeDataConUnique 110
overlapsDataConKey = mkPreludeDataConUnique 111
incoherentDataConKey = mkPreludeDataConUnique 112
-- data DerivStrategy = ...
stockDataConKey, anyclassDataConKey, newtypeDataConKey :: Unique
stockDataConKey = mkPreludeDataConUnique 113
anyclassDataConKey = mkPreludeDataConUnique 114
newtypeDataConKey = mkPreludeDataConUnique 115
{- *********************************************************************
* *
Id keys
* *
********************************************************************* -}
-- IdUniques available: 200-499
-- If you want to change this, make sure you check in PrelNames
returnQIdKey, bindQIdKey, sequenceQIdKey, liftIdKey, newNameIdKey,
mkNameIdKey, mkNameG_vIdKey, mkNameG_dIdKey, mkNameG_tcIdKey,
mkNameLIdKey, mkNameSIdKey, unTypeIdKey, unTypeQIdKey,
unsafeTExpCoerceIdKey :: Unique
returnQIdKey = mkPreludeMiscIdUnique 200
bindQIdKey = mkPreludeMiscIdUnique 201
sequenceQIdKey = mkPreludeMiscIdUnique 202
liftIdKey = mkPreludeMiscIdUnique 203
newNameIdKey = mkPreludeMiscIdUnique 204
mkNameIdKey = mkPreludeMiscIdUnique 205
mkNameG_vIdKey = mkPreludeMiscIdUnique 206
mkNameG_dIdKey = mkPreludeMiscIdUnique 207
mkNameG_tcIdKey = mkPreludeMiscIdUnique 208
mkNameLIdKey = mkPreludeMiscIdUnique 209
mkNameSIdKey = mkPreludeMiscIdUnique 210
unTypeIdKey = mkPreludeMiscIdUnique 211
unTypeQIdKey = mkPreludeMiscIdUnique 212
unsafeTExpCoerceIdKey = mkPreludeMiscIdUnique 213
-- data Lit = ...
charLIdKey, stringLIdKey, integerLIdKey, intPrimLIdKey, wordPrimLIdKey,
floatPrimLIdKey, doublePrimLIdKey, rationalLIdKey, stringPrimLIdKey,
charPrimLIdKey:: Unique
charLIdKey = mkPreludeMiscIdUnique 220
stringLIdKey = mkPreludeMiscIdUnique 221
integerLIdKey = mkPreludeMiscIdUnique 222
intPrimLIdKey = mkPreludeMiscIdUnique 223
wordPrimLIdKey = mkPreludeMiscIdUnique 224
floatPrimLIdKey = mkPreludeMiscIdUnique 225
doublePrimLIdKey = mkPreludeMiscIdUnique 226
rationalLIdKey = mkPreludeMiscIdUnique 227
stringPrimLIdKey = mkPreludeMiscIdUnique 228
charPrimLIdKey = mkPreludeMiscIdUnique 229
liftStringIdKey :: Unique
liftStringIdKey = mkPreludeMiscIdUnique 230
-- data Pat = ...
litPIdKey, varPIdKey, tupPIdKey, unboxedTupPIdKey, unboxedSumPIdKey, conPIdKey,
infixPIdKey, tildePIdKey, bangPIdKey, asPIdKey, wildPIdKey, recPIdKey,
listPIdKey, sigPIdKey, viewPIdKey :: Unique
litPIdKey = mkPreludeMiscIdUnique 240
varPIdKey = mkPreludeMiscIdUnique 241
tupPIdKey = mkPreludeMiscIdUnique 242
unboxedTupPIdKey = mkPreludeMiscIdUnique 243
unboxedSumPIdKey = mkPreludeMiscIdUnique 244
conPIdKey = mkPreludeMiscIdUnique 245
infixPIdKey = mkPreludeMiscIdUnique 246
tildePIdKey = mkPreludeMiscIdUnique 247
bangPIdKey = mkPreludeMiscIdUnique 248
asPIdKey = mkPreludeMiscIdUnique 249
wildPIdKey = mkPreludeMiscIdUnique 250
recPIdKey = mkPreludeMiscIdUnique 251
listPIdKey = mkPreludeMiscIdUnique 252
sigPIdKey = mkPreludeMiscIdUnique 253
viewPIdKey = mkPreludeMiscIdUnique 254
-- type FieldPat = ...
fieldPatIdKey :: Unique
fieldPatIdKey = mkPreludeMiscIdUnique 260
-- data Match = ...
matchIdKey :: Unique
matchIdKey = mkPreludeMiscIdUnique 261
-- data Clause = ...
clauseIdKey :: Unique
clauseIdKey = mkPreludeMiscIdUnique 262
-- data Exp = ...
varEIdKey, conEIdKey, litEIdKey, appEIdKey, appTypeEIdKey, infixEIdKey,
infixAppIdKey, sectionLIdKey, sectionRIdKey, lamEIdKey, lamCaseEIdKey,
tupEIdKey, unboxedTupEIdKey, unboxedSumEIdKey, condEIdKey, multiIfEIdKey,
letEIdKey, caseEIdKey, doEIdKey, compEIdKey,
fromEIdKey, fromThenEIdKey, fromToEIdKey, fromThenToEIdKey,
listEIdKey, sigEIdKey, recConEIdKey, recUpdEIdKey, staticEIdKey,
unboundVarEIdKey :: Unique
varEIdKey = mkPreludeMiscIdUnique 270
conEIdKey = mkPreludeMiscIdUnique 271
litEIdKey = mkPreludeMiscIdUnique 272
appEIdKey = mkPreludeMiscIdUnique 273
appTypeEIdKey = mkPreludeMiscIdUnique 274
infixEIdKey = mkPreludeMiscIdUnique 275
infixAppIdKey = mkPreludeMiscIdUnique 276
sectionLIdKey = mkPreludeMiscIdUnique 277
sectionRIdKey = mkPreludeMiscIdUnique 278
lamEIdKey = mkPreludeMiscIdUnique 279
lamCaseEIdKey = mkPreludeMiscIdUnique 280
tupEIdKey = mkPreludeMiscIdUnique 281
unboxedTupEIdKey = mkPreludeMiscIdUnique 282
unboxedSumEIdKey = mkPreludeMiscIdUnique 283
condEIdKey = mkPreludeMiscIdUnique 284
multiIfEIdKey = mkPreludeMiscIdUnique 285
letEIdKey = mkPreludeMiscIdUnique 286
caseEIdKey = mkPreludeMiscIdUnique 287
doEIdKey = mkPreludeMiscIdUnique 288
compEIdKey = mkPreludeMiscIdUnique 289
fromEIdKey = mkPreludeMiscIdUnique 290
fromThenEIdKey = mkPreludeMiscIdUnique 291
fromToEIdKey = mkPreludeMiscIdUnique 292
fromThenToEIdKey = mkPreludeMiscIdUnique 293
listEIdKey = mkPreludeMiscIdUnique 294
sigEIdKey = mkPreludeMiscIdUnique 295
recConEIdKey = mkPreludeMiscIdUnique 296
recUpdEIdKey = mkPreludeMiscIdUnique 297
staticEIdKey = mkPreludeMiscIdUnique 298
unboundVarEIdKey = mkPreludeMiscIdUnique 299
-- type FieldExp = ...
fieldExpIdKey :: Unique
fieldExpIdKey = mkPreludeMiscIdUnique 305
-- data Body = ...
guardedBIdKey, normalBIdKey :: Unique
guardedBIdKey = mkPreludeMiscIdUnique 306
normalBIdKey = mkPreludeMiscIdUnique 307
-- data Guard = ...
normalGEIdKey, patGEIdKey :: Unique
normalGEIdKey = mkPreludeMiscIdUnique 308
patGEIdKey = mkPreludeMiscIdUnique 309
-- data Stmt = ...
bindSIdKey, letSIdKey, noBindSIdKey, parSIdKey :: Unique
bindSIdKey = mkPreludeMiscIdUnique 310
letSIdKey = mkPreludeMiscIdUnique 311
noBindSIdKey = mkPreludeMiscIdUnique 312
parSIdKey = mkPreludeMiscIdUnique 313
-- data Dec = ...
funDIdKey, valDIdKey, dataDIdKey, newtypeDIdKey, tySynDIdKey, classDIdKey,
instanceWithOverlapDIdKey, instanceDIdKey, sigDIdKey, forImpDIdKey,
pragInlDIdKey, pragSpecDIdKey, pragSpecInlDIdKey, pragSpecInstDIdKey,
pragRuleDIdKey, pragAnnDIdKey, defaultSigDIdKey, dataFamilyDIdKey,
openTypeFamilyDIdKey, closedTypeFamilyDIdKey, dataInstDIdKey,
newtypeInstDIdKey, tySynInstDIdKey, standaloneDerivWithStrategyDIdKey,
infixLDIdKey, infixRDIdKey, infixNDIdKey, roleAnnotDIdKey, patSynDIdKey,
patSynSigDIdKey :: Unique
funDIdKey = mkPreludeMiscIdUnique 320
valDIdKey = mkPreludeMiscIdUnique 321
dataDIdKey = mkPreludeMiscIdUnique 322
newtypeDIdKey = mkPreludeMiscIdUnique 323
tySynDIdKey = mkPreludeMiscIdUnique 324
classDIdKey = mkPreludeMiscIdUnique 325
instanceWithOverlapDIdKey = mkPreludeMiscIdUnique 326
instanceDIdKey = mkPreludeMiscIdUnique 327
sigDIdKey = mkPreludeMiscIdUnique 328
forImpDIdKey = mkPreludeMiscIdUnique 329
pragInlDIdKey = mkPreludeMiscIdUnique 330
pragSpecDIdKey = mkPreludeMiscIdUnique 331
pragSpecInlDIdKey = mkPreludeMiscIdUnique 332
pragSpecInstDIdKey = mkPreludeMiscIdUnique 333
pragRuleDIdKey = mkPreludeMiscIdUnique 334
pragAnnDIdKey = mkPreludeMiscIdUnique 335
dataFamilyDIdKey = mkPreludeMiscIdUnique 336
openTypeFamilyDIdKey = mkPreludeMiscIdUnique 337
dataInstDIdKey = mkPreludeMiscIdUnique 338
newtypeInstDIdKey = mkPreludeMiscIdUnique 339
tySynInstDIdKey = mkPreludeMiscIdUnique 340
closedTypeFamilyDIdKey = mkPreludeMiscIdUnique 341
infixLDIdKey = mkPreludeMiscIdUnique 342
infixRDIdKey = mkPreludeMiscIdUnique 343
infixNDIdKey = mkPreludeMiscIdUnique 344
roleAnnotDIdKey = mkPreludeMiscIdUnique 345
standaloneDerivWithStrategyDIdKey = mkPreludeMiscIdUnique 346
defaultSigDIdKey = mkPreludeMiscIdUnique 347
patSynDIdKey = mkPreludeMiscIdUnique 348
patSynSigDIdKey = mkPreludeMiscIdUnique 349
-- type Cxt = ...
cxtIdKey :: Unique
cxtIdKey = mkPreludeMiscIdUnique 350
-- data SourceUnpackedness = ...
noSourceUnpackednessKey, sourceNoUnpackKey, sourceUnpackKey :: Unique
noSourceUnpackednessKey = mkPreludeMiscIdUnique 351
sourceNoUnpackKey = mkPreludeMiscIdUnique 352
sourceUnpackKey = mkPreludeMiscIdUnique 353
-- data SourceStrictness = ...
noSourceStrictnessKey, sourceLazyKey, sourceStrictKey :: Unique
noSourceStrictnessKey = mkPreludeMiscIdUnique 354
sourceLazyKey = mkPreludeMiscIdUnique 355
sourceStrictKey = mkPreludeMiscIdUnique 356
-- data Con = ...
normalCIdKey, recCIdKey, infixCIdKey, forallCIdKey, gadtCIdKey,
recGadtCIdKey :: Unique
normalCIdKey = mkPreludeMiscIdUnique 357
recCIdKey = mkPreludeMiscIdUnique 358
infixCIdKey = mkPreludeMiscIdUnique 359
forallCIdKey = mkPreludeMiscIdUnique 360
gadtCIdKey = mkPreludeMiscIdUnique 361
recGadtCIdKey = mkPreludeMiscIdUnique 362
-- data Bang = ...
bangIdKey :: Unique
bangIdKey = mkPreludeMiscIdUnique 363
-- type BangType = ...
bangTKey :: Unique
bangTKey = mkPreludeMiscIdUnique 364
-- type VarBangType = ...
varBangTKey :: Unique
varBangTKey = mkPreludeMiscIdUnique 365
-- data PatSynDir = ...
unidirPatSynIdKey, implBidirPatSynIdKey, explBidirPatSynIdKey :: Unique
unidirPatSynIdKey = mkPreludeMiscIdUnique 366
implBidirPatSynIdKey = mkPreludeMiscIdUnique 367
explBidirPatSynIdKey = mkPreludeMiscIdUnique 368
-- data PatSynArgs = ...
prefixPatSynIdKey, infixPatSynIdKey, recordPatSynIdKey :: Unique
prefixPatSynIdKey = mkPreludeMiscIdUnique 369
infixPatSynIdKey = mkPreludeMiscIdUnique 370
recordPatSynIdKey = mkPreludeMiscIdUnique 371
-- data Type = ...
forallTIdKey, varTIdKey, conTIdKey, tupleTIdKey, unboxedTupleTIdKey,
unboxedSumTIdKey, arrowTIdKey, listTIdKey, appTIdKey, sigTIdKey,
equalityTIdKey, litTIdKey, promotedTIdKey, promotedTupleTIdKey,
promotedNilTIdKey, promotedConsTIdKey, wildCardTIdKey :: Unique
forallTIdKey = mkPreludeMiscIdUnique 380
varTIdKey = mkPreludeMiscIdUnique 381
conTIdKey = mkPreludeMiscIdUnique 382
tupleTIdKey = mkPreludeMiscIdUnique 383
unboxedTupleTIdKey = mkPreludeMiscIdUnique 384
unboxedSumTIdKey = mkPreludeMiscIdUnique 385
arrowTIdKey = mkPreludeMiscIdUnique 386
listTIdKey = mkPreludeMiscIdUnique 387
appTIdKey = mkPreludeMiscIdUnique 388
sigTIdKey = mkPreludeMiscIdUnique 389
equalityTIdKey = mkPreludeMiscIdUnique 390
litTIdKey = mkPreludeMiscIdUnique 391
promotedTIdKey = mkPreludeMiscIdUnique 392
promotedTupleTIdKey = mkPreludeMiscIdUnique 393
promotedNilTIdKey = mkPreludeMiscIdUnique 394
promotedConsTIdKey = mkPreludeMiscIdUnique 395
wildCardTIdKey = mkPreludeMiscIdUnique 396
-- data TyLit = ...
numTyLitIdKey, strTyLitIdKey :: Unique
numTyLitIdKey = mkPreludeMiscIdUnique 400
strTyLitIdKey = mkPreludeMiscIdUnique 401
-- data TyVarBndr = ...
plainTVIdKey, kindedTVIdKey :: Unique
plainTVIdKey = mkPreludeMiscIdUnique 402
kindedTVIdKey = mkPreludeMiscIdUnique 403
-- data Role = ...
nominalRIdKey, representationalRIdKey, phantomRIdKey, inferRIdKey :: Unique
nominalRIdKey = mkPreludeMiscIdUnique 404
representationalRIdKey = mkPreludeMiscIdUnique 405
phantomRIdKey = mkPreludeMiscIdUnique 406
inferRIdKey = mkPreludeMiscIdUnique 407
-- data Kind = ...
varKIdKey, conKIdKey, tupleKIdKey, arrowKIdKey, listKIdKey, appKIdKey,
starKIdKey, constraintKIdKey :: Unique
varKIdKey = mkPreludeMiscIdUnique 408
conKIdKey = mkPreludeMiscIdUnique 409
tupleKIdKey = mkPreludeMiscIdUnique 410
arrowKIdKey = mkPreludeMiscIdUnique 411
listKIdKey = mkPreludeMiscIdUnique 412
appKIdKey = mkPreludeMiscIdUnique 413
starKIdKey = mkPreludeMiscIdUnique 414
constraintKIdKey = mkPreludeMiscIdUnique 415
-- data FamilyResultSig = ...
noSigIdKey, kindSigIdKey, tyVarSigIdKey :: Unique
noSigIdKey = mkPreludeMiscIdUnique 416
kindSigIdKey = mkPreludeMiscIdUnique 417
tyVarSigIdKey = mkPreludeMiscIdUnique 418
-- data InjectivityAnn = ...
injectivityAnnIdKey :: Unique
injectivityAnnIdKey = mkPreludeMiscIdUnique 419
-- data Callconv = ...
cCallIdKey, stdCallIdKey, cApiCallIdKey, primCallIdKey,
javaScriptCallIdKey :: Unique
cCallIdKey = mkPreludeMiscIdUnique 420
stdCallIdKey = mkPreludeMiscIdUnique 421
cApiCallIdKey = mkPreludeMiscIdUnique 422
primCallIdKey = mkPreludeMiscIdUnique 423
javaScriptCallIdKey = mkPreludeMiscIdUnique 424
-- data Safety = ...
unsafeIdKey, safeIdKey, interruptibleIdKey :: Unique
unsafeIdKey = mkPreludeMiscIdUnique 430
safeIdKey = mkPreludeMiscIdUnique 431
interruptibleIdKey = mkPreludeMiscIdUnique 432
-- data FunDep = ...
funDepIdKey :: Unique
funDepIdKey = mkPreludeMiscIdUnique 440
-- data FamFlavour = ...
typeFamIdKey, dataFamIdKey :: Unique
typeFamIdKey = mkPreludeMiscIdUnique 450
dataFamIdKey = mkPreludeMiscIdUnique 451
-- data TySynEqn = ...
tySynEqnIdKey :: Unique
tySynEqnIdKey = mkPreludeMiscIdUnique 460
-- quasiquoting
quoteExpKey, quotePatKey, quoteDecKey, quoteTypeKey :: Unique
quoteExpKey = mkPreludeMiscIdUnique 470
quotePatKey = mkPreludeMiscIdUnique 471
quoteDecKey = mkPreludeMiscIdUnique 472
quoteTypeKey = mkPreludeMiscIdUnique 473
-- data RuleBndr = ...
ruleVarIdKey, typedRuleVarIdKey :: Unique
ruleVarIdKey = mkPreludeMiscIdUnique 480
typedRuleVarIdKey = mkPreludeMiscIdUnique 481
-- data AnnTarget = ...
valueAnnotationIdKey, typeAnnotationIdKey, moduleAnnotationIdKey :: Unique
valueAnnotationIdKey = mkPreludeMiscIdUnique 490
typeAnnotationIdKey = mkPreludeMiscIdUnique 491
moduleAnnotationIdKey = mkPreludeMiscIdUnique 492
-- type DerivClause = ...
derivClauseIdKey :: Unique
derivClauseIdKey = mkPreludeMiscIdUnique 493
{-
************************************************************************
* *
RdrNames
* *
************************************************************************
-}
lift_RDR, mkNameG_dRDR, mkNameG_vRDR :: RdrName
lift_RDR = nameRdrName liftName
mkNameG_dRDR = nameRdrName mkNameG_dName
mkNameG_vRDR = nameRdrName mkNameG_vName
-- data Exp = ...
conE_RDR, litE_RDR, appE_RDR, infixApp_RDR :: RdrName
conE_RDR = nameRdrName conEName
litE_RDR = nameRdrName litEName
appE_RDR = nameRdrName appEName
infixApp_RDR = nameRdrName infixAppName
-- data Lit = ...
stringL_RDR, intPrimL_RDR, wordPrimL_RDR, floatPrimL_RDR,
doublePrimL_RDR, stringPrimL_RDR, charPrimL_RDR :: RdrName
stringL_RDR = nameRdrName stringLName
intPrimL_RDR = nameRdrName intPrimLName
wordPrimL_RDR = nameRdrName wordPrimLName
floatPrimL_RDR = nameRdrName floatPrimLName
doublePrimL_RDR = nameRdrName doublePrimLName
stringPrimL_RDR = nameRdrName stringPrimLName
charPrimL_RDR = nameRdrName charPrimLName
| olsner/ghc | compiler/prelude/THNames.hs | bsd-3-clause | 47,787 | 0 | 8 | 9,952 | 8,474 | 4,918 | 3,556 | 784 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Client.InstallPlan
-- Copyright : (c) Duncan Coutts 2008
-- License : BSD-like
--
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
--
-- Package installation plan
--
-----------------------------------------------------------------------------
module Distribution.Client.InstallPlan (
InstallPlan,
GenericInstallPlan,
PlanPackage,
GenericPlanPackage(..),
-- * Operations on 'InstallPlan's
new,
toList,
ready,
processing,
completed,
failed,
remove,
showPlanIndex,
showInstallPlan,
-- * Checking validity of plans
valid,
closed,
consistent,
acyclic,
-- ** Details on invalid plans
PlanProblem(..),
showPlanProblem,
problems,
-- ** Querying the install plan
dependencyClosure,
) where
import Distribution.InstalledPackageInfo
( InstalledPackageInfo )
import Distribution.Package
( PackageIdentifier(..), PackageName(..), Package(..)
, HasComponentId(..), ComponentId(..) )
import Distribution.Client.Types
( BuildSuccess, BuildFailure
, PackageFixedDeps(..), ConfiguredPackage
, GenericReadyPackage(..), fakeComponentId )
import Distribution.Version
( Version )
import Distribution.Client.ComponentDeps (ComponentDeps)
import qualified Distribution.Client.ComponentDeps as CD
import Distribution.Simple.PackageIndex
( PackageIndex )
import qualified Distribution.Simple.PackageIndex as PackageIndex
import Distribution.Client.PlanIndex
( FakeMap )
import qualified Distribution.Client.PlanIndex as PlanIndex
import Distribution.Text
( display )
import Data.List
( intercalate )
import Data.Maybe
( fromMaybe, maybeToList )
import qualified Data.Graph as Graph
import Data.Graph (Graph)
import Control.Exception
( assert )
import Data.Maybe (catMaybes)
import qualified Data.Map as Map
import qualified Data.Traversable as T
-- When cabal tries to install a number of packages, including all their
-- dependencies it has a non-trivial problem to solve.
--
-- The Problem:
--
-- In general we start with a set of installed packages and a set of source
-- packages.
--
-- Installed packages have fixed dependencies. They have already been built and
-- we know exactly what packages they were built against, including their exact
-- versions.
--
-- Source packages have somewhat flexible dependencies. They are specified as
-- version ranges, though really they're predicates. To make matters worse, they
-- have conditional flexible dependencies. Configuration flags can affect which
-- packages are required and can place additional constraints on their
-- versions.
--
-- These two sets of packages can and usually do overlap. There can be installed
-- packages that are also available as source packages which means they could
-- be re-installed if required, though there will also be packages which are
-- not available as source and cannot be re-installed. Very often there will be
-- more versions available than are installed. Sometimes we may like to prefer
-- installed packages over source ones or perhaps always prefer the latest
-- available version whether installed or not.
--
-- The goal is to calculate an installation plan that is closed, acyclic and
-- consistent and where every configured package is valid.
--
-- An installation plan is a set of packages that are going to be used
-- together. It will consist of a mixture of installed packages and source
-- packages along with their exact version dependencies. An installation plan
-- is closed if for every package in the set, all of its dependencies are
-- also in the set. It is consistent if for every package in the set, all
-- dependencies which target that package have the same version.
-- Note that plans do not necessarily compose. You might have a valid plan for
-- package A and a valid plan for package B. That does not mean the composition
-- is simultaneously valid for A and B. In particular you're most likely to
-- have problems with inconsistent dependencies.
-- On the other hand it is true that every closed sub plan is valid.
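-- NOTE (editorial sketch, not part of the original cabal-install source): the
-- function below is a minimal, hypothetical illustration of the workflow just
-- described.  Given an already-resolved 'PlanIndex' it either produces a plan
-- or a human-readable summary of why the index is not closed, acyclic and
-- consistent, using 'new' and 'showPlanProblem' which are defined further below.
examplePlanOrExplain
  :: (Package ipkg, HasComponentId ipkg, PackageFixedDeps ipkg,
      Package srcpkg, HasComponentId srcpkg, PackageFixedDeps srcpkg)
  => PlanIndex ipkg srcpkg iresult ifailure
  -> Either String (GenericInstallPlan ipkg srcpkg iresult ifailure)
examplePlanOrExplain index =
  case new False index of            -- False: goals are not independent
    Left probs -> Left (intercalate "\n" (map showPlanProblem probs))
    Right plan -> Right plan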
-- | Packages in an install plan
--
-- NOTE: 'ConfiguredPackage', 'GenericReadyPackage' and 'GenericPlanPackage'
-- intentionally have no 'PackageInstalled' instance. This is important:
-- PackageInstalled returns only library dependencies, but for package that
-- aren't yet installed we know many more kinds of dependencies (setup
-- dependencies, exe, test-suite, benchmark, ..). Any functions that operate on
-- dependencies in cabal-install should consider what to do with these
-- dependencies; if we give a 'PackageInstalled' instance it would be too easy
-- to get this wrong (and, for instance, call graph traversal functions from
-- Cabal rather than from cabal-install). Instead, see 'PackageFixedDeps'.
data GenericPlanPackage ipkg srcpkg iresult ifailure
= PreExisting ipkg
| Configured srcpkg
| Processing (GenericReadyPackage srcpkg ipkg)
| Installed (GenericReadyPackage srcpkg ipkg) (Maybe ipkg) iresult
| Failed srcpkg ifailure
type PlanPackage = GenericPlanPackage
InstalledPackageInfo ConfiguredPackage
BuildSuccess BuildFailure
instance (Package ipkg, Package srcpkg) =>
Package (GenericPlanPackage ipkg srcpkg iresult ifailure) where
packageId (PreExisting ipkg) = packageId ipkg
packageId (Configured spkg) = packageId spkg
packageId (Processing rpkg) = packageId rpkg
packageId (Installed rpkg _ _) = packageId rpkg
packageId (Failed spkg _) = packageId spkg
instance (PackageFixedDeps srcpkg,
PackageFixedDeps ipkg, HasComponentId ipkg) =>
PackageFixedDeps (GenericPlanPackage ipkg srcpkg iresult ifailure) where
depends (PreExisting pkg) = depends pkg
depends (Configured pkg) = depends pkg
depends (Processing pkg) = depends pkg
depends (Installed pkg _ _) = depends pkg
depends (Failed pkg _) = depends pkg
instance (HasComponentId ipkg, HasComponentId srcpkg) =>
HasComponentId
(GenericPlanPackage ipkg srcpkg iresult ifailure) where
installedComponentId (PreExisting ipkg ) = installedComponentId ipkg
installedComponentId (Configured spkg) = installedComponentId spkg
installedComponentId (Processing rpkg) = installedComponentId rpkg
-- NB: defer to the actual installed package info in this case
installedComponentId (Installed _ (Just ipkg) _) = installedComponentId ipkg
installedComponentId (Installed rpkg _ _) = installedComponentId rpkg
installedComponentId (Failed spkg _) = installedComponentId spkg
data GenericInstallPlan ipkg srcpkg iresult ifailure = GenericInstallPlan {
planIndex :: (PlanIndex ipkg srcpkg iresult ifailure),
planFakeMap :: FakeMap,
planGraph :: Graph,
planGraphRev :: Graph,
planPkgOf :: Graph.Vertex
-> GenericPlanPackage ipkg srcpkg iresult ifailure,
planVertexOf :: ComponentId -> Graph.Vertex,
planIndepGoals :: Bool
}
-- | 'GenericInstallPlan' specialised to the most commonly used types.
type InstallPlan = GenericInstallPlan
InstalledPackageInfo ConfiguredPackage
BuildSuccess BuildFailure
type PlanIndex ipkg srcpkg iresult ifailure =
PackageIndex (GenericPlanPackage ipkg srcpkg iresult ifailure)
invariant :: (HasComponentId ipkg, PackageFixedDeps ipkg,
HasComponentId srcpkg, PackageFixedDeps srcpkg)
=> GenericInstallPlan ipkg srcpkg iresult ifailure -> Bool
invariant plan =
valid (planFakeMap plan)
(planIndepGoals plan)
(planIndex plan)
internalError :: String -> a
internalError msg = error $ "InstallPlan: internal error: " ++ msg
showPlanIndex :: (HasComponentId ipkg, HasComponentId srcpkg)
=> PlanIndex ipkg srcpkg iresult ifailure -> String
showPlanIndex index =
intercalate "\n" (map showPlanPackage (PackageIndex.allPackages index))
where showPlanPackage p =
showPlanPackageTag p ++ " "
++ display (packageId p) ++ " ("
++ display (installedComponentId p) ++ ")"
showInstallPlan :: (HasComponentId ipkg, HasComponentId srcpkg)
=> GenericInstallPlan ipkg srcpkg iresult ifailure -> String
showInstallPlan plan =
showPlanIndex (planIndex plan) ++ "\n" ++
"fake map:\n " ++
intercalate "\n " (map showKV (Map.toList (planFakeMap plan)))
where showKV (k,v) = display k ++ " -> " ++ display v
showPlanPackageTag :: GenericPlanPackage ipkg srcpkg iresult ifailure -> String
showPlanPackageTag (PreExisting _) = "PreExisting"
showPlanPackageTag (Configured _) = "Configured"
showPlanPackageTag (Processing _) = "Processing"
showPlanPackageTag (Installed _ _ _) = "Installed"
showPlanPackageTag (Failed _ _) = "Failed"
-- | Build an installation plan from a valid set of resolved packages.
--
new :: (HasComponentId ipkg, PackageFixedDeps ipkg,
HasComponentId srcpkg, PackageFixedDeps srcpkg)
=> Bool
-> PlanIndex ipkg srcpkg iresult ifailure
-> Either [PlanProblem ipkg srcpkg iresult ifailure]
(GenericInstallPlan ipkg srcpkg iresult ifailure)
new indepGoals index =
-- NB: Need to pre-initialize the fake-map with pre-existing
-- packages
let isPreExisting (PreExisting _) = True
isPreExisting _ = False
fakeMap = Map.fromList
. map (\p -> (fakeComponentId (packageId p)
,installedComponentId p))
. filter isPreExisting
$ PackageIndex.allPackages index in
case problems fakeMap indepGoals index of
[] -> Right GenericInstallPlan {
planIndex = index,
planFakeMap = fakeMap,
planGraph = graph,
planGraphRev = Graph.transposeG graph,
planPkgOf = vertexToPkgId,
planVertexOf = fromMaybe noSuchPkgId . pkgIdToVertex,
planIndepGoals = indepGoals
}
where (graph, vertexToPkgId, pkgIdToVertex) =
PlanIndex.dependencyGraph fakeMap index
noSuchPkgId = internalError "package is not in the graph"
probs -> Left probs
toList :: GenericInstallPlan ipkg srcpkg iresult ifailure
-> [GenericPlanPackage ipkg srcpkg iresult ifailure]
toList = PackageIndex.allPackages . planIndex
-- | Remove packages from the install plan. This will result in an
-- error if there are remaining packages that depend on any matching
-- package. This is primarily useful for obtaining an install plan for
-- the dependencies of a package or set of packages without actually
-- installing the package itself, as when doing development.
--
remove :: (HasComponentId ipkg, PackageFixedDeps ipkg,
HasComponentId srcpkg, PackageFixedDeps srcpkg)
=> (GenericPlanPackage ipkg srcpkg iresult ifailure -> Bool)
-> GenericInstallPlan ipkg srcpkg iresult ifailure
-> Either [PlanProblem ipkg srcpkg iresult ifailure]
(GenericInstallPlan ipkg srcpkg iresult ifailure)
remove shouldRemove plan =
new (planIndepGoals plan) newIndex
where
newIndex = PackageIndex.fromList $
filter (not . shouldRemove) (toList plan)
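-- NOTE (editorial sketch, not part of the original cabal-install source): as a
-- hypothetical example of the use case described above, a development tool
-- could drop a single target package (identified by its 'PackageIdentifier')
-- to obtain a plan covering only the target's dependencies:
exampleRemoveTarget
  :: (Package ipkg, HasComponentId ipkg, PackageFixedDeps ipkg,
      Package srcpkg, HasComponentId srcpkg, PackageFixedDeps srcpkg)
  => PackageIdentifier
  -> GenericInstallPlan ipkg srcpkg iresult ifailure
  -> Either [PlanProblem ipkg srcpkg iresult ifailure]
            (GenericInstallPlan ipkg srcpkg iresult ifailure)
exampleRemoveTarget targetId = remove ((== targetId) . packageId)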
-- | The packages that are ready to be installed. That is, they are in the
-- configured state and have all their dependencies installed already.
-- The plan is complete if the result is @[]@.
--
ready :: forall ipkg srcpkg iresult ifailure. PackageFixedDeps srcpkg
=> GenericInstallPlan ipkg srcpkg iresult ifailure
-> [GenericReadyPackage srcpkg ipkg]
ready plan = assert check readyPackages
where
check = if null readyPackages && null processingPackages
then null configuredPackages
else True
configuredPackages = [ pkg | Configured pkg <- toList plan ]
processingPackages = [ pkg | Processing pkg <- toList plan]
readyPackages :: [GenericReadyPackage srcpkg ipkg]
readyPackages =
[ ReadyPackage srcpkg deps
| srcpkg <- configuredPackages
        -- select only the packages that have all of their deps installed:
, deps <- maybeToList (hasAllInstalledDeps srcpkg)
]
hasAllInstalledDeps :: srcpkg -> Maybe (ComponentDeps [ipkg])
hasAllInstalledDeps = T.mapM (mapM isInstalledDep) . depends
isInstalledDep :: ComponentId -> Maybe ipkg
isInstalledDep pkgid =
-- NB: Need to check if the ID has been updated in planFakeMap, in which
-- case we might be dealing with an old pointer
case PlanIndex.fakeLookupComponentId
(planFakeMap plan) (planIndex plan) pkgid
of
Just (PreExisting ipkg) -> Just ipkg
Just (Configured _) -> Nothing
Just (Processing _) -> Nothing
Just (Installed _ (Just ipkg) _) -> Just ipkg
Just (Installed _ Nothing _) -> internalError depOnNonLib
Just (Failed _ _) -> internalError depOnFailed
Nothing -> internalError incomplete
incomplete = "install plan is not closed"
depOnFailed = "configured package depends on failed package"
depOnNonLib = "configured package depends on a non-library package"
-- | Marks packages in the graph as currently processing (e.g. building).
--
-- * The package must exist in the graph and be in the configured state.
--
processing :: (HasComponentId ipkg, PackageFixedDeps ipkg,
HasComponentId srcpkg, PackageFixedDeps srcpkg)
=> [GenericReadyPackage srcpkg ipkg]
-> GenericInstallPlan ipkg srcpkg iresult ifailure
-> GenericInstallPlan ipkg srcpkg iresult ifailure
processing pkgs plan = assert (invariant plan') plan'
where
plan' = plan {
planIndex = PackageIndex.merge (planIndex plan) processingPkgs
}
processingPkgs = PackageIndex.fromList [Processing pkg | pkg <- pkgs]
-- | Marks a package in the graph as completed. Also saves the build result for
-- the completed package in the plan.
--
-- * The package must exist in the graph and be in the processing state.
-- * The package must have had no uninstalled dependent packages.
--
completed :: (HasComponentId ipkg, PackageFixedDeps ipkg,
HasComponentId srcpkg, PackageFixedDeps srcpkg)
=> ComponentId
-> Maybe ipkg -> iresult
-> GenericInstallPlan ipkg srcpkg iresult ifailure
-> GenericInstallPlan ipkg srcpkg iresult ifailure
completed pkgid mipkg buildResult plan = assert (invariant plan') plan'
where
plan' = plan {
-- NB: installation can change the IPID, so better
-- record it in the fake mapping...
planFakeMap = insert_fake_mapping mipkg
$ planFakeMap plan,
planIndex = PackageIndex.insert installed
. PackageIndex.deleteComponentId pkgid
$ planIndex plan
}
-- ...but be sure to use the *old* IPID for the lookup for the
-- preexisting record
installed = Installed (lookupProcessingPackage plan pkgid) mipkg buildResult
insert_fake_mapping (Just ipkg) = Map.insert pkgid (installedComponentId ipkg)
insert_fake_mapping _ = id
-- | Marks a package in the graph as having failed. It also marks all the
-- packages that depended on it as having failed.
--
-- * The package must exist in the graph and be in the processing
-- state.
--
failed :: (HasComponentId ipkg, PackageFixedDeps ipkg,
HasComponentId srcpkg, PackageFixedDeps srcpkg)
=> ComponentId -- ^ The id of the package that failed to install
-> ifailure -- ^ The build result to use for the failed package
-> ifailure -- ^ The build result to use for its dependencies
-> GenericInstallPlan ipkg srcpkg iresult ifailure
-> GenericInstallPlan ipkg srcpkg iresult ifailure
failed pkgid buildResult buildResult' plan = assert (invariant plan') plan'
where
-- NB: failures don't update IPIDs
plan' = plan {
planIndex = PackageIndex.merge (planIndex plan) failures
}
ReadyPackage srcpkg _deps = lookupProcessingPackage plan pkgid
failures = PackageIndex.fromList
$ Failed srcpkg buildResult
: [ Failed pkg' buildResult'
| Just pkg' <- map checkConfiguredPackage
$ packagesThatDependOn plan pkgid ]
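-- NOTE (editorial sketch, not part of the original cabal-install source): a
-- minimal sequential driver showing how 'ready', 'processing', 'completed' and
-- 'failed' are intended to fit together.  The @buildOne@ callback and the
-- reuse of one failure value for both a failed package and its dependents are
-- simplifying assumptions made purely for illustration.
exampleSequentialBuild
  :: (HasComponentId ipkg, PackageFixedDeps ipkg,
      HasComponentId srcpkg, PackageFixedDeps srcpkg)
  => (GenericReadyPackage srcpkg ipkg
        -> IO (Either ifailure (Maybe ipkg, iresult)))
  -> GenericInstallPlan ipkg srcpkg iresult ifailure
  -> IO (GenericInstallPlan ipkg srcpkg iresult ifailure)
exampleSequentialBuild buildOne = go
  where
    go plan =
      case ready plan of
        []         -> return plan              -- nothing buildable remains
        (rpkg : _) -> do
          let pkgid = installedComponentId rpkg
              plan' = processing [rpkg] plan   -- mark the package as in progress
          result <- buildOne rpkg
          case result of
            Right (mipkg, ok) -> go (completed pkgid mipkg ok plan')
            Left err          -> go (failed pkgid err err plan')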
-- | Lookup the reachable packages in the reverse dependency graph.
--
packagesThatDependOn :: GenericInstallPlan ipkg srcpkg iresult ifailure
-> ComponentId
-> [GenericPlanPackage ipkg srcpkg iresult ifailure]
packagesThatDependOn plan pkgid = map (planPkgOf plan)
. tail
. Graph.reachable (planGraphRev plan)
. planVertexOf plan
$ Map.findWithDefault pkgid pkgid (planFakeMap plan)
-- | Lookup a package that we expect to be in the processing state.
--
lookupProcessingPackage :: GenericInstallPlan ipkg srcpkg iresult ifailure
-> ComponentId
-> GenericReadyPackage srcpkg ipkg
lookupProcessingPackage plan pkgid =
-- NB: processing packages are guaranteed to not indirect through
-- planFakeMap
case PackageIndex.lookupComponentId (planIndex plan) pkgid of
Just (Processing pkg) -> pkg
_ -> internalError $ "not in processing state or no such pkg " ++
display pkgid
-- | Check a package that we expect to be in the configured or failed state.
--
checkConfiguredPackage :: (Package srcpkg, Package ipkg)
=> GenericPlanPackage ipkg srcpkg iresult ifailure
-> Maybe srcpkg
checkConfiguredPackage (Configured pkg) = Just pkg
checkConfiguredPackage (Failed _ _) = Nothing
checkConfiguredPackage pkg =
internalError $ "not configured or no such pkg " ++ display (packageId pkg)
-- ------------------------------------------------------------
-- * Checking validity of plans
-- ------------------------------------------------------------
-- | A valid installation plan is a set of packages that is 'acyclic',
-- 'closed' and 'consistent'. Also, every 'ConfiguredPackage' in the
-- plan has to have a valid configuration (see 'configuredPackageValid').
--
-- * if the result is @False@ use 'problems' to get a detailed list.
--
valid :: (HasComponentId ipkg, PackageFixedDeps ipkg,
HasComponentId srcpkg, PackageFixedDeps srcpkg)
=> FakeMap -> Bool
-> PlanIndex ipkg srcpkg iresult ifailure
-> Bool
valid fakeMap indepGoals index =
null $ problems fakeMap indepGoals index
data PlanProblem ipkg srcpkg iresult ifailure =
PackageMissingDeps (GenericPlanPackage ipkg srcpkg iresult ifailure)
[PackageIdentifier]
| PackageCycle [GenericPlanPackage ipkg srcpkg iresult ifailure]
| PackageInconsistency PackageName [(PackageIdentifier, Version)]
| PackageStateInvalid (GenericPlanPackage ipkg srcpkg iresult ifailure)
(GenericPlanPackage ipkg srcpkg iresult ifailure)
showPlanProblem :: (Package ipkg, Package srcpkg)
=> PlanProblem ipkg srcpkg iresult ifailure -> String
showPlanProblem (PackageMissingDeps pkg missingDeps) =
"Package " ++ display (packageId pkg)
++ " depends on the following packages which are missing from the plan: "
++ intercalate ", " (map display missingDeps)
showPlanProblem (PackageCycle cycleGroup) =
"The following packages are involved in a dependency cycle "
++ intercalate ", " (map (display.packageId) cycleGroup)
showPlanProblem (PackageInconsistency name inconsistencies) =
"Package " ++ display name
++ " is required by several packages,"
++ " but they require inconsistent versions:\n"
++ unlines [ " package " ++ display pkg ++ " requires "
++ display (PackageIdentifier name ver)
| (pkg, ver) <- inconsistencies ]
showPlanProblem (PackageStateInvalid pkg pkg') =
"Package " ++ display (packageId pkg)
++ " is in the " ++ showPlanState pkg
++ " state but it depends on package " ++ display (packageId pkg')
++ " which is in the " ++ showPlanState pkg'
++ " state"
where
showPlanState (PreExisting _) = "pre-existing"
showPlanState (Configured _) = "configured"
showPlanState (Processing _) = "processing"
showPlanState (Installed _ _ _) = "installed"
showPlanState (Failed _ _) = "failed"
-- | For an invalid plan, produce a detailed list of problems. This is mainly
-- intended for debugging purposes; use 'showPlanProblem' to render each
-- problem as a human-readable error message.
--
problems :: (HasComponentId ipkg, PackageFixedDeps ipkg,
HasComponentId srcpkg, PackageFixedDeps srcpkg)
=> FakeMap -> Bool
-> PlanIndex ipkg srcpkg iresult ifailure
-> [PlanProblem ipkg srcpkg iresult ifailure]
problems fakeMap indepGoals index =
[ PackageMissingDeps pkg
(catMaybes
(map
(fmap packageId . PlanIndex.fakeLookupComponentId fakeMap index)
missingDeps))
| (pkg, missingDeps) <- PlanIndex.brokenPackages fakeMap index ]
++ [ PackageCycle cycleGroup
| cycleGroup <- PlanIndex.dependencyCycles fakeMap index ]
++ [ PackageInconsistency name inconsistencies
| (name, inconsistencies) <-
PlanIndex.dependencyInconsistencies fakeMap indepGoals index ]
++ [ PackageStateInvalid pkg pkg'
| pkg <- PackageIndex.allPackages index
, Just pkg' <- map (PlanIndex.fakeLookupComponentId fakeMap index)
(CD.nonSetupDeps (depends pkg))
, not (stateDependencyRelation pkg pkg') ]
-- | The graph of packages (nodes) and dependencies (edges) must be acyclic.
--
-- * if the result is @False@ use 'PackageIndex.dependencyCycles' to find out
-- which packages are involved in dependency cycles.
--
acyclic :: (HasComponentId ipkg, PackageFixedDeps ipkg,
HasComponentId srcpkg, PackageFixedDeps srcpkg)
=> FakeMap -> PlanIndex ipkg srcpkg iresult ifailure -> Bool
acyclic fakeMap = null . PlanIndex.dependencyCycles fakeMap
-- | An installation plan is closed if for every package in the set, all of
-- its dependencies are also in the set. That is, the set is closed under the
-- dependency relation.
--
-- * if the result is @False@ use 'PackageIndex.brokenPackages' to find out
-- which packages depend on packages not in the index.
--
closed :: (HasComponentId ipkg, PackageFixedDeps ipkg,
PackageFixedDeps srcpkg)
=> FakeMap -> PlanIndex ipkg srcpkg iresult ifailure -> Bool
closed fakeMap = null . PlanIndex.brokenPackages fakeMap
-- | An installation plan is consistent if all dependencies that target a
-- single package name, target the same version.
--
-- This is slightly subtle. It is not the same as requiring that there be at
-- most one version of any package in the set. It only requires that of
-- packages which have more than one other package depending on them. We could
-- actually make the condition even more precise and say that different
-- versions are OK so long as they are not both in the transitive closure of
-- any other package (or equivalently that their inverse closures do not
-- intersect). The point is we do not want to have any packages depending
-- directly or indirectly on two different versions of the same package. The
-- current definition is just a safe approximation of that.
--
-- * if the result is @False@ use 'PackageIndex.dependencyInconsistencies' to
-- find out which packages are inconsistent.
--
consistent :: (HasComponentId ipkg, PackageFixedDeps ipkg,
HasComponentId srcpkg, PackageFixedDeps srcpkg)
=> FakeMap -> PlanIndex ipkg srcpkg iresult ifailure -> Bool
consistent fakeMap = null . PlanIndex.dependencyInconsistencies fakeMap False
-- | The states of packages that depend on each other must respect
-- this relation. That is, for every case where package @a@ depends on
-- package @b@ we require that @stateDependencyRelation a b = True@.
--
stateDependencyRelation :: GenericPlanPackage ipkg srcpkg iresult ifailure
-> GenericPlanPackage ipkg srcpkg iresult ifailure
-> Bool
stateDependencyRelation (PreExisting _) (PreExisting _) = True
stateDependencyRelation (Configured _) (PreExisting _) = True
stateDependencyRelation (Configured _) (Configured _) = True
stateDependencyRelation (Configured _) (Processing _) = True
stateDependencyRelation (Configured _) (Installed _ _ _) = True
stateDependencyRelation (Processing _) (PreExisting _) = True
stateDependencyRelation (Processing _) (Installed _ _ _) = True
stateDependencyRelation (Installed _ _ _) (PreExisting _) = True
stateDependencyRelation (Installed _ _ _) (Installed _ _ _) = True
stateDependencyRelation (Failed _ _) (PreExisting _) = True
-- failed can depend on configured because a package can depend on
-- several other packages and if one of the deps fails then we fail
-- but we still depend on the other ones that did not fail:
stateDependencyRelation (Failed _ _) (Configured _) = True
stateDependencyRelation (Failed _ _) (Processing _) = True
stateDependencyRelation (Failed _ _) (Installed _ _ _) = True
stateDependencyRelation (Failed _ _) (Failed _ _) = True
stateDependencyRelation _ _ = False
-- | Compute the dependency closure of a _source_ package in an install plan
--
-- See `Distribution.Client.PlanIndex.dependencyClosure`
dependencyClosure :: (HasComponentId ipkg, PackageFixedDeps ipkg,
HasComponentId srcpkg, PackageFixedDeps srcpkg)
=> GenericInstallPlan ipkg srcpkg iresult ifailure
-> [PackageIdentifier]
-> Either [(GenericPlanPackage ipkg srcpkg iresult ifailure,
[ComponentId])]
(PackageIndex
(GenericPlanPackage ipkg srcpkg iresult ifailure))
dependencyClosure installPlan pids =
PlanIndex.dependencyClosure
(planFakeMap installPlan)
(planIndex installPlan)
(map (resolveFakeId . fakeComponentId) pids)
where
resolveFakeId :: ComponentId -> ComponentId
resolveFakeId ipid = Map.findWithDefault ipid ipid (planFakeMap installPlan)
| thoughtpolice/cabal | cabal-install/Distribution/Client/InstallPlan.hs | bsd-3-clause | 26,957 | 0 | 18 | 6,377 | 4,778 | 2,536 | 2,242 | 374 | 8 |
--
-- Data vault for metrics
--
-- Copyright © 2013-2014 Anchor Systems, Pty Ltd and Others
--
-- The code in this file, and the program it is a part of, is
-- made available to you by its authors as open source software:
-- you can redistribute it and/or modify it under the terms of
-- the 3-clause BSD licence.
--
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Main where
import Control.Concurrent
import Control.Concurrent.Async
import Control.Monad
import Data.Binary.IEEE754 (doubleToWord, wordToDouble)
import qualified Data.ByteString.Char8 as S
import Data.Word
import Options.Applicative
import System.Log.Logger
import Text.Printf
import Marquise.Client
import Package
import Vaultaire.Program
import Vaultaire.Types
data Options = Options {
debug :: Bool
}
-- | Command line option parsing
helpfulParser :: ParserInfo Options
helpfulParser = info (helper <*> optionsParser) fullDesc
optionsParser :: Parser Options
optionsParser =
Options <$> parseDebug
where
parseDebug = switch $
long "debug"
<> short 'd'
<> help "Set log level to DEBUG"
parseOrigin :: Parser Origin
parseOrigin = argument (fmap mkOrigin str) (metavar "ORIGIN")
where
mkOrigin = Origin . S.pack
main :: IO ()
main = do
Options{..} <- execParser helpfulParser
let level = if debug
then Debug
else Normal
quit <- initializeProgram (package ++ "-" ++ version) level
logM "Main.main" DEBUG "Starting generator"
spool <- createSpoolFiles "demowave"
let a = hashIdentifier "This is a test of the emergency broadcast system"
loop quit spool a
logM "Main.main" DEBUG "End"
loop :: MVar () -> SpoolFiles -> Address -> IO ()
loop shutdown spool address = do
i <- getCurrentTimeNanoseconds
let v = demoWaveAt i
let msg = printf "%s\t%d\t% 9.6f" (show address) (unTimeStamp i) (wordToDouble v)
logM "Main.loop" DEBUG msg
queueSimple spool address i v
a1 <- async (do
readMVar shutdown
return True)
a2 <- async (do
threadDelay (5 * 1000000) -- every 5 s
return False)
(_,done) <- waitAny [a1,a2]
unless done $ loop shutdown spool address
demoWaveAt :: TimeStamp -> Word64
demoWaveAt (TimeStamp x) =
let
period = 3600 * 3
f = 1/period -- instances per second
w = 2 * pi * f :: Double
t = ((/ 1e9) . fromRational . toRational) x
y = sin (w * t)
in
doubleToWord y
| afcowie/vaultaire | src/DemoWave.hs | bsd-3-clause | 2,540 | 0 | 14 | 643 | 666 | 342 | 324 | 66 | 2 |
{-# LANGUAGE OverloadedStrings #-}
-- Requirements:
-- - Empty ~/.haskeline (or set to defaults)
-- - On Mac OS X, the "dumb term" test may fail.
-- In particular, the line "* UTF-8" makes locale_charset()
-- always return UTF-8; otherwise we can't test latin-1.
-- - NB: Window size isn't provided by screen so it's picked up from
-- terminfo or defaults (either way: 80x24), rather than the user's
-- terminal.
module Main where
import Control.Monad (when)
import qualified Data.ByteString as B
import qualified Data.ByteString.Char8 as BC
import Data.Word
import qualified Data.Text as T
import qualified Data.Text.Encoding as E
import Data.Monoid ((<>))
import System.Exit (exitFailure)
import System.Process (readProcess)
import Test.HUnit
import RunTTY
legacyEncoding :: Bool
legacyEncoding = False
-- Generally we want the legacy and new backends to perform the same.
-- The only two differences I'm aware of are:
-- 1. base decodes invalid bytes as '\65533', but legacy decodes them as '?'
-- 2. if there's an incomplete sequence and no more input immediately
-- available (but not eof), then base will pause to wait for more input,
-- whereas legacy will immediately stop.
whenLegacy :: BC.ByteString -> BC.ByteString
whenLegacy s = if legacyEncoding then s else B.empty
main :: IO ()
main = do
-- forkProcess needs an absolute path to the binary.
p <- head . lines <$> readProcess "which" ["haskeline-examples-Test"] ""
let i = setTerm "xterm"
Invocation {
prog = p,
progArgs = [],
runInTTY = True,
environment = []
}
result <- runTestTT $ test [interactionTests i, fileStyleTests i]
when (errors result > 0 || failures result > 0) exitFailure
interactionTests :: Invocation -> Test
interactionTests i = "interaction" ~: test
[ unicodeEncoding i
, unicodeMovement i
, tabCompletion i
, incorrectInput i
, historyTests i
, inputChar $ setCharInput i
, dumbTests $ setTerm "dumb" i
]
unicodeEncoding :: Invocation -> Test
unicodeEncoding i = "Unicode encoding (valid)" ~:
[ utf8Test i [utf8 "xαβγy"]
[prompt 0, utf8 "xαβγy"]
, utf8Test i [utf8 "a\n", "quit\n"]
[ prompt 0
, utf8 "a" <> end
<> output 0 (utf8 "a") <> prompt 1
, utf8 "quit" <> end
]
, utf8Test i [utf8 "xαβyψ안기q영\n", "quit\n"]
[ prompt 0
, utf8 "xαβyψ안기q영" <> end
<> output 0 (utf8 "xαβyψ안기q영") <> prompt 1
, utf8 "quit" <> end
]
-- test buffering: 32 bytes is in middle of a char encoding,
-- also test long paste
, "multipleLines" ~: utf8Test i [l1 <> "\n" <> l1]
[ prompt 0
, l1 <> end <> output 0 l1 <> prompt 1 <> l1]
]
where
l1 = utf8 $ T.replicate 30 "안" -- three bytes, width 60
unicodeMovement :: Invocation -> Test
unicodeMovement i = "Unicode movement" ~:
[ "separate" ~: utf8Test i [utf8 "α", utf8 "\ESC[Dx"]
[prompt 0, utf8 "α", utf8 "\bxα\b"]
, "coalesced" ~: utf8Test i [utf8 "α\ESC[Dx"]
[prompt 0, utf8 "xα\b"]
, "lineWrap" ~: utf8Test i
[ utf8 longWideChar
, raw [1]
, raw [5]
]
[prompt 0, utf8 lwc1 <> wrap <> utf8 lwc2 <> wrap <> utf8 lwc3
, cr <> "\ESC[2A\ESC[2C"
, cr <> nl <> nl <> "\ESC[22C"
]
]
where
longWideChar = T.concat $ replicate 30 $ "안기영"
(lwc1,lwcs1) = T.splitAt ((80-2)`div`2) longWideChar
(lwc2,lwcs2) = T.splitAt (80`div`2) lwcs1
(lwc3,_lwcs3) = T.splitAt (80`div`2) lwcs2
    -- lwc3 has length 90 - (80-2)/2 - 80/2 = 11,
    -- so the last line has wide width 2*11=22.
tabCompletion :: Invocation -> Test
tabCompletion i = "tab completion" ~:
[ utf8Test i [ utf8 "tests/dummy-μ\t\t" ]
[ prompt 0, utf8 "tests/dummy-μασ/"
<> nl <> utf8 "ςερτ bar" <> nl
<> prompt' 0 <> utf8 "tests/dummy-μασ/"
]
]
incorrectInput :: Invocation -> Test
incorrectInput i = "incorrect input" ~:
[ utf8Test i [ utf8 "x" <> raw [206] ] -- needs one more byte
-- non-legacy encoder ignores the "206" since it's still waiting
-- for more input.
[ prompt 0, utf8 "x" <> whenLegacy err ]
, utf8Test i [ raw [206] <> utf8 "x" ]
-- 'x' is not valid after '\206', so both the legacy and
-- non-legacy encoders should handle the "x" correctly.
[ prompt 0, err <> utf8 "x"]
, utf8Test i [ raw [236,149] <> utf8 "x" ] -- needs one more byte
[prompt 0, err <> err <> utf8 "x"]
]
historyTests :: Invocation -> Test
historyTests i = "history encoding" ~:
[ utf8TestValidHist i [ "\ESC[A" ]
[prompt 0, utf8 "abcα" ]
, utf8TestInvalidHist i [ "\ESC[A" ]
-- NB: this is decoded by either utf8-string or base;
-- either way they produce \65533 instead of '?'.
[prompt 0, utf8 "abcα\65533x\65533x\65533" ]
-- In latin-1: read errors as utf-8 '\65533', display as '?'
, latin1TestInvalidHist i [ "\ESC[A" ]
[prompt 0, utf8 "abc??x?x?" ]
]
invalidHist :: BC.ByteString
invalidHist = utf8 "abcα"
`B.append` raw [149] -- invalid start of UTF-8 sequence
`B.append` utf8 "x"
`B.append` raw [206] -- incomplete start
`B.append` utf8 "x"
-- incomplete at end of file
`B.append` raw [206]
validHist :: BC.ByteString
validHist = utf8 "abcα"
inputChar :: Invocation -> Test
inputChar i = "getInputChar" ~:
[ utf8Test i [utf8 "xαβ"]
[ prompt 0, utf8 "x" <> end <> output 0 (utf8 "x")
<> prompt 1 <> utf8 "α" <> end <> output 1 (utf8 "α")
<> prompt 2 <> utf8 "β" <> end <> output 2 (utf8 "β")
<> prompt 3
]
, "bad encoding (separate)" ~:
utf8Test i [utf8 "α", raw [149], utf8 "x", raw [206]]
[ prompt 0, utf8 "α" <> end <> output 0 (utf8 "α") <> prompt 1
, err <> end <> output 1 err <> prompt 2
, utf8 "x" <> end <> output 2 (utf8 "x") <> prompt 3
, whenLegacy (err <> end <> output 3 err <> prompt 4)
]
, "bad encoding (together)" ~:
utf8Test i [utf8 "α" <> raw [149] <> utf8 "x" <> raw [206]]
[ prompt 0, utf8 "α" <> end <> output 0 (utf8 "α")
<> prompt 1 <> err <> end <> output 1 err
<> prompt 2 <> utf8 "x" <> end <> output 2 (utf8 "x")
<> prompt 3 <> whenLegacy (err <> end <> output 3 err <> prompt 4)
]
, utf8Test i [raw [206]] -- incomplete
[ prompt 0, whenLegacy (utf8 "?" <> end <> output 0 (utf8 "?"))
<> whenLegacy (prompt 1)
]
]
setCharInput :: Invocation -> Invocation
setCharInput i = i { progArgs = ["chars"] }
fileStyleTests :: Invocation -> Test
fileStyleTests i = "file style" ~:
[ "line input" ~: utf8Test iFile
[utf8 "xαβyψ안기q영\nquit\n"]
[ prompt' 0, output 0 (utf8 "xαβyψ안기q영") <> prompt' 1]
, "char input" ~: utf8Test iFileChar
[utf8 "xαβt"]
[ prompt' 0
, output 0 (utf8 "x")
<> prompt' 1 <> output 1 (utf8 "α")
<> prompt' 2 <> output 2 (utf8 "β")
<> prompt' 3 <> output 3 (utf8 "t")
<> prompt' 4]
, "invalid line input" ~: utf8Test iFile
-- NOTE: the 206 is an incomplete byte sequence,
-- but we MUST not pause since we're at EOF, not just
-- end of term.
--
-- Also recall GHC bug #5436 which caused a crash
-- if the last byte started an incomplete sequence.
[ utf8 "a" <> raw [149] <> utf8 "x" <> raw [206] ]
[ prompt' 0
, B.empty
-- It only prompts after the EOF.
, output 0 (utf8 "a" <> err <> utf8 "x" <> err) <> prompt' 1
]
, "invalid char input (following a newline)" ~: utf8Test iFileChar
[ utf8 "a\n" <> raw [149] <> utf8 "x\n" <> raw [206] ]
$ [ prompt' 0
, output 0 (utf8 "a")
<> prompt' 1 <> output 1 err
<> prompt' 2 <> output 2 (utf8 "x")
<> prompt' 3
<> whenLegacy (output 3 err <> prompt' 4)
] ++ if legacyEncoding then [] else [ output 3 err <> prompt' 4 ]
, "invalid char file input (no preceding newline)" ~: utf8Test iFileChar
[ utf8 "a" <> raw [149] <> utf8 "x" <> raw [206] ]
-- make sure it tries to read a newline
-- and instead gets the incomplete 206.
-- This should *not* cause it to crash or block.
$ [ prompt' 0
, output 0 (utf8 "a")
<> prompt' 1 <> output 1 err
<> prompt' 2 <> output 2 (utf8 "x")
<> prompt' 3
<> whenLegacy (output 3 err <> prompt' 4)
] ++ if legacyEncoding then [] else [ output 3 err <> prompt' 4 ]
]
-- also single char and buffer break and other stuff
where
iFile = i { runInTTY = False }
iFileChar = setCharInput iFile
-- Test that the dumb terminal backend does encoding correctly.
-- If all the above tests work for the terminfo backend,
-- then we just need to make sure the dumb term plugs into everything
-- correctly, i.e., encodes the input/output and doesn't double-encode.
dumbTests :: Invocation -> Test
dumbTests i = "dumb term" ~:
[ "line input" ~: utf8Test i
[ utf8 "xαβγy" ]
[ prompt' 0, utf8 "xαβγy" ]
, "line input wide movement" ~: utf8Test i
[ utf8 wideChar, raw [1], raw [5] ]
[ prompt' 0, utf8 wideChar
, utf8 (T.replicate 60 "\b")
, utf8 wideChar
]
, "line char input" ~: utf8Test (setCharInput i)
[utf8 "xαβ"]
[ prompt' 0, utf8 "x" <> nl <> output 0 (utf8 "x")
<> prompt' 1 <> utf8 "α" <> nl <> output 1 (utf8 "α")
<> prompt' 2 <> utf8 "β" <> nl <> output 2 (utf8 "β")
<> prompt' 3
]
]
where
wideChar = T.concat $ replicate 10 $ "안기영"
-------------
-- Building blocks for expected input/output
smkx,rmkx :: B.ByteString
smkx = utf8 "\ESC[?1h\ESC="
rmkx = utf8 "\ESC[?1l\ESC>"
prompt, prompt' :: Int -> B.ByteString
prompt k = smkx <> prompt' k
prompt' k = utf8 (T.pack (show k ++ ":"))
end :: B.ByteString
end = nl <> rmkx
cr :: B.ByteString
cr = raw [13]
nl :: B.ByteString
nl = raw [13,10] -- NB: see fixNL: this is really [13,13,10]
output :: Int -> B.ByteString -> B.ByteString
output k s = utf8 (T.pack $ "line " ++ show k ++ ":")
<> s <> raw [10]
wrap :: B.ByteString
wrap = utf8 " \b"
utf8 :: T.Text -> B.ByteString
utf8 = E.encodeUtf8
raw :: [Word8] -> B.ByteString
raw = B.pack
err :: B.ByteString
err = if legacyEncoding
then utf8 "?"
else utf8 "\65533"
----------------------
utf8Test ::
Invocation -> [BC.ByteString] -> [BC.ByteString] -> Test
utf8Test = testI . setUTF8
utf8TestInvalidHist ::
Invocation -> [BC.ByteString] -> [BC.ByteString] -> Test
utf8TestInvalidHist i input output' = test $ do
B.writeFile "myhist" $ invalidHist
assertInvocation (setUTF8 i) input output'
utf8TestValidHist ::
Invocation -> [BC.ByteString] -> [BC.ByteString] -> Test
utf8TestValidHist i input output' = test $ do
B.writeFile "myhist" validHist
assertInvocation (setUTF8 i) input output'
latin1TestInvalidHist ::
Invocation -> [BC.ByteString] -> [BC.ByteString] -> Test
latin1TestInvalidHist i input output' = test $ do
B.writeFile "myhist" $ invalidHist
assertInvocation (setLatin1 i) input output'
| judah/haskeline | tests/Unit.hs | bsd-3-clause | 11,627 | 0 | 23 | 3,373 | 3,397 | 1,747 | 1,650 | 225 | 3 |
{-# LANGUAGE Haskell98, MultiParamTypeClasses, FunctionalDependencies, FlexibleInstances #-}
{-# LINE 1 "Control/Monad/RWS/Lazy.hs" #-}
-----------------------------------------------------------------------------
-- |
-- Module : Control.Monad.RWS.Lazy
-- Copyright : (c) Andy Gill 2001,
-- (c) Oregon Graduate Institute of Science and Technology, 2001
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : non-portable (multi-param classes, functional dependencies)
--
-- Lazy RWS monad.
--
-- Inspired by the paper
-- /Functional Programming with Overloading and Higher-Order Polymorphism/,
-- Mark P Jones (<http://web.cecs.pdx.edu/~mpj/>)
-- Advanced School of Functional Programming, 1995.
-----------------------------------------------------------------------------
module Control.Monad.RWS.Lazy (
-- * The RWS monad
RWS,
rws,
runRWS,
evalRWS,
execRWS,
mapRWS,
withRWS,
-- * The RWST monad transformer
RWST(RWST),
runRWST,
evalRWST,
execRWST,
mapRWST,
withRWST,
-- * Lazy Reader-writer-state monads
module Control.Monad.RWS.Class,
module Control.Monad,
module Control.Monad.Fix,
module Control.Monad.Trans,
module Data.Monoid,
) where
import Control.Monad.RWS.Class
import Control.Monad.Trans
import Control.Monad.Trans.RWS.Lazy (
RWS, rws, runRWS, evalRWS, execRWS, mapRWS, withRWS,
RWST(RWST), runRWST, evalRWST, execRWST, mapRWST, withRWST)
import Control.Monad
import Control.Monad.Fix
import Data.Monoid
| phischu/fragnix | tests/packages/scotty/Control.Monad.RWS.Lazy.hs | bsd-3-clause | 1,654 | 0 | 6 | 322 | 193 | 139 | 54 | 31 | 0 |
{-# LANGUAGE OverloadedStrings, CPP, ScopedTypeVariables #-}
{-
Copyright (C) 2012-2014 John MacFarlane <[email protected]>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
{- |
Module : Text.Pandoc.PDF
Copyright : Copyright (C) 2012-2014 John MacFarlane
License : GNU GPL, version 2 or above
Maintainer : John MacFarlane <[email protected]>
Stability : alpha
Portability : portable
Conversion of LaTeX documents to PDF.
-}
module Text.Pandoc.PDF ( makePDF ) where
import Data.ByteString.Lazy (ByteString)
import qualified Data.ByteString.Lazy as B
import qualified Data.ByteString.Lazy.Char8 as BC
import qualified Data.ByteString as BS
import System.Exit (ExitCode (..))
import System.FilePath
import System.Directory
import Data.Digest.Pure.SHA (showDigest, sha1)
import System.Environment
import Control.Monad (unless, (<=<))
import qualified Control.Exception as E
import Control.Applicative ((<$))
import Data.List (isInfixOf)
import Data.Maybe (fromMaybe)
import qualified Text.Pandoc.UTF8 as UTF8
import Text.Pandoc.Definition
import Text.Pandoc.Walk (walkM)
import Text.Pandoc.Shared (fetchItem', warn, withTempDir)
import Text.Pandoc.Options (WriterOptions(..))
import Text.Pandoc.MIME (extensionFromMimeType, getMimeType)
import Text.Pandoc.Process (pipeProcess)
import qualified Data.ByteString.Lazy as BL
import qualified Codec.Picture as JP
#ifdef _WINDOWS
import Data.List (intercalate)
#endif
#ifdef _WINDOWS
changePathSeparators :: FilePath -> FilePath
changePathSeparators = intercalate "/" . splitDirectories
#endif
makePDF :: String -- ^ pdf creator (pdflatex, lualatex, xelatex)
-> (WriterOptions -> Pandoc -> String) -- ^ writer
-> WriterOptions -- ^ options
-> Pandoc -- ^ document
-> IO (Either ByteString ByteString)
makePDF program writer opts doc = withTempDir "tex2pdf." $ \tmpdir -> do
doc' <- handleImages opts tmpdir doc
let source = writer opts doc'
tex2pdf' tmpdir program source
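-- Usage sketch (illustrative, not part of the original module; assumes a
-- String-producing writer such as Text.Pandoc.Writers.LaTeX.writeLaTeX and
-- some document @pandocDoc :: Pandoc@):
--
-- > result <- makePDF "pdflatex" writeLaTeX writerOpts pandocDoc
-- > case result of
-- >   Left err  -> BC.putStrLn err            -- captured LaTeX log/error
-- >   Right pdf -> B.writeFile "out.pdf" pdf  -- produced PDF bytes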
handleImages :: WriterOptions
-> FilePath -- ^ temp dir to store images
-> Pandoc -- ^ document
-> IO Pandoc
handleImages opts tmpdir = walkM (convertImages tmpdir) <=< walkM (handleImage' opts tmpdir)
handleImage' :: WriterOptions
-> FilePath
-> Inline
-> IO Inline
handleImage' opts tmpdir (Image ils (src,tit)) = do
exists <- doesFileExist src
if exists
then return $ Image ils (src,tit)
else do
res <- fetchItem' (writerMediaBag opts) (writerSourceURL opts) src
case res of
Right (contents, Just mime) -> do
let ext = fromMaybe (takeExtension src) $
extensionFromMimeType mime
let basename = showDigest $ sha1 $ BL.fromChunks [contents]
let fname = tmpdir </> basename <.> ext
BS.writeFile fname contents
return $ Image ils (fname,tit)
_ -> do
warn $ "Could not find image `" ++ src ++ "', skipping..."
return $ Image ils (src,tit)
handleImage' _ _ x = return x
convertImages :: FilePath -> Inline -> IO Inline
convertImages tmpdir (Image ils (src, tit)) = do
img <- convertImage tmpdir src
newPath <-
case img of
Left e -> src <$
warn ("Unable to convert image `" ++ src ++ "':\n" ++ e)
Right fp -> return fp
return (Image ils (newPath, tit))
convertImages _ x = return x
-- Convert formats which do not work well in pdf to png
convertImage :: FilePath -> FilePath -> IO (Either String FilePath)
convertImage tmpdir fname =
case mime of
Just "image/png" -> doNothing
Just "image/jpeg" -> doNothing
Just "application/pdf" -> doNothing
_ -> JP.readImage fname >>= \res ->
case res of
Left msg -> return $ Left msg
Right img ->
E.catch (Right fileOut <$ JP.savePngImage fileOut img) $
\(e :: E.SomeException) -> return (Left (show e))
where
fileOut = replaceDirectory (replaceExtension fname (".png")) tmpdir
mime = getMimeType fname
doNothing = return (Right fname)
tex2pdf' :: FilePath -- ^ temp directory for output
-> String -- ^ tex program
-> String -- ^ tex source
-> IO (Either ByteString ByteString)
tex2pdf' tmpDir program source = do
let numruns = if "\\tableofcontents" `isInfixOf` source
then 3 -- to get page numbers
else 2 -- 1 run won't give you PDF bookmarks
(exit, log', mbPdf) <- runTeXProgram program numruns tmpDir source
case (exit, mbPdf) of
(ExitFailure _, _) -> do
let logmsg = extractMsg log'
let extramsg =
case logmsg of
x | "! Package inputenc Error" `BC.isPrefixOf` x ->
"\nTry running pandoc with --latex-engine=xelatex."
_ -> ""
return $ Left $ logmsg <> extramsg
(ExitSuccess, Nothing) -> return $ Left ""
(ExitSuccess, Just pdf) -> return $ Right pdf
(<>) :: ByteString -> ByteString -> ByteString
(<>) = B.append
-- parsing output
extractMsg :: ByteString -> ByteString
extractMsg log' = do
let msg' = dropWhile (not . ("!" `BC.isPrefixOf`)) $ BC.lines log'
let (msg'',rest) = break ("l." `BC.isPrefixOf`) msg'
let lineno = take 1 rest
if null msg'
then log'
else BC.unlines (msg'' ++ lineno)
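-- Illustrative example (not part of the original module): for a log such as
--
-- > This is pdfTeX, Version 3.14159265 ...
-- > ! Undefined control sequence.
-- > <recently read> \foo
-- > l.5 \foo
--
-- 'extractMsg' drops everything before the first line beginning with "!" and
-- returns the error-message lines together with the single "l.5 ..." line,
-- which is what gets reported back from 'tex2pdf''.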
-- running tex programs
-- Run a TeX program on an input bytestring and return (exit code,
-- contents of stdout, contents of produced PDF if any). Rerun
-- a fixed number of times to resolve references.
runTeXProgram :: String -> Int -> FilePath -> String
-> IO (ExitCode, ByteString, Maybe ByteString)
runTeXProgram program runsLeft tmpDir source = do
let file = tmpDir </> "input.tex"
exists <- doesFileExist file
unless exists $ UTF8.writeFile file source
#ifdef _WINDOWS
-- note: we want / even on Windows, for TexLive
let tmpDir' = changePathSeparators tmpDir
let file' = changePathSeparators file
#else
let tmpDir' = tmpDir
let file' = file
#endif
let programArgs = ["-halt-on-error", "-interaction", "nonstopmode",
"-output-directory", tmpDir', file']
env' <- getEnvironment
let sep = searchPathSeparator:[]
let texinputs = maybe (tmpDir' ++ sep) ((tmpDir' ++ sep) ++)
$ lookup "TEXINPUTS" env'
let env'' = ("TEXINPUTS", texinputs) :
[(k,v) | (k,v) <- env', k /= "TEXINPUTS"]
(exit, out, err) <- pipeProcess (Just env'') program programArgs BL.empty
if runsLeft > 1
then runTeXProgram program (runsLeft - 1) tmpDir source
else do
let pdfFile = replaceDirectory (replaceExtension file ".pdf") tmpDir
pdfExists <- doesFileExist pdfFile
pdf <- if pdfExists
-- We read PDF as a strict bytestring to make sure that the
-- temp directory is removed on Windows.
-- See https://github.com/jgm/pandoc/issues/1192.
then (Just . B.fromChunks . (:[])) `fmap` BS.readFile pdfFile
else return Nothing
return (exit, out <> err, pdf)
| peter-fogg/pardoc | src/Text/Pandoc/PDF.hs | gpl-2.0 | 8,029 | 12 | 23 | 2,183 | 1,876 | 996 | 880 | 142 | 5 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.IAM.AddRoleToInstanceProfile
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Adds the specified role to the specified instance profile. For more
-- information about roles, go to <http://docs.aws.amazon.com/IAM/latest/UserGuide/WorkingWithRoles.html Working with Roles>. For more information about
-- instance profiles, go to <http://docs.aws.amazon.com/IAM/latest/UserGuide/AboutInstanceProfiles.html About Instance Profiles>.
--
-- <http://docs.aws.amazon.com/IAM/latest/APIReference/API_AddRoleToInstanceProfile.html>
module Network.AWS.IAM.AddRoleToInstanceProfile
(
-- * Request
AddRoleToInstanceProfile
-- ** Request constructor
, addRoleToInstanceProfile
-- ** Request lenses
, artipInstanceProfileName
, artipRoleName
-- * Response
, AddRoleToInstanceProfileResponse
-- ** Response constructor
, addRoleToInstanceProfileResponse
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.IAM.Types
import qualified GHC.Exts
data AddRoleToInstanceProfile = AddRoleToInstanceProfile
{ _artipInstanceProfileName :: Text
, _artipRoleName :: Text
} deriving (Eq, Ord, Read, Show)
-- | 'AddRoleToInstanceProfile' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'artipInstanceProfileName' @::@ 'Text'
--
-- * 'artipRoleName' @::@ 'Text'
--
addRoleToInstanceProfile :: Text -- ^ 'artipInstanceProfileName'
-> Text -- ^ 'artipRoleName'
-> AddRoleToInstanceProfile
addRoleToInstanceProfile p1 p2 = AddRoleToInstanceProfile
{ _artipInstanceProfileName = p1
, _artipRoleName = p2
}
-- | The name of the instance profile to update.
artipInstanceProfileName :: Lens' AddRoleToInstanceProfile Text
artipInstanceProfileName =
lens _artipInstanceProfileName
(\s a -> s { _artipInstanceProfileName = a })
-- | The name of the role to add.
artipRoleName :: Lens' AddRoleToInstanceProfile Text
artipRoleName = lens _artipRoleName (\s a -> s { _artipRoleName = a })
data AddRoleToInstanceProfileResponse = AddRoleToInstanceProfileResponse
deriving (Eq, Ord, Read, Show, Generic)
-- | 'AddRoleToInstanceProfileResponse' constructor.
addRoleToInstanceProfileResponse :: AddRoleToInstanceProfileResponse
addRoleToInstanceProfileResponse = AddRoleToInstanceProfileResponse
instance ToPath AddRoleToInstanceProfile where
toPath = const "/"
instance ToQuery AddRoleToInstanceProfile where
toQuery AddRoleToInstanceProfile{..} = mconcat
[ "InstanceProfileName" =? _artipInstanceProfileName
, "RoleName" =? _artipRoleName
]
instance ToHeaders AddRoleToInstanceProfile
instance AWSRequest AddRoleToInstanceProfile where
type Sv AddRoleToInstanceProfile = IAM
type Rs AddRoleToInstanceProfile = AddRoleToInstanceProfileResponse
request = post "AddRoleToInstanceProfile"
response = nullResponse AddRoleToInstanceProfileResponse
| kim/amazonka | amazonka-iam/gen/Network/AWS/IAM/AddRoleToInstanceProfile.hs | mpl-2.0 | 3,975 | 0 | 9 | 803 | 395 | 242 | 153 | 54 | 1 |
module Main where
-- so we can fix-intro
import Data.Function (fix)
import Prelude hiding ((+))
data Nat = Z | S Nat
{-# RULES "ww" forall f . fix f = wrap (fix (unwrap . f . wrap)) #-}
{-# RULES "precondition" forall w . wrap (unwrap w) = w #-}
(+) :: Nat -> Nat -> Nat
Z + n = n
(S n') + n = S (n' + n)
fromInt :: Int -> Nat
fromInt 0 = Z
fromInt i | i < 0 = error "fromInt negative"
| otherwise = S (fromInt (i-1))
toInt :: Nat -> Int
toInt Z = 0
toInt (S n) = succ (toInt n)
-- original fib definition
fib :: Nat -> Nat
fib Z = Z
fib (S Z) = S Z
fib (S (S n)) = fib (S n) + fib n
-- goal:
-- fib' = fst work
-- where work Z = (Z, S Z)
-- work (S n) = let (x,y) = work n
-- in (y,x+y)
wrap :: (Nat -> (Nat, Nat)) -> Nat -> Nat
wrap h = fst . h
unwrap :: (Nat -> Nat) -> Nat -> (Nat, Nat)
unwrap h n = (h n, h (S n))
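-- Illustrative note (not part of the original example): the "precondition"
-- rule above is sound because
--   wrap (unwrap h) n = fst (h n, h (S n)) = h n
-- so @wrap . unwrap@ is the identity on @Nat -> Nat@, which is what lets the
-- "ww" rule rewrite @fix f@ into the worker/wrapper form sketched in the goal.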
main :: IO ()
main = print $ toInt $ fib $ fromInt 30
| conal/hermit | examples/IFL2012/Fib.hs | bsd-2-clause | 934 | 0 | 10 | 281 | 394 | 209 | 185 | 26 | 1 |
module Bar where
resource :: Int
resource = 2
| abuiles/turbinado-blog | tmp/dependencies/hs-plugins-1.3.1/testsuite/makewith/multi_make/Bar.hs | bsd-3-clause | 47 | 0 | 4 | 10 | 14 | 9 | 5 | 3 | 1 |
module AppNotEnough1 where
sumInts :: [Int] -> Int
sumInts xs = foldr (+) 0
| roberth/uu-helium | test/typeerrors/Heuristics/AppNotEnough1.hs | gpl-3.0 | 78 | 0 | 6 | 16 | 31 | 18 | 13 | 3 | 1 |
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.GL.ConditionalRendering
-- Copyright : (c) Sven Panne 2013
-- License : BSD3
--
-- Maintainer : Sven Panne <[email protected]>
-- Stability : stable
-- Portability : portable
--
-- This module corresponds to section 10.10 (Conditional Rendering) of the
-- OpenGL 4.4 specs.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.GL.ConditionalRendering (
ConditionalRenderMode(..),
beginConditionalRender, endConditionalRender, withConditionalRender
) where
import Graphics.Rendering.OpenGL.GL.Exception
import Graphics.Rendering.OpenGL.GL.QueryObject
import Graphics.Rendering.OpenGL.Raw
--------------------------------------------------------------------------------
data ConditionalRenderMode =
QueryWait
| QueryNoWait
| QueryByRegionWait
| QueryByRegionNoWait
deriving ( Eq, Ord, Show )
marshalConditionalRenderMode :: ConditionalRenderMode -> GLenum
marshalConditionalRenderMode x = case x of
QueryWait -> gl_QUERY_WAIT
QueryNoWait -> gl_QUERY_NO_WAIT
QueryByRegionWait -> gl_QUERY_BY_REGION_WAIT
QueryByRegionNoWait -> gl_QUERY_BY_REGION_NO_WAIT
--------------------------------------------------------------------------------
beginConditionalRender :: QueryObject -> ConditionalRenderMode -> IO ()
beginConditionalRender q =
glBeginConditionalRender (queryID q) . marshalConditionalRenderMode
endConditionalRender :: IO ()
endConditionalRender = glEndConditionalRender
withConditionalRender :: QueryObject -> ConditionalRenderMode -> IO a -> IO a
withConditionalRender q m =
bracket_ (beginConditionalRender q m) endConditionalRender
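-- Usage sketch (illustrative, not part of the original module): given a
-- 'QueryObject' @occlusionQuery@ filled by an earlier samples-passed query,
-- the expensive draw call below is discarded by the GL when the query
-- reported zero samples:
--
-- > withConditionalRender occlusionQuery QueryWait drawExpensiveObject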
| hesiod/OpenGL | src/Graphics/Rendering/OpenGL/GL/ConditionalRendering.hs | bsd-3-clause | 1,800 | 0 | 8 | 220 | 242 | 142 | 100 | 26 | 4 |
module Test2 where
f x y = let ls = x + 1;
rs = y - 1 in
(ls,rs)
| kmate/HaRe | old/testing/refacSlicing/Test2.hs | bsd-3-clause | 104 | 0 | 9 | 59 | 45 | 25 | 20 | 4 | 1 |
{-# LANGUAGE CPP #-}
-- | Handy functions for creating much Core syntax
module MkCore (
-- * Constructing normal syntax
mkCoreLet, mkCoreLets,
mkCoreApp, mkCoreApps, mkCoreConApps,
mkCoreLams, mkWildCase, mkIfThenElse,
mkWildValBinder, mkWildEvBinder,
sortQuantVars, castBottomExpr,
-- * Constructing boxed literals
mkWordExpr, mkWordExprWord,
mkIntExpr, mkIntExprInt,
mkIntegerExpr,
mkFloatExpr, mkDoubleExpr,
mkCharExpr, mkStringExpr, mkStringExprFS,
-- * Floats
FloatBind(..), wrapFloat,
-- * Constructing equality evidence boxes
mkEqBox,
-- * Constructing general big tuples
-- $big_tuples
mkChunkified,
-- * Constructing small tuples
mkCoreVarTup, mkCoreVarTupTy, mkCoreTup,
-- * Constructing big tuples
mkBigCoreVarTup, mkBigCoreVarTupTy,
mkBigCoreTup, mkBigCoreTupTy,
-- * Deconstructing small tuples
mkSmallTupleSelector, mkSmallTupleCase,
-- * Deconstructing big tuples
mkTupleSelector, mkTupleCase,
-- * Constructing list expressions
mkNilExpr, mkConsExpr, mkListExpr,
mkFoldrExpr, mkBuildExpr,
-- * Error Ids
mkRuntimeErrorApp, mkImpossibleExpr, errorIds,
rEC_CON_ERROR_ID, iRREFUT_PAT_ERROR_ID, rUNTIME_ERROR_ID,
nON_EXHAUSTIVE_GUARDS_ERROR_ID, nO_METHOD_BINDING_ERROR_ID,
pAT_ERROR_ID, eRROR_ID, rEC_SEL_ERROR_ID, aBSENT_ERROR_ID,
uNDEFINED_ID, undefinedName
) where
#include "HsVersions.h"
import Id
import Var ( EvVar, setTyVarUnique )
import CoreSyn
import CoreUtils ( exprType, needsCaseBinding, bindNonRec )
import Literal
import HscTypes
import TysWiredIn
import PrelNames
import TcType ( mkSigmaTy )
import Type
import Coercion
import TysPrim
import DataCon ( DataCon, dataConWorkId )
import IdInfo ( vanillaIdInfo, setStrictnessInfo,
setArityInfo )
import Demand
import Name hiding ( varName )
import Outputable
import FastString
import UniqSupply
import BasicTypes
import Util
import Pair
import Constants
import DynFlags
import Data.Char ( ord )
import Data.List
import Data.Ord
#if __GLASGOW_HASKELL__ < 709
import Data.Word ( Word )
#endif
infixl 4 `mkCoreApp`, `mkCoreApps`
{-
************************************************************************
* *
\subsection{Basic CoreSyn construction}
* *
************************************************************************
-}
sortQuantVars :: [Var] -> [Var]
-- Sort the variables (KindVars, TypeVars, and Ids)
-- into order: Kind, then Type, then Id
sortQuantVars = sortBy (comparing withCategory)
where
withCategory v = (category v, v)
category :: Var -> Int
category v
| isKindVar v = 1
| isTyVar v = 2
| otherwise = 3
-- | Bind a binding group over an expression, using a @let@ or @case@ as
-- appropriate (see "CoreSyn#let_app_invariant")
mkCoreLet :: CoreBind -> CoreExpr -> CoreExpr
mkCoreLet (NonRec bndr rhs) body -- See Note [CoreSyn let/app invariant]
| needsCaseBinding (idType bndr) rhs
= Case rhs bndr (exprType body) [(DEFAULT,[],body)]
mkCoreLet bind body
= Let bind body
-- | Bind a list of binding groups over an expression. The leftmost binding
-- group becomes the outermost group in the resulting expression
mkCoreLets :: [CoreBind] -> CoreExpr -> CoreExpr
mkCoreLets binds body = foldr mkCoreLet body binds
-- | Construct an expression which represents the application of one expression
-- to the other
mkCoreApp :: CoreExpr -> CoreExpr -> CoreExpr
-- Respects the let/app invariant by building a case expression where necessary
-- See CoreSyn Note [CoreSyn let/app invariant]
mkCoreApp fun (Type ty) = App fun (Type ty)
mkCoreApp fun (Coercion co) = App fun (Coercion co)
mkCoreApp fun arg = ASSERT2( isFunTy fun_ty, ppr fun $$ ppr arg )
mk_val_app fun arg arg_ty res_ty
where
fun_ty = exprType fun
(arg_ty, res_ty) = splitFunTy fun_ty
-- | Construct an expression which represents the application of a number of
-- expressions to another. The leftmost expression in the list is applied first
-- Respects the let/app invariant by building a case expression where necessary
-- See CoreSyn Note [CoreSyn let/app invariant]
mkCoreApps :: CoreExpr -> [CoreExpr] -> CoreExpr
-- Slightly more efficient version of (foldl mkCoreApp)
mkCoreApps orig_fun orig_args
= go orig_fun (exprType orig_fun) orig_args
where
go fun _ [] = fun
go fun fun_ty (Type ty : args) = go (App fun (Type ty)) (applyTy fun_ty ty) args
go fun fun_ty (Coercion co : args) = go (App fun (Coercion co)) (applyCo fun_ty co) args
go fun fun_ty (arg : args) = ASSERT2( isFunTy fun_ty, ppr fun_ty $$ ppr orig_fun
$$ ppr orig_args )
go (mk_val_app fun arg arg_ty res_ty) res_ty args
where
(arg_ty, res_ty) = splitFunTy fun_ty
-- | Construct an expression which represents the application of a number of
-- expressions to that of a data constructor expression. The leftmost expression
-- in the list is applied first
mkCoreConApps :: DataCon -> [CoreExpr] -> CoreExpr
mkCoreConApps con args = mkCoreApps (Var (dataConWorkId con)) args
mk_val_app :: CoreExpr -> CoreExpr -> Type -> Type -> CoreExpr
-- Build an application (e1 e2),
-- or a strict binding (case e2 of x -> e1 x)
-- using the latter when necessary to respect the let/app invariant
-- See Note [CoreSyn let/app invariant]
mk_val_app fun arg arg_ty res_ty
| not (needsCaseBinding arg_ty arg)
= App fun arg -- The vastly common case
| otherwise
= Case arg arg_id res_ty [(DEFAULT,[],App fun (Var arg_id))]
where
arg_id = mkWildValBinder arg_ty
-- Lots of shadowing, but it doesn't matter,
        -- because 'fun' should not have a free wild-id
        --
        -- This is Dangerous.  But this is the only place we play this
        -- game, mk_val_app returns an expression that does not
        -- have a free wild-id.  So the only thing that can go wrong
        -- is if you take apart this case expression, and pass a
        -- fragment of it as the fun part of a 'mk_val_app'.
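-- Illustrative example (not from the original source): for a function
-- expression @f :: Int# -> Int@ and an argument @e :: Int#@ that is not
-- ok-for-speculation, @mk_val_app f e intPrimTy intTy@ builds
--
-- > case e of wild { DEFAULT -> f wild }
--
-- rather than the plain application @f e@, which is exactly what the
-- let/app invariant requires.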
-----------
mkWildEvBinder :: PredType -> EvVar
mkWildEvBinder pred = mkWildValBinder pred
-- | Make a /wildcard binder/. This is typically used when you need a binder
-- that you expect to use only at a *binding* site. Do not use it at
-- occurrence sites because it has a single, fixed unique, and it's very
-- easy to get into difficulties with shadowing. That's why it is used so little.
-- See Note [WildCard binders] in SimplEnv
mkWildValBinder :: Type -> Id
mkWildValBinder ty = mkLocalId wildCardName ty
mkWildCase :: CoreExpr -> Type -> Type -> [CoreAlt] -> CoreExpr
-- Make a case expression whose case binder is unused
-- The alts should not have any occurrences of WildId
mkWildCase scrut scrut_ty res_ty alts
= Case scrut (mkWildValBinder scrut_ty) res_ty alts
mkIfThenElse :: CoreExpr -> CoreExpr -> CoreExpr -> CoreExpr
mkIfThenElse guard then_expr else_expr
-- Not going to be refining, so okay to take the type of the "then" clause
= mkWildCase guard boolTy (exprType then_expr)
[ (DataAlt falseDataCon, [], else_expr), -- Increasing order of tag!
(DataAlt trueDataCon, [], then_expr) ]
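-- Illustrative example (not from the original source): @mkIfThenElse g t e@
-- builds
--
-- > case g of _ { False -> e; True -> t }
--
-- with the 'False' alternative listed first because 'falseDataCon' has the
-- lower constructor tag.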
castBottomExpr :: CoreExpr -> Type -> CoreExpr
-- (castBottomExpr e ty), assuming that 'e' diverges,
-- return an expression of type 'ty'
-- See Note [Empty case alternatives] in CoreSyn
castBottomExpr e res_ty
| e_ty `eqType` res_ty = e
| otherwise = Case e (mkWildValBinder e_ty) res_ty []
where
e_ty = exprType e
{-
The functions from this point don't really do anything cleverer than
their counterparts in CoreSyn, but they are here for consistency
-}
-- | Create a lambda where the given expression has a number of variables
-- bound over it. The leftmost binder is that bound by the outermost
-- lambda in the result
mkCoreLams :: [CoreBndr] -> CoreExpr -> CoreExpr
mkCoreLams = mkLams
{-
************************************************************************
* *
\subsection{Making literals}
* *
************************************************************************
-}
-- | Create a 'CoreExpr' which will evaluate to the given @Int@
mkIntExpr :: DynFlags -> Integer -> CoreExpr -- Result = I# i :: Int
mkIntExpr dflags i = mkConApp intDataCon [mkIntLit dflags i]
-- | Create a 'CoreExpr' which will evaluate to the given @Int@
mkIntExprInt :: DynFlags -> Int -> CoreExpr -- Result = I# i :: Int
mkIntExprInt dflags i = mkConApp intDataCon [mkIntLitInt dflags i]
-- | Create a 'CoreExpr' which will evaluate to a @Word@ with the given value
mkWordExpr :: DynFlags -> Integer -> CoreExpr
mkWordExpr dflags w = mkConApp wordDataCon [mkWordLit dflags w]
-- | Create a 'CoreExpr' which will evaluate to the given @Word@
mkWordExprWord :: DynFlags -> Word -> CoreExpr
mkWordExprWord dflags w = mkConApp wordDataCon [mkWordLitWord dflags w]
-- | Create a 'CoreExpr' which will evaluate to the given @Integer@
mkIntegerExpr :: MonadThings m => Integer -> m CoreExpr -- Result :: Integer
mkIntegerExpr i = do t <- lookupTyCon integerTyConName
return (Lit (mkLitInteger i (mkTyConTy t)))
-- | Create a 'CoreExpr' which will evaluate to the given @Float@
mkFloatExpr :: Float -> CoreExpr
mkFloatExpr f = mkConApp floatDataCon [mkFloatLitFloat f]
-- | Create a 'CoreExpr' which will evaluate to the given @Double@
mkDoubleExpr :: Double -> CoreExpr
mkDoubleExpr d = mkConApp doubleDataCon [mkDoubleLitDouble d]
-- | Create a 'CoreExpr' which will evaluate to the given @Char@
mkCharExpr     :: Char       -> CoreExpr      -- Result = C# c :: Char
mkCharExpr c = mkConApp charDataCon [mkCharLit c]
-- | Create a 'CoreExpr' which will evaluate to the given @String@
mkStringExpr :: MonadThings m => String -> m CoreExpr -- Result :: String
-- | Create a 'CoreExpr' which will evaluate to a string morally equivalent to the given @FastString@
mkStringExprFS :: MonadThings m => FastString -> m CoreExpr -- Result :: String
mkStringExpr str = mkStringExprFS (mkFastString str)
mkStringExprFS str
| nullFS str
= return (mkNilExpr charTy)
| all safeChar chars
= do unpack_id <- lookupId unpackCStringName
return (App (Var unpack_id) (Lit (MachStr (fastStringToByteString str))))
| otherwise
= do unpack_id <- lookupId unpackCStringUtf8Name
return (App (Var unpack_id) (Lit (MachStr (fastStringToByteString str))))
where
chars = unpackFS str
safeChar c = ord c >= 1 && ord c <= 0x7F
-- This takes a ~# b (or a ~# R b) and returns a ~ b (or Coercible a b)
mkEqBox :: Coercion -> CoreExpr
mkEqBox co = ASSERT2( typeKind ty2 `eqKind` k, ppr co $$ ppr ty1 $$ ppr ty2 $$ ppr (typeKind ty1) $$ ppr (typeKind ty2) )
Var (dataConWorkId datacon) `mkTyApps` [k, ty1, ty2] `App` Coercion co
where (Pair ty1 ty2, role) = coercionKindRole co
k = typeKind ty1
datacon = case role of
Nominal -> eqBoxDataCon
Representational -> coercibleDataCon
Phantom -> pprPanic "mkEqBox does not support boxing phantom coercions"
(ppr co)
{-
************************************************************************
* *
\subsection{Tuple constructors}
* *
************************************************************************
-}
-- $big_tuples
-- #big_tuples#
--
-- GHC's built-in tuples can only go up to 'mAX_TUPLE_SIZE' in arity, but
-- we might conceivably want to build such a massive tuple as part of the
-- output of a desugaring stage (notably that for list comprehensions).
--
-- We call tuples above this size \"big tuples\", and emulate them by
-- creating and pattern matching on >nested< tuples that are expressible
-- by GHC.
--
-- Nesting policy: it's better to have a 2-tuple of 10-tuples (3 objects)
-- than a 10-tuple of 2-tuples (11 objects), so we want the leaves of any
-- construction to be big.
--
-- If you just use the 'mkBigCoreTup', 'mkBigCoreVarTupTy', 'mkTupleSelector'
-- and 'mkTupleCase' functions to do all your work with tuples you should be
-- fine, and not have to worry about the arity limitation at all.
-- | Lifts a \"small\" constructor into a \"big\" constructor by recursive decomposition
mkChunkified :: ([a] -> a) -- ^ \"Small\" constructor function, of maximum input arity 'mAX_TUPLE_SIZE'
-> [a] -- ^ Possible \"big\" list of things to construct from
-> a -- ^ Constructed thing made possible by recursive decomposition
mkChunkified small_tuple as = mk_big_tuple (chunkify as)
where
-- Each sub-list is short enough to fit in a tuple
mk_big_tuple [as] = small_tuple as
mk_big_tuple as_s = mk_big_tuple (chunkify (map small_tuple as_s))
chunkify :: [a] -> [[a]]
-- ^ Split a list into lists that are small enough to have a corresponding
-- tuple arity. The sub-lists of the result all have length <= 'mAX_TUPLE_SIZE'
-- But there may be more than 'mAX_TUPLE_SIZE' sub-lists
chunkify xs
| n_xs <= mAX_TUPLE_SIZE = [xs]
| otherwise = split xs
where
n_xs = length xs
split [] = []
split xs = take mAX_TUPLE_SIZE xs : split (drop mAX_TUPLE_SIZE xs)
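-- Illustrative example (not from the original source), assuming the usual
-- 'mAX_TUPLE_SIZE' of 62 from Constants:
--
-- > map length (chunkify [1..100 :: Int])  ==  [62,38]
--
-- so @mkChunkified mkCoreTup@ applied to 100 expressions builds a pair whose
-- components are a 62-tuple and a 38-tuple, keeping the leaves big as the
-- nesting policy above asks for.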
{-
Creating tuples and their types for Core expressions
@mkBigCoreVarTup@ builds a tuple; the inverse to @mkTupleSelector@.
* If it has only one element, it is the identity function.
* If there are more elements than a big tuple can have, it nests
the tuples.
-}
-- | Build a small tuple holding the specified variables
mkCoreVarTup :: [Id] -> CoreExpr
mkCoreVarTup ids = mkCoreTup (map Var ids)
-- | Build the type of a small tuple that holds the specified variables
mkCoreVarTupTy :: [Id] -> Type
mkCoreVarTupTy ids = mkBoxedTupleTy (map idType ids)
-- | Build a small tuple holding the specified expressions
mkCoreTup :: [CoreExpr] -> CoreExpr
mkCoreTup [] = Var unitDataConId
mkCoreTup [c] = c
mkCoreTup cs = mkConApp (tupleCon BoxedTuple (length cs))
(map (Type . exprType) cs ++ cs)
-- | Build a big tuple holding the specified variables
mkBigCoreVarTup :: [Id] -> CoreExpr
mkBigCoreVarTup ids = mkBigCoreTup (map Var ids)
-- | Build the type of a big tuple that holds the specified variables
mkBigCoreVarTupTy :: [Id] -> Type
mkBigCoreVarTupTy ids = mkBigCoreTupTy (map idType ids)
-- | Build a big tuple holding the specified expressions
mkBigCoreTup :: [CoreExpr] -> CoreExpr
mkBigCoreTup = mkChunkified mkCoreTup
-- | Build the type of a big tuple that holds the specified type of thing
mkBigCoreTupTy :: [Type] -> Type
mkBigCoreTupTy = mkChunkified mkBoxedTupleTy
{-
************************************************************************
* *
Floats
* *
************************************************************************
-}
data FloatBind
= FloatLet CoreBind
| FloatCase CoreExpr Id AltCon [Var]
-- case e of y { C ys -> ... }
-- See Note [Floating cases] in SetLevels
instance Outputable FloatBind where
ppr (FloatLet b) = ptext (sLit "LET") <+> ppr b
ppr (FloatCase e b c bs) = hang (ptext (sLit "CASE") <+> ppr e <+> ptext (sLit "of") <+> ppr b)
2 (ppr c <+> ppr bs)
wrapFloat :: FloatBind -> CoreExpr -> CoreExpr
wrapFloat (FloatLet defns) body = Let defns body
wrapFloat (FloatCase e b con bs) body = Case e b (exprType body) [(con, bs, body)]
{-
************************************************************************
* *
\subsection{Tuple destructors}
* *
************************************************************************
-}
-- | Builds a selector which scrutinises the given
-- expression and extracts the one name from the list given.
-- If you want the no-shadowing rule to apply, the caller
-- is responsible for making sure that none of these names
-- are in scope.
--
-- If there is just one 'Id' in the tuple, then the selector is
-- just the identity.
--
-- If necessary, we pattern match on a \"big\" tuple.
mkTupleSelector :: [Id] -- ^ The 'Id's to pattern match the tuple against
-> Id -- ^ The 'Id' to select
-> Id -- ^ A variable of the same type as the scrutinee
-> CoreExpr -- ^ Scrutinee
-> CoreExpr -- ^ Selector expression
-- mkTupleSelector [a,b,c,d] b v e
-- = case e of v {
-- (p,q) -> case p of p {
-- (a,b) -> b }}
-- We use 'tpl' vars for the p,q, since shadowing does not matter.
--
-- In fact, it's more convenient to generate it innermost first, getting
--
-- case (case e of v
-- (p,q) -> p) of p
-- (a,b) -> b
mkTupleSelector vars the_var scrut_var scrut
= mk_tup_sel (chunkify vars) the_var
where
mk_tup_sel [vars] the_var = mkSmallTupleSelector vars the_var scrut_var scrut
mk_tup_sel vars_s the_var = mkSmallTupleSelector group the_var tpl_v $
mk_tup_sel (chunkify tpl_vs) tpl_v
where
tpl_tys = [mkBoxedTupleTy (map idType gp) | gp <- vars_s]
tpl_vs = mkTemplateLocals tpl_tys
[(tpl_v, group)] = [(tpl,gp) | (tpl,gp) <- zipEqual "mkTupleSelector" tpl_vs vars_s,
the_var `elem` gp ]
-- | Like 'mkTupleSelector' but for tuples that are guaranteed
-- never to be \"big\".
--
-- > mkSmallTupleSelector [x] x v e = [| e |]
-- > mkSmallTupleSelector [x,y,z] x v e = [| case e of v { (x,y,z) -> x } |]
mkSmallTupleSelector :: [Id] -- The tuple args
-> Id -- The selected one
-> Id -- A variable of the same type as the scrutinee
-> CoreExpr -- Scrutinee
-> CoreExpr
mkSmallTupleSelector [var] should_be_the_same_var _ scrut
= ASSERT(var == should_be_the_same_var)
scrut
mkSmallTupleSelector vars the_var scrut_var scrut
= ASSERT( notNull vars )
Case scrut scrut_var (idType the_var)
[(DataAlt (tupleCon BoxedTuple (length vars)), vars, Var the_var)]
-- | A generalization of 'mkTupleSelector', allowing the body
-- of the case to be an arbitrary expression.
--
-- To avoid shadowing, we use uniques to invent new variables.
--
-- If necessary we pattern match on a \"big\" tuple.
mkTupleCase :: UniqSupply -- ^ For inventing names of intermediate variables
-> [Id] -- ^ The tuple identifiers to pattern match on
-> CoreExpr -- ^ Body of the case
-> Id -- ^ A variable of the same type as the scrutinee
-> CoreExpr -- ^ Scrutinee
-> CoreExpr
-- ToDo: eliminate cases where none of the variables are needed.
--
-- mkTupleCase uniqs [a,b,c,d] body v e
-- = case e of v { (p,q) ->
-- case p of p { (a,b) ->
-- case q of q { (c,d) ->
-- body }}}
mkTupleCase uniqs vars body scrut_var scrut
= mk_tuple_case uniqs (chunkify vars) body
where
    -- This is the case where we don't need any nesting
mk_tuple_case _ [vars] body
= mkSmallTupleCase vars body scrut_var scrut
    -- This is the case where we must nest tuples at least once
mk_tuple_case us vars_s body
= let (us', vars', body') = foldr one_tuple_case (us, [], body) vars_s
in mk_tuple_case us' (chunkify vars') body'
one_tuple_case chunk_vars (us, vs, body)
= let (uniq, us') = takeUniqFromSupply us
scrut_var = mkSysLocal (fsLit "ds") uniq
(mkBoxedTupleTy (map idType chunk_vars))
body' = mkSmallTupleCase chunk_vars body scrut_var (Var scrut_var)
in (us', scrut_var:vs, body')
-- | As 'mkTupleCase', but for a tuple that is small enough to be guaranteed
-- not to need nesting.
mkSmallTupleCase
:: [Id] -- ^ The tuple args
-> CoreExpr -- ^ Body of the case
-> Id -- ^ A variable of the same type as the scrutinee
-> CoreExpr -- ^ Scrutinee
-> CoreExpr
mkSmallTupleCase [var] body _scrut_var scrut
= bindNonRec var scrut body
mkSmallTupleCase vars body scrut_var scrut
-- One branch no refinement?
= Case scrut scrut_var (exprType body) [(DataAlt (tupleCon BoxedTuple (length vars)), vars, body)]
{-
************************************************************************
* *
\subsection{Common list manipulation expressions}
* *
************************************************************************
Call the constructor Ids when building explicit lists, so that they
interact well with rules.
-}
-- | Makes a list @[]@ for lists of the specified type
mkNilExpr :: Type -> CoreExpr
mkNilExpr ty = mkConApp nilDataCon [Type ty]
-- | Makes a list @(:)@ for lists of the specified type
mkConsExpr :: Type -> CoreExpr -> CoreExpr -> CoreExpr
mkConsExpr ty hd tl = mkConApp consDataCon [Type ty, hd, tl]
-- | Make a list containing the given expressions, where the list has the given type
mkListExpr :: Type -> [CoreExpr] -> CoreExpr
mkListExpr ty xs = foldr (mkConsExpr ty) (mkNilExpr ty) xs
-- | Make a fully applied 'foldr' expression
mkFoldrExpr :: MonadThings m
=> Type -- ^ Element type of the list
-> Type -- ^ Fold result type
-> CoreExpr -- ^ "Cons" function expression for the fold
-> CoreExpr -- ^ "Nil" expression for the fold
            -> CoreExpr         -- ^ List expression being folded across
-> m CoreExpr
mkFoldrExpr elt_ty result_ty c n list = do
foldr_id <- lookupId foldrName
return (Var foldr_id `App` Type elt_ty
`App` Type result_ty
`App` c
`App` n
`App` list)
-- | Make a 'build' expression applied to a locally-bound worker function
mkBuildExpr :: (MonadThings m, MonadUnique m)
=> Type -- ^ Type of list elements to be built
-> ((Id, Type) -> (Id, Type) -> m CoreExpr) -- ^ Function that, given information about the 'Id's
-- of the binders for the build worker function, returns
-- the body of that worker
-> m CoreExpr
mkBuildExpr elt_ty mk_build_inside = do
[n_tyvar] <- newTyVars [alphaTyVar]
let n_ty = mkTyVarTy n_tyvar
c_ty = mkFunTys [elt_ty, n_ty] n_ty
[c, n] <- sequence [mkSysLocalM (fsLit "c") c_ty, mkSysLocalM (fsLit "n") n_ty]
build_inside <- mk_build_inside (c, c_ty) (n, n_ty)
build_id <- lookupId buildName
return $ Var build_id `App` Type elt_ty `App` mkLams [n_tyvar, c, n] build_inside
where
newTyVars tyvar_tmpls = do
uniqs <- getUniquesM
return (zipWith setTyVarUnique tyvar_tmpls uniqs)
{-
************************************************************************
* *
Error expressions
* *
************************************************************************
-}
mkRuntimeErrorApp
:: Id -- Should be of type (forall a. Addr# -> a)
-- where Addr# points to a UTF8 encoded string
-> Type -- The type to instantiate 'a'
-> String -- The string to print
-> CoreExpr
mkRuntimeErrorApp err_id res_ty err_msg
= mkApps (Var err_id) [Type res_ty, err_string]
where
err_string = Lit (mkMachString err_msg)
mkImpossibleExpr :: Type -> CoreExpr
mkImpossibleExpr res_ty
= mkRuntimeErrorApp rUNTIME_ERROR_ID res_ty "Impossible case alternative"
{-
************************************************************************
* *
Error Ids
* *
************************************************************************
GHC randomly injects these into the code.
@patError@ is just a version of @error@ for pattern-matching
failures. It knows various ``codes'' which expand to longer
strings---this saves space!
@absentErr@ is a thing we put in for ``absent'' arguments. They jolly
well shouldn't be yanked on, but if one is, then you will get a
friendly message from @absentErr@ (rather than a totally random
crash).
@parError@ is a special version of @error@ which the compiler does
not know to be a bottoming Id. It is used in the @_par_@ and @_seq_@
templates, but we don't ever expect to generate code for it.
-}
errorIds :: [Id]
errorIds
= [ eRROR_ID, -- This one isn't used anywhere else in the compiler
-- But we still need it in wiredInIds so that when GHC
-- compiles a program that mentions 'error' we don't
-- import its type from the interface file; we just get
-- the Id defined here. Which has an 'open-tyvar' type.
uNDEFINED_ID, -- Ditto for 'undefined'. The big deal is to give it
-- an 'open-tyvar' type.
rUNTIME_ERROR_ID,
iRREFUT_PAT_ERROR_ID,
nON_EXHAUSTIVE_GUARDS_ERROR_ID,
nO_METHOD_BINDING_ERROR_ID,
pAT_ERROR_ID,
rEC_CON_ERROR_ID,
rEC_SEL_ERROR_ID,
aBSENT_ERROR_ID ]
recSelErrorName, runtimeErrorName, absentErrorName :: Name
irrefutPatErrorName, recConErrorName, patErrorName :: Name
nonExhaustiveGuardsErrorName, noMethodBindingErrorName :: Name
recSelErrorName = err_nm "recSelError" recSelErrorIdKey rEC_SEL_ERROR_ID
absentErrorName = err_nm "absentError" absentErrorIdKey aBSENT_ERROR_ID
runtimeErrorName = err_nm "runtimeError" runtimeErrorIdKey rUNTIME_ERROR_ID
irrefutPatErrorName = err_nm "irrefutPatError" irrefutPatErrorIdKey iRREFUT_PAT_ERROR_ID
recConErrorName = err_nm "recConError" recConErrorIdKey rEC_CON_ERROR_ID
patErrorName = err_nm "patError" patErrorIdKey pAT_ERROR_ID
noMethodBindingErrorName = err_nm "noMethodBindingError"
noMethodBindingErrorIdKey nO_METHOD_BINDING_ERROR_ID
nonExhaustiveGuardsErrorName = err_nm "nonExhaustiveGuardsError"
nonExhaustiveGuardsErrorIdKey nON_EXHAUSTIVE_GUARDS_ERROR_ID
err_nm :: String -> Unique -> Id -> Name
err_nm str uniq id = mkWiredInIdName cONTROL_EXCEPTION_BASE (fsLit str) uniq id
rEC_SEL_ERROR_ID, rUNTIME_ERROR_ID, iRREFUT_PAT_ERROR_ID, rEC_CON_ERROR_ID :: Id
pAT_ERROR_ID, nO_METHOD_BINDING_ERROR_ID, nON_EXHAUSTIVE_GUARDS_ERROR_ID :: Id
aBSENT_ERROR_ID :: Id
rEC_SEL_ERROR_ID = mkRuntimeErrorId recSelErrorName
rUNTIME_ERROR_ID = mkRuntimeErrorId runtimeErrorName
iRREFUT_PAT_ERROR_ID = mkRuntimeErrorId irrefutPatErrorName
rEC_CON_ERROR_ID = mkRuntimeErrorId recConErrorName
pAT_ERROR_ID = mkRuntimeErrorId patErrorName
nO_METHOD_BINDING_ERROR_ID = mkRuntimeErrorId noMethodBindingErrorName
nON_EXHAUSTIVE_GUARDS_ERROR_ID = mkRuntimeErrorId nonExhaustiveGuardsErrorName
aBSENT_ERROR_ID = mkRuntimeErrorId absentErrorName
mkRuntimeErrorId :: Name -> Id
mkRuntimeErrorId name = pc_bottoming_Id1 name runtimeErrorTy
runtimeErrorTy :: Type
-- The runtime error Ids take a UTF8-encoded string as argument
runtimeErrorTy = mkSigmaTy [openAlphaTyVar] [] (mkFunTy addrPrimTy openAlphaTy)
errorName :: Name
errorName = mkWiredInIdName gHC_ERR (fsLit "error") errorIdKey eRROR_ID
eRROR_ID :: Id
eRROR_ID = pc_bottoming_Id1 errorName errorTy
errorTy :: Type -- See Note [Error and friends have an "open-tyvar" forall]
errorTy = mkSigmaTy [openAlphaTyVar] [] (mkFunTys [mkListTy charTy] openAlphaTy)
undefinedName :: Name
undefinedName = mkWiredInIdName gHC_ERR (fsLit "undefined") undefinedKey uNDEFINED_ID
uNDEFINED_ID :: Id
uNDEFINED_ID = pc_bottoming_Id0 undefinedName undefinedTy
undefinedTy :: Type -- See Note [Error and friends have an "open-tyvar" forall]
undefinedTy = mkSigmaTy [openAlphaTyVar] [] openAlphaTy
{-
Note [Error and friends have an "open-tyvar" forall]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
'error' and 'undefined' have types
error :: forall (a::OpenKind). String -> a
undefined :: forall (a::OpenKind). a
Notice the 'OpenKind' (manifested as openAlphaTyVar in the code). This ensures that
"error" can be instantiated at
* unboxed as well as boxed types
* polymorphic types
This is OK because it never returns, so the return type is irrelevant.
See Note [OpenTypeKind accepts foralls] in TcUnify.
************************************************************************
* *
\subsection{Utilities}
* *
************************************************************************
-}
pc_bottoming_Id1 :: Name -> Type -> Id
-- Function of arity 1, which diverges after being given one argument
pc_bottoming_Id1 name ty
= mkVanillaGlobalWithInfo name ty bottoming_info
where
bottoming_info = vanillaIdInfo `setStrictnessInfo` strict_sig
`setArityInfo` 1
-- Make arity and strictness agree
-- Do *not* mark them as NoCafRefs, because they can indeed have
-- CAF refs. For example, pAT_ERROR_ID calls GHC.Err.untangle,
-- which has some CAFs
-- In due course we may arrange that these error-y things are
-- regarded by the GC as permanently live, in which case we
-- can give them NoCaf info. As it is, any function that calls
-- any pc_bottoming_Id will itself have CafRefs, which bloats
-- SRTs.
strict_sig = mkClosedStrictSig [evalDmd] botRes
-- These "bottom" out, no matter what their arguments
pc_bottoming_Id0 :: Name -> Type -> Id
-- Same but arity zero
pc_bottoming_Id0 name ty
= mkVanillaGlobalWithInfo name ty bottoming_info
where
bottoming_info = vanillaIdInfo `setStrictnessInfo` strict_sig
strict_sig = mkClosedStrictSig [] botRes
| green-haskell/ghc | compiler/coreSyn/MkCore.hs | bsd-3-clause | 31,566 | 0 | 16 | 8,638 | 4,809 | 2,618 | 2,191 | 365 | 4 |
{-# LANGUAGE PolyKinds, TypeFamilies, DataKinds #-}
module T5938 where
import Data.Kind (Type)
type family KindFam (a :: k)
type instance KindFam (a :: Type) = Int
type instance KindFam (a :: Bool) = Bool
type instance KindFam (a :: Maybe k) = Char -- doesn't work
| sdiehl/ghc | testsuite/tests/polykinds/T5938.hs | bsd-3-clause | 267 | 0 | 7 | 48 | 79 | 50 | 29 | 7 | 0 |
import Test.Cabal.Prelude
-- Test build when the library is empty, for #1241
main = setupAndCabalTest $ withDirectory "empty" $ setup_build []
| themoritz/cabal | cabal-testsuite/PackageTests/EmptyLib/setup.test.hs | bsd-3-clause | 143 | 0 | 7 | 21 | 29 | 15 | 14 | 2 | 1 |
{-# LANGUAGE PartialTypeSignatures, NamedWildCards #-}
module GenNamed where
bar :: _a -> _a
bar x = not x
| siddhanathan/ghc | testsuite/tests/partial-sigs/should_compile/GenNamed.hs | bsd-3-clause | 108 | 0 | 5 | 19 | 25 | 14 | 11 | 4 | 1 |
{-# htermination (.) :: (b -> c) -> (a -> b) -> (a -> c) #-}
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/full_haskell/Prelude_DOT_1.hs | mit | 61 | 0 | 2 | 16 | 3 | 2 | 1 | 1 | 0 |
module Week1.Tests where
import Test.HUnit
import Week1.Week1
week1Tests = do runTestTT week1Ex1Tests
runTestTT week1Ex2Tests
runTestTT week1Ex3Tests
runTestTT week1Ex4Tests
runTestTT week1Ex5Tests
runTestTT week1Ex6Tests
test1_1 = TestCase (assertEqual "lastDigit" (3) (lastDigit 123))
test1_2 = TestCase (assertEqual "lastDigit" (0) (lastDigit 0))
test1_3 = TestCase (assertEqual "dropLastDigit" (12) (dropLastDigit 123))
test1_4 = TestCase (assertEqual "dropLastDigit" (0) (dropLastDigit 5))
test2_1 = TestCase (assertEqual "toDigits" ([1,2,3,4]) (toDigits 1234))
test2_2 = TestCase (assertEqual "toDigits" ([]) (toDigits 0))
test2_3 = TestCase (assertEqual "toDigits" ([]) (toDigits (-17)))
test3_1 = TestCase (assertEqual "doubleEveryOther" ([16,7,12,5]) (doubleEveryOther [8,7,6,5]))
test3_2 = TestCase (assertEqual "doubleEveryOther" ([1,4,3]) (doubleEveryOther [1,2,3]))
test4_1 = TestCase (assertEqual "sumDigits" (22) (sumDigits [16,7,12,5]))
test5_1 = TestCase (assertEqual "validate" (True) (validate 4012888888881881))
test5_2 = TestCase (assertEqual "validate" (False) (validate 4012888888881882))
test6_1 = TestCase (assertEqual "hanoi" ([("a","c"), ("a","b"), ("c","b")]) (hanoi 2 "a" "b" "c"))
week1Ex1Tests = TestList [TestLabel "week1Ex1Tests" test1_1, test1_2, test1_3, test1_4]
week1Ex2Tests = TestList [TestLabel "week1Ex2Tests" test2_1, test2_2, test2_3]
week1Ex3Tests = TestList [TestLabel "week1Ex3Tests" test3_1, test3_2]
week1Ex4Tests = TestList [TestLabel "week1Ex4Tests" test4_1]
week1Ex5Tests = TestList [TestLabel "week1Ex5Tests" test5_1, test5_2]
week1Ex6Tests = TestList [TestLabel "week1Ex6Tests" test6_1]
| rglew/cis194 | src/Week1/Tests.hs | mit | 1,737 | 0 | 11 | 271 | 633 | 339 | 294 | 28 | 1 |
module Menu where
import GlossUtilities
import Graphics.Gloss
import Data.List
menuBackgroundColor, menuBorderColor, menuTextColor :: Color
menuBackgroundColor = blue
menuBorderColor = white
menuTextColor = white
menuWidth, menuHeight, menuTextScale :: Float
menuWidth = 350.0
menuHeight = 650.0
menuTextScale = 0.4
menuLocation, menuLabelLocation :: (Float,Float)
menuLocation = (0,0)
menuLabelLocation = (10,200)
data MenuButtonType = StartButton | ExitButton deriving (Show)
data MenuButton = MenuButton { location :: (Float,Float)
, buttonSize :: (Float,Float)
, menuButtonPicture :: Picture
, buttonType :: MenuButtonType
} deriving (Show)
startButton :: MenuButton
startButton = MenuButton { location = (0,40)
, buttonSize = (100,80)
, menuButtonPicture = startButtonPic
, buttonType = StartButton
}
exitButton :: MenuButton
exitButton = MenuButton { location = (0,-60)
, buttonSize = (100,80)
, menuButtonPicture = exitButtonPic
, buttonType = ExitButton
}
leftMostCoordinate :: MenuButton -> Float
leftMostCoordinate b = middleX - (width / 2)
where middleX = fst $ location b
width = fst $ buttonSize b
rightMostCoordinate :: MenuButton -> Float
rightMostCoordinate b = (leftMostCoordinate b) + (fst $ buttonSize b)
topMostCoordinate :: MenuButton -> Float
topMostCoordinate b = (bottomMostCoordinate b) + (snd $ buttonSize b)
bottomMostCoordinate :: MenuButton -> Float
bottomMostCoordinate b = middleY - (height / 2)
where middleY = snd $ location b
height = snd $ buttonSize b
renderButton :: MenuButton -> Picture
renderButton b = uncurry translate (location b) $ menuButtonPicture b
renderMenu :: Picture
renderMenu = pictures [menuBox, title, startB, exitB]
where (mX,mY) = menuLocation
(tX,tY) = menuLabelLocation
menuBox = translate mX mY $ menuBgPicture
title = translate tX tY $
scale menuTextScale menuTextScale $
logoPic
startB = renderButton startButton
exitB = renderButton exitButton
checkClickEvent :: (Float,Float) -> Maybe MenuButtonType
checkClickEvent (x', y') = case button of
Nothing -> Nothing
Just b -> Just (buttonType b)
  where isX b = x' >= leftMostCoordinate b && x' <= rightMostCoordinate b
        isY b = y' >= bottomMostCoordinate b && y' <= topMostCoordinate b
        button = find (\b -> isX b && isY b) [startButton, restartButton, exitButton]
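-- Illustrative example (not part of the original module): with the geometry
-- above, 'startButton' covers x in [-50,50] and y in [0,80], so
-- @checkClickEvent (10,50)@ yields @Just StartButton@, while a click outside
-- every button yields @Nothing@.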
restartButton :: MenuButton
restartButton = startButton { location = (0,-200) }
renderRestartButton :: Picture
renderRestartButton = uncurry translate (location restartButton) (menuButtonPicture restartButton) | maple-shaft/HaskellTetris | src/Menu.hs | mit | 3,164 | 0 | 12 | 992 | 824 | 465 | 359 | 66 | 2 |
module Data.List.Marquee where
import Data.Maybe (fromMaybe)
lookupOr :: (Eq key) => a -> key -> [(key, a)] -> a
lookupOr d key = fromMaybe d . lookup key
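-- Illustrative examples (doctest-style, not part of the original module):
--
-- >>> lookupOr 0 "b" [("a",1),("b",2)]
-- 2
-- >>> lookupOr 0 "z" [("a",1),("b",2)]
-- 0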
| DanielRS/marquee | src/Data/List/Marquee.hs | mit | 157 | 0 | 10 | 30 | 72 | 40 | 32 | 4 | 1 |
add x y = x + y
main = do
print $ add 1 2
print $ 1 `add` 2
| shigemk2/haskell_abc | Ope.hs | mit | 71 | 0 | 8 | 31 | 46 | 22 | 24 | 4 | 1 |
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE EmptyDataDecls #-}
module MaxLenTest (
specs
#ifndef WITH_MONGODB
, maxlenMigrate
#endif
) where
import Init
import Data.String (IsString)
import Data.ByteString (ByteString)
#ifdef WITH_MONGODB
db :: Action IO () -> Assertion
db = db' (return ())
mkPersist persistSettings [persistUpperCase|
#else
share [mkPersist sqlSettings, mkMigrate "maxlenMigrate"] [persistLowerCase|
#endif
MaxLen
text1 Text
text2 Text maxlen=3
bs1 ByteString
bs2 ByteString maxlen=3
str1 String
str2 String maxlen=3
deriving Show Eq
|]
specs :: Spec
specs = describe "Maximum length attribute" $ do
it "" $ db $ do
let t1 = MaxLen a a a a a a
t2 = MaxLen b b b b b b
t2' = MaxLen b b' b b' b b'
a, b, b' :: IsString t => t
a = "a"
b = "12345"
b' = "123"
t1k <- insert t1
t2k <- insert t2
Just t1v <- get t1k
Just t2v <- get t2k
liftIO $ do t1v @?= t1
if t2v == t2
                    then t2v @?= t2 -- FIXME: why is this value not truncated?
else t2v @?= t2'
| gbwey/persistentold | persistent-test/MaxLenTest.hs | mit | 1,337 | 0 | 15 | 378 | 295 | 156 | 139 | 34 | 2 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Network.API.Mandrill.Monad where
import Network.API.Mandrill.Types
import Control.Applicative
import Control.Monad.Reader
newtype MandrillT m a = MandrillT {
runMandrillT :: ReaderT ApiKey m a
} deriving ( MonadTrans, MonadReader ApiKey
, Functor, Applicative, Monad, MonadIO)
runMandrill :: MonadIO m => ApiKey -> MandrillT m a -> m a
runMandrill k action = runReaderT (runMandrillT action) k
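-- Usage sketch (illustrative, not part of the original module): given some
-- @key :: ApiKey@, the reader environment is available via 'ask':
--
-- > fetchKey :: ApiKey -> IO ApiKey
-- > fetchKey key = runMandrill key ask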
| krgn/hamdrill | src/Network/API/Mandrill/Monad.hs | mit | 471 | 0 | 8 | 84 | 127 | 71 | 56 | 11 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
module Nix.Evaluator.Evaluator where
import Control.Monad.State.Strict --(MonadState(..), modify, execStateT)
import Nix.Common
import Nix.Evaluator.Builtins.Operators (interpretBinop, interpretUnop)
import Nix.Evaluator.Errors
import Nix.Atoms
import Nix.Expr (Params(..), ParamSet(..), NExpr, Antiquoted(..),
NUnaryOp(..), NBinaryOp(..), NKeyName(..), NExprF(..),
NString(..), Binding(..), mkSym, mkDot)
import Nix.Values
import Nix.Values.NativeConversion
import qualified Data.Map as M
import qualified Data.HashMap.Strict as H
import qualified Data.Set as S
-- | Given a lazy value meant to contain a function, and a second lazy
-- value to apply that function to, perform the function application.
evalApply :: Monad m => LazyValue m -> LazyValue m -> LazyValue m
evalApply func arg = func >>= \case
VNative (NativeFunction f) -> unwrapNative =<< f arg
VFunction params (Closure cEnv body) -> case params of
Param param -> do
let env' = insertEnvL param arg cEnv
evalNExpr env' body
ParamSet params mname -> arg >>= \case
-- We need the argument to be an attribute set to unpack arguments.
VAttrSet argSet -> do
let
-- The step function will construct a new env, and also find
-- any keys that are missing and don't have defaults.
-- Note that we have another recursive definition here,
-- since `step` refers to `callingEnv` and vice versa. This
-- means that we'll loop infinitely on circular variables.
step (newEnv, missingArgs) (key, mdef) =
case lookupEnv key argSet of
-- If the argument is provided, insert it.
Just val -> (insertEnvL key val newEnv, missingArgs)
-- If the argument is missing, see if there's a default.
Nothing -> case mdef of
-- Evaluate the default expression and insert it.
Just def -> do
(insertEnvL key (evalNExpr callingEnv def) newEnv,
missingArgs)
-- Otherwise record it as an error.
Nothing -> (newEnv, key : missingArgs)
-- Fold through the parameters with the step function.
(callingEnv', missing) = foldl' step (cEnv, []) $ paramList params
-- If there's a variable attached to the param set, add it
-- to the calling environment.
callingEnv = case mname of
Nothing -> callingEnv'
Just name -> insertEnvL name arg callingEnv'
case missing of
_:_ -> throwError $ MissingArguments missing
_ -> case params of
VariadicParamSet _ -> evalNExpr callingEnv body
-- We need to make sure there aren't any extra arguments.
FixedParamSet ps -> do
let keyList :: [Text] = envKeyList argSet
-- For each key, we'll check if it's in the params,
-- and otherwise it's an `ExtraArguments` error.
getExtra extraKeys key = case M.lookup key ps of
Nothing -> (key:extraKeys)
Just _ -> extraKeys
case foldl' getExtra [] keyList of
[] -> evalNExpr callingEnv body
extras -> throwError $ ExtraArguments extras
v -> expectedAttrs v
v -> expectedFunction v
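-- Illustrative example (not part of the original module): applying the Nix
-- function @{x, y ? 2}: ...@ to the set @{x = 1;}@ takes the 'ParamSet'
-- branch above: @x@ is looked up in the argument set, @y@ falls back to its
-- default (evaluated in 'callingEnv'), a missing mandatory key is reported
-- as 'MissingArguments', and an unexpected key on a 'FixedParamSet' is
-- reported as 'ExtraArguments'.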
-- | Evaluate a nix string into actual text.
evalString :: Monad ctx => LEnvironment ctx -> NString NExpr -> Eval ctx Text
evalString env = \case
DoubleQuoted strs -> concat <$> mapEval strs
Indented strs -> intercalate "\n" <$> mapEval strs
where mapEval = mapM (evalAntiquoted pure env)
-- | Evaluate an 'Antiquoted', resulting in 'Text'.
evalAntiquoted :: Monad ctx =>
(txt -> Eval ctx Text) ->
LEnvironment ctx ->
Antiquoted txt NExpr ->
Eval ctx Text
evalAntiquoted convertor env = \case
Plain string -> convertor string
Antiquoted expr -> evalNExpr env expr >>= \case
VString str -> pure str
v -> expectedString v
-- | Evaluate an 'NKeyName', which must result in 'Text'.
evalKeyName :: Monad ctx => LEnvironment ctx ->
NKeyName NExpr -> Eval ctx Text
evalKeyName env = \case
StaticKey text -> pure text
DynamicKey antiquoted -> evalAntiquoted convertor env antiquoted
where convertor = evalString env
-- | Data type to represent the state of an attribute set as it's built.
-- Values of this type are either actual lazy values ('Defined'), or they are
-- records containing other 'InProgress' objects ('InProgress').
-- This allows us to respond to this kind of syntax:
-- @{a.x = 1; b = 2; a.y = 3;}@. We can represent this intermediately as
-- @InProgress {a: InProgress {x: Defined 1, y: Defined 3}, b: Defined 2}@.
-- The idea is that while building the set, we can detect errors such as a
-- duplicate key: if a key path runs into a 'Defined', that key has already
-- been assigned a value.
data InProgress ctx
= Defined (LazyValue ctx)
| InProgress (Record (InProgress ctx))
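-- As a sketch of that staging (this assumes 'Record' is the strict,
-- 'Text'-keyed hash map that the functions below treat it as), the example
-- set in the comment above would be held as:
--
-- > InProgress $ H.fromList
-- >   [ ("a", InProgress $ H.fromList [("x", Defined one), ("y", Defined three)])
-- >   , ("b", Defined two) ]
--
-- where @one@, @two@ and @three@ stand for the lazy values of the three
-- integer literals.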
-- | Once it's finished, we can convert an 'InProgress' back into an
-- actual lazy value.
convertIP :: Monad ctx => InProgress ctx -> LazyValue ctx
convertIP (Defined v) = v
convertIP (InProgress record) = do
let items = H.toList record
step aset (key, asip) = insertEnvL key (convertIP asip) aset
pure $ VAttrSet $ foldl' step emptyE items
insertKeyPath :: Monad ctx =>
[Text] ->
LazyValue ctx ->
InProgress ctx ->
Eval ctx (InProgress ctx)
insertKeyPath kpath lval inProg = loop kpath inProg where
loop [] _ = pure $ Defined lval
loop _ (Defined _) = throwError $ DuplicateKeyPath kpath
loop (key:keys) (InProgress record) = do
next <- loop keys $ H.lookupDefault (InProgress mempty) key record
pure $ InProgress $ H.insert key next record
-- | Convert a list of bindings to an attribute set.
bindingsToSet :: Monad ctx =>
LEnvironment ctx ->
[Binding NExpr] ->
LazyValue ctx
bindingsToSet env bindings = do
let start = InProgress mempty
finish <- flip execStateT start $ forM_ bindings $ \case
NamedVar keys expr -> do
-- Convert the key expressions to a list of text.
keyPath <- lift $ mapM (evalKeyName env) keys
-- Insert the key into the in-progress set.
let lval = evalNExpr env expr
get >>= lift . insertKeyPath keyPath lval >>= put
Inherit maybeExpr keyNames -> forM_ keyNames $ \keyName -> do
-- Evaluate the keyName to a string.
varName <- lift $ evalKeyName env keyName
-- Create the lazy value.
let lval = evalNExpr env $ case maybeExpr of
Nothing -> mkSym varName
Just expr -> mkDot expr varName
-- Insert the keyname into the state.
get >>= lift . insertKeyPath [varName] lval >>= put
-- Convert the finished in-progress object into a LazyValue.
convertIP finish
paramList :: ParamSet e -> [(Text, Maybe e)]
paramList (FixedParamSet params) = M.toList params
paramList (VariadicParamSet params) = M.toList params
evalNExpr :: Monad m =>
LEnvironment m ->
NExpr ->
LazyValue m
evalNExpr env (Fix expr) = do
let recur = evalNExpr env
case expr of
NConstant atom -> pure $ VConstant atom
NSym name -> case lookupEnv name env of
Nothing -> throwError $ NameError name (envKeySet env)
Just val -> val
NList exprs -> pure $ VList $ fromList $ map recur exprs
NApp func arg -> recur func `evalApply` recur arg
NAbs param body -> pure $ VFunction param $ Closure env body
NSet bindings -> bindingsToSet env bindings
NStr string -> VString <$> evalString env string
NSelect expr' attrPath maybeDefault -> go attrPath $ recur expr' where
go [] lval = lval
go (keyName:keyNames) lval = lval >>= \case
VAttrSet attrs -> do
key <- evalKeyName env keyName
case lookupEnv key attrs of
Just lval' -> go keyNames lval'
Nothing -> case maybeDefault of
Just def -> recur def
Nothing -> throwError $ KeyError key $ envKeySet attrs
v -> expectedAttrs v
NUnary op innerExpr -> do
-- Translate the operator into a native function.
let func = interpretUnop op
-- Apply the function to the inner expression.
unwrapNative =<< applyNative func (recur innerExpr)
NBinary op left right -> do
-- Turn the operator into a binary native function.
let func = interpretBinop op
-- Apply the function to the two arguments and unwrap the result.
unwrapNative =<< applyNative2 func (recur left) (recur right)
_ -> error $ "We don't handle " <> show expr <> " yet"
| adnelson/nix-eval | src/Nix/Evaluator/Evaluator.hs | mit | 8,852 | 60 | 27 | 2,514 | 1,827 | 971 | 856 | -1 | -1 |
{-# LANGUAGE ScopedTypeVariables
, OverloadedStrings
, DeriveDataTypeable
, RecordWildCards
, RankNTypes #-}
module ImageCache ( ImageCache
, ImageRes(..)
, CacheEntry(..)
, withImageCache
, fetchImage
, deleteImage
, gatherCacheStats
) where
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as BL
import qualified Data.ByteString.Char8 as B8
import Data.Monoid
import Data.Word
import Data.Bits
import Data.IORef
import Data.Typeable
import Control.Concurrent.Async
import Control.Concurrent.STM
import Control.Monad.Trans.Resource (runResourceT)
import Network.URI
import qualified Data.Vector.Storable as VS
import qualified Data.Vector.Storable.Mutable as VSM
import System.Directory
import System.FilePath
import Control.Monad
import Control.Monad.IO.Class
import Network.HTTP.Conduit
import System.IO.Error
import qualified Codec.Picture as JP
import qualified Codec.Picture.Types as JPT
import Control.Exception
import Text.Printf
import qualified LRUBoundedMap as LBM
import Trace
import Timing
-- Caching system (disk & memory) for image fetches over HTTP and from disk
-- TODO: Add support for retiring elements from the disk cache, consider not having the disk
-- cache at all and only use it to speed up application startup / offline mode
-- TODO: Now that all image data ends up in the texture cache, this is mostly a
-- request queue and a staging area, with some record keeping for fetches
-- / errors. Maybe we can simplify and speed up some things?
data ImageCache = ImageCache
{ icCacheFolder :: B.ByteString
, icOutstandingReq :: TVar (LBM.Map B.ByteString ()) -- No 'v', used as a Set + LRU
, icCacheEntries :: TVar (LBM.Map B.ByteString CacheEntry)
-- Statistics
, icBytesTrans :: IORef Word64
, icMisses :: IORef Word64
, icDiskHits :: IORef Word64
, icMemHits :: IORef Word64
}
data CacheEntry = Fetching -- We keep in-progress entries in the cache to avoid double fetches
| Fetched !ImageRes
| CacheError -- Failed to load / fetch / decode image
!Double -- Tick after which we're allowed to retry
!Int -- Number of retry attempt scheduled next
deriving Eq
instance Show CacheEntry where
show Fetching = "Fetching"
show (Fetched _) = "Fetched"
show (CacheError retryTick retryAttempt) =
"CacheError " ++ show retryTick ++ " " ++ show retryAttempt
data ImageRes = ImageRes !Int !Int !(VS.Vector Word32)
deriving Eq
mkURICacheFn :: ImageCache -> B.ByteString -> B.ByteString
mkURICacheFn ic url = icCacheFolder ic
<> (B8.pack . escapeURIString isUnescapedInURIComponent $ B8.unpack url)
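-- Illustrative helper, not used elsewhere in this module (the URL below is
-- made up for the example): with a cache folder of "./cache/" this maps to
-- "./cache/http%3A%2F%2Fexample.com%2Fimg%2Fa%20b.png", i.e. every
-- character that is not unreserved gets percent-escaped, giving a flat,
-- unique file name per URL.
_exampleCacheFn :: ImageCache -> B.ByteString
_exampleCacheFn ic = mkURICacheFn ic "http://example.com/img/a b.png"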
withImageCache :: Manager
-> Int
-> Int
-> String
-> (ImageCache -> IO ())
-> IO ()
withImageCache manager memCacheEntryLimit numConcReq cacheFolder f = do
-- Make sure our cache folder exists
createDirectoryIfMissing True cacheFolder
-- Build record
ic <- do
icOutstandingReq <- newTVarIO $ LBM.empty $ memCacheEntryLimit `div` 2
icCacheEntries <- newTVarIO $ LBM.empty memCacheEntryLimit
[icBytesTrans, icMisses, icDiskHits, icMemHits] <- replicateM 4 $ newIORef 0
return $ ImageCache { icCacheFolder = B8.pack $ addTrailingPathSeparator cacheFolder
, ..
}
bracket -- Fetch thread launch and cleanup
--
-- Note the asyncWithUnmask. Otherwise all fetch threads would be
-- created with a MaskedInterruptible masking state (bracket does that
-- for its 'before' operations) and we would have hangs during cleanup
--
(forM [1..numConcReq] $ \_ -> asyncWithUnmask $ fetchThread ic manager)
(\threads -> do
-- Error checking, statistics
req <- LBM.valid <$> (atomically . readTVar $ icOutstandingReq ic)
cache <- LBM.valid <$> (atomically . readTVar $ icCacheEntries ic)
case req of Just err -> traceS TLError $ "LRUBoundedMap: icOutstandingReq:\n" ++ err
Nothing -> return ()
case cache of Just err -> traceS TLError $ "LRUBoundedMap: icCacheEntries:\n" ++ err
Nothing -> return ()
-- Shutdown
traceT TLInfo "Shutting down image fetch threads"
forM_ threads cancel
forM_ threads $ \thread -> do
r <- waitCatch thread
case r of
Left ex -> traceS TLError $ printf "Exception from fetch thread '%s': %s"
(show $ asyncThreadId thread)
(show ex)
_ -> return ()
)
(\_ -> f ic)
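-- A minimal usage sketch, not called anywhere in this module. The cache
-- limits, folder and URI are assumptions made up for the example, and
-- 'getTick' is used the same way 'fetchThread' below uses it; a real
-- caller would also sleep between polls instead of spinning.
_exampleWithImageCache :: Manager -> B.ByteString -> IO ()
_exampleWithImageCache manager uri =
    withImageCache manager 1024 4 "./img-cache" pollLoop
  where
    pollLoop ic = do
        tick <- getTick
        r <- fetchImage ic tick uri
        case r of
            Just (Fetched (ImageRes w h _)) ->
                traceS TLInfo $ printf "Fetched %ix%i image" w h
            Just (CacheError _ _) ->
                traceT TLWarn "Image fetch failed"
            -- Still queued / being fetched, poll again
            _ -> pollLoop ic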
dynImgToRGBA8 :: JP.DynamicImage -> Either String (JP.Image JP.PixelRGBA8)
dynImgToRGBA8 di =
case di of
JP.ImageYCbCr8 i -> Right $ JPT.promoteImage (JPT.convertImage i :: JP.Image JP.PixelRGB8)
JP.ImageRGBA8 i -> Right $ i
JP.ImageRGB8 i -> Right $ JPT.promoteImage i
JP.ImageY8 i -> Right $ JPT.promoteImage i
JP.ImageYA8 i -> Right $ JPT.promoteImage i
_ -> Left "Can't convert image format to RGBA8"
-- TODO: Include source format in error message
toImageRes :: JP.Image JP.PixelRGBA8 -> Either String ImageRes
toImageRes jp =
let w = JPT.imageWidth jp
h = JPT.imageHeight jp
pixToWord32 (JP.PixelRGBA8 r g b a) =
((fromIntegral r) `shiftL` 0 ) .|.
((fromIntegral g) `shiftL` 8 ) .|.
((fromIntegral b) `shiftL` 16) .|.
((fromIntegral a) `shiftL` 24)
:: Word32
convert = VS.create $ do
v <- VSM.new $ w * h
forM_ [(x, y) | y <- [0..h - 1], x <- [0..w - 1]] $
\(x, y) -> VSM.write v
(x + (h - 1 - y) * w) -- Flip image
(pixToWord32 $ JP.pixelAt jp x y)
return v
in if w > 512 || h > 512 -- TODO: Hardcoded / arbitrary
then Left $ printf "Image to large (%ix%i), won't convert" w h
else Right $ ImageRes w h convert
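-- A small illustrative helper (not used elsewhere in this module): packs one
-- RGBA quadruple exactly the way 'toImageRes' does, so the byte layout can
-- be checked in isolation, e.g.
-- _examplePackRGBA8 0xAA 0xBB 0xCC 0xFF == 0xFFCCBBAA
-- (alpha ends up in the most significant byte of the Word32).
_examplePackRGBA8 :: Word8 -> Word8 -> Word8 -> Word8 -> Word32
_examplePackRGBA8 r g b a =
    (fromIntegral r `shiftL` 0 ) .|.
    (fromIntegral g `shiftL` 8 ) .|.
    (fromIntegral b `shiftL` 16) .|.
    (fromIntegral a `shiftL` 24)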
-- Pop an uncached request off the request stack
popRequestStack :: ImageCache
-> IO (B.ByteString, Int) -- Return URI and the retry attempt number we're on
popRequestStack ic = do
let pop = atomically $ do
requests <- readTVar $ icOutstandingReq ic
let (requests', maybeURI) = LBM.popNewest requests
case maybeURI of
Just (uri, ()) ->
do -- Write the stack with the removed top item back
writeTVar (icOutstandingReq ic) requests'
-- Check if the request is already in the cache
cache <- readTVar $ icCacheEntries ic
case LBM.lookupNoLRU uri cache of
Nothing -> do
-- New request, mark fetch status and return URI
writeTVar (icCacheEntries ic) .
fst $ LBM.insert uri Fetching cache
return $ Just (uri, 0) -- No retries
Just (CacheError _ retryAttempt) -> do
-- Marked as an error in the cache, update to fetching and
-- return current retry attempt
writeTVar (icCacheEntries ic) $ LBM.update uri Fetching cache
return $ Just (uri, retryAttempt)
Just entry -> -- TODO: Maybe just commit transaction and trace instead?
error $ "popRequestStack internal error: '"
++ B8.unpack uri ++ "' in request queue, but already "
++ "marked as '" ++ (show entry) ++ "' in cache"
Nothing -> retry -- Empty request list, block till it makes sense to retry
r <- pop
case r of
Nothing -> popRequestStack ic -- Recurse till we get a URI or block on retry
Just res -> return res
-- File or HTTP?
uriIsHTTP :: B.ByteString -> Bool
uriIsHTTP uri = B.isPrefixOf "http://" uri || B.isPrefixOf "https://" uri
-- Fetch an image into the disk cache (if not already there) and return it as a lazy ByteString
fetchDiskCache :: ImageCache -> Manager -> B.ByteString -> FilePath -> IO BL.ByteString
fetchDiskCache ic manager uri cacheFn = do
bs <- tryIOError $ BL.readFile cacheFn -- Already in the disk cache?
case bs of
Left ex -> do
unless (isDoesNotExistError ex) $ -- Only handle the missing file ones silently
throwIO ex
-- No, fetch image
if uriIsHTTP uri
then runResourceT $ do
req <- parseUrl $ B8.unpack uri
res <- httpLbs req manager
-- Store in disk cache
liftIO $ do
incCacheMisses ic
-- TODO: Misses HTTP protocol overhead
incCacheBytesTransf ic . fromIntegral . BL.length . responseBody $ res
-- TODO: Disk cache writes disabled for now
-- BL.writeFile cacheFn $ responseBody res
return $ responseBody res
else BL.readFile $ B8.unpack uri -- TODO: We should restrict fetches of disk files
-- to certain directories, might be a security
-- issue otherwise
Right x -> incCacheDiskHits ic >> return x
modifyCacheEntries :: ImageCache
-> (LBM.Map B.ByteString CacheEntry -> LBM.Map B.ByteString CacheEntry)
-> IO ()
modifyCacheEntries ic f = atomically . modifyTVar' (icCacheEntries ic) $! f
data DecodeException = DecodeException { deError :: String
, deURI :: String
, deCacheFn :: String
} deriving (Show, Typeable)
instance Exception DecodeException
fetchThread :: ImageCache -> Manager -> (forall a. IO a -> IO a) -> IO ()
fetchThread ic manager unmask =
handle (\ThreadKilled -> -- Handle this exception here so we exit cleanly
-- traceT TLInfo "Fetch thread received 'ThreadKilled', exiting"
return ()
) . forever . unmask $ do
-- The inner bracket takes care of cleanup, here we decide if the exception is
-- recoverable or if we should stop the thread
catches
( do
-- Once we pop a request from the stack we own it, either fill it with valid
-- image data or mark it as a cache error. Note that we never want to call
-- LBM.insert on the cache directory, only LBM.update. Our entry might have
-- been removed while we're fetching it, don't add it back
bracketOnError
(popRequestStack ic)
(\(uriUncached, retryAttempt) -> do
time <- (+ (retryDelay retryAttempt)) <$> getTick
modifyCacheEntries ic . LBM.update uriUncached $ CacheError time (retryAttempt + 1)
)
(\(uriUncached, retryAttempt) -> do
when (retryAttempt > 0) .
traceS TLWarn $ printf
"Now attempting retry no. %i of failed URI fetch after >=%.1fsec delay: %s"
retryAttempt
(retryDelay $ retryAttempt - 1)
(B8.unpack uriUncached)
let cacheFn = B8.unpack $ mkURICacheFn ic uriUncached
imgBS <- fetchDiskCache ic manager uriUncached cacheFn
-- Decompress and convert
di <- {-# SCC decompressAndConvert #-} -- TODO: toStrict, nasty
case toImageRes =<< dynImgToRGBA8 =<< (JP.decodeImage $ BL.toStrict imgBS) of
Left err -> throwIO $ DecodeException
{ deError = err
, deURI = (B8.unpack uriUncached)
, deCacheFn = cacheFn
}
Right x -> return x
-- Update cache with image, make sure we actually decompress /
                -- convert it here instead of just storing a thunk
di `seq` modifyCacheEntries ic $! LBM.update uriUncached (Fetched di)
)
)
[ Handler $ \(ex :: IOException ) -> reportEx ex
, Handler $ \(ex :: HttpException ) -> reportEx ex
, Handler $ \(ex :: DecodeException) -> reportEx ex
]
where reportEx ex = traceS TLError $ "Image Cache Exception: " ++ show ex
retryDelay retryAttempt = ([2, 10, 30, 60, 120] ++ repeat 600) !! retryAttempt :: Double
-- Return the image at the given URI from the cache, or schedule fetching if not present
fetchImage :: ImageCache -> Double -> B.ByteString -> IO (Maybe CacheEntry)
fetchImage ic tick uri = do
r <- atomically $ do
cache <- readTVar (icCacheEntries ic)
case LBM.lookup uri cache of
(_, Nothing) -> addRequest >> return Nothing -- New image, add request
(cache', e@(Just (CacheError retryTick _))) -> do
when (tick > retryTick)
-- Time to retry this failed fetch, add to fetch queue. Note that we don't
-- remove the error from the cache, we want to keep the retry count around
addRequest
writeTVar (icCacheEntries ic) cache' -- Update LRU
return e
(cache', e) -> -- Hit, just update LRU
writeTVar (icCacheEntries ic) cache' >> return e
case r of
Just _ -> incCacheMemHits ic
Nothing -> return ()
return r
where addRequest = modifyTVar' (icOutstandingReq ic) (fst . LBM.insert uri ())
deleteImage :: ImageCache -> B.ByteString -> IO ()
deleteImage ic uri = modifyCacheEntries ic (fst . LBM.delete uri)
-- Cache statistics
incCacheBytesTransf :: ImageCache -> Word64 -> IO ()
incCacheBytesTransf ic n = atomicModifyIORef' (icBytesTrans ic) (\b -> (b + n, ()))
incCacheMisses :: ImageCache -> IO ()
incCacheMisses ic = atomicModifyIORef' (icMisses ic) (\n -> (n + 1, ()))
incCacheDiskHits :: ImageCache -> IO ()
incCacheDiskHits ic = atomicModifyIORef' (icDiskHits ic) (\n -> (n + 1, ()))
incCacheMemHits :: ImageCache -> IO ()
incCacheMemHits ic = atomicModifyIORef' (icMemHits ic) (\n -> (n + 1, ()))
-- TODO: Add statistics from TextureGrid
gatherCacheStats :: ImageCache -> IO String
gatherCacheStats ic = do
bytesTransf <- readIORef $ icBytesTrans ic
misses <- readIORef $ icMisses ic
diskHits <- readIORef $ icDiskHits ic
memHits <- readIORef $ icMemHits ic
entries <- atomically . readTVar $ icCacheEntries ic
requests <- atomically . readTVar $ icOutstandingReq ic
let (fetching, fetched, cacheErr, mem) = foldr
(\(_, v) (fetching', fetched', cacheErr', mem') -> case v of
Fetching -> (fetching' + 1, fetched', cacheErr', mem')
Fetched (ImageRes w h _) -> (fetching', fetched' + 1, cacheErr', mem' + w * h * 4)
CacheError _ _ -> (fetching', fetched', cacheErr' + 1, mem')
)
((0, 0, 0, 0) :: (Word64, Word64, Word64, Int))
(LBM.toList entries)
return $ printf
( "Netw. Recv. Total: %.3fMB · Mem %.3fMB | "
++ "Req: %i/%i · Dir: %i/%i | "
++ "Misses: %i · DiskHits: %i · MemHits: %i | "
++ "Fetching: %i · Fetched: %i · Error: %i"
)
(fromIntegral bytesTransf / 1024 / 1024 :: Double)
(fromIntegral mem / 1024 / 1024 :: Double)
(fst $ LBM.size requests)
(snd $ LBM.size requests)
(fst $ LBM.size entries)
(snd $ LBM.size entries)
misses
diskHits
memHits
fetching
fetched
cacheErr
| blitzcode/jacky | src/ImageCache.hs | mit | 16,797 | 97 | 28 | 6,077 | 3,625 | 1,900 | 1,725 | -1 | -1 |