code (string, length 5 to 1.03M) | repo_name (string, length 5 to 90) | path (string, length 4 to 158) | license (string, 15 classes) | size (int64, 5 to 1.03M) | n_ast_errors (int64, 0 to 53.9k) | ast_max_depth (int64, 2 to 4.17k) | n_whitespaces (int64, 0 to 365k) | n_ast_nodes (int64, 3 to 317k) | n_ast_terminals (int64, 1 to 171k) | n_ast_nonterminals (int64, 1 to 146k) | loc (int64, -1 to 37.3k) | cycloplexity (int64, -1 to 1.31k)
---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE DeriveDataTypeable #-}
module ViperVM.VirtualPlatform.Object (
Object(..), ObjectPeer(..), initObject, directTransfer,
initSubObject,
lockObject, unlockObject,
lockObjectIO, unlockObjectIO
) where
import ViperVM.Platform.Memory
import ViperVM.Platform.Link
import ViperVM.VirtualPlatform.Objects.Vector
import ViperVM.VirtualPlatform.Objects.Matrix
import Control.Concurrent.STM
import Data.Typeable
import Control.Applicative ( (<$>) )
import Control.Monad (when)
data ObjectPeer =
VectorObject Vector -- ^ Vector object
| MatrixObject Matrix -- ^ Matrix object
deriving (Show,Eq,Ord)
-- | An object in a memory
data Object = Object {
objectPeer :: ObjectPeer,
objectMemory :: Memory,
locking :: TVar Bool
} deriving (Typeable)
instance Eq Object where
(==) a b = objectPeer a == objectPeer b
instance Ord Object where
compare a b = compare (objectPeer a) (objectPeer b)
instance Show Object where
show = show . objectPeer
initObject :: ObjectPeer -> Memory -> IO Object
initObject peer mem = Object peer mem <$> newTVarIO False
-- | Initialize an object with the lock mechanism shared with its parent
initSubObject :: (ObjectPeer -> ObjectPeer) -> Object -> Object
initSubObject f obj = Object peer mem lck
where
peer = f (objectPeer obj)
mem = objectMemory obj
lck = locking obj
-- | Lock an object in read-only mode; the transaction retries while the object is already locked
lockObject :: Object -> STM ()
lockObject obj = do
v <- readTVar (locking obj)
when v retry
writeTVar (locking obj) True
-- | Unlock an object
unlockObject :: Object -> STM ()
unlockObject obj = do
writeTVar (locking obj) False
lockObjectIO :: Object -> IO ()
lockObjectIO = atomically . lockObject
unlockObjectIO :: Object -> IO ()
unlockObjectIO = atomically . unlockObject
-- | Perform a transfer
directTransfer :: Link -> Object -> Object -> IO ()
directTransfer link src dst = do
let srcP = objectPeer src
dstP = objectPeer dst
atomically $ do
lockObject src
lockObject dst
case (srcP,dstP) of
(VectorObject v1, VectorObject v2) -> vectorTransfer link v1 v2
(MatrixObject m1, MatrixObject m2) -> matrixTransfer link m1 m2
_ -> error "Invalid transfer"
atomically $ do
unlockObject src
unlockObject dst
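-- Usage sketch added for illustration (not part of the original module):
-- wrap two vectors living in two memories into objects and copy one into
-- the other.  The 'Link', 'Memory' and 'Vector' values are assumed to come
-- from the surrounding ViperVM platform code.
exampleVectorCopy :: Link -> Memory -> Memory -> Vector -> Vector -> IO ()
exampleVectorCopy link srcMem dstMem v1 v2 = do
  src <- initObject (VectorObject v1) srcMem
  dst <- initObject (VectorObject v2) dstMem
  -- directTransfer locks both objects, performs the copy and unlocks them
  directTransfer link src dst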
| hsyl20/HViperVM | lib/ViperVM/VirtualPlatform/Object.hs | lgpl-3.0 | 2,301 | 0 | 11 | 480 | 672 | 352 | 320 | 62 | 3 |
import Text.Printf

-- floor (logBase 2 x): k counts doublings of n, starting from k = 0, n = 1.
log2 k n x =
  if n > x
    then k - 1
    else log2 (k + 1) (2 * n) x

-- floor (logBase 2 x), except that numbers of the form 2^m - 1 (all bits
-- set) answer m instead.
ans x =
  let l2 = log2 0 1 x
  in
    if 2 ^ (l2 + 1) - 1 == x
      then l2 + 1
      else l2

main = do
  x <- fmap read getLine :: IO Int
  let o = ans x
  print o
| a143753/AOJ | 3027.hs | apache-2.0 | 243 | 0 | 12 | 91 | 153 | 76 | 77 | 14 | 2 |
{-
Copyright 2019 The CodeWorld Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
{-|
High-level functions for working with local account databases
-}
{-# LANGUAGE OverloadedStrings #-}
module CodeWorld.Account.Actions
( createAccount
, deleteAccount
, storeExists
, fetchAllAccounts
, incrementTokenId
, initStore
, updateAccount
, verifyPassword
, verifyTokenId
) where
import qualified CodeWorld.Account.Hashing as Hashing
import CodeWorld.Account.Types
import Control.Monad.Trans.State.Strict
( State
, execState
, modify
)
import Data.Monoid ((<>))
import Data.Text (Text)
import qualified Data.Text as Text (intercalate, pack)
import Database.SQLite.Simple
( Connection
, NamedParam(..)
, Only(..)
, Query(..)
, SQLData(..)
, execute
, execute_
, executeNamed
, query_
, queryNamed
, withConnection
, withTransaction
)
import Database.SQLite.Simple.ToField (ToField(..))
import System.Directory (doesFileExist)
type QueryParams = [(Text, SQLData)]
type QueryParamsBuilder = State QueryParams ()
withStore :: Store -> (Connection -> IO a) -> IO a
withStore (Store dbPath) = withConnection dbPath
storeExists :: Store -> IO Bool
storeExists (Store dbPath) = doesFileExist dbPath
initStore :: Store -> IO ()
initStore store = withStore store $ \conn -> do
execute_ conn "DROP TABLE IF EXISTS accounts"
execute_ conn
"CREATE TABLE IF NOT EXISTS accounts ( \
\ id INTEGER PRIMARY KEY NOT NULL \
\ , userId TEXT NOT NULL UNIQUE \
\ , status TEXT NOT NULL \
\ , passwordHash TEXT NOT NULL \
\ , tokenId INTEGER NOT NULL \
\)"
createAccount :: Store -> UserId -> Status -> PasswordHash -> IO ()
createAccount store (UserId userIdRaw) status (PasswordHash passwordHashRaw) =
withStore store $ \conn ->
execute conn
"INSERT INTO accounts (userId, status, passwordHash, tokenId) VALUES (?, ?, ?, 0)"
(userIdRaw, status, passwordHashRaw)
updateAccount :: Store -> UserId -> Maybe Status -> Maybe PasswordHash -> IO ()
updateAccount store (UserId userIdRaw) mbStatus mbPasswordHash =
let params = buildParams $ do
addParam mbStatus "status" toField
addParam mbPasswordHash "passwordHash" (\(PasswordHash passwordHashRaw) -> SQLBlob passwordHashRaw)
(q, ps) = renderInsert "accounts" "userId" (SQLText $ Text.pack userIdRaw) params
in case ps of
[_] -> pure ()
_ -> withStore store $ \conn -> executeNamed conn q ps
deleteAccount :: Store -> UserId -> IO ()
deleteAccount store (UserId userIdRaw) =
withStore store $ \conn ->
executeNamed conn "DELETE FROM accounts WHERE userId = :userId" [ ":userId" := userIdRaw ]
fetchAllAccounts :: Store -> IO [(UserId, Status, TokenId)]
fetchAllAccounts store =
withStore store $ \conn -> do
result <- query_ conn "SELECT userId, status, tokenId FROM accounts"
return $ map (\(userIdRaw, status, tokenIdRaw) -> (UserId userIdRaw, status, TokenId tokenIdRaw)) result
verifyPassword :: Store -> UserId -> Password -> IO (Maybe Status)
verifyPassword store (UserId userIdRaw) password = do
mbResult <- fetch
case mbResult of
Nothing -> pure Nothing
Just (status, passwordHash) ->
if Hashing.validate passwordHash password
then pure $ Just status
else pure Nothing
where
fetch :: IO (Maybe (Status, PasswordHash))
fetch =
withStore store $ \conn -> do
result <- queryNamed conn
"SELECT status, passwordHash FROM accounts WHERE userId = :userId"
[ ":userId" := userIdRaw ]
case result of
[ (status, passwordHashRaw) ] -> return $ Just (status, PasswordHash passwordHashRaw)
[] -> return Nothing
_ -> error "Assertion failure"
incrementTokenId :: Store -> UserId -> IO (Maybe TokenId)
incrementTokenId store (UserId userIdRaw) = withStore store $ \conn -> do
result <- withTransaction conn $ do
let params = [ ":userId" := userIdRaw ]
executeNamed conn
"UPDATE accounts SET tokenId = tokenId + 1 WHERE userId = :userId"
params
queryNamed conn
"SELECT tokenId FROM accounts WHERE userId = :userId"
params
case result of
[ (Only tokenIdRaw) ] -> return $ Just (TokenId tokenIdRaw)
[] -> return Nothing
_ -> error "Assertion failure"
-- |Verifies that the token ID is valid for an active user
verifyTokenId ::
Store -- ^ Account database configuration
-> UserId -- ^ User ID
-> TokenId -- ^ Token ID
-> IO Bool -- ^ True if valid, False otherwise
verifyTokenId store (UserId userIdRaw) (TokenId tokenIdRaw) = withStore store $ \conn -> do
let params =
[ ":userId" := userIdRaw
, ":tokenId" := tokenIdRaw
]
result <- queryNamed conn
"SELECT COUNT(*) FROM accounts WHERE userId = :userId AND tokenId = :tokenId AND status = 'Active'"
params
case result of
[ (Only count) ] -> case (count :: Int) of
0 -> return False
1 -> return True
_ -> error "Assertion failure"
_ -> error "Assertion failure"
buildParams :: QueryParamsBuilder -> QueryParams
buildParams = (flip execState) []
addParam :: Maybe a -> Text -> (a -> SQLData) -> QueryParamsBuilder
addParam Nothing _ _ = pure ()
addParam (Just value) name f =
modify
(\items -> (name, f value) : items)
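-- Renders a named-parameter UPDATE statement (not an INSERT, despite the
-- name) for the given table, keyed on the given key column, with one SET
-- assignment per collected parameter.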
renderInsert :: Text -> Text -> SQLData -> QueryParams -> (Query, [NamedParam])
renderInsert tableName keyName keyValue params =
let assignments = map (\(name, _) -> name <> " = :" <> name) params
ps = (":" <> keyName := keyValue) : map (\(name, value) -> ":" <> name := value) params
in (Query $ "UPDATE "
<> tableName
<> " SET "
<> Text.intercalate ", " assignments
<> " WHERE " <> keyName <> " = :" <> keyName, ps)
| pranjaltale16/codeworld | codeworld-account/src/CodeWorld/Account/Actions.hs | apache-2.0 | 7,156 | 0 | 17 | 2,272 | 1,598 | 839 | 759 | 137 | 5 |
module Programs (
correctPrograms,
typeIncorrectPrograms,
incorrectPrograms
) where
correctPrograms :: [String]
correctPrograms = [
"program hello in Haskell",
"platform x64-windows",
"platform x86",
"interpreter hugs for Haskell in i686-windows",
"compiler uuagc from UUAG to Haskell in i686-windows",
"execute program hello in Haskell on interpreter hugs for Haskell in x86-windows end",
"execute interpreter hugs for Haskell in x86-windows on platform x86-windows end",
"execute program hello in Haskell on interpreter hugs for Haskell in i686-windows end",
"compile program hello in UUAG with compiler uuagc from UUAG to Haskell in i686-windows end",
"execute compile program hello in UUAG with compiler uuagc from UUAG to Haskell in i686-windows end on interpreter hugs for Haskell in i686-windows end",
"execute execute compile program hello in UUAG with compiler uuagc from UUAG to Haskell in i686-windows end on platform i686-windows end on interpreter hugs for Haskell in i686-windows end"
]
typeIncorrectPrograms :: [String]
typeIncorrectPrograms = [
"execute platform x86 on platform ARM end", -- 1
"execute platform x86 on platform x86 end", -- 1
"execute program hello in Haskell on program world in Haskell end", -- 2
"execute interpreter hugs for Haskell in x86-windows on compiler hugs from Haskell to x86-windows in x86-windows end", -- 2
"execute compile program hello in UUAG with compiler uuagc from UUAG to Haskell in i686-windows end on interpreter x64-on-arm for x64 in arm end", -- 3
"compile platform x64 with compiler x64-to-x86 from x64 to x86 in x64 end", -- 4
"compile program hello in x64 with platform x64 end", -- 5
"compile program hello in Haskell with compiler uuagc from UUAG to Haskell in i686-windows end" -- 6
]
incorrectPrograms :: [String]
incorrectPrograms = [
"program hello",
"platform"
]
| aochagavia/CompilerConstruction | tdiagrams/test/Programs.hs | apache-2.0 | 1,948 | 0 | 5 | 393 | 125 | 84 | 41 | 31 | 1 |
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : QHttp.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:31
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Network.QHttp (
QqHttp(..)
,clearPendingRequests
,closeConnection
,currentDestinationDevice
,QcurrentRequest(..), QcurrentRequest_nf(..)
,currentSourceDevice
,hasPendingRequests
,qhead
,QlastResponse(..), QlastResponse_nf(..)
,Qpost(..)
,Qrequest(..)
,setSocket
,qHttp_delete
,qHttp_deleteLater
)
where
import Qth.ClassTypes.Core
import Qtc.Enums.Base
import Qtc.Enums.Network.QHttp
import Qtc.Classes.Base
import Qtc.Classes.Qccs
import Qtc.Classes.Core
import Qtc.ClassTypes.Core
import Qth.ClassTypes.Core
import Qtc.Classes.Network
import Qtc.ClassTypes.Network
instance QuserMethod (QHttp ()) (()) (IO ()) where
userMethod qobj evid ()
= withObjectPtr qobj $ \cobj_qobj ->
qtc_QHttp_userMethod cobj_qobj (toCInt evid)
foreign import ccall "qtc_QHttp_userMethod" qtc_QHttp_userMethod :: Ptr (TQHttp a) -> CInt -> IO ()
instance QuserMethod (QHttpSc a) (()) (IO ()) where
userMethod qobj evid ()
= withObjectPtr qobj $ \cobj_qobj ->
qtc_QHttp_userMethod cobj_qobj (toCInt evid)
instance QuserMethod (QHttp ()) (QVariant ()) (IO (QVariant ())) where
userMethod qobj evid qvoj
= withObjectRefResult $
withObjectPtr qobj $ \cobj_qobj ->
withObjectPtr qvoj $ \cobj_qvoj ->
qtc_QHttp_userMethodVariant cobj_qobj (toCInt evid) cobj_qvoj
foreign import ccall "qtc_QHttp_userMethodVariant" qtc_QHttp_userMethodVariant :: Ptr (TQHttp a) -> CInt -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))
instance QuserMethod (QHttpSc a) (QVariant ()) (IO (QVariant ())) where
userMethod qobj evid qvoj
= withObjectRefResult $
withObjectPtr qobj $ \cobj_qobj ->
withObjectPtr qvoj $ \cobj_qvoj ->
qtc_QHttp_userMethodVariant cobj_qobj (toCInt evid) cobj_qvoj
class QqHttp x1 where
qHttp :: x1 -> IO (QHttp ())
instance QqHttp (()) where
qHttp ()
= withQHttpResult $
qtc_QHttp
foreign import ccall "qtc_QHttp" qtc_QHttp :: IO (Ptr (TQHttp ()))
instance QqHttp ((String)) where
qHttp (x1)
= withQHttpResult $
withCWString x1 $ \cstr_x1 ->
qtc_QHttp1 cstr_x1
foreign import ccall "qtc_QHttp1" qtc_QHttp1 :: CWString -> IO (Ptr (TQHttp ()))
instance QqHttp ((QObject t1)) where
qHttp (x1)
= withQHttpResult $
withObjectPtr x1 $ \cobj_x1 ->
qtc_QHttp2 cobj_x1
foreign import ccall "qtc_QHttp2" qtc_QHttp2 :: Ptr (TQObject t1) -> IO (Ptr (TQHttp ()))
instance QqHttp ((String, Int)) where
qHttp (x1, x2)
= withQHttpResult $
withCWString x1 $ \cstr_x1 ->
qtc_QHttp3 cstr_x1 (toCUShort x2)
foreign import ccall "qtc_QHttp3" qtc_QHttp3 :: CWString -> CUShort -> IO (Ptr (TQHttp ()))
instance QqHttp ((String, ConnectionMode)) where
qHttp (x1, x2)
= withQHttpResult $
withCWString x1 $ \cstr_x1 ->
qtc_QHttp4 cstr_x1 (toCLong $ qEnum_toInt x2)
foreign import ccall "qtc_QHttp4" qtc_QHttp4 :: CWString -> CLong -> IO (Ptr (TQHttp ()))
instance QqHttp ((String, ConnectionMode, Int)) where
qHttp (x1, x2, x3)
= withQHttpResult $
withCWString x1 $ \cstr_x1 ->
qtc_QHttp5 cstr_x1 (toCLong $ qEnum_toInt x2) (toCUShort x3)
foreign import ccall "qtc_QHttp5" qtc_QHttp5 :: CWString -> CLong -> CUShort -> IO (Ptr (TQHttp ()))
instance QqHttp ((String, Int, QObject t3)) where
qHttp (x1, x2, x3)
= withQHttpResult $
withCWString x1 $ \cstr_x1 ->
withObjectPtr x3 $ \cobj_x3 ->
qtc_QHttp6 cstr_x1 (toCUShort x2) cobj_x3
foreign import ccall "qtc_QHttp6" qtc_QHttp6 :: CWString -> CUShort -> Ptr (TQObject t3) -> IO (Ptr (TQHttp ()))
instance QqHttp ((String, ConnectionMode, Int, QObject t4)) where
qHttp (x1, x2, x3, x4)
= withQHttpResult $
withCWString x1 $ \cstr_x1 ->
withObjectPtr x4 $ \cobj_x4 ->
qtc_QHttp7 cstr_x1 (toCLong $ qEnum_toInt x2) (toCUShort x3) cobj_x4
foreign import ccall "qtc_QHttp7" qtc_QHttp7 :: CWString -> CLong -> CUShort -> Ptr (TQObject t4) -> IO (Ptr (TQHttp ()))
instance Qabort (QHttp a) (()) (IO ()) where
abort x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QHttp_abort cobj_x0
foreign import ccall "qtc_QHttp_abort" qtc_QHttp_abort :: Ptr (TQHttp a) -> IO ()
instance QbytesAvailable (QHttp a) (()) where
bytesAvailable x0 ()
= withLongLongResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QHttp_bytesAvailable cobj_x0
foreign import ccall "qtc_QHttp_bytesAvailable" qtc_QHttp_bytesAvailable :: Ptr (TQHttp a) -> IO CLLong
clearPendingRequests :: QHttp a -> (()) -> IO ()
clearPendingRequests x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QHttp_clearPendingRequests cobj_x0
foreign import ccall "qtc_QHttp_clearPendingRequests" qtc_QHttp_clearPendingRequests :: Ptr (TQHttp a) -> IO ()
instance Qclose (QHttp a) (()) (IO (Int)) where
close x0 ()
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QHttp_close cobj_x0
foreign import ccall "qtc_QHttp_close" qtc_QHttp_close :: Ptr (TQHttp a) -> IO CInt
closeConnection :: QHttp a -> (()) -> IO (Int)
closeConnection x0 ()
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QHttp_closeConnection cobj_x0
foreign import ccall "qtc_QHttp_closeConnection" qtc_QHttp_closeConnection :: Ptr (TQHttp a) -> IO CInt
currentDestinationDevice :: QHttp a -> (()) -> IO (QIODevice ())
currentDestinationDevice x0 ()
= withQIODeviceResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QHttp_currentDestinationDevice cobj_x0
foreign import ccall "qtc_QHttp_currentDestinationDevice" qtc_QHttp_currentDestinationDevice :: Ptr (TQHttp a) -> IO (Ptr (TQIODevice ()))
instance QcurrentId (QHttp a) (()) where
currentId x0 ()
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QHttp_currentId cobj_x0
foreign import ccall "qtc_QHttp_currentId" qtc_QHttp_currentId :: Ptr (TQHttp a) -> IO CInt
class QcurrentRequest x0 x1 where
currentRequest :: x0 -> x1 -> IO (QHttpRequestHeader ())
class QcurrentRequest_nf x0 x1 where
currentRequest_nf :: x0 -> x1 -> IO (QHttpRequestHeader ())
instance QcurrentRequest (QHttp ()) (()) where
currentRequest x0 ()
= withQHttpRequestHeaderResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QHttp_currentRequest cobj_x0
foreign import ccall "qtc_QHttp_currentRequest" qtc_QHttp_currentRequest :: Ptr (TQHttp a) -> IO (Ptr (TQHttpRequestHeader ()))
instance QcurrentRequest (QHttpSc a) (()) where
currentRequest x0 ()
= withQHttpRequestHeaderResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QHttp_currentRequest cobj_x0
instance QcurrentRequest_nf (QHttp ()) (()) where
currentRequest_nf x0 ()
= withObjectRefResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QHttp_currentRequest cobj_x0
instance QcurrentRequest_nf (QHttpSc a) (()) where
currentRequest_nf x0 ()
= withObjectRefResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QHttp_currentRequest cobj_x0
currentSourceDevice :: QHttp a -> (()) -> IO (QIODevice ())
currentSourceDevice x0 ()
= withQIODeviceResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QHttp_currentSourceDevice cobj_x0
foreign import ccall "qtc_QHttp_currentSourceDevice" qtc_QHttp_currentSourceDevice :: Ptr (TQHttp a) -> IO (Ptr (TQIODevice ()))
instance Qqerror (QHttp a) (()) (IO (QHttpError)) where
qerror x0 ()
= withQEnumResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QHttp_error cobj_x0
foreign import ccall "qtc_QHttp_error" qtc_QHttp_error :: Ptr (TQHttp a) -> IO CLong
instance QerrorString (QHttp a) (()) where
errorString x0 ()
= withStringResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QHttp_errorString cobj_x0
foreign import ccall "qtc_QHttp_errorString" qtc_QHttp_errorString :: Ptr (TQHttp a) -> IO (Ptr (TQString ()))
instance Qqget (QHttp a) ((String)) where
qget x0 (x1)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QHttp_get cobj_x0 cstr_x1
foreign import ccall "qtc_QHttp_get" qtc_QHttp_get :: Ptr (TQHttp a) -> CWString -> IO CInt
instance Qqget (QHttp a) ((String, QIODevice t2)) where
qget x0 (x1, x2)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QHttp_get1 cobj_x0 cstr_x1 cobj_x2
foreign import ccall "qtc_QHttp_get1" qtc_QHttp_get1 :: Ptr (TQHttp a) -> CWString -> Ptr (TQIODevice t2) -> IO CInt
hasPendingRequests :: QHttp a -> (()) -> IO (Bool)
hasPendingRequests x0 ()
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QHttp_hasPendingRequests cobj_x0
foreign import ccall "qtc_QHttp_hasPendingRequests" qtc_QHttp_hasPendingRequests :: Ptr (TQHttp a) -> IO CBool
qhead :: QHttp a -> ((String)) -> IO (Int)
qhead x0 (x1)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QHttp_head cobj_x0 cstr_x1
foreign import ccall "qtc_QHttp_head" qtc_QHttp_head :: Ptr (TQHttp a) -> CWString -> IO CInt
class QlastResponse x0 x1 where
lastResponse :: x0 -> x1 -> IO (QHttpResponseHeader ())
class QlastResponse_nf x0 x1 where
lastResponse_nf :: x0 -> x1 -> IO (QHttpResponseHeader ())
instance QlastResponse (QHttp ()) (()) where
lastResponse x0 ()
= withQHttpResponseHeaderResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QHttp_lastResponse cobj_x0
foreign import ccall "qtc_QHttp_lastResponse" qtc_QHttp_lastResponse :: Ptr (TQHttp a) -> IO (Ptr (TQHttpResponseHeader ()))
instance QlastResponse (QHttpSc a) (()) where
lastResponse x0 ()
= withQHttpResponseHeaderResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QHttp_lastResponse cobj_x0
instance QlastResponse_nf (QHttp ()) (()) where
lastResponse_nf x0 ()
= withObjectRefResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QHttp_lastResponse cobj_x0
instance QlastResponse_nf (QHttpSc a) (()) where
lastResponse_nf x0 ()
= withObjectRefResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QHttp_lastResponse cobj_x0
class Qpost x1 where
post :: QHttp a -> x1 -> IO (Int)
instance Qpost ((String, QIODevice t2)) where
post x0 (x1, x2)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QHttp_post1 cobj_x0 cstr_x1 cobj_x2
foreign import ccall "qtc_QHttp_post1" qtc_QHttp_post1 :: Ptr (TQHttp a) -> CWString -> Ptr (TQIODevice t2) -> IO CInt
instance Qpost ((String, QIODevice t2, QIODevice t3)) where
post x0 (x1, x2, x3)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
withObjectPtr x2 $ \cobj_x2 ->
withObjectPtr x3 $ \cobj_x3 ->
qtc_QHttp_post3 cobj_x0 cstr_x1 cobj_x2 cobj_x3
foreign import ccall "qtc_QHttp_post3" qtc_QHttp_post3 :: Ptr (TQHttp a) -> CWString -> Ptr (TQIODevice t2) -> Ptr (TQIODevice t3) -> IO CInt
instance Qpost ((String, String)) where
post x0 (x1, x2)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
withCWString x2 $ \cstr_x2 ->
qtc_QHttp_post cobj_x0 cstr_x1 cstr_x2
foreign import ccall "qtc_QHttp_post" qtc_QHttp_post :: Ptr (TQHttp a) -> CWString -> CWString -> IO CInt
instance Qpost ((String, String, QIODevice t3)) where
post x0 (x1, x2, x3)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
withCWString x2 $ \cstr_x2 ->
withObjectPtr x3 $ \cobj_x3 ->
qtc_QHttp_post2 cobj_x0 cstr_x1 cstr_x2 cobj_x3
foreign import ccall "qtc_QHttp_post2" qtc_QHttp_post2 :: Ptr (TQHttp a) -> CWString -> CWString -> Ptr (TQIODevice t3) -> IO CInt
instance QreadAll (QHttp a) (()) where
readAll x0 ()
= withStringResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QHttp_readAll cobj_x0
foreign import ccall "qtc_QHttp_readAll" qtc_QHttp_readAll :: Ptr (TQHttp a) -> IO (Ptr (TQString ()))
class Qrequest x1 where
request :: QHttp a -> x1 -> IO (Int)
instance Qrequest ((QHttpRequestHeader t1)) where
request x0 (x1)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QHttp_request cobj_x0 cobj_x1
foreign import ccall "qtc_QHttp_request" qtc_QHttp_request :: Ptr (TQHttp a) -> Ptr (TQHttpRequestHeader t1) -> IO CInt
instance Qrequest ((QHttpRequestHeader t1, QIODevice t2)) where
request x0 (x1, x2)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QHttp_request2 cobj_x0 cobj_x1 cobj_x2
foreign import ccall "qtc_QHttp_request2" qtc_QHttp_request2 :: Ptr (TQHttp a) -> Ptr (TQHttpRequestHeader t1) -> Ptr (TQIODevice t2) -> IO CInt
instance Qrequest ((QHttpRequestHeader t1, QIODevice t2, QIODevice t3)) where
request x0 (x1, x2, x3)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x2 $ \cobj_x2 ->
withObjectPtr x3 $ \cobj_x3 ->
qtc_QHttp_request4 cobj_x0 cobj_x1 cobj_x2 cobj_x3
foreign import ccall "qtc_QHttp_request4" qtc_QHttp_request4 :: Ptr (TQHttp a) -> Ptr (TQHttpRequestHeader t1) -> Ptr (TQIODevice t2) -> Ptr (TQIODevice t3) -> IO CInt
instance Qrequest ((QHttpRequestHeader t1, String)) where
request x0 (x1, x2)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withCWString x2 $ \cstr_x2 ->
qtc_QHttp_request1 cobj_x0 cobj_x1 cstr_x2
foreign import ccall "qtc_QHttp_request1" qtc_QHttp_request1 :: Ptr (TQHttp a) -> Ptr (TQHttpRequestHeader t1) -> CWString -> IO CInt
instance Qrequest ((QHttpRequestHeader t1, String, QIODevice t3)) where
request x0 (x1, x2, x3)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withCWString x2 $ \cstr_x2 ->
withObjectPtr x3 $ \cobj_x3 ->
qtc_QHttp_request3 cobj_x0 cobj_x1 cstr_x2 cobj_x3
foreign import ccall "qtc_QHttp_request3" qtc_QHttp_request3 :: Ptr (TQHttp a) -> Ptr (TQHttpRequestHeader t1) -> CWString -> Ptr (TQIODevice t3) -> IO CInt
instance QsetHost (QHttp a) ((String)) (IO (Int)) where
setHost x0 (x1)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QHttp_setHost cobj_x0 cstr_x1
foreign import ccall "qtc_QHttp_setHost" qtc_QHttp_setHost :: Ptr (TQHttp a) -> CWString -> IO CInt
instance QsetHost (QHttp a) ((String, ConnectionMode)) (IO (Int)) where
setHost x0 (x1, x2)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QHttp_setHost2 cobj_x0 cstr_x1 (toCLong $ qEnum_toInt x2)
foreign import ccall "qtc_QHttp_setHost2" qtc_QHttp_setHost2 :: Ptr (TQHttp a) -> CWString -> CLong -> IO CInt
instance QsetHost (QHttp a) ((String, ConnectionMode, Int)) (IO (Int)) where
setHost x0 (x1, x2, x3)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QHttp_setHost3 cobj_x0 cstr_x1 (toCLong $ qEnum_toInt x2) (toCUShort x3)
foreign import ccall "qtc_QHttp_setHost3" qtc_QHttp_setHost3 :: Ptr (TQHttp a) -> CWString -> CLong -> CUShort -> IO CInt
instance QsetHost (QHttp a) ((String, Int)) (IO (Int)) where
setHost x0 (x1, x2)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QHttp_setHost1 cobj_x0 cstr_x1 (toCUShort x2)
foreign import ccall "qtc_QHttp_setHost1" qtc_QHttp_setHost1 :: Ptr (TQHttp a) -> CWString -> CUShort -> IO CInt
instance QsetProxy (QHttp a) ((QNetworkProxy t1)) (IO (Int)) where
setProxy x0 (x1)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QHttp_setProxy cobj_x0 cobj_x1
foreign import ccall "qtc_QHttp_setProxy" qtc_QHttp_setProxy :: Ptr (TQHttp a) -> Ptr (TQNetworkProxy t1) -> IO CInt
instance QsetProxy (QHttp a) ((String, Int)) (IO (Int)) where
setProxy x0 (x1, x2)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QHttp_setProxy1 cobj_x0 cstr_x1 (toCInt x2)
foreign import ccall "qtc_QHttp_setProxy1" qtc_QHttp_setProxy1 :: Ptr (TQHttp a) -> CWString -> CInt -> IO CInt
instance QsetProxy (QHttp a) ((String, Int, String)) (IO (Int)) where
setProxy x0 (x1, x2, x3)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
withCWString x3 $ \cstr_x3 ->
qtc_QHttp_setProxy2 cobj_x0 cstr_x1 (toCInt x2) cstr_x3
foreign import ccall "qtc_QHttp_setProxy2" qtc_QHttp_setProxy2 :: Ptr (TQHttp a) -> CWString -> CInt -> CWString -> IO CInt
instance QsetProxy (QHttp a) ((String, Int, String, String)) (IO (Int)) where
setProxy x0 (x1, x2, x3, x4)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
withCWString x3 $ \cstr_x3 ->
withCWString x4 $ \cstr_x4 ->
qtc_QHttp_setProxy3 cobj_x0 cstr_x1 (toCInt x2) cstr_x3 cstr_x4
foreign import ccall "qtc_QHttp_setProxy3" qtc_QHttp_setProxy3 :: Ptr (TQHttp a) -> CWString -> CInt -> CWString -> CWString -> IO CInt
setSocket :: QHttp a -> ((QTcpSocket t1)) -> IO (Int)
setSocket x0 (x1)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QHttp_setSocket cobj_x0 cobj_x1
foreign import ccall "qtc_QHttp_setSocket" qtc_QHttp_setSocket :: Ptr (TQHttp a) -> Ptr (TQTcpSocket t1) -> IO CInt
instance QsetUser (QHttp a) ((String)) (IO (Int)) where
setUser x0 (x1)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QHttp_setUser cobj_x0 cstr_x1
foreign import ccall "qtc_QHttp_setUser" qtc_QHttp_setUser :: Ptr (TQHttp a) -> CWString -> IO CInt
instance QsetUser (QHttp a) ((String, String)) (IO (Int)) where
setUser x0 (x1, x2)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
withCWString x2 $ \cstr_x2 ->
qtc_QHttp_setUser1 cobj_x0 cstr_x1 cstr_x2
foreign import ccall "qtc_QHttp_setUser1" qtc_QHttp_setUser1 :: Ptr (TQHttp a) -> CWString -> CWString -> IO CInt
instance Qstate (QHttp a) (()) (IO (QHttpState)) where
state x0 ()
= withQEnumResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QHttp_state cobj_x0
foreign import ccall "qtc_QHttp_state" qtc_QHttp_state :: Ptr (TQHttp a) -> IO CLong
qHttp_delete :: QHttp a -> IO ()
qHttp_delete x0
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QHttp_delete cobj_x0
foreign import ccall "qtc_QHttp_delete" qtc_QHttp_delete :: Ptr (TQHttp a) -> IO ()
qHttp_deleteLater :: QHttp a -> IO ()
qHttp_deleteLater x0
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QHttp_deleteLater cobj_x0
foreign import ccall "qtc_QHttp_deleteLater" qtc_QHttp_deleteLater :: Ptr (TQHttp a) -> IO ()
instance QchildEvent (QHttp ()) ((QChildEvent t1)) where
childEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QHttp_childEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QHttp_childEvent" qtc_QHttp_childEvent :: Ptr (TQHttp a) -> Ptr (TQChildEvent t1) -> IO ()
instance QchildEvent (QHttpSc a) ((QChildEvent t1)) where
childEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QHttp_childEvent cobj_x0 cobj_x1
instance QconnectNotify (QHttp ()) ((String)) where
connectNotify x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QHttp_connectNotify cobj_x0 cstr_x1
foreign import ccall "qtc_QHttp_connectNotify" qtc_QHttp_connectNotify :: Ptr (TQHttp a) -> CWString -> IO ()
instance QconnectNotify (QHttpSc a) ((String)) where
connectNotify x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QHttp_connectNotify cobj_x0 cstr_x1
instance QcustomEvent (QHttp ()) ((QEvent t1)) where
customEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QHttp_customEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QHttp_customEvent" qtc_QHttp_customEvent :: Ptr (TQHttp a) -> Ptr (TQEvent t1) -> IO ()
instance QcustomEvent (QHttpSc a) ((QEvent t1)) where
customEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QHttp_customEvent cobj_x0 cobj_x1
instance QdisconnectNotify (QHttp ()) ((String)) where
disconnectNotify x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QHttp_disconnectNotify cobj_x0 cstr_x1
foreign import ccall "qtc_QHttp_disconnectNotify" qtc_QHttp_disconnectNotify :: Ptr (TQHttp a) -> CWString -> IO ()
instance QdisconnectNotify (QHttpSc a) ((String)) where
disconnectNotify x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QHttp_disconnectNotify cobj_x0 cstr_x1
instance Qevent (QHttp ()) ((QEvent t1)) where
event x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QHttp_event_h cobj_x0 cobj_x1
foreign import ccall "qtc_QHttp_event_h" qtc_QHttp_event_h :: Ptr (TQHttp a) -> Ptr (TQEvent t1) -> IO CBool
instance Qevent (QHttpSc a) ((QEvent t1)) where
event x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QHttp_event_h cobj_x0 cobj_x1
instance QeventFilter (QHttp ()) ((QObject t1, QEvent t2)) where
eventFilter x0 (x1, x2)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QHttp_eventFilter_h cobj_x0 cobj_x1 cobj_x2
foreign import ccall "qtc_QHttp_eventFilter_h" qtc_QHttp_eventFilter_h :: Ptr (TQHttp a) -> Ptr (TQObject t1) -> Ptr (TQEvent t2) -> IO CBool
instance QeventFilter (QHttpSc a) ((QObject t1, QEvent t2)) where
eventFilter x0 (x1, x2)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
withObjectPtr x2 $ \cobj_x2 ->
qtc_QHttp_eventFilter_h cobj_x0 cobj_x1 cobj_x2
instance Qreceivers (QHttp ()) ((String)) where
receivers x0 (x1)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QHttp_receivers cobj_x0 cstr_x1
foreign import ccall "qtc_QHttp_receivers" qtc_QHttp_receivers :: Ptr (TQHttp a) -> CWString -> IO CInt
instance Qreceivers (QHttpSc a) ((String)) where
receivers x0 (x1)
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
withCWString x1 $ \cstr_x1 ->
qtc_QHttp_receivers cobj_x0 cstr_x1
instance Qsender (QHttp ()) (()) where
sender x0 ()
= withQObjectResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QHttp_sender cobj_x0
foreign import ccall "qtc_QHttp_sender" qtc_QHttp_sender :: Ptr (TQHttp a) -> IO (Ptr (TQObject ()))
instance Qsender (QHttpSc a) (()) where
sender x0 ()
= withQObjectResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QHttp_sender cobj_x0
instance QtimerEvent (QHttp ()) ((QTimerEvent t1)) where
timerEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QHttp_timerEvent cobj_x0 cobj_x1
foreign import ccall "qtc_QHttp_timerEvent" qtc_QHttp_timerEvent :: Ptr (TQHttp a) -> Ptr (TQTimerEvent t1) -> IO ()
instance QtimerEvent (QHttpSc a) ((QTimerEvent t1)) where
timerEvent x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QHttp_timerEvent cobj_x0 cobj_x1
| uduki/hsQt | Qtc/Network/QHttp.hs | bsd-2-clause | 23,271 | 0 | 16 | 4,260 | 8,137 | 4,153 | 3,984 | -1 | -1 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -Wall -fno-warn-orphans #-}
----------------------------------------------------------------------
{- |
Module : Data.ZoomCache.PCM.IEEE754
Copyright : Conrad Parker
License : BSD3-style (see LICENSE)
Maintainer : Conrad Parker <[email protected]>
Stability : unstable
Portability : unknown
Default codec implementation for PCM Audio of type Float and Double.
This module implements the interfaces documented in "Data.ZoomCache.Codec".
The table below describes the encoding of SummaryData for PCM.Float.
@
| ... | -35
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Min (float) | 36-39
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Max (float) | 40-43
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Mean [DC Bias] (float) | 44-47
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| RMS (float) | 48-51
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
@
The table below describes the encoding of SummaryData for PCM.Double.
@
| ... | -35
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Min (double) | 36-39
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| | 40-43
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Max (double) | 44-47
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| | 48-51
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Mean [DC Bias] (double) | 52-55
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| | 56-59
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| RMS (double) | 60-63
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| | 64-67
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
@
Field encoding formats:
@float@: big-endian IEEE 754-2008 binary32 (IEEE 754-1985 single)
@double@: big-endian IEEE 754-2008 binary64 (IEEE 754-1985 double)
-}
----------------------------------------------------------------------
module Data.ZoomCache.PCM.IEEE754 (
SummaryData(..)
, SummaryWork(..)
)where
import Blaze.ByteString.Builder
import Control.Applicative ((<$>))
import Data.ByteString (ByteString)
import Data.Iteratee (Iteratee)
import Text.Printf
import Data.ZoomCache.Codec
import Data.ZoomCache.PCM.Internal
import Data.ZoomCache.PCM.Types
----------------------------------------------------------------------
-- Float
instance ZoomReadable (PCM Float) where
data SummaryData (PCM Float) = SummaryPCMFloat
{ summaryPCMFloatMin :: {-# UNPACK #-}!Float
, summaryPCMFloatMax :: {-# UNPACK #-}!Float
, summaryPCMFloatAvg :: {-# UNPACK #-}!Double
, summaryPCMFloatRMS :: {-# UNPACK #-}!Double
}
trackIdentifier = const "ZPCMf32b"
readRaw = PCM <$> readFloat32be
readSummary = readSummaryPCM
prettyRaw = prettyPacketPCMFloat
prettySummaryData = prettySummaryPCMFloat
deltaDecodeRaw = deltaDecodePCM
#if __GLASGOW_HASKELL__ >= 702
{-# SPECIALIZE readSummaryPCM :: (Functor m, Monad m) => Iteratee ByteString m (SummaryData (PCM Float)) #-}
#endif
instance ZoomWrite (PCM Float) where
write = writeData
instance ZoomWrite (SampleOffset, PCM Float) where
write = writeDataVBR
instance ZoomWritable (PCM Float) where
data SummaryWork (PCM Float) = SummaryWorkPCMFloat
{ swPCMFloatTime :: {-# UNPACK #-}!SampleOffset
, swPCMFloatLast :: {-# UNPACK #-}!Float
, swPCMFloatMin :: {-# UNPACK #-}!Float
, swPCMFloatMax :: {-# UNPACK #-}!Float
, swPCMFloatSum :: {-# UNPACK #-}!Double
, swPCMFloatSumSq :: {-# UNPACK #-}!Double
}
fromRaw = pcmFromRaw . unPCM
fromSummaryData = fromSummaryPCM
initSummaryWork = initSummaryPCMFloat
toSummaryData = mkSummaryPCM
updateSummaryData = updateSummaryPCM
appendSummaryData = appendSummaryPCM
deltaEncodeRaw = deltaEncodePCM
instance ZoomPCM Float where
pcmFromRaw = fromFloat
pcmMin = summaryPCMFloatMin
pcmMax = summaryPCMFloatMax
pcmAvg = summaryPCMFloatAvg
pcmRMS = summaryPCMFloatRMS
pcmWorkSO = swPCMFloatTime
pcmWorkLast = swPCMFloatLast
pcmWorkMin = swPCMFloatMin
pcmWorkMax = swPCMFloatMax
pcmWorkSum = swPCMFloatSum
pcmWorkSumSq = swPCMFloatSumSq
pcmMkSummary = SummaryPCMFloat
pcmMkSummaryWork = SummaryWorkPCMFloat
#if __GLASGOW_HASKELL__ >= 702
{-# SPECIALIZE fromSummaryPCM :: SummaryData (PCM Float) -> Builder #-}
{-# SPECIALIZE mkSummaryPCM :: SampleOffsetDiff -> SummaryWork (PCM Float) -> SummaryData (PCM Float) #-}
{-# SPECIALIZE appendSummaryPCM :: SampleOffsetDiff -> SummaryData (PCM Float) -> SampleOffsetDiff -> SummaryData (PCM Float) -> SummaryData (PCM Float) #-}
{-# SPECIALIZE updateSummaryPCM :: SampleOffset -> PCM Float -> SummaryWork (PCM Float) -> SummaryWork (PCM Float) #-}
#endif
----------------------------------------------------------------------
-- Double
instance ZoomReadable (PCM Double) where
data SummaryData (PCM Double) = SummaryPCMDouble
{ summaryPCMDoubleMin :: {-# UNPACK #-}!Double
, summaryPCMDoubleMax :: {-# UNPACK #-}!Double
, summaryPCMDoubleAvg :: {-# UNPACK #-}!Double
, summaryPCMDoubleRMS :: {-# UNPACK #-}!Double
}
trackIdentifier = const "ZPCMf64b"
readRaw = PCM <$> readDouble64be
readSummary = readSummaryPCM
prettyRaw = prettyPacketPCMFloat
prettySummaryData = prettySummaryPCMFloat
deltaDecodeRaw = deltaDecodePCM
#if __GLASGOW_HASKELL__ >= 702
{-# SPECIALIZE readSummaryPCM :: (Functor m, Monad m) => Iteratee ByteString m (SummaryData (PCM Double)) #-}
#endif
instance ZoomWrite (PCM Double) where
write = writeData
instance ZoomWrite (SampleOffset, PCM Double) where
write = writeDataVBR
instance ZoomWritable (PCM Double) where
data SummaryWork (PCM Double) = SummaryWorkPCMDouble
{ swPCMDoubleTime :: {-# UNPACK #-}!SampleOffset
, swPCMDoubleLast :: {-# UNPACK #-}!Double
, swPCMDoubleMin :: {-# UNPACK #-}!Double
, swPCMDoubleMax :: {-# UNPACK #-}!Double
, swPCMDoubleSum :: {-# UNPACK #-}!Double
, swPCMDoubleSumSq :: {-# UNPACK #-}!Double
}
fromRaw = pcmFromRaw . unPCM
fromSummaryData = fromSummaryPCM
initSummaryWork = initSummaryPCMFloat
toSummaryData = mkSummaryPCM
updateSummaryData = updateSummaryPCM
appendSummaryData = appendSummaryPCM
deltaEncodeRaw = deltaEncodePCM
instance ZoomPCM Double where
pcmFromRaw = fromDouble
pcmMin = summaryPCMDoubleMin
pcmMax = summaryPCMDoubleMax
pcmAvg = summaryPCMDoubleAvg
pcmRMS = summaryPCMDoubleRMS
pcmWorkSO = swPCMDoubleTime
pcmWorkLast = swPCMDoubleLast
pcmWorkMin = swPCMDoubleMin
pcmWorkMax = swPCMDoubleMax
pcmWorkSum = swPCMDoubleSum
pcmWorkSumSq = swPCMDoubleSumSq
pcmMkSummary = SummaryPCMDouble
pcmMkSummaryWork = SummaryWorkPCMDouble
#if __GLASGOW_HASKELL__ >= 702
{-# SPECIALIZE fromSummaryPCM :: SummaryData (PCM Double) -> Builder #-}
{-# SPECIALIZE mkSummaryPCM :: SampleOffsetDiff -> SummaryWork (PCM Double) -> SummaryData (PCM Double) #-}
{-# SPECIALIZE appendSummaryPCM :: SampleOffsetDiff -> SummaryData (PCM Double) -> SampleOffsetDiff -> SummaryData (PCM Double) -> SummaryData (PCM Double) #-}
{-# SPECIALIZE updateSummaryPCM :: SampleOffset -> PCM Double -> SummaryWork (PCM Double) -> SummaryWork (PCM Double) #-}
#endif
----------------------------------------------------------------------
-- Helpers for float and double
prettyPacketPCMFloat :: PrintfArg a => PCM a -> String
prettyPacketPCMFloat = printf "%.3f" . unPCM
prettySummaryPCMFloat :: (PrintfArg a, ZoomPCM a)
=> SummaryData (PCM a) -> String
prettySummaryPCMFloat s = concat
[ printf "\tmin: %.3f\tmax: %.3f\t" (pcmMin s) (pcmMax s)
, printf "avg: %.3f\trms: %.3f" (pcmAvg s) (pcmRMS s)
]
initSummaryPCMFloat :: (RealFloat a, ZoomPCM a)
=> SampleOffset -> SummaryWork (PCM a)
initSummaryPCMFloat entry = pcmMkSummaryWork
entry
0.0
floatMax
(negate floatMax)
0.0
0.0
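-- Illustration (not part of the original module) of the PCM.Float summary
-- layout documented in the header: four consecutive big-endian binary32
-- fields in the order min, max, mean, RMS.  'fromFloat' is the codec helper
-- this module already uses via 'pcmFromRaw'; encoding the Double fields as
-- binary32 here mirrors the byte table above.
exampleFloatSummaryLayout :: SummaryData (PCM Float) -> Builder
exampleFloatSummaryLayout s = mconcat
    [ fromFloat (summaryPCMFloatMin s)
    , fromFloat (summaryPCMFloatMax s)
    , fromFloat (realToFrac (summaryPCMFloatAvg s))
    , fromFloat (realToFrac (summaryPCMFloatRMS s))
    ]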
| kfish/zoom-cache-pcm | Data/ZoomCache/PCM/IEEE754.hs | bsd-2-clause | 9,367 | 0 | 10 | 2,398 | 973 | 564 | 409 | 125 | 1 |
-- Read-Eval-Print loop for LambdaQuest.Finter
module Main where
import LambdaQuest.Finter
import LambdaQuest.Finter.Parse
import LambdaQuest.Finter.PrettyPrint
import LambdaQuest.Finter.Type
import LambdaQuest.Finter.TypeCheck (typeOf)
import LambdaQuest.Finter.Eval (termShift,termTypeSubst,eval1,ValueBinding(..))
import Control.Monad (when)
import System.IO
import Text.Parsec
import System.Console.Haskeline -- from `haskeline' package
data ReplCommand = ReplEval Term
| ReplTermDef String Term
| ReplTypeDef String Type
replCommand :: [NameBinding] -> Parser ReplCommand
replCommand ctx = termDef <|> typeDef <|> termEval <?> "REPL Command"
where
termEval = do
whiteSpace
t <- term ctx
eof
return (ReplEval t)
termDef = try $ do
reserved "let"
name <- identifier
reservedOp "="
t <- term ctx
eof
return (ReplTermDef name t)
typeDef = do
reserved "type"
name <- identifier
reservedOp "="
t <- typeExpr ctx
eof
return (ReplTypeDef name t)
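-- Accepted REPL input, as parsed above (summary for illustration):
--
--   let <name> = <term>     -- bind a term to a name
--   type <name> = <type>    -- bind a type alias
--   <term>                  -- evaluate a term, printing its type and value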
data REPLBinding = Let String Term CCanonicalType
| TypeDef String CCanonicalType
toNameBinding :: REPLBinding -> NameBinding
toNameBinding (Let name _ _) = NVarBind name
toNameBinding (TypeDef name _) = NTyVarBind name
toBinding :: REPLBinding -> Binding
toBinding (Let name _ ty) = VarBind name ty
toBinding (TypeDef name ty) = TyVarBind name []
toValueBinding :: REPLBinding -> ValueBinding
toValueBinding (Let _ v _) = ValueBind v
toValueBinding (TypeDef _ _) = TypeBind
resolveTypeAliasInTerm :: Type -> Int -> Term -> Term
resolveTypeAliasInTerm ty i = termShift 1 i . termTypeSubst ty i
resolveTypeAliasesInTerm :: [REPLBinding] -> Int -> Term -> Term
resolveTypeAliasesInTerm [] _ = id
resolveTypeAliasesInTerm (Let name m ty : xs) i = resolveTypeAliasesInTerm xs (i + 1)
resolveTypeAliasesInTerm (TypeDef name ty : xs) i = resolveTypeAliasesInTerm xs (i + 1) . resolveTypeAliasInTerm (canonicalToOrdinary ty) i
resolveTypeAliasInType :: Type -> Int -> Type -> Type
resolveTypeAliasInType ty i = typeShift 1 i . typeSubst ty i
resolveTypeAliasesInType :: [REPLBinding] -> Int -> Type -> Type
resolveTypeAliasesInType [] _ = id
resolveTypeAliasesInType (Let name m ty : xs) i = resolveTypeAliasesInType xs (i + 1)
resolveTypeAliasesInType (TypeDef name ty : xs) i = resolveTypeAliasesInType xs (i + 1) . resolveTypeAliasInType (canonicalToOrdinary ty) i
repl :: [REPLBinding] -> InputT IO ()
repl ctx = do
mline <- getInputLine "> "
case mline of
Nothing -> outputStrLn "Bye!" -- EOF / Ctrl-D
Just line -> do
case parse (replCommand (map toNameBinding ctx)) "<stdin>" line of
Left error -> do
outputStrLn $ show error -- parse error
repl ctx
Right (ReplEval tm) -> let tm' = resolveTypeAliasesInTerm ctx 0 tm
in case typeOf (map toBinding ctx) tm' of
Left error -> do
outputStrLn $ "Type error: " ++ error
repl ctx
Right ty -> do
let ty' = normalizeType (map toBinding ctx) ty
outputStrLn $ "Type is " ++ prettyPrintCanonicalType ty' ++ "."
outputStrLn "Evaluation:"
outputStrLn (prettyPrintTerm tm')
evalLoop tm'
repl ctx
Right (ReplTermDef name tm) -> let tm' = resolveTypeAliasesInTerm ctx 0 tm
in case typeOf (map toBinding ctx) tm' of
Left error -> do
outputStrLn $ "Type error: " ++ error
repl ctx
Right ty -> do
let ty' = normalizeType (map toBinding ctx) ty
outputStrLn $ name ++ " : " ++ prettyPrintCanonicalType ty' ++ "."
outputStrLn "Evaluation:"
outputStrLn (prettyPrintTerm tm')
result <- evalLoop tm'
case result of
Just value -> repl (Let name value ty' : ctx)
Nothing -> repl ctx
Right (ReplTypeDef name ty) -> do
let ty' = normalizeType (map toBinding ctx) ty
outputStrLn $ name ++ " := " ++ prettyPrintCanonicalType ty' ++ "."
repl (TypeDef name ty' : ctx)
where
prettyPrintCanonicalType t = prettyPrintCanonicalTypeP 0 (map toNameBinding ctx) t ""
prettyPrintTerm t = prettyPrintTermP 0 (map toNameBinding ctx) t ""
evalLoop :: Term -> InputT IO (Maybe Term)
evalLoop t = case eval1 (map toValueBinding ctx) t of
Left error -> do outputStrLn $ "Evaluation error: " ++ error
return Nothing
Right t' | isValue t' -> do
outputStrLn $ "--> " ++ prettyPrintTerm t' ++ "."
return (Just t')
| otherwise -> do
outputStrLn $ "--> " ++ prettyPrintTerm t'
evalLoop t'
main :: IO ()
main = runInputT defaultSettings $ do
outputStrLn "This is Finter REPL."
outputStrLn "Press Ctrl-D to exit."
repl []
| minoki/LambdaQuest | src/Finter-repl.hs | bsd-3-clause | 5,076 | 0 | 28 | 1,477 | 1,569 | 746 | 823 | 115 | 9 |
{-# language CPP #-}
-- | = Name
--
-- VK_AMD_negative_viewport_height - device extension
--
-- == VK_AMD_negative_viewport_height
--
-- [__Name String__]
-- @VK_AMD_negative_viewport_height@
--
-- [__Extension Type__]
-- Device extension
--
-- [__Registered Extension Number__]
-- 36
--
-- [__Revision__]
-- 1
--
-- [__Extension and Version Dependencies__]
--
-- - Requires Vulkan 1.0
--
-- [__Deprecation state__]
--
-- - /Obsoleted/ by @VK_KHR_maintenance1@ extension
--
-- - Which in turn was /promoted/ to
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#versions-1.1-promotions Vulkan 1.1>
--
-- [__Contact__]
--
-- - Matthaeus G. Chajdas
-- <https://github.com/KhronosGroup/Vulkan-Docs/issues/new?body=[VK_AMD_negative_viewport_height] @anteru%0A<<Here describe the issue or question you have about the VK_AMD_negative_viewport_height extension>> >
--
-- == Other Extension Metadata
--
-- [__Last Modified Date__]
-- 2016-09-02
--
-- [__IP Status__]
-- No known IP claims.
--
-- [__Contributors__]
--
-- - Matthaeus G. Chajdas, AMD
--
-- - Graham Sellers, AMD
--
-- - Baldur Karlsson
--
-- == Description
--
-- This extension allows an application to specify a negative viewport
-- height. The result is that the viewport transformation will flip along
-- the y-axis.
--
-- - Allow negative height to be specified in the
-- 'Vulkan.Core10.Pipeline.Viewport'::@height@ field to perform
-- y-inversion of the clip-space to framebuffer-space transform. This
-- allows apps to avoid having to use @gl_Position.y = -gl_Position.y@
-- in shaders also targeting other APIs.
--
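-- For example, an application can flip the y axis by placing the viewport
-- origin on the bottom edge of the framebuffer and passing a negated
-- height.  This is a sketch only: the record fields shown follow this
-- package's 'Vulkan.Core10.Pipeline.Viewport' and are an assumption of
-- this note, not text from the extension specification.
--
-- > flippedViewport :: Float -> Float -> Viewport
-- > flippedViewport w h = Viewport
-- >   { x        = 0
-- >   , y        = h          -- start at the bottom edge
-- >   , width    = w
-- >   , height   = negate h   -- negative height flips the y axis
-- >   , minDepth = 0
-- >   , maxDepth = 1
-- >   }
--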
-- == Obsoletion by @VK_KHR_maintenance1@ and Vulkan 1.1
--
-- Functionality in this extension is included in @VK_KHR_maintenance1@ and
-- subsequently Vulkan 1.1. Due to some slight behavioral differences, this
-- extension /must/ not be enabled alongside @VK_KHR_maintenance1@, or in
-- an instance created with version 1.1 or later requested in
-- 'Vulkan.Core10.DeviceInitialization.ApplicationInfo'::@apiVersion@.
--
-- == New Enum Constants
--
-- - 'AMD_NEGATIVE_VIEWPORT_HEIGHT_EXTENSION_NAME'
--
-- - 'AMD_NEGATIVE_VIEWPORT_HEIGHT_SPEC_VERSION'
--
-- == Version History
--
-- - Revision 1, 2016-09-02 (Matthaeus Chajdas)
--
-- - Initial draft
--
-- == See Also
--
-- No cross-references are available
--
-- == Document Notes
--
-- For more information, see the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#VK_AMD_negative_viewport_height Vulkan Specification>
--
-- This page is a generated document. Fixes and changes should be made to
-- the generator scripts, not directly.
module Vulkan.Extensions.VK_AMD_negative_viewport_height ( AMD_NEGATIVE_VIEWPORT_HEIGHT_SPEC_VERSION
, pattern AMD_NEGATIVE_VIEWPORT_HEIGHT_SPEC_VERSION
, AMD_NEGATIVE_VIEWPORT_HEIGHT_EXTENSION_NAME
, pattern AMD_NEGATIVE_VIEWPORT_HEIGHT_EXTENSION_NAME
) where
import Data.String (IsString)
type AMD_NEGATIVE_VIEWPORT_HEIGHT_SPEC_VERSION = 1
-- No documentation found for TopLevel "VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_SPEC_VERSION"
pattern AMD_NEGATIVE_VIEWPORT_HEIGHT_SPEC_VERSION :: forall a . Integral a => a
pattern AMD_NEGATIVE_VIEWPORT_HEIGHT_SPEC_VERSION = 1
type AMD_NEGATIVE_VIEWPORT_HEIGHT_EXTENSION_NAME = "VK_AMD_negative_viewport_height"
-- No documentation found for TopLevel "VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_EXTENSION_NAME"
pattern AMD_NEGATIVE_VIEWPORT_HEIGHT_EXTENSION_NAME :: forall a . (Eq a, IsString a) => a
pattern AMD_NEGATIVE_VIEWPORT_HEIGHT_EXTENSION_NAME = "VK_AMD_negative_viewport_height"
| expipiplus1/vulkan | src/Vulkan/Extensions/VK_AMD_negative_viewport_height.hs | bsd-3-clause | 3,905 | 0 | 8 | 783 | 205 | 159 | 46 | -1 | -1 |
module Evaluation
( eval
, liftThrows
, primitiveEnv
, bindVars)
where
import Data( LispVal(..)
, LispError(..)
, LispFunction
, IOLispFunction
, Env
, emptyEnv
, IOThrowsError
, liftThrows
)
import Control.Monad.Except(throwError, liftIO)
import Data.Maybe(isJust, isNothing)
import Data.IORef
import System.IO(IOMode(ReadMode,WriteMode))
import Evaluation.Primitives
import Evaluation.IOPrimitives
isBound :: Env -> String -> IO Bool
isBound envRef var = fmap (isJust . lookup var) (readIORef envRef)
getVar :: Env -> String -> IOThrowsError LispVal
getVar envRef var = do env <- liftIO $ readIORef envRef
maybe (throwError $ UnboundVar "Getting an unbound variable" var)
(liftIO . readIORef)
(lookup var env)
setVar :: Env -> String -> LispVal -> IOThrowsError LispVal
setVar envRef var value = do env <- liftIO $ readIORef envRef
maybe (throwError $ UnboundVar "Setting an unbound variable" var)
(liftIO . flip writeIORef value)
(lookup var env)
return value
defineVar :: Env -> String -> LispVal -> IOThrowsError LispVal
defineVar envRef var value = do
alreadyDefined <- liftIO $ isBound envRef var
if alreadyDefined
then setVar envRef var value >> return value
else liftIO $ do
valueRef <- newIORef value
env <- readIORef envRef
writeIORef envRef ((var, valueRef) : env)
return value
bindVars :: Env -> [(String, LispVal)] -> IO Env
bindVars envRef bindings = readIORef envRef >>= extendEnv bindings >>= newIORef
where extendEnv bindings' env = fmap (++ env) (mapM addBinding bindings')
addBinding (var, value) = do ref <- newIORef value
return (var, ref)
eval :: Env -> LispVal -> IOThrowsError LispVal
eval _ val@(String _) = return val
eval _ val@(Number _) = return val
eval _ val@(Bool _) = return val
eval env (Atom atom) = getVar env atom
eval _ (List [Atom "quote", val]) = return val
eval env (List [Atom "if", p, c, a]) = controlIf env p c a
eval env (List [Atom "set!", Atom var, form]) =
eval env form >>= setVar env var
eval env (List [Atom "define", Atom var, form]) =
eval env form >>= defineVar env var
eval env (List (Atom "define" : List (Atom var : params) : body)) =
makeNormalFunc env params body >>= defineVar env var
eval env (List (Atom "define" : DottedList (Atom var : params) varargs : body)) =
makeVarArgs varargs env params body >>= defineVar env var
eval env (List (Atom "lambda" : List params : body)) =
makeNormalFunc env params body
eval env (List (Atom "lambda" : DottedList params varargs : body)) =
makeVarArgs varargs env params body
eval env (List (Atom "lambda" : varargs@(Atom _) : body)) =
makeVarArgs varargs env [] body
eval env (List (Atom "begin":first:rest)) = controlBegin env (first:rest)
eval env (List [Atom "load", String filename]) = load filename >>= fmap last . mapM (eval env)
eval env (List (function : args)) = do
func <- eval env function
argVals <- mapM (eval env) args
apply func argVals
eval _ badform = throwError $ BadSpecialForm "Unrecognized special form" badform
apply :: LispVal -> IOLispFunction
apply (PrimitiveFunc func) args = liftThrows $ func args
apply (Func params varargs body closure) args =
if num params /= num args && isNothing varargs
then throwError $ NumArgs (num params) args
else liftIO (bindVars closure $ zip params args) >>= bindVarArgs varargs >>= evalBody
where remainingArgs = drop (length params) args
num = toInteger . length
evalBody env = last <$> mapM (eval env) body
bindVarArgs arg env = case arg of
Just argName -> liftIO $ bindVars env [(argName, List remainingArgs)]
Nothing -> return env
apply (IOFunc func) args = func args
apply f args = throwError $ BadSpecialForm "Unrecognized special form" (List $ f:args)
controlIf :: Env -> LispVal -> LispVal -> LispVal -> IOThrowsError LispVal
controlIf env predicate consequence alternative =
do result <- eval env predicate
case result of
Bool False -> eval env alternative
_ -> eval env consequence
controlBegin :: Env -> IOLispFunction
controlBegin env (first:rest) = last <$> mapM (eval env) (first:rest)
controlBegin _ [] = throwError $ BadSpecialForm "Begin needs at least 1 expression" (List [])
makeFunc :: Maybe String -> Env -> [LispVal] -> IOLispFunction
makeFunc varargs env params body = return $ Func (map show params) varargs body env
makeNormalFunc :: Env -> [LispVal] -> IOLispFunction
makeNormalFunc = makeFunc Nothing
makeVarArgs :: LispVal -> Env -> [LispVal] -> IOLispFunction
makeVarArgs = makeFunc . Just . show
primitiveEnv :: IO Env
primitiveEnv = emptyEnv >>= flip bindVars (map (mkfunc IOFunc) ioPrimitives
++ map (mkfunc PrimitiveFunc) primitives)
where mkfunc constructor (var, func) = (var, constructor func)
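-- Usage sketch added for illustration (not part of the original module):
-- build the primitive environment and evaluate the Scheme expression
-- (+ 1 2).  The result is produced in IOThrowsError, exactly as 'eval'
-- returns it; callers can run it with whatever error handling the
-- surrounding project uses.
exampleEvalPlus :: IOThrowsError LispVal
exampleEvalPlus = do
  env <- liftIO primitiveEnv
  eval env (List [Atom "+", Number 1, Number 2])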
primitives :: [(String, LispFunction)]
primitives = [ ("+", numPlus)
, ("-", numMinus)
, ("*", numMul)
, ("/", numDiv)
, ("mod", numMod)
, ("quotient", numQuot)
, ("remainder", numRem)
, ("symbol?", isSymbol)
, ("bool?", isBool)
, ("string?", isString)
, ("number?", isNumber)
, ("list?", isList )
, ("=", numEq)
, ("<", numLessT)
, (">", numGreatT)
, ("/=", numNotEq)
, (">=", numGE)
, ("<=", numLE)
, ("&&", boolAnd)
, ("||", boolOr)
, ("not", boolNot)
, ("string=?", strEq)
, ("string<?", strLessT)
, ("string>?", strGreatT)
, ("string<=?", strLE)
, ("string>=?", strGE)
, ("head", head_)
, ("tail", tail_)
, ("cons", cons)
, ("eqv?", eqv)
, ("eq?", eqv)
, ("equal?", equal)
]
ioPrimitives :: [(String, IOLispFunction)]
ioPrimitives = [("apply", applyProc),
("open-input-file", makePort ReadMode),
("open-output-file", makePort WriteMode),
("close-input-port", closePort),
("close-output-port", closePort),
("read", readProc),
("write", writeProc),
("read-contents", readContents),
("read-all", readAll)]
applyProc :: IOLispFunction
applyProc [func, List args] = apply func args
applyProc (func : args) = apply func args
applyProc args = throwError $ BadSpecialForm "Bad apply form" (List args)
| davideGiovannini/scheme-repl | src/Evaluation.hs | bsd-3-clause | 7,224 | 0 | 14 | 2,334 | 2,369 | 1,247 | 1,122 | 155 | 3 |
{-# LANGUAGE OverloadedStrings #-}
module IrcScanner.KeywordIndexPage where
import IrcScanner.Types
--import IrcScanner.Index(getIState)
import Snap
import Snap.Snaplet.Heist
import Control.Lens
import Control.Monad.IO.Class(liftIO)
--import Control.Monad.Trans(lift)
import Control.Monad.Reader(ask)
import Data.IORef(readIORef)
import Heist.Interpreted
import Heist
import Data.Map.Syntax((##))
--import Data.Text(pack)
-- import Control.Monad.Trans.Either (runEitherT,left, EitherT(..))
-- import Control.Monad.Trans (lift)
-- import Data.ByteString(ByteString)
-- import Data.Text.Encoding
import Data.List(sortBy)
splicesFromCIR :: Monad n => CachedIndexResult -> Splices (Splice n)
splicesFromCIR cir =
do
"keyword" ## textSplice $ view (cindex . idisplayName) cir
allSplices :: [CachedIndexResult] -> Splices (SnapletISplice x)
allSplices cirs =
do
"allIndexes" ## (mapSplices (runChildrenWith . splicesFromCIR)
(sortBy (\a b -> compare (_idisplayName (_cindex a))
(_idisplayName (_cindex b)))
cirs))
keywordIndexHandler :: HasHeist x => Handler x IrcSnaplet ()
keywordIndexHandler = do
s <- ask
st <- liftIO $ readIORef (view (iconfig . cstate) s)
renderWithSplices "keyword_index" (allSplices (_scirs st))
| redfish64/IrcScanner | src/IrcScanner/KeywordIndexPage.hs | bsd-3-clause | 1,353 | 0 | 18 | 270 | 333 | 181 | 152 | 29 | 1 |
{-# LANGUAGE CPP #-}
-- -fno-warn-deprecations for use of Map.foldWithKey
{-# OPTIONS_GHC -fno-warn-deprecations #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.PackageDescription.Configuration
-- Copyright : Thomas Schilling, 2007
-- License : BSD3
--
-- Maintainer : [email protected]
-- Portability : portable
--
-- This is about the cabal configurations feature. It exports
-- 'finalizePackageDescription' and 'flattenPackageDescription' which are
-- functions for converting 'GenericPackageDescription's down to
-- 'PackageDescription's. It has code for working with the tree of conditions
-- and resolving or flattening conditions.
module Distribution.PackageDescription.Configuration (
finalizePackageDescription,
flattenPackageDescription,
-- Utils
parseCondition,
freeVars,
mapCondTree,
mapTreeData,
mapTreeConds,
mapTreeConstrs,
) where
import Distribution.Package
( PackageName, Dependency(..) )
import Distribution.PackageDescription
( GenericPackageDescription(..), PackageDescription(..)
, Library(..), Executable(..), BuildInfo(..)
, Flag(..), FlagName(..), FlagAssignment
, Benchmark(..), CondTree(..), ConfVar(..), Condition(..)
, TestSuite(..) )
import Distribution.PackageDescription.Utils
( cabalBug, userBug )
import Distribution.Version
( VersionRange, anyVersion, intersectVersionRanges, withinRange )
import Distribution.Compiler
( CompilerId(CompilerId) )
import Distribution.System
( Platform(..), OS, Arch )
import Distribution.Simple.Utils
( currentDir, lowercase )
import Distribution.Simple.Compiler
( CompilerInfo(..) )
import Distribution.Text
( Text(parse) )
import Distribution.Compat.ReadP as ReadP hiding ( char )
import Control.Arrow (first)
import qualified Distribution.Compat.ReadP as ReadP ( char )
import Data.Char ( isAlphaNum )
import Data.Maybe ( mapMaybe, maybeToList )
import Data.Map ( Map, fromListWith, toList )
import qualified Data.Map as Map
#if __GLASGOW_HASKELL__ < 710
import Data.Monoid
#endif
------------------------------------------------------------------------------
-- | Simplify the condition and return its free variables.
simplifyCondition :: Condition c
-> (c -> Either d Bool) -- ^ (partial) variable assignment
-> (Condition d, [d])
simplifyCondition cond i = fv . walk $ cond
where
walk cnd = case cnd of
Var v -> either Var Lit (i v)
Lit b -> Lit b
CNot c -> case walk c of
Lit True -> Lit False
Lit False -> Lit True
c' -> CNot c'
COr c d -> case (walk c, walk d) of
(Lit False, d') -> d'
(Lit True, _) -> Lit True
(c', Lit False) -> c'
(_, Lit True) -> Lit True
(c',d') -> COr c' d'
CAnd c d -> case (walk c, walk d) of
(Lit False, _) -> Lit False
(Lit True, d') -> d'
(_, Lit False) -> Lit False
(c', Lit True) -> c'
(c',d') -> CAnd c' d'
-- gather free vars
fv c = (c, fv' c)
fv' c = case c of
Var v -> [v]
Lit _ -> []
CNot c' -> fv' c'
COr c1 c2 -> fv' c1 ++ fv' c2
CAnd c1 c2 -> fv' c1 ++ fv' c2
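-- A small illustration (not part of the original module): with the partial
-- assignment @Left@ (i.e. nothing is known about any variable), the condition
-- @CAnd (Var "a") (Lit True)@ simplifies to @Var "a"@ and reports @["a"]@ as
-- its free variables:
--
-- > simplifyCondition (CAnd (Var "a") (Lit True)) Left
-- >   -- evaluates to (Var "a", ["a"])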
-- | Simplify a configuration condition using the OS and arch names. Returns
-- the names of all the flags occurring in the condition.
simplifyWithSysParams :: OS -> Arch -> CompilerInfo -> Condition ConfVar
-> (Condition FlagName, [FlagName])
simplifyWithSysParams os arch cinfo cond = (cond', flags)
where
(cond', flags) = simplifyCondition cond interp
interp (OS os') = Right $ os' == os
interp (Arch arch') = Right $ arch' == arch
interp (Impl comp vr)
| matchImpl (compilerInfoId cinfo) = Right True
| otherwise = case compilerInfoCompat cinfo of
-- fixme: treat Nothing as unknown, rather than empty list once we
-- support partial resolution of system parameters
Nothing -> Right False
Just compat -> Right (any matchImpl compat)
where
matchImpl (CompilerId c v) = comp == c && v `withinRange` vr
interp (Flag f) = Left f
-- TODO: Add instances and check
--
-- prop_sC_idempotent cond a o = cond' == cond''
-- where
-- cond' = simplifyCondition cond a o
-- cond'' = simplifyCondition cond' a o
--
-- prop_sC_noLits cond a o = isLit res || not (hasLits res)
-- where
-- res = simplifyCondition cond a o
-- hasLits (Lit _) = True
-- hasLits (CNot c) = hasLits c
-- hasLits (COr l r) = hasLits l || hasLits r
-- hasLits (CAnd l r) = hasLits l || hasLits r
-- hasLits _ = False
--
-- | Parse a configuration condition from a string.
parseCondition :: ReadP r (Condition ConfVar)
parseCondition = condOr
where
condOr = sepBy1 condAnd (oper "||") >>= return . foldl1 COr
    condAnd  = sepBy1 cond (oper "&&") >>= return . foldl1 CAnd
cond = sp >> (boolLiteral +++ inparens condOr +++ notCond +++ osCond
+++ archCond +++ flagCond +++ implCond )
inparens = between (ReadP.char '(' >> sp) (sp >> ReadP.char ')' >> sp)
notCond = ReadP.char '!' >> sp >> cond >>= return . CNot
osCond = string "os" >> sp >> inparens osIdent >>= return . Var
archCond = string "arch" >> sp >> inparens archIdent >>= return . Var
flagCond = string "flag" >> sp >> inparens flagIdent >>= return . Var
implCond = string "impl" >> sp >> inparens implIdent >>= return . Var
boolLiteral = fmap Lit parse
archIdent = fmap Arch parse
osIdent = fmap OS parse
flagIdent = fmap (Flag . FlagName . lowercase) (munch1 isIdentChar)
isIdentChar c = isAlphaNum c || c == '_' || c == '-'
oper s = sp >> string s >> sp
sp = skipSpaces
implIdent = do i <- parse
vr <- sp >> option anyVersion parse
return $ Impl i vr
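-- A hypothetical usage sketch (not part of the original module): the parser
-- can be run with a ReadP runner such as 'readP_to_S', e.g.
--
-- > readP_to_S parseCondition "os(linux) && flag(debug)"
--
-- which should yield @CAnd (Var (OS Linux)) (Var (Flag (FlagName "debug")))@
-- among its parses.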
------------------------------------------------------------------------------
mapCondTree :: (a -> b) -> (c -> d) -> (Condition v -> Condition w)
-> CondTree v c a -> CondTree w d b
mapCondTree fa fc fcnd (CondNode a c ifs) =
CondNode (fa a) (fc c) (map g ifs)
where
g (cnd, t, me) = (fcnd cnd, mapCondTree fa fc fcnd t,
fmap (mapCondTree fa fc fcnd) me)
mapTreeConstrs :: (c -> d) -> CondTree v c a -> CondTree v d a
mapTreeConstrs f = mapCondTree id f id
mapTreeConds :: (Condition v -> Condition w) -> CondTree v c a -> CondTree w c a
mapTreeConds f = mapCondTree id id f
mapTreeData :: (a -> b) -> CondTree v c a -> CondTree v c b
mapTreeData f = mapCondTree f id id
-- | Result of dependency test. Isomorphic to @Maybe d@ but renamed for
-- clarity.
data DepTestRslt d = DepOk | MissingDeps d
instance Monoid d => Monoid (DepTestRslt d) where
mempty = DepOk
mappend DepOk x = x
mappend x DepOk = x
mappend (MissingDeps d) (MissingDeps d') = MissingDeps (d `mappend` d')
data BT a = BTN a | BTB (BT a) (BT a) -- very simple binary tree
-- | Try to find a flag assignment that satisfies the constraints of all trees.
--
-- Returns either the missing dependencies, or a tuple containing the
-- resulting data, the associated dependencies, and the chosen flag
-- assignments.
--
-- In case of failure, the _smallest_ number of missing dependencies is
-- returned. [TODO: Could also be specified with a function argument.]
--
-- TODO: The current algorithm is rather naive. A better approach would be to:
--
-- * Rule out possible paths, by taking a look at the associated dependencies.
--
-- * Infer the required values for the conditions of these paths, and
-- calculate the required domains for the variables used in these
-- conditions. Then picking a flag assignment would be linear (I guess).
--
-- This would require some sort of SAT solving, though, thus it's not
-- implemented unless we really need it.
--
resolveWithFlags ::
[(FlagName,[Bool])]
-- ^ Domain for each flag name, will be tested in order.
-> OS -- ^ OS as returned by Distribution.System.buildOS
-> Arch -- ^ Arch as returned by Distribution.System.buildArch
-> CompilerInfo -- ^ Compiler information
-> [Dependency] -- ^ Additional constraints
-> [CondTree ConfVar [Dependency] PDTagged]
-> ([Dependency] -> DepTestRslt [Dependency]) -- ^ Dependency test function.
-> Either [Dependency] (TargetSet PDTagged, FlagAssignment)
-- ^ Either the missing dependencies (error case), or a pair of
-- (set of build targets with dependencies, chosen flag assignments)
resolveWithFlags dom os arch impl constrs trees checkDeps =
case try dom [] of
Right r -> Right r
Left dbt -> Left $ findShortest dbt
where
extraConstrs = toDepMap constrs
-- simplify trees by (partially) evaluating all conditions and converting
-- dependencies to dependency maps.
simplifiedTrees = map ( mapTreeConstrs toDepMap -- convert to maps
. mapTreeConds (fst . simplifyWithSysParams os arch impl))
trees
-- @try@ recursively tries all possible flag assignments in the domain and
-- either succeeds or returns a binary tree with the missing dependencies
-- encountered in each run. Since the tree is constructed lazily, we
-- avoid some computation overhead in the successful case.
try [] flags =
let targetSet = TargetSet $ flip map simplifiedTrees $
-- apply additional constraints to all dependencies
first (`constrainBy` extraConstrs) .
simplifyCondTree (env flags)
deps = overallDependencies targetSet
in case checkDeps (fromDepMap deps) of
DepOk -> Right (targetSet, flags)
MissingDeps mds -> Left (BTN mds)
try ((n, vals):rest) flags =
tryAll $ map (\v -> try rest ((n, v):flags)) vals
tryAll = foldr mp mz
-- special version of `mplus' for our local purposes
mp (Left xs) (Left ys) = (Left (BTB xs ys))
mp (Left _) m@(Right _) = m
mp m@(Right _) _ = m
-- `mzero'
mz = Left (BTN [])
env flags flag = (maybe (Left flag) Right . lookup flag) flags
-- for the error case we inspect our lazy tree of missing dependencies and
-- pick the shortest list of missing dependencies
findShortest (BTN x) = x
findShortest (BTB lt rt) =
let l = findShortest lt
r = findShortest rt
in case (l,r) of
([], xs) -> xs -- [] is too short
(xs, []) -> xs
([x], _) -> [x] -- single elem is optimum
(_, [x]) -> [x]
(xs, ys) -> if lazyLengthCmp xs ys
then xs else ys
-- lazy variant of @\xs ys -> length xs <= length ys@
lazyLengthCmp [] _ = True
lazyLengthCmp _ [] = False
lazyLengthCmp (_:xs) (_:ys) = lazyLengthCmp xs ys
-- | A map of dependencies. Newtyped since the default monoid instance is not
-- appropriate. The monoid instance uses 'intersectVersionRanges'.
newtype DependencyMap = DependencyMap { unDependencyMap :: Map PackageName VersionRange }
deriving (Show, Read)
instance Monoid DependencyMap where
mempty = DependencyMap Map.empty
(DependencyMap a) `mappend` (DependencyMap b) =
DependencyMap (Map.unionWith intersectVersionRanges a b)
toDepMap :: [Dependency] -> DependencyMap
toDepMap ds =
DependencyMap $ fromListWith intersectVersionRanges [ (p,vr) | Dependency p vr <- ds ]
fromDepMap :: DependencyMap -> [Dependency]
fromDepMap m = [ Dependency p vr | (p,vr) <- toList (unDependencyMap m) ]
simplifyCondTree :: (Monoid a, Monoid d) =>
(v -> Either v Bool)
-> CondTree v d a
-> (d, a)
simplifyCondTree env (CondNode a d ifs) =
mconcat $ (d, a) : mapMaybe simplifyIf ifs
where
simplifyIf (cnd, t, me) =
case simplifyCondition cnd env of
(Lit True, _) -> Just $ simplifyCondTree env t
(Lit False, _) -> fmap (simplifyCondTree env) me
_ -> error $ "Environment not defined for all free vars"
-- | Flatten a CondTree. This will resolve the CondTree by taking all
-- possible paths into account. Note that since branches represent exclusive
-- choices this may not result in a \"sane\" result.
ignoreConditions :: (Monoid a, Monoid c) => CondTree v c a -> (a, c)
ignoreConditions (CondNode a c ifs) = (a, c) `mappend` mconcat (concatMap f ifs)
where f (_, t, me) = ignoreConditions t
: maybeToList (fmap ignoreConditions me)
freeVars :: CondTree ConfVar c a -> [FlagName]
freeVars t = [ f | Flag f <- freeVars' t ]
where
freeVars' (CondNode _ _ ifs) = concatMap compfv ifs
compfv (c, ct, mct) = condfv c ++ freeVars' ct ++ maybe [] freeVars' mct
condfv c = case c of
Var v -> [v]
Lit _ -> []
CNot c' -> condfv c'
COr c1 c2 -> condfv c1 ++ condfv c2
CAnd c1 c2 -> condfv c1 ++ condfv c2
------------------------------------------------------------------------------
-- | A set of targets with their package dependencies
newtype TargetSet a = TargetSet [(DependencyMap, a)]
-- | Combine the target-specific dependencies in a TargetSet to give the
-- dependencies for the package as a whole.
overallDependencies :: TargetSet PDTagged -> DependencyMap
overallDependencies (TargetSet targets) = mconcat depss
where
(depss, _) = unzip $ filter (removeDisabledSections . snd) targets
removeDisabledSections :: PDTagged -> Bool
removeDisabledSections (Lib _) = True
removeDisabledSections (Exe _ _) = True
removeDisabledSections (Test _ t) = testEnabled t
removeDisabledSections (Bench _ b) = benchmarkEnabled b
removeDisabledSections PDNull = True
-- Apply extra constraints to a dependency map.
-- Combines dependencies where the result will only contain keys from the left
-- (first) map. If a key also exists in the right map, both constraints will
-- be intersected.
constrainBy :: DependencyMap -- ^ Input map
-> DependencyMap -- ^ Extra constraints
-> DependencyMap
constrainBy left extra =
DependencyMap $
Map.foldWithKey tightenConstraint (unDependencyMap left)
(unDependencyMap extra)
where tightenConstraint n c l =
case Map.lookup n l of
Nothing -> l
Just vr -> Map.insert n (intersectVersionRanges vr c) l
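-- A small illustration (not part of the original module), with @pkgA@, @pkgB@,
-- @vrA@ and @vrB@ standing in for arbitrary package names and version ranges:
--
-- > toDepMap [Dependency pkgA anyVersion]
-- >   `constrainBy` toDepMap [Dependency pkgA vrA, Dependency pkgB vrB]
--
-- keeps only the entry for @pkgA@, tightened to
-- @intersectVersionRanges anyVersion vrA@; the extra constraint on @pkgB@ is
-- dropped because @pkgB@ does not occur in the left map.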
-- | Collect up the targets in a TargetSet of tagged targets, storing the
-- dependencies as we go.
flattenTaggedTargets :: TargetSet PDTagged ->
(Maybe Library, [(String, Executable)], [(String, TestSuite)]
, [(String, Benchmark)])
flattenTaggedTargets (TargetSet targets) = foldr untag (Nothing, [], [], []) targets
where
untag (_, Lib _) (Just _, _, _, _) = userBug "Only one library expected"
untag (deps, Lib l) (Nothing, exes, tests, bms) =
(Just l', exes, tests, bms)
where
l' = l {
libBuildInfo = (libBuildInfo l) { targetBuildDepends = fromDepMap deps }
}
untag (deps, Exe n e) (mlib, exes, tests, bms)
| any ((== n) . fst) exes =
userBug $ "There exist several exes with the same name: '" ++ n ++ "'"
| any ((== n) . fst) tests =
userBug $ "There exists a test with the same name as an exe: '" ++ n ++ "'"
| any ((== n) . fst) bms =
userBug $ "There exists a benchmark with the same name as an exe: '" ++ n ++ "'"
| otherwise = (mlib, (n, e'):exes, tests, bms)
where
e' = e {
buildInfo = (buildInfo e) { targetBuildDepends = fromDepMap deps }
}
untag (deps, Test n t) (mlib, exes, tests, bms)
| any ((== n) . fst) tests =
userBug $ "There exist several tests with the same name: '" ++ n ++ "'"
| any ((== n) . fst) exes =
userBug $ "There exists an exe with the same name as the test: '" ++ n ++ "'"
| any ((== n) . fst) bms =
userBug $ "There exists a benchmark with the same name as the test: '" ++ n ++ "'"
| otherwise = (mlib, exes, (n, t'):tests, bms)
where
t' = t {
testBuildInfo = (testBuildInfo t)
{ targetBuildDepends = fromDepMap deps }
}
untag (deps, Bench n b) (mlib, exes, tests, bms)
| any ((== n) . fst) bms =
userBug $ "There exist several benchmarks with the same name: '" ++ n ++ "'"
| any ((== n) . fst) exes =
userBug $ "There exists an exe with the same name as the benchmark: '" ++ n ++ "'"
| any ((== n) . fst) tests =
userBug $ "There exists a test with the same name as the benchmark: '" ++ n ++ "'"
| otherwise = (mlib, exes, tests, (n, b'):bms)
where
b' = b {
benchmarkBuildInfo = (benchmarkBuildInfo b)
{ targetBuildDepends = fromDepMap deps }
}
untag (_, PDNull) x = x -- actually this should not happen, but let's be liberal
------------------------------------------------------------------------------
-- Convert GenericPackageDescription to PackageDescription
--
data PDTagged = Lib Library
| Exe String Executable
| Test String TestSuite
| Bench String Benchmark
| PDNull
deriving Show
instance Monoid PDTagged where
mempty = PDNull
PDNull `mappend` x = x
x `mappend` PDNull = x
Lib l `mappend` Lib l' = Lib (l `mappend` l')
Exe n e `mappend` Exe n' e' | n == n' = Exe n (e `mappend` e')
Test n t `mappend` Test n' t' | n == n' = Test n (t `mappend` t')
Bench n b `mappend` Bench n' b' | n == n' = Bench n (b `mappend` b')
_ `mappend` _ = cabalBug "Cannot combine incompatible tags"
-- | Create a package description with all configurations resolved.
--
-- This function takes a `GenericPackageDescription` and several environment
-- parameters and tries to generate `PackageDescription` by finding a flag
-- assignment that results in satisfiable dependencies.
--
-- It takes as inputs a not necessarily complete specification of flag
-- assignments, an optional package index as well as platform parameters. If
-- some flags are not assigned explicitly, this function will try to pick an
-- assignment that causes this function to succeed. The package index is
-- optional since on some platforms we cannot determine which packages have
-- been installed before. When no package index is supplied, every dependency
-- is assumed to be satisfiable, therefore all not explicitly assigned flags
-- will get their default values.
--
-- This function will fail if it cannot find a flag assignment that leads to
-- satisfiable dependencies. (It will not try alternative assignments for
-- explicitly specified flags.) In case of failure it will return a /minimum/
-- number of dependencies that could not be satisfied. On success, it will
-- return the package description and the full flag assignment chosen.
--
finalizePackageDescription ::
FlagAssignment -- ^ Explicitly specified flag assignments
-> (Dependency -> Bool) -- ^ Is a given dependency satisfiable from the set of
-- available packages? If this is unknown then use
-- True.
-> Platform -- ^ The 'Arch' and 'OS'
-> CompilerInfo -- ^ Compiler information
-> [Dependency] -- ^ Additional constraints
-> GenericPackageDescription
-> Either [Dependency]
(PackageDescription, FlagAssignment)
-- ^ Either missing dependencies or the resolved package
-- description along with the flag assignments chosen.
finalizePackageDescription userflags satisfyDep
(Platform arch os) impl constraints
(GenericPackageDescription pkg flags mlib0 exes0 tests0 bms0) =
case resolveFlags of
Right ((mlib, exes', tests', bms'), targetSet, flagVals) ->
Right ( pkg { library = mlib
, executables = exes'
, testSuites = tests'
, benchmarks = bms'
, buildDepends = fromDepMap (overallDependencies targetSet)
--TODO: we need to find a way to avoid pulling in deps
-- for non-buildable components. However cannot simply
-- filter at this stage, since if the package were not
-- available we would have failed already.
}
, flagVals )
Left missing -> Left missing
where
    -- Combine lib, exes, tests, and benchmarks into one list of @CondTree@s with tagged data
condTrees = maybeToList (fmap (mapTreeData Lib) mlib0 )
++ map (\(name,tree) -> mapTreeData (Exe name) tree) exes0
++ map (\(name,tree) -> mapTreeData (Test name) tree) tests0
++ map (\(name,tree) -> mapTreeData (Bench name) tree) bms0
resolveFlags =
case resolveWithFlags flagChoices os arch impl constraints condTrees check of
Right (targetSet, fs) ->
let (mlib, exes, tests, bms) = flattenTaggedTargets targetSet in
Right ( (fmap libFillInDefaults mlib,
map (\(n,e) -> (exeFillInDefaults e) { exeName = n }) exes,
map (\(n,t) -> (testFillInDefaults t) { testName = n }) tests,
map (\(n,b) -> (benchFillInDefaults b) { benchmarkName = n }) bms),
targetSet, fs)
Left missing -> Left missing
flagChoices = map (\(MkFlag n _ d manual) -> (n, d2c manual n d)) flags
d2c manual n b = case lookup n userflags of
Just val -> [val]
Nothing
| manual -> [b]
| otherwise -> [b, not b]
--flagDefaults = map (\(n,x:_) -> (n,x)) flagChoices
check ds = let missingDeps = filter (not . satisfyDep) ds
in if null missingDeps
then DepOk
else MissingDeps missingDeps
{-
let tst_p = (CondNode [1::Int] [Distribution.Package.Dependency "a" AnyVersion] [])
let tst_p2 = (CondNode [1::Int] [Distribution.Package.Dependency "a" (EarlierVersion (Version [1,0] [])), Distribution.Package.Dependency "a" (LaterVersion (Version [2,0] []))] [])
let p_index = Distribution.Simple.PackageIndex.fromList [Distribution.Package.PackageIdentifier "a" (Version [0,5] []), Distribution.Package.PackageIdentifier "a" (Version [2,5] [])]
let look = not . null . Distribution.Simple.PackageIndex.lookupDependency p_index
let looks ds = mconcat $ map (\d -> if look d then DepOk else MissingDeps [d]) ds
resolveWithFlags [] Distribution.System.Linux Distribution.System.I386 (Distribution.Compiler.GHC,Version [6,8,2] []) [tst_p] looks ===> Right ...
resolveWithFlags [] Distribution.System.Linux Distribution.System.I386 (Distribution.Compiler.GHC,Version [6,8,2] []) [tst_p2] looks ===> Left ...
-}
-- | Flatten a generic package description by ignoring all conditions and just
-- joining the field descriptors into one package description.  Note, however,
-- that this may lead to inconsistent field values, since all values are
-- joined into one field, which may not be possible in the original package
-- description, due to the use of exclusive choices (if ... else ...).
--
-- TODO: One particularly tricky case is defaulting. In the original package
-- description, e.g., the source directory might either be the default or a
-- certain, explicitly set path. Since defaults are filled in only after the
-- package has been resolved and when no explicit value has been set, the
-- default path will be missing from the package description returned by this
-- function.
flattenPackageDescription :: GenericPackageDescription -> PackageDescription
flattenPackageDescription (GenericPackageDescription pkg _ mlib0 exes0 tests0 bms0) =
pkg { library = mlib
, executables = reverse exes
, testSuites = reverse tests
, benchmarks = reverse bms
, buildDepends = ldeps ++ reverse edeps ++ reverse tdeps ++ reverse bdeps
}
where
(mlib, ldeps) = case mlib0 of
Just lib -> let (l,ds) = ignoreConditions lib in
(Just (libFillInDefaults l), ds)
Nothing -> (Nothing, [])
(exes, edeps) = foldr flattenExe ([],[]) exes0
(tests, tdeps) = foldr flattenTst ([],[]) tests0
(bms, bdeps) = foldr flattenBm ([],[]) bms0
flattenExe (n, t) (es, ds) =
let (e, ds') = ignoreConditions t in
( (exeFillInDefaults $ e { exeName = n }) : es, ds' ++ ds )
flattenTst (n, t) (es, ds) =
let (e, ds') = ignoreConditions t in
( (testFillInDefaults $ e { testName = n }) : es, ds' ++ ds )
flattenBm (n, t) (es, ds) =
let (e, ds') = ignoreConditions t in
( (benchFillInDefaults $ e { benchmarkName = n }) : es, ds' ++ ds )
-- This is in fact rather a hack. The original version just overrode the
-- default values, however, when adding conditions we had to switch to a
-- modifier-based approach. There, nothing is ever overwritten, but only
-- joined together.
--
-- This is the cleanest way I could think of that doesn't require
-- changing all field parsing functions to return modifiers instead.
libFillInDefaults :: Library -> Library
libFillInDefaults lib@(Library { libBuildInfo = bi }) =
lib { libBuildInfo = biFillInDefaults bi }
exeFillInDefaults :: Executable -> Executable
exeFillInDefaults exe@(Executable { buildInfo = bi }) =
exe { buildInfo = biFillInDefaults bi }
testFillInDefaults :: TestSuite -> TestSuite
testFillInDefaults tst@(TestSuite { testBuildInfo = bi }) =
tst { testBuildInfo = biFillInDefaults bi }
benchFillInDefaults :: Benchmark -> Benchmark
benchFillInDefaults bm@(Benchmark { benchmarkBuildInfo = bi }) =
bm { benchmarkBuildInfo = biFillInDefaults bi }
biFillInDefaults :: BuildInfo -> BuildInfo
biFillInDefaults bi =
if null (hsSourceDirs bi)
then bi { hsSourceDirs = [currentDir] }
else bi
| Peaker/cabal | Cabal/Distribution/PackageDescription/Configuration.hs | bsd-3-clause | 26,545 | 0 | 20 | 7,210 | 6,254 | 3,379 | 2,875 | 371 | 19 |
module FunctionsExpr (
madd,
prefixadd,
msubtract,
mproduct,
mif,
mcharif,
mnot,
appendString,
concatenate,
firstInt,
tailInt,
msum
) where
madd :: Int -> Int -> Int
--Add 2 numbers
madd x y = undefined
prefixadd :: Int -> Int -> Int
prefixadd x y = (+) undefined undefined
msubtract :: Int -> Int -> Int
--Subtract 2 numbers
msubtract x y = undefined
mproduct :: Int -> Int -> Int
--Product of 2 numbers
mproduct x y = undefined
mif :: Int -> Bool
--If `x` > 15 return True else False
mif x = if undefined then undefined else undefined
mcharif :: Char -> Bool
--If `c` == 'i' return True else False
mcharif c = if undefined then undefined else undefined
mnot :: Bool -> Bool
mnot x = if undefined then undefined else undefined
appendString :: [Char] -> [Char] -> [Char]
--Given 2 strings, returns a single string that is a concatenation of both input strings
appendString = (++)
concatenate :: [Char] -> [Char] -> [Char]
--Implement this function in terms of `appendString`
concatenate s1 s2 = undefined
firstInt :: [Int] -> Int
--Returns the first element of a list
firstInt = head
tailInt :: [Int] -> [Int]
--Returns all elements of list except first.
tailInt = tail
msum :: [Int] -> Int
--Define this function in terms of itself
--You have 2 helper functions defined for you.
--`firstInt` and `tailInt`
msum x = if length x == 0
then undefined
else undefined
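-- Hypothetical reference solutions (not part of the original exercise file),
-- kept as comments so the exercises above remain unsolved:
--
-- > madd x y = x + y
-- > mif x = x > 15
-- > mcharif c = c == 'i'
-- > concatenate s1 s2 = appendString s1 s2
-- > msum x = if length x == 0 then 0 else firstInt x + msum (tailInt x)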
| ajjaic/iit-workshop | src/FunctionsExpr.hs | bsd-3-clause | 1,452 | 0 | 7 | 332 | 361 | 211 | 150 | 39 | 2 |
module System.Hardware.Z21 (
module System.Hardware.Z21.Types
, module System.Hardware.Z21.Actions
) where
import System.Hardware.Z21.Types
import System.Hardware.Z21.Actions
| akru/z21-hs | src/System/Hardware/Z21.hs | bsd-3-clause | 185 | 0 | 5 | 24 | 39 | 28 | 11 | 5 | 0 |
{-# OPTIONS_GHC -Wall #-}
{-# LANGUAGE CPP #-}
module Language.Sh.Arithmetic ( runMathParser ) where
-- This doesn't depend on any expansion at all...
import Text.ParserCombinators.Parsec
import Text.ParserCombinators.Parsec.Language
import Text.ParserCombinators.Parsec.Expr
import qualified Text.ParserCombinators.Parsec.Token as P
import Data.Bits ( shiftL, shiftR, complement, xor, (.&.), (.|.) )
import Data.List ( unionBy )
import Data.Maybe ( fromMaybe )
import Language.Sh.Compat ( on )
type SS = [(String,String)]
type SI = [(String,Int)]
type AP a = CharParser SS a -- just keep this state... - update when we can
data Term = Literal SI Int | Variable String | Error String
deriving ( Show )
runMathParser :: SS -> String -> Either String (Int,SI)
runMathParser subs s = case runParser exprSubs (subs) "" s of
Left err -> Left $ show err
Right x -> Right x
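-- A hypothetical usage sketch (not part of the original module):
--
-- > runMathParser [("x","2")] "x + 3"    -- Right (5, [])
-- > runMathParser [("x","2")] "x += 3"   -- Right (5, [("x",5)])
--
-- where the second component of the result records the variable assignments
-- performed while evaluating the expression.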
joinS :: Eq a => [(a,b)] -> [(a,b)] -> [(a,b)]
joinS = unionBy ((==) `on` fst)
mapS :: (b -> c) -> [(a,b)] -> [(a,c)]
mapS f = map $ \(a,b)->(a,f b)
-- after a buildExprParser, we'll check the new assignments and make them...
exprSubs :: AP (Int,SI)
exprSubs = do e <- expr
--e <- parens (string "1+2") >> return (Literal [] 3)
--string "(1+2)"
--let e = Literal [] 3
eof
case e of
Literal subs i -> return (i,subs)
Variable s -> do ss <- getState
let val = fromMaybe "0" $ lookup s ss
case runMathParser ss val of
Left err -> fail err
Right (i,si) -> return (i,si)
Error err -> fail err
lexer :: P.TokenParser st
lexer = P.makeTokenParser $
emptyDef {identLetter = alphaNum <|> char '_'
, opStart = oneOf [] -- no nonreserved operators
, opLetter = oneOf []
, reservedOpNames= ["++","+","--","-","*","/","%","^"
,"|","||","&","&&","<<",">>"
,"<","<=",">",">=","==","=","!=","!","~"
,"?",":"
,"+=","-=","*=","/=","%=","|=","&="
,"^=","<<=",">>="]
}
parens :: AP a -> AP a
parens = P.parens lexer -- P is the qualified Parsec Token module imported above
whiteSpace :: AP ()
whiteSpace = P.whiteSpace lexer
hexadecimal :: Integral a => AP a
hexadecimal = fromIntegral `fmap` P.hexadecimal lexer
decimal :: Integral a => AP a
decimal = fromIntegral `fmap` P.decimal lexer
reservedOp :: String -> AP ()
reservedOp = P.reservedOp lexer
identifier :: AP String
identifier = P.identifier lexer
natural :: (Integral a, Read a) => AP a
natural = do n <- octal <|> decimal <|> hexadecimal
whiteSpace
return n
where octal = do char '0'
bo 0
bo n = do d <- oneOf "01234567" <?> "octal digit"
return $ 8*n + read [d]
<|> return n
mapT :: (Int -> Int) -> Term -> Term
mapT _ (Error err) = Error err
mapT _ (Variable v) = Error $ "impossible: unexpanded variable: "++v
mapT f (Literal s i) = Literal s $ f i
mapT2 :: (Int -> Int -> Int) -> Term -> Term -> Term
mapT2 _ (Error err) _ = Error err
mapT2 _ (Variable v) _ = Error $ "impossible: unexpanded variable: "++v
mapT2 _ _ (Error err) = Error err
mapT2 _ _ (Variable v) = Error $ "impossible: unexpanded variable: "++v
mapT2 f (Literal s1 i1) (Literal s2 i2) = Literal (s1 `joinS` s2) $ f i1 i2
expr1 :: AP Term
expr1 = buildExpressionParser table1 term
-- <?> "expression"
expr2 :: AP Term -- here's where we get the ternary operator
expr2 = try (do eIf <- expr1
reservedOp "?"
eThen <- expr1
reservedOp ":"
eElse <- expr1
ss <- getState
case expand ss eIf of
Error err -> return $ Error err
Literal si i -> return $ if (i/=0) then expandWith ss si eThen
else expandWith ss si eElse
Variable _ -> error "impossible"
) <|> expr1
where expandWith ss si t = case expand (mapS show si `joinS` ss) t of
Error err -> Error err
Literal si' i -> Literal (si `joinS` si') i
Variable _ -> error "impossible"
expr :: AP Term
expr = buildExpressionParser table2 expr2 -- short circuit
where
table2 = [ [op "=" $ flip const, op "*=" (*), op "/=" div
,op "%=" mod, op "+=" (+), op "-=" (-)]
, [op "<<=" shiftL, op ">>=" shiftR
,op "&=" (.&.), op "^=" xor, op "|=" (.|.)] ]
a2 :: (Int -> Int -> Int) -> AP (Term -> Term -> Term)
a2 = assignReturn2
-- a2's first Term MUST be a string... (else "assignment to non-variable")
op name fun = Infix (reservedOp name >> a2 fun) AssocLeft
-- In between tables 1 and 2: the ternary operator...
-- operators:
-- endTok = (`elem` " \t\r\n()+*-/%^|&")
term :: AP Term
term = parens expr
<|> fmap (Literal []) natural
<|> fmap Variable identifier
<?> "simple expression"
-- Type depends on which parsec we're using...
table1 :: OperatorTable Char SS Term
#ifdef HAVE_PARSEC_POSTFIX
table1 = [ [postfix "++" $ postinc (+1), postfix "--" $ postinc (+(-1))]
, [prefix "+" $ e1 id, prefix "-" $ e1 negate]
, [prefix "++" $ preinc (+1), prefix "--" $ preinc (+(-1))]
#else
table1 = [ [prefix "+" $ e1 id, prefix "-" $ e1 negate]
#endif
, [prefix "~" $ e1 complement,prefix "!" $ e1 $ b2i . not . i2b]
, [binary "*" $ e2 (*), binary "/" $ e2 div, binary "%" $ e2 mod]
, [binary "+" $ e2 (+), binary "-" $ e2 (-)]
, [binary "<<" $ e2 shiftL, binary ">>" $ e2 shiftR]
, [binary "<" $ e2 $ b2i .: (<), binary "<=" $ e2 $ b2i .: (<=)
,binary ">" $ e2 $ b2i .: (>), binary ">=" $ e2 $ b2i .: (>=)
,binary "==" $ e2 $ b2i .: (==), binary "!=" $ e2 $ b2i .: (/=)]
, [binary "&" $ e2 (.&.)]
, [binary "^" $ e2 xor]
, [binary "|" $ e2 (.|.)]
, [binary "&&" $ e2 $ b2 (&&)]
, [binary "||" $ e2 $ b2 (||)] ]
where e1 :: (Int -> Int) -> AP (Term -> Term)
e1 f = do ss <- getState
return $ mapT f . expand ss
e2 :: (Int -> Int -> Int) -> AP (Term -> Term -> Term)
e2 f = do ss <- getState
return $ \t1 t2 -> mapT2 f (expand ss t1) (expand ss t2)
b2 :: (Bool -> Bool -> Bool) -> Int -> Int -> Int
b2 f i j = b2i $ f (i2b i) (i2b j)
i2b i = if i==0 then False else True
b2i b = if b then 1 else 0
(.:) f g a b = f $ g a b -- (c -> d) -> (a -> b -> c) -> a -> b -> d
ro name = try (reservedOp name >> notFollowedBy (char '='))
binary name fun = Infix (ro name >> fun) AssocLeft
prefix name fun = Prefix (reservedOp name >> fun)
#ifdef HAVE_PARSEC_POSTFIX
postfix name fun = Postfix (ro name >> fun)
#endif
expand :: SS -> Term -> Term
expand _ (Error err) = Error err
expand _ (Literal s i) = Literal s i
expand subs (Variable name) =
case lookup name subs of
Nothing -> Literal [] 0
Just s -> case runMathParser subs s of
Left err -> Error err
Right (i,si) -> Literal si i
postinc,preinc :: (Int -> Int) -> AP (Term -> Term)
postinc f = assignReturn $ \i -> (f i,i)
preinc f = assignReturn $ \i -> (f i,f i)
-- These are some weird helper functions.
assignReturn' :: SS -> SI -> (Int -> (Int,Int)) -> (Term -> Term)
assignReturn' ss si f = ar
where ar (Error err) = Error err
ar (Literal _ i) = Error $ "assignment to non-variable: "++show i
ar (Variable v) = let val = fromMaybe "0" $ lookup v ss
in case runMathParser ss val of
Left err -> Error err
Right (i,si') ->
let (ass,ret) = f i
si'' = [(v,ass)] `joinS` si' `joinS` si
in Literal si'' ret
assignReturn :: (Int -> (Int,Int)) -> AP (Term -> Term)
assignReturn f = do ss <- getState
return $ assignReturn' ss [] f
assignReturn2 :: (Int -> Int -> Int) -> AP (Term -> Term -> Term)
assignReturn2 f = ar `fmap` getState
where ar ss t t' = let t'' = expand ss t'
in case t'' of
Error err -> Error err
Literal si j ->
assignReturn' ss si (\i -> (f i j,f i j)) t
Variable _ -> error "impossible"
| shicks/shsh | Language/Sh/Arithmetic.hs | bsd-3-clause | 9,008 | 0 | 19 | 3,287 | 3,284 | 1,719 | 1,565 | 172 | 6 |
module Database.CQL.Protocol.Extra where
import Database.CQL.Protocol
( QueryParams(..)
, Consistency (One, LocalQuorum)
)
defQueryParams :: a -> QueryParams a
defQueryParams a = QueryParams One True a Nothing Nothing Nothing
defQueryParamsMeta :: a -> QueryParams a
defQueryParamsMeta a = QueryParams One False a Nothing Nothing Nothing
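-- A hypothetical usage sketch (not part of the original module): binding two
-- positional values with the default parameters,
--
-- > let ps = defQueryParams ("alice", 42 :: Int)
--
-- 'defQueryParamsMeta' builds the same parameters but leaves the skip-metadata
-- flag off, so the server is asked to include result metadata.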
| onurzdg/clicklac | src/Database/CQL/Protocol/Extra.hs | bsd-3-clause | 384 | 0 | 6 | 86 | 98 | 55 | 43 | 8 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE FlexibleInstances #-}
module Web.Diamond.Types
where
import Data.Text(Text, pack, unpack)
import Data.Aeson.Types
import GHC.Generics
import Data.Maybe
import Web.HttpApiData
-- dummy for now, not sure if we will ever implement it
data SearchResult =
SearchResult {
}
deriving (Eq, Read, Show, Generic)
-- | successful action response from Confluence. Solid specification of the
-- fields is lacking, so this is done on a by-need basis.
data CfResponse = CfResponse
{ id :: Text -- ^ object ID in Confluence
-- , type :: CfType -- very inconvenient name. Not required for now...
, status :: Text -- ^ status of content (often "current")
, title :: Text -- ^ title of content
, version :: Maybe CfVersion -- ^ current version information (incl. number)
-- not present in CfResponseList
-- , extensions :: CfObject -- ^ unclear what this is for
, _links :: CfObject -- ^ links related to the returned ID
, _expandable :: CfObject -- ^ more details (must be requested by
-- "?expand=item1,item2,..."), not implemented
-- expandable things, missing in the CfResponseList for rest/api/content
-- , space :: Maybe CfObject -- ^ containing Conf.space
-- , history :: Maybe CfObject -- ^ editing metadata (creator, time etc)
-- editing (updating) a page returned additionally
-- , ancestors :: [ CfObject ] -- ^ hierarchy (or is it? :-) between things
-- , container :: CfObject -- ^ the containing space
, body :: Maybe CfBody -- ^ page body within "body.storage.value"
} deriving (Eq, Read, Show, Generic)
instance ToJSON CfResponse
instance FromJSON CfResponse
-- | CfObject is a "we don't bother" object to avoid implementing specific
-- fields we won't be interested in. anything important or perceived as
-- reasonably stable in Confluence should become a specific type.
type CfObject = Object
-- | optional page body in a response (need to request "body.storage" to
-- expand it in a cfGet). Similar but not equal to body in a request :-)
data CfBody = CfBody { content :: Text }
deriving (Eq, Read, Show, Generic)
instance ToJSON CfBody where
toJSON CfBody{..} =
object [ "storage" .= object [ "representation" .= string "storage"
, "value" .= content
]
]
where string = Data.Aeson.Types.String
instance FromJSON CfBody where
parseJSON (Object o) = CfBody <$>
-- could check "representation": "storage"
((.: "value") =<< o .: "storage")
parseJSON other = typeMismatch "page body object" other
-- | list (including size and pagination information) of CfResponse items,
-- used in content listing. Some expandable fields not present in this response
data CfResponseList = CfResponseList
{ results :: [ CfResponse ]
, start :: Int
, limit :: Int
, size :: Int
, _links :: CfObject
} deriving (Eq, Read, Show, Generic)
instance ToJSON CfResponseList
instance FromJSON CfResponseList
----------------------------------------
-- | version information for confluence things
data CfVersion =
CfVersion { by :: CfPerson
, when :: CfTime
, number :: Int
, message :: Maybe Text
, minorEdit :: Bool
} deriving (Eq, Read, Show, Generic)
instance FromJSON CfVersion
instance ToJSON CfVersion
----------------------------------------
-- | models people in Confluence
data CfPerson =
CfPerson -- ^ a known confluence User, "type": "known"
{ username :: Text
, userKey :: Text
, profilePicture :: CfObject
, displayName :: Text
, _links :: CfObject -- ^ contains one `self` link to the person's page
}
-- | CfUnknown -- ^ type == ???
deriving (Eq, Read, Show, Generic)
instance FromJSON CfPerson
instance ToJSON CfPerson
-- will require distinction according to "type" field when CfUnknown is added
-- | Time representation for Confluence JSON. Example value
-- "2016-08-11T11:35:18.951+10:00"
type CfTime = Text
------------------------------------------------------------
-- Request data
-- | Page creation and update
data CfPageBody =
CfPageBody { title :: Text -- ^ page title, mandatory! also for updates
, ancestors :: [Int] -- ^ IDs, possibly empty (optional)
, space :: Maybe (Either Text Int) -- ^ space by key or ID
-- Required when creating
, body :: CfBody -- ^ page body in storage.value
, version :: Maybe Int -- ^ number, required (needs inc) on update
}
deriving (Eq, Read, Show, Generic)
instance FromJSON CfPageBody where
parseJSON (Object o)
= do title <- o .: "title"
ancestors <- maybe (return []) (mapM ( .: "id")) =<< o .:? "ancestors"
let spaceId (Object space') =
do key <- space' .:? "key"
iD <- space' .:? "id"
return $ case (key, iD) of
(_, Just n) -> Just (Right n) -- prefer iD
(Just k, _) -> Just (Left k)
other -> Nothing
space <- maybe (return Nothing) spaceId =<< o .:? "space"
version <- maybe (return Nothing) (.: "number") =<< o .:? "version"
body <- o .: "body"
return CfPageBody{..}
instance ToJSON CfPageBody where
toJSON CfPageBody{..}
= object $
[ "type" .= string "page"
, "title" .= title
, "body" .= body
]
++ catMaybes
[ "space" .=? fmap mkSpace space
, "version" .=? fmap (object . mkVersion) version
, "ancestors" .=? if null ancestors then Nothing
else Just [ object [ "id" .= n] | n <- ancestors ]
]
    where mkSpace :: Either Text Int -> Value
          -- encode the space reference as a nested object ({"key": ...} or
          -- {"id": ...}) so it matches what the FromJSON instance above expects
          mkSpace (Left key) = object [ "key" .= key ]
          mkSpace (Right iD) = object [ "id"  .= iD ]
mkVersion :: Int -> [Pair]
mkVersion n = [ "number" .= n ]
string = Data.Aeson.Types.String
(.=?) field = fmap (\mx -> field .= mx)
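-- A hypothetical illustration (not part of the original module) of the JSON
-- shape this instance is meant to produce for a page-creation body (all field
-- values are made up):
--
-- > { "type": "page", "title": "T",
-- >   "body": { "storage": { "representation": "storage", "value": "..." } },
-- >   "space": { "key": "DEV" }, "ancestors": [ { "id": 123 } ] }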
-- support types
-- | Confluence content type (for query): "page" or "blogpost".
data CfContentType = Page | Blogpost
deriving (Eq, Generic, Read, Show)
instance ToJSON CfContentType where
toJSON = String . showTextData
instance FromJSON CfContentType where
parseJSON (String s) = either (fail . unpack) return $ readTextData s
parseJSON other = typeMismatch "String" other
instance ToHttpApiData CfContentType where
toUrlPiece = showTextData
instance FromHttpApiData CfContentType where
parseUrlPiece = readTextData
| jberthold/diamond | src/Web/Diamond/Types.hs | bsd-3-clause | 7,011 | 1 | 19 | 1,991 | 1,309 | 728 | 581 | 119 | 1 |
{-# LANGUAGE CPP, BangPatterns, RecordWildCards, DeriveDataTypeable, TupleSections #-}
-- | CommSec is a package that provides communication security for
-- use with Haskell sockets. Using an ephemeral shared
-- secret you can build contexts for sending or receiving data between one
-- or more peers.
--
-- Do not reuse the shared secret! Key agreement mechanisms that leverage
-- PKI might be added later.
module Network.CommSec.Package
( -- * Types
OutContext
, InContext
, CommSecError(..)
, SequenceMode(..)
    -- * Build contexts for use in sending and receiving
, newInContext, newOutContext, inContext, outContext
-- * Pure / ByteString based encryption and decryption routines
, decode
, encode
-- * IO / Pointer based encryption and decryption routines
, decodePtr
, encodePtr
-- * Utility functions
, encBytes, decBytes
-- * Wrappers for network sending and receiving
-- * Utilities
, peekBE32
, pokeBE32
, peekBE
, pokeBE
) where
import Prelude hiding (seq)
import qualified Crypto.Cipher.AES128.Internal as AES
import Crypto.Cipher.AES128.Internal (GCMpc, AESKey128)
import Crypto.Cipher.AES128 ()
import qualified Data.ByteString.Internal as B
import qualified Data.ByteString as B
import qualified Data.ByteString.Unsafe as B
import Data.ByteString (ByteString)
import Data.Bits
import Data.Maybe (fromMaybe)
import Data.Word
import Data.List
import Foreign.Ptr
import Foreign.ForeignPtr
import Foreign.Storable
import Foreign.Marshal.Alloc (allocaBytes)
import Foreign.Marshal.Utils (copyBytes)
import System.IO.Unsafe
import Data.Data
import Data.Typeable
import Control.Exception
import Network.CommSec.Types
import Network.CommSec.BitWindow
gTagLen,gCtrSize :: Int
gTagLen = 16
gCtrSize = 8
-- IPSec inspired packet format:
--
-- [CNT (used for both the IV and seq) | CT of Payload | ICV]
-- | A tuple of key and precomputed data for use by GCM
data GCMdata = GCMdata { gcmkey :: AESKey128
, gcmpc :: GCMpc
}
-- | A context useful for sending data.
data OutContext =
Out { aesCtr :: {-# UNPACK #-} !Word64
, saltOut :: {-# UNPACK #-} !Word32
, outKey :: GCMdata
}
-- | A context useful for receiving data.
data InContext
= In { bitWindow :: {-# UNPACK #-} !BitWindow
, saltIn :: {-# UNPACK #-} !Word32
, inKey :: GCMdata
}
| InStrict
{ seqVal :: {-# UNPACK #-} !Word64
, saltIn :: {-# UNPACK #-} !Word32
, inKey :: GCMdata
}
| InSequential
{ seqVal :: {-# UNPACK #-} !Word64
, saltIn :: {-# UNPACK #-} !Word32
, inKey :: GCMdata
}
-- | Given at least 20 bytes of entropy, produce an out context that can
-- communicate with an identically initialized in context.
newOutContext :: ByteString -> OutContext
newOutContext bs
| B.length bs < 20 = error $ "Not enough entropy: " ++ show (B.length bs)
| otherwise =
let aesCtr = 1
saltOut = unsafePerformIO $ B.unsafeUseAsCString bs $ peekBE32 . castPtr
outKey = buildGCM $ B.drop (sizeOf saltOut) bs
in Out {..}
-- | Construct an out context from a counter, salt, and AES key.
outContext :: Word64 -> Word32 -> AESKey128 -> OutContext
outContext c s k = Out c s (GCMdata k (precomputeGCMdata k))
-- | Construct an in context from a counter, salt, and AES key.
-- The in context will be 'StrictOrdering'.
inContext :: Word64 -> Word32 -> AESKey128 -> InContext
inContext c s k = InStrict c s (GCMdata k (precomputeGCMdata k))
-- | Given at least 20 bytes of entropy, produce an in context that can
-- communicate with an identically initialized out context.
newInContext :: ByteString -> SequenceMode -> InContext
newInContext bs md
| B.length bs < 20 = error $ "Not enough entropy: " ++ show (B.length bs)
| otherwise =
let bitWindow = zeroWindow
seqVal = 0
saltIn = unsafePerformIO $ B.unsafeUseAsCString bs $ peekBE32 . castPtr
inKey = buildGCM $ B.drop (sizeOf saltIn) bs
in case md of
AllowOutOfOrder -> In {..}
StrictOrdering -> InStrict {..}
Sequential -> InSequential {..}
buildGCM :: B.ByteString -> GCMdata
buildGCM key
| B.length key >= 16 = unsafePerformIO $
B.unsafeUseAsCString key $ \bPtr -> do
kStruct <- AES.generateKey128 (castPtr bPtr)
case kStruct of
Just t -> return $ GCMdata t (precomputeGCMdata t)
Nothing -> throw BuildKeyFailure
| otherwise = throw BuildKeyFailure
-- Encrypts multiple-of-block-sized input, returning a bytestring of the
-- [ctr, ct, tag].
encryptGCM :: GCMdata
-> Word64 -- ^ AES GCM Counter (IV)
-> Word32 -- ^ Salt
-> ByteString -- ^ Plaintext
-> ByteString
encryptGCM key ctr salt pt = unsafePerformIO $ do
B.unsafeUseAsCString pt $ \ptPtr -> do
B.create (encBytes (B.length pt)) $ \ctPtr -> do
encryptGCMPtr key ctr salt (castPtr ptPtr) (B.length pt) (castPtr ctPtr)
-- Encrypts multiple-of-block-sized input, filling a pointer with the
-- result of [ctr, ct, tag].
encryptGCMPtr :: GCMdata
-> Word64 -- ^ AES GCM Counter (IV)
-> Word32 -- ^ Salt
-> Ptr Word8 -- ^ Plaintext buffer
-> Int -- ^ Plaintext length
-> Ptr Word8 -- ^ ciphertext buffer (at least encBytes large)
-> IO ()
encryptGCMPtr (GCMdata {..}) ctr salt ptPtr ptLen ctPtr = do
let ivLen = sizeOf ctr + sizeOf salt
tagLen = gTagLen
allocaBytes ivLen $ \ptrIV -> do
-- Build the IV
pokeBE32 ptrIV salt
pokeBE (ptrIV `plusPtr` sizeOf salt) ctr
pokeBE ctPtr ctr
let tagPtr = ctPtr' `plusPtr` ptLen
ctPtr' = ctPtr `plusPtr` sizeOf ctr
AES.encryptGCM gcmkey gcmpc ptrIV (fromIntegral ivLen) nullPtr 0 (castPtr ptPtr) (fromIntegral ptLen) (castPtr ctPtr') tagPtr
-- | GCM decrypt and verify ICV.
decryptGCMPtr :: GCMdata
-> Word64 -- ^ AES GCM Counter (IV)
-> Word32 -- ^ Salt
-> Ptr Word8 -- ^ Ciphertext
-> Int -- ^ Ciphertext length
-> Ptr Word8 -- ^ Tag
-> Int -- ^ Tag length
-> Ptr Word8 -- ^ Plaintext result ptr (at least 'decBytes' large)
-> IO (Either CommSecError ())
decryptGCMPtr (GCMdata {..}) ctr salt ctPtr ctLen tagPtr tagLen ptPtr
| tagLen /= gTagLen = return $ Left InvalidICV
| otherwise = do
let ivLen = sizeOf ctr + sizeOf salt
paddedLen = ctLen
allocaBytes ivLen $ \ptrIV -> allocaBytes tagLen $ \ctagPtr -> do
-- Build the IV
pokeBE32 ptrIV salt
pokeBE (ptrIV `plusPtr` sizeOf salt) ctr
AES.decryptGCM gcmkey gcmpc ptrIV (fromIntegral ivLen) nullPtr 0 (castPtr ctPtr) (fromIntegral paddedLen) (castPtr ptPtr) ctagPtr
w1 <- peekBE ctagPtr
w2 <- peekBE (ctagPtr `plusPtr` sizeOf w1)
y1 <- peekBE (castPtr tagPtr)
y2 <- peekBE (castPtr tagPtr `plusPtr` sizeOf y1)
if (w1 /= y1 || w2 /= y2)
then return (Left InvalidICV)
else return (Right ())
-- Decrypts multiple-of-block-sized input, returning the recovered plaintext
-- or an error if the ICV does not verify.
decryptGCM :: GCMdata
-> Word64 -- ^ AES GCM Counter (IV)
-> Word32 -- ^ Salt
-> ByteString -- ^ Ciphertext
-> ByteString -- ^ Tag
-> Either CommSecError ByteString -- Plaintext (or an exception due to bad tag)
decryptGCM (GCMdata {..}) ctr salt ct tag
| B.length tag < gTagLen = Left InvalidICV
| otherwise = unsafePerformIO $ do
let ivLen = sizeOf ctr + sizeOf salt
tagLen = gTagLen
paddedLen = B.length ct
allocaBytes ivLen $ \ptrIV -> allocaBytes tagLen $ \ctagPtr -> do
-- Build the IV
pokeBE32 ptrIV salt
pokeBE (ptrIV `plusPtr` sizeOf salt) ctr
B.unsafeUseAsCString tag $ \tagPtr -> do
B.unsafeUseAsCString ct $ \ptrCT -> do
pt <- B.create paddedLen $ \ptrPT -> do
AES.decryptGCM gcmkey gcmpc ptrIV (fromIntegral ivLen) nullPtr 0 (castPtr ptrCT) (fromIntegral $ B.length ct) (castPtr ptrPT) ctagPtr
w1 <- peekBE ctagPtr
w2 <- peekBE (ctagPtr `plusPtr` sizeOf w1)
y1 <- peekBE (castPtr tagPtr)
y2 <- peekBE (castPtr tagPtr `plusPtr` sizeOf y1)
if (w1 /= y1 || w2 /= y2)
then return (Left InvalidICV)
else return (Right pt)
-- |Use an 'OutContext' to protect a message for transport.
-- Message format: [ctr, ct, tag].
--
-- This routine can throw an exception of 'OldContext' if the context being
-- used has expired.
encode :: OutContext -> ByteString -> (ByteString, OutContext)
encode ctx@(Out {..}) pt
| aesCtr == maxBound = throw OldContext
| otherwise =
let !iv_ct_tag = encryptGCM outKey aesCtr saltOut pt
in (iv_ct_tag, ctx { aesCtr = 1 + aesCtr })
-- |Given a message length, returns the number of bytes an encoded message
-- will consume.
encBytes :: Int -> Int
encBytes lenMsg =
let tagLen = gTagLen
ctrLen = gCtrSize
in ctrLen + lenMsg + tagLen
-- |Given a package length, returns the number of bytes in the
-- underlying message.
decBytes :: Int -> Int
decBytes lenPkg =
let tagLen = gTagLen
ctrLen = gCtrSize
in lenPkg - tagLen - ctrLen
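-- Illustrative relation between the two size helpers (not part of the original
-- module): for any message length @n >= 0@,
--
-- > decBytes (encBytes n) == n
--
-- since a package adds exactly one 8-byte counter and one 16-byte tag.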
-- |@encodePtr outCtx msg result msgLen@ will encode @msgLen@ bytes at
-- location @msg@, placing the result at location @result@. The buffer
-- pointed to by @result@ must be at least @encBytes msgLen@ bytes large,
-- the actual package will be exactly @encBytes msgLen@ in size.
encodePtr :: OutContext -> Ptr Word8 -> Ptr Word8 -> Int -> IO OutContext
encodePtr ctx@(Out {..}) ptPtr pkgPtr ptLen
| aesCtr == maxBound = throw OldContext
| otherwise = do
encryptGCMPtr outKey aesCtr saltOut ptPtr ptLen pkgPtr
return (ctx { aesCtr = 1 + aesCtr })
-- |@decodePtr inCtx pkg msg pkgLen@ decrypts and verifies a package at
-- location @pkg@ of size @pkgLen@. The resulting message is placed at
-- location @msg@ and its size is returned along with a new context (or
-- error).
decodePtr :: InContext -> Ptr Word8 -> Ptr Word8 -> Int -> IO (Either CommSecError (Int,InContext))
decodePtr ctx pkgPtr msgPtr pkgLen = do
cnt <- peekBE pkgPtr
let !ctPtr = pkgPtr `plusPtr` sizeOf cnt
!ctLen = pkgLen - tagLen - sizeOf cnt
!tagPtr = pkgPtr `plusPtr` (pkgLen - tagLen)
tagLen = gTagLen
r <- decryptGCMPtr (inKey ctx) cnt (saltIn ctx) ctPtr ctLen tagPtr tagLen msgPtr
case r of
Left err -> return (Left err)
Right () -> fmap (ctLen,) `fmap` helper ctx cnt
where
{-# INLINE helper #-}
helper :: InContext -> Word64
-> IO (Either CommSecError InContext)
helper (InStrict {..}) cnt
| cnt > seqVal = return $ Right (InStrict cnt saltIn inKey)
| otherwise = return (Left DuplicateSeq)
helper (InSequential {..}) cnt
| cnt == seqVal + 1 = return $ Right (InSequential cnt saltIn inKey)
| otherwise = return (Left DuplicateSeq)
helper (In {..}) cnt = do
case updateBitWindow bitWindow cnt of
Left e -> return (Left e)
Right newMask -> return $ Right (In newMask saltIn inKey)
-- |Use an 'InContext' to decrypt a message, verifying the ICV and sequence
-- number. Unlike sending, receiving is more likely to result in an
-- exceptional condition and thus it returns an 'Either' value.
--
-- Message format: [ctr, ct, tag].
decode :: InContext -> ByteString -> Either CommSecError (ByteString, InContext)
decode ctx pkg = unsafePerformIO $ do
let ptLen = decBytes (B.length pkg)
pt <- B.mallocByteString ptLen
r <- withForeignPtr pt $ \ptPtr -> do
B.unsafeUseAsCString pkg $ \pkgPtr -> do
decodePtr ctx (castPtr pkgPtr) (castPtr ptPtr) (B.length pkg)
case r of
Left e -> return (Left e)
Right (_,c) -> return (Right (B.fromForeignPtr pt 0 ptLen,c))
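-- A hypothetical round-trip sketch (not part of the original module), assuming
-- @outCtx@ and @inCtx@ were built by 'newOutContext' and 'newInContext' from
-- the same 20+ bytes of entropy (and OverloadedStrings for the literal):
--
-- > let (pkg, outCtx') = encode outCtx "attack at dawn"
-- > in  decode inCtx pkg   -- Right ("attack at dawn", inCtx')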
peekBE :: Ptr Word8 -> IO Word64
peekBE p = do
let op n = fromIntegral `fmap` peekElemOff p n
as <- mapM op [0..7]
return (foldl1' (\r a -> (r `shiftL` 8) .|. a) as)
{-# INLINE peekBE #-}
pokeBE :: Ptr Word8 -> Word64 -> IO ()
pokeBE p w = do
let op n = pokeElemOff p n (fromIntegral (w `shiftR` (56-(8*n) :: Int)))
mapM_ op [0..7]
{-# INLINE pokeBE #-}
pokeBE32 :: Ptr Word8 -> Word32 -> IO ()
pokeBE32 p w = do
let op n = pokeElemOff p n (fromIntegral (w `shiftR` (24 - (8*n) :: Int)))
mapM_ op [0..3]
{-# INLINE pokeBE32 #-}
peekBE32 :: Ptr Word8 -> IO Word32
peekBE32 p = do
let op n = fromIntegral `fmap` peekElemOff p n
as <- mapM op [0..3]
return (foldl1' (\r a -> (r `shiftL` 8) .|. a) as)
{-# INLINE peekBE32 #-}
precomputeGCMdata :: AESKey128 -> GCMpc
precomputeGCMdata = unsafePerformIO . AES.precomputeGCMdata
| TomMD/commsec | Network/CommSec/Package.hs | bsd-3-clause | 13,050 | 0 | 32 | 3,470 | 3,466 | 1,791 | 1,675 | -1 | -1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
module Opaleye.Trans
( OpaleyeT (..)
, runOpaleyeT
, -- * Transactions
Transaction
, transaction
, run
, -- * Queries
query
, queryFirst
, -- * Inserts
insert
, insertMany
, insertReturning
, insertReturningFirst
, insertManyReturning
, -- * Updates
update
, updateReturning
, updateReturningFirst
, -- * Deletes
delete
, -- * Utilities
withConn
, -- * Reexports
liftIO
, MonadIO
, ask
, Int64
) where
import Control.Monad.IO.Class (MonadIO, liftIO)
import Control.Monad.Reader (MonadReader, ReaderT (..),
ask)
import Control.Monad.Trans (MonadTrans (..))
import Control.Monad.Catch (MonadCatch, MonadThrow)
import Data.Maybe (listToMaybe)
import Data.Profunctor.Product.Default (Default)
import Database.PostgreSQL.Simple (Connection, withTransaction)
import qualified Database.PostgreSQL.Simple as PSQL
import GHC.Int (Int64)
import Opaleye
-- | The 'Opaleye' monad transformer
newtype OpaleyeT m a = OpaleyeT { unOpaleyeT :: ReaderT Connection m a }
deriving ( Functor, Applicative, Monad, MonadTrans, MonadIO
, MonadReader Connection, MonadCatch, MonadThrow
)
-- | Given a 'Connection', run an 'OpaleyeT'
runOpaleyeT :: PSQL.Connection -> OpaleyeT m a -> m a
runOpaleyeT c = flip runReaderT c . unOpaleyeT
withConn :: MonadIO m => (Connection -> IO a) -> OpaleyeT m a
withConn f = ask >>= liftIO . f
newtype Transaction a = Transaction { unTransaction :: ReaderT Connection IO a }
deriving (Functor, Applicative, Monad, MonadReader Connection)
-- | Run a postgresql transaction in the 'OpaleyeT' monad
transaction :: MonadIO m => Transaction a -> OpaleyeT m a
transaction (Transaction t) = withConn $ \conn ->
withTransaction conn (runReaderT t conn)
-- | Execute a query without a literal transaction
run :: MonadIO m => Transaction a -> OpaleyeT m a
run = withConn . runReaderT . unTransaction
-- | With a 'Connection' in a 'Transaction'
-- This isn't exposed so that users can't just drop down to IO
-- in a transaction
withConnIO :: (Connection -> IO a) -> Transaction a
withConnIO = Transaction . ReaderT
-- | Execute a 'Query'. See 'runQuery'.
query :: Default QueryRunner a b => Query a -> Transaction [b]
query q = withConnIO (`runQuery` q)
-- | Retrieve the first result from a 'Query'. Similar to @listToMaybe <$> runQuery@.
queryFirst :: Default QueryRunner a b => Query a -> Transaction (Maybe b)
queryFirst q = listToMaybe <$> query q
-- | Insert into a 'Table'. See 'runInsert'.
insert :: Table w r -> w -> Transaction Int64
insert t w = withConnIO (\c -> runInsertMany c t [w])
-- | Insert many records into a 'Table'. See 'runInsertMany'.
insertMany :: Table w r -> [w] -> Transaction Int64
insertMany t ws = withConnIO (\c -> runInsertMany c t ws)
-- | Insert a record into a 'Table' with a return value. See 'runInsertReturning'.
insertReturning :: Default QueryRunner a b => Table w r -> (r -> a) -> w -> Transaction [b]
insertReturning t ret w = withConnIO (\c -> runInsertManyReturning c t [w] ret)
-- | Insert a record into a 'Table' with a return value. Retrieve only the first result.
-- Similar to @'listToMaybe' '<$>' 'insertReturning'@
insertReturningFirst :: Default QueryRunner a b => Table w r -> (r -> a) -> w -> Transaction (Maybe b)
insertReturningFirst t ret w = listToMaybe <$> insertReturning t ret w
-- | Insert many records into a 'Table' with a return value for each record.
--
-- Maybe not worth defining. This almost certainly does the wrong thing.
insertManyReturning :: Default QueryRunner a b => Table w r -> [w] -> (r -> a) -> Transaction [b]
insertManyReturning t ws ret = withConnIO (\c -> runInsertManyReturning c t ws ret)
-- | Update items in a 'Table' where the predicate is true. See 'runUpdate'.
update :: Table w r -> (r -> w) -> (r -> Column PGBool) -> Transaction Int64
update t r2w predicate = withConnIO (\c -> runUpdate c t r2w predicate)
-- | Update items in a 'Table' with a return value. See 'runUpdateReturning'.
updateReturning :: Default QueryRunner a b
=> Table w r
-> (r -> w)
-> (r -> Column PGBool)
-> (r -> a)
-> Transaction [b]
updateReturning table r2w predicate r2returned = withConnIO (\c -> runUpdateReturning c table r2w predicate r2returned)
-- | Update items in a 'Table' with a return value. Similar to @'listToMaybe' '<$>' 'updateReturning'@.
updateReturningFirst :: Default QueryRunner a b
=> Table w r
-> (r -> w)
-> (r -> Column PGBool)
-> (r -> a)
-> Transaction (Maybe b)
updateReturningFirst table r2w predicate r2returned = listToMaybe <$> updateReturning table r2w predicate r2returned
-- | Delete items in a 'Table' that satisfy some boolean predicate. See 'runDelete'.
delete :: Table a b -> (b -> Column PGBool) -> Transaction Int64
delete table r2b = withConnIO (\c -> runDelete c table r2b)
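-- A hypothetical end-to-end sketch (not part of the original module).  It
-- assumes a table @userTable@ and a result record @User@ defined elsewhere
-- with Opaleye's 'Table' machinery, plus a 'Connection' from postgresql-simple:
--
-- > fetchUsers :: Connection -> IO [User]
-- > fetchUsers conn = runOpaleyeT conn . transaction $
-- >     query (queryTable userTable)
--
-- 'transaction' wraps the block in a single postgresql transaction, whereas
-- 'run' executes the same 'Transaction' actions without one.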
| WraithM/opaleye-trans | src/Opaleye/Trans.hs | bsd-3-clause | 5,461 | 0 | 12 | 1,444 | 1,309 | 705 | 604 | 84 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Answer (answer) where
{-
This module connects to a Database to find the answer.
-}
import Database.MySQL.Simple
answer :: IO Int
answer = do
conn <- connect defaultConnectInfo
[Only i] <- query_ conn "select 21 * 2"
return i
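-- A hypothetical note (not part of the original module): 'defaultConnectInfo'
-- targets a local MySQL server; individual fields can be overridden, e.g.
--
-- > connect defaultConnectInfo { connectDatabase = "spike" }
--
-- where "spike" stands in for whatever database the deployment actually uses.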
| aztecrex/haskell-scotty-spike | src/Answer.hs | bsd-3-clause | 276 | 0 | 9 | 53 | 64 | 33 | 31 | 8 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE ScopedTypeVariables #-}
-- -*-haskell-*-
-- GIMP Toolkit (GTK) CustomStore TreeModel
--
-- Author : Duncan Coutts, Axel Simon
--
--  Created: 11 February 2006
--
-- Copyright (C) 2005-2016 Duncan Coutts, Axel Simon, Hamish Mackenzie
--
-- This library is free software; you can redistribute it and/or
-- modify it under the terms of the GNU Lesser General Public
-- License as published by the Free Software Foundation; either
-- version 2.1 of the License, or (at your option) any later version.
--
-- This library is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- Lesser General Public License for more details.
--
-- |
-- Stability : provisional
-- Portability : portable (depends on GHC)
--
-- Standard model to store hierarchical data.
--
module Data.GI.Gtk.ModelView.ForestStore (
-- * Types
ForestStore(..),
-- * Constructors
forestStoreNew,
forestStoreNewDND,
-- * Implementation of Interfaces
forestStoreDefaultDragSourceIface,
forestStoreDefaultDragDestIface,
-- * Methods
forestStoreGetValue,
forestStoreGetTree,
forestStoreGetForest,
forestStoreLookup,
forestStoreSetValue,
forestStoreInsert,
forestStoreInsertTree,
forestStoreInsertForest,
forestStoreRemove,
forestStoreClear,
forestStoreChange,
forestStoreChangeM,
) where
import Prelude ()
import Prelude.Compat
import Data.Bits
import Data.Word (Word32)
import Data.Int (Int32)
import Data.Maybe ( fromMaybe, isJust )
import Data.Tree
import Control.Monad ((>=>), when)
import Control.Monad.IO.Class (MonadIO(..))
import Control.Exception (assert)
import Data.IORef
import Foreign.ForeignPtr (ForeignPtr)
import Data.GI.Base.BasicTypes
(ManagedPtr(..), GObject(..), GObject)
import Data.GI.Base.ManagedPtr (withManagedPtr)
import Data.GI.Gtk.ModelView.Types
import Data.GI.Gtk.ModelView.CustomStore
(customStoreGetStamp, customStoreGetPrivate,
TreeModelIface(..), customStoreNew, DragDestIface(..),
DragSourceIface(..), CustomStore(..), customStoreInvalidateIters)
import GI.GObject.Objects.Object (Object(..))
import GI.Gtk.Interfaces.TreeModel
(treeModelRowDeleted, treeModelRowInserted,
treeModelRowChanged, toTreeModel, TreeModel(..), IsTreeModel(..),
treeModelRowHasChildToggled)
import GI.Gtk.Functions (treeSetRowDragData, treeGetRowDragData)
import GI.Gtk.Structs.TreePath
(TreePath)
import GI.Gtk.Structs.TreeIter
(getTreeIterUserData3, getTreeIterUserData2, getTreeIterUserData,
getTreeIterStamp, setTreeIterUserData3, setTreeIterUserData2,
setTreeIterUserData, setTreeIterStamp, TreeIter(..))
import Data.GI.Base (get, new)
import Unsafe.Coerce (unsafeCoerce)
--------------------------------------------
-- internal model data types
--
data ForestStoreIter = ForestStoreIter Int32 Word32 Word32 Word32
fromForestStoreIter :: MonadIO m => ForestStoreIter -> m TreeIter
fromForestStoreIter (ForestStoreIter s u1 u2 u3) = do
i <- new TreeIter []
setTreeIterStamp i s
setTreeIterUserData i $ unsafeCoerce u1
setTreeIterUserData2 i $ unsafeCoerce u2
setTreeIterUserData3 i $ unsafeCoerce u3
return i
toForestStoreIter :: MonadIO m => TreeIter -> m ForestStoreIter
toForestStoreIter iter = do
stamp <- getTreeIterStamp iter
u1 <- getTreeIterUserData iter
u2 <- getTreeIterUserData2 iter
u3 <- getTreeIterUserData3 iter
return $ ForestStoreIter stamp (unsafeCoerce u1) (unsafeCoerce u2) (unsafeCoerce u3)
forestStoreIterSetStamp :: ForestStoreIter -> Int32 -> ForestStoreIter
forestStoreIterSetStamp (ForestStoreIter _ a b c) s = ForestStoreIter s a b c
-- | A store for hierarchical data.
--
newtype ForestStore a = ForestStore (ManagedPtr (CustomStore (IORef (Store a)) a))
mkForestStore :: CustomStore (IORef (Store a)) a -> ForestStore a
mkForestStore (CustomStore ptr) = ForestStore ptr
instance IsTreeModel (ForestStore a)
instance GObject (ForestStore a) where
#if !MIN_VERSION_haskell_gi_base(0,20,1)
gobjectIsInitiallyUnowned _ = False
#endif
gobjectType _ = gobjectType (undefined :: TreeModel)
instance IsTypedTreeModel ForestStore
-- | Maximum number of nodes on each level.
--
-- * These numbers determine how many bits in a 'TreeIter' are devoted to
-- each level. Hence, these numbers reflect log2 of the maximum number
-- of nodes at a level, rounded up.
--
type Depth = [Int]
data Store a = Store {
depth :: Depth,
content :: Cache a
}
-- | Create a new 'ForestStore'.
--
-- * The given forest determines the initial content and may be the empty
--   list. Each 'Tree' in the forest corresponds to one top-level node.
--
-- * The ForestStore maintains the initially given Forest and aligns the 'TreePath'
-- bits to fit in 96-bit length 'TreeIter' storage.
--
-- * Additionally, a cache is used to achieve higher performance if operating on
-- recently used TreePaths.
--
-- * __Note:__ due to the limited number of bits available in TreeIter storage, only
--   forests of limited depth can be used with this implementation; the result of too deep
--   forests is undefined behaviour when trying to retrieve the deeply nested nodes.
--   For example: assuming the average requirement is 8 bits per tree level (max number of
--   children at the level is 255), then we can only use trees up to 12 levels deep (96/8) -
--   any further levels in a TreePath will not be encoded in the corresponding TreeIter
--   storage.
--
forestStoreNew :: MonadIO m => Forest a -> m (ForestStore a)
forestStoreNew forest = forestStoreNewDND forest
(Just forestStoreDefaultDragSourceIface)
(Just forestStoreDefaultDragDestIface)
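-- A small construction sketch (the labels are arbitrary; 'Node' comes from
-- "Data.Tree"):
--
-- > exampleStore :: IO (ForestStore String)
-- > exampleStore = forestStoreNew
-- >     [ Node "fruit" [ Node "apple" [], Node "pear" [] ]
-- >     , Node "vegetables" [] ]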
-- | Create a new 'ForestStore'.
--
-- * In addition to 'forestStoreNew', this function takes two interfaces
-- to implement user-defined drag-and-drop functionality.
--
forestStoreNewDND :: MonadIO m => Forest a -- ^ the initial forest stored in this model
-> Maybe (DragSourceIface ForestStore a) -- ^ an optional interface for drags
-> Maybe (DragDestIface ForestStore a) -- ^ an optional interface to handle drops
-> m (ForestStore a)
forestStoreNewDND forest mDSource mDDest = liftIO $ do
(storeRef :: IORef (Store a)) <- newIORef Store {
depth = calcForestDepth forest,
content = storeToCache forest
}
let withStore :: (Store a -> IO result) -> IO result
withStore f = readIORef storeRef >>= f
withStoreUpdateCache :: (Store a -> (result, Cache a)) -> IO result
withStoreUpdateCache f = do
store <- readIORef storeRef
let (result, cache') = f store
writeIORef storeRef store { content = cache' }
return result
customStoreNew storeRef mkForestStore TreeModelIface {
treeModelIfaceGetFlags = return [],
treeModelIfaceGetIter = \path -> withStore $
\Store { depth = d } -> fromPath d <$> treePathGetIndices' path >>= mapM fromForestStoreIter,
treeModelIfaceGetPath = toForestStoreIter >=> \iter -> withStore $
\Store { depth = d } -> treePathNewFromIndices' $ toPath d iter,
treeModelIfaceGetRow = toForestStoreIter >=> \iter -> withStoreUpdateCache $
\Store { depth = d, content = cache } ->
case checkSuccess d iter cache of
(True, cache'@((_, (Node { rootLabel = val }:_)):_)) ->
(val, cache')
_ -> error "ForestStore.getRow: iter does not refer to a valid entry",
treeModelIfaceIterNext = toForestStoreIter >=> \iter -> withStoreUpdateCache (
\Store { depth = d, content = cache } -> iterNext d iter cache) >>= mapM fromForestStoreIter,
treeModelIfaceIterChildren = \mIter -> do
iter <- maybe (return invalidIter) toForestStoreIter mIter
withStoreUpdateCache (
\Store { depth = d, content = cache } ->
iterNthChild d 0 iter cache) >>= mapM fromForestStoreIter,
treeModelIfaceIterHasChild = toForestStoreIter >=> \iter -> withStoreUpdateCache $
\Store { depth = d, content = cache } ->
let (mIter, cache') = iterNthChild d 0 iter cache
in (isJust mIter, cache'),
treeModelIfaceIterNChildren = mapM toForestStoreIter >=> \mIter -> withStoreUpdateCache $
\Store { depth = d, content = cache } ->
let iter = fromMaybe invalidIter mIter
in iterNChildren d iter cache,
treeModelIfaceIterNthChild = \mIter idx -> do
iter <- maybe (return invalidIter) toForestStoreIter mIter
withStoreUpdateCache (
\Store { depth = d, content = cache } ->
iterNthChild d idx iter cache) >>= mapM fromForestStoreIter,
treeModelIfaceIterParent = toForestStoreIter >=> \iter -> withStore $
\Store { depth = d } -> mapM fromForestStoreIter (iterParent d iter),
treeModelIfaceRefNode = \_ -> return (),
treeModelIfaceUnrefNode = \_ -> return ()
} mDSource mDDest
-- | Default drag functions for
-- 'Data.GI.Gtk.ModelView.ForestStore'. These functions allow the rows of
-- the model to serve as drag source. Any row is allowed to be dragged and the
-- data set in the 'SelectionDataM' object is set with 'treeSetRowDragData',
-- i.e. it contains the model and the 'TreePath' to the row.
forestStoreDefaultDragSourceIface :: DragSourceIface ForestStore row
forestStoreDefaultDragSourceIface = DragSourceIface {
customDragSourceRowDraggable = \_ _-> return True,
customDragSourceDragDataGet = \model path sel -> treeSetRowDragData sel model path,
customDragSourceDragDataDelete = \model path -> treePathGetIndices' path >>= \dest@(_:_) -> do
liftIO $ forestStoreRemove model path
return True
}
-- | Default drop functions for 'Data.GI.Gtk.ModelView.ForestStore'. These
-- functions accept a row and insert the row into the new location if it is
-- dragged into a tree view
-- that uses the same model.
forestStoreDefaultDragDestIface :: DragDestIface ForestStore row
forestStoreDefaultDragDestIface = DragDestIface {
customDragDestRowDropPossible = \model path sel -> do
mModelPath <- treeGetRowDragData sel
case mModelPath of
(True, Just model', source) -> do
tm <- toTreeModel model
withManagedPtr tm $ \m ->
withManagedPtr model' $ \m' -> return (m==m')
_ -> return False,
customDragDestDragDataReceived = \model path sel -> do
dest@(_:_) <- treePathGetIndices' path
mModelPath <- treeGetRowDragData sel
case mModelPath of
(True, Just model', Just path) -> do
source@(_:_) <- treePathGetIndices' path
tm <- toTreeModel model
withManagedPtr tm $ \m ->
withManagedPtr model' $ \m' ->
if m/=m' then return False
else do
row <- forestStoreGetTree model =<< treePathNewFromIndices' source
initPath <- treePathNewFromIndices' (init dest)
forestStoreInsertTree model initPath (fromIntegral $ last dest) row
return True
_ -> return False
}
--------------------------------------------
-- low level bit-twiddling utility functions
--
bitsNeeded :: Word32 -> Int
bitsNeeded n = bitsNeeded' 0 n
where bitsNeeded' b 0 = b
bitsNeeded' b n = bitsNeeded' (b+1) (n `shiftR` 1)
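-- For example, @bitsNeeded 5 == 3@ and @bitsNeeded 255 == 8@.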
getBitSlice :: ForestStoreIter -> Int -> Int -> Word32
getBitSlice (ForestStoreIter _ a b c) off count =
getBitSliceWord a off count
.|. getBitSliceWord b (off-32) count
.|. getBitSliceWord c (off-64) count
where getBitSliceWord :: Word32 -> Int -> Int -> Word32
getBitSliceWord word off count =
word `shift` (-off) .&. (1 `shiftL` count - 1)
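-- For example, for an iterator whose first word is 13 (binary 1101), the
-- slice of width 3 starting at bit 1 is 6 (binary 110):
--
-- > getBitSlice (ForestStoreIter 0 13 0 0) 1 3 == 6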
setBitSlice :: ForestStoreIter -> Int -> Int -> Word32 -> ForestStoreIter
setBitSlice (ForestStoreIter stamp a b c) off count value =
assert (value < 1 `shiftL` count) $
ForestStoreIter stamp
(setBitSliceWord a off count value)
(setBitSliceWord b (off-32) count value)
(setBitSliceWord c (off-64) count value)
where setBitSliceWord :: Word32 -> Int -> Int -> Word32 -> Word32
setBitSliceWord word off count value =
let mask = (1 `shiftL` count - 1) `shift` off
in (word .&. complement mask) .|. (value `shift` off)
--iterPrefixEqual :: TreeIter -> TreeIter -> Int -> Bool
--iterPrefixEqual (TreeIter _ a1 b1 c1) (TreeIter _ a2 b2 c2) pos
-- | pos>64 = let mask = 1 `shiftL` (pos-64) - 1 in
-- a1==a2 && b1==b2 && (c1 .&. mask) == (c2 .&. mask)
-- | pos>32 = let mask = 1 `shiftL` (pos-32) - 1 in
-- a1==a2 && (b1 .&. mask) == (b2 .&. mask)
-- | otherwise = let mask = 1 `shiftL` pos - 1 in
-- (a1 .&. mask) == (a2 .&. mask)
-- | The invalid tree iterator.
--
invalidIter :: ForestStoreIter
invalidIter = ForestStoreIter 0 0 0 0
--showIterBits (TreeIter _ a b c) = [showBits a, showBits b, showBits c]
--
--showBits :: Bits a => a -> String
--showBits a = [ if testBit a i then '1' else '0' | i <- [0..bitSize a - 1] ]
-- | Calculate, for each level, the number of bits needed to encode the
--   maximum number of nodes at that level.
--
calcForestDepth :: Forest a -> Depth
calcForestDepth f = map bitsNeeded $
takeWhile (/=0) $
foldr calcTreeDepth (repeat 0) f
where
calcTreeDepth Node { subForest = f } (d:ds) =
(d+1): zipWith max ds (foldr calcTreeDepth (repeat 0) f)
-- | Convert an iterator into a path.
--
toPath :: Depth -> ForestStoreIter -> [Int32]
toPath d iter = gP 0 d
where
gP pos [] = []
gP pos (d:ds) = let idx = getBitSlice iter pos d in
if idx==0 then [] else fromIntegral (idx-1) : gP (pos+d) ds
-- | Try to convert a path into a 'TreeIter'.
--
fromPath :: Depth -> [Int32] -> Maybe ForestStoreIter
fromPath = fP 0 invalidIter
where
fP pos ti _ [] = Just ti -- the remaining bits are zero anyway
fP pos ti [] _ = Nothing
fP pos ti (d:ds) (p:ps) = let idx = fromIntegral (p+1) in
if idx >= bit d then Nothing else
fP (pos+d) (setBitSlice ti pos d idx) ds ps
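-- For example, with @d = [2,3]@ the path @[1,0]@ round-trips: @fromPath d [1,0]@
-- stores 2 in the first two bits and 1 in the following three bits, and
-- @toPath d@ maps the resulting iterator back to @[1,0]@.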
-- | The 'Cache' type synonym is only used internally. It represents
-- the stack during a (fictional) lookup operation.
-- The topmost frame is the node
-- for which this lookup was started and the innermost frame (the last
-- element of the list) contains the root of the tree.
--
type Cache a = [(ForestStoreIter, Forest a)]
-- | Create a traversal structure that allows a pre-order traversal in linear
-- time.
--
-- * The returned structure points at the root of the first level which doesn't
-- really exist, but serves to indicate that it is before the very first
-- node.
--
storeToCache :: Forest a -> Cache a
storeToCache [] = []
storeToCache forest = [(invalidIter, [Node root forest])]
where
    root = error "ForestStore.storeToCache: accessed non-existent root of tree"
-- | Extract the store from the cache data structure.
cacheToStore :: Cache a -> Forest a
cacheToStore [] = []
cacheToStore cache = case last cache of (_, [Node _ forest]) -> forest
-- | Advance the traversal structure to the given 'TreeIter'.
--
advanceCache :: Depth -> ForestStoreIter -> Cache a -> Cache a
advanceCache depth goal [] = []
advanceCache depth goal cache@((rootIter,_):_) =
moveToSameLevel 0 depth
where
moveToSameLevel pos [] = cache
moveToSameLevel pos (d:ds) =
let
goalIdx = getBitSlice goal pos d
curIdx = getBitSlice rootIter pos d
isNonZero pos d (ti,_) = getBitSlice ti pos d/=0
in
if goalIdx==curIdx then moveToSameLevel (pos+d) ds else
if goalIdx==0 then dropWhile (isNonZero pos d) cache else
if curIdx==0 then moveToChild pos (d:ds) cache else
if goalIdx<curIdx then
moveToChild pos (d:ds) (dropWhile (isNonZero pos d) cache)
else let
-- advance the current iterator to coincide with the goal iterator
-- at this level
moveWithinLevel pos d ((ti,forest):parents) = let
diff = fromIntegral (goalIdx-curIdx)
(dropped, remain) = splitAt diff forest
advance = length dropped
ti' = setBitSlice ti pos d (curIdx+fromIntegral advance)
in
if advance==diff then moveToChild (pos+d) ds ((ti',remain):parents)
else (ti',remain):parents -- node not found
in moveWithinLevel pos d $ case ds of
[] -> cache
(d':_) -> dropWhile (isNonZero (pos+d) d') cache
-- Descend into the topmost forest to find the goal iterator. The position
  -- and the remaining depths specify the index in the cache that is zero.
  -- All indices in front of pos coincide with those of the goal iterator.
moveToChild :: Int -> Depth -> Cache a -> Cache a
moveToChild pos [] cache = cache -- we can't set more than the leaf
moveToChild pos (d:ds) cache@((ti,forest):parents)
| getBitSlice goal pos d == 0 = cache
| otherwise = case forest of
[] -> cache -- impossible request
Node { subForest = children }:_ ->
let
childIdx :: Int
childIdx = fromIntegral (getBitSlice goal pos d)-1
(dropped, remain) = splitAt childIdx children
advanced = length dropped
ti' = setBitSlice ti pos d (fromIntegral advanced+1)
in if advanced<childIdx then ((ti',remain):cache) else
moveToChild (pos+d) ds ((ti',remain):cache)
-- | Advance to the given iterator and return whether this was successful.
--
checkSuccess :: Depth -> ForestStoreIter -> Cache a -> (Bool, Cache a)
checkSuccess depth iter cache = case advanceCache depth iter cache of
cache'@((cur,sibs):_) -> (cmp cur iter && not (null sibs), cache')
[] -> (False, [])
where
cmp (ForestStoreIter _ a1 b1 c1) (ForestStoreIter _ a2 b2 c2) =
    a1==a2 && b1==b2 && c1==c2
-- | Get the leaf index of this iterator.
--
-- * Due to the way we construct the 'TreeIter's, we can check which level is
--   the last level of an iterator: the bit sequence of level n is zero if n is
--   greater than or equal to the level that the iterator refers to. The returned
-- triple is (pos, leaf, zero) such that pos..pos+leaf denotes the leaf
-- index and pos+leaf..pos+leaf+zero denotes the bit field that is zero.
--
getTreeIterLeaf :: Depth -> ForestStoreIter -> (Int, Int, Int)
getTreeIterLeaf ds ti = gTIL 0 0 ds
where
gTIL pos dCur (dNext:ds)
| getBitSlice ti (pos+dCur) dNext==0 = (pos,dCur,dNext)
| otherwise = gTIL (pos+dCur) dNext ds
gTIL pos d [] = (pos, d, 0)
-- | Move an iterator forwards on the same level.
--
iterNext :: Depth -> ForestStoreIter -> Cache a -> (Maybe ForestStoreIter, Cache a)
iterNext depth iter cache = let
(pos,leaf,_child) = getTreeIterLeaf depth iter
curIdx = getBitSlice iter pos leaf
nextIdx = curIdx+1
nextIter = setBitSlice iter pos leaf nextIdx
in
if nextIdx==bit leaf then (Nothing, cache) else
case checkSuccess depth nextIter cache of
(True, cache) -> (Just nextIter, cache)
(False, cache) -> (Nothing, cache)
-- | Move down to the child of the given iterator.
--
iterNthChild :: Depth -> Int -> ForestStoreIter -> Cache a ->
(Maybe ForestStoreIter, Cache a)
iterNthChild depth childIdx_ iter cache = let
(pos,leaf,child) = getTreeIterLeaf depth iter
childIdx = fromIntegral childIdx_+1
nextIter = setBitSlice iter (pos+leaf) child childIdx
in
if childIdx>=bit child then (Nothing, cache) else
case checkSuccess depth nextIter cache of
(True, cache) -> (Just nextIter, cache)
(False, cache) -> (Nothing, cache)
-- | Return the number of children of the given iterator.
--
iterNChildren :: Depth -> ForestStoreIter -> Cache a -> (Int, Cache a)
iterNChildren depth iter cache = case checkSuccess depth iter cache of
(True, cache@((_,Node { subForest = forest}:_):_)) -> (length forest, cache)
(_, cache) -> (0, cache)
-- | Ascend to parent.
--
iterParent :: Depth -> ForestStoreIter -> Maybe ForestStoreIter
iterParent depth iter = let
(pos,leaf,_child) = getTreeIterLeaf depth iter
in if pos==0 then Nothing else
if getBitSlice iter pos leaf==0 then Nothing else
Just (setBitSlice iter pos leaf 0)
-- | Insert nodes into the store.
--
-- * The given list of nodes is inserted into the given parent at @pos@;
--   if the parent does not exist, an error is raised. If @pos@ is negative,
--   or greater than or equal to the number of children of the node at @path@,
--   the new nodes are appended to the list.
--
forestStoreInsertForest :: MonadIO m
=> ForestStore a -- ^ the store
-> TreePath -- ^ @path@ - the position of the parent
-> Int -- ^ @pos@ - the index of the new tree
-> Forest a -- ^ the list of trees to be inserted
-> m ()
forestStoreInsertForest (ForestStore model) path pos nodes = liftIO $ do
ipath <- treePathGetIndices' path
customStoreInvalidateIters $ CustomStore model
(idx, toggle) <- atomicModifyIORef (customStoreGetPrivate $ CustomStore model) $
\store@Store { depth = d, content = cache } ->
case insertIntoForest (cacheToStore cache) nodes ipath pos of
Nothing -> error ("forestStoreInsertForest: path does not exist " ++ show ipath)
Just (newForest, idx, toggle) ->
let depth = calcForestDepth newForest
in (Store { depth = depth,
content = storeToCache newForest },
(idx, toggle))
Store { depth = depth } <- readIORef (customStoreGetPrivate $ CustomStore model)
let rpath = reverse ipath
stamp <- customStoreGetStamp $ CustomStore model
sequence_ [ let p' = reverse p
Just iter = fromPath depth p'
in do
p'' <- treePathNewFromIndices' p'
treeModelRowInserted (CustomStore model) p'' =<< fromForestStoreIter (forestStoreIterSetStamp iter stamp)
| (i, node) <- zip [idx..] nodes
, p <- paths (fromIntegral i : rpath) node ]
let Just iter = fromPath depth ipath
when toggle $ treeModelRowHasChildToggled (CustomStore model) path
=<< fromForestStoreIter (forestStoreIterSetStamp iter stamp)
where paths :: [Int32] -> Tree a -> [[Int32]]
paths path Node { subForest = ts } =
path : concat [ paths (n:path) t | (n, t) <- zip [0..] ts ]
-- | Insert a node into the store.
--
forestStoreInsertTree :: MonadIO m
=> ForestStore a -- ^ the store
-> TreePath -- ^ @path@ - the position of the parent
-> Int -- ^ @pos@ - the index of the new tree
-> Tree a -- ^ the value to be inserted
-> m ()
forestStoreInsertTree store path pos node =
forestStoreInsertForest store path pos [node]
-- | Insert a single node into the store.
--
-- * This function inserts a single node without children into the tree.
--   Its arguments are similar to those of 'forestStoreInsertTree'.
--
forestStoreInsert :: MonadIO m
=> ForestStore a -- ^ the store
-> TreePath -- ^ @path@ - the position of the parent
-> Int -- ^ @pos@ - the index of the new tree
-> a -- ^ the value to be inserted
-> m ()
forestStoreInsert store path pos node =
forestStoreInsertForest store path pos [Node node []]
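-- A small usage sketch (it assumes the store holds 'String's, already has a
-- first top-level node, and that 'treePathNewFromIndices'' is in scope; a
-- negative @pos@ appends to the existing children):
--
-- > addCherry :: ForestStore String -> IO ()
-- > addCherry store = do
-- >     parent <- treePathNewFromIndices' [0]
-- >     forestStoreInsert store parent (-1) "cherry"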
-- | Insert nodes into a forest.
--
-- * If the parent was found, returns the new tree, the child number
-- and a flag denoting if these new nodes were the first children
-- of the parent.
--
insertIntoForest :: Forest a -> Forest a -> [Int32] -> Int ->
Maybe (Forest a, Int, Bool)
insertIntoForest forest nodes [] pos
| pos<0 = Just (forest++nodes, length forest, null forest)
| otherwise = Just (prev++nodes++next, length prev, null forest)
where (prev, next) = splitAt pos forest
insertIntoForest forest nodes (p:ps) pos = case splitAt (fromIntegral p) forest of
(prev, []) -> Nothing
(prev, Node { rootLabel = val,
subForest = for}:next) ->
case insertIntoForest for nodes ps pos of
Nothing -> Nothing
Just (for, pos, toggle) -> Just (prev++Node { rootLabel = val,
subForest = for }:next,
pos, toggle)
-- | Remove a node from the store.
--
-- * The node denoted by the path is removed, along with all its children.
-- The function returns @True@ if the given node was found.
--
forestStoreRemove :: MonadIO m => ForestStore a -> TreePath -> m Bool
forestStoreRemove model path = treePathGetIndices' path >>= forestStoreRemoveImpl model path
forestStoreRemoveImpl :: MonadIO m => ForestStore a -> TreePath -> [Int32] -> m Bool
--TODO: eliminate this special case without segfaulting!
forestStoreRemoveImpl (ForestStore model) _ [] = return False
forestStoreRemoveImpl (ForestStore model) path ipath = liftIO $ do
customStoreInvalidateIters (CustomStore model)
(found, toggle) <- atomicModifyIORef (customStoreGetPrivate (CustomStore model)) $
\store@Store { depth = d, content = cache } ->
if null cache then (store, (False, False)) else
case deleteFromForest (cacheToStore cache) ipath of
Nothing -> (store, (False, False))
Just (newForest, toggle) ->
(Store { depth = d, -- this might be a space leak
content = storeToCache newForest }, (True, toggle))
when found $ do
when (toggle && not (null ipath)) $ do
Store { depth = depth } <- readIORef (customStoreGetPrivate (CustomStore model))
let iparent = init ipath
Just iter = fromPath depth iparent
parent <- treePathNewFromIndices' iparent
treeModelRowHasChildToggled (CustomStore model) parent =<< fromForestStoreIter iter
treeModelRowDeleted (CustomStore model) path
return found
forestStoreClear :: MonadIO m => ForestStore a -> m ()
forestStoreClear (ForestStore model) = liftIO $ do
customStoreInvalidateIters (CustomStore model)
Store { content = cache } <- readIORef (customStoreGetPrivate (CustomStore model))
let forest = cacheToStore cache
writeIORef (customStoreGetPrivate (CustomStore model)) Store {
depth = calcForestDepth [],
content = storeToCache []
}
let loop (-1) = return ()
loop n = treePathNewFromIndices' [fromIntegral n] >>= treeModelRowDeleted (CustomStore model) >> loop (n-1)
loop (length forest - 1)
-- | Remove a node from a rose tree.
--
-- * Returns the new tree if the node was found. The returned flag is
-- @True@ if deleting the node left the parent without any children.
--
deleteFromForest :: Forest a -> [Int32] -> Maybe (Forest a, Bool)
deleteFromForest forest [] = Just ([], False)
deleteFromForest forest (p:ps) =
case splitAt (fromIntegral p) forest of
(prev, kill@Node { rootLabel = val,
subForest = for}:next) ->
if null ps then Just (prev++next, null prev && null next) else
case deleteFromForest for ps of
Nothing -> Nothing
Just (for,toggle) -> Just (prev++Node {rootLabel = val,
subForest = for }:next, toggle)
(prev, []) -> Nothing
-- | Set a node in the store.
--
forestStoreSetValue :: MonadIO m => ForestStore a -> TreePath -> a -> m ()
forestStoreSetValue store path value = forestStoreChangeM store path (\_ -> return value)
>> return ()
-- | Change a node in the store.
--
-- * Returns @True@ if the node was found. For a monadic version, see
-- 'forestStoreChangeM'.
--
forestStoreChange :: MonadIO m => ForestStore a -> TreePath -> (a -> a) -> m Bool
forestStoreChange store path func = forestStoreChangeM store path (return . func)
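-- For example, to append a marker to the label stored at @path@ in a
-- @ForestStore String@:
--
-- > markNode :: ForestStore String -> TreePath -> IO Bool
-- > markNode store path = forestStoreChange store path (++ " *")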
-- | Change a node in the store.
--
-- * Returns @True@ if the node was found. For a purely functional version, see
-- 'forestStoreChange'.
--
forestStoreChangeM :: MonadIO m => ForestStore a -> TreePath -> (a -> m a) -> m Bool
forestStoreChangeM (ForestStore model) path act = do
ipath <- treePathGetIndices' path
customStoreInvalidateIters (CustomStore model)
store@Store { depth = d, content = cache } <-
liftIO $ readIORef (customStoreGetPrivate (CustomStore model))
(store'@Store { depth = d, content = cache }, found) <- do
mRes <- changeForest (cacheToStore cache) act ipath
return $ case mRes of
Nothing -> (store, False)
Just newForest -> (Store { depth = d,
content = storeToCache newForest }, True)
liftIO $ writeIORef (customStoreGetPrivate (CustomStore model)) store'
let Just iter = fromPath d ipath
stamp <- customStoreGetStamp (CustomStore model)
when found $ treeModelRowChanged (CustomStore model) path =<< fromForestStoreIter (forestStoreIterSetStamp iter stamp)
return found
-- | Change a node in the forest.
--
-- * Returns @True@ if the given node was found.
--
changeForest :: MonadIO m => Forest a -> (a -> m a) -> [Int32] -> m (Maybe (Forest a))
changeForest forest act [] = return Nothing
changeForest forest act (p:ps) = case splitAt (fromIntegral p) forest of
(prev, []) -> return Nothing
(prev, Node { rootLabel = val,
subForest = for}:next) ->
if null ps then do
val' <- act val
return (Just (prev++Node { rootLabel = val',
subForest = for }:next))
else do
mFor <- changeForest for act ps
case mFor of
Nothing -> return Nothing
Just for -> return $ Just (prev++Node { rootLabel = val,
subForest = for }:next)
-- | Extract one node from the current model. Fails if the given
-- 'TreePath' refers to a non-existent node.
--
forestStoreGetValue :: (Applicative m, MonadIO m) => ForestStore a -> TreePath -> m a
forestStoreGetValue model path = rootLabel <$> forestStoreGetTree model path
-- | Extract a subtree from the current model. Fails if the given
-- 'TreePath' refers to a non-existent node.
--
forestStoreGetTree :: MonadIO m => ForestStore a -> TreePath -> m (Tree a)
forestStoreGetTree (ForestStore model) path = liftIO $ do
ipath <- treePathGetIndices' path
store@Store { depth = d, content = cache } <-
readIORef (customStoreGetPrivate (CustomStore model))
case fromPath d ipath of
(Just iter) -> do
let (res, cache') = checkSuccess d iter cache
writeIORef (customStoreGetPrivate (CustomStore model)) store { content = cache' }
case cache' of
((_,node:_):_) | res -> return node
_ -> fail ("forestStoreGetTree: path does not exist " ++ show ipath)
_ -> fail ("forestStoreGetTree: path does not exist " ++ show ipath)
-- | Extract the forest from the current model.
--
forestStoreGetForest :: MonadIO m => ForestStore a -> m (Forest a)
forestStoreGetForest (ForestStore model) = liftIO $ do
store@Store { depth = d, content = cache } <-
readIORef (customStoreGetPrivate (CustomStore model))
return $ cacheToStore cache
-- | Extract a subtree from the current model. Like 'forestStoreGetTree'
-- but returns @Nothing@ if the path refers to a non-existent node.
--
forestStoreLookup :: MonadIO m => ForestStore a -> TreePath -> m (Maybe (Tree a))
forestStoreLookup (ForestStore model) path = liftIO $ do
ipath <- treePathGetIndices' path
store@Store { depth = d, content = cache } <-
readIORef (customStoreGetPrivate (CustomStore model))
case fromPath d ipath of
(Just iter) -> do
let (res, cache') = checkSuccess d iter cache
writeIORef (customStoreGetPrivate (CustomStore model)) store { content = cache' }
case cache' of
((_,node:_):_) | res -> return (Just node)
_ -> return Nothing
_ -> return Nothing
| gtk2hs/gi-gtk-hs | src/Data/GI/Gtk/ModelView/ForestStore.hs | lgpl-2.1 | 31,709 | 0 | 27 | 7,295 | 8,365 | 4,402 | 3,963 | 493 | 11 |
module Foundation
( App (..)
, Route (..)
, AppMessage (..)
, resourcesApp
, Handler
, Widget
, Form
, maybeAuth
, requireAuth
, module Settings
, module Model
, getExtra
) where
import Prelude
import Yesod
import Yesod.Static
import Yesod.Auth
import Yesod.Auth.BrowserId
import Yesod.Auth.GoogleEmail
import Yesod.Default.Config
import Yesod.Default.Util (addStaticContentExternal)
import Network.HTTP.Conduit (Manager)
import qualified Settings
import qualified Database.Persist.Store
import Settings.StaticFiles
import Database.Persist.GenericSql
import Settings (widgetFile, Extra (..))
import Model
import Text.Jasmine (minifym)
import Web.ClientSession (getKey)
import Text.Hamlet (hamletFile)
-- | The site argument for your application. This can be a good place to
-- keep settings and values requiring initialization before your application
-- starts running, such as database connections. Every handler will have
-- access to the data present here.
data App = App
{ settings :: AppConfig DefaultEnv Extra
, getStatic :: Static -- ^ Settings for static file serving.
, connPool :: Database.Persist.Store.PersistConfigPool Settings.PersistConfig -- ^ Database connection pool.
, httpManager :: Manager
, persistConfig :: Settings.PersistConfig
}
-- Set up i18n messages. See the messages folder.
mkMessage "App" "messages" "en"
-- This is where we define all of the routes in our application. For a full
-- explanation of the syntax, please see:
-- http://www.yesodweb.com/book/handler
--
-- This function does three things:
--
-- * Creates the route datatype AppRoute. Every valid URL in your
-- application can be represented as a value of this type.
-- * Creates the associated type:
-- type instance Route App = AppRoute
-- * Creates the value resourcesApp which contains information on the
-- resources declared below. This is used in Handler.hs by the call to
-- mkYesodDispatch
--
-- What this function does *not* do is create a YesodSite instance for
-- App. Creating that instance requires all of the handler functions
-- for our application to be in scope. However, the handler functions
-- usually require access to the AppRoute datatype. Therefore, we
-- split these actions into two functions and place them in separate files.
mkYesodData "App" $(parseRoutesFile "config/routes")
type Form x = Html -> MForm App App (FormResult x, Widget)
-- Please see the documentation for the Yesod typeclass. There are a number
-- of settings which can be configured by overriding methods here.
instance Yesod App where
approot = ApprootMaster $ appRoot . settings
-- Store session data on the client in encrypted cookies,
-- default session idle timeout is 120 minutes
makeSessionBackend _ = do
key <- getKey "config/client_session_key.aes"
return . Just $ clientSessionBackend key 120
defaultLayout widget = do
master <- getYesod
mmsg <- getMessage
-- We break up the default layout into two components:
-- default-layout is the contents of the body tag, and
-- default-layout-wrapper is the entire page. Since the final
-- value passed to hamletToRepHtml cannot be a widget, this allows
-- you to use normal widget features in default-layout.
pc <- widgetToPageContent $ do
$(widgetFile "normalize")
addStylesheet $ StaticR css_bootstrap_css
$(widgetFile "default-layout")
hamletToRepHtml $(hamletFile "templates/default-layout-wrapper.hamlet")
-- This is done to provide an optimization for serving static files from
-- a separate domain. Please see the staticRoot setting in Settings.hs
urlRenderOverride y (StaticR s) =
Just $ uncurry (joinPath y (Settings.staticRoot $ settings y)) $ renderRoute s
urlRenderOverride _ _ = Nothing
-- The page to be redirected to when authentication is required.
authRoute _ = Just $ AuthR LoginR
-- This function creates static content files in the static folder
-- and names them based on a hash of their content. This allows
-- expiration dates to be set far in the future without worry of
-- users receiving stale content.
addStaticContent = addStaticContentExternal minifym base64md5 Settings.staticDir (StaticR . flip StaticRoute [])
-- Place Javascript at bottom of the body tag so the rest of the page loads first
jsLoader _ = BottomOfBody
-- How to run database actions.
instance YesodPersist App where
type YesodPersistBackend App = SqlPersist
runDB f = do
master <- getYesod
Database.Persist.Store.runPool
(persistConfig master)
f
(connPool master)
instance YesodAuth App where
type AuthId App = UserId
-- Where to send a user after successful login
loginDest _ = HomeR
-- Where to send a user after logout
logoutDest _ = HomeR
getAuthId creds = runDB $ do
x <- getBy $ UniqueUser $ credsIdent creds
case x of
Just (Entity uid _) -> return $ Just uid
Nothing -> do
fmap Just $ insert $ User (credsIdent creds) Nothing
-- You can add other plugins like BrowserID, email or OAuth here
authPlugins _ = [authBrowserId, authGoogleEmail]
authHttpManager = httpManager
-- This instance is required to use forms. You can modify renderMessage to
-- achieve customized and internationalized form validation messages.
instance RenderMessage App FormMessage where
renderMessage _ _ = defaultFormMessage
-- | Get the 'Extra' value, used to hold data from the settings.yml file.
getExtra :: Handler Extra
getExtra = fmap (appExtra . settings) getYesod
-- Note: previous versions of the scaffolding included a deliver function to
-- send emails. Unfortunately, there are too many different options for us to
-- give a reasonable default. Instead, the information is available on the
-- wiki:
--
-- https://github.com/yesodweb/yesod/wiki/Sending-email
| marcotmarcot/mini-scilab-site | Foundation.hs | bsd-2-clause | 6,065 | 0 | 17 | 1,316 | 836 | 469 | 367 | -1 | -1 |
--------------------------------------------------------------------------------
module Language.Haskell.Stylish.Step.UnicodeSyntax.Tests
( tests
) where
--------------------------------------------------------------------------------
import Test.Framework (Test, testGroup)
import Test.Framework.Providers.HUnit (testCase)
import Test.HUnit (Assertion, (@=?))
--------------------------------------------------------------------------------
import Language.Haskell.Stylish.Step.UnicodeSyntax
import Language.Haskell.Stylish.Tests.Util
--------------------------------------------------------------------------------
tests :: Test
tests = testGroup "Language.Haskell.Stylish.Step.UnicodeSyntax.Tests"
[ testCase "case 01" case01
]
--------------------------------------------------------------------------------
case01 :: Assertion
case01 = expected @=? testStep (step True) input
where
input = unlines
[ "sort :: Ord a => [a] -> [a]"
, "sort _ = []"
]
expected = unlines
[ "{-# LANGUAGE UnicodeSyntax #-}"
, "sort ∷ Ord a ⇒ [a] → [a]"
, "sort _ = []"
]
| silkapp/stylish-haskell | tests/Language/Haskell/Stylish/Step/UnicodeSyntax/Tests.hs | bsd-3-clause | 1,246 | 0 | 8 | 278 | 157 | 98 | 59 | 19 | 1 |
module Baum.Label
( label
, Order (..)
)
where
-- $Id$
import Baum.Type
import Baum.Order
import Baum.Traverse
import Control.Monad.State
-- | label o t cs = t'
-- such that traversing t' in order o yields cs
-- and t' has the same shape as t.
-- (Only the 'Pre' case is currently handled by 'work'.)
label :: Order -> Term a b -> [ c ] -> Term d c
label o t cs = evalState ( work o t ) cs
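-- A small sketch (it uses the binary 'Node' constructor of 'Term', as in
-- 'work' below):
--
-- > label Pre (Node 'a' [Node 'b' []]) [1, 2]
--
-- evaluates to @Node 1 [Node 2 []]@, since preorder labels the root first.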
type ST c = State [ c ]
pop :: ST c c
pop = do
c : cs <- get
put cs
return c
work :: Order -> Term a b -> ST c ( Term d c )
work Pre ( Node f xs ) = do
g <- pop
ys <- mapM ( work Pre ) xs
return $ Node g ys
| florianpilz/autotool | src/Baum/Label.hs | gpl-2.0 | 552 | 0 | 10 | 167 | 235 | 121 | 114 | 20 | 1 |
{- |
Module : $Header$
Description : static basic analysis for FPL
Copyright : (c) Christian Maeder, DFKI GmbH 2011
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : portable
basic static analysis for FPL
-}
module Fpl.StatAna
( basicFplAnalysis
, minFplTerm
, simplifyTermExt
) where
import Fpl.As
import Fpl.Sign
import CASL.Sign
import CASL.MixfixParser
import CASL.StaticAna
import CASL.AS_Basic_CASL
import CASL.ShowMixfix
import CASL.Overload
import CASL.Quantification
import CASL.SimplifySen
import Common.AS_Annotation
import Common.DocUtils
import Common.ExtSign
import Common.GlobalAnnotations
import Common.Id
import Common.Lib.State
import Common.Result
import Common.Utils
import qualified Common.Lib.MapSet as MapSet
import Control.Monad
import qualified Data.Set as Set
import qualified Data.Map as Map
import Data.Maybe
basicFplAnalysis
:: (FplBasicSpec, FplSign, GlobalAnnos)
-> Result (FplBasicSpec, ExtSign FplSign Symbol, [Named FplForm])
basicFplAnalysis (b, s, ga) =
fmap (\ (r, ExtSign t syms, sens) ->
(r, ExtSign (delBuiltins t) syms, sens))
$ basicAnalysis minFplTerm anaFplExt (const return) mixFplAna
(b, addBuiltins s, ga)
mixFplAna :: Mix FplExt () TermExt SignExt
mixFplAna = emptyMix
{ getBaseIds = fplIds
, putParen = mapTermExt
, mixResolve = resolveTermExt
}
fplIds :: FplExt -> IdSets
fplIds fe = case fe of
FplSortItems sis _ -> unite $ map (fplSortIds . item) sis
FplOpItems ois _ -> unite $ map (fplOpIds . item) ois
fplSortIds :: FplSortItem -> IdSets
fplSortIds si = case si of
FreeType dt -> (ids_DATATYPE_DECL dt, Set.empty)
CaslSortItem _ -> emptyIdSets
fplOpIds :: FplOpItem -> IdSets
fplOpIds oi = let e = Set.empty in case oi of
FunOp (FunDef i (Op_head _ vs _ _) _ _) -> let s = Set.singleton i in
(if null vs then (s, e) else (e, s), e)
CaslOpItem o -> (ids_OP_ITEM o, e)
-- | put parens around terms
mapTermExt :: TermExt -> TermExt
mapTermExt te = let rec = mapTerm mapTermExt in case te of
FixDef fd -> FixDef $ mapFunDef fd
Case o l r -> Case (rec o)
(map (\ (p, t) -> (rec p, rec t)) l) r
Let fd t r -> Let (mapFunDef fd) (rec t) r
IfThenElse i t e r -> IfThenElse (rec i) (rec t) (rec e) r
EqTerm t e r -> EqTerm (rec t) (rec e) r
BoolTerm t -> BoolTerm (rec t)
-- | put parens around final term
mapFunDef :: FunDef -> FunDef
mapFunDef (FunDef o h at r) =
FunDef o h (fmap (mapTerm mapTermExt) at) r
{- | This is the plugin function for the mixfix analysis. Due to patterns there
may be unknown simple identifiers that are turned into constants and later, by
the overload resolution, into variables. Obviously, such variables cannot be fed
into the mixfix analysis like all other known variables. -}
resolveTermExt :: MixResolve TermExt
resolveTermExt ga ids te =
let recAux = resolveMixTrm mapTermExt resolveTermExt ga
rec = recAux ids
in case te of
FixDef fd -> fmap FixDef $ resolveFunDef ga ids fd
Case o l r -> do
ro <- rec o
-- CHECK: consider pattern variables
rl <- mapM (\ (p, t) -> liftM2 (,)
(rec p)
$ rec t) l
return $ Case ro rl r
Let fd@(FunDef o _ _ _) t r -> do
rfd <- resolveFunDef ga ids fd
rt <- recAux (addIdToRules o ids) t
return $ Let rfd rt r
IfThenElse i t e r -> do
ri <- rec i
rt <- rec t
re <- rec e
return $ IfThenElse ri rt re r
EqTerm t e r -> do
rt <- rec t
re <- rec e
return $ EqTerm rt re r
BoolTerm t -> fmap BoolTerm $ rec t
-- | resolve overloading in rhs and assume function to be in the signature
resolveFunDef :: MixResolve FunDef
resolveFunDef ga ids (FunDef o h@(Op_head _ vs _ _) at r) = do
nt <- resolveMixTrm mapTermExt resolveTermExt ga
(addIdToRules o $ extendRules (varDeclTokens vs) ids) $ item at
return $ FunDef o h at { item = nt } r
-- | get constructors for input sort
getConstrs :: FplSign -> SORT -> OpMap
getConstrs sign resSort = MapSet.mapSet
(Set.filter $ leqSort sign resSort . opRes) $ constr $ extendedInfo sign
{- | This function tries to recognize variables in case patterns (application
terms) after overload resolution. A current limitation is that a unique sort
is needed as input; it is taken from the term between @case@ and @of@. -}
resolvePattern :: FplSign -> (SORT, FplTerm) -> Result ([VAR_DECL], FplTerm)
resolvePattern sign (resSort, term) =
let err msg = fail $ msg ++ " " ++ showDoc term "" in
case term of
Application opSym args p ->
let ide@(Id ts _ _) = opSymbName opSym in
case filter ( \ oTy -> length (opArgs oTy) == length args
&& case opSym of
Qual_op_name _ symTy _ ->
leqF sign oTy $ toOpType symTy
_ -> True
)
$ Set.toList $ MapSet.lookup ide $ getConstrs sign resSort of
[] -> if null args && isSimpleId ide then
let v = Var_decl [head ts] resSort $ posOfId ide
in return ([v], toQualVar v)
else err "unresolved pattern"
[OpType k as r] -> do
l <- mapM (resolvePattern sign) $ zip as args
return (concatMap fst l,
Application (Qual_op_name ide (Op_type k as r p) p) (map snd l) p)
_ -> err "ambiguous pattern"
Qual_var v s r -> if leqSort sign s resSort then
return ([Var_decl [v] s r], term)
else err "wrong type of pattern variable"
Sorted_term t s r -> if leqSort sign s resSort then do
(vs, nt) <- resolvePattern sign (s, t)
return (vs, Sorted_term nt s r)
else err "wrong typed pattern"
_ -> err "unexpected pattern"
addFunToSign :: FunDef -> State FplSign ()
addFunToSign (FunDef o h _ _) =
maybe (return ()) (\ ty -> addOp (emptyAnno o) (toOpType ty) o)
$ headToType h
letVars :: FunDef -> [VAR_DECL]
letVars (FunDef o (Op_head _ vs ms _) at ps) =
[ Var_decl [idToSimpleId o] (sortOfTerm $ item at) ps
| isSimpleId o && isNothing ms && null vs ]
addFunVar :: FunDef -> State FplSign ()
addFunVar = mapM_ addVars . letVars
{- | perform overload resolution after mixfix analysis. The type of patterns
is deduced from the top term. Overlapping or exhaustive patterns are not
recognized yet. -}
minFplTerm :: Min TermExt SignExt
minFplTerm sig te = case te of
FixDef fd -> fmap FixDef $ minFunDef sig fd
Case o l r -> do
ro <- oneExpTerm minFplTerm sig o
-- assume unique type of top-level term for now
let s = sortOfTerm ro
rl <- mapM (\ (p, t) -> do
(vs, np) <- resolvePattern sig (s, p)
appendDiags $ checkUniqueness . map fst $ flatVAR_DECLs vs
let newSign = execState (mapM_ addVars vs) sig
rt <- minExpTerm minFplTerm newSign t
return (np, rt)) l
let (ps, tts) = unzip rl
cSupers tl = case tl of
[] -> True
hd : rt -> all (haveCommonSupersorts True sig
(sortOfTerm hd) . sortOfTerm) rt && cSupers rt
nts <- isUnambiguous "" (globAnnos sig) (map snd l)
(map (filter cSupers . combine) $ combine tts) r
let nl = zip ps nts
minSort sl = if Set.null sl then Set.empty else
let (hd, rt) = Set.deleteFindMin sl
in Set.unions . map (Set.fromList . minimalSupers sig hd)
. Set.toList $ Set.insert hd $ minSort rt
mSort = minSort . Set.fromList $ map sortOfTerm nts
case Set.toList mSort of
[tSort] -> do
fl <- mapM (\ (p, t) -> do
let pvs = freeTermVars sig p
tvs = freeTermVars sig t
unused = Set.difference pvs tvs
unless (Set.null unused) $
appendDiags $ map (mkDiag Warning "unused pattern variables")
$ Set.toList unused
return (p, mkSorted sig t tSort r)) nl
return $ Case ro fl r
sl -> mkError ("no common supersort for case terms: " ++ show sl) r
Let fd t r -> do
let newSign = execState (addFunToSign fd) sig
rfd <- minFunDef newSign fd
let sign2 = execState (addFunVar rfd) newSign
rt <- oneExpTerm minFplTerm sign2 t
return $ Let rfd rt r
IfThenElse i t e r -> do
ri <- oneExpTerm minFplTerm sig $ Sorted_term i boolSort r
Equation rt Strong re _ <-
minExpFORMULAeq minFplTerm sig (`Equation` Strong) t e r
return $ IfThenElse ri rt re r
EqTerm t e r -> do
Equation rt Strong re _ <-
minExpFORMULAeq minFplTerm sig (`Equation` Strong) t e r
return $ EqTerm rt re r
BoolTerm t -> fmap BoolTerm $ oneExpTerm minFplTerm sig t
-- | type check rhs and assume function to be in the signature
minFunDef :: Sign TermExt SignExt -> FunDef -> Result FunDef
minFunDef sig fd@(FunDef o h@(Op_head _ vs ms _) at r) = do
let newSign = execState (mapM_ addVars vs >> addFunToSign fd) sig
varSign = execState (mapM_ addVars vs) $ emptySign emptyFplSign
t = item at
nt <- oneExpTerm minFplTerm newSign $ maybe t (\ s -> Sorted_term t s r) ms
appendDiags $ warnUnusedVars " function " varSign $ freeTermVars newSign nt
return $ FunDef o h at { item = nt } r
getDDSorts :: [Annoted FplSortItem] -> [SORT]
getDDSorts = foldl (\ l si -> case item si of
FreeType (Datatype_decl s _ _) -> s : l
CaslSortItem _ -> l) []
anaFplExt :: Ana FplExt FplExt () TermExt SignExt
anaFplExt mix fe = case fe of
FplSortItems ais r -> do
mapM_ (\ s -> addSort NonEmptySorts (emptyAnno s) s)
$ getDDSorts ais
ns <- mapAnM (anaFplSortItem mix) ais
closeSubsortRel
return $ FplSortItems ns r
FplOpItems ais r -> do
ns <- mapAnM (anaFplOpItem mix) ais
return $ FplOpItems ns r
anaFplSortItem :: Ana FplSortItem FplExt () TermExt SignExt
anaFplSortItem mix si = case si of
FreeType dt@(Datatype_decl s aalts _) -> do
ana_DATATYPE_DECL Free dt
sign <- get
let cm = getConstrs sign s
updateExtInfo $ \ cs -> foldM
(\ e aa -> let a = item aa in if isConsAlt a then do
let (c, ty, _) = getConsType s a
unless (MapSet.null cm)
$ if Set.member (mkPartial ty)
$ makePartial $ MapSet.lookup c cm
then appendDiags [mkDiag Warning "repeated constructor" c]
else mkError "illegal new constructor" c
return e { constr = addOpTo c ty $ constr e }
else mkError "unexpected subsort embedding" a) cs aalts
return si
CaslSortItem s -> fmap (CaslSortItem . item)
$ ana_SORT_ITEM minFplTerm mix NonEmptySorts $ emptyAnno s
anaFplOpItem :: Ana FplOpItem FplExt () TermExt SignExt
anaFplOpItem mix oi = case oi of
FunOp fd@(FunDef i oh@(Op_head _ vs r _) at ps) -> do
let mty = headToType oh
lb = getRLabel at
addFunToSign fd
e <- get -- save
put e { varMap = Map.empty }
mapM_ addVars vs
sign <- get
put e -- restore
let Result ds mt = anaTerm minFplTerm mix sign r ps $ item at
addDiags ds
case mt of
Nothing -> return
$ maybe oi (\ ty -> CaslOpItem $ Op_decl [i] ty [] ps) mty
Just (resT, anaT) -> do
addSentences
[(makeNamed lb $ ExtFORMULA $ FixDef
$ FunDef i oh at { item = anaT } ps)
{ isAxiom = notImplied at, isDef = True }]
return $ FunOp $ FunDef i oh at { item = resT } ps
CaslOpItem o -> fmap (CaslOpItem . item)
$ ana_OP_ITEM minFplTerm mix (emptyAnno o)
freeFunDefVars :: Sign TermExt e -> FunDef -> VarSet
freeFunDefVars s (FunDef _ (Op_head _ vs _ _) at _) = Set.difference
(freeTermVars s $ item at) $ Set.fromList $ flatVAR_DECLs vs
instance TermExtension TermExt where
freeVarsOfExt s te = case te of
FixDef fd -> freeFunDefVars s fd
Case o l _ -> Set.unions $ freeTermVars s o
: map (\ (p, t) -> Set.difference (freeTermVars s t) $ freeTermVars s p) l
Let fd t _ -> Set.difference
(Set.union (freeFunDefVars s fd) $ freeTermVars s t)
$ Set.fromList $ flatVAR_DECLs $ letVars fd
IfThenElse f t e _ -> Set.unions $ map (freeTermVars s) [f, t, e]
EqTerm t e _ -> Set.unions $ map (freeTermVars s) [t, e]
BoolTerm t -> freeTermVars s t
optTermSort te = case te of
Case _ ((_, t) : _) _ -> optTermSort t
Let _ t _ -> optTermSort t
IfThenElse _ t _ _ -> optTermSort t
EqTerm {} -> Just boolSort
BoolTerm t -> optTermSort t
_ -> Nothing -- all others are formulas
termToFormula t = let s = sortOfTerm t in
if s == boolSort
then return $ ExtFORMULA $ BoolTerm t
else fail $ "expected boolean term but found sort: " ++ show s
simplifyTermExt :: FplSign -> TermExt -> TermExt
simplifyTermExt s te = let rec = simplifyTerm minFplTerm simplifyTermExt in
case te of
FixDef fd -> FixDef $ simplifyFunDef s fd
Case o l r -> Case (rec s o)
(map (\ (p, t) -> let
vs = freeTermVars s p
newSign = execState
(mapM_ (uncurry $ flip addVar) $ Set.toList vs) s
in (rec newSign p, rec newSign t)) l) r
Let fd t r ->
let newSign = execState (addFunToSign fd) s
sign2 = execState (addFunVar fd) newSign
in Let (simplifyFunDef newSign fd)
(rec sign2 t) r
IfThenElse f t e r -> IfThenElse (rec s f) (rec s t) (rec s e) r
EqTerm t e r -> EqTerm (rec s t) (rec s e) r
BoolTerm t -> BoolTerm (rec s t)
simplifyFunDef :: FplSign -> FunDef -> FunDef
simplifyFunDef sig fd@(FunDef o h@(Op_head _ vs _ _) at r) =
let newSign = execState (mapM_ addVars vs >> addFunToSign fd) sig
in FunDef o h (fmap (simplifyTerm minFplTerm simplifyTermExt newSign) at) r
| mariefarrell/Hets | Fpl/StatAna.hs | gpl-2.0 | 13,685 | 0 | 27 | 3,750 | 5,129 | 2,503 | 2,626 | 304 | 10 |
module Usage where
import qualified Definition as Def
test :: Int
test = D<caret>ef.seven + 1
| charleso/intellij-haskforce | tests/gold/codeInsight/QualifiedImport_QualifierResolves/Usage.hs | apache-2.0 | 97 | 0 | 7 | 19 | 34 | 21 | 13 | -1 | -1 |
module AST.Type where
import Control.Applicative ((<$>), (<*>))
import Data.Binary
import qualified Data.Map as Map
import qualified AST.Variable as Var
import AST.PrettyPrint
import qualified AST.Helpers as Help
import Text.PrettyPrint as P
data Type var
= Lambda (Type var) (Type var)
| Var String
| Type var
| App (Type var) [Type var]
| Record [(String, Type var)] (Maybe (Type var))
| Aliased Var.Canonical (Type var)
deriving (Eq,Show)
type RawType = Type Var.Raw
type CanonicalType = Type Var.Canonical
fieldMap :: [(String,a)] -> Map.Map String [a]
fieldMap fields =
foldl (\r (x,t) -> Map.insertWith (++) x [t] r) Map.empty fields
recordOf :: [(String, Type var)] -> Type var
recordOf fields = Record fields Nothing
listOf :: RawType -> RawType
listOf t = App (Type (Var.Raw "List")) [t]
tupleOf :: [RawType] -> RawType
tupleOf ts = App (Type t) ts
where
t = Var.Raw ("_Tuple" ++ show (length ts))
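-- For example, @listOf t@ is @App (Type (Var.Raw "List")) [t]@ and
-- @tupleOf [a, b]@ is @App (Type (Var.Raw "_Tuple2")) [a, b]@.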
instance (Var.ToString var, Pretty var) => Pretty (Type var) where
pretty tipe =
case tipe of
Lambda _ _ -> P.sep [ t, P.sep (map (P.text "->" <+>) ts) ]
where
t:ts = map prettyLambda (collectLambdas tipe)
prettyLambda t = case t of
Lambda _ _ -> P.parens (pretty t)
_ -> pretty t
Var x -> P.text x
Type var ->
let v = Var.toString var in
P.text (if v == "_Tuple0" then "()" else v)
App f args ->
case (f,args) of
(Type name, _)
| Help.isTuple (Var.toString name) ->
P.parens . P.sep . P.punctuate P.comma $ map pretty args
_ -> P.hang (pretty f) 2 (P.sep $ map prettyParens args)
Record _ _ ->
case flattenRecord tipe of
([], Nothing) ->
P.text "{}"
(fields, Nothing) ->
P.sep
[ P.cat (zipWith (<+>) (P.lbrace : repeat P.comma) (map prettyField fields))
, P.rbrace
]
(fields, Just x) ->
P.hang
(P.lbrace <+> P.text x <+> P.text "|")
4
(P.sep
[ P.cat (zipWith (<+>) (P.space : repeat P.comma) (map prettyField fields))
, P.rbrace
])
where
prettyField (field, tipe) =
P.text field <+> P.text ":" <+> pretty tipe
Aliased name t ->
let t' = pretty t in
if show t' `elem` ["Int", "Float", "String", "Char", "Bool"]
then t'
else pretty name
collectLambdas :: Type var -> [Type var]
collectLambdas tipe =
case tipe of
Lambda arg body -> arg : collectLambdas body
_ -> [tipe]
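-- For example (provided @c@ is not itself a 'Lambda'):
--
-- > collectLambdas (Lambda a (Lambda b c)) == [a, b, c]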
prettyParens :: (Var.ToString var, Pretty var) => Type var -> Doc
prettyParens tipe = parensIf (needed tipe) (pretty tipe)
where
needed t =
case t of
Aliased _ t' -> needed t'
Lambda _ _ -> True
App (Type name) _ | Help.isTuple (Var.toString name) -> False
App t' [] -> needed t'
App _ _ -> True
_ -> False
flattenRecord :: Type var -> ( [(String, Type var)], Maybe String )
flattenRecord tipe =
case tipe of
Var x -> ([], Just x)
Record fields Nothing -> (fields, Nothing)
Record fields (Just ext) ->
let (fields',ext') = flattenRecord ext
in (fields' ++ fields, ext')
Aliased _ tipe' -> flattenRecord tipe'
_ -> error "Trying to flatten ill-formed record."
instance Binary var => Binary (Type var) where
put tipe =
case tipe of
Lambda t1 t2 -> putWord8 0 >> put t1 >> put t2
Var x -> putWord8 1 >> put x
Type name -> putWord8 2 >> put name
App t1 t2 -> putWord8 3 >> put t1 >> put t2
Record fs ext -> putWord8 4 >> put fs >> put ext
Aliased var t -> putWord8 5 >> put var >> put t
get = do
n <- getWord8
case n of
0 -> Lambda <$> get <*> get
1 -> Var <$> get
2 -> Type <$> get
3 -> App <$> get <*> get
4 -> Record <$> get <*> get
5 -> Aliased <$> get <*> get
_ -> error "Error reading a valid type from serialized string"
| avh4/elm-compiler | src/AST/Type.hs | bsd-3-clause | 4,312 | 0 | 22 | 1,546 | 1,697 | 855 | 842 | 112 | 6 |
{-# OPTIONS -w #-}
module Plugin.Free.Expr where
import Plugin.Free.Type
import Plugin.Free.Util
varInExpr :: Var -> Expr -> Bool
varInExpr v (EBuiltin _)
= False
varInExpr v (EVar v')
= v == v'
varInExpr v (EVarOp _ _ v')
= False
varInExpr v (EApp e1 e2)
= varInExpr v e1 || varInExpr v e2
varInExpr v (ETyApp e1 t)
= varInExpr v e1
leftVarOfExpr :: Expr -> Var
leftVarOfExpr (EVar v) = v
leftVarOfExpr (EApp e _) = leftVarOfExpr e
leftVarOfExpr (ETyApp e _) = leftVarOfExpr e
exprSubst :: Var -> Expr -> Expr -> Expr
exprSubst v e e'@(EBuiltin _)
= e'
exprSubst v e e'@(EVar v')
| v == v' = e
| otherwise = e'
exprSubst v e e'@(EVarOp _ _ v')
| v == v' = e
| otherwise = e'
exprSubst v e (EApp e1 e2)
= EApp (exprSubst v e e1) (exprSubst v e e2)
exprSubst v e (ETyApp e1 t)
= ETyApp (exprSubst v e e1) t
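-- For example:
--
-- > exprSubst "x" (EVar "y") (EApp (EVar "x") (EVar "z"))
-- > == EApp (EVar "y") (EVar "z")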
type Var = String
data Fixity
= FL | FN | FR
deriving (Eq, Show)
data Expr
= EVar Var
| EBuiltin Builtin
| EVarOp Fixity Int Var
| EApp Expr Expr
| ETyApp Expr Type
deriving (Eq, Show)
data Builtin
= BMap TyName
| BId
| BProj Int Int
| BMapTuple Int
| BArr
deriving (Eq, Show)
data ExprCtx
= ECDot
| ECAppL ExprCtx Expr
| ECAppR Expr ExprCtx
| ECTyApp ExprCtx Type
deriving (Eq, Show)
applySimplifierExpr :: (Expr -> Expr) -> (Expr -> Expr)
applySimplifierExpr s (EApp e1 e2)
= EApp (s e1) (s e2)
applySimplifierExpr s (ETyApp e t)
= ETyApp (s e) t
applySimplifierExpr s e
= e
unzipExpr :: Expr -> ExprCtx -> Expr
unzipExpr e ECDot = e
unzipExpr e (ECAppL c e2) = unzipExpr (EApp e e2) c
unzipExpr e (ECAppR e1 c) = unzipExpr (EApp e1 e) c
unzipExpr e (ECTyApp c t) = unzipExpr (ETyApp e t) c
varInCtx :: Var -> ExprCtx -> Bool
varInCtx v ECDot
= False
varInCtx v (ECAppL c e2)
= varInCtx v c || varInExpr v e2
varInCtx v (ECAppR e1 c)
= varInCtx v c || varInExpr v e1
varInCtx v (ECTyApp c _)
= varInCtx v c
precAPP :: Int
precAPP = 10
instance Pretty Expr where
prettyP p (EBuiltin b)
= prettyP p b
prettyP _ (EVar v)
= text v
prettyP _ (EVarOp _ _ v)
= lparen <> text v <> rparen
prettyP p (EApp (EApp (EVarOp fix prec op) e1) e2)
= prettyParen (p > prec) (
prettyP pl e1 <+> text op <+> prettyP pr e2
)
where
pl = if fix == FL then prec else prec+1
pr = if fix == FR then prec else prec+1
prettyP p (EApp e1 e2)
= prettyParen (p > precAPP) (
prettyP precAPP e1 <+> prettyP (precAPP+1) e2
)
prettyP p (ETyApp e t)
= prettyP precAPP e
instance Pretty Builtin where
prettyP p (BMap "[]") = text "$map"
prettyP p (BMap c) = text ("$map_" ++ c)
prettyP p BId = text "$id"
prettyP p (BProj 2 1) = text "$fst"
prettyP p (BProj 2 2) = text "$snd"
prettyP p (BProj 3 1) = text "$fst3"
prettyP p (BProj 3 2) = text "$snd3"
prettyP p (BProj 3 3) = text "$thd3"
prettyP p (BProj l i) = text ("$proj_" ++ show l ++ "_" ++ show i)
prettyP p (BMapTuple 2) = text "$map_Pair"
prettyP p (BMapTuple 3) = text "$map_Triple"
prettyP p (BMapTuple n) = text $ "$map_Tuple" ++ show n
prettyP p BArr = text "$arr"
-- vim: ts=4:sts=4:expandtab:ai
| zeekay/lambdabot | Plugin/Free/Expr.hs | mit | 3,484 | 0 | 12 | 1,149 | 1,459 | 734 | 725 | 110 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.GHC.IPI642
-- Copyright : (c) The University of Glasgow 2004
--
-- Maintainer : [email protected]
-- Portability : portable
--
{- All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of the University nor the names of other
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -}
module Distribution.Simple.GHC.IPI642 (
InstalledPackageInfo,
toCurrent,
-- Don't use these, they're only for conversion purposes
PackageIdentifier, convertPackageId,
License, convertLicense,
convertModuleName
) where
import qualified Distribution.InstalledPackageInfo as Current
import qualified Distribution.Package as Current hiding (depends)
import qualified Distribution.License as Current
import Distribution.Version (Version)
import Distribution.ModuleName (ModuleName)
import Distribution.Text (simpleParse,display)
import Data.Maybe
-- | This is the InstalledPackageInfo type used by ghc-6.4.2 and later.
--
-- It's here purely for the 'Read' instance so that we can read the package
-- database used by those ghc versions. It is a little hacky to read the
-- package db directly, but we do need the info and until ghc-6.9 there was
-- no better method.
--
-- In ghc-6.4.1 and before the format was slightly different.
-- See "Distribution.Simple.GHC.IPI642"
--
data InstalledPackageInfo = InstalledPackageInfo {
package :: PackageIdentifier,
license :: License,
copyright :: String,
maintainer :: String,
author :: String,
stability :: String,
homepage :: String,
pkgUrl :: String,
description :: String,
category :: String,
exposed :: Bool,
exposedModules :: [String],
hiddenModules :: [String],
importDirs :: [FilePath],
libraryDirs :: [FilePath],
hsLibraries :: [String],
extraLibraries :: [String],
extraGHCiLibraries:: [String],
includeDirs :: [FilePath],
includes :: [String],
depends :: [PackageIdentifier],
hugsOptions :: [String],
ccOptions :: [String],
ldOptions :: [String],
frameworkDirs :: [FilePath],
frameworks :: [String],
haddockInterfaces :: [FilePath],
haddockHTMLs :: [FilePath]
}
deriving Read
data PackageIdentifier = PackageIdentifier {
pkgName :: String,
pkgVersion :: Version
}
deriving Read
data License = GPL | LGPL | BSD3 | BSD4
| PublicDomain | AllRightsReserved | OtherLicense
deriving Read
convertPackageId :: PackageIdentifier -> Current.PackageIdentifier
convertPackageId PackageIdentifier { pkgName = n, pkgVersion = v } =
Current.PackageIdentifier (Current.PackageName n) v
mkInstalledPackageId :: Current.PackageIdentifier -> Current.InstalledPackageId
mkInstalledPackageId = Current.InstalledPackageId . display
convertModuleName :: String -> ModuleName
convertModuleName s = fromJust $ simpleParse s
convertLicense :: License -> Current.License
convertLicense GPL = Current.GPL Nothing
convertLicense LGPL = Current.LGPL Nothing
convertLicense BSD3 = Current.BSD3
convertLicense BSD4 = Current.BSD4
convertLicense PublicDomain = Current.PublicDomain
convertLicense AllRightsReserved = Current.AllRightsReserved
convertLicense OtherLicense = Current.OtherLicense
toCurrent :: InstalledPackageInfo -> Current.InstalledPackageInfo
toCurrent ipi@InstalledPackageInfo{} = Current.InstalledPackageInfo {
Current.installedPackageId = mkInstalledPackageId (convertPackageId (package ipi)),
Current.sourcePackageId = convertPackageId (package ipi),
Current.license = convertLicense (license ipi),
Current.copyright = copyright ipi,
Current.maintainer = maintainer ipi,
Current.author = author ipi,
Current.stability = stability ipi,
Current.homepage = homepage ipi,
Current.pkgUrl = pkgUrl ipi,
Current.synopsis = "",
Current.description = description ipi,
Current.category = category ipi,
Current.exposed = exposed ipi,
Current.exposedModules = map convertModuleName (exposedModules ipi),
Current.hiddenModules = map convertModuleName (hiddenModules ipi),
Current.trusted = Current.trusted Current.emptyInstalledPackageInfo,
Current.importDirs = importDirs ipi,
Current.libraryDirs = libraryDirs ipi,
Current.hsLibraries = hsLibraries ipi,
Current.extraLibraries = extraLibraries ipi,
Current.extraGHCiLibraries = extraGHCiLibraries ipi,
Current.includeDirs = includeDirs ipi,
Current.includes = includes ipi,
Current.depends = map (mkInstalledPackageId.convertPackageId) (depends ipi),
Current.hugsOptions = hugsOptions ipi,
Current.ccOptions = ccOptions ipi,
Current.ldOptions = ldOptions ipi,
Current.frameworkDirs = frameworkDirs ipi,
Current.frameworks = frameworks ipi,
Current.haddockInterfaces = haddockInterfaces ipi,
Current.haddockHTMLs = haddockHTMLs ipi
}
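-- A hedged usage sketch, not part of the original module: a ghc-6.4.2 style
-- package.conf entry can be parsed with the derived 'Read' instance above and
-- converted with 'toCurrent'. The helper name is ours and no error handling
-- is attempted.
readAndConvert :: String -> Current.InstalledPackageInfo
readAndConvert = toCurrent . read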
| jwiegley/ghc-release | libraries/Cabal/cabal/Distribution/Simple/GHC/IPI642.hs | gpl-3.0 | 6,703 | 0 | 11 | 1,519 | 1,011 | 596 | 415 | 98 | 1 |
{-# LANGUAGE RankNTypes, PolyKinds, TypeInType #-}
module TypeSkolEscape where
import GHC.Types
import GHC.Exts
type Bad = forall (v :: Levity) (a :: TYPE v). a
| nushio3/ghc | testsuite/tests/dependent/should_fail/TypeSkolEscape.hs | bsd-3-clause | 164 | 0 | 8 | 28 | 42 | 27 | 15 | -1 | -1 |
{-# LANGUAGE StandaloneKindSignatures #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE PolyKinds, ExplicitForAll #-}
module SAKS_Fail018 where
import Data.Kind (Type)
data P w
-- j = k, x = a
type T :: forall k. forall (a :: k) -> Type
data T (x :: j) = MkT (P k) (P j) (P x) -- 'k' is not brought into scope by ScopedTypeVariables
| sdiehl/ghc | testsuite/tests/saks/should_fail/saks_fail018.hs | bsd-3-clause | 344 | 0 | 8 | 69 | 83 | 52 | 31 | -1 | -1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TupleSections #-}
-- | Construct a @Plan@ for how to build
module Stack.Build.ConstructPlan
( constructPlan
) where
import Control.Exception.Lifted
import Control.Monad
import Control.Monad.Catch (MonadCatch)
import Control.Monad.IO.Class
import Control.Monad.Logger (MonadLogger)
import Control.Monad.RWS.Strict
import Control.Monad.Trans.Resource
import qualified Data.ByteString.Char8 as S8
import Data.Either
import Data.Function
import Data.List
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as M
import qualified Data.Map.Strict as Map
import Data.Maybe
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Text (Text)
import qualified Data.Text as T
import Data.Text.Encoding (decodeUtf8With)
import Data.Text.Encoding.Error (lenientDecode)
import Distribution.Package (Dependency (..))
import Distribution.Version (anyVersion)
import Network.HTTP.Client.Conduit (HasHttpManager)
import Prelude hiding (FilePath, pi, writeFile)
import Stack.Build.Cache
import Stack.Build.Haddock
import Stack.Build.Installed
import Stack.Build.Source
import Stack.Types.Build
import Stack.BuildPlan
import Stack.Package
import Stack.PackageIndex
import Stack.Types
data PackageInfo
= PIOnlyInstalled Version InstallLocation Installed
| PIOnlySource PackageSource
| PIBoth PackageSource Installed
combineSourceInstalled :: PackageSource
-> (Version, InstallLocation, Installed)
-> PackageInfo
combineSourceInstalled ps (version, location, installed) =
assert (piiVersion ps == version) $
assert (piiLocation ps == location) $
case location of
-- Always trust something in the snapshot
Snap -> PIOnlyInstalled version location installed
Local -> PIBoth ps installed
type CombinedMap = Map PackageName PackageInfo
combineMap :: SourceMap -> InstalledMap -> CombinedMap
combineMap = Map.mergeWithKey
(\_ s i -> Just $ combineSourceInstalled s i)
(fmap PIOnlySource)
(fmap (\(v, l, i) -> PIOnlyInstalled v l i))
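-- Hedged usage sketch (not part of the original module): the combined map can
-- also be queried directly to inspect how a single package is known to the
-- planner; the helper name is ours.
lookupPackageInfo :: PackageName -> SourceMap -> InstalledMap -> Maybe PackageInfo
lookupPackageInfo name sourceMap installedMap =
    Map.lookup name (combineMap sourceMap installedMap)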
data AddDepRes
= ADRToInstall Task
| ADRFound InstallLocation Version Installed
deriving Show
data W = W
{ wFinals :: !(Map PackageName (Either ConstructPlanException (Task, LocalPackageTB)))
, wInstall :: !(Map Text InstallLocation)
-- ^ executable to be installed, and location where the binary is placed
, wDirty :: !(Map PackageName Text)
-- ^ why a local package is considered dirty
, wDeps :: !(Set PackageName)
-- ^ Packages which count as dependencies
}
instance Monoid W where
mempty = W mempty mempty mempty mempty
mappend (W a b c d) (W w x y z) = W (mappend a w) (mappend b x) (mappend c y) (mappend d z)
type M = RWST
Ctx
W
(Map PackageName (Either ConstructPlanException AddDepRes))
IO
data Ctx = Ctx
{ mbp :: !MiniBuildPlan
, baseConfigOpts :: !BaseConfigOpts
, loadPackage :: !(PackageName -> Version -> Map FlagName Bool -> IO Package)
, combinedMap :: !CombinedMap
, toolToPackages :: !(Dependency -> Map PackageName VersionRange)
, ctxEnvConfig :: !EnvConfig
, callStack :: ![PackageName]
, extraToBuild :: !(Set PackageName)
, latestVersions :: !(Map PackageName Version)
, wanted :: !(Set PackageName)
}
instance HasStackRoot Ctx
instance HasPlatform Ctx
instance HasConfig Ctx
instance HasBuildConfig Ctx where
getBuildConfig = getBuildConfig . getEnvConfig
instance HasEnvConfig Ctx where
getEnvConfig = ctxEnvConfig
constructPlan :: forall env m.
(MonadCatch m, MonadReader env m, HasEnvConfig env, MonadIO m, MonadLogger m, MonadBaseControl IO m, HasHttpManager env)
=> MiniBuildPlan
-> BaseConfigOpts
-> [LocalPackage]
-> Set PackageName -- ^ additional packages that must be built
-> Map GhcPkgId PackageIdentifier -- ^ locally registered
-> (PackageName -> Version -> Map FlagName Bool -> IO Package) -- ^ load upstream package
-> SourceMap
-> InstalledMap
-> m Plan
constructPlan mbp0 baseConfigOpts0 locals extraToBuild0 locallyRegistered loadPackage0 sourceMap installedMap = do
menv <- getMinimalEnvOverride
caches <- getPackageCaches menv
let latest = Map.fromListWith max $ map toTuple $ Map.keys caches
econfig <- asks getEnvConfig
let onWanted lp = do
case lpExeComponents lp of
Nothing -> return ()
Just _ -> void $ addDep False $ packageName $ lpPackage lp
case lpTestBench lp of
Just tb -> addFinal lp tb
Nothing -> return ()
let inner = do
mapM_ onWanted $ filter lpWanted locals
mapM_ (addDep False) $ Set.toList extraToBuild0
((), m, W efinals installExes dirtyReason deps) <- liftIO $ runRWST inner (ctx econfig latest) M.empty
let toEither (_, Left e) = Left e
toEither (k, Right v) = Right (k, v)
(errlibs, adrs) = partitionEithers $ map toEither $ M.toList m
(errfinals, finals) = partitionEithers $ map toEither $ M.toList efinals
errs = errlibs ++ errfinals
if null errs
then do
let toTask (_, ADRFound _ _ _) = Nothing
toTask (name, ADRToInstall task) = Just (name, task)
tasks = M.fromList $ mapMaybe toTask adrs
takeSubset =
case boptsBuildSubset $ bcoBuildOpts baseConfigOpts0 of
BSAll -> id
BSOnlySnapshot -> stripLocals
BSOnlyDependencies -> stripNonDeps deps
return $ takeSubset Plan
{ planTasks = tasks
, planFinals = M.fromList finals
, planUnregisterLocal = mkUnregisterLocal tasks dirtyReason locallyRegistered
, planInstallExes =
if boptsInstallExes $ bcoBuildOpts baseConfigOpts0
then installExes
else Map.empty
}
else throwM $ ConstructPlanExceptions errs (bcStackYaml $ getBuildConfig econfig)
where
ctx econfig latest = Ctx
{ mbp = mbp0
, baseConfigOpts = baseConfigOpts0
, loadPackage = loadPackage0
, combinedMap = combineMap sourceMap installedMap
, toolToPackages = \ (Dependency name _) ->
maybe Map.empty (Map.fromSet (\_ -> anyVersion)) $
Map.lookup (S8.pack . packageNameString . fromCabalPackageName $ name) toolMap
, ctxEnvConfig = econfig
, callStack = []
, extraToBuild = extraToBuild0
, latestVersions = latest
, wanted = wantedLocalPackages locals
}
-- TODO Currently, this will only consider and install tools from the
-- snapshot. It will not automatically install build tools from extra-deps
-- or local packages.
toolMap = getToolMap mbp0
-- | Determine which packages to unregister based on the given tasks and
-- already registered local packages
mkUnregisterLocal :: Map PackageName Task
-> Map PackageName Text
-> Map GhcPkgId PackageIdentifier
-> Map GhcPkgId (PackageIdentifier, Text)
mkUnregisterLocal tasks dirtyReason locallyRegistered =
Map.unions $ map toUnregisterMap $ Map.toList locallyRegistered
where
toUnregisterMap (gid, ident) =
case M.lookup name tasks of
Nothing -> Map.empty
Just _ -> Map.singleton gid
( ident
, fromMaybe "likely unregistering due to a version change"
$ Map.lookup name dirtyReason
)
where
name = packageIdentifierName ident
addFinal :: LocalPackage -> LocalPackageTB -> M ()
addFinal lp lptb = do
depsRes <- addPackageDeps False package
res <- case depsRes of
Left e -> return $ Left e
Right (missing, present, _minLoc) -> do
ctx <- ask
return $ Right (Task
{ taskProvides = PackageIdentifier
(packageName package)
(packageVersion package)
, taskConfigOpts = TaskConfigOpts missing $ \missing' ->
let allDeps = Map.union present missing'
in configureOpts
(getEnvConfig ctx)
(baseConfigOpts ctx)
allDeps
True -- wanted
Local
package
, taskPresent = present
, taskType = TTLocal lp
}, lptb)
tell mempty { wFinals = Map.singleton (packageName package) res }
where
package = lptbPackage lptb
addDep :: Bool -- ^ is this being used by a dependency?
-> PackageName -> M (Either ConstructPlanException AddDepRes)
addDep treatAsDep' name = do
ctx <- ask
let treatAsDep = treatAsDep' || name `Set.notMember` wanted ctx
when treatAsDep $ markAsDep name
m <- get
case Map.lookup name m of
Just res -> return res
Nothing -> do
res <- addDep' treatAsDep name
modify $ Map.insert name res
return res
addDep' :: Bool -- ^ is this being used by a dependency?
-> PackageName -> M (Either ConstructPlanException AddDepRes)
addDep' treatAsDep name = do
ctx <- ask
if name `elem` callStack ctx
then return $ Left $ DependencyCycleDetected $ name : callStack ctx
else local
(\ctx' -> ctx' { callStack = name : callStack ctx' }) $ do
(addDep'' treatAsDep name)
addDep'' :: Bool -- ^ is this being used by a dependency?
-> PackageName -> M (Either ConstructPlanException AddDepRes)
addDep'' treatAsDep name = do
ctx <- ask
case Map.lookup name $ combinedMap ctx of
-- TODO look up in the package index and see if there's a
-- recommendation available
Nothing -> return $ Left $ UnknownPackage name
Just (PIOnlyInstalled version loc installed) -> do
tellExecutablesUpstream name version loc Map.empty -- slightly hacky, no flags since they likely won't affect executable names
return $ Right $ ADRFound loc version installed
Just (PIOnlySource ps) -> do
tellExecutables name ps
installPackage treatAsDep name ps
Just (PIBoth ps installed) -> do
tellExecutables name ps
needInstall <- checkNeedInstall treatAsDep name ps installed (wanted ctx)
if needInstall
then installPackage treatAsDep name ps
else return $ Right $ ADRFound (piiLocation ps) (piiVersion ps) installed
tellExecutables :: PackageName -> PackageSource -> M () -- TODO merge this with addFinal above?
tellExecutables _ (PSLocal lp)
| lpWanted lp = tellExecutablesPackage Local $ lpPackage lp
| otherwise = return ()
tellExecutables name (PSUpstream version loc flags) = do
tellExecutablesUpstream name version loc flags
tellExecutablesUpstream :: PackageName -> Version -> InstallLocation -> Map FlagName Bool -> M ()
tellExecutablesUpstream name version loc flags = do
ctx <- ask
when (name `Set.member` extraToBuild ctx) $ do
p <- liftIO $ loadPackage ctx name version flags
tellExecutablesPackage loc p
tellExecutablesPackage :: InstallLocation -> Package -> M ()
tellExecutablesPackage loc p = do
cm <- asks combinedMap
-- Determine which components are enabled so we know which ones to copy
let myComps =
case Map.lookup (packageName p) cm of
Nothing -> assert False Set.empty
Just (PIOnlyInstalled _ _ _) -> Set.empty
Just (PIOnlySource ps) -> goSource ps
Just (PIBoth ps _) -> goSource ps
goSource (PSLocal lp) = fromMaybe Set.empty $ lpExeComponents lp
goSource (PSUpstream _ _ _) = Set.empty
tell mempty { wInstall = m myComps }
where
m myComps = Map.fromList $ map (, loc) $ Set.toList
$ filterComps myComps $ packageExes p
filterComps myComps x
| Set.null myComps = x
| otherwise = Set.intersection x $ Set.map toExe myComps
toExe x = fromMaybe x $ T.stripPrefix "exe:" x
-- TODO There are a lot of duplicated computations below. I've kept that for
-- simplicity right now
installPackage :: Bool -- ^ is this being used by a dependency?
-> PackageName -> PackageSource -> M (Either ConstructPlanException AddDepRes)
installPackage treatAsDep name ps = do
ctx <- ask
package <- psPackage name ps
depsRes <- addPackageDeps treatAsDep package
case depsRes of
Left e -> return $ Left e
Right (missing, present, minLoc) -> do
return $ Right $ ADRToInstall Task
{ taskProvides = PackageIdentifier
(packageName package)
(packageVersion package)
, taskConfigOpts = TaskConfigOpts missing $ \missing' ->
let allDeps = Map.union present missing'
destLoc = piiLocation ps <> minLoc
in configureOpts
(getEnvConfig ctx)
(baseConfigOpts ctx)
allDeps
(psWanted ps)
-- An assertion to check for a recurrence of
-- https://github.com/commercialhaskell/stack/issues/345
(assert (destLoc == piiLocation ps) destLoc)
package
, taskPresent = present
, taskType =
case ps of
PSLocal lp -> TTLocal lp
PSUpstream _ loc _ -> TTUpstream package $ loc <> minLoc
}
checkNeedInstall :: Bool
-> PackageName -> PackageSource -> Installed -> Set PackageName -> M Bool
checkNeedInstall treatAsDep name ps installed wanted = assert (piiLocation ps == Local) $ do
package <- psPackage name ps
depsRes <- addPackageDeps treatAsDep package
case depsRes of
Left _e -> return True -- installPackage will find the error again
Right (missing, present, _loc)
| Set.null missing -> checkDirtiness ps installed package present wanted
| otherwise -> do
tell mempty { wDirty = Map.singleton name $
let t = T.intercalate ", " $ map (T.pack . packageNameString . packageIdentifierName) (Set.toList missing)
in T.append "missing dependencies: " $
if T.length t < 100
then t
else T.take 97 t <> "..." }
return True
addPackageDeps :: Bool -- ^ is this being used by a dependency?
-> Package -> M (Either ConstructPlanException (Set PackageIdentifier, Map PackageIdentifier GhcPkgId, InstallLocation))
addPackageDeps treatAsDep package = do
ctx <- ask
deps' <- packageDepsWithTools package
deps <- forM (Map.toList deps') $ \(depname, range) -> do
eres <- addDep treatAsDep depname
let mlatest = Map.lookup depname $ latestVersions ctx
case eres of
Left e ->
let bd =
case e of
UnknownPackage name -> assert (name == depname) NotInBuildPlan
_ -> Couldn'tResolveItsDependencies
in return $ Left (depname, (range, mlatest, bd))
Right adr | not $ adrVersion adr `withinRange` range ->
return $ Left (depname, (range, mlatest, DependencyMismatch $ adrVersion adr))
Right (ADRToInstall task) -> return $ Right
(Set.singleton $ taskProvides task, Map.empty, taskLocation task)
Right (ADRFound loc _ (Executable _)) -> return $ Right
(Set.empty, Map.empty, loc)
Right (ADRFound loc _ (Library ident gid)) -> return $ Right
(Set.empty, Map.singleton ident gid, loc)
case partitionEithers deps of
([], pairs) -> return $ Right $ mconcat pairs
(errs, _) -> return $ Left $ DependencyPlanFailures
(PackageIdentifier
(packageName package)
(packageVersion package))
(Map.fromList errs)
where
adrVersion (ADRToInstall task) = packageIdentifierVersion $ taskProvides task
adrVersion (ADRFound _ v _) = v
checkDirtiness :: PackageSource
-> Installed
-> Package
-> Map PackageIdentifier GhcPkgId
-> Set PackageName
-> M Bool
checkDirtiness ps installed package present wanted = do
ctx <- ask
moldOpts <- tryGetFlagCache installed
let configOpts = configureOpts
(getEnvConfig ctx)
(baseConfigOpts ctx)
present
(psWanted ps)
(piiLocation ps) -- should be Local always
package
buildOpts = bcoBuildOpts (baseConfigOpts ctx)
wantConfigCache = ConfigCache
{ configCacheOpts = configOpts
, configCacheDeps = Set.fromList $ Map.elems present
, configCacheComponents =
case ps of
PSLocal lp -> Set.map renderComponent $ lpComponents lp
PSUpstream _ _ _ -> Set.empty
, configCacheHaddock =
shouldHaddockPackage buildOpts wanted (packageName package) ||
            -- Disabling haddocks when the old config had haddocks doesn't make the package dirty.
maybe False configCacheHaddock moldOpts
}
let mreason =
case moldOpts of
Nothing -> Just "old configure information not found"
Just oldOpts
| Just reason <- describeConfigDiff oldOpts wantConfigCache -> Just reason
| psDirty ps -> Just "local file changes"
| otherwise -> Nothing
case mreason of
Nothing -> return False
Just reason -> do
tell mempty { wDirty = Map.singleton (packageName package) reason }
return True
describeConfigDiff :: ConfigCache -> ConfigCache -> Maybe Text
describeConfigDiff old new
| configCacheDeps old /= configCacheDeps new = Just "dependencies changed"
| not $ Set.null newComponents =
Just $ "components added: " `T.append` T.intercalate ", "
(map (decodeUtf8With lenientDecode) (Set.toList newComponents))
| not (configCacheHaddock old) && configCacheHaddock new = Just "rebuilding with haddocks"
| oldOpts /= newOpts = Just $ T.pack $ concat
[ "flags changed from "
, show oldOpts
, " to "
, show newOpts
]
| otherwise = Nothing
where
-- options set by stack
isStackOpt t = any (`T.isPrefixOf` t)
[ "--dependency="
, "--constraint="
, "--package-db="
, "--libdir="
, "--bindir="
, "--enable-tests"
, "--enable-benchmarks"
]
userOpts = filter (not . isStackOpt)
. map T.pack
. (\(ConfigureOpts x y) -> x ++ y)
. configCacheOpts
(oldOpts, newOpts) = removeMatching (userOpts old) (userOpts new)
removeMatching (x:xs) (y:ys)
| x == y = removeMatching xs ys
removeMatching xs ys = (xs, ys)
newComponents = configCacheComponents new `Set.difference` configCacheComponents old
psDirty :: PackageSource -> Bool
psDirty (PSLocal lp) = lpDirtyFiles lp
psDirty (PSUpstream _ _ _) = False -- files never change in an upstream package
psWanted :: PackageSource -> Bool
psWanted (PSLocal lp) = lpWanted lp
psWanted (PSUpstream _ _ _) = False
psPackage :: PackageName -> PackageSource -> M Package
psPackage _ (PSLocal lp) = return $ lpPackage lp
psPackage name (PSUpstream version _ flags) = do
ctx <- ask
liftIO $ loadPackage ctx name version flags
-- | Get all of the dependencies for a given package, including guessed build
-- tool dependencies.
packageDepsWithTools :: Package -> M (Map PackageName VersionRange)
packageDepsWithTools p = do
ctx <- ask
return $ Map.unionsWith intersectVersionRanges
$ packageDeps p
: map (toolToPackages ctx) (packageTools p)
-- | Strip out anything from the @Plan@ intended for the local database
stripLocals :: Plan -> Plan
stripLocals plan = plan
{ planTasks = Map.filter checkTask $ planTasks plan
, planFinals = Map.empty
, planUnregisterLocal = Map.empty
, planInstallExes = Map.filter (/= Local) $ planInstallExes plan
}
where
checkTask task =
case taskType task of
TTLocal _ -> False
TTUpstream _ Local -> False
TTUpstream _ Snap -> True
stripNonDeps :: Set PackageName -> Plan -> Plan
stripNonDeps deps plan = plan
{ planTasks = Map.filter checkTask $ planTasks plan
, planFinals = Map.empty
, planInstallExes = Map.empty -- TODO maybe don't disable this?
}
where
checkTask task = packageIdentifierName (taskProvides task) `Set.member` deps
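-- Hedged sketch (not in the original module): the build-subset selection done
-- inside 'constructPlan', expressed as a standalone helper. It assumes the
-- 'BuildSubset' type and constructors matched on above; the helper name is ours.
applyBuildSubset :: BuildSubset -> Set PackageName -> Plan -> Plan
applyBuildSubset BSAll _ = id
applyBuildSubset BSOnlySnapshot _ = stripLocals
applyBuildSubset BSOnlyDependencies deps = stripNonDeps deps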
markAsDep :: PackageName -> M ()
markAsDep name = tell mempty { wDeps = Set.singleton name }
| akhileshs/stack | src/Stack/Build/ConstructPlan.hs | bsd-3-clause | 22,010 | 0 | 27 | 7,062 | 5,633 | 2,834 | 2,799 | 480 | 9 |
module Foo.Bar where
baz1 = 6
| RefactoringTools/HaRe | test/testdata/cabal/cabal4/src/Foo/Bar.expected.hs | bsd-3-clause | 31 | 0 | 4 | 7 | 11 | 7 | 4 | 2 | 1 |
-- | POSIX time, if you need to deal with timestamps and the like.
-- Most people won't need this module.
module Data.Time.Clock.POSIX
(
posixDayLength,POSIXTime,posixSecondsToUTCTime,utcTimeToPOSIXSeconds,getPOSIXTime
) where
import Data.Time.Clock.UTC
import Data.Time.Calendar.Days
import Data.Fixed
import Control.Monad
import Data.Time.Clock.CTimeval
-- | 86400 nominal seconds in every day
posixDayLength :: NominalDiffTime
posixDayLength = 86400
-- | POSIX time is the nominal time since 1970-01-01 00:00 UTC
--
-- To convert from a 'Foreign.C.CTime' or 'System.Posix.EpochTime', use 'realToFrac'.
--
type POSIXTime = NominalDiffTime
unixEpochDay :: Day
unixEpochDay = ModifiedJulianDay 40587
posixSecondsToUTCTime :: POSIXTime -> UTCTime
posixSecondsToUTCTime i = let
(d,t) = divMod' i posixDayLength
in UTCTime (addDays d unixEpochDay) (realToFrac t)
utcTimeToPOSIXSeconds :: UTCTime -> POSIXTime
utcTimeToPOSIXSeconds (UTCTime d t) =
(fromInteger (diffDays d unixEpochDay) * posixDayLength) + min posixDayLength (realToFrac t)
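-- Hedged round-trip sketch, not part of the original module: converting a
-- POSIX timestamp to 'UTCTime' and back is the identity up to the resolution
-- of 'NominalDiffTime' (leap-second clamping aside).
posixRoundTrip :: POSIXTime -> POSIXTime
posixRoundTrip = utcTimeToPOSIXSeconds . posixSecondsToUTCTime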
-- | Get the current POSIX time from the system clock.
getPOSIXTime :: IO POSIXTime
-- Use POSIX time
ctimevalToPosixSeconds :: CTimeval -> POSIXTime
ctimevalToPosixSeconds (MkCTimeval s mus) = (fromIntegral s) + (fromIntegral mus) / 1000000
getPOSIXTime = liftM ctimevalToPosixSeconds getCTimeval
| beni55/haste-compiler | libraries/time/lib/Data/Time/Clock/POSIX.hs | bsd-3-clause | 1,349 | 2 | 10 | 183 | 276 | 155 | 121 | 24 | 1 |
module B3 (myFringe) where
import D3 hiding (sumSquares)
import qualified D3
instance Same Float
where
isSame a b = a == b
isNotSame a b = a /= b
myFringe :: (Tree a) -> [a]
myFringe (Leaf x) = [x]
myFringe (Branch left right) = myFringe right
sumSquares ((x : xs)) = (x ^ 2) + (sumSquares xs)
sumSquares [] = 0
| SAdams601/HaRe | old/testing/renaming/B3_AstOut.hs | bsd-3-clause | 333 | 0 | 8 | 83 | 157 | 85 | 72 | 11 | 1 |
-- Written by Bertram Felgenhauer
--
-- https://ghc.haskell.org/trac/ghc/ticket/11760#comment:14
--
-- Compile with -threaded -with-rtsopts=-N2
{-# LANGUAGE BangPatterns #-}
import Control.Concurrent
import Control.Monad
import Control.Monad.ST.Lazy
import Control.Exception
import Data.STRef
import Data.IORef
import Control.Concurrent.MVar
import Data.List
-- evil ST action that tries to synchronize (by busy waiting on the
-- shared STRef) with a concurrent evaluation
evil :: ST s [Int]
evil = do
r <- strictToLazyST $ newSTRef 0
replicateM 100 $ do
i <- strictToLazyST $ readSTRef r
let !j = i + 1
strictToLazyST $ writeSTRef r j
let go 0 = return ()
go n = do
i' <- strictToLazyST $ readSTRef r
when (j == i') $ go (n-1)
go 100
return j
main = do
let res = runST evil
s0 <- newIORef (map pred (0 : res))
s1 <- newIORef (map pred (1 : res))
m0 <- newMVar ()
m1 <- newMVar ()
forkIO $ do
putMVar m0 ()
readIORef s0 >>= evaluate . foldl' (+) 0
putMVar m0 ()
forkIO $ do
putMVar m1 ()
readIORef s1 >>= evaluate . foldl' (+) 0
putMVar m1 ()
threadDelay 10000
replicateM 3 $ takeMVar m0 >> takeMVar m1
v0 <- tail <$> readIORef s0
v1 <- tail <$> readIORef s1
print (v0 == v1)
| ezyang/ghc | libraries/base/tests/T11760.hs | bsd-3-clause | 1,375 | 0 | 18 | 407 | 467 | 218 | 249 | 41 | 2 |
{-# LANGUAGE GHCForeignImportPrim, UnliftedFFITypes, MagicHash #-}
module Lib where
import GHC.Prim
foreign import prim f1 :: Int# -> Int#
| olsner/ghc | testsuite/tests/codeGen/should_compile/T12355.hs | bsd-3-clause | 142 | 0 | 6 | 22 | 24 | 15 | 9 | -1 | -1 |
module Main where
import Control.Monad
import Data.List
import qualified Data.IntMap as IntMap
import Data.IntMap ( IntMap )
main :: IO ()
main =
do print (runSim initialState)
print (runSim initialState { difficulty = 1 })
runSim :: GameState -> Int
runSim s = search (IntMap.singleton 0 [s]) maxBound
data Spell = Recharge | Poison | Shield | Drain | MagicMissile
deriving Eq
spellDamage :: Spell -> Int
spellDamage spell =
case spell of
MagicMissile -> 4
Drain -> 2
_ -> 0
spellHeal :: Spell -> Int
spellHeal spell =
case spell of
Drain -> 2
_ -> 0
spellCost :: Spell -> Int
spellCost s =
case s of
Recharge -> 229
Poison -> 173
Shield -> 113
Drain -> 73
MagicMissile -> 53
data GameState = GameState
{ manaPool
, manaSpent
, poisonTimer
, rechargeTimer
, shieldTimer
, playerHp
, bossHp, bossDamage, difficulty :: !Int
}
deriving (Eq, Show)
initialState :: GameState
initialState = GameState
{ manaPool = 500
, manaSpent = 0
, poisonTimer = 0
, rechargeTimer = 0
, shieldTimer = 0
, playerHp = 50
, bossHp = 51
, bossDamage = 9
, difficulty = 0
}
stepTimers :: GameState -> GameState
stepTimers s = s
{ manaPool = manaPool s +
if rechargeTimer s > 0 then 101 else 0
, bossHp = bossHp s -
if poisonTimer s > 0 then 3 else 0
, poisonTimer = dec (poisonTimer s)
, rechargeTimer = dec (rechargeTimer s)
, shieldTimer = dec (shieldTimer s)
}
bossAttack :: GameState -> GameState
bossAttack s = s { playerHp = playerHp s - effectiveAttack }
where
effectiveAttack = max 1 (bossDamage s - armor)
armor | shieldTimer s > 0 = 7
| otherwise = 0
applyDifficulty :: GameState -> GameState
applyDifficulty s = s { playerHp = playerHp s - difficulty s }
advance :: GameState -> [GameState]
advance s =
stepTimers (applyDifficulty s) --> \s1 ->
availableSpells s1 >>= \spell ->
stepTimers (applySpell spell s1) --> \s2 ->
bossAttack s2 --> \s3 ->
return s3
infixl 1 -->
(-->) :: GameState -> (GameState -> [GameState]) -> [GameState]
s --> k
| playerDead s = []
| bossDead s = [s]
| otherwise = k s
applySpell :: Spell -> GameState -> GameState
applySpell spell s =
s { manaSpent = manaSpent s + spellCost spell
, manaPool = manaPool s - spellCost spell
, rechargeTimer = if spell == Recharge then 5 else rechargeTimer s
, poisonTimer = if spell == Poison then 6 else poisonTimer s
, shieldTimer = if spell == Shield then 6 else shieldTimer s
, bossHp = bossHp s - spellDamage spell
, playerHp = playerHp s + spellHeal spell
}
availableSpells :: GameState -> [Spell]
availableSpells s =
filter (\spell -> spellCost spell <= manaPool s)
$ [Poison | poisonTimer s == 0]
++ [Recharge | rechargeTimer s == 0]
++ [Shield | shieldTimer s == 0]
++ [MagicMissile, Drain]
dec :: Int -> Int
dec x | x <= 1 = 0
dec x = x-1
-- Search the frontier of possible game states for the minimum possible mana
-- needed to kill a boss. The frontier is advanced by stepping one full turn
-- for each state picking the states with the minimum mana used so far.
-- Once the best seen so far is as good as the best states in the frontier
-- we know we're done because mana spending is monotonic.
search ::
IntMap [GameState] {- ^ search frontier indexed by mana spent -} ->
Int {- ^ lowest mana used to kill boss so far -} ->
Int {- ^ lowest possible mana used to kill boss -}
search states best =
case IntMap.minViewWithKey states of
Nothing -> best
Just ((k,ss),states')
| best <= k -> best
| otherwise -> search (foldl' schedule states' nextss) best'
where
nextss = nub (concatMap advance ss)
best' = minimum (best : map manaSpent (filter bossDead nextss))
schedule m t = IntMap.insertWith (++) (manaSpent t) [t] m
bossDead :: GameState -> Bool
bossDead s = bossHp s <= 0
playerDead :: GameState -> Bool
playerDead s = playerHp s <= 0
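-- Hedged usage sketch (not part of the original solution): the same search can
-- be run from any starting position by overriding fields of 'initialState',
-- e.g. against a hypothetical weaker boss.
exampleWeakerBoss :: Int
exampleWeakerBoss = runSim initialState { bossHp = 13, bossDamage = 8 }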
| glguy/advent2015 | Day22.hs | isc | 4,229 | 0 | 15 | 1,223 | 1,300 | 694 | 606 | 119 | 5 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE TypeFamilies #-}
-- | Expand allocations inside of maps when possible.
module Futhark.Pass.ExpandAllocations (expandAllocations) where
import Control.Monad.Except
import Control.Monad.Reader
import Control.Monad.State
import Control.Monad.Writer
import Data.List (find, foldl')
import qualified Data.Map.Strict as M
import Data.Maybe
import Futhark.Analysis.Rephrase
import qualified Futhark.Analysis.SymbolTable as ST
import Futhark.Error
import Futhark.IR
import qualified Futhark.IR.GPU.Simplify as GPU
import Futhark.IR.GPUMem
import qualified Futhark.IR.Mem.IxFun as IxFun
import Futhark.MonadFreshNames
import Futhark.Optimise.Simplify.Rep (addScopeWisdom)
import Futhark.Pass
import Futhark.Pass.ExplicitAllocations.GPU (explicitAllocationsInStms)
import Futhark.Pass.ExtractKernels.BlockedKernel (nonSegRed)
import Futhark.Pass.ExtractKernels.ToGPU (segThread)
import Futhark.Tools
import Futhark.Transform.CopyPropagate (copyPropagateInFun)
import Futhark.Transform.Rename (renameStm)
import Futhark.Util (mapAccumLM)
import Futhark.Util.IntegralExp
import Prelude hiding (quot)
-- | The memory expansion pass definition.
expandAllocations :: Pass GPUMem GPUMem
expandAllocations =
Pass "expand allocations" "Expand allocations" $
\(Prog consts funs) -> do
consts' <-
modifyNameSource $ limitationOnLeft . runStateT (runReaderT (transformStms consts) mempty)
Prog consts' <$> mapM (transformFunDef $ scopeOf consts') funs
-- Cannot use intraproceduralTransformation because it might create
-- duplicate size keys (which are not fixed by renamer, and size
-- keys must currently be globally unique).
type ExpandM = ReaderT (Scope GPUMem) (StateT VNameSource (Either String))
limitationOnLeft :: Either String a -> a
limitationOnLeft = either compilerLimitationS id
transformFunDef ::
Scope GPUMem ->
FunDef GPUMem ->
PassM (FunDef GPUMem)
transformFunDef scope fundec = do
body' <- modifyNameSource $ limitationOnLeft . runStateT (runReaderT m mempty)
copyPropagateInFun
simpleGPUMem
(ST.fromScope (addScopeWisdom scope))
fundec {funDefBody = body'}
where
m =
localScope scope $
inScopeOf fundec $
transformBody $ funDefBody fundec
transformBody :: Body GPUMem -> ExpandM (Body GPUMem)
transformBody (Body () stms res) = Body () <$> transformStms stms <*> pure res
transformLambda :: Lambda GPUMem -> ExpandM (Lambda GPUMem)
transformLambda (Lambda params body ret) =
Lambda params
<$> localScope (scopeOfLParams params) (transformBody body)
<*> pure ret
transformStms :: Stms GPUMem -> ExpandM (Stms GPUMem)
transformStms stms =
inScopeOf stms $ mconcat <$> mapM transformStm (stmsToList stms)
transformStm :: Stm GPUMem -> ExpandM (Stms GPUMem)
-- It is possible that we are unable to expand allocations in some
-- code versions. If so, we can remove the offending branch. Only if
-- both versions fail do we propagate the error.
transformStm (Let pat aux (If cond tbranch fbranch (IfDec ts IfEquiv))) = do
tbranch' <- (Right <$> transformBody tbranch) `catchError` (pure . Left)
fbranch' <- (Right <$> transformBody fbranch) `catchError` (pure . Left)
case (tbranch', fbranch') of
(Left _, Right fbranch'') ->
pure $ useBranch fbranch''
(Right tbranch'', Left _) ->
pure $ useBranch tbranch''
(Right tbranch'', Right fbranch'') ->
pure $ oneStm $ Let pat aux $ If cond tbranch'' fbranch'' (IfDec ts IfEquiv)
(Left e, _) ->
throwError e
where
bindRes pe (SubExpRes cs se) =
certify cs $ Let (Pat [pe]) (defAux ()) $ BasicOp $ SubExp se
useBranch b =
bodyStms b
<> stmsFromList (zipWith bindRes (patElems pat) (bodyResult b))
transformStm (Let pat aux e) = do
(stms, e') <- transformExp =<< mapExpM transform e
pure $ stms <> oneStm (Let pat aux e')
where
transform =
identityMapper
{ mapOnBody = \scope -> localScope scope . transformBody
}
transformExp :: Exp GPUMem -> ExpandM (Stms GPUMem, Exp GPUMem)
transformExp (Op (Inner (SegOp (SegMap lvl space ts kbody)))) = do
(alloc_stms, (_, kbody')) <- transformScanRed lvl space [] kbody
pure
( alloc_stms,
Op $ Inner $ SegOp $ SegMap lvl space ts kbody'
)
transformExp (Op (Inner (SegOp (SegRed lvl space reds ts kbody)))) = do
(alloc_stms, (lams, kbody')) <-
transformScanRed lvl space (map segBinOpLambda reds) kbody
let reds' = zipWith (\red lam -> red {segBinOpLambda = lam}) reds lams
pure
( alloc_stms,
Op $ Inner $ SegOp $ SegRed lvl space reds' ts kbody'
)
transformExp (Op (Inner (SegOp (SegScan lvl space scans ts kbody)))) = do
(alloc_stms, (lams, kbody')) <-
transformScanRed lvl space (map segBinOpLambda scans) kbody
let scans' = zipWith (\red lam -> red {segBinOpLambda = lam}) scans lams
pure
( alloc_stms,
Op $ Inner $ SegOp $ SegScan lvl space scans' ts kbody'
)
transformExp (Op (Inner (SegOp (SegHist lvl space ops ts kbody)))) = do
(alloc_stms, (lams', kbody')) <- transformScanRed lvl space lams kbody
let ops' = zipWith onOp ops lams'
pure
( alloc_stms,
Op $ Inner $ SegOp $ SegHist lvl space ops' ts kbody'
)
where
lams = map histOp ops
onOp op lam = op {histOp = lam}
transformExp (WithAcc inputs lam) = do
lam' <- transformLambda lam
(input_alloc_stms, inputs') <- unzip <$> mapM onInput inputs
pure
( mconcat input_alloc_stms,
WithAcc inputs' lam'
)
where
onInput (shape, arrs, Nothing) =
pure (mempty, (shape, arrs, Nothing))
onInput (shape, arrs, Just (op_lam, nes)) = do
bound_outside <- asks $ namesFromList . M.keys
let -- XXX: fake a SegLevel, which we don't have here. We will not
-- use it for anything, as we will not allow irregular
-- allocations inside the update function.
lvl = SegThread (Count $ intConst Int64 0) (Count $ intConst Int64 0) SegNoVirt
(op_lam', lam_allocs) =
extractLambdaAllocations (lvl, [0]) bound_outside mempty op_lam
variantAlloc (_, Var v, _) = not $ v `nameIn` bound_outside
variantAlloc _ = False
(variant_allocs, invariant_allocs) = M.partition variantAlloc lam_allocs
case M.elems variant_allocs of
(_, v, _) : _ ->
throwError $
"Cannot handle un-sliceable allocation size: " ++ pretty v
++ "\nLikely cause: irregular nested operations inside accumulator update operator."
[] ->
pure ()
let num_is = shapeRank shape
is = map paramName $ take num_is $ lambdaParams op_lam
(alloc_stms, alloc_offsets) <-
genericExpandedInvariantAllocations (const (shape, map le64 is)) invariant_allocs
scope <- askScope
let scope' = scopeOf op_lam <> scope
either throwError pure $
runOffsetM scope' alloc_offsets $ do
op_lam'' <- offsetMemoryInLambda op_lam'
pure (alloc_stms, (shape, arrs, Just (op_lam'', nes)))
transformExp e =
pure (mempty, e)
transformScanRed ::
SegLevel ->
SegSpace ->
[Lambda GPUMem] ->
KernelBody GPUMem ->
ExpandM (Stms GPUMem, ([Lambda GPUMem], KernelBody GPUMem))
transformScanRed lvl space ops kbody = do
bound_outside <- asks $ namesFromList . M.keys
let user = (lvl, [le64 $ segFlat space])
(kbody', kbody_allocs) =
extractKernelBodyAllocations user bound_outside bound_in_kernel kbody
(ops', ops_allocs) = unzip $ map (extractLambdaAllocations user bound_outside mempty) ops
variantAlloc (_, Var v, _) = not $ v `nameIn` bound_outside
variantAlloc _ = False
(variant_allocs, invariant_allocs) =
M.partition variantAlloc $ kbody_allocs <> mconcat ops_allocs
badVariant (_, Var v, _) = not $ v `nameIn` bound_in_kernel
badVariant _ = False
case find badVariant $ M.elems variant_allocs of
Just v ->
throwError $
"Cannot handle un-sliceable allocation size: " ++ pretty v
++ "\nLikely cause: irregular nested operations inside parallel constructs."
Nothing ->
pure ()
case lvl of
SegGroup {}
| not $ null variant_allocs ->
throwError "Cannot handle invariant allocations in SegGroup."
_ ->
pure ()
allocsForBody variant_allocs invariant_allocs lvl space kbody' $ \alloc_stms kbody'' -> do
ops'' <- forM ops' $ \op' ->
localScope (scopeOf op') $ offsetMemoryInLambda op'
pure (alloc_stms, (ops'', kbody''))
where
bound_in_kernel =
namesFromList (M.keys $ scopeOfSegSpace space)
<> boundInKernelBody kbody
boundInKernelBody :: KernelBody GPUMem -> Names
boundInKernelBody = namesFromList . M.keys . scopeOf . kernelBodyStms
allocsForBody ::
Extraction ->
Extraction ->
SegLevel ->
SegSpace ->
KernelBody GPUMem ->
(Stms GPUMem -> KernelBody GPUMem -> OffsetM b) ->
ExpandM b
allocsForBody variant_allocs invariant_allocs lvl space kbody' m = do
(alloc_offsets, alloc_stms) <-
memoryRequirements
lvl
space
(kernelBodyStms kbody')
variant_allocs
invariant_allocs
scope <- askScope
let scope' = scopeOfSegSpace space <> scope
either throwError pure $
runOffsetM scope' alloc_offsets $ do
kbody'' <- offsetMemoryInKernelBody kbody'
m alloc_stms kbody''
memoryRequirements ::
SegLevel ->
SegSpace ->
Stms GPUMem ->
Extraction ->
Extraction ->
ExpandM (RebaseMap, Stms GPUMem)
memoryRequirements lvl space kstms variant_allocs invariant_allocs = do
(num_threads, num_threads_stms) <-
runBuilder . letSubExp "num_threads" . BasicOp $
BinOp
(Mul Int64 OverflowUndef)
(unCount $ segNumGroups lvl)
(unCount $ segGroupSize lvl)
(invariant_alloc_stms, invariant_alloc_offsets) <-
inScopeOf num_threads_stms $
expandedInvariantAllocations
num_threads
(segNumGroups lvl)
(segGroupSize lvl)
invariant_allocs
(variant_alloc_stms, variant_alloc_offsets) <-
inScopeOf num_threads_stms $
expandedVariantAllocations
num_threads
space
kstms
variant_allocs
pure
( invariant_alloc_offsets <> variant_alloc_offsets,
num_threads_stms <> invariant_alloc_stms <> variant_alloc_stms
)
-- | Identifying the spot where an allocation occurs in terms of its
-- level and unique thread ID.
type User = (SegLevel, [TPrimExp Int64 VName])
-- | A description of allocations that have been extracted, and how
-- much memory (and which space) is needed.
type Extraction = M.Map VName (User, SubExp, Space)
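-- Hedged helper sketch (not in the original module): restrict an 'Extraction'
-- to the allocations that live in one memory space, e.g. when debugging which
-- allocations are about to be expanded. Assumes the 'Eq' instance of 'Space'.
allocationsInSpace :: Space -> Extraction -> Extraction
allocationsInSpace space = M.filter (\(_, _, space') -> space' == space)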
extractKernelBodyAllocations ::
User ->
Names ->
Names ->
KernelBody GPUMem ->
( KernelBody GPUMem,
Extraction
)
extractKernelBodyAllocations lvl bound_outside bound_kernel =
extractGenericBodyAllocations lvl bound_outside bound_kernel kernelBodyStms $
\stms kbody -> kbody {kernelBodyStms = stms}
extractBodyAllocations ::
User ->
Names ->
Names ->
Body GPUMem ->
(Body GPUMem, Extraction)
extractBodyAllocations user bound_outside bound_kernel =
extractGenericBodyAllocations user bound_outside bound_kernel bodyStms $
\stms body -> body {bodyStms = stms}
extractLambdaAllocations ::
User ->
Names ->
Names ->
Lambda GPUMem ->
(Lambda GPUMem, Extraction)
extractLambdaAllocations user bound_outside bound_kernel lam = (lam {lambdaBody = body'}, allocs)
where
(body', allocs) = extractBodyAllocations user bound_outside bound_kernel $ lambdaBody lam
extractGenericBodyAllocations ::
User ->
Names ->
Names ->
(body -> Stms GPUMem) ->
(Stms GPUMem -> body -> body) ->
body ->
( body,
Extraction
)
extractGenericBodyAllocations user bound_outside bound_kernel get_stms set_stms body =
let bound_kernel' = bound_kernel <> boundByStms (get_stms body)
(stms, allocs) =
runWriter $
fmap catMaybes $
mapM (extractStmAllocations user bound_outside bound_kernel') $
stmsToList $ get_stms body
in (set_stms (stmsFromList stms) body, allocs)
expandable, notScalar :: Space -> Bool
expandable (Space "local") = False
expandable ScalarSpace {} = False
expandable _ = True
notScalar ScalarSpace {} = False
notScalar _ = True
extractStmAllocations ::
User ->
Names ->
Names ->
Stm GPUMem ->
Writer Extraction (Maybe (Stm GPUMem))
extractStmAllocations user bound_outside bound_kernel (Let (Pat [patElem]) _ (Op (Alloc size space)))
| expandable space && expandableSize size
-- FIXME: the '&& notScalar space' part is a hack because we
-- don't otherwise hoist the sizes out far enough, and we
-- promise to be super-duper-careful about not having variant
-- scalar allocations.
|| (boundInKernel size && notScalar space) = do
tell $ M.singleton (patElemName patElem) (user, size, space)
pure Nothing
where
expandableSize (Var v) = v `nameIn` bound_outside || v `nameIn` bound_kernel
expandableSize Constant {} = True
boundInKernel (Var v) = v `nameIn` bound_kernel
boundInKernel Constant {} = False
extractStmAllocations user bound_outside bound_kernel stm = do
e <- mapExpM (expMapper user) $ stmExp stm
pure $ Just $ stm {stmExp = e}
where
expMapper user' =
identityMapper
{ mapOnBody = const $ onBody user',
mapOnOp = onOp user'
}
onBody user' body = do
let (body', allocs) = extractBodyAllocations user' bound_outside bound_kernel body
tell allocs
pure body'
onOp (_, user_ids) (Inner (SegOp op)) =
Inner . SegOp <$> mapSegOpM (opMapper user'') op
where
user'' =
(segLevel op, user_ids ++ [le64 (segFlat (segSpace op))])
onOp _ op = pure op
opMapper user' =
identitySegOpMapper
{ mapOnSegOpLambda = onLambda user',
mapOnSegOpBody = onKernelBody user'
}
onKernelBody user' body = do
let (body', allocs) = extractKernelBodyAllocations user' bound_outside bound_kernel body
tell allocs
pure body'
onLambda user' lam = do
body <- onBody user' $ lambdaBody lam
pure lam {lambdaBody = body}
genericExpandedInvariantAllocations ::
(User -> (Shape, [TPrimExp Int64 VName])) -> Extraction -> ExpandM (Stms GPUMem, RebaseMap)
genericExpandedInvariantAllocations getNumUsers invariant_allocs = do
-- We expand the invariant allocations by adding an inner dimension
-- equal to the number of kernel threads.
(rebases, alloc_stms) <- runBuilder $ mapM expand $ M.toList invariant_allocs
pure (alloc_stms, mconcat rebases)
where
expand (mem, (user, per_thread_size, space)) = do
let num_users = fst $ getNumUsers user
allocpat = Pat [PatElem mem $ MemMem space]
total_size <-
letExp "total_size" <=< toExp . product $
pe64 per_thread_size : map pe64 (shapeDims num_users)
letBind allocpat $ Op $ Alloc (Var total_size) space
pure $ M.singleton mem $ newBase user
untouched d = DimSlice 0 d 1
newBase user@(SegThread {}, _) (old_shape, _) =
let (users_shape, user_ids) = getNumUsers user
num_dims = length old_shape
perm = [num_dims .. num_dims + shapeRank users_shape -1] ++ [0 .. num_dims -1]
root_ixfun = IxFun.iota (old_shape ++ map pe64 (shapeDims users_shape))
permuted_ixfun = IxFun.permute root_ixfun perm
offset_ixfun =
IxFun.slice permuted_ixfun $
Slice $ map DimFix user_ids ++ map untouched old_shape
in offset_ixfun
newBase user@(SegGroup {}, _) (old_shape, _) =
let (users_shape, user_ids) = getNumUsers user
root_ixfun = IxFun.iota $ map pe64 (shapeDims users_shape) ++ old_shape
offset_ixfun =
IxFun.slice root_ixfun . Slice $
map DimFix user_ids ++ map untouched old_shape
in offset_ixfun
expandedInvariantAllocations ::
SubExp ->
Count NumGroups SubExp ->
Count GroupSize SubExp ->
Extraction ->
ExpandM (Stms GPUMem, RebaseMap)
expandedInvariantAllocations num_threads (Count num_groups) (Count group_size) =
genericExpandedInvariantAllocations getNumUsers
where
getNumUsers (SegThread {}, [gtid]) = (Shape [num_threads], [gtid])
getNumUsers (SegThread {}, [gid, ltid]) = (Shape [num_groups, group_size], [gid, ltid])
getNumUsers (SegGroup {}, [gid]) = (Shape [num_groups], [gid])
getNumUsers user = error $ "getNumUsers: unhandled " ++ show user
expandedVariantAllocations ::
SubExp ->
SegSpace ->
Stms GPUMem ->
Extraction ->
ExpandM (Stms GPUMem, RebaseMap)
expandedVariantAllocations _ _ _ variant_allocs
| null variant_allocs = pure (mempty, mempty)
expandedVariantAllocations num_threads kspace kstms variant_allocs = do
let sizes_to_blocks = removeCommonSizes variant_allocs
variant_sizes = map fst sizes_to_blocks
(slice_stms, offsets, size_sums) <-
sliceKernelSizes num_threads variant_sizes kspace kstms
-- Note the recursive call to expand allocations inside the newly
-- produced kernels.
slice_stms_tmp <- simplifyStms =<< explicitAllocationsInStms slice_stms
slice_stms' <- transformStms slice_stms_tmp
let variant_allocs' :: [(VName, (SubExp, SubExp, Space))]
variant_allocs' =
concat $
zipWith
memInfo
(map snd sizes_to_blocks)
(zip offsets size_sums)
memInfo blocks (offset, total_size) =
[(mem, (Var offset, Var total_size, space)) | (mem, space) <- blocks]
  -- We expand the variant allocations by adding an inner dimension
-- equal to the sum of the sizes required by different threads.
(alloc_stms, rebases) <- unzip <$> mapM expand variant_allocs'
pure (slice_stms' <> stmsFromList alloc_stms, mconcat rebases)
where
expand (mem, (offset, total_size, space)) = do
let allocpat = Pat [PatElem mem $ MemMem space]
pure
( Let allocpat (defAux ()) $ Op $ Alloc total_size space,
M.singleton mem $ newBase offset
)
num_threads' = pe64 num_threads
gtid = le64 $ segFlat kspace
-- For the variant allocations, we add an inner dimension,
-- which is then offset by a thread-specific amount.
newBase size_per_thread (old_shape, pt) =
let elems_per_thread =
pe64 size_per_thread `quot` primByteSize pt
root_ixfun = IxFun.iota [elems_per_thread, num_threads']
offset_ixfun =
IxFun.slice root_ixfun . Slice $
[DimSlice 0 num_threads' 1, DimFix gtid]
shapechange =
if length old_shape == 1
then map DimCoercion old_shape
else map DimNew old_shape
in IxFun.reshape offset_ixfun shapechange
-- | A map from memory block names to new index function bases.
type RebaseMap = M.Map VName (([TPrimExp Int64 VName], PrimType) -> IxFun)
newtype OffsetM a
= OffsetM
( ReaderT
(Scope GPUMem)
(ReaderT RebaseMap (Either String))
a
)
deriving
( Applicative,
Functor,
Monad,
HasScope GPUMem,
LocalScope GPUMem,
MonadError String
)
runOffsetM :: Scope GPUMem -> RebaseMap -> OffsetM a -> Either String a
runOffsetM scope offsets (OffsetM m) =
runReaderT (runReaderT m scope) offsets
askRebaseMap :: OffsetM RebaseMap
askRebaseMap = OffsetM $ lift ask
localRebaseMap :: (RebaseMap -> RebaseMap) -> OffsetM a -> OffsetM a
localRebaseMap f (OffsetM m) = OffsetM $ do
scope <- ask
lift $ local f $ runReaderT m scope
lookupNewBase :: VName -> ([TPrimExp Int64 VName], PrimType) -> OffsetM (Maybe IxFun)
lookupNewBase name x = do
offsets <- askRebaseMap
pure $ ($ x) <$> M.lookup name offsets
offsetMemoryInKernelBody :: KernelBody GPUMem -> OffsetM (KernelBody GPUMem)
offsetMemoryInKernelBody kbody = do
scope <- askScope
stms' <-
stmsFromList . snd
<$> mapAccumLM
(\scope' -> localScope scope' . offsetMemoryInStm)
scope
(stmsToList $ kernelBodyStms kbody)
pure kbody {kernelBodyStms = stms'}
offsetMemoryInBody :: Body GPUMem -> OffsetM (Body GPUMem)
offsetMemoryInBody (Body dec stms res) = do
scope <- askScope
stms' <-
stmsFromList . snd
<$> mapAccumLM
(\scope' -> localScope scope' . offsetMemoryInStm)
scope
(stmsToList stms)
pure $ Body dec stms' res
offsetMemoryInStm :: Stm GPUMem -> OffsetM (Scope GPUMem, Stm GPUMem)
offsetMemoryInStm (Let pat dec e) = do
e' <- offsetMemoryInExp e
pat' <- offsetMemoryInPat pat =<< expReturns e'
scope <- askScope
-- Try to recompute the index function. Fall back to creating rebase
-- operations with the RebaseMap.
rts <- runReaderT (expReturns e') scope
let pat'' = Pat $ zipWith pick (patElems pat') rts
stm = Let pat'' dec e'
let scope' = scopeOf stm <> scope
pure (scope', stm)
where
pick ::
PatElem (MemInfo SubExp NoUniqueness MemBind) ->
ExpReturns ->
PatElem (MemInfo SubExp NoUniqueness MemBind)
pick
(PatElem name (MemArray pt s u _ret))
(MemArray _ _ _ (Just (ReturnsInBlock m extixfun)))
| Just ixfun <- instantiateIxFun extixfun =
PatElem name (MemArray pt s u (ArrayIn m ixfun))
pick p _ = p
instantiateIxFun :: ExtIxFun -> Maybe IxFun
instantiateIxFun = traverse (traverse inst)
where
inst Ext {} = Nothing
inst (Free x) = pure x
offsetMemoryInPat :: Pat LetDecMem -> [ExpReturns] -> OffsetM (Pat LetDecMem)
offsetMemoryInPat (Pat pes) rets = do
Pat <$> zipWithM onPE pes rets
where
onPE
(PatElem name (MemArray pt shape u (ArrayIn mem _)))
(MemArray _ _ _ (Just (ReturnsNewBlock _ _ ixfun))) =
pure . PatElem name . MemArray pt shape u . ArrayIn mem $
fmap (fmap unExt) ixfun
onPE pe _ = do
new_dec <- offsetMemoryInMemBound $ patElemDec pe
pure pe {patElemDec = new_dec}
unExt (Ext i) = patElemName (pes !! i)
unExt (Free v) = v
offsetMemoryInParam :: Param (MemBound u) -> OffsetM (Param (MemBound u))
offsetMemoryInParam fparam = do
fparam' <- offsetMemoryInMemBound $ paramDec fparam
pure fparam {paramDec = fparam'}
offsetMemoryInMemBound :: MemBound u -> OffsetM (MemBound u)
offsetMemoryInMemBound summary@(MemArray pt shape u (ArrayIn mem ixfun)) = do
new_base <- lookupNewBase mem (IxFun.base ixfun, pt)
pure . fromMaybe summary $ do
new_base' <- new_base
pure $ MemArray pt shape u $ ArrayIn mem $ IxFun.rebase new_base' ixfun
offsetMemoryInMemBound summary = pure summary
offsetMemoryInBodyReturns :: BodyReturns -> OffsetM BodyReturns
offsetMemoryInBodyReturns br@(MemArray pt shape u (ReturnsInBlock mem ixfun))
| Just ixfun' <- isStaticIxFun ixfun = do
new_base <- lookupNewBase mem (IxFun.base ixfun', pt)
pure . fromMaybe br $ do
new_base' <- new_base
pure . MemArray pt shape u . ReturnsInBlock mem $
IxFun.rebase (fmap (fmap Free) new_base') ixfun
offsetMemoryInBodyReturns br = pure br
offsetMemoryInLambda :: Lambda GPUMem -> OffsetM (Lambda GPUMem)
offsetMemoryInLambda lam = inScopeOf lam $ do
body <- offsetMemoryInBody $ lambdaBody lam
pure $ lam {lambdaBody = body}
-- A loop may have memory parameters, and those memory blocks may
-- be expanded. We assume (but do not check - FIXME) that if the
-- initial value of a loop parameter is an expanded memory block,
-- then so will the result be.
offsetMemoryInLoopParams ::
[(FParam GPUMem, SubExp)] ->
([(FParam GPUMem, SubExp)] -> OffsetM a) ->
OffsetM a
offsetMemoryInLoopParams merge f = do
let (params, args) = unzip merge
localRebaseMap extend $ do
params' <- mapM offsetMemoryInParam params
f $ zip params' args
where
extend rm = foldl' onParamArg rm merge
onParamArg rm (param, Var arg)
| Just x <- M.lookup arg rm =
M.insert (paramName param) x rm
onParamArg rm _ = rm
offsetMemoryInExp :: Exp GPUMem -> OffsetM (Exp GPUMem)
offsetMemoryInExp (DoLoop merge form body) = do
offsetMemoryInLoopParams merge $ \merge' -> do
body' <-
localScope
(scopeOfFParams (map fst merge') <> scopeOf form)
(offsetMemoryInBody body)
pure $ DoLoop merge' form body'
offsetMemoryInExp e = mapExpM recurse e
where
recurse =
identityMapper
{ mapOnBody = \bscope -> localScope bscope . offsetMemoryInBody,
mapOnBranchType = offsetMemoryInBodyReturns,
mapOnOp = onOp
}
onOp (Inner (SegOp op)) =
Inner . SegOp
<$> localScope (scopeOfSegSpace (segSpace op)) (mapSegOpM segOpMapper op)
where
segOpMapper =
identitySegOpMapper
{ mapOnSegOpBody = offsetMemoryInKernelBody,
mapOnSegOpLambda = offsetMemoryInLambda
}
onOp op = pure op
---- Slicing allocation sizes out of a kernel.
unAllocGPUStms :: Stms GPUMem -> Either String (Stms GPU.GPU)
unAllocGPUStms = unAllocStms False
where
unAllocBody (Body dec stms res) =
Body dec <$> unAllocStms True stms <*> pure res
unAllocKernelBody (KernelBody dec stms res) =
KernelBody dec <$> unAllocStms True stms <*> pure res
unAllocStms nested =
fmap (stmsFromList . catMaybes) . mapM (unAllocStm nested) . stmsToList
unAllocStm nested stm@(Let _ _ (Op Alloc {}))
| nested = throwError $ "Cannot handle nested allocation: " ++ pretty stm
| otherwise = pure Nothing
unAllocStm _ (Let pat dec e) =
Just <$> (Let <$> unAllocPat pat <*> pure dec <*> mapExpM unAlloc' e)
unAllocLambda (Lambda params body ret) =
Lambda (map unParam params) <$> unAllocBody body <*> pure ret
unAllocPat (Pat pes) =
Pat <$> mapM (rephrasePatElem (Right . unMem)) pes
unAllocOp Alloc {} = Left "unAllocOp: unhandled Alloc"
unAllocOp (Inner OtherOp {}) = Left "unAllocOp: unhandled OtherOp"
unAllocOp (Inner (SizeOp op)) = pure $ SizeOp op
unAllocOp (Inner (SegOp op)) = SegOp <$> mapSegOpM mapper op
where
mapper =
identitySegOpMapper
{ mapOnSegOpLambda = unAllocLambda,
mapOnSegOpBody = unAllocKernelBody
}
unParam = fmap unMem
unT = Right . unMem
unAlloc' =
Mapper
{ mapOnBody = const unAllocBody,
mapOnRetType = unT,
mapOnBranchType = unT,
mapOnFParam = Right . unParam,
mapOnLParam = Right . unParam,
mapOnOp = unAllocOp,
mapOnSubExp = Right,
mapOnVName = Right
}
unMem :: MemInfo d u ret -> TypeBase (ShapeBase d) u
unMem (MemPrim pt) = Prim pt
unMem (MemArray pt shape u _) = Array pt shape u
unMem (MemAcc acc ispace ts u) = Acc acc ispace ts u
unMem MemMem {} = Prim Unit
unAllocScope :: Scope GPUMem -> Scope GPU.GPU
unAllocScope = M.map unInfo
where
unInfo (LetName dec) = LetName $ unMem dec
unInfo (FParamName dec) = FParamName $ unMem dec
unInfo (LParamName dec) = LParamName $ unMem dec
unInfo (IndexName it) = IndexName it
removeCommonSizes :: Extraction -> [(SubExp, [(VName, Space)])]
removeCommonSizes = M.toList . foldl' comb mempty . M.toList
where
comb m (mem, (_, size, space)) = M.insertWith (++) size [(mem, space)] m
sliceKernelSizes ::
SubExp ->
[SubExp] ->
SegSpace ->
Stms GPUMem ->
ExpandM (Stms GPU.GPU, [VName], [VName])
sliceKernelSizes num_threads sizes space kstms = do
kstms' <- either throwError pure $ unAllocGPUStms kstms
let num_sizes = length sizes
i64s = replicate num_sizes $ Prim int64
kernels_scope <- asks unAllocScope
(max_lam, _) <- flip runBuilderT kernels_scope $ do
xs <- replicateM num_sizes $ newParam "x" (Prim int64)
ys <- replicateM num_sizes $ newParam "y" (Prim int64)
(zs, stms) <- localScope (scopeOfLParams $ xs ++ ys) $
collectStms $
forM (zip xs ys) $ \(x, y) ->
fmap subExpRes . letSubExp "z" . BasicOp $
BinOp (SMax Int64) (Var $ paramName x) (Var $ paramName y)
pure $ Lambda (xs ++ ys) (mkBody stms zs) i64s
flat_gtid_lparam <- newParam "flat_gtid" (Prim (IntType Int64))
(size_lam', _) <- flip runBuilderT kernels_scope $ do
params <- replicateM num_sizes $ newParam "x" (Prim int64)
(zs, stms) <- localScope
(scopeOfLParams params <> scopeOfLParams [flat_gtid_lparam])
$ collectStms $ do
-- Even though this SegRed is one-dimensional, we need to
-- provide indexes corresponding to the original potentially
-- multi-dimensional construct.
let (kspace_gtids, kspace_dims) = unzip $ unSegSpace space
new_inds =
unflattenIndex
(map pe64 kspace_dims)
(pe64 $ Var $ paramName flat_gtid_lparam)
zipWithM_ letBindNames (map pure kspace_gtids) =<< mapM toExp new_inds
mapM_ addStm kstms'
pure $ subExpsRes sizes
localScope (scopeOfSegSpace space) $
GPU.simplifyLambda (Lambda [flat_gtid_lparam] (Body () stms zs) i64s)
((maxes_per_thread, size_sums), slice_stms) <- flip runBuilderT kernels_scope $ do
pat <-
basicPat <$> replicateM num_sizes (newIdent "max_per_thread" $ Prim int64)
w <-
letSubExp "size_slice_w"
=<< foldBinOp (Mul Int64 OverflowUndef) (intConst Int64 1) (segSpaceDims space)
thread_space_iota <-
letExp "thread_space_iota" $
BasicOp $ Iota w (intConst Int64 0) (intConst Int64 1) Int64
let red_op =
SegBinOp
Commutative
max_lam
(replicate num_sizes $ intConst Int64 0)
mempty
lvl <- segThread "segred"
addStms =<< mapM renameStm
=<< nonSegRed lvl pat w [red_op] size_lam' [thread_space_iota]
size_sums <- forM (patNames pat) $ \threads_max ->
letExp "size_sum" $
BasicOp $ BinOp (Mul Int64 OverflowUndef) (Var threads_max) num_threads
pure (patNames pat, size_sums)
pure (slice_stms, maxes_per_thread, size_sums)
| diku-dk/futhark | src/Futhark/Pass/ExpandAllocations.hs | isc | 30,085 | 0 | 21 | 7,148 | 9,265 | 4,644 | 4,621 | -1 | -1 |
module Main where
import Data.List.Split
import Data.List
import System.IO
import System.Exit
import System.Environment
main :: IO ()
main = getArgs >>= go
go ["-h" ] = help
go ["--help"] = help
go ("--" :xs) = go' xs
go xs = go' xs
go' [] = interact (process "," ",")
go' [i] = interact (process i i)
go' [i,o] = interact (process i o)
go' _ = help >> exitFailure
process i o = unlines . map (intercalate o) . transpose . map (splitOn i) . lines
help = putStrLn "Usage: transpose [-h|--help] [--] [DELIM_IN] [DELIM_OUT]"
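-- Hedged example (not in the original program): 'process' splits every line on
-- the input delimiter, transposes rows and columns, and re-joins them with the
-- output delimiter. The binding name is ours.
--
-- >>> process "," ";" "a,b,c\n1,2,3\n"
-- "a;1\nb;2\nc;3\n"
exampleTransposed :: String
exampleTransposed = process "," ";" "a,b,c\n1,2,3\n"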
| sordina/transpose | src/Main.hs | mit | 556 | 0 | 11 | 129 | 232 | 120 | 112 | 18 | 1 |
module Lights where
import Data.Vect.Float.Base
import Types
data Light
= PointLight Vec3 Color Attenuation
| DirectionalLight Vec3 Color
deriving Show
colorOf :: Light -> Color
colorOf (PointLight _ color _) = color
colorOf (DirectionalLight _ color) = color
buildPointLight :: Vec3 -> Color -> Attenuation -> Light
buildPointLight pos color attenuation = PointLight pos color attenuation
buildDirectionalLight :: Vec3 -> Color -> Light
buildDirectionalLight dir color = DirectionalLight (normalize dir) color
getDirectionFrom :: Light -> Vec3 -> Vec3
getDirectionFrom (PointLight pos _ _) point = normalize $ pos &- point
getDirectionFrom (DirectionalLight direction _) _ = direction -- TODO: why negate?
getSqrDistanceFrom :: Light -> Vec3 -> Float
getSqrDistanceFrom (PointLight pos _ _) point = normsqr $ pos &- point
getSqrDistanceFrom (DirectionalLight _ _) _ = 1.0 / 0
getAttenuationAtDistance :: Light -> Float -> Float
getAttenuationAtDistance (PointLight _ _ (Attenuation const lin quad)) distance =
const + lin * distance + quad * distance * distance
getAttenuationAtDistance (DirectionalLight _ _) _ = 1.0
| CanisLupus/haskell-raytracer | Lights.hs | mit | 1,132 | 6 | 9 | 177 | 360 | 187 | 173 | 24 | 1 |
{-# htermination eltsFM_LE :: (Ord a, Ord k) => FiniteMap (a,k) b -> (a,k) -> [b] #-}
import FiniteMap
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/full_haskell/FiniteMap_eltsFM_LE_12.hs | mit | 103 | 0 | 3 | 19 | 5 | 3 | 2 | 1 | 0 |
-------------------------------------------------------------------------
-- Copyright (c) 2007-2011, 2012 ETH Zurich.
-- All rights reserved.
--
-- This file is distributed under the terms in the attached LICENSE file.
-- If you do not find this file, copies can be found by writing to:
-- ETH Zurich D-INFK CAB F.78, Universitaetstr 6, CH-8092 Zurich.
-- Attn: Systems Group.
--
-- Basic Hake rule definitions and combinators
--
--------------------------------------------------------------------------
module RuleDefs where
import Data.List (intersect, isSuffixOf, union, (\\), nub, sortBy, elemIndex)
import Path
import qualified X86_64
import qualified K1om
import qualified X86_32
import qualified SCC
import qualified ARMv5
import qualified ARM11MP
import qualified XScale
import qualified ARMv7
import qualified ARMv7_M
import HakeTypes
import qualified Args
import qualified Config
import Debug.Trace
-- enable debug spew
-- should we move this to Config.hs? -AB
debugFlag = False
--
-- Is a token to be displayed in a rule?
--
inRule :: RuleToken -> Bool
inRule (Dep _ _ _) = False
inRule (PreDep _ _ _) = False
inRule (Target _ _) = False
inRule _ = True
--
-- Look for a set of files: this is called using the "find" combinator
--
withSuffix :: [String] -> String -> String -> [String]
withSuffix af tf arg =
[ basename f | f <- af, f `isInSameDirAs` tf, isSuffixOf arg f ]
withSuffices :: [String] -> String -> [String] -> [String]
withSuffices af tf args =
concat [ withSuffix af tf arg | arg <- args ]
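-- A hedged usage sketch (the file list and Hakefile path are made up, and
-- `isInSameDirAs` from Path is assumed to compare directory names), so one
-- would expect roughly:
--
-- >>> withSuffix ["/src/a.c", "/src/b.c", "/src/x.S"] "/src/Hakefile" ".c"
-- ["a.c","b.c"]
-- >>> withSuffices ["/src/a.c", "/src/b.c", "/src/x.S"] "/src/Hakefile" [".c", ".S"]
-- ["a.c","b.c","x.S"]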
--
-- Find files with a given suffix in a given dir
--
inDir :: [String] -> String -> String -> String -> [String]
inDir af tf dir suffix =
-- Dummy is here so that we can find files in the same dir :-/
let subdir = (if head dir == '/' then absdir else reldir) ./. "dummy"
absdir = if head tf == '/' then dir else '.':dir
reldir = (dirname tf) ./. dir
files = withSuffix af subdir suffix
in
[ dir ./. f | f <- files ]
cInDir :: [String] -> String -> String -> [String]
cInDir af tf dir = inDir af tf dir ".c"
cxxInDir :: [String] -> String -> String -> [String]
cxxInDir af tf dir = (inDir af tf dir ".cpp") ++ (inDir af tf dir ".cc")
sInDir :: [String] -> String -> String -> [String]
sInDir af tf dir = inDir af tf dir ".S"
-------------------------------------------------------------------------
--
-- Architecture specific definitions
--
-------------------------------------------------------------------------
options :: String -> Options
options "x86_64" = X86_64.options
options "k1om" = K1om.options
options "x86_32" = X86_32.options
options "scc" = SCC.options
options "armv5" = ARMv5.options
options "arm11mp" = ARM11MP.options
options "xscale" = XScale.options
options "armv7" = ARMv7.options
options "armv7-m" = ARMv7_M.options
kernelCFlags "x86_64" = X86_64.kernelCFlags
kernelCFlags "k1om" = K1om.kernelCFlags
kernelCFlags "x86_32" = X86_32.kernelCFlags
kernelCFlags "scc" = SCC.kernelCFlags
kernelCFlags "armv5" = ARMv5.kernelCFlags
kernelCFlags "arm11mp" = ARM11MP.kernelCFlags
kernelCFlags "xscale" = XScale.kernelCFlags
kernelCFlags "armv7" = ARMv7.kernelCFlags
kernelCFlags "armv7-m" = ARMv7_M.kernelCFlags
kernelLdFlags "x86_64" = X86_64.kernelLdFlags
kernelLdFlags "k1om" = K1om.kernelLdFlags
kernelLdFlags "x86_32" = X86_32.kernelLdFlags
kernelLdFlags "scc" = SCC.kernelLdFlags
kernelLdFlags "armv5" = ARMv5.kernelLdFlags
kernelLdFlags "arm11mp" = ARM11MP.kernelLdFlags
kernelLdFlags "xscale" = XScale.kernelLdFlags
kernelLdFlags "armv7" = ARMv7.kernelLdFlags
kernelLdFlags "armv7-m" = ARMv7_M.kernelLdFlags
archFamily :: String -> String
archFamily arch = optArchFamily (options arch)
-------------------------------------------------------------------------
--
-- Options for compiling the kernel, which is special
--
-------------------------------------------------------------------------
kernelIncludes arch = [ NoDep BuildTree arch f | f <- [
"/include",
"/include/dev" ]]
++
[ NoDep SrcTree "src" f | f <- [
"/kernel/include/arch" ./. arch,
"/kernel/include/arch" ./. archFamily arch,
"/kernel/include",
"/include",
"/include/arch" ./. archFamily arch,
Config.libcInc,
"/include/c",
"/include/target" ./. archFamily arch]]
kernelOptions arch = Options {
optArch = arch,
optArchFamily = archFamily arch,
optFlags = kernelCFlags arch,
optCxxFlags = [],
optDefines = (optDefines (options arch)) ++ [ Str "-DIN_KERNEL",
Str ("-DCONFIG_SCHEDULER_" ++ (show Config.scheduler)),
Str ("-DCONFIG_TIMESLICE=" ++ (show Config.timeslice)) ],
optIncludes = kernelIncludes arch,
optDependencies =
[ Dep InstallTree arch "/include/errors/errno.h",
Dep InstallTree arch "/include/barrelfish_kpi/capbits.h",
Dep InstallTree arch "/include/asmoffsets.h",
Dep InstallTree arch "/include/trace_definitions/trace_defs.h" ],
optLdFlags = kernelLdFlags arch,
optLdCxxFlags = [],
optLibs = [],
optCxxLibs = [],
optSuffix = [],
optInterconnectDrivers = [],
optFlounderBackends = [],
extraFlags = [],
extraDefines = [],
extraIncludes = [],
extraDependencies = [],
extraLdFlags = []
}
-------------------------------------------------------------------------
--
-- IMPORTANT: This section contains extraction of functions from the
-- relevant architecture module. The names and types should be
-- exactly the same as in the architecture.hs file. This section
-- should not contain any logic; only architecture extraction.
--
--------------------------------------------------------------------------
--
-- First, the default C compiler for an architecture
--
cCompiler :: Options -> String -> String -> String -> [ RuleToken ]
cCompiler opts phase src obj
| optArch opts == "x86_64" = X86_64.cCompiler opts phase src obj
| optArch opts == "k1om" = K1om.cCompiler opts phase src obj
| optArch opts == "x86_32" = X86_32.cCompiler opts phase src obj
| optArch opts == "scc" = SCC.cCompiler opts phase src obj
| optArch opts == "armv5" = ARMv5.cCompiler opts phase src obj
| optArch opts == "arm11mp" = ARM11MP.cCompiler opts phase src obj
| optArch opts == "xscale" = XScale.cCompiler opts phase src obj
| optArch opts == "armv7" = ARMv7.cCompiler opts phase src obj
| optArch opts == "armv7-m" = ARMv7_M.cCompiler opts phase src obj
| otherwise = [ ErrorMsg ("no C compiler for " ++ (optArch opts)) ]
cPreprocessor :: Options -> String -> String -> String -> [ RuleToken ]
cPreprocessor opts phase src obj
| otherwise = [ ErrorMsg ("no C preprocessor for " ++ (optArch opts)) ]
--
-- C++ compiler, where supported
--
cxxCompiler :: Options -> String -> String -> String -> [ RuleToken ]
cxxCompiler opts phase src obj
| optArch opts == "x86_64" = X86_64.cxxCompiler opts phase src obj
| optArch opts == "k1om" = K1om.cxxCompiler opts phase src obj
| otherwise = [ ErrorMsg ("no C++ compiler for " ++ (optArch opts)) ]
--
-- makeDepend step; note that obj can be whatever the intended output is
--
makeDepend :: Options -> String -> String -> String -> String -> [ RuleToken ]
makeDepend opts phase src obj depfile
| optArch opts == "x86_64" =
X86_64.makeDepend opts phase src obj depfile
| optArch opts == "k1om" =
K1om.makeDepend opts phase src obj depfile
| optArch opts == "x86_32" =
X86_32.makeDepend opts phase src obj depfile
| optArch opts == "scc" =
SCC.makeDepend opts phase src obj depfile
| optArch opts == "armv5" =
ARMv5.makeDepend opts phase src obj depfile
| optArch opts == "arm11mp" =
ARM11MP.makeDepend opts phase src obj depfile
| optArch opts == "xscale" =
XScale.makeDepend opts phase src obj depfile
| optArch opts == "armv7" =
ARMv7.makeDepend opts phase src obj depfile
| optArch opts == "armv7-m" =
ARMv7_M.makeDepend opts phase src obj depfile
| otherwise = [ ErrorMsg ("no dependency generator for " ++ (optArch opts)) ]
makeCxxDepend :: Options -> String -> String -> String -> String -> [ RuleToken ]
makeCxxDepend opts phase src obj depfile
| optArch opts == "x86_64" =
X86_64.makeCxxDepend opts phase src obj depfile
| optArch opts == "k1om" =
K1om.makeCxxDepend opts phase src obj depfile
| otherwise = [ ErrorMsg ("no C++ dependency generator for " ++ (optArch opts)) ]
cToAssembler :: Options -> String -> String -> String -> String -> [ RuleToken ]
cToAssembler opts phase src afile objdepfile
| optArch opts == "x86_64" = X86_64.cToAssembler opts phase src afile objdepfile
| optArch opts == "k1om" = K1om.cToAssembler opts phase src afile objdepfile
| optArch opts == "x86_32" = X86_32.cToAssembler opts phase src afile objdepfile
| optArch opts == "scc" = SCC.cToAssembler opts phase src afile objdepfile
| optArch opts == "armv5" = ARMv5.cToAssembler opts phase src afile objdepfile
| optArch opts == "arm11mp" = ARM11MP.cToAssembler opts phase src afile objdepfile
| optArch opts == "xscale" = XScale.cToAssembler opts phase src afile objdepfile
| optArch opts == "armv7" = ARMv7.cToAssembler opts phase src afile objdepfile
| optArch opts == "armv7-m" = ARMv7_M.cToAssembler opts phase src afile objdepfile
| otherwise = [ ErrorMsg ("no C compiler for " ++ (optArch opts)) ]
--
-- Assemble an assembly language file
--
assembler :: Options -> String -> String -> [ RuleToken ]
assembler opts src obj
| optArch opts == "x86_64" = X86_64.assembler opts src obj
| optArch opts == "k1om" = K1om.assembler opts src obj
| optArch opts == "x86_32" = X86_32.assembler opts src obj
| optArch opts == "scc" = SCC.assembler opts src obj
| optArch opts == "armv5" = ARMv5.assembler opts src obj
| optArch opts == "arm11mp" = ARM11MP.assembler opts src obj
| optArch opts == "xscale" = XScale.assembler opts src obj
| optArch opts == "armv7" = ARMv7.assembler opts src obj
| optArch opts == "armv7-m" = ARMv7_M.assembler opts src obj
| otherwise = [ ErrorMsg ("no assembler for " ++ (optArch opts)) ]
archive :: Options -> [String] -> [String] -> String -> String -> [ RuleToken ]
archive opts objs libs name libname
| optArch opts == "x86_64" = X86_64.archive opts objs libs name libname
| optArch opts == "k1om" = K1om.archive opts objs libs name libname
| optArch opts == "x86_32" = X86_32.archive opts objs libs name libname
| optArch opts == "scc" = SCC.archive opts objs libs name libname
| optArch opts == "armv5" = ARMv5.archive opts objs libs name libname
| optArch opts == "arm11mp" = ARM11MP.archive opts objs libs name libname
| optArch opts == "xscale" = XScale.archive opts objs libs name libname
| optArch opts == "armv7" = ARMv7.archive opts objs libs name libname
| optArch opts == "armv7-m" = ARMv7_M.archive opts objs libs name libname
| otherwise = [ ErrorMsg ("Can't build a library for " ++ (optArch opts)) ]
linker :: Options -> [String] -> [String] -> String -> [RuleToken]
linker opts objs libs bin
| optArch opts == "x86_64" = X86_64.linker opts objs libs bin
| optArch opts == "k1om" = K1om.linker opts objs libs bin
| optArch opts == "x86_32" = X86_32.linker opts objs libs bin
| optArch opts == "scc" = SCC.linker opts objs libs bin
| optArch opts == "armv5" = ARMv5.linker opts objs libs bin
| optArch opts == "arm11mp" = ARM11MP.linker opts objs libs bin
| optArch opts == "xscale" = XScale.linker opts objs libs bin
| optArch opts == "armv7" = ARMv7.linker opts objs libs bin
| optArch opts == "armv7-m" = ARMv7_M.linker opts objs libs bin
| otherwise = [ ErrorMsg ("Can't link executables for " ++ (optArch opts)) ]
cxxlinker :: Options -> [String] -> [String] -> String -> [RuleToken]
cxxlinker opts objs libs bin
| optArch opts == "x86_64" = X86_64.cxxlinker opts objs libs bin
| optArch opts == "k1om" = K1om.cxxlinker opts objs libs bin
| otherwise = [ ErrorMsg ("Can't link C++ executables for " ++ (optArch opts)) ]
--
-- The C compiler for compiling things on the host
--
nativeCCompiler :: String
nativeCCompiler = "$(CC)"
-------------------------------------------------------------------------
--
-- Functions to create useful filenames
--
dependFilePath :: String -> String
dependFilePath obj = obj ++ ".depend"
objectFilePath :: Options -> String -> String
objectFilePath opts src = (optSuffix opts) ./. ((removeSuffix src) ++ ".o")
generatedObjectFilePath :: Options -> String -> String
generatedObjectFilePath opts src = (removeSuffix src) ++ ".o"
preprocessedFilePath :: Options -> String -> String
preprocessedFilePath opts src = (optSuffix opts) ./. ((removeSuffix src) ++ ".i")
-- Standard convention is that human generated assembler is .S, machine generated is .s
assemblerFilePath :: Options -> String -> String
assemblerFilePath opts src = (optSuffix opts) ./. ((removeSuffix src) ++ ".s")
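-- For a concrete feel (illustration only; "_for_app_foo" and "bar/baz.c" are
-- made-up names, and (./.) is assumed to join components with a single '/'):
-- with optSuffix = "_for_app_foo", the source "bar/baz.c" maps to the object
-- "_for_app_foo/bar/baz.o" and to the dependency file
-- "_for_app_foo/bar/baz.o.depend".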
-------------------------------------------------------------------------
--
-- Functions with logic to start doing things
--
--
-- Create C file dependencies
--
-- Since this is where we know what the depfile is called, it is here that we
-- also decide to include it. This stops many different places below from
-- trying to guess what the depfile is called.
--
makeDependArchSub :: Options -> String -> String -> String -> String -> [ RuleToken ]
makeDependArchSub opts phase src objfile depfile =
[ Str ("@echo Generating $@"), NL ] ++
makeDepend opts phase src objfile depfile
makeDependArch :: Options -> String -> String -> String -> String -> HRule
makeDependArch opts phase src objfile depfile =
Rules [ Rule (makeDependArchSub opts phase src objfile depfile),
Include (Out (optArch opts) depfile)
]
-- Make depend for a standard object file
makeDependObj :: Options -> String -> String -> HRule
makeDependObj opts phase src =
let objfile = (objectFilePath opts src)
in
makeDependArch opts phase src objfile (dependFilePath objfile)
-- Make depend for a C++ object file
makeDependCxxArchSub :: Options -> String -> String -> String -> String -> [ RuleToken ]
makeDependCxxArchSub opts phase src objfile depfile =
[ Str ("@echo Generating $@"), NL ] ++
makeCxxDepend opts phase src objfile depfile
makeDependCxxArch :: Options -> String -> String -> String -> String -> HRule
makeDependCxxArch opts phase src objfile depfile =
Rules [ Rule (makeDependCxxArchSub opts phase src objfile depfile),
Include (Out (optArch opts) depfile)
]
makeDependCxxObj :: Options -> String -> String -> HRule
makeDependCxxObj opts phase src =
let objfile = (objectFilePath opts src)
in
makeDependCxxArch opts phase src objfile (dependFilePath objfile)
-- Make depend for an assembler output
makeDependAssembler :: Options -> String -> String -> HRule
makeDependAssembler opts phase src =
let objfile = (assemblerFilePath opts src)
in
makeDependArch opts phase src objfile (dependFilePath objfile)
--
-- Compile a C program to assembler
--
makecToAssembler :: Options -> String -> String -> String -> [ RuleToken ]
makecToAssembler opts phase src obj =
cToAssembler opts phase src (assemblerFilePath opts src) (dependFilePath obj)
--
-- Assemble an assembly language file
--
assemble :: Options -> String -> [ RuleToken ]
assemble opts src =
assembler opts src (objectFilePath opts src)
--
-- Create a library from a set of object files
--
archiveLibrary :: Options -> String -> [String] -> [String] -> [ RuleToken ]
archiveLibrary opts name objs libs =
archive opts objs libs name (libraryPath name)
--
-- Link an executable
--
linkExecutable :: Options -> [String] -> [String] -> String -> [RuleToken]
linkExecutable opts objs libs bin =
linker opts objs libs (applicationPath bin)
--
-- Link a C++ executable
--
linkCxxExecutable :: Options -> [String] -> [String] -> String -> [RuleToken]
linkCxxExecutable opts objs libs bin =
cxxlinker opts objs libs (applicationPath bin)
-------------------------------------------------------------------------
-------------------------------------------------------------------------
--
-- Hake macros (hacros?): each of these evaluates to HRule, i.e. a
-- list of templates for Makefile rules
--
-------------------------------------------------------------------------
--
-- Compile a C file for a particular architecture
-- We include cToAssembler to permit humans to type "make foo/bar.s"
--
compileCFile :: Options -> String -> HRule
compileCFile opts src =
Rules [ Rule (cCompiler opts "src" src (objectFilePath opts src)),
Rule (makecToAssembler opts "src" src (objectFilePath opts src)),
makeDependObj opts "src" src
]
--
-- Compile a C++ file for a particular architecture
--
compileCxxFile :: Options -> String -> HRule
compileCxxFile opts src =
Rules [ Rule (cxxCompiler opts "src" src (objectFilePath opts src)),
makeDependCxxObj opts "src" src
]
--
-- Compile a C file for a particular architecture
--
compileGeneratedCFile :: Options -> String -> HRule
compileGeneratedCFile opts src =
let o2 = opts { optSuffix = "" }
arch = optArch o2
in
Rules [ Rule (cCompiler o2 arch src (objectFilePath o2 src) ),
Rule (makecToAssembler o2 arch src (objectFilePath o2 src)),
makeDependObj o2 arch src
]
compileCFiles :: Options -> [String] -> HRule
compileCFiles opts srcs = Rules [ compileCFile opts s | s <- srcs ]
compileCxxFiles :: Options -> [String] -> HRule
compileCxxFiles opts srcs = Rules [ compileCxxFile opts s | s <- srcs ]
compileGeneratedCFiles :: Options -> [String] -> HRule
compileGeneratedCFiles opts srcs =
Rules [ compileGeneratedCFile opts s | s <- srcs ]
--
-- Add a set of C (or whatever) dependencies on a *generated* file.
-- Somewhere else this file has to be defined as a target, of
-- course...
--
extraCDependencyForObj :: Options -> String -> String -> String -> [RuleToken]
extraCDependencyForObj opts file s obj =
let arch = optArch opts
in
[ Target arch (dependFilePath obj),
Target arch obj,
Dep BuildTree arch file
]
extraCDependency :: Options -> String -> String -> HRule
extraCDependency opts file s = Rule (extraCDependencyForObj opts file s obj)
where obj = objectFilePath opts s
extraCDependencies :: Options -> String -> [String] -> HRule
extraCDependencies opts file srcs =
Rules [ extraCDependency opts file s | s <- srcs ]
extraGeneratedCDependency :: Options -> String -> String -> HRule
extraGeneratedCDependency opts file s =
extraCDependency (opts { optSuffix = "" }) file s
--
-- Copy include files to the appropriate directory
--
includeFile :: Options -> String -> HRule
includeFile opts hdr =
Rules [ (Rule [ Str "cp", In SrcTree "src" hdr, Out (optArch opts) hdr ]),
(Rule [ PreDep BuildTree (optArch opts) hdr,
Target (optArch opts) "/include/errors/errno.h" ]
)
]
--
-- Build a Mackerel header file from a definition.
--
mackerelProgLoc = In InstallTree "tools" "/bin/mackerel"
mackerelDevFileLoc d = In SrcTree "src" ("/devices" ./. (d ++ ".dev"))
mackerelDevHdrPath d = "/include/dev/" ./. (d ++ "_dev.h")
mackerel2 :: Options -> String -> HRule
mackerel2 opts dev = mackerel_generic opts dev "shift-driver"
mackerel :: Options -> String -> HRule
mackerel opts dev = mackerel_generic opts dev "bitfield-driver"
mackerel_generic :: Options -> String -> String -> HRule
mackerel_generic opts dev flag =
let
arch = optArch opts
in
Rule [ mackerelProgLoc,
Str ("--" ++ flag),
Str "-c", mackerelDevFileLoc dev,
Str "-o", Out arch (mackerelDevHdrPath dev)
]
mackerelDependencies :: Options -> String -> [String] -> HRule
mackerelDependencies opts d srcs =
extraCDependencies opts (mackerelDevHdrPath d) srcs
--
-- Basic Flounder definitions: where things are
--
flounderProgLoc = In InstallTree "tools" "/bin/flounder"
flounderIfFileLoc ifn = In SrcTree "src" ("/if" ./. (ifn ++ ".if"))
-- new-style stubs: path for generic header
flounderIfDefsPath ifn = "/include/if" ./. (ifn ++ "_defs.h")
-- new-style stubs: path for specific backend header
flounderIfDrvDefsPath ifn drv = "/include/if" ./. (ifn ++ "_" ++ drv ++ "_defs.h")
-- new-style stubs: generated C code (for all default enabled backends)
flounderBindingPath opts ifn =
(optSuffix opts) ./. (ifn ++ "_flounder_bindings.c")
-- new-style stubs: generated C code (for extra backends enabled by the user)
flounderExtraBindingPath opts ifn =
(optSuffix opts) ./. (ifn ++ "_flounder_extra_bindings.c")
flounderTHCHdrPath ifn = "/include/if" ./. (ifn ++ "_thc.h")
flounderTHCStubPath opts ifn =
(optSuffix opts) ./. (ifn ++ "_thc.c")
applicationPath name = "/sbin" ./. name
libraryPath libname = "/lib" ./. ("lib" ++ libname ++ ".a")
kernelPath = "/sbin/cpu"
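-- Hedged examples of the generated paths (assuming (./.) joins components
-- with a single '/'; "nameservice", "vfs" and "init" are made-up names):
--
-- >>> flounderIfDefsPath "nameservice"
-- "/include/if/nameservice_defs.h"
-- >>> libraryPath "vfs"
-- "/lib/libvfs.a"
-- >>> applicationPath "init"
-- "/sbin/init"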
-- construct include arguments to flounder for common types
-- these are:
-- 1. platform-specific types (if/platform/foo.if)
-- 2. architecture-specific types (if/arch/foo.if)
-- 3. generic types (if/types.if)
flounderIncludes :: Options -> [RuleToken]
flounderIncludes opts
= concat [ [Str "-i", flounderIfFileLoc ifn]
| ifn <- [ "platform" ./. (optArch opts), -- XXX: optPlatform
"arch" ./. (optArch opts),
"types" ] ]
flounderRule :: Options -> [RuleToken] -> HRule
flounderRule opts args
= Rule $ [ flounderProgLoc ] ++ (flounderIncludes opts) ++ args
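-- For illustration only (assuming optArch opts is "x86_64"), the include
-- tokens above roughly amount to passing flounder
--   -i <srctree>/if/platform/x86_64.if -i <srctree>/if/arch/x86_64.if -i <srctree>/if/types.if
-- before the remaining arguments.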
--
-- Build new-style Flounder header files from a definition
-- (generic header, plus one per backend)
--
flounderGenDefs :: Options -> String -> HRule
flounderGenDefs opts ifn =
Rules $ flounderRule opts [
Str "--generic-header", flounderIfFileLoc ifn,
Out (optArch opts) (flounderIfDefsPath ifn)
] : [ flounderRule opts [
Str $ "--" ++ drv ++ "-header", flounderIfFileLoc ifn,
Out (optArch opts) (flounderIfDrvDefsPath ifn drv)]
| drv <- Args.allFlounderBackends ]
--
-- Build a new Flounder binding file from a definition.
-- This builds the binding for all enabled backends
--
flounderBinding :: Options -> String -> [String] -> HRule
flounderBinding opts ifn =
flounderBindingHelper opts ifn backends (flounderBindingPath opts ifn)
where
backends = "generic" : (optFlounderBackends opts)
-- as above, but for a specific set of user-specified backends
flounderExtraBinding :: Options -> String -> [String] -> [String] -> HRule
flounderExtraBinding opts ifn backends =
flounderBindingHelper opts ifn backends (flounderExtraBindingPath opts ifn)
flounderBindingHelper :: Options -> String -> [String] -> String -> [String] -> HRule
flounderBindingHelper opts ifn backends cfile srcs = Rules $
[ flounderRule opts $ args ++ [flounderIfFileLoc ifn, Out arch cfile ],
compileGeneratedCFile opts cfile,
flounderDefsDepend opts ifn allbackends srcs]
++ [extraGeneratedCDependency opts (flounderIfDrvDefsPath ifn d) cfile
| d <- allbackends]
where
arch = optArch opts
archfam = optArchFamily opts
args = [Str "-a", Str archfam] ++ [Str $ "--" ++ d ++ "-stub" | d <- backends]
allbackends = backends `union` optFlounderBackends opts \\ ["generic"]
--
-- Build a Flounder THC header file from a definition.
--
flounderTHCFile :: Options -> String -> HRule
flounderTHCFile opts ifn =
flounderRule opts [
Str "--thc-header", flounderIfFileLoc ifn,
Out (optArch opts) (flounderTHCHdrPath ifn)
]
--
-- Build a Flounder THC stubs file from a definition.
--
flounderTHCStub :: Options -> String -> [String] -> HRule
flounderTHCStub opts ifn srcs =
let cfile = flounderTHCStubPath opts ifn
hfile = flounderTHCHdrPath ifn
arch = optArch opts
in
Rules [ flounderRule opts [
Str "--thc-stubs", flounderIfFileLoc ifn,
Out arch cfile
],
compileGeneratedCFile opts cfile,
extraCDependencies opts hfile srcs,
extraGeneratedCDependency opts hfile cfile
]
--
-- Create a dependency on a Flounder header file for a set of files,
-- but don't actually build either stub (useful for libraries)
--
flounderDefsDepend :: Options -> String -> [String] -> [String] -> HRule
flounderDefsDepend opts ifn backends srcs = Rules $
(extraCDependencies opts (flounderIfDefsPath ifn) srcs) :
[extraCDependencies opts (flounderIfDrvDefsPath ifn drv) srcs
| drv <- backends, drv /= "generic" ]
--
-- Emit all the Flounder-related rules/dependencies for a given target
--
flounderRules :: Options -> Args.Args -> [String] -> [HRule]
flounderRules opts args csrcs =
([ flounderBinding opts f csrcs | f <- Args.flounderBindings args ]
++
[ flounderExtraBinding opts f backends csrcs
| (f, backends) <- Args.flounderExtraBindings args ]
++
[ flounderTHCStub opts f csrcs | f <- Args.flounderTHCStubs args ]
++
-- Flounder extra defs (header files) also depend on the base
-- Flounder headers for the same interface
[ flounderDefsDepend opts f baseBackends csrcs | f <- allIf ]
++
-- Extra defs only for non-base backends (those were already emitted above)
[ flounderDefsDepend opts f (backends \\ baseBackends) csrcs
| (f, backends) <- Args.flounderExtraDefs args ]
)
where
-- base backends enabled by default
baseBackends = optFlounderBackends opts
-- all interfaces mentioned in flounderDefs or ExtraDefs
allIf = nub $ Args.flounderDefs args ++ [f | (f,_) <- Args.flounderExtraDefs args]
--
-- Build a Fugu library
--
fuguFile :: Options -> String -> HRule
fuguFile opts file =
let arch = optArch opts
cfile = file ++ ".c"
hfile = "/include/errors/" ++ file ++ ".h"
in
Rules [ Rule [In InstallTree "tools" "/bin/fugu",
In SrcTree "src" (file++".fugu"),
Out arch hfile,
Out arch cfile ],
compileGeneratedCFile opts cfile
]
--
-- Build a Pleco library
--
plecoFile :: Options -> String -> HRule
plecoFile opts file =
let arch = optArch opts
cfile = file ++ ".c"
hfile = "/include/trace_definitions/" ++ file ++ ".h"
jsonfile = "/trace_definitions/" ++ file ++ ".json"
in
Rules [ Rule [In InstallTree "tools" "/bin/pleco",
In SrcTree "src" (file++".pleco"),
Out arch hfile,
Out arch jsonfile,
Out arch cfile ],
compileGeneratedCFile opts cfile
]
--
-- Build a Hamlet file
--
hamletFile :: Options -> String -> HRule
hamletFile opts file =
let arch = optArch opts
hfile = "/include/barrelfish_kpi/capbits.h"
cfile = "cap_predicates.c"
usercfile = "user_cap_predicates.c"
ofile = "user_cap_predicates.o"
nfile = "cap_predicates"
afile = "/lib/libcap_predicates.a"
in
Rules [ Rule [In InstallTree "tools" "/bin/hamlet",
In SrcTree "src" (file++".hl"),
Out arch hfile,
Out arch cfile,
Out arch usercfile ],
compileGeneratedCFile opts usercfile,
Rule (archive opts [ ofile ] [] nfile afile)
]
--
-- Link a set of object files and libraries together
--
link :: Options -> [String] -> [ String ] -> String -> HRule
link opts objs libs bin =
Rule (linkExecutable opts objs libs bin)
--
-- Link a set of C++ object files and libraries together
--
linkCxx :: Options -> [String] -> [ String ] -> String -> HRule
linkCxx opts objs libs bin =
Rule (linkCxxExecutable opts objs libs bin)
--
-- Link a CPU driver. This is where it gets distinctly architecture-specific.
--
linkKernel :: Options -> String -> [String] -> [String] -> HRule
linkKernel opts name objs libs
| optArch opts == "x86_64" = X86_64.linkKernel opts objs [libraryPath l | l <- libs ] ("/sbin" ./. name)
| optArch opts == "k1om" = K1om.linkKernel opts objs [libraryPath l | l <- libs ] ("/sbin" ./. name)
| optArch opts == "x86_32" = X86_32.linkKernel opts objs [libraryPath l | l <- libs ] ("/sbin" ./. name)
| optArch opts == "scc" = SCC.linkKernel opts objs [libraryPath l | l <- libs ] ("/sbin" ./. name)
| optArch opts == "armv5" = ARMv5.linkKernel opts objs [libraryPath l | l <- libs ] ("/sbin" ./. name)
| optArch opts == "arm11mp" = ARM11MP.linkKernel opts objs [libraryPath l | l <- libs ] ("/sbin" ./. name)
| optArch opts == "xscale" = XScale.linkKernel opts objs [libraryPath l | l <- libs ] ("/sbin" ./. name)
| optArch opts == "armv7" = ARMv7.linkKernel opts objs [libraryPath l | l <- libs ] name
| optArch opts == "armv7-m" = ARMv7_M.linkKernel opts objs [libraryPath l | l <- libs ] name
| otherwise = Rule [ Str ("Error: Can't link kernel for '" ++ (optArch opts) ++ "'") ]
--
-- Copy a file from one place to another
--
copy :: Options -> String -> String -> HRule
copy opts src dest =
Rule [ Str "cp", In BuildTree (optArch opts) src, Out (optArch opts) dest ]
--
-- Assemble a list of S files for a particular architecture
--
assembleSFile :: Options -> String -> HRule
assembleSFile opts src =
Rules [ Rule (assemble opts src),
makeDependObj opts "src" src
]
assembleSFiles :: Options -> [String] -> HRule
assembleSFiles opts srcs = Rules [ assembleSFile opts s | s <- srcs ]
--
-- Archive a bunch of objects into a library
--
staticLibrary :: Options -> String -> [String] -> [String] -> HRule
staticLibrary opts libpath objs libs =
Rule (archiveLibrary opts libpath objs libs)
--
-- Compile a Haskell binary (for the host architecture)
--
compileHaskell prog main deps = compileHaskellWithLibs prog main deps []
compileHaskellWithLibs prog main deps dirs =
let
tools_dir = (Dep InstallTree "tools" "/tools/.marker")
in
Rule ([ NStr "ghc -i",
NoDep SrcTree "src" ".",
Str "-odir ", NoDep BuildTree "tools" ".",
Str "-hidir ", NoDep BuildTree "tools" ".",
Str "-rtsopts=all",
Str "--make ",
In SrcTree "src" main,
Str "-o ",
Out "tools" ("/bin" ./. prog),
Str "$(LDFLAGS)" ]
++ concat [[ NStr "-i", NoDep SrcTree "src" d] | d <- dirs]
++ [ (Dep SrcTree "src" dep) | dep <- deps ]
++ [ tools_dir ])
--
-- Compile (and link) a C binary (for the host architecture)
--
compileNativeC :: String -> [String] -> [String] -> [String] -> HRule
compileNativeC prog cfiles cflags ldflags =
Rule ([ Str nativeCCompiler,
Str "-o",
Out "tools" ("/bin" ./. prog),
Str "$(CFLAGS)",
Str "$(LDFLAGS)" ]
++ [ (Str flag) | flag <- cflags ]
++ [ (Str flag) | flag <- ldflags ]
++ [ (In SrcTree "src" dep) | dep <- cfiles ])
--
-- Build a Technical Note
--
buildTechNote :: String -> String -> Bool -> Bool -> [String] -> HRule
buildTechNote input output bib glo figs =
buildTechNoteWithDeps input output bib glo figs []
buildTechNoteWithDeps :: String -> String -> Bool -> Bool -> [String] -> [RuleToken] -> HRule
buildTechNoteWithDeps input output bib glo figs deps =
let
working_dir = NoDep BuildTree "tools" "/tmp/"
style_files = [ "bfish-logo.pdf", "bftn.sty", "defs.bib", "barrelfish.bib" ]
in
Rule ( [ Dep SrcTree "src" (f ++ ".pdf") | f <- figs]
++
[ Dep SrcTree "src" ("/doc/style" ./. f) | f <- style_files ]
++
[ Str "mkdir", Str "-p", working_dir, NL ]
++
deps
++
[ In SrcTree "src" "/tools/run-pdflatex.sh",
Str "--input-tex", In SrcTree "src" input,
Str "--working-dir", working_dir,
Str "--output-pdf", Out "docs" ("/" ++ output),
Str "--texinput", NoDep SrcTree "src" "/doc/style",
Str "--bibinput", NoDep SrcTree "src" "/doc/style"
]
++ (if bib then [ Str "--has-bib" ] else [])
++ (if glo then [ Str "--has-glo" ] else [])
)
---------------------------------------------------------------------
--
-- Transformations on file names
--
----------------------------------------------------------------------
allObjectPaths :: Options -> Args.Args -> [String]
allObjectPaths opts args =
[objectFilePath opts g
| g <- (Args.cFiles args)++(Args.cxxFiles args)++(Args.assemblyFiles args)]
++
[generatedObjectFilePath opts g
| g <- [ flounderBindingPath opts f
| f <- (Args.flounderBindings args)]
++
[ flounderExtraBindingPath opts f
| (f, _) <- (Args.flounderExtraBindings args)]
++
[ flounderTHCStubPath opts f
| f <- (Args.flounderTHCStubs args)]
]
allLibraryPaths :: Args.Args -> [String]
allLibraryPaths args =
[ libraryPath l | l <- Args.addLibraries args ]
---------------------------------------------------------------------
--
-- Very large-scale macros
--
----------------------------------------------------------------------
--
-- Build an application binary
--
application :: Args.Args
application = Args.defaultArgs { Args.buildFunction = applicationBuildFn }
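-- A rough, hypothetical Hakefile sketch (the names and the `build` wrapper are
-- assumptions, not taken from this file): a typical Hakefile would evaluate to
-- something like
--
--   [ build application { target = "example",
--                         cFiles = [ "example.c" ],
--                         addLibraries = [ "vfs" ] } ]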
applicationBuildFn :: [String] -> String -> Args.Args -> HRule
applicationBuildFn af tf args
| debugFlag && trace (Args.showArgs (tf ++ " Application ") args) False
= undefined
applicationBuildFn af tf args =
Rules [ appBuildArch af tf args arch | arch <- Args.architectures args ]
appGetOptionsForArch arch args =
(options arch) { extraIncludes =
[ NoDep SrcTree "src" a | a <- Args.addIncludes args],
optIncludes = (optIncludes $ options arch) \\
[ NoDep SrcTree "src" i | i <- Args.omitIncludes args ],
optFlags = (optFlags $ options arch) \\
[ Str f | f <- Args.omitCFlags args ],
optCxxFlags = (optCxxFlags $ options arch) \\
[ Str f | f <- Args.omitCxxFlags args ],
optSuffix = "_for_app_" ++ Args.target args,
extraFlags = Args.addCFlags args ++ Args.addCxxFlags args,
extraLdFlags = [ Str f | f <- Args.addLinkFlags args ],
extraDependencies =
[Dep BuildTree arch s | s <- Args.addGeneratedDependencies args]
}
appBuildArch af tf args arch =
let -- Fiddle the options
opts = appGetOptionsForArch arch args
csrcs = Args.cFiles args
cxxsrcs = Args.cxxFiles args
appname = Args.target args
-- XXX: Not sure if this is correct. Currently assuming that if the app
-- contains C++ files, we have to use the C++ linker.
mylink = if cxxsrcs == [] then link else linkCxx
in
Rules ( flounderRules opts args csrcs
++
[ mackerelDependencies opts m csrcs | m <- Args.mackerelDevices args ]
++
[ compileCFiles opts csrcs,
compileCxxFiles opts cxxsrcs,
assembleSFiles opts (Args.assemblyFiles args),
mylink opts (allObjectPaths opts args) (allLibraryPaths args) appname
]
)
--
-- Build an Arrakis application binary
--
arrakisapplication :: Args.Args
arrakisapplication = Args.defaultArgs { Args.buildFunction = arrakisApplicationBuildFn }
arrakisApplicationBuildFn :: [String] -> String -> Args.Args -> HRule
arrakisApplicationBuildFn af tf args
| debugFlag && trace (Args.showArgs (tf ++ " Arrakis Application ") args) False
= undefined
arrakisApplicationBuildFn af tf args =
Rules [ arrakisAppBuildArch af tf args arch | arch <- Args.architectures args ]
arrakisAppGetOptionsForArch arch args =
(options arch) { extraIncludes =
[ NoDep SrcTree "src" a | a <- Args.addIncludes args],
optIncludes = (optIncludes $ options arch) \\
[ NoDep SrcTree "src" i | i <- Args.omitIncludes args ],
optFlags = ((optFlags $ options arch) ++ [ Str "-DARRAKIS" ]) \\
[ Str f | f <- Args.omitCFlags args ],
optCxxFlags = (optCxxFlags $ options arch) \\
[ Str f | f <- Args.omitCxxFlags args ],
optSuffix = "_for_app_" ++ Args.target args,
optLibs = [ In InstallTree arch "/lib/libarrakis.a" ] ++
((optLibs $ options arch) \\
[ In InstallTree arch "/lib/libbarrelfish.a" ]),
extraFlags = Args.addCFlags args ++ Args.addCxxFlags args,
extraLdFlags = [ Str f | f <- Args.addLinkFlags args ],
extraDependencies =
[Dep BuildTree arch s | s <- Args.addGeneratedDependencies args]
}
arrakisAppBuildArch af tf args arch =
let -- Fiddle the options
opts = arrakisAppGetOptionsForArch arch args
csrcs = Args.cFiles args
cxxsrcs = Args.cxxFiles args
appname = Args.target args
-- XXX: Not sure if this is correct. Currently assuming that if the app
-- contains C++ files, we have to use the C++ linker.
mylink = if cxxsrcs == [] then link else linkCxx
in
Rules ( flounderRules opts args csrcs
++
[ mackerelDependencies opts m csrcs | m <- Args.mackerelDevices args ]
++
[ compileCFiles opts csrcs,
compileCxxFiles opts cxxsrcs,
assembleSFiles opts (Args.assemblyFiles args),
mylink opts (allObjectPaths opts args) (allLibraryPaths args) appname
]
)
--
-- Build a static library
--
library :: Args.Args
library = Args.defaultArgs { Args.buildFunction = libraryBuildFn }
libraryBuildFn :: [String] -> String -> Args.Args -> HRule
libraryBuildFn af tf args | debugFlag && trace (Args.showArgs (tf ++ " Library ") args) False = undefined
libraryBuildFn af tf args =
Rules [ libBuildArch af tf args arch | arch <- Args.architectures args ]
libGetOptionsForArch arch args =
(options arch) { extraIncludes =
[ NoDep SrcTree "src" a | a <- Args.addIncludes args],
optIncludes = (optIncludes $ options arch) \\
[ NoDep SrcTree "src" i | i <- Args.omitIncludes args ],
optFlags = (optFlags $ options arch) \\
[ Str f | f <- Args.omitCFlags args ],
optCxxFlags = (optCxxFlags $ options arch) \\
[ Str f | f <- Args.omitCxxFlags args ],
optSuffix = "_for_lib_" ++ Args.target args,
extraFlags = Args.addCFlags args ++ Args.addCxxFlags args,
extraDependencies =
[Dep BuildTree arch s | s <- Args.addGeneratedDependencies args]
}
libBuildArch af tf args arch =
let -- Fiddle the options
opts = libGetOptionsForArch arch args
csrcs = Args.cFiles args
cxxsrcs = Args.cxxFiles args
in
Rules ( flounderRules opts args csrcs
++
[ mackerelDependencies opts m csrcs | m <- Args.mackerelDevices args ]
++
[ compileCFiles opts csrcs,
compileCxxFiles opts cxxsrcs,
assembleSFiles opts (Args.assemblyFiles args),
staticLibrary opts (Args.target args) (allObjectPaths opts args) (allLibraryPaths args)
]
)
--
-- Library dependencies
--
-- The following code is under heavy construction, and also somewhat ugly
data LibDepTree = LibDep String | LibDeps [LibDepTree] deriving (Show,Eq)
-- Manually add dependencies for now (it would be better if each library
-- defined its own dependencies locally, but that does not seem to be an
-- easy thing to do currently).
libposixcompat_deps = LibDeps [ LibDep "posixcompat",
libvfs_deps_all, LibDep "term_server" ]
liblwip_deps = LibDeps $ [ LibDep x | x <- deps ]
where deps = ["lwip" ,"contmng" ,"net_if_raw" ,"timer" ,"hashtable"]
libnetQmng_deps = LibDeps $ [ LibDep x | x <- deps ]
where deps = ["net_queue_manager", "contmng" ,"procon" , "net_if_raw", "bfdmuxvm"]
libnfs_deps = LibDeps $ [ LibDep "nfs" ]
libssh_deps = LibDeps [ libposixcompat_deps, libopenbsdcompat_deps,
LibDep "zlib", LibDep "crypto", LibDep "ssh" ]
libopenbsdcompat_deps = LibDeps [ libposixcompat_deps, LibDep "crypto",
LibDep "openbsdcompat" ]
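-- Illustrative expansion (follows directly from the definitions above):
-- libssh_deps pulls in posixcompat (and, through it, the full vfs stack and
-- term_server), openbsdcompat, zlib, crypto and ssh; duplicates introduced by
-- the shared posixcompat subtree are removed later by libDeps via nub.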
-- we need to make vfs more modular to make this actually useful
data VFSModules = VFS_RamFS | VFS_NFS | VFS_BlockdevFS | VFS_FAT
vfsdeps :: [VFSModules] -> [LibDepTree]
vfsdeps [] = [LibDep "vfs"]
vfsdeps (VFS_RamFS:xs) = [] ++ vfsdeps xs
vfsdeps (VFS_NFS:xs) = [libnfs_deps] ++ vfsdeps xs
vfsdeps (VFS_BlockdevFS:xs) = [LibDep "ahci" ] ++ vfsdeps xs
vfsdeps (VFS_FAT:xs) = [] ++ vfsdeps xs
libvfs_deps_all = LibDeps $ vfsdeps [VFS_NFS, VFS_RamFS, VFS_BlockdevFS,
VFS_FAT]
libvfs_deps_nonfs = LibDeps $ vfsdeps [VFS_RamFS, VFS_BlockdevFS, VFS_FAT]
libvfs_deps_nfs = LibDeps $ vfsdeps [VFS_NFS]
libvfs_deps_ramfs = LibDeps $ vfsdeps [VFS_RamFS]
libvfs_deps_blockdevfs = LibDeps $ vfsdeps [VFS_BlockdevFS]
libvfs_deps_fat = LibDeps $ vfsdeps [VFS_FAT, VFS_BlockdevFS]
-- flatten the dependency tree
flat :: [LibDepTree] -> [LibDepTree]
flat [] = []
flat ((LibDep l):xs) = [LibDep l] ++ flat xs
flat ((LibDeps t):xs) = flat t ++ flat xs
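-- A small, directly checkable example of flattening:
--
-- >>> flat [LibDeps [LibDep "a", LibDeps [LibDep "b"]], LibDep "c"]
-- [LibDep "a",LibDep "b",LibDep "c"]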
str2dep :: String -> LibDepTree
str2dep str
| str == "vfs" = libvfs_deps_all
| str == "vfs_nonfs" = libvfs_deps_nonfs
| str == "posixcompat" = libposixcompat_deps
| str == "lwip" = liblwip_deps
| str == "netQmng" = libnetQmng_deps
| str == "ssh" = libssh_deps
| str == "openbsdcompat" = libopenbsdcompat_deps
| otherwise = LibDep str
-- get library dependencies
-- we need a specific order for the .a, so we define a total order
libDeps :: [String] -> [String]
libDeps xs = [x | (LibDep x) <- (sortBy xcmp) . nub . flat $ map str2dep xs ]
where xord = [ "ssh"
, "openbsdcompat"
, "crypto"
, "zlib"
, "posixcompat"
, "term_server"
, "vfs"
, "ahci"
, "nfs"
, "net_queue_manager"
, "bfdmuxvm"
, "lwip"
, "arranet"
, "e1000n"
, "e10k"
, "e10k_vf"
, "contmng"
, "procon"
, "net_if_raw"
, "vfsfd"
, "timer"
, "hashtable"]
xcmp (LibDep a) (LibDep b) = compare (elemIndex a xord) (elemIndex b xord)
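-- A directly checkable example: "hashtable" and "timer" have no special
-- expansion in str2dep, and "timer" precedes "hashtable" in xord, so
--
-- >>> libDeps ["hashtable", "timer"]
-- ["timer","hashtable"]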
--
-- Build a CPU driver
--
cpuDriver :: Args.Args
cpuDriver = Args.defaultArgs { Args.buildFunction = cpuDriverBuildFn,
Args.target = "cpu" }
-- CPU drivers are built differently
cpuDriverBuildFn :: [String] -> String -> Args.Args -> HRule
cpuDriverBuildFn af tf args = Rules []
| joe9/barrelfish | hake/RuleDefs.hs | mit | 44,692 | 0 | 18 | 11,775 | 12,007 | 6,191 | 5,816 | 728 | 3 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
import GHC.Generics
import System.Environment (getArgs)
import Data.Aeson hiding (encode)
import Data.Yaml
import Data.List
import System.Directory (doesFileExist, findExecutable, getCurrentDirectory, getHomeDirectory)
import System.FilePath.Posix
import Hoogle (defaultDatabaseLocation, mergeDatabase)
import Control.Applicative
import Control.Monad.Error
import Data.Maybe (isJust)
import Hoobuddy
import Data.ByteString.Char8 (unpack)
-- TODOs:
-- hoo-4 : use reader monad for config ?
deriving instance Generic Hoobuddy
instance ToJSON Hoobuddy
instance FromJSON Hoobuddy
hoogleMissingError :: String
hoogleMissingError =
unlines [ "Error: hoogle is not installed or not in path"
, "Please install hoogle and run `hoogle data`"]
defaultPkgs :: [String]
defaultPkgs = words "Cabal.hoo array.hoo base.hoo binary.hoo bytestring.hoo containers.hoo deepseq.hoo directory.hoo filepath.hoo haskell2010.hoo haskell98.hoo hoopl.hoo hpc.hoo old-locale.hoo old-time.hoo pretty.hoo process.hoo template-haskell.hoo time.hoo unix.hoo GLURaw.hoo GLUT.hoo HTTP.hoo HUnit.hoo OpenGL.hoo OpenGLRaw.hoo QuickCheck.hoo async.hoo attoparsec.hoo case-insensitive.hoo cgi.hoo fgl.hoo hashable.hoo haskell-src.hoo html.hoo mtl.hoo network.hoo parallel.hoo parsec.hoo primitive.hoo random.hoo regex-base.hoo regex-compat.hoo regex-posix.hoo split.hoo stm.hoo syb.hoo text.hoo transformers.hoo unordered-containers.hoo vector.hoo xhtml.hoo zlib.hoo"
help :: IO ()
help = putStrLn $
unlines [ "Usage : hoobuddy [deps|fetch] <cabal-file>"
, " [--help]"
, " [--default]"
, ""
, "deps list configured dependencies"
, "fetch fetch and merge documentation databases"
, ""
, "--default prints the default configuration"
, "--help prints this help"
]
type HoobuddyAction = ErrorT String IO
main :: IO ()
main = do
conf <- loadConfig
args <- getArgs
ret <- runErrorT $ runHoobuddy conf args
either putStrLn (\_ -> putStrLn "") ret
runHoobuddy :: Hoobuddy -> [String] -> HoobuddyAction ()
runHoobuddy cfg args = do
hoogleInstalled <- liftM isJust (liftIO $ findExecutable "hoogle")
unless hoogleInstalled (throwError hoogleMissingError)
run cfg args
where
run _ ["deps", file] = liftIO $ deps file
run conf ["fetch", file] = build file conf
run _ ["--help"] = liftIO help
run _ ["--default"] = liftIO $ defaultConfig >>= \x -> putStrLn $ unpack $ encode x
run _ _ = liftIO help
-- | Loads configuration from file or uses defaults
loadConfig :: IO Hoobuddy
loadConfig = decodeConfig >>= maybe defaultConfig return
unique :: (Ord a) => [a] -> [a]
unique = map head . group . sort
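-- A trivial check of the above:
--
-- >>> unique [3,1,2,3,1]
-- [1,2,3]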
defaultConfig :: IO Hoobuddy
defaultConfig = do
location <- defaultDatabaseLocation
return $ Hoobuddy (location </> "databases") True []
-- | Decodes configuration from JSON
decodeConfig :: IO (Maybe Hoobuddy)
decodeConfig = do
homeDir <- getHomeDirectory
parseResult <- decodeFileEither $ homeDir </> ".hoobuddy.conf"
return $ either (const Nothing) Just parseResult
build :: FilePath -> Hoobuddy -> HoobuddyAction ()
build cabalFile conf = do
pkgs <- map (++ ".hoo") <$> liftIO (getDeps cabalFile)
dbs <- liftIO $ getHooDatabases (databases conf)
let allPkgs = (++) pkgs (if useBase conf then defaultPkgs else custom conf)
let available = allPkgs `intersect` dbs
let missing = filter (`notElem` available) allPkgs
liftIO $ if null missing
then putStrLn "No data needs to be fetched"
else printInfo "Fetching databases for: " missing
fetchOp <- liftIO $ hoogleFetch missing
either (\_ -> throwError "Error executing hoogle") (\_ -> return ()) fetchOp
liftIO $ putStrLn "Merging databases ..."
currDir <- liftIO getCurrentDirectory
existingDbs <- liftIO $ filterM doesFileExist (fmap (databases conf </>) allPkgs)
liftIO $ mergeDatabase existingDbs (currDir </> "default.hoo")
liftIO $ putStrLn "Success: default.hoo"
-- | Pretty printer for info output
printInfo :: String -> [String] -> IO ()
printInfo str xs = putStrLn $ str ++ "[" ++ intercalate "," xs ++ "]"
| gilligan/hoobuddy | src/Main.hs | mit | 4,463 | 0 | 13 | 996 | 1,001 | 515 | 486 | 85 | 5 |
-- Collects the Hackage dependency tree for a package.
-- I feel like it would be better to handle lookup failures in the core data
-- structure instead of ignoring them. However, I'm going to defer that change
-- for now. This approach should work well enough for a first draft.
-- TODO Is there an alternative to classyPrelude that's suitable for
-- libraries? This is absurd.
{-# LANGUAGE UnicodeSyntax, TupleSections, ScopedTypeVariables #-}
{-# LANGUAGE OverloadedStrings, LambdaCase #-}
module DepViz where
import Data.Text (Text)
import qualified Data.Text as T
-- import Data.Map (Map)
import qualified Data.Map as M
import qualified Data.List as L
import Data.Set (Set)
import qualified Data.Set as S
import Data.Maybe
import Distribution.Hackage.DB (Hackage)
import Control.Monad
import Data.Functor
import Distribution.PackageDescription
import Distribution.Package
import Distribution.Text (display)
import Distribution.PackageDescription.Configuration
import Data.Graph.Inductive.Graph hiding (edges,nodes)
    -- Should we represent edges by embedding or by storing a name? The
    -- second sounds far more sane if recursive dependencies are a thing. Are
    -- they? Let's assume no.
--
-- Also, we ignore version information for now. We don't need 100%
-- correct results.
data DepTree = DepTree Text [DepTree] -- TODO Use ‘Data.Tree’.
type DepGraph = Set (Text,Text)
toFgr ∷ Graph gr => DepGraph → gr Text ()
toFgr dg = mkGraph (M.elems nodes) edges
where nodeNames = S.toAscList $ S.union (S.map fst dg) (S.map snd dg)
nodes = M.fromList $ snd $ L.foldl' loop (0,[]) nodeNames
loop (i,acc) nm = (i+1, (nm,(i,nm)):acc)
edges = catMaybes $ mkEdge <$> S.toList dg
mkEdge (a,b) = do
(aId,_) ← M.lookup a nodes
(bId,_) ← M.lookup b nodes
return (aId, bId, ())
graphDeps ∷ DepTree → DepGraph
graphDeps = snd . loop (S.empty,S.empty)
where depName (DepTree n _) = n
loop acc@(seen,gr) (DepTree n deps) =
if alreadySeen then acc else dive
where edges ∷ Set (Text,Text)
edges = S.fromList $ (n,) . depName <$> deps
dive = L.foldl' loop (S.insert n seen, S.union edges gr) deps
alreadySeen = S.member n seen
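-- A tiny worked example (derivable from the definitions above; the string
-- literals rely on the OverloadedStrings pragma at the top of this module,
-- and the result is shown via Data.Set's Show instance):
--
-- >>> graphDeps (DepTree "a" [DepTree "b" [], DepTree "c" []])
-- fromList [("a","b"),("a","c")]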
justLibs ∷ GenericPackageDescription → PackageDescription
justLibs gpd = flattenPackageDescription $ gpd
{ condTestSuites = []
, condBenchmarks = []
}
pkgDeps ∷ GenericPackageDescription → [Text]
pkgDeps gdesc = depName <$> buildDepends desc
where desc = justLibs gdesc
depName (Dependency nm _) = T.pack $ display nm
lookupDepTree ∷ Hackage -> Text → DepTree
lookupDepTree hack nm = DepTree nm children
where versions = fromMaybe M.empty $ M.lookup (T.unpack nm) hack
children = join $ maybeToList $ do
(_,pkg) ← if M.null versions then Nothing else Just(M.findMax versions)
let depNames = pkgDeps pkg
Just $ lookupDepTree hack <$> depNames
| bsummer4/depviz | DepViz.hs | mit | 3,021 | 0 | 15 | 673 | 821 | 454 | 367 | 54 | 2 |
{-
pandoc-crossref is a pandoc filter for numbering figures,
equations, tables and cross-references to them.
Copyright (C) 2015 Nikolay Yakimov <[email protected]>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-}
{-# LANGUAGE TemplateHaskell #-}
module Text.Pandoc.CrossRef.References.Types where
import qualified Data.Map as M
import Text.Pandoc.Definition
import Control.Monad.State
import Data.Default
import Data.Text (Text)
import Lens.Micro.TH
type Index = [(Int, Maybe Text)]
data RefRec = RefRec { refIndex :: Index
, refTitle :: [Inline]
, refSubfigure :: Maybe Index
} deriving (Show, Eq)
type RefMap = M.Map Text RefRec
-- state data type
data References = References { _imgRefs :: RefMap
, _eqnRefs :: RefMap
, _tblRefs :: RefMap
, _lstRefs :: RefMap
, _secRefs :: RefMap
, _curChap :: Index
} deriving (Show, Eq)
-- state monad
type WS a = State References a
instance Default References where
def = References n n n n n []
where n = M.empty
makeLenses ''References
| lierdakil/pandoc-crossref | lib-internal/Text/Pandoc/CrossRef/References/Types.hs | gpl-2.0 | 1,864 | 0 | 9 | 512 | 242 | 146 | 96 | 26 | 0 |
module NFA.Epsilon.Un where
import qualified NFA.Epsilon.Data as E
import Autolib.NFA
import Autolib.NFA.Epsilon
import qualified Autolib.Relation
-- | the real part of the automaton (all non-eps transitions)
real :: NFAC c s => E.ENFA c s -> NFA c s
real e = NFA
{ nfa_info = funni "Un.real" [ E.nfa_info e ]
, alphabet = E.alphabet e
, states = E.states e
, starts = E.starts e
, finals = E.finals e
, trans = E.trans e
}
-- | compute language-equivalent automaton
un :: NFAC c s => E.ENFA c s -> NFA c s
un e = add_epsilons ( real e )
$ Autolib.Relation.pairs $ E.epsilon_trans e
| marcellussiegburg/autotool | collection/src/NFA/Epsilon/Un.hs | gpl-2.0 | 720 | 0 | 10 | 246 | 212 | 114 | 98 | 16 | 1 |
module REPL where
import System.Console.Haskeline
import Types
import Evaluator
import Control.Monad.Trans
runRepl :: IO ()
runRepl = do
putStrLn "Welcome to the scheme REPL!"
env <- primitiveBindings
runInputT defaultSettings (loop env)
where loop :: Env -> InputT IO ()
loop env = do
ln <- getInputLine "> "
case ln of
Nothing -> return ()
Just "quit" -> outputStrLn "Goodbye!"
Just ":q" -> outputStrLn "Goodbye!"
Just input -> do
str <- lift $ evalString env input
outputStrLn str
loop env | 5outh/wyas | REPL.hs | gpl-2.0 | 659 | 0 | 16 | 246 | 179 | 84 | 95 | 21 | 4 |
{-# LANGUAGE DeriveDataTypeable #-}
{- |
Module : ./RDF/Symbols.hs
Copyright : (c) Francisc-Nicolae Bungiu
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : portable
Symbol items for Hets
-}
module RDF.Symbols where
import Common.Id
import Common.IRI
import Data.Data
import RDF.AS
-- * SYMBOL ITEMS FOR HETS
data SymbItems = SymbItems (Maybe RDFEntityType) [IRI]
deriving (Show, Eq, Typeable, Data)
data SymbMapItems = SymbMapItems (Maybe RDFEntityType) [(IRI, Maybe IRI)]
deriving (Show, Eq, Typeable, Data)
-- | raw symbols
data RawSymb = ASymbol RDFEntity | AnUri IRI
deriving (Show, Eq, Ord, Typeable, Data)
instance GetRange RawSymb
| spechub/Hets | RDF/Symbols.hs | gpl-2.0 | 755 | 0 | 9 | 141 | 161 | 90 | 71 | 13 | 0 |
module Language.Commands where
import Language.Expressions
import qualified Data.Map as M
import qualified System.Directory as D
builtinCmd =
M.fromList [
("cd", changeDir)
]
changeDir :: [String] -> IO ()
changeDir args = do
case null args of
True -> do
home <- D.getHomeDirectory
D.setCurrentDirectory home
False -> D.setCurrentDirectory $ head args
| tomicm/puh-hash | Language/Commands.hs | gpl-2.0 | 388 | 0 | 13 | 86 | 119 | 64 | 55 | 14 | 2 |
{-# LANGUAGE ImportQualifiedPost #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE RankNTypes #-}
import Control.Applicative(ZipList(..), Alternative ((<|>)))
import Control.Arrow((&&&))
import Control.Exception qualified as E
import Control.Monad(unless, when, forM_)
import Control.Lens
import Data.Default(def)
import Data.List(intercalate, sortOn)
import Data.Map(Map)
import Data.Map qualified as M
import Data.Maybe(catMaybes, isNothing)
import Data.Text(Text)
import Data.Text qualified as T
import Data.Text.IO qualified as T
import System.Environment(getArgs, getProgName)
import System.Exit(exitFailure, exitSuccess)
import System.IO(hPutStrLn, stderr)
import System.IO.Unsafe(unsafePerformIO)
import System.Console.JMVOptions
import Col
import HRowsException
import Model.Row
import Model.RowStore
import Model.SourceInfo
import Numeric (showFFloat)
data Format = HTML | LaTeX | Listatab deriving (Show, Read, Enum, Eq)
type AnonDic = Map Text Text
data ColIndices = ColIndices { _keyIndex :: Int
, _markStart :: Int
, _extrasStart :: Int
, _globalIndex :: Int
, _messageIndex :: Int
} deriving Show
makeLenses ''ColIndices
totalCols :: Getter ColIndices Int
totalCols = messageIndex
markEnd :: Getter ColIndices Int
markEnd = extrasStart
markInterval :: Getter ColIndices (Int, Int)
markInterval = to $ view markStart &&& view markEnd
extrasEnd :: Getter ColIndices Int
extrasEnd = globalIndex
extrasInterval :: Getter ColIndices (Int, Int)
extrasInterval = to $ view extrasStart &&& view extrasEnd
globalEnd :: Getter ColIndices Int
globalEnd = messageIndex
globalInterval :: Getter ColIndices (Int, Int)
globalInterval = to $ view globalIndex &&& view globalEnd
data Options = Options { _help :: Bool
, _anonymize :: Bool
, _anonFile :: Maybe FilePath
, _anonKey :: Col
, _anonLength :: Int
, _format :: Format
, _minPass :: Double
, _canCompensate :: Double
, _sortByGlobal :: Bool
, _key :: Col
, _marks :: [Col]
, _decimals :: Int
, _global :: Maybe Col
, _globalDecimals :: Int
, _message :: Maybe Col
, _extraCols :: [Col]
, _optionsFile :: Maybe FilePath
, _iOptions :: ListatabInfo
, _oOptions :: ListatabInfo
, _inputFileName :: Maybe FilePath
, _confFileName :: Maybe FilePath
}
makeLenses ''Options
defOpts :: Options
defOpts = Options { _help = False
, _anonymize = False
, _anonFile = Nothing
, _anonKey = Single (mkPosition 0) Nothing
, _anonLength = 5
, _format = LaTeX
, _minPass = 5
, _canCompensate = 4
, _sortByGlobal = False
, _key = Single (mkPosition 0) Nothing
, _marks = []
, _decimals = 2
, _global = Nothing
, _globalDecimals = 1
, _message = Nothing
, _extraCols = []
, _optionsFile = Nothing
, _iOptions = def
, _oOptions = def
, _inputFileName = Nothing
, _confFileName = Nothing
}
-- Parses a String to a Char representing a separator. Recognizes only
-- strings with one char or with an escape followed by a t.
parseSeparator :: String -> Char
parseSeparator [c] = c
parseSeparator "\\t" = '\t'
parseSeparator s = myError $ "Illegal string for separator: " ++ show s
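-- A couple of directly checkable examples:
--
-- >>> parseSeparator ";"
-- ';'
-- >>> parseSeparator "\\t"
-- '\t'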
setSeparator :: Lens' Options ListatabInfo -> Char -> Options -> Options
setSeparator l s = over l (\oc -> oc { ltSeparator = s })
setHeader :: Lens' Options ListatabInfo -> HeaderType -> Options -> Options
setHeader l c = over l (\oc -> oc { ltHeaderType = c })
setCols :: Lens' Options [Col] -> String -> String -> Options -> Options
setCols l n s = case parseCols (T.pack s) of
Left e -> myError $ "Bad column especification in " ++ n ++ ": " ++ T.unpack e
Right cs -> set l cs
setSingleCol :: Traversal' Options Col -> String -> String -> Options -> Options
setSingleCol l n s = case parseCols (T.pack s) of
Left e -> myError $ "Bad column especification in " ++ n ++ ": " ++ T.unpack e
Right [c@(Single _ _)]-> set l c
Right _ -> myError $ "For " ++ n ++ " you have to specify exactly one column"
setMaybeCol :: Lens' Options (Maybe Col) -> String -> String -> Options -> Options
setMaybeCol l n s = setSingleCol (l . _Just) n s . set l (Just AllCols)
defValue :: Show a => Lens' Options a -> String
defValue l = "Default: " ++ show (defOpts ^. l) ++ "."
options :: [OptDescr (Options -> Options)]
options = processOptions $ do
'h' ~: "help" ==> NoArg (set help True) ~: "This help."
'0' ~: "iNoHeader" ==> NoArg (setHeader iOptions NoHeader . setHeader oOptions NoHeader) ~: "Do not use header in the input."
'O' ~: "oNoHeader" ==> NoArg (setHeader oOptions NoHeader) ~: "Do not use header in the output. Must be used after -0 if both are present."
'1' ~: "iHeader1" ==> NoArg (setHeader iOptions FirstLine . setHeader oOptions FirstLine) ~: "Use the first line as header in the input."
'H' ~: "oHeader1" ==> NoArg (setHeader oOptions FirstLine) ~: "Use the first line as header in the output"
's' ~: "separator" ==> ReqArg (\s -> let c = parseSeparator s in setSeparator iOptions c . setSeparator oOptions c) "CHAR" ~:
("Field separator for the input and output. (Default: " ++ show (ltSeparator $ defOpts ^. iOptions) ++ ").")
'S' ~: "oSeparator" ==> ReqArg (setSeparator oOptions . parseSeparator) "CHAR" ~:
"Field separator for the output. (Default: same as -s). Must appear after -s when both are present."
'a' ~: "anonymize" ==> NoArg (set anonymize True) ~: "Anonymize the key column"
'A' ~: "anonFile" ==> ReqArg ( (set anonymize True .)
. set anonFile . Just) "FILE"
~: "Anonymize the key column using the file as reference (implies -a)"
'K' ~: "anonKey" ==> ReqArg (setSingleCol anonKey "anonKey") "KEY" ~: "Column with the key in the anonymous file. Default: first column."
'l' ~: "anonLength" ==> ReqArg (set anonLength . read) "INT" ~: "Length of the anoymous keys. " ++ defValue anonLength
'k' ~: "key" ==> ReqArg (setSingleCol key "key") "KEY" ~: "Column with the key. Default: first column."
'm' ~: "marks" ==> ReqArg (setCols marks "marks") "COLS" ~: "Columns with the marks. Default: no columns."
'g' ~: "global" ==> ReqArg (setMaybeCol global "global") "COL" ~: "Column with the global mark."
'M' ~: "message" ==> ReqArg (setMaybeCol message "message") "COL" ~: "Column that if not empty overrides the others. Default: no column."
'x' ~: "extraCols" ==> ReqArg (setCols extraCols "extraCols") "COLS" ~: "Columns with additional information"
'd' ~: "decimals" ==> ReqArg (set decimals . read) "DECS" ~: "Number of decimal places. " ++ defValue decimals
'D' ~: "globalDecimals" ==> ReqArg (set globalDecimals . read) "DECS" ~: "Number of decimal places of the global mark. " ++ defValue globalDecimals
'p' ~: "minPass" ==> ReqArg (set minPass . read) "MARK" ~: "Minimum passing mark. " ++ defValue minPass
  'P' ~: "canCompensate" ==> ReqArg (set canCompensate . read) "MARK" ~: "Minimum mark that can be compensated. " ++ defValue canCompensate
'F' ~: "format" ==> ReqArg (set format . read) "FORMAT" ~: "Format of the output, one of " ++ showEnum HTML ++ ". " ++ defValue format
'G' ~: "sortByGlobal" ==> NoArg (set sortByGlobal True) ~: "Sort using the global column instead of the key"
showEnum :: (Enum a, Show a) => a -> String
showEnum = intercalate ", " . map show . enumFrom
getOptions :: IO Options
getOptions = do
args <- getArgs
let (o, a, e) = getOpt Permute options args
let opt = foldl (flip id) defOpts o
when (opt ^. help) $ putStrLn helpMessage >> exitSuccess
unless (null e) $ myError $ concat e ++ helpMessage
return $ case a of
[] -> opt
[f] -> set inputFileName (Just f) opt
[f, c] -> set inputFileName (Just f) $ set confFileName (Just c) opt
_ -> myError "Too many filenames"
myError :: String -> a
myError m = unsafePerformIO $ do
n <- getProgName
hPutStrLn stderr $ n ++ " error: " ++ m
exitFailure
helpMessage :: String
helpMessage = usageInfo header options
    where header = "Usage: listing [Options] [files]\n\
\Creates a listing of marks from a listatab file.\n\
\Columns can be specified by formulas using hrows syntax.\n\
\Options receiving a list of formulas can list them \
\separated\n\
\by commas and also as ranges like [$1:$3] or [Q1:Q4]."
load :: Options -> IO RowStore
load opts = do
let Just fn = opts ^. inputFileName
pc <- mkPathAndConf fn $ opts ^. confFileName
let sinfo = mkSourceInfo Nothing pc $ opts ^. iOptions
r <- E.try $ readRowStore sinfo
case r of
Right (rst, _) -> return rst
Left (HRowsException mess) -> myError $ T.unpack mess
translate :: Options -> Maybe AnonDic -> RowStore -> (RowStore, ColIndices)
translate opts mdic rst = let
trKey = getKeyCol opts mdic rst
trMarks = applyCols (opts ^. marks) rst
trExtras = applyCols (opts ^. extraCols) rst
trGlobal = applyCols (catMaybes [opts ^. global]) rst
trMessage = applyCols (catMaybes [opts ^. message]) rst
allTr = [trKey, trMarks, trExtras, trGlobal, trMessage]
allRows = map concat . getZipList . sequenceA $
ZipList . rows <$> allTr
allNames = concat <$> traverse names allTr
inds = ColIndices { _keyIndex = 0
, _markStart = nFields trKey
, _extrasStart = _markStart inds + nFields trMarks
, _globalIndex = _extrasStart inds + nFields trExtras
, _messageIndex = _globalIndex inds + nFields trGlobal
}
in case allNames of
Nothing -> (fromRows (getName rst) allRows, inds)
Just nms -> (fromRowsNames (getName rst) nms allRows, inds)
getKeyCol :: Options -> Maybe AnonDic -> RowStore -> RowStore
getKeyCol opts mdic rst
| isNothing mdic = col
| otherwise = mapCol 0 mkAnon col
where col = applyCols [opts ^. key] rst
Just dic = mdic
mkAnon = toField . (dic M.!) . toString
keys :: Col -> RowStore -> [Text]
keys col rst = rst ^.. colF col . element 0 . to toString
anonymizeDic :: Int -> [Text] -> Map Text Text
anonymizeDic ml ts = let
s = sortOn T.reverse ts
d = zipWith3 combine s ("":s) (tail s ++ [""])
combine r p n = maxBy T.length (discriminate r p) (discriminate r n)
dots t = T.pack (replicate (ml - T.length t) '.') `T.append` t
in M.fromList $ zip s (map dots d)
maxBy :: Ord o => (a -> o) -> a -> a -> a
maxBy f x y | f x >= f y = x
| otherwise = y
discriminate :: Text -> Text -> Text
discriminate ref other = T.pack . sel []
$ T.zip (T.reverse ref) (T.reverse other)
where sel d [] = d
sel d ((r, o):xs) | r /= o = r:d
| otherwise = sel (r:d) xs
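-- Illustrative example (not part of the original program): every key is
-- replaced by its shortest suffix that distinguishes it from its neighbours
-- in the list sorted by reversed text, left-padded with dots up to the
-- requested key length.
_anonymizeDicExample :: Bool
_anonymizeDicExample =
  anonymizeDic 5 ["bob", "alice", "bruce"]
    == M.fromList [("alice", "..ice"), ("bob", "....b"), ("bruce", "..uce")]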
createAnonDic :: Options -> RowStore -> IO (Maybe AnonDic)
createAnonDic opts rst = fmap (anonymizeDic (opts ^. anonLength))
<$> sequence (fromFile <|> fromRst)
where fromFile = do
f <- opts ^. anonFile
let sinfo = mkSourceInfo Nothing (PathAndConf f Nothing) def
return (keys (opts ^. anonKey) . fst <$> readRowStore sinfo)
fromRst = if opts ^. anonymize
then Just . return $ keys (opts ^. key) rst
else Nothing
data Formatter = Formatter { _begin :: Text
, _end :: Text
, _titleLine :: [Text] -> Text
, _normalLine :: ColIndices -> Options -> Int -> Row -> Text
, _messageLine :: ColIndices -> Options -> Int -> Row -> Text
}
makeLenses ''Formatter
hTMLFormatter :: Formatter
hTMLFormatter = Formatter {
_begin = "<TT><TABLE>"
, _end = "</TABLE></TT>"
, _titleLine = htmlTitle
, _normalLine = htmlLine
, _messageLine = htmlMessage
}
htmlTitle :: [Text] -> Text
htmlTitle ts = T.concat
$ "<tr>"
: map (\t -> "<TH> <B>" <> t <> "</B> </TH>") ts
++ ["</tr>"]
fToText :: Int -> Field -> Text
fToText d f = case typeOf f of
TypeInt -> toString f <> "." <> T.replicate d "0"
TypeDouble -> T.pack $ showFFloat (Just d) (toDouble f) ""
_ -> toString f
colorGlobal :: Options -> Field -> Text
colorGlobal opts f = let
v = case typeOf f of
TypeInt -> fromIntegral $ toInt f
TypeDouble -> toDouble f
_ -> opts ^. minPass
in if v < opts ^. canCompensate
then "red"
else if v < opts ^. minPass
then "black"
else "blue"
trMark :: Int -> Text
trMark n | odd n = "<TR bgcolor=\"#bbbbbb\">"
| otherwise = "<TR>"
segment :: ColIndices -> Getter ColIndices (Int, Int) -> [a] -> [a]
segment inds g = uncurry slice' (inds ^. g)
htmlLine :: ColIndices -> Options -> Int -> Row -> Text
htmlLine inds opts n r = T.concat
( trMark n
: "<TD> " -- key
: toString (r !! (inds ^. keyIndex))
: " </TD>"
: [ "<TD align=\"center\"> "
<> fToText (opts ^. decimals) t
<> " </TD>"
| t <- segment inds markInterval r
]
++ [ "<TD align=\"center\"><font color=\""
<> colorGlobal opts t <> "\"> <b>"
<> fToText (opts ^. globalDecimals) t
<> "</font></TD>"
| t <- segment inds globalInterval r
]
++ [ "<TD align=\"left\"> " <> toString t <> "</TD>"
| t <- segment inds extrasInterval r
]
++ [ "</TR>" ]
)
htmlMessage :: ColIndices -> Options -> Int -> Row -> Text
htmlMessage inds _ n r = T.concat
( trMark n
: "<TD> " -- key
: toString (r !! (inds ^. keyIndex))
: " </TD>"
: [ "<TD colspan =\"" <> sp
<> "\"align = \"left\"><font color=\"red\">"
<> m
<> "</font></TD></TR>" ]
)
where m = toString (r !! (inds ^. messageIndex))
sp = T.pack $ show $ inds ^. totalCols - 1
laTeXFormatter :: Formatter
laTeXFormatter = Formatter {
_begin = ""
, _end = ""
, _titleLine = laTeXTitle
, _normalLine = laTeXLine
, _messageLine = laTeXMessage
}
escapeLaTeX :: Text -> Text
escapeLaTeX = T.concatMap charEscape
where charEscape '_' = "\\_"
charEscape '$' = "\\$"
charEscape '%' = "\\%"
charEscape '{' = "\\{"
charEscape '}' = "\\}"
charEscape '&' = "\\&"
charEscape '#' = "\\#"
charEscape '<' = "\\textless"
charEscape '>' = "\\textgreater"
charEscape '~' = "\\textasciitilde"
charEscape '\\' = "\\textbackslash"
charEscape c = T.singleton c
laTeXTitle :: [Text] -> Text
laTeXTitle = (<> "\\\\\\hline") . T.intercalate " & " . map escapeLaTeX
laTeXLine :: ColIndices -> Options -> Int -> Row -> Text
laTeXLine inds opts n r = T.concat
( (if odd n then "\\rowcolor[gray]{0.8}" else "")
: toString (r !! (inds ^. keyIndex))
: [ " & " <> fToText (opts ^. decimals) t
| t <- segment inds markInterval r
]
++ [ " & \\textcolor{"
<> colorGlobal opts t <> "}{"
<> fToText (opts ^. globalDecimals) t
<> "}"
| t <- segment inds globalInterval r
]
++ [ " & " <> toString t
| t <- segment inds extrasInterval r
]
++ [ "\\\\" ]
)
laTeXMessage :: ColIndices -> Options -> Int -> Row -> Text
laTeXMessage inds _ n r = T.concat
( (if odd n then "\\rowcolor[gray]{0.8}" else "")
: toString (r !! (inds ^. keyIndex))
: " & "
: [ "\\multicolumn{" <> sp <> "}{l}{\\qquad\\textcolor{red}{"<> m <> "}}\\\\" ]
)
where m = toString (r !! (inds ^. messageIndex))
sp = T.pack $ show $ inds ^. totalCols - 1
listatabFormatter :: Formatter
listatabFormatter = Formatter {
_begin = ""
, _end = ""
, _titleLine = listatabTitle
, _normalLine = listatabLine
, _messageLine = listatabMessage
}
escapeListatab :: Text -> Text
escapeListatab = T.concatMap charEscape
where charEscape '>' = "\\>"
charEscape '\\' = "\\\\"
charEscape c = T.singleton c
listatabTitle :: [Text] -> Text
listatabTitle = ("#<" <>) . (<> ">") . T.intercalate "><" . map escapeListatab
listatabLine :: ColIndices -> Options -> Int -> Row -> Text
listatabLine inds opts _ r = T.concat
( toString (r !! (inds ^. keyIndex))
: [ sep <> fToText (opts ^. decimals) t
| t <- segment inds markInterval r
]
++ [ sep <> fToText (opts ^. globalDecimals) t
| t <- segment inds globalInterval r
]
++ [ sep <> toString t
| t <- segment inds extrasInterval r
]
)
where sep = T.singleton . ltSeparator $ opts ^. oOptions
listatabMessage :: ColIndices -> Options -> Int -> Row -> Text
listatabMessage = myError "There can be no messages in listatab format"
writeListing :: Formatter -> Options -> ColIndices -> RowStore -> IO ()
writeListing fmt opts inds rst = do
unless (T.null $ fmt ^. begin) $ T.putStrLn $ fmt ^. begin
unless (ltHeaderType (opts ^. oOptions) == NoHeader) $ do
let nms = fnames rst
T.putStrLn $ fmt ^. titleLine $
concat [ [nms !! (inds ^. keyIndex)]
, segment inds markInterval nms
, segment inds globalInterval nms
, segment inds extrasInterval nms
]
forM_ (zip [1..] $ rows rst) $ \(n, r) -> do
let t = toString (r !! (inds ^. messageIndex))
if inds ^. messageIndex >= nFields rst || T.null t
then T.putStrLn $ (fmt ^. normalLine) inds opts n r
else T.putStrLn $ (fmt ^. messageLine) inds opts n r
unless (T.null $ fmt ^. end) $ T.putStrLn $ fmt ^. end
main :: IO ()
main = do
opts <- getOptions
unless (opts ^. format /= Listatab || isNothing (opts ^. message))
$ myError "There can not be a message column in listatab format"
rst <- case opts ^. inputFileName of
Nothing -> readRowStoreStdin $ opts ^. iOptions
Just _ -> load opts
anonDic <- createAnonDic opts rst
let (rst', inds) = translate opts anonDic rst
formatter = case opts ^. format of
HTML -> hTMLFormatter
LaTeX -> laTeXFormatter
Listatab -> listatabFormatter
ind = fromIntegral $ if opts ^. sortByGlobal
then inds ^. globalIndex
else inds ^. keyIndex
sorted = if opts ^. sortByGlobal || not (opts ^. anonymize)
then sortRows ind Ascending rst'
else sortRowsOn (T.reverse . toString . (!!! ind)) rst'
writeListing formatter opts inds sorted
| jvilar/hrows | src/Listing.hs | gpl-2.0 | 20,311 | 0 | 18 | 6,622 | 5,969 | 3,073 | 2,896 | -1 | -1 |
import Automata
import Text.Parsec
import Text.Parsec.String
import Text.Parsec.Expr
import Control.Applicative ((<$>))
dfaparser :: Parser DFA
nfaparser :: Parser NFA
| computation-hs/comp-models | ParseAutomata.hs | gpl-2.0 | 169 | 0 | 5 | 20 | 49 | 29 | 20 | 7 | 0 |
module Value(Value(Nil,Cons),isNil,car,cdr,strToValue,valueToStr) where
import Data.Bits(bit,testBit)
import Data.Char(chr,ord)
data Value = Nil | Cons Value Value
deriving (Eq, Ord)
instance Show Value where
show a = '(' : shows a ")" where
shows Nil s = s
shows (Cons a b) s = '(' : shows a (')' : shows b s)
isNil :: Value -> Bool
isNil Nil = True
isNil _ = False
car :: Value -> Value
car (Cons a _) = a
car Nil = Nil
cdr :: Value -> Value
cdr (Cons _ b) = b
cdr Nil = Nil
instance Read Value where
readsPrec p s@('(':_) = [readNext s]
readsPrec p (_:s) = readsPrec p s
readsPrec p [] = []
readNext :: String -> (Value,String)
readNext ('(':s) = readNext' s
where
readNext' ('(':s) =
let (car,s') = readNext' s
(cdr,s'') = readNext' s'
in (Cons car cdr,s'')
readNext' (')':s) = (Nil,s)
readNext' (_:s) = readNext' s
readNext' [] = error "unmatched ("
readNext (')':_) = error "unmatched )"
readNext (_:s) = readNext s
readNext [] = error "unmatched ("
strToValue :: String -> Value
strToValue [] = Cons Nil Nil
strToValue (c:cs) = bitsToValue [7,6..0] c
where
bitsToValue [] _ = strToValue cs
bitsToValue (b:bs) byte =
(if testBit (ord c) b then Cons else flip Cons)
(bitsToValue bs byte) Nil
valueToStr :: Value -> String
valueToStr value = valueToBits [7,6..0] 0 value
where
valueToBits [] byte rest = chr byte : valueToStr rest
valueToBits _ _ Nil = []
valueToBits (_:bs) byte (Cons Nil rest) = valueToBits bs byte rest
valueToBits (b:bs) byte (Cons rest _) = valueToBits bs (byte + bit b) rest
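-- Illustrative example (not part of the original module): each character is
-- encoded most-significant bit first, one cons cell per bit (a set bit keeps
-- the continuation in its car, a clear bit in its cdr), and the end of the
-- string is marked by (Cons Nil Nil), so decoding inverts encoding exactly.
_roundTripExample :: Bool
_roundTripExample = valueToStr (strToValue "Hi") == "Hi"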
| qpliu/esolang | ph/hs/interp/Value.hs | gpl-3.0 | 1,644 | 0 | 12 | 413 | 790 | 411 | 379 | 49 | 4 |
module Web.CookieJar.Parser.Util where
import qualified Data.ByteString as BS
import Web.CookieJar.Types
colon :: Word8
colon = 0x3A
semicolon :: Word8
semicolon = 0x3B
equals :: Word8
equals = 0x3D
space :: Word8
space = 0x20
hTab :: Word8
hTab = 0x09
slash :: Word8
slash = 0x2F
period :: Word8
period = 0x2E
negative :: Word8
negative = 0x2D
isWhitespace :: Word8 -> Bool
isWhitespace w = w == space || w == hTab
trim :: Bytes -> Bytes
trim = let f = BS.reverse . BS.dropWhile isWhitespace in f . f
-- TODO swap the details into isDigit?
isNonDigit :: Word8 -> Bool
isNonDigit w =
w >= 0x00 && w <= 0x2F
|| w >= 0x3A && w <= 0xFF
isDigit :: Word8 -> Bool
isDigit = not . isNonDigit
digitValue :: (Integral a) => Word8 -> Maybe a
digitValue w
| w >= 0x30 && w <= 0x39
= Just $ fromIntegral $ w - 0x30
| otherwise
= Nothing
digitsValue :: (Integral a) => [Word8] -> Maybe a
digitsValue = f . reverse . map digitValue
where
f [] = Just 0
f (Nothing : _) = Nothing
f (Just d : ds) = fmap ((+ d) . (* 10)) $ f ds
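-- Illustrative example (not part of the original module): the input bytes are
-- the ASCII digits of the number, most significant first, and a single
-- non-digit byte makes the whole parse fail.
_digitsValueExample :: Bool
_digitsValueExample =
  digitsValue [0x34, 0x32] == Just (42 :: Integer)
    && digitsValue [0x34, 0x2E] == (Nothing :: Maybe Integer)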
| ktvoelker/cookie-jar | src/Web/CookieJar/Parser/Util.hs | gpl-3.0 | 1,073 | 0 | 11 | 268 | 416 | 225 | 191 | 40 | 3 |
module HLinear.NormalForm.FoldUnfold.ReduceEchelonForm.EuclideanDomain
where
import qualified Prelude as P
import HLinear.Utility.Prelude
import Data.Sequence ( ViewR(..), viewr )
import qualified Data.Sequence as S
import qualified Data.Vector as V
import qualified Data.Permute as P
import HLinear.Hook.ERHook ( ERHook(..) )
import HLinear.Hook.EchelonForm ( EchelonForm(..), PivotStructure(..) )
import HLinear.Hook.EchelonTransformation ( EchelonTransformation(..) )
import HLinear.Hook.PLEHook ( UEHook(..) )
import HLinear.Matrix ( headRows, tailRows )
import HLinear.Matrix.Definition ( Matrix(..) )
import qualified HLinear.Hook.EchelonForm as EF
import qualified HLinear.Hook.EchelonForm.Row as EFR
import qualified HLinear.Hook.EchelonTransformation as ET
import qualified HLinear.Matrix as M
reduceEchelonForm
:: ( EuclideanDomain a, DecidableZero a )
=> EchelonForm a -> UEHook a a
reduceEchelonForm ef =
case reduceLastPivot (ef, EF.pivotStructure ef) of
Nothing -> UEHook (ET.one nrs) (EF.zero nrs ncs)
Just (er, efp') ->
let ERHook et' _ ef' = V.foldl (*) er $
V.unfoldr reduceLastPivot efp'
in UEHook et' ef'
where
nrs = nmbRows ef
ncs = nmbCols ef
reduceLastPivot
:: ( EuclideanDomain a, DecidableZero a )
=> (EchelonForm a, PivotStructure)
-> Maybe (ERHook a, (EchelonForm a, PivotStructure))
reduceLastPivot ( ef@(EchelonForm nrs ncs _), PivotStructure pivots )
| nrs == 0 && ncs == 0 = Nothing
| pivots' :> pivotRC <- viewr pivots = Just $
let (efLeft, Matrix nrs' ncs' efTopRight, efBottomRight) =
EF.splitAtHook pivotRC ef
pivotRow = efBottomRight `EF.atRow` 0
pivot = V.head pivotRow
pivotTop = fmap V.head efTopRight
(pivotTop', pivotTopNormalization) =
V.unzip $ fmap (`quotRem` pivot) pivotTop
et = ET.singleton $ fmap negate pivotTopNormalization
efTopRight' =
(\f -> V.zipWith f efTopRight pivotTopNormalization) $ \r h ->
V.zipWith (\re pe -> re - h*pe) r pivotRow
in ( ERHook et (Matrix nrs' ncs' efTopRight') efBottomRight
, (efLeft, PivotStructure pivots')
)
| otherwise = Just
( ERHook (ET.one nrs) (M.zero 0 ncs) ef
, (EF.zero 0 0, PivotStructure S.empty)
)
| martinra/hlinear | src/HLinear/NormalForm/FoldUnfold/ReduceEchelonForm/EuclideanDomain.hs | gpl-3.0 | 2,347 | 0 | 17 | 543 | 751 | 420 | 331 | 52 | 2 |
{-# OPTIONS_GHC -Wall #-}
{-# LANGUAGE Trustworthy #-}
{- |
Module : Data.Complex.Cyclotomic
Copyright : (c) Scott N. Walck 2012-2017
License : GPL-3 (see LICENSE)
Maintainer : Scott N. Walck <[email protected]>
Stability : experimental
The cyclotomic numbers are a subset of the complex numbers with
the following properties:
1. The cyclotomic numbers are represented exactly, enabling exact
computations and equality comparisons.
2. The cyclotomic numbers contain the Gaussian rationals
(complex numbers of the form 'p' + 'q' 'i' with 'p' and 'q' rational).
As a consequence, the cyclotomic numbers are a dense subset of the
complex numbers.
3. The cyclotomic numbers contain the square roots of all rational numbers.
4. The cyclotomic numbers form a field: they are closed under addition, subtraction,
multiplication, and division.
5. The cyclotomic numbers contain the sine and cosine of all rational
multiples of pi.
6. The cyclotomic numbers can be thought of as the rational field extended
with 'n'th roots of unity for arbitrarily large integers 'n'.
Floating point numbers do not do well with equality comparison:
>(sqrt 2 + sqrt 3)^2 == 5 + 2 * sqrt 6
> -> False
"Data.Complex.Cyclotomic" represents these numbers exactly, allowing equality comparison:
>(sqrtRat 2 + sqrtRat 3)^2 == 5 + 2 * sqrtRat 6
> -> True
'Cyclotomic's can be exported as inexact complex numbers using the 'toComplex' function:
>e 6
> -> -e(3)^2
>real $ e 6
> -> 1/2
>imag $ e 6
> -> -1/2*e(12)^7 + 1/2*e(12)^11
>imag (e 6) == sqrtRat 3 / 2
> -> True
>toComplex $ e 6
> -> 0.5000000000000003 :+ 0.8660254037844384
The algorithms for cyclotomic numbers are adapted from code by
Martin Schoenert and Thomas Breuer in the GAP project <http://www.gap-system.org/>
(in particular source files gap4r4\/src\/cyclotom.c and
gap4r4\/lib\/cyclotom.gi).
-}
module Data.Complex.Cyclotomic
( Cyclotomic
, i
, e
, sqrtInteger
, sqrtRat
, sinDeg
, cosDeg
, sinRev
, cosRev
, gaussianRat
, polarRat
, polarRatDeg
, polarRatRev
, conj
, real
, imag
, isReal
, isRat
, isGaussianRat
, toComplex
, toReal
, toRat
, goldenRatio
, dft
, dftInv
, rootsQuadEq
, heron
)
where
import Data.List
( nub
)
import Data.Ratio
( (%)
, numerator
, denominator
)
import Data.Complex
( Complex(..)
, realPart
)
import qualified Data.Map as M
( Map
, empty
, singleton
, lookup
, keys
, elems
, size
, fromList
, toList
, mapKeys
, filter
, insertWith
, delete
, map
, unionWith
, findWithDefault
, fromListWith
)
import Math.NumberTheory.ArithmeticFunctions
( runFunction
, totientA
, smallOmegaA
, isNFreeA
)
import Math.NumberTheory.Primes
( unPrime
, factorise
)
-- | A cyclotomic number.
data Cyclotomic = Cyclotomic { order :: Integer
, coeffs :: M.Map Integer Rational
} deriving (Eq)
-- | @abs@ and @signum@ are partial functions.
-- A cyclotomic number is not guaranteed to have a cyclotomic absolute value.
-- When defined, @signum c@ is the complex number with magnitude 1 that has the same argument as c;
-- @signum c = c / abs c@.
instance Num Cyclotomic where
(+) = sumCyc
(*) = prodCyc
(-) c1 c2 = sumCyc c1 (aInvCyc c2)
negate = aInvCyc
abs = absVal
signum = sigNum
fromInteger 0 = zeroCyc
fromInteger n = Cyclotomic 1 (M.singleton 0 (fromIntegral n))
instance Fractional Cyclotomic where
recip = invCyc
fromRational 0 = zeroCyc
fromRational r = Cyclotomic 1 (M.singleton 0 r)
-- | The primitive @n@th root of unity.
-- For example, @'e'(4) = 'i'@ is the primitive 4th root of unity,
-- and 'e'(5) = exp(2*pi*i/5) is the primitive 5th root of unity.
-- In general, 'e' 'n' = exp(2*pi*i/'n').
e :: Integer -> Cyclotomic
e n
| n < 1 = error "e requires a positive integer"
| n == 1 = Cyclotomic 1 (M.singleton 0 1)
| otherwise = cyclotomic n $ convertToBase n (M.singleton 1 1)
instance Show Cyclotomic where
show (Cyclotomic n mp)
| mp == M.empty = "0"
| otherwise = leadingTerm rat n ex ++ followingTerms n xs
where ((ex,rat):xs) = M.toList mp
showBaseExp :: Integer -> Integer -> String
showBaseExp n 1 = "e(" ++ show n ++ ")"
showBaseExp n ex = "e(" ++ show n ++ ")^" ++ show ex
leadingTerm :: Rational -> Integer -> Integer -> String
leadingTerm r _ 0 = showRat r
leadingTerm r n ex
| r == 1 = t
| r == (-1) = '-' : t
| r > 0 = showRat r ++ "*" ++ t
| r < 0 = "-" ++ showRat (abs r) ++ "*" ++ t
| otherwise = ""
where t = showBaseExp n ex
followingTerms :: Integer -> [(Integer,Rational)] -> String
followingTerms _ [] = ""
followingTerms n ((ex,rat):xs) = followingTerm rat n ex ++ followingTerms n xs
followingTerm :: Rational -> Integer -> Integer -> String
followingTerm r n ex
| r == 1 = " + " ++ t
| r == (-1) = " - " ++ t
| r > 0 = " + " ++ showRat r ++ "*" ++ t
| r < 0 = " - " ++ showRat (abs r) ++ "*" ++ t
| otherwise = ""
where t = showBaseExp n ex
showRat :: Rational -> String
showRat r
| d == 1 = show n
| otherwise = show n ++ "/" ++ show d
where
n = numerator r
d = denominator r
-- GAP function EB from gap4r4/lib/cyclotom.gi
eb :: Integer -> Cyclotomic
eb n
| n < 1 = error "eb needs a positive integer"
| n `mod` 2 /= 1 = error "eb needs an odd integer"
| n == 1 = zeroCyc
| otherwise = let en = e n
in sum [en^(k*k `mod` n) | k <- [1..(n-1) `div` 2]]
sqrt2 :: Cyclotomic
sqrt2 = e 8 - e 8 ^ (3 :: Int)
-- | The square root of an 'Integer'.
sqrtInteger :: Integer -> Cyclotomic
sqrtInteger n
| n == 0 = zeroCyc
| n < 0 = i * sqrtPositiveInteger (-n)
| otherwise = sqrtPositiveInteger n
sqrtPositiveInteger :: Integer -> Cyclotomic
sqrtPositiveInteger n
| n < 1 = error "sqrtPositiveInteger needs a positive integer"
| otherwise = let factors = factorise n
factor = product [unPrime p ^ (m `div` 2) | (p, m) <- factors]
nn = product [unPrime p ^ (m `mod` 2) | (p, m) <- factors]
in case nn `mod` 4 of
1 -> fromInteger factor * (2 * eb nn + 1)
2 -> fromInteger factor * sqrt2 * sqrtPositiveInteger (nn `div` 2)
3 -> fromInteger factor * (-i) * (2 * eb nn + 1)
_ -> fromInteger factor * 2 * sqrtPositiveInteger (nn `div` 4)
-- | The square root of a 'Rational' number.
sqrtRat :: Rational -> Cyclotomic
sqrtRat r = prodRatCyc (1 % fromInteger den) (sqrtInteger (numerator r * den))
where
den = denominator r
-- | The square root of -1.
i :: Cyclotomic
i = e 4
-- | Make a Gaussian rational; @gaussianRat p q@ is the same as @p + q * i@.
gaussianRat :: Rational -> Rational -> Cyclotomic
gaussianRat p q = fromRational p + fromRational q * i
-- | A complex number in polar form, with rational magnitude @r@ and rational angle @s@
-- of the form @r * exp(2*pi*i*s)@; @polarRat r s@ is the same as @r * e q ^ p@,
-- where @s = p/q@. This function is the same as 'polarRatRev'.
polarRat :: Rational -- ^ magnitude
-> Rational -- ^ angle, in revolutions
-> Cyclotomic -- ^ cyclotomic number
polarRat r s
= let p = numerator s
q = denominator s
in case p >= 0 of
True -> fromRational r * e q ^ p
False -> conj $ fromRational r * e q ^ (-p)
-- | A complex number in polar form, with rational magnitude and rational angle
-- in degrees.
polarRatDeg :: Rational -- ^ magnitude
-> Rational -- ^ angle, in degrees
-> Cyclotomic -- ^ cyclotomic number
polarRatDeg r deg
= let s = deg / 360
p = numerator s
q = denominator s
in case p >= 0 of
True -> fromRational r * e q ^ p
False -> conj $ fromRational r * e q ^ (-p)
-- | A complex number in polar form, with rational magnitude and rational angle
-- in revolutions.
polarRatRev :: Rational -- ^ magnitude
-> Rational -- ^ angle, in revolutions
-> Cyclotomic -- ^ cyclotomic number
polarRatRev r s
= let p = numerator s
q = denominator s
in case p >= 0 of
True -> fromRational r * e q ^ p
False -> conj $ fromRational r * e q ^ (-p)
-- | Complex conjugate.
conj :: Cyclotomic -> Cyclotomic
conj (Cyclotomic n mp)
= mkCyclotomic n (M.mapKeys (\k -> (n-k) `mod` n) mp)
-- | Real part of the cyclotomic number.
real :: Cyclotomic -> Cyclotomic
real z = (z + conj z) / 2
-- | Imaginary part of the cyclotomic number.
imag :: Cyclotomic -> Cyclotomic
imag z = (z - conj z) / (2*i)
absVal :: Cyclotomic -> Cyclotomic
absVal c = let modsq = c * conj c
in case toRat modsq of
Just msq -> sqrtRat msq
Nothing -> error "abs not available for this number"
sigNum :: Cyclotomic -> Cyclotomic
sigNum 0 = zeroCyc
sigNum c = let modsq = c * conj c
in if isRat modsq then c / absVal c
else error "signum not available for this number"
convertToBase :: Integer -> M.Map Integer Rational -> M.Map Integer Rational
convertToBase n mp = foldr (\(p,r) m -> replace n p r m) mp (extraneousPowers n)
removeZeros :: M.Map Integer Rational -> M.Map Integer Rational
removeZeros = M.filter (/= 0)
-- Corresponds to GAP implementation.
-- Expects that convertToBase has already been done.
cyclotomic :: Integer -> M.Map Integer Rational -> Cyclotomic
cyclotomic ord = tryReduce . tryRational . gcdReduce . Cyclotomic ord
mkCyclotomic :: Integer -> M.Map Integer Rational -> Cyclotomic
mkCyclotomic ord = cyclotomic ord . removeZeros . convertToBase ord
-- | Step 1 of cyclotomic is gcd reduction.
gcdReduce :: Cyclotomic -> Cyclotomic
gcdReduce cyc@(Cyclotomic n mp) = case gcdCyc cyc of
1 -> cyc
d -> Cyclotomic (n `div` d) (M.mapKeys (`div` d) mp)
gcdCyc :: Cyclotomic -> Integer
gcdCyc (Cyclotomic n mp) = gcdList (n:M.keys mp)
-- | Step 2 of cyclotomic is reduction to a rational if possible.
tryRational :: Cyclotomic -> Cyclotomic
tryRational c
| lenCyc c == fromIntegral phi && sqfree
= case equalCoefficients c of
Nothing -> c
Just r -> fromRational $ (-1)^(nrp `mod` 2)*r
| otherwise
= c
where
(phi,nrp,sqfree) = phiNrpSqfree (order c)
-- | Compute phi(n), the number of prime factors, and test if n is square-free.
phiNrpSqfree :: Integer -> (Integer, Int, Bool)
phiNrpSqfree = runFunction $ (,,) <$> totientA <*> smallOmegaA <*> isNFreeA 2
equalCoefficients :: Cyclotomic -> Maybe Rational
equalCoefficients (Cyclotomic _ mp)
= case ts of
[] -> Nothing
(x:_) -> if equal ts then Just x else Nothing
where
ts = M.elems mp
lenCyc :: Cyclotomic -> Int
lenCyc (Cyclotomic _ mp) = M.size $ removeZeros mp
-- | Step 3 of cyclotomic is base reduction
tryReduce :: Cyclotomic -> Cyclotomic
tryReduce c
= foldr reduceByPrime c squareFreeOddFactors
where
squareFreeOddFactors = [unPrime p | (p, m) <- factorise (order c), unPrime p > 2, m <= 1]
reduceByPrime :: Integer -> Cyclotomic -> Cyclotomic
reduceByPrime p c@(Cyclotomic n _)
= case mapM (\r -> equalReplacements p r c) [0,p..n-p] of
Just cfs -> Cyclotomic (n `div` p) $ removeZeros $ M.fromList $ zip [0..(n `div` p)-1] (map negate cfs)
Nothing -> c
equalReplacements :: Integer -> Integer -> Cyclotomic -> Maybe Rational
equalReplacements p r (Cyclotomic n mp)
= case [M.findWithDefault 0 k mp | k <- replacements n p r] of
[] -> error "equalReplacements generated empty list"
(x:xs) | equal (x:xs) -> Just x
_ -> Nothing
replacements :: Integer -> Integer -> Integer -> [Integer]
replacements n p r = takeWhile (>= 0) [r-s,r-2*s..] ++ takeWhile (< n) [r+s,r+2*s..]
where s = n `div` p
replace :: Integer -> Integer -> Integer -> M.Map Integer Rational -> M.Map Integer Rational
replace n p r mp = case M.lookup r mp of
Nothing -> mp
Just rat -> foldr (\k m -> M.insertWith (+) k (-rat) m) (M.delete r mp) (replacements n p r)
includeMods :: Integer -> Integer -> Integer -> [Integer]
includeMods n q start = [start] ++ takeWhile (>= 0) [start-q,start-2*q..] ++ takeWhile (< n) [start+q,start+2*q..]
removeExps :: Integer -> Integer -> Integer -> [Integer]
removeExps n 2 q = concatMap (includeMods n q) $ map ((n `div` q) *) [q `div` 2..q-1]
removeExps n p q = concatMap (includeMods n q) $ map ((n `div` q) *) [-m..m]
where m = (q `div` p - 1) `div` 2
pqPairs :: Integer -> [(Integer, Integer)]
pqPairs n = map (\(p, k) -> (unPrime p, unPrime p ^ k)) (factorise n)
extraneousPowers :: Integer -> [(Integer,Integer)]
extraneousPowers n
| n < 1 = error "extraneousPowers needs a positive integer"
| otherwise = nub $ concat [[(p,r) | r <- removeExps n p q] | (p,q) <- pqPairs n]
-- | Sum of two cyclotomic numbers.
sumCyc :: Cyclotomic -> Cyclotomic -> Cyclotomic
sumCyc (Cyclotomic o1 map1) (Cyclotomic o2 map2)
= let ord = lcm o1 o2
m1 = ord `div` o1
m2 = ord `div` o2
map1' = M.mapKeys (m1*) map1
map2' = M.mapKeys (m2*) map2
in mkCyclotomic ord $ M.unionWith (+) map1' map2'
-- | Product of two cyclotomic numbers.
prodCyc :: Cyclotomic -> Cyclotomic -> Cyclotomic
prodCyc (Cyclotomic o1 map1) (Cyclotomic o2 map2)
= let ord = lcm o1 o2
m1 = ord `div` o1
m2 = ord `div` o2
in mkCyclotomic ord $ M.fromListWith (+)
[((m1*e1+m2*e2) `mod` ord,c1*c2) | (e1,c1) <- M.toList map1, (e2,c2) <- M.toList map2]
-- | Product of a rational number and a cyclotomic number.
prodRatCyc :: Rational -> Cyclotomic -> Cyclotomic
prodRatCyc 0 _ = zeroCyc
prodRatCyc r (Cyclotomic ord mp) = Cyclotomic ord $ M.map (r*) mp
-- | Additive identity.
zeroCyc :: Cyclotomic
zeroCyc = Cyclotomic 1 M.empty
-- | Additive inverse.
aInvCyc :: Cyclotomic -> Cyclotomic
aInvCyc = prodRatCyc (-1)
multiplyExponents :: Integer -> Cyclotomic -> Cyclotomic
multiplyExponents j (Cyclotomic n mp)
| gcd j n /= 1 = error "multiplyExponents needs gcd == 1"
| otherwise = mkCyclotomic n (M.mapKeys (\k -> j*k `mod` n) mp)
productOfGaloisConjugates :: Cyclotomic -> Cyclotomic
productOfGaloisConjugates c
= product [multiplyExponents j c | j <- [2..ord], gcd j ord == 1]
where
ord = order c
-- | Multiplicative inverse.
invCyc :: Cyclotomic -> Cyclotomic
invCyc z = case toRat (z * prod) of
Just r -> prodRatCyc (1 / r) prod
Nothing -> error "invCyc: product of Galois conjugates not rational; this is a bug, please inform package maintainer"
where
prod = productOfGaloisConjugates z
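-- Illustrative examples (not part of the original module): 'recip' goes
-- through 'invCyc' and is exact, e.g. the reciprocal of i is -i and the
-- reciprocal of (1 + i) is (1 - i)/2.
_recipExamples :: Bool
_recipExamples = recip i == (-i) && recip (1 + i) == (1 - i) / 2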
-- | Is the cyclotomic a real number?
isReal :: Cyclotomic -> Bool
isReal c = c == conj c
-- | Is the cyclotomic a rational?
isRat :: Cyclotomic -> Bool
isRat (Cyclotomic 1 _) = True
isRat _ = False
-- | Is the cyclotomic a Gaussian rational?
isGaussianRat :: Cyclotomic -> Bool
isGaussianRat c = isRat (real c) && isRat (imag c)
-- | Export as an inexact complex number.
toComplex :: RealFloat a => Cyclotomic -> Complex a
toComplex c = sum [fromRational r * en^p | (p,r) <- M.toList (coeffs c)]
where en = exp (0 :+ 2*pi/n)
n = fromIntegral (order c)
-- | Export as an inexact real number if possible.
toReal :: RealFloat a => Cyclotomic -> Maybe a
toReal c
| isReal c = Just $ realPart (toComplex c)
| otherwise = Nothing
-- | Return an exact rational number if possible.
toRat :: Cyclotomic -> Maybe Rational
toRat (Cyclotomic 1 mp)
| mp == M.empty = Just 0
| otherwise = M.lookup 0 mp
toRat _ = Nothing
-- | Sine function with argument in degrees.
sinDeg :: Rational -> Cyclotomic
sinDeg d = let n = d / 360
nm = abs (numerator n)
dn = denominator n
a = e dn^nm
in fromRational(signum d) * (a - conj a) / (2*i)
-- | Cosine function with argument in degrees.
cosDeg :: Rational -> Cyclotomic
cosDeg d = let n = d / 360
nm = abs (numerator n)
dn = denominator n
a = e dn^nm
in (a + conj a) / 2
-- | Sine function with argument in revolutions.
sinRev :: Rational -> Cyclotomic
sinRev n = let nm = abs (numerator n)
dn = denominator n
a = e dn^nm
in fromRational(signum n) * (a - conj a) / (2*i)
-- | Cosine function with argument in revolutions.
cosRev :: Rational -> Cyclotomic
cosRev n = let nm = abs (numerator n)
dn = denominator n
a = e dn^nm
in (a + conj a) / 2
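-- Illustrative examples (not part of the original module): trigonometric
-- values at rational angles are exact, e.g. cos of 60 degrees and sin of 30
-- degrees are exactly 1/2, and a third of a revolution has cosine -1/2.
_trigExamples :: Bool
_trigExamples =
  cosDeg 60 == 1/2 && sinDeg 30 == 1/2 && cosRev (1 % 3) == (-1) / 2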
gcdList :: [Integer] -> Integer
gcdList [] = error "gcdList called on empty list"
gcdList (n:ns) = foldr gcd n ns
equal :: Eq a => [a] -> Bool
equal [] = True
equal [_] = True
equal (x:y:ys) = x == y && equal (y:ys)
-- | The golden ratio, @(1 + √5)/2@.
goldenRatio :: Cyclotomic
goldenRatio = (1 + sqrtRat 5) / 2
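-- Illustrative example (not part of the original module): the defining
-- identity of the golden ratio holds exactly in this representation.
_goldenRatioExample :: Bool
_goldenRatioExample = goldenRatio * goldenRatio == goldenRatio + 1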
-- | Discrete Fourier transform,
-- @X_k = \sum_{n=0}^{N-1} x_n \cdot e^{-i 2 \pi \frac{k}{N} n}@.
dft :: [Cyclotomic] -> [Cyclotomic]
dft cs = [sum $ zipWith (*) [conj (e m^(k*n)) | n <- [0..]] cs | k <- [0..m-1]]
where m = fromIntegral $ length cs
-- | Inverse discrete Fourier transform,
-- @x_n = \frac{1}{N} \sum_{k=0}^{N-1} X_k \cdot e^{i 2 \pi \frac{k}{N} n}@.
dftInv :: [Cyclotomic] -> [Cyclotomic]
dftInv cs = [minv * sum (zipWith (*) [e m^(k*n) | n <- [0..]] cs) | k <- [0..m-1]]
where m = fromIntegral $ length cs
minv = fromRational (1 % m)
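-- Illustrative examples (not part of the original module): with exact
-- cyclotomic arithmetic the transform of a constant signal has a single
-- non-zero bin, and 'dftInv' undoes 'dft' exactly.
_dftExamples :: Bool
_dftExamples =
  dft [1, 1, 1, 1] == [4, 0, 0, 0] && dftInv [4, 0, 0, 0] == [1, 1, 1, 1]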
-- | Solutions to the quadratic equation
-- a x^2 + b x + c = 0.
-- Returns 'Nothing' if a == 0.
rootsQuadEq :: Rational -- ^ a
-> Rational -- ^ b
-> Rational -- ^ c
-> Maybe (Cyclotomic,Cyclotomic) -- ^ roots
rootsQuadEq a b c
| a == 0 = Nothing
| otherwise = Just ((-bb + sqrtDisc)/(2*aa),(-bb - sqrtDisc)/(2*aa))
where
aa = fromRational a
bb = fromRational b
sqrtDisc = sqrtRat (b*b - 4*a*c)
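-- Illustrative example (not part of the original module): the roots of
-- x^2 - x - 1 = 0 are the golden ratio and its conjugate 1 - goldenRatio.
_rootsQuadEqExample :: Bool
_rootsQuadEqExample =
  rootsQuadEq 1 (-1) (-1) == Just (goldenRatio, 1 - goldenRatio)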
-- | Heron's formula for the area of a triangle with
-- side lengths a, b, c.
heron :: Rational -- ^ a
-> Rational -- ^ b
-> Rational -- ^ c
-> Cyclotomic -- ^ area of triangle
heron a b c
= sqrtRat (s * (s-a) * (s-b) * (s-c))
where
s = (a + b + c) / 2
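-- Illustrative example (not part of the original module): the classic 3-4-5
-- right triangle has area exactly 6.
_heronExample :: Bool
_heronExample = heron 3 4 5 == 6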
| walck/cyclotomic | src/Data/Complex/Cyclotomic.hs | gpl-3.0 | 18,916 | 0 | 15 | 5,384 | 5,831 | 3,039 | 2,792 | 374 | 4 |
{-# LANGUAGE OverloadedStrings, GADTs #-}
module Java2js.Load(loadKlass, withKlass, loadCPEntry, entry2Direct) where
--
import Data.ByteString.Lazy.Char8 (unpack)
import Java2js.Type
import Java2js.Mangle
import Java2js.Java.ClassPath
import Java2js.Java.JAR.Archive (readFromJAR)
import Java2js.JVM.Common ()
import Java2js.JVM.Converter
import Java2js.JVM.ClassFile
import Java2js.JVM.Assembler
import qualified Data.Set as S
import qualified Data.Map.Strict as M
loadCPEntry :: Tree CPEntry -> IO [Klass]
loadCPEntry x = fmap (fmap loadKlass) $ treeCPEntry2Direct x
treeCPEntry2Direct :: Tree CPEntry -> IO [Class Direct]
treeCPEntry2Direct = treeCPEntry2Direct' ""
treeCPEntry2Direct' :: String -> Tree CPEntry -> IO [Class Direct]
treeCPEntry2Direct' prefix (Directory dir left) = fmap concat (mapM (treeCPEntry2Direct' (prefix++dir++"/")) left)
treeCPEntry2Direct' _ (File (Loaded _ cls)) = return [cls]
treeCPEntry2Direct' _ (File (LoadedJAR _ cls)) = return [cls]
treeCPEntry2Direct' prefix (File (NotLoaded f)) = fmap (\cls -> [cls]) (parseClassFile (prefix++f))
treeCPEntry2Direct' prefix (File (NotLoadedJAR jarfile path)) = fmap (\cls -> [cls]) (readFromJAR jarfile (prefix++path))
entry2Direct :: CPEntry -> IO (Class Direct)
entry2Direct (Loaded _ cls) = return cls
entry2Direct (LoadedJAR _ cls) = return cls
entry2Direct x = fail (show x)
loadKlass :: Class Direct -> Klass
loadKlass cls = Klass{
klassClass = cls,
klassName = unpack (thisClass cls),
superKlass = unpack (superClass cls),
fields = map extField (classFields cls),
methods = M.fromList $ map extMeth (classMethods cls),
constantPool = constsPool cls
}
where
isStatic accs = S.member ACC_STATIC accs
extField f = (unpack $ fieldName f, (f, fieldConstant cls (fieldName f)))
extMeth meth = (mangleMethod meth, (meth, extCode meth))
extCode :: Method Direct -> Maybe Code
extCode meth = fmap decodeMethod (attrByName meth "Code")
withKlass :: [Tree CPEntry] -> String -> (Klass -> a) -> IO (Maybe a)
withKlass entries klsName fun =
do
cls <- getEntry entries klsName
return $ fmap (fun.loadKlass.extract) cls
where
extract (Loaded _ cls) = cls
extract (LoadedJAR _ cls) = cls
extract _ = error "[BUG]"
| ledyba/java.js | lib/Java2js/Load.hs | gpl-3.0 | 2,231 | 14 | 12 | 347 | 854 | 448 | 406 | 48 | 3 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.Route53Domains.GetOperationDetail
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | This operation returns the current status of an operation that is not
-- completed.
--
-- <http://docs.aws.amazon.com/Route53/latest/APIReference/api-GetOperationDetail.html>
module Network.AWS.Route53Domains.GetOperationDetail
(
-- * Request
GetOperationDetail
-- ** Request constructor
, getOperationDetail
-- ** Request lenses
, godOperationId
-- * Response
, GetOperationDetailResponse
-- ** Response constructor
, getOperationDetailResponse
-- ** Response lenses
, godrDomainName
, godrMessage
, godrOperationId
, godrStatus
, godrSubmittedDate
, godrType
) where
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.Route53Domains.Types
import qualified GHC.Exts
newtype GetOperationDetail = GetOperationDetail
{ _godOperationId :: Text
} deriving (Eq, Ord, Read, Show, Monoid, IsString)
-- | 'GetOperationDetail' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'godOperationId' @::@ 'Text'
--
getOperationDetail :: Text -- ^ 'godOperationId'
-> GetOperationDetail
getOperationDetail p1 = GetOperationDetail
{ _godOperationId = p1
}
-- | The identifier for the operation for which you want to get the status. Amazon
-- Route 53 returned the identifier in the response to the original request.
--
-- Type: String
--
-- Default: None
--
-- Required: Yes
godOperationId :: Lens' GetOperationDetail Text
godOperationId = lens _godOperationId (\s a -> s { _godOperationId = a })
data GetOperationDetailResponse = GetOperationDetailResponse
{ _godrDomainName :: Maybe Text
, _godrMessage :: Maybe Text
, _godrOperationId :: Maybe Text
, _godrStatus :: Maybe OperationStatus
, _godrSubmittedDate :: Maybe POSIX
, _godrType :: Maybe OperationType
} deriving (Eq, Read, Show)
-- | 'GetOperationDetailResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'godrDomainName' @::@ 'Maybe' 'Text'
--
-- * 'godrMessage' @::@ 'Maybe' 'Text'
--
-- * 'godrOperationId' @::@ 'Maybe' 'Text'
--
-- * 'godrStatus' @::@ 'Maybe' 'OperationStatus'
--
-- * 'godrSubmittedDate' @::@ 'Maybe' 'UTCTime'
--
-- * 'godrType' @::@ 'Maybe' 'OperationType'
--
getOperationDetailResponse :: GetOperationDetailResponse
getOperationDetailResponse = GetOperationDetailResponse
{ _godrOperationId = Nothing
, _godrStatus = Nothing
, _godrMessage = Nothing
, _godrDomainName = Nothing
, _godrType = Nothing
, _godrSubmittedDate = Nothing
}
-- | The name of a domain.
--
-- Type: String
godrDomainName :: Lens' GetOperationDetailResponse (Maybe Text)
godrDomainName = lens _godrDomainName (\s a -> s { _godrDomainName = a })
-- | Detailed information on the status including possible errors.
--
-- Type: String
godrMessage :: Lens' GetOperationDetailResponse (Maybe Text)
godrMessage = lens _godrMessage (\s a -> s { _godrMessage = a })
-- | The identifier for the operation.
--
-- Type: String
godrOperationId :: Lens' GetOperationDetailResponse (Maybe Text)
godrOperationId = lens _godrOperationId (\s a -> s { _godrOperationId = a })
-- | The current status of the requested operation in the system.
--
-- Type: String
godrStatus :: Lens' GetOperationDetailResponse (Maybe OperationStatus)
godrStatus = lens _godrStatus (\s a -> s { _godrStatus = a })
-- | The date when the request was submitted.
godrSubmittedDate :: Lens' GetOperationDetailResponse (Maybe UTCTime)
godrSubmittedDate =
lens _godrSubmittedDate (\s a -> s { _godrSubmittedDate = a })
. mapping _Time
-- | The type of operation that was requested.
--
-- Type: String
godrType :: Lens' GetOperationDetailResponse (Maybe OperationType)
godrType = lens _godrType (\s a -> s { _godrType = a })
instance ToPath GetOperationDetail where
toPath = const "/"
instance ToQuery GetOperationDetail where
toQuery = const mempty
instance ToHeaders GetOperationDetail
instance ToJSON GetOperationDetail where
toJSON GetOperationDetail{..} = object
[ "OperationId" .= _godOperationId
]
instance AWSRequest GetOperationDetail where
type Sv GetOperationDetail = Route53Domains
type Rs GetOperationDetail = GetOperationDetailResponse
request = post "GetOperationDetail"
response = jsonResponse
instance FromJSON GetOperationDetailResponse where
parseJSON = withObject "GetOperationDetailResponse" $ \o -> GetOperationDetailResponse
<$> o .:? "DomainName"
<*> o .:? "Message"
<*> o .:? "OperationId"
<*> o .:? "Status"
<*> o .:? "SubmittedDate"
<*> o .:? "Type"
| dysinger/amazonka | amazonka-route53-domains/gen/Network/AWS/Route53Domains/GetOperationDetail.hs | mpl-2.0 | 5,776 | 0 | 19 | 1,232 | 830 | 497 | 333 | 87 | 1 |
module Kata.AreWeAlternate (isAlt) where
isAlt :: String -> Bool
isAlt [ ] = True
isAlt (a : b) = f b $ isV a
where f (h : t) l = l /= isV h && (f t $ not l)
f [ ] l = True
isV = (`elem` "aeiou")
--
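-- Illustrative behaviour (not part of the original kata file):
-- isAlt "amazon" == True, because vowels and consonants strictly alternate,
-- while isAlt "apple" == False, because of the two consecutive 'p's.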
| ice1000/OI-codes | codewars/401-500/are-we-alternate.hs | agpl-3.0 | 227 | 0 | 10 | 80 | 122 | 65 | 57 | 7 | 2 |
module Main where
import Types.Cards
import Types.Lists
import Types.Trees
import Types.User
main :: IO ()
main = putStrLn "TypeLineTests compiled"
-- Testing Type Signature Insertion is a bit thin
-- As Blackbox doesn't really do much work for this at all
-- All that needs to be tested is the ability to extract the function name
-- And put a signature on the front of it in the token stream
{-Expected Type :: User -> String -}
test0 :: User -> [Char]
test0 (Student s i ss) = s ++ ss
test0 (Admin s i) = s ++ (show i)
{-Expected Type :: User -> [String] -}
test1 :: User -> [String]
test1 (Student s i ss) = s : [ss]
test1 (Admin s i) = s : [show i]
{-Expected Type :: x -> x -}
test2 :: t -> t
test2 x = x
{- Expected Type :: Maybe Int -> Int -}
test3 :: Num a => Maybe a -> a
test3 Nothing = 0
test3 (Just i) = i
{-
Sometimes compiler warnings are sent over STDERR
We don't care so this tests that they are ignored when
testing if a file loaded
-}
testCompilerWarningIgnoring a = a
testCompilerWarningIgnoring [] = []
| DarrenMowat/blackbox | tests/results/TypeLineTests.hs | unlicense | 1,048 | 0 | 7 | 231 | 252 | 136 | 116 | 20 | 1 |
module Handler.Synthax.Synthaxes
( getSynthaxesR
, postSynthaxesR
) where
import Handler.Partials
import Handler.Synthax.Partials
import Import
import Data.Time
import Yesod.Auth
getSynthaxesR :: Handler Html
getSynthaxesR = do
euser <- requireAuth
synthaxes <- runDB $ selectList [] [Desc SynthaxCreated]
defaultLayout $ do
setTitle "Synthaxes"
let _userInfo = _userInfo' euser
let _synthaxList = _synthaxList' synthaxes True True
$(widgetFile "Synthax/synthaxes")
postSynthaxesR :: Handler ()
postSynthaxesR = do
Entity uid _ <- requireAuth
SynthaxResponse c n <- requireJsonBody
t <- liftIO $ getCurrentTime
_ <- runDB $ insert $ Synthax c n uid t
sendResponseStatus status201 ("CREATED" :: Text)
| burz/sonada | Handler/Synthax/Synthaxes.hs | apache-2.0 | 769 | 0 | 13 | 162 | 222 | 108 | 114 | -1 | -1 |
module Handler.Api
( postNewPlayerR
, postVoteR
, postMakePostR
, postMakeReplyR
) where
import Import
import Data.Aeson
import Control.Monad (mzero, when)
import Data.Time (getCurrentTime)
import Data.Maybe (isNothing)
data VoteReq = VoteReq Vote Name
instance FromJSON VoteReq where
parseJSON (Object v) = VoteReq <$> v .: "vote" <*> v .: "name"
parseJSON _ = mzero
postVoteR :: Handler RepJson
postVoteR = do
VoteReq vote name <- parseJsonBody_
ip <- requestIP
acid <- getAcid
_ <- update' acid $ ProcessVote ip name vote
jsonToRepJson ()
postNewPlayerR :: Handler RepJson
postNewPlayerR = do
(name:_) <- parseJsonBody_
acid <- getAcid
_ <- update' acid $ NewPlayer name
jsonToRepJson ()
data PostInput = PostInput { inputPlayer :: Text
, inputName :: Text
, inputUrl :: Maybe Text
, inputBody :: Text
}
instance FromJSON PostInput where
parseJSON (Object v) = PostInput <$> v .: "player" <*> v .: "name" <*> v .:? "url" <*> v .: "text"
parseJSON _ = mzero
maybeRight :: Either z a -> Maybe a
maybeRight x = case x of
Right y -> Just y
Left _ -> Nothing
postMakePostR :: Handler RepJson
postMakePostR = do
PostInput player name1 url text <- parseJsonBody_
when (text == "" && isNothing url) notFound
let name = if name1 == "" then "Anonymous" else name1
thumb_ <- case url of
Just u -> liftIO . createThumbs $ u
Nothing -> return $ Left ThumbError
let thumb = maybeRight thumb_
time <- liftIO getCurrentTime
acid <- getAcid
num <- update' acid $ NewTopPost (Name player) $ Post name text thumb time
jsonToRepJson num
data ReplyInput = ReplyInput { replyParent :: Int
, replyName :: Text
, replyUrl :: Maybe Text
, replyBody :: Text
}
instance FromJSON ReplyInput where
parseJSON (Object v) = ReplyInput <$> v .: "parent" <*> v .: "name" <*> v .:? "url" <*> v .: "text"
parseJSON _ = mzero
postMakeReplyR :: Handler RepJson
postMakeReplyR = do
ReplyInput parNum name1 url text <- parseJsonBody_
when (text == "" && isNothing url) notFound
let name = if name1 == "" then "Anonymous" else name1
thumb_ <- case url of
Just u -> liftIO . createThumbs $ u
Nothing -> return $ Left ThumbError
let thumb = maybeRight thumb_
acid <- getAcid
time <- liftIO getCurrentTime
postNumber <- either500 $ update' acid $ NewReply parNum $ Post name text thumb time
jsonToRepJson postNumber
| JerrySun/league-famous | Handler/Api.hs | bsd-2-clause | 2,729 | 0 | 13 | 844 | 857 | 420 | 437 | 71 | 3 |
{-# LANGUAGE OverloadedStrings #-}
-- |Executable that writes all 3D vertices found in all @PLY@ files
-- indicated by a @.conf@ file to a single binary file.
module Main (main) where
import Control.Monad (when)
import qualified Data.Vector.Storable as VS
import Linear.V3
import PLY
import System.Environment (getArgs)
import System.IO (withBinaryFile, IOMode(WriteMode), hPutBuf)
main :: IO ()
main = do args@(~[confFile, outputFile]) <- getArgs
when (length args /= 2)
(error "Usage: ply2bin confFile outputFile")
Right pts <- loadConfV3 "vertex" confFile
:: IO (Either String (VS.Vector (V3 Float)))
putStrLn $ "Loaded "++show (VS.length pts)++" vertices"
withBinaryFile outputFile WriteMode $ \h ->
VS.unsafeWith pts $ \ptr ->
              hPutBuf h ptr (VS.length pts * 12) -- 12 bytes per vertex: 3 Float components of 4 bytes each
| acowley/ply-loader | src/executable/Main.hs | bsd-3-clause | 874 | 0 | 15 | 217 | 247 | 132 | 115 | 18 | 1 |
{-# LANGUAGE RecordWildCards #-}
-----------------------------------------------------------------------------
-- |
-- Module : Blip.Marshal
-- Copyright : (c) 2012, 2013 Bernie Pope
-- License : BSD-style
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : ghc
--
-- Reading, writing and representation of Python bytecode files.
--
-----------------------------------------------------------------------------
module Blip.Marshal
( readPyc, writePyc, PycFile (..), PyObject (..), CodeObjectFlagMask
, co_optimized, co_newlocals, co_varargs, co_varkeywords
, co_nested, co_generator, co_nofree )
where
import Prelude hiding ((<>))
import Blip.MarshalDouble (bytesToDouble, doubleToBytes)
import Blip.Bytecode (decode, BytecodeSeq (..))
import Blip.Pretty (Pretty (..), prettyList, prettyTuple)
import Control.Applicative ((<$>), (<*>))
import Data.Map as Map hiding (map, size, empty)
import Data.Word (Word8, Word16, Word32)
import Control.Monad.Error (ErrorT (..), lift, replicateM)
import System.IO (Handle)
import qualified Data.ByteString.Lazy as B
(ByteString, hGetContents, unpack, hPutStr, length)
import Data.ByteString.Lazy.UTF8 as UTF8 (toString, fromString)
import Data.Binary.Get (Get, runGet, getLazyByteString, getWord32le, getWord8, getWord16le)
import Data.Binary.Put (PutM, putWord32le, putLazyByteString, runPutM, putWord8, putWord16le)
import Data.Bits ((.&.))
import Data.Int (Int64, Int32)
import Data.Char (chr, ord)
import Text.PrettyPrint
(text, (<+>), ($$), (<>), Doc , vcat, int, equals, doubleQuotes, hsep, empty)
data PycFile =
PycFile
{ magic :: !Word32
, modified_time :: !Word32
, size :: !Word32 -- the size in bytes of the original Python source
, object :: !PyObject -- a code object
}
deriving Show
instance Pretty PycFile where
pretty pycFile =
text "magic =" <+> pretty (magic pycFile) $$
text "modified time =" <+> pretty (modified_time pycFile) $$
text "size =" <+> pretty (size pycFile) $$
text "object =" <+> pretty (object pycFile)
data PyObject
= Code
{ argcount :: !Word32 -- #arguments, except *args
, kwonlyargcount :: !Word32 -- #keyword only arguments
, nlocals :: !Word32 -- #local variables
, stacksize :: !Word32 -- #entries needed for evaluation stack
, flags :: !Word32 -- CO_..., see below
, code :: !PyObject -- instruction opcodes (a string)
, consts :: !PyObject -- list (constants used) XXX seems to be a tuple
, names :: !PyObject -- list of strings (names used)
, varnames :: !PyObject -- tuple of strings (local variable names)
, freevars :: !PyObject -- tuple of strings (free variable names)
, cellvars :: !PyObject -- tuple of strings (cell variable names)
, filename :: !PyObject -- unicode (where it was loaded from)
, name :: !PyObject -- unicode (name, for reference)
, firstlineno :: !Word32 -- first source line number
, lnotab :: !PyObject -- string (encoding addr<->lineno mapping)
}
| String { string :: !B.ByteString }
| Tuple { elements :: ![PyObject] }
| Int { int_value :: !Word32 } -- XXX should that be Int32?
| Float { float_value :: !Double }
| None
| Ellipsis
| Unicode { unicode :: !String } -- should be decoded into a String
| TrueObj
| FalseObj
| Complex { real :: !Double, imaginary :: !Double }
| Long { long_value :: !Integer }
deriving (Eq, Ord, Show)
instance Pretty PyObject where
pretty (String {..}) = doubleQuotes $ pretty string
pretty (Tuple {..}) = prettyTuple $ map pretty elements
pretty (Int {..}) = pretty int_value
pretty (Long {..}) = pretty long_value
pretty (Float {..}) = pretty float_value
pretty None = text "None"
pretty Ellipsis = text "..."
pretty TrueObj = text "True"
pretty FalseObj = text "False"
pretty (Unicode {..}) = doubleQuotes $ text unicode
pretty (Code {..}) =
text "argcount =" <+> pretty argcount $$
text "kwonlyargcount =" <+> pretty kwonlyargcount $$
text "nlocals =" <+> pretty nlocals $$
text "stacksize =" <+> pretty stacksize $$
text "flags =" <+> prettyFlags flags $$
text "varnames =" <+> pretty varnames $$
text "freevars =" <+> pretty freevars $$
text "cellvars =" <+> pretty cellvars $$
text "filename =" <+> pretty filename $$
text "name =" <+> pretty name $$
text "firstlineno =" <+> pretty firstlineno $$
text "lnotab =" <+> prettyLnotab lnotab $$
text "names =" <+> pretty names $$
prettyConsts consts $$
text "code =" <+> pretty (BytecodeSeq $ decode $ string code)
pretty (Complex {..}) = pretty real <+> text "+" <+> pretty imaginary <> text "j"
prettyConsts :: PyObject -> Doc
prettyConsts obj =
case obj of
Tuple {..} ->
vcat $ map prettyConst $ zip [0..] elements
_other -> text ("consts not a tuple: " ++ show obj)
where
prettyConst :: (Int, PyObject) -> Doc
prettyConst (i, obj) = text "const" <+> int i <+> equals <+> pretty obj
prettyLnotab :: PyObject -> Doc
prettyLnotab obj =
case obj of
String {..} -> prettyList $ map pretty $ pairs $ B.unpack string
_other -> text ("lnotab not a string: " ++ show obj)
where
      pairs :: [Word8] -> [(Word8, Word8)]
      pairs [] = []
      pairs [_] = error $ "Odd number of bytes in lnotab"
      pairs (nextCode:nextLine:rest) = (nextCode, nextLine) : pairs rest
readPyc :: Handle -> IO PycFile
readPyc handle = do
bytes <- B.hGetContents handle
runGetDataCheck getPycFile bytes
writePyc :: Handle -> PycFile -> IO ()
writePyc handle pycFile = do
bytes <- runPutDataCheck $ putPycFile pycFile
B.hPutStr handle bytes
getPycFile :: GetData PycFile
getPycFile = PycFile <$> getU32 <*> getU32 <*> getU32 <*> readObject
putPycFile :: PycFile -> PutData
putPycFile pycFile = do
putU32 $ magic pycFile
putU32 $ modified_time pycFile
putU32 $ size pycFile
writeObject $ object pycFile
readObject :: GetData PyObject
readObject = do
object_type <- decodeObjectType <$> getU8
case object_type of
CODE -> readCodeObject
STRING -> readStringObject
TUPLE -> readTupleObject
INT -> readIntObject
NONE -> return None
ELLIPSIS -> return Ellipsis
TRUE -> return TrueObj
FALSE -> return FalseObj
UNICODE -> readUnicodeObject
BINARY_FLOAT -> readFloatObject
BINARY_COMPLEX -> readComplexObject
LONG -> readLongObject
_other -> error ("readObject: unsupported object type: " ++ show object_type)
writeObject :: PyObject -> PutData
writeObject object =
case object of
Code {..} -> writeCodeObject object
String {..} -> writeStringObject object
Tuple {..} -> writeTupleObject object
Int {..} -> writeIntObject object
None -> putU8 $ encodeObjectType NONE
Ellipsis -> putU8 $ encodeObjectType ELLIPSIS
Unicode {..} -> writeUnicodeObject object
TrueObj -> putU8 $ encodeObjectType TRUE
FalseObj -> putU8 $ encodeObjectType FALSE
Float {..} -> writeFloatObject object
Complex {..} -> writeComplexObject object
Long {..} -> writeLongObject object
writeObjectType :: ObjectType -> PutData
writeObjectType = putU8 . encodeObjectType
readCodeObject :: GetData PyObject
readCodeObject =
Code <$> getU32 <*> getU32 <*> getU32 <*> getU32 <*> getU32 <*>
readObject <*> readObject <*> readObject <*> readObject <*>
readObject <*> readObject <*> readObject <*> readObject <*>
getU32 <*> readObject
writeCodeObject :: PyObject -> PutData
writeCodeObject (Code {..}) =
writeObjectType CODE >>
mapM_ putU32 [argcount, kwonlyargcount, nlocals, stacksize, flags] >>
mapM_ writeObject [code, consts, names, varnames, freevars, cellvars,
filename, name] >>
putU32 firstlineno >>
writeObject lnotab
writeCodeObject other = error $ "writeCodeObject called on non code object: " ++ show other
readStringObject :: GetData PyObject
readStringObject = do
len <- getU32
String <$> (getBS $ fromIntegral len)
writeStringObject :: PyObject -> PutData
writeStringObject (String {..}) =
writeObjectType STRING >>
putU32 (fromIntegral $ B.length string) >>
putBS string
writeStringObject other = error $ "writeStringObject called on non string object: " ++ show other
readTupleObject :: GetData PyObject
readTupleObject = do
len <- getU32
Tuple <$> replicateM (fromIntegral len) readObject
writeTupleObject :: PyObject -> PutData
writeTupleObject (Tuple {..}) =
writeObjectType TUPLE >>
putU32 (fromIntegral $ length elements) >>
mapM_ writeObject elements
writeTupleObject other = error $ "writeTupleObject called on non tuple object: " ++ show other
readIntObject :: GetData PyObject
readIntObject = Int <$> getU32
writeIntObject :: PyObject -> PutData
writeIntObject (Int {..}) =
writeObjectType INT >> putU32 int_value
writeIntObject other = error $ "writeIntObject called on non int object: " ++ show other
readFloatObject :: GetData PyObject
readFloatObject = Float <$> getDouble
readComplexObject :: GetData PyObject
readComplexObject = Complex <$> getDouble <*> getDouble
writeFloatObject :: PyObject -> PutData
writeFloatObject (Float {..}) =
writeObjectType BINARY_FLOAT >> putDouble float_value
writeFloatObject other = error $ "writeFloatObject called on non float object: " ++ show other
writeComplexObject :: PyObject -> PutData
writeComplexObject (Complex {..}) =
writeObjectType BINARY_COMPLEX >> putDouble real >> putDouble imaginary
writeComplexObject other = error $ "writeComplexObject called on non complex object: " ++ show other
readUnicodeObject :: GetData PyObject
readUnicodeObject = do
len <- getU32
bs <- getBS $ fromIntegral len
return $ Unicode $ UTF8.toString bs
writeUnicodeObject :: PyObject -> PutData
writeUnicodeObject (Unicode {..}) = do
writeObjectType UNICODE
let uc = UTF8.fromString unicode
putU32 (fromIntegral $ B.length uc)
putBS uc
writeUnicodeObject other = error $ "writeUnicodeObject called on non unicode object: " ++ show other
longDigitBase :: Integer
longDigitBase = 2^(15::Integer)
readLongObject :: GetData PyObject
readLongObject = do
-- Read the len as a signed 32bit integer.
-- The sign tells us whether the number is positive or negative.
-- The magnitude tells us how many digits are in the number.
-- Digits are stored in base 2^15, in 16 bit chunks.
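    -- For example (illustrative): 100000 = 1696 + 3 * 2^15 is stored with
    -- len = 2 and the 16-bit digits [1696, 3] (least significant digit
    -- first); -100000 uses the same digits with len = -2.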
len <- getI32
if len == 0
then return $ Long 0
else do
-- read 'len' digits
base15digits <- replicateM (fromIntegral (abs len)) getU16
-- pair each digit with its exponent
-- [(d0, 0), (d1, 1), ... (dn, n)]
let digitsExponents = zip (map fromIntegral base15digits) [(0::Integer) ..]
-- (d0 * base^0) + (d1 * base^1) + ... + ... (dn * base^n)
val = sum [(longDigitBase ^ exp) * digit | (digit, exp) <- digitsExponents]
-- Check if the result is positive or negative based on the sign of len.
if len < 0
then return $! Long $! negate val
else return $! Long val
writeLongObject :: PyObject -> PutData
writeLongObject (Long {..}) = do
writeObjectType LONG
case compare long_value 0 of
EQ -> putI32 0
GT -> do
putI32 numDigits
mapM_ putU16 digits
LT -> do
putI32 $ negate numDigits
mapM_ putU16 digits
where
digits :: [Word16]
digits = getDigits (abs long_value) longDigitBase
numDigits :: Int32
numDigits = fromIntegral $ length digits
-- We assume n > 0
getDigits :: Integer -> Integer -> [Word16]
getDigits 0 _base = []
getDigits n base = (fromIntegral (n `mod` base)) : getDigits (n `div` base) base
writeLongObject other = error $ "writeLongObject called on non long object: " ++ show other
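-- A small illustration of the base-2^15 digit encoding used by
-- 'readLongObject' and 'writeLongObject'. This helper is not part of the
-- original marshal code; it simply mirrors the local 'getDigits' above for
-- documentation purposes.
base15DigitsExample :: Integer -> [Word16]
base15DigitsExample 0 = []
base15DigitsExample n =
   fromIntegral (n `mod` longDigitBase) : base15DigitsExample (n `div` longDigitBase)
-- For instance, base15DigitsExample 100000 == [1696, 3], because
-- 100000 == 1696 + 3 * 2^15; 'writeLongObject' would therefore emit a digit
-- count of 2 followed by the 16-bit digits 1696 and 3.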
data ObjectType
= NULL -- '0'
| NONE -- 'N'
| FALSE -- 'F'
| TRUE -- 'T'
| STOPITER -- 'S'
| ELLIPSIS -- '.'
| INT -- 'i'
   | INT64           -- 'I' INT64 is deprecated. It is not
-- generated anymore, and support for reading it
-- will be removed in Python 3.4.
| FLOAT -- 'f'
| BINARY_FLOAT -- 'g'
| COMPLEX -- 'x'
| BINARY_COMPLEX -- 'y'
| LONG -- 'l'
| STRING -- 's'
| TUPLE -- '('
| LIST -- '['
| DICT -- '{'
| CODE -- 'c'
| UNICODE -- 'u'
| UNKNOWN -- '?'
| SET -- '<'
| FROZENSET -- '>'
deriving (Eq, Ord, Show)
charToObjectType :: Map.Map Char ObjectType
charToObjectType = Map.fromList objectTypeList
objectTypeToChar :: Map.Map ObjectType Char
objectTypeToChar = Map.fromList [ (y, x) | (x, y) <- objectTypeList ]
objectTypeList :: [(Char, ObjectType)]
objectTypeList = [
('0', NULL),
('N', NONE),
('F', FALSE),
('T', TRUE),
('S', STOPITER),
('.', ELLIPSIS),
('i', INT),
('I', INT64),
('f', FLOAT),
('g', BINARY_FLOAT),
('x', COMPLEX),
('y', BINARY_COMPLEX),
('l', LONG),
('s', STRING),
('(', TUPLE),
('[', LIST),
('{', DICT),
('c', CODE),
('u', UNICODE),
('?', UNKNOWN),
('<', SET),
('>', FROZENSET) ]
encodeObjectType :: ObjectType -> Word8
encodeObjectType objectType =
case Map.lookup objectType objectTypeToChar of
Nothing -> error $ "bad object type: " ++ show objectType
Just chr -> fromIntegral $ ord chr
decodeObjectType :: Word8 -> ObjectType
decodeObjectType byte =
case Map.lookup byteChar charToObjectType of
Nothing -> error $ "bad object type: " ++ show byteChar
Just t -> t
where
byteChar = chr $ fromIntegral byte
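-- For example (illustrative): decodeObjectType (fromIntegral (ord 'c')) is
-- CODE, and encodeObjectType TUPLE is fromIntegral (ord '('), matching the
-- table in 'objectTypeList'.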
-- utilities for reading binary data from a sequence of bytes
type GetData a = ErrorT String Get a
getDouble :: GetData Double
getDouble = do
bs <- replicateM 8 getU8
return $ bytesToDouble bs
getBS :: Int64 -> GetData B.ByteString
getBS = lift . getLazyByteString
-- read an unsigned 8 bit word
getU8 :: GetData Word8
getU8 = lift getWord8
-- XXX is it always little endian?
-- read an unsigned 32 bit word
getU32 :: GetData Word32
getU32 = lift getWord32le
getI32 :: GetData Int32
getI32 = fromIntegral `fmap` lift getWord32le
getU16 :: GetData Word16
getU16 = lift getWord16le
runGetData :: GetData a -> B.ByteString -> Either String a
runGetData = runGet . runErrorT
runGetDataCheck :: GetData a -> B.ByteString -> IO a
runGetDataCheck g b =
case runGetData g b of
Left e -> fail e
Right v -> return v
-- utilities for writing binary data to a sequence of bytes
type PutData = ErrorT String PutM ()
putDouble :: Double -> PutData
putDouble d = mapM_ putU8 $ doubleToBytes d
-- write a bytestring
putBS :: B.ByteString -> PutData
putBS = lift . putLazyByteString
-- write an unsigned 8 bit word
putU8 :: Word8 -> PutData
putU8 = lift . putWord8
-- write an unsigned 16 bit word
putU16 :: Word16 -> PutData
putU16 = lift . putWord16le
-- XXX is it always little endian?
-- write an unsigned 32 bit word
putU32 :: Word32 -> PutData
putU32 = lift . putWord32le
putI32 :: Int32 -> PutData
putI32 = putU32 . fromIntegral
runPutData :: PutData -> Either String B.ByteString
runPutData comp =
case runPutM (runErrorT comp) of
(Left err, _) -> Left err
(Right (), bs) -> Right bs
runPutDataCheck :: PutData -> IO B.ByteString
runPutDataCheck comp =
case runPutData comp of
Left e -> fail e
Right bs -> return bs
-- masks for the code object flags
type CodeObjectFlagMask = Word32
co_optimized :: CodeObjectFlagMask
co_optimized = 0x0001
co_newlocals :: CodeObjectFlagMask
co_newlocals = 0x0002
co_varargs :: CodeObjectFlagMask
co_varargs = 0x0004
co_varkeywords :: CodeObjectFlagMask
co_varkeywords = 0x0008
co_nested :: CodeObjectFlagMask
co_nested = 0x0010
co_generator :: CodeObjectFlagMask
co_generator = 0x0020
co_nofree :: CodeObjectFlagMask
co_nofree = 0x0040
prettyFlags :: Word32 -> Doc
prettyFlags bits =
hsep $ map (uncurry showFlag) masks
where
checkFlag :: CodeObjectFlagMask -> Bool
checkFlag mask = (bits .&. mask) /= 0
showFlag :: CodeObjectFlagMask -> String -> Doc
showFlag mask name
| checkFlag mask = text name
| otherwise = empty
masks = [ (co_optimized, "CO_OPTIMIZED")
, (co_newlocals, "CO_NEWLOCALS")
, (co_varargs, "CO_VARARGS")
, (co_varkeywords, "CO_VARKEYWORDS")
, (co_nested, "CO_NESTED")
, (co_generator, "CO_GENERATOR")
, (co_nofree, "CO_NOFREE") ]
| bjpop/blip | bliplib/src/Blip/Marshal.hs | bsd-3-clause | 16,861 | 0 | 34 | 4,050 | 4,524 | 2,415 | 2,109 | 452 | 13 |
{-# LANGUAGE PatternGuards #-}
module Idris.DSL where
import Idris.AbsSyntax
import Paths_idris
import Core.CoreParser
import Core.TT
import Core.Evaluate
import Debug.Trace
desugar :: SyntaxInfo -> IState -> PTerm -> PTerm
desugar syn i t = let t' = expandDo (dsl_info syn) t in
t' -- addImpl i t'
expandDo :: DSL -> PTerm -> PTerm
expandDo dsl (PLam n ty tm)
| Just lam <- dsl_lambda dsl
= let sc = PApp (FC "(dsl)" 0) lam [pexp (var dsl n tm 0)] in
expandDo dsl sc
expandDo dsl (PLam n ty tm) = PLam n (expandDo dsl ty) (expandDo dsl tm)
expandDo dsl (PLet n ty v tm)
| Just letb <- dsl_let dsl
= let sc = PApp (FC "(dsl)" 0) letb [pexp v, pexp (var dsl n tm 0)] in
expandDo dsl sc
expandDo dsl (PLet n ty v tm) = PLet n (expandDo dsl ty) (expandDo dsl v) (expandDo dsl tm)
expandDo dsl (PPi p n ty tm) = PPi p n (expandDo dsl ty) (expandDo dsl tm)
expandDo dsl (PApp fc t args) = PApp fc (expandDo dsl t)
(map (fmap (expandDo dsl)) args)
expandDo dsl (PCase fc s opts) = PCase fc (expandDo dsl s)
(map (pmap (expandDo dsl)) opts)
expandDo dsl (PEq fc l r) = PEq fc (expandDo dsl l) (expandDo dsl r)
expandDo dsl (PPair fc l r) = PPair fc (expandDo dsl l) (expandDo dsl r)
expandDo dsl (PDPair fc l t r) = PDPair fc (expandDo dsl l) (expandDo dsl t)
(expandDo dsl r)
expandDo dsl (PAlternative a as) = PAlternative a (map (expandDo dsl) as)
expandDo dsl (PHidden t) = PHidden (expandDo dsl t)
expandDo dsl (PReturn fc) = dsl_return dsl
expandDo dsl (PRewrite fc r t ty)
= PRewrite fc r (expandDo dsl t) ty
expandDo dsl (PGoal fc r n sc)
= PGoal fc (expandDo dsl r) n (expandDo dsl sc)
expandDo dsl (PDoBlock ds) = expandDo dsl $ block (dsl_bind dsl) ds
where
block b [DoExp fc tm] = tm
block b [a] = PElabError (Msg "Last statement in do block must be an expression")
block b (DoBind fc n tm : rest)
= PApp fc b [pexp tm, pexp (PLam n Placeholder (block b rest))]
block b (DoBindP fc p tm : rest)
= PApp fc b [pexp tm, pexp (PLam (MN 0 "bpat") Placeholder
(PCase fc (PRef fc (MN 0 "bpat"))
[(p, block b rest)]))]
block b (DoLet fc n ty tm : rest)
= PLet n ty tm (block b rest)
block b (DoLetP fc p tm : rest)
= PCase fc tm [(p, block b rest)]
block b (DoExp fc tm : rest)
= PApp fc b
[pexp tm,
pexp (PLam (MN 0 "bindx") Placeholder (block b rest))]
block b _ = PElabError (Msg "Invalid statement in do block")
expandDo dsl (PIdiom fc e) = expandDo dsl $ unIdiom (dsl_apply dsl) (dsl_pure dsl) fc e
expandDo dsl t = t
var :: DSL -> Name -> PTerm -> Int -> PTerm
var dsl n t i = v' i t where
v' i (PRef fc x) | x == n =
case dsl_var dsl of
Nothing -> PElabError (Msg "No 'variable' defined in dsl")
Just v -> PApp fc v [pexp (mkVar fc i)]
v' i (PLam n ty sc)
| Nothing <- dsl_lambda dsl
= PLam n ty (v' i sc)
| otherwise = PLam n (v' i ty) (v' (i + 1) sc)
v' i (PLet n ty val sc)
| Nothing <- dsl_let dsl
= PLet n (v' i ty) (v' i val) (v' i sc)
| otherwise = PLet n (v' i ty) (v' i val) (v' (i + 1) sc)
v' i (PPi p n ty sc) = PPi p n (v' i ty) (v' i sc)
v' i (PTyped l r) = PTyped (v' i l) (v' i r)
v' i (PApp f x as) = PApp f (v' i x) (fmap (fmap (v' i)) as)
v' i (PCase f t as) = PCase f (v' i t) (fmap (pmap (v' i)) as)
v' i (PEq f l r) = PEq f (v' i l) (v' i r)
v' i (PPair f l r) = PPair f (v' i l) (v' i r)
v' i (PDPair f l t r) = PDPair f (v' i l) (v' i t) (v' i r)
v' i (PAlternative a as) = PAlternative a $ map (v' i) as
v' i (PHidden t) = PHidden (v' i t)
v' i (PIdiom f t) = PIdiom f (v' i t)
v' i (PDoBlock ds) = PDoBlock (map (fmap (v' i)) ds)
v' i t = t
mkVar fc 0 = case index_first dsl of
Nothing -> PElabError (Msg "No index_first defined")
Just f -> f
mkVar fc n = case index_next dsl of
Nothing -> PElabError (Msg "No index_next defined")
Just f -> PApp fc f [pexp (mkVar fc (n-1))]
unIdiom :: PTerm -> PTerm -> FC -> PTerm -> PTerm
unIdiom ap pure fc e@(PApp _ _ _) = mkap (getFn e)
where
getFn (PApp fc f args) = (PApp fc pure [pexp f], args)
getFn f = (f, [])
mkap (f, []) = f
mkap (f, a:as) = mkap (PApp fc ap [pexp f, a], as)
unIdiom ap pure fc e = PApp fc pure [pexp e]
| christiaanb/Idris-dev | src/Idris/DSL.hs | bsd-3-clause | 4,736 | 0 | 17 | 1,612 | 2,439 | 1,194 | 1,245 | 98 | 19 |
module Types where
type ID = Int
type Name = String
type Description = String
type FirstAired = String
type Banner = String
type ImdbID = Int
type Rating = Float
type EpisodeNumber = Int
type SeasonNumber = Int
data Episode = Episode {
episodeID :: ID
, episodeName :: Name
, episodeNumber :: EpisodeNumber
, episodeAbsoluteNumber :: EpisodeNumber
, episodeSeason :: SeasonNumber
, episodeAired :: FirstAired
, episodeImdbID :: ImdbID
, episodeDescription :: Description
} deriving (Show)
data Season = Season {
seasonID :: ID
, episodes :: (SeasonNumber, [Episode])
} deriving (Show)
data TVShow = TVShow {
showID :: ID
, showName :: Name
, showAlias :: Name
, showDescription :: Description
, showAired :: FirstAired
, showBanner :: Banner
, showImdbID :: ImdbID
, seasons :: [Season]
} deriving (Show)
| Tehnix/HsPVR | src/Types.hs | bsd-3-clause | 1,005 | 0 | 10 | 333 | 226 | 146 | 80 | 34 | 0 |
module Math.DyckWord.Binary.Internal where
import Control.Applicative ( (<$>) )
import Math.Combinatorics.Exact.Binomial
catalanTriangle :: Integer -> Integer -> Integer
catalanTriangle _ 0 = 1
catalanTriangle n k = (choose (n + k) (k - 1) * (n - k + 1)) `div` k
catalanSum :: Int -> Integer
catalanSum = (scanl (+) 0 (catalan <$> [0..]) !!)
catalan :: (Integral a, Integral b) => a -> b
catalan 0 = 1
catalan 1 = 1
catalan 2 = 2
catalan n = let m = fromIntegral n in (2*m) `choose` m `div` (m + 1)
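-- Usage sketch (added for illustration):
--
-- >>> catalan 4 :: Integer
-- 14
-- >>> catalanSum 3
-- 4
--
-- i.e. the fourth Catalan number is 14, and the first three Catalan numbers
-- (1, 1 and 2) sum to 4.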
| johanneshilden/dyckword | src/Math/DyckWord/Binary/Internal.hs | bsd-3-clause | 524 | 0 | 10 | 118 | 236 | 133 | 103 | 13 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE UnboxedTuples #-}
{-# LANGUAGE PatternGuards #-}
-- |
-- Module : Data.Scientific
-- Copyright : Bas van Dijk 2013
-- License : BSD3
-- Maintainer : Bas van Dijk <[email protected]>
--
-- This module provides the number type 'Scientific'. Scientific numbers are
-- arbitrary precision and space efficient. They are represented using
-- <http://en.wikipedia.org/wiki/Scientific_notation scientific notation>. The
-- implementation uses an 'Integer' 'coefficient' @c@ and an 'Int'
-- 'base10Exponent' @e@. A scientific number corresponds to the 'Fractional'
-- number: @'fromInteger' c * 10 '^^' e@.
--
-- Note that since we're using an 'Int' to represent the exponent these numbers
-- aren't truly arbitrary precision. I intend to change the type of the exponent
-- to 'Integer' in a future release.
--
-- The main application of 'Scientific' is to be used as the target of parsing
-- arbitrary precision numbers coming from an untrusted source. The advantages
-- over using 'Rational' for this are that:
--
-- * A 'Scientific' is more efficient to construct. Rational numbers need to be
-- constructed using '%' which has to compute the 'gcd' of the 'numerator' and
-- 'denominator'.
--
-- * 'Scientific' is safe against numbers with huge exponents. For example:
-- @1e1000000000 :: 'Rational'@ will fill up all space and crash your
-- program. Scientific works as expected:
--
-- > > read "1e1000000000" :: Scientific
-- > 1.0e1000000000
--
-- * Also, the space usage of converting scientific numbers with huge exponents
-- to @'Integral's@ (like: 'Int') or @'RealFloat's@ (like: 'Double' or 'Float')
-- will always be bounded by the target type.
--
-- /WARNING:/ Although @Scientific@ is an instance of 'Fractional', the methods
-- are only partially defined! Specifically 'recip' and '/' will diverge
-- (i.e. loop and consume all space) when their outputs have an infinite decimal
-- expansion. 'fromRational' will diverge when the input 'Rational' has an
-- infinite decimal expansion. Consider using 'fromRationalRepetend' for these
-- rationals which will detect the repetition and indicate where it starts.
--
-- This module is designed to be imported qualified:
--
-- @import Data.Scientific as Scientific@
module Data.Scientific
( Scientific
-- * Construction
, scientific
-- * Projections
, coefficient
, base10Exponent
-- * Predicates
, isFloating
, isInteger
-- * Conversions
, fromRationalRepetend
, toRationalRepetend
, floatingOrInteger
, toRealFloat
, toBoundedRealFloat
, toBoundedInteger
, fromFloatDigits
-- * Pretty printing
, formatScientific
, FPFormat(..)
, toDecimalDigits
-- * Normalization
, normalize
) where
----------------------------------------------------------------------
-- Imports
----------------------------------------------------------------------
import Control.Exception (throw, ArithException(DivideByZero))
import Control.Monad (mplus)
import Control.Monad.ST (runST)
import Control.DeepSeq (NFData, rnf)
import Data.Binary (Binary, get, put)
import Data.Char (intToDigit, ord)
import Data.Data (Data)
import Data.Function (on)
import Data.Hashable (Hashable(..))
import qualified Data.Map as M (Map, empty, insert, lookup)
import Data.Ratio ((%), numerator, denominator)
import Data.Typeable (Typeable)
import qualified Data.Vector as V
import qualified Data.Vector.Mutable as VM
import Math.NumberTheory.Logarithms (integerLog10')
import qualified Numeric (floatToDigits)
import qualified Text.Read as Read
import Text.Read (readPrec)
import qualified Text.ParserCombinators.ReadPrec as ReadPrec
import qualified Text.ParserCombinators.ReadP as ReadP
import Text.ParserCombinators.ReadP ( ReadP )
import Data.Text.Lazy.Builder.RealFloat (FPFormat(..))
#if !MIN_VERSION_base(4,8,0)
import Data.Functor ((<$>))
import Control.Applicative ((<*>))
#endif
#if MIN_VERSION_base(4,5,0)
import Data.Bits (unsafeShiftR)
#else
import Data.Bits (shiftR)
#endif
import GHC.Integer (quotRemInteger, quotInteger)
import GHC.Integer.Compat (divInteger)
import Utils (roundTo)
----------------------------------------------------------------------
-- Type
----------------------------------------------------------------------
-- | An arbitrary-precision number represented using
-- <http://en.wikipedia.org/wiki/Scientific_notation scientific notation>.
--
-- This type describes the set of all @'Real's@ which have a finite
-- decimal expansion.
--
-- A scientific number with 'coefficient' @c@ and 'base10Exponent' @e@
-- corresponds to the 'Fractional' number: @'fromInteger' c * 10 '^^' e@
data Scientific = Scientific
{ coefficient :: !Integer
-- ^ The coefficient of a scientific number.
--
-- Note that this number is not necessarily normalized, i.e.
-- it could contain trailing zeros.
--
-- Scientific numbers are automatically normalized when pretty printed or
-- in 'toDecimalDigits'.
--
-- Use 'normalize' to do manual normalization.
, base10Exponent :: {-# UNPACK #-} !Int
-- ^ The base-10 exponent of a scientific number.
} deriving (Typeable, Data)
-- | @scientific c e@ constructs a scientific number which corresponds
-- to the 'Fractional' number: @'fromInteger' c * 10 '^^' e@.
scientific :: Integer -> Int -> Scientific
scientific = Scientific
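-- A usage sketch (added for illustration, not from the original docs):
--
-- >>> scientific 25 (-2)
-- 0.25
-- >>> scientific 3 4
-- 30000.0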
----------------------------------------------------------------------
-- Instances
----------------------------------------------------------------------
instance NFData Scientific where
rnf (Scientific _ _) = ()
instance Hashable Scientific where
hashWithSalt salt = hashWithSalt salt . toRational
instance Binary Scientific where
put (Scientific c e) = do
put c
-- In the future I intend to change the type of the base10Exponent e from
        -- Int to Integer. To support backward compatibility I already convert e
-- to Integer here:
put $ toInteger e
get = Scientific <$> get <*> (fromInteger <$> get)
instance Eq Scientific where
(==) = (==) `on` toRational
{-# INLINE (==) #-}
(/=) = (/=) `on` toRational
{-# INLINE (/=) #-}
instance Ord Scientific where
(<) = (<) `on` toRational
{-# INLINE (<) #-}
(<=) = (<=) `on` toRational
{-# INLINE (<=) #-}
(>) = (>) `on` toRational
{-# INLINE (>) #-}
(>=) = (>=) `on` toRational
{-# INLINE (>=) #-}
compare = compare `on` toRational
{-# INLINE compare #-}
instance Num Scientific where
Scientific c1 e1 + Scientific c2 e2
| e1 < e2 = Scientific (c1 + c2*l) e1
| otherwise = Scientific (c1*r + c2 ) e2
where
l = magnitude (e2 - e1)
r = magnitude (e1 - e2)
{-# INLINE (+) #-}
Scientific c1 e1 - Scientific c2 e2
| e1 < e2 = Scientific (c1 - c2*l) e1
| otherwise = Scientific (c1*r - c2 ) e2
where
l = magnitude (e2 - e1)
r = magnitude (e1 - e2)
{-# INLINE (-) #-}
Scientific c1 e1 * Scientific c2 e2 =
Scientific (c1 * c2) (e1 + e2)
{-# INLINE (*) #-}
abs (Scientific c e) = Scientific (abs c) e
{-# INLINE abs #-}
negate (Scientific c e) = Scientific (negate c) e
{-# INLINE negate #-}
signum (Scientific c _) = Scientific (signum c) 0
{-# INLINE signum #-}
fromInteger i = Scientific i 0
{-# INLINE fromInteger #-}
-- | /WARNING:/ 'toRational' needs to compute the 'Integer' magnitude:
-- @10^e@. If applied to a huge exponent this could fill up all space
-- and crash your program!
--
-- Avoid applying 'toRational' (or 'realToFrac') to scientific numbers
-- coming from an untrusted source and use 'toRealFloat' instead. The
-- latter guards against excessive space usage.
instance Real Scientific where
toRational (Scientific c e)
| e < 0 = c % magnitude (-e)
| otherwise = (c * magnitude e) % 1
{-# INLINE toRational #-}
{-# RULES
"realToFrac_toRealFloat_Double"
realToFrac = toRealFloat :: Scientific -> Double #-}
{-# RULES
"realToFrac_toRealFloat_Float"
realToFrac = toRealFloat :: Scientific -> Float #-}
-- | /WARNING:/ 'recip' and '/' will diverge (i.e. loop and consume all space)
-- when their outputs are <https://en.wikipedia.org/wiki/Repeating_decimal repeating decimals>.
--
-- 'fromRational' will diverge when the input 'Rational' is a repeating decimal.
-- Consider using 'fromRationalRepetend' for these rationals which will detect
-- the repetition and indicate where it starts.
instance Fractional Scientific where
recip = fromRational . recip . toRational
{-# INLINE recip #-}
x / y = fromRational $ toRational x / toRational y
{-# INLINE (/) #-}
fromRational rational
| d == 0 = throw DivideByZero
| otherwise = positivize (longDiv 0 0) (numerator rational)
where
-- Divide the numerator by the denominator using long division.
longDiv :: Integer -> Int -> (Integer -> Scientific)
longDiv !c !e 0 = Scientific c e
longDiv !c !e !n
-- TODO: Use a logarithm here!
| n < d = longDiv (c * 10) (e - 1) (n * 10)
| otherwise = case n `quotRemInteger` d of
(#q, r#) -> longDiv (c + q) e r
d = denominator rational
-- | Like 'fromRational', this function converts a `Rational` to a `Scientific`
-- but instead of diverging (i.e. loop and consume all space) on
-- <https://en.wikipedia.org/wiki/Repeating_decimal repeating decimals>
-- it detects the repeating part, the /repetend/, and returns where it starts.
--
-- To detect the repetition this function consumes space linear in the number of
-- digits in the resulting scientific. In order to bound the space usage an
-- optional limit can be specified. If the number of digits reaches this limit
-- @Left (s, r)@ will be returned. Here @s@ is the 'Scientific' constructed so
-- far and @r@ is the remaining 'Rational'. @toRational s + r@ yields the
-- original 'Rational'
--
-- If the limit is not reached or no limit was specified @Right (s,
-- mbRepetendIx)@ will be returned. Here @s@ is the 'Scientific' without any
-- repetition and @mbRepetendIx@ specifies if and where in the fractional part
-- the repetend begins.
--
-- For example:
--
-- @fromRationalRepetend Nothing (1 % 28) == Right (3.571428e-2, Just 2)@
--
-- This represents the repeating decimal: @0.03571428571428571428...@
-- which is sometimes also unambiguously denoted as @0.03(571428)@.
-- Here the repetend is enclosed in parentheses and starts at the 3rd digit (index 2)
-- in the fractional part. Specifying a limit results in the following:
--
-- @fromRationalRepetend (Just 4) (1 % 28) == Left (3.5e-2, 1 % 1400)@
--
-- You can expect the following property to hold.
--
-- @ forall (mbLimit :: Maybe Int) (r :: Rational).
-- r == (case 'fromRationalRepetend' mbLimit r of
-- Left (s, r') -> toRational s + r'
-- Right (s, mbRepetendIx) ->
-- case mbRepetendIx of
-- Nothing -> toRational s
-- Just repetendIx -> 'toRationalRepetend' s repetendIx)
-- @
fromRationalRepetend
:: Maybe Int -- ^ Optional limit
-> Rational
-> Either (Scientific, Rational)
(Scientific, Maybe Int)
fromRationalRepetend mbLimit rational
| d == 0 = throw DivideByZero
| num < 0 = case longDiv (-num) of
Left (s, r) -> Left (-s, -r)
Right (s, mb) -> Right (-s, mb)
| otherwise = longDiv num
where
num = numerator rational
longDiv :: Integer -> Either (Scientific, Rational) (Scientific, Maybe Int)
longDiv n = case mbLimit of
Nothing -> Right $ longDivNoLimit 0 0 M.empty n
Just l -> longDivWithLimit (-l) n
-- Divide the numerator by the denominator using long division.
longDivNoLimit :: Integer
-> Int
-> M.Map Integer Int
-> (Integer -> (Scientific, Maybe Int))
longDivNoLimit !c !e _ns 0 = (Scientific c e, Nothing)
longDivNoLimit !c !e ns !n
| Just e' <- M.lookup n ns = (Scientific c e, Just (-e'))
| n < d = longDivNoLimit (c * 10) (e - 1) (M.insert n e ns) (n * 10)
| otherwise = case n `quotRemInteger` d of
(#q, r#) -> longDivNoLimit (c + q) e ns r
longDivWithLimit :: Int -> Integer -> Either (Scientific, Rational) (Scientific, Maybe Int)
longDivWithLimit l = go 0 0 M.empty
where
go :: Integer
-> Int
-> M.Map Integer Int
-> (Integer -> Either (Scientific, Rational) (Scientific, Maybe Int))
go !c !e _ns 0 = Right (Scientific c e, Nothing)
go !c !e ns !n
| Just e' <- M.lookup n ns = Right (Scientific c e, Just (-e'))
| e <= l = Left (Scientific c e, n % (d * magnitude (-e)))
| n < d = go (c * 10) (e - 1) (M.insert n e ns) (n * 10)
| otherwise = case n `quotRemInteger` d of
(#q, r#) -> go (c + q) e ns r
d = denominator rational
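-- A round-trip sketch combining this function with 'toRationalRepetend'
-- (illustrative, reusing the @1 % 28@ example from the documentation above):
--
-- @
-- case fromRationalRepetend Nothing (1 % 28) of
--   Right (s, Just ix) -> toRationalRepetend s ix == 1 % 28  -- True
--   _                  -> False
-- @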
-- |
-- Converts a `Scientific` with a /repetend/ (a repeating part in the fraction),
-- which starts at the given index, into its corresponding 'Rational'.
--
-- For example to convert the repeating decimal @0.03(571428)@ you would use:
-- @toRationalRepetend 0.03571428 2 == 1 % 28@
--
-- Preconditions for @toRationalRepetend s r@:
--
-- * @r >= 0@
--
-- * @r < -(base10Exponent s)@
--
-- The formula to convert the @Scientific@ @s@
-- with a repetend starting at index @r@ is described in the paper:
-- <http://fiziko.bureau42.com/teaching_tidbits/turning_repeating_decimals_into_fractions.pdf turning_repeating_decimals_into_fractions.pdf>
-- and is defined as follows:
--
-- @
-- (fromInteger nonRepetend + repetend % nines) /
-- fromInteger (10^^r)
-- where
-- c = coefficient s
-- e = base10Exponent s
--
-- -- Size of the fractional part.
-- f = (-e)
--
-- -- Size of the repetend.
-- n = f - r
--
-- m = 10^^n
--
-- (nonRepetend, repetend) = c \`quotRem\` m
--
-- nines = m - 1
-- @
-- Also see: 'fromRationalRepetend'.
toRationalRepetend
:: Scientific
-> Int -- ^ Repetend index
-> Rational
toRationalRepetend s r
| r < 0 = error "toRationalRepetend: Negative repetend index!"
    | r >= f    = error "toRationalRepetend: Repetend index >= the number of digits in the fractional part!"
| otherwise = (fromInteger nonRepetend + repetend % nines) /
fromInteger (magnitude r)
where
c = coefficient s
e = base10Exponent s
-- Size of the fractional part.
f = (-e)
-- Size of the repetend.
n = f - r
m = magnitude n
(#nonRepetend, repetend#) = c `quotRemInteger` m
nines = m - 1
instance RealFrac Scientific where
-- | The function 'properFraction' takes a Scientific number @s@
-- and returns a pair @(n,f)@ such that @s = n+f@, and:
--
-- * @n@ is an integral number with the same sign as @s@; and
--
-- * @f@ is a fraction with the same type and sign as @s@,
-- and with absolute value less than @1@.
properFraction s@(Scientific c e)
| e < 0 = if dangerouslySmall c e
then (0, s)
else case c `quotRemInteger` magnitude (-e) of
(#q, r#) -> (fromInteger q, Scientific r e)
| otherwise = (toIntegral s, 0)
{-# INLINE properFraction #-}
-- | @'truncate' s@ returns the integer nearest @s@
-- between zero and @s@
truncate = whenFloating $ \c e ->
if dangerouslySmall c e
then 0
else fromInteger $ c `quotInteger` magnitude (-e)
{-# INLINE truncate #-}
-- | @'round' s@ returns the nearest integer to @s@;
-- the even integer if @s@ is equidistant between two integers
round = whenFloating $ \c e ->
if dangerouslySmall c e
then 0
else let (#q, r#) = c `quotRemInteger` magnitude (-e)
n = fromInteger q
m | r < 0 = n - 1
| otherwise = n + 1
f = Scientific r e
in case signum $ coefficient $ abs f - 0.5 of
-1 -> n
0 -> if even n then n else m
1 -> m
_ -> error "round default defn: Bad value"
{-# INLINE round #-}
-- | @'ceiling' s@ returns the least integer not less than @s@
ceiling = whenFloating $ \c e ->
if dangerouslySmall c e
then if c <= 0
then 0
else 1
else case c `quotRemInteger` magnitude (-e) of
(#q, r#) | r <= 0 -> fromInteger q
| otherwise -> fromInteger (q + 1)
{-# INLINE ceiling #-}
-- | @'floor' s@ returns the greatest integer not greater than @s@
floor = whenFloating $ \c e ->
if dangerouslySmall c e
then if c < 0
then -1
else 0
else fromInteger (c `divInteger` magnitude (-e))
{-# INLINE floor #-}
----------------------------------------------------------------------
-- Internal utilities
----------------------------------------------------------------------
-- | This function is used in the 'RealFrac' methods to guard against
-- computing a huge magnitude (-e) which could take up all space.
--
-- Think about parsing a scientific number from an untrusted
-- string. An attacker could supply 1e-1000000000. Lets say we want to
-- 'floor' that number to an 'Int'. When we naively try to floor it
-- using:
--
-- @
-- floor = whenFloating $ \c e ->
-- fromInteger (c `div` magnitude (-e))
-- @
--
-- We will compute the huge Integer: @magnitude 1000000000@. This
-- computation will quickly fill up all space and crash the program.
--
-- Note that for large /positive/ exponents there is no risk of a
-- space-leak since 'whenFloating' will compute:
--
-- @fromInteger c * magnitude e :: a@
--
-- where @a@ is the target type (Int in this example). So here the
-- space usage is bounded by the target type.
--
-- For large negative exponents we check if the exponent is smaller
-- than some limit (currently -324). In that case we know that the
-- scientific number is really small (unless the coefficient has many
-- digits) so we can immediately return -1 for negative scientific
-- numbers or 0 for positive numbers.
--
-- More precisely if @dangerouslySmall c e@ returns 'True' the
-- scientific number @s@ is guaranteed to be between:
-- @-0.1 > s < 0.1@.
--
-- Note that we avoid computing the number of decimal digits in c
-- (log10 c) if the exponent is not below the limit.
dangerouslySmall :: Integer -> Int -> Bool
dangerouslySmall c e = e < (-limit) && e < (-integerLog10' (abs c)) - 1
{-# INLINE dangerouslySmall #-}
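-- For instance (illustrative): @dangerouslySmall 5 (-400)@ is 'True' (the
-- number is at most @5e-400@, far below the limit), whereas
-- @dangerouslySmall 5 (-3)@ is 'False'.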
limit :: Int
limit = maxExpt
positivize :: (Ord a, Num a, Num b) => (a -> b) -> (a -> b)
positivize f x | x < 0 = -(f (-x))
| otherwise = f x
{-# INLINE positivize #-}
whenFloating :: (Num a) => (Integer -> Int -> a) -> Scientific -> a
whenFloating f s@(Scientific c e)
| e < 0 = f c e
| otherwise = toIntegral s
{-# INLINE whenFloating #-}
-- | Precondition: the 'Scientific' @s@ needs to be an integer:
-- @base10Exponent (normalize s) >= 0@
toIntegral :: (Num a) => Scientific -> a
toIntegral (Scientific c e) = fromInteger c * magnitude e
{-# INLINE toIntegral #-}
----------------------------------------------------------------------
-- Exponentiation with a cache for the most common numbers.
----------------------------------------------------------------------
-- | The same limit as in GHC.Float.
maxExpt :: Int
maxExpt = 324
expts10 :: V.Vector Integer
expts10 = runST $ do
mv <- VM.unsafeNew maxExpt
VM.unsafeWrite mv 0 1
VM.unsafeWrite mv 1 10
let go !ix
| ix == maxExpt = V.unsafeFreeze mv
| otherwise = do
VM.unsafeWrite mv ix xx
VM.unsafeWrite mv (ix+1) (10*xx)
go (ix+2)
where
xx = x * x
x = V.unsafeIndex expts10 half
#if MIN_VERSION_base(4,5,0)
!half = ix `unsafeShiftR` 1
#else
!half = ix `shiftR` 1
#endif
go 2
-- | @magnitude e == 10 ^ e@
magnitude :: (Num a) => Int -> a
magnitude e | e < maxExpt = cachedPow10 e
| otherwise = cachedPow10 hi * 10 ^ (e - hi)
where
cachedPow10 p = fromInteger (V.unsafeIndex expts10 p)
hi = maxExpt - 1
{-# INLINE magnitude #-}
----------------------------------------------------------------------
-- Conversions
----------------------------------------------------------------------
-- | Convert a 'RealFloat' (like a 'Double' or 'Float') into a 'Scientific'
-- number.
--
-- Note that this function uses 'Numeric.floatToDigits' to compute the digits
-- and exponent of the 'RealFloat' number. Be aware that the algorithm used in
-- 'Numeric.floatToDigits' doesn't work as expected for some numbers, e.g. as
-- the 'Double' @1e23@ is converted to @9.9999999999999991611392e22@, and that
-- value is shown as @9.999999999999999e22@ rather than the shorter @1e23@; the
-- algorithm doesn't take the rounding direction for values exactly half-way
-- between two adjacent representable values into account, so if you have a
-- value with a short decimal representation exactly half-way between two
-- adjacent representable values, like @5^23*2^e@ for @e@ close to 23, the
-- algorithm doesn't know in which direction the short decimal representation
-- would be rounded and computes more digits
fromFloatDigits :: (RealFloat a) => a -> Scientific
fromFloatDigits = positivize fromPositiveRealFloat
where
fromPositiveRealFloat r = go digits 0 0
where
(digits, e) = Numeric.floatToDigits 10 r
go [] !c !n = Scientific c (e - n)
go (d:ds) !c !n = go ds (c * 10 + fromIntegral d) (n + 1)
-- | Safely convert a 'Scientific' number into a 'RealFloat' (like a 'Double' or a
-- 'Float').
--
-- Note that this function uses 'realToFrac' (@'fromRational' . 'toRational'@)
-- internally but it guards against computing huge Integer magnitudes (@10^e@)
-- that could fill up all space and crash your program. If the 'base10Exponent'
-- of the given 'Scientific' is too big or too small to be represented in the
-- target type, Infinity or 0 will be returned respectively. Use
-- 'toBoundedRealFloat' which explicitly handles this case by returning 'Left'.
--
-- Always prefer 'toRealFloat' over 'realToFrac' when converting from scientific
-- numbers coming from an untrusted source.
toRealFloat :: (RealFloat a) => Scientific -> a
toRealFloat = either id id . toBoundedRealFloat
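-- Usage sketch (added for illustration):
--
-- >>> toRealFloat (scientific 25 (-2)) :: Double
-- 0.25
-- >>> toRealFloat (scientific 1 1000000000) :: Double
-- Infinity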
-- | A more precise version of `toRealFloat`. If the 'base10Exponent' of the given
-- 'Scientific' is too big or too small to be represented in the target type,
-- Infinity or 0 will be returned as 'Left'.
toBoundedRealFloat :: forall a. (RealFloat a) => Scientific -> Either a a
toBoundedRealFloat s@(Scientific c e)
| c == 0 = Right 0
| e > limit && e > hiLimit = Left $ sign (1/0) -- Infinity
| e < -limit && e < loLimit && e + d < loLimit = Left $ sign 0
| otherwise = Right $ realToFrac s
where
(loLimit, hiLimit) = exponentLimits (undefined :: a)
d = integerLog10' (abs c)
sign x | c < 0 = -x
| otherwise = x
exponentLimits :: forall a. (RealFloat a) => a -> (Int, Int)
exponentLimits _ = (loLimit, hiLimit)
where
loLimit = floor (fromIntegral lo * log10Radix) -
ceiling (fromIntegral digits * log10Radix)
hiLimit = ceiling (fromIntegral hi * log10Radix)
log10Radix :: Double
log10Radix = logBase 10 $ fromInteger radix
radix = floatRadix (undefined :: a)
digits = floatDigits (undefined :: a)
(lo, hi) = floatRange (undefined :: a)
-- | Convert a `Scientific` to a bounded integer.
--
-- If the given `Scientific` doesn't fit in the target representation, it will
-- return `Nothing`.
--
-- This function also guards against computing huge Integer magnitudes (@10^e@)
-- that could fill up all space and crash your program.
toBoundedInteger :: forall i. (Integral i, Bounded i) => Scientific -> Maybe i
toBoundedInteger s
| c == 0 = fromIntegerBounded 0
| integral = if dangerouslyBig
then Nothing
else fromIntegerBounded n
| otherwise = Nothing
where
c = coefficient s
integral = e >= 0 || e' >= 0
e = base10Exponent s
e' = base10Exponent s'
s' = normalize s
dangerouslyBig = e > limit &&
e > integerLog10' (max (abs iMinBound) (abs iMaxBound))
fromIntegerBounded :: Integer -> Maybe i
fromIntegerBounded i
| i < iMinBound || i > iMaxBound = Nothing
| otherwise = Just $ fromInteger i
iMinBound = toInteger (minBound :: i)
iMaxBound = toInteger (maxBound :: i)
-- This should not be evaluated if the given Scientific is dangerouslyBig
-- since it could consume all space and crash the process:
n :: Integer
n = toIntegral s'
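-- Usage sketch (added for illustration):
--
-- >>> toBoundedInteger (scientific 42 0) :: Maybe Int
-- Just 42
-- >>> toBoundedInteger (scientific 1 20) :: Maybe Int
-- Nothing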
-- | @floatingOrInteger@ determines if the scientific is floating point
-- or integer. In case it's floating-point the scientific is converted
-- to the desired 'RealFloat' using 'toRealFloat'.
--
-- Also see: 'isFloating' or 'isInteger'.
floatingOrInteger :: (RealFloat r, Integral i) => Scientific -> Either r i
floatingOrInteger s
| base10Exponent s >= 0 = Right (toIntegral s)
| base10Exponent s' >= 0 = Right (toIntegral s')
| otherwise = Left (toRealFloat s')
where
s' = normalize s
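-- Usage sketch (added for illustration):
--
-- >>> floatingOrInteger 3.14 :: Either Double Integer
-- Left 3.14
-- >>> floatingOrInteger 5 :: Either Double Integer
-- Right 5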
----------------------------------------------------------------------
-- Predicates
----------------------------------------------------------------------
-- | Return 'True' if the scientific is a floating point, 'False' otherwise.
--
-- Also see: 'floatingOrInteger'.
isFloating :: Scientific -> Bool
isFloating = not . isInteger
-- | Return 'True' if the scientific is an integer, 'False' otherwise.
--
-- Also see: 'floatingOrInteger'.
isInteger :: Scientific -> Bool
isInteger s = base10Exponent s >= 0 ||
base10Exponent s' >= 0
where
s' = normalize s
----------------------------------------------------------------------
-- Parsing
----------------------------------------------------------------------
instance Read Scientific where
readPrec = Read.parens $ ReadPrec.lift (ReadP.skipSpaces >> scientificP)
-- A strict pair
data SP = SP !Integer {-# UNPACK #-}!Int
scientificP :: ReadP Scientific
scientificP = do
let positive = (('+' ==) <$> ReadP.satisfy isSign) `mplus` return True
pos <- positive
let step :: Num a => a -> Int -> a
step a digit = a * 10 + fromIntegral digit
{-# INLINE step #-}
n <- foldDigits step 0
let s = SP n 0
fractional = foldDigits (\(SP a e) digit ->
SP (step a digit) (e-1)) s
SP coeff expnt <- (ReadP.satisfy (== '.') >> fractional)
ReadP.<++ return s
let signedCoeff | pos = coeff
| otherwise = (-coeff)
eP = do posE <- positive
e <- foldDigits step 0
if posE
then return e
else return (-e)
(ReadP.satisfy isE >>
((Scientific signedCoeff . (expnt +)) <$> eP)) `mplus`
return (Scientific signedCoeff expnt)
foldDigits :: (a -> Int -> a) -> a -> ReadP a
foldDigits f z = do
c <- ReadP.satisfy isDecimal
let digit = ord c - 48
a = f z digit
ReadP.look >>= go a
where
go !a [] = return a
go !a (c:cs)
| isDecimal c = do
_ <- ReadP.get
let digit = ord c - 48
go (f a digit) cs
| otherwise = return a
isDecimal :: Char -> Bool
isDecimal c = c >= '0' && c <= '9'
{-# INLINE isDecimal #-}
isSign :: Char -> Bool
isSign c = c == '-' || c == '+'
{-# INLINE isSign #-}
isE :: Char -> Bool
isE c = c == 'e' || c == 'E'
{-# INLINE isE #-}
----------------------------------------------------------------------
-- Pretty Printing
----------------------------------------------------------------------
instance Show Scientific where
show s | coefficient s < 0 = '-':showPositive (-s)
| otherwise = showPositive s
where
showPositive :: Scientific -> String
showPositive = fmtAsGeneric . toDecimalDigits
fmtAsGeneric :: ([Int], Int) -> String
fmtAsGeneric x@(_is, e)
| e < 0 || e > 7 = fmtAsExponent x
| otherwise = fmtAsFixed x
fmtAsExponent :: ([Int], Int) -> String
fmtAsExponent (is, e) =
case ds of
"0" -> "0.0e0"
[d] -> d : '.' :'0' : 'e' : show_e'
(d:ds') -> d : '.' : ds' ++ ('e' : show_e')
[] -> error "formatScientific/doFmt/FFExponent: []"
where
show_e' = show (e-1)
ds = map intToDigit is
fmtAsFixed :: ([Int], Int) -> String
fmtAsFixed (is, e)
| e <= 0 = '0':'.':(replicate (-e) '0' ++ ds)
| otherwise =
let
f 0 s rs = mk0 (reverse s) ++ '.':mk0 rs
f n s "" = f (n-1) ('0':s) ""
f n s (r:rs) = f (n-1) (r:s) rs
in
f e "" ds
where
mk0 "" = "0"
mk0 ls = ls
ds = map intToDigit is
-- | Like 'show' but provides rendering options.
formatScientific :: FPFormat
-> Maybe Int -- ^ Number of decimal places to render.
-> Scientific
-> String
formatScientific format mbDecs s
| coefficient s < 0 = '-':formatPositiveScientific (-s)
| otherwise = formatPositiveScientific s
where
formatPositiveScientific :: Scientific -> String
formatPositiveScientific s' = case format of
Generic -> fmtAsGeneric $ toDecimalDigits s'
Exponent -> fmtAsExponentMbDecs $ toDecimalDigits s'
Fixed -> fmtAsFixedMbDecs $ toDecimalDigits s'
fmtAsGeneric :: ([Int], Int) -> String
fmtAsGeneric x@(_is, e)
| e < 0 || e > 7 = fmtAsExponentMbDecs x
| otherwise = fmtAsFixedMbDecs x
fmtAsExponentMbDecs :: ([Int], Int) -> String
fmtAsExponentMbDecs x = case mbDecs of
Nothing -> fmtAsExponent x
Just dec -> fmtAsExponentDecs dec x
fmtAsFixedMbDecs :: ([Int], Int) -> String
fmtAsFixedMbDecs x = case mbDecs of
Nothing -> fmtAsFixed x
Just dec -> fmtAsFixedDecs dec x
fmtAsExponentDecs :: Int -> ([Int], Int) -> String
fmtAsExponentDecs dec (is, e) =
let dec' = max dec 1 in
case is of
[0] -> '0' :'.' : take dec' (repeat '0') ++ "e0"
_ ->
let
(ei,is') = roundTo (dec'+1) is
(d:ds') = map intToDigit (if ei > 0 then init is' else is')
in
d:'.':ds' ++ 'e':show (e-1+ei)
fmtAsFixedDecs :: Int -> ([Int], Int) -> String
fmtAsFixedDecs dec (is, e) =
let dec' = max dec 0 in
if e >= 0 then
let
(ei,is') = roundTo (dec' + e) is
(ls,rs) = splitAt (e+ei) (map intToDigit is')
in
mk0 ls ++ (if null rs then "" else '.':rs)
else
let
(ei,is') = roundTo dec' (replicate (-e) 0 ++ is)
d:ds' = map intToDigit (if ei > 0 then is' else 0:is')
in
d : (if null ds' then "" else '.':ds')
where
mk0 ls = case ls of { "" -> "0" ; _ -> ls}
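-- Usage sketch (added for illustration):
--
-- >>> formatScientific Fixed (Just 2) (scientific 31415 (-4))
-- "3.14"
-- >>> formatScientific Exponent (Just 3) (scientific 31415 (-4))
-- "3.142e0"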
----------------------------------------------------------------------
-- | Similar to 'Numeric.floatToDigits', @toDecimalDigits@ takes a
-- positive 'Scientific' number, and returns a list of digits and
-- a base-10 exponent. In particular, if @x>=0@, and
--
-- > toDecimalDigits x = ([d1,d2,...,dn], e)
--
-- then
--
-- 1. @n >= 1@
-- 2. @x = 0.d1d2...dn * (10^^e)@
-- 3. @0 <= di <= 9@
-- 4. @null $ takeWhile (==0) $ reverse [d1,d2,...,dn]@
--
-- The last property means that the coefficient will be normalized, i.e. doesn't
-- contain trailing zeros.
toDecimalDigits :: Scientific -> ([Int], Int)
toDecimalDigits (Scientific 0 _) = ([0], 1)
toDecimalDigits (Scientific c' e') =
case normalizePositive c' e' of
Scientific c e -> go c 0 []
where
go :: Integer -> Int -> [Int] -> ([Int], Int)
go 0 !n ds = (ds, ne) where !ne = n + e
go i !n ds = case i `quotRemInteger` 10 of
(# q, r #) -> go q (n+1) (d:ds)
where
!d = fromIntegral r
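-- For example (illustrative):
--
-- >>> toDecimalDigits (scientific 31415 (-4))
-- ([3,1,4,1,5],1)
--
-- i.e. 3.1415 = 0.31415 * 10^1, with the digits already normalized.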
----------------------------------------------------------------------
-- Normalization
----------------------------------------------------------------------
-- | Normalize a scientific number by dividing out powers of 10 from the
-- 'coefficient' and incrementing the 'base10Exponent' each time.
--
-- You should rarely have a need for this function since scientific numbers are
-- automatically normalized when pretty-printed and in 'toDecimalDigits'.
normalize :: Scientific -> Scientific
normalize (Scientific c e)
| c > 0 = normalizePositive c e
| c < 0 = -(normalizePositive (-c) e)
| otherwise {- c == 0 -} = Scientific 0 0
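-- For example (illustrative): @normalize (scientific 2500 (-2))@ has
-- coefficient 25 and base10Exponent 0, i.e. the trailing zeros are divided
-- out while the represented value (25.0) is unchanged.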
normalizePositive :: Integer -> Int -> Scientific
normalizePositive !c !e = case quotRemInteger c 10 of
(# c', r #)
| r == 0 -> normalizePositive c' (e+1)
| otherwise -> Scientific c e
| phadej/scientific | src/Data/Scientific.hs | bsd-3-clause | 34,902 | 21 | 19 | 9,882 | 7,104 | 3,815 | 3,289 | 483 | 12 |
module Util.Misc where
import Control.Monad (replicateM)
import qualified Data.ByteString as B
infinity :: Double
infinity = read "Infinity"
ninfinity :: Double
ninfinity = read "-Infinity"
replicateM' :: (Monad m, Integral i) => m a -> i -> m [a]
replicateM' m i = Control.Monad.replicateM (fromIntegral i) m
forN :: (Ord n, Num n, Monad m)
=> (a -> m a)
-> a
-> n
-> m a
forN f m n
| n > 0 = return m >>= f >>= \m' -> forN f m' (n-1)
| otherwise = return m
forN' f a n
| n > 0 = forN' f (f a) (n-1)
| otherwise = a
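-- Small illustrations (not from the original module): @forN' (*2) 1 10@
-- applies the doubling function ten times and yields 1024, while
-- @forN (\x -> print x >> return (x+1)) 0 3@ prints 0, 1 and 2 and returns 3.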
t21 = fst
t22 = snd
t31 (a,_,_) = a
t32 (_,a,_) = a
t33 (_,_,a) = a
t41 (a,_,_,_) = a
t42 (_,a,_,_) = a
t43 (_,_,a,_) = a
t44 (_,_,_,a) = a
t51 (a,_,_,_,_) = a
t52 (_,a,_,_,_) = a
t53 (_,_,a,_,_) = a
t54 (_,_,_,a,_) = a
t55 (_,_,_,_,a) = a
| phylake/haskell-util | Misc.hs | bsd-3-clause | 809 | 0 | 10 | 216 | 533 | 295 | 238 | 34 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE PackageImports #-}
import qualified Data.ByteString as B
import Data.ByteString.Char8 (pack)
import Data.Digest.SHA256 (hash)
import Data.LargeWord
import Data.Maybe (fromJust)
import qualified Data.Serialize as S (decode, encode)
import Data.Word
import Filesystem.Path.CurrentOS ((</>))
import qualified Filesystem.Path.CurrentOS as FP
import HECFS
import System.Console.Haskeline
import System.Environment (getArgs)
import qualified Text.JSON.Generic as J
data Config = Config{ nodeSet :: [FilePath], numPrimaryNodes :: Int } deriving (J.Typeable, J.Data, Show)
main :: IO ()
main = do
(Config nodes k) <- readConf "conf.json"
[op, file] <- getArgs
key <- runInputT defaultSettings (getPassword (Just '*') "Password: ") >>= return . createKey . fromJust
let parts = map (FP.encodeString . (</> (FP.decodeString file)) . FP.decodeString) nodes
case op of
"en" -> store key file k parts
"de" -> retrieve key parts k (file ++ ".dec")
_ -> error "invalid option"
createKey :: String -> Word64
createKey = either error id . S.decode . B.pack . hash . map (fromIntegral . fromEnum)
readConf :: FilePath -> IO Config
readConf fn = do
str <- readFile fn
either error return $
J.resultToEither (J.decode str >>= J.fromJSON)
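-- An example "conf.json" accepted by 'readConf' (illustrative values,
-- assuming Text.JSON.Generic's standard record encoding):
--
-- > {"nodeSet": ["/srv/node1", "/srv/node2", "/srv/node3"], "numPrimaryNodes": 2}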
| comatose/hecs | src/Main.hs | bsd-3-clause | 1,458 | 0 | 17 | 371 | 450 | 245 | 205 | 33 | 3 |
-- |
-- Module : Data.Functor.Product
-- Copyright : (c) Ross Paterson 2010
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- Products, lifted to functors.
module Data.Functor.Product (
Product(..),
) where
import Control.Applicative
import Data.Foldable (Foldable(foldMap))
import Data.Monoid (mappend)
import Data.Traversable (Traversable(traverse))
-- | Lifted product of functors.
data Product f g a = Pair (f a) (g a)
instance (Functor f, Functor g) => Functor (Product f g) where
fmap f (Pair x y) = Pair (fmap f x) (fmap f y)
instance (Foldable f, Foldable g) => Foldable (Product f g) where
foldMap f (Pair x y) = foldMap f x `mappend` foldMap f y
instance (Traversable f, Traversable g) => Traversable (Product f g) where
traverse f (Pair x y) = Pair <$> traverse f x <*> traverse f y
instance (Applicative f, Applicative g) => Applicative (Product f g) where
pure x = Pair (pure x) (pure x)
Pair f g <*> Pair x y = Pair (f <*> x) (g <*> y)
instance (Alternative f, Alternative g) => Alternative (Product f g) where
empty = Pair empty empty
Pair x1 y1 <|> Pair x2 y2 = Pair (x1 <|> x2) (y1 <|> y2)
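-- For example (illustrative): @fmap (+1) (Pair [1,2] (Just 3))@ is
-- @Pair [2,3] (Just 4)@, and @pure 0 :: Product [] Maybe Int@ is
-- @Pair [0] (Just 0)@; both components are acted on in lockstep.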
| ekmett/transformers | Data/Functor/Product.hs | bsd-3-clause | 1,265 | 0 | 8 | 276 | 476 | 253 | 223 | 19 | 0 |
{-# LANGUAGE MultiParamTypeClasses, FunctionalDependencies #-}
module Math.Budget.Lens.ZonedTimeL where
import Data.Time
class ZonedTimeL cat target | target -> cat where
zonedTimeL :: cat target ZonedTime
| tonymorris/hbudget | src/Math/Budget/Lens/ZonedTimeL.hs | bsd-3-clause | 211 | 0 | 7 | 29 | 41 | 24 | 17 | 5 | 0 |
{-# LANGUAGE LambdaCase #-}
module Main where
import System.Environment
import qualified System.IO.Strict as Strict
import Text.AsciiArt
data Mode
= Inplace FilePath
| Read FilePath
| Pipe
main :: IO ()
main = do
mode <- getArgs <&> \case
[] -> Pipe
["-i", file] -> Inplace file
[file] -> Read file
_ -> error "Usage: \n\
\ aa2u # reads from stdin, prints to stdout\n\
\ aa2u FILE # reads FILE, prints to stdout\n\
\ aa2u -i FILE # reads from and writes to FILE\n"
input <- case mode of
Inplace file -> Strict.readFile file
Read file -> readFile file
Pipe -> getContents
let inputLines = lines input
plane = planeFromList ' ' inputLines
width = maximum (fmap length inputLines)
height = length inputLines
let unicodeArt
= unlines
. fmap trimRight
. planeToList height width
. renderAsciiToUnicode
$ plane
case mode of
Inplace file -> writeFile file unicodeArt
Read _ -> putStr unicodeArt
Pipe -> putStr unicodeArt
trimRight :: String -> String
trimRight = reverse . dropWhile (== ' ') . reverse
(<&>) :: Functor f => f a -> (a -> b) -> f b
(<&>) = flip (<$>)
| fmthoma/ascii-art-to-unicode | app/Main.hs | bsd-3-clause | 1,451 | 0 | 14 | 581 | 353 | 179 | 174 | 38 | 8 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE UnicodeSyntax #-}
{-|
[@ISO639-1@] -
[@ISO639-2B@] paa
[@ISO639-3@] hui
[@Native name@] -
[@English name@] Huli
-}
module Text.Numeral.Language.PAA
( -- * Language entry
entry
-- * Conversions
, cardinal
-- * Structure
, struct
-- * Bounds
, bounds
) where
--------------------------------------------------------------------------------
-- Imports
--------------------------------------------------------------------------------
import "base" Data.Function ( ($), const, fix )
import "base" Data.Maybe ( Maybe(Just) )
import "base" Data.Monoid ( Monoid )
import "base" Data.String ( IsString )
import "base" Prelude ( Integral )
import "base-unicode-symbols" Data.Function.Unicode ( (∘) )
import qualified "containers" Data.Map as M ( fromList, lookup )
import "this" Text.Numeral
import qualified "this" Text.Numeral.Exp as E
import "this" Text.Numeral.Entry
--------------------------------------------------------------------------------
-- PAA
--------------------------------------------------------------------------------
{-
TODO:
Need new Exp constructor to express
42 = (15 × 2) + (12 obj. of the 3rd 15) = ngui ki, ngui tebone-gonaga hombearia
Probably also need a constructor to express "4 obj" as opposed to just "4".
-}
entry ∷ Entry
entry = emptyEntry
{ entIso639_2 = ["paa"]
, entIso639_3 = Just "hui"
, entEnglishName = Just "Huli"
, entCardinal = Just Conversion
{ toNumeral = cardinal
, toStructure = struct
}
}
cardinal ∷ (Integral α, Monoid s, IsString s) ⇒ α → Maybe s
cardinal = cardinalRepr ∘ struct
struct ∷ (Integral α, E.Unknown β, E.Lit β, E.Add β, E.Mul β) ⇒ α → β
struct = checkPos
$ fix
$ findRule ( 1, lit )
[ (16, add 15 R )
, (30, mul 15 R R)
, (31, add 30 R )
]
100
bounds ∷ (Integral α) ⇒ (α, α)
bounds = (1, 100)
cardinalRepr ∷ (Monoid s, IsString s) ⇒ Exp → Maybe s
cardinalRepr = render defaultRepr
{ reprValue = \n → M.lookup n syms
, reprAdd = Just (⊞)
, reprMul = Just (⊡)
}
where
-- (_ ⊞ _) _ = ", ngui "
(_ ⊞ _) _ = "-ni "
(_ ⊡ _) _ = " "
syms =
M.fromList
[ ( 1, const "mbira")
, ( 2, \c → case c of
CtxMul {} → "ki"
_ → "kira"
)
, ( 3, const "tebira")
, ( 4, const "maria")
, ( 5, const "duria")
, ( 6, const "waragaria")
, ( 7, const "karia")
, ( 8, const "halira")
, ( 9, const "dira")
, (10, const "pira")
, (11, const "bearia")
, (12, const "hombearia")
, (13, const "haleria")
, (14, const "deria")
, (15, \c → case c of
CtxMul {} → "ngui"
_ → "nguira"
)
]
| telser/numerals | src/Text/Numeral/Language/PAA.hs | bsd-3-clause | 3,356 | 6 | 14 | 1,163 | 753 | 452 | 301 | 69 | 3 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
module Guide.Session
(
newAcidSessionStore
)
where
import Imports
-- Spock
import Web.Spock.Config
-- acid-state
import Data.Acid as Acid
import Guide.State
import Guide.Types.Session
-- |Queries for all user sessions and then removes sessions unless predicate matches.
filterSessions :: AcidState GlobalState -> (SpockSession conn st -> Bool) -> IO ()
filterSessions db p = do
sessions <- Acid.query db GetSessions
for_ sessions $ \sess -> do
unless (p $ unwrapSession sess) $
Acid.update db $ DeleteSession (sess ^. sess_id)
-- |Queries for all user sessions and then performs an operation over all.
mapSessions :: MonadIO m => AcidState GlobalState -> (SpockSession conn st -> m (SpockSession conn st)) -> m ()
mapSessions db f = do
sessions <- liftIO $ Acid.query db GetSessions
for_ sessions $ \sess -> do
newSess <- f (unwrapSession sess)
liftIO $ Acid.update db $ StoreSession (wrapSession newSess)
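-- A usage sketch (illustrative; 'sessionIsStale' is a hypothetical predicate
-- on the unwrapped Spock session, not something defined in this module):
--
-- > cleanupSessions db = filterSessions db (not . sessionIsStale)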
-- |Wraps an STM session store and periodically commits session to the database.
newAcidSessionStore' :: AcidState GlobalState -> IO (SessionStore (SpockSession conn st) IO)
newAcidSessionStore' db = do
-- See Note [Session Linearizability]
lock <- newMVar ()
return $ SessionStore {
ss_runTx = withMVar lock . const,
ss_loadSession = \sessId -> do
sess <- Acid.query db $ LoadSession sessId
return $ unwrapSession <$> sess,
ss_deleteSession = Acid.update db . DeleteSession,
ss_storeSession = Acid.update db . StoreSession . wrapSession,
ss_toList = do
sessions <- Acid.query db GetSessions
return $ map unwrapSession sessions,
ss_filterSessions = filterSessions db,
ss_mapSessions = mapSessions db
}
newAcidSessionStore :: AcidState GlobalState -> IO (SessionStoreInstance (SpockSession conn st))
newAcidSessionStore db = SessionStoreInstance <$> newAcidSessionStore' db
{- Note [Session Linearizability]
Acid-State transactions are, I believe, serializable by default.
Updates can be issued in parallel; each individual update is atomic, but the
order in which concurrent updates are applied is arbitrary.
Acid-state may also be sequentially consistent, not sure. It's definitely
not linearizable, which is a property we really want for session data
types. In other words, we can have data races.
Consider two actions taken by an administrator:
* Administrator updates user profiles to remove access rights,
running GetSession and then StoreSession, via filterSessions or mapSessions.
* Eve at the same time updates their user profile to change their user name,
running LoadSession and then StoreSession.
Since filterSession is not atomic, this sequence could occur:
| Process | Command | Context
| Admin | GetSessions | mapSessions runs GetSessions, obtaining a list of all sessions
| Eve | LoadSession | user profile page view
| Admin | StoreSession | mapSessions runs StoreSession for Eve, removing permissions
| Eve | StoreSession | Eve clicks "save profile" which refreshes her session
This is a classic race condition. So we use a lock on the Session Store.
-}
| aelve/hslibs | src/Guide/Session.hs | bsd-3-clause | 3,390 | 0 | 16 | 637 | 541 | 273 | 268 | 45 | 1 |
module Main where
import System.IO
import XMonad
import XMonad.Util.EZConfig (additionalKeys)
import XMonad.Util.Run (spawnPipe)
import XMonad.Layout.NoBorders (smartBorders)
import XMonad.Layout.LayoutHints (layoutHints)
import XMonad.Hooks.DynamicLog (dynamicLogWithPP, PP(..))
import XMonad.Hooks.ManageDocks -- (avoidStruts, manageDocks, ToggleStruts)
windowKey :: KeyMask
windowKey = mod4Mask
layout :: Handle -> PP
layout handle = def { ppLayout = const ""
, ppOutput = hPutStrLn handle }
main :: IO ()
main = do
spawn "i3status | dzen2 -e - -h 32 -w 1280 -x 1280 -y 0 -ta r"
dzenLeft <- spawnPipe "dzen2 -h 32 -w 1280 -x 0 -y 0 -ta l"
xmonad $ def { modMask = windowKey
, terminal = "termite"
, manageHook = manageDocks <+> manageHook def
, layoutHook = avoidStruts . layoutHints . smartBorders $ Full
, logHook = dynamicLogWithPP $ layout dzenLeft
, startupHook = do
spawn "xbindkeys"
spawn "feh --bg-scale ~/hask-bg-1920x1200.png"
}
`additionalKeys`
[ ((windowKey, xK_b), sendMessage ToggleStruts) ]
| sshine/xsimonad | src/Xmonad.hs | bsd-3-clause | 1,174 | 0 | 13 | 310 | 266 | 152 | 114 | 28 | 1 |
-- |
-- Module : $Header$
-- Copyright : (c) 2013-2015 Galois, Inc.
-- License : BSD3
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
{-# LANGUAGE RecordWildCards #-}
module Cryptol.Parser.NoInclude
( removeIncludesModule
, IncludeError(..), ppIncludeError
) where
import Cryptol.Parser (parseProgramWith)
import Cryptol.Parser.AST
import Cryptol.Parser.LexerUtils (Config(..),defaultConfig)
import Cryptol.Parser.ParserUtils
import Cryptol.Utils.PP
import qualified Control.Applicative as A
import Data.Text.Lazy (Text)
import qualified Data.Text.Lazy.IO as T
import Data.Either (partitionEithers)
import MonadLib
import qualified Control.Exception as X
import System.FilePath (takeDirectory,(</>),isAbsolute)
#if MIN_VERSION_directory(1,2,2)
import System.Directory (makeAbsolute)
#else
import System.Directory (getCurrentDirectory)
import System.FilePath (isRelative, normalise)
-- from the source of directory-1.2.2.1
makeAbsolute :: FilePath -> IO FilePath
makeAbsolute = fmap normalise . absolutize
where absolutize path
| isRelative path = fmap (</> path) getCurrentDirectory
| otherwise = return path
#endif
removeIncludesModule :: FilePath -> Module -> IO (Either [IncludeError] Module)
removeIncludesModule modPath m = runNoIncM modPath (noIncludeModule m)
data IncludeError
= IncludeFailed (Located FilePath)
| IncludeParseError ParseError
| IncludeCycle [Located FilePath]
deriving (Show)
ppIncludeError :: IncludeError -> Doc
ppIncludeError ie = case ie of
IncludeFailed lp -> (char '`' <> text (thing lp) <> char '`')
<+> text "included at"
<+> pp (srcRange lp)
<+> text "was not found"
IncludeParseError pe -> ppError pe
IncludeCycle is -> text "includes form a cycle:"
$$ nest 2 (vcat (map (pp . srcRange) is))
newtype NoIncM a = M
{ unM :: ReaderT Env (ExceptionT [IncludeError] IO) a }
data Env = Env { envSeen :: [Located FilePath]
-- ^ Files that have been loaded
, envIncPath :: FilePath
-- ^ The path that includes are relative to
}
runNoIncM :: FilePath -> NoIncM a -> IO (Either [IncludeError] a)
runNoIncM sourcePath m =
do incPath <- getIncPath sourcePath
runM (unM m) Env { envSeen = [], envIncPath = incPath }
tryNoIncM :: NoIncM a -> NoIncM (Either [IncludeError] a)
tryNoIncM m = M (try (unM m))
-- | Get the absolute directory name of a file that contains cryptol source.
getIncPath :: FilePath -> IO FilePath
getIncPath file = makeAbsolute (takeDirectory file)
-- | Run a 'NoIncM' action with a different include path. The argument is
-- expected to be the path of a file that contains cryptol source, and will be
-- adjusted with getIncPath.
withIncPath :: FilePath -> NoIncM a -> NoIncM a
withIncPath path (M body) = M $
do incPath <- inBase (getIncPath path)
env <- ask
local env { envIncPath = incPath } body
-- | Adjust an included file with the current include path.
fromIncPath :: FilePath -> NoIncM FilePath
fromIncPath path
| isAbsolute path = return path
| otherwise = M $
do Env { .. } <- ask
return (envIncPath </> path)
instance Functor NoIncM where
fmap = liftM
instance A.Applicative NoIncM where
pure = return
(<*>) = ap
instance Monad NoIncM where
return x = M (return x)
m >>= f = M (unM m >>= unM . f)
fail x = M (fail x)
-- | Raise an 'IncludeFailed' error.
includeFailed :: Located FilePath -> NoIncM a
includeFailed path = M (raise [IncludeFailed path])
-- | Push a path on the stack of included files, and run an action. If the path
-- is already on the stack, an include cycle has happened, and an error is
-- raised.
pushPath :: Located FilePath -> NoIncM a -> NoIncM a
pushPath path m = M $ do
Env { .. } <- ask
let alreadyIncluded l = thing path == thing l
when (any alreadyIncluded envSeen) (raise [IncludeCycle envSeen])
local Env { envSeen = path:envSeen, .. } (unM m)
-- | Lift an IO operation, with a way to handle the exception that it might
-- throw.
failsWith :: X.Exception e => IO a -> (e -> NoIncM a) -> NoIncM a
failsWith m k = M $ do
e <- inBase (X.try m)
case e of
Right a -> return a
Left exn -> unM (k exn)
-- | Like 'mapM', but tries to collect as many errors as possible before
-- failing.
collectErrors :: (a -> NoIncM b) -> [a] -> NoIncM [b]
collectErrors f ts = do
es <- mapM (tryNoIncM . f) ts
let (ls,rs) = partitionEithers es
errs = concat ls
unless (null errs) (M (raise errs))
return rs
-- | Remove includes from a module.
noIncludeModule :: Module -> NoIncM Module
noIncludeModule m = update `fmap` collectErrors noIncTopDecl (mDecls m)
where
update tds = m { mDecls = concat tds }
-- | Remove includes from a program.
noIncludeProgram :: Program -> NoIncM Program
noIncludeProgram (Program tds) =
(Program . concat) `fmap` collectErrors noIncTopDecl tds
-- | Substitute top-level includes with the declarations from the files they
-- reference.
noIncTopDecl :: TopDecl -> NoIncM [TopDecl]
noIncTopDecl td = case td of
Decl _ -> return [td]
TDNewtype _-> return [td]
Include lf -> resolveInclude lf
-- | Resolve the file referenced by an include into a list of top-level
-- declarations.
resolveInclude :: Located FilePath -> NoIncM [TopDecl]
resolveInclude lf = pushPath lf $ do
source <- readInclude lf
case parseProgramWith (defaultConfig { cfgSource = thing lf }) source of
Right prog -> do
Program ds <- withIncPath (thing lf) (noIncludeProgram prog)
return ds
Left err -> M (raise [IncludeParseError err])
-- | Read a file referenced by an include.
readInclude :: Located FilePath -> NoIncM Text
readInclude path = do
file <- fromIncPath (thing path)
source <- T.readFile file `failsWith` handler
return source
where
handler :: X.IOException -> NoIncM a
handler _ = includeFailed path
| ntc2/cryptol | src/Cryptol/Parser/NoInclude.hs | bsd-3-clause | 6,066 | 0 | 16 | 1,367 | 1,662 | 850 | 812 | 118 | 3 |
-- An abstract data type for performing multiple linear regression,
-- using HMatrix as the backend for matrix computations.
--
-- types:
-- Matrix a (2D-array)
-- Vector a (vector from the standard vector package)
--
-- Both types are dense, immutable, and strict in all elements and are
-- manipulated as whole blocks.
--
-- matrix product is a <> b, where a and b are matrices
-- matrix-vector product is a #> x, where a is a matrix and x a vector
-- vector-vector dot product is x <.> y
-- the (conjugate) transpose is tr a, where a is a matrix or vector
-- the general linear system solver is <\>
--
-- to construct a 4 by 3 matrix:
-- let a = (4><3) [1,2,3,4,5,6,7,8,9,10,11,12] :: Matrix R
--
-- to construct a vector of double-precision floating points:
-- let x = vector [1,2,3]
--
-- to multiply vectors:
-- x * y
--
-- to add two vectors elementwise:
-- x + y
--
-- to compute vector dot product:
-- x <.> y
--
-- to perform matrix-vector product:
-- m #> x
--
-- to perform matrix-matrix product
-- m <> n
--
-- to sum elements of a vector or matrix:
-- sumElements m
--
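-- A small GHCi session combining the operations above (illustrative only;
-- the numbers are made up):
--
-- let a = (2><2) [1,2,3,4] :: Matrix R
-- let x = vector [1,1]
-- a #> x -- matrix-vector product, here [3,7]
-- x <.> x -- dot product, here 2
-- sumElements a -- 10
--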
module Regression.HMatrix (
model_features,
model_outputs,
model_weights,
model_predictions,
model_rss,
create_model,
create_features,
create_weights,
predict,
powers,
normalize,
newtons_method,
newtons_method_with_norm,
cross_validate,
gradient_descent
) where
import Numeric.LinearAlgebra hiding (magnitude)
import Numeric.LinearAlgebra.HMatrix hiding (magnitude)
import Data.List hiding (transpose)
import Data.Maybe (fromJust)
import Debug.Trace (trace)
import Stat (range, mean, stdev)
import qualified Data.Vector as V
-- A model is:
-- * a feature matrix with n rows and d columns
-- * a vector of length n representing the observed output
-- * a weight vector containing the calculated vector of optimized weights
-- * the predicted outputs using the optimized weights
-- * the residual sum of squares (RSS) of the predictions against the observed output
data Model = MO {
model_features :: FeatureMatrix,
model_outputs :: FeatureVector,
model_weights :: WeightVector,
model_predictions :: FeatureVector,
model_rss :: Double
} deriving (Show)
-- A two-dimensional matrix whose columns contain feature values.
data FeatureMatrix = FM {
fm_name_indexes:: [(String, Int)],
fm_values :: Matrix Double
} deriving (Show)
-- A vector of values for a named feature.
data FeatureVector = FV {
fv_name :: String,
fv_values :: Vector Double
}
instance Show FeatureVector where
show (FV name values) =
name ++ " = " ++ show (toList values)
type Feature a = (String, a -> Double)
type Output a = (String, a -> Double)
type Optimizer = FeatureMatrix -> FeatureVector -> WeightVector
-- A vector of weights. Each weight corresponds to a feature in the
-- feature matrix used to calculate the weights.
data WeightVector = WV {
wv_name_indexes :: [(String, Int)],
wv_values :: Vector Double
}
instance Show WeightVector where
show (WV name_indexes values) =
concatMap showWeight (zip name_indexes (toList values))
where showWeight ((name, i), value) = name ++ " = " ++ show value ++ "\n"
-- Creates a model from a list of records, a list of features, and the output
-- accessor function. This computes the weight vector as well. This function
-- does not scale the features nor does it add an intercept feature (a feature
-- whose values are all 1).
create_model :: [a] -> [Feature a] -> Output a -> Optimizer -> Model
create_model rows features (output_name, output) optimizer =
let fmat = create_features features rows
nn = length rows
observations = FV output_name (fromList (Data.List.map output rows))
weights = optimizer fmat observations
predictions = predict fmat weights
residuals = rss observations predictions
in MO fmat observations weights predictions residuals
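-- Illustrative call (the row type and its 'sqft'/'price' accessors are
-- hypothetical):
--
--   let m = create_model houses [("sqft", sqft)] ("price", price) newtons_method
--   in (model_weights m, model_rss m)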
-- Multiplies a feature matrix by a weights vector to obtain a prediction of
-- output.
predict :: FeatureMatrix -> WeightVector -> FeatureVector
predict (FM _ h) (WV _ w) = FV "" (h #> w)
-- Generates the feature matrix, usually denoted as H, with N rows and
-- D features where D is the length of the feature list.
create_features :: [Feature a] -> [a] -> FeatureMatrix
create_features hs inputs =
let n = length inputs
d = length hs
names = (Data.List.map fst hs)
name_indexes = Prelude.zip names [0..]
dat = [h(row) | row <- inputs, (_,h) <- hs]
h = (n><d) dat
in FM name_indexes h
-- Computes the residual sum of squares from the observed output and
-- the predicted output.
rss :: (Num a) => FeatureVector -> FeatureVector -> Double
rss (FV _ v1) (FV _ v2) = rss' v1 v2
rss' :: Vector Double -> Vector Double -> Double
rss' v1 v2 = diff <.> diff
where diff = v1 - v2
-- Takes a list of numbers and turns them into a vector of weights.
create_weights :: FeatureMatrix -> [Double] -> WeightVector
create_weights (FM name_indexes h) weights = WV name_indexes (vector weights)
newtons_method :: Optimizer
newtons_method h y = newtons_method_with_norm 0 h y
newtons_method_with_norm :: Matrix Double -> Optimizer
newtons_method_with_norm l (FM n h) (FV o y) = WV n w
where w = newtons_method_with_norm' l h y
-- Performs newtons method, finding an estimate of the minimum. This method
-- uses the formula:
-- w_hat = (H^t * H - lambda*I)^-1 * H^T * y
newtons_method_with_norm' :: Matrix Double -> Matrix Double -> Vector Double -> Vector Double
newtons_method_with_norm' l h y = ((inv (hth - mod)) <> th) #> y
where th = tr h
hth = th <> h
(d,_) = size hth
mod = l * (ident d) :: Matrix Double
-- Performs gradient descent, updating the weights until the residual
-- sum of squares is less than the epsilon value e, at which point the
-- weight matrix is returned. n is the step size.
gradient_descent :: Double -> Double -> WeightVector -> Optimizer
gradient_descent e n (WV name w) (FM _ f) (FV _ o) =
let ft = tr f
weights = gradient_descent' e n w f ft o
in WV name weights
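-- Illustrative use as an 'Optimizer' (the tolerance, step size and zero
-- starting weights below are made up, not tuned values):
--
--   let fmat = create_features features rows
--       w0 = create_weights fmat (replicate (length features) 0)
--   in create_model rows features output (gradient_descent 1e2 1e-12 w0)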
gradient_descent' :: Double -> Double -> Vector Double -> Matrix Double ->
Matrix Double -> Vector Double -> Vector Double
gradient_descent' e n w h ht y =
let grad = gradient h ht y w -- (-2H^t(y-Hw))
grad_len = magnitude grad -- grad RSS(w) == ||2H^t(y-HW)||
--in if grad_len < e
in if (trace ("gradient = " ++ show grad_len) grad_len) < e
then w
else let delta = cmap (*(-n)) grad -- (2nH^t(y-Hw))
w' = w + delta
in gradient_descent' e n w' h ht y
-- Compute the squared magnitude (sum of squared elements) of the given vector.
magnitude :: Vector Double -> Double
magnitude vec = sumElements $ cmap (\y -> y^2) vec
-- Calculates the gradient of the residual sum of squares (-2H^t(y-Hw)).
-- This is used to compute the magnitude of the gradient, to see if the
-- function is minimized. It is also used to update the weights of the
-- features.
gradient :: Matrix Double -> Matrix Double -> Vector Double ->
Vector Double -> Vector Double
gradient h ht y w =
let yhat = h #> w
err = y - yhat
prod = ht #> err
grad = cmap (*(-2)) prod
in grad
-- Given a feature, computes polynomial powers of that feature from
-- 1 (the original feature) up to and including n.
powers :: Feature a -> Int -> [Feature a]
powers (name, f) n = Data.List.map (\i -> (name ++ (show i), (\a -> (f a)^i))) [1..n]
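-- For example, the generated feature names are suffixed with the power:
--
--   map fst (powers ("x", id) 3) == ["x1","x2","x3"]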
-- given a list of values and a feature, return a normalized version of that
-- feature where the mean of the feature is subtracted from each feature and
-- the result is divided by the standard deviation of the feature.
normalize :: [a] -> (a -> Double) -> a -> Double
normalize xs f = (/sdxs) . (+(-meanxs)) . f
where meanxs = mean (Data.List.map f xs)
sdxs = stdev (Data.List.map f xs)
-- Compute the average k-fold cross-validation error for a given L2 penalty and
-- a given split size k.
-- Note that the rows should be shuffled before they are provided.
cross_validate :: Int -> [a] -> [Feature a] -> Output a -> Optimizer -> Double
cross_validate k rows features o@(output_name, output) optimizer = kmean
where ksplits = ksplit k rows
training_data = map fst ksplits
valid_data = map snd ksplits
trained_models = map (\rows -> create_model rows features o optimizer) training_data
trained_weights = map model_weights trained_models
valid_features = map (create_features features) valid_data
predictions = zipWith (\h w -> predict h w) valid_features trained_weights
observations = map (\dat -> FV output_name (fromList (Data.List.map output dat))) valid_data
valid_rss = zipWith (\o p -> rss o p) observations predictions
kmean = mean valid_rss
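-- Illustrative call, averaging validation RSS over 10 folds (the row type
-- and its accessors are hypothetical):
--
--   cross_validate 10 houses [("sqft", sqft)] ("price", price) newtons_method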
-- prepare splits of the data for k-means where k is the number of
-- clusters and the number of values in the test set (the second list
-- in the pair) is n/k where n is the size of the input list.
ksplit :: Int -> [a] -> [([a], [a])]
ksplit k xs = map (ksplit' k xs) [0..k-1]
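-- For example:
--
--   ksplit 3 [1..6] == [([3,4,5,6],[1,2]),([1,2,5,6],[3,4]),([1,2,3,4],[5,6])]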
-- prepare a k split ([a], [a]) for the chunk at index i with list xs
-- and with number of chunks k
ksplit' :: Int -> [a] -> Int -> ([a], [a])
ksplit' k xs i = ((take (sz*i) xs) ++ (drop (sz*(i+1)) xs), take sz (drop (sz*i) xs))
where sz = length xs `div` k
| markrgrant/regression | src/Regression/HMatrix.hs | bsd-3-clause | 9,482 | 0 | 15 | 2,233 | 2,244 | 1,236 | 1,008 | 136 | 2 |
module Main where
import Options.Applicative
import Plunge.Options
import Plunge.Parsers.PreprocessorOutput
import Plunge.Preprocessor
import Plunge.Analytics.C2CPP
import Plunge.Printers.Analytics
import Plunge.Types.PreprocessorOutput
main :: IO ()
main = execParser options >>= runWithOptions
runWithOptions :: Options -> IO ()
runWithOptions opts = do
cppResult <- preprocessFile (inputFile opts) (gccOptions opts)
cData <- readFile (inputFile opts)
case cppResult of
Left err -> outputPreprocessorError err
Right cppData -> parse opts (inputFile opts) cData cppData
parse :: Options -> FilePath -> String -> String -> IO ()
parse opts fileName cData cppData = do
parsed <- runCppParser fileName cppData
case parsed of
Left err -> putStrLn $ "ERROR: " ++ (show err)
Right result -> analyze opts result (lines cData) (lines cppData)
analyze :: Options -> [Section] -> [CLine] -> [CppLine] -> IO ()
analyze opts result cLines cppLines = do
let assocs = lineAssociations result
putStrLn $ renderAssociation opts assocs cLines cppLines
outputPreprocessorError :: CppError -> IO ()
outputPreprocessorError e = do
mapM_ putStrLn [ "C PREPROCESSOR ERROR"
, "--------------------"
, e
]
| sw17ch/plunge | src/Plunge.hs | bsd-3-clause | 1,275 | 0 | 12 | 257 | 398 | 198 | 200 | 32 | 2 |
{-# LANGUAGE PatternGuards, NoMonomorphismRestriction, CPP #-}
module Ant.Vector where
import qualified Data.Vector.Unboxed as U
import qualified Data.Vector.Unboxed.Mutable as UM
import qualified Data.Vector as V
import qualified Data.Vector.Mutable as VM
#ifndef DEBUG
indexU :: (U.Unbox a) => U.Vector a -> Int -> a
indexU = U.unsafeIndex
{-# INLINE indexU #-}
indexV :: V.Vector a -> Int -> a
indexV = V.unsafeIndex
{-# INLINE indexV #-}
readV = VM.unsafeRead
{-# INLINE readV #-}
writeV = VM.unsafeWrite
{-# INLINE writeV #-}
readU = UM.unsafeRead
{-# INLINE readU #-}
writeU = UM.unsafeWrite
{-# INLINE writeU #-}
#else
indexU :: (U.Unbox a) => U.Vector a -> Int -> a
indexU = (U.!)
{-# INLINE indexU #-}
indexV :: V.Vector a -> Int -> a
indexV = (V.!)
{-# INLINE indexV #-}
readV = VM.read
{-# INLINE readV #-}
writeV = VM.write
{-# INLINE writeV #-}
readU = UM.read
{-# INLINE readU #-}
writeU = UM.write
{-# INLINE writeU #-}
#endif | Saulzar/Ants | Ant/Vector.hs | bsd-3-clause | 961 | 0 | 8 | 174 | 140 | 89 | 51 | 20 | 1 |
{-# LANGUAGE TemplateHaskell #-}
module Database.HMDX.Info
( createDB
) where
import Database.HMDX.SSAS ( defaultSettings
, catalogName
, cubeName
, mdSchemaDimensions)
import BasicPrelude hiding (putStrLn, words)
import Network.HTTP.Client (Request, applyBasicAuth)
import Network.SOAP.Transport.HTTP (initTransport)
import Language.Haskell.TH
import Data.Char (toUpper)
import Prelude (putStrLn, words)
{-# NOINLINE createDB #-}
createDB :: Q [Dec]
createDB = do
let ip = "localhost:8080"
let url = "http://" ++ ip ++ "/OLAP/msmdpump.dll"
let settings = defaultSettings {
catalogName = "AdventureWorksDW2012Multidimensional-SE"
, cubeName = "Adventure Works"
}
transport <- runIO $ initTransport url addAuth id
dText <- runIO $ mdSchemaDimensions transport settings
let dimensions = map (toTitleCase . textToString) dText
qs <- createDimensionRecords dimensions
runIO $ putStrLn $ pprint qs
ir <- createIntersectionRecord dimensions
runIO $ putStrLn $ pprint ir
cr <- createCubeRecord
runIO $ putStrLn $ pprint cr
return qs
addAuth :: Request -> Request
addAuth = applyBasicAuth "WIN-SSAS\\ReadUser" "Password01"
createDimensionRecords :: [String] -> Q [Dec]
createDimensionRecords xs = return $ map createDimensionRecord xs
createDimensionRecord :: String -> Dec
createDimensionRecord dimName = DataD context name vars cons derives
where
context = []
name = mkName dimName
vars = []
cons = [NormalC name [field]]
field = (NotStrict, ConT ''String)
derives = [''Show]
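-- For instance, @createDimensionRecord "Customer"@ corresponds to splicing in
-- the declaration (sketch): data Customer = Customer String deriving Show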
createIntersectionRecord :: [String] -> Q Dec
createIntersectionRecord dimensions = return $ DataD context name vars cons derives
where
context = []
name = mkName "Intersection"
vars = []
field = (NotStrict, createDimensionsTuple (reverse dimensions) $ length dimensions)
cons = [NormalC name [field]]
derives = [''Show]
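-- For instance, with dimensions ["Customer", "Product"] the spliced type
-- corresponds to (sketch):
-- data Intersection = Intersection (Customer, Product) deriving Show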
type NumberOfDimensions = Int
createDimensionsTuple :: [String] -> NumberOfDimensions -> Type
createDimensionsTuple [] _ = error "List of dimensions should not be empty!"
createDimensionsTuple [x] n = AppT (TupleT n) (ConT $ mkName x)
createDimensionsTuple (x:xs) n = AppT (createDimensionsTuple xs n) (ConT $ mkName x)
createCubeRecord :: Q Dec
createCubeRecord = return $ DataD context name vars cons derives
where
context = []
name = mkName "Cube"
vars = []
field = (NotStrict, AppT ListT (AppT (AppT (TupleT 2) (ConT $ mkName "Intersection")) (ConT ''Double)))
cons = [NormalC name [field]]
derives = [''Show]
toTitleCase :: String -> String
toTitleCase = concat . map capitaliseWord . words
capitaliseWord :: String -> String
capitaliseWord [] = ""
capitaliseWord (x:xs) = toUpper x : xs
| sfilipov/hmdx | src/Database/HMDX/Info.hs | mit | 2,915 | 0 | 15 | 693 | 868 | 457 | 411 | 69 | 1 |
--
--
--
------------------
-- Exercise 11.18.
------------------
--
--
--
module E'11'18 where
curryList :: ([a] -> d) -> (a -> [a] -> d)
curryList function
= \item remainingItems -> function (item : remainingItems) -- Partial anonymous function application.
{- GHCi>
curryList sum 1 [2]
-}
-- 3
uncurryList :: (a -> [a] -> d) -> ([a] -> d)
uncurryList function
= \(item : remainingItems) -> function item remainingItems -- List constructor in lambda abstraction argument.
{- GHCi>
:{
let sum' :: Num a => a -> [a] -> a
sum' number = (+ number) . sum
:}
uncurryList sum' [1 , 2]
curryList (uncurryList sum') 1 [2]
-}
-- 3
-- 3
uncurryList' :: (a -> [a] -> d) -> ([a] -> d)
uncurryList' function list
= function (head list) (tail list)
-- Can you use "curry" and "uncurry" in defining "curryList" and "uncurryList"?
-------------------------------------------------------------------------------
-- Similar question like in 11.17, sure we could just use it for nonsense:
--
-- Using: curry (uncurry f) = f
-- <=> curry $ uncurry f = f
curryList' :: ([a] -> d) -> (a -> [a] -> d)
curryList' function
= curry $ uncurry ( \item remainingItems -> function (item : remainingItems) )
{- GHCi>
curryList' sum 1 [2]
-}
-- 3
-- But is there an interesting use case?
-- Maybe one that shows a dependency between "curryList"/"uncurryList" and "curry"/"uncurry"?
-- "curryList" and "curry":
--
-- The "curry" function is not directly applicable to the argument of "curryList" due
-- to the different typing. "([a] -> d)" doesn't match the tuple-type
-- that is expected by "curry :: ( (a , b) -> c ) -> (a -> b -> c)".
-- Yes, we could use lambda abstractions to fix this.
{- GHCi>
let f :: [a] -> b ; f = undefined
curry f
-}
--
-- <interactive>:9:7:
-- Couldn't match type `[a0]' with `(a, b)'
-- Expected type: (a, b) -> c
-- Actual type: [a0] -> c
-- Relevant bindings include
-- it :: a -> b -> c (bound at <interactive>:9:1)
-- In the first argument of `curry', namely `f'
-- In the expression: curry f
-- "curryList" and "uncurry":
--
-- "uncurry" is applicable to the function argument from "curryList".
-- The application results in the type "([a] , d) -> c".
-- What do we do with this? What is about the new polymorphic "c"?
curryListUC :: ([a] -> d) -> (a -> [a] -> d)
curryListUC function a as
= undefined
-- "uncurryList" and "uncurry":
--
-- "uncurry :: (a -> b -> c) -> ( (a , b ) -> c )" is applicable to
-- the function argument in "uncurryList :: (a -> [a] -> d) -> ([a] -> d)".
-- It results in "uncurry function :: ( (a , [a]) -> d )".
uncurryListUC :: (a -> [a] -> d) -> ([a] -> d)
uncurryListUC function
= \(a : as) -> (uncurry function) (a , as)
-- "uncurryList" and "curry":
--
-- Similar to "curryList" and "curry".
| pascal-knodel/haskell-craft | _/links/E'11'18.hs | mit | 2,879 | 0 | 11 | 658 | 416 | 254 | 162 | 19 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Countdown.AttemptsSpecs (main, spec) where
import Test.Hspec
import Countdown.Game (Player (..), Challange (..), Attempt(..), attemptFromFormula)
-- `main` is here so that this module can be run from GHCi on its own. It is
-- not needed for automatic spec discovery.
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "wenn ein Spieler einen Vorschlag einreicht" $ do
let player = Player "Testspieler" 1
let challange = Challange 765 [1,3,7,10,25,25,50]
context "und dabei eine valide Formel nur mit den gegebenen Zahlen benutzt" $ do
let playerAttempt = "7*25 + 10*50"
attmpt = attemptFromFormula challange player playerAttempt
it "wird die Formel uebernommen" $ do
formula attmpt `shouldBe` playerAttempt
it "wird der Wert gesetzt" $ do
value attmpt `shouldBe` Just 675
it "wird die Differenz zum Zielwert gebildet" $ do
difference attmpt `shouldBe` Just 90
it "ist die Info 'OK'" $ do
info attmpt `shouldBe` "OK"
context "und die Formel einen Syntaxfehler enthaelt" $ do
let playerAttempt = "7*25 +"
attmpt = attemptFromFormula challange player playerAttempt
it "wird die Formel uebernommen" $ do
formula attmpt `shouldBe` playerAttempt
it "wird der Wert nicht gesetzt" $ do
value attmpt `shouldBe` Nothing
it "wird die Differenz nicht gesetzt" $ do
difference attmpt `shouldBe` Nothing
it "ist die Info 'Syntaxfehler in Formel'" $ do
info attmpt `shouldBe` "Syntaxfehler in Formel"
context "und die Formel nicht den Regeln entspricht (Teilterm negativ)" $ do
let playerAttempt = "7*(3-10)"
attmpt = attemptFromFormula challange player playerAttempt
it "wird die Formel uebernommen" $ do
formula attmpt `shouldBe` playerAttempt
it "wird der Wert nicht gesetzt" $ do
value attmpt `shouldBe` Nothing
it "wird die Differenz nicht gesetzt" $ do
difference attmpt `shouldBe` Nothing
it "ist die Info 'Formel enthaelt ungueltige Terme'" $ do
info attmpt `shouldBe` "Formel enthaelt ungueltige Terme"
context "die Formel nicht den Regeln entspricht (Teilen durch 0)" $ do
let playerAttempt = "7/(25-25)"
attmpt = attemptFromFormula challange player playerAttempt
it "wird die Formel uebernommen" $ do
formula attmpt `shouldBe` playerAttempt
it "wird der Wert nicht gesetzt" $ do
value attmpt `shouldBe` Nothing
it "wird die Differenz nicht gesetzt" $ do
difference attmpt `shouldBe` Nothing
it "ist die Info 'Formel enthaelt ungueltige Terme'" $ do
info attmpt `shouldBe` "Formel enthaelt ungueltige Terme"
context "und die Formel nicht vorgegebene Zahlen enthaelt" $ do
let playerAttempt = "7*5"
attmpt = attemptFromFormula challange player playerAttempt
it "wird die Formel uebernommen" $ do
formula attmpt `shouldBe` playerAttempt
it "wird der Wert nicht gesetzt" $ do
value attmpt `shouldBe` Nothing
it "wird die Differenz nicht gesetzt" $ do
difference attmpt `shouldBe` Nothing
it "ist die Info 'Formel darf nur die gegebenen Zahlen verwenden'" $ do
info attmpt `shouldBe` "Formel darf nur die gegebenen Zahlen verwenden"
context "und die Formel vorgegebene Zahlen zu oft enthaelt" $ do
let playerAttempt = "25+25*25"
attmpt = attemptFromFormula challange player playerAttempt
it "wird die Formel uebernommen" $ do
formula attmpt `shouldBe` playerAttempt
it "wird der Wert nicht gesetzt" $ do
value attmpt `shouldBe` Nothing
it "wird die Differenz nicht gesetzt" $ do
difference attmpt `shouldBe` Nothing
it "ist die Info 'Formel darf nur die gegebenen Zahlen verwenden'" $ do
info attmpt `shouldBe` "Formel darf nur die gegebenen Zahlen verwenden"
| CarstenKoenig/Countdown | test/Countdown/AttemptsSpecs.hs | mit | 3,986 | 0 | 17 | 1,008 | 860 | 407 | 453 | 77 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{- |
Module : $Header$
Description : abstract CSMOF syntax
Copyright : (c) Daniel Calegari Universidad de la Republica, Uruguay 2013
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : portable
-}
module CSMOF.As where
import Common.Id
import Data.Data
-- Simplified MOF Metamodel
data Metamodel = Metamodel
{ metamodelName :: String
, element :: [NamedElement]
, model :: [Model]
} deriving (Eq, Ord, Typeable, Data)
instance GetRange Metamodel where
getRange _ = nullRange
rangeSpan _ = []
data NamedElement = NamedElement
{ namedElementName :: String
, namedElementOwner :: Metamodel
, namedElementSubClasses :: TypeOrTypedElement
} deriving (Eq, Ord, Typeable, Data)
instance GetRange NamedElement where
getRange _ = nullRange
rangeSpan _ = []
data TypeOrTypedElement = TType { getType :: Type }
| TTypedElement { getTypeElement :: TypedElement }
deriving (Eq, Ord, Typeable, Data)
instance GetRange TypeOrTypedElement where
getRange _ = nullRange
rangeSpan _ = []
-- When going downside-up, we can sort the auxiliary class TypeOrTypedElement and make super of type NamedElement
data Type = Type { typeSuper :: NamedElement
, typeSubClasses :: DataTypeOrClass
} deriving (Eq, Ord, Typeable, Data)
instance GetRange Type where
getRange _ = nullRange
rangeSpan _ = []
data DataTypeOrClass = DDataType { getDataType :: Datatype }
| DClass { getClass :: Class }
deriving (Eq, Ord, Typeable, Data)
instance GetRange DataTypeOrClass where
getRange _ = nullRange
rangeSpan _ = []
-- When going downside-up, we can sort the auxiliary class DataTypeOrClass and make super of type Type
data Datatype = Datatype { classSuper :: Type } deriving (Eq, Ord, Typeable, Data)
instance GetRange Datatype where
getRange _ = nullRange
rangeSpan _ = []
-- When going downside-up, we can sort the auxiliary class DataTypeOrClass and make super of type Type
data Class = Class
{ classSuperType :: Type
, isAbstract :: Bool
, superClass :: [Class]
, ownedAttribute :: [Property]
} deriving (Eq, Ord, Typeable, Data)
instance GetRange Class where
getRange _ = nullRange
rangeSpan _ = []
-- When going downside-up, we can sort the auxiliary class TypeOrTypedElement and make super of type NamedElement
data TypedElement = TypedElement
{ typedElementSuper :: NamedElement
, typedElementType :: Type
, typedElementSubClasses :: Property
} deriving (Eq, Ord, Typeable, Data)
instance GetRange TypedElement where
getRange _ = nullRange
rangeSpan _ = []
data Property = Property
{ propertySuper :: TypedElement
, multiplicityElement :: MultiplicityElement
, opposite :: Maybe Property
, propertyClass :: Class
} deriving (Eq, Ord, Typeable, Data)
instance GetRange Property where
getRange _ = nullRange
rangeSpan _ = []
data MultiplicityElement = MultiplicityElement
{ lower :: Integer
, upper :: Integer
, multiplicityElementSubClasses :: Property
} deriving (Eq, Ord, Typeable, Data)
instance GetRange MultiplicityElement where
getRange _ = nullRange
rangeSpan _ = []
-- Model part of CSMOF
data Model = Model
{ modelName :: String
, object :: [Object]
, link :: [Link]
, modelType :: Metamodel
} deriving (Eq, Ord, Typeable, Data)
instance GetRange Model where
getRange _ = nullRange
rangeSpan _ = []
data Object = Object
{ objectName :: String
, objectType :: Type
, objectOwner :: Model
} deriving (Eq, Ord, Typeable, Data)
instance GetRange Object where
getRange _ = nullRange
rangeSpan _ = []
data Link = Link
{ linkType :: Property
, source :: Object
, target :: Object
, linkOwner :: Model
} deriving (Eq, Ord, Typeable, Data)
instance GetRange Link where
getRange _ = nullRange
rangeSpan _ = []
| keithodulaigh/Hets | CSMOF/As.hs | gpl-2.0 | 4,554 | 0 | 9 | 1,448 | 985 | 553 | 432 | 102 | 0 |
-- This file is part of HamSql
--
-- Copyright 2016 by it's authors.
-- Some rights reserved. See COPYING, AUTHORS.
{-# LANGUAGE FlexibleInstances #-}
module Database.HamSql.Internal.Stmt.Function where
import Data.Maybe
import qualified Data.Text as T
import Database.HamSql.Internal.Stmt.Basic
stmtsDropFunction' :: SqlId -> [SqlStmt]
stmtsDropFunction' x =
catMaybes [newSqlStmt SqlDropFunction x $ "DROP FUNCTION " <> toSqlCode x]
stmtsDropFunction :: SqlObj SQL_FUNCTION (SqlName, [SqlType]) -> [Maybe SqlStmt]
stmtsDropFunction x = map Just $ stmtsDropFunction' $ sqlId x
instance ToSqlStmts (SqlContext (Schema, Function)) where
toSqlStmts SetupContext {setupContextSetup = setup} obj@(SqlContext (s, f)) =
stmtCreateFunction :
sqlSetOwner (functionOwner f) :
stmtComment : maybeMap sqlStmtGrantExecute (functionPrivExecute f)
--name = schemaName m <.> functionName f
where
sqlStmtGrantExecute u = newSqlStmt SqlPriv obj $ sqlGrantExecute u
sqlGrantExecute u =
"GRANT EXECUTE ON FUNCTION \n" <>
sqlIdCode obj <> "\nTO " <> prefixedRole setup u
stmtCreateFunction =
newSqlStmt SqlCreateFunction obj $
--(maybeMap _variableType (_functionParameters f)) $
"CREATE OR REPLACE FUNCTION " <>
sqlFunctionIdentifierDef <>
"\n" <>
"RETURNS" <-> sqlReturns (_functionReturns f) <>
"\nLANGUAGE " <>
sqlLanguage (functionLanguage f) <>
"\nSECURITY " <>
sqlSecurity (functionSecurityDefiner f) <>
"\nAS\n$BODY$" <> sqlBody <> "$BODY$\n"
stmtComment = stmtCommentOn obj $ functionDescription f
sqlSetOwner (Just o) =
newSqlStmt SqlPriv obj $
"ALTER FUNCTION " <>
sqlIdCode obj <> "OWNER TO " <> prefixedRole setup o
sqlSetOwner Nothing = Nothing
sqlFunctionIdentifierDef =
toSqlCode (schemaName s <.> functionName f) <>
"(\n" <>
T.intercalate ",\n" (maybeMap sqlParameterDef (_functionParameters f)) <>
"\n)"
-- function parameter
sqlParameterDef p =
toSqlCode (variableName p) <-> toSqlCode (_variableType p) <->
sqlParamDefault (variableDefault p)
where
sqlParamDefault Nothing = ""
sqlParamDefault (Just x) = "DEFAULT" <-> x
-- If function returns a table, use service for field definition
sqlReturns (ReturnType rt) = toSqlCode rt
sqlReturns (ReturnTypeSetof rt) = "SETOF" <-> toSqlCode rt
sqlReturns (ReturnTypeTable cs) =
"TABLE (" <\> T.intercalate ",\n" (map sqlReturnsColumn cs) <> ") "
sqlReturnsColumn c =
toSqlCode (parameterName c) <> " " <> toSqlCode (_parameterType c)
-- If language not defined, use service for variable definitions
sqlBody
| isNothing (functionLanguage f) =
"DECLARE" <\> sqlVariables (functionVariables f) <>
"BEGIN" <\> body <\> "END;"
| otherwise = body
where
body =
T.intercalate "\n" preludes <>
fromMaybe "" (functionBody f) <> T.intercalate "\n" postludes
preludes :: [Text]
preludes =
catMaybes $ maybeMap functiontplBodyPrelude (functionTemplateData f)
postludes :: [Text]
postludes =
catMaybes $
maybeMap functiontplBodyPostlude (functionTemplateData f)
-- Service for variable definitions
sqlVariables Nothing = ""
sqlVariables (Just vs) = T.concat (map sqlVariable vs)
sqlVariable v =
toSqlCode (variableName v) <-> toSqlCode (_variableType v) <->
sqlVariableDefault (variableDefault v) <>
";\n"
sqlVariableDefault Nothing = ""
sqlVariableDefault (Just d) = ":=" <-> d
-- SECURITY
sqlSecurity (Just True) = "DEFINER"
sqlSecurity _ = "INVOKER"
-- LANGUAGE
sqlLanguage Nothing = "plpgsql"
sqlLanguage (Just lang) = lang
| hemio-ev/hamsql | src/Database/HamSql/Internal/Stmt/Function.hs | gpl-3.0 | 3,951 | 0 | 20 | 1,045 | 959 | 482 | 477 | 79 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module View.Add (render) where
import Text.Blaze.Html5.Attributes (action, method, name, type_, class_)
import Text.Blaze.Html.Renderer.Text
import View.Header
import qualified Data.Text.Lazy as D
import qualified Text.Blaze.Html5 as H
-- |
--
-- >>> render
-- "<!DOCTYPE HTML>\n<html><head><title>Pirate Gold</title><link rel=\"stylesheet\" href=\"css/style.css\"></head><body class=\"add\"><h2>Add a definition to the treasure chest</h2><form action=\"/add\" method=\"post\"><p>Phrase: </p><input name=\"phrase\" type=\"text\"><p>Meaning: </p><input name=\"meaning\" type=\"text\"><p><input type=\"submit\"></p></form></body></html>"
render :: D.Text
render = renderHtml . H.docTypeHtml $ do
header
H.body H.! class_ "add" $ do
H.h2 "Add a definition to the treasure chest"
H.form H.! action "/add" H.! method "post" $ do
H.p "Phrase: "
H.input H.! name "phrase" H.! type_ "text"
H.p "Meaning: "
H.input H.! name "meaning" H.! type_ "text"
H.p (H.input H.! type_ "submit")
| codemiller/pirate-gold | src/View/Add.hs | apache-2.0 | 1,057 | 0 | 17 | 163 | 233 | 123 | 110 | 18 | 1 |
-- Add two numbers:
add :: Int -> Int -> Int
add x y = x + y
-- Call a function three times:
tick :: (Int -> Int) -> [Int]
tick f = [f 1, f 2, f 3]
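-- Partially applying 'add' fixes its first argument and yields a new
-- one-argument function (illustrative helper, not used by main):
addTen :: Int -> Int
addTen = add 10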
-- Prints "[11,12,13]"
main = print (tick (add 10))
| rm-training/advanced-js | examples/partial.hs | bsd-2-clause | 202 | 0 | 9 | 51 | 96 | 51 | 45 | 5 | 1 |
-- From http://lpaste.net/81623, courtesy of Albert Y. C. Lai
main = do
print 3
if True then do
print 5
else print 7
| mpickering/ghc-exactprint | tests/examples/ghc710/IfThenElse3.hs | bsd-3-clause | 129 | 0 | 10 | 35 | 33 | 15 | 18 | 5 | 2 |
-- |
-- Module : $Header$
-- Copyright : (c) 2013-2014 Galois, Inc.
-- License : BSD3
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
--
-- This module contains types used during type inference.
{-# LANGUAGE Safe #-}
{-# LANGUAGE FlexibleInstances, FlexibleContexts #-}
module Cryptol.TypeCheck.InferTypes where
import Cryptol.TypeCheck.AST
import Cryptol.TypeCheck.Subst
import Cryptol.Parser.Position
import qualified Cryptol.Parser.AST as P
import Cryptol.Parser.AST(LQName)
import Cryptol.Prims.Syntax(ECon(..))
import Cryptol.Utils.PP
import Cryptol.TypeCheck.PP
import Cryptol.Utils.Panic(panic)
import qualified Data.Set as Set
import qualified Data.Map as Map
import qualified Data.IntMap as IntMap
-- | The types of variables in the environment.
data VarType = ExtVar Schema -- ^ Known type
| CurSCC Expr Type -- ^ Part of current SCC
-- | Something that we need to find evidence for.
data Goal = Goal
  { goalSource :: ConstraintSource -- ^ What it is about
, goalRange :: Range -- ^ Part of source code that caused goal
, goal :: Prop -- ^ What needs to be proved
} deriving Show
data HasGoal = HasGoal
{ hasName :: !Int
, hasGoal :: Goal
} deriving Show
-- | Delayed implication constraints, arising from user-specified type sigs.
data DelayedCt = DelayedCt
{ dctSource :: LQName -- ^ Signature that gave rise to this constraint
, dctForall :: [TParam]
, dctAsmps :: [Prop]
, dctGoals :: [Goal]
} deriving Show
data Solved = Solved (Maybe Subst) [Goal] -- ^ Solved, assuming the sub-goals.
            | Unsolved -- ^ We could not solve the goal.
| Unsolvable -- ^ The goal can never be solved
deriving (Show)
data Warning = DefaultingKind P.TParam P.Kind
| DefaultingWildType P.Kind
| DefaultingTo Doc Type
deriving Show
-- | Various errors that might happen during type checking/inference
data Error = ErrorMsg Doc
-- ^ Just say this
| KindMismatch Kind Kind
-- ^ Expected kind, inferred kind
| TooManyTypeParams Int Kind
             -- ^ Number of extra parameters, kind of result
-- (which should not be of the form @_ -> _@)
| TooManyTySynParams QName Int
-- ^ Type-synonym, number of extra params
| TooFewTySynParams QName Int
-- ^ Type-synonym, number of missing params
| RepeatedTyParams [P.TParam]
-- ^ Type parameters with the same name (in definition)
| RepeatedDefinitions QName [Range]
-- ^ Multiple definitions for the same name
| RecursiveTypeDecls [LQName]
-- ^ The type synonym declarations are recursive
| UndefinedTypeSynonym QName
-- ^ Use of a type synonym that was not defined
| UndefinedVariable QName
-- ^ Use of a variable that was not defined
| UndefinedTypeParam QName
-- ^ Attempt to explicitly instantiate a non-existent param.
| MultipleTypeParamDefs QName [Range]
-- ^ Multiple definitions for the same type parameter
| TypeMismatch Type Type
-- ^ Expected type, inferred type
| RecursiveType Type Type
-- ^ Unification results in a recursive type
| UnsolvedGoal Goal
-- ^ A constraint that we could not solve
| UnsolvedDelcayedCt DelayedCt
-- ^ A constraint (with context) that we could not solve
| UnexpectedTypeWildCard
-- ^ Type wild cards are not allowed in this context
-- (e.g., definitions of type synonyms).
| TypeVariableEscaped Type [TVar]
-- ^ Unification variable depends on quantified variables
-- that are not in scope.
| NotForAll TVar Type
-- ^ Quantified type variables (of kind *) needs to
-- match the given type, so it does not work for all types.
| UnusableFunction QName Prop
-- ^ The given constraint causes the signature of the
-- function to be not-satisfiable.
| TooManyPositionalTypeParams
-- ^ Too many positional type arguments, in an explicit
-- type instantiation
| CannotMixPositionalAndNamedTypeParams
| AmbiguousType [QName]
deriving Show
-- | Information about how a constraint came to be, used in error reporting.
data ConstraintSource
= CtComprehension -- ^ Computing shape of list comprehension
| CtSplitPat -- ^ Use of a split pattern
| CtTypeSig -- ^ A type signature in a pattern or expression
  | CtInst Expr -- ^ Instantiation of this expression
| CtSelector
| CtExactType
| CtEnumeration
| CtDefaulting -- ^ Just defaulting on the command line
| CtPartialTypeFun TyFunName -- ^ Use of a partial type function.
deriving Show
data TyFunName = UserTyFun QName | BuiltInTyFun TFun
deriving Show
instance PP TyFunName where
ppPrec c (UserTyFun x) = ppPrec c x
ppPrec c (BuiltInTyFun x) = ppPrec c x
instance TVars ConstraintSource where
apSubst su src =
case src of
CtComprehension -> src
CtSplitPat -> src
CtTypeSig -> src
CtInst e -> CtInst (apSubst su e)
CtSelector -> src
CtExactType -> src
CtEnumeration -> src
CtDefaulting -> src
CtPartialTypeFun _ -> src
instance TVars Warning where
apSubst su warn =
case warn of
DefaultingKind {} -> warn
DefaultingWildType {} -> warn
DefaultingTo d ty -> DefaultingTo d (apSubst su ty)
instance FVS Warning where
fvs warn =
case warn of
DefaultingKind {} -> Set.empty
DefaultingWildType {} -> Set.empty
DefaultingTo _ ty -> fvs ty
instance TVars Error where
apSubst su err =
case err of
ErrorMsg _ -> err
KindMismatch {} -> err
TooManyTypeParams {} -> err
TooManyTySynParams {} -> err
TooFewTySynParams {} -> err
RepeatedTyParams {} -> err
RepeatedDefinitions {} -> err
RecursiveTypeDecls {} -> err
UndefinedTypeSynonym {} -> err
UndefinedVariable {} -> err
UndefinedTypeParam {} -> err
MultipleTypeParamDefs {} -> err
TypeMismatch t1 t2 -> TypeMismatch (apSubst su t1) (apSubst su t2)
RecursiveType t1 t2 -> RecursiveType (apSubst su t1) (apSubst su t2)
UnsolvedGoal g -> UnsolvedGoal (apSubst su g)
UnsolvedDelcayedCt g -> UnsolvedDelcayedCt (apSubst su g)
UnexpectedTypeWildCard -> err
TypeVariableEscaped t xs -> TypeVariableEscaped (apSubst su t) xs
NotForAll x t -> NotForAll x (apSubst su t)
UnusableFunction f p -> UnusableFunction f (apSubst su p)
TooManyPositionalTypeParams -> err
CannotMixPositionalAndNamedTypeParams -> err
AmbiguousType _ -> err
instance FVS Error where
fvs err =
case err of
ErrorMsg {} -> Set.empty
KindMismatch {} -> Set.empty
TooManyTypeParams {} -> Set.empty
TooManyTySynParams {} -> Set.empty
TooFewTySynParams {} -> Set.empty
RepeatedTyParams {} -> Set.empty
RepeatedDefinitions {} -> Set.empty
RecursiveTypeDecls {} -> Set.empty
UndefinedTypeSynonym {} -> Set.empty
UndefinedVariable {} -> Set.empty
UndefinedTypeParam {} -> Set.empty
MultipleTypeParamDefs {} -> Set.empty
TypeMismatch t1 t2 -> fvs (t1,t2)
RecursiveType t1 t2 -> fvs (t1,t2)
UnsolvedGoal g -> fvs g
UnsolvedDelcayedCt g -> fvs g
UnexpectedTypeWildCard -> Set.empty
TypeVariableEscaped t _ -> fvs t
NotForAll _ t -> fvs t
UnusableFunction _ p -> fvs p
TooManyPositionalTypeParams -> Set.empty
CannotMixPositionalAndNamedTypeParams -> Set.empty
AmbiguousType _ -> Set.empty
instance FVS Goal where
fvs g = fvs (goal g)
instance FVS DelayedCt where
fvs d = fvs (dctAsmps d, dctGoals d) `Set.difference`
Set.fromList (map tpVar (dctForall d))
instance TVars Goal where
apSubst su g = Goal { goalSource = apSubst su (goalSource g)
, goalRange = goalRange g
, goal = apSubst su (goal g)
}
instance TVars HasGoal where
apSubst su h = h { hasGoal = apSubst su (hasGoal h) }
instance TVars DelayedCt where
apSubst su g
| Set.null captured =
DelayedCt { dctSource = dctSource g
, dctForall = dctForall g
, dctAsmps = apSubst su1 (dctAsmps g)
, dctGoals = apSubst su1 (dctGoals g)
}
| otherwise = panic "Cryptol.TypeCheck.Subst.apSubst (DelayedCt)"
[ "Captured quantified variables:"
, "Substitution: " ++ show m1
, "Variables: " ++ show captured
, "Constraint: " ++ show g
]
where
used = fvs (dctAsmps g, map goal (dctGoals g)) `Set.difference`
Set.fromList (map tpVar (dctForall g))
m1 = Map.filterWithKey (\k _ -> k `Set.member` used) (suMap su)
su1 = S { suMap = m1, suDefaulting = suDefaulting su }
captured = Set.fromList (map tpVar (dctForall g)) `Set.intersection`
fvs (Map.elems m1)
-- | For use in error messages
cppKind :: Kind -> Doc
cppKind ki =
case ki of
KNum -> text "a numeric type"
KType -> text "a value type"
KProp -> text "a constraint"
_ -> pp ki
addTVarsDescs :: FVS t => NameMap -> t -> Doc -> Doc
addTVarsDescs nm t d
| Set.null vs = d
| otherwise = d $$ text "where" $$ vcat (map desc (Set.toList vs))
where
vs = Set.filter isFreeTV (fvs t)
desc v@(TVFree _ _ _ x) = ppWithNames nm v <+> text "is" <+> x
desc (TVBound {}) = empty
instance PP Warning where
ppPrec = ppWithNamesPrec IntMap.empty
instance PP Error where
ppPrec = ppWithNamesPrec IntMap.empty
instance PP (WithNames Warning) where
ppPrec _ (WithNames warn names) =
addTVarsDescs names warn $
case warn of
DefaultingKind x k ->
text "Assuming " <+> pp x <+> text "to have" <+> P.cppKind k
DefaultingWildType k ->
text "Assuming _ to have" <+> P.cppKind k
DefaultingTo d ty ->
text "Defaulting" <+> d $$ text "to" <+> ppWithNames names ty
instance PP (WithNames Error) where
ppPrec _ (WithNames err names) =
addTVarsDescs names err $
case err of
ErrorMsg msg -> msg
RecursiveType t1 t2 ->
nested (text "Matching would result in an infinite type.")
(text "The type: " <+> ppWithNames names t1 $$
text "occurs in:" <+> ppWithNames names t2)
UnexpectedTypeWildCard ->
nested (text "Wild card types are not allowed in this context")
(text "(e.g., they cannot be used in type synonyms).")
KindMismatch k1 k2 ->
nested (text "Incorrect type form.")
(text "Expected:" <+> cppKind k1 $$
text "Inferred:" <+> cppKind k2)
TooManyTypeParams extra k ->
nested (text "Malformed type.")
(text "Kind" <+> quotes (pp k) <+> text "is not a function," $$
text "but it was applied to" <+> pl extra "parameter" <> text ".")
TooManyTySynParams t extra ->
nested (text "Malformed type.")
(text "Type synonym" <+> nm t <+> text "was applied to" <+>
pl extra "extra parameter" <> text ".")
TooFewTySynParams t few ->
nested (text "Malformed type.")
(text "Type" <+> nm t <+> text "is missing" <+>
int few <+> text "parameters.")
RepeatedTyParams ps ->
nested (text "Different type parameters use the same name:")
(vmulti [ nm (P.tpName p) <+>
text "defined at" <+> mb (P.tpRange p) | p <- ps ] )
where mb Nothing = text "unknown location"
mb (Just x) = pp x
RepeatedDefinitions x ps ->
nested (text "Multiple definitions for the same name:")
(vmulti [ nm x <+> text "defined at" <+> pp p | p <- ps ])
RecursiveTypeDecls ts ->
nested (text "Recursive type declarations:")
(fsep $ punctuate comma $ map nm ts)
UndefinedTypeSynonym x ->
text "Type synonym" <+> nm x <+> text "is not defined."
UndefinedVariable x ->
text "Variable" <+> nm x <+> text "was not defined."
UndefinedTypeParam x ->
text "Type variable" <+> nm x <+> text "was not defined."
MultipleTypeParamDefs x ps ->
nested (text "Multiple definitions for the same type parameter"
<+> nm x <> text ":")
(vmulti [ text "defined at" <+> pp p | p <- ps ])
TypeMismatch t1 t2 ->
nested (text "Type mismatch:")
(text "Expected type:" <+> ppWithNames names t1 $$
text "Inferred type:" <+> ppWithNames names t2)
UnsolvedGoal g ->
nested (text "Unsolved constraint:") (ppWithNames names g)
UnsolvedDelcayedCt g ->
nested (text "Failed to validate user-specified signature.")
(ppWithNames names g)
TypeVariableEscaped t xs ->
nested (text "The type" <+> ppWithNames names t <+>
text "is not sufficiently polymorphic.")
(text "It may not depend on quantified variables:" <+>
sep (punctuate comma (map (ppWithNames names) xs)))
NotForAll x t ->
nested (text "Inferred type is not sufficiently polymorphic.")
(text "Quantified variable:" <+> ppWithNames names x $$
text "cannot match type:" <+> ppWithNames names t)
UnusableFunction f p ->
nested (text "The constraints in the type signature of"
<+> quotes (pp f) <+> text "are unsolvable.")
(text "Detected while analyzing constraint:" $$ ppWithNames names p)
TooManyPositionalTypeParams ->
text "Too many positional type-parameters in explicit type application"
CannotMixPositionalAndNamedTypeParams ->
text "Named and positional type applications may not be mixed."
AmbiguousType xs ->
text "The inferred type for" <+> commaSep (map pp xs)
<+> text "is ambiguous."
where
nested x y = x $$ nest 2 y
pl 1 x = text "1" <+> text x
pl n x = text (show n) <+> text x <> text "s"
nm x = text "`" <> pp x <> text "`"
vmulti = vcat . multi
multi [] = []
multi [x] = [x <> text "."]
multi [x,y] = [x <> text ", and", y <> text "." ]
multi (x : xs) = x <> text "," : multi xs
instance PP ConstraintSource where
ppPrec _ src =
case src of
CtComprehension -> text "list comprehension"
CtSplitPat -> text "split (#) pattern"
CtTypeSig -> text "type signature"
CtInst e -> text "use of" <+> ppUse e
CtSelector -> text "use of selector"
CtExactType -> text "matching types"
CtEnumeration -> text "list enumeration"
CtDefaulting -> text "defaulting"
CtPartialTypeFun f -> text "use of partial type function" <+> pp f
ppUse :: Expr -> Doc
ppUse expr =
case expr of
ECon ECDemote -> text "literal or demoted expression"
ECon ECInfFrom -> text "infinite enumeration"
ECon ECInfFromThen -> text "infinite enumeration (with step)"
ECon ECFromThen -> text "finite enumeration"
ECon ECFromTo -> text "finite enumeration"
ECon ECFromThenTo -> text "finite enumeration"
_ -> text "expression" <+> pp expr
instance PP (WithNames Goal) where
ppPrec _ (WithNames g names) =
(ppWithNames names (goal g)) $$
nest 2 (text "arising from" $$
pp (goalSource g) $$
text "at" <+> pp (goalRange g))
instance PP (WithNames DelayedCt) where
ppPrec _ (WithNames d names) =
sig $$ nest 2 (vars $$ asmps $$ vcat (map (ppWithNames ns1) (dctGoals d)))
where
sig = text "In the definition of" <+> quotes (pp (thing name)) <>
comma <+> text "at" <+> pp (srcRange name) <> colon
name = dctSource d
vars = case dctForall d of
[] -> empty
xs -> text "for any type" <+>
fsep (punctuate comma (map (ppWithNames ns1 ) xs))
asmps = case dctAsmps d of
[] -> empty
xs -> nest 2 (vcat (map (ppWithNames ns1) xs)) $$ text "=>"
ns1 = addTNames (dctForall d) names
instance PP Solved where
ppPrec _ res =
case res of
Solved mb gs -> text "solved" $$ nest 2 (suDoc $$ vcat (map (pp . goal) gs))
where suDoc = maybe empty pp mb
Unsolved -> text "unsolved"
Unsolvable -> text "unsolvable"
| TomMD/cryptol | src/Cryptol/TypeCheck/InferTypes.hs | bsd-3-clause | 17,735 | 0 | 19 | 6,026 | 4,285 | 2,140 | 2,145 | 352 | 7 |
{-# LANGUAGE CPP, DeriveDataTypeable #-}
{-# OPTIONS -Wall #-}
-- | An 'Annotation' that lets you call __syncthreads() before
-- or after a statement.
module Language.Paraiso.Annotation.SyncThreads (
Timing(..)
) where
import Data.Dynamic
import Language.Paraiso.Prelude
data Timing = Pre | Post
deriving (Eq, Ord, Typeable)
| nushio3/Paraiso | Language/Paraiso/Annotation/SyncThreads.hs | bsd-3-clause | 338 | 0 | 6 | 57 | 57 | 37 | 20 | 8 | 0 |
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE BangPatterns #-}
module Network.Wai.Handler.Warp.HTTP2.Worker (
Responder
, response
, worker
) where
#if __GLASGOW_HASKELL__ < 709
import Control.Applicative
import Data.Monoid (mempty)
#endif
import Control.Concurrent.STM
import Control.Exception (SomeException(..), AsyncException(..))
import qualified Control.Exception as E
import Control.Monad (when)
import Data.ByteString.Builder (byteString)
import qualified Network.HTTP.Types as H
import Network.HTTP2
import Network.HPACK
import Network.Wai
import Network.Wai.Handler.Warp.FileInfoCache
import Network.Wai.Handler.Warp.HTTP2.EncodeFrame
import Network.Wai.Handler.Warp.HTTP2.File
import Network.Wai.Handler.Warp.HTTP2.Manager
import Network.Wai.Handler.Warp.HTTP2.Types
import Network.Wai.Handler.Warp.IORef
import qualified Network.Wai.Handler.Warp.Response as R
import qualified Network.Wai.Handler.Warp.Settings as S
import qualified Network.Wai.Handler.Warp.Timeout as T
import Network.Wai.Handler.Warp.Types
import Network.Wai.Internal (Response(..), ResponseReceived(..))
----------------------------------------------------------------
-- | The wai definition is 'type Application = Request -> (Response -> IO ResponseReceived) -> IO ResponseReceived'.
-- This type implements the second argument (Response -> IO ResponseReceived)
-- with extra arguments.
type Responder = InternalInfo -> ValueTable ->
ThreadContinue -> Stream -> Request ->
Response -> IO ResponseReceived
-- | This function is passed to workers.
-- They also pass 'Response's from 'Application's to this function.
-- This function enqueues commands for the HTTP/2 sender.
response :: S.Settings -> Context -> Manager -> Responder
response settings Context{outputQ} mgr ii reqvt tconf strm req rsp = case rsp of
ResponseStream s0 hs0 strmbdy
| noBody s0 -> responseNoBody s0 hs0
| isHead -> responseNoBody s0 hs0
| otherwise -> responseStreaming s0 hs0 strmbdy
ResponseBuilder s0 hs0 b
| noBody s0 -> responseNoBody s0 hs0
| isHead -> responseNoBody s0 hs0
| otherwise -> responseBuilderBody s0 hs0 b
ResponseFile s0 hs0 p mp
| noBody s0 -> responseNoBody s0 hs0
| otherwise -> responseFileXXX s0 hs0 p mp
ResponseRaw _ _ -> error "HTTP/2 does not support ResponseRaw"
where
noBody = not . R.hasBody
!isHead = requestMethod req == H.methodHead
!logger = S.settingsLogger settings
!th = threadHandle ii
-- Ideally, log messages should be written when responses are
    -- actually sent. But there is no way to keep both good memory usage
    -- (avoiding a Request leak) and throughput. As a compromise,
    -- log messages are written here even when the window size of streams
    -- is 0.
responseNoBody s hs0 = toHeaderTable hs0 >>= responseNoBody' s
responseNoBody' s tbl = do
logger req s Nothing
setThreadContinue tconf True
let rspn = RspnNobody s tbl
out = ORspn strm rspn ii
enqueueOutput outputQ out
return ResponseReceived
responseBuilderBody s hs0 bdy = do
logger req s Nothing
setThreadContinue tconf True
tbl <- toHeaderTable hs0
let rspn = RspnBuilder s tbl bdy
out = ORspn strm rspn ii
enqueueOutput outputQ out
return ResponseReceived
responseFileXXX _ hs0 path Nothing = do
efinfo <- E.try $ getFileInfo ii path
case efinfo of
Left (_ex :: E.IOException) -> response404 hs0
Right finfo -> do
(rspths0,vt) <- toHeaderTable hs0
case conditionalRequest finfo rspths0 reqvt of
WithoutBody s -> responseNoBody s hs0
WithBody s rspths beg len -> responseFile2XX s (rspths,vt) path (Just (FilePart beg len (fileInfoSize finfo)))
responseFileXXX s0 hs0 path mpart = do
tbl <- toHeaderTable hs0
responseFile2XX s0 tbl path mpart
responseFile2XX s tbl path mpart
| isHead = do
logger req s Nothing
responseNoBody' s tbl
| otherwise = do
logger req s (filePartByteCount <$> mpart)
setThreadContinue tconf True
let rspn = RspnFile s tbl path mpart
out = ORspn strm rspn ii
enqueueOutput outputQ out
return ResponseReceived
response404 hs0 = responseBuilderBody s hs body
where
s = H.notFound404
hs = R.replaceHeader H.hContentType "text/plain; charset=utf-8" hs0
body = byteString "File not found"
responseStreaming s0 hs0 strmbdy = do
logger req s0 Nothing
-- We must not exit this WAI application.
        -- If the application exits, streaming would also be closed.
-- So, this work occupies this thread.
--
-- We need to increase the number of workers.
spawnAction mgr
        -- After this work, this thread stops to decrease
-- the number of workers.
setThreadContinue tconf False
-- Since 'StreamingBody' is loop, we cannot control it.
-- So, let's serialize 'Builder' with a designated queue.
tbq <- newTBQueueIO 10 -- fixme: hard coding: 10
tbl <- toHeaderTable hs0
let rspn = RspnStreaming s0 tbl tbq
out = ORspn strm rspn ii
enqueueOutput outputQ out
let push b = do
atomically $ writeTBQueue tbq (SBuilder b)
T.tickle th
flush = atomically $ writeTBQueue tbq SFlush
_ <- strmbdy push flush
atomically $ writeTBQueue tbq SFinish
deleteMyId mgr
return ResponseReceived
worker :: Context -> S.Settings -> Application -> Responder -> T.Manager -> IO ()
worker ctx@Context{inputQ,controlQ} set app responder tm = do
sinfo <- newStreamInfo
tcont <- newThreadContinue
E.bracket (T.registerKillThread tm) T.cancel $ go sinfo tcont
where
go sinfo tcont th = do
setThreadContinue tcont True
ex <- E.try $ do
T.pause th
inp@(Input strm req reqvt ii) <- atomically $ readTQueue inputQ
setStreamInfo sinfo inp
T.resume th
T.tickle th
app req $ responder ii reqvt tcont strm req
cont1 <- case ex of
Right ResponseReceived -> return True
Left e@(SomeException _)
-- killed by the local worker manager
| Just ThreadKilled <- E.fromException e -> return False
-- killed by the local timeout manager
| Just T.TimeoutThread <- E.fromException e -> do
cleanup sinfo Nothing
return True
| otherwise -> do
cleanup sinfo $ Just e
return True
cont2 <- getThreadContinue tcont
clearStreamInfo sinfo
when (cont1 && cont2) $ go sinfo tcont th
cleanup sinfo me = do
minp <- getStreamInfo sinfo
case minp of
Nothing -> return ()
Just (Input strm req _reqvt _ii) -> do
closed ctx strm Killed
let frame = resetFrame InternalError (streamNumber strm)
enqueueControl controlQ $ CFrame frame
case me of
Nothing -> return ()
Just e -> S.settingsOnException set (Just req) e
----------------------------------------------------------------
-- | It would be nice if responders could return values to workers.
-- Unfortunately, 'ResponseReceived' is already defined in WAI 2.0.
-- It is not wise to change this type.
-- So, a reference is shared by a responder and its worker.
-- The reference refers a value of this type as a return value.
-- If 'True', the worker continue to serve requests.
-- Otherwise, the worker get finished.
newtype ThreadContinue = ThreadContinue (IORef Bool)
{-# INLINE newThreadContinue #-}
newThreadContinue :: IO ThreadContinue
newThreadContinue = ThreadContinue <$> newIORef True
{-# INLINE setThreadContinue #-}
setThreadContinue :: ThreadContinue -> Bool -> IO ()
setThreadContinue (ThreadContinue ref) x = writeIORef ref x
{-# INLINE getThreadContinue #-}
getThreadContinue :: ThreadContinue -> IO Bool
getThreadContinue (ThreadContinue ref) = readIORef ref
----------------------------------------------------------------
-- | The type to store enough information for 'settingsOnException'.
newtype StreamInfo = StreamInfo (IORef (Maybe Input))
{-# INLINE newStreamInfo #-}
newStreamInfo :: IO StreamInfo
newStreamInfo = StreamInfo <$> newIORef Nothing
{-# INLINE clearStreamInfo #-}
clearStreamInfo :: StreamInfo -> IO ()
clearStreamInfo (StreamInfo ref) = writeIORef ref Nothing
{-# INLINE setStreamInfo #-}
setStreamInfo :: StreamInfo -> Input -> IO ()
setStreamInfo (StreamInfo ref) inp = writeIORef ref $ Just inp
{-# INLINE getStreamInfo #-}
getStreamInfo :: StreamInfo -> IO (Maybe Input)
getStreamInfo (StreamInfo ref) = readIORef ref
| utdemir/wai | warp/Network/Wai/Handler/Warp/HTTP2/Worker.hs | mit | 9,293 | 0 | 23 | 2,500 | 2,101 | 1,039 | 1,062 | 174 | 7 |
{-# LANGUAGE CPP #-}
#ifndef __GHCJS__
{-# LANGUAGE RankNTypes, TypeSynonymInstances, FlexibleInstances #-}
{-# OPTIONS_GHC -fno-warn-missing-methods #-}
#endif
module React.GHCJS
( currentDocument
, Document
, Element
, JSAny
, documentGetElementById
-- * GHCJS stubs
#ifdef __GHCJS__
, module X
#else
, ForeignRetention(..)
, JSRef(..)
, JSFun
, JSArray
, JSString
, FromJSString(..)
, ToJSString(..)
, FromJSRef(..)
, ToJSRef(..)
, castRef
, newObj
, setProp
, eqRef
, toArray
, syncCallback1
, syncCallback2
#endif
) where
-- Export useful things from GHCJS, or mocks of them if we're running in GHC
import qualified Data.Aeson as Aeson
import Data.String
import Data.Text (Text)
#ifdef __GHCJS__
import GHCJS.Foreign as X
import GHCJS.Marshal as X
import GHCJS.Types as X
import GHCJS.DOM (currentDocument)
import GHCJS.DOM.Types (Document, Element)
import GHCJS.DOM.Document (documentGetElementById)
#else
data Document
data Element
data JSRef a = JSRef
data JSString_
type JSFun = JSRef
type JSArray = JSRef
type JSString = JSRef JSString_
class ToJSString a where
toJSString :: a -> JSString
class FromJSString a where
fromJSString :: JSString -> a
class FromJSRef a where
fromJSRef :: JSRef a -> IO (Maybe a)
class ToJSRef a where
toJSRef :: a -> IO (JSRef a)
instance FromJSRef Aeson.Value
instance FromJSRef Int
instance (FromJSRef a, FromJSRef b) => FromJSRef (a, b)
instance FromJSRef (JSRef ())
instance ToJSRef Int
instance ToJSRef Aeson.Value
instance ToJSRef a => ToJSRef (Maybe a)
instance ToJSRef (JSRef a)
instance FromJSString String
instance FromJSString Text
instance FromJSString JSString
instance ToJSString String
instance ToJSString Text
instance ToJSString JSString
instance IsString JSString
currentDocument :: IO (Maybe Document)
currentDocument = undefined
documentGetElementById ::
-- (IsDocument self, ToJSString elementId) =>
self -> elementId -> IO (Maybe Element)
documentGetElementById = undefined
castRef :: JSRef a -> JSRef b
castRef _ = JSRef
newObj :: IO (JSRef a)
newObj = undefined
data ForeignRetention
= NeverRetain
| AlwaysRetain
| DomRetain (forall a. JSRef a)
eqRef :: JSRef a -> JSRef a -> Bool
eqRef = undefined
toArray :: [JSRef a] -> IO (JSArray a)
toArray = undefined
setProp :: ToJSString a => a -> JSRef b -> JSRef c -> IO ()
setProp = undefined
syncCallback1 :: ForeignRetention
-> Bool
-> (JSRef a -> IO b)
-> IO (JSFun (JSRef a -> IO b))
syncCallback1 = undefined
syncCallback2 :: ForeignRetention
-> Bool
-> (JSRef a -> JSRef b -> IO c)
-> IO (JSFun (JSRef a -> JSRef b -> IO c))
syncCallback2 = undefined
#endif
type JSAny = JSRef ()
| silky/react-haskell | src/React/GHCJS.hs | mit | 2,850 | 0 | 6 | 648 | 130 | 89 | 41 | -1 | -1 |
data Either a b = Left a | Right b
-- class Functor f where
-- fmap :: (a -> b) -> f a -> f b
-- instance Functor [] where
-- fmap f [] = []
-- fmap f (x:xs) = (f x):(fmap f xs)
-- instance Functor ((,) a) where
-- fmap f (x, y) = (x, f y)
class C1 a where
quux :: a -> Int
class C1 a => C2 a
class C2 a => C3 a
-- instance C1 Char
instance C2 Char -- Works because of C1 Char
instance C2 Int
instance C1 Int
instance C1 a => C1 [a]
instance C2 a => C2 [a] -- Works because C2 a ensures C1 a, which in turn ensures C1 [a]
-- instance (C1 a, C2 a) => C3 a
instance (C1 a, C1 b) => C1 (a, b)
--instance C1 (Maybe Int)
--instance C2 ([] a)
-- foo :: C1 a => a -> a
-- foo = undefined
-- bar1 x = quux ([[x]], "foo")
bar1 x = quux ("foo", "bar")
| themattchan/tandoori | input/class-inherit.hs | bsd-3-clause | 804 | 0 | 7 | 242 | 179 | 96 | 83 | -1 | -1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="hr-HR">
<title>Automation Framework</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | kingthorin/zap-extensions | addOns/automation/src/main/javahelp/org/zaproxy/addon/automation/resources/help_hr_HR/helpset_hr_HR.hs | apache-2.0 | 965 | 77 | 66 | 156 | 407 | 206 | 201 | -1 | -1 |
module LocalCommand
( localCommand, localCommand'
)
where
import Data.List (intercalate)
import Development.Shake (Action, CmdOption(..), CmdResult, command,
doesFileExist, liftIO, need)
import Development.Shake.FilePath ( (</>), (<.>), exe, searchPathSeparator,
isPathSeparator )
import System.Environment (getEnvironment)
import Dirs
import Paths
import Utils
-- | Run a command, but favor the local GHC installation.
-- Note that 'command' ends up calling 'createProcess', which in this case in
-- turn calls 'execvpe()' in C. That call isn't part of POSIX; it is supplied
-- by the unix library that ships with GHC. That code chooses to use the PATH
-- from the calling environment, not the environment supplied for the new
-- process. Hence, this code has to check whether the command should be run
-- from the local bin dir. It also needs to modify the PATH so that all
-- commands called from this command will see the bin dir as well.
localCommand :: CmdResult r => [CmdOption] -> String -> [String] -> Action r
localCommand opts cmdName args = do
need [ dir ghcLocalDir ]
let localBin = ghcLocalDir </> "bin"
absLocalBin <- liftIO $ absolutePath localBin
localPath <- addPath' [absLocalBin] []
let localCmd = absLocalBin </> cmdName
useLocalCmd <- if any isPathSeparator cmdName
then return False
else doesFileExist $ localCmd <.> exe
command (localPath : opts) (if useLocalCmd then localCmd else cmdName) args
localCommand' :: [CmdOption] -> String -> [String] -> Action ()
localCommand' = localCommand
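-- Hypothetical usage sketch (illustrative only; @haddock@ stands in for any
-- tool that may be provided by the local GHC installation):
--
-- > printHaddockVersion :: Action ()
-- > printHaddockVersion = localCommand' [] "haddock" ["--version"]
--
-- If the local GHC bin dir contains @haddock@, that copy is run; otherwise
-- the name is resolved through the PATH extended with that directory.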
-- | Fixed addPath
addPath' :: [String] -> [String] -> Action CmdOption
addPath' pre post = do
args <- liftIO getEnvironment
return $ Env $ map (onPath updatePath) $ ensurePath args
where
onPath f e@(a, b) | a == "PATH" = (a, f b)
| otherwise = e
updatePath p = intercalate [searchPathSeparator] $ pre ++ [p | p /= ""] ++ post
ensurePath e = maybe (("PATH", ""):e) (const e) $ lookup "PATH" e
| erantapaa/haskell-platform | hptool/src/LocalCommand.hs | bsd-3-clause | 2,078 | 0 | 11 | 506 | 511 | 275 | 236 | 32 | 3 |
module Util.FilterInput (filterInput,readSystem) where
import Control.Monad (when)
import Data.List
import System
import System.IO
import System.Posix
import Text.Printf
import Util.Gen
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as LBS
filterInput :: String -> [String] -> Handle -> IO String
filterInput prog args ifh = do
(rfd,wfd) <- createPipe
ifd <- handleToFd ifh
pid <- forkProcess (do dupAndClose ifd stdInput
dupAndClose wfd stdOutput
executeFile prog True args Nothing
putErrDie "exec failed")
closeFd wfd
str <- hGetContents =<< fdToHandle rfd
ret <- length str `seq` getProcessStatus True False pid
when (ret /= Just (Exited ExitSuccess)) $ putErrDie (prog ++ " exited abnormally")
return str
dupAndClose :: Fd -> Fd -> IO ()
dupAndClose from to = dupTo from to >> closeFd from
readSystem :: String -> [String] -> IO LBS.ByteString
readSystem prog args = do
(rfd,wfd) <- createPipe
pid <- forkProcess (do dupAndClose wfd stdOutput
executeFile prog True args Nothing
putErrDie "exec failed")
closeFd wfd
-- printf "readSystem %s %s\n" prog (show args)
str <- BS.hGetContents =<< fdToHandle rfd
ret <- getProcessStatus True False pid
when (ret /= Just (Exited ExitSuccess)) $ putErrDie (printf "'%s' exited abnormally (%s)" (intercalate " " (prog:args)) (show ret))
return $ LBS.fromChunks [str]
| hvr/jhc | src/Util/FilterInput.hs | mit | 1,545 | 0 | 14 | 401 | 494 | 241 | 253 | 36 | 1 |
module LiftOneLevel.A3 where
import LiftOneLevel.C3 (anotherFun)
import LiftOneLevel.D3 (sumSquares)
main = sumSquares [1..4] + anotherFun [1..4]
| RefactoringTools/HaRe | test/testdata/LiftOneLevel/A3.hs | bsd-3-clause | 150 | 0 | 7 | 20 | 51 | 29 | 22 | 4 | 1 |
module HasElem where
{-@ LIQUID "--no-termination" @-}
data L a = Nil | Cons a (L a)
{-@ measure hasElem @-}
hasElem :: Eq a => a -> L a -> Bool
hasElem x Nil = False
hasElem x (Cons y ys) = x == y || hasElem x ys
{-@ prop :: {v:Bool | Prop v <=> true} @-}
prop :: Bool
prop = hasElem 1 (Cons 1 Nil)
{-@ prop1 :: {v:Bool | Prop v <=> false} @-}
prop1 :: Bool
prop1 = hasElem 1 (Cons 2 Nil)
{-@ prop2 :: {v:Bool | Prop v <=> false} @-}
prop2 :: Bool
prop2 = hasElem 1 Nil
| ssaavedra/liquidhaskell | tests/pos/HasElem.hs | bsd-3-clause | 477 | 0 | 8 | 114 | 155 | 83 | 72 | 11 | 1 |
module HAD.Y2014.M03.D03.Solution where
import GHC.Exts (sortWith)
-- | Sort a list of lists of elements by the maximum of each list,
-- in ascending order
--
-- Point-free: easy and readable
-- Level: EASY
--
-- Examples:
-- >>> sortByMax [[1,10],[5,5]]
-- [[5,5],[1,10]]
-- >>> sortByMax []
-- []
--
-- sortByMax [[], [1,2]]
-- should throw an exception: no max for empty list
sortByMax :: Ord a => [[a]] -> [[a]]
sortByMax = sortWith maximum
| weima/1HAD | exercises/HAD/Y2014/M03/D03/Solution.hs | mit | 448 | 0 | 8 | 80 | 69 | 47 | 22 | 4 | 1 |
module Foundation where
import Prelude
import Yesod
import Yesod.Static
import Yesod.Auth
import Yesod.Auth.BrowserId
import Yesod.Auth.GoogleEmail
import Yesod.Default.Config
import Yesod.Default.Util (addStaticContentExternal)
import Network.HTTP.Conduit (Manager)
import qualified Settings
import Settings.Development (development)
import qualified Database.Persist
import Database.Persist.Sql (SqlPersistT)
import Settings.StaticFiles
import Settings (widgetFile, Extra (..))
import Model
import Text.Jasmine (minifym)
import Text.Hamlet (hamletFile)
import System.Log.FastLogger (Logger)
-- WebSocket: New imports
import qualified Network.WebSockets as WS
import Data.Map (Map)
import Control.Concurrent.MVar
import Yesod.Auth.Dummy
-- | The site argument for your application. This can be a good place to
-- keep settings and values requiring initialization before your application
-- starts running, such as database connections. Every handler will have
-- access to the data present here.
data App = App
{ settings :: AppConfig DefaultEnv Extra
, getStatic :: Static -- ^ Settings for static file serving.
, connPool :: Database.Persist.PersistConfigPool Settings.PersistConf -- ^ Database connection pool.
, httpManager :: Manager
, persistConfig :: Settings.PersistConf
, appLogger :: Logger
, appSessionBackend :: SessionBackend -- ^ WebSocket: Need SessionBackend available to process cookies in WebSocket handler
, appConnections :: MVar (Map UserId WS.Connection) -- ^ WebSocket: An MVar holding a Data.Map of the active WebSocket connections
}
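-- WebSocket: Hypothetical sketch (not part of the scaffolding) of how a
-- handler could use 'appConnections' to push a message to one signed-in
-- user. The name @notifyUser@ is illustrative, and the snippet assumes
-- 'Data.Text.Text' in scope plus @Data.Map@ imported qualified as @Map@.
--
-- > notifyUser :: UserId -> Text -> Handler ()
-- > notifyUser uid msg = do
-- >     connsVar <- fmap appConnections getYesod
-- >     conns    <- liftIO $ readMVar connsVar
-- >     case Map.lookup uid conns of
-- >         Just conn -> liftIO $ WS.sendTextData conn msg
-- >         Nothing   -> return ()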
-- Set up i18n messages. See the message folder.
mkMessage "App" "messages" "en"
-- This is where we define all of the routes in our application. For a full
-- explanation of the syntax, please see:
-- http://www.yesodweb.com/book/handler
--
-- This function does three things:
--
-- * Creates the route datatype AppRoute. Every valid URL in your
-- application can be represented as a value of this type.
-- * Creates the associated type:
-- type instance Route App = AppRoute
-- * Creates the value resourcesApp which contains information on the
-- resources declared below. This is used in Handler.hs by the call to
-- mkYesodDispatch
--
-- What this function does *not* do is create a YesodSite instance for
-- App. Creating that instance requires all of the handler functions
-- for our application to be in scope. However, the handler functions
-- usually require access to the AppRoute datatype. Therefore, we
-- split these actions into two functions and place them in separate files.
mkYesodData "App" $(parseRoutesFile "config/routes")
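-- As an illustration of the generated names (this route is hypothetical and
-- not part of config/routes): a line such as
--
-- > /echo EchoR GET
--
-- would add an @EchoR@ constructor to the route datatype and expect a
-- @getEchoR@ handler to be in scope when mkYesodDispatch is invoked.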
type Form x = Html -> MForm (HandlerT App IO) (FormResult x, Widget)
-- Please see the documentation for the Yesod typeclass. There are a number
-- of settings which can be configured by overriding methods here.
instance Yesod App where
approot = ApprootMaster $ appRoot . settings
    -- Store session data on the client in encrypted cookies;
    -- the default session idle timeout is 120 minutes.
makeSessionBackend site = return $ Just $ appSessionBackend site -- WebSocket: Use SessionBackend from App
defaultLayout widget = do
master <- getYesod
mmsg <- getMessage
-- We break up the default layout into two components:
-- default-layout is the contents of the body tag, and
-- default-layout-wrapper is the entire page. Since the final
-- value passed to hamletToRepHtml cannot be a widget, this allows
-- you to use normal widget features in default-layout.
pc <- widgetToPageContent $ do
$(combineStylesheets 'StaticR
[ css_normalize_css
, css_bootstrap_css
])
$(widgetFile "default-layout")
giveUrlRenderer $(hamletFile "templates/default-layout-wrapper.hamlet")
-- This is done to provide an optimization for serving static files from
-- a separate domain. Please see the staticRoot setting in Settings.hs
urlRenderOverride y (StaticR s) =
Just $ uncurry (joinPath y (Settings.staticRoot $ settings y)) $ renderRoute s
urlRenderOverride _ _ = Nothing
-- The page to be redirected to when authentication is required.
authRoute _ = Just $ AuthR LoginR
-- This function creates static content files in the static folder
-- and names them based on a hash of their content. This allows
-- expiration dates to be set far in the future without worry of
-- users receiving stale content.
addStaticContent =
addStaticContentExternal minifym genFileName Settings.staticDir (StaticR . flip StaticRoute [])
where
-- Generate a unique filename based on the content itself
genFileName lbs
| development = "autogen-" ++ base64md5 lbs
| otherwise = base64md5 lbs
-- Place Javascript at bottom of the body tag so the rest of the page loads first
jsLoader _ = BottomOfBody
-- What messages should be logged. The following includes all messages when
-- in development, and warnings and errors in production.
shouldLog _ _source level =
development || level == LevelWarn || level == LevelError
makeLogger = return . appLogger
-- | WebSocket: Pulled out so that we can initialize the SessionBackend while creating App instance
makeAppSessionBackend = defaultClientSessionBackend
(120 * 60) -- 120 minutes
"config/client_session_key.aes"
-- How to run database actions.
instance YesodPersist App where
type YesodPersistBackend App = SqlPersistT
runDB = defaultRunDB persistConfig connPool
instance YesodPersistRunner App where
getDBRunner = defaultGetDBRunner connPool
instance YesodAuth App where
type AuthId App = UserId
-- Where to send a user after successful login
loginDest _ = HomeR
-- Where to send a user after logout
logoutDest _ = HomeR
getAuthId creds = runDB $ do
x <- getBy $ UniqueUser $ credsIdent creds
case x of
Just (Entity uid _) -> return $ Just uid
Nothing -> do
fmap Just $ insert $ User (credsIdent creds) Nothing
-- You can add other plugins like BrowserID, email or OAuth here
authPlugins _ = [authBrowserId def, authGoogleEmail, authDummy] -- WebSocket: Added dummy authentication for example
authHttpManager = httpManager
-- This instance is required to use forms. You can modify renderMessage to
-- achieve customized and internationalized form validation messages.
instance RenderMessage App FormMessage where
renderMessage _ _ = defaultFormMessage
-- | Get the 'Extra' value, used to hold data from the settings.yml file.
getExtra :: Handler Extra
getExtra = fmap (appExtra . settings) getYesod
-- Note: previous versions of the scaffolding included a deliver function to
-- send emails. Unfortunately, there are too many different options for us to
-- give a reasonable default. Instead, the information is available on the
-- wiki:
--
-- https://github.com/yesodweb/yesod/wiki/Sending-email
| BJTerry/WebSockets-example | Foundation.hs | mit | 7,099 | 0 | 17 | 1,471 | 939 | 527 | 412 | -1 | -1 |
module Object
( Env, Params, Body, Obj(..)
, atom, eq, car, cdr, cons
) where
import qualified Data.Map as Map
data Env = Env { parent :: Maybe Env
, reftbl :: Map.Map Obj Obj} deriving (Show, Eq, Ord)
type Params = [Obj]
type Body = (Obj, [Obj])
data Obj = Number Int
| Bool Bool
| String String
| Nil
| Lambda Env Params Body
| Symbol String
| Cons Obj Obj deriving (Show, Eq, Ord)
atom :: Obj -> Obj
atom (Cons _ _) = Nil
atom _ = Bool True
eq :: Obj -> Obj -> Obj
eq x y = if x == y then Bool True else Nil
car :: Obj -> Obj
car (Cons x _) = x
cdr :: Obj -> Obj
cdr (Cons _ y) = y
cons :: Obj -> Obj -> Obj
cons x y = Cons x y
| sawaken/pure-lisp.hs | src/Object.hs | mit | 669 | 0 | 10 | 187 | 325 | 183 | 142 | 26 | 2 |