code (stringlengths 5 to 1.03M) | repo_name (stringlengths 5 to 90) | path (stringlengths 4 to 158) | license (stringclasses, 15 values) | size (int64, 5 to 1.03M) | n_ast_errors (int64, 0 to 53.9k) | ast_max_depth (int64, 2 to 4.17k) | n_whitespaces (int64, 0 to 365k) | n_ast_nodes (int64, 3 to 317k) | n_ast_terminals (int64, 1 to 171k) | n_ast_nonterminals (int64, 1 to 146k) | loc (int64, -1 to 37.3k) | cycloplexity (int64, -1 to 1.31k)
---|---|---|---|---|---|---|---|---|---|---|---|---
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Data.Monoid
import Data.Bifunctor
import Data.Either
import Test.Tasty
import Test.Tasty.HUnit
import Control.Monad
import Control.Applicative
import Control.Monad.Trans.Except
import qualified Data.Text as T
import qualified Data.Attoparsec.Text as A
import qualified Control.Foldl as L
import Control.Foldl.Transduce
import Control.Foldl.Transduce.Attoparsec
main :: IO ()
main = defaultMain tests
stripping :: A.Parser r -> [T.Text] -> Either (ParsingError T.Text) (r,T.Text)
stripping parser ts =
runExcept (L.foldM (transduceM' (stripParse parser) (L.generalize L.mconcat)) ts)
parsing :: A.Parser r -> [T.Text] -> Either (ParsingError T.Text) [r]
parsing parser ts =
runExcept (fmap snd (L.foldM (transduceM' (parses parser) (L.generalize L.list)) ts))
tests :: TestTree
tests =
testGroup "Tests"
[
testGroup "stripPrefix"
[ testCase "empty"
(assertEqual mempty
(Right ((),""))
(stripping (pure ()) [""]))
, testCase "empty2"
(assertEqual mempty
(Right ([],""))
(stripping (many (A.char 'c')) [""]))
, testCase "empty3"
(assertBool mempty
(isLeft (stripping (A.char 'c') [""])))
, testCase "acrossChunks"
(assertEqual mempty
(Right (1111::Int,"aaabb"))
(stripping (A.skipSpace *> A.decimal) [" "," 11", "11aaa", "bb"]))
, testCase "whole"
(assertEqual mempty
(Right ("cccc",""))
(stripping (many (A.char 'c')) ["cc","","cc"]))
, testCase "atEnd"
(assertEqual mempty
(Right (1111::Int,""))
(stripping (A.skipSpace *> A.decimal) [" "," 11", "", "11"]))
]
, testGroup "parses"
[ testCase "empty"
(assertEqual mempty
(Right ([]::[Int]))
(parsing (A.skipSpace *> A.decimal) []))
, testCase "chunks"
(assertEqual mempty
(Right ([1,22,3,4,5]::[Int]))
(parsing (A.skipSpace *> A.decimal) [""," ","1 2","","2",""," 3 4 5"]))
, testCase "whole"
(assertEqual mempty
(Right ([1111]::[Int]))
(parsing (A.skipSpace *> A.decimal) [" 1111"]))
]
]
| danidiaz/foldl-transduce-attoparsec | tests/tests.hs | bsd-3-clause | 2,494 | 0 | 18 | 830 | 860 | 474 | 386 | 64 | 1 |
{-# LANGUAGE FlexibleContexts, BangPatterns #-}
-- |Monadic Iteratees:
-- incremental input parsers, processors, and transformers
--
-- Iteratees for parsing binary data.
module Data.Iteratee.Binary (
-- * Types
Endian (..)
-- * Endian multi-byte iteratees
,endianRead2
,endianRead3
,endianRead3i
,endianRead4
,endianRead8
-- ** bytestring specializations
-- | In current versions of @iteratee@ there is no difference between the
-- bytestring specializations and polymorphic functions. They exist
-- for compatibility.
,readWord16be_bs
,readWord16le_bs
,readWord32be_bs
,readWord32le_bs
,readWord64be_bs
,readWord64le_bs
)
where
import Data.Iteratee.Base
import qualified Data.Iteratee.ListLike as I
import qualified Data.ListLike as LL
import qualified Data.ByteString as B
import Data.Word
import Data.Bits
import Data.Int
-- ------------------------------------------------------------------------
-- Binary Random IO Iteratees
-- Iteratees to read unsigned integers written in Big- or Little-endian ways
-- | Indicate endian-ness.
data Endian = MSB -- ^ Most Significant Byte is first (big-endian)
| LSB -- ^ Least Significant Byte is first (little-endian)
deriving (Eq, Ord, Show, Enum)
endianRead2
:: (Nullable s, LL.ListLike s Word8, Monad m)
=> Endian
-> Iteratee s m Word16
endianRead2 e = endianReadN e 2 word16'
{-# INLINE endianRead2 #-}
endianRead3
:: (Nullable s, LL.ListLike s Word8, Monad m)
=> Endian
-> Iteratee s m Word32
endianRead3 e = endianReadN e 3 (word32' . (0:))
{-# INLINE endianRead3 #-}
-- |Read 3 bytes in an endian manner. If the first bit is set (negative),
-- set the entire first byte so the Int32 will be negative as
-- well.
endianRead3i
:: (Nullable s, LL.ListLike s Word8, Monad m)
=> Endian
-> Iteratee s m Int32
endianRead3i e = do
c1 <- I.head
c2 <- I.head
c3 <- I.head
case e of
MSB -> return $ (((fromIntegral c1
`shiftL` 8) .|. fromIntegral c2)
`shiftL` 8) .|. fromIntegral c3
LSB ->
let m :: Int32
m = shiftR (shiftL (fromIntegral c3) 24) 8
in return $ (((fromIntegral c3
`shiftL` 8) .|. fromIntegral c2)
`shiftL` 8) .|. fromIntegral m
{-# INLINE endianRead3i #-}
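-- Worked example of the sign extension used in the LSB branch above
-- (illustrative values, added for clarity): the most significant byte is
-- pushed to the top of the Int32 and arithmetically shifted back down:
--   shiftR (shiftL (0xFF :: Int32) 24) 8 == -16777216   -- 0xFFFF0000
--   shiftR (shiftL (0x7F :: Int32) 24) 8 == 0x007F0000
-- so a 3-byte value whose top bit is set decodes to a negative Int32.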
endianRead4
:: (Nullable s, LL.ListLike s Word8, Monad m)
=> Endian
-> Iteratee s m Word32
endianRead4 e = endianReadN e 4 word32'
{-# INLINE endianRead4 #-}
endianRead8
:: (Nullable s, LL.ListLike s Word8, Monad m)
=> Endian
-> Iteratee s m Word64
endianRead8 e = endianReadN e 8 word64'
{-# INLINE endianRead8 #-}
-- This function does all the parsing work, depending upon provided arguments
endianReadN ::
(Nullable s, LL.ListLike s Word8, Monad m)
=> Endian
-> Int
-> ([Word8] -> b)
-> Iteratee s m b
endianReadN MSB n0 cnct = liftI (step n0 [])
where
step !n acc (Chunk c)
| LL.null c = liftI (step n acc)
| LL.length c >= n = let (this,next) = LL.splitAt n c
!result = cnct $ acc ++ LL.toList this
in idone result (Chunk next)
| otherwise = liftI (step (n - LL.length c) (acc ++ LL.toList c))
step !n acc (EOF Nothing) = icont (step n acc) (Just $ toException EofException)
step !n acc (EOF (Just e)) = icont (step n acc) (Just e)
endianReadN LSB n0 cnct = liftI (step n0 [])
where
step !n acc (Chunk c)
| LL.null c = liftI (step n acc)
| LL.length c >= n = let (this,next) = LL.splitAt n c
!result = cnct $ reverse (LL.toList this) ++ acc
in idone result (Chunk next)
| otherwise = liftI (step (n - LL.length c)
(reverse (LL.toList c) ++ acc))
step !n acc (EOF Nothing) = icont (step n acc)
(Just $ toException EofException)
step !n acc (EOF (Just e)) = icont (step n acc) (Just e)
{-# INLINE endianReadN #-}
-- As of now, the polymorphic code is as fast as the best specializations
-- I have found, so these just call out. They may be improved in the
-- future, or possibly deprecated.
-- JWL, 2012-01-16
readWord16be_bs :: Monad m => Iteratee B.ByteString m Word16
readWord16be_bs = endianRead2 MSB
{-# INLINE readWord16be_bs #-}
readWord16le_bs :: Monad m => Iteratee B.ByteString m Word16
readWord16le_bs = endianRead2 LSB
{-# INLINE readWord16le_bs #-}
readWord32be_bs :: Monad m => Iteratee B.ByteString m Word32
readWord32be_bs = endianRead4 MSB
{-# INLINE readWord32be_bs #-}
readWord32le_bs :: Monad m => Iteratee B.ByteString m Word32
readWord32le_bs = endianRead4 LSB
{-# INLINE readWord32le_bs #-}
readWord64be_bs :: Monad m => Iteratee B.ByteString m Word64
readWord64be_bs = endianRead8 MSB
{-# INLINE readWord64be_bs #-}
readWord64le_bs :: Monad m => Iteratee B.ByteString m Word64
readWord64le_bs = endianRead8 LSB
{-# INLINE readWord64le_bs #-}
word16' :: [Word8] -> Word16
word16' [c1,c2] = word16 c1 c2
word16' _ = error "iteratee: internal error in word16'"
word16 :: Word8 -> Word8 -> Word16
word16 c1 c2 = (fromIntegral c1 `shiftL` 8) .|. fromIntegral c2
{-# INLINE word16 #-}
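-- Illustrative values (added for clarity): word16 0x12 0x34 == 0x1234 and
-- word16' [0x12, 0x34] == 0x1234. The LSB branch of endianReadN reverses the
-- collected bytes before applying the same combiner, so the byte stream
-- 0x34, 0x12 read little-endian also yields 0x1234.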
word32' :: [Word8] -> Word32
word32' [c1,c2,c3,c4] = word32 c1 c2 c3 c4
word32' _ = error "iteratee: internal error in word32'"
word32 :: Word8 -> Word8 -> Word8 -> Word8 -> Word32
word32 c1 c2 c3 c4 =
(fromIntegral c1 `shiftL` 24) .|.
(fromIntegral c2 `shiftL` 16) .|.
(fromIntegral c3 `shiftL` 8) .|.
fromIntegral c4
{-# INLINE word32 #-}
word64' :: [Word8] -> Word64
word64' [c1,c2,c3,c4,c5,c6,c7,c8] = word64 c1 c2 c3 c4 c5 c6 c7 c8
word64' _ = error "iteratee: internal error in word64'"
{-# INLINE word64' #-}
word64
:: Word8 -> Word8 -> Word8 -> Word8
-> Word8 -> Word8 -> Word8 -> Word8
-> Word64
word64 c1 c2 c3 c4 c5 c6 c7 c8 =
(fromIntegral c1 `shiftL` 56) .|.
(fromIntegral c2 `shiftL` 48) .|.
(fromIntegral c3 `shiftL` 40) .|.
(fromIntegral c4 `shiftL` 32) .|.
(fromIntegral c5 `shiftL` 24) .|.
(fromIntegral c6 `shiftL` 16) .|.
(fromIntegral c7 `shiftL` 8) .|.
fromIntegral c8
{-# INLINE word64 #-}
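-- A minimal usage sketch (assuming the enumPure1Chunk and run combinators
-- exported by this version of the iteratee package; the byte values are
-- illustrative):
--   run =<< enumPure1Chunk (B.pack [0x12, 0x34]) readWord16be_bs  -- 0x1234
--   run =<< enumPure1Chunk (B.pack [0x34, 0x12]) readWord16le_bs  -- 0x1234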
| iteloo/tsuru-sample | iteratee-0.8.9.6/src/Data/Iteratee/Binary.hs | bsd-3-clause | 6,174 | 0 | 20 | 1,472 | 1,871 | 977 | 894 | 146 | 5 |
module Blog.Widgets.Twitter (boot_twitter, get_tweets, TwitterController ( poller, twitter_tid )) where
import Text.ParserCombinators.Parsec
import qualified Debug.Trace as D
import Network.HTTP
import Network.HTTP.Headers
import Network.URI ( parseURI )
import Data.Maybe ( fromJust )
import Data.List ( elemIndex, intersperse, isPrefixOf )
import Text.XHtml.Strict
import Control.Concurrent
import Control.Concurrent.Chan
import Control.Concurrent.MVar
import qualified Codec.Binary.Base64.String as B64
import qualified Blog.Constants as C
import Blog.FrontEnd.ContentAtoms (divid, _a)
import Blog.Widgets.JsonUtilities
import Blog.BackEnd.HttpPoller
data TRequest = GetTweets ( MVar String )
| UpdateTweets String
data TwitterController = TwitterController { request_channel :: Chan TRequest
, poller :: HttpPoller
, twitter_tid :: ThreadId }
boot_twitter :: String -> String -> Int -> IO TwitterController
boot_twitter user password count = do { let req = build_tweet_request user password count
; rc <- newChan
; p <- start_poller "Tweets" req (handle_body rc) (120 * 10^6)
; tid <- forkIO $ loop rc ""
; return $ TwitterController rc p tid }
loop :: Chan TRequest -> String -> IO ()
loop rc xh = do { req <- readChan rc
; case req of
GetTweets hb ->
putMVar hb xh >> loop rc xh
UpdateTweets xh' ->
loop rc xh' }
build_tweet_request :: String -> String -> Int -> Request
build_tweet_request user password count = Request uri GET heads ""
where
uri = fromJust $ parseURI $ "http://twitter.com/statuses/user_timeline/"
++ user ++ ".json?count=" ++ (show count)
heads = [ Header HdrAuthorization $ (++) "Basic " $ B64.encode $ user ++ ":" ++ password ]
handle_body :: Chan TRequest -> String -> IO ()
handle_body tc body = do { case parse_json body of
Right v ->
do { let texts = map uns $ una $ v </> "text"
; let times = map (convert_twitter_tstmp . uns) $ una $ v </> "created_at"
; let ids = map (show . unn) $ una $ v </> "id"
; update_tweets tc $ tweets_to_xhtml $ zip3 times texts ids }
Left err ->
D.putTraceMsg . show $ err }
send :: TwitterController -> TRequest -> IO ()
send tc = writeChan (request_channel tc)
update_tweets :: Chan TRequest -> String -> IO ()
update_tweets tc tweets = writeChan tc $ UpdateTweets tweets
get_tweets :: TwitterController -> IO String
get_tweets tc = do { hb <- newEmptyMVar
; send tc $ GetTweets hb
; takeMVar hb }
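-- Illustrative use of the controller (credentials and count are
-- placeholders):
--   tc <- boot_twitter "someuser" "secret" 5
--   xhtml <- get_tweets tc  -- rendered tweets, or "" before the first poll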
convert_twitter_tstmp :: String -> String
convert_twitter_tstmp ts = concat [ y, "-", mo', "-", d, "T", tm, "Z" ]
where
mo = take 3 $ drop 4 ts
mo' = pad $ 1 + ( fromJust $ elemIndex mo [ "Jan", "Feb", "Mar", "Apr", "May", "Jun"
, "Jul", "Aug", "Sep", "Oct", "Nov", "Dec" ] )
pad = \n -> if n <10 then ('0':show n) else show n
y = take 4 $ drop 26 ts
d = take 2 $ drop 8 ts
tm = take 8 $ drop 11 ts
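-- Worked example (the input follows the old Twitter "created_at" layout this
-- code assumes; the value itself is illustrative):
--   convert_twitter_tstmp "Mon Apr 06 22:19:45 +0000 2009"
--     == "2009-04-06T22:19:45Z"
-- i.e. the month name becomes a zero-padded number and the fields are
-- re-ordered into ISO 8601.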
tweets_to_xhtml :: [(String,String,String)] -> String
tweets_to_xhtml = showHtmlFragment . (divid "tweets") . (build_tweet_list "1970-01-01")
build_tweet_list :: String -> [(String,String,String)] -> Html
build_tweet_list _ [] = noHtml
build_tweet_list last_date ((d,t,tweet_id):dts)
= concatHtml [ if last_date `isPrefixOf` d then
noHtml
else
(p ! [theclass "tweet_group" ]) . stringToHtml $ date
, p ! [theclass "tweet"] $ concatHtml [ _a tweet_link time
, stringToHtml " "
, text ]
, build_tweet_list date dts ]
where
wrap st = (thespan ! [ theclass st ])
time_hunk = (take 9) . (drop 11)
time = wrap "tweet_stamp" $ stringToHtml . time_hunk $ d
date = take 10 d
text = wrap "tweet_text" $ primHtml . pre_process $ t
tweet_link = "http://twitter.com/" ++ C.twitter_user ++ "/statuses/"
++ tweet_id
pre_process :: String -> String
pre_process s = case parse pre_process_parser "" s of
Left err -> error . show $ err
Right v -> v
pre_process_parser :: Parser String
pre_process_parser = do { ts <- tok `sepBy` (many1 space)
; return $ concat . (intersperse " ") $ ts }
tok :: Parser String
tok = try http_link <|> try at_someone <|> word
word :: Parser String
word = many1 $ noneOf " "
at_someone :: Parser String
at_someone = do { char '@'
; s <- many1 $ noneOf " "
; return $ "<a href=\"http://twitter.com/" ++ (urlEncode s) ++ "\">@" ++ s ++ "</a>" }
http_link :: Parser String
http_link = do { string "http://"
; s <- many1 $ noneOf " "
; return $ "<a href=\"http://" ++ s ++ "\">" ++ s ++ "</a>" }
| prb/perpubplat | src/Blog/Widgets/Twitter.hs | bsd-3-clause | 5,455 | 19 | 18 | 1,922 | 1,607 | 852 | 755 | 106 | 2 |
module FreeDSL.BFS.Interpreter (
runBFSState
, runBFSFullState
, runBFS
) where
import Data.Hashable
--import Control.Monad
--import Data.Maybe (maybe)
import Control.Monad.Free (Free(..))
import PolyGraph.ReadOnly.Graph (AdjacencyIndex(..), neighborsOf)
import qualified FreeDSL.BFS.VTraversal as DSL
import qualified PolyGraph.Common.NonBlockingQueue as Q
import Control.Monad.State (State, execState, evalState, modify, get, put)
import qualified Data.HashMap.Strict as HM
import qualified Data.HashSet as HS
-- TODO need remainingNeighbors! all adjacent vertices need to be visited before being enqueued
-- this will start with (NoMore, Nothing, [], queue with root, emptySet, emptyMap)
-- State (currentObservation, Maybe workingFromV, workingNeighbors, queue, visited, storage)
runBFSState :: forall a g v e t r . (Hashable v, Eq v, AdjacencyIndex g v e t) =>
DSL.VTraversal a v r -> g -> HM.HashMap v a
runBFSState program g =
let initdata = (DSL.NoMore, Nothing, [], Q.emptyQueue, HS.empty, HM.empty)
(_,_,_,_,_,hm) = execState (interpretBSF g program) initdata
in hm
runBFSFullState :: forall a g v e t r . (Hashable v, Eq v, AdjacencyIndex g v e t) =>
DSL.VTraversal a v r -> g -> BSFState v a
runBFSFullState program g =
let initdata = (DSL.NoMore, Nothing, [], Q.emptyQueue, HS.empty, HM.empty)
in execState (interpretBSF g program) initdata
runBFS :: forall a b g v e t . (Hashable v, Eq v, AdjacencyIndex g v e t) =>
DSL.VTraversal a v b -> g -> b
runBFS program g = evalState (interpretBSF g program) (DSL.NoMore, Nothing, [], Q.emptyQueue, HS.empty, HM.empty)
type BSFState v a = (DSL.VObservation v, Maybe v, [v], Q.SimpleQueue v, HS.HashSet v, HM.HashMap v a)
interpretBSF :: forall a g v e t r . (Hashable v, Eq v, AdjacencyIndex g v e t) =>
g -> DSL.VTraversal a v r -> State (BSFState v a) r
interpretBSF g (Free (DSL.StartAt root next)) = (put (DSL.NoMore, Nothing, [], Q.enqueue root Q.emptyQueue, HS.singleton root, HM.empty)) >> interpretBSF g next
interpretBSF g (Free (DSL.NextVObs vNext)) = do
s <- get
let (obs,ns) = traversalHelper g s
put ns
interpretBSF g (vNext obs)
interpretBSF g (Free (DSL.CurrentVObs vNext)) = do
(o,_, _, _, _, _) <- get
interpretBSF g (vNext o)
interpretBSF g (Free (DSL.Put va next)) = (modify (putHelper va)) >> interpretBSF g next
interpretBSF g (Free (DSL.Get v aNext)) = do
(_, _, _, _, _, hm) <- get
let a = HM.lookup v hm
interpretBSF g (aNext a)
interpretBSF _ (Pure r) = return r
putHelper :: (Hashable v, Eq v) => (v, a) -> BSFState v a -> BSFState v a
putHelper (v0, a) (o, v, vs, queue, vlist, hm) = (o, v, vs, queue, vlist, HM.insert v0 a hm)
traversalHelper :: forall a g v e t . (AdjacencyIndex g v e t, Hashable v, Eq v) =>
g -> BSFState v a -> (DSL.VObservation v, BSFState v a)
-- a vertex is marked visited when it is enqueued, so all elements in the queue are already visited
traversalHelper g (_, Nothing, _, queue, visited, hm) =
let (newV, newQ) = Q.dequeue queue
neighborVs = HS.fromList . (neighborsOf g)
notVisitedVs v0 = HS.toList $ HS.difference (neighborVs v0) visited
in case newV of
Nothing -> (DSL.NoMore, (DSL.NoMore, Nothing, [], newQ, visited, hm))
Just v0 -> traversalHelper g (DSL.NoMore, newV, notVisitedVs v0, newQ, visited, hm)
traversalHelper g (_, Just _, [], queue, visited, hm) = traversalHelper g (DSL.NoMore, Nothing, [], queue, visited, hm)
traversalHelper _ (_, Just v0, v1:rest, queue, visited, hm) = (DSL.Observe v0 v1, (DSL.Observe v0 v1, Just v0, rest, Q.enqueue v1 queue, HS.insert v1 visited, hm))
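-- Illustrative sketch of a traversal program driving this interpreter,
-- written with the raw Free constructors matched above (the VTraversal
-- module presumably exposes friendlier smart constructors, not shown here):
--   firstStep :: DSL.VTraversal () String (DSL.VObservation String)
--   firstStep = Free (DSL.StartAt "root" (Free (DSL.NextVObs Pure)))
-- runBFS firstStep g seeds the queue with "root" and returns the first
-- observation: Observe "root" v for some unvisited neighbor v, or NoMore if
-- "root" has no neighbors in g.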
| rpeszek/GraphPlay | src/FreeDSL/BFS/Interpreter.hs | bsd-3-clause | 3,951 | 0 | 12 | 1,032 | 1,495 | 825 | 670 | -1 | -1 |
module Pear.Operator.Tree where
import Pear.Operator.Algebra
import Control.Monad.State.Lazy
import Data.Functor
data AST a b = UNode { uNode :: b, child :: AST a b}
| BNode { bNode :: b, lChild :: AST a b, rchild :: AST a b}
| Leaf { terminal :: b } deriving (Show)
-- Build an abstract syntax tree of arithmetic tokens
-- I would really like to refactor this to get rid of this tuple
-- state and use transformers or anything else.
type PearTree a = State ([AToken a], [AST a (AToken a)])
growTree :: PearTree a (AST a (AToken a))
growTree = do
toks <- fst <$> get
case toks of
[] -> error "unbalanced expression"
_ -> case (head toks) of
(Sym _) -> buildLeaf
(Bin _) -> buildBNode
(Un _) -> buildUNode
buildLeaf :: PearTree a (AST a (AToken a))
buildLeaf = do
leaf <- head . fst <$> get
modify (\(tks,trs) -> ((tail tks), (Leaf leaf):trs))
return (Leaf leaf)
buildBNode :: PearTree a (AST a (AToken a))
buildBNode = do
node <- head . fst <$> get
modify (\(tks, trs) -> (tail tks, trs))
child2 <- growTree
child1 <- growTree
let bNode = BNode node child1 child2
modify (\(tks, trs) -> (tks, bNode:trs))
return bNode
buildUNode :: PearTree a (AST a (AToken a))
buildUNode = do
node <- head . fst <$> get
modify (\(tks, trs) -> (tail tks, trs))
child <- growTree
let uNode = UNode node child
modify (\(tks, trs) -> (tks, uNode:trs))
return uNode
buildTree :: [AToken a] -> AST a (AToken a)
buildTree tokens = (head . snd) $ execState (growTree) (tokens, [])
evalTree :: AST a (AToken a) -> a
evalTree t = case t of
UNode (Un (Unary uop)) c -> uop (evalTree c)
BNode (Bin (Binary bop _ _)) c1 c2 -> bop (evalTree c1) (evalTree c2)
Leaf (Sym a) -> a
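-- Illustrative walk-through (the extra fields of Binary, defined in
-- Pear.Operator.Algebra, are elided): given the token stream
--   [Bin (Binary (-) _ _), Sym 2, Sym 1]   -- reversed postfix of "1 - 2"
-- growTree pops the binary token, builds the right child first
-- (Leaf (Sym 2)), then the left child (Leaf (Sym 1)), producing
--   BNode (Bin (Binary (-) _ _)) (Leaf (Sym 1)) (Leaf (Sym 2))
-- which evalTree reduces to 1 - 2.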
| Charlesetc/haskell-parsing | src/Pear/Operator/Tree.hs | bsd-3-clause | 1,756 | 0 | 14 | 419 | 806 | 419 | 387 | 46 | 4 |
import Data.Conduit.Shell
import Options.Applicative as OA
import Imessage
data Options = Options
{ buddy :: String
, message :: String
}
main :: IO ()
main = execParser opts >>= runMain
where
opts = OA.info (helper <*> parseOptions)
( fullDesc
<> progDesc "Interact with iMessage.app"
<> header "imessage, a cli to send messages to iMessage.app")
parseOptions :: Parser Options
parseOptions = Options
<$> strOption
( short 'b'
<> long "buddy"
<> metavar "NAME"
<> help "Who to send a message to." )
<*> strOption
( short 'm'
<> long "message"
<> metavar "CONTENT"
<> help "The message contents to be sent." )
runMain :: Options -> IO ()
runMain (Options b m) = (run (do echo imessageOsaScript $= osascript "-" b m))
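-- Illustrative invocation once built (flag values are examples only):
--   imessage --buddy "Alice" --message "hello from Haskell"
-- runMain then pipes the AppleScript in imessageOsaScript into `osascript -`,
-- passing the buddy and message as script arguments.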
| mitchty/imessage | Main.hs | bsd-3-clause | 971 | 0 | 12 | 376 | 235 | 118 | 117 | 26 | 1 |
import Data.Maybe (fromMaybe)
import Text.Read (readMaybe)
getInteger :: IO Integer
getInteger = fromMaybe 0 . readMaybe <$> getLine
main :: IO ()
main = do
putStrLn "Please input two number for add"
putStrLn . show =<< (+) <$> getInteger <*> getInteger
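-- Example behaviour (inputs are illustrative): entering 3 and 4 prints 7; a
-- line that readMaybe cannot parse as an Integer counts as 0, so entering
-- "abc" and 4 prints 4.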
| YoshikuniJujo/funpaala | samples/39_learn_io/additionA.hs | bsd-3-clause | 258 | 0 | 10 | 45 | 87 | 45 | 42 | 8 | 1 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
Loading interface files
-}
{-# LANGUAGE CPP #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module LoadIface (
-- Importing one thing
tcLookupImported_maybe, importDecl,
checkWiredInTyCon, ifCheckWiredInThing,
-- RnM/TcM functions
loadModuleInterface, loadModuleInterfaces,
loadSrcInterface, loadSrcInterface_maybe,
loadInterfaceForName, loadInterfaceForModule,
-- IfM functions
loadInterface,
loadSysInterface, loadUserInterface, loadPluginInterface,
findAndReadIface, readIface, -- Used when reading the module's old interface
loadDecls, -- Should move to TcIface and be renamed
initExternalPackageState,
moduleFreeHolesPrecise,
pprModIfaceSimple,
ifaceStats, pprModIface, showIface
) where
#include "HsVersions.h"
import GhcPrelude
import {-# SOURCE #-} TcIface( tcIfaceDecl, tcIfaceRules, tcIfaceInst,
tcIfaceFamInst, tcIfaceVectInfo,
tcIfaceAnnotations, tcIfaceCompleteSigs )
import DynFlags
import IfaceSyn
import IfaceEnv
import HscTypes
import BasicTypes hiding (SuccessFlag(..))
import TcRnMonad
import Constants
import PrelNames
import PrelInfo
import PrimOp ( allThePrimOps, primOpFixity, primOpOcc )
import MkId ( seqId )
import TysPrim ( funTyConName )
import Rules
import TyCon
import Annotations
import InstEnv
import FamInstEnv
import Name
import NameEnv
import Avail
import Module
import Maybes
import ErrUtils
import Finder
import UniqFM
import SrcLoc
import Outputable
import BinIface
import Panic
import Util
import FastString
import Fingerprint
import Hooks
import FieldLabel
import RnModIface
import UniqDSet
import Control.Monad
import Control.Exception
import Data.IORef
import System.FilePath
{-
************************************************************************
* *
* tcImportDecl is the key function for "faulting in" *
* imported things
* *
************************************************************************
The main idea is this. We are chugging along type-checking source code, and
find a reference to GHC.Base.map. We call tcLookupGlobal, which doesn't find
it in the EPS type envt. So it
1 loads GHC.Base.hi
2 gets the decl for GHC.Base.map
3 typechecks it via tcIfaceDecl
4 and adds it to the type env in the EPS
Note that DURING STEP 4, we may find that map's type mentions a type
constructor that also
Notice that for imported things we read the current version from the EPS
mutable variable. This is important in situations like
...$(e1)...$(e2)...
where the code that e1 expands to might import some defns that
also turn out to be needed by the code that e2 expands to.
-}
tcLookupImported_maybe :: Name -> TcM (MaybeErr MsgDoc TyThing)
-- Returns (Failed err) if we can't find the interface file for the thing
tcLookupImported_maybe name
= do { hsc_env <- getTopEnv
; mb_thing <- liftIO (lookupTypeHscEnv hsc_env name)
; case mb_thing of
Just thing -> return (Succeeded thing)
Nothing -> tcImportDecl_maybe name }
tcImportDecl_maybe :: Name -> TcM (MaybeErr MsgDoc TyThing)
-- Entry point for *source-code* uses of importDecl
tcImportDecl_maybe name
| Just thing <- wiredInNameTyThing_maybe name
= do { when (needWiredInHomeIface thing)
(initIfaceTcRn (loadWiredInHomeIface name))
-- See Note [Loading instances for wired-in things]
; return (Succeeded thing) }
| otherwise
= initIfaceTcRn (importDecl name)
importDecl :: Name -> IfM lcl (MaybeErr MsgDoc TyThing)
-- Get the TyThing for this Name from an interface file
-- It's not a wired-in thing -- the caller caught that
importDecl name
= ASSERT( not (isWiredInName name) )
do { traceIf nd_doc
-- Load the interface, which should populate the PTE
; mb_iface <- ASSERT2( isExternalName name, ppr name )
loadInterface nd_doc (nameModule name) ImportBySystem
; case mb_iface of {
Failed err_msg -> return (Failed err_msg) ;
Succeeded _ -> do
-- Now look it up again; this time we should find it
{ eps <- getEps
; case lookupTypeEnv (eps_PTE eps) name of
Just thing -> return $ Succeeded thing
Nothing -> let doc = whenPprDebug (found_things_msg eps $$ empty)
$$ not_found_msg
in return $ Failed doc
}}}
where
nd_doc = text "Need decl for" <+> ppr name
not_found_msg = hang (text "Can't find interface-file declaration for" <+>
pprNameSpace (occNameSpace (nameOccName name)) <+> ppr name)
2 (vcat [text "Probable cause: bug in .hi-boot file, or inconsistent .hi file",
text "Use -ddump-if-trace to get an idea of which file caused the error"])
found_things_msg eps =
hang (text "Found the following declarations in" <+> ppr (nameModule name) <> colon)
2 (vcat (map ppr $ filter is_interesting $ nameEnvElts $ eps_PTE eps))
where
is_interesting thing = nameModule name == nameModule (getName thing)
{-
************************************************************************
* *
Checks for wired-in things
* *
************************************************************************
Note [Loading instances for wired-in things]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We need to make sure that we have at least *read* the interface files
for any module with an instance decl or RULE that we might want.
* If the instance decl is an orphan, we have a whole separate mechanism
(loadOrphanModules)
* If the instance decl is not an orphan, then the act of looking at the
TyCon or Class will force in the defining module for the
TyCon/Class, and hence the instance decl
* BUT, if the TyCon is a wired-in TyCon, we don't really need its interface;
but we must make sure we read its interface in case it has instances or
rules. That is what LoadIface.loadWiredInHomeIface does. It's called
from TcIface.{tcImportDecl, checkWiredInTyCon, ifCheckWiredInThing}
* HOWEVER, only do this for TyCons. There are no wired-in Classes. There
are some wired-in Ids, but we don't want to load their interfaces. For
example, Control.Exception.Base.recSelError is wired in, but that module
is compiled late in the base library, and we don't want to force it to
load before it's been compiled!
All of this is done by the type checker. The renamer plays no role.
(It used to, but no longer.)
-}
checkWiredInTyCon :: TyCon -> TcM ()
-- Ensure that the home module of the TyCon (and hence its instances)
-- are loaded. See Note [Loading instances for wired-in things]
-- It might not be a wired-in tycon (see the calls in TcUnify),
-- in which case this is a no-op.
checkWiredInTyCon tc
| not (isWiredInName tc_name)
= return ()
| otherwise
= do { mod <- getModule
; traceIf (text "checkWiredInTyCon" <+> ppr tc_name $$ ppr mod)
; ASSERT( isExternalName tc_name )
when (mod /= nameModule tc_name)
(initIfaceTcRn (loadWiredInHomeIface tc_name))
-- Don't look for (non-existent) Float.hi when
-- compiling Float.hs, which mentions Float of course
-- A bit yukky to call initIfaceTcRn here
}
where
tc_name = tyConName tc
ifCheckWiredInThing :: TyThing -> IfL ()
-- Even though we are in an interface file, we want to make
-- sure the instances of a wired-in thing are loaded (imagine f :: Double -> Double)
-- Ditto want to ensure that RULES are loaded too
-- See Note [Loading instances for wired-in things]
ifCheckWiredInThing thing
= do { mod <- getIfModule
-- Check whether we are typechecking the interface for this
-- very module. E.g when compiling the base library in --make mode
-- we may typecheck GHC.Base.hi. At that point, GHC.Base is not in
-- the HPT, so without the test we'll demand-load it into the PIT!
-- C.f. the same test in checkWiredInTyCon above
; let name = getName thing
; ASSERT2( isExternalName name, ppr name )
when (needWiredInHomeIface thing && mod /= nameModule name)
(loadWiredInHomeIface name) }
needWiredInHomeIface :: TyThing -> Bool
-- Only for TyCons; see Note [Loading instances for wired-in things]
needWiredInHomeIface (ATyCon {}) = True
needWiredInHomeIface _ = False
{-
************************************************************************
* *
loadSrcInterface, loadOrphanModules, loadInterfaceForName
These three are called from TcM-land
* *
************************************************************************
-}
-- | Load the interface corresponding to an @import@ directive in
-- source code. On a failure, fail in the monad with an error message.
loadSrcInterface :: SDoc
-> ModuleName
-> IsBootInterface -- {-# SOURCE #-} ?
-> Maybe FastString -- "package", if any
-> RnM ModIface
loadSrcInterface doc mod want_boot maybe_pkg
= do { res <- loadSrcInterface_maybe doc mod want_boot maybe_pkg
; case res of
Failed err -> failWithTc err
Succeeded iface -> return iface }
-- | Like 'loadSrcInterface', but returns a 'MaybeErr'.
loadSrcInterface_maybe :: SDoc
-> ModuleName
-> IsBootInterface -- {-# SOURCE #-} ?
-> Maybe FastString -- "package", if any
-> RnM (MaybeErr MsgDoc ModIface)
loadSrcInterface_maybe doc mod want_boot maybe_pkg
-- We must first find which Module this import refers to. This involves
-- calling the Finder, which as a side effect will search the filesystem
-- and create a ModLocation. If successful, loadIface will read the
-- interface; it will call the Finder again, but the ModLocation will be
-- cached from the first search.
= do { hsc_env <- getTopEnv
; res <- liftIO $ findImportedModule hsc_env mod maybe_pkg
; case res of
Found _ mod -> initIfaceTcRn $ loadInterface doc mod (ImportByUser want_boot)
-- TODO: Make sure this error message is good
err -> return (Failed (cannotFindModule (hsc_dflags hsc_env) mod err)) }
-- | Load interface directly for a fully qualified 'Module'. (This is a fairly
-- rare operation, but in particular it is used to load orphan modules
-- in order to pull their instances into the global package table and to
-- handle some operations in GHCi).
loadModuleInterface :: SDoc -> Module -> TcM ModIface
loadModuleInterface doc mod = initIfaceTcRn (loadSysInterface doc mod)
-- | Load interfaces for a collection of modules.
loadModuleInterfaces :: SDoc -> [Module] -> TcM ()
loadModuleInterfaces doc mods
| null mods = return ()
| otherwise = initIfaceTcRn (mapM_ load mods)
where
load mod = loadSysInterface (doc <+> parens (ppr mod)) mod
-- | Loads the interface for a given Name.
-- Should only be called for an imported name;
-- otherwise loadSysInterface may not find the interface
loadInterfaceForName :: SDoc -> Name -> TcRn ModIface
loadInterfaceForName doc name
= do { when debugIsOn $ -- Check pre-condition
do { this_mod <- getModule
; MASSERT2( not (nameIsLocalOrFrom this_mod name), ppr name <+> parens doc ) }
; ASSERT2( isExternalName name, ppr name )
initIfaceTcRn $ loadSysInterface doc (nameModule name) }
-- | Loads the interface for a given Module.
loadInterfaceForModule :: SDoc -> Module -> TcRn ModIface
loadInterfaceForModule doc m
= do
-- Should not be called with this module
when debugIsOn $ do
this_mod <- getModule
MASSERT2( this_mod /= m, ppr m <+> parens doc )
initIfaceTcRn $ loadSysInterface doc m
{-
*********************************************************
* *
loadInterface
The main function to load an interface
for an imported module, and put it in
the External Package State
* *
*********************************************************
-}
-- | An 'IfM' function to load the home interface for a wired-in thing,
-- so that we're sure that we see its instance declarations and rules
-- See Note [Loading instances for wired-in things]
loadWiredInHomeIface :: Name -> IfM lcl ()
loadWiredInHomeIface name
= ASSERT( isWiredInName name )
do _ <- loadSysInterface doc (nameModule name); return ()
where
doc = text "Need home interface for wired-in thing" <+> ppr name
------------------
-- | Loads a system interface and throws an exception if it fails
loadSysInterface :: SDoc -> Module -> IfM lcl ModIface
loadSysInterface doc mod_name = loadInterfaceWithException doc mod_name ImportBySystem
------------------
-- | Loads a user interface and throws an exception if it fails. The first parameter indicates
-- whether we should import the boot variant of the module
loadUserInterface :: Bool -> SDoc -> Module -> IfM lcl ModIface
loadUserInterface is_boot doc mod_name
= loadInterfaceWithException doc mod_name (ImportByUser is_boot)
loadPluginInterface :: SDoc -> Module -> IfM lcl ModIface
loadPluginInterface doc mod_name
= loadInterfaceWithException doc mod_name ImportByPlugin
------------------
-- | A wrapper for 'loadInterface' that throws an exception if it fails
loadInterfaceWithException :: SDoc -> Module -> WhereFrom -> IfM lcl ModIface
loadInterfaceWithException doc mod_name where_from
= withException (loadInterface doc mod_name where_from)
------------------
loadInterface :: SDoc -> Module -> WhereFrom
-> IfM lcl (MaybeErr MsgDoc ModIface)
-- loadInterface looks in both the HPT and PIT for the required interface
-- If not found, it loads it, and puts it in the PIT (always).
-- If it can't find a suitable interface file, we
-- a) modify the PackageIfaceTable to have an empty entry
-- (to avoid repeated complaints)
-- b) return (Left message)
--
-- It's not necessarily an error for there not to be an interface
-- file -- perhaps the module has changed, and that interface
-- is no longer used
loadInterface doc_str mod from
| isHoleModule mod
-- Hole modules get special treatment
= do dflags <- getDynFlags
-- Redo search for our local hole module
loadInterface doc_str (mkModule (thisPackage dflags) (moduleName mod)) from
| otherwise
= do { -- Read the state
(eps,hpt) <- getEpsAndHpt
; gbl_env <- getGblEnv
; traceIf (text "Considering whether to load" <+> ppr mod <+> ppr from)
-- Check whether we have the interface already
; dflags <- getDynFlags
; case lookupIfaceByModule dflags hpt (eps_PIT eps) mod of {
Just iface
-> return (Succeeded iface) ; -- Already loaded
-- The (src_imp == mi_boot iface) test checks that the already-loaded
-- interface isn't a boot iface. This can conceivably happen,
-- if an earlier import had a before we got to real imports. I think.
_ -> do {
-- READ THE MODULE IN
; read_result <- case (wantHiBootFile dflags eps mod from) of
Failed err -> return (Failed err)
Succeeded hi_boot_file ->
-- Stoutly warn against an EPS-updating import
-- of one's own boot file! (one-shot only)
--See Note [Do not update EPS with your own hi-boot]
-- in MkIface.
WARN( hi_boot_file &&
fmap fst (if_rec_types gbl_env) == Just mod,
ppr mod )
computeInterface doc_str hi_boot_file mod
; case read_result of {
Failed err -> do
{ let fake_iface = emptyModIface mod
; updateEps_ $ \eps ->
eps { eps_PIT = extendModuleEnv (eps_PIT eps) (mi_module fake_iface) fake_iface }
-- Not found, so add an empty iface to
-- the EPS map so that we don't look again
; return (Failed err) } ;
-- Found and parsed!
-- We used to have a sanity check here that looked for:
-- * System importing ..
-- * a home package module ..
-- * that we know nothing about (mb_dep == Nothing)!
--
-- But this is no longer valid because thNameToGhcName allows users to
-- cause the system to load arbitrary interfaces (by supplying an appropriate
-- Template Haskell original-name).
Succeeded (iface, loc) ->
let
loc_doc = text loc
in
initIfaceLcl (mi_semantic_module iface) loc_doc (mi_boot iface) $ do
-- Load the new ModIface into the External Package State
-- Even home-package interfaces loaded by loadInterface
-- (which only happens in OneShot mode; in Batch/Interactive
-- mode, home-package modules are loaded one by one into the HPT)
-- are put in the EPS.
--
-- The main thing is to add the ModIface to the PIT, but
-- we also take the
-- IfaceDecls, IfaceClsInst, IfaceFamInst, IfaceRules, IfaceVectInfo
-- out of the ModIface and put them into the big EPS pools
-- NB: *first* we do loadDecl, so that the provenance of all the locally-defined
--- names is done correctly (notably, whether this is an .hi file or .hi-boot file).
-- If we do loadExport first the wrong info gets into the cache (unless we
-- explicitly tag each export which seems a bit of a bore)
; ignore_prags <- goptM Opt_IgnoreInterfacePragmas
; new_eps_decls <- loadDecls ignore_prags (mi_decls iface)
; new_eps_insts <- mapM tcIfaceInst (mi_insts iface)
; new_eps_fam_insts <- mapM tcIfaceFamInst (mi_fam_insts iface)
; new_eps_rules <- tcIfaceRules ignore_prags (mi_rules iface)
; new_eps_anns <- tcIfaceAnnotations (mi_anns iface)
; new_eps_vect_info <- tcIfaceVectInfo mod (mkNameEnv new_eps_decls) (mi_vect_info iface)
; new_eps_complete_sigs <- tcIfaceCompleteSigs (mi_complete_sigs iface)
; let { final_iface = iface {
mi_decls = panic "No mi_decls in PIT",
mi_insts = panic "No mi_insts in PIT",
mi_fam_insts = panic "No mi_fam_insts in PIT",
mi_rules = panic "No mi_rules in PIT",
mi_anns = panic "No mi_anns in PIT"
}
}
; updateEps_ $ \ eps ->
if elemModuleEnv mod (eps_PIT eps) || is_external_sig dflags iface
then eps else
eps {
eps_PIT = extendModuleEnv (eps_PIT eps) mod final_iface,
eps_PTE = addDeclsToPTE (eps_PTE eps) new_eps_decls,
eps_rule_base = extendRuleBaseList (eps_rule_base eps)
new_eps_rules,
eps_complete_matches
= extendCompleteMatchMap
(eps_complete_matches eps)
new_eps_complete_sigs,
eps_inst_env = extendInstEnvList (eps_inst_env eps)
new_eps_insts,
eps_fam_inst_env = extendFamInstEnvList (eps_fam_inst_env eps)
new_eps_fam_insts,
eps_vect_info = plusVectInfo (eps_vect_info eps)
new_eps_vect_info,
eps_ann_env = extendAnnEnvList (eps_ann_env eps)
new_eps_anns,
eps_mod_fam_inst_env
= let
fam_inst_env =
extendFamInstEnvList emptyFamInstEnv
new_eps_fam_insts
in
extendModuleEnv (eps_mod_fam_inst_env eps)
mod
fam_inst_env,
eps_stats = addEpsInStats (eps_stats eps)
(length new_eps_decls)
(length new_eps_insts)
(length new_eps_rules) }
; return (Succeeded final_iface)
}}}}
-- | Returns @True@ if a 'ModIface' comes from an external package.
-- In this case, we should NOT load it into the EPS; the entities
-- should instead come from the local merged signature interface.
is_external_sig :: DynFlags -> ModIface -> Bool
is_external_sig dflags iface =
-- It's a signature iface...
mi_semantic_module iface /= mi_module iface &&
-- and it's not from the local package
moduleUnitId (mi_module iface) /= thisPackage dflags
-- | This is an improved version of 'findAndReadIface' which can also
-- handle the case when a user requests @p[A=<B>]:M@ but we only
-- have an interface for @p[A=<A>]:M@ (the indefinite interface).
-- If we are not trying to build code, we load the interface we have,
-- *instantiating it* according to how the holes are specified.
-- (Of course, if we're actually building code, this is a hard error.)
--
-- In the presence of holes, 'computeInterface' has an important invariant:
-- to load module M, its set of transitively reachable requirements must
-- have an up-to-date local hi file for that requirement. Note that if
-- we are loading the interface of a requirement, this does not
-- apply to the requirement itself; e.g., @p[A=<A>]:A@ does not require
-- A.hi to be up-to-date (and indeed, we MUST NOT attempt to read A.hi, unless
-- we are actually typechecking p.)
computeInterface ::
SDoc -> IsBootInterface -> Module
-> TcRnIf gbl lcl (MaybeErr MsgDoc (ModIface, FilePath))
computeInterface doc_str hi_boot_file mod0 = do
MASSERT( not (isHoleModule mod0) )
dflags <- getDynFlags
case splitModuleInsts mod0 of
(imod, Just indef) | not (unitIdIsDefinite (thisPackage dflags)) -> do
r <- findAndReadIface doc_str imod mod0 hi_boot_file
case r of
Succeeded (iface0, path) -> do
hsc_env <- getTopEnv
r <- liftIO $
rnModIface hsc_env (indefUnitIdInsts (indefModuleUnitId indef))
Nothing iface0
case r of
Right x -> return (Succeeded (x, path))
Left errs -> liftIO . throwIO . mkSrcErr $ errs
Failed err -> return (Failed err)
(mod, _) ->
findAndReadIface doc_str mod mod0 hi_boot_file
-- | Compute the signatures which must be compiled in order to
-- load the interface for a 'Module'. The output of this function
-- is always a subset of 'moduleFreeHoles'; it is more precise
-- because in signature @p[A=<A>,B=<B>]:B@, although the free holes
-- are A and B, B might not depend on A at all!
--
-- If this is invoked on a signature, this does NOT include the
-- signature itself; e.g. precise free module holes of
-- @p[A=<A>,B=<B>]:B@ never includes B.
moduleFreeHolesPrecise
:: SDoc -> Module
-> TcRnIf gbl lcl (MaybeErr MsgDoc (UniqDSet ModuleName))
moduleFreeHolesPrecise doc_str mod
| moduleIsDefinite mod = return (Succeeded emptyUniqDSet)
| otherwise =
case splitModuleInsts mod of
(imod, Just indef) -> do
let insts = indefUnitIdInsts (indefModuleUnitId indef)
traceIf (text "Considering whether to load" <+> ppr mod <+>
text "to compute precise free module holes")
(eps, hpt) <- getEpsAndHpt
dflags <- getDynFlags
case tryEpsAndHpt dflags eps hpt `firstJust` tryDepsCache eps imod insts of
Just r -> return (Succeeded r)
Nothing -> readAndCache imod insts
(_, Nothing) -> return (Succeeded emptyUniqDSet)
where
tryEpsAndHpt dflags eps hpt =
fmap mi_free_holes (lookupIfaceByModule dflags hpt (eps_PIT eps) mod)
tryDepsCache eps imod insts =
case lookupInstalledModuleEnv (eps_free_holes eps) imod of
Just ifhs -> Just (renameFreeHoles ifhs insts)
_otherwise -> Nothing
readAndCache imod insts = do
mb_iface <- findAndReadIface (text "moduleFreeHolesPrecise" <+> doc_str) imod mod False
case mb_iface of
Succeeded (iface, _) -> do
let ifhs = mi_free_holes iface
-- Cache it
updateEps_ (\eps ->
eps { eps_free_holes = extendInstalledModuleEnv (eps_free_holes eps) imod ifhs })
return (Succeeded (renameFreeHoles ifhs insts))
Failed err -> return (Failed err)
wantHiBootFile :: DynFlags -> ExternalPackageState -> Module -> WhereFrom
-> MaybeErr MsgDoc IsBootInterface
-- Figure out whether we want Foo.hi or Foo.hi-boot
wantHiBootFile dflags eps mod from
= case from of
ImportByUser usr_boot
| usr_boot && not this_package
-> Failed (badSourceImport mod)
| otherwise -> Succeeded usr_boot
ImportByPlugin
-> Succeeded False
ImportBySystem
| not this_package -- If the module to be imported is not from this package
-> Succeeded False -- don't look it up in eps_is_boot, because that is keyed
-- on the ModuleName of *home-package* modules only.
-- We never import boot modules from other packages!
| otherwise
-> case lookupUFM (eps_is_boot eps) (moduleName mod) of
Just (_, is_boot) -> Succeeded is_boot
Nothing -> Succeeded False
-- The boot-ness of the requested interface,
-- based on the dependencies in directly-imported modules
where
this_package = thisPackage dflags == moduleUnitId mod
badSourceImport :: Module -> SDoc
badSourceImport mod
= hang (text "You cannot {-# SOURCE #-} import a module from another package")
2 (text "but" <+> quotes (ppr mod) <+> ptext (sLit "is from package")
<+> quotes (ppr (moduleUnitId mod)))
-----------------------------------------------------
-- Loading type/class/value decls
-- We pass the full Module name here, replete with
-- its package info, so that we can build a Name for
-- each binder with the right package info in it
-- All subsequent lookups, including crucially lookups during typechecking
-- the declaration itself, will find the fully-glorious Name
--
-- We handle ATs specially. They are not main declarations, but also not
-- implicit things (in particular, adding them to `implicitTyThings' would mess
-- things up in the renaming/type checking of source programs).
-----------------------------------------------------
addDeclsToPTE :: PackageTypeEnv -> [(Name,TyThing)] -> PackageTypeEnv
addDeclsToPTE pte things = extendNameEnvList pte things
loadDecls :: Bool
-> [(Fingerprint, IfaceDecl)]
-> IfL [(Name,TyThing)]
loadDecls ignore_prags ver_decls
= do { thingss <- mapM (loadDecl ignore_prags) ver_decls
; return (concat thingss)
}
loadDecl :: Bool -- Don't load pragmas into the decl pool
-> (Fingerprint, IfaceDecl)
-> IfL [(Name,TyThing)] -- The list can be poked eagerly, but the
-- TyThings are forkM'd thunks
loadDecl ignore_prags (_version, decl)
= do { -- Populate the name cache with final versions of all
-- the names associated with the decl
let main_name = ifName decl
-- Typecheck the thing, lazily
-- NB. Firstly, the laziness is there in case we never need the
-- declaration (in one-shot mode), and secondly it is there so that
-- we don't look up the occurrence of a name before calling mk_new_bndr
-- on the binder. This is important because we must get the right name
-- which includes its nameParent.
; thing <- forkM doc $ do { bumpDeclStats main_name
; tcIfaceDecl ignore_prags decl }
-- Populate the type environment with the implicitTyThings too.
--
-- Note [Tricky iface loop]
-- ~~~~~~~~~~~~~~~~~~~~~~~~
-- Summary: The delicate point here is that 'mini-env' must be
-- buildable from 'thing' without demanding any of the things
-- 'forkM'd by tcIfaceDecl.
--
-- In more detail: Consider the example
-- data T a = MkT { x :: T a }
-- The implicitTyThings of T are: [ <datacon MkT>, <selector x>]
-- (plus their workers, wrappers, coercions etc etc)
--
-- We want to return an environment
-- [ "MkT" -> <datacon MkT>, "x" -> <selector x>, ... ]
-- (where the "MkT" is the *Name* associated with MkT, etc.)
--
-- We do this by mapping the implicit_names to the associated
-- TyThings. By the invariant on ifaceDeclImplicitBndrs and
-- implicitTyThings, we can use getOccName on the implicit
-- TyThings to make this association: each Name's OccName should
-- be the OccName of exactly one implicitTyThing. So the key is
-- to define a "mini-env"
--
-- [ 'MkT' -> <datacon MkT>, 'x' -> <selector x>, ... ]
-- where the 'MkT' here is the *OccName* associated with MkT.
--
-- However, there is a subtlety: due to how type checking needs
-- to be staged, we can't poke on the forkM'd thunks inside the
-- implicitTyThings while building this mini-env.
-- If we poke these thunks too early, two problems could happen:
-- (1) When processing mutually recursive modules across
-- hs-boot boundaries, poking too early will do the
-- type-checking before the recursive knot has been tied,
-- so things will be type-checked in the wrong
-- environment, and necessary variables won't be in
-- scope.
--
-- (2) Looking up one OccName in the mini_env will cause
-- others to be looked up, which might cause that
-- original one to be looked up again, and hence loop.
--
-- The code below works because of the following invariant:
-- getOccName on a TyThing does not force the suspended type
-- checks in order to extract the name. For example, we don't
-- poke on the "T a" type of <selector x> on the way to
-- extracting <selector x>'s OccName. Of course, there is no
-- reason in principle why getting the OccName should force the
-- thunks, but this means we need to be careful in
-- implicitTyThings and its helper functions.
--
-- All a bit too finely-balanced for my liking.
-- This mini-env and lookup function mediates between the
--'Name's n and the map from 'OccName's to the implicit TyThings
; let mini_env = mkOccEnv [(getOccName t, t) | t <- implicitTyThings thing]
lookup n = case lookupOccEnv mini_env (getOccName n) of
Just thing -> thing
Nothing ->
pprPanic "loadDecl" (ppr main_name <+> ppr n $$ ppr (decl))
; implicit_names <- mapM lookupIfaceTop (ifaceDeclImplicitBndrs decl)
-- ; traceIf (text "Loading decl for " <> ppr main_name $$ ppr implicit_names)
; return $ (main_name, thing) :
-- uses the invariant that implicit_names and
-- implicitTyThings are bijective
[(n, lookup n) | n <- implicit_names]
}
where
doc = text "Declaration for" <+> ppr (ifName decl)
bumpDeclStats :: Name -> IfL () -- Record that one more declaration has actually been used
bumpDeclStats name
= do { traceIf (text "Loading decl for" <+> ppr name)
; updateEps_ (\eps -> let stats = eps_stats eps
in eps { eps_stats = stats { n_decls_out = n_decls_out stats + 1 } })
}
{-
*********************************************************
* *
\subsection{Reading an interface file}
* *
*********************************************************
Note [Home module load error]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If the sought-for interface is in the current package (as determined
by -package-name flag) then it jolly well should already be in the HPT
because we process home-package modules in dependency order. (Except
in one-shot mode; see notes with hsc_HPT decl in HscTypes).
It is possible (though hard) to get this error through user behaviour.
* Suppose package P (modules P1, P2) depends on package Q (modules Q1,
Q2, with Q2 importing Q1)
* We compile both packages.
* Now we edit package Q so that it somehow depends on P
* Now recompile Q with --make (without recompiling P).
* Then Q1 imports, say, P1, which in turn depends on Q2. So Q2
is a home-package module which is not yet in the HPT! Disaster.
This actually happened with P=base, Q=ghc-prim, via the AMP warnings.
See Trac #8320.
-}
findAndReadIface :: SDoc
-- The unique identifier of the on-disk module we're
-- looking for
-> InstalledModule
-- The *actual* module we're looking for. We use
-- this to check the consistency of the requirements
-- of the module we read out.
-> Module
-> IsBootInterface -- True <=> Look for a .hi-boot file
-- False <=> Look for .hi file
-> TcRnIf gbl lcl (MaybeErr MsgDoc (ModIface, FilePath))
-- Nothing <=> file not found, or unreadable, or illegible
-- Just x <=> successfully found and parsed
-- It *doesn't* add an error to the monad, because
-- sometimes it's ok to fail... see notes with loadInterface
findAndReadIface doc_str mod wanted_mod_with_insts hi_boot_file
= do traceIf (sep [hsep [text "Reading",
if hi_boot_file
then text "[boot]"
else Outputable.empty,
text "interface for",
ppr mod <> semi],
nest 4 (text "reason:" <+> doc_str)])
-- Check for GHC.Prim, and return its static interface
-- TODO: make this check a function
if mod `installedModuleEq` gHC_PRIM
then do
iface <- getHooked ghcPrimIfaceHook ghcPrimIface
return (Succeeded (iface,
"<built in interface for GHC.Prim>"))
else do
dflags <- getDynFlags
-- Look for the file
hsc_env <- getTopEnv
mb_found <- liftIO (findExactModule hsc_env mod)
case mb_found of
InstalledFound loc mod -> do
-- Found file, so read it
let file_path = addBootSuffix_maybe hi_boot_file
(ml_hi_file loc)
-- See Note [Home module load error]
if installedModuleUnitId mod `installedUnitIdEq` thisPackage dflags &&
not (isOneShot (ghcMode dflags))
then return (Failed (homeModError mod loc))
else do r <- read_file file_path
checkBuildDynamicToo r
return r
err -> do
traceIf (text "...not found")
dflags <- getDynFlags
return (Failed (cannotFindInterface dflags
(installedModuleName mod) err))
where read_file file_path = do
traceIf (text "readIFace" <+> text file_path)
-- Figure out what is recorded in mi_module. If this is
-- a fully definite interface, it'll match exactly, but
-- if it's indefinite, the inside will be uninstantiated!
dflags <- getDynFlags
let wanted_mod =
case splitModuleInsts wanted_mod_with_insts of
(_, Nothing) -> wanted_mod_with_insts
(_, Just indef_mod) ->
indefModuleToModule dflags
(generalizeIndefModule indef_mod)
read_result <- readIface wanted_mod file_path
case read_result of
Failed err -> return (Failed (badIfaceFile file_path err))
Succeeded iface -> return (Succeeded (iface, file_path))
-- Don't forget to fill in the package name...
checkBuildDynamicToo (Succeeded (iface, filePath)) = do
dflags <- getDynFlags
-- Indefinite interfaces are ALWAYS non-dynamic, and
-- that's OK.
let is_definite_iface = moduleIsDefinite (mi_module iface)
when is_definite_iface $
whenGeneratingDynamicToo dflags $ withDoDynamicToo $ do
let ref = canGenerateDynamicToo dflags
dynFilePath = addBootSuffix_maybe hi_boot_file
$ replaceExtension filePath (dynHiSuf dflags)
r <- read_file dynFilePath
case r of
Succeeded (dynIface, _)
| mi_mod_hash iface == mi_mod_hash dynIface ->
return ()
| otherwise ->
do traceIf (text "Dynamic hash doesn't match")
liftIO $ writeIORef ref False
Failed err ->
do traceIf (text "Failed to load dynamic interface file:" $$ err)
liftIO $ writeIORef ref False
checkBuildDynamicToo _ = return ()
-- @readIface@ tries just the one file.
readIface :: Module -> FilePath
-> TcRnIf gbl lcl (MaybeErr MsgDoc ModIface)
-- Failed err <=> file not found, or unreadable, or illegible
-- Succeeded iface <=> successfully found and parsed
readIface wanted_mod file_path
= do { res <- tryMostM $
readBinIface CheckHiWay QuietBinIFaceReading file_path
; dflags <- getDynFlags
; case res of
Right iface
-- NB: This check is NOT just a sanity check, it is
-- critical for correctness of recompilation checking
-- (it lets us tell when -this-unit-id has changed.)
| wanted_mod == actual_mod
-> return (Succeeded iface)
| otherwise -> return (Failed err)
where
actual_mod = mi_module iface
err = hiModuleNameMismatchWarn dflags wanted_mod actual_mod
Left exn -> return (Failed (text (showException exn)))
}
{-
*********************************************************
* *
Wired-in interface for GHC.Prim
* *
*********************************************************
-}
initExternalPackageState :: ExternalPackageState
initExternalPackageState
= EPS {
eps_is_boot = emptyUFM,
eps_PIT = emptyPackageIfaceTable,
eps_free_holes = emptyInstalledModuleEnv,
eps_PTE = emptyTypeEnv,
eps_inst_env = emptyInstEnv,
eps_fam_inst_env = emptyFamInstEnv,
eps_rule_base = mkRuleBase builtinRules,
-- Initialise the EPS rule pool with the built-in rules
eps_mod_fam_inst_env
= emptyModuleEnv,
eps_vect_info = noVectInfo,
eps_complete_matches = emptyUFM,
eps_ann_env = emptyAnnEnv,
eps_stats = EpsStats { n_ifaces_in = 0, n_decls_in = 0, n_decls_out = 0
, n_insts_in = 0, n_insts_out = 0
, n_rules_in = length builtinRules, n_rules_out = 0 }
}
{-
*********************************************************
* *
Wired-in interface for GHC.Prim
* *
*********************************************************
-}
ghcPrimIface :: ModIface
ghcPrimIface
= (emptyModIface gHC_PRIM) {
mi_exports = ghcPrimExports,
mi_decls = [],
mi_fixities = fixities,
mi_fix_fn = mkIfaceFixCache fixities
}
where
fixities = (getOccName seqId, Fixity NoSourceText 0 InfixR)
-- seq is infixr 0
: (occName funTyConName, funTyFixity) -- trac #10145
: mapMaybe mkFixity allThePrimOps
mkFixity op = (,) (primOpOcc op) <$> primOpFixity op
{-
*********************************************************
* *
\subsection{Statistics}
* *
*********************************************************
-}
ifaceStats :: ExternalPackageState -> SDoc
ifaceStats eps
= hcat [text "Renamer stats: ", msg]
where
stats = eps_stats eps
msg = vcat
[int (n_ifaces_in stats) <+> text "interfaces read",
hsep [ int (n_decls_out stats), text "type/class/variable imported, out of",
int (n_decls_in stats), text "read"],
hsep [ int (n_insts_out stats), text "instance decls imported, out of",
int (n_insts_in stats), text "read"],
hsep [ int (n_rules_out stats), text "rule decls imported, out of",
int (n_rules_in stats), text "read"]
]
{-
************************************************************************
* *
Printing interfaces
* *
************************************************************************
-}
-- | Read binary interface, and print it out
showIface :: HscEnv -> FilePath -> IO ()
showIface hsc_env filename = do
-- skip the hi way check; we don't want to worry about profiled vs.
-- non-profiled interfaces, for example.
iface <- initTcRnIf 's' hsc_env () () $
readBinIface IgnoreHiWay TraceBinIFaceReading filename
let dflags = hsc_dflags hsc_env
putLogMsg dflags NoReason SevDump noSrcSpan
(defaultDumpStyle dflags) (pprModIface iface)
-- Show a ModIface but don't display details; suitable for ModIfaces stored in
-- the EPT.
pprModIfaceSimple :: ModIface -> SDoc
pprModIfaceSimple iface = ppr (mi_module iface) $$ pprDeps (mi_deps iface) $$ nest 2 (vcat (map pprExport (mi_exports iface)))
pprModIface :: ModIface -> SDoc
-- Show a ModIface
pprModIface iface
= vcat [ text "interface"
<+> ppr (mi_module iface) <+> pp_hsc_src (mi_hsc_src iface)
<+> (if mi_orphan iface then text "[orphan module]" else Outputable.empty)
<+> (if mi_finsts iface then text "[family instance module]" else Outputable.empty)
<+> (if mi_hpc iface then text "[hpc]" else Outputable.empty)
<+> integer hiVersion
, nest 2 (text "interface hash:" <+> ppr (mi_iface_hash iface))
, nest 2 (text "ABI hash:" <+> ppr (mi_mod_hash iface))
, nest 2 (text "export-list hash:" <+> ppr (mi_exp_hash iface))
, nest 2 (text "orphan hash:" <+> ppr (mi_orphan_hash iface))
, nest 2 (text "flag hash:" <+> ppr (mi_flag_hash iface))
, nest 2 (text "sig of:" <+> ppr (mi_sig_of iface))
, nest 2 (text "used TH splices:" <+> ppr (mi_used_th iface))
, nest 2 (text "where")
, text "exports:"
, nest 2 (vcat (map pprExport (mi_exports iface)))
, pprDeps (mi_deps iface)
, vcat (map pprUsage (mi_usages iface))
, vcat (map pprIfaceAnnotation (mi_anns iface))
, pprFixities (mi_fixities iface)
, vcat [ppr ver $$ nest 2 (ppr decl) | (ver,decl) <- mi_decls iface]
, vcat (map ppr (mi_insts iface))
, vcat (map ppr (mi_fam_insts iface))
, vcat (map ppr (mi_rules iface))
, pprVectInfo (mi_vect_info iface)
, ppr (mi_warns iface)
, pprTrustInfo (mi_trust iface)
, pprTrustPkg (mi_trust_pkg iface)
, vcat (map ppr (mi_complete_sigs iface))
]
where
pp_hsc_src HsBootFile = text "[boot]"
pp_hsc_src HsigFile = text "[hsig]"
pp_hsc_src HsSrcFile = Outputable.empty
{-
When printing export lists, we print like this:
Avail f f
AvailTC C [C, x, y] C(x,y)
AvailTC C [x, y] C!(x,y) -- Exporting x, y but not C
-}
pprExport :: IfaceExport -> SDoc
pprExport (Avail n) = ppr n
pprExport (AvailTC _ [] []) = Outputable.empty
pprExport (AvailTC n ns0 fs)
= case ns0 of
(n':ns) | n==n' -> ppr n <> pp_export ns fs
_ -> ppr n <> vbar <> pp_export ns0 fs
where
pp_export [] [] = Outputable.empty
pp_export names fs = braces (hsep (map ppr names ++ map (ppr . flLabel) fs))
pprUsage :: Usage -> SDoc
pprUsage usage@UsagePackageModule{}
= pprUsageImport usage usg_mod
pprUsage usage@UsageHomeModule{}
= pprUsageImport usage usg_mod_name $$
nest 2 (
maybe Outputable.empty (\v -> text "exports: " <> ppr v) (usg_exports usage) $$
vcat [ ppr n <+> ppr v | (n,v) <- usg_entities usage ]
)
pprUsage usage@UsageFile{}
= hsep [text "addDependentFile",
doubleQuotes (text (usg_file_path usage)),
ppr (usg_file_hash usage)]
pprUsage usage@UsageMergedRequirement{}
= hsep [text "merged", ppr (usg_mod usage), ppr (usg_mod_hash usage)]
pprUsageImport :: Outputable a => Usage -> (Usage -> a) -> SDoc
pprUsageImport usage usg_mod'
= hsep [text "import", safe, ppr (usg_mod' usage),
ppr (usg_mod_hash usage)]
where
safe | usg_safe usage = text "safe"
| otherwise = text " -/ "
pprDeps :: Dependencies -> SDoc
pprDeps (Deps { dep_mods = mods, dep_pkgs = pkgs, dep_orphs = orphs,
dep_finsts = finsts })
= vcat [text "module dependencies:" <+> fsep (map ppr_mod mods),
text "package dependencies:" <+> fsep (map ppr_pkg pkgs),
text "orphans:" <+> fsep (map ppr orphs),
text "family instance modules:" <+> fsep (map ppr finsts)
]
where
ppr_mod (mod_name, boot) = ppr mod_name <+> ppr_boot boot
ppr_pkg (pkg,trust_req) = ppr pkg <>
(if trust_req then text "*" else Outputable.empty)
ppr_boot True = text "[boot]"
ppr_boot False = Outputable.empty
pprFixities :: [(OccName, Fixity)] -> SDoc
pprFixities [] = Outputable.empty
pprFixities fixes = text "fixities" <+> pprWithCommas pprFix fixes
where
pprFix (occ,fix) = ppr fix <+> ppr occ
pprVectInfo :: IfaceVectInfo -> SDoc
pprVectInfo (IfaceVectInfo { ifaceVectInfoVar = vars
, ifaceVectInfoTyCon = tycons
, ifaceVectInfoTyConReuse = tyconsReuse
, ifaceVectInfoParallelVars = parallelVars
, ifaceVectInfoParallelTyCons = parallelTyCons
}) =
vcat
[ text "vectorised variables:" <+> hsep (map ppr vars)
, text "vectorised tycons:" <+> hsep (map ppr tycons)
, text "vectorised reused tycons:" <+> hsep (map ppr tyconsReuse)
, text "parallel variables:" <+> hsep (map ppr parallelVars)
, text "parallel tycons:" <+> hsep (map ppr parallelTyCons)
]
pprTrustInfo :: IfaceTrustInfo -> SDoc
pprTrustInfo trust = text "trusted:" <+> ppr trust
pprTrustPkg :: Bool -> SDoc
pprTrustPkg tpkg = text "require own pkg trusted:" <+> ppr tpkg
instance Outputable Warnings where
ppr = pprWarns
pprWarns :: Warnings -> SDoc
pprWarns NoWarnings = Outputable.empty
pprWarns (WarnAll txt) = text "Warn all" <+> ppr txt
pprWarns (WarnSome prs) = text "Warnings"
<+> vcat (map pprWarning prs)
where pprWarning (name, txt) = ppr name <+> ppr txt
pprIfaceAnnotation :: IfaceAnnotation -> SDoc
pprIfaceAnnotation (IfaceAnnotation { ifAnnotatedTarget = target, ifAnnotatedValue = serialized })
= ppr target <+> text "annotated by" <+> ppr serialized
{-
*********************************************************
* *
\subsection{Errors}
* *
*********************************************************
-}
badIfaceFile :: String -> SDoc -> SDoc
badIfaceFile file err
= vcat [text "Bad interface file:" <+> text file,
nest 4 err]
hiModuleNameMismatchWarn :: DynFlags -> Module -> Module -> MsgDoc
hiModuleNameMismatchWarn dflags requested_mod read_mod
| moduleUnitId requested_mod == moduleUnitId read_mod =
sep [text "Interface file contains module" <+> quotes (ppr read_mod) <> comma,
text "but we were expecting module" <+> quotes (ppr requested_mod),
sep [text "Probable cause: the source code which generated interface file",
text "has an incompatible module name"
]
]
| otherwise =
-- ToDo: This will fail to have enough qualification when the package IDs
-- are the same
withPprStyle (mkUserStyle dflags alwaysQualify AllTheWay) $
-- we want the Modules below to be qualified with package names,
-- so reset the PrintUnqualified setting.
hsep [ text "Something is amiss; requested module "
, ppr requested_mod
, text "differs from name found in the interface file"
, ppr read_mod
, parens (text "if these names look the same, try again with -dppr-debug")
]
homeModError :: InstalledModule -> ModLocation -> SDoc
-- See Note [Home module load error]
homeModError mod location
= text "attempting to use module " <> quotes (ppr mod)
<> (case ml_hs_file location of
Just file -> space <> parens (text file)
Nothing -> Outputable.empty)
<+> text "which is not loaded"
|
ezyang/ghc
|
compiler/iface/LoadIface.hs
|
bsd-3-clause
| 52,514 | 475 | 24 | 17,052 | 8,544 | 4,576 | 3,968 | -1 | -1 |
import Types
import Graphics.GL.Pal
someCubes1 :: Float -> [Cube]
someCubes1 t =
[ newCube
{ cubeColor = colorHSL (x*0.01+t*0.3) 0.9 0.4
, cubeRotation = axisAngle (V3 0 1 0) 2
, cubePosition = V3 (sin (t+x*0.11))
(x*0.1-1)
0
, cubeScale = V3 (0.1*x + 0.5)
0.1
(0.1)
}
| x <- [0..n]
]
where n = fromIntegral $ min 100 (mod (floor (t*50)) 100)
|
lukexi/cubensis
|
defs/Cubes1.hs
|
bsd-3-clause
| 520 | 0 | 13 | 241 | 203 | 109 | 94 | 15 | 1 |
{-# LANGUAGE TypeFamilies, GeneralizedNewtypeDeriving #-}
module Data.Bitmap.Array.Internal
( BitmapArray(..)
) where
import Data.Array.Unboxed
import Data.Binary
import Data.Bitmap.Class
import Data.Bitmap.Pixel
import Data.Bitmap.Types
import Data.Serialize
-- | Arrays of 32-bit RGBA pixels
newtype BitmapArray = BitmapArray {unwrapBitmapArray :: UArray (Integer, Integer) Word32}
deriving (Eq, Ord, Binary, Serialize)
-- | Instance for debugging purposes
instance Show BitmapArray where
--show = map (chr . fromIntegral) . elems . unwrapBitmap
show = show . unwrapBitmapArray
instance Bitmap BitmapArray where
type BIndexType BitmapArray = Integer
type BPixelType BitmapArray = PixelRGBA
depth = const Depth32RGBA
dimensions (BitmapArray a) =
let (_, (maxRow, maxColumn)) = bounds a
in (abs . succ $ maxColumn, abs . succ $ maxRow)
getPixel (BitmapArray a) = PixelRGBA . (a !)
constructPixels f (width, height) = let maxRow = abs . pred $ height
maxColumn = abs . pred $ width
f' = unwrapPixelRGBA . toPixelRGBA . f
in BitmapArray . array ((0, 0), (maxRow, maxColumn)) $ [(i, f' i) | row <- [0..maxRow], column <- [0..maxColumn], let i = (row, column)]
|
bairyn/bitmaps
|
src/Data/Bitmap/Array/Internal.hs
|
bsd-3-clause
| 1,366 | 0 | 14 | 378 | 378 | 214 | 164 | 25 | 0 |
module NeuralA where
import Numeric.LinearAlgebra
x = vector [-5.0,-4.9..5.0]
y1 = cmap sigmoid x
sigmoid :: Double -> Double
sigmoid x = 1.0 / (1.0 + exp(-x))
step_func :: Double -> Double
step_func x
| x > 0.0 = 1.0
| otherwise = 0.0
relu :: Double -> Double
relu x
| x > 0.0 = x
| otherwise = 0.0
|
chupaaaaaaan/nn-with-haskell
|
src/NeuralA.hs
|
bsd-3-clause
| 315 | 0 | 10 | 76 | 147 | 75 | 72 | 14 | 1 |
module Feldspar.Core where
import Data.List
import Data.Int
import Data.Word
type Var = Int
data Expr =
Int Type Integer
| Float Float
| Double Double
| Boolean Bool
| Rational Rational
| Binop Binop Expr Expr
| Unop Unop Expr
| Parallel Expr Var Expr
-- Array value. Without this, the language is not closed under evaluation
| Array [Expr]
| Index Expr Expr
| Pair [Expr]
| If Expr Expr Expr
| ForLoop Expr Expr Var Var Expr
-- Binding
| Let Var Expr Expr
| Var Var
-- P, The length of the array that is created and the program which
-- writes to it
| RunP Expr P
deriving Show
data P =
Assign Expr Expr
| ParFor Expr Var P
| LetP Var Expr P
| Par P P
deriving Show
data Binop =
Plus
| Minus
| Times
| Div
| Eq
| NEq
| LT
| GT
| LTE
| GTE
| And
| Or
| Xor
deriving Show
data Unop =
Abs
| Signum
| Recip
| Not
-- Should I have this?
| Proj Int
deriving Show
instance Num Expr where
(+) = Binop Plus
(-) = Binop Minus
(*) = Binop Times
abs = Unop Abs
signum = Unop Signum
fromInteger = Int Unknown . fromInteger
instance Fractional Expr where
(/) = Binop Div
recip = Unop Recip
fromRational = Rational
data Type =
IntT { signed :: Bool, bits :: Size }
| FloatT
| DoubleT
| BoolT
| ArrayT Type
| PairT [Type]
| Unknown -- See if I really need this one
deriving Show
intTy = IntT True Machine
data Size = Eight | Sixteen | Thirtytwo | Sixtyfour
| Machine
deriving Show
-- Built-in types
class Ty a where
reify :: a -> Type
instance Ty Int where
reify _ = IntT True Machine
instance Ty Int8 where
reify _ = IntT True Eight
instance Ty Int16 where
reify _ = IntT True Sixteen
instance Ty Int32 where
reify _ = IntT True Thirtytwo
instance Ty Int64 where
reify _ = IntT True Sixtyfour
instance Ty Word where
reify _ = IntT False Machine
instance Ty Word8 where
reify _ = IntT False Eight
instance Ty Word16 where
reify _ = IntT False Sixteen
instance Ty Word32 where
reify _ = IntT False Thirtytwo
instance Ty Word64 where
reify _ = IntT False Sixtyfour
instance Ty Float where
reify _ = FloatT
instance Ty Double where
reify _ = DoubleT
instance Ty Bool where
reify _ = BoolT
data TupleTree a = Tuple [TupleTree a] | Atom a
splitPairs :: Type -> TupleTree Type
splitPairs (PairT ts) = Tuple (map splitPairs ts)
splitPairs t = Atom t
newVar :: Expr -> Var
newVar (Binop _ e1 e2) = newVar e1 ⊔ newVar e2
newVar (Unop _ e) = newVar e
newVar (Parallel e v _) = newVar e ⊔ (v + 1)
newVar (Index e1 e2) = newVar e1 ⊔ newVar e2
newVar (Pair es) = foldl' (⊔) 0 (map newVar es)
newVar (If e1 e2 e3) = newVar e1 ⊔ newVar e2 ⊔ newVar e3
newVar (Var v) = 0
newVar (RunP e p) = newVar e ⊔ newVarP p
newVar _ = 0
newVarP :: P -> Var
newVarP (Assign e1 e2) = newVar e1 ⊔ newVar e2
newVarP (ParFor e v _) = newVar e ⊔ v
newVarP (LetP v e _) = newVar e ⊔ v
newVarP (Par p1 p2) = newVarP p1 ⊔ newVarP p2
v1 ⊔ v2 = max v1 v2
newtype EvalM a = EvalM { unEvalM :: [(Var,Expr)] -> Maybe a }
instance Monad EvalM where
return a = EvalM (\_ -> Just a)
EvalM f >>= m = EvalM (\e -> case f e of
Nothing -> Nothing
Just a -> unEvalM (m a) e)
newtype EvalP a = EvalP { unEvalP :: [(Var,Expr)] -> Maybe (a,[(Int,Expr)]) }
instance Monad EvalP where
return a = EvalP (\_ -> Just (a,[]))
EvalP f >>= m = EvalP (\e -> case f e of
Nothing -> Nothing
Just (a,arr) -> case unEvalP (m a) e of
Nothing -> Nothing
Just (b,arr') -> Just (b,arr++arr'))
runEvalP :: EvalP a -> EvalM [(Int,Expr)]
runEvalP (EvalP f) = EvalM (\e -> case f e of
Nothing -> Nothing
Just (_,arr) -> Just arr)
liftEvalM :: EvalM a -> EvalP a
liftEvalM (EvalM f) = EvalP (\e -> case f e of
Nothing -> Nothing
Just a -> Just (a,[]))
evalM :: Expr -> EvalP Expr
evalM = liftEvalM . eval
crash :: EvalM a
crash = EvalM (\_ -> Nothing)
eval :: Expr -> EvalM Expr
eval (Int ty i) = return (Int ty i)
eval (Float f) = return (Float f)
eval (Rational r) = return (Rational r)
eval (Binop bop e1 e2) = do let f = evalBinop bop
a <- eval e1
b <- eval e2
f a b
eval (Unop unop e) = do let f = evalUnop unop
a <- eval e
f a
eval (Parallel l v body) = do (Int _ l) <- eval l
es <- mapM (evalPar v body) [0..l-1]
return (Array es)
where evalPar v body i = bindV v (Int intTy i) $ eval body
eval (Array es) = do vs <- mapM eval es
return (Array vs)
eval (Index e1 e2) = do Array arr <- eval e1
Int _ i <- eval e2
return (arr!!fromInteger i)
eval (Pair es) = do vs <- mapM eval es
return (Pair vs)
eval (If c t e) = do b <- eval c
case b of
Boolean True -> eval t
Boolean False -> eval e
eval (ForLoop l init i s body) =
do (Int _ len) <- eval l
ist <- eval init
loop 0 len ist i s body
where loop i n s _ _ _ | i >= n = return s
loop i n s v w body =
do s' <- bindV v (Int intTy i) $ bindV w s $ eval body
             loop (i+1) n s' v w body
eval (Let v e1 e2) = do val <- eval e1
bindV v val $ eval e2
eval (Var v) = lookupVar v
eval (RunP e p) = error "Unimplemented"
evalBinop :: Binop -> (Expr -> Expr -> EvalM Expr)
evalBinop Plus (Int ty i) (Int _ j) = return (Int ty (i+j))
evalBinop Plus (Float a) (Float b) = return (Float (a+b))
evalBinop _ _ _ = crash
evalUnop :: Unop -> (Expr -> EvalM Expr)
evalUnop Abs (Int ty i) = return (Int ty (abs i))
evalUnop _ _ = crash
bindV :: Var -> Expr -> EvalM a -> EvalM a
bindV v expr (EvalM f) = EvalM (\e -> f ((v,expr):e))
bindP :: Var -> Expr -> EvalP a -> EvalP a
bindP v expr (EvalP f) = EvalP (\e -> f ((v,expr):e))
lookupVar :: Var -> EvalM Expr
lookupVar v = EvalM (lookup v)
evalP (Assign ix e) = do (Int _ i) <- evalM ix
elm <- evalM e
assign (fromInteger i) elm
evalP (ParFor len v body) =
do (Int ty l) <- evalM len
mapM_ (\i -> bindP v (Int ty i) (evalP body)) [0..l-1]
evalP (LetP v e p) = do val <- evalM e
bindP v val $ evalP p
evalP (Par p1 p2) = do evalP p1
evalP p2
assign i e = EvalP (\_ -> Just ((),[(i,e)]))
|
josefs/MiniDSL
|
Feldspar/Core.hs
|
bsd-3-clause
| 6,893 | 0 | 17 | 2,435 | 2,990 | 1,499 | 1,491 | 211 | 3 |
module Data.Knot where
|
kylcarte/knots
|
src/Data/Knot.hs
|
bsd-3-clause
| 27 | 0 | 3 | 7 | 6 | 4 | 2 | 1 | 0 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
module ModelFit.Fit where
import Data.Foldable as F
import Data.Monoid (Monoid (..), Sum (..))
import Control.Lens
import qualified Data.Vector.Storable as VS
import Numeric.LevMar hiding (Model)
import ModelFit.Model
import ModelFit.Types
degreesOfFreedom :: (VS.Storable p, VS.Storable x, VS.Storable y)
=> FitDesc p x y -> Int
degreesOfFreedom fd =
VS.length (fitPoints fd) - freeParams (fitModel fd)
{-# INLINE degreesOfFreedom #-}
chiSquared' :: (Fractional y, Foldable f)
=> f (Point x y) -> (x -> y) -> y
chiSquared' pts model = getSum $ foldMap go pts
where
go pt = Sum $ (residual pt model)^2 / (pt ^. ptVar)
chiSquared :: (Fractional y, VS.Storable p, VS.Storable x, VS.Storable y)
=> FitDesc p x y -> Packed VS.Vector p -> y
chiSquared fd packed = chiSquared' (VS.toList $ fitPoints fd) (fitEval fd packed)
reducedChiSquared :: (VS.Storable p, VS.Storable x, VS.Storable y, Fractional y)
=> FitDesc p x y -> Packed VS.Vector p -> y
reducedChiSquared fd packed =
chiSquared fd packed / dof
where
dof = realToFrac $ degreesOfFreedom fd
leastSquares :: (y ~ p, VS.Storable x, VS.Storable p, RealFloat y, LevMarable y)
=> [Curve p x y]
-> Packed VS.Vector p
-> Either LevMarError (Packed VS.Vector p)
leastSquares curves p0 =
case levmar objective Nothing (p0 ^. _Wrapped) ys 5000 defaultOpts mempty of
Left e -> Left e
Right (p, _, covar) -> Right $ Packed p
where
ys = VS.concat $ map (VS.map (const 0) . curvePoints) curves
objective packed = VS.concat $ map doCurve curves
where
-- We first evaluate this to avoid repeating model evaluation if possible
doCurve (Curve pts m) = VS.map (\(Point x y e) -> (mp x - y) / sqrt e) pts
where
mp = evalParam m (Packed packed)
residual :: Num y => Point x y -> (x -> y) -> y
residual pt f = pt^.ptY - f (pt^.ptX)
|
bgamari/model-fit
|
ModelFit/Fit.hs
|
bsd-3-clause
| 2,116 | 0 | 15 | 522 | 791 | 409 | 382 | 44 | 2 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE UndecidableInstances #-}
-- | This module provides a monadic facility similar (and built on top
-- of) "Futhark.FreshNames". The removes the need for a (small) amount of
-- boilerplate, at the cost of using some GHC extensions. The idea is
-- that if your compiler pass runs in a monad that is an instance of
-- 'MonadFreshNames', you can automatically use the name generation
-- functions exported by this module.
module Futhark.MonadFreshNames
( MonadFreshNames (..),
modifyNameSource,
newName,
newNameFromString,
newVName,
newIdent,
newIdent',
newParam,
module Futhark.FreshNames,
)
where
import Control.Monad.Except
import qualified Control.Monad.RWS.Lazy
import qualified Control.Monad.RWS.Strict
import Control.Monad.Reader
import qualified Control.Monad.State.Lazy
import qualified Control.Monad.State.Strict
import qualified Control.Monad.Trans.Maybe
import qualified Control.Monad.Writer.Lazy
import qualified Control.Monad.Writer.Strict
import Futhark.FreshNames hiding (newName)
import qualified Futhark.FreshNames as FreshNames
import Futhark.IR.Syntax
-- | A monad that stores a name source. The following is a good
-- instance for a monad in which the only state is a @NameSource vn@:
--
-- @
-- instance MonadFreshNames vn MyMonad where
-- getNameSource = get
-- putNameSource = put
-- @
class (Applicative m, Monad m) => MonadFreshNames m where
getNameSource :: m VNameSource
putNameSource :: VNameSource -> m ()
instance (Applicative im, Monad im) => MonadFreshNames (Control.Monad.State.Lazy.StateT VNameSource im) where
getNameSource = Control.Monad.State.Lazy.get
putNameSource = Control.Monad.State.Lazy.put
instance (Applicative im, Monad im) => MonadFreshNames (Control.Monad.State.Strict.StateT VNameSource im) where
getNameSource = Control.Monad.State.Strict.get
putNameSource = Control.Monad.State.Strict.put
instance
(Applicative im, Monad im, Monoid w) =>
MonadFreshNames (Control.Monad.RWS.Lazy.RWST r w VNameSource im)
where
getNameSource = Control.Monad.RWS.Lazy.get
putNameSource = Control.Monad.RWS.Lazy.put
instance
(Applicative im, Monad im, Monoid w) =>
MonadFreshNames (Control.Monad.RWS.Strict.RWST r w VNameSource im)
where
getNameSource = Control.Monad.RWS.Strict.get
putNameSource = Control.Monad.RWS.Strict.put
-- | Run a computation needing a fresh name source and returning a new
-- one, using 'getNameSource' and 'putNameSource' before and after the
-- computation.
modifyNameSource :: MonadFreshNames m => (VNameSource -> (a, VNameSource)) -> m a
modifyNameSource m = do
src <- getNameSource
let (x, src') = m src
putNameSource src'
return x
-- | Produce a fresh name, using the given name as a template.
newName :: MonadFreshNames m => VName -> m VName
newName = modifyNameSource . flip FreshNames.newName
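-- A small usage sketch (added illustration; @freshPair@ is not part of this
-- module): any 'MonadFreshNames' computation can mint several fresh names
-- from a single template.
--
-- @
-- freshPair :: MonadFreshNames m => VName -> m (VName, VName)
-- freshPair v = do
--   a <- newName v
--   b <- newName v
--   return (a, b)
-- @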
-- | As @newName@, but takes a 'String' for the name template.
newNameFromString :: MonadFreshNames m => String -> m VName
newNameFromString s = newName $ VName (nameFromString s) 0
-- | Produce a fresh 'VName', using the given base name as a template.
newID :: MonadFreshNames m => Name -> m VName
newID s = newName $ VName s 0
-- | Produce a fresh 'VName', using the given base name as a template.
newVName :: MonadFreshNames m => String -> m VName
newVName = newID . nameFromString
-- | Produce a fresh 'Ident', using the given name as a template.
newIdent ::
MonadFreshNames m =>
String ->
Type ->
m Ident
newIdent s t = do
s' <- newID $ nameFromString s
return $ Ident s' t
-- | Produce a fresh 'Ident', using the given 'Ident' as a template,
-- but possibly modifying the name.
newIdent' ::
MonadFreshNames m =>
(String -> String) ->
Ident ->
m Ident
newIdent' f ident =
newIdent
(f $ nameToString $ baseName $ identName ident)
(identType ident)
-- | Produce a fresh 'Param', using the given name as a template.
newParam ::
MonadFreshNames m =>
String ->
dec ->
m (Param dec)
newParam s t = do
s' <- newID $ nameFromString s
return $ Param mempty s' t
-- Utility instance definitions for MTL classes. This requires
-- UndecidableInstances, but saves on typing elsewhere.
instance MonadFreshNames m => MonadFreshNames (ReaderT s m) where
getNameSource = lift getNameSource
putNameSource = lift . putNameSource
instance
(MonadFreshNames m, Monoid s) =>
MonadFreshNames (Control.Monad.Writer.Lazy.WriterT s m)
where
getNameSource = lift getNameSource
putNameSource = lift . putNameSource
instance
(MonadFreshNames m, Monoid s) =>
MonadFreshNames (Control.Monad.Writer.Strict.WriterT s m)
where
getNameSource = lift getNameSource
putNameSource = lift . putNameSource
instance
MonadFreshNames m =>
MonadFreshNames (Control.Monad.Trans.Maybe.MaybeT m)
where
getNameSource = lift getNameSource
putNameSource = lift . putNameSource
instance
MonadFreshNames m =>
MonadFreshNames (ExceptT e m)
where
getNameSource = lift getNameSource
putNameSource = lift . putNameSource
|
HIPERFIT/futhark
|
src/Futhark/MonadFreshNames.hs
|
isc
| 5,062 | 0 | 10 | 874 | 1,096 | 611 | 485 | 105 | 1 |
{-# Language OverloadedStrings #-}
{-|
Module : Client.View.Cert
Description : Network certificate renderer
Copyright : (c) Eric Mertens, 2019
License : ISC
Maintainer : [email protected]
-}
module Client.View.Cert
( certViewLines
) where
import Client.Image.PackedImage
import Client.Image.Palette
import Client.Image.MircFormatting
import Client.State
import Client.State.Focus
import Client.State.Network
import Control.Lens
import Data.Text (Text)
import qualified Data.Text.Lazy as LText
-- | Render the lines used in the certificate view
certViewLines ::
ClientState -> [Image']
certViewLines st
| Just network <- currentNetwork st
, Just cs <- preview (clientConnection network) st
, let xs = view csCertificate cs
, not (null xs)
= map (parseIrcText pal)
$ clientFilter st LText.fromStrict xs
| otherwise = [text' (view palError pal) "No certificate available"]
where
pal = clientPalette st
currentNetwork :: ClientState -> Maybe Text
currentNetwork st =
case view clientFocus st of
NetworkFocus net -> Just net
ChannelFocus net _ -> Just net
Unfocused -> Nothing
|
dolio/irc-core
|
src/Client/View/Cert.hs
|
isc
| 1,221 | 0 | 11 | 298 | 278 | 145 | 133 | 29 | 3 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
module DigitalOcean.Size(
Size(..),
sizes
) where
import Control.Applicative ()
import Control.Monad (liftM, mzero)
import Control.Monad.IO.Class
import Data.Aeson ((.:), FromJSON(..), Value(..), decode)
import DigitalOcean.Base(Authentication, requestGet)
-- $setup
-- >>> import System.Environment
-- >>> import DigitalOcean.Base(Authentication(..))
-- >>> import Data.Maybe(isJust)
data Size = Size {
sizSlug :: String,
sizAvailable :: Bool,
sizTransfer :: Integer,
sizPriceMonthly :: Float,
sizHourly :: Float,
sizMemory :: Integer,
sizVcpus :: Integer,
sizDisk :: Integer
} deriving(Show, Read)
newtype Sizes = Sizes [Size]
instance FromJSON Sizes where
parseJSON (Object v) = Sizes <$> v.: "sizes"
parseJSON _ = mzero
-- | List all Sizes
--
-- @
-- do
-- tkn <- getEnv "DIGITAL_OCEAN_PERSONAL_ACCESS_TOKEN"
-- maybeSizes <- sizes $ Authentication tkn
-- print $ show $ isJust maybeSizes
-- @
--
sizes :: Authentication -> (MonadIO m) => m (Maybe [Size])
sizes a = liftM toList $ liftM decode (requestGet "sizes" a)
instance FromJSON Size where
parseJSON (Object v) =
Size <$>
(v .: "slug") <*>
(v .: "available") <*>
(v .: "transfer") <*>
(v .: "price_monthly") <*>
(v .: "price_hourly") <*>
(v .: "memory") <*>
(v .: "vcpus") <*>
(v .: "disk")
parseJSON _ = mzero
toList :: Maybe Sizes -> Maybe [Size]
toList = fmap (\(Sizes l) -> l)
|
satokazuma/digitalocean-kzs
|
src/DigitalOcean/Size.hs
|
mit
| 1,606 | 0 | 15 | 383 | 452 | 262 | 190 | 41 | 2 |
--
-- exponent
myExponent :: Int -> Int -> Int
myExponent n x
| n < 0 || x < 0 = error "Positives only!"
| x == 0 = 1
| otherwise = n * (myExponent n (x-1))
and' :: [Bool] -> Bool
and' list = foldr (\x l -> x && l) True list
concat' :: [[x]] -> [x]
concat' list = foldr (\x l -> x ++ l) [] list
replicate' :: Int -> a -> [a]
replicate' 0 _ = []
replicate' n x = x : (replicate' (n-1) x)
(!!!) :: [a] -> Int -> a
(!!!) [] _ = error "need something in that list"
(!!!) (x:xs) n
| n == 0 = x
| otherwise = (!!!) xs (n-1)
elem' :: Eq a => a -> [a] -> Bool
elem' _ [] = False
elem' x (y:ys)
| x == y = True
| otherwise = elem' x ys
merge :: Ord a => [a] -> [a] -> [a]
merge [] [] = []
merge a [] = a
merge [] a = a
merge (a:as) (b:bs)
| a < b = a:(merge as (b:bs))
| a > b = b:(merge (a:as) bs)
| otherwise = a:b:(merge as bs)
split :: [a] -> ([a],[a])
split [] = ([],[])
split list =
let len = length list `div` 2 in
(take len list, drop len list)
msort :: Ord a => [a] -> [a]
msort [] = []
msort [a] = [a]
msort [a,b] = if a > b then [b,a] else [a,b]
msort list = merge (msort start) (msort end) where
start = fst $ split list
end = snd $ split list
|
decomputed/haskellLaboratory
|
programmingInHaskell/chapter06.hs
|
mit
| 1,217 | 5 | 10 | 350 | 844 | 430 | 414 | 42 | 2 |
--
-- 10.DataChar.hs
-- R_Functional_Programming
--
-- Created by RocKK on 2/13/14.
-- Copyright (c) 2014 RocKK.
-- All rights reserved.
--
-- Redistribution and use in source and binary forms are permitted
-- provided that the above copyright notice and this paragraph are
-- duplicated in all such forms and that any documentation,
-- advertising materials, and other materials related to such
-- distribution and use acknowledge that the software was developed
-- by the RocKK. The name of the
-- RocKK may not be used to endorse or promote products derived
-- from this software without specific prior written permission.
-- THIS SOFTWARE IS PROVIDED ''AS IS'' AND WITHOUT ANY EXPRESS OR
-- IMPLIED WARRANTIES, INCLUDING, WITHOUT LIMITATION, THE IMPLIED
-- WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
import Data.Char
main = do
print $ isAlpha 'c'
print $ isDigit '4'
print $ toUpper 'a'
print $ toLower 'E'
print $ digitToInt '2'
print $ intToDigit 9
print $ intToDigit 10
print $ ord('A')
print $ chr(61)
|
RocKK-MD/R_Functional_Programming
|
Sources/10.DataChar.hs
|
bsd-2-clause
| 1,089 | 1 | 10 | 225 | 130 | 65 | 65 | 11 | 1 |
{-# LANGUAGE BangPatterns, CPP, MagicHash #-}
-----------------------------------------------------------------------------
-- |
-- Module : ExtsCompat46
-- Copyright : (c) Lodz University of Technology 2013
-- License : see LICENSE
--
-- Maintainer : [email protected]
-- Stability : internal
-- Portability : non-portable (GHC internal)
--
-- Compatibility module to encapsulate primops API change between GHC 7.6
-- and GHC 7.8.
--
-- In GHC we use comparison primops in a couple of modules, but those primops
-- have a different type signature in GHC 7.6 (where they return Bool) than
-- in GHC 7.8 (where they return Int#). As long as we allow bootstrapping
-- with GHC 7.6 or earlier we need to have this compatibility module, so that
-- we can compile stage1 compiler using the old API and then continue with
-- stage2 using the new API. When we set GHC 7.8 as the minimum version
-- required for bootstrapping, we should remove this module.
--
-----------------------------------------------------------------------------
module Eta.Utils.ExtsCompat46 (
module GHC.Exts,
gtChar#, geChar#, eqChar#,
neChar#, ltChar#, leChar#,
(>#), (>=#), (==#), (/=#), (<#), (<=#),
gtWord#, geWord#, eqWord#,
neWord#, ltWord#, leWord#,
(>##), (>=##), (==##), (/=##), (<##), (<=##),
gtFloat#, geFloat#, eqFloat#,
neFloat#, ltFloat#, leFloat#,
gtAddr#, geAddr#, eqAddr#,
neAddr#, ltAddr#, leAddr#,
sameMutableArray#, sameMutableByteArray#, sameMutableArrayArray#,
sameMutVar#, sameTVar#, sameMVar#
) where
import GHC.Exts hiding (
gtChar#, geChar#, eqChar#,
neChar#, ltChar#, leChar#,
(>#), (>=#), (==#), (/=#), (<#), (<=#),
gtWord#, geWord#, eqWord#,
neWord#, ltWord#, leWord#,
(>##), (>=##), (==##), (/=##), (<##), (<=##),
gtFloat#, geFloat#, eqFloat#,
neFloat#, ltFloat#, leFloat#,
gtAddr#, geAddr#, eqAddr#,
neAddr#, ltAddr#, leAddr#,
sameMutableArray#, sameMutableByteArray#, sameMutableArrayArray#,
sameMutVar#, sameTVar#, sameMVar#
)
import qualified GHC.Exts as E (
gtChar#, geChar#, eqChar#,
neChar#, ltChar#, leChar#,
(>#), (>=#), (==#), (/=#), (<#), (<=#),
gtWord#, geWord#, eqWord#,
neWord#, ltWord#, leWord#,
(>##), (>=##), (==##), (/=##), (<##), (<=##),
gtFloat#, geFloat#, eqFloat#,
neFloat#, ltFloat#, leFloat#,
gtAddr#, geAddr#, eqAddr#,
neAddr#, ltAddr#, leAddr#,
sameMutableArray#, sameMutableByteArray#, sameMutableArrayArray#,
sameMutVar#, sameTVar#, sameMVar#
)
-- See #8330
#if __GLASGOW_HASKELL__ > 706
gtChar# :: Char# -> Char# -> Bool
gtChar# a b = isTrue# (a `E.gtChar#` b)
geChar# :: Char# -> Char# -> Bool
geChar# a b = isTrue# (a `E.geChar#` b)
eqChar# :: Char# -> Char# -> Bool
eqChar# a b = isTrue# (a `E.eqChar#` b)
neChar# :: Char# -> Char# -> Bool
neChar# a b = isTrue# (a `E.neChar#` b)
ltChar# :: Char# -> Char# -> Bool
ltChar# a b = isTrue# (a `E.ltChar#` b)
leChar# :: Char# -> Char# -> Bool
leChar# a b = isTrue# (a `E.leChar#` b)
infix 4 >#, >=#, ==#, /=#, <#, <=#
(>#) :: Int# -> Int# -> Bool
(>#) a b = isTrue# (a E.># b)
(>=#) :: Int# -> Int# -> Bool
(>=#) a b = isTrue# (a E.>=# b)
(==#) :: Int# -> Int# -> Bool
(==#) a b = isTrue# (a E.==# b)
(/=#) :: Int# -> Int# -> Bool
(/=#) a b = isTrue# (a E./=# b)
(<#) :: Int# -> Int# -> Bool
(<#) a b = isTrue# (a E.<# b)
(<=#) :: Int# -> Int# -> Bool
(<=#) a b = isTrue# (a E.<=# b)
gtWord# :: Word# -> Word# -> Bool
gtWord# a b = isTrue# (a `E.gtWord#` b)
geWord# :: Word# -> Word# -> Bool
geWord# a b = isTrue# (a `E.geWord#` b)
eqWord# :: Word# -> Word# -> Bool
eqWord# a b = isTrue# (a `E.eqWord#` b)
neWord# :: Word# -> Word# -> Bool
neWord# a b = isTrue# (a `E.neWord#` b)
ltWord# :: Word# -> Word# -> Bool
ltWord# a b = isTrue# (a `E.ltWord#` b)
leWord# :: Word# -> Word# -> Bool
leWord# a b = isTrue# (a `E.leWord#` b)
infix 4 >##, >=##, ==##, /=##, <##, <=##
(>##) :: Double# -> Double# -> Bool
(>##) a b = isTrue# (a E.>## b)
(>=##) :: Double# -> Double# -> Bool
(>=##) a b = isTrue# (a E.>=## b)
(==##) :: Double# -> Double# -> Bool
(==##) a b = isTrue# (a E.==## b)
(/=##) :: Double# -> Double# -> Bool
(/=##) a b = isTrue# (a E./=## b)
(<##) :: Double# -> Double# -> Bool
(<##) a b = isTrue# (a E.<## b)
(<=##) :: Double# -> Double# -> Bool
(<=##) a b = isTrue# (a E.<=## b)
gtFloat# :: Float# -> Float# -> Bool
gtFloat# a b = isTrue# (a `E.gtFloat#` b)
geFloat# :: Float# -> Float# -> Bool
geFloat# a b = isTrue# (a `E.geFloat#` b)
eqFloat# :: Float# -> Float# -> Bool
eqFloat# a b = isTrue# (a `E.eqFloat#` b)
neFloat# :: Float# -> Float# -> Bool
neFloat# a b = isTrue# (a `E.neFloat#` b)
ltFloat# :: Float# -> Float# -> Bool
ltFloat# a b = isTrue# (a `E.ltFloat#` b)
leFloat# :: Float# -> Float# -> Bool
leFloat# a b = isTrue# (a `E.leFloat#` b)
gtAddr# :: Addr# -> Addr# -> Bool
gtAddr# a b = isTrue# (a `E.gtAddr#` b)
geAddr# :: Addr# -> Addr# -> Bool
geAddr# a b = isTrue# (a `E.geAddr#` b)
eqAddr# :: Addr# -> Addr# -> Bool
eqAddr# a b = isTrue# (a `E.eqAddr#` b)
neAddr# :: Addr# -> Addr# -> Bool
neAddr# a b = isTrue# (a `E.neAddr#` b)
ltAddr# :: Addr# -> Addr# -> Bool
ltAddr# a b = isTrue# (a `E.ltAddr#` b)
leAddr# :: Addr# -> Addr# -> Bool
leAddr# a b = isTrue# (a `E.leAddr#` b)
sameMutableArray# :: MutableArray# s a -> MutableArray# s a -> Bool
sameMutableArray# a b = isTrue# (E.sameMutableArray# a b)
sameMutableByteArray# :: MutableByteArray# s -> MutableByteArray# s -> Bool
sameMutableByteArray# a b = isTrue# (E.sameMutableByteArray# a b)
sameMutableArrayArray# :: MutableArrayArray# s -> MutableArrayArray# s -> Bool
sameMutableArrayArray# a b = isTrue# (E.sameMutableArrayArray# a b)
sameMutVar# :: MutVar# s a -> MutVar# s a -> Bool
sameMutVar# a b = isTrue# (E.sameMutVar# a b)
sameTVar# :: TVar# s a -> TVar# s a -> Bool
sameTVar# a b = isTrue# (E.sameTVar# a b)
sameMVar# :: MVar# s a -> MVar# s a -> Bool
sameMVar# a b = isTrue# (E.sameMVar# a b)
#else
gtChar# :: Char# -> Char# -> Bool
gtChar# a b = a `E.gtChar#` b
geChar# :: Char# -> Char# -> Bool
geChar# a b = a `E.geChar#` b
eqChar# :: Char# -> Char# -> Bool
eqChar# a b = a `E.eqChar#` b
neChar# :: Char# -> Char# -> Bool
neChar# a b = a `E.neChar#` b
ltChar# :: Char# -> Char# -> Bool
ltChar# a b = a `E.ltChar#` b
leChar# :: Char# -> Char# -> Bool
leChar# a b = a `E.leChar#` b
infix 4 >#, >=#, ==#, /=#, <#, <=#
(>#) :: Int# -> Int# -> Bool
(>#) a b = a E.># b
(>=#) :: Int# -> Int# -> Bool
(>=#) a b = a E.>=# b
(==#) :: Int# -> Int# -> Bool
(==#) a b = a E.==# b
(/=#) :: Int# -> Int# -> Bool
(/=#) a b = a E./=# b
(<#) :: Int# -> Int# -> Bool
(<#) a b = a E.<# b
(<=#) :: Int# -> Int# -> Bool
(<=#) a b = a E.<=# b
gtWord# :: Word# -> Word# -> Bool
gtWord# a b = a `E.gtWord#` b
geWord# :: Word# -> Word# -> Bool
geWord# a b = a `E.geWord#` b
eqWord# :: Word# -> Word# -> Bool
eqWord# a b = a `E.eqWord#` b
neWord# :: Word# -> Word# -> Bool
neWord# a b = a `E.neWord#` b
ltWord# :: Word# -> Word# -> Bool
ltWord# a b = a `E.ltWord#` b
leWord# :: Word# -> Word# -> Bool
leWord# a b = a `E.leWord#` b
infix 4 >##, >=##, ==##, /=##, <##, <=##
(>##) :: Double# -> Double# -> Bool
(>##) a b = a E.>## b
(>=##) :: Double# -> Double# -> Bool
(>=##) a b = a E.>=## b
(==##) :: Double# -> Double# -> Bool
(==##) a b = a E.==## b
(/=##) :: Double# -> Double# -> Bool
(/=##) a b = a E./=## b
(<##) :: Double# -> Double# -> Bool
(<##) a b = a E.<## b
(<=##) :: Double# -> Double# -> Bool
(<=##) a b = a E.<=## b
gtFloat# :: Float# -> Float# -> Bool
gtFloat# a b = a `E.gtFloat#` b
geFloat# :: Float# -> Float# -> Bool
geFloat# a b = a `E.geFloat#` b
eqFloat# :: Float# -> Float# -> Bool
eqFloat# a b = a `E.eqFloat#` b
neFloat# :: Float# -> Float# -> Bool
neFloat# a b = a `E.neFloat#` b
ltFloat# :: Float# -> Float# -> Bool
ltFloat# a b = a `E.ltFloat#` b
leFloat# :: Float# -> Float# -> Bool
leFloat# a b = a `E.leFloat#` b
gtAddr# :: Addr# -> Addr# -> Bool
gtAddr# a b = a `E.gtAddr#` b
geAddr# :: Addr# -> Addr# -> Bool
geAddr# a b = a `E.geAddr#` b
eqAddr# :: Addr# -> Addr# -> Bool
eqAddr# a b = a `E.eqAddr#` b
neAddr# :: Addr# -> Addr# -> Bool
neAddr# a b = a `E.neAddr#` b
ltAddr# :: Addr# -> Addr# -> Bool
ltAddr# a b = a `E.ltAddr#` b
leAddr# :: Addr# -> Addr# -> Bool
leAddr# a b = a `E.leAddr#` b
sameMutableArray# :: MutableArray# s a -> MutableArray# s a -> Bool
sameMutableArray# a b = E.sameMutableArray# a b
sameMutableByteArray# :: MutableByteArray# s -> MutableByteArray# s -> Bool
sameMutableByteArray# a b = E.sameMutableByteArray# a b
sameMutableArrayArray# :: MutableArrayArray# s -> MutableArrayArray# s -> Bool
sameMutableArrayArray# a b = E.sameMutableArrayArray# a b
sameMutVar# :: MutVar# s a -> MutVar# s a -> Bool
sameMutVar# a b = E.sameMutVar# a b
sameTVar# :: TVar# s a -> TVar# s a -> Bool
sameTVar# a b = E.sameTVar# a b
sameMVar# :: MVar# s a -> MVar# s a -> Bool
sameMVar# a b = E.sameMVar# a b
#endif
|
rahulmutt/ghcvm
|
compiler/Eta/Utils/ExtsCompat46.hs
|
bsd-3-clause
| 8,932 | 0 | 8 | 1,824 | 2,174 | 1,271 | 903 | 127 | 1 |
{-|
Functions on lists and sequences.
Some of the functions follow the style of Data.Random.Extras
(from the random-extras package), but are written for use with
PRNGs from the "mwc-random" package rather than from the "random-fu" package.
-}
module Language.Hakaru.Util.Extras where
import qualified Data.Sequence as S
import qualified System.Random.MWC as MWC
import Data.Maybe
import qualified Data.Foldable as F
import qualified Data.Number.LogFloat as LF
import qualified Data.Vector as V
extract :: S.Seq a -> Int -> Maybe (S.Seq a, a)
extract s i | S.null r = Nothing
| otherwise = Just (a S.>< c, b)
where (a, r) = S.splitAt i s
(b S.:< c) = S.viewl r
randomExtract :: S.Seq a -> MWC.GenIO -> IO (Maybe (S.Seq a, a))
randomExtract s g = do
i <- MWC.uniformR (0, S.length s - 1) g
return $ extract s i
{-|
Given a sequence, return a *sorted* sequence of
n randomly selected elements from *distinct positions* in the sequence
-}
randomElems :: Ord a => S.Seq a -> Int -> IO (S.Seq a)
randomElems s n = do
g <- MWC.create
randomElemsTR S.empty s g n
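-- An added illustration (hypothetical output; the concrete elements depend on
-- the generator state): selecting 2 of 5 elements always yields a sorted,
-- 2-element sequence drawn from distinct positions, e.g.
--
-- > randomElems (S.fromList [10,20,30,40,50]) 2 -- might yield fromList [20,50]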
randomElemsTR :: Ord a => S.Seq a -> S.Seq a -> MWC.GenIO -> Int -> IO (S.Seq a)
randomElemsTR ixs s g n
| n == S.length s = return $ S.unstableSort s
| n == 1 = do (_,i) <- fmap fromJust (randomExtract s g)
return.S.unstableSort $ i S.<| ixs
| otherwise = do (s',i) <- fmap fromJust (randomExtract s g)
(randomElemsTR $! (i S.<| ixs)) s' g (n-1)
{-|
Chop a sequence at the given indices.
Assumes the number of indices given is less than the length of the sequence to be chopped.
-}
pieces :: S.Seq a -> S.Seq Int -> [S.Seq a]
pieces s ixs = let f (ps,r,x) y = let (p,r') = S.splitAt (y-x) r
in (p:ps,r',y)
g (a,b,_) = b:a
in g $ F.foldl f ([],s,0) ixs
{-|
Given n, chop a sequence at m random points
where m = min (length-1, n-1)
-}
randomPieces :: Int -> S.Seq a -> IO [S.Seq a]
randomPieces n s
| n >= l = return $ F.toList $ fmap S.singleton s
| otherwise = do ixs <- randomElems (S.fromList [1..l-1]) (n-1)
return $ pieces s ixs
where l = S.length s
{-|
> pairs [1,2,3,4]
[(1,2),(1,3),(1,4),(2,3),(2,4),(3,4)]
> pairs [1,2,4,4]
[(1,2),(1,4),(1,4),(2,4),(2,4),(4,4)]
-}
pairs :: [a] -> [(a,a)]
pairs [] = []
pairs (x:xs) = (zip (repeat x) xs) ++ pairs xs
l2Norm :: Floating a => [a] -> a
l2Norm l = sqrt.sum $ zipWith (*) l l
normalize :: [LF.LogFloat] ->
(LF.LogFloat, Double, [Double])
-- normalize xs == (x, y, ys)
-- ===> all (0 <=) ys && sum ys == y && xs == map (x *) ys
-- (therefore sum xs == x * y)
normalize [] = (0, 0, [])
normalize [x] = (x, 1, [1])
normalize xs = (m, y, ys)
where m = maximum xs
ys = [ LF.fromLogFloat (x/m) | x <- xs ]
y = sum ys
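-- Added worked example: for inputs corresponding to [2,4,6] we get m == 6,
-- ys == [1/3, 2/3, 1] and y == 2, so m * y == 12 == sum of the inputs,
-- matching the specification above.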
normalizeVector :: V.Vector LF.LogFloat ->
(LF.LogFloat, Double, V.Vector Double)
normalizeVector xs = case V.length xs of
0 -> (0, 0, V.empty)
1 -> (V.unsafeHead xs, 1, V.singleton 1)
_ -> let m = V.maximum xs
ys = V.map (\x -> LF.fromLogFloat (x/m)) xs
y = V.sum ys
in (m, y, ys)
|
suhailshergill/hakaru
|
Language/Hakaru/Util/Extras.hs
|
bsd-3-clause
| 3,237 | 1 | 17 | 909 | 1,257 | 654 | 603 | 60 | 3 |
-- Copyright (c) 2014 Eric McCorkle. All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions
-- are met:
-- 1. Redistributions of source code must retain the above copyright
-- notice, this list of conditions and the following disclaimer.
-- 2. Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
-- 3. Neither the name of the author nor the names of any contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS''
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
-- TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
-- PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS
-- OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-- LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
-- USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-- ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
-- OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
-- SUCH DAMAGE.
{-# OPTIONS_GHC -Wall -Werror #-}
-- | Defines a class of monads that buffer source. Useful for lexers that
-- need to retain the source text and its line boundaries for later lookup.
module Control.Monad.SourceBuffer.Class(
MonadSourceBuffer(..)
) where
import Control.Monad.Cont
import Control.Monad.Except
import Control.Monad.List
import Control.Monad.Reader
import Control.Monad.State
import Control.Monad.Trans.Journal
import Control.Monad.Writer
import Data.Position.Filename
import qualified Data.ByteString.Lazy as Lazy
-- | Class of monads that store source file contents during lexing and
-- split it into lines, which are then subsequently stored and
-- accessed through a 'MonadSourceFiles' interface.
class Monad m => MonadSourceBuffer m where
-- | Report a linebreak at the given offset in the file.
linebreak :: Int
-- ^ The offset of the linebreak.
-> m()
-- | Start a new file.
startFile :: Filename
-- ^ The file name.
-> Lazy.ByteString
-- ^ The file contents.
-> m ()
-- | Complete the file, adding the last line, and storing the split
-- lines.
finishFile :: m ()
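-- A hypothetical driver sketch (added illustration; @lexBuffered@ and
-- @newlineOffsets@ are not part of this package): a lexer typically brackets
-- its work with 'startFile' and 'finishFile', reporting each newline offset
-- via 'linebreak'.
--
-- > lexBuffered :: MonadSourceBuffer m => Filename -> Lazy.ByteString -> m ()
-- > lexBuffered fname contents = do
-- >   startFile fname contents
-- >   mapM_ linebreak (newlineOffsets contents)
-- >   finishFile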
instance MonadSourceBuffer m => MonadSourceBuffer (ContT r m) where
linebreak = lift . linebreak
startFile fname = lift . startFile fname
finishFile = lift finishFile
instance (MonadSourceBuffer m) => MonadSourceBuffer (ExceptT e m) where
linebreak = lift . linebreak
startFile fname = lift . startFile fname
finishFile = lift finishFile
instance (MonadSourceBuffer m) => MonadSourceBuffer (JournalT e m) where
linebreak = lift . linebreak
startFile fname = lift . startFile fname
finishFile = lift finishFile
instance MonadSourceBuffer m => MonadSourceBuffer (ListT m) where
linebreak = lift . linebreak
startFile fname = lift . startFile fname
finishFile = lift finishFile
instance MonadSourceBuffer m => MonadSourceBuffer (ReaderT r m) where
linebreak = lift . linebreak
startFile fname = lift . startFile fname
finishFile = lift finishFile
instance MonadSourceBuffer m => MonadSourceBuffer (StateT s m) where
linebreak = lift . linebreak
startFile fname = lift . startFile fname
finishFile = lift finishFile
instance (MonadSourceBuffer m, Monoid w) =>
MonadSourceBuffer (WriterT w m) where
linebreak = lift . linebreak
startFile fname = lift . startFile fname
finishFile = lift finishFile
|
emc2/compiler-misc
|
src/Control/Monad/SourceBuffer/Class.hs
|
bsd-3-clause
| 3,903 | 0 | 10 | 752 | 568 | 316 | 252 | 48 | 0 |
{-# LANGUAGE CPP, BangPatterns, ScopedTypeVariables #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{- | Copyright : (c) 2010-2012 Simon Meier
(c) 2010 Jasper van der Jeugt
License : BSD3-style (see LICENSE)
Maintainer : Simon Meier <[email protected]>
Portability : GHC
This module provides the types of fixed-size and bounded-size encodings,
which are the basic building blocks for constructing 'Builder's.
They are used for application specific performance tuning of 'Builder's.
For example,
libraries such as @blaze-html@
or @aeson@ use the functions provided by this module to implement efficient
encodings that combine escaping and character encoding.
We explain fixed-size and bounded-size encodings in three steps.
First, we define them formally.
Then, we explain how they can improve the performance of a 'Builder'.
Finally, we give two examples to illustrate their use.
/Fixed(-size) encodings/ are encodings that always result in a sequence of bytes
of a predetermined, fixed length.
An example of a fixed encoding is the big-endian encoding of a 'Word64',
which always results in exactly 8 bytes.
/Bounded(-size) encodings/ are encodings that always result in a sequence
of bytes that is no larger than a predetermined bound.
An example of a bounded encoding is the UTF-8 encoding of a 'Char',
which always results in at most 4 bytes.
Note that every fixed encoding is also a bounded encoding.
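For a concrete contrast (an illustration added here, using two encodings
defined later in this module): @'size' 'char8'@ is always exactly 1 byte per
'Char', whereas @'sizeBound' 'charUtf8'@ is 4 bytes per 'Char'.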
This module does not expose functions that exploit the special
properties of fixed-size encodings.
However,
they are exploited in the functions 'encodeSizePrefixed' and 'encodeChunked'
from "Data.ByteString.Lazy.Builder.Extras", which prefix a 'Builder' with
its (chunk) size.
In the following,
we therefore only refer to bounded encodings.
The goal of bounded encodings is to improve the performance of 'Builder's.
These improvements stem from making the two
most common steps performed by a 'Builder' more efficient.
The first most common step is the concatenation of two 'Builder's.
Internally,
concatenation corresponds to function composition.
(Note that 'Builder's can be seen as difference-lists
of buffer-filling functions;
cf. <http://hackage.haskell.org/cgi-bin/hackage-scripts/package/dlist>.
)
Function composition is a fast /O(1)/ operation.
However,
we can use bounded encodings to
remove some of these function compositions altogether,
which is obviously more efficient.
The second most common step performed by a 'Builder' is to fill a buffer
using a bounded encoding,
which works as follows.
The 'Builder' checks whether there is enough space left to
execute the bounded encoding.
If there is, then the 'Builder' executes the bounded encoding
and calls the next 'Builder' with the updated buffer.
Otherwise,
the 'Builder' signals its driver that it requires a new buffer.
This buffer must be at least as large as the bound of the encoding.
We can use bounded encodings to reduce the number of buffer-free
checks by fusing the buffer-free checks of consecutive
'Builder's.
We can also use bounded encodings to simplify the control flow
for signalling that a buffer is full by
ensuring that we check first that there is enough space left
and only then decide on how to encode a given value.
Let us illustrate these improvements on the
CSV-table rendering example from "Data.ByteString.Lazy.Builder".
Its \"hot code\" is the rendering of a table's cells,
which we implement as follows using only the functions from the
'Builder' API.
@
import "Data.ByteString.Lazy.Builder" as B
import "Data.ByteString.Lazy.Builder.ASCII" as B
renderCell :: Cell -> Builder
renderCell (StringC cs) = renderString cs
renderCell (IntC i) = B.intDec i
renderString :: String -> Builder
renderString cs = B.charUtf8 \'\"\' \<\> foldMap escape cs \<\> B.charUtf8 \'\"\'
where
escape \'\\\\\' = B.charUtf8 \'\\\\\' \<\> B.charUtf8 \'\\\\\'
escape \'\\\"\' = B.charUtf8 \'\\\\\' \<\> B.charUtf8 \'\\\"\'
escape c = B.charUtf8 c
@
Efficient encoding of 'Int's as decimal numbers is performed by @intDec@
from "Data.ByteString.Lazy.Builder.ASCII".
Optimization potential exists for the escaping of 'String's.
The above implementation has two optimization opportunities.
First,
we can use a single buffer free check for @4@ bytes before escaping a
character and only then decide on how to escape the character.
This simplifies the control flow and allows to avoid a closure construction,
which improves the performance of the 'Builder'.
Second,
the concatenations performed by 'foldMap' can be eliminated.
The following implementation exploits these optimizations.
@
import qualified Data.ByteString.Lazy.Builder.BasicEncoding as E
import Data.ByteString.Lazy.Builder.BasicEncoding
( 'ifB', 'fromF', 'pairF', ('>$<') )
renderString :: String -\> Builder
renderString cs =
B.charUtf8 \'\"\' \<\> E.'encodeListWithB' escape cs \<\> B.charUtf8 \'\"\'
where
escape :: E.'BoundedEncoding' Char
escape = E.'charUtf8AsciiWith' $
'ifB' (== \'\\\\\') (fixed2 (\'\\\\\', \'\\\\\')) $
'ifB' (== \'\\\"\') (fixed2 (\'\\\\\', \'\\\"\')) $
('fromF' E.'char8')
 
{-\# INLINE fixed2 \#-}
fixed2 x = 'fromF' $ const x '>$<' E.'char8' `pairF` E.'char8'
@
The code should be mostly self-explanatory.
The slightly awkward syntax is because the combinators
are written such that the 'sizeBound' of the resulting 'BoundedEncoding'
can be computed at compile time.
We also explicitly inline the 'fixed2' encoding,
which encodes a fixed tuple of characters,
to ensure that the bound computation happens at compile time.
When encoding the following list of 'String's,
the optimized implementation of 'renderString' is two times faster.
@
maxiStrings :: [String]
maxiStrings = take 1000 $ cycle [\"hello\", \"\\\"1\\\"\", \"λ-wörld\"]
@
Most of the performance gain stems from using 'encodeListWithB',
which encodes a list of values from left-to-right with a
'BoundedEncoding'.
It exploits the 'Builder' internals to avoid unnecessary function
compositions (i.e., concatenations).
In the future,
we would expect the compiler to perform the optimizations
implemented in 'encodeListWithB' by itself.
However,
it seems that the code is currently too complicated for the
compiler to see through.
Therefore,
we provide the 'BoundedEncoding' escape hatch,
which allows data structures to provide very efficient encoding traversals,
like 'encodeListWithB' for lists.
Note that 'BoundedEncoding's are a bit verbose, but quite versatile.
Here is an example of a 'BoundedEncoding' for combined HTML escaping and
UTF-8 encoding.
@
{-\# INLINE charUtf8HtmlEscaped \#-}
charUtf8HtmlEscaped :: E.BoundedEncoding Char
charUtf8HtmlEscaped =
E.'charUtf8AsciiWith' $
'ifB' (== \'\<\' ) (fixed4 (\'&\',(\'l\',(\'t\',\';\')))) $ -- <
'ifB' (== \'\>\' ) (fixed4 (\'&\',(\'g\',(\'t\',\';\')))) $ -- >
'ifB' (== \'&\' ) (fixed5 (\'&\',(\'a\',(\'m\',(\'p\',\';\'))))) $ -- &
'ifB' (== \'\"\' ) (fixed5 (\'&\',(\'\#\',(\'3\',(\'4\',\';\'))))) $ -- &\#34;
('fromF' E.'char8') -- unescaped ASCII characters
where
{-\# INLINE fixed4 \#-}
fixed4 x = 'fromF' $ const x '>$<'
E.char8 '>*<' E.char8 '>*<' E.char8 '>*<' E.char8
 
{-\# INLINE fixed5 \#-}
fixed5 x = 'fromF' $ const x '>$<'
E.char8 '>*<' E.char8 '>*<' E.char8 '>*<' E.char8 '>*<' E.char8
@
Note that this HTML escaping is only suitable for HTML attribute values that
are /double-quoted/ and for HTML content.
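Such an escaping encoding is typically run over whole 'String's; for example
(an added illustration), @'encodeListWithB' charUtf8HtmlEscaped@ has type
@String -> 'Builder'@ and escapes and UTF-8 encodes a 'String' in one pass.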
-}
module Data.ByteString.Lazy.Builder.BasicEncoding (
-- * Fixed-size encodings
FixedEncoding
, size
-- ** Combinators
-- | The combinators for 'FixedEncoding's are implemented such that the 'size'
-- of the resulting 'FixedEncoding' is computed at compile time.
, emptyF
, pairF
, contramapF
-- ** Builder construction
-- | In terms of expressivity, the function 'encodeWithF' would be sufficient
-- for constructing 'Builder's from 'FixedEncoding's. The fused variants of
-- this function are provided because they allow for more efficient
-- implementations. Our compilers are just not smart enough yet; and for some
-- of the employed optimizations (see the code of 'encodeByteStringWithF')
-- they will very likely never be.
--
-- Note that functions marked with \"/Heavy inlining./\" are forced to be
-- inlined because they must be specialized for concrete encodings,
-- but are rather heavy in terms of code size. We recommend to define a
-- top-level function for every concrete instantiation of such a function in
-- order to share its code. A typical example is the function
-- 'byteStringHexFixed' from "Data.ByteString.Lazy.Builder.ASCII", which is
-- implemented as follows.
--
-- @
-- {-\# NOINLINE byteStringHexFixed \#-}
-- byteStringHexFixed :: S.ByteString -> Builder
-- byteStringHexFixed = 'encodeByteStringWithF' 'word8HexFixed'
-- @
--
, encodeWithF
, encodeListWithF
, encodeUnfoldrWithF
, encodeByteStringWithF
, encodeLazyByteStringWithF
-- * Bounded-size encodings
, BoundedEncoding
, sizeBound
-- ** Combinators
-- | The combinators for 'BoundedEncoding's are implemented such that the
-- 'sizeBound' of the resulting 'BoundedEncoding' is computed at compile time.
, fromF
, emptyB
, pairB
, eitherB
, ifB
, contramapB
-- | We provide an overloaded operator for the 'contramapF' and 'contramapB'
-- combinators to allow for a more convenient syntax. The operator is
-- source-compatible with the one provided by the @contravariant@ library.
-- Once this library is part of the Haskell platform, we will make
-- 'FixedEncoding' and 'BoundedEncoding' instances of its 'Contravariant'
-- type-class.
, (>$<)
-- ** Builder construction
, encodeWithB
, encodeListWithB
, encodeUnfoldrWithB
, encodeByteStringWithB
, encodeLazyByteStringWithB
-- * Standard encodings of Haskell values
, module Data.ByteString.Lazy.Builder.BasicEncoding.Binary
-- ** Character encodings
, module Data.ByteString.Lazy.Builder.BasicEncoding.ASCII
-- *** ISO/IEC 8859-1 (Char8)
-- | The ISO/IEC 8859-1 encoding is an 8-bit encoding often known as Latin-1.
-- The /Char8/ encoding implemented here works by truncating the Unicode
-- codepoint to 8 bits and encoding it as a single byte. For the codepoints
-- 0-255 this corresponds to the ISO/IEC 8859-1 encoding. Note that the
-- Char8 encoding is equivalent to the ASCII encoding on the Unicode
-- codepoints 0-127. Hence, functions such as 'intDec' can also be used for
-- encoding 'Int's as a decimal number with Char8 encoded characters.
, char8
-- *** UTF-8
-- | The UTF-8 encoding can encode all Unicode codepoints.
-- It is equivalent to the ASCII encoding on the Unicode codepoints 0-127.
-- Hence, functions such as 'intDec' can also be used for encoding 'Int's as
-- a decimal number with UTF-8 encoded characters.
, charUtf8
, charUtf8AsciiWith
-- * Testing support
-- | The following four functions are intended for testing use
-- only. They are /not/ efficient. 'FixedEncoding's and 'BoundedEncoding's
-- are efficiently executed by creating 'Builder's from them using the
-- @encodeXXX@ functions explained at the top of this module.
, evalF
, evalB
, showF
, showB
) where
import Data.ByteString.Lazy.Builder.Internal
import Data.ByteString.Lazy.Builder.BasicEncoding.Internal.UncheckedShifts
import Data.ByteString.Lazy.Builder.BasicEncoding.Internal.Base16 (lowerTable, encode4_as_8)
import qualified Data.ByteString as S
import qualified Data.ByteString.Internal as S
import qualified Data.ByteString.Lazy.Internal as L
import Data.Monoid
import Data.Foldable (foldMap) -- HADDOCK ONLY
import Data.List (unfoldr) -- HADDOCK ONLY
import Data.Char (chr, ord)
import Control.Monad ((<=<), unless)
import Data.ByteString.Lazy.Builder.BasicEncoding.Internal
import Data.ByteString.Lazy.Builder.BasicEncoding.Binary
import Data.ByteString.Lazy.Builder.BasicEncoding.ASCII
#if MIN_VERSION_base(4,4,0)
import Foreign hiding (unsafePerformIO, unsafeForeignPtrToPtr)
import Foreign.ForeignPtr.Unsafe (unsafeForeignPtrToPtr)
import System.IO.Unsafe (unsafePerformIO)
#else
import Foreign
#endif
------------------------------------------------------------------------------
-- Creating Builders from bounded encodings
------------------------------------------------------------------------------
-- | Encode a value with a 'FixedEncoding'.
{-# INLINE encodeWithF #-}
encodeWithF :: FixedEncoding a -> (a -> Builder)
encodeWithF = encodeWithB . fromF
-- | Encode a list of values from left-to-right with a 'FixedEncoding'.
{-# INLINE encodeListWithF #-}
encodeListWithF :: FixedEncoding a -> ([a] -> Builder)
encodeListWithF = encodeListWithB . fromF
-- | Encode a list of values represented as an 'unfoldr' with a 'FixedEncoding'.
{-# INLINE encodeUnfoldrWithF #-}
encodeUnfoldrWithF :: FixedEncoding b -> (a -> Maybe (b, a)) -> a -> Builder
encodeUnfoldrWithF = encodeUnfoldrWithB . fromF
-- | /Heavy inlining./ Encode all bytes of a strict 'S.ByteString' from
-- left-to-right with a 'FixedEncoding'. This function is quite versatile. For
-- example, we can use it to construct a 'Builder' that maps every byte before
-- copying it to the buffer to be filled.
--
-- @
--mapToBuilder :: (Word8 -> Word8) -> S.ByteString -> Builder
--mapToBuilder f = 'encodeByteStringWithF' ('contramapF' f 'word8')
-- @
--
-- We can also use it to hex-encode a strict 'S.ByteString' as shown in the
-- 'byteStringHexFixed' example above.
{-# INLINE encodeByteStringWithF #-}
encodeByteStringWithF :: FixedEncoding Word8 -> (S.ByteString -> Builder)
encodeByteStringWithF = encodeByteStringWithB . fromF
-- | /Heavy inlining./ Encode all bytes of a lazy 'L.ByteString' from
-- left-to-right with a 'FixedEncoding'.
{-# INLINE encodeLazyByteStringWithF #-}
encodeLazyByteStringWithF :: FixedEncoding Word8 -> (L.ByteString -> Builder)
encodeLazyByteStringWithF = encodeLazyByteStringWithB . fromF
-- IMPLEMENTATION NOTE: Sadly, 'encodeListWith' cannot be used for foldr/build
-- fusion. Its performance relies on hoisting several variables out of the
-- inner loop. That's not possible when writing 'encodeListWith' as a 'foldr'.
-- If we had stream fusion for lists, then we could fuse 'encodeListWith', as
-- 'encodeWithStream' can keep control over the execution.
-- | Encode a value with a 'BoundedEncoding'.
--
-- Note that consecutive uses of 'encodeWithB' and 'encodeWithF' are rewritten
-- such that their bounds-checks are fused; i.e., we rewrite using the rules
--
-- > encodeWithF
-- >= encodeWithB . fromF
--
-- > encodeWithB be1 x1 `mappend` (encodeWithB be2 x2)
-- >= encodeWithB (pairB be1 be2) (x1, x2)
--
-- For example,
--
-- >encodeWithB (word32 x1) `mappend` encodeWithB (word32 x2)
--
-- is rewritten such that the resulting 'Builder' checks only once, if there are
-- at least 8 free bytes, instead of checking twice, if there are 4 free bytes.
-- This rewrite rule is not observationally equivalent, as it may change the
-- boundaries of the generated chunks. We deem this acceptable, as for all
-- use-cases of 'Builder's known to us the precise location of chunk
-- boundaries does not matter.
--
{-# INLINE[1] encodeWithB #-}
encodeWithB :: BoundedEncoding a -> (a -> Builder)
encodeWithB w x =
-- It is important to avoid recursive 'BuildStep's where possible, as
-- their closure allocation is expensive. Using 'ensureFree' allows the
-- 'step' to assume that at least 'sizeBound w' free space is available.
ensureFree (sizeBound w) `mappend` builder step
where
step k (BufferRange op ope) = do
op' <- runB w x op
let !br' = BufferRange op' ope
k br'
-- Fuse bounds-checks of concatenated 'BoundedEncoding's.
{-# RULES
"append/encodeWithB" forall w1 w2 x1 x2.
append (encodeWithB w1 x1) (encodeWithB w2 x2)
= encodeWithB (pairB w1 w2) (x1, x2)
"append/encodeWithB/assoc_r" forall w1 w2 x1 x2 b.
append (encodeWithB w1 x1) (append (encodeWithB w2 x2) b)
= append (encodeWithB (pairB w1 w2) (x1, x2)) b
"append/encodeWithB/assoc_l" forall w1 w2 x1 x2 b.
append (append b (encodeWithB w1 x1)) (encodeWithB w2 x2)
= append b (encodeWithB (pairB w1 w2) (x1, x2))
#-}
-- | Encodes a list of values from left-to-right using a 'BoundedEncoding'.
{-# INLINE encodeListWithB #-}
encodeListWithB :: BoundedEncoding a -> [a] -> Builder
encodeListWithB w xs0 =
builder $ step xs0
where
step xs1 k (BufferRange op0 ope0) =
go xs1 op0
where
go [] !op = k (BufferRange op ope0)
go xs@(x':xs') !op
| op `plusPtr` bound <= ope0 = runB w x' op >>= go xs'
| otherwise =
return $ bufferFull bound op (step xs k)
bound = sizeBound w
-- TODO: Think about adding 'foldMap/encodeWith' fusion and its variants
-- TODO: Think about rewriting 'encodeWithB w . f = encodeWithB (w #. f)'
-- | Create a 'Builder' that encodes a list of values represented as an
-- 'unfoldr' with a 'BoundedEncoding'.
{-# INLINE encodeUnfoldrWithB #-}
encodeUnfoldrWithB :: BoundedEncoding b -> (a -> Maybe (b, a)) -> a -> Builder
encodeUnfoldrWithB w f x0 =
builder $ fillWith x0
where
fillWith x k !(BufferRange op0 ope0) =
go (f x) op0
where
go !Nothing !op = do let !br' = BufferRange op ope0
k br'
go !(Just (y, x')) !op
| op `plusPtr` bound <= ope0 = runB w y op >>= go (f x')
| otherwise = return $ bufferFull bound op $
\(BufferRange opNew opeNew) -> do
!opNew' <- runB w y opNew
fillWith x' k (BufferRange opNew' opeNew)
bound = sizeBound w
-- | /Heavy inlining./ Encode all bytes of a strict 'S.ByteString' from
-- left-to-right with a 'BoundedEncoding'.
--
-- For example, we can use this function to construct a 'Builder' that filters
-- every byte before copying it to the buffer to be filled.
--
-- @
--filterToBuilder :: (Word8 -> Bool) -> S.ByteString -> Builder
--filterToBuilder p =
-- 'encodeByteStringWithB' ('ifB' p ('fromF' 'word8') 'emptyB')
-- @
--
{-# INLINE encodeByteStringWithB #-}
encodeByteStringWithB :: BoundedEncoding Word8 -> S.ByteString -> Builder
encodeByteStringWithB w =
\bs -> builder $ step bs
where
bound = sizeBound w
step (S.PS ifp ioff isize) !k =
goBS (unsafeForeignPtrToPtr ifp `plusPtr` ioff)
where
!ipe = unsafeForeignPtrToPtr ifp `plusPtr` (ioff + isize)
goBS !ip0 !br@(BufferRange op0 ope)
| ip0 >= ipe = do
touchForeignPtr ifp -- input buffer consumed
k br
| op0 `plusPtr` bound < ope =
goPartial (ip0 `plusPtr` min outRemaining inpRemaining)
| otherwise = return $ bufferFull bound op0 (goBS ip0)
where
outRemaining = (ope `minusPtr` op0) `div` bound
inpRemaining = ipe `minusPtr` ip0
goPartial !ipeTmp = go ip0 op0
where
go !ip !op
| ip < ipeTmp = do
x <- peek ip
op' <- runB w x op
go (ip `plusPtr` 1) op'
| otherwise =
goBS ip (BufferRange op ope)
-- | /Heavy inlining./ Encode all bytes of a lazy 'L.ByteString' from
-- left-to-right with a 'BoundedEncoding'.
{-# INLINE encodeLazyByteStringWithB #-}
encodeLazyByteStringWithB :: BoundedEncoding Word8 -> L.ByteString -> Builder
encodeLazyByteStringWithB w =
L.foldrChunks (\x b -> encodeByteStringWithB w x `mappend` b) mempty
------------------------------------------------------------------------------
-- Char8 encoding
------------------------------------------------------------------------------
-- | Char8 encode a 'Char'.
{-# INLINE char8 #-}
char8 :: FixedEncoding Char
char8 = (fromIntegral . ord) >$< word8
------------------------------------------------------------------------------
-- UTF-8 encoding
------------------------------------------------------------------------------
-- | UTF-8 encode a 'Char'.
{-# INLINE charUtf8 #-}
charUtf8 :: BoundedEncoding Char
charUtf8 = charUtf8AsciiWith (fromF char8)
-- | UTF-8 encode all 'Char's with codepoints greater than or equal to 128 and
-- use a special encoding for the ASCII characters.
--
-- This function is typically used to implement UTF-8 encoding combined with
-- escaping. For example, escaping the \" and the \\ characters as in
-- Haskell 'String's works as follows.
--
-- @
--{-\# INLINE charUtf8Escaped \#-}
--charUtf8Escaped :: 'BoundedEncoding' Char
--charUtf8Escaped = 'charUtf8AsciiWith' $
-- 'ifB' (== \'\\\\\') (fixed2 (\'\\\\\', \'\\\\\')) $
-- 'ifB' (== \'\\\"\') (fixed2 (\'\\\\\', \'\\\"\')) $
-- ('fromF' 'char8')
-- where
-- {-\# INLINE fixed2 \#-}
-- fixed2 x = 'fromF' $ const x '>$<' 'char8' \`pairF\` 'char8'
-- @
--
-- The following function would then escape 'String's.
--
-- @
--escapeString :: String -> 'Builder'
--escapeString = 'encodeListWithB' charUtf8Escaped
-- @
--
-- For example,
-- @toLazyByteString (escapeString \"\\\"λ-wörld\\\"\") == \"\\\"\\206\\187-w\\195\\182rld\\\"\"@.
--
{-# INLINE charUtf8AsciiWith #-}
charUtf8AsciiWith
:: BoundedEncoding Char
-- ^ Encoding for the ASCII characters. It is guaranteed
-- to be called only with 'Char's with codepoint less than 128.
-> BoundedEncoding Char
-- ^ Resulting 'BoundedEncoding' that combines UTF-8 encoding with
-- the special encoding for the ASCII characters.
charUtf8AsciiWith ascii =
ifB (<= '\x7F') ascii $ (ord >$<) $
ifB (<= 0x07FF)
(fromF (
(\x ->
( fromIntegral $ (x `shiftR` 6) + 0xC0
, fromIntegral $ (x .&. 0x3F) + 0x80 )
) >$< (word8 `pairF` word8)
) ) $
ifB (<= 0xFFFF)
(fromF $ (
(\x ->
( (
fromIntegral $ (x `shiftR` 12) + 0xE0
, fromIntegral $ ((x `shiftR` 6) .&. 0x3F) + 0x80 )
, fromIntegral $ (x .&. 0x3F) + 0x80 )
) >$< (word8 `pairF` word8 `pairF` word8)
) ) $
(fromF $ (
(\x ->
( ( (
fromIntegral $ (x `shiftR` 18) + 0xF0
, fromIntegral $ ((x `shiftR` 12) .&. 0x3F) + 0x80 )
, fromIntegral $ ((x `shiftR` 6) .&. 0x3F) + 0x80 )
, fromIntegral $ (x .&. 0x3F) + 0x80 )
) >$< (word8 `pairF` word8 `pairF` word8 `pairF` word8)
) )
------------------------------------------------------------------------------
-- Testing encodings
------------------------------------------------------------------------------
-- | /For testing use only./ Evaluate a 'FixedEncoding' on a given value.
evalF :: FixedEncoding a -> a -> [Word8]
evalF fe = S.unpack . S.unsafeCreate (size fe) . runF fe
-- | /For testing use only./ Evaluate a 'BoundedEncoding' on a given value.
evalB :: BoundedEncoding a -> a -> [Word8]
evalB be x = S.unpack $ unsafePerformIO $
S.createAndTrim (sizeBound be) $ \op -> do
op' <- runB be x op
return (op' `minusPtr` op)
-- | /For testing use only./ Show the result of a 'FixedEncoding' of a given
-- value as a 'String' by interpreting the resulting bytes as Unicode
-- codepoints.
showF :: FixedEncoding a -> a -> String
showF fe = map (chr . fromIntegral) . evalF fe
-- | /For testing use only./ Show the result of a 'BoundedEncoding' of a given
-- value as a 'String' by interpreting the resulting bytes as Unicode
-- codepoints.
showB :: BoundedEncoding a -> a -> String
showB be = map (chr . fromIntegral) . evalB be
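-- For instance (an illustrative sketch; the results follow from the encodings
-- defined above in this module):
--
-- > evalF char8 'A' == [65]
-- > showB charUtf8 '\955' == "\206\187"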
|
meiersi/bytestring-builder
|
Data/ByteString/Lazy/Builder/BasicEncoding.hs
|
bsd-3-clause
| 24,284 | 0 | 21 | 5,159 | 2,266 | 1,315 | 951 | 187 | 2 |
{-# LANGUAGE RankNTypes #-}
{-# OPTIONS_HADDOCK hide #-}
-- |
-- Lenses are immensely useful for working with state, but I don't want
-- to pull in the full Kmettoverse for a small set of combinators.
--
-- Here we redefine all necessary combinators. Full compatibility with
-- lens is maintained.
module DNA.Lens where
import Control.Applicative
import Control.Monad.State
import Data.Functor.Identity
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
type Lens' s a = forall f. Functor f => (a -> f a) -> (s -> f s)
lens :: (s -> a) -> (a -> s -> s) -> Lens' s a
lens getf putf = \f s -> flip putf s <$> f (getf s)
-- Get value from object
(^.) :: s -> Lens' s a -> a
s ^. l = getConst $ l Const s
-- Put value into object
set :: Lens' s a -> a -> s -> s
set l a s = runIdentity $ l (\_ -> Identity a) s
over :: Lens' s a -> (a -> a) -> s -> s
over l f s = runIdentity $ l (Identity . f) s
(.=) :: MonadState s m => Lens' s a -> a -> m ()
l .= b = modify' $ set l b
(%=) :: MonadState s m => Lens' s a -> (a -> a) -> m ()
l %= b = modify' $ over l b
infix 4 .=, %=
use :: MonadState s m => Lens' s a -> m a
use l = do
s <- get
return $ s ^. l
at :: (Ord k) => k -> Lens' (Map k v) (Maybe v)
at k f m = f mv <&> \r -> case r of
Nothing -> maybe m (const (Map.delete k m)) mv
Just v' -> Map.insert k v' m
where mv = Map.lookup k m
{-# INLINE at #-}
(<&>) :: Functor f => f a -> (a -> b) -> f b
(<&>) = flip (<$>)
failure :: MonadIO m => String -> m a
failure msg = do
liftIO $ putStrLn $ "FAILED: " ++ msg
error msg
zoom :: Monad m => Lens' s a -> StateT a m b -> StateT s m b
zoom l action = do
s <- get
(b,a') <- lift $ runStateT action (s ^. l)
put $ set l a' s
return b
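-- Example usage (an illustrative sketch; the record type and field names below
-- are hypothetical and not part of this module):
--
-- > data S = S { _counter :: Int, _cache :: Map String Int }
-- >
-- > counter :: Lens' S Int
-- > counter = lens _counter (\x s -> s { _counter = x })
-- >
-- > cache :: Lens' S (Map String Int)
-- > cache = lens _cache (\x s -> s { _cache = x })
-- >
-- > touch :: MonadState S m => String -> m ()
-- > touch k = do
-- >     counter %= (+1)
-- >     cache . at k .= Just 0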
|
SKA-ScienceDataProcessor/RC
|
MS6/dna/core/DNA/Lens.hs
|
apache-2.0
| 1,760 | 0 | 15 | 479 | 821 | 420 | 401 | 44 | 2 |
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.GL.PixelRectangles.PixelTransfer
-- Copyright : (c) Sven Panne 2002-2013
-- License : BSD3
--
-- Maintainer : Sven Panne <[email protected]>
-- Stability : stable
-- Portability : portable
--
-- This module corresponds to a part of section 3.6.1 (Pixel Storage Modes) of
-- the OpenGL 2.1 specs.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.GL.PixelRectangles.PixelTransfer (
PixelTransferStage(..),
mapColor, mapStencil, indexShift, indexOffset, depthScale, depthBias,
rgbaScale, rgbaBias
) where
import Graphics.Rendering.OpenGL.GL.Capability
import Graphics.Rendering.OpenGL.GL.QueryUtils
import Graphics.Rendering.OpenGL.GL.StateVar
import Graphics.Rendering.OpenGL.GL.VertexSpec
import Graphics.Rendering.OpenGL.Raw
--------------------------------------------------------------------------------
data PixelTransfer =
MapColor
| MapStencil
| IndexShift
| IndexOffset
| RedScale
| RedBias
| GreenScale
| GreenBias
| BlueScale
| BlueBias
| AlphaScale
| AlphaBias
| DepthScale
| DepthBias
| PostConvolutionRedScale
| PostConvolutionGreenScale
| PostConvolutionBlueScale
| PostConvolutionAlphaScale
| PostConvolutionRedBias
| PostConvolutionGreenBias
| PostConvolutionBlueBias
| PostConvolutionAlphaBias
| PostColorMatrixRedScale
| PostColorMatrixGreenScale
| PostColorMatrixBlueScale
| PostColorMatrixAlphaScale
| PostColorMatrixRedBias
| PostColorMatrixGreenBias
| PostColorMatrixBlueBias
| PostColorMatrixAlphaBias
marshalPixelTransfer :: PixelTransfer -> GLenum
marshalPixelTransfer x = case x of
MapColor -> gl_MAP_COLOR
MapStencil -> gl_MAP_STENCIL
IndexShift -> gl_INDEX_SHIFT
IndexOffset -> gl_INDEX_OFFSET
RedScale -> gl_RED_SCALE
RedBias -> gl_RED_BIAS
GreenScale -> gl_GREEN_SCALE
GreenBias -> gl_GREEN_BIAS
BlueScale -> gl_BLUE_SCALE
BlueBias -> gl_BLUE_BIAS
AlphaScale -> gl_ALPHA_SCALE
AlphaBias -> gl_ALPHA_BIAS
DepthScale -> gl_DEPTH_SCALE
DepthBias -> gl_DEPTH_BIAS
PostConvolutionRedScale -> gl_POST_CONVOLUTION_RED_SCALE
PostConvolutionGreenScale -> gl_POST_CONVOLUTION_GREEN_SCALE
PostConvolutionBlueScale -> gl_POST_CONVOLUTION_BLUE_SCALE
PostConvolutionAlphaScale -> gl_POST_CONVOLUTION_ALPHA_SCALE
PostConvolutionRedBias -> gl_POST_CONVOLUTION_RED_BIAS
PostConvolutionGreenBias -> gl_POST_CONVOLUTION_GREEN_BIAS
PostConvolutionBlueBias -> gl_POST_CONVOLUTION_BLUE_BIAS
PostConvolutionAlphaBias -> gl_POST_CONVOLUTION_ALPHA_BIAS
PostColorMatrixRedScale -> gl_POST_COLOR_MATRIX_RED_SCALE
PostColorMatrixGreenScale -> gl_POST_COLOR_MATRIX_GREEN_SCALE
PostColorMatrixBlueScale -> gl_POST_COLOR_MATRIX_BLUE_SCALE
PostColorMatrixAlphaScale -> gl_POST_COLOR_MATRIX_ALPHA_SCALE
PostColorMatrixRedBias -> gl_POST_COLOR_MATRIX_RED_BIAS
PostColorMatrixGreenBias -> gl_POST_COLOR_MATRIX_GREEN_BIAS
PostColorMatrixBlueBias -> gl_POST_COLOR_MATRIX_BLUE_BIAS
PostColorMatrixAlphaBias -> gl_POST_COLOR_MATRIX_ALPHA_BIAS
--------------------------------------------------------------------------------
data PixelTransferStage =
PreConvolution
| PostConvolution
| PostColorMatrix
deriving ( Eq, Ord, Show )
stageToGetScales ::
PixelTransferStage
-> (PName1F, PName1F, PName1F, PName1F)
stageToGetScales s = case s of
PreConvolution -> (GetRedScale,
GetGreenScale,
GetBlueScale,
GetAlphaScale)
PostConvolution -> (GetPostConvolutionRedScale,
GetPostConvolutionGreenScale,
GetPostConvolutionBlueScale,
GetPostConvolutionAlphaScale)
PostColorMatrix -> (GetPostColorMatrixRedScale,
GetPostColorMatrixGreenScale,
GetPostColorMatrixBlueScale,
GetPostColorMatrixAlphaScale)
stageToSetScales ::
PixelTransferStage
-> (PixelTransfer, PixelTransfer, PixelTransfer, PixelTransfer)
stageToSetScales s = case s of
PreConvolution -> (RedScale,
GreenScale,
BlueScale,
AlphaScale)
PostConvolution -> (PostConvolutionRedScale,
PostConvolutionGreenScale,
PostConvolutionBlueScale,
PostConvolutionAlphaScale)
PostColorMatrix -> (PostColorMatrixRedScale,
PostColorMatrixGreenScale,
PostColorMatrixBlueScale,
PostColorMatrixAlphaScale)
stageToGetBiases ::
PixelTransferStage
-> (PName1F, PName1F, PName1F, PName1F)
stageToGetBiases s = case s of
PreConvolution -> (GetRedBias,
GetGreenBias,
GetBlueBias,
GetAlphaBias)
PostConvolution -> (GetPostConvolutionRedBias,
GetPostConvolutionGreenBias,
GetPostConvolutionBlueBias,
GetPostConvolutionAlphaBias)
PostColorMatrix -> (GetPostColorMatrixRedBias,
GetPostColorMatrixGreenBias,
GetPostColorMatrixBlueBias,
GetPostColorMatrixAlphaBias)
stageToSetBiases ::
PixelTransferStage
-> (PixelTransfer, PixelTransfer, PixelTransfer, PixelTransfer)
stageToSetBiases s = case s of
PreConvolution -> (RedBias,
GreenBias,
BlueBias,
AlphaBias)
PostConvolution -> (PostConvolutionRedBias,
PostConvolutionGreenBias,
PostConvolutionBlueBias,
PostConvolutionAlphaBias)
PostColorMatrix -> (PostColorMatrixRedBias,
PostColorMatrixGreenBias,
PostColorMatrixBlueBias,
PostColorMatrixAlphaBias)
--------------------------------------------------------------------------------
mapColor :: StateVar Capability
mapColor = pixelTransferb GetMapColor MapColor
mapStencil :: StateVar Capability
mapStencil = pixelTransferb GetMapStencil MapStencil
indexShift :: StateVar GLint
indexShift = pixelTransferi GetIndexShift IndexShift
indexOffset :: StateVar GLint
indexOffset = pixelTransferi GetIndexOffset IndexOffset
depthScale :: StateVar GLfloat
depthScale = pixelTransferf GetDepthScale DepthScale
depthBias :: StateVar GLfloat
depthBias = pixelTransferf GetDepthBias DepthBias
rgbaScale :: PixelTransferStage -> StateVar (Color4 GLfloat)
rgbaScale s = pixelTransfer4f (stageToGetScales s) (stageToSetScales s)
rgbaBias :: PixelTransferStage -> StateVar (Color4 GLfloat)
rgbaBias s = pixelTransfer4f (stageToGetBiases s) (stageToSetBiases s)
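-- Example usage (an illustrative sketch, using the '($=)' setter and 'get' from
-- the StateVar interface imported above):
--
-- @
-- rgbaScale PreConvolution $= Color4 2 1 1 1
-- bias <- get depthBias
-- @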
--------------------------------------------------------------------------------
pixelTransferb :: GetPName1I p => p -> PixelTransfer -> StateVar Capability
pixelTransferb pn pt =
makeStateVar
(getBoolean1 unmarshalCapability pn)
(glPixelTransferi (marshalPixelTransfer pt) .
fromIntegral . marshalCapability)
pixelTransferi :: GetPName1I p => p -> PixelTransfer -> StateVar GLint
pixelTransferi pn pt =
makeStateVar
(getInteger1 id pn)
(glPixelTransferi (marshalPixelTransfer pt))
pixelTransferf :: GetPName1F p => p -> PixelTransfer -> StateVar GLfloat
pixelTransferf pn pt =
makeStateVar
(getFloat1 id pn)
(glPixelTransferf (marshalPixelTransfer pt))
pixelTransfer4f :: GetPName1F p =>
(p, p, p, p)
-> (PixelTransfer, PixelTransfer, PixelTransfer, PixelTransfer)
-> StateVar (Color4 GLfloat)
pixelTransfer4f (pr, pg, pb, pa) (tr, tg, tb, ta) = makeStateVar get4f set4f
where get4f = do
r <- getFloat1 id pr
g <- getFloat1 id pg
b <- getFloat1 id pb
a <- getFloat1 id pa
return $ Color4 r g b a
set4f (Color4 r g b a) = do
glPixelTransferf (marshalPixelTransfer tr) r
glPixelTransferf (marshalPixelTransfer tg) g
glPixelTransferf (marshalPixelTransfer tb) b
glPixelTransferf (marshalPixelTransfer ta) a
|
hesiod/OpenGL
|
src/Graphics/Rendering/OpenGL/GL/PixelRectangles/PixelTransfer.hs
|
bsd-3-clause
| 8,389 | 0 | 11 | 1,966 | 1,386 | 766 | 620 | 189 | 30 |
{-# OPTIONS_GHC -fno-warn-orphans #-}
-- This module is full of orphans, unfortunately
module GHCi.TH.Binary () where
import Data.Binary
import qualified Data.ByteString as B
import Data.Typeable
import GHC.Serialized
import qualified Language.Haskell.TH as TH
import qualified Language.Haskell.TH.Syntax as TH
-- Put these in a separate module because they take ages to compile
instance Binary TH.Loc
instance Binary TH.Name
instance Binary TH.ModName
instance Binary TH.NameFlavour
instance Binary TH.PkgName
instance Binary TH.NameSpace
instance Binary TH.Module
instance Binary TH.Info
instance Binary TH.Type
instance Binary TH.TyLit
instance Binary TH.TyVarBndr
instance Binary TH.Role
instance Binary TH.Lit
instance Binary TH.Range
instance Binary TH.Stmt
instance Binary TH.Pat
instance Binary TH.Exp
instance Binary TH.Dec
instance Binary TH.Overlap
instance Binary TH.Guard
instance Binary TH.Body
instance Binary TH.Match
instance Binary TH.Fixity
instance Binary TH.TySynEqn
instance Binary TH.FamFlavour
instance Binary TH.FunDep
instance Binary TH.AnnTarget
instance Binary TH.RuleBndr
instance Binary TH.Phases
instance Binary TH.RuleMatch
instance Binary TH.Inline
instance Binary TH.Pragma
instance Binary TH.Safety
instance Binary TH.Callconv
instance Binary TH.Foreign
instance Binary TH.Bang
instance Binary TH.SourceUnpackedness
instance Binary TH.SourceStrictness
instance Binary TH.DecidedStrictness
instance Binary TH.FixityDirection
instance Binary TH.OccName
instance Binary TH.Con
instance Binary TH.AnnLookup
instance Binary TH.ModuleInfo
instance Binary TH.Clause
instance Binary TH.InjectivityAnn
instance Binary TH.FamilyResultSig
instance Binary TH.TypeFamilyHead
-- We need Binary TypeRep for serializing annotations
instance Binary TyCon where
put tc = put (tyConPackage tc) >> put (tyConModule tc) >> put (tyConName tc)
get = mkTyCon3 <$> get <*> get <*> get
instance Binary TypeRep where
put type_rep = put (splitTyConApp type_rep)
get = do
(ty_con, child_type_reps) <- get
return (mkTyConApp ty_con child_type_reps)
instance Binary Serialized where
put (Serialized tyrep wds) = put tyrep >> put (B.pack wds)
get = Serialized <$> get <*> (B.unpack <$> get)
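-- A round-trip sketch for these orphan instances (illustrative only):
--
-- > decode (encode (TH.mkName "x")) == TH.mkName "x"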
|
tolysz/prepare-ghcjs
|
spec-lts8/ghci/GHCi/TH/Binary.hs
|
bsd-3-clause
| 2,248 | 0 | 10 | 328 | 677 | 326 | 351 | 67 | 0 |
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
--
-- Stg to C--: code generation for constructors
--
-- This module provides the support code for StgCmm to deal with
-- constructors on the RHSs of let(rec)s.
--
-- (c) The University of Glasgow 2004-2006
--
-----------------------------------------------------------------------------
module StgCmmCon (
cgTopRhsCon, buildDynCon, bindConArgs
) where
#include "HsVersions.h"
import StgSyn
import CoreSyn ( AltCon(..) )
import StgCmmMonad
import StgCmmEnv
import StgCmmHeap
import StgCmmLayout
import StgCmmUtils
import StgCmmClosure
import StgCmmProf ( curCCS )
import CmmExpr
import CLabel
import MkGraph
import SMRep
import CostCentre
import Module
import DataCon
import DynFlags
import FastString
import Id
import Literal
import PrelInfo
import Outputable
import Platform
import Util
import Control.Monad
import Data.Char
---------------------------------------------------------------
-- Top-level constructors
---------------------------------------------------------------
cgTopRhsCon :: DynFlags
-> Id -- Name of thing bound to this RHS
-> DataCon -- Id
-> [StgArg] -- Args
-> (CgIdInfo, FCode ())
cgTopRhsCon dflags id con args =
let id_info = litIdInfo dflags id (mkConLFInfo con) (CmmLabel closure_label)
in (id_info, gen_code)
where
name = idName id
caffy = idCafInfo id -- any stgArgHasCafRefs args
closure_label = mkClosureLabel name caffy
gen_code =
do { this_mod <- getModuleName
; when (platformOS (targetPlatform dflags) == OSMinGW32) $
-- Windows DLLs have a problem with static cross-DLL refs.
ASSERT( not (isDllConApp dflags this_mod con args) ) return ()
; ASSERT( args `lengthIs` dataConRepRepArity con ) return ()
-- LAY IT OUT
; let
(tot_wds, -- #ptr_wds + #nonptr_wds
ptr_wds, -- #ptr_wds
nv_args_w_offsets) = mkVirtConstrOffsets dflags (addArgReps args)
nonptr_wds = tot_wds - ptr_wds
-- we're not really going to emit an info table, so having
-- to make a CmmInfoTable is a bit overkill, but mkStaticClosureFields
-- needs to poke around inside it.
info_tbl = mkDataConInfoTable dflags con True ptr_wds nonptr_wds
get_lit (arg, _offset) = do { CmmLit lit <- getArgAmode arg
; return lit }
; payload <- mapM get_lit nv_args_w_offsets
-- NB1: nv_args_w_offsets is sorted into ptrs then non-ptrs
-- NB2: all the amodes should be Lits!
; let closure_rep = mkStaticClosureFields
dflags
info_tbl
dontCareCCS -- Because it's static data
caffy -- Has CAF refs
payload
-- BUILD THE OBJECT
; emitDataLits closure_label closure_rep
; return () }
---------------------------------------------------------------
-- Lay out and allocate non-top-level constructors
---------------------------------------------------------------
buildDynCon :: Id -- Name of the thing to which this constr will
-- be bound
-> Bool -- is it genuinely bound to that name, or just for profiling?
-> CostCentreStack -- Where to grab cost centre from;
-- current CCS if currentOrSubsumedCCS
-> DataCon -- The data constructor
-> [StgArg] -- Its args
-> FCode (CgIdInfo, FCode CmmAGraph)
-- Return details about how to find it and initialization code
buildDynCon binder actually_bound cc con args
= do dflags <- getDynFlags
buildDynCon' dflags (targetPlatform dflags) binder actually_bound cc con args
buildDynCon' :: DynFlags
-> Platform
-> Id -> Bool
-> CostCentreStack
-> DataCon
-> [StgArg]
-> FCode (CgIdInfo, FCode CmmAGraph)
{- We used to pass a boolean indicating whether all the
args were of size zero, so we could use a static
constructor; but I concluded that it just isn't worth it.
Now that I/O uses unboxed tuples, there just aren't any constructors
with all size-zero args.
The reason for having a separate argument, rather than looking at
the addr modes of the args is that we may be in a "knot", and
premature looking at the args will cause the compiler to black-hole!
-}
-------- buildDynCon': Nullary constructors --------------
-- First we deal with the case of zero-arity constructors. They
-- will probably be unfolded, so we don't expect to see this case much,
-- if at all, but it does no harm, and sets the scene for characters.
--
-- In the case of zero-arity constructors, or, more accurately, those
-- which have exclusively size-zero (VoidRep) args, we generate no code
-- at all.
buildDynCon' dflags _ binder _ _cc con []
= return (litIdInfo dflags binder (mkConLFInfo con)
(CmmLabel (mkClosureLabel (dataConName con) (idCafInfo binder))),
return mkNop)
-------- buildDynCon': Charlike and Intlike constructors -----------
{- The following three paragraphs about @Char@-like and @Int@-like
closures are obsolete, but I don't understand the details well enough
to properly word them, sorry. I've changed the treatment of @Char@s to
be analogous to @Int@s: only a subset is preallocated, because @Char@
now has 31 bits. Only literals are handled here. -- Qrczak
Now for @Char@-like closures. We generate an assignment of the
address of the closure to a temporary. It would be possible simply to
generate no code, and record the addressing mode in the environment,
but we'd have to be careful if the argument wasn't a constant --- so
for simplicity we just always assign to a temporary.
Last special case: @Int@-like closures. We only special-case the
situation in which the argument is a literal in the range
@mIN_INTLIKE@..@mAX_INTLIKE@. NB: for @Char@-like closures we can
work with any old argument, but for @Int@-like ones the argument has
to be a literal. Reason: @Char@-like closures have an argument type
which is guaranteed in range.
Because of this, we can safely return an addressing mode.
We don't support this optimisation when compiling into Windows DLLs yet
because they don't support cross-package data references well.
-}
buildDynCon' dflags platform binder _ _cc con [arg]
| maybeIntLikeCon con
, platformOS platform /= OSMinGW32 || not (gopt Opt_PIC dflags)
, StgLitArg (MachInt val) <- arg
, val <= fromIntegral (mAX_INTLIKE dflags) -- Comparisons at type Integer!
, val >= fromIntegral (mIN_INTLIKE dflags) -- ...ditto...
= do { let intlike_lbl = mkCmmClosureLabel rtsPackageId (fsLit "stg_INTLIKE")
val_int = fromIntegral val :: Int
offsetW = (val_int - mIN_INTLIKE dflags) * (fixedHdrSizeW dflags + 1)
-- INTLIKE closures consist of a header and one word payload
intlike_amode = cmmLabelOffW dflags intlike_lbl offsetW
; return ( litIdInfo dflags binder (mkConLFInfo con) intlike_amode
, return mkNop) }
buildDynCon' dflags platform binder _ _cc con [arg]
| maybeCharLikeCon con
, platformOS platform /= OSMinGW32 || not (gopt Opt_PIC dflags)
, StgLitArg (MachChar val) <- arg
, let val_int = ord val :: Int
, val_int <= mAX_CHARLIKE dflags
, val_int >= mIN_CHARLIKE dflags
= do { let charlike_lbl = mkCmmClosureLabel rtsPackageId (fsLit "stg_CHARLIKE")
offsetW = (val_int - mIN_CHARLIKE dflags) * (fixedHdrSizeW dflags + 1)
-- CHARLIKE closures consist of a header and one word payload
charlike_amode = cmmLabelOffW dflags charlike_lbl offsetW
; return ( litIdInfo dflags binder (mkConLFInfo con) charlike_amode
, return mkNop) }
-------- buildDynCon': the general case -----------
buildDynCon' dflags _ binder actually_bound ccs con args
= do { (id_info, reg) <- rhsIdInfo binder lf_info
; return (id_info, gen_code reg)
}
where
lf_info = mkConLFInfo con
gen_code reg
= do { let (tot_wds, ptr_wds, args_w_offsets)
= mkVirtConstrOffsets dflags (addArgReps args)
-- No void args in args_w_offsets
nonptr_wds = tot_wds - ptr_wds
info_tbl = mkDataConInfoTable dflags con False
ptr_wds nonptr_wds
; let ticky_name | actually_bound = Just binder
| otherwise = Nothing
; hp_plus_n <- allocDynClosure ticky_name info_tbl lf_info
use_cc blame_cc args_w_offsets
; return (mkRhsInit dflags reg lf_info hp_plus_n) }
where
use_cc -- cost-centre to stick in the object
| isCurrentCCS ccs = curCCS
| otherwise = panic "buildDynCon: non-current CCS not implemented"
blame_cc = use_cc -- cost-centre on which to blame the alloc (same)
---------------------------------------------------------------
-- Binding constructor arguments
---------------------------------------------------------------
bindConArgs :: AltCon -> LocalReg -> [Id] -> FCode [LocalReg]
-- bindConArgs is called from cgAlt of a case
-- (bindConArgs con args) augments the environment with bindings for the
-- binders args, assuming that we have just returned from a 'case' which
-- found a con
bindConArgs (DataAlt con) base args
= ASSERT(not (isUnboxedTupleCon con))
do dflags <- getDynFlags
let (_, _, args_w_offsets) = mkVirtConstrOffsets dflags (addIdReps args)
tag = tagForCon dflags con
-- The binding below forces the masking out of the tag bits
-- when accessing the constructor field.
bind_arg :: (NonVoid Id, VirtualHpOffset) -> FCode LocalReg
bind_arg (arg, offset)
= do emit $ mkTaggedObjectLoad dflags (idToReg dflags arg) base offset tag
bindArgToReg arg
mapM bind_arg args_w_offsets
bindConArgs _other_con _base args
= ASSERT( null args ) return []
|
frantisekfarka/ghc-dsi
|
compiler/codeGen/StgCmmCon.hs
|
bsd-3-clause
| 10,519 | 0 | 16 | 2,882 | 1,606 | 846 | 760 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Main where
------------------------------------------------------------------------------
import Control.Concurrent
import Control.Exception (SomeException (..), bracket, catch, finally)
import Control.Monad (void)
import System.Directory (getCurrentDirectory, setCurrentDirectory)
import System.FilePath ((</>))
import System.IO
------------------------------------------------------------------------------
import qualified Blackbox.Tests
import Prelude (Bool (False), IO, Int, Maybe (Nothing), Monad (..), Num (..), flip, return, ($), (.), (^))
import Snap.Http.Server (simpleHttpServe)
import Snap.Http.Server.Config
import Snap.Snaplet
import qualified Snap.Snaplet.Auth.Tests
import qualified Snap.Snaplet.Config.Tests
import qualified Snap.Snaplet.Heist.Tests
import qualified Snap.Snaplet.Internal.Lensed.Tests
import qualified Snap.Snaplet.Internal.LensT.Tests
import qualified Snap.Snaplet.Internal.RST.Tests
import qualified Snap.Snaplet.Internal.Tests
import Snap.Snaplet.Test.Common.App
import qualified Snap.Snaplet.Test.Tests
import Test.Framework
import SafeCWD
------------------------------------------------------------------------------
main :: IO ()
main = do
-- chdir into test/
cwd <- getCurrentDirectory
setCurrentDirectory (cwd </> "test")
Blackbox.Tests.remove
"snaplets/heist/templates/bad.tpl"
Blackbox.Tests.remove
"snaplets/heist/templates/good.tpl"
{- Why were we removing this?
Blackbox.Tests.removeDir "snaplets/foosnaplet"
-}
-- (tid, mvar) <- inDir False "non-cabal-appdir" startServer
(tid, mvar) <- inDir False "." startServer
defaultMain [tests]
`finally` do
setCurrentDirectory cwd
killThread tid
putStrLn "waiting for termination mvar"
takeMVar mvar
where tests = mutuallyExclusive $
testGroup "snap" [ internalServerTests
, Snap.Snaplet.Auth.Tests.tests
, Snap.Snaplet.Test.Tests.tests
, Snap.Snaplet.Heist.Tests.heistTests
, Snap.Snaplet.Config.Tests.configTests
, Snap.Snaplet.Internal.RST.Tests.tests
, Snap.Snaplet.Internal.LensT.Tests.tests
, Snap.Snaplet.Internal.Lensed.Tests.tests
]
------------------------------------------------------------------------------
internalServerTests :: Test
internalServerTests =
mutuallyExclusive $
testGroup "internal server tests"
[ Blackbox.Tests.tests
, Snap.Snaplet.Internal.Lensed.Tests.tests
, Snap.Snaplet.Internal.LensT.Tests.tests
, Snap.Snaplet.Internal.RST.Tests.tests
, Snap.Snaplet.Internal.Tests.tests
]
------------------------------------------------------------------------------
startServer :: IO (ThreadId, MVar ())
startServer = do
mvar <- newEmptyMVar
t <- forkIOWithUnmask $ \restore ->
serve restore mvar (setPort 9753 .
setBind "127.0.0.1" $ defaultConfig) appInit
threadDelay $ 2*10^(6::Int)
return (t, mvar)
where
gobble m = void m `catch` \(_::SomeException) -> return ()
serve restore mvar config initializer =
flip finally (putMVar mvar ()) $
gobble $ restore $ do
hPutStrLn stderr "initializing snaplet"
bracket (runSnaplet Nothing initializer)
(\(_, _, doCleanup) -> doCleanup)
(\(_, handler, _ ) -> do
(conf, site) <- combineConfig config handler
hPutStrLn stderr "bringing up server"
simpleHttpServe conf site)
|
sopvop/snap
|
test/suite/TestSuite.hs
|
bsd-3-clause
| 4,159 | 0 | 16 | 1,250 | 781 | 467 | 314 | 77 | 1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="ar-SA">
<title>Groovy Support</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
kingthorin/zap-extensions
|
addOns/groovy/src/main/javahelp/org/zaproxy/zap/extension/groovy/resources/help_ar_SA/helpset_ar_SA.hs
|
apache-2.0
| 959 | 89 | 29 | 156 | 389 | 209 | 180 | -1 | -1 |
{-# LANGUAGE CPP #-}
import System.Environment
import System.FilePath
import Test.Haddock
checkConfig :: CheckConfig String
checkConfig = CheckConfig
{ ccfgRead = Just
, ccfgClean = \_ -> id
, ccfgDump = id
, ccfgEqual = (==)
}
dirConfig :: DirConfig
dirConfig = defaultDirConfig $ takeDirectory __FILE__
main :: IO ()
main = do
cfg <- parseArgs checkConfig dirConfig =<< getArgs
runAndCheck $ cfg
{ cfgHaddockArgs = cfgHaddockArgs cfg ++
[ "--package-name=test"
, "--package-version=0.0.0"
, "--hoogle"
]
}
|
Helkafen/haddock
|
hoogle-test/Main.hs
|
bsd-2-clause
| 609 | 1 | 11 | 177 | 148 | 81 | 67 | 20 | 1 |
{- Refactoring: move the definition 'fringe' to module B2, which is a client module of
D2. This example aims to test the moving of the definition and the modification
of the export/import lists. -}
module D2(fringe, sumSquares) where
import C2
fringe :: Tree a -> [a]
fringe (Leaf x) = [x]
fringe (Branch left right) = fringe left ++ fringe right
sumSquares (x:xs) = sq x + sumSquares xs
sumSquares [] = 0
sq x = x ^pow
pow = 2
|
kmate/HaRe
|
old/testing/moveDefBtwMods/D2.hs
|
bsd-3-clause
| 457 | 0 | 7 | 120 | 126 | 66 | 60 | 9 | 1 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE ImpredicativeTypes #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE ScopedTypeVariables #-}
-- |
-- Module : Data.Array.Accelerate.CUDA.CodeGen.Stencil
-- Copyright : [2008..2014] Manuel M T Chakravarty, Gabriele Keller
-- [2009..2014] Trevor L. McDonell
-- License : BSD3
--
-- Maintainer : Trevor L. McDonell <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
module Data.Array.Accelerate.CUDA.CodeGen.Stencil (
mkStencil, mkStencil2
) where
import Foreign.CUDA.Analysis
import Language.C.Quote.CUDA
import Data.Array.Accelerate.Type ( Boundary(..) )
import Data.Array.Accelerate.Array.Sugar ( Array, Elt )
import Data.Array.Accelerate.Analysis.Stencil
import Data.Array.Accelerate.CUDA.AST hiding ( stencil, stencilAccess )
import Data.Array.Accelerate.CUDA.CodeGen.Base
import Data.Array.Accelerate.CUDA.CodeGen.Stencil.Extra
-- Map a stencil over an array. In contrast to 'map', the domain of a stencil
-- function is an entire /neighbourhood/ of each array element. Neighbourhoods
-- are sub-arrays centred around a focal point. They are not necessarily
-- rectangular, but they are symmetric and have an extent of at least three in
-- each dimension. Due to this symmetry requirement, the extent is necessarily
-- odd. The focal point is the array position that determines the single output
-- element for each application of the stencil.
--
-- For those array positions where the neighbourhood extends past the boundaries
-- of the source array, a boundary condition determines the contents of the
-- out-of-bounds neighbourhood positions.
--
-- stencil :: (Shape ix, Elt a, Elt b, Stencil ix a stencil)
-- => (stencil -> Exp b) -- stencil function
-- -> Boundary a -- boundary condition
-- -> Acc (Array ix a) -- source array
-- -> Acc (Array ix b) -- destination array
--
-- To improve performance on older (1.x series) devices, the input array(s) are
-- read through the texture cache.
--
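-- As an illustration (not part of this module), a user-level stencil function
-- that this kernel would end up executing might look as follows on the
-- Accelerate surface language:
--
-- > laplace :: Stencil3x3 Float -> Exp Float
-- > laplace ((_,t,_), (l,c,r), (_,b,_)) = t + b + l + r - 4 * c
--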
mkStencil
:: forall aenv sh stencil a b. (Stencil sh a stencil, Elt b)
=> DeviceProperties
-> Gamma aenv
-> CUFun1 aenv (stencil -> b)
-> Boundary (CUExp aenv a)
-> [CUTranslSkel aenv (Array sh b)]
mkStencil dev aenv (CUFun1 dce f) boundary
= return
$ CUTranslSkel "stencil" [cunit|
$esc:("#include <accelerate_cuda.h>")
$edecls:texIn
$edecls:texStencil
extern "C" __global__ void
stencil
(
$params:argIn,
$params:argOut,
$params:argStencil
)
{
const int shapeSize = $exp:(csize shOut);
const int gridSize = $exp:(gridSize dev);
int ix;
for ( ix = $exp:(threadIdx dev)
; ix < shapeSize
; ix += gridSize )
{
$items:(sh .=. cfromIndex shOut "ix" "tmp")
$items:stencilBody
}
}
|]
where
(texIn, argIn) = environment dev aenv
(argOut, shOut, setOut) = writeArray "Out" (undefined :: Array sh b)
(sh, _, _) = locals "sh" (undefined :: sh)
(xs,_,_) = locals "x" (undefined :: stencil)
dx = offsets (undefined :: Fun aenv (stencil -> b))
(undefined :: OpenAcc aenv (Array sh a))
(texStencil, argStencil, safeIndex) = stencilAccess dev True True "Stencil" "Stencil" "w" "ix" dx boundary dce
(_, _, unsafeIndex) = stencilAccess dev True False "Stencil" "Stencil" "w" "ix" dx boundary dce
stencilBody
| computeCapability dev < Compute 1 2 = with safeIndex
| otherwise =
[[citem| if ( __all( $exp:(insideRegion shOut (borderRegion dx) (map rvalue sh)) ) ) {
$items:(with unsafeIndex)
} else {
$items:(with safeIndex)
} |]]
where
with stencil = (dce xs .=. stencil sh) ++
(setOut "ix" .=. f xs)
-- Map a binary stencil over two arrays. The extent of the resulting array is the
-- intersection of the extents of the two source arrays.
--
-- stencil2 :: (Shape ix, Elt a, Elt b, Elt c,
-- Stencil ix a stencil1,
-- Stencil ix b stencil2)
-- => (stencil1 -> stencil2 -> Exp c) -- binary stencil function
-- -> Boundary a -- boundary condition #1
-- -> Acc (Array ix a) -- source array #1
-- -> Boundary b -- boundary condition #2
-- -> Acc (Array ix b) -- source array #2
-- -> Acc (Array ix c) -- destination array
--
mkStencil2
:: forall aenv sh stencil1 stencil2 a b c.
(Stencil sh a stencil1, Stencil sh b stencil2, Elt c)
=> DeviceProperties
-> Gamma aenv
-> CUFun2 aenv (stencil1 -> stencil2 -> c)
-> Boundary (CUExp aenv a)
-> Boundary (CUExp aenv b)
-> [CUTranslSkel aenv (Array sh c)]
mkStencil2 dev aenv stencil boundary1 boundary2
= [ mkStencil2' dev False aenv stencil boundary1 boundary2
, mkStencil2' dev True aenv stencil boundary1 boundary2
]
mkStencil2'
:: forall aenv sh stencil1 stencil2 a b c.
(Stencil sh a stencil1, Stencil sh b stencil2, Elt c)
=> DeviceProperties
-> Bool -- are the source arrays the same extent?
-> Gamma aenv
-> CUFun2 aenv (stencil1 -> stencil2 -> c)
-> Boundary (CUExp aenv a)
-> Boundary (CUExp aenv b)
-> CUTranslSkel aenv (Array sh c)
mkStencil2' dev sameExtent aenv (CUFun2 dce1 dce2 f) boundary1 boundary2
= CUTranslSkel "stencil2" [cunit|
$esc:("#include <accelerate_cuda.h>")
$edecls:texIn
$edecls:texS1
$edecls:texS2
extern "C" __global__ void
stencil2
(
$params:argIn,
$params:argOut,
$params:argS1,
$params:argS2
)
{
const int shapeSize = $exp:(csize shOut);
const int gridSize = $exp:(gridSize dev);
int ix;
for ( ix = $exp:(threadIdx dev)
; ix < shapeSize
; ix += gridSize )
{
$items:(sh .=. cfromIndex shOut "ix" "tmp")
$items:stencilBody
}
}
|]
where
(texIn, argIn) = environment dev aenv
(argOut, shOut, setOut) = writeArray "Out" (undefined :: Array sh c)
(sh, _, _) = locals "sh" (undefined :: sh)
(xs,_,_) = locals "x" (undefined :: stencil1)
(ys,_,_) = locals "y" (undefined :: stencil2)
grp1 = "Stencil1"
grp2 = "Stencil2"
-- If the source arrays have the same extent, twiddle the names a bit so that
-- code generation refers to the same set of shape variables. Then, if there
-- are any duplicate calculations, hope that the CUDA compiler is smart
-- enough to spot this.
--
sh1 = grp1
sh2 | sameExtent = sh1
| otherwise = grp2
(dx1, dx2) = offsets2 (undefined :: Fun aenv (stencil1 -> stencil2 -> c))
(undefined :: OpenAcc aenv (Array sh a))
(undefined :: OpenAcc aenv (Array sh b))
border = zipWith max (borderRegion dx1) (borderRegion dx2)
(texS1, argS1, safeIndex1) = stencilAccess dev sameExtent True grp1 sh1 "w" "ix" dx1 boundary1 dce1
(_, _, unsafeIndex1) = stencilAccess dev sameExtent False grp1 sh1 "w" "ix" dx1 boundary1 dce1
(texS2, argS2, safeIndex2) = stencilAccess dev sameExtent True grp2 sh2 "z" "ix" dx2 boundary2 dce2
(_, _, unsafeIndex2) = stencilAccess dev sameExtent False grp2 sh2 "z" "ix" dx2 boundary2 dce2
stencilBody
| computeCapability dev < Compute 1 2 = with safeIndex1 safeIndex2
| otherwise =
[[citem| if ( __all( $exp:(insideRegion shOut border (map rvalue sh)) ) ) {
$items:(with unsafeIndex1 unsafeIndex2)
} else {
$items:(with safeIndex1 safeIndex2)
} |]]
where
with stencil1 stencil2 =
(dce1 xs .=. stencil1 sh) ++
(dce2 ys .=. stencil2 sh) ++
(setOut "ix" .=. f xs ys)
|
flowbox-public/accelerate-cuda
|
Data/Array/Accelerate/CUDA/CodeGen/Stencil.hs
|
bsd-3-clause
| 8,667 | 0 | 15 | 2,888 | 1,436 | 802 | 634 | 89 | 1 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE CPP, NoImplicitPrelude #-}
-----------------------------------------------------------------------------
-- |
-- Module : System.IO.Error
-- Copyright : (c) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
--
-- Standard IO Errors.
--
-----------------------------------------------------------------------------
module System.IO.Error (
-- * I\/O errors
IOError, -- = IOException
userError, -- :: String -> IOError
mkIOError, -- :: IOErrorType -> String -> Maybe Handle
-- -> Maybe FilePath -> IOError
annotateIOError, -- :: IOError -> String -> Maybe Handle
-- -> Maybe FilePath -> IOError
-- ** Classifying I\/O errors
isAlreadyExistsError, -- :: IOError -> Bool
isDoesNotExistError,
isAlreadyInUseError,
isFullError,
isEOFError,
isIllegalOperation,
isPermissionError,
isUserError,
-- ** Attributes of I\/O errors
ioeGetErrorType, -- :: IOError -> IOErrorType
ioeGetLocation, -- :: IOError -> String
ioeGetErrorString, -- :: IOError -> String
ioeGetHandle, -- :: IOError -> Maybe Handle
ioeGetFileName, -- :: IOError -> Maybe FilePath
ioeSetErrorType, -- :: IOError -> IOErrorType -> IOError
ioeSetErrorString, -- :: IOError -> String -> IOError
ioeSetLocation, -- :: IOError -> String -> IOError
ioeSetHandle, -- :: IOError -> Handle -> IOError
ioeSetFileName, -- :: IOError -> FilePath -> IOError
-- * Types of I\/O error
IOErrorType, -- abstract
alreadyExistsErrorType, -- :: IOErrorType
doesNotExistErrorType,
alreadyInUseErrorType,
fullErrorType,
eofErrorType,
illegalOperationErrorType,
permissionErrorType,
userErrorType,
-- ** 'IOErrorType' predicates
isAlreadyExistsErrorType, -- :: IOErrorType -> Bool
isDoesNotExistErrorType,
isAlreadyInUseErrorType,
isFullErrorType,
isEOFErrorType,
isIllegalOperationErrorType,
isPermissionErrorType,
isUserErrorType,
-- * Throwing and catching I\/O errors
ioError, -- :: IOError -> IO a
catchIOError, -- :: IO a -> (IOError -> IO a) -> IO a
tryIOError, -- :: IO a -> IO (Either IOError a)
modifyIOError, -- :: (IOError -> IOError) -> IO a -> IO a
) where
#ifndef __HUGS__
import Control.Exception.Base
#endif
#ifndef __HUGS__
import Data.Either
#endif
import Data.Maybe
#ifdef __GLASGOW_HASKELL__
import GHC.Base
import GHC.IO
import GHC.IO.Exception
import GHC.IO.Handle.Types
import Text.Show
#endif
#ifdef __HUGS__
import Hugs.Prelude(Handle, IOException(..), IOErrorType(..), IO)
#endif
#ifdef __NHC__
import IO
( IOError ()
, Handle ()
, try
, ioError
, userError
, isAlreadyExistsError -- :: IOError -> Bool
, isDoesNotExistError
, isAlreadyInUseError
, isFullError
, isEOFError
, isIllegalOperation
, isPermissionError
, isUserError
, ioeGetErrorString -- :: IOError -> String
, ioeGetHandle -- :: IOError -> Maybe Handle
, ioeGetFileName -- :: IOError -> Maybe FilePath
)
import qualified NHC.Internal as NHC (IOError(..))
import qualified NHC.DErrNo as NHC (ErrNo(..))
import Data.Maybe (fromJust)
import Control.Monad (MonadPlus(mplus))
#endif
-- | The construct 'tryIOError' @comp@ exposes IO errors which occur within a
-- computation, and which are not fully handled.
--
-- Non-I\/O exceptions are not caught by this variant; to catch all
-- exceptions, use 'Control.Exception.try' from "Control.Exception".
tryIOError :: IO a -> IO (Either IOError a)
tryIOError f = catch (do r <- f
return (Right r))
(return . Left)
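-- For example (an illustrative sketch):
--
-- > readFileMaybe :: FilePath -> IO (Maybe String)
-- > readFileMaybe path = do
-- >     r <- tryIOError (readFile path)
-- >     case r of
-- >       Left e | isDoesNotExistError e -> return Nothing
-- >              | otherwise             -> ioError e
-- >       Right contents                 -> return (Just contents)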
#if defined(__GLASGOW_HASKELL__) || defined(__HUGS__)
-- -----------------------------------------------------------------------------
-- Constructing an IOError
-- | Construct an 'IOError' of the given type where the second argument
-- describes the error location and the third and fourth arguments
-- contain the file handle and file path of the file involved in the
-- error if applicable.
mkIOError :: IOErrorType -> String -> Maybe Handle -> Maybe FilePath -> IOError
mkIOError t location maybe_hdl maybe_filename =
IOError{ ioe_type = t,
ioe_location = location,
ioe_description = "",
#if defined(__GLASGOW_HASKELL__)
ioe_errno = Nothing,
#endif
ioe_handle = maybe_hdl,
ioe_filename = maybe_filename
}
#endif /* __GLASGOW_HASKELL__ || __HUGS__ */
#ifdef __NHC__
mkIOError EOF location maybe_hdl maybe_filename =
NHC.EOFError location (fromJust maybe_hdl)
mkIOError UserError location maybe_hdl maybe_filename =
NHC.UserError location ""
mkIOError t location maybe_hdl maybe_filename =
NHC.IOError location maybe_filename maybe_hdl (ioeTypeToErrNo t)
where
ioeTypeToErrNo AlreadyExists = NHC.EEXIST
ioeTypeToErrNo NoSuchThing = NHC.ENOENT
ioeTypeToErrNo ResourceBusy = NHC.EBUSY
ioeTypeToErrNo ResourceExhausted = NHC.ENOSPC
ioeTypeToErrNo IllegalOperation = NHC.EPERM
ioeTypeToErrNo PermissionDenied = NHC.EACCES
#endif /* __NHC__ */
#ifndef __NHC__
-- -----------------------------------------------------------------------------
-- IOErrorType
-- | An error indicating that an 'IO' operation failed because
-- one of its arguments already exists.
isAlreadyExistsError :: IOError -> Bool
isAlreadyExistsError = isAlreadyExistsErrorType . ioeGetErrorType
-- | An error indicating that an 'IO' operation failed because
-- one of its arguments does not exist.
isDoesNotExistError :: IOError -> Bool
isDoesNotExistError = isDoesNotExistErrorType . ioeGetErrorType
-- | An error indicating that an 'IO' operation failed because
-- one of its arguments is a single-use resource, which is already
-- being used (for example, opening the same file twice for writing
-- might give this error).
isAlreadyInUseError :: IOError -> Bool
isAlreadyInUseError = isAlreadyInUseErrorType . ioeGetErrorType
-- | An error indicating that an 'IO' operation failed because
-- the device is full.
isFullError :: IOError -> Bool
isFullError = isFullErrorType . ioeGetErrorType
-- | An error indicating that an 'IO' operation failed because
-- the end of file has been reached.
isEOFError :: IOError -> Bool
isEOFError = isEOFErrorType . ioeGetErrorType
-- | An error indicating that an 'IO' operation failed because
-- the operation was not possible.
-- Any computation which returns an 'IO' result may fail with
-- 'isIllegalOperation'. In some cases, an implementation will not be
-- able to distinguish between the possible error causes. In this case
-- it should fail with 'isIllegalOperation'.
isIllegalOperation :: IOError -> Bool
isIllegalOperation = isIllegalOperationErrorType . ioeGetErrorType
-- | An error indicating that an 'IO' operation failed because
-- the user does not have sufficient operating system privilege
-- to perform that operation.
isPermissionError :: IOError -> Bool
isPermissionError = isPermissionErrorType . ioeGetErrorType
-- | A programmer-defined error value constructed using 'userError'.
isUserError :: IOError -> Bool
isUserError = isUserErrorType . ioeGetErrorType
#endif /* __NHC__ */
-- -----------------------------------------------------------------------------
-- IOErrorTypes
#ifdef __NHC__
data IOErrorType = AlreadyExists | NoSuchThing | ResourceBusy
| ResourceExhausted | EOF | IllegalOperation
| PermissionDenied | UserError
#endif
-- | I\/O error where the operation failed because one of its arguments
-- already exists.
alreadyExistsErrorType :: IOErrorType
alreadyExistsErrorType = AlreadyExists
-- | I\/O error where the operation failed because one of its arguments
-- does not exist.
doesNotExistErrorType :: IOErrorType
doesNotExistErrorType = NoSuchThing
-- | I\/O error where the operation failed because one of its arguments
-- is a single-use resource, which is already being used.
alreadyInUseErrorType :: IOErrorType
alreadyInUseErrorType = ResourceBusy
-- | I\/O error where the operation failed because the device is full.
fullErrorType :: IOErrorType
fullErrorType = ResourceExhausted
-- | I\/O error where the operation failed because the end of file has
-- been reached.
eofErrorType :: IOErrorType
eofErrorType = EOF
-- | I\/O error where the operation is not possible.
illegalOperationErrorType :: IOErrorType
illegalOperationErrorType = IllegalOperation
-- | I\/O error where the operation failed because the user does not
-- have sufficient operating system privilege to perform that operation.
permissionErrorType :: IOErrorType
permissionErrorType = PermissionDenied
-- | I\/O error that is programmer-defined.
userErrorType :: IOErrorType
userErrorType = UserError
-- -----------------------------------------------------------------------------
-- IOErrorType predicates
-- | I\/O error where the operation failed because one of its arguments
-- already exists.
isAlreadyExistsErrorType :: IOErrorType -> Bool
isAlreadyExistsErrorType AlreadyExists = True
isAlreadyExistsErrorType _ = False
-- | I\/O error where the operation failed because one of its arguments
-- does not exist.
isDoesNotExistErrorType :: IOErrorType -> Bool
isDoesNotExistErrorType NoSuchThing = True
isDoesNotExistErrorType _ = False
-- | I\/O error where the operation failed because one of its arguments
-- is a single-use resource, which is already being used.
isAlreadyInUseErrorType :: IOErrorType -> Bool
isAlreadyInUseErrorType ResourceBusy = True
isAlreadyInUseErrorType _ = False
-- | I\/O error where the operation failed because the device is full.
isFullErrorType :: IOErrorType -> Bool
isFullErrorType ResourceExhausted = True
isFullErrorType _ = False
-- | I\/O error where the operation failed because the end of file has
-- been reached.
isEOFErrorType :: IOErrorType -> Bool
isEOFErrorType EOF = True
isEOFErrorType _ = False
-- | I\/O error where the operation is not possible.
isIllegalOperationErrorType :: IOErrorType -> Bool
isIllegalOperationErrorType IllegalOperation = True
isIllegalOperationErrorType _ = False
-- | I\/O error where the operation failed because the user does not
-- have sufficient operating system privilege to perform that operation.
isPermissionErrorType :: IOErrorType -> Bool
isPermissionErrorType PermissionDenied = True
isPermissionErrorType _ = False
-- | I\/O error that is programmer-defined.
isUserErrorType :: IOErrorType -> Bool
isUserErrorType UserError = True
isUserErrorType _ = False
-- -----------------------------------------------------------------------------
-- Miscellaneous
#if defined(__GLASGOW_HASKELL__) || defined(__HUGS__)
ioeGetErrorType :: IOError -> IOErrorType
ioeGetErrorString :: IOError -> String
ioeGetLocation :: IOError -> String
ioeGetHandle :: IOError -> Maybe Handle
ioeGetFileName :: IOError -> Maybe FilePath
ioeGetErrorType ioe = ioe_type ioe
ioeGetErrorString ioe
| isUserErrorType (ioe_type ioe) = ioe_description ioe
| otherwise = show (ioe_type ioe)
ioeGetLocation ioe = ioe_location ioe
ioeGetHandle ioe = ioe_handle ioe
ioeGetFileName ioe = ioe_filename ioe
ioeSetErrorType :: IOError -> IOErrorType -> IOError
ioeSetErrorString :: IOError -> String -> IOError
ioeSetLocation :: IOError -> String -> IOError
ioeSetHandle :: IOError -> Handle -> IOError
ioeSetFileName :: IOError -> FilePath -> IOError
ioeSetErrorType ioe errtype = ioe{ ioe_type = errtype }
ioeSetErrorString ioe str = ioe{ ioe_description = str }
ioeSetLocation ioe str = ioe{ ioe_location = str }
ioeSetHandle ioe hdl = ioe{ ioe_handle = Just hdl }
ioeSetFileName ioe filename = ioe{ ioe_filename = Just filename }
#elif defined(__NHC__)
ioeGetErrorType :: IOError -> IOErrorType
ioeGetLocation :: IOError -> String
ioeGetErrorType e | isAlreadyExistsError e = AlreadyExists
| isDoesNotExistError e = NoSuchThing
| isAlreadyInUseError e = ResourceBusy
| isFullError e = ResourceExhausted
| isEOFError e = EOF
| isIllegalOperation e = IllegalOperation
| isPermissionError e = PermissionDenied
| isUserError e = UserError
ioeGetLocation (NHC.IOError _ _ _ _) = "unknown location"
ioeGetLocation (NHC.EOFError _ _ ) = "unknown location"
ioeGetLocation (NHC.PatternError loc) = loc
ioeGetLocation (NHC.UserError loc _) = loc
ioeSetErrorType :: IOError -> IOErrorType -> IOError
ioeSetErrorString :: IOError -> String -> IOError
ioeSetLocation :: IOError -> String -> IOError
ioeSetHandle :: IOError -> Handle -> IOError
ioeSetFileName :: IOError -> FilePath -> IOError
ioeSetErrorType e _ = e
ioeSetErrorString (NHC.IOError _ f h e) s = NHC.IOError s f h e
ioeSetErrorString (NHC.EOFError _ f) s = NHC.EOFError s f
ioeSetErrorString e@(NHC.PatternError _) _ = e
ioeSetErrorString (NHC.UserError l _) s = NHC.UserError l s
ioeSetLocation e@(NHC.IOError _ _ _ _) _ = e
ioeSetLocation e@(NHC.EOFError _ _) _ = e
ioeSetLocation (NHC.PatternError _) l = NHC.PatternError l
ioeSetLocation (NHC.UserError _ m) l = NHC.UserError l m
ioeSetHandle (NHC.IOError o f _ e) h = NHC.IOError o f (Just h) e
ioeSetHandle (NHC.EOFError o _) h = NHC.EOFError o h
ioeSetHandle e@(NHC.PatternError _) _ = e
ioeSetHandle e@(NHC.UserError _ _) _ = e
ioeSetFileName (NHC.IOError o _ h e) f = NHC.IOError o (Just f) h e
ioeSetFileName e _ = e
#endif
-- | Catch any 'IOError' that occurs in the computation and throw a
-- modified version.
modifyIOError :: (IOError -> IOError) -> IO a -> IO a
modifyIOError f io = catch io (\e -> ioError (f e))
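-- For example (an illustrative sketch), annotating every error raised while
-- reading a file with that file's name:
--
-- > readWithName :: FilePath -> IO String
-- > readWithName f = modifyIOError (`ioeSetFileName` f) (readFile f)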
-- -----------------------------------------------------------------------------
-- annotating an IOError
-- | Adds a location description and maybe a file path and file handle
-- to an 'IOError'. If either the file handle or the file path is not given,
-- the corresponding value in the 'IOError' remains unaltered.
annotateIOError :: IOError
-> String
-> Maybe Handle
-> Maybe FilePath
-> IOError
#if defined(__GLASGOW_HASKELL__) || defined(__HUGS__)
annotateIOError ioe loc hdl path =
ioe{ ioe_handle = hdl `mplus` ioe_handle ioe,
ioe_location = loc, ioe_filename = path `mplus` ioe_filename ioe }
where
mplus :: Maybe a -> Maybe a -> Maybe a
Nothing `mplus` ys = ys
xs `mplus` _ = xs
#endif /* __GLASGOW_HASKELL__ || __HUGS__ */
#if defined(__NHC__)
annotateIOError (NHC.IOError msg file hdl code) msg' hdl' file' =
NHC.IOError (msg++'\n':msg') (file`mplus`file') (hdl`mplus`hdl') code
annotateIOError (NHC.EOFError msg hdl) msg' _ _ =
NHC.EOFError (msg++'\n':msg') hdl
annotateIOError (NHC.UserError loc msg) msg' _ _ =
NHC.UserError loc (msg++'\n':msg')
annotateIOError (NHC.PatternError loc) msg' _ _ =
NHC.PatternError (loc++'\n':msg')
#endif
#ifndef __HUGS__
-- | The 'catchIOError' function establishes a handler that receives any
-- 'IOError' raised in the action protected by 'catchIOError'.
-- An 'IOError' is caught by
-- the most recent handler established by one of the exception handling
-- functions. These handlers are
-- not selective: all 'IOError's are caught. Exception propagation
-- must be explicitly provided in a handler by re-raising any unwanted
-- exceptions. For example, in
--
-- > f = catchIOError g (\e -> if IO.isEOFError e then return [] else ioError e)
--
-- the function @f@ returns @[]@ when an end-of-file exception
-- (cf. 'System.IO.Error.isEOFError') occurs in @g@; otherwise, the
-- exception is propagated to the next outer handler.
--
-- When an exception propagates outside the main program, the Haskell
-- system prints the associated 'IOError' value and exits the program.
--
-- Non-I\/O exceptions are not caught by this variant; to catch all
-- exceptions, use 'Control.Exception.catch' from "Control.Exception".
catchIOError :: IO a -> (IOError -> IO a) -> IO a
catchIOError = catch
#endif /* !__HUGS__ */
|
beni55/haste-compiler
|
libraries/ghc-7.8/base/System/IO/Error.hs
|
bsd-3-clause
| 17,061 | 0 | 11 | 3,858 | 1,931 | 1,141 | 790 | 118 | 1 |
{-# OPTIONS_GHC -Wall #-}
module B where
answer_to_live_the_universe_and_everything =
length [1..23*2] - 4
|
urbanslug/ghc
|
testsuite/tests/ghc-api/apirecomp001/B.hs
|
bsd-3-clause
| 111 | 0 | 8 | 17 | 26 | 15 | 11 | 4 | 1 |
{-# LANGUAGE QuasiQuotes #-}
module S01C04Spec (main, spec) where
import Test.Hspec
import Heredoc
import qualified S01C03 (analyze)
import S01C04 (findLine)
testData = [str|295f3bfae9271ae8065a3b4417545c3e5b0df11a53351c78530915392d2e
074a122ee01b17131e4e124e2322a9560ce4120e37582b24e1036fe93f30
3c08290121090ef72f25e4f220323444532d3fe71f34553c7b2726131009
12e84a3308590357a719e74c4f2133690a20031a0b045af63551325b1219
0e3d4fe03f56523cf40f29e4353455120e3a4f2f26f6a30a2b3e0c5b085a
57f3315c33e41c0f523426232d0651395c1525274e314d0219163b5f181f
53471622182739e9e25b473d74e1e7023d095a3134e62d1366563004120e
230a06431935391d5e0b5543223a3bed2b4358f555401e1b3b5c36470d11
22100330e03b4812e6120f163b1ef6abebe6f602545ef9a459e33d334c2a
463405faa655563a43532cfe154bec32fe3345eb2c2700340811213e5006
14241340112b2916017c270a0652732ee8121132385a6c020c040e2be15b
251119225c573b105d5c0a371c3d421ef23e22377fee334e0228561b2d15
2e4c2e373b434b0d0b1b340c300e4b195614130ea03c234c292e14530c46
0d2c3f08560ee32e5a5b6413355215384442563e69ec294a0eef561e3053
193c100c0b24231c012273e10d2e12552723586120020b02e45632265e5f
2c175a11553d4b0b16025e2534180964245b125e5d6e595d1d2a0710580b
213a175ff30855e4001b305000263f5a5c3c5100163cee00114e3518f33a
10ed33e65b003012e7131e161d5e2e270b4645f358394118330f5a5b241b
33e80130f45708395457573406422a3b0d03e6e5053d0d2d151c083337a2
551be2082b1563c4ec2247140400124d4b6508041b5a472256093aea1847
7b5a4215415d544115415d5015455447414c155c46155f4058455c5b523f
0864eb4935144c501103a71851370719301bec57093a0929ea3f18060e55
2d395e57143359e80efffb13330633ea19e323077b4814571e5a3de73a1f
52e73c1d53330846243c422d3e1b374b5209543903e3195c041c251b7c04
2f3c2c28273a12520b482f18340d565d1fe84735474f4a012e1a13502523
23340f39064e306a08194d544647522e1443041d5ee81f5a18415e34a45f
475a392637565757730a0c4a517b2821040e1709e028071558021f164c54
100b2135190505264254005618f51152136125370eef27383e45350118ed
3947452914e0223f1d040943313c193f295b221e573e1b5723391d090d1f
2c33141859392b04155e3d4e393b322526ee3e581d1b3d6817374d0c085b
c2ea5821200f1b755b2d13130f04e26625ea3a5b1e37144d3e473c24030d
ee15025d2019f757305e3f010e2a453a205f1919391e1a04e86d1a350119
1a5beb4946180fe0002a031a050b41e5164c58795021e1e45c59e2495c20
1121394f1e381c3647005b7326250514272b55250a49183be5454ba518eb
1ee55936102a465d5004371f2e382f1d03144f170d2b0eed042ee341eb19
ec1014ef3ff1272c3408220a41163708140b2e340e505c560c1e4cf82704
274b341a454a27a0263408292e362c201c0401462049523b2d55e5132d54
e259032c444b091e2e4920023f1a7ce40908255228e36f0f2424394b3c48
34130cf8223f23084813e745e006531a1e464b005e0e1ee405413fe22b4e
4af201080c0928420c2d491f6e5121e451223b070dee54244b3efc470a0e
771c161f795df81c22101408465ae7ef0c0604733ee03a20560c1512f217
2f3a142c4155073a200f04166c565634020a59ea04244ff7413c4bc10858
240d4752e5fa5a4e1ce255505602e55d4c575e2b59f52b4e0c0a0b464019
21341927f3380232396707232ae424ea123f5b371d4f65e2471dfbede611
e10e1c3b1d4d28085c091f135b585709332c56134e4844552f45eb41172a
3f1b5a343f034832193b153c482f1705392f021f5f0953290c4c43312b36
3810161aea7001fb5d502b285945255d4ef80131572d2c2e59730e2c3035
4d59052e1f2242403d440a13263e1d2dea0612125e16033b180834030829
022917180d07474c295f793e42274b0e1e16581036225c1211e41e04042f
ec2b41054f2a5f56065e5e0e1f56e13e0a702e1b2f2137020e363a2ae2a4|]
main :: IO()
main = hspec spec
spec :: Spec
spec = do
describe "Process sample data" $ do
it "Detect xor-encyphered string" $ do
findLine testData `shouldBe` ["Now that the party is jumping\n"]
|
blast-hardcheese/cryptopals
|
test/S01C04Spec.hs
|
mit
| 3,452 | 0 | 14 | 138 | 110 | 63 | 47 | 14 | 1 |
module Main (main) where
import Hyperprism
import Data.Monoid (mempty)
import Options.Applicative (execParser,info,short,long,metavar,help,strOption,(<>))
import System.Environment (getEnv)
getApiKey :: IO ApiKey
getApiKey = getEnv "GITHUB_API_KEY"
getOrg :: IO Org
getOrg = execParser $
info parseOrg mempty
where
parseOrg = strOption $
short 'o' <> long "org" <> metavar "ORG" <> help "Org to crawl"
main :: IO ()
main = do
apiKey <- getApiKey
org <- getOrg
dumpRepos apiKey org
|
mfine/hyperprism
|
Main.hs
|
mit
| 506 | 0 | 11 | 93 | 175 | 94 | 81 | 17 | 1 |
{-# LANGUAGE CPP #-}
module GHCJS.DOM.SourceBufferList (
#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
module GHCJS.DOM.JSFFI.Generated.SourceBufferList
#else
#endif
) where
#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
import GHCJS.DOM.JSFFI.Generated.SourceBufferList
#else
#endif
|
plow-technologies/ghcjs-dom
|
src/GHCJS/DOM/SourceBufferList.hs
|
mit
| 361 | 0 | 5 | 33 | 33 | 26 | 7 | 4 | 0 |
module Display where
import Data.List(intercalate)
import Text.Printf(printf)
class (Show a) => Display a where
display :: a -> String
displayList :: Display a => [a] -> IO ()
displayList list = printf . (++ "\n") . intercalate "\n" . map (("-" ++) . display) $ list
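-- A rough usage sketch (hypothetical; assumes some instance such as
-- 'instance Display Card' exists elsewhere in the project):
--
--   displayList [card1, card2]
--   -- prints each item on its own line, prefixed with "-"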
|
marcusbuffett/Clanki
|
src/Display.hs
|
mit
| 275 | 0 | 10 | 54 | 120 | 65 | 55 | 7 | 1 |
module Game.Render.Update
( Renderable(..)
, removeRenderObjects
, newRenderObjects
, update
)
where
import Debug.Trace
import Control.Monad.RWS.Strict
import Control.Monad.State.Strict
import Data.Maybe
import Game.Input.Input
import Control.Wire
import Control.Monad
import qualified Control.Wire as W
import Control.Wire.Unsafe.Event
import qualified Prelude as P
import Prelude hiding ((.), until)
import Control.Lens
import qualified Control.Lens as L
import Game.World.Objects
import qualified Data.Tiled as T
import Data.Tiled
import Game.World.Import.Tiled
import Game.World.Delta
import Game.World.Wires
import Game.World.Lens
import Game.World.Common
import qualified Game.World.Objects as World
import Game.Render.World hiding (World)
import qualified Game.Render.World as R
type Renderer = RWS (World, WorldDelta, [Renderable]) [Renderable] R.RenderWorld
-- * TODO: fix monad
whenMaybeDo :: (Monad m) => Maybe a -> (a -> m ()) -> m ()
whenMaybeDo m f =
case m of
Just v -> f v
Nothing -> return ()
removeRenderObjects = return () :: Renderer ()
newRenderObjects = return () :: Renderer ()
update = return () :: Renderer ()
--removeRenderObjects :: Renderer ()
--removeRenderObjects = do
-- (world, delta, _) <- ask
-- let deleted = delta^.deletedObjects
-- mapM_ (\objId -> do
-- let Just obj = world^.wObjects.L.at objId
-- -- FIXME
-- -- order of deletion is important
-- wLayerObject "CObjectLayer" (obj^.objName) .= Nothing
-- wObject (obj^.objName) .= Nothing
-- mapHashes.gameObjects.L.at (obj^.objName) .= Nothing
-- writer ((), [obj])
-- ) deleted
--newRenderObjects :: Renderer ()
--newRenderObjects = do
-- (world, delta, _) <- ask
-- let newObjects' = delta^.newObjects
-- let objectTileNames = [world^?getAnimations. L.at (o^.objId)._Just.animTileName | o <- newObjects']
-- let objectPoss = [world^?getPositions. L.at (o^.objId)._Just | o <- newObjects']
-- mapM_ (\(obj, tileName, pos) -> do
-- -- only objects with a position are renderable
-- Control.Monad.when (isJust pos && isJust tileName
-- ) $ do
-- objId <- wObjectFromPrefab "FWTFrontStand" (obj^.objName)
-- wLayerObject "CObjectLayer" (obj^.objName) .= (Just $
-- newRenderObject objId (0, 0) 0)
-- writer ((), [obj])
-- ) $ zip3 newObjects' objectTileNames objectPoss
--data RenderableGameObject = RenderableGameObject
-- { rgoInit :: Renderer ()
-- , rgoUpdate :: Renderer ()
-- }
--update :: Renderer ()
--update = do
-- (world, _, renderables) <- ask
-- mapM_ (\obj -> do
-- let oId = obj^.objId
-- let Just oPos = world^.objectPosition oId
-- let mRot = world^.objectRotation oId
-- tiledMap <- get
-- -- update position
-- wLayerObject "CObjectLayer" (obj^.objName) . _Just . roPos .= oPos
-- case mRot of
-- Just rot -> do
-- wLayerObject "CObjectLayer" (obj^.objName)
-- . _Just . roRotation .= rot
-- Nothing -> return ()
-- -- animation
-- let mTileName = world^?getAnimations. L.at oId._Just.animTileName
-- -- update tile
-- whenMaybeDo mTileName (\tileName -> do
-- -- tileset of tile
-- --let Just (tilesetName, localTileId) =
-- -- tileMap ^. L.at tileName
-- -- wLayerObject "CObjectLayer" (obj^.objName) . _Just . roTileName .= tileName
-- wSetObjectPrefab (obj^.objName) tileName
-- --case tileMap ^. L.at tileName of
-- -- Just (tilesetName, localTileId) -> do
-- -- --Just tsId <- use $ mapHashes . gameTilesets . L.at tilesetName
-- -- --wObject (obj^.objName)._Just.objTsId .= tsId
-- -- --wObject (obj^.objName)._Just.objLocalId .= localTileId
-- -- Nothing -> return ()
-- --Nothing -> do
-- -- Just (tsId, localTileId) <- use $ mapHashes . gamePrefabs . L.at tileName
-- -- wObject (obj^.objName)._Just.objTsId .= tsId
-- -- wObject (obj^.objName)._Just.objLocalId .= localTileId
-- )
-- ) renderables
type Renderable = World.Object
|
mfpi/q-inqu
|
Game/Render/Update.hs
|
mit
| 3,927 | 2 | 11 | 723 | 435 | 292 | 143 | 39 | 2 |
module HaskellCourse.AE.Parser (parseExp) where
import Data.Maybe (fromMaybe)
import HaskellCourse.Parsing
import HaskellCourse.Prim
import HaskellCourse.AE.AST
-- | Parse the given s-expression into an AE 'Exp'.
parseExp :: SExpr -> Exp
parseExp (AtomNum n) = LitInt n
parseExp (AtomBool b) = LitBool b
parseExp (List [AtomSym p, a, b]) = App (parsePrim p) (parseExp a) (parseExp b)
parseExp bad = error $ "parse error, bad expression: " ++ show bad
|
joshcough/HaskellCourse
|
src/HaskellCourse/AE/Parser.hs
|
mit
| 455 | 0 | 9 | 71 | 152 | 81 | 71 | 10 | 1 |
-- Pretty.hs ---
--
-- Filename: Pretty.hs
-- Description:
-- Author: Manuel Schneckenreither
-- Maintainer:
-- Created: Wed May 4 17:34:21 2016 (+0200)
-- Version:
-- Package-Requires: ()
-- Last-Updated: Mon Jul 23 10:23:31 2018 (+0200)
-- By: Manuel Schneckenreither
-- Update #: 16
-- URL:
-- Doc URL:
-- Keywords:
-- Compatibility:
--
--
-- Commentary:
--
--
--
--
-- Change Log:
--
--
--
--
--
--
--
-- Code:
module Data.Rewriting.ARA.ByInferenceRules.Vector.Pretty where
import Data.Rewriting.ARA.ByInferenceRules.Vector.Type
import Prelude hiding ((<>))
import Text.PrettyPrint
prettyVector :: Vector -> Doc
prettyVector (Vector1 a) = parens $ (int a)
prettyVector (Vector2 x1 x2) = parens $ (int x1) <> comma <+> (int x2)
prettyVector (Vector3 x1 x2 x3 ) = parens $ (int x1) <> comma <+> (int x2) <> comma <+> (int x3)
prettyVector (Vector4 x1 x2 x3 x4 ) = parens $ (int x1) <> comma <+> (int x2) <> comma <+> (int x3) <> comma <+> (int x4)
prettyVector (Vector5 x1 x2 x3 x4 x5) = parens $ (int x1) <> comma <+> (int x2) <> comma <+> (int x3) <> comma <+> (int x4) <> comma <+> (int x5)
prettyVector (Vector6 x1 x2 x3 x4 x5 x6) = parens $ (int x1) <> comma <+> (int x2) <> comma <+> (int x3) <> comma <+> (int x4) <> comma <+> (int x5) <> comma <+> (int x6)
prettyVector (Vector7 x1 x2 x3 x4 x5 x6 x7) = parens $ (int x1) <> comma <+> (int x2) <> comma <+> (int x3) <> comma <+> (int x4) <> comma <+> (int x5) <> comma <+> (int x6) <> comma <+> (int x7)
--
-- Pretty.hs ends here
|
ComputationWithBoundedResources/ara-inference
|
src/Data/Rewriting/ARA/ByInferenceRules/Vector/Pretty.hs
|
mit
| 1,675 | 0 | 19 | 464 | 582 | 317 | 265 | 12 | 1 |
{-# LANGUAGE OverloadedStrings, NoMonomorphismRestriction, FlexibleContexts #-}
module HTML.Parsec
( parseHtml,
parseText,
parseElement
) where
import Control.Monad (liftM, void)
import Control.Applicative ((<*))
import qualified Data.Text as T
import Text.Parsec
import Text.Parsec.Text
import qualified Data.HashMap.Strict as HM
import Dom
parseHtml :: T.Text -> Either ParseError Node
parseHtml s = case parse parseNodes "" s of
Left err -> Left err
Right nodes -> Right $
if length nodes == 1
then head nodes
else Dom.elem "html" HM.empty nodes
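-- A rough usage sketch (the exact shape of the resulting 'Node' depends on
-- the 'Dom' module; the input below is a placeholder):
--
--   parseHtml (T.pack "<p lang=\"en\">hello</p>")
--   -- expected: Right of a "p" element carrying lang="en" with a text child "hello"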
parseNodes = spaces >> manyTill (spacesAfter parseNode) end
where
end = eof <|> void (try (string "</"))
parseNode = parseElement <|> parseText
parseText = liftM (Dom.text . T.pack) $ many (noneOf "<")
parseElement = do
-- opening tag
(tag, attrs) <- between (char '<') (char '>') tagData
-- contents
children <- parseNodes
-- closing tag
string $ tag ++ ">" -- "</" is consumed by parseNodes, maybe bad form?
return $ Dom.elem (T.pack tag) attrs children
-- parseChildren = spaces >> manyTill parseChild end
-- where
-- end = eof <|> (try (string "</") >> return ())
--
-- parseChild = spacesAfter parseNode
tagData = do
t <- tagName
attrs <- attributes
return (t,attrs)
tagName = many1 alphaNum
-- this is safe because 'attribute' will fail without consuming on '>'
attributes = liftM HM.fromList $ spaces >> many (spacesAfter attribute)
attribute = do
name <- tagName
char '='
open <- char '\"' <|> char '\''
value <- manyTill anyChar (try $ char open)
return (T.pack name, T.pack value)
-- run parser p and then strip the trailing spaces, returning the result of p.
spacesAfter p = p <* spaces
|
Hrothen/Hubert
|
src/HTML/Parsec.hs
|
mit
| 1,837 | 0 | 12 | 456 | 497 | 258 | 239 | 41 | 3 |
module HolyProject.HolyGitQueries.Test
(githubAPISuite
) where
import Test.Tasty (testGroup, TestTree)
import Test.Tasty.HUnit
import HolyProject.HolyGitQueries
githubAPISuite :: TestTree
githubAPISuite = testGroup "GithubAPI"
[ testCase "Sean" $ ioTestEq
(getGHUser "[email protected]")
(Just "\"mankyKitty\"")
, testCase "Name" $ ioTestEq
(getGHUser "Sean Chalmers")
(Just "\"mankyKitty\"")
]
-- | Test if some IO action returns some expected value
ioTestEq :: (Eq a, Show a) => IO a -> a -> Assertion
ioTestEq action expected = action >>= assertEqual "" expected
|
mankyKitty/holy-haskell-project-starter
|
test/HolyProject/HolyGitQueries/Test.hs
|
mit
| 714 | 0 | 10 | 218 | 156 | 83 | 73 | 15 | 1 |
{-# LANGUAGE PackageImports #-}
import "happyscheduler" Application (develMain)
import Prelude (IO)
main :: IO ()
main = develMain
|
frt/happyscheduler
|
app/devel.hs
|
mit
| 132 | 1 | 6 | 19 | 37 | 20 | 17 | 5 | 1 |
{-|
Module: Flaw.Graphics
Description: Graphics abstraction.
License: MIT
The graphics system is mostly abstracted from the backend. Almost everything from
uploading textures to writing shader programs can be done without any
backend-dependent code.
Initialization of the graphics subsystem is still backend-dependent, though.
Initialization includes creating instances of 'System', 'Device' and 'Presenter'
classes.
* 'System' instance allows to enumerate graphics hardware supported by backend,
get information about displays and display modes.
* 'Device' instance creates graphics resources such as render targets, textures,
shader programs, etc.
* 'Context' instance performs actual drawing. Draw operations run in 'Render' monad.
* 'Presenter' instance shows rendering results onto screen or window.
The abstraction mostly follows the DirectX 11 / OpenGL 3 model.
In a few places the lowest common denominator was chosen for certain features,
to allow one-to-one mapping from abstraction to implementation. Examples:
* Textures accept default sampling parameters, used when no sampler is bound;
* Program objects are created explicitly;
* There are no separate texture and sampler slots. Both a texture and a sampler are bound
  into a combined slot indexed by a numeric index;
* Default framebuffer is bound automatically as part of initial state inside 'present'.
-}
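{- A rough usage sketch (not part of this module; 'someContext', 'someProgram',
   'someVertexBuffer' and 'indicesCount' stand for values obtained from a
   concrete backend):

   render someContext $ renderScope $ do
     renderProgram someProgram
     renderVertexBuffer 0 someVertexBuffer
     renderClearColor 0 (Vec4 0 0 0 1)
     renderDraw indicesCount
-}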
{-# LANGUAGE DeriveGeneric, FlexibleContexts, FunctionalDependencies, MultiParamTypeClasses, RankNTypes, TypeFamilies #-}
module Flaw.Graphics
( System(..)
, Device(..)
, Context(..)
, Presenter(..)
, DeviceInfo(..)
, DisplayInfo(..)
, DisplayModeInfo(..)
, IndexTopology(..)
, IndexStride(..)
, DepthTestFunc(..)
, Render
, renderScope
, renderFrameBuffer
, renderViewport
, renderGetViewport
, renderScissor
, renderGetScissor
, renderIntersectScissor
, renderVertexBuffer
, renderIndexBuffer
, renderUniformBuffer
, renderSampler
, renderBlendState
, renderDepthTestFunc
, renderDepthWrite
, renderProgram
, renderClearColor
, renderClearDepth
, renderClearStencil
, renderClearDepthStencil
, renderUploadUniformBuffer
, renderUploadVertexBuffer
, renderDraw
, renderDrawInstanced
, renderPlay
, render
, present
) where
import Control.Exception
import Control.Monad.Trans.Class
import Control.Monad.Trans.Reader
import qualified Data.ByteString as B
import qualified Data.Serialize as S
import qualified Data.Text as T
import GHC.Generics(Generic)
import Flaw.Exception
import Flaw.Graphics.Blend
import Flaw.Graphics.Program.Internal
import Flaw.Graphics.Sampler
import Flaw.Graphics.Texture
import Flaw.Math
import Flaw.Stack
-- | Class of graphics system.
{- Initialization of the graphics system is backend-dependent.
There are just a few general functions.
-}
class System s where
-- | Type for id of graphics device.
data DeviceId s :: *
-- | Type for id of display.
data DisplayId s :: *
-- | Type for id of display mode.
data DisplayModeId s :: *
-- | Get list of graphics devices installed in system.
getInstalledDevices :: s -> IO ([(DeviceId s, DeviceInfo s)], IO ())
-- | Create custom display mode (with specified width and height) for specified display.
createDisplayMode
:: s
-> DisplayId s -- ^ Display id.
-> Int -- ^ Width.
-> Int -- ^ Height.
-> IO ((DisplayModeId s, DisplayModeInfo), IO ())
-- | Class of graphics device.
-- A graphics device manages resources.
-- It also acts as the primary context for the device.
class Device d where
-- | Type for deferred contexts.
type DeferredContext d :: *
-- | Type for texture id.
data TextureId d :: *
-- | Type for sampler state id.
data SamplerStateId d :: *
-- | Type for blend state id.
data BlendStateId d :: *
-- | Type for render target id.
data RenderTargetId d :: *
-- | Type for depth stencil target id.
data DepthStencilTargetId d :: *
-- | Type for framebuffer id.
data FrameBufferId d :: *
-- | Type for vertex buffer id.
data VertexBufferId d :: *
-- | Type for index buffer id.
data IndexBufferId d :: *
-- | Type for program id.
data ProgramId d :: *
-- | Type for uniform buffer id.
data UniformBufferId d :: *
-- | Null texture.
nullTexture :: TextureId d
-- | Null sampler state.
nullSamplerState :: SamplerStateId d
-- | Null blend state.
nullBlendState :: BlendStateId d
-- | Null depth stencil target.
nullDepthStencilTarget :: DepthStencilTargetId d
-- | Null vertex buffer.
nullVertexBuffer :: VertexBufferId d
-- | Null index buffer.
nullIndexBuffer :: IndexBufferId d
-- | Null uniform buffer.
nullUniformBuffer :: UniformBufferId d
-- | Create deferred context.
createDeferredContext :: d -> IO (DeferredContext d, IO ())
createDeferredContext _ = throwIO $ DescribeFirstException "creating deferred context is not supported"
-- | Create static texture.
createStaticTexture :: d -> TextureInfo -> SamplerStateInfo -> B.ByteString -> IO (TextureId d, IO ())
-- | Create texture from image packed in any format natively supported by device.
createNativeTexture :: d -> SamplerStateInfo -> B.ByteString -> IO (TextureId d, IO ())
createNativeTexture _ _ _ = throwIO $ DescribeFirstException "creating native texture is not supported"
-- | Create sampler state.
createSamplerState :: d -> SamplerStateInfo -> IO (SamplerStateId d, IO ())
-- | Create blend state.
createBlendState :: d -> BlendStateInfo -> IO (BlendStateId d, IO ())
-- | Create readable render target.
createReadableRenderTarget :: d -> Int -> Int -> TextureFormat -> SamplerStateInfo -> IO ((RenderTargetId d, TextureId d), IO ())
-- | Create depth stencil target.
createDepthStencilTarget :: d -> Int -> Int -> IO (DepthStencilTargetId d, IO ())
-- | Create readable depth stencil target.
createReadableDepthStencilTarget :: d -> Int -> Int -> SamplerStateInfo -> IO ((DepthStencilTargetId d, TextureId d), IO ())
-- | Create framebuffer.
createFrameBuffer :: d -> [RenderTargetId d] -> DepthStencilTargetId d -> IO (FrameBufferId d, IO ())
-- | Create static vertex buffer.
createStaticVertexBuffer
:: d -- ^ Device.
-> B.ByteString -- ^ Buffer.
-> Int -- ^ Stride in bytes.
-> IO (VertexBufferId d, IO ())
-- | Create dynamic vertex buffer.
createDynamicVertexBuffer
:: d -- ^ Device.
-> Int -- ^ Size in bytes.
-> Int -- ^ Stride in bytes.
-> IO (VertexBufferId d, IO ())
-- | Create index buffer.
createStaticIndexBuffer :: d -> B.ByteString -> IndexTopology -> IndexStride -> IO (IndexBufferId d, IO ())
-- | Create program.
createProgram
:: d -- ^ Device.
-> Program () -- ^ Program contents.
-> IO (ProgramId d, IO ())
-- | Create uniform buffer.
createUniformBuffer :: d -> Int -> IO (UniformBufferId d, IO ())
-- | Class of graphics context.
-- Performs actual render operations.
class Device d => Context c d | c -> d where
------- Immediate commands.
-- | Clear render target.
contextClearColor :: c -> Int -> Float4 -> IO ()
-- | Clear depth.
contextClearDepth :: c -> Float -> IO ()
-- | Clear stencil.
contextClearStencil :: c -> Int -> IO ()
-- | Clear depth and stencil.
contextClearDepthStencil :: c -> Float -> Int -> IO ()
-- | Upload data to uniform buffer.
contextUploadUniformBuffer :: c -> UniformBufferId d -> B.ByteString -> IO ()
-- | Upload data to dynamic vertex buffer.
contextUploadVertexBuffer :: c -> VertexBufferId d -> B.ByteString -> IO ()
-- | Draw (instanced).
contextDraw :: c
-> Int -- ^ Instances count (1 for non-instanced).
-> Int -- ^ Indices count.
-> IO ()
-- | Replay deferred context on immediate context.
contextPlay :: Context dc d => c -> dc -> IO ()
contextPlay _ _ = throwIO $ DescribeFirstException "playing deferred context is not supported"
-- | Perform rendering. Initial state is context's default state.
contextRender :: c -> IO a -> IO a
------- Setup commands.
-- | Set framebuffer.
contextSetFrameBuffer :: c -> FrameBufferId d -> IO a -> IO a
-- | Set viewport (left, top, right, bottom).
contextSetViewport :: c -> Int4 -> IO a -> IO a
-- | Get current viewport.
contextGetViewport :: c -> IO Int4
-- | Set scissor (left, top, right, bottom).
contextSetScissor :: c -> Maybe Int4 -> IO a -> IO a
-- | Get current scissor.
contextGetScissor :: c -> IO (Maybe Int4)
-- | Set vertex buffer.
contextSetVertexBuffer :: c -> Int -> VertexBufferId d -> IO a -> IO a
-- | Set index buffer.
contextSetIndexBuffer :: c -> IndexBufferId d -> IO a -> IO a
-- | Set uniform buffer.
contextSetUniformBuffer :: c -> Int -> UniformBufferId d -> IO a -> IO a
-- | Set sampler.
contextSetSampler :: c -> Int -> TextureId d -> SamplerStateId d -> IO a -> IO a
-- | Set blend state.
contextSetBlendState :: c -> BlendStateId d -> IO a -> IO a
-- | Set depth-test function.
contextSetDepthTestFunc :: c -> DepthTestFunc -> IO a -> IO a
-- | Set depth write flag.
contextSetDepthWrite :: c -> Bool -> IO a -> IO a
-- | Set program.
contextSetProgram :: c -> ProgramId d -> IO a -> IO a
-- | Presenter class.
class (System s, Context c d) => Presenter p s c d | p -> s c d where
setPresenterMode :: p -> Maybe (DisplayModeId s) -> IO ()
-- | Perform rendering on presenter's surface.
-- Presenter's framebuffer, viewport, etc will be automatically set
-- as an initial state.
presenterRender :: p -> c -> IO a -> IO a
-- | Device information structure.
data DeviceInfo device = DeviceInfo
{ deviceName :: !T.Text
, deviceDisplays :: [(DisplayId device, DisplayInfo device)]
}
-- | Display information structure.
data DisplayInfo device = DisplayInfo
{ displayName :: !T.Text
, displayModes :: [(DisplayModeId device, DisplayModeInfo)]
}
-- | Display mode information structure.
data DisplayModeInfo = DisplayModeInfo
{ displayModeName :: !T.Text
, displayModeWidth :: !Int
, displayModeHeight :: !Int
, displayModeRefreshRate :: !Rational
} deriving Show
-- | Index topology.
data IndexTopology
= IndexTopologyPoints
| IndexTopologyLines
| IndexTopologyLineStrip
| IndexTopologyTriangles
| IndexTopologyTriangleStrip
| IndexTopologyPatches {-# UNPACK #-} !Int
deriving (Eq, Generic)
instance S.Serialize IndexTopology
-- | Index stride.
data IndexStride
= IndexStride32Bit
| IndexStride16Bit
deriving (Eq, Generic)
instance S.Serialize IndexStride
-- | Depth test function.
data DepthTestFunc
= DepthTestFuncNever
| DepthTestFuncLess
| DepthTestFuncLessOrEqual
| DepthTestFuncEqual
| DepthTestFuncNonEqual
| DepthTestFuncGreaterOrEqual
| DepthTestFuncGreater
| DepthTestFuncAlways
deriving Eq
-- | Rendering monad.
type Render c = StackT (ReaderT c IO)
renderSetup :: (forall a. c -> IO a -> IO a) -> Render c ()
renderSetup setup = StackT $ \q -> do
c <- ask
mapReaderT (setup c) $ q ()
renderAction :: (c -> IO a) -> Render c a
renderAction action = StackT $ \q -> do
c <- ask
lift (action c) >>= q
-- | Scope for rendering state.
-- Context state will be restored after the scope.
renderScope :: Render c a -> Render c a
renderScope = scope
-- | Set current framebuffer.
renderFrameBuffer :: Context c d => FrameBufferId d -> Render c ()
renderFrameBuffer fb = renderSetup $ \c q -> contextSetFrameBuffer c fb q
-- | Set current viewport (vector with left, top, right, bottom).
renderViewport :: Context c d => Int4 -> Render c ()
renderViewport viewport = renderSetup $ \c q -> contextSetViewport c viewport q
-- | Get current viewport.
renderGetViewport :: Context c d => Render c Int4
renderGetViewport = renderAction contextGetViewport
-- | Set current scissor (vector with left, top, right, bottom).
renderScissor :: Context c d => Maybe Int4 -> Render c ()
renderScissor scissor = renderSetup $ \c q -> contextSetScissor c scissor q
-- | Get current scissor.
renderGetScissor :: Context c d => Render c (Maybe Int4)
renderGetScissor = renderAction contextGetScissor
-- | Set the scissor to the intersection of the specified and the current scissor.
renderIntersectScissor :: Context c d => Int4 -> Render c ()
renderIntersectScissor scissor@(Vec4 left top right bottom) = do
currentScissor <- renderGetScissor
renderScissor $ Just $ case currentScissor of
Just (Vec4 currentLeft currentTop currentRight currentBottom) ->
Vec4 (max left currentLeft) (max top currentTop) (min right currentRight) (min bottom currentBottom)
Nothing -> scissor
-- | Set vertex buffer.
renderVertexBuffer :: Context c d => Int -> VertexBufferId d -> Render c ()
renderVertexBuffer i vb = renderSetup $ \c q -> contextSetVertexBuffer c i vb q
-- | Set current index buffer.
renderIndexBuffer :: Context c d => IndexBufferId d -> Render c ()
renderIndexBuffer ib = renderSetup $ \c q -> contextSetIndexBuffer c ib q
-- | Set uniform buffer.
renderUniformBuffer :: Context c d => Int -> UniformBufferId d -> Render c ()
renderUniformBuffer i ub = renderSetup $ \c q -> contextSetUniformBuffer c i ub q
-- | Set sampler.
renderSampler :: Context c d => Int -> TextureId d -> SamplerStateId d -> Render c ()
renderSampler i t s = renderSetup $ \c q -> contextSetSampler c i t s q
-- | Set blend state.
renderBlendState :: Context c d => BlendStateId d -> Render c ()
renderBlendState b = renderSetup $ \c q -> contextSetBlendState c b q
-- | Set depth test function.
renderDepthTestFunc :: Context c d => DepthTestFunc -> Render c ()
renderDepthTestFunc f = renderSetup $ \c q -> contextSetDepthTestFunc c f q
-- | Set depth write flag.
renderDepthWrite :: Context c d => Bool -> Render c ()
renderDepthWrite f = renderSetup $ \c q -> contextSetDepthWrite c f q
-- | Set current program.
renderProgram :: Context c d => ProgramId d -> Render c ()
renderProgram p = renderSetup $ \c q -> contextSetProgram c p q
-- | Clear render target.
renderClearColor :: Context c d => Int -> Float4 -> Render c ()
renderClearColor i color = renderAction $ \c -> contextClearColor c i color
-- | Clear depth.
renderClearDepth :: Context c d => Float -> Render c ()
renderClearDepth depth = renderAction $ \c -> contextClearDepth c depth
-- | Clear stencil.
renderClearStencil :: Context c d => Int -> Render c ()
renderClearStencil stencil = renderAction $ \c -> contextClearStencil c stencil
-- | Clear depth and stencil.
renderClearDepthStencil :: Context c d => Float -> Int -> Render c ()
renderClearDepthStencil depth stencil = renderAction $ \c -> contextClearDepthStencil c depth stencil
-- | Upload data to uniform buffer.
renderUploadUniformBuffer :: Context c d => UniformBufferId d -> B.ByteString -> Render c ()
renderUploadUniformBuffer ub bytes = renderAction $ \c -> contextUploadUniformBuffer c ub bytes
-- | Upload data to dynamic vertex buffer.
renderUploadVertexBuffer :: Context c d => VertexBufferId d -> B.ByteString -> Render c ()
renderUploadVertexBuffer vb bytes = renderAction $ \c -> contextUploadVertexBuffer c vb bytes
-- | Draw.
renderDraw :: Context c d
=> Int -- ^ Indices count.
-> Render c ()
renderDraw = renderDrawInstanced 1
-- | Draw instanced.
renderDrawInstanced :: Context c d
=> Int -- ^ Instances count.
-> Int -- ^ Indices count.
-> Render c ()
renderDrawInstanced instancesCount indicesCount = renderAction $ \c -> contextDraw c instancesCount indicesCount
-- | Play deferred context on immediate context.
renderPlay :: (Context c d, Context dc d) => dc -> Render c ()
renderPlay deferredContext = renderAction $ \c -> contextPlay c deferredContext
-- | Perform offscreen rendering.
render :: Context c d => c -> Render c a -> IO a
render c f = contextRender c $ runReaderT (runStackT f) c
-- | Perform rendering on presenter.
present :: Presenter p s c d => p -> Render c a -> Render c a
present p f = renderAction $ \c -> presenterRender p c $ runReaderT (runStackT f) c
|
quyse/flaw
|
flaw-graphics/Flaw/Graphics.hs
|
mit
| 15,901 | 0 | 15 | 3,054 | 3,673 | 1,949 | 1,724 | 259 | 2 |
{-# LANGUAGE ForeignFunctionInterface #-}
{-# CFILES System/Posix/waitpid.c #-}
module System.Posix.Waitpid where
import Control.Monad
import Data.List
import Foreign
import Foreign.C
import Foreign.C.Types(CInt(..))
import System.Posix.Signals (Signal)
import System.Posix.Types (CPid(..))
foreign import ccall unsafe "SystemPosixWaitpid_waitpid" c_waitpid :: CPid -> Ptr CInt -> CInt -> IO CPid
data Flag = NoHang | IncludeUntraced | IncludeContinued deriving Show
data Status = Exited Int | Signaled Signal | Stopped Signal | Continued deriving (Show, Eq)
waitpid :: CPid -> [Flag] -> IO (Maybe (CPid, Status))
waitpid pid flags = alloca $ \status -> do
child <- throwErrnoIfMinus1 "waitpid" $ c_waitpid pid status options
stat <- peek status
return $ guard (child /= 0) >> return (child, extractStatus stat)
where
options = foldl' (.|.) 0 (map flagValue flags)
flagValue NoHang = 1
flagValue IncludeUntraced = 2
flagValue IncludeContinued = 4
extractStatus stat | stat < 0x10000 = Exited (fromIntegral stat)
| stat < 0x20000 = Signaled (stat - 0x10000)
| stat < 0x30000 = Stopped (stat - 0x20000)
| stat == 0x30000 = Continued
| otherwise = error $ "waitpid: unexpected status " ++ show stat
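-- A rough usage sketch (assumes 'childPid' was obtained elsewhere, e.g. from
-- 'System.Posix.Process.forkProcess'):
--
--   mStatus <- waitpid childPid [NoHang]
--   case mStatus of
--     Nothing                 -> putStrLn "child still running"
--     Just (pid, Exited code) -> putStrLn (show pid ++ " exited with " ++ show code)
--     Just (pid, other)       -> print (pid, other)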
|
GaloisInc/posix-waitpid
|
System/Posix/Waitpid.hs
|
mit
| 1,302 | 0 | 13 | 285 | 417 | 220 | 197 | 26 | 3 |
module Caramel where
import Control.Applicative ((<*>),(<$>))
import Control.Monad (msum,replicateM)
import Data.Char
import Data.List (intercalate,foldl1')
import Data.List.Split (splitOn)
import Data.Maybe (listToMaybe,fromJust)
import Text.ParserCombinators.ReadP
import Text.Printf
import qualified Data.Map as M
import qualified Data.Set as S
import qualified Lambda as L
-- The main datatype of Caramel's syntax sugars.
data Caramel
= Lam [String] Caramel
| App [Caramel]
| Var String
| Nat Int
| Lst [Caramel]
| Tup [Caramel]
| Chr Char
| Str String
| Wrd Int
| Adt [(String, [(String, Caramel)])]
| Let [(String,[String],Caramel)] Caramel
deriving Show
-- The usual fold over Caramel's constructors.
fold
:: ([String]->b->b)
-> ([b]->b)
-> (String->b)
-> (Int->b)
-> ([b]->b)
-> ([b]->b)
-> (Char->b)
-> (String->b)
-> (Int->b)
-> ([(String,[(String,b)])]->b)
-> ([(String,[String],b)]->b->b)
-> Caramel
-> b
fold lam app var num lst tup chr str wrd adt leT = go where
go (Lam vars body) = lam vars (go body)
go (App terms) = app (map go terms)
go (Var name) = var name
go (Nat n) = num n
go (Lst l) = lst (map go l)
go (Tup t) = tup (map go t)
go (Chr c) = chr c
go (Wrd c) = wrd c
go (Str s) = str s
go (Adt ctors) = adt (map (\ (name,ctor) -> (name, map (\ (name,field) -> (name, go field)) ctor)) ctors)
go (Let defs term) = leT (map (\ (name,vars,term) -> (name, vars, go term)) defs) (go term)
-- Prints a Caramel term with the Caramel syntax. The reverse of parsing.
pretty :: Caramel -> String
pretty term = fold lam app var num lst tup chr str wrd adt leT term [] where
lam vars body = ("(" ++) . (unwords vars ++) . (" -> " ++) . body . (")" ++)
app terms = ("(" ++) . unwords' terms . (")" ++)
var name = (name ++)
num n = (show n ++)
lst l = ("[" ++) . inters' "," l . ("]" ++)
tup l = ("(" ++) . inters' "," l . (")" ++)
chr c = ("'" ++) . ([c] ++) . ("'" ++)
str s = ("\"" ++) . (s ++) . ("\"" ++)
wrd w = (("#" ++ show w) ++)
adt d = undefined
leT d t = ("{"++). (inters' "; " (map (\ (name,vars,term)->(unwords (name:vars)++).(" = "++).term) d)) . ("; "++) . t . ("}"++)
unwords' list = foldr (\ h t s -> (s ++) . h . t " ") (const ([] ++)) list ""
inters' sep list = foldr (\ h t s -> (s ++) . h . t sep) (const ([] ++)) list ""
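-- For instance, 'pretty (Lam ["x"] (Var "x"))' should yield "(x -> x)" and
-- 'pretty (App [Var "f", Var "x"])' should yield "(f x)".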
-- Parses a Caramel source code into a Caramel term. The reverse of pretty-printing.
-- Does not deal with parse errors properly (TODO).
parse :: String -> Caramel
parse = fst . head . reverse . readP_to_S (term 0) . stripComments where
-- The pre-processing just removes comments and empty lines
stripComments
= unlines
. (++ [""])
. filter (not . all isSpace) -- removes empty lines
. map (head . splitOn "--") -- removes comments
. lines
-- A term is one of the syntax sugars provided by the DSL, optionally
-- followed by local definitions.
term d = do
parsedTerm <- lam d <++ leT d <++ tup d <++ choice [leT d,app d,str,chr,lst d,adt d] <++ wrd <++ num <++ var
localDefs <- many (string ('\n':replicate ((d+1)*4) ' ') >> def (d+1))
return $ case localDefs of
[] -> parsedTerm
defs -> Let defs parsedTerm
-- The sugars below implement Caramel's syntax as defined in the README.
app d = App <$> sepBetween "(" " " ")" (term d)
var = Var <$> choice [word, many1 (char '*')]
num = Nat <$> read <$> number
lst d = Lst <$> sepBetween "[" "," "]" (term d)
tup d = Tup <$> sepBetween "(" "," ")" (term d)
chr = Chr <$> (do { char '\''; c <- get; char '\''; return c})
str = Str <$> (do { char '"'; s <- manyTill get (char '"'); return s})
wrd = Wrd <$> read <$> (char '#' >> number)
adt d = Adt <$> sepBetween "#{" "|" "}" ctor where
ctor = paired (,) word space fields
fields = sepBetween "{" "," "}" field <++ return []
field = paired (,) word (char ':') (term d)
lam d = between (char '(') (char ')') (paired Lam vars (string "->") (term d)) where
vars = sepBy word (char ' ')
def d = paired pair names (char '=') (term d) where
pair = \ (name:vars) value -> (name, vars, value)
names = sepBy word (char ' ')
leT d = between (char '{' >> skipSpaces) (skipSpaces >> char '}') (paired Let defs (char ';') (term d)) where
defs = sepBy (def d) (char ';' >> space)
-- Some useful parse combinators. This code looks bad and could improve.
number = many1 (satisfy isDigit)
letter = satisfy isLetter
word = many1 (satisfy (\ c -> isAlphaNum c || elem c ("_.@:?#$!^&|*-+<>~=/"::String)))
paired = \ fn left sep right -> do { l <- left; space>>sep>>space; r <- right; return (fn l r) }
sepBetween = \ left sep right parse -> let
lSpace = if head sep /= ' ' then space else return ()
wrap = between (string left) (string right)
in wrap (sepBy parse (lSpace >> string sep >> space))
space = skipSpaces
-- Converts a Lambda Calculus term to a value of the Caramel DSL.
fromLambda :: L.Term -> Caramel
fromLambda term = L.fold lam app var term (M.empty :: M.Map Int String) 0 where
-- Converts λ-calculus abstractions to Caramel.
lam body scope depth
= strSugar
. chrSugar
. wrdSugar
. lstSugar
. tupSugar
. natSugar
. lamSugar
$ Lam [name] (body (M.insert depth name scope) (depth+1))
where name = infiniteAlphabet !! depth
-- Converts λ-calculus applications to Caramel.
app left right scope depth
= appSugar
$ App [left scope depth, right scope depth]
-- Converts λ-calculus variables to Caramel.
var index scope depth
= Var (scope M.! (depth - index - 1))
-- The lam sugar just removes consecutive lambdas,
-- i.e., (a -> (b -> c)) becomes (a b -> c)
lamSugar :: Caramel -> Caramel
lamSugar (Lam names (Lam names' body)) = Lam (names++names') (lamSugar body)
lamSugar term = term
-- The app sugar just removes redundant parens,
-- i.e., ((f x) y) becomes (f x y)
appSugar :: Caramel -> Caramel
appSugar (App (App args : args')) = appSugar (App (args ++ args'))
appSugar term = term
-- Church naturals to Nat,
-- i.e., (f x -> (f (f (f x)))) to 3
natSugar :: Caramel -> Caramel
natSugar term = maybe term id (getNat term) where
getNat (Lam [fn,arg] vals) = Nat <$> go vals where
go (App [Var f, p]) | f == fn = (+ 1) <$> go p
go (Var x) | x == arg = Just 0
go _ | otherwise = Nothing
getNat term = Just term
-- Church lists to Lst,
-- i.e., (c n -> (c 1 (c 2 (c 3 n)))) to [1,2,3]
lstSugar term = maybe term id (getLst term) where
getLst (Lam [cons,nil] cells) = Lst <$> go cells where
go (App [Var c, h, t])
| c == cons
&& not (freeVarInTerm cons h)
&& not (freeVarInTerm nil h)
= (h :) <$> go t
go (Var n) | n == nil = Just []
go _ | otherwise = Nothing
getLst term = Just term
-- Church tuples to Tup,
-- i.e., (t -> (t 1 2 3)) to {1,2,3}
tupSugar term@(Lam [tupVar] body@(App (Var t : xs)))
| t == tupVar
&& not (any (freeVarInTerm tupVar) xs)
= Tup xs
tupSugar term = term
-- Template function to create the Chr and Wrd sugar.
-- bitVecSugar :: Caramel -> Caramel
bitVecSugar size ctor term = maybe term id (getChr term) where
getChr (Lam [fn,one,zero] (App (Var fnv : bits)))
| fn == fnv
&& length bits == size
&& all (\ (Var bit) -> bit == one || bit == zero) bits
= Just . ctor . toEnum . toByte . map (\ (Var bit) -> bit) $ bits
where toByte bits = foldl makeByte (const 0) bits 1
makeByte t h b = (if h == one then 1 else 0) * b + (t (b*2))
getChr term = Just term
-- Church byte to Chr (ASCII-encoded char),
-- i.e., (f 1 0 -> (f 0 1 1 0 0 0 0 1)) to '\'a\''
chrSugar :: Caramel -> Caramel
chrSugar = bitVecSugar 8 Chr
-- Church word to Wrd (Haskell's Word32)
-- i.e., (f 1 0 -> (f 0 0 ...28 zeros... 0 1)) to '1''
wrdSugar :: Caramel -> Caramel
wrdSugar = bitVecSugar 32 Wrd
-- Church string (list of Chrs) to Str,
-- i.e., ['a' 'b' 'c' 'd'] to "abcd"
strSugar :: Caramel -> Caramel
strSugar term = maybe term id (getStr term) where
getStr (Lst chrs) | all isChr chrs = Just (Str (map (\ (Chr c) -> c) chrs)) where
isChr (Chr _) = True
isChr otherwise = False
getStr term = Just term
-- TODO: ADT fromLambda
adt = undefined
-- List of all strings consisting of upper/lowercase letters.
infiniteAlphabet :: [String]
infiniteAlphabet = do
x <- [1..]
replicateM x (['a'..'z']++['A'..'Z'])
-- Is given variable free in a term?
freeVarInTerm :: String -> Caramel -> Bool
freeVarInTerm varName = elem varName . freeVars
-- Converts a value of the Caramel DSL to a pure Lambda Calculus term.
toLambda :: Caramel -> L.Term
toLambda term = go term (M.empty :: M.Map String Int) 0 where
go = fold lam app var num lst tup chr str wrd adt leT
lam vars body = foldr cons body vars where
cons var body scope depth = L.Lam (body (M.insert var depth scope) (depth+1))
leT defs term = foldr cons term defs where
cons (name,vars,body) term scope depth = L.App (L.Lam (term (M.insert name depth scope) (depth+1))) (foldr cons' body vars scope depth)
cons' var body scope depth = L.Lam (body (M.insert var depth scope) (depth+1))
app args scope depth = foldl1' snoc args scope depth where
snoc left right scope depth = L.App (left scope depth) (right scope depth)
var name scope depth = L.Var (depth - index - 1) where
index = maybe (error ("undefined variable `"++name++"`.")) id (M.lookup name scope)
num n scope depth = L.Lam (L.Lam (call n (L.App (L.Var 1)) (L.Var 0)))
lst terms scope depth = L.Lam (L.Lam (foldr (\ h t -> L.App (L.App (L.Var 1) (h scope (depth+2))) t) (L.Var 0) terms))
tup terms scope depth = L.Lam (foldl (\ t h -> L.App t (h scope (depth+1))) (L.Var 0) terms)
chr c scope depth = L.Lam (L.Lam (L.Lam (foldl bits (L.Var 2) (printf "%08b" (fromEnum c) :: String))))
where bits t h = L.App t (L.Var (fromEnum h - fromEnum '0'))
str s scope depth = toLambda (Lst (map Chr s))
wrd c scope depth = L.Lam (L.Lam (L.Lam (foldl bits (L.Var 2) (printf "%032b" (fromEnum c) :: String))))
where bits t h = L.App t (L.Var (fromEnum h - fromEnum '0'))
adt ctors scope depth = L.Lam (L.App (L.Var 0) (list (map ctor ctors))) where
ctor (name,ctor) = pair (toLambda (Str name)) (applyConstToBoundVar (L.Lam (list (map field ctor))))
field (name,field) = pair (toLambda (Str name)) (L.App (field (M.insert "*" (depth+4) scope) (depth+8)) (L.Var 7))
list term = L.Lam (L.Lam (foldr (\ h t -> L.App (L.App (L.Var 1) h) t) (L.Var 0) term))
pair a b = L.Lam (L.App (L.App (L.Var 0) a) b)
applyConstToBoundVar term = L.fold lam app var term (-1) where
lam body depth = L.Lam (body (depth+1))
app left right depth = L.App (left depth) (right depth)
var index depth | index == depth = L.Lam (L.Var (index+1))
var index depth | otherwise = L.Var index
call n f x = go n x where
go 0 x = x
go k x = go (k-1) (f x)
-- Returns a list of the free variables in a Caramel term.
freeVars :: Caramel -> [String]
freeVars term = fold lam app var nat lst tup chr str wrd adt leT term S.empty where
lam vars body boundVars = body (foldr S.insert boundVars vars)
app terms boundVars = concatMap ($ boundVars) terms
var varName boundVars = if S.member varName boundVars then [] else [varName]
nat _ boundVars = []
lst terms boundVars = concatMap ($ boundVars) terms
tup terms boundVars = concatMap ($ boundVars) terms
chr _ boundVars = []
str _ boundVars = []
wrd _ boundVars = []
adt ctors boundVars = concatMap (concatMap (($ boundVars) . snd) . snd) ctors
leT defs term boundVars
= term (foldr S.insert boundVars (map (\ (name,_,_) -> name) defs))
++ concatMap (\ (_,vars,body) -> body (foldr S.insert boundVars vars)) defs
-- Sorts let expressions so that a term that depends on the other always come before.
-- Also adds an extra bound variable for recursive terms, in order to enable further use
-- with fixed-point combinators and the like, i.e.,
-- `sum n = (is_zero? n 0 (add n (sum (pred n 1))))` becomes
-- `sum sum n = (is_zero? n 0 (add n (sum (pred n 1))))`
-- So it can be used as `(Y sum 3)` (`Y` being the Y-combinator).
sortRecursiveLets :: Caramel -> Caramel
sortRecursiveLets = fold Lam App Var Nat Lst Tup Chr Str Wrd Adt leT where
leT defs term = Let (sortTopologically (map node defs)) term where
names = S.fromList (map (\ (name,_,_) -> name) defs)
node def@(name, vars, body) = (name, dependencies, defWithFixedPoint) where
dependencies = filter (/= name) . filter (flip S.member names) $ freeVars'
defWithFixedPoint = (name, if elem name freeVars' then name:vars else vars, body)
freeVars' = freeVars (Lam vars body)
-- Naive implementation of a topological sort, O(N^2). Potential bottleneck. TODO: improve.
sortTopologically :: [(String, [String], a)] -> [a]
sortTopologically graph = go graph (S.empty :: S.Set String) [] where
go :: [(String, [String], a)] -> S.Set String -> [(String, [String], a)] -> [a]
go [] defined [] = []
go [] defined rest = go rest defined []
go ((node@(id, deps, val)) : nodes) defined rest
| all (flip S.member defined) deps = val : go nodes (S.insert id defined) rest
| otherwise = go nodes defined (node:rest)
-- Evaluates a Caramel term by converting it to the Lambda Calculus, reducing and reading back.
reduce :: Caramel -> Caramel
reduce = fromLambda . L.reduce . toLambda
|
8l/caramel
|
src/Caramel.hs
|
mit
| 14,900 | 1 | 21 | 4,557 | 5,808 | 3,029 | 2,779 | 245 | 14 |
{-# LANGUAGE TupleSections, OverloadedStrings #-}
module Forms where
import Import
import qualified Data.Text as T
import Yesod.Form.Bootstrap3
-- form handler with default action for FormFailure and FormMissing
formHandler :: FormResult a -> (a -> Handler ()) -> Handler ()
formHandler result f =
case result of
FormSuccess res -> f res
FormFailure err -> setMessageI $ MsgFormFailure $ T.concat err
FormMissing -> setMessageI MsgFormMissing
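-- A rough usage sketch (hypothetical handler; 'thingForm', 'ThingR' and
-- 'MsgThingSaved' are placeholders, not defined in this module):
--
--   postThingR :: Handler Html
--   postThingR = do
--     ((result, _), _) <- runFormPost thingForm
--     formHandler result $ \thing -> do
--       _ <- runDB $ insert thing
--       setMessageI MsgThingSaved
--     redirect ThingR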
-- default submit button
submitButton :: a -> BootstrapSubmit a
submitButton msg = BootstrapSubmit msg "btn btn-light btn-block btn-lg" []
withRows :: Text -> FieldSettings site -> FieldSettings site
withRows n fs = fs { fsAttrs = newAttrs }
where newAttrs = ("rows", n) : fsAttrs fs
|
haBuu/tfs-website
|
Forms.hs
|
mit
| 748 | 0 | 10 | 132 | 200 | 104 | 96 | 16 | 3 |
module Paths_minimalbug (
version,
getBinDir, getLibDir, getDataDir, getLibexecDir,
getDataFileName, getSysconfDir
) where
import qualified Control.Exception as Exception
import Data.Version (Version(..))
import System.Environment (getEnv)
import Prelude
catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a
catchIO = Exception.catch
version :: Version
version = Version [0,1,0,0] []
bindir, libdir, datadir, libexecdir, sysconfdir :: FilePath
bindir = "/Users/salmaancraig/.cabal/bin"
libdir = "/Users/salmaancraig/.cabal/lib/x86_64-osx-ghc-7.10.2/minimalbug-0.1.0.0-E0xQJzCPYSD8GCImmvGIKy"
datadir = "/Users/salmaancraig/.cabal/share/x86_64-osx-ghc-7.10.2/minimalbug-0.1.0.0"
libexecdir = "/Users/salmaancraig/.cabal/libexec"
sysconfdir = "/Users/salmaancraig/.cabal/etc"
getBinDir, getLibDir, getDataDir, getLibexecDir, getSysconfDir :: IO FilePath
getBinDir = catchIO (getEnv "minimalbug_bindir") (\_ -> return bindir)
getLibDir = catchIO (getEnv "minimalbug_libdir") (\_ -> return libdir)
getDataDir = catchIO (getEnv "minimalbug_datadir") (\_ -> return datadir)
getLibexecDir = catchIO (getEnv "minimalbug_libexecdir") (\_ -> return libexecdir)
getSysconfDir = catchIO (getEnv "minimalbug_sysconfdir") (\_ -> return sysconfdir)
getDataFileName :: FilePath -> IO FilePath
getDataFileName name = do
dir <- getDataDir
return (dir ++ "/" ++ name)
|
kejace/minimalbug
|
dist/build/autogen/Paths_minimalbug.hs
|
mit
| 1,390 | 0 | 10 | 177 | 362 | 206 | 156 | 28 | 1 |
module Language.Jass.JIT.Executing(
loadJassModule
, loadJassModuleFromFile
, withRaisedAST
, optimizeModule
, moduleAssembly
, withJassJIT
, NativeTableMaker
) where
import Language.Jass.Runtime.Memory
import Language.Jass.Runtime.Natives
import Language.Jass.Runtime.Globals
import Language.Jass.Utils
import Language.Jass.Codegen.Generator
import Language.Jass.Semantic.Check
import Language.Jass.Parser.Grammar
import Language.Jass.JIT.Module
import LLVM.General.Module as LLVM
import LLVM.General.ExecutionEngine
import LLVM.General.PassManager
import LLVM.General.Context
import Control.Monad
import Control.Monad.Trans.Except
import Foreign.Ptr
import Control.Monad.IO.Class (liftIO)
-- | Users define this function to specify natives
type NativeTableMaker = JITModule -> ExceptT String IO [(String, FunPtr ())]
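-- A minimal sketch (hypothetical): a table maker that registers no natives at
-- all; a real one would return pairs of native names and 'FunPtr's produced by
-- foreign "wrapper" imports.
--
--   noNatives :: NativeTableMaker
--   noNatives _jitModule = return []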
-- single file api
loadJassModule :: String -> String -> ExceptT String IO JassProgram
loadJassModule name code = loadJassFromSource name $ liftExceptPure $ parseJass name code
loadJassModuleFromFile :: FilePath -> ExceptT String IO JassProgram
loadJassModuleFromFile path = loadJassFromSource path $ liftExcept $ parseJassFile path
loadJassFromSource :: String -> ExceptT String IO JassModule -> ExceptT String IO JassProgram
loadJassFromSource modName source = do
tree <- source
context <- liftExceptPure $ checkModuleSemantic' tree
triple <- liftExceptPure $ uncurry3 (generateLLVM modName) context
return $ uncurry3 JassProgram triple
---
withRaisedAST :: Context -> JassProgram -> (UnlinkedProgram -> ExceptT String IO a) -> ExceptT String IO a
withRaisedAST cntx (JassProgram mapping tmap module') f = do
let map' = nativesMapFromMapping mapping
res <- withModuleFromAST cntx module' $ \mod' -> runExceptT $ f $ UnlinkedProgram map' tmap mod'
liftExceptPure res
moduleAssembly :: UnlinkedProgram -> ExceptT String IO String
moduleAssembly (UnlinkedProgram _ _ llvmModule) = liftIO $ moduleLLVMAssembly llvmModule
optimizeModule :: UnlinkedProgram -> ExceptT String IO ()
optimizeModule (UnlinkedProgram _ _ llvmModule) = liftIO $ void $ withPassManager set $ \ mng -> runPassManager mng llvmModule
where set = defaultCuratedPassSetSpec {
optLevel = Just 3
, simplifyLibCalls = Just True
, loopVectorize = Just True
, superwordLevelParallelismVectorize = Just True
, useInlinerWithThreshold = Just 1000
}
withJassJIT :: Context -> NativeTableMaker -> UnlinkedProgram -> (JITModule -> ExceptT String IO a) -> ExceptT String IO a
withJassJIT cntx nativesMaker (UnlinkedProgram nativesMap tmap llvmModule) action =
liftExcept $ withJIT cntx 3 $ \jit -> withModuleInEngine jit llvmModule $ \exModule -> runExceptT $ do
let jitModule = JITModule tmap exModule
natives <- nativesMaker jitModule
checkNativesName (fst <$> natives) nativesMap
let bindedNatives = foldl (\mp f -> f mp) nativesMap $ fmap (uncurry nativesMapBind) natives
case isAllNativesBinded bindedNatives of
Just name -> throwE $ "Native '" ++ name ++ "' isn't binded!"
Nothing -> do
mapM_ (uncurry $ callNativeBinder jitModule) $ getNativesBindings bindedNatives
setDefaultAllocator jitModule
executeGlobalInitializers jitModule
action jitModule
|
NCrashed/hjass
|
src/library/Language/Jass/JIT/Executing.hs
|
mit
| 3,342 | 0 | 21 | 599 | 882 | 454 | 428 | 64 | 2 |
module GHCJS.DOM.QuickTimePluginReplacement (
) where
|
manyoo/ghcjs-dom
|
ghcjs-dom-webkit/src/GHCJS/DOM/QuickTimePluginReplacement.hs
|
mit
| 56 | 0 | 3 | 7 | 10 | 7 | 3 | 1 | 0 |
import Data.List
import System.Environment
import System.FilePath
import System.Process
import System.INotify
uniqDirs :: [FilePath] -> [FilePath]
uniqDirs = nub . map takeDirectory
eventHandler :: String -> [FilePath] -> FilePath -> Event -> IO ()
eventHandler command paths directory (Modified _ (Just file)) =
if elem path paths
then system command >> return ()
else return ()
where
path = normalise $ directory </> file
eventHandler _ _ _ _ = return ()
addWatches :: (FilePath -> Event -> IO ()) -> [FilePath] -> IO [WatchDescriptor]
addWatches handler paths = do
inotify <- initINotify
mapM (\p -> addWatch inotify [Modify] p (handler p)) (map normalise paths)
wait :: String -> IO ()
wait command = do
_ <- getLine
_ <- system command
wait command
parse :: [String] -> IO ()
parse (command:paths) = do
_ <- addWatches (eventHandler command paths) $ uniqDirs paths
wait command
parse [] = print "Usage: monitor 'commands' [file ...]"
main :: IO ()
main = getArgs >>= parse
|
mastensg/monitor
|
monitor.hs
|
mit
| 1,045 | 1 | 12 | 229 | 423 | 207 | 216 | 30 | 2 |
{-# LANGUAGE BangPatterns, RankNTypes #-}
{-| Space efficient bit arrays
The module is meant to be imported qualified
(as it is common with collection libraries).
-}
{-
Copyright (C) 2009, 2010, 2011, 2012, 2013 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
-}
module Ganeti.Objects.BitArray
( BitArray
, size
, empty
, zeroes
, count0
, count1
, foldr
, (!)
, setAt
, (-&-)
, (-|-)
, subset
, asString
, fromList
, toList
) where
import Prelude hiding (foldr)
import qualified Prelude as P
import Control.Monad
import Control.Monad.Error
import qualified Data.IntSet as IS
import qualified Text.JSON as J
import Ganeti.BasicTypes
import Ganeti.JSON
-- | A fixed-size, space-efficient array of bits.
data BitArray = BitArray
{ size :: !Int
, _bitArrayBits :: !IS.IntSet
-- ^ Must not contain elements outside [0..size-1].
}
deriving (Eq, Ord)
instance Show BitArray where
show = asString '0' '1'
empty :: BitArray
empty = BitArray 0 IS.empty
zeroes :: Int -> BitArray
zeroes s = BitArray s IS.empty
-- | Right fold over the set, including indexes of each value.
foldr :: (Bool -> Int -> a -> a) -> a -> BitArray -> a
foldr f z (BitArray s bits) = let (j, x) = IS.foldr loop (s, z) bits
in feed0 (-1) j x
where
loop i (!l, x) = (i, f True i (feed0 i l x))
feed0 !i !j x | i >= j' = x
| otherwise = feed0 i j' (f False j' x)
where j' = j - 1
-- | Converts a bit array into a string, given characters
-- for @0@ and @1@.
asString :: Char -> Char -> BitArray -> String
asString c0 c1 = foldr f []
where f b _ = ((if b then c1 else c0) :)
-- | Computes the number of zeroes in the array.
count0 :: BitArray -> Int
count0 ba@(BitArray s _) = s - count1 ba
-- | Computes the number of ones in the array.
count1 :: BitArray -> Int
count1 (BitArray _ bits) = IS.size bits
infixl 9 !
-- | Test a given bit in an array.
-- If it's outside its scope, it's always @False@.
(!) :: BitArray -> Int -> Bool
(!) (BitArray s bits) i | (i >= 0) && (i < s) = IS.member i bits
| otherwise = False
-- | Sets or clears a given bit in an array.
-- Setting a bit fails if the index is out of bounds.
setAt :: (MonadError e m, Error e) => Int -> Bool -> BitArray -> m BitArray
setAt i False (BitArray s bits) =
return $ BitArray s (IS.delete i bits)
setAt i True (BitArray s bits) | (i >= 0) && (i < s) =
return $ BitArray s (IS.insert i bits)
setAt i True _ = failError $ "Index out of bounds: " ++ show i
infixl 7 -&-
-- | An intersection of two bit arrays.
-- The length of the result is the minimum length of the two.
(-&-) :: BitArray -> BitArray -> BitArray
BitArray xs xb -&- BitArray ys yb = BitArray (min xs ys)
(xb `IS.intersection` yb)
infixl 5 -|-
-- | A union of two bit arrays.
-- The length of the result is the maximum length of the two.
(-|-) :: BitArray -> BitArray -> BitArray
BitArray xs xb -|- BitArray ys yb = BitArray (max xs ys) (xb `IS.union` yb)
-- | Checks if the first array is a subset of the other.
subset :: BitArray -> BitArray -> Bool
subset (BitArray _ xs) (BitArray _ ys) = IS.isSubsetOf xs ys
-- | Converts a bit array into a list of booleans.
toList :: BitArray -> [Bool]
toList = foldr (\b _ -> (b :)) []
-- | Converts a list of booleans to a 'BitArray'.
fromList :: [Bool] -> BitArray
fromList xs =
-- Note: This traverses the list twice. It'd be better to compute everything
-- in one pass.
BitArray (length xs) (IS.fromList . map fst . filter snd . zip [0..] $ xs)
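-- For example, with the 'Show' instance above ('asString' with '0'/'1'),
-- 'show (fromList [True, False, True])' should yield "101".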
instance J.JSON BitArray where
showJSON = J.JSString . J.toJSString . show
readJSON j = do
let parseBit '0' = return False
parseBit '1' = return True
parseBit c = fail $ "Neither '0' nor '1': '" ++ [c] ++ "'"
str <- readEitherString j
fromList `liftM` mapM parseBit str
|
ribag/ganeti-experiments
|
src/Ganeti/Objects/BitArray.hs
|
gpl-2.0
| 4,586 | 0 | 13 | 1,109 | 1,184 | 636 | 548 | 85 | 2 |
{-# LANGUAGE OverloadedStrings, NoMonomorphismRestriction, GADTs, TemplateHaskell #-}
module Main where
import Control.Applicative
import Control.Lens
import Data.ByteString (ByteString)
import Data.Text (Text)
import Data.Maybe (fromMaybe)
import qualified Data.Map as M
import Snap (Handler, method, Method(..), writeText, writeBS,
getParam, SnapletInit, makeSnaplet, addRoutes,
route, liftIO, void)
import qualified Snap as Snap
import Control.Concurrent.Async (async)
import Control.Concurrent.MVar (MVar, newEmptyMVar, tryPutMVar, tryTakeMVar, isEmptyMVar)
import qualified System.IO.Streams as Stream
import qualified System.IO.Streams.Concurrent as Stream
import System.Exit (exitSuccess, exitFailure)
import Snap.Test.BDD
import Site
import HomeTest
main :: IO ()
main = do
(inp, out) <- Stream.makeChanPipe
runSnapTests defaultConfig { reportGenerators = [streamReport out, consoleReport] }
(route routes)
app
homeTests
res <- Stream.toList inp
if length (filter isFailing res) == 0
then exitSuccess
else exitFailure
where streamReport out results = do res <- Stream.read results
case res of
Nothing -> Stream.write Nothing out
Just r -> do
Stream.write (Just r) out
streamReport out results
isFailing (TestFail _) = True
isFailing (TestError _) = True
isFailing _ = False
|
TimeAttack/time-attack-snap
|
src/test/Test.hs
|
gpl-2.0
| 1,625 | 0 | 16 | 502 | 398 | 226 | 172 | 40 | 5 |
{-# LANGUAGE ExistentialQuantification #-}
{- |
Module : $Header$
Description : Transforms an OMDoc file into a development graph
Copyright : (c) Ewaryst Schulz, DFKI Bremen 2010
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : non-portable(Logic)
Given an OMDoc file, a Library Environment is constructed from it by
following all library links.
The import requires the following interface functions to be instantiated
for each logic:
Signature Category:
ide, cod
Sentences:
symmap_of
StaticAnalysis:
id_to_raw, symbol_to_raw, induced_from_morphism, induced_from_to_morphism
, signature_union, empty_signature, add_symb_to_sign
Logic:
omdoc_metatheory, omdocToSym, omdocToSen
These functions have default implementations which are sufficient
in many cases:
addOMadtToTheory, addOmdocToTheory
-}
module OMDoc.Import where
import Common.Result
import Common.ResultT
import Common.ExtSign
import Common.Id
import Common.IRI (simpleIdToIRI)
import Common.LibName
import Common.Utils
import Common.XmlParser (readXmlFile)
import Driver.ReadFn (libNameToFile)
import Driver.Options (rmSuffix, HetcatsOpts, putIfVerbose, showDiags)
import Logic.Logic
( AnyLogic (Logic)
, Logic ( omdoc_metatheory, omdocToSym, omdocToSen, addOMadtToTheory
, addOmdocToTheory)
, Category (ide, cod)
, StaticAnalysis ( induced_from_morphism, induced_from_to_morphism
, signature_union, empty_signature, add_symb_to_sign
, symbol_to_raw, id_to_raw )
, Sentences (symmap_of) )
import Logic.ExtSign
import Logic.Coerce
import Logic.Prover
import Logic.Grothendieck
import Comorphisms.LogicList
import Comorphisms.LogicGraph
import Static.DevGraph
import Static.DgUtils
import Static.GTheory
import Static.AnalysisStructured
import Static.ComputeTheory
import OMDoc.DataTypes
import OMDoc.XmlInterface (xmlIn)
import System.Directory
import Data.Graph.Inductive.Graph (LNode, Node)
import Data.Maybe
import Data.List
import qualified Data.Map as Map
import Control.Monad
import Control.Monad.Trans
import Network.URI
-- * Import Environment Interface
{- | There are three important maps for each theory:
1. OMName -> symbol, the NameSymbolMap stores for each OMDoc name the
translated hets symbol
2. OMName -> String, the NameMap stores the notation information of the
OMDoc names, identity mappings are NOT stored here!
3. SigMapI symbol, this signature map is just a container to store maps 1 and 2
-}
type NameSymbolMap = G_mapofsymbol OMName
-- | The keys of libMap consist of the filepaths without suffix!
data ImpEnv =
ImpEnv {
libMap :: Map.Map FilePath (LibName, DGraph)
, nsymbMap :: Map.Map (LibName, String) NameSymbolMap
, hetsOptions :: HetcatsOpts
}
initialEnv :: HetcatsOpts -> ImpEnv
initialEnv opts = ImpEnv { libMap = Map.empty
, nsymbMap = Map.empty
, hetsOptions = opts }
getLibEnv :: ImpEnv -> LibEnv
getLibEnv e = computeLibEnvTheories $
Map.fromList $ Map.elems $ libMap e
addDGToEnv :: ImpEnv -> LibName -> DGraph -> ImpEnv
addDGToEnv e ln dg =
e { libMap = Map.insert (libNameToFile ln) (ln, dg) $ libMap e }
addNSMapToEnv :: ImpEnv -> LibName -> String -> NameSymbolMap -> ImpEnv
addNSMapToEnv e ln nm nsm =
e { nsymbMap = Map.insert (ln, nm) nsm $ nsymbMap e }
lookupLib :: ImpEnv -> URI -> Maybe (LibName, DGraph)
lookupLib e u = Map.lookup (rmSuffix $ uriPath u) $ libMap e
lookupNode :: ImpEnv -> CurrentLib -> UriCD
-> Maybe ( LibName -- the origin libname of the theory
, LNode DGNodeLab -- the (eventually reference) node
)
lookupNode e (ln, dg) ucd =
let mn = getModule ucd in
if cdInLib ucd ln then
case filterLocalNodesByName mn dg of
[] -> error $ "lookupNode: Node not found: " ++ mn
lnode : _ -> Just (ln, lnode)
else case lookupLib e $ fromJust $ getUri ucd of
Nothing -> Nothing
Just (ln', dg') ->
case filterRefNodesByName mn ln' dg of
lnode : _ -> Just (ln', lnode)
[] -> listToMaybe
$ map (\ n -> (ln', n)) $ filterLocalNodesByName mn dg'
lookupNSMap :: ImpEnv -> LibName -> Maybe LibName -> String -> NameSymbolMap
lookupNSMap e ln mLn nm =
let ln' = fromMaybe ln mLn
mf = Map.findWithDefault
$ error $ concat [ "lookupNSMap: lookup failed for "
, show (ln', nm), "\n", show mLn, "\n"
, show $ nsymbMap e ]
in mf (ln', nm) $ nsymbMap e
rPutIfVerbose :: ImpEnv -> Int -> String -> ResultT IO ()
rPutIfVerbose e n s = lift $ putIfVerbose (hetsOptions e) n s
rPut :: ImpEnv -> String -> ResultT IO ()
rPut e = rPutIfVerbose e 1
rPut2 :: ImpEnv -> String -> ResultT IO ()
rPut2 e = rPutIfVerbose e 2
-- * URI Functions
readFromURL :: (FilePath -> IO a) -> URI -> IO a
readFromURL f u = if isFileURI u then f $ uriPath u
else error $ "readFromURL: Unsupported URI-scheme "
++ uriScheme u
toURI :: String -> URI
toURI s = case parseURIReference s of
Just u -> u
_ -> error $ "toURI: can't parse as uri " ++ s
libNameFromURL :: String -> URI -> IO LibName
libNameFromURL s u = do
let fp = uriPath u
mt <- getModificationTime fp
return $ setFilePath fp mt $ emptyLibName s
-- | Compute an absolute URI for a supplied URI relative to the given filepath.
resolveURI :: URI -> FilePath -> URI
resolveURI u fp = fromMaybe (error $ "toURI: can't resolve uri " ++ show u)
$ relativeTo u $ toURI fp
-- | Is the scheme of the uri empty or file?
isFileURI :: URI -> Bool
isFileURI u = elem (uriScheme u) ["", "file:"]
type UriCD = (Maybe URI, String)
showUriCD :: UriCD -> String
showUriCD (mUri, s) = case mUri of
Just u -> show u ++ "?" ++ s
_ -> s
getUri :: UriCD -> Maybe URI
getUri = fst
getModule :: UriCD -> String
getModule = snd
-- | Compute an absolute URI for a supplied CD relative to the given LibName
toUriCD :: OMCD -> LibName -> UriCD
toUriCD cd ln =
let [base, m] = cdToList cd
fp = getFilePath ln
mU = if null base then Nothing
else Just $ resolveURI (toURI base) fp
in (mU, m)
getLogicFromMeta :: Maybe OMCD -> AnyLogic
getLogicFromMeta mCD =
let p (Logic lid) = case omdoc_metatheory lid of
Just cd' -> fromJust mCD == cd'
_ -> False
in if isNothing mCD then defaultLogic else
case find p logicList of
Just al -> al
_ -> defaultLogic
cdInLib :: UriCD -> LibName -> Bool
cdInLib ucd ln = case getUri ucd of
Nothing -> True
Just url -> isFileURI url && getFilePath ln == uriPath url
-- * Main translation functions
-- | Translates an OMDoc file to a LibEnv
anaOMDocFile :: HetcatsOpts -> FilePath -> IO (Maybe (LibName, LibEnv))
anaOMDocFile opts fp = do
dir <- getCurrentDirectory
putIfVerbose opts 2 $ "Importing OMDoc file " ++ fp
Result ds mEnvLn <- runResultT $ importLib (initialEnv opts)
$ resolveURI (toURI fp) $ dir ++ "/"
showDiags opts ds
return $ fmap (\ (env, ln, _) -> (ln, getLibEnv env)) mEnvLn
-- * OMDoc traversal
{- | If the lib is not already in the environment, the OMDoc file and
the closure of its imports are added to the environment. -}
importLib :: ImpEnv -- ^ The import environment
-> URI -- ^ The url of the OMDoc file
-> ResultT IO (ImpEnv, LibName, DGraph)
importLib e u =
case lookupLib e u of
Just (ln, dg) -> return (e, ln, dg)
_ -> readLib e u
-- | The OMDoc file and the closure of its imports are added to the environment.
readLib :: ImpEnv -- ^ The import environment
-> URI -- ^ The url of the OMDoc file
-> ResultT IO (ImpEnv, LibName, DGraph)
readLib e u = do
rPut e $ "Downloading " ++ show u ++ " ..."
xmlString <- lift $ readFromURL readXmlFile u
OMDoc n l <- liftR $ xmlIn xmlString
{- the name of the omdoc is used as the libname, no relationship between the
libname and the filepath! -}
ln <- lift $ libNameFromURL n u
rPut e $ "Importing library " ++ show ln
(e', dg) <- foldM (addTLToDGraph ln) (e, emptyDG) l
rPut e $ "... loaded " ++ show u
return (addDGToEnv e' ln dg, ln, dg)
-- | Adds the Theory in the OMCD and the containing lib to the environment
importTheory :: ImpEnv -- ^ The import environment
-> CurrentLib -- ^ The current lib
-> OMCD -- ^ The cd which points to the Theory
-> ResultT IO ( ImpEnv -- the updated environment
, LibName -- the origin libname of the theory
, DGraph -- the updated devgraph of the current lib
, LNode DGNodeLab -- the corresponding node
)
importTheory e (ln, dg) cd = do
let ucd = toUriCD cd ln
rPut2 e $ "Looking up theory " ++ showUriCD ucd ++ " ..."
case lookupNode e (ln, dg) ucd of
Just (ln', nd)
| ln == ln' ->
do
rPut2 e "... found local node."
return (e, ln, dg, nd)
| isDGRef $ snd nd ->
do
rPut2 e "... found already referenced node."
return (e, ln', dg, nd)
| otherwise ->
do
rPut2 e "... found node, referencing it ..."
let (lnode, dg') = addNodeAsRefToDG nd ln' dg
rPut2 e "... done"
return (e, ln', dg', lnode)
-- if lookupNode finds nothing, it implies that ln is not the current libname!
_ -> do
let u = fromJust $ getUri ucd
rPut2 e "... node not found, reading lib."
(e', ln', refDg) <- readLib e u
case filterLocalNodesByName (getModule ucd) refDg of
-- don't add the node to the refDG but to the original DG!
nd : _ -> let (lnode, dg') = addNodeAsRefToDG nd ln' dg
in return (e', ln', dg', lnode)
[] -> error $ "importTheory: couldn't find node: " ++ show cd
-- | Adds a view or theory to the DG; the ImpEnv may also be modified.
addTLToDGraph :: LibName -> (ImpEnv, DGraph) -> TLElement
-> ResultT IO (ImpEnv, DGraph)
-- adding a theory to the DG
addTLToDGraph ln (e, dg) (TLTheory n mCD l) = do
rPut e $ "Importing theory " ++ n
let clf = classifyTCs l
{- I. Lookup all imports (= follow and create them first),
and insert DGNodeRefs if necessary. -}
((e', dg'), iIL) <- followImports ln (e, dg) $ importInfo clf
-- II. Compute morphisms and update initial sig and name symbol map stepwise.
((nsmap, gSig), iIWL) <-
computeMorphisms e' ln (notations clf)
(initialSig $ getLogicFromMeta mCD) iIL
-- III. Compute local signature
(nsmap', gSig') <- liftR $ localSig clf nsmap gSig
-- IV. Add the sentences to the Node.
gThy <- liftR $ addSentences clf nsmap' gSig'
-- V. Complete the morphisms with final signature
iIWL' <- liftR $ completeMorphisms (signOf gThy) iIWL
-- VI. Add the Node to the DGraph.
let ((nd, _), dg'') = addNodeToDG dg' n gThy
-- VII. Create links from the morphisms.
dg''' = addLinksToDG nd dg'' iIWL'
-- add the new name symbol map to the environment
e'' = addNSMapToEnv e' ln n nsmap'
return (e'', dg''')
addTLToDGraph ln (e, dg) (TLView n from to mMor) = do
rPut e $ "Importing view " ++ n
{- follow the source and target of the view and insert DGNodeRefs
if necessary.
Use followTheory for from and to. -}
((e', dg'), [lkNdFrom, lkNdTo]) <- followTheories ln (e, dg) [from, to]
lkInf <- computeViewMorphism e' ln $ ImportInfo (lkNdFrom, lkNdTo) n mMor
let dg'' = addLinkToDG
{- this error should never occur as the linkinfo contains
a to-node.
The error is used here as a "don't care element" of type Node -}
(error "addTLToDGraph: TLView - Default node not available")
dg' lkInf
return (e', dg'')
-- ** Utils to compute DGNodes from OMDoc Theories
{-
the morphisms are incomplete because the target signature
wasn't complete at the time of morphism computation.
we complete the morphisms by composing them with signature inclusions
to the complete target signature
-}
completeMorphisms :: G_sign -- ^ the complete target signature
-> [LinkInfo] -- ^ the incomplete morphisms
-> Result [LinkInfo]
completeMorphisms gsig = mapR (fmapLI $ completeMorphism $ ide gsig)
completeMorphism :: GMorphism -- ^ the target signature id morphism
-> GMorphism -- ^ the incomplete morphism
-> Result GMorphism
completeMorphism idT gmorph = compInclusion logicGraph gmorph idT
computeMorphisms :: ImpEnv -> LibName
-> Map.Map OMName String -- ^ Notations
-> (NameSymbolMap, G_sign)
-> [ImportInfo LinkNode]
-> ResultT IO ((NameSymbolMap, G_sign), [LinkInfo])
computeMorphisms e ln nots = mapAccumLM (computeMorphism e ln nots)
{- | Computes the morphism for an import link and updates the signature
and the name symbol map with the imported symbols -}
computeMorphism :: ImpEnv -- ^ The import environment for lookup purposes
-> LibName -- ^ Current libname
-> Map.Map OMName String -- ^ Notations of target signature
-> (NameSymbolMap, G_sign) {- ^ OMDoc symbol to Hets symbol map
and target signature -}
-> ImportInfo LinkNode -- ^ source label with OMDoc morphism
-> ResultT IO ((NameSymbolMap, G_sign), LinkInfo)
computeMorphism e ln nots (nsmap, tGSig) (ImportInfo (mLn, (from, lbl)) n morph)
= case dgn_theory lbl of
G_theory sLid (ExtSign sSig _) _ _ _ ->
case tGSig of
G_sign tLid (ExtSign tSig _) sigId ->
do
let sourceNSMap = lookupNSMap e ln mLn $ getDGNodeName lbl
{- 1. build the morphism
compute first the symbol-map -}
symMap <- computeSymbolMap (Just nots) sourceNSMap nsmap
morph tLid
let
f = symbol_to_raw tLid
g (Left (_, rs)) = rs
g (Right s) = symbol_to_raw tLid s
rsMap = Map.fromList $ map (\ (x, y) -> (f x, g y) )
symMap
-- REMARK: Logic-homogeneous environment assumed
sSig' <- coercePlainSign sLid tLid "computeMorphism" sSig
mor <- liftR $ induced_from_morphism tLid rsMap sSig'
{- 2. build the GMorphism and update the signature
and the name symbol map -}
newSig <- liftR $ signature_union tLid tSig $ cod mor
let gMor = gEmbed $ mkG_morphism tLid mor
newGSig = G_sign tLid (makeExtSign tLid newSig) sigId
{- function for filtering the raw symbols in the
nsmap update -}
h (s, Left (n', _)) = Just (s, n')
h (_, Right _) = Nothing
nsmap' = updateSymbolMap tLid mor nsmap
$ mapMaybe h symMap
return ( (nsmap', newGSig)
, (gMor, globalDef, mkLinkOrigin n, from, Nothing))
-- | Computes the morphism for a view
computeViewMorphism :: ImpEnv -- ^ The import environment for lookup purposes
-> LibName -- ^ Current libname
-> ImportInfo (LinkNode, LinkNode)
-- ^ OMDoc morphism with source and target node
-> ResultT IO LinkInfo
computeViewMorphism e ln (ImportInfo ( (mSLn, (from, lblS))
, (mTLn, (to, lblT))) n morph)
= case (dgn_theory lblS, dgn_theory lblT) of
(G_theory sLid eSSig _ _ _, G_theory tLid eTSig _ _ _) ->
do
let nsmapS = lookupNSMap e ln mSLn $ getDGNodeName lblS
nsmapT = lookupNSMap e ln mTLn $ getDGNodeName lblT
{- 1. build the morphism
compute first the symbol-map -}
symMap <- computeSymbolMap Nothing nsmapS nsmapT morph tLid
let f = symbol_to_raw tLid
{- this can't occur as we do not provide a notation map
to computeSymbolMap -}
g (Left _) = error "computeViewMorphism: impossible case"
g (Right s) = symbol_to_raw tLid s
rsMap = Map.fromList
$ map (\ (x, y) -> (f x, g y) ) symMap
-- REMARK: Logic-homogeneous environment assumed
eSSig' <- coerceSign sLid tLid "computeViewMorphism" eSSig
mor <- liftR $ induced_from_to_morphism tLid rsMap eSSig' eTSig
-- 2. build the GMorphism
let gMor = gEmbed $ mkG_morphism tLid mor
return (gMor, globalThm, mkLinkOrigin n, from, Just to)
mkLinkOrigin :: String -> DGLinkOrigin
mkLinkOrigin s = DGLinkMorph $ simpleIdToIRI $ mkSimpleId s
{- | For each entry (s, n) in l we add the mapping (n, m(s))
to the name symbol map -}
updateSymbolMap :: forall lid sublogics
basic_spec sentence symb_items symb_map_items
sign morphism symbol raw_symbol proof_tree .
Logic lid sublogics
basic_spec sentence symb_items symb_map_items
sign morphism symbol raw_symbol proof_tree =>
lid -> morphism -- ^ a signature morphism m
-> NameSymbolMap
-> [(symbol, OMName)] -- ^ a list l of symbol to OMName mappings
-> NameSymbolMap
updateSymbolMap lid mor nsmap l =
case nsmap of
G_mapofsymbol lid' sm ->
let f nsm (s, n) = Map.insert n (g s) nsm -- fold function
g s = Map.findWithDefault
(error $ "updateSymbolMap: symbol not found " ++ show s)
s $ symmap_of lid mor
sm' = coerceMapofsymbol lid' lid sm
in G_mapofsymbol lid $ foldl f sm' l
{- | Computes a symbol map for the given TCMorphism. The symbols are looked
up in the provided maps. For each symbol not found in the target map we
return an OMName, raw symbol pair in order to insert the missing entries
in the target name symbol map later. If notations are not present, all
lookup failures end up in errors.
-}
computeSymbolMap :: forall lid sublogics
basic_spec sentence symb_items symb_map_items
sign morphism symbol raw_symbol proof_tree .
Logic lid sublogics
basic_spec sentence symb_items symb_map_items
sign morphism symbol raw_symbol proof_tree =>
Maybe (Map.Map OMName String) -- ^ Notations for missing symbols in map
-> NameSymbolMap -> NameSymbolMap -> TCMorphism -> lid
-> ResultT IO [(symbol, Either (OMName, raw_symbol) symbol)]
computeSymbolMap mNots nsmapS nsmapT morph lid =
case (nsmapS, nsmapT) of
(G_mapofsymbol sLid sm, G_mapofsymbol tLid tm) ->
do
-- REMARK: Logic-homogeneous environment assumed
let sNSMap = coerceMapofsymbol sLid lid sm
tNSMap = coerceMapofsymbol tLid lid tm
mf msg = Map.findWithDefault
$ error $ "computeSymbolMap: lookup failed for " ++ msg
-- function for notation lookup
g = lookupNotationInMap
$ fromMaybe (error "computeSymbolMap: no notations") mNots
-- function for map
f (omn, omimg) =
let tSymName = case omimg of
Left s -> mkSimpleName s
Right (OMS qn) -> unqualName qn
_ -> error $ "computeSymbolMap: Nonsymbol "
++ "element mapped"
in ( mf (show omn) omn sNSMap
, case Map.lookup tSymName tNSMap of
Just ts -> Right ts
_ -> Left (tSymName, id_to_raw lid $ nameToId
$ g tSymName))
return $ map f morph
followImports :: LibName -> (ImpEnv, DGraph) -> [ImportInfo OMCD]
-> ResultT IO ((ImpEnv, DGraph), [ImportInfo LinkNode])
followImports ln = mapAccumLCM (curry snd) (followImport ln)
{- | Ensures that the theory for the given OMCD is available in the environment.
See also 'followTheory' -}
followImport :: LibName -> (ImpEnv, DGraph) -> ImportInfo OMCD
-> ResultT IO ((ImpEnv, DGraph), ImportInfo LinkNode)
followImport ln x iInfo = do
(x', linknode) <- followTheory ln x $ iInfoVal iInfo
return (x', fmap (const linknode) iInfo)
followTheories :: LibName -> (ImpEnv, DGraph) -> [OMCD]
-> ResultT IO ((ImpEnv, DGraph), [LinkNode])
followTheories ln = mapAccumLCM (curry snd) (followTheory ln)
{- | We look up the theory referenced by the cd in the environment
and add it to the environment if necessary. -}
followTheory :: LibName -> (ImpEnv, DGraph) -> OMCD
-> ResultT IO ((ImpEnv, DGraph), LinkNode)
followTheory ln (e, dg) cd = do
(e', ln', dg', lnode) <- importTheory e (ln, dg) cd
let mLn = if ln == ln' then Nothing else Just ln'
return ((e', dg'), (mLn, lnode))
-- * Development Graph and LibEnv interface
{- | returns a function compatible with mapAccumLM for TCElement processing.
Used in localSig. -}
sigmapAccumFun :: (Monad m, Show a) => (SigMapI a -> TCElement -> String -> m a)
-> SigMapI a -> TCElement -> m (SigMapI a, a)
sigmapAccumFun f smi s = do
let n = tcName s
hetsname = lookupNotation smi n
s' <- f smi s hetsname
let smi' = smi { sigMapISymbs = Map.insert n s' $ sigMapISymbs smi }
return (smi', s')
-- | Builds an initial signature and a name map of the given logic.
initialSig :: AnyLogic -> (NameSymbolMap, G_sign)
initialSig lg =
case lg of
Logic lid ->
( G_mapofsymbol lid Map.empty
, G_sign lid (makeExtSign lid $ empty_signature lid) startSigId)
-- | Adds the local signature to the given signature and name symbol map
localSig :: TCClassification -> NameSymbolMap -> G_sign
-> Result (NameSymbolMap, G_sign)
localSig clf nsmap gSig =
case (gSig, nsmap) of
(G_sign lid _ _, G_mapofsymbol lid' sm) ->
do
let smi = SigMapI (coerceMapofsymbol lid' lid sm) $ notations clf
{- accumulates symbol mappings in the symbMap in SigMapI
while creating symbols from OMDoc symbols -}
(sm', symbs) <- mapAccumLM (sigmapAccumFun $ omdocToSym lid) smi
$ sigElems clf
-- adding the symbols to the empty signature
sig <- foldM (add_symb_to_sign lid) (empty_signature lid) symbs
let locGSig = G_sign lid (makeExtSign lid sig) startSigId
-- combining the local and the given signature
gSig' <- gsigUnion logicGraph True gSig locGSig
return (G_mapofsymbol lid $ sigMapISymbs sm', gSig')
-- | Adds sentences and logic dependent signature elements to the given sig
addSentences :: TCClassification -> NameSymbolMap -> G_sign -> Result G_theory
addSentences clf nsmap gsig =
case (nsmap, gsig) of
(G_mapofsymbol lidM sm, G_sign lid (ExtSign sig _) ind1) ->
do
sigm <- return $ SigMapI (coerceMapofsymbol lidM lid sm)
$ notations clf
-- 1. translate sentences
mSens <- mapM (\ tc -> omdocToSen lid sigm tc
$ lookupNotation sigm $ tcName tc) $ sentences clf
-- 2. translate adts
(sig', sens') <- addOMadtToTheory lid sigm (sig, catMaybes mSens)
$ adts clf
{- 3. translate rest of theory
(all the sentences or just those which returned Nothing?) -}
(sig'', sens'') <- addOmdocToTheory lid sigm (sig', sens')
$ sentences clf
return $ G_theory lid (mkExtSign sig'') ind1
(toThSens sens'') startThId
-- | Adds Edges from the LinkInfo list to the development graph.
addLinksToDG :: Node -> DGraph -> [LinkInfo] -> DGraph
addLinksToDG nd = foldl (addLinkToDG nd)
-- | Adds Edge from the LinkInfo to the development graph.
addLinkToDG :: Node -> DGraph -> LinkInfo -> DGraph
addLinkToDG to dg (gMor, lt, lo, from, mTo) =
insLink dg gMor lt lo from $ fromMaybe to mTo
-- | Adds a Node from the given 'G_theory' to the development graph.
addNodeToDG :: DGraph -> String -> G_theory -> (LNode DGNodeLab, DGraph)
addNodeToDG dg n gth =
let nd = getNewNodeDG dg
ndName = parseNodeName n
ndInfo = newNodeInfo DGBasic
newNode = (nd, newInfoNodeLab ndName ndInfo gth)
in (newNode, insNodeDG newNode dg)
addNodeAsRefToDG :: LNode DGNodeLab -> LibName -> DGraph
-> (LNode DGNodeLab, DGraph)
addNodeAsRefToDG (nd, lbl) ln dg =
let info = newRefInfo ln nd
refNodeM = lookupInAllRefNodesDG info dg
nd' = getNewNodeDG dg
lnode = (nd', lbl { nodeInfo = info })
dg1 = insNodeDG lnode dg
in case refNodeM of
Just refNode -> ((refNode, labDG dg refNode), dg)
_ -> (lnode, dg1)
-- * Theory-utils
type CurrentLib = (LibName, DGraph)
type LinkNode = (Maybe LibName, LNode DGNodeLab)
type LinkInfo = (GMorphism, DGLinkType, DGLinkOrigin, Node, Maybe Node)
data ImportInfo a = ImportInfo a String TCMorphism deriving Show
iInfoVal :: ImportInfo a -> a
iInfoVal (ImportInfo x _ _) = x
instance Functor ImportInfo where
fmap f (ImportInfo x y z) = ImportInfo (f x) y z
fmapLI :: Monad m => (GMorphism -> m GMorphism) -> LinkInfo -> m LinkInfo
fmapLI f (gm, x, y, z, t) = do
gm' <- f gm
return (gm', x, y, z, t)
data TCClassification = TCClf {
importInfo :: [ImportInfo OMCD] -- ^ Import-info
, sigElems :: [TCElement] -- ^ Signature symbols
, sentences :: [TCElement] -- ^ Theory sentences
, adts :: [[OmdADT]] -- ^ ADTs
, notations :: Map.Map OMName String -- ^ Notations
}
emptyClassification :: TCClassification
emptyClassification = TCClf [] [] [] [] Map.empty
classifyTCs :: [TCElement] -> TCClassification
classifyTCs = foldr classifyTC emptyClassification
classifyTC :: TCElement -> TCClassification -> TCClassification
classifyTC tc clf =
case tc of
TCSymbol _ _ sr _
| elem sr [Obj, Typ] -> clf { sigElems = tc : sigElems clf }
| otherwise -> clf { sentences = tc : sentences clf }
TCNotation (cd, omn) n (Just "hets") ->
if cdIsEmpty cd then
clf { notations = Map.insert omn n $ notations clf }
else clf
TCADT l -> clf { adts = l : adts clf }
TCImport n from morph ->
clf { importInfo = ImportInfo from n morph : importInfo clf }
TCComment _ -> clf
TCSmartNotation _ _ _ _ _ -> error "classifyTC: unexpected SmartNotation"
TCFlexibleNotation _ _ _ ->
error "classifyTC: unexpected FlexibleNotation"
-- just for the case TCNotation with a style different from hets
_ -> clf
|
nevrenato/Hets_Fork
|
OMDoc/Import.hs
|
gpl-2.0
| 27,724 | 0 | 23 | 8,513 | 6,776 | 3,527 | 3,249 | 466 | 9 |
-----------------------------------------------------------------------------
-- |
-- Module :
-- Copyright : (c) 2013 Boyun Tang
-- License : BSD-style
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : ghc
--
--
--
-----------------------------------------------------------------------------
module MiRanda.Storage.Type where
import MiRanda.Types (Gene(..)
,UTR(..)
,MScore(..)
,Pair(..)
,Conservation(..)
,RawScore(..)
,ContextScore(..)
,ContextScorePlus(..)
,SeedType(..)
,Align(..)
)
import Data.ByteString (ByteString)
import Control.DeepSeq
import qualified Data.Vector as V
import qualified Data.Vector.Unboxed as UV
import Data.Int
data Expression = Coding
| NonCoding
deriving (Eq,Show)
data GeneRecord = GR
{ geneInfo :: GeneInfo
, mirSites :: [MiRSites]
} deriving (Show,Eq)
data GeneInfo = GI
{ gene :: Gene
, expressionStyle :: Expression
, thisSpecies :: UTR
, otherSpecies :: [UTR]
} deriving (Show,Eq)
data MiRSites = MiRSites
{ mir :: MiRNA
, sites :: [Site]
} deriving (Show,Eq)
data Site = Site
{ miRandaScore :: MScore
, conserveScore :: Conservation
, rawScore :: RawScore
, contextScore :: Maybe ContextScore
, contextScorePlus :: Maybe ContextScorePlus
, seedRange :: Pair
, siteRange :: Pair
, seed :: SeedType
, alignStructure :: Align
} deriving (Show,Eq)
data MiRNA = MiRNA
{ identity :: ByteString -- "product" in miRBase feature
, accession :: ByteString -- miRBase accession id
, isExperimentalValidated :: Bool
, family :: Maybe Family
, seqdata :: ByteString
} deriving (Show,Eq)
data Family = Family
{ rFamID :: ByteString
, miRBaseID :: ByteString
, miRNAFamily :: ByteString
} deriving (Show,Eq)
data EnDB = EnDB
{ seqnames :: V.Vector ByteString
, genesymbols :: V.Vector ByteString
, identities :: V.Vector ByteString
, accessions :: V.Vector ByteString
-- , datum :: [(UV.Vector Int,UV.Vector Double)]
, datContext :: UV.Vector Float
, datSite :: UV.Vector Int16
} deriving (Eq)
instance NFData GeneRecord where
rnf (GR gi mS) = rnf gi `seq` rnf mS `seq` ()
instance NFData GeneInfo where
rnf (GI a b c ds) = rnf a `seq` rnf b `seq` rnf c `seq` rnf ds `seq` ()
instance NFData Expression where
rnf a = a `seq` ()
instance NFData Family where
rnf (Family a b c) = rnf a `seq` rnf b `seq` rnf c `seq` ()
instance NFData MiRNA where
rnf (MiRNA a b c d e) = rnf a `seq` rnf b `seq` rnf c `seq` rnf d `seq` rnf e `seq` ()
instance NFData Site where
rnf (Site a b c d e f g h i) =
rnf a `seq` rnf b `seq` rnf c `seq` rnf d `seq` rnf e `seq`
rnf f `seq` rnf g `seq` rnf h `seq` rnf i `seq` ()
instance NFData MiRSites where
rnf (MiRSites a b) = rnf a `seq` rnf b `seq` ()
|
tangboyun/miranda
|
src/MiRanda/Storage/Type.hs
|
gpl-3.0
| 3,088 | 0 | 15 | 881 | 958 | 560 | 398 | 80 | 0 |
module Lab2.Exercises where
import Data.List
import Data.Maybe
import Test.QuickCheck
import Data.Bits
import System.Random
import Control.Monad
import Control.Applicative
import Data.Char
import Lab2.Util.Ibans
import Text.Regex.Posix
-- Assignment 2 / Lab 2 :: Group 14 --
-- Define Main --
main = do
putStrLn "===================="
putStrLn "Assignment 2 / Lab 2"
putStrLn "===================="
putStrLn "> Exercise 1"
exercise1
putStrLn "> Exercise 2"
exercise2
putStrLn "> Exercise 3a"
exercise3a
putStrLn "> Exercise 3b"
exercise3b
putStrLn "> Exercise 4"
exercise4
putStrLn "> Exercise 5"
exercise5
putStrLn "> Exercise 6"
exercise6
putStrLn "> Exercise 7"
exercise7
putStrLn "> BONUS"
exercisebonus
-- provided
infix 1 -->
(-->) :: Bool -> Bool -> Bool
p --> q = not p || q
probs :: Int -> IO [Float]
probs 0 = return []
probs n = do
p <- getStdRandom random
ps <- probs (n-1)
return (p:ps)
forall :: [a] -> (a -> Bool) -> Bool
forall = flip all
stronger, weaker :: [a] -> (a -> Bool) -> (a -> Bool) -> Bool
stronger xs p q = forall xs (\ x -> p x --> q x)
weaker xs p q = stronger xs q p
-- Exercise 1
-- About one hour, including time to think of the chi test
quartile :: Float -> Float -> Float -> Bool
quartile min max n = n >= min && n < max
quartile1, quartile2, quartile3, quartile4 :: Float -> Bool
quartile1 = quartile 0 0.25
quartile2 = quartile 0.25 0.5
quartile3 = quartile 0.5 0.75
quartile4 = quartile 0.75 1
chi :: [Int] -> Int -> Float
chi [] m = 0
chi (x:xs) m = (fromIntegral((x-m)^2) / fromIntegral m) + chi xs m
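-- A small worked example (made-up counts): with n = 10000 draws the expected
-- count per quartile is m = 2500, so observed counts [2400,2600,2500,2500] give
-- chi [2400,2600,2500,2500] 2500 == (100^2 + 100^2) / 2500 == 8.0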
distribution :: Int -> IO ()
distribution n = do
p <- probs n
let a = length $ filter quartile1 p
let b = length $ filter quartile2 p
let c = length $ filter quartile3 p
let d = length $ filter quartile4 p
let m = div n 4
let x = chi [a,b,c,d] m
print (a,b,c,d,x)
exercise1 = distribution 10000
-- Exercise 2
-- Encoded properties for each triangle
-- 15 min
exercise2 = print()
data Shape = NoTriangle | Equilateral
| Isosceles | Rectangular | Other deriving (Eq,Show)
triangle :: Integer -> Integer -> Integer -> Shape
triangle a b c | noTriangle a b c = NoTriangle
| equilateral a b c = Equilateral
| isosceles a b c = Isosceles
| rectangular a b c = Rectangular
| otherwise = Other
isTriangle, noTriangle, equilateral, isosceles, rectangular :: Integer -> Integer -> Integer -> Bool
isTriangle a b c = (a + b > c) || (a + c > b) || (b + c > a)
noTriangle a b c = not $ isTriangle a b c
equilateral a b c = (a == b) && (a == c) && (b == c)
isosceles a b c = (a == b) || (a == c) || (b == c)
rectangular a b c = (a^2+b^2) == c^2 || (a^2+c^2) == b^2 || (b^2+c^2) == a^2
equilateralProp, isoscelesProp, rectangularProp :: Int -> [(Int, Int, Int)]
equilateralProp n = [(a,b,c)| a <- [1..n], b <- [1..n], c <- [1..n], a == b && b == c]
isoscelesProp n = [(a,b,c)| a <- [1..n], b <- [1..n], c <- [1..n], (a == b) && (a == c) && (b == c)]
rectangularProp n = [(a,b,c)| a <- [1..n], b <- [1..n], c <- [1..n], (a^2+b^2) == c^2 || (a^2+c^2) == b^2 || (b^2+c^2) == a^2]
-- Exercise 3a
exercise3a = print()
-- Exercise 3b
exercise3b = print()
-- Exercise 4
exercise4 = print(deran [0..3])
isPermutation :: Eq a => [a] -> [a] -> Bool
isPermutation [] [] = False
isPermutation xs ys = (length xs == length ys)
&& forall xs (`elem` ys)
&& forall ys (`elem` xs)
-- Exercise 5
-- 10 min
-- reuse isPermutation, we know the elements are there so we can call fromJust on elemIndex
-- simply compare the index of the relevant element with the other list
-- use permutations to generate a permutation, only add it to the list if it satisfies isDerangement
exercise5 = print()
isDerangement :: Eq a => [a] -> [a] -> Bool
isDerangement xs ys = isPermutation xs ys
&& forall xs (\x -> fromJust(elemIndex x xs) /= fromJust(elemIndex x ys))
deran :: Eq a => [a] -> [[a]]
deran xs = [ys | ys <- permutations xs, isDerangement xs ys]
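-- e.g. deran [0,1,2] yields exactly the two derangements [1,2,0] and [2,0,1]
-- (their order depends on the order 'permutations' produces them in)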
-- Exercise 6
-- simply fetch index in the array and get letter from the other array
-- 15 min
exercise6 = quickCheckResult rotSpec
upper, lower, upperRot13, lowerRot13 :: String
upper = ['A'..'Z']
lower = ['a'..'z']
upperRot13 = ['N'..'Z'] ++ ['A'..'M']
lowerRot13 = ['n'..'z'] ++ ['a'..'m']
rot13 :: Char -> Char
rot13 c | c `elem` upper = upperRot13 !! fromJust(elemIndex c upper)
| c `elem` lower = lowerRot13 !! fromJust(elemIndex c lower)
| otherwise = c
rot13string :: String -> String
rot13string = map rot13
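-- e.g. rot13string "Hello, World!" == "Uryyb, Jbeyq!"; non-letters are kept
-- as-is, and applying rot13string twice gives back the original string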
rotSpecLength :: String -> Bool
rotSpecLength s = length(rot13string s) == length s
rotSpecNotEqual :: String -> Bool
rotSpecNotEqual [] = True
rotSpecNotEqual s = rot13string s /= s
rotSpecEqual :: String -> Bool
rotSpecEqual s = rot13string(rot13string s) == s
rotSpec :: String -> Bool
rotSpec s | null(strip s) = True
| otherwise = rotSpecLength s && rotSpecNotEqual s && rotSpecEqual s
strip :: String -> String
strip = filter(\x -> x `elem` (['a'..'z'] ++ ['A'..'Z']))
-- Exercise 7
iban :: String -> Bool
iban s = mod (toNumbers $ first4 s) 97 == 1
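-- e.g. iban "GB82WEST12345698765432" == True (the well-known example IBAN);
-- 'iban' assumes a well-formed input such as one accepted by 'preCheck' below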
first4 :: String -> String
first4 (a:b:c:d:s) = s ++ [a,b,c,d]
toNum :: Char -> String
toNum c | c `elem` ['0'..'9'] = [c]
| c `elem` ['A'..'Z'] = show $ maybe 0 (+10) (elemIndex c ['A'..'Z'])
| otherwise = []
toNumbers :: String -> Integer
toNumbers s = read(concatMap toNum s)::Integer
preCheck :: String -> Bool
preCheck s = s =~ "^[A-Z]{2}[0-9]{2}[A-Z0-9]{0,30}"
exercise7 = print(forall validIbans preCheck)
-- Bonus Exercises
exercisebonus = print()
|
vdweegen/UvA-Software_Testing
|
Lab2/Willem/Exercises.hs
|
gpl-3.0
| 5,870 | 0 | 18 | 1,505 | 2,435 | 1,271 | 1,164 | 139 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
module Development.Shake.Simple
(Rule, need, rule, simpleRule, simpleStoredValue, storedValue, want) where
import Data.Functor (($>))
import Development.Shake (Action, Rules, ShakeOptions, ShakeValue, action)
import Development.Shake.Classes (Binary, Hashable, NFData)
import Development.Shake.Rule (Rule, apply, rule, storedValue)
newtype SimpleKey a = SimpleKey a
deriving (Binary, Eq, Hashable, NFData, Show)
fromSimpleKey :: SimpleKey a -> a
fromSimpleKey (SimpleKey a) = a
instance ShakeValue key => Rule (SimpleKey key) () where
storedValue = simpleStoredValue
need :: Rule (SimpleKey key) () => [key] -> Action ()
need keys = (apply $ fmap SimpleKey keys :: Action [()]) $> ()
want :: Rule (SimpleKey key) () => [key] -> Rules ()
want = action . need
simpleStoredValue :: Rule key value => ShakeOptions -> key -> IO (Maybe value)
simpleStoredValue _ _ = pure Nothing
simpleRule :: Rule (SimpleKey key) value => (key -> Action value) -> Rules ()
simpleRule r = rule $ Just . r . fromSimpleKey
|
cblp/stack-offline
|
test/Development/Shake/Simple.hs
|
gpl-3.0
| 1,161 | 0 | 10 | 206 | 409 | 223 | 186 | 23 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module System.DevUtils.Base.Cloud.Amazon.EC2 (
EC2 (..),
EC2Root (..),
EC2Config (..),
EC2Region (..),
EC2InstanceType (..),
EC2Size (..),
EC2ValueColumns (..)
) where
import System.DevUtils.Base.Cloud.Amazon.Misc
import System.DevUtils.Base.Currency
import Data.Aeson
import Control.Applicative
import Control.Monad
data EC2 = EC2 {
}
data EC2Root = EC2Root {
vers :: Version,
config :: EC2Config
} deriving (Show, Read, Eq)
instance FromJSON EC2Root where
parseJSON (Object v) = EC2Root <$>
v .: "vers" <*>
v .: "config"
parseJSON _ = mzero
data EC2Config = EC2Config {
currencies :: [Currency],
rate :: Rate,
valueColumnsC :: [String],
regions :: [EC2Region]
} deriving (Show, Read, Eq)
instance FromJSON EC2Config where
parseJSON (Object v) = EC2Config <$>
v .: "currencies" <*>
v .: "rate" <*>
v .: "valueColumns" <*>
v .: "regions"
parseJSON _ = mzero
data EC2Region = EC2Region {
region :: String,
instanceType :: [EC2InstanceType]
} deriving (Show, Read, Eq)
instance FromJSON EC2Region where
parseJSON (Object v) = EC2Region <$>
v .: "region" <*>
v .: "instanceTypes"
parseJSON _ = mzero
data EC2InstanceType = EC2InstanceType {
typeI :: String,
sizes :: [EC2Size]
} deriving (Show, Read, Eq)
instance FromJSON EC2InstanceType where
parseJSON (Object v) = EC2InstanceType <$>
v .: "type" <*>
v .: "sizes"
parseJSON _ = mzero
data EC2Size = EC2Size {
size :: String,
vCPU :: String,
ecu :: String,
memoryGiB :: String,
storageGB :: String,
valueColumns :: [EC2ValueColumns]
} deriving (Show, Read, Eq)
instance FromJSON EC2Size where
parseJSON (Object v) = EC2Size <$>
v .: "size" <*>
v .: "vCPU" <*>
v .: "ECU" <*>
v .: "memoryGiB" <*>
v .: "storageGB" <*>
v .: "valueColumns"
parseJSON _ = mzero
data EC2ValueColumns = EC2ValueColumns {
name :: String,
prices :: CurrencyObject
} deriving (Show, Read, Eq)
instance FromJSON EC2ValueColumns where
parseJSON (Object v) = EC2ValueColumns <$>
v .: "name" <*>
v .: "prices"
parseJSON _ = mzero
|
adarqui/DevUtils-Base
|
src/System/DevUtils/Base/Cloud/Amazon/EC2.hs
|
gpl-3.0
| 2,078 | 1 | 17 | 397 | 692 | 394 | 298 | 81 | 0 |
-- Copyright 2016, 2017 Robin Raymond
--
-- This file is part of Purple Muon
--
-- Purple Muon is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- Purple Muon is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with Purple Muon. If not, see <http://www.gnu.org/licenses/>.
module Client.Assets.Texture
( textureLoader
, TextureLoaderType
, TextureID
) where
import Protolude
import qualified Data.HashTable.IO as DHI
import qualified SDL
import qualified SDL.Image as SIM
import qualified System.FilePath.Posix as SFP
import qualified Client.Assets.Generic as CAG
import qualified PurpleMuon.Util.MonadError as PUM
-- | Type of a texture loader
type TextureLoaderType = CAG.AssetLoader SDL.Texture SDL.Renderer ()
-- | Type of a texture identifier
type TextureID = CAG.AssetID SDL.Texture
-- | Implementation of `AssetLoader` for textures
textureLoader :: MonadIO m => SDL.Renderer -> m TextureLoaderType
textureLoader ren = do
ht <- liftIO $ DHI.new
return (CAG.AssetLoader
{ CAG.store = ht
, CAG.extData = ren
, CAG.load = \_ r p -> do
t <- try $ SIM.loadTexture r p
:: IO (Either SomeException SDL.Texture)
let res = PUM.mapLeft show t
i = CAG.AssetID $ toS $ SFP.takeBaseName p
return (fmap (\x -> [(i, x)]) res)
, CAG.delete = SDL.destroyTexture
})
|
r-raymond/purple-muon
|
src/Client/Assets/Texture.hs
|
gpl-3.0
| 1,966 | 0 | 20 | 538 | 321 | 190 | 131 | 26 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Cloudbuild.Operations.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Lists operations that match the specified filter in the request. If the
-- server doesn\'t support this method, it returns \`UNIMPLEMENTED\`. NOTE:
-- the \`name\` binding allows API services to override the binding to use
-- different resource name schemes, such as \`users\/*\/operations\`. To
-- override the binding, API services can add a binding such as
-- \`\"\/v1\/{name=users\/*}\/operations\"\` to their service
-- configuration. For backwards compatibility, the default name includes
-- the operations collection id, however overriding users must ensure the
-- name binding is the parent resource, without the operations collection
-- id.
--
-- /See:/ <https://cloud.google.com/cloud-build/docs/ Cloud Build API Reference> for @cloudbuild.operations.list@.
module Network.Google.Resource.Cloudbuild.Operations.List
(
-- * REST Resource
OperationsListResource
-- * Creating a Request
, operationsList
, OperationsList
-- * Request Lenses
, olXgafv
, olUploadProtocol
, olAccessToken
, olUploadType
, olName
, olFilter
, olPageToken
, olPageSize
, olCallback
) where
import Network.Google.ContainerBuilder.Types
import Network.Google.Prelude
-- | A resource alias for @cloudbuild.operations.list@ method which the
-- 'OperationsList' request conforms to.
type OperationsListResource =
"v1" :>
Capture "name" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "filter" Text :>
QueryParam "pageToken" Text :>
QueryParam "pageSize" (Textual Int32) :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] ListOperationsResponse
-- | Lists operations that match the specified filter in the request. If the
-- server doesn\'t support this method, it returns \`UNIMPLEMENTED\`. NOTE:
-- the \`name\` binding allows API services to override the binding to use
-- different resource name schemes, such as \`users\/*\/operations\`. To
-- override the binding, API services can add a binding such as
-- \`\"\/v1\/{name=users\/*}\/operations\"\` to their service
-- configuration. For backwards compatibility, the default name includes
-- the operations collection id, however overriding users must ensure the
-- name binding is the parent resource, without the operations collection
-- id.
--
-- /See:/ 'operationsList' smart constructor.
data OperationsList =
OperationsList'
{ _olXgafv :: !(Maybe Xgafv)
, _olUploadProtocol :: !(Maybe Text)
, _olAccessToken :: !(Maybe Text)
, _olUploadType :: !(Maybe Text)
, _olName :: !Text
, _olFilter :: !(Maybe Text)
, _olPageToken :: !(Maybe Text)
, _olPageSize :: !(Maybe (Textual Int32))
, _olCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'OperationsList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'olXgafv'
--
-- * 'olUploadProtocol'
--
-- * 'olAccessToken'
--
-- * 'olUploadType'
--
-- * 'olName'
--
-- * 'olFilter'
--
-- * 'olPageToken'
--
-- * 'olPageSize'
--
-- * 'olCallback'
operationsList
:: Text -- ^ 'olName'
-> OperationsList
operationsList pOlName_ =
OperationsList'
{ _olXgafv = Nothing
, _olUploadProtocol = Nothing
, _olAccessToken = Nothing
, _olUploadType = Nothing
, _olName = pOlName_
, _olFilter = Nothing
, _olPageToken = Nothing
, _olPageSize = Nothing
, _olCallback = Nothing
}
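-- A hypothetical usage sketch (not generated code; the resource name and the
-- page size below are made-up values, and the @&@ / @?~@ operators are assumed
-- to come from a lens import):
--
-- > operationsList "operations" & olPageSize ?~ 50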
-- | V1 error format.
olXgafv :: Lens' OperationsList (Maybe Xgafv)
olXgafv = lens _olXgafv (\ s a -> s{_olXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
olUploadProtocol :: Lens' OperationsList (Maybe Text)
olUploadProtocol
= lens _olUploadProtocol
(\ s a -> s{_olUploadProtocol = a})
-- | OAuth access token.
olAccessToken :: Lens' OperationsList (Maybe Text)
olAccessToken
= lens _olAccessToken
(\ s a -> s{_olAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
olUploadType :: Lens' OperationsList (Maybe Text)
olUploadType
= lens _olUploadType (\ s a -> s{_olUploadType = a})
-- | The name of the operation\'s parent resource.
olName :: Lens' OperationsList Text
olName = lens _olName (\ s a -> s{_olName = a})
-- | The standard list filter.
olFilter :: Lens' OperationsList (Maybe Text)
olFilter = lens _olFilter (\ s a -> s{_olFilter = a})
-- | The standard list page token.
olPageToken :: Lens' OperationsList (Maybe Text)
olPageToken
= lens _olPageToken (\ s a -> s{_olPageToken = a})
-- | The standard list page size.
olPageSize :: Lens' OperationsList (Maybe Int32)
olPageSize
= lens _olPageSize (\ s a -> s{_olPageSize = a}) .
mapping _Coerce
-- | JSONP
olCallback :: Lens' OperationsList (Maybe Text)
olCallback
= lens _olCallback (\ s a -> s{_olCallback = a})
instance GoogleRequest OperationsList where
type Rs OperationsList = ListOperationsResponse
type Scopes OperationsList =
'["https://www.googleapis.com/auth/cloud-platform"]
requestClient OperationsList'{..}
= go _olName _olXgafv _olUploadProtocol
_olAccessToken
_olUploadType
_olFilter
_olPageToken
_olPageSize
_olCallback
(Just AltJSON)
containerBuilderService
where go
= buildClient (Proxy :: Proxy OperationsListResource)
mempty
|
brendanhay/gogol
|
gogol-containerbuilder/gen/Network/Google/Resource/Cloudbuild/Operations/List.hs
|
mpl-2.0
| 6,632 | 0 | 18 | 1,567 | 971 | 567 | 404 | 129 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.DFAReporting.TargetingTemplates.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieves a list of targeting templates, optionally filtered. This
-- method supports paging.
--
-- /See:/ <https://developers.google.com/doubleclick-advertisers/ Campaign Manager 360 API Reference> for @dfareporting.targetingTemplates.list@.
module Network.Google.Resource.DFAReporting.TargetingTemplates.List
(
-- * REST Resource
TargetingTemplatesListResource
-- * Creating a Request
, targetingTemplatesList
, TargetingTemplatesList
-- * Request Lenses
, ttlXgafv
, ttlUploadProtocol
, ttlAccessToken
, ttlAdvertiserId
, ttlSearchString
, ttlUploadType
, ttlIds
, ttlProFileId
, ttlSortOrder
, ttlPageToken
, ttlSortField
, ttlMaxResults
, ttlCallback
) where
import Network.Google.DFAReporting.Types
import Network.Google.Prelude
-- | A resource alias for @dfareporting.targetingTemplates.list@ method which the
-- 'TargetingTemplatesList' request conforms to.
type TargetingTemplatesListResource =
"dfareporting" :>
"v3.5" :>
"userprofiles" :>
Capture "profileId" (Textual Int64) :>
"targetingTemplates" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "advertiserId" (Textual Int64) :>
QueryParam "searchString" Text :>
QueryParam "uploadType" Text :>
QueryParams "ids" (Textual Int64) :>
QueryParam "sortOrder"
TargetingTemplatesListSortOrder
:>
QueryParam "pageToken" Text :>
QueryParam "sortField"
TargetingTemplatesListSortField
:>
QueryParam "maxResults" (Textual Int32) :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON]
TargetingTemplatesListResponse
-- | Retrieves a list of targeting templates, optionally filtered. This
-- method supports paging.
--
-- /See:/ 'targetingTemplatesList' smart constructor.
data TargetingTemplatesList =
TargetingTemplatesList'
{ _ttlXgafv :: !(Maybe Xgafv)
, _ttlUploadProtocol :: !(Maybe Text)
, _ttlAccessToken :: !(Maybe Text)
, _ttlAdvertiserId :: !(Maybe (Textual Int64))
, _ttlSearchString :: !(Maybe Text)
, _ttlUploadType :: !(Maybe Text)
, _ttlIds :: !(Maybe [Textual Int64])
, _ttlProFileId :: !(Textual Int64)
, _ttlSortOrder :: !TargetingTemplatesListSortOrder
, _ttlPageToken :: !(Maybe Text)
, _ttlSortField :: !TargetingTemplatesListSortField
, _ttlMaxResults :: !(Textual Int32)
, _ttlCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'TargetingTemplatesList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ttlXgafv'
--
-- * 'ttlUploadProtocol'
--
-- * 'ttlAccessToken'
--
-- * 'ttlAdvertiserId'
--
-- * 'ttlSearchString'
--
-- * 'ttlUploadType'
--
-- * 'ttlIds'
--
-- * 'ttlProFileId'
--
-- * 'ttlSortOrder'
--
-- * 'ttlPageToken'
--
-- * 'ttlSortField'
--
-- * 'ttlMaxResults'
--
-- * 'ttlCallback'
targetingTemplatesList
:: Int64 -- ^ 'ttlProFileId'
-> TargetingTemplatesList
targetingTemplatesList pTtlProFileId_ =
TargetingTemplatesList'
{ _ttlXgafv = Nothing
, _ttlUploadProtocol = Nothing
, _ttlAccessToken = Nothing
, _ttlAdvertiserId = Nothing
, _ttlSearchString = Nothing
, _ttlUploadType = Nothing
, _ttlIds = Nothing
, _ttlProFileId = _Coerce # pTtlProFileId_
, _ttlSortOrder = TTLSOAscending
, _ttlPageToken = Nothing
, _ttlSortField = TTLSFID
, _ttlMaxResults = 1000
, _ttlCallback = Nothing
}
-- | V1 error format.
ttlXgafv :: Lens' TargetingTemplatesList (Maybe Xgafv)
ttlXgafv = lens _ttlXgafv (\ s a -> s{_ttlXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
ttlUploadProtocol :: Lens' TargetingTemplatesList (Maybe Text)
ttlUploadProtocol
= lens _ttlUploadProtocol
(\ s a -> s{_ttlUploadProtocol = a})
-- | OAuth access token.
ttlAccessToken :: Lens' TargetingTemplatesList (Maybe Text)
ttlAccessToken
= lens _ttlAccessToken
(\ s a -> s{_ttlAccessToken = a})
-- | Select only targeting templates with this advertiser ID.
ttlAdvertiserId :: Lens' TargetingTemplatesList (Maybe Int64)
ttlAdvertiserId
= lens _ttlAdvertiserId
(\ s a -> s{_ttlAdvertiserId = a})
. mapping _Coerce
-- | Allows searching for objects by name or ID. Wildcards (*) are allowed.
-- For example, \"template*2015\" will return objects with names like
-- \"template June 2015\", \"template April 2015\", or simply \"template
-- 2015\". Most of the searches also add wildcards implicitly at the start
-- and the end of the search string. For example, a search string of
-- \"template\" will match objects with name \"my template\", \"template
-- 2015\", or simply \"template\".
ttlSearchString :: Lens' TargetingTemplatesList (Maybe Text)
ttlSearchString
= lens _ttlSearchString
(\ s a -> s{_ttlSearchString = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
ttlUploadType :: Lens' TargetingTemplatesList (Maybe Text)
ttlUploadType
= lens _ttlUploadType
(\ s a -> s{_ttlUploadType = a})
-- | Select only targeting templates with these IDs.
ttlIds :: Lens' TargetingTemplatesList [Int64]
ttlIds
= lens _ttlIds (\ s a -> s{_ttlIds = a}) . _Default .
_Coerce
-- | User profile ID associated with this request.
ttlProFileId :: Lens' TargetingTemplatesList Int64
ttlProFileId
= lens _ttlProFileId (\ s a -> s{_ttlProFileId = a})
. _Coerce
-- | Order of sorted results.
ttlSortOrder :: Lens' TargetingTemplatesList TargetingTemplatesListSortOrder
ttlSortOrder
= lens _ttlSortOrder (\ s a -> s{_ttlSortOrder = a})
-- | Value of the nextPageToken from the previous result page.
ttlPageToken :: Lens' TargetingTemplatesList (Maybe Text)
ttlPageToken
= lens _ttlPageToken (\ s a -> s{_ttlPageToken = a})
-- | Field by which to sort the list.
ttlSortField :: Lens' TargetingTemplatesList TargetingTemplatesListSortField
ttlSortField
= lens _ttlSortField (\ s a -> s{_ttlSortField = a})
-- | Maximum number of results to return.
ttlMaxResults :: Lens' TargetingTemplatesList Int32
ttlMaxResults
= lens _ttlMaxResults
(\ s a -> s{_ttlMaxResults = a})
. _Coerce
-- | JSONP
ttlCallback :: Lens' TargetingTemplatesList (Maybe Text)
ttlCallback
= lens _ttlCallback (\ s a -> s{_ttlCallback = a})
instance GoogleRequest TargetingTemplatesList where
type Rs TargetingTemplatesList =
TargetingTemplatesListResponse
type Scopes TargetingTemplatesList =
'["https://www.googleapis.com/auth/dfatrafficking"]
requestClient TargetingTemplatesList'{..}
= go _ttlProFileId _ttlXgafv _ttlUploadProtocol
_ttlAccessToken
_ttlAdvertiserId
_ttlSearchString
_ttlUploadType
(_ttlIds ^. _Default)
(Just _ttlSortOrder)
_ttlPageToken
(Just _ttlSortField)
(Just _ttlMaxResults)
_ttlCallback
(Just AltJSON)
dFAReportingService
where go
= buildClient
(Proxy :: Proxy TargetingTemplatesListResource)
mempty
|
brendanhay/gogol
|
gogol-dfareporting/gen/Network/Google/Resource/DFAReporting/TargetingTemplates/List.hs
|
mpl-2.0
| 8,604 | 0 | 25 | 2,229 | 1,335 | 765 | 570 | 188 | 1 |
{-
Copyright 2015 Arun Raghavan <[email protected]>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
{-# LANGUAGE RecordWildCards #-}
module Main where
import Data.Semigroup ((<>))
import System.FilePath (joinPath)
import System.Directory (createDirectoryIfMissing, doesDirectoryExist)
import Options.Applicative
import TinyXML
import TinyXMLConfig
import UCM
import XML2UCM (xml2ucm)
data Opts = Opts { optConfigFile :: String,
optTinyXMLFile :: String,
optOutputDir :: String,
optForce :: Bool }
parseOptions :: Parser Opts
parseOptions = Opts
<$> strOption
( long "config"
<> short 'c'
<> metavar "FILE"
<> help "Configuration XML file" )
<*> strOption
( long "mixer-paths"
<> short 'm'
<> metavar "FILE"
<> help "Mixer paths XML file" )
<*> strOption
( long "output-dir"
<> short 'o'
<> value "-"
<> metavar "DIR"
<> help "Output path for UCM configuration directory" )
<*> switch
( long "force"
<> short 'f'
<> help "Overwrite existing configuration if it exists")
run :: Opts -> IO ()
run Opts{..} = do
xmlFile <- readFile optTinyXMLFile
configFile <- readFile optConfigFile
xml <- TinyXML.parse xmlFile
config <- TinyXMLConfig.parse configFile
let stdout = optOutputDir == "-"
let card = TinyXMLConfig.confCardName config
let dir = joinPath [optOutputDir, card]
exists <- doesDirectoryExist dir
case (stdout, optForce, exists) of
(True, _, _) -> return () -- Just write to stdout
(_, False, True) -> error ("Error: Output directory '" ++ dir ++ "' already exists (use -f to overwrite)")
(_, _, _) -> createDirectoryIfMissing False dir
mapM_ (uncurry (dumpFile stdout card optOutputDir)) (UCM.generateFiles $ xml2ucm xml config)
where
dumpFile :: Bool -> String -> FilePath -> FilePath -> String -> IO ()
dumpFile False card dir file =
writeFile (joinPath [dir, card, file])
dumpFile True card _ file =
dumpFileNameAndContents $ joinPath [card, file]
where
dumpFileNameAndContents :: String -> String -> IO ()
dumpFileNameAndContents path contents =
-- File path and contents in markdown formattable form
putStrLn $ "## " ++ path ++ "\n```\n" ++ contents ++ "```\n"
main :: IO ()
main = execParser opts >>= run
where
opts = info (helper <*> parseOptions)
( fullDesc
<> header "xml2ucm - Android mixer path XML to ALSA UCM converter" )
|
ford-prefect/xml2ucm
|
src/Main.hs
|
apache-2.0
| 3,213 | 0 | 14 | 911 | 689 | 351 | 338 | 64 | 4 |
-- see: http://learnyouahaskell.com/syntax-in-functions#guards-guards
module Guards where
import Data.Monoid
import Numeric
-- output
echo :: String -> String -> IO ()
echo msg_one msg_two = putStrLn $ msg_one `Data.Monoid.mappend` msg_two
-- see: http://www.whathealth.com/bmi/formula.html
calcBmi :: (RealFloat a) => a -> a -> a
calcBmi weight height = weight / (height/100)^2
-- see: http://stackoverflow.com/questions/1559590/haskell-force-floats-to-have-two-decimals
formatFloatN floatNum numOfDecimals = showFFloat (Just numOfDecimals) floatNum ""
-- based on your bmi, the function tells you off (omg!)
bmiTell :: (RealFloat a) => a -> String
bmiTell bmi
| bmi <= 18.5 = "You're underweight, you emo you!"
| bmi <= 25.0 = "You're supposedly normal. Pfft, I bet you're ugly!"
| bmi <= 30.0 = "You're fat! Lose some weight, fatty!"
| otherwise = "You're a whale, congratulations!"
max' :: (Ord a) => a -> a -> a
max' a b
| a > b = a
| otherwise = b
main :: IO ()
main = do
echo "calcBmi 72 182: " $ formatFloatN (calcBmi 72 182) 2
echo "bmiTell calcBmi 72 182: " $ bmiTell $ calcBmi 72 182
echo "max' 1 2: " $ show $ max' 1 2
echo "max' 4 3: " $ show $ max' 4 3
|
dnvriend/study-category-theory
|
haskell/learn_a_haskell/ch3/guards.hs
|
apache-2.0
| 1,240 | 0 | 10 | 271 | 345 | 172 | 173 | 24 | 1 |
-- Copyright 2014 (c) Diego Souza <[email protected]>
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
--
-- [DO NOT EDIT, AUTOMATICALLY GENERATED BY (./src/scripts/version.sh /home/dsouza/dev/locaweb/leela/src/warpdrive/src/Leela/Version.hs)]
module Leela.Version where
major :: Int
major = 5
minor :: Int
minor = 11
patch :: Int
patch = 0
version :: String
version = "5.11.0"
|
locaweb/leela
|
src/warpdrive/src/Leela/Version.hs
|
apache-2.0
| 897 | 0 | 4 | 150 | 61 | 43 | 18 | 9 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE DefaultSignatures #-}
{-# LANGUAGE FlexibleContexts #-}
-- | Defines Parse class and its generic counterpart
-- for easier parsing of packets.
module Packet.Parse where
import Data.Char
import GHC.Generics
import Data.List (intercalate)
import Control.Applicative
import Debug.Trace
--import System.IO
-- From bytestring module:
import qualified Data.ByteString.Char8 as BS
-- From Attoparsec
import Data.Attoparsec.ByteString.Char8 as Atto
-- * This class uses generics to provide a default instance for the Parse class.
class GParse a where
gParser :: Parser (a r)
instance GParse U1 where
gParser = return U1
instance (Parse a) => GParse (K1 i a) where
gParser = K1 <$> parser
instance (GParse f, GParse g) => GParse (f :*: g) where
gParser = (:*:) <$> gParser <*> gParser
instance (GParse f, GParse g) => GParse (f :+: g) where
  gParser = (L1 <$> gParser) <|> (R1 <$> gParser)
instance GParse f => GParse (M1 i c f) where -- metadata nodes (D1/C1/S1); needed for the generic default 'to <$> gParser'
  gParser = M1 <$> gParser
{-
instance (GParse a, GParse b) => GParse (a :*: b) where
gParser :: Parser (
gParser (a :*: b) = (:*:) <$> gParser a
<*> gParser b-}
-- | Class of things that have a default parsing from ByteString.
class Parse a where
parser :: Parser a
default parser :: (Generic a, GParse (Rep a)) => Parser a
parser = to <$> gParser
instance Parse Int where
parser = ord <$> anyChar
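-- A hypothetical usage sketch (the 'Header' type below is made up and not part
-- of this module): with DeriveGeneric a record of parseable fields can reuse
-- the generic default instead of a hand-written parser.
--
-- > data Header = Header { hLen :: Int, hKind :: Int } deriving (Show, Generic)
-- > instance Parse Header                   -- uses the generic default
-- > parseBS (BS.pack "\x05\x01") :: Header  -- Header {hLen = 5, hKind = 1}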
-- | Parse ByteString to any value that has Parse instance.
parseBS :: (Parse a) => BS.ByteString -> a
parseBS bs = case parse parser bs of
Done i r -> if not $ BS.null i then
trace ("Leftover input: " ++ show i ++
" of length " ++ show (BS.length i)) r
else
r
Partial _ -> error $ "Not enough input to parse anything:\n" ++ show bs
Fail i ctxs msg -> error $ "ParseError: " ++ msg ++ "\n" ++ show ctxs ++ "\nat:\n" ++
show i
-- | WARNING: doesn't seem to work!!!
untilEOF :: Parser a -> Parser [a]
untilEOF p = loop []
where
loop acc = do
isEnd <- atEnd
if isEnd
then return $ reverse acc
else do
n <- p
loop $ n:acc
|
mgajda/tinyMesh
|
Packet/GenericParse.hs
|
bsd-2-clause
| 2,429 | 0 | 15 | 841 | 559 | 297 | 262 | 46 | 4 |
{-# LANGUAGE OverloadedStrings #-}
-- | This module implements parsing and rendering of CommonMark documents.
--
-- CommonMark is a standard, unambiguous syntax specification for Markdown.
-- See <http://commonmark.org> for more details.
--
-- * The implementation is intended to ultimately become fully compliant
-- with the Commonmark Spec. At this stage of development, only some
-- syntactic elements are parsed.
--
-- * The current export list of this module is tentative; the module
-- currently exports some entities only for testing purposes.
--
module CommonMark
(
-- $example
-- * Testing
commonmarkTest
-- * Parsers
-- ** Blocks
, hRule
, atxHeader
, setextHeaderUnderLine
-- ** Inlines
, escapedChar
, entity
, namedEntity
, numericEntity
, codeSpan
, autolink
, absoluteURI
, emailAddress
)
where
import Data.Text ( Text )
import Data.Attoparsec.Text ( Parser )
import qualified Data.Attoparsec.Text as A
import CommonMark.Parser.Blocks
import CommonMark.Parser.Inlines
-- import CommonMark.Renderer.Html
import CommonMark.Types
import CommonMark.Util.Parsing
-- | Temporary function for testing elementary parsers.
commonmarkTest :: Show a => Parser a -> Text -> IO ()
commonmarkTest p s = do
case A.parseOnly p s of
Left e -> putStrLn $ "No parse: " ++ e
Right x -> putStrLn $ "Parsed: " ++ show x
-- $example
--
-- Parsers for CommonMark syntactic elements can be
-- tested at the command line:
--
-- > λ> commonmarkTest hRule "---rule?---"
-- > No parse: horizontal rule: endOfInput
--
-- > λ> commonmarkTest hRule " ------"
-- > Parsed: Rule
--
-- > λ> commonmarkTest hRule " ------"
-- > No parse: horizontal rule: Failed reading: empty
|
Jubobs/CommonMark-WIP
|
src/CommonMark.hs
|
bsd-3-clause
| 1,788 | 0 | 11 | 392 | 217 | 137 | 80 | 27 | 2 |
-- To be included in other files:
-----------------------------------------------------------------------------
pushWork :: StepCode () -> StepCode ()
popWork :: R.ReaderT Sched IO (Maybe (StepCode ()))
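-- Both operations run inside a single hotVar transaction: pushWork conses the
-- new step onto the front of the workpool and popWork takes from the front,
-- so the pool behaves as a LIFO stack.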
pushWork a =
do Sched { workpool } <- R.ask
C.liftIO$ hotVarTransaction $
do xs <- readHotVarRaw workpool
writeHotVarRaw workpool (a:xs)
popWork =
do Sched { workpool } <- R.ask
R.lift $
hotVarTransaction $
do xs <- readHotVarRaw workpool
case xs of
[] -> return Nothing
x:xs' -> do
writeHotVarRaw workpool xs'
return (Just x)
-----------------------------------------------------------------------------
|
rrnewton/Haskell-CnC
|
Intel/simple_stack.hs
|
bsd-3-clause
| 703 | 0 | 17 | 172 | 203 | 95 | 108 | -1 | -1 |
{-# LANGUAGE UnicodeSyntax #-}
module Compiler.Codegen.Model where
data JExpr = JAtom String
| JNew String
| JDispatch JExpr String [JExpr]
| JCast String JExpr
| JUnop String JExpr
| JBin JExpr String JExpr
| JInvoke JExpr [JExpr]
data JAttr = JFinal
| JPrivate
| JPublic
| JProtected
| JStatic
type JAttrs = [JAttr]
type JFormal = (String, JType)
type JType = String
data JDecl = JClass JAttrs String (Maybe String) ([JDecl], [JDecl])
| JMethod JAttrs JType String [JFormal] JStmt
| JConstr JAttrs String [JFormal] JStmt
| JMember JAttrs JType String (Maybe JExpr)
data JStmt = JEmpty
| JBlock [JStmt]
| JReturn JExpr
| JEx JExpr
| JAssign JExpr JExpr
| JLocal JDecl
| JCond String JExpr [JStmt]
| JBlockX String [JStmt]
|
YoEight/hk-coolc
|
src/Compiler/Codegen/Model.hs
|
bsd-3-clause
| 948 | 0 | 8 | 346 | 252 | 150 | 102 | 29 | 0 |
{-# LANGUAGE MultiParamTypeClasses, TypeFamilies #-}
module Rubik.V3 where
import Rubik.Abs
import Rubik.Axis
import Rubik.D3
import Rubik.Negate as N
import Rubik.Sign as S
import Rubik.Turn as T
import Rubik.V2
import Rubik.Key as K
data V3 a = V3 a a a
deriving (Eq,Ord,Show)
data Turn3D = Turn3D Turn D3
instance Negate Turn3D where
negate (Turn3D t d) = (Turn3D (N.negate t) d)
instance Negate a => Rotate (V3 a) where
type SideOf (V3 a) = Turn3D
rotate (Turn3D t X) (V3 x y z) = V3 x y' z' where (V2 y' z') = rotate t $ V2 y z
  rotate (Turn3D t Y) (V3 x y z) = V3 x' y z' where (V2 z' x') = rotate t $ V2 z x -- intentionally flipped
rotate (Turn3D t Z) (V3 x y z) = V3 x' y' z where (V2 x' y') = rotate t $ V2 x y
instance Key a => Key (V3 a) where
universe = [ V3 a b c | a <- universe, b <- universe, c <- universe ]
|
andygill/rubik-solver
|
src/Rubik/V3.hs
|
bsd-3-clause
| 863 | 0 | 10 | 216 | 421 | 218 | 203 | 22 | 0 |
{-# LANGUAGE ScopedTypeVariables #-}
module Angel.Files ( getFile
, startFileManager ) where
import Control.Exception ( try
, SomeException )
import Control.Concurrent.STM ( readTChan
, writeTChan
, atomically
, TChan
, newTChanIO )
import Control.Monad (forever)
import System.IO ( Handle
, hClose
, openFile
, IOMode(AppendMode) )
import GHC.IO.Handle (hDuplicate)
import Angel.Data ( GroupConfig
, fileRequest
, FileRequest )
startFileManager :: TChan FileRequest -> IO b
startFileManager req = forever $ fileManager req
fileManager :: TChan FileRequest -> IO ()
fileManager req = do
(path, resp) <- atomically $ readTChan req
mh <- try $ openFile path AppendMode
case mh of
Right hand -> do
hand' <- hDuplicate hand
hClose hand
atomically $ writeTChan resp (Just hand')
Left (_ :: SomeException) -> atomically $ writeTChan resp Nothing
fileManager req
getFile :: String -> GroupConfig -> IO Handle
getFile path cfg = do
resp <- newTChanIO
atomically $ writeTChan (fileRequest cfg) (path, resp)
mh <- atomically $ readTChan resp
case mh of
Just hand -> return hand
Nothing -> error $ "could not open stdout/stderr file " ++ path
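-- An illustrative sketch (the configuration plumbing is assumed): 'getFile'
-- expects a 'GroupConfig' whose 'fileRequest' channel is being served by
-- 'startFileManager' running in another thread; 'hPutStrLn' would come from
-- System.IO.
--
-- > h <- getFile "/var/log/myapp/out.log" cfg
-- > hPutStrLn h "logging started"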
|
zalora/Angel
|
src/Angel/Files.hs
|
bsd-3-clause
| 1,488 | 0 | 15 | 533 | 387 | 198 | 189 | 40 | 2 |
-----------------------------------------------------------------------------
-- |
-- Module : XMonad.Prompt.DirExec
-- Copyright : (C) 2008 Juraj Hercek
-- License : BSD3
--
-- Maintainer : [email protected]
-- Stability : unstable
-- Portability : unportable
--
-- A prompt for running executable files from a given directory in XMonad. This
-- might be useful if you don't want to have scripts in your PATH environment
-- variable (same executable names, different behavior) but still want easy
-- access to these executables through xmonad's prompt - otherwise you might
-- want to use "XMonad.Prompt.Shell" instead.
--
-----------------------------------------------------------------------------
module XMonad.Prompt.DirExec
( -- * Usage
-- $usage
dirExecPrompt
, dirExecPromptNamed
) where
import System.Directory
import Control.Monad
import Data.List
import XMonad
import XMonad.Prompt
-- $usage
-- 1. In your @~\/.xmonad\/xmonad.hs@:
--
-- > import XMonad.Prompt.DirExec
--
-- 2. In your keybindings add something like:
--
-- > , ("M-C-x", dirExecPrompt defaultXPConfig spawn "/home/joe/.scripts")
--
-- or
--
-- > , ("M-C-x", dirExecPromptNamed defaultXPConfig spawn
-- > "/home/joe/.scripts" "My Scripts: ")
--
-- or add this after your default bindings:
--
-- > ++
-- > [ ("M-x " ++ key, dirExecPrompt defaultXPConfig fn "/home/joe/.scripts")
-- > | (key, fn) <- [ ("x", spawn), ("M-x", runInTerm "-hold") ]
-- > ]
-- > ++
--
-- The first alternative uses the last element of the directory path as the
-- name of the prompt. The second alternative uses the provided string as the
-- name of the prompt. The third alternative defines two key bindings: the
-- first one spawns the program via the shell, the second one runs the program
-- in a terminal.
--
-- For detailed instruction on editing the key binding see
-- "XMonad.Doc.Extending#Editing_key_bindings".
data DirExec = DirExec String
instance XPrompt DirExec where
showXPrompt (DirExec name) = name
-- | Function 'dirExecPrompt' starts the prompt with list of all executable
-- files in directory specified by 'FilePath'. The name of the prompt is taken
-- from the last element of the path. If you specify root directory - @\/@ - as
-- the path, name @Root:@ will be used as the name of the prompt instead. The
-- 'XPConfig' parameter can be used to customize visuals of the prompt.
-- The runner parameter specifies the function used to run the program - see
-- usage for more information
dirExecPrompt :: XPConfig -> (String -> X ()) -> FilePath -> X ()
dirExecPrompt cfg runner path = do
let name = (++ ": ") . last
. (["Root"] ++) -- handling of "/" path parameter
. words
. map (\x -> if x == '/' then ' ' else x)
$ path
dirExecPromptNamed cfg runner path name
-- | Function 'dirExecPromptNamed' does the same as 'dirExecPrompt' except
-- the name of the prompt is specified by 'String' parameter.
dirExecPromptNamed :: XPConfig -> (String -> X ()) -> FilePath -> String -> X ()
dirExecPromptNamed cfg runner path name = do
let path' = path ++ "/"
cmds <- io $ getDirectoryExecutables path'
mkXPrompt (DirExec name) cfg (compList cmds) (runner . (path' ++))
where
compList cmds s = return . filter (isInfixOf s) $ cmds
getDirectoryExecutables :: FilePath -> IO [String]
getDirectoryExecutables path =
(getDirectoryContents path >>=
filterM (\x -> let x' = path ++ x in
liftM2 (&&)
(doesFileExist x')
(liftM executable (getPermissions x'))))
`catch` (return . return . show)
|
MasseR/xmonadcontrib
|
XMonad/Prompt/DirExec.hs
|
bsd-3-clause
| 3,714 | 0 | 17 | 846 | 497 | 287 | 210 | 34 | 2 |
-----------------------------------------------------------------------------
-- |
-- Module : HSX.XMLGenerator
-- Copyright : (c) Niklas Broberg 2008
-- License : BSD-style (see the file LICENSE.txt)
--
-- Maintainer : Niklas Broberg, [email protected]
-- Stability : experimental
-- Portability : requires newtype deriving and MPTCs with fundeps
--
-- The class and monad transformer that forms the basis of the literal XML
-- syntax translation. Literal tags will be translated into functions of
-- the GenerateXML class, and any instantiating monads with associated XML
-- types can benefit from that syntax.
-----------------------------------------------------------------------------
module HSX.XMLGenerator where
import Control.Monad.Trans
import Control.Monad (liftM)
----------------------------------------------
-- General XML Generation
-- | The monad transformer that allows a monad to generate XML values.
newtype XMLGenT m a = XMLGenT (m a)
deriving (Monad, Functor, MonadIO)
-- | un-lift.
unXMLGenT :: XMLGenT m a -> m a
unXMLGenT (XMLGenT ma) = ma
instance MonadTrans XMLGenT where
lift = XMLGenT
type Name = (Maybe String, String)
-- | Generate XML values in some XMLGenerator monad.
class Monad m => XMLGen m where
type XML m
data Child m
data Attribute m
genElement :: Name -> [XMLGenT m [Attribute m]] -> [XMLGenT m [Child m]] -> XMLGenT m (XML m)
genEElement :: Name -> [XMLGenT m [Attribute m]] -> XMLGenT m (XML m)
genEElement n ats = genElement n ats []
xmlToChild :: XML m -> Child m
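-- A minimal usage sketch (illustrative only): assuming some concrete
-- 'XMLGenerator' instance supplies the XML type, an element such as
-- @<p class="intro">hello</p>@ could be generated roughly as
--
-- > para :: XMLGenerator m => GenXML m
-- > para = genElement (Nothing, "p")
-- >                   [asAttr ("class" := "intro")]
-- >                   [asChild "hello"]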
-- | Type synonyms to avoid writing out the XMLGenT all the time
type GenXML m = XMLGenT m (XML m)
type GenXMLList m = XMLGenT m [XML m]
type GenChild m = XMLGenT m (Child m)
type GenChildList m = XMLGenT m [Child m]
type GenAttribute m = XMLGenT m (Attribute m)
type GenAttributeList m = XMLGenT m [Attribute m]
-- | Embed values as child nodes of an XML element. The parent type will be clear
-- from the context so it is not mentioned.
class XMLGen m => EmbedAsChild m c where
asChild :: c -> GenChildList m
instance (EmbedAsChild m c, TypeCastM m1 m) => EmbedAsChild m (XMLGenT m1 c) where
asChild (XMLGenT m1a) = do
a <- XMLGenT $ typeCastM m1a
asChild a
instance EmbedAsChild m c => EmbedAsChild m [c] where
asChild = liftM concat . mapM asChild
instance XMLGen m => EmbedAsChild m (Child m) where
asChild = return . return
#if __GLASGOW_HASKELL__ >= 610
instance (XMLGen m, XML m ~ x) => EmbedAsChild m x where
#else
instance (XMLGen m) => EmbedAsChild m (XML m) where
#endif
asChild = return . return . xmlToChild
-- | Similarly embed values as attributes of an XML element.
class XMLGen m => EmbedAsAttr m a where
asAttr :: a -> GenAttributeList m
instance (XMLGen m, EmbedAsAttr m a) => EmbedAsAttr m (XMLGenT m a) where
asAttr ma = ma >>= asAttr
instance XMLGen m => EmbedAsAttr m (Attribute m) where
asAttr = return . return
instance EmbedAsAttr m a => EmbedAsAttr m [a] where
asAttr = liftM concat . mapM asAttr
class (XMLGen m,
SetAttr m (XML m),
AppendChild m (XML m),
EmbedAsChild m String,
EmbedAsChild m Char, -- for overlap purposes
EmbedAsAttr m (Attr String String),
EmbedAsAttr m (Attr String Int),
EmbedAsAttr m (Attr String Bool)
) => XMLGenerator m
{-
-- This is certainly true, but we want the various generators to explicitly state it,
-- in order to get the error messages right.
instance (XMLGen m,
SetAttr m (XML m),
AppendChild m (XML m),
EmbedAsChild m String,
EmbedAsChild m Char,
EmbedAsAttr m (Attr String String),
EmbedAsAttr m (Attr String Int),
EmbedAsAttr m (Attr String Bool)
) => XMLGenerator m
-}
data Attr n a = n := a
deriving Show
-------------------------------------
-- Setting attributes
-- | Set attributes on XML elements
class XMLGen m => SetAttr m elem where
setAttr :: elem -> GenAttribute m -> GenXML m
setAll :: elem -> GenAttributeList m -> GenXML m
setAttr e a = setAll e $ liftM return a
(<@), set :: (SetAttr m elem, EmbedAsAttr m attr) => elem -> attr -> GenXML m
set xml attr = setAll xml (asAttr attr)
(<@) = set
(<<@) :: (SetAttr m elem, EmbedAsAttr m a) => elem -> [a] -> GenXML m
xml <<@ ats = setAll xml (liftM concat $ mapM asAttr ats)
instance (TypeCastM m1 m, SetAttr m x) =>
SetAttr m (XMLGenT m1 x) where
setAll (XMLGenT m1x) ats = (XMLGenT $ typeCastM m1x) >>= (flip setAll) ats
-------------------------------------
-- Appending children
class XMLGen m => AppendChild m elem where
appChild :: elem -> GenChild m -> GenXML m
appAll :: elem -> GenChildList m -> GenXML m
appChild e c = appAll e $ liftM return c
(<:), app :: (AppendChild m elem, EmbedAsChild m c) => elem -> c -> GenXML m
app xml c = appAll xml $ asChild c
(<:) = app
(<<:) :: (AppendChild m elem, EmbedAsChild m c) => elem -> [c] -> GenXML m
xml <<: chs = appAll xml (liftM concat $ mapM asChild chs)
instance (AppendChild m x, TypeCastM m1 m) =>
AppendChild m (XMLGenT m1 x) where
appAll (XMLGenT m1x) chs = (XMLGenT $ typeCastM m1x) >>= (flip appAll) chs
-------------------------------------
-- Names
-- | Names can be simple or qualified with a domain. We want to conveniently
-- use both simple strings or pairs wherever a Name is expected.
class Show n => IsName n where
toName :: n -> Name
-- | Names can represent names, of course.
instance IsName Name where
toName = id
-- | Strings can represent names, meaning a simple name with no domain.
instance IsName String where
toName s = (Nothing, s)
-- | Pairs of strings can represent names, meaning a name qualified with a domain.
instance IsName (String, String) where
toName (ns, s) = (Just ns, s)
---------------------------------------
-- TypeCast, in lieu of ~ constraints
-- literally lifted from the HList library
class TypeCast a b | a -> b, b -> a where typeCast :: a -> b
class TypeCast' t a b | t a -> b, t b -> a where typeCast' :: t->a->b
class TypeCast'' t a b | t a -> b, t b -> a where typeCast'' :: t->a->b
instance TypeCast' () a b => TypeCast a b where typeCast x = typeCast' () x
instance TypeCast'' t a b => TypeCast' t a b where typeCast' = typeCast''
instance TypeCast'' () a a where typeCast'' _ x = x
class TypeCastM ma mb | ma -> mb, mb -> ma where typeCastM :: ma x -> mb x
class TypeCastM' t ma mb | t ma -> mb, t mb -> ma where typeCastM' :: t -> ma x -> mb x
class TypeCastM'' t ma mb | t ma -> mb, t mb -> ma where typeCastM'' :: t -> ma x -> mb x
instance TypeCastM' () ma mb => TypeCastM ma mb where typeCastM mx = typeCastM' () mx
instance TypeCastM'' t ma mb => TypeCastM' t ma mb where typeCastM' = typeCastM''
instance TypeCastM'' () ma ma where typeCastM'' _ x = x
|
abuiles/turbinado-blog
|
tmp/dependencies/hsx-0.4.5/src/HSX/XMLGenerator.hs
|
bsd-3-clause
| 7,075 | 0 | 13 | 1,739 | 1,999 | 1,053 | 946 | -1 | -1 |
--
-- Annotation.hs
--
-- Annotation of the expression tree.
--
-- Gregory Wright, 8 November 2011
--
module Math.Symbolic.Wheeler.Annotation where
class Functor expr => Annotated expr where
ann :: expr a -> a
amap :: (a -> a) -> expr a -> expr a
|
gwright83/Wheeler
|
src/Math/Symbolic/Wheeler/Annotation.hs
|
bsd-3-clause
| 259 | 0 | 9 | 57 | 69 | 39 | 30 | 4 | 0 |
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE KindSignatures #-}
module Types where
import Language.Haskell.TH.Lib
import Language.Haskell.TH.Syntax
import Util
-- type A = forall a. B a; type B a = Maybe a; expand [t|B A|]
type ListOf x = [x]
type ForAll f = forall x. f x
type ApplyToInteger f = f Integer
type Int' = Int
type Either' = Either
type Int'' = Int
type Id a = a
-- type E x = forall y. Either x y -> Int
$(sequence [tySynD (mkName "E") [plainTV (mkName "x")]
(forallT'' ["y"] (conT ''Either `appT` varT' "x" `appT` varT' "y" --> conT ''Int))
])
data family DF1 a
data instance DF1 Int = DInt (ListOf ())
type family TF1 a
type instance TF1 Int = ListOf ()
class Class1 a where
type AT1 a
instance Class1 Int where type AT1 Int = ListOf ()
|
DanielSchuessler/th-expand-syns
|
testing/Types.hs
|
bsd-3-clause
| 862 | 0 | 17 | 201 | 254 | 147 | 107 | 24 | 0 |
module Linear.Simplex.PrimalSpec (main, spec) where
import Linear.Simplex.PrimalSpec.Types
import Linear.Simplex.Primal
import Linear.Simplex.Primal.Types
import Linear.Grammar
import Test.Hspec
import Test.QuickCheck
import Data.Maybe
import qualified Data.Map as M
import Control.Applicative
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "`populate`" $ do
it "should be idempotent" $
property prop_populate_Idempotent
it "should have a uniform result" $
property prop_populate_Uniform
describe "`diffZip`" $
it "should be all 0 when ran on self" $
property prop_diffZip_Zero
describe "`flatten`" $ do
    it "should have 1 at its oriented column in its result" $
property prop_flatten_One
it "should be idempotent" $
property prop_flatten_Idempotent
describe "`compensate`" $
    it "should have 0 at its oriented column in its result" $
property prop_compensate_Zero
describe "`nextRow`" $
it "should have the smallest ratio" $
property prop_nextRow_MinRatio
describe "`nextColumn`" $
it "should have the smallest value" $
property prop_nextColumn_MinValue
describe "unit tests" $ do
it "should pass Finite Mathematics Lesson 4, Example 1" $
let f1 = EVar "x" .+. EVar "y" .+. EVar "z" .<=. ELit 600
f2 = EVar "x" .+. (3 :: Rational) .*. EVar "y" .<=. ELit 600
f3 = (2 :: Rational) .*. EVar "x" .+. EVar "z" .<=. ELit 900
obj = EVar "M" .==. (60 :: Rational) .*. EVar "x" .+. (90 :: Rational) .*. EVar "y"
.+. (300 :: Rational) .*. EVar "z"
test = M.fromList $ simplexPrimal (standardForm obj) (standardForm <$> [f1,f2,f3])
in
test `shouldBe` M.fromList [("M",180000),("x",0),("y",0),("z",600),("s0",0),("s1",600),("s2",300)]
it "should pass Finite Mathematics Lesson 4, Example 2" $
let f1 = EVar "a" .+. EVar "b" .+. EVar "c" .<=. ELit 100
f2 = (5 :: Rational) .*. EVar "a" .+. (4 :: Rational) .*. EVar "b"
.+. (4 :: Rational) .*. EVar "c" .<=. ELit 480
f3 = (40 :: Rational) .*. EVar "a" .+. (20 :: Rational) .*. EVar "b"
.+. (30 :: Rational) .*. EVar "c" .<=. ELit 3200
obj = EVar "M" .==. (70 :: Rational) .*. EVar "a" .+. (210 :: Rational) .*. EVar "b"
.+. (140 :: Rational) .*. EVar "c"
test = M.fromList $ simplexPrimal (standardForm obj) (standardForm <$> [f1,f2,f3])
in
test `shouldBe` M.fromList [("M",21000),("a",0),("b",100),("c",0),("s0",0),("s1",80),("s2",1200)]
it "should pass Example of Simplex Procedure" $
let f1 = (2 :: Rational) .*. EVar "x1" .+. EVar "x2" .+. EVar "x3" .<=. ELit 14
f2 = (4 :: Rational) .*. EVar "x1" .+. (2 :: Rational) .*. EVar "x2"
.+. (3 :: Rational) .*. EVar "x3" .<=. ELit 28
f3 = (2 :: Rational) .*. EVar "x1" .+. (5 :: Rational) .*. EVar "x2"
.+. (5 :: Rational) .*. EVar "x3" .<=. ELit 30
obj = EVar "Z" .==. EVar "x1" .+. (2 :: Rational) .*. EVar "x2"
.+. (-1 :: Rational) .*. EVar "x3"
test = M.fromList $ simplexPrimal (standardForm obj) (standardForm <$> [f1,f2,f3])
in
test `shouldBe` M.fromList [("Z",13),("x1",5),("x2",4),("x3",0),("s0",0),("s1",0),("s2",0)]
prop_populate_Idempotent :: [IneqSlack] -> Bool
prop_populate_Idempotent x = populate x == populate (populate x)
prop_populate_Uniform :: [IneqSlack] -> Property
prop_populate_Uniform x =
length x > 0 ==>
let x' = map (\z -> length (getStdVars $ slackIneq z)
+ length (slackVars z)) $ populate x
in
allTheSame x'
prop_diffZip_Zero :: EquSlackQC -> Bool
prop_diffZip_Zero x' =
let x = fromEquSlack x'
r = diffZip x x
ss = map varCoeff $ slackVars r
c = getStdConst $ slackIneq r
vs = map varCoeff $ getStdVars $ slackIneq r
in
all (== 0) (ss ++ vs ++ [c])
prop_flatten_One :: IneqSlack -> Int -> Property
prop_flatten_One x n =
n >= 0 && n < length (getStdVars $ slackIneq x) ==>
let r = varCoeff (getStdVars (slackIneq $ flatten x n) !! n) in
r == 1
prop_flatten_Idempotent :: IneqSlack -> Int -> Property
prop_flatten_Idempotent x n =
n >= 0 && n < length (getStdVars $ slackIneq x) ==>
flatten (flatten x n) n == flatten x n
prop_compensate_Zero :: EquSlackQC -> EquSlackQC -> Int -> Property
prop_compensate_Zero nfocal ntarget n =
let focal = fromEquSlack nfocal
target = fromEquSlack ntarget
[focal',target'] = populate [focal,target]
focal'' = flatten focal' n
in
n >= 0 && n < (length (getStdVars $ slackIneq focal) `max` length (getStdVars $ slackIneq target)) ==>
varCoeff (getStdVars (slackIneq (compensate focal'' target' n)) !! n) == 0
prop_nextRow_MinRatio :: [IneqSlack] -> Int -> Property
prop_nextRow_MinRatio xs n =
let xs' = populate xs in
not (null xs) && n >= 0 && n < length (getStdVars $ slackIneq $ head xs') ==>
case nextRow xs' n of
Nothing -> True
Just r ->
let ratios = mapMaybe (`coeffRatio` n) xs'
ratio = fromJust $ coeffRatio (xs' !! r) n
in
minimum ratios == ratio
prop_nextColumn_MinValue :: EquSlackQC -> Property
prop_nextColumn_MinValue x' =
let x = fromEquSlack x'
vars = varCoeff <$> getStdVars (slackIneq x)
in
length (filter (< 0) vars) > 0 ==>
vars !! fromJust (nextColumn x) == minimum vars
allTheSame :: (Eq a) => [a] -> Bool
allTheSame xs = all (== head xs) (tail xs)
|
athanclark/simplex-basic
|
test/Linear/Simplex/PrimalSpec.hs
|
bsd-3-clause
| 5,535 | 0 | 21 | 1,408 | 2,074 | 1,068 | 1,006 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TemplateHaskell #-}
module Onedrive.Types.FileSystemInfoFacet where
import Control.Lens (makeLensesWith, camelCaseFields)
import Data.Aeson (FromJSON(parseJSON), Value(Object), (.:))
import Data.Text (Text)
data FileSystemInfoFacet =
FileSystemInfoFacet
{ fileSystemInfoFacetCreatedDateTime :: Text
, fileSystemInfoFacetLastModifiedDateTime :: Text
} deriving (Show)
instance FromJSON FileSystemInfoFacet where
parseJSON (Object o) =
FileSystemInfoFacet <$> o .: "createdDateTime" <*> o .: "lastModifiedDateTime"
parseJSON _ =
error "Invalid FileSystemInfoFacet JSON"
makeLensesWith camelCaseFields ''FileSystemInfoFacet
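-- An illustrative decoding sketch (assumes 'decode' from Data.Aeson and a
-- lazy ByteString literal via OverloadedStrings; the timestamps are made up):
--
-- > decode "{\"createdDateTime\":\"2016-01-01T00:00:00Z\",\"lastModifiedDateTime\":\"2016-01-02T00:00:00Z\"}"
-- >   :: Maybe FileSystemInfoFacet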
|
asvyazin/hs-onedrive
|
src/Onedrive/Types/FileSystemInfoFacet.hs
|
bsd-3-clause
| 776 | 0 | 9 | 101 | 147 | 86 | 61 | 19 | 0 |
{-# LANGUAGE CPP #-}
module Test.Unit.Module (
tests
) where
import Test.Tasty.TestSet (group)
import Bead.Config (initTaskAssertions)
import Bead.Config.Parser (parseTests)
import Bead.Controller.Pages (pageDescTest)
import Bead.Daemon.Logout (logoutQueueTests)
import Bead.Domain.Entities (asgTests, entityTests, feedbackTests)
import Bead.Domain.Relationships (relationshipTests)
import Bead.Domain.RolePermission (permissionTest)
import Bead.Persistence.NoSQLDirFile (noSqlDirTests)
import Bead.Persistence.Persist (persistTests)
import Bead.Persistence.Relations (persistRelationsTests)
import Bead.View.Content.All (pageContentTest)
import Bead.View.Content.Home.Page (sumBinaryResultTests, sumPercentageResultTests)
import Bead.View.DataBridge (dataBridgeTests)
import Bead.View.DictionaryLoader (patchDictionariesTests)
#ifdef EmailEnabled
import Bead.View.EmailTemplate (runEmailTemplateTests)
#endif
import Bead.View.Headers.AcceptLanguage (acceptLanguageTests)
import Bead.View.Pagelets (linkTextTest)
import Bead.View.RouteOf (routeOfTest)
import Bead.View.Routing (routingTest)
import Bead.View.Session (uniqueSessionKeysTest)
import Bead.View.TemplateAndComponentNames (fieldNameTest)
#ifdef EmailEnabled
import Bead.View.Validators (emailAddressTests)
#endif
tests = do
group "Page description" pageDescTest
group "Route of" routeOfTest
group "Routing" routingTest
group "Page content handler " pageContentTest
group "Link text" linkTextTest
group "Logout daemon" logoutQueueTests
group "Permissions" permissionTest
group "NoSQLDir" noSqlDirTests
group "Field name" fieldNameTest
group "Entity" entityTests
group "Relationships" relationshipTests
group "Unique session keys" uniqueSessionKeysTest
group "Assignment" asgTests
group "Homepage binary results" sumBinaryResultTests
group "Homepage percentage results" sumPercentageResultTests
group "Command line and configuration" initTaskAssertions
group "Persist" persistTests
#ifdef EmailEnabled
group "Email address" emailAddressTests
group "Run email template" runEmailTemplateTests
#endif
group "Persist relations" persistRelationsTests
group "Data bridge" dataBridgeTests
group "Feedback" feedbackTests
group "Parse" parseTests
group "Accept language" acceptLanguageTests
group "Patch dictionaries" patchDictionariesTests
|
pgj/bead
|
test/Test/Unit/Module.hs
|
bsd-3-clause
| 2,350 | 0 | 7 | 252 | 489 | 263 | 226 | 48 | 1 |
{-# LANGUAGE RecordWildCards, DeriveDataTypeable, ViewPatterns, TupleSections #-}
module Core(
Equal(..), sym,
Goal(..), State(..),
resetState, getState, getProofs, getGoals,
defineFunction, defineData, addGoal,
firstGoal, firstSubgoal, rewriteExp, applyProof,
splitCase, splitCon, splitOther, removeLam,
cheat
) where
import Control.Applicative
import Control.Exception
import Control.DeepSeq
import Exp
import Util
import System.IO.Unsafe
import Data.IORef
import Data.Generics.Uniplate.Data
import Data.List.Extra
import Data.Data
-- What inductive hypothesis is available to us
data Induction
= Coinduction -- can prove by coinduction, have generated a constructor and am at the root
| InductionEq Int Int -- position i in the original and j in the current are equal
| InductionGt Int Int -- position i in the original is strictly greater than j
deriving (Show,Eq)
data Proved = Defined | Proved deriving (Show,Eq)
data State = State
{types :: [(String, [(Con,Int)])] -- these should go away
,proved :: [(Equal, Proved)]
,goals :: [Goal] -- none are literally equal
} deriving Show
data Equal = Exp :=: Exp deriving (Data,Typeable,Show,Eq)
data Goal = Goal Equal [(Equal, [Induction])] -- prove the ultimate goal, given a list of subgoals
deriving Show
sym :: Equal -> Equal
sym (a :=: b) = b :=: a
resetState :: IO ()
resetState = withState $ const $ State [] [] []
invalid :: String -> a
invalid x = error $ "Proof step is invalid, " ++ x
promote :: State -> State
promote s@State{goals = Goal t []:xs} = promote $ s{proved = proved s ++ [(t,Proved)], goals = xs}
promote s@State{goals = Goal t ((a :=: b, _):gs):xs} | a == b = promote $ s{goals = Goal t gs : xs}
promote s = s
instance Pretty Equal where
pretty (a :=: b) = pretty a ++ " = " ++ pretty b
instance Pretty State where
pretty State{..} = unlines $
[unwords $ "data" : x : "=" : intercalate ["|"] [fromCon y : replicate n "_" | (y,n) <- ys] | (x,ys) <- types] ++
["\n" ++ pretty x ++ (if b == Defined then " -- defined" else "") | (x,b) <- proved] ++
["\n-- GOAL\n" ++ pretty a ++ concat
["\n-- SUBGOAL " ++ show induct ++ "\n" ++
pretty a | (a, induct) <- xs]
| Goal a xs <- goals]
-- trusted Core operations:
-- * Reorder goals
-- * Apply transformations to an expression
-- * Based on proof, direct textually equivalent equality substitution
-- * Based on eval equivalence
-- * Split based on Case, Ctor (induces a reduction)
-- * Reorder or drop lambda parameters equally (positional quantifiers)
-- * Induction, direct textually equivalent equality substitution
addGoal :: Exp -> Exp -> IO Equal
addGoal a b = do
withState $ \s -> s{goals = Goal (a :=: b) [(a :=: b, [])] : goals s}
return $ a :=: b
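-- A hypothetical session sketch ('lhs' and 'rhs' stand for 'Exp' values built
-- elsewhere):
--
-- > do resetState
-- >    _ <- addGoal lhs rhs
-- >    splitCase                      -- case-split the first subgoal
-- >    getState >>= putStrLn . pretty -- inspect the remaining goals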
-- | Make goal at position i the first goal
firstGoal :: Int -> IO ()
firstGoal i = withState $ \s ->
let (pre,x:post) = splitAt i $ goals s
in s{goals = x:pre++post}
-- | Make subgoal at position i the first subgoal
firstSubgoal :: Int -> IO ()
firstSubgoal i = withState $ \s@State{goals=Goal a bs:rest} ->
let (pre,x:post) = splitAt i bs
in s{goals = Goal a (x:pre++post) : rest}
-- | Define a new function
defineFunction :: String -> Exp -> IO Equal
defineFunction name body = do
let prf = Var (V name) :=: body
withState $ \s -> s{proved = proved s ++ [(prf, Defined)]}
return prf
-- | Define a new data type, defines the case splitting rule.
defineData :: [(String,Int)] -> IO Equal
defineData ctrs = do
withState $ \s -> s{proved = proved s ++ [(prf, Defined)]}
return prf
where
v1:vs = fresh []
prf = Lam v1 (Var v1) :=: Lam v1 (Case (Var v1) alts)
alts = [(PCon (C a) vs', Con (C a) `apps` map Var vs') | (a,b) <- ctrs, let vs' = take b vs]
-- Using the given proof (which must already be proved, or be available as an inductive hypothesis), rewrite to produce this subgoal
applyProof :: Equal -> Equal -> IO ()
applyProof given@(from :=: to) new = withState $ \s ->
if not $ valid s given then error $ "applyProof called with an invalid proof, " ++ pretty given else
case goals s of
Goal r1 ((old, reduced):r2):r3
| new `elem` [ctx to | (val,ctx) <- contextsBi old, val == from]
-> s{goals = Goal r1 ((new, reduced):r2) : r3}
| otherwise -> error $ "failed to match proof\n" ++ pretty given ++ "\n" ++ pretty old ++ "\n" ++ pretty new
where
valid s prf | prf `elem` map fst (proved s) || sym prf `elem` map fst (proved s) = True
| Goal t ((_,ind):_):_ <- goals s, prf `elem` [t, sym t] =
Coinduction `elem` ind
| otherwise = False
-- can only apply the induction in the case that
-- rewrite expressions, must be equivalent under eval
rewriteExp :: Equal -> IO ()
rewriteExp (a :=: b) = withSubgoal $ \(o@(x :=: y), reduced) ->
if eval x /= eval a then invalid $ "rewriteExp\n" ++ pretty x ++ "\nNot equal to:\n" ++ pretty a
else if eval y /= eval b then invalid $ "rewriteExp\n" ++ pretty y ++ "\nNot equal to:\n" ++ pretty b
else [(a :=: b,reduced)]
splitCase :: IO ()
splitCase = withSubgoal $ \(o@(a :=: b), reduced) ->
if pattern a /= pattern b then invalid $ "splitCase on different patterns, " ++ pretty o
else let (vs,v,_) = pattern a
in map (, [Coinduction | v `elem` map Var vs] ++ reduced) $ zipWith (:=:) (split a) (split b)
where
-- distinguishes the salient features
pattern (fromLams . relabel -> (vs, Case on alts)) = (vs, on, map (patCon . fst) alts)
pattern x = invalid $ "splitCase not on a case, " ++ pretty x
split (fromLams -> (vs, Case on alts)) = [lams (vs ++ patVars p) x | (p,x) <- alts]
splitCon :: IO ()
splitCon = withSubgoal $ \(o@(a :=: b), _) ->
if pattern a /= pattern b then invalid $ "splitCon on different patterns, " ++ pretty o
else map (,[Coinduction]) $ zipWith (:=:) (split a) (split b)
where
pattern (fromLams -> (vs, fromApps -> (Con ctr, args))) = (length vs, ctr, length args)
pattern x = invalid $ "splitCon not a con, " ++ pretty x
split (fromLams -> (vs, fromApps -> (Con ctr, args))) = map (lams vs) args
splitOther :: IO ()
splitOther = withSubgoal $ \(o@(a :=: b), induct) ->
if pattern a /= pattern b then invalid $ "splitVar on different patterns, " ++ pretty o
else map (,induct) $ zipWith (:=:) (split a) (split b)
where
pattern (fromLams . relabel -> (vs, fromApps -> (Var v, args))) | v `elem` vs = (vs, v, length args)
pattern x = invalid $ "splitVar not a free var, " ++ pretty x
split (fromLams -> (vs, fromApps -> (Var v, args))) | v `elem` vs = map (lams vs) args
removeLam :: IO ()
removeLam = withSubgoal $ \(old@((fromLams -> (as, a)) :=: (fromLams -> (bs, b))), reduced) ->
let rem = f as a `intersect` f bs b
new = g rem as a :=: g rem bs b
in if new == old then invalid "removeLam, none are redundant" else [(new, reduced)]
where
f as a = [i | let fr = free a, (i,x) <- zip [0..] as, x `notElem` fr]
g rem as a = h [x | (i,x) <- zip [0..] as, i `notElem` rem] a
h (unsnoc -> Just (vs,v)) (App x v2) | vs /= [], Var v == v2, v `notElem` free x = h vs x
h a b = lams a b
{-# NOINLINE state #-}
state :: IORef State
state = unsafePerformIO $ newIORef $ State [] [] []
getState :: IO State
getState = readIORef state
getGoals = goals <$> getState
getProofs = map fst . proved <$> getState
withState :: (State -> State) -> IO ()
withState f = do
s <- readIORef state
s <- return $ promote $ f s
evaluate $ rnf $ show s
writeIORef state s
-- Nothing indicates you proved it
withGoal :: (Goal -> Goal) -> IO ()
withGoal f = withState $ \s@State{goals=g:gs} -> s{goals = f g : gs}
withSubgoal :: ((Equal, [Induction]) -> [(Equal, [Induction])]) -> IO ()
withSubgoal f = withGoal $ \(Goal t (p:ps)) -> Goal t (f p ++ ps)
cheat :: IO ()
cheat = withSubgoal (const [])
|
ndmitchell/qed
|
v1/Core.hs
|
bsd-3-clause
| 8,104 | 0 | 18 | 2,059 | 3,308 | 1,766 | 1,542 | 144 | 3 |
{- | Provide iteratee-based IO as described in Oleg Kiselyov's paper 'http://okmij.org/ftp/Haskell/Iteratee/'.
Oleg's original code uses lists to store buffers of data for reading in the iteratee. This package allows the use of arbitrary types through use of the ListLike type class.
Iteratees can be thought of as stream processor combinators. Iteratees are combined to run in sequence or in parallel, and then processed by enumerators. The result of the enumeration is another iteratee, which may then be used again, or have the result obtained via the 'run' function.
> -- count the number of bytes in a file, reading at most 8192 bytes at a time
> import Data.Iteratee as I
> import Data.Iteratee.IO
> import Data.ByteString
>
> byteCounter :: Monad m => Iteratee ByteString m Int
> byteCounter = I.length
>
> countBytes = do
> i' <- enumFile 8192 "/usr/share/dict/words" byteCounter
> result <- run i'
> print result
Iteratees can be combined to perform much more complex tasks. The iteratee monad allows for sequencing iteratee operations.
> iter2 = do
> I.drop 4
> I.head
In addition to enumerations over files and Handles, enumerations can be programmatically generated.
> get5thElement = enumPure1Chunk [1..10] iter2 >>= run >>= print
Iteratees can also work as stream transformers, called 'Enumeratee's. A very simple example is provided by 'Data.Iteratee.ListLike.filter'. When working with enumeratees, it's very common to collapse the nested iteratee with 'joinI'.
This function returns the 5th element greater than 5.
> iterfilt = joinI $ I.filter (>5) iter2
> find5thOver5 = enumPure1Chunk [10,1,4,6,7,4,2,8,5,9::Int] iterfilt >>= run >>= print
Another common use of iteratees is 'takeUpTo', which guarantees that an iteratee consumes a bounded number of elements. This is often useful when parsing data. You can check how much data an iteratee has consumed with 'enumWith'.
> iter3 :: (Num el, Ord el, Monad m) => Iteratee [el] m (el,Int)
> iter3 = joinI (I.takeUpTo 100 (enumWith iterfilt I.length))
Many more functions are provided, and there are many other useful ways to combine iteratees and enumerators.
-}
module Data.Iteratee (
module Data.Iteratee.Binary,
module Data.Iteratee.ListLike,
module Data.Iteratee.PTerm,
fileDriver,
fileDriverVBuf,
fileDriverRandom,
fileDriverRandomVBuf
)
where
import Data.Iteratee.Binary
import Data.Iteratee.IO
import Data.Iteratee.ListLike
import Data.Iteratee.PTerm
|
iteloo/tsuru-sample
|
iteratee-0.8.9.6/src/Data/Iteratee.hs
|
bsd-3-clause
| 2,473 | 0 | 5 | 413 | 66 | 45 | 21 | 12 | 0 |
-- | Bitmaps represented as strings.
--
-- This bitmap type is deprecated; use 'Data.Bitmap.String' instead.
--
-- The module provides polymorphic support for representation of bitmaps as strings.
-- This module is designed to be most efficient with lazy bytestrings.
module Data.Bitmap.StringRGB32 {-# DEPRECATED "Use Data.Bitmap.String instead" #-}
( BitmapStringRGB32
) where
import Data.Bitmap.StringRGB32.Internal
|
bairyn/bitmaps
|
src/Data/Bitmap/StringRGB32.hs
|
bsd-3-clause
| 429 | 0 | 4 | 66 | 26 | 20 | 6 | 3 | 0 |
-- (c) The University of Glasgow 2006
-- (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
--
-- The @Class@ datatype
{-# LANGUAGE CPP, DeriveDataTypeable #-}
module Class (
Class,
ClassOpItem,
ClassATItem(..),
ClassMinimalDef,
DefMethInfo, pprDefMethInfo, defMethSpecOfDefMeth,
FunDep, pprFundeps, pprFunDep,
mkClass, classTyVars, classArity,
classKey, className, classATs, classATItems, classTyCon, classMethods,
classOpItems, classBigSig, classExtraBigSig, classTvsFds, classSCTheta,
classAllSelIds, classSCSelId, classMinimalDef, classHasFds,
naturallyCoherentClass
) where
#include "HsVersions.h"
import {-# SOURCE #-} TyCon ( TyCon, tyConName, tyConUnique )
import {-# SOURCE #-} TyCoRep ( Type, PredType, pprType )
import Var
import Name
import BasicTypes
import Unique
import Util
import SrcLoc
import PrelNames ( eqTyConKey, coercibleTyConKey, typeableClassKey,
heqTyConKey )
import Outputable
import BooleanFormula (BooleanFormula)
import Data.Typeable (Typeable)
import qualified Data.Data as Data
{-
************************************************************************
* *
\subsection[Class-basic]{@Class@: basic definition}
* *
************************************************************************
A @Class@ corresponds to a Greek kappa in the static semantics:
-}
data Class
= Class {
classTyCon :: TyCon, -- The data type constructor for
-- dictionaries of this class
-- See Note [ATyCon for classes] in TyCoRep
className :: Name, -- Just the cached name of the TyCon
classKey :: Unique, -- Cached unique of TyCon
classTyVars :: [TyVar], -- The class kind and type variables;
-- identical to those of the TyCon
classFunDeps :: [FunDep TyVar], -- The functional dependencies
-- Superclasses: eg: (F a ~ b, F b ~ G a, Eq a, Show b)
-- We need value-level selectors for both the dictionary
-- superclasses and the equality superclasses
classSCTheta :: [PredType], -- Immediate superclasses,
classSCSels :: [Id], -- Selector functions to extract the
-- superclasses from a
-- dictionary of this class
-- Associated types
classATStuff :: [ClassATItem], -- Associated type families
-- Class operations (methods, not superclasses)
classOpStuff :: [ClassOpItem], -- Ordered by tag
-- Minimal complete definition
classMinimalDef :: ClassMinimalDef
}
deriving Typeable
-- | e.g.
--
-- > class C a b c | a b -> c, a c -> b where...
--
-- Here fun-deps are [([a,b],[c]), ([a,c],[b])]
--
-- - 'ApiAnnotation.AnnKeywordId' : 'ApiAnnotation.AnnRarrow'',
-- For details on above see note [Api annotations] in ApiAnnotation
type FunDep a = ([a],[a])
type ClassOpItem = (Id, DefMethInfo)
-- Selector function; contains unfolding
-- Default-method info
type DefMethInfo = Maybe (Name, DefMethSpec Type)
-- Nothing No default method
-- Just ($dm, VanillaDM) A polymorphic default method, name $dm
-- Just ($gm, GenericDM ty) A generic default method, name $gm, type ty
-- The generic dm type is *not* quantified
-- over the class variables; ie has the
   --                            class variables free
data ClassATItem
= ATI TyCon -- See Note [Associated type tyvar names]
(Maybe (Type, SrcSpan))
-- Default associated type (if any) from this template
-- Note [Associated type defaults]
type ClassMinimalDef = BooleanFormula Name -- Required methods
-- | Convert a `DefMethInfo` to a `DefMethSpec`, which discards the name field
-- of the `DefMethInfo`.
defMethSpecOfDefMeth :: DefMethInfo -> Maybe (DefMethSpec Type)
defMethSpecOfDefMeth meth
= case meth of
Nothing -> Nothing
Just (_, spec) -> Just spec
{-
Note [Associated type defaults]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The following is an example of associated type defaults:
class C a where
data D a r
type F x a b :: *
type F p q r = (p,q)->r -- Default
Note that
* The TyCons for the associated types *share type variables* with the
class, so that we can tell which argument positions should be
instantiated in an instance decl. (The first for 'D', the second
for 'F'.)
* We can have default definitions only for *type* families,
not data families
* In the default decl, the "patterns" should all be type variables,
but (in the source language) they don't need to be the same as in
the 'type' decl signature or the class. It's more like a
free-standing 'type instance' declaration.
* HOWEVER, in the internal ClassATItem we rename the RHS to match the
tyConTyVars of the family TyCon. So in the example above we'd get
a ClassATItem of
ATI F ((x,a) -> b)
So the tyConTyVars of the family TyCon bind the free vars of
the default Type rhs
The @mkClass@ function fills in the indirect superclasses.
The SrcSpan is for the entire original declaration.
-}
mkClass :: [TyVar]
-> [([TyVar], [TyVar])]
-> [PredType] -> [Id]
-> [ClassATItem]
-> [ClassOpItem]
-> ClassMinimalDef
-> TyCon
-> Class
mkClass tyvars fds super_classes superdict_sels at_stuff
op_stuff mindef tycon
= Class { classKey = tyConUnique tycon,
className = tyConName tycon,
classTyVars = tyvars,
classFunDeps = fds,
classSCTheta = super_classes,
classSCSels = superdict_sels,
classATStuff = at_stuff,
classOpStuff = op_stuff,
classMinimalDef = mindef,
classTyCon = tycon }
{-
Note [Associated type tyvar names]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The TyCon of an associated type should use the same variable names as its
parent class. Thus
class C a b where
type F b x a :: *
We make F use the same Name for 'a' as C does, and similarly 'b'.
The reason for this is when checking instances it's easier to match
them up, to ensure they match. Eg
instance C Int [d] where
type F [d] x Int = ....
we should make sure that the first and third args match the instance
header.
Having the same variables for class and tycon is also used in checkValidRoles
(in TcTyClsDecls) when checking a class's roles.
************************************************************************
* *
\subsection[Class-selectors]{@Class@: simple selectors}
* *
************************************************************************
The rest of these functions are just simple selectors.
-}
classArity :: Class -> Arity
classArity clas = length (classTyVars clas)
-- Could memoise this
classAllSelIds :: Class -> [Id]
-- Both superclass-dictionary and method selectors
classAllSelIds c@(Class {classSCSels = sc_sels})
= sc_sels ++ classMethods c
classSCSelId :: Class -> Int -> Id
-- Get the n'th superclass selector Id
-- where n is 0-indexed, and counts
-- *all* superclasses including equalities
classSCSelId (Class { classSCSels = sc_sels }) n
= ASSERT( n >= 0 && n < length sc_sels )
sc_sels !! n
classMethods :: Class -> [Id]
classMethods (Class {classOpStuff = op_stuff})
= [op_sel | (op_sel, _) <- op_stuff]
classOpItems :: Class -> [ClassOpItem]
classOpItems = classOpStuff
classATs :: Class -> [TyCon]
classATs (Class { classATStuff = at_stuff })
= [tc | ATI tc _ <- at_stuff]
classATItems :: Class -> [ClassATItem]
classATItems = classATStuff
classTvsFds :: Class -> ([TyVar], [FunDep TyVar])
classTvsFds c
= (classTyVars c, classFunDeps c)
classHasFds :: Class -> Bool
classHasFds (Class { classFunDeps = fds }) = not (null fds)
classBigSig :: Class -> ([TyVar], [PredType], [Id], [ClassOpItem])
classBigSig (Class {classTyVars = tyvars, classSCTheta = sc_theta,
classSCSels = sc_sels, classOpStuff = op_stuff})
= (tyvars, sc_theta, sc_sels, op_stuff)
classExtraBigSig :: Class -> ([TyVar], [FunDep TyVar], [PredType], [Id], [ClassATItem], [ClassOpItem])
classExtraBigSig (Class {classTyVars = tyvars, classFunDeps = fundeps,
classSCTheta = sc_theta, classSCSels = sc_sels,
classATStuff = ats, classOpStuff = op_stuff})
= (tyvars, fundeps, sc_theta, sc_sels, ats, op_stuff)
-- | If a class is "naturally coherent", then we needn't worry at all, in any
-- way, about overlapping/incoherent instances. Just solve the thing!
naturallyCoherentClass :: Class -> Bool
-- See also Note [The equality class story] in TysPrim.
naturallyCoherentClass cls
= cls `hasKey` heqTyConKey ||
cls `hasKey` eqTyConKey ||
cls `hasKey` coercibleTyConKey ||
cls `hasKey` typeableClassKey
{-
************************************************************************
* *
\subsection[Class-instances]{Instance declarations for @Class@}
* *
************************************************************************
We compare @Classes@ by their keys (which include @Uniques@).
-}
instance Eq Class where
c1 == c2 = classKey c1 == classKey c2
c1 /= c2 = classKey c1 /= classKey c2
instance Ord Class where
c1 <= c2 = classKey c1 <= classKey c2
c1 < c2 = classKey c1 < classKey c2
c1 >= c2 = classKey c1 >= classKey c2
c1 > c2 = classKey c1 > classKey c2
compare c1 c2 = classKey c1 `compare` classKey c2
instance Uniquable Class where
getUnique c = classKey c
instance NamedThing Class where
getName clas = className clas
instance Outputable Class where
ppr c = ppr (getName c)
pprDefMethInfo :: DefMethInfo -> SDoc
pprDefMethInfo Nothing = empty -- No default method
pprDefMethInfo (Just (n, VanillaDM)) = text "Default method" <+> ppr n
pprDefMethInfo (Just (n, GenericDM ty)) = text "Generic default method"
<+> ppr n <+> dcolon <+> pprType ty
pprFundeps :: Outputable a => [FunDep a] -> SDoc
pprFundeps [] = empty
pprFundeps fds = hsep (vbar : punctuate comma (map pprFunDep fds))
pprFunDep :: Outputable a => FunDep a -> SDoc
pprFunDep (us, vs) = hsep [interppSP us, text "->", interppSP vs]
instance Data.Data Class where
-- don't traverse?
toConstr _ = abstractConstr "Class"
gunfold _ _ = error "gunfold"
dataTypeOf _ = mkNoRepType "Class"
|
tjakway/ghcjvm
|
compiler/types/Class.hs
|
bsd-3-clause
| 11,156 | 0 | 12 | 3,137 | 1,713 | 987 | 726 | 140 | 2 |
{-# LANGUAGE CPP, MultiParamTypeClasses, FlexibleInstances, FlexibleContexts #-}
{-# OPTIONS -Wall #-}
module Language.Haskell.TH.Build.Convertible where
import Language.Haskell.TH
import Data.Char
import Control.Monad
isUpperName :: Name -> Bool
isUpperName = liftM2 (||) (==':') isUpper . head . nameBase
ifUpperThenElse :: (Name -> t) -> (Name -> t) -> Name -> t
ifUpperThenElse ku kl n = (if isUpperName n then ku else kl) n
class Convertible a b where
convert :: a -> b
-- instance Convertible ExpQ ExpQ where convert = id
-- instance Convertible [ ExpQ ] [ ExpQ ] where convert = id
-- instance Convertible [ StrictTypeQ ] [ StrictTypeQ ] where convert = id
-- instance Convertible DecsQ DecsQ where convert = id
-- instance Convertible [ DecQ ] [ DecQ ] where convert = id
-- instance Convertible [PatQ] [PatQ] where convert = id
-- instance Convertible TypeQ TypeQ where convert = id
-- instance Convertible [ TypeQ ] [TypeQ] where convert = id
-- instance Convertible Name Name where convert = id
-- instance Convertible TyVarBndr TyVarBndr where convert = id
-- instance Convertible ConQ ConQ where convert = id
-- instance Convertible CxtQ CxtQ where convert = id
-- instance Convertible StrictTypeQ StrictTypeQ where convert = id
#define TRANS(A,B,C) instance Convertible (A) (C) where convert = (convert :: (B) -> (C)) . (convert :: (A) -> (B))
#define MAP(A,C) instance Convertible (A) (C) where convert = map convert
#define SINGLETON(A) -- instance Convertible (A) [A] where convert = return
instance Convertible a a where convert = id
-- | Singleton
instance Convertible a [a] where convert = return
-- instance Convertible a b => Convertible a [b] where convert = return . convert
-- | Empty list
instance Convertible () [a] where convert = const mzero
-- | Empty list
instance Convertible () (Q [a]) where convert = const (return mzero)
-- | 'Nothing'
instance Convertible () (Maybe a) where convert = const mzero
-- | 'Nothing'
instance Convertible () (Q (Maybe a)) where convert = const (return mzero)
instance Convertible Integer Lit where convert = integerL
-- | 'conE' or 'varE', determined by capitalization.
instance Convertible Name ExpQ where convert = ifUpperThenElse conE varE
instance Convertible String Name where convert = mkName
instance Convertible Lit ExpQ where convert = litE
instance Convertible RangeQ ExpQ where convert = arithSeqE
-- | 'conE' or 'varE', determined by capitalization.
TRANS(String,Name,ExpQ)
TRANS(Integer,Lit,ExpQ)
MAP([ Name ],[ ExpQ ])
MAP([ String ],[ ExpQ ])
MAP([ Lit ],[ ExpQ ])
MAP([ Integer ],[ ExpQ ])
MAP([ RangeQ ],[ ExpQ ])
-- | A single 'conE' or 'varE', determined by capitalization.
TRANS( String ,ExpQ,[ExpQ])
-- | A single 'conE' or 'varE', determined by capitalization.
TRANS( Name ,ExpQ,[ExpQ])
TRANS( Lit ,ExpQ,[ExpQ])
TRANS( Integer ,ExpQ,[ExpQ])
TRANS( RangeQ ,ExpQ,[ExpQ])
-- | 'conP' or 'varP', determined by capitalization.
instance Convertible Name PatQ where convert = ifUpperThenElse (flip conP []) varP
-- | 'conP' or 'varP', determined by capitalization.
TRANS(String,Name,PatQ)
MAP([ Name ],[PatQ])
MAP([ String ],[PatQ])
SINGLETON(PatQ)
-- | A single 'conP' or 'varP', determined by capitalization.
TRANS(Name,PatQ,[PatQ])
-- | A single 'conP' or 'varP', determined by capitalization.
TRANS(String,PatQ,[PatQ])
-- | 'conT' or 'varT', determined by capitalization.
instance Convertible Name TypeQ where convert = ifUpperThenElse conT varT
-- | 'conT' or 'varT', determined by capitalization.
TRANS(String,Name,TypeQ)
MAP([ Name ],[TypeQ])
MAP([ String ],[TypeQ])
SINGLETON(TypeQ)
-- | A single 'conT' or 'varT', determined by capitalization.
TRANS(Name,TypeQ,[TypeQ])
-- | A single 'conT' or 'varT', determined by capitalization.
TRANS(String,TypeQ,[TypeQ])
instance Convertible TyVarBndr TypeQ where
convert (PlainTV v) = varT v
convert (KindedTV v k) = sigT (varT v) k
-- | 'PlainTV'
instance Convertible Name TyVarBndr where convert = PlainTV
TRANS(String,Name,TyVarBndr)
SINGLETON(TyVarBndr)
TRANS(Name,TyVarBndr,[TyVarBndr])
-- | 'sequence'
instance Convertible [PredQ] CxtQ where convert = sequence
-- | Uses 'NotStrict'.
instance Convertible TypeQ StrictTypeQ where convert = strictType notStrict
TRANS(Name,TypeQ,StrictTypeQ)
TRANS(String,TypeQ,StrictTypeQ)
SINGLETON(StrictTypeQ)
TRANS(TypeQ,StrictTypeQ,[StrictTypeQ])
TRANS(Name,StrictTypeQ,[StrictTypeQ])
TRANS(String,StrictTypeQ,[StrictTypeQ])
-- | 'sequence'
instance Convertible [ DecQ ] DecsQ where convert = sequence
instance Convertible DecQ DecsQ where convert = fmap return
instance Convertible [DecsQ] DecsQ where convert = fmap join . sequence
SINGLETON(DecQ)
-- | 'normalB'
instance Convertible ExpQ BodyQ where convert = normalB
TRANS(Name,ExpQ,BodyQ)
TRANS(String,ExpQ,BodyQ)
TRANS(Lit,ExpQ,BodyQ)
TRANS(Integer,ExpQ,BodyQ)
TRANS(RangeQ,ExpQ,BodyQ)
#undef MAP
#undef TRANS
(&) :: Convertible a1 a => a1 -> [a] -> [a]
a & b = convert a : b
infixr 5 &
-- * Function transformers
preconvert1 :: Convertible a b => (b -> c) -> a -> c
preconvert1 = (. convert)
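-- A hypothetical illustration: 'preconvert1' lets a function that expects an
-- 'ExpQ' accept anything convertible to one, such as a 'String' naming a
-- variable or constructor:
--
-- > applyTo :: ExpQ -> String -> ExpQ
-- > applyTo f = preconvert1 (appE f)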
preconvert2
:: (Convertible a1 b, Convertible a b1) =>
(b1 -> b -> c) -> a -> a1 -> c
preconvert2 f = preconvert1 . preconvert1 f
preconvert3
:: (Convertible a1 b, Convertible a2 b1, Convertible a b2) =>
(b2 -> b1 -> b -> c) -> a -> a2 -> a1 -> c
preconvert3 f = preconvert2 . preconvert1 f
preconvert4
:: (Convertible a1 b,
Convertible a2 b1,
Convertible a3 b2,
Convertible a b3) =>
(b3 -> b2 -> b1 -> b -> c) -> a -> a3 -> a2 -> a1 -> c
preconvert4 f = preconvert3 . preconvert1 f
preconvert5
:: (Convertible a1 b, Convertible a2 b1, Convertible a3 b2,
Convertible a4 b3, Convertible a b4) =>
(b4 -> b3 -> b2 -> b1 -> b -> c) -> a -> a4 -> a3 -> a2 -> a1 -> c
preconvert5 f = preconvert4 . preconvert1 f
preconvert6
:: (Convertible a1 b, Convertible a2 b1, Convertible a3 b2,
Convertible a4 b3, Convertible a5 b4, Convertible a b5) =>
(b5 -> b4 -> b3 -> b2 -> b1 -> b -> c)
-> a -> a5 -> a4 -> a3 -> a2 -> a1 -> c
preconvert6 f = preconvert5 . preconvert1 f
preconvert7
:: (Convertible a1 b, Convertible a2 b1, Convertible a3 b2,
Convertible a4 b3, Convertible a5 b4, Convertible a6 b5,
Convertible a b6) =>
(b6 -> b5 -> b4 -> b3 -> b2 -> b1 -> b -> c)
-> a -> a6 -> a5 -> a4 -> a3 -> a2 -> a1 -> c
preconvert7 f = preconvert6 . preconvert1 f
|
DanielSchuessler/th-build
|
Language/Haskell/TH/Build/Convertible.hs
|
bsd-3-clause
| 6,410 | 0 | 14 | 1,141 | 1,974 | 1,060 | 914 | -1 | -1 |
import System.Environment
import System.Directory
import System.IO
import Data.List
main = do
(command:args) <- getArgs
let (Just action) = lookup command dispatch
action args
dispatch :: [(String, [String] -> IO ())]
dispatch = [ ("add", add), ("view", view), ("remove", remove) ]
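-- Hypothetical invocations (assuming the compiled program is called "todo"):
--
-- $ ./todo add todo.txt "buy milk"
-- $ ./todo view todo.txt
-- $ ./todo remove todo.txt 0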
add :: [String] -> IO ()
add [fileName, todoItem] = appendFile fileName (todoItem ++ "\n")
view :: [String] -> IO ()
view [fileName] = do
contents <- readFile fileName
let todoTasks = lines contents
numberedTasks = zipWith (\n line -> show n ++ " - " ++ line) [0..] todoTasks
putStr $ unlines numberedTasks
remove :: [String] -> IO ()
remove [fileName, numberString] = do
handle <- openFile fileName ReadMode
(tempName, tempHandle) <- openTempFile "." "temp"
contents <- hGetContents handle
let number = read numberString
todoTasks = lines contents
newTodoItems = delete (todoTasks !! number) todoTasks
hPutStr tempHandle $ unlines newTodoItems
hClose handle
hClose tempHandle
removeFile fileName
renameFile tempName fileName
|
leichunfeng/learnyouahaskell
|
todo.hs
|
mit
| 1,097 | 0 | 15 | 239 | 415 | 207 | 208 | 31 | 1 |
module UtilSpec (main, spec) where
import Test.Hspec
import Util
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "normalizeFields" $ do
it "converts to snake_case" $ do
normalizeFields "Record" ["foo", "fooBar", "fooBarBaz"] `shouldBe` ["foo", "foo_bar", "foo_bar_baz"]
context "when all fields are prefixed with constructor name" $ do
it "strips constructor name" $ do
normalizeFields "Record" ["recordFoo", "recordBar", "recordBaz"] `shouldBe` ["foo", "bar", "baz"]
context "when only some fields are prefixed with constructor name" $ do
it "does not strip constructor name" $ do
normalizeFields "Record" ["recordFoo", "bar", "recordBaz"] `shouldBe` ["record_foo", "bar", "record_baz"]
|
sol/json-fu
|
test/UtilSpec.hs
|
mit
| 778 | 0 | 18 | 171 | 204 | 109 | 95 | 16 | 1 |
-- | Test out concatenation.
-- Concatenation is a core operation that exercises the representation of nested
-- arrays, as well as the extractsPR function from the dph-lifted-vseg library.
import Vectorised
import Test.HUnit
import qualified Data.Array.Parallel.PArray as PA
arr2 :: [[Int]]
arr2 = [ [0, 1, 2, 3], [4, 5], [], [6, 7, 8], [9]]
arr3 :: [[[Int]]]
arr3 = [[[0]], [[1], [2, 3]], [[4, 5]], [], [[6, 7], [8]], [[]], [[9]] ]
main = runTestTT $ test $
[ "test0" ~: PA.toList test0 ~?= concat arr2
, "test1" ~: PA.toList test1 ~?= concat (concat arr3)
, "test2" ~: PA.toList test2 ~?= concat (map concat arr3)
, "test3" ~: PA.toList test3 ~?= concat [ map (+1) x | x <- [ [1, 2], [3, 4] ] ]
, "test4" ~: PA.toList test4 ~?= concat (map (map (+1)) arr2)
]
|
mainland/dph
|
dph-examples/examples/smoke/prims/Concat/Main.hs
|
bsd-3-clause
| 857 | 0 | 13 | 232 | 361 | 213 | 148 | 13 | 1 |
module HaskellHighlighter (highlightHaskell) where
import Language.Haskell.Lexer
highlightHaskell :: String -> String
highlightHaskell src = init (colorize (lexerPass0 (src++"\n")))
-- the lexer requires this newline for single-line comments to work
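-- A usage sketch: the colour codes emitted are mIRC-style control characters,
-- so the output is intended for an IRC client rather than a terminal.
--
-- > putStrLn (highlightHaskell "main = putStrLn \"hi\"")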
colorize :: [PosToken] -> String
colorize [] = ""
colorize ((_,(_,"`")):(Varid,(_,str)):(_,(_,"`")):rest)
= orange ("`" ++ str ++ "`") ++ colorize rest
colorize ((tok, (_,str)):rest) = aux str ++ colorize rest
where
aux =
case tok of
Varid -> id
Conid -> id
Varsym -> orange
Consym -> orange
Reservedid ->
case str of
"case" -> orange
"of" -> orange
"do" -> orange
"if" -> orange
"then" -> orange
"else" -> orange
"let" -> orange
"in" -> orange
"import" -> pink
"infixl" -> pink
"infixr" -> pink
"infix" -> pink
"_" -> id
_ -> green
Reservedop -> orange
Specialid -> id
IntLit -> red
FloatLit -> red
CharLit -> red
StringLit -> red
Qvarid -> id
Qconid -> id
Qvarsym -> orange
Qconsym -> orange
Special -> id
Whitespace -> id
NestedCommentStart -> comment
NestedComment -> comment
LiterateComment -> comment
Commentstart -> comment
Comment -> comment
ErrorToken -> id
GotEOF -> id
ModuleName -> id
ModuleAlias -> id
Layout -> id
Indent _ -> id
Open _ -> id
TheRest -> id
comment :: String -> String
comment = cyan
green, orange, red, cyan, pink :: String -> String
green x = "\03\&03" ++ x ++ "\03\02\02"
red x = "\03\&04" ++ x ++ "\03\02\02"
orange x = "\03\&07" ++ x ++ "\03\02\02"
cyan x = "\03\&11" ++ x ++ "\03\02\02"
pink x = "\03\&13" ++ x ++ "\03\02\02"
|
TomMD/irc-core
|
driver/HaskellHighlighter.hs
|
bsd-3-clause
| 2,266 | 0 | 12 | 1,013 | 590 | 312 | 278 | 64 | 43 |
{-# LANGUAGE FlexibleContexts, TypeFamilies #-}
-----------------------------------------------------------------------------
-- |
-- Module      :  Numeric.LinearAlgebra.Array.Solve
-- Copyright : (c) Alberto Ruiz 2009
-- License : BSD3
-- Maintainer : Alberto Ruiz
-- Stability : provisional
--
-- Solution of general multidimensional linear and multilinear systems.
--
-----------------------------------------------------------------------------
module Numeric.LinearAlgebra.Array.Solve (
-- * Linear systems
solve,
solveHomog, solveHomog1, solveH,
solveP,
-- * Multilinear systems
-- ** General
ALSParam(..), defaultParameters,
mlSolve, mlSolveH, mlSolveP,
-- ** Factorized
solveFactors, solveFactorsH,
-- * Utilities
eqnorm, infoRank,
solve', solveHomog', solveHomog1', solveP'
) where
import Numeric.LinearAlgebra.Array.Util
import Numeric.LinearAlgebra.Exterior
import Numeric.LinearAlgebra.Array.Internal(mkNArray, selDims, debug, namesR)
import Numeric.LinearAlgebra.HMatrix hiding (scalar,size)
--import qualified Numeric.LinearAlgebra.HMatrix as LA
import Data.List
import System.Random
-- | Solution of the linear system a x = b, where a and b are
-- general multidimensional arrays. The structure and dimension names
-- of the result are inferred from the arguments.
solve :: (Compat i, Coord t, Field t)
=> NArray i t -- ^ coefficients (a)
-> NArray i t -- ^ target (b)
-> NArray i t -- ^ result (x)
solve = solve' id
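-- An illustrative sketch (the index names are assumptions): if @a@ carries
-- indices \"ij\" and @b@ carries index \"i\", then
--
-- > x = solve a b
--
-- produces an array indexed by \"j\" such that contracting @a@ with @x@ over
-- \"j\" reproduces @b@ in the least-squares sense.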
solve' g a b = x where
nx = namesR a \\ namesR b
na = namesR a \\ nx
nb = namesR b \\ namesR a
aM = g $ matrixator a na nx
bM = g $ matrixator b na nb
xM = linearSolveSVD aM bM
dx = map opos (selDims (dims a) nx) ++ selDims (dims b) nb
x = mkNArray dx (flatten xM)
-- | Solution of the homogeneous linear system a x = 0, where a is a
-- general multidimensional array.
--
-- If the system is overconstrained, we may provide the theoretical rank to get an MSE solution.
solveHomog :: (Compat i, Coord t, Field t)
=> NArray i t -- ^ coefficients (a)
-> [Name] -- ^ desired dimensions for the result
-- (a subset selected from the target).
-> Either Double Int -- ^ Left \"numeric zero\" (e.g. eps), Right \"theoretical\" rank
-> [NArray i t] -- ^ basis for the solutions (x)
solveHomog = solveHomog' id
solveHomog' g a nx' hint = xs where
nx = filter (`elem` (namesR a)) nx'
na = namesR a \\ nx
aM = g $ matrixator a na nx
vs = toColumns $ nullspaceSVD hint aM (rightSV aM)
dx = map opos (selDims (dims a) nx)
xs = map (mkNArray dx) vs
-- | A simpler way to use 'solveHomog', which returns just one solution.
-- If the system is overconstrained it returns the MSE solution.
solveHomog1 :: (Compat i, Coord t, Field t)
=> NArray i t
-> [Name]
-> NArray i t
solveHomog1 = solveHomog1' id
solveHomog1' g m ns = head $ solveHomog' g m ns (Right (k-1))
where k = product $ map iDim $ selDims (dims m) ns
-- | 'solveHomog1' for single letter index names.
solveH :: (Compat i, Coord t, Field t) => NArray i t -> [Char] -> NArray i t
solveH m ns = solveHomog1 m (map return ns)
-- | Solution of the linear system a x = b, where a and b are
-- general multidimensional arrays, with homogeneous equality along a given index.
solveP :: Tensor Double -- ^ coefficients (a)
-> Tensor Double -- ^ desired result (b)
-> Name -- ^ the homogeneous dimension
-> Tensor Double -- ^ result (x)
solveP = solveP' id
solveP' g a b h = mapTat (solveP1 g h a) (namesR b \\ (h:namesR a)) b
-- solveP for a single right hand side
solveP1 g nh a b = solveHomog1' g ou ns where
k = size nh b
epsi = t $ leviCivita k `renameO` (nh : (take (k-1) $ (map (('e':).(:[])) ['2'..])))
ou = a .* b' * epsi
ns = (namesR a \\ namesR b) ++ x
b' = renameExplicit [(nh,"e2")] b
x = if nh `elem` (namesR a) then [] else [nh]
t = if typeOf nh b == Co then contrav else cov
-- mapTypes (const (opos $ typeOf nh b))
-----------------------------------------------------------------------
-- | optimization parameters for alternating least squares
data ALSParam i t = ALSParam
{ nMax :: Int -- ^ maximum number of iterations
, delta :: Double -- ^ minimum relative improvement in the optimization (percent, e.g. 0.1)
    , epsilon :: Double -- ^ maximum relative error. For nonhomogeneous problems it is
                        -- the reconstruction error in percent (e.g.
                        -- 1E-3); for homogeneous problems it is the Frobenius norm of the
                        -- expected zero structure in the right-hand side.
, post :: [NArray i t] -> [NArray i t] -- ^ post-processing function after each full iteration (e.g. 'id')
, postk :: Int -> NArray i t -> NArray i t-- ^ post-processing function for the k-th argument (e.g. 'const' 'id')
    , presys :: Matrix t -> Matrix t -- ^ preprocessing function for the linear systems (e.g. 'id', or 'infoRank')
}
optimize :: (x -> x) -- ^ method
-> (x -> Double) -- ^ error function
-> x -- ^ starting point
-> ALSParam i t -- ^ optimization parameters
-> (x, [Double]) -- ^ solution and error history
optimize method errfun s0 p = (sol,e) where
sols = take (max 1 (nMax p)) $ iterate method s0
errs = map errfun sols
(sol,e) = convergence (zip sols errs) []
convergence [] _ = error "impossible"
convergence [(s,err)] prev = (s, err:prev)
convergence ((s1,e1):(s2,e2):ses) prev
| e1 < epsilon p = (s1, e1:prev)
| abs (100*(e1 - e2)/e1) < delta p = (s2, e2:prev)
| otherwise = convergence ((s2,e2):ses) (e1:prev)
percent t s = 100 * frobT (t - smartProduct s) / frobT t
percentP h t s = 100 * frobT (t' - s') / frobT t' where
t' = f t
s' = f (smartProduct s)
f = mapTat g (namesR t \\ [h])
g v = v / atT v [n]
n = size h t - 1
frobT t = realToFrac . norm_2 . coords $ t
--unitT t = t / scalar (frobT t)
dropElemPos k xs = take k xs ++ drop (k+1) xs
replaceElemPos k v xs = take k xs ++ v : drop (k+1) xs
takes [] _ = []
takes (n:ns) xs = take n xs : takes ns (drop n xs)
----------------------------------------------------------------------
alsStep f params a x = (foldl1' (.) (map (f params a) [n,n-1 .. 0])) x
where n = length x - 1
-----------------------------------------------------------------------
-- | Solution of a multilinear system a x y z ... = b based on alternating least squares.
mlSolve
:: (Compat i, Coord t, Field t, Num (NArray i t), Show (NArray i t))
=> ALSParam i t -- ^ optimization parameters
-> [NArray i t] -- ^ coefficients (a), given as a list of factors.
-> [NArray i t] -- ^ initial solution [x,y,z...]
-> NArray i t -- ^ target (b)
-> ([NArray i t], [Double]) -- ^ Solution and error history
mlSolve params a x0 b
= optimize (post params . alsStep (alsArg b) params a) (percent b . (a++)) x0 params
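-- A usage sketch (hypothetical factors; names are illustrative only):
--
-- > (xs, errs) = mlSolve defaultParameters [a] [x0, y0] b
--
-- re-solves each unknown in turn (alternating least squares) until the
-- relative error drops below 'epsilon' or the improvement per sweep falls
-- below 'delta', returning the final factors together with the error history.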
alsArg _ _ _ _ [] = error "alsArg _ _ []"
alsArg b params a k xs = sol where
p = smartProduct (a ++ dropElemPos k xs)
x = solve' (presys params) p b
x' = postk params k x
sol = replaceElemPos k x' xs
----------------------------------------------------------
-- | Solution of the homogeneous multilinear system a x y z ... = 0 based on alternating least squares.
mlSolveH
:: (Compat i, Coord t, Field t, Num (NArray i t), Show (NArray i t))
=> ALSParam i t -- ^ optimization parameters
-> [NArray i t] -- ^ coefficients (a), given as a list of factors.
-> [NArray i t] -- ^ initial solution [x,y,z...]
-> ([NArray i t], [Double]) -- ^ Solution and error history
mlSolveH params a x0
= optimize (post params . alsStep alsArgH params a) (frobT . smartProduct . (a++)) x0 params
alsArgH _ _ _ [] = error "alsArgH _ _ []"
alsArgH params a k xs = sol where
p = smartProduct (a ++ dropElemPos k xs)
x = solveHomog1' (presys params) p (namesR (xs!!k))
x' = postk params k x
sol = replaceElemPos k x' xs
----------------------------------------------------------
-- | Solution of a multilinear system a x y z ... = b, with a homogeneous index, based on alternating least squares.
mlSolveP
:: ALSParam Variant Double -- ^ optimization parameters
-> [Tensor Double] -- ^ coefficients (a), given as a list of factors.
-> [Tensor Double] -- ^ initial solution [x,y,z...]
-> Tensor Double -- ^ target (b)
-> Name -- ^ homogeneous index
-> ([Tensor Double], [Double]) -- ^ Solution and error history
mlSolveP params a x0 b h
= optimize (post params . alsStep (alsArgP b h) params a) (percentP h b . (a++)) x0 params
alsArgP _ _ _ _ _ [] = error "alsArgP _ _ []"
alsArgP b h params a k xs = sol where
p = smartProduct (a ++ dropElemPos k xs)
x = solveP' (presys params) p b h
x' = postk params k x
sol = replaceElemPos k x' xs
-------------------------------------------------------------
{- | Given two arrays a (source) and b (target), we try to compute linear transformations x,y,z,... for each dimension, such that product [a,x,y,z,...] == b.
(We can use 'eqnorm' for 'post' processing, or 'id'.)
-}
solveFactors :: (Coord t, Field t, Random t, Compat i, Num (NArray i t), Show (NArray i t))
=> Int -- ^ seed for random initialization
-> ALSParam i t -- ^ optimization parameters
-> [NArray i t] -- ^ source (also factorized)
-> String -- ^ index pairs for the factors separated by spaces
-> NArray i t -- ^ target
-> ([NArray i t],[Double]) -- ^ solution and error history
solveFactors seed params a pairs b =
mlSolve params a (initFactorsRandom seed (smartProduct a) pairs b) b
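-- A usage sketch (hypothetical seed and index pairs; as far as I read
-- 'initFactorsSeq' below, each two-letter word pairs an index of the source
-- with an index of the target):
--
-- > (fs, errs) = solveFactors 1000 defaultParameters [a] "pi qj" b
--
-- looks for factors x!"pi" and y!"qj" such that product (a:[x,y]) ~ b,
-- starting from a random initialization derived from the seed.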
initFactorsSeq rs a pairs b | ok = as
| otherwise = error "solveFactors index pairs"
where
(ia,ib) = unzip (map sep (words pairs))
ic = intersect (namesR a) (namesR b)
ok = sort (namesR b\\ic) == sort ib && sort (namesR a\\ic) == sort ia
db = selDims (dims b) ib
da = selDims (dims a) ia
nb = map iDim db
na = map iDim da
ts = takes (zipWith (*) nb na) rs
as = zipWith5 f ts ib ia db da
f c i1 i2 d1 d2 = (mkNArray [d1,opos d2] (fromList c)) `renameO` [i1,i2]
initFactorsRandom seed a b = initFactorsSeq (randomRs (-1,1) (mkStdGen seed)) a b
-- | Homogeneous factorized system. Given an array a,
-- given as a list of factors as, and a list of pairs of indices
-- [\"pi\",\"qj\", \"rk\", etc.], we try to compute linear transformations
-- x!\"pi\", y!\"pi\", z!\"rk\", etc. such that product [a,x,y,z,...] == 0.
solveFactorsH
:: (Coord t, Random t, Field t, Compat i, Num (NArray i t), Show (NArray i t))
=> Int -- ^ seed for random initialization
-> ALSParam i t -- ^ optimization parameters
-> [NArray i t] -- ^ coefficient array (a), (also factorized)
-> String -- ^ index pairs for the factors separated by spaces
-> ([NArray i t], [Double]) -- ^ solution and error history
solveFactorsH seed params a pairs =
mlSolveH params a (initFactorsHRandom seed (smartProduct a) pairs)
initFactorsHSeq rs a pairs = as where
(ir,it) = unzip (map sep (words pairs))
nr = map (flip size a) ir
nt = map (flip size a) it
ts = takes (zipWith (*) nr nt) rs
as = zipWith5 f ts ir it (selDims (dims a) ir) (selDims (dims a) it)
f c i1 i2 d1 d2 = (mkNArray (map opos [d1,d2]) (fromList c)) `renameO` [i1,i2]
initFactorsHRandom seed a pairs = initFactorsHSeq (randomRs (-1,1) (mkStdGen seed)) a pairs
sep [a,b] = ([a],[b])
sep _ = error "impossible pattern in hTensor initFactors"
----------------------------------
-- | post processing function that modifies a list of tensors so that they
-- have equal frobenius norm
eqnorm :: (Compat i,Show (NArray i Double))
=> [NArray i Double] -> [NArray i Double]
eqnorm [] = error "eqnorm []"
eqnorm as = as' where
n = length as
fs = map (frobT) as
s = product fs ** (1/fromIntegral n)
as' = zipWith g as fs where g a f = a * (scalar (s/f))
-- | nMax = 20, epsilon = 1E-3, delta = 1, post = id, postk = const id, presys = id
defaultParameters :: ALSParam i t
defaultParameters = ALSParam {
nMax = 20,
epsilon = 1E-3,
delta = 1,
post = id,
postk = const id,
presys = id
}
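-- A minimal customization sketch (values are illustrative only):
--
-- > mlSolve (defaultParameters { nMax = 50, post = eqnorm }) a x0 b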
-- | debugging function (e.g. for 'presys'), which shows rows, columns and rank of the
-- coefficient matrix of a linear system.
infoRank :: Field t => Matrix t -> Matrix t
infoRank a = debug "" (const (rows a, cols a, rank a)) a
|
kjslag/hTensor
|
lib/Numeric/LinearAlgebra/Array/Solve.hs
|
bsd-3-clause
| 12,787 | 1 | 17 | 3,258 | 3,844 | 2,041 | 1,803 | 205 | 3 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_HADDOCK show-extensions #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
-- |
-- Module : Yi.Tag
-- License : GPL-2
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- A module for CTags integration. Note that this reads the ‘tags’
-- file produced by @hasktags@, not the ‘TAGS’ file which uses a
-- different format (etags).
module Yi.Tag ( lookupTag
, importTagTable
, hintTags
, completeTag
, Tag(..)
, unTag'
, TagTable(..)
, getTags
, setTags
, resetTags
, tagsFileList
, readCTags
) where
import GHC.Generics (Generic)
import Lens.Micro.Platform (makeLenses)
import Data.Binary (Binary)
import qualified Data.ByteString as BS (readFile)
import Data.Default (Default, def)
import qualified Data.Foldable as F (concat)
import Data.Map (Map, fromListWith, keys, lookup)
import Data.Maybe (mapMaybe)
import qualified Data.Text as T (Text, append, isPrefixOf, lines, unpack, words)
import qualified Data.Text.Encoding as E (decodeUtf8)
import qualified Data.Text.Read as R (decimal)
import qualified Yi.CompletionTree as CT
import System.FilePath (takeDirectory, takeFileName, (</>))
import System.FriendlyPath (expandTilda)
import Yi.Config.Simple.Types (Field, customVariable)
import Yi.Editor (EditorM, getEditorDyn, putEditorDyn)
import Yi.Types (YiConfigVariable, YiVariable)
newtype TagsFileList = TagsFileList { _unTagsFileList :: [FilePath] }
instance Default TagsFileList where
def = TagsFileList ["tags"]
instance YiConfigVariable TagsFileList
makeLenses ''TagsFileList
tagsFileList :: Field [FilePath]
tagsFileList = customVariable . unTagsFileList
newtype Tags = Tags (Maybe TagTable) deriving (Binary)
instance Default Tags where
def = Tags Nothing
instance YiVariable Tags
newtype Tag = Tag { _unTag :: T.Text } deriving (Show, Eq, Ord, Binary)
unTag' :: Tag -> T.Text
unTag' = _unTag
data TagTable = TagTable
{ tagFileName :: FilePath
-- ^ local name of the tag file
-- TODO: reload if this file is changed
, tagBaseDir :: FilePath
-- ^ path to the tag file directory
-- tags are relative to this path
, tagFileMap :: Map Tag [(FilePath, Int)]
-- ^ map from tags to files
, tagCompletionTree :: CT.CompletionTree T.Text
-- ^ trie to speed up tag hinting
} deriving (Generic)
-- | Find the location of a tag using the tag table.
-- Returns a full path and line number
lookupTag :: Tag -> TagTable -> [(FilePath, Int)]
lookupTag tag tagTable = do
(file, line) <- F.concat . Data.Map.lookup tag $ tagFileMap tagTable
return (tagBaseDir tagTable </> file, line)
-- | Super simple CTags format 1 parsing algorithm
-- TODO: support search patterns in addition to lineno
readCTags :: T.Text -> Map Tag [(FilePath, Int)]
readCTags =
fromListWith (++) . mapMaybe (parseTagLine . T.words) . T.lines
where parseTagLine (tag:tagfile:lineno:_) =
-- remove ctag control lines
if "!_TAG_" `T.isPrefixOf` tag then Nothing
else Just (Tag tag, [(T.unpack tagfile, getLineNumber lineno)])
where getLineNumber = (\(Right x) -> x) . fmap fst . R.decimal
parseTagLine _ = Nothing
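-- A sketch of the expected input (hypothetical tag-file contents, assuming
-- OverloadedStrings for the Text literal):
--
-- > readCTags "foo\tsrc/Foo.hs\t12\nbar\tsrc/Bar.hs\t3\n"
--
-- yields a map associating @Tag "foo"@ with @[("src/Foo.hs",12)]@ and
-- @Tag "bar"@ with @[("src/Bar.hs",3)]@.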
-- | Read in a tag file from the system
importTagTable :: FilePath -> IO TagTable
importTagTable filename = do
friendlyName <- expandTilda filename
tagStr <- E.decodeUtf8 <$> BS.readFile friendlyName
let cts = readCTags tagStr
return TagTable { tagFileName = takeFileName filename
, tagBaseDir = takeDirectory filename
, tagFileMap = cts
, tagCompletionTree = CT.fromList . map (_unTag) $ keys cts
}
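-- A usage sketch (hypothetical file name and tag):
--
-- > do tt <- importTagTable "tags"
-- >    print (lookupTag (Tag "main") tt)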
-- | Gives all the possible expanded tags that could match a given @prefix@
hintTags :: TagTable -> T.Text -> [T.Text]
hintTags tags prefix = map (prefix `T.append`) sufs
where
sufs :: [T.Text]
sufs = CT.toList (CT.update (tagCompletionTree tags) prefix)
-- | Extends the prefix with the longest extension common to every matching tag
completeTag :: TagTable -> T.Text -> T.Text
completeTag tags prefix =
prefix `T.append` fst (CT.complete (CT.update (tagCompletionTree tags) prefix))
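-- A small example of the intended behaviour (hypothetical table contents):
--
-- > -- given a table tt whose tags are "foldl" and "foldr":
-- > hintTags tt "fol"      -- ["foldl","foldr"] (in some order)
-- > completeTag tt "fol"   -- "fold"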
-- ---------------------------------------------------------------------
-- Direct access interface to TagTable.
-- | Set a new TagTable
setTags :: TagTable -> EditorM ()
setTags = putEditorDyn . Tags . Just
-- | Reset the TagTable
resetTags :: EditorM ()
resetTags = putEditorDyn $ Tags Nothing
-- | Get the currently registered tag table
getTags :: EditorM (Maybe TagTable)
getTags = do
Tags t <- getEditorDyn
return t
instance Binary TagTable
|
noughtmare/yi
|
yi-core/src/Yi/Tag.hs
|
gpl-2.0
| 5,265 | 0 | 14 | 1,383 | 1,152 | 663 | 489 | 94 | 3 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE TupleSections #-}
-- | The general Stack configuration that starts everything off. This should
-- be smart enough to fall back if there is no stack.yaml, instead relying on
-- whatever files are available.
--
-- If there is no stack.yaml, and there is a cabal.config, we
-- read in those constraints, and if there's a cabal.sandbox.config,
-- we read any constraints from there and also find the package
-- database from there, etc. And if there's nothing, we should
-- probably default to behaving like cabal, possibly with spitting out
-- a warning that "you should run `stack init` to make things better".
module Stack.Config
(loadConfig
,packagesParser
) where
import qualified Codec.Archive.Tar as Tar
import qualified Codec.Compression.GZip as GZip
import Control.Applicative
import Control.Concurrent (getNumCapabilities)
import Control.Exception (IOException)
import Control.Monad
import Control.Monad.Catch (Handler(..), MonadCatch, MonadThrow, catches, throwM)
import Control.Monad.IO.Class
import Control.Monad.Logger hiding (Loc)
import Control.Monad.Reader (MonadReader, ask, asks, runReaderT)
import Control.Monad.Trans.Control (MonadBaseControl)
import qualified Crypto.Hash.SHA256 as SHA256
import Data.Aeson.Extended
import qualified Data.ByteString.Base16 as B16
import qualified Data.ByteString.Lazy as L
import qualified Data.IntMap as IntMap
import qualified Data.Map as Map
import Data.Maybe
import Data.Monoid
import qualified Data.Text as T
import Data.Text.Encoding (encodeUtf8, decodeUtf8)
import qualified Data.Yaml as Yaml
import Distribution.System (OS (..), Platform (..), buildPlatform)
import qualified Distribution.Text
import Distribution.Version (simplifyVersionRange)
import Network.HTTP.Client.Conduit (HasHttpManager, getHttpManager, Manager, parseUrl)
import Network.HTTP.Download (download)
import Options.Applicative (Parser, strOption, long, help)
import Path
import Path.IO
import qualified Paths_stack as Meta
import Stack.BuildPlan
import Stack.Constants
import qualified Stack.Docker as Docker
import qualified Stack.Image as Image
import Stack.Init
import Stack.Types
import Stack.Types.Internal
import System.Directory (getAppUserDataDirectory, createDirectoryIfMissing, canonicalizePath)
import System.Environment
import System.IO
import System.Process.Read (getEnvOverride, EnvOverride, unEnvOverride, readInNull)
-- | Get the latest snapshot resolver available.
getLatestResolver
:: (MonadIO m, MonadThrow m, MonadReader env m, HasConfig env, HasHttpManager env, MonadLogger m)
=> m Resolver
getLatestResolver = do
snapshots <- getSnapshots
let mlts = do
(x,y) <- listToMaybe (reverse (IntMap.toList (snapshotsLts snapshots)))
return (LTS x y)
snap =
case mlts of
Nothing -> Nightly (snapshotsNightly snapshots)
Just lts -> lts
return (ResolverSnapshot snap)
-- | Note that this will be @Nothing@ on Windows, which is by design.
defaultStackGlobalConfig :: Maybe (Path Abs File)
defaultStackGlobalConfig = parseAbsFile "/etc/stack/config"
-- Interprets ConfigMonoid options.
configFromConfigMonoid
:: (MonadLogger m, MonadIO m, MonadCatch m, MonadReader env m, HasHttpManager env)
=> Path Abs Dir -- ^ stack root, e.g. ~/.stack
-> Maybe Project
-> ConfigMonoid
-> m Config
configFromConfigMonoid configStackRoot mproject configMonoid@ConfigMonoid{..} = do
let configDocker = Docker.dockerOptsFromMonoid mproject configStackRoot configMonoidDockerOpts
configConnectionCount = fromMaybe 8 configMonoidConnectionCount
configHideTHLoading = fromMaybe True configMonoidHideTHLoading
configLatestSnapshotUrl = fromMaybe
"https://s3.amazonaws.com/haddock.stackage.org/snapshots.json"
configMonoidLatestSnapshotUrl
configPackageIndices = fromMaybe
[PackageIndex
{ indexName = IndexName "Hackage"
, indexLocation = ILGitHttp
"https://github.com/commercialhaskell/all-cabal-hashes.git"
"https://s3.amazonaws.com/hackage.fpcomplete.com/00-index.tar.gz"
, indexDownloadPrefix = "https://s3.amazonaws.com/hackage.fpcomplete.com/package/"
, indexGpgVerify = False
, indexRequireHashes = False
}]
configMonoidPackageIndices
configSystemGHC = fromMaybe True configMonoidSystemGHC
configInstallGHC = fromMaybe False configMonoidInstallGHC
configSkipGHCCheck = fromMaybe False configMonoidSkipGHCCheck
configSkipMsys = fromMaybe False configMonoidSkipMsys
configExtraIncludeDirs = configMonoidExtraIncludeDirs
configExtraLibDirs = configMonoidExtraLibDirs
-- Only place in the codebase where platform is hard-coded. In theory
-- in the future, allow it to be configured.
(Platform defArch defOS) = buildPlatform
arch = fromMaybe defArch
$ configMonoidArch >>= Distribution.Text.simpleParse
os = fromMaybe defOS
$ configMonoidOS >>= Distribution.Text.simpleParse
configPlatform = Platform arch os
configRequireStackVersion = simplifyVersionRange configMonoidRequireStackVersion
configConfigMonoid = configMonoid
configImage = Image.imgOptsFromMonoid configMonoidImageOpts
origEnv <- getEnvOverride configPlatform
let configEnvOverride _ = return origEnv
platform <- runReaderT platformRelDir configPlatform
configLocalPrograms <-
case configPlatform of
Platform _ Windows -> do
progsDir <- getWindowsProgsDir configStackRoot origEnv
return $ progsDir </> $(mkRelDir stackProgName) </> platform
_ -> return $ configStackRoot </> $(mkRelDir "programs") </> platform
configLocalBin <-
case configMonoidLocalBinPath of
Nothing -> do
localDir <- liftIO (getAppUserDataDirectory "local") >>= parseAbsDir
return $ localDir </> $(mkRelDir "bin")
Just userPath ->
(liftIO $ canonicalizePath userPath >>= parseAbsDir)
`catches`
[Handler (\(_ :: IOException) -> throwM $ NoSuchDirectory userPath)
,Handler (\(_ :: PathParseException) -> throwM $ NoSuchDirectory userPath)
]
configJobs <-
case configMonoidJobs of
Nothing -> liftIO getNumCapabilities
Just i -> return i
let configConcurrentTests = fromMaybe True configMonoidConcurrentTests
return Config {..}
-- | Get the directory on Windows where we should install extra programs. For
-- more information, see discussion at:
-- https://github.com/fpco/minghc/issues/43#issuecomment-99737383
getWindowsProgsDir :: MonadThrow m
=> Path Abs Dir
-> EnvOverride
-> m (Path Abs Dir)
getWindowsProgsDir stackRoot m =
case Map.lookup "LOCALAPPDATA" $ unEnvOverride m of
Just t -> do
lad <- parseAbsDir $ T.unpack t
return $ lad </> $(mkRelDir "Programs")
Nothing -> return $ stackRoot </> $(mkRelDir "Programs")
data MiniConfig = MiniConfig Manager Config
instance HasConfig MiniConfig where
getConfig (MiniConfig _ c) = c
instance HasStackRoot MiniConfig
instance HasHttpManager MiniConfig where
getHttpManager (MiniConfig man _) = man
instance HasPlatform MiniConfig
-- | Load the configuration, using current directory, environment variables,
-- and defaults as necessary.
loadConfig :: (MonadLogger m,MonadIO m,MonadCatch m,MonadThrow m,MonadBaseControl IO m,MonadReader env m,HasHttpManager env,HasTerminal env)
=> ConfigMonoid
-- ^ Config monoid from parsed command-line arguments
-> Maybe (Path Abs File)
-- ^ Override stack.yaml
-> m (LoadConfig m)
loadConfig configArgs mstackYaml = do
stackRoot <- determineStackRoot
extraConfigs <- getExtraConfigs stackRoot >>= mapM loadYaml
mproject <- loadProjectConfig mstackYaml
config <- configFromConfigMonoid stackRoot (fmap (\(proj, _, _) -> proj) mproject) $ mconcat $
case mproject of
Nothing -> configArgs : extraConfigs
Just (_, _, projectConfig) -> configArgs : projectConfig : extraConfigs
unless (fromCabalVersion Meta.version `withinRange` configRequireStackVersion config)
(throwM (BadStackVersionException (configRequireStackVersion config)))
menv <- runReaderT getMinimalEnvOverride config
return $ LoadConfig
{ lcConfig = config
, lcLoadBuildConfig = loadBuildConfig menv mproject config stackRoot
, lcProjectRoot = fmap (\(_, fp, _) -> parent fp) mproject
}
-- | Load the build configuration, adding build-specific values to the config loaded by @loadConfig@.
loadBuildConfig :: (MonadLogger m, MonadIO m, MonadCatch m, MonadReader env m, HasHttpManager env, MonadBaseControl IO m, HasTerminal env)
=> EnvOverride
-> Maybe (Project, Path Abs File, ConfigMonoid)
-> Config
-> Path Abs Dir
-> Maybe AbstractResolver -- override resolver
-> NoBuildConfigStrategy
-> m BuildConfig
loadBuildConfig menv mproject config stackRoot mresolver noConfigStrat = do
env <- ask
let miniConfig = MiniConfig (getHttpManager env) config
(project', stackYamlFP) <- case mproject of
Just (project, fp, _) -> return (project, fp)
Nothing -> case noConfigStrat of
ThrowException -> do
currDir <- getWorkingDir
cabalFiles <- findCabalFiles True currDir
throwM $ NoProjectConfigFound currDir
$ Just $ if null cabalFiles then "new" else "init"
ExecStrategy -> do
let dest :: Path Abs File
dest = destDir </> stackDotYaml
destDir = implicitGlobalDir stackRoot
dest' :: FilePath
dest' = toFilePath dest
createTree destDir
exists <- fileExists dest
if exists
then do
ProjectAndConfigMonoid project _ <- loadYaml dest
when (getTerminal env) $
case mresolver of
Nothing ->
$logInfo ("Using resolver: " <> resolverName (projectResolver project) <>
" from global config file: " <> T.pack dest')
Just aresolver -> do
let name =
case aresolver of
ARResolver resolver -> resolverName resolver
ARLatestNightly -> "nightly"
ARLatestLTS -> "lts"
ARLatestLTSMajor x -> T.pack $ "lts-" ++ show x
ARGlobal -> "global"
$logInfo ("Using resolver: " <> name <>
" specified on command line")
return (project, dest)
else do
r <- runReaderT getLatestResolver miniConfig
$logInfo ("Using latest snapshot resolver: " <> resolverName r)
$logInfo ("Writing global (non-project-specific) config file to: " <> T.pack dest')
$logInfo "Note: You can change the snapshot via the resolver field there."
let p = Project
{ projectPackages = mempty
, projectExtraDeps = mempty
, projectFlags = mempty
, projectResolver = r
}
liftIO $ Yaml.encodeFile dest' p
return (p, dest)
resolver <-
case mresolver of
Nothing -> return $ projectResolver project'
Just aresolver -> do
manager <- asks getHttpManager
runReaderT
(makeConcreteResolver aresolver)
(MiniConfig manager config)
let project = project' { projectResolver = resolver }
ghcVersion <-
case projectResolver project of
ResolverSnapshot snapName -> do
mbp <- runReaderT (loadMiniBuildPlan snapName) miniConfig
return $ mbpGhcVersion mbp
ResolverGhc m -> return $ fromMajorVersion m
ResolverCustom _name url -> do
mbp <- runReaderT (parseCustomMiniBuildPlan stackYamlFP url) miniConfig
return $ mbpGhcVersion mbp
let root = parent stackYamlFP
packages' <- mapM (resolvePackageEntry menv root) (projectPackages project)
let packages = Map.fromList $ concat packages'
return BuildConfig
{ bcConfig = config
, bcResolver = projectResolver project
, bcGhcVersionExpected = ghcVersion
, bcPackages = packages
, bcExtraDeps = projectExtraDeps project
, bcRoot = root
, bcStackYaml = stackYamlFP
, bcFlags = projectFlags project
}
-- | Resolve a PackageEntry into a list of paths, downloading and cloning as
-- necessary.
resolvePackageEntry
:: (MonadIO m, MonadThrow m, MonadReader env m, HasHttpManager env, MonadLogger m, MonadCatch m
,MonadBaseControl IO m)
=> EnvOverride
-> Path Abs Dir -- ^ project root
-> PackageEntry
-> m [(Path Abs Dir, Bool)]
resolvePackageEntry menv projRoot pe = do
entryRoot <- resolvePackageLocation menv projRoot (peLocation pe)
paths <-
case peSubdirs pe of
[] -> return [entryRoot]
subs -> mapM (resolveDir entryRoot) subs
case peValidWanted pe of
Nothing -> return ()
Just _ -> $logWarn "Warning: you are using the deprecated valid-wanted field. You should instead use extra-dep. See: https://github.com/commercialhaskell/stack/wiki/stack.yaml#packages"
return $ map (, not $ peExtraDep pe) paths
-- | Resolve a PackageLocation into a path, downloading and cloning as
-- necessary.
resolvePackageLocation
:: (MonadIO m, MonadThrow m, MonadReader env m, HasHttpManager env, MonadLogger m, MonadCatch m
,MonadBaseControl IO m)
=> EnvOverride
-> Path Abs Dir -- ^ project root
-> PackageLocation
-> m (Path Abs Dir)
resolvePackageLocation _ projRoot (PLFilePath fp) = resolveDir projRoot fp
resolvePackageLocation _ projRoot (PLHttpTarball url) = do
let name = T.unpack $ decodeUtf8 $ B16.encode $ SHA256.hash $ encodeUtf8 url
root = projRoot </> workDirRel </> $(mkRelDir "downloaded")
fileRel <- parseRelFile $ name ++ ".tar.gz"
dirRel <- parseRelDir name
dirRelTmp <- parseRelDir $ name ++ ".tmp"
let file = root </> fileRel
dir = root </> dirRel
dirTmp = root </> dirRelTmp
exists <- dirExists dir
unless exists $ do
req <- parseUrl $ T.unpack url
_ <- download req file
removeTreeIfExists dirTmp
liftIO $ withBinaryFile (toFilePath file) ReadMode $ \h -> do
lbs <- L.hGetContents h
let entries = Tar.read $ GZip.decompress lbs
Tar.unpack (toFilePath dirTmp) entries
renameDir dirTmp dir
x <- listDirectory dir
case x of
([dir'], []) -> return dir'
(dirs, files) -> do
removeFileIfExists file
removeTreeIfExists dir
throwM $ UnexpectedTarballContents dirs files
resolvePackageLocation menv projRoot (PLGit url commit) = do
let name = T.unpack $ decodeUtf8 $ B16.encode $ SHA256.hash $ encodeUtf8 $ T.unwords [url, commit]
root = projRoot </> workDirRel </> $(mkRelDir "downloaded")
dirRel <- parseRelDir $ name ++ ".git"
dirRelTmp <- parseRelDir $ name ++ ".git.tmp"
let dir = root </> dirRel
dirTmp = root </> dirRelTmp
exists <- dirExists dir
unless exists $ do
removeTreeIfExists dirTmp
createTree (parent dirTmp)
readInNull (parent dirTmp) "git" menv
[ "clone"
, T.unpack url
, toFilePath dirTmp
]
Nothing
readInNull dirTmp "git" menv
[ "reset"
, "--hard"
, T.unpack commit
]
Nothing
renameDir dirTmp dir
return dir
-- | Get the stack root, e.g. ~/.stack
determineStackRoot :: (MonadIO m, MonadThrow m) => m (Path Abs Dir)
determineStackRoot = do
env <- liftIO getEnvironment
case lookup stackRootEnvVar env of
Nothing -> do
x <- liftIO $ getAppUserDataDirectory stackProgName
parseAbsDir x
Just x -> do
y <- liftIO $ do
createDirectoryIfMissing True x
canonicalizePath x
parseAbsDir y
-- | Determine the extra config file locations which exist.
--
-- Returns most local first
getExtraConfigs :: MonadIO m
=> Path Abs Dir -- ^ stack root
-> m [Path Abs File]
getExtraConfigs stackRoot = liftIO $ do
env <- getEnvironment
mstackConfig <-
maybe (return Nothing) (fmap Just . parseAbsFile)
$ lookup "STACK_CONFIG" env
mstackGlobalConfig <-
maybe (return Nothing) (fmap Just . parseAbsFile)
$ lookup "STACK_GLOBAL_CONFIG" env
filterM fileExists
$ fromMaybe (stackRoot </> stackDotYaml) mstackConfig
: maybe [] return (mstackGlobalConfig <|> defaultStackGlobalConfig)
-- | Load and parse YAML from the given file.
loadYaml :: (FromJSON (a, [JSONWarning]), MonadIO m, MonadLogger m) => Path Abs File -> m a
loadYaml path = do
(result,warnings) <-
liftIO $
Yaml.decodeFileEither (toFilePath path) >>=
either (throwM . ParseConfigFileException path) return
logJSONWarnings (toFilePath path) warnings
return result
-- | Get the location of the project config file, if it exists.
getProjectConfig :: (MonadIO m, MonadThrow m, MonadLogger m)
=> Maybe (Path Abs File)
-- ^ Override stack.yaml
-> m (Maybe (Path Abs File))
getProjectConfig (Just stackYaml) = return $ Just stackYaml
getProjectConfig Nothing = do
env <- liftIO getEnvironment
case lookup "STACK_YAML" env of
Just fp -> do
$logInfo "Getting project config file from STACK_YAML environment"
liftM Just $ case parseAbsFile fp of
Left _ -> do
currDir <- getWorkingDir
resolveFile currDir fp
Right path -> return path
Nothing -> do
currDir <- getWorkingDir
search currDir
where
search dir = do
let fp = dir </> stackDotYaml
fp' = toFilePath fp
$logDebug $ "Checking for project config at: " <> T.pack fp'
exists <- fileExists fp
if exists
then return $ Just fp
else do
let dir' = parent dir
if dir == dir'
-- fully traversed, give up
then return Nothing
else search dir'
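-- For example, starting the search in /home/user/proj/src (hypothetical
-- layout), the candidates tried above are
--
-- > /home/user/proj/src/stack.yaml
-- > /home/user/proj/stack.yaml
-- > /home/user/stack.yaml
-- > ...
--
-- stopping at the filesystem root, assuming 'stackDotYaml' names the usual
-- stack.yaml file.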
-- | Find the project config file location, respecting environment variables
-- and otherwise traversing parents. If no config is found, we supply a default
-- based on current directory.
loadProjectConfig :: (MonadIO m, MonadThrow m, MonadLogger m)
=> Maybe (Path Abs File)
-- ^ Override stack.yaml
-> m (Maybe (Project, Path Abs File, ConfigMonoid))
loadProjectConfig mstackYaml = do
mfp <- getProjectConfig mstackYaml
case mfp of
Just fp -> do
currDir <- getWorkingDir
$logDebug $ "Loading project config file " <>
T.pack (maybe (toFilePath fp) toFilePath (stripDir currDir fp))
load fp
Nothing -> do
$logDebug $ "No project config file found, using defaults."
return Nothing
where
load fp = do
ProjectAndConfigMonoid project config <- loadYaml fp
return $ Just (project, fp, config)
packagesParser :: Parser [String]
packagesParser = many (strOption (long "package" <> help "Additional packages that must be installed"))
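-- For instance, a (hypothetical) command line containing
--
-- > --package foo --package bar
--
-- parses to @["foo","bar"]@.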
|
wskplho/stack
|
src/Stack/Config.hs
|
bsd-3-clause
| 21,247 | 0 | 33 | 6,483 | 4,683 | 2,346 | 2,337 | 414 | 13 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TupleSections #-}
-- | Test suite for Stack.Dot
module Stack.DotSpec where
import Control.Monad (filterM)
import Data.Foldable as F
import Data.Functor.Identity
import Data.List ((\\))
import qualified Data.Map as Map
import Data.Maybe (fromMaybe)
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Text (Text)
import Distribution.License (License (BSD3))
import Stack.Types.PackageName
import Stack.Types.Version
import Test.Hspec
import Test.Hspec.QuickCheck (prop)
import Test.QuickCheck (forAll,choose,Gen)
import Stack.Dot
dummyPayload :: DotPayload
dummyPayload = DotPayload (parseVersionFromString "0.0.0.0") (Just BSD3)
spec :: Spec
spec = do
let graph =
Map.mapKeys pkgName
. fmap (\p -> (Set.map pkgName p, dummyPayload))
. Map.fromList $ [("one",Set.fromList ["base","free"])
,("two",Set.fromList ["base","free","mtl","transformers","one"])
]
describe "Stack.Dot" $ do
it "does nothing if depth is 0" $
resolveDependencies (Just 0) graph stubLoader `shouldBe` return graph
it "with depth 1, more dependencies are resolved" $ do
let graph' = Map.insert (pkgName "cycle")
(Set.singleton (pkgName "cycle"), dummyPayload)
graph
resultGraph = runIdentity (resolveDependencies (Just 0) graph stubLoader)
resultGraph' = runIdentity (resolveDependencies (Just 1) graph' stubLoader)
Map.size resultGraph < Map.size resultGraph' `shouldBe` True
it "cycles are ignored" $ do
let graph' = Map.insert (pkgName "cycle")
(Set.singleton (pkgName "cycle"), dummyPayload)
graph
resultGraph = resolveDependencies Nothing graph stubLoader
resultGraph' = resolveDependencies Nothing graph' stubLoader
fmap Map.size resultGraph' `shouldBe` fmap ((+1) . Map.size) resultGraph
let graphElem e = Set.member e . Set.unions . Map.elems
prop "requested packages are pruned" $ do
let resolvedGraph = runIdentity (resolveDependencies Nothing graph stubLoader)
allPackages g = Set.map show (Map.keysSet g `Set.union` F.fold (fmap fst g))
forAll (sublistOf (Set.toList (allPackages resolvedGraph))) $ \toPrune ->
let pruned = pruneGraph [pkgName "one", pkgName "two"] toPrune resolvedGraph
in Set.null (allPackages pruned `Set.intersection` Set.fromList toPrune)
prop "pruning removes orhpans" $ do
let resolvedGraph = runIdentity (resolveDependencies Nothing graph stubLoader)
allPackages g = Set.map show (Map.keysSet g `Set.union` F.fold (fmap fst g))
orphans g = Map.filterWithKey (\k _ -> not (graphElem k g)) g
forAll (sublistOf (Set.toList (allPackages resolvedGraph))) $ \toPrune ->
let pruned = pruneGraph [pkgName "one", pkgName "two"] toPrune resolvedGraph
in null (Map.keys (orphans (fmap fst pruned)) \\ [pkgName "one", pkgName "two"])
{- Helper functions below -}
-- Backport from QuickCheck 2.8 to 2.7.6
sublistOf :: [a] -> Gen [a]
sublistOf = filterM (\_ -> choose (False, True))
-- Unsafe internal helper to create a package name
pkgName :: Text -> PackageName
pkgName = fromMaybe failure . parsePackageName
where
failure = error "Internal error during package name creation in DotSpec.pkgName"
-- Stub, simulates the function to load package dependencies
stubLoader :: PackageName -> Identity (Set PackageName, DotPayload)
stubLoader name = return . (, dummyPayload) . Set.fromList . map pkgName $ case show name of
"StateVar" -> ["stm","transformers"]
"array" -> []
"bifunctors" -> ["semigroupoids","semigroups","tagged"]
"binary" -> ["array","bytestring","containers"]
"bytestring" -> ["deepseq","ghc-prim","integer-gmp"]
"comonad" -> ["containers","contravariant","distributive"
,"semigroups","tagged","transformers","transformers-compat"
]
"cont" -> ["StateVar","semigroups","transformers","transformers-compat","void"]
"containers" -> ["array","deepseq","ghc-prim"]
"deepseq" -> ["array"]
"distributive" -> ["ghc-prim","tagged","transformers","transformers-compat"]
"free" -> ["bifunctors","comonad","distributive","mtl"
,"prelude-extras","profunctors","semigroupoids"
,"semigroups","template-haskell","transformers"
]
"ghc" -> []
"hashable" -> ["bytestring","ghc-prim","integer-gmp","text"]
"integer" -> []
"mtl" -> ["transformers"]
"nats" -> []
"one" -> ["free"]
"prelude" -> []
"profunctors" -> ["comonad","distributive","semigroupoids","tagged","transformers"]
"semigroupoids" -> ["comonad","containers","contravariant","distributive"
,"semigroups","transformers","transformers-compat"
]
"semigroups" -> ["bytestring","containers","deepseq","hashable"
,"nats","text","unordered-containers"
]
"stm" -> ["array"]
"tagged" -> ["template-haskell"]
"template" -> []
"text" -> ["array","binary","bytestring","deepseq","ghc-prim","integer-gmp"]
"transformers" -> []
"two" -> ["free","mtl","one","transformers"]
"unordered" -> ["deepseq","hashable"]
"void" -> ["ghc-prim","hashable","semigroups"]
_ -> []
|
AndreasPK/stack
|
src/test/Stack/DotSpec.hs
|
bsd-3-clause
| 5,531 | 0 | 24 | 1,274 | 1,561 | 845 | 716 | 102 | 30 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE CPP
, GHCForeignImportPrim
, NoImplicitPrelude
, MagicHash
, UnboxedTuples
, UnliftedFFITypes
#-}
{-# LANGUAGE CApiFFI #-}
-- We believe we could deorphan this module, by moving lots of things
-- around, but we haven't got there yet:
{-# OPTIONS_GHC -Wno-orphans #-}
{-# OPTIONS_HADDOCK hide #-}
-----------------------------------------------------------------------------
-- |
-- Module : GHC.Float
-- Copyright : (c) The University of Glasgow 1994-2002
--                Portions obtained from hbc (c) Lennart Augustsson
-- License : see libraries/base/LICENSE
--
-- Maintainer : [email protected]
-- Stability : internal
-- Portability : non-portable (GHC Extensions)
--
-- The types 'Float' and 'Double', the classes 'Floating' and 'RealFloat' and
-- casting between Word32 and Float and Word64 and Double.
--
-----------------------------------------------------------------------------
#include "ieee-flpt.h"
#include "MachDeps.h"
module GHC.Float
( module GHC.Float
, Float(..), Double(..), Float#, Double#
, double2Int, int2Double, float2Int, int2Float
-- * Monomorphic equality operators
-- | See GHC.Classes#matching_overloaded_methods_in_rules
, eqFloat, eqDouble
) where
import Data.Maybe
import Data.Bits
import GHC.Base
import GHC.List
import GHC.Enum
import GHC.Show
import GHC.Num
import GHC.Real
import GHC.Word
import GHC.Arr
import GHC.Float.RealFracMethods
import GHC.Float.ConversionUtils
import GHC.Integer.Logarithms ( integerLogBase# )
import GHC.Integer.Logarithms.Internals
infixr 8 **
------------------------------------------------------------------------
-- Standard numeric classes
------------------------------------------------------------------------
-- | Trigonometric and hyperbolic functions and related functions.
class (Fractional a) => Floating a where
pi :: a
exp, log, sqrt :: a -> a
(**), logBase :: a -> a -> a
sin, cos, tan :: a -> a
asin, acos, atan :: a -> a
sinh, cosh, tanh :: a -> a
asinh, acosh, atanh :: a -> a
-- | @'log1p' x@ computes @'log' (1 + x)@, but provides more precise
-- results for small (absolute) values of @x@ if possible.
--
-- @since 4.9.0.0
log1p :: a -> a
-- | @'expm1' x@ computes @'exp' x - 1@, but provides more precise
-- results for small (absolute) values of @x@ if possible.
--
-- @since 4.9.0.0
expm1 :: a -> a
-- | @'log1pexp' x@ computes @'log' (1 + 'exp' x)@, but provides more
-- precise results if possible.
--
-- Examples:
--
-- * if @x@ is a large negative number, @'log' (1 + 'exp' x)@ will be
-- imprecise for the reasons given in 'log1p'.
--
-- * if @'exp' x@ is close to @-1@, @'log' (1 + 'exp' x)@ will be
-- imprecise for the reasons given in 'expm1'.
--
-- @since 4.9.0.0
log1pexp :: a -> a
-- | @'log1mexp' x@ computes @'log' (1 - 'exp' x)@, but provides more
-- precise results if possible.
--
-- Examples:
--
-- * if @x@ is a large negative number, @'log' (1 - 'exp' x)@ will be
-- imprecise for the reasons given in 'log1p'.
--
-- * if @'exp' x@ is close to @1@, @'log' (1 - 'exp' x)@ will be
-- imprecise for the reasons given in 'expm1'.
--
-- @since 4.9.0.0
log1mexp :: a -> a
{-# INLINE (**) #-}
{-# INLINE logBase #-}
{-# INLINE sqrt #-}
{-# INLINE tan #-}
{-# INLINE tanh #-}
x ** y = exp (log x * y)
logBase x y = log y / log x
sqrt x = x ** 0.5
tan x = sin x / cos x
tanh x = sinh x / cosh x
{-# INLINE log1p #-}
{-# INLINE expm1 #-}
{-# INLINE log1pexp #-}
{-# INLINE log1mexp #-}
log1p x = log (1 + x)
expm1 x = exp x - 1
log1pexp x = log1p (exp x)
log1mexp x = log1p (negate (exp x))
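-- A small illustration of why 'log1p' and friends exist (GHCi sketch; the
-- exact printed values may vary by platform):
--
-- > log (1 + 1e-20) :: Double     -- 0.0: the 1e-20 is lost when added to 1
-- > log1p 1e-20     :: Double     -- ~ 1.0e-20: the small value is preserved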
-- | Efficient, machine-independent access to the components of a
-- floating-point number.
class (RealFrac a, Floating a) => RealFloat a where
-- | a constant function, returning the radix of the representation
-- (often @2@)
floatRadix :: a -> Integer
-- | a constant function, returning the number of digits of
-- 'floatRadix' in the significand
floatDigits :: a -> Int
-- | a constant function, returning the lowest and highest values
-- the exponent may assume
floatRange :: a -> (Int,Int)
-- | The function 'decodeFloat' applied to a real floating-point
-- number returns the significand expressed as an 'Integer' and an
-- appropriately scaled exponent (an 'Int'). If @'decodeFloat' x@
-- yields @(m,n)@, then @x@ is equal in value to @m*b^^n@, where @b@
-- is the floating-point radix, and furthermore, either @m@ and @n@
-- are both zero or else @b^(d-1) <= 'abs' m < b^d@, where @d@ is
-- the value of @'floatDigits' x@.
-- In particular, @'decodeFloat' 0 = (0,0)@. If the type
-- contains a negative zero, also @'decodeFloat' (-0.0) = (0,0)@.
-- /The result of/ @'decodeFloat' x@ /is unspecified if either of/
-- @'isNaN' x@ /or/ @'isInfinite' x@ /is/ 'True'.
decodeFloat :: a -> (Integer,Int)
-- | 'encodeFloat' performs the inverse of 'decodeFloat' in the
-- sense that for finite @x@ with the exception of @-0.0@,
-- @'uncurry' 'encodeFloat' ('decodeFloat' x) = x@.
-- @'encodeFloat' m n@ is one of the two closest representable
-- floating-point numbers to @m*b^^n@ (or @±Infinity@ if overflow
-- occurs); usually the closer, but if @m@ contains too many bits,
-- the result may be rounded in the wrong direction.
encodeFloat :: Integer -> Int -> a
-- | 'exponent' corresponds to the second component of 'decodeFloat'.
-- @'exponent' 0 = 0@ and for finite nonzero @x@,
-- @'exponent' x = snd ('decodeFloat' x) + 'floatDigits' x@.
-- If @x@ is a finite floating-point number, it is equal in value to
-- @'significand' x * b ^^ 'exponent' x@, where @b@ is the
-- floating-point radix.
-- The behaviour is unspecified on infinite or @NaN@ values.
exponent :: a -> Int
-- | The first component of 'decodeFloat', scaled to lie in the open
-- interval (@-1@,@1@), either @0.0@ or of absolute value @>= 1\/b@,
-- where @b@ is the floating-point radix.
-- The behaviour is unspecified on infinite or @NaN@ values.
significand :: a -> a
-- | multiplies a floating-point number by an integer power of the radix
scaleFloat :: Int -> a -> a
-- | 'True' if the argument is an IEEE \"not-a-number\" (NaN) value
isNaN :: a -> Bool
-- | 'True' if the argument is an IEEE infinity or negative infinity
isInfinite :: a -> Bool
-- | 'True' if the argument is too small to be represented in
-- normalized format
isDenormalized :: a -> Bool
-- | 'True' if the argument is an IEEE negative zero
isNegativeZero :: a -> Bool
-- | 'True' if the argument is an IEEE floating point number
isIEEE :: a -> Bool
-- | a version of arctangent taking two real floating-point arguments.
-- For real floating @x@ and @y@, @'atan2' y x@ computes the angle
-- (from the positive x-axis) of the vector from the origin to the
-- point @(x,y)@. @'atan2' y x@ returns a value in the range [@-pi@,
-- @pi@]. It follows the Common Lisp semantics for the origin when
-- signed zeroes are supported. @'atan2' y 1@, with @y@ in a type
-- that is 'RealFloat', should return the same value as @'atan' y@.
-- A default definition of 'atan2' is provided, but implementors
-- can provide a more accurate implementation.
atan2 :: a -> a -> a
exponent x = if m == 0 then 0 else n + floatDigits x
where (m,n) = decodeFloat x
significand x = encodeFloat m (negate (floatDigits x))
where (m,_) = decodeFloat x
scaleFloat 0 x = x
scaleFloat k x
| isFix = x
| otherwise = encodeFloat m (n + clamp b k)
where (m,n) = decodeFloat x
(l,h) = floatRange x
d = floatDigits x
b = h - l + 4*d
-- n+k may overflow, which would lead
-- to wrong results, hence we clamp the
-- scaling parameter.
-- If n + k would be larger than h,
                          -- n + clamp b k must be too, similar
-- for smaller than l - d.
-- Add a little extra to keep clear
-- from the boundary cases.
isFix = x == 0 || isNaN x || isInfinite x
atan2 y x
| x > 0 = atan (y/x)
| x == 0 && y > 0 = pi/2
| x < 0 && y > 0 = pi + atan (y/x)
|(x <= 0 && y < 0) ||
(x < 0 && isNegativeZero y) ||
(isNegativeZero x && isNegativeZero y)
= -atan2 (-y) x
| y == 0 && (x < 0 || isNegativeZero x)
= pi -- must be after the previous test on zero y
| x==0 && y==0 = y -- must be after the other double zero tests
| otherwise = x + y -- x or y is a NaN, return a NaN (via +)
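-- A quick sanity check of the default 'atan2' (GHCi sketch):
--
-- > atan2 1 1    -- ~ pi/4
-- > atan2 1 (-1) -- ~ 3*pi/4
-- > atan2 0 (-1) -- == pi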
------------------------------------------------------------------------
-- Float
------------------------------------------------------------------------
-- | @since 2.01
instance Num Float where
(+) x y = plusFloat x y
(-) x y = minusFloat x y
negate x = negateFloat x
(*) x y = timesFloat x y
abs x = fabsFloat x
signum x | x > 0 = 1
| x < 0 = negateFloat 1
| otherwise = x -- handles 0.0, (-0.0), and NaN
{-# INLINE fromInteger #-}
fromInteger i = F# (floatFromInteger i)
-- | @since 2.01
instance Real Float where
toRational (F# x#) =
case decodeFloat_Int# x# of
(# m#, e# #)
| isTrue# (e# >=# 0#) ->
(smallInteger m# `shiftLInteger` e#) :% 1
| isTrue# ((int2Word# m# `and#` 1##) `eqWord#` 0##) ->
case elimZerosInt# m# (negateInt# e#) of
(# n, d# #) -> n :% shiftLInteger 1 d#
| otherwise ->
smallInteger m# :% shiftLInteger 1 (negateInt# e#)
-- | @since 2.01
instance Fractional Float where
(/) x y = divideFloat x y
{-# INLINE fromRational #-}
fromRational (n:%d) = rationalToFloat n d
recip x = 1.0 / x
rationalToFloat :: Integer -> Integer -> Float
{-# NOINLINE [1] rationalToFloat #-}
rationalToFloat n 0
| n == 0 = 0/0
| n < 0 = (-1)/0
| otherwise = 1/0
rationalToFloat n d
| n == 0 = encodeFloat 0 0
| n < 0 = -(fromRat'' minEx mantDigs (-n) d)
| otherwise = fromRat'' minEx mantDigs n d
where
minEx = FLT_MIN_EXP
mantDigs = FLT_MANT_DIG
-- RULES for Integer and Int
{-# RULES
"properFraction/Float->Integer" properFraction = properFractionFloatInteger
"truncate/Float->Integer" truncate = truncateFloatInteger
"floor/Float->Integer" floor = floorFloatInteger
"ceiling/Float->Integer" ceiling = ceilingFloatInteger
"round/Float->Integer" round = roundFloatInteger
"properFraction/Float->Int" properFraction = properFractionFloatInt
"truncate/Float->Int" truncate = float2Int
"floor/Float->Int" floor = floorFloatInt
"ceiling/Float->Int" ceiling = ceilingFloatInt
"round/Float->Int" round = roundFloatInt
#-}
-- | @since 2.01
instance RealFrac Float where
-- ceiling, floor, and truncate are all small
{-# INLINE [1] ceiling #-}
{-# INLINE [1] floor #-}
{-# INLINE [1] truncate #-}
-- We assume that FLT_RADIX is 2 so that we can use more efficient code
#if FLT_RADIX != 2
#error FLT_RADIX must be 2
#endif
properFraction (F# x#)
= case decodeFloat_Int# x# of
(# m#, n# #) ->
let m = I# m#
n = I# n#
in
if n >= 0
then (fromIntegral m * (2 ^ n), 0.0)
else let i = if m >= 0 then m `shiftR` negate n
else negate (negate m `shiftR` negate n)
f = m - (i `shiftL` negate n)
in (fromIntegral i, encodeFloat (fromIntegral f) n)
truncate x = case properFraction x of
(n,_) -> n
round x = case properFraction x of
(n,r) -> let
m = if r < 0.0 then n - 1 else n + 1
half_down = abs r - 0.5
in
case (compare half_down 0.0) of
LT -> n
EQ -> if even n then n else m
GT -> m
ceiling x = case properFraction x of
(n,r) -> if r > 0.0 then n + 1 else n
floor x = case properFraction x of
(n,r) -> if r < 0.0 then n - 1 else n
-- | @since 2.01
instance Floating Float where
pi = 3.141592653589793238
exp x = expFloat x
log x = logFloat x
sqrt x = sqrtFloat x
sin x = sinFloat x
cos x = cosFloat x
tan x = tanFloat x
asin x = asinFloat x
acos x = acosFloat x
atan x = atanFloat x
sinh x = sinhFloat x
cosh x = coshFloat x
tanh x = tanhFloat x
(**) x y = powerFloat x y
logBase x y = log y / log x
asinh x = log (x + sqrt (1.0+x*x))
acosh x = log (x + (x+1.0) * sqrt ((x-1.0)/(x+1.0)))
atanh x = 0.5 * log ((1.0+x) / (1.0-x))
log1p = log1pFloat
expm1 = expm1Float
log1mexp a
| a <= log 2 = log (negate (expm1Float a))
| otherwise = log1pFloat (negate (exp a))
{-# INLINE log1mexp #-}
log1pexp a
| a <= 18 = log1pFloat (exp a)
| a <= 100 = a + exp (negate a)
| otherwise = a
{-# INLINE log1pexp #-}
-- | @since 2.01
instance RealFloat Float where
floatRadix _ = FLT_RADIX -- from float.h
floatDigits _ = FLT_MANT_DIG -- ditto
floatRange _ = (FLT_MIN_EXP, FLT_MAX_EXP) -- ditto
decodeFloat (F# f#) = case decodeFloat_Int# f# of
(# i, e #) -> (smallInteger i, I# e)
encodeFloat i (I# e) = F# (encodeFloatInteger i e)
exponent x = case decodeFloat x of
(m,n) -> if m == 0 then 0 else n + floatDigits x
significand x = case decodeFloat x of
(m,_) -> encodeFloat m (negate (floatDigits x))
scaleFloat 0 x = x
scaleFloat k x
| isFix = x
| otherwise = case decodeFloat x of
(m,n) -> encodeFloat m (n + clamp bf k)
where bf = FLT_MAX_EXP - (FLT_MIN_EXP) + 4*FLT_MANT_DIG
isFix = x == 0 || isFloatFinite x == 0
isNaN x = 0 /= isFloatNaN x
isInfinite x = 0 /= isFloatInfinite x
isDenormalized x = 0 /= isFloatDenormalized x
isNegativeZero x = 0 /= isFloatNegativeZero x
isIEEE _ = True
-- | @since 2.01
instance Show Float where
showsPrec x = showSignedFloat showFloat x
showList = showList__ (showsPrec 0)
------------------------------------------------------------------------
-- Double
------------------------------------------------------------------------
-- | @since 2.01
instance Num Double where
(+) x y = plusDouble x y
(-) x y = minusDouble x y
negate x = negateDouble x
(*) x y = timesDouble x y
abs x = fabsDouble x
signum x | x > 0 = 1
| x < 0 = negateDouble 1
| otherwise = x -- handles 0.0, (-0.0), and NaN
{-# INLINE fromInteger #-}
fromInteger i = D# (doubleFromInteger i)
-- | @since 2.01
instance Real Double where
toRational (D# x#) =
case decodeDoubleInteger x# of
(# m, e# #)
| isTrue# (e# >=# 0#) ->
shiftLInteger m e# :% 1
| isTrue# ((integerToWord m `and#` 1##) `eqWord#` 0##) ->
case elimZerosInteger m (negateInt# e#) of
(# n, d# #) -> n :% shiftLInteger 1 d#
| otherwise ->
m :% shiftLInteger 1 (negateInt# e#)
-- | @since 2.01
instance Fractional Double where
(/) x y = divideDouble x y
{-# INLINE fromRational #-}
fromRational (n:%d) = rationalToDouble n d
recip x = 1.0 / x
rationalToDouble :: Integer -> Integer -> Double
{-# NOINLINE [1] rationalToDouble #-}
rationalToDouble n 0
| n == 0 = 0/0
| n < 0 = (-1)/0
| otherwise = 1/0
rationalToDouble n d
| n == 0 = encodeFloat 0 0
| n < 0 = -(fromRat'' minEx mantDigs (-n) d)
| otherwise = fromRat'' minEx mantDigs n d
where
minEx = DBL_MIN_EXP
mantDigs = DBL_MANT_DIG
-- | @since 2.01
instance Floating Double where
pi = 3.141592653589793238
exp x = expDouble x
log x = logDouble x
sqrt x = sqrtDouble x
sin x = sinDouble x
cos x = cosDouble x
tan x = tanDouble x
asin x = asinDouble x
acos x = acosDouble x
atan x = atanDouble x
sinh x = sinhDouble x
cosh x = coshDouble x
tanh x = tanhDouble x
(**) x y = powerDouble x y
logBase x y = log y / log x
asinh x = log (x + sqrt (1.0+x*x))
acosh x = log (x + (x+1.0) * sqrt ((x-1.0)/(x+1.0)))
atanh x = 0.5 * log ((1.0+x) / (1.0-x))
log1p = log1pDouble
expm1 = expm1Double
log1mexp a
| a <= log 2 = log (negate (expm1Double a))
| otherwise = log1pDouble (negate (exp a))
{-# INLINE log1mexp #-}
log1pexp a
| a <= 18 = log1pDouble (exp a)
| a <= 100 = a + exp (negate a)
| otherwise = a
{-# INLINE log1pexp #-}
-- RULES for Integer and Int
{-# RULES
"properFraction/Double->Integer" properFraction = properFractionDoubleInteger
"truncate/Double->Integer" truncate = truncateDoubleInteger
"floor/Double->Integer" floor = floorDoubleInteger
"ceiling/Double->Integer" ceiling = ceilingDoubleInteger
"round/Double->Integer" round = roundDoubleInteger
"properFraction/Double->Int" properFraction = properFractionDoubleInt
"truncate/Double->Int" truncate = double2Int
"floor/Double->Int" floor = floorDoubleInt
"ceiling/Double->Int" ceiling = ceilingDoubleInt
"round/Double->Int" round = roundDoubleInt
#-}
-- | @since 2.01
instance RealFrac Double where
-- ceiling, floor, and truncate are all small
{-# INLINE [1] ceiling #-}
{-# INLINE [1] floor #-}
{-# INLINE [1] truncate #-}
properFraction x
= case (decodeFloat x) of { (m,n) ->
if n >= 0 then
(fromInteger m * 2 ^ n, 0.0)
else
case (quotRem m (2^(negate n))) of { (w,r) ->
(fromInteger w, encodeFloat r n)
}
}
truncate x = case properFraction x of
(n,_) -> n
round x = case properFraction x of
(n,r) -> let
m = if r < 0.0 then n - 1 else n + 1
half_down = abs r - 0.5
in
case (compare half_down 0.0) of
LT -> n
EQ -> if even n then n else m
GT -> m
ceiling x = case properFraction x of
(n,r) -> if r > 0.0 then n + 1 else n
floor x = case properFraction x of
(n,r) -> if r < 0.0 then n - 1 else n
-- | @since 2.01
instance RealFloat Double where
floatRadix _ = FLT_RADIX -- from float.h
floatDigits _ = DBL_MANT_DIG -- ditto
floatRange _ = (DBL_MIN_EXP, DBL_MAX_EXP) -- ditto
decodeFloat (D# x#)
= case decodeDoubleInteger x# of
(# i, j #) -> (i, I# j)
encodeFloat i (I# j) = D# (encodeDoubleInteger i j)
exponent x = case decodeFloat x of
(m,n) -> if m == 0 then 0 else n + floatDigits x
significand x = case decodeFloat x of
(m,_) -> encodeFloat m (negate (floatDigits x))
scaleFloat 0 x = x
scaleFloat k x
| isFix = x
| otherwise = case decodeFloat x of
(m,n) -> encodeFloat m (n + clamp bd k)
where bd = DBL_MAX_EXP - (DBL_MIN_EXP) + 4*DBL_MANT_DIG
isFix = x == 0 || isDoubleFinite x == 0
isNaN x = 0 /= isDoubleNaN x
isInfinite x = 0 /= isDoubleInfinite x
isDenormalized x = 0 /= isDoubleDenormalized x
isNegativeZero x = 0 /= isDoubleNegativeZero x
isIEEE _ = True
-- | @since 2.01
instance Show Double where
showsPrec x = showSignedFloat showFloat x
showList = showList__ (showsPrec 0)
------------------------------------------------------------------------
-- Enum instances
------------------------------------------------------------------------
{-
The @Enum@ instances for Floats and Doubles are slightly unusual.
The @toEnum@ function truncates numbers to Int. The definitions
of @enumFrom@ and @enumFromThen@ allow floats to be used in arithmetic
series: [0,0.1 .. 1.0]. However, roundoff errors make these somewhat
dubious. This example may have either 10 or 11 elements, depending on
how 0.1 is represented.
NOTE: The instances for Float and Double do not make use of the default
methods for @enumFromTo@ and @enumFromThenTo@, as these rely on there being
a `non-lossy' conversion to and from Ints. Instead we make use of the
1.2 default methods (back in the days when Enum had Ord as a superclass)
for these (@numericEnumFromTo@ and @numericEnumFromThenTo@ below.)
-}
-- | @since 2.01
instance Enum Float where
succ x = x + 1
pred x = x - 1
toEnum = int2Float
fromEnum = fromInteger . truncate -- may overflow
enumFrom = numericEnumFrom
enumFromTo = numericEnumFromTo
enumFromThen = numericEnumFromThen
enumFromThenTo = numericEnumFromThenTo
-- | @since 2.01
instance Enum Double where
succ x = x + 1
pred x = x - 1
toEnum = int2Double
fromEnum = fromInteger . truncate -- may overflow
enumFrom = numericEnumFrom
enumFromTo = numericEnumFromTo
enumFromThen = numericEnumFromThen
enumFromThenTo = numericEnumFromThenTo
------------------------------------------------------------------------
-- Printing floating point
------------------------------------------------------------------------
-- | Show a signed 'RealFloat' value to full precision
-- using standard decimal notation for arguments whose absolute value lies
-- between @0.1@ and @9,999,999@, and scientific notation otherwise.
showFloat :: (RealFloat a) => a -> ShowS
showFloat x = showString (formatRealFloat FFGeneric Nothing x)
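-- For instance (GHCi sketch, using the Double instances defined below):
--
-- > showFloat (0.1   :: Double) ""   -- "0.1"
-- > showFloat (1.0e8 :: Double) ""   -- "1.0e8"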
-- These are the format types. This type is not exported.
data FFFormat = FFExponent | FFFixed | FFGeneric
-- This is just a compatibility stub, as the "alt" argument formerly
-- didn't exist.
formatRealFloat :: (RealFloat a) => FFFormat -> Maybe Int -> a -> String
formatRealFloat fmt decs x = formatRealFloatAlt fmt decs False x
formatRealFloatAlt :: (RealFloat a) => FFFormat -> Maybe Int -> Bool -> a
-> String
formatRealFloatAlt fmt decs alt x
| isNaN x = "NaN"
| isInfinite x = if x < 0 then "-Infinity" else "Infinity"
| x < 0 || isNegativeZero x = '-':doFmt fmt (floatToDigits (toInteger base) (-x))
| otherwise = doFmt fmt (floatToDigits (toInteger base) x)
where
base = 10
doFmt format (is, e) =
let ds = map intToDigit is in
case format of
FFGeneric ->
doFmt (if e < 0 || e > 7 then FFExponent else FFFixed)
(is,e)
FFExponent ->
case decs of
Nothing ->
let show_e' = show (e-1) in
case ds of
"0" -> "0.0e0"
[d] -> d : ".0e" ++ show_e'
(d:ds') -> d : '.' : ds' ++ "e" ++ show_e'
[] -> errorWithoutStackTrace "formatRealFloat/doFmt/FFExponent: []"
Just dec ->
let dec' = max dec 1 in
case is of
[0] -> '0' :'.' : take dec' (repeat '0') ++ "e0"
_ ->
let
(ei,is') = roundTo base (dec'+1) is
(d:ds') = map intToDigit (if ei > 0 then init is' else is')
in
d:'.':ds' ++ 'e':show (e-1+ei)
FFFixed ->
let
mk0 ls = case ls of { "" -> "0" ; _ -> ls}
in
case decs of
Nothing
| e <= 0 -> "0." ++ replicate (-e) '0' ++ ds
| otherwise ->
let
f 0 s rs = mk0 (reverse s) ++ '.':mk0 rs
f n s "" = f (n-1) ('0':s) ""
f n s (r:rs) = f (n-1) (r:s) rs
in
f e "" ds
Just dec ->
let dec' = max dec 0 in
if e >= 0 then
let
(ei,is') = roundTo base (dec' + e) is
(ls,rs) = splitAt (e+ei) (map intToDigit is')
in
mk0 ls ++ (if null rs && not alt then "" else '.':rs)
else
let
(ei,is') = roundTo base dec' (replicate (-e) 0 ++ is)
d:ds' = map intToDigit (if ei > 0 then is' else 0:is')
in
d : (if null ds' && not alt then "" else '.':ds')
roundTo :: Int -> Int -> [Int] -> (Int,[Int])
roundTo base d is =
case f d True is of
x@(0,_) -> x
(1,xs) -> (1, 1:xs)
_ -> errorWithoutStackTrace "roundTo: bad Value"
where
b2 = base `quot` 2
f n _ [] = (0, replicate n 0)
f 0 e (x:xs) | x == b2 && e && all (== 0) xs = (0, []) -- Round to even when at exactly half the base
| otherwise = (if x >= b2 then 1 else 0, [])
f n _ (i:xs)
| i' == base = (1,0:ds)
| otherwise = (0,i':ds)
where
(c,ds) = f (n-1) (even i) xs
i' = c + i
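-- A couple of worked examples (as far as I read the rounding rules above):
--
-- > roundTo 10 2 [1,2,5]   -- (0,[1,2]): exactly half, ties to the even digit
-- > roundTo 10 2 [1,5,5]   -- (0,[1,6]): exactly half, previous digit odd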
-- Based on "Printing Floating-Point Numbers Quickly and Accurately"
-- by R.G. Burger and R.K. Dybvig in PLDI 96.
-- This version uses a much slower logarithm estimator. It should be improved.
-- | 'floatToDigits' takes a base and a non-negative 'RealFloat' number,
-- and returns a list of digits and an exponent.
-- In particular, if @x>=0@, and
--
-- > floatToDigits base x = ([d1,d2,...,dn], e)
--
-- then
--
-- (1) @n >= 1@
--
-- (2) @x = 0.d1d2...dn * (base**e)@
--
-- (3) @0 <= di <= base-1@
floatToDigits :: (RealFloat a) => Integer -> a -> ([Int], Int)
floatToDigits _ 0 = ([0], 0)
floatToDigits base x =
let
(f0, e0) = decodeFloat x
(minExp0, _) = floatRange x
p = floatDigits x
b = floatRadix x
minExp = minExp0 - p -- the real minimum exponent
-- Haskell requires that f be adjusted so denormalized numbers
-- will have an impossibly low exponent. Adjust for this.
(f, e) =
let n = minExp - e0 in
if n > 0 then (f0 `quot` (expt b n), e0+n) else (f0, e0)
(r, s, mUp, mDn) =
if e >= 0 then
let be = expt b e in
if f == expt b (p-1) then
(f*be*b*2, 2*b, be*b, be) -- according to Burger and Dybvig
else
(f*be*2, 2, be, be)
else
if e > minExp && f == expt b (p-1) then
(f*b*2, expt b (-e+1)*2, b, 1)
else
(f*2, expt b (-e)*2, 1, 1)
k :: Int
k =
let
k0 :: Int
k0 =
if b == 2 && base == 10 then
-- logBase 10 2 is very slightly larger than 8651/28738
-- (about 5.3558e-10), so if log x >= 0, the approximation
-- k1 is too small, hence we add one and need one fixup step less.
-- If log x < 0, the approximation errs rather on the high side.
-- That is usually more than compensated for by ignoring the
-- fractional part of logBase 2 x, but when x is a power of 1/2
-- or slightly larger and the exponent is a multiple of the
-- denominator of the rational approximation to logBase 10 2,
-- k1 is larger than logBase 10 x. If k1 > 1 + logBase 10 x,
-- we get a leading zero-digit we don't want.
-- With the approximation 3/10, this happened for
-- 0.5^1030, 0.5^1040, ..., 0.5^1070 and values close above.
-- The approximation 8651/28738 guarantees k1 < 1 + logBase 10 x
-- for IEEE-ish floating point types with exponent fields
-- <= 17 bits and mantissae of several thousand bits, earlier
-- convergents to logBase 10 2 would fail for long double.
-- Using quot instead of div is a little faster and requires
-- fewer fixup steps for negative lx.
let lx = p - 1 + e0
k1 = (lx * 8651) `quot` 28738
in if lx >= 0 then k1 + 1 else k1
else
-- f :: Integer, log :: Float -> Float,
-- ceiling :: Float -> Int
ceiling ((log (fromInteger (f+1) :: Float) +
fromIntegral e * log (fromInteger b)) /
log (fromInteger base))
--WAS: fromInt e * log (fromInteger b))
fixup n =
if n >= 0 then
if r + mUp <= expt base n * s then n else fixup (n+1)
else
if expt base (-n) * (r + mUp) <= s then n else fixup (n+1)
in
fixup k0
gen ds rn sN mUpN mDnN =
let
(dn, rn') = (rn * base) `quotRem` sN
mUpN' = mUpN * base
mDnN' = mDnN * base
in
case (rn' < mDnN', rn' + mUpN' > sN) of
(True, False) -> dn : ds
(False, True) -> dn+1 : ds
(True, True) -> if rn' * 2 < sN then dn : ds else dn+1 : ds
(False, False) -> gen (dn:ds) rn' sN mUpN' mDnN'
rds =
if k >= 0 then
gen [] r (s * expt base k) mUp mDn
else
let bk = expt base (-k) in
gen [] (r * bk) s (mUp * bk) (mDn * bk)
in
(map fromIntegral (reverse rds), k)
------------------------------------------------------------------------
-- Converting from a Rational to a RealFloat
------------------------------------------------------------------------
{-
[In response to a request for documentation of how fromRational works,
Joe Fasel writes:] A quite reasonable request! This code was added to
the Prelude just before the 1.2 release, when Lennart, working with an
early version of hbi, noticed that (read . show) was not the identity
for floating-point numbers. (There was a one-bit error about half the
time.) The original version of the conversion function was in fact
simply a floating-point divide, as you suggest above. The new version
is, I grant you, somewhat denser.
Unfortunately, Joe's code doesn't work! Here's an example:
main = putStr (shows (1.82173691287639817263897126389712638972163e-300::Double) "\n")
This program prints
0.0000000000000000
instead of
1.8217369128763981e-300
Here's Joe's code:
\begin{pseudocode}
fromRat :: (RealFloat a) => Rational -> a
fromRat x = x'
where x' = f e
-- If the exponent of the nearest floating-point number to x
-- is e, then the significand is the integer nearest xb^(-e),
-- where b is the floating-point radix. We start with a good
-- guess for e, and if it is correct, the exponent of the
-- floating-point number we construct will again be e. If
-- not, one more iteration is needed.
f e = if e' == e then y else f e'
where y = encodeFloat (round (x * (1 % b)^^e)) e
(_,e') = decodeFloat y
b = floatRadix x'
-- We obtain a trial exponent by doing a floating-point
-- division of x's numerator by its denominator. The
-- result of this division may not itself be the ultimate
-- result, because of an accumulation of three rounding
-- errors.
(s,e) = decodeFloat (fromInteger (numerator x) `asTypeOf` x'
/ fromInteger (denominator x))
\end{pseudocode}
Now, here's Lennart's code (which works):
-}
-- | Converts a 'Rational' value into any type in class 'RealFloat'.
{-# RULES
"fromRat/Float" fromRat = (fromRational :: Rational -> Float)
"fromRat/Double" fromRat = (fromRational :: Rational -> Double)
#-}
{-# NOINLINE [1] fromRat #-}
fromRat :: (RealFloat a) => Rational -> a
-- Deal with special cases first, delegating the real work to fromRat'
fromRat (n :% 0) | n > 0 = 1/0 -- +Infinity
| n < 0 = -1/0 -- -Infinity
| otherwise = 0/0 -- NaN
fromRat (n :% d) | n > 0 = fromRat' (n :% d)
| n < 0 = - fromRat' ((-n) :% d)
| otherwise = encodeFloat 0 0 -- Zero
-- Conversion process:
-- Scale the rational number by the RealFloat base until
-- it lies in the range of the mantissa (as used by decodeFloat/encodeFloat).
-- Then round the rational to an Integer and encode it with the exponent
-- that we got from the scaling.
-- To speed up the scaling process we compute the log2 of the number to get
-- a first guess of the exponent.
fromRat' :: (RealFloat a) => Rational -> a
-- Invariant: argument is strictly positive
fromRat' x = r
where b = floatRadix r
p = floatDigits r
(minExp0, _) = floatRange r
minExp = minExp0 - p -- the real minimum exponent
xMax = toRational (expt b p)
p0 = (integerLogBase b (numerator x) - integerLogBase b (denominator x) - p) `max` minExp
-- if x = n/d and ln = integerLogBase b n, ld = integerLogBase b d,
-- then b^(ln-ld-1) < x < b^(ln-ld+1)
f = if p0 < 0 then 1 :% expt b (-p0) else expt b p0 :% 1
x0 = x / f
-- if ln - ld >= minExp0, then b^(p-1) < x0 < b^(p+1), so there's at most
-- one scaling step needed, otherwise, x0 < b^p and no scaling is needed
(x', p') = if x0 >= xMax then (x0 / toRational b, p0+1) else (x0, p0)
r = encodeFloat (round x') p'
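-- A hedged illustration of the conversion (assumes an IEEE 'Double' and
-- Data.Ratio.(%) in scope):
--
-- >>> fromRat (1 % 3) :: Double
-- 0.3333333333333333
--
-- i.e. rounding the scaled significand once gives the correctly rounded
-- result instead of accumulating several division errors.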
-- Exponentiation with a cache for the most common numbers.
minExpt, maxExpt :: Int
minExpt = 0
maxExpt = 1100
expt :: Integer -> Int -> Integer
expt base n =
if base == 2 && n >= minExpt && n <= maxExpt then
expts!n
else
if base == 10 && n <= maxExpt10 then
expts10!n
else
base^n
expts :: Array Int Integer
expts = array (minExpt,maxExpt) [(n,2^n) | n <- [minExpt .. maxExpt]]
maxExpt10 :: Int
maxExpt10 = 324
expts10 :: Array Int Integer
expts10 = array (minExpt,maxExpt10) [(n,10^n) | n <- [minExpt .. maxExpt10]]
-- Compute the (floor of the) log of i in base b.
-- The simplest way would be to just divide i by b until it's smaller than b, but that would
-- be very slow! We are just slightly more clever, except for base 2, where
-- we take advantage of the representation of Integers.
-- The general case could be improved by a lookup table for
-- approximating the result by integerLog2 i / integerLog2 b.
integerLogBase :: Integer -> Integer -> Int
integerLogBase b i
| i < b = 0
| b == 2 = I# (integerLog2# i)
| otherwise = I# (integerLogBase# b i)
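-- A hedged illustration of the floor-of-log behaviour:
--
-- >>> integerLogBase 10 1000
-- 3
-- >>> integerLogBase 2 1000000
-- 19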
{-
Unfortunately, the old conversion code was awfully slow due to
a) a slow integer logarithm
b) repeated calculation of gcd's
For the case of Rational's coming from a Float or Double via toRational,
we can exploit the fact that the denominator is a power of two, which for
these brings a huge speedup since we need only shift and add instead
of division.
The code below is an adaptation of fromRat' for the conversion to
Float or Double exploiting the known floatRadix and avoiding
divisions as much as possible.
-}
{-# SPECIALISE fromRat'' :: Int -> Int -> Integer -> Integer -> Float,
Int -> Int -> Integer -> Integer -> Double #-}
fromRat'' :: RealFloat a => Int -> Int -> Integer -> Integer -> a
-- Invariant: n and d strictly positive
fromRat'' minEx@(I# me#) mantDigs@(I# md#) n d =
case integerLog2IsPowerOf2# d of
(# ld#, pw# #)
| isTrue# (pw# ==# 0#) ->
case integerLog2# n of
ln# | isTrue# (ln# >=# (ld# +# me# -# 1#)) ->
-- this means n/d >= 2^(minEx-1), i.e. we are guaranteed to get
-- a normalised number, round to mantDigs bits
if isTrue# (ln# <# md#)
then encodeFloat n (I# (negateInt# ld#))
else let n' = n `shiftR` (I# (ln# +# 1# -# md#))
n'' = case roundingMode# n (ln# -# md#) of
0# -> n'
2# -> n' + 1
_ -> case fromInteger n' .&. (1 :: Int) of
0 -> n'
_ -> n' + 1
in encodeFloat n'' (I# (ln# -# ld# +# 1# -# md#))
| otherwise ->
-- n/d < 2^(minEx-1), a denorm or rounded to 2^(minEx-1)
-- the exponent for encoding is always minEx-mantDigs
-- so we must shift right by (minEx-mantDigs) - (-ld)
case ld# +# (me# -# md#) of
ld'# | isTrue# (ld'# <=# 0#) -> -- we would shift left, so we don't shift
encodeFloat n (I# ((me# -# md#) -# ld'#))
| isTrue# (ld'# <=# ln#) ->
let n' = n `shiftR` (I# ld'#)
in case roundingMode# n (ld'# -# 1#) of
0# -> encodeFloat n' (minEx - mantDigs)
1# -> if fromInteger n' .&. (1 :: Int) == 0
then encodeFloat n' (minEx-mantDigs)
else encodeFloat (n' + 1) (minEx-mantDigs)
_ -> encodeFloat (n' + 1) (minEx-mantDigs)
| isTrue# (ld'# ># (ln# +# 1#)) -> encodeFloat 0 0 -- result of shift < 0.5
| otherwise -> -- first bit of n shifted to 0.5 place
case integerLog2IsPowerOf2# n of
(# _, 0# #) -> encodeFloat 0 0 -- round to even
(# _, _ #) -> encodeFloat 1 (minEx - mantDigs)
| otherwise ->
let ln = I# (integerLog2# n)
ld = I# ld#
-- 2^(ln-ld-1) < n/d < 2^(ln-ld+1)
p0 = max minEx (ln - ld)
(n', d')
| p0 < mantDigs = (n `shiftL` (mantDigs - p0), d)
| p0 == mantDigs = (n, d)
| otherwise = (n, d `shiftL` (p0 - mantDigs))
-- if ln-ld < minEx, then n'/d' < 2^mantDigs, else
-- 2^(mantDigs-1) < n'/d' < 2^(mantDigs+1) and we
-- may need one scaling step
scale p a b
| (b `shiftL` mantDigs) <= a = (p+1, a, b `shiftL` 1)
| otherwise = (p, a, b)
(p', n'', d'') = scale (p0-mantDigs) n' d'
-- n''/d'' < 2^mantDigs and p' == minEx-mantDigs or n''/d'' >= 2^(mantDigs-1)
rdq = case n'' `quotRem` d'' of
(q,r) -> case compare (r `shiftL` 1) d'' of
LT -> q
EQ -> if fromInteger q .&. (1 :: Int) == 0
then q else q+1
GT -> q+1
in encodeFloat rdq p'
------------------------------------------------------------------------
-- Floating point numeric primops
------------------------------------------------------------------------
-- Definitions of the boxed PrimOps; these will be
-- used in the case of partial applications, etc.
plusFloat, minusFloat, timesFloat, divideFloat :: Float -> Float -> Float
plusFloat (F# x) (F# y) = F# (plusFloat# x y)
minusFloat (F# x) (F# y) = F# (minusFloat# x y)
timesFloat (F# x) (F# y) = F# (timesFloat# x y)
divideFloat (F# x) (F# y) = F# (divideFloat# x y)
negateFloat :: Float -> Float
negateFloat (F# x) = F# (negateFloat# x)
gtFloat, geFloat, ltFloat, leFloat :: Float -> Float -> Bool
gtFloat (F# x) (F# y) = isTrue# (gtFloat# x y)
geFloat (F# x) (F# y) = isTrue# (geFloat# x y)
ltFloat (F# x) (F# y) = isTrue# (ltFloat# x y)
leFloat (F# x) (F# y) = isTrue# (leFloat# x y)
expFloat, logFloat, sqrtFloat, fabsFloat :: Float -> Float
sinFloat, cosFloat, tanFloat :: Float -> Float
asinFloat, acosFloat, atanFloat :: Float -> Float
sinhFloat, coshFloat, tanhFloat :: Float -> Float
expFloat (F# x) = F# (expFloat# x)
logFloat (F# x) = F# (logFloat# x)
sqrtFloat (F# x) = F# (sqrtFloat# x)
fabsFloat (F# x) = F# (fabsFloat# x)
sinFloat (F# x) = F# (sinFloat# x)
cosFloat (F# x) = F# (cosFloat# x)
tanFloat (F# x) = F# (tanFloat# x)
asinFloat (F# x) = F# (asinFloat# x)
acosFloat (F# x) = F# (acosFloat# x)
atanFloat (F# x) = F# (atanFloat# x)
sinhFloat (F# x) = F# (sinhFloat# x)
coshFloat (F# x) = F# (coshFloat# x)
tanhFloat (F# x) = F# (tanhFloat# x)
powerFloat :: Float -> Float -> Float
powerFloat (F# x) (F# y) = F# (powerFloat# x y)
-- definitions of the boxed PrimOps; these will be
-- used in the case of partial applications, etc.
plusDouble, minusDouble, timesDouble, divideDouble :: Double -> Double -> Double
plusDouble (D# x) (D# y) = D# (x +## y)
minusDouble (D# x) (D# y) = D# (x -## y)
timesDouble (D# x) (D# y) = D# (x *## y)
divideDouble (D# x) (D# y) = D# (x /## y)
negateDouble :: Double -> Double
negateDouble (D# x) = D# (negateDouble# x)
gtDouble, geDouble, leDouble, ltDouble :: Double -> Double -> Bool
gtDouble (D# x) (D# y) = isTrue# (x >## y)
geDouble (D# x) (D# y) = isTrue# (x >=## y)
ltDouble (D# x) (D# y) = isTrue# (x <## y)
leDouble (D# x) (D# y) = isTrue# (x <=## y)
double2Float :: Double -> Float
double2Float (D# x) = F# (double2Float# x)
float2Double :: Float -> Double
float2Double (F# x) = D# (float2Double# x)
expDouble, logDouble, sqrtDouble, fabsDouble :: Double -> Double
sinDouble, cosDouble, tanDouble :: Double -> Double
asinDouble, acosDouble, atanDouble :: Double -> Double
sinhDouble, coshDouble, tanhDouble :: Double -> Double
expDouble (D# x) = D# (expDouble# x)
logDouble (D# x) = D# (logDouble# x)
sqrtDouble (D# x) = D# (sqrtDouble# x)
fabsDouble (D# x) = D# (fabsDouble# x)
sinDouble (D# x) = D# (sinDouble# x)
cosDouble (D# x) = D# (cosDouble# x)
tanDouble (D# x) = D# (tanDouble# x)
asinDouble (D# x) = D# (asinDouble# x)
acosDouble (D# x) = D# (acosDouble# x)
atanDouble (D# x) = D# (atanDouble# x)
sinhDouble (D# x) = D# (sinhDouble# x)
coshDouble (D# x) = D# (coshDouble# x)
tanhDouble (D# x) = D# (tanhDouble# x)
powerDouble :: Double -> Double -> Double
powerDouble (D# x) (D# y) = D# (x **## y)
foreign import ccall unsafe "isFloatNaN" isFloatNaN :: Float -> Int
foreign import ccall unsafe "isFloatInfinite" isFloatInfinite :: Float -> Int
foreign import ccall unsafe "isFloatDenormalized" isFloatDenormalized :: Float -> Int
foreign import ccall unsafe "isFloatNegativeZero" isFloatNegativeZero :: Float -> Int
foreign import ccall unsafe "isFloatFinite" isFloatFinite :: Float -> Int
foreign import ccall unsafe "isDoubleNaN" isDoubleNaN :: Double -> Int
foreign import ccall unsafe "isDoubleInfinite" isDoubleInfinite :: Double -> Int
foreign import ccall unsafe "isDoubleDenormalized" isDoubleDenormalized :: Double -> Int
foreign import ccall unsafe "isDoubleNegativeZero" isDoubleNegativeZero :: Double -> Int
foreign import ccall unsafe "isDoubleFinite" isDoubleFinite :: Double -> Int
------------------------------------------------------------------------
-- libm imports for extended floating
------------------------------------------------------------------------
foreign import capi unsafe "math.h log1p" log1pDouble :: Double -> Double
foreign import capi unsafe "math.h expm1" expm1Double :: Double -> Double
foreign import capi unsafe "math.h log1pf" log1pFloat :: Float -> Float
foreign import capi unsafe "math.h expm1f" expm1Float :: Float -> Float
------------------------------------------------------------------------
-- Coercion rules
------------------------------------------------------------------------
word2Double :: Word -> Double
word2Double (W# w) = D# (word2Double# w)
word2Float :: Word -> Float
word2Float (W# w) = F# (word2Float# w)
{-# RULES
"fromIntegral/Int->Float" fromIntegral = int2Float
"fromIntegral/Int->Double" fromIntegral = int2Double
"fromIntegral/Word->Float" fromIntegral = word2Float
"fromIntegral/Word->Double" fromIntegral = word2Double
"realToFrac/Float->Float" realToFrac = id :: Float -> Float
"realToFrac/Float->Double" realToFrac = float2Double
"realToFrac/Double->Float" realToFrac = double2Float
"realToFrac/Double->Double" realToFrac = id :: Double -> Double
"realToFrac/Int->Double" realToFrac = int2Double -- See Note [realToFrac int-to-float]
"realToFrac/Int->Float" realToFrac = int2Float -- ..ditto
#-}
{-
Note [realToFrac int-to-float]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Don found that the RULES for realToFrac/Int->Double and similarly for
Float made a huge difference to some stream-fusion programs. Here's
an example
import Data.Array.Vector
n = 40000000
main = do
let c = replicateU n (2::Double)
a = mapU realToFrac (enumFromToU 0 (n-1) ) :: UArr Double
print (sumU (zipWithU (*) c a))
Without the RULE we get this loop body:
case $wtoRational sc_sY4 of ww_aM7 { (# ww1_aM9, ww2_aMa #) ->
case $wfromRat ww1_aM9 ww2_aMa of tpl_X1P { D# ipv_sW3 ->
Main.$s$wfold
(+# sc_sY4 1)
(+# wild_X1i 1)
(+## sc2_sY6 (*## 2.0 ipv_sW3))
And with the rule:
Main.$s$wfold
(+# sc_sXT 1)
(+# wild_X1h 1)
(+## sc2_sXV (*## 2.0 (int2Double# sc_sXT)))
The running time of the program goes from 120 seconds to 0.198 seconds
with the native backend, and 0.143 seconds with the C backend.
A few more details in Trac #2251, and the patch message
"Add RULES for realToFrac from Int".
-}
-- Utils
showSignedFloat :: (RealFloat a)
=> (a -> ShowS) -- ^ a function that can show unsigned values
-> Int -- ^ the precedence of the enclosing context
-> a -- ^ the value to show
-> ShowS
showSignedFloat showPos p x
| x < 0 || isNegativeZero x
= showParen (p > 6) (showChar '-' . showPos (-x))
| otherwise = showPos x
{-
We need to prevent over/underflow of the exponent in encodeFloat when
called from scaleFloat, hence we clamp the scaling parameter.
We must have a large enough range to cover the maximum difference of
exponents returned by decodeFloat.
-}
clamp :: Int -> Int -> Int
clamp bd k = max (-bd) (min bd k)
{-
Note [Casting from integral to floating point types]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
To implement something like `reinterpret_cast` from C++ to go from a
floating-point type to an integral type, one might naively think that the
following should work:
cast :: Float -> Word32
cast (F# f#) = W32# (unsafeCoerce# f#)
Unfortunately that is not the case, because all the `unsafeCoerce#` does is tell
the compiler that the types have changed. When one does the above cast and
tries to operate on the resulting `Word32` the code generator will generate code
that performs an integer/word operation on a floating-point register, which
results in a compile error.
The correct way of implementing `reinterpret_cast` is to implement a primop, but
that requires a unique implementation for all supported architectures. The next
best solution is to write the value from the source register to memory and then
read it from memory into the destination register and the best way to do that
is using CMM.
-}
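-- A hedged round-trip illustration of the casts defined below; the bit
-- patterns follow IEEE-754 single precision:
--
-- >>> castFloatToWord32 1.0
-- 1065353216
-- >>> castWord32ToFloat 1065353216   -- 0x3F800000
-- 1.0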
-- | @'castWord32ToFloat' w@ does a bit-for-bit copy from an integral value
-- to a floating-point value.
--
-- @since 4.10.0.0
{-# INLINE castWord32ToFloat #-}
castWord32ToFloat :: Word32 -> Float
castWord32ToFloat (W32# w#) = F# (stgWord32ToFloat w#)
foreign import prim "stg_word32ToFloatzh"
stgWord32ToFloat :: Word# -> Float#
-- | @'castFloatToWord32' f@ does a bit-for-bit copy from a floating-point value
-- to an integral value.
--
-- @since 4.10.0.0
{-# INLINE castFloatToWord32 #-}
castFloatToWord32 :: Float -> Word32
castFloatToWord32 (F# f#) = W32# (stgFloatToWord32 f#)
foreign import prim "stg_floatToWord32zh"
stgFloatToWord32 :: Float# -> Word#
-- | @'castWord64ToDouble' w@ does a bit-for-bit copy from an integral value
-- to a floating-point value.
--
-- @since 4.10.0.0
{-# INLINE castWord64ToDouble #-}
castWord64ToDouble :: Word64 -> Double
castWord64ToDouble (W64# w) = D# (stgWord64ToDouble w)
foreign import prim "stg_word64ToDoublezh"
#if WORD_SIZE_IN_BITS == 64
stgWord64ToDouble :: Word# -> Double#
#else
stgWord64ToDouble :: Word64# -> Double#
#endif
-- | @'castDoubleToWord64' d@ does a bit-for-bit copy from a floating-point value
-- to an integral value.
--
-- @since 4.10.0.0
{-# INLINE castDoubleToWord64 #-}
castDoubleToWord64 :: Double -> Word64
castDoubleToWord64 (D# d#) = W64# (stgDoubleToWord64 d#)
foreign import prim "stg_doubleToWord64zh"
#if WORD_SIZE_IN_BITS == 64
stgDoubleToWord64 :: Double# -> Word#
#else
stgDoubleToWord64 :: Double# -> Word64#
#endif
|
ezyang/ghc
|
libraries/base/GHC/Float.hs
|
bsd-3-clause
| 51,084 | 1 | 29 | 16,582 | 11,209 | 5,929 | 5,280 | -1 | -1 |
-- !!! Type sigs in instance decl
module M where
data T = T Int
instance Eq T where
(==) :: T -> T -> Bool
T x == T y = x == y
|
ghc-android/ghc
|
testsuite/tests/module/mod45.hs
|
bsd-3-clause
| 132 | 0 | 7 | 40 | 58 | 31 | 27 | -1 | -1 |
module Vogogo.Transaction (sendEFT) where
import Data.Fixed (Centi)
import Control.Monad (when)
import Currency (ISO4217Currency)
import Data.Aeson ((.=))
import Control.Error (EitherT(..), throwT, runEitherT)
import Network.Http.Client (getStatusCode)
import qualified Data.Aeson as Aeson
import qualified Data.Text as T
import Vogogo.Internal
data EFT = EFT UUID UUID Centi ISO4217Currency
instance Aeson.ToJSON EFT where
toJSON (EFT (UUID from) (UUID to) amount currency) = Aeson.object [
T.pack "src_account" .= from,
T.pack "dst_account" .= to,
T.pack "amount" .= amount,
T.pack "currency" .= show currency
]
sendEFT ::
Auth
-> UUID -- ^ From account
-> UUID -- ^ To account
-> Centi -- ^ Amount
-> ISO4217Currency
-> IO (Either APIError ())
sendEFT auth from to amount currency = runEitherT $ do
resp <- EitherT $ post (apiCall "credit/") (basicAuth auth)
(EFT from to amount currency)
(const . return . statusCodeHandler)
when (getStatusCode resp /= 201) $ throwT APIParseError
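-- A hedged usage sketch; 'auth', 'srcAccount', 'dstAccount' and 'currency'
-- are placeholder values built from Vogogo.Internal and the currency package:
--
-- result <- sendEFT auth srcAccount dstAccount 12.50 currency
--
-- The request body is the JSON object produced by the 'ToJSON' instance above
-- ("src_account", "dst_account", "amount", "currency"), and any HTTP status
-- other than 201 is reported as 'APIParseError'.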
|
singpolyma/vogogo-haskell
|
Vogogo/Transaction.hs
|
isc
| 1,027 | 26 | 13 | 183 | 381 | 205 | 176 | 29 | 1 |
module Control.Concurrent.STM.ClosableQueue where
import Control.Concurrent.STM
import Control.Concurrent.STM.TBMQueue
import Control.Concurrent.STM.TMQueue
class ClosableQueue q where
isClosedQueue :: q -> STM Bool
closeQueue :: q -> STM ()
instance ClosableQueue (TMQueue a) where
isClosedQueue = isClosedTMQueue
closeQueue = closeTMQueue
instance ClosableQueue (TBMQueue a) where
isClosedQueue = isClosedTBMQueue
closeQueue = closeTBMQueue
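-- A hedged usage sketch; 'newTMQueueIO' is assumed to come from stm-chans:
--
-- demo :: IO Bool
-- demo = do
--   q <- newTMQueueIO :: IO (TMQueue ())
--   atomically $ closeQueue q
--   atomically $ isClosedQueue q   -- True once the queue has been closed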
|
MRudolph/stm-chans-class
|
src/Control/Concurrent/STM/ClosableQueue.hs
|
mit
| 462 | 0 | 9 | 66 | 111 | 63 | 48 | 13 | 0 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE TypeFamilies #-}
{- |
Module : System.Wlog.LogHandler.Simple
Copyright : Copyright (C) 2004-2011 John Goerzen
License : BSD3
Maintainer : John Goerzen <[email protected]>
Stability : provisional
Portability: portable
Simple log handlers
Written by John Goerzen, jgoerzen\@complete.org
-}
module System.Wlog.LogHandler.Simple
( GenericHandler(..)
, defaultHandleAction
-- * Custom handlers
, fileHandler
, streamHandler
) where
import Universum
import Control.Concurrent (modifyMVar_, withMVar)
import Data.Text.Lazy.Builder as B
import System.Directory (createDirectoryIfMissing)
import System.FilePath (takeDirectory)
import System.IO (SeekMode (SeekFromEnd), hFlush, hSeek)
import System.Wlog.Formatter (LogFormatter, nullFormatter)
import System.Wlog.LogHandler (LogHandler (..), LogHandlerTag (..))
import System.Wlog.MemoryQueue (MemoryQueue, newMemoryQueue, pushFront, queueToList)
import System.Wlog.Severity (Severities)
import qualified Data.Text.IO as TIO
-- | A helper data type.
data GenericHandler a = GenericHandler
{ severities :: !Severities
, formatter :: !(LogFormatter (GenericHandler a))
, privData :: !a
, writeFunc :: !(a -> Text -> IO ())
, closeFunc :: !(a -> IO ())
, readBackBuffer :: !(MVar (MemoryQueue Text))
, ghTag :: !LogHandlerTag
} deriving Typeable
instance Typeable a => LogHandler (GenericHandler a) where
getTag = ghTag
setLevel sh s = sh {severities = s}
getLevel = severities
setFormatter sh f = sh{formatter = f}
getFormatter = formatter
readBack sh i = liftIO $ withMVar (readBackBuffer sh) $ \mq' -> pure $! take i . queueToList $ mq'
emit sh bldr _ = liftIO $ writeFunc sh (privData sh) (toText . B.toLazyText $ bldr)
close sh = liftIO $ closeFunc sh (privData sh)
-- | Default action which just prints to handle using given message.
defaultHandleAction :: Handle -> Text -> IO ()
defaultHandleAction = TIO.hPutStrLn
-- | Creates custom write action and memory queue where write action
-- updates memory queue as well.
createWriteFuncWrapper
:: (Handle -> Text -> IO ())
-> MVar ()
-> IO ( Handle -> Text -> IO ()
, MVar (MemoryQueue Text)
)
createWriteFuncWrapper action lock = do
memoryQueue <- newMVar $ newMemoryQueue $ 2 * 1024 * 1024 -- 2 MB
let customWriteFunc :: Handle -> Text -> IO ()
customWriteFunc hdl msg = withMVar lock $ const $ do
action hdl msg
-- Important to force the queue here, else a massive closure will
-- be retained until the queue is actually used.
modifyMVar_ memoryQueue $ \mq -> pure $! pushFront msg mq
hFlush hdl
return (customWriteFunc, memoryQueue)
-- | Create a stream log handler. Log messages sent to this handler
-- will be sent to the stream used initially. Note that the 'close'
-- method will have no effect on stream handlers; it does not actually
-- close the underlying stream.
streamHandler :: Handle
-> (Handle -> Text -> IO ())
-> MVar ()
-> Severities
-> IO (GenericHandler Handle)
streamHandler privData writeAction lock severities = do
(writeFunc, readBackBuffer) <- createWriteFuncWrapper writeAction lock
return GenericHandler
{ formatter = nullFormatter
, closeFunc = const $ pure ()
, ghTag = HandlerOther "GenericHandler/StreamHandler"
, ..
}
-- | Create a file log handler. Log messages sent to this handler
-- will be sent to the filename specified, which will be opened in
-- Append mode. Calling 'close' on the handler will close the file.
fileHandler :: FilePath -> Severities -> IO (GenericHandler Handle)
fileHandler fp sev = do
createDirectoryIfMissing True (takeDirectory fp)
h <- openFile fp ReadWriteMode
hSeek h SeekFromEnd 0
lock <- newMVar ()
sh <- streamHandler h defaultHandleAction lock sev
pure $ sh { closeFunc = hClose
, ghTag = HandlerFilelike fp
}
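-- A hedged usage sketch; 'sevs' stands for whatever 'Severities' value the
-- caller wants and is a placeholder here:
--
-- h <- fileHandler "logs/app.log" sevs
--
-- The parent directory is created if missing, the file is opened in
-- ReadWriteMode and positioned at its end, and 'close h' releases the handle.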
|
serokell/log-warper
|
src/System/Wlog/LogHandler/Simple.hs
|
mit
| 4,178 | 0 | 16 | 1,037 | 943 | 507 | 436 | -1 | -1 |
module Data.ExplicitMinHeap(
ExplicitMinHeap(..),
empty,
isEmpty,
insert,
merge,
findMin,
deleteMin,
) where
import Data.Heap
data ExplicitMinHeap h a = E | NE a (h a)
instance Heap h => Heap (ExplicitMinHeap h) where
empty = E
isEmpty E = True
isEmpty _ = False
findMin (NE x _) = x
deleteMin (NE _ h) = let h' = deleteMin h in
NE (findMin h') h'
insert x E = NE x empty
insert x (NE _ h) = let h' = merge (insert x empty) h in
NE (findMin h') h'
merge h E = h
merge E h = h
merge (NE _ h1) (NE _ h2) = let h' = merge h1 h2 in
NE (findMin h') h'
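-- A hedged usage sketch; @SomeHeap@ is a placeholder for any underlying
-- 'Heap' instance:
--
-- findMin (insert 3 (insert 1 (insert 2 empty)) :: ExplicitMinHeap SomeHeap Int)
--
-- evaluates to 1 in O(1), because the minimum is cached in the NE constructor.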
|
syunta/PFDS
|
Data/ExplicitMinHeap.hs
|
mit
| 672 | 0 | 12 | 244 | 301 | 154 | 147 | 24 | 0 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE ViewPatterns #-}
module Hpack.Syntax.DependencyVersion (
githubBaseUrl
, GitRef
, GitUrl
, VersionConstraint(..)
, versionConstraint
, anyVersion
, versionRange
, DependencyVersion(..)
, withDependencyVersion
, dependencyVersion
, SourceDependency(..)
, objectDependency
, versionConstraintFromCabal
, scientificToVersion
, cabalParse
) where
import Imports
import qualified Control.Monad.Fail as Fail
import Data.Maybe
import Data.Scientific
import qualified Data.Text as T
import qualified Data.Aeson.Config.KeyMap as KeyMap
import Text.PrettyPrint (renderStyle, Style(..), Mode(..))
import qualified Distribution.Version as D
import qualified Distribution.Parsec as D
import qualified Distribution.Pretty as D
import qualified Distribution.Types.VersionRange.Internal as D
import Data.Aeson.Config.FromValue
githubBaseUrl :: String
githubBaseUrl = "https://github.com/"
type GitUrl = String
type GitRef = String
data VersionConstraint = AnyVersion | VersionRange String
deriving (Eq, Ord, Show)
instance FromValue VersionConstraint where
fromValue = versionConstraint
versionConstraint :: Value -> Parser VersionConstraint
versionConstraint v = case v of
Null -> return AnyVersion
Number n -> return (numericVersionConstraint n)
String s -> stringVersionConstraint s
_ -> typeMismatch "Null, Number, or String" v
anyVersion :: DependencyVersion
anyVersion = DependencyVersion Nothing AnyVersion
versionRange :: String -> DependencyVersion
versionRange = DependencyVersion Nothing . VersionRange
data DependencyVersion = DependencyVersion (Maybe SourceDependency) VersionConstraint
deriving (Eq, Ord, Show)
withDependencyVersion
:: (DependencyVersion -> a)
-> (Object -> DependencyVersion -> Parser a)
-> Value
-> Parser a
withDependencyVersion k obj v = case v of
Null -> return $ k anyVersion
Object o -> objectDependency o >>= obj o
Number n -> return $ k (DependencyVersion Nothing $ numericVersionConstraint n)
String s -> k . DependencyVersion Nothing <$> stringVersionConstraint s
_ -> typeMismatch "Null, Object, Number, or String" v
dependencyVersion :: Value -> Parser DependencyVersion
dependencyVersion = withDependencyVersion id (const return)
data SourceDependency = GitRef GitUrl GitRef (Maybe FilePath) | Local FilePath
deriving (Eq, Ord, Show)
objectDependency :: Object -> Parser DependencyVersion
objectDependency o = let
version :: Parser VersionConstraint
version = fromMaybe AnyVersion <$> (o .:? "version")
local :: Parser SourceDependency
local = Local <$> o .: "path"
git :: Parser SourceDependency
git = GitRef <$> url <*> ref <*> subdir
url :: Parser String
url =
((githubBaseUrl ++) <$> o .: "github")
<|> (o .: "git")
<|> fail "neither key \"git\" nor key \"github\" present"
ref :: Parser String
ref = o .: "ref"
subdir :: Parser (Maybe FilePath)
subdir = o .:? "subdir"
source :: Parser (Maybe SourceDependency)
source
| any (`KeyMap.member` o) ["path", "git", "github", "ref", "subdir"] = Just <$> (local <|> git)
| otherwise = return Nothing
in DependencyVersion <$> source <*> version
numericVersionConstraint :: Scientific -> VersionConstraint
numericVersionConstraint n = VersionRange ("==" ++ version)
where
version = scientificToVersion n
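-- A hedged illustration: a bare number such as 0.11 in a dependencies field
-- arrives here as a 'Scientific', so (under that assumption)
--
-- >>> numericVersionConstraint 0.11
-- VersionRange "==0.11"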
stringVersionConstraint :: Text -> Parser VersionConstraint
stringVersionConstraint s = parseVersionRange ("== " ++ input) <|> parseVersionRange input
where
input = T.unpack s
scientificToVersion :: Scientific -> String
scientificToVersion n = version
where
version = formatScientific Fixed (Just decimalPlaces) n
decimalPlaces
| e < 0 = abs e
| otherwise = 0
e = base10Exponent n
parseVersionRange :: Fail.MonadFail m => String -> m VersionConstraint
parseVersionRange = fmap versionConstraintFromCabal . parseCabalVersionRange
parseCabalVersionRange :: Fail.MonadFail m => String -> m D.VersionRange
parseCabalVersionRange = cabalParse "constraint"
cabalParse :: (Fail.MonadFail m, D.Parsec a) => String -> String -> m a
cabalParse subject s = case D.eitherParsec s of
Right d -> return d
  Left _ -> fail $ unwords ["invalid", subject, show s]
renderVersionRange :: D.VersionRange -> String
renderVersionRange = \ case
D.IntersectVersionRanges (D.OrLaterVersion x) (D.EarlierVersion y) | differByOneInLeastPosition (x, y) -> "==" ++ render x ++ ".*"
v -> render v
where
differByOneInLeastPosition = \ case
(reverse . D.versionNumbers -> x : xs, reverse . D.versionNumbers -> y : ys) -> xs == ys && succ x == y
_ -> False
render :: D.Pretty a => a -> String
render = renderStyle (Style OneLineMode 0 0) . D.pretty
versionConstraintFromCabal :: D.VersionRange -> VersionConstraint
versionConstraintFromCabal range
| D.isAnyVersion range = AnyVersion
| otherwise = VersionRange . renderVersionRange $ toPreCabal2VersionRange range
where
toPreCabal2VersionRange :: D.VersionRange -> D.VersionRange
toPreCabal2VersionRange = D.embedVersionRange . D.cataVersionRange f
where
f :: D.VersionRangeF (D.VersionRangeF D.VersionRange) -> D.VersionRangeF D.VersionRange
f = \ case
D.MajorBoundVersionF v -> D.IntersectVersionRangesF (D.embedVersionRange lower) (D.embedVersionRange upper)
where
lower = D.OrLaterVersionF v
upper = D.EarlierVersionF (D.majorUpperBound v)
D.ThisVersionF v -> D.ThisVersionF v
D.LaterVersionF v -> D.LaterVersionF v
D.OrLaterVersionF v -> D.OrLaterVersionF v
D.EarlierVersionF v -> D.EarlierVersionF v
D.OrEarlierVersionF v -> D.OrEarlierVersionF v
D.UnionVersionRangesF a b -> D.UnionVersionRangesF (D.embedVersionRange a) (D.embedVersionRange b)
D.IntersectVersionRangesF a b -> D.IntersectVersionRangesF (D.embedVersionRange a) (D.embedVersionRange b)
#if !MIN_VERSION_Cabal(3,4,0)
D.WildcardVersionF v -> D.WildcardVersionF v
D.VersionRangeParensF a -> D.VersionRangeParensF (D.embedVersionRange a)
D.AnyVersionF -> D.AnyVersionF
#endif
|
sol/hpack
|
src/Hpack/Syntax/DependencyVersion.hs
|
mit
| 6,305 | 0 | 17 | 1,219 | 1,766 | 914 | 852 | 140 | 11 |
module Yesod.Hunt
( module Yesod.Hunt.SubSite
, module Yesod.Hunt.Routes
)
where
import Yesod.Hunt.SubSite
import Yesod.Hunt.Routes
|
hunt-framework/yesod-hunt
|
src/Yesod/Hunt.hs
|
mit
| 133 | 0 | 5 | 15 | 34 | 23 | 11 | 5 | 0 |
module Server.HandleUserInput where
import Server.ClientQueries
import Server.GlobalState
import Data.Schema
import Utils.Utils
import CodeGen.Metadata.SimpleRecord
import CodeGen.Metadata.SimpleSubInstance
import CodeGen.Metadata.CoLimit
import Data.Types
import Server.Execute
import CodeGen.Metadata.Metadata
import CodeGen.NutleyQuery
import Server.QueryCompile
import Server.NutleyInstance
import Data.Name
import Utils.Verify
import CodeGen.Metadata.Shriek
import CodeGen.Metadata.DirectImage
import System.Directory
import CodeGen.Metadata.InverseImage
import qualified Data.Map as Map
import Control.Monad
import Data.Either
import Control.Concurrent.STM
import Server.Parser
import Control.Monad.Trans.Either
import Network.Socket
import Network
import System.IO
createToObject :: GlobalState -> CreateQuery -> ErrorT STM NutleyObject
createToObject state (NamedObject name) = lookupByName state name
createToObject state (SchemaQuery sch) = fmap NutleySchema $ schemaQuerySchema state sch
createToObject state cs@(CreateSchema verts simps) = fmap NutleySchema $ hoistEither $ createSchemaToSchema cs
createToObject state (InstantiateSchema schq simplex dat) = do
sch <- schemaQuerySchema state schq
id <- liftEitherT $ nextInstanceID state
case mapM (\s -> schemaLookupVertex s sch) simplex of
(Just simp) -> do
hoistEither $ guardEither "Cannot instantiate schema at simplex not in schema" $ containsSimplex (fullSubSchema sch) simp
let md = simpleRecord (name schq) sch simp
case dat of
(ExplicitTuples tps) -> return $ NutleyActionObject $ do
fmap (flip NutleyObjInstance md) $ executeInstantiateFromStrings (InstantiateQuery md) id tps
(LoadCSV filepath) -> return $ NutleyActionObject $ do
dfe <- liftEitherT $ doesFileExist filepath
if not dfe
then left $ "Cannot find file " ++ filepath
else do
dats <- liftEitherT $ fmap ((map (sepBySkipQuotes (==','))).lines) $ readFile filepath
let tups = map (map (\x -> if x == "null" then Nothing else Just x)) dats
fmap (flip NutleyObjInstance md) $ executeInstantiateFromStrings (InstantiateQuery md) id tups
(SelectData (SelectQuery simpNamed from)) -> do
instanceObj <- instanceQueryInstance state from
ss <- case simplicesFromNames md simpNamed of
Nothing -> left "Simplex not in instance's schema"
(Just simps) -> do
right (SubSchema simps $ dbSchema md)
return $ NutleyActionObject $ do
(inst,mdfrom) <- execNutleyInstance instanceObj
fmap (flip NutleyObjInstance md) $ executeInstantiateSelect (InstantiateSelectQuery md mdfrom ss) id inst
Nothing -> left "Cannot instantiate schema at vertices not in schema"
createToObject state (FilterQuery inner fn) = do
instObj <- instanceQueryInstance state inner
hoistEither $ withNutleyInstance instObj $ \(inst,md) -> do
(md,params) <- subInstance fn md
return $ NutleyObjInstance (SimpleSubInstance params inst) md
createToObject state (UnionQuery us) = do
instances <- mapM (instanceQueryInstance state) us
hoistEither $ withNutleyInstances instances $ \inners -> do
let (db,ni) = coLimitOne inners
verifyEither db -- wait.. why are we verifying this db?
return $ NutleyObjInstance ni db
createToObject state (CreateMap srcQ trgQ defs) = do
schSrc <- schemaQuerySchema state srcQ
schTrg <- schemaQuerySchema state trgQ
case mapM (\(a,b,f) -> (Just (,,)) `ap` (schemaLookupVertex a schSrc) `ap` (schemaLookupVertex b schTrg) `ap` (Just f)) defs of
(Just defs) -> return $ NutleyMap $ SchemaMap schSrc schTrg defs
Nothing -> left $ "Mapped vertex not in schema"
createToObject state (FunctorQuery t mapQuery instanceQuery) = do
f <- mapQueryMap state mapQuery
instObj <- instanceQueryInstance state instanceQuery
hoistEither $ withNutleyInstance instObj $ \(inst,md) -> do
case t of
ShriekFunctor -> fmap (NutleyObjInstance $ Shriek inst) $ verifyEither $ shriek f md
DirectImageFunctor -> fmap (NutleyObjInstance $ DirectImage inst) $ verifyEither $ directImage f md
InverseImageFunctor -> do
(invmd,params) <- inverseImage f md
return $ NutleyObjInstance (InverseImage params inst) invmd
createToObject state (ConnectQuery addr port) =
return $ NutleyConnection addr $ PortNumber $ fromIntegral port
createSchemaToSchema :: CreateQuery -> Error Schema
createSchemaToSchema (CreateSchema verts simps) = do
let idMap = Map.fromList $ zip (map (\(TypeDec a _) -> a) verts) [1..]
tpList = zip [1..] (map (\(TypeDec _ a) -> a) verts)
guardEither "Duplicate vertex name in create schema" $ (Map.size idMap) == (length verts)
simpIds <- forM simps $ mapM ((maybeToEither "simplex not in schema").(flip Map.lookup idMap))
return $ Schema (SC (zip [1.. Map.size idMap] $ map (\(TypeDec a _) -> a) verts) simpIds) tpList
withNutleyInstance :: NutleyObject -> ((NutleyInstance,DBMetadata) -> Error NutleyObject) -> Error NutleyObject
withNutleyInstance (NutleyObjInstance inst md) f = f (inst,md)
withNutleyInstance (NutleyActionObject ioAction) f = return $ NutleyActionObject $ do
obj <- ioAction
hoistEither $ withNutleyInstance obj f
withNutleyInstances :: [NutleyObject] -> ([(DBMetadata,NutleyInstance)] -> Error NutleyObject) -> Error NutleyObject
withNutleyInstances instances f
| all (not.isActionObject) instances = f $ map (\(NutleyObjInstance inst md) -> (md,inst)) instances
| otherwise = do
return $ NutleyActionObject $ do
objs <- forM instances $ \i -> case i of
(NutleyObjInstance inst md) -> return i
(NutleyActionObject ioAction) -> ioAction
hoistEither $ withNutleyInstances objs f
handleLetName :: GlobalState -> ClientQuery -> IO (Error String)
handleLetName state (LetQuery name' (OnConnection c query)) = runEitherT $ do
(name,NutleyConnection addr port) <- mapEitherT atomically $ do
name <- liftEitherT $ freshName state name'
conn <- connectQueryConnect state c
reserveName state name
return (name,conn)
hdl <- tryErrorT $ connectTo addr port
tryErrorT $ hPutStrLn hdl $ "let " ++ name ++ " = " ++ query ++ ";"
(e:trans) <- hGetTransmission hdl
hoistEither $ guardEither ("node error: " ++ concat trans) $ e == "\000"
tryErrorT $ hPutStrLn hdl $ "show (schema " ++ name ++ ")"
(e2:createSchemaQuery:rst) <- hGetTransmission hdl
if e == "\000"
then do
hoistEither $ guardEither "something has gone wrong" $ null rst
let (Just schQ) = (\(Node TopLevel lt) -> parseCreateSchema lt) =<< (lexTree createSchemaQuery)
sch <- hoistEither $ createSchemaToSchema schQ
let nobj = NutleyObjInstance (RemoteInstance addr port) $ error "no remote"-- (remoteInstance name sch)
liftEitherT $ atomically $ addNamedObject state name nobj
return $ "Added object " ++ name
else do
tryErrorT $ hPutStrLn hdl $ "show " ++ name ++ ";"
(e2:createQuery:rst) <- hGetTransmission hdl
hoistEither $ guardEither ("node error: " ++ concat trans) $ e2 == "\000"
hoistEither $ guardEither "something went wrong" $ null rst
res <- liftEitherT $ handleUserInput state $ "let " ++ name ++ " = " ++ createQuery
case res of
(Left err) -> (liftEitherT $ atomically $ removeNamedObject state name) >> left err
(Right note) -> return $ "Added object " ++ name
handleLetName state (LetQuery name' create) = runEitherT $ do
join $ mapEitherT atomically $ do
name <- liftEitherT $ freshName state name'
obj <- createToObject state create
case obj of
(NutleyActionObject ioObj) -> (>>) (reserveName state name) $ return $ do
result <- liftEitherT $ runEitherT ioObj
case result of
(Left err) -> (liftEitherT $ atomically $ removeNamedObject state name) >> left err
(Right nobj) -> do
hoistEither $ guardEither "something has gone wrong" $ not $ isActionObject nobj
liftEitherT $ atomically $ addNamedObject state name nobj
return $ "Added object " ++ name
_ -> do
addNamedObjectIfNotExists state name obj
return $ return $ "Added object " ++ name
handleShow state (ShowQuery create) = do
res <- runEitherT $ do
obj <- mapEitherT atomically $ createToObject state create
case obj of
(NutleyActionObject a) -> a
_ -> return obj
return $ fmap showNutleyObject res
schemaQuerySchema :: GlobalState -> SchemaQuery -> ErrorT STM Schema
schemaQuerySchema state (NamedSchema name) = do
(NutleySchema s) <- lookupByName state name
return s
schemaQuerySchema state (SchemaOf instanceQuery) = instanceQuerySchema state instanceQuery
mapQueryMap :: GlobalState -> MapQuery -> ErrorT STM SchemaMap
mapQueryMap state (NamedMap name) = do
(NutleyMap f) <- lookupByName state name
return f
connectQueryConnect :: GlobalState -> ConnectQuery -> ErrorT STM NutleyObject
connectQueryConnect state (AddressedConnect addr port) = return $ NutleyConnection addr $ PortNumber $ fromIntegral port
connectQueryConnect state (NamedConnect name) = lookupByName state name
instanceQueryInstance :: GlobalState -> InstanceQuery -> ErrorT STM NutleyObject
instanceQueryInstance state (NamedInstance name) = lookupByName state name
instanceQueryInstance state (CreateInstance createQuery) = createToObject state createQuery
instanceQuerySchema state (NamedInstance name) = do
res <- lookupByName state name
case res of
(NutleyObjInstance _ md) -> return $ dbSchema md
_ -> left $ "object " ++ name ++ " has no schema"
instanceQuerySchema state (CreateInstance (InstantiateSchema schq _ _)) = schemaQuerySchema state schq
instanceQuerySchema state (CreateInstance (FilterQuery inst _)) = instanceQuerySchema state inst
instanceQuerySchema state (CreateInstance (FunctorQuery ShriekFunctor f _)) = mapQueryCoDomainSchema state f
instanceQuerySchema state (CreateInstance (FunctorQuery DirectImageFunctor f _)) = mapQueryCoDomainSchema state f
instanceQuerySchema state (CreateInstance (FunctorQuery InverseImageFunctor f _)) = mapQueryDomainSchema state f
instanceQuerySchema state (CreateInstance (UnionQuery (a:as))) = instanceQuerySchema state a
instanceQuerySchema state (CreateInstance (UnionQuery [])) = return emptySchema
instanceQuerySchema state _ = left "cannot take schema of that object"
mapQueryCoDomainSchema state mapQuery = do
(SchemaMap _ cdm _) <- mapQueryMap state mapQuery
return cdm
mapQueryDomainSchema state mapQuery = do
(SchemaMap dom _ _) <- mapQueryMap state mapQuery
return dom
handleSelect :: GlobalState -> ClientQuery -> IO (Error String)
handleSelect state (SelectQuery simpNamed instanceQuery) = runEitherT $ do
instanceObj <- mapEitherT atomically $ instanceQueryInstance state instanceQuery
(inst,md) <- execNutleyInstance instanceObj -- THIS CAN LEAK MEMORY IF instanceObj IS AN INSTANTIATE QUERY
ss <- case simplicesFromNames md simpNamed of
Nothing -> left "Simplex not in instance's schema"
(Just simps) -> do
right (SubSchema simps $ dbSchema md)
executeSectionString (SectionQuery md ss) inst
handleUserInput :: GlobalState -> String -> IO (Error String)
handleUserInput state str = do
case parse str of
Nothing -> return $ Left "Parse Error"
(Just letq@(LetQuery _ _)) -> handleLetName state letq
(Just show@(ShowQuery _)) -> handleShow state show
(Just sele@(SelectQuery _ _)) -> handleSelect state sele
(Just ClearCache) -> clearPlancache >> (return $ Right "")
(Just ClearData) -> clearData >> (clearGlobalState state) >> (return $ Right "")
(Just Quit) -> return $ Right "_q"
|
jvictor0/JoSQL
|
Server/HandleUserInput.hs
|
mit
| 11,730 | 1 | 31 | 2,265 | 3,809 | 1,843 | 1,966 | 217 | 10 |