code | repo_name | path | license | size | n_ast_errors | ast_max_depth | n_whitespaces | n_ast_nodes | n_ast_terminals | n_ast_nonterminals | loc | cycloplexity
---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE CPP #-}
-- |
-- Module : Data.Vector.Internal.Check
-- Copyright : (c) Roman Leshchinskiy 2009
-- License : BSD-style
--
-- Maintainer : Roman Leshchinskiy <[email protected]>
-- Stability : experimental
-- Portability : non-portable
--
-- Bounds checking infrastructure
--
{-# LANGUAGE MagicHash #-}
module Data.Vector.Internal.Check (
Checks(..), doChecks,
error, internalError,
check, checkIndex, checkLength, checkSlice, checkLIQUID, checkIndexLIQUID
) where
import GHC.Base( Int(..) )
import GHC.Prim( Int# )
import Prelude hiding( error, (&&), (||), not )
import qualified Prelude as P
-- NOTE: This is a workaround for GHC's weird behaviour where it doesn't inline
-- these functions into unfoldings which makes the intermediate code size
-- explode. See http://hackage.haskell.org/trac/ghc/ticket/5539.
infixr 2 ||
infixr 3 &&
{-@ not :: b:Bool -> {v:Bool | ((Prop v) <=> ~(Prop b))} @-}
not :: Bool -> Bool
{-# INLINE not #-}
not True = False
not False = True
{-@ (&&) :: x:Bool -> y:Bool -> {v:Bool | ((Prop v) <=> ((Prop x) && (Prop y)))} @-}
(&&) :: Bool -> Bool -> Bool
{-# INLINE (&&) #-}
False && x = False
True && x = x
{-@ (||) :: x:Bool -> y:Bool -> {v:Bool | ((Prop v) <=> ((Prop x) || (Prop y)))} @-}
(||) :: Bool -> Bool -> Bool
{-# INLINE (||) #-}
True || x = True
False || x = x
data Checks = Bounds | Unsafe | Internal deriving( Eq )
doBoundsChecks :: Bool
#ifdef VECTOR_BOUNDS_CHECKS
doBoundsChecks = True
#else
doBoundsChecks = False
#endif
doUnsafeChecks :: Bool
#ifdef VECTOR_UNSAFE_CHECKS
doUnsafeChecks = True
#else
doUnsafeChecks = False
#endif
doInternalChecks :: Bool
#ifdef VECTOR_INTERNAL_CHECKS
doInternalChecks = True
#else
doInternalChecks = False
#endif
doChecks :: Checks -> Bool
{-# INLINE doChecks #-}
doChecks Bounds = doBoundsChecks
doChecks Unsafe = doUnsafeChecks
doChecks Internal = doInternalChecks
error_msg :: String -> Int -> String -> String -> String
error_msg file line loc msg = file ++ ":" ++ show line ++ " (" ++ loc ++ "): " ++ msg
{-@ error :: {v:_ | false} -> _ @-}
error :: String -> Int -> String -> String -> a
{-# NOINLINE error #-}
error file line loc msg
= P.error $ error_msg file line loc msg
{-@ internalError :: {v:_ | false} -> _ @-}
internalError :: String -> Int -> String -> String -> a
{-# NOINLINE internalError #-}
internalError file line loc msg
= P.error $ unlines
["*** Internal error in package vector ***"
,"*** Please submit a bug report at http://trac.haskell.org/vector"
,error_msg file line loc msg]
{-@ checkError :: {v:_ | false} -> _ @-}
checkError :: String -> Int -> Checks -> String -> String -> a
{-# NOINLINE checkError #-}
checkError file line kind loc msg
= case kind of
Internal -> internalError file line loc msg
_ -> error file line loc msg
{-@ check :: _ -> _ -> _ -> _ -> _ -> {v:Bool | (Prop v)} -> _ -> _ @-}
check :: String -> Int -> Checks -> String -> String -> Bool -> a -> a
{-# INLINE check #-}
check file line kind loc msg cond x
| not (doChecks kind) || cond = x
| otherwise = checkError file line kind loc msg
{-@ checkLIQUID :: _ -> _ -> _ -> _ -> _ -> b:Bool -> _ -> {v:_ | (Prop b)} @-}
checkLIQUID :: String -> Int -> Checks -> String -> String -> Bool -> a -> a
{-# INLINE checkLIQUID #-}
checkLIQUID file line kind loc msg cond x
| not (doChecks kind) || cond = x
| otherwise = case kind of
Internal -> internalError file line loc msg
_ -> error file line loc msg
checkIndex_msg :: Int -> Int -> String
{-# INLINE checkIndex_msg #-}
checkIndex_msg (I# i#) (I# n#) = checkIndex_msg# i# n#
checkIndex_msg# :: Int# -> Int# -> String
{-# NOINLINE checkIndex_msg# #-}
checkIndex_msg# i# n# = "index out of bounds " ++ show (I# i#, I# n#)
{-@ checkIndex :: String -> Int -> Checks -> String -> i:Nat -> {n:Nat | i < n } -> a -> a @-}
checkIndex :: String -> Int -> Checks -> String -> Int -> Int -> a -> a
{-# INLINE checkIndex #-}
checkIndex file line kind loc i n x
= check file line kind loc (checkIndex_msg i n) (i >= 0 && i<n) x
{-@ checkIndexLIQUID :: String -> Int -> Checks -> String -> i:Int -> n:Int -> a -> {v:a | (0 <= i && i < n)} @-}
checkIndexLIQUID :: String -> Int -> Checks -> String -> Int -> Int -> a -> a
{-# INLINE checkIndexLIQUID #-}
checkIndexLIQUID file line kind loc i n x
= checkLIQUID file line kind loc (checkIndex_msg i n) (i >= 0 && i<n) x
checkLength_msg :: Int -> String
{-# INLINE checkLength_msg #-}
checkLength_msg (I# n#) = checkLength_msg# n#
checkLength_msg# :: Int# -> String
{-# NOINLINE checkLength_msg# #-}
checkLength_msg# n# = "negative length " ++ show (I# n#)
{-@ checkLength :: String -> Int -> Checks -> String -> Nat -> a -> a @-}
checkLength :: String -> Int -> Checks -> String -> Int -> a -> a
{-# INLINE checkLength #-}
checkLength file line kind loc n x
= check file line kind loc (checkLength_msg n) (n >= 0) x
checkSlice_msg :: Int -> Int -> Int -> String
{-# INLINE checkSlice_msg #-}
checkSlice_msg (I# i#) (I# m#) (I# n#) = checkSlice_msg# i# m# n#
checkSlice_msg# :: Int# -> Int# -> Int# -> String
{-# NOINLINE checkSlice_msg# #-}
checkSlice_msg# i# m# n# = "invalid slice " ++ show (I# i#, I# m#, I# n#)
{-@ checkSlice :: String -> Int -> Checks -> String -> i:Nat -> m:Nat -> {n:Nat | i + m <= n} -> a -> a @-}
checkSlice :: String -> Int -> Checks -> String -> Int -> Int -> Int -> a -> a
{-# INLINE checkSlice #-}
checkSlice file line kind loc i m n x
= check file line kind loc (checkSlice_msg i m n)
(i >= 0 && m >= 0 && i+m <= n) x
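-- Example (illustrative): in the vector package these checks are normally
-- invoked through CPP macros that splice in @__FILE__@ and @__LINE__@; a
-- direct call would look like
--
-- > checkIndex "Data/Vector.hs" 42 Bounds "unsafeIndex" i n x
--
-- which returns @x@ unchanged when @0 <= i < n@ (or when checking for the
-- 'Bounds' kind is disabled) and aborts with a located error message
-- otherwise.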
| mightymoose/liquidhaskell | benchmarks/vector-0.10.0.1/Data/Vector/Internal/Check.hs | bsd-3-clause | 5,620 | 0 | 12 | 1,218 | 1,414 | 759 | 655 | 102 | 2 |
{-# LANGUAGE Trustworthy #-}
-----------------------------------------------------------------------------
-- |
-- Module : Control.Monad.ST.Lazy.Safe
-- Copyright : (c) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : non-portable (requires universal quantification for runST)
--
-- This module presents an identical interface to "Control.Monad.ST",
-- except that the monad delays evaluation of state operations until
-- a value depending on them is required.
--
-- Safe API only.
--
-----------------------------------------------------------------------------
module Control.Monad.ST.Lazy.Safe (
-- * The 'ST' monad
ST,
runST,
fixST,
-- * Converting between strict and lazy 'ST'
strictToLazyST, lazyToStrictST,
-- * Converting 'ST' To 'IO'
RealWorld,
stToIO,
) where
import Control.Monad.ST.Lazy.Imp
| beni55/haste-compiler | libraries/ghc-7.8/base/Control/Monad/ST/Lazy/Safe.hs | bsd-3-clause | 1,034 | 0 | 4 | 217 | 62 | 50 | 12 | 9 | 0 |
module ShouldCompile where
infix 5 |-
infix 9 :=
data Equal = Char := Int
-- fails in GHC (all versions), due to not doing fixity resolution on
-- the lhs before deciding which is the function symbol.
(|-) :: Int -> Equal -> Bool
0 |- x:=y = 1 |- x:=y -- XXX fails here
2 |- (x:=y) = 0 |- x:=y
_ |- _ = False
| ghc-android/ghc | testsuite/tests/parser/should_compile/read027.hs | bsd-3-clause | 326 | 0 | 7 | 85 | 102 | 56 | 46 | -1 | -1 |
main = do putStrLn "Test"
| kevinbenard/code-exercises | haskell/test.hs | mit | 26 | 0 | 7 | 5 | 12 | 5 | 7 | 1 | 1 |
{-# LANGUAGE ConstraintKinds, DataKinds, FlexibleContexts, FlexibleInstances,
MultiParamTypeClasses, TypeFamilies #-}
{- |
Module : Control.Monad.Levels.RWS
Description : Monads with reader, writer and state abilities
Copyright : (c) Ivan Lazar Miljenovic
License : MIT
Maintainer : [email protected]
Note that the original definitions are used for the various reader,
writer and state computations: as such, if there is (for example)
another level that satisfies 'IsReader r' above the one that satisfies
'IsRWS r w s' in the stack, then calling 'ask' will use the higher
level.
-}
module Control.Monad.Levels.RWS
( HasRWS
, IsRWS
, module Control.Monad.Levels.Reader
, module Control.Monad.Levels.Writer
, module Control.Monad.Levels.State
) where
import Control.Monad.Levels
import Control.Monad.Levels.Constraints
import Control.Monad.Levels.Reader
import Control.Monad.Levels.State
import Control.Monad.Levels.Writer
import Data.Monoid (Monoid)
import qualified Control.Monad.Trans.RWS.Lazy as L
import qualified Control.Monad.Trans.RWS.Strict as S
-- -----------------------------------------------------------------------------
-- | Defined as another class rather than an alias in case you need to
-- ensure the same level satisfies all three constraints (and to
-- have a specific 'ValidConstraint' instance).
class (IsReader r m, IsWriter w m, IsState s m) => IsRWS r w s m
instance (MonadTower m, Monoid w) => IsRWS r w s (L.RWST r w s m)
instance (MonadTower m, Monoid w) => IsRWS r w s (S.RWST r w s m)
instance (Monoid w) => ValidConstraint (IsRWS r w s) where
type ConstraintSatisfied (IsRWS r w s) m = SameRWS r w s m
type family SameRWS r w s m where
SameRWS r w s (L.RWST r w s m) = True
SameRWS r w s (S.RWST r w s m) = True
SameRWS r w s m = False
type HasRWS r w s m = SatisfyConstraint (IsRWS r w s) m
| ivan-m/monad-levels | Control/Monad/Levels/RWS.hs | mit | 1,927 | 0 | 9 | 372 | 398 | 232 | 166 | -1 | -1 |
import System.Environment (getArgs)
import Jbobaf.Jitro
import Jbobaf.Vlatai
main = do
argv <- getArgs
input <- if null argv then getContents else readFile (head argv)
mapM_ (\(jvo, rafs) ->
let rafs' = either (const []) id (runReaderT (jvokatna jvo) defaults)
in if rafs /= rafs'
then putStrLn $ "Failure splitting " ++ jvo ++ ": expected " ++ show rafs
++ ", got " ++ show rafs'
else putStrLn $ "jvokatna " ++ show jvo ++ " = " ++ show rafs')
$ map read (lines input)
| jwodder/jbobaf | haskell/cipra/jvokatna.hs | mit | 501 | 0 | 19 | 117 | 199 | 99 | 100 | 13 | 3 |
{-# OPTIONS_GHC -Wall -fno-warn-orphans #-}
{-# LANGUAGE InstanceSigs #-}
{-# LANGUAGE TypeSynonymInstances #-}
import Test.QuickCheck
import AlignPar as AP
-- import AlnHtml
--import Align as A
-- import Data.Array
import Control.Monad
--import Data.Char
import qualified Data.ByteString.Char8 as BS
-- import Debug.Trace
arbitraryNonEmpty :: Gen String
arbitraryNonEmpty = arbitrary `suchThat` (not . null)
arbitraryNuc :: Gen Char
arbitraryNuc =
oneof $ map return ['A', 'G', 'C', 'T']
maxSeqLen :: Int
maxSeqLen = 500
instance Arbitrary Seq where
arbitrary = do
length_ <- choose (1,maxSeqLen)
str <- (liftM (take length_) $ infiniteListOf arbitraryNuc) :: Gen String
return $ AP.tobs str
-- return $ AP.tobs (trace ("length_ is " ++ (show length_) ++ "str " ++ str) str)
arbitrarySeq :: Gen Seq
arbitrarySeq = do
length_ <- choose (1,maxSeqLen)
str <- (liftM (take length_) $ listOf arbitraryNuc) :: Gen String
return $ AP.tobs str
-- return $ AP.tobs (trace ("length_ is" ++ (show length_)) str)
instance Arbitrary Grid where
arbitrary = do
seq1 <- arbitrarySeq
seq2 <- arbitrarySeq
--return $ grid (trace "Seq1: " seq1) (trace "Seq2: " seq2)
return $ grid seq1 seq2
instance Arbitrary Cell where
arbitrary = do
n <- elements [1,2,3] :: Gen Integer
let cell = case n of 1 -> Diag
2 -> Across
3 -> Up
_ -> error "1-3 only plz"
liftM cell arbitrary
equalsItself :: Seq -> Bool
equalsItself s = s == s
noEmptySeqs :: Seq -> Bool
noEmptySeqs s = s /= BS.empty
equalThemselves :: [Seq] -> Bool
equalThemselves xs = and $ map equalsItself xs
isStart :: Cell -> Bool
isStart (Start _) = True
isStart _ = False
startIsStart :: Seq -> Seq -> Bool
startIsStart s1 s2 = isStart $ lookUp (grid s1 s2) 0 0
intslessThan10 :: Gen Int
intslessThan10 = suchThat arbitrary (<10)
lessThan10 :: Int -> Bool
lessThan10 = (<10)
triviallyTrue :: Property -- Property is Gen Prop
-- Prop is not used anywhere else
-- but is an instance of Testable
triviallyTrue = forAll intslessThan10 lessThan10
-- forAll (arbitrary `suchThat` (<10)) lessThan10
pointsToBest :: GridAndCoords -> Bool
pointsToBest (GridAndCoords g 0 0 _s1 _s2) = case lookUp g 0 0 of
Start _ -> True
_ -> False
pointsToBest (GridAndCoords g 0 y _s1 _s2) = case lookUp g 0 y of
Up _ -> True
_ -> False
pointsToBest (GridAndCoords g x 0 _s1 _s2) = case lookUp g x 0 of
Across _ -> True
_ -> False
pointsToBest (GridAndCoords g x y s1 s2) = case lookUp g x y of
Diag _ -> and [diagScore >= upScore,
diagScore >= acrossScore]
Across _ -> and [acrossScore >= upScore,
acrossScore >= diagScore]
Up _ -> and [upScore >= diagScore,
upScore >= acrossScore]
Start _ -> False
where
diagScore = (scoreOf $ lookUp g (x - 1) (y - 1)) + scoreAt s1 x s2 y
upScore = scoreOf (lookUp g x (y - 1)) - gapPenalty
acrossScore = scoreOf (lookUp g (x - 1) y) - gapPenalty
scoreThisCellIsRight :: GridAndCoords -> Bool
scoreThisCellIsRight (GridAndCoords g x y s1 s2) = case lookUp g x y of
Diag score -> (scoreOf $ lookUp g (x - 1) (y - 1)) + scoreAt s1 x s2 y == score
Up score -> (scoreOf $ lookUp g x (y - 1)) - gapPenalty == score
Across score -> (scoreOf $ lookUp g (x - 1) y) - gapPenalty == score
Start score -> (scoreAt s1 x s2 y) == score
inRangeOfGrid :: Grid -> ((Int, Int) -> Int) -> Int -> Bool
inRangeOfGrid g sel x = and [x >= 0,
x <= (sel $ gridBounds g)]
inXRangeOfGrid :: Grid -> Int -> Bool
inXRangeOfGrid g = inRangeOfGrid g fst
inYRangeOfGrid :: Grid -> Int -> Bool
inYRangeOfGrid g = inRangeOfGrid g snd
-- this is an ugly, heavyweight thing
-- including a grid, arbitrary coordinates, and
-- the arbitrary sequences that produce the grid.
-- Having the sequences handy is nice for visualizing test failures.
data GridAndCoords = GridAndCoords Grid Int Int Seq Seq
deriving Show
instance Arbitrary GridAndCoords where
arbitrary = do
-- g <- arbitrary
s1 <- arbitrary
s2 <- arbitrary
g <- return $ grid s1 s2
x <- choose (0, xMax g)
y <- choose (0, yMax g)
return $ GridAndCoords g x y s1 s2
-- arbitrary = arbitrary >>= \s1 ->
-- arbitrary >>= \s2 ->
-- liftM2 grid s1 s2 >>= \g ->
-- liftM3 GridAndCoords
trivial2 :: GridAndCoords -> Bool
trivial2 (GridAndCoords _g _x _y _s1 _s2) = True
genWorks :: GridAndCoords -> Bool
genWorks (GridAndCoords g x y _s1 _s2) = and [inXRangeOfGrid g x,
inYRangeOfGrid g y]
genWorksDetail :: Seq -> Seq -> Bool
genWorksDetail s1 s2 = and [inXRangeOfGrid g x,
inYRangeOfGrid g y]
where
g = grid s1 s2
x = 0 -- hmm
y = 0
derivingOrd :: Cell -> Cell -> Bool
derivingOrd c1 c2 = (c1 > c2) == (scoreOf c1 > scoreOf c2)
main :: IO ()
main = do
print "I do not want to gen empty Seqs"
quickCheck noEmptySeqs
print "simple stuff"
quickCheck equalsItself
quickCheck equalThemselves
quickCheck startIsStart
quickCheck triviallyTrue
print "arbitrary :: Gen GridAndCoords is sane"
quickCheck genWorks
print "points to Best"
quickCheck pointsToBest
print "instance Ord Cell is correct"
quickCheck derivingOrd
print "score is right in this cell"
quickCheck scoreThisCellIsRight
| gregoryck/biocalc | testbiocalc.hs | mit | 6,341 | 0 | 14 | 2,280 | 1,705 | 860 | 845 | 128 | 7 |
module Week4.Week4 where
import Data.Char
import Data.List
import Data.Maybe
import Data.String.Utils
import Week4.BST
ex1 :: a -> b -> b
ex1 _ b = b
--must return b - only option
ex2 :: a -> a -> a
ex2 y z = z
-- could return either a or some operation between them
ex3 :: Int -> a -> a
ex3 a b = b
-- to be total must return a, you may not have a type that interacts with Int
ex4 :: Bool -> a -> a -> a
ex4 tf v1 v2
| tf == True = v1
| tf == False = v2
-- could do a number of things...
ex5 :: Bool -> Bool
ex5 trueOrFalse
| (trueOrFalse == True) = False
| (trueOrFalse == False) = True
ex6 :: (a -> a) -> a
ex6 x = error "Don't see how this can be written"
ex7 :: (a -> a) -> a -> a
ex7 f x = f x
-- simply applies f to x
ex8 :: [a] -> [a]
ex8 a = a
ex9 :: (a -> b) -> [a] -> [b]
ex9 = map
-- getting something from a Maybe needs to account for the Nothing condition. To do this you need to supply a default value of some specific type (check out fromMaybe in Data.Maybe)
ex10 :: Maybe a -> a
ex10 x = error "No can do"
ex11 :: a -> Maybe a
ex11 c = Just c
ex12 :: Maybe a -> Maybe a
ex12 z = z
insertBST :: (a -> a -> Ordering) -> a -> BST a -> BST a
insertBST _ x Leaf = Node Leaf x Leaf
insertBST f x (Node a y b)
| f x y == EQ || f x y == LT = Node (insertBST f x a) y b
| f x y == GT = Node a y (insertBST f x b)
insertBST _ _ _ = error "impossible"
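-- Illustrative helper (not part of the exercises): building a tree by folding
-- 'insertBST' over a list, e.g. @fromListBST [3,1,2]@.
fromListBST :: Ord a => [a] -> BST a
fromListBST = foldr (insertBST compare) Leaf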
allCaps :: [String] -> Bool
allCaps s = foldl (&&) True $ map fA s
where fA s
| s == "" = False
| otherwise = isUpper (s!!0)
dropTrailingWhitespace :: String -> String
dropTrailingWhitespace s = rstrip s
firstLetters :: [String] -> [Char]
firstLetters ss = map fromJust $ filter isJust $ map safeHead ss
safeHead :: [a] -> Maybe a
safeHead [] = Nothing
safeHead (x:_) = Just x
asList :: [String] -> String
asList ss = "[" ++ intercalate "," ss ++ "]"
| rglew/cis194 | src/Week4/Week4.hs | mit | 1,918 | 0 | 11 | 527 | 771 | 392 | 379 | 54 | 1 |
module Jenkins.Client.RunBuild where
import qualified Data.Text as T
import qualified Data.ByteString as BS
import Jenkins.Client.Types
import qualified Jenkins.Endpoints as JEP
import Options (BuildParams(..))
runBuild :: T.Text
-> BuildParams
-> Client ()
runBuild name params =
JEP.runBuild name params >>= getResponseBody >> return ()
| afiore/jenkins-tty.hs | src/Jenkins/Client/RunBuild.hs | mit | 362 | 0 | 8 | 66 | 99 | 59 | 40 | 11 | 1 |
{-# LANGUAGE FlexibleContexts #-}
module Jakway.Blackjack.IO.DatabaseReads
(readPlayers,
readHandStatement,
readHand,
readPlayerHands,
getNumPlayers,
readMatchStatement,
readMatch,
getNumMatches
)
where
import Prelude hiding (lookup)
import Jakway.Blackjack.CardOps
import Jakway.Blackjack.Match
import Jakway.Blackjack.IO.DatabaseCommon
import Jakway.Blackjack.IO.TableNames
import Jakway.Blackjack.Result
import Jakway.Blackjack.Util (innerMapTuple4, ssub)
import Database.HDBC
import qualified Data.Map.Strict as HashMap
import Data.Maybe (fromJust, isNothing)
import Control.Monad (join, liftM)
-- |returns the number of matches from the database
-- it's an error to call getNumMatches on a table that hasn't been created yet
getNumMatches :: (IConnection a) => a -> TableNames -> IO (Integer)
getNumMatches conn tableNames = quickQuery' conn queryStr [] >>= (return . join) >>=
--if there aren't any rows then the number of matches is 0
(\res -> case res of [] -> return 0
[x] -> return . fromSql $ x
_ -> error $ "Error in getNumMatches: unrecognized value returned from database for query " ++ queryStr)
--select only unique rows because the actual number of
--matches is the whichGame column
--any match with more than 2 players will take up more
--than 1 row in the matches table
--see http://stackoverflow.com/questions/11250253/postgresql-countdistinct-very-slow
where queryStr = ssub "SELECT COUNT(*) FROM (SELECT DISTINCT whichGame FROM ?) AS temp" [(getMatchTableName tableNames)]
readPlayers :: (IConnection a) => a -> TableNames -> IO ([Int])
readPlayers conn tableNames = do
values <- quickQuery' conn ("SELECT whichPlayer FROM " ++ playerTable) []
-- |quickQuery' returns [[SqlValue]]. Collapse the list.
(return . (map fromSql)) (join values)
where playerTable = getPlayerTableName tableNames
readHandStatement :: (IConnection a) => a -> TableNames -> IO (Statement)
readHandStatement conn tableNames = prepare conn $ "SELECT thisCard FROM " ++ handTable ++ " WHERE whichHand=?"
where handTable = getHandTableName tableNames
readHand :: Statement -> Integer -> IO (Maybe Hand)
readHand statement whichHand = do
_ <- execute statement [toSql whichHand]
handRows <- fetchAllRows' statement
let cardIds = (map fromSql (join handRows)) :: [Int]
return $ Just $ foldr (\thisId hand -> let card = getCard thisId in
case hand of [] -> return card
_ -> card : hand) [] cardIds
where getCard thisId = (fromJust $ HashMap.lookup thisId idCardMap)
readPlayerHandIds :: (IConnection a) => a -> TableNames -> Int -> IO (Maybe [Int])
readPlayerHandIds conn tableNames whichPlayer = do
--DISTINCT removes duplicates
--see https://www.sqlite.org/lang_select.html and http://www.postgresql.org/docs/9.0/static/sql-select.html
values <- (liftM join) $ (quickQuery' conn ("SELECT DISTINCT whichHand FROM " ++ playerTable) [])
case values of [] -> return Nothing
_ -> return . return . (map fromSql) $ values
where playerTable = getPlayerTableName tableNames
readPlayerHands :: (IConnection a) => Statement -> a -> Int -> IO (Maybe [Hand])
readPlayerHands statement whichPlayer = undefined
--readPlayerHands statement whichPlayer = do
-- mayHandIds <- readPlayerHandIds conn whichPlayer
-- case mayHandsIds of Nothing -> return Nothing
-- Just (ids) ->
getNumPlayers :: (IConnection a) => a -> TableNames -> IO (Int)
getNumPlayers conn tableNames = do
query <- prepare conn ("SELECT * FROM " ++ playerTable)
_ <- execute query []
rows <- fetchAllRows' query
return . length $ rows
where playerTable = getPlayerTableName tableNames
readMatchStatement :: (IConnection a) => a -> TableNames -> IO (Statement)
readMatchStatement conn tableNames = prepare conn $ "SELECT dealersHand, whichPlayer, thisPlayersHand, playerResult FROM " ++ matchesTable ++ " WHERE whichGame=?"
where matchesTable = getMatchTableName tableNames
readMatch :: Statement -> Statement -> Integer -> IO (Maybe Match)
readMatch rMatchStatement rHandStatement whichGame = do
_ <- execute rMatchStatement [iToSql (fromInteger whichGame)]
matchRows <- fetchAllRows' rMatchStatement
case matchRows of [[]] -> return Nothing
_ -> extractMatchData rHandStatement matchRows
rowToTuple :: [SqlValue] -> Maybe (Integer, Int, Integer, Int)
rowToTuple thisRow
| (length thisRow) < 4 = Nothing
| otherwise = return . (innerMapTuple2and4 fromIntegral) . (innerMapTuple4 fromSql) $ (thisRow !! 0, thisRow !! 1, thisRow !! 2, thisRow !! 3)
where innerMapTuple2and4 f (a,b,c,d) = (a, f b, c, f d)
extractMatchData :: Statement -> [[SqlValue]] -> IO (Maybe Match)
extractMatchData rHandStatement rows = do
--TODO: there must be a more elegant way to check if there's
--a Nothing in an array and bind it
let mayCheckedRows = map rowToTuple rows
if mayCheckedRows == [] || (elem Nothing mayCheckedRows) then return Nothing
else do
let checkedRows = (convResult (map fromJust mayCheckedRows)) :: [(Integer, Int, Integer, Result)]
--get the dealer's hand ID
--it's the same dealer's hand for every game in this match so just get the ID from the first row
let dHandId = fstIn4 . head $ checkedRows --we already checked that the array isn't empty, so it must have at least 1 array with 1 item
dHand <- readHand rHandStatement dHandId
--make sure the dealers hand exists
if isNothing dHand then return Nothing else do
(pIds, pHands, pResults) <- liftM unzip3 . sequence $ map (\(_, playerId, playersHandId, playersResult) -> readHand rHandStatement playersHandId >>=
(\playersReadHand -> return (playerId, playersReadHand, playersResult))) checkedRows
-- **********************************
--TODO: rewrite this using bind?
if elem Nothing pHands then return Nothing
--the fromJust is OK
--because we're checking
--that it isn't Nothing
else return . return $ Match (fromJust dHand) pIds (map fromJust pHands) pResults
where fstIn4 (a,_,_,_) = a
-- |convert the player result from an integer (its database
-- representation) back to an enum
convResult = map (\(a,b,c,d) -> (a,b,c, toEnum d))
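-- |Illustrative helper (hypothetical, not part of the original exports):
-- read a single match by composing the prepared statements above. Assumes
-- the tables referenced by 'TableNames' already exist.
readOneMatch :: (IConnection a) => a -> TableNames -> Integer -> IO (Maybe Match)
readOneMatch conn tableNames whichGame = do
    matchStmt <- readMatchStatement conn tableNames
    handStmt <- readHandStatement conn tableNames
    readMatch matchStmt handStmt whichGame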
| tjakway/blackjack-simulator | src/Jakway/Blackjack/IO/DatabaseReads.hs | mit | 6,969 | 0 | 22 | 1,883 | 1,615 | 857 | 758 | 88 | 4 |
{-# language TypeApplications #-}
module Sandbox.Data.Vector.Unboxed.Extra where
import qualified Data.Vector.Unboxed as V
import qualified Data.Map.Strict as M
quicksort :: (V.Unbox a, Ord a) => V.Vector a -> V.Vector a
quicksort xs
| V.null xs = xs
| otherwise =
let
(hds, tl) = V.splitAt 1 xs
hd = V.head hds
smallerSorted = quicksort . V.filter (<= hd) $ tl
biggerSorted = quicksort . V.filter (> hd) $ tl
in smallerSorted V.++ (V.singleton hd) V.++ biggerSorted
-- | The average number.
--
-- The sum of the values divided by the number of values.
mean :: (V.Unbox a, Real a) => V.Vector a -> Double
mean xs = (realToFrac (V.sum xs) / (fromIntegral (V.length xs)))
-- | Weighted mean
--
-- The weighted sum of the values divided by the number of values.
meanWeighted :: (Fractional b, V.Unbox a, Real a) => V.Vector a -> V.Vector a -> b
meanWeighted xs ws = realToFrac weightedSum / realToFrac weight
where
weightedSum = V.foldl' (\m (x, w) -> m + x*w) 0 (V.zip xs ws)
weight = V.sum ws
-- | The middle number.
--
-- Found by ordering all data points and picking out the one in the middle, or
-- if there are two middle numbers, taking the mean of those two numbers.
median :: (Fractional b, V.Unbox a, Real a) => V.Vector a -> b
median = median' . quicksort
-- | The middle number.
--
-- !!! Prerequisite unchecked: Vector is sorted.
--
-- Found by ordering all data points and picking out the one in the middle, or
-- if there are two middle numbers, taking the mean of those two numbers.
median' :: (Fractional b, V.Unbox a, Real a) => V.Vector a -> b
median' xs
| even (V.length xs) = realToFrac ((xs V.! (middle-1)) + xs V.! middle) / 2
| otherwise = realToFrac (xs V.! middle)
where
middle = truncate @Double (fromIntegral (V.length xs) / 2)
-- | The value that appears most often.
--
-- If there are several values that appear the same number of times, take the
-- smallest of them.
mode :: (V.Unbox a, Ord a, Num a) => V.Vector a -> a
mode xs = fst $ M.foldlWithKey' makeMode (hd, 0) occurrences
where
hd = V.head xs
occurrences = V.foldl (\m a -> M.insertWith (+) a (1::Int) m) M.empty xs
makeMode (ma, mi) a i
| i > mi = (a, i)
| i == mi = (min a ma, i)
| otherwise = (ma, mi)
-- | Standard deviation
std :: (Fractional b, V.Unbox a, Real a) => V.Vector a -> b
std xs = realToFrac (sqrt (squaredDist / (fromIntegral size)))
where
size = V.length xs
mean' = mean xs
squaredDist = V.foldl' (\a x -> a + (((realToFrac x) - mean') ^^ (2::Int))) 0 xs
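-- Example (illustrative), assuming the functions above:
--
-- > let xs = V.fromList [1, 2, 2, 3, 4] :: V.Vector Int
-- > mean xs              -- 2.4
-- > median xs :: Double  -- 2.0
-- > mode xs              -- 2
-- > std xs :: Double     -- roughly 1.02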
| 4e6/sandbox | haskell/Sandbox/Data/Vector/Unboxed/Extra.hs | mit | 2,589 | 0 | 16 | 595 | 941 | 499 | 442 | 40 | 1 |
import MyLib
answer n = sum $ digits n
where
--n = pow 100
| yuto-matsum/contest-util-hs | src/Euler/020.hs | mit | 67 | 0 | 6 | 22 | 21 | 11 | 10 | 3 | 1 |
{-# LANGUAGE TemplateHaskell #-}
import Language.Haskell.TH.Syntax
main = $(return $(return $ InfixE
(Just $
(ConE $ mkName "VarE") `AppE`
((VarE $ mkName "mkName") `AppE` (LitE $ StringL "putStrLn")))
(ConE $ mkName "AppE")
(Just $
(ConE $ mkName "LitE") `AppE`
((ConE $ mkName "StringL") `AppE` (LitE $ StringL "Yo dawg"))))) | stefan-j/ProjectEuler | test.hs | mit | 384 | 0 | 18 | 104 | 150 | 82 | 68 | 10 | 1 |
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Hakyll.Web.Agda
( agdaCompilerWith
, agdaVerbosityQuiet
, CommandLineOptions(..)
, PragmaOptions(..)
, defaultAgdaOptions
, defaultAgdaPragmaOptions
, mkFixStdlibLink
, mkFixLocalLink
, agdaModule
, agdaModuleFromPath
) where
import qualified Agda.Main as Agda
import Agda.Interaction.Options
import qualified Agda.Interaction.Highlighting.HTML as Agda (generateHTML)
import qualified Agda.Utils.Trie as Trie (singleton)
import Control.Exception (catchJust)
import Control.Monad (forM, void)
import qualified Data.ByteString as B
import Data.Frontmatter (parseYamlFrontmatterEither)
import qualified Data.List.Extra as L
import Data.Maybe (fromMaybe)
import qualified Data.Map as M
import qualified Data.Text as T
import qualified Data.Text.IO as T
import qualified Data.Text.ICU as ICU
import qualified Data.Text.ICU.Replace as ICU
import Data.Yaml (FromJSON(..), ToJSON(..), (.:), (.=))
import qualified Data.Yaml as Y
import Hakyll
import Text.Printf (printf)
import Text.Regex.TDFA ((=~))
import System.Directory (createDirectoryIfMissing, removeDirectoryRecursive)
import System.Exit (ExitCode(..), exitFailure)
import System.FilePath.Find ((~~?), (==?), always, extension, fileName, find)
import System.FilePath ((</>), (<.>), dropExtension, dropExtensions, makeRelative, pathSeparator)
-- |Default Agda command-line options. Rename of `defaultOptions`.
defaultAgdaOptions :: CommandLineOptions
defaultAgdaOptions = defaultOptions
-- |Default Agda pragma options. Rename of `defaultPragmaOptions`.
defaultAgdaPragmaOptions :: PragmaOptions
defaultAgdaPragmaOptions = defaultPragmaOptions
-- |Compile literate Agda to HTML
agdaCompilerWith :: CommandLineOptions -> Compiler (Item String)
agdaCompilerWith agdaOptions = cached "Hakyll.Web.Agda.agdaCompilerWith" $ do
item <- getResourceBody
let agdaPath = toFilePath (itemIdentifier item)
let moduleName = agdaModule (itemBody item)
TmpFile tmpPath <- newTmpFile ".lock"
let tmpDir = init (dropExtension tmpPath)
let mdPath = tmpDir </> moduleName <.> "md"
md <- unsafeCompiler $ do
createDirectoryIfMissing True tmpDir
-- Add input file and HTML options
let opts = agdaOptions
{ optInputFile = Just agdaPath
, optHTMLDir = tmpDir
, optGenerateHTML = True
, optHTMLHighlight = HighlightCode
}
-- Run Agda
let tcm = void $
Agda.runAgdaWithOptions [] Agda.generateHTML (Agda.defaultInteraction opts) "agda" opts
catchJust
(\case {e@ExitSuccess -> Just e; _ -> Nothing})
(Agda.runTCMPrettyErrors tcm)
(\_ -> return ())
-- Read output Markdown file
md <- readFile mdPath
    removeDirectoryRecursive tmpDir
return md
return $ itemSetBody md item
-- |Get Agda module name from code
agdaModule :: String -> String
agdaModule code = case regexResult of
(_, _, _, [moduleName]) -> moduleName
_ -> "Main"
where
moduleRegex = "module ([^ ]*) where" :: String
regexResult = code =~ moduleRegex :: (String, String, String, [String])
-- |Get Agda module from a path and a root directory
agdaModuleFromPath :: FilePath -> FilePath -> String
agdaModuleFromPath rootDir = map slashToDot . makeRelative rootDir . dropExtensions
where
slashToDot c = if c == '/' then '.' else c
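-- Examples (illustrative, assuming POSIX-style paths):
--
-- > agdaModule "module Foo.Bar where\n..."          == "Foo.Bar"
-- > agdaModuleFromPath "src" "src/Foo/Bar.lagda.md" == "Foo.Bar"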
-- |Suppress non-error output
agdaVerbosityQuiet :: Verbosity
agdaVerbosityQuiet = Trie.singleton [] 0
--------------------------------------------------------------------------------
-- Fix references to Agda standard library
--------------------------------------------------------------------------------
-- |Default URL for the Agda standard library.
defaultStdlibUrl :: String
defaultStdlibUrl = "https://agda.github.io/agda-stdlib"
readStdlibVersion :: FilePath -> IO String
readStdlibVersion stdlibPath = do
let changelogPath = stdlibPath </> "CHANGELOG.md"
changelog <- T.readFile changelogPath
let versionLine = head (T.lines changelog)
case T.stripPrefix "Version " versionLine of
Just versionStr -> return . T.unpack $ "v" <> T.strip versionStr
Nothing -> error $ printf "Could not read version from '%s'" changelogPath
-- |Fix references to the Agda standard library.
mkFixStdlibLink :: FilePath -> IO (String -> String)
mkFixStdlibLink stdlibPath = do
stdlibVersion <- readStdlibVersion stdlibPath
let stdlibUrl = defaultStdlibUrl </> stdlibVersion
re <- stdlibRegex stdlibPath
let replacement = ICU.rstring stdlibUrl <> "/$1.html$2"
return $ T.unpack . ICU.replaceAll re replacement . T.pack
-- |An ICU regular expression which matches links to the Agda standard library.
stdlibRegex :: FilePath -> IO ICU.Regex
stdlibRegex stdlibPath = do
modNames <- map T.pack <$> stdlibModules stdlibPath
let builtin = "Agda\\.[A-Za-z\\.]+"
let modPatns = T.replace "." "\\." <$> modNames
let modPatn = T.concat . L.intersperse "|" $ builtin : modPatns
let hrefPatn = "(" `T.append` modPatn `T.append` ")\\.html(#[^\"^']+)?"
return (ICU.regex [] hrefPatn)
-- |Gather all standard library modules given a path.
stdlibModules :: FilePath -> IO [String]
stdlibModules stdlibPath = do
let stdlibPathSrc = stdlibPath </> "src"
agdaFiles <- find always (extension ==? ".agda") stdlibPathSrc
let sepToDot c = if c == pathSeparator then '.' else c
let fileToMod = map sepToDot . dropExtension . makeRelative stdlibPathSrc
return . map fileToMod $ agdaFiles
--------------------------------------------------------------------------------
-- Fix references to local Agda modules
--------------------------------------------------------------------------------
newtype Frontmatter = Frontmatter
{ frontmatterPermalink :: FilePath
}
instance FromJSON Frontmatter where
parseJSON = Y.withObject "Frontmatter" $ \v -> Frontmatter
<$> v .: "permalink"
instance ToJSON Frontmatter where
toJSON Frontmatter{..} =
Y.object [ "permalink" .= frontmatterPermalink
]
-- |Create a function to fix URL references output by Agda HTML highlighter.
mkFixLocalLink :: FilePath -> IO (String -> String)
mkFixLocalLink rootDir = do
-- Get all Agda files in `rootDir`.
agdaFiles <- find always (fileName ~~? "*.lagda.md") rootDir
-- Get all permalinks and Agda module names from these files.
localLinkList <- forM agdaFiles $ \agdaFile -> do
frontmatterOrError <- parseYamlFrontmatterEither <$> B.readFile agdaFile
case frontmatterOrError of
Left errmsg -> do
printf "Parse error in '%s': %s\n" agdaFile errmsg
exitFailure
Right Frontmatter{..} ->
return (agdaModuleFromPath rootDir agdaFile, frontmatterPermalink)
-- Construct a Map from the local link list.
let localLinkMap = M.fromList localLinkList
-- Construct a function which looks up the URL in the map.
return $ \url -> fromMaybe url $ do
(oldPath, anchor) <- L.stripInfix ".html" url
newPath <- M.lookup oldPath localLinkMap
return $ newPath <> anchor
| wenkokke/sf | hs/Hakyll/Web/Agda.hs | mit | 7,187 | 0 | 19 | 1,375 | 1,659 | 893 | 766 | 134 | 2 |
module EntityManager where
import Control.Monad (liftM, forM, guard, when)
import Control.Monad.Trans.Maybe
import Control.Monad.Trans.Class
import Control.Lens
import Data.Monoid (mconcat)
import qualified Data.IntMap.Strict as I
import Data.IntMap.Strict (IntMap)
import qualified Data.Map.Strict as M
import Data.Map.Strict (Map)
import qualified Data.Set as S
import Types
-- * Entities
-- ** Addition and Removal
-- | These will probably be internal and will be called indirectly.
-- | Only empty entities can be created. Components can then be added to the created entity.
createEntity :: Cesh EntityId
createEntity = do
-- TODO: change to re-usable ids
nextId <- use entityCounter
entityCounter += 1
let newId = EntityId nextId
entitySet %= S.insert newId
return newId
-- | Remove entity and associated components. Returns False if entity was not found.
removeEntity :: EntityId -> Cesh Bool
removeEntity e = do
entityExists <- use $ entitySet . contains e
--let entityExists = e `S.member` entities
when entityExists $
removeComponents e
-- TODO: add remove for re-usable ids
return entityExists
where
-- remove any components associated to given entity O(t*log(n(t))) (hopefully)
removeComponents :: EntityId -> Cesh ()
removeComponents (EntityId eid) =
compsByType . traverse %= (at eid .~ Nothing)
removeComponent :: ComponentLocation -> Cesh ()
removeComponent (ComponentLocation tag (EntityId eid)) =
-- update 2D IntMap: update at tag, traverse Just value at eid, setting it to Nothing which removes it
compsByType . at tag %= (_Just . at eid .~ Nothing)
-- ** Checks and getters
entityMember :: EntityId -> IntMap a -> Bool
entityMember (EntityId eid) = I.member eid
entityLookup :: EntityId -> IntMap a -> Maybe a
entityLookup (EntityId eid) = I.lookup eid
{-
-- | This searches entity's components from all components
getEntityComponents :: EntityId -> Cesh [ComponentLocation]
getEntityComponents e = do
allByType <- use compsByType
where isMember = entityMember e
-}
-- | Get parent(s), ancestors also?
getEntityParents :: EntityId -> Cesh [EntityId]
getEntityParents eid = do
mParents <- use $ entityParents . at eid :: Cesh (Maybe [EntityId])
case mParents of
Just parents -> do
ancestors <- concatMapM getEntityParents parents
return $ parents ++ ancestors
Nothing -> return []
where concatMapM f xs = liftM concat (mapM f xs)
-- | Returns list of Bools corresponding to given list of 'TagId's
entityHasTags :: EntityId -> [TagId] -> Cesh (Map TagId Bool)
entityHasTags e tags = do
singletons <- forM tags $ \tag -> do
bool <- entityHasTag e tag
return $ M.singleton tag bool
return $ mconcat singletons
entityHasTag :: EntityId -> TagId -> Cesh Bool
entityHasTag e tagId = do
maybeTagComps <- use $ compsByType . at tagId
return $ case maybeTagComps of
Just tagComps -> e `entityMember` tagComps
Nothing -> False
-- | Tests whether the entity contains all components given by '[TagId]'.
-- Some (but not all) of them may come from parents or ancestors; if so,
-- returns the locations of the requested components.
resolveEntity :: EntityId -> [TagId] -> MaybeT Cesh [ComponentLocation]
resolveEntity e tags = do
entityOwns <- lift entityMatch
parentsOwns <- lift parentsMatch
guard $ entityCheckTags (M.elems entityOwns) (M.elems parentsOwns)
-- union discards duplicates in the second parameter
let allMatches = entityOwns `M.union` parentsOwns
-- now should hold:
-- guard . and $ elems allMatches
return . map cLocation $ M.keys allMatches
where
cLocation tag = ComponentLocation tag e
getParents = getEntityParents e
entityMatch :: Cesh (Map TagId Bool)
entityMatch = entityHasTags e tags
parentsMatch :: Cesh (Map TagId Bool)
parentsMatch = do
parents <- getParents
results <- forM parents $ \parent ->
entityHasTags parent tags
return $ mconcat results
or2 = zipWith (||)
entityCheckTags :: [Bool] -> [Bool] -> Bool
entityCheckTags entityOwns parentsOwns =
and entityOwns ||
(or entityOwns &&
and (entityOwns `or2` parentsOwns)
)
| TK009/CESH | src/EntityManager.hs | mit | 4,344 | 0 | 14 | 1,031 | 1,017 | 517 | 500 | -1 | -1 |
{-# LANGUAGE TemplateHaskell #-}
module TemplateHaskell where
import Data.List (intercalate)
import Language.Haskell.TH
emptyShow :: Name -> Q [Dec]
emptyShow name = [d|instance Show $(conT name) where show _ = "empty"|]
listFields :: Name -> Q [Dec]
listFields name = do
TyConI (DataD _ _ _ [RecC _ fields] _) <- reify name
let names = map (\(name, _, _) -> name) fields
let showField :: Name -> Q Exp
showField name = let s = nameBase name in [|\x -> s ++ " = " ++ show ($(varE name) x)|]
let showFields :: Q Exp
showFields = listE $ map showField names
[d|instance Show $(conT name) where show x = intercalate ", " (map ($ x) $showFields)|]
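-- Illustrative usage (hypothetical record, spliced from another module):
--
-- > data Person = Person { name :: String, age :: Int }
-- > $(listFields ''Person)
-- >
-- > -- show (Person "Ada" 36) now yields: name = "Ada", age = 36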
| limdauto/learning-haskell | snippets/TemplateHaskell.hs | mit | 684 | 0 | 14 | 157 | 211 | 112 | 99 | 15 | 1 |
module StmContainers.Map
(
Map,
new,
newIO,
null,
size,
focus,
lookup,
insert,
delete,
reset,
unfoldlM,
listT,
)
where
import StmContainers.Prelude hiding (insert, delete, lookup, alter, foldM, toList, empty, null)
import qualified StmHamt.Hamt as A
import qualified Focus as B
import qualified DeferredFolds.UnfoldlM as C
-- |
-- Hash-table, based on STM-specialized Hash Array Mapped Trie.
newtype Map key value =
Map (A.Hamt (Product2 key value))
-- |
-- Construct a new map.
{-# INLINABLE new #-}
new :: STM (Map key value)
new =
Map <$> A.new
-- |
-- Construct a new map in IO.
--
-- This is useful for creating it on a top-level using 'unsafePerformIO',
-- because using 'atomically' inside 'unsafePerformIO' isn't possible.
{-# INLINABLE newIO #-}
newIO :: IO (Map key value)
newIO =
Map <$> A.newIO
-- |
-- Check, whether the map is empty.
{-# INLINABLE null #-}
null :: Map key value -> STM Bool
null (Map hamt) =
A.null hamt
-- |
-- Get the number of elements.
{-# INLINABLE size #-}
size :: Map key value -> STM Int
size =
C.foldlM' (\ x _ -> return (succ x)) 0 . unfoldlM
-- |
-- Focus on a value by the key.
--
-- This function allows to perform composite operations in a single access
-- to the map's row.
-- E.g., you can look up a value and delete it at the same time,
-- or update it and return the new value.
{-# INLINE focus #-}
focus :: (Eq key, Hashable key) => B.Focus value STM result -> key -> Map key value -> STM result
focus valueFocus key (Map hamt) =
A.focus rowFocus (\(Product2 key _) -> key) key hamt
where
rowFocus =
B.mappingInput (\value -> Product2 key value) (\(Product2 _ value) -> value) valueFocus
-- |
-- Look up an item.
{-# INLINABLE lookup #-}
lookup :: (Eq key, Hashable key) => key -> Map key value -> STM (Maybe value)
lookup key =
focus B.lookup key
-- |
-- Insert a value at a key.
{-# INLINE insert #-}
insert :: (Eq key, Hashable key) => value -> key -> Map key value -> STM ()
insert value key (Map hamt) =
void (A.insert (\(Product2 key _) -> key) (Product2 key value) hamt)
-- |
-- Delete an item by a key.
{-# INLINABLE delete #-}
delete :: (Eq key, Hashable key) => key -> Map key value -> STM ()
delete key =
focus B.delete key
-- |
-- Delete all the associations.
{-# INLINABLE reset #-}
reset :: Map key value -> STM ()
reset (Map hamt) =
A.reset hamt
-- |
-- Stream the associations actively.
--
-- Amongst other features this function provides an interface to folding.
{-# INLINABLE unfoldlM #-}
unfoldlM :: Map key value -> UnfoldlM STM (key, value)
unfoldlM (Map hamt) =
fmap (\ (Product2 k v) -> (k, v)) (A.unfoldlM hamt)
-- |
-- Stream the associations passively.
{-# INLINE listT #-}
listT :: Map key value -> ListT STM (key, value)
listT (Map hamt) =
fmap (\ (Product2 k v) -> (k, v)) (A.listT hamt)
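-- |
-- Example (illustrative): composite operations are built with 'focus'.
-- Assuming the @lookupAndDelete@ combinator from the \"focus\" package,
-- a single-pass lookup-and-delete could be written as
--
-- > lookupAndDelete :: (Eq key, Hashable key) => key -> Map key value -> STM (Maybe value)
-- > lookupAndDelete = focus B.lookupAndDelete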
| nikita-volkov/stm-containers | library/StmContainers/Map.hs | mit | 2,845 | 0 | 11 | 596 | 846 | 470 | 376 | 58 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE FlexibleContexts #-}
module BCMtools.Convert
( convert
, convertOptions
) where
import Data.Monoid ((<>))
import Control.Monad.Trans.Resource (runResourceT)
import qualified Data.ByteString.Char8 as B
import Data.ByteString.Lex.Fractional (readSigned, readExponential)
import Data.Conduit
import qualified Data.Conduit.List as CL
import qualified Data.Conduit.Binary as Bin
import qualified Data.HashMap.Strict as M
import Data.Maybe (fromJust)
import Data.List.Split (splitOn)
import Options.Applicative
import System.IO
import BCMtools.Types
import BCM (ContactMap, createContactMap, saveContactMap, closeContactMap)
import BCM.IOMatrix (DMatrix, MCSR, DSMatrix, MSMatrix, MMatrix)
convertOptions :: Parser Command
convertOptions = fmap Convert $ ConvertOptions
<$> strOption
( long "genome"
<> short 'g'
<> metavar "ASSEMBLY"
<> help "e.g., hg19, or a file" )
<*> fmap (splitOn ",") (strOption
( long "rownames"
<> short 'r'
<> metavar "ROW LABELS" ))
<*> fmap (splitOn ",") (strOption
( long "colnames"
<> short 'c'
<> metavar "COLOUMN LABELS" ))
<*> fmap readInt' (strOption
( long "resolution"
<> short 's'
<> metavar "RESOLUTION" ))
<*> switch
( long "sparse"
<> help "whether to use sparse encoding" )
<*> switch
( long "symmetric"
<> help "whether to use symmetric encoding" )
where
readInt' x = let (Just (i, left)) = B.readInt $ B.pack x
in case () of
_ | B.null left -> i
| left == "k" || left == "K" -> i * 1000
| otherwise -> i
convert :: FilePath -> FilePath -> Bool -> ConvertOptions -> IO ()
convert input output onDisk opt = do
genome <- case _genome opt of
"hg19" -> return hg19
fl -> readGenome fl
inputLength <- runResourceT $ Bin.sourceFile input $=
Bin.lines $$ CL.fold (\i _ -> i+1) 0
line1 <- B.split '\t' <$> withFile input ReadMode B.hGetLine
let fn = case line1 of
[_,_,_] -> field2
[_,_,_,_,_] -> field5
_ -> error "Please check your input format"
runner f = runResourceT $
Bin.sourceFile input $=
Bin.lines $= f $$
createContactMap output rows cols (_resolution opt) (Just inputLength)
rows = getChrSize genome $ _rownames opt
cols = getChrSize genome $ _colnames opt
case () of
_ | _sparse opt && _symmetric opt -> do
if onDisk
then createMapWith (runner fn :: IO (ContactMap MCSR))
else createMapWith (runner fn :: IO (ContactMap MCSR))
| _sparse opt ->
if onDisk
then createMapWith (runner fn :: IO (ContactMap MCSR))
else createMapWith (runner fn :: IO (ContactMap MCSR))
| _symmetric opt ->
if onDisk
then createMapWith (runner fn :: IO (ContactMap DSMatrix))
else createMapWith (runner fn :: IO (ContactMap MSMatrix))
| otherwise -> do
if onDisk
then createMapWith (runner fn :: IO (ContactMap DMatrix))
else createMapWith (runner fn :: IO (ContactMap MMatrix))
where
createMapWith run = do cm <- run
saveContactMap cm
closeContactMap cm
readGenome x = do
c <- B.readFile x
return $ M.fromList $ map ((\[a,b] -> (B.unpack a, readInt b)) . B.words) $ B.lines c
getChrSize g = map lookup'
where
lookup' x = (B.pack x, M.lookupDefault errMsg x g)
where
errMsg = error $ "Unknown chromosome: " ++ x
field2 = do
_ <- await
CL.map f
where
f l = let [x1,x2,v] = B.split '\t' l
in (B.pack chr1, readInt x1, B.pack chr2, readInt x2, readDouble v)
[chr1] = _rownames opt
[chr2] = _colnames opt
field5 = CL.map f
where
f l = let [x1,x2,x3,x4,x5] = B.split '\t' l
in (x1, readInt x2, x3, readInt x4, readDouble x5)
{-# INLINE convert #-}
readInt :: B.ByteString -> Int
readInt = fst . fromJust . B.readInt
{-# INLINE readInt #-}
readDouble :: B.ByteString -> Double
readDouble = fst . fromJust . readSigned readExponential
{-# INLINE readDouble #-}
hg19 :: M.HashMap String Int
hg19 = M.fromList [ ("chr1", 249250621)
, ("chr2", 243199373)
, ("chr3", 198022430)
, ("chr4", 191154276)
, ("chr5", 180915260)
, ("chr6", 171115067)
, ("chr7", 159138663)
, ("chrX", 155270560)
, ("chr8", 146364022)
, ("chr9", 141213431)
, ("chr10", 135534747)
, ("chr11", 135006516)
, ("chr12", 133851895)
, ("chr13", 115169878)
, ("chr14", 107349540)
, ("chr15", 102531392)
, ("chr16", 90354753)
, ("chr17", 81195210)
, ("chr18", 78077248)
, ("chr20", 63025520)
, ("chrY", 59373566)
, ("chr19", 59128983)
, ("chr22", 51304566)
, ("chr21", 48129895)
]
| kaizhang/BCMtools | app/BCMtools/Convert.hs | mit | 5,768 | 0 | 18 | 2,224 | 1,695 | 894 | 801 | 136 | 8 |
module Main where
import Faust.Faust
main :: IO ()
main = undefined
| Muzietto/transformerz | haskell/faust-stack/app/Main.hs | mit | 70 | 0 | 6 | 14 | 24 | 14 | 10 | 4 | 1 |
{-# LANGUAGE DeriveFunctor #-}
module Test.Hspec.Core.Formatters.V1.Free where
import Prelude ()
import Test.Hspec.Core.Compat
data Free f a = Free (f (Free f a)) | Pure a
deriving Functor
instance Functor f => Applicative (Free f) where
pure = Pure
Pure f <*> Pure a = Pure (f a)
Pure f <*> Free m = Free (fmap f <$> m)
Free m <*> b = Free (fmap (<*> b) m)
instance Functor f => Monad (Free f) where
return = pure
Pure a >>= f = f a
Free m >>= f = Free (fmap (>>= f) m)
liftF :: Functor f => f a -> Free f a
liftF command = Free (fmap Pure command)
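-- Example (illustrative): a minimal DSL on top of 'Free'.
--
-- > data Cmd next = Emit String next deriving Functor
-- >
-- > emit :: String -> Free Cmd ()
-- > emit s = liftF (Emit s ())
-- >
-- > run :: Free Cmd a -> ([String], a)
-- > run (Pure a)          = ([], a)
-- > run (Free (Emit s k)) = let (out, a) = run k in (s : out, a)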
| hspec/hspec | hspec-core/src/Test/Hspec/Core/Formatters/V1/Free.hs | mit | 591 | 0 | 10 | 155 | 283 | 142 | 141 | 17 | 1 |
import System.IO
import XMonad
import XMonad.Actions.NoBorders
import XMonad.Hooks.DynamicLog
import XMonad.Hooks.ManageDocks
import XMonad.Layout.NoBorders (smartBorders)
import XMonad.Util.EZConfig (additionalKeys)
import XMonad.Util.Run (spawnPipe)
------------------------------------------------------------------------
-- Status bar (Pretty Printing)
-- https://github.com/dmxt/Solarized-xmonad-xmobar
myBar = "xmobar"
myPP = xmobarPP
{ ppTitle = xmobarColor "#657b83" "" . shorten 100
, ppCurrent = xmobarColor "#93a1a1" "" . wrap "" ""
, ppSep = xmobarColor "#657b83" "" " | "
, ppUrgent = xmobarColor "#ff69b4" ""
, ppLayout = const "" -- Disables layout info
}
toggleStrutsKey XConfig {XMonad.modMask = modMask} = (modMask, xK_b)
------------------------------------------------------------------------
-- Colors and borders
myNormalBorderColor = "#002b36"
myFocusedBorderColor = "#93a1a1"
--myFocusedBorderColor = "#839496"
myBorderWidth = 1
------------------------------------------------------------------------
-- Keybindings
myKeys =
[ (( mod1Mask, xK_p ), spawn "dmenu_run \
\ -fn 'DejaVu Sans Mono-11' \
\ -nb '#002b36' \
\ -nf '#586e75' \
\ -sb '#073642' \
\ -sf '#93a1a1'" )
, (( mod1Mask, xK_e ), spawn "nautilus --no-desktop" )
, (( mod1Mask, 0x2d ), spawn "amixer -q sset Master 5%-" ) -- Lower volume
, (( mod1Mask, 0x3d ), spawn "amixer -q sset Master 5%+" ) -- Raise volume
, (( mod1Mask, xK_BackSpace ), spawn "amixer -D pulse set Master toggle" ) -- Mute and unmute
, (( mod1Mask .|. controlMask, xK_l ), spawn "slock" ) -- Lock screen
]
-- \ -sf '#839496'" )
------------------------------------------------------------------------
-- Main function
-- https://beginners-guide-to-xmonad.readthedocs.io/configure_xmonadhs.html
-- https://unix.stackexchange.com/questions/336701/xmonad-defaults-depreciation-what-is-the-future-proof-configuration
-- https://wiki.haskell.org/Xmonad/Config_archive/John_Goerzen's_Configuration
main = xmonad =<< statusBar myBar myPP toggleStrutsKey myConfig
------------------------------------------------------------------------
-- Main configuration
myConfig = def
{ manageHook = manageDocks <+> manageHook def
, layoutHook = smartBorders $ avoidStruts $ layoutHook def
, terminal = "rxvt-unicode"
, borderWidth = myBorderWidth
, normalBorderColor = myNormalBorderColor
, focusedBorderColor = myFocusedBorderColor
} `additionalKeys` myKeys
| csebesta/dotfiles | xmonad/.xmonad/xmonad.hs | mit | 2,512 | 0 | 9 | 384 | 406 | 246 | 160 | 35 | 1 |
-- The decimal number, 585 = 10010010012 (binary), is palindromic in both bases.
--
-- Find the sum of all numbers, less than one million, which are palindromic in base 10 and base 2.
--
-- (Please note that the palindromic number, in either base, may not include leading zeros.)
import Text.Printf
isPalindrome :: String -> Bool
isPalindrome s
  | head (reverse s) == '0' = False          -- reversing would give a leading zero
  | otherwise               = s == reverse s
calculate :: Int -- Filter by decimal palindromes, then binary palindromes, then sum
calculate = sum $ filter (isPalindrome . printf "%b") $ filter (isPalindrome . show) ([1..999999] :: [Int])
main :: IO ()
main = print calculate | daniel-beard/projecteulerhaskell | Problems/p36.hs | mit | 650 | 0 | 11 | 121 | 139 | 76 | 63 | 8 | 2 |
-- (C) 2011-14 Nicola Bonelli <[email protected]>
--
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, write to the Free Software Foundation,
-- Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
--
-- The full GNU General Public License is included in this distribution in
-- the file called "COPYING".
module Main where
import Network.PFq as Q
import Foreign
import System.Environment
import Network.PFq.Lang
import Network.PFq.Default
handler :: Q.Callback
handler h _ = print h
recvDispatch :: Ptr PFqTag -> IO()
recvDispatch q = do
Q.dispatch q handler 1000
-- cs <- Q.getGroupId q >>= Q.getGroupCounters q
-- print cs
recvDispatch q
dumper :: String -> IO ()
dumper dev = do
putStrLn $ "dumping " ++ dev ++ "..."
fp <- Q.open 64 4096
withForeignPtr fp $ \q -> do
Q.setTimestamp q True
gid <- Q.getGroupId q
Q.bindGroup q gid dev (-1)
Q.enable q
Q.groupComputation q gid (icmp >-> steer_ip >-> inc 0)
Q.getRxSlotSize q >>= \o -> putStrLn $ "slot_size: " ++ show o
recvDispatch q
main :: IO ()
main = do
args <- getArgs
case length args of
0 -> error "usage: test-dispatch dev"
_ -> dumper (head args)
| Mr-Click/PFQ | user/Haskell/test/test-dispatch.hs | gpl-2.0 | 1,827 | 0 | 14 | 450 | 345 | 175 | 170 | 30 | 2 |
import Test.HUnit
import FileMode
testRoundTrip :: String -> String -> Int -> Test
testRoundTrip msg exp inp = test $
assertEqual ("round-trip " ++ msg) exp (modeToString (modeFromInt inp))
testRoundTrips = test $ [
testRoundTrip "regular" "100644" 0o100644
, testRoundTrip "executable" "100755" 0o100755
, testRoundTrip "directory" "40000" 0o40000
, testRoundTrip "symlink" "120000" 0o120000
]
main = runTestTT testRoundTrips
| martine/gat | FileMode_test.hs | gpl-2.0 | 453 | 0 | 10 | 84 | 125 | 64 | 61 | 11 | 1 |
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE FlexibleInstances, TypeSynonymInstances, CPP #-}
{-
Copyright (C) 2009 John MacFarlane <[email protected]>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
module Yst.Types
where
import Data.Char
import qualified Data.HashMap.Strict as H
import Data.Time
import qualified Data.Text as T
import Text.StringTemplate
import Data.Aeson
import qualified Data.Map as M
import Data.Scientific (coefficient, base10Exponent)
import Control.Monad
#if MIN_VERSION_time(1,5,0)
#else
import System.Locale ( defaultTimeLocale )
#endif
data Site = Site {
siteTitle :: String
, sourceDir :: [FilePath]
, dataDir :: [FilePath]
, filesDir :: [FilePath]
, deployDir :: FilePath
, defaultLayout :: FilePath
, indexFile :: FilePath
, pageIndex :: M.Map String Page
, navigation :: [NavNode]
, navstyle :: NavStyle
} deriving (Show, Read, Eq)
data NavStyle = TopNav | SideNav
deriving (Show, Read, Eq)
data Source = TemplateFile FilePath
| SourceFile FilePath
deriving (Show, Read, Eq)
data Page = Page {
pageData :: [(String, DataSpec)]
, layoutFile :: Maybe FilePath
, sourceFile :: Source
, requiresFiles :: [FilePath]
, pageUrl :: String
, pageTitle :: String
, pageInMenu :: Bool
} deriving (Show, Read, Eq)
data DataSpec = DataConstant Node
| DataFromFile FilePath [DataOption]
| DataFromSqlite3 FilePath String [DataOption]
deriving (Show, Read, Eq)
data DataOption = OrderBy [(String, SortDirection)]
| GroupBy [String]
| Where FilterCond
| Limit Int
deriving (Show, Read, Eq)
data FilterCond = Filter FilterTest FilterArg FilterArg
| And FilterCond FilterCond
| Or FilterCond FilterCond
| Not FilterCond
| Has String
deriving (Show, Read, Eq)
data FilterArg = AttrValue String
| StringConstant String
| DateConstant Day
deriving (Show, Read, Eq)
data FilterTest = TestEq
| TestGt
| TestLt
| TestGtEq
| TestLtEq
| TestContains
deriving (Show, Read, Eq)
data NavNode = NavPage String String
| NavMenu String [NavNode]
deriving (Show, Read, Eq)
data Node = NString String
| NDate Day
| NList [Node]
| NMap [(String, Node)]
| NNil
deriving (Show, Read, Eq)
instance Ord Node where
compare (NString x) (NString y) = compare x y
compare (NDate x) (NDate y) = compare x y
compare (NList x) (NList y) = compare x y
compare (NMap x) (NMap y) = compare x y
compare NNil NNil = EQ
compare (NList x) y = compare (NList x) (NList [y])
compare x (NList y) = compare (NList [x]) (NList y)
compare (NString _) _ = GT
compare (NDate _) _ = GT
compare _ _ = GT
instance FromJSON Node where
parseJSON (String t) = do
let t' = T.unpack t
case parseAsDate t' of
Nothing -> return $ NString t'
Just d -> return $ NDate d
parseJSON (Object h) = case fromJSON (Object $ handleMerges h) of
Success y -> return $ NMap $ M.toList y
_ -> mzero
parseJSON x@(Array _) = case fromJSON x of
Success y -> return $ NList y
_ -> mzero
parseJSON (Bool b) = return $ NString $ show b
parseJSON (Number n)
| base10Exponent n >= 0 = return $ NString $ show $
coefficient n * (10 ^ base10Exponent n)
| otherwise = return $ NString $ show n
parseJSON _ = return $ NNil
handleMerges :: H.HashMap T.Text Value -> H.HashMap T.Text Value
handleMerges = H.foldrWithKey go H.empty
where go k (Object h) m | isMerge k = H.foldrWithKey go m h
go k v m = H.insert k v m
isMerge k = k == T.pack "<<"
instance ToJSON Node where
toJSON (NDate s) = toJSON (NString $ formatTime defaultTimeLocale "%x" s)
toJSON (NString s) = toJSON s
toJSON (NMap xs) = toJSON $ M.fromList xs
toJSON (NList xs) = toJSON xs
toJSON (NNil) = toJSON ()
data SortDirection = Ascending | Descending deriving (Show, Read, Eq)
data Format = HtmlFormat
| LaTeXFormat
| ConTeXtFormat
| PlainFormat
| ManFormat
| RTFFormat
| TexinfoFormat
| DocBookFormat
| OpenDocumentFormat
deriving (Show, Read, Eq)
instance StringTemplateShows String
where stringTemplateShow s = s
stringTemplateFormattedShow "uppercase" s = map toUpper s
stringTemplateFormattedShow "lowercase" s = map toUpper s
stringTemplateFormattedShow "capitalize" s = if null s
then ""
else toUpper (head s) : tail s
stringTemplateFormattedShow f _ = error $ "Unknown format: " ++ f
instance ToSElem String
where toSElem = stShowsToSE
instance ToSElem Node
where toSElem x = case x of
NString s -> toSElem s
NDate d -> toSElem d
NList xs -> toSElem xs
NMap xs -> toSElem $ M.fromList xs
NNil -> toSElem ""
parseAsDate :: (ParseTime t) => String -> Maybe t
parseAsDate s =
msum $ map (\fs -> parsetimeWith fs s) formats
where parsetimeWith = parseTimeM True defaultTimeLocale
formats = ["%x","%m/%d/%Y", "%D","%F", "%d %b %Y"]
| jgm/yst | Yst/Types.hs | gpl-2.0 | 6,466 | 0 | 12 | 2,157 | 1,736 | 918 | 818 | 147 | 2 |
{-| Unittest helpers for ganeti-htools.
-}
{-
Copyright (C) 2009, 2010, 2011, 2012, 2013 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
-}
module Test.Ganeti.TestCommon where
import Control.Applicative
import Control.Exception (catchJust)
import Control.Monad
import Data.List
import qualified Data.Set as Set
import System.Environment (getEnv)
import System.Exit (ExitCode(..))
import System.IO.Error (isDoesNotExistError)
import System.Process (readProcessWithExitCode)
import qualified Test.HUnit as HUnit
import Test.QuickCheck
import Test.QuickCheck.Monadic
import qualified Text.JSON as J
import Numeric
import qualified Ganeti.BasicTypes as BasicTypes
import Ganeti.Types
-- * Constants
-- | Maximum memory (1TiB, somewhat random value).
maxMem :: Int
maxMem = 1024 * 1024
-- | Maximum disk (8TiB, somewhat random value).
maxDsk :: Int
maxDsk = 1024 * 1024 * 8
-- | Max CPUs (1024, somewhat random value).
maxCpu :: Int
maxCpu = 1024
-- | Max vcpu ratio (random value).
maxVcpuRatio :: Double
maxVcpuRatio = 1024.0
-- | Max spindle ratio (random value).
maxSpindleRatio :: Double
maxSpindleRatio = 1024.0
-- | Max nodes, used just to limit arbitrary instances for smaller
-- opcode definitions (e.g. list of nodes in OpTestDelay).
maxNodes :: Int
maxNodes = 32
-- | Max opcodes or jobs in a submit job and submit many jobs.
maxOpCodes :: Int
maxOpCodes = 16
-- * Helper functions
-- | Checks for equality with proper annotation. The first argument is
-- the computed value, the second one the expected value.
(==?) :: (Show a, Eq a) => a -> a -> Property
(==?) x y = printTestCase
("Expected equality, but got mismatch\nexpected: " ++
show y ++ "\n but got: " ++ show x) (x == y)
infix 3 ==?
-- | Checks for inequality with proper annotation. The first argument
-- is the computed value, the second one the expected (not equal)
-- value.
(/=?) :: (Show a, Eq a) => a -> a -> Property
(/=?) x y = printTestCase
("Expected inequality, but got equality: '" ++
show x ++ "'.") (x /= y)
infix 3 /=?
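-- | Example property (illustrative only, not part of the original API):
-- written with '==?', a failing check reports both the computed and the
-- expected value.
prop_reverseTwice :: [Int] -> Property
prop_reverseTwice xs = reverse (reverse xs) ==? xs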
-- | Show a message and fail the test.
failTest :: String -> Property
failTest msg = printTestCase msg False
-- | A 'True' property.
passTest :: Property
passTest = property True
-- | Return the python binary to use. If the PYTHON environment
-- variable is defined, use its value, otherwise use just \"python\".
pythonCmd :: IO String
pythonCmd = catchJust (guard . isDoesNotExistError)
(getEnv "PYTHON") (const (return "python"))
-- | Run Python with an expression, returning the exit code, standard
-- output and error.
runPython :: String -> String -> IO (ExitCode, String, String)
runPython expr stdin = do
py_binary <- pythonCmd
readProcessWithExitCode py_binary ["-c", expr] stdin
-- | Check python exit code, and fail via HUnit assertions if
-- non-zero. Otherwise, return the standard output.
checkPythonResult :: (ExitCode, String, String) -> IO String
checkPythonResult (py_code, py_stdout, py_stderr) = do
HUnit.assertEqual ("python exited with error: " ++ py_stderr)
ExitSuccess py_code
return py_stdout
-- * Arbitrary instances
-- | Defines a DNS name.
newtype DNSChar = DNSChar { dnsGetChar::Char }
instance Arbitrary DNSChar where
arbitrary = liftM DNSChar $ elements (['a'..'z'] ++ ['0'..'9'] ++ "_-")
instance Show DNSChar where
show = show . dnsGetChar
-- | Generates a single name component.
genName :: Gen String
genName = do
n <- choose (1, 16)
dn <- vector n
return (map dnsGetChar dn)
-- | Generates an entire FQDN.
genFQDN :: Gen String
genFQDN = do
ncomps <- choose (1, 4)
names <- vectorOf ncomps genName
return $ intercalate "." names
-- | Combinator that generates a 'Maybe' using a sub-combinator.
genMaybe :: Gen a -> Gen (Maybe a)
genMaybe subgen = frequency [ (1, pure Nothing), (3, Just <$> subgen) ]
-- | Defines a tag type.
newtype TagChar = TagChar { tagGetChar :: Char }
-- | All valid tag chars. This doesn't need to match _exactly_
-- Ganeti's own tag regex, just enough for it to be close.
tagChar :: String
tagChar = ['a'..'z'] ++ ['A'..'Z'] ++ ['0'..'9'] ++ ".+*/:@-"
instance Arbitrary TagChar where
arbitrary = liftM TagChar $ elements tagChar
-- | Generates a tag
genTag :: Gen [TagChar]
genTag = do
-- the correct value would be C.maxTagLen, but that's way too
-- verbose in unittests, and at the moment I don't see any possible
-- bugs with longer tags and the way we use tags in htools
n <- choose (1, 10)
vector n
-- | Generates a list of tags (correctly upper bounded).
genTags :: Gen [String]
genTags = do
-- the correct value would be C.maxTagsPerObj, but per the comment
-- in genTag, we don't use tags enough in htools to warrant testing
-- such big values
n <- choose (0, 10::Int)
tags <- mapM (const genTag) [1..n]
return $ map (map tagGetChar) tags
-- | Generates a fields list. This uses the same character set as a
-- DNS name (just for simplicity).
genFields :: Gen [String]
genFields = do
n <- choose (1, 32)
vectorOf n genName
-- | Generates a list of a given size with non-duplicate elements.
genUniquesList :: (Eq a, Arbitrary a, Ord a) => Int -> Gen a -> Gen [a]
genUniquesList cnt generator = do
set <- foldM (\set _ -> do
newelem <- generator `suchThat` (`Set.notMember` set)
return (Set.insert newelem set)) Set.empty [1..cnt]
return $ Set.toList set
newtype SmallRatio = SmallRatio Double deriving Show
instance Arbitrary SmallRatio where
arbitrary = liftM SmallRatio $ choose (0, 1)
-- | Helper for 'genSet', declared separately due to type constraints.
genSetHelper :: (Ord a) => [a] -> Maybe Int -> Gen (Set.Set a)
genSetHelper candidates size = do
size' <- case size of
Nothing -> choose (0, length candidates)
Just s | s > length candidates ->
error $ "Invalid size " ++ show s ++ ", maximum is " ++
show (length candidates)
| otherwise -> return s
foldM (\set _ -> do
newelem <- elements candidates `suchThat` (`Set.notMember` set)
return (Set.insert newelem set)) Set.empty [1..size']
-- | Generates a set of arbitrary elements.
genSet :: (Ord a, Bounded a, Enum a) => Maybe Int -> Gen (Set.Set a)
genSet = genSetHelper [minBound..maxBound]
-- | Generate an arbitrary IPv4 address in textual form (non empty).
genIp4Addr :: Gen NonEmptyString
genIp4Addr = genIp4AddrStr >>= mkNonEmpty
-- | Generate an arbitrary IPv4 address in textual form.
genIp4AddrStr :: Gen String
genIp4AddrStr = do
a <- choose (1::Int, 255)
b <- choose (0::Int, 255)
c <- choose (0::Int, 255)
d <- choose (0::Int, 255)
return $ intercalate "." (map show [a, b, c, d])
-- | Generates an arbitrary IPv4 address with a given netmask in textual form.
genIp4NetWithNetmask :: Int -> Gen NonEmptyString
genIp4NetWithNetmask netmask = do
ip <- genIp4AddrStr
mkNonEmpty $ ip ++ "/" ++ show netmask
-- | Generate an arbitrary IPv4 network in textual form.
genIp4Net :: Gen NonEmptyString
genIp4Net = do
netmask <- choose (8::Int, 30)
genIp4NetWithNetmask netmask
-- | Helper function to compute the number of hosts in a network
-- given the netmask. (For IPv4 only.)
netmask2NumHosts :: Int -> Int
netmask2NumHosts n = 2^(32-n)
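-- For example, netmask2NumHosts 24 == 256.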
-- | Generates an arbitrary IPv6 network address in textual form.
-- The generated address is not simplified, e.g. an address like
-- "2607:f0d0:1002:0051:0000:0000:0000:0004" does not become
-- "2607:f0d0:1002:51::4"
genIp6Addr :: Gen String
genIp6Addr = do
rawIp <- vectorOf 8 $ choose (0::Integer, 65535)
return $ intercalate ":" (map (`showHex` "") rawIp)
-- | Generates an arbitrary IPv6 network in textual form.
genIp6Net :: Gen String
genIp6Net = do
netmask <- choose (8::Int, 126)
ip <- genIp6Addr
return $ ip ++ "/" ++ show netmask
-- * Helper functions
-- | Checks for serialisation idempotence.
testSerialisation :: (Eq a, Show a, J.JSON a) => a -> Property
testSerialisation a =
case J.readJSON (J.showJSON a) of
J.Error msg -> failTest $ "Failed to deserialise: " ++ msg
J.Ok a' -> a ==? a'
-- | Result to PropertyM IO.
resultProp :: (Show a) => BasicTypes.GenericResult a b -> PropertyM IO b
resultProp (BasicTypes.Bad err) = stop . failTest $ show err
resultProp (BasicTypes.Ok val) = return val
-- | Return the source directory of Ganeti.
getSourceDir :: IO FilePath
getSourceDir = catchJust (guard . isDoesNotExistError)
(getEnv "TOP_SRCDIR")
(const (return "."))
-- | Returns the path of a file in the test data directory, given its name.
testDataFilename :: String -> String -> IO FilePath
testDataFilename datadir name = do
src <- getSourceDir
return $ src ++ datadir ++ name
-- | Returns the content of the specified haskell test data file.
readTestData :: String -> IO String
readTestData filename = do
name <- testDataFilename "/test/data/" filename
readFile name
-- | Generate arbitrary values in the IO monad. This is a simple
-- wrapper over 'sample''.
genSample :: Gen a -> IO a
genSample gen = do
values <- sample' gen
case values of
[] -> error "sample' returned an empty list of values??"
x:_ -> return x
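-- | Example (illustrative): draw a single arbitrary FQDN in the IO monad,
-- e.g. for quick experiments in GHCi.
exampleFQDN :: IO String
exampleFQDN = genSample genFQDN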
| damoxc/ganeti | test/hs/Test/Ganeti/TestCommon.hs | gpl-2.0 | 9,899 | 0 | 16 | 2,028 | 2,263 | 1,207 | 1,056 | 168 | 2 |
{-# LANGUAGE TypeSynonymInstances, MultiParamTypeClasses #-}
module PCProblem.Quiz where
-- -- $Id$
import PCProblem.Type
import PCProblem.Param
import PCProblem.Generator
import Inter.Types
import Inter.Quiz
import Challenger.Partial
import Data.Array
import Autolib.Reporter
import Data.List (isPrefixOf)
import Autolib.ToDoc
import Autolib.Informed
instance OrderScore PCProblem where
scoringOrder _ = Increasing
instance Partial PCProblem PCP Folge where
describe p i =
vcat [ text "Lösen Sie diese Instanz des Postschen Korrespondenz-Problems:"
, nest 4 $ toDoc i
]
initial p i =
case do let PCP uvs = i
(k, (u,v)) <- zip [1..] uvs
guard $ isPrefixOf u v || isPrefixOf v u
return k
        of    [] -> [ ] -- should not happen
              k : ks -> [k] -- this is how it could start
partial p i @ ( PCP uvs ) b = do
let n = fromIntegral $ length uvs
let wrong = do k <- b ; guard $ not $ 1 <= k && k <= n ; return k
when ( not $ null wrong ) $ do
inform $ text "Diese Indizes sind nicht erlaubt:"
reject $ nest 4 $ toDoc wrong
let ( us, vs ) = lr i b
inform $ vcat
[ text "Aus Ihrer Folge entstehen die Zeichenketten:"
-- , toDoc us, toDoc vs
-- fix for bug #80
, text us, text vs
]
let com = common us vs
urest = drop (length com) us
vrest = drop (length com) vs
when ( not (null urest) && not ( null vrest )) $ do
reject $ vcat
[ text "Die eine muß ein Präfix der anderen sein,"
, text "nach Löschen des gemeinsamen Präfixes"
, nest 4 $ toDoc com
, text "entstehen jedoch die Reste"
, nest 4 $ toDoc ( urest, vrest )
]
total p i b = do
when ( null b ) $ do
reject $ text "Das Lösungswort darf nicht leer sein."
let ( us, vs ) = lr i b
assert ( us == vs )
$ text "Sind die Zeichenketten gleich?"
--------------------------------------------------------------------------
make_quiz :: Make
make_quiz = quiz PCProblem
PCProblem.Param.g
make_fixed :: Make
make_fixed = direct PCProblem
( PCP [ ("bba","b"),("a","b"),("b","ab") ] )
| Erdwolf/autotool-bonn | src/PCProblem/Quiz.hs | gpl-2.0 | 2,231 | 27 | 14 | 668 | 693 | 357 | 336 | 58 | 1 |
-- grid is a game written in Haskell
-- Copyright (C) 2018 [email protected]
--
-- This file is part of grid.
--
-- grid is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- grid is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with grid. If not, see <http://www.gnu.org/licenses/>.
--
module Game.LevelPuzzle.File.Write
(
) where
import MyPrelude
import File.Binary
| karamellpelle/grid | source/Game/LevelPuzzle/File/Write.hs | gpl-3.0 | 836 | 0 | 4 | 151 | 37 | 31 | 6 | 4 | 0 |
-- Collection of utility functions
module Utility where
import Numeric.Units.Dimensional.Prelude (ThermodynamicTemperature, ElectricResistance, Dim, Dimensionless, Quantity, pos2, _5, _2, (/~~), (/~), (*~), one)
import qualified Numeric.Units.Dimensional.Prelude ((^), (+), (**), (-), (*), (/), log)
import qualified Numeric.Units.Dimensional
import Numeric.NumType (Pos1, Neg1, Pos2, Neg2, Neg3, Zero)
import Data.Function (on)
import Data.List (minimumBy)
import Data.Decimal
import Data.Word
a // b = fromIntegral a / fromIntegral b
e a b = a * (10.0 ** b) -- "e" function; i.e.: 1.3e6 = 1.3 * (10^6) = 1300000
e' a b = a !* (10.0 !** b) -- "e" function; i.e.: 1.3e6 = 1.3 * (10^6) = 1300000
cang = 9.0 -- Half-angle of the cones, in degrees
pyth a b = sqrt ((a ** 2) + (b ** 2))
dtr x = pi * x / 180.0 -- Degrees to radians conversion
rtd x = 180.0 * x / pi -- Radians to degrees conversion
sin2 x = sin x ** 2.0 -- Trigonometric utility functions
cos2 x = cos x ** 2.0
tan2 x = tan x ** 2.0
csc x = 1.0 / sin x
sec x = 1.0 / cos x
cot x = 1.0 / tan x
csc2 x = csc x ** 2.0
sec2 x = sec x ** 2.0
cot2 x = cot x ** 2.0
fst3 (a, _, _) = a
snd3 (_, b, _) = b
thd3 (_, _, c) = c
fst4 (a, _, _, _) = a
snd4 (_, b, _, _) = b
thd4 (_, _, c, _) = c
lst4 (_, _, _, d) = d
log10 = logBase 10
log10' :: (Floating a) => Dimensionless a -> Dimensionless a
log10' = fmap (logBase 10)
isNaN' :: (RealFloat a) => Dimensionless a -> Bool
isNaN' x = isNaN (x /~ one)
type Temperature = ThermodynamicTemperature
type Resistance = ElectricResistance
type DimlessDouble = Dimensionless Double
type DCompliance = Dim Zero Neg1 Pos2 Zero Zero Zero Zero
type Compliance = Quantity DCompliance
type DSpringConstant = Dim Zero Pos1 Neg2 Zero Zero Zero Zero
type SpringConstant = Quantity DSpringConstant
type DBLValue = Dim Pos1 Pos1 Neg2 Neg1 Zero Zero Zero
type BLValue = Quantity DBLValue
type DVoltage = Dim Pos2 Pos1 Neg3 Neg1 Zero Zero Zero
type Voltage = Quantity DVoltage
imap :: [a -> b] -> a -> [b]
imap fs s = zipWith ($) fs (replicate (length fs) s)
a !+ b = a Prelude.+ b
a !- b = a Prelude.- b
a !* b = a Prelude.* b
a !/ b = a Prelude./ b
a !^ b = a Prelude.^ b
a !** b = a Prelude.** b
psqrt = Prelude.sqrt
pexp = Prelude.exp
ppyth a b = psqrt ((a !** 2.0) !+ (b !** 2.0))
a !^/ b = b Prelude.** (1 Prelude./ a)
a #+ b = a Numeric.Units.Dimensional.Prelude.+ b
a #* b = a Numeric.Units.Dimensional.Prelude.* b
a #/ b = a Numeric.Units.Dimensional.Prelude./ b
a #^ b = a Numeric.Units.Dimensional.Prelude.^ b
a #** b = a Numeric.Units.Dimensional.Prelude.** b
funcMap :: [a -> b] -> a -> [b]
funcMap fs x = zipWith ($) fs xs
where
xs = replicate (length fs) x
squ a = a #^ pos2
places :: Int -- Max number of decimal places in a printed number
places = 8
round_ :: (Floating a, RealFrac a) => Int -> a -> Decimal
round_ p x = realFracToDecimal (fromIntegral p::Word8) (round (x * (10 !^ p)) // (10 !^ p))
round' :: (Floating a, RealFrac a) => Int -> Quantity d a -> Quantity d Decimal
round' p (Numeric.Units.Dimensional.Dimensional x) = Numeric.Units.Dimensional.Dimensional (round_ p x)
show_ :: Int -> Double -> String -- Prints fixed-length numbers
show_ p x
| al < p = a ++ replicate (places - al) '0'
| al > p = take places a
| al == p = a
where
a = show (realFracToDecimal (fromIntegral p :: Word8) x)
al = length a
acc = 0.01
accD = acc *~ one
listgen :: (Enum a, Num a) => (a, a) -> a -> [a]
listgen (m,n) d = [m, (m !+ d) .. n]
listgen' :: (DimlessDouble, DimlessDouble) -> DimlessDouble -> [DimlessDouble]
listgen' (m,n) d = [m, (m #+ d) .. n]
bestRoot f brckt d = xs !! index
where
xs = listgen brckt d
index = fst (minimumBy (compare `on` snd) zipped)
zipped = zip [0 .. length ys] ys
ys = map f xs
bestRoot' :: (DimlessDouble -> DimlessDouble) -> (DimlessDouble, DimlessDouble) -> DimlessDouble -> DimlessDouble
bestRoot' f brckt d = xs !! index
where
xs = listgen' brckt d
index = fst (minimumBy (compare `on` snd) zipped)
zipped = zip [0 .. length ys] ys
ys = map f xs /~~ one
circleArea r = pi * r**2.0
circleArea' d = (1/4) * pi * d**2.0
sphereVol r = (4/3) * pi * r**3.0
sphereVol' d = (1/6) * pi * d**3.0
| taktoa/ThermoCalc | src/Utility.hs | gpl-3.0 | 4,564 | 2 | 12 | 1,262 | 1,912 | 1,038 | 874 | -1 | -1 |
module SpacialGameEnv.RunSGEnv where
import SpacialGameEnv.SGModelEnv
import qualified PureAgents2DDiscrete as Front
import qualified Graphics.Gloss as GLO
import Graphics.Gloss.Interface.IO.Simulate
import qualified PureAgentsConc as PA
import System.Random
import Data.Maybe
import Data.List
import Control.Monad.STM
winTitle = "Spacial Game ENV CONC"
winSize = (1000, 1000)
runSGEnvWithRendering :: IO ()
runSGEnvWithRendering = do
--hSetBuffering stdin NoBuffering
let dt = 1.0
let dims = (50, 50)
let rngSeed = 42
let defectorsRatio = 0.0
let g = mkStdGen rngSeed
(as, g') <- atomically $ createRandomSGAgents g dims defectorsRatio
let asWithDefector = setDefector as (25, 25) dims
env <- atomically $ sgEnvironmentFromAgents asWithDefector
hdl <- PA.initStepSimulation asWithDefector env
stepWithRendering dims hdl dt
setDefector :: [SGAgent] -> (Int, Int) -> (Int, Int) -> [SGAgent]
setDefector as pos cells
| isNothing mayAgentAtPos = as
| otherwise = infront ++ [defectedAgentAtPos] ++ (tail behind)
where
mayAgentAtPos = find (\a -> pos == (agentToCell a cells)) as
agentAtPos = (fromJust mayAgentAtPos)
agentAtPosId = PA.agentId agentAtPos
defectedAgentAtPos = PA.updateState agentAtPos (\s -> s { sgCurrState = Defector, sgPrevState = Defector } )
(infront, behind) = splitAt agentAtPosId as
stepWithRendering :: (Int, Int) -> SGSimHandle -> Double -> IO ()
stepWithRendering dims hdl dt = simulateIO (Front.display winTitle winSize)
GLO.white
2
hdl
(modelToPicture dims)
(stepIteration dt)
modelToPicture :: (Int, Int) -> SGSimHandle -> IO GLO.Picture
modelToPicture dims hdl = do
let as = PA.extractHdlAgents hdl
let cells = map (sgAgentToRenderCell dims) as
return (Front.renderFrame cells winSize dims)
stepIteration :: Double -> ViewPort -> Float -> SGSimHandle -> IO SGSimHandle
stepIteration fixedDt viewport dtRendering hdl = (PA.advanceSimulation hdl fixedDt)
sgAgentToRenderCell :: (Int, Int) -> SGAgent -> Front.RenderCell
sgAgentToRenderCell (xDim, yDim) a = Front.RenderCell { Front.renderCellCoord = (ax, ay),
Front.renderCellColor = ss }
where
id = PA.agentId a
s = PA.state a
ax = mod id yDim
ay = floor((fromIntegral id) / (fromIntegral xDim))
curr = sgCurrState s
prev = sgPrevState s
ss = sgAgentStateToColor prev curr
sgAgentStateToColor :: SGState -> SGState -> (Double, Double, Double)
sgAgentStateToColor Cooperator Cooperator = blueC
sgAgentStateToColor Defector Defector = redC
sgAgentStateToColor Defector Cooperator = greenC
sgAgentStateToColor Cooperator Defector = yellowC
blueC :: (Double, Double, Double)
blueC = (0.0, 0.0, 0.7)
greenC :: (Double, Double, Double)
greenC = (0.0, 0.4, 0.0)
redC :: (Double, Double, Double)
redC = (0.7, 0.0, 0.0)
yellowC :: (Double, Double, Double)
yellowC = (1.0, 0.9, 0.0) | thalerjonathan/phd | public/ArtIterating/code/haskell/PureAgentsConc/src/SpacialGameEnv/RunSGEnv.hs | gpl-3.0 | 3,485 | 0 | 12 | 1,115 | 949 | 514 | 435 | 70 | 1 |
--record syntax
data CAR = Car {company :: String, model :: String, year :: Int} deriving (Show,Eq)
--ghci> Car "x" "z" 3 == Car {company = "x", model = "z", year = 3}
--True
--it :: Bool | YPBlib/NaiveFunGame_hs | syntax.hs | gpl-3.0 | 190 | 0 | 8 | 39 | 43 | 27 | 16 | 1 | 0 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE ScopedTypeVariables #-}
module EpsilonNFA where
import Data.Set
import qualified Data.List as L
import Control.Monad
import Control.Lens
import qualified DFA as D (State(State), Input(Input), Rule(Rule), Label(Label), GoalState(GoalState), Priority, DFA, mkDFA)
import qualified Debug.Trace as De
import qualified Data.Map as M
data Epsilon = Epsilon deriving (Show, Eq)
data Rule a b = Rule (D.State a) (Either (D.Input b) Epsilon) (D.State a) deriving (Eq, Show)
matchRule :: (Eq a, Eq b) => D.Input b -> D.State a -> [Rule a b] -> [Rule a b]
matchRule i cs rs =
L.filter (\(Rule c y _) -> case y of
(Left l) -> (cs == c) && (i == l)
(Right _) -> False) rs
--Get the set of states reachable in one step via epsilon rules
eclose' :: (Eq a) => D.State a -> [Rule a b] -> [D.State a]
eclose' s rs = enext
where
erule = L.filter (\(Rule x y _) -> case y of
(Right _) -> (x == s)
(Left _) -> False) rs
enext = L.map (\(Rule _ _ ns) -> ns) erule
--Get the set of states reachable via epsilon rules
eclose'' :: (Eq a) => [D.State a] -> [Rule a b] -> [D.State a] -> [D.State a]
eclose'' s rs acc
| epNexts == [] = acc
| otherwise = eclose'' s rs (acc ++ epNexts)
where
epNexts = L.filter (\es -> not (elem es acc)) $
concat $ L.map (\a -> eclose' a rs) acc
eclose :: (Eq a) => [D.State a] -> [Rule a b] -> [D.State a]
eclose s rs = eclose'' s rs s
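-- For example (illustrative), with the two rules
--   Rule (D.State 0) (Right Epsilon) (D.State 1)
--   Rule (D.State 1) (Right Epsilon) (D.State 2)
-- we get eclose [D.State 0] rules == [D.State 0, D.State 1, D.State 2].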
data EpsilonNFA a b c = EpsilonNFA {
_fstState :: D.State a
,_currState :: [D.State a]
,_rules :: [Rule a b]
,_goalState :: [D.GoalState a c]
} deriving (Show)
$(makeLenses ''EpsilonNFA)
mkEpsilonNFA :: (Eq a) => D.State a -> [Rule a b] -> [D.GoalState a c] -> EpsilonNFA a b c
mkEpsilonNFA s rs gs = EpsilonNFA {
_fstState = s
,_currState = eclose [s] rs
,_rules = rs
,_goalState = gs
}
updateEpsilonNFA :: (Eq a, Eq b) => EpsilonNFA a b c -> D.Input b -> Maybe (EpsilonNFA a b c)
updateEpsilonNFA enfa i = updateEpsilonNFA' enfa nxtStates
where
rs = concat $ L.map (\s -> matchRule i s (enfa^.rules))
(enfa^.currState)
nxtStates = eclose (L.map (\(Rule _ _ ns) -> ns) rs) (enfa^.rules)
updateEpsilonNFA' :: (Eq a) => EpsilonNFA a b c -> [D.State a] -> Maybe (EpsilonNFA a b c)
updateEpsilonNFA' _ [] = Nothing
updateEpsilonNFA' nfa ns = Just (nfa&currState.~ns)
runEpsilonNFA :: (Eq a, Eq b) => EpsilonNFA a b c -> [D.Input b] -> Maybe (EpsilonNFA a b c)
runEpsilonNFA enfa is = foldM updateEpsilonNFA enfa is
accept :: (Eq a, Eq b) => EpsilonNFA a b c -> [b] -> Bool
accept enfa is = accept' res
where
res = runEpsilonNFA enfa $ L.map (\x -> (D.Input x)) is
accept' Nothing = False
accept' (Just f) = L.any (\s -> (L.any (\(D.GoalState gs _ _) -> gs == s) (f^.goalState))) (f^.currState)
incrementStateId :: D.State Int -> Int -> D.State Int
incrementStateId (D.State a) i = D.State (a + i)
incrementRuleId :: Rule Int b -> Int -> Rule Int b
incrementRuleId (Rule fs b ts) i = Rule (incrementStateId fs i) b (incrementStateId ts i)
incrementGoalStateId :: D.GoalState Int b -> Int -> D.GoalState Int b
incrementGoalStateId (D.GoalState gs p l) i = D.GoalState (incrementStateId gs i) p l
incrementEpsilonNFA :: EpsilonNFA Int b c -> Int -> EpsilonNFA Int b c
incrementEpsilonNFA enfa i = EpsilonNFA {
_fstState = incrementedFstState,
_currState = incrementedCurrState,
_rules = incrementedRules,
_goalState = incrementedGoalStates
}
where
incrementedFstState = incrementStateId (enfa^.fstState) i
incrementedCurrState = L.map (\cs -> incrementStateId cs i) (enfa^.currState)
incrementedRules = L.map (\r -> incrementRuleId r i) (enfa^.rules)
incrementedGoalStates = L.map (\gs -> incrementGoalStateId gs i) (enfa^.goalState)
mergeEpsilonNFA :: [EpsilonNFA Int b c] -> EpsilonNFA Int b c
mergeEpsilonNFA [] = error "mergeEpsilonNFA: can't merge empty Epsilon NFAs"
mergeEpsilonNFA (x:[]) = x
mergeEpsilonNFA enfas@(x:xs) = mkEpsilonNFA (x^.fstState) newRules newGoalState
where
stateCounts = L.map (\e -> maximum (L.map (\(Rule _ _ (D.State i)) -> i) (e^.rules))) enfas
increments = L.scanl1 (+) stateCounts
incrementedEnfas = x : (L.zipWith (\enfa i -> incrementEpsilonNFA enfa (i + 1)) (tail enfas) increments)
newEpsilonRules = L.map (\i -> Rule (x^.fstState) (Right Epsilon) (D.State (i + 1))) (init increments)
newRules = newEpsilonRules ++ (L.concatMap (\e -> (e^.rules)) incrementedEnfas)
newGoalState = L.concatMap (\e -> (e^.goalState)) incrementedEnfas
{-
- Convert Epsilon-NFA -> DFA
-}
{-
We now need to group some of the rules: a single state may have several
transitions carrying the same label, so the rules have to be grouped
by label.
-}
groupedRules :: (Eq b, Ord b) => [ENFARule a b] -> [[ENFARule a b]]
groupedRules rs = L.groupBy (\(Rule _ i1 _) (Rule _ i2 _) -> i1 == i2)
$ L.sortBy (\(Rule _ (Left i1) _) (Rule _ (Left i2) _) -> compare i1 i2)
$ L.filter (\(Rule _ i _) -> case i of
(Left _) -> True
(Right _) -> False) rs
type ENFARule a b = Rule a b
type DFARule a b = D.Rule (Set a) b
genDFAGoalFromDFAState :: (Eq a, Eq b) => [D.State a] -> c -> [D.GoalState a b] -> Maybe (D.GoalState c b)
genDFAGoalFromDFAState as gi gs = if (goalStates == [])
then Nothing
else Just (D.GoalState (D.State gi) goalPriority goalLabel)
where
goalStates = L.filter (\(D.GoalState g _ _) -> elem g as) gs
(D.GoalState _ goalPriority goalLabel) = minimum goalStates
type RuleMap a b = M.Map (D.Input b) [ENFARule a b]
groupeRules :: (Eq b, Ord b) => [ENFARule a b] -> RuleMap a b
groupeRules rs = M.fromList
$ L.map (\xs@((Rule _ (Left i) _):_) -> (i,xs))
$ L.groupBy (\(Rule _ i1 _) (Rule _ i2 _) -> i1 == i2)
$ L.sortBy (\(Rule _ (Left i1) _) (Rule _ (Left i2) _) -> compare i1 i2)
$ L.filter (\(Rule _ i _) -> case i of
(Left _) -> True
(Right _) -> False) rs
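-- | One subset-construction step: all states reachable from the state set
-- @ss@ on input @i@ (using the rules pre-grouped by label in @rmap@),
-- followed by the epsilon closure over @rules@.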
dfaEdge :: (Eq a, Ord b) => [D.State a] -> RuleMap a b -> [ENFARule a b] -> D.Input b -> [D.State a]
dfaEdge ss rmap rules i = case r of
Nothing -> []
Just rs -> eclose (L.nub $ L.map (\(Rule _ _ t) -> t) $
L.concatMap (\s -> L.filter (\(Rule fs _ _) -> fs == s) rs) ss) rules
where
r = M.lookup i rmap
type States a = M.Map Int [D.State a]
type Trans a = [D.Rule Int a]
generateDFA'' :: (Eq a, Ord b, Eq c) => States a -> Trans b -> [D.GoalState a c] -> [D.GoalState Int c] -> RuleMap a b -> [ENFARule a b] -> D.Input b -> Int -> Int -> (States a, Trans b, [D.GoalState Int c], Int)
generateDFA'' s t g gacc r rules i j p = if (e /= [])
then case idx of
Just ii -> (s, (D.Rule (D.State j) i (D.State ii)) : t, newGacc, p)
Nothing -> ((M.insert (p + 1) e s), ((D.Rule (D.State j) i (D.State (p + 1))):t), newGacc, (p + 1))
else (s, t, gacc, p)
where
e = dfaEdge (s M.! j) r rules i
idx = L.find (\idx -> (s M.! idx) == e) [0..p]
newGoalState = case idx of
Just ii -> Nothing
Nothing -> genDFAGoalFromDFAState e (p + 1) g
newGacc = case newGoalState of
Just newG -> newG : gacc
Nothing -> gacc
generateDFA' :: (Eq a, Ord b, Eq c) => States a -> Trans b -> [D.GoalState a c] -> [D.GoalState Int c] -> RuleMap a b -> [ENFARule a b] -> [D.Input b] -> Int -> Int -> (Trans b, [D.GoalState Int c])
generateDFA' states trans goals gstates rmap rules alphabets j p
| j <= p = generateDFA' newStates newTrans goals newGStates rmap rules alphabets (j + 1) newP
| otherwise = (trans, gstates)
where
(newStates, newTrans, newGStates, newP) = L.foldl (\(s, t, g, p) c -> generateDFA'' s t goals g rmap rules c j p) (states, trans, gstates, p) alphabets
genAlphabets :: (Eq b) => [ENFARule a b] -> [D.Input b]
genAlphabets rs = L.nub $ L.map (\(Rule _ (Left i) _) -> i) $ L.filter (\(Rule _ i _) -> case i of
(Left _) -> True
(Right _) -> False) rs
generateDFA :: (Ord a, Ord b, Eq c) => EpsilonNFA a b c -> (Trans b, [D.GoalState Int c])
generateDFA enfa = generateDFA' states trans (enfa^.goalState) gacc rmap (enfa^.rules) alphabets j p
where
fstEclose = eclose [enfa^.fstState] (enfa^.rules)
states = M.fromList [(0, fstEclose)]
trans = []
rmap = (groupeRules (enfa^.rules))
alphabets = genAlphabets (enfa^.rules)
j = 0
p = 0
gacc = case (genDFAGoalFromDFAState fstEclose 0 (enfa^.goalState)) of
Just g -> [g]
Nothing -> []
genDFA :: (Ord a, Ord b, Eq c) => EpsilonNFA a b c -> D.DFA Int b c
genDFA enfa = D.mkDFA (D.State 0) trans gstates
where
(trans, gstates) = generateDFA enfa
| pocket7878/min-tokenizer | src/EpsilonNFA.hs | gpl-3.0 | 9,899 | 0 | 19 | 3,107 | 4,221 | 2,213 | 2,008 | 154 | 5 |
{- |
Module : $Header$
Description : Higher-kinded stuff.
Copyright : (c) plaimi 2014
License : GPL-3
Maintainer : [email protected]
-} module Plailude.Control (
(!?),
(~+~),
both,
erretreat,
if',
fi,
retreat,
) where
import Plailude.Control.Arrow
import Plailude.Control.Bool
import Plailude.Control.IO
import Plailude.Control.List
import Plailude.Control.Monad
| plaimi/plailude | src/Plailude/Control.hs | gpl-3.0 | 400 | 0 | 4 | 78 | 65 | 45 | 20 | 13 | 0 |
module Test.QuickFuzz.Gen.Bnfc.AbsGrammar where
-- Haskell module generated by the BNF converter
data Exp
= EAdd Exp Exp
| ESub Exp Exp
| EMul Exp Exp
| EDiv Exp Exp
| EInt Integer
deriving (Eq, Ord, Show, Read)
| CIFASIS/QuickFuzz | src/Test/QuickFuzz/Gen/Bnfc/AbsGrammar.hs | gpl-3.0 | 242 | 0 | 6 | 67 | 66 | 39 | 27 | 8 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.DynamoDB.DeleteTable
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | The /DeleteTable/ operation deletes a table and all of its items. After a /DeleteTable/ request, the specified table is in the 'DELETING' state until DynamoDB
-- completes the deletion. If the table is in the 'ACTIVE' state, you can delete
-- it. If a table is in 'CREATING' or 'UPDATING' states, then DynamoDB returns a /ResourceInUseException/. If the specified table does not exist, DynamoDB returns a /ResourceNotFoundException/. If table is already in the 'DELETING' state, no error is returned.
--
-- DynamoDB might continue to accept data read and write operations, such as /GetItem/ and /PutItem/, on a table in the 'DELETING' state until the table deletion is
-- complete.
--
-- When you delete a table, any indexes on that table are also deleted.
--
-- Use the /DescribeTable/ API to check the status of the table.
--
-- <http://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_DeleteTable.html>
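--
-- A minimal illustrative sketch (the table name is hypothetical): build a
-- request with the smart constructor and inspect it through its lens:
--
-- > let rq = deleteTable "Music"
-- > rq ^. dtTableName   -- "Music"
--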
module Network.AWS.DynamoDB.DeleteTable
(
-- * Request
DeleteTable
-- ** Request constructor
, deleteTable
-- ** Request lenses
, dtTableName
-- * Response
, DeleteTableResponse
-- ** Response constructor
, deleteTableResponse
-- ** Response lenses
, dtrTableDescription
) where
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.DynamoDB.Types
import qualified GHC.Exts
newtype DeleteTable = DeleteTable
{ _dtTableName :: Text
} deriving (Eq, Ord, Read, Show, Monoid, IsString)
-- | 'DeleteTable' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'dtTableName' @::@ 'Text'
--
deleteTable :: Text -- ^ 'dtTableName'
-> DeleteTable
deleteTable p1 = DeleteTable
{ _dtTableName = p1
}
-- | The name of the table to delete.
dtTableName :: Lens' DeleteTable Text
dtTableName = lens _dtTableName (\s a -> s { _dtTableName = a })
newtype DeleteTableResponse = DeleteTableResponse
{ _dtrTableDescription :: Maybe TableDescription
} deriving (Eq, Read, Show)
-- | 'DeleteTableResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'dtrTableDescription' @::@ 'Maybe' 'TableDescription'
--
deleteTableResponse :: DeleteTableResponse
deleteTableResponse = DeleteTableResponse
{ _dtrTableDescription = Nothing
}
dtrTableDescription :: Lens' DeleteTableResponse (Maybe TableDescription)
dtrTableDescription =
lens _dtrTableDescription (\s a -> s { _dtrTableDescription = a })
instance ToPath DeleteTable where
toPath = const "/"
instance ToQuery DeleteTable where
toQuery = const mempty
instance ToHeaders DeleteTable
instance ToJSON DeleteTable where
toJSON DeleteTable{..} = object
[ "TableName" .= _dtTableName
]
instance AWSRequest DeleteTable where
type Sv DeleteTable = DynamoDB
type Rs DeleteTable = DeleteTableResponse
request = post "DeleteTable"
response = jsonResponse
instance FromJSON DeleteTableResponse where
parseJSON = withObject "DeleteTableResponse" $ \o -> DeleteTableResponse
<$> o .:? "TableDescription"
| dysinger/amazonka | amazonka-dynamodb/gen/Network/AWS/DynamoDB/DeleteTable.hs | mpl-2.0 | 4,142 | 0 | 9 | 868 | 446 | 274 | 172 | 56 | 1 |
--Zaoqilc
--Copyright (C) 2017 Zaoqi
--This program is free software: you can redistribute it and/or modify
--it under the terms of the GNU Affero General Public License as published
--by the Free Software Foundation, either version 3 of the License, or
--(at your option) any later version.
--This program is distributed in the hope that it will be useful,
--but WITHOUT ANY WARRANTY; without even the implied warranty of
--MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
--GNU Affero General Public License for more details.
--You should have received a copy of the GNU Affero General Public License
--along with this program. If not, see <http://www.gnu.org/licenses/>.
module Zaoqil.Monad.List where
instance Monad [] where
xs >>= f = j 1 $ map f xs
where
p _ [] = ([], [])
p 0 xs = ([], xs)
            p x ([]:zs) = p x zs -- skip sub-lists that are already exhausted
            p x ((y:ys):zs) = let (a, b) = p (pred x) zs in (y:a, ys:b)
j _ [] = []
j x xs = let (a, b) = p x xs in a ++ j (succ x) b
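            -- e.g. [1,2,3] >>= \x -> [10*x, 10*x+1]
            --        == [10,11,20,21,30,31]   (elements taken diagonally)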
instance Applicative [] where
pure x = [x]
fs <*> xs = do
f <- fs
x <- xs
return $ f x
| zaoqi/zaoqilc | featuring/MonadList.hs | agpl-3.0 | 1,108 | 0 | 14 | 288 | 279 | 146 | 133 | 14 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Kubernetes.V1.TCPSocketAction where
import GHC.Generics
import Kubernetes.Utils
import qualified Data.Aeson
-- | TCPSocketAction describes an action based on opening a socket
data TCPSocketAction = TCPSocketAction
{ port :: IntOrText -- ^ Number or name of the port to access on the container. Number must be in the range 1 to 65535. Name must be an IANA_SVC_NAME.
} deriving (Show, Eq, Generic)
instance Data.Aeson.FromJSON TCPSocketAction
instance Data.Aeson.ToJSON TCPSocketAction
| minhdoboi/deprecated-openshift-haskell-api | kubernetes/lib/Kubernetes/V1/TCPSocketAction.hs | apache-2.0 | 672 | 0 | 8 | 100 | 80 | 49 | 31 | 14 | 0 |
module SqrtMultiples.A262036 (a262036) where
import Data.List (find)
import Data.Maybe (fromJust)
import HelperSequences.A005117 (a005117)
import SqrtMultiples.A261865 (a261865)
a262036 :: Integral a => a -> a
a262036 n = fromJust $ find (\k -> a261865 k == a005117 n) [1..]
| peterokagey/haskellOEIS | src/SqrtMultiples/A262036.hs | apache-2.0 | 276 | 0 | 10 | 39 | 105 | 58 | 47 | 7 | 1 |
{-# LANGUAGE UndecidableInstances #-}
{-# Language FlexibleInstances #-}
{-# Language MultiParamTypeClasses #-}
-- Search for -fallow-undecidable-instances to see why this is needed
-----------------------------------------------------------------------------
-- |
-- Module : Control.Monad.Writer.Lazy
-- Copyright : (c) Andy Gill 2001,
-- (c) Oregon Graduate Institute of Science and Technology, 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : non-portable (multi-param classes, functional dependencies)
--
-- Lazy writer monads.
--
-- Inspired by the paper
-- /Functional Programming with Overloading and
-- Higher-Order Polymorphism/,
-- Mark P Jones (<http://web.cecs.pdx.edu/~mpj/pubs/springschool.html>)
-- Advanced School of Functional Programming, 1995.
-----------------------------------------------------------------------------
module Control.Monad.Writer.Lazy (
module Control.Monad.Writer.Class,
Writer(..),
execWriter,
mapWriter,
WriterT(..),
execWriterT,
mapWriterT,
module Control.Monad,
module Control.Monad.Trans,
module Data.Monoid,
) where
import Control.Monad
import Control.Monad.Cont.Class
import Control.Monad.Error.Class
import Control.Monad.Reader.Class
import Control.Monad.State.Class
import Control.Monad.Trans
import Control.Monad.Writer.Class
import Data.Monoid
-- ---------------------------------------------------------------------------
-- Our parameterizable writer monad
newtype Writer w a = Writer { runWriter :: (a, w) }
execWriter :: Writer w a -> w
execWriter m = snd (runWriter m)
mapWriter :: ((a, w) -> (b, w')) -> Writer w a -> Writer w' b
mapWriter f m = Writer $ f (runWriter m)
instance Functor (Writer w) where
fmap f m = Writer $ let (a, w) = runWriter m in (f a, w)
instance (Monoid w) => Monad (Writer w) where
return a = Writer (a, mempty)
m >>= k = Writer $ let
(a, w) = runWriter m
(b, w') = runWriter (k a)
in (b, w `mappend` w')
instance (Monoid w) => MonadWriter w (Writer w) where
tell w = Writer ((), w)
listen m = Writer $ let (a, w) = runWriter m in ((a, w), w)
pass m = Writer $ let ((a, f), w) = runWriter m in (a, f w)
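-- For example, runWriter (tell [1] >> tell [2]) == ((), [1,2]), and
-- runWriter (listen (tell "x")) == (((), "x"), "x").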
-- ---------------------------------------------------------------------------
-- Our parameterizable writer monad, with an inner monad
newtype WriterT w m a = WriterT { runWriterT :: m (a, w) }
execWriterT :: Monad m => WriterT w m a -> m w
execWriterT m = do
~(_, w) <- runWriterT m
return w
mapWriterT :: (m (a, w) -> n (b, w')) -> WriterT w m a -> WriterT w' n b
mapWriterT f m = WriterT $ f (runWriterT m)
instance (Monad m) => Functor (WriterT w m) where
fmap f m = WriterT $ do
~(a, w) <- runWriterT m
return (f a, w)
instance (Monoid w, Monad m) => Monad (WriterT w m) where
return a = WriterT $ return (a, mempty)
m >>= k = WriterT $ do
~(a, w) <- runWriterT m
~(b, w') <- runWriterT (k a)
return (b, w `mappend` w')
fail msg = WriterT $ fail msg
instance (Monoid w, MonadPlus m) => MonadPlus (WriterT w m) where
mzero = WriterT mzero
m `mplus` n = WriterT $ runWriterT m `mplus` runWriterT n
instance (Monoid w, Monad m) => MonadWriter w (WriterT w m) where
tell w = WriterT $ return ((), w)
listen m = WriterT $ do
~(a, w) <- runWriterT m
return ((a, w), w)
pass m = WriterT $ do
~((a, f), w) <- runWriterT m
return (a, f w)
-- ---------------------------------------------------------------------------
-- Instances for other mtl transformers
instance (Monoid w) => MonadTrans (WriterT w) where
mt = MT
lift m = WriterT $ do
a <- m
return (a, mempty)
unlift f = WriterT $ (f (\tmx -> runWriterT tmx >>= return . (\(x,w) -> WriterF x w))) >>= return . (\(WriterF x w) -> (x,w))
data WriterF w a = WriterF a w
instance Functor (WriterF w) where
fmap f (WriterF a w) = WriterF (f a) w
instance (Monoid w, MonadIO m) => MonadIO (WriterT w m) where
liftIO = lift . liftIO
instance (Monoid w, MonadCont m) => MonadCont (WriterT w m) where
callCC f = WriterT $
callCC $ \c ->
runWriterT (f (\a -> WriterT $ c (a, mempty)))
instance (Monoid w, MonadError e m) => MonadError e (WriterT w m) where
throwError = lift . throwError
m `catchError` h = WriterT $ runWriterT m
`catchError` \e -> runWriterT (h e)
-- This instance needs -fallow-undecidable-instances, because
-- it does not satisfy the coverage condition
instance (Monoid w, MonadReader r m) => MonadReader r (WriterT w m) where
ask = lift ask
local f m = WriterT $ local f (runWriterT m)
-- Needs -fallow-undecidable-instances
instance (Monoid w, MonadState s m) => MonadState s (WriterT w m) where
get = lift get
put = lift . put
| ifigueroap/mzv | src/Control/Monad/Writer/Lazy.hs | bsd-3-clause | 5,007 | 0 | 16 | 1,172 | 1,684 | 908 | 776 | 93 | 1 |
{-# LANGUAGE PackageImports #-}
module GHC.IOArray (module M) where
import "base" GHC.IOArray as M
| silkapp/base-noprelude | src/GHC/IOArray.hs | bsd-3-clause | 104 | 0 | 4 | 18 | 21 | 15 | 6 | 3 | 0 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE FlexibleContexts #-}
module Feldspar.Algorithm.FFT.Push
( fft
, ifft
, fftCore
, butterfly
, twid
, twids
) where
import qualified Prelude as P
import qualified Data.Complex
import Feldspar
import Feldspar.Mutable
import Feldspar.Vector
import Feldspar.Algorithm.FFT.Utils
import Feldspar.Algorithm.FFT.Twids
import Test.QuickCheck
-- | Utilities that should go into Feldspar.Vector
chnk :: (Pushy arr1 DIM1, Syntax b)
=> Data Length -- ^ Number of chunks
-> Data Length -- ^ Size of the chunks
-> (Pull DIM1 a -> arr1 DIM1 b) -- ^ Applied to every chunk
-> Pull DIM1 a
-> Push DIM1 b
chnk r c f v = Push loop $ extent v
where loop func = forM r $ \i ->
do let (Push k _) = toPush $ f (take c (drop (c*i) v))
k (\(Z:.j) a -> func (Z:.(c*i + j)) a)
unhalve :: (Syntax a)
=> Pull DIM1 (a,a) -> Push DIM1 a
unhalve xs = unpairWith id (\(Z:.i) -> Z:.(i + length xs)) xs
stride :: Data Length -> Data Length
-> (Data Index -> a -> M b)
-> Data Index -> (a,a) -> M b
stride n k f ix (a1,a2) = f (n*ix) a1 >> f (n*ix+k) a2
-- | DFT2 for Decimation-In-Frequency
dft2 :: Num a => a -> (a, a) -> (a,a)
dft2 w (x0,x1) = (x0+x1, (x0-x1)*w)
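-- With w = 1 this is the plain 2-point DFT, (x0+x1, x0-x1); the twiddle
-- factor w is folded into the difference term, as the DIF scheme requires.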
butterfly :: (Syntax a, Num a)
=> (a -> (a,a) -> (a,a))
-> Pull DIM1 a -> Pull DIM1 a -> Push DIM1 a
butterfly f ws = unhalve . zipWith f ws . uncurry zip . halve
-- | Cooley-Tukey Radix-2 Decimation In Frequency Fast Fourier Transfrom
fftCore :: (Syntax a, Num a)
=> Pull DIM1 a -> Pull DIM1 a -> Pull DIM1 a
fftCore ws vs = forLoop (ilog2 len) vs stage
where
len = length vs
stage s = withLen len
$ toPull
. store
. chnk (1 .<<. s) (len .>>. s) (butterfly dft2 (ixmap (.<<. s) ws))
fft vs = fftCore (twids (length vs)) vs
ifft vs = fftCore (itwids (length vs)) vs
| emwap/feldspar-language | src/Feldspar/Algorithm/FFT/Push.hs | bsd-3-clause | 1,954 | 0 | 21 | 567 | 842 | 442 | 400 | 50 | 1 |
-------------------------------------------------------------------------------
-- |
-- Module : Example
-- Copyright : (C) BaseX Team 2005-22
-- License : BSD
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- This example shows how database commands can be executed.
--
-- Documentation: https://docs.basex.org/wiki/Clients
--
-------------------------------------------------------------------------------
module Example where
import BaseXClient
import Network ( withSocketsDo )
import Data.Time.Clock ( getCurrentTime, diffUTCTime )
import Control.Applicative ( (<$>), (<*>), pure )
query :: String
query = "xquery 1 to 10"
main :: IO ()
main = withSocketsDo $ do
-- start time
start <- getCurrentTime
-- connect to the server
(Just session) <- connect "localhost" 1984 "admin" "admin"
-- execute and print the query
execute session query >>= putStrLn . either id content
-- close the session
close session
-- print time difference
(diffUTCTime <$> getCurrentTime <*> pure start) >>= print
| BaseXdb/basex | basex-api/src/main/haskell/Example.hs | bsd-3-clause | 1,139 | 0 | 11 | 241 | 175 | 103 | 72 | 14 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-|
This is where all the routes and handlers are defined for your site. The
'site' function combines everything together and is exported by this module.
-}
module Site
( site
) where
import Control.Applicative
import Control.Monad ((<=<))
import Control.Monad.Trans(liftIO)
import Data.Maybe
import Data.Time.Clock(getCurrentTime)
import Data.ByteString (ByteString)
import Data.ByteString.Char8 (pack, unpack)
import Snap.Extension.Timer
import Snap.Extension.HDBC
import Snap.Util.FileServe
import Snap.Types
import Application
import Database
import Views
import Types
import Paste
------------------------------------------------------------------------------
-- | Render a list of all pastes
pastesListH :: Application ()
pastesListH = withDb getAllPastes >>= (renderView . pastesListV)
------------------------------------------------------------------------------
-- | Renders a form to add a new paste.
pasteFormH :: Application ()
pasteFormH = renderView $ pasteFormV [] nullPaste
------------------------------------------------------------------------------
-- | Saves a new paste to the database or redisplays the form with a list of
-- errors.
addPasteH :: Application ()
addPasteH = do
title <- decodeParam "title"
contents <- decodeParam "contents"
syntax <- decodeParam "syntax"
let paste = nullPaste { pasteTitle = title
, pasteContents = contents
, pasteSyntax = syntax }
let errors = validatePaste paste
if not (null errors)
then renderView $ pasteFormV errors paste
else do
uid <- withDb $ flip savePasteToDb paste
redirect $ pack ("/paste/" ++ show uid)
------------------------------------------------------------------------------
-- | Display a single paste.
pasteH :: Application ()
pasteH = getParam "id" >>= maybe pass readPid >>= maybe pass pasteFromId >>= maybe pass renderPasteV
where
readPid pid = case reads (unpack pid) of
[(pid', "")] -> return $ Just pid'
_ -> return Nothing
renderPasteV = renderView . pasteV
pasteFromId pid = withDb $ flip getPasteFromDb pid
------------------------------------------------------------------------------
-- | Display a list of all languages used by the pastes
languagesListH :: Application ()
languagesListH = withDb getAllUsedLanguages >>= renderView . languagesListV
------------------------------------------------------------------------------
-- | Display a list of all pastes for the given language.
languageH :: Application ()
languageH = do
lang <- decodeParam "lang"
pastes <- withDb $ flip getPastesForLang lang
renderView $ languageV lang pastes
------------------------------------------------------------------------------
-- | Renders a BlazeHtml template and writes it to the response stream.
renderView :: Template -> Application ()
renderView template = do
start_time <- startTime
current_time <- liftIO getCurrentTime
modifyResponse $ addHeader "Content-Type" "text/html; charset=UTF-8"
writeLBS $ renderHtml $ template start_time current_time
------------------------------------------------------------------------------
-- | Return the value of the given parameter or an empty string if the
-- parameter doesn't exist.
decodeParam :: ByteString -> Application String
decodeParam = return . unpack . fromMaybe "" <=< getParam
------------------------------------------------------------------------------
-- | The main entry point handler.
site :: Application ()
site = withDb createTableIfMissing >>
route [ ("/", method GET $ ifTop pastesListH)
, ("/new", method GET pasteFormH)
, ("/new", method POST addPasteH)
, ("/paste/:id", method GET pasteH)
, ("/languages", method GET languagesListH)
, ("/language/:lang", method GET languageH)
]
<|> serveDirectory "resources/static"
| benarmston/hpastie | src/Site.hs | bsd-3-clause | 4,238 | 0 | 14 | 989 | 767 | 405 | 362 | -1 | -1 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE AllowAmbiguousTypes #-}
{-# LANGUAGE FlexibleContexts #-}
module IO
( readShot
, loadVisor
, screenShotSource
, dir
, saveVisor
, saveVisorContinuous
, saveKernels
, loadMany
, ioC
, pathSource
, pmap
, saveMany
, deleteVisor
, batchify
, trainC
, trainBatchC
, datasetSampleSource
, clear
, shuffleC
) where
import Types
import Lib
import Visor
import Vector
import Util
import Network
import Layers.Convolution
import qualified Static.Image as I
import Conduit
import Control.Monad
import System.FilePath
import System.Posix.Files
import System.Directory
import System.Random.Shuffle
import System.Process
import Data.Array.Repa.IO.BMP
import Data.Singletons.TypeLits
import Data.Singletons.Prelude.List
import Data.Proxy
import qualified Data.ByteString as BS
import Data.Serialize
import Numeric
readShot :: Path a -> IO (Screenshot a)
readShot (Path fp) = do ebmp <- I.readRaw fp
case ebmp of
Left err -> error$ err ++ " " ++ fp
Right bmp -> return (Screenshot bmp)
screenShotSource :: Int -> Int -> Int -> Int -> RTSource (Screenshot a)
screenShotSource x y w h = forever$ liftIO takeshot >>= yield
where
cmd = "screencapture -xm -R" ++ show x ++ ',':show y ++ ',':show w ++ ',':show h ++ " -t bmp out.bmp"
takeshot = do _ <- system cmd
Right img <- readImageFromBMP "out.bmp"
removeFile "out.bmp"
return (Screenshot img)
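-- For example, screenShotSource 0 0 800 600 shells out to
--   screencapture -xm -R0,0,800,600 -t bmp out.bmp
-- (macOS only), reads the BMP back in, and removes the temporary file.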
pathSource :: forall a. GameState a => RTSource (Path a)
pathSource = sourceDirectoryDeep True (unpath (rootDir :: Path a))
.| filterC ((== ".bmp") . takeExtension)
.| mapC Path
ioC :: (a -> IO ()) -> RTConduit a a
ioC f = awaitForever$ \x -> do liftIO$ f x
yield x
datasetSampleSource :: GameState a => Bool -> RTSource (Screenshot a, LabelVec a)
datasetSampleSource shuf = pathSource
.| (if shuf then shuffleC else awaitForever yield)
.| loadC
loadC :: GameState a => RTConduit (Path a) (Screenshot a, LabelVec a)
loadC = awaitForever$ \path -> do shot <- liftIO$ readShot path
yield (shot, parse $ pmap takeFileName path)
-- | Drain a source of its elements, and yield them in a random order
shuffleC :: RTConduit a a
shuffleC = do ls <- sinkList
ls' <- liftIO$ shuffleM ls
yieldMany ls'
loadVisor :: forall a.
( Creatable (Visor a)
, GameState a
) => IO (Visor a)
loadVisor = do createDirectoryIfMissing True "data"
exists <- fileExist path
visor <- if exists then readVisor else newVisor
return visor
where
name = symbolVal (Proxy :: Proxy (Title a))
path = dir </> name
readVisor :: IO (Visor a)
readVisor = do bs <- BS.readFile path
case decode bs of
Left err -> error err
Right v -> return v
newVisor :: IO (Visor a)
newVisor = do putStrLn$ "Initializing new visor at " ++ path
return$ seeded 9
saveVisor :: forall a.
( Serialize (Visor a)
, GameState a
) => Visor a -> IO ()
saveVisor v = do exists <- fileExist path
flag <- if exists then do putStrLn$ "Visor found at " ++ path ++ ", delete?[Yn] "
a <- getLine
return$ a `notElem` ["n", "N"]
else return True
when flag $ BS.writeFile path (encode v)
where
name = symbolVal (Proxy :: Proxy (Title a))
path = dir </> name
saveVisorContinuous :: forall a.
( Serialize (Visor a)
, GameState a
) => RTSink (Visor a)
saveVisorContinuous = awaitForever $ liftIO . BS.writeFile path . encode
where
name = symbolVal (Proxy :: Proxy (Title a))
path = dir </> name
deleteVisor :: forall a p.
( GameState a
) => p a -> IO ()
deleteVisor _ = removeFile path
where
name = symbolVal (Proxy :: Proxy (Title a))
path = dir </> name
trainC :: ( GameState a
, WVector (Widgets a)
) => Visor a -> RTConduit (Screenshot a, LabelVec a) (Visor a)
trainC visor =
do ms <- await
case ms of
Nothing -> return ()
Just (x, y) ->
do (v', ((p,c),l)) <- trainImage visor x y
liftIO.putStrLn$ showString "Correct: "
. shows p
. showString "/"
. shows c
. showString "\tLoss: "
. showEFloat (Just 5) l $""
yield v'
trainC v'
trainBatchC :: ( Stack n (Widgets a)
) => Visor a -> RTConduit (BatchVec n a) (Visor a)
trainBatchC (Visor visor) = go visor [] []
where
go v ls ps =
do mb <- await
case mb of
Nothing -> return ()
Just b -> do (v', ((p, c),l)) <- trainBatch v b
let ls' = take 20 (l:ls)
ps' = take 20 (p:ps)
liftIO.putStrLn$ showEFloat (Just 2) (median' ls')
. showString " ("
. showEFloat (Just 2) (minimum ls')
. showString " .. "
. showEFloat (Just 2) (maximum ls')
. showString ")\t"
. shows (median ps')
. showString " ("
. shows (minimum ps')
. showString " .. "
. shows (maximum ps')
. showString ")\t("
. shows p
. showString "/"
. shows c
. showString ", "
. showEFloat (Just 4) l
$ ")"
yield (Visor v')
go v' ls' ps'
batchify :: forall n a. (KnownNat n, Stack n (Widgets a)) => BatchC n a
batchify = do xs <- takeC n .| extractC .| sinkList
case stack xs of
Just xs' -> yield xs' >> batchify
Nothing -> return ()
where
n = fromInteger$ natVal (Proxy :: Proxy n)
extractC = awaitForever$ \(shot, LabelVec ls) ->
do xs <- Visor.extract shot
yield (xs, ls)
saveMany :: Serialize a => String -> RTSink a
saveMany name = do liftIO$ createDirectoryIfMissing True dir'
go (0 :: Int)
where dir' = dir </> name
go i = do mx <- await
case mx of
Nothing -> return ()
Just x -> do let path = dir' </> show i
liftIO . putStrLn$ "Writing " ++ path
liftIO$ BS.writeFile path (encode x)
go (i+1)
loadMany :: Serialize a => String -> RTSource a
loadMany name = do sourceDirectory dir' .| awaitForever load
where
dir' = dir </> name
load path = do bs <- liftIO$ BS.readFile path
let ex = decode bs
case ex of
Left err -> liftIO.putStrLn$ err
Right x -> yield x
clear :: String -> IO ()
clear name = do createDirectoryIfMissing True (dir </> name)
removeDirectoryRecursive$ dir </> name
createDirectoryIfMissing True (dir </> name)
saveKernels :: (Head (NetConfig (Head (Widgets g))) ~ Convolution a 3 b c d e, GameState g)
=> Visor g -> IO ()
saveKernels (Visor v) = do clear "krns"
case v of
WNetwork (Convolution k _ _ `NCons` _) :- _ -> I.saveMany (dir</>"krns/") k
_ -> undefined
dir :: FilePath
dir = "data"
| jonascarpay/visor | src/IO.hs | bsd-3-clause | 8,266 | 1 | 34 | 3,374 | 2,605 | 1,290 | 1,315 | 207 | 3 |
{-# LANGUAGE QuasiQuotes #-}
{-@ LIQUID "--no-termination "@-}
{-@ LIQUID "--maxparams=3 "@-}
import LiquidHaskell
-- Source: Okasaki, JFP 1995
-- http://www.westpoint.edu/eecs/SiteAssets/SitePages/Faculty%20Publication%20Documents/Okasaki/jfp95queue.pdf
--------------------------------------------------------------------------------
-- | Sized Lists
--------------------------------------------------------------------------------
data SList a = SL { size :: Int
, elems :: [a]
}
[lq| type SListN a N = {v:SList a | size v = N} |]
-- | Invariant: `size` is really the size:
[lq| data SList a = SL { size :: Int
, elems :: {v:[a] | len v = size}
}
|]
-- | Size function actually returns the size: (Duh!)
[lq| size :: q:SList a -> {v:Nat | v = size q} |]
-- | Non-Empty Lists:
[lq| type NEList a = {v:SList a | size v > 0} |]
[lq| nil :: SListN a 0 |]
nil = SL 0 []
[lq| cons :: a -> xs:SList a -> SListN a {size xs + 1} |]
cons x (SL n xs) = SL (n+1) (x:xs)
[lq| tl :: xs:NEList a -> SListN a {size xs - 1} |]
tl (SL n (_:xs)) = SL (n-1) xs
tl _ = die "never"
[lq| hd :: xs:NEList a -> a |]
hd (SL _ (x:_)) = x
hd _ = die "never"
--------------------------------------------------------------------------------
-- | Sized Lists
--------------------------------------------------------------------------------
data Queue a = Q { left :: SList a
, right :: SList a
}
-- | Invariant: `|right|` <= `|left|`
[lq| data Queue a = Q { left :: SList a
, right :: {v:SList a | size v <= size left}
}
|]
emp = Q nil nil
qsize :: Queue a -> Int
qsize (Q l r) = size l + size r
insert e (Q l r) = makeq l (e `cons` r)
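-- Example (illustrative): qsize (insert 'b' (insert 'a' emp)) == 2, and the
-- left list never becomes shorter than the right one.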
[lq| makeq :: l:_ -> r:{ _ | size r <= size l + 1} -> _ |]
makeq l r
| size r <= size l = Q l r
| otherwise = Q (rot l r nil) nil
[lq| rot :: l:_ -> r:SListN _ {1 + size l} -> a:_ -> {v:_ | size v = size l + size r + size a} |]
rot l r a
| size l == 0 = (hd r) `cons` a
| otherwise = (hd l) `cons` (rot (tl l) (tl r) ((hd r) `cons` a))
[lq| die :: {v:_ | false} -> a |]
die x = error x
| spinda/liquidhaskell | tests/gsoc15/unknown/pos/AmortizedQueue.hs | bsd-3-clause | 2,312 | 0 | 12 | 718 | 526 | 292 | 234 | 35 | 1 |
{-
(c) The GRASP/AQUA Project, Glasgow University, 1993-1998
\section[WorkWrap]{Worker/wrapper-generating back-end of strictness analyser}
-}
{-# LANGUAGE CPP #-}
module WorkWrap ( wwTopBinds ) where
import GhcPrelude
import CoreArity ( manifestArity )
import CoreSyn
import CoreUnfold ( certainlyWillInline, mkWwInlineRule, mkWorkerUnfolding )
import CoreUtils ( exprType, exprIsHNF )
import CoreFVs ( exprFreeVars )
import Var
import Id
import IdInfo
import Type
import UniqSupply
import BasicTypes
import DynFlags
import Demand
import WwLib
import Util
import Outputable
import FamInstEnv
import MonadUtils
#include "HsVersions.h"
{-
We take Core bindings whose binders have:
\begin{enumerate}
\item Strictness attached (by the front-end of the strictness
analyser), and / or
\item Constructed Product Result information attached by the CPR
analysis pass.
\end{enumerate}
and we return some ``plain'' bindings which have been
worker/wrapper-ified, meaning:
\begin{enumerate}
\item Functions have been split into workers and wrappers where
appropriate. If a function has both strictness and CPR properties
then only one worker/wrapper doing both transformations is produced;
\item Binders' @IdInfos@ have been updated to reflect the existence of
these workers/wrappers (this is where we get STRICTNESS and CPR pragma
info for exported values).
\end{enumerate}
-}
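{-
For example (illustrative), a function that is strict in a pair argument,
such as

    f p = case p of (a, b) -> a + b

is split into a worker that takes the two components directly and a small,
always-inlined wrapper that evaluates the pair, unpacks it, and calls the
worker. Call sites then inline the wrapper, so the packing and unpacking
can cancel.
-}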
wwTopBinds :: DynFlags -> FamInstEnvs -> UniqSupply -> CoreProgram -> CoreProgram
wwTopBinds dflags fam_envs us top_binds
= initUs_ us $ do
top_binds' <- mapM (wwBind dflags fam_envs) top_binds
return (concat top_binds')
{-
************************************************************************
* *
\subsection[wwBind-wwExpr]{@wwBind@ and @wwExpr@}
* *
************************************************************************
@wwBind@ works on a binding, trying each \tr{(binder, expr)} pair in
turn. Non-recursive case first, then recursive...
-}
wwBind :: DynFlags
-> FamInstEnvs
-> CoreBind
-> UniqSM [CoreBind] -- returns a WwBinding intermediate form;
-- the caller will convert to Expr/Binding,
-- as appropriate.
wwBind dflags fam_envs (NonRec binder rhs) = do
new_rhs <- wwExpr dflags fam_envs rhs
new_pairs <- tryWW dflags fam_envs NonRecursive binder new_rhs
return [NonRec b e | (b,e) <- new_pairs]
-- Generated bindings must be non-recursive
-- because the original binding was.
wwBind dflags fam_envs (Rec pairs)
= return . Rec <$> concatMapM do_one pairs
where
do_one (binder, rhs) = do new_rhs <- wwExpr dflags fam_envs rhs
tryWW dflags fam_envs Recursive binder new_rhs
{-
@wwExpr@ basically just walks the tree, looking for appropriate
annotations that can be used. Remember it is @wwBind@ that does the
matching by looking for strict arguments of the correct type.
@wwExpr@ is a version that just returns the ``Plain'' Tree.
-}
wwExpr :: DynFlags -> FamInstEnvs -> CoreExpr -> UniqSM CoreExpr
wwExpr _ _ e@(Type {}) = return e
wwExpr _ _ e@(Coercion {}) = return e
wwExpr _ _ e@(Lit {}) = return e
wwExpr _ _ e@(Var {}) = return e
wwExpr dflags fam_envs (Lam binder expr)
= Lam new_binder <$> wwExpr dflags fam_envs expr
where new_binder | isId binder = zapIdUsedOnceInfo binder
| otherwise = binder
-- See Note [Zapping Used Once info in WorkWrap]
wwExpr dflags fam_envs (App f a)
= App <$> wwExpr dflags fam_envs f <*> wwExpr dflags fam_envs a
wwExpr dflags fam_envs (Tick note expr)
= Tick note <$> wwExpr dflags fam_envs expr
wwExpr dflags fam_envs (Cast expr co) = do
new_expr <- wwExpr dflags fam_envs expr
return (Cast new_expr co)
wwExpr dflags fam_envs (Let bind expr)
= mkLets <$> wwBind dflags fam_envs bind <*> wwExpr dflags fam_envs expr
wwExpr dflags fam_envs (Case expr binder ty alts) = do
new_expr <- wwExpr dflags fam_envs expr
new_alts <- mapM ww_alt alts
let new_binder = zapIdUsedOnceInfo binder
-- See Note [Zapping Used Once info in WorkWrap]
return (Case new_expr new_binder ty new_alts)
where
ww_alt (con, binders, rhs) = do
new_rhs <- wwExpr dflags fam_envs rhs
let new_binders = [ if isId b then zapIdUsedOnceInfo b else b
| b <- binders ]
-- See Note [Zapping Used Once info in WorkWrap]
return (con, new_binders, new_rhs)
{-
************************************************************************
* *
\subsection[tryWW]{@tryWW@: attempt a worker/wrapper pair}
* *
************************************************************************
@tryWW@ just accumulates arguments, converts strictness info from the
front-end into the proper form, then calls @mkWwBodies@ to do
the business.
The only reason this is monadised is for the unique supply.
Note [Don't w/w INLINE things]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's very important to refrain from w/w-ing an INLINE function (ie one
with a stable unfolding) because the wrapper will then overwrite the
old stable unfolding with the wrapper code.
Furthermore, if the programmer has marked something as INLINE,
we may lose by w/w'ing it.
If the strictness analyser is run twice, this test also prevents
wrappers (which are INLINEd) from being re-done. (You can end up with
several like-named Ids bouncing around at the same time---absolute
mischief.)
Notice that we refrain from w/w'ing an INLINE function even if it is
in a recursive group. It might not be the loop breaker. (We could
test for loop-breaker-hood, but I'm not sure that ever matters.)
Note [Worker-wrapper for INLINABLE functions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we have
{-# INLINABLE f #-}
f :: Ord a => [a] -> Int -> a
f x y = ....f....
where f is strict in y, we might get a more efficient loop by w/w'ing
f. But that would make a new unfolding which would overwrite the old
one! So the function would no longer be INLINABLE, and in particular
will not be specialised at call sites in other modules.
This comes in practice (#6056).
Solution: do the w/w for strictness analysis, but transfer the Stable
unfolding to the *worker*. So we will get something like this:
{-# INLINE[0] f #-}
f :: Ord a => [a] -> Int -> a
f d x y = case y of I# y' -> fw d x y'
{-# INLINABLE[0] fw #-}
fw :: Ord a => [a] -> Int# -> a
fw d x y' = let y = I# y' in ...f...
How do we "transfer the unfolding"? Easy: by using the old one, wrapped
in work_fn! See CoreUnfold.mkWorkerUnfolding.
Note [Worker-wrapper for NOINLINE functions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We used to disable worker/wrapper for NOINLINE things, but it turns out
this can cause unnecessary reboxing of values. Consider
{-# NOINLINE f #-}
f :: Int -> a
f x = error (show x)
g :: Bool -> Bool -> Int -> Int
g True True p = f p
g False True p = p + 1
g b False p = g b True p
the strictness analysis will discover f and g are strict, but because f
has no wrapper, the worker for g will rebox p. So we get
$wg x y p# =
let p = I# p# in -- Yikes! Reboxing!
case x of
False ->
case y of
False -> $wg False True p#
True -> +# p# 1#
True ->
case y of
False -> $wg True True p#
True -> case f p of { }
g x y p = case p of (I# p#) -> $wg x y p#
Now, in this case the reboxing will float into the True branch, and so
the allocation will only happen on the error path. But it won't float
inwards if there are multiple branches that call (f p), so the reboxing
will happen on every call of g. Disaster.
Solution: do worker/wrapper even on NOINLINE things; but move the
NOINLINE pragma to the worker.
(See #13143 for a real-world example.)
It is crucial that we do this for *all* NOINLINE functions. #10069
demonstrates what happens when we promise to w/w a (NOINLINE) leaf function, but
fail to deliver:
data C = C Int# Int#
{-# NOINLINE c1 #-}
c1 :: C -> Int#
c1 (C _ n) = n
{-# NOINLINE fc #-}
fc :: C -> Int#
fc c = 2 *# c1 c
Failing to w/w `c1`, but still w/wing `fc` leads to the following code:
c1 :: C -> Int#
c1 (C _ n) = n
$wfc :: Int# -> Int#
      $wfc n = let c = C 0# n in 2 *# c1 c
fc :: C -> Int#
fc (C _ n) = $wfc n
Yikes! The reboxed `C` in `$wfc` can't cancel out, so we are in a bad place.
This generalises to any function that derives its strictness signature from
its callees, so we have to make sure that when a function announces particular
strictness properties, we w/w it accordingly, even if it means
splitting a NOINLINE function.
Note [Worker activation]
~~~~~~~~~~~~~~~~~~~~~~~~
Follows on from Note [Worker-wrapper for INLINABLE functions]
It is *vital* that if the worker gets an INLINABLE pragma (from the
original function), then the worker has the same phase activation as
the wrapper (or later). That is necessary to allow the wrapper to
inline into the worker's unfolding: see SimplUtils
Note [Simplifying inside stable unfoldings].
If the original is NOINLINE, it's important that the work inherit the
original activation. Consider
{-# NOINLINE expensive #-}
expensive x = x + 1
f y = let z = expensive y in ...
If expensive's worker inherits the wrapper's activation,
we'll get this (because of the compromise in point (2) of
Note [Wrapper activation])
{-# NOINLINE[0] $wexpensive #-}
$wexpensive x = x + 1
{-# INLINE[0] expensive #-}
expensive x = $wexpensive x
f y = let z = expensive y in ...
and $wexpensive will be immediately inlined into expensive, followed by
expensive into f. This effectively removes the original NOINLINE!
Otherwise, nothing is lost by giving the worker the same activation as the
wrapper, because the worker won't have any chance of inlining until the
wrapper does; there's no point in giving it an earlier activation.
Note [Don't w/w inline small non-loop-breaker things]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In general, we refrain from w/w-ing *small* functions, which are not
loop breakers, because they'll inline anyway. But we must take care:
it may look small now, but get to be big later after other inlining
has happened. So we take the precaution of adding an INLINE pragma to
any such functions.
I made this change when I observed a big function at the end of
compilation with a useful strictness signature but no w-w. (It was
small during demand analysis, we refrained from w/w, and then got big
when something was inlined in its rhs.) When I measured it on nofib,
it didn't make much difference; just a few percent improved allocation
on one benchmark (bspt/Euclid.space). But nothing got worse.
There is an infelicity though. We may get something like
f = g val
==>
g x = case gw x of r -> I# r
f {- InlineStable, Template = g val -}
f = case gw x of r -> I# r
The code for f duplicates that for g, without any real benefit. It
won't really be executed, because calls to f will go via the inlining.
Note [Don't CPR join points]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
There's no point in doing CPR on a join point. If the whole function is getting
CPR'd, then the case expression around the worker function will get pushed into
the join point by the simplifier, which will have the same effect that CPR would
have - the result will be returned in an unboxed tuple.
f z = let join j x y = (x+1, y+1)
in case z of A -> j 1 2
B -> j 2 3
=>
f z = case $wf z of (# a, b #) -> (a, b)
$wf z = case (let join j x y = (x+1, y+1)
in case z of A -> j 1 2
B -> j 2 3) of (a, b) -> (# a, b #)
=>
f z = case $wf z of (# a, b #) -> (a, b)
$wf z = let join j x y = (# x+1, y+1 #)
in case z of A -> j 1 2
B -> j 2 3
Doing CPR on a join point would be tricky anyway, as the worker could not be
a join point because it would not be tail-called. However, doing the *argument*
part of W/W still works for join points, since the wrapper body will make a tail
call:
f z = let join j x y = x + y
in ...
=>
f z = let join $wj x# y# = x# +# y#
j x y = case x of I# x# ->
case y of I# y# ->
$wj x# y#
in ...
Note [Wrapper activation]
~~~~~~~~~~~~~~~~~~~~~~~~~
When should the wrapper inlining be active?
1. It must not be active earlier than the current Activation of the
Id
2. It should be active at some point, despite (1) because of
Note [Worker-wrapper for NOINLINE functions]
3. For ordinary functions with no pragmas we want to inline the
wrapper as early as possible (#15056). Suppose another module
defines f x = g x x
and suppose there is some RULE for (g True True). Then if we have
a call (f True), we'd expect to inline 'f' and the RULE will fire.
But if f is w/w'd (which it might be), we want the inlining to
occur just as if it hadn't been.
(This only matters if f's RHS is big enough to w/w, but small
enough to inline given the call site, but that can happen.)
4. We do not want to inline the wrapper before specialisation.
module Foo where
f :: Num a => a -> Int -> a
f n 0 = n -- Strict in the Int, hence wrapper
f n x = f (n+n) (x-1)
g :: Int -> Int
g x = f x x -- Provokes a specialisation for f
module Bar where
import Foo
h :: Int -> Int
h x = f 3 x
In module Bar we want to give specialisations a chance to fire
before inlining f's wrapper.
Reminder: Note [Don't w/w INLINE things], so we don't need to worry
about INLINE things here.
Conclusion:
- If the user said NOINLINE[n], respect that
- If the user said NOINLINE, inline the wrapper as late as
poss (phase 0). This is a compromise driven by (2) above
- Otherwise inline wrapper in phase 2. That allows the
'gentle' simplification pass to apply specialisation rules
Historical note: At one stage I tried making the wrapper inlining
always-active, and that had a very bad effect on nofib/imaginary/x2n1;
a wrapper was inlined before the specialisation fired.
Note [Wrapper NoUserInline]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
The use of an inl_inline of NoUserInline on the wrapper distinguishes
this pragma from one that was given by the user. In particular, CSE
will not happen if there is a user-specified pragma, but should happen
for w/w’ed things (#14186).
-}
tryWW :: DynFlags
-> FamInstEnvs
-> RecFlag
-> Id -- The fn binder
-> CoreExpr -- The bound rhs; its innards
-- are already ww'd
-> UniqSM [(Id, CoreExpr)] -- either *one* or *two* pairs;
-- if one, then no worker (only
-- the orig "wrapper" lives on);
-- if two, then a worker and a
-- wrapper.
tryWW dflags fam_envs is_rec fn_id rhs
-- See Note [Worker-wrapper for NOINLINE functions]
| Just stable_unf <- certainlyWillInline dflags fn_info
= return [ (fn_id `setIdUnfolding` stable_unf, rhs) ]
-- See Note [Don't w/w INLINE things]
-- See Note [Don't w/w inline small non-loop-breaker things]
| is_fun && is_eta_exp
= splitFun dflags fam_envs new_fn_id fn_info wrap_dmds res_info rhs
| is_thunk -- See Note [Thunk splitting]
= splitThunk dflags fam_envs is_rec new_fn_id rhs
| otherwise
= return [ (new_fn_id, rhs) ]
where
fn_info = idInfo fn_id
(wrap_dmds, res_info) = splitStrictSig (strictnessInfo fn_info)
new_fn_id = zapIdUsedOnceInfo (zapIdUsageEnvInfo fn_id)
-- See Note [Zapping DmdEnv after Demand Analyzer] and
-- See Note [Zapping Used Once info in WorkWrap]
is_fun = notNull wrap_dmds || isJoinId fn_id
-- See Note [Don't eta expand in w/w]
is_eta_exp = length wrap_dmds == manifestArity rhs
is_thunk = not is_fun && not (exprIsHNF rhs) && not (isJoinId fn_id)
&& not (isUnliftedType (idType fn_id))
{-
Note [Zapping DmdEnv after Demand Analyzer]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In the worker-wrapper pass we zap the DmdEnv. Why?
(a) it is never used again
(b) it wastes space
(c) it becomes incorrect as things are cloned, because
we don't push the substitution into it
Why here?
* Because we don’t want to do it in the Demand Analyzer, as we never know
there when we are doing the last pass.
* We want them to be still there at the end of DmdAnal, so that
-ddump-str-anal contains them.
* We don’t want a second pass just for that.
* WorkWrap looks at all bindings anyway.
We also need to do it in TidyCore.tidyLetBndr to clean up after the
final, worker/wrapper-less run of the demand analyser (see
Note [Final Demand Analyser run] in DmdAnal).
Note [Zapping Used Once info in WorkWrap]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In the worker-wrapper pass we zap the used once info in demands and in
strictness signatures.
Why?
* The simplifier may happen to transform code in a way that invalidates the
data (see #11731 for an example).
* It is not used in later passes, up to code generation.
So as the data is useless and possibly wrong, we want to remove it. The most
convenient place to do that is the worker wrapper phase, as it runs after every
run of the demand analyser besides the very last one (which is the one where we
want to _keep_ the info for the code generator).
We do not do it in the demand analyser for the same reasons outlined in
Note [Zapping DmdEnv after Demand Analyzer] above.
Note [Don't eta expand in w/w]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
A binding where the manifestArity of the RHS is less than idArity of the binder
means CoreArity didn't eta expand that binding. When this happens, it does so
for a reason (see Note [exprArity invariant] in CoreArity) and we probably have
a PAP, cast or trivial expression as RHS.
Performing the worker/wrapper split will implicitly eta-expand the binding to
idArity, overriding CoreArity's decision. Other than playing fast and loose with
divergence, it's also broken for newtypes:
f = (\xy.blah) |> co
where
co :: (Int -> Int -> Char) ~ T
Then idArity is 2 (despite the type T), and it can have a StrictSig based on a
threshold of 2. But we can't w/w it without a type error.
The situation is less grave for PAPs, but the implicit eta expansion caused a
compiler allocation regression in T15164, where huge recursive instance method
groups, mostly consisting of PAPs, got w/w'd. This caused great churn in the
simplifier, when simply waiting for the PAPs to inline arrived at the same
output program.
Note there is the worry here that such PAPs and trivial RHSs might not *always*
be inlined. That would lead to reboxing, because the analysis tacitly assumes
that we W/W'd for idArity and will propagate analysis information under that
assumption. So far, this doesn't seem to matter in practice.
See https://gitlab.haskell.org/ghc/ghc/merge_requests/312#note_192064.
-}
---------------------
splitFun :: DynFlags -> FamInstEnvs -> Id -> IdInfo -> [Demand] -> DmdResult -> CoreExpr
-> UniqSM [(Id, CoreExpr)]
splitFun dflags fam_envs fn_id fn_info wrap_dmds res_info rhs
= WARN( not (wrap_dmds `lengthIs` arity), ppr fn_id <+> (ppr arity $$ ppr wrap_dmds $$ ppr res_info) ) do
-- The arity should match the signature
stuff <- mkWwBodies dflags fam_envs rhs_fvs fn_id wrap_dmds use_res_info
case stuff of
Just (work_demands, join_arity, wrap_fn, work_fn) -> do
work_uniq <- getUniqueM
let work_rhs = work_fn rhs
work_act = case fn_inline_spec of -- See Note [Worker activation]
NoInline -> fn_act
_ -> wrap_act
work_prag = InlinePragma { inl_src = SourceText "{-# INLINE"
, inl_inline = fn_inline_spec
, inl_sat = Nothing
, inl_act = work_act
, inl_rule = FunLike }
-- inl_inline: copy from fn_id; see Note [Worker-wrapper for INLINABLE functions]
-- inl_act: see Note [Worker activation]
-- inl_rule: it does not make sense for workers to be constructorlike.
work_join_arity | isJoinId fn_id = Just join_arity
| otherwise = Nothing
-- worker is join point iff wrapper is join point
-- (see Note [Don't CPR join points])
work_id = mkWorkerId work_uniq fn_id (exprType work_rhs)
`setIdOccInfo` occInfo fn_info
-- Copy over occurrence info from parent
-- Notably whether it's a loop breaker
-- Doesn't matter much, since we will simplify next, but
-- seems right-er to do so
`setInlinePragma` work_prag
`setIdUnfolding` mkWorkerUnfolding dflags work_fn fn_unfolding
-- See Note [Worker-wrapper for INLINABLE functions]
`setIdStrictness` mkClosedStrictSig work_demands work_res_info
-- Even though we may not be at top level,
-- it's ok to give it an empty DmdEnv
`setIdDemandInfo` worker_demand
`setIdArity` work_arity
-- Set the arity so that the Core Lint check that the
-- arity is consistent with the demand type goes
-- through
`asJoinId_maybe` work_join_arity
work_arity = length work_demands
-- See Note [Demand on the Worker]
single_call = saturatedByOneShots arity (demandInfo fn_info)
worker_demand | single_call = mkWorkerDemand work_arity
| otherwise = topDmd
wrap_rhs = wrap_fn work_id
wrap_act = case fn_act of -- See Note [Wrapper activation]
ActiveAfter {} -> fn_act
NeverActive -> activeDuringFinal
_ -> activeAfterInitial
wrap_prag = InlinePragma { inl_src = SourceText "{-# INLINE"
, inl_inline = NoUserInline
, inl_sat = Nothing
, inl_act = wrap_act
, inl_rule = rule_match_info }
-- inl_act: see Note [Wrapper activation]
-- inl_inline: see Note [Wrapper NoUserInline]
-- inl_rule: RuleMatchInfo is (and must be) unaffected
wrap_id = fn_id `setIdUnfolding` mkWwInlineRule dflags wrap_rhs arity
`setInlinePragma` wrap_prag
`setIdOccInfo` noOccInfo
-- Zap any loop-breaker-ness, to avoid bleating from Lint
-- about a loop breaker with an INLINE rule
return $ [(work_id, work_rhs), (wrap_id, wrap_rhs)]
-- Worker first, because wrapper mentions it
Nothing -> return [(fn_id, rhs)]
where
rhs_fvs = exprFreeVars rhs
fn_inl_prag = inlinePragInfo fn_info
fn_inline_spec = inl_inline fn_inl_prag
fn_act = inl_act fn_inl_prag
rule_match_info = inlinePragmaRuleMatchInfo fn_inl_prag
fn_unfolding = unfoldingInfo fn_info
arity = arityInfo fn_info
-- The arity is set by the simplifier using exprEtaExpandArity
-- So it may be more than the number of top-level-visible lambdas
use_res_info | isJoinId fn_id = topRes -- Note [Don't CPR join points]
| otherwise = res_info
work_res_info | isJoinId fn_id = res_info -- Worker remains CPR-able
| otherwise
= case returnsCPR_maybe res_info of
Just _ -> topRes -- Cpr stuff done by wrapper; kill it here
Nothing -> res_info -- Preserve exception/divergence
{-
Note [Demand on the worker]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
If the original function is called once, according to its demand info, then
so is the worker. This is important so that the occurrence analyser can
attach OneShot annotations to the worker’s lambda binders.
Example:
-- Original function
f [Demand=<L,1*C1(U)>] :: (a,a) -> a
f = \p -> ...
-- Wrapper
f [Demand=<L,1*C1(U)>] :: a -> a -> a
f = \p -> case p of (a,b) -> $wf a b
-- Worker
$wf [Demand=<L,1*C1(C1(U))>] :: Int -> Int
$wf = \a b -> ...
We need to check whether the original function is called once, with
sufficiently many arguments. This is done using saturatedByOneShots, which
takes the arity of the original function (resp. the wrapper) and the demand on
the original function.
The demand on the worker is then calculated using mkWorkerDemand, and always of
the form [Demand=<L,1*(C1(...(C1(U))))>]
Note [Do not split void functions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this rather common form of binding:
$j = \x:Void# -> ...no use of x...
Since x is not used it'll be marked as absent. But there is no point
in w/w-ing because we'll simply add (\y:Void#), see WwLib.mkWorkerArgs.
If x has a more interesting type (eg Int, or Int#), there *is* a point
in w/w so that we don't pass the argument at all.
Note [Thunk splitting]
~~~~~~~~~~~~~~~~~~~~~~
Suppose x is used strictly (never mind whether it has the CPR
property).
let
x* = x-rhs
in body
splitThunk transforms like this:
let
x* = case x-rhs of { I# a -> I# a }
in body
Now simplifier will transform to
case x-rhs of
I# a -> let x* = I# a
in body
which is what we want. Now suppose x-rhs is itself a case:
x-rhs = case e of { T -> I# a; F -> I# b }
The join point will abstract over a, rather than over x (which is
what would have happened before), which is fine.
Notice that x certainly has the CPR property now!
In fact, splitThunk uses the function argument w/w splitting
function, so that if x's demand is deeper (say U(U(L,L),L))
then the splitting will go deeper too.
-}
-- See Note [Thunk splitting]
-- splitThunk converts the *non-recursive* binding
-- x = e
-- into
-- x = let x = e
-- in case x of
-- I# y -> let x = I# y in x }
-- See comments above. Is it not beautifully short?
-- Moreover, it works just as well when there are
-- several binders, and if the binders are lifted
-- E.g. x = e
-- --> x = let x = e in
-- case x of (a,b) -> let x = (a,b) in x
splitThunk :: DynFlags -> FamInstEnvs -> RecFlag -> Var -> Expr Var -> UniqSM [(Var, Expr Var)]
splitThunk dflags fam_envs is_rec fn_id rhs
= ASSERT(not (isJoinId fn_id))
do { (useful,_, wrap_fn, work_fn) <- mkWWstr dflags fam_envs False [fn_id]
; let res = [ (fn_id, Let (NonRec fn_id rhs) (wrap_fn (work_fn (Var fn_id)))) ]
; if useful then ASSERT2( isNonRec is_rec, ppr fn_id ) -- The thunk must be non-recursive
return res
else return [(fn_id, rhs)] }
| sdiehl/ghc | compiler/stranal/WorkWrap.hs | bsd-3-clause | 28,177 | 0 | 25 | 8,003 | 2,095 | 1,106 | 989 | -1 | -1 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
TcSplice: Template Haskell splices
-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE InstanceSigs #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module TcSplice(
-- These functions are defined in stage1 and stage2
-- They raise civilised errors in stage1
tcSpliceExpr, tcTypedBracket, tcUntypedBracket,
-- runQuasiQuoteExpr, runQuasiQuotePat,
-- runQuasiQuoteDecl, runQuasiQuoteType,
runAnnotation,
#ifdef GHCI
-- These ones are defined only in stage2, and are
-- called only in stage2 (ie GHCI is on)
runMetaE, runMetaP, runMetaT, runMetaD, runQuasi,
tcTopSpliceExpr, lookupThName_maybe,
defaultRunMeta, runMeta'
#endif
) where
#include "HsVersions.h"
import HsSyn
import Annotations
import Name
import TcRnMonad
import TcType
import Outputable
import TcExpr
import SrcLoc
import FastString
import THNames
import TcUnify
import TcEnv
#ifdef GHCI
import HscMain
-- These imports are the reason that TcSplice
-- is very high up the module hierarchy
import RnSplice( traceSplice, SpliceInfo(..) )
import RdrName
import HscTypes
import Convert
import RnExpr
import RnEnv
import RnTypes
import TcHsSyn
import TcSimplify
import Type
import Kind
import NameSet
import TcMType
import TcHsType
import TcIface
import TypeRep
import FamInst
import FamInstEnv
import InstEnv
import NameEnv
import PrelNames
import OccName
import Hooks
import Var
import Module
import LoadIface
import Class
import Inst
import TyCon
import CoAxiom
import PatSyn ( patSynName )
import ConLike
import DataCon
import TcEvidence( TcEvBinds(..) )
import Id
import IdInfo
import DsExpr
import DsMonad
import Serialized
import ErrUtils
import Util
import Unique
import VarSet ( isEmptyVarSet )
import Data.Maybe
import BasicTypes hiding( SuccessFlag(..) )
import Maybes( MaybeErr(..) )
import DynFlags
import Panic
import Lexeme
import qualified Language.Haskell.TH as TH
-- THSyntax gives access to internal functions and data types
import qualified Language.Haskell.TH.Syntax as TH
-- Because GHC.Desugar might not be in the base library of the bootstrapping compiler
import GHC.Desugar ( AnnotationWrapper(..) )
import qualified Data.Map as Map
import Data.Dynamic ( fromDynamic, toDyn )
import Data.Typeable ( typeOf, Typeable )
import Data.Data (Data)
import GHC.Exts ( unsafeCoerce# )
#endif
{-
************************************************************************
* *
\subsection{Main interface + stubs for the non-GHCI case}
* *
************************************************************************
-}
tcTypedBracket :: HsBracket Name -> TcRhoType -> TcM (HsExpr TcId)
tcUntypedBracket :: HsBracket Name -> [PendingRnSplice] -> TcRhoType -> TcM (HsExpr TcId)
tcSpliceExpr :: HsSplice Name -> TcRhoType -> TcM (HsExpr TcId)
-- None of these functions add constraints to the LIE
-- runQuasiQuoteExpr :: HsQuasiQuote RdrName -> RnM (LHsExpr RdrName)
-- runQuasiQuotePat :: HsQuasiQuote RdrName -> RnM (LPat RdrName)
-- runQuasiQuoteType :: HsQuasiQuote RdrName -> RnM (LHsType RdrName)
-- runQuasiQuoteDecl :: HsQuasiQuote RdrName -> RnM [LHsDecl RdrName]
runAnnotation :: CoreAnnTarget -> LHsExpr Name -> TcM Annotation
{-
************************************************************************
* *
\subsection{Quoting an expression}
* *
************************************************************************
-}
-- See Note [How brackets and nested splices are handled]
-- tcTypedBracket :: HsBracket Name -> TcRhoType -> TcM (HsExpr TcId)
tcTypedBracket brack@(TExpBr expr) res_ty
= addErrCtxt (quotationCtxtDoc brack) $
do { cur_stage <- getStage
; ps_ref <- newMutVar []
; lie_var <- getConstraintVar -- Any constraints arising from nested splices
-- should get thrown into the constraint set
-- from outside the bracket
-- Typecheck expr to make sure it is valid,
-- Throw away the typechecked expression but return its type.
-- We'll typecheck it again when we splice it in somewhere
; (_tc_expr, expr_ty) <- setStage (Brack cur_stage (TcPending ps_ref lie_var)) $
tcInferRhoNC expr
-- NC for no context; tcBracket does that
; meta_ty <- tcTExpTy expr_ty
; co <- unifyType meta_ty res_ty
; ps' <- readMutVar ps_ref
; texpco <- tcLookupId unsafeTExpCoerceName
; return (mkHsWrapCo co (unLoc (mkHsApp (nlHsTyApp texpco [expr_ty])
(noLoc (HsTcBracketOut brack ps'))))) }
tcTypedBracket other_brack _
= pprPanic "tcTypedBracket" (ppr other_brack)
-- tcUntypedBracket :: HsBracket Name -> [PendingRnSplice] -> TcRhoType -> TcM (HsExpr TcId)
tcUntypedBracket brack ps res_ty
= do { traceTc "tc_bracket untyped" (ppr brack $$ ppr ps)
; ps' <- mapM tcPendingSplice ps
; meta_ty <- tcBrackTy brack
; co <- unifyType meta_ty res_ty
; traceTc "tc_bracket done untyped" (ppr meta_ty)
; return (mkHsWrapCo co (HsTcBracketOut brack ps')) }
---------------
tcBrackTy :: HsBracket Name -> TcM TcType
tcBrackTy (VarBr _ _) = tcMetaTy nameTyConName -- Result type is Var (not Q-monadic)
tcBrackTy (ExpBr _) = tcMetaTy expQTyConName -- Result type is ExpQ (= Q Exp)
tcBrackTy (TypBr _) = tcMetaTy typeQTyConName -- Result type is Type (= Q Typ)
tcBrackTy (DecBrG _) = tcMetaTy decsQTyConName -- Result type is Q [Dec]
tcBrackTy (PatBr _) = tcMetaTy patQTyConName -- Result type is PatQ (= Q Pat)
tcBrackTy (DecBrL _) = panic "tcBrackTy: Unexpected DecBrL"
tcBrackTy (TExpBr _) = panic "tcUntypedBracket: Unexpected TExpBr"
---------------
tcPendingSplice :: PendingRnSplice -> TcM PendingTcSplice
tcPendingSplice (PendingRnSplice flavour splice_name expr)
= do { res_ty <- tcMetaTy meta_ty_name
; expr' <- tcMonoExpr expr res_ty
; return (PendingTcSplice splice_name expr') }
where
meta_ty_name = case flavour of
UntypedExpSplice -> expQTyConName
UntypedPatSplice -> patQTyConName
UntypedTypeSplice -> typeQTyConName
UntypedDeclSplice -> decsQTyConName
---------------
-- Takes a type tau and returns the type Q (TExp tau)
tcTExpTy :: TcType -> TcM TcType
tcTExpTy tau
= do { q <- tcLookupTyCon qTyConName
; texp <- tcLookupTyCon tExpTyConName
; return (mkTyConApp q [mkTyConApp texp [tau]]) }
quotationCtxtDoc :: HsBracket Name -> SDoc
quotationCtxtDoc br_body
= hang (ptext (sLit "In the Template Haskell quotation"))
2 (ppr br_body)
#ifndef GHCI
tcSpliceExpr e _ = failTH e "Template Haskell splice"
-- runQuasiQuoteExpr q = failTH q "quasiquote"
-- runQuasiQuotePat q = failTH q "pattern quasiquote"
-- runQuasiQuoteType q = failTH q "type quasiquote"
-- runQuasiQuoteDecl q = failTH q "declaration quasiquote"
runAnnotation _ q = failTH q "annotation"
#else
-- The whole of the rest of the file is the else-branch (ie stage2 only)
{-
Note [How top-level splices are handled]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Top-level splices (those not inside a [| .. |] quotation bracket) are handled
very straightforwardly:
1. tcTopSpliceExpr: typecheck the body e of the splice $(e)
2. runMetaT: desugar, compile, run it, and convert result back to
HsSyn RdrName (of the appropriate flavour, eg HsType RdrName,
HsExpr RdrName etc)
3. treat the result as if that's what you saw in the first place
e.g. for HsType, rename and kind-check
for HsExpr, rename and type-check
(The last step is different for decls, because they can *only* be
top-level: we return the result of step 2.)
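A tiny example (illustrative only, not taken from the compiler):

    f = $( litE (integerL 3) )

Step 1 typechecks the splice body (litE (integerL 3) :: Q Exp); step 2
desugars, compiles and runs it, producing the TH value (LitE (IntegerL 3)),
which is converted back to an HsExpr RdrName; step 3 then renames and
typechecks that expression exactly as if the user had written  f = 3.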
Note [How brackets and nested splices are handled]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Nested splices (those inside a [| .. |] quotation bracket),
are treated quite differently.
Remember, there are two forms of bracket
typed [|| e ||]
and untyped [| e |]
The life cycle of a typed bracket:
* Starts as HsBracket
* When renaming:
* Set the ThStage to (Brack s RnPendingTyped)
* Rename the body
* Result is still a HsBracket
* When typechecking:
* Set the ThStage to (Brack s (TcPending ps_var lie_var))
* Typecheck the body, and throw away the elaborated result
* Nested splices (which must be typed) are typechecked, and
the results accumulated in ps_var; their constraints
accumulate in lie_var
* Result is a HsTcBracketOut rn_brack pending_splices
where rn_brack is the incoming renamed bracket
The life cycle of a un-typed bracket:
* Starts as HsBracket
* When renaming:
* Set the ThStage to (Brack s (RnPendingUntyped ps_var))
* Rename the body
* Nested splices (which must be untyped) are renamed, and the
results accumulated in ps_var
* Result is still (HsRnBracketOut rn_body pending_splices)
* When typechecking a HsRnBracketOut
* Typecheck the pending_splices individually
* Ignore the body of the bracket; just check that the context
expects a bracket of that type (e.g. a [p| pat |] bracket should
      be in a context needing a (Q Pat))
* Result is a HsTcBracketOut rn_brack pending_splices
where rn_brack is the incoming renamed bracket
In both cases, desugaring happens like this:
* HsTcBracketOut is desugared by DsMeta.dsBracket. It
a) Extends the ds_meta environment with the PendingSplices
attached to the bracket
b) Converts the quoted (HsExpr Name) to a CoreExpr that, when
run, will produce a suitable TH expression/type/decl. This
is why we leave the *renamed* expression attached to the bracket:
the quoted expression should not be decorated with all the goop
added by the type checker
* Each splice carries a unique Name, called a "splice point", thus
${n}(e). The name is initialised to an (Unqual "splice") when the
splice is created; the renamer gives it a unique.
* When DsMeta (used to desugar the body of the bracket) comes across
a splice, it looks up the splice's Name, n, in the ds_meta envt,
to find an (HsExpr Id) that should be substituted for the splice;
it just desugars it to get a CoreExpr (DsMeta.repSplice).
Example:
Source: f = [| Just $(g 3) |]
The [| |] part is a HsBracket
Typechecked: f = [| Just ${s7}(g 3) |]{s7 = g Int 3}
The [| |] part is a HsBracketOut, containing *renamed*
(not typechecked) expression
The "s7" is the "splice point"; the (g Int 3) part
is a typechecked expression
Desugared: f = do { s7 <- g Int 3
; return (ConE "Data.Maybe.Just" s7) }
Note [Template Haskell state diagram]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Here are the ThStages, s, their corresponding level numbers
(the result of (thLevel s)), and their state transitions.
The top level of the program is stage Comp:
Start here
|
V
----------- $ ------------ $
| Comp | ---------> | Splice | -----|
| 1 | | 0 | <----|
----------- ------------
^ | ^ |
$ | | [||] $ | | [||]
| v | v
-------------- ----------------
| Brack Comp | | Brack Splice |
| 2 | | 1 |
-------------- ----------------
* Normal top-level declarations start in state Comp
(which has level 1).
Annotations start in state Splice, since they are
treated very like a splice (only without a '$')
* Code compiled in state Splice (and only such code)
will be *run at compile time*, with the result replacing
the splice
* The original paper used level -1 instead of 0, etc.
* The original paper did not allow a splice within a
splice, but there is no reason not to. This is the
$ transition in the top right.
Note [Template Haskell levels]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* Imported things are impLevel (= 0)
* However things at level 0 are not *necessarily* imported.
eg $( \b -> ... ) here b is bound at level 0
* In GHCi, variables bound by a previous command are treated
as impLevel, because we have bytecode for them.
* Variables are bound at the "current level"
* The current level starts off at outerLevel (= 1)
* The level is decremented by splicing $(..)
incremented by brackets [| |]
incremented by name-quoting 'f
When a variable is used, we compare
bind: binding level, and
use: current level at usage site
Generally
bind > use Always error (bound later than used)
[| \x -> $(f x) |]
bind = use Always OK (bound same stage as used)
[| \x -> $(f [| x |]) |]
bind < use Inside brackets, it depends
Inside splice, OK
Inside neither, OK
For (bind < use) inside brackets, there are three cases:
- Imported things OK f = [| map |]
- Top-level things OK g = [| f |]
- Non-top-level Only if there is a liftable instance
h = \(x:Int) -> [| x |]
To track top-level-ness we use the ThBindEnv in TcLclEnv
For example:
f = ...
g1 = $(map ...) is OK
g2 = $(f ...) is not OK, because we haven't compiled f yet
-}
{-
************************************************************************
* *
\subsection{Splicing an expression}
* *
************************************************************************
-}
tcSpliceExpr splice@(HsTypedSplice name expr) res_ty
= addErrCtxt (spliceCtxtDoc splice) $
setSrcSpan (getLoc expr) $ do
{ stage <- getStage
; case stage of
Splice {} -> tcTopSplice expr res_ty
Comp -> tcTopSplice expr res_ty
Brack pop_stage pend -> tcNestedSplice pop_stage pend name expr res_ty }
tcSpliceExpr splice _
= pprPanic "tcSpliceExpr" (ppr splice)
tcNestedSplice :: ThStage -> PendingStuff -> Name
-> LHsExpr Name -> TcRhoType -> TcM (HsExpr Id)
-- See Note [How brackets and nested splices are handled]
-- A splice inside brackets
tcNestedSplice pop_stage (TcPending ps_var lie_var) splice_name expr res_ty
= do { meta_exp_ty <- tcTExpTy res_ty
; expr' <- setStage pop_stage $
setConstraintVar lie_var $
tcMonoExpr expr meta_exp_ty
; untypeq <- tcLookupId unTypeQName
; let expr'' = mkHsApp (nlHsTyApp untypeq [res_ty]) expr'
; ps <- readMutVar ps_var
; writeMutVar ps_var (PendingTcSplice splice_name expr'' : ps)
-- The returned expression is ignored; it's in the pending splices
; return (panic "tcSpliceExpr") }
tcNestedSplice _ _ splice_name _ _
= pprPanic "tcNestedSplice: rename stage found" (ppr splice_name)
tcTopSplice :: LHsExpr Name -> TcRhoType -> TcM (HsExpr Id)
tcTopSplice expr res_ty
= do { -- Typecheck the expression,
-- making sure it has type Q (TExp res_ty)
meta_exp_ty <- tcTExpTy res_ty
; zonked_q_expr <- tcTopSpliceExpr True $
tcMonoExpr expr meta_exp_ty
-- Run the expression
; expr2 <- runMetaE zonked_q_expr
; traceSplice (SpliceInfo { spliceDescription = "expression"
, spliceIsDecl = False
, spliceSource = Just expr
, spliceGenerated = ppr expr2 })
-- Rename and typecheck the spliced-in expression,
-- making sure it has type res_ty
-- These steps should never fail; this is a *typed* splice
; addErrCtxt (spliceResultDoc expr) $ do
{ (exp3, _fvs) <- rnLExpr expr2
; exp4 <- tcMonoExpr exp3 res_ty
; return (unLoc exp4) } }
{-
************************************************************************
* *
\subsection{Error messages}
* *
************************************************************************
-}
spliceCtxtDoc :: HsSplice Name -> SDoc
spliceCtxtDoc splice
= hang (ptext (sLit "In the Template Haskell splice"))
2 (pprSplice splice)
spliceResultDoc :: LHsExpr Name -> SDoc
spliceResultDoc expr
= sep [ ptext (sLit "In the result of the splice:")
, nest 2 (char '$' <> pprParendExpr expr)
, ptext (sLit "To see what the splice expanded to, use -ddump-splices")]
-------------------
tcTopSpliceExpr :: Bool -> TcM (LHsExpr Id) -> TcM (LHsExpr Id)
-- Note [How top-level splices are handled]
-- Type check an expression that is the body of a top-level splice
-- (the caller will compile and run it)
-- Note that set the level to Splice, regardless of the original level,
-- before typechecking the expression. For example:
-- f x = $( ...$(g 3) ... )
-- The recursive call to tcMonoExpr will simply expand the
-- inner escape before dealing with the outer one
tcTopSpliceExpr isTypedSplice tc_action
= checkNoErrs $ -- checkNoErrs: must not try to run the thing
-- if the type checker fails!
unsetGOptM Opt_DeferTypeErrors $
-- Don't defer type errors. Not only are we
-- going to run this code, but we do an unsafe
-- coerce, so we get a seg-fault if, say we
-- splice a type into a place where an expression
-- is expected (Trac #7276)
setStage (Splice isTypedSplice) $
do { -- Typecheck the expression
(expr', lie) <- captureConstraints tc_action
-- Solve the constraints
; const_binds <- simplifyTop lie
-- Zonk it and tie the knot of dictionary bindings
; zonkTopLExpr (mkHsDictLet (EvBinds const_binds) expr') }
{-
************************************************************************
* *
Annotations
* *
************************************************************************
-}
runAnnotation target expr = do
-- Find the classes we want instances for in order to call toAnnotationWrapper
loc <- getSrcSpanM
data_class <- tcLookupClass dataClassName
to_annotation_wrapper_id <- tcLookupId toAnnotationWrapperName
-- Check the instances we require live in another module (we want to execute it..)
-- and check identifiers live in other modules using TH stage checks. tcSimplifyStagedExpr
-- also resolves the LIE constraints to detect e.g. instance ambiguity
zonked_wrapped_expr' <- tcTopSpliceExpr False $
do { (expr', expr_ty) <- tcInferRhoNC expr
-- We manually wrap the typechecked expression in a call to toAnnotationWrapper
-- By instantiating the call >here< it gets registered in the
-- LIE consulted by tcTopSpliceExpr
-- and hence ensures the appropriate dictionary is bound by const_binds
; wrapper <- instCall AnnOrigin [expr_ty] [mkClassPred data_class [expr_ty]]
; let specialised_to_annotation_wrapper_expr
= L loc (HsWrap wrapper (HsVar to_annotation_wrapper_id))
; return (L loc (HsApp specialised_to_annotation_wrapper_expr expr')) }
-- Run the appropriately wrapped expression to get the value of
-- the annotation and its dictionaries. The return value is of
-- type AnnotationWrapper by construction, so this conversion is
-- safe
serialized <- runMetaAW zonked_wrapped_expr'
return Annotation {
ann_target = target,
ann_value = serialized
}
convertAnnotationWrapper :: AnnotationWrapper -> Either MsgDoc Serialized
convertAnnotationWrapper annotation_wrapper = Right $
case annotation_wrapper of
AnnotationWrapper value | let serialized = toSerialized serializeWithData value ->
-- Got the value and dictionaries: build the serialized value and
-- call it a day. We ensure that we seq the entire serialized value
-- in order that any errors in the user-written code for the
-- annotation are exposed at this point. This is also why we are
-- doing all this stuff inside the context of runMeta: it has the
-- facilities to deal with user error in a meta-level expression
seqSerialized serialized `seq` serialized
{-
************************************************************************
* *
\subsection{Running an expression}
* *
************************************************************************
-}
runQuasi :: TH.Q a -> TcM a
runQuasi act = TH.runQ act
runQResult :: (a -> String) -> (SrcSpan -> a -> b) -> SrcSpan -> TH.Q a -> TcM b
runQResult show_th f expr_span hval
= do { th_result <- TH.runQ hval
; traceTc "Got TH result:" (text (show_th th_result))
; return (f expr_span th_result) }
-----------------
runMeta :: (MetaHook TcM -> LHsExpr Id -> TcM hs_syn)
-> LHsExpr Id
-> TcM hs_syn
runMeta unwrap e
= do { h <- getHooked runMetaHook defaultRunMeta
; unwrap h e }
defaultRunMeta :: MetaHook TcM
defaultRunMeta (MetaE r)
= fmap r . runMeta' True ppr (runQResult TH.pprint convertToHsExpr)
defaultRunMeta (MetaP r)
= fmap r . runMeta' True ppr (runQResult TH.pprint convertToPat)
defaultRunMeta (MetaT r)
= fmap r . runMeta' True ppr (runQResult TH.pprint convertToHsType)
defaultRunMeta (MetaD r)
= fmap r . runMeta' True ppr (runQResult TH.pprint convertToHsDecls)
defaultRunMeta (MetaAW r)
= fmap r . runMeta' False (const empty) (const (return . convertAnnotationWrapper))
-- We turn off showing the code in meta-level exceptions because doing so exposes
-- the toAnnotationWrapper function that we slap around the user's code
----------------
runMetaAW :: LHsExpr Id -- Of type AnnotationWrapper
-> TcM Serialized
runMetaAW = runMeta metaRequestAW
runMetaE :: LHsExpr Id -- Of type (Q Exp)
-> TcM (LHsExpr RdrName)
runMetaE = runMeta metaRequestE
runMetaP :: LHsExpr Id -- Of type (Q Pat)
-> TcM (LPat RdrName)
runMetaP = runMeta metaRequestP
runMetaT :: LHsExpr Id -- Of type (Q Type)
-> TcM (LHsType RdrName)
runMetaT = runMeta metaRequestT
runMetaD :: LHsExpr Id -- Of type Q [Dec]
-> TcM [LHsDecl RdrName]
runMetaD = runMeta metaRequestD
---------------
runMeta' :: Bool -- Whether code should be printed in the exception message
-> (hs_syn -> SDoc) -- how to print the code
-> (SrcSpan -> x -> TcM (Either MsgDoc hs_syn)) -- How to run x
-> LHsExpr Id -- Of type x; typically x = Q TH.Exp, or something like that
-> TcM hs_syn -- Of type t
runMeta' show_code ppr_hs run_and_convert expr
= do { traceTc "About to run" (ppr expr)
; recordThSpliceUse -- seems to be the best place to do this,
-- we catch all kinds of splices and annotations.
-- Check that we've had no errors of any sort so far.
-- For example, if we found an error in an earlier defn f, but
-- recovered giving it type f :: forall a.a, it'd be very dodgy
-- to carry on. Mind you, the staging restrictions mean we won't
-- actually run f, but it still seems wrong. And, more concretely,
-- see Trac #5358 for an example that fell over when trying to
-- reify a function with a "?" kind in it. (These don't occur
-- in type-correct programs.)
; failIfErrsM
-- Desugar
; ds_expr <- initDsTc (dsLExpr expr)
-- Compile and link it; might fail if linking fails
; hsc_env <- getTopEnv
; src_span <- getSrcSpanM
; traceTc "About to run (desugared)" (ppr ds_expr)
; either_hval <- tryM $ liftIO $
HscMain.hscCompileCoreExpr hsc_env src_span ds_expr
; case either_hval of {
Left exn -> fail_with_exn "compile and link" exn ;
Right hval -> do
{ -- Coerce it to Q t, and run it
-- Running might fail if it throws an exception of any kind (hence tryAllM)
-- including, say, a pattern-match exception in the code we are running
--
-- We also do the TH -> HS syntax conversion inside the same
-- exception-catching thing so that if there are any lurking
-- exceptions in the data structure returned by hval, we'll
-- encounter them inside the try
--
-- See Note [Exceptions in TH]
let expr_span = getLoc expr
; either_tval <- tryAllM $
setSrcSpan expr_span $ -- Set the span so that qLocation can
-- see where this splice is
do { mb_result <- run_and_convert expr_span (unsafeCoerce# hval)
; case mb_result of
Left err -> failWithTc err
Right result -> do { traceTc "Got HsSyn result:" (ppr_hs result)
; return $! result } }
; case either_tval of
Right v -> return v
Left se -> case fromException se of
Just IOEnvFailure -> failM -- Error already in Tc monad
_ -> fail_with_exn "run" se -- Exception
}}}
where
-- see Note [Concealed TH exceptions]
fail_with_exn phase exn = do
exn_msg <- liftIO $ Panic.safeShowException exn
let msg = vcat [text "Exception when trying to" <+> text phase <+> text "compile-time code:",
nest 2 (text exn_msg),
if show_code then text "Code:" <+> ppr expr else empty]
failWithTc msg
{-
Note [Exceptions in TH]
~~~~~~~~~~~~~~~~~~~~~~~
Suppose we have something like this
$( f 4 )
where
f :: Int -> Q [Dec]
f n | n>3 = fail "Too many declarations"
| otherwise = ...
The 'fail' is a user-generated failure, and should be displayed as a
perfectly ordinary compiler error message, not a panic or anything
like that. Here's how it's processed:
* 'fail' is the monad fail. The monad instance for Q in TH.Syntax
effectively transforms (fail s) to
qReport True s >> fail
where 'qReport' comes from the Quasi class and fail from its monad
superclass.
* The TcM monad is an instance of Quasi (see TcSplice), and it implements
(qReport True s) by using addErr to add an error message to the bag of errors.
The 'fail' in TcM raises an IOEnvFailure exception
* 'qReport' forces the message to ensure any exception hidden in an unevaluated
thunk doesn't get into the bag of errors. Otherwise the following splice
  will trigger a panic (Trac #8987):
$(fail undefined)
See also Note [Concealed TH exceptions]
* So, when running a splice, we catch all exceptions; then for
- an IOEnvFailure exception, we assume the error is already
in the error-bag (above)
- other errors, we add an error to the bag
and then fail
Note [Concealed TH exceptions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When displaying the error message contained in an exception originated from TH
code, we need to make sure that the error message itself does not contain an
exception. For example, when executing the following splice:
$( error ("foo " ++ error "bar") )
the message for the outer exception is a thunk which will throw the inner
exception when evaluated.
For this reason, we display the message of a TH exception using the
'safeShowException' function, which recursively catches any exception thrown
when showing an error message.
To call runQ in the Tc monad, we need to make TcM an instance of Quasi:
-}
instance TH.Quasi (IOEnv (Env TcGblEnv TcLclEnv)) where
qNewName s = do { u <- newUnique
; let i = getKey u
; return (TH.mkNameU s i) }
-- 'msg' is forced to ensure exceptions don't escape,
-- see Note [Exceptions in TH]
qReport True msg = seqList msg $ addErr (text msg)
qReport False msg = seqList msg $ addWarn (text msg)
qLocation = do { m <- getModule
; l <- getSrcSpanM
; r <- case l of
UnhelpfulSpan _ -> pprPanic "qLocation: Unhelpful location"
(ppr l)
RealSrcSpan s -> return s
; return (TH.Loc { TH.loc_filename = unpackFS (srcSpanFile r)
, TH.loc_module = moduleNameString (moduleName m)
, TH.loc_package = packageKeyString (modulePackageKey m)
, TH.loc_start = (srcSpanStartLine r, srcSpanStartCol r)
, TH.loc_end = (srcSpanEndLine r, srcSpanEndCol r) }) }
qLookupName = lookupName
qReify = reify
qReifyFixity nm = lookupThName nm >>= reifyFixity
qReifyInstances = reifyInstances
qReifyRoles = reifyRoles
qReifyAnnotations = reifyAnnotations
qReifyModule = reifyModule
-- For qRecover, discard error messages if
-- the recovery action is chosen. Otherwise
-- we'll only fail higher up. c.f. tryTcLIE_
qRecover recover main = do { (msgs, mb_res) <- tryTcErrs main
; case mb_res of
Just val -> do { addMessages msgs -- There might be warnings
; return val }
Nothing -> recover -- Discard all msgs
}
qRunIO io = liftIO io
qAddDependentFile fp = do
ref <- fmap tcg_dependent_files getGblEnv
dep_files <- readTcRef ref
writeTcRef ref (fp:dep_files)
qAddTopDecls thds = do
l <- getSrcSpanM
let either_hval = convertToHsDecls l thds
ds <- case either_hval of
Left exn -> pprPanic "qAddTopDecls: can't convert top-level declarations" exn
Right ds -> return ds
mapM_ (checkTopDecl . unLoc) ds
th_topdecls_var <- fmap tcg_th_topdecls getGblEnv
updTcRef th_topdecls_var (\topds -> ds ++ topds)
where
checkTopDecl :: HsDecl RdrName -> TcM ()
checkTopDecl (ValD binds)
= mapM_ bindName (collectHsBindBinders binds)
checkTopDecl (SigD _)
= return ()
checkTopDecl (AnnD _)
= return ()
checkTopDecl (ForD (ForeignImport (L _ name) _ _ _))
= bindName name
checkTopDecl _
= addErr $ text "Only function, value, annotation, and foreign import declarations may be added with addTopDecl"
bindName :: RdrName -> TcM ()
bindName (Exact n)
= do { th_topnames_var <- fmap tcg_th_topnames getGblEnv
; updTcRef th_topnames_var (\ns -> extendNameSet ns n)
}
bindName name =
addErr $
hang (ptext (sLit "The binder") <+> quotes (ppr name) <+> ptext (sLit "is not a NameU."))
2 (text "Probable cause: you used mkName instead of newName to generate a binding.")
qAddModFinalizer fin = do
th_modfinalizers_var <- fmap tcg_th_modfinalizers getGblEnv
updTcRef th_modfinalizers_var (\fins -> fin:fins)
qGetQ :: forall a. Typeable a => IOEnv (Env TcGblEnv TcLclEnv) (Maybe a)
qGetQ = do
th_state_var <- fmap tcg_th_state getGblEnv
th_state <- readTcRef th_state_var
-- See #10596 for why we use a scoped type variable here.
-- ToDo: convert @undefined :: a@ to @proxy :: Proxy a@ when
-- we drop support for GHC 7.6.
return (Map.lookup (typeOf (undefined :: a)) th_state >>= fromDynamic)
qPutQ x = do
th_state_var <- fmap tcg_th_state getGblEnv
updTcRef th_state_var (\m -> Map.insert (typeOf x) (toDyn x) m)
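-- Illustration (not part of this module): qGetQ/qPutQ above back TH's
-- user-facing getQ/putQ, a per-module state map keyed by TypeRep. For
-- example, a splice could count how many times it has run in this module
-- (the Int key here is purely hypothetical):
--
--     do { mb <- TH.getQ :: TH.Q (Maybe Int)
--        ; TH.putQ (maybe (1 :: Int) (+ 1) mb)
--        ; ... }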
{-
************************************************************************
* *
Instance Testing
* *
************************************************************************
-}
reifyInstances :: TH.Name -> [TH.Type] -> TcM [TH.Dec]
reifyInstances th_nm th_tys
= addErrCtxt (ptext (sLit "In the argument of reifyInstances:")
<+> ppr_th th_nm <+> sep (map ppr_th th_tys)) $
do { loc <- getSrcSpanM
; rdr_ty <- cvt loc (mkThAppTs (TH.ConT th_nm) th_tys)
-- #9262 says to bring vars into scope, like in HsForAllTy case
-- of rnHsTyKi
; let (kvs, tvs) = extractHsTyRdrTyVars rdr_ty
tv_bndrs = userHsTyVarBndrs loc tvs
hs_tvbs = mkHsQTvs tv_bndrs
-- Rename to HsType Name
; ((rn_tvbs, rn_ty), _fvs)
<- bindHsTyVars doc Nothing kvs hs_tvbs $ \ rn_tvbs ->
do { (rn_ty, fvs) <- rnLHsType doc rdr_ty
; return ((rn_tvbs, rn_ty), fvs) }
; (ty, _kind) <- tcHsTyVarBndrs rn_tvbs $ \ _tvs ->
tcLHsType rn_ty
; ty <- zonkTcTypeToType emptyZonkEnv ty
-- Substitute out the meta type variables
-- In particular, the type might have kind
-- variables inside it (Trac #7477)
; traceTc "reifyInstances" (ppr ty $$ ppr (typeKind ty))
; case splitTyConApp_maybe ty of -- This expands any type synonyms
Just (tc, tys) -- See Trac #7910
| Just cls <- tyConClass_maybe tc
-> do { inst_envs <- tcGetInstEnvs
; let (matches, unifies, _) = lookupInstEnv False inst_envs cls tys
; traceTc "reifyInstances1" (ppr matches)
; reifyClassInstances cls (map fst matches ++ unifies) }
| isOpenFamilyTyCon tc
-> do { inst_envs <- tcGetFamInstEnvs
; let matches = lookupFamInstEnv inst_envs tc tys
; traceTc "reifyInstances2" (ppr matches)
; reifyFamilyInstances tc (map fim_instance matches) }
_ -> bale_out (hang (ptext (sLit "reifyInstances:") <+> quotes (ppr ty))
2 (ptext (sLit "is not a class constraint or type family application"))) }
where
doc = ClassInstanceCtx
bale_out msg = failWithTc msg
cvt :: SrcSpan -> TH.Type -> TcM (LHsType RdrName)
cvt loc th_ty = case convertToHsType loc th_ty of
Left msg -> failWithTc msg
Right ty -> return ty
{-
************************************************************************
* *
Reification
* *
************************************************************************
-}
lookupName :: Bool -- True <=> type namespace
-- False <=> value namespace
-> String -> TcM (Maybe TH.Name)
lookupName is_type_name s
= do { lcl_env <- getLocalRdrEnv
; case lookupLocalRdrEnv lcl_env rdr_name of
Just n -> return (Just (reifyName n))
Nothing -> do { mb_nm <- lookupGlobalOccRn_maybe rdr_name
; return (fmap reifyName mb_nm) } }
where
th_name = TH.mkName s -- Parses M.x into a base of 'x' and a module of 'M'
occ_fs :: FastString
occ_fs = mkFastString (TH.nameBase th_name)
occ :: OccName
occ | is_type_name
= if isLexCon occ_fs then mkTcOccFS occ_fs
else mkTyVarOccFS occ_fs
| otherwise
= if isLexCon occ_fs then mkDataOccFS occ_fs
else mkVarOccFS occ_fs
rdr_name = case TH.nameModule th_name of
Nothing -> mkRdrUnqual occ
Just mod -> mkRdrQual (mkModuleName mod) occ
getThing :: TH.Name -> TcM TcTyThing
getThing th_name
= do { name <- lookupThName th_name
; traceIf (text "reify" <+> text (show th_name) <+> brackets (ppr_ns th_name) <+> ppr name)
; tcLookupTh name }
-- ToDo: this tcLookup could fail, which would give a
-- rather unhelpful error message
where
ppr_ns (TH.Name _ (TH.NameG TH.DataName _pkg _mod)) = text "data"
ppr_ns (TH.Name _ (TH.NameG TH.TcClsName _pkg _mod)) = text "tc"
ppr_ns (TH.Name _ (TH.NameG TH.VarName _pkg _mod)) = text "var"
ppr_ns _ = panic "reify/ppr_ns"
reify :: TH.Name -> TcM TH.Info
reify th_name
= do { traceTc "reify 1" (text (TH.showName th_name))
; thing <- getThing th_name
; traceTc "reify 2" (ppr thing)
; reifyThing thing }
lookupThName :: TH.Name -> TcM Name
lookupThName th_name = do
mb_name <- lookupThName_maybe th_name
case mb_name of
Nothing -> failWithTc (notInScope th_name)
Just name -> return name
lookupThName_maybe :: TH.Name -> TcM (Maybe Name)
lookupThName_maybe th_name
= do { names <- mapMaybeM lookup (thRdrNameGuesses th_name)
-- Pick the first that works
-- E.g. reify (mkName "A") will pick the class A in preference to the data constructor A
; return (listToMaybe names) }
where
lookup rdr_name
= do { -- Repeat much of lookupOccRn, because we want
-- to report errors in a TH-relevant way
; rdr_env <- getLocalRdrEnv
; case lookupLocalRdrEnv rdr_env rdr_name of
Just name -> return (Just name)
Nothing -> lookupGlobalOccRn_maybe rdr_name }
tcLookupTh :: Name -> TcM TcTyThing
-- This is a specialised version of TcEnv.tcLookup; specialised mainly in that
-- it gives a reify-related error message on failure, whereas in the normal
-- tcLookup, failure is a bug.
tcLookupTh name
= do { (gbl_env, lcl_env) <- getEnvs
; case lookupNameEnv (tcl_env lcl_env) name of {
Just thing -> return thing;
Nothing ->
case lookupNameEnv (tcg_type_env gbl_env) name of {
Just thing -> return (AGlobal thing);
Nothing ->
if nameIsLocalOrFrom (tcg_mod gbl_env) name
then -- It's defined in this module
failWithTc (notInEnv name)
else
do { mb_thing <- tcLookupImported_maybe name
; case mb_thing of
Succeeded thing -> return (AGlobal thing)
Failed msg -> failWithTc msg
}}}}
notInScope :: TH.Name -> SDoc
notInScope th_name = quotes (text (TH.pprint th_name)) <+>
ptext (sLit "is not in scope at a reify")
-- Ugh! Rather an indirect way to display the name
notInEnv :: Name -> SDoc
notInEnv name = quotes (ppr name) <+>
ptext (sLit "is not in the type environment at a reify")
------------------------------
reifyRoles :: TH.Name -> TcM [TH.Role]
reifyRoles th_name
= do { thing <- getThing th_name
; case thing of
AGlobal (ATyCon tc) -> return (map reify_role (tyConRoles tc))
_ -> failWithTc (ptext (sLit "No roles associated with") <+> (ppr thing))
}
where
reify_role Nominal = TH.NominalR
reify_role Representational = TH.RepresentationalR
reify_role Phantom = TH.PhantomR
------------------------------
reifyThing :: TcTyThing -> TcM TH.Info
-- The only reason this is monadic is for error reporting,
-- which in turn is mainly for the case when TH can't express
-- some random GHC extension
reifyThing (AGlobal (AnId id))
= do { ty <- reifyType (idType id)
; let v = reifyName id
; case idDetails id of
ClassOpId cls -> return (TH.ClassOpI v ty (reifyName cls))
_ -> return (TH.VarI v ty Nothing)
}
reifyThing (AGlobal (ATyCon tc)) = reifyTyCon tc
reifyThing (AGlobal (AConLike (RealDataCon dc)))
= do { let name = dataConName dc
; ty <- reifyType (idType (dataConWrapId dc))
; return (TH.DataConI (reifyName name) ty
(reifyName (dataConOrigTyCon dc)))
}
reifyThing (AGlobal (AConLike (PatSynCon ps)))
= noTH (sLit "pattern synonyms") (ppr $ patSynName ps)
reifyThing (ATcId {tct_id = id})
= do { ty1 <- zonkTcType (idType id) -- Make use of all the info we have, even
-- though it may be incomplete
; ty2 <- reifyType ty1
; return (TH.VarI (reifyName id) ty2 Nothing) }
reifyThing (ATyVar tv tv1)
= do { ty1 <- zonkTcTyVar tv1
; ty2 <- reifyType ty1
; return (TH.TyVarI (reifyName tv) ty2) }
reifyThing thing = pprPanic "reifyThing" (pprTcTyThingCategory thing)
-------------------------------------------
reifyAxBranch :: CoAxBranch -> TcM TH.TySynEqn
reifyAxBranch (CoAxBranch { cab_lhs = args, cab_rhs = rhs })
-- remove kind patterns (#8884)
= do { args' <- mapM reifyType (filter (not . isKind) args)
; rhs' <- reifyType rhs
; return (TH.TySynEqn args' rhs') }
reifyTyCon :: TyCon -> TcM TH.Info
reifyTyCon tc
| Just cls <- tyConClass_maybe tc
= reifyClass cls
| isFunTyCon tc
= return (TH.PrimTyConI (reifyName tc) 2 False)
| isPrimTyCon tc
= return (TH.PrimTyConI (reifyName tc) (tyConArity tc) (isUnLiftedTyCon tc))
| isTypeFamilyTyCon tc
= do { let tvs = tyConTyVars tc
kind = tyConKind tc
resVar = famTcResVar tc
-- we need the *result kind* (see #8884)
(kvs, mono_kind) = splitForAllTys kind
-- tyConArity includes *kind* params
(_, res_kind) = splitKindFunTysN (tyConArity tc - length kvs)
mono_kind
; kind' <- reifyKind res_kind
; let (resultSig, injectivity) =
case resVar of
Nothing -> (TH.KindSig kind', Nothing)
Just name ->
let thName = reifyName name
injAnnot = familyTyConInjectivityInfo tc
sig = TH.TyVarSig (TH.KindedTV thName kind')
inj = case injAnnot of
NotInjective -> Nothing
Injective ms ->
Just (TH.InjectivityAnn thName injRHS)
where
injRHS = map (reifyName . tyVarName)
(filterByList ms tvs)
in (sig, inj)
; tvs' <- reifyTyVars tvs
; if isOpenTypeFamilyTyCon tc
then do { fam_envs <- tcGetFamInstEnvs
; instances <- reifyFamilyInstances tc
(familyInstances fam_envs tc)
; return (TH.FamilyI
(TH.OpenTypeFamilyD (reifyName tc) tvs'
resultSig injectivity)
instances) }
else do { eqns <-
case isClosedSynFamilyTyConWithAxiom_maybe tc of
Just ax -> mapM reifyAxBranch $
fromBranches $ coAxiomBranches ax
Nothing -> return []
; return (TH.FamilyI
(TH.ClosedTypeFamilyD (reifyName tc) tvs' resultSig
injectivity eqns)
[]) } }
| isDataFamilyTyCon tc
= do { let tvs = tyConTyVars tc
kind = tyConKind tc
-- we need the *result kind* (see #8884)
(kvs, mono_kind) = splitForAllTys kind
-- tyConArity includes *kind* params
(_, res_kind) = splitKindFunTysN (tyConArity tc - length kvs)
mono_kind
; kind' <- fmap Just (reifyKind res_kind)
; tvs' <- reifyTyVars tvs
; fam_envs <- tcGetFamInstEnvs
; instances <- reifyFamilyInstances tc (familyInstances fam_envs tc)
; return (TH.FamilyI
(TH.DataFamilyD (reifyName tc) tvs' kind') instances) }
| Just (tvs, rhs) <- synTyConDefn_maybe tc -- Vanilla type synonym
= do { rhs' <- reifyType rhs
; tvs' <- reifyTyVars tvs
; return (TH.TyConI
(TH.TySynD (reifyName tc) tvs' rhs'))
}
| otherwise
= do { cxt <- reifyCxt (tyConStupidTheta tc)
; let tvs = tyConTyVars tc
; cons <- mapM (reifyDataCon (mkTyVarTys tvs)) (tyConDataCons tc)
; r_tvs <- reifyTyVars tvs
; let name = reifyName tc
deriv = [] -- Don't know about deriving
decl | isNewTyCon tc = TH.NewtypeD cxt name r_tvs (head cons) deriv
| otherwise = TH.DataD cxt name r_tvs cons deriv
; return (TH.TyConI decl) }
reifyDataCon :: [Type] -> DataCon -> TcM TH.Con
-- For GADTs etc, see Note [Reifying data constructors]
reifyDataCon tys dc
= do { let (ex_tvs, theta, arg_tys) = dataConInstSig dc tys
stricts = map reifyStrict (dataConSrcBangs dc)
fields = dataConFieldLabels dc
name = reifyName dc
; r_arg_tys <- reifyTypes arg_tys
; let main_con | not (null fields)
= TH.RecC name (zip3 (map reifyName fields) stricts r_arg_tys)
| dataConIsInfix dc
= ASSERT( length arg_tys == 2 )
TH.InfixC (s1,r_a1) name (s2,r_a2)
| otherwise
= TH.NormalC name (stricts `zip` r_arg_tys)
[r_a1, r_a2] = r_arg_tys
[s1, s2] = stricts
; ASSERT( length arg_tys == length stricts )
if null ex_tvs && null theta then
return main_con
else do
{ cxt <- reifyCxt theta
; ex_tvs' <- reifyTyVars ex_tvs
; return (TH.ForallC ex_tvs' cxt main_con) } }
------------------------------
reifyClass :: Class -> TcM TH.Info
reifyClass cls
= do { cxt <- reifyCxt theta
; inst_envs <- tcGetInstEnvs
; insts <- reifyClassInstances cls (InstEnv.classInstances inst_envs cls)
; ops <- concatMapM reify_op op_stuff
; tvs' <- reifyTyVars tvs
; let dec = TH.ClassD cxt (reifyName cls) tvs' fds' ops
; return (TH.ClassI dec insts) }
where
(tvs, fds, theta, _, _, op_stuff) = classExtraBigSig cls
fds' = map reifyFunDep fds
reify_op (op, def_meth)
= do { ty <- reifyType (idType op)
; let nm' = reifyName op
; case def_meth of
GenDefMeth gdm_nm ->
do { gdm_id <- tcLookupId gdm_nm
; gdm_ty <- reifyType (idType gdm_id)
; return [TH.SigD nm' ty, TH.DefaultSigD nm' gdm_ty] }
_ -> return [TH.SigD nm' ty] }
------------------------------
-- | Annotate (with TH.SigT) a type if the first parameter is True
-- and if the type contains a free variable.
-- This is used to annotate type patterns for poly-kinded tyvars in
-- reifying class and type instances. See #8953 and th/T8953.
annotThType :: Bool -- True <=> annotate
-> TypeRep.Type -> TH.Type -> TcM TH.Type
-- tiny optimization: if the type is annotated, don't annotate again.
annotThType _ _ th_ty@(TH.SigT {}) = return th_ty
annotThType True ty th_ty
| not $ isEmptyVarSet $ tyVarsOfType ty
= do { let ki = typeKind ty
; th_ki <- reifyKind ki
; return (TH.SigT th_ty th_ki) }
annotThType _ _ th_ty = return th_ty
-- | For every *type* variable (not *kind* variable) in the input,
-- report whether or not the tv is poly-kinded. This is used to eventually
-- feed into 'annotThType'.
mkIsPolyTvs :: [TyVar] -> [Bool]
mkIsPolyTvs tvs = [ is_poly_tv tv | tv <- tvs
, not (isKindVar tv) ]
where
is_poly_tv tv = not $ isEmptyVarSet $ tyVarsOfType $ tyVarKind tv
------------------------------
reifyClassInstances :: Class -> [ClsInst] -> TcM [TH.Dec]
reifyClassInstances cls insts
= mapM (reifyClassInstance (mkIsPolyTvs tvs)) insts
where
tvs = classTyVars cls
reifyClassInstance :: [Bool] -- True <=> the corresponding tv is poly-kinded
-- this list contains flags only for *type*
-- variables, not *kind* variables
-> ClsInst -> TcM TH.Dec
reifyClassInstance is_poly_tvs i
= do { cxt <- reifyCxt theta
; let types_only = filterOut isKind types
; thtypes <- reifyTypes types_only
; annot_thtypes <- zipWith3M annotThType is_poly_tvs types_only thtypes
; let head_ty = mkThAppTs (TH.ConT (reifyName cls)) annot_thtypes
; return $ (TH.InstanceD cxt head_ty []) }
where
(_tvs, theta, cls, types) = tcSplitDFunTy (idType dfun)
dfun = instanceDFunId i
------------------------------
reifyFamilyInstances :: TyCon -> [FamInst] -> TcM [TH.Dec]
reifyFamilyInstances fam_tc fam_insts
= mapM (reifyFamilyInstance (mkIsPolyTvs fam_tvs)) fam_insts
where
fam_tvs = tyConTyVars fam_tc
reifyFamilyInstance :: [Bool] -- True <=> the corresponding tv is poly-kinded
-- this list contains flags only for *type*
-- variables, not *kind* variables
-> FamInst -> TcM TH.Dec
reifyFamilyInstance is_poly_tvs (FamInst { fi_flavor = flavor
, fi_fam = fam
, fi_tys = lhs
, fi_rhs = rhs })
= case flavor of
SynFamilyInst ->
-- remove kind patterns (#8884)
do { let lhs_types_only = filterOut isKind lhs
; th_lhs <- reifyTypes lhs_types_only
; annot_th_lhs <- zipWith3M annotThType is_poly_tvs lhs_types_only
th_lhs
; th_rhs <- reifyType rhs
; return (TH.TySynInstD (reifyName fam)
(TH.TySynEqn annot_th_lhs th_rhs)) }
DataFamilyInst rep_tc ->
do { let tvs = tyConTyVars rep_tc
fam' = reifyName fam
-- eta-expand lhs types, because sometimes data/newtype
-- instances are eta-reduced; See Trac #9692
-- See Note [Eta reduction for data family axioms]
-- in TcInstDcls
(_rep_tc, rep_tc_args) = splitTyConApp rhs
etad_tyvars = dropList rep_tc_args tvs
eta_expanded_lhs = lhs `chkAppend` mkTyVarTys etad_tyvars
; cons <- mapM (reifyDataCon (mkTyVarTys tvs)) (tyConDataCons rep_tc)
; let types_only = filterOut isKind eta_expanded_lhs
; th_tys <- reifyTypes types_only
; annot_th_tys <- zipWith3M annotThType is_poly_tvs types_only th_tys
; return (if isNewTyCon rep_tc
then TH.NewtypeInstD [] fam' annot_th_tys (head cons) []
else TH.DataInstD [] fam' annot_th_tys cons []) }
------------------------------
reifyType :: TypeRep.Type -> TcM TH.Type
-- Monadic only because of failure
reifyType ty@(ForAllTy _ _) = reify_for_all ty
reifyType (LitTy t) = do { r <- reifyTyLit t; return (TH.LitT r) }
reifyType (TyVarTy tv) = return (TH.VarT (reifyName tv))
reifyType (TyConApp tc tys) = reify_tc_app tc tys -- Do not expand type synonyms here
reifyType (AppTy t1 t2) = do { [r1,r2] <- reifyTypes [t1,t2] ; return (r1 `TH.AppT` r2) }
reifyType ty@(FunTy t1 t2)
| isPredTy t1 = reify_for_all ty -- Types like ((?x::Int) => Char -> Char)
| otherwise = do { [r1,r2] <- reifyTypes [t1,t2] ; return (TH.ArrowT `TH.AppT` r1 `TH.AppT` r2) }
reify_for_all :: TypeRep.Type -> TcM TH.Type
reify_for_all ty
  = do { cxt' <- reifyCxt cxt
; tau' <- reifyType tau
; tvs' <- reifyTyVars tvs
; return (TH.ForallT tvs' cxt' tau') }
where
(tvs, cxt, tau) = tcSplitSigmaTy ty
reifyTyLit :: TypeRep.TyLit -> TcM TH.TyLit
reifyTyLit (NumTyLit n) = return (TH.NumTyLit n)
reifyTyLit (StrTyLit s) = return (TH.StrTyLit (unpackFS s))
reifyTypes :: [Type] -> TcM [TH.Type]
reifyTypes = mapM reifyType
reifyKind :: Kind -> TcM TH.Kind
reifyKind ki
= do { let (kis, ki') = splitKindFunTys ki
; ki'_rep <- reifyNonArrowKind ki'
; kis_rep <- mapM reifyKind kis
; return (foldr (TH.AppT . TH.AppT TH.ArrowT) ki'_rep kis_rep) }
where
reifyNonArrowKind k | isLiftedTypeKind k = return TH.StarT
| isConstraintKind k = return TH.ConstraintT
reifyNonArrowKind (TyVarTy v) = return (TH.VarT (reifyName v))
reifyNonArrowKind (ForAllTy _ k) = reifyKind k
reifyNonArrowKind (TyConApp kc kis) = reify_kc_app kc kis
reifyNonArrowKind (AppTy k1 k2) = do { k1' <- reifyKind k1
; k2' <- reifyKind k2
; return (TH.AppT k1' k2')
}
reifyNonArrowKind k = noTH (sLit "this kind") (ppr k)
reify_kc_app :: TyCon -> [TypeRep.Kind] -> TcM TH.Kind
reify_kc_app kc kis
= fmap (mkThAppTs r_kc) (mapM reifyKind kis)
where
r_kc | Just tc <- isPromotedTyCon_maybe kc
, isTupleTyCon tc = TH.TupleT (tyConArity kc)
| kc `hasKey` listTyConKey = TH.ListT
| otherwise = TH.ConT (reifyName kc)
reifyCxt :: [PredType] -> TcM [TH.Pred]
reifyCxt = mapM reifyPred
reifyFunDep :: ([TyVar], [TyVar]) -> TH.FunDep
reifyFunDep (xs, ys) = TH.FunDep (map reifyName xs) (map reifyName ys)
reifyTyVars :: [TyVar]
-> TcM [TH.TyVarBndr]
reifyTyVars tvs = mapM reify_tv $ filter isTypeVar tvs
where
-- even if the kind is *, we need to include a kind annotation,
-- in case a poly-kind would be inferred without the annotation.
-- See #8953 or test th/T8953
reify_tv tv = TH.KindedTV name <$> reifyKind kind
where
kind = tyVarKind tv
name = reifyName tv
{-
Note [Kind annotations on TyConApps]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
A poly-kinded tycon sometimes needs a kind annotation to be unambiguous.
For example:
type family F a :: k
type instance F Int = (Proxy :: * -> *)
type instance F Bool = (Proxy :: (* -> *) -> *)
It's hard to figure out where these annotations should appear, so we do this:
Suppose the tycon is applied to n arguments. We strip off the first n
arguments of the tycon's kind. If there are any variables left in the result
kind, we put on a kind annotation. But we must be slightly careful: it's
possible that the tycon's kind will have fewer than n arguments, in the case
that the concrete application instantiates a result kind variable with an
arrow kind. So, if we run out of arguments, we conservatively put on a kind
annotation anyway. This should be a rare case, indeed. Here is an example:
data T1 :: k1 -> k2 -> *
data T2 :: k1 -> k2 -> *
type family G (a :: k) :: k
type instance G T1 = T2
type instance F Char = (G T1 Bool :: (* -> *) -> *) -- F from above
Here G's kind is (forall k. k -> k), and the desugared RHS of that last
instance of F is (G (* -> (* -> *) -> *) (T1 * (* -> *)) Bool). According to
the algorithm above, there are 3 arguments to G so we should peel off 3
arguments in G's kind. But G's kind has only two arguments. This is the
rare special case, and we conservatively choose to put the annotation
in.
See #8953 and test th/T8953.
-}
reify_tc_app :: TyCon -> [TypeRep.Type] -> TcM TH.Type
reify_tc_app tc tys
= do { tys' <- reifyTypes (removeKinds tc_kind tys)
; maybe_sig_t (mkThAppTs r_tc tys') }
where
arity = tyConArity tc
tc_kind = tyConKind tc
r_tc | isTupleTyCon tc = if isPromotedDataCon tc
then TH.PromotedTupleT arity
else TH.TupleT arity
| tc `hasKey` listTyConKey = TH.ListT
| tc `hasKey` nilDataConKey = TH.PromotedNilT
| tc `hasKey` consDataConKey = TH.PromotedConsT
| tc `hasKey` eqTyConKey = TH.EqualityT
| otherwise = TH.ConT (reifyName tc)
-- See Note [Kind annotations on TyConApps]
maybe_sig_t th_type
| needs_kind_sig
= do { let full_kind = typeKind (mkTyConApp tc tys)
; th_full_kind <- reifyKind full_kind
; return (TH.SigT th_type th_full_kind) }
| otherwise
= return th_type
needs_kind_sig
| Just result_ki <- peel_off_n_args tc_kind (length tys)
= not $ isEmptyVarSet $ kiVarsOfKind result_ki
| otherwise
= True
peel_off_n_args :: Kind -> Arity -> Maybe Kind
peel_off_n_args k 0 = Just k
peel_off_n_args k n
| Just (_, res_k) <- splitForAllTy_maybe k
= peel_off_n_args res_k (n-1)
| Just (_, res_k) <- splitFunTy_maybe k
= peel_off_n_args res_k (n-1)
| otherwise
= Nothing
removeKinds :: Kind -> [TypeRep.Type] -> [TypeRep.Type]
removeKinds (FunTy k1 k2) (h:t)
| isSuperKind k1 = removeKinds k2 t
| otherwise = h : removeKinds k2 t
removeKinds (ForAllTy v k) (h:t)
| isSuperKind (varType v) = removeKinds k t
| otherwise = h : removeKinds k t
removeKinds _ tys = tys
reifyPred :: TypeRep.PredType -> TcM TH.Pred
reifyPred ty
  -- We could reify the implicit parameter as a class but it seems
-- nicer to support them properly...
| isIPPred ty = noTH (sLit "implicit parameters") (ppr ty)
| otherwise = reifyType ty
------------------------------
reifyName :: NamedThing n => n -> TH.Name
reifyName thing
| isExternalName name = mk_varg pkg_str mod_str occ_str
| otherwise = TH.mkNameU occ_str (getKey (getUnique name))
-- Many of the things we reify have local bindings, and
-- NameL's aren't supposed to appear in binding positions, so
        -- we use NameU. When/if we start to reify nested things that
-- have free variables, we may need to generate NameL's for them.
where
name = getName thing
mod = ASSERT( isExternalName name ) nameModule name
pkg_str = packageKeyString (modulePackageKey mod)
mod_str = moduleNameString (moduleName mod)
occ_str = occNameString occ
occ = nameOccName name
mk_varg | OccName.isDataOcc occ = TH.mkNameG_d
| OccName.isVarOcc occ = TH.mkNameG_v
| OccName.isTcOcc occ = TH.mkNameG_tc
| otherwise = pprPanic "reifyName" (ppr name)
------------------------------
reifyFixity :: Name -> TcM TH.Fixity
reifyFixity name
= do { fix <- lookupFixityRn name
; return (conv_fix fix) }
where
conv_fix (BasicTypes.Fixity i d) = TH.Fixity i (conv_dir d)
conv_dir BasicTypes.InfixR = TH.InfixR
conv_dir BasicTypes.InfixL = TH.InfixL
conv_dir BasicTypes.InfixN = TH.InfixN
reifyStrict :: DataCon.HsSrcBang -> TH.Strict
reifyStrict (HsSrcBang _ _ SrcLazy) = TH.NotStrict
reifyStrict (HsSrcBang _ _ NoSrcStrict) = TH.NotStrict
reifyStrict (HsSrcBang _ SrcUnpack SrcStrict) = TH.Unpacked
reifyStrict (HsSrcBang _ _ SrcStrict) = TH.IsStrict
------------------------------
lookupThAnnLookup :: TH.AnnLookup -> TcM CoreAnnTarget
lookupThAnnLookup (TH.AnnLookupName th_nm) = fmap NamedTarget (lookupThName th_nm)
lookupThAnnLookup (TH.AnnLookupModule (TH.Module pn mn))
= return $ ModuleTarget $
mkModule (stringToPackageKey $ TH.pkgString pn) (mkModuleName $ TH.modString mn)
reifyAnnotations :: Data a => TH.AnnLookup -> TcM [a]
reifyAnnotations th_name
= do { name <- lookupThAnnLookup th_name
; topEnv <- getTopEnv
; epsHptAnns <- liftIO $ prepareAnnotations topEnv Nothing
; tcg <- getGblEnv
; let selectedEpsHptAnns = findAnns deserializeWithData epsHptAnns name
; let selectedTcgAnns = findAnns deserializeWithData (tcg_ann_env tcg) name
; return (selectedEpsHptAnns ++ selectedTcgAnns) }
------------------------------
modToTHMod :: Module -> TH.Module
modToTHMod m = TH.Module (TH.PkgName $ packageKeyString $ modulePackageKey m)
(TH.ModName $ moduleNameString $ moduleName m)
reifyModule :: TH.Module -> TcM TH.ModuleInfo
reifyModule (TH.Module (TH.PkgName pkgString) (TH.ModName mString)) = do
this_mod <- getModule
let reifMod = mkModule (stringToPackageKey pkgString) (mkModuleName mString)
if (reifMod == this_mod) then reifyThisModule else reifyFromIface reifMod
where
reifyThisModule = do
usages <- fmap (map modToTHMod . moduleEnvKeys . imp_mods) getImports
return $ TH.ModuleInfo usages
reifyFromIface reifMod = do
iface <- loadInterfaceForModule (ptext (sLit "reifying module from TH for") <+> ppr reifMod) reifMod
let usages = [modToTHMod m | usage <- mi_usages iface,
Just m <- [usageToModule (modulePackageKey reifMod) usage] ]
return $ TH.ModuleInfo usages
usageToModule :: PackageKey -> Usage -> Maybe Module
usageToModule _ (UsageFile {}) = Nothing
usageToModule this_pkg (UsageHomeModule { usg_mod_name = mn }) = Just $ mkModule this_pkg mn
usageToModule _ (UsagePackageModule { usg_mod = m }) = Just m
------------------------------
mkThAppTs :: TH.Type -> [TH.Type] -> TH.Type
mkThAppTs fun_ty arg_tys = foldl TH.AppT fun_ty arg_tys
noTH :: LitString -> SDoc -> TcM a
noTH s d = failWithTc (hsep [ptext (sLit "Can't represent") <+> ptext s <+>
ptext (sLit "in Template Haskell:"),
nest 2 d])
ppr_th :: TH.Ppr a => a -> SDoc
ppr_th x = text (TH.pprint x)
{-
Note [Reifying data constructors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Template Haskell syntax is rich enough to express even GADTs,
provided we do so in the equality-predicate form. So a GADT
like
data T a where
MkT1 :: a -> T [a]
MkT2 :: T Int
will appear in TH syntax like this
data T a = forall b. (a ~ [b]) => MkT1 b
| (a ~ Int) => MkT2
-}
#endif /* GHCI */
| ghc-android/ghc | compiler/typecheck/TcSplice.hs | bsd-3-clause | 64,165 | 0 | 18 | 20,049 | 1,244 | 697 | 547 | 76 | 4 |
module StorySpec where
import Debug.Trace
import Control.Monad
import Control.Monad.Except
import Control.Monad.State
import qualified Data.ByteString as B
import Utils
import Test.Hspec
import Types
import Header
import qualified BinReader as BR
type TestRes = ExceptT String IO
expHeader x = do print x
version x `shouldBe` Version 3
flags1 x `shouldBe` 0
baseAddr x `shouldBe` 0x3709
initPc x `shouldBe` 0x37D9
dictionaryLoc x `shouldBe` 0x285A
objectTableLoc x `shouldBe` 0x03C6
globalVarLoc x `shouldBe` 0x02B4
baseStaticAddr x `shouldBe` 0x2187
flags2 x `shouldBe` 0
abbrevLoc x `shouldBe` 0x01F4
spec = describe "Read Story Header" $ do
-- it "should load file" $ do
-- fi <- minizork ()
-- return ()
it "should monad" $ do
s <- readStory <$> minizork ()
traceShowM s -- $ (Header.header . $ s)
let m = do -- traceShowM "START"
setAt 0
-- traceShowM "SET AT"
getHeader
-- traceShowM "readStory"
-- traceShowM s
-- exec readHeader
either expectationFailure expHeader $ run s m
it "should read header using BinReader" $ do
s <- minizork ()
-- traceShowM s -- $ (Header.header . $ s)
let m = do -- traceShowM "START"
BR.setAt 0
-- traceShowM "SET AT"
readHeaderB
-- traceShowM "readStory"
-- traceShowM s
-- exec readHeader
-- length x `shouldBe` 26108
-- checksum x `shouldBe` 55408
-- dynMem x `shouldBe` 8583
-- staticMem x `shouldBe` 43633
either expectationFailure expHeader $ BR.run s m
-- print x
-- runStateT m (s,0)
| theor/zorkell | test/StorySpec.hs | bsd-3-clause | 1,945 | 0 | 18 | 748 | 364 | 189 | 175 | 37 | 1 |
module Wavecore.ECDIS.Types where
import Prelude ()
import Numeric.Units.Dimensional.TF.Prelude
import Data.Geodetic
import Data.Text (Text)
type Coordinate = GeodeticCoordinate WGS84 Double
mkCoord :: PlaneAngle Double -> PlaneAngle Double -> Length Double ->
Coordinate
mkCoord = WGS84
-- | Speed Over Ground
newtype SOG = SOG (Velocity Double) deriving (Eq, Ord, Show)
-- | Speed Made Good
newtype SMG = SMG (Velocity Double) deriving (Eq, Ord, Show)
-- | Velocity Made Good
newtype VMG = VMG (Velocity Double) deriving (Eq, Ord, Show)
-- | Course Over Ground
newtype COG = COG (PlaneAngle Double) deriving (Eq, Ord, Show)
-- | Course Made Good
newtype CMG = CMG (PlaneAngle Double) deriving (Eq, Ord, Show)
type SensorId = Text
newtype Waypoint = Waypoint Coordinate
data Route = Route {
_routeName :: Text,
_routeWaypoints :: [Waypoint]
}
| wavecorenautic/ecdis-client | src/Wavecore/ECDIS/Types.hs | bsd-3-clause | 887 | 0 | 9 | 173 | 266 | 154 | 112 | 19 | 1 |
{-# LANGUAGE CPP, DeriveDataTypeable, DeriveFunctor #-}
{-# LANGUAGE FlexibleInstances, TypeSynonymInstances #-}
------------------------------------------------------------------------------
-- |
-- Module: Database.PostgreSQL.Simple.ToField
-- Copyright: (c) 2011 MailRank, Inc.
-- (c) 2011-2012 Leon P Smith
-- License: BSD3
-- Maintainer: Leon P Smith <[email protected]>
-- Stability: experimental
--
-- The 'ToField' typeclass, for rendering a parameter to a SQL query.
--
------------------------------------------------------------------------------
module Database.PostgreSQL.Simple.ToField
(
Action(..)
, ToField(..)
, toJSONField
, inQuotes
) where
import qualified Data.Aeson as JSON
import Data.ByteString (ByteString)
import Data.ByteString.Builder
( Builder, byteString, char8, stringUtf8
, intDec, int8Dec, int16Dec, int32Dec, int64Dec, integerDec
, wordDec, word8Dec, word16Dec, word32Dec, word64Dec
, floatDec, doubleDec
)
import Data.Int (Int8, Int16, Int32, Int64)
import Data.List (intersperse)
import Data.Monoid (mappend)
import Data.Time (Day, TimeOfDay, LocalTime, UTCTime, ZonedTime, NominalDiffTime)
import Data.Typeable (Typeable)
import Data.Word (Word, Word8, Word16, Word32, Word64)
import {-# SOURCE #-} Database.PostgreSQL.Simple.ToRow
import Database.PostgreSQL.Simple.Types
import Database.PostgreSQL.Simple.Compat (toByteString)
import qualified Data.ByteString as SB
import qualified Data.ByteString.Lazy as LB
import qualified Data.Text as ST
import qualified Data.Text.Encoding as ST
import qualified Data.Text.Lazy as LT
import qualified Data.Text.Lazy.Builder as LT
import Data.UUID.Types (UUID)
import qualified Data.UUID.Types as UUID
import Data.Vector (Vector)
import qualified Data.Vector as V
import qualified Database.PostgreSQL.LibPQ as PQ
import Database.PostgreSQL.Simple.Time
import Data.Scientific (Scientific)
#if MIN_VERSION_scientific(0,3,0)
import Data.Text.Lazy.Builder.Scientific (scientificBuilder)
#else
import Data.Scientific (scientificBuilder)
#endif
import Foreign.C.Types (CUInt(..))
-- | How to render an element when substituting it into a query.
data Action =
Plain Builder
-- ^ Render without escaping or quoting. Use for non-text types
-- such as numbers, when you are /certain/ that they will not
-- introduce formatting vulnerabilities via use of characters such
-- as spaces or \"@'@\".
| Escape ByteString
-- ^ Escape and enclose in quotes before substituting. Use for all
-- text-like types, and anything else that may contain unsafe
-- characters when rendered.
| EscapeByteA ByteString
-- ^ Escape binary data for use as a @bytea@ literal. Include surrounding
-- quotes. This is used by the 'Binary' newtype wrapper.
| EscapeIdentifier ByteString
-- ^ Escape before substituting. Use for all sql identifiers like
-- table, column names, etc. This is used by the 'Identifier' newtype
-- wrapper.
| Many [Action]
-- ^ Concatenate a series of rendering actions.
deriving (Typeable)
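-- For illustration only (not part of the API): a qualified column reference
-- could be rendered as
--
-- > Many [EscapeIdentifier "t", Plain (char8 '.'), EscapeIdentifier "col"]
--
-- which is essentially what the 'QualifiedIdentifier' instance below builds.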
instance Show Action where
show (Plain b) = "Plain " ++ show (toByteString b)
show (Escape b) = "Escape " ++ show b
show (EscapeByteA b) = "EscapeByteA " ++ show b
show (EscapeIdentifier b) = "EscapeIdentifier " ++ show b
show (Many b) = "Many " ++ show b
-- | A type that may be used as a single parameter to a SQL query.
class ToField a where
toField :: a -> Action
-- ^ Prepare a value for substitution into a query string.
instance ToField Action where
toField a = a
{-# INLINE toField #-}
instance (ToField a) => ToField (Maybe a) where
toField Nothing = renderNull
toField (Just a) = toField a
{-# INLINE toField #-}
instance (ToField a) => ToField (In [a]) where
toField (In []) = Plain $ byteString "(null)"
toField (In xs) = Many $
Plain (char8 '(') :
(intersperse (Plain (char8 ',')) . map toField $ xs) ++
[Plain (char8 ')')]
renderNull :: Action
renderNull = Plain (byteString "null")
instance ToField Null where
toField _ = renderNull
{-# INLINE toField #-}
instance ToField Default where
toField _ = Plain (byteString "default")
{-# INLINE toField #-}
instance ToField Bool where
toField True = Plain (byteString "true")
toField False = Plain (byteString "false")
{-# INLINE toField #-}
instance ToField Int8 where
toField = Plain . int8Dec
{-# INLINE toField #-}
instance ToField Int16 where
toField = Plain . int16Dec
{-# INLINE toField #-}
instance ToField Int32 where
toField = Plain . int32Dec
{-# INLINE toField #-}
instance ToField Int where
toField = Plain . intDec
{-# INLINE toField #-}
instance ToField Int64 where
toField = Plain . int64Dec
{-# INLINE toField #-}
instance ToField Integer where
toField = Plain . integerDec
{-# INLINE toField #-}
instance ToField Word8 where
toField = Plain . word8Dec
{-# INLINE toField #-}
instance ToField Word16 where
toField = Plain . word16Dec
{-# INLINE toField #-}
instance ToField Word32 where
toField = Plain . word32Dec
{-# INLINE toField #-}
instance ToField Word where
toField = Plain . wordDec
{-# INLINE toField #-}
instance ToField Word64 where
toField = Plain . word64Dec
{-# INLINE toField #-}
instance ToField PQ.Oid where
toField = Plain . \(PQ.Oid (CUInt x)) -> word32Dec x
{-# INLINE toField #-}
instance ToField Float where
toField v | isNaN v || isInfinite v = Plain (inQuotes (floatDec v))
| otherwise = Plain (floatDec v)
{-# INLINE toField #-}
instance ToField Double where
toField v | isNaN v || isInfinite v = Plain (inQuotes (doubleDec v))
| otherwise = Plain (doubleDec v)
{-# INLINE toField #-}
instance ToField Scientific where
toField x = toField (LT.toLazyText (scientificBuilder x))
{-# INLINE toField #-}
instance ToField (Binary SB.ByteString) where
toField (Binary bs) = EscapeByteA bs
{-# INLINE toField #-}
instance ToField (Binary LB.ByteString) where
toField (Binary bs) = (EscapeByteA . SB.concat . LB.toChunks) bs
{-# INLINE toField #-}
instance ToField Identifier where
toField (Identifier bs) = EscapeIdentifier (ST.encodeUtf8 bs)
{-# INLINE toField #-}
instance ToField QualifiedIdentifier where
toField (QualifiedIdentifier (Just s) t) =
Many [ EscapeIdentifier (ST.encodeUtf8 s)
, Plain (char8 '.')
, EscapeIdentifier (ST.encodeUtf8 t)
]
toField (QualifiedIdentifier Nothing t) =
EscapeIdentifier (ST.encodeUtf8 t)
{-# INLINE toField #-}
instance ToField SB.ByteString where
toField = Escape
{-# INLINE toField #-}
instance ToField LB.ByteString where
toField = toField . SB.concat . LB.toChunks
{-# INLINE toField #-}
instance ToField ST.Text where
toField = Escape . ST.encodeUtf8
{-# INLINE toField #-}
instance ToField [Char] where
toField = Escape . toByteString . stringUtf8
{-# INLINE toField #-}
instance ToField LT.Text where
toField = toField . LT.toStrict
{-# INLINE toField #-}
instance ToField UTCTime where
toField = Plain . inQuotes . utcTimeToBuilder
{-# INLINE toField #-}
instance ToField ZonedTime where
toField = Plain . inQuotes . zonedTimeToBuilder
{-# INLINE toField #-}
instance ToField LocalTime where
toField = Plain . inQuotes . localTimeToBuilder
{-# INLINE toField #-}
instance ToField Day where
toField = Plain . inQuotes . dayToBuilder
{-# INLINE toField #-}
instance ToField TimeOfDay where
toField = Plain . inQuotes . timeOfDayToBuilder
{-# INLINE toField #-}
instance ToField UTCTimestamp where
toField = Plain . inQuotes . utcTimestampToBuilder
{-# INLINE toField #-}
instance ToField ZonedTimestamp where
toField = Plain . inQuotes . zonedTimestampToBuilder
{-# INLINE toField #-}
instance ToField LocalTimestamp where
toField = Plain . inQuotes . localTimestampToBuilder
{-# INLINE toField #-}
instance ToField Date where
toField = Plain . inQuotes . dateToBuilder
{-# INLINE toField #-}
instance ToField NominalDiffTime where
toField = Plain . inQuotes . nominalDiffTimeToBuilder
{-# INLINE toField #-}
instance (ToField a) => ToField (PGArray a) where
toField pgArray =
case fromPGArray pgArray of
[] -> Plain (byteString "'{}'")
xs -> Many $
Plain (byteString "ARRAY[") :
(intersperse (Plain (char8 ',')) . map toField $ xs) ++
[Plain (char8 ']')]
-- Because the ARRAY[...] input syntax is being used, it is possible
-- that the use of type-specific separator characters is unnecessary.
instance (ToField a) => ToField (Vector a) where
toField = toField . PGArray . V.toList
instance ToField UUID where
toField = Plain . inQuotes . byteString . UUID.toASCIIBytes
instance ToField JSON.Value where
toField = toField . JSON.encode
-- | Convert a Haskell value to a JSON 'JSON.Value' using
-- 'JSON.toJSON' and convert that to a field using 'toField'.
--
-- This can be used as the default implementation for the 'toField'
-- method for Haskell types that have a JSON representation in
-- PostgreSQL.
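--
-- A usage sketch (the @Settings@ type is illustrative, not part of this
-- module):
--
-- @
-- instance ToField Settings where
--     toField = toJSONField
-- @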
toJSONField :: JSON.ToJSON a => a -> Action
toJSONField = toField . JSON.toJSON
-- | Surround a string with single-quote characters: \"@'@\"
--
-- This function /does not/ perform any other escaping.
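--
-- For example (illustrative): @inQuotes (byteString "2011-11-11")@ builds the
-- SQL literal @'2011-11-11'@, which is how the date and time instances above
-- use it.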
inQuotes :: Builder -> Builder
inQuotes b = quote `mappend` b `mappend` quote
where quote = char8 '\''
interleaveFoldr :: (a -> [b] -> [b]) -> b -> [b] -> [a] -> [b]
interleaveFoldr f b bs as = foldr (\a bs -> b : f a bs) bs as
{-# INLINE interleaveFoldr #-}
instance ToRow a => ToField (Values a) where
toField (Values types rows) =
case rows of
[] -> case types of
[] -> error norows
(_:_) -> values $ typedRow (repeat (lit "null"))
types
[lit " LIMIT 0)"]
(_:_) -> case types of
[] -> values $ untypedRows rows [litC ')']
(_:_) -> values $ typedRows rows types [litC ')']
where
funcname = "Database.PostgreSQL.Simple.toField :: Values a -> Action"
norows = funcname ++ " either values or types must be non-empty"
emptyrow = funcname ++ " each row must contain at least one column"
lit = Plain . byteString
litC = Plain . char8
values x = Many (lit "(VALUES ": x)
typedField :: (Action, QualifiedIdentifier) -> [Action] -> [Action]
typedField (val,typ) rest = val : lit "::" : toField typ : rest
typedRow :: [Action] -> [QualifiedIdentifier] -> [Action] -> [Action]
typedRow (val:vals) (typ:typs) rest =
litC '(' :
typedField (val,typ) ( interleaveFoldr
typedField
(litC ',')
(litC ')' : rest)
(zip vals typs) )
typedRow _ _ _ = error emptyrow
untypedRow :: [Action] -> [Action] -> [Action]
untypedRow (val:vals) rest =
litC '(' : val :
interleaveFoldr
(:)
(litC ',')
(litC ')' : rest)
vals
untypedRow _ _ = error emptyrow
typedRows :: ToRow a => [a] -> [QualifiedIdentifier] -> [Action] -> [Action]
typedRows [] _ _ = error funcname
typedRows (val:vals) types rest =
typedRow (toRow val) types (multiRows vals rest)
untypedRows :: ToRow a => [a] -> [Action] -> [Action]
untypedRows [] _ = error funcname
untypedRows (val:vals) rest =
untypedRow (toRow val) (multiRows vals rest)
multiRows :: ToRow a => [a] -> [Action] -> [Action]
multiRows vals rest = interleaveFoldr
(untypedRow . toRow)
(litC ',')
rest
vals
| tolysz/postgresql-simple | src/Database/PostgreSQL/Simple/ToField.hs | bsd-3-clause | 12,573 | 0 | 18 | 3,467 | 2,960 | 1,613 | 1,347 | 261 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Crypto.Passphrase (Dict, dict, en, de, getPass) where
import Prelude hiding (readFile, lines, unwords)
import Control.Applicative
import Control.Monad
import Data.Binary.Strict.Get
import Data.ByteString.Char8 (ByteString, readFile, lines, unwords)
import Data.Monoid
import System.Entropy
import Paths_passphrase (getDataFileName)
newtype Dict = Dict { getWords :: IO [ByteString] }
instance Monoid Dict where
mempty = Dict (return mempty)
d `mappend` d' = Dict $ mappend <$> getWords d <*> getWords d'
dict :: FilePath -> Dict
dict f = Dict $ lines <$> readFile f
dict' :: FilePath -> Dict
dict' lang = Dict $ do
f <- getDataFileName ("words/" ++ lang ++ ".words")
getWords (dict f)
en, de :: Dict
en = dict' "en"
de = dict' "de"
getPass :: Dict -> IO ByteString
getPass d = do
ws <- getWords d
ixs <- getRandIxs 4
return . unwords $ map (\ix -> ws !! (ix `rem` length ws)) ixs
where
getRandIxs n = do
(Right ixs, _) <- runGet (replicateM n getRandIx) <$> getEntropy (n*2)
return ixs
getRandIx = do
x <- fromIntegral <$> getWord8
y <- fromIntegral <$> getWord8
return $ 256 * x + y
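
-- A usage sketch (illustrative, not part of the library API): 'Dict' is a
-- 'Monoid', so word lists can be combined before drawing a passphrase.
--
-- > main :: IO ()
-- > main = getPass (en `mappend` de) >>= Data.ByteString.Char8.putStrLn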
| informatikr/passphrase | Crypto/Passphrase.hs | bsd-3-clause | 1,223 | 0 | 14 | 284 | 450 | 238 | 212 | 35 | 1 |
module Dep where
test :: [(Char, Integer)]
test = undefined $ zip "Monkey!" [1..]
| sebastiaanvisser/ghc-goals | tests/Dep.hs | bsd-3-clause | 83 | 0 | 7 | 15 | 35 | 21 | 14 | 3 | 1 |
module Pkgs.Syntax where
data Toplevel
= P PkgTerm
| I IfaceTerm
data PkgTerm
= Pkg { pName :: PVName
, pArgs :: [PArg]
, pImplName :: IVName
, pBody :: PBody
}
deriving (Eq)
data IfaceTerm
= Iface { iName :: IVName
, iArgs :: [PArg]
, iSubType :: Maybe IVName
, ibody :: IBody
}
deriving (Eq)
data PArg
= PArg { pArgName :: PVar
, pIExpr :: IExpr
} deriving (Eq)
data PBody = PBody { pImports :: [PImport], pDefs :: [Def] } deriving (Eq)
data IBody = IBody [Decl] deriving (Eq)
data PImport
= PImport PName IVName PExpr
deriving (Eq)
data PExpr
= PExpr PVName [Expr]
deriving (Eq)
data IExpr
= IExpr IVName [PVar]
deriving (Eq)
data Def
= TypeDef TyName Type
| FunDef VarName Type Expr
deriving (Eq)
data Decl
= TypeDecl TyName
| FunDecl VarName Type
deriving (Eq)
data Expr
= Lam VarName Type Expr
| App Expr Expr
| Plus Expr Expr
| Proj Expr VarName
| Var VarName
| IntVal Int
| Unit
| PkgVal [PImport] [Def]
deriving (Eq)
data Type
= Arrow Type Type
| TypeName TyName
| ProjType PVar TyName
| IntType
| UnitType
-- | Pi PVar Type -- Package abstraction type
| PkgType IVName [Decl] -- Package type
deriving (Eq)
type PVName = String
type PName = String
type IVName = String
type TyName = String
type PVar = String
type VarName = String
| markflorisson/packages | Pkgs/Syntax.hs | bsd-3-clause | 1,526 | 0 | 9 | 521 | 453 | 271 | 182 | 63 | 0 |
{-# LANGUAGE NoImplicitPrelude #-}
-- |
--
-- Functions in this module return well-formed 'Encoding''.
-- Polymorphic variants, which return @'Encoding' a@, return a textual JSON
-- value, so it can be used as both @'Encoding'' 'Text'@ and @'Encoding' = 'Encoding'' 'Value'@.
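--
-- For example (illustrative): @'text' "hi"@ is polymorphic in this way, so it
-- can be used wherever either an @'Encoding'' 'Value'@ or an
-- @'Encoding'' 'Text'@ is expected.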
module Data.Aeson.Encoding
(
-- * Encoding
Encoding
, Encoding'
, encodingToLazyByteString
, fromEncoding
, unsafeToEncoding
, Series
, pairs
, pair
, pairStr
, pair'
-- * Predicates
, nullEncoding
-- * Encoding constructors
, emptyArray_
, emptyObject_
, text
, lazyText
, shortText
, string
, list
, dict
, null_
, bool
-- ** Decimal numbers
, int8, int16, int32, int64, int
, word8, word16, word32, word64, word
, integer, float, double, scientific
-- ** Decimal numbers as Text
, int8Text, int16Text, int32Text, int64Text, intText
, word8Text, word16Text, word32Text, word64Text, wordText
, integerText, floatText, doubleText, scientificText
-- ** Time
, day
, month
, quarter
, localTime
, utcTime
, timeOfDay
, zonedTime
-- ** value
, value
) where
import Data.Aeson.Encoding.Internal
| dmjio/aeson | src/Data/Aeson/Encoding.hs | bsd-3-clause | 1,251 | 0 | 4 | 350 | 200 | 138 | 62 | 39 | 0 |
{-# LANGUAGE MultiParamTypeClasses, FlexibleInstances, DeriveFunctor,
PatternGuards #-}
module Idris.Core.Typecheck where
import Control.Monad.State
import Debug.Trace
import qualified Data.Vector.Unboxed as V (length)
import Idris.Core.TT
import Idris.Core.Evaluate
-- To check conversion, normalise each term wrt the current environment.
-- Since we haven't converted everything to de Bruijn indices yet, we'll have to
-- deal with alpha conversion - we do this by making each inner term de Bruijn
-- indexed with 'finalise'
convertsC :: Context -> Env -> Term -> Term -> StateT UCs TC ()
convertsC ctxt env x y
= do c1 <- convEq ctxt x y
if c1 then return ()
else
do c2 <- convEq ctxt (finalise (normalise ctxt env x))
(finalise (normalise ctxt env y))
if c2 then return ()
else lift $ tfail (CantConvert
(finalise (normalise ctxt env x))
(finalise (normalise ctxt env y)) (errEnv env))
converts :: Context -> Env -> Term -> Term -> TC ()
converts ctxt env x y
= case convEq' ctxt x y of
OK True -> return ()
_ -> case convEq' ctxt (finalise (normalise ctxt env x))
(finalise (normalise ctxt env y)) of
OK True -> return ()
_ -> tfail (CantConvert
(finalise (normalise ctxt env x))
(finalise (normalise ctxt env y)) (errEnv env))
errEnv = map (\(x, b) -> (x, binderTy b))
isType :: Context -> Env -> Term -> TC ()
isType ctxt env tm = isType' (normalise ctxt env tm)
where isType' (TType _) = return ()
isType' tm = fail (showEnv env tm ++ " is not a TType")
recheck :: Context -> Env -> Raw -> Term -> TC (Term, Type, UCs)
recheck ctxt env tm orig
= let v = next_tvar ctxt in
case runStateT (check' False ctxt env tm) (v, []) of -- holes banned
Error (IncompleteTerm _) -> Error $ IncompleteTerm orig
Error e -> Error e
OK ((tm, ty), constraints) ->
return (tm, ty, constraints)
check :: Context -> Env -> Raw -> TC (Term, Type)
check ctxt env tm = evalStateT (check' True ctxt env tm) (0, []) -- Holes allowed
check' :: Bool -> Context -> Env -> Raw -> StateT UCs TC (Term, Type)
check' holes ctxt env top = chk env top where
chk env (Var n)
| Just (i, ty) <- lookupTyEnv n env = return (P Bound n ty, ty)
| (P nt n' ty : _) <- lookupP n ctxt = return (P nt n' ty, ty)
| otherwise = do lift $ tfail $ NoSuchVariable n
chk env (RApp f a)
= do (fv, fty) <- chk env f
(av, aty) <- chk env a
let fty' = case uniqueBinders (map fst env) (finalise fty) of
ty@(Bind x (Pi s) t) -> ty
_ -> uniqueBinders (map fst env)
$ case hnf ctxt env fty of
ty@(Bind x (Pi s) t) -> ty
_ -> normalise ctxt env fty
case fty' of
Bind x (Pi s) t ->
-- trace ("Converting " ++ show aty ++ " and " ++ show s ++
-- " from " ++ show fv ++ " : " ++ show fty) $
do convertsC ctxt env aty s
-- let apty = normalise initContext env
-- (Bind x (Let aty av) t)
let apty = simplify initContext env
(Bind x (Let aty av) t)
return (App fv av, apty)
t -> lift $ tfail $ NonFunctionType fv fty -- "Can't apply a non-function type"
-- This rather unpleasant hack is needed because during incomplete
-- proofs, variables are locally bound with an explicit name. If we just
-- make sure bound names in function types are locally unique, machine
-- generated names, we'll be fine.
-- NOTE: now replaced with 'uniqueBinders' above
where renameBinders i (Bind x (Pi s) t) = Bind (sMN i "binder") (Pi s)
(renameBinders (i+1) t)
renameBinders i sc = sc
chk env RType
| holes = return (TType (UVal 0), TType (UVal 0))
| otherwise = do (v, cs) <- get
let c = ULT (UVar v) (UVar (v+1))
put (v+2, (c:cs))
return (TType (UVar v), TType (UVar (v+1)))
chk env (RConstant Forgot) = return (Erased, Erased)
chk env (RConstant c) = return (Constant c, constType c)
where constType (I _) = Constant (AType (ATInt ITNative))
constType (BI _) = Constant (AType (ATInt ITBig))
constType (Fl _) = Constant (AType ATFloat)
constType (Ch _) = Constant (AType (ATInt ITChar))
constType (Str _) = Constant StrType
constType (B8 _) = Constant (AType (ATInt (ITFixed IT8)))
constType (B16 _) = Constant (AType (ATInt (ITFixed IT16)))
constType (B32 _) = Constant (AType (ATInt (ITFixed IT32)))
constType (B64 _) = Constant (AType (ATInt (ITFixed IT64)))
constType (B8V a) = Constant (AType (ATInt (ITVec IT8 (V.length a))))
constType (B16V a) = Constant (AType (ATInt (ITVec IT16 (V.length a))))
constType (B32V a) = Constant (AType (ATInt (ITVec IT32 (V.length a))))
constType (B64V a) = Constant (AType (ATInt (ITVec IT64 (V.length a))))
constType Forgot = Erased
constType _ = TType (UVal 0)
chk env (RForce t) = do (_, ty) <- chk env t
return (Erased, ty)
chk env (RBind n (Pi s) t)
= do (sv, st) <- chk env s
(tv, tt) <- chk ((n, Pi sv) : env) t
(v, cs) <- get
let TType su = normalise ctxt env st
let TType tu = normalise ctxt env tt
when (not holes) $ put (v+1, ULE su (UVar v):ULE tu (UVar v):cs)
return (Bind n (Pi (uniqueBinders (map fst env) sv))
(pToV n tv), TType (UVar v))
chk env (RBind n b sc)
= do b' <- checkBinder b
(scv, sct) <- chk ((n, b'):env) sc
discharge n b' (pToV n scv) (pToV n sct)
where checkBinder (Lam t)
= do (tv, tt) <- chk env t
let tv' = normalise ctxt env tv
let tt' = normalise ctxt env tt
lift $ isType ctxt env tt'
return (Lam tv)
checkBinder (Pi t)
= do (tv, tt) <- chk env t
let tv' = normalise ctxt env tv
let tt' = normalise ctxt env tt
lift $ isType ctxt env tt'
return (Pi tv)
checkBinder (Let t v)
= do (tv, tt) <- chk env t
(vv, vt) <- chk env v
let tv' = normalise ctxt env tv
let tt' = normalise ctxt env tt
convertsC ctxt env vt tv
lift $ isType ctxt env tt'
return (Let tv vv)
checkBinder (NLet t v)
= do (tv, tt) <- chk env t
(vv, vt) <- chk env v
let tv' = normalise ctxt env tv
let tt' = normalise ctxt env tt
convertsC ctxt env vt tv
lift $ isType ctxt env tt'
return (NLet tv vv)
checkBinder (Hole t)
| not holes = lift $ tfail (IncompleteTerm undefined)
| otherwise
= do (tv, tt) <- chk env t
let tv' = normalise ctxt env tv
let tt' = normalise ctxt env tt
lift $ isType ctxt env tt'
return (Hole tv)
checkBinder (GHole i t)
= do (tv, tt) <- chk env t
let tv' = normalise ctxt env tv
let tt' = normalise ctxt env tt
lift $ isType ctxt env tt'
return (GHole i tv)
checkBinder (Guess t v)
| not holes = lift $ tfail (IncompleteTerm undefined)
| otherwise
= do (tv, tt) <- chk env t
(vv, vt) <- chk env v
let tv' = normalise ctxt env tv
let tt' = normalise ctxt env tt
convertsC ctxt env vt tv
lift $ isType ctxt env tt'
return (Guess tv vv)
checkBinder (PVar t)
= do (tv, tt) <- chk env t
let tv' = normalise ctxt env tv
let tt' = normalise ctxt env tt
lift $ isType ctxt env tt'
-- Normalised version, for erasure purposes (it's easier
-- to tell if it's a collapsible variable)
return (PVar tv)
checkBinder (PVTy t)
= do (tv, tt) <- chk env t
let tv' = normalise ctxt env tv
let tt' = normalise ctxt env tt
lift $ isType ctxt env tt'
return (PVTy tv)
discharge n (Lam t) scv sct
= return (Bind n (Lam t) scv, Bind n (Pi t) sct)
discharge n (Pi t) scv sct
= return (Bind n (Pi t) scv, sct)
discharge n (Let t v) scv sct
= return (Bind n (Let t v) scv, Bind n (Let t v) sct)
discharge n (NLet t v) scv sct
= return (Bind n (NLet t v) scv, Bind n (Let t v) sct)
discharge n (Hole t) scv sct
= return (Bind n (Hole t) scv, sct)
discharge n (GHole i t) scv sct
= return (Bind n (GHole i t) scv, sct)
discharge n (Guess t v) scv sct
= return (Bind n (Guess t v) scv, sct)
discharge n (PVar t) scv sct
= return (Bind n (PVar t) scv, Bind n (PVTy t) sct)
discharge n (PVTy t) scv sct
= return (Bind n (PVTy t) scv, sct)
| ctford/Idris-Elba-dev | src/Idris/Core/Typecheck.hs | bsd-3-clause | 9,976 | 0 | 23 | 4,048 | 3,827 | 1,860 | 1,967 | 188 | 42 |
module Clicklac.Environment
( AppEnvironment(..)
, EnvLookup(..)
, lookupAppEnv'
, lookupEnvVar'
) where
import Control.Monad.IO.Class (MonadIO(..))
import System.Environment (lookupEnv)
import Data.Maybe (fromMaybe)
import Text.Read (readMaybe)
data AppEnvironment
= Development
| Production
deriving (Show, Eq, Read)
appEnv :: String
appEnv = "APP_ENV"
class (Monad m) => EnvLookup m where
lookupAppEnv :: m AppEnvironment
lookupAppEnv = lookupEnvVar appEnv Development
lookupEnvVar :: Read a => String -> a -> m a
lookupAppEnv' :: (MonadIO m) => m AppEnvironment
lookupAppEnv' = lookupEnvVar' appEnv Development
lookupEnvVar' :: (MonadIO m, Read a) => String -> a -> m a
lookupEnvVar' env def' = fmap readEnvVar (liftIO $ lookupEnv env)
where
readEnvVar (Just envStr) = fromMaybe def' (readMaybe envStr)
readEnvVar _ = def'
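
-- A usage sketch (the variable name and default below are illustrative):
--
-- > port <- lookupEnvVar' "PORT" (8080 :: Int)
-- > env  <- lookupAppEnv'   -- falls back to 'Development' when APP_ENV is unset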
| onurzdg/clicklac | src/Clicklac/Environment.hs | bsd-3-clause | 935 | 0 | 10 | 223 | 286 | 156 | 130 | 25 | 2 |
-- Copyright (c) Microsoft. All rights reserved.
-- Licensed under the MIT license. See LICENSE file in the project root for full license information.
{-# LANGUAGE OverloadedStrings, RecordWildCards #-}
module Language.Bond.Syntax.Internal
( showPretty
, showQualifiedName
, takeName
, takeNamespace
, isBaseField
, metaField
) where
import Data.Monoid
import Language.Bond.Util
import Language.Bond.Syntax.Util
import Language.Bond.Syntax.Types
takeName :: QualifiedName -> String
takeName = last
takeNamespace :: QualifiedName -> QualifiedName
takeNamespace = subtract 1 . length >>= take
showQualifiedName :: QualifiedName -> String
showQualifiedName = sepBy "." id
showTypeParams :: [TypeParam] -> String
showTypeParams = angles . sepBy ", " showPretty
class ShowPretty a where
showPretty :: a -> String
instance ShowPretty Constraint where
showPretty Value = ": value"
instance ShowPretty TypeParam where
showPretty TypeParam {..} = paramName ++ optional showPretty paramConstraint
instance ShowPretty Declaration where
showPretty Struct {..} = "struct " ++ declName ++ showTypeParams declParams
showPretty Enum {..} = "enum " ++ declName
showPretty Forward {..} = "struct declaration " ++ declName ++ showTypeParams declParams
showPretty Alias {..} = "alias " ++ declName ++ showTypeParams declParams
metaField :: Field -> Any
metaField Field {..} = Any $ isMetaName fieldType
isBaseField :: String -> Maybe Type -> Bool
isBaseField name = getAny . optional (foldMapFields (Any.(name==).fieldName))
| upsoft/bond | compiler/src/Language/Bond/Syntax/Internal.hs | mit | 1,575 | 1 | 11 | 273 | 384 | 205 | 179 | 35 | 1 |
{-# LANGUAGE FlexibleContexts #-}
module Opaleye.RunQuery (module Opaleye.RunQuery,
QueryRunner,
IRQ.QueryRunnerColumn,
IRQ.QueryRunnerColumnDefault (..),
IRQ.fieldQueryRunnerColumn) where
import qualified Database.PostgreSQL.Simple as PGS
import qualified Database.PostgreSQL.Simple.FromRow as FR
import qualified Data.String as String
import Opaleye.Column (Column)
import qualified Opaleye.Sql as S
import Opaleye.QueryArr (Query)
import Opaleye.Internal.RunQuery (QueryRunner(QueryRunner))
import qualified Opaleye.Internal.RunQuery as IRQ
import qualified Opaleye.Internal.QueryArr as Q
import qualified Data.Profunctor as P
import qualified Data.Profunctor.Product.Default as D
import Control.Applicative ((*>))
-- | @runQuery@'s use of the 'D.Default' typeclass means that the
-- compiler will have trouble inferring types. It is strongly
-- recommended that you provide full type signatures when using
-- @runQuery@.
--
-- Example type specialization:
--
-- @
-- runQuery :: Query (Column 'Opaleye.PGTypes.PGInt4', Column 'Opaleye.PGTypes.PGText') -> IO [(Column Int, Column String)]
-- @
--
-- Assuming the @makeAdaptorAndInstance@ splice has been run for the product type @Foo@:
--
-- @
-- runQuery :: Query (Foo (Column 'Opaleye.PGTypes.PGInt4') (Column 'Opaleye.PGTypes.PGText') (Column 'Opaleye.PGTypes.PGBool')
-- -> IO [(Foo (Column Int) (Column String) (Column Bool)]
-- @
--
-- Opaleye types are converted to Haskell types based on instances of
-- the 'Opaleye.Internal.RunQuery.QueryRunnerColumnDefault' typeclass.
runQuery :: D.Default QueryRunner columns haskells
=> PGS.Connection
-> Query columns
-> IO [haskells]
runQuery = runQueryExplicit D.def
runQueryExplicit :: QueryRunner columns haskells
-> PGS.Connection
-> Query columns
-> IO [haskells]
runQueryExplicit (QueryRunner u rowParser nonZeroColumns) conn q =
PGS.queryWith_ parser conn sql
where sql :: PGS.Query
sql = String.fromString (S.showSqlForPostgresExplicit u q)
-- FIXME: We're doing work twice here
(b, _, _) = Q.runSimpleQueryArrStart q ()
parser = if nonZeroColumns b
then rowParser b
else (FR.fromRow :: FR.RowParser (PGS.Only Int)) *> rowParser b
-- If we are selecting zero columns then the SQL
-- generator will have to put a dummy 0 into the
-- SELECT statement, since we can't select zero
-- columns. In that case we have to make sure we
-- read a single Int.
-- | Use 'queryRunnerColumn' to make an instance to allow you to run queries on
-- your own datatypes. For example:
--
-- @
-- newtype Foo = Foo Int
-- instance Default QueryRunnerColumn Foo Foo where
-- def = queryRunnerColumn ('Opaleye.Column.unsafeCoerce' :: Column Foo -> Column PGInt4) Foo def
-- @
queryRunnerColumn :: (Column a' -> Column a) -> (b -> b')
-> IRQ.QueryRunnerColumn a b -> IRQ.QueryRunnerColumn a' b'
queryRunnerColumn colF haskellF qrc = IRQ.QueryRunnerColumn (P.lmap colF u)
(fmapFP haskellF fp)
where IRQ.QueryRunnerColumn u fp = qrc
fmapFP = fmap . fmap . fmap
| bergmark/haskell-opaleye | src/Opaleye/RunQuery.hs | bsd-3-clause | 3,430 | 0 | 13 | 891 | 516 | 306 | 210 | 41 | 2 |
{-# LANGUAGE RecordWildCards #-}
module Graphics.GL.Pal.Framebuffer where
import Graphics.GL
import Control.Monad.Trans
import Graphics.GL.Pal.Types
import Control.Monad
import Foreign
bindFramebuffer :: MonadIO m => Framebuffer -> m ()
bindFramebuffer (Framebuffer framebuffer) = glBindFramebuffer GL_FRAMEBUFFER framebuffer
withFramebuffer :: MonadIO m => Framebuffer -> m a -> m ()
withFramebuffer (Framebuffer framebuffer) action = do
glBindFramebuffer GL_FRAMEBUFFER framebuffer
_ <- action
glBindFramebuffer GL_FRAMEBUFFER 0
-- | Create and configure the texture to use for our framebuffer
createFramebufferTexture :: MonadIO m => GLenum -> GLsizei -> GLsizei -> m TextureID
createFramebufferTexture storage sizeX sizeY = do
texID <- overPtr (glGenTextures 1)
glBindTexture GL_TEXTURE_2D texID
glTexParameteri GL_TEXTURE_2D GL_TEXTURE_MIN_FILTER GL_LINEAR
glTexParameteri GL_TEXTURE_2D GL_TEXTURE_MAG_FILTER GL_LINEAR
glTexParameteri GL_TEXTURE_2D GL_TEXTURE_WRAP_S GL_CLAMP_TO_BORDER
glTexParameteri GL_TEXTURE_2D GL_TEXTURE_WRAP_T GL_CLAMP_TO_BORDER
glTexStorage2D GL_TEXTURE_2D 1 storage sizeX sizeY
glBindTexture GL_TEXTURE_2D 0
return (TextureID texID)
newFramebuffer :: MonadIO m => m Framebuffer
newFramebuffer = Framebuffer <$> overPtr (glGenFramebuffers 1)
-- Create a flat render target with the given size and storage format (and no depth or stencil buffer)
createRenderTexture :: MonadIO m => GLenum -> GLsizei -> GLsizei -> m (Framebuffer, TextureID)
createRenderTexture storage sizeX sizeY = do
TextureID framebufferTextureID <- createFramebufferTexture storage sizeX sizeY
framebuffer <- newFramebuffer
withFramebuffer framebuffer $ do
-- Attach the texture as the color buffer
glFramebufferTexture2D GL_FRAMEBUFFER GL_COLOR_ATTACHMENT0 GL_TEXTURE_2D framebufferTextureID 0
-- Clear the texture
glClearColor 0 0 0 0
glClear GL_COLOR_BUFFER_BIT
return (framebuffer, TextureID framebufferTextureID)
-- | Create an RGBA8 framebuffer with 32-bit depth and 8-bit stencil buffer, suitable for 3D render-to-texture
createFramebuffer :: MonadIO m => GLsizei -> GLsizei -> m (Framebuffer, TextureID)
createFramebuffer sizeX sizeY = do
TextureID framebufferTextureID <- createFramebufferTexture GL_RGBA8 sizeX sizeY
framebuffer <- newFramebuffer
-- Attach the eye texture as the color buffer
withFramebuffer framebuffer $ do
glFramebufferTexture2D GL_FRAMEBUFFER GL_COLOR_ATTACHMENT0 GL_TEXTURE_2D framebufferTextureID 0
-- Generate a render buffer for depth
renderbuffer <- overPtr (glGenRenderbuffers 1)
-- Configure the depth buffer dimensions to match the eye texture
glBindRenderbuffer GL_RENDERBUFFER renderbuffer
glRenderbufferStorage GL_RENDERBUFFER GL_DEPTH32F_STENCIL8 sizeX sizeY
glBindRenderbuffer GL_RENDERBUFFER 0
-- Attach the render buffer as the depth target
glFramebufferRenderbuffer GL_FRAMEBUFFER GL_DEPTH_STENCIL_ATTACHMENT GL_RENDERBUFFER renderbuffer
return (framebuffer, TextureID framebufferTextureID)
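
-- A usage sketch (names are illustrative): render into an offscreen target and
-- keep the colour texture for later sampling.
--
-- > (fb, tex) <- createFramebuffer 512 512
-- > withFramebuffer fb $ do
-- >     glClearColor 0 0 0 1
-- >     glClear GL_COLOR_BUFFER_BIT
-- >     drawScene  -- hypothetical user draw call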
data MultisampleFramebuffer = MultisampleFramebuffer
{ mfbRenderFramebufferID :: Framebuffer
, mfbRenderTextureID :: TextureID
, mfbResolveFramebufferID :: Framebuffer
, mfbResolveTextureID :: TextureID
, mfbWidth :: GLint
, mfbHeight :: GLint
}
data MSAASamples = MSAASamples1
| MSAASamples2
| MSAASamples4
| MSAASamples8
| MSAASamples16
msaaSamplesToNum :: MSAASamples -> GLsizei
msaaSamplesToNum MSAASamples1 = 1
msaaSamplesToNum MSAASamples2 = 2
msaaSamplesToNum MSAASamples4 = 4
msaaSamplesToNum MSAASamples8 = 8
msaaSamplesToNum MSAASamples16 = 16
createMultisampleFramebuffer :: MonadIO m => MSAASamples -> GLsizei -> GLsizei -> m MultisampleFramebuffer
createMultisampleFramebuffer msaaSamples sizeX sizeY = do
let numSamples = msaaSamplesToNum msaaSamples
renderFramebufferID <- overPtr (glGenFramebuffers 1)
glBindFramebuffer GL_FRAMEBUFFER renderFramebufferID
depthBufferID <- overPtr (glGenRenderbuffers 1)
glBindRenderbuffer GL_RENDERBUFFER depthBufferID
glRenderbufferStorageMultisample GL_RENDERBUFFER numSamples GL_DEPTH32F_STENCIL8 sizeX sizeY
glFramebufferRenderbuffer GL_FRAMEBUFFER GL_DEPTH_STENCIL_ATTACHMENT GL_RENDERBUFFER depthBufferID
renderTextureID <- overPtr (glGenTextures 1)
glBindTexture GL_TEXTURE_2D_MULTISAMPLE renderTextureID
glTexImage2DMultisample GL_TEXTURE_2D_MULTISAMPLE numSamples GL_RGBA8 sizeX sizeY GL_TRUE
glFramebufferTexture2D GL_FRAMEBUFFER GL_COLOR_ATTACHMENT0 GL_TEXTURE_2D_MULTISAMPLE renderTextureID 0
resolveFramebufferID <- overPtr (glGenFramebuffers 1)
glBindFramebuffer GL_FRAMEBUFFER resolveFramebufferID
resolveTextureID <- overPtr (glGenTextures 1)
glBindTexture GL_TEXTURE_2D resolveTextureID
glTexParameteri GL_TEXTURE_2D GL_TEXTURE_MIN_FILTER GL_LINEAR
glTexParameteri GL_TEXTURE_2D GL_TEXTURE_MAX_LEVEL 0
glTexImage2D GL_TEXTURE_2D 0 GL_RGBA8 sizeX sizeY 0 GL_RGBA GL_UNSIGNED_BYTE nullPtr
glFramebufferTexture2D GL_FRAMEBUFFER GL_COLOR_ATTACHMENT0 GL_TEXTURE_2D resolveTextureID 0
-- check FBO status
status <- glCheckFramebufferStatus GL_FRAMEBUFFER
when (status /= GL_FRAMEBUFFER_COMPLETE) $
error "createMultisampleFramebuffer: Framebuffer status incomplete"
glBindFramebuffer GL_FRAMEBUFFER 0
return MultisampleFramebuffer
{ mfbRenderFramebufferID = Framebuffer renderFramebufferID
, mfbRenderTextureID = TextureID renderTextureID
, mfbResolveFramebufferID = Framebuffer resolveFramebufferID
, mfbResolveTextureID = TextureID resolveTextureID
, mfbWidth = sizeX
, mfbHeight = sizeY
}
withMultisamplingFramebuffer :: MonadIO m => MultisampleFramebuffer -> m a -> m ()
withMultisamplingFramebuffer MultisampleFramebuffer{..} action = do
glEnable GL_MULTISAMPLE
glBindFramebuffer GL_FRAMEBUFFER (unFramebuffer mfbRenderFramebufferID)
_ <- action
glBindFramebuffer GL_FRAMEBUFFER 0
glDisable GL_MULTISAMPLE
glBindFramebuffer GL_READ_FRAMEBUFFER (unFramebuffer mfbRenderFramebufferID)
glBindFramebuffer GL_DRAW_FRAMEBUFFER (unFramebuffer mfbResolveFramebufferID)
glBlitFramebuffer 0 0 mfbWidth mfbHeight 0 0 mfbWidth mfbHeight
GL_COLOR_BUFFER_BIT
GL_LINEAR
glBindFramebuffer GL_READ_FRAMEBUFFER 0
glBindFramebuffer GL_DRAW_FRAMEBUFFER 0
| lukexi/gl-pal | src/Graphics/GL/Pal/Framebuffer.hs | bsd-3-clause | 6,594 | 0 | 13 | 1,180 | 1,249 | 586 | 663 | 112 | 1 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE MagicHash #-}
module Data.HashTable.Weak.Internal.IntArray
( IntArray
, Elem
, elemMask
, primWordToElem
, elemToInt
, elemToInt#
, newArray
, readArray
, writeArray
, length
, toPtr
) where
------------------------------------------------------------------------------
import Control.Monad.ST
import Data.Bits
import qualified Data.Primitive.ByteArray as A
import Data.Primitive.Types (Addr (..))
import GHC.Exts
import GHC.Word
import Prelude hiding (length)
------------------------------------------------------------------------------
#ifdef BOUNDS_CHECKING
#define BOUNDS_MSG(sz,i) concat [ "[", __FILE__, ":", \
show (__LINE__ :: Int), \
"] bounds check exceeded: ", \
"size was ", show (sz), " i was ", show (i) ]
#define BOUNDS_CHECK(arr,i) let sz = (A.sizeofMutableByteArray (arr) \
`div` wordSizeInBytes) in \
if (i) < 0 || (i) >= sz \
then error (BOUNDS_MSG(sz,(i))) \
else return ()
#else
#define BOUNDS_CHECK(arr,i)
#endif
------------------------------------------------------------------------------
newtype IntArray s = IA (A.MutableByteArray s)
type Elem = Word16
------------------------------------------------------------------------------
primWordToElem :: Word# -> Elem
primWordToElem = W16#
------------------------------------------------------------------------------
elemToInt :: Elem -> Int
elemToInt e = let !i# = elemToInt# e
in (I# i#)
------------------------------------------------------------------------------
elemToInt# :: Elem -> Int#
elemToInt# (W16# w#) = word2Int# w#
------------------------------------------------------------------------------
elemMask :: Int
elemMask = 0xffff
------------------------------------------------------------------------------
wordSizeInBytes :: Int
wordSizeInBytes = bitSize (0::Elem) `div` 8
------------------------------------------------------------------------------
-- | Cache line size, in bytes
cacheLineSize :: Int
cacheLineSize = 64
------------------------------------------------------------------------------
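-- | Allocate a pinned, cache-line-aligned array of @n@ elements,
-- zero-filled.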
newArray :: Int -> ST s (IntArray s)
newArray n = do
let !sz = n * wordSizeInBytes
!arr <- A.newAlignedPinnedByteArray sz cacheLineSize
A.fillByteArray arr 0 sz 0
return $! IA arr
------------------------------------------------------------------------------
readArray :: IntArray s -> Int -> ST s Elem
readArray (IA a) idx = do
BOUNDS_CHECK(a,idx)
A.readByteArray a idx
------------------------------------------------------------------------------
writeArray :: IntArray s -> Int -> Elem -> ST s ()
writeArray (IA a) idx val = do
BOUNDS_CHECK(a,idx)
A.writeByteArray a idx val
------------------------------------------------------------------------------
length :: IntArray s -> Int
length (IA a) = A.sizeofMutableByteArray a `div` wordSizeInBytes
------------------------------------------------------------------------------
toPtr :: IntArray s -> Ptr a
toPtr (IA a) = Ptr a#
where
!(Addr !a#) = A.mutableByteArrayContents a
| cornell-pl/HsAdapton | weak-hashtables/src/Data/HashTable/Weak/Internal/IntArray.hs | bsd-3-clause | 3,541 | 0 | 10 | 871 | 568 | 309 | 259 | 56 | 1 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveGeneric #-}
module Network.Pubnub.Types
(
convertHistoryOptions
-- Record construction
, Timestamp(..)
, PN(..)
, defaultPN
, SubscribeOptions(..)
, defaultSubscribeOptions
, Auth(..)
, defaultAuth
, ConnectResponse(..)
, SubscribeResponse(..)
, EncryptedSubscribeResponse(..)
, PublishResponse(..)
, UUID
, Presence(..)
, Action(..)
, HereNow(..)
, History(..)
, HistoryOption(..)
, HistoryOptions
, setEncryptionKey
) where
import GHC.Generics
import Control.Applicative (empty)
import Data.Text.Read
import Data.Aeson
import Data.Aeson.TH
import Crypto.Cipher.AES
import Crypto.Cipher.Types
import Data.Digest.Pure.SHA
import qualified Data.Vector as V
import qualified Data.Text as T
import qualified Data.ByteString.Char8 as B
import qualified Data.ByteString.Lazy as L
data PN = PN { origin :: T.Text
, pub_key :: T.Text
, sub_key :: T.Text
, sec_key :: T.Text
, uuid_key :: Maybe UUID
, auth_key :: Maybe T.Text
, channels :: [T.Text]
, jsonp_callback :: Integer
, time_token :: Timestamp
, cipher_key :: B.ByteString
, ctx :: Maybe AES
, iv :: Maybe (IV AES)
, ssl :: Bool}
defaultPN :: PN
defaultPN = PN { origin = "haskell.pubnub.com"
, pub_key = T.empty
, sub_key = T.empty
, sec_key = "0"
, uuid_key = Nothing
, auth_key = Nothing
, channels = []
, jsonp_callback = 0
, time_token = Timestamp 0
, cipher_key = B.empty
, ctx = Nothing
, iv = makeIV (B.pack "0123456789012345")
, ssl = False }
data SubscribeOptions a = SubscribeOptions { onMsg :: a -> IO ()
, onConnect :: IO ()
, onDisconnect :: IO ()
, onError :: Maybe Int -> Maybe B.ByteString -> IO ()
, onPresence :: Maybe (Presence -> IO ())
, onReconnect :: IO ()
, subTimeout :: Maybe Int
, resumeOnReconnect :: Bool
, windowing :: Maybe Integer }
defaultSubscribeOptions :: SubscribeOptions a
defaultSubscribeOptions = SubscribeOptions { onMsg = \_ -> return ()
, onConnect = return ()
, onDisconnect = return ()
, onError = \_ _ -> return ()
, onPresence = Nothing
, onReconnect = return ()
, subTimeout = Just 310
, resumeOnReconnect = True
, windowing = Nothing }
data Auth = Auth { chan :: Maybe T.Text
, authKeys :: [T.Text]
, r :: Bool
, w :: Bool
, ttl :: Int }
deriving (Show)
defaultAuth :: Auth
defaultAuth = Auth { chan = Nothing
, authKeys = []
, r = False
, w = False
, ttl = 0 }
setEncryptionKey :: PN -> B.ByteString -> Either KeyError PN
setEncryptionKey pn key =
either Left (\a -> Right pn{ctx = Just (initAES256 a)}) (convertKey key)
where
initAES256 :: Key AES -> AES
initAES256 = cipherInit
convertKey k = makeKey (B.pack . take 32 . showDigest . sha256 $ L.fromStrict k)
newtype Timestamp = Timestamp Integer
deriving (Show)
instance ToJSON Timestamp where
toJSON (Timestamp t) = (Number . fromIntegral) t
instance FromJSON Timestamp where
parseJSON (String s) = Timestamp <$> (pure . decimalRight) s
parseJSON (Array a) =
Timestamp <$> (withScientific "Integral" $ pure . floor) (V.head a)
parseJSON _ = empty
data ConnectResponse = ConnectResponse ([Value], Timestamp)
deriving (Show, Generic)
instance FromJSON ConnectResponse
data PublishResponse = PublishResponse Integer String Timestamp
deriving (Show, Generic)
instance FromJSON PublishResponse
data SubscribeResponse a = SubscribeResponse ([a], Timestamp)
deriving (Show, Generic)
instance (FromJSON a) => FromJSON (SubscribeResponse a)
data EncryptedSubscribeResponse = EncryptedSubscribeResponse ([T.Text], Timestamp)
deriving (Show, Generic)
instance FromJSON EncryptedSubscribeResponse
type UUID = T.Text
type Occupancy = Integer
data Action = Join | Leave | Timeout
deriving (Show)
instance FromJSON Action where
parseJSON (String "join") = pure Join
parseJSON (String "leave") = pure Leave
parseJSON (String "timeout") = pure Timeout
parseJSON _ = empty
instance ToJSON Action where
toJSON Join = String "join"
toJSON Leave = String "leave"
toJSON Timeout = String "timeout"
data Presence = Presence { action :: Action
, timestamp :: Integer
, uuid :: UUID
, presenceOccupancy :: Occupancy }
deriving (Show)
data HereNow = HereNow { uuids :: [UUID]
, herenowOccupancy :: Occupancy }
deriving (Show)
data History a = History [a] Integer Integer
deriving (Show, Generic)
instance (FromJSON a) => FromJSON (History a)
data HistoryOption = Start Integer
| End Integer
| Reverse Bool
| Count Integer
type HistoryOptions = [HistoryOption]
convertHistoryOptions :: HistoryOptions -> [(B.ByteString, B.ByteString)]
convertHistoryOptions =
map convertHistoryOption
convertHistoryOption :: HistoryOption -> (B.ByteString, B.ByteString)
convertHistoryOption (Start i) = ("start", B.pack $ show i)
convertHistoryOption (End i) = ("end", B.pack $ show i)
convertHistoryOption (Reverse True) = ("reverse", "true")
convertHistoryOption (Reverse False) = ("reverse", "false")
convertHistoryOption (Count i) = ("count", B.pack $ show i)
decimalRight :: T.Text -> Integer
decimalRight = either (const 0) fst . decimal
$(deriveJSON defaultOptions{ fieldLabelModifier =
\ x -> case x of
"presenceOccupancy" -> "occupancy"
_ -> x } ''Presence)
$(deriveJSON defaultOptions{ fieldLabelModifier = \ x ->
case x of
"herenowOccupancy" -> "occupancy"
_ -> x } ''HereNow)
| pubnub/haskell | src/Network/Pubnub/Types.hs | mit | 7,627 | 2 | 14 | 3,224 | 1,795 | 1,022 | 773 | 170 | 1 |
{-# LANGUAGE OverloadedStrings #-}
import Control.Monad
import Control.Concurrent
import System.Environment
import Network.Helics
import qualified Data.ByteString.Char8 as S
import Control.Exception
import System.IO.Error
main :: IO ()
main = do
k:_ <- getArgs
_ <- forkIO $ sampler 60
withHelics def { licenseKey = S.pack k } $ putStrLn "start" >> loop 0
where
loop i = do
withTransaction "test" def (\tid -> do
withTransaction "inu" def (const $ threadDelay (10^5))
genericSegment autoScope "neko" (threadDelay (10^5)) tid
when (i `mod` 97 == 0) $ ioError $ userError "user error!"
when (i `mod` 101 == 0) $ throwIO Overflow
) `catch` (\e -> print (e::SomeException))
threadDelay (2 * 10^5)
loop (succ i)
| dzotokan/helics | example.hs | mit | 811 | 0 | 20 | 213 | 303 | 155 | 148 | 22 | 1 |
{-| PyValue contains instances for the 'PyValue' typeclass.
The typeclass 'PyValue' converts Haskell values to Python values.
This module contains instances of this typeclass for several generic
types. These instances are used in the Haskell to Python generation
of opcodes and constants, for example.
-}
{-
Copyright (C) 2013 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
-}
{-# LANGUAGE ExistentialQuantification #-}
module Ganeti.PyValue
( PyValue(..)
, PyValueEx(..)
) where
import Data.List (intercalate)
import Data.Map (Map)
import qualified Data.Map as Map
import qualified Data.Set as Set (toList)
import Ganeti.BasicTypes
-- * PyValue represents data types convertible to Python
-- | Converts Haskell values into Python values
--
-- This is necessary for the default values of opcode parameters and
-- return values. For example, if a default value or return type is a
-- Data.Map, then it must be shown as a Python dictionary.
class PyValue a where
showValue :: a -> String
showValueList :: [a] -> String
showValueList xs = "[" ++ intercalate "," (map showValue xs) ++ "]"
instance PyValue Bool where
showValue = show
instance PyValue Int where
showValue = show
instance PyValue Integer where
showValue = show
instance PyValue Double where
showValue = show
instance PyValue Char where
showValue = show
showValueList = show
instance (PyValue a, PyValue b) => PyValue (a, b) where
showValue (x, y) = "(" ++ showValue x ++ "," ++ showValue y ++ ")"
instance (PyValue a, PyValue b, PyValue c) => PyValue (a, b, c) where
showValue (x, y, z) =
"(" ++
showValue x ++ "," ++
showValue y ++ "," ++
showValue z ++
")"
instance PyValue a => PyValue [a] where
showValue = showValueList
instance (PyValue k, PyValue a) => PyValue (Map k a) where
showValue mp =
"{" ++ intercalate ", " (map showPair (Map.assocs mp)) ++ "}"
where showPair (k, x) = showValue k ++ ":" ++ showValue x
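-- For illustration (using the instances above):
-- @showValue (Map.fromList [(1 :: Int, True)])@ yields the string
-- @"{1:True}"@.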
instance PyValue a => PyValue (ListSet a) where
showValue = showValue . Set.toList . unListSet
-- * PyValue represents an unspecified value convertible to Python
-- | Encapsulates Python default values
data PyValueEx = forall a. PyValue a => PyValueEx a
instance PyValue PyValueEx where
showValue (PyValueEx x) = showValue x
| kawamuray/ganeti | src/Ganeti/PyValue.hs | gpl-2.0 | 2,942 | 0 | 13 | 566 | 564 | 306 | 258 | 44 | 0 |
module Inter.Language where
import Autolib.Multilingual
import Gateway.CGI
choose :: Monad m => Form m Language
choose = do
open table
l <- click_choice_with_default 0 "choose language" $ do
l <- [ minBound .. maxBound ]
return ( show l, l )
close
return l
| Erdwolf/autotool-bonn | trial/src/Inter/Language.hs | gpl-2.0 | 292 | 0 | 13 | 79 | 97 | 47 | 50 | 11 | 1 |
{-# LANGUAGE CPP, UnboxedTuples, MagicHash, StandaloneDeriving, DeriveDataTypeable #-}
{-# OPTIONS_GHC -O #-}
-- In GHC 6.4, compiling this module gave a Core Lint failure following the
-- specialiser, because a function was floated out that had a RULE that
-- mentioned another function (unpack, in fact), but the latter wasn't
-- floated because we didn't take the RULES into account properly; result,
-- variable out of scope.
-- It's hard to cut this test down.
module Data.PackedString.Latin1 (
-- * The @PackedString@ type
PackedString, -- abstract, instances: Eq, Ord, Show, Typeable
-- * Converting to and from @PackedString@s
pack,
unpack,
-- * I\/O with @PackedString@s
hPut, hGet,
-- * List-like manipulation functions
nil,
cons,
head,
tail,
null,
append,
length,
index,
map,
filter,
reverse,
concat,
elem,
substr,
take,
drop,
splitAt,
foldl,
foldr,
takeWhile,
dropWhile,
span,
break,
lines,
unlines,
words,
unwords,
split,
splitWith,
join,
-- unpackList, -- eek, otherwise it gets thrown away by the simplifier
) where
import qualified Prelude
import Prelude hiding (
head,
tail,
null,
length,
(!!),
map,
filter,
reverse,
concat,
elem,
take,
drop,
foldl,
foldr,
splitAt,
takeWhile,
dropWhile,
span,
break,
lines,
unlines,
words,
unwords,
join
)
import GHC.Exts
import GHC.IO (IO(..))
import Foreign
import Data.Typeable
import Data.Char
import qualified Data.List
import System.IO
-- -----------------------------------------------------------------------------
-- PackedString type declaration
-- | A space-efficient representation of a 'String', which supports
-- various efficient operations. A 'PackedString' contains Latin1
-- (8-bit) characters only.
data PackedString = PS {-#UNPACK#-}!Int {-#UNPACK#-}!Int
{-#UNPACK#-}!(ForeignPtr Word8)
-- this is a pretty efficient representation, and can be
-- converted to/from a StorableArray.
-- When the ForeignPtr is unpacked, we get the Addr# stored
-- directly in the PS constructor.
-- Perhaps making a slice should be conditional on the ratio of the
-- slice/string size to limit memory leaks.
instance Eq PackedString where
a == b = comparePS a b == EQ
instance Ord PackedString where
compare = comparePS
comparePS (PS off1 len1 fp1) (PS off2 len2 fp2)
= inlinePerformIO $
withForeignPtr fp1 $ \p1 ->
withForeignPtr fp2 $ \p2 ->
cmp (p1 `plusPtr` off1) (p2 `plusPtr` off2) len1
where
cmp :: Ptr Word8 -> Ptr Word8 -> Int -> IO Ordering
cmp p1 p2 n
| n == len1 = if n == len2 then return EQ else return LT
| n == len2 = return GT
| otherwise = do
a <- peekElemOff p1 n
b <- peekElemOff p2 n
case a `compare` b of
EQ -> cmp p1 p2 (n+1)
LT -> return LT
GT -> return GT
--instance Read PackedString: ToDo
instance Show PackedString where
showsPrec p ps r = showsPrec p (unpack ps) r
deriving instance Typeable PackedString
-- -----------------------------------------------------------------------------
-- Constructor functions
-- | The 'nilPS' value is the empty string.
nil :: PackedString
nil = inlinePerformIO $ do
fp <- newForeignPtr_ nullPtr
return (PS 0 0 fp)
-- | The 'cons' function prepends the given character to the
-- given string.
cons :: Char -> PackedString -> PackedString
cons c cs = pack (c : (unpack cs)) -- ToDo:better
-- | Convert a 'String' into a 'PackedString'
packLen :: Int -> String -> PackedString
packLen len str = inlinePerformIO $ do
fp <- mallocForeignPtrBytes len
withForeignPtr fp $ \p -> do
fill_it_in p 0 str
return (PS 0 len fp)
fill_it_in p i [] = return ()
fill_it_in p i (c:cs) = do pokeElemOff p i (c2w c); fill_it_in p (i+1) cs
pack :: String -> PackedString
pack str = packLen (Prelude.length str) str
{-# INLINE w2c #-}
w2c :: Word8 -> Char
w2c = chr . fromIntegral
{-# INLINE c2w #-}
c2w :: Char -> Word8
c2w = fromIntegral . ord
-- -----------------------------------------------------------------------------
-- List-mimicking functions for PackedStrings
-- | The 'length' function returns the length of the input list.
-- Analogous to 'length'.
length :: PackedString -> Int
length (PS _ len _) = len
-- | The 'index' function returns the character in the string at the
-- given position.
index :: PackedString -> Int -> Char
index ps i
| i >= 0 && i < len = unsafeIndex ps i
| otherwise = error "Data.PackedString.Latin1.index: index out of range"
where len = length ps
unsafeIndex :: PackedString -> Int -> Char
unsafeIndex (PS off len fp) i =
withPackedString fp $ \p -> do
w <- peekElemOff (p `plusPtr` off) i
return $! w2c w
-- | The 'head' function returns the first element of a
-- 'PackedString' or throws an error if the string is empty.
head :: PackedString -> Char
head ps
| len <= 0 = error "Data.PackedString.Latin1.head: head []"
| otherwise = index ps 0
where len = length ps
-- | The 'tail' function returns the tail of a 'PackedString' or throws an error
-- if the string is empty.
tail :: PackedString -> PackedString
tail ps
| len <= 0 = error "Data.PackedString.Latin1.tail: tail []"
| len == 1 = nil
| otherwise = substr ps 1 (len - 1)
where len = length ps
-- | The 'null' function returns True iff the argument is null.
null :: PackedString -> Bool
null (PS _ l _) = l == 0
-- | The 'append' function appends the second string onto the first.
append :: PackedString -> PackedString -> PackedString
append xs ys
| null xs = ys
| null ys = xs
| otherwise = concat [xs,ys]
-- | The 'map' function applies a function to each character in the string.
map :: (Char -> Char) -> PackedString -> PackedString
map f ps = packLen (length ps) (Prelude.map f (unpack ps))
-- | The 'filter' function keeps only those characters that satisfy the predicate.
filter :: (Char -> Bool) -> PackedString -> PackedString {-or String?-}
filter pred ps = pack $ Prelude.filter pred $ unpack ps
-- | The 'foldl' function behaves like 'foldl' on 'PackedString's.
foldl :: (a -> Char -> a) -> a -> PackedString -> a
foldl f b ps = Prelude.foldl f b $ unpack ps
-- | The 'foldr' function behaves like 'foldr' on 'PackedString's.
foldr :: (Char -> a -> a) -> a -> PackedString -> a
foldr f v ps = Prelude.foldr f v $ unpack ps -- no intermediate list, we hope
-- | The 'take' function takes the first @n@ characters of a 'PackedString'.
take :: Int -> PackedString -> PackedString
take n ps = substr ps 0 (n-1)
-- | The 'drop' function drops the first @n@ characters of a 'PackedString'.
drop :: Int -> PackedString -> PackedString
drop n ps = substr ps n (length ps - 1)
-- | The 'splitAt' function splits a 'PackedString' at a given index.
splitAt :: Int -> PackedString -> (PackedString, PackedString)
splitAt n ps = (take n ps, drop n ps)
-- | The 'takeWhile' function is analogous to the 'takeWhile' function.
takeWhile :: (Char -> Bool) -> PackedString -> PackedString
takeWhile pred ps = pack $ Prelude.takeWhile pred $ unpack ps
-- | The 'dropWhile' function is analogous to the 'dropWhile' function.
dropWhile :: (Char -> Bool) -> PackedString -> PackedString
dropWhile pred ps = pack $ Prelude.dropWhile pred $ unpack ps
-- | The 'elem' function returns True iff the given element is in the string.
elem :: Char -> PackedString -> Bool
elem c ps = c `Prelude.elem` unpack ps
-- | The 'span' function returns a pair containing the result of
-- running both 'takeWhile' and 'dropWhile'.
span :: (Char -> Bool) -> PackedString -> (PackedString, PackedString)
span p ps = (takeWhile p ps, dropWhile p ps)
-- | The 'break' function breaks a string at the first position which
-- satisfies the predicate.
break :: (Char -> Bool) -> PackedString -> (PackedString, PackedString)
break p ps = span (not . p) ps
-- | The 'lines' function splits the input on line-breaks.
lines :: PackedString -> [PackedString]
lines ps = split '\n' ps
-- | The 'unlines' function concatenates the input list after
-- interspersing newlines.
unlines :: [PackedString] -> PackedString
unlines pss = join (pack "\n") pss
-- | The 'words' function is analogous to the 'words' function.
words :: PackedString -> [PackedString]
words ps = Prelude.filter (not.null) (splitWith isSpace ps)
-- | The 'unwords' function is analogous to the 'unwords' function.
unwords :: [PackedString] -> PackedString
unwords pss = join (pack " ") pss
-- | The 'reverse' function reverses the string.
reverse :: PackedString -> PackedString
reverse ps = pack $ Prelude.reverse $ unpack ps
-- | The 'concat' function concatenates a list of 'PackedString's.
concat :: [PackedString] -> PackedString
concat pss = pack $ Prelude.concat $ Prelude.map unpack pss
------------------------------------------------------------
-- | The 'join' function takes a 'PackedString' and a list of 'PackedString's
-- and concatenates the list after interspersing the first argument between
-- each element of the list.
join :: PackedString -> [PackedString] -> PackedString
join filler pss = concat (splice pss)
where
splice [] = []
splice [x] = [x]
splice (x:y:xs) = x:filler:splice (y:xs)
-- ToDo: the obvious generalisation
{-
Some properties that hold:
* split x ls = ls'
    where not (any (x `elem`) ls')
* join (pack [x]) (split x ls) = ls
-}
-- | The 'split' function splits the input string on each occurrence of the given 'Char'.
split :: Char -> PackedString -> [PackedString]
split c = splitWith (== c)
splitWith :: (Char -> Bool) -> PackedString -> [PackedString]
splitWith pred (PS off 0 fp) = []
splitWith pred (PS off len fp) = splitWith' pred off len fp
splitWith' pred off len fp =
withPackedString fp $ \p -> splitLoop pred p 0 off len fp
splitLoop pred p idx off len fp
| p `seq` idx `seq` off `seq` fp `seq` False = undefined
splitLoop pred p idx off len fp
| idx >= len = return [PS off idx fp]
| otherwise = do
w <- peekElemOff p (off+idx)
if pred (w2c w)
then return (PS off idx fp :
splitWith' pred (off+idx+1) (len-idx-1) fp)
else splitLoop pred p (idx+1) off len fp
-- -----------------------------------------------------------------------------
-- Local utility functions
-- The definition of @substr@ is essentially:
-- @take (end - begin + 1) (drop begin str)@.
-- | The 'substr' function takes a 'PackedString' and two indices
-- and returns the substring of the input string between (and including)
-- these indices.
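--
-- For example, @substr (pack "Hello") 1 3@ is the string @"ell"@.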
substr :: PackedString -> Int -> Int -> PackedString
substr (PS off len fp) begin end = PS (off+begin) (end-begin+1) fp
-- -----------------------------------------------------------------------------
-- hPut
-- | Outputs a 'PackedString' to the specified 'Handle'.
--
-- NOTE: the string will be output directly in Latin-1.
--
hPut :: Handle -> PackedString -> IO ()
hPut h (PS off l fp) =
withForeignPtr fp $ \p ->
hPutBuf h (p `plusPtr` off) l
-- -----------------------------------------------------------------------------
-- hGet
-- | Read a 'PackedString' directly from the specified 'Handle'.
-- This is far more efficient than reading the characters into a 'String'
-- and then using 'pack'.
--
-- NOTE: as with 'hPut', the string representation in the file is
-- assumed to be Latin-1.
hGet :: Handle -> Int -> IO PackedString
hGet h i = do
fp <- mallocForeignPtrBytes i
withForeignPtr fp $ \p -> do
l <- hGetBuf h p i
return (PS 0 l fp)
-- -----------------------------------------------------------------------------
-- unpacking
{-# INLINE unpack #-}
unpack :: PackedString -> String
unpack ps = build (unpackFoldr ps)
{-# RULES
"unpack-list" [1] forall p . unpackFoldr p (:) [] = unpackList p
#-}
unpackList :: PackedString -> [Char]
unpackList (PS off len fp) =
withPackedString fp $ \p -> do
let loop p (-1) acc = return acc
loop p n acc = do
a <- peekElemOff p n
loop p (n-1) (w2c a : acc)
loop (p `plusPtr` off) (len-1) []
{-# INLINE [0] unpackFoldr #-}
unpackFoldr :: PackedString -> (Char -> a -> a) -> a -> a
unpackFoldr (PS off len fp) f c =
withPackedString fp $ \p -> do
let loop p (-1) acc = return acc
loop p n acc = do
a <- peekElemOff p n
loop p (n-1) (w2c a `f` acc)
loop (p `plusPtr` off) (len-1) c
-- -----------------------------------------------------------------------------
-- Utils
-- Just like unsafePerformIO, but we inline it.
{-# INLINE inlinePerformIO #-}
inlinePerformIO :: IO a -> a
inlinePerformIO (IO m) = case m realWorld# of (# _, r #) -> r
withPackedString :: ForeignPtr a -> (Ptr a -> IO b) -> b
withPackedString fp io = inlinePerformIO (withForeignPtr fp io)
| lukexi/ghc | testsuite/tests/simplCore/should_compile/spec001.hs | bsd-3-clause | 13,358 | 0 | 17 | 3,232 | 3,239 | 1,712 | 1,527 | 244 | 4 |
{-# LANGUAGE PolyKinds, TypeFamilies #-}
module T9171 where
data Base
type family GetParam (p::k1) (t::k2) :: k3
type instance GetParam Base t = t
foo = undefined :: GetParam Base (GetParam Base Int)
| urbanslug/ghc | testsuite/tests/indexed-types/should_fail/T9171.hs | bsd-3-clause | 204 | 0 | 7 | 37 | 63 | 39 | 24 | -1 | -1 |
{-# LANGUAGE BangPatterns #-}
-- | Tests that conversions between various primitive types (e.g.
-- Word, Double, etc.) don't allocate.
module Main (main) where
import Data.Word
-- Repeatedly convert Words to Doubles
loop :: Floating a => Word -> a
loop n = go 0 0.0
where
go i !acc | i < n = go (i+1) (acc + fromIntegral i)
| otherwise = acc
{-# SPECIALISE loop :: Word -> Float #-}
{-# SPECIALISE loop :: Word -> Double #-}
main :: IO ()
main = do
print (loop 1000000 :: Float)
print (loop 1000000 :: Double)
| urbanslug/ghc | testsuite/tests/perf/should_run/Conversions.hs | bsd-3-clause | 543 | 0 | 11 | 130 | 148 | 76 | 72 | 13 | 1 |
{-# LANGUAGE FlexibleContexts #-}
-- | It is well known that fully parallel loops can always be
-- interchanged inwards with a sequential loop. This module
-- implements that transformation.
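--
-- For intuition (a source-language sketch, not the compiler IR this
-- module actually manipulates): a sequential loop inside a @map@, such as
--
-- > map (\xs -> loop acc = 0 for i < n do acc + xs[i]) xss
--
-- can be turned into a @map@ inside a sequential loop:
--
-- > loop accs = replicate k 0 for i < n do
-- >   map (\(acc, xs) -> acc + xs[i]) (zip accs xss)
--
-- where @k@ is the outer size of @xss@.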
module Futhark.Pass.ExtractKernels.Interchange
(
SeqLoop (..)
, interchangeLoops
) where
import Control.Applicative
import Control.Monad.RWS.Strict
import qualified Data.Set as S
import Data.Maybe
import Data.List
import Futhark.Pass.ExtractKernels.Distribution
(LoopNesting(..), KernelNest, kernelNestLoops)
import Futhark.Representation.SOACS
import Futhark.MonadFreshNames
import Futhark.Tools
import Prelude
-- | An encoding of a sequential do-loop with no existential context,
-- alongside its result pattern.
data SeqLoop = SeqLoop [Int] Pattern [(FParam, SubExp)] (LoopForm SOACS) Body
seqLoopStm :: SeqLoop -> Stm
seqLoopStm (SeqLoop _ pat merge form body) =
Let pat (defAux ()) $ DoLoop [] merge form body
interchangeLoop :: (MonadBinder m, LocalScope SOACS m) =>
SeqLoop -> LoopNesting
-> m SeqLoop
interchangeLoop
(SeqLoop perm loop_pat merge form body)
(MapNesting pat cs w params_and_arrs) = do
merge_expanded <-
localScope (scopeOfLParams $ map fst params_and_arrs) $
mapM expand merge
let loop_pat_expanded =
Pattern [] $ map expandPatElem $ patternElements loop_pat
new_params = [ Param pname $ fromDecl ptype
| (Param pname ptype, _) <- merge ]
new_arrs = map (paramName . fst) merge_expanded
rettype = map rowType $ patternTypes loop_pat_expanded
-- If the map consumes something that is bound outside the loop
-- (i.e. is not a merge parameter), we have to copy() it. As a
-- small simplification, we just remove the parameter outright if
-- it is not used anymore. This might happen if the parameter was
-- used just as the initial value of a merge parameter.
((params', arrs'), pre_copy_bnds) <-
runBinder $ localScope (scopeOfLParams new_params) $
unzip . catMaybes <$> mapM copyOrRemoveParam params_and_arrs
let lam = Lambda (params'<>new_params) body rettype
map_bnd = Let loop_pat_expanded (StmAux cs ()) $
Op $ Map w lam $ arrs' <> new_arrs
res = map Var $ patternNames loop_pat_expanded
pat' = Pattern [] $ rearrangeShape perm $ patternValueElements pat
return $
SeqLoop [0..patternSize pat-1] pat' merge_expanded form $
mkBody (pre_copy_bnds++[map_bnd]) res
where free_in_body = freeInBody body
copyOrRemoveParam (param, arr)
| not (paramName param `S.member` free_in_body) =
return Nothing
| otherwise =
return $ Just (param, arr)
expandedInit _ (Var v)
| Just (_, arr) <-
find ((==v).paramName.fst) params_and_arrs =
return $ Var arr
expandedInit param_name se =
letSubExp (param_name <> "_expanded_init") $
BasicOp $ Replicate (Shape [w]) se
expand (merge_param, merge_init) = do
expanded_param <-
newParam (param_name <> "_expanded") $
arrayOf (paramDeclType merge_param) (Shape [w]) $
uniqueness $ declTypeOf merge_param
expanded_init <- expandedInit param_name merge_init
return (expanded_param, expanded_init)
where param_name = baseString $ paramName merge_param
expandPatElem (PatElem name bindage t) =
PatElem name bindage $ arrayOfRow t w
-- | Given a (parallel) map nesting and an inner sequential loop, move
-- the maps inside the sequential loop. The result is several
-- statements - one of these will be the loop, which will then contain
-- statements with 'Map' expressions.
interchangeLoops :: (MonadFreshNames m, HasScope SOACS m) =>
KernelNest -> SeqLoop
-> m [Stm]
interchangeLoops nest loop = do
(loop', bnds) <-
runBinder $ foldM interchangeLoop loop $ reverse $ kernelNestLoops nest
return $ bnds ++ [seqLoopStm loop']
| ihc/futhark | src/Futhark/Pass/ExtractKernels/Interchange.hs | isc | 4,105 | 0 | 17 | 1,059 | 1,000 | 518 | 482 | 76 | 2 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Hearts.Deck where
import Prelude hiding (foldr)
import Data.Monoid (Monoid)
import Data.Foldable (Foldable, foldr)
import Data.Set (Set)
import qualified Data.Set as S
import Hearts.Class
-- Data types
newtype Deck_ a = Deck { unDeck :: Set a } deriving (Eq, Ord, Show, Monoid, Foldable)
type Deck = Deck_ Card
-- Instances
instance HasCards Deck_ where
getIsCards = deckToList
putCard = deckInsert
-- Functions
emptyDeck :: Deck_ a
emptyDeck = Deck S.empty
deckFromList :: Ord a => [a] -> Deck_ a
deckFromList = Deck . S.fromList
deckToList :: Deck_ a -> [a]
deckToList = S.toList . unDeck
deckInsert :: Ord a => a -> Deck_ a -> Deck_ a
deckInsert c = Deck . S.insert c . unDeck
makeDeck :: Deck
makeDeck = deckFromList [ Card rank suit | suit <- [Hearts ..], rank <- [Two ..]]
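-- Score for a set of taken cards: if it contains every scoring card
-- (the Queen of Spades and all Hearts), the score is 0 ("shooting the
-- moon"); otherwise it is the sum of the individual card scores.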
deckScore :: Deck -> Score
deckScore deck =
  if allScoreCards == (allScoreCards `S.intersection` unDeck deck)
    then 0
    else foldr (\card acc -> acc + cardScore card) 0 (unDeck deck)
where
allScoreCards = S.fromList $ Card Queen Spades : [Card rank Hearts | rank <- [Two ..]]
| nadirs/hearts-hs | src/Hearts/Deck.hs | mit | 1,132 | 0 | 11 | 216 | 416 | 229 | 187 | 26 | 2 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
module Text.Greek.Mounce.NounSecondDeclension where
import Text.Greek.Grammar
import Text.Greek.Mounce.Morphology
import Text.Greek.Mounce.Quote
secondDeclensionNouns :: [Cited NounCategory]
secondDeclensionNouns =
[ mounce §§ ["n-2a"] $
[nounCategory|
Masculine nouns with stems ending in ο(ς)
sg: pl:
nom: ος οι
gen: ου ων
dat: ῳ οις
acc: ον ους
voc: ε οι
lemmas:
Ἅγαβος ἄγαμος ἄγγελος ἁγιασμός ἁγνισμός
ἀγρός ἀδελφός ἀετός αἰγιαλός αἶνος
αἰχμάλωτος ἀλάβαστρος Ἀλέξανδρος ἀλλοτριεπίσκοπος Ἁλφαῖος
ἀμνός ἀμπελουργός Ἀμπλιᾶτος ἀναβαθμός Ἀνδρόνικος
ἀνδροφόνος ἄνεμος ἀνεψιός ἀνθρωποκτόνος ἄνθρωπος
ἀνθύπατος ἀντίδικος ἀντίχριστος ἀπαρτισμός ἀπελεγμός
ἀπελεύθερος ἀπόστολος Ἄππιος Ἄραβοι ἀργυροκόπος
ἄργυρος ἀριθμός Ἀρίσταρχος Ἀριστόβουλος ἄρκος
ἄρκτος ἁρμός ἁρπαγμός ἄρτος ἀρχάγγελος
Ἀρχέλαος ἀρχηγός Ἄρχιππος ἀρχισυνάγωγος ἀρχιτρίκλινος
Ἀσιανός ἀσκός ἀσπασμός Ἀσύγκριτος Αὐγοῦστος
αὐλός ἀφανισμός ἀφρός Ἀχαϊκός βαθμός
βαπτισμός Βαρθολομαῖος Βαρτιμαῖος βασανισμός βασιλίσκος
βάτος βάτος βάτραχος βήρυλλος βίος
Βλάστος βόθρος βόθυνος βόρβορος βουνός
βρόχος βρυγμός βυθός βωμός Γάϊος
γάμος γεωργός γνόφος γογγυσμός γόμος
δακτύλιος δάκτυλος δεξιολάβος δέσμιος δεσμός
Δημήτριος δημιουργός δῆμος διάδοχος διάκονος
διαλογισμός διαμερισμός διδάσκαλος Δίδυμος Διονύσιος
Διόσκουροι διωγμός δόλος δοῦλος δρόμος
Ἑβραῖος ἑκατόνταρχος ἐλεγμός ἔλεγχος Ἐλισαῖος
Ἐλισσαῖος ἐμπαιγμός ἔμπορος ἐνιαυτός ἐνταφιασμός
Ἐπαίνετος ἔπαινος Ἐπαφρόδιτος Ἐπικούρειος ἐπισιτισμός
ἐπίσκοπος ἐπίτροπος Ἔραστος ἔριφος ἑταῖρος
Εὔβουλος εὐνοῦχος Εὔτυχος Ζακχαῖος Ζεβεδαῖος
ζῆλος ζόφος ζυγός ἥλιος ἧλος
Ἡρῳδιανοί ἦχος Θαδδαῖος θάμβος θάνατος
θεμέλιος θεολόγος θεός Θεόφιλος θερισμός
θησαυρός θόρυβος θρῆνος θρόμβος θρόνος
θυμός θυρεός θυρωρός Ἰάϊρος Ἰάκωβος
ἰατρός ἱερόσυλος ἱλασμός ἱματισμός ἰός
Ἰουδαϊσμός Ἰούλιος Ἰοῦστος ἵππος κάβος
κάδος καθαρισμός καιρός κάλαμος κάμηλος
κάμιλος Καναναῖος καπνός Κάρπος καρπός
κατακλυσμός καταρτισμός κατάσκοπος κατήγορος κέραμος
κῆνσος κῆπος κηπουρός κιθαρῳδός κίνδυνος
κλάδος Κλαύδιος κλαυθμός κληρονόμος κλῆρος
κλίβανος κοινωνός κόκκος κόλπος κονιορτός
κοπετός κόπος Κορίνθιος Κορνήλιος κόρος
κόσμος Κούαρτος κόφινος κράβαττος κράββατος
κρημνός Κρίσπος κρύσταλλος κυλισμός Κύπριος
Κυρηναῖος Κυρήνιος κύριος κῶμος Λάζαρος
λαός Λεββαῖος λειτουργός λῆρος λίβανος
λιβανωτός Λιβερτῖνος Λιβυστῖνος λίθος λιμός
Λίνος λογισμός λόγος λοίδορος λοιμός
Λούκιος λύκος λύχνος μάγος μαζός
Μαθθαῖος μακαρισμός Μάλχος Μᾶρκος μάρμαρος
μασθός μαστός Ματθαῖος μέθυσος μερισμός
Μῆδος μηρός μιασμός μίσθιος μισθός
μισθωτός μόδιος μοιχός μολυσμός μόσχος
μόχθος μυελός μῦθος μύλος μῶμος
Ναζωραῖος ναός Νάρκισσος ναύκληρος νεανίσκος
νεοσσός νεφρός νεωκόρος Νικόδημος Νικόλαος
νομοδιδάσκαλος νόμος νοσσός νότος νυμφίος
νῶτος ὄγκος ὁδηγός ὀδυρμός οἰκιακός
οἰκοδόμος οἰκονόμος οἶκος οἰκτιρμός οἶνος
ὄλεθρος ὄλυνθος ὄμβρος ὅμιλος ὀνειδισμός
Ὀνήσιμος Ὀνησίφορος ὄνος ὄρθρος ὅρκος
ὅρος οὐρανός Οὐρβανός ὀφθαλμός ὀχετός
ὄχλος πάγος παιδαγωγός παράδεισος παράκλητος
παραπικρασμός παρθένος Πάρθοι παροξυσμός παροργισμός
Πάτμος Παῦλος πειρασμός πενθερός Πέτρος
πηλός Πιλᾶτος πλόος πλοῦτος πόλεμος
πόνος Πόντιος Πόντος πόντος Πόπλιος
πορισμός Πόρκιος πόρνος ποταμός Ποτίολοι
πότος προσήλυτος Πρόχορος πύργος πυρετός
Πύρρος πῶλος ῥαβδοῦχος ῥαντισμός Ῥοῦφος
ῥύπος σαββατισμός Σαδδουκαῖος σάκκος σάλος
σάρδινος Σαῦλος σειρός σεισμός Σεκοῦνδος
Σέργιος σίδηρος σικάριος Σιλουανός σιρός
σῖτος σκηνοποιός σκοπός σκορπίος σμάραγδος
σπίλος σπόγγος σπόρος σταυρός στεναγμός
Στέφανος στέφανος στηριγμός στόμαχος στρατηγός
στρατοπέδορχος στῦλος συγκοινωνός σύζυγος σύμβουλος
συμπρεσβύτερος συναιχμάλωτος σύνδεσμος σύνδουλος συνέδριος
σύνεδρος συνέκδημος συνεπίσκοτος συνεργός Σύρος
Σώπατρος Σωσίπατρος σωφρονισμός τάραχος ταῦρος
τάφος Τέρτιος Τέρτουλλος Τέρτυλλος Τιβέριος
Τιμαῖος Τιμόθεος Τίτιος τίτλος Τίτος
τοῖχος τόκος τόπος τράγος τράχηλος
τρίβολος τρόμος τρόπος Τρόφιμος τροχός
τύπος Τύραννος τύραννος Τύριος Τυχικός
ὑάκινθος ὑετός υἱός Ὑμέναιος ὕμνος
ὕπνος ὑπογραμμός ὑσσός φάγος φανός
φαρισαῖος φάρμακος Φῆστος φθόγγος φθόνος
Φίλητος Φιλιππήσιος Φιλιπποι Φίλιππος Φιλόλογος
φιλόσοφος φόβος φόνος φόρος φόρτος
Φορτουνᾶτος φραγμός Φύγελος φωλεός φωτισμός
Χαλδαῖος χαλινός χαλκός χείμαρρος χειραγωγός
χιλίαρχος χοῖρος χορός χόρτος χρηματισμός
Χριστιανός Χριστός χρόνος χρυσόλιθος χρυσόπρασος
χρυσός χωρισμός χῶρος ψαλμός ψευδάδελφος
ψευδαπόστολος ψευδοδιδάσκαλος ψευδόχριστος ψιθυρισμός ὦμος
|]
, mounce §§ ["n-2b"] $
[nounCategory|
      Feminine nouns with stems ending in ο(ς)
sg: pl:
nom: ος οι
gen: ου ων
dat: ῳ οις
acc: ον ους
voc: ε οι
lemmas:
ἄβυσσος ἄγαμος ἀγριέλαιος Ἄζωτος Αἴγυπτος ἀλάβαστρος ἀμέθυσος ἀμέθυστος ἄμμος ἄμπελος ἄρκος ἄρκτος Ἆσσος ἄψινθος βάσανος βάτος βήρυλλος
βίβλος βύσσος Δαμασκός διάκονος διάλεκτος διέξοδος διέξοδος δοκός εἴσοδος ἔξοδος Ἔφεσος θεός θυρωρός καλλιέλαιος κάμηλος κάμινος κέδρος
κιβωτός Κνίδος κοινωνός κόπρος Κόρινθος Κύπρος ληνός λιμός Μίλητος νάρδος νῆσος νόσος ὁδός ὄνος παρθένος Πάφος Πέργαμος ῥάβδος Ῥόδος Σάμος
σάπφιρος σορός σποδός στάμνος συκάμινος Ταρσός τρίβος τροφός Τύρος ὕαλος ὕσσωπος χίος ψῆφος
|]
, mounce §§ ["n-2c"] $
[nounCategory|
Neuter nouns with stems ending in ο(ν)
sg: pl:
nom: ον α
gen: ου ων
dat: ῳ οις
acc: ον α
voc: ε οι
lemmas:
ἀγγεῖον ἄγκιστρον αἰσθητήριον ἀκροατήριον ἀκροθίνιον ἄκρον ἀλάβαστρον ἄλευρον ἄμμον ἀμφίβληστρον ἄμφοδον ἄμωμον ἀνάγαιον ἄνηθον ἀντίλυτρον ἀποστάσιον
    ἀργύριον ἄριστον ἀρνίον ἄροτρον ἀσσάριον ἄστρον ἄχυρον ἀψίνθιον βάϊον βαλλάντιον βιβλαρίδιον βιβλίον βραβεῖον γαζοφυλάκιον γενέσια γεώργιον γλωσσόκομον
Γόμορρα γυναικάριον δαιμόνιον δάκρυον δάνειον δεῖπνον δένδρον δεσμωτήριον δηνάριον δίδραχμον δίκτυον δοκίμιον δρέπανον δυσεντέριον δωδεκάφυλον
δῶρον ἐγκαίνια εἰδωλεῖον εἴδωλον ἔλαιον ἐμπόριον ἔνεδρον ἐνύπνιον ἐπικεφάλαιον ἔργον ἔριον ἐρίφιον ἑρπετόν ἔσοπτρον εὐαγγέλιον ζιζάνιον ζυγόν
ζῷον ἡδύοσμον ἡμιώριον ἡμίωρον θέατρον θεῖον θεμέλιον θηρίον θυγάτριον θυμιατήριον θυσιαστήριον ἱερόν Ἱεροσόλυμα Ἰκόνιον ἱλαστήριον Ἰλλυρικόν
    ἱμάτιον ἱστίον ἰχθύδιον κατοικητήριον κέντρον κεράμιον κεράτιον κεφάλαιον κηρίον κιβώριον κιννάμωμον κλινάριον κλινίδιον κολλούριον κόπριον κοράσιον
κρανίον κράσπεδον κρίνον κριτήριον κύμβαλον κύμινον κυνάριον κῶλον λάχανον λέντιον λίνον λόγιον λουτρόν λύτρον μάκελλον μαρτύριον μεθόριον μελισσεῖον μελισσῖον
μεσονύκτιον μεσότοιχον μέτρον μέτωπον μίλιον μνημεῖον μνημόσυνον Μύρα μύρον μυστήριον νησίον νοσσίον νυχθήμερον ξύλον ὀθόνιον οἰκητήριον ὀνάριον
ὅπλον ὅριον ὄρνεον ὀστέον ὀψάριον ὀψώνιον παιδάριον παιδίον πανδοχεῖον παραμύθιον Πάταρα περιβόλαιον Πέργαμον πετεινόν πήγανον πηδάλιον πινακίδιον
πλοιάριον πλοῖον ποίμνιον ποτήριον πραιτώριον πρεσβυτέριον προαύλιον προβάτιον πρόβατον προσάββατον προσκεφάλαιον προσφάγιον πρόσωπον πρωτοτόκια πτερύγιον
πτύον Ῥήγιον σάββατον σανδάλιον σάρδιον Σάρεπτα σάτον σημεῖον σημικίθιον σιμικίνθιον σιτίον σιτομέτριον σκάνδαλον σκύβαλον σκῦλον Σόδομα σουδάριον
σπήλαιον σπλάγχνον στάδιον στοιχεῖον στρατόπεδον στρουθίον σῦκον συμβούλιον συμπόσιον συνέδριον σύσσημον σφάγιον σφυδρόν σφυρόν σχοινίον τάλαντον
ταμεῖον ταμιεῖον τεκμήριον τεκνίον τέκνον τελωνεῖον τελώνιον τετάδιον τόξον τοπάζιον τρίστεγον τρύβλιον Τρωγύλλιον ὑπερῷον ὑποζύγιον ὑπολήνιον
ὑποπόδιον φάρμακον φόβητρον φόρον φορτίον φραγέλλιον φρύγανον φυλακτήριον φύλλον χαλκίον χαλκολίβανον χειρόγραφον χρυσίον χωρίον ψιχίον ψωμίον
ᾠόν ὠτάριον ὠτίον
|]
, mounce §§ ["n-2d"] $
[nounCategory|
Second declension contract nouns with stems ending in εο
sg: pl:
nom: ουν α
gen: ου ων
dat: ῳ οις
acc: ουν α
voc: ουν α
lemmas:
ὀστοῦν
|]
, mounce §§ ["n-2d"] $
[nounCategory|
Second declension contract nouns with stems ending in οο
sg: pl:
nom: ους οι
gen: ου ων
dat: ῳ οις
acc: ουν ους
voc: ους οι
lemmas:
χειμάρρους
|]
  , mounce §§ ["n-2e"] $
[nounCategory|
Nouns with stems ending in εω(ς) (“Attic” declension)
sg: pl:
nom: ως *
gen: ω *
dat: ῳ *
acc: ω *
voc: ως *
lemmas:
Ἀπολλῶς Κῶς
|]
] | scott-fleischman/greek-grammar | haskell/greek-grammar/src/Text/Greek/Mounce/NounSecondDeclension.hs | mit | 15,504 | 0 | 8 | 2,386 | 154 | 103 | 51 | 20 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Network.Wai.Middleware.Router (Route,router,dir) where
import Data.Text (Text,splitOn)
import Network.Wai (Request(..),Application)
-- | Alias for a function which maps path pieces to applications.
type Route = ([Text] -> Maybe Application)
-- | Router for mapping paths to applications.
--
-- For example:
--
-- > router [ dir "/foo" fooApp
-- > , dir "/api" apiApp
-- > ] defaultApp
router :: [Route] -> Application -> Application
router routes d req = case router' (pathInfo req) routes of
Nothing -> d req
Just a -> a req
-- | First matching paths' application, nothing otherwise.
router' :: [Text] -> [Route] -> Maybe Application
router' _ [] = Nothing
router' ps (r:rs) = case r ps of
Nothing -> router' ps rs
Just a -> Just a
-- | A possible web application if the path matches, nothing otherwise.
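--
-- For example, @dir "/foo" app@ matches a request whose 'pathInfo' is
-- @["foo"]@.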
dir :: Text -> Application -> Route
dir path a = let ps = pathPieces path
in ( \xs -> if ps == xs
then Just a
else Nothing )
-- | Pieces of a URL path.
pathPieces :: Text -> [Text]
pathPieces path = filter (""/=) $ splitOn "/" path
| mdmarek/wai-router | Network/Wai/Middleware/Router.hs | mit | 1,192 | 0 | 10 | 305 | 321 | 176 | 145 | 21 | 2 |
module Tree where
import Data.Monoid (Monoid, (<>))
import Data.Vector (Vector)
import qualified Data.Vector as Vector
import Prelude hiding (lookup)
data RangeTree a = Node {
size :: Int,
val :: a,
left :: RangeTree a,
right :: RangeTree a
} | Leaf a
fromVector :: Monoid a => Vector a -> RangeTree a
fromVector vector =
if len == 1 then Leaf $ Vector.head vector else
let (leftVector, rightVector) = Vector.splitAt (quot len 2) vector
left = fromVector leftVector
right = fromVector rightVector
in Node {
size = len,
val = val left <> val right,
left = left,
right = right
}
where len = Vector.length vector
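-- Apply a function to the element at the given index, recomputing the
-- cached monoidal value of every node on the path from that leaf to the
-- root.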
adjust :: Monoid a => (a -> a) -> Int -> RangeTree a -> RangeTree a
adjust f _ (Leaf a) = Leaf (f a)
adjust f index node =
let half = quot (size node) 2 in
if index < half
then let newLeft = adjust f index (left node) in
node {
val = val newLeft <> val (right node),
left = newLeft
}
else let newRight = adjust f (index - half) (right node) in
node {
val = val (left node) <> val newRight,
right = newRight
}
update :: Monoid a => a -> Int -> RangeTree a -> RangeTree a
update a = adjust (const a)
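-- A usage sketch (illustrative only): with the 'Data.Monoid.Sum' monoid,
-- @val (fromVector v)@ is the total of @v@, and 'adjust'/'update' rebuild
-- only the path from the touched leaf to the root, e.g.
--
-- > getSum (val (fromVector (Vector.fromList (map Sum [1,2,3,4]))))  ==  10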
| vladfi1/game-ai | lib/Tree.hs | mit | 1,262 | 0 | 15 | 374 | 511 | 268 | 243 | 38 | 2 |
module Google.CalendarSpec (spec) where
import Google.Calendar
import Test.Hspec
spec :: Spec
spec =
describe "main" $ do
it "returns the unit" $
main `shouldReturn` ()
| tippenein/google-calendar | test/Web/Google/CalendarSpec.hs | mit | 184 | 0 | 10 | 40 | 56 | 31 | 25 | 8 | 1 |
module Main (main) where
import ClassyPrelude
import System.FilePath.Glob (glob)
import Test.DocTest (doctest)
main :: IO ()
main = glob "src/**/*.hs" >>= doDocTest
doDocTest :: [String] -> IO ()
doDocTest options = doctest $ options <> ghcExtensions
ghcExtensions :: [String]
ghcExtensions =
[ "-XConstraintKinds"
, "-XDataKinds"
, "-XDeriveDataTypeable"
, "-XDeriveFunctor"
, "-XDeriveGeneric"
, "-XDuplicateRecordFields"
, "-XEmptyDataDecls"
, "-XFlexibleContexts"
, "-XFlexibleInstances"
, "-XGADTs"
, "-XGeneralizedNewtypeDeriving"
, "-XInstanceSigs"
, "-XLambdaCase"
, "-XMultiParamTypeClasses"
, "-XNamedFieldPuns"
, "-XNoImplicitPrelude"
, "-XNoMonomorphismRestriction"
, "-XOverloadedLabels"
, "-XOverloadedLists"
, "-XOverloadedStrings"
, "-XPackageImports"
, "-XPatternSynonyms"
, "-XPolyKinds"
, "-XRankNTypes"
, "-XRecordWildCards"
, "-XScopedTypeVariables"
, "-XStandaloneDeriving"
, "-XTupleSections"
, "-XTypeFamilies"
, "-XTypeOperators"
, "-XViewPatterns"
]
| arowM/ASPico | test/DocTest.hs | mit | 1,110 | 0 | 7 | 238 | 194 | 119 | 75 | 41 | 1 |
module Network.Gazelle.Types.Id (
ArtistID(..),
SimilarId(..),
TorrentID(..),
TorrentGroupID(..)
) where
import Data.Aeson
newtype ArtistID = ArtistID Integer
deriving Show
instance FromJSON ArtistID where
parseJSON v = ArtistID <$> parseJSON v
newtype SimilarId = SimilarId Integer
deriving Show
instance FromJSON SimilarId where
parseJSON v = SimilarId <$> parseJSON v
newtype TorrentID = TorrentID Integer
deriving Show
instance FromJSON TorrentID where
parseJSON v = TorrentID <$> parseJSON v
newtype TorrentGroupID = TorrentGroupID Integer
deriving Show
instance FromJSON TorrentGroupID where
parseJSON v = TorrentGroupID <$> parseJSON v
| mr/gazelle | src/Network/Gazelle/Types/Id.hs | mit | 700 | 0 | 7 | 140 | 179 | 102 | 77 | 22 | 0 |
module Paths_hangman (
version,
getBinDir, getLibDir, getDataDir, getLibexecDir,
getDataFileName, getSysconfDir
) where
import qualified Control.Exception as Exception
import Data.Version (Version(..))
import System.Environment (getEnv)
import Prelude
catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a
catchIO = Exception.catch
version :: Version
version = Version [0,1,0,0] []
bindir, libdir, datadir, libexecdir, sysconfdir :: FilePath
bindir = "/Users/mjhamrick/.org/reading-list/haskell_programming_from_first_principles/hangman/.stack-work/install/x86_64-osx/lts-5.16/7.10.3/bin"
libdir = "/Users/mjhamrick/.org/reading-list/haskell_programming_from_first_principles/hangman/.stack-work/install/x86_64-osx/lts-5.16/7.10.3/lib/x86_64-osx-ghc-7.10.3/hangman-0.1.0.0-BBZpnRHCvuLIKle9bhE9C0"
datadir = "/Users/mjhamrick/.org/reading-list/haskell_programming_from_first_principles/hangman/.stack-work/install/x86_64-osx/lts-5.16/7.10.3/share/x86_64-osx-ghc-7.10.3/hangman-0.1.0.0"
libexecdir = "/Users/mjhamrick/.org/reading-list/haskell_programming_from_first_principles/hangman/.stack-work/install/x86_64-osx/lts-5.16/7.10.3/libexec"
sysconfdir = "/Users/mjhamrick/.org/reading-list/haskell_programming_from_first_principles/hangman/.stack-work/install/x86_64-osx/lts-5.16/7.10.3/etc"
getBinDir, getLibDir, getDataDir, getLibexecDir, getSysconfDir :: IO FilePath
getBinDir = catchIO (getEnv "hangman_bindir") (\_ -> return bindir)
getLibDir = catchIO (getEnv "hangman_libdir") (\_ -> return libdir)
getDataDir = catchIO (getEnv "hangman_datadir") (\_ -> return datadir)
getLibexecDir = catchIO (getEnv "hangman_libexecdir") (\_ -> return libexecdir)
getSysconfDir = catchIO (getEnv "hangman_sysconfdir") (\_ -> return sysconfdir)
getDataFileName :: FilePath -> IO FilePath
getDataFileName name = do
dir <- getDataDir
return (dir ++ "/" ++ name)
| diminishedprime/.org | reading-list/haskell_programming_from_first_principles/hangman/.stack-work/dist/x86_64-osx/Cabal-1.22.5.0/build/autogen/Paths_hangman.hs | mit | 1,891 | 0 | 10 | 177 | 362 | 206 | 156 | 28 | 1 |
module Rebase.Data.Functor.Compose
(
module Data.Functor.Compose
)
where
import Data.Functor.Compose
| nikita-volkov/rebase | library/Rebase/Data/Functor/Compose.hs | mit | 104 | 0 | 5 | 12 | 23 | 16 | 7 | 4 | 0 |
module Database.Siege.NetworkHelper where
import Control.Monad
import Control.Concurrent
import Network.Socket
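-- | Listen for TCP connections on the given port (on all interfaces)
-- and handle each accepted connection with the supplied action in its
-- own thread, closing the socket afterwards.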
listenAt :: Int -> (Socket -> IO ()) -> IO ()
listenAt port act = do
let port' = toEnum port
lsock <- socket AF_INET Stream 0
setSocketOption lsock ReuseAddr 1
bindSocket lsock $ SockAddrInet port' iNADDR_ANY
listen lsock 5
forever $ do
(sock, _) <- accept lsock
forkIO $ do
act sock
sClose sock
| DanielWaterworth/siege | src/Database/Siege/NetworkHelper.hs | mit | 441 | 0 | 13 | 98 | 163 | 76 | 87 | 16 | 1 |
module Handler.AnalogReadSpec (spec) where
import TestImport
spec :: Spec
spec = withApp $ do
describe "getAnalogReadR" $ do
error "Spec not implemented: getAnalogReadR"
| aufheben/lambda-arduino | test/Handler/AnalogReadSpec.hs | mit | 186 | 0 | 11 | 39 | 44 | 23 | 21 | 6 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Test.LDAP.Classy.Dn (dnTests) where
import Test.Tasty (TestTree, testGroup)
import Test.Tasty.HUnit (Assertion, testCase, (@?=))
import Control.Applicative ((<|>))
import Data.Attoparsec.Text (Parser, eitherResult, endOfInput,
feed, option, parse)
import Data.Foldable (traverse_)
import Data.List.NonEmpty (NonEmpty ((:|)))
import Data.Semigroup ((<>))
import Data.Text (Text)
import qualified Data.Text as T
import LDAP.Classy.Dn
import LDAP.Classy.AttributeType
import LDAP.Classy.Dn.Internal
dnTests :: TestTree
dnTests = testGroup "dn"
[ testGroup "fromText"
[ testCase "nullCharacter" dnFromTextNull
, testCase "ok" dnFromTextOk
, testCase "craycray" dnFromTextCrayCray
]
, testGroup "toText"
[ testCase "ok" dnToTextOk
, testCase "crayCray" dnToTextCrayCray
, testCase "escapedPlus" dnToTextEscapedPlus
, testCase "escapedNull" dnToTextEscapedNull
]
, testGroup "parsers"
[ testCase "parsePairOk" parsePairOk
, testCase "dnStringOk" parseDnStringOk
, testCase "dnStringSpacesOk" parseDnStringSpacesOk
, testCase "attributeValueOk" parseAttributeValueOk
, testCase "attributeValueHexPair" parseAttributeValueHexPair
, testCase "attributeTypeOk" parseAttributeTypeOk
, testCase "attributeTypeAndValueOid" parseAttributeTypeAndValueOid
, testCase "attributeTypeAndValueOther" parseAttributeTypeAndValueOther
, testCase "attributeTypeAndValueOk" parseAttributeTypeAndValueOk
, testCase "attributeTypeAndValueSpaces" parseAttributeTypeAndValueSpaces
, testCase "attributeValueSpaces" parseAttributeValueSpaces
, testCase "parseNumericOid" parseNumericOid
, testCase "relativeDistinguishedNameOk" parseRelativeDistinguishedNameOk
, testCase "relativeDistinguishedNameMultiSpaces" parseRelativeDistinguishedNameMultiSpaces
, testCase "relativeDistinguishedNameMultiOk" parseRelativeDistinguishedNameMultiOk
, testCase "distinguishedNameOk" parseDistinguishedNameOk
, testCase "parseEndOfDnOk" parseEndOfDnOk
]
]
dnFromTextNull :: Assertion
dnFromTextNull =
(dnFromText "uid=benkolera\x00,dc=benkolera,dc=com") @?= Nothing
okDnText :: Text
okDnText = "UID=benkolera,DC=benkolera,DC=com"
okDn :: Dn
okDn = Dn
[ rDnSingle $ uid "benkolera"
, rDnSingle $ dc "benkolera"
, rDnSingle $ dc "com"
]
dnFromTextOk :: Assertion
dnFromTextOk = dnFromTextEither okDnText @?= (Right okDn)
dnToTextOk :: Assertion
dnToTextOk = dnToText okDn @?= okDnText
crayCrayDnText :: Text
crayCrayDnText = "UID=benkolera\\[email protected] + CN= Ben Kolera\\ ,1337=foo,DC=benkolera,DC=com"
crayCrayDn :: Dn
crayCrayDn = Dn
[ RelativeDn ( uid "[email protected]" :| [cn "Ben Kolera "])
, rDnSingle $ oid 1337 "foo"
, rDnSingle $ dc "benkolera"
, rDnSingle $ dc "com"
]
dnFromTextCrayCray :: Assertion
dnFromTextCrayCray = dnFromTextEither crayCrayDnText @?= (Right crayCrayDn)
dnToTextCrayCray :: Assertion
dnToTextCrayCray = dnToText crayCrayDn @?= "UID=benkolera\\[email protected]+CN=Ben Kolera\\20,1337=foo,DC=benkolera,DC=com"
dnToTextEscapedPlus :: Assertion
dnToTextEscapedPlus = dnToText dn @?= "UID=ben\\[email protected],DC=benkolera,DC=com"
where
dn = (Dn [rDnSingle $ uid "[email protected]",rDnSingle $ dc "benkolera",rDnSingle $ dc "com"])
dnToTextEscapedNull :: Assertion
dnToTextEscapedNull = dnToText dn @?= "[email protected],DC=benkolera,DC=com"
where
dn = (Dn [rDnSingle $ uid "[email protected]\x00",rDnSingle $ dc "benkolera",rDnSingle $ dc "com"])
parserTest :: (Eq a, Show a) => Parser a -> Text -> Either String a -> Assertion
parserTest p t expected = eitherResult (feed (parse (p <* endOfInput) t) "") @?= expected
parsePairOk :: Assertion
parsePairOk =
traverse_ (\ ch -> parserTest pair ("\\" <> ch) (Right ch))
[ " "
, "#"
, "+"
, ","
, ";"
, "<"
, ">"
, "="
, "\\"
]
parseDnStringOk :: Assertion
parseDnStringOk = parserTest dnString "benkolera" (Right "benkolera")
parseDnStringSpacesOk :: Assertion
parseDnStringSpacesOk = parserTest dnString "\\ ben kolera\\ " (Right " ben kolera ")
parseAttributeValueOk :: Assertion
parseAttributeValueOk = parserTest attributeValue "benkolera" (Right "benkolera")
parseAttributeValueHexPair :: Assertion
parseAttributeValueHexPair = parserTest attributeValue "benkolera\\[email protected]" (Right "[email protected]")
parseAttributeValueSpaces :: Assertion
parseAttributeValueSpaces = parserTest attributeValue " ben kolera " (Right "ben kolera")
parseAttributeTypeOk :: Assertion
parseAttributeTypeOk = traverse_
(\ (t,e) -> do
parserTest attributeType (T.toUpper t) (Right e)
parserTest attributeType (T.toLower t) (Right e)
)
[ ("uid" , UserId )
, ("l" , LocalityName )
, ("cn" , CommonName )
, ("ou" , OrganizationalUnitName )
, ("o" , OrganizationName)
, ("st" , StateOrProvinceName)
, ("street" , StreetAddress )
, ("c" , CountryName )
, ("dc" , DomainComponent )
]
parseAttributeTypeAndValueOk :: Assertion
parseAttributeTypeAndValueOk =
parserTest attributeTypeAndValue "uid=benkolera" (Right (UserId,"benkolera"))
parseAttributeTypeAndValueOid :: Assertion
parseAttributeTypeAndValueOid =
parserTest attributeTypeAndValue
"1337=benkolera"
(Right (OidAttributeType 1337,"benkolera"))
parseNumericOid :: Assertion
parseNumericOid = parserTest numericOid "1337" (Right $ OidAttributeType 1337)
parseAttributeTypeAndValueOther :: Assertion
parseAttributeTypeAndValueOther =
parserTest attributeTypeAndValue
"butts=benkolera"
(Right (OtherAttributeDescr "butts","benkolera"))
parseAttributeTypeAndValueSpaces :: Assertion
parseAttributeTypeAndValueSpaces =
parserTest attributeTypeAndValue "uid = benkolera " (Right (UserId,"benkolera"))
parseRelativeDistinguishedNameOk :: Assertion
parseRelativeDistinguishedNameOk = parserTest
relativeDistinguishedName
"uid=benkolera"
(Right $ rDnSingle (uid "benkolera"))
parseRelativeDistinguishedNameMultiOk :: Assertion
parseRelativeDistinguishedNameMultiOk = parserTest
relativeDistinguishedName
"uid=benkolera+cn=Ben Kolera"
(Right . RelativeDn $ (uid "benkolera") :| [cn "Ben Kolera"])
parseRelativeDistinguishedNameMultiSpaces :: Assertion
parseRelativeDistinguishedNameMultiSpaces = parserTest
relativeDistinguishedName
"uid=benkolera + cn=Ben Kolera\\ "
(Right . RelativeDn $ (uid "benkolera") :| [cn "Ben Kolera "])
parseDistinguishedNameOk :: Assertion
parseDistinguishedNameOk = parserTest
distinguishedName
"uid=benkolera,dc=benkolera,dc=com"
(Right . Dn $
[ rDnSingle $ uid "benkolera"
, rDnSingle $ dc "benkolera"
, rDnSingle $ dc "com"
])
parseEndOfDnOk :: Assertion
parseEndOfDnOk = traverse_
(\(x,e) -> parserTest
(endOfDn *> option T.empty (fmap T.singleton (comma <|> plus))) -- Mess around consuming the trailing comma we expect
x
(Right e))
[(" ","")
,(" ","")
,(",",",")
,(" ,",",")
,(" ,",",")
,("+","+")
,(" +","+")
,(" +","+")
]
| benkolera/haskell-ldap-classy | tests/Test/LDAP/Classy/Dn.hs | mit | 7,618 | 0 | 15 | 1,634 | 1,640 | 902 | 738 | 169 | 1 |
{-# LANGUAGE OverloadedStrings, RecordWildCards #-}
module Crypto.PubKey.OpenSsh.Encode where
import Control.Monad (when)
import Data.ByteString.Char8 (ByteString)
import Data.Bits (testBit)
import Data.List (unfoldr)
import Data.Word (Word8)
import qualified Data.ByteString as BS
import Data.Serialize (Put, Putter, runPut, putByteString, putWord32be, put)
import Data.ASN1.Encoding (encodeASN1')
import Data.ASN1.Types (ASN1(IntVal, Start, End), ASN1ConstructionType(Sequence))
import Data.ASN1.BinaryEncoding (DER(..))
import Data.PEM (PEM(..), pemWriteBS)
import qualified Crypto.Types.PubKey.DSA as DSA
import qualified Crypto.Types.PubKey.RSA as RSA
import qualified Data.ByteString.Base64 as Base64
import Crypto.PubKey.OpenSsh.Types (OpenSshKeyType(..), OpenSshPublicKey(..),
OpenSshPrivateKey(..))
fixZeroByte :: [Word8] -> [Word8]
fixZeroByte [] = []
fixZeroByte bs = if testBit (head bs) msb then 0:bs else bs
where
msb = 7
expandInteger :: Integer -> [Word8]
expandInteger n = reverse $ unfoldr expand n
where
expand :: Integer -> Maybe (Word8, Integer)
expand e | e == 0 = Nothing
| otherwise = Just $ getResults $ quotRem e 256
getResults :: (Integer, Integer) -> (Word8, Integer)
getResults (i, w) = (fromIntegral w, i)
keyTypePutter :: Putter OpenSshKeyType
keyTypePutter OpenSshKeyTypeRsa = putByteString "ssh-rsa"
keyTypePutter OpenSshKeyTypeDsa = putByteString "ssh-dss"
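-- Encode an Integer in the SSH mpint wire format: a 4-byte big-endian
-- length prefix followed by the big-endian magnitude, with a leading
-- zero byte when the most significant bit is set.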
mpint :: Integer -> ByteString
mpint i = runPut $ do
putWord32be $ fromIntegral $ length binary
mapM_ put binary
where
binary = fixZeroByte $ expandInteger i
commonPublicKeyPutter :: OpenSshKeyType
-> ByteString
-> ByteString
-> Put
commonPublicKeyPutter keyType comment body = do
keyTypePutter keyType
putByteString " "
putByteString $ Base64.encode $ BS.append wrapType body
when (not $ BS.null comment) $ do
putByteString " "
putByteString comment
where
binaryType = runPut $ keyTypePutter keyType
wrapType = runPut $ do
putWord32be $ fromIntegral $ BS.length $ binaryType
putByteString binaryType
commonPrivateKeyPutter :: OpenSshKeyType
-> ByteString
-> Put
commonPrivateKeyPutter OpenSshKeyTypeRsa body = do
putByteString $ pemWriteBS $ PEM "RSA PRIVATE KEY" [] body
commonPrivateKeyPutter OpenSshKeyTypeDsa body = do
putByteString $ pemWriteBS $ PEM "DSA PRIVATE KEY" [] body
openSshPublicKeyPutter :: Putter OpenSshPublicKey
openSshPublicKeyPutter (OpenSshPublicKeyRsa
(RSA.PublicKey _ public_n public_e)
comment) =
commonPublicKeyPutter OpenSshKeyTypeRsa comment $ BS.concat
[ mpint public_e
, mpint public_n ]
openSshPublicKeyPutter (OpenSshPublicKeyDsa
(DSA.PublicKey (DSA.Params public_p public_g public_q) public_y)
comment) =
commonPublicKeyPutter OpenSshKeyTypeDsa comment $ BS.concat
[ mpint public_p
, mpint public_q
, mpint public_g
, mpint public_y ]
openSshPrivateKeyPutter :: Putter OpenSshPrivateKey
openSshPrivateKeyPutter (OpenSshPrivateKeyRsa (RSA.PrivateKey {..})) =
let RSA.PublicKey{..} = private_pub
in commonPrivateKeyPutter OpenSshKeyTypeRsa $ encodeASN1' DER
[ Start Sequence
, IntVal 0 -- version
, IntVal public_n
, IntVal public_e
, IntVal private_d
, IntVal private_p
, IntVal private_q
, IntVal private_dP
, IntVal private_dQ
, IntVal private_qinv
, End Sequence
]
openSshPrivateKeyPutter (OpenSshPrivateKeyDsa (DSA.PrivateKey {..}) public_y) =
let DSA.Params{..} = private_params
in commonPrivateKeyPutter OpenSshKeyTypeDsa $ encodeASN1' DER
[ Start Sequence
, IntVal 0 -- version
, IntVal params_p
, IntVal params_q
, IntVal params_g
, IntVal public_y
, IntVal private_x
, End Sequence
]
encodePublic :: OpenSshPublicKey -> ByteString
encodePublic = runPut . openSshPublicKeyPutter
encodePrivate :: OpenSshPrivateKey -> ByteString
encodePrivate k = runPut $ openSshPrivateKeyPutter k
| knsd/crypto-pubkey-openssh | src/Crypto/PubKey/OpenSsh/Encode.hs | mit | 4,335 | 0 | 12 | 1,081 | 1,140 | 603 | 537 | 104 | 2 |
{-# LANGUAGE TemplateHaskell, OverloadedStrings #-}
module Data.NGH.Formats.Tests.Sam
( tests ) where
import Test.Framework.TH
import Test.HUnit
import Test.QuickCheck
import Test.Framework.Providers.HUnit
import Test.Framework.Providers.QuickCheck2
import Data.Maybe
import Data.NGH.Alignments
import Data.NGH.Formats.Sam
import qualified Data.ByteString.Lazy as L
import qualified Data.ByteString.Lazy.Char8 as L8 ()
test_file = "\
\@SQ\tSN:R9\tLN:1541735\tSP: converted from Pf3D7_09_v3.embl.gz\tUR:plasmodium.fa\n\
\@SQ\tSN:R10\tLN:1687656\tSP: converted from Pf3D7_10_v3.embl.gz\tUR:plasmodium.fa\n\
\@SQ\tSN:R11\tLN:2038340\tSP: converted from Pf3D7_11_v3.embl.gz\tUR:plasmodium.fa\n\
\@SQ\tSN:R12\tLN:2271494\tSP: converted from Pf3D7_12_v3.embl.gz\tUR:plasmodium.fa\n\
\@SQ\tSN:R13\tLN:2925236\tSP: converted from Pf3D7_13_v3.embl.gz\tUR:plasmodium.fa\n\
\@SQ\tSN:R14\tLN:3291936\tSP: converted from Pf3D7_14_v3.embl.gz\tUR:plasmodium.fa\n\
\@PG\tID:0\tPN:TAPyR\tVN:1.2c\n\
\SRR065639.1 SOLEXAWS1_0001:1:1:4:1261 length=75\t4\t*\t0\t0\t*\t*\t0\t0\t*\t*\n\
\SRR065639.2 SOLEXAWS1_0001:1:1:4:631 length=75\t0\tR13\t1430385\t0\t28M1D4M1I5M1I3M1D4M1I1M1I2M1I2M\t*\t0\t0\tATTCTTCTTCTTTTTGTAGTCGTTCTTGTCTTACTCTTCTTTTTGCTTGTCTTT\t*\n\
\SRR065639.2 SOLEXAWS1_0001:1:1:4:631 length=75\t16\tR13\t1449199\t0\t1I3M2I5M1D4M1I4M1I4M1D29M\t*\t0\t0\tAAAGACAAGCAAAAAGAAGAGTAAGACAAGAACGACTACAAAAAGAAGAAGAAT\t*\n\
\SRR065639.2 SOLEXAWS1_0001:1:1:4:631 length=75\t16\tR13\t1449349\t0\t1I3M2I5M1D4M1I4M1I4M1D29M\t*\t0\t0\tAAAGACAAGCAAAAAGAAGAGTAAGACAAGAACGACTACAAAAAGAAGAAGAAT\t*\n\
\SRR065639.2 SOLEXAWS1_0001:1:1:4:631 length=75\t16\tR13\t1449499\t0\t1I3M2I5M1D4M1I4M1I4M1D29M\t*\t0\t0\tAAAGACAAGCAAAAAGAAGAGTAAGACAAGAACGACTACAAAAAGAAGAAGAAT\t*\n\
\SRR065639.3 SOLEXAWS1_0001:1:1:4:456 length=75\t4\t*\t0\t0\t*\t*\t0\t0\t*\t*\n\
\SRR065639.4 SOLEXAWS1_0001:1:1:4:207 length=75\t4\t*\t0\t0\t*\t*\t0\t0\t*\t*\n"
bowtie_extra = "FCC0PHYACXX:7:1101:1354:2168#0/1\t4\t*\t0\t0\t*\t*\t0\t0\tAAGTTAGTTCAGTCTACATCCAGAAATGAGCAAGAGCAGCTTGGAGGTT\tabbceeecggggghiiiiiiiiiiiiiiiihiiighhiiiiiiifhicg\tYT:Z:UU\n"
alignments = readAlignments test_file
tests = $(testGroupGenerator)
case_all = (length alignments) @?= 7
case_aligned = (length $ filter isAligned alignments) @?= 4
case_flagNot4 = (null $ filter ((==4) . samFlag) $ filter isAligned alignments) @? "Should be empty"
case_bowtie = (length $ readAlignments bowtie_extra) @?= 1
| luispedro/NGH | Data/NGH/Formats/Tests/Sam.hs | mit | 2,480 | 0 | 12 | 205 | 206 | 125 | 81 | 21 | 1 |
-- Recommended style of importing.
-- YES:
-- import qualified Very.Special.Module as VSM
-- import Another.Important.Module (printf, (<|>), )
--
-- NO:
-- import Very.Special.Module
-- import Another.Important.Module hiding (open, close, )
--
-- Nomenclature: the following "reserved words" have a special meaning for the
-- interpreter:
--
-- case of
--
-- if then else
--
-- let where
--
-- infix infixl infixr
--
-- class instance
--
-- data type
--
-- primitive
-- in
--
--
--
--
-- GET DIRTY QUICK ---> ghci>
--
--
-- Lambda-calculus: a mathematical formalism in which the key concepts are
-- the <expression>, the <function> and the <application>.
-- They are defined as follows:
--
-- <expression> := <name> | <function> | <application>
-- <function> := lambda <name> . <expression>
-- <application> := <expression> <expression>
--
-- ....
--
-- it boils down to this:
--
-- Math notation || Haskell Code
-- --------------------------------
-- Named Function f(x, y) = x^2 + y^2 || f x y = x^2 + y^2
-- Lambda abstraction lambda xy . x^2 + y^2 || \x y -> x^2 + y^2
--
-- So for instance, the lambda abstraction could for example
-- be used like this:
--
-- Prelude> map (\x -> 3 * x) [1..10]
-- [3,6,9,12,15,18,21,24,27,30]
--
-- which uses a lambda abstraction, but is quite a bit of typing.
--
-- Or consider this:
-- Prelude> let foo x = 3*x
-- Prelude> map foo [1..10]
-- [3,6,9,12,15,18,21,24,27,30]
--
-- Or simply:
-- Prelude> map (3*) [1..10]
-- [3,6,9,12,15,18,21,24,27,30]
--
--
-- Syntax:
--
-- Declaration of a function.
--
-- <func> <arg>
--
-- Declaration of a
--
-- Style:
--
import Data.List (union, sort)
fibs = 0 : 1 : zipWith (+) fibs (tail fibs)
fib n = fibs !! n
qsort [] = []
qsort (x:xs) = qsort [y | y <- xs, y < x ] ++ [x] ++
qsort [y | y <- xs, y >= x]
raise' x ys = map (x+) ys
raise x = map (x+)
fac n = product [1..n]
fac' 0 = 1
fac' n = n * fac (n-1)
fac'' = scanl (*) 1 [2..]
boven n k = fac n / (fac k * fac (n-k))
plus :: Int -> Int -> Int
plus a b = a + b
opvolger :: Int -> Int
opvolger = plus 1
-- operator sections
verdubbel :: Num a => a -> a
verdubbel = (2*)
halveer :: Fractional a => a -> a
halveer = (/2.0)
omgekeerde :: Fractional a => a -> a
omgekeerde = (1.0/)
kwadraat :: Integral a => a -> a
kwadraat = (^2)
tweeTotDe :: Integral a => a -> a
tweeTotDe = (2^)
eencijferig :: Integral a => a -> Bool
eencijferig = (<=9)
isNul :: Integral a => a -> Bool
isNul = (==0)
-- map
map' :: (a->b) -> [a] -> [b]
map' functie [] = []
map' functie (x:xs) = functie x : map' functie xs
-- filter
filter' :: (a -> Bool) -> [a] -> [a]
filter' predikaat [] = []
filter' predikaat (x:xs) | predikaat x = x : filter predikaat xs
| otherwise = filter predikaat xs
lengte :: [a] -> Int
lengte [] = 0
lengte (kop:staart) = 1 + lengte staart
sum' :: Num a => [a] -> a
sum' [] = 0
sum' (x:xs) = x + sum' xs
sum'' :: Num a => [a] -> a
sum'' = foldr' (+) 0
product' :: Num a => [a] -> a
product' [] = 1
product' (x:xs) = x * product' xs
product'' :: Num a => [a] -> a
product'' = foldr' (*) 1
and' :: [Bool] -> Bool
and' [] = True
and' (x:xs) = x && and xs
and'' :: [Bool] -> Bool
and'' = foldr' (&&) True
oneven :: Integral a => a -> Bool
oneven x = not (even x)
foldr' :: (a->b->b) -> b -> [a] -> b
foldr' oper expr [] = expr
foldr' oper expr (x:xs) = x `oper` foldr' oper expr xs
foldl' :: (b->a->b) -> b -> [a] -> b
foldl' oper expr [] = expr
foldl' oper expr (x:xs) = foldl oper (expr `oper` x) xs
until' :: (a->Bool) -> (a->a) -> a -> a
until' p f x | p x = x
| otherwise = until' p f (f x)
elem' :: Eq a => a -> [a] -> Bool
elem' e xs = or (map (==e) xs)
elem'' :: Eq a => a -> [a] -> Bool
elem'' e [] = False
elem'' e (x:xs) = x==e || elem e xs
concat' :: [[a]] -> [a]
concat' [] = []
concat' xs = foldr (++) [] xs
concat'' :: [[a]] -> [a]
concat'' [] = []
concat'' (xs:xss) = xs ++ concat xss
powers :: Integral a => a -> [a]
powers n = n : map (* n) (powers n)
divisorsOf :: Integral a => a -> [a]
divisorsOf x = [ d | d <- [1..(x `div` 2)], x `mod` d == 0 ]
getLastElem :: [a] -> a
getLastElem [x] = x
getLastElem (_:xs) = getLastElem xs
getButLast :: [a] -> a
getButLast [x,y] = x
getButLast (_:x:xs) = getButLast (x:xs)
-- lambda x . x = Identity function
identit :: Int -> Int
identit x = x
applyN = (foldr (.) id.) . replicate
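-- applyN n f composes f with itself n times, e.g. applyN 3 (+1) 0 == 3.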
zs :: [(Integer, Integer)]
zs = [(x,y) | x <- naturals, y <- naturals]
ones :: [Integer]
ones = 1 : ones
numsFrom :: Integer -> [Integer]
numsFrom n = n : numsFrom (n+1)
dupeFirstInList s@(x:xs) = x:s
sign x | x > 0 = 1
| x == 0 = 0
       | x < 0 = -1
--- case expression ---
ttake m ys = case (m, ys) of
(0,_) -> []
(_,[]) -> []
(n,x:xs) -> x: take (n-1) xs
--- client-server model ----
reqs = client initial resps
resps = server reqs
client initial ~(rp:resps) = initial : client (next rp) resps
server (req:reqs) = process req: server reqs
initial = 0
next resp = resp
process req = req + 1
--digits :: Num a => a -> Integer
-- prime numbers
primes = 2 : gaps 3 (join [[p*p,p*p+2*p..] | p <- primes'])
where
primes' = 3 : gaps 5 (join [[p*p,p*p+2*p..] | p <- primes'])
join ((x:xs):t) = x : union xs (join (pairs t))
pairs ((x:xs):ys:t) = (x : union xs ys) : pairs t
gaps k xs@(x:t) | k==x = gaps (k+2) t
| True = k : gaps (k+2) xs
{- 'union' function is implemented in Data.list
union (x:xs) (y:ys) = case (compare x y) of
LT -> x : union xs (y:ys)
EQ -> x : union xs ys
GT -> y : union (x:xs) ys
-}
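-- A quick sanity check of the sieve above (expected output):
-- ghci> take 10 primes
-- [2,3,5,7,11,13,17,19,23,29]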
--isLychrel :: Int -> Bool
isLychrel x | hasPalindromes (take 50 $ iterate reverseadd x) = False
| otherwise = True
--hasPalindromes :: [Int] -> Bool
hasPalindromes xs = (length $ filter (==True) $ map isPalindromic xs) > 0
--isPalindromic :: Int -> Bool
isPalindromic x = x == ((read . reverse . show) x :: Integer)
--reverseadd :: Int -> Int
reverseadd x = x + (read . reverse . show $ x :: Integer)
naturals :: (Num a, Enum a) => [a]
naturals = [1..]
lltseqns :: [Integer]
lltseqns = 4 : [ x^2 - 2 | x <- lltseqns ]
--See:https://en.wikipedia.org/wiki/Lucas–Lehmer_primality_test
--isLLTPrimalityTest :: (Integer, Integer) -> Bool
isLLTPrimalityTest (p,x)
| p == 1 = False
| (map (`mod` x) lltseqns) !! (p - 2) == 0 = True
| otherwise = False
mersenneprimes :: [(Int, Integer)]
mersenneprimes = filter isLLTPrimalityTest $ zip naturals mnumbers
mnumbers :: [Integer]
mnumbers = map (subtract 1) $ powers 2
powersoftwo :: [Integer]
powersoftwo = [2^x | x <- [1,2..] ]
--
--todo https://en.wikipedia.org/wiki/Miller–Rabin_primality_test
--
deelDoorTweeLijst :: Float -> [Float]
deelDoorTweeLijst x = x : deelDoorTweeLijst (x/2.0)
data Set a = Set [a]
empty :: Set a
empty = Set []
insert :: Integer -> Set Integer -> Set Integer
insert x (Set xs)
| not (x `elem` xs) = Set (x:xs)
| otherwise = Set xs
-- Record Syntax
--
-- this is an example that doesn't use record syntax
data Point = Point Int Int deriving Show
xval :: Point -> Int
xval (Point x _) = x
yval :: Point -> Int
yval (Point _ y) = y
-- The origin would be the following Point
the_origin = Point 0 0
-- But also consider the following Pointt using Record Syntax, which in some
-- cases is more readable:
data Pointt = Pointt {
xvall :: Double,
yvall :: Double
}
deriving (Show)
the_originn = Pointt {xvall = 2, yvall = 0 }
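-- A quick sketch of what record syntax gives us: the field names double as
-- accessor functions, and there is update syntax for building modified copies.
-- (shiftedPointt is just an illustrative name.)
--
-- ghci> xvall the_originn
-- 2.0
-- ghci> shiftedPointt
-- Pointt {xvall = 2.0, yvall = 1.0}
shiftedPointt :: Pointt
shiftedPointt = the_originn { yvall = yvall the_originn + 1 }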
-- The function <filter''> can be defined in terms of <concat> and <map>:
filter'' :: (a -> Bool) -> [a] -> [a]
filter'' p = concat . map box
where box x | p x = [x]
| otherwise = []
-- "<iterate'> takes a function that maps a type 'a' to a type 'a', and a type 'a'
-- and maps it to a list of type 'a'
iterate' :: (a -> a) -> a -> [a] -- ghci> take 10 $ iterate' (\x -> 2*x) 2
iterate' f x = x : iterate' f (f x) -- [2,4,8,16,32,64,128,256,512,1024]
-- a version of <repeat> :
repeat' :: a -> [a] -- ghci> take 10 $ repeat 2
repeat' x = x : repeat' x -- [2,2,2,2,2,2,2,2,2,2]
-- a non-recursive definition of <repeat> with <iterate'>:
repeat'' :: a -> [a]
repeat'' x = iterate' (\y -> y) x
(\\) :: Eq a => [a] -> [a] -> [a]
xs \\ ys = filter (`notElem` ys) xs
--Define the function <curry'> as the counterpart of <uncurry'>
curry' :: ((a, b) -> c) -> a -> b -> c
curry' f x y = f (x, y)
uncurry' :: (a -> b -> c) -> ((a, b) -> c)
uncurry' f p = f (fst p) (snd p)
--What is the type of map map?
--
--Give a definition of until
--Write the function length as a call to foldr (see the sketch below)
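-- One possible sketch of answers to the exercises above:
-- * map map :: [a -> b] -> [[a] -> [b]]
-- * until is already defined above as until'
-- * length as a call to foldr (lengteViaFoldr is an illustrative name):
lengteViaFoldr :: [a] -> Int
lengteViaFoldr = foldr (\_ n -> 1 + n) 0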
-- the set of natural numbers.
sN = [1..]
ten = take 10
five = take 5
-- Use the functions map and concat instead of a list comprehension to define the
-- following list: [(x,y) | x <- [1..5], y <- [1..x]]
opgave3_7 = concat (map f [1..5])
where f x = map g [1..x]
where g y = (x,y)
pairs :: [a] -> [b] -> [(a,b)]
pairs xs ys = do x <- xs
y <- ys
return (x,y)
--addInt :: Int a => a -> a -> a
--addInt x y = x >>= (\n ->
-- y >>= (\m ->
-- n + m ))
--
add :: Maybe Int -> Maybe Int -> Maybe Int
add mx my = -- Adds two values of type (Maybe Int), where each input value can be Nothing
mx >>= (\x -> -- Extracts value x if mx is not Nothing
my >>= (\y -> -- Extracts value y if my is not Nothing
return (x + y))) -- Wraps value (x+y), returning the sum as a value of type (Maybe Int)
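-- A small usage sketch for add; the result is Nothing as soon as either
-- argument is Nothing:
--
-- ghci> add (Just 1) (Just 2)
-- Just 3
-- ghci> add Nothing (Just 2)
-- Nothing
--
-- The same computation written with do-notation (add' is an illustrative name):
add' :: Maybe Int -> Maybe Int -> Maybe Int
add' mx my = do x <- mx
                y <- my
                return (x + y)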
-- [[[a -> b]]] -> [[[[a] -> [b]]]]
--
mapp :: (a -> b) -> [a] -> [b]
mapp f [] = []
mapp f (x:xs) = f x : mapp f xs
groterDanTien :: Integer -> String
groterDanTien x | x <= 10 = "<=10"
| otherwise = ">10"
slow_fib :: Int -> Integer
slow_fib 0 = 0
slow_fib 1 = 1
slow_fib n = slow_fib (n-2) + slow_fib (n-1)
memoized_fib :: Int -> Integer
memoized_fib = (map fib [0 ..] !!)
where fib 0 = 0
fib 1 = 1
fib n = memoized_fib (n-2) + memoized_fib (n-1)
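-- The trick in memoized_fib: (map fib [0 ..] !!) is a top-level, point-free
-- definition, so the list map fib [0 ..] is shared between calls and each
-- fib n is computed at most once, i.e. memoisation without mutable state.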
f :: (Int -> Int) -> Int -> Int
f mf 0 = 0
f mf n = max n $ mf (div n 2) +
mf (div n 3) +
mf (div n 4)
g x = x^2
| iambernie/tryhaskell | somefunctions.hs | mit | 11,070 | 0 | 15 | 3,512 | 3,877 | 2,099 | 1,778 | 206 | 3 |
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE MultiParamTypeClasses #-}
import Yesod
import Control.Applicative
import Data.Text (Text)
data MFormExample = MFormExample
mkYesod "MFormExample" [parseRoutes|
/ RootR GET
|]
instance Yesod MFormExample
instance RenderMessage MFormExample FormMessage where
renderMessage _ _ = defaultFormMessage
data Person = Person { personName :: Text, personAge :: Text}
deriving Show
textBoxField :: Text -> Field Handler Text
textBoxField label = Field
{ fieldParse = \rawVals _ ->
case rawVals of
[a] -> return $ Right $ Just a
[] -> return $ Right Nothing
, fieldView = \idAttr nameAttr otherAttrs eResult isReq ->
[whamlet|
<div class="form-group">
<label for=#{idAttr} class="control-label col-lg-4">#{label}
<div class="col-lg-8">
<input id=#{idAttr} name=#{nameAttr} *{otherAttrs}
type="text" class="form-control">
|]
, fieldEnctype = UrlEncoded
}
cPasswordField :: Text -> Field Handler Text
cPasswordField label = Field
{ fieldParse = \rawVals _ ->
case rawVals of
[a] -> return $ Right $ Just a
[] -> return $ Right Nothing
, fieldView = \idAttr nameAttr otherAttrs eResult isReq ->
[whamlet|
<div class="form-group">
<label for=#{idAttr} class="control-label col-lg-4">#{label}
<div class="col-lg-8">
<input id=#{idAttr} name=#{nameAttr} *{otherAttrs}
type="password" class="form-control"
data-original-title="Please use your secure password" data-placement="top">
|]
, fieldEnctype = UrlEncoded
}
personForm :: Html -> MForm Handler (FormResult Person, Widget)
personForm extra = do
(nameRes, nameView) <- mreq (textBoxField "Normal Input Field") "" Nothing
(ageRes, ageView) <- mreq (cPasswordField "Password Field") "" Nothing
let personRes = Person <$> nameRes <*> ageRes
let widget = do
[whamlet|
#{extra}
^{fvInput nameView}
^{fvInput ageView}
|]
return (personRes, widget)
getRootR :: Handler Html
getRootR = do
((res, widget), enctype) <- runFormGet personForm
defaultLayout
[whamlet|
<p>Result: #{show res}
<form enctype=#{enctype} class="form-horizontal">
^{widget}
|]
main :: IO ()
main = warp 3000 MFormExample | cirquit/quizlearner | resources/form/mforms_customfield_3.hs | mit | 2,779 | 0 | 12 | 901 | 497 | 271 | 226 | 48 | 2 |
{-# LANGUAGE CPP, NoImplicitPrelude, PackageImports #-}
module Data.Functor.Const.Compat (
module Base
) where
import "base-compat" Data.Functor.Const.Compat as Base
| haskell-compat/base-compat | base-compat-batteries/src/Data/Functor/Const/Compat.hs | mit | 169 | 0 | 4 | 21 | 25 | 19 | 6 | 4 | 0 |
{-# htermination delFromFM :: FiniteMap Ordering b -> Ordering -> FiniteMap Ordering b #-}
import FiniteMap
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/full_haskell/FiniteMap_delFromFM_11.hs | mit | 110 | 0 | 3 | 18 | 5 | 3 | 2 | 1 | 0 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverlappingInstances #-}
module System.Console.Questioner
(
Question(..)
, ChoiceEvent
, charToChoiceEvent
, listPrompt
, checkboxPrompt
, module System.Console.Questioner.ProgressIndicators
)
where
import Control.Applicative ((<$>))
import Control.Concurrent.STM
import Control.Monad (forM_, (>=>))
import Data.List (delete)
import Graphics.Vty (Event (..),
Key (..),
Modifier (..))
import qualified Graphics.Vty as Vty
import System.Console.ANSI (Color (..), ColorIntensity (..), ConsoleIntensity (..), ConsoleLayer (..),
SGR (..),
clearLine,
cursorUpLine,
setSGR)
import System.Console.Questioner.ProgressIndicators
import System.Console.Questioner.Util
import System.Exit
import System.IO (hFlush, stdin,
stdout)
-- The base `Question` class and its instances
-------------------------------------------------------------------------------
class Question q a where
prompt :: q -> IO a
instance {-# OVERLAPPABLE #-} Read a => Question String a where
prompt = putStr . (++ " ") >=> const readLn
instance {-# OVERLAPPING #-} Question String String where
prompt = putStr . (++ " ") >=> const getLine
instance {-# OVERLAPPING #-} Question String (Maybe String) where
prompt = putStr . (++ " ") >=> const getLine >=> helper
where
helper [] = return Nothing
helper s = return $ Just s
instance {-# OVERLAPPING #-} Question (String, (String, String)) String where
prompt (s, (o1, o2)) = do
putStr s
putStr $ " (" ++ o1 ++ "/" ++ o2 ++ ") "
getLine
instance {-# OVERLAPPING #-} Question (String, [String]) String where
prompt = uncurry listPrompt
instance {-# OVERLAPPING #-} Question (String, [String]) [String] where
prompt = uncurry checkboxPrompt
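-- A small usage sketch (illustrative only, not part of the package's API surface):
demo :: IO ()
demo = do
    name <- prompt "What is your name?" :: IO String
    colour <- listPrompt "Pick a colour:" ["red", "green", "blue"]
    putStrLn (name ++ " picked " ++ colour)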
-- Multiple choice prompts
-------------------------------------------------------------------------------
data ChoiceEvent = MoveUp | MoveDown | MakeChoice | ToggleSelection | Exit
deriving(Eq, Ord, Show)
charToChoiceEvent :: Char -> Maybe ChoiceEvent
charToChoiceEvent 'j' = Just MoveDown
charToChoiceEvent 'k' = Just MoveUp
charToChoiceEvent '\n' = Just MakeChoice
charToChoiceEvent ' ' = Just ToggleSelection
charToChoiceEvent _ = Nothing
-- simpleListPrompt options choices = setup $ do
-- inp <- Vty.inputForConfig =<< Vty.standardIOConfig
-- selection <- waitForSelection (Vty._eventChannel inp) 0
-- setSGR []
-- clearScreen
-- setCursorPosition 0 0
-- Vty.shutdownInput inp
-- return selection
-- where
-- setup = withNoBuffering stdin NoBuffering . withNoCursor . withNoEcho
-- numChoices = length choices
-- waitForSelection ichan currentIdx = do
-- clearScreen
-- renderListOptions options def choices currentIdx
-- e <- atomically $ readTChan ichan
-- case e of
-- EvKey KEnter _ -> return $ Just (choices !! currentIdx)
-- EvKey (KChar 'n') [MCtrl] -> onDown
-- EvKey (KChar 'j') _ -> onDown
-- EvKey KDown _ -> onDown
-- EvKey (KChar 'p') [MCtrl] -> onUp
-- EvKey (KChar 'k') _ -> onUp
-- EvKey KUp _ -> onUp
-- EvKey (KChar 'q') _ -> return Nothing
-- EvKey KEsc _ -> return Nothing
-- _ -> waitForSelection ichan currentIdx
-- where
-- onDown = waitForSelection ichan ((currentIdx + 1) `rem` numChoices)
-- onUp = let currentIdx' = if currentIdx == 0
-- then length choices - 1
-- else currentIdx - 1
-- in waitForSelection ichan currentIdx'
listPrompt :: String -> [String] -> IO String
listPrompt question options = setup $ do
putStrLn question
-- selection has structure: (selected item's index, indexed options)
let selection = (0, zip options ([0..] :: [Int]))
mi <- listenForSelection selection
case mi of
Just i -> return (options !! i)
Nothing -> exitSuccess
where
setup = hWithNoBuffering stdin . withNoEcho
listenForSelection selection = do
inp <- Vty.inputForConfig =<< Vty.standardIOConfig
go (Vty._eventChannel inp) selection
where
go c os = do
render os
hFlush stdout
e <- atomically (readTChan c)
case e of
EvKey KEnter _ -> do
makeChoice
return (Just (fst os))
EvKey (KChar 'n') [MCtrl] -> do
clearFromCursorTo $ length $ snd os
go c (updateSelection MoveDown os)
EvKey (KChar 'j') _ -> do
clearFromCursorTo $ length $ snd os
go c (updateSelection MoveDown os)
EvKey KDown _ -> do
clearFromCursorTo $ length $ snd os
go c (updateSelection MoveDown os)
EvKey (KChar 'p') [MCtrl] -> do
clearFromCursorTo $ length $ snd os
go c (updateSelection MoveUp os)
EvKey (KChar 'k') _ -> do
clearFromCursorTo $ length $ snd os
go c (updateSelection MoveUp os)
EvKey KUp _ -> do
clearFromCursorTo $ length $ snd os
go c (updateSelection MoveUp os)
EvKey (KChar 'q') _ ->
return Nothing
EvKey (KChar 'c') [MCtrl] ->
return Nothing
EvKey KEsc _ ->
return Nothing
_ -> do
clearFromCursorTo $ length $ snd os
go c os
makeChoice = forM_ (replicate (length (snd selection)) ())
(const (clearLine >> cursorUpLine 1))
updateSelection MoveUp (i, os) = ((i - 1) `mod` length os, os)
updateSelection MoveDown (i, os) = ((i + 1) `mod` length os, os)
updateSelection _ _ = error "Internal error, key not recognized"
render (s, optionsI) = forM_ optionsI $ \(o, i) ->
if i == s
then do
setSGR [ SetColor Foreground Vivid White
, SetConsoleIntensity BoldIntensity
]
putStr "> "
setSGR [ SetColor Foreground Vivid Cyan
, SetConsoleIntensity NormalIntensity
]
putStrLn $ o
setSGR []
else putStrLn $ " " ++ o
checkboxPrompt :: String -> [String] -> IO [String]
checkboxPrompt question options = setup $ do
putStrLn question
let selection = (0, [], zip options ([0..] :: [Int]))
render selection
is <- listenForSelection selection
return $ map (options !!) is
where
setup = hWithNoBuffering stdin . withNoEcho
listenForSelection :: (Int, [Int], [(String, Int)]) -> IO [Int]
listenForSelection selection@(_, _, s3) = do
inp <- Vty.inputForConfig =<< Vty.standardIOConfig
go (Vty._eventChannel inp) selection
where
go :: TChan Event -> (Int, [Int], [(String, Int)]) -> IO [Int]
go c os@(_, os2, os3) = do
render os
hFlush stdout
e <- atomically (readTChan c)
print e
case e of
EvKey KEnter _ -> do
makeChoice
return os2
EvKey (KChar 'n') [MCtrl] -> do
clearFromCursorTo $ length os3
go c (updateSelection MoveDown os)
EvKey (KChar 'j') _ -> do
clearFromCursorTo $ length os3
go c (updateSelection MoveDown os)
EvKey KDown _ -> do
clearFromCursorTo $ length os3
go c (updateSelection MoveDown os)
EvKey (KChar 'p') [MCtrl] -> do
clearFromCursorTo $ length os3
go c (updateSelection MoveUp os)
EvKey (KChar 'k') _ -> do
clearFromCursorTo $ length os3
go c (updateSelection MoveUp os)
EvKey KUp _ -> do
clearFromCursorTo $ length os3
go c (updateSelection MoveUp os)
EvKey (KChar 'q') _ ->
return []
EvKey (KChar 'c') [MCtrl] ->
return []
EvKey KEsc _ ->
return []
_ -> do
clearFromCursorTo $ length os3
go c os
makeChoice = do
let size = length (s3 :: [(String, Int)])
mlist = replicate size ()
forM_ mlist (const (clearLine >> cursorUpLine 1))
updateSelection MoveUp (i, is, os) = ((i - 1) `mod` length os, is, os)
updateSelection MoveDown (i, is, os) = ((i + 1) `mod` length os, is, os)
updateSelection ToggleSelection (i, is, os) = (i, is', os)
where
is' = if i `elem` is then delete i is else i:is
updateSelection _ _ = error "Internal error, key not recognized"
render (i, is, optionsI) = forM_ optionsI $ \(o, j) -> do
let checkbox = if j `elem` is then "◉ " else "◯ "
if i == j
then do
setSGR [ SetColor Foreground Vivid Cyan ]
putStrLn $ ">" ++ checkbox ++ o
setSGR []
else putStrLn $ " " ++ checkbox ++ o
| yamadapc/haskell-questioner | src/System/Console/Questioner.hs | mit | 10,376 | 0 | 20 | 4,174 | 2,528 | 1,310 | 1,218 | -1 | -1 |
module Main where
import System.IO (print)
main = print solution
revNum :: Integer -> Integer
revNum = read . reverse . show
palindrome :: Integer -> Bool
palindrome n = (show n) == (reverse $ show n)
findPali :: Integer -> Int -> Bool
findPali n iter | iter == 50 = False
| palindrome sumrev = True
| otherwise = findPali sumrev (iter+1)
where sumrev = n + revNum n
solution = foldr (\x acc -> if not (findPali x 1) then acc +1 else acc) 0 [1..9999]
| t00n/ProjectEuler | Euler/Euler55/main.hs | epl-1.0 | 516 | 0 | 11 | 154 | 211 | 109 | 102 | 13 | 2 |
{-
-}
module Main
( main )
where
{- Standard Library Modules Imported -}
import System.Cmd
( system )
import System.Console.GetOpt
( getOpt
, usageInfo
, ArgOrder ( .. )
, OptDescr ( .. )
, ArgDescr ( .. )
)
import System.Environment
( getArgs
, getProgName
)
{- External Library Modules Imported -}
{- Local Modules Imported -}
{- End of Imports -}
data CliFlag =
CliHelp
| CliVersion
deriving Eq
options :: [ OptDescr CliFlag ]
options =
[ Option "h" [ "help" ]
(NoArg CliHelp)
"Print the help message to standard out and then exit"
, Option "v" [ "version" ]
(NoArg CliVersion)
"Print out the version of this program"
]
helpMessage :: String -> String
helpMessage progName =
usageInfo progName options
versionMessage :: String -> String
versionMessage progName =
progName ++ ": This is version 0.001"
-- | The main exported function
main :: IO ()
main = getArgs >>= processOptions
processOptions :: [ String ] -> IO ()
processOptions cliArgs =
case getOpt Permute options cliArgs of
(flags, args, []) ->
processArgs flags args
(_flags, _args, errors) ->
do progName <- getProgName
ioError $ userError (concat errors ++ helpMessage progName)
-- We assume all of the arguments are files to process
processArgs :: [ CliFlag ] -> [ String ] -> IO ()
processArgs flags _files
| elem CliHelp flags = getProgName >>= (putStrLn . helpMessage)
| elem CliVersion flags = getProgName >>= (putStrLn . versionMessage)
| otherwise = do performTests
return ()
performTests :: IO ()
performTests =
do passageResults <- performPassageTests
putStrLn $ hprintPassageResults passageResults
performInternalResults
{- NOTE: these passage tests I could easily translate into HUnit tests
whereby the test is simply that it produces the correct files
-}
type PassageResults = [ PassageResult ]
data PassageResult = PassageResult { commandFailed :: Bool
, cdfAgrees :: Bool
, pdfAgrees :: Bool
}
hprintPassageResults :: PassageResults -> String
hprintPassageResults results =
unlines [ -- unlines $ map hprintPassageResult results
printNumber "Number of tests: " noTests
, printNumber "Number of failed commands: " noFailed
, printNumber "Number of cdf disagreements: " noCDFDisagrees
, printNumber "Number of pdf disagreements: " noPDFDisagrees
]
where
noTests = length results
noFailed = length $ filter commandFailed results
noCDFDisagrees = length $ filter (not . cdfAgrees) results
noPDFDisagrees = length $ filter (not . pdfAgrees) results
printNumber :: String -> Int -> String
printNumber s i = s ++ (show i)
{- Wish to descend into the passage directory and then perform
all the tests there, however let's just get one test working first
We will assume that each test in this directory does produce a
similarly named -cdf and -pdf file.
-}
performPassageTests :: IO PassageResults
performPassageTests =
do _exitCode <- runIpc
okay <- compareFiles "Test/passage/test01-cdf.csv"
"Test/passage/results/test01-cdf.csv"
return [ PassageResult { commandFailed = False
, cdfAgrees = okay
, pdfAgrees = True
}
]
where
-- This should be passed in as an argument maybe
ipcTested = "./dist/build/ipc/ipc"
runIpc = system $ unwords [ ipcTested, "Test/passage/test01.ptree" ]
{-
Very rough and bare bones attempt to compare to files.
Of course we really need to be a bit more lenient with numbers.
-}
compareFiles :: FilePath -> FilePath -> IO Bool
compareFiles f1 f2 =
do contents1 <- readFile f1
contents2 <- readFile f2
return (contents1 == contents2)
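{-
  A sketch of the "more lenient with numbers" comparison hinted at above:
  parse both files as comma-separated rows of Doubles and compare entries
  within a tolerance. Illustrative only; the comma splitting and the idea
  that every field parses as a Double are assumptions about the csv files,
  not something ipc guarantees.
-}
compareFilesApprox :: Double -> FilePath -> FilePath -> IO Bool
compareFilesApprox tolerance f1 f2 =
  do contents1 <- readFile f1
     contents2 <- readFile f2
     return (rowsAgree (parseRows contents1) (parseRows contents2))
  where
  parseRows :: String -> [[Double]]
  parseRows = map (map read . splitOn ',') . lines
  splitOn :: Char -> String -> [String]
  splitOn c s = case break (== c) s of
                  (field, [])       -> [field]
                  (field, _ : rest) -> field : splitOn c rest
  rowsAgree :: [[Double]] -> [[Double]] -> Bool
  rowsAgree rs1 rs2 =
    length rs1 == length rs2 && and (zipWith rowAgrees rs1 rs2)
  rowAgrees :: [Double] -> [Double] -> Bool
  rowAgrees r1 r2 =
    length r1 == length r2 &&
    and (zipWith (\x y -> abs (x - y) <= tolerance) r1 r2)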
{-
The internal results will try to do it the way I initially thought
of, that is to have the ptree file and result described here and we
just compare the evaluated ptree results to the one described here.
Downside is that we may need to write a parser for ptree results.
I think in the end both styles of tests are a good thing.
-}
performInternalResults :: IO ()
performInternalResults = return () | allanderek/ipclib | Test/Main.hs | gpl-2.0 | 4,441 | 0 | 14 | 1,236 | 804 | 427 | 377 | 86 | 2 |
{-# LANGUAGE Arrows #-}
{-# LANGUAGE FlexibleContexts #-}
module Text.Properties.Node where
import Text.Properties.Types
import Data.Convertible.Text (ConvertSuccess, cs)
import qualified Data.Map as Map
import Text.XML.HXT.Core
-- Convert an XML node format to a set of properties.
-- nodeToProperties
nodeToProperties :: ConvertSuccess a String => a -> IO Properties
nodeToProperties inp = do
tree <- runX $ constA (cs inp) >>> decode
mapping <- runX $ constL tree >>> decodeMap
[kind] <- runX $ constL tree >>> decodeKind
return $ Map.fromList $ ("_kind", kind) : mapping
propertiesToNode :: ConvertSuccess String a => Properties -> IO a
propertiesToNode props = do
let kind = props Map.! "_kind"
let props' = "_kind" `Map.delete` props
text <- runX $ encoder kind (Map.toList props')
return $ cs $ header ++ concat text
decode :: IOSArrow String XmlTree
decode = proc input -> do
readFromString [] -< input
decodeMap :: ArrowXml a => a XmlTree (String, String)
decodeMap = proc input -> do
nodes <- hasName "/" /> hasName "node" /> hasName "entry" -< input
keys <- getAttrValue "key" -< nodes
text <- deep isText >>> getText -< nodes
returnA -< (keys, text)
decodeKind :: ArrowXml a => a XmlTree String
decodeKind = proc input -> do
node <- hasName "/" /> hasName "node" -< input
getAttrValue "kind" -< node
header :: String
header = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
encoder :: ArrowXml a => String -> [(String, String)] -> a XmlTree String
encoder kind props =
selem "root" [] >>> root [] [
mkElement (mkName "node") (constA kind >>> attr "kind" mkText) (
constL props >>> encodePairs)
] >>>
writeDocumentToString []
encodePairs :: ArrowXml a => a (String, String) XmlTree
encodePairs =
mkelem "entry" [ attr "key" (arr fst >>> mkText) ] [ arr snd >>> mkText ]
| d3zd3z/harchive | src/Text/Properties/Node.hs | gpl-2.0 | 1,865 | 3 | 12 | 375 | 651 | 324 | 327 | 44 | 1 |
--This module contains code for rendering DFDs to Verilog
module RenderVerilog (dfdToVerilog) where
import Control.Monad
import Data.Either
import Data.List
import Text.Printf
import Debug.Trace
import Common
import DfdDef
--IDEA: implement closure support using partial application. This simplifies rewriting applications and avoids issues with nested closures.
--If there is an (undirected) loop in the graph, all nodes above it are defined and assigned multiple times.
--Therefore, we tag each node definition with a UID, to eliminate duplicate definitions.
--We store the definition and assignment separately, because Modelsim requires the definition to precede all uses.
--vModDeps contains the definition of any modules used by this node. (Needed because modules can't be nested.)
--This functional approach is cleaner, but less efficient than the monadic/stateful approach.
data VNodeDef = VNodeDef{
vNodeId :: NodeId,
vDef :: String,
vAssign :: String,
vModDeps :: String
} deriving (Show, Eq)
--A tail-recursive DFD will be a tree of IFs, where each leaf is a recursive call or a non-recursive expression.
--Returns a list of 2-tuples representing each leaf.
--The first element of the tuple is a list of conditions, where left conditions must be negated.
-- Earlier conditions have higher precedence.
--The second element of the tuple is:
-- Left: a non-recursive expression
-- Right: a list of arguments for the recursive call
--The third element of the tuple is a list of 2-tuples, which specify the index of lists which require elements and how many are required
type RecursiveCase = ([Either DNode DNode], Either DNode [DNode], [(NodeId, Int)])
--Conditions for case to be valid. Left nodes need to be negated.
recConds :: RecursiveCase -> [Either DNode DNode]
recConds (x, _, _) = x
--Returns False if the argument is a base case
isRecursive :: RecursiveCase -> Bool
isRecursive (_, x, _) = isRight x
--NodeId of root node for base case
baseRoot :: RecursiveCase -> DNode
baseRoot (_, Left x, _) = x
--Arguments for the recursive call
recArgs :: RecursiveCase -> [DNode]
recArgs (_, Right x, _) = x
dfdToVerilog :: DProgram -> String
dfdToVerilog dfds = concatMap renderFunc dfds
--Returns True if f calls target. A function which calls itself is recursive.
--This is not transitive. i.e. if f1 calls f2 and f2 calls f3, then f1 does not call f3.
--Otherwise, only the calls directly within f are considered. i.e. self-recursion
--NOTE: need to write a separate function to test for indirect calls, since we recurse infinitely without a stack
--TODO: need to add check for arguments, once first class functions are in use
fCalls :: DFD -> DFD -> Bool
fCalls target f = eCalls target $ dfdRoot f
--Returns True if an expression or sub-expression calls target.
eCalls :: DFD -> DNode -> Bool
eCalls target e = dfold (\a -> \b -> a || eCalls' b) False e where
eCalls' :: DNode -> Bool
eCalls' (DFunctionCall _ fc _ _) = (dfdID fc == dfdID target)
eCalls' _ = False
--Assumes function is self-recursive - use `calls` to check this.
recursiveCases :: DFD -> [RecursiveCase]
recursiveCases f = recExpr [] $ dfdRoot f where
recExpr :: [Either DNode DNode] -> DNode -> [RecursiveCase]
recExpr conds node
| isIf node = (recExpr (trueCond:conds) trueBranch) ++ (recExpr (falseCond:conds) falseBranch)
| isFunctionCall node = return (conds', Right $ callArgs node, listConds)
| otherwise = return (conds', Left node, listConds) --is expression
where
[cond, trueBranch, falseBranch] = callArgs node
trueCond = Right cond
falseCond = Left cond
(conds', listConds) = partitionEithers $ map extractListConds conds
extractListConds :: Either DNode DNode -> Either (Either DNode DNode) (NodeId, Int)
extractListConds (Right (DFunctionCall _ DFD{dfdRoot = root} [eCount, list] _))
| (isBuiltin root && builtinOp root == ListMinAvail) = Right (nodeID list, getConstant eCount)
extractListConds x = Left x
--Render combinatorial functions.
--TODO: add assign statements to link ready/done signals
renderFunc :: DFD -> String
renderFunc dfd@(DFD dfdID name _ _ _ root)
| fCalls dfd dfd = renderRecursiveFunc dfd $ recursiveCases dfd
| otherwise = unlines [concatMap vModDeps defs,
printf "module dfd_%i(" dfdID,
indent [
printf "//%s (%i args) [dfd_%i]" name (length args) dfdID,
"input clock, input ready, output done,",
joinMap "\n" (\(i, n) -> renderArg "input" "node" True "," (parValue $ safeParallelism n) (i, (nodeID n, nodeType n))) (zip [0..] args),
rstrip . chopComma $ renderArg "output" "node" True ", " par (0, (nodeID root, retType)),
");"
],
indent $ map (printf "wire node_%i_done; //arg" . nodeID) args,
indent . lines $ concatNodes defs',
indent $ map (printf "assign node_%i_done = ready;" . nodeID) args,
'\t' : doneAssign,
"endmodule\n"
]
where
args = trueArgs dfd
par = if isList $ returnType dfd
then getParallelism root
else 1
defs = renderNode root
retType = selectType [returnType_ dfd, nodeType root]
doneAssign = printf "assign done = node_%i_done;" $ nodeID root
--need to filter out definitions from the root node that are already present in the module def
argDefs = map strip . lines $ concat [ concatMap (\(i, n) -> renderArg "" "node" True "\n" par (i, (nodeID n, nodeType n))) (zip [0..] args),
renderArg "" "node" True "\n" par (0, (nodeID root, retType))
]
argIDs = map nodeID args
(rootDefs, otherDefs) = partition (\n -> vNodeId n == nodeID root) defs
defs' = (map filterRootDef rootDefs) ++ (filter (\n -> not $ vNodeId n `elem` argIDs) otherDefs)
filterRootDef :: VNodeDef -> VNodeDef
filterRootDef v = v{vDef = def'} where
def = vDef v
def' = unlines . filterMap containsArg ("//" ++) . lines $ def
containsArg :: String -> Bool
containsArg line = any2 (\arg suf -> isInfixOf (arg ++ suf) line) argDefs [" ", ",", ";"]
--There are two trees of evaluation for a tail-recursive function:
-- -the base case, where the root is the resulting expression.
-- -the recursive case, where the root is the recursive call.
--Each element in the list of recursive cases will correspond to one of these.
--
--structure: input -> comb logic -> registers -> comb logic ...
--NOTE: The 'combinatorial' logic may include calls to synchronous functions, so it's not actually combinatorial.
renderRecursiveFunc :: DFD -> [RecursiveCase] -> String
renderRecursiveFunc dfd@(DFD dfdID name args _ _ root) recCases
| isList retType = renderRecursiveListFunc dfd recCases
| otherwise = res
where
retType = selectType [returnType_ dfd, nodeType root]
res = unlines [ --Combinatorial logic
printf "module dfd_%i_cmb(" dfdID,
indent [
printf "//Input args: %s (%i args)" name $ length args,
"input clock,",
"input ready,",
"output reg done,",
"output reg recurse,",
unlines . map (renderArg "input" "node" True "," 1) $ zip [0..] args,
unlines . map (renderArg "output reg" "outArg" False "," 1) $ zip [0..] args,
printf "output reg [7:0] result",
");",
"",
--Define valid_%i, ready_%i, done_%i for each case
"//Control signals & logic",
concatNodes $ concatMap defineRecCase $ zip [0..] recCases,
"",
--Muxing logic
"always @(*) begin",
indent . lines . concatMap ((++ "else ") . selectRecCase) $ zip [0..] recCases,
indent [
"begin",
indent [
"//This should never happen, but is needed to remove latches",
"recurse = 1'bX;",
"done = 1'bX;",
"result = 8'dX;"
],
indent . lines $ concatMap nullArg [0 .. length args - 1],
"end"
],
"end"
],
"endmodule\n",
--Synchronous logic
printf "module dfd_%i(" dfdID,
indent [
printf "//%s (%i args) [dfd_%i]" name (length args) dfdID,
"input clock,",
"input ready,",
"output done,",
unlines $ map (renderArg "input" "inArg" False "," 1) (zip [0..] args),
"output [7:0] result",
");",
"",
"wire advance, recurse;",
"reg wasReady;",
unlines $ map (renderArg "reg" "nextArg" False ";" 1) (zip [0..] args),
unlines $ map (renderArg "wire" "outArg" False ";" 1) (zip [0..] args),
"",
let
inArgs = unlines . map (renderArg "" "nextArg" False "," 1) $ zip [0..] args
outArgs = unlines . map (renderArg "" "outArg" False "," 1) $ zip [0..] args
in printf "dfd_%i_cmb cmb(clock, wasReady, advance, recurse,\n%s%s result);" dfdID inArgs outArgs,
"assign done = ready & advance & ~recurse;",
"",
"always @(posedge clock) begin",
indent [
"wasReady <= ready;",
"",
"if(ready & ~wasReady) begin",
--nextArgs <= inArgs
let
f i = printf "%s_%i <= %s_%i;" "nextArg" i "inArg" i
in indent $ map f [0 .. length args - 1],
"end",
"",
"if(wasReady & ~done) begin",
--nextArgs <= outArgs
let
f i = printf "%s_%i <= %s_%i;" "nextArg" i "outArg" i
in indent $ map f [0 .. length args - 1],
"end"
],
"end"
],
"endmodule\n"
]
--Define control signals for a recursive case
--TODO: assuming that the logic for selecting the case is combinatorial - need to add explicit check for this
defineRecCase :: (Int, RecursiveCase) -> [VNodeDef]
defineRecCase (i, rCase) = (VNodeDef (-i) defs assigns "") : vNodes ++ auxNodes where
defs = unlines [
printfAll "wire valid_%i, ready_%i, done_%i;" i,
outDef
]
assigns = unlines [ --TODO: set ready/done
printf "assign valid_%i = %s;" i $ joinMap " & " boolNode $ recConds rCase,
printf "assign ready_%i = 1;" i, --treating case selection logic as combinatorial
printf "assign done_%i = %i;" i $ (fromEnum . not . isRecursive) rCase,
outAss
]
(outDef, outAss) = if isRecursive rCase
then map2 unlines unlines . splitTuple . zipWith (setArg i) [0..] $ recArgs rCase
else (printf "wire [7:0] result_%i;" i,
printf "assign result_%i = node_%i;" i (nodeID . baseRoot $ rCase))
vNodes = concatMap renderNode . both $ recConds rCase
auxNodes = if isRecursive rCase
then concatMap renderNode $ recArgs rCase
else renderNode $ baseRoot rCase
boolNode :: Either DNode DNode -> String
boolNode (Left n) = printf "~node_%i" $ nodeID n
boolNode (Right n) = printf "node_%i" $ nodeID n
--i is the arg index, j is the recursive case index, a is the node
setArg :: Int -> Int -> DNode -> (String, String)
setArg j i a = (def, ass) where
def = printf "wire [7:0] outArg_%i_%i;" j i
ass = printf "assign outArg_%i_%i = node_%i;" j i $ nodeID a
--multiplexor logic
selectRecCase :: (Int, RecursiveCase) -> String
selectRecCase (i, rCase) = unlines [
printf "if(valid_%i) begin" i,
indent [
printf "recurse = %i;" $ fromEnum $ isRecursive rCase,
printf "done = done_%i;" i,
let
setRes = printf "result = result_%i;\n" i
in if isRecursive rCase
then ("result = 8'hXX;\n" ++) $ (concatMap (selectArg i) [0 .. length args - 1])
else (setRes ++) $ (concatMap nullArg [0 .. length args - 1])
],
"end"
] where
--outArg_i_j is argument j for recursive case i; recursive case is innermost
selectArg :: Int -> Int -> String
selectArg i j = printf "outArg_%i = outArg_%i_%i;\n" j i j
--WIP: implement
renderRecursiveListFunc :: DFD -> [RecursiveCase] -> String
renderRecursiveListFunc dfd@(DFD dfdID name args _ _ root) recCases = ""
--Helper method for removing latches
nullArg :: Int -> String
nullArg i = printf "outArg_%i = 8'hXX;\n" i
--Defines the wires for a node, but doesn't connect them to anything
defineNode :: NodeId -> DType -> Int -> String
defineNode nodeID t par = defineNodeX (printf "node_%i" nodeID) t par
defineNodeX :: String -> DType -> Int -> String
defineNodeX label (DList t) par = res where
res = unlines [ printf "wire %s_req, %s_ack;" label label,
unlines . parEdge par $ printf "wire %s %s_value_%i;" (scalarVType t) label,
unlines . parEdge par $ printf "wire %s_value_%i_valid;" label,
printf "wire %s_done;" label
]
defineNodeX label t _ = unlines [line1, line2] where
line1 = printf "wire %s %s;" (scalarVType t) label
line2 = printf "wire %s_done;" label
--Generates the assign statements needed to connect two nodes. LHS is set to RHS.
assignNode :: DNode -> DNode -> String
assignNode lhs rhs
| isList (nodeType lhs) || isList (nodeType rhs) = unlines res
where
par = getParallelism rhs
lhsID = nodeID lhs
rhsID = nodeID rhs
res = [ printf "assign node_%i_req = node_%i_req;" rhsID lhsID,
printf "assign node_%i_ack = node_%i_ack;" lhsID rhsID,
unlines . parEdge par $ \i -> printf "assign node_%i_value_%i = node_%i_value_%i;" lhsID i rhsID i,
unlines . parEdge par $ \i -> printf "assign node_%i_value_%i_valid = node_%i_value_%i_valid;" lhsID i rhsID i,
printf "assign node_%i_done = node_%i_done;" lhsID rhsID
]
assignNode lhs rhs = unlines [line1, line2] where
line1 = printf "assign node_%i = node_%i;" (nodeID lhs) (nodeID rhs)
line2 = printf "assign node_%i_done = node_%i_done;" (nodeID lhs) (nodeID rhs)
--Defines an argument to a Verilog module.
-- io: the storage class. e.g. "input"/"output". Type will be omitted if storage class is blank.
-- prefix. e.g. "node".
-- useNodeId: whether the numeric identifier used will be the node ID or the argument index
-- par: the degree of parallelism, if the argument is a list
-- (i, (ai, t)): i is the arg index, ai is the node ID, t is the type
-- tail: a string to append to the end of the result. Useful for semicolons, etc.
renderArg :: String -> String -> Bool -> String -> Int -> (Int, (NodeId, DType)) -> String
renderArg io prefix useNodeId tail par (i, (argID, DList t)) = concat lines where
lines = [ printf "%s %s_%i_req%s" invIo prefix index tail,
printf "%s %s_%i_ack%s" io prefix index tail
] ++ parEdge par (\pI -> printf "%s %s %s_%i_value_%i%s" io hwType prefix index pI tail)
++ parEdge par (\pI -> printf "%s %s_%i_value_%i_valid%s" io prefix index pI tail)
hwType = if io == ""
then ""
else scalarVType t
index = if useNodeId
then argID
else i
invIo = maybe io id (lookup io [("input", "output"), ("output", "input")])
renderArg io prefix useNodeId tail _ (i, (argID, t)) = printf "%s %s %s_%i%s" io hwType prefix index tail where
hwType = if io == ""
then ""
else scalarVType t
index = if useNodeId
then argID
else i
renderNode :: DNode -> [VNodeDef]
renderNode (DLiteral nodeID value) = return $ VNodeDef nodeID def ass "" where
def = defineNode nodeID UndefinedType 1
ass = unlines [ printf "assign node_%i = %i;" nodeID value,
printf "assign node_%i_done = ready;" nodeID
]
--arguments which are lists are already defined
renderNode (DArgument nodeID (DList _) _) = []
--argument - wire/reg is defined with function, but need to manually specify ready/done signals
renderNode (DArgument nodeID _ _) = return $ VNodeDef nodeID def ass "" where
def = printf "wire node_%i_done;\n" nodeID
ass = printf "assign node_%i_done = ready;\n" nodeID
renderNode var@(DVariable varID t val) = valDef ++ return (VNodeDef varID def ass "") where
valDef = renderNode val
def = defineNode varID t $ getParallelism val
ass = assignNode var val
renderNode (DFunctionCall appID f args p)
| dfdID f == (-1) = aDefs ++ return (renderBuiltin appID (builtinOp $ dfdRoot f) par args)
| otherwise = aDefs ++ return (VNodeDef appID def ass "")
where
par = parValue p
def = unlines [ defineNode appID (returnType f) par,
printf "wire node_%i_ready;" appID
]
ass = unlines [ printf "assign node_%i_ready = %s;" appID ready,
printf "dfd_%i fcall_%i(clock, node_%i_ready, node_%i_done, %s %s);\n" fID appID appID appID aAsses resAss
]
fID = dfdID f
args' = if isBuiltinMacro $ dfdRoot f --don't render the lambda
then tail args
else args
ready = joinMap " & " (printf "node_%i_done" . nodeID) args'
aDefs = concatMap renderNode args'
aAsses = concatMap (argEdge par) args'
resAss = chopComma . argEdge par $ DArgument appID (returnType f) p
renderNode elem@(DTupleElem elemID tupleIndex tuple) = (renderNode tuple) ++ return (VNodeDef elemID def ass "") where
par = getParallelism tuple
tupleID = nodeID tuple
def = defineNode elemID (nodeType elem) par
ass = unlines $ case tupleIndex of
0 -> [ printf "assign node_%i = node_head_%i;" elemID tupleID,
printf "assign node_%i_done = node_%i_done;" elemID tupleID
]
1 -> [ printf "assign node_tail_%i_req = node_%i_req;" tupleID elemID,
printf "assign node_%i_ack = node_tail_%i_ack;" elemID tupleID,
unlines . parEdge par $ \i -> printf "assign node_%i_value_%i = node_tail_%i_value_%i;" elemID i tupleID i,
unlines . parEdge par $ \i -> printf "assign node_%i_value_%i_valid = node_tail_%i_value_%i_valid;" elemID i tupleID i,
printf "assign node_%i_done = node_%i_done;" elemID tupleID
]
2 -> [ printf "assign node_%i = node_head_%i_valid;" elemID tupleID,
printf "assign node_%i_done = node_%i_done;" elemID tupleID
]
_ -> error $ "Invalid tuple index: " ++ show tupleIndex
--List literals are handled by generating a module to implement the list interface
renderNode (DListLiteral listID items p) = (VNodeDef listID def ass mod):elemDefs where
par = parValue p
def = unlines [ defineNode listID (DList UndefinedType) par,
printf "wire node_%i_ready;" listID
]
ass = unlines [ printf "assign node_%i_ready = %s;" listID readySig,
printfAll "listLiteral_%i listLit_%i(clock, node_%i_ready," listID,
chopComma $ indent [
concatMap (argEdge par) items,
argEdge par (DArgument listID (DList UndefinedType) p)
],
");",
genericDone listID items
]
readySig = if null items
then "ready"
else joinMap " & " (printf "node_%i_done" . nodeID) items
elemDefs = concatMap renderNode items
elemIndices = [0 .. length items - 1]
mod = if items == []
then emptyMod
else mod'
emptyMod = unlines [
printf "module listLiteral_%i(" listID,
indent [
"input clock,",
"input ready,",
"input req,",
"output reg ack,",
unlines . parEdge par $ printf "output [7:0] value_%i,",
chopComma . unlines . parEdge par $ printf "output value_%i_valid,"
],
");\n",
indent [
"reg lastReq;",
"",
unlines . parEdge par $ printf "assign value_%i = 8'hXX;",
chopComma . unlines . parEdge par $ printf "assign value_%i_valid = 0;",
"",
"always @(posedge clock) begin",
"\tack <= ready & ~lastReq & req;",
"\tlastReq <= req;",
"end"
],
"endmodule\n"
]
parCases = length items `div` par
mod' = unlines [printf "module listLiteral_%i(" listID,
indent [
"input clock,",
"input ready,",
unlines $ map (printf "input [7:0] x_%i,") elemIndices,
"input req,",
"output reg ack,",
unlines . parEdge par $ printf "output reg [7:0] value_%i,",
chopComma . unlines . parEdge par $ printf "output reg value_%i_valid,"
],
");\n",
indent [
"reg dummy;",
"reg lastReq;",
"reg [7:0] index;",
"",
"always @(*) begin",
indent [
"case (index)",
indent [
concatMap elemCase [0 .. parCases],
"default: begin",
indent . parEdge par $ printf "value_%i = 8'hFF;",
chopComma . indent . parEdge par $ printf "value_%i_valid = 0;",
"end"
],
"endcase"
],
"end\n",
"always @(posedge clock) begin",
indent [
"lastReq <= req;\n",
"if(~ready) begin",
indent [
"index <= 8'hFF;",
"ack <= 0;"
],
"end else if(req & ~lastReq) begin",
indent [
printf "if(index < 8'd%i || index == 8'hFF) begin" $ parCases + 1,
"\tindex <= index + 1;",
"end",
"ack <= 1;"
],
"end else begin",
indent [
"ack <= 0;"
],
"end"
],
"end"
],
"endmodule\n"
]
--i is the index of the iteration - value_0 will be set to par*i
elemCase :: Int -> String
elemCase i = res where
res = unlines [
printf "%i: begin" i,
indent $ parEdge par valAssign,
"end"
]
offset = par*i
valAssign :: Int -> String
valAssign j
| isValid = printf fmt1 j (offset+j) j
| otherwise = printf fmt2 j j
where
isValid = (offset + j) < (length items)
fmt1 = unlines [
"value_%i = x_%i;",
"value_%i_valid = 1'b1;"
]
fmt2 = unlines [
"value_%i = 8'hFF;",
"value_%i_valid = 1'b0;"
]
renderNode n = error $ "Unable to render:\n" ++ (show n)
argEdge :: Int -> DNode -> String
argEdge p n = argEdgeX "node" n p
argEdgeX :: String -> DNode -> Int -> String
argEdgeX lbl a p = renderArg "" lbl True ", " p (0, (nodeID a, nodeType a))
--Helper method for implementing the parallel list interface
parEdge :: Int -> (Int -> String) -> [String]
parEdge par f = map f [0 .. par - 1]
--Returns the parallelism of an arbitrary node, traversing the DFD if necessary
getParallelism :: DNode -> Int
getParallelism node
| hasParallelism node = parValue $ parallelism node
getParallelism (DVariable _ _ value) = getParallelism value
getParallelism node@DArgument{} = parValue $ parallelism node
getParallelism node@DFunctionCall{} = parValue $ parallelism node
getParallelism node@DTupleElem{} = getParallelism $ tuple node
getParallelism node = error $ "getParallelism: unknown node " ++ show node
renderBuiltin :: NodeId -> BuiltinOp -> Int -> [DNode] -> VNodeDef
renderBuiltin resID BitwiseNot _ args@(arg:[]) = VNodeDef resID def (ass ++ doneAs) "" where
def = defineNode resID (nodeType arg) 1
ass = printf "assign node_%i = ~node_%i;\n" resID (nodeID arg)
doneAs = genericDone resID args
renderBuiltin resID (BinaryOp ":") par args@(a0:a1:[])
| par == 1 = VNodeDef resID def (ass ++ doneAs) ""
| otherwise = error "Cons does not support parallel access yet."
where
def = defineNode resID (nodeType a1) par
ass = concat [
printf "Cons cons_%i(clock, node_%i_done, node_%i, " resID resID a0ID,
argEdge par (DArgument a1ID (DList UndefinedType) (InferredPar par)),
argEdge par (DArgument resID (DList UndefinedType) (InferredPar par)), --TODO: implement parallelism
");\n"
]
a0ID = nodeID a0
a1ID = nodeID a1
doneAs = genericDone resID args
renderBuiltin resID (BinaryOp "++") par args@(a0:a1:[])
| par == 1 = VNodeDef resID def ass ""
| otherwise = error "Concat does not support parallel access yet."
where
def = defineNode resID (nodeType a0) par
a0ID = nodeID a0
a1ID = nodeID a1
ass = unlines [
printf "Concat concat_%i(clock, node_%i_done," resID resID,
argEdge par (DArgument a0ID (DList UndefinedType) (InferredPar par)), --TODO: implement parallelism
argEdge par (DArgument a1ID (DList UndefinedType) (InferredPar par)),
argEdge par (DArgument resID (DList UndefinedType) (InferredPar par)),
");",
printf "assign node_%i_done = node_%i_done & node_%i_done;" resID a0ID a1ID
]
renderBuiltin resID (BinaryOp op) _ args@(a0:a1:[]) = VNodeDef resID def (ass ++ doneAs) "" where
def = defineNode resID (nodeType a0) 1
ass = printf "assign node_%i = node_%i %s node_%i;\n" resID (nodeID a0) op (nodeID a1)
doneAs = genericDone resID args
renderBuiltin resID Ternary par args@(cond:tExp:fExp:[])
| par == 1 = VNodeDef resID def (ass ++ doneAs) ""
| otherwise = error "Parallel ternary operator is not supported"
where
resType = headOr UndefinedType $ filter (/= UndefinedType) [nodeType tExp, nodeType fExp]
def = defineNode resID (nodeType tExp) par
ass = case resType of
DList _ -> listAss
_ -> scalarAss
[cID, tID, fID] = map nodeID [cond, tExp, fExp]
scalarAss = printf "assign node_%i = node_%i ? node_%i : node_%i;\n" resID cID tID fID
listTerms = map (++ ", ") ["node_%i_req", "node_%i_ack", "node_%i_value_0", "node_%i_value_0_valid"]
listAss = concat [ printf "ListMux lm_%i(node_%i_done, node_%i," resID cID cID,
chopComma $ concatMap genMuxLine [resID, tID, fID],
");\n"
]
genMuxLine s = rstrip $ "\n\t" ++ concatMap (\fmt -> printf fmt s) listTerms
doneAs = genericDone resID args
renderBuiltin resID EnumList par args@(min:step:max:[]) = VNodeDef resID def (ass ++ doneAs) mod where
--bounded
def = defineNode resID (DList UndefinedType) par
(ass, mod) = renderListGen resID min step (Just max) par
doneAs = genericDone resID [min, step, max]
renderBuiltin resID EnumList par args@(min:step:[]) = VNodeDef resID def (ass ++ doneAs) mod where
--unbounded
def = defineNode resID (DList UndefinedType) par
(ass, mod) = renderListGen resID min step Nothing par
doneAs = genericDone resID [min, step]
renderBuiltin resID Decons par [list]
| par == 1 = VNodeDef resID def ass ""
| otherwise = error "Decons does not support parallel access yet."
where
listID = nodeID list
def = concat [ defineNodeX (printf "node_head_%i" resID) UndefinedType 1,
defineNodeX (printf "node_tail_%i" resID) (DList UndefinedType) par,
printf "wire node_head_%i_valid;\n" resID,
printf "wire node_%i_done;\n" resID
]
ass = concat [ printf "Decons decons_%i(clock, node_%i_done, node_%i_done,\n\t" listID listID resID,
strip $ argEdge par list,
"\n\t",
lstrip $ argEdgeX "node_head" (DArgument resID UndefinedType NoPar) 1,
lstrip $ printf "node_head_%i_valid,\n\t" resID,
lstrip . chopComma $ argEdgeX "node_tail" (DArgument resID (DList UndefinedType) (InferredPar par)) par,
");\n"
]
renderBuiltin resID MapMacro par [lambda, list] = VNodeDef resID def ass mod where
listID = nodeID list
listType = nodeType list
f = functionCalled lambda
fID = dfdID f
def = defineNode resID listType par
ass = concat [
printf "Map_%i map_%i(clock, node_%i_done, node_%i_done,\n\t" resID resID listID resID,
lstrip $ argEdge par list,
"\n\t",
lstrip . chopComma . argEdge par $ DArgument resID listType (InferredPar par),
");\n"
]
inputType = scalarVType . snd . head $ dfdArgs f
outputType = scalarVType $ returnType f
mod = unlines [
printf "module Map_%i(" resID,
indent [
--referring to args as list_0 and list_1 for input and output lists, respectively
"input clock, input ready, output done,",
"output listIn_req_actual,",
"input listIn_ack,",
unlines . parEdge par $ printf "input %s listIn_value_%i," inputType,
unlines . parEdge par $ printf "input listIn_value_%i_valid,",
"input listOut_req,",
"output reg listOut_ack,",
unlines . parEdge par $ printf "output reg %s listOut_value_%i," outputType,
chopComma . unlines . parEdge par $ printf "output reg listOut_value_%i_valid,",
");",
"",
--waitingForInput: waiting for a new value from listIn
--processingValues: waiting for f(listIn_value) to complete
--endOfInput: the input list has been exhausted
--funcStalling: the function is stalling, waiting for the consumer to take the next value
--consumerWaiting: the consumer of the output list is waiting for a value
--consumerServed: the consumer of the output list has been given a value
"reg wasReady;",
"reg listIn_req;",
"reg waitingForInput, processingValuesActual, endOfInput;",
"reg consumerServed;",
"wire consumerWaiting, processingValues, valuesProcessed, funcStalling;",
printf "wire %s %s;" outputType $ joinMap ", " id $ parEdge par (printf "nextVal_%i"),
printf "wire %s;" $ joinMap ", " id $ parEdge par (printf "nextVal_%i_valid"),
printf "wire %s;" $ joinMap ", " id $ parEdge par (printf "lambda_%i_done"),
unlines . parEdge par $ \i -> printf "dfd_%i lambda_%i(clock, processingValues, lambda_%i_done, listIn_value_%i, nextVal_%i);" fID i i i i,
"",
"assign done = ready;",
"assign listIn_req_actual = listIn_req | (ready & ~wasReady);",
"assign consumerWaiting = listOut_req & ~consumerServed;",
"assign funcStalling = ~waitingForInput & ~processingValues & ~endOfInput;",
"assign processingValues = processingValuesActual | (waitingForInput & listIn_ack & ~endOfInput);",
printf "assign valuesProcessed = %s;" . joinMap " & " id . parEdge par $ printf "lambda_%i_done",
"",
"always @(posedge clock) begin",
indent [
"wasReady <= ready;",
"",
"if(ready) begin",
indent [
"if(~wasReady | (funcStalling & consumerServed)) begin",
"\tlistIn_req <= 1;",
"\twaitingForInput <= 1'b1;",
"end",
"",
"if(waitingForInput & listIn_ack) begin",
indent [
"waitingForInput <= 1'b0;",
"",
printf "if(listIn_value_0_valid) begin",
"\tprocessingValuesActual <= 1'b1;",
"end else begin",
"\tendOfInput <= 1'b1;",
"end"
],
"end",
"",
"if((valuesProcessed | endOfInput) & consumerWaiting) begin",
indent [
unlines . parEdge par $ printfAll "listOut_value_%i <= nextVal_%i;",
unlines . parEdge par $ printfAll "listOut_value_%i_valid <= listIn_value_%i_valid & ~endOfInput;",
"listOut_ack <= 1'b1;",
"consumerServed <= 1'b1;",
"if(~endOfInput) begin",
indent [
"if (listIn_req) begin",
"\tlistIn_req <= 1'b0;",
"\tprocessingValuesActual <= 1'b0;",
"end else begin",
"\tlistIn_req <= 1'b1;",
"\twaitingForInput <= 1'b1;",
"end"
],
"end"
],
"end",
"",
"if(consumerServed)",
"\tlistOut_ack <= 1'b0;",
"",
"if(~listOut_req)",
"\tconsumerServed <= 1'b0;",
""
],
"end else begin",
indent [
"waitingForInput <= 1'b0;",
"processingValuesActual <= 1'b0;",
"endOfInput <= 1'b0;",
"consumerServed <= 1'b0;",
"listIn_req <= 1'b0;",
"listOut_ack <= 1'b0;",
unlines . parEdge par $ printf "listOut_value_%i <= 8'hFF;",
unlines . parEdge par $ printf "listOut_value_%i_valid <= 1'b0;"
],
"end"
],
"end"
],
"endmodule"
]
renderBuiltin resID ZipMacro par [lambda, list1, list2] = VNodeDef resID def ass mod where
list1ID = nodeID list1
list2ID = nodeID list2
listType = nodeType list1
f = functionCalled lambda
fID = dfdID f
def = defineNode resID listType par
ass = concat [
printf "Zip_%i zip_%i(clock, node_%i_done & node_%i_done, node_%i_done,\n\t" resID resID list1ID list2ID resID,
lstrip $ argEdge par list1,
lstrip $ argEdge par list2,
"\n\t",
lstrip . chopComma . argEdge par $ DArgument resID listType (InferredPar par),
");\n"
]
inputType = scalarVType . snd . head $ dfdArgs f
outputType = scalarVType $ returnType f
mod = unlines [
printf "module Zip_%i(" resID,
indent [
--referring to args as list_0 and list_1 for input and output lists, respectively
"input clock, input ready, output done,",
"output list1In_req_actual,",
"input list1In_ack,",
unlines . parEdge par $ printf "input %s list1In_value_%i," inputType,
unlines . parEdge par $ printf "input list1In_value_%i_valid,",
"output list2In_req_actual,",
"input list2In_ack,",
unlines . parEdge par $ printf "input %s list2In_value_%i," inputType,
unlines . parEdge par $ printf "input list2In_value_%i_valid,",
"input listOut_req,",
"output reg listOut_ack,",
unlines . parEdge par $ printf "output reg %s listOut_value_%i," outputType,
chopComma . unlines . parEdge par $ printf "output reg listOut_value_%i_valid,",
");",
"",
--waitingForInput: waiting for a new value from listIn
--processingValues: waiting for f(listIn_value) to complete
--endOfInput: the input list has been exhausted
--funcStalling: the function is stalling, waiting for the consumer to take the next value
--consumerWaiting: the consumer of the output list is waiting for a value
--consumerServed: the consumer of the output list has been given a value
--bothAckd: both lists have asserted ACK
"reg wasReady;",
"reg list1In_req, list2In_req, list1In_ackd, list2In_ackd;",
"reg waitingForInput, processingValuesActual, endOfInput;",
"reg consumerServed;",
"wire consumerWaiting, processingValues, valuesProcessed, funcStalling, bothAckd;",
printf "wire %s %s;" outputType $ joinMap ", " id $ parEdge par (printf "nextVal_%i"),
printf "wire %s;" $ joinMap ", " id $ parEdge par (printf "nextVal_%i_valid"),
printf "wire %s;" $ joinMap ", " id $ parEdge par (printf "lambda_%i_done"),
unlines . parEdge par $ \i -> printf "dfd_%i lambda_%i(clock, processingValues, lambda_%i_done, list1In_value_%i, list2In_value_%i, nextVal_%i);" fID i i i i i,
"",
"assign done = ready;",
"assign list1In_req_actual = list1In_req | (ready & ~wasReady);",
"assign list2In_req_actual = list2In_req | (ready & ~wasReady);",
"assign consumerWaiting = listOut_req & ~consumerServed;",
"assign funcStalling = ~waitingForInput & ~processingValues & ~endOfInput;",
"assign processingValues = processingValuesActual | (waitingForInput & bothAckd & ~endOfInput);",
"assign bothAckd = (list1In_ackd | list1In_ack) & (list2In_ackd | list2In_ack);",
printf "assign valuesProcessed = %s;" . joinMap " & " id . parEdge par $ printf "lambda_%i_done",
"",
"always @(posedge clock) begin",
indent [
"wasReady <= ready;",
"",
"if(ready) begin",
indent [
"if(~wasReady | (funcStalling & consumerServed)) begin",
"\tlist1In_req <= 1;",
"\tlist2In_req <= 1;",
"\twaitingForInput <= 1'b1;",
"end",
"",
"if(waitingForInput) begin",
indent [
"if(bothAckd) begin",
indent [
"waitingForInput <= 1'b0;",
"list1In_ackd <= 1'b0;",
"list2In_ackd <= 1'b0;",
"",
printf "if(list1In_value_0_valid && list2In_value_0_valid) begin",
"\tprocessingValuesActual <= 1'b1;",
"end else begin",
"\tendOfInput <= 1'b1;",
"end"
],
"end else if(list1In_ack) begin",
"\tlist1In_ackd <= 1'b1;",
"end else if(list2In_ack) begin",
"\tlist2In_ackd <= 1'b1;",
"end"
],
"end",
"",
"if((valuesProcessed | endOfInput) & consumerWaiting) begin",
indent [
unlines . parEdge par $ printfAll "listOut_value_%i <= nextVal_%i;",
unlines . parEdge par $ printfAll "listOut_value_%i_valid <= list1In_value_%i_valid & list2In_value_%i_valid & ~endOfInput;",
"listOut_ack <= 1'b1;",
"consumerServed <= 1'b1;",
"if(~endOfInput) begin",
indent [
"if (list1In_req) begin",
"\tlist1In_req <= 1'b0;",
"\tlist2In_req <= 1'b0;",
"\tprocessingValuesActual <= 1'b0;",
"end else begin",
"\tlist1In_req <= 1'b1;",
"\tlist2In_req <= 1'b1;",
"\twaitingForInput <= 1'b1;",
"end"
],
"end"
],
"end",
"",
"if(consumerServed)",
"\tlistOut_ack <= 1'b0;",
"",
"if(~listOut_req)",
"\tconsumerServed <= 1'b0;",
""
],
"end else begin",
indent [
"waitingForInput <= 1'b0;",
"processingValuesActual <= 1'b0;",
"endOfInput <= 1'b0;",
"consumerServed <= 1'b0;",
"list1In_req <= 1'b0;",
"list2In_req <= 1'b0;",
"listOut_ack <= 1'b0;",
unlines . parEdge par $ printf "listOut_value_%i <= 8'hFF;",
unlines . parEdge par $ printf "listOut_value_%i_valid <= 1'b0;"
],
"end"
],
"end"
],
"endmodule"
]
renderBuiltin resID FoldMacro par [lambda, identity, list] = VNodeDef resID def ass vMod where
listID = nodeID list
listType = nodeType list
f = functionCalled lambda
fID = dfdID f
def = unlines [
defineNode resID listType par,
printf "wire node_%i_ready;" resID
]
ass = concat [
printf "assign node_%i_ready = node_%i_done & node_%i_done;\n" resID listID (nodeID identity),
printf "Fold_%i fold_%i(clock, node_%i_ready, node_%i_done,\n\t" resID resID resID resID,
lstrip $ argEdge 1 identity,
lstrip $ argEdge par list,
"\n\t",
lstrip . chopComma . argEdge 1 $ DArgument resID (returnType f) (InferredPar par),
");\n"
]
inputType = scalarVType . snd . head $ dfdArgs f
outputType = scalarVType $ returnType f
accNo = (par + 1) + ((par + 1) `mod` 2) --no. of accumulators must be even, and we need an extra acc. for the carry
funcNo = accNo `div` 2 --no. of function instances
stageNo :: Int = ceiling . logBase 2 $ fromIntegral accNo --no. of stages needed
vMod = unlines [
printf "module Fold_%i(input clock, input ready, output reg done," resID,
indent [
printf "input %s identity," inputType,
"output listIn_req_actual,",
"input listIn_ack,",
unlines . parEdge par $ printf "input %s listIn_value_%i," inputType,
unlines . parEdge par $ printf "input listIn_value_%i_valid,",
printf "output %s result" outputType,
");",
"",
--waitingForInput: waiting for data from list
--processingValues: waiting for functions to complete
--resettingFuncs: functions have completed, ready is unasserted
--carryPresent: whether there is a carry value present from the previous row
"reg wasReady, waitingForInput, processingValues, resettingFuncs;",
"reg [3:0] foldStage;", --2^4 stages => 2^2^4 (256) degrees of parallelism
"reg carryPresent;",
unlines . parEdge accNo $ printfAll "reg [7:0] accumulator_%i_reg, accumulator_%i;",
unlines . parEdge funcNo $ printf "wire func_%i_done;",
unlines . parEdge funcNo $ printf "reg func_%i_enabled;",
unlines . parEdge funcNo $ printf "wire %s func_%i_result;" outputType,
"wire funcs_done;",
"assign listIn_req_actual = waitingForInput | (ready & ~wasReady);",
"assign result = accumulator_0;",
printf "assign funcs_done = %s;" $ joinMap " & " (printfAll "(func_%i_done | ~func_%i_enabled)") [0 .. funcNo - 1],
--Define dummy wires to simplify the logic for setting the accumulators
--These are needed because we have more accumulators than list values
"//Dummy elements to simplify accumulator logic",
let f i = unlines [
printf "wire listIn_value_%i_valid;" i,
printf "wire %s listIn_value_%i;" outputType i,
printf "assign listIn_value_%i_valid = 1'b0;" i,
printf "assign listIn_value_%i = identity;" i
]
in unlines $ map f [par .. accNo - 1],
"",
let renderInst :: Int -> String
renderInst i --built-in operators don't have explicit function definitions
| fID == (-1) = printf fmt2 i (2*i) op (2*i + 1) i i
| otherwise = printf fmt1 fID i i i (2*i) (2*i + 1) i
where
op = getOperator . builtinOp $ dfdRoot f
fmt1 = intercalate " " [
"dfd_%i func_%i(clock,",
"(processingValues | listIn_ack) & func_%i_enabled,",
"func_%i_done,",
"accumulator_%i, accumulator_%i, func_%i_result);"
]
fmt2 = unlines [
"assign func_%i_result = accumulator_%i %s accumulator_%i;",
"assign func_%i_done = (processingValues | listIn_ack) & func_%i_enabled;"
]
in unlines $ parEdge funcNo renderInst,
"",
"always @(*) begin",
indent [
"if(listIn_ack) begin",
indent [
"if(carryPresent) begin",
indent . return $ "accumulator_0 = accumulator_0_reg;",
indent . (flip map) [1 .. accNo-1] $ \i -> printf "accumulator_%i = listIn_value_%i;" i (i-1),
"end else begin",
indent . parEdge accNo $ printfAll "accumulator_%i = listIn_value_%i;",
"end"
],
"",
"end else begin",
indent . parEdge accNo $ printfAll "accumulator_%i = accumulator_%i_reg;",
"end"
],
"end",
"",
"always @(posedge clock) begin",
indent [
"wasReady <= ready;",
"",
"if(ready) begin",
indent [
"if(~wasReady) begin",
indent [
"waitingForInput <= 1'b1;"
],
"end",
"",
"if(listIn_ack & waitingForInput) begin",
indent [
"waitingForInput <= 1'b0;",
"",
printf "if(listIn_value_0_valid) begin",
indent [
"processingValues <= 1'b1;",
"foldStage <= 4'd0;",
"",
--load list values into accumulators
"if(carryPresent) begin",
let fmt = "accumulator_%i_reg <= (listIn_value_%i_valid ? listIn_value_%i : identity);"
in indent . parEdge (accNo - 1) $ \i -> printf fmt (i+1) i i,
"\tfunc_0_enabled <= listIn_value_0_valid;",
let fmt = "func_%i_enabled <= listIn_value_%i_valid & listIn_value_%i_valid;"
in indent . (flip map) [1 .. funcNo - 1] $ \i -> printf fmt i (2*i-1) (2*i),
"end else begin",
let fmt = "accumulator_%i_reg <= (listIn_value_%i_valid ? listIn_value_%i : identity);"
in indent . parEdge accNo $ printfAll fmt,
let fmt = "func_%i_enabled <= listIn_value_%i_valid & listIn_value_%i_valid;"
in indent . parEdge funcNo $ \i -> printf fmt i (2*i) (2*i+1),
"end"
],
"end else begin",
" done <= 1'b1;",
"end"
],
"end",
"",
"if(processingValues & funcs_done) begin",
indent [
"processingValues <= 1'b0;",
"",
"if(~func_0_enabled) begin",
" waitingForInput <= 1'b1;",
" carryPresent <= 1'b1;",
" accumulator_0_reg <= func_0_result;",
"end else begin",
" resettingFuncs <= 1'b1;",
" foldStage <= foldStage + 4'd1;",
"",
--Shift values. If a func is disabled, then take its left arg (either a single value or an identity)
let fmt = "accumulator_%i_reg <= (func_%i_enabled ? func_%i_result : accumulator_%i_reg);"
in indent . parEdge funcNo $ \i -> printf fmt i i i (2*i),
indent . (flip map) [funcNo .. accNo - 1] $ printf "accumulator_%i_reg <= identity;",
let funcEnabled :: Int -> String
funcEnabled i
| i < funcNo = printf "func_%i_enabled" i
| otherwise = "1'b0"
fmt = "%s <= %s & %s;"
in indent . parEdge funcNo $ \i -> printf fmt (funcEnabled i) (funcEnabled $ 2*i) (funcEnabled $ 2*i+1),
"end"
],
"end",
"",
"if(resettingFuncs) begin",
indent [
"resettingFuncs <= 1'b0;",
"processingValues <= 1'b1;"
],
"end"
],
"end else begin",
indent [
"waitingForInput <= 1'b0;",
"processingValues <= 1'b0;",
"resettingFuncs <= 1'b0;",
"done <= 1'b0;",
"carryPresent <= 1'b0;",
"foldStage <= 4'hX;",
unlines . parEdge accNo $ printf "accumulator_%i_reg <= 8'hFF;"
],
"end"
],
"end"
],
"endmodule",
""
]
--Generates assign statement for bounded and unbounded enumerations
renderListGen :: NodeId -> DNode -> DNode -> Maybe DNode -> Int -> (String, String)
renderListGen resID min step max par = (ass, mod) where
ass = concat [
printf "BoundedEnum_%i enum_%i(clock, node_%i_done, " resID resID resID,
printf "node_%i, node_%i, " (nodeID min) (nodeID step),
maybe "8'hFF, " (printf "node_%i, " . nodeID) max,
chopComma $ argEdge par (DArgument resID (DList UndefinedType) (InferredPar par)),
");\n"
]
mod = unlines [
printf "module BoundedEnum_%i(input clock, input ready," resID,
indent [
"input signed [7:0] min,",
"input [7:0] step,",
"input signed [7:0] max,",
"",
"input req,",
"output reg ack,",
unlines . parEdge par $ printf "output reg signed [7:0] value_%i,",
chopComma . unlines . parEdge par $ printf "output value_%i_valid,",
");",
"",
"reg lastReq;",
"reg initialized, active;",
unlines . parEdge par $ printf "wire signed [7:0] nextValue_%i;",
unlines . parEdge par $ \i -> printf "assign nextValue_%i = value_%i + 8'd%i * step;" i i par,
unlines . parEdge par $ printfAll "assign value_%i_valid = ready & active & value_%i >= min && value_%i <= max;",
"",
"always @(posedge clock) begin",
indent [
"lastReq <= req;",
"",
"if(ready) begin",
indent [
"if(req & ~lastReq) begin",
indent [
"if(initialized) begin",
indent [
printf "if(value_%i_valid) begin" $ par - 1,
indent . parEdge par $ printfAll "value_%i <= nextValue_%i;",
"end else begin",
" active <= 1'b0;",
"end",
""
],
"end else begin",
indent [
"initialized <= 1;",
unlines . parEdge par $ printfAll "value_%i <= min + 8'd%i;"
],
"end",
"",
"ack <= 1;",
""
],
"end else begin",
" ack <= 0;",
"end",
""
],
--if not ready, reset/initialize variables
"end else begin",
" ack <= 0;",
" initialized <= 0;",
" active <= 1'b1;",
indent . parEdge par $ printf "value_%i <= 8'hXX;",
"end"
],
"end"
],
"endmodule",
""
]
--Generates assign statements for ready/done signals for builtin functions.
genericDone :: NodeId -> [DNode] -> String
genericDone resID args = ass where
ass = printf "assign node_%i_done = %s;\n" resID doneArgs
doneArgs = if length args == 0
then "ready"
else joinMap " & " (printf "node_%i_done" . nodeID) args
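--For example (illustrative): genericDone 7 [] yields "assign node_7_done = ready;\n",
--while with argument nodes it ANDs their done signals, e.g.
--"assign node_7_done = node_3_done & node_4_done;\n".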
--Helper function for extracting the contents of VNodeDefs
concatNodes :: [VNodeDef] -> String
concatNodes ns = defs ++ "\n" ++ assigns where
(defs, assigns) = foldl f ("", "") $ uniqV ns
--lift uniq over VNodeDef
uniqV :: [VNodeDef] -> [VNodeDef]
uniqV = map snd . uniq . map (\v -> (vNodeId v, v))
--Concatenate definitions and assignments
f :: (String, String) -> VNodeDef -> (String, String)
f (ds, as) node = (ds ++ vDef node, as ++ vAssign node)
--Converts a scalar Haskell type to a Verilog type (i.e. a bus)
scalarVType :: DType -> String
scalarVType (DUInt n) = printf "[%i:0]" (n - 1)
scalarVType (DSInt n) = printf "[%i:0]" (n - 1)
scalarVType DBool = "" --1 bit is implied by a blank type
scalarVType UndefinedType = scalarVType $ DUInt 8 --default
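--For instance, scalarVType (DUInt 8) and scalarVType (DSInt 8) both give "[7:0]",
--while scalarVType DBool gives "" (a single-bit wire).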
--Filters out key-value pairs which re-use existing keys. Preserves order.
uniq :: Ord a => [(a, b)] -> [(a, b)]
uniq xs = reverse $ foldl f [] xs where
f res (key, value)
| key `elem` (map fst res) = res --key already defined; do nothing
| otherwise = (key, value):res --key not defined; add definition
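--For example: uniq [(1, "a"), (2, "b"), (1, "c")] == [(1, "a"), (2, "b")]
--(later re-uses of a key are dropped, original order kept).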
--Helper function for indenting blocks of code
indent :: [String] -> String
indent = rstrip . unlines . map f where
f = intercalate "\n" . map ('\t':) . lines
indentN :: Int -> [String] -> String
indentN n = unlines . map f where
f = intercalate "\n" . map (delim ++) . lines
delim = take n (repeat '\t')
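--For example (assuming rstrip drops trailing whitespace):
--indent ["a", "b"] == "\ta\n\tb" and indentN 2 ["a"] == "\t\ta\n".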
--like printf, but replaces all %i with a single value.
printfAll :: String -> Int -> String
printfAll f i = replace "%i" (show i) f
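--For example: printfAll "listOut_value_%i <= nextVal_%i;" 3 == "listOut_value_3 <= nextVal_3;",
--whereas plain printf would need the index supplied once per %i placeholder.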
| rdnetto/H2V | H2V/RenderVerilog.hs | gpl-2.0 | 62,911 | 0 | 26 | 27,012 | 11,348 | 5,983 | 5,365 | -1 | -1 |
module Main (
main
) where
import Sivi
import Linear
cut :: (Machine m, Backend w) => Double -> Double -> Operation m w ()
cut d l =
chain 5 [
probeHorizontalCylinderRight d l 5 (ProbeTool 3 42)
, saw_left d d 1
]
drillings :: (Machine m, Backend w) => Double -> Double -> Double -> Double -> Operation m w ()
drillings d l d1 d2 =
withTool (EndMill 2 42) $
chain 5 [
probeZMinus (V3 d1 (d/2) 0) 5
, message "Start the spindle"
, translate (V3 d1 (d/2) 0) (drill (d+1) 10)
, translate (V3 (l-d2) (d/2) 0) (drill (d+1) 10)
]
axialDrilling :: (Machine m, Backend w) => Double -> Double -> Double -> Operation m w ()
axialDrilling dc dd l =
chain 5 [
probeOuterCylinder dc 5 (ProbeTool 3 42)
, circularPocket dd l 0.5
]
handle :: (Machine m, Backend w) => Operation m w ()
handle = do
let l = 12
let d = 10
let d1 = 7.2
let d2 = 3
chain 5 [
do
cut d (l+2)
retract 30
message "Please rotate the part to cut the other side"
cut d l
, drillings d l d1 d2 ]
axialDrilling d 4.5 l
main :: IO ()
--main = putStr . (++"M2\n") . show . getGCode defaultCuttingParameters $ handle
main = interface . getGCode defaultCuttingParameters $ handle
| iemxblog/sivi-haskell | examples/handle.hs | gpl-2.0 | 1,376 | 0 | 13 | 478 | 545 | 272 | 273 | 38 | 1 |
import Text.Pandoc
import Text.Pandoc.JSON
import Text.Pandoc.CrossRef
main :: IO ()
main = toJSONFilter go
where
go fmt p@(Pandoc meta _) = runCrossRefIO meta fmt action p
where
action (Pandoc _ bs) = do
meta' <- crossRefMeta
bs' <- crossRefBlocks bs
return $ Pandoc meta' bs'
| infotroph/pandoc-crossref | src/pandoc-crossref.hs | gpl-2.0 | 330 | 0 | 12 | 97 | 115 | 57 | 58 | 10 | 1 |
module GtkBlast.Log
(
-- * 'IO'
putInvisibleLog
-- * 'E'
, writeLog
, showMessage
, clearMessage
, tempError
, banMessage
, uncMessage
, redMessage
-- * this shouldn't be here
, appFile
) where
import Import
import GtkBlast.Environment
import GtkBlast.GtkUtils
import GtkBlast.MuVar
import Graphics.UI.Gtk hiding (get, set, labelSetMarkup, labelSetText)
import qualified Graphics.UI.Gtk as Gtk
import qualified Data.ByteString.Char8 as B8
import qualified Data.Text as T
import qualified Data.Text.IO.Locale as LTIO
{-# INLINE labelSetMarkup #-}
labelSetMarkup :: MonadIO m => Label -> Text -> m ()
labelSetMarkup l = liftIO . Gtk.labelSetMarkup l . T.unpack
{-# INLINE labelSetText #-}
labelSetText :: MonadIO m => Label -> Text -> m ()
labelSetText l = liftIO . Gtk.labelSetText l . T.unpack
red :: Text -> Text
red s = "<span foreground=\"#ff0000\">" ++ s ++ "</span>"
rawPutStdout :: Text -> IO ()
rawPutStdout s =
fromIOException (return ()) $
whenM (hIsTerminalDevice stdout) $ do
LTIO.putStrLn s
hFlush stdout
rawPutLog :: (Text -> IO ()) -> FilePath -> Text -> IO ()
rawPutLog err' logfile str = do {
withBinaryFile logfile AppendMode $ \h -> do
B8.hPutStrLn h $ encodeUtf8 str
} `catch`
\(a :: IOException) ->
err' $ "Got exception while trying to write to log file \"" ++
fromString logfile ++ "\": " ++ show a ++
"\nAttempted to write: " ++ str
rawGUILog :: TextBuffer -> Int -> Text -> IO ()
rawGUILog wbuf maxLines msg = do
e <- textBufferGetEndIter wbuf
textBufferInsertByteString wbuf e $ encodeUtf8 msg
textBufferInsertByteString wbuf e "\n"
l <- textBufferGetLineCount wbuf
when (l > maxLines) $ do
s <- textBufferGetStartIter wbuf
il <- textBufferGetIterAtLine wbuf (l - maxLines)
textBufferDelete wbuf s il
writeLogIO :: TextBuffer -> Int -> Text -> IO ()
writeLogIO wbuf maxLines rawmsg = do
st <- getZonedTime
let !frmt = ("[" ++ show st ++ "]:\n " ++ rawmsg)
rawPutLog (toGUIAndStdout wbuf maxLines) "log.txt" frmt
rawPutStdout frmt
rawGUILog wbuf maxLines frmt
where
toGUIAndStdout buf lin er = do
rawGUILog buf lin er
rawPutStdout er
-- | Write to logfile and stdout, but not to the GUI.
-- Use only when GUI is uninitialized.
putInvisibleLog :: Text -> IO ()
putInvisibleLog msg = do
rawPutLog rawPutStdout "log.txt" msg
    rawPutStdout msg -- duplicate log to stdout for convenience
writeLog :: Text -> E ()
writeLog s = do
E{ wspinmaxlines
, wbuf } <- ask
maxLines <- round <$> get wspinmaxlines
io $ writeLogIO wbuf maxLines s
showMessage :: (Env -> CheckButton) -> Text -> Maybe Int -> Bool -> Text -> E ()
showMessage getCheck msgname mUnlockT mkRed msg = do
E{ wlabelmessage
, messageLocks
, window
} <- ask
wcheck <- asks getCheck
io $ modifyIORef messageLocks $
\x -> if x < 0
then 0 + 1
else x + 1
n <- io getZonedTime
writeLog $ "gtkblast, " ++ show n ++ ": " ++ msgname ++ ": " ++ msg
if mkRed
then labelSetMarkup wlabelmessage (red msg)
else labelSetText wlabelmessage msg
io $ whenM (toggleButtonGetActive wcheck) $ windowPopup window
case mUnlockT of
Just t -> void $ io $ timeoutAdd
(False <$ modifyIORef messageLocks (subtract 1))
(t * 1000)
Nothing -> return ()
clearMessage :: E ()
clearMessage = do
E{..} <- ask
locks <- io $ readIORef messageLocks
when (locks == 0) $
labelSetText wlabelmessage ""
tempError :: Int -> Text -> E ()
tempError t s = do
showMessage wcheckannoyerrors "Displayed error message" (Just t) True s
banMessage :: Int -> Text -> E ()
banMessage t s = showMessage wcheckannoy "Ban message" (Just t) True s
uncMessage :: Text -> E ()
uncMessage s = do
E{ wlabelmessage } <- ask
writeLog $ "gtkblast, Unconditinal message: " ++ s
labelSetText wlabelmessage s
redMessage :: Text -> E ()
redMessage s = do
E{ wlabelmessage } <- ask
writeLog $ "gtkblast, Red message: " ++ s
labelSetMarkup wlabelmessage $ red s
-- | Specialized 'fromIOException' showing 'tempError' message on exceptions
appFile :: a -> (FilePath -> IO a) -> FilePath -> E a
appFile def' ac file =
fromIOException err $ io $ ac file
where err = do
tempError 3 $ "Невозможно прочитать файл \"" ++ fromString file ++ "\""
return def'
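-- A usage sketch (hypothetical reader function): with loadLines :: FilePath -> IO [String],
-- appFile [] loadLines "captcha.txt" runs the reader inside 'E'; on an IOException it
-- shows a temporary error message and returns the supplied default [].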
| exbb2/BlastItWithPiss | src/GtkBlast/Log.hs | gpl-3.0 | 4,602 | 0 | 16 | 1,215 | 1,444 | 708 | 736 | -1 | -1 |
-----------------------------------------------------------------------------
-- |
-- Module : Numeric.LinearAlgebra.Static.Algorithms
-- Copyright : (c) Reiner Pope 2008
-- License : GPL-style
--
-- Maintainer : Reiner Pope <[email protected]>
-- Stability : experimental
-- Portability : portable
--
-- Common operations.
--
-----------------------------------------------------------------------------
module Numeric.LinearAlgebra.Static.Algorithms(
-- * Type hints
matT,
vecT,
doubleT,
complexT,
-- * Multiplication
Mul(..),
(<.>),
-- * Concatenating
(<->),
(<|>),
-- * Solving \/ inverting
(<\>),
linearSolve,
inv,
pinv,
-- * Determinant \/ rank \/ condition number
det,
rank,
rcond,
-- * Eigensystems
eig,
eigSH,
-- * Factorisations
-- ** SVD
svd,
fullSVD,
economySVDU,
-- ** QR
qr,
-- ** Cholesky
chol,
-- ** Hessenberg
hess,
-- ** Schur
schur,
-- ** LU
lu,
luPacked,
luSolve,
-- * Matrix functions
expm,
sqrtm,
matFunc,
-- * Nullspace
nullspacePrec,
nullVector,
-- * Norms
pnorm,
H.NormType(..),
-- * Misc
ctrans,
eps,
i,
outer,
kronecker,
) where
import Data.Complex
import Types.Data.Num
import Types.Data.Ord
import Types.Data.Bool
import Data.Packed.Static
import Data.Packed.Static.Internal
import qualified Numeric.LinearAlgebra as H
import Numeric.LinearAlgebra(pnorm)
matT :: Matrix s t -> a
matT = const undefined
vecT :: Vector s t -> a
vecT = const undefined
doubleT :: a s Double -> x
doubleT = const undefined
complexT :: a s (Complex Double) -> x
complexT = const undefined
class Mul a b where
-- | Overloaded matrix-matrix, matrix-vector, or vector-matrix product.
-- The instances have type equalities to improve the quality of
-- type inference.
(<>) :: Field t => a t -> b t -> MulResult a b t
type MulResult a b :: * -> *
instance (n ~ n') => Mul (Matrix (m,n)) (Matrix (n',p)) where
a <> b = Matrix $ H.multiply (unMatrix a) (unMatrix b)
type MulResult (Matrix (m,n)) (Matrix (n',p)) = Matrix (m,p)
instance (n ~ n') => Mul (Matrix (m,n)) (Vector n') where
m <> v = Vector (unMatrix m H.<> unVector v)
type MulResult (Matrix (m,n)) (Vector n') = Vector m
instance (m ~ m') => Mul (Vector m) (Matrix (m',n)) where
v <> m = Vector (unVector v H.<> unMatrix m)
type MulResult (Vector m) (Matrix (m',n)) = Vector n
-- | Dot product
(<.>) :: (Field t) => Vector n t -> Vector n t -> t
a <.> b = H.dot (unVector a) (unVector b)
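-- An illustrative use (sizes hypothetical): with m :: Matrix (D2, D3) Double and
-- v :: Vector D3 Double, m <> v :: Vector D2 Double and v <.> v :: Double;
-- mismatched inner dimensions are rejected by the type checker.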
class JoinableV a b where
type JoinShapeV a b :: *
-- | Overloaded matrix-matrix, matrix-vector, vector-matrix, or vector-vector
-- vertical concatenation. The instances have type equalities
-- to improve the quality of type inference.
(<->) :: Element t => a t -> b t -> Matrix (JoinShapeV a b) t
instance JoinableV (Matrix (m,n)) (Matrix (p,n)) where
type JoinShapeV (Matrix (m,n)) (Matrix (p,n)) = (m :+: p, n)
m <-> n = Matrix (unMatrix m H.<-> unMatrix n)
instance JoinableV (Matrix (m,n)) (Vector n) where
type JoinShapeV (Matrix (m,n)) (Vector n) = (m :+: D1, n)
m <-> v = m <-> asRow v
instance JoinableV (Vector n) (Matrix (m,n)) where
type JoinShapeV (Vector n) (Matrix (m,n)) = (D1 :+: m, n)
v <-> m = asRow v <-> m
instance JoinableV (Vector n) (Vector n) where
type JoinShapeV (Vector n) (Vector n) = (D2, n)
v <-> w = asRow v <-> w
class JoinableH a b where
type JoinShapeH a b :: *
-- | Overloaded matrix-matrix, matrix-vector, vector-matrix,
-- or vector-vector horizontal concatenation. The
-- instances have type equalities to
-- improve the quality of type inference.
(<|>) :: Element t => a t -> b t -> Matrix (JoinShapeH a b) t
instance JoinableH (Matrix (m,n)) (Matrix (m,p)) where
type JoinShapeH (Matrix (m,n)) (Matrix (m,p)) = (m,n:+:p)
m <|> n = Matrix (unMatrix m H.<|> unMatrix n)
instance JoinableH (Matrix (m,n)) (Vector m) where
type JoinShapeH (Matrix (m,n)) (Vector m) = (m,n:+:D1)
m <|> v = m <|> asColumn v
instance JoinableH (Vector m) (Matrix (m,n)) where
type JoinShapeH (Vector m) (Matrix (m,n)) = (m,D1 :+: n)
v <|> m = asColumn v <|> m
instance JoinableH (Vector m) (Vector m) where
type JoinShapeH (Vector m) (Vector m) = (m,D2)
v <|> w = asColumn v <|> w
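-- An illustrative use (sizes hypothetical): for v, w :: Vector D3 Double,
-- v <-> w :: Matrix (D2, D3) Double stacks them as two rows, while
-- v <|> w :: Matrix (D3, D2) Double places them side by side as two columns.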
-- | Least squares solution of a linear equation.
(<\>) :: Field t => Matrix (m,n) t -> Vector m t -> Vector n t
(<\>) m = Vector . (H.<\>) (unMatrix m) . unVector
linearSolve :: (H.Field t) => Matrix (m,m) t -> Matrix (m,n) t -> Matrix (m,n) t
linearSolve a b = Matrix $ H.linearSolve (unMatrix a) (unMatrix b)
inv :: (H.Field t) => Matrix (m,m) t -> Matrix (m,m) t
inv = Matrix . H.inv . unMatrix
pinv :: (H.Field t) => Matrix (m,n) t -> Matrix (n,m) t
pinv = Matrix . H.pinv . unMatrix
det :: (H.Field t) => Matrix (m,m) t -> t
det = H.det . unMatrix
rank :: (H.Field t) => Matrix (m,n) t -> Int
rank = H.rank . unMatrix
rcond :: (H.Field t) => Matrix (m,n) t -> Double
rcond = H.rcond . unMatrix
--------- SVD
svd :: (H.Field t) => Matrix (m,n) t ->
(Matrix (m,m) t, Vector (Min m n) Double, Matrix (n,n) t)
svd = svdBody
-- private
svdBody m = case H.svd $ unMatrix m of
(a,b,c) -> (Matrix a, Vector b, Matrix c)
fullSVD :: (H.Field t) => Matrix mn t
-> (Matrix (m,m) t, Matrix (m,n) Double, Matrix (n,n) t)
fullSVD m = case H.full H.svd $ unMatrix m of
(a,b,c) -> (Matrix a, Matrix b, Matrix c)
economySVDU :: (H.Field t) => Matrix (m,n) t ->
(Matrix (m,Unknown) t,
Vector Unknown Double,
Matrix (n,Unknown) t)
economySVDU m = case H.economy H.svd $ unMatrix m of
(a,b,c) -> (Matrix a, Vector b, Matrix c)
---- eig
eig :: H.Field t =>
Matrix (m,m) t
-> (Vector m (H.Complex Double),
Matrix (m,m) (H.Complex Double))
eig = liftEig H.eig
eigSH :: (H.Field t) =>
Matrix (m,m) t
-> (Vector m Double, Matrix (m,m) t)
eigSH = liftEig H.eigSH
liftEig f m = case f $ unMatrix m of
(a,b) -> (Vector a, Matrix b)
---- qr
qr :: (H.Field t) =>
Matrix (m,n) t -> (Matrix (m,m) t, Matrix (m,n) t)
qr m = case H.qr $ unMatrix m of
(a,b) -> (Matrix a, Matrix b)
--- cholesky
chol :: (H.Field t) => Matrix (m,m) t -> Matrix (m,m) t
chol = Matrix . H.chol . unMatrix
-- hessenberg
hess :: (H.Field t) =>
Matrix (m,m) t -> (Matrix (m,m) t, Matrix (m,m) t)
hess m = case H.hess $ unMatrix m of
(a,b) -> (Matrix a, Matrix b)
-- schur
schur :: (H.Field t) =>
Matrix (m,m) t -> (Matrix (m,m) t, Matrix (m,m) t)
schur m = case H.schur $ unMatrix m of
(a,b) -> (Matrix a, Matrix b)
-- lu
--- I hope these sizes are right !!
lu :: (H.Field t) =>
Matrix (m,n) t
-> (Matrix (m, Min m n) t, Matrix (Min m n, n) t, Matrix (m,m) t, t)
lu m = case H.lu $ unMatrix m of
(a,b,c,d) -> (Matrix a, Matrix b, Matrix c, d)
luPacked :: (H.Field t) =>
Matrix (m,n) t -> (Matrix (m,n) t, [Int])
luPacked m = case H.luPacked $ unMatrix m of
(a,b) -> (Matrix a, b)
--- is this right?
luSolve :: (H.Field t) =>
(Matrix (m,n) t, [Int])
-> Matrix (m,p) t -> Matrix (n,p) t
luSolve (Matrix lu,is) = Matrix . H.luSolve (lu,is) . unMatrix
----
expm :: (H.Field t) => Matrix (m,m) t -> Matrix (m,m) t
expm = Matrix . H.expm . unMatrix
sqrtm :: (H.Field t) => Matrix (m,m) t -> Matrix (m,m) t
sqrtm = Matrix . H.sqrtm . unMatrix
matFunc :: (H.Field t) => (Complex Double -> Complex Double)
-> Matrix (m,m) t
-> Matrix (m,m) (Complex Double)
matFunc f = Matrix . H.matFunc f . unMatrix
nullspacePrec :: (H.Field t) =>
Double -> Matrix (m,n) t -> [Vector n t]
nullspacePrec tol = map Vector . H.nullspacePrec tol . unMatrix
nullVector :: (H.Field t) =>
Matrix (m, n) t -> Vector n t
nullVector = last . nullspacePrec 1
--- misc
ctrans :: (H.Field t) =>
Matrix (m,n) t -> Matrix (n,m) t
ctrans = Matrix . H.ctrans . unMatrix
eps :: Double
eps = H.eps
i :: Complex Double
i = H.i
outer :: (H.Field t) =>
Vector m t -> Vector n t -> Matrix (m,n) t
outer v w = Matrix $ H.outer (unVector v) (unVector w)
kronecker :: (H.Field t) =>
Matrix (m,n) t -> Matrix (p,q) t -> Matrix (m :*: p, n :*: q) t
kronecker m n = Matrix $ H.kronecker (unMatrix m) (unMatrix n)
----
-- omitted: haussholder, unpackQR, unpackHess
| reinerp/hmatrix-static | Numeric/LinearAlgebra/Static/Algorithms.hs | gpl-3.0 | 8,652 | 0 | 12 | 2,279 | 3,869 | 2,104 | 1,765 | -1 | -1 |
{-|
Module : Player
Copyright : (c) 2014 Kaashif Hymabaccus
License : GPL-3
Maintainer : [email protected]
Stability : experimental
Portability : POSIX
-}
module Player where
import Vector
-- | Player data structure, storing the current position (and will store inventory, health etc)
data Player = Player { alive :: Bool -- ^ Whether the player is alive
, position :: Vector2D -- ^ Position on the board (in 2D space)
}
-- | The player's condition at the start of the game
initial = Player { alive = True
, position = (0,0)
}
-- | Moves player from current position to position supplied (validation is done elsewhere)
move :: Player -> Vector2D -> Player
move p (x,y) = p { position = (x,y) }
-- | Moves player 1 \"up\"
north p = move p $ (position p) + (0,1)
-- | Moves player 1 \"right\"
east p = move p $ (position p) + (1,0)
-- | Moves player 1 \"down\"
south p = move p $ (position p) + (0,-1)
-- | Moves player 1 \"left\"
west p = move p $ (position p) + (-1,0)
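-- | A small illustrative helper (not part of the original module): walks a list of
-- displacements from the starting position, relying on the same component-wise
-- 'Vector2D' addition the moves above use. E.g. position (walk [(0,1),(1,0)]) == (1,1).
walk :: [Vector2D] -> Player
walk = foldl (\p v -> move p (position p + v)) initial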
| kaashif/venture | src/Player.hs | gpl-3.0 | 1,083 | 0 | 8 | 300 | 234 | 135 | 99 | 12 | 1 |
module Teb where
import System (getArgs)
import System.Directory (getCurrentDirectory, doesDirectoryExist)
import System.FilePath (isAbsolute)
import Teb.Consts
import Teb.Fetch (fetch)
import Teb.Init (initialiseRepository)
import Teb.Info (info)
import Teb.Reinit (reinit)
import Teb.Status (status)
import Teb.Sync (sync)
import Teb.Types
data TebCommand = Init |
Status |
Fetch |
Sync |
Info |
Reinit |
Version |
Usage
deriving (Show)
getCommand :: [String] -> Maybe TebCommand
getCommand ("init":_) = Just Init
getCommand ("status":_) = Just Status
getCommand ("fetch":_) = Just Fetch
getCommand ("sync":_) = Just Sync
getCommand ("info":_) = Just Info
getCommand ("reinit":_) = Just Reinit
getCommand ("version":_) = Just Version
getCommand ("-v":_) = Just Version
getCommand ("-h":_) = Just Usage
getCommand (_:_) = Nothing
getCommand _ = Nothing
showUsage :: IO ()
showUsage = mapM_ putStrLn usageStrings
outputVersionDescription :: IO ()
outputVersionDescription = putStrLn versionDescription
tebMain' :: CurrentWorkingDirectory -> Arguments -> IO ()
tebMain' currentDirectory args =
do
validDir <- doesDirectoryExist currentDirectory
let pathIsAbsolute = isAbsolute currentDirectory
if validDir && pathIsAbsolute
then
case getCommand args of
Just Init -> initialiseRepository currentDirectory args
Just Status -> status currentDirectory args
Just Fetch -> fetch currentDirectory args
Just Sync -> sync currentDirectory args
Just Info -> info currentDirectory args
Just Reinit -> reinit currentDirectory args
Just Version -> outputVersionDescription
Just Usage -> showUsage
Nothing -> putStrLn "Unknown command : try teb -h for help."
else putStrLn $ "Current Directory is invalid: " ++ currentDirectory
tebMain :: IO ()
tebMain = do
currentDirectory <- getCurrentDirectory
args <- getArgs
tebMain' currentDirectory args | ignesco/teb-h | Teb.hs | gpl-3.0 | 1,984 | 66 | 11 | 427 | 628 | 333 | 295 | 58 | 10 |
import Data.List
-- | All numbers in the unsorted list are present twice,
-- except the one you have to find.
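-- For instance: findUnique [1, 2, 2, 3, 3] == 1; sorting groups the duplicates,
-- leaving the unique element as the only singleton group.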
findUnique :: [Int] -> Int
findUnique xs = head $ head $ filter (\x -> length x == 1) $ group $ sort xs | yannxia-self/code-war-hasekell-training | find_the_unique_number.hs | gpl-3.0 | 311 | 0 | 12 | 68 | 67 | 36 | 31 | 3 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.InstanceGroupManagers.DeleteInstances
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Schedules a group action to delete the specified instances in the
-- managed instance group. The instances are also removed from any target
-- pools of which they were a member. This method reduces the targetSize of
-- the managed instance group by the number of instances that you delete.
-- This operation is marked as DONE when the action is scheduled even if
-- the instances are still being deleted. You must separately verify the
-- status of the deleting action with the listmanagedinstances method.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.instanceGroupManagers.deleteInstances@.
module Network.Google.Resource.Compute.InstanceGroupManagers.DeleteInstances
(
-- * REST Resource
InstanceGroupManagersDeleteInstancesResource
-- * Creating a Request
, instanceGroupManagersDeleteInstances
, InstanceGroupManagersDeleteInstances
-- * Request Lenses
, igmdiProject
, igmdiInstanceGroupManager
, igmdiZone
, igmdiPayload
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.instanceGroupManagers.deleteInstances@ method which the
-- 'InstanceGroupManagersDeleteInstances' request conforms to.
type InstanceGroupManagersDeleteInstancesResource =
"compute" :>
"v1" :>
"projects" :>
Capture "project" Text :>
"zones" :>
Capture "zone" Text :>
"instanceGroupManagers" :>
Capture "instanceGroupManager" Text :>
"deleteInstances" :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON]
InstanceGroupManagersDeleteInstancesRequest
:> Post '[JSON] Operation
-- | Schedules a group action to delete the specified instances in the
-- managed instance group. The instances are also removed from any target
-- pools of which they were a member. This method reduces the targetSize of
-- the managed instance group by the number of instances that you delete.
-- This operation is marked as DONE when the action is scheduled even if
-- the instances are still being deleted. You must separately verify the
-- status of the deleting action with the listmanagedinstances method.
--
-- /See:/ 'instanceGroupManagersDeleteInstances' smart constructor.
data InstanceGroupManagersDeleteInstances = InstanceGroupManagersDeleteInstances'
{ _igmdiProject :: !Text
, _igmdiInstanceGroupManager :: !Text
, _igmdiZone :: !Text
, _igmdiPayload :: !InstanceGroupManagersDeleteInstancesRequest
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'InstanceGroupManagersDeleteInstances' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'igmdiProject'
--
-- * 'igmdiInstanceGroupManager'
--
-- * 'igmdiZone'
--
-- * 'igmdiPayload'
instanceGroupManagersDeleteInstances
:: Text -- ^ 'igmdiProject'
-> Text -- ^ 'igmdiInstanceGroupManager'
-> Text -- ^ 'igmdiZone'
-> InstanceGroupManagersDeleteInstancesRequest -- ^ 'igmdiPayload'
-> InstanceGroupManagersDeleteInstances
instanceGroupManagersDeleteInstances pIgmdiProject_ pIgmdiInstanceGroupManager_ pIgmdiZone_ pIgmdiPayload_ =
InstanceGroupManagersDeleteInstances'
{ _igmdiProject = pIgmdiProject_
, _igmdiInstanceGroupManager = pIgmdiInstanceGroupManager_
, _igmdiZone = pIgmdiZone_
, _igmdiPayload = pIgmdiPayload_
}
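-- For example (illustrative values):
-- instanceGroupManagersDeleteInstances "my-project" "my-group" "us-central1-a" payload
-- builds a request whose fields can then be adjusted via the lenses below.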
-- | Project ID for this request.
igmdiProject :: Lens' InstanceGroupManagersDeleteInstances Text
igmdiProject
= lens _igmdiProject (\ s a -> s{_igmdiProject = a})
-- | The name of the managed instance group.
igmdiInstanceGroupManager :: Lens' InstanceGroupManagersDeleteInstances Text
igmdiInstanceGroupManager
= lens _igmdiInstanceGroupManager
(\ s a -> s{_igmdiInstanceGroupManager = a})
-- | The name of the zone where the managed instance group is located.
igmdiZone :: Lens' InstanceGroupManagersDeleteInstances Text
igmdiZone
= lens _igmdiZone (\ s a -> s{_igmdiZone = a})
-- | Multipart request metadata.
igmdiPayload :: Lens' InstanceGroupManagersDeleteInstances InstanceGroupManagersDeleteInstancesRequest
igmdiPayload
= lens _igmdiPayload (\ s a -> s{_igmdiPayload = a})
instance GoogleRequest
InstanceGroupManagersDeleteInstances where
type Rs InstanceGroupManagersDeleteInstances =
Operation
type Scopes InstanceGroupManagersDeleteInstances =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/compute"]
requestClient
InstanceGroupManagersDeleteInstances'{..}
= go _igmdiProject _igmdiZone
_igmdiInstanceGroupManager
(Just AltJSON)
_igmdiPayload
computeService
where go
= buildClient
(Proxy ::
Proxy InstanceGroupManagersDeleteInstancesResource)
mempty
| rueshyna/gogol | gogol-compute/gen/Network/Google/Resource/Compute/InstanceGroupManagers/DeleteInstances.hs | mpl-2.0 | 6,023 | 0 | 18 | 1,337 | 559 | 336 | 223 | 95 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Run.Namespaces.Revisions.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- List revisions.
--
-- /See:/ <https://cloud.google.com/run/ Cloud Run Admin API Reference> for @run.namespaces.revisions.list@.
module Network.Google.Resource.Run.Namespaces.Revisions.List
(
-- * REST Resource
NamespacesRevisionsListResource
-- * Creating a Request
, namespacesRevisionsList
, NamespacesRevisionsList
-- * Request Lenses
, nParent
, nXgafv
, nFieldSelector
, nUploadProtocol
, nAccessToken
, nResourceVersion
, nLabelSelector
, nUploadType
, nLimit
, nIncludeUninitialized
, nContinue
, nWatch
, nCallback
) where
import Network.Google.Prelude
import Network.Google.Run.Types
-- | A resource alias for @run.namespaces.revisions.list@ method which the
-- 'NamespacesRevisionsList' request conforms to.
type NamespacesRevisionsListResource =
"apis" :>
"serving.knative.dev" :>
"v1" :>
Capture "parent" Text :>
"revisions" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "fieldSelector" Text :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "resourceVersion" Text :>
QueryParam "labelSelector" Text :>
QueryParam "uploadType" Text :>
QueryParam "limit" (Textual Int32) :>
QueryParam "includeUninitialized" Bool :>
QueryParam "continue" Text :>
QueryParam "watch" Bool :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] ListRevisionsResponse
-- | List revisions.
--
-- /See:/ 'namespacesRevisionsList' smart constructor.
data NamespacesRevisionsList =
NamespacesRevisionsList'
{ _nParent :: !Text
, _nXgafv :: !(Maybe Xgafv)
, _nFieldSelector :: !(Maybe Text)
, _nUploadProtocol :: !(Maybe Text)
, _nAccessToken :: !(Maybe Text)
, _nResourceVersion :: !(Maybe Text)
, _nLabelSelector :: !(Maybe Text)
, _nUploadType :: !(Maybe Text)
, _nLimit :: !(Maybe (Textual Int32))
, _nIncludeUninitialized :: !(Maybe Bool)
, _nContinue :: !(Maybe Text)
, _nWatch :: !(Maybe Bool)
, _nCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'NamespacesRevisionsList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'nParent'
--
-- * 'nXgafv'
--
-- * 'nFieldSelector'
--
-- * 'nUploadProtocol'
--
-- * 'nAccessToken'
--
-- * 'nResourceVersion'
--
-- * 'nLabelSelector'
--
-- * 'nUploadType'
--
-- * 'nLimit'
--
-- * 'nIncludeUninitialized'
--
-- * 'nContinue'
--
-- * 'nWatch'
--
-- * 'nCallback'
namespacesRevisionsList
:: Text -- ^ 'nParent'
-> NamespacesRevisionsList
namespacesRevisionsList pNParent_ =
NamespacesRevisionsList'
{ _nParent = pNParent_
, _nXgafv = Nothing
, _nFieldSelector = Nothing
, _nUploadProtocol = Nothing
, _nAccessToken = Nothing
, _nResourceVersion = Nothing
, _nLabelSelector = Nothing
, _nUploadType = Nothing
, _nLimit = Nothing
, _nIncludeUninitialized = Nothing
, _nContinue = Nothing
, _nWatch = Nothing
, _nCallback = Nothing
}
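-- For example (illustrative value): namespacesRevisionsList "namespaces/my-project"
-- builds a request listing all revisions; optional fields such as 'nLimit' or
-- 'nContinue' can then be set via their lenses.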
-- | The namespace from which the revisions should be listed. For Cloud Run
-- (fully managed), replace {namespace_id} with the project ID or number.
nParent :: Lens' NamespacesRevisionsList Text
nParent = lens _nParent (\ s a -> s{_nParent = a})
-- | V1 error format.
nXgafv :: Lens' NamespacesRevisionsList (Maybe Xgafv)
nXgafv = lens _nXgafv (\ s a -> s{_nXgafv = a})
-- | Allows to filter resources based on a specific value for a field name.
-- Send this in a query string format. i.e. \'metadata.name%3Dlorem\'. Not
-- currently used by Cloud Run.
nFieldSelector :: Lens' NamespacesRevisionsList (Maybe Text)
nFieldSelector
= lens _nFieldSelector
(\ s a -> s{_nFieldSelector = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
nUploadProtocol :: Lens' NamespacesRevisionsList (Maybe Text)
nUploadProtocol
= lens _nUploadProtocol
(\ s a -> s{_nUploadProtocol = a})
-- | OAuth access token.
nAccessToken :: Lens' NamespacesRevisionsList (Maybe Text)
nAccessToken
= lens _nAccessToken (\ s a -> s{_nAccessToken = a})
-- | The baseline resource version from which the list or watch operation
-- should start. Not currently used by Cloud Run.
nResourceVersion :: Lens' NamespacesRevisionsList (Maybe Text)
nResourceVersion
= lens _nResourceVersion
(\ s a -> s{_nResourceVersion = a})
-- | Allows to filter resources based on a label. Supported operations are =,
-- !=, exists, in, and notIn.
nLabelSelector :: Lens' NamespacesRevisionsList (Maybe Text)
nLabelSelector
= lens _nLabelSelector
(\ s a -> s{_nLabelSelector = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
nUploadType :: Lens' NamespacesRevisionsList (Maybe Text)
nUploadType
= lens _nUploadType (\ s a -> s{_nUploadType = a})
-- | Optional. The maximum number of records that should be returned.
nLimit :: Lens' NamespacesRevisionsList (Maybe Int32)
nLimit
= lens _nLimit (\ s a -> s{_nLimit = a}) .
mapping _Coerce
-- | Not currently used by Cloud Run.
nIncludeUninitialized :: Lens' NamespacesRevisionsList (Maybe Bool)
nIncludeUninitialized
= lens _nIncludeUninitialized
(\ s a -> s{_nIncludeUninitialized = a})
-- | Optional. Encoded string to continue paging.
nContinue :: Lens' NamespacesRevisionsList (Maybe Text)
nContinue
= lens _nContinue (\ s a -> s{_nContinue = a})
-- | Flag that indicates that the client expects to watch this resource as
-- well. Not currently used by Cloud Run.
nWatch :: Lens' NamespacesRevisionsList (Maybe Bool)
nWatch = lens _nWatch (\ s a -> s{_nWatch = a})
-- | JSONP
nCallback :: Lens' NamespacesRevisionsList (Maybe Text)
nCallback
= lens _nCallback (\ s a -> s{_nCallback = a})
instance GoogleRequest NamespacesRevisionsList where
type Rs NamespacesRevisionsList =
ListRevisionsResponse
type Scopes NamespacesRevisionsList =
'["https://www.googleapis.com/auth/cloud-platform"]
requestClient NamespacesRevisionsList'{..}
= go _nParent _nXgafv _nFieldSelector
_nUploadProtocol
_nAccessToken
_nResourceVersion
_nLabelSelector
_nUploadType
_nLimit
_nIncludeUninitialized
_nContinue
_nWatch
_nCallback
(Just AltJSON)
runService
where go
= buildClient
(Proxy :: Proxy NamespacesRevisionsListResource)
mempty
| brendanhay/gogol | gogol-run/gen/Network/Google/Resource/Run/Namespaces/Revisions/List.hs | mpl-2.0 | 7,802 | 0 | 25 | 2,001 | 1,292 | 742 | 550 | 177 | 1 |