code (string, length 5 to 1.03M) | repo_name (string, length 5 to 90) | path (string, length 4 to 158) | license (15 string classes) | size (int64, 5 to 1.03M) | n_ast_errors (int64, 0 to 53.9k) | ast_max_depth (int64, 2 to 4.17k) | n_whitespaces (int64, 0 to 365k) | n_ast_nodes (int64, 3 to 317k) | n_ast_terminals (int64, 1 to 171k) | n_ast_nonterminals (int64, 1 to 146k) | loc (int64, -1 to 37.3k) | cycloplexity (int64, -1 to 1.31k)
---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
module Drive.Terminal.Handlers
( terminalToDescribeI
, execTerminal
) where
import qualified Data.Text as Text
import qualified Drive as D
import qualified Drive.Describe as D
import Control.Monad.IO.Class (MonadIO, liftIO)
import Data.Functor (($>))
import Drive.Terminal.Types
type DescribeP = D.Free D.DescribeF
terminalToDescribeI :: TerminalF a -> DescribeP a
terminalToDescribeI (WaitForEnter x) = D.debug "waiting for enter" >> pure x
terminalToDescribeI (PrintMessage s x) = D.debug ("printing \"" <> Text.pack s <> "\"") >> pure x
terminalToDescribeI (ReadInput x) = D.debug "reading input" $> x ""
execTerminal :: (MonadIO m) => TerminalF a -> m a
execTerminal (WaitForEnter x) = liftIO getLine >> pure x
execTerminal (PrintMessage s x) = liftIO (putStrLn s) >> pure x
execTerminal (ReadInput x) = x <$> liftIO getLine
| palf/free-driver | packages/drive-terminal/lib/Drive/Terminal/Handlers.hs | bsd-3-clause | 1,088 | 0 | 11 | 255 | 299 | 160 | 139 | 22 | 1 |
module Lifted where
import Control.Concurrent.STM (STM, TVar, atomically, readTVarIO)
import Control.Monad (MonadPlus, mzero)
import Control.Monad.IO.Class (MonadIO, liftIO)
-- atomically, but generalised to MonadIO
atomicallyL :: MonadIO io => STM a -> io a
atomicallyL = liftIO . atomically
-- readTVarIO, but generalised to MonadIO
readTVarIOL :: MonadIO io => TVar a -> io a
readTVarIOL = liftIO . readTVarIO
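-- A minimal usage sketch (illustrative only; 'modifyTVar'' would also
-- need to be imported from Control.Concurrent.STM):
--
-- > bump :: MonadIO io => TVar Int -> io ()
-- > bump counter = atomicallyL (modifyTVar' counter (+ 1))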
| frublox/aichanbot | src/Lifted.hs | bsd-3-clause | 455 | 0 | 7 | 101 | 116 | 66 | 50 | 8 | 1 |
{-# LANGUAGE UnicodeSyntax #-}
{-|
Module : Math.Haskell.Fractal.HurstExponent
Description : Calculate the Hurst Exponent of time series data
Copyright : (c) m00nlight, 2014-2015
License : BSD3
Stability : experimental
This module calculates the Hurst Exponent of time series data using
the <http://en.wikipedia.org/wiki/Rescaled_range Rescaled_Range> method.
Briefly speaking, the function computes the R/S score of the time
series data for different chunk sizes, and uses linear regression to
estimate the slope, in log-log coordinates, of the average R/S value
against the chunk size. The series should ideally have a length that
is a power of 2, but this is not required.
For more details on how to calculate the Hurst Exponent, please see the
<http://en.wikipedia.org/wiki/Hurst_exponent wikipedia> page.
-}
module Math.Haskell.Fractal.HurstExponent
(hurstExponent) where
import Math.Haskell.Fractal.LinearRegression
import Math.Haskell.Fractal.Utils
import Data.List
import Prelude.Unicode
import Data.List.Split
-- | The function 'hurstR' calculates the R value of a series of data
hurstR ∷ (Fractional a, Ord a) ⇒ [a] → a
hurstR xs = maximum zs - minimum zs
  where μ  = average xs
        ys = map (\ x → x - μ) xs
        zs = accumulateList ys
-- | The function 'hurstS' calculates the S value of a series of data
hurstS = sd
-- | The function 'hurstRS' calculates the R/S value of a series of data
hurstRS ∷ [Double] → Double
hurstRS xs = if s ≡ 0.0 then 1.0 else r / s
  where r = hurstR xs
        s = hurstS xs
{-|
The 'hurstExponent' function is used to estimate the
<http://en.wikipedia.org/wiki/Hurst_exponent Hurst_Exponent> of
time series data (v₁, v₂, ..., vₙ). n should ideally be a power of 2.
If n is not a power of 2, we take as many elements as possible from
the tail of the list to form a list of length 2^m.
The value can later be used to calculate the fractal dimension of
the time series data.
-}
hurstExponent ∷ [Double] → Result
hurstExponent vs = snd res
  where
    len = length vs
    n = last $ takeWhile (<len) [2^i | i ← [1..]]
    vs' = drop (len - n) vs
    chunkSizes = nestWhile (>4) (\ x → x `div` 2) n
    help ∷ [Double] → Int → (Double, Double)
    help vs size = (log (average $ map hurstRS (chunksOf size vs)) / log 2.0,
                    log (fromIntegral size) / log 2.0)
    tmp = map (\ x → help vs' x) chunkSizes
    res = linearRegression (map snd tmp) (map fst tmp)
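-- A small self-contained sketch (the names below are illustrative
-- additions, not part of the original module): the R/S score of a toy
-- series, which 'hurstExponent' computes for several chunk sizes before
-- fitting a slope through the log-log points.
exampleSeries ∷ [Double]
exampleSeries = [1.0, 2.0, 4.0, 3.0, 5.0, 4.0, 6.0, 5.0]
exampleRS ∷ Double
exampleRS = hurstRS exampleSeries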
| m00nlight/hs-fractal | Math/Haskell/Fractal/HurstExponent.hs | bsd-3-clause | 2,551 | 0 | 14 | 579 | 442 | 243 | 199 | 29 | 2 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-|
Module : AERN2.MP.Float.Auxi
Description : Auxiliary structures
Copyright : (c) Michal Konecny
License : BSD3
Maintainer : [email protected]
Stability : experimental
Portability : portable
Auxiliary structures for bounds on result and printing.
-}
module AERN2.MP.Float.Auxi
(
BoundsCEDU(..)
, ceduDownUp
, ceduCentreErr
)
where
data BoundsCEDU a =
BoundsCEDU
{
ceduCentre :: a
, ceduErr :: a
, ceduDown :: a
, ceduUp :: a
}
ceduDownUp :: BoundsCEDU a -> (a,a)
ceduDownUp cedu = (ceduDown cedu, ceduUp cedu)
ceduCentreErr :: BoundsCEDU a -> (a,a)
ceduCentreErr cedu = (ceduCentre cedu, ceduErr cedu)
| michalkonecny/aern2 | aern2-mp/src/AERN2/MP/Float/Auxi.hs | bsd-3-clause | 763 | 0 | 8 | 187 | 136 | 81 | 55 | 17 | 1 |
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.Raw.EXT.BlendFuncSeparate
-- Copyright : (c) Sven Panne 2013
-- License : BSD3
--
-- Maintainer : Sven Panne <[email protected]>
-- Stability : stable
-- Portability : portable
--
-- All raw functions and tokens from the EXT_blend_func_separate extension, see
-- <http://www.opengl.org/registry/specs/EXT/blend_func_separate.txt>.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.Raw.EXT.BlendFuncSeparate (
-- * Functions
glBlendFuncSeparate,
-- * Tokens
gl_BLEND_DST_RGB,
gl_BLEND_SRC_RGB,
gl_BLEND_DST_ALPHA,
gl_BLEND_SRC_ALPHA
) where
import Graphics.Rendering.OpenGL.Raw.Core32
| mfpi/OpenGLRaw | src/Graphics/Rendering/OpenGL/Raw/EXT/BlendFuncSeparate.hs | bsd-3-clause | 820 | 0 | 4 | 110 | 51 | 41 | 10 | 7 | 0 |
module Utils where
import Control.Monad (ap, liftM, liftM2)
import Data.Char (isNumber)
import Data.List (genericLength)
import qualified Data.Vector as V
import qualified Data.Vector.Unboxed as U
import Numeric.LinearAlgebra (Matrix, cmap, cols, dropColumns,
flatten, fromRows, loadMatrix,
takeColumns, toList, toRows)
import qualified Numeric.LinearAlgebra as L
import System.FilePath (splitFileName)
import System.Posix (fileExist)
usage :: String
usage = "Usage: ./init_present <file> <Graph nbd size> <PCA nbd size>"
errorMsg1 :: String
errorMsg1 = "You did not enter two positive integers.\n"
errorMsg2 :: String
errorMsg2 = "You did not enter two positive integer arguments.\n"
graphMsg1 :: String
graphMsg1 = "The graph is not fully connected. Please enter larger integers!"
fileMsg :: String
fileMsg = "You have not provided an existent file. Please try again."
getFile :: FilePath -> Bool -> FilePath
getFile file pCond = if pCond
then file
else error fileMsg
fileArgs :: [String] -> IO Bool
fileArgs = (=<<) fileExist . return . head
intCheck :: Foldable t => t Char -> Bool
intCheck x = (not . null $ x) && foldr ((&&) . isNumber) True x
gtZero :: [String] -> Bool
gtZero = all ((> 0) . (\x -> read x :: Int))
procMNIST :: [Double] -> Matrix Double -> Matrix Double
procMNIST selection mat = images
where
images = fromRows [x | (x, c) <- zip xs cs
, or $ sequenceA eqfns c]
xs = toRows . cmap (\x -> 255 - x :: Double) . mSel takeColumns $ mat
cs = toList $ flatten . mSel dropColumns $ mat
eqfns = [(== x) | x <- selection]
mSel f x = f ((+ (-1)) $ cols x) x
checkArgs :: [String] -> (Int,Int)
checkArgs args = if ((== (3 :: Integer)) . genericLength) args
then let val = tail args
in if all intCheck val && gtZero val
then head . ap zip tail . Prelude.map read $ val -- yeah...
else error (errorMsg1 ++ usage)
else error (errorMsg2 ++ usage)
checkFile :: [FilePath] -> IO FilePath
checkFile args = if not . null $ args
then liftM2 getFile <$> return . head <*> fileArgs $ args
else error usage
getReqMatrix :: FilePath -> IO (Matrix Double)
getReqMatrix file = if (snd . splitFileName $ file) == "mnist.txt"
then liftM (procMNIST [4]) $ loadMatrix file
else loadMatrix file
convertDataset :: Matrix Double -> V.Vector (U.Vector Double)
convertDataset = V.map (U.fromList . L.toList) . V.fromList . L.toRows
rowSize :: Matrix Double -> Int
rowSize = L.rows
| emmanueldenloye/manifoldRNC | src/Utils.hs | bsd-3-clause | 2,880 | 0 | 13 | 914 | 871 | 476 | 395 | 59 | 3 |
import System.Posix.Interaction(Interaction(..), OStream(..), testProgram)
-- throw-away example demonstrating Interactions
main :: IO ()
main = testProgram "/usr/bin/head" ["-1"] Nothing True (Interactions [Feed "apa\n", Expect (Just 2) StdOut "apa\n"])
| GaloisInc/sk-dev-platform | libs/SCD/src/System/Posix/Test.hs | bsd-3-clause | 257 | 0 | 11 | 30 | 84 | 47 | 37 | 3 | 1 |
module Config
( initConfig
, readConfig
, dropConfigs
) where
import Events
import Data.ConfigFile
import Network.SimpleIRC
import System.Directory
import Control.Monad.Except
import Control.Monad
import Data.List
--import qualified System.IO.UTF8 as I
initConfig :: IO (FilePath,[FilePath])
initConfig = do
-- home <- getHomeDirectory
let configdir = "/app/mssbot" --home ++ "/.mssbot"
exists <- doesDirectoryExist configdir
unless exists $
createDirectory configdir
fileexists <- doesFileExist (configdir++"/default.irc")
unless fileexists $
writeFile (configdir++"/default.irc") $ unlines ["network: irc.network.net", "name: botName", "channels = [\"#chan\"]"]
fullfilelist <- getDirectoryContents configdir
return (configdir,fullfilelist)
readConfig :: FilePath -> IO IrcConfig
readConfig file = do
rv <- runExceptT $ do
cp <- join $ liftIO $ readfile emptyCP file
let x = cp
network <- get x "DEFAULT" "network"
name <- get x "DEFAULT" "name"
channels <- get x "DEFAULT" "channels"
return $ (mkDefaultConfig network name) { cAddr = network
, cNick = name
, cUsername = name
, cRealname = name
, cChannels = read channels ::[String]
, cEvents = events}
return $ either (\a -> mkDefaultConfig "dead" "dead") id rv
dropConfigs :: [FilePath] -> [FilePath]
dropConfigs [] = []
dropConfigs (f:fs) = if f=="." || f==".." || f=="default.irc" || (head f == '.') || not (".irc" `isInfixOf` f) then dropConfigs fs else f: dropConfigs fs
| raposalorx/mssbot | Config.hs | bsd-3-clause | 1,776 | 0 | 14 | 547 | 482 | 250 | 232 | 40 | 2 |
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE TypeFamilies #-}
module Math.Functor.Faithful where
import Data.Constraint
import Math.Category
import Math.Functor
import Prelude (($))
class Functor f => FullyFaithful f where
unfmap :: Cod f (f a) (f b) -> Dom f a b
instance FullyFaithful Dict where
unfmap f = Sub $ f Dict
instance FullyFaithful (->) where
unfmap (Nat f) = Op (f id)
instance FullyFaithful (:-) where
unfmap (Nat f) = Op (f id)
| ekmett/categories | src/Math/Functor/Faithful.hs | bsd-3-clause | 453 | 0 | 10 | 87 | 175 | 92 | 83 | -1 | -1 |
module Code28_WarmingUp where
import Data.List (unfoldr)
import Prelude hiding (reverse,concat)
import qualified Prelude as P
-- The generic loopless scheme: a prolog builds the initial state, and
-- unfoldr then produces the output one cheap step at a time.
loopless :: (a -> s) -> (s -> Maybe (b, s)) -> a -> [b]
loopless prolog step = unfoldr step . prolog
-- Four warm-up exercises
-- (1) id_L
id_L :: [a] -> [a]
id_L = unfoldr uncons . prolog
prolog :: [a] -> [a]
prolog = id
uncons :: [a] -> Maybe (a, [a])
uncons [] = Nothing
uncons (x:xs) = Just (x,xs)
-- (2) reverse
reverse :: [a] -> [a]
reverse = unfoldr uncons . foldl (flip (:)) []
-- (3) concat
concat :: [[a]] -> [a]
concat = unfoldr step . filter (not . null)
where
step :: [[a]] -> Maybe (a,[[a]])
step [] = Nothing
step ((x:xs):xss) = Just (x,consList xs xss)
consList :: [a] -> [[a]] -> [[a]]
consList xs xss = if null xs then xss else xs:xss
-- (4) preorder
type Forest a = [Rose a]
data Rose a = Node a (Forest a)
preorder :: Forest a -> [a]
preorder = unfoldr step . wrapList
where
step :: [Forest a] -> Maybe (a,[Forest a])
step [] = Nothing
step ((Node x xs:ys):zss) = Just (x,consList xs (consList ys zss))
wrapList :: [a] -> [[a]]
wrapList xs = consList xs []
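-- A small worked example (an illustrative addition, not part of the
-- original exercises):
exampleForest :: Forest Int
exampleForest = [Node 1 [Node 2 [], Node 3 []], Node 4 []]
examplePreorder :: [Int]
examplePreorder = preorder exampleForest -- [1,2,3,4]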
| sampou-org/pfad | Code/Code28_WarmingUp.hs | bsd-3-clause | 1,192 | 0 | 12 | 354 | 549 | 307 | 242 | 30 | 2 |
module Seed where
import Rumpus
start :: Start
start = do
myCodeHidden ==> True
setShape Sphere
setSize 0.2
setColor (V4 0.2 0.3 0.1 1)
setBody Physical
myCollisionBegan ==> \_ _ -> do
isHeld <- isBeingHeld
when (not isHeld) $ do
removeComponent myCollisionBegan
_treeID <- spawnChildInstance "Tree"
setRotation (V3 0 1 0) 0
setShape Cube
animateSizeTo (V3 0.4 0.1 0.4) 1
return ()
| lukexi/rumpus | pristine/Room/Seed.hs | bsd-3-clause | 516 | 0 | 16 | 196 | 163 | 72 | 91 | 18 | 1 |
{-# LANGUAGE OverloadedStrings, QuasiQuotes #-}
module Output.Common where
import Control.Lens
import Data.Text (Text)
import NewTTRS.Law
import Text.Blaze.Html (preEscapedToHtml)
import Text.Hamlet
import qualified Data.Text as Text
import DataStore
import Player
import Output.Formatting
graphStddevCutoff :: Double
graphStddevCutoff = 150
--------------------------------------------------------------------------------
-- URL generators
mkPlayerUrl :: PlayerId -> Text
mkPlayerUrl (PlayerId i) = "/player/" `Text.append` Text.pack (show i)
mkEventUrl :: EventId -> Text
mkEventUrl (EventId i) = "/event/" `Text.append` Text.pack (show i)
--------------------------------------------------------------------------------
playerLink :: PlayerId -> Player -> Html
playerLink playerId player = [shamlet|<a href=#{mkPlayerUrl playerId}>#{view playerName player}|]
--------------------------------------------------------------------------------
metaTags :: Html
metaTags = [shamlet|
<meta charset="UTF-8" />
<meta name="google" content="notranslate">
<meta http-equiv="Content-Language" content="en" />
<link rel="shortcut icon" href="/favicon.ico" />
|]
navigationLinks :: Html
navigationLinks = [shamlet|
<div #navigation>
<ul>
<li>
<a href="/">Match Entry
<li>
<a href="/players">Players
<li>
<a href="/event/latest">Latest Event
<li>
<a href="/events">Events
<li>
<a href="/static/graph.html">Curves
|]
graphInclude :: [Law] -> Html
graphInclude laws = [shamlet|
<script language=javascript>#{preEscapedToHtml $ graphScript laws}
|]
graphScript :: [Law] -> String
graphScript laws
= unlines
$ "$(document).ready(function drawGraphs() {"
: options
++ imap drawOne laws
++ ["});"]
where
tightEnough law = lawStddev law < graphStddevCutoff
tightLaws
| any tightEnough laws = filter tightEnough laws
| otherwise = laws
minVal = minimum $ 3600 : map mkLo tightLaws
maxVal = maximum $ 0 : map mkHi tightLaws
mkLo law = lawMean law - 2 * lawStddev law
mkLoMid law = lawMean law - lawStddev law
mkHiMid law = lawMean law + lawStddev law
mkHi law = lawMean law + 2 * lawStddev law
drawOne i law = "$.plot($(\"#graph"++show i++"\"), " ++ show [mkSeries law] ++ ", options);"
mkSeries law = [[x,0] | x <- [mkLo law, mkLoMid law, lawMean law, mkHiMid law, mkHi law]]
options =
[ "function vertLine(ctx, x, y, radius, shadow) {"
, " ctx.moveTo(x, y - radius);"
, " ctx.lineTo(x, y + radius);"
, "}"
, "var options = {"
," xaxis: { min: " ++ show minVal ++ ", max: " ++ show maxVal ++ " , show: false },"
," yaxis: { show: false },"
," margin: { top: 0, left: 0, right: 0, bottom: 0 },"
," lines: { show: true },"
," points: { show: true, symbol: vertLine },"
," colors: [\"red\"]"
,"};"
]
formatDelta :: Double -> Html
formatDelta d = case compare d 0 of
LT -> [shamlet|
<td .num .delta>#{showRound (abs d)}
<td .arrow>
<img src="/static/down.svg">|]
EQ -> [shamlet|
<td .delta>
<td .arrow>|]
GT -> [shamlet|
<td .num .delta>#{showRound d}
<td .arrow>
<img src="/static/up.svg">|]
| glguy/tt-ratings | Output/Common.hs | bsd-3-clause | 3,350 | 0 | 12 | 788 | 658 | 361 | 297 | 62 | 3 |
-- clock (on console, for now)
-- todo: add a UI clock.
module Main
( main
) where
import Sirea.Prelude
import Sirea.Clock
import Sirea.Time
import Control.Exception (assert)
-- a better way to show the clock...
timeString :: T -> String
timeString t =
let nDay = tmNanos t in
let sDay = nDay `div` 1000000000 in
let (mDay,s) = sDay `divMod` 60 in
let (hDay,m) = mDay `divMod` 60 in
s2 hDay ++ ":" ++ s2 m ++ ":" ++ s2 s
where s2 x = assert ((x >= 0) && (x < 100)) $
if x < 10
then "0" ++ show x
else show x
-- using clock, printing based on stability. (Only works for low rate
-- clocks... up to ~20Hz. Higher rate will update in bursts.)
bCC :: B (S P0 ()) (S P0 ())
bCC = bvoid $ bclockSeconds >>> bseq >>> bprintWith timeString
main :: IO ()
main =
print "before clock app" >>
runSireaApp bCC >>
print "after clock app"
| dmbarbour/Sirea | tst/Clock.hs | bsd-3-clause | 944 | 0 | 17 | 287 | 302 | 158 | 144 | 24 | 2 |
{- |
Module : ./CommonLogic/ClTests.hs
Description : Parser of common logic interface format
Copyright : (c) Karl Luc, DFKI Bremen 2010
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : portable
-- Tests and examples of Common Logic AS and CLIF parse
-}
module CommonLogic.ClTests where
import CommonLogic.AS_CommonLogic
import CommonLogic.Parse_CLIF
import Common.Doc as Doc
import Common.Id as Id
import Text.ParserCombinators.Parsec
-- examples for abstract syntax
a :: NAME
a = Token "x" nullRange
b :: NAME
b = Token "y" nullRange
t1 :: TERM
t1 = Name_term a
t2 :: TERM
t2 = Name_term b
t3 :: TERM
t3 = Name_term (Token "P" nullRange)
t4 :: TERM
t4 = Name_term (Token "Q" nullRange)
ts1 :: TERM_SEQ
ts1 = Term_seq t1
b1 :: BOOL_SENT
b1 = Conjunction [s1, sa2]
b2 :: BOOL_SENT
b2 = Negation s1
b3 :: BOOL_SENT
b3 = Implication s1 s1
s1 :: SENTENCE
s1 = Atom_sent at1 nullRange
sa2 :: SENTENCE
sa2 = Atom_sent at2 nullRange
at1 :: ATOM
at1 = Atom t3 [Term_seq t1]
at2 :: ATOM
at2 = Atom t4 [Term_seq t2]
s2 :: SENTENCE
s2 = Bool_sent b1 nullRange
s3 :: SENTENCE
s3 = Bool_sent (Negation s1) nullRange
s4 :: SENTENCE
s4 = Bool_sent (Disjunction [s1, sa2]) nullRange
ct :: TERM
ct = Name_term (Token "Cat" nullRange)
{-
bs1 :: BINDING_SEQ
bs1 = B_name a nullRange
bs2 :: BINDING_SEQ
bs2 = B_name b nullRange
-}
-- examples for pretty printing
test :: Doc
test = Doc.text "Atom:" <+> printAtom at1
$+$ Doc.text "Atom_sent:" <+> printSentence s1
$+$ Doc.text "Bool_sent:" <+> printSentence s2
$+$ Doc.text "Bool_sent:" <+> printSentence s4
$+$ Doc.text "Bool_sent:" <+> printSentence s3
$+$ Doc.text "Bool_sent:"
<+> printSentence (Bool_sent (Implication s1 sa2) nullRange)
$+$ Doc.text "Bool_sent:"
<+> printSentence (Bool_sent (Biconditional s1 sa2) nullRange)
$+$ Doc.text "Quant_sent:" <+> printSentence
(Quant_sent (Existential [] s1) nullRange)
$+$ Doc.text "Quant_sent:" <+> printSentence
(Quant_sent (Universal [] s1) nullRange)
$+$ Doc.text "Equation:" <+> printAtom (Equation t1 t1)
$+$ Doc.text "Functional Term:" <+> printTerm (Funct_term t1 [ts1] nullRange)
$+$ Doc.text "Sentence Functional:" <+> printSentence (
Atom_sent (Atom (Funct_term t1 [ts1] nullRange)
[Term_seq t1]) nullRange)
-- examples for CLIF parser
p1 = parseTest sentence "(P x)"
p2 = parseTest sentence "(and (P x) (Q y))"
p3 = parseTest sentence "(or (Cat x) (Mat y))"
p4 = parseTest sentence "(not (On x y))"
p5 = parseTest sentence "(if (P x) (Q x))"
p6 = parseTest sentence "(exists (z) (and (Pet x) (Happy z) (Attr x z)))"
-- helper functions for testing sublogics
-- | parses the given string
abstrSyntax :: String -> Either ParseError TEXT_META
abstrSyntax = parse CommonLogic.Parse_CLIF.cltext ""
cParse p = parse p ""
| spechub/Hets | CommonLogic/ClTests.hs | gpl-2.0 | 2,928 | 0 | 29 | 586 | 779 | 400 | 379 | 70 | 1 |
module AST.Node where
-- import AST.Type
--
import AST.Type.Vector
import AST.Type.Scalar
import AST.Function.Operator
import AST.Function.Lambda
import AST.Function.Basic
-- We can view the AST nodes as three subsets:
-- 1. Terms, which consist of the Array, Vector and Scalar nodes.
-- 2. Functions (or Func), which consist of the built-in function nodes.
-- 3. Expressions (Expr), the upper level built on top of Term and Func.
data ASTTerm = ASTVector Vector
| ASTScalar Scalar
deriving (Show)
data ASTFunc = ASTOperator Operator
| ASTBasic Basic
deriving (Show)
data ASTLambda =
ASTLambda {
lambdaBody :: LambdaASTExpr
} deriving (Show)
data ASTExpr = ASTTermExpr ASTTerm
| ASTFuncExpr ASTFunc [ASTExpr]
| ASTLambdaExpr ASTLambda
deriving (Show)
data TermType = TermScalarType ScalarType
| TermVectorType VectorType
deriving (Show, Eq)
type TermName = String
-- type system
typeOfTerm :: ASTTerm -> TermType
typeOfTerm (ASTScalar scalar) = TermScalarType $ typeOfScalar scalar
typeOfTerm (ASTVector vector) = TermVectorType $ typeOfVector vector
| VELVETDETH/MiniAcc | AST/Node.hs | apache-2.0 | 1,297 | 0 | 8 | 375 | 221 | 130 | 91 | 27 | 1 |
{-# LANGUAGE Safe #-}
-----------------------------------------------------------------------------
-- |
-- Module : Text.Parsec.ByteString.Lazy
-- Copyright : (c) Paolo Martini 2007
-- License : BSD-style (see the LICENSE file)
--
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
--
-- Convinience definitions for working with lazy 'C.ByteString's.
--
-----------------------------------------------------------------------------
module Text.Parsec.ByteString.Lazy
( Parser, GenParser, parseFromFile
) where
import Text.Parsec.Error
import Text.Parsec.Prim
import qualified Data.ByteString.Lazy.Char8 as C
type Parser = Parsec C.ByteString ()
type GenParser t st = Parsec C.ByteString st
-- | @parseFromFile p filePath@ runs a lazy bytestring parser @p@ on the
-- input read from @filePath@ using 'ByteString.Lazy.Char8.readFile'. Returns either a 'ParseError'
-- ('Left') or a value of type @a@ ('Right').
--
-- > main = do{ result <- parseFromFile numbers "digits.txt"
-- > ; case result of
-- > Left err -> print err
-- > Right xs -> print (sum xs)
-- > }
parseFromFile :: Parser a -> FilePath -> IO (Either ParseError a)
parseFromFile p fname
= do input <- C.readFile fname
return (runP p () fname input)
| aslatter/parsec | src/Text/Parsec/ByteString/Lazy.hs | bsd-2-clause | 1,369 | 0 | 10 | 289 | 163 | 101 | 62 | 12 | 1 |
{-# OPTIONS -fglasgow-exts #-}
module HJS.Interpreter.VarArgs where
class BuildList a r | r-> a where
build' :: [a] -> a -> r
instance Show a => BuildList a [String] where
build' l x = reverse $ (show x):(map show l)
instance BuildList a b => BuildList a (a->b) where
build' l x y = build' (x:l) y
--argsList x = build' [] x
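-- Illustrative use of the variadic builder (hypothetical REPL session):
--   build' [] 'a' 'b' 'c' :: [String]  ==  ["'a'","'b'","'c'"]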
| disnet/jscheck | src/HJS/Interpreter/VarArgs.hs | bsd-3-clause | 357 | 0 | 9 | 95 | 144 | 76 | 68 | -1 | -1 |
-- From http://lpaste.net/81623, courtesy of Albert Y. C. Lai
main = if True
then print 12
else print 42
| mpickering/ghc-exactprint | tests/examples/ghc710/IfThenElse2.hs | bsd-3-clause | 106 | 0 | 6 | 20 | 21 | 11 | 10 | 3 | 2 |
-- | Various JSON-related classes and helper functions not provided by Aeson
module Unison.JSON where
import Data.Aeson (ToJSON(..), FromJSON(..))
import Data.Text (Text)
import Data.Vector ((!?))
import qualified Data.Aeson as J
import qualified Data.Aeson.Types as Aeson
import qualified Data.Vector as Vector
class ToJSON1 f where
toJSON1 :: ToJSON a => f a -> Aeson.Value
class FromJSON1 f where
parseJSON1 :: FromJSON a => Aeson.Value -> Aeson.Parser (f a)
text :: Text -> Aeson.Value
text t = toJSON t
array :: [Aeson.Value] -> Aeson.Value
array = Aeson.Array . Vector.fromList
-- | Run the parser on the nth (0-based) subtree, assuming the input is an array
at :: Int -> (Aeson.Value -> Aeson.Parser a) -> Aeson.Value -> Aeson.Parser a
at ind parse j = J.withArray "at" k j where
k vs = maybe z parse (vs !? ind) where z = fail ("invalid index: " ++ show ind)
-- | Run the parser on the 0th subtree, assuming the input is an array
at0 :: (Aeson.Value -> Aeson.Parser a) -> Aeson.Value -> Aeson.Parser a
at0 = at 0
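-- A small illustrative use of 'at' (an addition for exposition, not part
-- of the original API): parse the first element of a JSON array as an Int.
exampleAt :: Aeson.Result Int
exampleAt = Aeson.parse (at 0 parseJSON) (array [toJSON (1 :: Int), toJSON True])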
| nightscape/platform | shared/src/Unison/JSON.hs | mit | 1,035 | 0 | 12 | 189 | 341 | 185 | 156 | 20 | 1 |
import Test.Hspec
t :: [String] -> [String]
b :: [String] -> [String]
h :: [String] -> Bool
z :: String -> Bool
d :: String -> String
(%) :: a -> [a] -> [a]
i :: String -> Bool
l :: String -> String
a :: (a -> b) -> (a -> b) -> [a] -> [b]
m :: [a] -> [a]
-- Not posting this because it is a lot larger than the existing JS solution
-- and assumes that the input is rectangular (all lines are the same length).
-- START COUNTING
m(x:y)=init y
a b m(x:y)=b x:map m(init y)++[b$last y]
l=a(\_->'+')(\_->'-')
i s=s==l s
d%y=d:y++[d]
d s='|':s++"|"
z s=(d.m)s==s
h=and.a i z
b[]=t:[t]where t=l"xx"
b y=(l$d$head y)%map d y
t y|h y=((map m).m)y|1<2=b y
-- END COUNTING
main = hspec $ do
describe "Toggle box" $ do
it "Passes test cases" $ do
t ["Hello, World!"] `shouldBe`
["+-------------+",
"|Hello, World!|",
"+-------------+"]
t ["Hello, ",
" World!"] `shouldBe`
["+----------+",
"|Hello, |",
"| World!|",
"+----------+"]
t ["+--------+",
"| |",
" --------+"] `shouldBe`
["+----------+",
"|+--------+|",
"|| ||",
"| --------+|",
"+----------+"]
t ["++",
"++"] `shouldBe` []
t ["+----+",
"+----+"] `shouldBe` []
t ["++",
"||",
"||",
"++"] `shouldBe` ["", ""]
t ["+-------+",
"| Hello |",
"+ ------+"] `shouldBe`
["+---------+",
"|+-------+|",
"|| Hello ||",
"|+ ------+|",
"+---------+"]
t [" +-------+",
"a| Hello |",
" +-------+"] `shouldBe`
["+----------+",
"| +-------+|",
"|a| Hello ||",
"| +-------+|",
"+----------+"]
| clupascu/codegolf | 135541-toggle-the-box/toggleBox_golfed.hs | mit | 1,885 | 0 | 16 | 695 | 704 | 384 | 320 | 67 | 1 |
-- An abstraction layer for shell-related tasks.
{-
Copyright 2012, 2013, 2014 Colin Woodbury <[email protected]>
This file is part of Aura.
Aura is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Aura is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Aura. If not, see <http://www.gnu.org/licenses/>.
-}
{- CITATION
`Escape Codes` section is directly borrowed from:
library: ansi-terminal
author: Max Bolingbroke
contact: <[email protected]>
-}
module Shell where
-- System Libraries
import Control.Exception (catchJust)
import System.FilePath ((</>))
import System.Process (readProcess, readProcessWithExitCode, rawSystem)
import Control.Monad (void)
import Data.Maybe (fromMaybe, fromJust)
import Data.List (intercalate)
import System.Directory ( getDirectoryContents
, setCurrentDirectory
, getCurrentDirectory
, removeFile
, renameFile
, copyFile )
import GHC.IO.Exception
---
----------------------
-- SYSTEM CALL ALIASES
----------------------
pwd :: IO String
pwd = getCurrentDirectory
rm :: FilePath -> IO ()
rm = removeFile
ls :: FilePath -> IO [FilePath]
ls = getDirectoryContents
-- Would this work?
-- drop 2 `fmap` getDirectoryContents
ls' :: FilePath -> IO [FilePath]
ls' p = noDots `fmap` ls p
where noDots = filter (`notElem` [".",".."])
-- | Returns every file's full file path.
ls'' :: FilePath -> IO [FilePath]
ls'' p = map (p </>) `fmap` ls' p
mv :: FilePath -> FilePath -> IO ()
mv f f' = catchJust unsupported (renameFile f f') (\_ -> cp f f' >> rm f)
where unsupported x@(IOError _ UnsupportedOperation _ _ _ _) = Just x
unsupported _ = Nothing
cd :: FilePath -> IO ()
cd = setCurrentDirectory
cp :: FilePath -> FilePath -> IO ()
cp = copyFile
chown :: String -> FilePath -> [String] -> IO ()
chown user path args = void $ quietShellCmd "chown" (args ++ [user,path])
---------------
-- ESCAPE CODES
---------------
-- Code borrowed from `ansi-terminal` library by Max Bolingbroke.
csi :: [Int] -> String -> String
csi args code = "\ESC[" ++ intercalate ";" (map show args) ++ code
cursorUpLineCode :: Int -> String
cursorUpLineCode n = csi [n] "F"
hideCursor :: IO ()
hideCursor = putStr hideCursorCode
showCursor :: IO ()
showCursor = putStr showCursorCode
hideCursorCode :: String
hideCursorCode = csi [] "?25l"
showCursorCode :: String
showCursorCode = csi [] "?25h"
---------------
-- SYSTEM CALLS
---------------
-- Calls a child process that suspends the current one and takes over.
shellCmd :: String -> [String] -> IO ExitCode
shellCmd = rawSystem
-- Suppresses output, but returns it on completion.
quietShellCmd :: String -> [String] -> IO String
quietShellCmd cmd args = readProcess cmd args ""
-- Return type is slightly more verbose than `quietShellCmd`.
quietShellCmd' :: String -> [String] -> IO (ExitCode,String,String)
quietShellCmd' cmd args = readProcessWithExitCode cmd args ""
-------------
-- EXIT CODES
-------------
didProcessSucceed :: ExitCode -> Bool
didProcessSucceed ExitSuccess = True
didProcessSucceed _ = False
didProcessFail :: ExitCode -> Bool
didProcessFail = not . didProcessSucceed
------------------------
-- ENVIRONMENT VARIABLES
------------------------
type Environment = [(String,String)]
getEnvVar :: String -> Environment -> Maybe String
getEnvVar = lookup
varExists :: String -> Environment -> Bool
varExists v env = case getEnvVar v env of
Just _ -> True
Nothing -> False
-- As of `sudo 1.8.6`, the USER variable disappears when using `sudo`.
getUser :: Environment -> Maybe String
getUser = getEnvVar "USER"
-- I live on the edge.
getUser' :: Environment -> String
getUser' = fromJust . getUser
-- This variable won't exist if the current program wasn't run with `sudo`.
getSudoUser :: Environment -> Maybe String
getSudoUser = getEnvVar "SUDO_USER"
getSudoUser' :: Environment -> String
getSudoUser' = fromJust . getSudoUser
-- Is the user root, or using sudo?
hasRootPriv :: Environment -> Bool
hasRootPriv env = varExists "SUDO_USER" env || isTrueRoot env
isTrueRoot :: Environment -> Bool
isTrueRoot env = varExists "USER" env &&
getUser' env == "root" &&
not (varExists "SUDO_USER" env)
isntTrueRoot :: Environment -> Bool
isntTrueRoot = not . isTrueRoot
-- This will get the true user name regardless of sudo-ing.
getTrueUser :: Environment -> String
getTrueUser env | isTrueRoot env = "root"
| hasRootPriv env = getSudoUser' env
| otherwise = getUser' env
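-- For illustration (hypothetical environments):
--   getTrueUser [("USER","root"),("SUDO_USER","alice")] == "alice"
--   getTrueUser [("USER","root")] == "root"
--   getTrueUser [("USER","bob")] == "bob"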
getEditor :: Environment -> String
getEditor env = fromMaybe "vi" $ getEnvVar "EDITOR" env
-- This will get the LANG variable from the environment
getLangVar :: Environment -> String
getLangVar env = fromMaybe "C" $ getEnvVar "LANG" env
| joehillen/aura | src/Shell.hs | gpl-3.0 | 5,349 | 0 | 10 | 1,113 | 1,143 | 617 | 526 | 89 | 2 |
{-# language OverloadedLists #-}
{-# language OverloadedStrings #-}
{-# language QuasiQuotes #-}
{-# language TypeApplications #-}
{-# language TypeFamilies #-}
module Planetary.Library.FrankExamples.Test (unitTests) where
import Control.Exception (Exception, throw)
import Control.Lens
import Control.Monad.Except
import Data.Text (Text)
import Data.Text.Prettyprint.Doc
import Data.Typeable (Typeable)
import NeatInterpolation
import Network.IPLD (toIpld)
import EasyTest hiding (run)
import Planetary.Core
import qualified Planetary.Library.FrankExamples as Frank
import Planetary.Core.Eval.Test (runTest)
import Planetary.Library
import Planetary.Library.HaskellForeign (mkForeignTm, intOpsId, haskellOracles)
import Planetary.Support.Ids
import Planetary.Support.NameResolution
import Planetary.Support.Parser
data NotEqual = NotEqual TmI TmI
deriving (Show, Typeable)
instance Exception NotEqual
unitTests :: Test ()
unitTests = do
Right testingDecls <- pure $ resolveDecls [ ("Unit", unitId) ] $
fst $ forceDeclarations [text|
interface TestingOps A B =
| checkEqual : A -> B -> <Unit>
|]
Just (checkingOpsId, _) <- pure $ namedInterface "TestingOps" testingDecls
let
unit = DataConstructor unitId 0 []
-- This should check that the two terms are equal. If so it just exits
-- with unit, otherwise it throws (to easytest).
checkEqualImpl :: Handler
checkEqualImpl st
| AppN _ [tm1, tm2] <- st ^. evalFocus =
if tm1 == tm2
then pure $ st & evalFocus .~ Value unit
else throw $ NotEqual tm1 tm2
checkEqualImpl _ = throwError (FailedForeignFun "checkEqualImpl")
testingHandlers :: AmbientHandlers
testingHandlers = AmbientHandlers $
haskellOracles <>
[ (checkingOpsId, [ checkEqualImpl ]) ]
checkEqual = Command checkingOpsId 0
scope "frank examples" $ tests
[ scope "catch" $ tests []
, scope "pipe" $ tests []
, scope "spacer" $ tests []
, scope "state" $ tests []
, scope "next" $ tests
[ do
-- Just (listfId, _) <- pure $ namedData "ListF" Frank.resolvedDecls
-- Just (pairId, _) <- pure $ namedData "Pair" Frank.resolvedDecls
Just stateCid <- pure $
Frank.resolvedDecls ^? globalCids . ix "State"
let
test = fst $ forceTm [text|
letrec
-- note: not `forall S X. {S -> <State S>X -> X}`
state : forall S X. {S -> {X} -> X}
= \s x -> handle x! : X with
State:
| <get -> k> -> state s (\-> k s)
| <put s -> k> -> state s (\-> k <Unit.0>)
| y -> y
-- TODO make List typecheck. IE Not ListF
map : forall X Y. {{X -> Y} -> <List X> -> <List Y>}
= \f lst -> case lst of
| <nil> -> <ListF.0>
| <cons x xs> -> <ListF.1 (f x) (map f xs)>
fst : forall X Y. {X -> Y -> X}
= \x y -> x
next : forall. {[<State <Int>>] <Int>}
= \-> fst get! (put (add get! one))
index
: forall X. {<List X> -> <List <Pair <Int> X>>}
= \xs -> state zero (\-> map (\x -> <Pair.0 next! x>) xs)
actual : forall. {[<State <Int>>] <Int>}
= \-> index abc!
abc : forall. {<List <Char>>}
= \-> cons a (cons b (cons c nil))
expected : forall. {[<State <Int>>] <Int>}
= \-> cons <Pair.0 zero a>
(cons <Pair.0 one b>
(cons <Pair.0 two c> nil))
in checkEqual actual! expected!
|]
add = Command intOpsId 0
get = Command stateCid 0
put = Command stateCid 1
(zero, zeroVal) = mkForeignTm @Int intId [] 0
(one, oneVal) = mkForeignTm @Int intId [] 1
(two, twoVal) = mkForeignTm @Int intId [] 2
(a, aVal) = mkForeignTm @Text textId [] "a"
(b, bVal) = mkForeignTm @Text textId [] "b"
(c, cVal) = mkForeignTm @Text textId [] "c"
-- pair a b = DataConstructor pairId 0 [a, b]
-- TODO:
-- * these definitions are copied from HaskellForeign.Test
-- * HaskellForeign.Test duplicates the ListF defn
-- lfixTm x = DataConstructor lfixId 0 [x]
-- lcons x xs = lfixTm (DataConstructor listfId 1 [x, xs])
-- lnil = lfixTm (DataConstructor listfId 0 [])
Just (listId, _) <- pure $ namedData "ListF" Frank.resolvedDecls
let store = storeOf $ toIpld <$>
[ zeroVal
, oneVal
, twoVal
, aVal
, bVal
, cVal
]
Right test' <- pure $ resolve test
let test'' = substituteAll
[ ("add", add)
, ("get", get)
, ("put", put)
, ("zero", zero)
, ("one", one)
, ("two", two)
, ("a", a)
, ("b", b)
, ("c", c)
, ("checkEqual", checkEqual)
, ("cons", Lambda ["x", "xs"] (DataConstructor listId 1 [V"x", V"xs"]))
, ("nil", DataConstructor listId 0 [])
]
test'
runTest "next one" testingHandlers store test'' (Right unit)
]
-- Example from "Continuation Passing Style for Effect Handlers"
-- - Hillerström, Lindley, Atkey, Sivaramakrishnan
, do
let tm = fst $ forceTm [text|
letrec
ifthenelse : forall A. {<Bool> -> {A} -> {A} -> A}
= \b l r -> case b of
| <False> -> r!
| <True> -> l!
cons : forall A. {A -> <List A> -> <List A>}
= \a as -> <List.1 a as>
concat
: forall A. {<List A> -> <List A> -> <List A>}
= \xs ys -> case xs of
| <nil> -> ys
| <cons x xs> -> cons x (concat xs ys)
singletonList
: forall a. {A -> <List A>}
= \a -> <List.1 a <List.0>>
drunkToss
: forall. {[<Choose <Bool>>, <Abort>] Toss}
-- decide whether the coin is caught
= \-> ifthenelse Choose.0!
-- decide the face
(\-> ifthenelse Choose.0! (\-> <Toss.0>) (\-> <Toss.1>))
(\-> Abort.0!)
-- use a list to model nondeterminism
-- nondet : forall A. {{[<Choose <Bool>>, <Abort>] A} -> <List A>}
-- = \a -> handle a! : A with
-- Choose:
-- | <choose -> k> -> concat (k Bool.0) (k Bool.1)
-- Abort:
-- | <aborting -> k> -> emptyList
-- | x -> singletonList x
-- TODO:
-- * should p come first or last?
-- * can we use variables other than e yet?
-- * change declarations format:
-- - data and effects aren't really different?
-- - toplevel letrec is weird - instead make it implicit
-- * nits
-- - really would be nice to remove some angle brackets
-- - s/Bool.0/True
allChoices : forall A. {{[p, <Choose <Bool>>] A} -> [p] <List <A>>}
= \a -> handle a! : A with
Choose:
| <choose -> k> -> concat (k <Bool.0>) (k <Bool.1>)
| x -> singletonList x
failure : forall A. {{[p, <Abort>] A} -> [p] <List <A>>}
= \a -> handle a! : A with
Abort:
| <aborting -> k> -> <List.0>
| x -> singletonList x
-- should give []
composition1
: forall A. {{[<Choose <Bool>>, <Abort>] A} -> <List <List A>>}
= \a -> failure (\-> allChoices a)
-- should give [[Heads], [Tails], []]
composition2
: forall A. {{[<Choose <Bool>>, <Abort>] A} -> <List <List A>>}
= \a -> allChoices (\-> failure a)
actual1
: forall. {<List <List <Toss>>>}
= \-> composition1 drunkToss
expected1
: forall. {<List <List <Toss>>>}
= \-> <List.0>
actual2
: forall. {<List <List <Toss>>>}
= \-> composition2 drunkToss
-- [[Heads], [Tails], []]
-- = [Heads]:[Tails]:[]:Nil
-- = (Heads:Nil):(Tails:Nil):Nil:Nil
-- = (cons (cons Heads Nil) (cons (cons Tails Nil) (cons Nil Nil)))
expected2
: forall. {<List <List <Toss>>>}
= \-> cons
(cons Heads <List.0>)
(cons
(cons Tails <List.0>)
(cons <List.0> <List.0>))
in checkEqual actual1! expected1! -- TODO actual2! expected2!
|]
Right tm' <- pure $ resolve tm
let tm'' = substituteAll
[ ("checkEqual", checkEqual)
]
tm'
let store = emptyStore -- storeOf $ toIpld <$> []
runTest "drunk toss" testingHandlers store tm'' (Right unit)
]
| joelburget/interplanetary-computation | src/Planetary/Library/FrankExamples/Test.hs | bsd-3-clause | 9,963 | 0 | 25 | 4,247 | 1,101 | 601 | 500 | -1 | -1 |
module Main where
import System.Environment
import System.IO
import System.Process
import System.Locale
import System.Posix.Process
import System.Posix.Files
import System.Posix.Directory
import System.Exit
import qualified Text.Regex.PDeriv.ByteString.LeftToRightD as R
import qualified Data.ByteString.Char8 as S
import Data.Char
pat = S.pack "([0-9\\.]*) user"
isExitFailure :: ExitCode -> Bool
isExitFailure (ExitFailure _) = True
isExitFailure _ = False
timeExec :: FilePath -> R.Regex -> String -> String -> String -> IO ()
timeExec fp r exec pstr arg = do
{ (ec,stdin,stderr) <- readProcessWithExitCode "time" [exec,pstr,arg] []
; if isExitFailure ec
then do print "failed"
print (show (ec,stdin,stderr))
else do { print stdin
; case parse r (S.pack $ stripCR stderr) of
{ Just (t:_) -> let ln = (joinWith (S.pack "\t") ((S.pack arg) : [t])) `S.append` (S.pack "\n")
in S.appendFile fp ln
; _ -> print "not matched" }
; putStr stderr }
}
stripCR :: String -> String
stripCR [] = []
stripCR ('\n':xs) = stripCR xs
stripCR (x:xs) = x:(stripCR xs)
joinWith :: S.ByteString -> [S.ByteString] -> S.ByteString
joinWith _ [] = S.empty
joinWith _ [x] = x
joinWith d (x:xs) = x `S.append` d `S.append` (joinWith d xs)
parse compiled s = case R.regexec compiled s of
(Right (Just (_,_,_,l))) -> Just l
_ -> Nothing
main :: IO ()
main = do
{ (exec:pstr:logfile:rest) <- getArgs
; let compiled = case R.compile R.defaultCompOpt R.defaultExecOpt pat of
Left _ -> error " compilation failed . "
Right r -> r
params = parseArgs rest
-- ; S.writeFile logfile S.empty
; mapM_ (\x -> do { timeExec logfile compiled exec pstr x } ) params
}
where
parseArgs :: [String] -> [String]
parseArgs args =
case args of
[l, u, inc ] | all (\a -> all isDigit a) args ->
let
l' = read l
u' = read u
inc' = read inc
params :: [ String ]
params = map show [l', l'+inc' .. u' ]
in params
_ -> args
| awalterschulze/xhaskell-regex-deriv | benchmarks/Timer.hs | bsd-3-clause | 2,487 | 0 | 24 | 933 | 853 | 454 | 399 | 59 | 3 |
data Foo = Foo {foo :: (Maybe Foo)} | mpickering/hlint-refactor | tests/examples/Bracket21.hs | bsd-3-clause | 35 | 0 | 10 | 7 | 22 | 12 | 10 | 1 | 0 |
module Main(main) where
import Control.Exception
import Data.Char
import System.Directory
import System.FilePath as FP
import System.IO
import qualified Data.ByteString.Lazy as LBS
import DataConstructors
import E.Main
import E.Program
import E.Rules
import E.Type
import FrontEnd.Class
import Grin.Main(compileToGrin)
import Grin.Show(render)
import Ho.Build
import Ho.Collected
import Ho.Library
import Name.Names
import Options
import StringTable.Atom
import Support.TempDir
import Util.Gen
import Util.SetLike as S
import Util.Std
import Version.Version(versionSimple)
import qualified FlagDump as FD
import qualified Interactive
main = wrapMain $ do
hSetEncoding stdout utf8
hSetEncoding stderr utf8
when (dump FD.Atom) $ do
addAtExit dumpStringTableStats
addAtExit dumpToFile
let darg = progressM $ do
(argstring,_) <- getArgString
return (argstring ++ "\n" ++ versionSimple)
case optMode options of
BuildHl hl -> darg >> buildLibrary processInitialHo processDecls hl
ListLibraries -> listLibraries
ShowHo ho -> dumpHoFile ho
PurgeCache -> purgeCache
Preprocess -> forM_ (optArgs options) $ \fn -> do
lbs <- LBS.readFile fn
res <- preprocessHs options fn lbs
LBS.putStr res
_ -> darg >> processFiles (optArgs options)
-- we are very careful to only delete cache files.
purgeCache = do
Just hc <- findHoCache
ds <- getDirectoryContents hc
let cacheFile fn = case map toLower (reverse fn) of
'o':'h':'.':fs -> length fs == 26 && all isAlphaNum fs
_ -> False
forM_ ds $ \fn -> when (cacheFile fn) (removeFile (hc </> fn))
processFiles :: [String] -> IO ()
processFiles cs = f cs (optMainFunc options) where
f [] Nothing = do
int <- Interactive.isInteractive
when (not int) $ putErrDie "jhc: no input files"
g [Left mod_Prelude]
f [] (Just (b,m)) = do
m <- getModule (parseName Val m)
g [Left m]
f cs _ = g (map fileOrModule cs)
g fs = processCollectedHo . snd =<< parseFiles options [outputName]
fs processInitialHo processDecls
fileOrModule f = case reverse f of
('s':'h':'.':_) -> Right f
('s':'h':'l':'.':_) -> Right f
('c':'s':'h':'.':_) -> Right f
_ -> Left $ toModule f
processCollectedHo cho = do
if optStop options == CompileHo then return () else do
putProgressLn "Collected Compilation..."
when (dump FD.ClassSummary) $ do
putStrLn " ---- class summary ---- "
printClassSummary (choClassHierarchy cho)
when (dump FD.Class) $ do
putStrLn " ---- class hierarchy ---- "
printClassHierarchy (choClassHierarchy cho)
let dataTable = choDataTable cho
combinators = values $ choCombinators cho
evaluate dataTable
evaluate combinators
let prog = programUpdate program {
progCombinators = combinators,
progDataTable = dataTable
}
-- dump final version of various requested things
wdump FD.Datatable $ putErrLn (render $ showDataTable dataTable)
wdump FD.DatatableBuiltin $
putErrLn (render $ showDataTable samplePrimitiveDataTable)
dumpRules (Rules $ fromList
[(combIdent x,combRules x) | x <- combinators, not $ null (combRules x)])
-- enter interactive mode
int <- Interactive.isInteractive
if int then Interactive.interact cho else do
prog <- compileWholeProgram prog
compileToGrin prog
progressM c = wdump FD.Progress $ (c >>= putErrLn) >> hFlush stderr
| hvr/jhc | src/Main.hs | mit | 3,651 | 6 | 19 | 943 | 1,184 | 578 | 606 | -1 | -1 |
{-
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[StgStats]{Gathers statistical information about programs}
The program gathers statistics about
\begin{enumerate}
\item number of boxed cases
\item number of unboxed cases
\item number of let-no-escapes
\item number of non-updatable lets
\item number of updatable lets
\item number of applications
\item number of primitive applications
\item number of closures (does not include lets bound to constructors)
\item number of free variables in closures
%\item number of top-level functions
%\item number of top-level CAFs
\item number of constructors
\end{enumerate}
-}
{-# LANGUAGE CPP #-}
module StgStats ( showStgStats ) where
#include "HsVersions.h"
import StgSyn
import Id (Id)
import Panic
import Data.Map (Map)
import qualified Data.Map as Map
data CounterType
= Literals
| Applications
| ConstructorApps
| PrimitiveApps
| LetNoEscapes
| StgCases
| FreeVariables
| ConstructorBinds Bool{-True<=>top-level-}
| ReEntrantBinds Bool{-ditto-}
| SingleEntryBinds Bool{-ditto-}
| UpdatableBinds Bool{-ditto-}
deriving (Eq, Ord)
type Count = Int
type StatEnv = Map CounterType Count
emptySE :: StatEnv
emptySE = Map.empty
combineSE :: StatEnv -> StatEnv -> StatEnv
combineSE = Map.unionWith (+)
combineSEs :: [StatEnv] -> StatEnv
combineSEs = foldr combineSE emptySE
countOne :: CounterType -> StatEnv
countOne c = Map.singleton c 1
countN :: CounterType -> Int -> StatEnv
countN = Map.singleton
{-
************************************************************************
* *
\subsection{Top-level list of bindings (a ``program'')}
* *
************************************************************************
-}
showStgStats :: [StgBinding] -> String
showStgStats prog
= "STG Statistics:\n\n"
++ concat (map showc (Map.toList (gatherStgStats prog)))
where
showc (x,n) = (showString (s x) . shows n) "\n"
s Literals = "Literals "
s Applications = "Applications "
s ConstructorApps = "ConstructorApps "
s PrimitiveApps = "PrimitiveApps "
s LetNoEscapes = "LetNoEscapes "
s StgCases = "StgCases "
s FreeVariables = "FreeVariables "
s (ConstructorBinds True) = "ConstructorBinds_Top "
s (ReEntrantBinds True) = "ReEntrantBinds_Top "
s (SingleEntryBinds True) = "SingleEntryBinds_Top "
s (UpdatableBinds True) = "UpdatableBinds_Top "
s (ConstructorBinds _) = "ConstructorBinds_Nested "
s (ReEntrantBinds _) = "ReEntrantBindsBinds_Nested "
s (SingleEntryBinds _) = "SingleEntryBinds_Nested "
s (UpdatableBinds _) = "UpdatableBinds_Nested "
gatherStgStats :: [StgBinding] -> StatEnv
gatherStgStats binds
= combineSEs (map (statBinding True{-top-level-}) binds)
{-
************************************************************************
* *
\subsection{Bindings}
* *
************************************************************************
-}
statBinding :: Bool -- True <=> top-level; False <=> nested
-> StgBinding
-> StatEnv
statBinding top (StgNonRec b rhs)
= statRhs top (b, rhs)
statBinding top (StgRec pairs)
= combineSEs (map (statRhs top) pairs)
statRhs :: Bool -> (Id, StgRhs) -> StatEnv
statRhs top (_, StgRhsCon _ _ _)
= countOne (ConstructorBinds top)
statRhs top (_, StgRhsClosure _ _ fv u _ body)
= statExpr body `combineSE`
countN FreeVariables (length fv) `combineSE`
countOne (
case u of
ReEntrant -> ReEntrantBinds top
Updatable -> UpdatableBinds top
SingleEntry -> SingleEntryBinds top
)
{-
************************************************************************
* *
\subsection{Expressions}
* *
************************************************************************
-}
statExpr :: StgExpr -> StatEnv
statExpr (StgApp _ _) = countOne Applications
statExpr (StgLit _) = countOne Literals
statExpr (StgConApp _ _ _)= countOne ConstructorApps
statExpr (StgOpApp _ _ _) = countOne PrimitiveApps
statExpr (StgTick _ e) = statExpr e
statExpr (StgLetNoEscape binds body)
= statBinding False{-not top-level-} binds `combineSE`
statExpr body `combineSE`
countOne LetNoEscapes
statExpr (StgLet binds body)
= statBinding False{-not top-level-} binds `combineSE`
statExpr body
statExpr (StgCase expr _ _ alts)
= statExpr expr `combineSE`
stat_alts alts `combineSE`
countOne StgCases
where
stat_alts alts
= combineSEs (map statExpr [ e | (_,_,e) <- alts ])
statExpr (StgLam {}) = panic "statExpr StgLam"
| olsner/ghc | compiler/simplStg/StgStats.hs | bsd-3-clause | 5,279 | 0 | 13 | 1,558 | 1,016 | 537 | 479 | 93 | 15 |
{-# LANGUAGE TypeFamilies, MultiParamTypeClasses, FlexibleInstances #-}
module ShouldFail where
import Data.Kind
class C7 a b where
data S7 b :: Type
instance C7 Char (a, Bool) where
data S7 (a, Bool) = S7_1
-- Fails because the arg to S7 should be the
-- same as that to C7
instance C7 Char (a, Int) where
data S7 (b, Int) = S7_2
| sdiehl/ghc | testsuite/tests/indexed-types/should_fail/SimpleFail9.hs | bsd-3-clause | 342 | 0 | 7 | 72 | 96 | 55 | 41 | -1 | -1 |
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE TypeFamilies #-}
module T15592b where
import Data.Kind
class C a where
type T (x :: (f :: k -> Type) a)
| sdiehl/ghc | testsuite/tests/polykinds/T15592b.hs | bsd-3-clause | 148 | 0 | 11 | 31 | 43 | 26 | 17 | -1 | -1 |
module Fixme () where
data F a b c = F (Int -> b -> c)
{- data F a b c = F (x::(Int -> b -> c)) @-}
{-@ bar :: F {v:Int| v >= 0} b c @-}
bar :: F Int b c
bar = undefined
{-@ foo :: F {v:Int| v >= 0} b c -> Int @-}
foo :: F Int b c -> Int
foo = undefined
{-@ hoo :: Int @-}
hoo = foo bar
| mightymoose/liquidhaskell | tests/pos/unusedtyvars.hs | bsd-3-clause | 294 | 0 | 9 | 90 | 80 | 47 | 33 | 7 | 1 |
{-# LANGUAGE QuasiQuotes #-}
module Test where
import QQ
f' = f . (+ 1)
[pq| foo |] -- Expands to f :: Int -> Int
f x = x + 1
| ezyang/ghc | testsuite/tests/quasiquotation/qq009/Test.hs | bsd-3-clause | 137 | 0 | 6 | 43 | 41 | 26 | 15 | 6 | 1 |
-- !!! do & where interaction
module ShouldCompile where
f1 :: IO a -> IO [a]
f1 x = do
v <- x
return [v]
where
g x = [x,x]
f2 :: IO a -> IO [a]
f2 x = do
v <- x
return (g v)
where
g x = [x,x]
f3 :: IO a -> IO [a]
f3 x = do
v <- x
return (g v)
where
g x = [x,x]
| urbanslug/ghc | testsuite/tests/parser/should_compile/read011.hs | bsd-3-clause | 293 | 0 | 9 | 105 | 185 | 93 | 92 | 16 | 1 |
module Main where
data X = X ()
{-# NOINLINE newX #-}
newX :: () -> IO X
newX n = do
let {-# NOINLINE value #-}
value = n
return (X value)
main = do
x <- newX (error "Why?")
case x of
X _ -> return ()
| siddhanathan/ghc | testsuite/tests/stranal/should_run/T2756b.hs | bsd-3-clause | 236 | 0 | 11 | 84 | 102 | 50 | 52 | 12 | 1 |
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE OverloadedStrings #-}
module Shed.BlobServer where
import qualified Crypto.Hash.SHA1 as SHA1
import Data.ByteString (ByteString)
import qualified Data.ByteString as BS
import qualified Data.ByteString.Builder as Builder
import qualified Data.ByteString.Lazy as BL
import Data.Maybe (isJust)
import Data.Monoid
import qualified Data.Text.Encoding as T
import Shed.Types
getBlobName :: ByteString -> IO SHA1
getBlobName dat = do
let digest = SHA1.hash dat
let chars = T.decodeUtf8 $ BL.toStrict $ Builder.toLazyByteString $ Builder.byteStringHex digest
return (SHA1 $ "sha1-" <> chars)
class BlobServer a where
statBlob :: a -> ByteString -> IO Bool
statBlob store dat = do
sha <- getBlobName dat
isJust <$> readBlob store sha
writeBlob :: a -> ByteString -> IO SHA1
readBlob :: a -> SHA1 -> IO (Maybe BL.ByteString)
enumerateBlobs :: a -> (SHA1 -> BL.ByteString -> IO ()) -> IO ()
data SomeBlobServer = forall s. BlobServer s => SomeBlobServer s
instance BlobServer SomeBlobServer where
writeBlob (SomeBlobServer s) = writeBlob s
readBlob (SomeBlobServer s) = readBlob s
enumerateBlobs (SomeBlobServer s) = enumerateBlobs s
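-- A minimal usage sketch (an illustrative addition; any concrete blob
-- server wrapped as a 'SomeBlobServer' will do):
roundTripExample :: SomeBlobServer -> ByteString -> IO ()
roundTripExample store dat = do
  ref <- writeBlob store dat
  mBlob <- readBlob store ref
  print (mBlob == Just (BL.fromStrict dat))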
| dbp/shed | src/Shed/BlobServer.hs | isc | 1,307 | 0 | 13 | 294 | 378 | 200 | 178 | 30 | 1 |
zip2 :: [a] -> [b] -> [(a, b)]
zip2 _ [] = []
zip2 [] _ = []
zip2 (x:xs) (y:ys) = (x, y): zip2 xs ys
| v0lkan/learning-haskell | session-003/024-zip-recursive.hs | mit | 101 | 0 | 8 | 27 | 96 | 52 | 44 | 4 | 1 |
module Main where
import LI11718
import qualified Tarefa4_2017li1g180 as T4
import System.Environment
import Text.Read
main = do
    args <- getArgs
    case args of
        ["atualiza"] -> do
            str <- getContents
            let params = readMaybe str
            case params of
                Nothing -> error "invalid parameters"
                Just (tempo,jogo,jogador,acao) -> print $ T4.atualiza tempo jogo jogador acao
        ["testes"] -> print $ take 100 $ T4.testesT4
        _ -> error "RunT4: invalid arguments"
{-# LANGUAGE QuasiQuotes #-}
module View.Doors where
import Model
import View.Helpers
import View.Layout
data DoorView = DoorView {
door :: Entity Door
}
doorsListView :: [DoorView] -> Html
doorsListView doors = layout [shamlet|
<a href="/doors/new">Add a Door
<ul>
$forall view <- doors
$with Entity key d <- door view
<li>
#{doorName d}
<a href="/doors/#{key}/edit">Edit
|]
doorsNewView :: View Text -> Html
doorsNewView view = layout [shamlet|
<form action="/doors" method="POST">
^{doorsFields view}
<input type="submit" value="save">
|]
doorsEditView :: Entity Door -> View Text -> Html
doorsEditView (Entity id _) view = layout [shamlet|
<form action="/doors/#{id}" method="POST">
^{doorsFields view}
<input type="submit" value="save">
|]
doorsFields :: View Text -> Html
doorsFields view = [shamlet|
^{textField "name" "Door Name" view}
^{textField "hardwareAddress" "Hardware Address" view}
|]
| flipstone/glados | src/View/Doors.hs | mit | 979 | 0 | 9 | 199 | 160 | 91 | 69 | 15 | 1 |
import System.IO
sumofsquares :: Int -> Bool
sumofsquares 0 = False
sumofsquares x = not (null (triangles x))
triangles :: Int -> [(Int, Int, Int)]
triangles x = [(a,b,x) | c <-[x] , b <- [1..c], a <- [1..b] , a^2 + b^2 == c]
main = do {
n <- readLn :: IO Int;
putStrLn . show $ sumofsquares n
}
| divayprakash/haskell-course | onlineTest2_problem5.hs | mit | 331 | 0 | 10 | 79 | 185 | 100 | 85 | 9 | 2 |
main = print (foldr1 lcm [1..20]) | emilkloeden/21-days-of-Euler | Haskell/5.hs | mit | 33 | 0 | 8 | 5 | 22 | 11 | 11 | 1 | 1 |
import System.Environment (getArgs)
firstwords :: String -> String
firstwords input = unlines fwords
where
fwords = map fw $ lines input
fw [] = []
fw l = head $ words l
interactWith function inputFile outputFile = do
input <- readFile inputFile
writeFile outputFile (function input)
main = mainWith myFunction
where mainWith function = do
args <- getArgs
case args of
[input,output] -> interactWith function input output
_ -> putStrLn "error: exactly two arguments needed"
-- replace "id" with the name of our function below
myFunction = firstwords
| lpenz/realworldhaskell-exercises | ch04/firstword.hs | mit | 649 | 1 | 11 | 187 | 179 | 86 | 93 | 16 | 2 |
module Gshell.Unionfs ( unmountWorkspace
, createWorkspace
, unmountWorkspaces
, generateBranch
) where
import Gshell.Names
import Gshell.State
import Utility.Debug
import System.Exit
import System.Process
import Control.Lens
import Control.Monad
import Control.Monad.Trans.Class
import Control.Monad.Trans.Either
import Control.Monad.Trans.State
import Data.Either
import System.Unix.Mount
import System.Directory.Tree
import System.FilePath
import System.FilePath.Posix
import Data.List
unionfs :: FilePath
unionfs = "unionfs"
ufoptions :: FilePath -> FilePath -> [String]
ufoptions path fullWorkDirName = ["-ocow", "-orelaxed_permissions", "-odebug_file=" ++ unionfsLogFile path fullWorkDirName]
fusermount :: FilePath
fusermount = "fusermount"
fuuoptions :: [String]
fuuoptions = ["-uz"]
runWithExitCodeMessage :: FilePath -> [String] -> IO Result
runWithExitCodeMessage proc options = do
(exitCode, stdo, stde) <- readProcessWithExitCode proc options ""
case exitCode of
ExitSuccess -> return $ Right $
ResultInfo $ [show proc ++ " " ++ (last options)]
ExitFailure i -> return $ Left $
show proc ++ " exit code: " ++ show i
++ "\nstdout: " ++ stdo
++ "\nstderr: " ++ stde
unmountWorkspaces :: GState -> IO Result
unmountWorkspaces state = do
let path = projectPath state
result <- mapM (unmountWorkspace . (path </>)) $ state ^.. workDirs._name
if (null $ result ^..below _Right)
then return $ Left $ concat $ intersperse ", " $ lefts result
else return $ Right $ ResultInfo $ concatMap (\(ResultInfo a) -> a) $ rights result
unmountWorkspace :: FilePath -> IO Result
unmountWorkspace toUmount = runEitherT $ do
mounted <- lift $ isMountPoint toUmount
result <- if mounted
then lift $ unmountWorkspace' $ toUmount
else return $ Right $ ResultInfo $ [toUmount ++ " is not mounted"]
case result of
Right b -> return b
Left b -> left b
unmountWorkspace' :: FilePath -> IO Result
unmountWorkspace' workspace = do
let options = fuuoptions ++ [workspace]
runWithExitCodeMessage fusermount options
createWorkspace :: FilePath -> [FilePath] -> GState -> IO Result
createWorkspace workingDir folders state = do
let rootDir = projectPath state
createWorkspace' rootDir folders workingDir
generateBranch :: [String] -> GState -> [FilePath]
generateBranch revs state = result
where
result = sortBy (cmpRevs state) $ nub $ concatMap (generateBranch' state) revs
generateBranch' :: GState -> String -> [FilePath]
generateBranch' state rev = rev : otherParents
where
allParents = (read $ state ^. parents rev) ^. parentsRevs
otherParents = if null allParents then [] else concatMap (generateBranch' state) allParents
makeDirs :: [FilePath] -> [String]
makeDirs folders = [intercalate ":" $ head' ++ tail']
where
head' = [last folders ++ "=RW"]
tail' = map (++ "=RO") (reverse $ init folders)
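-- For example (illustrative): makeDirs ["rev1","rev2","rev3"]
-- builds ["rev3=RW:rev2=RO:rev1=RO"], i.e. the newest revision is the
-- writable branch and the older ones are read-only.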
createWorkspace' :: FilePath -> [FilePath] -> FilePath -> IO Result
createWorkspace' rootDir folders workspace = do
let folders' = makeDirs $ map (flip (</>) mountDirName . (commitsDir rootDir </>)) $ folders
let options = ufoptions rootDir (takeFileName workspace) ++ folders' ++ [workspace]
runWithExitCodeMessage unionfs options
| ctlab/gShell | src/Gshell/Unionfs.hs | mit | 3,612 | 0 | 17 | 940 | 1,032 | 536 | 496 | -1 | -1 |
module Shexkell.Text.JSON.Control (
ObjectParser
, parseObject
, valueOf
, valueOpt
) where
import Control.Monad.Reader
import Data.Aeson
import Data.Aeson.Types
import qualified Data.Text as T
-- | Parsing in the context of a JSON object
type ObjectParser a = ReaderT Object Parser a
parseObject :: FromJSON a => ObjectParser a -> Object -> Parser a
parseObject = runReaderT
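-- A hypothetical usage sketch ('Person' is illustrative and not part of this
-- module): field parsers compose applicatively and are run with 'parseObject'
-- from within a 'FromJSON' instance.
--
-- > data Person = Person { name :: T.Text, nickname :: Maybe T.Text }
-- >
-- > instance FromJSON Person where
-- >   parseJSON = withObject "Person" $
-- >     parseObject (Person <$> valueOf "name" <*> valueOpt "nickname")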
-- | In the context of an object, obtains the value of a field by a
-- specified key
valueOf :: FromJSON a => T.Text -> ObjectParser a
valueOf = getValueWith (.:)
-- | In the context of an object, obtains the value of a field by a
-- specified key. If the field is not present, returns Nothing
valueOpt :: FromJSON a => T.Text -> ObjectParser (Maybe a)
valueOpt = getValueWith (.:?)
getValueWith ::
(Object -> T.Text -> Parser a) -- ^ Function that, given an object and a key, returns the value
-> T.Text -- ^ Key
-> ObjectParser a
getValueWith f key = ask >>= lift . (`f` key) | weso/shexkell | src/Shexkell/Text/JSON/Control.hs | mit | 1,012 | 0 | 9 | 226 | 225 | 128 | 97 | 21 | 1 |
#!/usr/bin/runhaskell
myDrop n xs =
if n <= 0 || null xs
then xs
else myDrop (n - 1) (tail xs)
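
-- A minimal driver so the script can be run directly with runhaskell; the
-- sample arguments are arbitrary.
main :: IO ()
main = print (myDrop 2 "foobar")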
| borisjoffe/haskell | myDrop.hs | mit | 99 | 6 | 8 | 25 | 55 | 28 | 27 | 4 | 2 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Nunavut.Propogation where
import Control.Lens (Lens', lens)
import Control.Monad.Trans.RWS (RWST)
import Data.Monoid (Monoid, mappend, mempty)
import Numeric.LinearAlgebra (Matrix, fromList, outer, toList)
import Nunavut.ErrorFunction (ErrorFunction)
import Nunavut.Newtypes (HasMtx (..), HasVec (..))
import Nunavut.Signals (ErrorSignal (..), Signal (..))
{--------------------------------------------------------------------------
- Types -
--------------------------------------------------------------------------}
data PropConfig = PConfig {
_learningRate :: Double,
_batchSize :: Int,
_errFunc :: ErrorFunction
} deriving (Show)
data PropData = PData {
_preWeights :: [Signal],
_preActivated :: [Signal]
} deriving (Show, Eq)
data PropDatum = PDatum {
_preWeights1 :: Signal,
_preActivated1 :: Signal
} deriving (Show, Eq)
newtype Update = Update { unUpdate :: Matrix Double }
deriving (Show, Eq)
newtype Updates = Updates { unUpdates :: [Update] }
deriving (Show)
type PropResult m = RWST () PropData () m Signal
type BackpropResult m = RWST PropConfig Updates ([Update], PropData) m ErrorSignal
type Propogation a m = a -> Signal -> PropResult m
type Backpropogation a m = a -> ErrorSignal -> BackpropResult m
{--------------------------------------------------------------------------
- Lenses -
--------------------------------------------------------------------------}
learningRate :: Lens' PropConfig Double
learningRate = lens _learningRate (\c r -> c { _learningRate = r })
batchSize :: Lens' PropConfig Int
batchSize = lens _batchSize (\c r -> c { _batchSize = r })
errFunc :: Lens' PropConfig ErrorFunction
errFunc = lens _errFunc (\c r -> c { _errFunc = r })
preWeights :: Lens' PropData [Signal]
preWeights = lens _preWeights (\p s -> p { _preWeights = s })
preActivated :: Lens' PropData [Signal]
preActivated = lens _preActivated (\p s -> p { _preActivated = s })
preWeights1 :: Lens' PropDatum Signal
preWeights1 = lens _preWeights1 (\p s -> p { _preWeights1 = s })
preActivated1 :: Lens' PropDatum Signal
preActivated1 = lens _preActivated1 (\p s -> p { _preActivated1 = s })
{--------------------------------------------------------------------------
- Constructors -
--------------------------------------------------------------------------}
mkUpdate :: Matrix Double -> Update
mkUpdate = Update
{--------------------------------------------------------------------------
- Instances -
--------------------------------------------------------------------------}
instance Monoid PropData where
mempty = PData mempty mempty
mappend (PData w1 a1) (PData w2 a2) = PData (w1 `mappend` w2) (a1 `mappend` a2)
instance Monoid Updates where
mempty = Updates []
  -- Combine element-wise: sum the heads, then recurse through Updates on the
  -- tails (plain list mappend here would concatenate rather than sum them).
  (Updates (u1:u1s)) `mappend` (Updates (u2:u2s)) = Updates $ u3 : unUpdates (Updates u1s `mappend` Updates u2s)
where u3 = fromMtx $ toMtx u1 + toMtx u2
(Updates []) `mappend` u2s = u2s
u1s `mappend` (Updates []) = u1s
instance HasMtx Update where
toMtx = unUpdate
fromMtx = mkUpdate
{--------------------------------------------------------------------------
- Helper Functions -
--------------------------------------------------------------------------}
onElements :: HasVec a
=> ([Double] -> [Double])
-> a -> a
onElements f = fromVec . fromList . f . toList . toVec
withBias :: HasVec a => a -> a
withBias = onElements (1 :)
withoutBias :: HasVec a => a -> a
withoutBias = onElements tail
(><) :: Signal -> ErrorSignal -> Update
(><) (Sig sig) (ErrSig err) = fromMtx $ sig `outer` err
| markcwhitfield/nunavut | src/Nunavut/Propogation.hs | mit | 4,767 | 0 | 10 | 1,734 | 1,034 | 589 | 445 | 68 | 1 |
{-#LANGUAGE ScopedTypeVariables #-}
{-#LANGUAGE DeriveGeneric #-}
module GenericTypeSpec where
-- Test tools
import Test.Hspec
import Test.QuickCheck
-- Tested modules
import GenericType
-- Additional data
import Foreign.Storable.Generic
import Foreign.Ptr (Ptr)
import Foreign.Marshal.Alloc (malloc)
import Data.Int
import GHC.Generics
import Foreign.Storable.Generic.Internal
data TestData = TestData Int Int64 Int8 Int8
deriving (Show, Generic, Eq)
instance GStorable TestData
instance Arbitrary TestData where
arbitrary = TestData <$> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary
spec :: Spec
spec = do
    describe "M1 M1 (M1 K1 :*: M1 K1)" $ do
        -- No direct test cases for the bare representation yet; the :*:
        -- properties below exercise it indirectly.
        it "has no direct tests yet" pending
describe "instance (GStorable' f, GStorable' g) => GStorable (:*: f g p)" $ do
it "glistSizeOf' @((:*:) f g p) == glistSizeOf' @(f p) ++ glistSizeOf' @(g p) " $ do
-- Construct the tree using typeProduct
property (\gen_type1 gen_type2 ->
sequence_ [listOfSizes gen_type1 ++ listOfSizes gen_type2 `shouldBe` (listOfSizes $ typeProduct gen_type1 gen_type2)
,listOfSizes gen_type2 ++ listOfSizes gen_type1 `shouldBe` (listOfSizes $ typeProduct gen_type2 gen_type1)
])
it "glistAlignment' @((:*:) f g p) == glistAlignment' @(f p) ++ glistAlignment' @(g p)" $ do
property (\gen_type1 gen_type2 ->
sequence_ [listOfAlignments gen_type1 ++ listOfAlignments gen_type2 `shouldBe` (listOfAlignments $ typeProduct gen_type1 gen_type2)
,listOfAlignments gen_type2 ++ listOfAlignments gen_type1 `shouldBe` (listOfAlignments $ typeProduct gen_type2 gen_type1)
])
it "gpeekByteOff' works for both the test type and it's representation at :*: level" $ do
property (\(gstor :: TestData)-> do
-- The generic representation of TestData, without the M1 M1 constructors.
let gen_rep = unM1 $ unM1 $ from gstor
-- Memory for the test data.
ptr <- malloc :: IO (Ptr TestData)
-- First peek of the raw memory.
gen_rep_peeked <- gpeekByteOff ptr 0
gstor_peeked <- gpeekByteOff ptr 0 :: IO TestData
-- Save the gstor value in the pointer.
-- Assumes that gpokeByteOff works.
gpokeByteOff ptr 0 gstor
-- Second peek to the modified memory
gen_rep_peeked2 <- gpeekByteOff ptr 0
gstor_peeked2 <- gpeekByteOff ptr 0 :: IO TestData
-- Get the values from generic reps back.
let back_to_life = to $ M1 $ M1 gen_rep_peeked :: TestData
back_to_life2 = to $ M1 $ M1 gen_rep_peeked2 :: TestData
-- Compare:
sequence_ [back_to_life `shouldBe` gstor_peeked
,back_to_life2 `shouldBe` gstor_peeked2
]
)
it "gpokeByteOff' works for both the test type and it's representation at :*: level" $ do
property (\(gstor :: TestData)-> do
-- The generic representation of TestData, without the M1 M1 constructors.
let gen_rep = unM1 $ unM1 $ from gstor
-- Memory for the test data.
ptr1 <- malloc :: IO (Ptr TestData)
ptr2 <- malloc :: IO (Ptr TestData)
-- Poke the memory
gpokeByteOff ptr1 0 gstor
gpokeByteOff ptr2 0 gen_rep
-- Read the memory
-- Assumes that gpeekByteOff works
ptr1_peeked <- gpeekByteOff ptr1 0 :: IO TestData
ptr2_peeked <- gpeekByteOff ptr2 0 :: IO TestData
ptr1_peeked `shouldBe` ptr2_peeked
)
it "gsizeOf a == gsizeOf (unM1 $ unM1 $ from a)" $ do
property (\(gstor :: TestData) -> do
let gen_rep = unM1 $ unM1 $ from gstor
gsizeOf gstor `shouldBe` gsizeOf gen_rep
)
it "galignment a == galignment (unM1 $ unM1 $ from a)" $ do
property (\(gstor :: TestData) -> do
let gen_rep = unM1 $ unM1 $ from gstor
galignment gstor `shouldBe` galignment gen_rep
)
main :: IO ()
main = hspec spec
| mkloczko/derive-storable | test/GenericRep/GenericTypeSpec.hs | mit | 4,469 | 0 | 22 | 1,577 | 871 | 437 | 434 | 63 | 1 |
{-# LANGUAGE OverloadedStrings, TemplateHaskell #-}
-- | Service API.
module Web.Mackerel.Api.Service
( listServices
, createService
, deleteService
, listServiceMetricNames
) where
import Data.Aeson.TH (deriveJSON)
import qualified Data.ByteString.Char8 as BS
import Network.HTTP.Types (StdMethod(..))
import Web.Mackerel.Client
import Web.Mackerel.Internal.Api
import Web.Mackerel.Internal.TH
import Web.Mackerel.Types.Service
data ListServicesResponse = ListServicesResponse { responseServices :: [Service] }
$(deriveJSON options ''ListServicesResponse)
listServices :: Client -> IO (Either ApiError [Service])
listServices client
= request client GET "/api/v0/services" [] emptyBody (createHandler responseServices)
createService :: Client -> Service -> IO (Either ApiError Service)
createService client service
= request client POST "/api/v0/services" [] (Just service) (createHandler id)
deleteService :: Client -> String -> IO (Either ApiError Service)
deleteService client name
= request client DELETE ("/api/v0/services/" <> BS.pack name) [] emptyBody (createHandler id)
data ListMetricNamesResponse = ListMetricNamesResponse { responseNames :: [String] }
$(deriveJSON options ''ListMetricNamesResponse)
listServiceMetricNames :: Client -> String -> IO (Either ApiError [String])
listServiceMetricNames client serviceName'
= request client GET ("/api/v0/services/" <> BS.pack serviceName' <> "/metric-names") [] emptyBody (createHandler responseNames)
| itchyny/mackerel-client-hs | src/Web/Mackerel/Api/Service.hs | mit | 1,487 | 0 | 10 | 184 | 404 | 220 | 184 | 29 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE CPP #-}
import Test.Hspec
import Test.Hspec.QuickCheck (prop)
import Test.QuickCheck.Monadic (assert, monadicIO, run)
import qualified Data.Conduit as C
import qualified Data.Conduit.Util as C
import qualified Data.Conduit.Internal as CI
import qualified Data.Conduit.List as CL
import qualified Data.Conduit.Lazy as CLazy
import qualified Data.Conduit.Binary as CB
import qualified Data.Conduit.Text as CT
import Data.Conduit (runResourceT)
import Data.Maybe (fromMaybe,catMaybes)
import qualified Data.List as DL
import Control.Monad.ST (runST)
import Data.Monoid
import qualified Data.ByteString as S
import qualified Data.ByteString.Char8 as S8
import qualified Data.IORef as I
import qualified Data.ByteString.Lazy as L
import Data.ByteString.Lazy.Char8 ()
import qualified Data.Text as T
import qualified Data.Text.Lazy as TL
import qualified Data.Text.Lazy.Encoding as TLE
import Control.Monad.Trans.Resource (runExceptionT, runExceptionT_, allocate, resourceForkIO)
import Control.Concurrent (threadDelay, killThread)
import Control.Monad.IO.Class (liftIO)
import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.Writer (execWriter, tell, runWriterT)
import Control.Monad.Trans.State (evalStateT, get, put)
import Control.Applicative (pure, (<$>), (<*>))
import Data.Functor.Identity (Identity,runIdentity)
import Control.Monad (forever)
import Data.Void (Void)
import qualified Control.Concurrent.MVar as M
import Control.Monad.Error (catchError, throwError, Error)
(@=?) :: (Eq a, Show a) => a -> a -> IO ()
(@=?) = flip shouldBe
-- Quickcheck property for testing equivalence of list processing
-- functions and their conduit counterparts
equivToList :: Eq b => ([a] -> [b]) -> CI.Conduit a Identity b -> [a] -> Bool
equivToList f conduit xs =
f xs == runIdentity (CL.sourceList xs C.$$ conduit C.=$= CL.consume)
main :: IO ()
main = hspec $ do
describe "data loss rules" $ do
it "consumes the source to quickly" $ do
x <- runResourceT $ CL.sourceList [1..10 :: Int] C.$$ do
strings <- CL.map show C.=$ CL.take 5
liftIO $ putStr $ unlines strings
CL.fold (+) 0
40 `shouldBe` x
it "correctly consumes a chunked resource" $ do
x <- runResourceT $ (CL.sourceList [1..5 :: Int] `mappend` CL.sourceList [6..10]) C.$$ do
strings <- CL.map show C.=$ CL.take 5
liftIO $ putStr $ unlines strings
CL.fold (+) 0
40 `shouldBe` x
describe "filter" $ do
it "even" $ do
x <- runResourceT $ CL.sourceList [1..10] C.$$ CL.filter even C.=$ CL.consume
x `shouldBe` filter even [1..10 :: Int]
prop "concat" $ equivToList (concat :: [[Int]]->[Int]) CL.concat
describe "mapFoldable" $ do
prop "list" $
equivToList (concatMap (:[]) :: [Int]->[Int]) (CL.mapFoldable (:[]))
let f x = if odd x then Just x else Nothing
prop "Maybe" $
equivToList (catMaybes . map f :: [Int]->[Int]) (CL.mapFoldable f)
prop "scanl" $ equivToList (tail . scanl (+) 0 :: [Int]->[Int]) (CL.scanl (\a s -> (a+s,a+s)) 0)
-- mapFoldableM and scanlM are fully polymorphic in type of monad
-- so it suffice to check only with Identity.
describe "mapFoldableM" $ do
prop "list" $
equivToList (concatMap (:[]) :: [Int]->[Int]) (CL.mapFoldableM (return . (:[])))
let f x = if odd x then Just x else Nothing
prop "Maybe" $
equivToList (catMaybes . map f :: [Int]->[Int]) (CL.mapFoldableM (return . f))
prop "scanl" $ equivToList (tail . scanl (+) 0 :: [Int]->[Int]) (CL.scanlM (\a s -> return (a+s,a+s)) 0)
describe "ResourceT" $ do
it "resourceForkIO" $ do
counter <- I.newIORef 0
let w = allocate
(I.atomicModifyIORef counter $ \i ->
(i + 1, ()))
(const $ I.atomicModifyIORef counter $ \i ->
(i - 1, ()))
runResourceT $ do
_ <- w
_ <- resourceForkIO $ return ()
_ <- resourceForkIO $ return ()
sequence_ $ replicate 1000 $ do
tid <- resourceForkIO $ return ()
liftIO $ killThread tid
_ <- resourceForkIO $ return ()
_ <- resourceForkIO $ return ()
return ()
-- give enough of a chance to the cleanup code to finish
threadDelay 1000
res <- I.readIORef counter
res `shouldBe` (0 :: Int)
describe "sum" $ do
it "works for 1..10" $ do
x <- runResourceT $ CL.sourceList [1..10] C.$$ CL.fold (+) (0 :: Int)
x `shouldBe` sum [1..10]
prop "is idempotent" $ \list ->
(runST $ CL.sourceList list C.$$ CL.fold (+) (0 :: Int))
== sum list
describe "foldMap" $ do
it "sums 1..10" $ do
Sum x <- CL.sourceList [1..(10 :: Int)] C.$$ CL.foldMap Sum
x `shouldBe` sum [1..10]
it "preserves order" $ do
x <- CL.sourceList [[4],[2],[3],[1]] C.$$ CL.foldMap (++[(9 :: Int)])
x `shouldBe` [4,9,2,9,3,9,1,9]
describe "foldMapM" $ do
it "sums 1..10" $ do
Sum x <- CL.sourceList [1..(10 :: Int)] C.$$ CL.foldMapM (return . Sum)
x `shouldBe` sum [1..10]
it "preserves order" $ do
x <- CL.sourceList [[4],[2],[3],[1]] C.$$ CL.foldMapM (return . (++[(9 :: Int)]))
x `shouldBe` [4,9,2,9,3,9,1,9]
describe "unfold" $ do
it "works" $ do
let f 0 = Nothing
f i = Just (show i, i - 1)
seed = 10 :: Int
x <- CL.unfold f seed C.$$ CL.consume
let y = DL.unfoldr f seed
x `shouldBe` y
describe "Monoid instance for Source" $ do
it "mappend" $ do
x <- runResourceT $ (CL.sourceList [1..5 :: Int] `mappend` CL.sourceList [6..10]) C.$$ CL.fold (+) 0
x `shouldBe` sum [1..10]
it "mconcat" $ do
x <- runResourceT $ mconcat
[ CL.sourceList [1..5 :: Int]
, CL.sourceList [6..10]
, CL.sourceList [11..20]
] C.$$ CL.fold (+) 0
x `shouldBe` sum [1..20]
describe "file access" $ do
it "read" $ do
bs <- S.readFile "conduit.cabal"
bss <- runResourceT $ CB.sourceFile "conduit.cabal" C.$$ CL.consume
bs @=? S.concat bss
it "read range" $ do
S.writeFile "tmp" "0123456789"
bss <- runResourceT $ CB.sourceFileRange "tmp" (Just 2) (Just 3) C.$$ CL.consume
S.concat bss `shouldBe` "234"
it "write" $ do
runResourceT $ CB.sourceFile "conduit.cabal" C.$$ CB.sinkFile "tmp"
bs1 <- S.readFile "conduit.cabal"
bs2 <- S.readFile "tmp"
bs1 @=? bs2
it "conduit" $ do
runResourceT $ CB.sourceFile "conduit.cabal"
C.$= CB.conduitFile "tmp"
C.$$ CB.sinkFile "tmp2"
bs1 <- S.readFile "conduit.cabal"
bs2 <- S.readFile "tmp"
bs3 <- S.readFile "tmp2"
bs1 @=? bs2
bs1 @=? bs3
describe "zipping" $ do
it "zipping two small lists" $ do
res <- runResourceT $ C.zip (CL.sourceList [1..10]) (CL.sourceList [11..12]) C.$$ CL.consume
res @=? zip [1..10 :: Int] [11..12 :: Int]
describe "zipping sinks" $ do
it "take all" $ do
res <- runResourceT $ CL.sourceList [1..10] C.$$ C.zipSinks CL.consume CL.consume
res @=? ([1..10 :: Int], [1..10 :: Int])
it "take fewer on left" $ do
res <- runResourceT $ CL.sourceList [1..10] C.$$ C.zipSinks (CL.take 4) CL.consume
res @=? ([1..4 :: Int], [1..10 :: Int])
it "take fewer on right" $ do
res <- runResourceT $ CL.sourceList [1..10] C.$$ C.zipSinks CL.consume (CL.take 4)
res @=? ([1..10 :: Int], [1..4 :: Int])
describe "Monad instance for Sink" $ do
it "binding" $ do
x <- runResourceT $ CL.sourceList [1..10] C.$$ do
_ <- CL.take 5
CL.fold (+) (0 :: Int)
x `shouldBe` sum [6..10]
describe "Applicative instance for Sink" $ do
it "<$> and <*>" $ do
x <- runResourceT $ CL.sourceList [1..10] C.$$
(+) <$> pure 5 <*> CL.fold (+) (0 :: Int)
x `shouldBe` sum [1..10] + 5
describe "resumable sources" $ do
it "simple" $ do
(x, y, z) <- runResourceT $ do
let src1 = CL.sourceList [1..10 :: Int]
(src2, x) <- src1 C.$$+ CL.take 5
(src3, y) <- src2 C.$$++ CL.fold (+) 0
z <- src3 C.$$+- CL.consume
return (x, y, z)
x `shouldBe` [1..5] :: IO ()
y `shouldBe` sum [6..10]
z `shouldBe` []
describe "conduits" $ do
it "map, left" $ do
x <- runResourceT $
CL.sourceList [1..10]
C.$= CL.map (* 2)
C.$$ CL.fold (+) 0
x `shouldBe` 2 * sum [1..10 :: Int]
it "map, left >+>" $ do
x <- runResourceT $
CI.ConduitM
(CI.unConduitM (CL.sourceList [1..10])
CI.>+> CI.injectLeftovers (CI.unConduitM $ CL.map (* 2)))
C.$$ CL.fold (+) 0
x `shouldBe` 2 * sum [1..10 :: Int]
it "map, right" $ do
x <- runResourceT $
CL.sourceList [1..10]
C.$$ CL.map (* 2)
C.=$ CL.fold (+) 0
x `shouldBe` 2 * sum [1..10 :: Int]
it "groupBy" $ do
let input = [1::Int, 1, 2, 3, 3, 3, 4, 5, 5]
x <- runResourceT $ CL.sourceList input
C.$$ CL.groupBy (==)
C.=$ CL.consume
x `shouldBe` DL.groupBy (==) input
it "groupBy (nondup begin/end)" $ do
let input = [1::Int, 2, 3, 3, 3, 4, 5]
x <- runResourceT $ CL.sourceList input
C.$$ CL.groupBy (==)
C.=$ CL.consume
x `shouldBe` DL.groupBy (==) input
it "mapMaybe" $ do
let input = [Just (1::Int), Nothing, Just 2, Nothing, Just 3]
x <- runResourceT $ CL.sourceList input
C.$$ CL.mapMaybe ((+2) <$>)
C.=$ CL.consume
x `shouldBe` [3, 4, 5]
it "mapMaybeM" $ do
let input = [Just (1::Int), Nothing, Just 2, Nothing, Just 3]
x <- runResourceT $ CL.sourceList input
C.$$ CL.mapMaybeM (return . ((+2) <$>))
C.=$ CL.consume
x `shouldBe` [3, 4, 5]
it "catMaybes" $ do
let input = [Just (1::Int), Nothing, Just 2, Nothing, Just 3]
x <- runResourceT $ CL.sourceList input
C.$$ CL.catMaybes
C.=$ CL.consume
x `shouldBe` [1, 2, 3]
it "concatMap" $ do
let input = [1, 11, 21]
x <- runResourceT $ CL.sourceList input
C.$$ CL.concatMap (\i -> enumFromTo i (i + 9))
C.=$ CL.fold (+) (0 :: Int)
x `shouldBe` sum [1..30]
it "bind together" $ do
let conduit = CL.map (+ 5) C.=$= CL.map (* 2)
x <- runResourceT $ CL.sourceList [1..10] C.$= conduit C.$$ CL.fold (+) 0
x `shouldBe` sum (map (* 2) $ map (+ 5) [1..10 :: Int])
#if !FAST
describe "isolate" $ do
it "bound to resumable source" $ do
(x, y) <- runResourceT $ do
let src1 = CL.sourceList [1..10 :: Int]
(src2, x) <- src1 C.$= CL.isolate 5 C.$$+ CL.consume
y <- src2 C.$$+- CL.consume
return (x, y)
x `shouldBe` [1..5]
y `shouldBe` []
it "bound to sink, non-resumable" $ do
(x, y) <- runResourceT $ do
CL.sourceList [1..10 :: Int] C.$$ do
x <- CL.isolate 5 C.=$ CL.consume
y <- CL.consume
return (x, y)
x `shouldBe` [1..5]
y `shouldBe` [6..10]
it "bound to sink, resumable" $ do
(x, y) <- runResourceT $ do
let src1 = CL.sourceList [1..10 :: Int]
(src2, x) <- src1 C.$$+ CL.isolate 5 C.=$ CL.consume
y <- src2 C.$$+- CL.consume
return (x, y)
x `shouldBe` [1..5]
y `shouldBe` [6..10]
it "consumes all data" $ do
x <- runResourceT $ CL.sourceList [1..10 :: Int] C.$$ do
CL.isolate 5 C.=$ CL.sinkNull
CL.consume
x `shouldBe` [6..10]
describe "lazy" $ do
it' "works inside a ResourceT" $ runResourceT $ do
counter <- liftIO $ I.newIORef 0
let incr i = do
istate <- liftIO $ I.newIORef $ Just (i :: Int)
let loop = do
res <- liftIO $ I.atomicModifyIORef istate ((,) Nothing)
case res of
Nothing -> return ()
Just x -> do
count <- liftIO $ I.atomicModifyIORef counter
(\j -> (j + 1, j + 1))
liftIO $ count `shouldBe` i
C.yield x
loop
loop
nums <- CLazy.lazyConsume $ mconcat $ map incr [1..10]
liftIO $ nums `shouldBe` [1..10]
it' "returns nothing outside ResourceT" $ do
bss <- runResourceT $ CLazy.lazyConsume $ CB.sourceFile "test/main.hs"
bss `shouldBe` []
it' "works with pure sources" $ do
nums <- CLazy.lazyConsume $ forever $ C.yield 1
take 100 nums `shouldBe` replicate 100 (1 :: Int)
describe "sequence" $ do
it "simple sink" $ do
let sumSink = do
ma <- CL.head
case ma of
Nothing -> return 0
Just a -> (+a) . fromMaybe 0 <$> CL.head
res <- runResourceT $ CL.sourceList [1..11 :: Int]
C.$= CL.sequence sumSink
C.$$ CL.consume
res `shouldBe` [3, 7, 11, 15, 19, 11]
it "sink with unpull behaviour" $ do
let sumSink = do
ma <- CL.head
case ma of
Nothing -> return 0
Just a -> (+a) . fromMaybe 0 <$> CL.peek
res <- runResourceT $ CL.sourceList [1..11 :: Int]
C.$= CL.sequence sumSink
C.$$ CL.consume
res `shouldBe` [3, 5, 7, 9, 11, 13, 15, 17, 19, 21, 11]
#endif
describe "peek" $ do
it "works" $ do
(a, b) <- runResourceT $ CL.sourceList [1..10 :: Int] C.$$ do
a <- CL.peek
b <- CL.consume
return (a, b)
(a, b) `shouldBe` (Just 1, [1..10])
describe "text" $ do
let go enc tenc tdec cenc = do
prop (enc ++ " single chunk") $ \chars -> runST $ runExceptionT_ $ do
let tl = TL.pack chars
lbs = tenc tl
src = CL.sourceList $ L.toChunks lbs
ts <- src C.$= CT.decode cenc C.$$ CL.consume
return $ TL.fromChunks ts == tl
prop (enc ++ " many chunks") $ \chars -> runIdentity $ runExceptionT_ $ do
let tl = TL.pack chars
lbs = tenc tl
src = mconcat $ map (CL.sourceList . return . S.singleton) $ L.unpack lbs
ts <- src C.$= CT.decode cenc C.$$ CL.consume
return $ TL.fromChunks ts == tl
-- Check whether raw bytes are decoded correctly, in
-- particular that Text decoding produces an error if
-- and only if Conduit does.
prop (enc ++ " raw bytes") $ \bytes ->
let lbs = L.pack bytes
src = CL.sourceList $ L.toChunks lbs
etl = C.runException $ src C.$= CT.decode cenc C.$$ CL.consume
tl' = tdec lbs
in case etl of
(Left _) -> (return $! TL.toStrict tl') `shouldThrow` anyException
(Right tl) -> TL.fromChunks tl `shouldBe` tl'
prop (enc ++ " encoding") $ \chars -> runIdentity $ runExceptionT_ $ do
let tss = map T.pack chars
lbs = tenc $ TL.fromChunks tss
src = mconcat $ map (CL.sourceList . return) tss
bss <- src C.$= CT.encode cenc C.$$ CL.consume
return $ L.fromChunks bss == lbs
go "utf8" TLE.encodeUtf8 TLE.decodeUtf8 CT.utf8
go "utf16_le" TLE.encodeUtf16LE TLE.decodeUtf16LE CT.utf16_le
go "utf16_be" TLE.encodeUtf16BE TLE.decodeUtf16BE CT.utf16_be
go "utf32_le" TLE.encodeUtf32LE TLE.decodeUtf32LE CT.utf32_le
go "utf32_be" TLE.encodeUtf32BE TLE.decodeUtf32BE CT.utf32_be
describe "text lines" $ do
it "works across split lines" $
(CL.sourceList [T.pack "abc", T.pack "d\nef"] C.$= CT.lines C.$$ CL.consume) ==
[[T.pack "abcd", T.pack "ef"]]
it "works with multiple lines in an item" $
(CL.sourceList [T.pack "ab\ncd\ne"] C.$= CT.lines C.$$ CL.consume) ==
[[T.pack "ab", T.pack "cd", T.pack "e"]]
it "works with ending on a newline" $
(CL.sourceList [T.pack "ab\n"] C.$= CT.lines C.$$ CL.consume) ==
[[T.pack "ab"]]
it "works with ending a middle item on a newline" $
(CL.sourceList [T.pack "ab\n", T.pack "cd\ne"] C.$= CT.lines C.$$ CL.consume) ==
[[T.pack "ab", T.pack "cd", T.pack "e"]]
it "is not too eager" $ do
x <- CL.sourceList ["foobarbaz", error "ignore me"] C.$$ CT.decode CT.utf8 C.=$ CL.head
x `shouldBe` Just "foobarbaz"
describe "text lines bounded" $ do
it "works across split lines" $
(CL.sourceList [T.pack "abc", T.pack "d\nef"] C.$= CT.linesBounded 80 C.$$ CL.consume) ==
[[T.pack "abcd", T.pack "ef"]]
it "works with multiple lines in an item" $
(CL.sourceList [T.pack "ab\ncd\ne"] C.$= CT.linesBounded 80 C.$$ CL.consume) ==
[[T.pack "ab", T.pack "cd", T.pack "e"]]
it "works with ending on a newline" $
(CL.sourceList [T.pack "ab\n"] C.$= CT.linesBounded 80 C.$$ CL.consume) ==
[[T.pack "ab"]]
it "works with ending a middle item on a newline" $
(CL.sourceList [T.pack "ab\n", T.pack "cd\ne"] C.$= CT.linesBounded 80 C.$$ CL.consume) ==
[[T.pack "ab", T.pack "cd", T.pack "e"]]
it "is not too eager" $ do
x <- CL.sourceList ["foobarbaz", error "ignore me"] C.$$ CT.decode CT.utf8 C.=$ CL.head
x `shouldBe` Just "foobarbaz"
it "throws an exception when lines are too long" $ do
x <- C.runExceptionT $ CL.sourceList ["hello\nworld"] C.$$ CT.linesBounded 4 C.=$ CL.consume
show x `shouldBe` show (Left $ CT.LengthExceeded 4 :: Either CT.TextException ())
describe "binary isolate" $ do
it "works" $ do
bss <- runResourceT $ CL.sourceList (replicate 1000 "X")
C.$= CB.isolate 6
C.$$ CL.consume
S.concat bss `shouldBe` "XXXXXX"
describe "unbuffering" $ do
it "works" $ do
x <- runResourceT $ do
let src1 = CL.sourceList [1..10 :: Int]
(src2, ()) <- src1 C.$$+ CL.drop 5
src2 C.$$+- CL.fold (+) 0
x `shouldBe` sum [6..10]
describe "operators" $ do
it "only use =$=" $
runIdentity
( CL.sourceList [1..10 :: Int]
C.$$ CL.map (+ 1)
C.=$ CL.map (subtract 1)
C.=$ CL.mapM (return . (* 2))
C.=$ CL.map (`div` 2)
C.=$ CL.fold (+) 0
) `shouldBe` sum [1..10]
it "only use =$" $
runIdentity
( CL.sourceList [1..10 :: Int]
C.$$ CL.map (+ 1)
C.=$ CL.map (subtract 1)
C.=$ CL.map (* 2)
C.=$ CL.map (`div` 2)
C.=$ CL.fold (+) 0
) `shouldBe` sum [1..10]
it "chain" $ do
x <- CL.sourceList [1..10 :: Int]
C.$= CL.map (+ 1)
C.$= CL.map (+ 1)
C.$= CL.map (+ 1)
C.$= CL.map (subtract 3)
C.$= CL.map (* 2)
C.$$ CL.map (`div` 2)
C.=$ CL.map (+ 1)
C.=$ CL.map (+ 1)
C.=$ CL.map (+ 1)
C.=$ CL.map (subtract 3)
C.=$ CL.fold (+) 0
x `shouldBe` sum [1..10]
describe "properly using binary file reading" $ do
it "sourceFile" $ do
x <- runResourceT $ CB.sourceFile "test/random" C.$$ CL.consume
lbs <- L.readFile "test/random"
L.fromChunks x `shouldBe` lbs
describe "binary head" $ do
let go lbs = do
x <- CB.head
case (x, L.uncons lbs) of
(Nothing, Nothing) -> return True
(Just y, Just (z, lbs'))
| y == z -> go lbs'
_ -> return False
prop "works" $ \bss' ->
let bss = map S.pack bss'
in runIdentity $
CL.sourceList bss C.$$ go (L.fromChunks bss)
describe "binary takeWhile" $ do
prop "works" $ \bss' ->
let bss = map S.pack bss'
in runIdentity $ do
bss2 <- CL.sourceList bss C.$$ CB.takeWhile (>= 5) C.=$ CL.consume
return $ L.fromChunks bss2 == L.takeWhile (>= 5) (L.fromChunks bss)
prop "leftovers present" $ \bss' ->
let bss = map S.pack bss'
in runIdentity $ do
result <- CL.sourceList bss C.$$ do
x <- CB.takeWhile (>= 5) C.=$ CL.consume
y <- CL.consume
return (S.concat x, S.concat y)
let expected = S.span (>= 5) $ S.concat bss
if result == expected
then return True
else error $ show (S.concat bss, result, expected)
describe "binary dropWhile" $ do
prop "works" $ \bss' ->
let bss = map S.pack bss'
in runIdentity $ do
bss2 <- CL.sourceList bss C.$$ do
CB.dropWhile (< 5)
CL.consume
return $ L.fromChunks bss2 == L.dropWhile (< 5) (L.fromChunks bss)
describe "binary take" $ do
let go n l = CL.sourceList l C.$$ do
a <- CB.take n
b <- CL.consume
return (a, b)
-- Taking nothing should result in an empty Bytestring
it "nothing" $ do
(a, b) <- runResourceT $ go 0 ["abc", "defg"]
a `shouldBe` L.empty
L.fromChunks b `shouldBe` "abcdefg"
it "normal" $ do
(a, b) <- runResourceT $ go 4 ["abc", "defg"]
a `shouldBe` "abcd"
L.fromChunks b `shouldBe` "efg"
-- Taking exactly the data that is available should result in no
-- leftover.
it "all" $ do
(a, b) <- runResourceT $ go 7 ["abc", "defg"]
a `shouldBe` "abcdefg"
b `shouldBe` []
-- Take as much as possible.
it "more" $ do
(a, b) <- runResourceT $ go 10 ["abc", "defg"]
a `shouldBe` "abcdefg"
b `shouldBe` []
describe "normalFuseLeft" $ do
it "does not double close conduit" $ do
x <- runResourceT $ do
let src = CL.sourceList ["foobarbazbin"]
src C.$= CB.isolate 10 C.$$ CL.head
x `shouldBe` Just "foobarbazb"
describe "binary" $ do
prop "lines" $ \bss' -> runIdentity $ do
let bss = map S.pack bss'
bs = S.concat bss
src = CL.sourceList bss
res <- src C.$$ CB.lines C.=$ CL.consume
return $ S8.lines bs == res
describe "termination" $ do
it "terminates early" $ do
let src = forever $ C.yield ()
x <- src C.$$ CL.head
x `shouldBe` Just ()
it "bracket" $ do
ref <- I.newIORef (0 :: Int)
let src = C.bracketP
(I.modifyIORef ref (+ 1))
(\() -> I.modifyIORef ref (+ 2))
(\() -> forever $ C.yield (1 :: Int))
val <- C.runResourceT $ src C.$$ CL.isolate 10 C.=$ CL.fold (+) 0
val `shouldBe` 10
i <- I.readIORef ref
i `shouldBe` 3
it "bracket skipped if not needed" $ do
ref <- I.newIORef (0 :: Int)
let src = C.bracketP
(I.modifyIORef ref (+ 1))
(\() -> I.modifyIORef ref (+ 2))
(\() -> forever $ C.yield (1 :: Int))
src' = CL.sourceList $ repeat 1
val <- C.runResourceT $ (src' >> src) C.$$ CL.isolate 10 C.=$ CL.fold (+) 0
val `shouldBe` 10
i <- I.readIORef ref
i `shouldBe` 0
it "bracket + toPipe" $ do
ref <- I.newIORef (0 :: Int)
let src = C.bracketP
(I.modifyIORef ref (+ 1))
(\() -> I.modifyIORef ref (+ 2))
(\() -> forever $ C.yield (1 :: Int))
val <- C.runResourceT $ src C.$$ CL.isolate 10 C.=$ CL.fold (+) 0
val `shouldBe` 10
i <- I.readIORef ref
i `shouldBe` 3
it "bracket skipped if not needed" $ do
ref <- I.newIORef (0 :: Int)
let src = C.bracketP
(I.modifyIORef ref (+ 1))
(\() -> I.modifyIORef ref (+ 2))
(\() -> forever $ C.yield (1 :: Int))
src' = CL.sourceList $ repeat 1
val <- C.runResourceT $ (src' >> src) C.$$ CL.isolate 10 C.=$ CL.fold (+) 0
val `shouldBe` 10
i <- I.readIORef ref
i `shouldBe` 0
describe "invariant violations" $ do
it "leftovers without input" $ do
ref <- I.newIORef []
let add x = I.modifyIORef ref (x:)
adder' = CI.NeedInput (\a -> liftIO (add a) >> adder') return
adder = CI.ConduitM adder'
residue x = CI.ConduitM $ CI.Leftover (CI.Done ()) x
_ <- C.yield 1 C.$$ adder
x <- I.readIORef ref
x `shouldBe` [1 :: Int]
I.writeIORef ref []
_ <- C.yield 1 C.$$ (residue 2 >> residue 3) >> adder
y <- I.readIORef ref
y `shouldBe` [1, 2, 3]
I.writeIORef ref []
_ <- C.yield 1 C.$$ residue 2 >> (residue 3 >> adder)
z <- I.readIORef ref
z `shouldBe` [1, 2, 3]
I.writeIORef ref []
describe "sane yield/await'" $ do
it' "yield terminates" $ do
let is = [1..10] ++ undefined
src [] = return ()
src (x:xs) = C.yield x >> src xs
x <- src is C.$$ CL.take 10
x `shouldBe` [1..10 :: Int]
it' "yield terminates (2)" $ do
let is = [1..10] ++ undefined
x <- mapM_ C.yield is C.$$ CL.take 10
x `shouldBe` [1..10 :: Int]
it' "yieldOr finalizer called" $ do
iref <- I.newIORef (0 :: Int)
let src = mapM_ (\i -> C.yieldOr i $ I.writeIORef iref i) [1..]
src C.$$ CL.isolate 10 C.=$ CL.sinkNull
x <- I.readIORef iref
x `shouldBe` 10
describe "upstream results" $ do
it' "works" $ do
let foldUp :: (b -> a -> b) -> b -> CI.Pipe l a Void u IO (u, b)
foldUp f b = CI.awaitE >>= either (\u -> return (u, b)) (\a -> let b' = f b a in b' `seq` foldUp f b')
passFold :: (b -> a -> b) -> b -> CI.Pipe l a a () IO b
passFold f b = CI.await >>= maybe (return b) (\a -> let b' = f b a in b' `seq` CI.yield a >> passFold f b')
(x, y) <- CI.runPipe $ mapM_ CI.yield [1..10 :: Int] CI.>+> passFold (+) 0 CI.>+> foldUp (*) 1
(x, y) `shouldBe` (sum [1..10], product [1..10])
describe "input/output mapping" $ do
it' "mapOutput" $ do
x <- C.mapOutput (+ 1) (CL.sourceList [1..10 :: Int]) C.$$ CL.fold (+) 0
x `shouldBe` sum [2..11]
it' "mapOutputMaybe" $ do
x <- C.mapOutputMaybe (\i -> if even i then Just i else Nothing) (CL.sourceList [1..10 :: Int]) C.$$ CL.fold (+) 0
x `shouldBe` sum [2, 4..10]
it' "mapInput" $ do
xyz <- (CL.sourceList $ map show [1..10 :: Int]) C.$$ do
(x, y) <- C.mapInput read (Just . show) $ ((do
x <- CL.isolate 5 C.=$ CL.fold (+) 0
y <- CL.peek
return (x :: Int, y :: Maybe Int)) :: C.Sink Int IO (Int, Maybe Int))
z <- CL.consume
return (x, y, concat z)
xyz `shouldBe` (sum [1..5], Just 6, "678910")
describe "left/right identity" $ do
it' "left identity" $ do
x <- CL.sourceList [1..10 :: Int] C.$$ CI.ConduitM CI.idP C.=$ CL.fold (+) 0
y <- CL.sourceList [1..10 :: Int] C.$$ CL.fold (+) 0
x `shouldBe` y
it' "right identity" $ do
x <- CI.runPipe $ mapM_ CI.yield [1..10 :: Int] CI.>+> (CI.injectLeftovers $ CI.unConduitM $ CL.fold (+) 0) CI.>+> CI.idP
y <- CI.runPipe $ mapM_ CI.yield [1..10 :: Int] CI.>+> (CI.injectLeftovers $ CI.unConduitM $ CL.fold (+) 0)
x `shouldBe` y
describe "generalizing" $ do
it' "works" $ do
x <- CI.runPipe
$ CI.sourceToPipe (CL.sourceList [1..10 :: Int])
CI.>+> CI.conduitToPipe (CL.map (+ 1))
CI.>+> CI.sinkToPipe (CL.fold (+) 0)
x `shouldBe` sum [2..11]
describe "withUpstream" $ do
it' "works" $ do
let src = mapM_ CI.yield [1..10 :: Int] >> return True
fold f =
loop
where
loop accum =
CI.await >>= maybe (return accum) go
where
go a =
let accum' = f accum a
in accum' `seq` loop accum'
sink = CI.withUpstream $ fold (+) 0
res <- CI.runPipe $ src CI.>+> sink
res `shouldBe` (True, sum [1..10])
describe "iterate" $ do
it' "works" $ do
res <- CL.iterate (+ 1) (1 :: Int) C.$$ CL.isolate 10 C.=$ CL.fold (+) 0
res `shouldBe` sum [1..10]
describe "unwrapResumable" $ do
it' "works" $ do
ref <- I.newIORef (0 :: Int)
let src0 = do
C.yieldOr () $ I.writeIORef ref 1
C.yieldOr () $ I.writeIORef ref 2
C.yieldOr () $ I.writeIORef ref 3
(rsrc0, Just ()) <- src0 C.$$+ CL.head
x0 <- I.readIORef ref
x0 `shouldBe` 0
(_, final) <- C.unwrapResumable rsrc0
x1 <- I.readIORef ref
x1 `shouldBe` 0
final
x2 <- I.readIORef ref
x2 `shouldBe` 1
it' "isn't called twice" $ do
ref <- I.newIORef (0 :: Int)
let src0 = do
C.yieldOr () $ I.writeIORef ref 1
C.yieldOr () $ I.writeIORef ref 2
(rsrc0, Just ()) <- src0 C.$$+ CL.head
x0 <- I.readIORef ref
x0 `shouldBe` 0
(src1, final) <- C.unwrapResumable rsrc0
x1 <- I.readIORef ref
x1 `shouldBe` 0
Just () <- src1 C.$$ CL.head
x2 <- I.readIORef ref
x2 `shouldBe` 2
final
x3 <- I.readIORef ref
x3 `shouldBe` 2
it' "source isn't used" $ do
ref <- I.newIORef (0 :: Int)
let src0 = do
C.yieldOr () $ I.writeIORef ref 1
C.yieldOr () $ I.writeIORef ref 2
(rsrc0, Just ()) <- src0 C.$$+ CL.head
x0 <- I.readIORef ref
x0 `shouldBe` 0
(src1, final) <- C.unwrapResumable rsrc0
x1 <- I.readIORef ref
x1 `shouldBe` 0
() <- src1 C.$$ return ()
x2 <- I.readIORef ref
x2 `shouldBe` 0
final
x3 <- I.readIORef ref
x3 `shouldBe` 1
describe "injectLeftovers" $ do
it "works" $ do
let src = mapM_ CI.yield [1..10 :: Int]
conduit = CI.injectLeftovers $ CI.unConduitM $ C.awaitForever $ \i -> do
js <- CL.take 2
mapM_ C.leftover $ reverse js
C.yield i
res <- CI.ConduitM (src CI.>+> CI.injectLeftovers conduit) C.$$ CL.consume
res `shouldBe` [1..10]
describe "up-upstream finalizers" $ do
it "pipe" $ do
let p1 = CI.await >>= maybe (return ()) CI.yield
p2 = idMsg "p2-final"
p3 = idMsg "p3-final"
idMsg msg = CI.addCleanup (const $ tell [msg]) $ CI.awaitForever CI.yield
printer = CI.awaitForever $ lift . tell . return . show
src = mapM_ CI.yield [1 :: Int ..]
let run' p = execWriter $ CI.runPipe $ printer CI.<+< p CI.<+< src
run' (p1 CI.<+< (p2 CI.<+< p3)) `shouldBe` run' ((p1 CI.<+< p2) CI.<+< p3)
it "conduit" $ do
let p1 = C.await >>= maybe (return ()) C.yield
p2 = idMsg "p2-final"
p3 = idMsg "p3-final"
idMsg msg = C.addCleanup (const $ tell [msg]) $ C.awaitForever C.yield
printer = C.awaitForever $ lift . tell . return . show
src = CL.sourceList [1 :: Int ..]
let run' p = execWriter $ src C.$$ p C.=$ printer
run' ((p3 C.=$= p2) C.=$= p1) `shouldBe` run' (p3 C.=$= (p2 C.=$= p1))
describe "monad transformer laws" $ do
it "transPipe" $ do
let source = CL.sourceList $ replicate 10 ()
let tell' x = tell [x :: Int]
let replaceNum1 = C.awaitForever $ \() -> do
i <- lift get
lift $ (put $ i + 1) >> (get >>= lift . tell')
C.yield i
let replaceNum2 = C.awaitForever $ \() -> do
i <- lift get
lift $ put $ i + 1
lift $ get >>= lift . tell'
C.yield i
x <- runWriterT $ source C.$$ C.transPipe (`evalStateT` 1) replaceNum1 C.=$ CL.consume
y <- runWriterT $ source C.$$ C.transPipe (`evalStateT` 1) replaceNum2 C.=$ CL.consume
x `shouldBe` y
describe "text decode" $ do
it' "doesn't throw runtime exceptions" $ do
let x = runIdentity $ runExceptionT $ C.yield "\x89\x243" C.$$ CT.decode CT.utf8 C.=$ CL.consume
case x of
Left _ -> return ()
Right t -> error $ "This should have failed: " ++ show t
describe "iterM" $ do
prop "behavior" $ \l -> monadicIO $ do
let counter ref = CL.iterM (const $ liftIO $ M.modifyMVar_ ref (\i -> return $! i + 1))
v <- run $ do
ref <- M.newMVar 0
CL.sourceList l C.$= counter ref C.$$ CL.mapM_ (const $ return ())
M.readMVar ref
assert $ v == length (l :: [Int])
prop "mapM_ equivalence" $ \l -> monadicIO $ do
let runTest h = run $ do
ref <- M.newMVar (0 :: Int)
let f = action ref
s <- CL.sourceList (l :: [Int]) C.$= h f C.$$ CL.fold (+) 0
c <- M.readMVar ref
return (c, s)
action ref = const $ liftIO $ M.modifyMVar_ ref (\i -> return $! i + 1)
(c1, s1) <- runTest CL.iterM
(c2, s2) <- runTest (\f -> CL.mapM (\a -> f a >>= \() -> return a))
assert $ c1 == c2
assert $ s1 == s2
describe "generalizing" $ do
it "works" $ do
let src :: Int -> C.Source IO Int
src i = CL.sourceList [1..i]
sink :: C.Sink Int IO Int
sink = CL.fold (+) 0
res <- C.yield 10 C.$$ C.awaitForever (C.toProducer . src) C.=$ (C.toConsumer sink >>= C.yield) C.=$ C.await
res `shouldBe` Just (sum [1..10])
describe "sinkCacheLength" $ do
it' "works" $ C.runResourceT $ do
lbs <- liftIO $ L.readFile "test/main.hs"
(len, src) <- CB.sourceLbs lbs C.$$ CB.sinkCacheLength
lbs' <- src C.$$ CB.sinkLbs
liftIO $ do
fromIntegral len `shouldBe` L.length lbs
lbs' `shouldBe` lbs
fromIntegral len `shouldBe` L.length lbs'
describe "mtl instances" $ do
it "ErrorT" $ do
let src = flip catchError (const $ C.yield 4) $ do
lift $ return ()
C.yield 1
lift $ return ()
C.yield 2
lift $ return ()
() <- throwError DummyError
lift $ return ()
C.yield 3
lift $ return ()
(src C.$$ CL.consume) `shouldBe` Right [1, 2, 4 :: Int]
describe "finalizers" $ do
it "promptness" $ do
imsgs <- I.newIORef []
let add x = liftIO $ do
msgs <- I.readIORef imsgs
I.writeIORef imsgs $ msgs ++ [x]
src' = C.bracketP
(add "acquire")
(const $ add "release")
(const $ C.addCleanup (const $ add "inside") (mapM_ C.yield [1..5]))
src = do
src' C.$= CL.isolate 4
add "computation"
sink = CL.mapM (\x -> add (show x) >> return x) C.=$ CL.consume
res <- C.runResourceT $ src C.$$ sink
msgs <- I.readIORef imsgs
-- FIXME this would be better msgs `shouldBe` words "acquire 1 2 3 4 inside release computation"
msgs `shouldBe` words "acquire 1 2 3 4 release inside computation"
res `shouldBe` [1..4 :: Int]
it "left associative" $ do
imsgs <- I.newIORef []
let add x = liftIO $ do
msgs <- I.readIORef imsgs
I.writeIORef imsgs $ msgs ++ [x]
p1 = C.bracketP (add "start1") (const $ add "stop1") (const $ add "inside1" >> C.yield ())
p2 = C.bracketP (add "start2") (const $ add "stop2") (const $ add "inside2" >> C.await >>= maybe (return ()) C.yield)
p3 = C.bracketP (add "start3") (const $ add "stop3") (const $ add "inside3" >> C.await)
res <- C.runResourceT $ (p1 C.$= p2) C.$$ p3
res `shouldBe` Just ()
msgs <- I.readIORef imsgs
msgs `shouldBe` words "start3 inside3 start2 inside2 start1 inside1 stop3 stop2 stop1"
it "right associative" $ do
imsgs <- I.newIORef []
let add x = liftIO $ do
msgs <- I.readIORef imsgs
I.writeIORef imsgs $ msgs ++ [x]
p1 = C.bracketP (add "start1") (const $ add "stop1") (const $ add "inside1" >> C.yield ())
p2 = C.bracketP (add "start2") (const $ add "stop2") (const $ add "inside2" >> C.await >>= maybe (return ()) C.yield)
p3 = C.bracketP (add "start3") (const $ add "stop3") (const $ add "inside3" >> C.await)
res <- C.runResourceT $ p1 C.$$ (p2 C.=$ p3)
res `shouldBe` Just ()
msgs <- I.readIORef imsgs
msgs `shouldBe` words "start3 inside3 start2 inside2 start1 inside1 stop3 stop2 stop1"
describe "dan burton's associative tests" $ do
let tellLn = tell . (++ "\n")
finallyP fin = CI.addCleanup (const fin)
printer = CI.awaitForever $ lift . tellLn . show
idMsg msg = finallyP (tellLn msg) CI.idP
takeP 0 = return ()
takeP n = CI.awaitE >>= \ex -> case ex of
Left _u -> return ()
Right i -> CI.yield i >> takeP (pred n)
testPipe p = execWriter $ runPipe $ printer <+< p <+< CI.sourceList ([1..] :: [Int])
p1 = takeP (1 :: Int)
p2 = idMsg "foo"
p3 = idMsg "bar"
(<+<) = (CI.<+<)
runPipe = CI.runPipe
test1L = testPipe $ (p1 <+< p2) <+< p3
test1R = testPipe $ p1 <+< (p2 <+< p3)
test2L = testPipe $ (p2 <+< p1) <+< p3
test2R = testPipe $ p2 <+< (p1 <+< p3)
test3L = testPipe $ (p2 <+< p3) <+< p1
test3R = testPipe $ p2 <+< (p3 <+< p1)
verify testL testR p1' p2' p3'
| testL == testR = return () :: IO ()
| otherwise = error $ unlines
[ "FAILURE"
, ""
, "(" ++ p1' ++ " <+< " ++ p2' ++ ") <+< " ++ p3'
, "------------------"
, testL
, ""
, p1' ++ " <+< (" ++ p2' ++ " <+< " ++ p3' ++ ")"
, "------------------"
, testR
]
it "test1" $ verify test1L test1R "p1" "p2" "p3"
-- FIXME this is broken it "test2" $ verify test2L test2R "p2" "p1" "p3"
it "test3" $ verify test3L test3R "p2" "p3" "p1"
it' :: String -> IO () -> Spec
it' = it
data DummyError = DummyError
deriving (Show, Eq)
instance Error DummyError
| moonKimura/conduit-1.0.8 | test/main.hs | mit | 43,107 | 0 | 35 | 17,358 | 15,926 | 7,901 | 8,025 | -1 | -1 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Gen where
import Control.Monad.IO.Class
import Control.Monad.Reader
import Data.String
import GHCJS.DOM
import qualified GHCJS.DOM.Document as Doc
import GHCJS.DOM.DocumentFragment
import GHCJS.DOM.Element
import GHCJS.DOM.Node
import GHCJS.DOM.NodeList
import GHCJS.DOM.ParentNode (querySelectorAll)
import GHCJS.DOM.Types
newtype Html a = Html {unHtml :: ReaderT Node IO a}
deriving (Functor, Applicative, Monad, MonadIO, MonadReader Node)
instance a ~ () => IsString (Html a) where
fromString = text
parent :: Html Node
parent = ask
text :: String -> Html ()
text str = parent >>= flip setTextContent (Just str)
element :: (IsNode b) => (JSVal -> b) -> JSString -> Html a -> Html b
element typ tag children = do
doc <- currentDocumentUnchecked
t <- uncheckedCastTo typ <$> Doc.createElement doc tag
_ <- flip appendChild t =<< parent
_ <- local (const $ uncheckedCastTo Node t) children
return t
clone :: (IsNode a, MonadIO m) => a -> m Node
clone node = cloneNode node True
toFragment :: MonadIO m => Html a -> m DocumentFragment
toFragment (Html r)= do
df <- uncheckedCastTo Node <$> newDocumentFragment
_ <- liftIO $ runReaderT r df
return $ uncheckedCastTo DocumentFragment df
prependChild :: (MonadIO m, IsNode self, IsNode node) => self -> node -> m Node
prependChild parent child = getFirstChild parent >>= insertBefore parent child
fromNodeList :: (MonadIO m) => NodeList -> m [HTMLElement]
fromNodeList nl = do
l <- getLength nl
if l > 0 then mapM (fmap (uncheckedCastTo HTMLElement . fJ) . item nl) [0..l-1] else return []
where
fJ (Just x) = x
fJ Nothing = error "out of bound NodeList"
getChildren :: (MonadIO m, IsNode a) => a -> m [HTMLElement]
getChildren node = getChildNodes node >>= fromNodeList
queryAll :: (MonadIO m, IsParentNode self, ToJSString selector) => self -> selector -> m [HTMLElement]
queryAll e s = querySelectorAll e s >>= fromNodeList
deleteChildren :: (MonadIO m, IsNode a) => a -> m ()
deleteChildren node = do
children <- getChildren node
mapM_ (removeChild node) children
return ()
attach :: (MonadIO m, IsNode node) => Html a -> node -> m ()
attach html p = toFragment html >>= prependChild p >> return ()
infixl 5 =.
(=.) :: JSString -> JSString -> Html ()
(=.) attr val = do
elem <- uncheckedCastTo HTMLElement <$> parent
liftIO $ setAttribute elem attr val
return ()
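-- A hypothetical usage sketch (names and the CSS class are illustrative):
-- build a fragment with the 'Html' monad and attach it to an existing node.
--
-- > greet :: IsNode n => n -> IO ()
-- > greet container = attach markup container
-- >   where
-- >     markup = p $ do
-- >       "class" =. "greeting"
-- >       text "Hello from Gen"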
p :: Html a -> Html HTMLParagraphElement
p = element HTMLParagraphElement "p"
div :: Html a -> Html HTMLDivElement
div = element HTMLDivElement "div"
table :: Html a -> Html HTMLTableElement
table = element HTMLTableElement "table"
tbody :: Html a -> Html HTMLTableSectionElement
tbody = element HTMLTableSectionElement "tbody"
thead :: Html a -> Html HTMLTableSectionElement
thead = element HTMLTableSectionElement "thead"
tfoot :: Html a -> Html HTMLTableSectionElement
tfoot = element HTMLTableSectionElement "tfoot"
tr :: Html a -> Html HTMLTableRowElement
tr = element HTMLTableRowElement "tr"
th :: Html a -> Html HTMLTableCellElement
th = element HTMLTableCellElement "th"
td :: Html a -> Html HTMLTableCellElement
td = element HTMLTableCellElement "td"
span :: Html a -> Html HTMLSpanElement
span = element HTMLSpanElement "span"
button :: Html a -> Html HTMLButtonElement
button = element HTMLButtonElement "button"
i :: Html a -> Html HTMLElement
i = element HTMLElement "i"
select = element HTMLSelectElement "select"
option = element HTMLElement "option"
h5 = element HTMLElement "h5"
| TeofilC/Turing | Gen.hs | mit | 3,831 | 0 | 14 | 797 | 1,277 | 638 | 639 | 91 | 3 |
module Y2016.M12.D19.Exercise where
import Codec.Compression.GZip
-- below imports available at the 1HaskellADay git repository
import Data.SymbolTable
import Data.SymbolTable.Compiler
{--
Today and this week we're going to be focusing on the SAIPE/poverty data that
we started to look at last week. But we'll be breaking this examination into
daily bite-sized pieces, because I'm all nice like that.
So, last week I asked for a set of ScoreCards from the data set, but score-cards
are based on two sets of indices, one for the score card and one for each datum
of the (indexed) arrayed data set for the score card.
A county makes a perfect index for a score card, as each row is data on the
county. One small problem: a String is not an Ix type.
That's a problem.
Another, semi-related/unrelated problem is that an uniquely identified County
is either: not a string, or if it is, it embeds, then loses, the State in which
it is.
Huh?
("Middlesex County","CT") is not a String, and the original String:
"Middlesex County (CT)" becomes a parsing problem with the embedded State. I
grant you, it's an uninteresting parsing problem for you grizzled ancients,
but a datum, fundmentally, should be atomic. This 'datum' is a cartesian product.
Not good.
Well, if you've been following along, we solved the parsing problem last week...
YAY!
But we still need a set of unique indices for each score cards (1) and (2) we'd
like to retain, and not lose, the State to which this County belongs.
Today's Haskell exercise, then is a rather simple problem.
As we saw last week, following along with this example, the State (NOT the
StateAbbrev, but the USState) is Connecticut, and we 'know' this because of
indirect structural information of the SAIPE data file (which is here:
Y2016/M12/D15/SAIPESNC_15DEC16_11_35_13_00.csv.gz
in this git repository) where the Connecticut-row precedes the Middlesex County
(CT)-row.
Okay.
Today, even simpler than the 'Which State contains Middlesex County (CT)?'-
question, is this request:
from the SAIPE data file, create a Data.SAIPE.USStates module that enumerates
each USState as a value of a USStates data type, i.e.:
Read in SAIPESNC_15DEC16_11_35_13_00.csv.gz and output the module
module Data.SAIPE.USStates where
import Data.Array
data USStates = Alabama | Arizona | ...
deriving (Eq, Ord, Show, Read, Enum, Ix)
Hints: the above imports may help. Determining what is a US State and what is not
is up to you, but a hint here is that we have examined how to make this
determination by context in exercises last week. Haskell provides a gzip
reader, available through the above import.
--}
usStateIndices :: FilePath -> FilePath -> IO ()
usStateIndices gzipSAIPEdata outputfilename = undefined
-- from the gzipped SAIPE data set output the enumerated USStates as the
-- Data.SAIPE.USStates module
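
{- A sketch of one possible approach (NOT a reference solution). Everything
   below is an assumption: the geography name is taken to be the first
   comma-separated column (no quoting handled), a row is treated as a US
   State when its name carries no "(XY)" suffix, the national total row and
   any header row would still need to be dropped by hand, and a qualified
   import of Data.ByteString.Lazy.Char8 as BL plus Data.List would be needed:

   usStateIndices gz out = do
     raw <- fmap decompress (BL.readFile gz)
     let names  = map (takeWhile (/= ',')) (lines (BL.unpack raw))
         states = nub [ n | n <- names, not (null n), '(' `notElem` n ]
     writeFile out $ unlines
       [ "module Data.SAIPE.USStates where"
       , ""
       , "import Data.Array"
       , ""
       , "data USStates = " ++ intercalate " | " (map (filter (/= ' ')) states)
       , "  deriving (Eq, Ord, Show, Read, Enum, Ix)"
       ]
-}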
| geophf/1HaskellADay | exercises/HAD/Y2016/M12/D19/Exercise.hs | mit | 2,848 | 0 | 8 | 488 | 57 | 35 | 22 | 6 | 1 |
{- |
Module : SemanticalAnalysis.hs
Description : .
Maintainer : Christopher Pockrandt
License : MIT
   `process` performs the semantic checks on the AST: it rejects empty
   programs, duplicate function names, a missing `main`, unknown node
   references and invalid lexemes, and it splits lambdas into functions
   of their own.
-}
module SemanticalAnalysis (
process -- main function of the module "SemanticalAnalysis"
)
where
-- imports --
import InterfaceDT as IDT
import ErrorHandling as EH
import Data.List
-- functions --
process :: IDT.SynAna2SemAna -> IDT.SemAna2InterCode
process (IDT.ISS input)
| null input = error EH.strEmptyProgram
| duplicatefunctions input = error EH.strDuplicateFunctions
| nomain input = error EH.strMainMissing
| not (all validfollowers input) = error EH.strUnknownNode
| otherwise = IDT.ISI (concatMap splitlambda $ map check input)
-- splits lambdas into own functions
splitlambda :: IDT.AST -> [IDT.AST]
splitlambda func@(funcname, paths) = func : lambdafuncs 1
where
lambdafuncs offset
| offset > maximum (fst (getids func)) = []
| islambda offset = (funcname ++ "!" ++ show offset, (1, [NOP], offset):tail paths):lambdafuncs (offset + 1)
| otherwise = lambdafuncs (offset + 1)
islambda x = any (\(_, lex, _) -> Lambda x `elem` lex) paths
-- checks if there are unknown followers
validfollowers :: IDT.AST -> Bool
validfollowers ast = null subset
where
(ids, followers) = getids ast
subset = nub followers \\ ids
-- gets a tuple of (ids, followers) to check if there are unknown followers
getids :: IDT.AST -> ([Int], [Int])
getids (_, nodes) = unzip (getnodeids nodes)
where
getnodeids [] = []
getnodeids ((id, nodes, follow):xs) = (0, junctionattribute nodes):(id, follow):getnodeids xs
junctionattribute nodes = case last nodes of
(Junction attribute) -> attribute
_ -> 0
-- looking for a main function
nomain :: [IDT.AST] -> Bool
nomain input = "main" `notElem` allfunctions input
-- checking if there are two or more functions with the same name
duplicatefunctions :: [IDT.AST] -> Bool
duplicatefunctions input = length functions > length (nub functions)
where functions = allfunctions input
-- getting a list of every function
allfunctions :: [IDT.AST] -> [String]
allfunctions [] = []
allfunctions ((name, _):xs) = name:allfunctions xs
-- this will return the exact same input if it's valid and will error otherwise
check :: IDT.AST -> IDT.AST
check (name, nodes) = (name, map checknode nodes)
-- this will return the exact same input if it's valid and will error otherwise
checknode :: (Int, [Lexeme], Int) -> (Int, [Lexeme], Int)
checknode (id, lexeme, following)
| following == 0 && not (last lexeme `elem` [Finish, Boom] || isinvalidjunction (last lexeme)) = error EH.strInvalidMovement
| otherwise = (id, map checklexeme lexeme, following)
where
isinvalidjunction (Junction x) = x == 0
isinvalidjunction _ = False
-- this will return the exact same input if it's valid and will error otherwise
checklexeme :: Lexeme -> Lexeme
checklexeme (Junction 0) = error EH.strInvalidMovement
checklexeme (Push "") = error EH.strInvalidVarName
checklexeme (Pop "") = error EH.strInvalidVarName
checklexeme lexeme = lexeme
| SWP-Ubau-SoSe2014-Haskell/SWPSoSe14 | src/RailCompiler/SemanticalAnalysis.hs | mit | 3,244 | 4 | 16 | 661 | 926 | 492 | 434 | 51 | 3 |
--Project Euler Problem 1
main :: IO()
main = do
y <- return (sum[x | x <- [1..999], mod x 3 == 0 || mod x 5 == 0])
putStrLn("Answer to problem 1 = " ++ show(y))
| calewis/SmallProjectsAndDev | project_euler/haskell/problem_1.hs | mit | 170 | 0 | 16 | 45 | 94 | 46 | 48 | 4 | 1 |
{-# LANGUAGE TemplateHaskell #-}
module Baum.ZweiDrei.Type where
import Autolib.ToDoc
import Autolib.Reader
import Data.Typeable
data Key a = This a | Infinity
deriving ( Typeable, Eq, Ord )
instance Functor Key where
fmap f (This x ) = This ( f x )
fmap f Infinity = Infinity
$(derives [makeReader, makeToDoc] [''Key])
-- | The second component of each pair is the key; within a node the keys
-- are in ascending order. Each subtree holds exactly the nodes that are
-- strictly smaller than its key. The last entry of every list has
-- key == Infinity.
data Baum a = Null
| Baum [ ( Baum a, Key a ) ]
deriving ( Eq, Ord, Typeable )
instance Functor Baum where
fmap f Null = Null
fmap f ( Baum bks ) = Baum $ do
( b, k ) <- bks
return ( fmap f b, fmap f k )
isNull :: Baum a -> Bool
isNull Null = True
isNull _ = False
$(derives [makeReader, makeToDoc] [''Baum])
instance ToDoc a => Show (Baum a) where show = render . toDoc
-- local variables:
-- mode: haskell
-- end;
| marcellussiegburg/autotool | collection/src/Baum/ZweiDrei/Type.hs | gpl-2.0 | 1,080 | 0 | 11 | 293 | 322 | 173 | 149 | 24 | 1 |
module Brainfuck.Generator.JVM (tokensToJVMCode, generateJVMTemplate) where
import Control.Monad.State
import Brainfuck.Tokens
type LabelStack = [Int]
pop :: State LabelStack String
pop = state $ \(x:xs) -> (show x, xs)
push :: State LabelStack String
push = state $ inc
where
inc (x:xs) = (show (x + 1), (x + 1):x:xs)
inc [] = (show 0, [0])
evalStack :: State LabelStack [String] -> [String]
evalStack stack = (evalState stack) []
tokensToJVMCode :: [BFToken] -> String
tokensToJVMCode = unlines . evalStack . (mapM tokenToCode)
tokenToCode :: BFToken -> State LabelStack String
tokenToCode (BFMove c) = return $ unlines
[ " ; " ++ tokenComment
, " iinc 1 " ++ (show c)
]
where
tokenComment = replicate (abs c) (if c > 0 then '>' else '<')
tokenToCode (BFInc c) = return $ unlines
[ " ; " ++ tokenComment
, " aload_2"
, " iload_1"
, " dup2"
, " iaload"
, " bipush " ++ (show c)
, " iadd"
, " iastore "
]
where
tokenComment = replicate (abs c) (if c > 0 then '+' else '-')
tokenToCode BFOut = return $ unlines
[ " ; ."
, " getstatic java/lang/System/out Ljava/io/PrintStream;"
, " aload_2"
, " iload_1"
, " iaload"
, " i2c"
, " invokevirtual java/io/PrintStream/print(C)V"
]
tokenToCode BFIn = return $ unlines
[ " ; ,"
, " aload_2"
, " iload_1"
, " getstatic java/lang/System/in Ljava/io/InputStream;"
, " invokevirtual java/io/InputStream/read()I"
, " iastore "
]
tokenToCode BFLoopStart = do
current <- push
return $ unlines
[ " ; ["
, "loop" ++ current ++ "Start:"
, " aload_2"
, " iload_1"
, " iaload"
, " ifeq loop" ++ current ++ "End"
]
tokenToCode BFLoopEnd = do
current <- pop
return $ unlines
[ " ; ]"
, " goto loop" ++ current ++ "Start"
, "loop" ++ current ++ "End:"
]
generateJVMTemplate :: String -> String -> String
generateJVMTemplate name body = unlines
[ ".class public " ++ name
, ".super java/lang/Object"
, ""
, ".method public <init>()V"
, " aload_0"
, " invokenonvirtual java/lang/Object/<init>()V"
, " return"
, ".end method"
, ""
, ".method public static main([Ljava/lang/String;)V"
, " .limit stack 10"
, " .limit locals 3"
, ""
, " ; Pointer"
, " iconst_0"
, " istore_1"
, " "
, " ; Array"
, " bipush 100"
, " newarray int "
, " astore_2"
, ""
, body
, " return "
, ".end method"
]
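
-- A hypothetical usage sketch (class name and token list are illustrative):
--
-- > helloJasmin :: String
-- > helloJasmin = generateJVMTemplate "Hello" (tokensToJVMCode [BFInc 72, BFOut])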
| Nullreff/BF-JVM | src/Brainfuck/Generator/JVM.hs | gpl-3.0 | 2,723 | 0 | 11 | 920 | 697 | 388 | 309 | 86 | 3 |
-- a brutally simple command-line utility to render a Tidal pattern as a WebDirt score
-- usage: renderTidal [cps] [number-of-cycles-to-render]
-- the pattern is read from stdin
-- rendered output goes to stdout
-- if the pattern cannot be interpreted, the single word "error" is printed instead
module Main where
import System.Environment
import TidalHint
import WebDirt
main = do
[c,n] <- getArgs
p <- getContents
p' <- hintParamPattern p
case p' of Left _ -> putStrLn "error"
Right p'' -> putStrLn (show (render p'' (read c) (read n)))
| Moskau/estuary | server/renderTidal.hs | gpl-3.0 | 579 | 0 | 16 | 116 | 114 | 59 | 55 | 10 | 2 |
module Main ( main ) where
import Test.Framework
import Properties
import StoreTest
main :: IO ()
main = defaultMain
[ testGroup "properties" testProperties
, storeTests
]
| waldheinz/ads | src/tests/Main.hs | gpl-3.0 | 197 | 0 | 7 | 51 | 49 | 28 | 21 | 8 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.CloudHSM.CreateHapg
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Creates a high-availability partition group. A high-availability partition
-- group is a group of partitions that spans multiple physical HSMs.
--
-- <http://docs.aws.amazon.com/cloudhsm/latest/dg/API_CreateHapg.html>
module Network.AWS.CloudHSM.CreateHapg
(
-- * Request
CreateHapg
-- ** Request constructor
, createHapg
-- ** Request lenses
, chLabel
-- * Response
, CreateHapgResponse
-- ** Response constructor
, createHapgResponse
-- ** Response lenses
, chrHapgArn
) where
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.CloudHSM.Types
import qualified GHC.Exts
newtype CreateHapg = CreateHapg
{ _chLabel :: Text
} deriving (Eq, Ord, Read, Show, Monoid, IsString)
-- | 'CreateHapg' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'chLabel' @::@ 'Text'
--
createHapg :: Text -- ^ 'chLabel'
-> CreateHapg
createHapg p1 = CreateHapg
{ _chLabel = p1
}
-- | The label of the new high-availability partition group.
chLabel :: Lens' CreateHapg Text
chLabel = lens _chLabel (\s a -> s { _chLabel = a })
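-- Illustrative sketch (hypothetical label, not part of the generated
-- module): the label is the only required field, so a request for a group
-- called "hapg1" is built with
--
-- > createHapg "hapg1"
--
-- and the label can be inspected or changed afterwards via 'chLabel'.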
newtype CreateHapgResponse = CreateHapgResponse
{ _chrHapgArn :: Maybe Text
} deriving (Eq, Ord, Read, Show, Monoid)
-- | 'CreateHapgResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'chrHapgArn' @::@ 'Maybe' 'Text'
--
createHapgResponse :: CreateHapgResponse
createHapgResponse = CreateHapgResponse
{ _chrHapgArn = Nothing
}
-- | The ARN of the high-availability partition group.
chrHapgArn :: Lens' CreateHapgResponse (Maybe Text)
chrHapgArn = lens _chrHapgArn (\s a -> s { _chrHapgArn = a })
instance ToPath CreateHapg where
toPath = const "/"
instance ToQuery CreateHapg where
toQuery = const mempty
instance ToHeaders CreateHapg
instance ToJSON CreateHapg where
toJSON CreateHapg{..} = object
[ "Label" .= _chLabel
]
instance AWSRequest CreateHapg where
type Sv CreateHapg = CloudHSM
type Rs CreateHapg = CreateHapgResponse
request = post "CreateHapg"
response = jsonResponse
instance FromJSON CreateHapgResponse where
parseJSON = withObject "CreateHapgResponse" $ \o -> CreateHapgResponse
<$> o .:? "HapgArn"
| dysinger/amazonka | amazonka-cloudhsm/gen/Network/AWS/CloudHSM/CreateHapg.hs | mpl-2.0 | 3,334 | 0 | 9 | 757 | 447 | 271 | 176 | 55 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.GlobalPublicDelegatedPrefixes.Insert
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Creates a global PublicDelegatedPrefix in the specified project using
-- the parameters that are included in the request.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.globalPublicDelegatedPrefixes.insert@.
module Network.Google.Resource.Compute.GlobalPublicDelegatedPrefixes.Insert
(
-- * REST Resource
GlobalPublicDelegatedPrefixesInsertResource
-- * Creating a Request
, globalPublicDelegatedPrefixesInsert
, GlobalPublicDelegatedPrefixesInsert
-- * Request Lenses
, gpdpiRequestId
, gpdpiProject
, gpdpiPayload
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.globalPublicDelegatedPrefixes.insert@ method which the
-- 'GlobalPublicDelegatedPrefixesInsert' request conforms to.
type GlobalPublicDelegatedPrefixesInsertResource =
"compute" :>
"v1" :>
"projects" :>
Capture "project" Text :>
"global" :>
"publicDelegatedPrefixes" :>
QueryParam "requestId" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] PublicDelegatedPrefix :>
Post '[JSON] Operation
-- | Creates a global PublicDelegatedPrefix in the specified project using
-- the parameters that are included in the request.
--
-- /See:/ 'globalPublicDelegatedPrefixesInsert' smart constructor.
data GlobalPublicDelegatedPrefixesInsert =
GlobalPublicDelegatedPrefixesInsert'
{ _gpdpiRequestId :: !(Maybe Text)
, _gpdpiProject :: !Text
, _gpdpiPayload :: !PublicDelegatedPrefix
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'GlobalPublicDelegatedPrefixesInsert' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gpdpiRequestId'
--
-- * 'gpdpiProject'
--
-- * 'gpdpiPayload'
globalPublicDelegatedPrefixesInsert
:: Text -- ^ 'gpdpiProject'
-> PublicDelegatedPrefix -- ^ 'gpdpiPayload'
-> GlobalPublicDelegatedPrefixesInsert
globalPublicDelegatedPrefixesInsert pGpdpiProject_ pGpdpiPayload_ =
GlobalPublicDelegatedPrefixesInsert'
{ _gpdpiRequestId = Nothing
, _gpdpiProject = pGpdpiProject_
, _gpdpiPayload = pGpdpiPayload_
}
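-- Illustrative sketch (hypothetical values; 'somePrefix' stands for a
-- 'PublicDelegatedPrefix' built elsewhere): the request is created from the
-- project id and the payload, and optional fields are filled in through the
-- lenses, e.g.
--
-- > globalPublicDelegatedPrefixesInsert "my-project" somePrefix
-- >   & gpdpiRequestId ?~ "unique-request-id"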
-- | An optional request ID to identify requests. Specify a unique request ID
-- so that if you must retry your request, the server will know to ignore
-- the request if it has already been completed. For example, consider a
-- situation where you make an initial request and the request times out.
-- If you make the request again with the same request ID, the server can
-- check if original operation with the same request ID was received, and
-- if so, will ignore the second request. This prevents clients from
-- accidentally creating duplicate commitments. The request ID must be a
-- valid UUID with the exception that zero UUID is not supported
-- (00000000-0000-0000-0000-000000000000).
gpdpiRequestId :: Lens' GlobalPublicDelegatedPrefixesInsert (Maybe Text)
gpdpiRequestId
= lens _gpdpiRequestId
(\ s a -> s{_gpdpiRequestId = a})
-- | Project ID for this request.
gpdpiProject :: Lens' GlobalPublicDelegatedPrefixesInsert Text
gpdpiProject
= lens _gpdpiProject (\ s a -> s{_gpdpiProject = a})
-- | Multipart request metadata.
gpdpiPayload :: Lens' GlobalPublicDelegatedPrefixesInsert PublicDelegatedPrefix
gpdpiPayload
= lens _gpdpiPayload (\ s a -> s{_gpdpiPayload = a})
instance GoogleRequest
GlobalPublicDelegatedPrefixesInsert
where
type Rs GlobalPublicDelegatedPrefixesInsert =
Operation
type Scopes GlobalPublicDelegatedPrefixesInsert =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/compute"]
requestClient
GlobalPublicDelegatedPrefixesInsert'{..}
= go _gpdpiProject _gpdpiRequestId (Just AltJSON)
_gpdpiPayload
computeService
where go
= buildClient
(Proxy ::
Proxy GlobalPublicDelegatedPrefixesInsertResource)
mempty
| brendanhay/gogol | gogol-compute/gen/Network/Google/Resource/Compute/GlobalPublicDelegatedPrefixes/Insert.hs | mpl-2.0 | 5,080 | 0 | 16 | 1,078 | 484 | 292 | 192 | 81 | 1 |
{-# LANGUAGE PatternSynonyms #-}
pattern myLongLeftVariableName `MyLongInfixPatternMatcher` myLongRightVariableName =
[myLongLeftVariableName, myLongRightVariableName]
| lspitzner/brittany | data/Test245.hs | agpl-3.0 | 170 | 0 | 6 | 13 | 22 | 13 | 9 | -1 | -1 |
module Foreign.MathJax where
typeset :: String -> IO ()
typeset _ = return ()
| theam/haskell-do | src/ghc-specific/Foreign/MathJax.hs | apache-2.0 | 80 | 0 | 7 | 16 | 33 | 17 | 16 | 3 | 1 |
--(*) Duplicate the elements of a list.
--
--Example:
--
-- * (dupli '(a b c c d))
--(A A B B C C C C D D)
--Example in Haskell:
--
-- > dupli [1, 2, 3]
--[1,1,2,2,3,3]
dupli :: [a] -> [a]
dupli = foldr copy []
where
copy x acc = x: x: acc
dupli' xs = xs >>= (\x -> [x, x])
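-- A small sanity check for both definitions (added as an illustrative
-- sketch, not part of the original exercise):
dupliOK :: Bool
dupliOK = dupli [1, 2, 3 :: Int] == [1, 1, 2, 2, 3, 3]
       && dupli' "abc" == "aabbcc"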
| tiann/haskell-learning | haskell99/p14/main.hs | apache-2.0 | 282 | 0 | 8 | 74 | 84 | 50 | 34 | 4 | 1 |
module Application.Hoodle.Database.Command where
import Application.Hoodle.Database.ProgType
import Application.Hoodle.Database.Job
commandLineProcess :: Hoodle_db -> IO ()
commandLineProcess Test = do
putStrLn "test called"
startJob
| wavewave/hoodle-db | lib/Application/Hoodle/Database/Command.hs | bsd-2-clause | 241 | 0 | 7 | 28 | 53 | 30 | 23 | 7 | 1 |
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : QUndoCommand_h.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:30
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Gui.QUndoCommand_h (
Qqid_h(..)
,QmergeWith_h(..)
,Qredo_h(..)
,Qundo_h(..)
) where
import Qtc.Enums.Base
import Qtc.Classes.Base
import Qtc.Classes.Qccs_h
import Qtc.Classes.Core_h
import Qtc.ClassTypes.Core
import Qth.ClassTypes.Core
import Qtc.Classes.Gui_h
import Qtc.ClassTypes.Gui
import Foreign.Marshal.Array
instance QunSetUserMethod (QUndoCommand ()) where
unSetUserMethod qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
qtc_QUndoCommand_unSetUserMethod cobj_qobj (toCInt 0) (toCInt evid)
foreign import ccall "qtc_QUndoCommand_unSetUserMethod" qtc_QUndoCommand_unSetUserMethod :: Ptr (TQUndoCommand a) -> CInt -> CInt -> IO (CBool)
instance QunSetUserMethod (QUndoCommandSc a) where
unSetUserMethod qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
qtc_QUndoCommand_unSetUserMethod cobj_qobj (toCInt 0) (toCInt evid)
instance QunSetUserMethodVariant (QUndoCommand ()) where
unSetUserMethodVariant qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
qtc_QUndoCommand_unSetUserMethod cobj_qobj (toCInt 1) (toCInt evid)
instance QunSetUserMethodVariant (QUndoCommandSc a) where
unSetUserMethodVariant qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
qtc_QUndoCommand_unSetUserMethod cobj_qobj (toCInt 1) (toCInt evid)
instance QunSetUserMethodVariantList (QUndoCommand ()) where
unSetUserMethodVariantList qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
qtc_QUndoCommand_unSetUserMethod cobj_qobj (toCInt 2) (toCInt evid)
instance QunSetUserMethodVariantList (QUndoCommandSc a) where
unSetUserMethodVariantList qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
qtc_QUndoCommand_unSetUserMethod cobj_qobj (toCInt 2) (toCInt evid)
instance QsetUserMethod (QUndoCommand ()) (QUndoCommand x0 -> IO ()) where
setUserMethod _eobj _eid _handler
= do
funptr <- wrapSetUserMethod_QUndoCommand setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetUserMethod_QUndoCommand_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
qtc_QUndoCommand_setUserMethod cobj_eobj (toCInt _eid) (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return ()
where
setHandlerWrapper :: Ptr (TQUndoCommand x0) -> IO ()
setHandlerWrapper x0
= do
x0obj <- objectFromPtr_nf x0
if (objectIsNull x0obj)
then return ()
else _handler x0obj
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QUndoCommand_setUserMethod" qtc_QUndoCommand_setUserMethod :: Ptr (TQUndoCommand a) -> CInt -> Ptr (Ptr (TQUndoCommand x0) -> IO ()) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetUserMethod_QUndoCommand :: (Ptr (TQUndoCommand x0) -> IO ()) -> IO (FunPtr (Ptr (TQUndoCommand x0) -> IO ()))
foreign import ccall "wrapper" wrapSetUserMethod_QUndoCommand_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetUserMethod (QUndoCommandSc a) (QUndoCommand x0 -> IO ()) where
setUserMethod _eobj _eid _handler
= do
funptr <- wrapSetUserMethod_QUndoCommand setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetUserMethod_QUndoCommand_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
qtc_QUndoCommand_setUserMethod cobj_eobj (toCInt _eid) (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return ()
where
setHandlerWrapper :: Ptr (TQUndoCommand x0) -> IO ()
setHandlerWrapper x0
= do
x0obj <- objectFromPtr_nf x0
if (objectIsNull x0obj)
then return ()
else _handler x0obj
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance QsetUserMethod (QUndoCommand ()) (QUndoCommand x0 -> QVariant () -> IO (QVariant ())) where
setUserMethod _eobj _eid _handler
= do
funptr <- wrapSetUserMethodVariant_QUndoCommand setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetUserMethodVariant_QUndoCommand_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
qtc_QUndoCommand_setUserMethodVariant cobj_eobj (toCInt _eid) (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return ()
where
setHandlerWrapper :: Ptr (TQUndoCommand x0) -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))
setHandlerWrapper x0 x1
= do
x0obj <- objectFromPtr_nf x0
x1obj <- objectFromPtr_nf x1
rv <- if (objectIsNull x0obj)
then return $ objectCast x0obj
else _handler x0obj x1obj
withObjectPtr rv $ \cobj_rv -> return cobj_rv
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QUndoCommand_setUserMethodVariant" qtc_QUndoCommand_setUserMethodVariant :: Ptr (TQUndoCommand a) -> CInt -> Ptr (Ptr (TQUndoCommand x0) -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetUserMethodVariant_QUndoCommand :: (Ptr (TQUndoCommand x0) -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))) -> IO (FunPtr (Ptr (TQUndoCommand x0) -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))))
foreign import ccall "wrapper" wrapSetUserMethodVariant_QUndoCommand_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetUserMethod (QUndoCommandSc a) (QUndoCommand x0 -> QVariant () -> IO (QVariant ())) where
setUserMethod _eobj _eid _handler
= do
funptr <- wrapSetUserMethodVariant_QUndoCommand setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetUserMethodVariant_QUndoCommand_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
qtc_QUndoCommand_setUserMethodVariant cobj_eobj (toCInt _eid) (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return ()
where
setHandlerWrapper :: Ptr (TQUndoCommand x0) -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))
setHandlerWrapper x0 x1
= do
x0obj <- objectFromPtr_nf x0
x1obj <- objectFromPtr_nf x1
rv <- if (objectIsNull x0obj)
then return $ objectCast x0obj
else _handler x0obj x1obj
withObjectPtr rv $ \cobj_rv -> return cobj_rv
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance QunSetHandler (QUndoCommand ()) where
unSetHandler qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
withCWString evid $ \cstr_evid ->
qtc_QUndoCommand_unSetHandler cobj_qobj cstr_evid
foreign import ccall "qtc_QUndoCommand_unSetHandler" qtc_QUndoCommand_unSetHandler :: Ptr (TQUndoCommand a) -> CWString -> IO (CBool)
instance QunSetHandler (QUndoCommandSc a) where
unSetHandler qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
withCWString evid $ \cstr_evid ->
qtc_QUndoCommand_unSetHandler cobj_qobj cstr_evid
instance QsetHandler (QUndoCommand ()) (QUndoCommand x0 -> IO (Int)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QUndoCommand1 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QUndoCommand1_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QUndoCommand_setHandler1 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQUndoCommand x0) -> IO (CInt)
setHandlerWrapper x0
= do x0obj <- objectFromPtr_nf x0
let rv =
if (objectIsNull x0obj)
then return 0
else _handler x0obj
rvf <- rv
return (toCInt rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QUndoCommand_setHandler1" qtc_QUndoCommand_setHandler1 :: Ptr (TQUndoCommand a) -> CWString -> Ptr (Ptr (TQUndoCommand x0) -> IO (CInt)) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QUndoCommand1 :: (Ptr (TQUndoCommand x0) -> IO (CInt)) -> IO (FunPtr (Ptr (TQUndoCommand x0) -> IO (CInt)))
foreign import ccall "wrapper" wrapSetHandler_QUndoCommand1_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetHandler (QUndoCommandSc a) (QUndoCommand x0 -> IO (Int)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QUndoCommand1 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QUndoCommand1_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QUndoCommand_setHandler1 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQUndoCommand x0) -> IO (CInt)
setHandlerWrapper x0
= do x0obj <- objectFromPtr_nf x0
let rv =
if (objectIsNull x0obj)
then return 0
else _handler x0obj
rvf <- rv
return (toCInt rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
class Qqid_h x0 x1 where
qid_h :: x0 -> x1 -> IO (Int)
instance Qqid_h (QUndoCommand ()) (()) where
qid_h x0 ()
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoCommand_id cobj_x0
foreign import ccall "qtc_QUndoCommand_id" qtc_QUndoCommand_id :: Ptr (TQUndoCommand a) -> IO CInt
instance Qqid_h (QUndoCommandSc a) (()) where
qid_h x0 ()
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoCommand_id cobj_x0
instance QsetHandler (QUndoCommand ()) (QUndoCommand x0 -> QUndoCommand t1 -> IO (Bool)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QUndoCommand2 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QUndoCommand2_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QUndoCommand_setHandler2 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQUndoCommand x0) -> Ptr (TQUndoCommand t1) -> IO (CBool)
setHandlerWrapper x0 x1
= do x0obj <- objectFromPtr_nf x0
x1obj <- objectFromPtr_nf x1
let rv =
if (objectIsNull x0obj)
then return False
else _handler x0obj x1obj
rvf <- rv
return (toCBool rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QUndoCommand_setHandler2" qtc_QUndoCommand_setHandler2 :: Ptr (TQUndoCommand a) -> CWString -> Ptr (Ptr (TQUndoCommand x0) -> Ptr (TQUndoCommand t1) -> IO (CBool)) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QUndoCommand2 :: (Ptr (TQUndoCommand x0) -> Ptr (TQUndoCommand t1) -> IO (CBool)) -> IO (FunPtr (Ptr (TQUndoCommand x0) -> Ptr (TQUndoCommand t1) -> IO (CBool)))
foreign import ccall "wrapper" wrapSetHandler_QUndoCommand2_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetHandler (QUndoCommandSc a) (QUndoCommand x0 -> QUndoCommand t1 -> IO (Bool)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QUndoCommand2 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QUndoCommand2_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QUndoCommand_setHandler2 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQUndoCommand x0) -> Ptr (TQUndoCommand t1) -> IO (CBool)
setHandlerWrapper x0 x1
= do x0obj <- objectFromPtr_nf x0
x1obj <- objectFromPtr_nf x1
let rv =
if (objectIsNull x0obj)
then return False
else _handler x0obj x1obj
rvf <- rv
return (toCBool rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
class QmergeWith_h x0 x1 where
mergeWith_h :: x0 -> x1 -> IO (Bool)
instance QmergeWith_h (QUndoCommand ()) ((QUndoCommand t1)) where
mergeWith_h x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoCommand_mergeWith cobj_x0 cobj_x1
foreign import ccall "qtc_QUndoCommand_mergeWith" qtc_QUndoCommand_mergeWith :: Ptr (TQUndoCommand a) -> Ptr (TQUndoCommand t1) -> IO CBool
instance QmergeWith_h (QUndoCommandSc a) ((QUndoCommand t1)) where
mergeWith_h x0 (x1)
= withBoolResult $
withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QUndoCommand_mergeWith cobj_x0 cobj_x1
instance QsetHandler (QUndoCommand ()) (QUndoCommand x0 -> IO ()) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QUndoCommand3 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QUndoCommand3_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QUndoCommand_setHandler3 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQUndoCommand x0) -> IO ()
setHandlerWrapper x0
= do x0obj <- objectFromPtr_nf x0
if (objectIsNull x0obj)
then return ()
else _handler x0obj
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QUndoCommand_setHandler3" qtc_QUndoCommand_setHandler3 :: Ptr (TQUndoCommand a) -> CWString -> Ptr (Ptr (TQUndoCommand x0) -> IO ()) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QUndoCommand3 :: (Ptr (TQUndoCommand x0) -> IO ()) -> IO (FunPtr (Ptr (TQUndoCommand x0) -> IO ()))
foreign import ccall "wrapper" wrapSetHandler_QUndoCommand3_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetHandler (QUndoCommandSc a) (QUndoCommand x0 -> IO ()) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QUndoCommand3 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QUndoCommand3_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QUndoCommand_setHandler3 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQUndoCommand x0) -> IO ()
setHandlerWrapper x0
= do x0obj <- objectFromPtr_nf x0
if (objectIsNull x0obj)
then return ()
else _handler x0obj
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
class Qredo_h x0 x1 where
redo_h :: x0 -> x1 -> IO ()
instance Qredo_h (QUndoCommand ()) (()) where
redo_h x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoCommand_redo cobj_x0
foreign import ccall "qtc_QUndoCommand_redo" qtc_QUndoCommand_redo :: Ptr (TQUndoCommand a) -> IO ()
instance Qredo_h (QUndoCommandSc a) (()) where
redo_h x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoCommand_redo cobj_x0
class Qundo_h x0 x1 where
undo_h :: x0 -> x1 -> IO ()
instance Qundo_h (QUndoCommand ()) (()) where
undo_h x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoCommand_undo cobj_x0
foreign import ccall "qtc_QUndoCommand_undo" qtc_QUndoCommand_undo :: Ptr (TQUndoCommand a) -> IO ()
instance Qundo_h (QUndoCommandSc a) (()) where
undo_h x0 ()
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QUndoCommand_undo cobj_x0
| uduki/hsQt | Qtc/Gui/QUndoCommand_h.hs | bsd-2-clause | 20,034 | 0 | 18 | 4,495 | 6,513 | 3,112 | 3,401 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE DeriveDataTypeable #-}
module NLP.Concraft.Polish.DAG.Format.Base
(
-- * Printing
ShowCfg (..)
, ProbType (..)
, showSent
, showData
-- * Parsing
, parseData
, parseSent
) where
import Prelude hiding (Word)
import Data.Monoid (mconcat, mappend)
import qualified Data.Map as M
import Data.List (intersperse, groupBy)
-- import Data.Maybe (listToMaybe)
import Data.String (IsString)
import Data.Data (Data)
import Data.Typeable (Typeable)
import qualified Data.Text as T
import qualified Data.Text.Lazy as L
import qualified Data.Text.Lazy.Builder as L
import Text.Printf (printf)
import Text.Read (readMaybe)
import qualified Data.DAG as DAG
-- import qualified Data.CRF.Chain1.Constrained.DAG.Dataset.Internal as DAG
import qualified NLP.Concraft.DAG.Morphosyntax as X
-- import qualified NLP.Concraft.Polish.DAG2 as C
-- import NLP.Concraft.Polish.DAG2 (AnnoSent(..))
-- import qualified NLP.Concraft.Polish.DAGSeg as C
import NLP.Concraft.Polish.DAGSeg (AnnoSent(..))
import qualified NLP.Concraft.Polish.Morphosyntax as I
import NLP.Concraft.Polish.DAG.Morphosyntax hiding (tag, Tag)
import qualified NLP.Concraft.Polish.DAG.Morphosyntax as PolX
-----------------------------
-- Base
-----------------------------
type Tag = PolX.Interp PolX.Tag
-----------------------------
-- Showing
-----------------------------
-- | Printing configuration.
data ShowCfg = ShowCfg
-- { suppressProbs :: Bool
-- -- ^ Do not show any probabilities
{ probType :: ProbType
-- ^ Which type of probabilities to show (unless suppressed)
, numericDisamb :: Bool
-- ^ Print disamb markers as numerical values instead of probability values
}
-- | Type of probabilities.
data ProbType
= Marginals
-- ^ Marginals of the disambiguation model
| MaxProbs
-- ^ Max probabilities of the disambiguation model
| GuessedMarginals
-- ^ Marginals of the guessing model
deriving (Show, Eq, Ord, Enum, Typeable, Data)
-- Above, deriving Typeable and Data so that it can be easily parsed
-- for the command-line tool.
-- mkProbType :: ProbType -> Disamb.ProbType
-- mkProbType Marginals = Disamb.Marginals
-- mkProbType MaxProbs = Disamb.MaxProbs
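-- An illustrative default configuration (an assumption, not provided by the
-- original module): report disamb-model marginals and print textual
-- "disamb" markers rather than numeric ones.
defaultShowCfg :: ShowCfg
defaultShowCfg = ShowCfg
  { probType = Marginals
  , numericDisamb = False
  }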
-- | Show entire data.
showData :: ShowCfg -> [[AnnoSent]] -> L.Text
showData cfg
= flip L.append "\n"
. L.toLazyText
. mconcat
. intersperse "\n"
. map (buildSents cfg)
-- | Show the given sentence.
showSent :: ShowCfg -> [AnnoSent] -> L.Text
showSent cfg = L.toLazyText . buildSents cfg
buildSents :: ShowCfg -> [AnnoSent] -> L.Builder
buildSents cfg =
finalize . map (buildSent cfg)
where
-- finalize = (`mappend` "\n") . mconcat . intersperse "\n"
finalize = mconcat
buildSent :: ShowCfg -> AnnoSent -> L.Builder
buildSent showCfg AnnoSent{..} = finalize $ do
let dag = guessSent
edgeID <- DAG.dagEdges dag
let tailNode = DAG.begsWith edgeID dag
headNode = DAG.endsWith edgeID dag
X.Seg{..} = DAG.edgeLabel edgeID dag
interpWeight <- map Just (M.toList (X.unWMap tags)) ++
if known word then [] else [Nothing]
return $ case interpWeight of
Just (interp@Interp{..}, weight) -> buildInterp
showCfg tailNode headNode word interp
(case probType showCfg of
Marginals ->
tagWeightIn edgeID interp marginals
MaxProbs ->
tagWeightIn edgeID interp maxProbs
GuessedMarginals ->
weight)
(tagLabelIn False edgeID interp disambs)
-- below, the case when the word is unknown
Nothing ->
let interp = Interp
{ base = "none"
, tag = ign
, commonness = Nothing
, qualifier = Nothing
, metaInfo = Nothing
, eos = False }
in buildInterp showCfg tailNode headNode word interp 0 False
where
finalize = (`mappend` "\n") . mconcat . intersperse "\n"
tagWeightIn = tagLabelIn 0
tagLabelIn def i x anno
= maybe def (tagLabel def x) (DAG.maybeEdgeLabel i anno)
tagLabel def x = maybe def id . M.lookup x
buildInterp
:: ShowCfg
-> DAG.NodeID -- ^ Tail node
-> DAG.NodeID -- ^ Head node
-> Word -- ^ Word
-> Interp PolX.Tag -- ^ A particular morphosyntactic interpretation
-> Double -- ^ Probability to report (e.g., marginal probability)
-> Bool -- Disamb makrer
-> L.Builder
buildInterp ShowCfg{..} tailNode headNode word Interp{..} weight disamb =
mconcat $ intersperse "\t" $
[ buildNode tailNode
, buildNode headNode
, L.fromText $ orth word
, L.fromText $ if known word then base else orth word
, L.fromText tag
, buildMayText commonness
, buildMayText qualifier
, if numericDisamb
then buildDisamb disamb
else buildWeight weight
, buildMayText metaInfo
, if eos then "eos" else ""
, buildMayText (wordInfo word)
] ++
if numericDisamb then [] else [buildDisamb disamb]
where
buildNode (DAG.NodeID i) = L.fromString (show i)
buildWeight = L.fromString . printf "%.4f"
buildDisamb True = if numericDisamb then "1.0000" else "disamb"
buildDisamb False = if numericDisamb then "0.0000" else ""
-- buildDmb = between "\t" "\n" . L.fromString . printf "%.3f"
-- between x y z = x <> z <> y
buildMayText Nothing = ""
buildMayText (Just x) = L.fromText x
-----------------------------
-- Parsing
-----------------------------
-- | Parse the text in the DAG format.
parseData :: L.Text -> [Sent Tag]
parseData =
map parseSent . filter realSent . L.splitOn "\n\n"
where
realSent = not . L.null
-- | Parse sentence in the DAG format.
parseSent :: L.Text -> Sent Tag
parseSent = fromRows . parseRows
data Row = Row
{ tailNode :: Int
, headNode :: Int
, orthForm :: T.Text
, baseForm :: T.Text
, theTag :: PolX.Tag
, commonness :: Maybe T.Text
, qualifier :: Maybe T.Text
, tagProb :: Double
, metaInfo :: Maybe T.Text
, eos :: Bool
, segmInfo :: Maybe T.Text
}
fromRows :: [Row] -> Sent Tag
fromRows =
-- DAG.fromList' I.None . zip (repeat I.None) . getEdges
DAG.mapN (const I.None) . DAG.fromEdgesUnsafe . getEdges
where
getEdges = map mkEdge . groupBy theSameEdge
theSameEdge r1 r2
= tailNode r1 == tailNode r2
&& headNode r1 == headNode r2
mkEdge [] = error "Format.Base.fromRows: empty list"
mkEdge rows@(row0:_) = DAG.Edge
{ DAG.tailNode = DAG.NodeID $ tailNode row0
, DAG.headNode = DAG.NodeID $ headNode row0
, DAG.edLabel = edge }
where
edge = X.Seg
{ word = newWord
, tags = newTags
}
newWord = Word
{ orth = orthForm row0
, known = not $ ign `elem` map theTag rows
, wordInfo = segmInfo row0
}
newTags = X.mkWMap
[ (interp, tagProb)
| Row{..} <- rows
, not $ theTag == ign
, let interp = Interp
{ base = baseForm
, tag = theTag
, commonness = commonness
, qualifier = qualifier
, metaInfo = metaInfo
, eos = eos }
]
parseRows :: L.Text -> [Row]
parseRows = map parseRow . L.splitOn "\n"
parseRow :: L.Text -> Row
parseRow =
doit . L.splitOn "\t"
where
doit (tlNode : hdNode : otForm : bsForm : tag :
comm : qual : prob : meta : eos : segi : _) = Row
{ tailNode = readTyp "tail node" $ L.unpack tlNode
, headNode = readTyp "head node" $ L.unpack hdNode
, orthForm = L.toStrict otForm
, baseForm = L.toStrict bsForm
, theTag = L.toStrict tag
, commonness = nullIfEmpty comm
, qualifier = nullIfEmpty qual
, tagProb = readTyp "probability value" $ L.unpack prob
, metaInfo = nullIfEmpty meta
, eos = case eos of
"eos" -> True
_ -> False
, segmInfo = nullIfEmpty segi
}
-- doit (tlNode : hdNode : otForm : bsForm : tag :
-- comm : qual : prob : meta : eos : _) = Row
-- { tailNode = readTyp "tail node" $ L.unpack tlNode
-- , headNode = readTyp "head node" $ L.unpack hdNode
-- , orthForm = L.toStrict otForm
-- , baseForm = L.toStrict bsForm
-- , theTag = L.toStrict tag
-- , commonness = nullIfEmpty comm
-- , qualifier = nullIfEmpty qual
-- , tagProb = readTyp "probability value" $ L.unpack prob
-- , metaInfo = nullIfEmpty meta
-- , eos = case eos of
-- "eos" -> True
-- _ -> False
-- , segmInfo = Nothing
-- }
doit xs =
error $ unlines
[ "[parseRow] expected 11 columns, got " ++ show (length xs)
, L.unpack (L.intercalate "\t" xs)
]
nullIfEmpty x = case x of
"" -> Nothing
_ -> Just (L.toStrict x)
-----------
-- Utils
-----------
-- -- | An infix synonym for 'mappend'.
-- (<>) :: Monoid m => m -> m -> m
-- (<>) = mappend
-- {-# INLINE (<>) #-}
readTyp :: (Read a) => String -> String -> a
readTyp typ x =
case readMaybe x of
Just y -> y
Nothing -> error $
"unable to parse \"" ++ x ++ "\" to a " ++ typ
-- "Unable to parse <" ++ typ ++ ">" ++
-- " (string=" ++ x ++ ")"
-- | Tag which indicates unknown words.
ign :: IsString a => a
ign = "ign"
{-# INLINE ign #-}
| kawu/concraft-pl | src/NLP/Concraft/Polish/DAG/Format/Base.hs | bsd-2-clause | 9,496 | 0 | 19 | 2,627 | 2,212 | 1,230 | 982 | 201 | 9 |
{-# OPTIONS_GHC -fno-warn-orphans #-}
-- |
-- Module : Data.CritBit.Set
-- Copyright : (c) Bryan O'Sullivan and others 2013-2014
-- License : BSD-style
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : GHC
--
-- A set type that uses crit-bit trees internally.
--
-- For every /n/ key-value pairs stored, a crit-bit tree uses /n/-1
-- internal nodes, for a total of 2/n/-1 internal nodes and leaves.
module Data.CritBit.Set
(
-- * Set type
Set
-- * Operators
, (\\)
-- * Query
, null
, size
, member
, notMember
, lookupLT
, lookupGT
, lookupLE
, lookupGE
, isSubsetOf
, isProperSubsetOf
-- * Construction
, empty
, singleton
, insert
, delete
-- * Combine
, union
, unions
, difference
, intersection
-- * Filter
, filter
, partition
, split
, splitMember
-- * Map
, map
, mapMonotonic
-- * Folds
, foldr
, foldl
-- ** Strict folds
, foldr'
, foldl'
-- * Min\/Max
, findMin
, findMax
, deleteMin
, deleteMax
, deleteFindMin
, deleteFindMax
, maxView
, minView
-- * Conversion
-- ** List
, elems
, toList
, fromList
-- ** Ordered list
, toAscList
, toDescList
, fromAscList
, fromDistinctAscList
) where
import Control.Arrow ((***))
import Data.CritBit.Types.Internal (CritBit(..), Set(..), CritBitKey, Node(..))
import Data.Foldable (Foldable, foldMap)
import Data.Maybe (isJust)
import Data.Monoid (Monoid(..))
import Prelude hiding (null, filter, map, foldl, foldr)
import qualified Data.CritBit.Tree as T
import qualified Data.List as List
instance (Show a) => Show (Set a) where
show s = "fromList " ++ show (toList s)
instance CritBitKey k => Monoid (Set k) where
mempty = empty
mappend = union
mconcat = unions
instance Foldable Set where
foldMap f (Set (CritBit n)) = foldSet f n
foldSet :: (Monoid m) => (a -> m) -> Node a () -> m
foldSet f (Internal l r _ _) = mappend (foldSet f l) (foldSet f r)
foldSet f (Leaf k _) = f k
foldSet _ Empty = mempty
{-# INLINABLE foldSet #-}
-- | Same as 'difference'.
(\\) :: CritBitKey a => Set a -> Set a -> Set a
s \\ p = difference s p
{-# INLINABLE (\\) #-}
-- | /O(1)/. Is the set empty?
--
-- > null (empty) == True
-- > null (singleton "a") == False
null :: Set a -> Bool
null (Set a) = T.null a
-- | /O(1)/. The empty set.
--
-- > empty == fromList []
-- > size empty == 0
empty :: Set a
empty = Set T.empty
{-# INLINABLE empty #-}
-- | /O(1)/. A set with a single element.
--
-- > singleton "a" == fromList ["a"]
singleton :: a -> Set a
singleton a = Set $ T.singleton a ()
{-# INLINE singleton #-}
-- | /O(k)/. Build a set from a list of values.
--
-- > fromList [] == empty
-- > fromList ["a", "b", "a"] == fromList ["a", "b"]
fromList :: (CritBitKey a) => [a] -> Set a
fromList = liftFromList T.fromList
{-# INLINABLE fromList #-}
-- | /O(n)/. An alias of 'toList'.
--
-- Returns the elements of a set in ascending order.
elems :: Set a -> [a]
elems = toList
-- | /O(n)/. Convert the set to a list of values. The list returned
-- will be sorted in lexicographically ascending order.
--
-- > toList (fromList ["b", "a"]) == ["a", "b"]
-- > toList empty == []
toList :: Set a -> [a]
toList = wrapS id T.keys
{-# INLINABLE toList #-}
-- | /O(n)/. The number of elements in the set.
--
-- > size empty == 0
-- > size (singleton "a") == 1
-- > size (fromList ["a", "c", "b"]) == 3
size :: Set a -> Int
size = wrapS id T.size
{-# INLINABLE size #-}
-- | /O(k)/. Is the element in the set?
--
-- > member "a" (fromList ["a", "b"]) == True
-- > member "c" (fromList ["a", "b"]) == False
--
-- See also 'notMember'.
member :: (CritBitKey a) => a -> Set a -> Bool
member a (Set s) = T.member a s
{-# INLINABLE member #-}
-- | /O(k)/. Is the element not in the set?
--
-- > notMember "a" (fromList ["a", "b"]) == False
-- > notMember "c" (fromList ["a", "b"]) == True
--
-- See also 'member'.
notMember :: (CritBitKey a) => a -> Set a -> Bool
notMember a (Set s) = T.notMember a s
{-# INLINABLE notMember #-}
-- | /O(k)/. Find largest element smaller than the given one.
--
-- > lookupLT "b" (fromList ["a", "b"]) == Just "a"
-- > lookupLT "aa" (fromList ["a", "b"]) == Just "a"
-- > lookupLT "a" (fromList ["a", "b"]) == Nothing
lookupLT :: (CritBitKey a) => a -> Set a -> Maybe a
lookupLT = wrapVS (fmap fst) T.lookupLT
{-# INLINABLE lookupLT #-}
-- | /O(k)/. Find smallest element greater than the given one.
--
-- > lookupGT "b" (fromList ["a", "b"]) == Nothing
-- > lookupGT "aa" (fromList ["a", "b"]) == Just "b"
-- > lookupGT "a" (fromList ["a", "b"]) == Just "b"
lookupGT :: (CritBitKey a) => a -> Set a -> Maybe a
lookupGT = wrapVS (fmap fst) T.lookupGT
{-# INLINABLE lookupGT #-}
-- | /O(k)/. Find largest element smaller than or equal to the given one.
--
-- > lookupLE "b" (fromList ["a", "b"]) == Just "b"
-- > lookupLE "aa" (fromList ["a", "b"]) == Just "a"
-- > lookupLE "a" (fromList ["a", "b"]) == Just "a"
-- > lookupLE "" (fromList ["a", "b"]) == Nothing
lookupLE :: (CritBitKey a) => a -> Set a -> Maybe a
lookupLE = wrapVS (fmap fst) T.lookupLE
{-# INLINABLE lookupLE #-}
-- | /O(k)/. Find smallest element greater than or equal to the given one.
--
-- > lookupGE "aa" (fromList ["a", "b"]) == Just "b"
-- > lookupGE "b" (fromList ["a", "b"]) == Just "b"
-- > lookupGE "bb" (fromList ["a", "b"]) == Nothing
lookupGE :: (CritBitKey a) => a -> Set a -> Maybe a
lookupGE = wrapVS (fmap fst) T.lookupGE
{-# INLINABLE lookupGE #-}
-- | /O(n+m)/. Is this a subset?
-- @(s1 `isSubsetOf` s2)@ tells whether @s1@ is a subset of @s2@.
isSubsetOf :: (CritBitKey a) => Set a -> Set a -> Bool
isSubsetOf = wrapSS id T.isSubmapOf
{-# INLINABLE isSubsetOf #-}
-- | /O(n+m)/. Is this a proper subset (ie. a subset but not equal)?
-- @(s1 `isProperSubsetOf` s2)@ tells whether @s1@ is a proper subset of @s2@.
isProperSubsetOf :: (CritBitKey a) => Set a -> Set a -> Bool
isProperSubsetOf = wrapSS id T.isProperSubmapOf
{-# INLINABLE isProperSubsetOf #-}
-- | /O(k)/. Insert an element in a set.
-- If the set already contains an element equal to the given value,
-- it is replaced with the new value.
insert :: (CritBitKey a) => a -> Set a -> Set a
insert = wrapVS Set (`T.insert` ())
{-# INLINABLE insert #-}
-- | /O(k)/. Delete an element from a set.
delete :: (CritBitKey a) => a -> Set a -> Set a
delete = wrapVS Set T.delete
{-# INLINABLE delete #-}
-- | /O(k)/. The union of two sets, preferring the first set when
-- equal elements are encountered.
union :: (CritBitKey a) => Set a -> Set a -> Set a
union = wrapSS Set T.union
{-# INLINABLE union #-}
-- | The union of a list of sets: (@'unions' == 'foldl' 'union' 'empty'@).
unions :: (CritBitKey a) => [Set a] -> Set a
unions = List.foldl' union empty
{-# INLINABLE unions #-}
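-- Example (illustrative, not from the original documentation):
--
-- > unions [fromList ["a"], fromList ["b"], fromList ["a", "c"]]
-- >     == fromList ["a", "b", "c"]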
-- | /O(k)/. The difference of two sets.
difference :: (CritBitKey a) => Set a -> Set a -> Set a
difference = wrapSS Set T.difference
{-# INLINABLE difference #-}
-- | /O(k)/. The intersection of two sets. Elements of the
-- result come from the first set.
intersection :: (CritBitKey a) => Set a -> Set a -> Set a
intersection = wrapSS Set T.intersection
{-# INLINABLE intersection #-}
-- | /O(n)/. Filter all elements that satisfy the predicate.
--
-- > filter (> "a") (fromList ["a", "b"]) == fromList ["b"]
-- > filter (> "x") (fromList ["a", "b"]) == empty
-- > filter (< "a") (fromList ["a", "b"]) == empty
filter :: (a -> Bool) -> Set a -> Set a
filter = wrapVS Set (T.filterWithKey . (const .))
{-# INLINABLE filter #-}
-- | /O(n)/. Partition the set into two sets, one with all elements that satisfy
-- the predicate and one with all elements that don't satisfy the predicate.
-- See also 'split'.
partition :: (CritBitKey a) => (a -> Bool) -> Set a -> (Set a, Set a)
partition = wrapVS (Set *** Set) (T.partitionWithKey . (const .))
{-# INLINABLE partition #-}
-- | /O(k)/. The expression (@'split' x set@) is a pair @(set1,set2)@
-- where @set1@ comprises the elements of @set@ less than @x@ and @set2@
-- comprises the elements of @set@ greater than @x@.
--
-- > split "a" (fromList ["b", "d"]) == (empty, fromList ["b", "d"])
-- > split "b" (fromList ["b", "d"]) == (empty, singleton "d")
-- > split "c" (fromList ["b", "d"]) == (singleton "b", singleton "d")
-- > split "d" (fromList ["b", "d"]) == (singleton "b", empty)
-- > split "e" (fromList ["b", "d"]) == (fromList ["b", "d"], empty)
split :: (CritBitKey a) => a -> Set a -> (Set a, Set a)
split = wrapVS (Set *** Set) T.split
{-# INLINABLE split #-}
-- | /O(k)/. Performs a 'split' but also returns whether the pivot
-- element was found in the original set.
--
-- > splitMember "a" (fromList ["b", "d"]) == (empty, False, fromList ["b", "d"])
-- > splitMember "b" (fromList ["b", "d"]) == (empty, True, singleton "d")
-- > splitMember "c" (fromList ["b", "d"]) == (singleton "b", False, singleton "d")
-- > splitMember "d" (fromList ["b", "d"]) == (singleton "b", True, empty)
-- > splitMember "e" (fromList ["b", "d"]) == (fromList ["b", "d"], False, empty)
splitMember :: (CritBitKey a) => a -> Set a -> (Set a, Bool, Set a)
splitMember = wrapVS pack T.splitLookup
where pack (l, m, r) = (Set l, isJust m, Set r)
{-# INLINABLE splitMember #-}
-- | /O(k)/. @'map' f s@ is the set obtained by applying @f@ to each
-- element of @s@.
--
-- It's worth noting that the size of the result may be smaller if,
-- for some @(x,y)@, @x \/= y && f x == f y@
map :: (CritBitKey a2) => (a1 -> a2) -> Set a1 -> Set a2
map = wrapVS Set T.mapKeys
{-# INLINABLE map #-}
-- | /O(n)/. The @'mapMonotonic' f s == 'map' f s@, but works only when
-- @f@ is monotonic.
-- /The precondition is not checked./
-- Semi-formally, we have:
--
-- > and [x < y ==> f x < f y | x <- ls, y <- ls]
-- > ==> mapMonotonic f s == map f s
-- > where ls = toList s
mapMonotonic :: (CritBitKey a2) => (a1 -> a2) -> Set a1 -> Set a2
mapMonotonic = wrapVS Set T.mapKeysMonotonic
{-# INLINABLE mapMonotonic #-}
-- | /O(n)/. Fold the elements in the set using the given left-associative
-- binary operator, such that @'foldl' f z == 'Prelude.foldl' f z . 'toAscList'@.
--
-- For example,
--
-- > toDescList set = foldl (flip (:)) [] set
foldl :: (a -> b -> a) -> a -> Set b -> a
foldl f = wrapVS id (T.foldlWithKey ((const .) . f))
{-# INLINE foldl #-}
-- | /O(n)/. A strict version of 'foldl'. Each application of the operator is
-- evaluated before using the result in the next application. This
-- function is strict in the starting value.
foldl' :: (a -> b -> a) -> a -> Set b -> a
foldl' f = wrapVS id (T.foldlWithKey' ((const .) . f))
{-# INLINE foldl' #-}
-- | /O(n)/. Fold the elements in the set using the given right-associative
-- binary operator, such that @'foldr' f z == 'Prelude.foldr' f z . 'toAscList'@.
--
-- For example,
--
-- > toAscList set = foldr (:) [] set
foldr :: (a -> b -> b) -> b -> Set a -> b
foldr f = wrapVS id (T.foldrWithKey (const . f))
{-# INLINE foldr #-}
-- | /O(n)/. A strict version of 'foldr'. Each application of the operator is
-- evaluated before using the result in the next application. This
-- function is strict in the starting value.
foldr' :: (a -> b -> b) -> b -> Set a -> b
foldr' f = wrapVS id (T.foldrWithKey' (const . f))
{-# INLINE foldr' #-}
-- | /O(k')/. The minimal element of a set.
findMin :: Set a -> a
findMin = wrapS fst T.findMin
{-# INLINE findMin #-}
-- | /O(k)/. The maximal element of a set.
findMax :: Set a -> a
findMax = wrapS fst T.findMax
{-# INLINE findMax #-}
-- | /O(k')/. Delete the minimal element. Returns an empty set if the
-- set is empty.
deleteMin :: Set a -> Set a
deleteMin = wrapS Set T.deleteMin
{-# INLINE deleteMin #-}
-- | /O(k)/. Delete the maximal element. Returns an empty set if the
-- set is empty.
deleteMax :: Set a -> Set a
deleteMax = wrapS Set T.deleteMax
{-# INLINE deleteMax #-}
-- | /O(k')/. Delete and find the minimal element.
--
-- > deleteFindMin set = (findMin set, deleteMin set)
deleteFindMin :: Set a -> (a, Set a)
deleteFindMin = wrapS (fst *** Set) T.deleteFindMin
{-# INLINE deleteFindMin #-}
-- | /O(k)/. Delete and find the maximal element.
--
-- > deleteFindMax set = (findMax set, deleteMax set)
deleteFindMax :: Set a -> (a, Set a)
deleteFindMax = wrapS (fst *** Set) T.deleteFindMax
{-# INLINE deleteFindMax #-}
-- | /O(k')/. Retrieves the minimal key of the set, and the set
-- stripped of that element, or 'Nothing' if passed an empty set.
minView :: Set a -> Maybe (a, Set a)
minView = wrapS (fmap (fst *** Set)) T.minViewWithKey
{-# INLINE minView #-}
-- | /O(k)/. Retrieves the maximal key of the set, and the set
-- stripped of that element, or 'Nothing' if passed an empty set.
maxView :: Set a -> Maybe (a, Set a)
maxView = wrapS (fmap (fst *** Set)) T.maxViewWithKey
{-# INLINE maxView #-}
-- | /O(n)/. Convert the set to an ascending list of elements.
toAscList :: Set a -> [a]
toAscList = toList
-- | /O(n)/. Convert the set to a descending list of elements.
toDescList :: Set a -> [a]
toDescList = reverse . toAscList
-- | /O(n)/. Build a set from an ascending list in linear time.
-- /The precondition (input list is ascending) is not checked./
fromAscList :: (CritBitKey a) => [a] -> Set a
fromAscList = liftFromList T.fromAscList
-- | /O(n)/. Build a set from an ascending list in linear time.
-- /The precondition (input list is ascending) is not checked./
fromDistinctAscList :: (CritBitKey a) => [a] -> Set a
fromDistinctAscList = liftFromList T.fromDistinctAscList
-- | Wraps tree operation to set operation
wrapS :: (r -> q) -> (CritBit a () -> r) -> Set a -> q
wrapS f g (Set s) = f $ g s
{-# INLINE wrapS #-}
-- | Wraps (value, tree) operation to (value, set) operation
wrapVS :: (r -> q) -> (t -> CritBit a () -> r) -> t -> Set a -> q
wrapVS f g a (Set s) = f $ g a s
{-# INLINE wrapVS #-}
-- | Wraps (tree, tree) operation to (set, set) operation
wrapSS :: (r -> q) -> (CritBit a () -> CritBit a () -> r) -> Set a -> Set a -> q
wrapSS f g (Set s1) (Set s2) = f $ g s1 s2
{-# INLINE wrapSS #-}
liftFromList :: ([(a, ())] -> CritBit a ()) -> [a] -> Set a
liftFromList f xs = Set . f . zip xs . repeat $ ()
{-# INLINE liftFromList #-}
| bos/critbit | Data/CritBit/Set.hs | bsd-2-clause | 14,379 | 0 | 11 | 3,131 | 2,878 | 1,619 | 1,259 | 182 | 1 |
module UpdateTest
( main
) where
import Control.Concurrent
import Data.Word
import qualified Data.Set as Set
import qualified Data.Map as Map
import Frenetic.NetCore
import Control.Monad (forever)
import Frenetic.NetCore.Util (poDom)
dlDst a = DlDst $ EthernetAddress (fromInteger a)
switch a = Switch $ fromInteger a
physical a = Physical $ fromInteger a
-- Maybe add an 'else' policy combinator?
-- pol1 `else` pol2 = pol1 <+> (pol2 <%> dom pol1)
-- I'd like to use <%> and change <|> to mean restriction...
infixr 5 <!>
(<!>) pol acts = pol <+> (Not (poDom pol) ==> acts)
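-- Illustrative sketch (not part of the original test): (<!>) gives a policy
-- a catch-all branch -- packets matched by the left-hand policy keep its
-- actions, anything else is forwarded out port 2 here.
fallbackExample = (dlDst 1 ==> [ Forward (physical 1) unmodified ])
                  <!> [ Forward (physical 2) unmodified ]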
pol1 =
((((dlDst 1)
==> [ Forward (physical 1) unmodified ]
<+>
(dlDst 2)
==> [ Forward (physical 2) unmodified ]
<+>
(dlDst 3)
==> [ Forward (physical 3) unmodified ]
<+>
(dlDst 4)
==> [ Forward (physical 4) unmodified ])
<!> [ Forward (physical 5) unmodified ])
<%> (switch 101))
<+>
((((dlDst 5)
==> [ Forward (physical 1) unmodified ]
<+>
(dlDst 6)
==> [ Forward (physical 2) unmodified ]
<+>
(dlDst 7)
==> [ Forward (physical 3) unmodified ]
<+>
(dlDst 8)
==> [ Forward (physical 4) unmodified ])
<!> [ Forward (physical 5) unmodified ])
<%> (switch 102))
<+>
((((dlDst 9)
==> [ Forward (physical 1) unmodified ]
<+>
(dlDst 10)
==> [ Forward (physical 2) unmodified ]
<+>
(dlDst 11)
==> [ Forward (physical 3) unmodified ]
<+>
(dlDst 12)
==> [ Forward (physical 4) unmodified ])
<!> [ Forward (physical 5) unmodified ])
<%> (switch 103))
<+>
((((dlDst 13)
==> [ Forward (physical 1) unmodified ]
<+>
(dlDst 14)
==> [ Forward (physical 2) unmodified ]
<+>
(dlDst 15)
==> [ Forward (physical 3) unmodified ]
<+>
(dlDst 16)
==> [ Forward (physical 4) unmodified ])
<!> [ Forward (physical 5) unmodified ])
<%> (switch 104))
<+>
(((((dlDst 1) <||> (dlDst 2) <||> (dlDst 3) <||> (dlDst 4))
==> [ Forward (physical 1) unmodified ]
<+>
((dlDst 5) <||> (dlDst 6) <||> (dlDst 7) <||> (dlDst 8))
==> [ Forward (physical 2) unmodified ])
<!> [ Forward (physical 3) unmodified ])
<%> (switch 105))
<+>
(((((dlDst 9) <||> (dlDst 10) <||> (dlDst 11) <||> (dlDst 12))
==> [ Forward (physical 1) unmodified ]
<+>
((dlDst 13) <||> (dlDst 14) <||> (dlDst 15) <||> (dlDst 16))
==> [ Forward (physical 2) unmodified ])
<!> [ Forward (physical 3) unmodified ])
<%> (switch 106))
extPorts 101 = [1, 2, 3, 4]
extPorts 102 = [1, 2, 3, 4]
extPorts 103 = [1, 2, 3, 4]
extPorts 104 = [1, 2, 3, 4]
extPorts _ = []
-- pol2 = ((dlDst 2)
-- ==> [ Forward (physical 1) unmodified ]
-- <+>
-- (dlDst 3)
-- ==> [ Forward (physical 2) unmodified ])
-- <%> (switch 1)
main addr = do
polChan <- newChan
writeChan polChan (pol1, extPorts)
consistentController addr polChan
| frenetic-lang/netcore-1.0 | examples/UpdateTest.hs | bsd-3-clause | 3,135 | 0 | 25 | 987 | 1,200 | 632 | 568 | 95 | 1 |
{-# LANGUAGE TypeFamilies, GeneralizedNewtypeDeriving #-}
module Math where
import Data.VectorSpace
-- | A point in the 2D plane (abscissa and ordinate), also used as a vector
newtype Punto = Punto (Float,Float) deriving (Eq,Show, Read, AdditiveGroup)
-- | An angle
type Angolo = Float
-- numeric (field-like) structure on Punto
instance Num Punto where
(+) (Punto (x,y)) (Punto (x1,y1)) = Punto (x+x1,y+y1)
negate (Punto (x,y)) = Punto (negate x,negate y)
(*) = error "Punto Num method undefined used"
abs x = error $ "abs :" ++ show x ++ " Punto Num method undefined used"
signum = error "signum : Punto Num method undefined used"
fromInteger x = error $ "fromInteger " ++ show x ++ ": Punto Num method undefined used"
instance VectorSpace Punto where
type Scalar Punto = Float
t *^ (Punto (x,y)) = Punto (x * t,y * t)
type Ruota = Punto -> Punto
-- rotation about the origin
ruota :: Angolo -> Ruota
ruota alpha (Punto (x,y))= Punto (cos alpha * x - sin alpha * y, sin alpha * x + cos alpha * y)
-- modulus (length) of a vector
modulus :: Punto -> Float
modulus (Punto (x,y)) = sqrt (x ^ 2 + y ^ 2)
pointOfOnlyRotation :: (Punto,Angolo) -> (Punto,Angolo) -> Punto
pointOfOnlyRotation (p1,alpha1) (p2,alpha2) = let
dp@(Punto (dpx,dpy)) = p2 - p1
s = modulus (p2 - p1)
x = Punto (s/2 , 0)
y = Punto (0, s / 2 / tan (alpha / 2))
alpha = alpha2 - alpha1
beta = atan2 dpy dpx
in ruota beta (y - x) + p2
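-- Illustrative sketch (not part of the original module): a quarter turn
-- about the origin sends the unit x-vector to (approximately) the unit
-- y-vector, and 'ruota' preserves 'modulus' (about 5 for the 3-4-5 vector).
rotationExample :: (Punto, Float)
rotationExample =
  ( ruota (pi / 2) (Punto (1, 0))      -- ~ Punto (0, 1), up to rounding
  , modulus (ruota 0.7 (Punto (3, 4))) -- ~ 5.0
  )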
| paolino/marionetta | Math.hs | bsd-3-clause | 1,443 | 0 | 14 | 337 | 582 | 315 | 267 | 29 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
module Benchmarks.Mate where
import Data.Generics
import Data.List
data Kind = King | Queen | Rook | Bishop | Knight | Pawn
deriving (Eq, Show, Data, Typeable)
data Colour = Black | White
deriving (Eq, Show, Data, Typeable)
type Piece = (Colour,Kind)
type Square = (Int,Int)
data Board = Board
[(Kind,Square)] -- white
[(Kind,Square)] -- black
deriving (Show, Data, Typeable)
pieceAt :: Board -> Square -> Maybe Piece
pieceAt (Board wkss bkss) sq =
pieceAtWith White (pieceAtWith Black Nothing bkss) wkss
where
pieceAtWith c n [] = n
pieceAtWith c n ((k,s):xs) = if s==sq then Just (c,k) else pieceAtWith c n xs
emptyAtAll :: Board -> (Square->Bool) -> Bool
emptyAtAll (Board wkss bkss) e =
emptyAtAllAnd (emptyAtAllAnd True bkss) wkss
where
emptyAtAllAnd b [] = b
emptyAtAllAnd b ((_,s):xs) = not (e s) && emptyAtAllAnd b xs
rmPieceAt White sq (Board wkss bkss) = Board (rPa sq wkss) bkss
rmPieceAt Black sq (Board wkss bkss) = Board wkss (rPa sq bkss)
rPa sq (ks@(k,s):kss) = if s==sq then kss else ks : rPa sq kss
putPieceAt sq (White,k) (Board wkss bkss) = Board ((k,sq):wkss) bkss
putPieceAt sq (Black,k) (Board wkss bkss) = Board wkss ((k,sq):bkss)
kingSquare :: Colour -> Board -> Square
kingSquare White (Board kss _) = kSq kss
kingSquare Black (Board _ kss) = kSq kss
kSq ((King,s):_) = s
kSq ( _:kss) = kSq kss
opponent Black = White
opponent White = Black
colourOf :: Piece -> Colour
colourOf (c,_) = c
kindOf :: Piece -> Kind
kindOf (_,k) = k
onboard :: Square -> Bool
onboard (p,q) = 1<=p && p<=8 && 1<=q && q<=8
forcesColoured White (Board kss _) = kss
forcesColoured Black (Board _ kss) = kss
emptyBoard = Board [] []
data Move = Move
Square -- to here
(Maybe Piece) -- capturing this
(Maybe Piece) -- gaining promotion to this
data MoveInFull = MoveInFull Piece Square Move
tryMove :: Colour -> (Kind,Square) -> Move -> Board -> Maybe (MoveInFull,Board)
tryMove c ksq@(k,sq) m@(Move sq' mcp mpp) bd =
if not (kingincheck c bd2) then Just (MoveInFull p sq m, bd2)
else Nothing
where
p = (c,k)
bd1 = rmPieceAt c sq bd
p' = maybe p id mpp
bd2 = maybe (putPieceAt sq' p' bd1)
(const (putPieceAt sq' p' (rmPieceAt (opponent c) sq' bd1)))
mcp
moveDetailsFor :: Colour -> Board -> [(MoveInFull,Board)]
moveDetailsFor c bd =
foldr ( \ksq ms ->
foldr (\rm ms' -> maybe id (:) (tryMove c ksq rm bd) ms')
ms
(rawmoves c ksq bd) )
[]
(forcesColoured c bd)
-- NB raw move = might illegally leave the king in check.
rawmoves :: Colour -> (Kind,Square) -> Board -> [Move]
rawmoves c (k,sq) bd = m c sq bd
where
m = case k of
King -> kingmoves
Queen -> queenmoves
Rook -> rookmoves
Bishop -> bishopmoves
Knight -> knightmoves
Pawn -> pawnmoves
bishopmoves :: Colour -> Square -> Board -> [Move]
bishopmoves c sq bd =
( moveLine bd c sq (\(x,y) -> (x-1,y+1)) $
moveLine bd c sq (\(x,y) -> (x+1,y+1)) $
moveLine bd c sq (\(x,y) -> (x-1,y-1)) $
moveLine bd c sq (\(x,y) -> (x+1,y-1)) id
) []
rookmoves :: Colour -> Square -> Board -> [Move]
rookmoves c sq bd =
( moveLine bd c sq (\(x,y) -> (x-1,y)) $
moveLine bd c sq (\(x,y) -> (x+1,y)) $
moveLine bd c sq (\(x,y) -> (x,y-1)) $
moveLine bd c sq (\(x,y) -> (x,y+1)) id
) []
moveLine :: Board -> Colour -> Square -> (Square->Square) -> ([Move]->a) -> [Move] -> a
moveLine bd c sq inc cont = ml sq
where
ml sq ms =
let sq' = inc sq in
if onboard sq' then
case pieceAt bd sq' of
Nothing -> ml sq' (Move sq' Nothing Nothing : ms)
Just p' -> if colourOf p' /= c then
cont (Move sq' (Just p') Nothing : ms)
else cont ms
else cont ms
kingmoves :: Colour -> Square -> Board -> [Move]
kingmoves c (p,q) bd =
sift c bd [] [(p-1,q+1), (p,q+1), (p+1,q+1),
(p-1,q), (p+1,q),
(p-1,q-1), (p,q-1), (p+1,q-1)]
knightmoves :: Colour -> Square -> Board -> [Move]
knightmoves c (p,q) bd =
sift c bd [] [ (p-1,q+2),(p+1,q+2),
(p-2,q+1), (p+2,q+1),
(p-2,q-1), (p+2,q-1),
(p-1,q-2),(p+1,q-2) ]
sift :: Colour -> Board -> [Move] -> [Square] -> [Move]
sift _ _ ms [] = ms
sift c bd ms (sq:sqs) =
if onboard sq then
case pieceAt bd sq of
Nothing -> sift c bd (Move sq Nothing Nothing : ms) sqs
Just p' -> if colourOf p' == c then sift c bd ms sqs
else sift c bd (Move sq (Just p') Nothing : ms) sqs
else sift c bd ms sqs
pawnmoves :: Colour -> Square -> Board -> [Move]
pawnmoves c (p,q) bd = movs ++ caps
where
movs = let on1 = (p,q+fwd)
on2 = (p,q+2*fwd) in
if pieceAt bd on1 == Nothing then
promote on1 Nothing ++
if (q==2 && c==White || q==7 && c==Black) &&
pieceAt bd on2 == Nothing then [Move on2 Nothing Nothing]
else []
else []
caps = concat [ promote sq mcp
| sq <- [(p+1,q+fwd), (p-1,q+fwd)],
mcp@(Just p') <- [pieceAt bd sq], colourOf p'/=c ]
fwd = case c of
White -> 1
Black -> -1
promote sq@(x,y) mcp =
if (c==Black && y==1 || c==White && y==8) then
map (Move sq mcp . Just)
[(c,Queen), (c,Rook), (c,Bishop), (c,Knight)]
else [Move sq mcp Nothing]
queenmoves :: Colour -> Square -> Board -> [Move]
queenmoves c sq bd = bishopmoves c sq bd ++ rookmoves c sq bd
kingincheck :: Colour -> Board -> Bool
kingincheck c bd =
any givesCheck (forcesColoured (opponent c) bd)
where
givesCheck (k,(x,y)) = kthreat k
where
kthreat King =
abs (x-xk) <= 1 && abs (y-yk) <= 1
kthreat Queen =
kthreat Rook || kthreat Bishop
kthreat Rook =
x==xk &&
emptyAtAll bd (\(xe,ye) -> xe==xk && min y yk < ye && ye < max y yk) ||
y==yk &&
emptyAtAll bd (\(xe,ye) -> ye==yk && min x xk < xe && xe < max x xk)
kthreat Bishop =
x+y==xk+yk &&
emptyAtAll bd (\(xe,ye) -> xe+ye==xk+yk && min x xk < xe && xe < max x xk) ||
x-y==xk-yk &&
emptyAtAll bd (\(xe,ye) -> xe-ye==xk-yk && min x xk < xe && xe < max x xk)
kthreat Knight =
abs (x-xk) == 2 && abs (y-yk) == 1 ||
abs (x-xk) == 1 && abs (y-yk) == 2
kthreat Pawn =
abs (x-xk) == 1 &&
case c of
Black -> yk == y+1
White -> yk == y-1
(xk,yk) = kingSquare c bd
checkmate :: Colour -> Board -> Bool
checkmate col b = null (moveDetailsFor col b) && kingincheck col b
-- Board generator
allDiff [] = True
allDiff (x:xs) = x `notElem` xs && allDiff xs
onBoard (p, q) = 1 <= p && p <= 8 && 1 <= q && q <= 8
one p [] = False
one p (x:xs) = if p x then all (not . p) xs else one p xs
kingsDontTouch ws bs =
(bx > succ wx || wx > succ bx || by > succ wy || wy > succ by)
where
(wx, wy) = kSq ws
(bx, by) = kSq bs
validBoard (Board ws bs) =
one ((== King) . fst) ws
&& one ((== King) . fst) bs
&& all onBoard sqs
&& kingsDontTouch ws bs
&& allDiff sqs
where
sqs = map snd (ws ++ bs)
-- Property
infixr 0 -->
False --> _ = True
True --> x = x
prop_checkmate b =
( length ws == 2
&& Pawn `elem` (map fst ws)
&& validBoard b
)
--> not (checkmate Black b)
where
ws = forcesColoured White b
| UoYCS-plasma/LazySmallCheck2012 | suite/performance/Benchmarks/Mate.hs | bsd-3-clause | 7,326 | 111 | 24 | 2,057 | 4,004 | 2,122 | 1,882 | 199 | 7 |
module Data.Hexagon.NeighborsSpec where
import qualified Data.Sequence as Seq
import SpecHelper
spec :: Spec
spec = do
describe "Data.Hexagon.Neighbors" $ do
context "every HexCoordinate has 6 neighbors" $ do
it "CubeCoordinate" $ property $ prop_neighbor_count_cube
it "AxialCoordinate" $ property $ prop_neighbor_count_axial
it "OffsetEvenQ" $ property $ prop_neighbor_count_evenq
it "OffsetOddQ" $ property $ prop_neighbor_count_oddq
it "OffsetEvenR" $ property $ prop_neighbor_count_evenr
it "OffserOddR" $ property $ prop_neighbor_count_oddr
context "every neighbor of a node has a distance of 1 to that node" $ do
it "CubeCoordinate" $ property $ prop_neighbor_dist_cube
it "AxialCoordinate" $ property $ prop_neighbor_dist_axial
it "OffsetEvenQ" $ property $ prop_neighbor_dist_evenq
it "OffsetOddQ" $ property $ prop_neighbor_dist_oddq
it "OffsetEvenR" $ property $ prop_neighbor_dist_evenr
it "OffsetOddR" $ property $ prop_neighbor_dist_oddr
context "neighbor of direction ismorph isNeighbor dir" $ do
it "CubeCoordinate" $ property $ prop_neighbor_dir_cube
it "AxialCoordinate" $ property $ prop_neighbor_dir_axial
it "OffsetEvenQ" $ property $ prop_neighbor_dir_evenq
it "OffsetOddQ" $ property $ prop_neighbor_dir_oddq
it "OffsetEvenR" $ property $ prop_neighbor_dir_evenr
it "OffsetOddR" $ property $ prop_neighbor_dir_oddr
context "every neighbor is within range 1" $ do
it "CubeCoordinate" $ property $ prop_neighbor_range_cube
it "AxialCoordinate" $ property $ prop_neighbor_range_axial
it "OffsetEvenQ" $ property $ prop_neighbor_range_evenq
it "OffsetOddQ" $ property $ prop_neighbor_range_oddq
it "OffsetEvenR" $ property $ prop_neighbor_range_evenr
it "OffsetOddR" $ property $ prop_neighbor_range_oddr
context "lineDraw from c to neighbor has 2 elements, c and neighbor" $ do
it "CubeCoordinate" $ property $ prop_neighbor_lineDraw_cube
it "AxialCoordinate" $ property $ prop_neighbor_lineDraw_axial
it "OffsetEvenQ" $ property $ prop_neighbor_lineDraw_evenq
it "OffsetOddQ" $ property $ prop_neighbor_lineDraw_oddq
it "OffsetEvenR" $ property $ prop_neighbor_lineDraw_evenr
it "OffsetOddR" $ property $ prop_neighbor_lineDraw_oddr
context "a neighbor of a coordinate must not be a diagonal of it" $ do
it "CubeCoordinate" $ property $ prop_neighbor_diagonals_cube
it "AxialCoordinate" $ property $ prop_neighbor_diagonals_axial
it "OffsetEvenQ" $ property $ prop_neighbor_diagonals_evenq
it "OffsetOddQ" $ property $ prop_neighbor_diagonals_oddq
it "OffsetEvenR" $ property $ prop_neighbor_diagonals_evenr
it "OffsetOddR" $ property $ prop_neighbor_diagonals_oddr
prop_neighbor_count :: (HasNeighbors t a, Integral a) => t a -> Bool
prop_neighbor_count c = 6 == (length . directions $ c)
prop_neighbor_dist :: (HexCoordinate t a, HasNeighbors t a, Integral a) =>
t a -> Bool
prop_neighbor_dist c = and . fmap ((==) 1 . distance c) . directions $ c
prop_neighbor_dir :: ( Eq (t a), HexCoordinate t a
, HasNeighbors t a, Integral a ) =>
Direction -> t a -> Bool
prop_neighbor_dir d c = (neighbor d c `isNeighbor` c) == (Just d)
prop_neighbor_range ::
(Eq (t a), HexCoordinate t a, HasNeighbors t a, Integral a) =>
t a -> Bool
prop_neighbor_range c =
let rs1 = range 1 c
in and $ fmap (flip elem rs1) . directions $ c
prop_neighbor_diagonals ::
(Eq (t a), HexCoordinate t a, HasNeighbors t a, Integral a) =>
t a -> Bool
prop_neighbor_diagonals c =
and . fmap (flip notElem (diagonals c)) . directions $ c
prop_neighbor_lineDraw ::
(Eq (t a), HexCoordinate t a, HasNeighbors t a, Integral a) =>
t a -> Bool
prop_neighbor_lineDraw c = and $ fmap (ld c) . directions $ c
where ld a b =
let sq = lineDraw a b
in (Seq.length sq == 2) && (Seq.index sq 0 == a) && (Seq.index sq 1 == b)
prop_neighbor_count_cube :: CubeCoordinate Int -> Bool
prop_neighbor_count_cube = prop_neighbor_count
prop_neighbor_dist_cube :: CubeCoordinate Int -> Bool
prop_neighbor_dist_cube = prop_neighbor_dist
prop_neighbor_dir_cube :: Direction -> CubeCoordinate Int -> Bool
prop_neighbor_dir_cube = prop_neighbor_dir
prop_neighbor_range_cube :: CubeCoordinate Int -> Bool
prop_neighbor_range_cube = prop_neighbor_range
prop_neighbor_lineDraw_cube :: CubeCoordinate Int -> Bool
prop_neighbor_lineDraw_cube = prop_neighbor_lineDraw
prop_neighbor_diagonals_cube :: CubeCoordinate Int -> Bool
prop_neighbor_diagonals_cube = prop_neighbor_diagonals
prop_neighbor_count_axial :: AxialCoordinate Int -> Bool
prop_neighbor_count_axial = prop_neighbor_count
prop_neighbor_dist_axial :: AxialCoordinate Int -> Bool
prop_neighbor_dist_axial = prop_neighbor_dist
prop_neighbor_dir_axial :: Direction -> AxialCoordinate Int -> Bool
prop_neighbor_dir_axial = prop_neighbor_dir
prop_neighbor_range_axial :: AxialCoordinate Int -> Bool
prop_neighbor_range_axial = prop_neighbor_range
prop_neighbor_lineDraw_axial :: AxialCoordinate Int -> Bool
prop_neighbor_lineDraw_axial = prop_neighbor_lineDraw
prop_neighbor_diagonals_axial :: AxialCoordinate Int -> Bool
prop_neighbor_diagonals_axial = prop_neighbor_diagonals
prop_neighbor_count_evenq :: OffsetEvenQ Int -> Bool
prop_neighbor_count_evenq = prop_neighbor_count
prop_neighbor_dist_evenq :: OffsetEvenQ Int -> Bool
prop_neighbor_dist_evenq = prop_neighbor_dist
prop_neighbor_dir_evenq :: Direction -> OffsetEvenQ Int -> Bool
prop_neighbor_dir_evenq = prop_neighbor_dir
prop_neighbor_range_evenq :: OffsetEvenQ Int -> Bool
prop_neighbor_range_evenq = prop_neighbor_range
prop_neighbor_lineDraw_evenq :: OffsetEvenQ Int -> Bool
prop_neighbor_lineDraw_evenq = prop_neighbor_lineDraw
prop_neighbor_diagonals_evenq :: OffsetEvenQ Int -> Bool
prop_neighbor_diagonals_evenq = prop_neighbor_diagonals
prop_neighbor_count_oddq :: OffsetOddQ Int -> Bool
prop_neighbor_count_oddq = prop_neighbor_count
prop_neighbor_dist_oddq :: OffsetOddQ Int -> Bool
prop_neighbor_dist_oddq = prop_neighbor_dist
prop_neighbor_dir_oddq :: Direction -> OffsetOddQ Int -> Bool
prop_neighbor_dir_oddq = prop_neighbor_dir
prop_neighbor_range_oddq :: OffsetOddQ Int -> Bool
prop_neighbor_range_oddq = prop_neighbor_range
prop_neighbor_lineDraw_oddq :: OffsetOddQ Int -> Bool
prop_neighbor_lineDraw_oddq = prop_neighbor_lineDraw
prop_neighbor_diagonals_oddq :: OffsetOddQ Int -> Bool
prop_neighbor_diagonals_oddq = prop_neighbor_diagonals
prop_neighbor_count_evenr :: OffsetEvenR Int -> Bool
prop_neighbor_count_evenr = prop_neighbor_count
prop_neighbor_dist_evenr :: OffsetEvenR Int -> Bool
prop_neighbor_dist_evenr = prop_neighbor_dist
prop_neighbor_dir_evenr :: Direction -> OffsetEvenR Int -> Bool
prop_neighbor_dir_evenr = prop_neighbor_dir
prop_neighbor_range_evenr :: OffsetEvenR Int -> Bool
prop_neighbor_range_evenr = prop_neighbor_range
prop_neighbor_lineDraw_evenr :: OffsetEvenR Int -> Bool
prop_neighbor_lineDraw_evenr = prop_neighbor_lineDraw
prop_neighbor_diagonals_evenr :: OffsetEvenR Int -> Bool
prop_neighbor_diagonals_evenr = prop_neighbor_diagonals
prop_neighbor_count_oddr :: OffsetOddR Int -> Bool
prop_neighbor_count_oddr = prop_neighbor_count
prop_neighbor_dist_oddr :: OffsetOddR Int -> Bool
prop_neighbor_dist_oddr = prop_neighbor_dist
prop_neighbor_dir_oddr :: Direction -> OffsetOddR Int -> Bool
prop_neighbor_dir_oddr = prop_neighbor_dir
prop_neighbor_range_oddr :: OffsetOddR Int -> Bool
prop_neighbor_range_oddr = prop_neighbor_range
prop_neighbor_lineDraw_oddr :: OffsetOddR Int -> Bool
prop_neighbor_lineDraw_oddr = prop_neighbor_lineDraw
prop_neighbor_diagonals_oddr :: OffsetOddR Int -> Bool
prop_neighbor_diagonals_oddr = prop_neighbor_diagonals
main :: IO ()
main = hspec spec
| alios/hexagon | test/Data/Hexagon/NeighborsSpec.hs | bsd-3-clause | 7,917 | 0 | 15 | 1,272 | 1,806 | 884 | 922 | 149 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE ViewPatterns #-}
-- | Dealing with Cabal.
module Stack.Package
(readPackage
,readPackageBS
,readPackageDir
,readPackageUnresolved
,readPackageUnresolvedBS
,resolvePackage
,getCabalFileName
,Package(..)
,GetPackageFiles(..)
,PackageConfig(..)
,buildLogPath
,PackageException (..)
,resolvePackageDescription
,packageToolDependencies
,packageDependencies
,packageIdentifier)
where
import Control.Exception hiding (try,catch)
import Control.Monad
import Control.Monad.Catch
import Control.Monad.IO.Class
import Control.Monad.Logger (MonadLogger,logWarn)
import Control.Monad.Reader
import qualified Data.ByteString as S
import Data.Data
import Data.Either
import Data.Function
import Data.List
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as M
import Data.Maybe
import Data.Maybe.Extra
import Data.Monoid
import Data.Set (Set)
import qualified Data.Set as S
import Data.Text (Text)
import qualified Data.Text as T
import Data.Text.Encoding (decodeUtf8With)
import Data.Text.Encoding.Error (lenientDecode)
import Distribution.Compiler
import Distribution.InstalledPackageInfo (PError)
import qualified Distribution.ModuleName as Cabal
import Distribution.ModuleName (ModuleName)
import Distribution.Package hiding (Package,PackageName,packageName,packageVersion,PackageIdentifier)
import Distribution.PackageDescription hiding (FlagName)
import Distribution.PackageDescription.Parse
import Distribution.Simple.Utils
import Distribution.System (OS, Arch, Platform (..))
import Distribution.Version (intersectVersionRanges)
import Path as FL
import Path.Find
import Path.IO
import Prelude hiding (FilePath)
import Stack.Constants
import Stack.Types
import qualified Stack.Types.PackageIdentifier
import System.Directory (getDirectoryContents)
import System.FilePath (splitExtensions)
import qualified System.FilePath as FilePath
import System.IO.Error
-- | All exceptions thrown by the library.
data PackageException
= PackageInvalidCabalFile (Maybe (Path Abs File)) PError
| PackageNoCabalFileFound (Path Abs Dir)
| PackageMultipleCabalFilesFound (Path Abs Dir) [Path Abs File]
| MismatchedCabalName (Path Abs File) PackageName
deriving Typeable
instance Exception PackageException
instance Show PackageException where
show (PackageInvalidCabalFile mfile err) =
"Unable to parse cabal file" ++
(case mfile of
Nothing -> ""
Just file -> ' ' : toFilePath file) ++
": " ++
show err
show (PackageNoCabalFileFound dir) =
"No .cabal file found in directory " ++
toFilePath dir
show (PackageMultipleCabalFilesFound dir files) =
"Multiple .cabal files found in directory " ++
toFilePath dir ++
": " ++
intercalate ", " (map (toFilePath . filename) files)
show (MismatchedCabalName fp name) = concat
[ "cabal file "
, toFilePath fp
, " has a mismatched package name: "
, packageNameString name
]
-- | Some package info.
data Package =
Package {packageName :: !PackageName -- ^ Name of the package.
,packageVersion :: !Version -- ^ Version of the package
,packageFiles :: !GetPackageFiles
,packageDeps :: !(Map PackageName VersionRange) -- ^ Packages that the package depends on.
,packageTools :: ![Dependency] -- ^ A build tool name.
,packageAllDeps :: !(Set PackageName) -- ^ Original dependencies (not sieved).
,packageFlags :: !(Map FlagName Bool) -- ^ Flags used on package.
,packageHasLibrary :: !Bool -- ^ does the package have a buildable library stanza?
,packageTests :: !(Set Text) -- ^ names of test suites
}
deriving (Show,Typeable)
-- | Files that the package depends on, relative to package directory.
-- Argument is the location of the .cabal file
newtype GetPackageFiles = GetPackageFiles
{ getPackageFiles :: forall m. (MonadIO m, MonadLogger m, MonadThrow m, MonadCatch m)
=> Path Abs File
-> m (Set (Path Abs File))
}
instance Show GetPackageFiles where
show _ = "<GetPackageFiles>"
-- | Get the identifier of the package.
packageIdentifier :: Package -> Stack.Types.PackageIdentifier.PackageIdentifier
packageIdentifier pkg =
Stack.Types.PackageIdentifier.PackageIdentifier
(packageName pkg)
(packageVersion pkg)
-- | Package build configuration
data PackageConfig =
PackageConfig {packageConfigEnableTests :: !Bool -- ^ Are tests enabled?
,packageConfigEnableBenchmarks :: !Bool -- ^ Are benchmarks enabled?
,packageConfigFlags :: !(Map FlagName Bool) -- ^ Package config flags.
,packageConfigGhcVersion :: !Version -- ^ GHC version
,packageConfigPlatform :: !Platform -- ^ host platform
}
deriving (Show,Typeable)
-- | Compares the package name.
instance Ord Package where
compare = on compare packageName
-- | Compares the package name.
instance Eq Package where
(==) = on (==) packageName
-- | Read the raw, unresolved package information.
readPackageUnresolved :: (MonadIO m, MonadThrow m)
=> Path Abs File
-> m GenericPackageDescription
readPackageUnresolved cabalfp =
liftIO (S.readFile (FL.toFilePath cabalfp))
>>= readPackageUnresolvedBS (Just cabalfp)
-- | Read the raw, unresolved package information from a ByteString.
readPackageUnresolvedBS :: (MonadThrow m)
=> Maybe (Path Abs File)
-> S.ByteString
-> m GenericPackageDescription
readPackageUnresolvedBS mcabalfp bs =
case parsePackageDescription chars of
ParseFailed per ->
throwM (PackageInvalidCabalFile mcabalfp per)
ParseOk _ gpkg -> return gpkg
where
chars = T.unpack (dropBOM (decodeUtf8With lenientDecode bs))
-- https://github.com/haskell/hackage-server/issues/351
dropBOM t = fromMaybe t $ T.stripPrefix "\xFEFF" t
-- | Reads and exposes the package information
readPackage :: (MonadLogger m, MonadIO m, MonadThrow m, MonadCatch m)
=> PackageConfig
-> Path Abs File
-> m Package
readPackage packageConfig cabalfp =
resolvePackage packageConfig `liftM` readPackageUnresolved cabalfp
-- | Reads and exposes the package information, from a ByteString
readPackageBS :: (MonadThrow m)
=> PackageConfig
-> S.ByteString
-> m Package
readPackageBS packageConfig bs =
resolvePackage packageConfig `liftM` readPackageUnresolvedBS Nothing bs
-- | Convenience wrapper around @readPackage@ that first finds the cabal file
-- in the given directory.
readPackageDir :: (MonadLogger m, MonadIO m, MonadThrow m, MonadCatch m)
=> PackageConfig
-> Path Abs Dir
-> m (Path Abs File, Package)
readPackageDir packageConfig dir = do
cabalfp <- getCabalFileName dir
pkg <- readPackage packageConfig cabalfp
name <- parsePackageNameFromFilePath cabalfp
when (packageName pkg /= name)
$ throwM $ MismatchedCabalName cabalfp name
return (cabalfp, pkg)
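-- An illustrative sketch of how the pieces above fit together (the 'Version'
-- and 'Platform' values are assumed to come from the surrounding build
-- configuration rather than being constructed here):
--
-- @
-- let pkgConfig = PackageConfig
--       { packageConfigEnableTests      = True
--       , packageConfigEnableBenchmarks = False
--       , packageConfigFlags            = M.empty
--       , packageConfigGhcVersion       = ghcVersion
--       , packageConfigPlatform         = platform
--       }
-- (cabalfp, pkg) <- readPackageDir pkgConfig dir
-- @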
-- | Resolve a parsed cabal file into a 'Package'.
resolvePackage :: PackageConfig
-> GenericPackageDescription
-> Package
resolvePackage packageConfig gpkg = Package
{ packageName = name
, packageVersion = fromCabalVersion (pkgVersion pkgId)
, packageDeps = deps
, packageFiles = GetPackageFiles $ \cabalfp -> do
files <- runReaderT (packageDescFiles pkg) cabalfp
return $ S.fromList $ cabalfp : files
, packageTools = packageDescTools pkg
, packageFlags = packageConfigFlags packageConfig
, packageAllDeps = S.fromList (M.keys deps)
, packageHasLibrary = maybe False (buildable . libBuildInfo) (library pkg)
, packageTests = S.fromList $ map (T.pack . fst) $ condTestSuites gpkg -- FIXME need to test if it's buildable
}
where
pkgId = package (packageDescription gpkg)
name = fromCabalPackageName (pkgName pkgId)
pkg = resolvePackageDescription packageConfig gpkg
deps = M.filterWithKey (const . (/= name)) (packageDependencies pkg)
-- | Get all dependencies of the package (buildable targets only).
packageDependencies :: PackageDescription -> Map PackageName VersionRange
packageDependencies =
M.fromListWith intersectVersionRanges .
concatMap (map (\dep -> ((depName dep),depRange dep)) .
targetBuildDepends) .
allBuildInfo'
-- | Get all build tool dependencies of the package (buildable targets only).
packageToolDependencies :: PackageDescription -> Map S.ByteString VersionRange
packageToolDependencies =
M.fromList .
concatMap (map (\dep -> ((packageNameByteString $ depName dep),depRange dep)) .
buildTools) .
allBuildInfo'
-- | Get all dependencies of the package (buildable targets only).
packageDescTools :: PackageDescription -> [Dependency]
packageDescTools = concatMap buildTools . allBuildInfo'
-- | This is a copy-paste from Cabal's @allBuildInfo@ function, but with the
-- @buildable@ test removed. The reason is that (surprise) Cabal is broken,
-- see: https://github.com/haskell/cabal/issues/1725
allBuildInfo' :: PackageDescription -> [BuildInfo]
allBuildInfo' pkg_descr = [ bi | Just lib <- [library pkg_descr]
, let bi = libBuildInfo lib
, True || buildable bi ]
++ [ bi | exe <- executables pkg_descr
, let bi = buildInfo exe
, True || buildable bi ]
++ [ bi | tst <- testSuites pkg_descr
, let bi = testBuildInfo tst
, True || buildable bi
, testEnabled tst ]
++ [ bi | tst <- benchmarks pkg_descr
, let bi = benchmarkBuildInfo tst
, True || buildable bi
, benchmarkEnabled tst ]
-- | Get all files referenced by the package.
packageDescFiles :: (MonadLogger m,MonadIO m,MonadThrow m,MonadReader (Path Abs File) m,MonadCatch m)
=> PackageDescription -> m [Path Abs File]
packageDescFiles pkg =
do libfiles <-
liftM concat
(mapM libraryFiles
(maybe [] return (library pkg)))
exefiles <-
liftM concat
(mapM executableFiles
(executables pkg))
dfiles <-
resolveGlobFiles (map (dataDir pkg FilePath.</>) (dataFiles pkg))
srcfiles <-
resolveGlobFiles (extraSrcFiles pkg)
-- extraTmpFiles purposely not included here, as those are files generated
-- by the build script. Another possible implementation: include them, but
-- don't error out if not present
docfiles <-
resolveGlobFiles (extraDocFiles pkg)
return (concat [libfiles,exefiles,dfiles,srcfiles,docfiles])
-- | Resolve globbing of files (e.g. data files) to absolute paths.
resolveGlobFiles :: (MonadLogger m,MonadIO m,MonadThrow m,MonadReader (Path Abs File) m,MonadCatch m)
=> [String] -> m [Path Abs File]
resolveGlobFiles =
liftM (catMaybes . concat) .
mapM resolve
where
resolve name =
if any (== '*') name
then explode name
else liftM return (resolveFileOrWarn name)
explode name = do
dir <- asks parent
names <-
matchDirFileGlob'
(FL.toFilePath dir)
name
mapM resolveFileOrWarn names
matchDirFileGlob' dir glob =
catch
(liftIO (matchDirFileGlob_ dir glob))
(\(e :: IOException) ->
if isUserError e
then do
$logWarn
("Wildcard does not match any files: " <> T.pack glob <> "\n" <>
"in directory: " <> T.pack dir)
return []
else throwM e)
-- | This is a copy/paste of the Cabal library function, but with
--
-- @ext == ext'@
--
-- Changed to
--
-- @isSuffixOf ext ext'@
--
-- So that this will work:
--
-- @
-- Ξ»> matchDirFileGlob_ "." "test/package-dump/*.txt"
-- ["test/package-dump/ghc-7.8.txt","test/package-dump/ghc-7.10.txt"]
-- @
--
matchDirFileGlob_ :: String -> String -> IO [String]
matchDirFileGlob_ dir filepath = case parseFileGlob filepath of
Nothing -> die $ "invalid file glob '" ++ filepath
++ "'. Wildcards '*' are only allowed in place of the file"
++ " name, not in the directory name or file extension."
++ " If a wildcard is used it must be with an file extension."
Just (NoGlob filepath') -> return [filepath']
Just (FileGlob dir' ext) -> do
files <- getDirectoryContents (dir FilePath.</> dir')
case [ dir' FilePath.</> file
| file <- files
, let (name, ext') = splitExtensions file
, not (null name) && isSuffixOf ext ext' ] of
[] -> die $ "filepath wildcard '" ++ filepath
++ "' does not match any files."
matches -> return matches
-- | Get all files referenced by the executable.
executableFiles :: (MonadLogger m,MonadIO m,MonadThrow m,MonadReader (Path Abs File) m)
=> Executable -> m [Path Abs File]
executableFiles exe =
do dirs <- mapMaybeM resolveDirOrWarn (hsSourceDirs build)
dir <- asks parent
exposed <-
resolveFiles
(dirs ++ [dir])
[Right (modulePath exe)]
haskellFileExts
bfiles <- buildFiles dir build
return (concat [bfiles,exposed])
where build = buildInfo exe
-- | Get all files referenced by the library.
libraryFiles :: (MonadLogger m,MonadIO m,MonadThrow m,MonadReader (Path Abs File) m)
=> Library -> m [Path Abs File]
libraryFiles lib =
do dirs <- mapMaybeM resolveDirOrWarn (hsSourceDirs build)
dir <- asks parent
exposed <- resolveFiles
(dirs ++ [dir])
(map Left (exposedModules lib))
haskellFileExts
bfiles <- buildFiles dir build
return (concat [bfiles,exposed])
where build = libBuildInfo lib
-- | Get all files in a build.
buildFiles :: (MonadLogger m,MonadIO m,MonadThrow m,MonadReader (Path Abs File) m)
=> Path Abs Dir -> BuildInfo -> m [Path Abs File]
buildFiles dir build = do
dirs <- mapMaybeM resolveDirOrWarn (hsSourceDirs build)
other <- resolveFiles
(dirs ++ [dir])
(map Left (otherModules build))
haskellFileExts
cSources' <- mapMaybeM resolveFileOrWarn (cSources build)
return (other ++ cSources')
-- | Get all dependencies of a package, including library,
-- executables, tests, benchmarks.
resolvePackageDescription :: PackageConfig
-> GenericPackageDescription
-> PackageDescription
resolvePackageDescription packageConfig (GenericPackageDescription desc defaultFlags mlib exes tests benches) =
desc {library =
fmap (resolveConditions rc updateLibDeps) mlib
,executables =
map (resolveConditions rc updateExeDeps .
snd)
exes
,testSuites =
map (resolveConditions rc updateTestDeps .
snd)
tests
,benchmarks =
map (resolveConditions rc updateBenchmarkDeps .
snd)
benches}
where flags =
M.union (packageConfigFlags packageConfig)
(flagMap defaultFlags)
rc = mkResolveConditions
(packageConfigGhcVersion packageConfig)
(packageConfigPlatform packageConfig)
flags
updateLibDeps lib deps =
lib {libBuildInfo =
((libBuildInfo lib) {targetBuildDepends =
deps})}
updateExeDeps exe deps =
exe {buildInfo =
(buildInfo exe) {targetBuildDepends = deps}}
updateTestDeps test deps =
test {testBuildInfo =
(testBuildInfo test) {targetBuildDepends = deps}
,testEnabled = packageConfigEnableTests packageConfig}
updateBenchmarkDeps benchmark deps =
benchmark {benchmarkBuildInfo =
(benchmarkBuildInfo benchmark) {targetBuildDepends = deps}
,benchmarkEnabled = packageConfigEnableBenchmarks packageConfig}
-- | Make a map from a list of flag specifications.
--
-- What is @flagManual@ for?
flagMap :: [Flag] -> Map FlagName Bool
flagMap = M.fromList . map pair
where pair :: Flag -> (FlagName, Bool)
pair (MkFlag (fromCabalFlagName -> name) _desc def _manual) = (name,def)
data ResolveConditions = ResolveConditions
{ rcFlags :: Map FlagName Bool
, rcGhcVersion :: Version
, rcOS :: OS
, rcArch :: Arch
}
-- | Generate a @ResolveConditions@ using sensible defaults.
mkResolveConditions :: Version -- ^ GHC version
-> Platform -- ^ installation target platform
-> Map FlagName Bool -- ^ enabled flags
-> ResolveConditions
mkResolveConditions ghcVersion (Platform arch os) flags = ResolveConditions
{ rcFlags = flags
, rcGhcVersion = ghcVersion
, rcOS = os
, rcArch = arch
}
-- | Resolve the condition tree for the library.
resolveConditions :: (Monoid target,Show target)
=> ResolveConditions
-> (target -> cs -> target)
-> CondTree ConfVar cs target
-> target
resolveConditions rc addDeps (CondNode lib deps cs) = basic <> children
where basic = addDeps lib deps
children = mconcat (map apply cs)
where apply (cond,node,mcs) =
if (condSatisfied cond)
then resolveConditions rc addDeps node
else maybe mempty (resolveConditions rc addDeps) mcs
condSatisfied c =
case c of
Var v -> varSatisifed v
Lit b -> b
CNot c' ->
not (condSatisfied c')
COr cx cy ->
or [condSatisfied cx,condSatisfied cy]
CAnd cx cy ->
and [condSatisfied cx,condSatisfied cy]
varSatisifed v =
case v of
OS os -> os == rcOS rc
Arch arch -> arch == rcArch rc
Flag flag ->
case M.lookup (fromCabalFlagName flag) (rcFlags rc) of
Just x -> x
Nothing ->
-- NOTE: This should never happen, as all flags
-- which are used must be declared. Defaulting
-- to False
False
Impl flavor range ->
flavor == GHC &&
withinRange (rcGhcVersion rc) range
-- | Get the name of a dependency.
depName :: Dependency -> PackageName
depName = \(Dependency n _) -> fromCabalPackageName n
-- | Get the version range of a dependency.
depRange :: Dependency -> VersionRange
depRange = \(Dependency _ r) -> r
-- | Try to resolve the list of base names in the given directory by
-- looking for unique instances of base names applied with the given
-- extensions.
resolveFiles :: MonadIO m
=> [Path Abs Dir] -- ^ Directories to look in.
-> [Either ModuleName String] -- ^ Base names.
             -> [Text]            -- ^ Extensions.
-> m [Path Abs File]
resolveFiles dirs names exts =
liftM catMaybes (forM names (liftIO . makeNameCandidates))
where makeNameCandidates name =
fmap (listToMaybe . rights . concat)
(mapM (makeDirCandidates name) dirs)
makeDirCandidates :: Either ModuleName String
-> Path Abs Dir
-> IO [Either ResolveException (Path Abs File)]
makeDirCandidates name dir =
mapM (\ext ->
try (case name of
Left mn ->
resolveFile dir
(Cabal.toFilePath mn ++ "." ++ ext)
Right fp ->
resolveFile dir fp))
(map T.unpack exts)
-- | Get the filename for the cabal file in the given directory.
--
-- If no .cabal file is present, or more than one is present, an exception is
-- thrown via 'throwM'.
getCabalFileName
:: (MonadThrow m, MonadIO m)
=> Path Abs Dir -- ^ package directory
-> m (Path Abs File)
getCabalFileName pkgDir = do
files <- liftIO $ findFiles
pkgDir
(flip hasExtension "cabal" . FL.toFilePath)
(const False)
case files of
[] -> throwM $ PackageNoCabalFileFound pkgDir
[x] -> return x
_:_ -> throwM $ PackageMultipleCabalFilesFound pkgDir files
where hasExtension fp x = FilePath.takeExtensions fp == "." ++ x
-- | Path for the package's build log.
buildLogPath :: (MonadReader env m, HasBuildConfig env, MonadThrow m)
=> Package -> m (Path Abs File)
buildLogPath package' = do
env <- ask
let stack = configProjectWorkDir env
fp <- parseRelFile $ concat
[ packageNameString $ packageName package'
, "-"
, versionString $ packageVersion package'
, ".log"
]
return $ stack </> $(mkRelDir "logs") </> fp
-- | Resolve the file; if it can't be resolved, warn the user
-- (purely to be helpful).
resolveFileOrWarn :: (MonadThrow m,MonadIO m,MonadLogger m,MonadReader (Path Abs File) m)
=> FilePath.FilePath
-> m (Maybe (Path Abs File))
resolveFileOrWarn y =
do cwd <- getWorkingDir
file <- ask
dir <- asks parent
result <- resolveFileMaybe dir y
case result of
Nothing ->
$logWarn ("Warning: File listed in " <>
T.pack (maybe (FL.toFilePath file) FL.toFilePath (stripDir cwd file)) <>
" file does not exist: " <>
T.pack y)
_ -> return ()
return result
-- | Resolve the directory; if it can't be resolved, warn the user
-- (purely to be helpful).
resolveDirOrWarn :: (MonadThrow m,MonadIO m,MonadLogger m,MonadReader (Path Abs File) m)
=> FilePath.FilePath
-> m (Maybe (Path Abs Dir))
resolveDirOrWarn y =
do cwd <- getWorkingDir
file <- ask
dir <- asks parent
result <- resolveDirMaybe dir y
case result of
Nothing ->
$logWarn ("Warning: Directory listed in " <>
T.pack (maybe (FL.toFilePath file) FL.toFilePath (stripDir cwd file)) <>
" file does not exist: " <>
T.pack y)
_ -> return ()
return result
| mietek/stack | src/Stack/Package.hs | bsd-3-clause | 23,951 | 0 | 20 | 7,491 | 5,363 | 2,798 | 2,565 | 517 | 10 |
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE OverloadedStrings #-}
module Search.SearchIndex
(
SearchIndex, SearchIndexBuilder
, newSearchIndexBuilder, indexTokenDoc
, buildSearchIndex
, search, numDocsPerTerm
, SearchIndexHandle, withSearchIndexFile, readPostingList, joinIndexes
, indexedTokens, handleIndexedTokens
) where
import Control.Monad.Primitive
import Control.Arrow (second)
import Search.Common
import qualified Search.Collection as C
import qualified Search.IndexedSet as I
import qualified Data.Vector.Unboxed as VU
type SearchIndex = I.IndexedSet Token DocId
type SearchIndexBuilder s = I.IndexedSetBuilder s Token DocId
newSearchIndexBuilder :: (PrimMonad m) => C.DocCollection a -> m (SearchIndexBuilder (PrimState m))
newSearchIndexBuilder coll = I.newIndexedSetBuilder (C.dictionarySize coll)
indexTokenDoc :: (PrimMonad m) => SearchIndexBuilder (PrimState m) -> IndexedTokenDoc -> m ()
indexTokenDoc ib (IndexedTokenDoc did doc) = do
let tfs = termFrequencies doc
let toIndex = map (second $ const did) tfs
mapM_ (uncurry $ I.addToIndex ib) toIndex
buildSearchIndex :: (PrimMonad m) => SearchIndexBuilder (PrimState m) -> m SearchIndex
buildSearchIndex = I.buildIndex
search :: SearchIndex -> [Token] -> [DocId]
search _ [] = []
search index ts = foldl1 intersectSorted $ map (I.lookup index) ts
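-- An illustrative sketch of the indexing/search workflow (assumes a
-- 'C.DocCollection' value @coll@ and a list of 'IndexedTokenDoc's @docs@
-- are already in scope):
--
-- > searchDocs coll docs ts = do
-- >   ib  <- newSearchIndexBuilder coll
-- >   mapM_ (indexTokenDoc ib) docs
-- >   idx <- buildSearchIndex ib
-- >   return (search idx ts)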
numDocsPerTerm :: SearchIndex -> VU.Vector Int
numDocsPerTerm = I.setSizes
intersectSorted :: (Ord a) => [a] -> [a] -> [a]
intersectSorted _ [] = []
intersectSorted [] _ = []
intersectSorted xs@(x:xs') ys@(y:ys')
| x < y = intersectSorted xs' ys
| x == y = x:intersectSorted xs' ys'
| x > y = intersectSorted xs ys'
| otherwise = undefined
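-- For example, @intersectSorted [1,3,5,7] [3,4,5] == [3,5]@; both arguments
-- must already be sorted, which is what 'search' relies on for the posting
-- lists returned by 'I.lookup'.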
type SearchIndexHandle = I.IndexedSetHandle Token DocId
withSearchIndexFile :: FilePath -> (SearchIndexHandle -> IO a) -> IO a
withSearchIndexFile = I.withIndexedSetFile
readPostingList :: SearchIndexHandle -> Token -> IO (VU.Vector DocId)
readPostingList = I.readSet
joinIndexes :: [FilePath] -> FilePath -> IO ()
joinIndexes = I.joinFiles
indexedTokens :: SearchIndex -> [Token]
indexedTokens = I.enumerateIndexKeys
handleIndexedTokens :: SearchIndexHandle -> [Token]
handleIndexedTokens = I.enumerateIndexHandleKeys
| jdimond/diplomarbeit | lib/Search/SearchIndex.hs | bsd-3-clause | 2,256 | 0 | 13 | 365 | 710 | 378 | 332 | 51 | 1 |
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TypeFamilies #-}
module River.Source.Concrete.Parser (
parseProgram
, parseProgram'
) where
import Control.Applicative (Alternative(..))
import Control.Monad (void)
import Control.Monad.IO.Class (liftIO)
import Control.Monad.Trans.Except (ExceptT(..))
import Data.Set (Set)
import qualified Data.Set as Set
import qualified Data.Text as T
import qualified Data.Text.IO as T
import Text.Megaparsec ((<?>))
import Text.Megaparsec (ParseError, Dec, runParser)
import Text.Megaparsec (SourcePos, getPosition)
import Text.Megaparsec (label, try, optional, between, oneOf)
import Text.Megaparsec (eof, spaceChar, letterChar, alphaNumChar, char, char')
import Text.Megaparsec.Expr (Operator(..), makeExprParser)
import qualified Text.Megaparsec.Lexer as Lexer
import Text.Megaparsec.Prim (MonadParsec, Token)
import River.Source.Concrete.Syntax
------------------------------------------------------------------------
parseProgram :: FilePath -> ExceptT (ParseError Char Dec) IO (Program SourcePos)
parseProgram path =
ExceptT $ do
source <- liftIO $ T.readFile path
pure $ runParser pProgram path source
parseProgram' :: FilePath -> String -> Either (ParseError Char Dec) (Program SourcePos)
parseProgram' name source =
runParser pProgram name source
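-- A minimal usage sketch (the sample source text below is assumed to be
-- accepted by this grammar; @useAst@ stands in for whatever consumes the
-- resulting syntax tree):
--
-- > case parseProgram' "<interactive>" "int main() { return 0; }" of
-- >   Left err   -> print err
-- >   Right prog -> useAst prog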
------------------------------------------------------------------------
pProgram :: RiverParser s m => m (Program SourcePos)
pProgram = do
pSpace
pReserved "int"
pos <- getPosition
pReserved "main"
pParens $ pure ()
Program pos <$> pBlock <* eof
------------------------------------------------------------------------
pBlock :: RiverParser s m => m (Block SourcePos)
pBlock = do
Block
<$> getPosition
<*> pBraces pStatements
pStatements :: RiverParser s m => m [Statement SourcePos]
pStatements =
many pStatement
pStatement :: RiverParser s m => m (Statement SourcePos)
pStatement =
(SSimple <$> getPosition <*> pSimple <* pSemi <?> "simple statement") <|>
(SControl <$> getPosition <*> pControl <?> "control statement") <|>
(SBlock <$> getPosition <*> pBlock <?> "block")
pSimple :: RiverParser s m => m (Simple SourcePos)
pSimple =
(pAssignPost <?> "assignment or postfix operation") <|>
(pDeclare <?> "declaration")
pAssignPost :: RiverParser s m => m (Simple SourcePos)
pAssignPost = do
pos <- getPosition
lv <- pLValue
pAssign pos lv <|> pPost pos lv
pAssign :: RiverParser s m => SourcePos -> LValue SourcePos -> m (Simple SourcePos)
pAssign pos lv =
Assign pos lv
<$> pAssignOp
<*> pExpression
pPost :: RiverParser s m => SourcePos -> LValue SourcePos -> m (Simple SourcePos)
pPost pos lv =
Post pos lv
<$> pPostOp
pDeclare :: RiverParser s m => m (Simple SourcePos)
pDeclare =
Declare
<$> getPosition
<*> pType
<*> pIdentifier
<*> optional (pEquals *> pExpression)
pControl :: RiverParser s m => m (Control SourcePos)
pControl =
(pIf <?> "if statement") <|>
(pWhile <?> "while loop") <|>
(pFor <?> "for loop") <|>
(pReturn <?> "return")
pIf :: RiverParser s m => m (Control SourcePos)
pIf =
let
pElse =
pReserved "else" *> pStatement
in
If
<$> (getPosition <* pReserved "if")
<*> pParens pExpression
<*> pStatement
<*> optional pElse
pWhile :: RiverParser s m => m (Control SourcePos)
pWhile =
While
<$> (getPosition <* pReserved "while")
<*> pParens pExpression
<*> pStatement
pFor :: RiverParser s m => m (Control SourcePos)
pFor =
For
<$> (getPosition <* pReserved "for" <* pSymbol "(")
<*> (optional pSimple <* pSemi)
<*> (pExpression <* pSemi)
<*> (optional pSimple <* pSymbol ")")
<*> pStatement
pAssignOp :: RiverParser s m => m AssignOp
pAssignOp =
pReservedOp "=" *> pure AEq <|>
pReservedOp "+=" *> pure AAdd <|>
pReservedOp "-=" *> pure ASub <|>
pReservedOp "*=" *> pure AMul <|>
pReservedOp "/=" *> pure ADiv <|>
pReservedOp "%=" *> pure AMod <|>
pReservedOp "<<=" *> pure AShl <|>
pReservedOp ">>=" *> pure AShr <|>
pReservedOp "&=" *> pure AAnd <|>
pReservedOp "^=" *> pure AXor <|>
pReservedOp "|=" *> pure AOr
pPostOp :: RiverParser s m => m PostOp
pPostOp =
pReservedOp "++" *> pure Inc <|>
pReservedOp "--" *> pure Dec
pReturn :: RiverParser s m => m (Control SourcePos)
pReturn =
Return
<$> getPosition
<*> (pReserved "return" *> pExpression) <* pSemi
------------------------------------------------------------------------
pExpression :: RiverParser s m => m (Expression SourcePos)
pExpression =
try pConditional <|>
pExpression0
pConditional :: RiverParser s m => m (Expression SourcePos)
pConditional =
Conditional
<$> getPosition
<*> pExpression0 <* pReservedOp "?"
<*> pExpression <* pReservedOp ":"
<*> pExpression
pExpression0 :: RiverParser s m => m (Expression SourcePos)
pExpression0 =
makeExprParser pExpression1 opTable
pExpression1 :: RiverParser s m => m (Expression SourcePos)
pExpression1 =
pParens (pExpression) <|>
try pLiteral <|>
pVariable
pLiteral :: RiverParser s m => m (Expression SourcePos)
pLiteral =
Literal
<$> getPosition
<*> (pLiteralInt <|> pLiteralTrue <|> pLiteralFalse)
<?> "literal"
pLiteralInt :: RiverParser s m => m Literal
pLiteralInt =
pLiteralHexZero <|>
pLiteralDec
pLiteralDec :: RiverParser s m => m Literal
pLiteralDec =
LiteralInt <$> pLexeme Lexer.decimal
pLiteralHexZero :: RiverParser s m => m Literal
pLiteralHexZero =
char '0' *> (pLiteralHex <|> pLiteralZero)
pLiteralZero :: RiverParser s m => m Literal
pLiteralZero =
LiteralInt 0 <$ pSpace
pLiteralHex :: RiverParser s m => m Literal
pLiteralHex =
LiteralInt <$> (char' 'x' *> pLexeme Lexer.hexadecimal)
pLiteralTrue :: RiverParser s m => m Literal
pLiteralTrue =
LiteralBool <$> (True <$ pReserved "true") <?> "true"
pLiteralFalse :: RiverParser s m => m Literal
pLiteralFalse =
LiteralBool <$> (False <$ pReserved "false") <?> "false"
pVariable :: RiverParser s m => m (Expression SourcePos)
pVariable =
Variable <$> getPosition <*> pIdentifier <?> "variable"
------------------------------------------------------------------------
-- Operator Table
opTable :: RiverParser s m => [[Operator m (Expression SourcePos)]]
opTable =
[ [ prefix "!" (\p -> Unary p LNot)
, prefix "~" (\p -> Unary p BNot)
, prefix "-" (\p -> Unary p Neg)
]
, [ infixL "*" (\p -> Binary p Mul)
, infixL "/" (\p -> Binary p Div)
, infixL "%" (\p -> Binary p Mod)
]
, [ infixL "+" (\p -> Binary p Add)
, infixL "-" (\p -> Binary p Sub)
]
, [ infixL "<<" (\p -> Binary p Shl)
, infixL ">>" (\p -> Binary p Shr)
]
, [ infixL "<" (\p -> Binary p Lt)
, infixL "<=" (\p -> Binary p Le)
, infixL ">" (\p -> Binary p Gt)
, infixL ">=" (\p -> Binary p Ge)
]
, [ infixL "==" (\p -> Binary p Eq)
, infixL "!=" (\p -> Binary p Ne)
]
, [ infixL "&" (\p -> Binary p BAnd)
]
, [ infixL "^" (\p -> Binary p BXor)
]
, [ infixL "|" (\p -> Binary p BOr)
]
, [ infixL "&&" (\p -> Binary p LAnd)
]
, [ infixL "||" (\p -> Binary p LOr)
]
]
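-- With 'makeExprParser', rows earlier in the table bind more tightly than
-- later ones, so for example @1 + 2 * 3@ parses as @1 + (2 * 3)@ and
-- @-x * y@ as @(-x) * y@.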
infixL :: RiverParser s m => String -> (SourcePos -> a -> a -> a) -> Operator m a
infixL name fun =
InfixL (fmap fun (getPosition <* pReservedOp name))
prefix :: RiverParser s m => String -> (SourcePos -> a -> a) -> Operator m a
prefix name fun =
Prefix (fmap fun (getPosition <* pReservedOp name))
------------------------------------------------------------------------
pType :: RiverParser s m => m Type
pType =
(Int <$ pReserved "int") <|>
(Bool <$ pReserved "bool") <?> "type"
pLValue :: RiverParser s m => m (LValue SourcePos)
pLValue =
LIdentifier
<$> getPosition
<*> pIdentifier
pIdentifier :: RiverParser s m => m Identifier
pIdentifier =
try . label "identifier" $ do
parsed <- pIdent
if Set.member parsed reservedIdents then
fail $ "keyword '" ++ parsed ++ "' cannot be an identifier"
else
pure . Identifier $ T.pack parsed
pReserved :: RiverParser s m => String -> m ()
pReserved expected =
try . label expected $ do
parsed <- pIdent
if parsed == expected then
pure ()
else
fail $ "expected keyword '" ++ expected ++ "'"
pReservedOp :: RiverParser s m => String -> m ()
pReservedOp expected =
try . label expected $ do
parsed <- pOperator
if parsed == expected then
pure ()
else
fail $ "expected operator " ++ expected
pEquals :: RiverParser s m => m ()
pEquals =
pReservedOp "="
------------------------------------------------------------------------
-- Lexer
type RiverParser s m =
(MonadParsec Dec s m, Token s ~ Char)
reservedIdents :: Set String
reservedIdents =
Set.fromList
[ "struct"
, "typedef"
, "if"
, "else"
, "while"
, "for"
, "continue"
, "break"
, "return"
, "assert"
, "true"
, "false"
, "NULL"
, "alloc"
, "alloc_array"
, "int"
, "bool"
, "void"
, "char"
, "string"
]
pSpace :: RiverParser s m => m ()
pSpace =
let
lineComment =
Lexer.skipLineComment "//"
blockComment =
Lexer.skipBlockComment "/*" "*/"
in
Lexer.space (void spaceChar) lineComment blockComment
pLexeme :: RiverParser s m => m a -> m a
pLexeme =
Lexer.lexeme pSpace
pSymbol :: RiverParser s m => String -> m String
pSymbol =
Lexer.symbol pSpace
pParens :: RiverParser s m => m a -> m a
pParens =
between (pSymbol "(") (pSymbol ")")
pBraces :: RiverParser s m => m a -> m a
pBraces =
between (pSymbol "{") (pSymbol "}")
pSemi :: RiverParser s m => m ()
pSemi =
() <$ pSymbol ";"
pIdent :: RiverParser s m => m String
pIdent =
pLexeme $
(:) <$> pIdentStart <*> many pIdentLetter
pIdentStart :: RiverParser s m => m Char
pIdentStart =
letterChar <|> char '_'
pIdentLetter :: RiverParser s m => m Char
pIdentLetter =
alphaNumChar <|> char '_'
pOperator :: RiverParser s m => m String
pOperator =
pLexeme $
(:) <$> pOpStart <*> many pOpLetter
pOpStart :: RiverParser s m => m Char
pOpStart =
oneOf "!~-+*/%<>&^|=?:"
pOpLetter :: RiverParser s m => m Char
pOpLetter =
oneOf "-+<>&|="
| jystic/river | src/River/Source/Concrete/Parser.hs | bsd-3-clause | 10,480 | 0 | 26 | 2,368 | 3,500 | 1,784 | 1,716 | 312 | 2 |
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE CPP #-}
#if __GLASGOW_HASKELL__ >= 701
{-# LANGUAGE DefaultSignatures #-}
#endif
module Generics.Deriving.Enum (
-- * Generic enum class
GEnum(..)
-- * Default definitions for GEnum
, genumDefault, toEnumDefault, fromEnumDefault
-- * Generic Ix class
, GIx(..)
-- * Default definitions for GIx
, rangeDefault, indexDefault, inRangeDefault
) where
import Generics.Deriving.Base
import Generics.Deriving.Instances ()
import Generics.Deriving.Eq
-----------------------------------------------------------------------------
-- Utility functions for Enum'
-----------------------------------------------------------------------------
infixr 5 |||
-- | Interleave elements from two lists. Similar to (++), but swap left and
-- right arguments on every recursive application.
--
-- From Mark Jones' talk at AFP2008
(|||) :: [a] -> [a] -> [a]
[] ||| ys = ys
(x:xs) ||| ys = x : ys ||| xs
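-- For example, @[1,3] ||| [2,4] == [1,2,3,4]@ and
-- @take 6 ([0,2..] ||| [1,3..]) == [0,1,2,3,4,5]@.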
-- | Diagonalization of nested lists. Ensure that some elements from every
-- sublist will be included. Handles infinite sublists.
--
-- From Mark Jones' talk at AFP2008
diag :: [[a]] -> [a]
diag = concat . foldr skew [] . map (map (\x -> [x]))
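-- For example, @diag [[1,2,3],[4,5,6],[7,8,9]] == [1,2,4,3,5,7,6,8,9]@: the
-- nested list is traversed along its anti-diagonals, so every sublist
-- (even an infinite one) starts contributing after finitely many steps.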
skew :: [[a]] -> [[a]] -> [[a]]
skew [] ys = ys
skew (x:xs) ys = x : combine (++) xs ys
combine :: (a -> a -> a) -> [a] -> [a] -> [a]
combine _ xs [] = xs
combine _ [] ys = ys
combine f (x:xs) (y:ys) = f x y : combine f xs ys
findIndex :: (a -> Bool) -> [a] -> Maybe Int
findIndex p xs = let l = [ i | (y,i) <- zip xs [(0::Int)..], p y]
in if (null l)
then Nothing
else Just (head l)
--------------------------------------------------------------------------------
-- Generic enum
--------------------------------------------------------------------------------
class Enum' f where
enum' :: [f a]
instance Enum' U1 where
enum' = [U1]
instance (GEnum c) => Enum' (K1 i c) where
enum' = map K1 genum
instance (Enum' f) => Enum' (M1 i c f) where
enum' = map M1 enum'
instance (Enum' f, Enum' g) => Enum' (f :+: g) where
enum' = map L1 enum' ||| map R1 enum'
instance (Enum' f, Enum' g) => Enum' (f :*: g) where
enum' = diag [ [ x :*: y | y <- enum' ] | x <- enum' ]
#if __GLASGOW_HASKELL__ < 701
instance (GEnum a) => GEnum (Maybe a) where
genum = genumDefault
instance (GEnum a) => GEnum [a] where
genum = genumDefault
#else
instance (GEnum a) => GEnum (Maybe a)
instance (GEnum a) => GEnum [a]
#endif
genumDefault :: (Generic a, Enum' (Rep a)) => [a]
genumDefault = map to enum'
toEnumDefault :: (Generic a, Enum' (Rep a)) => Int -> a
toEnumDefault i = let l = enum'
in if (length l > i)
then to (l !! i)
else error "toEnum: invalid index"
fromEnumDefault :: (GEq a, Generic a, Enum' (Rep a))
=> a -> Int
fromEnumDefault x = case findIndex (geq x) (map to enum') of
Nothing -> error "fromEnum: no corresponding index"
Just i -> i
class GEnum a where
genum :: [a]
#if __GLASGOW_HASKELL__ >= 701
default genum :: (Generic a, Enum' (Rep a)) => [a]
genum = genumDefault
#endif
instance GEnum Int where
genum = [0..] ||| (neg 0) where
neg n = (n-1) : neg (n-1)
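-- For example, @take 6 (genum :: [Int]) == [0,-1,1,-2,2,-3]@, interleaving
-- the non-negative and negative integers.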
--------------------------------------------------------------------------------
-- Generic Ix
--------------------------------------------------------------------------------
-- Minimal complete instance: 'range', 'index' and 'inRange'.
class (Ord a) => GIx a where
-- | The list of values in the subrange defined by a bounding pair.
range :: (a,a) -> [a]
-- | The position of a subscript in the subrange.
index :: (a,a) -> a -> Int
  -- | Returns 'True' if the given subscript lies in the range defined by
  -- the bounding pair.
inRange :: (a,a) -> a -> Bool
#if __GLASGOW_HASKELL__ >= 701
default range :: (GEq a, Generic a, Enum' (Rep a)) => (a,a) -> [a]
range = rangeDefault
default index :: (GEq a, Generic a, Enum' (Rep a)) => (a,a) -> a -> Int
index = indexDefault
default inRange :: (GEq a, Generic a, Enum' (Rep a)) => (a,a) -> a -> Bool
inRange = inRangeDefault
#endif
rangeDefault :: (GEq a, Generic a, Enum' (Rep a))
=> (a,a) -> [a]
rangeDefault = t (map to enum') where
t l (x,y) =
case (findIndex (geq x) l, findIndex (geq y) l) of
(Nothing, _) -> error "rangeDefault: no corresponding index"
(_, Nothing) -> error "rangeDefault: no corresponding index"
(Just i, Just j) -> take (j-i) (drop i l)
indexDefault :: (GEq a, Generic a, Enum' (Rep a))
=> (a,a) -> a -> Int
indexDefault = t (map to enum') where
t l (x,y) z =
case (findIndex (geq x) l, findIndex (geq y) l) of
(Nothing, _) -> error "indexDefault: no corresponding index"
(_, Nothing) -> error "indexDefault: no corresponding index"
(Just i, Just j) -> case findIndex (geq z) (take (j-i) (drop i l)) of
Nothing -> error "indexDefault: index out of range"
Just k -> k
inRangeDefault :: (GEq a, Generic a, Enum' (Rep a))
=> (a,a) -> a -> Bool
inRangeDefault = t (map to enum') where
t l (x,y) z =
case (findIndex (geq x) l, findIndex (geq y) l) of
(Nothing, _) -> error "indexDefault: no corresponding index"
(_, Nothing) -> error "indexDefault: no corresponding index"
(Just i, Just j) -> maybe False (const True)
(findIndex (geq z) (take (j-i) (drop i l)))
#if __GLASGOW_HASKELL__ < 701
instance (GEq a, GEnum a, GIx a) => GIx (Maybe a) where
range = rangeDefault
index = indexDefault
inRange = inRangeDefault
instance (GEq a, GEnum a, GIx a) => GIx [a] where
range = rangeDefault
index = indexDefault
inRange = inRangeDefault
#else
instance (GEq a, GEnum a, GIx a) => GIx (Maybe a)
instance (GEq a, GEnum a, GIx a) => GIx [a]
#endif
instance GIx Int where
range (m,n) = [m..n]
index (m,_n) i = i - m
inRange (m,n) i = m <= i && i <= n
| ekmett/generic-deriving | src/Generics/Deriving/Enum.hs | bsd-3-clause | 6,456 | 0 | 16 | 1,807 | 2,121 | 1,155 | 966 | 107 | 4 |
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE RebindableSyntax #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE ViewPatterns #-}
{-# OPTIONS_GHC -fno-warn-name-shadowing #-}
--
-- This module contains the main implementation of the Livermore Unstructured
-- Lagrangian Explicit Shock Hydrodynamics (LULESH) mini-app in Accelerate.
--
-- NOTES:
--
-- Most functions are named similarly to the corresponding function in the
-- reference C implementation.
--
-- I did my best to add comments as I reverse-engineered the C code, but I
-- apologise that it may be difficult to understand.
--
module LULESH where
import Domain
import Type
import Util
import Data.Array.Accelerate as A hiding ( transpose, fromInteger, V3, V4 )
import Data.Array.Accelerate.Linear as L hiding ( Epsilon )
import Data.Array.Accelerate.Control.Lens as L hiding ( _1, _2, _3, _4, _5, _6, _7, _8, _9, at, ix, use )
import Data.Foldable as F
import Data.Functor as F
import Prelude ( fromInteger )
import qualified Prelude as P
-- | Take a single time step of the LULESH algorithm
--
lulesh
:: Acc (Field Mass) -- reference nodal mass
-> Acc (Field Volume) -- reference volume
-> Acc Domain -- current simulation parameters
-> Acc Domain -- updated simulation state
lulesh mN0 v0 domain =
let
(x, dx, e, p, q, v, ss, t, dt)
= unlift domain
(x', dx', e', p', q', v', ss', dtc, dth)
= lagrangeLeapFrog parameters (the dt) x dx e p q v v0 ss v0 mN0
(t', dt')
= timeIncrement parameters t dt dtc dth
in
lift (x', dx', e', p', q', v', ss', t', dt')
-- Lagrange Leapfrog Algorithm
-- ===========================
-- | 'lagrangeLeapFrog' advances the solution from t_n to t_{n+1} over the time
-- increment delta_t. The process of advance the solution is comprised of two
-- major parts:
--
-- 1. Advance variables on the nodal mesh; and
-- 2. Advance the element variables
--
lagrangeLeapFrog
:: Parameters
-> Exp Time
-> Acc (Field Position)
-> Acc (Field Velocity)
-> Acc (Field Energy)
-> Acc (Field Pressure)
-> Acc (Field Viscosity)
-> Acc (Field Volume) -- relative volume
-> Acc (Field Volume) -- reference volume
-> Acc (Field SoundSpeed) -- speed of sound
-> Acc (Field Mass) -- element mass
-> Acc (Field Mass) -- nodal mass
-> ( Acc (Field Position)
, Acc (Field Velocity)
, Acc (Field Energy)
, Acc (Field Pressure)
, Acc (Field Viscosity)
, Acc (Field Volume)
, Acc (Field R)
, Acc (Scalar Time)
, Acc (Scalar Time) )
lagrangeLeapFrog param dt x dx e p q v v0 ss mZ mN =
let
-- Calculate nodal quantities
(x', dx')
= lagrangeNodal param dt x dx p q v v0 ss mZ mN
-- Calculate element quantities
(p', e', q', v', ss', vdov, arealg)
= lagrangeElements param dt x' dx' v v0 q e p mZ
-- Calculate timestep constraints
(dtc, dth)
= calcTimeConstraints param ss' vdov arealg
in
(x', dx', e', p', q', v', ss', dtc, dth)
-- Advance Node Quantities
-- -----------------------
-- | Advance the nodal mesh variables, primarily the velocity and position. The
-- main steps are:
--
-- 1. Calculate the nodal forces: 'calcForceForNodes'
-- 2. Calculate nodal accelerations: 'calcAccelerationForNodes'
--  3. Apply acceleration boundary conditions ('applyAccelerationBoundaryConditionsForNodes', but called from (2))
-- 4. Integrate nodal accelerations to obtain updated velocities: 'calcVelocityForNodes'
-- 5. Integrate nodal velocities to obtain updated positions: 'calcPositionForNodes'
--
lagrangeNodal
:: Parameters
-> Exp Time
-> Acc (Field Position)
-> Acc (Field Velocity)
-> Acc (Field Pressure)
-> Acc (Field Viscosity)
-> Acc (Field Volume) -- relative volume
-> Acc (Field Volume) -- reference volume
-> Acc (Field SoundSpeed) -- speed of sound
-> Acc (Field Mass) -- element mass
-> Acc (Field Mass) -- nodal mass
-> ( Acc (Field Position)
, Acc (Field Velocity) )
lagrangeNodal param dt x dx p q v v0 ss mZ mN =
let
-- Time of boundary condition evaluation is beginning of step for force
-- and acceleration boundary conditions
f' = calcForceForNodes param x dx p q v v0 ss mZ
ddx' = calcAccelerationForNodes f' mN
dx' = calcVelocityForNodes param dt dx ddx'
x' = calcPositionForNodes dt x dx'
in
(x', dx')
-- | Calculate the three-dimensional force vector F at each mesh node based on
-- the values of mesh variables at time t_n.
--
-- A volume force contribution is calculated within each mesh element. This is
-- then distributed to the surrounding nodes.
--
calcForceForNodes
:: Parameters
-> Acc (Field Position)
-> Acc (Field Velocity)
-> Acc (Field Pressure)
-> Acc (Field Viscosity)
-> Acc (Field Volume) -- relative volume
-> Acc (Field Volume) -- reference volume
-> Acc (Field SoundSpeed) -- sound speed
-> Acc (Field Mass) -- element mass
-> Acc (Field Force)
calcForceForNodes param x dx p q v v0 ss mZ
= distributeToNode (+) 0
$ calcVolumeForceForElems param x dx p q v v0 ss mZ
-- | Calculate the volume force contribution for each hexahedral mesh element. The
-- main steps are:
--
-- 1. Initialise stress terms for each element
-- 2. Integrate the volumetric stress terms for each element
-- 3. Calculate the hourglass control contribution for each element.
--
calcVolumeForceForElems
:: Parameters
-> Acc (Field Position)
-> Acc (Field Velocity)
-> Acc (Field Pressure)
-> Acc (Field Viscosity)
-> Acc (Field Volume)
-> Acc (Field Volume)
-> Acc (Field R)
-> Acc (Field Mass)
-> Acc (Field (Hexahedron Force))
calcVolumeForceForElems param x dx p q v v0 ss mZ =
let
-- sum contributions to total stress tensor
sigma = A.zipWith initStressTermsForElem p q
-- calculate nodal forces from element stresses
(stress, determ) = integrateStressForElems x sigma
-- TODO: check for negative element volume
_volumeError = A.any (<= 0) determ
-- Calculate the hourglass control contribution for each element
hourglass = A.generate (shape v0)
$ \ix -> calcHourglassControlForElem param
(collectToElem x ix)
(collectToElem dx ix)
(v !ix)
(v0!ix)
(ss!ix)
(mZ!ix)
-- Combine the nodal force contributions
combine :: Exp (Hexahedron Force) -> Exp (Hexahedron Force) -> Exp (Hexahedron Force)
combine x y = lift ( x^._0 + y^._0
, x^._1 + y^._1
, x^._2 + y^._2
, x^._3 + y^._3
, x^._4 + y^._4
, x^._5 + y^._5
, x^._6 + y^._6
, x^._7 + y^._7 )
in
A.zipWith combine stress hourglass
-- | Initialize stress terms for each element. Our assumption of an inviscid
-- isotropic stress tensor implies that the three principal stress components
-- are equal, and the shear stresses are zero. Thus, we initialize the diagonal
-- terms of the stress tensor sigma to -(p + q) in each element.
--
initStressTermsForElem
:: Exp Pressure
-> Exp Viscosity
-> Exp Sigma
initStressTermsForElem p q =
let s = -p - q
in lift (V3 s s s)
-- | Integrate the volumetric stress contributions for each element.
--
-- In the reference LULESH code, the forces at each of the corners of the
-- hexahedron defining this element would be distributed to the nodal mesh. This
-- corresponds to a global scatter operation.
--
-- Instead, we just return all the values directly, and the individual
-- contributions to the nodes will be combined in a different step.
--
integrateStressForElems
:: Acc (Field Position)
-> Acc (Field Sigma)
-> ( Acc (Field (Hexahedron Force))
, Acc (Field Volume) )
integrateStressForElems x sigma
= A.unzip
$ A.zipWith integrateStressForElem
(generate (shape sigma) (collectToElem x))
sigma
integrateStressForElem
:: Exp (Hexahedron Position)
-> Exp Sigma
-> Exp (Hexahedron Force, Volume)
integrateStressForElem x sigma =
let
-- Volume calculation involves extra work for numerical consistency
det = calcElemShapeFunctionDerivatives x ^._1
b = calcElemNodeNormals x
f = sumElemStressesToNodeForces b sigma
in
lift (f, det)
-- Calculate the shape function derivative for the element. This is used to
-- compute the velocity gradient of the element.
--
calcElemShapeFunctionDerivatives
:: Exp (Hexahedron Position) -- node coordinates bounding this hexahedron
-> Exp (Hexahedron Force, Volume) -- (shape function derivatives, jacobian determinant (volume))
calcElemShapeFunctionDerivatives p =
let
-- compute diagonal differences
d60 = p^._6 - p^._0
d53 = p^._5 - p^._3
d71 = p^._7 - p^._1
d42 = p^._4 - p^._2
-- compute jacobians
fj_xi = 0.125 * ( d60 + d53 - d71 - d42 )
fj_eta = 0.125 * ( d60 - d53 + d71 - d42 )
fj_zeta = 0.125 * ( d60 + d53 + d71 + d42 )
-- calculate cofactors (= determinant??)
cj_xi = cross fj_eta fj_zeta
cj_eta = cross fj_zeta fj_xi
cj_zeta = cross fj_xi fj_eta
-- calculate partials
-- By symmetry, [6,7,4,5] = - [0,1,2,3]
b0 = - cj_xi - cj_eta - cj_zeta
b1 = cj_xi - cj_eta - cj_zeta
b2 = cj_xi + cj_eta - cj_zeta
b3 = - cj_xi + cj_eta - cj_zeta
b4 = -b2
b5 = -b3
b6 = -b0
b7 = -b1
-- calculate jacobian determinant (volume)
volume = 8.0 * dot fj_eta cj_eta
in
lift ((b0, b1, b2, b3, b4, b5, b6, b7), volume)
-- | Calculate normal vectors at element nodes, as an interpolation of element
-- face normals.
--
-- 1. The normal at each node of the element is initially zero
--
-- 2. Enumerate all six faces of the element. For each face, calculate a normal
-- vector, scale the magnitude by one quarter, and sum the scaled vector
-- into each of the four nodes of the element corresponding to a face.
--
calcElemNodeNormals
:: Exp (Hexahedron Position)
-> Exp (Hexahedron Normal)
calcElemNodeNormals p =
let
-- Calculate a face normal
--
surfaceElemFaceNormal :: Exp (Quad Position) -> Exp Normal
surfaceElemFaceNormal p =
let
bisectx = 0.5 * (p^._3 + p^._2 - p^._1 - p^._0)
bisecty = 0.5 * (p^._2 + p^._1 - p^._3 - p^._0)
in
0.25 * cross bisectx bisecty
-- The normals at each of the six faces of the hexahedron.
--
-- The direction that we trace out the coordinates forming a face is such
    --    that it points towards the inside of the hexahedron (RH-rule)
--
n0 = surfaceElemFaceNormal (collectFace 0 p) -- corners: 0, 1, 2, 3
n1 = surfaceElemFaceNormal (collectFace 1 p) -- corners: 0, 4, 5, 1
n2 = surfaceElemFaceNormal (collectFace 2 p) -- corners: 1, 5, 6, 2
n3 = surfaceElemFaceNormal (collectFace 3 p) -- corners: 2, 6, 7, 3
n4 = surfaceElemFaceNormal (collectFace 4 p) -- corners: 3, 7, 4, 0
n5 = surfaceElemFaceNormal (collectFace 5 p) -- corners: 4, 7, 6, 5
-- The normal at each node is then the sum of the normals of the three
-- faces that meet at that node.
in
lift ( n0 + n1 + n4
, n0 + n1 + n2
, n0 + n2 + n3
, n0 + n3 + n4
, n1 + n4 + n5
, n1 + n2 + n5
, n2 + n3 + n5
, n3 + n4 + n5
)
-- | Sum force contribution in element to local vector for each node around
-- element.
--
sumElemStressesToNodeForces
:: Exp (Hexahedron Normal)
-> Exp Sigma
-> Exp (Hexahedron Force)
sumElemStressesToNodeForces pf sigma =
over each (\x -> -sigma * x) pf -- interesting shorthand to map over a tuple
-- | Calculate the volume derivatives for an element. Starting with a formula
-- for the volume of a hexahedron, take the derivative of that volume formula
-- with respect to the coordinates at one of the nodes. By symmetry, the formula
-- for one node can be applied to each of the other seven nodes
--
calcElemVolumeDerivative
:: Exp (Hexahedron Position)
-> Exp (Hexahedron (V3 R))
calcElemVolumeDerivative p =
let
volumeDerivative :: Exp (V3 R, V3 R, V3 R, V3 R, V3 R, V3 R) -> Exp (V3 R)
volumeDerivative p =
let p01 = p^._0 + p^._1
p12 = p^._1 + p^._2
p04 = p^._0 + p^._4
p34 = p^._3 + p^._4
p25 = p^._2 + p^._5
p35 = p^._3 + p^._5
in
(1/12) * (cross p12 p01 + cross p04 p34 + cross p35 p25)
in
lift ( volumeDerivative (lift (p^._1, p^._2, p^._3, p^._4, p^._5, p^._7))
, volumeDerivative (lift (p^._0, p^._1, p^._2, p^._7, p^._4, p^._6))
, volumeDerivative (lift (p^._3, p^._0, p^._1, p^._6, p^._7, p^._5))
, volumeDerivative (lift (p^._2, p^._3, p^._0, p^._5, p^._6, p^._4))
, volumeDerivative (lift (p^._7, p^._6, p^._5, p^._0, p^._3, p^._1))
, volumeDerivative (lift (p^._4, p^._7, p^._6, p^._1, p^._0, p^._2))
, volumeDerivative (lift (p^._5, p^._4, p^._7, p^._2, p^._1, p^._3))
, volumeDerivative (lift (p^._6, p^._5, p^._4, p^._3, p^._2, p^._0))
)
-- Calculate the hourglass control contribution for each element.
--
-- For each element:
--
-- 1. Gather the node coordinates for that element.
-- 2. Calculate the element volume derivative.
-- 3. Perform a diagnostic check for any element volumes <= zero
-- 4. Compute the Flanagan-Belytschko hourglass control force for each element.
-- This is described in the paper:
--
-- [1] "A uniform strain hexahedron and quadrilateral with orthogonal
-- hourglass control", Flanagan, D. P. and Belytschko, T. International
-- Journal for Numerical Methods in Engineering, (17) 5, May 1981.
--
calcHourglassControlForElem
:: Parameters
-> Exp (Hexahedron Position)
-> Exp (Hexahedron Velocity)
-> Exp Volume -- relative volume
-> Exp Volume -- reference volume
-> Exp SoundSpeed -- speed of sound
-> Exp Mass -- element mass
-> Exp (Hexahedron Force)
calcHourglassControlForElem param@Parameters{..} x dx v v0 ss mZ =
let
dv = calcElemVolumeDerivative x
determ = v * v0
in
if hgcoef > 0
then calcFBHourglassForceForElem param x dx determ dv ss mZ
else constant (0,0,0,0,0,0,0,0)
calcFBHourglassForceForElem
:: Parameters
-> Exp (Hexahedron Position)
-> Exp (Hexahedron Velocity)
-> Exp Volume -- actual volume
-> Exp (Hexahedron (V3 R)) -- volume derivatives
-> Exp SoundSpeed -- speed of sound
-> Exp Mass -- element mass
-> Exp (Hexahedron Force)
calcFBHourglassForceForElem Parameters{..} x dx determ dv ss mZ =
let
-- Hourglass base vectors, from [1] table 1. This defines the hourglass
-- patterns for a unit cube.
gamma :: Exp (Hexahedron (V4 R))
gamma = constant
( V4 ( 1) ( 1) ( 1) (-1)
, V4 ( 1) (-1) (-1) ( 1)
, V4 (-1) (-1) ( 1) (-1)
, V4 (-1) ( 1) (-1) ( 1)
, V4 (-1) (-1) ( 1) ( 1)
, V4 (-1) ( 1) (-1) (-1)
, V4 ( 1) ( 1) ( 1) ( 1)
, V4 ( 1) (-1) (-1) (-1)
)
-- transpose x !*! gamma
hourmod :: Exp (M34 R)
hourmod = lift $
V3 (F.sum $ P.zipWith (*^) (x ^.. (each._x)) (gamma ^.. each))
(F.sum $ P.zipWith (*^) (x ^.. (each._y)) (gamma ^.. each))
(F.sum $ P.zipWith (*^) (x ^.. (each._z)) (gamma ^.. each))
-- Compute hourglass modes
hourgam :: Exp (Hexahedron (V4 R))
hourgam =
let hg :: Exp (V4 R) -> Exp (V3 R) -> Exp (V4 R)
hg g dv = g - volinv *^ (dv *! hourmod)
volinv = 1 / determ
in
lift ( hg (gamma^._0) (dv^._0)
, hg (gamma^._1) (dv^._1)
, hg (gamma^._2) (dv^._2)
, hg (gamma^._3) (dv^._3)
, hg (gamma^._4) (dv^._4)
, hg (gamma^._5) (dv^._5)
, hg (gamma^._6) (dv^._6)
, hg (gamma^._7) (dv^._7)
)
-- Compute forces
cbrt x = x ** (1/3) -- cube root
coefficient = - hgcoef * 0.01 * ss * mZ / cbrt determ
in
calcElemFBHourglassForce coefficient dx hourgam
calcElemFBHourglassForce
:: Exp R
-> Exp (Hexahedron Velocity)
-> Exp (Hexahedron (V4 R))
-> Exp (Hexahedron Force)
calcElemFBHourglassForce coefficient dx hourgam =
let
-- transpose hourgam !*! dx
h00, h01, h02, h03 :: Exp (V3 R)
h00 = F.sum $ P.zipWith (*^) (hourgam ^.. (each._x)) (dx ^.. each)
h01 = F.sum $ P.zipWith (*^) (hourgam ^.. (each._y)) (dx ^.. each)
h02 = F.sum $ P.zipWith (*^) (hourgam ^.. (each._z)) (dx ^.. each)
h03 = F.sum $ P.zipWith (*^) (hourgam ^.. (each._w)) (dx ^.. each)
hh :: Exp (M43 R)
hh = lift (V4 h00 h01 h02 h03)
hg :: Exp (V4 R) -> Exp Force
hg h = coefficient *^ (h *! hh)
in
over each hg hourgam
-- | Calculate the three-dimensional acceleration vector at each mesh node, and
-- apply the symmetry boundary conditions.
--
calcAccelerationForNodes
:: Acc (Field Force) -- force at each node
-> Acc (Field Mass) -- nodal mass
-> Acc (Field Acceleration)
calcAccelerationForNodes f mN
= applyAccelerationBoundaryConditionsForNodes
$ A.zipWith (^/) f mN
-- | Applies symmetry boundary conditions at nodes on the boundaries of the
-- mesh. This sets the normal component of the acceleration vector at the
-- boundary to zero. This implies that the normal component of the velocity
-- vector will remain constant in time.
--
-- Recall that the benchmark Sedov problem is spherically-symmetric and that we
-- simulate it in a cubic domain containing a single octant of the sphere. To
-- maintain spherical symmetry of the domain, we apply symmetry boundary
-- conditions along the faces of the cubic domain that contact the planes
-- separating the octants of the sphere. This forces the normal component of the
-- velocity vector to be zero along these boundary faces for all time, since
-- they were initialised to zero.
--
applyAccelerationBoundaryConditionsForNodes
:: Acc (Field Acceleration)
-> Acc (Field Acceleration)
applyAccelerationBoundaryConditionsForNodes acc =
generate (shape acc) $ \ix ->
let
Z :. z :. y :. x = unlift ix
V3 ddx ddy ddz = unlift $ acc ! ix
in
lift $ V3 (x == 0 ? (0, ddx))
(y == 0 ? (0, ddy))
(z == 0 ? (0, ddz))
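-- For example, a node on the x == 0 symmetry plane keeps its y and z
-- acceleration components but has its x component forced to zero, and the
-- node at the origin has all three components zeroed.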
-- | Integrate the acceleration at each node to advance the velocity at the
-- node.
--
-- Note that the routine applies a cutoff to each velocity vector value.
-- Specifically, if a value is below some prescribed threshold the term is set
-- to zero. The reason for this cutoff is to prevent spurious mesh motion which
-- may arise due to floating point roundoff error when the velocity is near
-- zero.
--
calcVelocityForNodes
:: Parameters
-> Exp Time
-> Acc (Field Velocity)
-> Acc (Field Acceleration)
-> Acc (Field Velocity)
calcVelocityForNodes Parameters{..} dt u ud
= A.map (over each (\x -> abs x < u_cut ? (0,x)))
$ integrate dt u ud
-- | Integrate the velocity at each node to advance the position of the node
--
calcPositionForNodes
:: Exp Time
-> Acc (Field Position)
-> Acc (Field Velocity)
-> Acc (Field Position)
calcPositionForNodes = integrate
-- | Euler integration
--
integrate
:: Exp Time
-> Acc (Field (V3 R))
-> Acc (Field (V3 R))
-> Acc (Field (V3 R))
integrate dt
= A.zipWith (\x dx -> x + dx ^* dt)
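-- A quick sanity check (illustrative values only): with dt = 0.1, a value of
-- V3 1 2 3 whose derivative is V3 2 0 0 is advanced to V3 1.2 2 3.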
-- Advance Element Quantities
-- --------------------------
-- | Advance element quantities, primarily pressure, internal energy, and
-- relative volume. The artificial viscosity in each element is also calculated
-- here. The main steps are:
--
-- 1. Calculate element quantities based on nodal kinematic quantities
-- 2. Calculate element artificial viscosity terms
-- 3. Apply material properties in each element needed to calculate updated
-- pressure and internal energy.
-- 4. Compute updated element volume
--
lagrangeElements
:: Parameters
-> Exp Time
-> Acc (Field Position)
-> Acc (Field Velocity)
-> Acc (Field Volume)
-> Acc (Field Volume)
-> Acc (Field Viscosity)
-> Acc (Field Energy)
-> Acc (Field Pressure)
-> Acc (Field Mass)
-> ( Acc (Field Pressure)
, Acc (Field Energy)
, Acc (Field Viscosity)
, Acc (Field Volume)
, Acc (Field R)
, Acc (Field R)
, Acc (Field R) )
lagrangeElements params dt x dx v v0 q e p mZ =
let
(v', dv', vdov, arealg)
= calcLagrangeElements dt x dx v v0
(ql, qq)
= calcQForElems params x dx v' v0 mZ vdov
(p', e', q', ss')
= A.unzip4
$ A.zipWith7 (calcEOSForElem params) v' dv' e p q ql qq
v'' = A.map (updateVolumeForElem params) v'
in
(p', e', q', v'', ss', vdov, arealg)
-- | Calculate various element quantities that are based on the new kinematic
-- node quantities position and velocity.
--
calcLagrangeElements
:: Exp Time
-> Acc (Field Position) -- nodal position
-> Acc (Field Velocity) -- nodal velocity
-> Acc (Field Volume) -- relative volume
-> Acc (Field Volume) -- reference volume
-> ( Acc (Field Volume)
, Acc (Field Volume)
, Acc (Field R)
, Acc (Field R) )
calcLagrangeElements dt x dx v v0 =
let
-- calculate new element quantities based on updated position and velocity
(v', dv', vdov, arealg)
= A.unzip4
$ A.generate (shape v0)
$ \ix -> calcKinematicsForElem dt
(collectToElem x ix)
(collectToElem dx ix)
(v !ix)
(v0!ix)
-- TODO: Check for negative element volume
_volumeError = A.any (<= 0) v'
in
(v', dv', vdov, arealg)
-- | Calculate terms in the total strain rate tensor epsilon_tot that are used
-- to compute the terms in the deviatoric strain rate tensor epsilon.
--
calcKinematicsForElem
:: Exp Time
-> Exp (Hexahedron Position)
-> Exp (Hexahedron Velocity)
-> Exp Volume -- relative volume
-> Exp Volume -- reference volume
-> Exp (Volume, Volume, R, R)
calcKinematicsForElem dt x dx v v0 =
let
-- (relative) volume calculations
vol' = calcElemVolume x
v' = vol' / v0
dv' = v' - v
-- characteristic length
arealg = calcElemCharacteristicLength x vol'
-- modify nodal positions to be halfway between time(n) and time(n+1)
mid :: Exp (V3 R) -> Exp (V3 R) -> Exp (V3 R)
mid x xd = x - 0.5 * dt *^ xd
midx = lift ( mid (x^._0) (dx^._0)
, mid (x^._1) (dx^._1)
, mid (x^._2) (dx^._2)
, mid (x^._3) (dx^._3)
, mid (x^._4) (dx^._4)
, mid (x^._5) (dx^._5)
, mid (x^._6) (dx^._6)
, mid (x^._7) (dx^._7)
)
-- Use midpoint nodal positions to calculate velocity gradient and
-- strain rate tensor
(b, det) = unlift $ calcElemShapeFunctionDerivatives midx
d = calcElemVelocityGradient dx b det
-- calculate the deviatoric strain rate tensor
vdov = F.sum (unlift d :: V3 (Exp R)) -- TLM: no Foldable instance for (Exp V3) ):
_strain = d ^- (vdov / 3.0)
in
lift (v', dv', vdov, arealg)
-- | Calculate the volume of an element given the nodal coordinates
--
calcElemVolume
:: Exp (Hexahedron Position)
-> Exp Volume
calcElemVolume p =
let
-- compute diagonal differences
d61 = p^._6 - p^._1
d70 = p^._7 - p^._0
d63 = p^._6 - p^._3
d20 = p^._2 - p^._0
d50 = p^._5 - p^._0
d64 = p^._6 - p^._4
d31 = p^._3 - p^._1
d72 = p^._7 - p^._2
d43 = p^._4 - p^._3
d57 = p^._5 - p^._7
d14 = p^._1 - p^._4
d25 = p^._2 - p^._5
in
(1/12) * ( triple (d31 + d72) d63 d20
+ triple (d43 + d57) d64 d70
+ triple (d14 + d25) d61 d50
)
-- | Calculate the characteristic length of the element. This is the volume of
-- the element divided by the area of its largest face.
--
calcElemCharacteristicLength
:: Exp (Hexahedron Position)
-> Exp Volume
-> Exp R
calcElemCharacteristicLength x v =
let
faceArea :: Exp (Quad Position) -> Exp R
faceArea face =
let
d20 = face^._2 - face^._0
d31 = face^._3 - face^._1
f = d20 - d31
g = d20 + d31
h = dot f g
in
dot f f * dot g g - h * h
area = P.maximum
$ P.map faceArea
$ P.map (P.flip collectFace x) [0..5]
in
4.0 * v / sqrt area
-- | Calculate the element velocity gradient which defines the terms of
-- epsilon_tot. The diagonal entries of epsilon_tot are then used to initialise
-- the diagonal entries of the strain rate tensor epsilon.
--
calcElemVelocityGradient
:: Exp (Hexahedron Velocity)
-> Exp (Hexahedron Force)
-> Exp Volume
-> Exp (V3 R)
calcElemVelocityGradient dx b det =
let
-- TLM: unfortunately the (Accelerate) simplifier does not spot that the
-- off-diagonal elements of the matrix are unused. Thus, we will need
-- to rely on the code generator / backend compiler to remove those
-- expressions as dead code.
--
inv_det = 1 / det
mm = inv_det *!! (transpose pf !*! vd)
vd :: Exp (M43 R)
vd = lift $ V4 (dx^._0 - dx^._6)
(dx^._1 - dx^._7)
(dx^._2 - dx^._4)
(dx^._3 - dx^._5)
pf :: Exp (M43 R)
pf = lift $ V4 (b^._0)
(b^._1)
(b^._2)
(b^._3)
-- d3 = 0.5 * ( mm^._z._y + mm^._y._z ) -- 0.5 * ( dzddy + dyddz )
-- d4 = 0.5 * ( mm^._x._z + mm^._z._x ) -- 0.5 * ( dxddz + dzddx )
-- d5 = 0.5 * ( mm^._x._y + mm^._y._x ) -- 0.5 * ( dxddy + dyddx )
in
diagonal mm
-- | Calculate the artificial viscosity term for each element. The mathematical
-- aspects of the algorithm are described in [2]:
--
-- [2] Christensen, Randy B. "Godunov methods on a staggered mesh: An improved
-- artificial viscosity". Lawrence Livermore National Laboratory Report,
-- UCRL-JC-105-269, 1991. https://e-reports-ext.llnl.gov/pdf/219547.pdf
--
calcQForElems
:: Parameters
-> Acc (Field Position)
-> Acc (Field Velocity)
-> Acc (Field Volume)
-> Acc (Field Volume)
-> Acc (Field Mass) -- element mass
-> Acc (Field R) -- vdot / v
-> ( Acc (Field Viscosity)
, Acc (Field Viscosity) )
calcQForElems params x dx v v0 mZ vdov =
let
-- calculate velocity gradients
(grad_p, grad_v)
= A.unzip
$ A.generate (shape v0)
$ \ix -> calcMonotonicQGradientsForElem
(collectToElem x ix)
(collectToElem dx ix)
(v !ix)
(v0!ix)
-- Transfer velocity gradients in the first order elements
(ql, qq)
= calcMonotonicQForElems params grad_p grad_v v v0 mZ vdov
-- TODO: don't allow excessive artificial viscosity
-- _viscosityError = A.any (> qstop) q
in
(ql, qq)
-- | Calculate discrete spatial gradients of nodal coordinates and velocity
-- gradients with respect to a reference coordinate system. The following maps
-- an element to the unit cube:
--
-- (x,y,z) ↦ (xi, eta, zeta)
--
-- Mapping the element to the unit cube simplifies the process of defining a
-- single value for the viscosity in the element from the gradient information.
--
-- The reference code adds a small (1.0e-36) factor into denominators.
--
calcMonotonicQGradientsForElem
:: Exp (Hexahedron Position)
-> Exp (Hexahedron Velocity)
-> Exp Volume
-> Exp Volume
-> Exp (Gradient Position, Gradient Velocity)
calcMonotonicQGradientsForElem x dx v v0 =
let
vol = v * v0
ivol = 1 / vol
x_eta, x_xi, x_zeta :: Exp Position
x_eta = 0.25 *^ (sumOf each (collectFace 3 x) - sumOf each (collectFace 1 x))
x_xi = 0.25 *^ (sumOf each (collectFace 2 x) - sumOf each (collectFace 4 x))
x_zeta = 0.25 *^ (sumOf each (collectFace 5 x) - sumOf each (collectFace 0 x))
dx_eta, dx_xi, dx_zeta :: Exp Velocity
dx_eta = 0.25 *^ (sumOf each (collectFace 3 dx) - sumOf each (collectFace 1 dx))
dx_xi = 0.25 *^ (sumOf each (collectFace 2 dx) - sumOf each (collectFace 4 dx))
dx_zeta = 0.25 *^ (sumOf each (collectFace 5 dx) - sumOf each (collectFace 0 dx))
a = cross x_xi x_eta
b = cross x_eta x_zeta
c = cross x_zeta x_xi
grad_x = V3 (vol / norm b)
(vol / norm c)
(vol / norm a)
grad_v = V3 (ivol * dot b dx_xi)
(ivol * dot c dx_eta)
(ivol * dot a dx_zeta)
in
lift (grad_x, grad_v)
-- | Use the spatial gradient information to compute linear and quadratic terms
-- for viscosity. The actual element values of viscosity (q) are calculated
-- during application of material properties in each element; see
-- 'applyMaterialPropertiesForElem'.
--
calcMonotonicQForElems
:: Parameters
-> Acc (Field (Gradient Position))
-> Acc (Field (Gradient Velocity))
-> Acc (Field Mass)
-> Acc (Field Volume)
-> Acc (Field Volume)
-> Acc (Field R) -- vdot / v
-> ( Acc (Field Viscosity) -- ql
, Acc (Field Viscosity) ) -- qq
calcMonotonicQForElems Parameters{..} grad_x grad_v volNew volRef elemMass vdov =
let
sh = shape volRef
numElem = indexHead sh
-- Need to compute a stencil on the neighbouring elements of the velocity
-- gradients. However, we have different boundary conditions depending on
-- whether we are at an internal/symmetric (= clamp) or external/free (=
-- set to zero) face. This procedure encodes that decision.
--
get :: Exp Int -> Exp Int -> Exp Int -> Exp (Gradient Velocity)
get z y x =
if x >= numElem || y >= numElem || z >= numElem
then zero -- external face
else grad_v ! index3 (A.max 0 z) (A.max 0 y) (A.max 0 x) -- internal region
-- Calculate one component of the phi term
--
calcPhi :: Exp R -> Exp R -> Exp R -> Exp R
calcPhi l c r =
let
ic = 1.0 / (c + 1.0e-36)
l' = l * ic
r' = r * ic
phi = 0.5 * (l' + r')
in
(l' * monoq_max_slope) `A.min` phi `A.min` (r' * monoq_max_slope) `A.max` 0 `A.min` monoq_limiter
-- Calculate linear and quadratic terms for viscosity
--
viscosity = generate sh $ \ix@(unlift -> Z:.z:.y:.x) ->
let
phi = lift $
V3 (calcPhi (get z y (x-1) ^._x) (dv^._x) (get z y (x+1) ^._x))
(calcPhi (get z (y-1) x ^._y) (dv^._y) (get z (y+1) x ^._y))
(calcPhi (get (z-1) y x ^._z) (dv^._z) (get (z+1) y x ^._z))
-- remove length scale
dx = grad_x ! ix
dv = grad_v ! ix
dvx = lift1 (fmap (A.min 0) :: V3 (Exp R) -> V3 (Exp R)) (dx * dv)
rho = elemMass!ix / (volRef!ix * volNew!ix)
qlin = -qlc_monoq * rho * dot dvx (1 - phi)
qquad = qqc_monoq * rho * dot (dvx*dvx) (1 - phi*phi)
in
if vdov ! ix > 0
then constant (0,0)
else lift (qlin, qquad)
in
A.unzip viscosity
-- | Evaluate the Equation of State of the system to calculate the updated
-- pressure and internal energy of an element.
--
-- The reference implementation had a function 'applyMaterialPropertiesForElem',
-- which has been merged into this.
--
calcEOSForElem
:: Parameters
-> Exp Volume
-> Exp Volume
-> Exp Energy
-> Exp Pressure
-> Exp Viscosity
-> Exp Viscosity -- linear term
-> Exp Viscosity -- quadratic term
-> Exp (Pressure, Energy, Viscosity, R)
calcEOSForElem param@Parameters{..} vol delta_vol e p q ql qq =
let
clamp = (\x -> if eosvmin /= 0 then A.max eosvmin x else x)
. (\x -> if eosvmax /= 0 then A.min eosvmax x else x)
vol' = clamp vol
work = 0
comp = 1 / vol' - 1
comp' = 1 / (vol' - delta_vol * 0.5) - 1
(e', p', q', bvc, pbvc) = calcEnergyForElem param e p q ql qq comp comp' vol' delta_vol work
ss = calcSoundSpeedForElem param vol' e' p' bvc pbvc
in
lift (p', e', q', ss)
-- | Calculate pressure and energy for an element
--
calcEnergyForElem
:: Parameters
-> Exp Energy
-> Exp Pressure
-> Exp Viscosity
-> Exp Viscosity -- linear term
-> Exp Viscosity -- quadratic term
-> Exp R -- compression
-> Exp R -- half-step compression
-> Exp Volume
-> Exp Volume
-> Exp R -- work
-> (Exp Energy, Exp Pressure, Exp Viscosity, Exp R, Exp R)
calcEnergyForElem params@Parameters{..} e0 p0 q0 ql qq comp comp_half vol vol_delta work =
let
e1 = e_min `A.max` (e0 - 0.5 * vol_delta * (p0 + q0) + 0.5 * work)
(p1, bvc1, pbvc1) = calcPressureForElem params e1 vol comp_half
ssc1 = calcSoundSpeedForElem params (1/(1+comp_half)) e1 p1 bvc1 pbvc1
q1 = vol_delta > 0 ? (0, ssc1 * ql + qq )
e2 = let e = e1
+ 0.5 * vol_delta * (3.0 * (p0 + q0) - 4.0 * (p1 + q1))
+ 0.5 * work
in
abs e < e_cut ? (0, A.max e_min e )
(p2, bvc2, pbvc2) = calcPressureForElem params e2 vol comp
ssc2 = calcSoundSpeedForElem params vol e2 p2 bvc2 pbvc2
q2 = vol_delta > 0 ? (0, ssc2 * ql + qq)
e3 = let e = e2 - 1/6 * vol_delta * ( 7.0 * (p0 + q0)
- 8.0 * (p1 + q1)
+ (p2 + q2) )
in
abs e < e_cut ? (0, A.max e_min e)
(p3, bvc3, pbvc3) = calcPressureForElem params e3 vol comp
ssc3 = calcSoundSpeedForElem params vol e3 p3 bvc3 pbvc3
q3 = let q = ssc3 * ql + qq
in abs q < q_cut ? (0, q)
in
(e3, p3, q3, bvc3, pbvc3)
-- | Calculate the "gamma law" model of a gas:
--
-- P = (gamma - 1) (rho / rho0) e
--
--
calcPressureForElem
:: Parameters
-> Exp Energy
-> Exp Volume
-> Exp R
-> (Exp Pressure, Exp R, Exp R)
calcPressureForElem Parameters{..} e vol comp =
let
c1s = 2/3 -- defined to be (gamma - 1)
bvc = c1s * (comp + 1)
pbvc = c1s
p_new = bvc * e
p_new' = if abs p_new < p_cut || vol >= eosvmax
then 0
else p_new
in
( A.max p_min p_new', bvc, pbvc )
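-- A worked example (illustrative, assuming the p_cut / p_min / eosvmax cutoffs
-- do not trigger): at the reference volume (comp = 0) with internal energy
-- e = 3.0, we get bvc = 2/3 and a returned pressure of 2.0.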
-- | Calculate the speed of sound in each element
--
-- c_sound = (p*e + V^2*p*(gamma-1)*(1/(V-1)+1)) / rho0
--
calcSoundSpeedForElem
:: Parameters
-> Exp Volume
-> Exp Energy
-> Exp Pressure
-> Exp R
-> Exp R
-> Exp SoundSpeed
calcSoundSpeedForElem Parameters{..} v e p bvc pbvc =
let
ss = (pbvc * e + v * v * p * bvc ) / ref_dens
in
if ss <= 1.111111e-36
then 0.333333e-18
else sqrt ss
-- | Update the relative volume, using a tolerance to prevent spurious
-- deviations from the initial values (which may arise due to floating point
-- roundoff error).
--
updateVolumeForElem
:: Parameters
-> Exp Volume
-> Exp Volume
updateVolumeForElem Parameters{..} vol =
if abs (vol - 1) < v_cut
then 1
else vol
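-- For example, with v_cut = 1.0e-8 (an illustrative value), a relative volume
-- of 0.999999999 is snapped back to exactly 1, while 1.1 is left unchanged.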
-- Time Constraints
-- ================
-- | After all the solution variables are moved to the next time step, the
-- constraints for the next time increment are calculated. Each constraint is
-- computed in each element, and the final constraint is the minimum over all
-- element values.
--
calcTimeConstraints
:: Parameters
-> Acc (Field R)
-> Acc (Field R)
-> Acc (Field R)
-> ( Acc (Scalar Time)
, Acc (Scalar Time) )
calcTimeConstraints param ss vdov arealg =
let
dt_courant = A.minimum . A.flatten $ A.zipWith3 (calcCourantConstraintForElem param) ss vdov arealg
dt_hydro = A.minimum . A.flatten $ A.map (calcHydroConstraintForElem param) vdov
in
(dt_courant, dt_hydro)
-- | The Courant-Friedrichs-Lewy (CFL) constraint is calculated only in elements
-- whose volumes are changing (vdov /= 0). This constraint is essentially the
-- ratio of the characteristic length of the element to the speed of sound in
-- that element. However, when the element is under compression (vdov < 0),
-- additional terms are added to the denominator to reduce the timestep further.
--
calcCourantConstraintForElem
:: Parameters
-> Exp SoundSpeed
-> Exp R -- vdot / v
-> Exp R -- characteristic length
-> Exp Time
calcCourantConstraintForElem Parameters{..} ss vdov arealg =
if vdov == 0
then 1.0e20
else let
qqc' = 64 * qqc * qqc
dtf = ss * ss
+ if vdov >= 0 then 0
else qqc' * arealg * arealg * vdov * vdov
in
arealg / sqrt dtf
-- | Calculate the hydro time constraint in elements whose volumes are changing
-- (vdov /= 0). When the element is undergoing volume change, the timestep for
-- that element is some maximum allowable element volume change (prescribed)
-- divided by vdov in the element
--
calcHydroConstraintForElem
:: Parameters
-> Exp R -- vdot / v
-> Exp Time
calcHydroConstraintForElem Parameters{..} vdov =
if vdov == 0
then 1.0e20
else dvovmax / (abs vdov + 1.0e-20)
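-- For example (illustrative values): with dvovmax = 0.1 and vdov = 0.02 the
-- constraint is roughly 0.1 / 0.02 = 5.0, while an element whose volume is not
-- changing contributes no constraint (1.0e20).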
-- | Compute the time increment for the current loop iteration. We aim for a
-- "target" timestep value which completes the simulation in the next step, but
-- is only allowed to change from the previous value by a certain amount,
-- subject to courant and hydro constraints.
--
timeIncrement
:: Parameters
-> Acc (Scalar Time)
-> Acc (Scalar Time)
-> Acc (Scalar Time)
-> Acc (Scalar Time)
-> (Acc (Scalar Time), Acc (Scalar Time))
timeIncrement param t dt dtc dth
= A.unzip
$ A.zipWith4 (timeIncrement' param) t dt dtc dth
timeIncrement'
:: Parameters
-> Exp Time -- current simulation time
-> Exp Time -- old timestep
-> Exp Time
-> Exp Time
-> Exp (Time, Time) -- (simulation time, timestep)
timeIncrement' Parameters{..} t_now dt_old dt_courant dt_hydro =
let
dt_end = constant t_end - t_now
(lb,ub) = dt_scaling
-- try to prevent very small scaling on the next cycle
target = if dt_end > step && dt_end < 4 * step / 3
then 2 / 3 * step
else dt_end
-- increment the previous timestep by a small amount
step = A.min dt_new dt_max
c1 = 1.0e20
c2 = if dt_courant < c1 then 0.5 * dt_courant else c1
c3 = if dt_hydro < c2 then 2/3 * dt_hydro else c2
ratio = c3 / dt_old
dt_new = if ratio >= 1 && ratio < lb
then dt_old
else if ratio >= 1 && ratio > ub
then dt_old * ub
else c3
-- compute timestep and the new simulation time
dt' = A.min step target
t_now' = t_now + dt'
in
lift (t_now', dt')
| tmcdonell/accelerate-lulesh | src/LULESH.hs | bsd-3-clause | 41,781 | 0 | 23 | 13,796 | 11,112 | 5,889 | 5,223 | 749 | 6 |
module Tor.Flags(
Flag(..)
, runDefaultMain
--
, getNickname
, getOnionPort
, getContactInfo
, getTapDevice
)
where
import Data.Version hiding (Version)
import Data.Word
import System.Console.GetOpt
import System.Environment
import System.Exit
import Paths_haskell_tor
data Flag = Version
| Help
| OnionPort Word16
| OutputLog FilePath
| Nickname String
| ContactInfo String
| UseTapDevice String
deriving (Eq)
options :: [OptDescr Flag]
options =
[ Option ['v'] ["version"] (NoArg Version)
"show the version number"
, Option ['h','?'] ["help"] (NoArg Help)
"show this message"
  , Option ['p'] ["onion-port"] (ReqArg (OnionPort . read) "PORT")
      "Select what onion port to use. [default 9002]"
, Option ['o'] ["output-log"] (ReqArg OutputLog "FILE")
"Select where to write log info. [default stdout]"
, Option ['n'] ["node-nickname"] (ReqArg Nickname "STR")
"An (optional) nickname for this Tor node."
, Option ['c'] ["node-contact"] (ReqArg ContactInfo "STR")
"An (optional) contact for this Tor node."
, Option ['t'] ["use-tap"] (ReqArg UseTapDevice "STR")
"Use a direct connection to a tap device."
]
showHelpAndStop :: Bool -> IO ()
showHelpAndStop okgood =
do putStrLn (usageInfo "Usage: haskell-tor [options]" options)
exitWith (if okgood then ExitSuccess else (ExitFailure 2))
showVersionAndStop :: IO ()
showVersionAndStop =
do putStrLn ("Haskell Tor Version " ++ showVersion version)
exitWith ExitSuccess
runDefaultMain :: ([Flag] -> IO ()) -> IO ()
runDefaultMain runNode =
do args <- getArgs
case getOpt Permute options args of
(opts, [], [])
| Version `elem` opts -> showVersionAndStop
| Help `elem` opts -> showHelpAndStop True
| otherwise -> runNode opts
(_, _, _) -> showHelpAndStop False
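-- A minimal usage sketch (the node body shown here is hypothetical):
--
-- > main :: IO ()
-- > main = runDefaultMain $ \flags ->
-- >   putStrLn ("starting node on onion port " ++ show (getOnionPort flags))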
-- -----------------------------------------------------------------------------
getNickname :: [Flag] -> String
getNickname [] = ""
getNickname (Nickname x : _) = x
getNickname (_ : rest) = getNickname rest
getOnionPort :: [Flag] -> Word16
getOnionPort [] = 9002 -- http://xkcd.com/221/
getOnionPort (OnionPort p : _) = p
getOnionPort (_ : rest) = getOnionPort rest
getContactInfo :: [Flag] -> Maybe String
getContactInfo [] = Nothing
getContactInfo (ContactInfo ci : _) = Just ci
getContactInfo (_ : rest) = getContactInfo rest
getTapDevice :: [Flag] -> Maybe String
getTapDevice [] = Nothing
getTapDevice (UseTapDevice t : _) = Just t
getTapDevice (_ : rest) = getTapDevice rest
| GaloisInc/haskell-tor | exe/Tor/Flags.hs | bsd-3-clause | 2,934 | 0 | 13 | 895 | 791 | 420 | 371 | 70 | 2 |
{-|
Copyright : (c) Dave Laing, 2017
License : BSD3
Maintainer : [email protected]
Stability : experimental
Portability : non-portable
-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE UndecidableInstances #-}
module Rules.Type.Infer.Common (
InferTypeRule(..)
, mkInferType
, mkCheckType'
, PCheckRule(..)
, mkPCheck
, InferTypeInput(..)
, InferTypeOutput(..)
, MonadProxy
, BasicInferTypeConstraint
, MkInferType(..)
, InferTypeRules(..)
, InferTypeRulesOut(..)
, inferTypeOutput
) where
import Data.Foldable (asum)
import Data.Maybe (fromMaybe)
import Data.Proxy (Proxy(..))
import GHC.Exts (Constraint)
import Data.List.NonEmpty (NonEmpty)
import Control.Monad.Except (MonadError)
import Control.Monad.Error.Lens (throwing)
import qualified Data.Map as M
import Ast.Kind
import Ast.Type
import Ast.Pattern
import Ast.Term
import Ast.Error
import Ast.Error.Common
import Ast.Warning
import Data.Functor.Rec
import Util.TypeList
import Util.MonadProxy
import Rules.Unification
data InferTypeRule e w s r m ki ty pt tm a =
InferTypeBase (Term ki ty pt tm a -> Maybe (m (Type ki ty a)))
| InferTypePCheck ((Term ki ty pt tm a -> m (Type ki ty a)) -> (Pattern pt a -> Type ki ty a -> m [Type ki ty a]) -> Term ki ty pt tm a -> Maybe (m (Type ki ty a)))
| InferTypeRecurse ((Term ki ty pt tm a -> m (Type ki ty a)) -> Term ki ty pt tm a -> Maybe (m (Type ki ty a)))
| InferTypeRecurseKind ((Type ki ty a -> m (Kind ki a)) -> (Term ki ty pt tm a -> m (Type ki ty a)) -> Term ki ty pt tm a -> Maybe (m (Type ki ty a)))
fixInferTypeRule :: (Type ki ty a -> m (Kind ki a))
-> (Term ki ty pt tm a -> m (Type ki ty a))
-> (Pattern pt a -> Type ki ty a -> m [Type ki ty a])
-> InferTypeRule e w s r m ki ty pt tm a
-> Term ki ty pt tm a
-> Maybe (m (Type ki ty a))
fixInferTypeRule _ _ _ (InferTypeBase f) = f
fixInferTypeRule _ inferFn checkFn (InferTypePCheck f) = f inferFn checkFn
fixInferTypeRule _ inferFn _ (InferTypeRecurse f) = f inferFn
fixInferTypeRule inferKindFn inferTypeFn _ (InferTypeRecurseKind f) = f inferKindFn inferTypeFn
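-- | Combine a list of 'InferTypeRule's into a single inference function. The
-- first rule that applies to the term wins, the inferred type is
-- post-processed with the supplied normalisation function, and an
-- unknown-type error (via '_UnknownTypeError') is raised if no rule matches.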
mkInferType :: (MonadError e m, AsUnknownTypeError e)
=> (Type ki ty a -> m (Kind ki a))
-> (Type ki ty a -> Type ki ty a)
-> (Pattern pt a -> Type ki ty a -> m [Type ki ty a])
-> [InferTypeRule e w s r m ki ty pt tm a]
-> Term ki ty pt tm a
-> m (Type ki ty a)
mkInferType inferKindFn normalizeFn pc rules =
let
go tm =
fmap normalizeFn .
fromMaybe (throwing _UnknownTypeError ()) .
asum .
fmap (\r -> fixInferTypeRule inferKindFn go pc r tm) $
rules
in
go
mkCheckType' :: (Eq a, EqRec (ty ki), Monad m)
=> (ExpectedType ki ty a -> ActualType ki ty a -> m ())
-> (Term ki ty pt tm a -> m (Type ki ty a))
-> Term ki ty pt tm a
-> Type ki ty a
-> m ()
mkCheckType' expectTypeFn inferTypeFn =
let
go tm ty = do
tyAc <- inferTypeFn tm
expectTypeFn (ExpectedType ty) (ActualType tyAc)
in
go
data PCheckRule e m pt ki ty a =
PCheckBase (Pattern pt a -> Type ki ty a -> Maybe (m [Type ki ty a]))
| PCheckRecurse ((Pattern pt a -> Type ki ty a -> m [Type ki ty a]) -> Pattern pt a -> Type ki ty a -> Maybe (m [Type ki ty a]))
fixPCheckRule :: (Pattern pt a -> Type ki ty a -> m [Type ki ty a])
-> PCheckRule e m pt ki ty a
-> Pattern pt a
-> Type ki ty a
-> Maybe (m [Type ki ty a])
fixPCheckRule _ (PCheckBase f) = f
fixPCheckRule pPCheckFn (PCheckRecurse f) = f pPCheckFn
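-- | Combine a list of 'PCheckRule's into a single pattern-checking function,
-- raising an unknown-type error (via '_UnknownTypeError') if no rule applies.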
mkPCheck :: (MonadError e m, AsUnknownTypeError e)
=> [PCheckRule e m pt ki ty a]
-> Pattern pt a
-> Type ki ty a
-> m [Type ki ty a]
mkPCheck rules x y =
let
go p ty =
fromMaybe (throwing _UnknownTypeError ()) .
asum .
fmap (\r -> fixPCheckRule go r p ty) $
rules
in
go x y
data InferTypeInput e w s r m mi ki ty pt tm a =
InferTypeInput {
iiUnifyRules :: [UnificationRule m (TyAst ki ty) (TyAstVar a)]
, iiInferTypeRules :: [InferTypeRule e w s r mi ki ty pt tm a]
, iiPCheckRules :: [PCheckRule e mi pt ki ty a]
}
instance Monoid (InferTypeInput e w s r m mi ki ty pt tm a) where
mempty =
InferTypeInput mempty mempty mempty
mappend (InferTypeInput u1 i1 c1) (InferTypeInput u2 i2 c2) =
InferTypeInput
(mappend u1 u2)
(mappend i1 i2)
(mappend c1 c2)
data InferTypeOutput e w s r m ki ty pt tm a =
InferTypeOutput {
ioUnify :: [UConstraint (Type ki ty) a] -> m (M.Map a (Type ki ty a))
, ioInfer :: Term ki ty pt tm a -> m (Type ki ty a)
, ioCheck :: Term ki ty pt tm a -> Type ki ty a -> m ()
}
class MkInferType i where
type MkInferTypeConstraint (e :: *) (w :: *) (s :: *) (r :: *) (m :: * -> *) (ki :: (* -> *) -> * -> *) (ty :: ((* -> *) -> * -> *) -> (* -> *) -> * -> *) a i :: Constraint
type InferTypeMonad (m :: * -> *) (ki :: (* -> *) -> * -> *) (ty :: ((* -> *) -> * -> *) -> (* -> *) -> * -> *) a i :: (* -> *)
type MkInferTypeErrorList (ki :: (* -> *) -> * -> *) (ty :: ((* -> *) -> * -> *) -> (* -> *) -> * -> *) (pt :: (* -> *) -> * -> *) (tm :: (((* -> *) -> * -> *) -> (((* -> *) -> * -> *) -> (* -> *) -> * -> *) -> ((* -> *) -> * -> *) -> (* -> *) -> * -> *)) a i :: [*]
type MkInferTypeWarningList (ki :: (* -> *) -> * -> *) (ty :: ((* -> *) -> * -> *) -> (* -> *) -> * -> *) (pt :: (* -> *) -> * -> *) (tm :: (((* -> *) -> * -> *) -> (((* -> *) -> * -> *) -> (* -> *) -> * -> *) -> ((* -> *) -> * -> *) -> (* -> *) -> * -> *)) a i :: [*]
expectType :: MkInferTypeConstraint e w s r m ki ty a i
=> Proxy (MonadProxy e w s r m)
-> Proxy i
-> ExpectedType ki ty a
-> ActualType ki ty a
-> InferTypeMonad m ki ty a i ()
expectTypeEq :: MkInferTypeConstraint e w s r m ki ty a i
=> Proxy (MonadProxy e w s r m)
-> Proxy i
-> Type ki ty a
-> Type ki ty a
-> InferTypeMonad m ki ty a i ()
expectTypeAllEq :: MkInferTypeConstraint e w s r m ki ty a i
=> Proxy (MonadProxy e w s r m)
-> Proxy i
-> NonEmpty (Type ki ty a)
-> InferTypeMonad m ki ty a i (Type ki ty a)
mkCheckType :: MkInferTypeConstraint e w s r m ki ty a i
=> Proxy (MonadProxy e w s r m)
-> Proxy i
-> (Term ki ty pt tm a -> InferTypeMonad m ki ty a i (Type ki ty a))
-> Term ki ty pt tm a
-> Type ki ty a
-> InferTypeMonad m ki ty a i ()
prepareInferType :: MkInferTypeConstraint e w s r m ki ty a i
=> Proxy (MonadProxy e w s r m)
-> Proxy i
-> (Type ki ty a -> InferTypeMonad m ki ty a i (Kind ki a))
-> (Type ki ty a -> Type ki ty a)
-> InferTypeInput e w s r m (InferTypeMonad m ki ty a i) ki ty pt tm a
-> InferTypeOutput e w s r m ki ty pt tm a
type BasicInferTypeConstraint e w s r (m :: * -> *) (ki :: (* -> *) -> * -> *) (ty :: ((* -> *) -> * -> *) -> (* -> *) -> * -> *) (pt :: (* -> *) -> * -> *) (tm :: (((* -> *) -> * -> *) -> (((* -> *) -> * -> *) -> (* -> *) -> * -> *) -> ((* -> *) -> * -> *) -> (* -> *) -> * -> *)) a i = ( MkInferType i
, Monad (InferTypeMonad m ki ty a i)
, MkInferTypeConstraint e w s r m ki ty a i
)
class MkInferType i => InferTypeRules i (k :: j) where
type InferTypeConstraint e w s r (m :: * -> *) (ki :: (* -> *) -> * -> *) (ty :: ((* -> *) -> * -> *) -> (* -> *) -> * -> *) (pt :: (* -> *) -> * -> *) (tm :: (((* -> *) -> * -> *) -> (((* -> *) -> * -> *) -> (* -> *) -> * -> *) -> ((* -> *) -> * -> *) -> (* -> *) -> * -> *)) a i k :: Constraint
type InferTypeErrorList (ki :: (* -> *) -> * -> *) (ty :: ((* -> *) -> * -> *) -> (* -> *) -> * -> *) (pt :: (* -> *) -> * -> *) (tm :: (((* -> *) -> * -> *) -> (((* -> *) -> * -> *) -> (* -> *) -> * -> *) -> ((* -> *) -> * -> *) -> (* -> *) -> * -> *)) a i k :: [*]
type InferTypeWarningList (ki :: (* -> *) -> * -> *) (ty :: ((* -> *) -> * -> *) -> (* -> *) -> * -> *) (pt :: (* -> *) -> * -> *) (tm :: (((* -> *) -> * -> *) -> (((* -> *) -> * -> *) -> (* -> *) -> * -> *) -> ((* -> *) -> * -> *) -> (* -> *) -> * -> *)) a i k :: [*]
inferTypeInput :: InferTypeConstraint e w s r m ki ty pt tm a i k
=> Proxy (MonadProxy e w s r m)
-> Proxy i
-> Proxy k
-> InferTypeInput e w s r m (InferTypeMonad m ki ty a i) ki ty pt tm a
instance MkInferType i => InferTypeRules i '[] where
type InferTypeConstraint e w s r m ki ty pt tm a i '[] =
MkInferTypeConstraint e w s r m ki ty a i
-- pull in error lists from MkInferType i
type InferTypeErrorList ki ty pt tm a i '[] =
Append
(MkInferTypeErrorList ki ty pt tm a i)
('[ ErrUnknownTypeError
, ErrUnexpectedType ki ty a
, ErrExpectedTypeEq ki ty a
, ErrExpectedTypeAllEq ki ty a
])
type InferTypeWarningList ki ty pt tm a i '[] =
MkInferTypeWarningList ki ty pt tm a i
inferTypeInput _ _ _ = mempty
instance (MkInferType i, InferTypeRules i k, InferTypeRules i ks) => InferTypeRules i (k ': ks) where
type InferTypeConstraint e w s r m ki ty pt tm a i (k ': ks) =
( InferTypeConstraint e w s r m ki ty pt tm a i k
, InferTypeConstraint e w s r m ki ty pt tm a i ks
)
type InferTypeErrorList ki ty pt tm a i (k ': ks) =
Append
(InferTypeErrorList ki ty pt tm a i k)
(InferTypeErrorList ki ty pt tm a i ks)
type InferTypeWarningList ki ty pt tm a i (k ': ks) =
Append
(InferTypeWarningList ki ty pt tm a i k)
(InferTypeWarningList ki ty pt tm a i ks)
inferTypeInput m i _ =
mappend
(inferTypeInput m i (Proxy :: Proxy k))
(inferTypeInput m i (Proxy :: Proxy ks))
class InferTypeRulesOut i k where
type InferTypeError (ki :: (* -> *) -> * -> *) (ty :: ((* -> *) -> * -> *) -> (* -> *) -> * -> *) (pt :: (* -> *) -> * -> *) (tm :: (((* -> *) -> * -> *) -> (((* -> *) -> * -> *) -> (* -> *) -> * -> *) -> ((* -> *) -> * -> *) -> (* -> *) -> * -> *)) a i k :: *
type InferTypeWarning (ki :: (* -> *) -> * -> *) (ty :: ((* -> *) -> * -> *) -> (* -> *) -> * -> *) (pt :: (* -> *) -> * -> *) (tm :: (((* -> *) -> * -> *) -> (((* -> *) -> * -> *) -> (* -> *) -> * -> *) -> ((* -> *) -> * -> *) -> (* -> *) -> * -> *)) a i k :: *
instance InferTypeRules i k => InferTypeRulesOut i k where
type InferTypeError ki ty pt tm a i k = ErrSum (InferTypeErrorList ki ty pt tm a i k)
type InferTypeWarning ki ty pt tm a i k = WarnSum (InferTypeWarningList ki ty pt tm a i k)
inferTypeOutput :: (MkInferTypeConstraint e w s r m ki ty a i, InferTypeRules i k, InferTypeConstraint e w s r m ki ty pt tm a i k)
=> Proxy (MonadProxy e w s r m)
-> Proxy i
-> Proxy k
-> (Type ki ty a -> InferTypeMonad m ki ty a i (Kind ki a))
-> (Type ki ty a -> Type ki ty a)
-> InferTypeOutput e w s r m ki ty pt tm a
inferTypeOutput m i k inferKindFn normalizeTypeFn =
prepareInferType m i inferKindFn normalizeTypeFn (inferTypeInput m i k)
| dalaing/type-systems | src/Rules/Type/Infer/Common.hs | bsd-3-clause | 11,722 | 0 | 16 | 3,682 | 5,673 | 3,019 | 2,654 | 220 | 1 |
module Numeric.Regression.Linear
(Model, compute, regress) where
import Control.Applicative
import Data.Foldable
import Data.Monoid
import Data.Traversable
import Numeric.AD
import Numeric.Regression.Internal
-- | A model using the given @f@ to store parameters of type @a@.
-- Can be thought of as some kind of vector throughout this
-- package.
type Model f a = f a
-- | Compute the predicted value for
-- the given model on the given observation
compute :: (Applicative v, Foldable v, Num a)
=> Model v a -- ^ theta vector, the model's parameters
-> v a -- ^ @x@ vector, with the observed numbers
-> a -- ^ predicted @y@ for this observation
compute theta x = theta `dot` x
{-# INLINE compute #-}
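-- For example (assuming the internal 'dot' is the usual dot product, and using
-- the qualified import @V@ from the 'regress' example below):
--
-- @
-- compute (V.fromList [1,2,3]) (V.fromList [4,5,6]) == 32
-- @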
-- | Cost function for a linear regression on a single observation
cost :: (Applicative v, Foldable v, Floating a)
=> Model v a -- ^ theta vector, the model's parameters
-> v a -- ^ @x@ vector
-> a -- ^ expected @y@ for the observation
-> a -- ^ cost
cost theta x y = 0.5 * (y - compute theta x) ^ (2 :: Int)
{-# INLINE cost #-}
-- | Cost function for a linear regression on a set of observations
totalCost :: (Applicative v, Foldable v, Applicative f, Foldable f, Floating a)
=> Model v a -- ^ theta vector, the model's parameters
-> f a -- ^ expected @y@ value for each observation
-> f (v a) -- ^ input data for each observation
-> a -- ^ total cost over all observations
totalCost theta ys xs =
let Acc n (Sum s) = foldMap acc $ liftA2 (cost theta) xs ys
in s / fromIntegral n
{-# INLINE totalCost #-}
-- | Given some observed \"predictions\" @ys@, the corresponding
-- input values @xs@ and initial values for the model's parameters @theta0@,
--
-- > regress ys xs theta0
--
-- returns a stream of values for the parameters that'll fit the data better
-- and better.
--
-- Example:
--
-- @
-- -- the theta we're approximating
-- realtheta :: Model V.Vector Double
-- realtheta = V.fromList [1.0, 2.0, 3.0]
--
-- -- let's start there and make 'regress'
-- -- get values that better fit the input data
-- theta0 :: Model V.Vector Double
-- theta0 = V.fromList [0.2, 3.0, 2.23]
--
-- -- input data. (output value, vector of values for each input)
-- ys_ex :: V.Vector Double
-- xs_ex :: V.Vector (V.Vector Double)
-- (ys_ex, xs_ex) = V.unzip . V.fromList $
-- [ (3, V.fromList [0, 0, 1])
-- , (1, V.fromList [1, 0, 0])
-- , (2, V.fromList [0, 1, 0])
-- , (6, V.fromList [1, 1, 1])
-- ]
--
-- -- stream of increasingly accurate parameters
-- thetaApproxs :: [Model V.Vector Double]
-- thetaApproxs = learnAll ys_ex xs_ex theta0
-- @
regress :: (Traversable v, Applicative v, Foldable v, Applicative f, Foldable f, Ord a, Floating a)
=> f a -- ^ expected @y@ value for each observation
-> f (v a) -- ^ input data for each observation
-> Model v a -- ^ initial parameters for the model, from which we'll improve
-> [Model v a] -- ^ a stream of increasingly accurate values
-- for the model's parameter to better fit the observations.
regress ys xs t0 =
gradientDescent (\theta -> totalCost theta (fmap auto ys) (fmap (fmap auto) xs))
t0
{-# INLINE regress #-}
| alpmestan/regress | src/Numeric/Regression/Linear.hs | bsd-3-clause | 3,337 | 0 | 12 | 847 | 533 | 303 | 230 | 40 | 1 |
------------------------------------------------------------
--
-- Author: Joao H de A Franco ([email protected])
--
-- Description: FFT implementation in Haskell
--
-- Date: 2013-Mar-28
--
-- License: Attribution-NonCommercial-ShareAlike 3.0 Unported
-- (CC BY-NC-SA 3.0)
--
-------------------------------------------------------------
module FFT (fft, ifft) where
import Data.Complex(Complex((:+)))
fft, ifft :: [Complex Double] -> [Complex Double]
fft = fft' 1
ifft = fft' (-1)
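-- Both transforms are scaled by sqrt (1 / n), so @ifft . fft@ is the identity
-- up to rounding. A small sanity check (illustrative only):
--
-- > fft [1, 1, 1, 1] ~> [2 :+ 0, 0 :+ 0, 0 :+ 0, 0 :+ 0]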
fft' :: Double -> [Complex Double] -> [Complex Double]
fft' e xs = if pow2 (length xs)
then let z = sqrt (1 / fromIntegral (length xs))
in map ((z :+ 0) *) $ fft'' e xs
else error "Number of points is not a power of 2"
where pow2 n
| n == 1 || n == 2 = True
| otherwise = n `mod` 2 == 0 && pow2 (n `div` 2)
fft'' :: Double -> [Complex Double] -> [Complex Double]
fft'' _ [] = []
fft'' _ [x] = [x]
fft'' e xs = fft'' e (evens xs) <+> t (fft'' e (odds xs))
where (<+>) r s = zipWith (+) (r ++ r) (s ++ map negate s)
evens [] = []
evens [u] = [u]
evens (v:_:vs) = v:evens vs
odds = evens . drop 1
n = 2 * pi / fromIntegral (length xs)
t = zipWith (\k z -> z * exp (0 :+ k * n * e)) ([0..] :: [Double])
| akru/ColorSound | src/FFT.hs | bsd-3-clause | 1,341 | 0 | 15 | 385 | 534 | 287 | 247 | 24 | 3 |
{-
Copyright (c) 2013, Genome Research Limited
Author: Nicholas Clarke <[email protected]>
Rewrite of hgc-deploy in Haskell, aiming to be more configurable and maintainable.
Steps involved:
1. Become root.
2. Create temporary directories.
3. Modify the config and fstab.
4. Establish a union mount for the root filesystem.
5. Modify the contents of the capsule to add the external user as an autologin.
6. Run the capsule in daemon mode.
7. Connect to the capsule using lxc-console.
8. Stop the capsule using lxc-stop.
9. Unmount the union filesystem.
10. Clean up the temporary directory?
-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Main where
import Control.Applicative
import Control.Concurrent
import Control.Exception (bracket)
import System.Exit (ExitCode(..))
import Control.Monad.Reader
import Data.List (intercalate)
import System.Console.GetOpt
import System.Directory (doesDirectoryExist)
import System.FilePath ((</>))
import System.Environment (getArgs)
import System.IO
import System.Log.Logger
import qualified System.Posix.Files as Files
import System.Posix.Types (UserID)
import qualified System.Posix.User as User
import System.Random (randomIO)
import Text.Printf (printf)
import Text.Regex.TDFA ((=~))
import qualified Hgcdeploy.Config as Cnf
import qualified Hgc.Cvmfs as Cvmfs
import Hgc.Directory
import qualified Hgc.Lxc as Lxc
import Hgc.Mount
import qualified Hgc.Union as Union
import Hgc.Shell
-- | Environment
newtype Env a = Env {
unEnv :: ReaderT Options IO a
} deriving (Applicative, Functor, Monad, MonadIO, MonadReader Options)
runEnv :: Env a -> Options -> IO a
runEnv = runReaderT . unEnv
data CleanMethod = Chown UserID | Delete
data RunMethod = Interactive | Command String
data Options = Options {
optRepository :: String
, optMount :: [FilePath] -- ^ Resources to mount in the capsule.
, optVerbose :: Bool
, optUnionType :: Union.Union
, optRetainTemp :: Bool
}
defaultOptions :: Options
defaultOptions = Options {
optRepository = "mercury.repo"
, optMount = []
, optVerbose = False
, optUnionType = Union.aufs
, optRetainTemp = False
}
setOptions :: [OptDescr (Options -> Options)]
setOptions =
[
Option ['m'] ["mount"] (ReqArg (\n o -> o { optMount = n : optMount o }) "RESOURCE")
"Load the specified resource into the capsule."
, Option ['r'] ["repository"] (ReqArg (\n o -> o { optRepository = n }) "REPOSITORY")
"Use the specified repository name (defaults to mercury.repo)."
, Option ['t'] ["union-type"] (ReqArg (\n o -> setUnionType o n) "UNION_TYPE")
"Set the type of filesystem used to implement the union mount. Currently supported are aufs and overlayfs."
, Option ['v'] ["verbose"] (NoArg (\o -> o { optVerbose = True }))
"Enable verbose output."
, Option [] ["retain-temp"] (NoArg (\o -> o { optRetainTemp = True }))
("Retain the temporary files under " ++ Cnf.runPath)
] where setUnionType o "aufs" = o { optUnionType = Union.aufs }
setUnionType o "overlayfs" = o { optUnionType = Union.overlayfs }
setUnionType o _ = o
usage :: String
usage = usageInfo header setOptions
where header = "Launch a Mercury capsule.\n" ++
"Usage: hgc-deploy [Option...] capsule [command]"
main :: IO ()
main = do
args <- getArgs
case (getOpt Permute setOptions args) of
(o,[f],[]) -> runEnv (deploy f Interactive) (foldl (flip id) defaultOptions o)
(o,[f,c],[]) -> runEnv (deploy f $ Command c) (foldl (flip id) defaultOptions o)
(_,_,errs) -> putStrLn (concat errs ++ "\n" ++ usage)
deploy :: String -- ^ Capsule
-> RunMethod -- ^ Command
-> Env ()
deploy capsule command = ask >>= \options -> do
liftIO $ when (optVerbose options) $ updateGlobalLogger "hgc" (setLevel DEBUG)
liftIO . debugM "hgc" $ case command of
Interactive -> "Deploying capsule " ++ capsule ++ " interactively"
Command c -> "Deploying capsule " ++ capsule ++ " with command " ++ c
liftIO $ do
debugM "hgc" $ "Setting safe environment."
setSafeEnv
liftIO $ do
unless (capsule =~ validFileRegex) $ ioError . userError $
"Invalid capsule name: " ++ capsule
unless ((optRepository options) =~ validFileRegex) $ ioError . userError $
"Invalid repository name: " ++ (optRepository options)
realUserID <- liftIO $ User.getRealUserID
let sourcePath = Cvmfs.base </> (optRepository options) </> capsule
cleanMethod = if (optRetainTemp options)
then Chown realUserID
else Delete
liftIO $ unlessM (doesDirectoryExist sourcePath) $ ioError . userError $
"Source path does not exist or is not a directory: " ++ sourcePath
(uuid, clonePath) <- cloneCapsule capsule sourcePath
withRoot $ do
withUnionMount (sourcePath </> "rootfs") clonePath $ do
addUser realUserID clonePath
setAutologinUser realUserID clonePath
let config = (clonePath </> "config")
case command of
Interactive -> withDetachedCapsule uuid config $
liftIO $ threadDelay 1000000 >> Lxc.console uuid 1
Command c -> liftIO $ Lxc.runCommand uuid config c
liftIO $ cleanTemp clonePath cleanMethod
where
unlessM p m = do
p' <- p
unless p' m
validFileRegex = "^[A-Za-z0-9_][A-Za-z0-9._-]*$"
-- | Clone the capsule into a temporary location.
cloneCapsule :: String -- ^ Name of the capsule template.
-> FilePath -- ^ Location of the capsule template.
-> Env (String, FilePath) -- ^ Capsule name, Location on system of the capsule.
cloneCapsule capsule sourcePath = ask >>= \options -> do
uuid <- liftIO $ do
rand <- liftM abs (randomIO :: IO Int)
un <- User.getLoginName
return $ un ++ "_" ++ capsule ++ "_" ++ (show rand)
liftIO $ debugM "hgc" $ "Setting unique capsule ID to " ++ uuid
let clonePath = Cnf.runPath </> uuid
liftIO $ debugM "hgc" $ "Source path: " ++ sourcePath ++ "\nClone path: " ++ clonePath
liftIO . mkdir $ clonePath
liftIO $ writeConfig uuid clonePath
liftIO $ writeFstab clonePath (optMount options)
return (uuid, clonePath)
where
writeConfig uuid clonePath =
Lxc.readConfig sourceConf >>= Lxc.writeConfig cloneConf . update
where
update c = Lxc.setConfig "lxc.rootfs" [clonePath </> "image"] .
Lxc.setConfig "lxc.mount" [clonePath </> "fstab"] .
Lxc.setConfig "lxc.utsname" [uuid] $ c
sourceConf = sourcePath </> "config"
cloneConf = clonePath </> "config"
writeFstab clonePath mounts' = do
mounts <- fmap (\a -> fmap (mkFstabEntry . mkBindMount) a) .
mapM (\a -> mkMountPoint scratchMntDir a) $ mounts'
readFile (sourcePath </> "fstab") >>=
writeFile (clonePath </> "fstab") . writeMounts mounts
where scratchMntDir = clonePath </> Cnf.scratch </> "mnt"
imageMntDir = clonePath </> Cnf.image </> "mnt"
mkBindMount (e,i) = Mount e (imageMntDir </> i) "none" [Bind] []
writeMounts mounts str = str ++ "\n" ++ unlines mounts
-- | Perform the given operation with seteuid root.
withRoot :: Env a -> Env a
withRoot f = ask >>= \options -> liftIO $
User.getRealUserID >>= \uid ->
bracket
(User.setUserID 0)
(\_ -> User.setUserID uid)
(\_ -> runEnv f options)
-- | Perform the given operation in a union mount.
withUnionMount :: FilePath -- ^ Lower (ro) dir.
-> FilePath -- ^ Clone path.
-> Env a -- ^ Operation to perform in union mount.
-> Env a -- ^ Result.
withUnionMount sourcePath clonePath f = ask >>= \options -> do
let unionfs = optUnionType options
let union = Mount "none" image (Union.name unionfs) [] [Union.format unionfs sourcePath scratch]
liftIO $ do
mkdir $ scratch -- rw dir
mkdir $ image -- union dir
liftIO $ bracket
(mount union)
(\_ -> umount union)
(\_ -> runEnv f options)
where image = clonePath </> Cnf.image
scratch = clonePath </> Cnf.scratch
{- | Adds the given user into the capsule environment by:
 - Adding entries to the /etc/passwd and /etc/shadow files.
- Creating a home directory.
-}
addUser :: UserID
-> FilePath -- ^ Clone path.
-> Env ()
addUser uid clonePath = do
liftIO $ debugM "hgc" $ "Adding user with ID " ++ (show uid) ++ " into container."
ue <- liftIO $ User.getUserEntryForID uid
let username = User.userName ue
intHomedir = "/home" </> username
extHomedir = clonePath </> "image" ++ intHomedir -- intHomeDir is absolute, so </> fails
newue = ue { User.homeDirectory = intHomedir }
liftIO $ do
debugM "hgc" $ printf "Username: %s\nHomedir: %s" username intHomedir
mkdir extHomedir
Files.setOwnerAndGroup extHomedir uid (-1)
withFile (clonePath </> Cnf.passwdFile) AppendMode (\h ->
let pwentry = mkPasswdEntry newue in
debugM "hgc" ("Adding passwd entry: " ++ pwentry) >>
hPutStrLn h pwentry
)
withFile (clonePath </> Cnf.shadowFile) AppendMode (\h ->
let pwentry = mkShadowEntry newue in
debugM "hgc" ("Adding shadow entry: " ++ pwentry) >>
hPutStrLn h pwentry
)
where mkPasswdEntry (User.UserEntry n p i g ge h s) =
intercalate ":" [n,p,show i,show g,ge,h,s]
mkShadowEntry (User.UserEntry n _ _ _ _ _ _) =
n ++ ":*:::::::"
-- | Set the autologin user.
setAutologinUser :: UserID
-> FilePath -- ^ Clone path.
-> Env ()
setAutologinUser uid clonePath = do
ue <- liftIO $ User.getUserEntryForID uid
let username = User.userName ue
liftIO $ safeSystem "sed" [
"-i"
, "s/" ++ Cnf.autologinVar ++ "/" ++ username ++ "/"
, clonePath </> Cnf.autologinFile
] >>= \ex -> case ex of
ExitSuccess -> return ()
ExitFailure r -> ioError . userError $
"Cannot set autologin user (exit code " ++ show r ++ ")."
-- | Perform the given operation with a running capsule.
withDetachedCapsule :: String -- ^ Capsule name.
-> FilePath -- ^ Config file location.
-> Env a -- ^ Operation to perform with running capsule.
-> Env a
withDetachedCapsule capsule config f = ask >>= \options ->
liftIO $ Lxc.withContainerDaemon capsule config (runEnv f options)
cleanTemp :: FilePath -- ^ Clone location
-> CleanMethod -- ^ Whether to delete or chown the temp dir
-> IO ()
cleanTemp clonePath Delete = do
debugM "hgc" $ "Removing directory " ++ clonePath
removeDirectoryRecursive NoFollow clonePath
cleanTemp clonePath (Chown uid) = do
debugM "hgc" $ "Changing ownership on directory " ++ clonePath
chownRecursive NoFollow uid (-1) clonePath
| wtsi-hgi/hgc-tools | hgc-deploy.hs | bsd-3-clause | 11,197 | 0 | 21 | 2,974 | 2,922 | 1,516 | 1,406 | 221 | 4 |
module Blog.BackEnd.AsyncLogHandler (AsyncLogHandler, asyncHandler) where
import System.Log.Handler
import System.IO
import System.Log
import Utilities ( now )
import Control.Concurrent ( forkIO )
import Control.Concurrent.Chan ( Chan, newChan, readChan, writeChan )
import qualified Control.Exception as CE
type Timestamp = String
type LogMessage = String
data AsyncLogHandler = AsyncLogHandler { channel :: Chan (LogRecord, LogMessage, Timestamp)
, level :: Priority }
instance LogHandler AsyncLogHandler where
setLevel alh p = alh { level = p }
getLevel alh = level alh
emit alh lr msg = do { n <- now
; writeChan (channel alh) (lr,msg,n) }
close _ = return () -- make this better
asyncHandler :: Int -> Handle -> Priority -> IO AsyncLogHandler
asyncHandler n h pri = do { c <- newChan
; forkIO $ append n 0 h c
; return $ AsyncLogHandler { channel = c
, level = pri } }
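-- A rough usage sketch (illustrative; 'updateGlobalLogger', 'rootLoggerName'
-- and 'addHandler' are the usual hslogger functions):
--
-- > h <- asyncHandler 100 stderr INFO
-- > updateGlobalLogger rootLoggerName (addHandler h)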
append :: Int -> Int -> Handle -> Chan (LogRecord, LogMessage, Timestamp) -> IO ()
append n i h c = do { ((p,m),l,ts) <- readChan c
; (hPutStrLn h $ ts ++ " [" ++ (show p) ++ "] " ++ l ++ " - " ++ m)
`CE.catch` (printex h)
; if i == n then
do { (hFlush h) `CE.catch` (printex h)
; append n 0 h c }
else
append n (i+1) h c }
printex :: Handle -> CE.Exception -> IO ()
printex h e = hPutStrLn System.IO.stderr $ "Error writing to log handle " ++ (show h) ++ ": " ++ show e | prb/perpubplat | src/Blog/BackEnd/AsyncLogHandler.hs | bsd-3-clause | 1,704 | 0 | 16 | 612 | 566 | 310 | 256 | 33 | 2 |
module StaticFlags where
import System.Environment
import System.IO.Unsafe
{-# NOINLINE aSSERTIONS #-}
aSSERTIONS :: Bool
aSSERTIONS = not $ "--no-assertions" `elem` (unsafePerformIO getArgs)
{-# NOINLINE qUIET #-}
qUIET :: Bool
qUIET = "-v0" `elem` (unsafePerformIO getArgs)
{-# NOINLINE tERMINATION_CHECK #-}
tERMINATION_CHECK :: Bool
tERMINATION_CHECK = not $ "--no-termination" `elem` (unsafePerformIO getArgs)
{-# NOINLINE eVALUATE_PRIMOPS #-}
eVALUATE_PRIMOPS :: Bool
eVALUATE_PRIMOPS = not $ "--no-primops" `elem` (unsafePerformIO getArgs)
| batterseapower/supercompilation-by-evaluation | StaticFlags.hs | bsd-3-clause | 553 | 0 | 7 | 71 | 119 | 72 | 47 | 15 | 1 |
{-# LANGUAGE GADTs, BangPatterns, ScopedTypeVariables #-}
module GHC.Cmm.CommonBlockElim
( elimCommonBlocks
)
where
import GhcPrelude hiding (iterate, succ, unzip, zip)
import GHC.Cmm.BlockId
import GHC.Cmm
import GHC.Cmm.Utils
import GHC.Cmm.Switch (eqSwitchTargetWith)
import GHC.Cmm.ContFlowOpt
import GHC.Cmm.Dataflow.Block
import GHC.Cmm.Dataflow.Graph
import GHC.Cmm.Dataflow.Label
import GHC.Cmm.Dataflow.Collections
import Data.Bits
import Data.Maybe (mapMaybe)
import qualified Data.List as List
import Data.Word
import qualified Data.Map as M
import Outputable
import qualified TrieMap as TM
import UniqFM
import Unique
import Control.Arrow (first, second)
-- -----------------------------------------------------------------------------
-- Eliminate common blocks
-- If two blocks are identical except for the label on the first node,
-- then we can eliminate one of the blocks. To ensure that the semantics
-- of the program are preserved, we have to rewrite each predecessor of the
-- eliminated block to proceed with the block we keep.
-- The algorithm iterates over the blocks in the graph,
-- checking whether it has seen another block that is equal modulo labels.
-- If so, then it adds an entry in a map indicating that the new block
-- is made redundant by the old block.
-- Otherwise, it is added to the useful blocks.
-- To avoid comparing every block with every other block repeatedly, we group
-- them by
-- * a hash of the block, ignoring labels (explained below)
-- * the list of outgoing labels
-- The hash is invariant under relabeling, so we only ever compare within
-- the same group of blocks.
--
-- The list of outgoing labels is updated as we merge blocks (that is why they
-- are not included in the hash, which we want to calculate only once).
--
-- All in all, two blocks should never be compared if they have different
-- hashes, and at most once otherwise. Previously, we were slower, and people
-- rightfully complained: #10397
-- TODO: Use optimization fuel
elimCommonBlocks :: CmmGraph -> CmmGraph
elimCommonBlocks g = replaceLabels env $ copyTicks env g
where
env = iterate mapEmpty blocks_with_key
    -- The order of blocks doesn't matter here. While we could use
    -- revPostorder, which drops unreachable blocks, that is already done in
    -- ContFlowOpt, which runs before this pass. So we use toBlockList since
    -- it is faster.
groups = groupByInt hash_block (toBlockList g) :: [[CmmBlock]]
blocks_with_key = [ [ (successors b, [b]) | b <- bs] | bs <- groups]
-- Invariant: The blocks in the list are pairwise distinct
-- (so avoid comparing them again)
type DistinctBlocks = [CmmBlock]
type Key = [Label]
type Subst = LabelMap BlockId
-- The outer list groups by hash. We retain this grouping throughout.
iterate :: Subst -> [[(Key, DistinctBlocks)]] -> Subst
iterate subst blocks
| mapNull new_substs = subst
| otherwise = iterate subst' updated_blocks
where
grouped_blocks :: [[(Key, [DistinctBlocks])]]
grouped_blocks = map groupByLabel blocks
merged_blocks :: [[(Key, DistinctBlocks)]]
(new_substs, merged_blocks) = List.mapAccumL (List.mapAccumL go) mapEmpty grouped_blocks
where
go !new_subst1 (k,dbs) = (new_subst1 `mapUnion` new_subst2, (k,db))
where
(new_subst2, db) = mergeBlockList subst dbs
subst' = subst `mapUnion` new_substs
updated_blocks = map (map (first (map (lookupBid subst')))) merged_blocks
-- Combine two lists of blocks.
-- While they are internally distinct they can still share common blocks.
mergeBlocks :: Subst -> DistinctBlocks -> DistinctBlocks -> (Subst, DistinctBlocks)
mergeBlocks subst existing new = go new
where
go [] = (mapEmpty, existing)
go (b:bs) = case List.find (eqBlockBodyWith (eqBid subst) b) existing of
-- This block is a duplicate. Drop it, and add it to the substitution
Just b' -> first (mapInsert (entryLabel b) (entryLabel b')) $ go bs
-- This block is not a duplicate, keep it.
Nothing -> second (b:) $ go bs
mergeBlockList :: Subst -> [DistinctBlocks] -> (Subst, DistinctBlocks)
mergeBlockList _ [] = pprPanic "mergeBlockList" empty
mergeBlockList subst (b:bs) = go mapEmpty b bs
where
go !new_subst1 b [] = (new_subst1, b)
go !new_subst1 b1 (b2:bs) = go new_subst b bs
where
(new_subst2, b) = mergeBlocks subst b1 b2
new_subst = new_subst1 `mapUnion` new_subst2
-- -----------------------------------------------------------------------------
-- Hashing and equality on blocks
-- Below here is mostly boilerplate: hashing blocks ignoring labels,
-- and comparing blocks modulo a label mapping.
-- To speed up comparisons, we hash each basic block modulo jump labels.
-- The hashing is a bit arbitrary (the numbers are completely arbitrary),
-- but it should be fast and good enough.
-- We want to get as many small buckets as possible, as comparing blocks is
-- expensive. So include as much as possible in the hash. Ideally everything
-- that is compared with (==) in eqBlockBodyWith.
type HashCode = Int
hash_block :: CmmBlock -> HashCode
hash_block block =
fromIntegral (foldBlockNodesB3 (hash_fst, hash_mid, hash_lst) block (0 :: Word32) .&. (0x7fffffff :: Word32))
-- UniqFM doesn't like negative Ints
where hash_fst _ h = h
hash_mid m h = hash_node m + h `shiftL` 1
hash_lst m h = hash_node m + h `shiftL` 1
hash_node :: CmmNode O x -> Word32
hash_node n | dont_care n = 0 -- don't care
hash_node (CmmAssign r e) = hash_reg r + hash_e e
hash_node (CmmStore e e') = hash_e e + hash_e e'
hash_node (CmmUnsafeForeignCall t _ as) = hash_tgt t + hash_list hash_e as
hash_node (CmmBranch _) = 23 -- NB. ignore the label
hash_node (CmmCondBranch p _ _ _) = hash_e p
hash_node (CmmCall e _ _ _ _ _) = hash_e e
hash_node (CmmForeignCall t _ _ _ _ _ _) = hash_tgt t
hash_node (CmmSwitch e _) = hash_e e
hash_node _ = error "hash_node: unknown Cmm node!"
hash_reg :: CmmReg -> Word32
hash_reg (CmmLocal localReg) = hash_unique localReg -- important for performance, see #10397
hash_reg (CmmGlobal _) = 19
hash_e :: CmmExpr -> Word32
hash_e (CmmLit l) = hash_lit l
hash_e (CmmLoad e _) = 67 + hash_e e
hash_e (CmmReg r) = hash_reg r
hash_e (CmmMachOp _ es) = hash_list hash_e es -- pessimal - no operator check
hash_e (CmmRegOff r i) = hash_reg r + cvt i
hash_e (CmmStackSlot _ _) = 13
hash_lit :: CmmLit -> Word32
hash_lit (CmmInt i _) = fromInteger i
hash_lit (CmmFloat r _) = truncate r
hash_lit (CmmVec ls) = hash_list hash_lit ls
hash_lit (CmmLabel _) = 119 -- ugh
hash_lit (CmmLabelOff _ i) = cvt $ 199 + i
hash_lit (CmmLabelDiffOff _ _ i _) = cvt $ 299 + i
hash_lit (CmmBlock _) = 191 -- ugh
hash_lit (CmmHighStackMark) = cvt 313
hash_tgt (ForeignTarget e _) = hash_e e
hash_tgt (PrimTarget _) = 31 -- lots of these
hash_list f = foldl' (\z x -> f x + z) (0::Word32)
cvt = fromInteger . toInteger
hash_unique :: Uniquable a => a -> Word32
hash_unique = cvt . getKey . getUnique
-- | Ignore these node types for equality
dont_care :: CmmNode O x -> Bool
dont_care CmmComment {} = True
dont_care CmmTick {} = True
dont_care CmmUnwind {} = True
dont_care _other = False
-- Utilities: equality and substitution on the graph.
-- Given a map ``subst'' from BlockID -> BlockID, we define equality.
eqBid :: LabelMap BlockId -> BlockId -> BlockId -> Bool
eqBid subst bid bid' = lookupBid subst bid == lookupBid subst bid'
lookupBid :: LabelMap BlockId -> BlockId -> BlockId
lookupBid subst bid = case mapLookup bid subst of
Just bid -> lookupBid subst bid
Nothing -> bid
-- Middle nodes and expressions can contain BlockIds, in particular in
-- CmmStackSlot and CmmBlock, so we have to use a special equality for
-- these.
--
eqMiddleWith :: (BlockId -> BlockId -> Bool)
-> CmmNode O O -> CmmNode O O -> Bool
eqMiddleWith eqBid (CmmAssign r1 e1) (CmmAssign r2 e2)
= r1 == r2 && eqExprWith eqBid e1 e2
eqMiddleWith eqBid (CmmStore l1 r1) (CmmStore l2 r2)
= eqExprWith eqBid l1 l2 && eqExprWith eqBid r1 r2
eqMiddleWith eqBid (CmmUnsafeForeignCall t1 r1 a1)
(CmmUnsafeForeignCall t2 r2 a2)
= t1 == t2 && r1 == r2 && eqListWith (eqExprWith eqBid) a1 a2
eqMiddleWith _ _ _ = False
eqExprWith :: (BlockId -> BlockId -> Bool)
-> CmmExpr -> CmmExpr -> Bool
eqExprWith eqBid = eq
where
CmmLit l1 `eq` CmmLit l2 = eqLit l1 l2
CmmLoad e1 _ `eq` CmmLoad e2 _ = e1 `eq` e2
CmmReg r1 `eq` CmmReg r2 = r1==r2
CmmRegOff r1 i1 `eq` CmmRegOff r2 i2 = r1==r2 && i1==i2
CmmMachOp op1 es1 `eq` CmmMachOp op2 es2 = op1==op2 && es1 `eqs` es2
CmmStackSlot a1 i1 `eq` CmmStackSlot a2 i2 = eqArea a1 a2 && i1==i2
_e1 `eq` _e2 = False
xs `eqs` ys = eqListWith eq xs ys
eqLit (CmmBlock id1) (CmmBlock id2) = eqBid id1 id2
eqLit l1 l2 = l1 == l2
eqArea Old Old = True
eqArea (Young id1) (Young id2) = eqBid id1 id2
eqArea _ _ = False
-- Equality on the body of a block, modulo a function mapping block
-- IDs to block IDs.
eqBlockBodyWith :: (BlockId -> BlockId -> Bool) -> CmmBlock -> CmmBlock -> Bool
eqBlockBodyWith eqBid block block'
{-
| equal = pprTrace "equal" (vcat [ppr block, ppr block']) True
| otherwise = pprTrace "not equal" (vcat [ppr block, ppr block']) False
-}
= equal
where (_,m,l) = blockSplit block
nodes = filter (not . dont_care) (blockToList m)
(_,m',l') = blockSplit block'
nodes' = filter (not . dont_care) (blockToList m')
equal = eqListWith (eqMiddleWith eqBid) nodes nodes' &&
eqLastWith eqBid l l'
eqLastWith :: (BlockId -> BlockId -> Bool) -> CmmNode O C -> CmmNode O C -> Bool
eqLastWith eqBid (CmmBranch bid1) (CmmBranch bid2) = eqBid bid1 bid2
eqLastWith eqBid (CmmCondBranch c1 t1 f1 l1) (CmmCondBranch c2 t2 f2 l2) =
c1 == c2 && l1 == l2 && eqBid t1 t2 && eqBid f1 f2
eqLastWith eqBid (CmmCall t1 c1 g1 a1 r1 u1) (CmmCall t2 c2 g2 a2 r2 u2) =
t1 == t2 && eqMaybeWith eqBid c1 c2 && a1 == a2 && r1 == r2 && u1 == u2 && g1 == g2
eqLastWith eqBid (CmmSwitch e1 ids1) (CmmSwitch e2 ids2) =
e1 == e2 && eqSwitchTargetWith eqBid ids1 ids2
eqLastWith _ _ _ = False
eqMaybeWith :: (a -> b -> Bool) -> Maybe a -> Maybe b -> Bool
eqMaybeWith eltEq (Just e) (Just e') = eltEq e e'
eqMaybeWith _ Nothing Nothing = True
eqMaybeWith _ _ _ = False
eqListWith :: (a -> b -> Bool) -> [a] -> [b] -> Bool
eqListWith f (a : as) (b : bs) = f a b && eqListWith f as bs
eqListWith _ [] [] = True
eqListWith _ _ _ = False
-- | Given a block map, ensure that all "target" blocks are covered by
-- the same ticks as the respective "source" blocks. This not only
-- means copying ticks, but also adjusting tick scopes where
-- necessary.
copyTicks :: LabelMap BlockId -> CmmGraph -> CmmGraph
copyTicks env g
| mapNull env = g
| otherwise = ofBlockMap (g_entry g) $ mapMap copyTo blockMap
where -- Reverse block merge map
blockMap = toBlockMap g
revEnv = mapFoldlWithKey insertRev M.empty env
insertRev m k x = M.insertWith (const (k:)) x [k] m
-- Copy ticks and scopes into the given block
copyTo block = case M.lookup (entryLabel block) revEnv of
Nothing -> block
Just ls -> foldr copy block $ mapMaybe (flip mapLookup blockMap) ls
copy from to =
let ticks = blockTicks from
CmmEntry _ scp0 = firstNode from
(CmmEntry lbl scp1, code) = blockSplitHead to
in CmmEntry lbl (combineTickScopes scp0 scp1) `blockJoinHead`
foldr blockCons code (map CmmTick ticks)
-- Group by [Label]
-- See Note [Compressed TrieMap] in coreSyn/TrieMap about the usage of GenMap.
groupByLabel :: [(Key, DistinctBlocks)] -> [(Key, [DistinctBlocks])]
groupByLabel =
go (TM.emptyTM :: TM.ListMap (TM.GenMap LabelMap) (Key, [DistinctBlocks]))
where
go !m [] = TM.foldTM (:) m []
go !m ((k,v) : entries) = go (TM.alterTM k adjust m) entries
where --k' = map (getKey . getUnique) k
adjust Nothing = Just (k,[v])
adjust (Just (_,vs)) = Just (k,v:vs)
groupByInt :: (a -> Int) -> [a] -> [[a]]
groupByInt f xs = nonDetEltsUFM $ List.foldl' go emptyUFM xs
-- See Note [Unique Determinism and code generation]
where
go m x = alterUFM addEntry m (f x)
where
addEntry xs = Just $! maybe [x] (x:) xs
| sdiehl/ghc | compiler/GHC/Cmm/CommonBlockElim.hs | bsd-3-clause | 12,845 | 0 | 15 | 3,134 | 3,591 | 1,881 | 1,710 | 189 | 24 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE TypeFamilies #-}
-- |
--
module Data.Time.Cube.Label
( DateTimeLensT(..)
-- * Raw date components
, datePart
-- * Lens accessors
-- ** Pure
, Data.Time.Cube.Label.get
, Data.Time.Cube.Label.set
, Data.Time.Cube.Label.modify
-- ** Monadic (currently not very monadic...)
, Data.Time.Cube.Label.getM
, Data.Time.Cube.Label.getM2
, Data.Time.Cube.Label.setM
, Data.Time.Cube.Label.modifyM
-- * Helper lenses
, epoch
, era
, century
, year
, month
, monthOfYear
, week
, day
, dayOfWeek
, hour
, minute
, second
, milli
, nano
, pico
, timeZoneOffset
) where
import Data.Time.Cube
import Control.Arrow (Kleisli(..), runKleisli, arr)
import Control.Category
import Control.Monad
import Control.Monad.Identity
import Control.Monad.State.Lazy as State
import Data.Label.Abstract as A
import Prelude hiding ((.), id)
dateTimeLens
:: (f -> StateT c m a)
-> (a -> f -> StateT c m f)
-> DateTimeLensT m c f a
dateTimeLens g s = DateTimeLensT (A.lens (Kleisli g) (Kleisli (uncurry s)))
-- |
-- A lens from a time into a time component
datePart :: (Functor m, Monad m, DateTimePart c f a) => DateTimeLensT m c f a
datePart = dateTimeLens dtg dts
newtype DateTimeLensT m c f a = DateTimeLensT{unDateTimeLens :: A.Lens (Kleisli (StateT c m)) f a}
deriving instance Monad m => Category (DateTimeLensT m c)
runDateTimeLensT
:: (Monad m, DateTime f)
=> Kleisli (StateT (DateTimeComponents f) m) f a
-> f
-> m a
runDateTimeLensT l f = evalStateT (runKleisli l f) (unpack f)
getM
:: (Functor m, Monad m, DateTime f)
=> DateTimeLensT m (DateTimeComponents f) f a
-> f
-> m a
getM = runDateTimeLensT . A.get . unDateTimeLens
getM2
:: (Functor m, Monad m, DateTime f)
=> (DateTimeLensT m (DateTimeComponents f) f a, DateTimeLensT m (DateTimeComponents f) f b)
-> f
-> m (a, b)
getM2 (l1, l2) f = do
a <- runDateTimeLensT (A.get (unDateTimeLens l1)) f
b <- runDateTimeLensT (A.get (unDateTimeLens l2)) f
return (a, b)
setM
:: (Functor m, Monad m, DateTime f)
=> DateTimeLensT m (DateTimeComponents f) f a
-> a
-> f
-> m f
setM (DateTimeLensT l) v = runDateTimeLensT (A.set l . arr (v,))
modifyM
:: (Functor m, Monad m, DateTime f)
=> DateTimeLensT m (DateTimeComponents f) f a
-> (a -> a)
-> f
-> m f
modifyM (DateTimeLensT l) v = runDateTimeLensT (A.modify l . arr (arr v,))
get
:: DateTime f
=> DateTimeLensT Identity (DateTimeComponents f) f a
-> f
-> a
get l = runIdentity . getM l
set
:: DateTime f
=> DateTimeLensT Identity (DateTimeComponents f) f a
-> a
-> f
-> f
set l v = runIdentity . setM l v
modify
:: DateTime f
=> DateTimeLensT Identity (DateTimeComponents f) f a
-> (a -> a)
-> f
-> f
modify l v = runIdentity . modifyM l v
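-- An illustrative sketch of how the pure accessors combine with the helper
-- lenses below. The value @someDateTime@ and the required 'DateTimePart'
-- (and numeric) instances are assumptions, not part of this module:
--
-- > get month someDateTime -- read the month
-- > set hour 12 someDateTime -- overwrite the hour
-- > modify year (+ 1) someDateTime -- bump the year
--
-- Because 'DateTimeLensT' has a 'Category' instance (derived above), these
-- lenses can also be composed with @(.)@ from "Control.Category".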
epoch
:: (Functor m, Monad m, DateTimePart c f Epoch)
=> DateTimeLensT m c f Epoch
epoch = datePart
era
:: (Functor m, Monad m, DateTimePart c f Era)
=> DateTimeLensT m c f Era
era = datePart
century
:: (Functor m, Monad m, DateTimePart c f Century)
=> DateTimeLensT m c f Century
century = datePart
year
:: (Functor m, Monad m, DateTimePart c f Year)
=> DateTimeLensT m c f Year
year = datePart
month
:: (Functor m, Monad m, DateTimePart c f Month)
=> DateTimeLensT m c f Month
month = datePart
monthOfYear
:: (Functor m, Monad m, DateTimePart c f MonthOfYear)
=> DateTimeLensT m c f MonthOfYear
monthOfYear = datePart
week
:: (Functor m, Monad m, DateTimePart c f Week)
=> DateTimeLensT m c f Week
week = datePart
dayOfWeek
:: (Functor m, Monad m, DateTimePart c f DayOfWeek)
=> DateTimeLensT m c f DayOfWeek
dayOfWeek = datePart
day
:: (Functor m, Monad m, DateTimePart c f Day)
=> DateTimeLensT m c f Day
day = datePart
hour
:: (Functor m, Monad m, DateTimePart c f Hour)
=> DateTimeLensT m c f Hour
hour = datePart
minute
:: (Functor m, Monad m, DateTimePart c f Minute)
=> DateTimeLensT m c f Minute
minute = datePart
second
:: (Functor m, Monad m, DateTimePart c f Second)
=> DateTimeLensT m c f Second
second = datePart
milli
:: (Functor m, Monad m, DateTimePart c f Milli)
=> DateTimeLensT m c f Milli
milli = datePart
nano
:: (Functor m, Monad m, DateTimePart c f Nano)
=> DateTimeLensT m c f Nano
nano = datePart
pico
:: (Monad m, Functor m, DateTimePart c f Pico)
=> DateTimeLensT m c f Pico
pico = datePart
timeZoneOffset
:: (Functor m, Monad m, DateTimePart c f TimeZoneOffset)
=> DateTimeLensT m c f TimeZoneOffset
timeZoneOffset = datePart
| alphaHeavy/time-cube | fclabels/Data/Time/Cube/Label.hs | bsd-3-clause | 4,750 | 0 | 12 | 1,027 | 1,802 | 966 | 836 | 169 | 1 |
-- | Examples of diagrams using the Turtle data type
import Diagrams.Prelude
import Diagrams.TwoD.Path.Turtle.Internal
import Diagrams.Backend.SVG
import Diagrams.Backend.SVG.CmdLine (defaultMain)
squareTurtle :: Diagram SVG R2
squareTurtle = getTurtleDiagram $
startTurtle #
forward x # right turn # setPenWidth 2.0 # setPenColour blue #
forward x # right turn # setPenWidth 3.0 # setPenColour red #
forward x # right turn # setPenWidth 0.5 # setPenColour green #
forward x
where
x = 2.0
turn = 90
main = defaultMain (squareTurtle # centerXY # pad 1.1)
| deepakjois/hs-logo | examples/Turtles.hs | bsd-3-clause | 575 | 0 | 18 | 105 | 177 | 89 | 88 | 14 | 1 |
module D8Lib.DataSpec where
import Test.Hspec
import qualified Data.Map.Strict as M
import D8Lib.Data
-- `main` is here so that this module can be run from GHCi on its own. It is
-- not needed for automatic spec discovery.
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "initState" $ do
it "constructs an appropriate map" $ do
let s = initState sampleRules
s `shouldBe` M.fromList [("a", 0), ("b", 0), ("c", 0)]
describe "testCnd" $ do
it "test a condition against the map" $ do
let s = initState sampleRules
let c = Cmp "a" Gt 1
testCnd c s `shouldBe` False
describe "apply" $ do
it "test a condition against the map" $ do
let s = initState sampleRules
let s' = M.insert "a" 3 s
let s'' = apply s' (head sampleRules)
M.lookup "a" s'' `shouldBe` Just 3
M.lookup "b" s'' `shouldBe` Just 5
describe "sample full run" $ do
it "sets appropraite states" $ do
let s0 = initState sampleRules
s0 `shouldBe` M.fromList [("a", 0), ("b", 0), ("c", 0)]
let s1 = apply s0 (sampleRules !! 0)
s1 `shouldBe` M.fromList [("a", 0), ("b", 0), ("c", 0)]
let r1@(Rule r1n r1a r1c) = sampleRules !! 1
testCnd r1c s1 `shouldBe` True
let s2 = apply s1 r1
s2 `shouldBe` M.fromList [("a", 1), ("b", 0), ("c", 0)]
let s3 = apply s2 (sampleRules !! 2)
s3 `shouldBe` M.fromList [("a", 1), ("b", 0), ("c", 10)]
let s4 = apply s3 (sampleRules !! 3)
s4 `shouldBe` M.fromList [("a", 1), ("b", 0), ("c", (-10))]
sampleRules =
[ Rule "b" (Inc 5) (Cmp "a" Gt 1)
, Rule "a" (Inc 1) (Cmp "b" Lt 5)
, Rule "c" (Dec (-10)) (Cmp "a" Gte 1)
, Rule "c" (Inc (-20)) (Cmp "c" Eq 10) ]
| wfleming/advent-of-code-2016 | 2017/D8/test/D8Lib/DataSpec.hs | bsd-3-clause | 1,734 | 0 | 18 | 472 | 777 | 405 | 372 | 43 | 1 |
{-# LANGUAGE DataKinds, FlexibleContexts, FlexibleInstances, GADTs #-}
module UI.Drawing
( Shape(..)
, Colour(..)
, DrawingF(..)
, Drawing
, Rendering
, RenderingF
, text
, clip
, drawingRectAlgebra
, renderingRectAlgebra
, drawingCoalgebra
, renderingCoalgebra
, renderingRects
, module Layout
) where
import Control.Comonad.Trans.Cofree
import Control.Monad.Free.Freer as Freer
import Control.Monad.Trans.Free.Freer as FreerF
import Data.Functor.Algebraic
import Data.Functor.Classes
import Data.Functor.Foldable hiding (Nil)
import Data.Functor.Union
import Data.Maybe (catMaybes, fromMaybe)
import qualified Linear.V2 as Linear
import UI.Layout as Layout
import UI.Font
import UI.Geometry
data Shape a = Rectangle (Linear.V2 a) (Linear.V2 a)
data Colour a = RGBA !a !a !a !a
data DrawingF a f where
Text :: Size (Maybe a) -> String -> DrawingF a (Size a)
Clip :: Size a -> f -> DrawingF a f
type Drawing a = Freer (DrawingF a)
type Rendering a = Freer (RenderingF a)
type RenderingF a = Union '[DrawingF a, LayoutF a]
text :: InUnion fs (DrawingF a) => Size (Maybe a) -> String -> Freer (Union fs) (Size a)
text maxSize str = inj (Text maxSize str) `Freer.Then` return
clip :: InUnion fs (DrawingF a) => Size a -> Freer (Union fs) b -> Freer (Union fs) b
clip size drawing = wrapU (Clip size drawing)
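-- An illustrative sketch of combining the two smart constructors above; the
-- concrete numbers and the field order of 'Size' (width, then height) are
-- assumptions made only for this example:
--
-- > clip (Size 100 20) (text (Size (Just 100) Nothing) "hello, world")
--
-- i.e. lay the string out against a maximum width of 100 and clip the result
-- to a 100x20 box.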
drawingRectAlgebra :: Real a => Algebra (Fitting (DrawingF a) a) (Maybe (Rect a))
drawingRectAlgebra (FittingState _ origin _ :< r) = Rect origin <$> case r of
FreerF.Return size -> Just size
drawing `FreerF.Then` runF -> case drawing of
Text maxSize s -> size <$> runF (measureText (width maxSize) s)
Clip size _ -> Just size
renderingRectAlgebra :: Real a => Algebra (Fitting (RenderingF a) a) (Maybe (Rect a))
renderingRectAlgebra (a@(FittingState _ origin _) :< r) = case r of
FreerF.Return size -> Just (Rect origin size)
union `FreerF.Then` continue -> caseU union
$ (\ d -> drawingRectAlgebra (a :< (d `FreerF.Then` continue)))
    :. (\ l -> layoutAlgebra (a :< (l `FreerF.Then` continue)))
:. Nil
drawingCoalgebra :: Coalgebra (Fitting (DrawingF a) a) (Fitting (DrawingF a) a (Drawing a (Size a)))
drawingCoalgebra = liftBidiCoalgebra drawingFCoalgebra
drawingFCoalgebra :: CoalgebraFragment (DrawingF a) (FittingState a) (Size a)
drawingFCoalgebra state run = flip FreerF.Then (run state)
renderingCoalgebra :: Real a => Coalgebra (Fitting (RenderingF a) a) (Fitting (RenderingF a) a (Rendering a (Size a)))
renderingCoalgebra = liftBidiCoalgebra (\ state run union -> caseU union
$ (\ d -> hoistFreerF inj (drawingFCoalgebra state run d))
:. (\ l -> hoistFreerF inj (layoutFCoalgebra state run l))
:. Nil)
renderingRects :: Real a => Rendering a (Size a) -> [Rect a]
renderingRects = hylo (wrapAlgebra catMaybes (fmap Just) (collect renderingRectAlgebra)) renderingCoalgebra . (FittingState Full (pure 0) (pure Nothing) :<) . project
-- Instances
instance (Real a, Show a) => Show1 (DrawingF a) where
liftShowsPrec sp _ d drawing = case drawing of
Text size string -> showsBinaryWith showsPrec showsPrec "Text" d size string . showChar ' ' . sp d (fromMaybe <$> measureText (width size) string <*> size)
Clip size f -> showsBinaryWith showsPrec sp "Clip" d size f
instance (Real a, Show a, Show b) => Show (DrawingF a b) where
showsPrec = liftShowsPrec showsPrec showList
instance Real a => Foldable (DrawingF a) where
foldMap f drawing = case drawing of
Text (Size w _) s -> f (measureText w s)
Clip _ child -> f child
instance Eq2 DrawingF where
liftEq2 eqA eqF d1 d2 = case (d1, d2) of
(Text m1 s1, Text m2 s2) -> liftEq (liftEq eqA) m1 m2 && s1 == s2
(Clip s1 c1, Clip s2 c2) -> liftEq eqA s1 s2 && eqF c1 c2
_ -> False
instance Eq a => Eq1 (DrawingF a) where
liftEq = liftEq2 (==)
instance (Eq a, Eq f) => Eq (DrawingF a f) where
(==) = liftEq (==)
| robrix/ui-effects | src/UI/Drawing.hs | bsd-3-clause | 3,869 | 2 | 17 | 717 | 1,624 | 844 | 780 | 91 | 3 |
-- | Heist 0.5.x
module Heist.Future
( viewWith
, viewWithTemplates
, viewWithText
, mapSplices
) where
import Control.Arrow (second)
import Control.Monad (liftM)
import Data.Text (Text)
import Text.Templating.Heist
import qualified Text.XmlHtml as X
------------------------------------------------------------------------------
-- | Runs the parameter node's children and returns the resulting node list.
-- By itself this function is a simple passthrough splice that makes the
-- spliced node disappear. In combination with locally bound splices, this
-- function makes it easier to pass the desired view into your splices.
runChildren :: Monad m => Splice m
runChildren = runNodeList . X.childNodes =<< getParamNode
------------------------------------------------------------------------------
-- | Binds a list of splices before using the children of the spliced node as
-- a view.
viewWith :: (Monad m)
=> [(Text, Splice m)]
-- ^ List of splices to bind before running the param nodes.
-> Splice m
-- ^ Returns the passed in view.
viewWith splices = localTS (bindSplices splices) runChildren
------------------------------------------------------------------------------
-- | Wrapper around viewWith that applies a transformation function to the
-- second item in each of the tuples before calling viewWith.
viewTrans :: (Monad m)
=> (b -> Splice m)
-- ^ Splice generating function
-> [(Text, b)]
-- ^ List of tuples to be bound
-> Splice m
viewTrans f = viewWith . map (second f)
------------------------------------------------------------------------------
-- | Like viewWith but using constant templates rather than dynamic splices.
viewWithTemplates :: (Monad m) => [(Text, Template)] -> Splice m
viewWithTemplates = viewTrans return
------------------------------------------------------------------------------
-- | Like viewWith but using literal text rather than dynamic splices.
viewWithText :: (Monad m) => [(Text, Text)] -> Splice m
viewWithText = viewTrans (return . (:[]) . X.TextNode)
------------------------------------------------------------------------------
-- | Maps a splice generating function over a list and concatenates the
-- results.
mapSplices :: (Monad m)
=> (a -> Splice m)
-- ^ Function applied to each element of the list to generate splices
-> [a]
-- ^ List of items to generate splices for
-> Splice m
-- ^ The result of all splices concatenated together.
mapSplices f vs = liftM concat $ mapM f vs
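-- A hedged usage sketch (the element name and item list are made up, and the
-- Text literals assume OverloadedStrings): render one <li> node per list item
-- and concatenate the results into a single splice.
--
-- > mapSplices (\t -> return [X.Element "li" [] [X.TextNode t]]) ["a", "b"]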
------------------------------------------------------------------------------
localTS :: Monad m
=> (TemplateState m -> TemplateState m)
-> TemplateMonad m a
-> TemplateMonad m a
localTS f k = do
ts <- getTS
putTS $ f ts
res <- k
restoreTS ts
return res
| jystic/mothership | src/Heist/Future.hs | bsd-3-clause | 2,951 | 0 | 10 | 635 | 482 | 266 | 216 | 40 | 1 |
module Test.Interval where
import Data.Interval
import Control.Monad
import Test.SmallCheck
import Test.SmallCheck.Series
instance Monad m => Serial m Interval where
series = cons2 f
where f a (Positive l) = Interval a (a + l)
subintervals :: Monad m => Interval -> Series m Interval
subintervals (Interval a b) = do
a' <- msum [return x | x <- [a .. b - 1]]
b' <- msum [return x | x <- [a' + 1 .. b]]
return (Interval a' b')
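-- A sketch of what this enumerates: for @Interval 0 2@ the candidate
-- subintervals are @Interval 0 1@, @Interval 0 2@ and @Interval 1 2@ (the
-- interval itself is included). The order in which the Series yields them
-- depends on the smallcheck search strategy.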
| ian-mi/interval-set | tests/Test/Interval.hs | bsd-3-clause | 454 | 0 | 13 | 108 | 201 | 102 | 99 | -1 | -1 |
-- https://www.hackerrank.com/challenges/plus-minus/problem
import Control.Exception.Base(assert)
import Text.Printf(printf)
readInputList :: IO([Int])
readInputList = do
listSize <- readLn :: IO Int
line <- getLine
let inputList = map (read :: String -> Int) $ words line
assert (listSize == length inputList) (return inputList)
getNumbers :: [Int] -> [Int]
getNumbers inputList =
let positives = filter (>0) inputList
zeros = filter (==0) inputList
negatives = filter (<0) inputList
in map length $ positives:negatives:zeros:[]
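-- Illustrative example: getNumbers [-4, 3, -9, 0, 4, 1] == [3, 2, 1], i.e. the
-- counts of positive, negative and zero entries, in that order.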
ratio :: Int -> Int -> Double
ratio listSize number = fromIntegral(number) / fromIntegral(listSize)
main = do
inputList <- readInputList
let numbers = getNumbers(inputList)
let results = map (ratio (length inputList)) numbers
mapM_ (printf "%.6f\n") results
| julianespinel/trainning | hackerrank/PlusMinus.hs | mit | 825 | 0 | 14 | 142 | 313 | 159 | 154 | 21 | 1 |
--
--
--
-----------------
-- Exercise 6.20.
-----------------
--
--
--
module E'6'20 where
import E'6''8 ( rotate90 )
import Pictures
(
Picture
, invert
, rotate
, invertColour
, horse
)
-- Transform a picture first (using (map reverse (rotate90 picture)) or something similar).
-- Then use the functions from Picture.hs on the result and it will work.
-- Another "working solution":
rowsToColumns :: Picture -> Picture
rowsToColumns picture
= map reverse (rotate90 picture)
horse'C :: Picture
horse'C
= rowsToColumns horse
-- flipV --
flipV'C :: Picture -> Picture
flipV'C
= reverse
-----------
-- flipH --
flipH'C :: Picture -> Picture
flipH'C
= map reverse
-----------
-- rotate --
rotate'C :: Picture -> Picture
rotate'C
= flipH'C . flipV'C
------------
-- invertColour --
invertColour'C :: Picture -> Picture
invertColour'C picture'C
= invertColour picture'C
invert'C :: Char -> Char
invert'C char
= invert char
------------------
-- height --
height'C :: Picture -> Int
height'C [] = 0
height'C picture'C = (length . head) picture'C
------------
-- width --
width'C :: Picture -> Int
width'C
= length
-----------
-- above --
above'C :: Picture -> Picture -> Picture
above'C top bottom
| widthTop < widthBottom = zipWith (++) paddedTop bottom
| widthBottom < widthTop = zipWith (++) top paddedBottom
| otherwise = zipWith (++) top bottom
where
widthTop :: Int
widthTop = width'C top
widthBottom :: Int
widthBottom = width'C bottom
padsTop :: [[Char]]
padsTop = replicate (widthBottom - widthTop) (replicate (height'C top) '.')
paddedTop :: [[Char]]
paddedTop = top ++ padsTop
padsBottom :: [[Char]]
padsBottom = replicate (widthTop - widthBottom) (replicate (height'C bottom) '.')
paddedBottom :: [[Char]]
paddedBottom = bottom ++ padsBottom
-----------
-- beside --
beside'C :: Picture -> Picture -> Picture
beside'C left right
| heightLeft < heightRight = paddedLeft ++ right
| heightRight < heightLeft = left ++ paddedRight
| otherwise = left ++ right
where
heightLeft :: Int
heightLeft = height'C left
heightRight :: Int
heightRight = height'C right
padsLeft :: [[Char]]
padsLeft = replicate (width'C left) (replicate (heightRight - heightLeft) '.')
paddedLeft :: [[Char]]
paddedLeft = (zipWith (++) left padsLeft)
padsRight :: [[Char]]
padsRight = replicate (width'C right) (replicate (heightLeft - heightRight) '.')
paddedRight :: [[Char]]
paddedRight = (zipWith (++) right padsRight)
------------
| pascal-knodel/haskell-craft | _/links/E'6'20.hs | mit | 2,651 | 0 | 11 | 614 | 710 | 397 | 313 | 70 | 1 |
--
--
--
-----------------
-- Exercise 11.7.
-----------------
--
--
--
module E'11''7 where
isNonWhitespace
-- Lambda abstraction:
= \character -> not $ character `elem` whitespace
-- ...
where
whitespace :: [Char]
whitespace = " \t\n"
{- GHCi>
isNonWhitespace 'a'
isNonWhitespace ' '
-}
-- True
-- False
| pascal-knodel/haskell-craft | _/links/E'11''7.hs | mit | 339 | 0 | 7 | 81 | 53 | 38 | 15 | 5 | 1 |
{-|
Module abstracting the peer map implementation.
This is abstracted separately since the speed of peermap updates can
be a significant part of the total runtime, and as such changing the
implementation should be easy in case it's needed.
-}
{-
Copyright (C) 2009 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
-}
module Ganeti.HTools.PeerMap
( PeerMap
, Key
, Elem
, empty
, accumArray
, Ganeti.HTools.PeerMap.find
, add
, remove
, maxElem
) where
import Data.Maybe (fromMaybe)
import Data.List
import Data.Ord (comparing)
import Ganeti.HTools.Types
type Key = Ndx
type Elem = Int
type PeerMap = [(Key, Elem)]
-- * Initialization functions
-- | Create a new empty map.
empty :: PeerMap
empty = []
-- | Our reverse-compare function.
pmCompare :: (Key, Elem) -> (Key, Elem) -> Ordering
pmCompare a b = comparing snd b a
-- | Add or update (via a custom function) an element.
addWith :: (Elem -> Elem -> Elem) -> Key -> Elem -> PeerMap -> PeerMap
addWith fn k v lst =
case lookup k lst of
Nothing -> insertBy pmCompare (k, v) lst
Just o -> insertBy pmCompare (k, fn o v) (remove k lst)
-- | Create a PeerMap from an association list, with possible duplicates
accumArray :: (Elem -> Elem -> Elem) -- ^ function used to merge the elements
-> [(Key, Elem)] -- ^ source data
-> PeerMap -- ^ results
accumArray _ [] = empty
accumArray fn ((k, v):xs) = addWith fn k v $ accumArray fn xs
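-- For example (illustrative, merging duplicates with (+)):
--
-- > accumArray (+) [(1, 2), (1, 3), (2, 1)] == [(1, 5), (2, 1)]
--
-- duplicate keys are merged and the result is kept sorted by decreasing Elem.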
-- * Basic operations
-- | Returns either the value for a key or zero if not found
find :: Key -> PeerMap -> Elem
find k = fromMaybe 0 . lookup k
-- | Add an element to a peermap, overwriting the previous value
add :: Key -> Elem -> PeerMap -> PeerMap
add = addWith (flip const)
-- | Remove an element from a peermap
remove :: Key -> PeerMap -> PeerMap
remove _ [] = []
remove k ((x@(x', _)):xs) = if k == x'
then xs
else x:remove k xs
-- | Find the maximum element.
--
-- Since this is a sorted list, we just get the value at the head of
-- the list, or zero for a null list
maxElem :: PeerMap -> Elem
maxElem (x:_) = snd x
maxElem _ = 0
| ekohl/ganeti | htools/Ganeti/HTools/PeerMap.hs | gpl-2.0 | 2,847 | 0 | 10 | 681 | 537 | 303 | 234 | 43 | 2 |
{- |
Module      :  CASL_DL/PredefinedCASLAxioms.hs
Description :  Predefined CASL signature and axioms for CASL_DL (with inlined axioms)
Copyright : (c) Uni and DFKI Bremen 2005-2007
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : portable
-}
module CASL_DL.PredefinedCASLAxioms
( predefSign
, predefinedSign
, predefSign2
, datatypeSigns
, thing
, nothing
, conceptPred
, dataPred
, dataS
, predefinedAxioms
, mkNName
, mkDigit
, joinDigits
, negateInt
, integer
, float
, negateFloat
, posInt
, nonPosInt
, decimal
, double
, upcast
, mkDecimal
, mkFloat
, consChar
, emptyStringTerm
, trueT
, falseT
, nonNegInt
, negIntS
, stringS
) where
import CASL.AS_Basic_CASL
import CASL.Sign
import OWL2.Keywords
import Common.AS_Annotation
import Common.Id
import Common.GlobalAnnotations
import qualified Common.Lib.Rel as Rel
import qualified Common.Lib.MapSet as MapSet
import qualified Data.Map as Map
import Data.Char
hetsPrefix :: String
hetsPrefix = ""
-- | OWL topsort Thing
thing :: SORT
thing = stringToId thingS
n :: Range
n = nullRange
nothing :: SORT
nothing = stringToId nothingS
-- | OWL Data topSort DATA
dataS :: SORT
dataS = stringToId dATAS
integer :: SORT
integer = stringToId integerS
float :: SORT
float = stringToId floatS
decimal :: SORT
decimal = stringToId decimalS
double :: SORT
double = stringToId doubleS
posInt :: SORT
posInt = stringToId positiveIntegerS
negIntS :: SORT
negIntS = stringToId negativeIntegerS
nonPosInt :: SORT
nonPosInt = stringToId nonPositiveIntegerS
nonNegInt :: SORT
nonNegInt = stringToId nonNegativeIntegerS
classPredType :: PRED_TYPE
classPredType = Pred_type [thing] n
conceptPred :: PredType
conceptPred = toPredType classPredType
dataPred :: PredType
dataPred = PredType [dataS, dataS]
boolS :: SORT
boolS = stringToId "boolean"
boolT :: OpType
boolT = mkTotOpType [] boolS
trueS :: Id
trueS = stringToId "True"
falseS :: Id
falseS = stringToId "False"
mkConst :: Id -> OpType -> TERM ()
mkConst i o = mkAppl (mkQualOp i $ toOP_TYPE o) []
trueT :: TERM ()
trueT = mkConst trueS boolT
falseT :: TERM ()
falseT = mkConst falseS boolT
natT :: OpType
natT = mkTotOpType [] nonNegInt
-- | create a term of type nonNegativeInteger
mkDigit :: Int -> TERM ()
mkDigit i = mkConst (stringToId $ show i) natT
unMinus :: Id
unMinus = mkId [mkSimpleId "-", placeTok]
minusTy :: OpType
minusTy = mkTotOpType [integer] integer
minusFloat :: OpType
minusFloat = mkTotOpType [float] float
negateTy :: OpType -> TERM () -> TERM ()
negateTy o t = mkAppl (mkQualOp unMinus $ toOP_TYPE o) [t]
-- | negate a term of type integer
negateInt :: TERM () -> TERM ()
negateInt = negateTy minusTy
-- | negate a term of type float
negateFloat :: TERM () -> TERM ()
negateFloat = negateTy minusFloat
atAt :: Id
atAt = mkInfix "@@"
atAtTy :: OpType
atAtTy = mkTotOpType [nonNegInt, nonNegInt] nonNegInt
mkBinOp :: Id -> OpType -> TERM () -> TERM () -> TERM ()
mkBinOp i o t1 t2 = mkAppl (mkQualOp i $ toOP_TYPE o) [t1, t2]
-- | join two terms of type nonNegativeInteger
joinDigits :: TERM () -> TERM () -> TERM ()
joinDigits = mkBinOp atAt atAtTy
dec :: Id
dec = mkInfix ":::"
decTy :: OpType
decTy = mkTotOpType [nonNegInt, nonNegInt] float
{- | create the float given by two non-negative integers separated by the
decimal point -}
mkDecimal :: TERM () -> TERM () -> TERM ()
mkDecimal = mkBinOp dec decTy
eId :: Id
eId = mkInfix "E"
expTy :: OpType
expTy = mkTotOpType [float, integer] float
-- | construct the E float, where the second argument is of type integer
mkFloat :: TERM () -> TERM () -> TERM ()
mkFloat = mkBinOp eId expTy
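-- A schematic sketch of how the literal constructors above fit together
-- (ignoring the subsort injections that static analysis would insert): the
-- float literal 12.5E3 corresponds to
--
--   mkFloat (mkDecimal (joinDigits (mkDigit 1) (mkDigit 2)) (mkDigit 5))
--           (mkDigit 3)
--
-- i.e. digits are joined with @@, ':::' adds the decimal point and 'E' adds
-- the exponent. This mirrors the literal_annos set up in predefinedSign below.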
-- | upcast a term to a matching sort
upcast :: TERM () -> SORT -> TERM ()
upcast t ty = Sorted_term t ty nullRange
charS :: Id
charS = stringToId "Char"
charT :: OpType
charT = mkTotOpType [] charS
stringS :: Id
stringS = stringToId "string"
cons :: Id
cons = mkInfix ":@:"
emptyString :: Id
emptyString = stringToId "emptyString"
emptyStringTerm :: TERM ()
emptyStringTerm = mkAppl (mkQualOp emptyString $ toOP_TYPE emptyStringTy) []
charToId :: Char -> Id
charToId c = let s = show (ord c) in
stringToId $ "'\\" ++ replicate (3 - length s) '0' ++ show (ord c) ++ "'"
mkChar :: Char -> TERM ()
mkChar c = mkAppl (mkQualOp (charToId c) $ toOP_TYPE charT) []
consChar :: Char -> TERM () -> TERM ()
consChar c t = mkAppl (mkQualOp cons $ toOP_TYPE consTy) [mkChar c, t]
emptyStringTy :: OpType
emptyStringTy = mkTotOpType [] stringS
consTy :: OpType
consTy = mkTotOpType [charS, stringS] stringS
-- | OWL bottom
noThing :: PRED_SYMB
noThing = Qual_pred_name nothing classPredType n
intTypes :: [PredType]
intTypes = map (\ t -> PredType [t]) [integer, nonNegInt]
predefinedSign2 :: e -> Sign f e
predefinedSign2 e = (emptySign e) {
sortRel = Rel.insertKey thing $ Rel.insertKey dataS Rel.empty
}
predefSign2 :: CASLSign
predefSign2 = predefinedSign2 ()
-- | instead of one big signature, several small ones
charSign :: CASLSign
charSign = (emptySign ())
{ sortRel = Rel.insertKey (stringToId "Char") Rel.empty
, opMap = MapSet.fromList
$ map (\ c -> (charToId c, [charT]))
[chr 0 .. chr 127]
}
integerSign :: CASLSign
integerSign = (emptySign ())
{ sortRel =
Rel.transClosure $ Rel.fromList
[(negIntS, nonPosInt),
(nonNegInt, integer),
(nonPosInt, integer),
(posInt, nonNegInt),
(integer, dataS)]
, predMap =
MapSet.fromList
$
map ( \ o -> (stringToId o, intTypes))
["even", "odd"]
, opMap = MapSet.fromList
$ map (\ i -> (stringToId $ show i, [natT]))
[0 .. 9 :: Int]
++
[
(atAt, [atAtTy])
]
, globAnnos = emptyGlobalAnnos
{ literal_annos = emptyLiteralAnnos
{ number_lit = Just atAt
}}
}
predefinedSign :: e -> Sign f e
predefinedSign e = (emptySign e)
{ sortRel = Rel.insertKey (stringToId "Char")
$ Rel.insertKey thing
$ Rel.transClosure $ Rel.fromList
[(boolS, dataS),
(integer, float),
(float, dataS),
(negIntS, nonPosInt),
(nonNegInt, integer),
(nonPosInt, integer),
(posInt, nonNegInt),
(stringS, dataS) ]
, predMap =
MapSet.fromList
$ (nothing, [conceptPred])
: map ((\ o -> (mkInfix o, [dataPred])) .
showFacet) facetList
++ map ( \ o -> (stringToId o, intTypes))
["even", "odd"]
, opMap = MapSet.fromList
$ map (\ i -> (stringToId $ show i, [natT]))
[0 .. 9 :: Int]
++ map (\ c -> (charToId c, [charT]))
[chr 0 .. chr 127]
++
[ (trueS, [boolT])
, (falseS, [boolT])
, (atAt, [atAtTy])
, (unMinus, [minusTy, minusFloat])
, (dec, [decTy])
, (eId, [expTy])
, (cons, [consTy])
, (emptyString, [emptyStringTy])
]
, globAnnos = emptyGlobalAnnos
{ literal_annos = emptyLiteralAnnos
{ number_lit = Just atAt
, float_lit = Just (dec, eId)
, string_lit = Just (emptyString, cons) }}
}
floatSign :: CASLSign
floatSign = integerSign
{ sortRel = Rel.union (sortRel integerSign)
$ Rel.transClosure $ Rel.fromList
[
(integer, float),
(float, dataS)
]
, opMap = MapSet.union (opMap integerSign) $ MapSet.fromList
$
[
(unMinus, [minusTy, minusFloat])
, (dec, [decTy])
, (eId, [expTy])
]
, globAnnos = (globAnnos integerSign)
{ literal_annos = (literal_annos $ globAnnos integerSign)
{ float_lit = Just (dec, eId)
}}
}
boolSign :: CASLSign
boolSign = (emptySign ())
{ sortRel = Rel.transClosure $ Rel.fromList
[(boolS, dataS)]
, opMap = MapSet.fromList
$
[ (trueS, [boolT])
, (falseS, [boolT])
]
}
stringSignAux :: CASLSign
stringSignAux = (emptySign ())
{ sortRel =
Rel.transClosure $ Rel.fromList
[ (stringS, dataS) ]
, opMap = MapSet.fromList
$
[
(emptyString, [emptyStringTy])
, (cons, [consTy])
]
, globAnnos = emptyGlobalAnnos
{ literal_annos = emptyLiteralAnnos
{
string_lit = Just (emptyString, cons) }}
}
stringSign :: CASLSign
stringSign = uniteCASLSign stringSignAux charSign
datatypeSigns :: Map.Map SORT CASLSign
datatypeSigns = Map.fromList
[ (charS, charSign)
, (integer, integerSign)
, (float, floatSign)
, (boolS, boolSign)
, (stringS, stringSign)]
predefSign :: CASLSign
predefSign = predefinedSign ()
predefinedAxioms :: [Named (FORMULA ())]
predefinedAxioms = let
v1 = mkVarDecl (mkNName 1) thing
t1 = toQualVar v1
in [makeNamed "nothing in Nothing" $ mkForall [v1] $ Negation
(Predication noThing [t1] n) n,
makeNamed "thing in Thing" $ mkForall [v1] $ Predication
(Qual_pred_name thing classPredType n) [t1] n]
mkNNameAux :: Int -> String
mkNNameAux k = genNamePrefix ++ "x" ++ show k
-- | Build a name
mkNName :: Int -> Token
mkNName i = mkSimpleId $ hetsPrefix ++ mkNNameAux i
| spechub/Hets | CASL_DL/PredefinedCASLAxioms.hs | gpl-2.0 | 10,890 | 0 | 15 | 3,980 | 2,972 | 1,650 | 1,322 | 286 | 1 |
----------------------------------------------------------------------------
-- |
-- Module : XMonad.Actions.WindowMenu
-- Copyright : (c) Jan Vornberger 2009
-- License : BSD3-style (see LICENSE)
--
-- Maintainer : [email protected]
-- Stability : unstable
-- Portability : not portable
--
-- Uses "XMonad.Actions.GridSelect" to display a number of actions related to
-- window management in the center of the focused window. Actions include: Closing,
-- maximizing, minimizing and shifting the window to another workspace.
--
-- Note: For maximizing and minimizing to actually work, you will need
-- to integrate "XMonad.Layout.Maximize" and "XMonad.Layout.Minimize" into your
-- setup. See the documentation of those modules for more information.
--
-----------------------------------------------------------------------------
module XMonad.Actions.WindowMenu (
-- * Usage
-- $usage
windowMenu
) where
import XMonad
import qualified XMonad.StackSet as W
import XMonad.Actions.GridSelect
import XMonad.Layout.Maximize
import XMonad.Actions.Minimize
import XMonad.Util.XUtils (fi)
-- $usage
--
-- You can use this module with the following in your @~\/.xmonad\/xmonad.hs@:
--
-- > import XMonad.Actions.WindowMenu
--
-- Then add a keybinding, e.g.
--
-- > , ((modm, xK_o ), windowMenu)
colorizer :: a -> Bool -> X (String, String)
colorizer _ isFg = do
fBC <- asks (focusedBorderColor . config)
nBC <- asks (normalBorderColor . config)
return $ if isFg
then (fBC, nBC)
else (nBC, fBC)
windowMenu :: X ()
windowMenu = withFocused $ \w -> do
tags <- asks (workspaces . config)
Rectangle x y wh ht <- getSize w
Rectangle sx sy swh sht <- gets $ screenRect . W.screenDetail . W.current . windowset
let originFractX = (fi x - fi sx + fi wh / 2) / fi swh
originFractY = (fi y - fi sy + fi ht / 2) / fi sht
gsConfig = (buildDefaultGSConfig colorizer)
{ gs_originFractX = originFractX
, gs_originFractY = originFractY }
actions = [ ("Cancel menu", return ())
, ("Close" , kill)
, ("Maximize" , sendMessage $ maximizeRestore w)
, ("Minimize" , minimizeWindow w)
] ++
[ ("Move to " ++ tag, windows $ W.shift tag)
| tag <- tags ]
runSelectedAction gsConfig actions
getSize :: Window -> X (Rectangle)
getSize w = do
d <- asks display
wa <- io $ getWindowAttributes d w
let x = fi $ wa_x wa
y = fi $ wa_y wa
wh = fi $ wa_width wa
ht = fi $ wa_height wa
return (Rectangle x y wh ht)
| f1u77y/xmonad-contrib | XMonad/Actions/WindowMenu.hs | bsd-3-clause | 2,834 | 0 | 17 | 818 | 589 | 320 | 269 | 42 | 2 |
{-# LANGUAGE ScopedTypeVariables #-}
module Differential where
import Control.Applicative
import Data.Dynamic
import Data.Ratio
import Expr
import Transformation
import Tensor
-- A symbolic partial differentiation.
partial :: forall a. (Typeable a) => Expr Axis -> Expr (Pt -> a) -> Expr (Pt -> a)
partial i f = (Reserved Partial :: Expr (Axis -> (Pt->a)->(Pt->a))) :$ i :$ f
Σ :: forall a. (Typeable a) => Expr Axis -> Expr (Pt -> a) -> Expr (Pt -> a)
Σ = partial
-- A fourth-order finite-difference implementation of partial differentiation.
partial4 :: (Typeable a, Fractional a) => Axis -> Expr (Pt->a) -> (Expr Pt-> Expr a)
partial4 i f p = (fromRational $ 9%8)*((f :$ (p + 0.5 * e(i))) - (f :$ (p - 0.5 * e(i))))
- (fromRational $ 1%24)*((f :$ (p + 1.5 * e(i))) - (f :$ (p - 1.5 * e(i))))
where
e :: Axis -> Expr Pt
e i = Static ("\\mathbf{e}_"++show i) (\j -> if i==j then 1 else 0)
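-- The stencil implemented above, written out (assuming a unit grid spacing
-- and writing e_i for the unit vector along axis i):
--
--   ∂_i f (p) ≈ (9/8)  * (f (p + e_i/2)  - f (p - e_i/2))
--             - (1/24) * (f (p + 3e_i/2) - f (p - 3e_i/2))
--
-- which is the standard fourth-order central difference on a staggered grid.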
usePartial4 :: (Typeable a, Fractional a) => Expr a -> Expr a
usePartial4 x = case x of
(Reserved Partial :$ i :$ f :$ r) -> case partial4 <$> (runStatic i >>= cast) <*> cast f <*> cast r of
Just y -> y
_ -> x
_ -> x
| nushio3/Paraiso | attic/newexp/Differential.hs | bsd-3-clause | 1,147 | 0 | 15 | 274 | 574 | 303 | 271 | 23 | 3 |
{-# LANGUAGE CPP, OverloadedStrings #-}
{-# OPTIONS_GHC -Wall -fwarn-tabs #-}
module Language.Hakaru.Parser.Import (expandImports) where
import Language.Hakaru.Parser.AST
import Language.Hakaru.Parser.Parser (parseHakaruWithImports)
import Control.Monad.Trans.Except
import Control.Monad.IO.Class
import qualified Data.Text as T
import qualified Data.Text.IO as IO
import Text.Parsec
replaceBody :: AST' T.Text -> AST' T.Text -> AST' T.Text
replaceBody e1 e2 =
case e1 of
Let x e3 e4 -> Let x e3 (replaceBody e4 e2)
Ann e3 t -> Ann (replaceBody e3 e2) t
WithMeta e3 s -> WithMeta (replaceBody e3 e2) s
_ -> e2
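-- Schematically, replaceBody (Let x e (Ann b t)) b' = Let x e (Ann b' t): the
-- Let/Ann/WithMeta wrappers are preserved and only the innermost body is
-- swapped out. 'expandImports' below uses this to wrap the declarations of an
-- imported file around the program that imports it.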
expandImports
:: Maybe FilePath
-> ASTWithImport' T.Text
-> ExceptT ParseError IO (AST' T.Text)
expandImports dir (ASTWithImport' (Import i:is) ast) = do
file <- liftIO . IO.readFile . T.unpack $
T.concat $ maybe [] ((:["/"]) . T.pack) dir ++ [ i, ".hk" ]
astIm <- ExceptT . return $ parseHakaruWithImports file
ast' <- expandImports dir astIm
expandImports dir (ASTWithImport' is (replaceBody ast' ast))
expandImports _ (ASTWithImport' [] ast) = return ast
| zachsully/hakaru | haskell/Language/Hakaru/Parser/Import.hs | bsd-3-clause | 1,270 | 0 | 14 | 356 | 399 | 208 | 191 | 28 | 4 |
{-
(c) The GRASP/AQUA Project, Glasgow University, 1992-2012
Note [Unarisation]
~~~~~~~~~~~~~~~~~~
The idea of this pass is to translate away *all* unboxed-tuple and unboxed-sum
binders. So for example:
f (x :: (# Int, Bool #)) = f x + f (# 1, True #)
==>
f (x1 :: Int) (x2 :: Bool) = f x1 x2 + f 1 True
It is important that we do this at the STG level and NOT at the Core level
because it would be very hard to make this pass Core-type-preserving. In this
example the type of 'f' changes, for example.
STG fed to the code generators *must* be unarised because the code generators do
not support unboxed tuple and unboxed sum binders natively.
In more detail: (see next note for unboxed sums)
Suppose that a variable x : (# t1, t2 #).
* At the binding site for x, make up fresh vars x1:t1, x2:t2
* Extend the UnariseEnv x :-> MultiVal [x1,x2]
* Replace the binding with a curried binding for x1,x2
Lambda: \x.e ==> \x1 x2. e
Case alt: MkT a b x c d -> e ==> MkT a b x1 x2 c d -> e
* Replace argument occurrences with a sequence of args via a lookup in
UnariseEnv
f a b x c d ==> f a b x1 x2 c d
* Replace tail-call occurrences with an unboxed tuple via a lookup in
UnariseEnv
x ==> (# x1, x2 #)
So, for example
f x = x ==> f x1 x2 = (# x1, x2 #)
* We /always/ eliminate a case expression when
- It scrutinises an unboxed tuple or unboxed sum
- The scrutinee is a variable (or when it is an explicit tuple, but the
simplifier eliminates those)
The case alternative (there can be only one) can be one of these two
things:
- An unboxed tuple pattern. e.g.
case v of x { (# x1, x2, x3 #) -> ... }
Scrutinee has to be in form `(# t1, t2, t3 #)` so we just extend the
environment with
x :-> MultiVal [t1,t2,t3]
x1 :-> UnaryVal t1, x2 :-> UnaryVal t2, x3 :-> UnaryVal t3
- A DEFAULT alternative. Just the same, without the bindings for x1,x2,x3
By the end of this pass, we only have unboxed tuples in return positions.
Unboxed sums are completely eliminated, see next note.
Note [Translating unboxed sums to unboxed tuples]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Unarise also eliminates unboxed sum binders, and translates unboxed sums in
return positions to unboxed tuples. We want to overlap fields of a sum when
translating it to a tuple to have efficient memory layout. When translating a
sum pattern to a tuple pattern, we need to translate it so that binders of sum
alternatives will be mapped to right arguments after the term translation. So
translation of sum DataCon applications to tuple DataCon applications and
translation of sum patterns to tuple patterns need to be in sync.
These translations work like this. Suppose we have
(# x1 | | ... #) :: (# t1 | t2 | ... #)
remember that t1, t2 ... can be sums and tuples too. So we first generate
layouts of those. Then we "merge" layouts of each alternative, which gives us a
sum layout with best overlapping possible.
Layout of a flat type 'ty1' is just [ty1].
Layout of a tuple is just concatenation of layouts of its fields.
For layout of a sum type,
- We first get layouts of all alternatives.
- We sort these layouts based on their "slot types".
- We merge all the alternatives.
For example, say we have (# (# Int#, Char #) | (# Int#, Int# #) | Int# #)
- Layouts of alternatives: [ [Word, Ptr], [Word, Word], [Word] ]
- Sorted: [ [Ptr, Word], [Word, Word], [Word] ]
- Merge all alternatives together: [ Ptr, Word, Word ]
We add a slot for the tag to the first position. So our tuple type is
(# Tag#, Any, Word#, Word# #)
(we use Any for pointer slots)
Now, any term of this sum type needs to generate a tuple of this type instead.
The translation works by simply putting arguments to first slots that they fit
in. Suppose we had
(# (# 42#, 'c' #) | | #)
42# fits in Word#, 'c' fits in Any, so we generate this application:
(# 1#, 'c', 42#, rubbish #)
Another example using the same type: (# | (# 2#, 3# #) | #). 2# fits in Word#,
3# fits in Word #, so we get:
(# 2#, rubbish, 2#, 3# #).
Note [Types in StgConApp]
~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose we have this unboxed sum term:
(# 123 | #)
What will be the unboxed tuple representation? We can't tell without knowing the
type of this term. For example, these are all valid tuples for this:
(# 1#, 123 #) -- when type is (# Int | String #)
(# 1#, 123, rubbish #) -- when type is (# Int | Float# #)
(# 1#, 123, rubbish, rubbish #)
-- when type is (# Int | (# Int, Int, Int #) #)
So we pass type arguments of the DataCon's TyCon in StgConApp to decide what
layout to use. Note that unlifted values can't be let-bound, so we don't need
types in StgRhsCon.
Note [UnariseEnv can map to literals]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
To avoid redundant case expressions when unarising unboxed sums, UnariseEnv
needs to map variables to literals too. Suppose we have this Core:
f (# x | #)
==> (CorePrep)
case (# x | #) of y {
_ -> f y
}
==> (MultiVal)
case (# 1#, x #) of [x1, x2] {
_ -> f x1 x2
}
To eliminate this case expression we need to map x1 to 1# in UnariseEnv:
x1 :-> UnaryVal 1#, x2 :-> UnaryVal x
so that `f x1 x2` becomes `f 1# x`.
Note [Unarisation and arity]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Because of unarisation, the arity that will be recorded in the generated info
table for an Id may be larger than the idArity. Instead we record what we call
the RepArity, which is the Arity taking into account any expanded arguments, and
corresponds to the number of (possibly-void) *registers* arguments will arrive
in.
Note [Post-unarisation invariants]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
STG programs after unarisation have these invariants:
* No unboxed sums at all.
* No unboxed tuple binders. Tuples only appear in return position.
* DataCon applications (StgRhsCon and StgConApp) don't have void arguments.
This means that it's safe to wrap `StgArg`s of DataCon applications with
`StgCmmEnv.NonVoid`, for example.
* Alt binders (binders in patterns) are always non-void.
-}
{-# LANGUAGE CPP, TupleSections #-}
module UnariseStg (unarise) where
#include "HsVersions.h"
import GhcPrelude
import BasicTypes
import CoreSyn
import DataCon
import FastString (FastString, mkFastString)
import Id
import Literal (Literal (..))
import MkCore (aBSENT_ERROR_ID)
import MkId (voidPrimId, voidArgId)
import MonadUtils (mapAccumLM)
import Outputable
import RepType
import StgSyn
import Type
import TysPrim (intPrimTy)
import TysWiredIn
import UniqSupply
import Util
import VarEnv
import Data.Bifunctor (second)
import Data.Maybe (mapMaybe)
import qualified Data.IntMap as IM
--------------------------------------------------------------------------------
-- | A mapping from binders to the Ids they were expanded/renamed to.
--
-- x :-> MultiVal [a,b,c] in rho
--
-- iff x's typePrimRep is not a singleton, or equivalently
-- x's type is an unboxed tuple, sum or void.
--
-- x :-> UnaryVal x'
--
-- iff x's RepType is UnaryRep or equivalently
-- x's type is not unboxed tuple, sum or void.
--
-- So
-- x :-> MultiVal [a] in rho
-- means x is represented by singleton tuple.
--
-- x :-> MultiVal [] in rho
-- means x is void.
--
-- INVARIANT: OutStgArgs in the range only have NvUnaryTypes
-- (i.e. no unboxed tuples, sums or voids)
--
type UnariseEnv = VarEnv UnariseVal
data UnariseVal
= MultiVal [OutStgArg] -- MultiVal to tuple. Can be empty list (void).
| UnaryVal OutStgArg -- See NOTE [Renaming during unarisation].
instance Outputable UnariseVal where
ppr (MultiVal args) = text "MultiVal" <+> ppr args
ppr (UnaryVal arg) = text "UnaryVal" <+> ppr arg
-- | Extend the environment, checking the UnariseEnv invariant.
extendRho :: UnariseEnv -> Id -> UnariseVal -> UnariseEnv
extendRho rho x (MultiVal args)
= ASSERT(all (isNvUnaryType . stgArgType) args)
extendVarEnv rho x (MultiVal args)
extendRho rho x (UnaryVal val)
= ASSERT(isNvUnaryType (stgArgType val))
extendVarEnv rho x (UnaryVal val)
--------------------------------------------------------------------------------
unarise :: UniqSupply -> [StgTopBinding] -> [StgTopBinding]
unarise us binds = initUs_ us (mapM (unariseTopBinding emptyVarEnv) binds)
unariseTopBinding :: UnariseEnv -> StgTopBinding -> UniqSM StgTopBinding
unariseTopBinding rho (StgTopLifted bind)
= StgTopLifted <$> unariseBinding rho bind
unariseTopBinding _ bind@StgTopStringLit{} = return bind
unariseBinding :: UnariseEnv -> StgBinding -> UniqSM StgBinding
unariseBinding rho (StgNonRec x rhs)
= StgNonRec x <$> unariseRhs rho rhs
unariseBinding rho (StgRec xrhss)
= StgRec <$> mapM (\(x, rhs) -> (x,) <$> unariseRhs rho rhs) xrhss
unariseRhs :: UnariseEnv -> StgRhs -> UniqSM StgRhs
unariseRhs rho (StgRhsClosure ccs b_info fvs update_flag args expr)
= do (rho', args1) <- unariseFunArgBinders rho args
expr' <- unariseExpr rho' expr
let fvs' = unariseFreeVars rho fvs
return (StgRhsClosure ccs b_info fvs' update_flag args1 expr')
unariseRhs rho (StgRhsCon ccs con args)
= ASSERT(not (isUnboxedTupleCon con || isUnboxedSumCon con))
return (StgRhsCon ccs con (unariseConArgs rho args))
--------------------------------------------------------------------------------
unariseExpr :: UnariseEnv -> StgExpr -> UniqSM StgExpr
unariseExpr rho e@(StgApp f [])
= case lookupVarEnv rho f of
Just (MultiVal args) -- Including empty tuples
-> return (mkTuple args)
Just (UnaryVal (StgVarArg f'))
-> return (StgApp f' [])
Just (UnaryVal (StgLitArg f'))
-> return (StgLit f')
Nothing
-> return e
unariseExpr rho e@(StgApp f args)
= return (StgApp f' (unariseFunArgs rho args))
where
f' = case lookupVarEnv rho f of
Just (UnaryVal (StgVarArg f')) -> f'
Nothing -> f
err -> pprPanic "unariseExpr - app2" (ppr e $$ ppr err)
-- Can't happen because 'args' is non-empty, and
-- a tuple or sum cannot be applied to anything
unariseExpr _ (StgLit l)
= return (StgLit l)
unariseExpr rho (StgConApp dc args ty_args)
| Just args' <- unariseMulti_maybe rho dc args ty_args
= return (mkTuple args')
| otherwise
, let args' = unariseConArgs rho args
= return (StgConApp dc args' (map stgArgType args'))
unariseExpr rho (StgOpApp op args ty)
= return (StgOpApp op (unariseFunArgs rho args) ty)
unariseExpr _ e@StgLam{}
= pprPanic "unariseExpr: found lambda" (ppr e)
unariseExpr rho (StgCase scrut bndr alt_ty alts)
-- a tuple/sum binders in the scrutinee can always be eliminated
| StgApp v [] <- scrut
, Just (MultiVal xs) <- lookupVarEnv rho v
= elimCase rho xs bndr alt_ty alts
-- Handle strict lets for tuples and sums:
-- case (# a,b #) of r -> rhs
-- and analogously for sums
| StgConApp dc args ty_args <- scrut
, Just args' <- unariseMulti_maybe rho dc args ty_args
= elimCase rho args' bndr alt_ty alts
-- general case
| otherwise
= do scrut' <- unariseExpr rho scrut
alts' <- unariseAlts rho alt_ty bndr alts
return (StgCase scrut' bndr alt_ty alts')
-- bndr will be dead after unarise
unariseExpr rho (StgLet bind e)
= StgLet <$> unariseBinding rho bind <*> unariseExpr rho e
unariseExpr rho (StgLetNoEscape bind e)
= StgLetNoEscape <$> unariseBinding rho bind <*> unariseExpr rho e
unariseExpr rho (StgTick tick e)
= StgTick tick <$> unariseExpr rho e
-- Doesn't return void args.
unariseMulti_maybe :: UnariseEnv -> DataCon -> [InStgArg] -> [Type] -> Maybe [OutStgArg]
unariseMulti_maybe rho dc args ty_args
| isUnboxedTupleCon dc
= Just (unariseConArgs rho args)
| isUnboxedSumCon dc
, let args1 = ASSERT(isSingleton args) (unariseConArgs rho args)
= Just (mkUbxSum dc ty_args args1)
| otherwise
= Nothing
--------------------------------------------------------------------------------
elimCase :: UnariseEnv
-> [OutStgArg] -- non-void args
-> InId -> AltType -> [InStgAlt] -> UniqSM OutStgExpr
elimCase rho args bndr (MultiValAlt _) [(_, bndrs, rhs)]
= do let rho1 = extendRho rho bndr (MultiVal args)
rho2
| isUnboxedTupleBndr bndr
= mapTupleIdBinders bndrs args rho1
| otherwise
= ASSERT(isUnboxedSumBndr bndr)
if null bndrs then rho1
else mapSumIdBinders bndrs args rho1
unariseExpr rho2 rhs
elimCase rho args bndr (MultiValAlt _) alts
| isUnboxedSumBndr bndr
= do let (tag_arg : real_args) = args
tag_bndr <- mkId (mkFastString "tag") tagTy
-- this won't be used but we need a binder anyway
let rho1 = extendRho rho bndr (MultiVal args)
scrut' = case tag_arg of
StgVarArg v -> StgApp v []
StgLitArg l -> StgLit l
alts' <- unariseSumAlts rho1 real_args alts
return (StgCase scrut' tag_bndr tagAltTy alts')
elimCase _ args bndr alt_ty alts
= pprPanic "elimCase - unhandled case"
(ppr args <+> ppr bndr <+> ppr alt_ty $$ ppr alts)
--------------------------------------------------------------------------------
unariseAlts :: UnariseEnv -> AltType -> InId -> [StgAlt] -> UniqSM [StgAlt]
unariseAlts rho (MultiValAlt n) bndr [(DEFAULT, [], e)]
| isUnboxedTupleBndr bndr
= do (rho', ys) <- unariseConArgBinder rho bndr
e' <- unariseExpr rho' e
return [(DataAlt (tupleDataCon Unboxed n), ys, e')]
unariseAlts rho (MultiValAlt n) bndr [(DataAlt _, ys, e)]
| isUnboxedTupleBndr bndr
= do (rho', ys1) <- unariseConArgBinders rho ys
MASSERT(ys1 `lengthIs` n)
let rho'' = extendRho rho' bndr (MultiVal (map StgVarArg ys1))
e' <- unariseExpr rho'' e
return [(DataAlt (tupleDataCon Unboxed n), ys1, e')]
unariseAlts _ (MultiValAlt _) bndr alts
| isUnboxedTupleBndr bndr
= pprPanic "unariseExpr: strange multi val alts" (ppr alts)
-- In this case we don't need to scrutinize the tag bit
unariseAlts rho (MultiValAlt _) bndr [(DEFAULT, _, rhs)]
| isUnboxedSumBndr bndr
= do (rho_sum_bndrs, sum_bndrs) <- unariseConArgBinder rho bndr
rhs' <- unariseExpr rho_sum_bndrs rhs
return [(DataAlt (tupleDataCon Unboxed (length sum_bndrs)), sum_bndrs, rhs')]
unariseAlts rho (MultiValAlt _) bndr alts
| isUnboxedSumBndr bndr
= do (rho_sum_bndrs, scrt_bndrs@(tag_bndr : real_bndrs)) <- unariseConArgBinder rho bndr
alts' <- unariseSumAlts rho_sum_bndrs (map StgVarArg real_bndrs) alts
let inner_case = StgCase (StgApp tag_bndr []) tag_bndr tagAltTy alts'
return [ (DataAlt (tupleDataCon Unboxed (length scrt_bndrs)),
scrt_bndrs,
inner_case) ]
unariseAlts rho _ _ alts
= mapM (\alt -> unariseAlt rho alt) alts
unariseAlt :: UnariseEnv -> StgAlt -> UniqSM StgAlt
unariseAlt rho (con, xs, e)
= do (rho', xs') <- unariseConArgBinders rho xs
(con, xs',) <$> unariseExpr rho' e
--------------------------------------------------------------------------------
-- | Make alternatives that match on the tag of a sum
-- (i.e. generate LitAlts for the tag)
unariseSumAlts :: UnariseEnv
-> [StgArg] -- sum components _excluding_ the tag bit.
-> [StgAlt] -- original alternative with sum LHS
-> UniqSM [StgAlt]
unariseSumAlts env args alts
= do alts' <- mapM (unariseSumAlt env args) alts
return (mkDefaultLitAlt alts')
unariseSumAlt :: UnariseEnv
-> [StgArg] -- sum components _excluding_ the tag bit.
-> StgAlt -- original alternative with sum LHS
-> UniqSM StgAlt
unariseSumAlt rho _ (DEFAULT, _, e)
= ( DEFAULT, [], ) <$> unariseExpr rho e
unariseSumAlt rho args (DataAlt sumCon, bs, e)
= do let rho' = mapSumIdBinders bs args rho
e' <- unariseExpr rho' e
return ( LitAlt (MachInt (fromIntegral (dataConTag sumCon))), [], e' )
unariseSumAlt _ scrt alt
= pprPanic "unariseSumAlt" (ppr scrt $$ ppr alt)
--------------------------------------------------------------------------------
mapTupleIdBinders
:: [InId] -- Un-processed binders of a tuple alternative.
-- Can have void binders.
-> [OutStgArg] -- Arguments that form the tuple (after unarisation).
-- Can't have void args.
-> UnariseEnv
-> UnariseEnv
mapTupleIdBinders ids args0 rho0
= ASSERT(not (any (isVoidTy . stgArgType) args0))
let
ids_unarised :: [(Id, [PrimRep])]
ids_unarised = map (\id -> (id, typePrimRep (idType id))) ids
map_ids :: UnariseEnv -> [(Id, [PrimRep])] -> [StgArg] -> UnariseEnv
map_ids rho [] _ = rho
map_ids rho ((x, x_reps) : xs) args =
let
x_arity = length x_reps
(x_args, args') =
ASSERT(args `lengthAtLeast` x_arity)
splitAt x_arity args
rho'
| x_arity == 1
= ASSERT(x_args `lengthIs` 1)
extendRho rho x (UnaryVal (head x_args))
| otherwise
= extendRho rho x (MultiVal x_args)
in
map_ids rho' xs args'
in
map_ids rho0 ids_unarised args0
mapSumIdBinders
:: [InId] -- Binder of a sum alternative (remember that sum patterns
-- only have one binder, so this list should be a singleton)
-> [OutStgArg] -- Arguments that form the sum (NOT including the tag).
-- Can't have void args.
-> UnariseEnv
-> UnariseEnv
mapSumIdBinders [id] args rho0
= ASSERT(not (any (isVoidTy . stgArgType) args))
let
arg_slots = map primRepSlot $ concatMap (typePrimRep . stgArgType) args
id_slots = map primRepSlot $ typePrimRep (idType id)
layout1 = layoutUbxSum arg_slots id_slots
in
if isMultiValBndr id
then extendRho rho0 id (MultiVal [ args !! i | i <- layout1 ])
else ASSERT(layout1 `lengthIs` 1)
extendRho rho0 id (UnaryVal (args !! head layout1))
mapSumIdBinders ids sum_args _
= pprPanic "mapSumIdBinders" (ppr ids $$ ppr sum_args)
-- | Build an unboxed sum term from arguments of an alternative.
--
-- Example, for (# x | #) :: (# (# #) | Int #) we call
--
-- mkUbxSum (# _ | #) [ (# #), Int ] [ voidPrimId ]
--
-- which returns
--
-- [ 1#, rubbish ]
--
mkUbxSum
:: DataCon -- Sum data con
-> [Type] -- Type arguments of the sum data con
-> [OutStgArg] -- Actual arguments of the alternative.
-> [OutStgArg] -- Final tuple arguments
mkUbxSum dc ty_args args0
= let
(_ : sum_slots) = ubxSumRepType (map typePrimRep ty_args)
-- drop tag slot
tag = dataConTag dc
layout' = layoutUbxSum sum_slots (mapMaybe (typeSlotTy . stgArgType) args0)
tag_arg = StgLitArg (MachInt (fromIntegral tag))
arg_idxs = IM.fromList (zipEqual "mkUbxSum" layout' args0)
mkTupArgs :: Int -> [SlotTy] -> IM.IntMap StgArg -> [StgArg]
mkTupArgs _ [] _
= []
mkTupArgs arg_idx (slot : slots_left) arg_map
| Just stg_arg <- IM.lookup arg_idx arg_map
= stg_arg : mkTupArgs (arg_idx + 1) slots_left arg_map
| otherwise
= slotRubbishArg slot : mkTupArgs (arg_idx + 1) slots_left arg_map
slotRubbishArg :: SlotTy -> StgArg
slotRubbishArg PtrSlot = StgVarArg aBSENT_ERROR_ID
slotRubbishArg WordSlot = StgLitArg (MachWord 0)
slotRubbishArg Word64Slot = StgLitArg (MachWord64 0)
slotRubbishArg FloatSlot = StgLitArg (MachFloat 0)
slotRubbishArg DoubleSlot = StgLitArg (MachDouble 0)
in
tag_arg : mkTupArgs 0 sum_slots arg_idxs
--------------------------------------------------------------------------------
{-
For arguments (StgArg) and binders (Id) we have two kinds of unarisation:
- When unarising function arg binders and arguments, we don't want to remove
void binders and arguments. For example,
f :: (# (# #), (# #) #) -> Void# -> RealWorld# -> ...
f x y z = <body>
Here after unarise we should still get a function with arity 3. Similarly
in the call site we shouldn't remove void arguments:
f (# (# #), (# #) #) voidId rw
When unarising <body>, we extend the environment with these binders:
x :-> MultiVal [], y :-> MultiVal [], z :-> MultiVal []
Because their rep types are `MultiRep []` (aka. void). This means that when
we see `x` in a function argument position, we actually replace it with a
void argument. When we see it in a DataCon argument position, we just get
rid of it, because DataCon applications in STG are always saturated.
- When unarising case alternative binders we remove void binders, but we
still update the environment the same way, because those binders may be
used in the RHS. Example:
case x of y {
(# x1, x2, x3 #) -> <RHS>
}
We know that y can't be void, because we don't scrutinize voids, so x will
be unarised to some number of arguments, and those arguments will have at
least one non-void thing. So in the rho we will have something like:
x :-> MultiVal [xu1, xu2]
Now, after we eliminate void binders in the pattern, we get exactly the same
number of binders, and extend rho again with these:
x1 :-> UnaryVal xu1
x2 :-> MultiVal [] -- x2 is void
x3 :-> UnaryVal xu2
Now when we see x2 in a function argument position or in return position, we
generate void#. In constructor argument position, we just remove it.
So in short, when we have a void id,
- We keep it if it's a lambda argument binder or
in argument position of an application.
- We remove it if it's a DataCon field binder or
in argument position of a DataCon application.
-}
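-- A tiny illustration of the rules above (hypothetical binding
-- x :-> MultiVal [] in rho):
--
--   unariseFunArgs rho [StgVarArg x]  ==>  [StgVarArg voidPrimId]  -- kept
--   unariseConArgs rho [StgVarArg x]  ==>  []                      -- dropped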
--------------------------------------------------------------------------------
-- | Unarise a function argument. Never returns an empty list.
unariseFunArg :: UnariseEnv -> StgArg -> [StgArg]
unariseFunArg rho (StgVarArg x) =
case lookupVarEnv rho x of
Just (MultiVal []) -> [voidArg] -- NB: do not remove void args
Just (MultiVal as) -> as
Just (UnaryVal arg) -> [arg]
Nothing -> [StgVarArg x]
unariseFunArg _ arg = [arg]
unariseFunArgs :: UnariseEnv -> [StgArg] -> [StgArg]
unariseFunArgs = concatMap . unariseFunArg
unariseFunArgBinders :: UnariseEnv -> [Id] -> UniqSM (UnariseEnv, [Id])
unariseFunArgBinders rho xs = second concat <$> mapAccumLM unariseFunArgBinder rho xs
unariseFunArgBinder :: UnariseEnv -> Id -> UniqSM (UnariseEnv, [Id])
-- Result list of binders is never empty
unariseFunArgBinder rho x =
case typePrimRep (idType x) of
[] -> return (extendRho rho x (MultiVal []), [voidArgId])
-- NB: do not remove void binders
[_] -> return (rho, [x])
reps -> do
xs <- mkIds (mkFastString "us") (map primRepToType reps)
return (extendRho rho x (MultiVal (map StgVarArg xs)), xs)
--------------------------------------------------------------------------------
-- | Unarise a DataCon argument. Returns an empty list when the argument is void.
unariseConArg :: UnariseEnv -> InStgArg -> [OutStgArg]
unariseConArg rho (StgVarArg x) =
case lookupVarEnv rho x of
Just (UnaryVal arg) -> [arg]
Just (MultiVal as) -> as -- 'as' can be empty
Nothing
| isVoidTy (idType x) -> [] -- e.g. C realWorld#
-- Here realWorld# is not in the envt, but
-- is a void, and so should be eliminated
| otherwise -> [StgVarArg x]
unariseConArg _ arg = [arg] -- We have no void literals
unariseConArgs :: UnariseEnv -> [InStgArg] -> [OutStgArg]
unariseConArgs = concatMap . unariseConArg
unariseConArgBinders :: UnariseEnv -> [Id] -> UniqSM (UnariseEnv, [Id])
unariseConArgBinders rho xs = second concat <$> mapAccumLM unariseConArgBinder rho xs
unariseConArgBinder :: UnariseEnv -> Id -> UniqSM (UnariseEnv, [Id])
unariseConArgBinder rho x =
case typePrimRep (idType x) of
[_] -> return (rho, [x])
reps -> do
xs <- mkIds (mkFastString "us") (map primRepToType reps)
return (extendRho rho x (MultiVal (map StgVarArg xs)), xs)
unariseFreeVars :: UnariseEnv -> [InId] -> [OutId]
unariseFreeVars rho fvs
= [ v | fv <- fvs, StgVarArg v <- unariseFreeVar rho fv ]
-- Notice that we filter out any StgLitArgs
-- e.g. case e of (x :: (# Int | Bool #))
-- (# v | #) -> ... let {g = \y. ..x...} in ...
-- (# | w #) -> ...
-- Here 'x' is free in g's closure, and the env will have
-- x :-> [1, v]
-- we want to capture 'v', but not 1, in the free vars
unariseFreeVar :: UnariseEnv -> Id -> [StgArg]
unariseFreeVar rho x =
case lookupVarEnv rho x of
Just (MultiVal args) -> args
Just (UnaryVal arg) -> [arg]
Nothing -> [StgVarArg x]
--------------------------------------------------------------------------------
mkIds :: FastString -> [UnaryType] -> UniqSM [Id]
mkIds fs tys = mapM (mkId fs) tys
mkId :: FastString -> UnaryType -> UniqSM Id
mkId = mkSysLocalOrCoVarM
isMultiValBndr :: Id -> Bool
isMultiValBndr id
| [_] <- typePrimRep (idType id)
= False
| otherwise
= True
isUnboxedSumBndr :: Id -> Bool
isUnboxedSumBndr = isUnboxedSumType . idType
isUnboxedTupleBndr :: Id -> Bool
isUnboxedTupleBndr = isUnboxedTupleType . idType
mkTuple :: [StgArg] -> StgExpr
mkTuple args = StgConApp (tupleDataCon Unboxed (length args)) args (map stgArgType args)
tagAltTy :: AltType
tagAltTy = PrimAlt IntRep
tagTy :: Type
tagTy = intPrimTy
voidArg :: StgArg
voidArg = StgVarArg voidPrimId
mkDefaultLitAlt :: [StgAlt] -> [StgAlt]
-- We have an exhaustive list of literal alternatives
-- 1# -> e1
-- 2# -> e2
-- Since they are exhaustive, we can replace one with DEFAULT, to avoid
-- generating a final test. Remember, the DEFAULT comes first if it exists.
mkDefaultLitAlt [] = pprPanic "elimUbxSumExpr.mkDefaultAlt" (text "Empty alts")
mkDefaultLitAlt alts@((DEFAULT, _, _) : _) = alts
mkDefaultLitAlt ((LitAlt{}, [], rhs) : alts) = (DEFAULT, [], rhs) : alts
mkDefaultLitAlt alts = pprPanic "mkDefaultLitAlt" (text "Not a lit alt:" <+> ppr alts)
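-- Schematically (illustrative, not from the original source):
--
--   mkDefaultLitAlt [(LitAlt 1#, [], e1), (LitAlt 2#, [], e2)]
--     ==> [(DEFAULT, [], e1), (LitAlt 2#, [], e2)]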
| shlevy/ghc | compiler/simplStg/UnariseStg.hs | bsd-3-clause | 26,475 | 0 | 19 | 6,280 | 5,080 | 2,598 | 2,482 | -1 | -1 |
{-|
Module : IRTS.JavaScript.AST
Description : Data structures and functions used with the JavaScript codegen.
Copyright :
License : BSD3
Maintainer : The Idris Community.
-}
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE OverloadedStrings #-}
module IRTS.JavaScript.AST where
import Data.Word
import Data.Char (isDigit)
import qualified Data.Text as T
data JSType = JSIntTy
| JSStringTy
| JSIntegerTy
| JSFloatTy
| JSCharTy
| JSPtrTy
| JSForgotTy
deriving Eq
data JSInteger = JSBigZero
| JSBigOne
| JSBigInt Integer
| JSBigIntExpr JS
deriving Eq
data JSNum = JSInt Int
| JSFloat Double
| JSInteger JSInteger
deriving Eq
data JSWord = JSWord8 Word8
| JSWord16 Word16
| JSWord32 Word32
| JSWord64 Word64
deriving Eq
data JSAnnotation = JSConstructor deriving Eq
instance Show JSAnnotation where
show JSConstructor = "constructor"
data JS = JSRaw String
| JSIdent String
| JSFunction [String] JS
| JSType JSType
| JSSeq [JS]
| JSReturn JS
| JSApp JS [JS]
| JSNew String [JS]
| JSError String
| JSBinOp String JS JS
| JSPreOp String JS
| JSPostOp String JS
| JSProj JS String
| JSNull
| JSUndefined
| JSThis
| JSTrue
| JSFalse
| JSArray [JS]
| JSString String
| JSNum JSNum
| JSWord JSWord
| JSAssign JS JS
| JSAlloc String (Maybe JS)
| JSIndex JS JS
| JSSwitch JS [(JS, JS)] (Maybe JS)
| JSCond [(JS, JS)]
| JSTernary JS JS JS
| JSParens JS
| JSWhile JS JS
| JSFFI String [JS]
| JSAnnotation JSAnnotation JS
| JSDelete JS
| JSClear JS
| JSNoop
deriving Eq
data FFI = FFICode Char | FFIArg Int | FFIError String
ffi :: String -> [String] -> T.Text
ffi code args = let parsed = ffiParse code in
case ffiError parsed of
Just err -> error err
Nothing -> renderFFI parsed args
where
ffiParse :: String -> [FFI]
ffiParse "" = []
ffiParse ['%'] = [FFIError "FFI - Invalid positional argument"]
ffiParse ('%':'%':ss) = FFICode '%' : ffiParse ss
ffiParse ('%':s:ss)
| isDigit s =
FFIArg (
read $ s : takeWhile isDigit ss
) : ffiParse (dropWhile isDigit ss)
| otherwise =
[FFIError "FFI - Invalid positional argument"]
ffiParse (s:ss) = FFICode s : ffiParse ss
ffiError :: [FFI] -> Maybe String
ffiError [] = Nothing
ffiError ((FFIError s):xs) = Just s
ffiError (x:xs) = ffiError xs
renderFFI :: [FFI] -> [String] -> T.Text
renderFFI [] _ = ""
renderFFI (FFICode c : fs) args = c `T.cons` renderFFI fs args
renderFFI (FFIArg i : fs) args
| i < length args && i >= 0 =
T.pack (args !! i)
`T.append` renderFFI fs args
| otherwise = error "FFI - Argument index out of bounds"
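-- Illustrative examples (not part of the original module): positional
-- arguments are written %0, %1, ... and %% escapes a literal '%'.
--
-- >>> ffi "%0 + %1" ["x", "y"]
-- "x + y"
--
-- >>> ffi "100%%" []
-- "100%"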
compileJS :: JS -> T.Text
compileJS = compileJS' 0
compileJS' :: Int -> JS -> T.Text
compileJS' indent JSNoop = ""
compileJS' indent (JSAnnotation annotation js) =
"/** @"
`T.append` T.pack (show annotation)
`T.append` " */\n"
`T.append` compileJS' indent js
compileJS' indent (JSDelete js) =
"delete " `T.append` compileJS' 0 js
compileJS' indent (JSClear js) =
compileJS' 0 js `T.append` " = undefined"
compileJS' indent (JSFFI raw args) =
ffi raw (map (T.unpack . compileJS' indent) args)
compileJS' indent (JSRaw code) =
T.pack code
compileJS' indent (JSIdent ident) =
T.pack ident
compileJS' indent (JSFunction args body) =
T.replicate indent " " `T.append` "function("
`T.append` T.intercalate "," (map T.pack args)
`T.append` "){\n"
`T.append` compileJS' (indent + 2) body
`T.append` "\n}\n"
compileJS' indent (JSType ty)
| JSIntTy <- ty = "i$Int"
| JSStringTy <- ty = "i$String"
| JSIntegerTy <- ty = "i$Integer"
| JSFloatTy <- ty = "i$Float"
| JSCharTy <- ty = "i$Char"
| JSPtrTy <- ty = "i$Ptr"
| JSForgotTy <- ty = "i$Forgot"
compileJS' indent (JSSeq seq) =
T.intercalate ";\n" (
map (
(T.replicate indent " " `T.append`) . (compileJS' indent)
) $ filter (/= JSNoop) seq
) `T.append` ";"
compileJS' indent (JSReturn val) =
"return " `T.append` compileJS' indent val
compileJS' indent (JSApp lhs rhs)
| JSFunction {} <- lhs =
T.concat ["(", compileJS' indent lhs, ")(", args, ")"]
| otherwise =
T.concat [compileJS' indent lhs, "(", args, ")"]
where args :: T.Text
args = T.intercalate "," $ map (compileJS' 0) rhs
compileJS' indent (JSNew name args) =
"new "
`T.append` T.pack name
`T.append` "("
`T.append` T.intercalate "," (map (compileJS' 0) args)
`T.append` ")"
compileJS' indent (JSError exc) =
"(function(){throw new Error(\"" `T.append` T.pack exc `T.append` "\")})()"
compileJS' indent (JSBinOp op lhs rhs) =
compileJS' indent lhs
`T.append` " "
`T.append` T.pack op
`T.append` " "
`T.append` compileJS' indent rhs
compileJS' indent (JSPreOp op val) =
T.pack op `T.append` "(" `T.append` compileJS' indent val `T.append` ")"
compileJS' indent (JSProj obj field)
| JSFunction {} <- obj =
T.concat ["(", compileJS' indent obj, ").", T.pack field]
| JSAssign {} <- obj =
T.concat ["(", compileJS' indent obj, ").", T.pack field]
| otherwise =
compileJS' indent obj `T.append` ('.' `T.cons` T.pack field)
compileJS' indent JSNull =
"null"
compileJS' indent JSUndefined =
"undefined"
compileJS' indent JSThis =
"this"
compileJS' indent JSTrue =
"true"
compileJS' indent JSFalse =
"false"
compileJS' indent (JSArray elems) =
"[" `T.append` T.intercalate "," (map (compileJS' 0) elems) `T.append` "]"
compileJS' indent (JSString str) =
"\"" `T.append` T.pack str `T.append` "\""
compileJS' indent (JSNum num)
| JSInt i <- num = T.pack (show i)
| JSFloat f <- num = T.pack (show f)
| JSInteger JSBigZero <- num = T.pack "i$ZERO"
| JSInteger JSBigOne <- num = T.pack "i$ONE"
| JSInteger (JSBigInt i) <- num = T.pack (show i)
| JSInteger (JSBigIntExpr e) <- num =
"i$bigInt(" `T.append` compileJS' indent e `T.append` ")"
compileJS' indent (JSAssign lhs rhs) =
compileJS' indent lhs `T.append` " = " `T.append` compileJS' indent rhs
compileJS' 0 (JSAlloc name (Just val@(JSNew _ _))) =
"var "
`T.append` T.pack name
`T.append` " = "
`T.append` compileJS' 0 val
`T.append` ";\n"
compileJS' indent (JSAlloc name val) =
"var "
`T.append` T.pack name
`T.append` maybe "" ((" = " `T.append`) . compileJS' indent) val
compileJS' indent (JSIndex lhs rhs) =
compileJS' indent lhs
`T.append` "["
`T.append` compileJS' indent rhs
`T.append` "]"
compileJS' indent (JSCond branches) =
T.intercalate " else " $ map createIfBlock branches
where
createIfBlock (JSNoop, e@(JSSeq _)) =
"{\n"
`T.append` compileJS' (indent + 2) e
`T.append` "\n" `T.append` T.replicate indent " " `T.append` "}"
createIfBlock (JSNoop, e) =
"{\n"
`T.append` compileJS' (indent + 2) e
`T.append` ";\n" `T.append` T.replicate indent " " `T.append` "}"
createIfBlock (cond, e@(JSSeq _)) =
"if (" `T.append` compileJS' indent cond `T.append`") {\n"
`T.append` compileJS' (indent + 2) e
`T.append` "\n" `T.append` T.replicate indent " " `T.append` "}"
createIfBlock (cond, e) =
"if (" `T.append` compileJS' indent cond `T.append`") {\n"
`T.append` T.replicate (indent + 2) " "
`T.append` compileJS' (indent + 2) e
`T.append` ";\n"
`T.append` T.replicate indent " "
`T.append` "}"
compileJS' indent (JSSwitch val [(_,JSSeq seq)] Nothing) =
let (h,t) = splitAt 1 seq in
(T.concat (map (compileJS' indent) h) `T.append` ";\n")
`T.append` (
T.intercalate ";\n" $ map (
(T.replicate indent " " `T.append`) . compileJS' indent
) t
)
compileJS' indent (JSSwitch val branches def) =
"switch(" `T.append` compileJS' indent val `T.append` "){\n"
`T.append` T.concat (map mkBranch branches)
`T.append` mkDefault def
`T.append` T.replicate indent " " `T.append` "}"
where
mkBranch :: (JS, JS) -> T.Text
mkBranch (tag, code) =
T.replicate (indent + 2) " "
`T.append` "case "
`T.append` compileJS' indent tag
`T.append` ":\n"
`T.append` compileJS' (indent + 4) code
`T.append` "\n"
`T.append` (T.replicate (indent + 4) " " `T.append` "break;\n")
mkDefault :: Maybe JS -> T.Text
mkDefault Nothing = ""
mkDefault (Just def) =
T.replicate (indent + 2) " " `T.append` "default:\n"
`T.append` compileJS' (indent + 4) def
`T.append` "\n"
compileJS' indent (JSTernary cond true false) =
let c = compileJS' indent cond
t = compileJS' indent true
f = compileJS' indent false in
"("
`T.append` c
`T.append` ")?("
`T.append` t
`T.append` "):("
`T.append` f
`T.append` ")"
compileJS' indent (JSParens js) =
"(" `T.append` compileJS' indent js `T.append` ")"
compileJS' indent (JSWhile cond body) =
"while (" `T.append` compileJS' indent cond `T.append` ") {\n"
`T.append` compileJS' (indent + 2) body
`T.append` "\n" `T.append` T.replicate indent " " `T.append` "}"
compileJS' indent (JSWord word)
| JSWord8 b <- word =
"new Uint8Array([" `T.append` T.pack (show b) `T.append` "])"
| JSWord16 b <- word =
"new Uint16Array([" `T.append` T.pack (show b) `T.append` "])"
| JSWord32 b <- word =
"new Uint32Array([" `T.append` T.pack (show b) `T.append` "])"
| JSWord64 b <- word =
"i$bigInt(\"" `T.append` T.pack (show b) `T.append` "\")"
jsInstanceOf :: JS -> String -> JS
jsInstanceOf obj cls = JSBinOp "instanceof" obj (JSIdent cls)
jsOr :: JS -> JS -> JS
jsOr lhs rhs = JSBinOp "||" lhs rhs
jsAnd :: JS -> JS -> JS
jsAnd lhs rhs = JSBinOp "&&" lhs rhs
jsMeth :: JS -> String -> [JS] -> JS
jsMeth obj meth args = JSApp (JSProj obj meth) args
jsCall :: String -> [JS] -> JS
jsCall fun args = JSApp (JSIdent fun) args
jsTypeOf :: JS -> JS
jsTypeOf js = JSPreOp "typeof " js
jsEq :: JS -> JS -> JS
jsEq lhs@(JSNum (JSInteger _)) rhs = JSApp (JSProj lhs "equals") [rhs]
jsEq lhs rhs@(JSNum (JSInteger _)) = JSApp (JSProj lhs "equals") [rhs]
jsEq lhs rhs = JSBinOp "==" lhs rhs
jsNotEq :: JS -> JS -> JS
jsNotEq lhs rhs = JSBinOp "!=" lhs rhs
jsIsNumber :: JS -> JS
jsIsNumber js = (jsTypeOf js) `jsEq` (JSString "number")
jsIsNull :: JS -> JS
jsIsNull js = JSBinOp "==" js JSNull
jsBigInt :: JS -> JS
jsBigInt (JSString "0") = JSNum (JSInteger JSBigZero)
jsBigInt (JSString "1") = JSNum (JSInteger JSBigOne)
jsBigInt js = JSNum $ JSInteger $ JSBigIntExpr js
jsUnPackBits :: JS -> JS
jsUnPackBits js = JSIndex js $ JSNum (JSInt 0)
jsPackUBits8 :: JS -> JS
jsPackUBits8 js = JSNew "Uint8Array" [JSArray [js]]
jsPackUBits16 :: JS -> JS
jsPackUBits16 js = JSNew "Uint16Array" [JSArray [js]]
jsPackUBits32 :: JS -> JS
jsPackUBits32 js = JSNew "Uint32Array" [JSArray [js]]
jsPackSBits8 :: JS -> JS
jsPackSBits8 js = JSNew "Int8Array" [JSArray [js]]
jsPackSBits16 :: JS -> JS
jsPackSBits16 js = JSNew "Int16Array" [JSArray [js]]
jsPackSBits32 :: JS -> JS
jsPackSBits32 js = JSNew "Int32Array" [JSArray [js]]
| tpsinnem/Idris-dev | src/IRTS/JavaScript/AST.hs | bsd-3-clause | 11,714 | 0 | 15 | 3,187 | 4,432 | 2,338 | 2,094 | 321 | 10 |
{-# LANGUAGE OverloadedStrings #-}
import Network.Wai
import Network.Wai.Handler.Warp
import Data.ByteString.Lazy.Char8 (pack)
import qualified Data.ByteString.Char8 as S
main = run 3000 app
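-- Illustrative behaviour (assuming the attic-era WAI in which 'pathInfo'
-- is the raw ByteString path): a request to http://localhost:3000/404
-- should be answered with HTTP status 404 and the body
-- "The status code is 404. Have a nice day!".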
app req =
return $ responseLBS (Status s' s) [("Content-Type", "text/plain")]
$ pack $ concat
[ "The status code is "
, S.unpack s
, ". Have a nice day!"
]
where
s = S.dropWhile (== '/') $ pathInfo req
s' = read $ S.unpack s
| dylex/wai | warp/attic/statuses.hs | mit | 475 | 0 | 10 | 126 | 141 | 79 | 62 | 14 | 1 |
import GHC.Conc
import GHC.IO
import GHC.IO.FD as FD
import System.Posix.IO
import System.Posix.Types
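-- This test presumably checks that after 'unregister' the wait action from
-- 'threadWaitReadSTM' never fires (so the 'orElse' fallback is taken both
-- before and after data is written to the pipe), while a freshly registered
-- wait action completes once data is available on the read end.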
main = do
(rfd,wfd) <- createPipe
(waitread, unregister) <- threadWaitReadSTM rfd
unregister
result0 <- atomically $ (fmap (const False) waitread) `orElse` return True
print result0
fdWrite wfd "test"
threadDelay 20000
result1 <- atomically $ (fmap (const False) waitread) `orElse` return True
print result1
(waitread1, _) <- threadWaitReadSTM rfd
threadDelay 20000
result2 <- atomically $ (fmap (const True) waitread1) `orElse` return False
print result2
| ezyang/ghc | libraries/base/tests/T12852.hs | bsd-3-clause | 585 | 0 | 13 | 102 | 224 | 111 | 113 | 19 | 1 |
-- Test that DuplicateRecordFields doesn't affect the metadata
-- generated by GHC.Generics or Data.Data
-- Based on a Stack Overflow post by bennofs
-- (http://stackoverflow.com/questions/24474581)
-- licensed under cc by-sa 3.0
{-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE TypeFamilies #-}
import GHC.Generics
import Data.Data
import Data.Proxy
type family FirstSelector (f :: * -> *) :: Meta
type instance FirstSelector (M1 D x f) = FirstSelector f
type instance FirstSelector (M1 C x f) = FirstSelector f
type instance FirstSelector (a :*: b) = FirstSelector a
type instance FirstSelector (M1 S s f) = s
data SelectorProxy (s :: Meta) (f :: * -> *) a = SelectorProxy
type SelectorProxy' (s :: Meta) = SelectorProxy s Proxy ()
-- Extract the first selector name using GHC.Generics
firstSelectorName :: forall a. Selector (FirstSelector (Rep a))
=> Proxy a -> String
firstSelectorName _ =
selName (SelectorProxy :: SelectorProxy' (FirstSelector (Rep a)))
-- Extract the list of selector names for a constructor using Data.Data
selectorNames :: Data a => a -> [String]
selectorNames = constrFields . toConstr
data T = MkT { foo :: Int } deriving (Data, Generic)
data U = MkU { foo :: Int, bar :: Bool } deriving (Data, Generic)
main = do -- This should yield "foo", not "$sel:foo:MkT"
print (firstSelectorName (Proxy :: Proxy T))
-- Similarly this should yield "foo"
print (firstSelectorName (Proxy :: Proxy U))
-- This should yield ["foo"]
print (selectorNames (MkT 3))
-- And this should yield ["foo","bar"]
print (selectorNames (MkU 3 True))
| ezyang/ghc | testsuite/tests/overloadedrecflds/should_run/overloadedrecflds_generics.hs | bsd-3-clause | 1,851 | 0 | 11 | 373 | 427 | 239 | 188 | 31 | 1 |
{-# LANGUAGE StandaloneDeriving #-}
module T5513 where
deriving instance lowercase_name a
| urbanslug/ghc | testsuite/tests/rename/should_fail/T5513.hs | bsd-3-clause | 91 | 0 | 5 | 12 | 13 | 8 | 5 | -1 | -1 |