code
stringlengths 5
1.03M
| repo_name
stringlengths 5
90
| path
stringlengths 4
158
| license
stringclasses 15
values | size
int64 5
1.03M
| n_ast_errors
int64 0
53.9k
| ast_max_depth
int64 2
4.17k
| n_whitespaces
int64 0
365k
| n_ast_nodes
int64 3
317k
| n_ast_terminals
int64 1
171k
| n_ast_nonterminals
int64 1
146k
| loc
int64 -1
37.3k
| cycloplexity
int64 -1
1.31k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|
--
-- Copyright 2014, NICTA
--
-- This software may be distributed and modified according to the terms of
-- the BSD 2-Clause license. Note that NO WARRANTY is provided.
-- See "LICENSE_BSD2.txt" for details.
--
-- @TAG(NICTA_BSD)
--
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeSynonymInstances #-}
module CapDL.PrintUtils where
import CapDL.Model
import Text.PrettyPrint
import Data.Maybe (fromMaybe)
import Data.Word
import qualified Data.Set as Set
import Numeric
-- | Advance the last element of a list, e.g. @listSucc [1,2,3] == [1,2,4]@.
--
-- The empty list is returned unchanged; the previous implementation used
-- 'init'\/'last' and crashed on @[]@.
listSucc :: Enum a => [a] -> [a]
listSucc [] = []
listSucc list = init list ++ [succ (last list)]
-- | Types usable as slot indices when pretty-printing: they can be rendered
-- and tested for "is the immediate successor", which drives the range
-- compression done elsewhere in this module.
class (Show a, Eq a) => Printing a where
    -- | @isSucc a b@ is True when @b@ directly follows @a@.
    isSucc :: a -> a -> Bool
    -- | Render the index as a document.
    num :: a -> Doc

instance Printing Word where
    isSucc first second = succ first == second
    num n = int (fromIntegral n)

-- Lists of words count as indices too: succession advances the last element.
instance Printing [Word] where
    isSucc first second = listSucc first == second
    num ns = hsep $ punctuate comma (map num ns)
-- | Render a machine word as a \"0x\"-prefixed lower-case hex string.
hex :: Word -> String
hex value = showString "0x" (showHex value "")
--Horrible hack for integral log base 2.
-- | Integral log base 2: halve until reaching one, counting the steps onto
-- the caller-supplied accumulator.  Diverges when @n == 0@ (0 `div` 2 == 0),
-- so callers must pass a positive word.
logBase2 :: Word -> Int -> Int
logBase2 n acc
    | n == 1    = acc
    | otherwise = logBase2 (n `div` 2) (acc + 1)
-- | Render an object ID: just the name, or @name[index]@ when the ID
-- carries a copy index.
showID :: ObjID -> String
showID (name, mIndex) =
    name ++ maybe "" (\i -> "[" ++ show i ++ "]") mIndex
-- | Parenthesise a document, or stay 'empty' when the document is empty
-- (avoids printing a spurious "()").
maybeParens :: Doc -> Doc
maybeParens doc = if isEmpty doc then empty else parens doc

-- | Comma-separate the non-empty documents and parenthesise the result;
-- yields 'empty' when every argument is empty.
maybeParensList :: [Doc] -> Doc
maybeParensList docs =
    maybeParens (hsep (punctuate comma kept))
  where
    kept = [d | d <- docs, not (isEmpty d)]
prettyBits bits = num bits <+> text "bits"
prettyMBits mbits =
case mbits of
Nothing -> empty
Just bits -> prettyBits bits
prettyLevel l = text "level" <> colon <+> num l
--Is there a better way to do this?
prettyVMSize vmSz =
if vmSz >= 2^20
then num (vmSz `div` (2^20)) <> text "M"
else num (vmSz `div` (2^10)) <> text "k"
prettyPaddr :: Maybe Word -> Doc
prettyPaddr Nothing = empty
prettyPaddr (Just p) = text "paddr:" <+> (text $ hex p)
prettyPortsSize :: Word -> Doc
prettyPortsSize size = num (size `div` (2^10)) <> text "k ports"
prettyAddr :: Word -> Doc
prettyAddr addr = text "addr:" <+> num addr
prettyIP :: Maybe Word -> Doc
prettyIP Nothing = empty
prettyIP (Just ip) = text "ip:" <+> num ip
prettySP :: Maybe Word -> Doc
prettySP Nothing = empty
prettySP (Just sp) = text "sp:" <+> num sp
prettyElf :: Maybe String -> Doc
prettyElf Nothing = empty
prettyElf (Just elf) = text "elf:" <+> text elf
prettyPrio :: Maybe Integer -> Doc
prettyPrio Nothing = empty
prettyPrio (Just prio) = text "prio:" <+> (text $ show prio)
prettyMaxPrio :: Maybe Integer -> Doc
prettyMaxPrio Nothing = empty
prettyMaxPrio (Just max_prio) = text "max_prio:" <+> (text $ show max_prio)
prettyCrit :: Maybe Integer -> Doc
prettyCrit Nothing = empty
prettyCrit (Just crit) = text "crit:" <+> (text $ show crit)
prettyMaxCrit :: Maybe Integer -> Doc
prettyMaxCrit Nothing = empty
prettyMaxCrit (Just max_crit) = text "max_crit:" <+> (text $ show max_crit)
prettyDom :: Integer -> Doc
prettyDom dom = text "dom:" <+> (text $ show dom)
-- | Render a TCB's optional extra info as a comma-separated list of only
-- the fields that are present; 'empty' when there is no extra info at all.
prettyExtraInfo :: Maybe TCBExtraInfo -> Doc
prettyExtraInfo Nothing = empty
prettyExtraInfo (Just (TCBExtraInfo addr ip sp elf prio max_prio crit max_crit)) =
    -- Drop empty documents first so absent fields do not leave stray commas.
    hsep $ punctuate comma $ filter (not . isEmpty)
        [prettyAddr addr, prettyIP ip, prettySP sp, prettyElf elf, prettyPrio prio, prettyMaxPrio max_prio, prettyCrit crit, prettyMaxCrit max_crit]
prettyInitArguments :: [Word] -> Doc
prettyInitArguments [] = empty
prettyInitArguments init =
text "init:" <+> brackets (hsep $ punctuate comma $ map num init)
prettyDomainID :: Word -> Doc
prettyDomainID dom = text "domainID:" <+> num dom
prettyPeriod :: Maybe Word -> Doc
prettyPeriod Nothing = empty
prettyPeriod (Just period) = text "period:" <+> (text $ show period)
prettyDeadline :: Maybe Word -> Doc
prettyDeadline Nothing = empty
prettyDeadline (Just deadline) = text "deadline:" <+> (text $ show deadline)
prettyExecReq :: Maybe Word -> Doc
prettyExecReq Nothing = empty
prettyExecReq (Just exec_req) = text "exec_req:" <+> (text $ show exec_req)
prettyFlags :: Maybe Integer -> Doc
prettyFlags Nothing = empty
prettyFlags (Just flags) = text "flags:" <+> (text $ show flags)
prettySCExtraInfo :: Maybe SCExtraInfo -> Doc
prettySCExtraInfo Nothing = empty
prettySCExtraInfo (Just (SCExtraInfo period deadline exec_req flags)) =
hsep $ punctuate comma $ filter (not . isEmpty)
[prettyPeriod period, prettyDeadline deadline, prettyExecReq exec_req, prettyFlags flags]
prettyPCIDevice :: (Word, Word, Word) -> Doc
prettyPCIDevice (pci_bus, pci_dev, pci_fun) =
num pci_bus <> colon <> num pci_dev <> text "." <> num pci_fun
-- | Render a kernel object's type keyword plus its printable parameters,
-- e.g. @tcb (dom: 0)@ or @frame (4k)@.  Parameter lists collapse to
-- nothing via 'maybeParensList' when every field is empty.
prettyObjParams obj = case obj of
    Endpoint -> text "ep"
    AsyncEndpoint -> text "aep"
    TCB _ extra dom init ->
        text "tcb" <+> maybeParensList [prettyExtraInfo extra, prettyDom dom, prettyInitArguments init]
    CNode _ 0 -> text "irq" --FIXME: This should check if the obj is in the irqNode
    CNode _ bits -> text "cnode" <+> maybeParensList [prettyBits bits]
    Untyped mbits -> text "ut" <+> maybeParensList [prettyMBits mbits]
    ASIDPool {} -> text "asid_pool"
    PT {} -> text "pt"
    PD {} -> text "pd"
    Frame vmSz paddr -> text "frame" <+> maybeParensList [prettyVMSize vmSz, prettyPaddr paddr]
    IOPT _ level -> text "io_pt" <+> maybeParensList [prettyLevel level]
    IOPorts size -> text "io_ports" <+> maybeParensList [prettyPortsSize size]
    IODevice _ dom pci -> text "io_device" <+> maybeParensList [prettyDomainID dom,
                                                               prettyPCIDevice pci]
    VCPU {} -> text "vcpu"
    SC extra -> text "sc" <+> maybeParensList [prettySCExtraInfo extra]
capParams [] = empty
capParams xs = parens (hsep $ punctuate comma xs)
-- | The longest strictly ascending run of consecutive words at the front of
-- the list, e.g. @[Just 1, Just 2, Just 4]@ yields @[1,2]@.
--
-- NOTE(review): the patterns are not exhaustive -- any list (other than the
-- singleton @[Just x]@) whose first or second element is 'Nothing' triggers
-- a runtime pattern-match failure.  Callers must pre-filter 'Nothing's.
successiveWordsUp :: [Maybe Word] -> [Word]
successiveWordsUp [] = []
successiveWordsUp [Just x] = [x]
successiveWordsUp ls@((Just first):(Just second):xs)
    | succ first == second = first:(successiveWordsUp (tail ls))
    | otherwise = [first]
-- | The longest strictly descending run of consecutive words at the front
-- of the list; mirror image of 'successiveWordsUp'.
--
-- NOTE(review): same non-exhaustive patterns as 'successiveWordsUp' --
-- 'Nothing' entries (beyond the handled cases) crash at runtime.
successiveWordsDown :: [Maybe Word] -> [Word]
successiveWordsDown [] = []
successiveWordsDown [Just x] = [x]
successiveWordsDown ls@((Just first):(Just second):xs)
    | first == succ second = first:(successiveWordsDown (tail ls))
    | otherwise = [first]
-- | The longest monotone run at the front of the list: prefer the ascending
-- run, falling back to the descending one when the ascending run is only a
-- single element.
successiveWords :: [Maybe Word] -> [Word]
successiveWords [] = []
successiveWords list = if length up == 1 then down else up
  where up = successiveWordsUp list
        down = successiveWordsDown list

-- | Split a list into its successive monotone runs.
--
-- NOTE(review): termination relies on 'successiveWords' returning a
-- non-empty run for non-empty input; a zero-length run would make
-- @drop 0@ a no-op and loop forever.
breakSuccessive :: [Maybe Word] -> [[Word]]
breakSuccessive [] = []
breakSuccessive list = range:(breakSuccessive (drop (length range) list))
  where range = successiveWords list
-- | Render a run of words as @first..last@; a singleton prints as itself.
--
-- NOTE(review): partial -- 'head'\/'last' fail on an empty range, so callers
-- must only pass the non-empty runs produced by 'breakSuccessive'.
prettyRange :: [Word] -> Doc
prettyRange [x] = num x
prettyRange range =
    num (head range) <> text ".." <> num (last range)

-- | Render a word list as comma-separated runs, e.g. @1..3, 7, 9..10@.
prettyRanges :: [Maybe Word] -> Doc
prettyRanges range =
    hsep $ punctuate comma $ map prettyRange ranges
  where ranges = breakSuccessive range

-- | Bracketed range list; the special single-'Nothing' case renders as
-- nothing at all.
prettyBrackets :: [Maybe Word] -> Doc
prettyBrackets [Nothing] = empty
prettyBrackets list = brackets (prettyRanges list)
prettyParemNum t n = [text t <> colon <+> num n]
maybeNum t 0 = []
maybeNum t n = prettyParemNum t n
maybeBadge = maybeNum "badge"
prettyRight _ Read = text "R"
prettyRight _ Write = text "W"
prettyRight True Grant = text "X"
prettyRight False Grant = text "G"
maybeRightsList _ [] = []
maybeRightsList isFrame xs = [hcat (map (prettyRight isFrame) xs)]
maybeRights isFrame r = maybeRightsList isFrame (Set.toList r)
maybeGuard = maybeNum "guard"
maybeGSize = maybeNum "guard_size"
portsRange ports =
[text "ports:" <+> prettyBrackets (map Just (Set.toList ports))]
zombieNum n = [text "zombie" <> colon <+> num n]
printAsid (high, low) = text "(" <> num high <> text ", " <> num low <> text ")"
prettyAsid asid = [text "asid:" <+> printAsid asid]
maybeAsid Nothing = []
maybeAsid (Just asid) = prettyAsid asid
maybeCapParams :: Cap -> Doc
maybeCapParams cap = case cap of
EndpointCap _ badge rights ->
capParams (maybeBadge badge ++ maybeRights False rights)
AsyncEndpointCap _ badge rights ->
capParams (maybeBadge badge ++ maybeRights False rights)
ReplyCap _ -> capParams [text "reply"]
MasterReplyCap _ -> capParams [text "master_reply"]
CNodeCap _ guard gsize ->
capParams (maybeGuard guard ++ maybeGSize gsize)
FrameCap _ rights asid cached -> capParams (maybeRights True rights ++ maybeAsid asid ++
(if cached then [] else [text "uncached"]))
PTCap _ asid -> capParams (maybeAsid asid)
PDCap _ asid -> capParams (maybeAsid asid)
ASIDPoolCap _ asid -> capParams (prettyAsid asid)
IOPortsCap _ ports -> capParams (portsRange ports)
_ -> empty
printCap :: Cap -> Doc
printCap cap = case cap of
NullCap -> text "null"
IOSpaceMasterCap -> text ioSpaceMaster
ASIDControlCap -> text asidControl
IRQControlCap -> text irqControl
DomainCap -> text domain
SchedControlCap -> text schedControl
_ -> text $ fst $ objID cap
-- | Object IDs refer to the same object family when their name components
-- match, regardless of any copy index.
sameName :: ObjID -> ObjID -> Bool
sameName lhs rhs = fst lhs == fst rhs
sameParams :: Cap -> Cap -> Bool
sameParams cap1 cap2 =
case (cap1, cap2) of
((EndpointCap _ b1 r1), (EndpointCap _ b2 r2)) -> b1 == b2 && r1 == r2
((AsyncEndpointCap _ b1 r1), (AsyncEndpointCap _ b2 r2)) ->
b1 == b2 && r1 == r2
((CNodeCap _ g1 gs1), (CNodeCap _ g2 gs2)) ->
g1 == g2 && gs1 == gs2
((FrameCap _ r1 a1 c1), (FrameCap _ r2 a2 c2)) -> r1 == r2 && a1 == a2 && c1 == c2
((PTCap _ a1), (PTCap _ a2)) -> a1 == a2
((PDCap _ a1), (PDCap _ a2)) -> a1 == a2
_ -> True
-- | Two caps share a name only when both carry an object ID with a copy
-- index ('Just'); caps without an ID, or with an un-indexed ID, never match.
sameCapName :: Cap -> Cap -> Bool
sameCapName first second
    | not (hasObjID first) || not (hasObjID second) = False
    | snd (objID first) == Nothing || snd (objID second) == Nothing = False
    | otherwise = sameName (objID first) (objID second)

-- | Caps are "the same" for array-folding purposes when both their names
-- and their printed parameters agree.
sameCap :: Cap -> Cap -> Bool
sameCap first second =
    sameCapName first second && sameParams first second
class Arrayable a where
isSameArray :: a -> a -> Bool
instance Arrayable Cap where
isSameArray = sameCap
instance Arrayable ObjID where
isSameArray = sameName
-- | The longest prefix of (slot, value) pairs forming an "array": slot
-- indices that are successive (per 'isSucc') holding values that
-- 'isSameArray' considers equal.
sameArray :: (Printing a, Arrayable b) => [(a, b)] -> [(a, b)]
sameArray [] = []
sameArray [x] = [x]
sameArray ls@(x@(slot1, first):(slot2, second):xs)
    | isSameArray first second && isSucc slot1 slot2 = x:(sameArray (tail ls))
    | otherwise = [x]
same :: Printing a => (ObjID, KernelObject a) -> (ObjID, KernelObject a) -> Bool
same (name1, obj1) (name2, obj2) =
if (hasSlots obj1 && hasSlots obj2)
then sameName name1 name2 && slots obj1 == slots obj2
else sameName name1 name2
prettyArch ARM11 = text "arm11"
prettyArch IA32 = text "ia32"
| smaccm/capDL-tool | CapDL/PrintUtils.hs | bsd-2-clause | 10,632 | 2 | 14 | 2,221 | 4,114 | 2,045 | 2,069 | 243 | 15 |
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : QStyleOptionGraphicsItem.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:20
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Gui.QStyleOptionGraphicsItem (
QqStyleOptionGraphicsItem(..)
,QqStyleOptionGraphicsItem_nf(..)
,qexposedRect, exposedRect
,levelOfDetail
,qsetExposedRect, setExposedRect
,setLevelOfDetail
,qStyleOptionGraphicsItem_delete
)
where
import Foreign.C.Types
import Qth.ClassTypes.Core
import Qtc.Enums.Base
import Qtc.Classes.Base
import Qtc.Classes.Qccs
import Qtc.Classes.Core
import Qtc.ClassTypes.Core
import Qth.ClassTypes.Core
import Qtc.Classes.Gui
import Qtc.ClassTypes.Gui
class QqStyleOptionGraphicsItem x1 where
qStyleOptionGraphicsItem :: x1 -> IO (QStyleOptionGraphicsItem ())
instance QqStyleOptionGraphicsItem (()) where
qStyleOptionGraphicsItem ()
= withQStyleOptionGraphicsItemResult $
qtc_QStyleOptionGraphicsItem
foreign import ccall "qtc_QStyleOptionGraphicsItem" qtc_QStyleOptionGraphicsItem :: IO (Ptr (TQStyleOptionGraphicsItem ()))
instance QqStyleOptionGraphicsItem ((QStyleOptionGraphicsItem t1)) where
qStyleOptionGraphicsItem (x1)
= withQStyleOptionGraphicsItemResult $
withObjectPtr x1 $ \cobj_x1 ->
qtc_QStyleOptionGraphicsItem1 cobj_x1
foreign import ccall "qtc_QStyleOptionGraphicsItem1" qtc_QStyleOptionGraphicsItem1 :: Ptr (TQStyleOptionGraphicsItem t1) -> IO (Ptr (TQStyleOptionGraphicsItem ()))
class QqStyleOptionGraphicsItem_nf x1 where
qStyleOptionGraphicsItem_nf :: x1 -> IO (QStyleOptionGraphicsItem ())
instance QqStyleOptionGraphicsItem_nf (()) where
qStyleOptionGraphicsItem_nf ()
= withObjectRefResult $
qtc_QStyleOptionGraphicsItem
instance QqStyleOptionGraphicsItem_nf ((QStyleOptionGraphicsItem t1)) where
qStyleOptionGraphicsItem_nf (x1)
= withObjectRefResult $
withObjectPtr x1 $ \cobj_x1 ->
qtc_QStyleOptionGraphicsItem1 cobj_x1
qexposedRect :: QStyleOptionGraphicsItem a -> (()) -> IO (QRectF ())
qexposedRect x0 ()
= withQRectFResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QStyleOptionGraphicsItem_exposedRect cobj_x0
foreign import ccall "qtc_QStyleOptionGraphicsItem_exposedRect" qtc_QStyleOptionGraphicsItem_exposedRect :: Ptr (TQStyleOptionGraphicsItem a) -> IO (Ptr (TQRectF ()))
exposedRect :: QStyleOptionGraphicsItem a -> (()) -> IO (RectF)
exposedRect x0 ()
= withRectFResult $ \crectf_ret_x crectf_ret_y crectf_ret_w crectf_ret_h ->
withObjectPtr x0 $ \cobj_x0 ->
qtc_QStyleOptionGraphicsItem_exposedRect_qth cobj_x0 crectf_ret_x crectf_ret_y crectf_ret_w crectf_ret_h
foreign import ccall "qtc_QStyleOptionGraphicsItem_exposedRect_qth" qtc_QStyleOptionGraphicsItem_exposedRect_qth :: Ptr (TQStyleOptionGraphicsItem a) -> Ptr CDouble -> Ptr CDouble -> Ptr CDouble -> Ptr CDouble -> IO ()
levelOfDetail :: QStyleOptionGraphicsItem a -> (()) -> IO (Double)
levelOfDetail x0 ()
= withDoubleResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QStyleOptionGraphicsItem_levelOfDetail cobj_x0
foreign import ccall "qtc_QStyleOptionGraphicsItem_levelOfDetail" qtc_QStyleOptionGraphicsItem_levelOfDetail :: Ptr (TQStyleOptionGraphicsItem a) -> IO CDouble
instance Qmatrix (QStyleOptionGraphicsItem a) (()) where
matrix x0 ()
= withQMatrixResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QStyleOptionGraphicsItem_matrix cobj_x0
foreign import ccall "qtc_QStyleOptionGraphicsItem_matrix" qtc_QStyleOptionGraphicsItem_matrix :: Ptr (TQStyleOptionGraphicsItem a) -> IO (Ptr (TQMatrix ()))
qsetExposedRect :: QStyleOptionGraphicsItem a -> ((QRectF t1)) -> IO ()
qsetExposedRect x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QStyleOptionGraphicsItem_setExposedRect cobj_x0 cobj_x1
foreign import ccall "qtc_QStyleOptionGraphicsItem_setExposedRect" qtc_QStyleOptionGraphicsItem_setExposedRect :: Ptr (TQStyleOptionGraphicsItem a) -> Ptr (TQRectF t1) -> IO ()
setExposedRect :: QStyleOptionGraphicsItem a -> ((RectF)) -> IO ()
setExposedRect x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withCRectF x1 $ \crectf_x1_x crectf_x1_y crectf_x1_w crectf_x1_h ->
qtc_QStyleOptionGraphicsItem_setExposedRect_qth cobj_x0 crectf_x1_x crectf_x1_y crectf_x1_w crectf_x1_h
foreign import ccall "qtc_QStyleOptionGraphicsItem_setExposedRect_qth" qtc_QStyleOptionGraphicsItem_setExposedRect_qth :: Ptr (TQStyleOptionGraphicsItem a) -> CDouble -> CDouble -> CDouble -> CDouble -> IO ()
setLevelOfDetail :: QStyleOptionGraphicsItem a -> ((Double)) -> IO ()
setLevelOfDetail x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QStyleOptionGraphicsItem_setLevelOfDetail cobj_x0 (toCDouble x1)
foreign import ccall "qtc_QStyleOptionGraphicsItem_setLevelOfDetail" qtc_QStyleOptionGraphicsItem_setLevelOfDetail :: Ptr (TQStyleOptionGraphicsItem a) -> CDouble -> IO ()
instance QsetMatrix (QStyleOptionGraphicsItem a) ((QMatrix t1)) where
setMatrix x0 (x1)
= withObjectPtr x0 $ \cobj_x0 ->
withObjectPtr x1 $ \cobj_x1 ->
qtc_QStyleOptionGraphicsItem_setMatrix cobj_x0 cobj_x1
foreign import ccall "qtc_QStyleOptionGraphicsItem_setMatrix" qtc_QStyleOptionGraphicsItem_setMatrix :: Ptr (TQStyleOptionGraphicsItem a) -> Ptr (TQMatrix t1) -> IO ()
qStyleOptionGraphicsItem_delete :: QStyleOptionGraphicsItem a -> IO ()
qStyleOptionGraphicsItem_delete x0
= withObjectPtr x0 $ \cobj_x0 ->
qtc_QStyleOptionGraphicsItem_delete cobj_x0
foreign import ccall "qtc_QStyleOptionGraphicsItem_delete" qtc_QStyleOptionGraphicsItem_delete :: Ptr (TQStyleOptionGraphicsItem a) -> IO ()
| keera-studios/hsQt | Qtc/Gui/QStyleOptionGraphicsItem.hs | bsd-2-clause | 5,799 | 0 | 12 | 741 | 1,324 | 681 | 643 | -1 | -1 |
{-# LANGUAGE DefaultSignatures #-}
{-# LANGUAGE RankNTypes #-}
module Arbitrary.Arbitrary
( Arbitrary1(..), genNel, smaller, maybeGen, genScope, genVar
) where
import Bound
import Control.Applicative
import Data.List.NonEmpty hiding (fromList)
import Data.Map
import Data.Monoid
import Data.Void
import Prelude.Extras
import Test.QuickCheck
import Test.QuickCheck.Function
import Test.QuickCheck.Instances
-- Orphans
instance (Arbitrary a, Arbitrary b) => Arbitrary (Var a b) where
arbitrary = oneof [ B <$> arbitrary, F <$> arbitrary ]
instance (Arbitrary b,Arbitrary v,Arbitrary1 f,Functor f) => Arbitrary (Scope b f v) where
arbitrary = Scope . fmap (fmap lower1) <$> arbitrary1
instance Arbitrary a => Arbitrary (NonEmpty a) where
arbitrary = genNel arbitrary
-- | Build a non-empty-list generator from an element generator: one
-- mandatory head plus an arbitrary-length tail.
genNel :: Gen a -> Gen (NonEmpty a)
genNel g = do
    hd <- g
    tl <- listOf g
    return (hd :| tl)
-- | Combinator for decreasing the size of a generator. Should be used when
-- generating tree structures, as relying on probability to terminate them
-- can lead to excessive memory consumption.
smaller :: Gen a -> Gen a
smaller g = sized $ \n -> resize (n`div`3) g
-- | Lift an optional generator into a (possibly empty) list of generators
-- of 'pure'-wrapped values.
maybeGen :: Applicative f => Maybe (Gen a) -> [Gen (f a)]
maybeGen = maybe [] (\g -> [fmap pure g])
-- | Given a definite generator for bound varibles, and an optional one for
-- free variables, definitely generates Vars.
-- | Generate a 'Var': a bound variable can always be produced, and a free
-- one too whenever a free-variable generator is supplied.
genVar :: Gen b -> Maybe (Gen a) -> Gen (Var b a)
genVar gb mga =
    case mga of
      Nothing -> fmap B gb
      Just ga -> oneof [fmap B gb, fmap F ga]
-- | As genVar, but allows for the possibility that bound variables cannot
-- be generated, either. Potentially useful for generating well-scoped
-- terms.
-- Delegates to 'genVar' whenever bound variables can be generated; with
-- neither generator available no 'Var' can be produced at all.
genVar' :: Maybe (Gen b) -> Maybe (Gen a) -> Maybe (Gen (Var b a))
genVar' (Just gb) mga = Just $ genVar gb mga
genVar' Nothing (Just ga) = Just (F <$> ga)
genVar' Nothing Nothing = Nothing
-- | Generates scopes with a definite supply of bound variables. The
-- higher-order generator must be able to handle a lack of free variables.
-- NOTE(review): 'gf' is applied twice -- once to build whole-term bodies
-- placed under binders, and once with leaves wrapped as 'Var's.  Assumes
-- 'gf' layers f-structure over the supplied leaf generator; confirm
-- against its call sites.
genScope :: Gen b -> (forall z. Maybe (Gen z) -> Gen (f z)) -> Maybe (Gen a)
         -> Gen (Scope b f a)
genScope gb gf mga = Scope <$> gf (Just . genVar gb . Just $ gf mga)

-- | As genScope, but with the possibility of no bound variables.
genScope' :: Maybe (Gen b) -> (forall z. Maybe (Gen z) -> Gen (f z)) -> Maybe (Gen a)
          -> Gen (Scope b f a)
genScope' mgb gf mga = Scope <$> gf (genVar' mgb . Just $ gf mga)
-- Higher-order arbitrary
class Arbitrary1 f where
arbitrary1 :: Arbitrary a => Gen (f a)
default arbitrary1 :: Arbitrary (f a) => Gen (f a)
arbitrary1 = arbitrary
instance Arbitrary a => Arbitrary1 ((,) a)
instance Arbitrary1 Maybe
instance Arbitrary1 []
instance Arbitrary a => Arbitrary1 (Either a)
instance (Arbitrary1 f, Arbitrary u) => Arbitrary (Lift1 f u) where
arbitrary = Lift1 <$> arbitrary1
| PipocaQuemada/ermine | tests/properties/Arbitrary/Arbitrary.hs | bsd-2-clause | 2,836 | 0 | 12 | 557 | 988 | 508 | 480 | 50 | 1 |
-- http://www.codewars.com/kata/5410c0e6a0e736cf5b000e69
module Hamming where
-- | Hamming distance: the number of positions at which the two strings
-- differ.  Positions beyond the shorter string are ignored (zip truncates).
hamming :: String -> String -> Int
hamming xs ys = length [() | (x, y) <- zip xs ys, x /= y]
| Bodigrim/katas | src/haskell/6-Hamming-Distance.hs | bsd-2-clause | 166 | 0 | 7 | 26 | 48 | 26 | 22 | 3 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Routes where
import Control.Applicative
import Control.Monad.Except (ExceptT, forM_, runExceptT, liftIO, throwError, when)
import qualified Data.Aeson as Json
import qualified Data.Binary as Binary
import qualified Data.ByteString.Char8 as BS
import qualified Data.ByteString.Lazy.Char8 as LBS
import qualified Data.Either as Either
import qualified Data.List as List
import qualified Data.Map as Map
import qualified Text.Read as Read
import Snap.Core
import Snap.Util.FileServe
import Snap.Util.FileUploads
import System.Directory
import System.FilePath
import qualified Elm.Compiler.Module as Module
import qualified Elm.Docs as Docs
import qualified Elm.Package as Pkg
import qualified Elm.Package.Description as Desc
import qualified Elm.Package.Paths as Path
import qualified GitHub
import qualified NewPackageList
import qualified NativeWhitelist
import qualified PackageSummary as PkgSummary
import qualified ServeFile
packages :: Snap ()
packages =
ifTop (redirect' "/" 301)
<|> route [ (":user/:name", package) ]
<|> serveDirectory "packages"
package :: Snap ()
package =
do user <- getParameter "user" Right
name <- getParameter "name" Right
let pkg = Pkg.Name user name
ifTop (ServeFile.pkgOverview pkg)
<|>
route
[ ("latest", redirectToLatest pkg)
, (":version", servePackageInfo pkg)
]
servePackageInfo :: Pkg.Name -> Snap ()
servePackageInfo name =
do version <- getParameter "version" Pkg.versionFromString
let pkgDir = packageRoot name version
exists <- liftIO $ doesDirectoryExist pkgDir
when (not exists) pass
ifTop (ServeFile.pkgDocs name version Nothing)
<|> serveModule name version
serveModule :: Pkg.Name -> Pkg.Version -> Snap ()
serveModule name version =
do request <- getRequest
let potentialName = BS.unpack (rqPathInfo request)
let docsDir = packageRoot name version </> "docs"
exists <- liftIO $ doesFileExist (docsDir </> potentialName <.> "json")
when (not exists) pass
case Module.dehyphenate potentialName of
Nothing ->
pass
Just moduleName ->
ServeFile.pkgDocs name version (Just moduleName)
redirectToLatest :: Pkg.Name -> Snap ()
redirectToLatest name =
do rawVersions <- liftIO (getDirectoryContents (packageDirectory </> Pkg.toFilePath name))
case Either.rights (map Pkg.versionFromString rawVersions) of
[] ->
httpStringError 404 $
"Could not find any versions of package " ++ Pkg.toString name
versions ->
do let latestVersion = last (List.sort versions)
let url = "/packages/" ++ Pkg.toUrl name ++ "/" ++ Pkg.versionToString latestVersion ++ "/"
request <- getRequest
redirect (BS.append (BS.pack url) (rqPathInfo request))
-- DIRECTORIES
packageDirectory :: FilePath
packageDirectory =
"packages"
packageRoot :: Pkg.Name -> Pkg.Version -> FilePath
packageRoot name version =
packageDirectory </> Pkg.toFilePath name </> Pkg.versionToString version
documentationPath :: FilePath
documentationPath =
"documentation.json"
-- REGISTER MODULES
-- | Handle a package registration request: verify the version, create the
-- package directory, accept the uploaded files, and index the package.
-- On validation failure the partially-created directory is removed and a
-- 400 response is returned.
register :: Snap ()
register =
  do name <- getParameter "name" Pkg.fromString
     version <- getParameter "version" Pkg.versionFromString
     verifyVersion name version
     let directory = packageRoot name version
     liftIO (createDirectoryIfMissing True directory)
     uploadFiles directory
     description <- Desc.read (directory </> Path.description)
     result <-
         liftIO $ runExceptT $ do
             -- Native modules are only permitted for whitelisted packages.
             verifyWhitelist (Desc.natives description) (Desc.name description)
             -- Split the uploaded documentation.json into per-module files.
             splitDocs directory
     case result of
       Right () ->
           liftIO $ do
               PkgSummary.add description
               NewPackageList.addIfNew description
       Left err ->
           -- Roll back everything created for this version before failing.
           do liftIO (removeDirectoryRecursive directory)
              httpStringError 400 err
-- | Fail with 400 unless the version is new to us locally AND already
-- tagged on GitHub.
verifyVersion :: Pkg.Name -> Pkg.Version -> Snap ()
verifyVersion name version = do
    maybeVersions <- liftIO (PkgSummary.readVersionsOf name)
    case maybeVersions of
      Just localVersions
        | version `elem` localVersions ->
            httpStringError 400
              ("Version " ++ Pkg.versionToString version ++ " has already been registered.")
      _ -> return ()
    publicVersions <- GitHub.getVersionTags name
    if version `elem` publicVersions
      then return ()
      else httpStringError 400
             ("The tag " ++ Pkg.versionToString version ++ " has not been pushed to GitHub.")
-- | Packages that do not use native modules always pass; packages that do
-- must appear on the native whitelist or the publish is rejected.
verifyWhitelist :: Bool -> Pkg.Name -> ExceptT String IO ()
verifyWhitelist allowNatives name
    | not allowNatives = return ()
    | otherwise = do
        whitelist <- liftIO NativeWhitelist.read
        if name `elem` whitelist
          then return ()
          else throwError (whitelistError name)
-- | User-facing error message for packages with native modules that are
-- not on the whitelist.  (Fixes two typos in the published message:
-- "expidite" -> "expedite", and the missing article in "a review process".)
whitelistError :: Pkg.Name -> String
whitelistError name =
    "You are trying to publish a project that has native modules, but this is not\n\
    \permitted for now.\n\
    \\n\
    \Writing native modules is very important because it will let the Elm community\n\
    \cover the whole web platform with nice community-driven packages. That said,\n\
    \it introduces many ways to break the guarantees provided by Elm, so it is very\n\
    \important that these packages are written in a reliable way.\n\
    \\n\
    \Essentially, it is clear that this is very important, but also clear that we do\n\
    \not have a good mechanism for making sure everyone can write reliable native\n\
    \modules. This is one of the big issues getting focused on in upcoming releases.\n\
    \\n\
    \For now, there is a review process to expedite certain things, but it is badly\n\
    \backed up. If you really cannot wait a few months before publishing, please\n\
    \open an issue with the title:\n\
    \\n\
    \    \"Native review for " ++ Pkg.toString name ++ "\"\n\
    \\n\
    \at <https://github.com/elm-lang/package.elm-lang.org/issues>. The issue should\n\
    \link to the relevant repository and provide sufficient context for evaluation.\n\
    \But keep in mind that the review process is significantly backed up! The\n\
    \priority is on making reviews unnecessary, and in the meantime, the fewer\n\
    \special exceptions the better."
-- UPLOADING FILES
uploadFiles :: FilePath -> Snap ()
uploadFiles directory =
handleFileUploads "/tmp" defaultUploadPolicy perPartPolicy (handleParts directory)
where
perPartPolicy info =
if Map.member (partFieldName info) filesForUpload
then allowWithMaximumSize $ 2^(19::Int)
else disallow
filesForUpload :: Map.Map BS.ByteString FilePath
filesForUpload =
Map.fromList
[ ("documentation", documentationPath)
, ("description", Path.description)
, ("readme", "README.md")
]
-- | Write each uploaded part to its target path inside @dir@.  If any part
-- violated the upload policy or is not one of the expected files, report
-- every remaining policy violation and fail with 404.
handleParts
    :: FilePath
    -> [(PartInfo, Either PolicyViolationException FilePath)]
    -> Snap ()
handleParts _dir [] =
    return ()
handleParts dir ((info, eitherPath) : parts) =
    case (eitherPath, Map.lookup (partFieldName info) filesForUpload) of
      (Right tempPath, Just targetPath) ->
          do liftIO $ do
               -- Copy the temporary upload into the package directory.
               contents <- BS.readFile tempPath
               BS.writeFile (dir </> targetPath) contents
             handleParts dir parts
      _ ->
          -- mapM_ rather than mapM: only the reporting effect matters, so
          -- do not build an unused result list.
          do mapM_ (writePartError . snd) parts
             httpStringError 404 $
                 "Files " ++ documentationPath ++ " and " ++ Path.description ++ " were not uploaded."
-- | Report a single upload part's policy violation to the client;
-- successfully uploaded parts produce no output.
writePartError :: Either PolicyViolationException FilePath -> Snap ()
writePartError =
    either (writeText . policyViolationExceptionReason) (\_ -> return ())
splitDocs :: FilePath -> ExceptT String IO ()
splitDocs directory =
do json <- liftIO (LBS.readFile (directory </> documentationPath))
case Json.decode json of
Nothing -> throwError "The uploaded documentation is invalid."
Just docs ->
liftIO $
forM_ (docs :: [Docs.Documentation]) $ \doc ->
do let name = Module.hyphenate (Docs.moduleName doc)
let docPath = directory </> "docs" </> name <.> "json"
createDirectoryIfMissing True (directory </> "docs")
LBS.writeFile docPath (Json.encode doc)
-- FETCH ALL AVAILABLE VERSIONS
versions :: Snap ()
versions =
do name <- getParameter "name" Pkg.fromString
versions <- liftIO (PkgSummary.readVersionsOf name)
writeLBS (Binary.encode versions)
-- UPDATE REMOTE PACKAGE CACHES
allPackages :: Snap ()
allPackages =
do maybeValue <- getParam "since"
maybeVersion <- fmap BS.unpack <$> getParam "elm-package-version"
let allPackagesPath =
case maybeVersion of
Just "0.16" ->
PkgSummary.allPackages
Nothing ->
PkgSummary.allPackages
Just _ ->
PkgSummary.allPackagesOld
let maybeString = fmap BS.unpack maybeValue
needsUpdate <-
case Read.readMaybe =<< maybeString of
Nothing ->
return True
Just remoteTime ->
do localTime <- liftIO (getModificationTime allPackagesPath)
return (remoteTime < localTime)
if needsUpdate || maybeVersion == Just "0.16"
then
serveFile allPackagesPath
else
writeLBS "null"
-- FETCH RESOURCES
documentation :: Snap ()
documentation =
fetch documentationPath
description :: Snap ()
description =
fetch Path.description
-- | Serve @filePath@ from the requested package/version directory, or 404
-- when that package/version has never been registered.
fetch :: FilePath -> Snap ()
fetch filePath = do
    name <- getParameter "name" Pkg.fromString
    version <- getParameter "version" Pkg.versionFromString
    let target = packageRoot name version </> filePath
    exists <- liftIO (doesFileExist target)
    if exists
      then serveFile target
      else httpError 404 "That library and version is not registered."
-- HELPERS
-- | Read a request parameter and parse it with the supplied parser,
-- failing the request with 400 when the parameter is absent or malformed.
getParameter :: BS.ByteString -> (String -> Either String a) -> Snap a
getParameter param fromString =
  do maybeValue <- getParam param
     let notFoundMsg = "could not find parameter named " ++ BS.unpack param
     -- Missing parameter becomes a Left so both failure modes flow through
     -- the same error path below.
     let eitherString = maybe (Left notFoundMsg) (Right . BS.unpack) maybeValue
     case fromString =<< eitherString of
       Right value ->
           return value
       Left problem ->
           httpError 400 $ BS.concat [ "problem with parameter '", param, "': ", BS.pack problem ]
-- | Fail the request with the given status code and a String body.
httpStringError :: Int -> String -> Snap a
httpStringError code = httpError code . BS.pack

-- | Fail the request with the given status code and a ByteString body.
httpError :: Int -> BS.ByteString -> Snap a
httpError code msg = do
    modifyResponse (setResponseCode code)
    writeBS msg
    getResponse >>= finishWith
| mgold/package.elm-lang.org | src/backend/Routes.hs | bsd-3-clause | 10,942 | 17 | 25 | 2,748 | 2,673 | 1,308 | 1,365 | 242 | 5 |
{-# LANGUAGE OverloadedStrings #-}
module Main where
import SlackBot.Config
import SlackBot.Bot
-- | Load the bot configuration from "bot.cfg" and run the bot with it.
main :: IO ()
main = run =<< readConf "bot.cfg"
| Lepovirta/slackbot | src/Main.hs | bsd-3-clause | 146 | 0 | 6 | 23 | 37 | 21 | 16 | 6 | 1 |
{-# LANGUAGE TypeOperators, MultiParamTypeClasses, FunctionalDependencies #-}
{-# LANGUAGE BangPatterns #-}
-- | This module provides data structures for Ethernet frames
-- as well as parsers and unparsers for Ethernet frames.
module Nettle.Ethernet.EthernetFrame (
-- * Data types
EthernetFrame(..)
, EthernetBody(..)
, EthernetHeader(..)
, EthernetTypeCode
, ethTypeVLAN
, ethTypeIP
, ethTypeARP
, ethTypeLLDP
, ethTypeIPv6
, ethType8021X
, ethTypePaneDP
, typeEth2Cutoff
, VLANPriority
, VLANID
, eth_ip_packet
, eth_ip_tcp_packet
, eth_ip_udp_packet
, foldEthernetFrame
, foldEthernetBody
-- * Parsers and unparsers
, getEthernetFrame
, getEthHeader
, getEthHeader2
, putEthHeader
, putEthFrame
-- * ARP frames
, arpQuery
, arpReply
) where
import Nettle.Ethernet.EthernetAddress
import Nettle.IPv4.IPPacket
import Nettle.IPv4.IPAddress
import Nettle.Ethernet.AddressResolutionProtocol
import qualified Data.ByteString as B
import Data.Binary
import Data.Binary.Get
import Data.Word
import Data.Bits
import Control.Monad
import Control.Monad.Error
import Data.HList
import qualified Data.Binary.Strict.Get as Strict
import qualified Nettle.OpenFlow.StrictPut as Strict
import qualified Data.Binary.Get as Binary
-- | An Ethernet frame is either an IP packet, an ARP packet, or an uninterpreted @ByteString@.
-- Based on http://en.wikipedia.org/wiki/File:Ethernet_Type_II_Frame_format.svg
type EthernetFrame = EthernetHeader :*: EthernetBody :*: HNil
data EthernetBody = IPInEthernet !IPPacket
| ARPInEthernet !ARPPacket
| PaneDPInEthernet Word64 Word16
| UninterpretedEthernetBody !B.ByteString
deriving (Show,Eq)
-- | Eliminate an 'EthernetFrame' (a two-element HList) by applying the
-- function to its header and body.
foldEthernetFrame :: (EthernetHeader -> EthernetBody -> a) -> EthernetFrame -> a
foldEthernetFrame f (HCons h (HCons b HNil)) = f h b
-- | Case analysis on an 'EthernetBody'.
--
-- NOTE(review): only the IP, ARP and uninterpreted constructors are
-- handled; a 'PaneDPInEthernet' value causes a runtime pattern-match
-- failure.  Either callers must never pass PaneDP bodies, or a fourth
-- handler argument should be added.
foldEthernetBody :: (IPPacket -> a) -> (ARPPacket -> a) -> (B.ByteString -> a) -> EthernetBody -> a
foldEthernetBody f g h (IPInEthernet x) = f x
foldEthernetBody f g h (ARPInEthernet x) = g x
foldEthernetBody f g h (UninterpretedEthernetBody x) = h x
withFrame :: HList l
=> (EthernetBody -> Maybe l)
-> EthernetFrame
-> Maybe (EthernetHeader :*: l)
withFrame f frame = foldEthernetFrame (\h b -> fmap (hCons h) (f b)) frame
fromIPPacket :: EthernetBody -> Maybe IPPacket
fromIPPacket = foldEthernetBody Just (const Nothing) (const Nothing)
fromARPPacket :: EthernetBody -> Maybe (ARPPacket :*: HNil)
fromARPPacket = foldEthernetBody (const Nothing) (\x -> Just (hCons x HNil)) (const Nothing)
eth_ip_packet :: EthernetFrame -> Maybe (EthernetHeader :*: IPPacket)
eth_ip_packet = withFrame fromIPPacket
eth_ip_tcp_packet :: EthernetFrame -> Maybe (EthernetHeader :*: IPHeader :*: TCPHeader :*: HNil)
eth_ip_tcp_packet = withFrame $ fromIPPacket >=> withIPPacket fromTCPPacket
eth_ip_udp_packet :: EthernetFrame -> Maybe (EthernetHeader :*: IPHeader :*: UDPHeader :*: B.ByteString :*: HNil)
eth_ip_udp_packet = withFrame $ fromIPPacket >=> withIPPacket fromUDPPacket
eth_arp_packet :: EthernetFrame -> Maybe (EthernetHeader :*: ARPPacket :*: HNil)
eth_arp_packet = withFrame fromARPPacket
data EthernetHeader = EthernetHeader { destMACAddress :: !EthernetAddress,
sourceMACAddress :: !EthernetAddress,
typeCode :: !EthernetTypeCode }
| Ethernet8021Q { destMACAddress :: !EthernetAddress,
sourceMACAddress :: !EthernetAddress,
typeCode :: !EthernetTypeCode,
priorityCodePoint :: !VLANPriority,
canonicalFormatIndicator :: !Bool,
vlanId :: !VLANID }
deriving (Read,Show,Eq)
-- | 802.1Q priority code point (only the top 3 bits of the TCI are used;
-- see 'getEthHeader').
type VLANPriority     = Word8

-- | Ethernet type code, determines the type of payload carried by an Ethernet frame.
type EthernetTypeCode = Word16

-- | 802.1Q VLAN identifier (the low 12 bits of the TCI; see 'getEthHeader').
type VLANID           = Word16
-- | Build a broadcast ARP request frame asking who holds the target IP
-- address, on behalf of the given source addresses.
arpQuery :: EthernetAddress -- ^ source hardware address
         -> IPAddress       -- ^ source IP address
         -> IPAddress       -- ^ target IP address
         -> EthernetFrame
arpQuery sha spa tpa = hCons header (hCons (ARPInEthernet query) hNil)
  where
    header = EthernetHeader { destMACAddress   = broadcastAddress
                            , sourceMACAddress = sha
                            , typeCode         = ethTypeARP
                            }
    query  = ARPQuery ARPQueryPacket { querySenderEthernetAddress = sha
                                     , querySenderIPAddress       = spa
                                     , queryTargetIPAddress       = tpa
                                     }
-- | Build a unicast ARP reply frame from the given source addresses to the
-- given target addresses.
arpReply :: EthernetAddress -- ^ source hardware address
         -> IPAddress       -- ^ source IP address
         -> EthernetAddress -- ^ target hardware address
         -> IPAddress       -- ^ target IP address
         -> EthernetFrame
arpReply sha spa tha tpa = hCons header (hCons (ARPInEthernet reply) hNil)
  where
    header = EthernetHeader { destMACAddress   = tha
                            , sourceMACAddress = sha
                            , typeCode         = ethTypeARP
                            }
    reply  = ARPReply ARPReplyPacket { replySenderEthernetAddress = sha
                                     , replySenderIPAddress       = spa
                                     , replyTargetEthernetAddress = tha
                                     , replyTargetIPAddress       = tpa
                                     }
-- | Parser for Ethernet frames: reads the header, then dispatches on its
-- type code to parse the payload.
--
-- The original wrote each case alternative as a separate @v | v == …@
-- pattern and ended with @otherwise -> …@, which binds a pattern variable
-- named @otherwise@ (shadowing 'Prelude.otherwise' and matching anything).
-- This version uses a single binder with a guard chain — same behavior,
-- no shadowing, and the fall-through is a genuine 'otherwise' guard.
getEthernetFrame :: Strict.Get EthernetFrame
getEthernetFrame = do
  hdr <- {-# SCC "getEthHeader" #-} getEthHeader
  case typeCode hdr of
    tcode
      | tcode == ethTypeIP -> do
          ipPacket <- getIPPacket
          return $ hCons hdr (hCons (IPInEthernet ipPacket) hNil)
      | tcode == ethTypeARP -> do
          mArpPacket <- getARPPacket
          case mArpPacket of
            Just arpPacket ->
              return $ hCons hdr (hCons (ARPInEthernet arpPacket) hNil)
            Nothing -> error "cannot decode arp packet"
      | tcode == ethTypePaneDP -> do
          switchID <- Strict.getWord64be
          portID   <- Strict.getWord16be
          return (hCons hdr (hCons (PaneDPInEthernet switchID portID) hNil))
      | tcode == ethTypeIPv6 -> do
          bs <- Strict.getByteString ipv6HeaderLen -- let's pretend options don't exist...
          return (hCons hdr (hCons (UninterpretedEthernetBody bs) hNil))
      | tcode == ethType8021X -> do
          bs <- Strict.getByteString eth8021xHeaderLen -- let's ignore the body for now...
          return (hCons hdr (hCons (UninterpretedEthernetBody bs) hNil))
      | otherwise -> error $ "unknown ethernet type code: " ++ show (typeCode hdr)
-- | Parser for Ethernet headers, lazy 'Binary.Get' variant.
-- NOTE(review): this appears to be a legacy duplicate of the strict
-- 'getEthHeader' below — confirm whether any caller still uses it.
getEthHeader2 :: Binary.Get EthernetHeader
getEthHeader2 = do
  dstAddr <- getEthernetAddress2
  srcAddr <- getEthernetAddress2
  tcode <- Binary.getWord16be
  if tcode < typeEth2Cutoff
    then error "don't know how to parse this kind of ethernet frame"
    else if (tcode == ethTypeVLAN)
         then do x <- Binary.getWord16be          -- the 16-bit TCI word
                 etherType <- Binary.getWord16be  -- the encapsulated type code
                 let pcp = fromIntegral (shiftR x 13)  -- top 3 bits: priority code point
                 let cfi = testBit x 12                -- bit 12: canonical format indicator
                 let vid = clearBits x [12,13,14,15]   -- low 12 bits: VLAN id
                 return (Ethernet8021Q dstAddr srcAddr etherType pcp cfi vid)
         else return (EthernetHeader dstAddr srcAddr tcode)
-- | Strict parser for Ethernet headers. Recognizes 802.1Q tagged headers
-- via 'ethTypeVLAN'; fails ('Strict.zero') for type codes below
-- 'typeEth2Cutoff' (i.e. pre-Ethernet-II length fields).
getEthHeader :: Strict.Get EthernetHeader
getEthHeader = do
  dstAddr <- getEthernetAddress
  srcAddr <- getEthernetAddress
  tcode <- Strict.getWord16be
  if tcode >= typeEth2Cutoff
    then if (tcode /= ethTypeVLAN)
           then return (EthernetHeader dstAddr srcAddr tcode)
           else do x <- Strict.getWord16be          -- the 16-bit TCI word
                   etherType <- Strict.getWord16be  -- the encapsulated type code
                   let pcp = fromIntegral (shiftR x 13)  -- top 3 bits
                   let cfi = testBit x 12                -- bit 12
                   let vid = clearBits x [12,13,14,15]   -- low 12 bits
                   return (Ethernet8021Q dstAddr srcAddr etherType pcp cfi vid)
    else Strict.zero
{-# INLINE getEthHeader #-}
-- | Unparser for Ethernet headers. For the 802.1Q form, emits the VLAN
-- type code, then the TCI word (PCP | CFI | VID), then the real type code.
putEthHeader :: EthernetHeader -> Strict.Put
putEthHeader (EthernetHeader dstAddr srcAddr tcode) =
  do putEthernetAddress dstAddr
     putEthernetAddress srcAddr
     Strict.putWord16be tcode
putEthHeader (Ethernet8021Q dstAddr srcAddr tcode pcp cfi vid) =
  do putEthernetAddress dstAddr
     putEthernetAddress srcAddr
     Strict.putWord16be ethTypeVLAN
     Strict.putWord16be x
     Strict.putWord16be tcode
  where -- assemble the TCI: PCP in bits 13-15, CFI in bit 12, VID in bits 0-11
        x = let y = shiftL (fromIntegral pcp :: Word16) 13
                y' = if cfi then setBit y 12 else y
            in y' + fromIntegral vid
-- | Unparser for whole Ethernet frames. Only the PANE datapath and
-- uninterpreted payloads can currently be serialized; IP and ARP payloads
-- deliberately raise an error (serialization not yet implemented).
putEthFrame :: EthernetFrame -> Strict.Put
putEthFrame (HCons hdr (HCons body HNil)) = do
  putEthHeader hdr
  case body of
    IPInEthernet ipPacket -> error "put method NYI for IP packets"
    ARPInEthernet arpPacket -> error "put method NYI for ARP packets"
    UninterpretedEthernetBody bs -> Strict.putByteString bs
    PaneDPInEthernet switchID portID -> do
      Strict.putWord64be switchID
      Strict.putWord16be portID
-- Best source for list: https://en.wikipedia.org/wiki/EtherType
ethTypeIP, ethTypeARP, ethTypeLLDP, ethTypeVLAN, ethTypeIPv6, ethType8021X, typeEth2Cutoff :: EthernetTypeCode
ethTypeIP      = 0x0800   -- IPv4
ethTypeARP     = 0x0806   -- ARP
ethTypeLLDP    = 0x88CC   -- LLDP
ethTypeVLAN    = 0x8100   -- 802.1Q VLAN tag
ethTypeIPv6    = 0x86DD   -- IPv6
ethType8021X   = 0x888E   -- 802.1X (EAP over LAN)
typeEth2Cutoff = 0x0600   -- values below this are 802.3 length fields, not type codes

-- | Non-standard type code used by PANE for its datapath messages.
ethTypePaneDP :: EthernetTypeCode
ethTypePaneDP = 0x0777
-- | Number of bytes consumed for an IPv6 fixed header (extension headers
-- ignored; see 'getEthernetFrame'). Explicit signature added: these
-- bindings previously had no type annotation and relied on defaulting.
ipv6HeaderLen :: Int
ipv6HeaderLen = 40

-- | Number of bytes consumed for an 802.1X header (body ignored; see
-- 'getEthernetFrame').
eth8021xHeaderLen :: Int
eth8021xHeaderLen = 4
-- | Clear each of the listed bit positions in a word, left to right.
clearBits :: Bits a => a -> [Int] -> a
clearBits w []     = w
clearBits w (i:is) = clearBits (clearBit w i) is
| brownsys/nettle-openflow | src/Nettle/Ethernet/EthernetFrame.hs | bsd-3-clause | 10,089 | 46 | 21 | 3,029 | 2,171 | 1,156 | 1,015 | 227 | 7 |
{-# LANGUAGE TypeOperators, TemplateHaskell #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Dimensions.SI
-- Copyright : (C) 2013 Richard Eisenberg
-- License : BSD-style (see LICENSE)
-- Maintainer : Richard Eisenberg ([email protected])
-- Stability : experimental
-- Portability : non-portable
--
-- This module defines SI dimensions. The names of SI dimensions conform to
-- <http://www.bipm.org/utils/common/documents/jcgm/JCGM_200_2012.pdf>.
-----------------------------------------------------------------------------
module Data.Dimensions.SI where
import Data.Metrology.Poly
import Data.Metrology.TH
-- The seven SI base dimensions:
declareDimension "Length"
declareDimension "Mass"
declareDimension "Time"
declareDimension "Current"
declareDimension "Temperature"
declareDimension "AmountOfSubstance"
declareDimension "LuminousIntensity"

-- | A plane angle is dimensionless; alternatively, it has dimension
-- length/length. It would be wrong to divide 2 meters by 1
-- meter and conclude that the quantity is 2 radians or degrees. To make
-- plane angle safe to use, we define it as a fundamental dimension.
declareDimension "PlaneAngle"

-- | As we did for plane angle, we must make solid angle a fundamental
-- dimension to avoid programming mistakes.
--
-- A solid angle is a measure of the amount of the field of view from some
-- particular point that a given object covers.
declareDimension "SolidAngle"
-- Derived dimensions, built from the base dimensions above with the
-- type-level operators ':*', ':/' and ':^'.

-- Kinematics:
type Area = Length :^ Two
type Volume = Length :^ Three
type Velocity = Length :/ Time
type Acceleration = Velocity :/ Time
type Wavenumber = Length :^ MOne

-- Density and concentration:
type Density = Mass :/ Volume
type SurfaceDensity = Mass :/ Area
type SpecificVolume = Volume :/ Mass
type CurrentDensity = Current :/ Area
type MagneticStrength = Current :/ Length
type Concentration = AmountOfSubstance :/ Volume
type Luminance = LuminousIntensity :/ Area

-- Mechanics:
type Frequency = Time :^ MOne
type Force = Mass :* Acceleration
type Pressure = Force :/ Area
type Energy = Force :* Length
type Power = Energy :/ Time

-- Electromagnetism:
type Charge = Current :* Time
type ElectricPotential = Power :/ Current
type Capacitance = Charge :/ ElectricPotential
type Resistance = ElectricPotential :/ Current
type Conductance = Current :/ ElectricPotential
type MagneticFlux = ElectricPotential :* Time
type MagneticFluxDensity = MagneticFlux :/ Area
type Inductance = MagneticFlux :/ Current

-- Photometry and radiation:
type LuminousFlux = LuminousIntensity
type Illuminance = LuminousIntensity :/ Area
type Kerma = Area :/ (Time :^ Two)
type CatalyticActivity = AmountOfSubstance :/ Time
type Momentum = Mass :* Velocity
type AngularVelocity = PlaneAngle :/ Time
| goldfirere/units | units-defs/Data/Dimensions/SI.hs | bsd-3-clause | 3,189 | 0 | 7 | 930 | 408 | 241 | 167 | 44 | 0 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[TcBinds]{TcBinds}
-}
{-# LANGUAGE CPP, RankNTypes, ScopedTypeVariables #-}
{-# LANGUAGE FlexibleContexts #-}
module TcBinds ( tcLocalBinds, tcTopBinds, tcRecSelBinds,
tcValBinds, tcHsBootSigs, tcPolyCheck,
tcVectDecls, addTypecheckedBinds,
chooseInferredQuantifiers,
badBootDeclErr ) where
import {-# SOURCE #-} TcMatches ( tcGRHSsPat, tcMatchesFun )
import {-# SOURCE #-} TcExpr ( tcMonoExpr )
import {-# SOURCE #-} TcPatSyn ( tcInferPatSynDecl, tcCheckPatSynDecl
, tcPatSynBuilderBind )
import DynFlags
import HsSyn
import HscTypes( isHsBootOrSig )
import TcSigs
import TcRnMonad
import TcEnv
import TcUnify
import TcSimplify
import TcEvidence
import TcHsType
import TcPat
import TcMType
import Inst( deeplyInstantiate )
import FamInstEnv( normaliseType )
import FamInst( tcGetFamInstEnvs )
import TyCon
import TcType
import Type( mkStrLitTy, tidyOpenType, mkTyVarBinder )
import TysPrim
import TysWiredIn( cTupleTyConName )
import Id
import Var
import VarSet
import VarEnv( TidyEnv )
import Module
import Name
import NameSet
import NameEnv
import SrcLoc
import Bag
import ListSetOps
import ErrUtils
import Digraph
import Maybes
import Util
import BasicTypes
import Outputable
import PrelNames( gHC_PRIM, ipClassName )
import TcValidity (checkValidType)
import UniqFM
import qualified GHC.LanguageExtensions as LangExt
import Control.Monad
#include "HsVersions.h"
{- *********************************************************************
* *
A useful helper function
* *
********************************************************************* -}
-- | Union a list of typechecked bindings into the global environment's
-- tcg_binds. For hs-boot (or signature) files the environment is returned
-- unchanged, so that generated record-selector bindings are not added.
addTypecheckedBinds :: TcGblEnv -> [LHsBinds Id] -> TcGblEnv
addTypecheckedBinds tcg_env binds
  | isHsBootOrSig (tcg_src tcg_env) = tcg_env
           -- Do not add the code for record-selector bindings
           -- when compiling hs-boot files
  | otherwise = tcg_env { tcg_binds = all_binds }
  where
    all_binds = foldr unionBags (tcg_binds tcg_env) binds
{-
************************************************************************
* *
\subsection{Type-checking bindings}
* *
************************************************************************
@tcBindsAndThen@ typechecks a @HsBinds@. The "and then" part is because
it needs to know something about the {\em usage} of the things bound,
so that it can create specialisations of them. So @tcBindsAndThen@
takes a function which, given an extended environment, E, typechecks
the scope of the bindings returning a typechecked thing and (most
important) an LIE. It is this LIE which is then used as the basis for
specialising the things bound.
@tcBindsAndThen@ also takes a "combiner" which glues together the
bindings and the "thing" to make a new "thing".
The real work is done by @tcBindWithSigsAndThen@.
Recursive and non-recursive binds are handled in essentially the same
way: because of uniques there are no scoping issues left. The only
difference is that non-recursive bindings can bind primitive values.
Even for non-recursive binding groups we add typings for each binder
to the LVE for the following reason. When each individual binding is
checked the type of its LHS is unified with that of its RHS; and
type-checking the LHS of course requires that the binder is in scope.
At the top-level the LIE is sure to contain nothing but constant
dictionaries, which we resolve at the module level.
Note [Polymorphic recursion]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The game plan for polymorphic recursion in the code above is
* Bind any variable for which we have a type signature
to an Id with a polymorphic type. Then when type-checking
the RHSs we'll make a full polymorphic call.
This fine, but if you aren't a bit careful you end up with a horrendous
amount of partial application and (worse) a huge space leak. For example:
f :: Eq a => [a] -> [a]
f xs = ...f...
If we don't take care, after typechecking we get
f = /\a -> \d::Eq a -> let f' = f a d
in
\ys:[a] -> ...f'...
Notice the stupid construction of (f a d), which is of course
identical to the function we're executing. In this case, the
polymorphic recursion isn't being used (but that's a very common case).
This can lead to a massive space leak, from the following top-level defn
(post-typechecking)
ff :: [Int] -> [Int]
ff = f Int dEqInt
Now (f dEqInt) evaluates to a lambda that has f' as a free variable; but
f' is another thunk which evaluates to the same thing... and you end
up with a chain of identical values all hung onto by the CAF ff.
ff = f Int dEqInt
= let f' = f Int dEqInt in \ys. ...f'...
= let f' = let f' = f Int dEqInt in \ys. ...f'...
in \ys. ...f'...
Etc.
NOTE: a bit of arity analysis would push the (f a d) inside the (\ys...),
which would make the space leak go away in this case
Solution: when typechecking the RHSs we always have in hand the
*monomorphic* Ids for each binding. So we just need to make sure that
if (Method f a d) shows up in the constraints emerging from (...f...)
we just use the monomorphic Id. We achieve this by adding monomorphic Ids
to the "givens" when simplifying constraints. That's what the "lies_avail"
is doing.
Then we get
f = /\a -> \d::Eq a -> letrec
fm = \ys:[a] -> ...fm...
in
fm
-}
-- | Typecheck the top-level value bindings and signatures of a module.
tcTopBinds :: [(RecFlag, LHsBinds Name)] -> [LSig Name] -> TcM (TcGblEnv, TcLclEnv)
-- The TcGblEnv contains the new tcg_binds and tcg_spects
-- The TcLclEnv has an extended type envt for the new bindings
tcTopBinds binds sigs
  = do  { -- Pattern synonym bindings populate the global environment
          (binds', (tcg_env, tcl_env)) <- tcValBinds TopLevel binds sigs $
            do { gbl <- getGblEnv
               ; lcl <- getLclEnv
               ; return (gbl, lcl) }
        ; specs <- tcImpPrags sigs   -- SPECIALISE prags for imported Ids
        ; let { tcg_env' = tcg_env { tcg_imp_specs = specs ++ tcg_imp_specs tcg_env }
                           `addTypecheckedBinds` map snd binds' }
        ; return (tcg_env', tcl_env) }
        -- The top level bindings are flattened into a giant
        -- implicitly-mutually-recursive LHsBinds
-- | Typecheck the (generated) record-selector bindings, suppressing any
-- warnings they would otherwise produce.
tcRecSelBinds :: HsValBinds Name -> TcM TcGblEnv
tcRecSelBinds (ValBindsOut binds sigs)
  = tcExtendGlobalValEnv [sel_id | L _ (IdSig sel_id) <- sigs] $
    do { (rec_sel_binds, tcg_env) <- discardWarnings $
                                     tcValBinds TopLevel binds sigs getGblEnv
       ; let tcg_env' = tcg_env `addTypecheckedBinds` map snd rec_sel_binds
       ; return tcg_env' }
tcRecSelBinds (ValBindsIn {}) = panic "tcRecSelBinds"
tcHsBootSigs :: [(RecFlag, LHsBinds Name)] -> [LSig Name] -> TcM [Id]
-- A hs-boot file has only one BindGroup, and it only has type
-- signatures in it. The renamer checked all this
tcHsBootSigs binds sigs
  = do  { checkTc (null binds) badBootDeclErr
        ; concat <$> mapM (addLocM tc_boot_sig) (filter isTypeLSig sigs) }
  where
    -- Turn each name in a TypeSig into a GlobalId with the signature's type
    tc_boot_sig (TypeSig lnames hs_ty) = mapM f lnames
      where
        f (L _ name)
          = do { sigma_ty <- solveEqualities $
                             tcHsSigWcType (FunSigCtxt name False) hs_ty
               ; return (mkVanillaGlobal name sigma_ty) }
        -- Notice that we make GlobalIds, not LocalIds
    tc_boot_sig s = pprPanic "tcHsBootSigs/tc_boot_sig" (ppr s)
-- | Error message reported when an hs-boot file contains value bindings.
badBootDeclErr :: MsgDoc
badBootDeclErr = text "Illegal declarations in an hs-boot file"
------------------------
-- | Typecheck local (let/where) bindings, running the continuation in the
-- extended environment. Handles empty, value, and implicit-parameter binds.
tcLocalBinds :: HsLocalBinds Name -> TcM thing
             -> TcM (HsLocalBinds TcId, thing)

tcLocalBinds EmptyLocalBinds thing_inside
  = do  { thing <- thing_inside
        ; return (EmptyLocalBinds, thing) }

tcLocalBinds (HsValBinds (ValBindsOut binds sigs)) thing_inside
  = do  { (binds', thing) <- tcValBinds NotTopLevel binds sigs thing_inside
        ; return (HsValBinds (ValBindsOut binds' sigs), thing) }
tcLocalBinds (HsValBinds (ValBindsIn {})) _ = panic "tcLocalBinds"

tcLocalBinds (HsIPBinds (IPBinds ip_binds _)) thing_inside
  = do  { ipClass <- tcLookupClass ipClassName
        ; (given_ips, ip_binds') <-
            mapAndUnzipM (wrapLocSndM (tc_ip_bind ipClass)) ip_binds

        -- If the binding binds ?x = E, we must now
        -- discharge any ?x constraints in expr_lie
        -- See Note [Implicit parameter untouchables]
        ; (ev_binds, result) <- checkConstraints (IPSkol ips)
                                  [] given_ips thing_inside

        ; return (HsIPBinds (IPBinds ip_binds' ev_binds), result) }
  where
    ips = [ip | L _ (IPBind (Left (L _ ip)) _) <- ip_binds]

        -- I wonder if we should do these one at a time
        -- Consider     ?x = 4
        --              ?y = ?x + 1
    tc_ip_bind ipClass (IPBind (Left (L _ ip)) expr)
       = do { ty <- newOpenFlexiTyVarTy
            ; let p = mkStrLitTy $ hsIPNameFS ip
            ; ip_id <- newDict ipClass [ p, ty ]
            ; expr' <- tcMonoExpr expr (mkCheckExpType ty)
            ; let d = toDict ipClass p ty `fmap` expr'
            ; return (ip_id, (IPBind (Right ip_id) d)) }
    tc_ip_bind _ (IPBind (Right {}) _) = panic "tc_ip_bind"

    -- Coerces a `t` into a dictionary for `IP "x" t`.
    -- co : t -> IP "x" t
    toDict ipClass x ty = HsWrap $ mkWpCastR $
                          wrapIP $ mkClassPred ipClass [x,ty]
{- Note [Implicit parameter untouchables]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We add the type variables in the types of the implicit parameters
as untouchables, not so much because we really must not unify them,
but rather because we otherwise end up with constraints like this
Num alpha, Implic { wanted = alpha ~ Int }
The constraint solver solves alpha~Int by unification, but then
doesn't float that solved constraint out (it's not an unsolved
wanted). Result disaster: the (Num alpha) is again solved, this
time by defaulting. No no no.
However [Oct 10] this is all handled automatically by the
untouchable-range idea.
Note [Inlining and hs-boot files]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this example (Trac #10083):
---------- RSR.hs-boot ------------
module RSR where
data RSR
eqRSR :: RSR -> RSR -> Bool
---------- SR.hs ------------
module SR where
import {-# SOURCE #-} RSR
data SR = MkSR RSR
eqSR (MkSR r1) (MkSR r2) = eqRSR r1 r2
---------- RSR.hs ------------
module RSR where
import SR
data RSR = MkRSR SR -- deriving( Eq )
eqRSR (MkRSR s1) (MkRSR s2) = (eqSR s1 s2)
foo x y = not (eqRSR x y)
When compiling RSR we get this code
RSR.eqRSR :: RSR -> RSR -> Bool
RSR.eqRSR = \ (ds1 :: RSR.RSR) (ds2 :: RSR.RSR) ->
case ds1 of _ { RSR.MkRSR s1 ->
case ds2 of _ { RSR.MkRSR s2 ->
SR.eqSR s1 s2 }}
RSR.foo :: RSR -> RSR -> Bool
RSR.foo = \ (x :: RSR) (y :: RSR) -> not (RSR.eqRSR x y)
Now, when optimising foo:
Inline eqRSR (small, non-rec)
Inline eqSR (small, non-rec)
but the result of inlining eqSR from SR is another call to eqRSR, so
everything repeats. Neither eqSR nor eqRSR are (apparently) loop
breakers.
Solution: when compiling RSR, add a NOINLINE pragma to every function
exported by the boot-file for RSR (if it exists).
ALAS: doing so makes the bootstrapped GHC itself slower by 8% overall
(on Trac #9872a-d, and T1969. So I un-did this change, and
parked it for now. Sigh.
-}
-- | Typecheck a collection of value bindings (with their signatures),
-- running the continuation in the extended environment. Pattern synonyms
-- are handled specially: their builders are typechecked after the
-- continuation and returned as extra (non-recursive) binding groups.
tcValBinds :: TopLevelFlag
           -> [(RecFlag, LHsBinds Name)] -> [LSig Name]
           -> TcM thing
           -> TcM ([(RecFlag, LHsBinds TcId)], thing)

tcValBinds top_lvl binds sigs thing_inside
  = do  { let patsyns = getPatSynBinds binds

            -- Typecheck the signature
        ; (poly_ids, sig_fn) <- tcAddPatSynPlaceholders patsyns $
                                tcTySigs sigs

        ; _self_boot <- tcSelfBootInfo
        ; let prag_fn = mkPragEnv sigs (foldr (unionBags . snd) emptyBag binds)

-- -------  See Note [Inlining and hs-boot files] (change parked) --------
--              prag_fn | isTopLevel top_lvl   -- See Note [Inlining and hs-boot files]
--                      , SelfBoot { sb_ids = boot_id_names } <- self_boot
--                      = foldNameSet add_no_inl prag_fn1 boot_id_names
--                      | otherwise
--                      = prag_fn1
--              add_no_inl boot_id_name prag_fn
--                = extendPragEnv prag_fn (boot_id_name, no_inl_sig boot_id_name)
--              no_inl_sig name = L boot_loc (InlineSig (L boot_loc name) neverInlinePragma)
--              boot_loc = mkGeneralSrcSpan (fsLit "The hs-boot file for this module")

                -- Extend the envt right away with all the Ids
                -- declared with complete type signatures
                -- Do not extend the TcIdBinderStack; instead
                -- we extend it on a per-rhs basis in tcExtendForRhs
        ; tcExtendLetEnvIds top_lvl [(idName id, id) | id <- poly_ids] $ do
            { (binds', (extra_binds', thing)) <- tcBindGroups top_lvl sig_fn prag_fn binds $ do
                   { thing <- thing_inside
                     -- See Note [Pattern synonym builders don't yield dependencies]
                   ; patsyn_builders <- mapM tcPatSynBuilderBind patsyns
                   ; let extra_binds = [ (NonRecursive, builder) | builder <- patsyn_builders ]
                   ; return (extra_binds, thing) }
            ; return (binds' ++ extra_binds', thing) }}
------------------------
tcBindGroups :: TopLevelFlag -> TcSigFun -> TcPragEnv
             -> [(RecFlag, LHsBinds Name)] -> TcM thing
             -> TcM ([(RecFlag, LHsBinds TcId)], thing)
-- Typecheck a whole lot of value bindings,
-- one strongly-connected component at a time
-- Here a "strongly connected component" has the straightforward
-- meaning of a group of bindings that mention each other,
-- ignoring type signatures (that part comes later)

tcBindGroups _ _ _ [] thing_inside
  = do  { thing <- thing_inside
        ; return ([], thing) }

tcBindGroups top_lvl sig_fn prag_fn (group : groups) thing_inside
  = do  { -- See Note [Closed binder groups]
          closed <- isClosedBndrGroup $ snd group
        ; (group', (groups', thing))
             <- tc_group top_lvl sig_fn prag_fn group closed $
                tcBindGroups top_lvl sig_fn prag_fn groups thing_inside
        ; return (group' ++ groups', thing) }
-- Note [Closed binder groups]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- A mutually recursive group is "closed" if all of the free variables of
-- the bindings are closed. For example
--
-- > h = \x -> let f = ...g...
-- > g = ....f...x...
-- > in ...
--
-- Here @g@ is not closed because it mentions @x@; and hence neither is @f@
-- closed.
--
-- So we need to compute closed-ness on each strongly connected components,
-- before we sub-divide it based on what type signatures it has.
--
------------------------
tc_group :: forall thing.
            TopLevelFlag -> TcSigFun -> TcPragEnv
         -> (RecFlag, LHsBinds Name) -> IsGroupClosed -> TcM thing
         -> TcM ([(RecFlag, LHsBinds TcId)], thing)

-- Typecheck one strongly-connected component of the original program.
-- We get a list of groups back, because there may
-- be specialisations etc as well

tc_group top_lvl sig_fn prag_fn (NonRecursive, binds) closed thing_inside
        -- A single non-recursive binding
        -- We want to keep non-recursive things non-recursive
        -- so that we desugar unlifted bindings correctly
  = do { let bind = case bagToList binds of
                 [bind] -> bind
                 []     -> panic "tc_group: empty list of binds"
                 _      -> panic "tc_group: NonRecursive binds is not a singleton bag"
       ; (bind', thing) <- tc_single top_lvl sig_fn prag_fn bind closed
                                     thing_inside
       ; return ( [(NonRecursive, bind')], thing) }

tc_group top_lvl sig_fn prag_fn (Recursive, binds) closed thing_inside
  =     -- To maximise polymorphism, we do a new
        -- strongly-connected-component analysis, this time omitting
        -- any references to variables with type signatures.
        -- (This used to be optional, but isn't now.)
        -- See Note [Polymorphic recursion] in HsBinds.
    do  { traceTc "tc_group rec" (pprLHsBinds binds)
        ; when hasPatSyn $ recursivePatSynErr binds
        ; (binds1, thing) <- go sccs
        ; return ([(Recursive, binds1)], thing) }
                -- Rec them all together
  where
    hasPatSyn = anyBag (isPatSyn . unLoc) binds
    isPatSyn PatSynBind{} = True
    isPatSyn _ = False

    sccs :: [SCC (LHsBind Name)]
    sccs = stronglyConnCompFromEdgedVerticesUniq (mkEdges sig_fn binds)

    -- Typecheck the SCCs in dependency order, extending the environment
    -- with the ids of each SCC before checking the next
    go :: [SCC (LHsBind Name)] -> TcM (LHsBinds TcId, thing)
    go (scc:sccs) = do  { (binds1, ids1) <- tc_scc scc
                        ; (binds2, thing) <- tcExtendLetEnv top_lvl closed ids1
                                                            (go sccs)
                        ; return (binds1 `unionBags` binds2, thing) }
    go []         = do  { thing <- thing_inside; return (emptyBag, thing) }

    tc_scc (AcyclicSCC bind) = tc_sub_group NonRecursive [bind]
    tc_scc (CyclicSCC binds) = tc_sub_group Recursive    binds

    tc_sub_group rec_tc binds =
      tcPolyBinds top_lvl sig_fn prag_fn Recursive rec_tc closed binds
-- | Report an error for a group of mutually recursive pattern synonym
-- bindings (which are not allowed), listing the offending bindings.
recursivePatSynErr :: OutputableBndr name => LHsBinds name -> TcM a
recursivePatSynErr binds
  = failWithTc $
    hang (text "Recursive pattern synonym definition with following bindings:")
       2 (vcat $ map pprLBind . bagToList $ binds)
  where
    pprLoc loc  = parens (text "defined at" <+> ppr loc)
    pprLBind (L loc bind) = pprWithCommas ppr (collectHsBindBinders bind) <+>
                            pprLoc loc
-- | Typecheck a single non-recursive binding. Pattern synonym bindings are
-- dispatched to the pattern-synonym machinery; everything else goes
-- through 'tcPolyBinds'.
tc_single :: forall thing.
            TopLevelFlag -> TcSigFun -> TcPragEnv
          -> LHsBind Name -> IsGroupClosed -> TcM thing
          -> TcM (LHsBinds TcId, thing)
tc_single _top_lvl sig_fn _prag_fn
          (L _ (PatSynBind psb@PSB{ psb_id = L _ name }))
          _ thing_inside
  = do { (aux_binds, tcg_env) <- tc_pat_syn_decl
       ; thing <- setGblEnv tcg_env thing_inside
       ; return (aux_binds, thing)
       }
  where
    -- Infer or check the pattern synonym, depending on whether it has
    -- a (pattern synonym) signature
    tc_pat_syn_decl :: TcM (LHsBinds TcId, TcGblEnv)
    tc_pat_syn_decl = case sig_fn name of
        Nothing                 -> tcInferPatSynDecl psb
        Just (TcPatSynSig tpsi) -> tcCheckPatSynDecl psb tpsi
        Just                 _  -> panic "tc_single"

tc_single top_lvl sig_fn prag_fn lbind closed thing_inside
  = do { (binds1, ids) <- tcPolyBinds top_lvl sig_fn prag_fn
                                      NonRecursive NonRecursive
                                      closed
                                      [lbind]
       ; thing <- tcExtendLetEnv top_lvl closed ids thing_inside
       ; return (binds1, thing) }
------------------------
-- | Key identifying one binding within a group, used for the dependency graph.
type BKey = Int -- Just number off the bindings

-- | Build the dependency graph of a binding group: an edge from bind A to
-- bind B when A's free variables mention a binder of B that has no
-- complete type signature.
mkEdges :: TcSigFun -> LHsBinds Name -> [Node BKey (LHsBind Name)]
-- See Note [Polymorphic recursion] in HsBinds.
mkEdges sig_fn binds
  = [ (bind, key, [key | n <- nonDetEltsUFM (bind_fvs (unLoc bind)),
                         Just key <- [lookupNameEnv key_map n], no_sig n ])
    | (bind, key) <- keyd_binds
    ]
    -- It's OK to use nonDetEltsUFM here as stronglyConnCompFromEdgedVertices
    -- is still deterministic even if the edges are in nondeterministic order
    -- as explained in Note [Deterministic SCC] in Digraph.
  where
    no_sig :: Name -> Bool
    no_sig n = noCompleteSig (sig_fn n)

    keyd_binds = bagToList binds `zip` [0::BKey ..]

    key_map :: NameEnv BKey     -- Which binding it comes from
    key_map = mkNameEnv [(bndr, key) | (L _ bind, key) <- keyd_binds
                                     , bndr <- collectHsBindBinders bind ]
------------------------
tcPolyBinds :: TopLevelFlag -> TcSigFun -> TcPragEnv
            -> RecFlag         -- Whether the group is really recursive
            -> RecFlag         -- Whether it's recursive after breaking
                               -- dependencies based on type signatures
            -> IsGroupClosed   -- Whether the group is closed
            -> [LHsBind Name]  -- None are PatSynBind
            -> TcM (LHsBinds TcId, [TcId])

-- Typechecks a single bunch of values bindings all together,
-- and generalises them.  The bunch may be only part of a recursive
-- group, because we use type signatures to maximise polymorphism
--
-- Returns a list because the input may be a single non-recursive binding,
-- in which case the dependency order of the resulting bindings is
-- important.
--
-- Knows nothing about the scope of the bindings
-- None of the bindings are pattern synonyms

tcPolyBinds top_lvl sig_fn prag_fn rec_group rec_tc closed bind_list
  = setSrcSpan loc                              $
    recoverM (recoveryCode binder_names sig_fn) $ do
        -- Set up main recover; take advantage of any type sigs

    { traceTc "------------------------------------------------" Outputable.empty
    ; traceTc "Bindings for {" (ppr binder_names)
    ; dflags   <- getDynFlags
    ; let plan = decideGeneralisationPlan dflags bind_list closed sig_fn
    ; traceTc "Generalisation plan" (ppr plan)
    ; result@(tc_binds, poly_ids) <- case plan of
         NoGen              -> tcPolyNoGen rec_tc prag_fn sig_fn bind_list
         InferGen mn        -> tcPolyInfer rec_tc prag_fn sig_fn mn bind_list
         CheckGen lbind sig -> tcPolyCheck prag_fn sig lbind

        -- Check whether strict bindings are ok
        -- These must be non-recursive etc, and are not generalised
        -- They desugar to a case expression in the end
    ; checkStrictBinds top_lvl rec_group bind_list tc_binds poly_ids
    ; traceTc "} End of bindings for" (vcat [ ppr binder_names, ppr rec_group
                                            , vcat [ppr id <+> ppr (idType id) | id <- poly_ids]
                                          ])

    ; return result }
  where
    binder_names = collectHsBindListBinders bind_list
    loc = foldr1 combineSrcSpans (map getLoc bind_list)
         -- The mbinds have been dependency analysed and
         -- may no longer be adjacent; so find the narrowest
         -- span that includes them all
--------------
-- If typechecking the binds fails, then return with each
-- signature-less binder given type (forall a.a), to minimise
-- subsequent error messages
recoveryCode :: [Name] -> TcSigFun -> TcM (LHsBinds TcId, [Id])
recoveryCode binder_names sig_fn
  = do  { traceTc "tcBindsWithSigs: error recovery" (ppr binder_names)
        ; let poly_ids = map mk_dummy binder_names
        ; return (emptyBag, poly_ids) }
  where
    -- Use the signature's Id when there is one; otherwise fake an Id
    -- with the maximally-polymorphic type forall a. a
    mk_dummy name
      | Just sig <- sig_fn name
      , Just poly_id <- completeSigPolyId_maybe sig
      = poly_id
      | otherwise
      = mkLocalId name forall_a_a

-- | The type @forall a. a@ (levity-polymorphic), used for recovery Ids.
forall_a_a :: TcType
forall_a_a = mkSpecForAllTys [runtimeRep1TyVar, openAlphaTyVar] openAlphaTy
{- *********************************************************************
* *
tcPolyNoGen
* *
********************************************************************* -}
tcPolyNoGen     -- No generalisation whatsoever
  :: RecFlag       -- Whether it's recursive after breaking
                   -- dependencies based on type signatures
  -> TcPragEnv -> TcSigFun
  -> [LHsBind Name]
  -> TcM (LHsBinds TcId, [TcId])

tcPolyNoGen rec_tc prag_fn tc_sig_fn bind_list
  = do { (binds', mono_infos) <- tcMonoBinds rec_tc tc_sig_fn
                                             (LetGblBndr prag_fn)
                                             bind_list
       ; mono_ids' <- mapM tc_mono_info mono_infos
       ; return (binds', mono_ids') }
  where
    tc_mono_info (MBI { mbi_poly_name = name, mbi_mono_id = mono_id })
      = do { mono_ty' <- zonkTcType (idType mono_id)
             -- Zonk, mainly to expose unboxed types to checkStrictBinds
           ; let mono_id' = setIdType mono_id mono_ty'
           ; _specs <- tcSpecPrags mono_id' (lookupPragEnv prag_fn name)
           ; return mono_id' }
           -- NB: tcPrags generates error messages for
           --     specialisation pragmas for non-overloaded sigs
           -- Indeed that is why we call it here!
           -- So we can safely ignore _specs
{- *********************************************************************
* *
tcPolyCheck
* *
********************************************************************* -}
tcPolyCheck :: TcPragEnv
            -> TcIdSigInfo     -- Must be a complete signature
            -> LHsBind Name    -- Must be a FunBind
            -> TcM (LHsBinds TcId, [TcId])
-- There is just one binding,
--   it is a Funbind
--   it has a complete type signature,
tcPolyCheck prag_fn
            (CompleteSig { sig_bndr = poly_id
                         , sig_ctxt = ctxt
                         , sig_loc  = sig_loc })
            (L loc (FunBind { fun_id = L nm_loc name
                            , fun_matches = matches }))
  = setSrcSpan sig_loc $
    do { traceTc "tcPolyCheck" (ppr poly_id $$ ppr sig_loc)
       ; (tv_prs, theta, tau) <- tcInstType (tcInstSigTyVars sig_loc) poly_id
                -- See Note [Instantiate sig with fresh variables]

       -- Check the RHS against the instantiated signature type, under a
       -- fresh monomorphic Id (mono_id) and the signature's skolems
       ; mono_name <- newNameAt (nameOccName name) nm_loc
       ; ev_vars   <- newEvVars theta
       ; let mono_id   = mkLocalId mono_name tau
             skol_info = SigSkol ctxt (mkPhiTy theta tau)
             skol_tvs  = map snd tv_prs
       ; (ev_binds, (co_fn, matches'))
            <- checkConstraints skol_info skol_tvs ev_vars $
               tcExtendIdBndrs [TcIdBndr mono_id NotTopLevel]  $
               tcExtendTyVarEnv2 tv_prs $
               setSrcSpan loc           $
               tcMatchesFun (L nm_loc mono_name) matches (mkCheckExpType tau)

       ; let prag_sigs = lookupPragEnv prag_fn name
       ; spec_prags <- tcSpecPrags poly_id prag_sigs
       ; poly_id    <- addInlinePrags poly_id prag_sigs

       -- Wrap the monomorphic binding in an AbsBindsSig that exports the
       -- polymorphic signature Id
       ; let bind' = FunBind { fun_id = L nm_loc mono_id
                             , fun_matches = matches'
                             , fun_co_fn = co_fn
                             , bind_fvs = placeHolderNamesTc
                             , fun_tick = [] }

             abs_bind = L loc $ AbsBindsSig
                        { abs_sig_export  = poly_id
                        , abs_tvs         = skol_tvs
                        , abs_ev_vars     = ev_vars
                        , abs_sig_prags   = SpecPrags spec_prags
                        , abs_sig_ev_bind = ev_binds
                        , abs_sig_bind    = L loc bind' }

       ; return (unitBag abs_bind, [poly_id]) }

tcPolyCheck _prag_fn sig bind
  = pprPanic "tcPolyCheck" (ppr sig $$ ppr bind)
{- Note [Instantiate sig with fresh variables]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's vital to instantiate a type signature with fresh variables.
For example:
type T = forall a. [a] -> [a]
f :: T;
f = g where { g :: T; g = <rhs> }
We must not use the same 'a' from the defn of T at both places!!
(Instantiation is only necessary because of type synonyms. Otherwise,
it's all cool; each signature has distinct type variables from the renamer.)
-}
{- *********************************************************************
* *
tcPolyInfer
* *
********************************************************************* -}
tcPolyInfer
  :: RecFlag       -- Whether it's recursive after breaking
                   -- dependencies based on type signatures
  -> TcPragEnv -> TcSigFun
  -> Bool          -- True <=> apply the monomorphism restriction
  -> [LHsBind Name]
  -> TcM (LHsBinds TcId, [TcId])
-- Typecheck a group of bindings with no complete signature, then
-- generalise: simplify the captured constraints, quantify, and build a
-- single AbsBinds exporting the polymorphic Ids.
tcPolyInfer rec_tc prag_fn tc_sig_fn mono bind_list
  = do { -- Check the RHSs at a deeper TcLevel, capturing the constraints
         -- they emit so they can be generalised over below
         (tclvl, wanted, (binds', mono_infos))
             <- pushLevelAndCaptureConstraints $
                tcMonoBinds rec_tc tc_sig_fn LetLclBndr bind_list

       ; let name_taus  = [ (mbi_poly_name info, idType (mbi_mono_id info))
                          | info <- mono_infos ]
             sigs       = [ sig | MBI { mbi_sig = Just sig } <- mono_infos ]
             infer_mode = if mono then ApplyMR else NoRestrictions

       -- The MR conflicts with an overloaded (partial) signature;
       -- complain about that combination up front
       ; mapM_ (checkOverloadedSig mono) sigs

       ; traceTc "simplifyInfer call" (ppr tclvl $$ ppr name_taus $$ ppr wanted)
       ; (qtvs, givens, ev_binds)
             <- simplifyInfer tclvl infer_mode sigs name_taus wanted

       ; let inferred_theta = map evVarPred givens
       ; exports <- checkNoErrs $
                    mapM (mkExport prag_fn qtvs inferred_theta) mono_infos

       ; loc <- getSrcSpanM
       ; let poly_ids = map abe_poly exports
             abs_bind = L loc $
                        AbsBinds { abs_tvs = qtvs
                                 , abs_ev_vars = givens, abs_ev_binds = [ev_binds]
                                 , abs_exports = exports, abs_binds = binds' }

       ; traceTc "Binding:" (ppr (poly_ids `zip` map idType poly_ids))
       ; return (unitBag abs_bind, poly_ids) }
         -- poly_ids are guaranteed zonked by mkExport
--------------
mkExport :: TcPragEnv
         -> [TyVar] -> TcThetaType      -- Both already zonked
         -> MonoBindInfo
         -> TcM (ABExport Id)
-- Only called for generalisation plan InferGen, not by CheckGen or NoGen
--
-- mkExport generates exports with
--      zonked type variables,
--      zonked poly_ids
-- The former is just because no further unifications will change
-- the quantified type variables, so we can fix their final form
-- right now.
-- The latter is needed because the poly_ids are used to extend the
-- type environment; see the invariant on TcEnv.tcExtendIdEnv
-- Pre-condition: the qtvs and theta are already zonked
mkExport prag_fn qtvs theta
         mono_info@(MBI { mbi_poly_name = poly_name
                        , mbi_sig       = mb_sig
                        , mbi_mono_id   = mono_id })
  = do { mono_ty <- zonkTcType (idType mono_id)
       ; poly_id <- mkInferredPolyId qtvs theta poly_name mb_sig mono_ty

       -- NB: poly_id has a zonked type
       ; poly_id <- addInlinePrags poly_id prag_sigs
       ; spec_prags <- tcSpecPrags poly_id prag_sigs
             -- tcPrags requires a zonked poly_id

       -- See Note [Impedence matching]
       -- NB: we have already done checkValidType, including an ambiguity check,
       --     on the type; either when we checked the sig or in mkInferredPolyId
       ; let poly_ty     = idType poly_id
             sel_poly_ty = mkInfSigmaTy qtvs theta mono_ty
               -- This type is just going into tcSubType,
               -- so Inferred vs. Specified doesn't matter

       ; wrap <- if sel_poly_ty `eqType` poly_ty  -- NB: eqType ignores visibility
                 then return idHsWrapper  -- Fast path; also avoids complaint when we infer
                                          -- an ambiguous type and have AllowAmbiguousType
                                          -- e.g. infer  x :: forall a. F a -> Int
                 else addErrCtxtM (mk_impedence_match_msg mono_info sel_poly_ty poly_ty) $
                      tcSubType_NC sig_ctxt sel_poly_ty (mkCheckExpType poly_ty)

       ; warn_missing_sigs <- woptM Opt_WarnMissingLocalSignatures
       ; when warn_missing_sigs $
             localSigWarn Opt_WarnMissingLocalSignatures poly_id mb_sig

       ; return (ABE { abe_wrap = wrap
                       -- abe_wrap :: idType poly_id ~ (forall qtvs. theta => mono_ty)
                     , abe_poly  = poly_id
                     , abe_mono  = mono_id
                     , abe_prags = SpecPrags spec_prags}) }
  where
    prag_sigs = lookupPragEnv prag_fn poly_name
    sig_ctxt  = InfSigCtxt poly_name
mkInferredPolyId :: [TyVar] -> TcThetaType
                 -> Name -> Maybe TcIdSigInst -> TcType
                 -> TcM TcId
-- Build the polymorphic Id for one inferred binder.  A complete
-- signature already supplies the Id; otherwise normalise the mono type,
-- choose quantifiers, validity-check, and make a fresh Id.
mkInferredPolyId qtvs inferred_theta poly_name mb_sig_inst mono_ty
  | Just (TISI { sig_inst_sig = sig }) <- mb_sig_inst
  , CompleteSig { sig_bndr = poly_id } <- sig
  = return poly_id

  | otherwise  -- Either no type sig or partial type sig
  = checkNoErrs $  -- The checkNoErrs ensures that if the type is ambiguous
                   -- we don't carry on to the impedence matching, and generate
                   -- a duplicate ambiguity error.  There is a similar
                   -- checkNoErrs for complete type signatures too.
    do { fam_envs <- tcGetFamInstEnvs
       ; let (_co, mono_ty') = normaliseType fam_envs Nominal mono_ty
               -- Unification may not have normalised the type,
               -- (see Note [Lazy flattening] in TcFlatten) so do it
               -- here to make it as uncomplicated as possible.
               -- Example: f :: [F Int] -> Bool
               --          should be rewritten to f :: [Char] -> Bool, if possible
               --
               -- We can discard the coercion _co, because we'll reconstruct
               -- it in the call to tcSubType below

       ; (binders, theta') <- chooseInferredQuantifiers inferred_theta
                                (tyCoVarsOfType mono_ty') qtvs mb_sig_inst

       ; let inferred_poly_ty = mkForAllTys binders (mkPhiTy theta' mono_ty')

       ; traceTc "mkInferredPolyId" (vcat [ppr poly_name, ppr qtvs, ppr theta'
                                          , ppr inferred_poly_ty])

       ; addErrCtxtM (mk_inf_msg poly_name inferred_poly_ty) $
         checkValidType (InfSigCtxt poly_name) inferred_poly_ty
         -- See Note [Validity of inferred types]

       ; return (mkLocalIdOrCoVar poly_name inferred_poly_ty) }
chooseInferredQuantifiers :: TcThetaType   -- inferred
                          -> TcTyVarSet    -- tvs free in tau type
                          -> [TcTyVar]     -- inferred quantified tvs
                          -> Maybe TcIdSigInst
                          -> TcM ([TyVarBinder], TcThetaType)
-- Decide which type variables to quantify over, and with what theta,
-- for one inferred binder; behaviour depends on whether the binder has
-- no signature, a partial signature, or a partial signature with an
-- extra-constraints wildcard.

-- No type signature (partial or complete) for this binder
chooseInferredQuantifiers inferred_theta tau_tvs qtvs Nothing
  = do { let free_tvs = closeOverKinds (growThetaTyVars inferred_theta tau_tvs)
                        -- Include kind variables!  Trac #7916
             my_theta = pickCapturedPreds free_tvs inferred_theta
             binders  = [ mkTyVarBinder Inferred tv
                        | tv <- qtvs
                        , tv `elemVarSet` free_tvs ]
       ; return (binders, my_theta) }

chooseInferredQuantifiers inferred_theta tau_tvs qtvs
                          (Just (TISI { sig_inst_sig   = sig  -- Always PartialSig
                                      , sig_inst_wcx   = wcx
                                      , sig_inst_theta = annotated_theta
                                      , sig_inst_skols = annotated_tvs }))
  -- Partial signature with no extra-constraints wildcard:
  -- quantify over exactly the annotated theta
  | Nothing <- wcx
  = do { annotated_theta <- zonkTcTypes annotated_theta
       ; let free_tvs = closeOverKinds (tyCoVarsOfTypes annotated_theta
                                        `unionVarSet` tau_tvs)
       ; traceTc "ciq" (vcat [ ppr sig, ppr annotated_theta, ppr free_tvs])
       ; return (mk_binders free_tvs, annotated_theta) }

  -- Partial signature with an extra-constraints wildcard: fill the
  -- wildcard with the constraints inferred beyond the annotated ones
  | Just wc_var <- wcx
  = do { annotated_theta <- zonkTcTypes annotated_theta
       ; let free_tvs = closeOverKinds (tyCoVarsOfTypes annotated_theta
                                        `unionVarSet` tau_tvs)
             my_theta = pickCapturedPreds free_tvs inferred_theta

             -- Report the inferred constraints for an extra-constraints wildcard/hole as
             -- an error message, unless the PartialTypeSignatures flag is enabled. In this
             -- case, the extra inferred constraints are accepted without complaining.
             -- NB: inferred_theta already includes all the annotated constraints
             inferred_diff = [ pred
                             | pred <- my_theta
                             , all (not . (`eqType` pred)) annotated_theta ]

       ; ctuple <- mk_ctuple inferred_diff
       ; writeMetaTyVar wc_var ctuple
       ; traceTc "completeTheta" $
            vcat [ ppr sig
                 , ppr annotated_theta, ppr inferred_theta
                 , ppr inferred_diff ]
       ; return (mk_binders free_tvs, my_theta) }

  | otherwise  -- A complete type signature is dealt with in mkInferredPolyId
  = pprPanic "chooseInferredQuantifiers" (ppr sig)

  where
    spec_tv_set = mkVarSet $ map snd annotated_tvs

    -- Quantifiers in original qtvs order; signature-mentioned tvs are
    -- Specified, the rest Inferred
    mk_binders free_tvs
      = [ mkTyVarBinder vis tv
        | tv <- qtvs
        , tv `elemVarSet` free_tvs
        , let vis | tv `elemVarSet` spec_tv_set = Specified
                  | otherwise                   = Inferred ]
                   -- Pulling from qtvs maintains original order

    -- Build the constraint (tuple) to fill in the wildcard
    mk_ctuple [pred] = return pred
    mk_ctuple preds  = do { tc <- tcLookupTyCon (cTupleTyConName (length preds))
                          ; return (mkTyConApp tc preds) }
mk_impedence_match_msg :: MonoBindInfo
                       -> TcType -> TcType
                       -> TidyEnv -> TcM (TidyEnv, SDoc)
-- This is a rare but rather awkward error message: the inferred type is
-- not as general as the (partial or inferred) signature.  Types are
-- zonked and tidied for display.
mk_impedence_match_msg (MBI { mbi_poly_name = name, mbi_sig = mb_sig })
                       inf_ty sig_ty tidy_env
  = do { (tidy_env1, inf_ty) <- zonkTidyTcType tidy_env inf_ty
       ; (tidy_env2, sig_ty) <- zonkTidyTcType tidy_env1 sig_ty
       ; let msg = vcat [ text "When checking that the inferred type"
                        , nest 2 $ ppr name <+> dcolon <+> ppr inf_ty
                        , text "is as general as its" <+> what <+> text "signature"
                        , nest 2 $ ppr name <+> dcolon <+> ppr sig_ty ]
       ; return (tidy_env2, msg) }
  where
    -- Describe where the signature came from
    what = case mb_sig of
             Nothing                     -> text "inferred"
             Just sig | isPartialSig sig -> text "(partial)"
                      | otherwise        -> empty
-- | Build the "When checking the inferred type ..." error context for a
-- binder, zonking and tidying the displayed type.
mk_inf_msg :: Name -> TcType -> TidyEnv -> TcM (TidyEnv, SDoc)
mk_inf_msg poly_name poly_ty tidy_env
  = do { (tidy_env', shown_ty) <- zonkTidyTcType tidy_env poly_ty
       ; let header = text "When checking the inferred type"
             detail = nest 2 $ ppr poly_name <+> dcolon <+> ppr shown_ty
       ; return (tidy_env', vcat [ header, detail ]) }
-- | Warn the user about polymorphic local binders that lack type signatures.
-- Nothing is reported when a (partial) signature exists, or when the
-- binder's type is not a sigma type.
localSigWarn :: WarningFlag -> Id -> Maybe TcIdSigInst -> TcM ()
localSigWarn flag id mb_sig
  = case mb_sig of
      Just _  -> return ()   -- has a signature: no warning needed
      Nothing
        | isSigmaTy (idType id) -> warnMissingSignatures flag warn_doc id
        | otherwise             -> return ()  -- monomorphic: no warning
  where
    warn_doc = text "Polymorphic local binding with no type signature:"
-- | Emit a missing-signature warning for the given Id under the given
-- warning flag, tidying its type for display.
warnMissingSignatures :: WarningFlag -> SDoc -> Id -> TcM ()
warnMissingSignatures flag msg id
  = do { tidy_env0 <- tcInitTidyEnv
       ; let (tidy_env1, shown_ty) = tidyOpenType tidy_env0 (idType id)
             full_msg = sep [ msg
                            , nest 2 $ pprPrefixName (idName id)
                                       <+> dcolon <+> ppr shown_ty ]
       ; addWarnTcM (Reason flag) (tidy_env1, full_msg) }
checkOverloadedSig :: Bool -> TcIdSigInst -> TcM ()
-- Reject an overloaded (non-empty theta) signature when the
-- monomorphism restriction applies to the binding group.
-- Example:
--   f :: Eq a => a -> a
--   K f = e
-- The MR applies, but the signature is overloaded, and it's
-- best to complain about this directly
-- c.f Trac #11339
checkOverloadedSig monomorphism_restriction_applies sig
  | not (null (sig_inst_theta sig))
  , monomorphism_restriction_applies
  , let orig_sig = sig_inst_sig sig
  = setSrcSpan (sig_loc orig_sig) $
    failWith $
    hang (text "Overloaded signature conflicts with monomorphism restriction")
       2 (ppr orig_sig)
  | otherwise
  = return ()
{- Note [Partial type signatures and generalisation]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If /any/ of the signatures in the group is a partial type signature
f :: _ -> Int
then we *always* use the InferGen plan, and hence tcPolyInfer.
We do this even for a local binding with -XMonoLocalBinds, when
we normally use NoGen.
Reasons:
* The TcSigInfo for 'f' has a unification variable for the '_',
whose TcLevel is one level deeper than the current level.
(See pushTcLevelM in tcTySig.) But NoGen doesn't increase
the TcLevel like InferGen, so we lose the level invariant.
* The signature might be f :: forall a. _ -> a
so it really is polymorphic. It's not clear what it would
mean to use NoGen on this, and indeed the ASSERT in tcLhs,
in the (Just sig) case, checks that if there is a signature
then we are using LetLclBndr, and hence a nested AbsBinds with
increased TcLevel
It might be possible to fix these difficulties somehow, but there
doesn't seem much point. Indeed, adding a partial type signature is a
way to get per-binding inferred generalisation.
We apply the MR if /all/ of the partial signatures lack a context.
In particular (Trac #11016):
f2 :: (?loc :: Int) => _
f2 = ?loc
It's stupid to apply the MR here. This test includes an extra-constraints
wildcard; that is, we don't apply the MR if you write
f3 :: _ => blah
Note [Validity of inferred types]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We need to check inferred type for validity, in case it uses language
extensions that are not turned on. The principle is that if the user
simply adds the inferred type to the program source, it'll compile fine.
See #8883.
Examples that might fail:
- the type might be ambiguous
- an inferred theta that requires type equalities e.g. (F a ~ G b)
or multi-parameter type classes
- an inferred type that includes unboxed tuples
Note [Impedence matching]
~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
f 0 x = x
f n x = g [] (not x)
g [] y = f 10 y
g _ y = f 9 y
After typechecking we'll get
f_mono_ty :: a -> Bool -> Bool
g_mono_ty :: [b] -> Bool -> Bool
with constraints
(Eq a, Num a)
Note that f is polymorphic in 'a' and g in 'b'; and these are not linked.
The types we really want for f and g are
f :: forall a. (Eq a, Num a) => a -> Bool -> Bool
g :: forall b. [b] -> Bool -> Bool
We can get these by "impedance matching":
tuple :: forall a b. (Eq a, Num a) => (a -> Bool -> Bool, [b] -> Bool -> Bool)
tuple a b d1 d2 = let ...bind f_mono, g_mono in (f_mono, g_mono)
f a d1 d2 = case tuple a Any d1 d2 of (f, g) -> f
g b = case tuple Integer b dEqInteger dNumInteger of (f,g) -> g
Suppose the shared quantified tyvars are qtvs and constraints theta.
Then we want to check that
forall qtvs. theta => f_mono_ty is more polymorphic than f's polytype
and the proof is the impedance matcher.
Notice that the impedance matcher may do defaulting. See Trac #7173.
It also cleverly does an ambiguity check; for example, rejecting
f :: F a -> F a
where F is a non-injective type function.
-}
{- *********************************************************************
* *
Vectorisation
* *
********************************************************************* -}
tcVectDecls :: [LVectDecl Name] -> TcM ([LVectDecl TcId])
-- Typecheck all vectorisation declarations in the module, reporting an
-- error for duplicate declarations of the same name (instance
-- declarations are excluded from the duplicate check).
tcVectDecls decls
  = do { decls' <- mapM (wrapLocM tcVect) decls
       ; let ids  = [lvectDeclName decl | decl <- decls', not $ lvectInstDecl decl]
             dups = findDupsEq (==) ids
       ; mapM_ reportVectDups dups
       ; traceTcConstraints "End of tcVectDecls"
       ; return decls'
       }
  where
    -- Report an error at the site of the first duplicate of a group
    reportVectDups (first:_second:_more)
      = addErrAt (getSrcSpan first) $
          text "Duplicate vectorisation declarations for" <+> ppr first
    reportVectDups _ = return ()
--------------
tcVect :: VectDecl Name -> TcM (VectDecl TcId)
-- Typecheck one vectorisation declaration, turning Name-annotated forms
-- into their TcId-annotated (or *Out) counterparts.
-- FIXME: We can't typecheck the expression of a vectorisation declaration against the vectorised
--   type of the original definition as this requires internals of the vectoriser not available
--   during type checking.  Instead, constrain the rhs of a vectorisation declaration to be a single
--   identifier (this is checked in 'rnHsVectDecl').  Fix this by enabling the use of 'vectType'
--   from the vectoriser here.
tcVect (HsVect s name rhs)
  = addErrCtxt (vectCtxt name) $
    do { var <- wrapLocM tcLookupId name
       -- The lazy pattern below relies on the RHS being a bare variable,
       -- which 'rnHsVectDecl' enforces (see the FIXME above)
       ; let L rhs_loc (HsVar (L lv rhs_var_name)) = rhs
       ; rhs_id <- tcLookupId rhs_var_name
       ; return $ HsVect s var (L rhs_loc (HsVar (L lv rhs_id)))
       }

tcVect (HsNoVect s name)
  = addErrCtxt (vectCtxt name) $
    do { var <- wrapLocM tcLookupId name
       ; return $ HsNoVect s var
       }

tcVect (HsVectTypeIn _ isScalar lname rhs_name)
  = addErrCtxt (vectCtxt lname) $
    do { tycon <- tcLookupLocatedTyCon lname
       ; checkTc (   not isScalar             -- either    we have a non-SCALAR declaration
                  || isJust rhs_name          -- or        we explicitly provide a vectorised type
                  || tyConArity tycon == 0    -- otherwise the type constructor must be nullary
                 )
                 scalarTyConMustBeNullary
       ; rhs_tycon <- fmapMaybeM (tcLookupTyCon . unLoc) rhs_name
       ; return $ HsVectTypeOut isScalar tycon rhs_tycon
       }

-- The *Out forms are produced by this very function; seeing one on
-- input is a compiler invariant violation
tcVect (HsVectTypeOut _ _ _)
  = panic "TcBinds.tcVect: Unexpected 'HsVectTypeOut'"

tcVect (HsVectClassIn _ lname)
  = addErrCtxt (vectCtxt lname) $
    do { cls <- tcLookupLocatedClass lname
       ; return $ HsVectClassOut cls
       }

tcVect (HsVectClassOut _)
  = panic "TcBinds.tcVect: Unexpected 'HsVectClassOut'"

tcVect (HsVectInstIn linstTy)
  = addErrCtxt (vectCtxt linstTy) $
    do { (cls, tys) <- tcHsVectInst linstTy
       ; inst <- tcLookupInstance cls tys
       ; return $ HsVectInstOut inst
       }

tcVect (HsVectInstOut _)
  = panic "TcBinds.tcVect: Unexpected 'HsVectInstOut'"
-- | Error-context header used by every branch of 'tcVect'.
vectCtxt :: Outputable thing => thing -> SDoc
vectCtxt thing
  = hsep [ text "When checking the vectorisation declaration for"
         , ppr thing ]
-- | Error message for a VECTORISE SCALAR declaration whose type
-- constructor takes arguments; see the checkTc in 'tcVect'.
scalarTyConMustBeNullary :: MsgDoc
scalarTyConMustBeNullary = text "VECTORISE SCALAR type constructor must be nullary"
{-
Note [SPECIALISE pragmas]
~~~~~~~~~~~~~~~~~~~~~~~~~
There is no point in a SPECIALISE pragma for a non-overloaded function:
reverse :: [a] -> [a]
{-# SPECIALISE reverse :: [Int] -> [Int] #-}
But SPECIALISE INLINE *can* make sense for GADTS:
data Arr e where
ArrInt :: !Int -> ByteArray# -> Arr Int
ArrPair :: !Int -> Arr e1 -> Arr e2 -> Arr (e1, e2)
(!:) :: Arr e -> Int -> e
{-# SPECIALISE INLINE (!:) :: Arr Int -> Int -> Int #-}
{-# SPECIALISE INLINE (!:) :: Arr (a, b) -> Int -> (a, b) #-}
(ArrInt _ ba) !: (I# i) = I# (indexIntArray# ba i)
(ArrPair _ a1 a2) !: i = (a1 !: i, a2 !: i)
When (!:) is specialised it becomes non-recursive, and can usefully
be inlined. Scary! So we only warn for SPECIALISE *without* INLINE
for a non-overloaded function.
************************************************************************
* *
tcMonoBinds
* *
************************************************************************
@tcMonoBinds@ deals with a perhaps-recursive group of HsBinds.
The signatures have been dealt with already.
Note [Pattern bindings]
~~~~~~~~~~~~~~~~~~~~~~~
The rule for typing pattern bindings is this:
..sigs..
p = e
where 'p' binds v1..vn, and 'e' may mention v1..vn,
typechecks exactly like
..sigs..
x = e -- Inferred type
v1 = case x of p -> v1
..
vn = case x of p -> vn
Note that
(f :: forall a. a -> a) = id
should not typecheck because
case id of { (f :: forall a. a->a) -> f }
will not typecheck.
Note [Instantiate when inferring a type]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
f = (*)
As there is no incentive to instantiate the RHS, tcMonoBinds will
produce a type of forall a. Num a => a -> a -> a for `f`. This will then go
through simplifyInfer and such, remaining unchanged.
There are two problems with this:
1) If the definition were `g _ = (*)`, we get a very unusual type of
`forall {a}. a -> forall b. Num b => b -> b -> b` for `g`. This is
surely confusing for users.
2) The monomorphism restriction can't work. The MR is dealt with in
simplifyInfer, and simplifyInfer has no way of instantiating. This
could perhaps be worked around, but it may be hard to know even
when instantiation should happen.
There is an easy solution to both problems: instantiate (deeply) when
inferring a type. So that's what we do. Note that this decision is
user-facing.
We do this deep instantiation in tcMonoBinds, in the FunBind case
only, and only when we do not have a type signature. Conveniently,
the fun_co_fn field of FunBind gives a place to record the coercion.
We do not need to do this
* for PatBinds, because we don't have a function type
* for FunBinds where we have a signature, because we aren't doing inference
-}
-- | Per-binder result of typechecking a binding's LHS: the original
-- (to-be-generalised) name, the instantiated signature if there is one,
-- and the monomorphic Id used while checking the RHSs.
data MonoBindInfo = MBI { mbi_poly_name :: Name
                        , mbi_sig       :: Maybe TcIdSigInst
                        , mbi_mono_id   :: TcId }
tcMonoBinds :: RecFlag  -- Whether the binding is recursive for typechecking purposes
                        -- i.e. the binders are mentioned in their RHSs, and
                        --      we are not rescued by a type signature
            -> TcSigFun -> LetBndrSpec
            -> [LHsBind Name]
            -> TcM (LHsBinds TcId, [MonoBindInfo])
-- Typecheck a perhaps-recursive group of bindings monomorphically;
-- signatures have been dealt with already.  See Note [Pattern bindings].
tcMonoBinds is_rec sig_fn no_gen
            [ L b_loc (FunBind { fun_id = L nm_loc name,
                                 fun_matches = matches, bind_fvs = fvs })]
                             -- Single function binding,
  | NonRecursive <- is_rec   -- ...binder isn't mentioned in RHS
  , Nothing <- sig_fn name   -- ...with no type signature
  =     -- In this very special case we infer the type of the
        -- right hand side first (it may have a higher-rank type)
        -- and *then* make the monomorphic Id for the LHS
        -- e.g.         f = \(x::forall a. a->a) -> <body>
        --      We want to infer a higher-rank type for f
    setSrcSpan b_loc $
    do { rhs_ty <- newOpenInferExpType
       ; (co_fn, matches')
           <- tcExtendIdBndrs [TcIdBndr_ExpType name rhs_ty NotTopLevel] $
                  -- We extend the error context even for a non-recursive
                  -- function so that in type error messages we show the
                  -- type of the thing whose rhs we are type checking
              tcMatchesFun (L nm_loc name) matches rhs_ty
       ; rhs_ty <- readExpType rhs_ty

       -- Deeply instantiate the inferred type
       -- See Note [Instantiate when inferring a type]
       ; let orig = matchesCtOrigin matches
       ; rhs_ty <- zonkTcType rhs_ty -- NB: zonk to uncover any foralls
       ; (inst_wrap, rhs_ty) <- addErrCtxtM (instErrCtxt name rhs_ty) $
                                deeplyInstantiate orig rhs_ty

       ; mono_id <- newLetBndr no_gen name rhs_ty
       ; return (unitBag $ L b_loc $
                    FunBind { fun_id = L nm_loc mono_id,
                              fun_matches = matches', bind_fvs = fvs,
                              fun_co_fn = inst_wrap <.> co_fn, fun_tick = [] },
                 [MBI { mbi_poly_name = name
                      , mbi_sig       = Nothing
                      , mbi_mono_id   = mono_id }]) }

-- General case: typecheck all the LHSs first, then all the RHSs with
-- the monomorphic binders in scope
tcMonoBinds _ sig_fn no_gen binds
  = do { tc_binds <- mapM (wrapLocM (tcLhs sig_fn no_gen)) binds

       -- Bring the monomorphic Ids, into scope for the RHSs
       ; let mono_infos = getMonoBindInfo tc_binds
             rhs_id_env = [ (name, mono_id)
                          | MBI { mbi_poly_name = name
                                , mbi_sig       = mb_sig
                                , mbi_mono_id   = mono_id } <- mono_infos
                          , case mb_sig of
                              Just sig -> isPartialSig sig
                              Nothing  -> True ]
               -- A monomorphic binding for each term variable that lacks
               -- a complete type sig.  (Ones with a sig are already in scope.)

       ; traceTc "tcMonoBinds" $ vcat [ ppr n <+> ppr id <+> ppr (idType id)
                                      | (n,id) <- rhs_id_env]
       ; binds' <- tcExtendLetEnvIds NotTopLevel rhs_id_env $
                   mapM (wrapLocM tcRhs) tc_binds

       ; return (listToBag binds', mono_infos) }
------------------------
-- tcLhs typechecks the LHS of the bindings, to construct the environment in which
-- we typecheck the RHSs. Basically what we are doing is this: for each binder:
-- if there's a signature for it, use the instantiated signature type
-- otherwise invent a type variable
-- You see that quite directly in the FunBind case.
--
-- But there's a complication for pattern bindings:
-- data T = MkT (forall a. a->a)
-- MkT f = e
-- Here we can guess a type variable for the entire LHS (which will be refined to T)
-- but we want to get (f::forall a. a->a) as the RHS environment.
-- The simplest way to do this is to typecheck the pattern, and then look up the
-- bound mono-ids. Then we want to retain the typechecked pattern to avoid re-doing
-- it; hence the TcMonoBind data type in which the LHS is done but the RHS isn't
-- | A half-completed binding: the LHS has been typechecked (so the
-- monomorphic Ids exist) but the RHS has not; see 'tcLhs' and 'tcRhs'.
data TcMonoBind         -- Half completed; LHS done, RHS not done
  = TcFunBind  MonoBindInfo  SrcSpan (MatchGroup Name (LHsExpr Name))
  | TcPatBind [MonoBindInfo] (LPat TcId) (GRHSs Name (LHsExpr Name)) TcSigmaType
tcLhs :: TcSigFun -> LetBndrSpec -> HsBind Name -> TcM TcMonoBind
-- Typecheck the LHS of one binding: allocate (or look up, via the
-- signature) the monomorphic Id for each binder, so the RHSs can later
-- be typechecked with those Ids in scope.
tcLhs sig_fn no_gen (FunBind { fun_id = L nm_loc name, fun_matches = matches })
  = do { mono_info <- tcLhsId sig_fn no_gen name
       ; return (TcFunBind mono_info nm_loc matches) }

tcLhs sig_fn no_gen (PatBind { pat_lhs = pat, pat_rhs = grhss })
  = do { let bndr_names = collectPatBinders pat
       ; mbis <- mapM (tcLhsId sig_fn no_gen) bndr_names
            -- See Note [Existentials in pattern bindings]

       -- Little environment mapping each pattern binder to its mono Id,
       -- consulted by tcLetPat when it reaches a variable pattern
       ; let inst_sig_fun = lookupNameEnv $ mkNameEnv $
                            bndr_names `zip` map mbi_mono_id mbis

       ; traceTc "tcLhs" (vcat [ ppr id <+> dcolon <+> ppr (idType id)
                               | mbi <- mbis, let id = mbi_mono_id mbi ]
                          $$ ppr no_gen)

       ; ((pat', _), pat_ty) <- addErrCtxt (patMonoBindsCtxt pat grhss) $
                                tcInfer $ \ exp_ty ->
                                tcLetPat inst_sig_fun pat exp_ty $
                                return () -- mapM (lookup_info inst_sig_fun) bndr_names

       ; return (TcPatBind mbis pat' grhss pat_ty) }

tcLhs _ _ other_bind = pprPanic "tcLhs" (ppr other_bind)
  -- AbsBind, VarBind impossible
-------------------
-- | How to make the binder Ids for a let-binding group; determined by
-- the generalisation plan.
data LetBndrSpec
  = LetLclBndr            -- We are going to generalise, and wrap in an AbsBinds
                          -- so clone a fresh binder for the local monomorphic Id

  | LetGblBndr TcPragEnv  -- Generalisation plan is NoGen, so there isn't going
                          -- to be an AbsBinds; So we must bind the global version
                          -- of the binder right away.
                          -- And here is the inline-pragma information
-- Debug-printing only; shows which binder strategy is in force.
instance Outputable LetBndrSpec where
  ppr spec = case spec of
               LetLclBndr    -> text "LetLclBndr"
               LetGblBndr {} -> text "LetGblBndr"
tcLhsId :: TcSigFun -> LetBndrSpec -> Name -> TcM MonoBindInfo
-- Make the MonoBindInfo for one binder, either from its (partial)
-- signature or from a fresh unification variable.
tcLhsId sig_fn no_gen name
  | Just (TcIdSig sig) <- sig_fn name
  = -- A partial type signature on a FunBind, in a mixed group
    --  e.g.   f :: _ -> _
    --         f x = ...g...
    --         Just g = ...f...
    -- Hence always typechecked with InferGen; hence LetLclBndr
    --
    -- A complete type sig on a FunBind is checked with CheckGen
    -- and does not go via tcLhsId
    do { inst_sig <- tcInstSig sig
       ; the_id <- newSigLetBndr no_gen name inst_sig
       ; return (MBI { mbi_poly_name = name
                     , mbi_sig       = Just inst_sig
                     , mbi_mono_id   = the_id }) }

  | otherwise
  = -- No type signature, plan InferGen (LetLclBndr) or NoGen (LetGblBndr)
    do { mono_ty <- newOpenFlexiTyVarTy
       ; mono_id <- newLetBndr no_gen name mono_ty
       ; return (MBI { mbi_poly_name = name
                     , mbi_sig       = Nothing
                     , mbi_mono_id   = mono_id }) }
------------
newSigLetBndr :: LetBndrSpec -> Name -> TcIdSigInst -> TcM TcId
-- Make the binder Id for a name that has a (possibly partial)
-- signature.  With NoGen (LetGblBndr) and a complete signature we can
-- use the signature's Id directly; otherwise bind at the instantiated
-- tau type.
newSigLetBndr (LetGblBndr prags) name (TISI { sig_inst_sig = id_sig })
  | CompleteSig { sig_bndr = poly_id } <- id_sig
  = addInlinePrags poly_id (lookupPragEnv prags name)
newSigLetBndr no_gen name (TISI { sig_inst_tau = tau })
  = newLetBndr no_gen name tau
newLetBndr :: LetBndrSpec -> Name -> TcType -> TcM TcId
-- In the polymorphic case when we are going to generalise
--    (plan InferGen, no_gen = LetLclBndr), generate a "monomorphic version"
--    of the Id; the original name will be bound to the polymorphic version
--    by the AbsBinds
-- In the monomorphic case when we are not going to generalise
--    (plan NoGen, no_gen = LetGblBndr) there is no AbsBinds,
--    and we use the original name directly
newLetBndr LetLclBndr name ty
  = do { mono_name <- cloneLocalName name
       ; return (mkLocalId mono_name ty) }
newLetBndr (LetGblBndr prags) name ty
  = addInlinePrags (mkLocalId name ty) (lookupPragEnv prags name)
-------------------
tcRhs :: TcMonoBind -> TcM (HsBind TcId)
-- Typecheck the RHS of a half-completed binding against the
-- monomorphic type fixed by tcLhs.
tcRhs (TcFunBind info@(MBI { mbi_sig = mb_sig, mbi_mono_id = mono_id })
                 loc matches)
  = tcExtendIdBinderStackForRhs [info] $
    tcExtendTyVarEnvForRhs mb_sig $
    do { traceTc "tcRhs: fun bind" (ppr mono_id $$ ppr (idType mono_id))
       ; (co_fn, matches') <- tcMatchesFun (L loc (idName mono_id))
                                 matches (mkCheckExpType $ idType mono_id)
       ; return ( FunBind { fun_id      = L loc mono_id
                          , fun_matches = matches'
                          , fun_co_fn   = co_fn
                          , bind_fvs    = placeHolderNamesTc
                          , fun_tick    = [] } ) }

tcRhs (TcPatBind infos pat' grhss pat_ty)
  = -- When we are doing pattern bindings we *don't* bring any scoped
    -- type variables into scope unlike function bindings
    -- Why not? They are not completely rigid.
    -- That's why we have the special case for a single FunBind in tcMonoBinds
    tcExtendIdBinderStackForRhs infos $
    do { traceTc "tcRhs: pat bind" (ppr pat' $$ ppr pat_ty)
       ; grhss' <- addErrCtxt (patMonoBindsCtxt pat' grhss) $
                   tcGRHSsPat grhss pat_ty
       ; return ( PatBind { pat_lhs = pat', pat_rhs = grhss'
                          , pat_rhs_ty = pat_ty
                          , bind_fvs   = placeHolderNamesTc
                          , pat_ticks  = ([],[]) } )}
-- | Bring a signature's scoped type variables into scope for
-- typechecking a RHS; with no signature, scope nothing.
tcExtendTyVarEnvForRhs :: Maybe TcIdSigInst -> TcM a -> TcM a
tcExtendTyVarEnvForRhs mb_sig thing_inside
  = case mb_sig of
      Nothing  -> thing_inside
      Just sig -> tcExtendTyVarEnvFromSig sig thing_inside
-- | Bring a signature instance's wildcard variables and skolem pairs
-- into the type-variable environment, wildcards outermost.
tcExtendTyVarEnvFromSig :: TcIdSigInst -> TcM a -> TcM a
tcExtendTyVarEnvFromSig sig_inst thing_inside
  = tcExtendTyVarEnv2 (sig_inst_wcs sig_inst)   $
    tcExtendTyVarEnv2 (sig_inst_skols sig_inst) $
    thing_inside
tcExtendIdBinderStackForRhs :: [MonoBindInfo] -> TcM a -> TcM a
-- Extend the TcIdBinderStack for the RHS of the binding, with
-- the monomorphic Id.  That way, if we have, say
--     f = \x -> blah
-- and something goes wrong in 'blah', we get a "relevant binding"
-- looking like  f :: alpha -> beta
-- This applies if 'f' has a type signature too:
--    f :: forall a. [a] -> [a]
--    f x = True
-- We can't unify True with [a], and a relevant binding is f :: [a] -> [a]
-- If we had the *polymorphic* version of f in the TcIdBinderStack, it
-- would not be reported as relevant, because its type is closed
tcExtendIdBinderStackForRhs infos thing_inside
  = tcExtendIdBndrs (map to_bndr infos) thing_inside
  where
    -- NotTopLevel: it's a monomorphic binding
    to_bndr (MBI { mbi_mono_id = mono_id }) = TcIdBndr mono_id NotTopLevel
---------------------
-- | Collect the MonoBindInfos of a list of half-completed bindings,
-- preserving order.
getMonoBindInfo :: [Located TcMonoBind] -> [MonoBindInfo]
getMonoBindInfo tc_binds
  = concatMap (get_infos . unLoc) tc_binds
  where
    get_infos (TcFunBind info _ _)    = [info]
    get_infos (TcPatBind infos _ _ _) = infos
{- Note [Existentials in pattern bindings]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider (typecheck/should_compile/ExPat):
data T where
MkT :: Integral a => a -> Int -> T
and suppose t :: T. Which of these pattern bindings are ok?
E1. let { MkT p _ = t } in <body>
E2. let { MkT _ q = t } in <body>
E3. let { MkT (toInteger -> r) _ = t } in <body>
Well (E1) is clearly wrong because the existential 'a' escapes.
What type could 'p' possibly have?
But (E2) is fine, despite the existential pattern, because
q::Int, and nothing escapes.
Even (E3) is fine. The existential pattern binds a dictionary
for (Integral a) which the view pattern can use to convert the
a-valued field to an Integer, so r :: Integer.
An easy way to see all three is to imagine the desugaring.
For (2) it would look like
let q = case t of MkT _ q' -> q'
in <body>
We typecheck pattern bindings as follows:
1. In tcLhs we bind q'::alpha, for each variable q bound by the
pattern, where q' is a fresh name, and alpha is a fresh
unification variable; it will be the monomorphic version of q that
we later generalise
It's very important that these fresh unification variables
alpha are born here, not deep under implications as would happen
if we allocated them when we encountered q during tcPat.
2. Still in tcLhs, we build a little environment mapping "q" ->
q':alpha, and pass that to tcLetPat.
3. Then tcLhs invokes tcLetPat to typecheck the pattern as usual:
- When tcLetPat finds an existential constructor, it binds fresh
type variables and dictionaries as usual, and emits an
implication constraint.
- When tcLetPat finds a variable (TcPat.tcPatBndr) it looks it up
in the little environment, which should always succeed. And
uses tcSubTypeET to connect the type of that variable with the
expected type of the pattern.
And that's it! The implication constraints check for the skolem
escape. It's quite simple and neat, and more expressive than before
e.g. GHC 8.0 rejects (E2) and (E3).
************************************************************************
* *
Generalisation
* *
********************************************************************* -}
-- | How (if at all) to generalise the bindings of a group.
data GeneralisationPlan
  = NoGen               -- No generalisation, no AbsBinds

  | InferGen            -- Implicit generalisation; there is an AbsBinds
        Bool            -- True <=> apply the MR; generalise only unconstrained type vars

  | CheckGen (LHsBind Name) TcIdSigInfo
                        -- One FunBind with a signature
                        -- Explicit generalisation; there is an AbsBindsSig

-- A consequence of the no-AbsBinds choice (NoGen) is that there is
-- no "polymorphic Id" and "monomorphic Id"; there is just the one
-- (NB: comment fragment; its continuation is not in this file chunk)

instance Outputable GeneralisationPlan where
  ppr NoGen          = text "NoGen"
  ppr (InferGen b)   = text "InferGen" <+> ppr b
  ppr (CheckGen _ s) = text "CheckGen" <+> ppr s
decideGeneralisationPlan
   :: DynFlags -> [LHsBind Name] -> IsGroupClosed -> TcSigFun
   -> GeneralisationPlan
-- Choose the generalisation strategy for one binding group.
-- The guards are tried in order, so earlier cases take priority.
decideGeneralisationPlan dflags lbinds closed sig_fn
  | unlifted_pat_binds                       = NoGen
  | has_partial_sigs                         = InferGen (and partial_sig_mrs)
  | Just (bind, sig) <- one_funbind_with_sig = CheckGen bind sig
  | mono_local_binds closed                  = NoGen
  | otherwise                                = InferGen mono_restriction
  where
    binds = map unLoc lbinds

    partial_sig_mrs :: [Bool]
    -- One for each partial signature (so empty => no partial sigs)
    -- The Bool is True if the signature has no constraint context
    --      so we should apply the MR
    -- See Note [Partial type signatures and generalisation]
    partial_sig_mrs
      = [ null theta
        | TcIdSig (PartialSig { psig_hs_ty = hs_ty })
            <- mapMaybe sig_fn (collectHsBindListBinders lbinds)
        , let (_, L _ theta, _) = splitLHsSigmaTy (hsSigWcType hs_ty) ]

    has_partial_sigs   = not (null partial_sig_mrs)

    unlifted_pat_binds = any isUnliftedHsBind binds
       -- Unlifted patterns (unboxed tuple) must not
       -- be polymorphic, because we are going to force them
       -- See Trac #4498, #8762

    mono_restriction  = xopt LangExt.MonomorphismRestriction dflags
                     && any restricted binds

    mono_local_binds ClosedGroup = False
    mono_local_binds _           = xopt LangExt.MonoLocalBinds dflags

    -- With OutsideIn, all nested bindings are monomorphic
    -- except a single function binding with a signature
    one_funbind_with_sig
      | [lbind@(L _ (FunBind { fun_id = v }))] <- lbinds
      , Just (TcIdSig sig) <- sig_fn (unLoc v)
      = Just (lbind, sig)
      | otherwise
      = Nothing

    -- The Haskell 98 monomorphism restriction
    restricted (PatBind {})             = True
    restricted (VarBind { var_id = v }) = no_sig v
    restricted (FunBind { fun_id = v, fun_matches = m }) = restricted_match m
                                                        && no_sig (unLoc v)
    restricted (PatSynBind {}) = panic "isRestrictedGroup/unrestricted PatSynBind"
    restricted (AbsBinds {}) = panic "isRestrictedGroup/unrestricted AbsBinds"
    restricted (AbsBindsSig {}) = panic "isRestrictedGroup/unrestricted AbsBindsSig"

    restricted_match (MG { mg_alts = L _ (L _ (Match _ [] _ _) : _ )}) = True
    restricted_match _                                                 = False
        -- No args => like a pattern binding
        -- Some args => a function binding

    no_sig n = noCompleteSig (sig_fn n)
-- | Decide whether a binding group is "closed": every Name free in the
-- right-hand sides must itself be closed (see Note [Bindings with closed
-- types] in TcRnTypes).  Returns the free-variable map on failure.
isClosedBndrGroup :: Bag (LHsBind Name) -> TcM IsGroupClosed
isClosedBndrGroup binds = do
    type_env <- getLclTypeEnv
    if foldUFM (is_closed_ns type_env) True fv_env
      then return ClosedGroup
      else return $ NonClosedGroup fv_env
  where
    -- Maps each binder of the group to the free Names of its RHS
    fv_env :: NameEnv NameSet
    fv_env = mkNameEnv $ concatMap (bindFvs . unLoc) binds

    bindFvs :: HsBindLR Name idR -> [(Name, NameSet)]
    bindFvs (FunBind { fun_id = f, bind_fvs = fvs })
       = [(unLoc f, fvs)]
    bindFvs (PatBind { pat_lhs = pat, bind_fvs = fvs })
       = [(b, fvs) | b <- collectPatBinders pat]
    bindFvs _
       = []

    is_closed_ns :: TcTypeEnv -> NameSet -> Bool -> Bool
    is_closed_ns type_env ns b = b && nameSetAll (is_closed_id type_env) ns
        -- ns are the Names referred to from the RHS of this bind

    is_closed_id :: TcTypeEnv -> Name -> Bool
    -- See Note [Bindings with closed types] in TcRnTypes
    is_closed_id type_env name
      | Just thing <- lookupNameEnv type_env name
      = case thing of
          ATcId { tct_info = ClosedLet } -> True  -- This is the key line
          ATcId {}                       -> False
          ATyVar {}                      -> False -- In-scope type variables
          AGlobal {}                     -> True  --    are not closed!
          _ -> pprPanic "is_closed_id" (ppr name)
      | otherwise
      = True
        -- The free-var set for a top level binding mentions
        -- imported things too, so that we can report unused imports
        -- These won't be in the local type env.
        -- Ditto class method etc from the current module
-------------------
checkStrictBinds :: TopLevelFlag -> RecFlag
                 -> [LHsBind Name]
                 -> LHsBinds TcId -> [Id]
                 -> TcM ()
-- Check that non-overloaded unlifted bindings are
--      a) non-recursive,
--      b) not top level,
--      c) not a multiple-binding group (more or less implied by (a))
checkStrictBinds top_lvl rec_group orig_binds tc_binds poly_ids
  | any_unlifted_bndr || any_strict_pat   -- This binding group must be matched strictly
  = do { check (isNotTopLevel top_lvl)
               (strictBindErr "Top-level" any_unlifted_bndr orig_binds)
       ; check (isNonRec rec_group)
               (strictBindErr "Recursive" any_unlifted_bndr orig_binds)

       ; check (all is_monomorphic (bagToList tc_binds))
               (polyBindErr orig_binds)
            -- data Ptr a = Ptr Addr#
            -- f x = let p@(Ptr y) = ... in ...
            -- Here the binding for 'p' is polymorphic, but does
            -- not mix with an unlifted binding for 'y'.  You should
            -- use a bang pattern.  Trac #6078.

       ; check (isSingleton orig_binds)
               (strictBindErr "Multiple" any_unlifted_bndr orig_binds)

       -- Complain about a binding that looks lazy
       --    e.g.    let I# y = x in ...
       -- Remember, in checkStrictBinds we are going to do strict
       -- matching, so (for software engineering reasons) we insist
       -- that the strictness is manifest on each binding
       -- However, lone (unboxed) variables are ok
       ; check (not any_pat_looks_lazy)
               (unliftedMustBeBang orig_binds) }
  | otherwise
  = traceTc "csb2" (ppr [(id, idType id) | id <- poly_ids]) >>
    return ()
  where
    any_unlifted_bndr  = any is_unlifted poly_ids
    any_strict_pat     = any (isUnliftedHsBind . unLoc) orig_binds
    any_pat_looks_lazy = any (looksLazyPatBind . unLoc) orig_binds

    is_unlifted id = case tcSplitSigmaTy (idType id) of
                       (_, _, rho) -> isUnliftedType rho
          -- For the is_unlifted check, we need to look inside polymorphism
          -- and overloading.  E.g.  x = (# 1, True #)
          -- would get type forall a. Num a => (# a, Bool #)
          -- and we want to reject that.  See Trac #9140

    is_monomorphic (L _ (AbsBinds { abs_tvs = tvs, abs_ev_vars = evs }))
                     = null tvs && null evs
    is_monomorphic (L _ (AbsBindsSig { abs_tvs = tvs, abs_ev_vars = evs }))
                     = null tvs && null evs
    is_monomorphic _ = True
check :: Bool -> MsgDoc -> TcM ()
-- Just like checkTc, but with a special case for module GHC.Prim:
-- see Note [Compiling GHC.Prim]
check True  _   = return ()
check False err = do { mod <- getModule
                     ; checkTc (mod == gHC_PRIM) err }
-- Error-message builders for the strict-binding checks above.

unliftedMustBeBang :: [LHsBind Name] -> SDoc
unliftedMustBeBang binds
  = hang (text "Pattern bindings containing unlifted types should use an outermost bang pattern:")
       2 (vcat (map ppr binds))

polyBindErr :: [LHsBind Name] -> SDoc
polyBindErr binds
  = hang (text "You can't mix polymorphic and unlifted bindings")
       2 (vcat [vcat (map ppr binds),
                text "Probable fix: add a type signature"])

-- The Bool says whether any binder is unlifted (as opposed to a
-- bang-pattern/unboxed-tuple binding); it only changes the wording.
strictBindErr :: String -> Bool -> [LHsBind Name] -> SDoc
strictBindErr flavour any_unlifted_bndr binds
  = hang (text flavour <+> msg <+> text "aren't allowed:")
       2 (vcat (map ppr binds))
  where
    msg | any_unlifted_bndr = text "bindings for unlifted types"
        | otherwise         = text "bang-pattern or unboxed-tuple bindings"
{- Note [Compiling GHC.Prim]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Module GHC.Prim has no source code: it is the host module for
primitive, built-in functions and types. However, for Haddock-ing
purposes we generate (via utils/genprimopcode) a fake source file
GHC/Prim.hs, and give it to Haddock, so that it can generate
documentation. It contains definitions like
nullAddr# :: NullAddr#
which would normally be rejected as a top-level unlifted binding. But
we don't want to complain, because we are only "compiling" this fake
module for documentation purposes. Hence this hacky test for gHC_PRIM
in checkStrictBinds.
(We only make the test if things look wrong, so there is no cost in
the common case.) -}
{- *********************************************************************
* *
Error contexts and messages
* *
********************************************************************* -}
-- Error context for a pattern binding.
-- This one is called on LHS, when pat and grhss are both Name
-- and on RHS, when pat is TcId and grhss is still Name
patMonoBindsCtxt :: (OutputableBndrId id, Outputable body)
                 => LPat id -> GRHSs Name body -> SDoc
patMonoBindsCtxt pat grhss
  = hang (text "In a pattern binding:") 2 (pprPatBind pat grhss)
-- Error context shown when instantiating an overly-general inferred type;
-- mentions the monomorphism restriction when that extension is on.
instErrCtxt :: Name -> TcType -> TidyEnv -> TcM (TidyEnv, SDoc)
instErrCtxt name ty env
  = do { let (env', ty') = tidyOpenType env ty
       ; return (env', hang (text "When instantiating" <+> quotes (ppr name) <>
                             text ", initially inferred to have" $$
                             text "this overly-general type:")
                          2 (ppr ty') $$
                       extra) }
  where
    extra = sdocWithDynFlags $ \dflags ->
            ppWhen (xopt LangExt.MonomorphismRestriction dflags) $
            text "NB: This instantiation can be caused by the" <+>
            text "monomorphism restriction."
| vTurbine/ghc | compiler/typecheck/TcBinds.hs | bsd-3-clause | 74,628 | 0 | 19 | 22,559 | 11,759 | 6,161 | 5,598 | 820 | 8 |
module Main where
import Control.Monad.Reader (runReaderT)
import Lib
import qualified Options.Applicative as OA
main :: IO ()
-- Parse the command line (with --help support) and run the selected
-- command under the default global options.
main =
    OA.execParser parserSpec >>= \command ->
        runReaderT (interpret command) defaultGlobalOptions
  where
    parserSpec = OA.info (OA.helper <*> programOpts) programInfo
| muhbaasu/pfennig-cli | app/Main.hs | bsd-3-clause | 283 | 0 | 13 | 59 | 86 | 47 | 39 | 8 | 1 |
{-# LANGUAGE TypeFamilies #-}
import Bitmap
import Control.Applicative
import Control.Arrow
-- | An RGB colour value, one 'Int' per channel (red, green, blue).
data RGB8 = RGB8 Int Int Int deriving (Show, Eq, Ord)

-- | An in-memory image: a cursor position (imageX, imageY) plus the pixel
-- rows (outer list has one entry per row; see the TwoDImage instance).
data Image = Image { imageX :: Int, imageY :: Int, getImg :: [[RGB8]] } deriving Show
instance TwoDImage Image where
    type TwoDImageColor Image = RGB8
    -- h rows of w copies of the fill colour; cursor starts at (0, 0)
    new c w h = return $ Image 0 0 $ replicate h $ replicate w c
    fromColorList = return . Image 0 0
    toColorList = return . getImg
    -- (width, height); NOTE(review): 'head' makes this partial on
    -- zero-row images -- confirm callers never build an empty image
    getSize = return . (length . head &&& length) . getImg
    getXY = return . (imageX &&& imageY)
    setXY img (x, y) = return img { imageX = x, imageY = y }
    -- Index row imageY, then column imageX ((!!) is partial out of range)
    getPixel = return . ((!!) <$> ((!!) <$> getImg <*> imageY) <*> imageX)
    -- Rebuild the cursor row with 'set', then splice it back into the grid
    setPixel img c = return $ img { getImg = getImg img `set` imageY img $
        (getImg img !! imageY img) `set` imageX img $ c }
-- Conversions between RGB8 and the Bitmap library's channel triples.
instance Color RGB8 where
    fromRGB8 r g b = RGB8 (fromIntegral r) (fromIntegral g) (fromIntegral b)
    toRGB8 (RGB8 r g b) = (fromIntegral r, fromIntegral g, fromIntegral b)
-- | Replace the element at index @i@ with @x@, leaving the rest of the
-- list untouched.  Follows take/drop semantics for out-of-range indices
-- (e.g. @i >= length xs@ appends @x@).
set :: [a] -> Int -> a -> [a]
set xs i x = prefix ++ x : suffix
  where
    prefix = take i xs
    suffix = drop (i + 1) xs
-- Load the two test bitmaps from fixed paths (relative to the cwd).
getSample, getGradation :: IO Image
getSample = readBMPFile "tmp/out/sample.bmp"
getGradation = readBMPFile "tmp/out/grad.bmp"
| YoshikuniJujo/binary-file | examples/testBitmap.hs | bsd-3-clause | 1,158 | 2 | 14 | 234 | 500 | 268 | 232 | -1 | -1 |
-- | Defines a monad for transfer functions.
module Language.Python.TypeInference.Analysis.TFMonad (
goFlowInsensitive,
TFState,
TF,
simpleTransferFunction,
callTransferFunction,
getType,
getTypeWithDefault,
setType,
removeType,
getAllFromL,
removeFromL,
extractFromL,
getGlobalTypes,
modifyType,
modifyAttribute,
cpType,
mvType,
clsAttr,
instAttr,
classOfInstance
) where
import Control.Monad.State hiding (join)
import Data.Map (Map)
import Data.Maybe (catMaybes, fromMaybe, listToMaybe)
import Language.Analysis.DFA.Lattice
import Language.Analysis.DFA.MonotoneFramework
import Language.Python.TypeInference.Analysis.MapLattice
import Language.Python.TypeInference.Analysis.TypeLattice
import Language.Python.TypeInference.CFG.CFG
import qualified Data.Map as Map
import qualified Data.Set as Set
-- | Use flow-insensitive analysis for the identifier?
-- Class\/instance identifiers and external-scope names: always.
-- Module-scope names: only when the flag (first argument) is True.
-- Everything else: never.  Equation order matters only for the
-- module-scope case versus the catch-all.
goFlowInsensitive :: Bool -> Identifier -> Bool
goFlowInsensitive _ (ClassIdentifier _) = True
goFlowInsensitive _ (InstanceIdentifier _) = True
goFlowInsensitive _ (Name (ExternalScope _) _) = True
goFlowInsensitive True (Name (ModuleScope _) _) = True
goFlowInsensitive _ _ = False
-- | State used and changed by transfer functions.
data TFState = TFState {
        tfsL :: L,                 -- ^ flow-sensitive lattice value
        tfsGlobalL :: L,           -- ^ flow-insensitive (global) lattice value
        tfsChangedGlobalL :: Bool, -- ^ was the global lattice value written?
        tfsUsedGlobalL :: Bool,    -- ^ was the global lattice value read?
        tfsFiModuleScope :: Bool   -- ^ treat module-scope names flow-insensitively?
    }
-- | Build the starting state for running a transfer function: the given
-- local and global lattice values, with both global-lattice flags cleared.
initialTFState :: Bool -> L -> L -> TFState
initialTFState fiModuleScope l gl =
    TFState { tfsL = l
            , tfsGlobalL = gl
            , tfsChangedGlobalL = False
            , tfsUsedGlobalL = False
            , tfsFiModuleScope = fiModuleScope
            }
-- | Monad for transfer functions: a state monad over 'TFState'.
type TF = State TFState
-- | Create a 'SimpleTransferFunction' given a flag specifying if
-- flow-insensitive analysis for module-scope identifiers is used, and a
-- computation in the 'TF' monad.
simpleTransferFunction :: Bool -> TF () -> SimpleTF L
simpleTransferFunction fiModuleScope tf =
    f
    where f :: L -> L -> (L, Maybe L, Bool)
          -- Result triple: new local lattice value, new global lattice
          -- value (Just only if it was written), global-value-read flag.
          f l gl = let TFState l' gl' changedGL usedGL _ =
                           execState tf (initialTFState fiModuleScope l gl)
                       maybeGL' = if changedGL then Just gl' else Nothing
                   in (l', maybeGL', usedGL)
-- | Create a 'CallTransferFunction' given a flag specifying if
-- flow-insensitive analysis for module-scope identifiers is used, and a
-- computation in the 'TF' monad.
callTransferFunction :: Bool -> TF ([Label], Bool) -> CallTF L
callTransferFunction fiModuleScope tf =
    f
    where f :: L -> L -> ([Label], Bool, L, Maybe L, Bool)
          -- As for 'simpleTransferFunction', but the TF computation also
          -- yields labels and a Bool that are passed through unchanged.
          f l gl = let ((fs, d), TFState l' gl' changedGL usedGL _) =
                           runState tf (initialTFState fiModuleScope l gl)
                       maybeGL' = if changedGL then Just gl' else Nothing
                   in (fs, d, l', maybeGL', usedGL)
-- Record that the global lattice value was written / read, respectively;
-- the flags are reported back by the run functions above.
setChangedGlobalL, setUsedGlobalL :: TF ()
setChangedGlobalL = modify $ \s -> s { tfsChangedGlobalL = True }
setUsedGlobalL = modify $ \s -> s { tfsUsedGlobalL = True }
-- | Retrieve the type for an identifier.  Flow-insensitive identifiers
-- are looked up in the global lattice value (and the used flag is set);
-- all others in the local one.
getType :: Identifier -> TF (Maybe UnionType)
getType i = do fiModuleScope <- gets tfsFiModuleScope
               let useGlobal = goFlowInsensitive fiModuleScope i
                   lookHere = if useGlobal then tfsGlobalL else tfsL
               when useGlobal setUsedGlobalL
               l <- gets lookHere
               return $ Map.lookup i l
-- | Retrieve the type for an identifier, falling back to the supplied
-- default when the identifier has no entry in the lattice value.
getTypeWithDefault :: UnionType -> Identifier -> TF UnionType
getTypeWithDefault def i = fmap (fromMaybe def) (getType i)
-- | Set the type for an identifier.  Flow-insensitive identifiers are
-- joined into the existing global entry (monotone update, and the
-- changed flag is set); all others overwrite the local entry.
setType :: Identifier -> UnionType -> TF ()
setType i t = do fiModuleScope <- gets tfsFiModuleScope
                 let useGlobal = goFlowInsensitive fiModuleScope i
                     f s = if useGlobal
                           then let l = tfsGlobalL s
                                    t' = t `join` Map.findWithDefault bot i l
                                in s {tfsGlobalL = Map.insert i t' l }
                           else s {tfsL = Map.insert i t (tfsL s)}
                 when useGlobal setChangedGlobalL
                 modify f
-- | Remove identifier/type.  Flow-insensitive identifiers live in the
-- global lattice value and are never removed (no-op).
removeType :: Identifier -> TF ()
removeType i = do fiModuleScope <- gets tfsFiModuleScope
                  let useGlobal = goFlowInsensitive fiModuleScope i
                      f s = s {tfsL = Map.delete i (tfsL s)}
                  unless useGlobal (modify f)
-- | Every identifier/type pair whose identifier satisfies the predicate,
-- read from the local lattice value only; the global lattice value is
-- not consulted.
getAllFromL :: (Identifier -> Bool) -> TF [(Identifier, UnionType)]
getAllFromL p = gets (filter (p . fst) . Map.toList . tfsL)
-- | Drop every mapping whose identifier satisfies the predicate from the
-- local lattice value; the global lattice value is not touched.
removeFromL :: (Identifier -> Bool) -> TF ()
removeFromL p = modify $ \s ->
    let (_, keep) = Map.partitionWithKey (\k _ -> p k) (tfsL s)
    in s { tfsL = keep }
-- | Remove and return the mappings for identifiers that match a predicate
-- from the lattice value. The global lattice value is not used.
extractFromL :: (Identifier -> Bool) -> TF (Map Identifier UnionType)
extractFromL p = do s <- get
                    let l = tfsL s
                        -- a: matching entries (returned); b: the rest (kept)
                        (a, b) = Map.partitionWithKey (\k _ -> p k) l
                    put $ s { tfsL = b }
                    return a
-- | Return the mappings for identifiers that match a predicate from the
-- global lattice value (and mark the global value as read).
getGlobalTypes :: (Identifier -> Bool) -> TF (Map Identifier UnionType)
getGlobalTypes p = do l <- gets tfsGlobalL
                      setUsedGlobalL
                      return $ Map.filterWithKey (\k _ -> p k) l
-- | Apply a function to the identifier's type, if it has one; identifiers
-- without an entry are left untouched.
modifyType :: Identifier -> (UnionType -> UnionType) -> TF ()
modifyType i f = getType i >>= maybe (return ()) (setType i . f)
-- | Update the type of an expression of the form /identifier.attribute/.
-- Identifiers with no entry or type Top are left alone; otherwise each
-- member of the union is updated and the results re-joined.
modifyAttribute :: Identifier -> String -> (UnionType -> UnionType) -> TF ()
modifyAttribute i attr f =
    do mt <- getType i
       case mt of
           Nothing -> return ()
           Just UTyTop -> return ()
           Just (UTy s) -> do types <- mapM g (Set.toList s)
                              setType i (joinAll types)
    where g :: ValueType -> TF UnionType
          -- reference types: modify corresponding global type instead
          g t@(ClassType (ClsRef classId)) =
              do modifyAttribute (ClassIdentifier classId) attr f
                 return $ oneType t
          g t@(InstanceType (InstRef classId)) =
              do modifyAttribute (InstanceIdentifier classId) attr f
                 return $ oneType t
          -- class and instance types: modify attribute
          g (ClassType (ClsTy classId sup env)) =
              return $ oneType $ ClsTy classId sup (updEnv env)
          g (InstanceType (InstTy cls env)) =
              return $ oneType $ InstTy cls (updEnv env)
          -- anything else cannot carry attributes; contributes bot
          g _ = return bot
          -- Apply f to the attribute's type, treating "absent" as bot
          updEnv = Map.alter (Just . f') attr
          f' Nothing = f bot
          f' (Just t) = f t
-- | @cpType from to@ sets the type of @to@ to the type of @from@;
-- a no-op when @from@ has no entry.
cpType :: Identifier -> Identifier -> TF ()
cpType from to = getType from >>= maybe (return ()) (setType to)
-- | @mvType from to@ sets the type of @to@ to the type of @from@ and
-- removes the entry for @from@; a no-op when @from@ has no entry.
mvType :: Identifier -> Identifier -> TF ()
mvType from to = do mt <- getType from
                    case mt of Just t -> do removeType from
                                            setType to t
                               Nothing -> return ()
-- | Look up the attribute in the given class type (and the types of its
-- superclasses).  References are dereferenced through the global lattice
-- and the results of all union members are joined.
clsAttr :: String -> ClassType -> TF (Maybe UnionType)
clsAttr name (ClsRef classId) =
    do mt <- getType $ ClassIdentifier classId
       case mt of
           Nothing -> return Nothing
           Just UTyTop -> return $ Just UTyTop
           Just (UTy s) -> do let helper :: ValueType -> TF (Maybe UnionType)
                                  helper (ClassType ct) = clsAttr name ct
                                  helper _ = return $ Just bot
                              list <- mapM helper (Set.toList s)
                              case catMaybes list of
                                  [] -> return Nothing
                                  ts -> return $ Just $ joinAll ts
-- Concrete class type: check its own environment first, then the
-- superclasses (first hit wins via listToMaybe).
clsAttr name (ClsTy _ sup env) =
    case Map.lookup name env of
        (Just t) -> return $ Just t
        Nothing -> do attrs <- mapM (clsAttr name) sup
                      return $ listToMaybe $ catMaybes attrs
-- | Look up the attribute in the given instance type (and its class type).
-- Mirrors 'clsAttr': references are dereferenced through the global
-- lattice; concrete instances fall back to their class.
instAttr :: String -> InstanceType -> TF (Maybe UnionType)
instAttr name (InstRef classId) =
    do mt <- getType $ InstanceIdentifier classId
       case mt of
           Nothing -> return Nothing
           Just UTyTop -> return $ Just UTyTop
           Just (UTy s) -> do let helper :: ValueType -> TF (Maybe UnionType)
                                  helper (InstanceType it) = instAttr name it
                                  helper _ = return $ Just bot
                              list <- mapM helper (Set.toList s)
                              case catMaybes list of
                                  [] -> return Nothing
                                  ts -> return $ Just $ joinAll ts
instAttr name (InstTy cls env) =
    case Map.lookup name env of
        (Just t) -> return $ Just t
        Nothing -> clsAttr name cls
-- | Get the class type for an instance type.
classOfInstance :: InstanceType -> TF ClassType
classOfInstance (InstTy classType _) = return classType
classOfInstance (InstRef classId) =
    do mt <- getType $ InstanceIdentifier classId
       -- NOTE(review): the irrefutable patterns below assume the lattice
       -- maps every InstanceIdentifier to a singleton instance type; any
       -- other shape crashes at runtime -- confirm this invariant upstream.
       let Just (UTy s) = mt
           [InstanceType (InstTy classType _)] = Set.toList s
       return classType
| lfritz/python-type-inference | python-type-inference/src/Language/Python/TypeInference/Analysis/TFMonad.hs | bsd-3-clause | 10,458 | 19 | 25 | 3,535 | 2,778 | 1,401 | 1,377 | 186 | 8 |
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-|
Module : Game.GoreAndAsh.Sync
Description : Gore&Ash high-level networking core module
Copyright : (c) Anton Gushcha, 2015-2016
License : BSD3
Maintainer : [email protected]
Stability : experimental
Portability : POSIX
The core module contains high-level networking API for Gore&Ash. It allows to perform
automatic synchronization of states on clients and server using a special EDSL.
Example of embedding:
TODO ADD THIS
Important note, the system tries to use channel id 1 for service messages, but fallbacks
to default channel if there is only one channel allocated in network module. Check initialization
of network module, client and server allocated channels count must match.
-}
module Game.GoreAndAsh.Sync(
SyncT
, runSyncT
-- * Options
, SyncRole(..)
, SyncOptions
, syncOptionsRole
, syncOptionsChannel
, syncOptionsCollectionsChannel
, syncOptionsResolveDelay
, defaultSyncOptions
-- * API
, SyncName
, SyncItemId
, SyncMonad(..)
, ClientSynced(..)
, serverRejected
, conditional
, syncWithNameWith
, syncWithName
, syncUnregisterName
, syncUnregisterNames
-- ** Server side
, syncToClientManual
, syncToClient
, syncToClientsManual
, syncToClients
, syncToAllClients
, sendToClient
, sendToClients
, sendToAllClients
, sendToClientMany
, sendToClientsMany
, sendToAllClientsMany
, syncFromClient
, syncFromClients
, syncFromAllClients
, receiveFromClient
, receiveFromClients
, receiveFromAllClients
-- ** Client side
, syncToServer
, syncFromServerWith
, syncFromServer
, sendToServer
, sendToServerMany
, receiveFromServer
-- ** Prediction
, predict
, predictMaybe
, predictM
, predictMaybeM
, predictInterpolateM
-- * Collections
, hostCollection
, hostSimpleCollection
, remoteCollection
) where
import Game.GoreAndAsh.Sync.API as X
import Game.GoreAndAsh.Sync.Collection as X
import Game.GoreAndAsh.Sync.Module as X
import Game.GoreAndAsh.Sync.Options as X
import Game.GoreAndAsh.Sync.Predict as X
| Teaspot-Studio/gore-and-ash-sync | src/Game/GoreAndAsh/Sync.hs | bsd-3-clause | 2,096 | 0 | 5 | 383 | 224 | 158 | 66 | 57 | 0 |
{-# LANGUAGE AllowAmbiguousTypes #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE UndecidableInstances #-}
module Data.FAlgebra.Tree
( module Data.FAlgebra.Base
, TreeF(..)
, Tree(..)
, _node
, _left
, _right
, _children
, preorder
, inorder
, postorder
, branch
, leaf
, empty
, left
, right
) where
import Prelude hiding (reverse)
import Data.FAlgebra.Base
import Data.FAlgebra.Annotation
import Data.Foldable
import Data.Proxy
import Data.Traversable
import Control.Applicative hiding (empty)
import Lens.Micro
-- |Functor for trees with values on internal nodes.
-- 'Empty' is the leaf case; 'Branch' carries a node value of type @a@
-- and two subtrees at the recursive position @b@.
data TreeF a b = Empty | Branch a b b deriving (Eq, Show, Ord)

deriving instance Functor (TreeF a)
deriving instance Foldable (TreeF a)
deriving instance Traversable (TreeF a)
-- |Traversal for the value of a node.  'Empty' has no value, so it is
-- passed through untouched (these are traversals, not lenses).
_node :: Applicative f => LensLike f (TreeF a b) (TreeF a' b) a a'
_node _ Empty = pure Empty
_node f (Branch a b1 b2) = fmap (\a' -> Branch a' b1 b2) (f a)
{-# INLINE _node #-}

-- |Traversal for the left branch of a node. Can't change type because left and right need to stay the same type.
_left :: Applicative f => LensLike' f (TreeF a b) b
_left _ Empty = pure Empty
_left f (Branch a b1 b2) = fmap (\b1' -> Branch a b1' b2) (f b1)
{-# INLINE _left #-}

-- |Traversal for the right branch of a node. Can't change type because left and right need to stay the same type.
_right :: Applicative f => LensLike' f (TreeF a b) b
_right _ Empty = pure Empty
_right f (Branch a b1 b2) = fmap (Branch a b1) (f b2)
{-# INLINE _right #-}

-- |Traversal for both children of a node (left effect runs first).
_children :: Applicative f => LensLike f (TreeF a b) (TreeF a b') b b'
_children _ Empty = pure Empty
_children f (Branch a b1 b2) = Branch a <$> f b1 <*> f b2
{-# INLINE _children #-}
-- |Sequence root and then both branches
preorder :: Applicative g => TreeF (g a) (g b) -> g (TreeF a b)
preorder Empty = pure Empty
preorder (Branch a b1 b2) = Branch <$> a <*> b1 <*> b2

-- |Sequence the left branch, then the root, then the right branch.
-- The lambda reorders results back into Branch order while effects
-- run left-root-right.
inorder :: Applicative g => TreeF (g a) (g b) -> g (TreeF a b)
inorder Empty = pure Empty
inorder (Branch a b1 b2) = (\x y z -> Branch y x z) <$> b1 <*> a <*> b2

-- |Sequence both branches and then the root (same reordering trick).
postorder :: Applicative g => TreeF (g a) (g b) -> g (TreeF a b)
postorder Empty = pure Empty
postorder (Branch a b1 b2) = (\x y z -> Branch z x y) <$> b1 <*> b2 <*> a
-- |A binary tree as the fixed point of 'TreeF'.
newtype Tree a = Tree { runTree :: Fix (TreeF a) }

deriving instance Show (Fix (TreeF a)) => Show (Tree a)

-- TODO: Reduce boilerplate for newtypes
-- The algebra/coalgebra instances are inherited from the underlying
-- 'Fix' by unwrapping and rewrapping the newtype.
instance (Functor f, FAlgebra f (Fix (TreeF a))) => FAlgebra f (Tree a) where
    alg = Tree . alg . fmap runTree

instance (Functor f, FCoalgebra f (Fix (TreeF a))) => FCoalgebra f (Tree a) where
    coalg = fmap Tree . coalg . runTree

-- Maps over every node value via the '_node' traversal.
instance Functor Tree where
    fmap f = Tree . fmapFix (over _node f) . runTree
-- |Create a tree from its node and its children
branch :: FAlgebra (TreeF a) t => a -> t -> t -> t
branch a b1 b2 = alg $ Branch a b1 b2

-- |Create a leaf (a tree with empty children)
leaf :: forall a t. FAlgebra (TreeF a) t => a -> t
leaf a = branch a e e
    where
    -- The annotation pins the ambiguous node type @a@
    e = alg (Empty :: TreeF a t)

-- |Create an empty tree. Using this is a bit tricky due to the ambiguous type.
empty :: forall a t. FAlgebra (TreeF a) t => t
empty = alg (Empty :: TreeF a t)

-- |Get the left branch of a tree; an empty tree is returned unchanged.
left :: forall a t. FCoalgebra (TreeF a) t => t -> t
left t = case (coalg t :: TreeF a t) of
    Empty -> t
    Branch _ l _ -> l

-- |Get the right branch of a tree; an empty tree is returned unchanged.
right :: forall a t. FCoalgebra (TreeF a) t => t -> t
right t = case (coalg t :: TreeF a t) of
    Empty -> t
    Branch _ _ r -> r
-- Size annotation: an empty tree has size 0; a branch counts itself
-- plus the sizes of both subtrees.
instance FAlgebra (TreeF a) Size where
    alg Empty = 0
    alg (Branch _ b1 b2) = 1 + b1 + b2
| bhamrick/fixalgs | Data/FAlgebra/Tree.hs | bsd-3-clause | 4,058 | 0 | 10 | 917 | 1,445 | 754 | 691 | 89 | 2 |
{-# LANGUAGE FlexibleContexts #-}
module Database.PostgreSQL.Simple.Lifted.Transaction
(
-- * Transaction handling
withTransaction
, withTransactionLevel
, withTransactionMode
, withTransactionSerializable
, withTransactionModeRetry
, begin
, beginMode
, beginLevel
, commit
, rollback
-- * Defaults
, PS.defaultTransactionMode
, PS.defaultIsolationLevel
, PS.defaultReadWriteMode
-- * Savepoint
, withSavepoint
, releaseSavepoint
, rollbackToSavepoint
, rollbackToAndReleaseSavepoint
, newSavepoint
-- * Predicates
, PS.isSerializationError
, PS.isNoActiveTransactionError
, PS.isFailedTransactionError
-- * Data types
, TransactionMode(..)
, IsolationLevel(..)
, ReadWriteMode(..)
) where
import Control.Exception (SomeException, fromException)
import Control.Exception.Lifted
( mask
, onException
, catch
, throwIO
, try
)
import Database.PostgreSQL.Simple (SqlError)
import Database.PostgreSQL.Simple.Types (Savepoint)
import Database.PostgreSQL.Simple.Transaction
( TransactionMode(..)
, IsolationLevel
, ReadWriteMode
)
import qualified Database.PostgreSQL.Simple.Transaction as PS
import Database.PostgreSQL.Simple.Lifted.PostgresClient
-- | Run an action inside a transaction in the default transaction mode;
-- delegates to 'withTransactionMode'.
withTransaction :: (PostgresClient m)
                => m a -> m a
withTransaction = withTransactionMode PS.defaultTransactionMode
-- | Like 'withTransaction', but at the given isolation level (other
-- mode fields keep their defaults).
withTransactionLevel :: (PostgresClient m)
                     => IsolationLevel -> m a -> m a
withTransactionLevel lvl =
    withTransactionMode PS.defaultTransactionMode { PS.isolationLevel = lvl }
-- | Begin a transaction in the default transaction mode.
begin :: (PostgresClient m) => m ()
begin = liftPSGClient $ PS.beginMode PS.defaultTransactionMode

-- | Begin a transaction at the given isolation level.
beginLevel :: (PostgresClient m) => IsolationLevel -> m ()
beginLevel = liftPSGClient . PS.beginLevel

-- | Begin a transaction in the given mode.
beginMode :: (PostgresClient m) => TransactionMode -> m ()
beginMode = liftPSGClient . PS.beginMode

-- | Commit the current transaction.
commit :: (PostgresClient m) => m ()
commit = liftPSGClient PS.commit
-- | Roll back the current transaction.
--
-- Fix: this previously lifted 'PS.commit', so calling 'rollback'
-- (directly, or via the exception/retry paths in 'withTransactionMode'
-- and 'withTransactionModeRetry') silently COMMITTED the transaction
-- instead of rolling it back.
rollback :: (PostgresClient m) => m ()
rollback = liftPSGClient PS.rollback
-- | Run the action in a transaction begun with the given mode: commits
-- when the action returns normally; calls 'rollback' if it throws.
-- Async exceptions are masked outside the user action.
withTransactionMode :: (PostgresClient m)
                    => TransactionMode -> m a -> m a
withTransactionMode mode act =
    mask $ \restore -> do
        beginMode mode
        r <- restore act `onException` rollback
        commit
        return r
-- | Run the action in a Serializable\/ReadWrite transaction, retrying
-- automatically when a serialization error is detected.
withTransactionSerializable :: (PostgresClient m)
                            => m a -> m a
withTransactionSerializable =
    withTransactionModeRetry
        TransactionMode
        { isolationLevel = PS.Serializable
        , readWriteMode = PS.ReadWrite
        }
        PS.isSerializationError
-- | Run the action in a transaction with the given mode, retrying the
-- whole transaction whenever it fails with a 'SqlError' accepted by the
-- predicate; any other exception is rolled back and rethrown.
withTransactionModeRetry :: (PostgresClient m)
                         => TransactionMode
                         -> (SqlError -> Bool)
                         -> m a
                         -> m a
withTransactionModeRetry mode shouldRetry act =
    mask $ \restore ->
        retryLoop $ try $ do
            a <- restore act
            commit
            return a
  where
    retryLoop :: (PostgresClient m)
              => m (Either SomeException a) -> m a
    retryLoop act' = do
        beginMode mode
        r <- act'
        case r of
            Left e -> do
                rollback
                -- fromException narrows to SqlError; anything else rethrows
                case fmap shouldRetry (fromException e) of
                    Just True -> retryLoop act'
                    _ -> throwIO e
            Right a -> return a
-- | Run the action inside a fresh savepoint.  On exception the savepoint
-- is rolled back and released; on success it is released, falling back
-- to rollback-and-release when the release itself reports a failed
-- transaction.
withSavepoint :: (PostgresClient m)
              => m a -> m a
withSavepoint body =
    mask $ \restore -> do
        sp <- newSavepoint
        r <- restore body `onException` rollbackToAndReleaseSavepoint sp
        releaseSavepoint sp `catch` \err ->
            if PS.isFailedTransactionError err
                then rollbackToAndReleaseSavepoint sp
                else throwIO err
        return r
-- | Release a savepoint, keeping the work done since it was set.
releaseSavepoint :: (PostgresClient m)
                 => Savepoint
                 -> m ()
releaseSavepoint s = liftPSGClient (`PS.releaseSavepoint` s)

-- | Roll back to a savepoint without releasing it.
rollbackToSavepoint :: (PostgresClient m)
                    => Savepoint
                    -> m ()
rollbackToSavepoint s = liftPSGClient (`PS.rollbackToSavepoint` s)

-- | Roll back to a savepoint and release it.
rollbackToAndReleaseSavepoint :: (PostgresClient m)
                              => Savepoint
                              -> m ()
rollbackToAndReleaseSavepoint s =
    liftPSGClient (`PS.rollbackToAndReleaseSavepoint` s)

-- | Create a new savepoint within the current transaction.
newSavepoint :: (PostgresClient m) => m Savepoint
newSavepoint = liftPSGClient PS.newSavepoint
| onurzdg/clicklac | src/Database/PostgreSQL/Simple/Lifted/Transaction.hs | bsd-3-clause | 4,447 | 0 | 17 | 1,295 | 1,064 | 571 | 493 | 124 | 3 |
{-# LANGUAGE TypeFamilies, TypeSynonymInstances, FlexibleInstances, MultiParamTypeClasses, FlexibleContexts, RankNTypes, GADTs, OverloadedStrings #-}
module QueryArrow.Cypher.BuiltIn where
import QueryArrow.Syntax.Term
import QueryArrow.Cypher.Cypher
import Data.Map.Strict (fromList)
import QueryArrow.ListUtils (subset)
import Control.Monad.Trans.State.Strict (get, put)
import Data.Convertible (convert)
import Data.Monoid ((<>))
-- | Table of built-in predicates and their translations to Cypher.
-- Comparison predicates ("le", "lt", "ge", …) become WHERE conditions;
-- arithmetic/string predicates ("add", "substr", …) bind their result
-- variable to a Cypher expression in the substitution map carried in
-- the translator state.  The previously copy-pasted lambdas are
-- factored into the 'comparison', 'bindVar', 'apply3' and 'binaryOp'
-- helpers below; "eq" keeps its bespoke logic.
cypherBuiltIn :: (String -> PredName) -> CypherBuiltIn
cypherBuiltIn lookupPred =
    CypherBuiltIn ( fromList [
        (lookupPred "le", comparison "<=" Pos),
        (lookupPred "lt", comparison "<" Pos),
        (lookupPred "eq", \ [arg1, arg2] -> do
            (a, b, repmap, rvars0, env0, ptm) <- get
            let rvars = map convert rvars0
            let env = map convert env0
            let fv2 = fv arg2
            if fv2 `subset` env
                then
                    case arg1 of
                        -- An unbound return variable is bound by
                        -- substitution; an environment variable becomes
                        -- an equality condition.
                        CypherVarExpr v ->
                            if v `elem` env
                                then return (cwhere (CypherCompCond "=" arg1 arg2 Pos))
                                else if v `elem` rvars
                                    then do
                                        put (a, b, repmap <> cypherVarExprMap v arg2, rvars0, env0, ptm)
                                        return mempty
                                    else return mempty
                        _ -> do
                            let fv1 = fv arg1
                            if fv1 `subset` env
                                then return (cwhere (CypherCompCond "=" arg1 arg2 Pos))
                                else error "eq: first argument is not a variable and contains free variables"
                else error "eq: second argument contains free variables"
            ),
        (lookupPred "like_regex", comparison "=~" Pos),
        -- LIKE is compiled to a regex match on the translated pattern.
        (lookupPred "like", \[arg1, arg2] ->
            return (cwhere (CypherCompCond "=~" arg1 (wildcardToRegex arg2) Pos))),
        (lookupPred "regex_replace", apply3 "replace"), -- currently only do non regex replace
        (lookupPred "substr", apply3 "substr"),
        (lookupPred "replace", apply3 "replace"),
        (lookupPred "concat", binaryOp "+"),
        (lookupPred "add", binaryOp "+"),
        (lookupPred "sub", binaryOp "-"),
        (lookupPred "mul", binaryOp "*"),
        (lookupPred "div", binaryOp "/"),
        (lookupPred "strlen", \[arg1, CypherVarExpr v] ->
            bindVar v (CypherAppExpr "length" [arg1])),
        (lookupPred "in", comparison "in" Pos),
        (lookupPred "ge", comparison ">=" Pos),
        (lookupPred "gt", comparison ">" Pos),
        (lookupPred "ne", comparison "<>" Pos),
        (lookupPred "not_like_regex", comparison "=~" Neg)
    ]) where
        -- Two-argument predicate compiled to a WHERE comparison.
        comparison op sign [arg1, arg2] =
            return (cwhere (CypherCompCond op arg1 arg2 sign))
        -- Record in the state that result variable v is replaced by
        -- expression e; emits no condition.
        bindVar v e = do
            (a, b, repmap, rvars, env, ptm) <- get
            put (a, b, repmap <> cypherVarExprMap v e, rvars, env, ptm)
            return mempty
        -- Predicates of shape f(x, y, z, result) -> function call.
        apply3 fn [arg1, arg2, arg3, CypherVarExpr v] =
            bindVar v (CypherAppExpr fn [arg1, arg2, arg3])
        -- Predicates of shape op(x, y, result) -> infix expression.
        binaryOp op [arg1, arg2, CypherVarExpr v] =
            bindVar v (CypherInfixExpr op arg1 arg2)
-- | Build a Cypher expression that rewrites a SQL-style wildcard
-- pattern into a regular expression at query time: regex
-- metacharacters are escaped, then % becomes .* and _ becomes . --
-- NB the escaping table is incomplete.
wildcardToRegex :: CypherExpr -> CypherExpr
wildcardToRegex pat = foldl substitute pat rewrites
  where
    -- Wrap the accumulated expression in one replace(…) call.
    substitute e (from, to) =
        CypherAppExpr "replace" [e, CypherStringConstExpr from, CypherStringConstExpr to]
    -- Applied left to right: escape first, then expand wildcards.
    rewrites = [("\\", "\\\\"), (".", "\\."), ("*", "\\*"), ("%", ".*"), ("_", ".")]
| xu-hao/QueryArrow | QueryArrow-db-cypher/src/QueryArrow/Cypher/BuiltIn.hs | bsd-3-clause | 5,614 | 0 | 24 | 2,017 | 1,847 | 1,026 | 821 | 89 | 6 |
{-# OPTIONS -fno-cse #-}
{-# LANGUAGE NamedFieldPuns #-}
-- -fno-cse is needed for GLOBAL_VAR's to behave properly
-----------------------------------------------------------------------------
--
-- GHC Driver
--
-- (c) The University of Glasgow 2005
--
-----------------------------------------------------------------------------
module DriverPipeline (
-- Run a series of compilation steps in a pipeline, for a
-- collection of source files.
oneShot, compileFile,
-- Interfaces for the batch-mode driver
linkBinary,
-- Interfaces for the compilation manager (interpreted/batch-mode)
preprocess,
compileOne, compileOne',
link,
-- Exports for hooks to override runPhase and link
PhasePlus(..), CompPipeline(..), PipeEnv(..), PipeState(..),
phaseOutputFilename, getPipeState, getPipeEnv,
hscPostBackendPhase, getLocation, setModLocation, setDynFlags,
runPhase, exeFileName,
mkExtraObjToLinkIntoBinary, mkNoteObjsToLinkIntoBinary,
maybeCreateManifest, runPhase_MoveBinary,
linkingNeeded, checkLinkInfo
) where
#include "HsVersions.h"
import PipelineMonad
import Packages
import HeaderInfo
import DriverPhases
import SysTools
import HscMain
import Finder
import HscTypes hiding ( Hsc )
import Outputable
import Module
import UniqFM ( eltsUFM )
import ErrUtils
import DynFlags
import Config
import Panic
import Util
import StringBuffer ( hGetStringBuffer )
import BasicTypes ( SuccessFlag(..) )
import Maybes ( expectJust )
import ParserCoreUtils ( getCoreModuleName )
import SrcLoc
import FastString
import LlvmCodeGen ( llvmFixupAsm )
import MonadUtils
import Platform
import TcRnTypes
import Hooks
import Exception
import Data.IORef ( readIORef )
import System.Directory
import System.FilePath
import System.IO
import Control.Monad
import Data.List ( isSuffixOf )
import Data.Maybe
import System.Environment
import Data.Char
-- ---------------------------------------------------------------------------
-- Pre-process
-- | Just preprocess a file, put the result in a temp. file (used by the
-- compilation manager during the summary phase).
--
-- We return the augmented DynFlags, because they contain the result
-- of slurping in the OPTIONS pragmas
preprocess :: HscEnv
           -> (FilePath, Maybe Phase) -- ^ filename and starting phase
           -> IO (DynFlags, FilePath)
-- Runs the pipeline stopping at the 'anyHsc' pseudo-phase; the
-- 'Temporary' output spec puts the preprocessed result in a temp file.
preprocess hsc_env (filename, mb_phase) =
  ASSERT2(isJust mb_phase || isHaskellSrcFilename filename, text filename)
  runPipeline anyHsc hsc_env (filename, fmap RealPhase mb_phase)
        Nothing Temporary Nothing{-no ModLocation-} Nothing{-no stub-}
-- ---------------------------------------------------------------------------
-- | Compile
--
-- Compile a single module, under the control of the compilation manager.
--
-- This is the interface between the compilation manager and the
-- compiler proper (hsc), where we deal with tedious details like
-- reading the OPTIONS pragma from the source file, converting the
-- C or assembly that GHC produces into an object file, and compiling
-- FFI stub files.
--
-- NB. No old interface can also mean that the source has changed.
compileOne :: HscEnv
           -> ModSummary      -- ^ summary for module being compiled
           -> Int             -- ^ module N ...
           -> Int             -- ^ ... of M
           -> Maybe ModIface  -- ^ old interface, if we have one
           -> Maybe Linkable  -- ^ old linkable, if we have one
           -> SourceModified
           -> IO HomeModInfo   -- ^ the complete HomeModInfo, if successful
-- Defaults: no pre-existing typechecker result, batch-mode messager.
compileOne = compileOne' Nothing (Just batchMsg)
-- | Generalised version of 'compileOne': also takes an optional
-- typechecker result (skipping the front end when supplied) and a
-- custom progress 'Messager'.
compileOne' :: Maybe TcGblEnv
            -> Maybe Messager
            -> HscEnv
            -> ModSummary      -- ^ summary for module being compiled
            -> Int             -- ^ module N ...
            -> Int             -- ^ ... of M
            -> Maybe ModIface  -- ^ old interface, if we have one
            -> Maybe Linkable  -- ^ old linkable, if we have one
            -> SourceModified
            -> IO HomeModInfo   -- ^ the complete HomeModInfo, if successful

compileOne' m_tc_result mHscMessage
            hsc_env0 summary mod_index nmods mb_old_iface maybe_old_linkable
            source_modified0
 = do
   let dflags0     = ms_hspp_opts summary
       this_mod    = ms_mod summary
       src_flavour = ms_hsc_src summary
       location    = ms_location summary
       input_fn    = expectJust "compile:hs" (ml_hs_file location)
       input_fnpp  = ms_hspp_file summary
       mod_graph   = hsc_mod_graph hsc_env0
       needsTH     = any (xopt Opt_TemplateHaskell . ms_hspp_opts) mod_graph
       needsQQ     = any (xopt Opt_QuasiQuotes . ms_hspp_opts) mod_graph
       needsLinker = needsTH || needsQQ
       isDynWay    = any (== WayDyn) (ways dflags0)
       isProfWay   = any (== WayProf) (ways dflags0)
   -- #8180 - when using TemplateHaskell, switch on -dynamic-too so
   -- the linker can correctly load the object files.
   let dflags1 = if needsLinker && dynamicGhc && not isDynWay && not isProfWay
                  then gopt_set dflags0 Opt_BuildDynamicToo
                  else dflags0

   debugTraceMsg dflags1 2 (text "compile: input file" <+> text input_fnpp)

   let basename = dropExtension input_fn

   -- We add the directory in which the .hs files resides) to the import path.
   -- This is needed when we try to compile the .hc file later, if it
   -- imports a _stub.h file that we created here.
   let current_dir = takeDirectory basename
       old_paths   = includePaths dflags1
       dflags      = dflags1 { includePaths = current_dir : old_paths }
       hsc_env     = hsc_env0 {hsc_dflags = dflags}

   -- Figure out what lang we're generating
   let hsc_lang = hscTarget dflags
   -- ... and what the next phase should be
   let next_phase = hscPostBackendPhase dflags src_flavour hsc_lang
   -- ... and what file to generate the output into
   output_fn <- getOutputFilename next_phase
                        Temporary basename dflags next_phase (Just location)

   let extCore_filename = basename ++ ".hcr"

   -- -fforce-recomp should also work with --make
   let force_recomp = gopt Opt_ForceRecomp dflags
       source_modified
         | force_recomp || isNothing maybe_old_linkable = SourceModified
         | otherwise = source_modified0
       object_filename = ml_obj_file location

   -- The bytecode backend always re-runs the basic recompilation check.
   let always_do_basic_recompilation_check = case hsc_lang of
                                             HscInterpreted -> True
                                             _ -> False

   e <- genericHscCompileGetFrontendResult
            always_do_basic_recompilation_check
            m_tc_result mHscMessage
            hsc_env summary source_modified mb_old_iface (mod_index, nmods)

   case e of
       -- Left: front end decided no recompilation is needed; reuse the
       -- existing interface and old linkable.
       Left iface ->
           do details <- genModDetails hsc_env iface
              MASSERT(isJust maybe_old_linkable)
              return (HomeModInfo{ hm_details = details,
                                   hm_iface = iface,
                                   hm_linkable = maybe_old_linkable })

       -- Right: recompile, dispatching on the backend.
       Right (tc_result, mb_old_hash) ->
           -- run the compiler
           case hsc_lang of
               HscInterpreted ->
                   case ms_hsc_src summary of
                   HsBootFile ->
                       do (iface, _changed, details) <- hscSimpleIface hsc_env tc_result mb_old_hash
                          return (HomeModInfo{ hm_details = details,
                                               hm_iface = iface,
                                               hm_linkable = maybe_old_linkable })
                   _ -> do guts0 <- hscDesugar hsc_env summary tc_result
                           guts <- hscSimplify hsc_env guts0
                           (iface, _changed, details, cgguts) <- hscNormalIface hsc_env extCore_filename guts mb_old_hash
                           (hasStub, comp_bc, modBreaks) <- hscInteractive hsc_env cgguts summary

                           stub_o <- case hasStub of
                                     Nothing -> return []
                                     Just stub_c -> do
                                         stub_o <- compileStub hsc_env stub_c
                                         return [DotO stub_o]

                           let hs_unlinked = [BCOs comp_bc modBreaks]
                               unlinked_time = ms_hs_date summary
                             -- Why do we use the timestamp of the source file here,
                             -- rather than the current time? This works better in
                             -- the case where the local clock is out of sync
                             -- with the filesystem's clock. It's just as accurate:
                             -- if the source is modified, then the linkable will
                             -- be out of date.
                           let linkable = LM unlinked_time this_mod
                                          (hs_unlinked ++ stub_o)

                           return (HomeModInfo{ hm_details  = details,
                                                hm_iface    = iface,
                                                hm_linkable = Just linkable })
               HscNothing ->
                   do (iface, _changed, details) <- hscSimpleIface hsc_env tc_result mb_old_hash
                      let linkable = if isHsBoot src_flavour
                                     then maybe_old_linkable
                                     else Just (LM (ms_hs_date summary) this_mod [])
                      return (HomeModInfo{ hm_details = details,
                                           hm_iface = iface,
                                           hm_linkable = linkable })

               _ ->
                   case ms_hsc_src summary of
                   HsBootFile ->
                       do (iface, changed, details) <- hscSimpleIface hsc_env tc_result mb_old_hash
                          hscWriteIface dflags iface changed summary
                          touchObjectFile dflags object_filename
                          return (HomeModInfo{ hm_details  = details,
                                               hm_iface    = iface,
                                               hm_linkable = maybe_old_linkable })

                   _ -> do guts0 <- hscDesugar hsc_env summary tc_result
                           guts <- hscSimplify hsc_env guts0
                           (iface, changed, details, cgguts) <- hscNormalIface hsc_env extCore_filename guts mb_old_hash
                           hscWriteIface dflags iface changed summary

                           -- We're in --make mode: finish the compilation pipeline.
                           let mod_name = ms_mod_name summary
                           _ <- runPipeline StopLn hsc_env
                                             (output_fn,
                                              Just (HscOut src_flavour mod_name (HscRecomp cgguts summary)))
                                             (Just basename)
                                             Persistent
                                             (Just location)
                                             Nothing
                                 -- The object filename comes from the ModLocation
                           o_time <- getModificationUTCTime object_filename
                           let linkable = LM o_time this_mod [DotO object_filename]

                           return (HomeModInfo{ hm_details  = details,
                                                hm_iface    = iface,
                                                hm_linkable = Just linkable })
-----------------------------------------------------------------------------
-- stub .h and .c files (for foreign export support)
-- The _stub.c file is derived from the haskell source file, possibly taking
-- into account the -stubdir option.
--
-- The object file created by compiling the _stub.c file is put into a
-- temporary file, which will be later combined with the main .o file
-- (see the MergeStubs phase).
-- | Compile a foreign-export stub C file, returning the path of the
-- resulting temporary object file.
compileStub :: HscEnv -> FilePath -> IO FilePath
compileStub hsc_env stub_c =
    fmap snd $ runPipeline StopLn hsc_env (stub_c, Nothing) Nothing
                           Temporary Nothing{-no ModLocation-} Nothing
-- ---------------------------------------------------------------------------
-- Link
-- | Link the results of a compilation.  The behaviour is dispatched on
-- the 'GhcLink' mode and may be overridden via 'linkHook'.
link :: GhcLink                 -- interactive or batch
     -> DynFlags                -- dynamic flags
     -> Bool                    -- attempt linking in batch mode?
     -> HomePackageTable        -- what to link
     -> IO SuccessFlag

-- For the moment, in the batch linker, we don't bother to tell doLink
-- which packages to link -- it just tries all that are available.
-- batch_attempt_linking should only be *looked at* in batch mode. It
-- should only be True if the upsweep was successful and someone
-- exports main, i.e., we have good reason to believe that linking
-- will succeed.

link ghcLink dflags
  = lookupHook linkHook l dflags ghcLink dflags
  where
    -- In-memory "linking" is done on demand by the runtime linker,
    -- provided the interpreter was built in.
    l LinkInMemory _ _ _
      = if cGhcWithInterpreter == "YES"
        then -- Not Linking...(demand linker will do the job)
             return Succeeded
        else panicBadLink LinkInMemory

    l NoLink _ _ _
      = return Succeeded

    l LinkBinary dflags batch_attempt_linking hpt
      = link' dflags batch_attempt_linking hpt

    l LinkStaticLib dflags batch_attempt_linking hpt
      = link' dflags batch_attempt_linking hpt

    l LinkDynLib dflags batch_attempt_linking hpt
      = link' dflags batch_attempt_linking hpt
-- | Abort: this build of GHC cannot link in the requested mode.
panicBadLink :: GhcLink -> a
panicBadLink mode =
    panic $ "link: GHC not built to link this way: " ++ show mode
-- | Shared implementation of batch linking for binaries, static and
-- dynamic libraries.  Note: relies on NondecreasingIndentation for the
-- "else do" continuations below.
link' :: DynFlags                -- dynamic flags
      -> Bool                    -- attempt linking in batch mode?
      -> HomePackageTable        -- what to link
      -> IO SuccessFlag

link' dflags batch_attempt_linking hpt
   | batch_attempt_linking
   = do
        let
            staticLink = case ghcLink dflags of
                          LinkStaticLib -> True
                          _ -> platformBinariesAreStaticLibs (targetPlatform dflags)

            home_mod_infos = eltsUFM hpt

            -- the packages we depend on
            pkg_deps  = concatMap (map fst . dep_pkgs . mi_deps . hm_iface) home_mod_infos

            -- the linkables to link
            linkables = map (expectJust "link".hm_linkable) home_mod_infos

        debugTraceMsg dflags 3 (text "link: linkables are ..." $$ vcat (map ppr linkables))

        -- check for the -no-link flag
        if isNoLink (ghcLink dflags)
          then do debugTraceMsg dflags 3 (text "link(batch): linking omitted (-c flag given).")
                  return Succeeded
          else do

        let getOfiles (LM _ _ us) = map nameOfObject (filter isObject us)
            obj_files = concatMap getOfiles linkables

            exe_file = exeFileName staticLink dflags

        linking_needed <- linkingNeeded dflags staticLink linkables pkg_deps

        -- Skip the link entirely when everything is up to date,
        -- unless -fforce-recomp was given.
        if not (gopt Opt_ForceRecomp dflags) && not linking_needed
           then do debugTraceMsg dflags 2 (text exe_file <+> ptext (sLit "is up to date, linking not required."))
                   return Succeeded
           else do

        compilationProgressMsg dflags ("Linking " ++ exe_file ++ " ...")

        -- Don't showPass in Batch mode; doLink will do that for us.
        let link = case ghcLink dflags of
                LinkBinary    -> linkBinary
                LinkStaticLib -> linkStaticLibCheck
                LinkDynLib    -> linkDynLibCheck
                other         -> panicBadLink other
        link dflags obj_files pkg_deps

        debugTraceMsg dflags 3 (text "link: done")

        -- linkBinary only returns if it succeeds
        return Succeeded

   | otherwise
   = do debugTraceMsg dflags 3 (text "link(batch): upsweep (partially) failed OR" $$
                                text "   Main.main not exported; not linking.")
        return Succeeded
-- | Decide whether the executable must be relinked, by comparing its
-- modification time against the object files, extra ld inputs and
-- Haskell package libraries it was built from.
linkingNeeded :: DynFlags -> Bool -> [Linkable] -> [PackageId] -> IO Bool
linkingNeeded dflags staticLink linkables pkg_deps = do
        -- if the modification time on the executable is later than the
        -- modification times on all of the objects and libraries, then omit
        -- linking (unless the -fforce-recomp flag was given).
  let exe_file = exeFileName staticLink dflags
  e_exe_time <- tryIO $ getModificationUTCTime exe_file
  case e_exe_time of
    -- Executable missing/unreadable: must link.
    Left _  -> return True
    Right t -> do
        -- first check object files and extra_ld_inputs
        let extra_ld_inputs = [ f | FileOption _ f <- ldInputs dflags ]
        e_extra_times <- mapM (tryIO . getModificationUTCTime) extra_ld_inputs
        let (errs,extra_times) = splitEithers e_extra_times
        let obj_times =  map linkableTime linkables ++ extra_times
        if not (null errs) || any (t <) obj_times
            then return True
            else do
                -- next, check libraries. XXX this only checks Haskell libraries,
                -- not extra_libraries or -l things from the command line.
                let pkg_map = pkgIdMap (pkgState dflags)
                    pkg_hslibs  = [ (libraryDirs c, lib)
                                  | Just c <- map (lookupPackage pkg_map) pkg_deps,
                                    lib <- packageHsLibs dflags c ]

                pkg_libfiles <- mapM (uncurry (findHSLib dflags)) pkg_hslibs
                if any isNothing pkg_libfiles then return True else do
                  e_lib_times <- mapM (tryIO . getModificationUTCTime)
                                      (catMaybes pkg_libfiles)
                  let (lib_errs,lib_times) = splitEithers e_lib_times
                  if not (null lib_errs) || any (t <) lib_times
                     then return True
                     -- Finally, compare the baked-in link options.
                     else checkLinkInfo dflags pkg_deps exe_file
-- | Compare the link options recorded in the existing executable (in
-- the 'ghcLinkInfoSectionName' ELF section) with the current ones.
-- Returns 'False' when they match, so we can avoid relinking because
-- the previous binary was linked with "the same options".
checkLinkInfo :: DynFlags -> [PackageId] -> FilePath -> IO Bool
checkLinkInfo dflags pkg_deps exe_file
 | not (platformSupportsSavingLinkOpts (platformOS (targetPlatform dflags)))
 -- ToDo: Windows and OS X do not use the ELF binary format, so
 -- readelf does not work there. We need to find another way to do
 -- this.
 = return False -- conservatively we should return True, but not
                -- linking in this case was the behaviour for a long
                -- time so we leave it as-is.
 | otherwise
 = do
   link_info <- getLinkInfo dflags pkg_deps
   debugTraceMsg dflags 3 $ text ("Link info: " ++ link_info)
   m_exe_link_info <- readElfSection dflags ghcLinkInfoSectionName exe_file
   debugTraceMsg dflags 3 $ text ("Exe link info: " ++ show m_exe_link_info)
   return (Just link_info /= m_exe_link_info)
-- | Can we store link options in an ELF section on this OS?
platformSupportsSavingLinkOpts :: OS -> Bool
platformSupportsSavingLinkOpts os =
    case os of
      OSSolaris2 -> False -- see #5382
      _          -> osElfTarget os
-- | Name of the ELF section in which we record the link options
-- (read back by 'checkLinkInfo' via 'readElfSection').
ghcLinkInfoSectionName :: String
ghcLinkInfoSectionName = ".debug-ghc-link-info"
   -- if we use the ".debug" prefix, then strip will strip it by default
-- | Search the given directories for a Haskell package library,
-- returning the first hit (static archive or shared object depending
-- on -static).
findHSLib :: DynFlags -> [String] -> String -> IO (Maybe FilePath)
findHSLib dflags dirs lib = do
    let lib_file
          | gopt Opt_Static dflags = "lib" ++ lib <.> "a"
          | otherwise              = mkSOName (targetPlatform dflags) lib
    hits <- filterM doesFileExist [ dir </> lib_file | dir <- dirs ]
    return (listToMaybe hits)
-- -----------------------------------------------------------------------------
-- Compile files in one-shot mode.
-- | Compile each input file up to the stop phase, then link the
-- resulting outputs.
oneShot :: HscEnv -> Phase -> [(String, Maybe Phase)] -> IO ()
oneShot hsc_env stop_phase srcs =
    mapM (compileFile hsc_env stop_phase) srcs
        >>= doLink (hsc_dflags hsc_env) stop_phase
-- | Compile a single file in one-shot mode, returning the output
-- filename produced by the pipeline.
compileFile :: HscEnv -> Phase -> (FilePath, Maybe Phase) -> IO FilePath
compileFile hsc_env stop_phase (src, mb_phase) = do
   exists <- doesFileExist src
   when (not exists) $
        throwGhcExceptionIO (CmdLineError ("does not exist: " ++ src))

   let
        dflags    = hsc_dflags hsc_env
        split     = gopt Opt_SplitObjs dflags
        mb_o_file = outputFile dflags
        ghc_link  = ghcLink dflags      -- Set by -c or -no-link

        -- When linking, the -o argument refers to the linker's output.
        -- otherwise, we use it as the name for the pipeline's output.
        output
         -- If we are doing -fno-code, then act as if the output is
         -- 'Temporary'. This stops GHC trying to copy files to their
         -- final location.
         | HscNothing <- hscTarget dflags = Temporary
         | StopLn <- stop_phase, not (isNoLink ghc_link) = Persistent
                -- -o foo applies to linker
         | isJust mb_o_file = SpecificFile
                -- -o foo applies to the file we are compiling now
         | otherwise = Persistent

        -- With -split-objs, stop at SplitAs rather than As.
        stop_phase' = case stop_phase of
                        As _ | split -> SplitAs
                        _            -> stop_phase

   ( _, out_file) <- runPipeline stop_phase' hsc_env
                            (src, fmap RealPhase mb_phase) Nothing output
                            Nothing{-no ModLocation-} Nothing
   return out_file
-- | Run the final link step, dispatching on the link mode; a no-op
-- when we stopped before the linking phase.
doLink :: DynFlags -> Phase -> [FilePath] -> IO ()
doLink dflags stop_phase o_files =
    when (isStopLn stop_phase) $
        case ghcLink dflags of
          NoLink        -> return ()
          LinkBinary    -> linkBinary         dflags o_files []
          LinkStaticLib -> linkStaticLibCheck dflags o_files []
          LinkDynLib    -> linkDynLibCheck    dflags o_files []
          other         -> panicBadLink other
-- ---------------------------------------------------------------------------
-- | Run a compilation pipeline, consisting of multiple phases.
--
-- This is the interface to the compilation pipeline, which runs
-- a series of compilation steps on a single source file, specifying
-- at which stage to stop.
--
-- The DynFlags can be modified by phases in the pipeline (eg. by
-- OPTIONS_GHC pragmas), and the changes affect later phases in the
-- pipeline.
runPipeline
  :: Phase                      -- ^ When to stop
  -> HscEnv                     -- ^ Compilation environment
  -> (FilePath,Maybe PhasePlus) -- ^ Input filename (and maybe -x suffix)
  -> Maybe FilePath             -- ^ original basename (if different from ^^^)
  -> PipelineOutput             -- ^ Output filename
  -> Maybe ModLocation          -- ^ A ModLocation, if this is a Haskell module
  -> Maybe FilePath             -- ^ stub object, if we have one
  -> IO (DynFlags, FilePath)    -- ^ (final flags, output filename)
runPipeline stop_phase hsc_env0 (input_fn, mb_phase)
            mb_basename output maybe_loc maybe_stub_o

    = do let
             dflags0 = hsc_dflags hsc_env0

             -- Decide where dump files should go based on the pipeline output
             dflags = dflags0 { dumpPrefix = Just (basename ++ ".") }
             hsc_env = hsc_env0 {hsc_dflags = dflags}

             (input_basename, suffix) = splitExtension input_fn
             suffix' = drop 1 suffix -- strip off the .
             basename | Just b <- mb_basename = b
                      | otherwise             = input_basename

             -- If we were given a -x flag, then use that phase to start from
             start_phase = fromMaybe (RealPhase (startPhase suffix')) mb_phase

             -- Haskellish inputs may need the -dynamic-too rerun below.
             isHaskell (RealPhase (Unlit _)) = True
             isHaskell (RealPhase (Cpp   _)) = True
             isHaskell (RealPhase (HsPp  _)) = True
             isHaskell (RealPhase (Hsc   _)) = True
             isHaskell (HscOut {})           = True
             isHaskell _                     = False

             isHaskellishFile = isHaskell start_phase

             env = PipeEnv{ pe_isHaskellishFile = isHaskellishFile,
                            stop_phase,
                            src_filename = input_fn,
                            src_basename = basename,
                            src_suffix = suffix',
                            output_spec = output }

         -- We want to catch cases of "you can't get there from here" before
         -- we start the pipeline, because otherwise it will just run off the
         -- end.
         --
         -- There is a partial ordering on phases, where A < B iff A occurs
         -- before B in a normal compilation pipeline.

         let happensBefore' = happensBefore dflags
         case start_phase of
             RealPhase start_phase' ->
                 when (not (start_phase' `happensBefore'` stop_phase)) $
                       throwGhcExceptionIO (UsageError
                                   ("cannot compile this file to desired target: "
                                      ++ input_fn))
             HscOut {} -> return ()

         debugTraceMsg dflags 4 (text "Running the pipeline")
         r <- runPipeline' start_phase hsc_env env input_fn
                           maybe_loc maybe_stub_o

         -- If we are compiling a Haskell module, and doing
         -- -dynamic-too, but couldn't do the -dynamic-too fast
         -- path, then rerun the pipeline for the dyn way
         let dflags = extractDynFlags hsc_env
         -- NB: Currently disabled on Windows (ref #7134, #8228, and #5987)
         when (not $ platformOS (targetPlatform dflags) == OSMinGW32) $ do
           when isHaskellishFile $ whenCannotGenerateDynamicToo dflags $ do
               debugTraceMsg dflags 4
                   (text "Running the pipeline again for -dynamic-too")
               let dflags' = dynamicTooMkDynamicDynFlags dflags
               hsc_env' <- newHscEnv dflags'
               _ <- runPipeline' start_phase hsc_env' env input_fn
                                 maybe_loc maybe_stub_o
               return ()
         return r
runPipeline'
  :: PhasePlus                  -- ^ When to start
  -> HscEnv                     -- ^ Compilation environment
  -> PipeEnv
  -> FilePath                   -- ^ Input filename
  -> Maybe ModLocation          -- ^ A ModLocation, if this is a Haskell module
  -> Maybe FilePath             -- ^ stub object, if we have one
  -> IO (DynFlags, FilePath)    -- ^ (final flags, output filename)
runPipeline' start_phase hsc_env env input_fn maybe_loc maybe_stub_o =
    -- Execute the pipeline...
    evalP (pipeLoop start_phase input_fn) env initial_state
  where
    -- Starting state: the compilation environment plus the module
    -- location and stub object, when present.
    initial_state = PipeState { hsc_env      = hsc_env
                              , maybe_loc    = maybe_loc
                              , maybe_stub_o = maybe_stub_o }
-- ---------------------------------------------------------------------------
-- outer pipeline loop
-- | pipeLoop runs phases until we reach the stop phase
pipeLoop :: PhasePlus -> FilePath -> CompPipeline (DynFlags, FilePath)
pipeLoop phase input_fn = do
  env <- getPipeEnv
  dflags <- getDynFlags
  let happensBefore' = happensBefore dflags
      stopPhase = stop_phase env
  case phase of
   RealPhase realPhase | realPhase `eqPhase` stopPhase            -- All done
     -> -- Sometimes, a compilation phase doesn't actually generate any output
        -- (eg. the CPP phase when -fcpp is not turned on). If we end on this
        -- stage, but we wanted to keep the output, then we have to explicitly
        -- copy the file, remembering to prepend a {-# LINE #-} pragma so that
        -- further compilation stages can tell what the original filename was.
        case output_spec env of
        Temporary ->
            return (dflags, input_fn)
        output ->
            do pst <- getPipeState
               final_fn <- liftIO $ getOutputFilename
                                        stopPhase output (src_basename env)
                                        dflags stopPhase (maybe_loc pst)
               when (final_fn /= input_fn) $ do
                  let msg = ("Copying `" ++ input_fn ++"' to `" ++ final_fn ++ "'")
                      line_prag = Just ("{-# LINE 1 \"" ++ src_filename env ++ "\" #-}\n")
                  liftIO $ copyWithHeader dflags msg line_prag input_fn final_fn
               return (dflags, final_fn)

     | not (realPhase `happensBefore'` stopPhase)
        -- Something has gone wrong. We'll try to cover all the cases when
        -- this could happen, so if we reach here it is a panic.
        -- eg. it might happen if the -C flag is used on a source file that
        -- has {-# OPTIONS -fasm #-}.
     -> panic ("pipeLoop: at phase " ++ show realPhase ++
           " but I wanted to stop at phase " ++ show stopPhase)

   _
     -> do liftIO $ debugTraceMsg dflags 4
                                  (ptext (sLit "Running phase") <+> ppr phase)
           (next_phase, output_fn) <- runHookedPhase phase input_fn dflags
           r <- pipeLoop next_phase output_fn
           -- After the code-output phase, rerun the rest of the
           -- pipeline for the dynamic way when -dynamic-too is active.
           case phase of
               HscOut {} ->
                   whenGeneratingDynamicToo dflags $ do
                       setDynFlags $ dynamicTooMkDynamicDynFlags dflags
                       -- TODO shouldn't ignore result:
                       _ <- pipeLoop phase input_fn
                       return ()
               _ ->
                   return ()
           return r
-- | Run one phase, giving an installed 'runPhaseHook' the chance to
-- override the default 'runPhase' implementation.
runHookedPhase :: PhasePlus -> FilePath -> DynFlags
               -> CompPipeline (PhasePlus, FilePath)
runHookedPhase pp input dflags =
  lookupHook runPhaseHook runPhase dflags pp input dflags
-- -----------------------------------------------------------------------------
-- In each phase, we need to know into what filename to generate the
-- output. All the logic about which filenames we generate output
-- into is embodied in the following function.
-- | Compute the output filename for the upcoming phase from the
-- pipeline's stop phase, output spec, basename and module location.
phaseOutputFilename :: Phase{-next phase-} -> CompPipeline FilePath
phaseOutputFilename next_phase = do
  penv <- getPipeEnv
  pst  <- getPipeState
  let dflags = hsc_dflags (hsc_env pst)
  liftIO $ getOutputFilename (stop_phase penv) (output_spec penv)
                             (src_basename penv) dflags next_phase
                             (maybe_loc pst)
-- | Pick the file to write a phase's output into: the user-specified
-- or persistent name when this is the last phase (or its output is
-- kept via -keep-*-files), otherwise a fresh temporary file.
getOutputFilename
  :: Phase -> PipelineOutput -> String
  -> DynFlags -> Phase{-next phase-} -> Maybe ModLocation -> IO FilePath
getOutputFilename stop_phase output basename dflags next_phase maybe_location
 | is_last_phase, Persistent   <- output = persistent_fn
 | is_last_phase, SpecificFile <- output = case outputFile dflags of
                                           Just f -> return f
                                           Nothing ->
                                               panic "SpecificFile: No filename"
 | keep_this_output                      = persistent_fn
 | otherwise                             = newTempName dflags suffix
    where
          hcsuf      = hcSuf dflags
          odir       = objectDir dflags
          osuf       = objectSuf dflags
          keep_hc    = gopt Opt_KeepHcFiles dflags
          keep_s     = gopt Opt_KeepSFiles dflags
          keep_bc    = gopt Opt_KeepLlvmFiles dflags

          -- The input extension of the *next* phase determines the
          -- suffix we generate now.
          myPhaseInputExt HCc       = hcsuf
          myPhaseInputExt MergeStub = osuf
          myPhaseInputExt StopLn    = osuf
          myPhaseInputExt other     = phaseInputExt other

          is_last_phase = next_phase `eqPhase` stop_phase

          -- sometimes, we keep output from intermediate stages
          keep_this_output =
               case next_phase of
                       As _    | keep_s     -> True
                       LlvmOpt | keep_bc    -> True
                       HCc     | keep_hc    -> True
                       _other               -> False

          suffix = myPhaseInputExt next_phase

          -- persistent object files get put in odir
          persistent_fn
             | StopLn <- next_phase = return odir_persistent
             | otherwise            = return persistent

          persistent = basename <.> suffix

          odir_persistent
             | Just loc <- maybe_location = ml_obj_file loc
             | Just d <- odir = d </> persistent
             | otherwise      = persistent
-- -----------------------------------------------------------------------------
-- | Each phase in the pipeline returns the next phase to execute, and the
-- name of the file in which the output was placed.
--
-- We must do things dynamically this way, because we often don't know
-- what the rest of the phases will be until part-way through the
-- compilation: for example, an {-# OPTIONS -fasm #-} at the beginning
-- of a source file can change the latter stages of the pipeline from
-- taking the LLVM route to using the native code generator.
--
runPhase :: PhasePlus -- ^ Run this phase
-> FilePath -- ^ name of the input file
-> DynFlags -- ^ for convenience, we pass the current dflags in
-> CompPipeline (PhasePlus, -- next phase to run
FilePath) -- output filename
-- Invariant: the output filename always contains the output
-- Interesting case: Hsc when there is no recompilation to do
-- Then the output filename is still a .o file
-------------------------------------------------------------------------------
-- Unlit phase
-- Unlit: run the unlit tool to turn literate source into plain source,
-- then continue with the Cpp phase.
runPhase (RealPhase (Unlit sf)) input_fn dflags
  = do
       output_fn <- phaseOutputFilename (Cpp sf)

       let flags = [ -- The -h option passes the file name for unlit to
                     -- put in a #line directive
                     SysTools.Option     "-h"
                   , SysTools.Option $ escape $ normalise input_fn
                   , SysTools.FileOption "" input_fn
                   , SysTools.FileOption "" output_fn
                   ]

       liftIO $ SysTools.runUnlit dflags flags

       return (RealPhase (Cpp sf), output_fn)
  where
       -- escape the characters \, ", and ', but don't try to escape
       -- Unicode or anything else (so we don't use Util.charToC
       -- here). If we get this wrong, then in
       -- Coverage.addTicksToBinds where we check that the filename in
       -- a SrcLoc is the same as the source filename, the two will
       -- look bogusly different. See test:
       -- libraries/hpc/tests/function/subdir/tough2.lhs
       escape ('\\':cs) = '\\':'\\': escape cs
       escape ('\"':cs) = '\\':'\"': escape cs
       escape ('\'':cs) = '\\':'\'': escape cs
       escape (c:cs)    = c : escape cs
       escape []        = []
-------------------------------------------------------------------------------
-- Cpp phase : (a) gets OPTIONS out of file
-- (b) runs cpp if necessary
-- Cpp: read the OPTIONS pragmas from the file and, when -cpp is on,
-- run the C preprocessor over it; either way continue with HsPp.
runPhase (RealPhase (Cpp sf)) input_fn dflags0
  = do
       src_opts <- liftIO $ getOptionsFromFile dflags0 input_fn
       (dflags1, unhandled_flags, warns)
           <- liftIO $ parseDynamicFilePragma dflags0 src_opts
       setDynFlags dflags1
       liftIO $ checkProcessArgsResult dflags1 unhandled_flags

       if not (xopt Opt_Cpp dflags1) then do
           -- we have to be careful to emit warnings only once.
           unless (gopt Opt_Pp dflags1) $
               liftIO $ handleFlagWarnings dflags1 warns

           -- no need to preprocess CPP, just pass input file along
           -- to the next phase of the pipeline.
           return (RealPhase (HsPp sf), input_fn)
        else do
            output_fn <- phaseOutputFilename (HsPp sf)
            liftIO $ doCpp dflags1 True{-raw-}
                           input_fn output_fn
            -- re-read the pragmas now that we've preprocessed the file
            -- See #2464,#3457
            src_opts <- liftIO $ getOptionsFromFile dflags0 output_fn
            (dflags2, unhandled_flags, warns)
                <- liftIO $ parseDynamicFilePragma dflags0 src_opts
            liftIO $ checkProcessArgsResult dflags2 unhandled_flags
            unless (gopt Opt_Pp dflags2) $
                liftIO $ handleFlagWarnings dflags2 warns
            -- the HsPp pass below will emit warnings

            setDynFlags dflags2

            return (RealPhase (HsPp sf), output_fn)
-------------------------------------------------------------------------------
-- HsPp phase
-- HsPp: run the user-supplied -F preprocessor (when -pgmF is active),
-- then continue with the Hsc phase proper.
runPhase (RealPhase (HsPp sf)) input_fn dflags
  = do
       if not (gopt Opt_Pp dflags) then
           -- no need to preprocess, just pass input file along
           -- to the next phase of the pipeline.
          return (RealPhase (Hsc sf), input_fn)
        else do
            PipeEnv{src_basename, src_suffix} <- getPipeEnv
            let orig_fn = src_basename <.> src_suffix
            output_fn <- phaseOutputFilename (Hsc sf)
            liftIO $ SysTools.runPp dflags
                           ( [ SysTools.Option     orig_fn
                             , SysTools.Option     input_fn
                             , SysTools.FileOption "" output_fn
                             ]
                           )

            -- re-read pragmas now that we've parsed the file (see #3674)
            src_opts <- liftIO $ getOptionsFromFile dflags output_fn
            (dflags1, unhandled_flags, warns)
                <- liftIO $ parseDynamicFilePragma dflags src_opts
            setDynFlags dflags1
            liftIO $ checkProcessArgsResult dflags1 unhandled_flags
            liftIO $ handleFlagWarnings dflags1 warns

            return (RealPhase (Hsc sf), output_fn)
-----------------------------------------------------------------------------
-- Hsc phase
-- Compilation of a single module, in "legacy" mode (_not_ under
-- the direction of the compilation manager).
-- Builds a ModSummary for the file, decides whether recompilation is
-- needed by comparing .hs and .o timestamps, and invokes hscCompileOneShot;
-- the result is handed to the HscOut pseudo-phase below.
runPhase (RealPhase (Hsc src_flavour)) input_fn dflags0
 = do  -- normal Hsc mode, not mkdependHS
        PipeEnv{ stop_phase=stop,
                 src_basename=basename,
                 src_suffix=suff } <- getPipeEnv
        -- we add the current directory (i.e. the directory in which
        -- the .hs file resides) to the include path, since this is
        -- what gcc does, and it's probably what you want.
        let current_dir = takeDirectory basename
            paths = includePaths dflags0
            dflags = dflags0 { includePaths = current_dir : paths }
        setDynFlags dflags
        -- gather the imports and module name
        (hspp_buf,mod_name,imps,src_imps) <- liftIO $
            case src_flavour of
                ExtCoreFile -> do -- no explicit imports in ExtCore input.
                    m <- getCoreModuleName input_fn
                    return (Nothing, mkModuleName m, [], [])
                _ -> do
                    buf <- hGetStringBuffer input_fn
                    (src_imps,imps,L _ mod_name) <- getImports dflags buf input_fn (basename <.> suff)
                    return (Just buf, mod_name, imps, src_imps)
        -- Take -o into account if present
        -- Very like -ohi, but we must *only* do this if we aren't linking
        -- (If we're linking then the -o applies to the linked thing, not to
        -- the object file for one module.)
        -- Note the nasty duplication with the same computation in compileFile above
        location <- getLocation src_flavour mod_name
        let o_file = ml_obj_file location -- The real object file
        -- Figure out if the source has changed, for recompilation avoidance.
        --
        -- Setting source_unchanged to True means that M.o seems
        -- to be up to date wrt M.hs; so no need to recompile unless imports have
        -- changed (which the compiler itself figures out).
        -- Setting source_unchanged to False tells the compiler that M.o is out of
        -- date wrt M.hs (or M.o doesn't exist) so we must recompile regardless.
        src_timestamp <- liftIO $ getModificationUTCTime (basename <.> suff)
        source_unchanged <- liftIO $
          if not (isStopLn stop)
                -- SourceModified unconditionally if
                -- (a) recompilation checker is off, or
                -- (b) we aren't going all the way to .o file (e.g. ghc -S)
             then return SourceModified
                -- Otherwise look at file modification dates
             else do o_file_exists <- doesFileExist o_file
                     if not o_file_exists
                        then return SourceModified -- Need to recompile
                        else do t2 <- getModificationUTCTime o_file
                                if t2 > src_timestamp
                                  then return SourceUnmodified
                                  else return SourceModified
        -- NOTE(review): presumably the external-core output path used only
        -- for ExtCore output -- confirm against hscCompileOneShot.
        let extCore_filename = basename ++ ".hcr"
        PipeState{hsc_env=hsc_env'} <- getPipeState
        -- Tell the finder cache about this module
        mod <- liftIO $ addHomeModuleToFinder hsc_env' mod_name location
        -- Make the ModSummary to hand to hscMain
        let
            mod_summary = ModSummary { ms_mod = mod,
                                       ms_hsc_src = src_flavour,
                                       ms_hspp_file = input_fn,
                                       ms_hspp_opts = dflags,
                                       ms_hspp_buf = hspp_buf,
                                       ms_location = location,
                                       ms_hs_date = src_timestamp,
                                       ms_obj_date = Nothing,
                                       ms_textual_imps = imps,
                                       ms_srcimps = src_imps }
        -- run the compiler!
        result <- liftIO $ hscCompileOneShot hsc_env' extCore_filename
                               mod_summary source_unchanged
        return (HscOut src_flavour mod_name result,
                panic "HscOut doesn't have an input filename")
-- Post-Hsc dispatch: act on the HscStatus produced by the Hsc phase --
-- nothing to do, touch the .o, write a boot stamp, or run the back end
-- (plus compile any foreign-export stub that code generation produced).
runPhase (HscOut src_flavour mod_name result) _ dflags = do
        location <- getLocation src_flavour mod_name
        setModLocation location
        let o_file = ml_obj_file location -- The real object file
            hsc_lang = hscTarget dflags
            next_phase = hscPostBackendPhase dflags src_flavour hsc_lang
        case result of
            HscNotGeneratingCode ->
                return (RealPhase next_phase,
                        panic "No output filename from Hsc when no-code")
            HscUpToDate ->
                do liftIO $ touchObjectFile dflags o_file
                   -- The .o file must have a later modification date
                   -- than the source file (else we wouldn't get Nothing)
                   -- but we touch it anyway, to keep 'make' happy (we think).
                   return (RealPhase StopLn, o_file)
            HscUpdateBoot ->
                do -- In the case of hs-boot files, generate a dummy .o-boot
                   -- stamp file for the benefit of Make
                   liftIO $ touchObjectFile dflags o_file
                   return (RealPhase next_phase, o_file)
            HscRecomp cgguts mod_summary
              -> do output_fn <- phaseOutputFilename next_phase
                    PipeState{hsc_env=hsc_env'} <- getPipeState
                    (outputFilename, mStub) <- liftIO $ hscGenHardCode hsc_env' cgguts mod_summary output_fn
                    -- compile the foreign-export stub .c file, if any, and
                    -- record its object for the later MergeStub phase
                    case mStub of
                        Nothing -> return ()
                        Just stub_c ->
                            do stub_o <- liftIO $ compileStub hsc_env' stub_c
                               setStubO stub_o
                    return (RealPhase next_phase, outputFilename)
-----------------------------------------------------------------------------
-- Cmm phase
-- Run the C preprocessor over a .cmm file before parsing it.
runPhase (RealPhase CmmCpp) input_fn dflags
  = do
       output_fn <- phaseOutputFilename Cmm
       liftIO $ doCpp dflags False{-not raw-}
                      input_fn output_fn
       return (RealPhase Cmm, output_fn)
-- Compile a (preprocessed) .cmm file through the configured back end;
-- the next phase depends on the target (native/LLVM/C).
runPhase (RealPhase Cmm) input_fn dflags
  = do
        let hsc_lang = hscTarget dflags
        let next_phase = hscPostBackendPhase dflags HsSrcFile hsc_lang
        output_fn <- phaseOutputFilename next_phase
        PipeState{hsc_env} <- getPipeState
        liftIO $ hscCompileCmmFile hsc_env input_fn output_fn
        return (RealPhase next_phase, output_fn)
-----------------------------------------------------------------------------
-- Cc phase
-- we don't support preprocessing .c files (with -E) now. Doing so introduces
-- way too many hacks, and I can't say I've ever used it anyway.
-- Compiles one C-family file (.c/.cpp/.m/.mm, or a GHC-generated .hc) down
-- to assembly (-S); the As phase then assembles the result.
runPhase (RealPhase cc_phase) input_fn dflags
   | any (cc_phase `eqPhase`) [Cc, Ccpp, HCc, Cobjc, Cobjcpp]
   = do
        let platform = targetPlatform dflags
            hcc = cc_phase `eqPhase` HCc
        let cmdline_include_paths = includePaths dflags
        -- HC files have the dependent packages stamped into them
        pkgs <- if hcc then liftIO $ getHCFilePackages input_fn else return []
        -- add package include paths even if we're just compiling .c
        -- files; this is the Value Add(TM) that using ghc instead of
        -- gcc gives you :)
        pkg_include_dirs <- liftIO $ getPackageIncludePath dflags pkgs
        let include_paths = foldr (\ x xs -> ("-I" ++ x) : xs) []
                              (cmdline_include_paths ++ pkg_include_dirs)
        let gcc_extra_viac_flags = extraGccViaCFlags dflags
        let pic_c_flags = picCCOpts dflags
        let verbFlags = getVerbFlags dflags
        -- cc-options are not passed when compiling .hc files. Our
        -- hc code doesn't #include any header files anyway, so these
        -- options aren't necessary.
        pkg_extra_cc_opts <- liftIO $
          if cc_phase `eqPhase` HCc
             then return []
             else getPackageExtraCcOpts dflags pkgs
        framework_paths <-
            if platformUsesFrameworks platform
            then do pkgFrameworkPaths <- liftIO $ getPackageFrameworkPath dflags pkgs
                    let cmdlineFrameworkPaths = frameworkPaths dflags
                    return $ map ("-F"++)
                                 (cmdlineFrameworkPaths ++ pkgFrameworkPaths)
            else return []
        let split_objs = gopt Opt_SplitObjs dflags
            split_opt | hcc && split_objs = [ "-DUSE_SPLIT_MARKERS" ]
                      | otherwise         = [ ]
        let cc_opt | optLevel dflags >= 2 = [ "-O2" ]
                   | optLevel dflags >= 1 = [ "-O" ]
                   | otherwise            = []
        -- Decide next phase
        let next_phase = As False
        output_fn <- phaseOutputFilename next_phase
        let
          more_hcc_opts =
                -- on x86 the floating point regs have greater precision
                -- than a double, which leads to unpredictable results.
                -- By default, we turn this off with -ffloat-store unless
                -- the user specified -fexcess-precision.
                (if platformArch platform == ArchX86 &&
                    not (gopt Opt_ExcessPrecision dflags)
                        then [ "-ffloat-store" ]
                        else []) ++
                -- gcc's -fstrict-aliasing allows two accesses to memory
                -- to be considered non-aliasing if they have different types.
                -- This interacts badly with the C code we generate, which is
                -- very weakly typed, being derived from C--.
                ["-fno-strict-aliasing"]
        let gcc_lang_opt | cc_phase `eqPhase` Ccpp    = "c++"
                         | cc_phase `eqPhase` Cobjc   = "objective-c"
                         | cc_phase `eqPhase` Cobjcpp = "objective-c++"
                         | otherwise                  = "c"
        liftIO $ SysTools.runCc dflags (
                -- force the C compiler to interpret this file as C when
                -- compiling .hc files, by adding the -x c option.
                -- Also useful for plain .c files, just in case GHC saw a
                -- -x c option.
                        [ SysTools.Option "-x", SysTools.Option gcc_lang_opt
                        , SysTools.FileOption "" input_fn
                        , SysTools.Option "-o"
                        , SysTools.FileOption "" output_fn
                        ]
                       ++ map SysTools.Option (
                          pic_c_flags
                -- Stub files generated for foreign exports references the runIO_closure
                -- and runNonIO_closure symbols, which are defined in the base package.
                -- These symbols are imported into the stub.c file via RtsAPI.h, and the
                -- way we do the import depends on whether we're currently compiling
                -- the base package or not.
                       ++ (if platformOS platform == OSMinGW32 &&
                              thisPackage dflags == basePackageId
                                then [ "-DCOMPILING_BASE_PACKAGE" ]
                                else [])
        -- We only support SparcV9 and better because V8 lacks an atomic CAS
        -- instruction. Note that the user can still override this
        -- (e.g., -mcpu=ultrasparc) as GCC picks the "best" -mcpu flag
        -- regardless of the ordering.
        --
        -- This is a temporary hack. See #2872, commit
        -- 5bd3072ac30216a505151601884ac88bf404c9f2
                       ++ (if platformArch platform == ArchSPARC
                           then ["-mcpu=v9"]
                           else [])
                       -- GCC 4.6+ doesn't like -Wimplicit when compiling C++.
                       ++ (if (cc_phase /= Ccpp && cc_phase /= Cobjcpp)
                             then ["-Wimplicit"]
                             else [])
                       ++ (if hcc
                             then gcc_extra_viac_flags ++ more_hcc_opts
                             else [])
                       ++ verbFlags
                       ++ [ "-S" ]
                       ++ cc_opt
                       ++ [ "-D__GLASGOW_HASKELL__="++cProjectVersionInt ]
                       ++ framework_paths
                       ++ split_opt
                       ++ include_paths
                       ++ pkg_extra_cc_opts
                       ))
        return (RealPhase next_phase, output_fn)
-----------------------------------------------------------------------------
-- Splitting phase
-- Runs the split tool over the assembly, producing prefix__N.s pieces;
-- records (prefix, count) in splitInfo for the SplitAs phase.
runPhase (RealPhase Splitter) input_fn dflags
  = do  -- tmp_pfx is the prefix used for the split .s files
        split_s_prefix <- liftIO $ SysTools.newTempName dflags "split"
        -- the split tool writes the number of pieces into the file named
        -- by the prefix itself, which we read back below
        let n_files_fn = split_s_prefix
        liftIO $ SysTools.runSplit dflags
                          [ SysTools.FileOption "" input_fn
                          , SysTools.FileOption "" split_s_prefix
                          , SysTools.FileOption "" n_files_fn
                          ]
        -- Save the number of split files for future references
        s <- liftIO $ readFile n_files_fn
        let n_files = read s :: Int
            dflags' = dflags { splitInfo = Just (split_s_prefix, n_files) }
        setDynFlags dflags'
        -- Remember to delete all these files
        liftIO $ addFilesToClean dflags'
                   [ split_s_prefix ++ "__" ++ show n ++ ".s"
                   | n <- [1..n_files]]
        return (RealPhase SplitAs,
                "**splitter**") -- we don't use the filename in SplitAs
-----------------------------------------------------------------------------
-- As, SplitAs phase : Assembler
-- This is for calling the assembler on a regular assembly file (not split).
runPhase (RealPhase (As with_cpp)) input_fn dflags
  = do
        -- LLVM from version 3.0 onwards doesn't support the OS X system
        -- assembler, so we use clang as the assembler instead. (#5636)
        let whichAsProg | hscTarget dflags == HscLlvm &&
                          platformOS (targetPlatform dflags) == OSDarwin
                        = do
                            -- be careful what options we call clang with
                            -- see #5903 and #7617 for bugs caused by this.
                            llvmVer <- liftIO $ figureLlvmVersion dflags
                            return $ case llvmVer of
                                Just n | n >= 30 -> SysTools.runClang
                                _ -> SysTools.runAs
                        | otherwise = return SysTools.runAs
        as_prog <- whichAsProg
        let cmdline_include_paths = includePaths dflags
        let pic_c_flags = picCCOpts dflags
        -- the next phase is MergeStub when a foreign-export stub object
        -- exists, otherwise StopLn
        next_phase <- maybeMergeStub
        output_fn <- phaseOutputFilename next_phase
        -- we create directories for the object file, because it
        -- might be a hierarchical module.
        liftIO $ createDirectoryIfMissing True (takeDirectory output_fn)
        ccInfo <- liftIO $ getCompilerInfo dflags
        let runAssembler inputFilename outputFilename
              = liftIO $ as_prog dflags
                       ([ SysTools.Option ("-I" ++ p) | p <- cmdline_include_paths ]
                       -- See Note [-fPIC for assembler]
                       ++ map SysTools.Option pic_c_flags
        -- We only support SparcV9 and better because V8 lacks an atomic CAS
        -- instruction so we have to make sure that the assembler accepts the
        -- instruction set. Note that the user can still override this
        -- (e.g., -mcpu=ultrasparc). GCC picks the "best" -mcpu flag
        -- regardless of the ordering.
        --
        -- This is a temporary hack.
                       ++ (if platformArch (targetPlatform dflags) == ArchSPARC
                           then [SysTools.Option "-mcpu=v9"]
                           else [])
                       ++ (if any (ccInfo ==) [Clang, AppleClang, AppleClang51]
                            then [SysTools.Option "-Qunused-arguments"]
                            else [])
                       ++ [ SysTools.Option "-x"
                          , if with_cpp
                              then SysTools.Option "assembler-with-cpp"
                              else SysTools.Option "assembler"
                          , SysTools.Option "-c"
                          , SysTools.FileOption "" inputFilename
                          , SysTools.Option "-o"
                          , SysTools.FileOption "" outputFilename
                          ])
        liftIO $ debugTraceMsg dflags 4 (text "Running the assembler")
        runAssembler input_fn output_fn
        return (RealPhase next_phase, output_fn)
-- This is for calling the assembler on a split assembly file (so a collection
-- of assembly files)
runPhase (RealPhase SplitAs) _input_fn dflags
  = do
        -- we'll handle the stub_o file in this phase, so don't MergeStub,
        -- just jump straight to StopLn afterwards.
        let next_phase = StopLn
        output_fn <- phaseOutputFilename next_phase
        let base_o = dropExtension output_fn
            osuf = objectSuf dflags
            split_odir = base_o ++ "_" ++ osuf ++ "_split"
        let pic_c_flags = picCCOpts dflags
        -- this also creates the hierarchy
        liftIO $ createDirectoryIfMissing True split_odir
        -- remove M_split/ *.o, because we're going to archive M_split/ *.o
        -- later and we don't want to pick up any old objects.
        fs <- liftIO $ getDirectoryContents split_odir
        liftIO $ mapM_ removeFile $
                map (split_odir </>) $ filter (osuf `isSuffixOf`) fs
        -- splitInfo was recorded by the Splitter phase above
        let (split_s_prefix, n) = case splitInfo dflags of
                                  Nothing -> panic "No split info"
                                  Just x -> x
        let split_s n = split_s_prefix ++ "__" ++ show n <.> "s"
            split_obj :: Int -> FilePath
            split_obj n = split_odir </>
                          takeFileName base_o ++ "__" ++ show n <.> osuf
        let assemble_file n
              = SysTools.runAs dflags (
        -- We only support SparcV9 and better because V8 lacks an atomic CAS
        -- instruction so we have to make sure that the assembler accepts the
        -- instruction set. Note that the user can still override this
        -- (e.g., -mcpu=ultrasparc). GCC picks the "best" -mcpu flag
        -- regardless of the ordering.
        --
        -- This is a temporary hack.
                           (if platformArch (targetPlatform dflags) == ArchSPARC
                            then [SysTools.Option "-mcpu=v9"]
                            else []) ++
                           -- See Note [-fPIC for assembler]
                           map SysTools.Option pic_c_flags ++
                           [ SysTools.Option "-c"
                           , SysTools.Option "-o"
                           , SysTools.FileOption "" (split_obj n)
                           , SysTools.FileOption "" (split_s n)
                           ])
        liftIO $ mapM_ assemble_file [1..n]
        -- Note [pipeline-split-init]
        -- If we have a stub file, it may contain constructor
        -- functions for initialisation of this module. We can't
        -- simply leave the stub as a separate object file, because it
        -- will never be linked in: nothing refers to it. We need to
        -- ensure that if we ever refer to the data in this module
        -- that needs initialisation, then we also pull in the
        -- initialisation routine.
        --
        -- To that end, we make a DANGEROUS ASSUMPTION here: the data
        -- that needs to be initialised is all in the FIRST split
        -- object. See Note [codegen-split-init].
        PipeState{maybe_stub_o} <- getPipeState
        case maybe_stub_o of
            Nothing -> return ()
            Just stub_o -> liftIO $ do
                     -- fold the stub object into the first split object
                     tmp_split_1 <- newTempName dflags osuf
                     let split_1 = split_obj 1
                     copyFile split_1 tmp_split_1
                     removeFile split_1
                     joinObjectFiles dflags [tmp_split_1, stub_o] split_1
        -- join them into a single .o file
        liftIO $ joinObjectFiles dflags (map split_obj [1..n]) output_fn
        return (RealPhase next_phase, output_fn)
-----------------------------------------------------------------------------
-- LlvmOpt phase
-- Runs LLVM's opt over the bitcode produced by the LLVM back end, at a
-- strength derived from GHC's -O level, before handing it to llc.
runPhase (RealPhase LlvmOpt) input_fn dflags
  = do
    ver <- liftIO $ readIORef (llvmVersion dflags)
    let opt_lvl = max 0 (min 2 $ optLevel dflags)
        -- don't specify anything if user has specified commands. We do this
        -- for opt but not llc since opt is very specifically for optimisation
        -- passes only, so if the user is passing us extra options we assume
        -- they know what they are doing and don't get in the way.
        optFlag = if null (getOpts dflags opt_lo)
                      then map SysTools.Option $ words (llvmOpts ver !! opt_lvl)
                      else []
        tbaa | ver < 29 = "" -- no tbaa in 2.8 and earlier
             | gopt Opt_LlvmTBAA dflags = "--enable-tbaa=true"
             | otherwise = "--enable-tbaa=false"
    output_fn <- phaseOutputFilename LlvmLlc
    liftIO $ SysTools.runLlvmOpt dflags
               ([ SysTools.FileOption "" input_fn,
                    SysTools.Option "-o",
                    SysTools.FileOption "" output_fn]
                ++ optFlag
                ++ [SysTools.Option tbaa])
    return (RealPhase LlvmLlc, output_fn)
  where
        -- we always (unless -optlo specified) run Opt since we rely on it to
        -- fix up some pretty big deficiencies in the code we generate
        llvmOpts ver = [ "-mem2reg -globalopt"
                       , if ver >= 34 then "-O1 -globalopt" else "-O1"
                       -- LLVM 3.4 -O1 doesn't eliminate aliases reliably (bug #8855)
                       , "-O2"
                       ]
-----------------------------------------------------------------------------
-- LlvmLlc phase
-- Runs LLVM's llc to turn optimised bitcode into native assembly, picking
-- relocation model and target attributes from the DynFlags/platform.
runPhase (RealPhase LlvmLlc) input_fn dflags
  = do
    ver <- liftIO $ readIORef (llvmVersion dflags)
    let opt_lvl = max 0 (min 2 $ optLevel dflags)
        -- iOS requires external references to be loaded indirectly from the
        -- DATA segment or dyld traps at runtime writing into TEXT: see #7722
        rmodel | platformOS (targetPlatform dflags) == OSiOS = "dynamic-no-pic"
               | gopt Opt_PIC dflags = "pic"
               | not (gopt Opt_Static dflags) = "dynamic-no-pic"
               | otherwise = "static"
        tbaa | ver < 29 = "" -- no tbaa in 2.8 and earlier
             | gopt Opt_LlvmTBAA dflags = "--enable-tbaa=true"
             | otherwise = "--enable-tbaa=false"
    -- hidden debugging flag '-dno-llvm-mangler' to skip mangling
    let next_phase = case gopt Opt_NoLlvmMangler dflags of
                         False -> LlvmMangle
                         True | gopt Opt_SplitObjs dflags -> Splitter
                         True -> As False
    output_fn <- phaseOutputFilename next_phase
    liftIO $ SysTools.runLlvmLlc dflags
                ([ SysTools.Option (llvmOpts !! opt_lvl),
                    SysTools.Option $ "-relocation-model=" ++ rmodel,
                    SysTools.FileOption "" input_fn,
                    SysTools.Option "-o", SysTools.FileOption "" output_fn]
                ++ [SysTools.Option tbaa]
                ++ map SysTools.Option fpOpts
                ++ map SysTools.Option abiOpts
                ++ map SysTools.Option sseOpts
                ++ map SysTools.Option avxOpts
                ++ map SysTools.Option avx512Opts
                ++ map SysTools.Option stackAlignOpts)
    return (RealPhase next_phase, output_fn)
  where
        -- Bug in LLVM at O3 on OSX.
        llvmOpts = if platformOS (targetPlatform dflags) == OSDarwin
                   then ["-O1", "-O2", "-O2"]
                   else ["-O1", "-O2", "-O3"]
        -- On ARMv7 using LLVM, LLVM fails to allocate floating point registers
        -- while compiling GHC source code. It's probably due to fact that it
        -- does not enable VFP by default. Let's do this manually here
        fpOpts = case platformArch (targetPlatform dflags) of
                   ArchARM ARMv7 ext _ -> if (elem VFPv3 ext)
                                          then ["-mattr=+v7,+vfp3"]
                                          else if (elem VFPv3D16 ext)
                                               then ["-mattr=+v7,+vfp3,+d16"]
                                               else []
                   ArchARM ARMv6 ext _ -> if (elem VFPv2 ext)
                                          then ["-mattr=+v6,+vfp2"]
                                          else ["-mattr=+v6"]
                   _ -> []
        -- On Ubuntu/Debian with ARM hard float ABI, LLVM's llc still
        -- compiles into soft-float ABI. We need to explicitly set abi
        -- to hard
        abiOpts = case platformArch (targetPlatform dflags) of
                    ArchARM _ _ HARD -> ["-float-abi=hard"]
                    ArchARM _ _ _ -> []
                    _ -> []
        sseOpts | isSse4_2Enabled dflags = ["-mattr=+sse42"]
                | isSse2Enabled dflags = ["-mattr=+sse2"]
                | isSseEnabled dflags = ["-mattr=+sse"]
                | otherwise = []
        avxOpts | isAvx512fEnabled dflags = ["-mattr=+avx512f"]
                | isAvx2Enabled dflags = ["-mattr=+avx2"]
                | isAvxEnabled dflags = ["-mattr=+avx"]
                | otherwise = []
        avx512Opts =
          [ "-mattr=+avx512cd" | isAvx512cdEnabled dflags ] ++
          [ "-mattr=+avx512er" | isAvx512erEnabled dflags ] ++
          [ "-mattr=+avx512pf" | isAvx512pfEnabled dflags ]
        stackAlignOpts =
            case platformArch (targetPlatform dflags) of
              ArchX86_64 | isAvxEnabled dflags -> ["-stack-alignment=32"]
              _ -> []
-----------------------------------------------------------------------------
-- LlvmMangle phase
-- Post-processes llc's assembly output (the LLVM mangler) before it is
-- split and/or assembled.
runPhase (RealPhase LlvmMangle) input_fn dflags
  = do
      let next_phase = if gopt Opt_SplitObjs dflags then Splitter else As False
      output_fn <- phaseOutputFilename next_phase
      liftIO $ llvmFixupAsm dflags input_fn output_fn
      return (RealPhase next_phase, output_fn)
-----------------------------------------------------------------------------
-- merge in stub objects
-- Combines the module's object file with the foreign-export stub object
-- recorded in the pipeline state into the final .o.
runPhase (RealPhase MergeStub) input_fn dflags
 = do
     PipeState{maybe_stub_o} <- getPipeState
     output_fn <- phaseOutputFilename StopLn
     liftIO $ createDirectoryIfMissing True (takeDirectory output_fn)
     case maybe_stub_o of
       Nothing ->
         -- this phase is only entered when maybeMergeStub saw a stub
         panic "runPhase(MergeStub): no stub"
       Just stub_o -> do
         liftIO $ joinObjectFiles dflags [input_fn, stub_o] output_fn
         return (RealPhase StopLn, output_fn)
-- warning suppression
-- catch-all equation: any phase not handled above is a bug
runPhase (RealPhase other) _input_fn _dflags =
   panic ("runPhase: don't know how to run phase " ++ show other)
-- | Choose the phase that should follow assembling: when the pipeline has
-- produced a foreign-export stub object we must run MergeStub to fold it
-- into the final object file, otherwise we can stop.
maybeMergeStub :: CompPipeline Phase
maybeMergeStub
 = do
     PipeState{maybe_stub_o} <- getPipeState
     return $ maybe StopLn (const MergeStub) maybe_stub_o
-- | Build the 'ModLocation' (source/interface/object file names) for the
-- module being compiled, honouring hs-boot suffixing, -ohi, and (when not
-- linking) -o.
getLocation :: HscSource -> ModuleName -> CompPipeline ModLocation
getLocation src_flavour mod_name = do
    dflags <- getDynFlags
    PipeEnv{ src_basename=basename,
             src_suffix=suff } <- getPipeEnv
    -- Build a ModLocation to pass to hscMain.
    -- The source filename is rather irrelevant by now, but it's used
    -- by hscMain for messages. hscMain also needs
    -- the .hi and .o filenames, and this is as good a way
    -- as any to generate them, and better than most. (e.g. takes
    -- into account the -osuf flags)
    location1 <- liftIO $ mkHomeModLocation2 dflags mod_name basename suff
    -- Boot-ify it if necessary
    let location2 | isHsBoot src_flavour = addBootSuffixLocn location1
                  | otherwise = location1
    -- Take -ohi into account if present
    -- This can't be done in mkHomeModuleLocation because
    -- it only applies to the module being compiled
    let ohi = outputHi dflags
        location3 | Just fn <- ohi = location2{ ml_hi_file = fn }
                  | otherwise = location2
    -- Take -o into account if present
    -- Very like -ohi, but we must *only* do this if we aren't linking
    -- (If we're linking then the -o applies to the linked thing, not to
    -- the object file for one module.)
    -- Note the nasty duplication with the same computation in compileFile above
    let expl_o_file = outputFile dflags
        location4 | Just ofile <- expl_o_file
                  , isNoLink (ghcLink dflags)
                  = location3 { ml_obj_file = ofile }
                  | otherwise = location3
    return location4
-----------------------------------------------------------------------------
-- MoveBinary sort-of-phase
-- After having produced a binary, move it somewhere else and generate a
-- wrapper script calling the binary. Currently, we need this only in
-- a parallel way (i.e. in GUM), because PVM expects the binary in a
-- central directory.
-- This is called from linkBinary below, after linking. I haven't made it
-- a separate phase to minimise interfering with other modules, and
-- we don't need the generality of a phase (MoveBinary is always
-- done after linking and makes only sense in a parallel setup) -- HWL
-- Returns True on success; a no-op for non-parallel (non-WayPar) builds.
runPhase_MoveBinary :: DynFlags -> FilePath -> IO Bool
runPhase_MoveBinary dflags input_fn
    | WayPar `elem` ways dflags && not (gopt Opt_Static dflags) =
        panic ("Don't know how to combine PVM wrapper and dynamic wrapper")
    | WayPar `elem` ways dflags = do
        let sysMan = pgm_sysman dflags
        pvm_root <- getEnv "PVM_ROOT"
        pvm_arch <- getEnv "PVM_ARCH"
        let
           pvm_executable_base = "=" ++ input_fn
           pvm_executable = pvm_root ++ "/bin/" ++ pvm_arch ++ "/" ++ pvm_executable_base
        -- nuke old binary; maybe use configured names for cp and rm?
        _ <- tryIO (removeFile pvm_executable)
        -- move the newly created binary into PVM land
        copy dflags "copying PVM executable" input_fn pvm_executable
        -- generate a wrapper script for running a parallel prg under PVM,
        -- replacing the original binary at input_fn
        writeFile input_fn (mk_pvm_wrapper_script pvm_executable pvm_executable_base sysMan)
        return True
    | otherwise = return True
-- | Write the given source text to a temporary file with extension @extn@
-- (e.g. "c" or "s"), compile it with the C compiler, and return the path
-- of the resulting temporary object file.  The RTS package's include
-- directories are put on the include path so the text may use "Rts.h".
mkExtraObj :: DynFlags -> Suffix -> String -> IO FilePath
mkExtraObj dflags extn xs
 = do cFile <- newTempName dflags extn
      oFile <- newTempName dflags "o"
      writeFile cFile xs
      let rtsDetails = getPackageDetails (pkgState dflags) rtsPackageId
      SysTools.runCc dflags
                ([Option "-c",
                  FileOption "" cFile,
                  Option "-o",
                  FileOption "" oFile]
                 ++ map (FileOption "-I") (includeDirs rtsDetails))
      return oFile
-- When linking a binary, we need to create a C main() function that
-- starts everything off. This used to be compiled statically as part
-- of the RTS, but that made it hard to change the -rtsopts setting,
-- so now we generate and compile a main() stub as part of every
-- binary and pass the -rtsopts setting directly to the RTS (#5373)
--
mkExtraObjToLinkIntoBinary :: DynFlags -> IO FilePath
mkExtraObjToLinkIntoBinary dflags = do
   -- with -no-hs-main the stub is empty (the user supplies main());
   -- warn if RTS-option flags were given since they will be ignored
   when (gopt Opt_NoHsMain dflags && haveRtsOptsFlags dflags) $ do
      log_action dflags dflags SevInfo noSrcSpan defaultUserStyle
          (text "Warning: -rtsopts and -with-rtsopts have no effect with -no-hs-main." $$
           text "    Call hs_init_ghc() from your main() function to set these options.")
   mkExtraObj dflags "c" (showSDoc dflags main)
 where
  -- the generated C main(): set up an RtsConfig from the -rtsopts /
  -- -with-rtsopts settings and delegate to hs_main
  main
   | gopt Opt_NoHsMain dflags = empty
   | otherwise = vcat [
      ptext (sLit "#include \"Rts.h\""),
      ptext (sLit "extern StgClosure ZCMain_main_closure;"),
      ptext (sLit "int main(int argc, char *argv[])"),
      char '{',
      ptext (sLit " RtsConfig __conf = defaultRtsConfig;"),
      ptext (sLit " __conf.rts_opts_enabled = ")
          <> text (show (rtsOptsEnabled dflags)) <> semi,
      case rtsOpts dflags of
         Nothing   -> empty
         Just opts -> ptext (sLit "    __conf.rts_opts= ") <>
                        text (show opts) <> semi,
      ptext (sLit " __conf.rts_hs_main = rtsTrue;"),
      ptext (sLit " return hs_main(argc, argv, &ZCMain_main_closure,__conf);"),
      char '}',
      char '\n' -- final newline, to keep gcc happy
     ]
-- Write out the link info section into a new assembly file. Previously
-- this was included as inline assembly in the main.c file but this
-- is pretty fragile. gas gets upset trying to calculate relative offsets
-- that span the .note section (notably .text) when debug info is present
mkNoteObjsToLinkIntoBinary :: DynFlags -> [PackageId] -> IO [FilePath]
mkNoteObjsToLinkIntoBinary dflags dep_packages = do
   link_info <- getLinkInfo dflags dep_packages
   -- on platforms without ELF-style note sections this produces no objects
   if (platformSupportsSavingLinkOpts (platformOS (targetPlatform dflags)))
     then fmap (:[]) $ mkExtraObj dflags "s" (showSDoc dflags (link_opts link_info))
     else return []
  where
    -- assembly that embeds the (escaped) link info string in a dedicated
    -- note section
    link_opts info = hcat [
          text "\t.section ", text ghcLinkInfoSectionName,
                                   text ",\"\",",
                                   text elfSectionNote,
                                   text "\n",
          text "\t.ascii \"", info', text "\"\n",
          -- ALL generated assembly must have this section to disable
          -- executable stacks.  See also
          -- compiler/nativeGen/AsmCodeGen.lhs for another instance
          -- where we need to do this.
          (if platformHasGnuNonexecStack (targetPlatform dflags)
           then text ".section .note.GNU-stack,\"\",@progbits\n"
           else empty)
           ]
          where
            info' = text $ escape info
            escape :: String -> String
            escape = concatMap (charToC.fromIntegral.ord)
            elfSectionNote :: String
            elfSectionNote = case platformArch (targetPlatform dflags) of
                               -- ARM assemblers use '%' instead of '@' for
                               -- section-type characters
                               ArchARM _ _ _ -> "%note"
                               _             -> "@note"
-- The "link info" is a string representing the parameters of the
-- link. We save this information in the binary, and the next time we
-- link, if nothing else has changed, we use the link info stored in
-- the existing binary to decide whether to re-link or not.
getLinkInfo :: DynFlags -> [PackageId] -> IO String
getLinkInfo dflags dep_packages = do
   package_link_opts <- getPackageLinkOpts dflags dep_packages
   pkg_frameworks <- if platformUsesFrameworks (targetPlatform dflags)
                     then getPackageFrameworks dflags dep_packages
                     else return []
   let extra_ld_inputs = ldInputs dflags
   let
      -- everything that can affect the link result goes into the tuple;
      -- its Show rendering is the stored fingerprint
      link_info = (package_link_opts,
                   pkg_frameworks,
                   rtsOpts dflags,
                   rtsOptsEnabled dflags,
                   gopt Opt_NoHsMain dflags,
                   map showOpt extra_ld_inputs,
                   getOpts dflags opt_l)
   --
   return (show link_info)
-- generates a Perl script starting a parallel prg under PVM
-- The script parses +RTS flags (-d, -qN, -qp) to pick a processor count,
-- then hands the real (hidden) binary to SysMan to run under PVM.
mk_pvm_wrapper_script :: String -> String -> String -> String
mk_pvm_wrapper_script pvm_executable pvm_executable_base sysMan = unlines $
 [
  "eval 'exec perl -S $0 ${1+\"$@\"}'",
  "  if $running_under_some_shell;",
  "# =!=!=!=!=!=!=!=!=!=!=!",
  "# This script is automatically generated: DO NOT EDIT!!!",
  "# Generated by Glasgow Haskell Compiler",
  "# ngoqvam choHbogh vaj' vIHoHnISbej !!!!",
  "#",
  "$pvm_executable      = '" ++ pvm_executable ++ "';",
  "$pvm_executable_base = '" ++ pvm_executable_base ++ "';",
  "$SysMan = '" ++ sysMan ++ "';",
  "",
  {- ToDo: add the magical shortcuts again iff we actually use them -- HWL
  "# first, some magical shortcuts to run "commands" on the binary",
  "# (which is hidden)",
  "if ($#ARGV == 1 && $ARGV[0] eq '+RTS' && $ARGV[1] =~ /^--((size|file|strip|rm|nm).*)/ ) {",
  "    local($cmd) = $1;",
  "    system("$cmd $pvm_executable");",
  "    exit(0); # all done",
  "}", -}
  "",
  "# Now, run the real binary; process the args first",
  "$ENV{'PE'} = $pvm_executable_base;", --  ++ pvm_executable_base,
  "$debug = '';",
  "$nprocessors = 0; # the default: as many PEs as machines in PVM config",
  "@nonPVM_args = ();",
  "$in_RTS_args = 0;",
  "",
  "args: while ($a = shift(@ARGV)) {",
  "    if ( $a eq '+RTS' ) {",
  "        $in_RTS_args = 1;",
  "    } elsif ( $a eq '-RTS' ) {",
  "        $in_RTS_args = 0;",
  "    }",
  "    if ( $a eq '-d' && $in_RTS_args ) {",
  "        $debug = '-';",
  "    } elsif ( $a =~ /^-qN(\\d+)/ && $in_RTS_args ) {",
  "        $nprocessors = $1;",
  "    } elsif ( $a =~ /^-qp(\\d+)/ && $in_RTS_args ) {",
  "        $nprocessors = $1;",
  "    } else {",
  "        push(@nonPVM_args, $a);",
  "    }",
  "}",
  "",
  "local($return_val) = 0;",
  "# Start the parallel execution by calling SysMan",
  "system(\"$SysMan $debug $pvm_executable $nprocessors @nonPVM_args\");",
  "$return_val = $?;",
  "# ToDo: fix race condition moving files and flushing them!!",
  "system(\"cp $ENV{'HOME'}/$pvm_executable_base.???.gr .\") if -f \"$ENV{'HOME'}/$pvm_executable_base.002.gr\";",
  "exit($return_val);"
 ]
-----------------------------------------------------------------------------
-- Look for the /* GHC_PACKAGES ... */ comment at the top of a .hc file

-- | Read the first line of a .hc file and, when it starts with the
-- @/* GHC_PACKAGES@ marker, return the package ids stamped into it;
-- otherwise return the empty list.
getHCFilePackages :: FilePath -> IO [PackageId]
getHCFilePackages filename =
  Exception.bracket (openFile filename ReadMode) hClose $ \h -> do
    header <- hGetLine h
    -- the marker occupies exactly the first 15 characters of the line
    let (magic, rest) = splitAt 15 header
    if magic == "/* GHC_PACKAGES"
       then return (map stringToPackageId (words rest))
       else return []
-----------------------------------------------------------------------------
-- Static linking, of .o files
-- The list of packages passed to link is the list of packages on
-- which this program depends, as discovered by the compilation
-- manager. It is combined with the list of packages that the user
-- specifies on the command line with -package flags.
--
-- In one-shot linking mode, we can't discover the package
-- dependencies (because we haven't actually done any compilation or
-- read any interface files), so the user must explicitly specify all
-- the packages.
-- Link an executable (staticLink = False case of linkBinary').
linkBinary :: DynFlags -> [FilePath] -> [PackageId] -> IO ()
linkBinary = linkBinary' False
-- | The workhorse of the link step: assembles the full command line and
-- invokes the system linker (or libtool, when @staticLink@ is set, e.g.
-- for iOS static archives) over the given object files plus the
-- libraries of the given packages.
--
-- NOTE(review): the option assembly below is order-sensitive (see the
-- @-u@/@-l@ comment at @pkg_link_opts@); reorder with care.
linkBinary' :: Bool -> DynFlags -> [FilePath] -> [PackageId] -> IO ()
linkBinary' staticLink dflags o_files dep_packages = do
    let platform = targetPlatform dflags
        mySettings = settings dflags
        verbFlags = getVerbFlags dflags
        output_fn = exeFileName staticLink dflags

    -- get the full list of packages to link with, by combining the
    -- explicit packages with the auto packages and all of their
    -- dependencies, and eliminating duplicates.

    full_output_fn <- if isAbsolute output_fn
                      then return output_fn
                      else do d <- getCurrentDirectory
                              return $ normalise (d </> output_fn)
    pkg_lib_paths <- getPackageLibraryPath dflags dep_packages
    let pkg_lib_path_opts = concatMap get_pkg_lib_path_opts pkg_lib_paths
        -- Per library directory: -L plus (for system-dependent dynamic
        -- linking on ELF/Mach-O) the matching rpath/rpath-link options.
        get_pkg_lib_path_opts l
         | osElfTarget (platformOS platform) &&
           dynLibLoader dflags == SystemDependent &&
           not (gopt Opt_Static dflags)
            = let libpath = if gopt Opt_RelativeDynlibPaths dflags
                            then "$ORIGIN" </>
                                 (l `makeRelativeTo` full_output_fn)
                            else l
                  rpath = if gopt Opt_RPath dflags
                          then ["-Wl,-rpath", "-Wl," ++ libpath]
                          else []
                  -- Solaris 11's linker does not support -rpath-link option. It silently
                  -- ignores it and then complains about next option which is -l<some
                  -- dir> as being a directory and not expected object file, E.g
                  -- ld: elf error: file
                  --     /tmp/ghc-src/libraries/base/dist-install/build:
                  --     elf_begin: I/O error: region read: Is a directory
                  rpathlink = if (platformOS platform) == OSSolaris2
                              then []
                              else ["-Wl,-rpath-link", "-Wl," ++ l]
              in ["-L" ++ l] ++ rpathlink ++ rpath
         | osMachOTarget (platformOS platform) &&
           dynLibLoader dflags == SystemDependent &&
           not (gopt Opt_Static dflags) &&
           gopt Opt_RPath dflags
            = let libpath = if gopt Opt_RelativeDynlibPaths dflags
                            then "@loader_path" </>
                                 (l `makeRelativeTo` full_output_fn)
                            else l
              in ["-L" ++ l] ++ ["-Wl,-rpath", "-Wl," ++ libpath]
         | otherwise = ["-L" ++ l]

    let lib_paths = libraryPaths dflags
    let lib_path_opts = map ("-L"++) lib_paths

    extraLinkObj <- mkExtraObjToLinkIntoBinary dflags
    noteLinkObjs <- mkNoteObjsToLinkIntoBinary dflags dep_packages

    pkg_link_opts <- do
        (package_hs_libs, extra_libs, other_flags) <- getPackageLinkOpts dflags dep_packages
        return $ if staticLink
            then package_hs_libs -- If building an executable really means making a static
                                 -- library (e.g. iOS), then we only keep the -l options for
                                 -- HS packages, because libtool doesn't accept other options.
                                 -- In the case of iOS these need to be added by hand to the
                                 -- final link in Xcode.
            else other_flags ++ package_hs_libs ++ extra_libs -- -Wl,-u,<sym> contained in other_flags
                                                              -- needs to be put before -l<package>,
                                                              -- otherwise Solaris linker fails linking
                                                              -- a binary with unresolved symbols in RTS
                                                              -- which are defined in base package
                                                              -- the reason for this is a note in ld(1) about
                                                              -- '-u' option: "The placement of this option
                                                              -- on the command line is significant.
                                                              -- This option must be placed before the library
                                                              -- that defines the symbol."

    -- Framework-related options are only meaningful on Darwin-style
    -- platforms; everywhere else they collapse to [].
    pkg_framework_path_opts <-
        if platformUsesFrameworks platform
        then do pkg_framework_paths <- getPackageFrameworkPath dflags dep_packages
                return $ map ("-F" ++) pkg_framework_paths
        else return []

    framework_path_opts <-
        if platformUsesFrameworks platform
        then do let framework_paths = frameworkPaths dflags
                return $ map ("-F" ++) framework_paths
        else return []

    pkg_framework_opts <-
        if platformUsesFrameworks platform
        then do pkg_frameworks <- getPackageFrameworks dflags dep_packages
                return $ concat [ ["-framework", fw] | fw <- pkg_frameworks ]
        else return []

    framework_opts <-
        if platformUsesFrameworks platform
        then do let frameworks = cmdlineFrameworks dflags
                -- reverse because they're added in reverse order from
                -- the cmd line:
                return $ concat [ ["-framework", fw]
                                | fw <- reverse frameworks ]
        else return []

        -- probably _stub.o files
    let extra_ld_inputs = ldInputs dflags

        -- Here are some libs that need to be linked at the *end* of
        -- the command line, because they contain symbols that are referred to
        -- by the RTS.  We can't therefore use the ordinary way opts for these.
    let
        debug_opts | WayDebug `elem` ways dflags = [
#if defined(HAVE_LIBBFD)
                        "-lbfd", "-liberty"
#endif
                         ]
                   | otherwise            = []

    let thread_opts
         | WayThreaded `elem` ways dflags =
            let os = platformOS (targetPlatform dflags)
            in if os == OSOsf3 then ["-lpthread", "-lexc"]
               else if os `elem` [OSMinGW32, OSFreeBSD, OSOpenBSD,
                                  OSNetBSD, OSHaiku, OSQNXNTO, OSiOS]
               then []
               else ["-lpthread"]
         | otherwise               = []

    rc_objs <- maybeCreateManifest dflags output_fn

    let link = if staticLink
                   then SysTools.runLibtool
                   else SysTools.runLink
    link dflags (
                       map SysTools.Option verbFlags
                      ++ [ SysTools.Option "-o"
                         , SysTools.FileOption "" output_fn
                         ]
                      ++ map SysTools.Option (
                         []

                      -- Permit the linker to auto link _symbol to _imp_symbol.
                      -- This lets us link against DLLs without needing an "import library".
                      ++ (if platformOS platform == OSMinGW32
                          then ["-Wl,--enable-auto-import"]
                          else [])

                      -- '-no_compact_unwind'
                      -- C++/Objective-C exceptions cannot use optimised
                      -- stack unwinding code. The optimised form is the
                      -- default in Xcode 4 on at least x86_64, and
                      -- without this flag we're also seeing warnings
                      -- like
                      --     ld: warning: could not create compact unwind for .LFB3: non-standard register 5 being saved in prolog
                      -- on x86.
                      ++ (if sLdSupportsCompactUnwind mySettings &&
                             not staticLink &&
                             (platformOS platform == OSDarwin || platformOS platform == OSiOS) &&
                             case platformArch platform of
                               ArchX86 -> True
                               ArchX86_64 -> True
                               ArchARM {} -> True
                               _ -> False
                          then ["-Wl,-no_compact_unwind"]
                          else [])

                      -- '-no_pie'
                      -- iOS uses 'dynamic-no-pic', so we must pass this to ld to suppress a warning; see #7722
                      ++ (if platformOS platform == OSiOS &&
                             not staticLink
                          then ["-Wl,-no_pie"]
                          else [])

                      -- '-Wl,-read_only_relocs,suppress'
                      -- ld gives loads of warnings like:
                      --     ld: warning: text reloc in _base_GHCziArr_unsafeArray_info to _base_GHCziArr_unsafeArray_closure
                      -- when linking any program. We're not sure
                      -- whether this is something we ought to fix, but
                      -- for now this flags silences them.
                      ++ (if platformOS platform == OSDarwin &&
                             platformArch platform == ArchX86 &&
                             not staticLink
                          then ["-Wl,-read_only_relocs,suppress"]
                          else [])

                      ++ o_files
                      ++ lib_path_opts)
                      ++ extra_ld_inputs
                      ++ map SysTools.Option (
                         rc_objs
                      ++ framework_path_opts
                      ++ framework_opts
                      ++ pkg_lib_path_opts
                      ++ extraLinkObj:noteLinkObjs
                      ++ pkg_link_opts
                      ++ pkg_framework_path_opts
                      ++ pkg_framework_opts
                      ++ debug_opts
                      ++ thread_opts
                    ))

    -- parallel only: move binary to another dir -- HWL
    success <- runPhase_MoveBinary dflags output_fn
    unless success $
        throwGhcExceptionIO (InstallationError ("cannot move binary"))
-- | Decide the output file name for the link step.  An explicit @-o@
-- value is respected, gaining @.exe@ on Windows (or @.a@ when building
-- a static archive) unless it already carries an extension; without
-- @-o@ we fall back to the traditional platform defaults.
exeFileName :: Bool -> DynFlags -> FilePath
exeFileName staticLink dflags =
  case outputFile dflags of
    Just s
      | onWindows  -> s <?.> "exe"
      | staticLink -> s <?.> "a"
      | otherwise  -> s
    Nothing
      | onWindows  -> "main.exe"
      | staticLink -> "liba.a"
      | otherwise  -> "a.out"
  where
    onWindows = platformOS (targetPlatform dflags) == OSMinGW32
    -- Append the extension only when the name does not already have one.
    s <?.> ext | null (takeExtension s) = s <.> ext
               | otherwise              = s
-- | On Windows with @-fgen-manifest@, write a @.manifest@ file next to
-- the executable (declaring @asInvoker@ so the binary is not subject to
-- installer-detection elevation).  With @-fembed-manifest@ the manifest
-- is additionally compiled into a COFF resource object via windres,
-- which is returned for linking, and the loose manifest file removed.
-- On every other platform this is a no-op returning @[]@.
maybeCreateManifest
   :: DynFlags
   -> FilePath                          -- filename of executable
   -> IO [FilePath]                     -- extra objects to embed, maybe
maybeCreateManifest dflags exe_filename
 | platformOS (targetPlatform dflags) == OSMinGW32 &&
   gopt Opt_GenManifest dflags
    = do let manifest_filename = exe_filename <.> "manifest"

         writeFile manifest_filename $
             "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?>\n"++
             "  <assembly xmlns=\"urn:schemas-microsoft-com:asm.v1\" manifestVersion=\"1.0\">\n"++
             "  <assemblyIdentity version=\"1.0.0.0\"\n"++
             "     processorArchitecture=\"X86\"\n"++
             "     name=\"" ++ dropExtension exe_filename ++ "\"\n"++
             "     type=\"win32\"/>\n\n"++
             "  <trustInfo xmlns=\"urn:schemas-microsoft-com:asm.v3\">\n"++
             "    <security>\n"++
             "      <requestedPrivileges>\n"++
             "        <requestedExecutionLevel level=\"asInvoker\" uiAccess=\"false\"/>\n"++
             "        </requestedPrivileges>\n"++
             "       </security>\n"++
             "  </trustInfo>\n"++
             "</assembly>\n"

         -- Windows will find the manifest file if it is named
         -- foo.exe.manifest. However, for extra robustness, and so that
         -- we can move the binary around, we can embed the manifest in
         -- the binary itself using windres:
         if not (gopt Opt_EmbedManifest dflags) then return [] else do
         rc_filename <- newTempName dflags "rc"
         rc_obj_filename <- newTempName dflags (objectSuf dflags)

         writeFile rc_filename $
             "1 24 MOVEABLE PURE " ++ show manifest_filename ++ "\n"
               -- magic numbers :-)
               -- show is a bit hackish above, but we need to escape the
               -- backslashes in the path.

         runWindres dflags $ map SysTools.Option $
               ["--input="++rc_filename,
                "--output="++rc_obj_filename,
                "--output-format=coff"]
               -- no FileOptions here: windres doesn't like seeing
               -- backslashes, apparently

         removeFile manifest_filename

         return [rc_obj_filename]
 | otherwise = return []
-- | Link a shared library, first warning when RTS-option flags were
-- supplied: they have no effect on @-shared@ output.
linkDynLibCheck :: DynFlags -> [String] -> [PackageId] -> IO ()
linkDynLibCheck dflags o_files dep_packages = do
    when (haveRtsOptsFlags dflags) $
        log_action dflags dflags SevInfo noSrcSpan defaultUserStyle $
            text "Warning: -rtsopts and -with-rtsopts have no effect with -shared." $$
            text "    Call hs_init_ghc() from your main() function to set these options."
    linkDynLib dflags o_files dep_packages
-- | Build a static archive via 'linkBinary'' after verifying we are on
-- a platform that supports it (Darwin/OS X/iOS only).
linkStaticLibCheck :: DynFlags -> [String] -> [PackageId] -> IO ()
linkStaticLibCheck dflags o_files dep_packages = do
    let os = platformOS (targetPlatform dflags)
    unless (os `elem` [OSiOS, OSDarwin]) $
        throwGhcExceptionIO (ProgramError "Static archive creation only supported on Darwin/OS X/iOS")
    linkBinary' True dflags o_files dep_packages
-- -----------------------------------------------------------------------------
-- Running CPP
-- | Run the C preprocessor over @input_fn@, writing to @output_fn@.
-- With @raw@ set the standalone cpp program is used; otherwise the C
-- compiler's @-E@ mode does the preprocessing.
doCpp :: DynFlags -> Bool -> FilePath -> FilePath -> IO ()
doCpp dflags raw input_fn output_fn = do
    let hscpp_opts = picPOpts dflags
    let cmdline_include_paths = includePaths dflags

    pkg_include_dirs <- getPackageIncludePath dflags []
    -- Interleave "-I" before every include directory.
    let include_paths = foldr (\ x xs -> "-I" : x : xs) []
                          (cmdline_include_paths ++ pkg_include_dirs)

    let verbFlags = getVerbFlags dflags

    let cpp_prog args | raw       = SysTools.runCpp dflags args
                      | otherwise = SysTools.runCc dflags (SysTools.Option "-E" : args)

    -- HOST_OS etc. are CPP macros substituted when GHC itself is built.
    let target_defs =
          [ "-D" ++ HOST_OS     ++ "_BUILD_OS=1",
            "-D" ++ HOST_ARCH   ++ "_BUILD_ARCH=1",
            "-D" ++ TARGET_OS   ++ "_HOST_OS=1",
            "-D" ++ TARGET_ARCH ++ "_HOST_ARCH=1" ]
        -- remember, in code we *compile*, the HOST is the same our TARGET,
        -- and BUILD is the same as our HOST.

    -- Expose the SIMD capabilities selected by flags as CPP defines.
    let sse_defs =
          [ "-D__SSE__=1"    | isSseEnabled    dflags ] ++
          [ "-D__SSE2__=1"   | isSse2Enabled   dflags ] ++
          [ "-D__SSE4_2__=1" | isSse4_2Enabled dflags ]

    let avx_defs =
          [ "-D__AVX__=1"      | isAvxEnabled      dflags ] ++
          [ "-D__AVX2__=1"     | isAvx2Enabled     dflags ] ++
          [ "-D__AVX512CD__=1" | isAvx512cdEnabled dflags ] ++
          [ "-D__AVX512ER__=1" | isAvx512erEnabled dflags ] ++
          [ "-D__AVX512F__=1"  | isAvx512fEnabled  dflags ] ++
          [ "-D__AVX512PF__=1" | isAvx512pfEnabled dflags ]

    backend_defs <- getBackendDefs dflags

    cpp_prog       (   map SysTools.Option verbFlags
                    ++ map SysTools.Option include_paths
                    ++ map SysTools.Option hsSourceCppOpts
                    ++ map SysTools.Option target_defs
                    ++ map SysTools.Option backend_defs
                    ++ map SysTools.Option hscpp_opts
                    ++ map SysTools.Option sse_defs
                    ++ map SysTools.Option avx_defs
        -- Set the language mode to assembler-with-cpp when preprocessing. This
        -- alleviates some of the C99 macro rules relating to whitespace and the hash
        -- operator, which we tend to abuse. Clang in particular is not very happy
        -- about this.
                    ++ [ SysTools.Option     "-x"
                       , SysTools.Option     "assembler-with-cpp"
                       , SysTools.Option     input_fn
        -- We hackily use Option instead of FileOption here, so that the file
        -- name is not back-slashed on Windows.  cpp is capable of
        -- dealing with / in filenames, so it works fine.  Furthermore
        -- if we put in backslashes, cpp outputs #line directives
        -- with *double* backslashes.   And that in turn means that
        -- our error messages get double backslashes in them.
        -- In due course we should arrange that the lexer deals
        -- with these \\ escapes properly.
                       , SysTools.Option     "-o"
                       , SysTools.FileOption "" output_fn
                       ])
-- | CPP defines contributed by the code-generation backend: for the
-- LLVM backend we expose the detected LLVM version, otherwise nothing.
getBackendDefs :: DynFlags -> IO [String]
getBackendDefs dflags
  | hscTarget dflags == HscLlvm = do
      mbVer <- figureLlvmVersion dflags
      case mbVer of
        Just v  -> return ["-D__GLASGOW_HASKELL_LLVM__="++show v]
        Nothing -> return []
  | otherwise = return []
hsSourceCppOpts :: [String]
-- Default CPP defines in Haskell source: every compilation sees the
-- GHC version macro.
hsSourceCppOpts =
        [ "-D__GLASGOW_HASKELL__="++cProjectVersionInt ]
-- ---------------------------------------------------------------------------
-- join object files into a single relocatable object file, using ld -r
-- | join object files into a single relocatable object file, using ld -r
joinObjectFiles :: DynFlags -> [FilePath] -> FilePath -> IO ()
joinObjectFiles dflags o_files output_fn = do
  let mySettings = settings dflags
      ldIsGnuLd = sLdIsGnuLd mySettings
      osInfo = platformOS (targetPlatform dflags)
      -- Invoke the linker in relocatable (-r) mode; @args@ supplies the
      -- input objects, @cc@ is the detected compiler driver.
      ld_r args cc = SysTools.runLink dflags ([
                       SysTools.Option "-nostdlib",
                       SysTools.Option "-Wl,-r"
                     ]
                     -- Clang-family drivers do not accept -nodefaultlibs.
                     ++ (if any (cc ==) [Clang, AppleClang, AppleClang51]
                          then []
                          else [SysTools.Option "-nodefaultlibs"])
                     ++ (if osInfo == OSFreeBSD
                          then [SysTools.Option "-L/usr/lib"]
                          else [])
                        -- gcc on sparc sets -Wl,--relax implicitly, but
                        -- -r and --relax are incompatible for ld, so
                        -- disable --relax explicitly.
                     ++ (if platformArch (targetPlatform dflags) == ArchSPARC
                         && ldIsGnuLd
                          then [SysTools.Option "-Wl,-no-relax"]
                          else [])
                     ++ map SysTools.Option ld_build_id
                     ++ [ SysTools.Option "-o",
                          SysTools.FileOption "" output_fn ]
                     ++ args)

      -- suppress the generation of the .note.gnu.build-id section,
      -- which we don't need and sometimes causes ld to emit a
      -- warning:
      ld_build_id | sLdSupportsBuildId mySettings = ["-Wl,--build-id=none"]
                  | otherwise                     = []

  ccInfo <- getCompilerInfo dflags
  if ldIsGnuLd
     then do
          -- GNU ld: hand the (possibly very long) object list over in a
          -- linker script, dodging command-line length limits.
          script <- newTempName dflags "ldscript"
          writeFile script $ "INPUT(" ++ unwords o_files ++ ")"
          ld_r [SysTools.FileOption "" script] ccInfo
     else if sLdSupportsFilelist mySettings
     then do
          -- Darwin-style ld: same idea via -filelist.
          filelist <- newTempName dflags "filelist"
          writeFile filelist $ unlines o_files
          ld_r [SysTools.Option "-Wl,-filelist",
                SysTools.FileOption "-Wl," filelist] ccInfo
     else do
          -- Fallback: pass every object on the command line.
          ld_r (map (SysTools.FileOption "") o_files) ccInfo
-- -----------------------------------------------------------------------------
-- Misc.
-- | What phase to run after one of the backend code generators has run
-- | What phase to run after one of the backend code generators has run.
-- @hs-boot@ files produce no code, so they stop the pipeline at once;
-- otherwise the follow-on phase is determined by the chosen backend.
hscPostBackendPhase :: DynFlags -> HscSource -> HscTarget -> Phase
hscPostBackendPhase _ HsBootFile _              = StopLn
hscPostBackendPhase _ _ HscC                    = HCc
hscPostBackendPhase dflags _ HscAsm
  | gopt Opt_SplitObjs dflags                   = Splitter
  | otherwise                                   = As False
hscPostBackendPhase _ _ HscLlvm                 = LlvmOpt
hscPostBackendPhase _ _ HscNothing              = StopLn
hscPostBackendPhase _ _ HscInterpreted          = StopLn
-- | Update an object file's timestamp, creating its directory first
-- when necessary.
touchObjectFile :: DynFlags -> FilePath -> IO ()
touchObjectFile dflags path =
    createDirectoryIfMissing True (takeDirectory path)
        >> SysTools.touch dflags "Touching object file" path
-- | Were any RTS-option flags given?  True when @-with-rtsopts@ set a
-- value, or when @-rtsopts@ enabled anything beyond the safe-only mode.
haveRtsOptsFlags :: DynFlags -> Bool
haveRtsOptsFlags dflags = isJust (rtsOpts dflags) || rtsOptsBeyondSafe
  where
    rtsOptsBeyondSafe = case rtsOptsEnabled dflags of
                          RtsOptsSafeOnly -> False
                          _               -> True
-- Note [-fPIC for assembler]
-- When compiling .c source file GHC's driver pipeline basically
-- does the following two things:
-- 1. ${CC} -S 'PIC_CFLAGS' source.c
-- 2. ${CC} -x assembler -c 'PIC_CFLAGS' source.S
--
-- Why do we need to pass 'PIC_CFLAGS' both to C compiler and assembler?
-- Because on some architectures (at least sparc32) the assembler also chooses the
-- relocation type!
-- Consider the following C module:
--
-- /* pic-sample.c */
-- int v;
-- void set_v (int n) { v = n; }
-- int get_v (void) { return v; }
--
-- $ gcc -S -fPIC pic-sample.c
-- $ gcc -c pic-sample.s -o pic-sample.no-pic.o # incorrect binary
-- $ gcc -c -fPIC pic-sample.s -o pic-sample.pic.o # correct binary
--
-- $ objdump -r -d pic-sample.pic.o > pic-sample.pic.o.od
-- $ objdump -r -d pic-sample.no-pic.o > pic-sample.no-pic.o.od
-- $ diff -u pic-sample.pic.o.od pic-sample.no-pic.o.od
--
-- Most of architectures won't show any difference in this test, but on sparc32
-- the following assembly snippet:
--
-- sethi %hi(_GLOBAL_OFFSET_TABLE_-8), %l7
--
-- generates two kinds of relocations; only 'R_SPARC_PC22' is correct:
--
-- 3c: 2f 00 00 00 sethi %hi(0), %l7
-- - 3c: R_SPARC_PC22 _GLOBAL_OFFSET_TABLE_-0x8
-- + 3c: R_SPARC_HI22 _GLOBAL_OFFSET_TABLE_-0x8
| hferreiro/replay | compiler/main/DriverPipeline.hs | bsd-3-clause | 98,996 | 3 | 31 | 34,515 | 16,063 | 8,127 | 7,936 | -1 | -1 |
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE ExistentialQuantification #-}
module ClassHierarchy where
import Ivory.Language
import Ivory.Compile.C.CmdlineFrontend
import Control.Monad
--------------------------------------------------------------------------------
-- The Ivory quasiquote below declares the struct layouts and generates
-- the Haskell field labels ('paramA', 'base1', 'base2', ...) and the
-- type-level struct symbols used throughout the rest of this module.
[ivory|
-- Three structs defined in the quasiquoter, a base struct and two that'll
-- extend it.
struct StanagBase
  { paramA :: Stored IBool }
-- Annoyingly, the fields are in the same global namespace, so we give unique names to the fields pointing to the base struct.
struct StanagBaseMsg1
  { base1 :: Struct StanagBase
  ; paramC :: Stored IFloat
  }
struct StanagBaseMsg2
  { base2 :: Struct StanagBase
  ; paramD :: Stored IFloat
  }
|]
-- A Haskell class that has a method 'getBase'. Given a reference to a struct,
-- getBase returns the base state.
--
-- XXX This is boilerplate that might be generated...
class (IvoryStruct sym) => ExtendBase sym where
  -- getBase projects any struct in the hierarchy onto its 'StanagBase'
  -- sub-struct.  The constraint triple lets the same signature serve
  -- every Ivory reference kind at every allocation scope.
  getBase :: forall ref s
           . ( IvoryExpr (ref s (Struct sym))
           , IvoryExpr (ref s (Struct "StanagBase"))
           , IvoryRef ref
           )
        => ref s (Struct sym) -> ref s (Struct "StanagBase")
-- For the parent, it's just a noop (identity).
instance ExtendBase "StanagBase" where
  getBase = id

-- Otherwise, we dereference the base field ((~>) projects a struct
-- field out of a reference).
instance ExtendBase "StanagBaseMsg1" where
  getBase ref = ref ~> base1

instance ExtendBase "StanagBaseMsg2" where
  getBase ref = ref ~> base2
-- A polymorphic procedure Ivory macro for references to objects in the
-- hierarchy. Note: this cannot be a C function (or we'd have to specialize it
-- for each use type).
getBaseVal :: ExtendBase sym => Ref s (Struct sym) -> Ivory eff IBool
-- Project to the base struct, then read its 'paramA' flag.
getBaseVal ref = deref (getBase ref ~> paramA)
-- A procedure that makes use of the polymorphism. Regardless of whether the
-- reference is to a parent or child, we can use the 'getBaseVal' Ivory
-- function.
bar :: Def ([ Ref s (Struct "StanagBase")
            , Ref s (Struct "StanagBaseMsg1")
            , Ref s (Struct "StanagBaseMsg2")
            ] :-> IBool)
bar = proc "bar" $ \r0 r1 r2 -> body $ do
  -- One 'getBaseVal' call per hierarchy member: the same macro serves
  -- the parent and both children.
  b0 <- getBaseVal r0
  b1 <- getBaseVal r1
  b2 <- getBaseVal r2
  -- Return the conjunction of the three base flags.
  ret (b0 .&& b1 .&& b2)
-- | Compile the example module to C in the current directory, with
-- constant folding switched on.
makeCCode :: IO ()
makeCCode = runCompiler [exampleModule] [] compilerOpts
  where
    compilerOpts = initialOpts { outDir = Nothing, constFold = True }
    exampleModule = package "Module" $ do
      defStruct (Proxy :: Proxy "StanagBase")
      defStruct (Proxy :: Proxy "StanagBaseMsg1")
      defStruct (Proxy :: Proxy "StanagBaseMsg2")
      incl bar
| Hodapp87/ivory | ivory-examples/examples/ClassHierarchy.hs | bsd-3-clause | 2,709 | 0 | 13 | 580 | 510 | 267 | 243 | 45 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
import Test.QuickCheck
import Algebra
import Data.List (nub)
import System.Random (Random(..))
import Data.Text (Text)
import qualified Data.Text as T (pack)
import Data.Time (fromGregorian)
import Data.Time.Clock (UTCTime(..), secondsToDiffTime)
import Control.Monad (replicateM)
-- | Wrapper for QuickCheck's relvar generation.
newtype QCRelvar = QCRelvar Relvar deriving Show
-- | A pair of relvars with the same attributes.
newtype QCMatchingRelvar = QCMatchingRelvar (Relvar, Relvar) deriving Show
-- | A pair of relvars with disjoint attributes.
newtype QCDisjointRelvar = QCDisjointRelvar (Relvar, Relvar) deriving Show
-- | A pair of relvars with overlapping attributes, but not disjoint.
newtype QCOverlappingRelvar = QCOverlappingRelvar (Relvar, Relvar) deriving Show

-- Each wrapper delegates to the dedicated sized generator below.
instance Arbitrary QCRelvar where arbitrary = QCRelvar <$> (sized genRelvar)
instance Arbitrary QCMatchingRelvar where arbitrary = QCMatchingRelvar <$> (sized genMatchingRelvars)
instance Arbitrary QCDisjointRelvar where arbitrary = QCDisjointRelvar <$> (sized genDisjointRelvars)
instance Arbitrary QCOverlappingRelvar where arbitrary = QCOverlappingRelvar <$> (sized genOverlappingRelvars) -- TODO: code genOverlappingRelvars
-- | Announce and run every property with a shared 'maxSuccess' budget.
main :: IO ()
main = sequence_
    [ announce "\nprop00 - test simple relvar generation" prop00
    , announce "\nprop01 - union r r == r" prop01
    , announce "\nprop02 - intersect r r == r" prop02
    , announce "\nprop03 - card of minus r r == 0" prop03
    , announce "\nprop04 - (r1 \\ r2) union (r1 intersect r2) == r1" prop04
    , announce "\nprop05 - card r1 + card r2 >= card (r1 `union` r2)" prop05
    , announce "\nprop06 - (card r1) `min` (card r2) >= card (r1 `intersection` r2)" prop06
    , announce "\nprop07 - card (r1 `times` r2) == (card r1) * (card r2) for disjoint relations" prop07
    ]
  where
    -- increase for more tests.
    mxsss = 1000
    -- Print the banner, then run the property.
    announce banner prop = do
        putStrLn banner
        quickCheckWith stdArgs { maxSuccess = mxsss } prop
-- TODO: ADD more tests
-- ======================
-- == properties
-- ======================
-- TODO: add dee and dum tests
-- | Sanity check that relvar generation itself works: cardinality is
-- never negative.
prop00 :: QCRelvar -> Bool
prop00 (QCRelvar r) = (card r) >= 0

-- | union is idempotent: @union r r == r@.
prop01 :: QCRelvar -> Bool
prop01 (QCRelvar r) = union r r == r

-- | intersection is idempotent: @intersect r r == r@.
prop02 :: QCRelvar -> Bool
prop02 (QCRelvar r) = intersection r r == r

-- | Self difference has no rows.
prop03 :: QCRelvar -> Bool
prop03 (QCRelvar r) = (card $ minus r r) == 0

-- | @(r1 \\ r2) union (r1 intersect r2) == r1@.
prop04 :: QCMatchingRelvar -> Bool
prop04 (QCMatchingRelvar (r1, r2)) = union (minus r1 r2) (intersection r1 r2) == r1

-- | @card r1 + card r2 >= card (r1 `union` r2)@ (union removes duplicates).
prop05 :: QCMatchingRelvar -> Bool
prop05 (QCMatchingRelvar (r1, r2)) = (card r1 + card r2) >= card (r1 `union` r2)

-- | @(card r1) `min` (card r2) >= card (r1 `intersection` r2)@.
prop06 :: QCMatchingRelvar -> Bool
prop06 (QCMatchingRelvar (r1, r2)) = (card r1 `min` card r2) >= card (r1 `intersection` r2)

-- | @card (r1 `times` r2) == card r1 * card r2@ for disjoint relations.
prop07 :: QCDisjointRelvar -> Bool
prop07 (QCDisjointRelvar (r1, r2)) = card (r1 `times` r2) == (card r1) * (card r2)
-- ======================
-- == generators
-- ======================
const_MAXCOL = 5 -- maximum column # generated
const_MAXROW = 5 -- maximum row # generated

-- | Single Relvar generator: between 1 and 'const_MAXCOL' uniquely
-- labelled columns of random types, and up to @size `min` const_MAXROW@
-- rows of matching random elements.
--
-- >>> sample (arbitrary :: Gen QCRelvar)
genRelvar :: Int -> Gen Relvar
genRelvar _ = sized $ \s -> do
  coln <- choose (1, const_MAXCOL)
  lbls <- suchThat (vectorOf coln (listOf1 (arbitrary :: Gen Char))) (not . hasDups)
  tys  <- vectorOf coln (arbitrary :: Gen TypeRep)
  -- FIX: the row count was previously clamped with 'const_MAXCOL';
  -- 'const_MAXROW' is clearly what was meant (both happen to be 5).
  rown <- choose (0, s `min` const_MAXROW)
  -- FIX: the original generated and discarded one extra row here; that
  -- dead binding is removed.
  ess  <- replicateM rown (genElems (map genElem tys))
  return (relvar lbls tys ess)
-- | Matching Relvars generator: two relvars over the same labelled,
-- typed attributes.  The second relvar's rows are resampled from the
-- union of both row pools, which guarantees that r2 sometimes shares
-- rows with r1.
genMatchingRelvars :: Int -> Gen (Relvar, Relvar)
genMatchingRelvars _ = sized $ \s -> do
  coln <- choose (1, const_MAXCOL)
  lbls <- suchThat (vectorOf coln (listOf1 (arbitrary :: Gen Char))) (not . hasDups)
  tys  <- vectorOf coln (arbitrary :: Gen TypeRep)
  -- FIX: row counts were clamped with 'const_MAXCOL' (same value, wrong
  -- constant); use 'const_MAXROW'.
  rown1 <- choose (0, s `min` const_MAXROW)
  rown2 <- choose (0, s `min` const_MAXROW)
  ess1 <- replicateM rown1 (genElems (map genElem tys))
  ess2 <- replicateM rown2 (genElems (map genElem tys))
  -- FIX: the empty-pool fallback previously bound a pattern variable
  -- spelled "othewise", which read like the 'otherwise' guard keyword.
  ess2' <- case ess1 ++ ess2 of
             []   -> return []
             pool -> listOf (elements pool)
  return (relvar lbls tys ess1, relvar lbls tys ess2')
-- | Disjoint Relvars generator: two relvars whose attribute sets cannot
-- overlap — the labels are drawn as one duplicate-free batch and then
-- split between the two relvars.
genDisjointRelvars :: Int -> Gen (Relvar, Relvar)
genDisjointRelvars _ = sized $ \s -> do
  coln1 <- choose (1, const_MAXCOL)
  coln2 <- choose (1, const_MAXCOL)
  lbls <- suchThat (vectorOf (coln1 + coln2) (listOf1 (arbitrary :: Gen Char))) (not . hasDups)
  let (lbls1, lbls2) = splitAt coln1 lbls
  tys1 <- vectorOf coln1 (arbitrary :: Gen TypeRep)
  tys2 <- vectorOf coln2 (arbitrary :: Gen TypeRep)
  -- FIX: row counts were clamped with 'const_MAXCOL' (same value, wrong
  -- constant); use 'const_MAXROW'.
  rown1 <- choose (0, s `min` const_MAXROW)
  rown2 <- choose (0, s `min` const_MAXROW)
  ess1 <- replicateM rown1 (genElems (map genElem tys1))
  ess2 <- replicateM rown2 (genElems (map genElem tys2))
  return (relvar lbls1 tys1 ess1, relvar lbls2 tys2 ess2)
-- TODO: code this.
-- | Overlapping Relvars generator: intended to produce relvars whose
-- attribute sets intersect without being equal or disjoint.  Currently
-- a stub that always yields @(dum, dum)@, so 'QCOverlappingRelvar'
-- based properties are not actually exercised yet.
genOverlappingRelvars :: Int -> Gen (Relvar, Relvar)
genOverlappingRelvars s = sized $ \s -> do
  return (dum,dum)
-------------------------
-- Elem functions
-------------------------
-- | from a given TypeRep create the corresponding Gen Elem
--
-- Value ranges are deliberately tiny (chars in a-z, numbers in
-- [-10,10], a handful of fixed dates/times) so that independently
-- generated rows collide often enough to exercise set semantics.
-- Unrecognised types fall back to 'Nil'.
genElem :: TypeRep -> Gen Elem
genElem t
    | (t == tyB) = B <$> (arbitrary :: Gen Bool)
    | (t == tyC) = C <$> choose ('a', 'z')
    | (t == tyS) = S <$> (arbitrary :: Gen String)
    | (t == tyT) = T <$> (arbitrary :: Gen Text)
    | (t == tyI) = I <$> choose (-10, 10)
    | (t == tyJ) = J <$> choose (-10, 10)
    | (t == tyD) = D <$> choose (-10.0, 10.0)
    | (t == tyDD) = DD <$> elements [fromGregorian 2016 1 1
                                    ,fromGregorian 2016 6 1
                                    ,fromGregorian 2016 12 1]
    | (t == tyDT) = let time = UTCTime {utctDay = fromGregorian 2016 1 1, utctDayTime = secondsToDiffTime (3600*1)} in
                    DT <$> elements [time
                                    ,time {utctDayTime = secondsToDiffTime (3600*2)}
                                    ,time {utctDayTime = secondsToDiffTime (3600*3)}]
    | otherwise = elements [Nil]
-- | need to convert a [Gen Elem] into a Gen [Elem]
--
-- This is exactly 'sequence' for 'Gen'.  Unlike the original hand
-- recursion it is total: the original's empty-list case bottomed out
-- in 'undefined' (only unreachable because callers always pass at
-- least one column), whereas 'sequence' simply yields @pure []@.
genElems :: [Gen a] -> Gen [a]
genElems = sequence
-------------------------
-- Text functions
-------------------------
-- | Arbitrary 'Text' values are packed arbitrary 'String's.
instance Arbitrary Text where
    arbitrary = T.pack <$> (arbitrary :: Gen String)
-------------------------
-- TypeRep functions
-------------------------
-- TODO : add tests with non-basics Elem (eg R r, A a...)
-- | Candidate element types for randomly generated schemas.
rndtys = [tyB,tyC,tyS, tyT,tyI,tyJ,tyD,tyDD,tyDT] --removing temporarily tyBS
-- | Cumulative selection thresholds k\/9 for k = 1, 2, ...: a uniform
-- draw below threshold k picks @rndtys !! (k-1)@.
rndtysrng = map (/(fromIntegral $ length rndtys)) [1.0, 2.0..]
-- | Random selection of one of the candidate 'TypeRep's in 'rndtys'.
-- Both methods ignore the requested range and draw uniformly.
instance Random TypeRep where
    randomR (_, _) g = let (r :: Double, g') = randomR (0, 1) g
                       in (pickRndTy r, g')
    random g = let (r :: Double, g') = random g
               in (pickRndTy r, g')

-- | Map a uniform draw @r@ in [0,1] onto 'rndtys' via the 'rndtysrng'
-- thresholds.  The final bucket also absorbs @r == 1.0@: the original
-- cascade's fallback indexed one past the end of the list
-- (@rndtys !! length rndtys@) and crashed on that (rare) draw.
pickRndTy :: Double -> TypeRep
pickRndTy r = go 0
  where
    lastIx = length rndtys - 1
    go i
      | i >= lastIx        = rndtys !! lastIx
      | r < rndtysrng !! i = rndtys !! i
      | otherwise          = go (i + 1)
-- | Pick uniformly from the candidate types in 'rndtys'.
instance Arbitrary TypeRep where
    arbitrary = elements rndtys -- choose (tyB, tyBS)
-------------------------
-- Local functions
-------------------------
-- | True when the list contains at least one duplicate entry
-- (deduplicating shrinks it).
hasDups :: [String] -> Bool
hasDups xs = length (nub xs) < length xs
| JAnthelme/relation-tool | test/Spec.hs | bsd-3-clause | 9,557 | 0 | 16 | 2,630 | 3,016 | 1,582 | 1,434 | 157 | 2 |
module Lexer
( lexMap,
LexHgram
) where
import qualified Data.Text as T
import qualified Data.Map as M
import Text.Parsec
import Data.Char
import Data.List
type LexHgram = M.Map T.Text Int
-- | Tokenise the input and build a histogram of its lexemes.  If the
-- tokeniser rejects the input, the histogram is empty.
--
-- FIX: counts via a single 'M.fromListWith' pass instead of
-- sort\/group, removing the partial 'head' and the unused 'err'
-- binding (which triggered an unused-variable warning).
lexMap :: T.Text -> LexHgram
lexMap t = case parse lexer "" t of
  Left _   -> M.empty
  Right ws -> M.fromListWith (+) [ (w, 1) | w <- ws ]
-- | Split the input into lexemes: optional leading delimiters, then
-- lexemes each terminated by a delimiter run or by end of input.
--
-- FIX: the original demanded a delimiter after *every* lexeme, so any
-- input ending in a letter (e.g. @"abc"@ or @"abc def"@) made
-- @lexeme <* cDelimeter@ fail after consuming input, which made 'many'
-- — and hence the whole parse — fail, and 'lexMap' silently returned
-- an empty histogram.  Accepting 'eof' as a lexeme terminator fixes
-- that without changing the result for inputs that do end in a
-- delimiter.
lexer::Parsec T.Text st [T.Text]
lexer = optional cDelimeter *> many (lexeme <* ((() <$ cDelimeter) <|> eof)) <* eof
-- | One lexeme: a maximal non-empty run of non-delimiter characters,
-- packed into 'T.Text'.
lexeme::Parsec T.Text st T.Text
lexeme = fmap T.pack (many1 (satisfy (not . isCDelimeter)))
-- | A non-empty run of delimiter characters.
cDelimeter:: Parsec T.Text st [Char]
cDelimeter = many1 $ satisfy isCDelimeter
-- | A character delimits lexemes whenever it is not alphabetic
-- (digits, punctuation and whitespace all separate words).
isCDelimeter:: Char -> Bool
isCDelimeter c = not (isAlpha c)
| mmx1/tokenGen | src/Lexer.hs | bsd-3-clause | 754 | 0 | 16 | 147 | 291 | 157 | 134 | 21 | 2 |
{-# LANGUAGE DeriveGeneric, KindSignatures, TemplateHaskell,
QuasiQuotes, FlexibleInstances, TypeOperators, TypeSynonymInstances,
MultiParamTypeClasses, FunctionalDependencies, OverlappingInstances,
ScopedTypeVariables, EmptyDataDecls, DefaultSignatures, ViewPatterns,
UndecidableInstances, FlexibleContexts, StandaloneDeriving, IncoherentInstances,
DeriveDataTypeable #-}
module MRP.Commands where
import qualified Data.ByteString as BS
import Language.C.Simple.CType
import Language.C.Simple.CType.Build.TH
import TypeLevel.NaturalNumber
import Language.C.Simple.Evaluator
import Foreign.C.Types
import Data.Default
-- Resource identifiers, sizes and byte offsets all travel as C ints.
type Id = CInt
type Size = CInt
type Offset = CInt

-- | Error payload shared by several commands: the given resource 'Id'
-- was not found.
data IdMissing = IdMissing
   {
      id_missing_id :: Id
   }
-- | The resource store: an association list from resource id to its
-- byte contents.
data ResourceEnv = ResourceEnv
   {
      resources :: [(Id, BS.ByteString)]
   }
   deriving(Show, Eq)
-- | Create: allocate a resource with a given id and byte size.
data CreateInput = CreateInput {
      create_input_id :: Id,
      create_input_size :: Size
   }
-- | Create returns no payload on success.
data CreateOutput = CreateOutput
-- | Creation can fail on capacity or on memory exhaustion.
data CreateError = CreateOutCapacity
                 | CreateOutOfMemory
type CreateCommand = Command CreateInput CreateOutput CreateError

-- | Delete: drop the resource with the given id.
data DeleteInput = DeleteInput
   {
      delete_input_id :: Id
   }
data DeleteOutput = DeleteOutput
type DeleteError = IdMissing
type DeleteCommand = Command DeleteInput DeleteOutput DeleteError

-- | Get: fetch the bytes of the resource with the given id.
data GetInput = GetInput
   {
      get_input :: Id
   }
data GetOutput = GetOutput
   {
      get_output_bytes :: BS.ByteString
   }
type GetError = IdMissing
type GetCommand = Command GetInput GetOutput GetError

-- | Put: overwrite the bytes of the resource with the given id.
data PutInput = PutInput
   {
      put_input_id :: Id,
      put_input_data :: BS.ByteString
   }
data PutOutput = PutOutput
type PutError = IdMissing
type PutCommand = Command PutInput PutOutput PutError
-- | A command together with its fixup offsets.  The @n@ parameter is a
-- phantom type-level natural — presumably an offset-count bound; TODO
-- confirm against the protocol/evaluator code.
data RunInput n a = RunInput
   {
      run_input_offset :: [Offset] ,
      run_input_offset_count :: Int,
      run_input_command :: a
   }
   deriving(Show, Eq)

-- | Run can fail while patching offsets, or with the wrapped
-- command's own error type.
data RunError a = RunFixupIdNotFound
                | RunFixupOffsetOutofBounds
                | RunCommandError a

-- 'def' supplies a placeholder error value where one is needed before
-- a more specific failure is known.
instance Default (RunError a) where
    def = RunFixupIdNotFound

-- | A command paired with the offsets at which it is to be fixed up.
data RunCommand a = RunCommand {
    runCommandCmd :: a,
    runCommandOffets :: [Offset]
}
-- | The top-level command union dispatched by the resource protocol:
-- one constructor per supported operation.
data ResourceCommand = C CreateCommand
                     | D DeleteCommand
                     | G GetCommand
                     | P PutCommand
--data ResourceCommandOutput n a =
| jfischoff/minimal-resource-protocol | src/MRP/Commands.hs | bsd-3-clause | 2,606 | 0 | 11 | 759 | 444 | 272 | 172 | 63 | 0 |
-- | Dynamically lookup up values from modules and loading them.
module DynamicLoading (
#ifdef GHCI
-- * Force loading information
forceLoadModuleInterfaces,
forceLoadNameModuleInterface,
forceLoadTyCon,
-- * Finding names
lookupRdrNameInModule,
-- * Loading values
getValueSafely,
lessUnsafeCoerce
#endif
) where
#ifdef GHCI
import Linker ( linkModule, getHValue )
import SrcLoc ( noSrcSpan )
import Finder ( findImportedModule, cannotFindModule )
import DriverPhases ( HscSource(HsSrcFile) )
import TcRnDriver ( getModuleInterface )
import TcRnMonad ( initTc, initIfaceTcRn )
import LoadIface ( loadUserInterface )
import RdrName ( RdrName, Provenance(..), ImportSpec(..), ImpDeclSpec(..)
, ImpItemSpec(..), mkGlobalRdrEnv, lookupGRE_RdrName, gre_name )
import RnNames ( gresFromAvails )
import PrelNames ( iNTERACTIVE )
import DynFlags
import HscTypes ( HscEnv(..), FindResult(..), ModIface(..), lookupTypeHscEnv )
import TypeRep ( TyThing(..), pprTyThingCategory )
import Type ( Type, eqType )
import TyCon ( TyCon )
import Name ( Name, nameModule_maybe )
import Id ( idType )
import Module ( Module, ModuleName )
import Panic ( GhcException(..), throwGhcException )
import FastString
import ErrUtils
import Outputable
import Exception
import Data.Maybe ( mapMaybe )
import GHC.Exts ( unsafeCoerce# )
-- | Force the interfaces for the given modules to be loaded. The 'SDoc' parameter is used
-- for debugging (@-ddump-if-trace@) only: it is shown as the reason why the module is being loaded.
forceLoadModuleInterfaces :: HscEnv -> SDoc -> [Module] -> IO ()
forceLoadModuleInterfaces hsc_env doc modules = do
    -- The typechecker monad is only a vehicle for the interface loader
    -- here; its result is deliberately discarded.
    _ <- initTc hsc_env HsSrcFile False iNTERACTIVE $
             initIfaceTcRn $
                 mapM_ (loadUserInterface False doc) modules
    return ()
-- | Force the interface for the module containing the name to be loaded. The 'SDoc' parameter is used
-- for debugging (@-ddump-if-trace@) only: it is shown as the reason why the module is being loaded.
forceLoadNameModuleInterface :: HscEnv -> SDoc -> Name -> IO ()
-- 'nameModule_maybe' yields nothing for built-in/internal names, in
-- which case there is no interface to load.
forceLoadNameModuleInterface hsc_env reason name =
    forceLoadModuleInterfaces hsc_env reason (mapMaybe nameModule_maybe [name])
-- | Load the 'TyCon' associated with the given name, come hell or high water. Fails if:
--
-- * The interface could not be loaded
-- * The name is not that of a 'TyCon'
-- * The name did not exist in the loaded module
forceLoadTyCon :: HscEnv -> Name -> IO TyCon
forceLoadTyCon hsc_env con_name = do
    -- Make sure the defining interface is in the cache before the lookup.
    forceLoadNameModuleInterface hsc_env (ptext (sLit "contains a name used in an invocation of loadTyConTy")) con_name
    mb_con_thing <- lookupTypeHscEnv hsc_env con_name
    case mb_con_thing of
        -- Not found at all, or found but bound to something other than a
        -- type constructor: both are fatal command-line errors.
        Nothing -> throwCmdLineErrorS dflags $ missingTyThingError con_name
        Just (ATyCon tycon) -> return tycon
        Just con_thing -> throwCmdLineErrorS dflags $ wrongTyThingError con_name con_thing
  where dflags = hsc_dflags hsc_env
-- | Loads the value corresponding to a 'Name' if that value has the given 'Type'. This only provides limited safety
-- in that it is up to the user to ensure that that type corresponds to the type you try to use the return value at!
--
-- If the value found was not of the correct type, returns @Nothing@. Any other condition results in an exception:
--
-- * If we could not load the names module
-- * If the thing being loaded is not a value
-- * If the Name does not exist in the module
-- * If the link failed
getValueSafely :: HscEnv -> Name -> Type -> IO (Maybe a)
getValueSafely hsc_env val_name expected_type = do
    forceLoadNameModuleInterface hsc_env (ptext (sLit "contains a name used in an invocation of getValueSafely")) val_name
    -- Now look up the names for the value and type constructor in the type environment
    mb_val_thing <- lookupTypeHscEnv hsc_env val_name
    case mb_val_thing of
        Nothing -> throwCmdLineErrorS dflags $ missingTyThingError val_name
        Just (AnId id) -> do
            -- Check the value type in the interface against the type recovered from the type constructor
            -- before finally casting the value to the type we assume corresponds to that constructor
            if expected_type `eqType` idType id
             then do
                -- Link in the module that contains the value, if it has such a module
                case nameModule_maybe val_name of
                    Just mod -> do linkModule hsc_env mod
                                   return ()
                    Nothing -> return ()
                -- Find the value that we just linked in and cast it given that we have proved it's type
                hval <- getHValue hsc_env val_name
                value <- lessUnsafeCoerce (hsc_dflags hsc_env) "getValueSafely" hval
                return $ Just value
             else return Nothing
        Just val_thing -> throwCmdLineErrorS dflags $ wrongTyThingError val_name val_thing
   where dflags = hsc_dflags hsc_env
-- | Coerce a value as usual, but:
--
-- 1) Evaluate it immediately to get a segfault early if the coercion was wrong
--
-- 2) Wrap it in some debug messages at verbosity 3 or higher so we can see what happened
-- if it /does/ segfault
lessUnsafeCoerce :: DynFlags -> String -> a -> IO b
lessUnsafeCoerce dflags context what = do
    -- The trace message before the 'evaluate' is the last thing printed if
    -- the coercion crashes, so it pinpoints the guilty call site.
    debugTraceMsg dflags 3 $ (ptext $ sLit "Coercing a value in") <+> (text context) <> (ptext $ sLit "...")
    output <- evaluate (unsafeCoerce# what)
    debugTraceMsg dflags 3 $ ptext $ sLit "Successfully evaluated coercion"
    return output
-- | Finds the 'Name' corresponding to the given 'RdrName' in the context of the 'ModuleName'. Returns @Nothing@ if no
-- such 'Name' could be found. Any other condition results in an exception:
--
-- * If the module could not be found
-- * If we could not determine the imports of the module
lookupRdrNameInModule :: HscEnv -> ModuleName -> RdrName -> IO (Maybe Name)
lookupRdrNameInModule hsc_env mod_name rdr_name = do
    -- First find the package the module resides in by searching exposed packages and home modules
    found_module <- findImportedModule hsc_env mod_name Nothing
    case found_module of
        Found _ mod -> do
            -- Find the exports of the module
            (_, mb_iface) <- getModuleInterface hsc_env mod
            case mb_iface of
                Just iface -> do
                    -- Try and find the required name in the exports
                    -- We fabricate an unqualified, import-everything provenance so that
                    -- lookupGRE_RdrName behaves as if the module had been plainly imported.
                    let decl_spec = ImpDeclSpec { is_mod = mod_name, is_as = mod_name
                                                , is_qual = False, is_dloc = noSrcSpan }
                        provenance = Imported [ImpSpec decl_spec ImpAll]
                        env = mkGlobalRdrEnv (gresFromAvails provenance (mi_exports iface))
                    case lookupGRE_RdrName rdr_name env of
                        [gre] -> return (Just (gre_name gre))
                        [] -> return Nothing
                        _ -> panic "lookupRdrNameInModule"
                Nothing -> throwCmdLineErrorS dflags $ hsep [ptext (sLit "Could not determine the exports of the module"), ppr mod_name]
        err -> throwCmdLineErrorS dflags $ cannotFindModule dflags mod_name err
  where dflags = hsc_dflags hsc_env
-- | Error document: the name exists but is bound to something that is not a value.
wrongTyThingError :: Name -> TyThing -> SDoc
wrongTyThingError name got_thing =
    hsep [ ptext (sLit "The name")
         , ppr name
         , ptext (sLit "is not that of a value but rather a")
         , pprTyThingCategory got_thing
         ]

-- | Error document: the name could not be found in the type environment.
missingTyThingError :: Name -> SDoc
missingTyThingError name =
    hsep [ ptext (sLit "The name")
         , ppr name
         , ptext (sLit "is not in the type environment: are you sure it exists?")
         ]

-- | Render an 'SDoc' with the given flags and abort with a command-line error.
throwCmdLineErrorS :: DynFlags -> SDoc -> IO a
throwCmdLineErrorS dflags doc = throwCmdLineError (showSDoc dflags doc)

-- | Abort with a 'CmdLineError' carrying the given message.
throwCmdLineError :: String -> IO a
throwCmdLineError msg = throwGhcException (CmdLineError msg)
#endif
| nomeata/ghc | compiler/main/DynamicLoading.hs | bsd-3-clause | 8,124 | 0 | 24 | 2,110 | 1,446 | 763 | 683 | 1 | 0 |
{-# OPTIONS_GHC -fno-warn-type-defaults #-}
import Data.Either (isLeft)
import Data.Map (fromList)
import Test.Hspec (Spec, describe, it, shouldBe, shouldSatisfy)
import Test.Hspec.Runner (configFastFail, defaultConfig, hspecWith)
import DNA (count, nucleotideCounts)
-- | Run the test suite, stopping at the first failing example.
main :: IO ()
main = hspecWith config specs
  where
    config = defaultConfig {configFastFail = True}
-- | Test tree for the nucleotide-count exercise: 'count' for single
-- nucleotides and 'nucleotideCounts' for whole-strand tallies.
specs :: Spec
specs = describe "nucleotide-count" $ do
    -- As of 2016-07-27, there was no reference file
    -- for the test cases in `exercism/x-common`.
    -- Helpers: compare against a successful (Right) result, either a plain
    -- value or a Map built from an association list.
    let matches x y = x `shouldBe` Right y
        matchesMap x y = x `shouldBe` (Right . fromList) y
    describe "count" $ do
      it "empty dna strand has no adenosine" $
        count 'A' "" `matches` 0
      it "repetitive cytidine gets counted" $
        count 'C' "CCCCC" `matches` 5
      it "counts only thymidine" $
        count 'T' "GGGGGTAACCCGG" `matches` 1
      it "validates nucleotides" $
        count 'X' "GACT" `shouldSatisfy` isLeft
      it "validates strand" $
        count 'G' "GACYT" `shouldSatisfy` isLeft
    describe "nucleotideCounts" $ do
      it "empty dna strand has no nucleotides" $
        nucleotideCounts "" `matchesMap` [('A', 0), ('C', 0), ('G', 0), ('T', 0)]
      it "repetitive-sequence-has-only-guanosine" $
        nucleotideCounts "GGGGGGGG" `matchesMap` [('A', 0), ('C', 0), ('G', 8), ('T', 0)]
      it "counts all nucleotides" $
        nucleotideCounts "AGCTTTTCATTCTGACTGCAACGGGCAATATGTCTCTGTGTGGATTAAAAAAAGAGTGTCTGATAGCAGC"
          `matchesMap` [('A', 20), ('C', 12), ('G', 17), ('T', 21)]
      it "validates strand" $
        nucleotideCounts "GPAC" `shouldSatisfy` isLeft
| vaibhav276/exercism_haskell | nucleotide-count/test/Tests.hs | mit | 2,230 | 0 | 14 | 953 | 487 | 267 | 220 | 42 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DefaultSignatures #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PartialTypeSignatures #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE TypeInType #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE UnicodeSyntax #-}
{-# OPTIONS_GHC -Wextra -Wno-unused-imports -Wno-unticked-promoted-constructors -Wno-type-defaults -Wno-missing-signatures #-}
module Main where
import Control.Compose
import Control.Lens hiding (children, pre)
import Control.Monad.IO.Class
import Data.Foldable
import Data.Monoid
import Data.Sequence (Seq)
import qualified Data.Sequence as S
import Prelude.Unicode
-- | "Blackbird" combinator: compose a unary function after a binary one,
-- i.e. @(f .: g) x y = f (g x y)@.
(.:) f g x y = f (g x y)

-- | Trivial identity wrapper.
data I x = I { unI :: x }
-- | Simple rose tree mirroring a widget hierarchy: a label plus children.
-- The alternative constructors below were apparently tried and abandoned.
data HB
  = HB { _s ∷ String, _children ∷ [HB] }
  -- | HBE { _s ∷ String, _children ∷ [HB] }
  -- | HBR { _s ∷ String, _children ∷ [HB] }
  deriving (Show)

-- Generates the 's' and 'children' lenses used by 'walk' and 'dump'.
makeLenses ''HB
-- | Depth-first traversal of an 'HB' tree.  The action receives the node
-- together with a trace of child indices accumulated most-recent-first
-- (the head is the index at the current depth).
walk ∷ (a ~ HB, Monad m) ⇒ ([Int] → a → m ()) → a → m ()
walk action f = loop [0] f
  where loop trace f = do
          action trace f
          -- Visit each child, prefixing its position onto the trace.
          sequence $ zip (f^.children) [0..] <&>
            \(f', n) → loop (n:trace) f'
          pure ()
-- | Pretty-print an 'HB' tree, one node per line, indented by depth and
-- prefixed with the node's index among its siblings.  @ppf@ renders a node.
dump ∷ (a ~ HB, MonadIO m) ⇒ (a ~ HB ⇒ a → String) → a → m ()
dump ppf = walk
  (liftIO .: (\trace f' →
                putStrLn $ (concat $ take (length trace - 1) (repeat "  "))
                <> (show $ head trace) <> " " <> ppf f'))
-- | A value paired with the 'HB' tree that produced it (an experiment in
-- the spirit of a reactive Dynamic, per the original author's note).
data Derived x = W (x, HB) -- Derived ~ W ~ Dynamic
  deriving Show

-- Map over the carried value, leaving the tree untouched.
instance Functor Derived where
  fmap f (W (x, xs)) = W (f x, xs)
-- Applicative application combines carried values and merges the two trees:
-- the function's tree is prepended to the argument's children under a new
-- "(ER ...)" node.  The commented-out clauses are earlier experiments.
instance Applicative Derived where
  pure x = W (x, HB "vboxEnd" [])
  W (f, fhb@(HB fhbn _)) <*> W (x, (HB xhbn xhbr)) =
    W (f x, HB ("(ER "<>fhbn<>xhbn<> ")") (fhb : xhbr))
  -- W (_, (HB l _)) <*> W (_, (HB r _)) = error $ "HB "<>l<>" <*> HB "<>r
  -- W (_, (HB l _)) <*> W (_, (HB r _)) = error $ "HB "<>l<>" <*> HB "<>r
  -- W (_, (HB l _)) <*> W (_, (HB r _)) = error $ "HB "<>l<>" <*> HB "<>r
  -- W (_, (HB l _)) <*> W (_, (HB r _)) = error $ "HB "<>l<>" <*> HB "<>r
  -- pure x = W (mempty, constDyn (x, Holo.vbox []))
  -- W (fsubs, fvals) <*> W (xsubs, xvals) =
  --   W $ (,)
  --   (zipDynWith (<>) fsubs xsubs)
  --   (zipDynWith ((\(f, fhb@Item{..})
  --                 ( x, xhb)→
  --                 (f x, fhb { hiChildren = hiChildren <> [xhb] })))
  --    fvals xvals)
-- | Lift a showable value into the composed monad/'Derived' functor,
-- recording it as a small "hbox" subtree that labels the value with its
-- 'show' rendering.
class (MonadIO m, Show a) ⇒ Liftable m a where
  lift ∷ a → (m :. Derived) a
  lift x = O ∘ pure ∘ W $ (x,
                           HB "hbox" [ HB "val" []
                                     , HB (show x) []])

-- Strings are liftable via the default method.
instance MonadIO m ⇒ Liftable m String
-- | Container abstraction; currently just a bare list.
type C = []

-- | Wrap a list as a 'C' (a no-op today).
fromL :: [x] -> C x
fromL xs = xs

-- | Unwrap a 'C' back into a list (a no-op today).
toL :: C x -> [x]
toL xs = xs
-- | A container of effectful values; 'tsequence' pulls the effect outside.
data Travable f x = Travable (C (f x))

-- | Sample input: a few lifted strings (the longer list is kept commented out).
pre ∷ (Monad m, x ~ String, m ~ IO) ⇒ Travable (m :. Derived) x
pre = Travable $ fromL $ lift <$> ["a", "b", "c", "d"
                                  -- , "e", "f", "g", "h", "i", "j", "k"
                                  ]

-- | Sequence the effects out of a 'Travable', wrapping the results in 'I'.
tsequence ∷ Applicative f ⇒ Travable f x → f (Travable I x)
tsequence (Travable s) = Travable ∘ (I <$>) <$> sequenceA s

-- | The sample input with its effects sequenced to the outside.
post ∷ (Monad m, x ~ String, m ~ IO) ⇒ (m :. Derived) (Travable I x)
post = tsequence pre
-- | Run the experiment: evaluate 'post', print the carried values, then
-- dump the merged 'HB' tree that the Applicative instance accumulated.
main ∷ IO ()
main = do
  let O act = post
  (W (Travable s, hb)) ← act
  putStrLn $ show $ unI <$> toL s
  dump _s hb
| deepfire/mood | experiments/lifted-record-holotree/Main.hs | agpl-3.0 | 3,815 | 4 | 19 | 1,157 | 1,091 | 593 | 498 | 79 | 1 |
-- Copyright (C) 2016 Red Hat, Inc.
--
-- This library is free software; you can redistribute it and/or
-- modify it under the terms of the GNU Lesser General Public
-- License as published by the Free Software Foundation; either
-- version 2.1 of the License, or (at your option) any later version.
--
-- This library is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- Lesser General Public License for more details.
--
-- You should have received a copy of the GNU Lesser General Public
-- License along with this library; if not, see <http://www.gnu.org/licenses/>.
module BDCS.KeyValue(findKeyValue,
formatKeyValue,
getKeyValue,
insertKeyValue,
keyValueListToJSON)
where
import Control.Monad.IO.Class(MonadIO)
import Data.Aeson((.=), object, toJSON)
import Data.Aeson.Types(KeyValue)
import Data.List(partition)
import qualified Data.Map.Strict as Map
import qualified Data.Text as T
import Database.Esqueleto
import BDCS.DB
import BDCS.KeyType
{-# ANN findKeyValue ("HLint: ignore Use ." :: String) #-}
-- | Look up the database key of the key/value row matching the given key
-- type, value, and extended value exactly (Nothing matches NULL via ==?).
-- Returns Nothing when no such row exists.
findKeyValue :: MonadIO m => KeyType -> Maybe T.Text -> Maybe T.Text -> SqlPersistT m (Maybe (Key KeyVal))
findKeyValue k v e = firstKeyResult $
    select $ from $ \kv -> do
    where_ $ kv ^. KeyValKey_value ==. val k &&.
             kv ^. KeyValVal_value ==? v &&.
             kv ^. KeyValExt_value ==? e
    limit 1
    return $ kv ^. KeyValId
-- | Render a key/value pair for display: @key@, @key = value@, or
-- @key = (value, extended)@ depending on which parts are present.
formatKeyValue :: KeyVal -> T.Text
formatKeyValue KeyVal{..} = let
    rhs = case (keyValVal_value, keyValExt_value) of
        (Just v, Nothing) -> T.concat [ " = ", v ]
        (Just v, Just e) -> T.concat [ " = (", v, ", ", e, ")" ]
        -- No value at all (an ext without a value is not expected): key only.
        _ -> ""
 in
    T.concat [ T.pack $ show keyValKey_value, rhs ]
-- | Fetch the key/value row with the given database key, if it exists.
getKeyValue :: MonadIO m => Key KeyVal -> SqlPersistT m (Maybe KeyVal)
getKeyValue key = firstEntityResult $
    select $ from $ \kv -> do
    where_ $ kv ^. KeyValId ==. val key
    limit 1
    return kv
-- | Insert a new key/value row and return its database key.  No duplicate
-- check is performed here; use 'findKeyValue' first if uniqueness matters.
insertKeyValue :: MonadIO m => KeyType -> Maybe T.Text -> Maybe T.Text -> SqlPersistT m (Key KeyVal)
insertKeyValue k v e =
    insert (KeyVal k v e)
-- | Convert a list of key/value rows into JSON pairs: label keys are
-- grouped under a single "labels" array, every other key maps to either a
-- single JSON object or an array of them when the key repeats.
keyValueListToJSON :: KeyValue t => [KeyVal] -> [t]
keyValueListToJSON lst = let
    isLabelKey (LabelKey _) = True
    isLabelKey _            = False

    -- We want to handle LabelKeys differently from all other KeyTypes, so first let's sort them out.
    (labelKvs, otherKvs) = partition (isLabelKey . keyValKey_value) lst

    -- Convert LabelKeys into tuples of ("labels", json).  All LabelKeys will have the same first value
    -- in their tuple - the string "labels".  This assumes that LabelKeys do not store values or extended
    -- values.  If they start doing that, this will have to change.
    labelVals = map (\kv -> (T.pack "labels", [toJSON $ keyValKey_value kv])) labelKvs

    -- Convert all other KeyTypes into tuples of (key, json).
    otherVals = map (\kv -> (asText $ keyValKey_value kv, [toJSON kv])) otherKvs

    -- A single list can have many KeyVals with the same key (think about rpm-provides and requires
    -- especially).  We use an intermediate map to turn it into a list of (key, [json1, json2, ...]) tuples.
    -- Both types get handled the same way here.
    labelMap = Map.fromListWith (++) labelVals
    otherMap = Map.fromListWith (++) otherVals

    -- If there's only one KeyVal for a given key, strip the list out before converting it to a
    -- json list object.  Otherwise, everything will end up in a list.
    --
    -- On the other hand, we don't do anything to LabelKeys.  This means labels will always end up
    -- in a list named "labels".
    pairs = map (\(k, v) -> case v of
                                [hd] -> k .= hd
                                _    -> k .= v)
                (Map.toList otherMap) ++
            map (uncurry (.=)) (Map.toList labelMap)
 in
    [T.pack "keyvals" .= object pairs]
| atodorov/bdcs | src/BDCS/KeyValue.hs | lgpl-2.1 | 4,106 | 0 | 17 | 1,092 | 849 | 459 | 390 | -1 | -1 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE OverloadedStrings #-}
module Stack.Constants.Config
( distDirFromDir
, workDirFromDir
, distRelativeDir
, imageStagingDir
, projectDockerSandboxDir
, configCacheFile
, configCabalMod
, buildCacheFile
, testSuccessFile
, testBuiltFile
, hpcRelativeDir
, hpcDirFromDir
, objectInterfaceDirL
, templatesDir
) where
import Stack.Prelude
import Stack.Constants
import Stack.Types.Compiler
import Stack.Types.Config
import Stack.Types.PackageIdentifier
import Path
-- | Output .o/.hi directory: the project's work directory plus @odir/@.
objectInterfaceDirL :: HasBuildConfig env => Getting r env (Path Abs Dir)
objectInterfaceDirL = to $ \env -> -- FIXME is this idomatic lens code?
    let workDir = view workDirL env
        root = view projectRootL env
    in root </> workDir </> $(mkRelDir "odir/")
-- | The filename used for dirtiness check of source files.
buildCacheFile :: (MonadThrow m, MonadReader env m, HasEnvConfig env)
               => Path Abs Dir -- ^ Package directory.
               -> m (Path Abs File)
buildCacheFile dir = (</> $(mkRelFile "stack-build-cache")) <$> distDirFromDir dir

-- | The filename used to mark tests as having succeeded
testSuccessFile :: (MonadThrow m, MonadReader env m, HasEnvConfig env)
                => Path Abs Dir -- ^ Package directory
                -> m (Path Abs File)
testSuccessFile dir = (</> $(mkRelFile "stack-test-success")) <$> distDirFromDir dir

-- | The filename used to mark tests as having built
testBuiltFile :: (MonadThrow m, MonadReader env m, HasEnvConfig env)
              => Path Abs Dir -- ^ Package directory
              -> m (Path Abs File)
testBuiltFile dir = (</> $(mkRelFile "stack-test-built")) <$> distDirFromDir dir

-- | The filename used for dirtiness check of config.
configCacheFile :: (MonadThrow m, MonadReader env m, HasEnvConfig env)
                => Path Abs Dir -- ^ Package directory.
                -> m (Path Abs File)
configCacheFile dir = (</> $(mkRelFile "stack-config-cache")) <$> distDirFromDir dir

-- | The filename used for modification check of .cabal
configCabalMod :: (MonadThrow m, MonadReader env m, HasEnvConfig env)
               => Path Abs Dir -- ^ Package directory.
               -> m (Path Abs File)
configCabalMod dir = (</> $(mkRelFile "stack-cabal-mod")) <$> distDirFromDir dir
-- | Directory for HPC work.
hpcDirFromDir
    :: (MonadThrow m, MonadReader env m, HasEnvConfig env)
    => Path Abs Dir -- ^ Package directory.
    -> m (Path Abs Dir)
hpcDirFromDir fp = (fp </>) <$> hpcRelativeDir

-- | Relative location of directory for HPC work.
hpcRelativeDir :: (MonadThrow m, MonadReader env m, HasEnvConfig env)
               => m (Path Rel Dir)
hpcRelativeDir = (</> $(mkRelDir "hpc")) <$> distRelativeDir

-- | Package's build artifacts directory.
distDirFromDir :: (MonadThrow m, MonadReader env m, HasEnvConfig env)
               => Path Abs Dir
               -> m (Path Abs Dir)
distDirFromDir fp = (fp </>) <$> distRelativeDir

-- | Package's working directory.
workDirFromDir :: (MonadReader env m, HasEnvConfig env)
               => Path Abs Dir
               -> m (Path Abs Dir)
workDirFromDir fp = view $ workDirL.to (fp </>)
-- | Directory for project templates: the stack root plus @templates@.
templatesDir :: Config -> Path Abs Dir
templatesDir config = configStackRoot config </> $(mkRelDir "templates")
-- | Relative location of build artifacts: @<workdir>/dist/<platform+cabal>@,
-- where the last component encodes the platform, GHC, and Cabal version so
-- different toolchains never share artifacts.
distRelativeDir :: (MonadThrow m, MonadReader env m, HasEnvConfig env)
                => m (Path Rel Dir)
distRelativeDir = do
    cabalPkgVer <- view cabalVersionL
    platform <- platformGhcRelDir
    wc <- view $ actualCompilerVersionL.to whichCompiler
    -- Cabal version, suffixed with "_ghcjs" if we're using GHCJS.
    envDir <-
        parseRelDir $
        (if wc == Ghcjs then (++ "_ghcjs") else id) $
        packageIdentifierString $
        PackageIdentifier cabalPackageName cabalPkgVer
    -- On Windows the combined path is hashed to keep paths short.
    platformAndCabal <- useShaPathOnWindows (platform </> envDir)
    workDir <- view workDirL
    return $
        workDir </>
        $(mkRelDir "dist") </>
        platformAndCabal
-- | Docker sandbox from project root: @<root>/<workdir>/docker/@.
projectDockerSandboxDir :: (MonadReader env m, HasConfig env)
  => Path Abs Dir      -- ^ Project root
  -> m (Path Abs Dir)  -- ^ Docker sandbox
projectDockerSandboxDir projectRoot = do
  workDir <- view workDirL
  return $ projectRoot </> workDir </> $(mkRelDir "docker/")
-- | Image staging dir from project root: @<root>/<workdir>/image/<index>@,
-- one directory per image index.
imageStagingDir :: (MonadReader env m, HasConfig env, MonadThrow m)
  => Path Abs Dir      -- ^ Project root
  -> Int               -- ^ Index of image
  -> m (Path Abs Dir)  -- ^ Docker sandbox
imageStagingDir projectRoot imageIdx = do
  workDir <- view workDirL
  idxRelDir <- parseRelDir (show imageIdx)
  return $ projectRoot </> workDir </> $(mkRelDir "image") </> idxRelDir
| MichielDerhaeg/stack | src/Stack/Constants/Config.hs | bsd-3-clause | 5,012 | 0 | 13 | 1,226 | 1,238 | 640 | 598 | 116 | 2 |
module Generate.JavaScript.Ports (incoming, outgoing) where
import qualified Data.List as List
import Generate.JavaScript.Helpers
import AST.Type as T
import qualified AST.Variable as Var
import Language.ECMAScript3.Syntax
-- | The JavaScript runtime types a port value can be checked against.
data JSType
    = JSNumber
    | JSBoolean
    | JSString
    | JSArray
    | JSObject [String]

-- | Human-readable description of a 'JSType', used in port error messages.
typeToString :: JSType -> String
typeToString JSNumber = "a number"
typeToString JSBoolean = "a boolean (true or false)"
typeToString JSString = "a string"
typeToString JSArray = "an array"
typeToString (JSObject fields) =
    "an object with fields '" ++ List.intercalate "', '" fields ++ "'"
-- | Reference a function from the native @Elm.Native.Array@ module.
_Array :: String -> Expression ()
_Array = useLazy ["Elm","Native","Array"]

-- | Reference a function from the native @Elm.Native.List@ module.
_List :: String -> Expression ()
_List = useLazy ["Elm","Native","List"]

-- | Reference a function from the @Elm.Maybe@ module.
_Maybe :: String -> Expression ()
_Maybe = useLazy ["Elm","Maybe"]
-- | Generate a runtime type check: if @x@ passes any of the checks for
-- @jsType@, evaluate @continue@; otherwise call the runtime's badPort with
-- a description of the expected type and the offending value.
check :: Expression () -> JSType -> Expression () -> Expression ()
check x jsType continue =
    CondExpr () (jsFold OpLOr checks x) continue throw
  where
    -- Combine predicate generators applied to the same value with op.
    jsFold op checks value =
        foldl1 (InfixExpr () op) (map ($ value) checks)
    throw =
      obj ["_U","badPort"] `call` [ string (typeToString jsType), x ]
    -- Predicates accepted for each JS type; objects additionally require
    -- every listed field to be present.
    checks =
        case jsType of
          JSNumber  -> [typeof "number"]
          JSBoolean -> [typeof "boolean"]
          JSString  -> [typeof "string", instanceof "String"]
          JSArray   -> [instanceof "Array"]
          JSObject fields -> [jsFold OpLAnd (typeof "object" : map member fields)]
-- | Generate the JS conversion function for a value coming into Elm through
-- a port.  Signals are wrapped with the runtime's incomingSignal; anything
-- else becomes a one-argument function converting the raw JS value.
incoming :: CanonicalType -> Expression ()
incoming tipe =
  case tipe of
    Aliased _ t -> incoming t
    App (Type v) [t]
        | Var.isSignal v -> obj ["_P","incomingSignal"] <| incoming t
    _ -> ["v"] ==> inc tipe (ref "v")
-- | Generate the expression converting JS value @x@ into the Elm
-- representation of @tipe@, inserting runtime type checks along the way.
-- Functions and type variables cannot cross the port boundary.
inc :: CanonicalType -> Expression () -> Expression ()
inc tipe x =
    case tipe of
      Lambda _ _ -> error "functions should not be allowed through input ports"
      Var _ -> error "type variables should not be allowed through input ports"
      Aliased _ t ->
          inc t x
      -- Primitives: just check the JS runtime type and pass the value through.
      Type (Var.Canonical Var.BuiltIn name)
          | name == "Int"    -> from JSNumber
          | name == "Float"  -> from JSNumber
          | name == "Bool"   -> from JSBoolean
          | name == "String" -> from JSString
          where
            from checks = check x checks x
      Type name
          | Var.isJson name ->
              x
          | Var.isTuple name ->
              incomingTuple [] x
          | otherwise ->
              error "bad type got to incoming port generation code"
      App f args ->
          case f : args of
            Type name : [t]
                -- null maps to Nothing; anything else is converted and wrapped in Just.
                | Var.isMaybe name ->
                    CondExpr ()
                        (equal x (NullLit ()))
                        (_Maybe "Nothing")
                        (_Maybe "Just" <| inc t x)
                | Var.isList name ->
                    check x JSArray (_List "fromArray" <| array)
                | Var.isArray name ->
                    check x JSArray (_Array "fromJSArray" <| array)
                where
                  -- Convert each element of the JS array recursively.
                  array = DotRef () x (var "map") <| incoming t
            Type name : ts
                | Var.isTuple name -> incomingTuple ts x
            _ -> error "bad ADT got to incoming port generation code"
      Record _ (Just _) ->
          error "bad record got to incoming port generation code"
      Record fields Nothing ->
          check x (JSObject (map fst fields)) object
          where
            -- Rebuild the record field by field, converting each value.
            object = ObjectLit () $ (prop "_", ObjectLit () []) : keys
            keys = map convert fields
            convert (f,t) = (prop f, inc t (DotRef () x (var f)))
-- | Convert a JS array into an Elm tuple object: a "_TupleN" ctor field plus
-- one converted "_i" field per component.
incomingTuple :: [CanonicalType] -> Expression () -> Expression ()
incomingTuple types x =
    check x JSArray (ObjectLit () fields)
  where
    fields = (prop "ctor", ctor) : zipWith convert [0..] types
    ctor = string ("_Tuple" ++ show (length types))
    convert n t =
        ( prop ('_':show n)
        , inc t (BracketRef () x (IntLit () n))
        )
-- | Generate the JS conversion function for a value leaving Elm through a
-- port.  Signals are wrapped with the runtime's outgoingSignal; anything
-- else becomes a one-argument function converting the Elm value.
outgoing :: CanonicalType -> Expression ()
outgoing tipe =
  case tipe of
    Aliased _ t -> outgoing t
    App (Type v) [t]
        | Var.isSignal v -> obj ["_P","outgoingSignal"] <| outgoing t
    _ -> ["v"] ==> out tipe (ref "v")
-- | Generate the expression converting Elm value @x@ of type @tipe@ into its
-- plain JavaScript representation (the inverse of 'inc').  Functions are
-- wrapped so their JS arguments are converted in and the result converted out.
out :: CanonicalType -> Expression () -> Expression ()
out tipe x =
    case tipe of
      Aliased _ t -> out t x
      Lambda _ _
          -- Multi-argument Elm functions are applied through the runtime's
          -- AN helpers; single-argument ones by plain application.
          | numArgs > 1 && numArgs < 10 ->
              func (ref ('A':show numArgs) `call` (x:values))
          | otherwise -> func (foldl (<|) x values)
          where
            ts = T.collectLambdas tipe
            numArgs = length ts - 1
            args = map (\n -> '_' : show n) [0..]
            -- Convert each incoming JS argument to its Elm form.
            values = zipWith inc (init ts) (map ref args)
            func body =
                function (take numArgs args)
                    [ VarDeclStmt () [VarDecl () (var "_r") (Just body)]
                    , ret (out (last ts) (ref "_r"))
                    ]
      Var _ -> error "type variables should not be allowed through input ports"
      -- Primitives already share a representation with JS.
      Type (Var.Canonical Var.BuiltIn name)
          | name `elem` ["Int","Float","Bool","String"] -> x
      Type name
          | Var.isJson name -> x
          | Var.isTuple name -> ArrayLit () []
          | otherwise -> error "bad type got to outgoing port generation code"
      App f args ->
          case f : args of
            Type name : [t]
                -- Nothing becomes null; Just v exposes the converted payload.
                | Var.isMaybe name ->
                    CondExpr ()
                        (equal (DotRef () x (var "ctor")) (string "Nothing"))
                        (NullLit ())
                        (out t (DotRef () x (var "_0")))
                | Var.isArray name ->
                    DotRef () (_Array "toJSArray" <| x) (var "map") <| outgoing t
                | Var.isList name ->
                    DotRef () (_List "toArray" <| x) (var "map") <| outgoing t
            Type name : ts
                -- Tuples become JS arrays of their converted components.
                | Var.isTuple name ->
                    let convert n t = out t $ DotRef () x $ var ('_':show n)
                    in ArrayLit () $ zipWith convert [0..] ts
            _ -> error "bad ADT got to outgoing port generation code"
      Record _ (Just _) ->
          error "bad record got to outgoing port generation code"
      Record fields Nothing ->
          ObjectLit () keys
          where
            keys = map convert fields
            convert (f,t) = (PropId () (var f), out t (DotRef () x (var f)))
| avh4/elm-compiler | src/Generate/JavaScript/Ports.hs | bsd-3-clause | 6,496 | 0 | 20 | 2,340 | 2,298 | 1,118 | 1,180 | 155 | 10 |
module SubHask.Category.HMatrix
where
import GHC.Prim
import Control.DeepSeq
import Data.Typeable
import qualified Prelude as P
import qualified Data.Vector.Generic as VG
import qualified Data.Vector.Generic.Mutable as VGM
import qualified Data.Vector.Storable as VS
import qualified Numeric.LinearAlgebra as LA
import SubHask.Algebra
import SubHask.Category
import SubHask.Category.Trans.Linear
import SubHask.Internal.Prelude
-------------------------------------------------------------------------------
-- | Linear maps represented by hmatrix matrices, indexed by domain and codomain.
data family HMatrix a b
-- Storable-vector-to-storable-vector maps are dense hmatrix matrices.
data instance HMatrix (VS.Vector a) (VS.Vector a) = Matrix (LA.Matrix a)
instance Category HMatrix where
| abailly/subhask | src/SubHask/Category/HMatrix.hs | bsd-3-clause | 652 | 0 | 9 | 78 | 142 | 92 | 50 | -1 | -1 |
main = drawingOf(path[start, end])
start :: Point
start = (0, 0)
end :: Point
end = [2, -4]
| three/codeworld | codeworld-compiler/test/testcase/test_pointType/source.hs | apache-2.0 | 94 | 0 | 8 | 20 | 55 | 32 | 23 | 5 | 1 |
module Inter.Logged where
-- $Id$
import qualified Control.SQL
import System.IO
-- | Bracket an IO action with SQL-log messages marking its start and finish.
logged :: String -> IO a -> IO a
logged msg act =
    Control.SQL.logged (msg ++ " ...")
        *> act
        <* Control.SQL.logged ("... " ++ msg)
| Erdwolf/autotool-bonn | trial/src/Inter/Logged.hs | gpl-2.0 | 248 | 0 | 9 | 66 | 92 | 47 | 45 | 9 | 1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="ru-RU">
<title>Правила пассивного сканирования — бета-версия | ZAP-расширение </title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Содержание</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Индекс</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Поиск</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Избранное</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | thc202/zap-extensions | addOns/pscanrulesBeta/src/main/javahelp/org/zaproxy/zap/extension/pscanrulesBeta/resources/help_ru_RU/helpset_ru_RU.hs | apache-2.0 | 1,091 | 78 | 69 | 162 | 570 | 284 | 286 | -1 | -1 |
import Data.List
-- | Space-leak regression test: multiply 1..100000 with a strict left fold
-- and print the digit count of the result.  The strict foldl' is the point
-- of the test — it must run in constant space.
main :: IO ()
main = print $ length $ show (foldl' (*) 1 [1..100000] :: Integer)
| ghc-android/ghc | testsuite/tests/perf/space_leaks/space_leak_001.hs | bsd-3-clause | 100 | 0 | 9 | 21 | 52 | 28 | 24 | 3 | 1 |
{-
Copyright (C) 2012 Christopher Walker <http://gsd.uwaterloo.ca>
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
-}
-- | Generates simple graph and CVL graph representation for a Clafer model in GraphViz DOT.
module Language.Clafer.Generator.Graph (genSimpleGraph, genCVLGraph, traceAstModule, traceIrModule) where
import Language.Clafer.Common(fst3,snd3,trd3)
import Language.Clafer.Front.Absclafer
{-import Language.Clafer.Front.Mapper(range)-}
import Language.Clafer.Intermediate.Tracing
import Language.Clafer.Intermediate.Intclafer
import Language.Clafer.Generator.Html(genTooltip)
import qualified Data.Map as Map
import Data.Maybe
import Prelude hiding (exp)
-- | Generate a graph in the simplified notation
-- | Generate a graph in the simplified notation.
--
-- Produces a complete GraphViz DOT digraph: the literal header sets the
-- global graph\/node\/edge attributes, and the body is rendered from the
-- AST 'Module' guided by the span-to-IR trace of the 'IModule'.
-- @showRefs@ selects whether reference edges are drawn in a visible
-- colour (see 'refColour').
genSimpleGraph :: Module -> IModule -> String -> Bool -> String
genSimpleGraph m ir name showRefs = cleanOutput $ "digraph \"" ++ name ++ "\"\n{\n\nrankdir=BT;\nranksep=0.3;\nnodesep=0.1;\ngraph [fontname=Sans fontsize=11];\nnode [shape=box color=lightgray fontname=Sans fontsize=11 margin=\"0.02,0.02\" height=0.2 ];\nedge [fontname=Sans fontsize=11];\n" ++ b ++ "}"
  where b = graphSimpleModule m (traceIrModule ir) showRefs
-- | Generate a graph in CVL variability abstraction notation.
--
-- Same structure as 'genSimpleGraph' but with the CVL-specific DOT
-- header and body renderer.
genCVLGraph :: Module -> IModule -> String -> String
genCVLGraph m ir name = cleanOutput $ "digraph \"" ++ name ++ "\"\n{\nrankdir=BT;\nranksep=0.1;\nnodesep=0.1;\nnode [shape=box margin=\"0.025,0.025\"];\nedge [arrowhead=none];\n" ++ b ++ "}"
  where b = graphCVLModule m $ traceIrModule ir
-- Simplified Notation Printer --
--toplevel: (Top_level (Boolean), Maybe Topmost parent, Maybe immediate parent)
-- | Render every top-level declaration of the module in the simplified
-- notation.  Top-level declarations start with an empty parent context:
-- @(True, Nothing, Nothing)@ per the topLevel convention noted above.
graphSimpleModule :: Module -> Map.Map Span [Ir] -> Bool -> String
graphSimpleModule (Module _ decls) irMap showRefs =
    concatMap renderDecl decls
  where
    renderDecl d = graphSimpleDeclaration d (True, Nothing, Nothing) irMap showRefs
-- | Render a single declaration; only element declarations contribute to
-- the graph, everything else renders as the empty string.
graphSimpleDeclaration :: Declaration
                          -> (Bool, Maybe String, Maybe String)
                          -> Map.Map Span [Ir]
                          -> Bool
                          -> String
graphSimpleDeclaration (ElementDecl _ element) topLevel irMap showRefs = graphSimpleElement element topLevel irMap showRefs
graphSimpleDeclaration _ _ _ _ = ""

-- | Render one element.  A nested clafer recurses into
-- 'graphSimpleClafer'; a clafer @`use@ draws a diamond-tailed edge from
-- the topmost parent to the used clafer (suppressed when there is no
-- parent recorded in the topLevel triple).
graphSimpleElement :: Element
                      -> (Bool, Maybe String, Maybe String)
                      -> Map.Map Span [Ir]
                      -> Bool
                      -> String
graphSimpleElement (Subclafer _ clafer) topLevel irMap showRefs = graphSimpleClafer clafer topLevel irMap showRefs
graphSimpleElement (ClaferUse _ name _ _) topLevel irMap _ = if snd3 topLevel == Nothing then "" else "\"" ++ fromJust (snd3 topLevel) ++ "\" -> \"" ++ graphSimpleName name topLevel irMap ++ "\" [arrowhead=vee arrowtail=diamond dir=both style=solid constraint=true weight=5 minlen=2 arrowsize=0.6 penwidth=0.5 ];\n"
graphSimpleElement _ _ _ _ = ""

-- | Render each child element on its own line.
graphSimpleElements :: Elements
                       -> (Bool, Maybe String, Maybe String)
                       -> Map.Map Span [Ir]
                       -> Bool
                       -> String
graphSimpleElements (ElementsEmpty _) _ _ _ = ""
graphSimpleElements (ElementsList _ es) topLevel irMap showRefs = concatMap (\x -> graphSimpleElement x topLevel irMap showRefs ++ "\n") es
-- | Render a single clafer.
--
-- Top-level (abstract) clafers become DOT nodes: the node label is the
-- first line of the clafer's tooltip, the full tooltip is HTML-escaped
-- via 'htmlNewlines', and the node links back to @#uid@.  Children are
-- then rendered with this clafer's uid as both topmost and immediate
-- parent.  Nested clafers produce no node of their own — only their
-- super edges and children are rendered, with the nested clafer's
-- identifier recorded as the immediate parent.
graphSimpleClafer :: Clafer
                     -> (Bool, Maybe String, Maybe String)
                     -> Map.Map Span [Ir]
                     -> Bool
                     -> String
{-graphSimpleClafer (Clafer _ _ _ _ _ _ _ _) _ _ _ = ""-}
graphSimpleClafer (Clafer s abstract gCard id' super' crd init' es) topLevel irMap showRefs
  -- Top-level: emit the node itself, then supers and children.
  | fst3 topLevel == True = let {tooltip = genTooltip (Module noSpan [ElementDecl noSpan (Subclafer noSpan (Clafer noSpan abstract gCard id' super' crd init' es))]) irMap;
                                 uid' = getDivId s irMap} in
      "\"" ++ uid' ++ "\" [label=\"" ++ (head $ lines tooltip) ++ "\" URL=\"#" ++ uid' ++ "\" tooltip=\"" ++ htmlNewlines tooltip ++ "\"];\n"
      ++ graphSimpleSuper super' (True, Just uid', Just uid') irMap showRefs ++ graphSimpleElements es (False, Just uid', Just uid') irMap showRefs
  -- Nested: keep the topmost parent, record our own ident as parent.
  | otherwise = let (PosIdent (_,ident')) = id' in
      graphSimpleSuper super' (fst3 topLevel, snd3 topLevel, Just ident') irMap showRefs ++ graphSimpleElements es (fst3 topLevel, snd3 topLevel, Just ident') irMap showRefs
-- | Render the supertype edge of a clafer, if any.
--
-- The target node is picked out of the identifiers occurring in the
-- super set-expression: the local @parent@ scan keeps the first
-- identifier that starts with @c@ and contains an underscore — i.e. one
-- that looks like a generated clafer uid.  If no such identifier exists
-- (@"error"@ sentinel) nothing is rendered.
graphSimpleSuper :: Super
                    -> (Bool, Maybe String, Maybe String)
                    -> Map.Map Span [Ir]
                    -> Bool
                    -> String
graphSimpleSuper (SuperEmpty _) _ _ _ = ""
graphSimpleSuper (SuperSome _ superHow setExp) topLevel irMap showRefs = let {parent [] = "error";
                                                                             parent (uid'@('c':xs):xss) = if '_' `elem` xs then uid' else parent xss;
                                                                             parent (_:xss) = parent xss;
                                                                             super' = parent $ graphSimpleSetExp setExp topLevel irMap} in
  if super' == "error" then "" else "\"" ++ fromJust (snd3 topLevel) ++ "\" -> \"" ++ parent (graphSimpleSetExp setExp topLevel irMap) ++ "\"" ++ graphSimpleSuperHow superHow topLevel irMap showRefs

-- | Edge attributes for the three kinds of super relation:
-- @:@ (inheritance), @->@ (reference) and @->>@ (multi-reference).
-- Reference edges are labelled with the immediate parent's name when
-- drawn below the top level, and coloured per 'refColour'.
graphSimpleSuperHow :: SuperHow -> (Bool, Maybe String, Maybe String) -> Map.Map Span [Ir] -> Bool -> String
graphSimpleSuperHow (SuperColon _) topLevel _ _ = " [" ++ if fst3 topLevel == True
                                                         then "arrowhead=onormal constraint=true weight=100];\n"
                                                         else "arrowhead=vee arrowtail=diamond dir=both style=solid weight=10 color=gray arrowsize=0.6 minlen=2 penwidth=0.5 constraint=true];\n"
graphSimpleSuperHow (SuperArrow _) topLevel _ showRefs = " [arrowhead=vee arrowsize=0.6 penwidth=0.5 constraint=true weight=10 color=" ++ refColour showRefs ++ " fontcolor=" ++ refColour showRefs ++ (if fst3 topLevel == True then "" else " label=" ++ (fromJust $ trd3 topLevel)) ++ "];\n"
graphSimpleSuperHow (SuperMArrow _) topLevel _ showRefs = " [arrowhead=veevee arrowsize=0.6 minlen=1.5 penwidth=0.5 constraint=true weight=10 color=" ++ refColour showRefs ++ " fontcolor=" ++ refColour showRefs ++ (if fst3 topLevel == True then "" else " label=" ++ (fromJust $ trd3 topLevel)) ++ "];\n"
-- | DOT colour for reference edges: visible light gray when reference
-- rendering is enabled, fully transparent otherwise.
refColour :: Bool -> String
refColour showRefs
  | showRefs  = "lightgray"
  | otherwise = "transparent"
-- | Render a (possibly dotted) name by resolving each module-qualified
-- identifier and joining them with spaces.
graphSimpleName :: Name -> (Bool, Maybe String, Maybe String) -> Map.Map Span [Ir] -> String
graphSimpleName (Path _ modids) topLevel irMap = unwords $ map (\x -> graphSimpleModId x topLevel irMap) modids

-- | Unwrap a module identifier down to its positioned identifier.
graphSimpleModId :: ModId -> (Bool, Maybe String, Maybe String) -> Map.Map Span [Ir] -> String
graphSimpleModId (ModIdIdent _ posident) _ irMap = graphSimplePosIdent posident irMap

-- | Resolve a positioned identifier to its unique id via the trace map.
graphSimplePosIdent :: PosIdent -> Map.Map Span [Ir] -> String
graphSimplePosIdent (PosIdent (pos, id')) irMap = getUid (PosIdent (pos, id')) irMap
{-graphSimpleCard _ _ _ = ""
graphSimpleConstraint _ _ _ = ""
graphSimpleDecl _ _ _ = ""
graphSimpleInit _ _ _ = ""
graphSimpleInitHow _ _ _ = ""
graphSimpleExp _ _ _ = ""
graphSimpleQuant _ _ _ = ""
graphSimpleGoal _ _ _ = ""
graphSimpleSoftConstraint _ _ _ = ""
graphSimpleAbstract _ _ _ = ""
graphSimpleGCard _ _ _ = ""
graphSimpleNCard _ _ _ = ""
graphSimpleExInteger _ _ _ = ""-}
-- | Collect the resolved leaf identifiers of a set expression.
-- Every binary set operator simply concatenates the identifiers found in
-- its two operands; only 'ClaferId' leaves contribute names.
graphSimpleSetExp :: SetExp -> (Bool, Maybe String, Maybe String) -> Map.Map Span [Ir] -> [String]
graphSimpleSetExp (ClaferId _ name) topLevel irMap = [graphSimpleName name topLevel irMap]
graphSimpleSetExp (Union _ set1 set2) topLevel irMap = graphSimpleSetExp set1 topLevel irMap ++ graphSimpleSetExp set2 topLevel irMap
graphSimpleSetExp (UnionCom _ set1 set2) topLevel irMap = graphSimpleSetExp set1 topLevel irMap ++ graphSimpleSetExp set2 topLevel irMap
graphSimpleSetExp (Difference _ set1 set2) topLevel irMap = graphSimpleSetExp set1 topLevel irMap ++ graphSimpleSetExp set2 topLevel irMap
graphSimpleSetExp (Intersection _ set1 set2) topLevel irMap = graphSimpleSetExp set1 topLevel irMap ++ graphSimpleSetExp set2 topLevel irMap
graphSimpleSetExp (Domain _ set1 set2) topLevel irMap = graphSimpleSetExp set1 topLevel irMap ++ graphSimpleSetExp set2 topLevel irMap
graphSimpleSetExp (Range _ set1 set2) topLevel irMap = graphSimpleSetExp set1 topLevel irMap ++ graphSimpleSetExp set2 topLevel irMap
graphSimpleSetExp (Join _ set1 set2) topLevel irMap = graphSimpleSetExp set1 topLevel irMap ++ graphSimpleSetExp set2 topLevel irMap
{-graphSimpleEnumId :: EnumId -> (Bool, Maybe String, Maybe String) -> Map.Map Span [Ir] -> String
graphSimpleEnumId (EnumIdIdent posident) _ irMap = graphSimplePosIdent posident irMap
graphSimpleEnumId (PosEnumIdIdent _ posident) topLevel irMap = graphSimpleEnumId (EnumIdIdent posident) topLevel irMap-}
-- CVL Printer --
--parent is Maybe the uid of the immediate parent
-- | Render every top-level declaration in the CVL notation; top-level
-- declarations have no parent node (hence 'Nothing').
graphCVLModule :: Module -> Map.Map Span [Ir] -> String
graphCVLModule (Module _ decls) irMap =
    concatMap (\d -> graphCVLDeclaration d Nothing irMap) decls
-- | Render a declaration in CVL notation; only element declarations
-- contribute output.  The 'Maybe String' is the immediate parent's uid.
graphCVLDeclaration :: Declaration -> Maybe String -> Map.Map Span [Ir] -> String
graphCVLDeclaration (ElementDecl _ element) parent irMap = graphCVLElement element parent irMap
graphCVLDeclaration _ _ _ = ""

-- | Render one element: nested clafers, clafer uses (dashed @onormal@
-- edges, suppressed at top level) and constraints.
graphCVLElement :: Element -> Maybe String -> Map.Map Span [Ir] -> String
graphCVLElement (Subclafer _ clafer) parent irMap = graphCVLClafer clafer parent irMap
graphCVLElement (ClaferUse _ name _ _) parent irMap = if parent == Nothing then "" else "?" ++ " -> " ++ graphCVLName name parent irMap ++ " [arrowhead = onormal style = dashed constraint = false];\n"
{-graphCVLElement (PosClaferUse s _ _ _) parent irMap = if parent == Nothing then "" else "?" ++ " -> " ++ getUseId s irMap ++ " [arrowhead = onormal style = dashed constraint = false];\n"-}
graphCVLElement (Subconstraint _ constraint) parent irMap = graphCVLConstraint constraint parent irMap
graphCVLElement _ _ _ = ""

-- | Render each child element on its own line.
graphCVLElements :: Elements -> Maybe String -> Map.Map Span [Ir] -> String
graphCVLElements (ElementsEmpty _) _ _ = ""
graphCVLElements (ElementsList _ es) parent irMap = concatMap (\x -> graphCVLElement x parent irMap ++ "\n") es
-- | Render a clafer as a CVL node.
--
-- The node label is the uid (prefix stripped via 'dropUid') plus the
-- super notation; choice cardinalities (see 'choiceCard') are shown with
-- a rounded box instead of an explicit @[lo..hi]@ annotation.  A group
-- cardinality spawns an auxiliary triangle node @g<uid>@ that children
-- attach to instead of the clafer itself; optional clafers (lower bound
-- @0@) connect to their parent with a dashed edge.
graphCVLClafer :: Clafer -> Maybe String -> Map.Map Span [Ir] -> String
{-graphCVLClafer (Clafer _ _ _ _ _ _ _) _ _ = ""-}
graphCVLClafer (Clafer s _ gCard _ super' crd _ es) parent irMap
  = let {{-tooltip = genTooltip (Module [ElementDecl (Subclafer (Clafer abstract gCard id' super' crd init' es))]) irMap;-}
         uid' = getDivId s irMap;
         gcrd = graphCVLGCard gCard parent irMap;
         super'' = graphCVLSuper super' parent irMap} in
    "\"" ++ uid' ++ "\" [URL=\"#" ++ uid' ++ "\" label=\"" ++ dropUid uid' ++ super'' ++ (if choiceCard crd then "\" style=rounded" else " [" ++ graphCVLCard crd parent irMap ++ "]\"")
    ++ (if super'' == "" then "" else " shape=oval") ++ "];\n"
    ++ (if gcrd == "" then "" else "g" ++ uid' ++ " [label=\"" ++ gcrd ++ "\" fontsize=10 shape=triangle];\ng" ++ uid' ++ " -> " ++ uid' ++ " [weight=10];\n")
    ++ (if parent==Nothing then "" else uid' ++ " -> " ++ fromJust parent ++ (if lowerCard crd == "0" then " [style=dashed]" else "") ++ ";\n")
    ++ graphCVLElements es (if gcrd == "" then (Just uid') else (Just $ "g" ++ uid')) irMap
-- | Render the super notation for a node label: the relation symbol
-- followed by all identifiers of the super set-expression.
graphCVLSuper :: Super -> Maybe String -> Map.Map Span [Ir] -> String
graphCVLSuper (SuperEmpty _) _ _ = ""
graphCVLSuper (SuperSome _ superHow setExp) parent irMap = graphCVLSuperHow superHow ++ concat (graphCVLSetExp setExp parent irMap)

-- | Textual symbol for each super relation kind.
graphCVLSuperHow :: SuperHow -> String
graphCVLSuperHow (SuperColon _) = ":"
graphCVLSuperHow (SuperArrow _) = "->"
graphCVLSuperHow (SuperMArrow _) = "->>"

-- | Render a dotted name by resolving each component identifier.
graphCVLName :: Name -> Maybe String -> Map.Map Span [Ir] -> String
graphCVLName (Path _ modids) parent irMap = unwords $ map (\x -> graphCVLModId x parent irMap) modids

-- | Unwrap a module identifier down to its positioned identifier.
graphCVLModId :: ModId -> Maybe String -> Map.Map Span [Ir] -> String
graphCVLModId (ModIdIdent _ posident) _ irMap = graphCVLPosIdent posident irMap

-- | Resolve a positioned identifier to its unique id via the trace map.
graphCVLPosIdent :: PosIdent -> Map.Map Span [Ir] -> String
graphCVLPosIdent (PosIdent (pos, id')) irMap = getUid (PosIdent (pos, id')) irMap
-- | Render a constraint as a parallelogram node labelled with the
-- (HTML-escaped) constraint text, connected to its parent when one
-- exists.
graphCVLConstraint :: Constraint -> Maybe String -> Map.Map Span [Ir] -> String
{-graphCVLConstraint (Constraint _) _ _ = ""-}
graphCVLConstraint (Constraint s exps') parent irMap = let body' = htmlNewlines $ genTooltip (Module noSpan [ElementDecl noSpan (Subconstraint noSpan (Constraint noSpan exps'))]) irMap;
                                                           uid' = "\"" ++ getExpId s irMap ++ "\""
                                                       in uid' ++ " [label=\"" ++ body' ++ "\" shape=parallelogram];\n" ++
                                                          if parent == Nothing then "" else uid' ++ " -> \"" ++ fromJust parent ++ "\";\n"
-- | Render a clafer cardinality as @lo..hi@ text (default is @1..1@).
graphCVLCard :: Card -> Maybe String -> Map.Map Span [Ir] -> String
graphCVLCard (CardEmpty _) _ _ = "1..1"
graphCVLCard (CardLone _) _ _ = "0..1"
graphCVLCard (CardSome _) _ _ = "1..*"
graphCVLCard (CardAny _) _ _ = "0..*"
graphCVLCard (CardNum _ (PosInteger (_, n))) _ _ = n ++ ".." ++ n
graphCVLCard (CardInterval _ ncard) parent irMap = graphCVLNCard ncard parent irMap

-- | Render an explicit numeric interval @n..hi@.
graphCVLNCard :: NCard -> Maybe String -> Map.Map Span [Ir] -> String
graphCVLNCard (NCard _ (PosInteger (_, num)) exInteger) parent irMap = num ++ ".." ++ graphCVLExInteger exInteger parent irMap

-- | Render the upper bound: @*@ for unbounded, otherwise the number.
graphCVLExInteger :: ExInteger -> Maybe String -> Map.Map Span [Ir] -> String
graphCVLExInteger (ExIntegerAst _) _ _ = "*"
graphCVLExInteger (ExIntegerNum _ (PosInteger(_, num))) _ _ = num

-- | Render a group cardinality; the empty string means no group-
-- cardinality triangle is drawn (see 'graphCVLClafer').
graphCVLGCard :: GCard -> Maybe String -> Map.Map Span [Ir] -> String
graphCVLGCard (GCardInterval _ ncard) parent irMap = graphCVLNCard ncard parent irMap
graphCVLGCard (GCardEmpty _) _ _ = ""
graphCVLGCard (GCardXor _) _ _ = "1..1"
graphCVLGCard (GCardOr _) _ _ = "1..*"
graphCVLGCard (GCardMux _) _ _ = "0..1"
graphCVLGCard (GCardOpt _) _ _ = ""
{-graphCVLDecl _ _ _ = ""
graphCVLInit _ _ _ = ""
graphCVLInitHow _ _ _ = ""
graphCVLExp _ _ _ = ""
graphCVLQuant _ _ _ = ""
graphCVLGoal _ _ _ = ""
graphCVLSoftConstraint _ _ _ = ""
graphCVLAbstract _ _ _ = ""-}
-- | Collect the resolved leaf identifiers of a set expression (CVL
-- variant); binary operators concatenate the results of both operands.
graphCVLSetExp :: SetExp -> Maybe String -> Map.Map Span [Ir] -> [String]
graphCVLSetExp (ClaferId _ name) parent irMap = [graphCVLName name parent irMap]
graphCVLSetExp (Union _ set1 set2) parent irMap = graphCVLSetExp set1 parent irMap ++ graphCVLSetExp set2 parent irMap
graphCVLSetExp (UnionCom _ set1 set2) parent irMap = graphCVLSetExp set1 parent irMap ++ graphCVLSetExp set2 parent irMap
graphCVLSetExp (Difference _ set1 set2) parent irMap = graphCVLSetExp set1 parent irMap ++ graphCVLSetExp set2 parent irMap
graphCVLSetExp (Intersection _ set1 set2) parent irMap = graphCVLSetExp set1 parent irMap ++ graphCVLSetExp set2 parent irMap
graphCVLSetExp (Domain _ set1 set2) parent irMap = graphCVLSetExp set1 parent irMap ++ graphCVLSetExp set2 parent irMap
graphCVLSetExp (Range _ set1 set2) parent irMap = graphCVLSetExp set1 parent irMap ++ graphCVLSetExp set2 parent irMap
graphCVLSetExp (Join _ set1 set2) parent irMap = graphCVLSetExp set1 parent irMap ++ graphCVLSetExp set2 parent irMap
{-graphCVLEnumId (EnumIdIdent posident) _ irMap = graphCVLPosIdent posident irMap
graphCVLEnumId (PosEnumIdIdent _ posident) parent irMap = graphCVLEnumId (EnumIdIdent posident) parent irMap-}
-- | True when the cardinality is a "choice": both bounds are 0 or 1
-- (e.g. @0..1@, @1..1@, the implicit default, or @lone@).  Such clafers
-- are drawn as rounded boxes instead of carrying an explicit interval
-- annotation (see 'graphCVLClafer').  Bounds are compared as the raw
-- digit strings from the parser.
choiceCard :: Card -> Bool
choiceCard (CardEmpty _) = True
choiceCard (CardLone _ )= True
choiceCard (CardInterval _ nCard) = case nCard of
  NCard _ (PosInteger (_, low)) exInteger -> choiceCard' low exInteger
  where choiceCard' low exInteger = if low == "0" || low == "1"
                                    then case exInteger of
                                      ExIntegerAst _ -> False
                                      ExIntegerNum _ (PosInteger (_, high)) -> high == "0" || high == "1"
                                    else False
choiceCard _ = False

-- | Textual lower bound of a cardinality: the digits before the @..@ in
-- the rendered interval.
lowerCard :: Card -> String
lowerCard crd = takeWhile (/= '.') $ graphCVLCard crd Nothing Map.empty
--Miscellaneous functions
-- | Strip the generated prefix (everything up to and including the first
-- underscore) from a unique identifier.  Identifiers with no underscore,
-- or with nothing after it, are returned unchanged.
dropUid :: String -> String
dropUid uid' =
    case break (== '_') uid' of
        (_, '_' : suffix) | not (null suffix) -> suffix
        _                                     -> uid'
-- | Total version of 'tail': drop the first element, returning the empty
-- list for the empty list.  Generalised from 'String' to any list type
-- and expressed via the standard 'drop' instead of manual recursion.
rest :: [a] -> [a]
rest = drop 1
-- | Resolve a parser-level identifier to its unique IR identifier.
--
-- The identifier's source span is looked up in the span-to-IR trace map.
-- When an entry exists, the identifiers mentioned in the traced 'IRPExp'
-- are scanned for one whose suffix (after the @_@ separator, see
-- 'dropUid') matches the plain name; otherwise the plain name itself is
-- returned.
--
-- Improvement: a single 'Map.lookup' scrutinised with @case@ replaces
-- the original double lookup guarded by an @== Nothing@ comparison plus
-- 'fromJust'.  The traced entry is still expected to be a non-empty list
-- headed by an 'IRPExp' (same partiality as before).
getUid :: PosIdent -> Map.Map Span [Ir] -> String
getUid (PosIdent (pos, id')) irMap =
    case Map.lookup (getSpan (PosIdent (pos, id'))) irMap of
      Nothing  -> id'
      Just irs -> let IRPExp pexp = head irs in
                  findUid id' $ getIdentPExp pexp
  where {getIdentPExp (PExp _ _ _ exp') = getIdentIExp exp';
         getIdentIExp (IFunExp _ exps') = concatMap getIdentPExp exps';
         getIdentIExp (IClaferId _ id'' _) = [id''];
         getIdentIExp (IDeclPExp _ _ pexp) = getIdentPExp pexp;
         getIdentIExp _ = [];
         findUid name (x:xs) = if name == dropUid x then x else findUid name xs;
         findUid name [] = name}
-- | Unique identifier of the clafer traced at the given span, or the
-- @"Uid not Found"@ placeholder when the span has no trace entry.
--
-- Improvement (applied consistently to the three lookups below): a
-- single 'maybe' over 'Map.lookup' replaces the original double lookup
-- guarded by @== Nothing@ plus 'fromJust'.  The traced entry is still
-- expected to be non-empty with the appropriate 'Ir' constructor first
-- (same partiality as before).
getDivId :: Span -> Map.Map Span [Ir] -> String
getDivId s irMap =
    maybe "Uid not Found"
          (\irs -> let IRClafer iClaf = head irs in _uid iClaf)
          (Map.lookup s irMap)

{-getSuperId :: Span -> Map.Map Span [Ir] -> String
getSuperId s irMap = if Map.lookup s irMap == Nothing
                      then "Uid not Found"
                      else let IRPExp pexp = head $ fromJust $ Map.lookup s irMap in
                        sident $ exp pexp-}

-- | Identifier of the first supertype reference of the clafer traced at
-- the given span (used when rendering clafer uses).
getUseId :: Span -> Map.Map Span [Ir] -> String
getUseId s irMap =
    maybe "Uid not Found"
          (\irs -> let IRClafer iClaf = head irs in
                   _sident $ _exp $ head $ _supers $ _super iClaf)
          (Map.lookup s irMap)

-- | Identifier of the expression traced at the given span.
getExpId :: Span -> Map.Map Span [Ir] -> String
getExpId s irMap =
    maybe "Uid not Found"
          (\irs -> let IRPExp pexp = head irs in _pid pexp)
          (Map.lookup s irMap)
{-while :: Bool -> [IExp] -> [IExp]
while bool exp' = if bool then exp' else []-}
-- | Encode newline characters as the HTML numeric entity @&#10;@ so that
-- multi-line text survives inside a quoted DOT attribute value.
htmlNewlines :: String -> String
htmlNewlines = concatMap escape
  where
    escape '\n' = "&#10;"
    escape c    = [c]
-- | Post-process the generated DOT text:
--
--   * drop a single space immediately preceding a newline,
--   * collapse runs of blank lines into one newline,
--   * drop a single space immediately preceding a @<br>@ tag.
--
-- The clause order matters: each rewrite re-examines the remainder so
-- consecutive blank lines collapse fully.
cleanOutput :: String -> String
cleanOutput "" = ""
cleanOutput (' ':'\n':xs) = cleanOutput $ '\n':xs
cleanOutput ('\n':'\n':xs) = cleanOutput $ '\n':xs
cleanOutput (' ':'<':'b':'r':'>':xs) = "<br>"++cleanOutput xs
cleanOutput (x:xs) = x : cleanOutput xs
| juodaspaulius/clafer-old-customBNFC | src/Language/Clafer/Generator/Graph.hs | mit | 20,265 | 0 | 19 | 5,018 | 5,302 | 2,694 | 2,608 | 213 | 7 |
module RenameMeToo where
import {-# SOURCE #-} RenameMe
-- | 'Even' refers to 'Odd' from "RenameMe"; the @{-# SOURCE #-}@ import
-- above breaks the resulting module cycle through an hs-boot file.
data Even = Zero | Odd Odd
-- NOTE(review): exported value; its purpose is not evident from this
-- module alone — presumably consumed by the test driver.
y = False
| antalsz/hs-to-coq | examples/tests/RenameMeToo.hs | mit | 97 | 0 | 6 | 22 | 25 | 16 | 9 | 4 | 1 |
{-# LANGUAGE NamedFieldPuns #-}
module Game.World.Gen.Frequency
( getDistribution
, roomTypeFreqDist
, roomSizeFreqDist
)
where
import System.Random
import Data.Maybe
import Game.World.Gen.Types
import Control.Lens
-- | A discrete frequency distribution: outcomes of type @a@ paired with
-- their relative weights of type @b@.
data FreqDist a b = FreqDist
    { freqDist :: [(a, b)]
    }
-- | Draw one outcome from the distribution inside the generator context:
-- the random generator is read from 'stateRndGen', threaded through
-- '_getDistribution', and the advanced generator is written back.
getDistribution :: (Show b, Num b, Ord b, Random b)
    => FreqDist a b -> GenContext a
getDistribution freq = do
    g <- use stateRndGen
    let (res, g') = _getDistribution g freq
    stateRndGen .= g'
    return res
-- | Pure weighted sampling: draw a value uniformly from
-- @[0 .. total-1]@ (where @total@ is the sum of all weights) and select
-- the entry whose cumulative weight band contains it.  The 'foldr' scans
-- the list from the right, accumulating weights until the running sum
-- exceeds the drawn value.
--
-- NOTE(review): 'fromJust' assumes a non-empty distribution with a
-- positive total weight — confirm callers guarantee this.
_getDistribution :: (Show b, Num b, Ord b, Random b, RandomGen g)
    => g -> FreqDist a b -> (a, g)
_getDistribution g FreqDist { freqDist } = (result, g')
    where
        range = (0, (sum . map snd $ freqDist) - 1)
        (randVal, g') = randomR range g
        result = fromJust . fst $
            foldr findResult (Nothing, 0) freqDist

        -- Accumulate weight until the band containing randVal is found;
        -- once an outcome is chosen it is carried through unchanged.
        findResult (a, b) (Nothing, s) =
            if s + b > randVal
                then (Just a, 0)
                else (Nothing, s + b)
        findResult _ (Just res, _) = (Just res, 0)
-- | Room-type distribution: ordinary rooms are nine times as likely as
-- boss rooms.
roomTypeFreqDist :: FreqDist RoomType Int
roomTypeFreqDist = FreqDist
    [ (DefaultRoom, 9 :: Int)
    , (BossRoom, 1)
    ]
-- | Size distribution for a room of the given type.  Ordinary rooms draw
-- uniformly from every width/height combination between 2 and 4; boss
-- rooms are always the single largest (4x4) size.
roomSizeFreqDist :: RoomType -> FreqDist RoomSize Int
roomSizeFreqDist DefaultRoom =
    FreqDist [ (RoomSize w h, 1 :: Int) | w <- [2, 3, 4], h <- [2, 3, 4] ]
roomSizeFreqDist BossRoom =
    FreqDist [ (RoomSize 4 4, 1) ]
-- | Create a fresh random generator — presumably a convenience for
-- interactive (GHCi) experimentation with the sampling functions above.
initTest :: IO StdGen
initTest =
    newStdGen
module InterBasics where
import Data.Char
type Input = [String]
type Output = [String]
type RawInput = [Char]
type RawOutput = [Char]
{-
	Contains various auxiliary functions and definitions for
Input/output
Version 1.0 18 September 1989
Version 2.0 25 May 2017
Simon Thompson, Computing Lab., Univ. of Kent.
[email protected]
-}
{-
The fundamental type definitions
Note that we have chosen to consider Input and Output as
consisting of streams of Strings rather than streams of
Char. Conversion to and from the ``raw'' versions is trivial:
RawInput is split at newlines, and RawOutput is the result
of concatenating the String forming the output stream. (Note
that this means that we are explicit about the placing of
	newlines etc.)
-}
-- | The whitespace characters used throughout the interaction helpers.
newline,space,tab :: Char
newline = '\n'
space = ' '
tab = '\t'

-- | Standard input, named as a file: the controlling terminal device.
stdin :: [Char]
stdin = "/dev/tty"
{-
Some sensible names. Note that stdin names standard Input
as a file.
-}
-- | Split raw input into lines (i.e. split at newline characters).
split :: RawInput -> Input

-- | Split input at every occurrence of the given delimiter character.
--
-- Every delimiter ends a field, so a trailing delimiter yields a final
-- empty field and the empty input yields a single empty field.
gen_split :: Char -> RawInput -> Input

split = gen_split newline

gen_split delim input =
    case break (== delim) input of
        (field, [])          -> [field]
        (field, _ : remains) -> field : gen_split delim remains
{-
Splitting lists into lists of lists according to membership
of a `split_set'
Lists can be split into sublists in two slightly different
ways, depending on how we treat repeated occurrences of
members of the split_set. We can either treat a repetition
as delimiting an empty list, as we do in `cut', or we
can treat repetitions as single instances, which we do in
`simple_cut'.
Both flavours have their uses.
-}
-- | Split a list into sublists at every element of @splitSet@.
--
-- Consecutive delimiters produce empty sublists; a trailing delimiter
-- does not produce a trailing empty sublist; the empty input yields no
-- sublists at all.
--
-- Improvement: the current chunk is accumulated in reverse and reversed
-- once on emission, replacing the original @l ++ [a]@ append that made
-- each chunk quadratic in its length.
cut :: Eq a => [a] -> [a] -> [[a]]
cut _ [] = []
cut splitSet xs = go [] xs
  where
    -- acc holds the current chunk in reverse order.
    go acc []
      | null acc  = []
      | otherwise = [reverse acc]
    go acc (a:as)
      | a `elem` splitSet = reverse acc : go [] as
      | otherwise         = go (a:acc) as
-- | Like 'cut', but runs of consecutive delimiters are treated as a
-- single delimiter, so no empty sublists are ever produced.
--
-- Same improvement as 'cut': a reversed accumulator replaces the
-- quadratic @l ++ [a]@ append.
simple_cut :: Eq a => [a] -> [a] -> [[a]]
simple_cut _ [] = []
simple_cut splitSet xs = go [] xs
  where
    -- acc holds the current chunk in reverse order.
    go acc []
      | null acc  = []
      | otherwise = [reverse acc]
    go acc (a:as)
      | a `notElem` splitSet = go (a:acc) as
      | null acc             = go [] as
      | otherwise            = reverse acc : go [] as
-- | Reverse a list.  This is exactly the Prelude's 'reverse', kept under
-- its original name for the existing callers in this module.
rever :: [a] -> [a]
rever = reverse

-- | Shunt the first list, element by element, onto the front of the
-- second: @shunt xs ys == reverse xs ++ ys@.
shunt :: [a] -> [a] -> [a]
shunt [] m = m
shunt (a:x) m = shunt x (a:m)
join :: Output -> RawOutput
-- | Join the lines of the output stream into raw output; an alias for
-- 'concat'.  Newlines are expected to appear explicitly in the lines
-- themselves (see the module commentary above).
join = concat -- from the standard envt.
{-
Dealing with basic types, of numbers and Characters.
-}
-- | Character classification helpers: decimal digits, letters, and the
-- union of the two.
numeric :: Char -> Bool
alpha :: Char -> Bool
alphanumeric :: Char -> Bool

numeric = isDigit
alpha = isLetter
alphanumeric c = isLetter c || isDigit c
{-
Testing for Strings consisting of particular kinds of
Character
Note that the empty String is classed as being in the
respective classes.
-}
-- | String-level classification: a string is numeric / alphabetic /
-- alphanumeric when every character is.  As before, the empty string
-- belongs to all of these classes.
numeric_String :: String -> Bool
alpha_String :: String -> Bool
alphanumeric_String :: String -> Bool
integer_String :: String -> Bool

-- 'all' replaces the original @foldr (&&) True . map p@ encoding.
numeric_String = all numeric
alpha_String = all alpha
alphanumeric_String = all alphanumeric

-- | An integer string is a digit string optionally preceded by a minus
-- sign.  The pattern match replaces the original head/tail pair, which
-- was only safe on the empty string because (||) short-circuits.
integer_String x = numeric_String x || isNegative x
  where
    isNegative ('-':ds) = numeric_String ds
    isNegative _        = False
{-
Converting Strings to numbers and vice versa.
The empty numeric String is translated as zero.
-}
-- | Convert a decimal digit string to an 'Int'.  The empty string reads
-- as zero; any non-digit character raises an error (partial).
string_posint :: String -> Int
-- | Like 'string_posint', but accepts an optional leading minus sign.
string_int :: String -> Int

-- Processes the reversed string so each digit's weight is a factor of
-- ten per recursion step.
string_posint
      = conv_aux . rever
        where
        conv_aux (a:x)
          | numeric a = (ord a - ord '0') + 10 * conv_aux x
          | otherwise = error "String_posint found non-numeric Character"
        conv_aux [] = 0

string_int (a:x)
      | numeric a = string_posint (a:x)
      | a == '-' = - (string_posint x)
      | otherwise = error "unexpected first Character to String_int"
string_int [] = 0
-- | Render an integer in decimal; simply 'show'.
num_String :: Integer -> String
num_String = show -- A standard function
| simonjohnthompson/Interaction | InterBasics.hs | mit | 6,103 | 0 | 12 | 2,566 | 1,122 | 587 | 535 | 79 | 2 |
module Data.Monetary.USD where
-- Representation of US dollars as rational numbers
-- 2015-12-07: Corrected read parse errors for stupid (AAPL) numbers like: $.12
import qualified Data.ByteString.Char8 as B
import Data.Monoid
import Control.Presentation
import Data.Monetary.Currency
import Database.PostgreSQL.Simple.FromField
import Database.PostgreSQL.Simple.ToField
import Database.PostgreSQL.Simple.Types
-- Spraken dollars, mang!
-- | US dollars stored exactly as a 'Rational' — no binary floating-point
-- rounding of monetary amounts.
data USD = USD Rational deriving Eq

-- one line of declaration followed by HOW many lines of instantiation?
-- | Ordering compares the underlying rational amounts.
instance Ord USD where USD x <= USD y = x <= y
-- | Shown as @$@ followed by the amount to two decimal places ('laxmi').
instance Show USD where show (USD x) = '$':laxmi 2 x
-- | Parses @$1.12@ and also the bare-fraction form @$.12@: a leading
-- zero is prepended so 'mknMoney' always sees a digit before the point.
instance Read USD where readsPrec _ ('$':val) = [(mknMoney USD ('0':val), "")]
-- | PostgreSQL unmarshalling: the column is first read as a 'Double'
-- and then lifted to 'Rational'.
-- NOTE(review): the Double round-trip (here and in 'ToField') limits
-- precision to what Double can represent — confirm this is acceptable.
instance FromField USD where
   fromField f bs = USD . toRational <$> fromDoubleField f bs

-- | Parse a raw field as a 'Double'; NULL columns are reported via
-- 'returnError'.
fromDoubleField :: FieldParser Double
fromDoubleField f bs =
   maybe (returnError UnexpectedNull f "") (pure . read . B.unpack) bs

-- | PostgreSQL marshalling: stored as the 'Double' value of the amount.
instance ToField USD where
   toField = toField . doubledown

-- | The dollar amount as a 'Double' (lossy for rationals that Double
-- cannot represent exactly).
doubledown :: USD -> Double
doubledown (USD d) = fromRational d
-- *Main> read "$.12" :: USD ~> $0.12
-- *Main> read "$1.12" :: USD ~> $1.12
-- *Main> read "$0.12" :: USD ~> $0.12
-- okay, how hard was that AAPL?
-- e.g.: http://investor.apple.com/dividends.cfm
-- | Raw (unprefixed) rendering: the amount to two decimal places.
instance Raw USD where rep (USD x) = laxmi 2 x

-- | Extract the underlying rational amount.
instance Currency USD where
   value (USD x) = x -- so Price-types are copointed ...

-- | Arithmetic is performed exactly on the underlying rationals.
instance Num USD where
   d1 - d2 = USD $ value d1 - value d2
   negate dollar = USD $ 0.0 - value dollar
   d1 + d2 = USD $ value d1 + value d2
   d1 * d2 = USD $ value d1 * value d2
   abs dollar = USD $ abs (value dollar)
   signum dollar = USD $ signum (value dollar)
   fromInteger x = USD (fromInteger x)

instance Fractional USD where
   d1 / d2 = USD $ value d1 / value d2
   fromRational = USD

-- | Monoid under addition with $0 as identity.
-- NOTE(review): there is no Semigroup instance here; on GHC >= 8.4
-- (Semigroup as a superclass of Monoid) this declaration would need one.
instance Monoid USD where
   mempty = USD 0
   (USD a) `mappend` (USD b) = USD $ a + b

-- because when you mappend dollars they are summed.
-- because when you mappend dollars they are summed.
| geophf/1HaskellADay | exercises/HAD/Data/Monetary/USD.hs | mit | 1,967 | 0 | 10 | 396 | 601 | 310 | 291 | 38 | 1 |
module Language.Brainfuck.Py2Bf.BfpointerSpec where
import Test.Hspec
import Language.Brainfuck.Py2Bf.Bf
import Language.Brainfuck.Py2Bf.Bfpointer
-- | Top-level Hspec entry: one group for the pointer-annotated
-- brainfuck code type.
spec :: Spec
spec =
  describe "Bfpcode"
    runSpec

-- | A pointer-addressed brainfuck program: 'Addp' cell delta adjusts a
-- cell, 'Whilep' loops on a cell, 'Putp' outputs a cell.  Running it is
-- expected to print @"Hello, world!"@ (asserted in 'runSpec' below).
hello :: Bfpcode
hello = Bfpcode
  [ Addp 0 9
  , Whilep 0 [ Addp 1 8, Addp 2 11, Addp 3 5, Addp 0 (-1) ]
  , Putp 1
  , Addp 2 2, Putp 2, Addp 2 7, Putp 2, Putp 2
  , Addp 2 3, Putp 2, Addp 3 (-1), Putp 3
  , Addp 3 (-12), Putp 3, Addp 2 8, Putp 2
  , Addp 2 (-8), Putp 2, Addp 2 3, Putp 2
  , Addp 2 (-6), Putp 2, Addp 2 (-8), Putp 2
  , Addp 3 1, Putp 3
  ]

-- | Executes 'hello' and checks its output.
runSpec :: Spec
runSpec =
  it "runs Bfpcode correctly" $
    runString hello `shouldBe` "Hello, world!"
| itchyny/py2bf.hs | test/Language/Brainfuck/Py2Bf/BfpointerSpec.hs | mit | 709 | 0 | 11 | 201 | 325 | 173 | 152 | 23 | 1 |
module Main (main) where
import qualified Examples.Rpc.CalculatorClient
import qualified Examples.Rpc.CalculatorServer
import qualified Examples.Rpc.EchoClient
import qualified Examples.Rpc.EchoServer
import qualified Examples.Serialization.HighLevel.Read
import qualified Examples.Serialization.HighLevel.Write
import qualified Examples.Serialization.LowLevel.Read
import qualified Examples.Serialization.LowLevel.Write
import Data.List (intercalate)
import System.Environment (getArgs)
import System.Exit (exitFailure)
import System.IO (hPutStrLn, stderr)
-- | Dispatch on the single command-line argument, running the matching
-- example; anything else (including no argument) prints usage and exits
-- with failure.
main :: IO ()
main = do
    args <- getArgs
    case args of
        ["calculator-client"] -> Examples.Rpc.CalculatorClient.main
        ["calculator-server"] -> Examples.Rpc.CalculatorServer.main
        ["echo-client"] -> Examples.Rpc.EchoClient.main
        ["echo-server"] -> Examples.Rpc.EchoServer.main
        ["highlevel-read"] -> Examples.Serialization.HighLevel.Read.main
        ["highlevel-write"] -> Examples.Serialization.HighLevel.Write.main
        ["lowlevel-read"] -> Examples.Serialization.LowLevel.Read.main
        ["lowlevel-write"] -> Examples.Serialization.LowLevel.Write.main
        _ -> usageErr
-- | Print the usage line (listing every recognised sub-command) to
-- stderr, then exit with a failure status.
-- NOTE(review): this command list duplicates the patterns in 'main';
-- keep the two in sync when adding examples.
usageErr :: IO ()
usageErr = do
    hPutStrLn
        stderr
        ("Usage: run-capnp-example ( "
            ++ intercalate " | "
                [ "calculator-client"
                , "calculator-server"
                , "echo-client"
                , "echo-server"
                , "highlevel-read"
                , "highlevel-write"
                , "lowlevel-read"
                , "lowlevel-write"
                ]
            ++ " )"
        )
    exitFailure
| zenhack/haskell-capnp | examples/cmd/Main.hs | mit | 1,695 | 0 | 12 | 438 | 321 | 197 | 124 | 42 | 9 |
module Haskeleton.TestBench (benchmarks) where
import Haskeleton.Test
import Criterion
-- | Criterion benchmark suite: times 'main' from "Haskeleton.Test",
-- forcing its IO result with 'nfIO'.
benchmarks :: [Benchmark]
benchmarks =
    [ bench "main" (nfIO main)
    ]
| Moredread/haskeleton-test | benchmark/Haskeleton/TestBench.hs | mit | 166 | 0 | 8 | 30 | 46 | 27 | 19 | 6 | 1 |
{-# htermination eltsFM_LE :: (Ord a, Ord k) => FiniteMap (Either a k) b -> (Either a k) -> [b] #-}
import FiniteMap
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/full_haskell/FiniteMap_eltsFM_LE_10.hs | mit | 117 | 0 | 3 | 23 | 5 | 3 | 2 | 1 | 0 |
{-# htermination minusFM :: FiniteMap Float b1 -> FiniteMap Float b2 -> FiniteMap Float b1 #-}
import FiniteMap
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/full_haskell/FiniteMap_minusFM_6.hs | mit | 112 | 0 | 3 | 18 | 5 | 3 | 2 | 1 | 0 |
module Battleship.Swallow.Test
(swallowSuite)
where
import Test.Tasty (testGroup, TestTree)
import Test.Tasty.HUnit
import Battleship.Swallow
-- | Tasty group bundling the swallow tests.
swallowSuite :: TestTree
swallowSuite = testGroup "Swallow"
              [testCase "swallow test" testSwallow]

-- | 'swallow' is expected to concatenate its two arguments.
testSwallow :: Assertion
testSwallow = "something" @=? swallow "some" "thing"
| sgrove/battlehaskell | test/Battleship/Swallow/Test.hs | mit | 328 | 0 | 7 | 43 | 78 | 45 | 33 | 10 | 1 |
{-# LANGUAGE PatternSynonyms #-}
-- For HasCallStack compatibility
{-# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module JSDOM.Generated.AudioNode
(connect, connectParam, disconnect, getContext, getNumberOfInputs,
getNumberOfOutputs, setChannelCount, getChannelCount,
setChannelCountMode, getChannelCountMode, setChannelInterpretation,
getChannelInterpretation, AudioNode(..), gTypeAudioNode,
IsAudioNode, toAudioNode)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..))
import qualified Prelude (error)
import Data.Typeable (Typeable)
import Data.Traversable (mapM)
import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import JSDOM.Types
import Control.Applicative ((<$>))
import Control.Monad (void)
import Control.Lens.Operators ((^.))
import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import JSDOM.Enums
-- NOTE(review): per the module name ("JSDOM.Generated.*") these bindings
-- appear to be machine-generated; behavioural changes belong in the
-- generator, not in hand edits here.

-- | <https://developer.mozilla.org/en-US/docs/Web/API/AudioNode.connect Mozilla AudioNode.connect documentation>
-- Invokes the JavaScript @connect@ method with destination plus optional
-- output/input indices; the JS return value is discarded ('void').
connect ::
        (MonadDOM m, IsAudioNode self, IsAudioNode destination) =>
          self -> destination -> Maybe Word -> Maybe Word -> m ()
connect self destination output input
  = liftDOM
      (void
         ((toAudioNode self) ^. jsf "connect"
            [toJSVal destination, toJSVal output, toJSVal input]))

-- | <https://developer.mozilla.org/en-US/docs/Web/API/AudioNode.connect Mozilla AudioNode.connect documentation>
-- Overload of @connect@ targeting an 'AudioParam' destination.
connectParam ::
             (MonadDOM m, IsAudioNode self) =>
               self -> AudioParam -> Maybe Word -> m ()
connectParam self destination output
  = liftDOM
      (void
         ((toAudioNode self) ^. jsf "connect"
            [toJSVal destination, toJSVal output]))

-- | <https://developer.mozilla.org/en-US/docs/Web/API/AudioNode.disconnect Mozilla AudioNode.disconnect documentation>
-- Invokes the JavaScript @disconnect@ method for the given output index.
disconnect ::
           (MonadDOM m, IsAudioNode self) => self -> Maybe Word -> m ()
disconnect self output
  = liftDOM
      (void ((toAudioNode self) ^. jsf "disconnect" [toJSVal output]))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/AudioNode.context Mozilla AudioNode.context documentation>
-- Reads the JS @context@ property and converts it to an 'AudioContext'.
getContext ::
           (MonadDOM m, IsAudioNode self) => self -> m AudioContext
getContext self
  = liftDOM
      (((toAudioNode self) ^. js "context") >>= fromJSValUnchecked)

-- | <https://developer.mozilla.org/en-US/docs/Web/API/AudioNode.numberOfInputs Mozilla AudioNode.numberOfInputs documentation>
-- Reads the JS @numberOfInputs@ property; the JS number is converted to
-- 'Word' via 'round'.
getNumberOfInputs ::
                  (MonadDOM m, IsAudioNode self) => self -> m Word
getNumberOfInputs self
  = liftDOM
      (round <$>
         (((toAudioNode self) ^. js "numberOfInputs") >>= valToNumber))

-- | <https://developer.mozilla.org/en-US/docs/Web/API/AudioNode.numberOfOutputs Mozilla AudioNode.numberOfOutputs documentation>
-- Reads the JS @numberOfOutputs@ property, converted like the above.
getNumberOfOutputs ::
                   (MonadDOM m, IsAudioNode self) => self -> m Word
getNumberOfOutputs self
  = liftDOM
      (round <$>
         (((toAudioNode self) ^. js "numberOfOutputs") >>= valToNumber))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/AudioNode.channelCount Mozilla AudioNode.channelCount documentation>
-- Writes the JS @channelCount@ property.
setChannelCount ::
                (MonadDOM m, IsAudioNode self) => self -> Word -> m ()
setChannelCount self val
  = liftDOM ((toAudioNode self) ^. jss "channelCount" (toJSVal val))

-- | <https://developer.mozilla.org/en-US/docs/Web/API/AudioNode.channelCount Mozilla AudioNode.channelCount documentation>
-- Reads the JS @channelCount@ property, converting the number to 'Word'.
getChannelCount :: (MonadDOM m, IsAudioNode self) => self -> m Word
getChannelCount self
  = liftDOM
      (round <$>
         (((toAudioNode self) ^. js "channelCount") >>= valToNumber))

-- | <https://developer.mozilla.org/en-US/docs/Web/API/AudioNode.channelCountMode Mozilla AudioNode.channelCountMode documentation>
-- Writes the JS @channelCountMode@ property.
setChannelCountMode ::
                    (MonadDOM m, IsAudioNode self, ToJSString val) =>
                      self -> val -> m ()
setChannelCountMode self val
  = liftDOM
      ((toAudioNode self) ^. jss "channelCountMode" (toJSVal val))

-- | <https://developer.mozilla.org/en-US/docs/Web/API/AudioNode.channelCountMode Mozilla AudioNode.channelCountMode documentation>
-- Reads the JS @channelCountMode@ property as a string-like value.
getChannelCountMode ::
                    (MonadDOM m, IsAudioNode self, FromJSString result) =>
                      self -> m result
getChannelCountMode self
  = liftDOM
      (((toAudioNode self) ^. js "channelCountMode") >>=
         fromJSValUnchecked)

-- | <https://developer.mozilla.org/en-US/docs/Web/API/AudioNode.channelInterpretation Mozilla AudioNode.channelInterpretation documentation>
-- Writes the JS @channelInterpretation@ property.
setChannelInterpretation ::
                         (MonadDOM m, IsAudioNode self, ToJSString val) =>
                           self -> val -> m ()
setChannelInterpretation self val
  = liftDOM
      ((toAudioNode self) ^. jss "channelInterpretation" (toJSVal val))

-- | <https://developer.mozilla.org/en-US/docs/Web/API/AudioNode.channelInterpretation Mozilla AudioNode.channelInterpretation documentation>
-- Reads the JS @channelInterpretation@ property as a string-like value.
getChannelInterpretation ::
                         (MonadDOM m, IsAudioNode self, FromJSString result) =>
                           self -> m result
getChannelInterpretation self
  = liftDOM
      (((toAudioNode self) ^. js "channelInterpretation") >>=
         fromJSValUnchecked)
| ghcjs/jsaddle-dom | src/JSDOM/Generated/AudioNode.hs | mit | 5,634 | 0 | 13 | 1,063 | 1,249 | 690 | 559 | 95 | 1 |
{-|
Module: Y2015.D16
Description: Advent of Code Day 16 Solutions.
License: MIT
Maintainer: @tylerjl
Solutions to the day 16 set of problems for <adventofcode.com>.
-}
module Y2015.D16 (findAunt, findRealAunt) where
import Data.List (maximumBy)
import Data.Map.Strict (Map, differenceWith, differenceWithKey, fromList, size)
type Aunt = Map String Int
-- |Finds the real Aunt given altered targeting criteria
findRealAunt :: String -- ^ Raw input of list of Aunts
             -> Int -- ^ Id of the gifting Aunt
findRealAunt = findGifter (size . differenceWithKey match gifter)
    where
      -- Range-based traits: the sensor reports a bound, not an exact value.
      match trait target candidate
        | trait `elem` ["cats", "trees"]           = candidate `gtNothing` target
        | trait `elem` ["pomeranians", "goldfish"] = target `gtNothing` candidate
        | otherwise                                = candidate `sameNothing` target
      -- Nothing (i.e. "matches") exactly when the first value exceeds the second.
      gtNothing a b | a > b     = Nothing
                    | otherwise = Just a
-- |Given a list of Aunts, find the Id of the gifting Aunt.
-- Every remembered trait must equal the corresponding 'gifter' trait
-- exactly (via 'sameNothing'); the aunt with the fewest mismatches wins.
findAunt :: String -- ^ Raw input of list of Aunts
         -> Int -- ^ Id of the gifting Aunt
findAunt = findGifter (size . differenceWith sameNothing gifter)
-- |Yields 'Nothing' when the two values agree (a matching trait);
-- otherwise keeps the first value as evidence of the mismatch.
sameNothing :: (Eq a) => a -> a -> Maybe a
sameNothing a b = if a == b then Nothing else Just a
-- Score every aunt with @f@ (her number of mismatching traits) and return
-- the 1-based id of the best — i.e. lowest — score.
-- NOTE: 'maximumBy' with a flipped comparison selects a *minimum*; unlike
-- 'minimumBy' it keeps the last of equally-scored elements, so ties resolve
-- to the highest aunt id. Do not "simplify" to 'minimumBy (comparing fst)'
-- without accounting for that tie-breaking difference.
findGifter :: (Aunt -> Int) -> String -> Int
findGifter f aunts = snd . maximumBy (flip compare)
                   $ zip (map f (toAunts aunts)) [1..]
-- Reference aunt: the known trait readings, parsed through the same
-- pipeline as the candidate aunts ("Sue 0" is a dummy header that
-- 'toAunt' discards).
gifter :: Aunt
gifter = toAunt $ words ("Sue 0: children: 3 cats: 7 samoyeds: 2 "
                      ++ "pomeranians: 3 akitas: 0 vizslas: 0 "
                      ++ "goldfish: 5 trees: 3 cars: 2 perfumes: 1")
-- |Parse the raw puzzle input: commas are noise, each line is one aunt.
toAunts :: String -> [Aunt]
toAunts input = [toAunt (words line) | line <- lines (filter (/= ',') input)]
-- |Build an Aunt from tokenised input. The first two tokens ("Sue", "N:")
-- are dropped; the rest alternate between "key:" and value tokens.
toAunt :: [String] -> Aunt
toAunt (_:_:traits) = fromList (pairUp traits)
    where
      pairUp (key:val:rest) = (init key, read val) : pairUp rest -- 'init' strips the ':'
      pairUp _              = []
toAunt _ = fromList []
| tylerjl/adventofcode | src/Y2015/D16.hs | mit | 2,070 | 0 | 10 | 580 | 544 | 289 | 255 | 34 | 5 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-|
Module : Network.WebexTeams
Copyright : (c) Naoto Shimazaki 2017,2018
License : MIT (see the file LICENSE)
Maintainer : https://github.com/nshimaza
Stability : experimental
This module provides types and functions for accessing Cisco Webex Teams REST API.
The module is designed to improve type safety over the API. Each entity is separately typed.
JSON messages contained in REST responses are decoded into appropriate type of Haskell record.
JSON messages sent in REST requests are encoded only from correct type of record.
Some Webex Teams REST API return list of objects. Those APIs require HTTP Link Header based pagination.
Haskell functions for those APIs automatically request subsequent pages as needed.
= Examples
@
-- Sending a message to a room.
let auth = Authorization "your authorization token"
roomId = RoomId "Room ID your message to be sent"
messageText = MessageText "your message"
message = CreateMessage (Just roomId) Nothing Nothing (Just messageText) Nothing Nothing
createEntity auth def createMessage >>= print . getResponseBody
-- Obtaining detail of a user.
let personId = PersonId "your person ID"
getDetail auth def personId >>= print . getResponseBody
-- Obtaining membership of a room as stream of object representing each membership relation.
let filter = MembershipFilter yourRoomId Nothing Nothing
runConduit $ streamListWithFilter auth def filter .| takeC 200 .| mapM_C print
-- Create a room.
let createRoom = CreateRoom "Title of the new room" Nothing
createEntity auth def createRoom >>= print . getResponseBody
-- Delete a room.
deleteRoom auth def roomId >>= print . getResponseBody
@
= List and steaming
The 'WebexTeams' module doesn't provide streaming API for REST response returning list of entities.
It is because the author of the package wants to keep it streaming library agnostic. Instead, it provides
'ListReader' IO action to read list responses with automatic pagenation. Streaming APIs can be found in
separate packages like webex-teams-pipes or webex-teams-conduit.
= Support for Lens
This package provides many of records representing objects communicated via Webex Teams REST API.
Those records are designed to allow create lenses by Control.Lens.TH.makeFields.
Following example creates overloaded accessors for 'Person', 'Room' and 'Team'.
@
makeFields ''Person
makeFields ''Room
makeFields ''Team
@
You can access 'personId', 'roomId' and 'teamId' via overloaded accessor function 'id' like this.
@
let yourPersonId = yourPerson ^. id
yourRoomId = yourRoom ^. id
yourTeamId = yourTeam ^. id
@
This package does not provide pre-generated lenses for you because not everyone need it
but you can make it by yourself so easily as described.
-}
module Network.WebexTeams
(
-- * Types
-- ** Class and Type Families
WebexTeamsFilter
, WebexTeamsListItem
, ToResponse
-- ** Common Types
, Authorization (..)
, CiscoSparkRequest (..)
, WebexTeamsRequest (..)
, Timestamp (..)
, ErrorCode (..)
, ErrorTitle (..)
, Errors (..)
-- ** People related types
, Person (..)
, PersonId (..)
, Email (..)
, DisplayName (..)
, NickName (..)
, FirstName (..)
, LastName (..)
, AvatarUrl (..)
, Timezone (..)
, PersonStatus (..)
, PersonType (..)
, PersonList (..)
, PersonFilter (..)
, CreatePerson (..)
, UpdatePerson (..)
-- ** Room related types
, Room (..)
, RoomId (..)
, RoomTitle (..)
, RoomType (..)
, SipAddr (..)
, RoomList (..)
, RoomFilter (..)
, RoomFilterSortBy (..)
, CreateRoom (..)
, UpdateRoom (..)
-- ** Membership related types
, Membership (..)
, MembershipId (..)
, MembershipList (..)
, MembershipFilter (..)
, CreateMembership (..)
, UpdateMembership (..)
-- ** Message related types
, Message (..)
, MessageId (..)
, MessageText (..)
, MessageHtml (..)
, MessageMarkdown (..)
, FileUrl (..)
, MessageList (..)
, MessageFilter (..)
, MentionedPeople (..)
, CreateMessage (..)
-- ** Team related types
, TeamName (..)
, TeamId (..)
, Team (..)
, TeamList (..)
, CreateTeam (..)
, UpdateTeam (..)
-- ** Team Membership related types
, TeamMembership (..)
, TeamMembershipId (..)
, TeamMembershipList (..)
, TeamMembershipFilter (..)
, CreateTeamMembership (..)
, UpdateTeamMembership (..)
-- ** Organization related types
, Organization (..)
, OrganizationId (..)
, OrganizationDisplayName (..)
, OrganizationList (..)
-- ** License related types
, License (..)
, LicenseId (..)
, LicenseName (..)
, LicenseUnit (..)
, LicenseList (..)
, LicenseFilter (..)
-- ** Role related types
, Role (..)
, RoleId (..)
, RoleName (..)
, RoleList (..)
-- * Functions
-- ** Getting detail of an entity
, getDetail
, getDetailEither
-- ** Streaming response of List API with auto pagenation
, ListReader
, getListWithFilter
, getTeamList
, getOrganizationList
, getRoleList
, streamEntityWithFilter
, streamTeamList
, streamOrganizationList
, streamRoleList
-- ** Creating an entity
, createEntity
, createEntityEither
-- ** Updating an entity
, updateEntity
, updateEntityEither
-- ** Creating default filter spec from mandatory field
, defaultMessageFilter
, defaultTeamMembershipFilter
-- ** Deleting an entity
, deleteRoom
, deleteMembership
, deleteMessage
, deleteTeam
, deleteTeamMembership
) where
import Conduit (ConduitT, yieldMany)
import Control.Monad.IO.Class (MonadIO, liftIO)
import Data.Aeson (FromJSON, ToJSON)
import Data.ByteString (ByteString)
import Data.ByteString.Char8 as C8 (unpack)
import Data.Default (Default (def))
import Data.IORef (IORef, newIORef, readIORef,
writeIORef)
import Data.Maybe (catMaybes, maybeToList)
import Data.Monoid ((<>))
import Data.Text (Text)
import Data.Text.Encoding (encodeUtf8)
import Network.HTTP.Simple
import Network.URI (URIAuth (..))
import Network.WebexTeams.Internal
import Network.WebexTeams.Types
-- | Authorization string against Webex Teams API to be contained in HTTP Authorization header of every request.
newtype Authorization = Authorization ByteString deriving (Eq, Show)
-- | Wrapping 'Request' in order to provide easy default value specifically for Webex Teams public API.
data WebexTeamsRequest = WebexTeamsRequest
{ webexTeamsRequestRequest :: Request -- ^ Holds pre-set 'Request' for REST API.
, webexTeamsRequestScheme :: String -- ^ Should be "https:" in production.
, webexTeamsRequestAuthority :: URIAuth -- ^ Authority part of request URI.
} deriving (Show)
-- | Type synonym for backward compatibility.
type CiscoSparkRequest = WebexTeamsRequest
-- | Common part of 'Request' against Webex Teams API.
-- Every call goes over TLS (port 443) to api.ciscospark.com and exchanges
-- JSON bodies.
webexTeamsBaseRequest :: Request
webexTeamsBaseRequest = configure defaultRequest
  where
    configure = addRequestHeader "Content-Type" "application/json; charset=utf-8"
              . setRequestPort 443
              . setRequestHost "api.ciscospark.com"
              . setRequestSecure True
-- | Default parameters for HTTP request to Webex Teams REST API.
instance Default WebexTeamsRequest where
def = WebexTeamsRequest webexTeamsBaseRequest "https:" $ URIAuth "" "api.ciscospark.com" ""
-- | Add given Authorization into request header.
-- The token is sent as an OAuth-style @Bearer@ credential.
addAuthorizationHeader :: Authorization -> Request -> Request
addAuthorizationHeader (Authorization token) req =
    addRequestHeader "Authorization" ("Bearer " <> token) req
-- | Building common part of 'Request' for List APIs.
makeCommonListReq
:: WebexTeamsRequest -- ^ Common request components
-> ByteString -- ^ API category part of REST URL path
-> WebexTeamsRequest
makeCommonListReq base@WebexTeamsRequest { webexTeamsRequestRequest = req } path
= base { webexTeamsRequestRequest = setRequestPath ("/v1/" <> path) $ setRequestMethod "GET" req }
{-|
Common worker function for List APIs.
It accesses List API with given 'Request', unwrap result into list of items, stream them to Conduit pipe
and finally it automatically accesses next page designated via HTTP Link header if available.
-}
streamList :: (MonadIO m, WebexTeamsListItem i) => Authorization -> WebexTeamsRequest -> ConduitT () i m ()
streamList auth (WebexTeamsRequest req scheme uriAuth) = do
res <- httpJSON $ addAuthorizationHeader auth req
yieldMany . unwrap $ getResponseBody res
streamListLoop auth res scheme uriAuth
-- | Processing pagination by HTTP Link header.
streamListLoop :: (MonadIO m, FromJSON a, WebexTeamsListItem i) => Authorization -> Response a -> String -> URIAuth -> ConduitT () i m ()
streamListLoop auth res scheme uriAuth
= case getNextUrl res >>= validateUrl scheme uriAuth >>= (\url -> parseRequest $ "GET " <> C8.unpack url) of
Nothing -> pure ()
Just nextReq -> do
nextRes <- httpJSON $ addAuthorizationHeader auth nextReq
yieldMany . unwrap $ getResponseBody nextRes
streamListLoop auth nextRes scheme uriAuth
-- | Get list of entities with query parameter and stream it into Conduit pipe. It automatically performs pagination.
{-# DEPRECATED streamEntityWithFilter "Use getListWithFilter or streamListWithFilter of webex-teams-conduit" #-}
streamEntityWithFilter :: (MonadIO m, WebexTeamsFilter filter, WebexTeamsListItem (ToResponse filter))
    => Authorization
    -> WebexTeamsRequest
    -> filter
    -> ConduitT () (ToResponse filter) m ()
streamEntityWithFilter auth base param =
    streamList auth $ setFilterQuery $ makeCommonListReq base (apiPath param)
  where
    -- Attach the filter's key/value pairs as the request query string.
    -- (Local helper renamed from the misspelled @setQeuryString@.)
    setFilterQuery comm@WebexTeamsRequest { webexTeamsRequestRequest = req }
        = comm { webexTeamsRequestRequest = setRequestQueryString (toFilterList param) req }
-- | List of 'Team' and stream it into Conduit pipe. It automatically performs pagination.
{-# DEPRECATED streamTeamList "Use getTeamList or streamTeamList of webex-teams-conduit" #-}
streamTeamList :: MonadIO m => Authorization -> WebexTeamsRequest -> ConduitT () Team m ()
streamTeamList auth base = streamList auth $ makeCommonListReq base teamsPath
-- | Filter list of 'Organization' and stream it into Conduit pipe. It automatically performs pagination.
{-# DEPRECATED streamOrganizationList "Use getOrganizationList or streamOrganizationList of webex-teams-conduit" #-}
streamOrganizationList :: MonadIO m => Authorization -> WebexTeamsRequest -> ConduitT () Organization m ()
streamOrganizationList auth base = streamList auth $ makeCommonListReq base organizationsPath
-- | List of 'Role' and stream it into Conduit pipe. It automatically performs pagination.
{-# DEPRECATED streamRoleList "Use getRoleList or streamRoleList of webex-teams-conduit" #-}
streamRoleList :: MonadIO m => Authorization -> WebexTeamsRequest -> ConduitT () Role m ()
streamRoleList auth base = streamList auth $ makeCommonListReq base rolesPath
{-|
'ListReader' is IO action returned by functions for list API ('getListWithFilter', 'getTeamList' etc).
It is containing URL inside to be accessed. When you call the IO action, it accesses to Webex Teams REST API,
parse next page URL if available, then return new IO action. The new IO action contains list of responded items and
new URL for next page so you can call the new IO action to get the next page.
Following example demonstrates how you can get all items into single list.
@
readAllList :: ListReader i -> IO [i]
readAllList reader = go []
where
go xs = reader >>= \chunk -> case chunk of
[] -> pure xs
ys -> go (xs <> ys)
@
Note that this example is only for explaining how 'ListReader' works. Practically you should not do the above
because it eagerly creates entire list. You should use streaming APIs instead. Streaming APIs are available via
webex-teams-conduit and webex-teams-pipes package.
-}
type ListReader a = IO [a]
{-|
Returns common worker function 'ListReader' for List APIs.
ListReader accesses List API with given 'Request' then returns responded list of items.
ListReader also keeps next URL if response is paginated and next page is available.
Next call of ListReader causes another List API access for the next page.
ListReader returns [] when there is no more page.
-}
getList :: (MonadIO m, WebexTeamsListItem i) => Authorization -> WebexTeamsRequest -> m (ListReader i)
getList auth wxReq = liftIO $ listReader <$> newIORef (Just wxReq)
  where
    listReader :: WebexTeamsListItem i => IORef (Maybe WebexTeamsRequest) -> ListReader i
    listReader wxReqRef = do
        maybeReq <- readIORef wxReqRef
        case maybeReq of
            Nothing     -> pure []
            Just (WebexTeamsRequest req scheme uriAuth) -> do
                res <- httpJSON $ addAuthorizationHeader auth req
                -- Parse the HTTP Link header and stash the request for the
                -- next page (or Nothing) for the following invocation.
                -- The Maybe monad short-circuits when any step fails.
                writeIORef wxReqRef $ do
                    nextUrl  <- getNextUrl res
                    validUrl <- validateUrl scheme uriAuth nextUrl
                    nextReq  <- parseRequest $ "GET " <> C8.unpack validUrl
                    pure (WebexTeamsRequest nextReq scheme uriAuth)
                -- (Removed a dead @rr <- readIORef wxReqRef@ whose result
                -- was never used.)
                pure . unwrap $ getResponseBody res
-- | Get list with query parameter. Returns a 'ListReader' that performs
-- automatic pagination on each invocation.
getListWithFilter :: (MonadIO m, WebexTeamsFilter filter, WebexTeamsListItem (ToResponse filter))
    => Authorization
    -> WebexTeamsRequest
    -> filter
    -> m (ListReader (ToResponse filter))
getListWithFilter auth base param =
    getList auth $ setFilterQuery $ makeCommonListReq base (apiPath param)
  where
    -- Attach the filter's key/value pairs as the request query string.
    -- (Local helper renamed from the misspelled @setQeuryString@.)
    setFilterQuery comm@WebexTeamsRequest { webexTeamsRequestRequest = req }
        = comm { webexTeamsRequestRequest = setRequestQueryString (toFilterList param) req }
-- | Return 'ListReader' for 'Team'.
getTeamList :: MonadIO m => Authorization -> WebexTeamsRequest -> m (ListReader Team)
getTeamList auth base = getList auth $ makeCommonListReq base teamsPath
-- | Return 'ListReader' for 'Organization'.
getOrganizationList :: MonadIO m => Authorization -> WebexTeamsRequest -> m (ListReader Organization)
getOrganizationList auth base = getList auth $ makeCommonListReq base organizationsPath
-- | Return 'ListReader' for 'Role'.
getRoleList :: MonadIO m => Authorization -> WebexTeamsRequest -> m (ListReader Role)
getRoleList auth base = getList auth $ makeCommonListReq base rolesPath
-- | Build an authorized GET 'Request' for a detail endpoint (@/v1/<path>/<id>@).
makeCommonDetailReq
    :: WebexTeamsRequest    -- ^ Common request components.
    -> Authorization        -- ^ Authorization string against Webex Teams API.
    -> ByteString           -- ^ API category part of REST URL path.
    -> Text                 -- ^ Identifier string part of REST URL path.
    -> Request
makeCommonDetailReq (WebexTeamsRequest base _ _) auth path idStr =
      setRequestPath ("/v1/" <> path <> "/" <> encodeUtf8 idStr)
    . setRequestMethod "GET"
    . addAuthorizationHeader auth
    $ base
{-|
Get details of a Webex Teams entity.
Obtaining detail of an entity identified by key. The key can be a value in one of
following types: 'PersonId', 'RoomId', 'MembershipId', 'MessageId', 'TeamId', 'TeamMembershipId',
'OrganizationId', 'LicenseId', 'RoleId'. API is automatically selected by type of the key.
A JSONException runtime exception will be thrown on an JSON parse errors.
-}
getDetail :: (MonadIO m, WebexTeamsDetail key)
=> Authorization -- ^ Authorization string against Webex Teams API.
-> WebexTeamsRequest -- ^ Predefined part of 'Request' commonly used for Webex Teams API.
-> key -- ^ One of PersonId, RoomId, MembershipId, MessageId, TeamId, TeamMembershipId,
-- OrganizationId, LicenseId and RoleId.
-> m (Response (ToResponse key))
getDetail auth base entityId = httpJSON $ makeCommonDetailReq base auth (apiPath entityId) (toIdStr entityId)
-- | Get details of a Webex Teams entity. A Left value will be returned on an JSON parse errors.
getDetailEither :: (MonadIO m, WebexTeamsDetail key)
=> Authorization
-> WebexTeamsRequest
-> key
-> m (Response (Either JSONException (ToResponse key)))
getDetailEither auth base entityId = httpJSONEither $ makeCommonDetailReq base auth (apiPath entityId) (toIdStr entityId)
-- | Build an authorized POST 'Request' for @/v1/<path>@ carrying @body@ as JSON.
makeCommonCreateReq :: ToJSON a => WebexTeamsRequest -> Authorization -> ByteString -> a -> Request
makeCommonCreateReq (WebexTeamsRequest base _ _) auth path body =
      setRequestBodyJSON body
    . setRequestPath ("/v1/" <> path)
    . setRequestMethod "POST"
    . addAuthorizationHeader auth
    $ base
{-|
Create a Webex Teams entity with given parameters.
Creating a new entity of Webex Teams such as space, team, membership or message.
REST API path is automatically selected by type of createParams.
A JSONException runtime exception will be thrown on an JSON parse errors.
-}
createEntity :: (MonadIO m, WebexTeamsCreate createParams)
=> Authorization -- ^ Authorization string against Webex Teams API.
-> WebexTeamsRequest -- ^ Predefined part of 'Request' commonly used for Webex Teams API.
-> createParams -- ^ One of 'CreatePerson', 'CreateRoom', 'CreateMembership', 'CreateMessage',
-- 'CreateTeam' and 'CreateTeamMembership'.
-> m (Response (ToResponse createParams))
createEntity auth base param = httpJSON $ makeCommonCreateReq base auth (apiPath param) param
-- | Create a Webex Teams entity with given parameters. A Left value will be returned on an JSON parse errors.
createEntityEither :: (MonadIO m, WebexTeamsCreate createParams)
=> Authorization
-> WebexTeamsRequest
-> createParams
-> m (Response (Either JSONException (ToResponse createParams)))
createEntityEither auth base param = httpJSONEither $ makeCommonCreateReq base auth (apiPath param) param
-- | Build an authorized PUT 'Request' for @/v1/<path>@ carrying @body@ as JSON.
makeCommonUpdateReq :: ToJSON a => WebexTeamsRequest -> Authorization -> ByteString -> a -> Request
makeCommonUpdateReq (WebexTeamsRequest base _ _) auth path body =
      setRequestBodyJSON body
    . setRequestPath ("/v1/" <> path)
    . setRequestMethod "PUT"
    . addAuthorizationHeader auth
    $ base
{-|
Update a Webex Teams entity with given parameters.
Updates an existing entity of Webex Teams such as space, team, or membership.
REST API path is automatically selected by type of updateParams.
A JSONException runtime exception will be thrown on JSON parse errors.
-}
updateEntity :: (MonadIO m, WebexTeamsUpdate updateParams)
    => Authorization        -- ^ Authorization string against Webex Teams API.
    -> WebexTeamsRequest    -- ^ Predefined part of 'Request' commonly used for Webex Teams API.
    -> updateParams         -- ^ One of 'UpdatePerson', 'UpdateRoom', 'UpdateMembership',
                            --   'UpdateTeam' and 'UpdateTeamMembership'.
    -> m (Response (ToResponse updateParams))
updateEntity auth base param = httpJSON $ makeCommonUpdateReq base auth (apiPath param) param
-- | Update a Webex Teams entity with given parameters. A Left value will be returned on an JSON parse errors.
updateEntityEither :: (MonadIO m, WebexTeamsUpdate updateParams)
=> Authorization
-> WebexTeamsRequest
-> updateParams
-> m (Response (Either JSONException (ToResponse updateParams)))
updateEntityEither auth base param = httpJSONEither $ makeCommonUpdateReq base auth (apiPath param) param
-- | Build an authorized DELETE 'Request' for @/v1/<path>/<id>@.
makeCommonDeleteReq
    :: Authorization    -- ^ Authorization string against Webex Teams API.
    -> Request          -- ^ Common request components.
    -> ByteString       -- ^ API category part of REST URL path.
    -> Text             -- ^ Identifier string part of REST URL path.
    -> Request
makeCommonDeleteReq auth base path idStr =
      setRequestPath ("/v1/" <> path <> "/" <> encodeUtf8 idStr)
    . setRequestMethod "DELETE"
    . addAuthorizationHeader auth
    $ base
-- | Polymorphic version of delete. Intentionally not exposed to outside of the module.
deleteEntity :: (MonadIO m, WebexTeamsDetail key)
=> Authorization -- ^ Authorization string against Webex Teams API.
-> WebexTeamsRequest -- ^ Predefined part of 'Request' commonly used for Webex Teams API.
-> key -- ^ One of PersonId, RoomId, MembershipId, MessageId, TeamId, TeamMembershipId.
-> m (Response ())
deleteEntity auth (WebexTeamsRequest base _ _) entityId
= httpNoBody $ makeCommonDeleteReq auth base (apiPath entityId) (toIdStr entityId)
-- | Deletes a room, by ID.
deleteRoom :: MonadIO m
=> Authorization -- ^ Authorization string against Webex Teams API.
-> WebexTeamsRequest -- ^ Predefined part of 'Request' commonly used for Webex Teams API.
-> RoomId -- ^ Identifier of a space to be deleted.
-> m (Response ())
deleteRoom = deleteEntity
-- | Deletes a membership, by ID.
deleteMembership :: MonadIO m
    => Authorization        -- ^ Authorization string against Webex Teams API.
    -> WebexTeamsRequest    -- ^ Predefined part of 'Request' commonly used for Webex Teams API.
    -> MembershipId         -- ^ Identifier of a membership to be deleted.
    -> m (Response ())
deleteMembership = deleteEntity
-- | Deletes a message, by ID.
deleteMessage :: MonadIO m
    => Authorization        -- ^ Authorization string against Webex Teams API.
    -> WebexTeamsRequest    -- ^ Predefined part of 'Request' commonly used for Webex Teams API.
    -> MessageId            -- ^ Identifier of a message to be deleted.
    -> m (Response ())
deleteMessage = deleteEntity
-- | Deletes a team, by ID.
deleteTeam :: MonadIO m
    => Authorization        -- ^ Authorization string against Webex Teams API.
    -> WebexTeamsRequest    -- ^ Predefined part of 'Request' commonly used for Webex Teams API.
    -> TeamId               -- ^ Identifier of a team to be deleted.
    -> m (Response ())
deleteTeam = deleteEntity
-- | Deletes a teamMembership, by ID.
deleteTeamMembership :: MonadIO m
    => Authorization        -- ^ Authorization string against Webex Teams API.
    -> WebexTeamsRequest    -- ^ Predefined part of 'Request' commonly used for Webex Teams API.
    -> TeamMembershipId     -- ^ Identifier of a team membership to be deleted.
    -> m (Response ())
deleteTeamMembership = deleteEntity
| nshimaza/cisco-spark-api | webex-teams-api/src/Network/WebexTeams.hs | mit | 23,047 | 0 | 20 | 5,336 | 3,369 | 1,862 | 1,507 | 316 | 2 |
module MarkovChain
( runMarkovChain
, step
, stepUntilAbsorption
, SystemState
, MarkovChain
) where
import Control.Monad.Reader
import Data.Matrix
import TransitionMatrix
import MC
type SystemState = Int
type MarkovChain = ReaderT TransitionMatrix MC
-- | Take one step: transition from a state to another.
-- The column of the transition matrix indexed by the current state supplies
-- the sampling weights; the sampled result may be 'Nothing' (see
-- 'stepUntilAbsorption', which treats that as absorption).
step :: SystemState -> MarkovChain (Maybe SystemState)
step state = asks (getCol state . toMatrix) >>= lift . sampleUniformV
-- | Repeatedly apply the monadic action, feeding each produced state back in,
-- until the action yields 'Nothing'. Collects every produced state (the
-- initial state itself is not included in the result).
actWhileM :: Monad m => a -> (a -> m (Maybe a)) -> m [a]
actWhileM start act = go start
  where
    go current = act current >>= maybe (return []) keep
    keep next  = (next :) <$> actWhileM next act
-- | Take steps until we hit absorption: repeatedly apply 'step' and collect
-- the visited states. The initial state is not included in the result.
stepUntilAbsorption :: SystemState -> MarkovChain [SystemState]
stepUntilAbsorption initialState = actWhileM initialState step
runMarkovChain :: ReaderT r m a -> r -> m a
runMarkovChain = runReaderT
| arekfu/markov-chains | src/MarkovChain.hs | mit | 970 | 0 | 14 | 202 | 289 | 146 | 143 | 29 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Main (main) where
import MPD
import MPD.Core
import MPD.Commands.Parser
import Control.Applicative
import qualified Data.Attoparsec.ByteString as A
import qualified Data.ByteString as SB
import Criterion.Main
-- | Micro-benchmarks for the MPD response parsers.
-- (Added the missing top-level type signature; benchmarks are unchanged.)
main :: IO ()
main = defaultMain [
    bench "boolP" $
      nf (A.parseOnly boolP) "0"

  , bench "floatP" $
      nf (A.parseOnly floatP) "1337.25"

  , bench "intP" $
      nf (A.parseOnly intP) "1337"

  , bench "textP" $
      nf (A.parseOnly textP) "foo"

  , bench "dateP" $
      nf (A.parseOnly dateP) "2014-05-16T17:33:26Z"

  , bench "field/single" $
      nf (A.parseOnly (fieldP "key" intP)) "key: 1337\n"

  , bench "field/compound" $
      nf (A.parseOnly ((,,) <$> fieldP "a" boolP
                            <*> fieldP "b" floatP
                            <*> fieldP "c" intP)) "a: 1\nb: 2.5\nc: 42\n"

  , bench "field/complex" $ do
      let p = (,,) <$> fieldP "a" boolP <*> fieldP "b" floatP <*> fieldP "c" intP
          s = SB.concat (replicate 1000 "a: 1\nb: 2.5\nc: 42\n")
      nf (A.parseOnly $ A.many1 p) s
  ]
| joachifm/nanompd | bench/ParseBench.hs | mit | 1,084 | 0 | 16 | 294 | 352 | 180 | 172 | 30 | 1 |
{-# htermination show :: (Show a, Show k) => (Either a k) -> String #-}
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/full_haskell/Prelude_show_11.hs | mit | 72 | 0 | 2 | 15 | 3 | 2 | 1 | 1 | 0 |
import System.IO
import System.Exit
import XMonad
import XMonad.Hooks.DynamicLog
import XMonad.Hooks.ManageDocks
import XMonad.Hooks.ManageHelpers
import XMonad.Hooks.SetWMName
import XMonad.Layout.Fullscreen
import XMonad.Layout.NoBorders
import XMonad.Layout.Spiral
import XMonad.Layout.Tabbed
import XMonad.Util.Run(spawnPipe)
import XMonad.Util.EZConfig(additionalKeys)
import qualified XMonad.StackSet as W
import qualified Data.Map as M
------------------------------------------------------------------------
-- Terminal
-- The preferred terminal program, which is used in a binding below and by
-- certain contrib modules.
--
myTerminal = "terminator"
------------------------------------------------------------------------
-- Workspaces
-- The default number of workspaces (virtual screens) and their names.
--
myWorkspaces = ["1:web","2:terms","3:code","4:misc","5:vm"] ++ map show [6..9]
------------------------------------------------------------------------
-- Window rules
-- Execute arbitrary actions and WindowSet manipulations when managing
-- a new window. You can use this to, for example, always float a
-- particular program, or have a client always appear on a particular
-- workspace.
--
-- To find the property name associated with a program, use
-- > xprop | grep WM_CLASS
-- and click on the client you're interested in.
--
-- To match on the WM_NAME, you can use 'title' in the same way that
-- 'className' and 'resource' are used below.
--
-- Calculators, media players and chat clients float; Thunderbird and
-- VirtualBox are pinned to fixed workspaces.
myManageHook = composeAll
    [ resource =? "desktop_window" --> doIgnore
    , className =? "Galculator" --> doFloat
    , className =? "Steam" --> doFloat
    , className =? "Gimp" --> doFloat
    , resource =? "gpicview" --> doFloat
    , className =? "MPlayer" --> doFloat
    , className =? "Vlc" --> doFloat
    , className =? "Skype" --> doFloat
    --, className =? "Chromium" --> doShift "1:web"
    , className =? "Thunderbird" --> doShift "1:web"
    --, className =? "Subl3" --> doShift "3:code"
    , className =? "VirtualBox" --> doShift "5:vm"
    -- Fullscreen clients float over everything; focus moves down so the
    -- fullscreen window keeps the whole screen to itself.
    , isFullscreen --> (doF W.focusDown <+> doFullFloat)]
------------------------------------------------------------------------
-- Layouts
-- You can specify and transform your layouts by modifying these values.
-- If you change layout bindings be sure to use 'mod-shift-space' after
-- restarting (with 'mod-q') to reset your layout state to the new
-- defaults, as xmonad preserves your old layout settings by default.
--
-- The available layouts. Note that each layout is separated by |||,
-- which denotes layout choice.
--
-- NOTE(review): the last alternative, 'noBorders (fullscreenFull Full)',
-- sits *outside* 'avoidStruts' (see the closing paren after @spiral (6/7)@),
-- so selecting it covers the status bar. Presumably intentional for true
-- fullscreen — confirm before "fixing" the parenthesization.
myLayout = avoidStruts (
    Tall 1 (3/100) (1/2) |||
    Mirror (Tall 1 (3/100) (1/2)) |||
    tabbed shrinkText tabConfig |||
    Full |||
    spiral (6/7)) |||
    noBorders (fullscreenFull Full)
------------------------------------------------------------------------
-- Colors and borders
-- Currently based on the ir_black theme.
--
myNormalBorderColor = "#222222"
myFocusedBorderColor = "#EE9A00"
-- Colors for text and backgrounds of each tab when in "Tabbed" layout.
tabConfig = defaultTheme {
activeBorderColor = "#7C7C7C",
activeTextColor = "#EE9A00",
activeColor = "#222222",
inactiveBorderColor = "#7C7C7C",
inactiveTextColor = "#bbbbbb",
inactiveColor = "#222222"
}
-- Color of current window title in xmobar.
xmobarTitleColor = "#FFB6B0"
-- Color of current workspace in xmobar.
xmobarCurrentWorkspaceColor = "#EE9A00"
-- Width of the window border in pixels.
myBorderWidth = 1
------------------------------------------------------------------------
-- Key bindings
--
-- modMask lets you specify which modkey you want to use. The default
-- is mod1Mask ("left alt"). You may also consider using mod3Mask
-- ("right alt"), which does not conflict with emacs keybindings. The
-- "windows key" is usually mod4Mask.
--
myModMask = mod4Mask
myKeys conf@(XConfig {XMonad.modMask = modMask}) = M.fromList $
----------------------------------------------------------------------
-- Custom key bindings
--
-- Start a terminal. Terminal to start is specified by myTerminal variable.
[ ((modMask .|. shiftMask, xK_Return),
spawn $ XMonad.terminal conf)
-- Lock the screen using xscreensaver.
, ((modMask .|. controlMask, xK_l),
spawn "xscreensaver-command -lock")
-- Launch dmenu via yeganesh.
-- Use this to launch programs without a key binding.
, ((modMask, xK_r),
spawn "exe=`dmenu_path_c | yeganesh` && eval \"exec $exe\"")
-- Take a screenshot in select mode.
-- After pressing this key binding, click a window, or draw a rectangle with
-- the mouse.
, ((modMask, xK_Print),
spawn "~/.xmonad/bin/select-screenshot")
-- Take full screenshot in multi-head mode.
-- That is, take a screenshot of everything you see.
, ((modMask .|. shiftMask, xK_Print),
spawn "~/.xmonad/bin/screenshot")
-- Mute volume.
, ((modMask .|. controlMask, xK_m),
spawn "amixer -q set Master toggle")
-- Decrease volume.
, ((modMask .|. controlMask, xK_j),
spawn "amixer -q set Master 10%-")
-- Increase volume.
, ((modMask .|. controlMask, xK_k),
spawn "amixer -q set Master 10%+")
--------------------------------------------------------------------
-- "Standard" xmonad key bindings
--
-- Close focused window.
, ((modMask, xK_q),
kill)
-- Cycle through the available layout algorithms.
, ((modMask, xK_space),
sendMessage NextLayout)
-- Reset the layouts on the current workspace to default.
, ((modMask .|. shiftMask, xK_space),
setLayout $ XMonad.layoutHook conf)
-- Resize viewed windows to the correct size.
, ((modMask, xK_n),
refresh)
-- Move focus to the next window.
, ((modMask, xK_Tab),
windows W.focusDown)
-- Move focus to the next window.
, ((modMask, xK_Left),
windows W.focusUp)
-- Move focus to the previous window.
, ((modMask, xK_Right),
windows W.focusDown)
-- Move focus to the next window.
, ((modMask, xK_j),
windows W.focusDown)
-- Move focus to the previous window.
, ((modMask, xK_k),
windows W.focusUp)
-- Move focus to the master window.
, ((modMask, xK_m),
windows W.focusMaster )
-- Swap the focused window and the master window.
, ((modMask, xK_Return),
windows W.swapMaster)
-- Swap the focused window with the next window.
, ((modMask .|. shiftMask, xK_Left),
windows W.swapUp )
-- Swap the focused window with the previous window.
, ((modMask .|. shiftMask, xK_Right),
windows W.swapDown )
-- Swap the focused window with the next window.
, ((modMask .|. shiftMask, xK_j),
windows W.swapDown )
-- Swap the focused window with the previous window.
, ((modMask .|. shiftMask, xK_k),
windows W.swapUp )
-- Shrink the master area.
, ((modMask, xK_h),
sendMessage Shrink)
-- Expand the master area.
, ((modMask, xK_l),
sendMessage Expand)
-- Push window back into tiling.
, ((modMask, xK_t),
withFocused $ windows . W.sink)
-- Increment the number of windows in the master area.
, ((modMask, xK_comma),
sendMessage (IncMasterN 1))
-- Decrement the number of windows in the master area.
, ((modMask, xK_period),
sendMessage (IncMasterN (-1)))
-- Toggle the status bar gap.
-- TODO: update this binding with avoidStruts, ((modMask, xK_b),
-- Quit xmonad.
, ((modMask .|. shiftMask, xK_q),
io (exitWith ExitSuccess))
-- Restart xmonad.
, ((modMask .|. shiftMask, xK_r),
restart "xmonad" True)
]
++
-- mod-[1..9], Switch to workspace N
-- mod-shift-[1..9], Move client to workspace N
[((m .|. modMask, k), windows $ f i)
| (i, k) <- zip (XMonad.workspaces conf) [xK_1 .. xK_9]
, (f, m) <- [(W.greedyView, 0), (W.shift, shiftMask)]]
++
-- mod-{w,e,r}, Switch to physical/Xinerama screens 1, 2, or 3
-- mod-shift-{w,e,r}, Move client to screen 1, 2, or 3
[((m .|. modMask, key), screenWorkspace sc >>= flip whenJust (windows . f))
| (key, sc) <- zip [xK_w, xK_e] [0..]
, (f, m) <- [(W.view, 0), (W.shift, shiftMask)]]
------------------------------------------------------------------------
-- Mouse bindings
--
-- Focus rules
-- True if your focus should follow your mouse cursor.
myFocusFollowsMouse :: Bool
myFocusFollowsMouse = False
myMouseBindings (XConfig {XMonad.modMask = modMask}) = M.fromList $
[
-- mod-button1, Set the window to floating mode and move by dragging
((modMask, button1),
(\w -> focus w >> mouseMoveWindow w))
-- mod-button2, Raise the window to the top of the stack
, ((modMask, button2),
(\w -> focus w >> windows W.swapMaster))
-- mod-button3, Set the window to floating mode and resize by dragging
, ((modMask, button3),
(\w -> focus w >> mouseResizeWindow w))
-- you may also bind events to the mouse scroll wheel (button4 and button5)
]
------------------------------------------------------------------------
-- Status bars and logging
-- Perform an arbitrary action on each internal state change or X event.
-- See the 'DynamicLog' extension for examples.
--
-- To emulate dwm's status bar
--
-- > logHook = dynamicLogDzen
--
------------------------------------------------------------------------
-- Startup hook
-- Perform an arbitrary action each time xmonad starts or is restarted
-- with mod-q. Used by, e.g., XMonad.Layout.PerWorkspace to initialize
-- per-workspace layout choices.
--
-- By default, do nothing.
myStartupHook = return ()
------------------------------------------------------------------------
-- Run xmonad with all the defaults we set up.
--
main = do
xmproc <- spawnPipe "/usr/bin/xmobar ~/.xmonad/xmobarrc"
xmonad $ defaults {
logHook = dynamicLogWithPP $ xmobarPP {
ppOutput = hPutStrLn xmproc
, ppTitle = xmobarColor xmobarTitleColor "" . shorten 100
, ppCurrent = xmobarColor xmobarCurrentWorkspaceColor ""
, ppSep = " "}
, manageHook = manageDocks <+> myManageHook
, startupHook = setWMName "LG3D"
}
------------------------------------------------------------------------
-- Combine it all together
-- A structure containing your configuration settings, overriding
-- fields in the default config. Any you don't override, will
-- use the defaults defined in xmonad/XMonad/Config.hs
--
-- No need to modify this.
--
defaults = defaultConfig {
-- simple stuff
terminal = myTerminal,
focusFollowsMouse = myFocusFollowsMouse,
borderWidth = myBorderWidth,
modMask = myModMask,
workspaces = myWorkspaces,
normalBorderColor = myNormalBorderColor,
focusedBorderColor = myFocusedBorderColor,
-- key bindings
keys = myKeys,
mouseBindings = myMouseBindings,
-- hooks, layouts
layoutHook = smartBorders $ myLayout,
manageHook = myManageHook,
startupHook = myStartupHook
}
| black-puppydog/dotfiles_crvs | xmonad.symlink/xmonad.darrhya.hs | mit | 11,104 | 0 | 16 | 2,300 | 1,813 | 1,113 | 700 | 152 | 1 |
{-# LANGUAGE StandaloneDeriving, ViewPatterns #-}
-----------------------------------------------------------------------------
-- |
-- Module : Mezzo.Model.Types
-- Description : Mezzo music types
-- Copyright : (c) Dima Szamozvancev
-- License : MIT
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- Types modeling basic musical constructs at the type level.
--
-----------------------------------------------------------------------------
module Mezzo.Model.Types
(
-- * Note properties
PitchClass (..)
, Accidental (..)
, OctaveNum (..)
, Duration (..)
-- ** Singleton types for note properties
, PC (..)
, Acc (..)
, Oct (..)
, Dur (..)
-- * Pitches
, PitchType (..)
, Pit (..)
, type (=?=)
, type (<<=?)
, type (<<?)
-- * Harmonic types
, Mode (..)
, ScaleDegree (..)
, DegreeType (..)
, KeyType (..)
, RootType (..)
, Mod (..)
, ScaDeg (..)
, KeyS (..)
, Deg (..)
, Root (..)
, RootToPitch
, PitchToNat
, Sharpen
, Flatten
, Dot
, HalfOf
, FromRoot
, FromSilence
, FromTriplet
-- * Specialised musical vector types
, Voice
, Partiture
-- * Intervals
, IntervalSize (..)
, IntervalClass (..)
, IntervalType (..)
, MakeInterval
-- ** Singleton types for interval properties
, IC (..)
, IS (..)
, Intv (..)
-- * Operations
, OctPred
, OctSucc
, HalfStepsUpBy
, HalfStepsDownBy
, RaiseBy
, LowerBy
, RaiseAllBy
, LowerAllBy
, RaiseAllBy'
, LowerAllBy'
, RaiseByOct
, LowerByOct
, RaiseAllByOct
, TransposeUpBy
, TransposeDownBy
) where
import GHC.TypeLits
import Data.Proxy
import Mezzo.Model.Prim
import Mezzo.Model.Reify
infixl 3 <<=?
infixl 3 <<?
-------------------------------------------------------------------------------
-- Note properties
-- The "minimum complete definition" for musical notes and rests.
-------------------------------------------------------------------------------

-- | The diatonic pitch class of the note.
data PitchClass = C | D | E | F | G | A | B

-- | The accidental applied to a note.
data Accidental = Natural | Flat | Sharp

-- | The octave where the note resides (middle C is 'Oct4').
data OctaveNum =
    Oct_1 | Oct0 | Oct1 | Oct2 | Oct3 | Oct4 | Oct5 | Oct6 | Oct7 | Oct8

-- | The duration of the note (a whole note has duration 32, so the unit
-- is a thirty-second note).
type Duration = Nat

---- Singleton types for note properties
-- Each singleton carries a 'Primitive' constraint so that the type-level
-- value can be reflected back to the term level.

-- | The singleton type for 'PitchClass'.
data PC (pc :: PitchClass) where
    PC :: Primitive pc => PC pc

-- | The singleton type for 'Accidental'.
data Acc (acc :: Accidental) where
    Acc :: Primitive acc => Acc acc

-- | The singleton type for 'OctaveNum'.
data Oct (oct :: OctaveNum) where
    Oct :: Primitive oct => Oct oct

-- | The singleton type for 'Duration'.
data Dur (dur :: Duration) where
    Dur :: Primitive dur => Dur dur

-------------------------------------------------------------------------------
-- Pitches
-- Encapsulates the pitch class, accidental and octave of a note.
-------------------------------------------------------------------------------

-- | The type of pitches.
data PitchType where
    -- | A pitch made up of a pitch class, an accidental and an octave.
    Pitch :: PitchClass -> Accidental -> OctaveNum -> PitchType
    -- | Silence, the pitch of rests.
    Silence :: PitchType

-- | The singleton type for pitches.
data Pit (p :: PitchType) where
    Pit :: Primitive p => Pit p
-------------------------------------------------------------------------------
-- Harmonic types
-------------------------------------------------------------------------------

-- | The mode of a key: major or minor.
data Mode = MajorMode | MinorMode

-- | The seven diatonic scale degrees.
data ScaleDegree = I | II | III | IV | V | VI | VII

-- | A scale degree together with an accidental and an octave.
data DegreeType = Degree ScaleDegree Accidental OctaveNum

-- | The key of a scale, chord or piece.
data KeyType = Key PitchClass Accidental Mode

-- | The root of a chord.
data RootType where
    -- | A pitch constructs a diatonic root.
    PitchRoot :: PitchType -> RootType
    -- | A key and a scale degree constructs a scalar root.
    DegreeRoot :: KeyType -> DegreeType -> RootType

-- | The singleton type for 'Mode'.
data Mod (m :: Mode) = Mod

-- | The singleton type for 'ScaleDegree'.
data ScaDeg (sd :: ScaleDegree) = ScaDeg

-- | The singleton type for 'KeyType'.
data KeyS (k :: KeyType) = KeyS

-- | The singleton type for 'DegreeType'.
data Deg (d :: DegreeType) = Deg

-- | The singleton type for 'Root'.
data Root (r :: RootType) where
    Root :: Primitive r => Root r

-- | Convert a root to a pitch.
--
-- Note: the default octave for scalar roots is 'Oct2'.
type family RootToPitch (dr :: RootType) :: PitchType where
    RootToPitch (PitchRoot p) = p
    -- A scalar root starts from the tonic pitch of the key and moves up
    -- by the semitone offset of the degree in the key's mode.
    RootToPitch (DegreeRoot (Key pc acc m) (Degree sd dacc oct)) =
            HalfStepsUpBy (Pitch pc acc oct) (DegreeOffset m sd dacc)

-- | Calculate the semitone offset of a scale degree in a given mode.
type family DegreeOffset (m :: Mode) (d :: ScaleDegree) (a :: Accidental) where
    -- Major scale: whole-whole-half-whole-whole-whole-half.
    DegreeOffset MajorMode I   Natural = 0
    DegreeOffset MajorMode II  Natural = 2
    DegreeOffset MajorMode III Natural = 4
    DegreeOffset MajorMode IV  Natural = 5
    DegreeOffset MajorMode V   Natural = 7
    DegreeOffset MajorMode VI  Natural = 9
    DegreeOffset MajorMode VII Natural = 11
    -- Natural minor scale: whole-half-whole-whole-half-whole-whole.
    DegreeOffset MinorMode I   Natural = 0
    DegreeOffset MinorMode II  Natural = 2
    DegreeOffset MinorMode III Natural = 3
    DegreeOffset MinorMode IV  Natural = 5
    DegreeOffset MinorMode V   Natural = 7
    DegreeOffset MinorMode VI  Natural = 8
    DegreeOffset MinorMode VII Natural = 10
    -- Accidentals shift the natural offset by one semitone.
    DegreeOffset m sd Flat  = (DegreeOffset m sd Natural) - 1
    DegreeOffset m sd Sharp = (DegreeOffset m sd Natural) + 1
-- | Sharpen a root by a semitone.
type family Sharpen (r :: RootType) :: RootType where
    Sharpen r = PitchRoot (HalfStepUp (RootToPitch r))

-- | Flatten a root by a semitone.
type family Flatten (r :: RootType) :: RootType where
    Flatten r = PitchRoot (HalfStepDown (RootToPitch r))

-- | Halve a type-level natural.
-- The clauses for 8 and 32 are shortcuts for common note durations; the
-- general clause recurses two at a time (so odd numbers round down).
type family HalfOf (n :: Nat) :: Nat where
    HalfOf 0 = 0
    HalfOf 1 = 0
    HalfOf 8 = 4
    HalfOf 32 = 16
    HalfOf n = 1 + (HalfOf (n - 2))

-- | Form a dotted duration (the original duration plus half of it).
type family Dot (d :: Duration) :: Duration where
    Dot 1 = TypeError (Text "Can't have dotted thirty-seconds.")
    Dot n = n + HalfOf n

-- | Create a new partiture with one voice of the given pitch.
type family FromRoot (r :: RootType) (d :: Nat) :: Partiture 1 d where
    FromRoot r d = ((RootToPitch r) +*+ d) :-- None

-- | Create a new partiture with one voice of silence.
type family FromSilence (d :: Nat) :: Partiture 1 d where
    FromSilence d = (Silence +*+ d) :-- None

-- | Create a new partiture with a triplet of three notes: the first note
-- lasts d and the other two last half of d each.
type family FromTriplet (d :: Nat) (r1 :: RootType) (r2 :: RootType) (r3 :: RootType)
        :: Partiture 1 (d + HalfOf d + HalfOf d) where
    FromTriplet d r1 r2 r3 = FromRoot r1 d +|+ FromRoot r2 (HalfOf d) +|+ FromRoot r3 (HalfOf d)

-------------------------------------------------------------------------------
-- Type specialisations
-------------------------------------------------------------------------------

-- | A 'Voice' is made up of a sequence of pitch repetitions.
type Voice l = OptVector PitchType l

-- | A 'Partiture' is made up of a fixed number of voices.
type Partiture n l = Matrix PitchType n l
-------------------------------------------------------------------------------
-- Intervals
-------------------------------------------------------------------------------

-- | The size of the interval.
data IntervalSize =
    Unison | Second | Third | Fourth | Fifth | Sixth | Seventh | Octave

-- | The class of the interval.
data IntervalClass = Maj | Perf | Min | Aug | Dim

-- | The type of intervals.
data IntervalType where
    -- | An interval smaller than 13 semitones, where musical rules
    -- can still be enforced.
    Interval :: IntervalClass -> IntervalSize -> IntervalType
    -- | An interval larger than 13 semitones, which is large enough
    -- so that dissonance effects are not significant.
    Compound :: IntervalType

-- | The singleton type for 'IntervalSize'.
data IS (is :: IntervalSize) = IS

-- | The singleton type for 'IntervalClass'.
data IC (ic :: IntervalClass) = IC

-- | The singleton type for 'IntervalType'.
data Intv (i :: IntervalType) = Intv

-------------------------------------------------------------------------------
-- Interval construction
-------------------------------------------------------------------------------

-- | Make an interval from two arbitrary pitches.
-- The pitches are ordered first (lower pitch first), so the resulting
-- interval is always counted upwards.
type family MakeInterval (p1 :: PitchType) (p2 :: PitchType) :: IntervalType where
    MakeInterval Silence Silence = TypeError (Text "Can't make intervals from rests.")
    MakeInterval Silence p2 = TypeError (Text "Can't make intervals from rests.")
    MakeInterval p1 Silence = TypeError (Text "Can't make intervals from rests.")
    MakeInterval p1 p2 =
        If  (p1 <<=? p2)
            (MakeIntervalOrd p1 p2)
            (MakeIntervalOrd p2 p1)
-- | Make an interval from two ordered pitches (the first pitch is not
-- higher than the second). The clauses of this closed type family are
-- matched top to bottom, so the explicit base cases must precede the
-- general recursive clauses at the end.
type family MakeIntervalOrd (p1 :: PitchType) (p2 :: PitchType) :: IntervalType where
    -- Handling base cases.
    MakeIntervalOrd p p = Interval Perf Unison
    ---- Base cases from C.
    MakeIntervalOrd (Pitch C Flat o)    (Pitch C Natural o) = Interval Aug Unison
    MakeIntervalOrd (Pitch C Natural o) (Pitch C Sharp o)   = Interval Aug Unison
    MakeIntervalOrd (Pitch C Natural o) (Pitch D Flat o)    = Interval Min Second
    MakeIntervalOrd (Pitch C acc o) (Pitch D acc o) = Interval Maj Second
    MakeIntervalOrd (Pitch C acc o) (Pitch E acc o) = Interval Maj Third
    MakeIntervalOrd (Pitch C acc o) (Pitch F acc o) = Interval Perf Fourth
    MakeIntervalOrd (Pitch C acc o) (Pitch G acc o) = Interval Perf Fifth
    MakeIntervalOrd (Pitch C acc o) (Pitch A acc o) = Interval Maj Sixth
    MakeIntervalOrd (Pitch C acc o) (Pitch B acc o) = Interval Maj Seventh
    ---- Base cases from F.
    MakeIntervalOrd (Pitch F Flat o)    (Pitch F Natural o) = Interval Aug Unison
    MakeIntervalOrd (Pitch F Natural o) (Pitch F Sharp o)   = Interval Aug Unison
    MakeIntervalOrd (Pitch F Natural o) (Pitch G Flat o)    = Interval Min Second
    MakeIntervalOrd (Pitch F acc o) (Pitch G acc o) = Interval Maj Second
    MakeIntervalOrd (Pitch F acc o) (Pitch A acc o) = Interval Maj Third
    MakeIntervalOrd (Pitch F acc o) (Pitch B acc o) = Interval Aug Fourth
    -- Crossing the octave boundary: the named interval only applies when
    -- the octaves are adjacent, otherwise the interval is compound.
    MakeIntervalOrd (Pitch F acc o1) (Pitch C acc o2) =
            IntervalOrCompound o1 o2 (Interval Perf Fifth)
    MakeIntervalOrd (Pitch F acc o1) (Pitch D acc o2) =
            IntervalOrCompound o1 o2 (Interval Maj Sixth)
    MakeIntervalOrd (Pitch F acc o1) (Pitch E acc o2) =
            IntervalOrCompound o1 o2 (Interval Maj Seventh)
    ---- Base cases from A.
    MakeIntervalOrd (Pitch A Flat o)    (Pitch A Natural o) = Interval Aug Unison
    MakeIntervalOrd (Pitch A Natural o) (Pitch A Sharp o)   = Interval Aug Unison
    MakeIntervalOrd (Pitch A Natural o) (Pitch B Flat o)    = Interval Min Second
    MakeIntervalOrd (Pitch A acc o) (Pitch B acc o) = Interval Maj Second
    MakeIntervalOrd (Pitch A acc o1) (Pitch C acc o2) =
            IntervalOrCompound o1 o2 (Interval Min Third)
    MakeIntervalOrd (Pitch A acc o1) (Pitch D acc o2) =
            IntervalOrCompound o1 o2 (Interval Perf Fourth)
    MakeIntervalOrd (Pitch A acc o1) (Pitch E acc o2) =
            IntervalOrCompound o1 o2 (Interval Perf Fifth)
    MakeIntervalOrd (Pitch A acc o1) (Pitch F acc o2) =
            IntervalOrCompound o1 o2 (Interval Min Sixth)
    MakeIntervalOrd (Pitch A acc o1) (Pitch G acc o2) =
            IntervalOrCompound o1 o2 (Interval Min Seventh)
    -- Handling perfect and augmented octaves.
    MakeIntervalOrd (Pitch C acc o1) (Pitch C acc o2) =
            IntervalOrCompound o1 o2 (Interval Perf Octave)
    MakeIntervalOrd (Pitch C Natural o1) (Pitch C Sharp o2) =
            IntervalOrCompound o1 o2 (Interval Aug Octave)
    MakeIntervalOrd (Pitch C Flat o1) (Pitch C Natural o2) =
            IntervalOrCompound o1 o2 (Interval Aug Octave)
    MakeIntervalOrd (Pitch F acc o1) (Pitch F acc o2) =
            IntervalOrCompound o1 o2 (Interval Perf Octave)
    MakeIntervalOrd (Pitch F Natural o1) (Pitch F Sharp o2) =
            IntervalOrCompound o1 o2 (Interval Aug Octave)
    MakeIntervalOrd (Pitch F Flat o1) (Pitch F Natural o2) =
            IntervalOrCompound o1 o2 (Interval Aug Octave)
    MakeIntervalOrd (Pitch A acc o1) (Pitch A acc o2) =
            IntervalOrCompound o1 o2 (Interval Perf Octave)
    MakeIntervalOrd (Pitch A Natural o1) (Pitch A Sharp o2) =
            IntervalOrCompound o1 o2 (Interval Aug Octave)
    MakeIntervalOrd (Pitch A Flat o1) (Pitch A Natural o2) =
            IntervalOrCompound o1 o2 (Interval Aug Octave)
    -- Handling accidental first pitch: widen (flat root) or narrow
    -- (sharp root) the interval measured from the natural root.
    MakeIntervalOrd (Pitch C Flat o) (Pitch pc2 acc o) =
            Expand (MakeIntervalOrd (Pitch C Natural o) (Pitch pc2 acc o))
    MakeIntervalOrd (Pitch C Sharp o) (Pitch pc2 acc o) =
            Shrink (MakeIntervalOrd (Pitch C Natural o) (Pitch pc2 acc o))
    MakeIntervalOrd (Pitch F Flat o) (Pitch E Sharp o)   = Interval Min Second
    MakeIntervalOrd (Pitch F Flat o) (Pitch E Natural o) = Interval Dim Second
    MakeIntervalOrd (Pitch E Natural o) (Pitch F Flat o) = Interval Dim Second
    MakeIntervalOrd (Pitch F Flat o) (Pitch pc2 acc o) =
            Expand (MakeIntervalOrd (Pitch F Natural o) (Pitch pc2 acc o))
    MakeIntervalOrd (Pitch F Sharp o) (Pitch pc2 acc o) =
            Shrink (MakeIntervalOrd (Pitch F Natural o) (Pitch pc2 acc o))
    MakeIntervalOrd (Pitch A Flat o) (Pitch pc2 acc o) =
            Expand (MakeIntervalOrd (Pitch A Natural o) (Pitch pc2 acc o))
    MakeIntervalOrd (Pitch A Sharp o) (Pitch pc2 acc o) =
            Shrink (MakeIntervalOrd (Pitch A Natural o) (Pitch pc2 acc o))
    -- Handling accidental second pitch: a sharp top widens, a flat top
    -- narrows the interval measured to the natural pitch.
    MakeIntervalOrd (Pitch C Natural o) (Pitch pc2 Sharp o) =
            Expand (MakeIntervalOrd (Pitch C Natural o) (Pitch pc2 Natural o))
    MakeIntervalOrd (Pitch C Natural o) (Pitch pc2 Flat o) =
            Shrink (MakeIntervalOrd (Pitch C Natural o) (Pitch pc2 Natural o))
    MakeIntervalOrd (Pitch F Natural o) (Pitch pc2 Sharp o) =
            Expand (MakeIntervalOrd (Pitch F Natural o) (Pitch pc2 Natural o))
    MakeIntervalOrd (Pitch F Natural o) (Pitch pc2 Flat o) =
            Shrink (MakeIntervalOrd (Pitch F Natural o) (Pitch pc2 Natural o))
    MakeIntervalOrd (Pitch A Natural o) (Pitch pc2 Sharp o) =
            Expand (MakeIntervalOrd (Pitch A Natural o) (Pitch pc2 Natural o))
    MakeIntervalOrd (Pitch A Natural o) (Pitch pc2 Flat o) =
            Shrink (MakeIntervalOrd (Pitch A Natural o) (Pitch pc2 Natural o))
    -- Handling the general case: shift both pitches down until a base
    -- case (rooted at C, F or A) applies.
    MakeIntervalOrd (Pitch pc1 acc1 o) (Pitch pc2 acc2 o) =
            MakeIntervalOrd (HalfStepDown (Pitch pc1 acc1 o)) (HalfStepDown (Pitch pc2 acc2 o))
    MakeIntervalOrd (Pitch pc1 acc1 o1) (Pitch pc2 acc2 o2) =
            If  (NextOct o1 o2)
                (MakeIntervalOrd (HalfStepDown (Pitch pc1 acc1 o1)) (HalfStepDown (Pitch pc2 acc2 o2)))
                Compound
    -- Handling erroneous construction (shouldn't happen).
    MakeIntervalOrd _ _ = TypeError (Text "Invalid interval.")
-- | Shrink an interval by a semitone, preserving its size where possible
-- (e.g. a major third becomes a minor third). Unisons and diminished
-- seconds cannot be made any smaller, so those cases are type errors.
type family Shrink (i :: IntervalType) :: IntervalType where
    Shrink (Interval Perf Unison) = TypeError (Text "Can't shrink perfect unisons.")
    Shrink (Interval Perf is)     = Interval Dim is
    Shrink (Interval Min is)      = Interval Dim is
    Shrink (Interval Maj is)      = Interval Min is
    -- Augmented perfect-class intervals shrink back to perfect.
    Shrink (Interval Aug Unison)  = Interval Perf Unison
    Shrink (Interval Aug Fourth)  = Interval Perf Fourth
    Shrink (Interval Aug Fifth)   = Interval Perf Fifth
    Shrink (Interval Aug Octave)  = Interval Perf Octave
    Shrink (Interval Aug is)      = Interval Maj is
    -- Diminished intervals at the bottom of the range cannot shrink.
    Shrink (Interval Dim Unison)  = TypeError (Text "Can't shrink diminished unisons.")
    Shrink (Interval Dim Second)  = TypeError (Text "Can't shrink diminished seconds.")
    Shrink (Interval Dim Fifth)   = Interval Perf Fourth
    Shrink (Interval Dim Sixth)   = Interval Dim Fifth
    -- Otherwise a diminished interval shrinks to the minor interval of
    -- the next smaller size.
    Shrink (Interval Dim is)      = Interval Min (IntSizePred is)
    Shrink Compound               = Compound
-- | Expand an interval by a semitone, preserving its size where possible
-- (e.g. a minor third becomes a major third). A diminished unison is not
-- a valid interval, so expanding it is a type error.
type family Expand (i :: IntervalType) :: IntervalType where
    Expand (Interval Perf Octave)  = Interval Aug Octave
    Expand (Interval Perf is)      = Interval Aug is
    Expand (Interval Maj is)       = Interval Aug is
    Expand (Interval Min is)       = Interval Maj is
    Expand (Interval Dim Unison)   = TypeError (Text "Can't expand diminished unisons.")
    -- Diminished perfect-class intervals expand back to perfect.
    Expand (Interval Dim Fourth)   = Interval Perf Fourth
    Expand (Interval Dim Fifth)    = Interval Perf Fifth
    Expand (Interval Dim Octave)   = Interval Perf Octave
    Expand (Interval Dim is)       = Interval Min is
    -- Augmented intervals at the top of their size spill into the next size.
    Expand (Interval Aug Third)    = Interval Aug Fourth
    Expand (Interval Aug Fourth)   = Interval Perf Fifth
    Expand (Interval Aug Seventh)  = Interval Aug Octave
    Expand (Interval Aug Octave)   = Compound
    Expand (Interval Aug is)       = Interval Maj (IntSizeSucc is)
    Expand Compound                = Compound
-------------------------------------------------------------------------------
-- Enumerations and orderings
-- Implementation of enumerators and ordering relations for applicable types.
-------------------------------------------------------------------------------

-- | Convert a pitch to a natural number (equal to its MIDI code).
-- The explicit clauses for C naturals are shortcuts that keep the
-- semitone-by-semitone recursion shallow.
type family PitchToNat (p :: PitchType) :: Nat where
    PitchToNat Silence = TypeError (Text "Can't convert a rest to a number.")
    PitchToNat (Pitch C Natural Oct_1) = 0
    PitchToNat (Pitch C Sharp Oct_1) = 1
    PitchToNat (Pitch D Flat Oct_1) = 1
    PitchToNat (Pitch C Natural Oct1) = 24
    PitchToNat (Pitch C Natural Oct2) = 36
    PitchToNat (Pitch C Natural Oct3) = 48
    PitchToNat (Pitch C Natural Oct4) = 60
    PitchToNat (Pitch C Natural Oct5) = 72
    PitchToNat (Pitch C Natural Oct6) = 84
    PitchToNat p = 1 + PitchToNat (HalfStepDown p)

-- | Convert a natural number to a suitable pitch.
-- Not a functional relation, so usage is not recommended.
type family NatToPitch (n :: Nat) where
    NatToPitch 0 = Pitch C Natural Oct_1
    NatToPitch 1 = Pitch C Sharp Oct_1
    NatToPitch n = HalfStepUp (NatToPitch (n - 1))

-- | Returns True if the first pitch is lower than or equal to the second
-- (the clauses compare with '<=?' on the numeric encodings).
type family (p1 :: PitchType) <<=? (p2 :: PitchType) :: Bool where
    p <<=? p = True
    (Pitch pc1 acc oct) <<=? (Pitch pc2 acc oct) = ClassToNat pc1 <=? ClassToNat pc2
    (Pitch pc acc oct) <<=? (Pitch pc Sharp oct) = True
    (Pitch pc Sharp oct) <<=? (Pitch pc acc oct) = False
    (Pitch pc Flat oct) <<=? (Pitch pc acc oct) = True
    (Pitch pc acc oct) <<=? (Pitch pc Flat oct) = False
    -- E sharp lies a semitone above F flat.
    (Pitch E Sharp oct) <<=? (Pitch F Flat oct) = False
    (Pitch F Flat oct) <<=? (Pitch E Sharp oct) = True
    -- B sharp and C flat compare across the octave boundary.
    (Pitch B Sharp oct) <<=? (Pitch C Flat oct') =
            If (NextOct oct oct') False ((Pitch B Natural oct) <<=? (Pitch C Flat oct'))
    (Pitch C Flat oct) <<=? (Pitch B Sharp oct') =
            If (NextOct oct' oct) True ((Pitch C Natural oct) <<=? (Pitch B Sharp oct'))
    (Pitch pc1 acc1 oct) <<=? (Pitch pc2 acc2 oct) = ClassToNat pc1 <=? ClassToNat pc2
    (Pitch pc1 acc1 oct1) <<=? (Pitch pc2 acc2 oct2) = OctToNat oct1 <=? OctToNat oct2
    p1 <<=? p2 = PitchToNat p1 <=? PitchToNat p2

-- | Returns True if the first pitch is strictly lower than the second.
-- NOTE(review): distinct but enharmonically equal pitches fall through
-- to '<<=?' and may compare as True -- confirm this is intended.
type family (p1 :: PitchType) <<? (p2 :: PitchType) where
    p <<? p = False
    p1 <<? p2 = (p1 <<=? p2)
-- | Enharmonic equality of pitches: two pitches are equal if they denote
-- the same frequency, even when spelled differently.
type family (p :: PitchType) =?= (q :: PitchType) :: Bool where
    Silence =?= Silence = True
    Silence =?= _ = False
    _ =?= Silence = False
    Pitch pc acc oct =?= Pitch pc acc oct = True
    -- Enharmonic pairs across the B/C octave boundary.
    Pitch C Flat o1 =?= Pitch B Natural o2 = o1 .~. OctSucc o2
    Pitch C Natural o1 =?= Pitch B Sharp o2 = o1 .~. OctSucc o2
    -- Enharmonic pairs around the E/F half step.
    Pitch E Natural oct =?= Pitch F Flat oct = True
    Pitch E Sharp oct =?= Pitch F Natural oct = True
    Pitch F Flat oct =?= Pitch E Natural oct = True
    Pitch F Natural oct =?= Pitch E Sharp oct = True
    Pitch B Natural o1 =?= Pitch C Flat o2 = OctSucc o1 .~. o2
    Pitch B Sharp o1 =?= Pitch C Natural o2 = OctSucc o1 .~. o2
    -- A sharp equals the flat of the succeeding pitch class (and vice versa).
    Pitch pc1 Sharp oct =?= Pitch pc2 Flat oct = ClassSucc pc1 .~. pc2
    Pitch pc1 Flat oct =?= Pitch pc2 Sharp oct = pc1 .~. ClassSucc pc2
    _ =?= _ = False

-- | Convert an octave to a natural number.
type family OctToNat (o :: OctaveNum) :: Nat where
    OctToNat Oct_1 = 0
    OctToNat Oct0 = 1
    OctToNat Oct1 = 2
    OctToNat Oct2 = 3
    OctToNat Oct3 = 4
    OctToNat Oct4 = 5
    OctToNat Oct5 = 6
    OctToNat Oct6 = 7
    OctToNat Oct7 = 8
    OctToNat Oct8 = 9

-- | Convert a natural number to an octave.
type family NatToOct (n :: Nat) :: OctaveNum where
    NatToOct 0 = Oct_1
    NatToOct 1 = Oct0
    NatToOct 2 = Oct1
    NatToOct 3 = Oct2
    NatToOct 4 = Oct3
    NatToOct 5 = Oct4
    NatToOct 6 = Oct5
    NatToOct 7 = Oct6
    NatToOct 8 = Oct7
    NatToOct 9 = Oct8
    NatToOct _ = TypeError (Text "Invalid octave.")
-- | Increase the octave by the given number.
type family IncreaseOctave (o :: OctaveNum) (n :: Nat) :: OctaveNum where
    IncreaseOctave o n = NatToOct (OctToNat o + n)

-- | Decrease the octave by the given number.
type family DecreaseOctave (o :: OctaveNum) (n :: Nat) :: OctaveNum where
    DecreaseOctave o n = NatToOct (OctToNat o - n)

-- | Increment an octave.
type family OctSucc (o :: OctaveNum) :: OctaveNum where
    OctSucc o = IncreaseOctave o 1

-- | Decrement an octave.
type family OctPred (o :: OctaveNum) :: OctaveNum where
    OctPred o = DecreaseOctave o 1

-- | Returns True exactly when o2 is the octave immediately above o1.
type family NextOct (o1 :: OctaveNum) (o2 :: OctaveNum) :: Bool where
    NextOct Oct_1 Oct0 = True
    NextOct Oct0 Oct1 = True
    NextOct Oct1 Oct2 = True
    NextOct Oct2 Oct3 = True
    NextOct Oct3 Oct4 = True
    NextOct Oct4 Oct5 = True
    NextOct Oct5 Oct6 = True
    NextOct Oct6 Oct7 = True
    NextOct Oct7 Oct8 = True
    NextOct _ _ = False

-- | Returns the interval i if o2 immediately follows o1 (so the two
-- pitches span at most a simple interval), otherwise returns 'Compound'.
type family IntervalOrCompound (o1 :: OctaveNum) (o2 :: OctaveNum) (i :: IntervalType)
        :: IntervalType where
    IntervalOrCompound o1 o2 int = If (NextOct o1 o2) int Compound
-- | Convert a pitch class to a natural number (zero-based diatonic index).
type family ClassToNat (pc :: PitchClass) :: Nat where
    ClassToNat C = 0
    ClassToNat D = 1
    ClassToNat E = 2
    ClassToNat F = 3
    ClassToNat G = 4
    ClassToNat A = 5
    ClassToNat B = 6

-- | Convert a natural number to a pitch class.
-- Numbers are taken modulo 7: e.g. 8 corresponds to the pitch 8 mod 7 = 1 = D
type family NatToClass (n :: Nat) :: PitchClass where
    NatToClass 0 = C
    NatToClass 1 = D
    NatToClass 2 = E
    NatToClass 3 = F
    NatToClass 4 = G
    NatToClass 5 = A
    NatToClass 6 = B
    NatToClass n = NatToClass (n - 7)

-- | Increase the pitch class by a given number (wrapping modulo 7).
type family IncreaseClass (pc :: PitchClass) (n :: Nat) :: PitchClass where
    IncreaseClass pc n = NatToClass (ClassToNat pc + n)

-- | Decrease the pitch class by a given number.
type family DecreaseClass (pc :: PitchClass) (n :: Nat) :: PitchClass where
    DecreaseClass pc n = NatToClass (ClassToNat pc - n)

-- | Increment a pitch class.
type family ClassSucc (pc :: PitchClass) :: PitchClass where
    ClassSucc pc = IncreaseClass pc 1

-- | Decrement a pitch class.
type family ClassPred (pc :: PitchClass) :: PitchClass where
    ClassPred pc = DecreaseClass pc 1

-- | Convert an interval size to a natural number (zero-based index).
type family IntSizeToNat (is :: IntervalSize) :: Nat where
    IntSizeToNat Unison = 0
    IntSizeToNat Second = 1
    IntSizeToNat Third = 2
    IntSizeToNat Fourth = 3
    IntSizeToNat Fifth = 4
    IntSizeToNat Sixth = 5
    IntSizeToNat Seventh = 6
    IntSizeToNat Octave = 7

-- | Convert a natural number to an interval size.
type family NatToIntSize (n :: Nat) :: IntervalSize where
    NatToIntSize 0 = Unison
    NatToIntSize 1 = Second
    NatToIntSize 2 = Third
    NatToIntSize 3 = Fourth
    NatToIntSize 4 = Fifth
    NatToIntSize 5 = Sixth
    NatToIntSize 6 = Seventh
    NatToIntSize 7 = Octave
    NatToIntSize _ = TypeError (Text "Invalid interval size.")

-- | Increase the interval size by a given number.
type family IncreaseIntSize (is :: IntervalSize) (n :: Nat) :: IntervalSize where
    IncreaseIntSize is n = NatToIntSize (IntSizeToNat is + n)

-- | Decrease the interval size by a given number.
type family DecreaseIntSize (is :: IntervalSize) (n :: Nat) :: IntervalSize where
    DecreaseIntSize is n = NatToIntSize (IntSizeToNat is - n)

-- | Increment an interval size.
type family IntSizeSucc (is :: IntervalSize) :: IntervalSize where
    IntSizeSucc is = IncreaseIntSize is 1

-- | Decrement an interval size.
type family IntSizePred (is :: IntervalSize) :: IntervalSize where
    IntSizePred is = DecreaseIntSize is 1
-- | Calculate the width of an interval in half-steps.
-- Perfect-class intervals (unison, fourth, fifth, octave) are listed
-- explicitly; major seconds/thirds/sixths/sevenths are listed and the
-- remaining augmented/minor/diminished widths are derived from them.
type family IntervalWidth (i :: IntervalType) :: Nat where
    IntervalWidth (Interval Dim Unison) = TypeError (Text "Can't diminish unisons.")
    IntervalWidth (Interval Perf Unison) = 0
    IntervalWidth (Interval Aug Unison) = 1
    IntervalWidth (Interval Dim Fourth) = 4
    IntervalWidth (Interval Perf Fourth) = 5
    IntervalWidth (Interval Aug Fourth) = 6
    IntervalWidth (Interval Dim Fifth) = 6
    IntervalWidth (Interval Perf Fifth) = 7
    IntervalWidth (Interval Aug Fifth) = 8
    IntervalWidth (Interval Dim Octave) = 11
    IntervalWidth (Interval Perf Octave) = 12
    IntervalWidth (Interval Aug Octave) = 13
    IntervalWidth (Interval Maj Second) = 2
    IntervalWidth (Interval Maj Third) = 4
    IntervalWidth (Interval Maj Sixth) = 9
    IntervalWidth (Interval Maj Seventh) = 11
    IntervalWidth (Interval Aug is) = IntervalWidth (Interval Maj is) + 1
    IntervalWidth (Interval Min is) = IntervalWidth (Interval Maj is) - 1
    IntervalWidth (Interval Dim is) = IntervalWidth (Interval Maj is) - 2
-- | Move a pitch up by a semitone. Results are spelled with naturals and
-- sharps (flats are first resolved to naturals).
type family HalfStepUp (p :: PitchType) :: PitchType where
    HalfStepUp Silence = Silence
    HalfStepUp (Pitch B Flat o) = Pitch B Natural o
    -- B crosses into the next octave.
    HalfStepUp (Pitch B acc o) = Pitch C acc (OctSucc o)
    -- E/F are only a half step apart.
    HalfStepUp (Pitch E Flat o) = Pitch E Natural o
    HalfStepUp (Pitch E acc o) = Pitch F acc o
    HalfStepUp (Pitch pc Flat o) = Pitch pc Natural o
    HalfStepUp (Pitch pc Natural o) = Pitch pc Sharp o
    HalfStepUp (Pitch pc Sharp o) = Pitch (ClassSucc pc) Natural o

-- | Move a pitch down by a semitone. Results are spelled with naturals
-- and flats (sharps are first resolved to naturals).
type family HalfStepDown (p :: PitchType) :: PitchType where
    HalfStepDown Silence = Silence
    HalfStepDown (Pitch C Sharp o) = Pitch C Natural o
    -- C crosses into the previous octave.
    HalfStepDown (Pitch C acc o) = Pitch B acc (OctPred o)
    -- E/F are only a half step apart.
    HalfStepDown (Pitch F Sharp o) = Pitch F Natural o
    HalfStepDown (Pitch F acc o) = Pitch E acc o
    HalfStepDown (Pitch pc Flat o) = Pitch (ClassPred pc) Natural o
    HalfStepDown (Pitch pc Natural o) = Pitch pc Flat o
    HalfStepDown (Pitch pc Sharp o) = Pitch pc Natural o

-- | Move a pitch up by the specified number of semitones.
type family HalfStepsUpBy (p :: PitchType) (n :: Nat) :: PitchType where
    HalfStepsUpBy p 0 = p
    HalfStepsUpBy p n = HalfStepUp (HalfStepsUpBy p (n - 1))

-- | Move a pitch down by the specified number of semitones.
type family HalfStepsDownBy (p :: PitchType) (n :: Nat) :: PitchType where
    HalfStepsDownBy p 0 = p
    HalfStepsDownBy p n = HalfStepDown (HalfStepsDownBy p (n - 1))

-- | Raise a pitch by an interval.
-- For minor and diminished intervals the pitch is raised one semitone
-- too far and then stepped back down, so the result picks up the
-- flat-side spelling produced by 'HalfStepDown'.
type family RaiseBy (p :: PitchType) (i :: IntervalType) :: PitchType where
    RaiseBy Silence _ = Silence
    RaiseBy _ Compound = TypeError (Text "Can't shift by compound interval")
    RaiseBy p (Interval Min is) = HalfStepDown (HalfStepsUpBy p (IntervalWidth (Interval Min is) + 1))
    RaiseBy p (Interval Dim is) = HalfStepDown (HalfStepsUpBy p (IntervalWidth (Interval Dim is) + 1))
    RaiseBy p i = HalfStepsUpBy p (IntervalWidth i)

-- | Lower a pitch by an interval.
-- Dual to 'RaiseBy': major and augmented intervals overshoot downwards
-- and step back up to pick up the sharp-side spelling of 'HalfStepUp'.
type family LowerBy (p :: PitchType) (i :: IntervalType) :: PitchType where
    LowerBy Silence _ = Silence
    LowerBy _ Compound = TypeError (Text "Can't shift by compound interval")
    LowerBy p (Interval Maj is) = HalfStepUp (HalfStepsDownBy p (IntervalWidth (Interval Maj is) + 1))
    LowerBy p (Interval Aug is) = HalfStepUp (HalfStepsDownBy p (IntervalWidth (Interval Aug is) + 1))
    LowerBy p i = HalfStepsDownBy p (IntervalWidth i)
-- | Raise all pitches in a voice by an interval.
type family RaiseAllBy (ps :: Voice l) (i :: IntervalType) :: Voice l where
    RaiseAllBy End _ = End
    RaiseAllBy (p :* d :- ps) i = RaiseBy p i :* d :- RaiseAllBy ps i

-- | Raise multiple pitches by an interval.
type family RaiseAllBy' (ps :: Vector PitchType n) (i :: IntervalType) :: Vector PitchType n where
    RaiseAllBy' None _ = None
    RaiseAllBy' (p :-- ps) i = RaiseBy p i :-- RaiseAllBy' ps i

-- | Lower all pitches in a voice by an interval.
type family LowerAllBy (ps :: Voice l) (i :: IntervalType) :: Voice l where
    LowerAllBy End _ = End
    LowerAllBy (p :* d :- ps) i = LowerBy p i :* d :- LowerAllBy ps i

-- | Lower multiple pitches by an interval.
type family LowerAllBy' (ps :: Vector PitchType n) (i :: IntervalType) :: Vector PitchType n where
    LowerAllBy' None _ = None
    LowerAllBy' (p :-- ps) i = LowerBy p i :-- LowerAllBy' ps i

-- | Raise a pitch by an octave.
type family RaiseByOct (p :: PitchType) :: PitchType where
    RaiseByOct p = RaiseBy p (Interval Perf Octave)

-- | Lower a pitch by an octave.
type family LowerByOct (p :: PitchType) :: PitchType where
    LowerByOct p = LowerBy p (Interval Perf Octave)

-- | Raise all pitches in a voice by an octave.
type family RaiseAllByOct (ps :: Voice l) :: Voice l where
    RaiseAllByOct v = RaiseAllBy v (Interval Perf Octave)

-- | Transpose a partiture up by the given interval.
type family TransposeUpBy (p :: Partiture n l) (i :: IntervalType) :: Partiture n l where
    TransposeUpBy _ Compound = TypeError (Text "Can't transpose by compound interval.")
    TransposeUpBy None i = None
    TransposeUpBy (v :-- vs) i = RaiseAllBy v i :-- TransposeUpBy vs i

-- | Transpose a partiture down by the given interval.
type family TransposeDownBy (p :: Partiture n l) (i :: IntervalType) :: Partiture n l where
    TransposeDownBy _ Compound = TypeError (Text "Can't transpose by compound interval.")
    TransposeDownBy None i = None
    TransposeDownBy (v :-- vs) i = LowerAllBy v i :-- TransposeDownBy vs i
-------------------------------------------------------------------------------
-- Primitive instances
-------------------------------------------------------------------------------

-- Octaves: 'prim' is the MIDI offset of the octave's C (12 apart);
-- 'pretty' uses the library's octave-mark notation.
instance Primitive Oct_1 where type Rep Oct_1 = Int ; prim _ = 0   ; pretty _ = "_5"
instance Primitive Oct0  where type Rep Oct0  = Int ; prim _ = 12  ; pretty _ = "_4"
instance Primitive Oct1  where type Rep Oct1  = Int ; prim _ = 24  ; pretty _ = "_3"
instance Primitive Oct2  where type Rep Oct2  = Int ; prim _ = 36  ; pretty _ = "__"
instance Primitive Oct3  where type Rep Oct3  = Int ; prim _ = 48  ; pretty _ = "_ "
instance Primitive Oct4  where type Rep Oct4  = Int ; prim _ = 60  ; pretty _ = "  "
instance Primitive Oct5  where type Rep Oct5  = Int ; prim _ = 72  ; pretty _ = "' "
instance Primitive Oct6  where type Rep Oct6  = Int ; prim _ = 84  ; pretty _ = "''"
instance Primitive Oct7  where type Rep Oct7  = Int ; prim _ = 96  ; pretty _ = "'3"
instance Primitive Oct8  where type Rep Oct8  = Int ; prim _ = 108 ; pretty _ = "'4"

-- Pitch classes: 'prim' is the semitone offset within the octave.
instance Primitive C where type Rep C = Int ; prim _ = 0  ; pretty _ = "C"
instance Primitive D where type Rep D = Int ; prim _ = 2  ; pretty _ = "D"
instance Primitive E where type Rep E = Int ; prim _ = 4  ; pretty _ = "E"
instance Primitive F where type Rep F = Int ; prim _ = 5  ; pretty _ = "F"
instance Primitive G where type Rep G = Int ; prim _ = 7  ; pretty _ = "G"
instance Primitive A where type Rep A = Int ; prim _ = 9  ; pretty _ = "A"
instance Primitive B where type Rep B = Int ; prim _ = 11 ; pretty _ = "B"

-- Accidentals: 'prim' is the semitone adjustment they apply.
instance Primitive Natural where type Rep Natural = Int ; prim _ = 0  ; pretty _ = " "
instance Primitive Flat    where type Rep Flat    = Int ; prim _ = -1 ; pretty _ = "b"
instance Primitive Sharp   where type Rep Sharp   = Int ; prim _ = 1  ; pretty _ = "#"
-- "Equality constraints are literally magic."
--      - Michael Gale, 2017

-- | A pitch is reflected as the sum of the offsets of its components
-- (its MIDI note number).
instance (IntRep pc, IntRep acc, IntRep oct)
        => Primitive (Pitch pc acc oct) where
    type Rep (Pitch pc acc oct) = Int
    prim p = prim (PC @pc) + prim (Acc @acc) + prim (Oct @oct)
    pretty p = pretty (PC @pc) ++ pretty (Acc @acc) ++ pretty (Oct @oct)

-- | A scale degree is reflected analogously to a pitch.
instance (IntRep sd, IntRep acc, IntRep oct) => Primitive (Degree sd acc oct) where
    type Rep (Degree sd acc oct) = Int
    prim _ = prim (ScaDeg @sd) + prim (Acc @acc) + prim (Oct @oct)
    pretty _ = pretty (ScaDeg @sd) ++ pretty (Acc @acc) ++ pretty (Oct @oct)

-- Rests still occupy a pitch slot; 60 (middle C) is the placeholder.
-- NOTE(review): presumably the backend ignores this value for rests -- confirm.
instance Primitive Silence where type Rep Silence = Int ; prim s = 60 ; pretty s = "~~~~"

instance IntRep p => Primitive (Root (PitchRoot p)) where
    type Rep (Root (PitchRoot p)) = Int
    prim r = prim (Pit @p)
    pretty r = pretty (Pit @p)

-- Modes: reflected as a Boolean (True = major).
instance Primitive MajorMode where type Rep MajorMode = Bool ; prim m = True  ; pretty m = "Major"
instance Primitive MinorMode where type Rep MinorMode = Bool ; prim m = False ; pretty m = "minor"

-- Scale degrees: reflected as zero-based indices.
instance Primitive I   where type Rep I   = Int ; prim d = 0 ; pretty d = "I"
instance Primitive II  where type Rep II  = Int ; prim d = 1 ; pretty d = "II"
instance Primitive III where type Rep III = Int ; prim d = 2 ; pretty d = "III"
instance Primitive IV  where type Rep IV  = Int ; prim d = 3 ; pretty d = "IV"
instance Primitive V   where type Rep V   = Int ; prim d = 4 ; pretty d = "V"
instance Primitive VI  where type Rep VI  = Int ; prim d = 5 ; pretty d = "VI"
instance Primitive VII where type Rep VII = Int ; prim d = 6 ; pretty d = "VII"

-- | A key is reflected as its scale: the major or minor interval pattern
-- transposed by the tonic's pitch class and accidental.
instance (IntRep pc, IntRep acc, BoolRep mo) => Primitive (Key pc acc mo) where
    type Rep (Key pc acc mo) = [Int]
    prim k = (+ (prim (PC @pc) + prim (Acc @acc))) <$> baseScale
        where baseScale = if (prim (Mod @mo))
                then [0, 2, 4, 5, 7, 9, 11]
                else [0, 2, 3, 5, 7, 8, 10]
    pretty k = pretty (PC @pc) ++ pretty (Acc @acc) ++ " " ++ pretty (Mod @mo)

-- | A scalar root is reflected via the pitch it resolves to; the equality
-- constraint names that pitch so it can be used with 'Pit'.
instance (IntRep p, RootToPitch (DegreeRoot k deg) ~ p, Primitive deg, Primitive k)
        => Primitive (DegreeRoot k deg) where
    type Rep (DegreeRoot k deg) = Int
    prim r = prim (Pit @p)
    pretty r = pretty (Deg @deg)

instance IntRep p => Primitive (PitchRoot p) where
    type Rep (PitchRoot p) = Int
    prim p = prim (Pit @p)
    pretty p = pretty (Pit @p)

-- | Durations: 'prim' is the numeric value itself; 'pretty' abbreviates
-- the standard note lengths (Th = thirty-second ... Wh. = dotted whole),
-- falling back to ":n" for any other number.
instance KnownNat n => Primitive n where
    type Rep n = Int
    prim = fromInteger . natVal
    pretty (natVal -> 1) = "Th"
    pretty (natVal -> 2) = "Si"
    pretty (natVal -> 3) = "Si."
    pretty (natVal -> 4) = "Ei"
    pretty (natVal -> 6) = "Ei."
    pretty (natVal -> 8) = "Qu"
    pretty (natVal -> 12) = "Qu."
    pretty (natVal -> 16) = "Ha"
    pretty (natVal -> 24) = "Ha."
    pretty (natVal -> 32) = "Wh"
    pretty (natVal -> 48) = "Wh."
    pretty (natVal -> n) = ":" ++ show n
-- Intervals
---- Interval classes: each is represented by the semitone adjustment
---- it applies to the width of the corresponding interval size.
instance Primitive Maj where
  type Rep Maj = Int -> Int
  pretty _ = "Maj"
  prim _ = id

instance Primitive Min where
  type Rep Min = Int -> Int
  pretty _ = "Min"
  prim _ = pred

instance Primitive Perf where
  type Rep Perf = Int -> Int
  pretty _ = "Perf"
  prim _ = id

instance Primitive Aug where
  type Rep Aug = Int -> Int
  pretty _ = "Aug"
  prim _ = (+ 1)

-- NOTE(review): 'prim' for Dim is only defined for these interval
-- widths; any other argument is a runtime pattern-match failure.
instance Primitive Dim where
  type Rep Dim = Int -> Int
  pretty _ = "Dim"
  prim _ 2 = 0
  prim _ 4 = 2
  prim _ 5 = 4
  prim _ 7 = 6
  prim _ 9 = 7
  prim _ 11 = 9
  prim _ 12 = 11
---- Interval sizes: 'prim' is the width in semitones of the major or
---- perfect interval of that size; 'pretty' is the ordinal number.
instance Primitive Unison where
  type Rep Unison = Int
  pretty _ = "1"
  prim _ = 0

instance Primitive Second where
  type Rep Second = Int
  pretty _ = "2"
  prim _ = 2

instance Primitive Third where
  type Rep Third = Int
  pretty _ = "3"
  prim _ = 4

instance Primitive Fourth where
  type Rep Fourth = Int
  pretty _ = "4"
  prim _ = 5

instance Primitive Fifth where
  type Rep Fifth = Int
  pretty _ = "5"
  prim _ = 7

instance Primitive Sixth where
  type Rep Sixth = Int
  pretty _ = "6"
  prim _ = 9

instance Primitive Seventh where
  type Rep Seventh = Int
  pretty _ = "7"
  prim _ = 11

instance Primitive Octave where
  type Rep Octave = Int
  pretty _ = "8"
  prim _ = 12

-- | A full interval applies the class's adjustment to the size's width.
instance (FunRep Int Int ic, IntRep is) => Primitive (Interval ic is) where
  type Rep (Interval ic is) = Int
  prim _ = prim (IC @ic) (prim (IS @is))
  pretty _ = pretty (IC @ic) ++ " " ++ pretty (IS @is)
| DimaSamoz/mezzo | src/Mezzo/Model/Types.hs | mit | 37,677 | 0 | 13 | 9,488 | 11,980 | 6,354 | 5,626 | -1 | -1 |
{-# htermination (succChar :: Char -> Char) #-}
-- Auto-generated termination benchmark: the Prelude is shadowed by
-- local Peano-style definitions and 'succChar' is the analysed term.
import qualified Prelude
data MyBool = MyTrue | MyFalse
data List a = Cons a (List a) | Nil
-- A character is a wrapped integer.
data Char = Char MyInt ;
-- Integers are a sign tag over Peano naturals.
data MyInt = Pos Nat | Neg Nat ;
data Nat = Succ Nat | Zero ;
primCharToInt :: Char -> MyInt;
primCharToInt (Char x) = x;
fromEnumChar :: Char -> MyInt
fromEnumChar = primCharToInt;
-- Subtraction of naturals, producing a signed integer.
primMinusNat :: Nat -> Nat -> MyInt;
primMinusNat Zero Zero = Pos Zero;
primMinusNat Zero (Succ y) = Neg (Succ y);
primMinusNat (Succ x) Zero = Pos (Succ x);
primMinusNat (Succ x) (Succ y) = primMinusNat x y;
primPlusNat :: Nat -> Nat -> Nat;
primPlusNat Zero Zero = Zero;
primPlusNat Zero (Succ y) = Succ y;
primPlusNat (Succ x) Zero = Succ x;
primPlusNat (Succ x) (Succ y) = Succ (Succ (primPlusNat x y));
-- Signed addition, defined case-wise via natural addition/subtraction.
primPlusInt :: MyInt -> MyInt -> MyInt;
primPlusInt (Pos x) (Neg y) = primMinusNat x y;
primPlusInt (Neg x) (Pos y) = primMinusNat y x;
primPlusInt (Neg x) (Neg y) = Neg (primPlusNat x y);
primPlusInt (Pos x) (Pos y) = Pos (primPlusNat x y);
psMyInt :: MyInt -> MyInt -> MyInt
psMyInt = primPlusInt;
-- Function composition.
pt :: (a -> c) -> (b -> a) -> b -> c;
pt f g x = f (g x);
primIntToChar :: MyInt -> Char;
primIntToChar x = Char x;
toEnumChar :: MyInt -> Char
toEnumChar = primIntToChar;
-- succChar c = toEnum (1 + fromEnum c), written point-free with 'pt'.
succChar :: Char -> Char
succChar = pt toEnumChar (pt (psMyInt (Pos (Succ Zero))) fromEnumChar);
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/basic_haskell/succ_4.hs | mit | 1,381 | 0 | 13 | 309 | 622 | 332 | 290 | 35 | 1 |
import Haste
import Haste.Prim
import Haste.Foreign
import Haste.DOM
import Haste.Events
import Haste.Ajax
import Control.Monad
import Control.Applicative
-- | Focus handler: blank the box when it still shows its placeholder
-- text, otherwise leave the user's input alone.
-- Improvement: the manual if/else with redundant '>> return ()' /
-- 'return ()' branches is replaced by Control.Monad's 'when'/'void'.
clearBox box val _ = do
  s <- getProp box "value"
  when (s == val) $ void $ setProp box "value" ""
-- | Blur handler: restore the placeholder text when the box was left
-- empty, otherwise keep the user's input.
-- Improvement: the manual if/else with redundant '>> return ()' /
-- 'return ()' branches is replaced by Control.Monad's 'when'/'void'.
fillBox box val _ = do
  s <- getProp box "value"
  when (null s) $ void $ setProp box "value" val
-- Wire a textbox so that its placeholder text clears on focus and
-- comes back on blur when the box is left empty.
setClickToClear (elemId, placeholder) = withElem elemId $ \box -> do
  _ <- onEvent box Focus (clearBox box placeholder)
  _ <- onEvent box Blur (fillBox box placeholder)
  return ()
getDataURL = ffi $ toJSStr "(function(s,f){var reader=new FileReader();reader.readAsDataURL(document.getElementById(s).files[0]);reader.onload=function(){f(reader.result);};})" :: String -> (String -> IO ()) -> IO ()
-- | Report the server's response to the submission.
-- Fix: the original only matched 'Just'; a failed AJAX request delivers
-- 'Nothing' and crashed with a non-exhaustive-pattern error.
onSubmitResultReturn :: Maybe String -> IO ()
onSubmitResultReturn (Just s) = alert s
onSubmitResultReturn Nothing = alert "Internal Error!"
-- Read the current "value" of each named element, producing the
-- (name, value) pairs in the same order as the input list.
buildPara [] = return []
buildPara (s:ss) = do
  val <- withElem s $ \e -> getProp e "value"
  rest <- buildPara ss
  return ((s, val) : rest)
-- Kick off submission by reading the selected picture as a data URL.
submit = getDataURL "adPic" onDataURLReady
-- Once the picture is available, gather the remaining form fields and
-- POST the whole application to the backend.
onDataURLReady dataURL = do
  fields <- buildPara ["ownerName", "ownerContact", "ownerType", "isJoined", "captcha"]
  let paras = ("adPic", dataURL) : fields
  ajaxRequest POST "/api/advertisement/insertApplication" paras onSubmitResultReturn
  return ()
-- | React to the captcha verification result: "1" means verified,
-- "0" means the answer was wrong.
-- Fix: the original patterns only covered Nothing/"1"/"0"; any other
-- server reply crashed with a non-exhaustive-pattern error. A catch-all
-- clause now treats unexpected replies like an internal error.
onCaptchaVerified :: Maybe String -> IO ()
onCaptchaVerified Nothing = alert "Internal Error!"
onCaptchaVerified (Just "1") = submit
onCaptchaVerified (Just "0") = alert "验证码错误"
onCaptchaVerified (Just _) = alert "Internal Error!"
-- Click handler: read the captcha answer and ask the server to verify
-- it; submission continues in 'onCaptchaVerified'.
onSubmit _ = do
  answer <- withElem "captcha" $ \box -> getProp box "value"
  ajaxRequest GET ("/api/captcha/verify?code=" ++ answer) noParams onCaptchaVerified
  return ()
-- Entry point: install placeholder behaviour on the text boxes and
-- hook up the submit button.
main = do
  mapM_ setClickToClear
        [ ("ownerName", "申请人姓名...")
        , ("ownerContact", "联系方式...")
        , ("captcha", "计算两数相乘结果...")
        ]
  withElem "btn_submit" $ \btn -> onEvent btn Click onSubmit
  return ()
| sqd/NNEZArtFestival2015Sys | backend/frontend/e/ads_apply.hs | mit | 1,960 | 0 | 11 | 358 | 660 | 324 | 336 | 47 | 2 |
module Algebra.Rewriting where
import Algebra.Base
import Algebra.Display
import Algebra.Suspension
import Algebra.Symbol
import Data.List ( intersperse )
import Data.Map ( Map )
import qualified Data.Map as Map
import Util.Display
import Util.Numeric
import Util.String.Align
import qualified Util.Unicode as U
import qualified Util.Unicode.Greek as Greek
import qualified Util.Unicode.Latin as Latin
p = mergeMonomials
-- Recurrence g: g(1) is the symbolic constant alpha; otherwise split
-- m = 2n + r and combine a linear front term (gamma*n + beta_r) with
-- three copies of the suspended recurrence G(n). The placeholders keep
-- empty terms for every symbolic constant so displays stay aligned.
g :: Function
g m
    | m == 1 = placeholders .+. (p [T alpha :* 1])
    | otherwise = placeholders .+. front .+. back
  where
    (n, r) = m `divMod` 2
    front = p [T gamma :* n, T (beta_ r) :* 1]
    back = p [(T $ only1 $ R $ Recurrence Latin.G (Suspension g n)) :* 3]
    placeholders =
        p $ emptyConstantTerms
            [ Sym Greek.Alpha :. Nothing
            , Sym Greek.Beta :. (Just 0)
            , Sym Greek.Beta :. (Just 1)
            , Sym Greek.Gamma :. Nothing
            ]
-- Symbolic Fibonacci recurrence: the two base cases are the constant 1,
-- and every other value is the sum of the two suspended sub-recurrences.
fib :: Function
fib 1 = p [constantTerm 1]
fib 2 = p [constantTerm 1]
fib n = recurse (n - 1) .+. recurse (n - 2)
  where
    recurse k = p [(T $ only1 $ R $ Recurrence Latin.F (Suspension fib k)) :* 1]
-- Recurrence h: like 'g' but the front term uses gamma_r, and the tail
-- scales the suspended recurrence h(n) by the constant 4 (via
-- distributeMOverP) instead of a plain coefficient.
h :: Function
h m
    | m == 1 = placeholders .+. (p [T alpha :* 1])
    | otherwise = placeholders .+. front .+. back
  where
    (n, r) = m `divMod` 2
    front = p [T (gamma_ r) :* n, T (beta_ r) :* 1]
    back = distributeMOverP (constantTerm 4) (h n)
    placeholders =
        p $ emptyConstantTerms
            [ Sym Greek.Alpha :. Nothing
            , Sym Greek.Beta :. (Just 0)
            , Sym Greek.Beta :. (Just 1)
            , Sym Greek.Gamma :. (Just 0)
            , Sym Greek.Gamma :. (Just 1)
            ]
-- Shorthand constructors for symbolic constants; the '_' variants
-- carry a numeric subscript.
alpha = only1 (SC (Sym Greek.Alpha :. Nothing))
alpha_ n = only1 (SC (Sym Greek.Alpha :. Just n))
beta = only1 (SC (Sym Greek.Beta :. Nothing))
beta_ n = only1 (SC (Sym Greek.Beta :. Just n))
gamma = only1 (SC (Sym Greek.Gamma :. Nothing))
gamma_ n = only1 (SC (Sym Greek.Gamma :. Just n))
-- Print a two-column table of n and display (f n) for n in [1..n],
-- with both columns aligned on their '+' characters.
printTable :: Function -> Integer -> IO ()
printTable f n = putStrLn $ unlines table
  where
    table = zipWith joinRow formattedNs formattedResults
    joinRow a b = a ++ " | " ++ b
    formattedNs = alignAtChar '+' $ map show [1..n]
    formattedResults = alignAtChar '+' $ map (display . f) [1..n]
| sgord512/Algebra | Algebra/Rewriting.hs | mit | 2,677 | 0 | 16 | 957 | 1,015 | 528 | 487 | 55 | 1 |
module Primitive where
import Numerical
import Vec3
import Ray
import Material
import Container
import Data.List
import Data.Maybe
import Control.Applicative
import Control.DeepSeq
-- | Scene geometry. Every constructor carries a surface material per
-- colour channel ('C3'); 'Triangle' additionally caches its two edge
-- vectors and face normal so intersection tests need not recompute them.
data Primitive
    = Sphere
        { surface :: C3 Material
        , isSource :: Bool
        , origin :: Vec
        , radius :: Flt
        }
    | Plane
        { surface :: C3 Material
        , normal :: Vec
        , distance :: Flt
        }
    | Triangle
        { surface :: C3 Material
        , vert0 :: Vec
        , vert1 :: Vec
        , vert2 :: Vec
        , edge0 :: Vec
        , edge1 :: Vec
        , normal :: Vec
        }
    deriving (Eq, Show)
-- Full evaluation: force every field of whichever constructor we have.
instance NFData Primitive where
    rnf (Sphere a b c d) =
        a `deepseq` b `deepseq` c `deepseq` d `deepseq` ()
    rnf (Plane a b c) =
        a `deepseq` b `deepseq` c `deepseq` ()
    rnf (Triangle a b c d e f g) =
        a `deepseq` b `deepseq` c `deepseq` d `deepseq`
        e `deepseq` f `deepseq` g `deepseq` ()
-- | Build a triangle from a material and three vertices, caching the
-- two edge vectors and the normalised face normal.
constructTriangle :: C3 Material -> Vec -> Vec -> Vec -> Primitive
constructTriangle mat v0 v1 v2 = Triangle mat v0 v1 v2 e0 e1 faceNormal
  where
    e0 = v1 - v0
    e1 = v2 - v0
    faceNormal = normalize (cross e0 e1)
-- | Surface normal of a primitive at the given point. Planes and
-- triangles have a constant normal; a sphere's points away from its
-- origin.
findNormal :: Primitive -> Vec -> Vec
findNormal (Sphere _ _ orig _) point = normalize (point - orig)
findNormal (Plane _ norm _) _ = norm
findNormal (Triangle _ _ _ _ _ _ norm) _ = norm
-- | Parametric distance along the ray at which it meets the primitive,
-- or Nothing when it misses.
-- NOTE(review): the sphere case returns the nearer quadratic root even
-- when it is negative (intersection behind the ray origin); callers
-- such as 'closest' filter distances <= 0.00001.
intersection :: Primitive -> Ray -> Maybe Flt
intersection (Sphere _ _ orig rad) (Ray position direction)
    | disc > 0 = Just $ -(b + sqrt disc) / (2 * a)
    | otherwise = Nothing
  where
    -- Coefficients of |position + t*direction - orig|^2 = rad^2.
    disc = (b * b) - (4 * a * c)
    a = dot direction direction
    b = 2 * dot direction pSubOrigin
    c = dot pSubOrigin pSubOrigin - (rad * rad)
    pSubOrigin = position - orig
intersection (Plane _ norm dist) (Ray position direction)
    | det /= 0 && e > 0 = Just e
    | otherwise = Nothing
  where
    det = dot norm direction
    e = (dist - dot norm position) / det
-- Barycentric (Moller-Trumbore-style) test: u and v are the
-- barycentric coordinates of the hit point, rejected outside [0,1].
intersection (Triangle _ v0 _ _ e0 e1 _) (Ray position direction)
    | -epsilon < det && det < epsilon = Nothing
    | u < 0 || 1 < u = Nothing
    | v < 0 || 1 < u + v = Nothing
    | otherwise = Just $ invdet * dot e1 qvec
  where
    epsilon = 0.000001
    det = dot e0 pvec
    invdet = 1 / det
    pvec = cross direction e1
    tvec = position - v0
    qvec = cross tvec e0
    u = invdet * dot tvec pvec
    v = invdet * dot direction qvec
-- | Reflect a ray off the primitive at their intersection point.
-- NOTE(review): when there is no intersection the distance defaults to
-- 0, i.e. the ray reflects at its own origin — confirm this is intended.
reflectFromPrimitive :: Primitive -> Ray -> Ray
reflectFromPrimitive prim ray@(Ray position direction) =
    reflectRay ray surfaceNormal hitPoint
  where
    surfaceNormal = findNormal prim hitPoint
    hitPoint = position + (direction * dist)
    dist = pure $ fromMaybe 0 (intersection prim ray)
-- | The primitive with the smallest positive intersection distance
-- along the ray, if any. The epsilon filters out self-intersections at
-- the surface the ray starts from.
-- Improvement: the original went through 'isJust'/'fromJust' partial
-- functions; a comprehension with a 'Just' generator pattern is total
-- and compares the distances directly instead of via 'Maybe'.
closest :: Ray -> [Primitive] -> Maybe Primitive
closest r primitives =
    case hits of
        [] -> Nothing
        _ -> Just (fst (minimumBy nearer hits))
  where
    hits = [ (prim, d) | prim <- primitives
                       , Just d <- [intersection prim r]
                       , d > 0.00001 ]
    nearer (_, x) (_, y) = compare x y
| reuk/rayverb | src/Primitive.hs | gpl-2.0 | 3,846 | 0 | 12 | 1,739 | 1,205 | 649 | 556 | 86 | 3 |
-- No main, hsExpand should not accept this.
-- Naive doubly-recursive Fibonacci; 1-indexed from zero, so
-- fib 0 == fib 1 == 1.
fib m = case m of
  0 -> 1
  1 -> 1
  n -> fib (n - 1) + fib (n - 2)
| uvthenfuv/HsExpand | tests/test2.hs | gpl-2.0 | 96 | 0 | 8 | 25 | 50 | 25 | 25 | 3 | 1 |
module Language.Pepa.Transform.Rules.Apply
( applyRulesToModel )
where
{- Standard Library Modules Imported -}
import Control.Monad
( foldM )
import qualified Data.Map as Map
import Data.Map
( Map )
{- External Library Modules Imported -}
{- Local Modules Imported -}
import Language.Pepa.Transform.Rules.Syntax
( Rules
, Rule ( .. )
, ComponentName
, Pattern ( .. )
, ActionsPattern ( .. )
, PatternVariable
, ExprPattern ( .. )
, Replacement
, patternVariables
, replacementVariables
)
import qualified Language.Pepa.QualifiedName as QualifiedName
import Language.Pepa.QualifiedName
( QualifiedName ( .. ) )
import Language.Pepa.Rates
( RateExpr ( .. ) )
import qualified Language.Pepa.Syntax as Pepa
import Language.Pepa.Syntax
( ParsedModel ( .. )
, ParsedComponent ( .. )
, CooperationSet ( .. )
, ParsedAction ( .. )
, nameOfAction
)
import qualified Language.Pepa.Print as PepaPrint
import qualified Language.Pepa.MainControl as MainControl
import Language.Pepa.MainControl
( MainControl )
import qualified Language.Pepa.Utils as Utils
import qualified Language.Pepa.Compile.PartEval as PartEval
{- End of Module Imports -}
{-|
  Apply every transformation rule in turn to a model, threading the
  result of one rule into the next; the final model is recorded in the
  MainControl log under the "transformed-model" key.
-}
applyRulesToModel :: ParsedModel -> Rules -> MainControl ParsedModel
applyRulesToModel pModel rules =
  foldM applyRuleToModel pModel rules >>= logTransformed
  where
  logTransformed tModel =
    MainControl.valueResult tModel "transformed-model"
                            (PepaPrint.hprintPepaModel tModel)
{-| Apply a single transformation rule to a model. The rule is first
    checked for well-formedness: every pattern variable used in the
    replacement must be bound somewhere in the pattern. -}
applyRuleToModel :: ParsedModel -> Rule -> MainControl ParsedModel
applyRuleToModel pModel rule
  | not $ null undefinedVars = MainControl.resultError undefinedErr
  | otherwise =
      do newModel <- mTransformed
         -- The returned model keeps all of the original definitions plus
         -- any new ones; 'const' makes the rewritten system equation win
         -- over the original one.
         return $ Pepa.combineModels const newModel pModel
  where
  mTransformed = applyRule $ modelSystemEqn pModel
  pattern = rulePattern rule
  replacement = ruleReplace rule
  patternVars = patternVariables pattern
  replaceVars = replacementVariables replacement
  undefinedVars = filter (\t -> not $ elem t patternVars) replaceVars
  undefinedErr =
    unlines [ "The rule cannot be applied because the following"
            , "pattern variables are used in the replacement but"
            , "not defined within the pattern:"
            , Utils.mkCSlist $ map QualifiedName.getOrigName undefinedVars
            ]
-- We return a 'Maybe ParsedComponent', if we cannot apply
-- the rule to the given component then we return 'Nothing'
-- This also allows us to say whether we have applied a rule
-- which should only be applied a once (or a given number).
applyRule :: ParsedComponent -> MainControl ParsedModel
applyRule component
| Just env <- matchPattern component pattern =
-- If the pattern currently matches then no problem we can just
-- blend in the replacement and we're done.
blendReplacement env replacement
| (Cooperation left actions right) <- component
, leftResult <- applyRule left
, rightResult <- applyRule right =
-- In this case we already know that we don't currently
-- match the pattern but either the left or the right
-- might. NOTE: that we do not use the new right if the
-- left matches the pattern.
if MainControl.hasResult leftResult
-- The left matches so we do not care about the right
then do leftModel <- leftResult
let newComp = Cooperation (modelSystemEqn leftModel)
actions
right
return $ leftModel { modelSystemEqn = newComp }
else if MainControl.hasResult rightResult
-- Okay the left didn't match but the right does
then do rightModel <- rightResult
let newComp = Cooperation left
actions
(modelSystemEqn rightModel)
return $ rightModel { modelSystemEqn = newComp }
-- Okay neither has completed so we may just return an
-- error. I'm concatenating the errors this way otherwise
-- (if we said simply "fail Cooperation left right doesn't match")
-- then we would always get the error that system equation
-- doesn't match.
else leftResult >> rightResult
| (ProcessArray comp size mActions) <- component =
do newComp <- applyRule comp
let array = ProcessArray (modelSystemEqn newComp) size mActions
return $ newComp { modelSystemEqn = array }
| otherwise =
-- This is the final catch all case which will include
-- component identifiers that haven't matched the pattern.
-- Here we have nothing left to try so we return 'Nothing'
fail "Pattern could not be matched"
-- 'matchPattern' must return an environment since some of the
-- component matched by the pattern may be reused in the
-- replacement by means of a pattern variable. We return
-- 'Nothing' to indicate that the pattern failed to match
-- and Just Environment to indicate that it succeeded.
-- Note that of course the returned environment may well
-- be empty.
matchPattern :: ParsedComponent -> Pattern -> Maybe Environment
matchPattern component (VarPat ident) =
-- Of course a pattern variable matches any component
return $ addComponent emptyEnvironment ident component
matchPattern (IdProcess compId) (IdentPat patId)
| refersTo patId compId = return emptyEnvironment
| otherwise = fail "Wrong component name"
matchPattern (Cooperation left actions right)
(CoopPat pLeft pActions pRight) =
-- Notice if the pattern fails to match here we do not
-- descent into the left and right components to see if the
-- pattern matches there, 'applyRule' will take care of that
-- we simply state whether the pattern matches currently.
do leftEnv <- matchPattern left pLeft
rightEnv <- matchPattern right pRight
actEnv <- matchActions actions pActions
return $ addActionMap (unionEnvironments leftEnv rightEnv) actEnv
matchPattern (ProcessArray comp size mActions)
(ArrayPat compPat sizePat mActionsPat) =
-- TODO: Currently we don't match the size but we should and
-- will of course need to in order to implement size changing.
do compEnv <- matchPattern comp compPat
exprEnv <- matchExprPattern size sizePat
let env = addExprMap compEnv exprEnv
case (mActions, mActionsPat) of
(Nothing, Nothing) -> return env
(Just _, Nothing) -> fail "process array actions don't match"
(Nothing, Just _) -> fail "process array actions don't match"
(Just a1, Just a2) -> do actEnv <- matchActions (ActionSet a1) a2
return $ addActionMap env actEnv
-- So basically at this point we have determined that the pattern
-- does not match
matchPattern (IdProcess _) _pattern = fail "comp doesn't match"
matchPattern (Cooperation _ _ _ ) _pattern = fail "comp doesn't match"
matchPattern (ProcessArray _ _ _) _pattern = fail "comp doesn't match"
matchPattern (Hiding _ _) _pattern = fail "comp doesn't match"
-- Now the errors, that is sequential components found in the
-- system equation.
matchPattern (PrefixComponent _ _) _pattern = error errorMsg
matchPattern (ComponentSum _ _) _pattern = error errorMsg
matchPattern (CondBehaviour _ _) _pattern = error errorMsg
matchPattern (StopProcess) _pattern = error errorMsg
errorMsg = "matchPattern serious error: " ++
"sequential component in system equation"
-- Matching the set of actions, the same as 'matchPattern' but
-- for a set of actions.
matchActions :: CooperationSet -> ActionsPattern -> Maybe ActionMap
matchActions (ActionSet actions) (AnyActions ident pActions)
| all (\t -> elem t actionNames) pActions = return mapping
| otherwise = fail failMsg
where
mapping = Map.singleton ident nonExplict
failMsg = "Explicit actions in pattern not matched"
-- These are the action in 'actions' but not mentioned
-- explicitly in the pattern and hence those that the
-- pattern variable should be mapped to.
nonExplict = ActionSet $ filter (not . isExplicit) actions
isExplicit :: ParsedAction -> Bool
isExplicit action = elem (nameOfAction action) pActions
actionNames = map nameOfAction actions
matchActions (ActionSet actions) (Actions pActions)
| not $ equalSetLists actionNames pActions =
fail "Actions do not match"
| otherwise =
return Map.empty
where
actionNames = map nameOfAction actions
-- So basically currently a wildcard can only be matched by a
-- pattern variable without any explicit actions.
matchActions (WildCard) (AnyActions ident []) =
return $ Map.singleton ident WildCard
-- So this could technically match but for now we'll
-- assume that the author of the rule meant explicitly
-- those actions. Potentially we could do this but it
-- is quite awkward and requires a process action map.
matchActions WildCard (AnyActions _ident _acts) =
fail "Actions do not match"
matchActions (WildCard) (Actions _pActions) =
fail "Actions do not match"
-- A utility function which tests if two lists define
-- the same set. We do not care if an item occurs multiple
-- times.
equalSetLists :: Eq a => [a] -> [a] -> Bool
equalSetLists l1 l2 =
-- All of the elements of l1 are in l2
(all (\e -> elem e l2) l1) &&
-- And all of the elements of l2 are in l1
(all (\e -> elem e l1) l2)
-- The same as 'matchPattern' but for expressions.
matchExprPattern :: RateExpr -> ExprPattern -> Maybe ExpressionMap
matchExprPattern (Cconstant i) (ConstantPat j)
| i == j = return Map.empty
| otherwise = fail "Pattern does not match"
matchExprPattern (Creal d) (RealPattern e)
| d == e = return Map.empty
| otherwise = fail "Pattern does not match"
matchExprPattern expr (AnyExpr var) =
return $ Map.singleton var expr
-- So here we go a bit wrong, we must decide what to do with
-- compound expressions. We should at least match Cadd with
-- PlusPattern etc. However what about (3) with (2 + 1), not very
-- useful I agree, but what about about (3) with (?a + 1) where
-- ?a is mapped to 2. We should be able to write this.
matchExprPattern _expr _pattern =
fail "Pattern does not match"
-- To make the replacement we must transform the parsed replacement
-- into a parsed component. Note that currently we only support
-- straightforward dropping into place but in time we should allow
-- the pattern to match on some names and hence we need to replace
-- those names in the replacement. That may mean that here we
-- need to take in an environment.
blendReplacement :: Environment -> Replacement -> MainControl ParsedModel
blendReplacement env (VarPat ident) =
do component <- newComp
return $ Pepa.emptyModel component
where
newComp = maybe failVal return $ lookupComp env ident
failVal = fail failMsg
failMsg = unwords [ "Not found:", QualifiedName.textual ident ]
blendReplacement _env (IdentPat ident) =
return $ Pepa.emptyModel (IdProcess ident)
blendReplacement env (CoopPat left actionsPattern right) =
do newLeft <- blendReplacement env left
newRight <- blendReplacement env right
actions <- blendActions env actionsPattern
let combineF l r = Cooperation l actions r
return $ Pepa.combineModels combineF newLeft newRight
blendReplacement env (ArrayPat comp size Nothing) =
do newComp <- blendReplacement env comp
newSize <- blendExpression env size
let array = ProcessArray (modelSystemEqn newComp) newSize Nothing
return $ newComp { modelSystemEqn = array }
blendReplacement env (ArrayPat comp size (Just actPat)) =
do newComp <- blendReplacement env comp
newSize <- blendExpression env size
coopSet <- blendActions env actPat
-- Instead we could probably just get the action set of the array
-- component. However it would be better to allow it in GENERAL
-- for the array syntax if we're going to allow it at all here.
array <- case coopSet of
ActionSet newActions ->
return $ ProcessArray (modelSystemEqn newComp)
newSize
(Just newActions)
WildCard ->
fail failMsg
return $ newComp { modelSystemEqn = array }
where
failMsg = "You cannot have a wildcard cooperation set in an array"
blendReplacement env (PartEval compRep) =
do smallModel <- blendReplacement env compRep
-- We need to combine the model returned with the original
-- model since the original model will have all the process
-- definitions and this one not necessarily. We use 'const'
-- as the combining function since we want the blended component
-- to be the actual system equation that gets partially
-- evaluated.
let fullModel = Pepa.combineModels const smallModel pModel
PartEval.partEvalPepaModel fullModel
-- The same as 'blendReplacement' but for a set of actions.
blendActions :: Environment -> ActionsPattern -> MainControl CooperationSet
blendActions _env (Actions actions) =
return $ ActionSet $ map Action actions
blendActions env (AnyActions var pActions) =
do coopSet <- maybe failVal return $ lookupActions env var
case coopSet of
-- This is kind of questionable, certainly if wildcard doesn't
-- include pActions, then it is wrong, but then in that case you
-- have made a broken PEPA model by attempting to cooperate on
-- an activity you do not perform.
WildCard -> return WildCard
ActionSet acts -> return $ ActionSet $ acts ++ (map Action pActions)
where
failVal = fail failMsg
failMsg = unwords [ "Could not match action set" ]
-- The same as 'blendReplacement' and 'blendActions' but for an
-- expression usually occuring as the size portion of a process array
blendExpression :: Environment -> ExprPattern -> MainControl RateExpr
blendExpression env (AnyExpr var) =
maybe failVal return $ lookupExpr env var
where
failVal = fail failMsg
failMsg = unwords [ "No such variable: ", QualifiedName.textual var ]
blendExpression _env (ConstantPat i) = return $ Cconstant i
blendExpression _env (RealPattern d) = return $ Creal d
blendExpression env (PlusPattern left right) =
do newLeft <- blendExpression env left
newRight <- blendExpression env right
return $ Cadd newLeft newRight
blendExpression env (MinusPattern left right) =
do newLeft <- blendExpression env left
newRight <- blendExpression env right
return $ Csub newLeft newRight
blendExpression env (MultPattern left right) =
do newLeft <- blendExpression env left
newRight <- blendExpression env right
return $ Cmult newLeft newRight
-- A utility function to test whether a component name within
-- a rule refers to a component name within the model; the comparison
-- is on the original (unqualified) names.
refersTo :: ComponentName -> QualifiedName -> Bool
refersTo = QualifiedName.sameOrigName
{-
  An 'Environment' is what a successful pattern match returns: every
  pattern variable that occurred in the pattern is mapped to whatever
  stood in its position in the matched component. Separate maps are
  kept because the same variable may be bound against different kinds
  of syntax (a component, a cooperation set, an expression) depending
  on where it occurs, as in

      "?P[?P][?P] ==> ?P[?P + 1][?P,a]"

  although relying on that is not particularly robust.
-}
data Environment =
  Environment
    { envActions :: ActionMap
    , envComponents :: ComponentMap
    , envExpressions :: ExpressionMap
    }

-- The kinds of map an environment may hold, all keyed by pattern
-- variable.
type ActionMap = Map PatternVariable CooperationSet
type ComponentMap = Map PatternVariable ParsedComponent
type ExpressionMap = Map PatternVariable RateExpr
{- The environment returned by a completely successful match in which
   no pattern variables were used: all three maps are empty.
-}
emptyEnvironment :: Environment
emptyEnvironment = Environment Map.empty Map.empty Map.empty
-- Lookup a pattern variable which we expect to be mapped to a list
-- of actions.
lookupActions :: Environment -> PatternVariable -> Maybe CooperationSet
lookupActions env var = Map.lookup var $ envActions env
-- Lookup a pattern variable which we expect to be mapped to an expression
-- because it has occurred in an expression place in the replacement, for
-- example the size of an array.
lookupExpr :: Environment -> PatternVariable -> Maybe RateExpr
lookupExpr env var = Map.lookup var $ envExpressions env
-- Lookup a pattern variable which we expect to be mapped to a component.
lookupComp :: Environment -> PatternVariable -> Maybe ParsedComponent
lookupComp env var = Map.lookup var $ envComponents env
-- Unify two environments map-by-map. Strictly we ought to check that
-- no pattern variable is bound twice at the same kind (patterns such
-- as "?P || ?P" are not meaningful); as it stands 'Map.union' silently
-- prefers the binding from the first environment.
unionEnvironments :: Environment -> Environment -> Environment
unionEnvironments e1 e2 =
  Environment { envActions = merge envActions
              , envComponents = merge envComponents
              , envExpressions = merge envExpressions
              }
  where
  merge field = Map.union (field e1) (field e2)
-- Having made an action map, add it to the given environment.
-- As above we should make sure we are not overriding any
-- pattern variables already mapped to an action.
addActionMap :: Environment -> ActionMap -> Environment
addActionMap env am = env { envActions = Map.union (envActions env) am }
-- Having made an expression mapping, add it to the given environment.
-- Similarly we should not allow ourselves to override an existing
-- mapping from a pattern variable to an expression.
addExprMap :: Environment -> ExpressionMap -> Environment
addExprMap env em = env { envExpressions = Map.union (envExpressions env) em }
-- Bind a pattern variable to a component in the environment. As with
-- the other 'add' helpers, an existing binding for the same variable
-- is silently replaced rather than rejected.
addComponent :: Environment -> PatternVariable
             -> ParsedComponent -> Environment
addComponent env pvar comp =
  env { envComponents = Map.insert pvar comp (envComponents env) }
{-# LANGUAGE TemplateHaskell, DeriveDataTypeable #-}
module SAT.Param where
-- $Id$
import SAT.Types
import Autolib.Reader
import Autolib.ToDoc
import Autolib.Set
import Data.Typeable
-- | Generation parameters for a SAT problem instance.
data Param =
    Param
      { vars :: Set Variable -- ^ the variable names to draw from
      , clauses :: Int -- ^ number of clauses (recommended: 3.5 * variables)
      }
    deriving ( Typeable )
-- | Default parameter set for n variables: names are the n successive
-- characters starting at 'p', and the clause count is round (3.5 * n).
p :: Int -> Param
p n = Param { vars = mkSet variables
            , clauses = round (factor * fromIntegral n)
            }
  where
    factor = 3.5 :: Double
    variables = take n [ read [c] | c <- [ 'p' .. ] ]
$(derives [makeReader, makeToDoc] [''Param])
-- local variables:
-- mode: haskell
-- end:
| Erdwolf/autotool-bonn | src/SAT/Param.hs | gpl-2.0 | 614 | 4 | 14 | 158 | 187 | 105 | 82 | 16 | 1 |
-- 18/03/16
-- Lorenzo Jose Lamas
-- Algebra I
-- Labo
--
-- Sum of two numbers.
suma x y = x + y
-- Twice the argument.
doble x = 2 * x
-- Euclidean norm of the vector (v1, v2).
normaVectorial x y = sqrt (x^2 + y^2)
-- Ignores its argument and yields 8.
funcionConstante8 _ = 8
-- The answer to everything.
respuestaATodo = 42
-- Primera parte todo piola !
-- 1 when the argument is zero, 0 otherwise.
unoSiCero n = if n == 0 then 1 else 0
-- Medio compliqueti la sintaxis jajaj
-- Sign of a number: 1, 0 or -1.
-- Fix: the last guard is now 'otherwise', making the guards provably
-- exhaustive (the original 'n < 0' guard left GHC warning about a
-- possible fall-through).
signo n | n > 0 = 1
        | n == 0 = 0
        | otherwise = (-1)
-- costo pero salio
valorAbsoluto x = sqrt(x^2)
-- Larger of two values.
-- Fix: the hand-rolled guard triple (>, <, ==) was not provably
-- exhaustive; the standard 'max' is total and equivalent.
maximo2 v1 v2 = max v1 v2
-- Y ahora ...
maximo3 v1 v2 v3 = maximum [v1,v2,v3]
-- Es trampa ??
-- True for numbers >= 0.
-- Fix: guards returning literal True/False collapse to the comparison
-- itself.
esPositivo n = n >= 0
-- True when n is even.
-- Fix: the guard pair 'even n = True / otherwise = False' collapses to
-- the standard predicate itself.
esPar n = even n
-- Alternative: halving with truncating division and doubling restores
-- n exactly iff n is even.
esPar1 x = (div x 2) * 2 == x
-- Alternative: the remainder modulo 2 is zero.
esPar2 x = (mod x 2) == 0
-- Logical AND, written four ways.
yLogico1 p q = p && q

-- The full truth table, as a case over the argument pair.
yLogico a b = case (a, b) of
  (True, True) -> True
  (True, False) -> False
  (False, True) -> False
  (False, False) -> False

-- The single positive case plus a catch-all.
yLogico2 a b = case (a, b) of
  (True, True) -> True
  _ -> False

-- Short-circuiting form: a False first argument decides immediately.
yLogico3 x y = x && y
-- Returns n1 while n2 is below 10, otherwise n1 + n3.
-- Fix: the second guard is now 'otherwise', making the pair provably
-- exhaustive (the original 'n2 >= 10' left a fall-through warning).
f n1 n2 n3 | n2 < 10 = n1
           | otherwise = n1 + n3
-- NAND: true unless both inputs are true.
nand x y = not (x && y)
-- One root of a*x^2 + b*x + c = 0 via the quadratic formula (the '+'
-- branch); yields NaN when there are no real roots.
unaRaiz a b c = ((-b) + sqrt (b^2 - 4*a*c)) / (2*a)
-- Same root, but fails loudly when the discriminant is negative.
unaRaiz1 a b c
    | disc < 0 = error "No hay raices reales"
    | disc >= 0 = ((-b) + sqrt disc) / (2*a)
    where disc = b^2 - 4*a*c
-- Do the sides satisfy the Pythagorean identity c^2 == a^2 + b^2 ?
pitagoras a b c = c^2 == a^2 + b^2
| lorenzojlamas/ejercicios | 1.hs | gpl-3.0 | 1,242 | 0 | 14 | 363 | 703 | 350 | 353 | 37 | 1 |
module Main (main) where
import Options.Applicative
import Data.Monoid ((<>))
import Folgerhs.Stage
import Folgerhs.Speakers (speakers)
import Folgerhs.Presence (presence)
import Folgerhs.Animate (animation)
-- One constructor per sub-command, holding that command's options.
data Config = Presence FilePath Bool -- ^ input file, exclude unnamed characters
            | Speakers FilePath -- ^ input file
            | Animate FilePath Int Bool Line -- ^ input file, rate, exclude unnamed, start line
-- | Top-level parser: dispatch on one of the three sub-commands.
config :: Parser Config
config =
    hsubparser $ mconcat
        [ command "presence"
            ( info presenceConfig (progDesc "Line-by-line on stage presence as CSV") )
        , command "animate"
            ( info animateConfig (progDesc "Animated character interaction") )
        , command "speakers"
            ( info speakersConfig (progDesc "Speech ratio per character") )
        ]
-- | Options for the presence command: the input file plus a flag to
-- drop unnamed characters.
presenceConfig :: Parser Config
presenceConfig =
    Presence
        <$> strArgument (metavar "FILENAME" <> help "File to parse")
        <*> switch (long "without-unnamed" <> help "Exclude unnamed characters")
-- | Options for the speakers command: just the input file.
speakersConfig :: Parser Config
speakersConfig =
    Speakers <$> strArgument (metavar "FILENAME" <> help "File to parse")
-- | Options for the animate command: input file, playback rate
-- (default 10 lines per second), the exclude-unnamed flag, and the
-- line to start from (default "0").
animateConfig :: Parser Config
animateConfig =
    Animate
        <$> strArgument (metavar "FILENAME" <> help "File to parse")
        <*> option auto
                (long "rate" <> value 10 <> metavar "RATE"
                 <> help "Lines per second")
        <*> switch
                (long "without-unnamed" <> help "Exclude unnamed characters")
        <*> strOption
                (long "seek-line" <> value "0" <> metavar "ACT.SCENE.LINE"
                 <> help "Start animation from given line")
-- Dispatch the parsed configuration to the matching command driver.
execute :: Config -> IO ()
execute (Presence f wu) = presence f wu
execute (Speakers f) = speakers f
execute (Animate f lps wu sl) = animation f lps wu sl
-- Parse the command line, then run the selected command.
main :: IO ()
main = execute =<< execParser opts
  where
    desc = "Example usage of the toolset for Folger Shakespeare Library's TEI-encoded plays"
    opts = info (helper <*> config) (progDesc desc <> fullDesc)
| SU-LOSP/folgerhs | app/Folgerhs/Main.hs | gpl-3.0 | 2,056 | 0 | 13 | 625 | 518 | 259 | 259 | 57 | 1 |
{-|
Module : Parameter
Description : PhotoVideotex parameter generator
Copyright : (c) Frédéric BISSON, 2014
License : GPL-3
Maintainer : [email protected]
Stability : experimental
Portability : POSIX
This module allows to generate parameters for photo videotex. All parameters
are not useful with the Minitel.
-}
module Minitel.Generate.Photo.Parameter
( mResetToDefault
, intParm
, normParm
, enumParm
, mFieldLength
, mFullScreenDisplay
, mSourceAspectRatio
, mPhotoAreaLocation
, mPhotoAreaSize
, mPicturePlacement
, mClearPhotoArea
, mSourcePictureComments
, mSourcePictureDimensions
, mSourcePixelDensityStf
, mSourcePixelDensityVal
, mSourceSweepDirection
, mDCImages
, Component(ComponentRGB, ComponentYCrCb, ComponentCMYK, ComponentY)
, mSourceComponentDescription
, mSourceComponentDataPrecision
, mSourceComponentOrder
, mSourceLevelAssignmentFix
, mSourceLevelAssignment
, JPGHierarchical(NonHierarchical, Hierarchical)
, JPGTransform(DCT, DPCM)
, JPGOrder(Sequential, Progressive)
, JPGCoding(Huffman, Arithmetic)
, mJPEGCodingMode
, mEncodingTableManagement
, mApplicationMarkerCodesAssignment
, TranslationMode( NoTranslation
, NoTranslationExceptUS
, Encoding3in4
, Shift8bits
, Shift7bits
, NoTranslationExceptSp
)
, mTranslationModeEncoding
) where
import Minitel.Constants.Photo
import Minitel.Type.MNatural
import Minitel.Type.MString
-- | Convert an Int into a variable-length field-length encoding: the value is
-- split into base-32 digits, every digit except the last carrying a 0x40 tag.
-- Negative input is a programming error.
mFieldLength :: Int -> MString
mFieldLength n
    | n < 0     = error "Negative number !"
    | n < 32    = [mnat n]
    | otherwise = mnat (0x40 + q) : mFieldLength r
    where (q, r) = divMod n 32
-- * Parameters
-- Parameters are MString composed of
-- - a parameter type
-- - a field length
-- - the parameter value
-- | Boolean parameter: type tag, length 1, then 0x01 for True / 0x00 for False.
boolParm :: Bool -> MString
boolParm b = [pBoolean] ++ mFieldLength 1 ++ [if b then 0x01 else 0x00]
-- | Integer parameter: a value in [-8192, 8191] encoded in 14 bits as two
-- 7-bit bytes; negative values are stored offset by 16384 (14-bit two's
-- complement).
intParm :: Int -> MString
intParm i
    | i < -8192 = error "Number below -8192"
    | i > 8191 = error "Number above 8191"
    | otherwise = [pInteger] ++ mFieldLength 2 ++ [mnat byte1, mnat byte2]
    where cpl = if i < 0 then 16384 + i else i
          (byte1, byte2) = (div cpl 128, mod cpl 128)
-- | Normalised float parameter: a value in [-1, 1] scaled by 4096 and encoded
-- in two 7-bit bytes; negatives are offset by 16384 like 'intParm'.
-- NOTE(review): the negative branch computes 16384 + (1 + n) * 4096, i.e. an
-- offset-by-one scaling -- confirm against the photo-videotex spec.
normParm :: Float -> MString
normParm n
    | n < -1 = error "Number below -1"
    | n > 1 = error "Number above 1"
    | otherwise = [pNormalised] ++ mFieldLength 2 ++ [mnat byte1, mnat byte2]
    where cpl = round $ if n < 0 then 16384 + (1 + n) * 4096 else n * 4096
          (byte1, byte2) = divMod cpl 128
-- | Types that encode to a single-byte enumeration value.
class Enumeration en where
    enumToMNat :: en -> MNat

-- | Enumeration parameter: type tag, length 1, then the encoded value.
enumParm :: (Enumeration a) => a -> MString
enumParm i = [pEnumeration] ++ mFieldLength 1 ++ [enumToMNat i]
-- * Parameter Status Attribute
-- The short lowercase prefixes used below (rtd, fsd, asr, loc, pas, ppl, cpa,
-- ...) are presumably the opcode constants imported from
-- 'Minitel.Constants.Photo' -- verify there.

-- | Reset to default values
mResetToDefault :: Bool -> MString
mResetToDefault b = rtd ++ boolParm b

-- * Picture Display Attributes

-- | Full screen display
mFullScreenDisplay :: Bool -> MString
mFullScreenDisplay b = fsd ++ boolParm b

-- | Source aspect ratio (two integer terms, width and height)
mSourceAspectRatio :: Int -> Int -> MString
mSourceAspectRatio araw arah = asr ++ intParm araw ++ intParm arah

-- | Photo area location
-- The location is given as a percentage of the screen width\/height
mPhotoAreaLocation :: Float -> Float -> MString
mPhotoAreaLocation loch locv = loc ++ normParm loch ++ normParm locv

-- | Photo area size
-- The size is given as a percentage of the screen width\/height
mPhotoAreaSize :: Float -> Float -> MString
mPhotoAreaSize sizw sizh = pas ++ normParm sizw ++ normParm sizh

-- | Picture placement
-- Picture placement inside the photo area: integer reference point (h, v)
-- plus normalised offsets (h, v).
mPicturePlacement :: Int -> Int -> Float -> Float -> MString
mPicturePlacement refh refv offh offv =
    ppl ++ intParm refh ++ intParm refv ++ normParm offh ++ normParm offv

-- | Clear photo area
-- Useful for pictures handling transparency
mClearPhotoArea :: Bool -> MString
mClearPhotoArea b = cpa ++ boolParm b
-- * Source Picture Attributes

-- | Source picture comments
-- NOTE(review): not implemented -- calling this raises via 'undefined'.
-- TODO: implement source picture comments
mSourcePictureComments :: Int -> Int -> MString
mSourcePictureComments = undefined

-- | Source picture dimensions (pixels horizontally, pixels vertically)
mSourcePictureDimensions :: Int -> Int -> MString
mSourcePictureDimensions nph npv = pds ++ intParm nph ++ intParm npv

-- | Standardised pixel-density presets (STF)
data DensityStf = Density422625
                | Density422525
                | Density211625
                | Density211525
                | DensityCIF
                | DensityNA

instance Enumeration DensityStf where
    enumToMNat Density422625 = 0x01
    enumToMNat Density422525 = 0x02
    enumToMNat Density211625 = 0x03
    enumToMNat Density211525 = 0x04
    enumToMNat DensityCIF = 0x05
    enumToMNat DensityNA = 0x06

-- | Density unit for explicit pixel densities
data DensityUnit = PixelsPerInch
                 | PixelsPerCm
                 | PixelsPerMm

instance Enumeration DensityUnit where
    enumToMNat PixelsPerInch = 0x01
    enumToMNat PixelsPerCm = 0x02
    enumToMNat PixelsPerMm = 0x03

-- | Source pixel density as a standardised preset
mSourcePixelDensityStf :: DensityStf -> MString
mSourcePixelDensityStf density = pid ++ enumParm density

-- | Source pixel density as explicit values: horizontal and vertical
-- numerator\/denominator pairs plus the unit.
mSourcePixelDensityVal :: Int -> Int -> Int -> Int -> DensityUnit -> MString
mSourcePixelDensityVal phnum phden pvnum pvden densityUnit =
    pid ++ intParm phnum ++ intParm phden
        ++ intParm pvnum ++ intParm pvden
        ++ enumParm densityUnit
-- | Vertical sweep direction of the source scan
data VerticalSweep = TopToBottom | BottomToTop

instance Enumeration VerticalSweep where
    enumToMNat TopToBottom = 0x01
    enumToMNat BottomToTop = 0x02

-- | Horizontal sweep direction of the source scan
data HorizontalSweep = LeftToRight | RightToLeft

instance Enumeration HorizontalSweep where
    enumToMNat LeftToRight = 0x01
    enumToMNat RightToLeft = 0x02

-- | Source sweep direction (vertical then horizontal)
mSourceSweepDirection :: VerticalSweep -> HorizontalSweep -> MString
mSourceSweepDirection sdir sdil = swd ++ enumParm sdir ++ enumParm sdil

-- | DCI images flag
mDCImages :: Bool -> MString
mDCImages b = dci ++ boolParm b
-- * Source Signal Attributes

-- | Colour component model of the source signal.
-- The Minitel only supports ComponentY
data Component = ComponentRGB
               | ComponentYCrCb
               | ComponentCMYK
               | ComponentY

instance Enumeration Component where
    enumToMNat ComponentRGB = 0x01
    enumToMNat ComponentYCrCb = 0x02
    enumToMNat ComponentCMYK = 0x03
    enumToMNat ComponentY = 0x04

-- | Source component description
mSourceComponentDescription :: Component -> MString
mSourceComponentDescription com = scd ++ enumParm com

-- | Source component data precision (bits per component)
mSourceComponentDataPrecision :: Int -> MString
mSourceComponentDataPrecision cpt = cdp ++ intParm cpt

-- | Source component order
mSourceComponentOrder :: Int -> MString
mSourceComponentOrder cor = cmo ++ intParm cor

-- | Level assignment scheme (only CCIR 601-1 is defined)
data LevelAssignment = CCIR6011

instance Enumeration LevelAssignment where
    enumToMNat CCIR6011 = 0x01

-- | Source level assignment, fixed to the CCIR 601-1 scheme
mSourceLevelAssignmentFix :: MString
mSourceLevelAssignmentFix = las ++ enumParm CCIR6011

-- | Source level assignment with explicit low\/high levels
mSourceLevelAssignment :: Int -> Int -> MString
mSourceLevelAssignment low hi = las ++ intParm low ++ intParm hi
-- * Source Coding Algorithm

data JPGHierarchical = NonHierarchical | Hierarchical
data JPGTransform = DCT | DPCM
data JPGOrder = Sequential | Progressive
data JPGCoding = Huffman | Arithmetic

-- | JPEG coding mode.  Each option contributes a distinct bit (0x01, 0x02,
-- 0x04, 0x08), so summing them is equivalent to OR-ing the flags together.
mJPEGCodingMode :: JPGHierarchical -> JPGTransform -> JPGOrder -> JPGCoding
                -> MString
mJPEGCodingMode h t o c = jpg ++ intParm (ih + it + io + ic)
    where ih = case h of NonHierarchical -> 0x00
                         Hierarchical    -> 0x01
          it = case t of DCT  -> 0x00
                         DPCM -> 0x02
          io = case o of Sequential  -> 0x00
                         Progressive -> 0x04
          ic = case c of Huffman    -> 0x00
                         Arithmetic -> 0x08
-- | Kind of JPEG encoding table being referenced
data TableType = TableQuantisation
               | TableHuffman

instance Enumeration TableType where
    enumToMNat TableQuantisation = 0x01
    enumToMNat TableHuffman = 0x02

-- | How the referenced table is to be obtained
data TableManagement = LoadDefaultTable
                     | UseCurrentTable
                     | TableWillBeTransferred

instance Enumeration TableManagement where
    enumToMNat LoadDefaultTable = 0x01
    enumToMNat UseCurrentTable = 0x02
    enumToMNat TableWillBeTransferred = 0x03

-- | Encoding table management: table type, table id, management action
mEncodingTableManagement :: TableType -> Int -> TableManagement -> MString
mEncodingTableManagement ttp tid tst =
    etm ++ enumParm ttp ++ intParm tid ++ enumParm tst
-- | Application marker codes.  Only the first two have assigned meanings;
-- the rest are reserved slots (0x02 .. 0x0F).
data ApplicationMarker = AnimatedImages
                       | ColourPaletteDefinition
                       | ToBeAllocated02
                       | ToBeAllocated03
                       | ToBeAllocated04
                       | ToBeAllocated05
                       | ToBeAllocated06
                       | ToBeAllocated07
                       | ToBeAllocated08
                       | ToBeAllocated09
                       | ToBeAllocated0A
                       | ToBeAllocated0B
                       | ToBeAllocated0C
                       | ToBeAllocated0D
                       | ToBeAllocated0E
                       | ToBeAllocated0F

instance Enumeration ApplicationMarker where
    enumToMNat AnimatedImages = 0x00
    enumToMNat ColourPaletteDefinition = 0x01
    enumToMNat ToBeAllocated02 = 0x02
    enumToMNat ToBeAllocated03 = 0x03
    enumToMNat ToBeAllocated04 = 0x04
    enumToMNat ToBeAllocated05 = 0x05
    enumToMNat ToBeAllocated06 = 0x06
    enumToMNat ToBeAllocated07 = 0x07
    enumToMNat ToBeAllocated08 = 0x08
    enumToMNat ToBeAllocated09 = 0x09
    enumToMNat ToBeAllocated0A = 0x0A
    enumToMNat ToBeAllocated0B = 0x0B
    enumToMNat ToBeAllocated0C = 0x0C
    enumToMNat ToBeAllocated0D = 0x0D
    enumToMNat ToBeAllocated0E = 0x0E
    enumToMNat ToBeAllocated0F = 0x0F

-- | Application marker code assignment
mApplicationMarkerCodesAssignment :: ApplicationMarker -> MString
mApplicationMarkerCodesAssignment mak = ama ++ enumParm mak
-- * Transmission Channel Attributes

-- | How photo data is translated for transport over the channel
data TranslationMode = NoTranslation
                     | NoTranslationExceptUS
                     | Encoding3in4
                     | Shift8bits
                     | Shift7bits
                     | NoTranslationExceptSp

instance Enumeration TranslationMode where
    enumToMNat NoTranslation = 0x00
    enumToMNat NoTranslationExceptUS = 0x01
    enumToMNat Encoding3in4 = 0x02
    enumToMNat Shift8bits = 0x03
    enumToMNat Shift7bits = 0x04
    enumToMNat NoTranslationExceptSp = 0x05

-- | Translation mode encoding
mTranslationModeEncoding :: TranslationMode -> MString
mTranslationModeEncoding tmod = tme ++ enumParm tmod
| Zigazou/HaMinitel | src/Minitel/Generate/Photo/Parameter.hs | gpl-3.0 | 11,461 | 0 | 12 | 3,161 | 2,204 | 1,192 | 1,012 | 240 | 5 |
module Examples where
import Triangulation
import TriangulationCxtObject
import StandardCoordinates
import Data.TrieSet as Set
import FaceLattice
import ParseJvx
-- | Figure 1 in "Normal surfaces in topologically finite 3-manifolds (Tillmann)"
fig1 :: Triangulation
fig1 = triang [( 0 ./ tABD, 0 ./ oBCD)]

-- | Complement of the figure-eight knot: two tetrahedra, four face gluings.
eightComplement = triang [ ( 0 ./ tABC, 1 ./ oDBC)
                         , ( 0 ./ tABD, 1 ./ oACD)
                         , ( 0 ./ tACD, 1 ./ oACB)
                         , ( 0 ./ tBCD, 1 ./ oBAD)
                         ]

-- Small test triangulations (single gluing each).
tt2 :: Triangulation
tt2 = triang [( 0 ./ tACD, 1 ./ oCBD)]

tt3 :: Triangulation
tt3 = triang [( 1 ./ tABD, 1 ./ oDBC)]

-- | Boundary complex of the 4-simplex: the five facets of {0..4}.
standard3Sphere :: Triangulation
standard3Sphere = convertToTriangulation [(1,2,3,4),(0,2,3,4),(0,1,3,4),(0,1,2,4),(0,1,2,3)]

-- Earlier, generate-and-filter construction kept for reference:
-- standard3Sphere = fromRight $ mkTriangulationG tets gluings
--     where
--         tets = subsetsSized 4 (Set.fromList [1::Int .. 5])
--
--
--         gluings = [ (tet,tri,tet',g,tri') |
--
--                         tet <- tets,
--                         tet' <- tets,
--                         tet /= tet',
--                         tri <- allTriangles,
--                         let verts = [ Set.elemAt (fromEnum v) tet | v <- vertexList tri ],
--                         g <- allS3,
--                         tri' <- allTriangles,
--                         let verts' = [ Set.elemAt (fromEnum v) tet' | v <- vertexList (packOrderedFace g tri') ],
--                         Set.fromList verts == Set.fromList verts'
--                     ]

tt10 = triang [ ( 0 ./ tABD, 0 ./ oBCD) ,
                ( 0 ./ tABC, 0 ./ oACD) ]

tt11 = triang [ ( 0 ./ tABD, 0 ./ oBCD) ,
                ( 0 ./ tABC, 0 ./ oDAC) ]
-- | A 90-tetrahedron triangulation given as an explicit face-gluing table.
-- Mechanical data -- each entry glues a triangle of one tetrahedron to an
-- ordered triangle of another.  NOTE(review): presumably machine-generated;
-- verify against its original source before editing by hand.
phs = fromRight $ mkTriangulation [0..89]
        [
          ( 1 ./ tABC, 0 ./ oABC)
        , ( 3 ./ tABC, 2 ./ oABC)
        , ( 4 ./ tABC, 0 ./ oABD)
        , ( 4 ./ tABD, 2 ./ oABD)
        , ( 3 ./ tABD, 1 ./ oABD)
        , ( 6 ./ tABC, 5 ./ oABC)
        , ( 8 ./ tABC, 7 ./ oABC)
        , ( 9 ./ tABC, 7 ./ oABD)
        , ( 8 ./ tABD, 5 ./ oABD)
        , ( 9 ./ tABD, 6 ./ oABD)
        , (10 ./ tABC, 0 ./ oACD)
        , (10 ./ tABD, 5 ./ oACD)
        , ( 6 ./ tACD, 1 ./ oACD)
        , (12 ./ tABC, 11 ./ oABC)
        , (14 ./ tABC, 13 ./ oABC)
        , (15 ./ tABC, 13 ./ oABD)
        , (14 ./ tABD, 11 ./ oABD)
        , (15 ./ tABD, 12 ./ oABD)
        , (16 ./ tABC, 11 ./ oACD)
        , (12 ./ tACD, 2 ./ oACD)
        , (16 ./ tABD, 3 ./ oACD)
        , (18 ./ tABC, 17 ./ oABC)
        , (17 ./ tABD, 7 ./ oACD)
        , (19 ./ tABC, 18 ./ oABD)
        , (19 ./ tABD, 8 ./ oACD)
        , (20 ./ tABC, 17 ./ oACD)
        , (18 ./ tACD, 13 ./ oACD)
        , (20 ./ tABD, 14 ./ oACD)
        , (22 ./ tABC, 21 ./ oABC)
        , (21 ./ tABD, 10 ./ oACD)
        , (22 ./ tABD, 4 ./ oACD)
        , (23 ./ tABC, 20 ./ oACD)
        , (23 ./ tABD, 9 ./ oACD)
        , (21 ./ tACD, 19 ./ oACD)
        , (22 ./ tACD, 15 ./ oACD)
        , (23 ./ tACD, 16 ./ oACD)
        , (25 ./ tABC, 24 ./ oABC)
        , (27 ./ tABC, 26 ./ oABC)
        , (26 ./ tABD, 24 ./ oABD)
        , (28 ./ tABC, 25 ./ oABD)
        , (28 ./ tABD, 27 ./ oABD)
        , (29 ./ tABC, 0 ./ oBCD)
        , (31 ./ tABC, 30 ./ oABC)
        , (30 ./ tABD, 29 ./ oABD)
        , (31 ./ tABD, 1 ./ oBCD)
        , (33 ./ tABC, 32 ./ oABC)
        , (34 ./ tABC, 24 ./ oACD)
        , (32 ./ tABD, 25 ./ oACD)
        , (34 ./ tABD, 33 ./ oABD)
        , (36 ./ tABC, 35 ./ oABC)
        , (37 ./ tABC, 35 ./ oABD)
        , (36 ./ tABD, 2 ./ oBCD)
        , (37 ./ tABD, 3 ./ oBCD)
        , (39 ./ tABC, 38 ./ oABC)
        , (40 ./ tABC, 26 ./ oACD)
        , (38 ./ tABD, 27 ./ oACD)
        , (40 ./ tABD, 39 ./ oABD)
        , (41 ./ tABC, 32 ./ oACD)
        , (42 ./ tABC, 33 ./ oACD)
        , (42 ./ tABD, 41 ./ oABD)
        , (38 ./ tACD, 29 ./ oACD)
        , (39 ./ tACD, 4 ./ oBCD)
        , (35 ./ tACD, 34 ./ oACD)
        , (40 ./ tACD, 36 ./ oACD)
        , (30 ./ tACD, 28 ./ oACD)
        , (41 ./ tACD, 31 ./ oACD)
        , (42 ./ tACD, 37 ./ oACD)
        , (44 ./ tABC, 43 ./ oABC)
        , (45 ./ tABC, 5 ./ oBCD)
        , (45 ./ tABD, 43 ./ oABD)
        , (44 ./ tABD, 6 ./ oBCD)
        , (46 ./ tABC, 24 ./ oBCD)
        , (47 ./ tABC, 25 ./ oBCD)
        , (47 ./ tABD, 43 ./ oACD)
        , (46 ./ tABD, 44 ./ oACD)
        , (26 ./ tBCD, 7 ./ oBCD)
        , (48 ./ tABC, 8 ./ oBCD)
        , (48 ./ tABD, 27 ./ oBCD)
        , (46 ./ tACD, 9 ./ oBCD)
        , (49 ./ tABC, 28 ./ oBCD)
        , (49 ./ tABD, 47 ./ oACD)
        , (50 ./ tABC, 48 ./ oACD)
        , (50 ./ tABD, 45 ./ oACD)
        , (50 ./ tACD, 49 ./ oACD)
        , (52 ./ tABC, 51 ./ oABC)
        , (53 ./ tABC, 51 ./ oABD)
        , (52 ./ tABD, 43 ./ oBCD)
        , (53 ./ tABD, 44 ./ oBCD)
        , (54 ./ tABC, 51 ./ oACD)
        , (56 ./ tABC, 55 ./ oABC)
        , (55 ./ tABD, 54 ./ oABD)
        , (56 ./ tABD, 52 ./ oACD)
        , (57 ./ tABC, 54 ./ oACD)
        , (57 ./ tABD, 53 ./ oACD)
        , (59 ./ tABC, 58 ./ oABC)
        , (61 ./ tABC, 60 ./ oABC)
        , (62 ./ tABC, 58 ./ oABD)
        , (60 ./ tABD, 59 ./ oABD)
        , (62 ./ tABD, 61 ./ oABD)
        , (58 ./ tACD, 10 ./ oBCD)
        , (59 ./ tACD, 29 ./ oBCD)
        , (63 ./ tABC, 55 ./ oACD)
        , (63 ./ tABD, 60 ./ oACD)
        , (61 ./ tACD, 56 ./ oACD)
        , (63 ./ tACD, 30 ./ oBCD)
        , (57 ./ tACD, 31 ./ oBCD)
        , (62 ./ tACD, 45 ./ oBCD)
        , (64 ./ tABC, 51 ./ oBCD)
        , (64 ./ tABD, 11 ./ oBCD)
        , (52 ./ tBCD, 12 ./ oBCD)
        , (66 ./ tABC, 65 ./ oABC)
        , (65 ./ tABD, 64 ./ oACD)
        , (66 ./ tABD, 53 ./ oBCD)
        , (68 ./ tABC, 67 ./ oABC)
        , (32 ./ tBCD, 13 ./ oBCD)
        , (67 ./ tABD, 33 ./ oBCD)
        , (68 ./ tABD, 14 ./ oBCD)
        , (70 ./ tABC, 69 ./ oABC)
        , (69 ./ tABD, 67 ./ oACD)
        , (68 ./ tACD, 65 ./ oACD)
        , (70 ./ tABD, 66 ./ oACD)
        , (69 ./ tACD, 34 ./ oBCD)
        , (70 ./ tACD, 46 ./ oBCD)
        , (47 ./ tBCD, 15 ./ oBCD)
        , (71 ./ tABC, 54 ./ oBCD)
        , (72 ./ tABC, 71 ./ oABD)
        , (72 ./ tABD, 64 ./ oBCD)
        , (73 ./ tABC, 55 ./ oBCD)
        , (73 ./ tABD, 35 ./ oBCD)
        , (56 ./ tBCD, 36 ./ oBCD)
        , (73 ./ tACD, 71 ./ oACD)
        , (74 ./ tABC, 72 ./ oACD)
        , (74 ./ tABD, 37 ./ oBCD)
        , (74 ./ tACD, 16 ./ oBCD)
        , (75 ./ tABC, 17 ./ oBCD)
        , (76 ./ tABC, 18 ./ oBCD)
        , (77 ./ tABC, 75 ./ oABD)
        , (77 ./ tABD, 76 ./ oABD)
        , (65 ./ tBCD, 38 ./ oBCD)
        , (78 ./ tABC, 39 ./ oBCD)
        , (78 ./ tABD, 66 ./ oBCD)
        , (75 ./ tACD, 40 ./ oBCD)
        , (71 ./ tBCD, 19 ./ oBCD)
        , (76 ./ tACD, 57 ./ oBCD)
        , (72 ./ tBCD, 48 ./ oBCD)
        , (78 ./ tACD, 77 ./ oACD)
        , (67 ./ tBCD, 58 ./ oBCD)
        , (68 ./ tBCD, 59 ./ oBCD)
        , (60 ./ tBCD, 20 ./ oBCD)
        , (75 ./ tBCD, 61 ./ oBCD)
        , (76 ./ tBCD, 41 ./ oBCD)
        , (79 ./ tABC, 62 ./ oBCD)
        , (79 ./ tABD, 42 ./ oBCD)
        , (79 ./ tACD, 77 ./ oBCD)
        , (81 ./ tABC, 80 ./ oABC)
        , (80 ./ tABD, 69 ./ oBCD)
        , (82 ./ tABC, 70 ./ oBCD)
        , (82 ./ tABD, 81 ./ oABD)
        , (80 ./ tACD, 21 ./ oBCD)
        , (83 ./ tABC, 22 ./ oBCD)
        , (83 ./ tABD, 81 ./ oACD)
        , (84 ./ tABC, 78 ./ oBCD)
        , (84 ./ tABD, 83 ./ oACD)
        , (84 ./ tACD, 82 ./ oACD)
        , (80 ./ tBCD, 73 ./ oBCD)
        , (85 ./ tABC, 63 ./ oBCD)
        , (85 ./ tABD, 81 ./ oBCD)
        , (86 ./ tABC, 23 ./ oBCD)
        , (86 ./ tABD, 85 ./ oACD)
        , (86 ./ tACD, 82 ./ oBCD)
        , (87 ./ tABC, 49 ./ oBCD)
        , (87 ./ tABD, 85 ./ oBCD)
        , (87 ./ tACD, 83 ./ oBCD)
        , (88 ./ tABC, 50 ./ oBCD)
        , (88 ./ tABD, 74 ./ oBCD)
        , (88 ./ tACD, 79 ./ oBCD)
        , (89 ./ tABC, 88 ./ oBCD)
        , (89 ./ tABD, 87 ./ oBCD)
        , (89 ./ tACD, 86 ./ oBCD)
        , (89 ./ tBCD, 84 ./ oBCD)
        ]
| DanielSchuessler/hstri | Examples.hs | gpl-3.0 | 7,696 | 0 | 9 | 3,023 | 3,461 | 2,046 | 1,415 | 205 | 1 |
module Export
(
writeMatlabFile
) where
import System.IO
import Text.Printf
-- | Write SIR dynamics to a MATLAB script that plots the susceptible /
-- infected / recovered population ratios over time.
--
-- Uses 'withFile' so the handle is closed even if a write fails (the
-- previous openFile/hClose pair leaked the handle on exception).
writeMatlabFile :: String -> [(Double, Double, Double)] -> IO ()
writeMatlabFile fileName dynamics =
    withFile fileName WriteMode $ \fileHdl -> do
        hPutStrLn fileHdl "dynamics = ["
        -- one "s,i,r;" row per time step
        mapM_ (hPutStrLn fileHdl . sirAggregateToString) dynamics
        mapM_ (hPutStrLn fileHdl) epilogue
  where
    -- Fixed MATLAB code emitted after the data matrix: normalise to ratios
    -- and plot the three curves.
    epilogue =
        [ "];"
        , "susceptible = dynamics (:, 1);"
        , "infected = dynamics (:, 2);"
        , "recovered = dynamics (:, 3);"
        , "totalPopulation = susceptible(1) + infected(1) + recovered(1);"
        , "susceptibleRatio = susceptible ./ totalPopulation;"
        , "infectedRatio = infected ./ totalPopulation;"
        , "recoveredRatio = recovered ./ totalPopulation;"
        , "steps = length (susceptible);"
        , "indices = 0 : steps - 1;"
        , "figure"
        , "plot (indices, susceptibleRatio.', 'color', 'blue', 'linewidth', 2);"
        , "hold on"
        , "plot (indices, infectedRatio.', 'color', 'red', 'linewidth', 2);"
        , "hold on"
        , "plot (indices, recoveredRatio.', 'color', 'green', 'linewidth', 2);"
        , "set(gca,'YTick',0:0.05:1.0);"
        , "xlabel ('Time');"
        , "ylabel ('Population Ratio');"
        , "legend('Susceptible','Infected', 'Recovered');"
        ]
-- | Render one SIR aggregate as a MATLAB matrix row: "s,i,r;".
-- A single format string replaces the previous chain of printf/(++) calls.
sirAggregateToString :: (Double, Double, Double) -> String
sirAggregateToString (susceptibleCount, infectedCount, recoveredCount) =
    printf "%f,%f,%f;" susceptibleCount infectedCount recoveredCount
{-# OPTIONS_GHC -XFlexibleInstances -XTypeSynonymInstances #-}
{- Channel basics for HSH
Copyright (C) 2004-2008 John Goerzen <[email protected]>
Please see the COPYRIGHT file
-}
{- |
Module : HSH.Channel
Copyright : Copyright (C) 2006-2009 John Goerzen
License : GNU LGPL, version 2.1 or above
Maintainer : John Goerzen <[email protected]>
Stability : provisional
Portability: portable
Copyright (c) 2006-2009 John Goerzen, jgoerzen\@complete.org
-}
module HSH.Channel (Channel(..),
chanAsString,
chanAsBSL,
chanAsBS,
chanToHandle,
Channelizable(..)
) where
import qualified Data.ByteString.Lazy as BSL
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy.Char8 as BSLC
import System.IO
import Control.Concurrent
{- | The main type for communicating between commands. All are expected to
be lazy. -}
data Channel = ChanString String      -- ^ in-memory 'String' payload
             | ChanBSL BSL.ByteString -- ^ lazy 'ByteString' payload
             | ChanHandle Handle      -- ^ data to be read from a 'Handle'
-- | View a channel's contents as a 'String'.  The 'ChanHandle' case uses
-- lazy 'hGetContents', so the handle is consumed as the string is demanded.
chanAsString :: Channel -> IO String
chanAsString (ChanString s) = return s
chanAsString (ChanBSL s) = return . bsl2str $ s
chanAsString (ChanHandle h) = hGetContents h

-- | View a channel's contents as a lazy 'BSL.ByteString' (lazily read from
-- the handle in the 'ChanHandle' case).
chanAsBSL :: Channel -> IO BSL.ByteString
chanAsBSL (ChanString s) = return . str2bsl $ s
chanAsBSL (ChanBSL s) = return s
chanAsBSL (ChanHandle h) = BSL.hGetContents h
-- | View a channel's contents as a strict 'BS.ByteString' by concatenating
-- the chunks of the lazy representation.  (Replaces a do/return block with
-- the equivalent fmap.)
chanAsBS :: Channel -> IO BS.ByteString
chanAsBS c = fmap (BS.concat . BSL.toChunks) (chanAsBSL c)
{- | Writes the Channel to the given Handle. If the first parameter is True,
do this in a separate thread and close the handle afterwards.

NOTE(review): the 'ChanHandle' case relies on lazy 'BSL.hGetContents', so the
source handle stays open until its contents are fully consumed.
-}
chanToHandle :: Bool -> Channel -> Handle -> IO ()
chanToHandle close c h = if close then forkIO (dumpChanToHandle c h >> hClose h) >> return ()
                         else dumpChanToHandle c h
    where dumpChanToHandle (ChanString s) h = hPutStr h s
          dumpChanToHandle (ChanBSL s) h = BSL.hPut h s
          dumpChanToHandle (ChanHandle srchdl) desthdl
              = BSL.hGetContents srchdl >>= BSL.hPut desthdl
-- | Things that can be turned into a 'Channel' without copying.
class Channelizable a where
    toChannel :: a -> Channel
instance Channelizable String where
    toChannel = ChanString
instance Channelizable BSL.ByteString where
    toChannel = ChanBSL
instance Channelizable Handle where
    toChannel = ChanHandle
instance Channelizable BS.ByteString where
    -- a strict ByteString becomes a one-chunk lazy ByteString
    toChannel bs = ChanBSL . BSL.fromChunks $ [bs]
-- | Pack a String into a lazy ByteString (Char8: truncates chars to 8 bits).
str2bsl :: String -> BSL.ByteString
str2bsl = BSLC.pack

-- | Unpack a lazy ByteString into a String (Char8 decoding).
bsl2str :: BSL.ByteString -> String
bsl2str = BSLC.unpack
| jgoerzen/hsh | HSH/Channel.hs | lgpl-2.1 | 2,663 | 0 | 11 | 673 | 572 | 301 | 271 | 48 | 4 |
-- DyreExample.hs --
module DyreExample ( dyreExample ) where
import qualified Config.Dyre as Dyre
-- | Error formatter used as 'Dyre.showError': the compile error is prefixed
-- with "Error:" and followed by the configuration message.
confError :: String -> String -> String
confError cfgMessage err = concat ["Error:", err, "\n", cfgMessage]
-- | The real entry point that Dyre wraps: announce startup, then print the
-- configured message.
realMain :: String -> IO ()
realMain message = mapM_ putStrLn ["Entered Main Function", message]
-- | Dyre-wrapped entry point; see "Config.Dyre" for the dynamic
-- recompilation behaviour selected by these parameters.
dyreExample = Dyre.wrapMain Dyre.defaultParams
    { Dyre.projectName = "dyreExample"
    , Dyre.showError = confError
    , Dyre.realMain = realMain
    }
| dmp1ce/dyreExampleCabal | src/DyreExample.hs | unlicense | 404 | 0 | 7 | 76 | 98 | 54 | 44 | 10 | 1 |
module LSystem.Interprets (
InterpretFunc,
InterpretFuncMapped,
avibleInterprets,
InterpretInstruction(..),
InstructionMap,
parseIMap,
interpretLSystemRaw
) where
import Data.Map
import Data.Char
import Data.Maybe
import LSystem.Utils
import LSystem.DataTypes
import LSystem.Expressions (isTrueInDoubleLogic)
import LSystem.ShowReadUtils
----------------------------------------------------------------------------------------------------

-- | Type of interpret function: takes the symbol-to-instruction mapping and
-- the derived symbol string, produces the output document.
type InterpretFunc = InstructionMap -> [SymbolDouble] -> String

-- | Type of interpret function with mapped instructions.
type InterpretFuncMapped = [SymbolDouble] -> String

-- | Map of file extensions to available interprets. Key (extension) is lowercase.
-- (The exported name "avibleInterprets" is a historical typo kept for API
-- compatibility.)
avibleInterprets :: Map String InterpretFunc
avibleInterprets = fromList [
        ("txt", interpretLSystemRaw),
        ("svg", interpretLSystemToSvg)
    ]
----------------------------------------------------------------------------------------------------
-- | Type for map symbol to instruction: the turtle-graphics vocabulary plus
-- an escape hatch ('SpecialInstruction') for interpreter-specific commands.
data InterpretInstruction =
    DoNothing |
    DrawLineForward |
    MoveForward |
    TurnLeft |
    TurnRight |
    StartBranch |
    CompleteBranch |
    SpecialInstruction String
    deriving (Show, Read, Eq, Ord)
-- type ReadS a = String -> [(a, String)]
-- | Reads an 'InterpretInstruction' from the given string.  When the stock
-- 'Read' parse yields nothing, the whole string (one lexed token, fully
-- consumed) is treated as the name of a 'SpecialInstruction'.  An empty
-- string yields no parse at all.
readJustInterpretInstruction :: ReadS InterpretInstruction
readJustInterpretInstruction str
    | Prelude.null str = []
    | otherwise        =
        case reads str of
            [] -> [(SpecialInstruction name, "") | (name, "") <- lex str]
            ps -> ps
-- | Maps each symbol to the instruction it is interpreted as.
type InstructionMap = Map Symbol InterpretInstruction

-- | Tries to parse an instruction association from every non-empty element
-- of the list.  If even one association fails to parse, Nothing is returned.
parseIMap :: [String] -> Maybe InstructionMap
parseIMap strLines = parseIAssocAcc empty $ filterNonEmpty strLines

-- Accumulating worker: folds associations into the map, short-circuiting on
-- the first parse failure.  (Pattern matching replaces the previous partial
-- head/tail calls.)
parseIAssocAcc :: InstructionMap -> [String] -> Maybe InstructionMap
parseIAssocAcc acc [] = Just acc
parseIAssocAcc acc (l:ls) = do
    (symbol, ii) <- parseIAssoc l
    parseIAssocAcc (insert symbol ii acc) ls
-- | Tries to parse one "symbol -> instruction" association: a readable
-- Symbol followed by an instruction that consumes the rest of the line.
-- Returns the first complete parse, if any.
parseIAssoc :: String -> Maybe (Symbol, InterpretInstruction)
parseIAssoc str = maybeHead [(symbol, instr) |
    (symbol, rest) <- reads str,
    (instr, "") <- readJustInterpretInstruction rest]
----------------------------------------------------------------------------------------------------
-- | Type synonim for interpret inner function which takes old iState, new iState, its state and
-- symbol and produces tuple with newest interpret state (which will be same as new iState if no
-- special instructions are recognised) and its new state.
type InterpretInnerFunc a = InstructionMap -> InterpretState -> InterpretState -> a -> SymbolDouble
    -> Maybe (InterpretState, a)

-- | Turtle state shared by all interprets.
data InterpretState = InterpretState {
        is_x :: Double,       -- ^ current x position
        is_y :: Double,       -- ^ current y position
        is_angle :: Double,   -- ^ current heading in degrees
        is_previous :: Maybe InterpretState, -- ^ saved state of the enclosing branch
        -- | min x, min y, max x, max y
        is_extremes :: (Double, Double, Double, Double)
    }

-- | Default instance of InterpretState (everything zero).
emptyIState :: InterpretState
emptyIState = InterpretState 0 0 0 Nothing (0, 0, 0, 0)
-- | Main interpret loop: for each symbol the shared basic interpretation
-- ('interpretBasicInstr') runs first, then the interpreter-specific function
-- may refine the result.  Any failure (e.g. a parameterless move) aborts the
-- whole run with Nothing.
interpretSymbols :: InstructionMap -> (InterpretState, a) -> InterpretInnerFunc a-> [SymbolDouble]
    -> Maybe (InterpretState, a)
interpretSymbols _ oldState _ [] = Just oldState
interpretSymbols iMap (oldIState, oldCustState) interpretFunc (s:ss) = do
    newIState <- interpretBasicInstr iMap oldIState s
    newState <- interpretFunc iMap oldIState newIState oldCustState s
    interpretSymbols iMap newState interpretFunc ss
-- | Interprets known instructions in common way so real interprets do not
-- have to.  Movement and turning need at least one parameter; missing
-- parameters, as well as closing an unopened branch, yield Nothing.
-- (Shared helpers replace the previous duplicated DrawLineForward /
-- MoveForward branches and the partial 'head' / O(n) 'length' arity check.)
interpretBasicInstr :: InstructionMap -> InterpretState -> SymbolDouble -> Maybe InterpretState
interpretBasicInstr iMap is (SymbolDouble symbol params) =
    case Data.Map.lookup symbol iMap of
        Nothing -> Just is -- unmapped symbol: state unchanged
        Just instruction ->
            case instruction of
                DrawLineForward -> withParam translate
                MoveForward     -> withParam translate
                TurnLeft        -> withParam (\a -> is { is_angle = is_angle is + a })
                TurnRight       -> withParam (\a -> is { is_angle = is_angle is - a })
                StartBranch     -> Just (is { is_previous = Just is })
                CompleteBranch  ->
                    case is_previous is of
                        -- restore saved state, keeping the newest extremes
                        Just prev -> Just (prev { is_extremes = is_extremes is })
                        Nothing   -> Nothing
                _               -> Just is -- DoNothing / special instructions
    where
        -- Applies f to the first parameter; Nothing if none was supplied.
        withParam f = case params of
            (p:_) -> Just (f p)
            []    -> Nothing
        -- Move the turtle forward by d along the current heading, updating
        -- the bounding box as we go.
        translate d =
            let newX = is_x is + cos (degToRad (is_angle is)) * d
                newY = is_y is + sin (degToRad (is_angle is)) * d
            in is { is_x = newX, is_y = newY,
                    is_extremes = countExtremes newX newY (is_extremes is) }
-- | Fold a new point into the running bounding box (min x, min y, max x, max y).
countExtremes :: Double -> Double -> (Double, Double, Double, Double)
    -> (Double, Double, Double, Double)
countExtremes x y (loX, loY, hiX, hiY) = (min x loX, min y loY, max x hiX, max y hiY)
----------------------------------------------------------------------------------------------------
-- | The most primitive L-system interpret: ignores the instruction map and
-- just renders the symbols to a string.  (concatMap replaces the previous
-- manual recursion.)
interpretLSystemRaw :: InterpretFunc
interpretLSystemRaw _ = concatMap show
----------------------------------------------------------------------------------------------------
-- | Interprets given symbols with respect to given instruction mapping as an
-- SVG image.  Runs the shared interpret loop with the SVG inner function,
-- closes any still-open polyline, and serialises the result; a failed
-- interpretation yields the empty string.
interpretLSystemToSvg :: InterpretFunc
interpretLSystemToSvg _ [] = ""
interpretLSystemToSvg iMap ss =
    case interpretSymbols iMap (emptyIState, emptySvgState) svgInterpretFunc ss of
        Just (iState, svgState) -> getSvgData iState $ closeSvgPolyline svgState
        Nothing -> []
-- | SVG inner interpret function: draws lines as polyline points, breaks the
-- current polyline on moves and branch closes, and delegates
-- 'SpecialInstruction's to 'svgInterpretSpecial'.
svgInterpretFunc :: InterpretInnerFunc SvgState
svgInterpretFunc iMap oldIState newIState svgState (SymbolDouble symbol params) =
    case Data.Map.lookup symbol iMap of
        Just instruction ->
            let InterpretState oldX oldY _ _ _ = oldIState
                InterpretState newX newY _ _ _ = newIState in
            case instruction of
                DrawLineForward ->
                    -- starting a fresh polyline also records the start point
                    let newSvgState =
                            if isLineOpened svgState then
                                svgState
                            else
                                addPointToSvgPolyline svgState (oldX, oldY)
                        in
                    Just (newIState, addPointToSvgPolyline newSvgState (newX, newY))
                MoveForward -> Just (newIState, closeSvgPolyline svgState)
                CompleteBranch -> Just (newIState, closeSvgPolyline svgState)
                SpecialInstruction instr -> svgInterpretSpecial newIState svgState params instr
                _ -> Just (newIState, svgState) -- unhandled instructions, do nothing by default
        Nothing -> Just (newIState, svgState) -- unknown symbol, do nothing
-- | Interprets SVG special instructions: explicit polyline recording
-- (start \/ record vertex \/ end).  "EndPolyline" reads its first parameter
-- as a boolean (via 'isTrueInDoubleLogic') deciding whether the polyline is
-- closed; ending with no polyline open yields Nothing.
svgInterpretSpecial :: InterpretState -> SvgState -> [Double] -> String
    -> Maybe (InterpretState, SvgState)
svgInterpretSpecial iState svgState params instr =
    let InterpretState x y _ _ _ = iState in
    case instr of
        "StartPolyline" -> Just (iState, openSpecSvgPolyline svgState)
        "RecordPolylineVertex" -> Just (iState, addSpecPointToSvgPolyline svgState (x, y))
        "EndPolyline" -> do
            newSvgState <- closeSpecSvgPolyline svgState
                $ checkNthParam isTrueInDoubleLogic 1 params
            Just (iState, newSvgState)
        _ -> Just (iState, svgState) -- unknown instruction, do nothing
-- | Calls the given predicate on the n-th (1-based) parameter.  Returns
-- False when fewer than n parameters are present or when n is not positive.
-- (The previous 'last'-based version crashed via @last []@ for n <= 0.)
checkNthParam :: (Double -> Bool) -> Int -> [Double] -> Bool
checkNthParam func n params =
    maybe False func (listToMaybe (drop (n - 1) (take n params)))
-- | State of SVG interpret.
data SvgState = SvgState {
        svgs_finishedLines :: [PolylineD], -- ^ completed polylines (output)
        svgs_openedLine :: PolylineD,      -- ^ polyline currently being drawn
        svgs_openedLineSpec :: PolylineD,  -- ^ special polyline being recorded
        svgs_lineSpecStack :: [PolylineD]  -- ^ suspended special polylines
    }

type PointD = (Double, Double)
type PolylineD = [PointD]

-- | Empty instance of SvgState.
emptySvgState :: SvgState
emptySvgState = SvgState [] [] [] []
-- | Does the current (ordinary) polyline already contain any points?
isLineOpened :: SvgState -> Bool
isLineOpened = not . Prelude.null . svgs_openedLine

-- | Prepend a point to the polyline currently being drawn.
addPointToSvgPolyline :: SvgState -> PointD -> SvgState
addPointToSvgPolyline st p = st { svgs_openedLine = p : svgs_openedLine st }

-- | Finish the polyline currently being drawn; safe to call when none is
-- open.  A non-empty line is moved to the finished list, an empty one is
-- simply discarded.
closeSvgPolyline :: SvgState -> SvgState
closeSvgPolyline st
    | Prelude.null opened = st { svgs_openedLine = [] }
    | otherwise           = st { svgs_finishedLines = opened : svgs_finishedLines st,
                                 svgs_openedLine = [] }
    where opened = svgs_openedLine st

-- | Begin a new special polyline, pushing the one in progress onto the stack.
openSpecSvgPolyline :: SvgState -> SvgState
openSpecSvgPolyline st =
    st { svgs_openedLineSpec = [],
         svgs_lineSpecStack = svgs_openedLineSpec st : svgs_lineSpecStack st }

-- | Prepend a point to the special polyline currently being recorded.
addSpecPointToSvgPolyline :: SvgState -> PointD -> SvgState
addSpecPointToSvgPolyline st p =
    st { svgs_openedLineSpec = p : svgs_openedLineSpec st }
-- | Ends the current special polyline; if the second parameter is True the
-- polyline is also closed (first and last points are connected).  Pops the
-- previously suspended special polyline off the stack; an empty stack means
-- an unmatched end and yields Nothing.  An empty special polyline is dropped.
closeSpecSvgPolyline :: SvgState -> Bool -> Maybe SvgState
closeSpecSvgPolyline (SvgState finLines curLine specLine specStack) close =
    if Prelude.null specStack then
        Nothing
    else
        if Prelude.null specLine then
            Just $ SvgState finLines curLine (head specStack) (tail specStack)
        else
            -- 'last specLine' is safe here: specLine is non-empty
            let newLine = if close then last specLine : specLine else specLine in
            Just $ SvgState (newLine:finLines) curLine (head specStack) (tail specStack)
-- | Serialise the finished polylines as a complete SVG document, sized by
-- the bounding box tracked in the interpret state.  No finished lines means
-- no output at all.
getSvgData :: InterpretState -> SvgState -> String
getSvgData _ (SvgState [] _ _ _) = ""
getSvgData (InterpretState _ _ _ _ extremes) (SvgState finLines _ _ _) =
    showsSvgHeader
    . showsBeginOfSvg extremes
    . showsCustAll showsSvgPoliline (endOfSvg++) finLines
    $ ""
-- | Render one polyline as an SVG \<polyline\> element (difference-list style).
showsSvgPoliline :: PolylineD -> ShowS
showsSvgPoliline pts =
    ("<polyline points=\""++)
        . foldr (\p rest -> showsPointPair p . rest) ("\" />\n"++) pts

-- | Render one point as "x,y " with coordinates rounded to two decimals.
showsPointPair :: PointD -> ShowS
showsPointPair (x, y) =
    shows (round2 x) . showChar ',' . shows (round2 y) . showChar ' '
-- | Header of SVG file (XML declaration and SVG 1.1 DOCTYPE).
showsSvgHeader :: ShowS
showsSvgHeader = ("<?xml version=\"1.0\" standalone=\"no\"?>\n"++)
    . ("<!DOCTYPE svg PUBLIC \"-//W3C//DTD SVG 1.1//EN\""++)
    . (" \"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd\">\n"++)

-- | Takes extremes (min x, min y, max x, max y) and returns the SVG start
-- tag with a viewBox attribute padded by one unit on every side, plus the
-- shared stroke-style group.
showsBeginOfSvg :: (Double, Double, Double, Double) -> ShowS
showsBeginOfSvg (minX, minY, maxX, maxY) = ("<svg viewBox=\""++)
    . shows (round2 $ minX - 1) . (' ':) . shows (round2 $ minY - 1) . (' ':)
    . shows (round2 $ maxX - minX + 2) . (' ':) . shows (round2 $ maxY - minY + 2)
    . ("\" xmlns=\"http://www.w3.org/2000/svg\" version=\"1.1\">\n"++)
    . ("<g fill=\"none\" stroke=\"#000000\" stroke-width=\"2\" stroke-linejoin=\"bevel\">\n"++)

-- | SVG end tag (closes the style group and the document).
endOfSvg :: String
endOfSvg = "</g>\n</svg>"
| NightElfik/L-systems-in-Haskell | src/LSystem/Interprets.hs | unlicense | 12,803 | 0 | 22 | 3,213 | 2,875 | 1,524 | 1,351 | 208 | 13 |
-- Copyright 2020 Google LLC
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
module Main where
import Control.Monad (guard)
import Sub.Foo (foo)
main = guard (foo == 3)
| google/cabal2bazel | bzl/tests/hierarchical/Sub/FooTest.hs | apache-2.0 | 684 | 0 | 7 | 119 | 52 | 36 | 16 | 4 | 1 |
-- | The three rock-paper-scissors moves.
data RPS = Rock | Paper | Scissors

-- | Describe the outcome of a round from the first argument's point of
-- view.  The only combinations reaching the catch-all are ties, which
-- deliberately report "Error".
shoot :: RPS -> RPS -> String
shoot mine theirs = case (mine, theirs) of
  (Paper,    Rock)     -> "Paper Beats Rock"
  (Rock,     Scissors) -> "Rock Beats Scissors"
  (Scissors, Paper)    -> "Scissors Beats Paper"
  (Scissors, Rock)     -> "Scissors Loses to Rock "
  (Paper,    Scissors) -> "Paper Loses to Scissors"
  (Rock,     Paper)    -> "Rock Loses to Paper"
  _                    -> "Error"
| hungaikev/learning-haskell | RockPaperScissors.hs | apache-2.0 | 357 | 0 | 6 | 72 | 97 | 50 | 47 | 9 | 1 |
{-# LANGUAGE CPP, ScopedTypeVariables #-}
module FileCons (Cons, openHandle, closeHandle, newCons, newInt, isPair, first, second, setFirst, setSecond, int, getPtr, list, toList, nth, shw, cmpr, cmpr2, encodeString, decodeString, dlookup, lookupSingle, dinsert, deleteFindMin, deleteFindMax, delete, depth, size) where
import System.IO.Unsafe
import Control.Monad
import Data.Bits
import Data.Char
import Foreign.Ptr
import Foreign.ForeignPtr
import Foreign.Storable
import Data.IORef
import System.IO
import Control.Exception
import System.IO.MMap
import Prelude hiding (catch)
-- This module stores a single value in a file. The value is a tree structure like Lisp's
-- conses. The value in the file can be changed while sharing structure with values
-- that were once in the file.
data Cons = Cons !FilePath !(IORef (Ptr Int, Int, Int)) !Int deriving Eq
-- | Map the backing file into memory and return a 'Cons' handle rooted
-- at offset 0.  A brand-new (zero-length) file is seeded with a single
-- (0 . 0) pair; the 'seq' forces the otherwise-lazy 'newCons' allocation.
openHandle path = do
	(p, _, sz) <- mmapFileForeignPtr path ReadWrite Nothing
	ref <- newIORef (p, sz, sz)
	let cons = Cons path ref 0
	when (sz == 0) $ newCons (newInt cons 0) (newInt cons 0) `seq` return ()
	return cons
-- | Release the file mapping held by the handle.  Using the handle (or
-- any 'Cons' derived from it) afterwards is invalid.
-- NOTE(review): 'freeForeignPtr' is not in modern base's
-- Foreign.ForeignPtr ('finalizeForeignPtr' is) -- confirm the intended
-- compiler/base version.
closeHandle (Cons _ ref _) = do
	(p, _, _) <- readIORef ref
	freeForeignPtr p
-- Functions for building and taking apart values.
-- | Allocate a fresh pair cell holding the offsets of the two argument
-- nodes.  When the used region would overflow the current mapping, the
-- file is remapped 100 bytes larger.  'unsafePerformIO' makes allocation
-- look pure; callers must force the result (see the 'seq' in 'openHandle').
newCons cons@(Cons path ref i) (Cons path2 _ j)
#ifdef DEBUG
	| path /= path2 = error "newCons: have to come from same file"
#endif
	| otherwise = unsafePerformIO $ do
		(p, used, sz) <- readIORef ref
		-- Grow the mapping if the 8 bytes for this cell do not fit.
		(p, sz) <- if used + 8 > sz then do
			{-fl <- openBinaryFile path ReadWriteMode
			hSetFileSize fl (toInteger (sz + 100))
			hClose fl-}
			freeForeignPtr p
			(p, _, sz) <- mmapFileForeignPtr path ReadWrite (Just (0, sz + 100))
			return (p, sz)
		else
			return (p, sz)
		-- Write the two child offsets into the freshly claimed cell.
		withForeignPtr p $ \p -> do
			poke (p `plusPtr` used) i
			poke (p `plusPtr` (used + 4)) j
		writeIORef ref (p, used + 8, sz)
		return (Cons path ref used)
{-# INLINE newCons #-}
-- | Wrap a non-negative machine int as an immediate value.  Ints are
-- encoded as the negative offset -(n+1), so they consume no file space.
newInt (Cons path ref _) n
#ifdef DEBUG
	| n < 0 = error "newInt: has to be non-negative"
#endif
	| otherwise = Cons path ref (-(n + 1))
{-# INLINE newInt #-}
-- | Pair cells live at non-negative offsets; ints are encoded negative.
isPair (Cons _ _ i) = i >= 0
{-# INLINE isPair #-}
-- | Read the first component (car) of a pair cell: the Int stored at the
-- cell's offset, reinterpreted as another 'Cons'.
first c@(Cons path ref i)
#ifdef DEBUG
	| i < 0 = error $ "first: not a pair: " ++ show (int c)
#endif
	| otherwise = do
		(p, _, _) <- readIORef ref
		n <- withForeignPtr p $ \p -> peek (p `plusPtr` i)
		return (Cons path ref n)
{-# INLINE first #-}
-- | Read the second component (cdr) of a pair cell, stored 4 bytes after
-- the first.
second c@(Cons path ref i)
#ifdef DEBUG
	| i < 0 = error $ "second: not a pair: " ++ show (int c)
#endif
	| otherwise = do
		(p, _, _) <- readIORef ref
		n <- withForeignPtr p $ \p -> peek (p `plusPtr` (i + 4))
		return (Cons path ref n)
{-# INLINE second #-}
-- | Decode an immediate int node (inverse of 'newInt').
int c@(Cons _ _ i)
#ifdef DEBUG
	| i >= 0 = error $ "int: not an int: " ++ unsafePerformIO (shw c)
#endif
	| otherwise = -(i + 1)
{-# INLINE int #-}
-- | The raw file offset of a pair cell.
getPtr (Cons _ _ i)
#ifdef DEBUG
	| i < 0 = error $ "getPtr: not a pointer: " ++ show i
#endif
	| otherwise = i
{-# INLINE getPtr #-}
-- | Destructively overwrite the first component of a pair cell with the
-- offset of another node from the same file.
setFirst c@(Cons path ref i) (Cons path2 _ j)
#ifdef DEBUG
	| i < 0 = error $ "setFirst: not a pair: " ++ show (int c)
	| path /= path2 = error "setFirst: have to come from same file"
#endif
	| otherwise = do
		(p, used, _) <- readIORef ref
#ifdef DEBUG
		if i >= used then
			error "setFirst: invalid ptr"
		else
#endif
		withForeignPtr p $ \p -> poke (p `plusPtr` i) j
{-# INLINE setFirst #-}
-- | Destructively overwrite the second component of a pair cell.
setSecond c@(Cons path ref i) (Cons path2 _ j)
#ifdef DEBUG
	| i < 0 = error $ "setSecond: not a pair: " ++ show (int c)
	| path /= path2 = error "setSecond: have to come from same file"
#endif
	| otherwise = do
		(p, used, _) <- readIORef ref
#ifdef DEBUG
		if i >= used then
			error "setSecond: invalid ptr"
		else
#endif
		withForeignPtr p $ \p -> poke (p `plusPtr` (i + 4)) j
{-# INLINE setSecond #-}
-- | Build a cons-list from a non-empty list of nodes, terminated by an
-- int-0 sentinel.  The explicit empty-list clause replaces what was a
-- silent pattern-match failure with a descriptive error; non-empty
-- behaviour is unchanged.
list [x] = newCons x (newInt x 0)
list (x:xs) = newCons x (list xs)
list [] = error "FileCons.list: empty list"
-- | Walk the spine of a cons-list back into a Haskell list; stops at the
-- first non-pair tail (the int sentinel), which is discarded.
toList = rec [] where
	rec acc cons = if isPair cons then
			do
				x <- first cons
				s <- second cons
				rec (x : acc) s
		else
			return (reverse acc)
-- | Zero-based indexing into a cons-list: 'first' of the nth tail.
nth 0 cons = first cons
nth n cons = second cons >>= nth (n - 1)
{-# INLINE nth #-}
-- | Pad a string with NULs up to the next multiple of three characters.
-- A string whose length is already a multiple of three still gains three
-- NULs; 'removeZeros' strips the padding again after decoding.
padWithZeros s = s ++ replicate padLen '\0'
	where padLen = 3 - length s `mod` 3
{-# INLINE padWithZeros #-}
-- | Strip trailing NUL padding (the inverse of 'padWithZeros').
-- Interior NULs are preserved: a NUL is dropped only when everything to
-- its right has already been dropped.
removeZeros = foldr keep []
	where
		keep '\0' [] = []
		keep c acc = c : acc
{-# INLINE removeZeros #-}
-- | Fuse each triple of 8-bit character codes into one int:
-- @[a,b,c] -> a*2^16 + b*2^8 + c@.  The input length must be a multiple
-- of three (guaranteed here by 'padWithZeros'); a leftover of one or two
-- elements matches no clause.
pack (a : b : c : rest) = word : pack rest
	where word = shiftL a 16 .|. shiftL b 8 .|. c
pack [] = []
-- | Split each packed int back into three byte values (inverse of 'pack'
-- for inputs built from values below 256; the top word is not masked).
unpack = concatMap explode
	where explode n = [shiftR n 16, shiftR n 8 .&. 255, n .&. 255]
encodeString hdl s = list $ map (newInt hdl) $ pack $ map ord $ padWithZeros s
-- | Inverse of 'encodeString': unpack the int list back to characters and
-- strip the NUL padding.
decodeString cons = liftM (removeZeros . map chr . unpack . map int) (toList cons)
{-# INLINE decodeString #-}
-- | Debug rendering of a node as nested @Cons (..) (..)@ text, with
-- immediate ints shown via 'show'.
shw cons = if isPair cons then
		do
			f <- first cons >>= shw
			s <- second cons >>= shw
			return $ "Cons (" ++ f ++ ") (" ++ s ++ ")"
	else
		return $ show (int cons)
-- | Compare a Haskell String against a stored (encoded) string.
cmpr s c = liftM (compare s) (decodeString c)
-- | Compare two stored strings with each other.
cmpr2 c c2 = liftM2 compare (decodeString c) (decodeString c2)
-- Dictionary operations, adapted from Data.Map
-- | Range query: collect, in key order, the values of every node whose
-- key lies between @k@ and @k2@ inclusive (keys compared with the
-- monadic comparator @cmp@).  Node layout, per the accesses below:
-- 'first' is the key, @nth 1@ the value, @nth 2@/@nth 3@ the subtrees.
dlookup cmp k k2 t
	| isPair t = do
		f <- first t
		val <- nth 1 t
		ord <- cmp k f
		case ord of
			LT -> do
				v2 <- nth 2 t >>= dlookup cmp k k2
				ord2 <- cmp k2 f
				case ord2 of
					LT -> return v2
					EQ -> return $ v2 ++ [val]
					GT -> liftM (\v3 -> v2 ++ val : v3) (nth 3 t >>= dlookup cmp k k2)
			EQ -> do
				ord2 <- cmp k2 f
				case ord2 of
					LT -> return []
					EQ -> return [val]
					GT -> liftM (val:) (nth 3 t >>= dlookup cmp k k2)
			GT -> nth 3 t >>= dlookup cmp k k2
	| otherwise = return []
-- | Find the slot for key @k@: @t@ is a cell whose 'first' is either a
-- tree node or a non-pair (empty) marker.  Returns the slot itself --
-- either the one holding @k@ or the empty slot where @k@ would go --
-- so the caller can read or mutate it in place.
lookupSingle cmp k t = do
	referent <- first t
	if isPair referent then
		do
			f <- first referent
			ord <- cmp k f
			case ord of
				LT -> second referent >>= second >>= lookupSingle cmp k
				EQ -> return t
				GT -> second referent >>= second >>= second >>= lookupSingle cmp k
	else
		return t
-- | Insert (or overwrite) key @kx@ with value @x@.  Walks down to the
-- matching or empty slot; an existing key has its value slot updated,
-- an empty slot gets a fresh 4-element node (key, value, left, right)
-- with int-0 markers for the empty subtrees.
dinsert cmp kx x t = do
	referent <- first t
	if isPair referent then do
		val <- first referent
		ord <- cmp kx val
		case ord of
			LT -> second referent >>= second >>= dinsert cmp kx x
			GT -> second referent >>= second >>= second >>= dinsert cmp kx x
			EQ -> do
				second referent >>= \s -> setFirst s x
	else
		setFirst t (list [kx, x, newInt x 0, newInt x 0])
-- | Remove and return the minimum (key, value) of the tree rooted in
-- slot @t@: walk left while the left slot holds a pair; at the minimum,
-- splice the right subtree into this slot.  Precondition: the slot is
-- non-empty.
deleteFindMin t = do
	l <- first t >>= second >>= second
	r <- first t >>= second >>= second >>= second
	fl <- first l
	fr <- first r
	if isPair fl then
		deleteFindMin l
	else do
		k <- first t >>= first
		x <- first t >>= second >>= first
		setFirst t fr
		return (k, x)
-- | Mirror image of 'deleteFindMin': remove and return the maximum
-- (key, value), splicing the left subtree into the vacated slot.
deleteFindMax t = do
	l <- first t >>= second >>= second
	r <- first t >>= second >>= second >>= second
	fl <- first l
	fr <- first r
	if isPair fr then
		deleteFindMax r
	else do
		k <- first t >>= first
		x <- first t >>= second >>= first
		setFirst t fl
		return (k, x)
-- | Delete key @k@ from the tree in slot @t@ (no-op when the slot is
-- empty).  A node with a non-empty left subtree is replaced by that
-- subtree's maximum; else by the right subtree's minimum; a leaf node's
-- slot is reset to the int-0 empty marker.
delete cmpr k t = do
	referent <- first t
	when (isPair referent) $ do
		l <- second referent >>= second
		r <- second referent >>= second >>= second
		fl <- first l
		fr <- first r
		k2 <- first referent
		val <- first referent >>= second
		ord <- cmpr k k2
		case ord of
			LT -> delete cmpr k l
			GT -> delete cmpr k r
			EQ -> if isPair fl then do
					(k, x) <- deleteFindMax l
					setFirst referent k
					setFirst val x
				else if isPair fr then do
					(k, x) <- deleteFindMin r
					setFirst referent k
					setFirst val x
				else
					setFirst t (newInt t 0)
-- | Height of the index tree: 0 for an empty marker, 1 + max child
-- height for an internal node.
depth idx = if isPair idx then
		liftM2 (\x y -> 1 + max x y) (nth 2 idx >>= depth) (nth 3 idx >>= depth)
	else
		return 0
-- | Counts the empty (leaf) slots of the tree -- note internal nodes add
-- nothing themselves, so this is #leaves, i.e. #internal-nodes + 1.
size idx = if isPair idx then
		liftM2 (+) (nth 2 idx >>= size) (nth 3 idx >>= size)
	else
		return 1
| jacinabox/Indexing | FileCons - Copy.hs | bsd-2-clause | 7,702 | 95 | 20 | 2,129 | 3,338 | 1,647 | 1,691 | -1 | -1 |
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.Raw.NV.TextureExpandNormal
-- Copyright : (c) Sven Panne 2013
-- License : BSD3
--
-- Maintainer : Sven Panne <[email protected]>
-- Stability : stable
-- Portability : portable
--
-- All tokens from the NV_texture_expand_normal, see
-- <http://www.opengl.org/registry/specs/NV/texture_expand_normal.txt>.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.Raw.NV.TextureExpandNormal (
-- * Tokens
gl_TEXTURE_UNSIGNED_REMAP_MODE
) where
import Graphics.Rendering.OpenGL.Raw.Core32
-- | Token @TEXTURE_UNSIGNED_REMAP_MODE_NV@ (0x888F) from the
-- NV_texture_expand_normal extension.
gl_TEXTURE_UNSIGNED_REMAP_MODE :: GLenum
gl_TEXTURE_UNSIGNED_REMAP_MODE = 0x888F
| mfpi/OpenGLRaw | src/Graphics/Rendering/OpenGL/Raw/NV/TextureExpandNormal.hs | bsd-3-clause | 783 | 0 | 4 | 91 | 48 | 38 | 10 | 5 | 1 |
{-# LANGUAGE RankNTypes, NamedFieldPuns, RecordWildCards #-}
module Distribution.Server.Features.PackageCandidates (
PackageCandidatesFeature(..),
PackageCandidatesResource(..),
initPackageCandidatesFeature,
CandidateRender(..),
CandPkgInfo(..),
) where
import Distribution.Server.Framework
import Distribution.Server.Features.PackageCandidates.Types
import Distribution.Server.Features.PackageCandidates.State
import Distribution.Server.Features.PackageCandidates.Backup
import Distribution.Server.Features.Core
import Distribution.Server.Features.Upload
import Distribution.Server.Features.Users
import Distribution.Server.Features.TarIndexCache
import Distribution.Server.Packages.Types
import Distribution.Server.Packages.Render
import Distribution.Server.Packages.ChangeLog
import Distribution.Server.Packages.Readme
import qualified Distribution.Server.Users.Types as Users
import qualified Distribution.Server.Users.Group as Group
import qualified Distribution.Server.Framework.BlobStorage as BlobStorage
import qualified Distribution.Server.Packages.PackageIndex as PackageIndex
import Distribution.Server.Packages.PackageIndex (PackageIndex)
import qualified Distribution.Server.Framework.ResponseContentTypes as Resource
import Distribution.Server.Util.ServeTarball
import Distribution.Text
import Distribution.Package
import Data.Version
import Data.Function (fix)
import Data.List (find)
import Data.Time.Clock (getCurrentTime)
import qualified Data.Vector as Vec
-- | Public interface of the candidates feature: the generic feature
-- record, the candidate variants of the core resources, and the query /
-- mutation entry points exported to other features.
data PackageCandidatesFeature = PackageCandidatesFeature {
    candidatesFeatureInterface :: HackageFeature,
    candidatesCoreResource :: CoreResource,
    candidatesResource     :: PackageCandidatesResource,

    -- queries
    queryGetCandidateIndex :: forall m. MonadIO m => m (PackageIndex CandPkgInfo),

    postCandidate :: ServerPartE Response,
    postPackageCandidate :: DynamicPath -> ServerPartE Response,
    putPackageCandidate :: DynamicPath -> ServerPartE Response,
    doDeleteCandidate :: DynamicPath -> ServerPartE Response,
    uploadCandidate :: (PackageId -> Bool) -> ServerPartE CandPkgInfo,
    publishCandidate :: DynamicPath -> Bool -> ServerPartE UploadResult,
    checkPublish :: PackageIndex PkgInfo -> CandPkgInfo -> Maybe ErrorResponse,
    candidateRender :: CandPkgInfo -> IO CandidateRender,

    lookupCandidateName :: PackageName -> ServerPartE [CandPkgInfo],
    lookupCandidateId :: PackageId -> ServerPartE CandPkgInfo
}
-- | The generic feature interface is just the stored record field.
instance IsHackageFeature PackageCandidatesFeature where
    getFeatureInterface = candidatesFeatureInterface
-- There can also be build reports as well as documentation for proposed
-- versions.
-- These features check for existence of a package in the *main* index,
-- but it should be possible to hijack their indices to support candidates,
-- perhaps by them having a Filter for whether a package-version exists
-- (since they don't need any other info than the PackageId).
-- Unfortunately, some problems exist when both a candidate and actual version
-- of the same package exist simultaneously, so may want to hook into
-- UploadFeature's canUploadPackage to ensure this won't happen, and to
-- force deletion on publication.
{-
Mapping:
candidatesPage -> corePackagesPage
candidatePage -> corePackagePage
candidateCabal -> coreCabalFile
candidateTarball -> corePackageTarball
candidatesUri -> indexPackageUri
candidateUri -> corePackageUri
candidateTarballUri -> coreTarballUri
candidateCabalUri -> coreCabalUri
-}
-- | Candidate-specific resources and URI builders that have no
-- counterpart in 'CoreResource' (publication, deletion, tarball
-- contents and changelog).
data PackageCandidatesResource = PackageCandidatesResource {
    packageCandidatesPage :: Resource,
    publishPage :: Resource,
    deletePage :: Resource,
    packageCandidatesUri :: String -> PackageName -> String,
    publishUri :: String -> PackageId -> String,
    deleteUri  :: String -> PackageId -> String,
    -- TODO: Why don't the following entries have a corresponding entry
    -- in CoreResource?
    candidateContents :: Resource,
    candidateChangeLog :: Resource,
    candidateChangeLogUri :: PackageId -> String
}
-- candidates can be published at any time; there can be multiple candidates per package
-- they can be deleted, but it's not required
-- | Rendering data for a candidate page: the usual package render plus
-- the upload warnings and whether a non-candidate version of the same
-- package already exists in the main index.
data CandidateRender = CandidateRender {
    candPackageRender :: PackageRender,
    renderWarnings :: [String],
    hasIndexedPackage :: Bool
}
-- URI generation (string-based), using maps; user groups
-- | Open the candidates' acid-state store, then return a continuation
-- that wires the feature to the other features it depends on (the usual
-- two-phase feature initialisation).
initPackageCandidatesFeature :: ServerEnv
                             -> IO (UserFeature
                                 -> CoreFeature
                                 -> UploadFeature
                                 -> TarIndexCacheFeature
                                 -> IO PackageCandidatesFeature)
initPackageCandidatesFeature env@ServerEnv{serverStateDir} = do
    candidatesState <- candidatesStateComponent serverStateDir
    return $ \user core upload tarIndexCache -> do
      let feature = candidatesFeature env
                      user core upload tarIndexCache
                      candidatesState
      return feature
-- | The acid-state component holding all candidate packages, stored
-- under @\<stateDir\>/db/CandidatePackages@.
candidatesStateComponent :: FilePath -> IO (StateComponent AcidState CandidatePackages)
candidatesStateComponent stateDir = do
  st <- openLocalStateFrom (stateDir </> "db" </> "CandidatePackages") initialCandidatePackages
  return StateComponent {
      stateDesc = "Candidate packages"
    , stateHandle = st
    , getState = query st GetCandidatePackages
    , putState = update st . ReplaceCandidatePackages
    , resetState = candidatesStateComponent
    , backupState = \_ -> backupCandidates
    , restoreState = restoreCandidates
    }
-- | Assemble the candidates feature from its dependencies.  All exported
-- record fields of 'PackageCandidatesFeature' are bound in the where
-- clause below and collected via RecordWildCards.
candidatesFeature :: ServerEnv
                  -> UserFeature
                  -> CoreFeature
                  -> UploadFeature
                  -> TarIndexCacheFeature
                  -> StateComponent AcidState CandidatePackages
                  -> PackageCandidatesFeature
candidatesFeature ServerEnv{serverBlobStore = store}
                  UserFeature{..}
                  CoreFeature{ coreResource=core@CoreResource{packageInPath, packageTarballInPath}
                             , queryGetPackageIndex
                             , updateAddPackage
                             }
                  UploadFeature{..}
                  TarIndexCacheFeature{packageTarball, findToplevelFile}
                  candidatesState
  = PackageCandidatesFeature{..}

  where
    candidatesFeatureInterface = (emptyHackageFeature "candidates") {
        featureDesc = "Support for package candidates"
      , featureResources =
          map ($ candidatesCoreResource) [
              corePackagesPage
            , corePackagePage
            , coreCabalFile
            , corePackageTarball
            ] ++
          map ($ candidatesResource) [
              publishPage
            , candidateContents
            , candidateChangeLog
            ]
      , featureState = [abstractAcidStateComponent candidatesState]
      }

    queryGetCandidateIndex :: MonadIO m => m (PackageIndex CandPkgInfo)
    queryGetCandidateIndex = return . candidateList =<< queryState candidatesState GetCandidatePackages

    -- Candidate variants of the core resources, rooted under
    -- /package/:package/candidate.
    candidatesCoreResource = fix $ \r -> CoreResource {
        -- TODO: There is significant overlap between this definition and the one in Core
        corePackagesPage = resourceAt "/packages/candidates/.:format"
      , corePackagePage = resourceAt "/package/:package/candidate.:format"
      , coreCabalFile = (resourceAt "/package/:package/candidate/:cabal.cabal") {
            resourceDesc = [(GET, "Candidate .cabal file")]
          , resourceGet  = [("cabal", serveCandidateCabal)]
          }
      , corePackageTarball = (resourceAt "/package/:package/candidate/:tarball.tar.gz") {
            resourceDesc = [(GET, "Candidate tarball")]
          , resourceGet  = [("tarball", serveCandidateTarball)]
          }
      , indexPackageUri = \format ->
          renderResource (corePackagesPage r) [format]
      , corePackageIdUri = \format pkgid ->
          renderResource (corePackagePage r) [display pkgid, format]
      , corePackageNameUri = \format pkgname ->
          renderResource (corePackagePage r) [display pkgname, format]
      , coreTarballUri = \pkgid ->
          renderResource (corePackageTarball r) [display pkgid, display pkgid]
      , coreCabalUri = \pkgid ->
          renderResource (coreCabalFile r) [display pkgid, display (packageName pkgid)]
      , packageInPath
      , packageTarballInPath
      , guardValidPackageId   = void . lookupCandidateId
      , guardValidPackageName = void . lookupCandidateName
      , lookupPackageName = fmap (map candPkgInfo) . lookupCandidateName
      , lookupPackageId   = fmap candPkgInfo . lookupCandidateId
      }

    candidatesResource = fix $ \r -> PackageCandidatesResource {
        packageCandidatesPage = resourceAt "/package/:package/candidates/.:format"
      , publishPage = resourceAt "/package/:package/candidate/publish.:format"
      , deletePage = resourceAt "/package/:package/candidate/delete.:format"
      , candidateContents = (resourceAt "/package/:package/candidate/src/..") {
            resourceGet = [("", serveContents)]
          }
      , candidateChangeLog = (resourceAt "/package/:package/candidate/changelog") {
            resourceGet = [("changelog", serveChangeLog)]
          }
      , packageCandidatesUri = \format pkgname ->
          renderResource (packageCandidatesPage r) [display pkgname, format]
      , publishUri = \format pkgid ->
          renderResource (publishPage r) [display pkgid, format]
      , deleteUri = \format pkgid ->
          renderResource (deletePage r) [display pkgid, format]
      , candidateChangeLogUri = \pkgid ->
          renderResource (candidateChangeLog candidatesResource) [display pkgid, display (packageName pkgid)]
      }

    postCandidate :: ServerPartE Response
    postCandidate = do
        pkgInfo <- uploadCandidate (const True)
        seeOther (corePackageIdUri candidatesCoreResource "" $ packageId pkgInfo) (toResponse ())

    -- POST to /:package/candidates/
    postPackageCandidate :: DynamicPath -> ServerPartE Response
    postPackageCandidate dpath = do
      name <- packageInPath dpath
      pkgInfo <- uploadCandidate ((==name) . packageName)
      seeOther (corePackageIdUri candidatesCoreResource "" $ packageId pkgInfo) (toResponse ())

    -- PUT to /:package-version/candidate
    -- FIXME: like delete, PUT shouldn't redirect
    putPackageCandidate :: DynamicPath -> ServerPartE Response
    putPackageCandidate dpath = do
      pkgid <- packageInPath dpath
      guard (packageVersion pkgid /= Version [] [])
      pkgInfo <- uploadCandidate (==pkgid)
      seeOther (corePackageIdUri candidatesCoreResource "" $ packageId pkgInfo) (toResponse ())

    -- FIXME: DELETE should not redirect, but rather return ServerPartE ()
    doDeleteCandidate :: DynamicPath -> ServerPartE Response
    doDeleteCandidate dpath = do
      candidate <- packageInPath dpath >>= lookupCandidateId
      guardAuthorisedAsMaintainer (packageName candidate)
      void $ updateState candidatesState $ DeleteCandidate (packageId candidate)
      seeOther (packageCandidatesUri candidatesResource "" $ packageName candidate) $ toResponse ()

    serveCandidateTarball :: DynamicPath -> ServerPartE Response
    serveCandidateTarball dpath = do
      pkgid <- packageTarballInPath dpath
      guard (pkgVersion pkgid /= Version [] [])
      pkg <- lookupCandidateId pkgid
      case pkgLatestTarball (candPkgInfo pkg) of
        Nothing -> errNotFound "Tarball not found"
                     [MText "No tarball exists for this package version."]
        Just (tarball, (uploadtime,_uid)) -> do
          let blobId = pkgTarballGz tarball
          cacheControl [Public, NoTransform, maxAgeMinutes 10]
                       (BlobStorage.blobETag blobId)
          file <- liftIO $ BlobStorage.fetch store blobId
          return $ toResponse $ Resource.PackageTarball file blobId uploadtime

    --withFormat :: DynamicPath -> (String -> a) -> a
    --TODO: use something else for nice html error pages
    serveCandidateCabal :: DynamicPath -> ServerPartE Response
    serveCandidateCabal dpath = do
        pkg <- packageInPath dpath >>= lookupCandidateId
        guard (lookup "cabal" dpath == Just (display $ packageName pkg))
        let (fileRev, (utime, _uid)) = pkgLatestRevision (candPkgInfo pkg)
            cabalfile = Resource.CabalFile (cabalFileByteString fileRev) utime
        return $ toResponse cabalfile

    -- Accept a candidate upload from an authorised uploader; the
    -- predicate restricts which package id may be uploaded.
    uploadCandidate :: (PackageId -> Bool) -> ServerPartE CandPkgInfo
    uploadCandidate isRight = do
        guardAuthorised_ [InGroup uploadersGroup]
        regularIndex <- queryGetPackageIndex
        -- ensure that the user has proper auth if the package exists
        (uid, uresult, tarball) <- extractPackage $ \uid info ->
                                     processCandidate isRight regularIndex uid info
        now <- liftIO getCurrentTime
        let (UploadResult pkg pkgStr _) = uresult
            pkgid      = packageId pkg
            cabalfile  = CabalFileText pkgStr
            uploadinfo = (now, uid)
            candidate = CandPkgInfo {
                candPkgInfo = PkgInfo {
                    pkgInfoId     = pkgid,
                    pkgMetadataRevisions = Vec.singleton (cabalfile, uploadinfo),
                    pkgTarballRevisions  = Vec.singleton (tarball, uploadinfo)
                  },
                candWarnings = uploadWarnings uresult,
                candPublic = True -- do withDataFn
            }
        void $ updateState candidatesState $ AddCandidate candidate
        let group = maintainersGroup (packageName pkgid)
        liftIO $ Group.addUserToGroup group uid
        return candidate

    -- | Helper function for uploadCandidate.
    processCandidate :: (PackageId -> Bool) -> PackageIndex PkgInfo -> Users.UserId -> UploadResult -> IO (Maybe ErrorResponse)
    processCandidate isRight state uid res = do
        let pkg = packageId (uploadDesc res)
        if not (isRight pkg)
          then uploadFailed "Name of package or package version does not match"
          else do
            pkgGroup <- Group.queryUserGroup (maintainersGroup (packageName pkg))
            if packageExists state pkg && not (uid `Group.member` pkgGroup)
              then uploadFailed "Not authorized to upload a candidate for this package"
              else return Nothing
      where uploadFailed = return . Just . ErrorResponse 403 [] "Upload failed" . return . MText

    -- Promote a candidate into the main index; optionally delete the
    -- candidate afterwards ("moving" the resource).
    publishCandidate :: DynamicPath -> Bool -> ServerPartE UploadResult
    publishCandidate dpath doDelete = do
      packages <- queryGetPackageIndex
      candidate <- packageInPath dpath >>= lookupCandidateId
      -- check authorization to upload - must already be a maintainer
      uid <- guardAuthorised [InGroup (maintainersGroup (packageName candidate))]
      -- check if package or later already exists
      case checkPublish packages candidate of
        Just failed -> throwError failed
        Nothing -> do
          -- run filters
          let pkgInfo = candPkgInfo candidate
              uresult = UploadResult (pkgDesc pkgInfo)
                          (cabalFileByteString (pkgLatestCabalFileText pkgInfo))
                          (candWarnings candidate)
          time <- liftIO getCurrentTime
          let uploadInfo = (time, uid)
          success <- updateAddPackage (packageId candidate)
                                      (pkgLatestCabalFileText pkgInfo)
                                      uploadInfo
                                      (fmap fst $ pkgLatestTarball pkgInfo)
          --FIXME: share code here with upload
          -- currently we do not create the initial maintainer group etc.
          if success
            then do
              -- delete when requested: "moving" the resource
              -- should this be required? (see notes in PackageCandidatesResource)
              when doDelete $ updateState candidatesState $ DeleteCandidate (packageId candidate)
              return uresult
            else errForbidden "Upload failed" [MText "Package already exists."]

    -- | Helper function for publishCandidate that ensures it's safe to insert into the main index.
    checkPublish :: PackageIndex PkgInfo -> CandPkgInfo -> Maybe ErrorResponse
    checkPublish packages candidate = do
        let pkgs = PackageIndex.lookupPackageName packages (packageName candidate)
            candVersion = packageVersion candidate
        case find ((== candVersion) . packageVersion) pkgs of
            Just {} -> Just $ ErrorResponse 403 [] "Publish failed" [MText "Package name and version already exist in the database"]
            Nothing -> Nothing

    ------------------------------------------------------------------------------

    candidateRender :: CandPkgInfo -> IO CandidateRender
    candidateRender cand = do
        users  <- queryGetUserDb
        index  <- queryGetPackageIndex
        let pkg = candPkgInfo cand
        changeLog <- findToplevelFile pkg isChangeLogFile
                 >>= either (\_ -> return Nothing) (return . Just)
        readme    <- findToplevelFile pkg isReadmeFile
                 >>= either (\_ -> return Nothing) (return . Just)
        let render = doPackageRender users pkg
        return $ CandidateRender {
          candPackageRender = render { rendPkgUri    = rendPkgUri render ++ "/candidate"
                                     , rendChangeLog = changeLog
                                     , rendReadme    = readme},
          renderWarnings    = candWarnings cand,
          hasIndexedPackage = not . null $ PackageIndex.lookupPackageName index (packageName cand)
        }

    ------------------------------------------------------------------------------

    -- Find all candidates for a package (there may be none)
    -- It is not an error if a package has no candidates, but it is an error
    -- when the package itself does not exist. We therefore check the Core
    -- package database to check if the package exists.
    lookupCandidateName :: PackageName -> ServerPartE [CandPkgInfo]
    lookupCandidateName pkgname = do
      guardValidPackageName core pkgname
      state <- queryState candidatesState GetCandidatePackages
      return $ PackageIndex.lookupPackageName (candidateList state) pkgname

    -- TODO: Unlike the corresponding function in core, we don't return the
    -- "latest" candidate when Version is empty. Should we?
    -- (If we change that, we should move the 'guard' to 'guardValidPackageId')
    lookupCandidateId :: PackageId -> ServerPartE CandPkgInfo
    lookupCandidateId pkgid = do
      guard (pkgVersion pkgid /= Version [] [])
      state <- queryState candidatesState GetCandidatePackages
      case PackageIndex.lookupPackageId (candidateList state) pkgid of
        Just pkg -> return pkg
        _ -> errNotFound "Candidate not found" [MText $ "No such candidate version for " ++ display (packageName pkgid)]

    {-------------------------------------------------------------------------------
      TODO: everything below is an (almost) direct duplicate of corresponding
      functionality in PackageContents. We could factor this out, although there
      isn't any "interesting" code here, except differences in http cache control.
    -------------------------------------------------------------------------------}

    -- result: changelog or not-found error
    serveChangeLog :: DynamicPath -> ServerPartE Response
    serveChangeLog dpath = do
      pkg        <- packageInPath dpath >>= lookupCandidateId
      mChangeLog <- liftIO $ findToplevelFile (candPkgInfo pkg) isChangeLogFile
      case mChangeLog of
        Left err ->
          errNotFound "Changelog not found" [MText err]
        Right (fp, etag, offset, name) -> do
          cacheControl [Public, maxAgeMinutes 5] etag
          liftIO $ serveTarEntry fp offset name -- TODO: We've already loaded the contents; refactor

    -- return: not-found error or tarball
    serveContents :: DynamicPath -> ServerPartE Response
    serveContents dpath = do
        pkg      <- packageInPath dpath >>= lookupCandidateId
        mTarball <- liftIO $ packageTarball (candPkgInfo pkg)
        case mTarball of
          Left err ->
            errNotFound "Could not serve package contents" [MText err]
          Right (fp, etag, index) ->
            serveTarball (display (packageId pkg) ++ " candidate source tarball")
                         ["index.html"] (display (packageId pkg)) fp index
                         [Public, maxAgeMinutes 5] etag
| chrisdotcode/hackage-server | Distribution/Server/Features/PackageCandidates.hs | bsd-3-clause | 20,803 | 0 | 21 | 5,328 | 3,879 | 2,046 | 1,833 | 315 | 10 |
{-|
/NOTE/: This module is preliminary and may change at a future date.
This module is intended to help converting a list of tags into a
tree of tags.
-}
module Text.HTML.TagSoup.Tree
(
TagTree(..), tagTree, parseTree, parseTreeOptions, ParseOptions(..),
flattenTree, renderTree, renderTreeOptions, RenderOptions(..), transformTree, universeTree
) where
import Text.HTML.TagSoup (parseTags, parseTagsOptions, renderTags, renderTagsOptions, ParseOptions(..), RenderOptions(..))
import Text.HTML.TagSoup.Type
import Control.Arrow
import GHC.Exts (build)
-- | A tree of tags: a 'TagBranch' is a matched open/close element with
-- its attributes and children; any other tag is kept as a 'TagLeaf'.
data TagTree str = TagBranch str [Attribute str] [TagTree str]
                 | TagLeaf (Tag str)
                   deriving (Eq,Ord,Show)
-- | Map over every string in the tree: tag names, both halves of each
-- attribute pair (via '(***)'), and the contained tags.
instance Functor TagTree where
    fmap f (TagBranch x y z) = TagBranch (f x) (map (f***f) y) (map (fmap f) z)
    fmap f (TagLeaf x) = TagLeaf (fmap f x)
-- | Convert a list of tags into a tree. This version is not lazy at
-- all, that is saved for version 2.
-- | Pair up open and close tags into branches.  An open tag whose close
-- never arrives (or closes a different name) degrades into a 'TagLeaf',
-- so no input is ever lost.
tagTree :: Eq str => [Tag str] -> [TagTree str]
tagTree = g
    where
        -- Repeatedly consume a run of trees; any leftover close tag is
        -- emitted as a leaf and parsing continues after it.
        g :: Eq str => [Tag str] -> [TagTree str]
        g [] = []
        g xs = a ++ map TagLeaf (take 1 b) ++ g (drop 1 b)
            where (a,b) = f xs

        -- the second tuple is either null or starts with a close
        f :: Eq str => [Tag str] -> ([TagTree str],[Tag str])
        f (TagOpen name atts:rest) =
            case f rest of
                (inner,[]) -> (TagLeaf (TagOpen name atts):inner, [])
                (inner,TagClose x:xs)
                    | x == name -> let (a,b) = f xs in (TagBranch name atts inner:a, b)
                    | otherwise -> (TagLeaf (TagOpen name atts):inner, TagClose x:xs)
                _ -> error "TagSoup.Tree.tagTree: safe as - forall x . isTagClose (snd (f x))"
        f (TagClose x:xs) = ([], TagClose x:xs)
        f (x:xs) = (TagLeaf x:a,b)
            where (a,b) = f xs
        f [] = ([], [])
-- | Parse a document straight to a tree, with default parse options.
parseTree :: StringLike str => str -> [TagTree str]
parseTree = tagTree . parseTags

-- | As 'parseTree', but taking explicit 'ParseOptions'.
parseTreeOptions :: StringLike str => ParseOptions str -> str -> [TagTree str]
parseTreeOptions opts str = tagTree $ parseTagsOptions opts str
-- | Flatten a forest back into a tag stream.  Written via 'build' so it
-- can take part in list fusion with a good consumer.
flattenTree :: [TagTree str] -> [Tag str]
flattenTree xs = build $ flattenTreeFB xs

-- | Church-encoded worker for 'flattenTree': each branch becomes
-- TagOpen, the flattened children, then TagClose, using the supplied
-- cons/nil instead of (:)/[].
flattenTreeFB :: [TagTree str] -> (Tag str -> lst -> lst) -> lst -> lst
flattenTreeFB xs cons nil = flattenTreeOnto xs nil
  where
    flattenTreeOnto [] tags = tags
    flattenTreeOnto (TagBranch name atts inner:trs) tags =
        TagOpen name atts `cons` flattenTreeOnto inner (TagClose name `cons` flattenTreeOnto trs tags)
    flattenTreeOnto (TagLeaf x:trs) tags = x `cons` flattenTreeOnto trs tags
-- | Render a forest back to text, with default render options.
renderTree :: StringLike str => [TagTree str] -> str
renderTree = renderTags . flattenTree

-- | As 'renderTree', but taking explicit 'RenderOptions'.
renderTreeOptions :: StringLike str => RenderOptions str -> [TagTree str] -> str
renderTreeOptions opts trees = renderTagsOptions opts $ flattenTree trees
-- | Analogue of the Uniplate @universe@ function: return the given trees
-- together with every subtree at any depth, in pre-order. For example:
--
-- > universeTree
-- >    [TagBranch "a" [("href","url")] [TagBranch "b" [] [TagLeaf (TagText "text")]]]
-- > == [TagBranch "a" [("href","url")] [TagBranch "b" [] [TagLeaf (TagText "text")]]]
-- >    ,TagBranch "b" [] [TagLeaf (TagText "text")]]
--
-- This is particularly useful for queries; to collect all @\"a\"@ tags
-- in a tree, simply do:
--
-- > [x | x@(TagBranch "a" _ _) <- universeTree tree]
universeTree :: [TagTree str] -> [TagTree str]
universeTree = concatMap expand
    where
        expand branch@(TagBranch _ _ children) = branch : universeTree children
        expand leaf = [leaf]
-- | Analogue of the Uniplate @transform@ function: rewrite every tree
-- bottom-up with the supplied function (children are transformed before
-- their parent is handed over). For example, to upper-case all tag names:
--
-- > upperCase = transformTree f
-- >   where f (TagBranch name atts inner) = [TagBranch (map toUpper name) atts inner]
-- >         f x = [x]
transformTree :: (TagTree str -> [TagTree str]) -> [TagTree str] -> [TagTree str]
transformTree act = concatMap rewrite
    where
        rewrite (TagBranch name atts children) =
            act $ TagBranch name atts (transformTree act children)
        rewrite leaf = act leaf
| ChristopherKing42/tagsoup | Text/HTML/TagSoup/Tree.hs | bsd-3-clause | 4,310 | 0 | 15 | 1,065 | 1,255 | 664 | 591 | 56 | 7 |
{-# LANGUAGE DoAndIfThenElse #-}
module ThetaReduction(thetaReduction) where
import Control.Applicative
import Data.List
import Term
import VarEnvironment
-- | Lambda terms whose binders carry an integer mark (1, 2 or 3,
-- assigned by 'label') that drives the theta-rewrite rules below.
data MarkedTerm = MLam Int String MarkedTerm
                | MApp MarkedTerm MarkedTerm
                | MVar String
                --deriving Show
-- Uses alpha equivalence
-- NOTE(review): marks on binders are ignored, and the renaming
-- substitution does not guard against variable capture -- presumably
-- terms have been made unique first ('makeUniqueVars'); confirm.
instance Eq MarkedTerm where
  (MVar x) == (MVar y) = x == y
  (MApp m n) == (MApp p q) = m == p && n == q
  (MLam _ x m) == (MLam _ y n)
    | x == y = m == n
    | otherwise = m == substitute (y `AssignTo` MVar x) n
  _ == _ = False
-- TODO: Fix the repeated code for freeVars, substitute
-- | Free variables and capture-unaware substitution, mirroring the
-- 'Term' instance but preserving binder marks.
instance TermClass MarkedTerm where
  freeVars (MLam _ x t) = filter (/= x) $ freeVars t
  freeVars (MApp x y) = nub $ freeVars x ++ freeVars y
  freeVars (MVar x) = [x]

  substitute (x `AssignTo` t) (MVar y)
    | x == y = t
    | otherwise = MVar y
  substitute (x `AssignTo` t) (MApp m n) = MApp (substitute (x `AssignTo` t) m) (substitute (x `AssignTo` t) n)
  substitute (x `AssignTo` t) (MLam i y m)
    | x == y = MLam i y m
    | otherwise = MLam i y (substitute (x `AssignTo` t) m)
------ 1. Computing term M1 ------
-- | Mark every binder of a 'Term': binders whose variable is in the
-- active set @acts@ receive the current mark @i@, all others mark 1;
-- application arguments restart with their own active set and mark 3.
label :: Term -> [String] -> Int -> MarkedTerm
label (Var x) _ _ = MVar x
label (Lambda x m) acts i
  | x `elem` acts = MLam i x $ label m acts i
  | otherwise = MLam 1 x $ label m acts i
label (App m n) acts i = label m acts i `MApp` label n (act n) 3

-- | Entry point for labelling: the whole term starts with mark 2.
markTerm :: Term -> MarkedTerm
markTerm t = label t (act t) 2
-- | Drop the binder marks, recovering an ordinary 'Term'.
unmarkTerm :: MarkedTerm -> Term
unmarkTerm (MLam _ x m) = Lambda x (unmarkTerm m)
unmarkTerm (MApp m n) = App (unmarkTerm m) (unmarkTerm n)
unmarkTerm (MVar x) = Var x
------ 2. Computing term M2 ------
-- The order of thetas is because of theta2 being monadic
-- | Apply the four theta-rewrite rules once, at the root only.  Rules
-- theta1/theta3/theta4 are pure rearrangements keyed on binder marks;
-- theta2 is monadic because it invents fresh variables, hence it is
-- applied last in the composition.
theta :: MarkedTerm -> Environment MarkedTerm
theta = theta2 . theta4 . theta3 . theta1
  where theta1 ((MLam 1 x n `MApp` p) `MApp` q) =
          MLam 1 x (n `MApp` q) `MApp` p
        theta1 t = t
        theta2 (MLam 3 x (MLam 1 y n `MApp` p)) = do
          v <- newVar "var"
          w <- newVar "var"
          let n' = substitute (y `AssignTo` (MVar v `MApp` MVar x)) n
              p' = substitute (x `AssignTo` MVar w) p
          return $ MLam 1 v (MLam 3 x n') `MApp` MLam 3 w p'
        theta2 t = return t
        theta3 (n `MApp` (MLam 1 x p `MApp` q)) =
          MLam 1 x (n `MApp` p) `MApp` q
        theta3 t = t
        theta4 (MLam 1 x (MLam 2 y n) `MApp` p) =
          MLam 2 y $ MLam 1 x n `MApp` p
        theta4 t = t
-- | Iterate 'theta' over the whole term (root and, via thetaRec, every
-- subterm) until an alpha-equivalence fixpoint is reached.
-- Termination relies on the theta rules eventually producing no change.
thetaReduction' :: MarkedTerm -> Environment MarkedTerm
thetaReduction' t = do
  t' <- thetaRec =<< theta t
  -- (/=) is alpha-inequality here, via the custom Eq instance.
  if t /= t' then thetaReduction' t' else return t
  where thetaRec (MVar x) = return $ MVar x
        thetaRec (MApp m n) = MApp <$> (theta m >>= thetaRec) <*> (theta n >>= thetaRec)
        thetaRec (MLam i x m) = MLam i x <$> (theta m >>= thetaRec)
-- Point-free version of it is not nice.
-- | Top-level entry point: make binders unique, mark the term, run the
-- theta rewriting to a fixpoint, and strip the marks again.
thetaReduction :: Term -> Term
thetaReduction t = evalInEnvironment [] $ do
  t' <- markTerm <$> makeUniqueVars t
  unmarkTerm <$> thetaReduction' t'
| projedi/type-inference-rank2 | src/ThetaReduction.hs | bsd-3-clause | 3,040 | 0 | 17 | 852 | 1,367 | 696 | 671 | 68 | 5 |
{-# language CPP #-}
-- | = Name
--
-- VK_KHR_maintenance4 - device extension
--
-- == VK_KHR_maintenance4
--
-- [__Name String__]
-- @VK_KHR_maintenance4@
--
-- [__Extension Type__]
-- Device extension
--
-- [__Registered Extension Number__]
-- 414
--
-- [__Revision__]
-- 2
--
-- [__Extension and Version Dependencies__]
--
-- - Requires Vulkan 1.1
--
-- [__Deprecation state__]
--
-- - /Promoted/ to
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#versions-1.3-promotions Vulkan 1.3>
--
-- [__Contact__]
--
-- - Piers Daniell
-- <https://github.com/KhronosGroup/Vulkan-Docs/issues/new?body=[VK_KHR_maintenance4] @pdaniell-nv%0A<<Here describe the issue or question you have about the VK_KHR_maintenance4 extension>> >
--
-- == Other Extension Metadata
--
-- [__Last Modified Date__]
-- 2021-10-25
--
-- [__Interactions and External Dependencies__]
--
-- - Promoted to Vulkan 1.3 Core
--
-- - Requires SPIR-V 1.2 for @LocalSizeId@
--
-- [__Contributors__]
--
-- - Lionel Duc, NVIDIA
--
-- - Jason Ekstrand, Intel
--
-- - Spencer Fricke, Samsung
--
-- - Tobias Hector, AMD
--
-- - Lionel Landwerlin, Intel
--
-- - Graeme Leese, Broadcom
--
-- - Tom Olson, Arm
--
-- - Stu Smith, AMD
--
-- - Yiwei Zhang, Google
--
-- == Description
--
-- @VK_KHR_maintenance4@ adds a collection of minor features, none of which
-- would warrant an entire extension of their own.
--
-- The new features are as follows:
--
-- - Allow the application to destroy their
-- 'Vulkan.Core10.Handles.PipelineLayout' object immediately after it
-- was used to create another object. It is no longer necessary to keep
-- its handle valid while the created object is in use.
--
-- - Add a new
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#limits-maxBufferSize maxBufferSize>
-- implementation-defined limit for the maximum size
-- 'Vulkan.Core10.Handles.Buffer' that /can/ be created.
--
-- - Add support for the SPIR-V 1.2 @LocalSizeId@ execution mode, which
-- can be used as an alternative to @LocalSize@ to specify the local
-- workgroup size with specialization constants.
--
-- - Add a guarantee that images created with identical creation
-- parameters will always have the same alignment requirements.
--
-- - Add new 'getDeviceBufferMemoryRequirementsKHR',
-- 'getDeviceImageMemoryRequirementsKHR', and
-- 'getDeviceImageSparseMemoryRequirementsKHR' to allow the application
-- to query the image memory requirements without having to create an
-- image object and query it.
--
-- - Relax the requirement that push constants must be initialized before
-- they are dynamically accessed.
--
-- - Relax the interface matching rules to allow a larger output vector
-- to match with a smaller input vector, with additional values being
-- discarded.
--
-- - Add a guarantee for buffer memory requirement that the size memory
-- requirement is never greater than the result of aligning create size
-- with the alignment memory requirement.
--
-- == New Commands
--
-- - 'getDeviceBufferMemoryRequirementsKHR'
--
-- - 'getDeviceImageMemoryRequirementsKHR'
--
-- - 'getDeviceImageSparseMemoryRequirementsKHR'
--
-- == New Structures
--
-- - 'DeviceBufferMemoryRequirementsKHR'
--
-- - 'DeviceImageMemoryRequirementsKHR'
--
-- - Extending
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.PhysicalDeviceFeatures2',
-- 'Vulkan.Core10.Device.DeviceCreateInfo':
--
-- - 'PhysicalDeviceMaintenance4FeaturesKHR'
--
-- - Extending
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.PhysicalDeviceProperties2':
--
-- - 'PhysicalDeviceMaintenance4PropertiesKHR'
--
-- == New Enum Constants
--
-- - 'KHR_MAINTENANCE_4_EXTENSION_NAME'
--
-- - 'KHR_MAINTENANCE_4_SPEC_VERSION'
--
-- - Extending
-- 'Vulkan.Core10.Enums.ImageAspectFlagBits.ImageAspectFlagBits':
--
-- - 'IMAGE_ASPECT_NONE_KHR'
--
-- - Extending 'Vulkan.Core10.Enums.StructureType.StructureType':
--
-- - 'STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS_KHR'
--
-- - 'STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS_KHR'
--
-- - 'STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES_KHR'
--
-- - 'STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES_KHR'
--
-- == Promotion to Vulkan 1.3
--
-- Functionality in this extension is included in core Vulkan 1.3, with the
-- KHR suffix omitted. The original type, enum and command names are still
-- available as aliases of the core functionality.
--
-- == Issues
--
-- None.
--
-- == Version History
--
-- - Revision 1, 2021-08-18 (Piers Daniell)
--
-- - Internal revisions
--
-- - Revision 2, 2021-10-25 (Yiwei Zhang)
--
-- - More guarantees on buffer memory requirements
--
-- == See Also
--
-- 'DeviceBufferMemoryRequirementsKHR', 'DeviceImageMemoryRequirementsKHR',
-- 'PhysicalDeviceMaintenance4FeaturesKHR',
-- 'PhysicalDeviceMaintenance4PropertiesKHR',
-- 'getDeviceBufferMemoryRequirementsKHR',
-- 'getDeviceImageMemoryRequirementsKHR',
-- 'getDeviceImageSparseMemoryRequirementsKHR'
--
-- == Document Notes
--
-- For more information, see the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#VK_KHR_maintenance4 Vulkan Specification>
--
-- This page is a generated document. Fixes and changes should be made to
-- the generator scripts, not directly.
module Vulkan.Extensions.VK_KHR_maintenance4 ( pattern STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES_KHR
, pattern STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES_KHR
, pattern STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS_KHR
, pattern STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS_KHR
, pattern IMAGE_ASPECT_NONE_KHR
, getDeviceBufferMemoryRequirementsKHR
, getDeviceImageMemoryRequirementsKHR
, getDeviceImageSparseMemoryRequirementsKHR
, DeviceBufferMemoryRequirementsKHR
, DeviceImageMemoryRequirementsKHR
, PhysicalDeviceMaintenance4FeaturesKHR
, PhysicalDeviceMaintenance4PropertiesKHR
, KHR_MAINTENANCE_4_SPEC_VERSION
, pattern KHR_MAINTENANCE_4_SPEC_VERSION
, KHR_MAINTENANCE_4_EXTENSION_NAME
, pattern KHR_MAINTENANCE_4_EXTENSION_NAME
) where
import Data.String (IsString)
import Vulkan.Core13.Promoted_From_VK_KHR_maintenance4 (getDeviceBufferMemoryRequirements)
import Vulkan.Core13.Promoted_From_VK_KHR_maintenance4 (getDeviceImageMemoryRequirements)
import Vulkan.Core13.Promoted_From_VK_KHR_maintenance4 (getDeviceImageSparseMemoryRequirements)
import Vulkan.Core13.Promoted_From_VK_KHR_maintenance4 (DeviceBufferMemoryRequirements)
import Vulkan.Core13.Promoted_From_VK_KHR_maintenance4 (DeviceImageMemoryRequirements)
import Vulkan.Core13.Promoted_From_VK_KHR_maintenance4 (PhysicalDeviceMaintenance4Features)
import Vulkan.Core13.Promoted_From_VK_KHR_maintenance4 (PhysicalDeviceMaintenance4Properties)
import Vulkan.Core10.Enums.ImageAspectFlagBits (ImageAspectFlags)
import Vulkan.Core10.Enums.ImageAspectFlagBits (ImageAspectFlagBits(IMAGE_ASPECT_NONE))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES))
-- Backwards-compatible KHR-suffixed aliases.  VK_KHR_maintenance4 was
-- promoted to core in Vulkan 1.3, so every definition below simply
-- re-exports the promoted core name under its original extension name.
-- NOTE(review): this module is generated (see the module header);
-- lasting changes belong in the generator scripts.
-- Alias of the promoted core structure type.
pattern STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES_KHR = STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES
-- Alias of the promoted core structure type.
pattern STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES_KHR = STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES
-- Alias of the promoted core structure type.
pattern STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS_KHR = STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS
-- Alias of the promoted core structure type.
pattern STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS_KHR = STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS
-- Alias of the promoted 'IMAGE_ASPECT_NONE' flag.
pattern IMAGE_ASPECT_NONE_KHR = IMAGE_ASPECT_NONE
-- Alias of the promoted core command.
getDeviceBufferMemoryRequirementsKHR = getDeviceBufferMemoryRequirements
-- Alias of the promoted core command.
getDeviceImageMemoryRequirementsKHR = getDeviceImageMemoryRequirements
-- Alias of the promoted core command.
getDeviceImageSparseMemoryRequirementsKHR = getDeviceImageSparseMemoryRequirements
-- Alias of the promoted core structure.
type DeviceBufferMemoryRequirementsKHR = DeviceBufferMemoryRequirements
-- Alias of the promoted core structure.
type DeviceImageMemoryRequirementsKHR = DeviceImageMemoryRequirements
-- Alias of the promoted core structure.
type PhysicalDeviceMaintenance4FeaturesKHR = PhysicalDeviceMaintenance4Features
-- Alias of the promoted core structure.
type PhysicalDeviceMaintenance4PropertiesKHR = PhysicalDeviceMaintenance4Properties
type KHR_MAINTENANCE_4_SPEC_VERSION = 2
-- Extension revision (2, per the registry entry in the header).
pattern KHR_MAINTENANCE_4_SPEC_VERSION :: forall a . Integral a => a
pattern KHR_MAINTENANCE_4_SPEC_VERSION = 2
type KHR_MAINTENANCE_4_EXTENSION_NAME = "VK_KHR_maintenance4"
-- Registered extension name string.
pattern KHR_MAINTENANCE_4_EXTENSION_NAME :: forall a . (Eq a, IsString a) => a
pattern KHR_MAINTENANCE_4_EXTENSION_NAME = "VK_KHR_maintenance4"
| expipiplus1/vulkan | src/Vulkan/Extensions/VK_KHR_maintenance4.hs | bsd-3-clause | 10,976 | 0 | 8 | 2,016 | 604 | 450 | 154 | -1 | -1 |
{-|
Module : Database.Relational.Delete
Description : Definition of DELETE_FROM.
Copyright : (c) Alexander Vieth, 2015
Licence : BSD3
Maintainer : [email protected]
Stability : experimental
Portability : non-portable (GHC only)
-}
{-# LANGUAGE AutoDeriveTypeable #-}
module Database.Relational.Delete (
DELETE(..)
) where
data DELETE a = DELETE a
| avieth/Relational | Database/Relational/Delete.hs | bsd-3-clause | 376 | 0 | 6 | 75 | 28 | 19 | 9 | 4 | 0 |
module Experiments.StateParser where
import Control.Monad.State.Lazy
-- | Lexical tokens of the toy parenthesised language: the two paren
-- delimiters plus any other character as a 'Letter'.
data Token
  = OpenParen
  | CloseParen
  | Letter Char
  deriving (Eq, Show)
-- | Abstract syntax: nested groups or a flat run of characters.
data AST
  = Group [AST]
  | Expression [Char]
  deriving (Eq, Show)
-- | A parse is a stateful computation over the remaining token stream,
-- producing one 'AST'.
type ParserState = State [Token] AST
-- | Classify one character: the two parentheses are special delimiter
-- tokens; any other character becomes a 'Letter'.
charLex :: Char -> Token
charLex '(' = OpenParen
charLex ')' = CloseParen
charLex other = Letter other

-- | Tokenise an input string one character at a time.
myLex :: String -> [Token]
myLex = map charLex
-- | Consume tokens from the state and build an 'AST'.
--
-- NOTE(review): as written this experimental parser never constructs a
-- 'Group': an 'OpenParen' is skipped and parsing just continues, so the
-- @Group _@ branch below is treated as unreachable (its message also
-- contains a typo, "shouldn'g" (sic)).  Exhausting the input before a
-- 'CloseParen' hits 'error' as well.  Confirm this toy behaviour is
-- intended before reusing.
myParse :: ParserState
myParse = do
  tokens <- get
  case tokens of
    -- Open paren: drop it and keep parsing.
    OpenParen:rest -> do
      put rest
      myParse
    -- Close paren: terminate the current expression.
    CloseParen:rest -> do
      put rest
      return $ Expression []
    -- Letter: parse the remainder, then prepend this character.
    (Letter c):rest -> do
      put rest
      parsed <- myParse
      case parsed of
        Expression chars -> return $ Expression (c : chars)
        Group _ -> error "shouldn'g get an AST here"
    [] -> error "shouldn't hit this"
| rumblesan/haskell-experiments | src/Experiments/StateParser.hs | bsd-3-clause | 878 | 0 | 18 | 263 | 304 | 153 | 151 | 37 | 5 |
--
-- Finder.hs
--
module Finder where
import Data.ByteString.Char8 as BS (ByteString, hGetLine)
import Data.List (foldl')
import Data.Map as Map (Map, elems, empty, insert, insertWith, lookup)
import System.IO
import System.Process
-- | Raw downscaled pixel bytes used as an image identity.
type FingerPrint = ByteString

-- | Fingerprint every file and return only the groups of two or more
-- files sharing a fingerprint, i.e. the likely-duplicate images.
findSame :: [FilePath] -> IO [[FilePath]]
findSame fs = do
  fps <- mapM getFingerPrint4 fs
  -- foldl' keeps the accumulating Map from building a chain of thunks
  -- over a large file list (Prelude.foldl is lazy in the accumulator).
  let es = Map.elems $ foldl' insertItem Map.empty (zip fps fs)
  return $ filter (\g -> length g > 1) es
-- | Record one (fingerprint, path) observation: prepend the path to the
-- bucket of files already seen with that fingerprint, or start a new
-- singleton bucket.  @insertWith (++) k [v]@ yields @v : old@ when the
-- key is present, matching the previous lookup-then-insert behaviour.
insertItem :: Map FingerPrint [FilePath] -> (FingerPrint, FilePath)
  -> Map FingerPrint [FilePath]
insertItem m (fp, path) = Map.insertWith (++) fp [path] m
-- | Prepend a newly seen path onto its fingerprint's bucket;
-- 'Nothing' means the fingerprint has not been seen before.
toList :: (FingerPrint, FilePath) -> Maybe [FilePath] -> [FilePath]
toList x Nothing = [snd x]
toList x (Just l) = (snd x:l)
-- | Compute an @r@x@r@ RGB fingerprint of an image by shelling out to
-- ImageMagick's @convert@ and keeping the last @r*r*3@ bytes of the
-- raw PPM output.
--
-- NOTE(review): the result is read with 'BS.hGetLine', so a pixel byte
-- equal to 0x0A would truncate the fingerprint -- presumably accepted
-- here, but confirm before relying on exact fingerprints.
getFingerPrint :: Int -> FilePath -> IO FingerPrint
getFingerPrint r f = do
  (inH, outH, _errH, ph) <- runInteractiveCommand command
  -- The pipeline reads no stdin; close it so the child never waits on us.
  hClose inH
  -- Read the output *before* waiting: waiting first can deadlock if the
  -- child blocks on a full output pipe.
  fp <- BS.hGetLine outH
  -- Reap the child; the exit code is deliberately not inspected here.
  _ <- waitForProcess ph
  return fp
  where
    geo = show r ++ "x" ++ show r
    size = r * r * 3
    command = "convert -define jpeg:size=" ++ geo
      ++ " -filter Cubic -resize " ++ geo ++ "! "
      ++ f ++ " PPM:- | tail -c " ++ (show size)

-- | Fingerprint at the default 4x4 resolution used by 'findSame'.
getFingerPrint4 :: FilePath -> IO FingerPrint
getFingerPrint4 = getFingerPrint 4
| eijian/picfinder | src/Finder-r1.hs | bsd-3-clause | 1,219 | 0 | 13 | 292 | 476 | 251 | 225 | 31 | 2 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE LambdaCase #-}
#ifdef USE_TEMPLATE_HASKELL
{-# LANGUAGE TemplateHaskell #-}
#endif
module Reflex.Dom.Old
( MonadWidget
, El
, ElConfig (..)
, elConfig_namespace
, elConfig_attributes
, _el_clicked
, _el_element
, _el_events
, addVoidAction
, AttributeMap
, Attributes (..)
, buildElement
, buildElementNS
, buildEmptyElement
, buildEmptyElementNS
, elDynHtml'
, elDynHtmlAttr'
, elStopPropagationNS
, elWith
, elWith'
, emptyElWith
, emptyElWith'
, namedNodeMapGetNames
, nodeClear
, onEventName
, schedulePostBuild
, text'
, unsafePlaceElement
, WidgetHost
, wrapElement
) where
import Control.Arrow (first)
#ifdef USE_TEMPLATE_HASKELL
import Control.Lens (makeLenses, (%~), (&), (.~), (^.))
#else
import Control.Lens (Lens, Lens', (%~), (&), (.~), (^.))
#endif
import Control.Monad
import Control.Monad.Fix
import Control.Monad.IO.Class
import Control.Monad.Reader
import Control.Monad.Ref
import Data.Default
import Data.Dependent.Map as DMap
import Data.Functor.Misc
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Text (Text)
import qualified GHCJS.DOM.Element as Element
import GHCJS.DOM.EventM (EventM)
import GHCJS.DOM.NamedNodeMap as NNM
import GHCJS.DOM.Node (getFirstChild, getNodeName, removeChild)
import GHCJS.DOM.Types
(liftJSM, JSM, IsHTMLElement, IsNode)
import qualified GHCJS.DOM.Types as DOM
import Reflex.Class
import Reflex.Dom.Builder.Class
import Reflex.Dom.Builder.Immediate
import Reflex.Dom.Widget.Basic
import Reflex.Host.Class
import Reflex.PerformEvent.Class
import Reflex.PostBuild.Class
import Reflex.TriggerEvent.Class
-- | Legacy element configuration: an optional XML namespace plus an
-- attribute payload (static or dynamic, see the 'Attributes' class).
data ElConfig attrs = ElConfig
  { _elConfig_namespace :: Maybe Text
  , _elConfig_attributes :: attrs
  }
-- The equality constraint (rather than an instance head mentioning
-- Map directly) lets 'def' pick the static-attribute configuration
-- while keeping type inference flowing outward.
instance attrs ~ Map Text Text => Default (ElConfig attrs) where
  def = ElConfig
    { _elConfig_namespace = Nothing
    , _elConfig_attributes = mempty
    }
#ifdef USE_TEMPLATE_HASKELL
makeLenses ''ElConfig
#else
-- Hand-written lenses, kept in sync with the TH-generated ones above.
elConfig_namespace :: Lens' (ElConfig attrs1) (Maybe Text)
elConfig_namespace f (ElConfig a b) = (\a' -> ElConfig a' b) <$> f a
{-# INLINE elConfig_namespace #-}
elConfig_attributes :: Lens (ElConfig attrs1) (ElConfig attrs2) attrs1 attrs2
elConfig_attributes f (ElConfig a b) = (\b' -> ElConfig a b') <$> f b
{-# INLINE elConfig_attributes #-}
#endif
-- | The full constraint set historically bundled as @MonadWidget@:
-- a GHCJS-DOM builder with post-build, perform-event, trigger-event,
-- ref and document capabilities.
type MonadWidgetConstraints t m =
  ( DomBuilder t m
  , DomBuilderSpace m ~ GhcjsDomSpace
  , MonadFix m
  , MonadHold t m
  , MonadSample t (Performable m)
  , MonadReflexCreateTrigger t m
  , PostBuild t m
  , PerformEvent t m
  , MonadIO m
  , MonadIO (Performable m)
#ifndef ghcjs_HOST_OS
  , DOM.MonadJSM m
  , DOM.MonadJSM (Performable m)
#endif
  , TriggerEvent t m
  , HasDocument m
  , MonadRef m
  , Ref m ~ Ref IO
  , MonadRef (Performable m)
  , Ref (Performable m) ~ Ref IO
  )
-- Empty class with a universal instance: 'MonadWidget' is purely an
-- abbreviation for the constraint synonym above.
class MonadWidgetConstraints t m => MonadWidget t m
instance MonadWidgetConstraints t m => MonadWidget t m
-- | Legacy name for the monad in which performed events run.
type WidgetHost m = Performable m
-- | Legacy element type fixed to the GHCJS DOM builder space.
type El = Element EventResult GhcjsDomSpace
-- | Legacy alias for 'performEvent_'.
addVoidAction :: MonadWidget t m => Event t (WidgetHost m ()) -> m ()
addVoidAction = performEvent_
-- | Legacy alias for a static attribute map.
type AttributeMap = Map Text Text
-- | Build an element (no namespace) and return its raw DOM node.
buildElement :: (MonadWidget t m, Attributes m attrs t) => Text -> attrs -> m a -> m (RawElement (DomBuilderSpace m), a)
buildElement = buildElementNS Nothing
-- | Build a childless element (no namespace).
buildEmptyElement :: (MonadWidget t m, Attributes m attrs t) => Text -> attrs -> m (RawElement (DomBuilderSpace m))
buildEmptyElement elementTag attrs = fst <$> buildElementNS Nothing elementTag attrs blank
-- | Build a childless element in an optional namespace.
buildEmptyElementNS :: (MonadWidget t m, Attributes m attrs t) => Maybe Text -> Text -> attrs -> m (RawElement (DomBuilderSpace m))
buildEmptyElementNS ns elementTag attrs = fst <$> buildElementNS ns elementTag attrs blank
-- | Build an element in an optional namespace, returning the raw DOM
-- node instead of the reflex 'Element'.
buildElementNS :: (MonadWidget t m, Attributes m attrs t) => Maybe Text -> Text -> attrs -> m a -> m (RawElement (DomBuilderSpace m), a)
buildElementNS ns elementTag attrs child = first _element_raw <$> buildElementInternal ns elementTag attrs child
-- | Dispatch on the attribute payload: static 'Map' vs 'Dynamic' map.
class Attributes m attrs t where
  buildElementInternal :: MonadWidget t m => Maybe Text -> Text -> attrs -> m a -> m (Element EventResult (DomBuilderSpace m) t, a)
-- Static attributes: installed once at element creation.
instance Attributes m (Map Text Text) t where
  buildElementInternal ns elementTag attrs child = do
    let cfg = def & elementConfig_namespace .~ ns
    buildElementCommon elementTag child =<< addStaticAttributes attrs cfg
-- | Install a static attribute map into an element configuration.
addStaticAttributes :: Applicative m => Map Text Text -> ElementConfig er t (DomBuilderSpace m) -> m (ElementConfig er t (DomBuilderSpace m))
addStaticAttributes attrs cfg = do
  let initialAttrs = Map.fromList $ first (AttributeName Nothing) <$> Map.toList attrs
  pure $ cfg & elementConfig_initialAttributes .~ initialAttrs
-- Dynamic attributes: updates are applied as the Dynamic changes.
instance PostBuild t m => Attributes m (Dynamic t (Map Text Text)) t where
  buildElementInternal ns elementTag attrs child = do
    let cfg = def & elementConfig_namespace .~ ns
    buildElementCommon elementTag child =<< addDynamicAttributes attrs cfg
-- | Wire a 'Dynamic' attribute map into an element configuration via
-- modify-attribute events.
addDynamicAttributes :: PostBuild t m => Dynamic t (Map Text Text) -> ElementConfig er t (DomBuilderSpace m) -> m (ElementConfig er t (DomBuilderSpace m))
addDynamicAttributes attrs cfg = do
  modifyAttrs <- dynamicAttributesToModifyAttributes attrs
  return $ cfg & elementConfig_modifyAttributes .~ fmap mapKeysToAttributeName modifyAttrs
-- | Shared tail of both instances: actually create the element.
buildElementCommon :: MonadWidget t m => Text -> m a -> ElementConfig er t (DomBuilderSpace m) -> m (Element er (DomBuilderSpace m) t, a)
buildElementCommon elementTag child cfg = element elementTag cfg child
-- | Legacy alias for 'elementOnEventName'; returns an unsubscribe action.
onEventName :: IsHTMLElement e => EventName en -> e -> EventM e (EventType en) () -> JSM (JSM ())
onEventName = elementOnEventName
-- | Run an action once, immediately after the widget is built.
schedulePostBuild :: (PostBuild t m, PerformEvent t m) => WidgetHost m () -> m ()
schedulePostBuild w = do
  postBuild <- getPostBuild
  performEvent_ $ w <$ postBuild
-- | Create a text node and return the raw DOM node.
text' :: MonadWidget t m => Text -> m DOM.Text
text' s = _textNode_raw <$> textNode (def & textNodeConfig_initialContents .~ s)
instance HasAttributes (ElConfig attrs) where
  type Attrs (ElConfig attrs) = attrs
  attributes = elConfig_attributes
instance HasNamespace (ElConfig attrs) where
  namespace = elConfig_namespace
-- | Build an element from an 'ElConfig', returning only the child result.
elWith :: (MonadWidget t m, Attributes m attrs t) => Text -> ElConfig attrs -> m a -> m a
elWith elementTag cfg child = snd <$> elWith' elementTag cfg child
-- | Build an element from an 'ElConfig', returning the element too.
elWith' :: (MonadWidget t m, Attributes m attrs t) => Text -> ElConfig attrs -> m a -> m (Element EventResult (DomBuilderSpace m) t, a)
elWith' elementTag cfg = buildElementInternal (cfg ^. namespace) elementTag $ cfg ^. attributes
-- | Childless variant of 'elWith', discarding the element.
emptyElWith :: (MonadWidget t m, Attributes m attrs t) => Text -> ElConfig attrs -> m ()
emptyElWith elementTag cfg = void $ emptyElWith' elementTag cfg
-- | Childless variant of 'elWith'', returning the element.
emptyElWith' :: (MonadWidget t m, Attributes m attrs t) => Text -> ElConfig attrs -> m (Element EventResult (DomBuilderSpace m) t)
emptyElWith' elementTag cfg = fmap fst $ elWith' elementTag cfg $ return ()
-- Deprecated record-style accessors kept for source compatibility.
{-# DEPRECATED _el_clicked "Use 'domEvent Click' instead" #-}
_el_clicked :: Reflex t => Element EventResult d t -> Event t ()
_el_clicked = domEvent Click
{-# DEPRECATED _el_element "Use '_element_raw' instead" #-}
_el_element :: El t -> RawElement GhcjsDomSpace
_el_element = _element_raw
{-# DEPRECATED _el_events "Use '_element_events' instead; or, if possible, use 'domEvent' instead to retrieve a particular event" #-}
_el_events :: Element er d t -> EventSelector t (WrapArg er EventName)
_el_events = _element_events
-- NOTE(review): '_el_keypress' and '_el_scrolled' do not appear in the
-- module export list above -- confirm whether they should be exported
-- or removed.
{-# DEPRECATED _el_keypress "Use 'domEvent Keypress' instead" #-}
_el_keypress :: Reflex t => El t -> Event t Word
_el_keypress = domEvent Keypress
{-# DEPRECATED _el_scrolled "Use 'domEvent Scroll' instead" #-}
_el_scrolled :: Reflex t => El t -> Event t Double
_el_scrolled = domEvent Scroll
-- | Wrap an existing DOM element as a reflex 'El', using the supplied
-- per-event handler to translate raw DOM events into 'EventResult's.
wrapElement :: forall t m. MonadWidget t m => (forall en. DOM.HTMLElement -> EventName en -> EventM DOM.Element (EventType en) (Maybe (EventResult en))) -> DOM.HTMLElement -> m (El t)
wrapElement eh e = do
  let h :: (EventName en, GhcjsDomEvent en) -> JSM (Maybe (EventResult en))
      h (en, GhcjsDomEvent evt) = runReaderT (eh e en) evt
  wrapRawElement (DOM.toElement e) $ (def :: RawElementConfig EventResult t (DomBuilderSpace m))
    { _rawElementConfig_eventSpec = def
      { _ghcjsEventSpec_handler = GhcjsEventHandler h
      }
    }
-- | Splice an existing DOM element into the current build position and
-- wrap it with default event handling.  "unsafe" because the element's
-- provenance and ownership are not checked.
unsafePlaceElement :: MonadWidget t m => DOM.HTMLElement -> m (Element EventResult (DomBuilderSpace m) t)
unsafePlaceElement e = do
  placeRawElement $ DOM.toElement e
  wrapRawElement (DOM.toElement e) def
-- | Collect the names of all attributes in a 'NamedNodeMap'.
namedNodeMapGetNames :: DOM.NamedNodeMap -> JSM (Set Text)
namedNodeMapGetNames self = do
  l <- NNM.getLength self
  Set.fromList <$> forM (take (fromIntegral l) [0..]) (
    NNM.itemUnchecked self >=> getNodeName)
-- | Remove every child of a node, one first-child at a time.
nodeClear :: IsNode self => self -> JSM ()
nodeClear n = do
  mfc <- getFirstChild n
  case mfc of
    Nothing -> return ()
    Just fc -> do
      _ <- removeChild n fc
      nodeClear n
-- | Build an element that stops propagation of the given event (the
-- filter suppresses the event and yields no result).
elStopPropagationNS :: forall t m en a. (MonadWidget t m) => Maybe Text -> Text -> EventName en -> m a -> m a
elStopPropagationNS ns elementTag en child = do
  let f = GhcjsEventFilter $ \_ -> do
        return (stopPropagation, return Nothing)
      cfg = (def :: ElementConfig EventResult t (DomBuilderSpace m))
        & namespace .~ ns
        & elementConfig_eventSpec . ghcjsEventSpec_filters %~ DMap.insert en f
  snd <$> element elementTag cfg child
-- | Element whose innerHTML tracks a 'Dynamic' 'Text'.  The content is
-- set on every update and once at post-build (for the initial value).
-- The HTML is injected verbatim: callers must ensure it is trusted.
elDynHtmlAttr' :: (DOM.MonadJSM m, MonadWidget t m) => Text -> Map Text Text -> Dynamic t Text -> m (Element EventResult GhcjsDomSpace t)
elDynHtmlAttr' elementTag attrs html = do
  let cfg = def & initialAttributes .~ Map.mapKeys (AttributeName Nothing) attrs
  (e, _) <- element elementTag cfg $ return ()
  postBuild <- getPostBuild
  performEvent_ $ liftJSM . Element.setInnerHTML (_element_raw e) <$> leftmost [updated html, tag (current html) postBuild]
  return e
-- | 'elDynHtmlAttr'' with no attributes.
elDynHtml' :: MonadWidget t m => Text -> Dynamic t Text -> m (Element EventResult GhcjsDomSpace t)
elDynHtml' elementTag = elDynHtmlAttr' elementTag mempty
| reflex-frp/reflex-dom | reflex-dom-core/src/Reflex/Dom/Old.hs | bsd-3-clause | 10,483 | 0 | 18 | 1,935 | 3,178 | 1,633 | 1,545 | -1 | -1 |
module Main where
import Graphics.Gloss
-- | L-system alphabet.  Per the 'Changer' instance below: 'F' moves the
-- turtle forward, 'Plus'/'Minus' turn it, 'Lf' pushes the turtle state
-- and 'Rt' restores-and-pops it ('[' / ']' in classic notation); 'X' is
-- a pure rewriting symbol with no drawing effect.
data Var = X | F | Plus | Minus | Lf | Rt deriving (Eq)
-- | Drawing cursor: current position and heading in radians.
data Turtle = Turtle { position:: (Float, Float), angle:: Float} deriving (Show)
-- | Current turtle plus the stack of saved turtle states.
data Config = Config { graphic:: Turtle, history:: [Turtle] } deriving (Show)
-- | Things that can transform the turtle configuration.
class Changer a where
  mutateConfig :: a -> Config -> Config
-- Each symbol is interpreted as a triple of updates: one for the
-- position, one for the heading, one for the saved-state stack.
instance Changer Var where
  mutateConfig var (Config (Turtle (x,y) rotation ) h) = Config (Turtle (mutatePos (x,y)) (mutateAngle rotation)) (mutateHistory h)
    where (mutatePos, mutateAngle, mutateHistory) =
            case var of
              F -> (goOn, id, id)
              Plus -> (id, turnRight, id)
              Minus -> (id, turnLeft, id)
              Lf -> (id, id, push)
              Rt -> (restorePos, restoreAngle, tail)
              X -> (id, id, id)
          -- Step one branch length along the current heading.
          goOn (x,y) = (x + branch * cos rotation, y + branch * sin rotation)
          -- Fixed turn of 25 degrees, converted to radians.
          turnRight angle' = angle' - 25 * (pi / 180)
          turnLeft angle' = angle' + 25 * (pi / 180)
          -- Save the current turtle on the stack.
          push h = Turtle (x,y) rotation : h
          -- Restore from the top of the stack ('tail' above pops it).
          -- NOTE: 'head'/'tail' are partial -- 'Rt' on an empty history
          -- (unbalanced brackets) would crash.
          restorePos _ = position (head h)
          restoreAngle _ = angle (head h)
-- | Starting word of the L-system.
axiom :: [Var]
axiom = [X]

-- | One rewriting step for a single symbol.  Only @X@ and @F@ expand;
-- every other symbol rewrites to itself.
transformVar :: Var -> [Var]
transformVar v = case v of
  X -> [F, Minus, Lf, Lf, X, Rt, Plus, X, Rt, Plus, F, Lf, Plus, F, X, Rt, Minus, X]
  F -> [F, F]
  _ -> [v]
-- | One rewriting step over a whole word.
transformList :: [Var] -> [Var]
transformList = concatMap transformVar
-- | Infinite lazy list of all rewriting generations, starting from the
-- axiom.
algas :: [[Var]]
algas = iterate transformList axiom
algasAtStep n = algas !! n
-- | Fold step: apply one symbol to the newest configuration and keep
-- the whole trace (newest first).
makeConfigList :: (Changer v) => [Config] -> v -> [Config]
makeConfigList configs var = mutateConfig var (head configs) :configs
-- | Run the level-@n@ word from the initial configuration 'c0',
-- collecting every intermediate configuration (newest first).
configLevel :: Int -> [Config]
configLevel n = foldl makeConfigList [c0] (algasAtStep n)
-- | Turtle positions visited at level @n@, in drawing order.
positionsLevel :: Int -> [(Float, Float)]
positionsLevel n = reverse $ map (position . graphic ) (configLevel n)
-- | All pairs of neighbouring elements, each pair as a two-element
-- list: @nearCouples [a,b,c] == [[a,b],[b,c]]@.  Lists with fewer than
-- two elements produce no pairs.
nearCouples :: [a] -> [[a]]
nearCouples xs = zipWith (\a b -> [a, b]) xs (drop 1 xs)
-- | A polyline segment as a list of points (assumed non-empty: the
-- endpoint is taken with 'last').
type Segment = [(Float, Float)]

-- | Keep each segment, discarding any later segment whose endpoint
-- coincides with the endpoint of an already-kept segment.
noSameDest :: [Segment] -> [Segment]
noSameDest [] = []
noSameDest (keep : rest) =
  keep : noSameDest [ s | s <- rest, last s /= last keep ]
-- | Render level @n@ of the fractal as a picture: adjacent turtle
-- positions become line segments, duplicate-endpoint segments dropped.
coloredLines n = Pictures $ map (\(color, line) -> color line ) $ zip colors myLines
  where myLines = map Line $ noSameDest (nearCouples (positionsLevel n))
-- Initial turtle: bottom of the window, heading up-right at 45 degrees.
c0 = Config (Turtle (0,-500) (pi/4)) []
-- Length of one forward step.
branch = 5
-- Infinite list repeating a single leaf-green colour.
colors = Color (makeColorI 158 191 109 255) : colors
-- Dark green window background.
backColor = makeColorI 10 50 10 255
-- Open a gloss window showing level 6 of the fractal.
main = display (InWindow "Nice Window" (1400, 1400) (20, 20)) backColor (coloredLines 6)
| edoardo90/lsystem | src/FractalTree.hs | bsd-3-clause | 2,913 | 0 | 12 | 881 | 1,206 | 666 | 540 | 58 | 1 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Control.Alternative.Freer
( Alt
, runAlt
, liftAlt
, hoistAlt
, retractAlt
) where
import Control.Applicative
-- | A purely formal Alternative structure over a functor, where
-- no particular constraints are made on the behaviour of
-- 'Or', 'Some' and 'Many'.
data Alt f a where
  Pure :: a -> Alt f a
  Lift :: f a -> Alt f a
  -- Note the argument comes first and the function second, the reverse
  -- of '<*>'; 'retractAlt' compensates with '<**>'.
  Ap :: Alt f a -> Alt f (a -> b) -> Alt f b
  Empty :: Alt f a
  Or :: Alt f a -> Alt f a -> Alt f a
  -- One-or-more repetitions of the wrapped computation.
  Some :: Alt f a -> Alt f [a]
instance Functor f => Functor (Alt f) where
  fmap f (Pure a) = Pure $ f a
  fmap f (Lift g) = Lift $ fmap f g
  -- Push the mapping into the stored function side of the node.
  fmap f (Ap x g) = x `Ap` fmap (f .) g
  fmap _ Empty = Empty
  fmap f (Or a b) = Or (f <$> a) (f <$> b)
  -- NOTE(review): this clause applies @f@ to a *singleton* of each
  -- occurrence and discards the 'Some' node itself, so mapping does
  -- not commute with interpreting 'Some' as repetition.  Confirm this
  -- is intended (an alternative would be @Ap (Some l) (Pure f)@).
  fmap f (Some l) = f . (:[]) <$> l
instance Functor f => Applicative (Alt f) where
  pure = Pure
  {-# INLINE pure #-}
  -- Homomorphism: applying a pure function is just fmap.
  (Pure f) <*> y = fmap f y
  -- Empty annihilates on the left.
  Empty <*> _ = Empty
  -- Reassociate so 'Ap' chains stay left-nested on the argument side.
  (Ap a f) <*> y = Ap a (flip <$> f <*> y)
  -- Fallback (Lift/Or/Some on the left): build an 'Ap' node.  Beware
  -- the names: here @y@ is the function side and @f@ the argument,
  -- matching 'Ap''s argument-first field order.
  y <*> f = Ap f y
instance Functor f => Alternative (Alt f) where
  empty = Empty
  a <|> b = Or a b
  some = Some
  -- Per the 'Alternative' documentation, @many v@ is zero-or-more:
  -- @many v = some v <|> pure []@.  The previous definition,
  -- @some x <|> empty@, is equivalent to @some x@ under any lawful
  -- interpretation ('empty' is the identity of '<|>'), i.e. it wrongly
  -- rejected the zero-occurrence case.
  many x = some x <|> pure []
-- | Interpret the free structure into any 'Alternative' @g@ by first
-- translating the generators, then collapsing with 'retractAlt'.
runAlt :: forall f g a. Alternative g => (forall x. f x -> g x) -> Alt f a -> g a
runAlt phi f = retractAlt $ hoistAlt phi f
-- | Embed a single effect as a free-alternative leaf.
liftAlt :: Functor f => f a -> Alt f a
liftAlt = Lift
-- | Apply a natural transformation to every 'Lift' leaf, preserving
-- the tree structure.
hoistAlt :: forall f g a. (forall x. f x -> g x) -> Alt f a -> Alt g a
hoistAlt _ (Pure a) = Pure a
hoistAlt f (Lift g) = Lift $ f g
hoistAlt f (Ap x g) = Ap (hoistAlt f x) (hoistAlt f g)
hoistAlt _ Empty = Empty
hoistAlt f (Or a b) = Or (hoistAlt f a) (hoistAlt f b)
hoistAlt f (Some a) = Some $ hoistAlt f a
-- | Interpret the free alternative over f using the Alternative semantics for f.
-- Each constructor maps to its counterpart; '<**>' (argument applied
-- to function) matches 'Ap' storing the argument first.
retractAlt :: Alternative f => Alt f a -> f a
retractAlt (Pure a) = pure a
retractAlt (Lift f) = f
retractAlt (a `Ap` f) = retractAlt a <**> retractAlt f
retractAlt Empty = empty
retractAlt (Or a b) = retractAlt a <|> retractAlt b
retractAlt (Some a) = some $ retractAlt a
| seagate-ssg/options-schema | src/Control/Alternative/Freer.hs | bsd-3-clause | 2,007 | 0 | 11 | 528 | 961 | 482 | 479 | 54 | 1 |
{-# language MultiParamTypeClasses, FlexibleInstances, FlexibleContexts, UndecidableInstances#-}
-- Finger tree half spine, amortized O(1) access to head (push/pop) , O(log n) access to elements (peek)
module HFT (F(E),push,pop,Peek(peek),P(..)) where
import Control.Applicative
import Data.List
import Data.Maybe
-- | Spine node: 'S' holds one subtree, 'B' two; the strict Int caches
-- the number of leaves underneath ('power'), used to route lookups.
data T a = S !Int a | B !Int a a deriving Show
-- | Random access: @peek i s@ returns the element at index @i@ of
-- container @s b@, or 'Nothing' when out of range.
class Peek s b c where
  peek :: Int -> s b -> Maybe c
-- Base case: the node holds the elements themselves (b ~ c), so only
-- indices 0 (and 1 for 'B') are valid.
instance Peek T c c where
  peek 0 (S _ x) = Just x
  peek 0 (B _ x _) = Just x
  peek 1 (B _ x y) = Just y
  peek _ _ = Nothing
-- Recursive case: a node of subtrees.  'B' routes by the left
-- subtree's leaf count; out-of-range indices bottom out as 'Nothing'
-- in the base instance.
instance Peek T a c => Peek T (T a) c where
  peek n (S k x) = peek n x
  peek n (B k x y)
    | n < px = peek n x
    | otherwise = peek (n - px) y
    where px = power x
-- | Number of leaf elements a value accounts for.
class Power a where
  power :: a -> Int
-- Tree nodes carry their leaf count in the cached Int.
instance Power (T a) where
  power (S k _) = k
  power (B k _ _) = k
-- | Leaf wrapper: one payload element, power 1.
newtype P a = P {unP :: a} deriving (Eq,Show)
instance Power (P a) where
  power _ = 1
-- | Finger-tree half spine: each level stores trees one layer deeper
-- than the previous ('F' of @T a@ then a spine of @T (T a)@, and so on).
data F a = F (T a) (F (T a)) | E deriving Show
-- | Prepend an element: a singleton head absorbs the new element into
-- a 'B' pair; a 'B' head is carried (like binary increment) into the
-- next, deeper spine level.  Amortized O(1).
push :: Power a => a -> F a -> F a
push x = let
  px = power x
  f E = F (S px x) E
  f (F (S py y) e) = F (B (px + py) x y) e
  f (F b e) = F (S px x) $ push b e
  in f
-- | Remove the newest element: split a 'B' head, or pop recursively
-- from the deeper spine when the head is a singleton.  Amortized O(1).
pop :: Power a => F a -> Maybe (a, F a)
pop E = Nothing
pop (F (B _ x y) e) = Just (x, F (S (power y) y) e)
pop (F (S _ x) e) = Just (x, maybe E (uncurry F) $ pop e)
-- Spine lookup: try the head tree, else fall through to the deeper
-- levels with the index shifted by the head's leaf count.  Index 0 is
-- the newest element; O(log n) overall.
instance Peek T a b => Peek F a b where
  peek n E = Nothing
  peek n (F x e) = peek n x <|> peek (n - power x) e
-- naive push 10 * 10^6 elements to an F and random access them all
main :: IO ()
main = do
  let is = [1..10000000::Int]
      t = foldl' (flip push) E $ map P is
  -- Valid indices are 0 .. length-1, with the newest element (10000000)
  -- at index 0.  Peeking @i - 1@ visits every element exactly once; the
  -- previous code peeked indices 1..10000000, so index 10000000 was out
  -- of range and made 'fromJust' throw (and index 0 was never read).
  print . sum . map (\i -> unP $ fromJust (peek (i - 1) t :: Maybe (P Int))) $ is
| paolino/hft | HFT.hs | bsd-3-clause | 1,750 | 0 | 17 | 563 | 944 | 476 | 468 | 56 | 3 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeOperators #-}
module SAWScript.Heapster.JudgmentTranslation.Examples (
testJudgmentTranslation,
) where
import Data.Functor.Const
import Data.Parameterized.Classes
import Data.Parameterized.Context
import Lang.Crucible.LLVM.MemModel
import Lang.Crucible.Types
import SAWScript.Heapster.JudgmentTranslation
import SAWScript.Heapster.Permissions
import SAWScript.Heapster.TypeTranslation
import SAWScript.TopLevel
import Verifier.SAW.OpenTerm
import Verifier.SAW.Term.Pretty
-- | A singleton context: one 64-bit LLVM pointer variable.
type ExampleContext = EmptyCtx ::> LLVMPointerType 64
-- | Left input permission: a pointer whose field at offset 0 carries
-- the whole splitting and the trivial permission -- written
-- @ptr(0 |-> (W,true))@ in the comments below.
permInLeft :: ValuePerm ExampleContext (LLVMPointerType 64)
permInLeft =
  ValPerm_LLVMPtr knownRepr
  [ (LLVMFieldShapePerm
     (LLVMFieldPerm { llvmFieldOffset = 0
                    , llvmFieldSplitting = SplExpr_All
                    , llvmFieldPerm = ValPerm_True
                    }
     )
    )
  ]
  Nothing
-- | Right input permission: equality with the LLVM word 0, @eq(0)@.
permInRight :: ValuePerm ExampleContext (LLVMPointerType 64)
permInRight = ValPerm_Eq (PExpr_LLVMWord knownRepr (PExpr_BV knownRepr [] 0))
-- | Input permission: the disjunction of the two cases above.
permIn :: ValuePerm ExampleContext (LLVMPointerType 64)
permIn = ValPerm_Or permInLeft permInRight
-- | Left output permission: a pointer with no field shapes, @ptr()@.
permOutLeft :: ValuePerm ExampleContext (LLVMPointerType 64)
permOutLeft = ValPerm_LLVMPtr knownRepr [] Nothing
-- | Body of the existential below; the bound bitvector variable is
-- the most recently extended one in the context.
permOutRightInsideExists :: ValuePerm (ExampleContext ::> BVType 64) (LLVMPointerType 64)
permOutRightInsideExists = ValPerm_Eq (PExpr_LLVMWord knownRepr (PExpr_Var (nextPermVar (incSize zeroSize))))
-- | Right output permission: @exists x. eq(llvmword x)@.
permOutRight :: ValuePerm ExampleContext (LLVMPointerType 64)
permOutRight = ValPerm_Exists (BVRepr knownRepr) permOutRightInsideExists
-- | Output permission: disjunction of the two output cases.
permOut :: ValuePerm ExampleContext (LLVMPointerType 64)
permOut = ValPerm_Or permOutLeft permOutRight
-- | For debugging purposes, a simpler example proving the left side:
-- ValPerm_LLVMPtr shapes Nothing |- ValPerm_LLVMPtr [] Nothing
-- | Proof of @permInLeft |- permOutLeft@, wrapped as a finished
-- ('Elim_Done') elimination.
examplePermElimLeft :: PermElim AnnotIntro ExampleContext
examplePermElimLeft = Elim_Done leftAnnotIntro
  where
    -- Output spec: the single context variable gets 'permOutLeft'.
    permSetSpecOut :: PermSetSpec EmptyCtx ExampleContext
    permSetSpecOut =
      [ PermSpec zeroSize (PExpr_Var (nextPermVar zeroSize)) permOutLeft
      ]
    -- permInLeft |- permOutLeft
    leftAnnotIntro :: AnnotIntro ExampleContext
    leftAnnotIntro = AnnotIntro
      { introInPerms = extendPermSet emptyPermSet knownRepr permInLeft
      , introOutPerms = permSetSpecOut
      , introProof = leftIntro
      }
    -- permInLeft |- permOutLeft
    leftIntro :: PermIntro ExampleContext
    leftIntro =
      -- permInLeft |- permOutLeft
      Intro_LLVMPtr (nextPermVar zeroSize)
      -- permInLeft |- empty
      $ Intro_Done
-- | Proof of @permInRight |- permOutRight@: introduce the existential
-- witness (the word 0) and close with reflexivity of equality.
examplePermElimRight :: PermElim AnnotIntro ExampleContext
examplePermElimRight = Elim_Done rightAnnotIntro
  where
    permSetSpecOut :: PermSetSpec EmptyCtx ExampleContext
    permSetSpecOut =
      [ PermSpec zeroSize (PExpr_Var (nextPermVar zeroSize)) permOutRight
      ]
    -- permInRight |- permOutRight
    rightAnnotIntro :: AnnotIntro ExampleContext
    rightAnnotIntro = AnnotIntro
      { introInPerms = extendPermSet emptyPermSet knownRepr permInRight
      , introOutPerms = permSetSpecOut
      , introProof = rightIntro
      }
    -- permInRight |- permOutRight
    rightIntro :: PermIntro ExampleContext
    rightIntro =
      -- permInRight |- permOutRight
      Intro_Exists (BVRepr (knownRepr :: NatRepr 64)) (PExpr_BV knownRepr [] 0) permOutRightInsideExists
      -- permInRight |- ValPerm_Eq (PExpr_LLVMWord knownRepr (PExpr_Var (nextPermVar (incSize zeroSize))))
      $ Intro_Eq (EqProof_Refl (PExpr_LLVMWord (knownRepr :: NatRepr 64) (PExpr_BV knownRepr [] 0)))
      -- permInRight |- permOut
      $ Intro_Done
-- | Proof of @x:(permInLeft \/ permInRight) |- x:permOut@, by case
-- analysis ('Elim_Or') on the input disjunction.
--
-- The helpers below are deliberately top level (not where-bound):
-- 'translateExamplePermElimDisjOutL' / 'translateExamplePermElimDisjOutR'
-- further down reuse 'leftAnnotIntro' / 'rightAnnotIntro' directly.
examplePermElim :: PermElim AnnotIntro ExampleContext
examplePermElim = Elim_Or (nextPermVar zeroSize) leftBranch rightBranch
-- | Shared output spec: the context variable gets 'permOut'.
permSetSpecOut :: PermSetSpec EmptyCtx ExampleContext
permSetSpecOut =
  [ PermSpec zeroSize (PExpr_Var (nextPermVar zeroSize)) permOut
  ]
-- permInLeft |- permOut
leftBranch :: PermElim AnnotIntro ExampleContext
leftBranch = Elim_Done leftAnnotIntro
-- permInLeft |- permOut
leftAnnotIntro :: AnnotIntro ExampleContext
leftAnnotIntro = AnnotIntro
  { introInPerms = extendPermSet emptyPermSet knownRepr permInLeft
  , introOutPerms = permSetSpecOut
  , introProof = leftIntro
  }
-- permInLeft |- permOut
leftIntro :: PermIntro ExampleContext
leftIntro =
  -- permInLeft |- permOut
  Intro_OrL permOutRight
  -- permInLeft |- permOutLeft
  $ Intro_LLVMPtr (nextPermVar zeroSize)
  -- permInLeft |- empty
  $ Intro_Done
-- permInRight |- permOut
rightBranch :: PermElim AnnotIntro ExampleContext
rightBranch = Elim_Done rightAnnotIntro
-- permInRight |- permOut
rightAnnotIntro :: AnnotIntro ExampleContext
rightAnnotIntro = AnnotIntro
  { introInPerms = extendPermSet emptyPermSet knownRepr permInRight
  , introOutPerms = permSetSpecOut
  , introProof = rightIntro
  }
-- permInRight |- permOut
rightIntro :: PermIntro ExampleContext
rightIntro =
  -- permInRight |- permOut
  Intro_OrR permOutLeft
  -- permInRight |- permOutRight
  $ Intro_Exists (BVRepr (knownRepr :: NatRepr 64)) (PExpr_BV knownRepr [] 0) permOutRightInsideExists
  -- permInRight |- ValPerm_Eq (PExpr_LLVMWord knownRepr (PExpr_Var (nextPermVar (incSize zeroSize))))
  $ Intro_Eq (EqProof_Refl (PExpr_LLVMWord (knownRepr :: NatRepr 64) (PExpr_BV knownRepr [] 0)))
  -- permInRight |- permOut
  $ Intro_Done
-- | Block info with no entry points (no jumps occur in the examples).
emptyInfo :: BlocksInfo EmptyCtx
emptyInfo = BlocksInfo { entryPoints = [] }
-- | SAW-core type of a 64-bit LLVM pointer value.
llvmPointerType :: OpenTerm
llvmPointerType = typeTranslate'' (LLVMPointerRepr (knownRepr :: NatRepr 64))
-- Type translations of the permissions above, parameterised by the
-- term environment supplying the context variable's translation.
permInTypeLeft :: OpenTermCtxt ExampleContext -> OpenTerm
permInTypeLeft typeEnvironment = typeTranslate typeEnvironment permInLeft
permInTypeRight :: OpenTermCtxt ExampleContext -> OpenTerm
permInTypeRight typeEnvironment = typeTranslate typeEnvironment permInRight
permInType :: OpenTermCtxt ExampleContext -> OpenTerm
permInType typeEnvironment = typeTranslate typeEnvironment permIn
permOutTypeLeft :: OpenTermCtxt ExampleContext -> OpenTerm
permOutTypeLeft typeEnvironment = typeTranslate typeEnvironment permOutLeft
permOutTypeRight :: OpenTermCtxt ExampleContext -> OpenTerm
permOutTypeRight typeEnvironment = typeTranslate typeEnvironment permOutRight
permOutType :: OpenTermCtxt ExampleContext -> OpenTerm
permOutType typeEnvironment = typeTranslate typeEnvironment permOut
-- | Wrap the translation of a permission elimination in two lambdas:
-- one binding the pointer value @v@, one binding @vp@, the
-- translation of the input permission on @v@.
scaffold ::
  ValuePerm ExampleContext (LLVMPointerType 64) ->
  ValuePerm ExampleContext (LLVMPointerType 64) ->
  PermElim AnnotIntro ExampleContext ->
  OpenTerm
scaffold pIn pOut permElim =
  lambdaOpenTerm "v" (typeTranslate'' (LLVMPointerRepr (knownRepr :: NatRepr 64))) $ \ v ->
  let typeEnvironment :: Assignment (Const OpenTerm) ExampleContext = extend empty (Const v) in
  lambdaOpenTerm "vp" (typeTranslate typeEnvironment pIn) $ \ vp ->
  let jctx = JudgmentContext { typeEnvironment
                             , permissionSet = extendPermSet emptyPermSet knownRepr pIn
                             , permissionMap = extend empty (Const vp)
                             , catchHandler  = Nothing
                             }
  in
  let permSetSpecOut = [PermSpec zeroSize (PExpr_Var $ nextPermVar zeroSize) pOut] in
  judgmentTranslate' emptyInfo jctx (typeTranslatePermSetSpec typeEnvironment permSetSpecOut) permElim
-- Translations of the example proofs, specialised to their in/out
-- permissions via 'scaffold'.
translateExamplePermElimLeft :: OpenTerm
translateExamplePermElimLeft = scaffold permInLeft permOutLeft examplePermElimLeft
translateExamplePermElimRight :: OpenTerm
translateExamplePermElimRight = scaffold permInRight permOutRight examplePermElimRight
translateExamplePermElimDisjOutL :: OpenTerm
translateExamplePermElimDisjOutL = scaffold permInLeft permOut (Elim_Done leftAnnotIntro)
translateExamplePermElimDisjOutR :: OpenTerm
translateExamplePermElimDisjOutR = scaffold permInRight permOut (Elim_Done rightAnnotIntro)
translateExamplePermElim :: OpenTerm
translateExamplePermElim = scaffold permIn permOut examplePermElim
-- | Entry point: translate the example elimination to a SAW-core term
-- and print it.  The commented-out section holds finer-grained
-- debugging translations that can be re-enabled one by one.
testJudgmentTranslation :: TopLevel ()
testJudgmentTranslation = do
  sc <- getSharedContext
  io $ do
    -- Complete an open term against the shared context, then print it.
    let test term = putStrLn . showTerm =<< completeOpenTerm sc term
    {-
    putStrLn "Testing llvmPointerType"
    test llvmPointerType
    putStrLn "Testing permInType"
    test $ lambdaOpenTerm "v" llvmPointerType $ \ v -> permInType (extend empty (Const v))
    putStrLn "Testing permOutType"
    test $ lambdaOpenTerm "v" llvmPointerType $ \ v -> permOutType (extend empty (Const v))
    putStrLn "Testing permInTypeLeft"
    test $ lambdaOpenTerm "v" llvmPointerType $ \ v -> permInTypeLeft (extend empty (Const v))
    putStrLn "Testing permOutTypeLeft"
    test $ lambdaOpenTerm "v" llvmPointerType $ \ v -> permOutTypeLeft (extend empty (Const v))
    putStrLn "Testing translating examplePermElimLeft"
    test $ translateExamplePermElimLeft
    putStrLn "Testing permInTypeRight"
    test $ lambdaOpenTerm "v" llvmPointerType $ \ v -> permInTypeRight (extend empty (Const v))
    putStrLn "Testing permOutTypeRight"
    test $ lambdaOpenTerm "v" llvmPointerType $ \ v -> permOutTypeRight (extend empty (Const v))
    putStrLn "Testing translating examplePermElimRight"
    test $ translateExamplePermElimRight
    putStrLn "\nTranslating (ptr(0 |-> (W,true)) |- (ptr() \\/ exists x.eq(x))"
    test $ translateExamplePermElimDisjOutL
    putStrLn "\nTranslating (eq(0) |- (ptr() \\/ exists x.eq(x))"
    test $ translateExamplePermElimDisjOutR
    -}
    putStrLn "\nTranslating x:(ptr(0 |-> (W,true)) \\/ eq(0)) |- x:(ptr() \\/ exists y.eq(y))"
    test $ translateExamplePermElim
-- EXAMPLE to write:
--
-- (ValPerm_LLVMPtr shapes ... \/ eq(PExpr_LLVMWord zero)) ->
-- (ValPerm_LLVMPtr [] Nothing) \/ (exists (x : ), PExpr_LLVMWord x)
--
-- PermElim AnnotIntro (EmptyCtx ::> LLVMPointerType 64)
| GaloisInc/saw-script | heapster-saw/src/Verifier/SAW/Heapster/archival/JudgmentTranslation/Examples.hs | bsd-3-clause | 9,844 | 0 | 21 | 1,681 | 1,651 | 873 | 778 | 146 | 1 |
module FRP.Sodium.GameEngine2D.Orientation where
-- | The eight 2D sprite orientations: the four rotations plus their
-- mirrored counterparts.  'Enum' order follows declaration order, so
-- 'OrientationUp' is 0 and 'OrientationRightMirrored' is 7.
data Orientation = OrientationUp
                 | OrientationDown
                 | OrientationLeft
                 | OrientationRight
                 | OrientationUpMirrored
                 | OrientationDownMirrored
                 | OrientationLeftMirrored
                 | OrientationRightMirrored
                 deriving (Eq, Ord, Show, Enum)
| the-real-blackh/sodium-2d-game-engine | FRP/Sodium/GameEngine2D/Orientation.hs | bsd-3-clause | 410 | 0 | 6 | 165 | 55 | 34 | 21 | 10 | 0 |
module CaesarEncode where
import Data.Char (ord, chr)
-- | Encode a string with the Caesar cipher, shifting every ASCII
-- letter by @t@ positions and wrapping within its alphabet.
--
-- Case is preserved and non-letters pass through unchanged.  Negative
-- and out-of-range shifts work because the offset is reduced modulo
-- 26 (Haskell's 'mod' is non-negative for a positive divisor).
caesarEncode :: Int -> String -> String
caesarEncode t = map rotate
  where
    -- Shift one character within the 26-letter alphabet starting at
    -- the given base character.
    shiftFrom base c = chr $ (t + ord c - ord base) `mod` 26 + ord base
    rotate c
      | 'a' <= c && c <= 'z' = shiftFrom 'a' c
      | 'A' <= c && c <= 'Z' = shiftFrom 'A' c
      | otherwise            = c
| abhinav-mehta/CipherSolver | src/CaesarEncode.hs | bsd-3-clause | 342 | 2 | 15 | 108 | 191 | 92 | 99 | 8 | 1 |
--
-- AF.hs --- GPIO Alternate Function aliases
--
-- Copyright (C) 2013, Galois, Inc.
-- All Rights Reserved.
--
module Ivory.BSP.STM32F405.GPIO.AF where
import Ivory.BSP.STM32.Peripheral.GPIOF4.RegTypes
-- These are funky because i did it all with copy paste and regexps based on the
-- gpio.h in hwf4. Yeah, disgusting, I know.
-- Types -----------------------------------------------------------------------
-- | Peripheral-name aliases for the numbered GPIO alternate functions
-- (AF0..AF15) of the STM32F405 pin mux.
-- NOTE(review): the mapping is taken over from hwf4's gpio.h (see the
-- comment above) -- confirm against the STM32F405 datasheet's
-- alternate-function table before adding new pins.
gpio_af_rtc_50hz :: GPIO_AF
gpio_af_mc0 :: GPIO_AF
gpio_af_tamper :: GPIO_AF
gpio_af_swj :: GPIO_AF
gpio_af_trace :: GPIO_AF
gpio_af_tim1 :: GPIO_AF
gpio_af_tim2 :: GPIO_AF
gpio_af_tim3 :: GPIO_AF
gpio_af_tim4 :: GPIO_AF
gpio_af_tim5 :: GPIO_AF
gpio_af_tim8 :: GPIO_AF
gpio_af_tim9 :: GPIO_AF
gpio_af_tim10 :: GPIO_AF
gpio_af_tim11 :: GPIO_AF
gpio_af_i2c1 :: GPIO_AF
gpio_af_i2c2 :: GPIO_AF
gpio_af_i2c3 :: GPIO_AF
gpio_af_spi1 :: GPIO_AF
gpio_af_spi2 :: GPIO_AF
gpio_af_spi3 :: GPIO_AF
gpio_af_uart1 :: GPIO_AF
gpio_af_uart2 :: GPIO_AF
gpio_af_uart3 :: GPIO_AF
gpio_af_i2s3ext :: GPIO_AF
gpio_af_uart4 :: GPIO_AF
gpio_af_uart5 :: GPIO_AF
gpio_af_uart6 :: GPIO_AF
gpio_af_can1 :: GPIO_AF
gpio_af_can2 :: GPIO_AF
gpio_af_tim12 :: GPIO_AF
gpio_af_tim13 :: GPIO_AF
gpio_af_tim14 :: GPIO_AF
gpio_af_otg_fs :: GPIO_AF
gpio_af_otg_hs :: GPIO_AF
gpio_af_eth :: GPIO_AF
gpio_af_fsmc :: GPIO_AF
gpio_af_otg_hs_fs :: GPIO_AF
gpio_af_sdio :: GPIO_AF
gpio_af_dcmi :: GPIO_AF
gpio_af_eventout :: GPIO_AF
-- Definitions -----------------------------------------------------------------
-- AF0: system functions
gpio_af_rtc_50hz = gpio_af0
gpio_af_mc0 = gpio_af0
gpio_af_tamper = gpio_af0
gpio_af_swj = gpio_af0
gpio_af_trace = gpio_af0
-- AF1..AF3: timers
gpio_af_tim1 = gpio_af1
gpio_af_tim2 = gpio_af1
gpio_af_tim3 = gpio_af2
gpio_af_tim4 = gpio_af2
gpio_af_tim5 = gpio_af2
gpio_af_tim8 = gpio_af3
gpio_af_tim9 = gpio_af3
gpio_af_tim10 = gpio_af3
gpio_af_tim11 = gpio_af3
-- AF4..AF6: I2C and SPI
gpio_af_i2c1 = gpio_af4
gpio_af_i2c2 = gpio_af4
gpio_af_i2c3 = gpio_af4
gpio_af_spi1 = gpio_af5
gpio_af_spi2 = gpio_af5
gpio_af_spi3 = gpio_af6
-- AF7..AF8: UARTs (and I2S3 ext)
gpio_af_uart1 = gpio_af7
gpio_af_uart2 = gpio_af7
gpio_af_uart3 = gpio_af7
gpio_af_i2s3ext = gpio_af7
gpio_af_uart4 = gpio_af8
gpio_af_uart5 = gpio_af8
gpio_af_uart6 = gpio_af8
-- AF9: CAN and low timers
gpio_af_can1 = gpio_af9
gpio_af_can2 = gpio_af9
gpio_af_tim12 = gpio_af9
gpio_af_tim13 = gpio_af9
gpio_af_tim14 = gpio_af9
-- AF10..AF15: USB OTG, ethernet, FSMC, SDIO, DCMI, event out
gpio_af_otg_fs = gpio_af10
gpio_af_otg_hs = gpio_af10
gpio_af_eth = gpio_af11
gpio_af_fsmc = gpio_af12
gpio_af_otg_hs_fs = gpio_af12
gpio_af_sdio = gpio_af12
gpio_af_dcmi = gpio_af13
gpio_af_eventout = gpio_af15
| GaloisInc/ivory-tower-stm32 | ivory-bsp-stm32/src/Ivory/BSP/STM32F405/GPIO/AF.hs | bsd-3-clause | 2,823 | 0 | 4 | 636 | 428 | 264 | 164 | 82 | 1 |
module Magic.Color
( CombColor (..)
, colorless
, colors
, Color1 (..)
, fromColor1
, Color2 (..)
, color2
, splitColor2
, fromColor2
) where
import Prelude
import Data.Maybe
-- CombColor
-- | A combination of the five Magic colors, one flag per color.
-- All flags 'False' means colorless.
data CombColor = CombColor
  { isWhite :: Bool
  , isBlue :: Bool
  , isBlack :: Bool
  , isRed :: Bool
  , isGreen :: Bool
  }
  deriving (Eq)
-- | The empty color combination (no flags set).
colorless :: CombColor
colorless = CombColor False False False False False
-- | List the primary colors present in a combination, in the
-- canonical order white, blue, black, red, green.
colors :: CombColor -> [Color1]
colors c = mapMaybe (($c) . maybeColor) [White ..]
-- | Build a combination from a list of primary colors.
--
-- The empty list yields 'colorless'.  This base case was previously
-- missing, so every use -- including all of 'fromColor2' -- crashed
-- with a pattern-match failure once the recursion reached @[]@.
fromColors :: [Color1] -> CombColor
fromColors [] = colorless
fromColors (White:cs) = (fromColors cs) { isWhite = True }
fromColors (Blue :cs) = (fromColors cs) { isBlue = True }
fromColors (Black:cs) = (fromColors cs) { isBlack = True }
fromColors (Red  :cs) = (fromColors cs) { isRed = True }
fromColors (Green:cs) = (fromColors cs) { isGreen = True }
-- | @maybeColor c comb@ is @Just c@ when @c@ is present in @comb@,
-- 'Nothing' otherwise.
maybeColor :: Color1 -> CombColor -> Maybe Color1
maybeColor White x = if isWhite x then Just White else Nothing
maybeColor Blue  x = if isBlue  x then Just Blue  else Nothing
maybeColor Black x = if isBlack x then Just Black else Nothing
maybeColor Red   x = if isRed   x then Just Red   else Nothing
maybeColor Green x = if isGreen x then Just Green else Nothing
-- | The five primary Magic colors, in WUBRG order ('Enum' index 0-4).
data Color1 = White
            | Blue
            | Black
            | Red
            | Green
            deriving (Eq, Enum)
-- | Embed a primary color as a one-flag combination.
fromColor1 :: Color1 -> CombColor
fromColor1 White = CombColor True False False False False
fromColor1 Blue  = CombColor False True False False False
fromColor1 Black = CombColor False False True False False
fromColor1 Red   = CombColor False False False True False
fromColor1 Green = CombColor False False False False True
-- Color2
-- | The ten unordered pairs of distinct primary colors, in the order
-- WU, WB, UB, UR, BR, BG, RG, RW, GW, GU.
data Color2 = WU
            | WB
            | UB
            | UR
            | BR
            | BG
            | RG
            | RW
            | GW
            | GU
  deriving (Eq, Enum)
-- | Pair two distinct primary colors into a 'Color2'.
--
-- Returns 'Nothing' when both arguments are the same color.  The
-- pairing is symmetric: arguments may come in either order.
color2 :: Color1 -> Color1 -> Maybe Color2
color2 a b
  | a == b            = Nothing
  | Just g <- direct  = Just g
  | otherwise         = color2 b a
  where
    direct = lookup (a, b) guildTable
    -- Canonical argument orders, one per 'Color2' constructor; the
    -- swapped call above covers the other orientation.
    guildTable =
      [ ((White, Blue ), WU)
      , ((White, Black), WB)
      , ((Blue , Black), UB)
      , ((Blue , Red  ), UR)
      , ((Black, Red  ), BR)
      , ((Black, Green), BG)
      , ((Red  , Green), RG)
      , ((Red  , White), RW)
      , ((Green, White), GW)
      , ((Green, Blue ), GU)
      ]
-- | Decompose a color pair into its two primary colors, in the order
-- given by the constructor name.
splitColor2 :: Color2 -> (Color1, Color1)
splitColor2 WU = (White, Blue )
splitColor2 WB = (White, Black)
splitColor2 UB = (Blue , Black)
splitColor2 UR = (Blue , Red  )
splitColor2 BR = (Black, Red  )
splitColor2 BG = (Black, Green)
splitColor2 RG = (Red  , Green)
splitColor2 RW = (Red  , White)
splitColor2 GW = (Green, White)
splitColor2 GU = (Green, Blue )
-- | Expand a color pair into a two-flag combination.
-- NOTE(review): relies on 'fromColors' having a base case for the
-- empty list -- confirm against its definition.
fromColor2 :: Color2 -> CombColor
fromColor2 WU = fromColors [White, Blue ]
fromColor2 WB = fromColors [White, Black]
fromColor2 UB = fromColors [Blue , Black]
fromColor2 UR = fromColors [Blue , Red  ]
fromColor2 BR = fromColors [Black, Red  ]
fromColor2 BG = fromColors [Black, Green]
fromColor2 RG = fromColors [Red  , Green]
fromColor2 RW = fromColors [Red  , White]
fromColor2 GW = fromColors [Green, White]
fromColor2 GU = fromColors [Green, Blue ]
-- Show instances
-- vim: set expandtab:
| mkut/libmtg | Magic/Color.hs | bsd-3-clause | 3,183 | 0 | 9 | 836 | 1,153 | 622 | 531 | 94 | 6 |
{-
This module offers a basic parser for the HaCoTeB project. It creates a
single AST node from the entire section, containing all of the text with
no mark-up and no formatting. Useful for debugging and as a safe fallback.
-}
module HaCoTeB.Parse.BasicParse (basicParse) where
import HaCoTeB.Types
-- | Collapse a section's lines into a single unformatted text node:
-- the lines are joined with spaces and wrapped as plain
-- 'SimpleContent' inside a 'TextSection'.  No mark-up is interpreted.
basicParse :: [String] -> Section
basicParse sectionLines = TextSection (SimpleContent (unwords sectionLines))
| mihaimaruseac/HaCoTeB | src/HaCoTeB/Parse/BasicParse.hs | bsd-3-clause | 388 | 0 | 6 | 63 | 43 | 26 | 17 | 4 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE PatternGuards #-}
-- | This module contains the data types, operations and
-- serialization functions for representing Fixpoint's
-- implication (i.e. subtyping) and well-formedness
-- constraints in Haskell. The actual constraint
-- solving is done by the `fixpoint.native` which
-- is written in Ocaml.
module Language.Fixpoint.Types.Sorts (
-- * Embedding to Fixpoint Types
Sort (..)
, Sub (..)
, FTycon, TCEmb
, sortFTycon
, intFTyCon, boolFTyCon, realFTyCon, numFTyCon -- TODO: hide these
, intSort, realSort, boolSort, strSort, funcSort
, listFTyCon
, isListTC
, fTyconSymbol, symbolFTycon, fTyconSort
, fApp, fApp', fAppTC
, fObj
, sortSubst
, functionSort
, mkFFunc
) where
import qualified Data.Binary as B
import Data.Generics (Data)
import Data.Typeable (Typeable)
import GHC.Generics (Generic)
import Data.Hashable
import Data.List (foldl')
import Control.DeepSeq
import Data.Maybe (fromMaybe)
import Language.Fixpoint.Types.Names
import Language.Fixpoint.Types.PrettyPrint
import Language.Fixpoint.Types.Spans
import Language.Fixpoint.Misc
import Text.PrettyPrint.HughesPJ
import qualified Data.HashMap.Strict as M
-- | A fixpoint type constructor: a symbol with a source location.
newtype FTycon = TC LocSymbol deriving (Eq, Ord, Show, Data, Typeable, Generic)
-- | Maps source-level types to the fixpoint constructor embedding them.
type TCEmb a = M.HashMap a FTycon
intFTyCon, boolFTyCon, realFTyCon, funcFTyCon, numFTyCon, strFTyCon, listFTyCon :: FTycon
-- Built-in constructors, created at a dummy source position.
intFTyCon = TC $ dummyLoc "int"
boolFTyCon = TC $ dummyLoc "bool"
realFTyCon = TC $ dummyLoc "real"
numFTyCon = TC $ dummyLoc "num"
funcFTyCon = TC $ dummyLoc "function"
strFTyCon = TC $ dummyLoc strConName
listFTyCon = TC $ dummyLoc listConName
-- | Is the symbol one of the two accepted list-constructor spellings?
isListConName :: LocSymbol -> Bool
isListConName x = c == listConName || c == listLConName --"List"
  where
    c = val x
-- | Is the constructor the list type constructor?
isListTC :: FTycon -> Bool
isListTC (TC z) = isListConName z
-- | Project the underlying located symbol out of a constructor.
fTyconSymbol :: FTycon -> Located Symbol
fTyconSymbol (TC s) = s
-- | Build a constructor from a located symbol, canonicalising either
-- list spelling to 'listConName'.
symbolFTycon :: LocSymbol -> FTycon
symbolFTycon c
  | isListConName c
  = TC $ fmap (const listConName) c
  | otherwise
  = TC c
-- | Apply a sort to argument sorts, left-nesting 'FApp'.
fApp :: Sort -> [Sort] -> Sort
fApp = foldl' FApp
-- | Apply a type constructor to argument sorts.
fAppTC :: FTycon -> [Sort] -> Sort
fAppTC = fApp . fTyconSort
-- | Flatten a (possibly nested) 'FApp' spine into the non-empty list
-- @[head, arg1, arg2, ...]@; a non-application comes back as a
-- singleton.  Left inverse of 'fApp'.
fApp' :: Sort -> ListNE Sort
fApp' t0 = unwind t0 []
  where
    unwind (FApp f a) args = unwind f (a : args)
    unwind t          args = t : args
-- | The sort of an uninterpreted object named by a located symbol.
fObj :: LocSymbol -> Sort
fObj = fTyconSort . TC
-- | Recover the type constructor underlying a sort, if it has one.
-- Partial inverse of 'fTyconSort'.
sortFTycon :: Sort -> Maybe FTycon
sortFTycon s = case s of
  FInt  -> Just intFTyCon
  FReal -> Just realFTyCon
  FNum  -> Just numFTyCon
  FTC c -> Just c
  _     -> Nothing
-- | View a sort as a function: @Just (tvars, argSorts, resultSort)@,
-- where @tvars@ are the indices bound by 'FAbs' binders and
-- @argSorts@ is the 'FFunc' argument chain.  Returns 'Nothing' when
-- there are neither binders nor arguments (not a function sort).
functionSort :: Sort -> Maybe ([Int], [Sort], Sort)
functionSort s
  | null is && null ss
  = Nothing
  | otherwise
  = Just (is, ss, r)
  where
    (is, ss, r) = go [] [] s
    -- Accumulate binders and arguments in reverse, then flip both.
    go vs ss (FAbs i t) = go (i:vs) ss t
    go vs ss (FFunc s1 s2) = go vs (s1:ss) s2
    go vs ss t = (reverse vs, reverse ss, t)
----------------------------------------------------------------------
------------------------------- Sorts --------------------------------
----------------------------------------------------------------------
-- | The sorts (logic-level types) of fixpoint's refinement logic.
data Sort = FInt
          | FReal
          | FNum -- ^ numeric kind for Num tyvars
          | FFrac -- ^ numeric kind for Fractional tyvars
          | FObj Symbol -- ^ uninterpreted type
          | FVar !Int -- ^ fixpoint type variable
          | FFunc !Sort !Sort -- ^ function
          | FAbs !Int !Sort -- ^ type-abstraction
          | FTC FTycon
          | FApp Sort Sort -- ^ constructed type
          deriving (Eq, Ord, Show, Data, Typeable, Generic)
{-@ FFunc :: Nat -> ListNE Sort -> Sort @-}
-- | Build @FAbs 0 (.. FAbs (i-1) ..)@ around the right-nested 'FFunc'
-- chain of @ss@, whose last element is the result sort.
-- NOTE(review): the final equation only fires for empty @ss@, which
-- the LiquidHaskell signature above rules out -- it is not enforced
-- by the Haskell types.
mkFFunc :: Int -> [Sort] -> Sort
mkFFunc i ss = go [0..i-1] ss
  where
    go [] [s] = s
    go [] (s:ss) = FFunc s $ go [] ss
    go (i:is) ss = FAbs i $ go is ss
    go _ _ = error "cannot happen"
-- foldl (flip FAbs) (foldl1 (flip FFunc) ss) [0..i-1]
-- | Hash a constructor via its symbol.
instance Hashable FTycon where
  hashWithSalt i (TC s) = hashWithSalt i s
instance Hashable Sort
-- | A substitution on sort variables (index to sort).
newtype Sub = Sub [(Int, Sort)] deriving (Generic)
-- | Render sorts in fixpoint's concrete syntax.
instance Fixpoint Sort where
  toFix = toFixSort
-- | Pretty-print a sort in fixpoint concrete syntax.
toFixSort :: Sort -> Doc
toFixSort (FVar i)     = text "@" <> parens (toFix i)
toFixSort FInt         = text "int"
toFixSort FReal        = text "real"
toFixSort FFrac        = text "frac"
toFixSort (FObj x)     = toFix x
toFixSort FNum         = text "num"
toFixSort t@(FAbs _ _) = toFixAbsApp t
toFixSort t@(FFunc _ _)= toFixAbsApp t
toFixSort (FTC c)      = toFix c
toFixSort t@(FApp _ _) = toFixFApp (fApp' t)
-- | Render an abstraction/function sort as @func(n, args-and-result)@,
-- where @n@ counts the bound sort variables.
--
-- Only valid on 'FAbs'/'FFunc' sorts (as called from 'toFixSort');
-- the previous irrefutable pattern @Just (..) = functionSort t@ died
-- with an opaque pattern-match failure on other inputs, so the
-- partiality is now explicit with a descriptive message.
toFixAbsApp :: Sort -> Doc
toFixAbsApp t =
  case functionSort t of
    Just (vs, ss, s) ->
      text "func" <> parens (toFix (length vs) <> text ", " <> toFix (ss ++ [s]))
    Nothing ->
      error "toFixAbsApp: expected an FAbs/FFunc sort"
-- | Print a flattened application spine (from 'fApp'').  A list
-- application prints as @[elem]@, everything else space-separated in
-- parentheses.
toFixFApp :: ListNE Sort -> Doc
toFixFApp [t] = toFixSort t
toFixFApp [FTC c, t]
  | isListTC c = brackets $ toFixSort t
toFixFApp ts = parens $ intersperse space (toFixSort <$> ts)
-- | A constructor prints as its symbol.
instance Fixpoint FTycon where
  toFix (TC s) = toFix s
-------------------------------------------------------------------------
-- | Exported Basic Sorts -----------------------------------------------
-------------------------------------------------------------------------
boolSort, intSort, realSort, strSort, funcSort :: Sort
-- Sorts for the built-in constructors.
boolSort = fTyconSort boolFTyCon
strSort  = fTyconSort strFTyCon
intSort  = fTyconSort intFTyCon
realSort = fTyconSort realFTyCon
funcSort = fTyconSort funcFTyCon
-- | Interpret a constructor as a sort: the numeric builtins collapse
-- to their primitive sorts; everything else stays symbolic as 'FTC'.
fTyconSort :: FTycon -> Sort
fTyconSort c
  | c == intFTyCon  = FInt
  | c == realFTyCon = FReal
  | c == numFTyCon  = FNum
  | otherwise       = FTC c
------------------------------------------------------------------------
-- | Apply an object-sort substitution throughout a sort.  Only 'FObj'
-- leaves are replaced; 'FAbs' binder indices are left untouched.
sortSubst :: M.HashMap Symbol Sort -> Sort -> Sort
sortSubst env = go
  where
    go t@(FObj x)    = fromMaybe t (M.lookup x env)
    go (FFunc t1 t2) = FFunc (go t1) (go t2)
    go (FApp  t1 t2) = FApp  (go t1) (go t2)
    go (FAbs  i t)   = FAbs  i (go t)
    go t             = t
-- Serialization and strictness instances, derived via 'Generic'.
instance B.Binary FTycon
instance B.Binary Sort
instance B.Binary Sub
instance NFData FTycon
instance NFData Sort
instance NFData Sub
-- | Sorts form a monoid with @FObj "any"@ as a unit absorbed by any
-- other sort; combining two distinct non-unit sorts is a program
-- error ('errorstar').
-- NOTE(review): base >= 4.11 requires a 'Semigroup Sort' instance for
-- this to compile -- confirm the supported GHC range.
instance Monoid Sort where
  mempty = FObj "any"
  mappend t1 t2
    | t1 == mempty = t2
    | t2 == mempty = t1
    | t1 == t2     = t1
    | otherwise    = errorstar $ "mappend-sort: conflicting sorts t1 =" ++ show t1 ++ " t2 = " ++ show t2
| rolph-recto/liquid-fixpoint | src/Language/Fixpoint/Types/Sorts.hs | bsd-3-clause | 7,288 | 0 | 10 | 1,958 | 2,039 | 1,078 | 961 | 178 | 4 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
-- | This widget listens on DBus for Log events from XMonad and
-- displays the formatted status string. To log to this widget using
-- the excellent dbus-core library, use code like the following:
--
-- > import DBus.Client.Simple
-- > main = do
-- > session <- connectSession
-- > emit session "/org/xmonad/Log" "org.xmonad.Log" "Update" [toVariant "msg"]
--
-- There is a more complete example of xmonad integration in the
-- top-level module.
module System.Taffybar.XMonadLog {-# DEPRECATED "Use TaffyPager instead. This module will be removed." #-} (
-- * Constructor
xmonadLogNew,
-- * Log hooks for xmonad.hs
dbusLog,
dbusLogWithPP,
-- * Styles
taffybarPP,
taffybarDefaultPP,
taffybarColor,
taffybarEscape
) where
import Codec.Binary.UTF8.String ( decodeString )
import DBus ( toVariant, fromVariant, Signal(..), signal )
import DBus.Client ( listen, matchAny, MatchRule(..), connectSession, emit, Client )
import Graphics.UI.Gtk hiding ( Signal )
import XMonad
import XMonad.Hooks.DynamicLog
-- | This is a DBus-based logger that can be used from XMonad to log
-- to this widget. This version lets you specify the format for the
-- log using a pretty printer (e.g., 'taffybarPP').
dbusLogWithPP :: Client -> PP -> X ()
dbusLogWithPP client pp = dynamicLogWithPP pp { ppOutput = outputThroughDBus client }
-- | A DBus-based logger with a default pretty-print configuration.
dbusLog :: Client -> X ()
dbusLog client = dbusLogWithPP client taffybarDefaultPP
-- | Wrap a string in a Pango @<span>@ tag with the given foreground
-- colour and, when @bg@ is non-empty, background colour; the payload
-- is escaped first via 'taffybarEscape'.
taffybarColor :: String -> String -> String -> String
taffybarColor fg bg = wrap t "</span>" . taffybarEscape
  where
    t = concat ["<span fgcolor=\"", fg, if null bg then "" else "\" bgcolor=\"" ++ bg , "\">"]
-- | Escape strings so that they can be safely displayed by Pango in
-- the bar widget.
taffybarEscape :: String -> String
taffybarEscape = escapeMarkup
-- | The same as the default PP in XMonad.Hooks.DynamicLog
taffybarDefaultPP :: PP
taffybarDefaultPP =
#if MIN_VERSION_xmonad_contrib(0, 12, 0)
  def {
#else
  defaultPP {
#endif
    -- Every field is escaped for Pango markup; current/visible
    -- workspaces additionally get [] / <> markers.
    ppCurrent = taffybarEscape . wrap "[" "]"
  , ppVisible = taffybarEscape . wrap "<" ">"
  , ppHidden = taffybarEscape
  , ppHiddenNoWindows = taffybarEscape
  , ppUrgent = taffybarEscape
  , ppTitle = taffybarEscape . shorten 80
  , ppLayout = taffybarEscape
  }
-- | The same as xmobarPP in XMonad.Hooks.DynamicLog: coloured
-- current-workspace, title and urgent fields.
taffybarPP :: PP
taffybarPP = taffybarDefaultPP { ppCurrent = taffybarColor "yellow" "" . wrap "[" "]"
                               , ppTitle = taffybarColor "green" "" . shorten 40
                               , ppVisible = wrap "(" ")"
                               , ppUrgent = taffybarColor "red" "yellow"
                               }
-- | Send one formatted status string to listening widgets by emitting
-- the @org.xmonad.Log.Update@ signal on the session bus.
outputThroughDBus :: Client -> String -> IO ()
outputThroughDBus client str = do
  -- The string that we get from XMonad here isn't quite a normal
  -- string - each character is actually a byte in a utf8 encoding.
  -- We need to decode the string back into a real String before we
  -- send it over dbus.
  let str' = decodeString str
  emit client (signal "/org/xmonad/Log" "org.xmonad.Log" "Update") { signalBody = [ toVariant str' ] }
-- | Connect to the session bus and invoke 'callback' on every
-- @Update@ signal delivered to @/org/xmonad/Log@.
setupDbus :: Label -> IO ()
setupDbus w = do
  let matcher = matchAny { matchSender = Nothing
                         , matchDestination = Nothing
                         , matchPath = Just "/org/xmonad/Log"
                         , matchInterface = Just "org.xmonad.Log"
                         , matchMember = Just "Update"
                         }
  client <- connectSession
  listen client matcher (callback w)
-- | Handle one @Update@ signal: render its string payload as Pango
-- markup on the label, from the GTK main loop via 'postGUIAsync'.
--
-- The previous implementation destructured the body with partial
-- patterns (@let [bdy] = ...; Just status = ...@) and so crashed on a
-- signal with the wrong arity or a non-string body; such malformed
-- messages are now ignored.
callback :: Label -> Signal -> IO ()
callback w sig =
  case signalBody sig of
    [bdy] | Just (status :: String) <- fromVariant bdy ->
      postGUIAsync $ labelSetMarkup w status
    _ -> return ()
-- | Return a new XMonad log widget
-- | Return a new XMonad log widget: a label that subscribes to the
-- DBus log signal once it has been realized on screen.
xmonadLogNew :: IO Widget
xmonadLogNew = do
  l <- labelNew (Nothing :: Maybe String)
  -- Defer the DBus subscription until the widget is realized.
  _ <- on l realize $ setupDbus l
  widgetShowAll l
  return (toWidget l)
{-# DEPRECATED xmonadLogNew "Use taffyPagerNew instead." #-}
| Undeterminant/taffybar | src/System/Taffybar/XMonadLog.hs | bsd-3-clause | 4,193 | 0 | 12 | 1,042 | 764 | 424 | 340 | 67 | 2 |
{-# LANGUAGE ExistentialQuantification #-}
{-# language DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE DerivingStrategies #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module JSONTest where
import Control.Monad.IO.Class (MonadIO)
import Data.Aeson hiding (Key)
import qualified Data.Vector as V (fromList)
import Test.HUnit (assertBool)
import Test.Hspec.Expectations ()
import Database.Persist
import Database.Persist.Postgresql.JSON
import PgInit
-- Schema: a single table with one JSON column.
share [mkPersist persistSettings, mkMigrate "jsonTestMigrate"] [persistLowerCase|
TestValue
    json Value
    deriving Show
|]
-- | Delete every row of the test table.
cleanDB :: (BaseBackend backend ~ SqlBackend, PersistQueryWrite backend, MonadIO m)
        => ReaderT backend m ()
cleanDB = deleteWhere ([] :: [Filter TestValue])
-- | The empty JSON array.
emptyArr :: Value
emptyArr = toJSON ([] :: [Value])
-- | Insert a JSON value, returning the key of the new row.
insert' :: (MonadIO m, PersistStoreWrite backend, BaseBackend backend ~ SqlBackend)
        => Value -> ReaderT backend m (Key TestValue)
insert' = insert . TestValue
-- | Assert that a query result consists of exactly the expected keys:
-- the lengths agree and every expected key occurs among the results.
matchKeys :: (Show record, Show (Key record), MonadIO m, Eq (Key record))
          => [Key record] -> [Entity record] -> m ()
matchKeys ys xs = do
  msg1 `assertBoolIO` (xLen == yLen)
  forM_ ys $ \y -> msg2 y `assertBoolIO` (y `elem` ks)
  where ks = entityKey <$> xs
        xLen = length xs
        yLen = length ys
        msg1 = mconcat
          [ "\nexpected: ", show yLen
          , "\n but got: ", show xLen
          , "\n[xs: ", show xs, "]"
          , "\n[ys: ", show ys, "]"
          ]
        msg2 y = mconcat
          [ "key \"", show y
          , "\" not in result:\n  ", show ks
          ]
-- | Run the migration once and insert the shared fixture rows.
setup :: IO TestKeys
setup = asIO $ runConn_ $ do
  void $ runMigrationSilent jsonTestMigrate
  testKeys
-- | Wipe the fixture table.
teardown :: IO ()
teardown = asIO $ runConn_ $ do
  cleanDB
-- | 'shouldBe' lifted into any 'MonadIO'.
shouldBeIO :: (Show a, Eq a, MonadIO m) => a -> a -> m ()
shouldBeIO x y = liftIO $ shouldBe x y
-- | 'assertBool' lifted into any 'MonadIO'.
assertBoolIO :: MonadIO m => String -> Bool -> m ()
assertBoolIO s b = liftIO $ assertBool s b
-- | Insert one row per interesting JSON shape (null, booleans,
-- numbers, strings, arrays, objects) and return all of their keys.
-- The tests below query against these fixtures.
testKeys :: (Monad m, MonadIO m) => ReaderT SqlBackend m TestKeys
testKeys = do
  nullK <- insert' Null
  boolTK <- insert' $ Bool True
  boolFK <- insert' $ toJSON False
  num0K <- insert' $ Number 0
  num1K <- insert' $ Number 1
  numBigK <- insert' $ toJSON (1234567890 :: Int)
  numFloatK <- insert' $ Number 0.0
  numSmallK <- insert' $ Number 0.0000000000000000123
  numFloat2K <- insert' $ Number 1.5
  -- numBigFloatK will turn into 9876543210.123457 because JSON
  numBigFloatK <- insert' $ toJSON (9876543210.123456789 :: Double)
  strNullK <- insert' $ String ""
  strObjK <- insert' $ String "{}"
  strArrK <- insert' $ String "[]"
  strAK <- insert' $ String "a"
  strTestK <- insert' $ toJSON ("testing" :: Text)
  str2K <- insert' $ String "2"
  strFloatK <- insert' $ String "0.45876"
  arrNullK <- insert' $ Array $ V.fromList []
  arrListK <- insert' $ toJSON [emptyArr,emptyArr,toJSON [emptyArr,emptyArr]]
  arrList2K <- insert' $ toJSON [emptyArr,toJSON [Number 3,Bool False]
                                ,toJSON [emptyArr,toJSON [Object mempty]]
                                ]
  arrFilledK <- insert' $ toJSON [Null, Number 4, String "b"
                                 ,Object mempty, emptyArr
                                 ,object [ "test" .= [Null], "test2" .= String "yes"]
                                 ]
  arrList3K <- insert' $ toJSON [toJSON [String "a"], Number 1]
  arrList4K <- insert' $ toJSON [String "a", String "b", String "c", String "d"]
  objNullK <- insert' $ Object mempty
  objTestK <- insert' $ object ["test" .= Null, "test1" .= String "no"]
  objDeepK <- insert' $ object ["c" .= Number 24.986, "foo" .= object ["deep1" .= Bool True]]
  objEmptyK <- insert' $ object ["" .= Number 9001]
  objFullK <- insert' $ object ["a" .= Number 1, "b" .= Number 2
                               ,"c" .= Number 3, "d" .= Number 4
                               ]
  return TestKeys{..}
-- | Keys of all fixture rows inserted by 'testKeys', one field per
-- JSON shape; field names encode the stored payload.
data TestKeys =
  TestKeys { nullK :: Key TestValue
           , boolTK :: Key TestValue
           , boolFK :: Key TestValue
           , num0K :: Key TestValue
           , num1K :: Key TestValue
           , numBigK :: Key TestValue
           , numFloatK :: Key TestValue
           , numSmallK :: Key TestValue
           , numFloat2K :: Key TestValue
           , numBigFloatK :: Key TestValue
           , strNullK :: Key TestValue
           , strObjK :: Key TestValue
           , strArrK :: Key TestValue
           , strAK :: Key TestValue
           , strTestK :: Key TestValue
           , str2K :: Key TestValue
           , strFloatK :: Key TestValue
           , arrNullK :: Key TestValue
           , arrListK :: Key TestValue
           , arrList2K :: Key TestValue
           , arrFilledK :: Key TestValue
           , objNullK :: Key TestValue
           , objTestK :: Key TestValue
           , objDeepK :: Key TestValue
           , arrList3K :: Key TestValue
           , arrList4K :: Key TestValue
           , objEmptyK :: Key TestValue
           , objFullK :: Key TestValue
           } deriving (Eq, Ord, Show)
specs :: Spec
specs = afterAll_ teardown $ do
beforeAll setup $ do
describe "Testing JSON operators" $ do
describe "@>. object queries" $ do
it "matches an empty Object with any object" $
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson @>. Object mempty] []
[objNullK, objTestK, objDeepK, objEmptyK, objFullK] `matchKeys` vals
it "matches a subset of object properties" $
-- {test: null, test1: no} @>. {test: null} == True
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson @>. object ["test" .= Null]] []
[objTestK] `matchKeys` vals
it "matches a nested object against an empty object at the same key" $
-- {c: 24.986, foo: {deep1: true}} @>. {foo: {}} == True
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson @>. object ["foo" .= object []]] []
[objDeepK] `matchKeys` vals
it "doesn't match a nested object against a string at the same key" $
-- {c: 24.986, foo: {deep1: true}} @>. {foo: nope} == False
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson @>. object ["foo" .= String "nope"]] []
[] `matchKeys` vals
it "matches a nested object when the query object is identical" $
-- {c: 24.986, foo: {deep1: true}} @>. {foo: {deep1: true}} == True
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson @>. (object ["foo" .= object ["deep1" .= True]])] []
[objDeepK] `matchKeys` vals
it "doesn't match a nested object when queried with that exact object" $
-- {c: 24.986, foo: {deep1: true}} @>. {deep1: true} == False
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson @>. object ["deep1" .= True]] []
[] `matchKeys` vals
describe "@>. array queries" $ do
it "matches an empty Array with any list" $
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson @>. emptyArr] []
[arrNullK, arrListK, arrList2K, arrFilledK, arrList3K, arrList4K] `matchKeys` vals
it "matches list when queried with subset (1 item)" $
-- [null, 4, 'b', {}, [], {test: [null], test2: 'yes'}] @>. [4] == True
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson @>. toJSON [4 :: Int]] []
[arrFilledK] `matchKeys` vals
it "matches list when queried with subset (2 items)" $
-- [null, 4, 'b', {}, [], {test: [null], test2: 'yes'}] @>. [null,'b'] == True
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson @>. toJSON [Null, String "b"]] []
[arrFilledK] `matchKeys` vals
it "doesn't match list when queried with intersecting list (1 match, 1 diff)" $
-- [null, 4, 'b', {}, [], {test: [null], test2: 'yes'}] @>. [null,'d'] == False
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson @>. toJSON [emptyArr, String "d"]] []
[] `matchKeys` vals
it "matches list when queried with same list in different order" $
-- [null, 4, 'b', {}, [], {test: [null], test2: 'yes'}] @>.
-- [[],'b',{test: [null],test2: 'yes'},4,null,{}] == True
\TestKeys {..} -> runConnAssert $ do
let queryList =
toJSON [ emptyArr, String "b"
, object [ "test" .= [Null], "test2" .= String "yes"]
, Number 4, Null, Object mempty ]
vals <- selectList [TestValueJson @>. queryList ] []
[arrFilledK] `matchKeys` vals
it "doesn't match list when queried with same list + 1 item" $
-- [null,4,'b',{},[],{test:[null],test2:'yes'}] @>.
-- [null,4,'b',{},[],{test:[null],test2: 'yes'}, false] == False
\TestKeys {..} -> runConnAssert $ do
let testList =
toJSON [ Null, Number 4, String "b", Object mempty, emptyArr
, object [ "test" .= [Null], "test2" .= String "yes"]
, Bool False ]
vals <- selectList [TestValueJson @>. testList] []
[] `matchKeys` vals
it "matches list when it shares an empty object with the query list" $
-- [null,4,'b',{},[],{test: [null],test2: 'yes'}] @>. [{}] == True
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson @>. toJSON [Object mempty]] []
[arrFilledK] `matchKeys` vals
it "matches list with nested list, when queried with an empty nested list" $
-- [null,4,'b',{},[],{test:[null],test2:'yes'}] @>. [{test:[]}] == True
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson @>. toJSON [object ["test" .= emptyArr]]] []
[arrFilledK] `matchKeys` vals
it "doesn't match list with nested list, when queried with a diff. nested list" $
-- [null,4,"b",{},[],{"test":[null],"test2":"yes"}] @>.
-- [{"test1":[null]}] == False
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson @>. toJSON [object ["test1" .= [Null]]]] []
[] `matchKeys` vals
it "matches many nested lists when queried with empty nested list" $
-- [[],[],[[],[]]] @>. [[]] == True
-- [[],[3,false],[[],[{}]]] @>. [[]] == True
-- [null,4,"b",{},[],{"test":[null],"test2":"yes"}] @>. [[]] == True
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson @>. toJSON [emptyArr]] []
[arrListK,arrList2K,arrFilledK, arrList3K] `matchKeys` vals
it "matches nested list when queried with a subset of that list" $
-- [[],[3,false],[[],[{}]]] @>. [[3]] == True
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson @>. toJSON [[3 :: Int]]] []
[arrList2K] `matchKeys` vals
it "doesn't match nested list againts a partial intersection of that list" $
-- [[],[3,false],[[],[{}]]] @>. [[true,3]] == False
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson @>. toJSON [[Bool True, Number 3]]] []
[] `matchKeys` vals
it "matches list when queried with raw number contained in the list" $
-- [null,4,"b",{},[],{"test":[null],"test2":"yes"}] @>. 4 == True
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson @>. Number 4] []
[arrFilledK] `matchKeys` vals
it "doesn't match list when queried with raw value not contained in the list" $
-- [null,4,"b",{},[],{"test":[null],"test2":"yes"}] @>. 99 == False
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson @>. Number 99] []
[] `matchKeys` vals
it "matches list when queried with raw string contained in the list" $
-- [null,4,"b",{},[],{"test":[null],"test2":"yes"}] @>. "b" == True
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson @>. String "b"] []
[arrFilledK, arrList4K] `matchKeys` vals
it "doesn't match list with empty object when queried with \"{}\" " $
-- [null,4,"b",{},[],{"test":[null],"test2":"yes"}] @>. "{}" == False
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson @>. String "{}"] []
[strObjK] `matchKeys` vals
it "doesnt match list with nested object when queried with object (not in list)" $
-- [null,4,"b",{},[],{"test":[null],"test2":"yes"}] @>.
-- {"test":[null],"test2":"yes"} == False
\TestKeys {..} -> runConnAssert $ do
let queryObject = object [ "test" .= [Null], "test2" .= String "yes"]
vals <- selectList [TestValueJson @>. queryObject ] []
[] `matchKeys` vals
describe "@>. string queries" $ do
it "matches identical strings" $
-- "testing" @>. "testing" == True
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson @>. String "testing"] []
[strTestK] `matchKeys` vals
it "doesnt match case insensitive" $
-- "testing" @>. "Testing" == False
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson @>. String "Testing"] []
[] `matchKeys` vals
it "doesn't match substrings" $
-- "testing" @>. "test" == False
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson @>. String "test"] []
[] `matchKeys` vals
it "doesn't match strings with object keys" $
-- "testing" @>. {"testing":1} == False
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson @>. object ["testing" .= Number 1]] []
[] `matchKeys` vals
describe "@>. number queries" $ do
it "matches identical numbers" $
-- 1 @>. 1 == True
-- [1] @>. 1 == True
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson @>. toJSON (1 :: Int)] []
[num1K, arrList3K] `matchKeys` vals
it "matches numbers when queried with float" $
-- 0 @>. 0.0 == True
-- 0.0 @>. 0.0 == True
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson @>. toJSON (0.0 :: Double)] []
[num0K,numFloatK] `matchKeys` vals
it "does not match numbers when queried with a substring of that number" $
-- 1234567890 @>. 123456789 == False
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson @>. toJSON (123456789 :: Int)] []
[] `matchKeys` vals
it "does not match number when queried with different number" $
-- 1234567890 @>. 234567890 == False
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson @>. toJSON (234567890 :: Int)] []
[] `matchKeys` vals
it "does not match number when queried with string of that number" $
-- 1 @>. "1" == False
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson @>. String "1"] []
[] `matchKeys` vals
it "does not match number when queried with list of digits" $
-- 1234567890 @>. [1,2,3,4,5,6,7,8,9,0] == False
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson @>. toJSON ([1,2,3,4,5,6,7,8,9,0] :: [Int])] []
[] `matchKeys` vals
describe "@>. boolean queries" $ do
it "matches identical booleans (True)" $
-- true @>. true == True
-- false @>. true == False
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson @>. toJSON True] []
[boolTK] `matchKeys` vals
it "matches identical booleans (False)" $
-- false @>. false == True
-- true @>. false == False
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson @>. Bool False] []
[boolFK] `matchKeys` vals
it "does not match boolean with string of boolean" $
-- true @>. "true" == False
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson @>. String "true"] []
[] `matchKeys` vals
describe "@>. null queries" $ do
it "matches nulls" $
-- null @>. null == True
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson @>. Null] []
[nullK,arrFilledK] `matchKeys` vals
it "does not match null with string of null" $
-- null @>. "null" == False
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson @>. String "null"] []
[] `matchKeys` vals
describe "<@. queries" $ do
it "matches subobject when queried with superobject" $
-- {} <@. {"test":null,"test1":"no","blabla":[]} == True
-- {"test":null,"test1":"no"} <@. {"test":null,"test1":"no","blabla":[]} == True
\TestKeys {..} -> runConnAssert $ do
let queryObject = object ["test" .= Null
, "test1" .= String "no"
, "blabla" .= emptyArr
]
vals <- selectList [TestValueJson <@. queryObject] []
[objNullK,objTestK] `matchKeys` vals
it "matches raw values and sublists when queried with superlist" $
-- [] <@. [null,4,"b",{},[],{"test":[null],"test2":"yes"},false] == True
-- null <@. [null,4,"b",{},[],{"test":[null],"test2":"yes"},false] == True
-- false <@. [null,4,"b",{},[],{"test":[null],"test2":"yes"},false] == True
-- [null,4,"b",{},[],{"test":[null],"test2":"yes"}] <@.
-- [null,4,"b",{},[],{"test":[null],"test2":"yes"},false] == True
\TestKeys {..} -> runConnAssert $ do
let queryList =
toJSON [ Null, Number 4, String "b", Object mempty, emptyArr
, object [ "test" .= [Null], "test2" .= String "yes"]
, Bool False ]
vals <- selectList [TestValueJson <@. queryList ] []
[arrNullK,arrFilledK,boolFK,nullK] `matchKeys` vals
it "matches identical strings" $
-- "a" <@. "a" == True
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson <@. String "a"] []
[strAK] `matchKeys` vals
it "matches identical big floats" $
-- 9876543210.123457 <@ 9876543210.123457 == True
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson <@. Number 9876543210.123457] []
[numBigFloatK] `matchKeys` vals
it "doesn't match different big floats" $
-- 9876543210.123457 <@. 9876543210.123456789 == False
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson <@. Number 9876543210.123456789] []
[] `matchKeys` vals
it "matches nulls" $
-- null <@. null == True
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson <@. Null] []
[nullK] `matchKeys` vals
describe "?. queries" $ do
it "matches top level keys and not the keys of nested objects" $
-- {"test":null,"test1":"no"} ?. "test" == True
-- [null,4,"b",{},[],{"test":[null],"test2":"yes"}] ?. "test" == False
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson ?. "test"] []
[objTestK] `matchKeys` vals
it "doesn't match nested key" $
-- {"c":24.986,"foo":{"deep1":true"}} ?. "deep1" == False
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson ?. "deep1"] []
[] `matchKeys` vals
it "matches \"{}\" but not empty object when queried with \"{}\"" $
-- "{}" ?. "{}" == True
-- {} ?. "{}" == False
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson ?. "{}"] []
[strObjK] `matchKeys` vals
it "matches raw empty str and empty str key when queried with \"\"" $
---- {} ?. "" == False
---- "" ?. "" == True
---- {"":9001} ?. "" == True
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson ?. ""] []
[strNullK,objEmptyK] `matchKeys` vals
it "matches lists containing string value when queried with raw string value" $
-- [null,4,"b",{},[],{"test":[null],"test2":"yes"}] ?. "b" == True
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson ?. "b"] []
[arrFilledK,arrList4K,objFullK] `matchKeys` vals
it "matches lists, objects, and raw values correctly when queried with string" $
-- [["a"]] ?. "a" == False
-- "a" ?. "a" == True
-- ["a","b","c","d"] ?. "a" == True
-- {"a":1,"b":2,"c":3,"d":4} ?. "a" == True
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson ?. "a"] []
[strAK,arrList4K,objFullK] `matchKeys` vals
it "matches string list but not real list when queried with \"[]\"" $
-- "[]" ?. "[]" == True
-- [] ?. "[]" == False
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson ?. "[]"] []
[strArrK] `matchKeys` vals
it "does not match null when queried with string null" $
-- null ?. "null" == False
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson ?. "null"] []
[] `matchKeys` vals
it "does not match bool whe nqueried with string bool" $
-- true ?. "true" == False
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson ?. "true"] []
[] `matchKeys` vals
describe "?|. queries" $ do
it "matches raw vals, lists, objects, and nested objects" $
-- "a" ?|. ["a","b","c"] == True
-- [["a"],1] ?|. ["a","b","c"] == False
-- [null,4,"b",{},[],{"test":[null],"test2":"yes"}] ?|. ["a","b","c"] == True
-- ["a","b","c","d"] ?|. ["a","b","c"] == True
-- {"a":1,"b":2,"c":3,"d":4} ?|. ["a","b","c"] == True
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson ?|. ["a","b","c"]] []
[strAK,arrFilledK,objDeepK,arrList4K,objFullK] `matchKeys` vals
it "matches str object but not object when queried with \"{}\"" $
-- "{}" ?|. ["{}"] == True
-- {} ?|. ["{}"] == False
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson ?|. ["{}"]] []
[strObjK] `matchKeys` vals
it "doesn't match superstrings when queried with substring" $
-- [null,4,"b",{},[],{"test":[null],"test2":"yes"}] ?|. ["test"] == False
-- "testing" ?|. ["test"] == False
-- {"test":null,"test1":"no"} ?|. ["test"] == True
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson ?|. ["test"]] []
[objTestK] `matchKeys` vals
it "doesn't match nested keys" $
-- {"c":24.986,"foo":{"deep1":true"}} ?|. ["deep1"] == False
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson ?|. ["deep1"]] []
[] `matchKeys` vals
it "doesn't match anything when queried with empty list" $
-- ANYTHING ?|. [] == False
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson ?|. []] []
[] `matchKeys` vals
it "doesn't match raw, non-string, values when queried with strings" $
-- true ?|. ["true","null","1"] == False
-- null ?|. ["true","null","1"] == False
-- 1 ?|. ["true","null","1"] == False
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson ?|. ["true","null","1"]] []
[] `matchKeys` vals
it "matches string array when queried with \"[]\"" $
-- [] ?|. ["[]"] == False
-- "[]" ?|. ["[]"] == True
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson ?|. ["[]"]] []
[strArrK] `matchKeys` vals
describe "?&. queries" $ do
it "matches anything when queried with an empty list" $
-- ANYTHING ?&. [] == True
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson ?&. []] []
flip matchKeys vals [ nullK
, boolTK, boolFK
, num0K, num1K, numBigK, numFloatK
, numSmallK, numFloat2K, numBigFloatK
, strNullK, strObjK, strArrK, strAK
, strTestK, str2K, strFloatK
, arrNullK, arrListK, arrList2K
, arrFilledK, arrList3K, arrList4K
, objNullK, objTestK, objDeepK
, objEmptyK, objFullK
]
it "matches raw values, lists, and objects when queried with string" $
-- "a" ?&. ["a"] == True
-- [["a"],1] ?&. ["a"] == False
-- ["a","b","c","d"] ?&. ["a"] == True
-- {"a":1,"b":2,"c":3,"d":4} ?&. ["a"] == True
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson ?&. ["a"]] []
[strAK,arrList4K,objFullK] `matchKeys` vals
it "matches raw values, lists, and objects when queried with multiple string" $
-- [null,4,"b",{},[],{"test":[null],"test2":"yes"}] ?&. ["b","c"] == False
-- {"c":24.986,"foo":{"deep1":true"}} ?&. ["b","c"] == False
-- ["a","b","c","d"] ?&. ["b","c"] == True
-- {"a":1,"b":2,"c":3,"d":4} ?&. ["b","c"] == True
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson ?&. ["b","c"]] []
[arrList4K,objFullK] `matchKeys` vals
it "matches object string when queried with \"{}\"" $
-- {} ?&. ["{}"] == False
-- "{}" ?&. ["{}"] == True
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson ?&. ["{}"]] []
[strObjK] `matchKeys` vals
it "doesn't match superstrings when queried with substring" $
-- [null,4,"b",{},[],{"test":[null],"test2":"yes"}] ?&. ["test"] == False
-- "testing" ?&. ["test"] == False
-- {"test":null,"test1":"no"} ?&. ["test"] == True
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson ?&. ["test"]] []
[objTestK] `matchKeys` vals
it "doesn't match nested keys" $
-- {"c":24.986,"foo":{"deep1":true"}} ?&. ["deep1"] == False
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson ?&. ["deep1"]] []
[] `matchKeys` vals
it "doesn't match anything when there is a partial match" $
-- "a" ?&. ["a","e"] == False
-- ["a","b","c","d"] ?&. ["a","e"] == False
-- {"a":1,"b":2,"c":3,"d":4} ?&. ["a","e"] == False
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson ?&. ["a","e"]] []
[] `matchKeys` vals
it "matches string array when queried with \"[]\"" $
-- [] ?&. ["[]"] == False
-- "[]" ?&. ["[]"] == True
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson ?&. ["[]"]] []
[strArrK] `matchKeys` vals
it "doesn't match null when queried with string null" $
-- THIS WILL FAIL IF THE IMPLEMENTATION USES
-- @ '{null}' @
-- INSTEAD OF
-- @ ARRAY['null'] @
-- null ?&. ["null"] == False
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson ?&. ["null"]] []
[] `matchKeys` vals
it "doesn't match number when queried with str of that number" $
-- [["a"],1] ?&. ["1"] == False
-- "1" ?&. ["1"] == True
\TestKeys {..} -> runConnAssert $ do
str1 <- insert' $ toJSON $ String "1"
vals <- selectList [TestValueJson ?&. ["1"]] []
[str1] `matchKeys` vals
it "doesn't match empty objs or list when queried with empty string" $
-- {} ?&. [""] == False
-- [] ?&. [""] == False
-- "" ?&. [""] == True
-- {"":9001} ?&. [""] == True
\TestKeys {..} -> runConnAssert $ do
vals <- selectList [TestValueJson ?&. [""]] []
[strNullK,objEmptyK] `matchKeys` vals
| paul-rouse/persistent | persistent-postgresql/test/JSONTest.hs | mit | 30,939 | 0 | 32 | 10,584 | 6,842 | 3,524 | 3,318 | 448 | 1 |
{-# LANGUAGE TypeApplications #-}
module Lamdu.Sugar.Convert.PostProcess
( Result(..), def, expr
, makeScheme
) where
import qualified Control.Lens as Lens
import Data.Property (MkProperty')
import qualified Data.Property as Property
import Hyper
import Hyper.Syntax.Scheme (saveScheme)
import Hyper.Unify (UVar)
import Hyper.Unify.Generalize (generalize)
import Lamdu.Calc.Infer (runPureInfer)
import qualified Lamdu.Calc.Term as V
import qualified Lamdu.Calc.Type as T
import qualified Lamdu.Data.Definition as Definition
import qualified Lamdu.Debug as Debug
import Lamdu.Expr.IRef (DefI, ValI, HRef)
import qualified Lamdu.Expr.IRef as ExprIRef
import qualified Lamdu.Expr.Load as ExprLoad
import qualified Lamdu.Sugar.Convert.Input as Input
import qualified Lamdu.Sugar.Convert.Load as Load
import Revision.Deltum.Transaction (Transaction)
import qualified Revision.Deltum.Transaction as Transaction
import Lamdu.Prelude
-- | Shorthand for the Deltum 'Transaction' monad.
type T = Transaction

-- | Outcome of post-processing: either the stored code still type-checks
-- ('GoodExpr'), or inference failed with the given error ('BadExpr').
data Result = GoodExpr | BadExpr (Pure # T.TypeError)
-- | Generalize an inferred value's type into a saved 'T.Scheme'.
--
-- The generalize-then-save action runs in the pure inference monad,
-- seeded with the unification context carried by 'Load.InferOut';
-- inconsistent unification state surfaces as a 'T.TypeError'.
makeScheme ::
    Load.InferOut m ->
    Either (Pure # T.TypeError) (Pure # T.Scheme)
makeScheme (Load.InferOut inferredVal inferContext) =
    generalize (inferredVal ^. hAnn . Input.inferredTypeUVar)
    >>= saveScheme
    & runPureInfer @(V.Scope # UVar) V.emptyScope inferContext
    -- Keep only the scheme; drop the second component of runPureInfer's
    -- result pair (NOTE(review): presumably the final infer state — confirm).
    <&> (^. _1)
-- | Re-infer a definition and write back its updated type scheme.
--
-- Builtin bodies are accepted unchanged.  For expression bodies we re-run
-- inference; on success the stored definition gets the generalized scheme,
-- refreshed frozen dependencies, and a body stripped back to plain IRefs
-- before being written to the store.  On failure the type error is returned
-- and nothing is written.
def :: Monad m => Load.InferFunc (HRef m) -> Debug.Monitors -> DefI m -> T m Result
def infer monitors defI =
    do
        loadedDef <- ExprLoad.def defI <&> void
        case loadedDef ^. Definition.defBody of
            -- Builtins carry no expression to check.
            Definition.BodyBuiltin {} -> pure GoodExpr
            Definition.BodyExpr defExpr ->
                ExprIRef.globalId defI
                & Load.inferDef infer monitors defExpr
                >>= makeScheme
                &
                \case
                Left err -> BadExpr err & pure
                Right scheme ->
                    GoodExpr <$
                    ( loadedDef
                        & Definition.defType .~ scheme
                        -- Recompute the frozen deps from the freshly loaded
                        -- expression so they match the new body.
                        & Definition.defBody . Definition._BodyExpr .
                            Definition.exprFrozenDeps .~
                            Definition.pruneDefExprDeps defExpr
                        -- Keep only the IRef of each node's annotation.
                        & Definition.defBody . Lens.mapped %~
                            (^. hAnn . ExprIRef.iref)
                        & Transaction.writeIRef defI
                    )
-- | Re-infer a standalone definition expression stored behind a property
-- and, on success, refresh its frozen dependencies in place.
expr ::
    Monad m =>
    Load.InferFunc (HRef m) -> Debug.Monitors ->
    MkProperty' (T m) (Definition.Expr (ValI m)) ->
    T m Result
expr infer monitors prop =
    do
        defExprLoaded <- ExprLoad.defExpr prop
        -- TODO: This is code duplication with the above Load.inferCheckDef
        -- & functions inside Load itself
        let inferred = Load.inferDefExpr infer monitors defExprLoaded
        case inferred of
            Left err -> BadExpr err & pure
            Right _ ->
                -- Only the frozen deps are rewritten; the expression
                -- itself is left untouched.
                GoodExpr <$
                Property.modP prop
                (Definition.exprFrozenDeps .~ Definition.pruneDefExprDeps defExprLoaded)
| lamdu/lamdu | src/Lamdu/Sugar/Convert/PostProcess.hs | gpl-3.0 | 3,092 | 0 | 26 | 947 | 760 | 417 | 343 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.ELB.CreateLBCookieStickinessPolicy
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Generates a stickiness policy with sticky session lifetimes controlled by
-- the lifetime of the browser (user-agent) or a specified expiration period.
-- This policy can be associated only with HTTP/HTTPS listeners.
--
-- When a load balancer implements this policy, the load balancer uses a
-- special cookie to track the backend server instance for each request. When
-- the load balancer receives a request, it first checks to see if this cookie
-- is present in the request. If so, the load balancer sends the request to the
-- application server specified in the cookie. If not, the load balancer sends
-- the request to a server that is chosen based on the existing load balancing
-- algorithm.
--
-- A cookie is inserted into the response for binding subsequent requests from
-- the same user to that server. The validity of the cookie is based on the
-- cookie expiration time, which is specified in the policy configuration.
--
-- For more information, see <http://docs.aws.amazon.com/ElasticLoadBalancing/latest/DeveloperGuide/US_StickySessions.html#US_EnableStickySessionsLBCookies Enabling Duration-Based Session Stickiness> in the /Elastic Load Balancing Developer Guide/.
--
-- <http://docs.aws.amazon.com/ElasticLoadBalancing/latest/APIReference/API_CreateLBCookieStickinessPolicy.html>
module Network.AWS.ELB.CreateLBCookieStickinessPolicy
(
-- * Request
CreateLBCookieStickinessPolicy
-- ** Request constructor
, createLBCookieStickinessPolicy
-- ** Request lenses
, clbcspCookieExpirationPeriod
, clbcspLoadBalancerName
, clbcspPolicyName
-- * Response
, CreateLBCookieStickinessPolicyResponse
-- ** Response constructor
, createLBCookieStickinessPolicyResponse
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.ELB.Types
import qualified GHC.Exts
-- | Request parameters for the @CreateLBCookieStickinessPolicy@ call.
data CreateLBCookieStickinessPolicy = CreateLBCookieStickinessPolicy
    { _clbcspCookieExpirationPeriod :: Maybe Integer -- ^ cookie lifetime in seconds; 'Nothing' means browser-session lifetime
    , _clbcspLoadBalancerName       :: Text          -- ^ name of the load balancer
    , _clbcspPolicyName             :: Text          -- ^ policy name, unique per load balancer
    } deriving (Eq, Ord, Read, Show)
-- | 'CreateLBCookieStickinessPolicy' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'clbcspCookieExpirationPeriod' @::@ 'Maybe' 'Integer'
--
-- * 'clbcspLoadBalancerName' @::@ 'Text'
--
-- * 'clbcspPolicyName' @::@ 'Text'
--
createLBCookieStickinessPolicy :: Text -- ^ 'clbcspLoadBalancerName'
                               -> Text -- ^ 'clbcspPolicyName'
                               -> CreateLBCookieStickinessPolicy
createLBCookieStickinessPolicy lbName policyName =
    CreateLBCookieStickinessPolicy
        { _clbcspCookieExpirationPeriod = Nothing
        , _clbcspLoadBalancerName       = lbName
        , _clbcspPolicyName             = policyName
        }
-- | The time period in seconds after which the cookie should be considered
-- stale. Not specifying this parameter indicates that the sticky session will
-- last for the duration of the browser session.
clbcspCookieExpirationPeriod :: Lens' CreateLBCookieStickinessPolicy (Maybe Integer)
clbcspCookieExpirationPeriod = lens _clbcspCookieExpirationPeriod setField
  where
    setField s a = s { _clbcspCookieExpirationPeriod = a }
-- | The name associated with the load balancer.
clbcspLoadBalancerName :: Lens' CreateLBCookieStickinessPolicy Text
clbcspLoadBalancerName = lens _clbcspLoadBalancerName setField
  where
    setField s a = s { _clbcspLoadBalancerName = a }
-- | The name of the policy being created. The name must be unique within the set
-- of policies for this load balancer.
clbcspPolicyName :: Lens' CreateLBCookieStickinessPolicy Text
clbcspPolicyName = lens _clbcspPolicyName setField
  where
    setField s a = s { _clbcspPolicyName = a }
-- | Empty response marker: this call returns no data on success.
data CreateLBCookieStickinessPolicyResponse = CreateLBCookieStickinessPolicyResponse
    deriving (Eq, Ord, Read, Show, Generic)

-- | 'CreateLBCookieStickinessPolicyResponse' constructor.
createLBCookieStickinessPolicyResponse :: CreateLBCookieStickinessPolicyResponse
createLBCookieStickinessPolicyResponse = CreateLBCookieStickinessPolicyResponse
-- Every request for this operation targets the service root path.
instance ToPath CreateLBCookieStickinessPolicy where
    toPath = const "/"
-- Serialise the request fields as AWS Query parameters.
instance ToQuery CreateLBCookieStickinessPolicy where
    toQuery q = mconcat
        [ "CookieExpirationPeriod" =? _clbcspCookieExpirationPeriod q
        , "LoadBalancerName" =? _clbcspLoadBalancerName q
        , "PolicyName" =? _clbcspPolicyName q
        ]
-- No methods overridden: the class defaults supply the headers.
instance ToHeaders CreateLBCookieStickinessPolicy

instance AWSRequest CreateLBCookieStickinessPolicy where
    type Sv CreateLBCookieStickinessPolicy = ELB
    type Rs CreateLBCookieStickinessPolicy = CreateLBCookieStickinessPolicyResponse

    -- POST the named action; the reply carries no payload, so a null
    -- response parser producing the empty marker value suffices.
    request = post "CreateLBCookieStickinessPolicy"
    response = nullResponse CreateLBCookieStickinessPolicyResponse
| kim/amazonka | amazonka-elb/gen/Network/AWS/ELB/CreateLBCookieStickinessPolicy.hs | mpl-2.0 | 5,829 | 0 | 9 | 1,096 | 484 | 301 | 183 | 61 | 1 |
-- by Kirill Elagin
-- | Arrange the numbers so that their concatenation forms the largest
-- possible number, and return that concatenation.
--
-- BUG FIX: the previous version concatenated the inputs in their given
-- order, which is wrong — e.g. [21, 2] produced "212" instead of "221".
-- The correct greedy rule places @a@ before @b@ whenever
-- @show a ++ show b >= show b ++ show a@ (a total order, so sorting by
-- it is well defined).
largest_number :: [Int] -> String
largest_number = concatMap show . orderGreedily
  where
    -- Insertion sort under the concatenation comparator (inputs are
    -- small, so O(n^2) is fine and keeps this Prelude-only).
    orderGreedily = foldr place []
    place x [] = [x]
    place x (y:ys)
      | goesFirst x y = x : y : ys
      | otherwise     = y : place x ys
    -- True when a should precede b in the answer.
    goesFirst a b = show a ++ show b >= show b ++ show a
-- | Read (and ignore) the count line, then the space-separated numbers,
-- and print the largest concatenation.
main :: IO ()
main = do
    _countLine <- getLine
    numberLine <- getLine
    let numbers = map read (words numberLine)
    putStrLn (largest_number numbers)
| xunilrj/sandbox | courses/coursera-sandiego-algorithms/algorithmic-toolbox/assignment002/largest_number/largest_number.hs | apache-2.0 | 232 | 0 | 11 | 52 | 88 | 43 | 45 | 7 | 1 |
{-
This example takes logstash messages from a redis queue and stores them into elasticsearch.
It will print to stdout all errors.
-}
module Main where
import System.Environment (getArgs)
import Control.Monad (when)
import Data.Conduit
import qualified Data.Conduit.List as CL
import Data.Conduit.ElasticSearch
import Data.Conduit.Redis
import Logstash.Message
import Control.Monad.IO.Class (liftIO)
import Data.Aeson
import qualified Data.ByteString.Char8 as BS
import qualified Data.ByteString.Lazy.Char8 as BSL
import Data.Maybe (mapMaybe)
import Data.Conduit.Misc
-- | Terminal handler for indexing results: print each failure (the
-- original message paired with the error value) and ignore successes.
endsink :: (MonadResource m) => Either (LogstashMessage, Value) Value -> m ()
endsink result =
    case result of
        Left failed -> liftIO (print failed)
        Right _     -> return ()
-- | Entry point: pops Logstash messages from a Redis list in batches and
-- stores them into ElasticSearch, printing every message that fails.
main :: IO ()
main = do
    args <- getArgs
    when (length args /= 5) (error "Usage: redis2es redishost redisport redislist eshost esport")
    -- Safe irrefutable match: the length check above guarantees 5 elements.
    let [redishost, redisport, redislist, eshost, esport] = args
    runResourceT $ redisSource redishost (read redisport) (BS.pack redislist) 100 1
        $= concatFlush 100 -- convert to a flush conduit
        $= mapFlushMaybe (decode . BSL.fromStrict) -- decode the json messages
        $= groupFlush -- regroup lists
        $= esConduit Nothing (BS.pack eshost) (read esport) "logstash" -- send to ES
        $$ CL.mapM_ (mapM_ endsink)
| bartavelle/hslogstash | examples/RedisToElasticsearch.hs | bsd-3-clause | 1,362 | 0 | 16 | 297 | 354 | 196 | 158 | 28 | 1 |
module Types ( Point, Line, IPoint, ILine ) where
type Point = (Double,Double)
type Line = (Point,Point)
type IPoint = (Int,Point)
type ILine = (IPoint,IPoint)
| mainland/dph | icebox/examples/delaunay/Types.hs | bsd-3-clause | 165 | 0 | 5 | 30 | 66 | 44 | 22 | 5 | 0 |
{-
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[StgSyn]{Shared term graph (STG) syntax for spineless-tagless code generation}
This data type represents programs just before code generation (conversion to
@Cmm@): basically, what we have is a stylised form of @CoreSyntax@, the style
being one that happens to be ideally suited to spineless tagless code
generation.
-}
{-# LANGUAGE CPP #-}
module StgSyn (
GenStgArg(..),
GenStgTopBinding(..), GenStgBinding(..), GenStgExpr(..), GenStgRhs(..),
GenStgAlt, AltType(..),
UpdateFlag(..), isUpdatable,
StgBinderInfo,
noBinderInfo, stgSatOcc, stgUnsatOcc, satCallsOnly,
combineStgBinderInfo,
-- a set of synonyms for the most common (only :-) parameterisation
StgArg,
StgTopBinding, StgBinding, StgExpr, StgRhs, StgAlt,
-- a set of synonyms to distinguish in- and out variants
InStgArg, InStgTopBinding, InStgBinding, InStgExpr, InStgRhs, InStgAlt,
OutStgArg, OutStgTopBinding, OutStgBinding, OutStgExpr, OutStgRhs, OutStgAlt,
-- StgOp
StgOp(..),
-- utils
topStgBindHasCafRefs, stgArgHasCafRefs, stgRhsArity,
isDllConApp,
stgArgType,
stripStgTicksTop,
pprStgBinding, pprStgTopBindings
) where
#include "HsVersions.h"
import GhcPrelude
import CoreSyn ( AltCon, Tickish )
import CostCentre ( CostCentreStack )
import Data.ByteString ( ByteString )
import Data.List ( intersperse )
import DataCon
import DynFlags
import FastString
import ForeignCall ( ForeignCall )
import Id
import IdInfo ( mayHaveCafRefs )
import Literal ( Literal, literalType )
import Module ( Module )
import Outputable
import Packages ( isDllName )
import Platform
import PprCore ( {- instances -} )
import PrimOp ( PrimOp, PrimCall )
import TyCon ( PrimRep(..), TyCon )
import Type ( Type )
import RepType ( typePrimRep1 )
import Unique ( Unique )
import Util
{-
************************************************************************
* *
\subsection{@GenStgBinding@}
* *
************************************************************************
As usual, expressions are interesting; other things are boring. Here
are the boring things [except note the @GenStgRhs@], parameterised
with respect to binder and occurrence information (just as in
@CoreSyn@):
-}
-- | A top-level binding.
data GenStgTopBinding bndr occ
-- See Note [CoreSyn top-level string literals]
  = StgTopLifted (GenStgBinding bndr occ)
    -- ^ An ordinary (lifted) top-level binding group.
  | StgTopStringLit bndr ByteString
    -- ^ A top-level string literal, kept as raw bytes.
-- | A non-recursive or recursive group of STG bindings, parameterised over
-- binder and occurrence types like the rest of the AST.
data GenStgBinding bndr occ
  = StgNonRec bndr (GenStgRhs bndr occ)
  | StgRec [(bndr, GenStgRhs bndr occ)]
{-
************************************************************************
* *
\subsection{@GenStgArg@}
* *
************************************************************************
-}
-- | An STG argument.  Arguments are atoms only — a variable occurrence or
-- a literal — never arbitrary expressions.
data GenStgArg occ
  = StgVarArg occ
  | StgLitArg Literal
-- | Does this constructor application refer to anything in a different
-- *Windows* DLL?  If so, we can't allocate it statically.  On every
-- platform other than Windows/MinGW the answer is trivially 'False'.
isDllConApp :: DynFlags -> Module -> DataCon -> [StgArg] -> Bool
isDllConApp dflags this_mod con args
  | platformOS (targetPlatform dflags) /= OSMinGW32 = False
  | otherwise
  = isDllName dflags this_mod (dataConName con) || any argInDll args
  where
    -- NB: typePrimRep1 is legit because any free variables won't have
    -- unlifted type (there are no unlifted things at top level)
    argInDll :: StgArg -> Bool
    argInDll (StgVarArg v) = isAddrRep (typePrimRep1 (idType v))
                          && isDllName dflags this_mod (idName v)
    argInDll _ = False
-- True of machine addresses; these are the things that don't
-- work across DLLs. The key point here is that VoidRep comes
-- out False, so that a top level nullary GADT constructor is
-- False for isDllConApp
--
--    data T a where
--      T1 :: T Int
--
-- gives
--
--    T1 :: forall a. (a~Int) -> T a
--
-- and hence the top-level binding
--
--    $WT1 :: T Int
--    $WT1 = T1 Int (Coercion (Refl Int))
--
-- The coercion argument here gets VoidRep
isAddrRep :: PrimRep -> Bool
isAddrRep rep =
    case rep of
        AddrRep     -> True
        LiftedRep   -> True
        UnliftedRep -> True
        _other      -> False
-- | Type of an @StgArg@
--
-- Very half baked because we have lost the type arguments.
stgArgType :: StgArg -> Type
stgArgType arg =
    case arg of
        StgVarArg v   -> idType v
        StgLitArg lit -> literalType lit
-- | Strip ticks satisfying the predicate from the top of an STG
-- expression, returning the stripped ticks (outermost first) alongside
-- the remaining expression.
stripStgTicksTop :: (Tickish Id -> Bool) -> StgExpr -> ([Tickish Id], StgExpr)
stripStgTicksTop p = peel []
  where
    peel acc (StgTick t e)
      | p t = peel (t : acc) e
    peel acc e = (reverse acc, e)
{-
************************************************************************
* *
\subsection{STG expressions}
* *
************************************************************************
The @GenStgExpr@ data type is parameterised on binder and occurrence
info, as before.
************************************************************************
* *
\subsubsection{@GenStgExpr@ application}
* *
************************************************************************
An application is of a function to a list of atoms [not expressions].
Operationally, we want to push the arguments on the stack and call the
function. (If the arguments were expressions, we would have to build
their closures first.)
There is no constructor for a lone variable; it would appear as
@StgApp var []@.
-}
-- | An STG expression, parameterised on binder ('bndr') and occurrence
-- ('occ') info as described in the module header.
data GenStgExpr bndr occ
  = StgApp
        occ             -- function
        [GenStgArg occ] -- arguments; may be empty

{-
************************************************************************
*                                                                      *
\subsubsection{@StgConApp@ and @StgPrimApp@---saturated applications}
*                                                                      *
************************************************************************

There are specialised forms of application, for constructors,
primitives, and literals.
-}

  | StgLit      Literal

        -- StgConApp is vital for returning unboxed tuples or sums
        -- which can't be let-bound first
  | StgConApp   DataCon
                [GenStgArg occ] -- Saturated
                [Type]          -- See Note [Types in StgConApp] in UnariseStg

  | StgOpApp    StgOp           -- Primitive op or foreign call
                [GenStgArg occ] -- Saturated.
                Type            -- Result type
                                -- We need to know this so that we can
                                -- assign result registers

{-
************************************************************************
*                                                                      *
\subsubsection{@StgLam@}
*                                                                      *
************************************************************************

StgLam is used *only* during CoreToStg's work.  Before CoreToStg has
finished it encodes (\x -> e) as (let f = \x -> e in f)
-}

  | StgLam
        [bndr]
        StgExpr    -- Body of lambda

{-
************************************************************************
*                                                                      *
\subsubsection{@GenStgExpr@: case-expressions}
*                                                                      *
************************************************************************

This has the same boxed/unboxed business as Core case expressions.
-}

  | StgCase
        (GenStgExpr bndr occ)
                    -- the thing to examine

        bndr        -- binds the result of evaluating the scrutinee

        AltType

        [GenStgAlt bndr occ]
                    -- The DEFAULT case is always *first*
                    -- if it is there at all

{-
************************************************************************
*                                                                      *
\subsubsection{@GenStgExpr@: @let(rec)@-expressions}
*                                                                      *
************************************************************************

The various forms of let(rec)-expression encode most of the
interesting things we want to do.
\begin{enumerate}
\item
\begin{verbatim}
let-closure x = [free-vars] [args] expr
in e
\end{verbatim}
is equivalent to
\begin{verbatim}
let x = (\free-vars -> \args -> expr) free-vars
\end{verbatim}
\tr{args} may be empty (and is for most closures).  It isn't under
circumstances like this:
\begin{verbatim}
let x = (\y -> y+z)
\end{verbatim}
This gets mangled to
\begin{verbatim}
let-closure x = [z] [y] (y+z)
\end{verbatim}
The idea is that we compile code for @(y+z)@ in an environment in which
@z@ is bound to an offset from \tr{Node}, and @y@ is bound to an
offset from the stack pointer.

(A let-closure is an @StgLet@ with a @StgRhsClosure@ RHS.)

\item
\begin{verbatim}
let-constructor x = Constructor [args]
in e
\end{verbatim}

(A let-constructor is an @StgLet@ with a @StgRhsCon@ RHS.)

\item
Letrec-expressions are essentially the same deal as
let-closure/let-constructor, so we use a common structure and
distinguish between them with an @is_recursive@ boolean flag.

\item
\begin{verbatim}
let-unboxed u = an arbitrary arithmetic expression in unboxed values
in e
\end{verbatim}
All the stuff on the RHS must be fully evaluated.
No function calls either!

(We've backed away from this toward case-expressions with
suitably-magical alts ...)

\item
~[Advanced stuff here!  Not to start with, but makes pattern matching
generate more efficient code.]

\begin{verbatim}
let-escapes-not fail = expr
in e'
\end{verbatim}
Here the idea is that @e'@ guarantees not to put @fail@ in a data structure,
or pass it to another function.  All @e'@ will ever do is tail-call @fail@.
Rather than build a closure for @fail@, all we need do is to record the stack
level at the moment of the @let-escapes-not@; then entering @fail@ is just
a matter of adjusting the stack pointer back down to that point and entering
the code for it.

Another example:
\begin{verbatim}
f x y = let z = huge-expression in
        if y==1 then z else
        if y==2 then z else
        1
\end{verbatim}

(A let-escapes-not is an @StgLetNoEscape@.)

\item
We may eventually want:
\begin{verbatim}
let-literal x = Literal
in e
\end{verbatim}
\end{enumerate}

And so the code for let(rec)-things:
-}

  | StgLet
        (GenStgBinding bndr occ)    -- right hand sides (see below)
        (GenStgExpr bndr occ)       -- body

  | StgLetNoEscape
        (GenStgBinding bndr occ)    -- right hand sides (see below)
        (GenStgExpr bndr occ)       -- body

{-
%************************************************************************
%*                                                                      *
\subsubsection{@GenStgExpr@: @hpc@, @scc@ and other debug annotations}
%*                                                                      *
%************************************************************************

Finally for @hpc@ expressions we introduce a new STG construct.
-}

  | StgTick
    (Tickish bndr)
    (GenStgExpr bndr occ)       -- sub expression

-- END of GenStgExpr
{-
************************************************************************
* *
\subsection{STG right-hand sides}
* *
************************************************************************
Here's the rest of the interesting stuff for @StgLet@s; the first
flavour is for closures:
-}
-- | The right-hand side of an STG binding: either a closure or a
-- saturated constructor application.
data GenStgRhs bndr occ
  = StgRhsClosure
        CostCentreStack         -- CCS to be attached (default is CurrentCCS)
        StgBinderInfo           -- Info about how this binder is used (see below)
        [occ]                   -- non-global free vars; a list, rather than
                                -- a set, because order is important
        !UpdateFlag             -- ReEntrant | Updatable | SingleEntry
        [bndr]                  -- arguments; if empty, then not a function;
                                -- as above, order is important.
        (GenStgExpr bndr occ)   -- body

{-
An example may be in order.  Consider:
\begin{verbatim}
let t = \x -> \y -> ... x ... y ... p ... q in e
\end{verbatim}

Pulling out the free vars and stylising somewhat, we get the equivalent:
\begin{verbatim}
let t = (\[p,q] -> \[x,y] -> ... x ... y ... p ...q) p q
\end{verbatim}

Stg-operationally, the @[x,y]@ are on the stack, the @[p,q]@ are
offsets from @Node@ into the closure, and the code ptr for the closure
will be exactly that in parentheses above.

The second flavour of right-hand-side is for constructors (simple but important):
-}

  | StgRhsCon
        CostCentreStack  -- CCS to be attached (default is CurrentCCS).
                         -- Top-level (static) ones will end up with
                         -- DontCareCCS, because we don't count static
                         -- data in heap profiles, and we don't set CCCS
                         -- from static closure.
        DataCon          -- Constructor. Never an unboxed tuple or sum, as those
                         -- are not allocated.
        [GenStgArg occ]  -- Args
-- | Arity of a right-hand side: the number of (value) lambda binders.
-- A constructor RHS has arity 0.
stgRhsArity :: StgRhs -> Int
stgRhsArity (StgRhsClosure _ _ _ _ bndrs _)
  = ASSERT( all isId bndrs ) length bndrs
  -- The arity never includes type parameters, but they should have gone by now
stgRhsArity (StgRhsCon _ _ _) = 0
-- Note [CAF consistency]
-- ~~~~~~~~~~~~~~~~~~~~~~
--
-- `topStgBindHasCafRefs` is only used by an assert (`consistentCafInfo` in
-- `CoreToStg`) to make sure CAF-ness predicted by `TidyPgm` is consistent with
-- reality.
--
-- Specifically, if the RHS mentions any Id that itself is marked
-- `MayHaveCafRefs`; or if the binding is a top-level updateable thunk; then the
-- `Id` for the binding should be marked `MayHaveCafRefs`. The potential trouble
-- is that `TidyPgm` computed the CAF info on the `Id` but some transformations
-- have taken place since then.
-- | Does this top-level binding refer to any CAFs (or is it itself a
-- top-level updatable thunk)?  Only used by the consistency assertion
-- described in Note [CAF consistency].
topStgBindHasCafRefs :: GenStgTopBinding bndr Id -> Bool
topStgBindHasCafRefs (StgTopLifted (StgNonRec _ rhs))
  = topRhsHasCafRefs rhs
topStgBindHasCafRefs (StgTopLifted (StgRec binds))
  = any (topRhsHasCafRefs . snd) binds  -- fused; avoids an intermediate list of rhss
topStgBindHasCafRefs StgTopStringLit{}
  = False
-- | CAF check for a top-level right-hand side; see Note [CAF consistency].
topRhsHasCafRefs :: GenStgRhs bndr Id -> Bool
topRhsHasCafRefs rhs = case rhs of
  StgRhsClosure _ _ _ upd _ body
    -- a top-level updatable thunk is itself a CAF
    -> isUpdatable upd || exprHasCafRefs body
  StgRhsCon _ _ args
    -> any stgArgHasCafRefs args
-- | Does the expression mention any Id that may carry CAF references?
exprHasCafRefs :: GenStgExpr bndr Id -> Bool
exprHasCafRefs expr = case expr of
  StgApp f as             -> stgIdHasCafRefs f || any stgArgHasCafRefs as
  StgLit{}                -> False
  StgConApp _ as _        -> any stgArgHasCafRefs as
  StgOpApp _ as _         -> any stgArgHasCafRefs as
  StgLam _ body           -> exprHasCafRefs body
  StgCase scrt _ _ alts   -> exprHasCafRefs scrt || any altHasCafRefs alts
  StgLet bind body        -> bindHasCafRefs bind || exprHasCafRefs body
  StgLetNoEscape bind body -> bindHasCafRefs bind || exprHasCafRefs body
  StgTick _ e             -> exprHasCafRefs e
-- | CAF check for a (possibly recursive) nested binding group.
bindHasCafRefs :: GenStgBinding bndr Id -> Bool
bindHasCafRefs (StgNonRec _ rhs)
  = rhsHasCafRefs rhs
bindHasCafRefs (StgRec binds)
  = any (rhsHasCafRefs . snd) binds  -- fused; avoids an intermediate list of rhss
-- | CAF check for a nested (non-top-level) right-hand side.
rhsHasCafRefs :: GenStgRhs bndr Id -> Bool
rhsHasCafRefs rhs = case rhs of
  StgRhsClosure _ _ _ _ _ body -> exprHasCafRefs body
  StgRhsCon _ _ args           -> any stgArgHasCafRefs args
-- | CAF check for a single case alternative (only its RHS matters).
altHasCafRefs :: GenStgAlt bndr Id -> Bool
altHasCafRefs alt = case alt of
  (_, _, rhs) -> exprHasCafRefs rhs
-- | CAF check for an argument: only variable arguments can carry CAF refs.
stgArgHasCafRefs :: GenStgArg Id -> Bool
stgArgHasCafRefs arg = case arg of
  StgVarArg v -> stgIdHasCafRefs v
  _           -> False
-- | Is this an occurrence of a top-level Id that may have CAF refs?
--
-- We are looking for occurrences of an Id that is bound at top level, and may
-- have CAF refs.  At this point (after TidyPgm) top-level Ids (whether
-- imported or defined in this module) are GlobalIds, so the test is easy.
stgIdHasCafRefs :: Id -> Bool
stgIdHasCafRefs i = isGlobalId i && mayHaveCafRefs (idCafInfo i)
-- Here's the @StgBinderInfo@ type, and its combining op:
-- | How a let-bound binder is used in its scope.
data StgBinderInfo
  = NoStgBinderInfo
  | SatCallsOnly        -- All occurrences are *saturated* *function* calls
                        -- This means we don't need to build an info table and
                        -- slow entry code for the thing
                        -- Thunks never get this value

-- Named aliases for the two flavours above.
noBinderInfo, stgUnsatOcc, stgSatOcc :: StgBinderInfo
noBinderInfo = NoStgBinderInfo
stgUnsatOcc  = NoStgBinderInfo
stgSatOcc    = SatCallsOnly
-- | True iff every occurrence of the binder is a saturated function call.
satCallsOnly :: StgBinderInfo -> Bool
satCallsOnly info = case info of
  SatCallsOnly    -> True
  NoStgBinderInfo -> False
-- | Combine binder-usage information: the result is SatCallsOnly
-- only when both sides are.
combineStgBinderInfo :: StgBinderInfo -> StgBinderInfo -> StgBinderInfo
combineStgBinderInfo l r = case (l, r) of
  (SatCallsOnly, SatCallsOnly) -> SatCallsOnly
  _                            -> NoStgBinderInfo
--------------
-- | Debug rendering of binder-usage info (empty for the default case).
pp_binder_info :: StgBinderInfo -> SDoc
pp_binder_info info = case info of
  NoStgBinderInfo -> empty
  SatCallsOnly    -> text "sat-only"
{-
************************************************************************
* *
\subsection[Stg-case-alternatives]{STG case alternatives}
* *
************************************************************************
Very like in @CoreSyntax@ (except no type-world stuff).
The type constructor is guaranteed not to be abstract; that is, we can
see its representation. This is important because the code generator
uses it to determine return conventions etc. But it's not trivial
where there's a module loop involved, because some versions of a type
constructor might not have all the constructors visible. So
mkStgAlgAlts (in CoreToStg) ensures that it gets the TyCon from the
constructors or literals (which are guaranteed to have the Real McCoy)
rather than from the scrutinee type.
-}
-- | A case alternative: constructor, bound parameters, right-hand side.
type GenStgAlt bndr occ
  = (AltCon,            -- alts: data constructor,
     [bndr],            -- constructor's parameters,
     GenStgExpr bndr occ)  -- ...right-hand side.

-- | The kind of thing a case expression scrutinises; the code
-- generator uses it to determine return conventions.
data AltType
  = PolyAlt             -- Polymorphic (a lifted type variable)
  | MultiValAlt Int     -- Multi value of this arity (unboxed tuple or sum)
                        -- the arity could indeed be 1 for unary unboxed tuple
  | AlgAlt      TyCon   -- Algebraic data type; the AltCons will be DataAlts
  | PrimAlt     PrimRep -- Primitive data type; the AltCons (if any) will be LitAlts
{-
************************************************************************
* *
\subsection[Stg]{The Plain STG parameterisation}
* *
************************************************************************
This happens to be the only one we use at the moment.
-}
-- The plain STG parameterisation: binders and occurrences are both Ids.
type StgTopBinding = GenStgTopBinding Id Id
type StgBinding  = GenStgBinding  Id Id
type StgArg      = GenStgArg      Id
type StgExpr     = GenStgExpr     Id Id
type StgRhs      = GenStgRhs      Id Id
type StgAlt      = GenStgAlt      Id Id

{- Many passes apply a substitution, and it's very handy to have type
   synonyms to remind us whether or not the substitution has been applied.
   See CoreSyn for precedence in Core land
-}

-- "In" = substitution not yet applied; "Out" = substitution applied.
type InStgTopBinding  = StgTopBinding
type InStgBinding     = StgBinding
type InStgArg         = StgArg
type InStgExpr        = StgExpr
type InStgRhs         = StgRhs
type InStgAlt         = StgAlt
type OutStgTopBinding = StgTopBinding
type OutStgBinding    = StgBinding
type OutStgArg        = StgArg
type OutStgExpr       = StgExpr
type OutStgRhs        = StgRhs
type OutStgAlt        = StgAlt
{-
************************************************************************
* *
\subsubsection[UpdateFlag-datatype]{@UpdateFlag@}
* *
************************************************************************
This is also used in @LambdaFormInfo@ in the @ClosureInfo@ module.
A @ReEntrant@ closure may be entered multiple times, but should not be
updated or blackholed. An @Updatable@ closure should be updated after
evaluation (and may be blackholed during evaluation). A @SingleEntry@
closure will only be entered once, and so need not be updated but may
safely be blackholed.
-}
-- | ReEntrant: may be entered many times, never updated/blackholed.
-- Updatable: updated after evaluation (may be blackholed meanwhile).
-- SingleEntry: entered at most once, so never updated.
data UpdateFlag = ReEntrant | Updatable | SingleEntry
-- One-character rendering: r / u / s.
instance Outputable UpdateFlag where
  ppr flag = char $ case flag of
    ReEntrant   -> 'r'
    Updatable   -> 'u'
    SingleEntry -> 's'
-- | Should a closure with this flag be updated after evaluation?
isUpdatable :: UpdateFlag -> Bool
isUpdatable flag = case flag of
  ReEntrant   -> False
  SingleEntry -> False
  Updatable   -> True
{-
************************************************************************
* *
\subsubsection{StgOp}
* *
************************************************************************
An StgOp allows us to group together PrimOps and ForeignCalls.
It's quite useful to move these around together, notably
in StgOpApp and COpStmt.
-}
-- | A primitive operation, primitive call, or foreign call, grouped
-- so they can be moved around together (notably in StgOpApp).
data StgOp
  = StgPrimOp  PrimOp

  | StgPrimCallOp PrimCall

  | StgFCallOp ForeignCall Unique
        -- The Unique is occasionally needed by the C pretty-printer
        -- (which lacks a unique supply), notably when generating a
        -- typedef for foreign-export-dynamic
{-
************************************************************************
* *
\subsection[Stg-pretty-printing]{Pretty-printing}
* *
************************************************************************
Robin Popplestone asked for semi-colon separators on STG binds; here's
hoping he likes terminators instead... Ditto for case alternatives.
-}
-- | Pretty-print a top-level binding (string literal or lifted binding).
pprGenStgTopBinding :: (OutputableBndr bndr, Outputable bdee, Ord bdee)
                    => GenStgTopBinding bndr bdee -> SDoc
pprGenStgTopBinding b = case b of
  StgTopStringLit bndr str ->
    hang (hsep [pprBndr LetBind bndr, equals])
         4 (pprHsBytes str <> semi)
  StgTopLifted bind ->
    pprGenStgBinding bind
-- | Pretty-print a binding group; recursive groups are bracketed by
-- debug markers when -dppr-debug is on.
pprGenStgBinding :: (OutputableBndr bndr, Outputable bdee, Ord bdee)
                 => GenStgBinding bndr bdee -> SDoc
pprGenStgBinding b = case b of
  StgNonRec bndr rhs -> ppr_one (bndr, rhs)
  StgRec pairs ->
    vcat $ whenPprDebug (text "{- StgRec (begin) -}")
         : map ppr_one pairs ++ [whenPprDebug (text "{- StgRec (end) -}")]
  where
    ppr_one (bndr, expr)
      = hang (hsep [pprBndr LetBind bndr, equals]) 4 (ppr expr <> semi)
-- | Pretty-print a plain STG binding (specialisation of 'pprGenStgBinding').
pprStgBinding :: StgBinding -> SDoc
pprStgBinding = pprGenStgBinding  -- eta-reduced; it is a mere specialisation
-- | Pretty-print a list of top-level bindings, separated by blank lines.
pprStgTopBindings :: [StgTopBinding] -> SDoc
pprStgTopBindings = vcat . intersperse blankLine . map pprGenStgTopBinding
-- The Outputable instances simply delegate to the pretty-printers below.
instance (Outputable bdee) => Outputable (GenStgArg bdee) where
    ppr = pprStgArg

instance (OutputableBndr bndr, Outputable bdee, Ord bdee)
                => Outputable (GenStgTopBinding bndr bdee) where
    ppr = pprGenStgTopBinding

instance (OutputableBndr bndr, Outputable bdee, Ord bdee)
                => Outputable (GenStgBinding bndr bdee) where
    ppr = pprGenStgBinding

instance (OutputableBndr bndr, Outputable bdee, Ord bdee)
                => Outputable (GenStgExpr bndr bdee) where
    ppr = pprStgExpr
instance (OutputableBndr bndr, Outputable bdee, Ord bdee)
                => Outputable (GenStgRhs bndr bdee) where
    ppr = pprStgRhs  -- eta-reduced, consistent with the sibling instances
-- | Pretty-print an argument: a variable or a literal.
pprStgArg :: (Outputable bdee) => GenStgArg bdee -> SDoc
pprStgArg arg = case arg of
  StgVarArg var -> ppr var
  StgLitArg lit -> ppr lit
-- | Pretty-print an STG expression.  Layout (hang/nest offsets) is
-- deliberate; do not "tidy" without checking the printed output.
pprStgExpr :: (OutputableBndr bndr, Outputable bdee, Ord bdee)
           => GenStgExpr bndr bdee -> SDoc
-- special case
pprStgExpr (StgLit lit)     = ppr lit

-- general case
pprStgExpr (StgApp func args)
  = hang (ppr func) 4 (sep (map (ppr) args))

pprStgExpr (StgConApp con args _)
  = hsep [ ppr con, brackets (interppSP args) ]

pprStgExpr (StgOpApp op args _)
  = hsep [ pprStgOp op, brackets (interppSP args)]

pprStgExpr (StgLam bndrs body)
  = sep [ char '\\' <+> ppr_list (map (pprBndr LambdaBind) bndrs)
            <+> text "->",
          pprStgExpr body ]
  where ppr_list = brackets . fsep . punctuate comma

-- special case: let v = <very specific thing>
--               in
--               let ...
--               in
--               ...
--
-- Very special!  Suspicious! (SLPJ)

{-
pprStgExpr (StgLet srt (StgNonRec bndr (StgRhsClosure cc bi free_vars upd_flag args rhs))
                        expr@(StgLet _ _))
  = ($$)
      (hang (hcat [text "let { ", ppr bndr, ptext (sLit " = "),
                          ppr cc,
                          pp_binder_info bi,
                          text " [", whenPprDebug (interppSP free_vars), ptext (sLit "] \\"),
                          ppr upd_flag, text " [",
                          interppSP args, char ']'])
            8 (sep [hsep [ppr rhs, text "} in"]]))
      (ppr expr)
-}

-- special case: let ... in let ...

pprStgExpr (StgLet bind expr@(StgLet _ _))
  = ($$)
      (sep [hang (text "let {")
                2 (hsep [pprGenStgBinding bind, text "} in"])])
      (ppr expr)

-- general case
pprStgExpr (StgLet bind expr)
  = sep [hang (text "let {") 2 (pprGenStgBinding bind),
         hang (text "} in ") 2 (ppr expr)]

pprStgExpr (StgLetNoEscape bind expr)
  = sep [hang (text "let-no-escape {")
                2 (pprGenStgBinding bind),
         hang (text "} in ")
                2 (ppr expr)]

pprStgExpr (StgTick tickish expr)
  = sdocWithDynFlags $ \dflags ->
    -- ticks are hidden entirely when -dsuppress-ticks is on
    if gopt Opt_SuppressTicks dflags
    then pprStgExpr expr
    else sep [ ppr tickish, pprStgExpr expr ]

pprStgExpr (StgCase expr bndr alt_type alts)
  = sep [sep [text "case",
              nest 4 (hsep [pprStgExpr expr,
                whenPprDebug (dcolon <+> ppr alt_type)]),
              text "of", pprBndr CaseBind bndr, char '{'],
         nest 2 (vcat (map pprStgAlt alts)),
         char '}']
-- | Pretty-print one case alternative, terminated with a semicolon.
pprStgAlt :: (OutputableBndr bndr, Outputable occ, Ord occ)
          => GenStgAlt bndr occ -> SDoc
pprStgAlt (con, params, expr) = hang header 4 (ppr expr <> semi)
  where
    header = hsep [ppr con, sep (map (pprBndr CasePatBind) params), text "->"]
-- | Pretty-print an STG operation (primop, primcall, or foreign call).
pprStgOp :: StgOp -> SDoc
pprStgOp sop = case sop of
  StgPrimOp op     -> ppr op
  StgPrimCallOp op -> ppr op
  StgFCallOp op _  -> ppr op
instance Outputable AltType where
  ppr at = case at of
    PolyAlt       -> text "Polymorphic"
    MultiValAlt n -> text "MultiAlt" <+> ppr n
    AlgAlt tc     -> text "Alg" <+> ppr tc
    PrimAlt rep   -> text "Prim" <+> ppr rep
-- | Pretty-print a right-hand side.  The first equation is a compact
-- special case for a one-free-variable, no-argument closure whose body
-- is a bare variable; the clause order therefore matters.
pprStgRhs :: (OutputableBndr bndr, Outputable bdee, Ord bdee)
          => GenStgRhs bndr bdee -> SDoc

-- special case
pprStgRhs (StgRhsClosure cc bi [free_var] upd_flag [{-no args-}] (StgApp func []))
  = sdocWithDynFlags $ \dflags ->
    hsep [ ppr cc,
           pp_binder_info bi,
           -- free-variable list is suppressible via -dsuppress-stg-free-vars
           if not $ gopt Opt_SuppressStgFreeVars dflags
             then brackets (ppr free_var) else empty,
           text " \\", ppr upd_flag, ptext (sLit " [] "), ppr func ]

-- general case
pprStgRhs (StgRhsClosure cc bi free_vars upd_flag args body)
  = sdocWithDynFlags $ \dflags ->
    hang (hsep [if gopt Opt_SccProfilingOn dflags then ppr cc else empty,
                pp_binder_info bi,
                if not $ gopt Opt_SuppressStgFreeVars dflags
                  then brackets (interppSP free_vars) else empty,
                char '\\' <> ppr upd_flag, brackets (interppSP args)])
         4 (ppr body)

pprStgRhs (StgRhsCon cc con args)
  = hcat [ ppr cc,
           space, ppr con, text "! ", brackets (interppSP args)]
| ezyang/ghc | compiler/stgSyn/StgSyn.hs | bsd-3-clause | 28,778 | 0 | 16 | 8,139 | 4,023 | 2,164 | 1,859 | 340 | 4 |
-- |
-- Module : Test.LeanCheck
-- Copyright : (c) 2015-2020 Rudy Matela
-- License : 3-Clause BSD (see the file LICENSE)
-- Maintainer : Rudy Matela <[email protected]>
--
-- LeanCheck is a simple enumerative property-based testing library.
--
-- A __property__ is a function returning a 'Bool' that should be 'True' for
-- all possible choices of arguments.  Properties can be viewed as
-- parameterized unit tests.
--
--
-- To check if a property 'holds' by testing up to a thousand values,
-- we evaluate:
--
-- > holds 1000 property
--
-- 'True' indicates success. 'False' indicates a bug.
--
-- For example:
--
-- > > import Data.List (sort)
-- > > holds 1000 $ \xs -> length (sort xs) == length (xs::[Int])
-- > True
--
-- To get the smallest 'counterExample' by testing up to a thousand values,
-- we evaluate:
--
-- > counterExample 1000 property
--
-- 'Nothing' indicates no counterexample was found,
-- a 'Just' value indicates a counterexample.
--
-- For instance:
--
-- > > import Data.List (union)
-- > > counterExample 1000 $ \xs ys -> union xs ys == union ys (xs :: [Int])
-- > Just ["[]","[0,0]"]
--
-- The suggested values for the number of tests to use with LeanCheck are
-- 500, 1 000 or 10 000. LeanCheck is memory intensive and you should take
-- care if you go beyond that.
--
-- The function 'check' can also be used to test and report counterexamples.
--
-- > > check $ \xs ys -> union xs ys == union ys (xs :: [Int])
-- > *** Failed! Falsifiable (after 4 tests):
-- > [] [0,0]
--
--
-- Arguments of properties should be instances of the 'Listable' typeclass.
-- 'Listable' instances are provided for the most common Haskell types.
-- New instances are easily defined (see 'Listable' for more info).
module Test.LeanCheck
(
-- * Checking and testing
holds
, fails
, exists
-- ** Boolean (property) operators
, (==>)
-- ** Counterexamples and witnesses
, counterExample
, counterExamples
, witness
, witnesses
-- ** Reporting
, check
, checkFor
, checkResult
, checkResultFor
-- * Listing test values
, Listable(..)
-- ** Listing constructors
, cons0
, cons1
, cons2
, cons3
, cons4
, cons5
, cons6
, cons7
, cons8
, cons9
, cons10
, cons11
, cons12
, delay
, reset
, ofWeight
, addWeight
, suchThat
-- ** Combining tiers
, (\/)
, (\\//)
, (><)
, productWith
-- ** Manipulating tiers
, mapT
, filterT
, concatT
, concatMapT
, deleteT
, normalizeT
, toTiers
-- ** Automatically deriving Listable instances
, deriveListable
, deriveListableCascading
-- ** Specialized constructors of tiers
, setCons
, bagCons
, noDupListCons
, mapCons
-- ** Products of tiers
, product3With
, productMaybeWith
-- * Listing lists
, listsOf
, setsOf
, bagsOf
, noDupListsOf
, products
, listsOfLength
-- ** Listing values
, tiersFloating
, tiersFractional
, listIntegral
, (+|)
-- * Test results
, Testable
, results
)
where
import Test.LeanCheck.Basic
import Test.LeanCheck.Tiers
import Test.LeanCheck.Derive
import Test.LeanCheck.IO
| rudymatela/llcheck | src/Test/LeanCheck.hs | bsd-3-clause | 3,130 | 0 | 5 | 707 | 300 | 226 | 74 | 68 | 0 |
module Main where
import CV.Image
import CV.Features
import CV.Drawing
import CV.ImageOp
import CV.Bindings.Types
import CV.Transforms
import Utils.GeometryClass
import Utils.Point
import System.Environment
-- | Load the image named on the command line, compute MSER regions on it
-- and on a copy rotated by pi/4, draw the region contours, and save a
-- two-panel montage to @mser.png@.
main :: IO ()
main = do
  args <- getArgs
  -- fail with a usage message instead of a pattern-match error on 'head'
  path <- case args of
            (p : _) -> return p
            []      -> error "usage: mser <image-file>"
  mimg <- loadImage path
  case mimg of
    Nothing -> error ("mser: could not load image " ++ path)
    Just x  -> do
      let y    = rotate (pi/4) x
          lst  = getMSER (unsafeImageTo8Bit x) Nothing defaultMSERParams
          lsty = getMSER (unsafeImageTo8Bit y) Nothing defaultMSERParams
          -- overlay each contour as a polyline on top of the image
          result cs img = img <## [drawLinesOp 1 1 $ polyline ctr | ctr <- cs]
      print lst
      saveImage "mser.png" $
        montage (2,1) 2 [result (take 100 lst) x, result (take 100 lsty) y]
-- | Consecutive-point segments of a point list:
-- @polyline [p1,p2,p3] == [(p1,p2),(p2,p3)]@; empty and singleton
-- inputs yield no segments.
polyline :: [a] -> [(a, a)]
polyline pts = zip pts (drop 1 pts)  -- 'drop 1' is total, unlike 'tail'
| TomMD/CV | examples/mser.hs | bsd-3-clause | 676 | 0 | 13 | 162 | 245 | 126 | 119 | 20 | 1 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Infer (
Constraint,
TypeError(..),
Subst(..),
inferTop,
constraintsExpr
) where
import Env
import Type
import Syntax
import Control.Monad.Except
import Control.Monad.State
import Control.Monad.RWS
import Control.Monad.Identity
import Data.List (nub)
import qualified Data.Map as Map
import qualified Data.Set as Set
-------------------------------------------------------------------------------
-- Classes
-------------------------------------------------------------------------------
-- | Inference monad
-- | Inference monad: Reader for the typing environment, Writer for the
-- accumulated constraints, State for the fresh-name counter, over
-- Except for fatal errors.
type Infer a = (RWST
                  Env             -- Typing environment
                  [Constraint]    -- Generated constraints
                  InferState      -- Inference state
                  (Except         -- Inference errors
                    TypeError)
                  a)              -- Result

-- | Inference state
data InferState = InferState { count :: Int }

-- | Initial inference state
initInfer :: InferState
initInfer = InferState { count = 0 }

-- | An equality constraint between two types, solved later by 'runSolve'.
type Constraint = (Type, Type)

-- | Partial solver state: substitution so far plus remaining constraints.
type Unifier = (Subst, [Constraint])

-- | Constraint solver monad
type Solve a = ExceptT TypeError Identity a

-- | A substitution mapping type variables to types.
newtype Subst = Subst (Map.Map TVar Type)
  deriving (Eq, Ord, Show, Monoid)
-- | Things a substitution can be applied to, with computable free
-- type variables.
class Substitutable a where
  apply :: Subst -> a -> a    -- ^ apply a substitution
  ftv   :: a -> Set.Set TVar  -- ^ free type variables
instance Substitutable Type where
  -- constants are unaffected; variables look themselves up (defaulting
  -- to themselves); arrows distribute
  apply _ (TCon a)           = TCon a
  apply (Subst m) t@(TVar v) = Map.findWithDefault t v m
  apply su (a `TArr` b)      = apply su a `TArr` apply su b

  ftv TCon{}       = Set.empty
  ftv (TVar v)     = Set.singleton v
  ftv (a `TArr` b) = ftv a `Set.union` ftv b
instance Substitutable Scheme where
  -- quantified variables shadow the substitution, so delete them first
  apply (Subst m) (Forall vs t) = Forall vs (apply inner t)
    where inner = Subst (foldr Map.delete m vs)
  ftv (Forall vs t) = ftv t `Set.difference` Set.fromList vs
instance Substitutable Constraint where
  apply su (l, r) = (apply su l, apply su r)
  ftv (l, r)      = ftv l `Set.union` ftv r
instance Substitutable a => Substitutable [a] where
  apply su = map (apply su)
  ftv      = Set.unions . map ftv
instance Substitutable Env where
  apply su (TypeEnv m) = TypeEnv (Map.map (apply su) m)
  ftv (TypeEnv m)      = ftv (Map.elems m)
-- | Errors produced by inference or constraint solving.
data TypeError
  = UnificationFail Type Type          -- ^ the two types cannot be unified
  | InfiniteType TVar Type             -- ^ occurs-check failure (@a ~ f a@)
  | UnboundVariable String             -- ^ variable not found in the environment
  | Ambigious [Constraint]             -- ^ unresolved constraints (constructor
                                       --   name has a typo, kept for API compatibility)
  | UnificationMismatch [Type] [Type]  -- ^ type lists of different length
-------------------------------------------------------------------------------
-- Inference
-------------------------------------------------------------------------------
-- | Run the inference monad, yielding the inferred type together with
-- the constraints it generated.
runInfer :: Env -> Infer Type -> Either TypeError (Type, [Constraint])
runInfer env m = runExcept (evalRWST m env initInfer)
-- | Solve for the toplevel type of an expression in a given environment.
-- The Either monad short-circuits on the first error, exactly like the
-- nested case analysis it replaces.
inferExpr :: Env -> Expr -> Either TypeError Scheme
inferExpr env ex = do
  (ty, cs) <- runInfer env (infer ex)
  subst <- runSolve cs
  return (closeOver (apply subst ty))
-- | Return the internal constraints used in solving for the type of an
-- expression, along with the solved substitution, raw type, and scheme.
constraintsExpr :: Env -> Expr -> Either TypeError ([Constraint], Subst, Type, Scheme)
constraintsExpr env ex = do
  (ty, cs) <- runInfer env (infer ex)
  subst <- runSolve cs
  let sc = closeOver (apply subst ty)
  return (cs, subst, ty, sc)
-- | Canonicalize and return the polymorphic toplevel type.
closeOver :: Type -> Scheme
closeOver ty = normalize (generalize Env.empty ty)
-- | Record an equality constraint between two types (solved later).
uni :: Type -> Type -> Infer ()
uni lhs rhs = tell [(lhs, rhs)]
-- | Run an action in the environment extended with one binding,
-- shadowing any previous binding for the same name.
inEnv :: (Name, Scheme) -> Infer a -> Infer a
inEnv (x, sc) m = local scope m
  where scope e = remove e x `extend` (x, sc)
-- | Lookup a name in the environment and instantiate its scheme with
-- fresh type variables; errors if the name is unbound.
lookupEnv :: Name -> Infer Type
lookupEnv x = do
  (TypeEnv env) <- ask
  case Map.lookup x env of
    Nothing -> throwError $ UnboundVariable x
    Just s  -> instantiate s  -- was a redundant 'do t <- …; return t'
-- | Infinite supply of fresh type-variable names:
-- "a".."z", then "aa","ab",…, and so on.
letters :: [String]
letters = concatMap (\n -> replicateM n ['a'..'z']) [1 ..]
-- | Produce a fresh type variable, advancing the name counter.
fresh :: Infer Type
fresh = do
  n <- gets count
  modify $ \st -> st { count = n + 1 }
  return $ TVar $ TV (letters !! n)
-- | Replace a scheme's quantified variables with fresh type variables.
instantiate :: Scheme -> Infer Type
instantiate (Forall vs t) = do
  freshVs <- mapM (const fresh) vs
  let su = Subst $ Map.fromList $ zip vs freshVs
  return $ apply su t
-- | Quantify over every type variable free in the type but not free
-- in the environment.
generalize :: Env -> Type -> Scheme
generalize env t = Forall vars t
  where vars = Set.toList (ftv t `Set.difference` ftv env)
-- | Types of the built-in binary operators; arithmetic is Int -> Int -> Int,
-- equality yields Bool.
ops :: Map.Map Binop Type
ops = Map.fromList [
      (Add, (typeInt `TArr` (typeInt `TArr` typeInt)))
    , (Mul, (typeInt `TArr` (typeInt `TArr` typeInt)))
    , (Sub, (typeInt `TArr` (typeInt `TArr` typeInt)))
    , (Eql, (typeInt `TArr` (typeInt `TArr` typeBool)))
  ]
-- | Generate a type and (via 'uni') equality constraints for an expression.
infer :: Expr -> Infer Type
infer expr = case expr of
  Lit (LInt _)  -> return $ typeInt
  Lit (LBool _) -> return $ typeBool

  Var x -> lookupEnv x

  -- lambda binders are monomorphic (Forall [] tv)
  Lam x e -> do
    tv <- fresh
    t <- inEnv (x, Forall [] tv) (infer e)
    return (tv `TArr` t)

  -- application: the function must have an arrow type ending in tv
  App e1 e2 -> do
    t1 <- infer e1
    t2 <- infer e2
    tv <- fresh
    uni t1 (t2 `TArr` tv)
    return tv

  -- let-generalization: the bound expression's type is generalized
  -- over the current environment before checking the body
  Let x e1 e2 -> do
    env <- ask
    t1 <- infer e1
    let sc = generalize env t1
    t2 <- inEnv (x, sc) (infer e2)
    return t2

  -- fixed point: fix :: (a -> a) -> a
  Fix e1 -> do
    t1 <- infer e1
    tv <- fresh
    uni (tv `TArr` tv) t1
    return tv

  -- binary operator: constrain against the operator's type from 'ops'
  Op op e1 e2 -> do
    t1 <- infer e1
    t2 <- infer e2
    tv <- fresh
    let u1 = t1 `TArr` (t2 `TArr` tv)
        u2 = ops Map.! op
    uni u1 u2
    return tv

  -- conditional: Bool condition, both branches share one type
  If cond tr fl -> do
    t1 <- infer cond
    t2 <- infer tr
    t3 <- infer fl
    uni t1 typeBool
    uni t2 t3
    return t2
-- | Infer a sequence of top-level bindings left to right, extending the
-- environment as we go; stops at the first error.
inferTop :: Env -> [(String, Expr)] -> Either TypeError Env
inferTop env decls = case decls of
  [] -> Right env
  (name, ex) : rest -> do
    ty <- inferExpr env ex
    inferTop (extend env (name, ty)) rest
-- | Rename a scheme's type variables to a, b, c, … in order of first
-- appearance, so alpha-equivalent schemes print identically.
normalize :: Scheme -> Scheme
normalize (Forall _ body) = Forall (map snd ord) (normtype body)
  where
    -- each distinct variable paired with its canonical replacement
    ord = zip (nub $ fv body) (map TV letters)

    fv (TVar a)   = [a]
    fv (TArr a b) = fv a ++ fv b
    fv (TCon _)   = []

    normtype (TArr a b) = TArr (normtype a) (normtype b)
    normtype (TCon a)   = TCon a
    normtype (TVar a)   =
      case Prelude.lookup a ord of
        Just x -> TVar x
        Nothing -> error "type variable not in signature"
-------------------------------------------------------------------------------
-- Constraint Solver
-------------------------------------------------------------------------------
-- | The empty substitution
emptySubst :: Subst
emptySubst = mempty  -- via the Monoid derived for 'Subst' from the inner Map
-- | Compose substitutions (left-biased): the left substitution is applied
-- to the range of the right one, then the maps are unioned.
compose :: Subst -> Subst -> Subst
compose whole@(Subst m1) (Subst m2) =
  Subst (Map.map (apply whole) m2 `Map.union` m1)
-- | Run the constraint solver starting from the empty substitution.
runSolve :: [Constraint] -> Either TypeError Subst
runSolve cs = runIdentity $ runExceptT $ solver (emptySubst, cs)
-- | Unify two lists of types pointwise, threading the substitution through
-- the remaining pairs; fails if the lists differ in length.
unifyMany :: [Type] -> [Type] -> Solve Subst
unifyMany [] [] = return emptySubst
unifyMany (t1 : ts1) (t2 : ts2) = do
  su1 <- unifies t1 t2
  su2 <- unifyMany (apply su1 ts1) (apply su1 ts2)
  return (su2 `compose` su1)
unifyMany t1 t2 = throwError (UnificationMismatch t1 t2)
-- | Unify two types, producing the substitution that makes them equal
-- (or failing).  Clause order matters: the equality fast path first,
-- then variable binding, then structural recursion.
unifies :: Type -> Type -> Solve Subst
unifies t1 t2 | t1 == t2 = return emptySubst  -- identical types: nothing to do
unifies (TVar v) t = v `bind` t
unifies t (TVar v) = v `bind` t
unifies (TArr t1 t2) (TArr t3 t4) = unifyMany [t1, t2] [t3, t4]
unifies t1 t2 = throwError $ UnificationFail t1 t2
-- Unification solver: process constraints one at a time, applying each
-- freshly obtained substitution to the constraints still pending.
solver :: Unifier -> Solve Subst
solver (su, cs) = case cs of
  [] -> return su
  (t1, t2) : rest -> do
    su1 <- unifies t1 t2
    solver (su1 `compose` su, apply su1 rest)
-- | Bind a type variable to a type, running the occurs check to rule
-- out infinite types.
bind :: TVar -> Type -> Solve Subst
bind a t | t == TVar a     = return emptySubst  -- binding a variable to itself is a no-op
         | occursCheck a t = throwError $ InfiniteType a t
         | otherwise       = return $ (Subst $ Map.singleton a t)
-- | Does the type variable occur among the free variables of the target?
occursCheck :: Substitutable a => TVar -> a -> Bool
occursCheck a t = Set.member a (ftv t)
| yupferris/write-you-a-haskell | chapter7/poly_constraints/src/Infer.hs | mit | 8,175 | 0 | 15 | 2,054 | 3,140 | 1,615 | 1,525 | 205 | 9 |
{- |
Module : $Header$
Description : generic mixfix analysis, using an Earley parser
Copyright : Christian Maeder and Uni Bremen 2003-2005
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : experimental
Portability : portable
Generic mixfix analysis, using an Earley parser
The grammer has a single non-terminal for terms (the double
underscore). A rule of the grammer carries an identifier, a precedence
number, and the actual token list of the identifier to match against
the input token list..
The parser can be instantiated for any term type. A
function parameter determines how applications from identifiers and
arguments are constructed.
-}
module Common.Earley
( Rule
, TokRules
, Rules (..)
, emptyRules
, partitionRules
-- * special tokens for special ids
, varTok
, exprTok
, parenId
, exprId
, varId
, tupleId
, unitId
, protect
, listRules
, mixRule
, getTokenPlaceList
, getPlainPolyTokenList
, getPolyTokenList
-- * resolution chart
, Chart
, mixDiags
, solveDiags
, ToExpr
, rules
, addRules
, initChart
, nextChart
, getResolved
) where
import Common.AS_Annotation
import Common.GlobalAnnotations
import Common.Id
import Common.Prec
import Common.Result
import Common.Utils (nubOrd)
import Control.Exception
import Data.List
import Data.Maybe
import qualified Data.Map as Map
import qualified Data.Set as Set
-- | take the difference of the two input lists:
-- @takeDiff l1 l2 == take (length l2 - length l1) l2@
takeDiff :: [a] -> [b] -> [b]
takeDiff short long = zipWith const long (dropPrefix short long)
{- | update token positions.
return remaining positions -}
setToksPos :: [Token] -> Range -> ([Token], Range)
setToksPos (h : ts) (Range (p : ps)) =
  let (rest, remaining) = setToksPos ts (Range ps)
      -- place tokens get the position twice (start and end)
      h' = h { tokPos = Range $ if isPlace h then [p, p] else [p] }
  in (h' : rest, remaining)
setToksPos ts ps = (ts, ps)
-- | reverse the list of positions stored in a range
reverseRange :: Range -> Range
reverseRange r = Range (reverse (rangeToList r))
{- | update positions in 'Id'.
return remaining positions.
The simple case distributes positions over the plain tokens; for
compound ids the position list is threaded through the brackets,
the compound components, and the trailing places in turn. -}
setPlainIdePos :: Id -> Range -> (Id, Range)
setPlainIdePos (Id ts cs _) ps =
  if null cs then
    let (newTs, restPs) = setToksPos ts ps
    in (Id newTs cs nullRange, restPs)
  else let (toks, pls) = splitMixToken ts
           (front, ps2) = setToksPos toks ps
           ps2PL = rangeToList ps2
           -- fold over the components, consuming one separator position
           -- before each; newCs is accumulated in reverse order and ps3
           -- collects the separator positions (also reversed)
           (newCs, ps3, ps4) =
             if isNullRange ps2 then error "setPlainIdePos2"
             else foldl ( \ (prevCs, seps, restPs) a ->
                 let (c1, qs) = setPlainIdePos a restPs
                     qsPL = rangeToList qs
                 in if isNullRange qs then error "setPlainIdePos1"
                    else (c1 : prevCs,
                          Range (head qsPL : rangeToList seps),
                          Range (tail qsPL)))
               ([], Range [head ps2PL], Range (tail ps2PL)) cs
           (newPls, ps7) = setToksPos pls ps4
       -- undo the reversals from the fold before rebuilding the Id
       in (Id (front ++ newPls) (reverse newCs) (reverseRange ps3), ps7)
{- no special index type anymore (assuming not much more development)
the info Int denotes fast precedence -}
-- | an Earley item: a mixfix rule partially matched against the input
data Item a = Item
  { rule :: Id -- the rule to match
  , info :: Int -- additional precedence info for 'rule'
  , lWeight :: Id -- weights for lower precedence pre- and postfixes
  , rWeight :: Id -- given by the 'Id's itself
  , posList :: Range -- positions of Id tokens (collected in reverse)
  , args :: [a] -- collected arguments are in reverse order
  , ambigArgs :: [[a]] -- alternative argument lists of equal items
  , ambigs :: [[a]] -- ambiguities propagated from argument items
  , rest :: [Token] -- part of the rule after the "dot"
  , index :: Int -- index into the Table/input string where the item started
  }
-- | the non-terminal (written between parentheses to avoid clashes)
termStr :: String
termStr = "(__)"
-- | builtin terminals
commaTok, termTok, oParenTok, cParenTok :: Token
commaTok = mkSimpleId "," -- for list elements
termTok = mkSimpleId termStr
oParenTok = mkSimpleId "("
cParenTok = mkSimpleId ")"
listTok :: Token
listTok = mkSimpleId "[]" -- impossible token (cannot occur in user input)
protectTok :: Token
protectTok = mkSimpleId "()" -- impossible token (cannot occur in user input)
-- | token for a fixed (or recursively resolved) operator expression
exprTok :: Token
exprTok = mkSimpleId "(op )"
-- | token for a fixed (or recursively resolved) argument expression
varTok :: Token
varTok = mkSimpleId "(var )"
-- | parenthesis around one place
parenId :: Id
parenId = mkId [oParenTok, placeTok, cParenTok]
-- | id for tuples with at least two arguments
tupleId :: Id
tupleId = mkId [oParenTok, placeTok, commaTok, placeTok, cParenTok]
-- | id for the empty tuple
unitId :: Id
unitId = mkId [oParenTok, cParenTok]
-- | see 'exprTok'
exprId :: Id
exprId = mkId [exprTok]
-- | see 'varTok'
varId :: Id
varId = mkId [varTok]
-- | build the (internal) id of a list construct from its cons and nil ids
listId :: (Id, Id) -> Id
listId (f, c) = Id [listTok] [f, c] nullRange

-- | recognize ids created by 'listId'
isListId :: Id -> Bool
isListId (Id ts _ _) = case ts of
  tok : _ -> tok == listTok
  [] -> False
-- | interpret placeholders as literal places
protect :: Id -> Id
protect i = Id [protectTok] [i] nullRange

-- | undo 'protect': unwrap an id that is guarded by the protect token
unProtect :: Id -> Maybe Id
unProtect (Id ts cs _) = case (ts, cs) of
  ([tok], [i]) | tok == protectTok -> Just i
  _ -> Nothing
-- | get the token list for a mixfix rule
getPolyTokenList :: Id -> [Token]
getPolyTokenList = getGenPolyTokenList termStr
-- | get the plain token list for prefix applications
getPlainPolyTokenList :: Id -> [Token]
getPlainPolyTokenList = getGenPolyTokenList place
-- | a rule: its identifier, its precedence info, and its token list
type Rule = (Id, Int, [Token])
-- | create a fresh item for a rule, starting at the given input index
mkItem :: Int -> Rule -> Item a
mkItem ind (ide, inf, toks) = Item
  { rule = ide
  , info = inf
  , lWeight = ide
  , rWeight = ide
  , posList = nullRange
  , args = []
  , ambigArgs = []
  , ambigs = []
  , rest = toks
  , index = ind }
-- | extract tokens with the non-terminal for places
getTokenPlaceList :: Id -> [Token]
getTokenPlaceList = getTokenList termStr
-- | construct a rule for a mixfix
mixRule :: Int -> Id -> Rule
mixRule b i = (i, b, getTokenPlaceList i)
-- | build an application; list ids are unfolded into nested cons/nil
-- applications, the pseudo-ids for types, expressions, parentheses and
-- variables are transparent (they must carry exactly one argument)
asListAppl :: ToExpr a -> Id -> [a] -> Range -> a
asListAppl toExpr i ra br
  | isListId i =
    let Id _ [f, c] _ = i
        mkList [] ps = toExpr c [] ps
        mkList (hd : tl) ps = toExpr f [hd, mkList tl ps] ps
    in mkList ra br
  | elem i [typeId, exprId, parenId, varId] = case ra of
    [arg] -> arg
    _ -> error "asListAppl"
  | otherwise = toExpr i ra br
-- | construct the list rules from the literal annotations:
-- for each bracket pair, rules for the empty, singleton, and
-- multi-element list are generated
listRules :: Int -> GlobalAnnos -> [Rule]
listRules inf g =
  let lists = list_lit $ literal_annos g
      listRule co toks = (listId co, inf, toks)
  in concatMap ( \ (bs, (n, c)) ->
      let (b1, b2, cs) = getListBrackets bs
          e = Id (b1 ++ b2) cs nullRange in
      (if e == n then [] -- add b1 ++ b2 if it's not yet included by n
      else [listRule (c, n) $ getPlainTokenList e])
      ++ [listRule (c, n) (b1 ++ [termTok] ++ b2),
      listRule (c, n) (b1 ++ [termTok, commaTok, termTok] ++ b2)]
      ) $ Map.toList lists
-- | all completed items, grouped by the input index where they started
type Table a = Map.Map Int [Item a]
-- | look up the items for an index; an absent key yields the empty list
lookUp :: Table a -> Int -> [Item a]
lookUp ce k = Map.findWithDefault [] k ce
-- | recognize next token (possible introduce new tuple variable);
-- returns the advanced item(s), or the empty list if the token does
-- not match the item's next expected token
scanItem :: (a -> a -> a) -> (a, Token) -> Item a -> [Item a]
scanItem addType (trm, t)
  p@Item { rest = ts, args = pArgs, posList = pRange } = case ts of
  [] -> []
  hd : tt -> let
    -- q: item with the token's position recorded (last position if the
    -- rule is thereby completed, first position otherwise)
    q = p { posList = case rangeToList $ tokPos t of
          [] -> pRange
          ps@(po : _) -> Range $ (if null tt then last ps else po)
            : rangeToList pRange }
    -- r: q advanced past the matched token
    r = q { rest = tt } in
    if hd == t || t == exprTok && hd == varTok then
      if t == commaTok then
        case tt of
          sd : _ | sd == termTok ->
            -- tuple or list elements separator
            [ r, q { rest = termTok : ts } ]
          _ -> [r]
      else if elem t [exprTok, varTok, typeInstTok] then
        -- a resolved expression/variable: store it as argument
        [r { args = trm : pArgs }]
      else if t == typeTok then
        -- a type annotation must close the rule with exactly one argument
        case (tt, pArgs) of
          ([], [arg]) -> [q { rest = [], args = [addType trm arg] }]
          _ -> error "scanItem: typeTok"
      else [r]
    else []
-- | scan all items against the next terminal/token pair
scan :: (a -> a -> a) -> (a, Token) -> [Item a] -> [Item a]
scan addType tokTrm = concatMap (scanItem addType tokTrm)
-- | build one expression per alternative argument list of an ambiguous
-- item; missing leading arguments are filled from the item's own args
mkAmbigs :: ToExpr a -> Item a -> [a]
mkAmbigs toExpr p@Item { args = l, ambigArgs = aArgs } =
  map ( \ aas -> fst $
    mkExpr toExpr
    p { args = takeDiff aas l ++ aas
      } ) aArgs
-- | move a waiting item over its term non-terminal by plugging in a
-- completed argument item; updates positions, arguments, ambiguities,
-- and the left/right weights used for precedence checking
addArg :: GlobalAnnos -> ToExpr a -> Item a -> Item a -> Item a
addArg ga toExpr argItem@Item { ambigs = ams, posList = aRange }
  p@Item { args = pArgs, rule = op, posList = pRange, ambigs = pAmbs
         , rest = pRest} =
  let (arg, _) = mkExpr toExpr argItem
      newAms = mkAmbigs toExpr argItem
      q = case pRest of
        _ : tl ->
          p { rest = tl
            , posList = case rangeToList aRange of
              [] -> pRange
              qs@(h : _) -> Range $ (if null tl then
                last qs else h) : rangeToList pRange
            , args = arg : pArgs
            , ambigs = (if null newAms then ams else newAms : ams)
              ++ pAmbs }
        _ -> error "addArg"
  -- adjust the weight on whichever side the argument was attached
  in if isLeftArg op pArgs then
       q { lWeight = getNewWeight ALeft ga argItem op }
     else if isRightArg op pArgs then
       q { rWeight = getNewWeight ARight ga argItem op }
     else q
-- | shortcut for a function that constructs an expression
type ToExpr a = Id -> [a] -> Range -> a
-- | turn a completed item into an expression together with the
-- (reversed) positions of its tokens
mkExpr :: ToExpr a -> Item a -> (a, Range)
mkExpr toExpr Item { rule = orig, posList = ps, args = iArgs } =
  let rs = reverseRange ps
      (ide, qs) = if isListId orig then (orig, rs) else
        setPlainIdePos (fromMaybe orig $ unProtect orig) rs
  in (asListAppl toExpr ide (reverse iArgs) qs, rs)
-- | feed a completed item as argument into every item that was waiting
-- at its start index, keeping only precedence-correct combinations
reduce :: GlobalAnnos -> Table a -> ToExpr a -> Item a -> [Item a]
reduce ga table toExpr itm =
  [ addArg ga toExpr itm opItem
  | opItem <- lookUp table (index itm)
  , checkPrecs ga itm opItem ]
-- | select the weight field for the given associativity side
getWeight :: AssocEither -> Item a -> Id
getWeight ALeft = lWeight
getWeight ARight = rWeight

-- | recompute the side weight after attaching an argument item
getNewWeight :: AssocEither -> GlobalAnnos -> Item a -> Id -> Id
getNewWeight side ga itm = nextWeight side ga (getWeight side itm)
-- | check precedences of an argument and a top-level operator.
-- The argument's side weights are consulted via 'getWeight'.
checkPrecs :: GlobalAnnos -> Item a -> Item a -> Bool
checkPrecs ga argItem@Item { rule = arg, info = argPrec }
  Item { rule = op, info = opPrec, args = oArgs } =
  checkPrec ga (op, opPrec) (arg, argPrec) oArgs $ flip getWeight argItem
-- | reduce all items whose rule is fully matched, merging the results
reduceCompleted :: GlobalAnnos -> Table a -> ToExpr a -> [Item a] -> [Item a]
reduceCompleted ga table toExpr =
  foldr (mergeItems . reduce ga table toExpr) [] . filter (null . rest)
-- | keep reducing until no new items are produced
recReduce :: GlobalAnnos -> Table a -> ToExpr a -> [Item a] -> [Item a]
recReduce ga table toExpr items =
  let reduced = reduceCompleted ga table toExpr items
  in if null reduced then items
     else recReduce ga table toExpr reduced `mergeItems` items
-- | the completion step: add all (transitively) reduced items
complete :: ToExpr a -> GlobalAnnos -> Table a -> [Item a] -> [Item a]
complete toExpr ga table items =
  let reducedItems = recReduce ga table toExpr $
        reduceCompleted ga table toExpr items
  in reducedItems ++ items
-- | split items into those waiting for a term non-terminal (predicted)
-- and all remaining ones
doPredict :: [Item a] -> ([Item a], [Item a])
doPredict = partition expectsTerm where
  expectsTerm Item { rest = ts } = case ts of
    tok : _ -> tok == termTok
    [] -> False
-- | order items by start index, remaining tokens, and rule id
ordItem :: Item a -> Item a -> Ordering
ordItem a b = compare (key a) (key b) where
  key Item { index = i, rest = r, rule = n } = (i, r, n)
-- | merge two items considered equal by 'ordItem', recording both
-- argument lists as an ambiguity
ambigItems :: Item a -> Item a -> Item a
ambigItems i1@Item { ambigArgs = ams1, args = as1 }
    Item { ambigArgs = ams2, args = as2 } =
  let allAms = ams1 ++ ams2
  in i1 { ambigArgs = if null allAms then [as1, as2] else allAms }
-- | merge two 'ordItem'-sorted lists, joining equal items via 'ambigItems'
mergeItems :: [Item a] -> [Item a] -> [Item a]
mergeItems xs [] = xs
mergeItems [] ys = ys
mergeItems l1@(x : xs) l2@(y : ys) = case ordItem x y of
  LT -> x : mergeItems xs l2
  EQ -> ambigItems x y : mergeItems xs ys
  GT -> y : mergeItems l1 ys
-- | dynamically compute extra rules for a scanned token
type TokRules = Token -> Set.Set Rule
-- | the whole state for mixfix resolution
data Chart a = Chart
  { prevTable :: Table a -- completed items of earlier indices
  , currIndex :: Int -- number of tokens consumed so far
  , currItems :: ([Item a], [Item a]) -- (predicted, scannable) items
  , rules :: Rules -- the static rules
  , addRules :: TokRules -- token-dependent extra rules
  , solveDiags :: [Diagnosis] } -- accumulated diagnostics
{- | make one scan, complete, and predict step.
The first function adds a type to the result.
The second function filters based on argument and operator info.
If filtering yields 'Nothing' further filtering by precedence is applied. -}
nextChart :: (a -> a -> a) -> ToExpr a -> GlobalAnnos
  -> Chart a -> (a, Token) -> Chart a
nextChart addType toExpr ga st term@(_, tok) = let
  table = prevTable st
  idx = currIndex st
  igz = idx > 0
  (cItems, sItems) = currItems st
  Rules cRules sRules = rules st
  -- without predicted items (after the start) only scannable items
  -- survive; otherwise instantiate the scan rules for this token
  pItems = if null cItems && igz then sItems else
    map (mkItem idx) (Set.toList $ Set.union sRules $ addRules st tok)
    ++ sItems
  scannedItems = scan addType term pItems
  -- record the predicted items (plus post rules) in the table
  nextTable = if null cItems && igz then table else
    Map.insert idx (map (mkItem idx) (Set.toList cRules) ++ cItems) table
  completedItems = complete toExpr ga nextTable $ sortBy ordItem scannedItems
  nextIdx = idx + 1
  -- an empty scan (after the start) leaves the chart unchanged
  in if null pItems && igz then st else st
  { prevTable = nextTable
  , currIndex = nextIdx
  , currItems = doPredict completedItems
  , solveDiags =
      [ Diag Error ("unexpected mixfix token: " ++ tokStr tok) $ tokPos tok
      | null scannedItems ] ++ solveDiags st }
-- | add intermediate diagnostic messages (prepended to the stored ones)
mixDiags :: [Diagnosis] -> Chart a -> Chart a
mixDiags ds st = st { solveDiags = ds ++ solveDiags st }
-- | postfix and prefix rules
data Rules = Rules
  { postRules :: Set.Set Rule -- rules starting with the term non-terminal
  , scanRules :: Set.Set Rule } -- rules starting with a proper token
-- | rules without any entries
emptyRules :: Rules
emptyRules = Rules
  { postRules = Set.empty
  , scanRules = Set.empty }
-- | presort rules: those that are empty or start with the term
-- non-terminal become post rules, all others scan rules
partitionRules :: [Rule] -> Rules
partitionRules rs =
  let leadsWithTerm (_, _, ts) = case ts of
        [] -> True
        tok : _ -> tok == termTok
      (ps, ss) = partition leadsWithTerm rs
  in Rules (Set.fromList ps) (Set.fromList ss)
-- | create the initial chart from token-dependent and static rules
initChart :: TokRules -> Rules -> Chart a
initChart adder ruleS = Chart
  { prevTable = Map.empty
  , currIndex = 0
  , currItems = ([], [])
  , rules = ruleS
  , addRules = adder
  , solveDiags = [] }
-- | extract resolved result: exactly one item that spans the whole
-- input (index 0, no rest) yields the expression; none yields an
-- "expected further token" error; several yield ambiguity diagnostics
getResolved :: (a -> ShowS) -> Range -> ToExpr a -> Chart a -> Result a
getResolved pp p toExpr st = let
  (predicted, items') = currItems st
  ds = solveDiags st
  -- fall back to predicted items if scanning failed without diagnostics
  items = if null items' && null ds then predicted else items'
  in case items of
  [] -> assert (not $ null ds) $ Result ds Nothing
  _ -> let
    (finals, r1) = partition ((0 ==) . index) items
    (result, r2) = partition (null . rest) finals
    in case result of
    [] -> let
      -- report what tokens would have allowed the parse to continue
      expected = if null r2 then filter (not . null . rest) r1 else r2
      withpos = filter (not . isNullRange . posList) expected
      (q, errs) = if null withpos then (p, expected) else
        (concatMapRange (reverseRange . posList) withpos, withpos)
      in Result (Diag Error ("expected further mixfix token: "
           ++ show (take 5 $ nubOrd $ map (tokStr . head . rest) errs)) q : ds)
         Nothing
    [har] -> case ambigs har of
      [] -> case mkAmbigs toExpr har of
        [] -> Result ds $ Just $ fst $ mkExpr toExpr har
        ambAs -> Result (showAmbigs pp p (take 5 ambAs) : ds) Nothing
      ams -> Result (map (showAmbigs pp p) (take 5 ams) ++ ds) Nothing
    _ -> Result (showAmbigs pp p (map (fst . mkExpr toExpr) result) : ds)
         Nothing
-- | create an ambiguity diagnostic listing at most five of the
-- conflicting readings, one per line
showAmbigs :: (a -> ShowS) -> Range -> [a] -> Diagnosis
showAmbigs pp p as = Diag Error
  ("ambiguous mixfix term\n " ++ showSepList (showString "\n ") pp
   (take 5 as) "") p
| keithodulaigh/Hets | Common/Earley.hs | gpl-2.0 | 16,209 | 11 | 29 | 4,776 | 5,339 | 2,882 | 2,457 | 355 | 10 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.