code
stringlengths 5
1.03M
| repo_name
stringlengths 5
90
| path
stringlengths 4
158
| license
stringclasses 15
values | size
int64 5
1.03M
| n_ast_errors
int64 0
53.9k
| ast_max_depth
int64 2
4.17k
| n_whitespaces
int64 0
365k
| n_ast_nodes
int64 3
317k
| n_ast_terminals
int64 1
171k
| n_ast_nonterminals
int64 1
146k
| loc
int64 -1
37.3k
| cycloplexity
int64 -1
1.31k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.EC2.CreateVpc
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Creates a VPC with the specified CIDR block.
--
-- The smallest VPC you can create uses a /28 netmask (16 IP addresses), and
-- the largest uses a /16 netmask (65,536 IP addresses). To help you decide how
-- big to make your VPC, see <http://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/VPC_Subnets.html Your VPC and Subnets> in the /Amazon Virtual Private Cloud User Guide/.
--
-- By default, each instance you launch in the VPC has the default DHCP
-- options, which includes only a default DNS server that we provide
-- (AmazonProvidedDNS). For more information about DHCP options, see <http://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/VPC_DHCP_Options.html DHCP Options Sets> in the /Amazon Virtual Private Cloud User Guide/.
--
-- <http://docs.aws.amazon.com/AWSEC2/latest/APIReference/ApiReference-query-CreateVpc.html>
module Network.AWS.EC2.CreateVpc
(
-- * Request
CreateVpc
-- ** Request constructor
, createVpc
-- ** Request lenses
, cvCidrBlock
, cvDryRun
, cvInstanceTenancy
-- * Response
, CreateVpcResponse
-- ** Response constructor
, createVpcResponse
-- ** Response lenses
, cvrVpc
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.EC2.Types
import qualified GHC.Exts
-- | Request parameters for the EC2 @CreateVpc@ call.
data CreateVpc = CreateVpc
    { _cvCidrBlock       :: Text          -- ^ CIDR range for the new VPC
    , _cvDryRun          :: Maybe Bool    -- ^ optional dry-run flag; see 'cvDryRun'
    , _cvInstanceTenancy :: Maybe Tenancy -- ^ tenancy for instances launched into the VPC
    } deriving (Eq, Read, Show)

-- | 'CreateVpc' constructor. Only the CIDR block is required; the optional
-- fields default to 'Nothing'.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'cvCidrBlock' @::@ 'Text'
--
-- * 'cvDryRun' @::@ 'Maybe' 'Bool'
--
-- * 'cvInstanceTenancy' @::@ 'Maybe' 'Tenancy'
--
createVpc :: Text -- ^ 'cvCidrBlock'
          -> CreateVpc
createVpc p1 = CreateVpc
    { _cvCidrBlock       = p1
    , _cvDryRun          = Nothing
    , _cvInstanceTenancy = Nothing
    }
-- | The network range for the VPC, in CIDR notation. For example, '10.0.0.0/16'.
cvCidrBlock :: Lens' CreateVpc Text
cvCidrBlock = lens _cvCidrBlock (\s a -> s { _cvCidrBlock = a })

-- | Optional dry-run flag for the request.
-- NOTE(review): the original carries no haddock here; semantics are assumed
-- to follow the standard EC2 @DryRun@ convention - confirm against the
-- service API reference.
cvDryRun :: Lens' CreateVpc (Maybe Bool)
cvDryRun = lens _cvDryRun (\s a -> s { _cvDryRun = a })

-- | The supported tenancy options for instances launched into the VPC. A value of 'default' means that instances can be launched with any tenancy; a value of 'dedicated' means all instances launched into the VPC are launched as dedicated tenancy
-- instances regardless of the tenancy assigned to the instance at launch.
-- Dedicated tenancy instances run on single-tenant hardware.
--
-- Default: 'default'
cvInstanceTenancy :: Lens' CreateVpc (Maybe Tenancy)
cvInstanceTenancy =
    lens _cvInstanceTenancy (\s a -> s { _cvInstanceTenancy = a })
-- | Response payload of the @CreateVpc@ call.
newtype CreateVpcResponse = CreateVpcResponse
    { _cvrVpc :: Maybe Vpc -- ^ the created VPC, when the service returns one
    } deriving (Eq, Read, Show)

-- | 'CreateVpcResponse' constructor. All fields default to 'Nothing'.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'cvrVpc' @::@ 'Maybe' 'Vpc'
--
createVpcResponse :: CreateVpcResponse
createVpcResponse = CreateVpcResponse
    { _cvrVpc = Nothing
    }

-- | Information about the VPC.
cvrVpc :: Lens' CreateVpcResponse (Maybe Vpc)
cvrVpc = lens _cvrVpc (\s a -> s { _cvrVpc = a })
-- | EC2 query requests are all posted to the service root.
instance ToPath CreateVpc where
    toPath = const "/"

-- | Serialise the request fields as EC2 query-string parameters.
instance ToQuery CreateVpc where
    toQuery CreateVpc{..} = mconcat
        [ "CidrBlock"       =? _cvCidrBlock
        , "DryRun"          =? _cvDryRun
        , "InstanceTenancy" =? _cvInstanceTenancy
        ]

-- No extra headers beyond the defaults.
instance ToHeaders CreateVpc

instance AWSRequest CreateVpc where
    type Sv CreateVpc = EC2
    type Rs CreateVpc = CreateVpcResponse

    request  = post "CreateVpc"
    response = xmlResponse

-- | The response body is XML; the VPC element is optional.
instance FromXML CreateVpcResponse where
    parseXML x = CreateVpcResponse
        <$> x .@? "vpc"
|
kim/amazonka
|
amazonka-ec2/gen/Network/AWS/EC2/CreateVpc.hs
|
mpl-2.0
| 4,795 | 0 | 9 | 1,058 | 558 | 342 | 216 | 66 | 1 |
{-# LANGUAGE DeriveGeneric, StandaloneDeriving #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Main (main) where
import Control.Applicative ((<|>))
import Control.Exception (IOException, catch)
import Control.Monad (when)
import Data.Foldable (traverse_)
import Data.List (foldl')
import Data.Traversable (for)
import GHC.Generics (Generic)
import Prelude ()
import Prelude.Compat
import System.Directory (getDirectoryContents)
import System.Exit (exitFailure)
import System.FilePath
import Data.TreeDiff
import Data.TreeDiff.Golden
import qualified Options.Applicative as O
import Documentation.Haddock.Types
import qualified Documentation.Haddock.Parser as Parse
-- | Haddock document specialised to unit module info.
type Doc id = DocH () id

-- | One golden-test case: an @.input@ file and its expected @.parsed@ output.
data Fixture = Fixture
    { fixtureName   :: FilePath -- ^ path to the @.input@ file
    , fixtureOutput :: FilePath -- ^ path to the golden @.parsed@ file
    }
  deriving Show

-- | Success/total counters accumulated over a test run.
data Result = Result
    { _resultSuccess :: !Int -- ^ number of fixtures that passed
    , _resultTotal   :: !Int -- ^ number of fixtures executed
    }
  deriving Show
-- | Merge two result tallies by summing both counters.
combineResults :: Result -> Result -> Result
combineResults (Result okA totalA) (Result okB totalB) =
    Result (okA + okB) (totalA + totalB)
-- | Discover all fixtures: every @.input@ file under @fixtures/examples@,
-- paired with its @.parsed@ golden file.
readFixtures :: IO [Fixture]
readFixtures = do
    let dir = "fixtures/examples"
    entries <- getDirectoryContents dir
    return
        [ Fixture { fixtureName   = dir </> fp
                  , fixtureOutput = dir </> fp -<.> "parsed"
                  }
        | fp <- entries
        , takeExtension fp == ".input"
        ]
-- | Drive one golden comparison: obtain the expected and actual 'Expr's,
-- compare them, and report a one-fixture 'Result'.
--
-- If reading the expected value throws an 'IOException' (typically because
-- the golden file does not exist yet), the actual value is written out and
-- the fixture is accepted.
goldenFixture
    :: String                              -- ^ fixture name, for logging
    -> IO Expr                             -- ^ read the expected (golden) value
    -> IO Expr                             -- ^ compute the actual value
    -> (Expr -> Expr -> IO (Maybe String)) -- ^ compare; 'Just' carries a diff to print
    -> (Expr -> IO ())                     -- ^ write the golden file
    -> IO Result
goldenFixture name expect actual cmp wrt = do
    putStrLn $ "running " ++ name
    a <- actual
    e <- expect `catch` handler a
    mres <- cmp e a
    case mres of
        Nothing  -> return (Result 1 1)
        Just str -> do
            putStr str
            return (Result 0 1)
  where
    -- Missing/unreadable golden file: accept by writing the actual value
    -- and comparing it against itself.
    handler :: Expr -> IOException -> IO Expr
    handler a exc = do
        putStrLn $ "Caught " ++ show exc
        putStrLn "Accepting the test"
        wrt a
        return a
-- | Run every fixture, print a success/total summary, and exit non-zero if
-- any fixture failed.
runFixtures :: [Fixture] -> IO ()
runFixtures fixtures = do
    results <- for fixtures $ \(Fixture i o) -> do
        let name = takeBaseName i
        -- Parse the .input file lazily, on demand, inside the golden driver.
        let readDoc = do
                input <- readFile i
                return (parseString input)
        ediffGolden goldenFixture name o readDoc
    case foldl' combineResults (Result 0 0) results of
        Result s t -> do
            putStrLn $ "Fixtures: success " ++ show s ++ "; total " ++ show t
            when (s /= t) exitFailure
-- | Print the base name of every fixture, one per line.
listFixtures :: [Fixture] -> IO ()
listFixtures = traverse_ printName
  where
    printName (Fixture input _) = putStrLn (takeBaseName input)
-- | Regenerate every golden file from the current parser output.
acceptFixtures :: [Fixture] -> IO ()
acceptFixtures = traverse_ $ \(Fixture i o) -> do
    input <- readFile i
    let doc = parseString input
    -- Render with the same tree-diff pretty-printer the comparison uses,
    -- plus a trailing newline.
    let actual = show (prettyExpr $ toExpr doc) ++ "\n"
    writeFile o actual
-- | Parse a haddock comment body into the regular (identifier-agnostic)
-- 'Doc', discarding the parse metadata.
parseString :: String -> Doc String
parseString = Parse.toRegular . _doc . Parse.parseParas
-- | Command-line modes: run the fixtures, accept (regenerate) them, or list
-- them.
data Cmd = CmdRun | CmdAccept | CmdList

main :: IO ()
main = runCmd =<< O.execParser opts
  where
    opts = O.info (O.helper <*> cmdParser) O.fullDesc

    -- Defaults to 'CmdRun' when no flag is given.
    cmdParser :: O.Parser Cmd
    cmdParser = cmdRun <|> cmdAccept <|> cmdList <|> pure CmdRun

    cmdRun = O.flag' CmdRun $ mconcat
        [ O.long "run"
        , O.help "Run parser fixtures"
        ]
    cmdAccept = O.flag' CmdAccept $ mconcat
        [ O.long "accept"
        , O.help "Run & accept parser fixtures"
        ]
    cmdList = O.flag' CmdList $ mconcat
        [ O.long "list"
        , O.help "List fixtures"
        ]
-- | Dispatch a parsed command: each mode reads the fixture list first and
-- then hands it to the matching action.
runCmd :: Cmd -> IO ()
runCmd cmd = readFixtures >>= case cmd of
    CmdRun    -> runFixtures
    CmdList   -> listFixtures
    CmdAccept -> acceptFixtures
-------------------------------------------------------------------------------
-- Orphans
-------------------------------------------------------------------------------
-- 'ToExpr' (tree-diff) instances for the haddock AST, derived via 'Generic'.
-- These are orphans on purpose: this test executable cannot add instances
-- to haddock-library itself.
deriving instance Generic (DocH mod id)
instance (ToExpr mod, ToExpr id) => ToExpr (DocH mod id)

deriving instance Generic (Header id)
instance ToExpr id => ToExpr (Header id)

deriving instance Generic Hyperlink
instance ToExpr Hyperlink

deriving instance Generic Picture
instance ToExpr Picture

deriving instance Generic Example
instance ToExpr Example

deriving instance Generic (Table id)
instance ToExpr id => ToExpr (Table id)

deriving instance Generic (TableRow id)
instance ToExpr id => ToExpr (TableRow id)

deriving instance Generic (TableCell id)
instance ToExpr id => ToExpr (TableCell id)
|
Fuuzetsu/haddock
|
haddock-library/fixtures/Fixtures.hs
|
bsd-2-clause
| 4,472 | 0 | 19 | 1,083 | 1,462 | 736 | 726 | 125 | 2 |
module Print3 where
-- | The greeting printed first by 'main': "hello world".
myGreeting :: String
myGreeting = concat ["hell", "o world"]
-- | Print the module-level greeting followed by a locally defined one.
main :: IO ()
main = mapM_ putStrLn [myGreeting, secondGreeting]
  where
    secondGreeting = concat ["hellO", " ", "wOrld"]
-- | Area of a circle given its diameter @d@.
-- The explicit signature pins the (previously inferred) polymorphic type.
area :: Floating a => a -> a
area d = pi * (r * r)
  where r = d / 2
|
punitrathore/haskell-first-principles
|
src/Print3.hs
|
bsd-3-clause
| 246 | 0 | 8 | 57 | 94 | 50 | 44 | 10 | 1 |
module FlagSpec where
import Control.Applicative
import Language.Haskell.GhcMod
import Test.Hspec
import TestUtils
-- | The ghc-mod @flags@ listing must include @-fno-warn-orphans@.
spec :: Spec
spec = do
    describe "flags" $ do
        it "contains at least `-fno-warn-orphans'" $ do
            -- 'flags' yields one flag per line; split before asserting.
            f <- runD $ lines <$> flags
            f `shouldContain` ["-fno-warn-orphans"]
|
cabrera/ghc-mod
|
test/FlagSpec.hs
|
bsd-3-clause
| 314 | 0 | 15 | 76 | 80 | 43 | 37 | 11 | 1 |
module Plugin where
-- user doesn't import the API
-- and provides a polymorphic value
-- import API
-- resource :: Interface
--
-- should pass type check, and dump core
--
-- resource :: Num a => a
-- import API
-- | A polymorphic numeric value exported by the plugin; deliberately typed
-- @Num a => a@ rather than a concrete Interface (see comments above).
resource :: Num a => a
resource = fromInteger 7
|
abuiles/turbinado-blog
|
tmp/dependencies/hs-plugins-1.3.1/testsuite/pdynload/spj1/Plugin.hs
|
bsd-3-clause
| 254 | 0 | 6 | 56 | 30 | 21 | 9 | 3 | 1 |
module MatrixMarket
( MatrixList
, mmReadFile
, mmWriteFile
) where
-- reads/writes MatrixMarket files to a generic matrix data type
import Data.Char (toLower)
import Data.Complex
import Data.List (intersperse, nub)
import qualified Data.Map as Map (fromList, lookup, Map)
import Data.Maybe (fromJust, isNothing)
import MatrixList (MatrixList)
import System.IO (hClose, hPutStrLn, openFile, IOMode(WriteMode))
-- | The two MatrixMarket storage formats: dense ("array") and sparse
-- ("coordinate").
data MMFormat = Array | Coordinate

-- | MatrixMarket scalar field types handled by this module.
data MMType = MMInt | MMReal | MMComplex

-- | Types that can be parsed from MatrixMarket tokens.
-- 'mmReads' follows the 'reads' convention: an empty result list means a
-- parse failure; 'mmRead' is the partial variant.
class MMReadable a where
    mmRead  :: String -> a
    mmReads :: String -> [(a, String)]
-- Primitive numeric types parse with the standard 'Read' machinery.
instance MMReadable Int where
    mmRead  = read
    mmReads = reads

instance MMReadable Float where
    mmRead  = read
    mmReads = reads

instance MMReadable Double where
    mmRead  = read
    mmReads = reads
-- | Complex numbers are written in MatrixMarket as two whitespace-separated
-- components: @re im@.
instance (MMReadable a, RealFloat a) => MMReadable (Complex a) where
    -- NOTE(review): partial - the [a, b] pattern fails unless the input has
    -- exactly two words; 'mmReads' below validates, 'mmRead' does not.
    mmRead strz = a :+ b where [a, b] = fmap mmRead (words strz)
    mmReads str | len == 2  = if null xread || null yread then []
                              else [(x :+ y, "")]
                | otherwise = []
      where len            = length (words str)
            [xread, yread] = fmap mmReads (words str)
            [x, y]         = fmap (fst . head) [xread, yread]
-- Format and type keywords are matched case-insensitively.
-- NOTE(review): in both instances 'mmRead' is (fst . head . mmReads), which
-- raises on an unrecognized keyword; callers are expected to use 'mmReads'
-- when failure is possible.
instance MMReadable MMFormat where
    mmReads name =
        case map toLower name of "array"      -> [(Array, "")]
                                 "coordinate" -> [(Coordinate, "")]
                                 _            -> []
    mmRead = fst . head . mmReads

instance MMReadable MMType where
    mmReads name =
        case map toLower name of "integer" -> [(MMInt, "")]
                                 "real"    -> [(MMReal, "")]
                                 "complex" -> [(MMComplex, "")]
                                 _         -> []
    mmRead = fst . head . mmReads
-- | Types that can be rendered as MatrixMarket tokens.
class MMShowable a where
    mmShow :: a -> String

-- Primitive numeric types render with 'show'.
instance MMShowable Int where
    mmShow = show

instance MMShowable Float where
    mmShow = show

instance MMShowable Double where
    mmShow = show
-- | Complex values render as two space-separated components: @re im@.
-- ('unwords' replaces the equivalent hand-rolled
-- @concat . intersperse " "@.)
instance (MMShowable a, RealFloat a) => MMShowable (Complex a) where
    mmShow z = unwords (fmap mmShow [realPart z, imagPart z])
-- Keyword renderings are the lower-case MatrixMarket header words.
instance MMShowable MMFormat where
    mmShow Array      = "array"
    mmShow Coordinate = "coordinate"

instance MMShowable MMType where
    mmShow MMInt     = "integer"
    mmShow MMReal    = "real"
    mmShow MMComplex = "complex"
-- | Read a MatrixMarket file into a 'MatrixList'.
-- (@m >>= return . f@ simplified to 'fmap'.)
-- NOTE(review): uses lazy 'readFile', so parse errors may surface only when
-- the result is forced.
mmReadFile :: FilePath -> IO (MatrixList Double)
mmReadFile filePath = fmap (mmToMatrixList filePath) (readFile filePath)
-- | Write a 'MatrixList' to @filePath@ in the given MatrixMarket format
-- ("array" or "coordinate"). Raises 'error' for an unrecognized format name.
mmWriteFile :: MatrixList Double -> String -> FilePath -> IO ()
mmWriteFile matrixList format filePath = do
    let action = Map.lookup format mmWriteCmds
    let list = maybe (error $ "Unrecognized format " ++ format)
                     ($ matrixList) action
    handle <- openFile filePath WriteMode
    -- MatrixMarket banner line; this writer always declares "real general".
    hPutStrLn handle $ "%%MatrixMarket matrix " ++ format ++ " real general"
    mapM_ (hPutStrLn handle) list
    hClose handle
-- | Parse MatrixMarket file contents. The header's third word selects the
-- format reader from 'mmReadCmds'; comment lines (starting with \'%\') and
-- blank lines are dropped before dispatch. Every failure mode calls 'error'
-- with @filePath@ in the message.
mmToMatrixList :: FilePath -> String -> MatrixList Double
mmToMatrixList filePath contents
    | null (words contents) = error $ filePath ++ " is empty"
    | length first < 4      = error $ filePath ++ " header is invalid"
    | null rest             = error $ filePath ++ " has no contents"
    | otherwise             = maybe errormsg (($ filePath) . ($ rest)) action
  where fileLines = lines contents
        first     = words . head $ fileLines
        -- Tokenised non-comment, non-blank lines after the header line.
        rest      = filter ((/='%') . head . head) . filter (not . null) .
                        fmap words $ tail fileLines
        action    = Map.lookup (fmap toLower (first !! 2)) mmReadCmds
        errormsg  = error $ filePath ++ " has unrecognized format " ++
                        (first !! 2)
-- | Dispatch table from (lower-cased) format keyword to reader.
mmReadCmds :: Map.Map String ([[String]] -> FilePath -> MatrixList Double)
mmReadCmds = Map.fromList [
      ("array",      arrayToMatrixList)
    , ("coordinate", coordinateToMatrixList)
    ]
-- | Build a 'MatrixList' from dense "array" format lines: a 2-token size
-- line followed by one value per line, in column-major order. Zero entries
-- are dropped from the result. All validation failures call 'error' with
-- @filePath@ in the message.
arrayToMatrixList :: [[String]] -> FilePath -> MatrixList Double
arrayToMatrixList lineList filePath
    | length firstLine /= 2 = error $ filePath ++ " matrix size invalid"
    | any ((/= 1) . length) valueLines = error $ filePath ++ " has invalid value lines"
    | any null size = error $ filePath ++ " matrix size invalid"
    | length valueList /= nrows * ncols = error $ filePath ++ " has wrong matrix size"
    | any null valueReads = error $ filePath ++ " has invalid values"
    | otherwise = (nrows, ncols, ijxs)
  where (firstLine, valueLines) = (head lineList, tail lineList)
        size = fmap reads firstLine :: [[(Int, String)]]
        [nrows, ncols] = fmap (fst . head) size
        valueList = concat valueLines
        valueReads = fmap reads valueList :: [[(Double, String)]]
        values = fmap (fst . head) valueReads
        -- Dense data is column-major: the column index varies slowest.
        indices = [(i, j) | j <- [1..ncols], i <- [1..nrows]]
        -- Keep only nonzero entries, tagged with their (row, col) index.
        ijxs = fmap (\((i, j), x) -> (i, j, x)) . filter ((/= 0) . snd) .
                   zip indices $ values
-- | Build a 'MatrixList' from sparse "coordinate" format lines: a 3-token
-- size line @rows cols nonzeros@ followed by @i j x@ entry lines. Checks
-- entry count, index validity/uniqueness/range, and value syntax; every
-- failure calls 'error' with @filePath@ in the message.
coordinateToMatrixList :: [[String]] -> FilePath -> MatrixList Double
coordinateToMatrixList lineList filePath
    | any ((/= 3) . length) lineList = error $ filePath ++ " has wrong-length line"
    | any null firstLineNums = error $ filePath ++ " has invalid size line"
    | length entryLines /= nonzeros = error $ filePath ++ " has wrong no. of nonzeros"
    | any (any null) indexReads = error $ filePath ++ " has invalid indices"
    | indices /= nub indices = error $ filePath ++ " has duplicate indices"
    | maxRow > nrows || maxCol > ncols = error $ filePath ++ " has indices outside range"
    | any null valueReads = error $ filePath ++ " has invalid values"
    | otherwise = (nrows, ncols, if nonzeros == 0 then [] else ijxs)
  where (firstLine, entryLines) = (head lineList, tail lineList)
        firstLineNums = fmap reads firstLine :: [[(Int, String)]]
        firstLineVals = fmap (fst . head) firstLineNums
        [nrows, ncols] = take 2 firstLineVals
        nonzeros = firstLineVals !! 2
        -- Each entry splits into two index tokens and one value token.
        splitEntries = fmap (splitAt 2) entryLines
        indexLines = fmap fst splitEntries
        indexReads = fmap (fmap reads) indexLines :: [[[(Int, String)]]]
        indices = fmap (fmap $ fst . head) indexReads
        -- With no entries there is nothing to bound; (1,1) trivially passes
        -- the range check.
        [maxRow, maxCol] = if nonzeros == 0 then [1,1] else
            fmap maximum [fmap (!! 0) indices, fmap (!! 1) indices]
        valueLines = concat . fmap snd $ splitEntries
        valueReads = fmap reads valueLines :: [[(Double, String)]]
        values = fmap (fst . head) valueReads
        ijxs = zipWith (\[i,j] x -> (i,j,x)) indices values
-- | Dispatch table from format keyword to writer.
mmWriteCmds :: Map.Map String (MatrixList Double -> [String])
mmWriteCmds = Map.fromList [
      ("array",      matrixListToArray)
    , ("coordinate", matrixListToCoordinate)
    ]
-- | Render a 'MatrixList' in dense "array" format: a size line followed by
-- one value per line in column-major order, with 0 for absent entries.
-- (The partial isNothing/'fromJust' pair is replaced by a total 'maybe'.)
matrixListToArray :: MatrixList Double -> [String]
matrixListToArray (m, n, ijxs) = joinStr [show m, show n] : showVals
  where hashTable = Map.fromList [((i, j), x) | (i, j, x) <- ijxs]
        -- Column-major traversal: the column index varies slowest.
        indices  = [(i, j) | j <- [1..n], i <- [1..m]]
        showVals = fmap (show . value) indices
        -- Entries absent from the sparse list are zeros.
        value pair = maybe 0 id (Map.lookup pair hashTable)
-- | Render a 'MatrixList' in sparse "coordinate" format: a size line
-- @rows cols nonzeros@ followed by one @i j x@ line per stored entry.
matrixListToCoordinate :: MatrixList Double -> [String]
matrixListToCoordinate (m, n, ijxs) =
    (joinStr [show m, show n, show $ length ijxs]) : (fmap pairToStr ijxs)
-- | Join tokens with single spaces. Equivalent to the previous
-- @concat . intersperse " "@, but uses the standard 'unwords'.
joinStr :: [String] -> String
joinStr = unwords
-- | Render a triple as three space-separated 'show'n fields.
pairToStr :: (Show a, Show b, Show c) => (a,b,c) -> String
pairToStr (i, j, x) = unwords [show i, show j, show x]
|
FreeON/spammpack
|
src-Haskell/MatrixMarket.hs
|
bsd-3-clause
| 8,262 | 0 | 14 | 2,792 | 2,666 | 1,417 | 1,249 | 154 | 3 |
{- |
Module : $Header$
Description : Handling of extended parameters
Copyright : (c) Ewaryst Schulz, DFKI Bremen 2010
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : experimental
Portability : portable
This module defines an ordering on extended parameters and other analysis tools.
Extended parameters may be based on one of the following
relations:
> =, <=, >=, !=, <, >, -|
We work more generally (compared to CSL.ExtendedParameter) with a
sequence of intervals which generalizes sets representable by
the given relations and with the advantage that this
representation is closed under union, intersection and
complement. Such sequences are represented by an ordered
sequence of intervals.
A sequence of intervals [a_i, b_i] is normalized when for all i
b_i + 1 < a_{i+1}
This implies that this sequence is
1. nonoverlapping (I_i and I_j are disjoint for i/=j)
2. noncontinuing (The union of I_i and I_{i+1} is not an interval)
3. ordered (i<j => !x,y. x in I_i and y in I_j => x < y)
-}
module CSL.GeneralExtendedParameter where
import Data.List (intercalate)
import CSL.BoolBasic
import CSL.TreePO
import CSL.AS_BASIC_CSL
import Common.Id (tokStr)
{- ----------------------------------------------------------------------
Datatypes for efficient Extended Parameter comparison
---------------------------------------------------------------------- -}
-- | An integer endpoint extended with -infinity ('LeftInf') and +infinity
-- ('RightInf'), used as interval bounds.
data ExtNumber = LeftInf | RightInf | Regular APInt deriving (Show, Eq)

-- | Total order: @LeftInf < Regular i < RightInf@, with 'Regular' values
-- ordered as their underlying integers.
instance Ord ExtNumber where
    compare a b | a == b = EQ
                | otherwise =
                    case (a, b) of
                      (LeftInf, _)  -> LT
                      (RightInf, _) -> GT
                      (Regular i, Regular j) -> compare i j
                      -- Remaining cases have an infinity on the right:
                      -- flip the arguments and invert the outcome.
                      _ -> swapCompare $ compare b a
-- | A closed interval over extended endpoints, as (lower, upper).
type BaseInterval = (ExtNumber, ExtNumber)

-- | Interval (-inf, i], i.e. the constraint @x <= i@.
leftOpen :: APInt -> BaseInterval
leftOpen i = (LeftInf, Regular i)

-- | Interval [i, +inf), i.e. the constraint @x >= i@.
rightOpen :: APInt -> BaseInterval
rightOpen i = (Regular i, RightInf)

-- | Interval [i, j], i.e. the constraint @i <= x <= j@.
between :: APInt -> APInt -> BaseInterval
between i j = (Regular i, Regular j)
-- All methods in the following presuppose normalized expressions

-- | Normalized generalized representation of an extended parameter
-- constraint: an ordered, non-overlapping, non-adjacent sequence of
-- intervals (see the module header for the normalization conditions).
type EPExp = [BaseInterval]

-- | Render one interval as a relational constraint on the parameter name @s@.
-- A fully unconstrained interval and malformed endpoint combinations raise
-- 'error'.
showBaseInterval :: String -> BaseInterval -> String
showBaseInterval _ (LeftInf, RightInf) = error "showBaseInterval: unconstrained expression"
showBaseInterval s (LeftInf, Regular i) = concat [s, " <= ", show i]
showBaseInterval s (Regular i, RightInf) = concat [s, " >= ", show i]
showBaseInterval s (Regular i, Regular j)
    | i == j = concat [s, " = ", show j]
    | otherwise = concat [show i, " <= ", s, " <= ", show j]
-- A lower bound of RightInf or an upper bound of LeftInf cannot occur in a
-- well-formed interval.
showBaseInterval _ ep = error $ "malformed expression: " ++ show ep

-- | Render a named 'EPExp' as a disjunction of interval constraints.
showEP :: (String, EPExp) -> String
showEP (s, ep) = intercalate " \\/ " $ map (showBaseInterval s) ep
-- | Boolean representation of an 'EPExp' for the named parameter.
-- Unimplemented stub: every call raises an error.
toBoolRep :: String -> EPExp -> BoolRep
toBoolRep = error "TODO"
-- | Conversion function into the more efficient representation.
-- Returns 'Nothing' for the "-|" (absent/undefined) relation; unknown
-- relation strings raise an 'error'.
toEPExp :: EXTPARAM -> Maybe (String, EPExp)
toEPExp (EP t r i) =
    let l = case r of
              "<=" -> [leftOpen i]
              "<"  -> [leftOpen $ i - 1]
              ">=" -> [rightOpen i]
              ">"  -> [rightOpen $ i + 1]
              "="  -> [between i i]
              -- x /= i  is encoded as  x <= i-1  \/  x >= i+1
              "!=" -> [leftOpen $ i - 1, rightOpen $ i + 1]
              "-|" -> []
              _    -> error $ "toEPExp: unsupported relation: " ++ r
    in if null l then Nothing else Just (tokStr t, l)
{- ----------------------------------------------------------------------
Extended Parameter comparison (subset-comparison)
---------------------------------------------------------------------- -}
-- | True when the first interval ends no later than the second one does
-- (only the upper bounds are compared).
leftOf :: BaseInterval -> BaseInterval -> Bool
leftOf (_, upper1) (_, upper2) = upper1 <= upper2
-- | Subset ordering of two base intervals: equal, strict subset (LT),
-- strict superset (GT), disjoint, or overlapping.
-- NOTE(review): the guards are asymmetric - for a == c with b < d
-- (i1 a prefix of i2, hence a subset) this returns Overlap rather than
-- Comparable LT, while the mirrored case returns GT; confirm whether that
-- is intended before relying on LT results.
compareBI :: BaseInterval -> BaseInterval -> SetOrdering
compareBI i1@(a, b) i2@(c, d)
    | i1 == i2       = Comparable EQ
    | b < c || a > d = Incomparable Disjoint
    | a <= c         = if b < d then Incomparable Overlap else Comparable GT
    | b <= d         = Comparable LT
    | otherwise      = Incomparable Overlap
-- | Compare one base interval against a whole (normalized, ordered) 'EPExp'.
compareBIEP :: BaseInterval -> EPExp -> SetOrdering
-- Anything is a superset of the empty interval set.
compareBIEP _ [] = Comparable GT
compareBIEP i1 [i2] = compareBI i1 i2
compareBIEP i1 (i2 : l) =
    case compareBI i1 i2 of
      Incomparable Disjoint ->
          -- The sequence is ordered: once i1 lies left of i2 it is disjoint
          -- from all remaining intervals too, so stop early.
          if leftOf i1 i2 then Incomparable Disjoint else compareBIEP i1 l
      Incomparable Overlap -> Incomparable Overlap
      Comparable EQ -> Comparable LT -- l has here at least length one!
      Comparable LT -> Comparable LT
      Comparable GT -> case compareBIEP i1 l of
          -- EQ and LT is here an impossible outcome
          Comparable GT -> Comparable GT
          _ -> Incomparable Overlap
-- TODO: implement this comparison procedure
-- | Compares two 'EPExp': they are incomparable if they overlap or are
-- disjoint. Only the empty-sequence cases are implemented so far; the
-- general case is still an error stub (see the sketch below).
compareEP :: EPExp -> EPExp -> SetOrdering
compareEP [] [] = Comparable EQ
compareEP _ [] = Comparable GT
compareEP [] _ = Comparable LT
compareEP _ _ = error "GeneralExtendedParameter: TODO"
{-
Draft sketch for the unimplemented general case:
compareEP ep1@(i1:l1) ep2@(i2:l2) =
  case compareBI i1 i2 of
    Comparable EQ -> case compareEP l1 l2 of ...
  i1 == i2 -> if compareEP l1 l2 is Disjoint then Overlap, else that result
  i1 > i2  -> if compareEP ...
-}
|
mariefarrell/Hets
|
CSL/GeneralExtendedParameter.hs
|
gpl-2.0
| 5,377 | 0 | 14 | 1,352 | 1,186 | 612 | 574 | 74 | 9 |
{-
%
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[TcExpr]{Typecheck an expression}
-}
{-# LANGUAGE CPP, TupleSections, ScopedTypeVariables #-}
{-# LANGUAGE FlexibleContexts #-}
module TcExpr ( tcPolyExpr, tcMonoExpr, tcMonoExprNC,
tcInferSigma, tcInferSigmaNC, tcInferRho, tcInferRhoNC,
tcSyntaxOp, tcSyntaxOpGen, SyntaxOpType(..), synKnownType,
tcCheckId,
addExprErrCtxt,
getFixedTyVars ) where
#include "HsVersions.h"
import {-# SOURCE #-} TcSplice( tcSpliceExpr, tcTypedBracket, tcUntypedBracket )
import THNames( liftStringName, liftName )
import HsSyn
import TcHsSyn
import TcRnMonad
import TcUnify
import BasicTypes
import Inst
import TcBinds ( chooseInferredQuantifiers, tcLocalBinds )
import TcSigs ( tcUserTypeSig, tcInstSig )
import TcSimplify ( simplifyInfer, InferMode(..) )
import FamInst ( tcGetFamInstEnvs, tcLookupDataFamInst )
import FamInstEnv ( FamInstEnvs )
import RnEnv ( addUsedGRE, addNameClashErrRn
, unknownSubordinateErr )
import TcEnv
import TcArrows
import TcMatches
import TcHsType
import TcPatSyn( tcPatSynBuilderOcc, nonBidirectionalErr )
import TcPat
import TcMType
import TcType
import DsMonad
import Id
import IdInfo
import ConLike
import DataCon
import PatSyn
import Name
import NameEnv
import NameSet
import RdrName
import TyCon
import Type
import TcEvidence
import VarSet
import TysWiredIn
import TysPrim( intPrimTy )
import PrimOp( tagToEnumKey )
import PrelNames
import MkId ( proxyHashId )
import DynFlags
import SrcLoc
import Util
import VarEnv ( emptyTidyEnv )
import ListSetOps
import Maybes
import Outputable
import FastString
import Control.Monad
import Class(classTyCon)
import UniqFM ( nonDetEltsUFM )
import qualified GHC.LanguageExtensions as LangExt
import Data.Function
import Data.List
import Data.Either
import qualified Data.Set as Set
{-
************************************************************************
* *
\subsection{Main wrappers}
* *
************************************************************************
-}
-- | Check an expression against a possibly-polymorphic expected type,
-- skolemising the type before descending into the expression.
tcPolyExpr, tcPolyExprNC
  :: LHsExpr Name        -- Expression to type check
  -> TcSigmaType         -- Expected type (could be a polytype)
  -> TcM (LHsExpr TcId)  -- Generalised expr with expected type

-- tcPolyExpr is a convenient place (frequent but not too frequent)
-- place to add context information.
-- The NC version does not do so, usually because the caller wants
-- to do so himself.

tcPolyExpr   expr res_ty = tc_poly_expr    expr (mkCheckExpType res_ty)
tcPolyExprNC expr res_ty = tc_poly_expr_nc expr (mkCheckExpType res_ty)

-- these versions take an ExpType
tc_poly_expr, tc_poly_expr_nc :: LHsExpr Name -> ExpSigmaType -> TcM (LHsExpr TcId)
tc_poly_expr expr res_ty
  = addExprErrCtxt expr $
    do { traceTc "tcPolyExpr" (ppr res_ty); tc_poly_expr_nc expr res_ty }

tc_poly_expr_nc (L loc expr) res_ty
  = do { traceTc "tcPolyExprNC" (ppr res_ty)
       ; (wrap, expr')
           <- tcSkolemiseET GenSigCtxt res_ty $ \ res_ty ->
              setSrcSpan loc $
                -- NB: setSrcSpan *after* skolemising, so we get better
                -- skolem locations
                tcExpr expr res_ty
       ; return $ L loc (mkHsWrap wrap expr') }
---------------
-- | Check an expression against an expected *rho* type (no top-level
-- foralls). 'tcMonoExpr' pushes an error context; the NC variant does not.
tcMonoExpr, tcMonoExprNC
    :: LHsExpr Name      -- Expression to type check
    -> ExpRhoType        -- Expected type
                         -- Definitely no foralls at the top
    -> TcM (LHsExpr TcId)

tcMonoExpr expr res_ty
  = addErrCtxt (exprCtxt expr) $
    tcMonoExprNC expr res_ty

tcMonoExprNC (L loc expr) res_ty
  = setSrcSpan loc $
    do { expr' <- tcExpr expr res_ty
       ; return (L loc expr') }
---------------
tcInferSigma, tcInferSigmaNC :: LHsExpr Name -> TcM ( LHsExpr TcId
                                                    , TcSigmaType )
-- Infer a *sigma*-type: the result is not instantiated.

tcInferSigma expr = addErrCtxt (exprCtxt expr) (tcInferSigmaNC expr)

tcInferSigmaNC (L loc expr)
  = setSrcSpan loc $
    do { (expr', sigma) <- tcInferNoInst (tcExpr expr)
       ; return (L loc expr', sigma) }
tcInferRho, tcInferRhoNC :: LHsExpr Name -> TcM (LHsExpr TcId, TcRhoType)
-- Infer a *rho*-type. The return type is always (shallowly) instantiated.

tcInferRho expr = addErrCtxt (exprCtxt expr) (tcInferRhoNC expr)

tcInferRhoNC expr
  = do { (expr', sigma) <- tcInferSigmaNC expr
         -- Instantiate any outer foralls/contexts of the inferred sigma-type.
       ; (wrap, rho) <- topInstantiate (exprCtOrigin (unLoc expr)) sigma
       ; return (mkLHsWrap wrap expr', rho) }
{-
************************************************************************
* *
tcExpr: the main expression typechecker
* *
************************************************************************
NB: The res_ty is always deeply skolemised.
-}
tcExpr :: HsExpr Name -> ExpRhoType -> TcM (HsExpr TcId)
tcExpr (HsVar (L _ name)) res_ty = tcCheckId name res_ty
tcExpr (HsUnboundVar uv) res_ty = tcUnboundId uv res_ty
tcExpr e@(HsApp {}) res_ty = tcApp1 e res_ty
tcExpr e@(HsAppType {}) res_ty = tcApp1 e res_ty
tcExpr e@(HsLit lit) res_ty = do { let lit_ty = hsLitType lit
; tcWrapResult e (HsLit lit) lit_ty res_ty }
tcExpr (HsPar expr) res_ty = do { expr' <- tcMonoExprNC expr res_ty
; return (HsPar expr') }
tcExpr (HsSCC src lbl expr) res_ty
= do { expr' <- tcMonoExpr expr res_ty
; return (HsSCC src lbl expr') }
tcExpr (HsTickPragma src info srcInfo expr) res_ty
= do { expr' <- tcMonoExpr expr res_ty
; return (HsTickPragma src info srcInfo expr') }
tcExpr (HsCoreAnn src lbl expr) res_ty
= do { expr' <- tcMonoExpr expr res_ty
; return (HsCoreAnn src lbl expr') }
tcExpr (HsOverLit lit) res_ty
= do { lit' <- newOverloadedLit lit res_ty
; return (HsOverLit lit') }
tcExpr (NegApp expr neg_expr) res_ty
= do { (expr', neg_expr')
<- tcSyntaxOp NegateOrigin neg_expr [SynAny] res_ty $
\[arg_ty] ->
tcMonoExpr expr (mkCheckExpType arg_ty)
; return (NegApp expr' neg_expr') }
tcExpr e@(HsIPVar x) res_ty
= do { {- Implicit parameters must have a *tau-type* not a
type scheme. We enforce this by creating a fresh
type variable as its type. (Because res_ty may not
be a tau-type.) -}
ip_ty <- newOpenFlexiTyVarTy
; let ip_name = mkStrLitTy (hsIPNameFS x)
; ipClass <- tcLookupClass ipClassName
; ip_var <- emitWantedEvVar origin (mkClassPred ipClass [ip_name, ip_ty])
; tcWrapResult e (fromDict ipClass ip_name ip_ty (HsVar (noLoc ip_var)))
ip_ty res_ty }
where
-- Coerces a dictionary for `IP "x" t` into `t`.
fromDict ipClass x ty = HsWrap $ mkWpCastR $
unwrapIP $ mkClassPred ipClass [x,ty]
origin = IPOccOrigin x
tcExpr e@(HsOverLabel l) res_ty -- See Note [Type-checking overloaded labels]
= do { isLabelClass <- tcLookupClass isLabelClassName
; alpha <- newOpenFlexiTyVarTy
; let lbl = mkStrLitTy l
pred = mkClassPred isLabelClass [lbl, alpha]
; loc <- getSrcSpanM
; var <- emitWantedEvVar origin pred
; let proxy_arg = L loc (mkHsWrap (mkWpTyApps [typeSymbolKind, lbl])
(HsVar (L loc proxyHashId)))
tm = L loc (fromDict pred (HsVar (L loc var))) `HsApp` proxy_arg
; tcWrapResult e tm alpha res_ty }
where
-- Coerces a dictionary for `IsLabel "x" t` into `Proxy# x -> t`.
fromDict pred = HsWrap $ mkWpCastR $ unwrapIP pred
origin = OverLabelOrigin l
tcExpr (HsLam match) res_ty
= do { (match', wrap) <- tcMatchLambda herald match_ctxt match res_ty
; return (mkHsWrap wrap (HsLam match')) }
where
match_ctxt = MC { mc_what = LambdaExpr, mc_body = tcBody }
herald = sep [ text "The lambda expression" <+>
quotes (pprSetDepth (PartWay 1) $
pprMatches match),
-- The pprSetDepth makes the abstraction print briefly
text "has"]
tcExpr e@(HsLamCase matches) res_ty
= do { (matches', wrap)
<- tcMatchLambda msg match_ctxt matches res_ty
-- The laziness annotation is because we don't want to fail here
-- if there are multiple arguments
; return (mkHsWrap wrap $ HsLamCase matches') }
where
msg = sep [ text "The function" <+> quotes (ppr e)
, text "requires"]
match_ctxt = MC { mc_what = CaseAlt, mc_body = tcBody }
tcExpr e@(ExprWithTySig expr sig_ty) res_ty
= do { let loc = getLoc (hsSigWcType sig_ty)
; sig_info <- checkNoErrs $ -- Avoid error cascade
tcUserTypeSig loc sig_ty Nothing
; (expr', poly_ty) <- tcExprSig expr sig_info
; let expr'' = ExprWithTySigOut expr' sig_ty
; tcWrapResult e expr'' poly_ty res_ty }
{-
Note [Type-checking overloaded labels]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Recall that (in GHC.OverloadedLabels) we have
class IsLabel (x :: Symbol) a where
fromLabel :: Proxy# x -> a
When we see an overloaded label like `#foo`, we generate a fresh
variable `alpha` for the type and emit an `IsLabel "foo" alpha`
constraint. Because the `IsLabel` class has a single method, it is
represented by a newtype, so we can coerce `IsLabel "foo" alpha` to
`Proxy# "foo" -> alpha` (just like for implicit parameters). We then
apply it to `proxy#` of type `Proxy# "foo"`.
That is, we translate `#foo` to `fromLabel (proxy# :: Proxy# "foo")`.
-}
{-
************************************************************************
* *
Infix operators and sections
* *
************************************************************************
Note [Left sections]
~~~~~~~~~~~~~~~~~~~~
Left sections, like (4 *), are equivalent to
\ x -> (*) 4 x,
or, if PostfixOperators is enabled, just
(*) 4
With PostfixOperators we don't actually require the function to take
two arguments at all. For example, (x `not`) means (not x); you get
postfix operators! Not Haskell 98, but it's less work and kind of
useful.
Note [Typing rule for ($)]
~~~~~~~~~~~~~~~~~~~~~~~~~~
People write
runST $ blah
so much, where
runST :: (forall s. ST s a) -> a
that I have finally given in and written a special type-checking
rule just for saturated applications of ($).
* Infer the type of the first argument
* Decompose it; should be of form (arg2_ty -> res_ty),
where arg2_ty might be a polytype
* Use arg2_ty to typecheck arg2
Note [Typing rule for seq]
~~~~~~~~~~~~~~~~~~~~~~~~~~
We want to allow
x `seq` (# p,q #)
which suggests this type for seq:
seq :: forall (a:*) (b:Open). a -> b -> b,
with (b:Open) meaning that b can be instantiated with an unboxed
tuple. The trouble is that this might accept a partially-applied
'seq', and I'm just not certain that would work. I'm only sure it's
only going to work when it's fully applied, so it turns into
case x of _ -> (# p,q #)
So it seems more uniform to treat 'seq' as if it was a language
construct.
See also Note [seqId magic] in MkId
-}
-- Infix applications (arg1 `op` arg2).  'seq' and ($) get bespoke typing
-- rules (see Note [Typing rule for seq] and Note [Typing rule for ($)]);
-- an ambiguous record-field operator is disambiguated from an obvious
-- signature on its first argument; everything else goes through tcApp.
tcExpr expr@(OpApp arg1 op fix arg2) res_ty
  | (L loc (HsVar (L lv op_name))) <- op
  , op_name `hasKey` seqIdKey           -- Note [Typing rule for seq]
  = do { arg1_ty <- newFlexiTyVarTy liftedTypeKind
       ; let arg2_exp_ty = res_ty       -- the result type IS arg2's type
       ; arg1' <- tcArg op arg1 arg1_ty 1
       ; arg2' <- addErrCtxt (funAppCtxt op arg2 2) $
                  tc_poly_expr_nc arg2 arg2_exp_ty
       ; arg2_ty <- readExpType arg2_exp_ty
       ; op_id  <- tcLookupId op_name
         -- instantiate seq at the two inferred argument types
       ; let op' = L loc (HsWrap (mkWpTyApps [arg1_ty, arg2_ty])
                                 (HsVar (L lv op_id)))
       ; return $ OpApp arg1' op' fix arg2' }

  | (L loc (HsVar (L lv op_name))) <- op
  , op_name `hasKey` dollarIdKey        -- Note [Typing rule for ($)]
  = do { traceTc "Application rule" (ppr op)
       ; (arg1', arg1_ty) <- tcInferSigma arg1

       ; let doc   = text "The first argument of ($) takes"
             orig1 = exprCtOrigin (unLoc arg1)
       ; (wrap_arg1, [arg2_sigma], op_res_ty) <-
           matchActualFunTys doc orig1 (Just arg1) 1 arg1_ty
         -- We have (arg1 $ arg2)
         -- So: arg1_ty = arg2_ty -> op_res_ty
         -- where arg2_sigma may be polymorphic; that's the point

       ; arg2' <- tcArg op arg2 arg2_sigma 2

       -- Make sure that the argument type has kind '*'
       --    ($) :: forall (r:RuntimeRep) (a:*) (b:TYPE r). (a->b) -> a -> b
       -- Eg we do not want to allow  (D#  $  4.0#)   Trac #5570
       --    (which gives a seg fault)
       --
       -- The *result* type can have any kind (Trac #8739),
       -- so we don't need to check anything for that
       ; _ <- unifyKind (Just arg2_sigma) (typeKind arg2_sigma) liftedTypeKind
           -- ignore the evidence. arg2_sigma must have type * or #,
           -- because we know arg2_sigma -> or_res_ty is well-kinded
           -- (because otherwise matchActualFunTys would fail)
           -- There's no possibility here of, say, a kind family reducing to *.

       ; wrap_res <- tcSubTypeHR orig1 (Just expr) op_res_ty res_ty
                       -- op_res -> res
       ; op_id  <- tcLookupId op_name
       ; res_ty <- readExpType res_ty
       ; let op' = L loc (HsWrap (mkWpTyApps [ getRuntimeRep "tcExpr ($)" res_ty
                                             , arg2_sigma
                                             , res_ty])
                                 (HsVar (L lv op_id)))
             -- arg1' :: arg1_ty
             -- wrap_arg1 :: arg1_ty "->" (arg2_sigma -> op_res_ty)
             -- wrap_res :: op_res_ty "->" res_ty
             -- op' :: (a2_ty -> res_ty) -> a2_ty -> res_ty

             -- wrap1 :: arg1_ty "->" (arg2_sigma -> res_ty)
             wrap1 = mkWpFun idHsWrapper wrap_res arg2_sigma res_ty
                     <.> wrap_arg1

       ; return (OpApp (mkLHsWrap wrap1 arg1') op' fix arg2') }

  | (L loc (HsRecFld (Ambiguous lbl _))) <- op
  , Just sig_ty <- obviousSig (unLoc arg1)
    -- See Note [Disambiguating record fields]
  = do { sig_tc_ty <- tcHsSigWcType ExprSigCtxt sig_ty
       ; sel_name  <- disambiguateSelector lbl sig_tc_ty
       ; let op' = L loc (HsRecFld (Unambiguous lbl sel_name))
         -- retry with the now-unambiguous selector
       ; tcExpr (OpApp arg1 op' fix arg2) res_ty
       }

  | otherwise    -- the general case: treat as (op arg1 arg2)
  = do { traceTc "Non Application rule" (ppr op)
       ; (wrap, op', [Left arg1', Left arg2'])
           <- tcApp (Just $ mk_op_msg op)
                    op [Left arg1, Left arg2] res_ty
       ; return (mkHsWrap wrap $ OpApp arg1' op' fix arg2') }
-- Right sections, equivalent to \ x -> x `op` expr, or
--      \ x -> op x expr
tcExpr expr@(SectionR op arg2) res_ty
  = do { (op', op_ty) <- tcInferFun op
         -- op must take (at least) two arguments; split off both
       ; (wrap_fun, [arg1_ty, arg2_ty], op_res_ty) <-
           matchActualFunTys (mk_op_msg op) SectionOrigin (Just op) 2 op_ty
         -- the section itself has type (arg1_ty -> op_res_ty)
       ; wrap_res <- tcSubTypeHR SectionOrigin (Just expr)
                                 (mkFunTy arg1_ty op_res_ty) res_ty
       ; arg2' <- tcArg op arg2 arg2_ty 2
       ; return ( mkHsWrap wrap_res $
                  SectionR (mkLHsWrap wrap_fun op') arg2' ) }
-- Left sections (arg1 `op`).  With PostfixOperators only one argument is
-- demanded of op; otherwise two.  See Note [Left sections].
tcExpr expr@(SectionL arg1 op) res_ty
  = do { (op', op_ty) <- tcInferFun op
       ; dflags <- getDynFlags         -- Note [Left sections]
       ; let n_reqd_args | xopt LangExt.PostfixOperators dflags = 1
                         | otherwise                            = 2

       ; (wrap_fn, (arg1_ty:arg_tys), op_res_ty)
           <- matchActualFunTys (mk_op_msg op) SectionOrigin (Just op)
                                n_reqd_args op_ty
         -- the section's type is the remaining args' types -> op_res_ty
       ; wrap_res <- tcSubTypeHR SectionOrigin (Just expr)
                                 (mkFunTys arg_tys op_res_ty) res_ty
       ; arg1' <- tcArg op arg1 arg1_ty 1
       ; return ( mkHsWrap wrap_res $
                  SectionL arg1' (mkLHsWrap wrap_fn op') ) }
-- Explicit tuples, including tuple sections like (,x,).  A fully-present
-- tuple is checked against the expected tuple type directly; a section is
-- treated as a function from its Missing slots to the full tuple.
tcExpr expr@(ExplicitTuple tup_args boxity) res_ty
  | all tupArgPresent tup_args
  = do { let arity  = length tup_args
             tup_tc = tupleTyCon boxity arity
       ; res_ty <- expTypeToType res_ty
       ; (coi, arg_tys) <- matchExpectedTyConApp tup_tc res_ty
                           -- Unboxed tuples have RuntimeRep vars, which we
                           -- don't care about here
                           -- See Note [Unboxed tuple RuntimeRep vars] in TyCon
       ; let arg_tys' = case boxity of Unboxed -> drop arity arg_tys
                                       Boxed   -> arg_tys
       ; tup_args1 <- tcTupArgs tup_args arg_tys'
       ; return $ mkHsWrapCo coi (ExplicitTuple tup_args1 boxity) }

  | otherwise
  = -- The tup_args are a mixture of Present and Missing (for tuple sections)
    do { let arity = length tup_args

       ; arg_tys <- case boxity of
           { Boxed   -> newFlexiTyVarTys arity liftedTypeKind
           ; Unboxed -> replicateM arity newOpenFlexiTyVarTy }

         -- the section's type: one function arrow per Missing slot
       ; let actual_res_ty
                 = mkFunTys [ty | (ty, (L _ (Missing _))) <- arg_tys `zip` tup_args]
                            (mkTupleTy boxity arg_tys)

       ; wrap <- tcSubTypeHR (Shouldn'tHappenOrigin "ExpTuple")
                             (Just expr)
                             actual_res_ty res_ty

       -- Handle tuple sections where
       ; tup_args1 <- tcTupArgs tup_args arg_tys

       ; return $ mkHsWrap wrap (ExplicitTuple tup_args1 boxity) }
-- Unboxed sums (# | e | #): check the payload against the type of the
-- chosen alternative (1-indexed 'alt') of the arity-wide sum tycon.
tcExpr (ExplicitSum alt arity expr _) res_ty
  = do { let sum_tc = sumTyCon arity
       ; res_ty <- expTypeToType res_ty
       ; (coi, arg_tys) <- matchExpectedTyConApp sum_tc res_ty
       ; -- Drop levity vars, we don't care about them here
         let arg_tys' = drop arity arg_tys
       ; expr' <- tcPolyExpr expr (arg_tys' `getNth` (alt - 1))
       ; return $ mkHsWrapCo coi (ExplicitSum alt arity expr' arg_tys') }
-- Explicit lists [e1,..,en].  Without a rebindable-syntax witness we match
-- the expected list type directly; with one (OverloadedLists' fromListN)
-- we typecheck through tcSyntaxOp, which supplies the element type.
tcExpr (ExplicitList _ witness exprs) res_ty
  = case witness of
      Nothing -> do { res_ty <- expTypeToType res_ty
                    ; (coi, elt_ty) <- matchExpectedListTy res_ty
                    ; exprs' <- mapM (tc_elt elt_ty) exprs
                    ; return $
                      mkHsWrapCo coi $ ExplicitList elt_ty Nothing exprs' }

      Just fln -> do { ((exprs', elt_ty), fln')
                         <- tcSyntaxOp ListOrigin fln
                                       [synKnownType intTy, SynList] res_ty $
                            \ [elt_ty] ->
                            do { exprs' <-
                                    mapM (tc_elt elt_ty) exprs
                               ; return (exprs', elt_ty) }
                     ; return $ ExplicitList elt_ty (Just fln') exprs' }
     where tc_elt elt_ty expr = tcPolyExpr expr elt_ty
-- Explicit parallel arrays [:e1,..,en:] (Data Parallel Haskell); each
-- element is checked against the expected parallel-array element type.
tcExpr (ExplicitPArr _ exprs) res_ty    -- maybe empty
  = do { res_ty <- expTypeToType res_ty
       ; (coi, elt_ty) <- matchExpectedPArrTy res_ty
       ; exprs' <- mapM (tc_elt elt_ty) exprs
       ; return $
         mkHsWrapCo coi $ ExplicitPArr elt_ty exprs' }
  where
    tc_elt elt_ty expr = tcPolyExpr expr elt_ty
{-
************************************************************************
* *
Let, case, if, do
* *
************************************************************************
-}
-- Let expressions: typecheck the local bindings, then the body in their
-- scope; rebuild the HsLet with the elaborated pieces.
tcExpr (HsLet (L l binds) expr) res_ty
  = do { (tc_binds, tc_body) <- tcLocalBinds binds (tcMonoExpr expr res_ty)
       ; return (HsLet (L l tc_binds) tc_body) }
-- Case expressions: infer the scrutinee first (needed for GADT refinement),
-- then check each alternative against res_ty.
tcExpr (HsCase scrut matches) res_ty
  = do  {  -- We used to typecheck the case alternatives first.
           -- The case patterns tend to give good type info to use
           -- when typechecking the scrutinee.  For example
           --   case (map f) of
           --     (x:xs) -> ...
           -- will report that map is applied to too few arguments
           --
           -- But now, in the GADT world, we need to typecheck the scrutinee
           -- first, to get type info that may be refined in the case alternatives
          (scrut', scrut_ty) <- tcInferRho scrut

        ; traceTc "HsCase" (ppr scrut_ty)
        ; matches' <- tcMatchesCase match_ctxt scrut_ty matches res_ty
        ; return (HsCase scrut' matches') }
  where
    match_ctxt = MC { mc_what = CaseAlt,
                      mc_body = tcBody }
-- Ordinary 'if' (no rebindable syntax): the condition is checked against
-- Bool, and both branches against the (tauified) result type.
tcExpr (HsIf Nothing pred b1 b2) res_ty    -- Ordinary 'if'
  = do { pred' <- tcMonoExpr pred (mkCheckExpType boolTy)
       ; res_ty <- tauifyExpType res_ty
           -- Just like Note [Case branches must never infer a non-tau type]
           -- in TcMatches (See #10619)

       ; b1' <- tcMonoExpr b1 res_ty
       ; b2' <- tcMonoExpr b2 res_ty
       ; return (HsIf Nothing pred' b1' b2') }
-- Rebindable-syntax 'if': typecheck through the user-supplied syntax
-- function 'fun', which determines the types of condition and branches.
tcExpr (HsIf (Just fun) pred b1 b2) res_ty
  = do { ((pred', b1', b2'), fun')
           <- tcSyntaxOp IfOrigin fun [SynAny, SynAny, SynAny] res_ty $
              \ [pred_ty, b1_ty, b2_ty] ->
              do { pred' <- tcPolyExpr pred pred_ty
                 ; b1'   <- tcPolyExpr b1   b1_ty
                 ; b2'   <- tcPolyExpr b2   b2_ty
                 ; return (pred', b1', b2') }
       ; return (HsIf (Just fun') pred' b1' b2') }
-- Multi-way if: like case, multiple alternatives must not infer a
-- non-tau result type, so tauify unless there is a single alternative.
tcExpr (HsMultiIf _ alts) res_ty
  = do { res_ty <- if isSingleton alts
                   then return res_ty
                   else tauifyExpType res_ty
             -- Just like TcMatches
             -- Note [Case branches must never infer a non-tau type]

       ; alts' <- mapM (wrapLocM $ tcGRHS match_ctxt res_ty) alts
       ; res_ty <- readExpType res_ty
       ; return (HsMultiIf res_ty alts') }
  where match_ctxt = MC { mc_what = IfAlt, mc_body = tcBody }
-- Do-expressions and list/monad comprehensions: all the work is done by
-- tcDoStmts.  (The previous 'do { e <- tcDoStmts ...; return e }' was a
-- redundant bind-then-return; by the monad right-identity law it is
-- exactly the call itself.)
tcExpr (HsDo do_or_lc stmts _) res_ty
  = tcDoStmts do_or_lc stmts res_ty
-- Arrow 'proc' expressions: tcProc checks pattern and command and returns
-- a coercion which we wrap around the rebuilt HsProc.
tcExpr (HsProc pat cmd) res_ty
  = do { (pat1, cmd1, co) <- tcProc pat cmd res_ty
       ; return (mkHsWrapCo co (HsProc pat1 cmd1)) }
-- Typechecks the static form and wraps it with a call to 'fromStaticPtr'.
tcExpr (HsStatic fvs expr) res_ty
  = do  { res_ty <- expTypeToType res_ty
          -- decompose res_ty as (p_ty expr_ty); p_ty is the head of the
          -- expected application (presumably StaticPtr -- fromStaticPtr
          -- is instantiated at it below)
        ; (co, (p_ty, expr_ty)) <- matchExpectedAppTy res_ty
        ; (expr', lie) <- captureConstraints $
            addErrCtxt (hang (text "In the body of a static form:")
                             2 (ppr expr)
                       ) $
            tcPolyExprNC expr expr_ty
        -- Check that the free variables of the static form are closed.
        -- It's OK to use nonDetEltsUFM here as the only side effects of
        -- checkClosedInStaticForm are error messages.
        ; mapM_ checkClosedInStaticForm $ nonDetEltsUFM fvs

        -- Require the type of the argument to be Typeable.
        -- The evidence is not used, but asking the constraint ensures that
        -- the current implementation is as restrictive as future versions
        -- of the StaticPointers extension.
        ; typeableClass <- tcLookupClass typeableClassName
        ; _ <- emitWantedEvVar StaticOrigin $
                  mkTyConApp (classTyCon typeableClass)
                             [liftedTypeKind, expr_ty]

        -- Insert the constraints of the static form in a global list for later
        -- validation.
        ; stWC <- tcg_static_wc <$> getGblEnv
        ; updTcRef stWC (andWC lie)

        -- Wrap the static form with the 'fromStaticPtr' call.
        ; fromStaticPtr <- newMethodFromName StaticOrigin fromStaticPtrName p_ty
        ; let wrap = mkWpTyApps [expr_ty]
        ; loc <- getSrcSpanM
        ; return $ mkHsWrapCo co $ HsApp (L loc $ mkHsWrap wrap fromStaticPtr)
                                         (L loc (HsStatic fvs expr'))
        }
{-
************************************************************************
* *
Record construction and update
* *
************************************************************************
-}
-- Record construction, e.g. MkT { f = e }.  Instantiates the constructor,
-- splits its type into argument and result parts, and typechecks the
-- field bindings against the argument types.
tcExpr expr@(RecordCon { rcon_con_name = L loc con_name
                       , rcon_flds = rbinds }) res_ty
  = do  { con_like <- tcLookupConLike con_name

        -- Check for missing fields
        ; checkMissingFields con_like rbinds

        ; (con_expr, con_sigma) <- tcInferId con_name
        ; (con_wrap, con_tau) <-
            topInstantiate (OccurrenceOf con_name) con_sigma
              -- a shallow instantiation should really be enough for
              -- a data constructor.
        ; let arity = conLikeArity con_like
              -- partial let-pattern: tcSplitFunTysN is expected to succeed
              -- because con_tau has at least 'arity' arrows -- TODO confirm
              Right (arg_tys, actual_res_ty) = tcSplitFunTysN arity con_tau
        ; case conLikeWrapId_maybe con_like of
               -- unidirectional pattern synonyms cannot be constructed
               Nothing -> nonBidirectionalErr (conLikeName con_like)
               Just con_id -> do {
                  res_wrap <- tcSubTypeHR (Shouldn'tHappenOrigin "RecordCon")
                                          (Just expr) actual_res_ty res_ty
                ; rbinds' <- tcRecordBinds con_like arg_tys rbinds
                ; return $
                  mkHsWrap res_wrap $
                  RecordCon { rcon_con_name = L loc con_id
                            , rcon_con_expr = mkHsWrap con_wrap con_expr
                            , rcon_con_like = con_like
                            , rcon_flds = rbinds' } } }
{-
Note [Type of a record update]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The main complication with RecordUpd is that we need to explicitly
handle the *non-updated* fields. Consider:
data T a b c = MkT1 { fa :: a, fb :: (b,c) }
| MkT2 { fa :: a, fb :: (b,c), fc :: c -> c }
| MkT3 { fd :: a }
upd :: T a b c -> (b',c) -> T a b' c
upd t x = t { fb = x}
The result type should be (T a b' c)
not (T a b c), because 'b' *is not* mentioned in a non-updated field
not (T a b' c'), because 'c' *is* mentioned in a non-updated field
NB that it's not good enough to look at just one constructor; we must
look at them all; cf Trac #3219
After all, upd should be equivalent to:
upd t x = case t of
MkT1 p q -> MkT1 p x
MkT2 a b c -> MkT2 a x c
MkT3 d -> error ...
So we need to give a completely fresh type to the result record,
and then constrain it by the fields that are *not* updated ("p" above).
We call these the "fixed" type variables, and compute them in getFixedTyVars.
Note that because MkT3 doesn't contain all the fields being updated,
its RHS is simply an error, so it doesn't impose any type constraints.
Hence the use of 'relevant_cont'.
Note [Implicit type sharing]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
We also take into account any "implicit" non-update fields. For example
data T a b where { MkT { f::a } :: T a a; ... }
So the "real" type of MkT is: forall ab. (a~b) => a -> T a b
Then consider
upd t x = t { f=x }
We infer the type
upd :: T a b -> a -> T a b
upd (t::T a b) (x::a)
= case t of { MkT (co:a~b) (_:a) -> MkT co x }
We can't give it the more general type
upd :: T a b -> c -> T c b
Note [Criteria for update]
~~~~~~~~~~~~~~~~~~~~~~~~~~
We want to allow update for existentials etc, provided the updated
field isn't part of the existential. For example, this should be ok.
data T a where { MkT { f1::a, f2::b->b } :: T a }
f :: T a -> b -> T b
f t b = t { f1=b }
The criterion we use is this:
The types of the updated fields
mention only the universally-quantified type variables
of the data constructor
NB: this is not (quite) the same as being a "naughty" record selector
(See Note [Naughty record selectors]) in TcTyClsDecls), at least
in the case of GADTs. Consider
data T a where { MkT :: { f :: a } :: T [a] }
Then f is not "naughty" because it has a well-typed record selector.
But we don't allow updates for 'f'. (One could consider trying to
allow this, but it makes my head hurt. Badly. And no one has asked
for it.)
In principle one could go further, and allow
g :: T a -> T a
g t = t { f2 = \x -> x }
because the expression is polymorphic...but that seems a bridge too far.
Note [Data family example]
~~~~~~~~~~~~~~~~~~~~~~~~~~
data instance T (a,b) = MkT { x::a, y::b }
--->
data :TP a b = MkT { a::a, y::b }
coTP a b :: T (a,b) ~ :TP a b
Suppose r :: T (t1,t2), e :: t3
Then r { x=e } :: T (t3,t1)
--->
case r |> co1 of
MkT x y -> MkT e y |> co2
where co1 :: T (t1,t2) ~ :TP t1 t2
co2 :: :TP t3 t2 ~ T (t3,t2)
The wrapping with co2 is done by the constructor wrapper for MkT
Outgoing invariants
~~~~~~~~~~~~~~~~~~~
In the outgoing (HsRecordUpd scrut binds cons in_inst_tys out_inst_tys):
* cons are the data constructors to be updated
* in_inst_tys, out_inst_tys have same length, and instantiate the
*representation* tycon of the data cons. In Note [Data
family example], in_inst_tys = [t1,t2], out_inst_tys = [t3,t2]
Note [Mixed Record Field Updates]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider the following pattern synonym.
data MyRec = MyRec { foo :: Int, qux :: String }
pattern HisRec{f1, f2} = MyRec{foo = f1, qux=f2}
This allows updates such as the following
updater :: MyRec -> MyRec
updater a = a {f1 = 1 }
It would also make sense to allow the following update (which we reject).
updater a = a {f1 = 1, qux = "two" } ==? MyRec 1 "two"
This leads to confusing behaviour when the selectors in fact refer to the same
field.
updater a = a {f1 = 1, foo = 2} ==? ???
For this reason, we reject a mixture of pattern synonym and normal record
selectors in the same update block. Although of course we still allow the
following.
updater a = (a {f1 = 1}) {foo = 2}
> updater (MyRec 0 "str")
MyRec 2 "str"
-}
-- Record update, e.g. r { f = e }.  The numbered STEPs below implement
-- Note [Type of a record update]: disambiguate the fields, find the
-- relevant constructors, and build fresh scrutinee/result instantiations
-- that share exactly the "fixed" (non-updated) type variables.
tcExpr expr@(RecordUpd { rupd_expr = record_expr, rupd_flds = rbnds }) res_ty
  = ASSERT( notNull rbnds )
    do  { -- STEP -2: typecheck the record_expr, the record to be updated
          (record_expr', record_rho) <- tcInferRho record_expr

        -- STEP -1  See Note [Disambiguating record fields]
        -- After this we know that rbinds is unambiguous
        ; rbinds <- disambiguateRecordBinds record_expr record_rho rbnds res_ty
        ; let upd_flds = map (unLoc . hsRecFieldLbl . unLoc) rbinds
              upd_fld_occs = map (occNameFS . rdrNameOcc . rdrNameAmbiguousFieldOcc) upd_flds
              sel_ids      = map selectorAmbiguousFieldOcc upd_flds

        -- STEP 0
        -- Check that the field names are really field names
        -- and they are all field names for proper records or
        -- all field names for pattern synonyms.
        ; let bad_guys = [ setSrcSpan loc $ addErrTc (notSelector fld_name)
                         | fld <- rbinds,
                           -- Excludes class ops
                           let L loc sel_id = hsRecUpdFieldId (unLoc fld),
                           not (isRecordSelector sel_id),
                           let fld_name = idName sel_id ]
        ; unless (null bad_guys) (sequence bad_guys >> failM)

        -- See Note [Mixed Record Field Updates]
        ; let (data_sels, pat_syn_sels) =
                partition isDataConRecordSelector sel_ids
        ; MASSERT( all isPatSynRecordSelector pat_syn_sels )
        ; checkTc ( null data_sels || null pat_syn_sels )
                  ( mixedSelectors data_sels pat_syn_sels )

        -- STEP 1
        -- Figure out the tycon and data cons from the first field name
        ; let   -- It's OK to use the non-tc splitters here (for a selector)
              sel_id : _  = sel_ids

              mtycon :: Maybe TyCon
              mtycon = case idDetails sel_id of
                          RecSelId (RecSelData tycon) _ -> Just tycon
                          _ -> Nothing

              con_likes :: [ConLike]
              con_likes = case idDetails sel_id of
                             RecSelId (RecSelData tc) _
                                -> map RealDataCon (tyConDataCons tc)
                             RecSelId (RecSelPatSyn ps) _
                                -> [PatSynCon ps]
                             _  -> panic "tcRecordUpd"
                -- NB: for a data type family, the tycon is the instance tycon

              relevant_cons = conLikesWithFields con_likes upd_fld_occs
                -- A constructor is only relevant to this process if
                -- it contains *all* the fields that are being updated
                -- Other ones will cause a runtime error if they occur

        -- Step 2
        -- Check that at least one constructor has all the named fields
        -- i.e. has an empty set of bad fields returned by badFields
        ; checkTc (not (null relevant_cons)) (badFieldsUpd rbinds con_likes)

        -- Take apart a representative constructor
        ; let con1 = ASSERT( not (null relevant_cons) ) head relevant_cons
              (con1_tvs, _, _, _prov_theta, req_theta, con1_arg_tys, _)
                 = conLikeFullSig con1
              con1_flds   = map flLabel $ conLikeFieldLabels con1
              con1_tv_tys = mkTyVarTys con1_tvs
              con1_res_ty = case mtycon of
                              Just tc -> mkFamilyTyConApp tc con1_tv_tys
                              Nothing -> conLikeResTy con1 con1_tv_tys

        -- Check that we're not dealing with a unidirectional pattern
        -- synonym
        ; unless (isJust $ conLikeWrapId_maybe con1)
                 (nonBidirectionalErr (conLikeName con1))

        -- STEP 3    Note [Criteria for update]
        -- Check that each updated field is polymorphic; that is, its type
        -- mentions only the universally-quantified variables of the data con
        ; let flds1_w_tys  = zipEqual "tcExpr:RecConUpd" con1_flds con1_arg_tys
              bad_upd_flds = filter bad_fld flds1_w_tys
              con1_tv_set  = mkVarSet con1_tvs
              bad_fld (fld, ty) = fld `elem` upd_fld_occs &&
                                  not (tyCoVarsOfType ty `subVarSet` con1_tv_set)
        ; checkTc (null bad_upd_flds) (badFieldTypes bad_upd_flds)

        -- STEP 4  Note [Type of a record update]
        -- Figure out types for the scrutinee and result
        -- Both are of form (T a b c), with fresh type variables, but with
        -- common variables where the scrutinee and result must have the same type
        -- These are variables that appear in *any* arg of *any* of the
        -- relevant constructors *except* in the updated fields
        --
        ; let fixed_tvs = getFixedTyVars upd_fld_occs con1_tvs relevant_cons
              is_fixed_tv tv = tv `elemVarSet` fixed_tvs

              mk_inst_ty :: TCvSubst -> (TyVar, TcType) -> TcM (TCvSubst, TcType)
              -- Deals with instantiation of kind variables
              --   c.f. TcMType.newMetaTyVars
              mk_inst_ty subst (tv, result_inst_ty)
                | is_fixed_tv tv   -- Same as result type
                = return (extendTvSubst subst tv result_inst_ty, result_inst_ty)
                | otherwise        -- Fresh type, of correct kind
                = do { (subst', new_tv) <- newMetaTyVarX subst tv
                     ; return (subst', mkTyVarTy new_tv) }

        ; (result_subst, con1_tvs') <- newMetaTyVars con1_tvs
        ; let result_inst_tys = mkTyVarTys con1_tvs'
              init_subst = mkEmptyTCvSubst (getTCvInScope result_subst)

        ; (scrut_subst, scrut_inst_tys) <- mapAccumLM mk_inst_ty init_subst
                                                      (con1_tvs `zip` result_inst_tys)

        ; let rec_res_ty    = TcType.substTy result_subst con1_res_ty
              scrut_ty      = TcType.substTy scrut_subst  con1_res_ty
              con1_arg_tys' = map (TcType.substTy result_subst) con1_arg_tys

        ; wrap_res <- tcSubTypeHR (exprCtOrigin expr)
                                  (Just expr) rec_res_ty res_ty
        ; co_scrut <- unifyType (Just record_expr) record_rho scrut_ty
                -- NB: normal unification is OK here (as opposed to subsumption),
                -- because for this to work out, both record_rho and scrut_ty have
                -- to be normal datatypes -- no contravariant stuff can go on

        -- STEP 5
        -- Typecheck the bindings
        ; rbinds' <- tcRecordUpd con1 con1_arg_tys' rbinds

        -- STEP 6: Deal with the stupid theta
        ; let theta' = substThetaUnchecked scrut_subst (conLikeStupidTheta con1)
        ; instStupidTheta RecordUpdOrigin theta'

        -- Step 7: make a cast for the scrutinee, in the
        --         case that it's from a data family
        ; let fam_co :: HsWrapper   -- RepT t1 .. tn ~R scrut_ty
              fam_co | Just tycon <- mtycon
                     , Just co_con <- tyConFamilyCoercion_maybe tycon
                     = mkWpCastR (mkTcUnbranchedAxInstCo co_con scrut_inst_tys [])
                     | otherwise
                     = idHsWrapper

        -- Step 8: Check that the req constraints are satisfied
        -- For normal data constructors req_theta is empty but we must do
        -- this check for pattern synonyms.
        ; let req_theta' = substThetaUnchecked scrut_subst req_theta
        ; req_wrap <- instCallConstraints RecordUpdOrigin req_theta'

        -- Phew!
        ; return $
          mkHsWrap wrap_res $
          RecordUpd { rupd_expr = mkLHsWrap fam_co (mkLHsWrapCo co_scrut record_expr')
                    , rupd_flds = rbinds'
                    , rupd_cons = relevant_cons, rupd_in_tys = scrut_inst_tys
                    , rupd_out_tys = result_inst_tys, rupd_wrap = req_wrap } }
-- A record-field selector used as an ordinary expression, e.g. (foo r).
tcExpr (HsRecFld f) res_ty
    = tcCheckRecSelId f res_ty
{-
************************************************************************
* *
Arithmetic sequences e.g. [a,b..]
and their parallel-array counterparts e.g. [: a,b.. :]
* *
************************************************************************
-}
-- Arithmetic sequences [a..], [a,b..], [a..b], [a,b..c]; see tcArithSeq.
tcExpr (ArithSeq _ witness seq) res_ty
  = tcArithSeq witness seq res_ty
-- Parallel-array enumeration [:e1..e2:], elaborated with the DPH builtin
-- enumFromToP looked up via the desugarer.
tcExpr (PArrSeq _ seq@(FromTo expr1 expr2)) res_ty
  = do  { res_ty <- expTypeToType res_ty
        ; (coi, elt_ty) <- matchExpectedPArrTy res_ty
        ; expr1' <- tcPolyExpr expr1 elt_ty
        ; expr2' <- tcPolyExpr expr2 elt_ty
        ; enumFromToP <- initDsTc $ dsDPHBuiltin enumFromToPVar
        ; enum_from_to <- newMethodFromName (PArrSeqOrigin seq)
                                 (idName enumFromToP) elt_ty
        ; return $
          mkHsWrapCo coi $ PArrSeq enum_from_to (FromTo expr1' expr2') }
-- Parallel-array enumeration [:e1,e2..e3:], via the DPH builtin
-- enumFromThenToP.
tcExpr (PArrSeq _ seq@(FromThenTo expr1 expr2 expr3)) res_ty
  = do  { res_ty <- expTypeToType res_ty
        ; (coi, elt_ty) <- matchExpectedPArrTy res_ty
        ; expr1' <- tcPolyExpr expr1 elt_ty
        ; expr2' <- tcPolyExpr expr2 elt_ty
        ; expr3' <- tcPolyExpr expr3 elt_ty
        ; enumFromThenToP <- initDsTc $ dsDPHBuiltin enumFromThenToPVar
        ; eft <- newMethodFromName (PArrSeqOrigin seq)
                      (idName enumFromThenToP) elt_ty        -- !!!FIXME: chak
        ; return $
          mkHsWrapCo coi $
          PArrSeq eft (FromThenTo expr1' expr2' expr3') }
-- Any other parallel-array sequence form (e.g. an infinite one) is
-- impossible by this stage:
tcExpr (PArrSeq _ _) _
  = panic "TcExpr.tcExpr: Infinite parallel array!"
    -- the parser shouldn't have generated it and the renamer shouldn't have
    -- let it through
{-
************************************************************************
* *
Template Haskell
* *
************************************************************************
-}
-- HsSpliced is an annotation produced by 'RnSplice.rnSpliceExpr'.
-- Here we get rid of it and add the finalizers to the global environment.
--
-- See Note [Delaying modFinalizers in untyped splices] in RnSplice.
tcExpr (HsSpliceE (HsSpliced mod_finalizers (HsSplicedExpr expr)))
       res_ty
  = do addModFinalizersWithLclEnv mod_finalizers
       -- then typecheck the spliced-in expression itself
       tcExpr expr res_ty
-- Typed Template Haskell splices and quotation brackets are delegated to
-- the splice/bracket typecheckers (TcSplice).
tcExpr (HsSpliceE splice) res_ty
  = tcSpliceExpr splice res_ty

tcExpr (HsBracket brack) res_ty
  = tcTypedBracket brack res_ty

tcExpr (HsRnBracketOut brack ps) res_ty
  = tcUntypedBracket brack ps res_ty
{-
************************************************************************
* *
Catch-all
* *
************************************************************************
-}
-- Catch-all for constructors that should never reach the typechecker.
tcExpr other _ = pprPanic "tcMonoExpr" (ppr other)
  -- Include ArrForm, ArrApp, which shouldn't appear at all
  -- Also HsTcBracketOut, HsQuasiQuoteE
{-
************************************************************************
* *
Arithmetic sequences [a..b] etc
* *
************************************************************************
-}
-- | Typecheck an arithmetic sequence ([a..], [a,b..], [a..b], [a,b..c]):
-- find the element type (via 'arithSeqEltType'), check each bound against
-- it, and resolve the appropriate Enum method for the elaborated form.
-- The 'witness' is the rebindable-syntax conversion, if any.
tcArithSeq :: Maybe (SyntaxExpr Name) -> ArithSeqInfo Name -> ExpRhoType
           -> TcM (HsExpr TcId)

tcArithSeq witness seq@(From expr) res_ty
  = do { (wrap, elt_ty, wit') <- arithSeqEltType witness res_ty
       ; expr' <- tcPolyExpr expr elt_ty
       ; enum_from <- newMethodFromName (ArithSeqOrigin seq)
                              enumFromName elt_ty
       ; return $ mkHsWrap wrap $
         ArithSeq enum_from wit' (From expr') }

tcArithSeq witness seq@(FromThen expr1 expr2) res_ty
  = do { (wrap, elt_ty, wit') <- arithSeqEltType witness res_ty
       ; expr1' <- tcPolyExpr expr1 elt_ty
       ; expr2' <- tcPolyExpr expr2 elt_ty
       ; enum_from_then <- newMethodFromName (ArithSeqOrigin seq)
                              enumFromThenName elt_ty
       ; return $ mkHsWrap wrap $
         ArithSeq enum_from_then wit' (FromThen expr1' expr2') }

tcArithSeq witness seq@(FromTo expr1 expr2) res_ty
  = do { (wrap, elt_ty, wit') <- arithSeqEltType witness res_ty
       ; expr1' <- tcPolyExpr expr1 elt_ty
       ; expr2' <- tcPolyExpr expr2 elt_ty
       ; enum_from_to <- newMethodFromName (ArithSeqOrigin seq)
                              enumFromToName elt_ty
       ; return $ mkHsWrap wrap $
         ArithSeq enum_from_to wit' (FromTo expr1' expr2') }

tcArithSeq witness seq@(FromThenTo expr1 expr2 expr3) res_ty
  = do { (wrap, elt_ty, wit') <- arithSeqEltType witness res_ty
       ; expr1' <- tcPolyExpr expr1 elt_ty
       ; expr2' <- tcPolyExpr expr2 elt_ty
       ; expr3' <- tcPolyExpr expr3 elt_ty
       ; eft <- newMethodFromName (ArithSeqOrigin seq)
                              enumFromThenToName elt_ty
       ; return $ mkHsWrap wrap $
         ArithSeq eft wit' (FromThenTo expr1' expr2' expr3') }
-----------------
-- | Determine the element type of an arithmetic sequence.  Without a
-- rebindable-syntax witness the expected type must be a list type; with
-- one, the witness (applied via 'tcSyntaxOp') determines the element type.
arithSeqEltType :: Maybe (SyntaxExpr Name) -> ExpRhoType
                -> TcM (HsWrapper, TcType, Maybe (SyntaxExpr Id))
arithSeqEltType Nothing res_ty
  = do { res_ty <- expTypeToType res_ty
       ; (coi, elt_ty) <- matchExpectedListTy res_ty
       ; return (mkWpCastN coi, elt_ty, Nothing) }
arithSeqEltType (Just fl) res_ty
  = do { (elt_ty, fl')
           <- tcSyntaxOp ListOrigin fl [SynList] res_ty $
              \ [elt_ty] -> return elt_ty
       ; return (idHsWrapper, elt_ty, Just fl') }
{-
************************************************************************
* *
Applications
* *
************************************************************************
-}
-- | One argument in an application chain: either an ordinary expression
-- argument or a visible type application (@f \@ty@).
type LHsExprArgIn  = Either (LHsExpr Name) (LHsWcType Name)
type LHsExprArgOut = Either (LHsExpr TcId) (LHsWcType Name)
   -- Left e   => argument expression
   -- Right ty => visible type application
-- | Typecheck an application head (HsApp or HsAppType) and re-assemble
-- the elaborated function and arguments into a single expression.
tcApp1 :: HsExpr Name  -- either HsApp or HsAppType
       -> ExpRhoType -> TcM (HsExpr TcId)
tcApp1 e res_ty
  = do { (wrap, fun, args) <- tcApp Nothing (noLoc e) [] res_ty
       ; return (mkHsWrap wrap $ unLoc $ foldl mk_hs_app fun args) }
  where
    -- Reapply each argument, distinguishing value arguments from
    -- visible type applications
    mk_hs_app f (Left a)  = mkHsApp f a
    mk_hs_app f (Right a) = mkHsAppTypeOut f a
tcApp :: Maybe SDoc  -- like "The function `f' is applied to"
                     -- or leave out to get exactly that message
      -> LHsExpr Name -> [LHsExprArgIn]  -- Function and args
      -> ExpRhoType -> TcM (HsWrapper, LHsExpr TcId, [LHsExprArgOut])
           -- (wrap, fun, args). For an ordinary function application,
           -- these should be assembled as (wrap (fun args)).
           -- But OpApp is slightly different, so that's why the caller
           -- must assemble
tcApp m_herald orig_fun orig_args res_ty
  = go orig_fun orig_args
  where
    go :: LHsExpr Name -> [LHsExprArgIn]
       -> TcM (HsWrapper, LHsExpr TcId, [LHsExprArgOut])
    -- Strip parens and flatten nested applications / visible type
    -- applications into the accumulated argument list
    go (L _ (HsPar e))       args = go e  args
    go (L _ (HsApp e1 e2))   args = go e1 (Left e2:args)
    go (L _ (HsAppType e t)) args = go e  (Right t:args)

    -- Special typing rules for tagToEnum# (applied to exactly one
    -- value argument) and seq (two value arguments)
    go (L loc (HsVar (L _ fun))) args
      | fun `hasKey` tagToEnumKey
      , count isLeft args == 1
      = do { (wrap, expr, args) <- tcTagToEnum loc fun args res_ty
           ; return (wrap, expr, args) }

      | fun `hasKey` seqIdKey
      , count isLeft args == 2
      = do { (wrap, expr, args) <- tcSeq loc fun args res_ty
           ; return (wrap, expr, args) }

    -- An ambiguous record selector whose first argument carries an
    -- obvious type signature: use the signature to disambiguate, then
    -- retry with the resolved selector
    go (L loc (HsRecFld (Ambiguous lbl _))) args@(Left (L _ arg) : _)
      | Just sig_ty <- obviousSig arg
      = do { sig_tc_ty <- tcHsSigWcType ExprSigCtxt sig_ty
           ; sel_name <- disambiguateSelector lbl sig_tc_ty
           ; go (L loc (HsRecFld (Unambiguous lbl sel_name))) args }

    -- The ordinary case: infer the function's type, check the
    -- arguments against it, then fit the actual result type to the
    -- expected one
    go fun args
      = do {   -- Type-check the function
           ; (fun1, fun_sigma) <- tcInferFun fun
           ; let orig = exprCtOrigin (unLoc fun)

           ; (wrap_fun, args1, actual_res_ty)
               <- tcArgs fun fun_sigma orig args
                         (m_herald `orElse` mk_app_msg fun)

                -- this is just like tcWrapResult, but the types don't line
                -- up to call that function
           ; wrap_res <- addFunResCtxt True (unLoc fun) actual_res_ty res_ty $
                         tcSubTypeDS_NC_O orig GenSigCtxt
                           (Just $ foldl mk_hs_app fun args)
                           actual_res_ty res_ty

           ; return (wrap_res, mkLHsWrap wrap_fun fun1, args1) }

    mk_hs_app f (Left a)  = mkHsApp f a
    mk_hs_app f (Right a) = mkHsAppType f a
-- | Herald used in arity-mismatch error messages for ordinary
-- function applications: "The function `f' is applied to ...".
mk_app_msg :: LHsExpr Name -> SDoc
mk_app_msg the_fun = sep [fun_part, text "is applied to"]
  where
    fun_part = text "The function" <+> quotes (ppr the_fun)
-- | Herald used in arity-mismatch error messages for operator
-- applications: "The operator `op' takes ...".
mk_op_msg :: LHsExpr Name -> SDoc
mk_op_msg the_op =
  text "The operator" <+> quotes (ppr the_op) <+> text "takes"
----------------
tcInferFun :: LHsExpr Name -> TcM (LHsExpr TcId, TcSigmaType)
-- Infer type of a function
-- Bare variables and record selectors are handled specially so that
-- error contexts are not wrapped around a plain identifier occurrence.
tcInferFun (L loc (HsVar (L _ name)))
  = do { (fun, ty) <- setSrcSpan loc (tcInferId name)
               -- Don't wrap a context around a plain Id
       ; return (L loc fun, ty) }

tcInferFun (L loc (HsRecFld f))
  = do { (fun, ty) <- setSrcSpan loc (tcInferRecSelId f)
               -- Don't wrap a context around a plain Id
       ; return (L loc fun, ty) }

-- General case: infer a sigma-type for the whole expression
tcInferFun fun
  = tcInferSigma fun
      -- NB: tcInferSigma; see TcUnify
      -- Note [Deep instantiation of InferResult]
----------------
-- | Type-check the arguments to a function, possibly including visible type
-- applications
tcArgs :: LHsExpr Name   -- ^ The function itself (for err msgs only)
       -> TcSigmaType    -- ^ the (uninstantiated) type of the function
       -> CtOrigin       -- ^ the origin for the function's type
       -> [LHsExprArgIn] -- ^ the args
       -> SDoc           -- ^ the herald for matchActualFunTys
       -> TcM (HsWrapper, [LHsExprArgOut], TcSigmaType)
          -- ^ (a wrapper for the function, the tc'd args, result type)
tcArgs fun orig_fun_ty fun_orig orig_args herald
  = go [] 1 orig_fun_ty orig_args
  where
    orig_arity = length orig_args

    -- go <types of args seen so far> <1-based arg number>
    --    <remaining fun type> <remaining args>
    go _ _ fun_ty [] = return (idHsWrapper, [], fun_ty)

    -- A visible type application: instantiate any inferred foralls,
    -- then peel off the next Specified forall and substitute the
    -- supplied type for its variable
    go acc_args n fun_ty (Right hs_ty_arg:args)
      = do { (wrap1, upsilon_ty) <- topInstantiateInferred fun_orig fun_ty
                 -- wrap1 :: fun_ty "->" upsilon_ty
           ; case tcSplitForAllTy_maybe upsilon_ty of
               Just (tvb, inner_ty) ->
                 do { let tv   = binderVar tvb
                          vis  = binderArgFlag tvb
                          kind = tyVarKind tv
                    ; MASSERT2( vis == Specified
                        , (vcat [ ppr fun_ty, ppr upsilon_ty, ppr tvb
                                , ppr inner_ty, pprTyVar tv
                                , ppr vis ]) )
                    ; ty_arg <- tcHsTypeApp hs_ty_arg kind
                    ; let insted_ty = substTyWithUnchecked [tv] [ty_arg] inner_ty
                    ; (inner_wrap, args', res_ty)
                        <- go acc_args (n+1) insted_ty args
                   -- inner_wrap :: insted_ty "->" (map typeOf args') -> res_ty
                    ; let inst_wrap = mkWpTyApps [ty_arg]
                    ; return ( inner_wrap <.> inst_wrap <.> wrap1
                             , Right hs_ty_arg : args'
                             , res_ty ) }
               _ -> ty_app_err upsilon_ty hs_ty_arg }

    -- An ordinary value argument: match one arrow off the function
    -- type and check the argument against its domain
    go acc_args n fun_ty (Left arg : args)
      = do { (wrap, [arg_ty], res_ty)
               <- matchActualFunTysPart herald fun_orig (Just fun) 1 fun_ty
                                        acc_args orig_arity
               -- wrap :: fun_ty "->" arg_ty -> res_ty
           ; arg' <- tcArg fun arg arg_ty n
           ; (inner_wrap, args', inner_res_ty)
               <- go (arg_ty : acc_args) (n+1) res_ty args
               -- inner_wrap :: res_ty "->" (map typeOf args') -> inner_res_ty
           ; return ( mkWpFun idHsWrapper inner_wrap arg_ty res_ty <.> wrap
                    , Left arg' : args'
                    , inner_res_ty ) }

    -- The type being applied is not a forall: report the bad visible
    -- type application (after tidying the offending type for display)
    ty_app_err ty arg
      = do { (_, ty) <- zonkTidyTcType emptyTidyEnv ty
           ; failWith $
               text "Cannot apply expression of type" <+> quotes (ppr ty) $$
               text "to a visible type argument" <+> quotes (ppr arg) }
----------------
-- | Typecheck a single value argument of an application, pushing an
-- error context that names the function and the argument's position.
tcArg :: LHsExpr Name        -- ^ the function (error messages only)
      -> LHsExpr Name        -- ^ the actual argument
      -> TcRhoType           -- ^ expected argument type
      -> Int                 -- ^ 1-based argument position
      -> TcM (LHsExpr TcId)  -- ^ elaborated argument
tcArg the_fun the_arg exp_arg_ty arg_position
  = addErrCtxt err_ctxt (tcPolyExprNC the_arg exp_arg_ty)
  where
    err_ctxt = funAppCtxt the_fun the_arg arg_position
----------------
-- | Typecheck the components of a tuple against their expected types.
-- The two lists are expected to have the same length (asserted).
tcTupArgs :: [LHsTupArg Name] -> [TcSigmaType] -> TcM [LHsTupArg TcId]
tcTupArgs args tys
  = ASSERT( equalLength args tys ) mapM go (args `zip` tys)
  where
    -- A missing component (tuple section) just records its type;
    -- a present component is checked against it
    go (L l (Missing {}),   arg_ty) = return (L l (Missing arg_ty))
    go (L l (Present expr), arg_ty) = do { expr' <- tcPolyExpr expr arg_ty
                                         ; return (L l (Present expr')) }
---------------------------
-- See TcType.SyntaxOpType also for commentary
tcSyntaxOp :: CtOrigin
           -> SyntaxExpr Name
           -> [SyntaxOpType]           -- ^ shape of syntax operator arguments
           -> ExpRhoType               -- ^ overall result type
           -> ([TcSigmaType] -> TcM a) -- ^ Type check any arguments
           -> TcM (a, SyntaxExpr TcId)
-- ^ Typecheck a syntax operator
-- The operator is always a variable at this stage (i.e. renamer output)
-- The result is checked directly against res_ty, i.e. with shape
-- (SynType res_ty); use tcSyntaxOpGen for other result shapes.
tcSyntaxOp orig expr arg_tys res_ty
  = tcSyntaxOpGen orig expr arg_tys (SynType res_ty)
-- | Slightly more general version of 'tcSyntaxOp' that allows the caller
-- to specify the shape of the result of the syntax operator
tcSyntaxOpGen :: CtOrigin
              -> SyntaxExpr Name
              -> [SyntaxOpType]
              -> SyntaxOpType
              -> ([TcSigmaType] -> TcM a)
              -> TcM (a, SyntaxExpr TcId)
-- Infer the operator's type and fit it to the requested argument and
-- result shapes, recording the wrappers in the returned SyntaxExpr
tcSyntaxOpGen orig (SyntaxExpr { syn_expr = HsVar (L _ op) })
              arg_tys res_ty thing_inside
  = do { (expr, sigma) <- tcInferId op
       ; (result, expr_wrap, arg_wraps, res_wrap)
           <- tcSynArgA orig sigma arg_tys res_ty $
              thing_inside
       ; return (result, SyntaxExpr { syn_expr = mkHsWrap expr_wrap expr
                                    , syn_arg_wraps = arg_wraps
                                    , syn_res_wrap  = res_wrap }) }

-- The renamer guarantees a bare variable here; anything else is a bug
tcSyntaxOpGen _ other _ _ _ = pprPanic "tcSyntaxOp" (ppr other)
{-
Note [tcSynArg]
~~~~~~~~~~~~~~~
Because of the rich structure of SyntaxOpType, we must do the
contra-/covariant thing when working down arrows, to get the
instantiation vs. skolemisation decisions correct (and, more
obviously, the orientation of the HsWrappers). We thus have
two tcSynArgs.
-}
-- works on "expected" types, skolemising where necessary
-- See Note [tcSynArg]
tcSynArgE :: CtOrigin
          -> TcSigmaType
          -> SyntaxOpType              -- ^ shape it is expected to have
          -> ([TcSigmaType] -> TcM a)  -- ^ check the arguments
          -> TcM (a, HsWrapper)
           -- ^ returns a wrapper :: (type of right shape) "->" (type passed in)
tcSynArgE orig sigma_ty syn_ty thing_inside
  = do { (skol_wrap, (result, ty_wrapper))
           <- tcSkolemise GenSigCtxt sigma_ty $ \ _ rho_ty ->
              go rho_ty syn_ty
       ; return (result, skol_wrap <.> ty_wrapper) }
  where
    -- go dispatches on the requested shape; rho_ty has already been
    -- skolemised by the wrapper above
    go rho_ty SynAny
      = do { result <- thing_inside [rho_ty]
           ; return (result, idHsWrapper) }

    go rho_ty SynRho   -- same as SynAny, because we skolemise eagerly
      = do { result <- thing_inside [rho_ty]
           ; return (result, idHsWrapper) }

    go rho_ty SynList
      = do { (list_co, elt_ty) <- matchExpectedListTy rho_ty
           ; result <- thing_inside [elt_ty]
           ; return (result, mkWpCastN list_co) }

    -- A function shape: match one arrow off rho_ty, then handle the
    -- argument contravariantly (tcSynArgA) and the result covariantly
    -- (tcSynArgE), composing the wrappers accordingly
    go rho_ty (SynFun arg_shape res_shape)
      = do { ( ( ( (result, arg_ty, res_ty)
                 , res_wrapper )                   -- :: res_ty_out "->" res_ty
               , arg_wrapper1, [], arg_wrapper2 )  -- :: arg_ty "->" arg_ty_out
             , match_wrapper )          -- :: (arg_ty -> res_ty) "->" rho_ty
               <- matchExpectedFunTys herald 1 (mkCheckExpType rho_ty) $
                  \ [arg_ty] res_ty ->
                  do { arg_tc_ty <- expTypeToType arg_ty
                     ; res_tc_ty <- expTypeToType res_ty
                         -- another nested arrow is too much for now,
                         -- but I bet we'll never need this
                     ; MASSERT2( case arg_shape of
                                   SynFun {} -> False;
                                   _ -> True
                               , text "Too many nested arrows in SyntaxOpType" $$
                                 pprCtOrigin orig )
                     ; tcSynArgA orig arg_tc_ty [] arg_shape $
                       \ arg_results ->
                       tcSynArgE orig res_tc_ty res_shape $
                       \ res_results ->
                       do { result <- thing_inside (arg_results ++ res_results)
                          ; return (result, arg_tc_ty, res_tc_ty) }}
           ; return ( result
                    , match_wrapper <.>
                      mkWpFun (arg_wrapper2 <.> arg_wrapper1) res_wrapper
                              arg_ty res_ty ) }
      where
        herald = text "This rebindable syntax expects a function with"

    -- A concrete type: check that the requested type fits rho_ty
    go rho_ty (SynType the_ty)
      = do { wrap <- tcSubTypeET orig GenSigCtxt the_ty rho_ty
           ; result <- thing_inside []
           ; return (result, wrap) }
-- works on "actual" types, instantiating where necessary
-- See Note [tcSynArg]
tcSynArgA :: CtOrigin
          -> TcSigmaType
          -> [SyntaxOpType]            -- ^ argument shapes
          -> SyntaxOpType              -- ^ result shape
          -> ([TcSigmaType] -> TcM a)  -- ^ check the arguments
          -> TcM (a, HsWrapper, [HsWrapper], HsWrapper)
            -- ^ returns a wrapper to be applied to the original function,
            -- wrappers to be applied to arguments
            -- and a wrapper to be applied to the overall expression
tcSynArgA orig sigma_ty arg_shapes res_shape thing_inside
  = do { (match_wrapper, arg_tys, res_ty)
           <- matchActualFunTys herald orig noThing (length arg_shapes) sigma_ty
              -- match_wrapper :: sigma_ty "->" (arg_tys -> res_ty)
       ; ((result, res_wrapper), arg_wrappers)
           <- tc_syn_args_e arg_tys arg_shapes $ \ arg_results ->
              tc_syn_arg    res_ty  res_shape  $ \ res_results ->
              thing_inside (arg_results ++ res_results)
       ; return (result, match_wrapper, arg_wrappers, res_wrapper) }
  where
    herald = text "This rebindable syntax expects a function with"

    -- Check each argument type against its shape (in the "expected"
    -- direction, hence tcSynArgE), collecting one wrapper per argument
    tc_syn_args_e :: [TcSigmaType] -> [SyntaxOpType]
                  -> ([TcSigmaType] -> TcM a)
                  -> TcM (a, [HsWrapper])
                    -- the wrappers are for arguments
    tc_syn_args_e (arg_ty : arg_tys) (arg_shape : arg_shapes) thing_inside
      = do { ((result, arg_wraps), arg_wrap)
               <- tcSynArgE     orig arg_ty  arg_shape  $ \ arg1_results ->
                  tc_syn_args_e      arg_tys arg_shapes $ \ args_results ->
                  thing_inside (arg1_results ++ args_results)
           ; return (result, arg_wrap : arg_wraps) }
    tc_syn_args_e _ _ thing_inside = (, []) <$> thing_inside []

    -- Check the result type against its shape, instantiating (rather
    -- than skolemising) because we are on the "actual" side here
    tc_syn_arg :: TcSigmaType -> SyntaxOpType
               -> ([TcSigmaType] -> TcM a)
               -> TcM (a, HsWrapper)
                  -- the wrapper applies to the overall result
    tc_syn_arg res_ty SynAny thing_inside
      = do { result <- thing_inside [res_ty]
           ; return (result, idHsWrapper) }
    tc_syn_arg res_ty SynRho thing_inside
      = do { (inst_wrap, rho_ty) <- deeplyInstantiate orig res_ty
               -- inst_wrap :: res_ty "->" rho_ty
           ; result <- thing_inside [rho_ty]
           ; return (result, inst_wrap) }
    tc_syn_arg res_ty SynList thing_inside
      = do { (inst_wrap, rho_ty) <- topInstantiate orig res_ty
               -- inst_wrap :: res_ty "->" rho_ty
           ; (list_co, elt_ty) <- matchExpectedListTy rho_ty
               -- list_co :: [elt_ty] ~N rho_ty
           ; result <- thing_inside [elt_ty]
           ; return (result, mkWpCastN (mkTcSymCo list_co) <.> inst_wrap) }
    tc_syn_arg _ (SynFun {}) _
      = pprPanic "tcSynArgA hits a SynFun" (ppr orig)
    tc_syn_arg res_ty (SynType the_ty) thing_inside
      = do { wrap <- tcSubTypeO orig GenSigCtxt res_ty the_ty
           ; result <- thing_inside []
           ; return (result, wrap) }
{-
Note [Push result type in]
~~~~~~~~~~~~~~~~~~~~~~~~~~
Unify with expected result before type-checking the args so that the
info from res_ty percolates to args. This is when we might detect a
too-few args situation. (One can think of cases when the opposite
order would give a better error message.)
experimenting with putting this first.
Here's an example where it actually makes a real difference
class C t a b | t a -> b
instance C Char a Bool
data P t a = forall b. (C t a b) => MkP b
data Q t = MkQ (forall a. P t a)
f1, f2 :: Q Char;
f1 = MkQ (MkP True)
f2 = MkQ (MkP True :: forall a. P Char a)
With the change, f1 will type-check, because the 'Char' info from
the signature is propagated into MkQ's argument. With the check
in the other order, the extra signature in f2 is reqd.
************************************************************************
* *
Expressions with a type signature
expr :: type
* *
********************************************************************* -}
-- | Typecheck an expression carrying a user-supplied type signature
-- (@expr :: type@), either complete or partial (with wildcards).
tcExprSig :: LHsExpr Name -> TcIdSigInfo -> TcM (LHsExpr TcId, TcType)
tcExprSig expr (CompleteSig { sig_bndr = poly_id, sig_loc = loc })
  = setSrcSpan loc $   -- Sets the location for the implication constraint
    do { (tv_prs, theta, tau) <- tcInstType (tcInstSigTyVars loc) poly_id
       ; given <- newEvVars theta
       ; let skol_info = SigSkol ExprSigCtxt (mkPhiTy theta tau)
             skol_tvs  = map snd tv_prs
       ; (ev_binds, expr') <- checkConstraints skol_info skol_tvs given $
                              tcExtendTyVarEnv2 tv_prs $
                              tcPolyExprNC expr tau

       -- Re-generalise: abstract over the skolems, the given
       -- dictionaries, and the evidence bindings
       ; let poly_wrap = mkWpTyLams skol_tvs
                         <.> mkWpLams given
                         <.> mkWpLet ev_binds
       ; return (mkLHsWrap poly_wrap expr', idType poly_id) }

tcExprSig expr sig@(PartialSig { psig_name = name, sig_loc = loc })
  = setSrcSpan loc $   -- Sets the location for the implication constraint
    do { (tclvl, wanted, (expr', sig_inst))
             <- pushLevelAndCaptureConstraints $
                do { sig_inst <- tcInstSig sig
                   ; expr' <- tcExtendTyVarEnv2 (sig_inst_skols sig_inst) $
                              tcExtendTyVarEnv2 (sig_inst_wcs sig_inst) $
                              tcPolyExprNC expr (sig_inst_tau sig_inst)
                   ; return (expr', sig_inst) }
       -- See Note [Partial expression signatures]
       ; let tau = sig_inst_tau sig_inst
             -- Apply the monomorphism restriction only when the
             -- partial signature gives no context at all
             infer_mode | null (sig_inst_theta sig_inst)
                        , isNothing (sig_inst_wcx sig_inst)
                        = ApplyMR
                        | otherwise
                        = NoRestrictions
       ; (qtvs, givens, ev_binds)
                 <- simplifyInfer tclvl infer_mode [sig_inst] [(name, tau)] wanted
       ; tau <- zonkTcType tau
       ; let inferred_theta = map evVarPred givens
             tau_tvs = tyCoVarsOfType tau
       ; (binders, my_theta) <- chooseInferredQuantifiers inferred_theta
                                   tau_tvs qtvs (Just sig_inst)
       ; let inferred_sigma = mkInfSigmaTy qtvs inferred_theta tau
             my_sigma = mkForAllTys binders (mkPhiTy my_theta tau)
       ; wrap <- if inferred_sigma `eqType` my_sigma -- NB: eqType ignores vis.
                 then return idHsWrapper  -- Fast path; also avoids complaint when we infer
                                          -- an ambiguous type and have AllowAmbiguousTypes
                                          -- e.g. infer  x :: forall a. F a -> Int
                 else tcSubType_NC ExprSigCtxt inferred_sigma my_sigma

       ; traceTc "tcExpSig" (ppr qtvs $$ ppr givens $$ ppr inferred_sigma $$ ppr my_sigma)

       ; let poly_wrap = wrap
                         <.> mkWpTyLams qtvs
                         <.> mkWpLams givens
                         <.> mkWpLet ev_binds
       ; return (mkLHsWrap poly_wrap expr', my_sigma) }
{- Note [Partial expression signatures]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Partial type signatures on expressions are easy to get wrong. But
here is a guiding principle
e :: ty
should behave like
let x :: ty
x = e
in x
So for partial signatures we apply the MR if no context is given. So
e :: IO _ apply the MR
e :: _ => IO _ do not apply the MR
just like in TcBinds.decideGeneralisationPlan
This makes a difference (Trac #11670):
peek :: Ptr a -> IO CLong
peek ptr = peekElemOff undefined 0 :: _
from (peekElemOff undefined 0) we get
type: IO w
constraints: Storable w
We must NOT try to generalise over 'w' because the signature specifies
no constraints so we'll complain about not being able to solve
Storable w. Instead, don't generalise; then _ gets instantiated to
CLong, as it should.
-}
{- *********************************************************************
* *
tcInferId
* *
********************************************************************* -}
-- | Typecheck an occurrence of a variable, then fit its actual type
-- to the expected result type.
tcCheckId :: Name -> ExpRhoType -> TcM (HsExpr TcId)
tcCheckId name res_ty
  = do { (expr, actual_res_ty) <- tcInferId name
       ; traceTc "tcCheckId" (vcat [ppr name, ppr actual_res_ty, ppr res_ty])
       ; addFunResCtxt False (HsVar (noLoc name)) actual_res_ty res_ty $
         tcWrapResultO (OccurrenceOf name) expr actual_res_ty res_ty }
-- | Typecheck an occurrence of a record selector against the expected
-- type.  An ambiguous selector is disambiguated using the domain of
-- the pushed-in function type, if there is one.
-- See Note [Disambiguating record fields]
tcCheckRecSelId :: AmbiguousFieldOcc Name -> ExpRhoType -> TcM (HsExpr TcId)
tcCheckRecSelId f@(Unambiguous (L _ lbl) _) res_ty
  = do { (expr, actual_res_ty) <- tcInferRecSelId f
       ; addFunResCtxt False (HsRecFld f) actual_res_ty res_ty $
         tcWrapResultO (OccurrenceOfRecSel lbl) expr actual_res_ty res_ty }
tcCheckRecSelId (Ambiguous lbl _) res_ty
  = case tcSplitFunTy_maybe =<< checkingExpType_maybe res_ty of
      Nothing -> ambiguousSelector lbl
      Just (arg, _) -> do { sel_name <- disambiguateSelector lbl arg
                          ; tcCheckRecSelId (Unambiguous lbl sel_name) res_ty }
------------------------
-- | Infer the type of an occurrence of a record selector.
-- An unambiguous selector is looked up directly; an ambiguous one
-- cannot be used in inference mode, so report the ambiguity.
tcInferRecSelId :: AmbiguousFieldOcc Name -> TcM (HsExpr TcId, TcRhoType)
tcInferRecSelId (Unambiguous (L _ lbl) sel)
  = tc_infer_id lbl sel
    -- Previously bound the pair and immediately returned it
    -- (do { (expr', ty) <- ...; return (expr', ty) }); the bind was
    -- redundant, so call tc_infer_id directly
tcInferRecSelId (Ambiguous lbl _)
  = ambiguousSelector lbl
------------------------
tcInferId :: Name -> TcM (HsExpr TcId, TcSigmaType)
-- Look up an occurrence of an Id
-- Do not instantiate its type
tcInferId id_name
  -- A bare tagToEnum# is rejected here; the one-argument application
  -- is handled specially in tcApp instead
  | id_name `hasKey` tagToEnumKey
  = failWithTc (text "tagToEnum# must appear applied to one argument")
        -- tcApp catches the case (tagToEnum# arg)

  -- 'assert' is rewritten to 'assertError' unless assertions are
  -- being ignored; see Note [Adding the implicit parameter to 'assert']
  | id_name `hasKey` assertIdKey
  = do { dflags <- getDynFlags
       ; if gopt Opt_IgnoreAsserts dflags
         then tc_infer_id (nameRdrName id_name) id_name
         else tc_infer_assert id_name }

  | otherwise
  = do { (expr, ty) <- tc_infer_id (nameRdrName id_name) id_name
       ; traceTc "tcInferId" (ppr id_name <+> dcolon <+> ppr ty)
       ; return (expr, ty) }
tc_infer_assert :: Name -> TcM (HsExpr TcId, TcSigmaType)
-- Deal with an occurrence of 'assert'
-- See Note [Adding the implicit parameter to 'assert']
-- Replaces the occurrence with (an instantiated) 'assertError'
tc_infer_assert assert_name
  = do { assert_error_id <- tcLookupId assertErrorName
       ; (wrap, id_rho) <- topInstantiate (OccurrenceOf assert_name)
                                          (idType assert_error_id)
       ; return (mkHsWrap wrap (HsVar (noLoc assert_error_id)), id_rho)
       }
-- | Look up an identifier occurrence and return it together with its
-- (uninstantiated) type.  Handles local Ids, global Ids, data
-- constructors and pattern synonyms; rejects anything else.
tc_infer_id :: RdrName -> Name -> TcM (HsExpr TcId, TcSigmaType)
tc_infer_id lbl id_name
 = do { thing <- tcLookup id_name
      ; case thing of
             ATcId { tct_id = id }
               -> do { check_naughty id  -- Note [Local record selectors]
                     ; checkThLocalId id
                     ; return_id id }

             AGlobal (AnId id)
               -> do { check_naughty id
                     ; return_id id }
                    -- A global cannot possibly be ill-staged
                    -- nor does it need the 'lifting' treatment
                    -- hence no checkTh stuff here

             AGlobal (AConLike cl) -> case cl of
                 RealDataCon con -> return_data_con con
                 PatSynCon ps    -> tcPatSynBuilderOcc ps

             _ -> failWithTc $
                  ppr thing <+> text "used where a value identifier was expected" }
  where
    return_id id = return (HsVar (noLoc id), idType id)

    return_data_con con
       -- For data constructors, must perform the stupid-theta check
      | null stupid_theta
      = return_id con_wrapper_id

      | otherwise
       -- See Note [Instantiating stupid theta]
       -- Instantiate eagerly so the stupid-theta constraints can be
       -- emitted at the instantiated types
      = do { let (tvs, theta, rho) = tcSplitSigmaTy (idType con_wrapper_id)
           ; (subst, tvs') <- newMetaTyVars tvs
           ; let tys'   = mkTyVarTys tvs'
                 theta' = substTheta subst theta
                 rho'   = substTy subst rho
           ; wrap <- instCall (OccurrenceOf id_name) tys' theta'
           ; addDataConStupidTheta con tys'
           ; return (mkHsWrap wrap (HsVar (noLoc con_wrapper_id)), rho') }

      where
        con_wrapper_id = dataConWrapId con
        stupid_theta   = dataConStupidTheta con

    check_naughty id
      | isNaughtyRecordSelector id = failWithTc (naughtyRecordSel lbl)
      | otherwise                  = return ()
tcUnboundId :: UnboundVar -> ExpRhoType -> TcM (HsExpr TcId)
-- Typecheck an occurrence of an unbound Id
--
-- Some of these started life as a true expression hole "_".
-- Others might simply be variables that accidentally have no binding site
--
-- We turn all of them into HsVar, since HsUnboundVar can't contain an
-- Id; and indeed the evidence for the CHoleCan does bind it, so it's
-- not unbound any more!
tcUnboundId unbound res_ty
 = do { ty <- newOpenFlexiTyVarTy  -- Allow Int# etc (Trac #12531)
      ; let occ = unboundVarOcc unbound
      ; name <- newSysName occ
      ; let ev = mkLocalId name ty
      ; loc <- getCtLocM HoleOrigin Nothing
        -- Emit an insoluble hole constraint; the error machinery
        -- reports it with the hole's location and inferred type
      ; let can = CHoleCan { cc_ev = CtWanted { ctev_pred = ty
                                              , ctev_dest = EvVarDest ev
                                              , ctev_nosh = WDeriv
                                              , ctev_loc = loc}
                           , cc_hole = ExprHole unbound }
      ; emitInsoluble can
      ; tcWrapResultO (UnboundOccurrenceOf occ) (HsVar (noLoc ev)) ty res_ty }
{-
Note [Adding the implicit parameter to 'assert']
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The typechecker transforms (assert e1 e2) to (assertError e1 e2).
This isn't really the Right Thing because there's no way to "undo"
if you want to see the original source code in the typechecker
output. We'll have fix this in due course, when we care more about
being able to reconstruct the exact original program.
Note [tagToEnum#]
~~~~~~~~~~~~~~~~~
Nasty check to ensure that tagToEnum# is applied to a type that is an
enumeration TyCon. Unification may refine the type later, but this
check won't see that, alas. It's crude, because it relies on our
knowing *now* that the type is ok, which in turn relies on the
eager-unification part of the type checker pushing enough information
here. In theory the Right Thing to do is to have a new form of
constraint but I definitely cannot face that! And it works ok as-is.
Here are two cases that should fail
f :: forall a. a
f = tagToEnum# 0 -- Can't do tagToEnum# at a type variable
g :: Int
g = tagToEnum# 0 -- Int is not an enumeration
When data type families are involved it's a bit more complicated.
data family F a
data instance F [Int] = A | B | C
Then we want to generate something like
tagToEnum# R:FListInt 3# |> co :: R:FListInt ~ F [Int]
Usually that coercion is hidden inside the wrappers for
constructors of F [Int] but here we have to do it explicitly.
It's all grotesquely complicated.
Note [Instantiating stupid theta]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Normally, when we infer the type of an Id, we don't instantiate,
because we wish to allow for visible type application later on.
But if a datacon has a stupid theta, we're a bit stuck. We need
to emit the stupid theta constraints with instantiated types. It's
difficult to defer this to the lazy instantiation, because a stupid
theta has no spot to put it in a type. So we just instantiate eagerly
in this case. Thus, users cannot use visible type application with
a data constructor sporting a stupid theta. I won't feel so bad for
the users that complain.
-}
tcSeq :: SrcSpan -> Name -> [LHsExprArgIn]
      -> ExpRhoType -> TcM (HsWrapper, LHsExpr TcId, [LHsExprArgOut])
-- (seq e1 e2) :: res_ty
-- We need a special typing rule because res_ty can be unboxed
-- See Note [Typing rule for seq]
tcSeq loc fun_name args res_ty
  = do { fun <- tcLookupId fun_name

       -- First visible type argument (if any) fixes the type of e1;
       -- otherwise invent a fresh lifted type variable for it
       ; (arg1_ty, args1) <- case args of
           (Right hs_ty_arg1 : args1)
             -> do { ty_arg1 <- tcHsTypeApp hs_ty_arg1 liftedTypeKind
                   ; return (ty_arg1, args1) }

           _ -> do { arg_ty1 <- newFlexiTyVarTy liftedTypeKind
                   ; return (arg_ty1, args) }

       -- Second visible type argument (if any) fixes the result type,
       -- which may have any (possibly unlifted) kind
       ; (arg1, arg2, arg2_exp_ty) <- case args1 of
           [Right hs_ty_arg2, Left term_arg1, Left term_arg2]
             -> do { arg2_kind <- newOpenTypeKind
                   ; ty_arg2 <- tcHsTypeApp hs_ty_arg2 arg2_kind
                       -- see Note [Typing rule for seq]
                   ; _ <- tcSubTypeDS (OccurrenceOf fun_name) GenSigCtxt ty_arg2 res_ty
                   ; return (term_arg1, term_arg2, mkCheckExpType ty_arg2) }
           [Left term_arg1, Left term_arg2]
             -> return (term_arg1, term_arg2, res_ty)
           _ -> too_many_args "seq" args

       ; arg1' <- tcMonoExpr arg1 (mkCheckExpType arg1_ty)
       ; arg2' <- tcMonoExpr arg2 arg2_exp_ty
       ; res_ty <- readExpType res_ty  -- by now, it's surely filled in
       ; let fun'    = L loc (HsWrap ty_args (HsVar (L loc fun)))
             ty_args = WpTyApp res_ty <.> WpTyApp arg1_ty
       ; return (idHsWrapper, fun', [Left arg1', Left arg2']) }
tcTagToEnum :: SrcSpan -> Name -> [LHsExprArgIn] -> ExpRhoType
            -> TcM (HsWrapper, LHsExpr TcId, [LHsExprArgOut])
-- tagToEnum# :: forall a. Int# -> a
-- See Note [tagToEnum#]   Urgh!
tcTagToEnum loc fun_name args res_ty
  = do { fun <- tcLookupId fun_name

       -- Accept at most one visible type argument, which (if present)
       -- merely constrains res_ty; its evidence is discarded
       ; arg <- case args of
           [Right hs_ty_arg, Left term_arg]
             -> do { ty_arg <- tcHsTypeApp hs_ty_arg liftedTypeKind
                   ; _ <- tcSubTypeDS (OccurrenceOf fun_name) GenSigCtxt ty_arg res_ty
                     -- other than influencing res_ty, we just
                     -- don't care about a type arg passed in.
                     -- So drop the evidence.
                   ; return term_arg }
           [Left term_arg] -> do { _ <- expTypeToType res_ty
                                 ; return term_arg }
           _ -> too_many_args "tagToEnum#" args

       ; res_ty <- readExpType res_ty
       ; ty' <- zonkTcType res_ty

       -- Check that the type is algebraic
       -- NB: the Just pattern is lazy; it is only forced after the
       -- isJust check below succeeds
       ; let mb_tc_app = tcSplitTyConApp_maybe ty'
             Just (tc, tc_args) = mb_tc_app
       ; checkTc (isJust mb_tc_app)
                 (mk_error ty' doc1)

       -- Look through any type family
       ; fam_envs <- tcGetFamInstEnvs
       ; let (rep_tc, rep_args, coi)
               = tcLookupDataFamInst fam_envs tc tc_args
            -- coi :: tc tc_args ~R rep_tc rep_args

       ; checkTc (isEnumerationTyCon rep_tc)
                 (mk_error ty' doc2)

       ; arg' <- tcMonoExpr arg (mkCheckExpType intPrimTy)
       ; let fun'   = L loc (HsWrap (WpTyApp rep_ty) (HsVar (L loc fun)))
             rep_ty = mkTyConApp rep_tc rep_args

       ; return (mkWpCastR (mkTcSymCo coi), fun', [Left arg']) }
                 -- coi is a Representational coercion
  where
    doc1 = vcat [ text "Specify the type by giving a type signature"
                , text "e.g. (tagToEnum# x) :: Bool" ]
    doc2 = text "Result type must be an enumeration type"

    mk_error :: TcType -> SDoc -> SDoc
    mk_error ty what
      = hang (text "Bad call to tagToEnum#"
                <+> text "at type" <+> ppr ty)
           2 what
-- | Fail with an error listing the offending arguments, used when a
-- specially-typed function (seq, tagToEnum#) receives more type
-- arguments than its typing rule allows.
too_many_args :: String -> [LHsExprArgIn] -> TcM a
too_many_args what extras
  = failWith $ hang herald 2 (sep (map ppr_arg extras))
  where
    herald = text "Too many type arguments to" <+> text what <> colon

    -- Print each argument, parenthesised: value args and visible
    -- type applications alike
    ppr_arg (Left e) = pprParendLExpr e
    ppr_arg (Right (HsWC { hswc_body = L _ t })) = pprParendHsType t
{-
************************************************************************
* *
Template Haskell checks
* *
************************************************************************
-}
-- | For a local identifier used inside a Template Haskell bracket,
-- check whether its use site is at a later TH stage than its binding
-- site; if so, a cross-stage lift may be needed.
checkThLocalId :: Id -> TcM ()
checkThLocalId id
  = do { mb_local_use <- getStageAndBindLevel (idName id)
       ; case mb_local_use of
            Just (top_lvl, bind_lvl, use_stage)
               | thLevel use_stage > bind_lvl
               , isNotTopLevel top_lvl
               -> checkCrossStageLifting id use_stage
            _  -> return ()  -- Not a locally-bound thing, or
                             -- no cross-stage link
       }
--------------------------------------
checkCrossStageLifting :: Id -> ThStage -> TcM ()
-- If we are inside typed brackets, and (use_lvl > bind_lvl)
-- we must check whether there's a cross-stage lift to do
-- Examples   \x -> [|| x ||]
--            [|| map ||]
-- There is no error-checking to do, because the renamer did that
--
-- This is similar to checkCrossStageLifting in RnSplice, but
-- this code is applied to *typed* brackets.
checkCrossStageLifting id (Brack _ (TcPending ps_var lie_var))
  =     -- Nested identifiers, such as 'x' in
        -- E.g. \x -> [|| h x ||]
        -- We must behave as if the reference to x was
        --      h $(lift x)
        -- We use 'x' itself as the splice proxy, used by
        -- the desugarer to stitch it all back together.
        -- If 'x' occurs many times we may get many identical
        -- bindings of the same splice proxy, but that doesn't
        -- matter, although it's a mite untidy.
    do { let id_ty = idType id
       ; checkTc (isTauTy id_ty) (polySpliceErr id)
              -- If x is polymorphic, its occurrence sites might
              -- have different instantiations, so we can't use plain
              -- 'x' as the splice proxy name. I don't know how to
              -- solve this, and it's probably unimportant, so I'm
              -- just going to flag an error for now

       -- Strings are lifted via liftString rather than the Lift class;
       -- see Note [Lifting strings]
       ; lift <- if isStringTy id_ty then
                    do { sid <- tcLookupId THNames.liftStringName
                                    -- See Note [Lifting strings]
                       ; return (HsVar (noLoc sid)) }
                 else
                    setConstraintVar lie_var $
                         -- Put the 'lift' constraint into the right LIE
                    newMethodFromName (OccurrenceOf (idName id))
                                      THNames.liftName id_ty

       -- Update the pending splices
       ; ps <- readMutVar ps_var
       ; let pending_splice = PendingTcSplice (idName id) (nlHsApp (noLoc lift) (nlHsVar id))
       ; writeMutVar ps_var (pending_splice : ps)

       ; return () }

-- Not inside a typed bracket: nothing to do
checkCrossStageLifting _ _ = return ()
-- | Error reported when a polymorphic local variable is used inside a
-- typed Template Haskell bracket and would need a cross-stage lift.
polySpliceErr :: Id -> SDoc
polySpliceErr poly_id =
  text "Can't splice the polymorphic local variable" <+> quotes (ppr poly_id)
{-
Note [Lifting strings]
~~~~~~~~~~~~~~~~~~~~~~
If we see $(... [| s |] ...) where s::String, we don't want to
generate a mass of Cons (CharL 'x') (Cons (CharL 'y') ...)) etc.
So this conditional short-circuits the lifting mechanism to generate
(liftString "xy") in that case. I didn't want to use overlapping instances
for the Lift class in TH.Syntax, because that can lead to overlapping-instance
errors in a polymorphic situation.
If this check fails (which isn't impossible) we get another chance; see
Note [Converting strings] in Convert.hs
Local record selectors
~~~~~~~~~~~~~~~~~~~~~~
Record selectors for TyCons in this module are ordinary local bindings,
which show up as ATcIds rather than AGlobals. So we need to check for
naughtiness in both branches. c.f. TcTyClsBindings.mkAuxBinds.
************************************************************************
* *
\subsection{Record bindings}
* *
************************************************************************
-}
getFixedTyVars :: [FieldLabelString] -> [TyVar] -> [ConLike] -> TyVarSet
-- These tyvars must not change across the updates
-- A universal tyvar of the updated type is "fixed" if, for some
-- constructor, it appears in a field that is NOT being updated, or in
-- any of the constructor's implicit (theta) arguments.
getFixedTyVars upd_fld_occs univ_tvs cons
      = mkVarSet [tv1 | con <- cons
                      , let (u_tvs, _, eqspec, prov_theta
                            , req_theta, arg_tys, _)
                              = conLikeFullSig con
                            theta = eqSpecPreds eqspec
                                     ++ prov_theta
                                     ++ req_theta
                            flds = conLikeFieldLabels con
                            fixed_tvs = exactTyCoVarsOfTypes fixed_tys
                                    -- fixed_tys: See Note [Type of a record update]
                                        `unionVarSet` tyCoVarsOfTypes theta
                                    -- Universally-quantified tyvars that
                                    -- appear in any of the *implicit*
                                    -- arguments to the constructor are fixed
                                    -- See Note [Implicit type sharing]
                            -- Types of the fields that are NOT updated
                            fixed_tys = [ty | (fl, ty) <- zip flds arg_tys
                                            , not (flLabel fl `elem` upd_fld_occs)]
                      -- Translate the constructor's own universals
                      -- back to the parent's universal tyvars
                      , (tv1,tv) <- univ_tvs `zip` u_tvs
                      , tv `elemVarSet` fixed_tvs ]
{-
Note [Disambiguating record fields]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When the -XDuplicateRecordFields extension is used, and the renamer
encounters a record selector or update that it cannot immediately
disambiguate (because it involves fields that belong to multiple
datatypes), it will defer resolution of the ambiguity to the
typechecker. In this case, the `Ambiguous` constructor of
`AmbiguousFieldOcc` is used.
Consider the following definitions:
data S = MkS { foo :: Int }
data T = MkT { foo :: Int, bar :: Int }
data U = MkU { bar :: Int, baz :: Int }
When the renamer sees `foo` as a selector or an update, it will not
know which parent datatype is in use.
For selectors, there are two possible ways to disambiguate:
1. Check if the pushed-in type is a function whose domain is a
datatype, for example:
f s = (foo :: S -> Int) s
g :: T -> Int
g = foo
This is checked by `tcCheckRecSelId` when checking `HsRecFld foo`.
2. Check if the selector is applied to an argument that has a type
signature, for example:
h = foo (s :: S)
This is checked by `tcApp`.
Updates are slightly more complex. The `disambiguateRecordBinds`
function tries to determine the parent datatype in three ways:
1. Check for types that have all the fields being updated. For example:
f x = x { foo = 3, bar = 2 }
Here `f` must be updating `T` because neither `S` nor `U` have
both fields. This may also discover that no possible type exists.
For example the following will be rejected:
f' x = x { foo = 3, baz = 3 }
2. Use the type being pushed in, if it is already a TyConApp. The
following are valid updates to `T`:
g :: T -> T
g x = x { foo = 3 }
g' x = x { foo = 3 } :: T
3. Use the type signature of the record expression, if it exists and
is a TyConApp. Thus this is valid update to `T`:
h x = (x :: T) { foo = 3 }
Note that we do not look up the types of variables being updated, and
no constraint-solving is performed, so for example the following will
be rejected as ambiguous:
let bad (s :: S) = foo s
let r :: T
r = blah
in r { foo = 3 }
\r. (r { foo = 3 }, r :: T )
We could add further tests, of a more heuristic nature. For example,
rather than looking for an explicit signature, we could try to infer
the type of the argument to a selector or the record expression being
updated, in case we are lucky enough to get a TyConApp straight
away. However, it might be hard for programmers to predict whether a
particular update is sufficiently obvious for the signature to be
omitted. Moreover, this might change the behaviour of typechecker in
non-obvious ways.
See also Note [HsRecField and HsRecUpdField] in HsPat.
-}
-- | Given a 'RdrName' that refers to multiple record fields, and the type
-- of its argument, try to determine the name of the selector that is
-- meant.  See Note [Disambiguating record fields].
disambiguateSelector :: Located RdrName -> Type -> TcM Name
disambiguateSelector lr@(L _ rdr) parent_type
  = do { fam_inst_envs <- tcGetFamInstEnvs
       ; case tyConOf fam_inst_envs parent_type of
           -- No visible TyCon in the argument type: give up with an
           -- "ambiguous occurrence" error
           Nothing -> ambiguousSelector lr
           Just p ->
             do { xs <- lookupParents rdr
                ; let parent = RecSelData p
                  -- Pick the candidate selector whose parent is the
                  -- TyCon we found; mark it used for -Wunused-imports etc.
                ; case lookup parent xs of
                    Just gre -> do { addUsedGRE True gre
                                   ; return (gre_name gre) }
                    Nothing  -> failWithTc (fieldNotInType parent rdr) } }
-- | This field name really is ambiguous, so add a suitable "ambiguous
-- occurrence" error, then give up.
ambiguousSelector :: Located RdrName -> TcM a
ambiguousSelector (L _ rdr)
  = do { env <- getGlobalRdrEnv
       ; let gres = lookupGRE_RdrName rdr env
         -- setErrCtxt [] drops the accumulated error context before
         -- reporting the name clash
       ; setErrCtxt [] $ addNameClashErrRn rdr gres
       ; failM }
-- | Disambiguate the fields in a record update.
-- See Note [Disambiguating record fields]
disambiguateRecordBinds :: LHsExpr Name -> TcRhoType
                        -> [LHsRecUpdField Name] -> ExpRhoType
                        -> TcM [LHsRecField' (AmbiguousFieldOcc Id) (LHsExpr Name)]
disambiguateRecordBinds record_expr record_rho rbnds res_ty
    -- Are all the fields unambiguous?
  = case mapM isUnambiguous rbnds of
                     -- If so, just skip to looking up the Ids
                     -- Always the case if DuplicateRecordFields is off
      Just rbnds' -> mapM lookupSelector rbnds'
      Nothing ->   -- If not, try to identify a single parent
        do { fam_inst_envs <- tcGetFamInstEnvs
             -- Look up the possible parents for each field
           ; rbnds_with_parents <- getUpdFieldsParents
           ; let possible_parents = map (map fst . snd) rbnds_with_parents
             -- Identify a single parent
           ; p <- identifyParent fam_inst_envs possible_parents
             -- Pick the right selector with that parent for each field
           ; checkNoErrs $ mapM (pickParent p) rbnds_with_parents }
  where
    -- Extract the selector name of a field update if it is unambiguous
    isUnambiguous :: LHsRecUpdField Name -> Maybe (LHsRecUpdField Name, Name)
    isUnambiguous x = case unLoc (hsRecFieldLbl (unLoc x)) of
                        Unambiguous _ sel_name -> Just (x, sel_name)
                        Ambiguous{}            -> Nothing

    -- Look up the possible parents and selector GREs for each field
    getUpdFieldsParents :: TcM [(LHsRecUpdField Name
                                , [(RecSelParent, GlobalRdrElt)])]
    getUpdFieldsParents
      = fmap (zip rbnds) $ mapM
          (lookupParents . unLoc . hsRecUpdFieldRdr . unLoc)
          rbnds

    -- Given the lists of possible parents for each field,
    -- identify a single parent
    identifyParent :: FamInstEnvs -> [[RecSelParent]] -> TcM RecSelParent
    identifyParent fam_inst_envs possible_parents
      = case foldr1 intersect possible_parents of
          -- No parents for all fields: record update is ill-typed
          []  -> failWithTc (noPossibleParents rbnds)
          -- Exactly one datatype with all the fields: use that
          [p] -> return p
          -- Multiple possible parents: try harder to disambiguate
          -- Can we get a parent TyCon from the pushed-in type?
          _:_ | Just p <- tyConOfET fam_inst_envs res_ty -> return (RecSelData p)
          -- Does the expression being updated have a type signature?
          -- If so, try to extract a parent TyCon from it
              | Just {} <- obviousSig (unLoc record_expr)
              , Just tc <- tyConOf fam_inst_envs record_rho
              -> return (RecSelData tc)
          -- Nothing else we can try...
          _ -> failWithTc badOverloadedUpdate

    -- Make a field unambiguous by choosing the given parent.
    -- Emits an error if the field cannot have that parent,
    -- e.g. if the user writes
    --     r { x = e } :: T
    -- where T does not have field x.
    pickParent :: RecSelParent
               -> (LHsRecUpdField Name, [(RecSelParent, GlobalRdrElt)])
               -> TcM (LHsRecField' (AmbiguousFieldOcc Id) (LHsExpr Name))
    pickParent p (upd, xs)
      = case lookup p xs of
          -- Phew! The parent is valid for this field.
          -- Previously ambiguous fields must be marked as
          -- used now that we know which one is meant, but
          -- unambiguous ones shouldn't be recorded again
          -- (giving duplicate deprecation warnings).
          Just gre -> do { unless (null (tail xs)) $ do
                             let L loc _ = hsRecFieldLbl (unLoc upd)
                             setSrcSpan loc $ addUsedGRE True gre
                         ; lookupSelector (upd, gre_name gre) }
          -- The field doesn't belong to this parent, so report
          -- an error but keep going through all the fields
          Nothing -> do { addErrTc (fieldNotInType p
                                      (unLoc (hsRecUpdFieldRdr (unLoc upd))))
                        ; lookupSelector (upd, gre_name (snd (head xs))) }

    -- Given a (field update, selector name) pair, look up the
    -- selector to give a field update with an unambiguous Id
    lookupSelector :: (LHsRecUpdField Name, Name)
                   -> TcM (LHsRecField' (AmbiguousFieldOcc Id) (LHsExpr Name))
    lookupSelector (L l upd, n)
      = do { i <- tcLookupId n
           ; let L loc af = hsRecFieldLbl upd
                 lbl      = rdrNameAmbiguousFieldOcc af
           ; return $ L l upd { hsRecFieldLbl
                                  = L loc (Unambiguous (L loc lbl) i) } }
-- | Extract the outermost TyCon of a type, if there is one; for
-- data families this is the representation tycon (because that's
-- where the fields live).
tyConOf :: FamInstEnvs -> TcSigmaType -> Maybe TyCon
tyConOf fam_inst_envs ty0 = fmap repTyCon (tcSplitTyConApp_maybe tau)
  where
    -- Strip any outer foralls/contexts before looking for the TyCon
    (_, _, tau) = tcSplitSigmaTy ty0
    -- For data-family instances, replace the family TyCon with its
    -- representation TyCon
    repTyCon (tc, tys) = fstOf3 (tcLookupDataFamInst fam_inst_envs tc tys)
-- | Variant of 'tyConOf' that works for ExpTypes: only succeeds when the
-- ExpType is already a checking type.
tyConOfET :: FamInstEnvs -> ExpRhoType -> Maybe TyCon
tyConOfET fam_inst_envs ty0 = checkingExpType_maybe ty0 >>= tyConOf fam_inst_envs
-- | For an ambiguous record field, find all the candidate record
-- selectors (as GlobalRdrElts) and their parents.
lookupParents :: RdrName -> RnM [(RecSelParent, GlobalRdrElt)]
lookupParents rdr
  = do { env <- getGlobalRdrEnv
       ; let gres = lookupGRE_RdrName rdr env
       ; mapM lookupParent gres }
  where
    -- Pair a candidate with its parent; any candidate that is not
    -- actually a record selector is rejected with an error.
    lookupParent :: GlobalRdrElt -> RnM (RecSelParent, GlobalRdrElt)
    lookupParent gre = do { id <- tcLookupId (gre_name gre)
                          ; if isRecordSelector id
                            then return (recordSelectorTyCon id, gre)
                            else failWithTc (notSelector (gre_name gre)) }
-- | A type signature on the argument of an ambiguous record selector or
-- the record expression in an update must be "obvious", i.e. the
-- outermost constructor ignoring parentheses.
obviousSig :: HsExpr Name -> Maybe (LHsSigWcType Name)
obviousSig expr =
  case expr of
    ExprWithTySig _ ty -> Just ty
    HsPar inner        -> obviousSig (unLoc inner)  -- look through parens
    _                  -> Nothing
{-
Game plan for record bindings
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1. Find the TyCon for the bindings, from the first field label.
2. Instantiate its tyvars and unify (T a1 .. an) with expected_ty.
For each binding field = value
3. Instantiate the field type (from the field label) using the type
environment from step 2.
4 Type check the value using tcArg, passing the field type as
the expected argument type.
This extends OK when the field types are universally quantified.
-}
-- | Typecheck the field bindings of a record construction, checking each
-- field's RHS against the expected argument type of the constructor.
tcRecordBinds
  :: ConLike
  -> [TcType]     -- Expected type for each field
  -> HsRecordBinds Name
  -> TcM (HsRecordBinds TcId)
tcRecordBinds con_like arg_tys (HsRecFields rbinds dd)
  = do { mb_binds <- mapM do_bind rbinds
         -- Fields that failed to check are dropped (the error has
         -- already been reported), so the remaining bindings still
         -- get checked
       ; return (HsRecFields (catMaybes mb_binds) dd) }
  where
    fields     = map flLabel $ conLikeFieldLabels con_like
    flds_w_tys = zipEqual "tcRecordBinds" fields arg_tys

    -- Check one field binding; Nothing means it did not belong to
    -- this constructor (an error has been emitted)
    do_bind :: LHsRecField Name (LHsExpr Name)
            -> TcM (Maybe (LHsRecField TcId (LHsExpr TcId)))
    do_bind (L l fld@(HsRecField { hsRecFieldLbl = f
                                 , hsRecFieldArg = rhs }))
      = do { mb <- tcRecordField con_like flds_w_tys f rhs
           ; case mb of
               Nothing         -> return Nothing
               Just (f', rhs') -> return (Just (L l (fld { hsRecFieldLbl = f'
                                                         , hsRecFieldArg = rhs' }))) }
-- | Typecheck the field bindings of a record update, after the fields
-- have already been disambiguated to particular selector Ids.
tcRecordUpd
  :: ConLike
  -> [TcType]     -- Expected type for each field
  -> [LHsRecField' (AmbiguousFieldOcc Id) (LHsExpr Name)]
  -> TcM [LHsRecUpdField TcId]
tcRecordUpd con_like arg_tys rbinds = fmap catMaybes $ mapM do_bind rbinds
  where
    flds_w_tys = zipEqual "tcRecordUpd" (map flLabel $ conLikeFieldLabels con_like) arg_tys

    -- Check one update; Nothing means the field did not belong to this
    -- constructor (tcRecordField has already reported the error)
    do_bind :: LHsRecField' (AmbiguousFieldOcc Id) (LHsExpr Name) -> TcM (Maybe (LHsRecUpdField TcId))
    do_bind (L l fld@(HsRecField { hsRecFieldLbl = L loc af
                                 , hsRecFieldArg = rhs }))
      = do { let lbl    = rdrNameAmbiguousFieldOcc af
                 sel_id = selectorAmbiguousFieldOcc af
                 -- Re-package the already-resolved selector as a
                 -- FieldOcc for tcRecordField
                 f      = L loc (FieldOcc (L loc lbl) (idName sel_id))
           ; mb <- tcRecordField con_like flds_w_tys f rhs
           ; case mb of
               Nothing -> return Nothing
               Just (f', rhs') ->
                 return (Just
                          (L l (fld { hsRecFieldLbl
                                        = L loc (Unambiguous (L loc lbl)
                                                   (selectorFieldOcc (unLoc f')))
                                    , hsRecFieldArg = rhs' }))) }
-- | Typecheck a single record field binding against the expected type
-- looked up by its label; returns Nothing (an error having been added)
-- when the constructor has no such field.
tcRecordField :: ConLike -> Assoc FieldLabelString Type -> LFieldOcc Name -> LHsExpr Name
              -> TcM (Maybe (LFieldOcc Id, LHsExpr Id))
tcRecordField con_like flds_w_tys (L loc (FieldOcc lbl sel_name)) rhs
  | Just field_ty <- assocMaybe flds_w_tys field_lbl
  = addErrCtxt (fieldCtxt field_lbl) $
    do { rhs' <- tcPolyExprNC rhs field_ty
       ; let field_id = mkUserLocal (nameOccName sel_name)
                                    (nameUnique sel_name)
                                    field_ty loc
         -- Yuk: the field_id has the *unique* of the selector Id
         --          (so we can find it easily)
         --      but is a LocalId with the appropriate type of the RHS
         --          (so the desugarer knows the type of local binder to make)
       ; return (Just (L loc (FieldOcc lbl field_id), rhs')) }
  | otherwise
  = do { addErrTc (badFieldCon con_like field_lbl)
       ; return Nothing }
  where
    field_lbl = occNameFS $ rdrNameOcc (unLoc lbl)
-- | Check a record construction for omitted fields: missing strict
-- fields are an error; other missing fields produce a warning when
-- -Wmissing-fields is enabled.
checkMissingFields :: ConLike -> HsRecordBinds Name -> TcM ()
checkMissingFields con_like rbinds
  -- Not declared as a record; but C{} is still valid provided the
  -- constructor has no strict fields
  | null field_labels
  , any isBanged field_strs
  = addErrTc (missingStrictFields con_like [])

  | null field_labels
  = return ()

  | otherwise  -- A record
  = do -- Omitted strict fields are always an error
       unless (null missing_s_fields) $
         addErrTc (missingStrictFields con_like missing_s_fields)
       -- Other omitted fields are only a warning, and only on request.
       -- (Was: 'unless (not (warn && notNull ...))' -- a double negation.)
       warn <- woptM Opt_WarnMissingFields
       unless (null missing_ns_fields || not warn) $
         warnTc (Reason Opt_WarnMissingFields) True
                (missingFields con_like missing_ns_fields)
  where
    missing_s_fields
      = [ flLabel fl | (fl, str) <- field_info
                     , isBanged str
                     , not (fl `elemField` field_names_used) ]
    missing_ns_fields
      = [ flLabel fl | (fl, str) <- field_info
                     , not (isBanged str)
                     , not (fl `elemField` field_names_used) ]

    field_names_used = hsRecFields rbinds
    field_labels     = conLikeFieldLabels con_like

    -- Each field paired with its strictness annotation
    field_info = zipEqual "missingFields" field_labels field_strs
    field_strs = conLikeImplBangs con_like

    fl `elemField` flds = any (\ fl' -> flSelector fl == fl') flds
{-
************************************************************************
* *
\subsection{Errors and contexts}
* *
************************************************************************
Boring and alphabetical:
-}
-- | Push an "In the expression: ..." context onto the error-context stack.
addExprErrCtxt :: LHsExpr Name -> TcM a -> TcM a
addExprErrCtxt = addErrCtxt . exprCtxt
-- | The "In the expression: ..." context message itself.
exprCtxt :: LHsExpr Name -> SDoc
exprCtxt expr = hang herald 2 (ppr expr)
  where
    herald = text "In the expression:"
-- | Error context for checking the RHS of a record-field binding.
-- Uses 'text' rather than the deprecated 'ptext (sLit ...)', matching
-- the rest of this module.
fieldCtxt :: FieldLabelString -> SDoc
fieldCtxt field_name
  = text "In the" <+> quotes (ppr field_name) <+> text "field of a record"
addFunResCtxt :: Bool  -- There is at least one argument
              -> HsExpr Name -> TcType -> ExpRhoType
              -> TcM a -> TcM a
-- When we have a mis-match in the return type of a function
-- try to give a helpful message about too many/few arguments
--
-- Used for naked variables too; but with has_args = False
addFunResCtxt has_args fun fun_res_ty env_ty
  = addLandmarkErrCtxtM (\env -> (env, ) <$> mk_msg)
      -- NB: use a landmark error context, so that an empty context
      -- doesn't suppress some more useful context
  where
    mk_msg
      = do { mb_env_ty <- readExpType_maybe env_ty
                 -- by the time the message is rendered, the ExpType
                 -- will be filled in (except if we're debugging)
           ; fun_res' <- zonkTcType fun_res_ty
           ; env' <- case mb_env_ty of
                       Just env_ty -> zonkTcType env_ty
                       Nothing ->
                         -- An unfilled ExpType should only be seen when
                         -- dumping traces; assert that and make up a type
                         do { dumping <- doptM Opt_D_dump_tc_trace
                            ; MASSERT( dumping )
                            ; newFlexiTyVarTy liftedTypeKind }
             -- Compare how many arguments the function type still
             -- accepts against how many the context expects
           ; let (_, _, fun_tau) = tcSplitSigmaTy fun_res'
                 (_, _, env_tau) = tcSplitSigmaTy env'
                 (args_fun, res_fun) = tcSplitFunTys fun_tau
                 (args_env, res_env) = tcSplitFunTys env_tau
                 n_fun = length args_fun
                 n_env = length args_env
                 info | n_fun == n_env = Outputable.empty
                      | n_fun > n_env
                      , not_fun res_env
                      = text "Probable cause:" <+> quotes (ppr fun)
                        <+> text "is applied to too few arguments"
                      | has_args
                      , not_fun res_fun
                      = text "Possible cause:" <+> quotes (ppr fun)
                        <+> text "is applied to too many arguments"
                      | otherwise
                      = Outputable.empty
                          -- Never suggest that a naked variable is
                          -- applied to too many args!
           ; return info }
      where
        not_fun ty   -- ty is definitely not an arrow type,
                     -- and cannot conceivably become one
          = case tcSplitTyConApp_maybe ty of
              Just (tc, _) -> isAlgTyCon tc
              Nothing      -> False
-- | Error for a record update whose fields are insufficiently polymorphic.
badFieldTypes :: [(FieldLabelString,TcType)] -> SDoc
badFieldTypes prs = hang herald 2 (vcat (map pprField prs))
  where
    herald = text "Record update for insufficiently polymorphic field"
             <> plural prs <> colon
    pprField (f, ty) = ppr f <+> dcolon <+> ppr ty
badFieldsUpd
  :: [LHsRecField' (AmbiguousFieldOcc Id) (LHsExpr Name)]
        -- Field names that don't belong to a single datacon
  -> [ConLike]
        -- Data cons of the type which the first field name belongs to
  -> SDoc
badFieldsUpd rbinds data_cons
  = hang (text "No constructor has all these fields:")
       2 (pprQuotedList conflictingFields)
          -- See Note [Finding the conflicting fields]
  where
    -- A (preferably small) set of fields such that no constructor contains
    -- all of them.  See Note [Finding the conflicting fields]
    conflictingFields = case nonMembers of
        -- nonMember belongs to a different type.
        (nonMember, _) : _ -> [aMember, nonMember]
        [] -> let
            -- All of rbinds belong to one type. In this case, repeatedly add
            -- a field to the set until no constructor contains the set.

            -- Each field, together with a list indicating which constructors
            -- have all the fields so far.
            growingSets :: [(FieldLabelString, [Bool])]
            growingSets = scanl1 combine membership
            combine (_, setMem) (field, fldMem)
              = (field, zipWith (&&) setMem fldMem)
            in
            -- Fields that don't change the membership status of the set
            -- are redundant and can be dropped.
            map (fst . head) $ groupBy ((==) `on` snd) growingSets

    aMember = ASSERT( not (null members) ) fst (head members)
    (members, nonMembers) = partition (or . snd) membership

    -- For each field, which constructors contain the field?
    membership :: [(FieldLabelString, [Bool])]
    membership = sortMembership $
        map (\fld -> (fld, map (Set.member fld) fieldLabelSets)) $
          map (occNameFS . rdrNameOcc . rdrNameAmbiguousFieldOcc . unLoc . hsRecFieldLbl . unLoc) rbinds

    -- One set of field labels per constructor
    fieldLabelSets :: [Set.Set FieldLabelString]
    fieldLabelSets = map (Set.fromList . map flLabel . conLikeFieldLabels) data_cons

    -- Sort in order of increasing number of True, so that a smaller
    -- conflicting set can be found.
    sortMembership =
      map snd .
      sortBy (compare `on` fst) .
      map (\ item@(_, membershipRow) -> (countTrue membershipRow, item))

    countTrue = count id
{-
Note [Finding the conflicting fields]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose we have
data A = A {a0, a1 :: Int}
| B {b0, b1 :: Int}
and we see a record update
x { a0 = 3, a1 = 2, b0 = 4, b1 = 5 }
Then we'd like to find the smallest subset of fields that no
constructor has all of. Here, say, {a0,b0}, or {a0,b1}, etc.
We don't really want to report that no constructor has all of
{a0,a1,b0,b1}, because when there are hundreds of fields it's
hard to see what was really wrong.
We may need more than two fields, though; eg
data T = A { x,y :: Int, v::Int }
| B { y,z :: Int, v::Int }
| C { z,x :: Int, v::Int }
with update
r { x=e1, y=e2, z=e3 }, we need all three fields {x,y,z} in the
reported set to show that no single constructor has them all.
Finding the smallest subset is hard, so the code here makes
a decent stab, no more. See Trac #7989.
-}
-- | Error for a record selector that cannot be used as an ordinary
-- function (its type would let type variables escape).
naughtyRecordSel :: RdrName -> SDoc
naughtyRecordSel sel_id = problem $$ suggestion
  where
    problem    = text "Cannot use record selector" <+> quotes (ppr sel_id)
                 <+> text "as a function due to escaped type variables"
    suggestion = text "Probable fix: use pattern-matching syntax instead"
-- | Error for a name used where a record selector was required.
notSelector :: Name -> SDoc
notSelector field = quotes (ppr field) <+> text "is not a record selector"
-- | Error when a record update mixes data-constructor record selectors
-- with pattern-synonym record selectors.  Uses 'text' rather than the
-- deprecated 'ptext (sLit ...)', matching the rest of this module.
mixedSelectors :: [Id] -> [Id] -> SDoc
mixedSelectors data_sels@(dc_rep_id:_) pat_syn_sels@(ps_rep_id:_)
  = text "Cannot use a mixture of pattern synonym and record selectors" $$
    text "Record selectors defined by"
      <+> quotes (ppr (tyConName rep_dc))
      <> text ":"
      <+> pprWithCommas ppr data_sels $$
    text "Pattern synonym selectors defined by"
      <+> quotes (ppr (patSynName rep_ps))
      <> text ":"
      <+> pprWithCommas ppr pat_syn_sels
  where
    -- One representative selector from each group names the parent
    RecSelPatSyn rep_ps = recordSelectorTyCon ps_rep_id
    RecSelData   rep_dc = recordSelectorTyCon dc_rep_id
mixedSelectors _ _ = panic "TcExpr: mixedSelectors emptylists"
-- | Error for strict fields omitted from a record construction.
missingStrictFields :: ConLike -> [FieldLabelString] -> SDoc
missingStrictFields con fields = header <> rest
  where
    header = text "Constructor" <+> quotes (ppr con) <+>
             text "does not have the required strict field(s)"
    -- A non-record constructor with strict fields has no names to list
    rest | null fields = Outputable.empty
         | otherwise   = colon <+> pprWithCommas ppr fields
-- | Warning listing the (non-strict) fields omitted from a record
-- construction.  Uses 'text' rather than the deprecated
-- 'ptext (sLit ...)', matching the rest of this module.
missingFields :: ConLike -> [FieldLabelString] -> SDoc
missingFields con fields
  = text "Fields of" <+> quotes (ppr con) <+> text "not initialised:"
    <+> pprWithCommas ppr fields
-- callCtxt fun args = text "In the call" <+> parens (ppr (foldl mkHsApp fun args))
-- | Error for a record update whose fields belong to no single type.
noPossibleParents :: [LHsRecUpdField Name] -> SDoc
noPossibleParents rbinds =
    hang (text "No type has all these fields:") 2 (pprQuotedList lbls)
  where
    lbls = map (hsRecFieldLbl . unLoc) rbinds
-- | Error when an overloaded record update cannot be disambiguated;
-- see Note [Disambiguating record fields].
badOverloadedUpdate :: SDoc
badOverloadedUpdate = text "Record update is ambiguous, and requires a type signature"
-- | Error for a field that the chosen parent type does not have.
fieldNotInType :: RecSelParent -> RdrName -> SDoc
fieldNotInType p rdr
  = unknownSubordinateErr (text "field of type" <+> quotes (ppr p)) rdr
{-
************************************************************************
* *
\subsection{Static Pointers}
* *
************************************************************************
-}
-- | A data type to describe why a variable is not closed.
-- See the 'explain'/'causes' helpers in 'checkClosedInStaticForm' for
-- how each constructor is rendered to the user.
data NotClosedReason = NotLetBoundReason
                       -- ^ The variable is not let-bound.
                     | NotTypeClosed VarSet
                       -- ^ Its type mentions these free type variables.
                     | NotClosed Name NotClosedReason
                       -- ^ It uses a variable which is itself not closed,
                       -- for the given reason.
-- | Checks if the given name is closed and emits an error if not.
--
-- See Note [Not-closed error messages].
checkClosedInStaticForm :: Name -> TcM ()
checkClosedInStaticForm name = do
    type_env <- getLclTypeEnv
    case checkClosed type_env name of
      Nothing -> return ()
      Just reason -> addErrTc $ explain name reason
  where
    -- See Note [Checking closedness].
    checkClosed :: TcTypeEnv -> Name -> Maybe NotClosedReason
    checkClosed type_env n = checkLoop type_env (unitNameSet n) n

    checkLoop :: TcTypeEnv -> NameSet -> Name -> Maybe NotClosedReason
    checkLoop type_env visited n = do
      -- The @visited@ set is an accumulating parameter that contains the set of
      -- visited nodes, so we avoid repeating cycles in the traversal.
      case lookupNameEnv type_env n of
        Just (ATcId { tct_id = tcid, tct_info = info }) -> case info of
          ClosedLet   -> Nothing
          NotLetBound -> Just NotLetBoundReason
          NonClosedLet fvs type_closed -> listToMaybe $
            -- Look for a non-closed variable in fvs
            [ NotClosed n' reason
            | n' <- nameSetElemsStable fvs
            , not (elemNameSet n' visited)
            , Just reason <- [checkLoop type_env (extendNameSet visited n') n']
            ] ++
            if type_closed then
              []
            else
              -- We consider non-let-bound variables easier to figure out than
              -- non-closed types, so we report non-closed types to the user
              -- only if we cannot spot the former.
              [ NotTypeClosed $ tyCoVarsOfType (idType tcid) ]
        -- The binding is closed.
        _ -> Nothing

    -- Converts a reason into a human-readable sentence.
    --
    -- @explain name reason@ starts with
    --
    -- "<name> is used in a static form but it is not closed because it"
    --
    -- and then follows a list of causes. For each id in the path, the text
    --
    -- "uses <id> which"
    --
    -- is appended, yielding something like
    --
    -- "uses <id> which uses <id1> which uses <id2> which"
    --
    -- until the end of the path is reached, which is reported as either
    --
    -- "is not let-bound"
    --
    -- when the final node is not let-bound, or
    --
    -- "has a non-closed type because it contains the type variables:
    -- v1, v2, v3"
    --
    -- when the final node has a non-closed type.
    --
    explain :: Name -> NotClosedReason -> SDoc
    explain name reason =
      quotes (ppr name) <+> text "is used in a static form but it is not closed"
      <+> text "because it"
      $$
      sep (causes reason)

    -- One SDoc per step of the path described above.
    causes :: NotClosedReason -> [SDoc]
    causes NotLetBoundReason = [text "is not let-bound."]
    causes (NotTypeClosed vs) =
      [ text "has a non-closed type because it contains the"
      , text "type variables:" <+>
        pprVarSet vs (hsep . punctuate comma . map (quotes . ppr))
      ]
    causes (NotClosed n reason) =
      let msg = text "uses" <+> quotes (ppr n) <+> text "which"
       in case reason of
            NotClosed _ _ -> msg : causes reason
            -- Glue "uses <n> which" onto the first sentence of the tail
            _ -> let (xs0, xs1) = splitAt 1 $ causes reason
                  in fmap (msg <+>) xs0 ++ xs1
-- Note [Not-closed error messages]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- When variables in a static form are not closed, we go through the trouble
-- of explaining why they aren't.
--
-- Thus, the following program
--
-- > {-# LANGUAGE StaticPointers #-}
-- > module M where
-- >
-- > f x = static g
-- > where
-- > g = h
-- > h = x
--
-- produces the error
--
-- 'g' is used in a static form but it is not closed because it
-- uses 'h' which uses 'x' which is not let-bound.
--
-- And a program like
--
-- > {-# LANGUAGE StaticPointers #-}
-- > module M where
-- >
-- > import Data.Typeable
-- > import GHC.StaticPtr
-- >
-- > f :: Typeable a => a -> StaticPtr TypeRep
-- > f x = const (static (g undefined)) (h x)
-- > where
-- > g = h
-- > h = typeOf
--
-- produces the error
--
-- 'g' is used in a static form but it is not closed because it
-- uses 'h' which has a non-closed type because it contains the
-- type variables: 'a'
--
-- Note [Checking closedness]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- @checkClosed@ checks if a binding is closed and returns a reason if it is
-- not.
--
-- The bindings define a graph where the nodes are ids, and there is an edge
-- from @id1@ to @id2@ if the rhs of @id1@ contains @id2@ among its free
-- variables.
--
-- When @n@ is not closed, it has to exist in the graph some node reachable
-- from @n@ that it is not a let-bound variable or that it has a non-closed
-- type. Thus, the "reason" is a path from @n@ to this offending node.
--
-- When @n@ is not closed, we traverse the graph reachable from @n@ to build
-- the reason.
--
|
olsner/ghc
|
compiler/typecheck/TcExpr.hs
|
bsd-3-clause
| 111,515 | 293 | 24 | 35,912 | 20,040 | 10,486 | 9,554 | -1 | -1 |
module A where

-- Trivial example binding (module name as a string).
a = "a"
|
abuiles/turbinado-blog
|
tmp/dependencies/hs-plugins-1.3.1/testsuite/make/makeall001/A.hs
|
bsd-3-clause
| 24 | 0 | 4 | 7 | 9 | 6 | 3 | 2 | 1 |
-------------------------------------------------------------------------------
-- |
-- Module : CCO.Feedback.Message
-- Copyright : (c) 2008 Utrecht University
-- License : All rights reserved
--
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
--
-- Log, warning, and error messages.
--
-------------------------------------------------------------------------------
module CCO.Feedback.Message (
-- * Messages
Message (Log, Warning, Error)
, isError -- :: Message -> Bool
, fromMessage -- :: Message -> Doc
, fatalizeWarnings -- :: [Message] -> [Message]
, filterMessages -- :: Int -> Int -> [Message] -> [Message]
, putMessages -- :: Handle -> [Message] -> IO ()
) where
import CCO.Printing (Doc, render_, renderHeight_)
import System.IO (Handle, hPutStrLn)
-------------------------------------------------------------------------------
-- Messages
-------------------------------------------------------------------------------
-- | Type of messages.
-- Each @Message@ holds a pretty-printable document in which the text for the
-- message is stored.
data Message
  = Log Int Doc      -- ^ A log message at a specified verbosity level, the
                     -- default level being 1.
  | Warning Int Doc  -- ^ A warning message at a specified severity level,
                     -- the default level being 1.
  | Error Doc        -- ^ An error message.
-- | Indicates whether a 'Message' is an 'Error' message.
isError :: Message -> Bool
isError msg = case msg of
  Error _ -> True
  _       -> False
-- | Retrieves the 'Doc' stored in a 'Message'.
fromMessage :: Message -> Doc
fromMessage msg = case msg of
  Log _ doc     -> doc
  Warning _ doc -> doc
  Error doc     -> doc
-- | Turns 'Warning' messages into 'Error' messages, leaving all other
-- messages untouched.
fatalizeWarnings :: [Message] -> [Message]
fatalizeWarnings = map fatalize
  where
    fatalize (Warning _ doc) = Error doc
    fatalize other           = other
-- | Filters 'Message's that do not exceed specified verbosity and severity
-- levels.  'Error' messages are always kept.
filterMessages :: Int -> Int -> [Message] -> [Message]
filterMessages maxVerbosity maxSeverity = filter keep
  where
    keep (Log lvl _)     = lvl <= maxVerbosity
    keep (Warning lvl _) = lvl <= maxSeverity
    keep _               = True
-- | Pretty prints the 'Doc' stored in a 'Message' onto a 'Handle'.
-- All but the last message are followed by a blank separator line.
putMessages :: Handle -> [Message] -> IO ()
putMessages h = putMsgs
  where
    putMsgs []    = return ()
    -- Last message: plain render, no separator after it
    putMsgs [msg] = hPutStrLn h (render_ 79 (fromMessage msg))
    putMsgs (msg : msgs) = do
      let (s, height) = renderHeight_ 79 (fromMessage msg)
      hPutStrLn h s
      -- NOTE(review): 'height >= 0' looks like it always holds, so the
      -- blank line is printed unconditionally here -- confirm whether a
      -- stricter test (e.g. 'height > 1') was intended.
      if height >= 0 then hPutStrLn h "" else return ()
      putMsgs msgs
|
UU-ComputerScience/uu-cco
|
uu-cco/src/CCO/Feedback/Message.hs
|
bsd-3-clause
| 2,775 | 0 | 14 | 734 | 504 | 283 | 221 | 40 | 4 |
import Eval (eval)
import Parser (parseExpr)
import System.Environment
-- | Parse the given program text and, on success, evaluate it;
-- otherwise report the parse error on stdout.
process :: String -> IO ()
process input =
  case parseExpr input of
    Right expr -> eval expr
    Left err -> do
      putStrLn "Parser Error:"
      print err
-- | Entry point: read the file named by the single command-line
-- argument and process its contents.  Any other number of arguments
-- prints the usage message.  (Previously, two or more arguments
-- crashed with a non-exhaustive-patterns error.)
main :: IO ()
main = do
  args <- getArgs
  case args of
    [fname] -> do
      contents <- readFile fname
      process contents
    _ -> putStrLn "Usage: assign <input file>"
|
FranklinChen/write-you-a-haskell
|
chapter9/assign/Main.hs
|
mit
| 437 | 0 | 13 | 124 | 161 | 75 | 86 | 19 | 2 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<!-- JavaHelp helpset descriptor (Filipino locale): declares the help map
     and the TOC / Index / Search / Favorites navigation views. -->
<helpset version="2.0" xml:lang="fil-PH">
  <title>SOAP Support Add-on</title>
  <maps>
    <homeID>soap</homeID>
    <mapref location="map.jhm"/>
  </maps>
  <view>
    <name>TOC</name>
    <label>Mga Nilalaman</label>
    <type>org.zaproxy.zap.extension.help.ZapTocView</type>
    <data>toc.xml</data>
  </view>
  <view>
    <name>Index</name>
    <label>Indeks</label>
    <type>javax.help.IndexView</type>
    <data>index.xml</data>
  </view>
  <view>
    <name>Search</name>
    <label>Paghahanap</label>
    <type>javax.help.SearchView</type>
    <data engine="com.sun.java.help.search.DefaultSearchEngine">
    JavaHelpSearch
    </data>
  </view>
  <view>
    <name>Favorites</name>
    <label>Mga Paborito</label>
    <type>javax.help.FavoritesView</type>
  </view>
</helpset>
|
kingthorin/zap-extensions
|
addOns/soap/src/main/javahelp/org/zaproxy/zap/extension/soap/resources/help_fil_PH/helpset_fil_PH.hs
|
apache-2.0
| 979 | 82 | 53 | 159 | 402 | 211 | 191 | -1 | -1 |
{-# LANGUAGE GeneralizedNewtypeDeriving, DeriveDataTypeable #-}
{-
Copyright (C) 2014 John MacFarlane <[email protected]>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
{- |
Module : Text.Pandoc.MediaBag
Copyright : Copyright (C) 2014 John MacFarlane
License : GNU GPL, version 2 or above
Maintainer : John MacFarlane <[email protected]>
Stability : alpha
Portability : portable
Definition of a MediaBag object to hold binary resources, and an
interface for interacting with it.
-}
module Text.Pandoc.MediaBag (
MediaBag,
lookupMedia,
insertMedia,
mediaDirectory,
extractMediaBag
) where
import System.FilePath
import qualified System.FilePath.Posix as Posix
import System.Directory (createDirectoryIfMissing)
import qualified Data.Map as M
import qualified Data.ByteString.Lazy as BL
import Control.Monad (when)
import Text.Pandoc.MIME (MimeType, getMimeTypeDef)
import qualified Text.Pandoc.UTF8 as UTF8
import Data.Maybe (fromMaybe)
import System.IO (stderr)
import Data.Data (Data)
import Data.Typeable (Typeable)
-- | A container for a collection of binary resources, with names and
-- mime types. Note that a 'MediaBag' is a Monoid, so 'mempty'
-- can be used for an empty 'MediaBag', and '<>' can be used to append
-- two 'MediaBag's.
--
-- Keys are stored as path components (see 'insertMedia'), so lookups
-- do not depend on the platform's path-separator style.
newtype MediaBag = MediaBag (M.Map [String] (MimeType, BL.ByteString))
        deriving (Monoid, Data, Typeable)

-- Show only the directory listing, not the (possibly large) contents.
instance Show MediaBag where
  show bag = "MediaBag " ++ show (mediaDirectory bag)
-- | Insert a media item into a 'MediaBag', replacing any existing
-- value with the same name.
insertMedia :: FilePath       -- ^ relative path and canonical name of resource
            -> Maybe MimeType -- ^ mime type (Nothing = determine from extension)
            -> BL.ByteString  -- ^ contents of resource
            -> MediaBag
            -> MediaBag
insertMedia fp mbMime contents (MediaBag mediamap) =
    MediaBag (M.insert (splitDirectories fp) (mime, contents) mediamap)
  where
    mime = case mbMime of
             Just m  -> m
             Nothing -> guessedMime
    -- Derive a mime type from the extension, looking through a
    -- trailing ".gz" if present.
    guessedMime
      | takeExtension fp == ".gz" = getMimeTypeDef (dropExtension fp)
      | otherwise                 = getMimeTypeDef fp
-- | Lookup a media item in a 'MediaBag', returning mime type and contents.
lookupMedia :: FilePath
            -> MediaBag
            -> Maybe (MimeType, BL.ByteString)
lookupMedia fp (MediaBag items) = M.lookup key items
  where
    key = splitDirectories fp
-- | Get a list of the file paths stored in a 'MediaBag', with
-- their corresponding mime types and the lengths in bytes of the contents.
mediaDirectory :: MediaBag -> [(String, MimeType, Int)]
mediaDirectory (MediaBag mediamap) =
  -- M.foldrWithKey replaces the deprecated M.foldWithKey (same
  -- behavior: right fold in ascending key order).
  M.foldrWithKey (\fp (mime, contents) ->
    ((Posix.joinPath fp, mime, fromIntegral (BL.length contents)) :)) [] mediamap
-- | Extract contents of MediaBag to a given directory. Print informational
-- messages if 'verbose' is true.
extractMediaBag :: Bool     -- ^ verbose
                -> FilePath -- ^ target directory
                -> MediaBag
                -> IO ()
extractMediaBag verbose dir (MediaBag mediamap) =
  -- M.toList replaces the deprecated M.foldWithKey; extraction still
  -- proceeds in ascending key order.
  mapM_ (\(fp, (_, contents)) ->
           writeMedia verbose dir (Posix.joinPath fp, contents))
        (M.toList mediamap)
-- | Write one extracted resource to disk, creating parent directories
-- as needed; report the path on stderr when verbose.
writeMedia :: Bool -> FilePath -> (FilePath, BL.ByteString) -> IO ()
writeMedia verbose dir (subpath, bs) = do
  -- we join and split to convert a/b/c to a\b\c on Windows;
  -- in zip containers all paths use /
  let target = dir </> normalise subpath
  createDirectoryIfMissing True (takeDirectory target)
  when verbose $ UTF8.hPutStrLn stderr ("pandoc: extracting " ++ target)
  BL.writeFile target bs
|
janschulz/pandoc
|
src/Text/Pandoc/MediaBag.hs
|
gpl-2.0
| 4,353 | 0 | 16 | 1,000 | 703 | 391 | 312 | 56 | 2 |
{-# LANGUAGE PolyKinds, DataKinds, ExplicitForAll #-}
module T15743 where
import Data.Kind
import Data.Proxy
data SimilarKind :: forall (c :: k) (d :: k). Proxy c -> Proxy d -> Type -- c and d must share the kind k
data T k (c :: k) (a :: Proxy c) b (x :: SimilarKind a b) -- kind of b is left to inference
data T2 k (c :: k) (a :: Proxy c) (b :: Proxy d) (x :: SimilarKind a b) -- NOTE(review): 'd' is unbound here; given the testsuite should_fail path, this appears to be a deliberate compile-failure example (comments kept on the same lines to preserve line numbers)
|
sdiehl/ghc
|
testsuite/tests/dependent/should_fail/T15743c.hs
|
bsd-3-clause
| 316 | 0 | 8 | 69 | 130 | 81 | 49 | -1 | -1 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE CPP #-}
module Network.Wai.Handler.Warp.IO where
import Data.ByteString (ByteString)
import Data.ByteString.Builder (Builder)
import Data.ByteString.Builder.Extra (runBuilder, Next(Done, More, Chunk))
import Network.Wai.Handler.Warp.Buffer
import Network.Wai.Handler.Warp.Types
-- | Repeatedly run a 'Builder' into the supplied buffer, handing every
-- filled region (and any oversized chunks the builder emits) to the
-- consumer action @io@.
toBufIOWith :: Buffer -> BufSize -> (ByteString -> IO ()) -> Builder -> IO ()
toBufIOWith buf !size io builder = go (runBuilder builder)
  where
    flush len = bufferIO buf len io
    go writer = do
        (len, next) <- writer buf size
        case next of
            Done -> flush len
            More minSize writer'
                -- The builder needs a bigger buffer than we own.
                | size < minSize -> error "toBufIOWith: BufferFull: minSize"
                | otherwise      -> flush len >> go writer'
            Chunk bs writer' -> do
                flush len
                io bs
                go writer'
|
AndrewRademacher/wai
|
warp/Network/Wai/Handler/Warp/IO.hs
|
mit
| 964 | 0 | 15 | 306 | 268 | 139 | 129 | 25 | 3 |
import Control.Monad
import Data.List.Extra
import Data.Maybe
import qualified Data.Char as C
import qualified Data.Map as Map
import qualified Data.Set as Set
------
-- | Read a String as an Int (errors on malformed input, like 'read').
iread :: String -> Int
iread s = read s
-- | Run a puzzle solution: read all of stdin, apply @f@, print the shown
-- result followed by a newline.
answer :: (Show a) => (String -> a) -> IO ()
answer f = interact (\input -> show (f input) ++ "\n")
-- | 0-based alphabet index of a lowercase letter ('a' -> 0 .. 'z' -> 25).
ord0 :: Char -> Int
ord0 c = C.ord c - C.ord 'a'
-- | Inverse of 'ord0': 0-based index back to a lowercase letter.
chr0 :: Int -> Char
chr0 i = C.chr (i + C.ord 'a')
-- | Shift a lowercase letter by @i@ positions, wrapping around the
-- 26-letter alphabet ('mod' keeps negative shifts in range too).
incletter :: Char -> Int -> Char
incletter c i = chr0 ((ord0 c + i) `mod` 26)
-- Split @b@ once around the first occurrence of the infix @a@.
-- Partial: 'fromJust' errors when @a@ does not occur in @b@.
splitOn1 a b = fromJust $ stripInfix a b
-- Like 'splitOn1' but splits around the last occurrence; partial likewise.
rsplitOn1 a b = fromJust $ stripInfixEnd a b
-- pull out every part of a String that can be read in
-- for some Read a and ignore the rest
readOut :: Read a => String -> [a]
readOut [] = []
readOut s@(_:rest) = case reads s of
  []        -> readOut rest
  [(x, s')] -> x : readOut s'
  _         -> error "ambiguous parse"

-- 'readOut' specialised to Int.
ireadOut :: String -> [Int]
ireadOut s = readOut s
--------
-- Default template behaviour: emit the list of ints found on each line.
main = answer (map ireadOut . lines)
|
msullivan/advent-of-code
|
2016/Template.hs
|
mit
| 842 | 0 | 10 | 187 | 353 | 189 | 164 | 24 | 3 |
-- Weight for weight
-- http://www.codewars.com/kata/55c6126177c9441a570000cc/
module Codewars.G964.WeightSort where
import Data.Char (digitToInt)
import Data.List(sortBy)
-- | Sort the space-separated "weights" by the sum of their digits;
-- numbers with equal digit sums are ordered as strings (lexicographically).
orderWeight :: String -> String
orderWeight = unwords . sortBy byWeight . words
  where
    digitSum = sum . map digitToInt
    byWeight a b =
      case compare (digitSum a) (digitSum b) of
        EQ    -> compare a b
        other -> other
|
gafiatulin/codewars
|
src/5 kyu/WeightSort.hs
|
mit
| 443 | 0 | 10 | 134 | 116 | 62 | 54 | 8 | 2 |
{-|
Copyright (c) 2014 Maciej Bendkowski
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-}
module LCParser where
import Data.Char
import ParserUtils
import LC
-- LC grammar
-- <variable> := [a..z] (0-9)*
-- <abstraction> := "\" <variable> "." <term>
-- <subterm> := <variable> | <abstraction> | "(" <term> ")"
-- <term> := <subterm>+
{-|
  Variable parser: one lowercase letter followed by any number of digits
  (e.g. @x@, @y2@).
-}
variableParser :: Parser Term
variableParser = do
    letter <- lowercase
    digits <- token digitString
    return $ Var (letter : digits)
  where
    lowercase :: Parser Char
    lowercase = itemPred (`elem` ['a'..'z'])
    digitString :: Parser String
    digitString = star $ itemPred isDigit
{-|
  Abstraction parser: a backslash, a bound variable, a dot and a body,
  e.g. @\\x.x@.
-}
abstractionParser :: Parser Term
abstractionParser = do
    _ <- symb "\\"
    bound <- variableParser
    _ <- symb "."
    body <- termParser
    return $ Abs (name bound) body
{-|
  Subterm parser: a variable, an abstraction, or a parenthesised term.
-}
subtermParser :: Parser Term
subtermParser = variableParser `dmplus` abstractionParser `dmplus` parenthesised
  where
    parenthesised :: Parser Term
    parenthesised = do
        _ <- symb "("
        inner <- termParser
        _ <- symb ")"
        return inner
{-|
  Term parser: one or more subterms, folded into left-nested applications
  (application associates to the left).
-}
termParser :: Parser Term
termParser = do
    subterms <- pstar subtermParser
    return $ apply' (head subterms) (tail subterms)
  where
    apply' :: Term -> [Term] -> Term
    apply' acc []     = acc
    apply' acc (u:us) = apply' (App acc u) us
{-|
  Attempts to parse a LC term from the given string; succeeds only when
  the whole input is consumed.
-}
parseLC :: String -> Maybe Term
parseLC input =
    case apply termParser input of
        ((term, rest):_) | null rest -> Just term
        _ -> Nothing
|
maciej-bendkowski/LCCLUtils
|
src/LCParser.hs
|
mit
| 3,028 | 0 | 11 | 996 | 472 | 241 | 231 | 41 | 3 |
-- -------------------------------------------------------------------------------------
-- Author: Sourabh S Joshi (cbrghostrider); Copyright - All rights reserved.
-- For email, run on linux (perl v5.8.5):
-- perl -e 'print pack "H*","736f75726162682e732e6a6f73686940676d61696c2e636f6d0a"'
-- -------------------------------------------------------------------------------------
import Data.List
-- | gcd of the product of the first list with the product of the second
-- (an empty list contributes the empty product, 1).
bigGCD :: [Integer] -> [Integer] -> Integer
bigGCD ns ms = gcd (product ns) (product ms)
main :: IO ()
main = do
  -- The counts on the first and third lines are not needed; they are read
  -- only to consume the input lines.
  _nCount <- getLine
  nsstr   <- getLine
  _mCount <- getLine
  msstr   <- getLine
  -- Answer is reported modulo 10^9 + 7, as the problem requires.
  let ans = (`mod` (10^9 + 7)) $ bigGCD (map read . words $ nsstr) (map read . words $ msstr)
  print ans
|
cbrghostrider/Hacking
|
HackerRank/FunctionalProgramming/AdHoc/bigGCD.hs
|
mit
| 765 | 0 | 15 | 147 | 187 | 97 | 90 | 12 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
-- | Narrow re-export module: exposes only 'FilePath' from "Prelude".
module Prelude.Source.GHC.IO (
  FilePath,
  ) where
import Prelude (
  FilePath,
  )
|
scott-fleischman/cafeteria-prelude
|
src/Prelude/Source/GHC/IO.hs
|
mit
| 127 | 0 | 5 | 29 | 24 | 17 | 7 | 5 | 0 |
{-# LANGUAGE OverloadedStrings #-}
module WordProblem (answer) where
import Data.Text (pack)
import Data.List (foldl')
import Control.Applicative (pure, (<|>), (<$>), (<*>), (<*), (*>))
import Data.Attoparsec.Text
( Parser, signed, decimal, space, maybeResult, parse, many' )
-- | Parse a whole word problem, "What is N <op> M ...?", and evaluate it
-- strictly left to right (no operator precedence).
answerParser :: Parser Int
answerParser = do
  start <- "What is " *> signed decimal
  steps <- many' (space *> operation)
  "?" *> pure (foldl' (flip ($)) start steps)
-- | Evaluate a word problem; Nothing when the input does not parse.
answer :: String -> Maybe Int
answer question = maybeResult (parse answerParser (pack question))
-- | One "<operator> <number>" step, returned as the function it applies
-- to the running total.
operation :: Parser (Int -> Int)
operation = do
  op <- operator
  _ <- space
  rhs <- signed decimal
  pure (flip op rhs)
-- | The supported binary operators, matched by keyword.
operator :: Parser (Int -> Int -> Int)
operator = (+) <$ "plus"
       <|> (-) <$ "minus"
       <|> (*) <$ "multiplied by"
       <|> div <$ "divided by"
|
pminten/xhaskell
|
wordy/example.hs
|
mit
| 817 | 0 | 12 | 187 | 302 | 170 | 132 | 21 | 1 |
{-# OPTIONS_GHC -F -pgmF htfpp #-}
-- | This module tests the standard value iteration algorithm for
-- discounted problems by comparing its iterations to known iterations
-- from "Dynamic Programming and Optimal Control", Dimitri
-- P. Bertsekas, p. 23.
module Algorithms.MDP.Ex_3_1_Test where
import Test.Framework
import Algorithms.MDP.Examples.Ex_3_1
import Algorithms.MDP
import Algorithms.MDP.ValueIteration
-- | True when the two values coincide exactly or differ by at most eps.
almostEqual eps (x, y) = x == y || abs (x - y) <= eps
-- First 16 value-iteration sweeps over the example MDP.
iterations = take 16 (valueIteration mdp)
-- Known-correct values for state A, one per iteration, taken from
-- Bertsekas, "Dynamic Programming and Optimal Control", p. 23.
correctValuesA =
  [ 0
  , 0.5
  , 1.287
  , 1.844
  , 2.414
  , 2.896
  , 3.343
  , 3.740
  , 4.099
  , 4.422
  , 4.713
  , 4.974
  , 5.209
  , 5.421
  , 5.612
  , 5.783
  ]
-- Known-correct values for state B, one per iteration (same source as
-- 'correctValuesA').
correctValuesB =
  [ 0
  , 1
  , 1.562
  , 2.220
  , 2.745
  , 3.247
  , 3.686
  , 4.086
  , 4.444
  , 4.767
  , 5.057
  , 5.319
  , 5.554
  , 5.766
  , 5.957
  , 6.128
  ]
-- Computed values for each state, paired with the published ones.
actualValuesA = map (cost A) iterations
actualValuesB = map (cost B) iterations
pairsA = zip actualValuesA correctValuesA
pairsB = zip actualValuesB correctValuesB
-- Pairs that disagree beyond the 1e-3 tolerance (book values are rounded
-- to three decimals).
badPairsA = filter (not . almostEqual 1e-3) pairsA
badPairsB = filter (not . almostEqual 1e-3) pairsB
-- HTF test entry points; the mismatching pairs are shown on failure.
test_AValues = assertBoolVerbose (unlines (map show badPairsA)) (null badPairsA)
test_BValues = assertBoolVerbose (unlines (map show badPairsB)) (null badPairsB)
|
prsteele/mdp
|
testsuite/tests/Algorithms/MDP/Ex_3_1_Test.hs
|
mit
| 1,341 | 0 | 9 | 307 | 358 | 203 | 155 | 51 | 1 |
{-# htermination inRange :: Ix a => (a,a) -> a -> Bool #-}
|
ComputationWithBoundedResources/ara-inference
|
doc/tpdb_trs/Haskell/full_haskell/Prelude_inRange_1.hs
|
mit
| 59 | 0 | 2 | 13 | 3 | 2 | 1 | 1 | 0 |
{-# LANGUAGE RankNTypes #-}
--{-# LANGUAGE KindSignatures #-}
--{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Rec where
import Data.Function (fix)
import Control.Monad.Fix
import Control.Monad.State
import Mem
-- | 'Rec' defeats the occurs check: it lets a value be applied to itself,
-- which is exactly what a fixed-point combinator needs.
newtype Rec a = Rec { runRec :: Rec a -> a }

-- | Curry's Y combinator, typed via self-application through 'Rec'.
--
-- Compiling this used to send ghc into an infinite loop, see
-- https://ghc.haskell.org/trac/ghc/ticket/8168
-- The {-# NOINLINE g #-} pragma works around that; do not remove it.
y :: (a -> a) -> a
y f = g (Rec g)
  where g h = f (runRec h h)
        {-# NOINLINE g #-}
{- Some implementations of the fixed point operator via value recursion.
fix :: (a -> a) -> a
fix' f = fix $ Rec $ f . runRec
fix' f = f (fix' f)
fix f = let x = f x in x
--}
-- | One unrolling of factorial: given "factorial" for smaller inputs,
-- produce factorial at n.
fact' :: (Int -> Integer) -> (Int -> Integer)
fact' _ 0 = 1
fact' rec n = fromIntegral n * rec (n - 1)
-- | Factorial, tied together with the fixed-point combinator.
fact = fix fact'
-- | One unrolling of the Fibonacci recurrence (fib 0 = 0, fib 1 = 1).
fibRec' :: (Int -> Integer) -> (Int -> Integer)
fibRec' _ 0 = 0
fibRec' _ 1 = 1
fibRec' rec n = rec (n - 2) + rec (n - 1)
-- Fibonacci with memoisation ('memInt') spliced in under the fixed point.
fib = fix $ memInt . fibRec'
-- doesn't work! only certain monads have mfix
--mfix :: (Monad m) => (a -> m a) -> m a
--mfix f = let x = x >>= f in x
--mfix f = fix (>>= f)
|
vladfi1/hs-misc
|
PFP/Rec.hs
|
mit
| 1,117 | 0 | 9 | 257 | 278 | 156 | 122 | 21 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TemplateHaskell #-}
{- |
MinMax (or minimax) algorithm for 'Game'.
The module provides both an unpruned version 'minMax'
and a AB-pruned version 'minMaxAB'.
There is not reason to use the unpruned version over the pruned one.
The unpruned version is only there for educational purpose.
See the wikipedia [Minimax](https://en.wikipedia.org/wiki/Minimax#Minimax_algorithm_with_alternate_moves)
webpage if you want to know momre about minimax.
-}
module J2S.AI.MinMax
( MinMaxParam (MinMaxParam)
, minMax
, minMaxAB
) where
import qualified Data.Foldable as F
import qualified Data.Functor.Foldable as FF
import qualified Data.NLTree as NL
import qualified Data.List.NonEmpty as NE
import qualified Data.Traversable as T
import Control.Applicative
import Control.Lens
import Control.Monad.Reader
import Control.Monad.State
import Data.Ord (comparing)
import Numeric.Natural
import J2S.AI.Types
-- | Parameters for a minmax algorithm.
data MinMaxParam b s
  = MinMaxParam
  { -- | How many plays in advance should we look to take a decision?
    _depth :: Natural
    -- | How do we evaluate the board configurations?
  , _eval :: Eval b s
  }
makeLenses ''MinMaxParam
-- | Unpruned minimax strategy: expand the game tree to the configured
-- depth, evaluate the leaves, and pick the best action.  Prefer
-- 'minMaxAB'; this version exists for educational purposes.
minMax :: (ListableActions b, Ord s)
       => Strategy (Reader (MinMaxParam b s)) b
minMax b = do
  d <- asks (view depth)
  e <- asks (view eval)
  pure $ foldForest e . fromGame d $ b
-- | Alpha-beta pruned minimax strategy; same result as 'minMax' but the
-- tree fold can cut branches early (see 'foldForestAB').
minMaxAB :: (ListableActions b, Ord s)
         => Strategy (Reader (MinMaxParam b s)) b
minMaxAB b = do
  d <- asks (view depth)
  e <- asks (view eval)
  pure $ foldForestAB e . fromGame d $ b
-- | Flag to indicate whether we try to maximize our score (our turn)
-- or to minimize it (opponent's turn)
data Phase = Max | Min
-- | Alternate phases as turns alternate down the tree.
changePhase :: Phase -> Phase
changePhase Max = Min
changePhase Min = Max
-- | Score every action's subtree (starting in the Min phase, i.e. the
-- opponent replies next) and keep the action with the maximal score.
foldForest :: Ord s => (a -> s) -> NE.NonEmpty (c, NL.NLTree b a) -> c
foldForest e = let
  evalTree = fmap $ fmap (flip runReader Min . foldTree e)
  in fst . F.maximumBy (comparing snd) . evalTree
-- | Catamorphism over one game tree: leaves are evaluated with @e@,
-- inner nodes take the max or min of their children depending on the
-- current 'Phase', which flips at each level.
foldTree :: (Ord s, MonadReader Phase m)
         => (a -> s) -> NL.NLTree b a -> m s
foldTree e = let
  selector Max = F.maximum
  selector Min = F.minimum
  go (NL.L l) = pure (e l)
  go (NL.N _ xs) = do
    s <- asks selector
    s <$> local changePhase (T.sequence xs)
  in FF.cata go
-- | Like 'foldForest' but with alpha-beta pruning: each subtree is folded
-- with 'foldTreeAB' over fresh cut-off state (initially Nothing).
foldForestAB :: Ord s => (a -> s) -> NE.NonEmpty (c, NL.NLTree b a) -> c
foldForestAB e = let
  evalTree =
    fmap $ fmap (flip evalState Nothing . flip runReaderT Min . foldTreeAB e)
  in fst . F.maximumBy (comparing snd) . evalTree
-- | Alpha-beta variant of 'foldTree'.  The state holds the best score the
-- parent has seen so far (the single cut value); 'cut' folds the children
-- in the Either monad and short-circuits (Left) as soon as a child score
-- proves this node irrelevant to the parent's choice.
foldTreeAB :: (Ord s, MonadState (Maybe s) m, MonadReader Phase m)
           => (a -> s) -> NL.NLTree b a -> m s
foldTreeAB = let
  selector Max = max
  selector Min = min
  comp Max = (<)
  comp Min = (>)
  -- No cut value yet: just keep the best-so-far of the children.
  cut phase Nothing c n = pure (selector phase c n)
  -- With a cut value: stop early (Left) when the bound is beaten.
  cut phase (Just cv) c n =
    if comp phase cv n
      then Left n
      else Right $ selector phase c n
  go e (NL.L l) = pure $ e l
  go _ (NL.N _ xs) = do
    p <- ask
    cutValue <- get
    -- Children start with no bound of their own.
    put Nothing
    (h NE.:| t) <- local changePhase $ T.sequence xs
    let score = foldM (cut p cutValue) h t
    -- Publish this node's score as the sibling bound, then return it.
    liftA2 (>>) (put . pure) pure $ either id id score
  in FF.cata . go
|
berewt/J2S
|
src/J2S/AI/MinMax.hs
|
mit
| 3,331 | 0 | 16 | 875 | 1,101 | 562 | 539 | 82 | 6 |
{-# LANGUAGE QuasiQuotes #-}
module Hpack.Syntax.DefaultsSpec (spec) where
import Helper
import Data.Aeson.Config.FromValueSpec hiding (spec)
import Data.Aeson.Config.FromValue
import Hpack.Syntax.Defaults
-- | Shorthand for building the GitHub variant of 'Defaults' in expectations.
defaultsGithub :: String -> String -> String -> [FilePath] -> Defaults
defaultsGithub owner repo ref path = DefaultsGithub $ Github owner repo ref path
-- | Specs for parsing 'Defaults': owner/repo validation plus decoding of
-- both the structured Object form and the compact "owner/repo@ref" String
-- form.  The yaml quasiquotes hold the exact input documents.
spec :: Spec
spec = do
  describe "isValidOwner" $ do
    it "rejects the empty string" $ do
      isValidOwner "" `shouldBe` False
    it "accepts valid owner names" $ do
      isValidOwner "Foo-Bar-23" `shouldBe` True
    it "rejects dots" $ do
      isValidOwner "foo.bar" `shouldBe` False
    it "rejects multiple consecutive hyphens" $ do
      isValidOwner "foo--bar" `shouldBe` False
    it "rejects hyphens at the beginning" $ do
      isValidOwner "-foo" `shouldBe` False
    it "rejects hyphens at the end" $ do
      isValidOwner "foo-" `shouldBe` False
  describe "isValidRepo" $ do
    it "rejects the empty string" $ do
      isValidRepo "" `shouldBe` False
    it "rejects ." $ do
      isValidRepo "." `shouldBe` False
    it "rejects .." $ do
      isValidRepo ".." `shouldBe` False
    it "accepts underscores" $ do
      isValidRepo "foo_bar" `shouldBe` True
    it "accepts dots" $ do
      isValidRepo "foo.bar" `shouldBe` True
    it "accepts hyphens" $ do
      isValidRepo "foo-bar" `shouldBe` True
  describe "fromValue" $ do
    context "when parsing Defaults" $ do
      let
        left :: String -> Result Defaults
        left = Left
      -- Structured form: a mapping with github/ref/path keys.
      context "with Object" $ do
        it "fails when neither github nor local is present" $ do
          [yaml|
          defaults:
            foo: one
            bar: two
          library: {}
          |] `shouldDecodeTo` left "Error while parsing $ - neither key \"github\" nor key \"local\" present"
        it "accepts Defaults from GitHub" $ do
          [yaml|
          github: sol/hpack
          ref: 0.1.0
          path: defaults.yaml
          |] `shouldDecodeTo_` defaultsGithub "sol" "hpack" "0.1.0" ["defaults.yaml"]
        it "rejects invalid owner names" $ do
          [yaml|
          github: ../hpack
          ref: 0.1.0
          path: defaults.yaml
          |] `shouldDecodeTo` left "Error while parsing $.github - invalid owner name \"..\""
        it "rejects invalid repository names" $ do
          [yaml|
          github: sol/..
          ref: 0.1.0
          path: defaults.yaml
          |] `shouldDecodeTo` left "Error while parsing $.github - invalid repository name \"..\""
        it "rejects invalid Git references" $ do
          [yaml|
          github: sol/hpack
          ref: ../foo/bar
          path: defaults.yaml
          |] `shouldDecodeTo` left "Error while parsing $.ref - invalid Git reference \"../foo/bar\""
        it "rejects \\ in path" $ do
          [yaml|
          github: sol/hpack
          ref: 0.1.0
          path: hpack\defaults.yaml
          |] `shouldDecodeTo` left "Error while parsing $.path - rejecting '\\' in \"hpack\\\\defaults.yaml\", please use '/' to separate path components"
        it "rejects : in path" $ do
          [yaml|
          github: sol/hpack
          ref: 0.1.0
          path: foo:bar.yaml
          |] `shouldDecodeTo` left "Error while parsing $.path - rejecting ':' in \"foo:bar.yaml\""
        it "rejects absolute paths" $ do
          [yaml|
          github: sol/hpack
          ref: 0.1.0
          path: /defaults.yaml
          |] `shouldDecodeTo` left "Error while parsing $.path - rejecting absolute path \"/defaults.yaml\""
        it "rejects .. in path" $ do
          [yaml|
          github: sol/hpack
          ref: 0.1.0
          path: ../../defaults.yaml
          |] `shouldDecodeTo` left "Error while parsing $.path - rejecting \"..\" in \"../../defaults.yaml\""
      -- Compact form: "owner/repo@ref" as a single scalar.
      context "with String" $ do
        it "accepts Defaults from GitHub" $ do
          [yaml|
          sol/hpack@0.1.0
          |] `shouldDecodeTo_` defaultsGithub "sol" "hpack" "0.1.0" [".hpack", "defaults.yaml"]
        it "rejects invalid owner names" $ do
          [yaml|
          ../hpack@0.1.0
          |] `shouldDecodeTo` left "Error while parsing $ - invalid owner name \"..\""
        it "rejects invalid repository names" $ do
          [yaml|
          sol/..@0.1.0
          |] `shouldDecodeTo` left "Error while parsing $ - invalid repository name \"..\""
        it "rejects invalid Git references" $ do
          [yaml|
          sol/pack@../foo/bar
          |] `shouldDecodeTo` left "Error while parsing $ - invalid Git reference \"../foo/bar\""
        it "rejects missing Git reference" $ do
          [yaml|
          sol/hpack
          |] `shouldDecodeTo` left "Error while parsing $ - missing Git reference for \"sol/hpack\", the expected format is owner/repo@ref"
      context "with neither Object nor String" $ do
        it "fails" $ do
          [yaml|
          10
          |] `shouldDecodeTo` left "Error while parsing $ - expected Object or String, encountered Number"
|
haskell-tinc/hpack
|
test/Hpack/Syntax/DefaultsSpec.hs
|
mit
| 5,091 | 0 | 21 | 1,591 | 869 | 441 | 428 | 89 | 1 |
----------------
-- prerequisites
import System.Environment
import Data.List.Split
-- Average gravitational acceleration in central Europe (m/s^2); the sign
-- follows the coordinate convention used in 'step' (downward negative).
g = -9.81
-- State of a double pendulum (both arms share length and mass).
data Pendulum = Pendulum {
    getL :: Double, -- arm length of each pendulum
    getMass :: Double, -- mass per pendulum bob
    getPhi1 :: Double, -- first deflection, as an angle
    getP1 :: Double, -- conjugate momentum of the first bob (see 'step')
    getPhi2 :: Double, -- second deflection, as an angle
    getP2 :: Double -- conjugate momentum of the second bob
  } deriving (Read, Show)
-------
-- main
-- Simulate a double pendulum with unit parameters and a tiny initial
-- deflection for 60 simulated seconds, dumping the trajectory as CSV.
main :: IO ()
main = putStr (formatCSV trajectory)
  where trajectory = step (Pendulum 1 1 0.001 0 0 0) 0.001 60.0
-- Format a list of rows of Doubles as comma-separated values, one row
-- per line (piggybacks on 'show' and strips the list brackets).
formatCSV :: [[Double]] -> String
formatCSV xss = unlines rows
  where rows = splitOn "],[" (drop 2 (init (init (show xss))))
-- Convert an angle from radians to degrees.
toDeg :: Double -> Double
toDeg rad = 180 / pi * rad
-- Advance the double pendulum by Euler steps of width 'timeStep' until
-- 'time' is used up, collecting [phi1vel, phi1, phi2vel, phi2] per step.
-- NOTE(review): the factor (6/mass*l^2) parses as (6/mass)*l^2; the usual
-- double-pendulum equations use 6/(mass*l^2) -- confirm intended.
step :: Pendulum -> Double -> Double -> [[Double]]
step (Pendulum l mass phi1 p1 phi2 p2) timeStep time
  | time <= 0 = [] -- termination condition for the recursion
  | otherwise =
      let phi1vel = (6/mass*l^2) * ((2*p1 - 3*(cos (phi1-phi2))*p2) / (16 - 9*(cos (phi1 - phi2))^2))
          phi1' = phi1 + phi1vel * timeStep
          phi2vel = (6/mass*l^2) * ((8*p2 - 3*(cos (phi1-phi2))*p1) / (16 - 9*(cos (phi1 - phi2))^2))
          phi2' = phi2 + phi2vel * timeStep
          p1vel = ((-1)/2*mass*l^2) * (phi1vel*phi2vel*(sin (phi1 - phi2)) + 3*(g/l)*(sin phi1))
          p1' = p1 + p1vel * timeStep
          p2vel = ((-1)/2*mass*l^2) * ((-phi1vel)*phi2vel*(sin (phi1 - phi2)) + (g/l)*(sin phi2))
          p2' = p2 + p2vel * timeStep
      in [phi1vel, phi1, phi2vel, phi2] : (step (Pendulum l mass phi1' p1' phi2' p2') timeStep (time-timeStep))
|
tychon/chaospendel
|
wikihaskell/main.hs
|
mit
| 1,750 | 0 | 19 | 421 | 736 | 402 | 334 | 32 | 1 |
-- | Placeholder module with an empty export list; the DatabaseCallback
-- API is not exposed here.
module GHCJS.DOM.DatabaseCallback (
  ) where
|
manyoo/ghcjs-dom
|
ghcjs-dom-webkit/src/GHCJS/DOM/DatabaseCallback.hs
|
mit
| 46 | 0 | 3 | 7 | 10 | 7 | 3 | 1 | 0 |
{-# LANGUAGE TypeFamilies, FlexibleContexts, FlexibleInstances, GADTs, MultiParamTypeClasses, ScopedTypeVariables, AllowAmbiguousTypes, EmptyDataDecls #-}
module Graphics.GPipe.Internal.Texture where
import Graphics.GPipe.Internal.Format
import Graphics.GPipe.Internal.Expr
import Graphics.GPipe.Internal.Context
import Graphics.GPipe.Internal.Shader
import Graphics.GPipe.Internal.Compiler
import Graphics.GPipe.Internal.Buffer
import Control.Monad.IO.Class (MonadIO, liftIO)
import Data.IntMap.Lazy (insert)
import Graphics.GL.Core33
import Graphics.GL.Types
import Graphics.GL.Ext.EXT.TextureFilterAnisotropic
import Foreign.Ptr
import Foreign.Storable
import Foreign.Marshal.Alloc
import Foreign.Marshal.Utils
import Control.Monad
import Data.IORef
import Control.Applicative
import Control.Monad.Exception (bracket, MonadAsyncException)
import Linear.V4
import Linear.V3
import Linear.V2
import Control.Exception (throwIO)
import Control.Monad.Trans.Class (lift)
-- GPU texture handles: each carries its GL object name, base-level size
-- and the number of allocated mipmap levels.  The phantom parameters tie
-- the texture to an object space @os@ and its element format @a@.
data Texture1D os a = Texture1D TexName Size1 MaxLevels
data Texture1DArray os a = Texture1DArray TexName Size2 MaxLevels
-- 2D textures may instead be backed by a renderbuffer (see 'newTexture2D').
data Texture2D os a = Texture2D TexName Size2 MaxLevels
                    | RenderBuffer2D TexName Size2
data Texture2DArray os a = Texture2DArray TexName Size3 MaxLevels
data Texture3D os a = Texture3D TexName Size3 MaxLevels
data TextureCube os a = TextureCube TexName Size1 MaxLevels
-- Size aliases: 1/2/3-dimensional extents and the mip level count.
type MaxLevels = Int
type Size1 = Int
type Size2 = V2 Int
type Size3 = V3 Int
-- Constructors for each texture shape: given a format, a base size and a
-- requested mipmap count, allocate the texture inside the context monad.
newTexture1D :: forall w os f c m. (ColorSampleable c, MonadIO m) => Format c -> Size1 -> MaxLevels -> ContextT w os f m (Texture1D os (Format c))
newTexture1DArray :: forall w os f c m. (ColorSampleable c, MonadIO m) => Format c -> Size2 -> MaxLevels -> ContextT w os f m (Texture1DArray os (Format c))
newTexture2D :: forall w os f c m. (TextureFormat c, MonadIO m) => Format c -> Size2 -> MaxLevels -> ContextT w os f m (Texture2D os (Format c))
newTexture2DArray :: forall w os f c m. (ColorSampleable c, MonadIO m) => Format c -> Size3 -> MaxLevels -> ContextT w os f m (Texture2DArray os (Format c))
newTexture3D :: forall w os f c m. (ColorRenderable c, MonadIO m) => Format c -> Size3 -> MaxLevels -> ContextT w os f m (Texture3D os (Format c))
newTextureCube :: forall w os f c m. (ColorSampleable c, MonadIO m) => Format c -> Size1 -> MaxLevels -> ContextT w os f m (TextureCube os (Format c))
-- | Allocate a 1D texture: validate the size against GL_MAX_TEXTURE_SIZE,
-- reserve storage for every mip level (NULL data pointer, so contents
-- start undefined) and install default sampling parameters.
newTexture1D f s mx | s < 0 = error "newTexture1D, negative size"
                    | mx <= 0 = error "newTexture1D, non-positive MaxLevels"
                    | otherwise = do
  mxSize <- getGlValue GL_MAX_TEXTURE_SIZE
  if s > mxSize
    then liftIO $ throwIO $ GPipeException "newTexture1D, size larger then maximum supported by graphics driver"
    else do
      t <- makeTex
      let glintf = fromIntegral $ getGlInternalFormat f
          glf = getGlFormat (undefined :: c)
          -- Never allocate more levels than the size can be halved into.
          ls = min mx (calcMaxLevels s)
          tex = Texture1D t s ls
      liftContextIOAsync $ do
        useTexSync t GL_TEXTURE_1D
        forM_ (zip (texture1DSizes tex) [0..]) $ \(lw, l) ->
          glTexImage1D GL_TEXTURE_1D l glintf (fromIntegral lw) 0 glf GL_BYTE nullPtr
        setDefaultTexParams GL_TEXTURE_1D (ls-1)
      return tex
-- | Allocate a 1D texture array of @sl@ slices; only the width is
-- mipmapped, the slice count stays constant per level.
newTexture1DArray f s@(V2 w sl) mx
  | w < 0 || sl < 0 = error "newTexture1DArray, negative size"
  | mx <= 0 = error "newTexture1DArray, non-positive MaxLevels"
  | otherwise = do
      mxSize <- getGlValue GL_MAX_TEXTURE_SIZE
      if w > mxSize || sl > mxSize
        then liftIO $ throwIO $ GPipeException "newTexture1DArray, size larger then maximum supported by graphics driver"
        else do
          t <- makeTex
          let glintf = fromIntegral $ getGlInternalFormat f
              glf = getGlFormat (undefined :: c)
              ls = min mx (calcMaxLevels w)
              tex = Texture1DArray t s ls
          liftContextIOAsync $ do
            useTexSync t GL_TEXTURE_1D_ARRAY
            forM_ (zip (texture1DArraySizes tex) [0..]) $ \(V2 lw _, l) ->
              glTexImage2D GL_TEXTURE_1D_ARRAY l glintf (fromIntegral lw) (fromIntegral sl) 0 glf GL_BYTE nullPtr
            setDefaultTexParams GL_TEXTURE_1D_ARRAY (ls-1)
          return tex
-- | Allocate a 2D texture.  Stencil formats get a renderbuffer backing
-- instead of a texture object (presumably because they are only usable
-- as framebuffer attachments -- confirm); everything else gets a mipmapped
-- texture with storage reserved per level (NULL data, undefined contents).
newTexture2D f s@(V2 w h) mx
  | w < 0 || h < 0 = error "newTexture2D, negative size"
  | mx <= 0 = error "newTexture2D, non-positive MaxLevels"
  | getGlFormat (undefined :: c) == GL_STENCIL_INDEX = do
      mxSize <- getGlValue GL_MAX_RENDERBUFFER_SIZE
      if w > mxSize || h > mxSize
        then liftIO $ throwIO $ GPipeException "newTexture2D, size larger then maximum supported by graphics driver"
        else do
          t <- makeRenderBuff
          liftContextIOAsync $
             glRenderbufferStorage GL_RENDERBUFFER (getGlInternalFormat f) (fromIntegral w) (fromIntegral h)
          return $ RenderBuffer2D t s
  | otherwise = do
      mxSize <- getGlValue GL_MAX_TEXTURE_SIZE
      if w > mxSize || h > mxSize
        then liftIO $ throwIO $ GPipeException "newTexture2D, size larger then maximum supported by graphics driver"
        else do
          t <- makeTex
          let glintf = fromIntegral $ getGlInternalFormat f
              glf = getGlFormat (undefined :: c)
              -- Level count is limited by the larger of the two extents.
              ls = min mx (calcMaxLevels (max w h))
              tex = Texture2D t s ls
          liftContextIOAsync $ do
            useTexSync t GL_TEXTURE_2D
            forM_ (zip (texture2DSizes tex) [0..]) $ \(V2 lw lh, l) ->
              glTexImage2D GL_TEXTURE_2D l glintf (fromIntegral lw) (fromIntegral lh) 0 glf GL_BYTE nullPtr
            setDefaultTexParams GL_TEXTURE_2D (ls-1)
          return tex
-- | Allocate a 2D texture array of @sl@ slices; width and height are
-- mipmapped, the slice count stays constant per level.
newTexture2DArray f s@(V3 w h sl) mx
  | w < 0 || h < 0 || sl < 0 = error "newTexture2DArray, negative size"
  | mx <= 0 = error "newTexture2DArray, non-positive MaxLevels"
  | otherwise = do
      mxSize <- getGlValue GL_MAX_TEXTURE_SIZE
      if w > mxSize || h > mxSize || sl > mxSize
        then liftIO $ throwIO $ GPipeException "newTexture2DArray, size larger then maximum supported by graphics driver"
        else do
          t <- makeTex
          let glintf = fromIntegral $ getGlInternalFormat f
              glf = getGlFormat (undefined :: c)
              ls = min mx (calcMaxLevels (max w h))
              tex = Texture2DArray t s ls
          liftContextIOAsync $ do
            useTexSync t GL_TEXTURE_2D_ARRAY
            forM_ (zip (texture2DArraySizes tex) [0..]) $ \(V3 lw lh _, l) ->
              glTexImage3D GL_TEXTURE_2D_ARRAY l glintf (fromIntegral lw) (fromIntegral lh) (fromIntegral sl) 0 glf GL_BYTE nullPtr
            setDefaultTexParams GL_TEXTURE_2D_ARRAY (ls-1)
          return tex
-- | Allocate a 3D texture; all three extents are halved per mip level.
newTexture3D f s@(V3 w h d) mx
  | w < 0 || h < 0 || d < 0 = error "newTexture3D, negative size"
  | mx <= 0 = error "newTexture3D, non-positive MaxLevels"
  | otherwise = do
      mxSize <- getGlValue GL_MAX_TEXTURE_SIZE
      if w > mxSize || h > mxSize || d > mxSize
        then liftIO $ throwIO $ GPipeException "newTexture3D, size larger then maximum supported by graphics driver"
        else do
          t <- makeTex
          let glintf = fromIntegral $ getGlInternalFormat f
              glf = getGlFormat (undefined :: c)
              -- Level count follows the largest of the three extents.
              ls = min mx (calcMaxLevels (max w (max h d)))
              tex = Texture3D t s ls
          liftContextIOAsync $ do
            useTexSync t GL_TEXTURE_3D
            forM_ (zip (texture3DSizes tex) [0..]) $ \(V3 lw lh ld, l) ->
              glTexImage3D GL_TEXTURE_3D l glintf (fromIntegral lw) (fromIntegral lh) (fromIntegral ld) 0 glf GL_BYTE nullPtr
            setDefaultTexParams GL_TEXTURE_3D (ls-1)
          return tex
-- | Allocate a cube map: storage is reserved for every (level, face)
-- pair, and wrapping is clamped on all three axes as cube maps require.
newTextureCube f s mx
  | s < 0 = error "newTextureCube, negative size"
  | mx <= 0 = error "newTextureCube, non-positive MaxLevels"
  | otherwise = do
      mxSize <- getGlValue GL_MAX_CUBE_MAP_TEXTURE_SIZE
      if s > mxSize
        then liftIO $ throwIO $ GPipeException "newTextureCube, size larger then maximum supported by graphics driver"
        else do
          t <- makeTex
          let glintf = fromIntegral $ getGlInternalFormat f
              glf = getGlFormat (undefined :: c)
              ls = min mx (calcMaxLevels s)
              tex = TextureCube t s ls
          liftContextIOAsync $ do
            useTexSync t GL_TEXTURE_CUBE_MAP
            -- Cartesian product of mip levels and the six cube faces.
            forM_ [(size, getGlCubeSide side) | size <- zip (textureCubeSizes tex) [0..], side <- [minBound..maxBound]] $ \((lx, l), side) ->
              glTexImage2D side l glintf (fromIntegral lx) (fromIntegral lx) 0 glf GL_BYTE nullPtr
            setDefaultTexParams GL_TEXTURE_CUBE_MAP (ls-1)
            glTexParameteri GL_TEXTURE_CUBE_MAP GL_TEXTURE_WRAP_S GL_CLAMP_TO_EDGE
            glTexParameteri GL_TEXTURE_CUBE_MAP GL_TEXTURE_WRAP_T GL_CLAMP_TO_EDGE
            glTexParameteri GL_TEXTURE_CUBE_MAP GL_TEXTURE_WRAP_R GL_CLAMP_TO_EDGE
          return tex
-- | Query a scalar GL integer parameter (e.g. GL_MAX_TEXTURE_SIZE).
getGlValue :: MonadIO m => GLenum -> ContextT w os f m Int
getGlValue enum = liftContextIO $ alloca (\ptr -> liftM fromIntegral (glGetIntegerv enum ptr >> peek ptr))
-- | Set the default sampling state on the texture currently bound to
-- target @t@: restrict the level range to [0, ml] and use nearest
-- filtering for both minification (with mipmaps) and magnification.
setDefaultTexParams :: GLenum -> Int -> IO ()
setDefaultTexParams t ml = do
                             glTexParameteri t GL_TEXTURE_BASE_LEVEL 0
                             glTexParameteri t GL_TEXTURE_MAX_LEVEL (fromIntegral ml)
                             glTexParameteri t GL_TEXTURE_MIN_FILTER GL_NEAREST_MIPMAP_NEAREST
                             glTexParameteri t GL_TEXTURE_MAG_FILTER GL_NEAREST
-- Number of allocated mipmap levels per texture kind (renderbuffers have
-- exactly one level).
texture1DLevels :: Texture1D os f -> Int
texture1DArrayLevels :: Texture1DArray os f -> Int
texture2DLevels :: Texture2D os f -> Int
texture2DArrayLevels :: Texture2DArray os f -> Int
texture3DLevels :: Texture3D os f -> Int
textureCubeLevels :: TextureCube os f -> Int
texture1DLevels (Texture1D _ _ ls) = ls
texture1DArrayLevels (Texture1DArray _ _ ls) = ls
texture2DLevels (Texture2D _ _ ls) = ls
texture2DLevels (RenderBuffer2D _ _) = 1
texture2DArrayLevels (Texture2DArray _ _ ls) = ls
texture3DLevels (Texture3D _ _ ls) = ls
textureCubeLevels (TextureCube _ _ ls) = ls
-- Per-level sizes for each texture kind, computed with 'calcLevelSize';
-- array slice counts are not mipmapped and stay constant across levels.
texture1DSizes :: Texture1D os f -> [Size1]
texture1DArraySizes :: Texture1DArray os f -> [Size2]
texture2DSizes :: Texture2D os f -> [Size2]
texture2DArraySizes :: Texture2DArray os f -> [Size3]
texture3DSizes :: Texture3D os f -> [Size3]
textureCubeSizes :: TextureCube os f -> [Size1]
texture1DSizes (Texture1D _ w ls) = map (calcLevelSize w) [0..(ls-1)]
texture1DArraySizes (Texture1DArray _ (V2 w s) ls) = map (\l -> V2 (calcLevelSize w l) s) [0..(ls-1)]
texture2DSizes (Texture2D _ (V2 w h) ls) = map (\l -> V2 (calcLevelSize w l) (calcLevelSize h l)) [0..(ls-1)]
texture2DSizes (RenderBuffer2D _ s) = [s]
texture2DArraySizes (Texture2DArray _ (V3 w h s) ls) = map (\l -> V3 (calcLevelSize w l) (calcLevelSize h l) s) [0..(ls-1)]
texture3DSizes (Texture3D _ (V3 w h d) ls) = map (\l -> V3 (calcLevelSize w l) (calcLevelSize h l) (calcLevelSize d l)) [0..(ls-1)]
textureCubeSizes (TextureCube _ x ls) = map (calcLevelSize x) [0..(ls-1)]
-- | Extent of mip level @level@ for a base extent @size0@: halved once
-- per level but never below 1.
calcLevelSize :: Int -> Int -> Int
calcLevelSize size0 level
  | halved < 1 = 1
  | otherwise  = halved
  where halved = size0 `div` (2 ^ level)
-- | Maximum useful mipmap count for a texture whose largest extent is
-- @s@: floor(log2 s) + 1.
calcMaxLevels :: Int -> Int
calcMaxLevels s = levels + 1
  where levels = truncate (logBase 2.0 (fromIntegral s :: Double))
-- A texture's GL object name, kept in an IORef so finalizers can share it.
type TexName = IORef GLuint
-- | Generate a fresh GL texture object and register finalizers that
-- delete it (and detach it from any FBO bookkeeping) when it is dropped.
makeTex :: MonadIO m => ContextT w os f m TexName
makeTex = do
  name <- liftContextIO $ alloca (\ptr -> glGenTextures 1 ptr >> peek ptr)
  tex <- liftIO $ newIORef name
  addContextFinalizer tex $ with name (glDeleteTextures 1)
  addFBOTextureFinalizer False tex
  return tex
-- | Like 'makeTex' but for a renderbuffer object (used for stencil-only
-- 2D "textures"); the finalizer is flagged accordingly.
makeRenderBuff :: MonadIO m => ContextT w os f m TexName
makeRenderBuff = do
  name <- liftContextIO $ alloca (\ptr -> glGenRenderbuffers 1 ptr >> peek ptr)
  tex <- liftIO $ newIORef name
  addContextFinalizer tex $ with name (glDeleteRenderbuffers 1)
  addFBOTextureFinalizer True tex
  return tex
useTex :: Integral a => TexName -> GLenum -> a -> IO Int
useTex texNameRef t bind = do glActiveTexture (GL_TEXTURE0 + fromIntegral bind)
n <- readIORef texNameRef
glBindTexture t n
return (fromIntegral n)
-- | Bind a texture for synchronous setup work on the highest texture
-- unit, so the lower units remain reserved for asynchronous draw calls.
useTexSync :: TexName -> GLenum -> IO ()
useTexSync tn t = do maxUnits <- alloca (\ptr -> glGetIntegerv GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS ptr >> peek ptr) -- Use last for all sync actions, keeping 0.. for async drawcalls
                     void $ useTex tn t (maxUnits-1)
-- A mipmap level index.
type Level = Int
-- The six faces of a cube map (mapped to GL enums via getGlCubeSide).
data CubeSide = CubePosX | CubeNegX | CubePosY | CubeNegY | CubePosZ | CubeNegZ deriving (Eq, Enum, Bounded)
-- Write offsets into 1/2/3-dimensional textures.
type StartPos1 = Int
type StartPos2 = V2 Int
type StartPos3 = V3 Int
writeTexture1D :: forall b c h w os f m. (MonadIO m, BufferFormat b, ColorSampleable c, BufferColor (Color c (ColorElement c)) h ~ b, h ~ HostFormat b) => Texture1D os (Format c) -> Level -> StartPos1 -> Size1 -> [h] -> ContextT w os f m ()
writeTexture1DArray :: forall b c h w os f m. (MonadIO m, BufferFormat b, ColorSampleable c, BufferColor (Color c (ColorElement c)) h ~ b, h ~ HostFormat b) => Texture1DArray os (Format c) -> Level -> StartPos2 -> Size2 -> [h] -> ContextT w os f m ()
writeTexture2D :: forall b c h w os f m. (MonadIO m, BufferFormat b, ColorSampleable c, BufferColor (Color c (ColorElement c)) h ~ b, h ~ HostFormat b) => Texture2D os (Format c) -> Level -> StartPos2 -> Size2 -> [h] -> ContextT w os f m ()
writeTexture2DArray :: forall b c h w os f m. (MonadIO m, BufferFormat b, ColorSampleable c, BufferColor (Color c (ColorElement c)) h ~ b, h ~ HostFormat b) => Texture2DArray os (Format c) -> Level -> StartPos3 -> Size3 -> [h] -> ContextT w os f m ()
writeTexture3D :: forall b c h w os f m. (MonadIO m, BufferFormat b, ColorSampleable c, BufferColor (Color c (ColorElement c)) h ~ b, h ~ HostFormat b) => Texture3D os (Format c) -> Level -> StartPos3 -> Size3 -> [h] -> ContextT w os f m ()
writeTextureCube :: forall b c h w os f m. (MonadIO m, BufferFormat b, ColorSampleable c, BufferColor (Color c (ColorElement c)) h ~ b, h ~ HostFormat b) => TextureCube os (Format c) -> Level -> CubeSide -> StartPos2 -> Size2 -> [h] -> ContextT w os f m ()
writeTexture1DFromBuffer :: forall b c h w os f m. (MonadIO m, BufferFormat b, ColorSampleable c, BufferColor (Color c (ColorElement c)) (HostFormat b) ~ b) => Texture1D os (Format c) -> Level -> StartPos1 -> Size1 -> Buffer os b -> BufferStartPos -> ContextT w os f m ()
writeTexture1DArrayFromBuffer:: forall b c h w os f m. (MonadIO m, BufferFormat b, ColorSampleable c, BufferColor (Color c (ColorElement c)) (HostFormat b) ~ b) => Texture1DArray os (Format c) -> Level -> StartPos2 -> Size2 -> Buffer os b -> BufferStartPos -> ContextT w os f m ()
writeTexture2DFromBuffer :: forall b c h w os f m. (MonadIO m, BufferFormat b, ColorSampleable c, BufferColor (Color c (ColorElement c)) (HostFormat b) ~ b) => Texture2D os (Format c) -> Level -> StartPos2 -> Size2 -> Buffer os b -> BufferStartPos -> ContextT w os f m ()
writeTexture2DArrayFromBuffer:: forall b c h w os f m. (MonadIO m, BufferFormat b, ColorSampleable c, BufferColor (Color c (ColorElement c)) (HostFormat b) ~ b) => Texture2DArray os (Format c) -> Level -> StartPos3 -> Size3 -> Buffer os b -> BufferStartPos -> ContextT w os f m ()
writeTexture3DFromBuffer :: forall b c h w os f m. (MonadIO m, BufferFormat b, ColorSampleable c, BufferColor (Color c (ColorElement c)) (HostFormat b) ~ b) => Texture3D os (Format c) -> Level -> StartPos3 -> Size3 -> Buffer os b -> BufferStartPos -> ContextT w os f m ()
writeTextureCubeFromBuffer :: forall b c h w os f m. (MonadIO m, BufferFormat b, ColorSampleable c, BufferColor (Color c (ColorElement c)) (HostFormat b) ~ b) => TextureCube os (Format c) -> Level -> CubeSide -> StartPos2 -> Size2 -> Buffer os b -> BufferStartPos -> ContextT w os f m ()
readTexture1D :: forall a b c h w os f m. (MonadAsyncException m, MonadIO m, BufferFormat b, ColorSampleable c, BufferColor (Color c (ColorElement c)) h ~ b, h ~ HostFormat b) => Texture1D os (Format c) -> Level -> StartPos1 -> Size1 -> (a -> h -> ContextT w os f m a) -> a -> ContextT w os f m a
readTexture1DArray :: forall a b c h w os f m. (MonadAsyncException m, MonadIO m, BufferFormat b, ColorSampleable c, BufferColor (Color c (ColorElement c)) h ~ b, h ~ HostFormat b) => Texture1DArray os (Format c) -> Level -> StartPos2 -> Size1 -> (a -> h -> ContextT w os f m a) -> a -> ContextT w os f m a
readTexture2D :: forall a b c h w os f m. (MonadAsyncException m, MonadIO m, BufferFormat b, ColorSampleable c, BufferColor (Color c (ColorElement c)) h ~ b, h ~ HostFormat b) => Texture2D os (Format c) -> Level -> StartPos2 -> Size2 -> (a -> h -> ContextT w os f m a) -> a -> ContextT w os f m a
readTexture2DArray :: forall a b c h w os f m. (MonadAsyncException m, MonadIO m, BufferFormat b, ColorSampleable c, BufferColor (Color c (ColorElement c)) h ~ b, h ~ HostFormat b) => Texture2DArray os (Format c) -> Level -> StartPos3 -> Size2 -> (a -> h -> ContextT w os f m a) -> a -> ContextT w os f m a
readTexture3D :: forall a b c h w os f m. (MonadAsyncException m, MonadIO m, BufferFormat b, ColorSampleable c, BufferColor (Color c (ColorElement c)) h ~ b, h ~ HostFormat b) => Texture3D os (Format c) -> Level -> StartPos3 -> Size2 -> (a -> h -> ContextT w os f m a) -> a -> ContextT w os f m a
readTextureCube :: forall a b c h w os f m. (MonadAsyncException m, MonadIO m, BufferFormat b, ColorSampleable c, BufferColor (Color c (ColorElement c)) h ~ b, h ~ HostFormat b) => TextureCube os (Format c) -> Level -> CubeSide -> StartPos2 -> Size2 -> (a -> h -> ContextT w os f m a) -> a -> ContextT w os f m a
readTexture1DToBuffer :: forall b c w os f m. (MonadIO m, BufferFormat b, ColorSampleable c, BufferColor (Color c (ColorElement c)) (HostFormat b) ~ b) => Texture1D os (Format c) -> Level -> StartPos1 -> Size1 -> Buffer os b -> BufferStartPos -> ContextT w os f m ()
readTexture1DArrayToBuffer:: forall b c w os f m. (MonadIO m, BufferFormat b, ColorSampleable c, BufferColor (Color c (ColorElement c)) (HostFormat b) ~ b) => Texture1DArray os (Format c) -> Level -> StartPos2 -> Size1 -> Buffer os b -> BufferStartPos -> ContextT w os f m ()
readTexture2DToBuffer :: forall b c w os f m. (MonadIO m, BufferFormat b, ColorSampleable c, BufferColor (Color c (ColorElement c)) (HostFormat b) ~ b) => Texture2D os (Format c) -> Level -> StartPos2 -> Size2 -> Buffer os b -> BufferStartPos -> ContextT w os f m ()
readTexture2DArrayToBuffer:: forall b c w os f m. (MonadIO m, BufferFormat b, ColorSampleable c, BufferColor (Color c (ColorElement c)) (HostFormat b) ~ b) => Texture2DArray os (Format c) -> Level -> StartPos3 -> Size2 -> Buffer os b -> BufferStartPos -> ContextT w os f m ()
readTexture3DToBuffer :: forall b c w os f m. (MonadIO m, BufferFormat b, ColorSampleable c, BufferColor (Color c (ColorElement c)) (HostFormat b) ~ b) => Texture3D os (Format c) -> Level -> StartPos3 -> Size2 -> Buffer os b -> BufferStartPos -> ContextT w os f m ()
readTextureCubeToBuffer :: forall b c w os f m. (MonadIO m, BufferFormat b, ColorSampleable c, BufferColor (Color c (ColorElement c)) (HostFormat b) ~ b) => TextureCube os (Format c) -> Level -> CubeSide -> StartPos2 -> Size2 -> Buffer os b-> BufferStartPos -> ContextT w os f m ()
-- | Pick the GL transfer format for a texture/host-buffer format pair.
-- Depth and depth-stencil textures must be transferred as
-- GL_DEPTH_COMPONENT; every other format uses the padded host format.
getGlColorFormat :: (TextureFormat f, BufferFormat b) => f -> b -> GLenum
getGlColorFormat f b
    | fmt == GL_DEPTH_STENCIL || fmt == GL_DEPTH_COMPONENT = GL_DEPTH_COMPONENT
    | otherwise = getGlPaddedFormat b
  where
    fmt = getGlFormat f
-- | Write @w@ pixels from the host list @d@ into level @l@ of a 1D texture,
-- starting at texel @x@. Bounds are validated before any GL call; the list
-- must supply at least @w@ pixels or an error is raised.
writeTexture1D t@(Texture1D texn _ ml) l x w d
                    | l < 0 || l >= ml = error "writeTexture1D, level out of bounds"
                    | x < 0 || x >= mx = error "writeTexture1D, x out of bounds"
                    | w < 0 || x+w > mx = error "writeTexture1D, w out of bounds"
                    | otherwise = liftContextIOAsync $ do
                       -- b is a zero-length dummy buffer used only to query the
                       -- element size and host-to-GL marshalling of format b.
                       let b = makeBuffer undefined undefined 0 :: Buffer os b
                           size = w*bufElementSize b
                       allocaBytes size $ \ ptr -> do
                           end <- bufferWriteInternal b ptr (take w d)
                           if end `minusPtr` ptr /= size
                               then error "writeTexture1D, data list too short"
                               else do
                                   useTexSync texn GL_TEXTURE_1D
                                   glTexSubImage1D GL_TEXTURE_1D (fromIntegral l) (fromIntegral x) (fromIntegral w) (getGlColorFormat (undefined :: c) (undefined :: b)) (getGlType (undefined :: b)) ptr
    where mx = texture1DSizes t !! l
-- | Write pixels from the host list @d@ into level @l@ of a 1D texture
-- array. @V2 x y@ is the start (texel, slice) and @V2 w h@ the extent; the
-- list must supply at least @w*h@ pixels. Bounds are validated before any
-- GL call.
writeTexture1DArray t@(Texture1DArray texn _ ml) l (V2 x y) (V2 w h) d
    | l < 0 || l >= ml = error "writeTexture1DArray, level out of bounds"
    | x < 0 || x >= mx = error "writeTexture1DArray, x out of bounds"
    | w < 0 || x+w > mx = error "writeTexture1DArray, w out of bounds"
    | y < 0 || y >= my = error "writeTexture1DArray, y out of bounds"
    -- Fixed: this guard previously reported "writeTexture2D, h out of bounds".
    | h < 0 || y+h > my = error "writeTexture1DArray, h out of bounds"
    | otherwise = liftContextIOAsync $ do
        -- Zero-length dummy buffer, used only for element size/marshalling.
        let b = makeBuffer undefined undefined 0 :: Buffer os b
            size = w*h*bufElementSize b
        allocaBytes size $ \ ptr -> do
            end <- bufferWriteInternal b ptr (take (w*h) d)
            if end `minusPtr` ptr /= size
                then error "writeTexture1DArray, data list too short"
                else do
                    useTexSync texn GL_TEXTURE_1D_ARRAY
                    glTexSubImage2D GL_TEXTURE_1D_ARRAY (fromIntegral l) (fromIntegral x) (fromIntegral y) (fromIntegral w) (fromIntegral h) (getGlColorFormat (undefined :: c) (undefined :: b)) (getGlType (undefined :: b)) ptr
    where V2 mx my = texture1DArraySizes t !! l
-- | Write pixels from the host list @d@ into a @V2 w h@ region of level @l@
-- of a 2D texture, starting at @V2 x y@. The list must supply at least
-- @w*h@ pixels, row-major. Bounds are validated before any GL call.
writeTexture2D t@(Texture2D texn _ ml) l (V2 x y) (V2 w h) d
                    | l < 0 || l >= ml = error "writeTexture2D, level out of bounds"
                    | x < 0 || x >= mx = error "writeTexture2D, x out of bounds"
                    | w < 0 || x+w > mx = error "writeTexture2D, w out of bounds"
                    | y < 0 || y >= my = error "writeTexture2D, y out of bounds"
                    | h < 0 || y+h > my = error "writeTexture2D, h out of bounds"
                    | otherwise = liftContextIOAsync $ do
                       -- Zero-length dummy buffer, used only for element size/marshalling.
                       let b = makeBuffer undefined undefined 0 :: Buffer os b
                           size = w*h*bufElementSize b
                       allocaBytes size $ \ ptr -> do
                           end <- bufferWriteInternal b ptr (take (w*h) d)
                           if end `minusPtr` ptr /= size
                               then error "writeTexture2D, data list too short"
                               else do
                                   useTexSync texn GL_TEXTURE_2D
                                   glTexSubImage2D GL_TEXTURE_2D (fromIntegral l) (fromIntegral x) (fromIntegral y) (fromIntegral w) (fromIntegral h) (getGlColorFormat (undefined :: c) (undefined :: b)) (getGlType (undefined :: b)) ptr
    where V2 mx my = texture2DSizes t !! l
-- | Write pixels into a @V3 w h d@ region of level @l@ of a 2D texture
-- array, starting at @V3 x y z@ (z selects the first slice). The list must
-- supply at least @w*h*d@ pixels.
writeTexture2DArray t@(Texture2DArray texn _ ml) l (V3 x y z) (V3 w h d) dat
                    | l < 0 || l >= ml = error "writeTexture2DArray, level out of bounds"
                    | x < 0 || x >= mx = error "writeTexture2DArray, x out of bounds"
                    | w < 0 || x+w > mx = error "writeTexture2DArray, w out of bounds"
                    | y < 0 || y >= my = error "writeTexture2DArray, y out of bounds"
                    | h < 0 || y+h > my = error "writeTexture2DArray, h out of bounds"
                    | z < 0 || z >= mz = error "writeTexture2DArray, z out of bounds"
                    | d < 0 || z+d > mz = error "writeTexture2DArray, d out of bounds"
                    | otherwise = liftContextIOAsync $ do
                       -- Zero-length dummy buffer, used only for element size/marshalling.
                       let b = makeBuffer undefined undefined 0 :: Buffer os b
                           size = w*h*d*bufElementSize b
                       allocaBytes size $ \ ptr -> do
                           end <- bufferWriteInternal b ptr (take (w*h*d) dat)
                           if end `minusPtr` ptr /= size
                               then error "writeTexture2DArray, data list too short"
                               else do
                                   useTexSync texn GL_TEXTURE_2D_ARRAY
                                   glTexSubImage3D GL_TEXTURE_2D_ARRAY (fromIntegral l) (fromIntegral x) (fromIntegral y) (fromIntegral z) (fromIntegral w) (fromIntegral h) (fromIntegral d) (getGlColorFormat (undefined :: c) (undefined :: b)) (getGlType (undefined :: b)) ptr
    where V3 mx my mz = texture2DArraySizes t !! l
-- | Write pixels into a @V3 w h d@ box of level @l@ of a 3D texture,
-- starting at @V3 x y z@. The list must supply at least @w*h*d@ pixels.
writeTexture3D t@(Texture3D texn _ ml) l (V3 x y z) (V3 w h d) dat
                    | l < 0 || l >= ml = error "writeTexture3D, level out of bounds"
                    | x < 0 || x >= mx = error "writeTexture3D, x out of bounds"
                    | w < 0 || x+w > mx = error "writeTexture3D, w out of bounds"
                    | y < 0 || y >= my = error "writeTexture3D, y out of bounds"
                    | h < 0 || y+h > my = error "writeTexture3D, h out of bounds"
                    | z < 0 || z >= mz = error "writeTexture3D, z out of bounds"
                    | d < 0 || z+d > mz = error "writeTexture3D, d out of bounds"
                    | otherwise = liftContextIOAsync $ do
                       -- Zero-length dummy buffer, used only for element size/marshalling.
                       let b = makeBuffer undefined undefined 0 :: Buffer os b
                           size = w*h*d*bufElementSize b
                       allocaBytes size $ \ ptr -> do
                           end <- bufferWriteInternal b ptr (take (w*h*d) dat)
                           if end `minusPtr` ptr /= size
                               then error "writeTexture3D, data list too short"
                               else do
                                   useTexSync texn GL_TEXTURE_3D
                                   glTexSubImage3D GL_TEXTURE_3D (fromIntegral l) (fromIntegral x) (fromIntegral y) (fromIntegral z) (fromIntegral w) (fromIntegral h) (fromIntegral d) (getGlColorFormat (undefined :: c) (undefined :: b)) (getGlType (undefined :: b)) ptr
    where V3 mx my mz = texture3DSizes t !! l
-- | Write pixels into a @V2 w h@ region of cube face @s@ at level @l@.
-- Cube faces are square, so one bound @mxy@ covers both axes.
writeTextureCube t@(TextureCube texn _ ml) l s (V2 x y) (V2 w h) d
                    | l < 0 || l >= ml = error "writeTextureCube, level out of bounds"
                    | x < 0 || x >= mxy = error "writeTextureCube, x out of bounds"
                    | w < 0 || x+w > mxy = error "writeTextureCube, w out of bounds"
                    | y < 0 || y >= mxy = error "writeTextureCube, y out of bounds"
                    | h < 0 || y+h > mxy = error "writeTextureCube, h out of bounds"
                    | otherwise = liftContextIOAsync $ do
                       -- Zero-length dummy buffer, used only for element size/marshalling.
                       let b = makeBuffer undefined undefined 0 :: Buffer os b
                           size = w*h*bufElementSize b
                       allocaBytes size $ \ ptr -> do
                           end <- bufferWriteInternal b ptr (take (w*h) d)
                           if end `minusPtr` ptr /= size
                               then error "writeTextureCube, data list too short"
                               else do
                                   useTexSync texn GL_TEXTURE_CUBE_MAP
                                   glTexSubImage2D (getGlCubeSide s) (fromIntegral l) (fromIntegral x) (fromIntegral y) (fromIntegral w) (fromIntegral h) (getGlColorFormat (undefined :: c) (undefined :: b)) (getGlType (undefined :: b)) ptr
    where mxy = textureCubeSizes t !! l
-- | Write @w@ texels into level @l@ of a 1D texture from GPU buffer @b@,
-- reading from element index @i@ onward via the pixel-unpack binding (no
-- host round trip).
writeTexture1DFromBuffer t@(Texture1D texn _ ml) l x w b i
                    | l < 0 || l >= ml = error "writeTexture1DFromBuffer, level out of bounds"
                    | x < 0 || x >= mx = error "writeTexture1DFromBuffer, x out of bounds"
                    | w < 0 || x+w > mx = error "writeTexture1DFromBuffer, w out of bounds"
                    | i < 0 || i > bufferLength b = error "writeTexture1DFromBuffer, i out of bounds"
                    | bufferLength b - i < w = error "writeTexture1DFromBuffer, buffer data too small"
                    | otherwise = liftContextIOAsync $ do
                       useTexSync texn GL_TEXTURE_1D
                       bname <- readIORef $ bufName b
                       glBindBuffer GL_PIXEL_UNPACK_BUFFER bname
                       -- With an unpack buffer bound, the "pointer" is a byte offset into it.
                       glTexSubImage1D GL_TEXTURE_1D (fromIntegral l) (fromIntegral x) (fromIntegral w) (getGlColorFormat (undefined :: c) (undefined :: b)) (getGlType (undefined :: b)) (wordPtrToPtr $ fromIntegral $ i*bufElementSize b)
                       glBindBuffer GL_PIXEL_UNPACK_BUFFER 0
    where mx = texture1DSizes t !! l
-- | Write a @V2 w h@ (texels x slices) region into level @l@ of a 1D texture
-- array from GPU buffer @b@, starting at buffer element @i@.
writeTexture1DArrayFromBuffer t@(Texture1DArray texn _ ml) l (V2 x y) (V2 w h) b i
                    | l < 0 || l >= ml = error "writeTexture1DArrayFromBuffer, level out of bounds"
                    | x < 0 || x >= mx = error "writeTexture1DArrayFromBuffer, x out of bounds"
                    | w < 0 || x+w > mx = error "writeTexture1DArrayFromBuffer, w out of bounds"
                    | y < 0 || y >= my = error "writeTexture1DArrayFromBuffer, y out of bounds"
                    | h < 0 || y+h > my = error "writeTexture1DArrayFromBuffer, h out of bounds"
                    | i < 0 || i > bufferLength b = error "writeTexture1DArrayFromBuffer, i out of bounds"
                    | bufferLength b - i < w*h = error "writeTexture1DArrayFromBuffer, buffer data too small"
                    | otherwise = liftContextIOAsync $ do
                       useTexSync texn GL_TEXTURE_1D_ARRAY
                       bname <- readIORef $ bufName b
                       glBindBuffer GL_PIXEL_UNPACK_BUFFER bname
                       glTexSubImage2D GL_TEXTURE_1D_ARRAY (fromIntegral l) (fromIntegral x) (fromIntegral y) (fromIntegral w) (fromIntegral h) (getGlColorFormat (undefined :: c) (undefined :: b)) (getGlType (undefined :: b)) (wordPtrToPtr $ fromIntegral $ i*bufElementSize b)
                       glBindBuffer GL_PIXEL_UNPACK_BUFFER 0
    where V2 mx my = texture1DArraySizes t !! l
-- | Write a @V2 w h@ region into level @l@ of a 2D texture from GPU buffer
-- @b@, starting at buffer element @i@.
writeTexture2DFromBuffer t@(Texture2D texn _ ml) l (V2 x y) (V2 w h) b i
                    | l < 0 || l >= ml = error "writeTexture2DFromBuffer, level out of bounds"
                    | x < 0 || x >= mx = error "writeTexture2DFromBuffer, x out of bounds"
                    | w < 0 || x+w > mx = error "writeTexture2DFromBuffer, w out of bounds"
                    | y < 0 || y >= my = error "writeTexture2DFromBuffer, y out of bounds"
                    | h < 0 || y+h > my = error "writeTexture2DFromBuffer, h out of bounds"
                    | i < 0 || i > bufferLength b = error "writeTexture2DFromBuffer, i out of bounds"
                    | bufferLength b - i < w*h = error "writeTexture2DFromBuffer, buffer data too small"
                    | otherwise = liftContextIOAsync $ do
                       useTexSync texn GL_TEXTURE_2D
                       bname <- readIORef $ bufName b
                       glBindBuffer GL_PIXEL_UNPACK_BUFFER bname
                       glTexSubImage2D GL_TEXTURE_2D (fromIntegral l) (fromIntegral x) (fromIntegral y) (fromIntegral w) (fromIntegral h) (getGlColorFormat (undefined :: c) (undefined :: b)) (getGlType (undefined :: b)) (wordPtrToPtr $ fromIntegral $ i*bufElementSize b)
                       glBindBuffer GL_PIXEL_UNPACK_BUFFER 0
    where V2 mx my = texture2DSizes t !! l
-- | Write a @V3 w h d@ region into level @l@ of a 2D texture array from GPU
-- buffer @b@, starting at buffer element @i@, via the pixel-unpack binding.
writeTexture2DArrayFromBuffer t@(Texture2DArray texn _ ml) l (V3 x y z) (V3 w h d) b i
    | l < 0 || l >= ml = error "writeTexture2DArrayFromBuffer, level out of bounds"
    | x < 0 || x >= mx = error "writeTexture2DArrayFromBuffer, x out of bounds"
    | w < 0 || x+w > mx = error "writeTexture2DArrayFromBuffer, w out of bounds"
    | y < 0 || y >= my = error "writeTexture2DArrayFromBuffer, y out of bounds"
    | h < 0 || y+h > my = error "writeTexture2DArrayFromBuffer, h out of bounds"
    | z < 0 || z >= mz = error "writeTexture2DArrayFromBuffer, z out of bounds"
    | d < 0 || z+d > mz = error "writeTexture2DArrayFromBuffer, d out of bounds"
    | i < 0 || i > bufferLength b = error "writeTexture2DArrayFromBuffer, i out of bounds"
    -- Fixed: glTexSubImage3D consumes w*h*d elements, not w*h; the old check
    -- let a too-small buffer slip through for d > 1.
    | bufferLength b - i < w*h*d = error "writeTexture2DArrayFromBuffer, buffer data too small"
    | otherwise = liftContextIOAsync $ do
        useTexSync texn GL_TEXTURE_2D_ARRAY
        bname <- readIORef $ bufName b
        glBindBuffer GL_PIXEL_UNPACK_BUFFER bname
        -- With an unpack buffer bound, the "pointer" is a byte offset into it.
        glTexSubImage3D GL_TEXTURE_2D_ARRAY (fromIntegral l) (fromIntegral x) (fromIntegral y) (fromIntegral z) (fromIntegral w) (fromIntegral h) (fromIntegral d) (getGlColorFormat (undefined :: c) (undefined :: b)) (getGlType (undefined :: b)) (wordPtrToPtr $ fromIntegral $ i*bufElementSize b)
        glBindBuffer GL_PIXEL_UNPACK_BUFFER 0
    where V3 mx my mz = texture2DArraySizes t !! l
-- | Write a @V3 w h d@ box into level @l@ of a 3D texture from GPU buffer
-- @b@, starting at buffer element @i@, via the pixel-unpack binding.
writeTexture3DFromBuffer t@(Texture3D texn _ ml) l (V3 x y z) (V3 w h d) b i
    | l < 0 || l >= ml = error "writeTexture3DFromBuffer, level out of bounds"
    | x < 0 || x >= mx = error "writeTexture3DFromBuffer, x out of bounds"
    | w < 0 || x+w > mx = error "writeTexture3DFromBuffer, w out of bounds"
    | y < 0 || y >= my = error "writeTexture3DFromBuffer, y out of bounds"
    | h < 0 || y+h > my = error "writeTexture3DFromBuffer, h out of bounds"
    | z < 0 || z >= mz = error "writeTexture3DFromBuffer, z out of bounds"
    | d < 0 || z+d > mz = error "writeTexture3DFromBuffer, d out of bounds"
    | i < 0 || i > bufferLength b = error "writeTexture3DFromBuffer, i out of bounds"
    -- Fixed: glTexSubImage3D consumes w*h*d elements, not w*h; the old check
    -- let a too-small buffer slip through for d > 1.
    | bufferLength b - i < w*h*d = error "writeTexture3DFromBuffer, buffer data too small"
    | otherwise = liftContextIOAsync $ do
        useTexSync texn GL_TEXTURE_3D
        bname <- readIORef $ bufName b
        glBindBuffer GL_PIXEL_UNPACK_BUFFER bname
        -- With an unpack buffer bound, the "pointer" is a byte offset into it.
        glTexSubImage3D GL_TEXTURE_3D (fromIntegral l) (fromIntegral x) (fromIntegral y) (fromIntegral z) (fromIntegral w) (fromIntegral h) (fromIntegral d) (getGlColorFormat (undefined :: c) (undefined :: b)) (getGlType (undefined :: b)) (wordPtrToPtr $ fromIntegral $ i*bufElementSize b)
        glBindBuffer GL_PIXEL_UNPACK_BUFFER 0
    where V3 mx my mz = texture3DSizes t !! l
-- | Write a @V2 w h@ region into cube face @s@ at level @l@ from GPU buffer
-- @b@, starting at buffer element @i@. Cube faces are square, so one bound
-- @mxy@ covers both axes.
writeTextureCubeFromBuffer t@(TextureCube texn _ ml) l s (V2 x y) (V2 w h) b i
                    | l < 0 || l >= ml = error "writeTextureCubeFromBuffer, level out of bounds"
                    | x < 0 || x >= mxy = error "writeTextureCubeFromBuffer, x out of bounds"
                    | w < 0 || x+w > mxy = error "writeTextureCubeFromBuffer, w out of bounds"
                    | y < 0 || y >= mxy = error "writeTextureCubeFromBuffer, y out of bounds"
                    | h < 0 || y+h > mxy = error "writeTextureCubeFromBuffer, h out of bounds"
                    | i < 0 || i > bufferLength b = error "writeTextureCubeFromBuffer, i out of bounds"
                    | bufferLength b - i < w*h = error "writeTextureCubeFromBuffer, buffer data too small"
                    | otherwise = liftContextIOAsync $ do
                       useTexSync texn GL_TEXTURE_CUBE_MAP
                       bname <- readIORef $ bufName b
                       glBindBuffer GL_PIXEL_UNPACK_BUFFER bname
                       -- With an unpack buffer bound, the "pointer" is a byte offset into it.
                       glTexSubImage2D (getGlCubeSide s) (fromIntegral l) (fromIntegral x) (fromIntegral y) (fromIntegral w) (fromIntegral h) (getGlColorFormat (undefined :: c) (undefined :: b)) (getGlType (undefined :: b)) (wordPtrToPtr $ fromIntegral $ i*bufElementSize b)
                       glBindBuffer GL_PIXEL_UNPACK_BUFFER 0
    where mxy = textureCubeSizes t !! l
-- | Fold @f@ over @w@ pixels read back from level @l@ of a 1D texture,
-- starting at texel @x@. Pixels are fetched into a temporary host buffer
-- that 'bracket' frees even if the fold throws.
readTexture1D t@(Texture1D texn _ ml) l x w f s
    -- Fixed: error messages previously said "readTexture1DArray".
    | l < 0 || l >= ml = error "readTexture1D, level out of bounds"
    | x < 0 || x >= mx = error "readTexture1D, x out of bounds"
    | w < 0 || x+w > mx = error "readTexture1D, w out of bounds"
    | otherwise =
        let b = makeBuffer undefined undefined 0 :: Buffer os b
            f' ptr a off = f a =<< liftIO (peekPixel (undefined :: b) (ptr `plusPtr` off))
        in bracket
            (liftContextIO $ do
                ptr <- mallocBytes $ w*bufElementSize b
                setGlPixelStoreRange x 0 0 w 1
                -- Fixed: this is a plain 1D texture; it was bound and read
                -- through the GL_TEXTURE_1D_ARRAY target by mistake.
                useTexSync texn GL_TEXTURE_1D
                glGetTexImage GL_TEXTURE_1D (fromIntegral l) (getGlColorFormat (undefined :: c) (undefined :: b)) (getGlType (undefined :: b)) ptr
                return ptr)
            (liftIO . free)
            (\ptr -> foldM (f' ptr) s [0,bufElementSize b..w*bufElementSize b -1])
    where mx = texture1DSizes t !! l
-- | Fold @f@ over @w@ pixels read back from slice @y@ of level @l@ of a 1D
-- texture array, starting at texel @x@. A temporary host buffer holds the
-- readback and is freed by 'bracket' even if the fold throws.
readTexture1DArray t@(Texture1DArray texn _ ml) l (V2 x y) w f s
                    | l < 0 || l >= ml = error "readTexture1DArray, level out of bounds"
                    | x < 0 || x >= mx = error "readTexture1DArray, x out of bounds"
                    | w < 0 || x+w > mx = error "readTexture1DArray, w out of bounds"
                    | y < 0 || y >= my = error "readTexture1DArray, y out of bounds"
                    | otherwise =
                        let b = makeBuffer undefined undefined 0 :: Buffer os b
                            f' ptr a off = f a =<< liftIO (peekPixel (undefined :: b) (ptr `plusPtr` off))
                        in bracket
                            (liftContextIO $ do
                               ptr <- mallocBytes $ w*bufElementSize b
                               -- Restrict the pack range to one row of the selected slice.
                               setGlPixelStoreRange x y 0 w 1
                               useTexSync texn GL_TEXTURE_1D_ARRAY
                               glGetTexImage GL_TEXTURE_1D_ARRAY (fromIntegral l) (getGlColorFormat (undefined :: c) (undefined :: b)) (getGlType (undefined :: b)) ptr
                               return ptr)
                            (liftIO . free)
                            (\ptr -> foldM (f' ptr) s [0,bufElementSize b..w*bufElementSize b -1])
    where V2 mx my = texture1DArraySizes t !! l
-- | Fold @f@ over the pixels of a @V2 w h@ region of level @l@ of a 2D
-- texture, row-major. A temporary host buffer holds the readback and is
-- freed by 'bracket' even if the fold throws.
readTexture2D t@(Texture2D texn _ ml) l (V2 x y) (V2 w h) f s
                    | l < 0 || l >= ml = error "readTexture2D, level out of bounds"
                    | x < 0 || x >= mx = error "readTexture2D, x out of bounds"
                    | w < 0 || x+w > mx = error "readTexture2D, w out of bounds"
                    | y < 0 || y >= my = error "readTexture2D, y out of bounds"
                    | h < 0 || y+h > my = error "readTexture2D, h out of bounds"
                    | otherwise =
                        let b = makeBuffer undefined undefined 0 :: Buffer os b
                            f' ptr a off = f a =<< liftIO (peekPixel (undefined :: b) (ptr `plusPtr` off))
                        in bracket
                            (liftContextIO $ do
                               ptr <- mallocBytes $ w*h*bufElementSize b
                               setGlPixelStoreRange x y 0 w h
                               useTexSync texn GL_TEXTURE_2D
                               glGetTexImage GL_TEXTURE_2D (fromIntegral l) (getGlColorFormat (undefined :: c) (undefined :: b)) (getGlType (undefined :: b)) ptr
                               return ptr)
                            (liftIO . free)
                            (\ptr -> foldM (f' ptr) s [0,bufElementSize b..w*h*bufElementSize b -1])
    where V2 mx my = texture2DSizes t !! l
-- | Fold @f@ over the pixels of a @V2 w h@ region of slice @z@ of level @l@
-- of a 2D texture array. A temporary host buffer holds the readback and is
-- freed by 'bracket' even if the fold throws.
readTexture2DArray t@(Texture2DArray texn _ ml) l (V3 x y z) (V2 w h) f s
    | l < 0 || l >= ml = error "readTexture2DArray, level out of bounds"
    | x < 0 || x >= mx = error "readTexture2DArray, x out of bounds"
    | w < 0 || x+w > mx = error "readTexture2DArray, w out of bounds"
    | y < 0 || y >= my = error "readTexture2DArray, y out of bounds"
    | h < 0 || y+h > my = error "readTexture2DArray, h out of bounds"
    -- Fixed: this guard previously reported "y out of bounds" for z.
    | z < 0 || z >= mz = error "readTexture2DArray, z out of bounds"
    | otherwise =
        let b = makeBuffer undefined undefined 0 :: Buffer os b
            f' ptr a off = f a =<< liftIO (peekPixel (undefined :: b) (ptr `plusPtr` off))
        in bracket
            (liftContextIO $ do
                ptr <- mallocBytes $ w*h*bufElementSize b
                setGlPixelStoreRange x y z w h
                useTexSync texn GL_TEXTURE_2D_ARRAY
                glGetTexImage GL_TEXTURE_2D_ARRAY (fromIntegral l) (getGlColorFormat (undefined :: c) (undefined :: b)) (getGlType (undefined :: b)) ptr
                return ptr)
            (liftIO . free)
            (\ptr -> foldM (f' ptr) s [0,bufElementSize b..w*h*bufElementSize b -1])
    where V3 mx my mz = texture2DArraySizes t !! l
-- | Fold @f@ over the pixels of a @V2 w h@ region of depth-slice @z@ of
-- level @l@ of a 3D texture. A temporary host buffer holds the readback and
-- is freed by 'bracket' even if the fold throws.
readTexture3D t@(Texture3D texn _ ml) l (V3 x y z) (V2 w h) f s
    | l < 0 || l >= ml = error "readTexture3D, level out of bounds"
    | x < 0 || x >= mx = error "readTexture3D, x out of bounds"
    | w < 0 || x+w > mx = error "readTexture3D, w out of bounds"
    | y < 0 || y >= my = error "readTexture3D, y out of bounds"
    | h < 0 || y+h > my = error "readTexture3D, h out of bounds"
    -- Fixed: this guard previously reported "y out of bounds" for z.
    | z < 0 || z >= mz = error "readTexture3D, z out of bounds"
    | otherwise =
        let b = makeBuffer undefined undefined 0 :: Buffer os b
            f' ptr a off = f a =<< liftIO (peekPixel (undefined :: b) (ptr `plusPtr` off))
        in bracket
            (liftContextIO $ do
                ptr <- mallocBytes $ w*h*bufElementSize b
                setGlPixelStoreRange x y z w h
                useTexSync texn GL_TEXTURE_3D
                glGetTexImage GL_TEXTURE_3D (fromIntegral l) (getGlColorFormat (undefined :: c) (undefined :: b)) (getGlType (undefined :: b)) ptr
                return ptr)
            (liftIO . free)
            (\ptr -> foldM (f' ptr) s [0,bufElementSize b..w*h*bufElementSize b -1])
    where V3 mx my mz = texture3DSizes t !! l
-- | Fold @f@ over the pixels of a @V2 w h@ region of cube face @si@ at level
-- @l@. Cube faces are square, so one bound @mxy@ covers both axes. A
-- temporary host buffer holds the readback and is freed by 'bracket'.
readTextureCube t@(TextureCube texn _ ml) l si (V2 x y) (V2 w h) f s
                    | l < 0 || l >= ml = error "readTextureCube, level out of bounds"
                    | x < 0 || x >= mxy = error "readTextureCube, x out of bounds"
                    | w < 0 || x+w > mxy = error "readTextureCube, w out of bounds"
                    | y < 0 || y >= mxy = error "readTextureCube, y out of bounds"
                    | h < 0 || y+h > mxy = error "readTextureCube, h out of bounds"
                    | otherwise =
                        let b = makeBuffer undefined undefined 0 :: Buffer os b
                            f' ptr a off = f a =<< liftIO (peekPixel (undefined :: b) (ptr `plusPtr` off))
                        in bracket
                            (liftContextIO $ do
                               ptr <- mallocBytes $ w*h*bufElementSize b
                               setGlPixelStoreRange x y 0 w h
                               useTexSync texn GL_TEXTURE_CUBE_MAP
                               glGetTexImage (getGlCubeSide si) (fromIntegral l) (getGlColorFormat (undefined :: c) (undefined :: b)) (getGlType (undefined :: b)) ptr
                               return ptr)
                            (liftIO . free)
                            (\ptr -> foldM (f' ptr) s [0,bufElementSize b..w*h*bufElementSize b -1])
    where mxy = textureCubeSizes t !! l
-- | Read @w@ texels of level @l@ of a 1D texture into GPU buffer @b@ at
-- element index @i@, via the pixel-pack binding (no host round trip).
readTexture1DToBuffer t@(Texture1D texn _ ml) l x w b i
                    | l < 0 || l >= ml = error "readTexture1DToBuffer, level out of bounds"
                    | x < 0 || x >= mx = error "readTexture1DToBuffer, x out of bounds"
                    | w < 0 || x+w > mx = error "readTexture1DToBuffer, w out of bounds"
                    | i < 0 || i > bufferLength b = error "readTexture1DToBuffer, i out of bounds"
                    | bufferLength b - i < w = error "readTexture1DToBuffer, buffer data too small"
                    | otherwise = liftContextIOAsync $ do
                       bname <- readIORef $ bufName b
                       glBindBuffer GL_PIXEL_PACK_BUFFER bname
                       setGlPixelStoreRange x 0 0 w 1
                       useTexSync texn GL_TEXTURE_1D
                       -- With a pack buffer bound, the "pointer" is a byte offset into it.
                       glGetTexImage GL_TEXTURE_1D (fromIntegral l) (getGlColorFormat (undefined :: c) (undefined :: b)) (getGlType (undefined :: b)) (wordPtrToPtr $ fromIntegral $ i*bufElementSize b)
                       glBindBuffer GL_PIXEL_PACK_BUFFER 0
    where mx = texture1DSizes t !! l
-- | Read @w@ texels of slice @y@ of level @l@ of a 1D texture array into GPU
-- buffer @b@ at element index @i@.
readTexture1DArrayToBuffer t@(Texture1DArray texn _ ml) l (V2 x y) w b i
                    | l < 0 || l >= ml = error "readTexture1DArrayToBuffer, level out of bounds"
                    | x < 0 || x >= mx = error "readTexture1DArrayToBuffer, x out of bounds"
                    | w < 0 || x+w > mx = error "readTexture1DArrayToBuffer, w out of bounds"
                    | y < 0 || y >= my = error "readTexture1DArrayToBuffer, y out of bounds"
                    | i < 0 || i > bufferLength b = error "readTexture1DArrayToBuffer, i out of bounds"
                    | bufferLength b - i < w = error "readTexture1DArrayToBuffer, buffer data too small"
                    | otherwise = liftContextIOAsync $ do
                       bname <- readIORef $ bufName b
                       glBindBuffer GL_PIXEL_PACK_BUFFER bname
                       setGlPixelStoreRange x y 0 w 1
                       useTexSync texn GL_TEXTURE_1D_ARRAY
                       glGetTexImage GL_TEXTURE_1D_ARRAY (fromIntegral l) (getGlColorFormat (undefined :: c) (undefined :: b)) (getGlType (undefined :: b)) (wordPtrToPtr $ fromIntegral $ i*bufElementSize b)
                       glBindBuffer GL_PIXEL_PACK_BUFFER 0
    where V2 mx my = texture1DArraySizes t !! l
-- | Read a @V2 w h@ region of level @l@ of a 2D texture into GPU buffer @b@
-- at element index @i@.
readTexture2DToBuffer t@(Texture2D texn _ ml) l (V2 x y) (V2 w h) b i
                    | l < 0 || l >= ml = error "readTexture2DToBuffer, level out of bounds"
                    | x < 0 || x >= mx = error "readTexture2DToBuffer, x out of bounds"
                    | w < 0 || x+w > mx = error "readTexture2DToBuffer, w out of bounds"
                    | y < 0 || y >= my = error "readTexture2DToBuffer, y out of bounds"
                    | h < 0 || y+h > my = error "readTexture2DToBuffer, h out of bounds"
                    | i < 0 || i > bufferLength b = error "readTexture2DToBuffer, i out of bounds"
                    | bufferLength b - i < w*h = error "readTexture2DToBuffer, buffer data too small"
                    | otherwise = liftContextIOAsync $ do
                       bname <- readIORef $ bufName b
                       glBindBuffer GL_PIXEL_PACK_BUFFER bname
                       setGlPixelStoreRange x y 0 w h
                       useTexSync texn GL_TEXTURE_2D
                       glGetTexImage GL_TEXTURE_2D (fromIntegral l) (getGlColorFormat (undefined :: c) (undefined :: b)) (getGlType (undefined :: b)) (wordPtrToPtr $ fromIntegral $ i*bufElementSize b)
                       glBindBuffer GL_PIXEL_PACK_BUFFER 0
    where V2 mx my = texture2DSizes t !! l
-- | Read a @V2 w h@ region of slice @z@ of level @l@ of a 2D texture array
-- into GPU buffer @b@ at element index @i@.
readTexture2DArrayToBuffer t@(Texture2DArray texn _ ml) l (V3 x y z) (V2 w h) b i
                    | l < 0 || l >= ml = error "readTexture2DArrayToBuffer, level out of bounds"
                    | x < 0 || x >= mx = error "readTexture2DArrayToBuffer, x out of bounds"
                    | w < 0 || x+w > mx = error "readTexture2DArrayToBuffer, w out of bounds"
                    | y < 0 || y >= my = error "readTexture2DArrayToBuffer, y out of bounds"
                    | h < 0 || y+h > my = error "readTexture2DArrayToBuffer, h out of bounds"
                    | z < 0 || z >= mz = error "readTexture2DArrayToBuffer, z out of bounds"
                    | i < 0 || i > bufferLength b = error "readTexture2DArrayToBuffer, i out of bounds"
                    | bufferLength b - i < w*h = error "readTexture2DArrayToBuffer, buffer data too small"
                    | otherwise = liftContextIOAsync $ do
                       bname <- readIORef $ bufName b
                       glBindBuffer GL_PIXEL_PACK_BUFFER bname
                       setGlPixelStoreRange x y z w h
                       useTexSync texn GL_TEXTURE_2D_ARRAY
                       glGetTexImage GL_TEXTURE_2D_ARRAY (fromIntegral l) (getGlColorFormat (undefined :: c) (undefined :: b)) (getGlType (undefined :: b)) (wordPtrToPtr $ fromIntegral $ i*bufElementSize b)
                       glBindBuffer GL_PIXEL_PACK_BUFFER 0
    where V3 mx my mz = texture2DArraySizes t !! l
-- | Read a @V2 w h@ region of depth-slice @z@ of level @l@ of a 3D texture
-- into GPU buffer @b@ at element index @i@.
readTexture3DToBuffer t@(Texture3D texn _ ml) l (V3 x y z) (V2 w h) b i
                    | l < 0 || l >= ml = error "readTexture3DToBuffer, level out of bounds"
                    | x < 0 || x >= mx = error "readTexture3DToBuffer, x out of bounds"
                    | w < 0 || x+w > mx = error "readTexture3DToBuffer, w out of bounds"
                    | y < 0 || y >= my = error "readTexture3DToBuffer, y out of bounds"
                    | h < 0 || y+h > my = error "readTexture3DToBuffer, h out of bounds"
                    | z < 0 || z >= mz = error "readTexture3DToBuffer, z out of bounds"
                    | i < 0 || i > bufferLength b = error "readTexture3DToBuffer, i out of bounds"
                    | bufferLength b - i < w*h = error "readTexture3DToBuffer, buffer data too small"
                    | otherwise = liftContextIOAsync $ do
                       bname <- readIORef $ bufName b
                       glBindBuffer GL_PIXEL_PACK_BUFFER bname
                       setGlPixelStoreRange x y z w h
                       useTexSync texn GL_TEXTURE_3D
                       glGetTexImage GL_TEXTURE_3D (fromIntegral l) (getGlColorFormat (undefined :: c) (undefined :: b)) (getGlType (undefined :: b)) (wordPtrToPtr $ fromIntegral $ i*bufElementSize b)
                       glBindBuffer GL_PIXEL_PACK_BUFFER 0
    where V3 mx my mz = texture3DSizes t !! l
-- | Read a @V2 w h@ region of cube face @s@ at level @l@ into GPU buffer @b@
-- at element index @i@.
-- NOTE(review): unlike its siblings, this sets the pack range before binding
-- the pack buffer; glPixelStore state is independent of buffer bindings, so
-- the order difference looks harmless — confirm before "normalizing" it.
readTextureCubeToBuffer t@(TextureCube texn _ ml) l s (V2 x y) (V2 w h) b i
                    | l < 0 || l >= ml = error "readTextureCubeToBuffer, level out of bounds"
                    | x < 0 || x >= mxy = error "readTextureCubeToBuffer, x out of bounds"
                    | w < 0 || x+w > mxy = error "readTextureCubeToBuffer, w out of bounds"
                    | y < 0 || y >= mxy = error "readTextureCubeToBuffer, y out of bounds"
                    | h < 0 || y+h > mxy = error "readTextureCubeToBuffer, h out of bounds"
                    | i < 0 || i > bufferLength b = error "readTextureCubeToBuffer, i out of bounds"
                    | bufferLength b - i < w*h = error "readTextureCubeToBuffer, buffer data too small"
                    | otherwise = liftContextIOAsync $ do
                       setGlPixelStoreRange x y 0 w h
                       bname <- readIORef $ bufName b
                       glBindBuffer GL_PIXEL_PACK_BUFFER bname
                       useTexSync texn GL_TEXTURE_CUBE_MAP
                       glGetTexImage (getGlCubeSide s) (fromIntegral l) (getGlColorFormat (undefined :: c) (undefined :: b)) (getGlType (undefined :: b)) (wordPtrToPtr $ fromIntegral $ i*bufElementSize b)
                       glBindBuffer GL_PIXEL_PACK_BUFFER 0
    where mxy = textureCubeSizes t !! l
-- | Configure the pixel PACK state so a following glGetTexImage packs only
-- the requested sub-range: skip @x@/@y@/@z@ texels/rows/images, with row
-- length @w@ and image height @h@.
setGlPixelStoreRange :: Int -> Int -> Int -> Int -> Int -> IO ()
setGlPixelStoreRange x y z w h =
    mapM_ (\(param, value) -> glPixelStorei param (fromIntegral value))
          [ (GL_PACK_SKIP_PIXELS, x)
          , (GL_PACK_SKIP_ROWS, y)
          , (GL_PACK_SKIP_IMAGES, z)
          , (GL_PACK_ROW_LENGTH, w)
          , (GL_PACK_IMAGE_HEIGHT, h)
          ]
-- Regenerate the full mipmap chain of a texture from its base level.
generateTexture1DMipmap :: MonadIO m => Texture1D os f -> ContextT w os f' m ()
generateTexture1DArrayMipmap :: MonadIO m => Texture1DArray os f -> ContextT w os f' m ()
generateTexture2DMipmap :: MonadIO m => Texture2D os f -> ContextT w os f' m ()
generateTexture2DArrayMipmap :: MonadIO m => Texture2DArray os f -> ContextT w os f' m ()
generateTexture3DMipmap :: MonadIO m => Texture3D os f -> ContextT w os f' m ()
generateTextureCubeMipmap :: MonadIO m => TextureCube os f -> ContextT w os f' m ()
-- Shared implementation: bind the texture on the sync unit and let the
-- driver rebuild every level from level 0.
genMips texn target = liftContextIOAsync $ do
                                useTexSync texn target
                                glGenerateMipmap target
generateTexture1DMipmap (Texture1D texn _ _) = genMips texn GL_TEXTURE_1D
generateTexture1DArrayMipmap (Texture1DArray texn _ _) = genMips texn GL_TEXTURE_1D_ARRAY
generateTexture2DMipmap (Texture2D texn _ _) = genMips texn GL_TEXTURE_2D
-- Second clause catches the RenderBuffer2D constructor.
generateTexture2DMipmap _ = return () -- Only one level for renderbuffers
generateTexture2DArrayMipmap (Texture2DArray texn _ _) = genMips texn GL_TEXTURE_2D_ARRAY
generateTexture3DMipmap (Texture3D texn _ _) = genMips texn GL_TEXTURE_3D
generateTextureCubeMipmap (TextureCube texn _ _) = genMips texn GL_TEXTURE_CUBE_MAP
----------------------------------------------------------------------
-- Samplers
-- | Filtering used when sampling between texels.
data Filter = Nearest | Linear deriving (Eq, Enum)
-- | How texture coordinates outside the [0,1] range are resolved.
data EdgeMode = Repeat | Mirror | ClampToEdge | ClampToBorder deriving (Eq, Enum)
-- | The color returned when 'ClampToBorder' is in effect.
type BorderColor c = Color c (ColorElement c)
-- | Maximum anisotropic filtering amount, or 'Nothing' to disable it.
type Anisotropy = Maybe Float
type MinFilter = Filter
type MagFilter = Filter
-- | Filter used between mipmap levels.
type LodFilter = Filter
-- | A GADT for sample filters, where 'SamplerFilter' cannot be used for integer textures.
data SamplerFilter c where
    SamplerFilter :: (ColorElement c ~ Float) => MagFilter -> MinFilter -> LodFilter -> Anisotropy -> SamplerFilter c
    SamplerNearest :: SamplerFilter c
-- | Edge mode per texture axis for 2D samplers.
type EdgeMode2 = V2 EdgeMode
-- | Edge mode per texture axis for 3D samplers.
type EdgeMode3 = V3 EdgeMode
-- | Comparison applied by shadow samplers between the reference value
-- and the stored depth value.
data ComparisonFunction =
     Never
   | Less
   | Equal
   | Lequal
   | Greater
   | Notequal
   | Gequal
   | Always
   deriving ( Eq, Ord, Show )
-- | Translate a 'ComparisonFunction' into the corresponding GL enum
-- value (kept polymorphic in the numeric result type).
getGlCompFunc :: (Num a, Eq a) => ComparisonFunction -> a
getGlCompFunc cf = case cf of
    Never    -> GL_NEVER
    Less     -> GL_LESS
    Equal    -> GL_EQUAL
    Lequal   -> GL_LEQUAL
    Greater  -> GL_GREATER
    Notequal -> GL_NOTEQUAL
    Gequal   -> GL_GEQUAL
    Always   -> GL_ALWAYS
-- | Create a sampler from a color texture, its filter and its edge mode
-- (with border color). The extractor function is applied to the shader
-- environment every time the shader runs.
newSampler1D :: forall os f s c. ColorSampleable c => (s -> (Texture1D os (Format c), SamplerFilter c, (EdgeMode, BorderColor c))) -> Shader os f s (Sampler1D (Format c))
newSampler1DArray :: forall os f s c. ColorSampleable c => (s -> (Texture1DArray os (Format c), SamplerFilter c, (EdgeMode, BorderColor c))) -> Shader os f s (Sampler1DArray (Format c))
newSampler2D :: forall os f s c. ColorSampleable c => (s -> (Texture2D os (Format c), SamplerFilter c, (EdgeMode2, BorderColor c))) -> Shader os f s (Sampler2D (Format c))
newSampler2DArray :: forall os f s c. ColorSampleable c => (s -> (Texture2DArray os (Format c), SamplerFilter c, (EdgeMode2, BorderColor c))) -> Shader os f s (Sampler2DArray (Format c))
newSampler3D :: forall os f s c. ColorRenderable c => (s -> (Texture3D os (Format c), SamplerFilter c, (EdgeMode3, BorderColor c))) -> Shader os f s (Sampler3D (Format c))
-- Cube maps take no edge mode: coordinates always resolve to a face.
newSamplerCube :: forall os f s c. ColorSampleable c => (s -> (TextureCube os (Format c), SamplerFilter c)) -> Shader os f s (SamplerCube (Format c))
-- | Shadow samplers additionally take the 'ComparisonFunction' used
-- against the reference depth value.
newSampler1DShadow :: forall os f s d. DepthRenderable d => (s -> (Texture1D os (Format d), SamplerFilter d, (EdgeMode, BorderColor d), ComparisonFunction)) -> Shader os f s (Sampler1D Shadow)
newSampler1DArrayShadow :: forall os f s d. DepthRenderable d => (s -> (Texture1DArray os (Format d), SamplerFilter d, (EdgeMode, BorderColor d), ComparisonFunction)) -> Shader os f s (Sampler1DArray Shadow)
-- Fixed: the texture format and filter arguments were transposed
-- ('Texture2D os d' / 'SamplerFilter (Format d)'), inconsistent with
-- every sibling shadow-sampler signature.
newSampler2DShadow :: forall os f s d. DepthRenderable d => (s -> (Texture2D os (Format d), SamplerFilter d, (EdgeMode2, BorderColor d), ComparisonFunction)) -> Shader os f s (Sampler2D Shadow)
newSampler2DArrayShadow :: forall os f s d. DepthRenderable d => (s -> (Texture2DArray os (Format d), SamplerFilter d, (EdgeMode2, BorderColor d), ComparisonFunction)) -> Shader os f s (Sampler2DArray Shadow)
newSamplerCubeShadow :: forall os f s d. DepthRenderable d => (s -> (TextureCube os (Format d), SamplerFilter d, ComparisonFunction)) -> Shader os f s (SamplerCube Shadow)
-- | Set up a non-shadow 1D sampler: bind the texture, disable depth
-- comparison, and apply the filter and edge mode (with border color).
-- The @undefined :: c@ arguments are type proxies (ScopedTypeVariables);
-- only their type is inspected, never the value.
newSampler1D sf = Shader $ do
    sampId <- getName
    doForSampler sampId $ \s bind ->
        let (Texture1D tn _ _, filt, (ex, ec)) = sf s
        in do n <- useTex tn GL_TEXTURE_1D bind
              setNoShadowMode GL_TEXTURE_1D
              setSamplerFilter GL_TEXTURE_1D filt
              setEdgeMode GL_TEXTURE_1D (Just ex, Nothing, Nothing) (setBorderColor (undefined :: c) GL_TEXTURE_1D ec)
              return n
    return $ Sampler1D sampId False (samplerPrefix (undefined :: c))
-- | As 'newSampler1D', for 1D array textures.
newSampler1DArray sf = Shader $ do
    sampId <- getName
    doForSampler sampId $ \s bind ->
        let (Texture1DArray tn _ _, filt, (ex, ec)) = sf s
        in do n <- useTex tn GL_TEXTURE_1D_ARRAY bind
              setNoShadowMode GL_TEXTURE_1D_ARRAY
              setSamplerFilter GL_TEXTURE_1D_ARRAY filt
              setEdgeMode GL_TEXTURE_1D_ARRAY (Just ex, Nothing, Nothing) (setBorderColor (undefined :: c) GL_TEXTURE_1D_ARRAY ec)
              return n
    return $ Sampler1DArray sampId False (samplerPrefix (undefined :: c))
-- | As 'newSampler1D', for 2D textures (edge mode given per axis).
newSampler2D sf = Shader $ do
    sampId <- getName
    doForSampler sampId $ \s bind ->
        let (Texture2D tn _ _, filt, (V2 ex ey, ec)) = sf s
        in do n <- useTex tn GL_TEXTURE_2D bind
              setNoShadowMode GL_TEXTURE_2D
              setSamplerFilter GL_TEXTURE_2D filt
              setEdgeMode GL_TEXTURE_2D (Just ex, Just ey, Nothing) (setBorderColor (undefined :: c) GL_TEXTURE_2D ec)
              return n
    return $ Sampler2D sampId False (samplerPrefix (undefined :: c))
-- | As 'newSampler2D', for 2D array textures.
newSampler2DArray sf = Shader $ do
    sampId <- getName
    doForSampler sampId $ \s bind ->
        let (Texture2DArray tn _ _, filt, (V2 ex ey, ec)) = sf s
        in do n <- useTex tn GL_TEXTURE_2D_ARRAY bind
              setNoShadowMode GL_TEXTURE_2D_ARRAY
              setSamplerFilter GL_TEXTURE_2D_ARRAY filt
              setEdgeMode GL_TEXTURE_2D_ARRAY (Just ex, Just ey, Nothing) (setBorderColor (undefined :: c) GL_TEXTURE_2D_ARRAY ec)
              return n
    return $ Sampler2DArray sampId False (samplerPrefix (undefined :: c))
-- | As 'newSampler2D', for 3D textures (edge mode on all three axes).
newSampler3D sf = Shader $ do
    sampId <- getName
    doForSampler sampId $ \s bind ->
        let (Texture3D tn _ _, filt, (V3 ex ey ez, ec)) = sf s
        in do n <- useTex tn GL_TEXTURE_3D bind
              setNoShadowMode GL_TEXTURE_3D
              setSamplerFilter GL_TEXTURE_3D filt
              setEdgeMode GL_TEXTURE_3D (Just ex, Just ey, Just ez) (setBorderColor (undefined :: c) GL_TEXTURE_3D ec)
              return n
    return $ Sampler3D sampId False (samplerPrefix (undefined :: c))
-- | Cube samplers need no edge mode: coordinates always land on a face.
newSamplerCube sf = Shader $ do
    sampId <- getName
    doForSampler sampId $ \s bind ->
        let (TextureCube tn _ _, filt) = sf s
        in do n <- useTex tn GL_TEXTURE_CUBE_MAP bind
              setNoShadowMode GL_TEXTURE_CUBE_MAP
              setSamplerFilter GL_TEXTURE_CUBE_MAP filt
              return n
    return $ SamplerCube sampId False (samplerPrefix (undefined :: c))
-- | Shadow variant of 'newSampler1D': enables depth comparison with the
-- given 'ComparisonFunction'. Shadow samplers carry no GLSL type prefix
-- (empty string) and are flagged with 'True'.
newSampler1DShadow sf = Shader $ do
    sampId <- getName
    doForSampler sampId $ \s bind ->
        let (Texture1D tn _ _, filt, (ex, ec), cf) = sf s
        in do n <- useTex tn GL_TEXTURE_1D bind
              setShadowFunc GL_TEXTURE_1D cf
              setSamplerFilter GL_TEXTURE_1D filt
              setEdgeMode GL_TEXTURE_1D (Just ex, Nothing, Nothing) (setBorderColor (undefined :: d) GL_TEXTURE_1D ec)
              return n
    return $ Sampler1D sampId True ""
-- | As 'newSampler1DShadow', for 1D array textures.
newSampler1DArrayShadow sf = Shader $ do
    sampId <- getName
    doForSampler sampId $ \s bind ->
        let (Texture1DArray tn _ _, filt, (ex, ec), cf) = sf s
        in do n <- useTex tn GL_TEXTURE_1D_ARRAY bind
              setShadowFunc GL_TEXTURE_1D_ARRAY cf
              setSamplerFilter GL_TEXTURE_1D_ARRAY filt
              setEdgeMode GL_TEXTURE_1D_ARRAY (Just ex, Nothing, Nothing) (setBorderColor (undefined :: d) GL_TEXTURE_1D_ARRAY ec)
              return n
    return $ Sampler1DArray sampId True ""
-- | As 'newSampler1DShadow', for 2D textures.
newSampler2DShadow sf = Shader $ do
    sampId <- getName
    doForSampler sampId $ \s bind ->
        let (Texture2D tn _ _, filt, (V2 ex ey, ec), cf) = sf s
        in do n <- useTex tn GL_TEXTURE_2D bind
              setShadowFunc GL_TEXTURE_2D cf
              setSamplerFilter GL_TEXTURE_2D filt
              setEdgeMode GL_TEXTURE_2D (Just ex, Just ey, Nothing) (setBorderColor (undefined :: d) GL_TEXTURE_2D ec)
              return n
    return $ Sampler2D sampId True ""
-- | As 'newSampler2DShadow', for 2D array textures.
newSampler2DArrayShadow sf = Shader $ do
    sampId <- getName
    doForSampler sampId $ \s bind ->
        let (Texture2DArray tn _ _, filt, (V2 ex ey, ec), cf) = sf s
        in do n <- useTex tn GL_TEXTURE_2D_ARRAY bind
              setShadowFunc GL_TEXTURE_2D_ARRAY cf
              setSamplerFilter GL_TEXTURE_2D_ARRAY filt
              setEdgeMode GL_TEXTURE_2D_ARRAY (Just ex, Just ey, Nothing) (setBorderColor (undefined :: d) GL_TEXTURE_2D_ARRAY ec)
              return n
    return $ Sampler2DArray sampId True ""
-- | As 'newSampler1DShadow', for cube textures (no edge mode).
newSamplerCubeShadow sf = Shader $ do
    sampId <- getName
    doForSampler sampId $ \s bind ->
        let (TextureCube tn _ _, filt, cf) = sf s
        in do n <- useTex tn GL_TEXTURE_CUBE_MAP bind
              setShadowFunc GL_TEXTURE_CUBE_MAP cf
              setSamplerFilter GL_TEXTURE_CUBE_MAP filt
              return n
    return $ SamplerCube sampId True ""
-- | Disable depth comparison on the given texture target (plain sampling).
setNoShadowMode :: GLenum -> IO ()
setNoShadowMode t = glTexParameteri t GL_TEXTURE_COMPARE_MODE GL_NONE
-- | Enable depth comparison against the reference value on the given
-- texture target, using the supplied comparison function.
setShadowFunc :: GLenum -> ComparisonFunction -> IO ()
setShadowFunc t cf = do
    glTexParameteri t GL_TEXTURE_COMPARE_MODE GL_COMPARE_REF_TO_TEXTURE
    glTexParameteri t GL_TEXTURE_COMPARE_FUNC (getGlCompFunc cf)
-- | Set the wrap mode per axis on a texture target. 'Nothing' leaves
-- that axis untouched. The supplied IO action (which sets the border
-- color) is run only when at least one axis clamps to border.
setEdgeMode :: GLenum -> (Maybe EdgeMode, Maybe EdgeMode, Maybe EdgeMode) -> IO () -> IO ()
setEdgeMode t (se,te,re) bcio = do
    glwrap GL_TEXTURE_WRAP_S se
    glwrap GL_TEXTURE_WRAP_T te
    glwrap GL_TEXTURE_WRAP_R re
    when (se == Just ClampToBorder || te == Just ClampToBorder || re == Just ClampToBorder)
        bcio
  where
    glwrap _ Nothing = return ()
    glwrap x (Just Repeat) = glTexParameteri t x GL_REPEAT
    glwrap x (Just Mirror) = glTexParameteri t x GL_MIRRORED_REPEAT
    glwrap x (Just ClampToEdge) = glTexParameteri t x GL_CLAMP_TO_EDGE
    glwrap x (Just ClampToBorder) = glTexParameteri t x GL_CLAMP_TO_BORDER
-- | Apply a 'SamplerFilter' to the given texture target.
-- 'SamplerNearest' means nearest filtering everywhere, no anisotropy.
setSamplerFilter :: GLenum -> SamplerFilter a -> IO ()
setSamplerFilter t (SamplerFilter magf minf lodf a) = setSamplerFilter' t magf minf lodf a
setSamplerFilter t SamplerNearest = setSamplerFilter' t Nearest Nearest Nearest Nothing
setSamplerFilter' :: GLenum -> MagFilter -> MinFilter -> LodFilter -> Anisotropy -> IO ()
setSamplerFilter' t magf minf lodf a = do
    glTexParameteri t GL_TEXTURE_MIN_FILTER glmin
    glTexParameteri t GL_TEXTURE_MAG_FILTER glmag
    case a of
        Nothing -> return ()
        Just a' -> glTexParameterf t GL_TEXTURE_MAX_ANISOTROPY_EXT (realToFrac a')
  where
    -- The min filter always selects a mipmap variant; the level
    -- selection mode is combined from the minification and LOD filters.
    glmin = case (minf, lodf) of
        (Nearest, Nearest) -> GL_NEAREST_MIPMAP_NEAREST
        (Linear, Nearest) -> GL_LINEAR_MIPMAP_NEAREST
        (Nearest, Linear) -> GL_NEAREST_MIPMAP_LINEAR
        (Linear, Linear) -> GL_LINEAR_MIPMAP_LINEAR
    glmag = case magf of
        Nearest -> GL_NEAREST
        Linear -> GL_LINEAR
-- | Register, under the given sampler name id, the IO action that binds
-- and configures the sampler's texture when the shader is rendered.
doForSampler :: Int -> (s -> Binding -> IO Int) -> ShaderM s ()
doForSampler n io = modifyRenderIO (\s -> s { samplerNameToRenderIO = insert n io (samplerNameToRenderIO s) } )
-- | Used instead of 'Format' for shadow samplers. These samplers have specialized sampler values, see 'sample1DShadow' and friends.
data Shadow
-- Each sampler value carries: its sampler name id, whether it is a
-- shadow sampler, and a GLSL sampler type prefix (produced by
-- 'samplerPrefix'; presumably "i"/"u" for integer formats — confirm there).
data Sampler1D f = Sampler1D Int Bool String
data Sampler1DArray f = Sampler1DArray Int Bool String
data Sampler2D f = Sampler2D Int Bool String
data Sampler2DArray f = Sampler2DArray Int Bool String
data Sampler3D f = Sampler3D Int Bool String
data SamplerCube f = SamplerCube Int Bool String
-- | A GADT to specify where the level of detail and/or partial derivatives should be taken from. Some values of this GADT are restricted to
-- only 'FragmentStream's.
data SampleLod vx x where
    SampleAuto :: SampleLod v F
    SampleBias :: FFloat -> SampleLod vx F
    SampleLod :: S x Float -> SampleLod vx x
    SampleGrad :: vx -> vx -> SampleLod vx x
-- | For some reason, OpenGl doesn't allow explicit lod to be specified for some sampler types, hence this extra GADT.
data SampleLod' vx x where
    SampleAuto' :: SampleLod' v F
    SampleBias' :: FFloat -> SampleLod' vx F
    SampleGrad' :: vx -> vx -> SampleLod' vx x
type SampleLod1 x = SampleLod (S x Float) x
type SampleLod2 x = SampleLod (V2 (S x Float)) x
type SampleLod3 x = SampleLod (V3 (S x Float)) x
type SampleLod2' x = SampleLod' (V2 (S x Float)) x
type SampleLod3' x = SampleLod' (V3 (S x Float)) x
-- | Embed the restricted LOD type into the general one (identity on
-- each constructor).
fromLod' :: SampleLod' v x -> SampleLod v x
fromLod' SampleAuto' = SampleAuto
fromLod' (SampleBias' x) = SampleBias x
fromLod' (SampleGrad' x y) = SampleGrad x y
-- | 'Just' a projection component: the coordinate is divided by it.
type SampleProj x = Maybe (S x Float)
-- | Optional compile-time constant texel offsets.
type SampleOffset1 x = Maybe Int
type SampleOffset2 x = Maybe (V2 Int)
type SampleOffset3 x = Maybe (V3 Int)
-- | The type of a color sample made by a texture t
type ColorSample x f = Color f (S x (ColorElement f))
-- | Depth reference value compared against by shadow samplers.
type ReferenceValue x = S x Float
-- | Sample a color texture. The LOD, optional projection component and
-- optional constant offset select the GLSL @texture*@ variant emitted.
sample1D :: forall c x. ColorSampleable c => Sampler1D (Format c) -> SampleLod1 x -> SampleProj x -> SampleOffset1 x -> S x Float -> ColorSample x c
sample1DArray :: forall c x. ColorSampleable c => Sampler1DArray (Format c) -> SampleLod1 x -> SampleOffset1 x -> V2 (S x Float) -> ColorSample x c
sample2D :: forall c x. ColorSampleable c => Sampler2D (Format c) -> SampleLod2 x -> SampleProj x -> SampleOffset2 x -> V2 (S x Float) -> ColorSample x c
sample2DArray :: forall c x. ColorSampleable c => Sampler2DArray (Format c) -> SampleLod2 x -> SampleOffset2 x -> V3 (S x Float) -> ColorSample x c
sample3D :: forall c x. ColorSampleable c => Sampler3D (Format c) -> SampleLod3 x -> SampleProj x -> SampleOffset3 x -> V3 (S x Float) -> ColorSample x c
sampleCube :: forall c x. ColorSampleable c => SamplerCube (Format c) -> SampleLod3 x -> V3 (S x Float) -> ColorSample x c
-- | Shadow sampling: compares the 'ReferenceValue' against the stored
-- depth and yields a single float result.
sample1DShadow :: forall x. Sampler1D Shadow -> SampleLod1 x -> SampleProj x -> SampleOffset1 x -> ReferenceValue x -> S x Float -> S x Float
sample1DArrayShadow :: forall x. Sampler1DArray Shadow-> SampleLod1 x -> SampleOffset1 x -> ReferenceValue x -> V2 (S x Float) -> S x Float
sample2DShadow :: forall x. Sampler2D Shadow -> SampleLod2 x -> SampleProj x -> SampleOffset2 x -> ReferenceValue x -> V2 (S x Float) -> S x Float
sample2DArrayShadow :: forall x. Sampler2DArray Shadow-> SampleLod2' x -> SampleOffset2 x -> ReferenceValue x -> V3 (S x Float)-> S x Float
sampleCubeShadow :: forall x. SamplerCube Shadow -> SampleLod3' x -> ReferenceValue x -> V3 (S x Float) -> S x Float
-- Implementations delegate to 'sample' / 'sampleShadow' with the
-- coordinate-rendering helpers matching each dimensionality. The
-- 'undefined :: c' values are type proxies only. Array and cube
-- variants pass 'Nothing'/'undefined' for unsupported parameters.
sample1D (Sampler1D sampId _ prefix) lod proj off coord = toColor (undefined :: c) $ sample (undefined :: ColorElement c) prefix (typeStr4 (undefined :: c)) "1D" sampId lod proj off coord v1toF v1toF civ1toF pv1toF
sample1DArray (Sampler1DArray sampId _ prefix) lod off coord = toColor (undefined :: c) $ sample (undefined :: ColorElement c) prefix (typeStr4 (undefined :: c)) "1DArray" sampId lod Nothing off coord v2toF v1toF civ1toF undefined
sample2D (Sampler2D sampId _ prefix) lod proj off coord = toColor (undefined :: c) $ sample (undefined :: ColorElement c) prefix (typeStr4 (undefined :: c)) "2D" sampId lod proj off coord v2toF v2toF civ2toF pv2toF
sample2DArray (Sampler2DArray sampId _ prefix) lod off coord = toColor (undefined :: c) $ sample (undefined :: ColorElement c) prefix (typeStr4 (undefined :: c)) "2DArray" sampId lod Nothing off coord v3toF v2toF civ2toF undefined
sample3D (Sampler3D sampId _ prefix) lod proj off coord = toColor (undefined :: c) $ sample (undefined :: ColorElement c) prefix (typeStr4 (undefined :: c)) "3D" sampId lod proj off coord v3toF v3toF civ3toF pv3toF
sampleCube (SamplerCube sampId _ prefix) lod coord = toColor (undefined :: c) $ sample (undefined :: ColorElement c) prefix (typeStr4 (undefined :: c)) "Cube" sampId lod Nothing Nothing coord v3toF v3toF undefined undefined
sample1DShadow (Sampler1D sampId _ _) lod proj off ref coord = sampleShadow "1D" sampId lod proj off (t1t3 coord ref) v3toF v1toF civ1toF pv3toF
sample1DArrayShadow (Sampler1DArray sampId _ _) lod off ref coord = sampleShadow "1DArray" sampId lod Nothing off (t2t3 coord ref) v3toF v1toF civ1toF undefined
sample2DShadow (Sampler2D sampId _ _) lod proj off ref coord = sampleShadow "2D" sampId lod proj off (t2t3 coord ref) v3toF v2toF civ2toF pv3toF
sample2DArrayShadow (Sampler2DArray sampId _ _) lod off ref coord = sampleShadow "2DArray" sampId (fromLod' lod) Nothing off (t3t4 coord ref) v4toF v2toF civ2toF undefined
sampleCubeShadow (SamplerCube sampId _ _) lod ref coord = sampleShadow "Cube" sampId (fromLod' lod) Nothing Nothing (t3t4 coord ref) v4toF v3toF undefined undefined
-- Pack a coordinate and the shadow reference value into one vector
-- (the reference value becomes the last component). Note that the
-- definitions are eta-reduced: e.g. @t1t3 x ref = V3 x 0 ref@, where 0
-- is a dummy y component for the 1D case.
t1t3 :: S x Float -> S x Float -> V3 (S x Float)
t2t3 :: V2 t -> t -> V3 t
t3t4 :: V3 t -> t -> V4 t
t1t3 x = V3 x 0
t2t3 (V2 x y) = V3 x y
t3t4 (V3 x y z) = V4 x y z
-- | Fetch a single texel by integer coordinate at an explicit level of
-- detail, without any filtering.
texelFetch1D :: forall c x. ColorSampleable c => Sampler1D (Format c) -> SampleOffset1 x -> S x Level -> S x Int -> ColorSample x c
texelFetch1DArray :: forall c x. ColorSampleable c => Sampler1DArray (Format c) -> SampleOffset1 x -> S x Level -> V2(S x Int) -> ColorSample x c
texelFetch2D :: forall c x. ColorSampleable c => Sampler2D (Format c) -> SampleOffset2 x -> S x Level -> V2 (S x Int) -> ColorSample x c
texelFetch2DArray :: forall c x. ColorSampleable c => Sampler2DArray (Format c) -> SampleOffset2 x -> S x Level -> V3 (S x Int) -> ColorSample x c
texelFetch3D :: forall c x. ColorSampleable c => Sampler3D (Format c) -> SampleOffset3 x -> S x Level -> V3 (S x Int) -> ColorSample x c
-- Implementations delegate to 'fetch' with the integer-coordinate
-- renderers; 'undefined :: c' values are type proxies only.
texelFetch1D (Sampler1D sampId _ prefix) off lod coord = toColor (undefined :: c) $ fetch (undefined :: ColorElement c) prefix (typeStr4 (undefined :: c)) "1D" sampId lod off coord iv1toF civ1toF
texelFetch1DArray (Sampler1DArray sampId _ prefix) off lod coord = toColor (undefined :: c) $ fetch (undefined :: ColorElement c) prefix (typeStr4 (undefined :: c)) "1DArray" sampId lod off coord iv2toF civ1toF
texelFetch2D (Sampler2D sampId _ prefix) off lod coord = toColor (undefined :: c) $ fetch (undefined :: ColorElement c) prefix (typeStr4 (undefined :: c)) "2D" sampId lod off coord iv2toF civ2toF
texelFetch2DArray (Sampler2DArray sampId _ prefix) off lod coord = toColor (undefined :: c) $ fetch (undefined :: ColorElement c) prefix (typeStr4 (undefined :: c)) "2DArray" sampId lod off coord iv3toF civ2toF
texelFetch3D (Sampler3D sampId _ prefix) off lod coord = toColor (undefined :: c) $ fetch (undefined :: ColorElement c) prefix (typeStr4 (undefined :: c)) "3D" sampId lod off coord iv3toF civ3toF
-- | Query the size of the texture behind a sampler at the given level,
-- compiled to a GLSL @textureSize@ call.
sampler1DSize :: Sampler1D f -> S x Level -> S x Int
sampler1DArraySize :: Sampler1DArray f -> S x Level -> V2 (S x Int)
sampler2DSize :: Sampler2D f -> S x Level -> V2 (S x Int)
sampler2DArraySize :: Sampler2DArray f -> S x Level -> V3 (S x Int)
sampler3DSize :: Sampler3D f -> S x Level -> V3 (S x Int)
samplerCubeSize :: SamplerCube f -> S x Level -> S x Int
sampler1DSize (Sampler1D sampId shadow prefix) = scalarS STypeInt . getTextureSize prefix sampId (addShadowPrefix shadow "1D")
sampler1DArraySize (Sampler1DArray sampId shadow prefix) = vec2S (STypeIVec 2) . getTextureSize prefix sampId (addShadowPrefix shadow "1DArray")
sampler2DSize (Sampler2D sampId shadow prefix) = vec2S (STypeIVec 2) . getTextureSize prefix sampId (addShadowPrefix shadow "2D")
sampler2DArraySize (Sampler2DArray sampId shadow prefix) = vec3S (STypeIVec 3) . getTextureSize prefix sampId (addShadowPrefix shadow "2DArray")
sampler3DSize (Sampler3D sampId shadow prefix) = vec3S (STypeIVec 3) . getTextureSize prefix sampId (addShadowPrefix shadow "3D")
-- Cube maps report an ivec2 in GLSL; both dimensions are equal, so only
-- the first component is kept.
samplerCubeSize (SamplerCube sampId shadow prefix) = (\(V2 x _) -> x) . vec2S (STypeIVec 2) . getTextureSize prefix sampId (addShadowPrefix shadow "Cube")
-- | Append @\"Shadow\"@ to a sampler type name when the sampler is a
-- shadow sampler; otherwise leave the name unchanged.
addShadowPrefix :: Bool -> String -> String
addShadowPrefix isShadow name
    | isShadow  = name ++ "Shadow"
    | otherwise = name
-- | Emit a GLSL @textureSize(sampler, level)@ expression for the named
-- sampler.
getTextureSize :: String -> Int -> String -> S c Int -> ExprM String
getTextureSize prefix sampId sName l = do s <- useSampler prefix sName sampId
                                          l' <- unS l
                                          return $ "textureSize(" ++ s ++ ',' : l' ++ ")"
-- Build the GLSL expression for a color texture sample. The first
-- argument is a type proxy for the color element type only; the result
-- is a 4-component vector of the dynamic GLSL type @sDynType@.
sample :: e -> String -> String -> String -> Int -> SampleLod lcoord x -> SampleProj x -> Maybe off -> coord -> (coord -> ExprM String) -> (lcoord -> ExprM String) -> (off -> String) -> (coord -> S x Float -> ExprM String) -> V4 (S x e)
sample _ prefix sDynType sName sampId lod proj off coord vToS lvToS ivToS pvToS =
    vec4S (STypeDyn sDynType) $ do s <- useSampler prefix sName sampId
                                   sampleFunc s proj lod off coord vToS lvToS ivToS pvToS
-- Shadow samples compare against a reference value packed into the
-- coordinate and yield a single float.
sampleShadow :: String -> Int -> SampleLod lcoord x -> SampleProj x -> Maybe off -> coord -> (coord -> ExprM String) -> (lcoord -> ExprM String) -> (off -> String) -> (coord -> S x Float -> ExprM String) -> S x Float
sampleShadow sName sampId lod proj off coord vToS lvToS civToS pvToS =
    scalarS STypeFloat $ do s <- useSampler "" (sName ++ "Shadow") sampId
                            sampleFunc s proj lod off coord vToS lvToS civToS pvToS
-- Unfiltered texel fetch at an exact integer coordinate and LOD.
fetch :: e -> String -> String -> String -> Int -> S x Int -> Maybe off -> coord -> (coord -> ExprM String) -> (off -> String) -> V4 (S x e)
fetch _ prefix sDynType sName sampId lod off coord ivToS civToS =
    vec4S (STypeDyn sDynType) $ do s <- useSampler prefix sName sampId
                                   fetchFunc s off coord lod ivToS civToS
-- Render float coordinate vectors as GLSL constructor expressions,
-- e.g. @vec3(x,y,z)@; the 1-component case is the raw expression.
v1toF :: S c Float -> ExprM String
v2toF :: V2 (S c Float) -> ExprM String
v3toF :: V3 (S c Float) -> ExprM String
v4toF :: V4 (S c Float) -> ExprM String
v1toF = unS
v2toF (V2 x y) = do x' <- unS x
                    y' <- unS y
                    return $ "vec2(" ++ x' ++ ',':y' ++ ")"
v3toF (V3 x y z) = do x' <- unS x
                      y' <- unS y
                      z' <- unS z
                      return $ "vec3(" ++ x' ++ ',':y' ++ ',':z' ++ ")"
v4toF (V4 x y z w) = do x' <- unS x
                        y' <- unS y
                        z' <- unS z
                        w' <- unS w
                        return $ "vec4(" ++ x' ++ ',':y' ++ ',':z' ++ ',':w' ++ ")"
-- Render integer coordinate vectors as GLSL @ivec*@ constructor
-- expressions; the 1-component case is the raw expression.
iv1toF :: S c Int -> ExprM String
iv2toF :: V2 (S c Int) -> ExprM String
iv3toF :: V3 (S c Int) -> ExprM String
iv1toF = unS
iv2toF (V2 x y) = do x' <- unS x
                     y' <- unS y
                     return $ "ivec2(" ++ x' ++ ',':y' ++ ")"
iv3toF (V3 x y z) = do x' <- unS x
                       y' <- unS y
                       z' <- unS z
                       return $ "ivec3(" ++ x' ++ ',':y' ++ ',':z' ++ ")"
-- Render compile-time constant texel offsets as GLSL integer literals /
-- @ivec*@ constructor expressions.
civ1toF :: Int -> String
civ2toF :: V2 Int -> String
civ3toF :: V3 Int -> String
civ1toF n = show n
civ2toF (V2 x y) = concat ["ivec2(", show x, ",", show y, ")"]
civ3toF (V3 x y z) = concat ["ivec3(", show x, ",", show y, ",", show z, ")"]
-- Projection variants: the projection component is appended as the
-- vector's last component (GLSL "Proj" sampling divides by it).
pv1toF :: S c Float -> S c Float -> ExprM String
pv2toF :: V2 (S c Float) -> S c Float -> ExprM String
pv3toF :: V3 (S c Float) -> S c Float -> ExprM String
pv1toF x y = do x' <- unS x
                y' <- unS y
                return $ "vec2(" ++ x' ++ ',':y' ++ ")"
pv2toF (V2 x y) z = do x' <- unS x
                       y' <- unS y
                       z' <- unS z
                       return $ "vec3(" ++ x' ++ ',':y' ++ ',':z' ++ ")"
pv3toF (V3 x y z) w = do x' <- unS x
                         y' <- unS y
                         z' <- unS z
                         w' <- unS w
                         return $ "vec4(" ++ x' ++ ',':y' ++ ',':z' ++ ',':w' ++ ")"
-- Assemble the full GLSL sampling call, e.g.
-- @textureProjLodOffset(s, coord, lod, off)@, from the sampler name and
-- the optional projection, LOD/gradient/bias and constant offset parts.
sampleFunc s proj lod off coord vToS lvToS civToS pvToS = do
    pc <- projCoordParam proj
    l <- lodParam lod
    b <- biasParam lod
    return $ "texture" ++ projName proj ++ lodName lod ++ offName off ++ '(' : s ++ ',' : pc ++ l ++ o ++ b ++ ")"
  where
    o = offParam off civToS
    -- "Proj" variants take the coordinate extended with the projection
    -- component as its last element.
    projName Nothing = ""
    projName _ = "Proj"
    projCoordParam Nothing = vToS coord
    projCoordParam (Just p) = pvToS coord p
    -- Explicit LOD or gradient arguments, rendered with a leading comma.
    lodParam (SampleLod x) = fmap (',':) (unS x)
    lodParam (SampleGrad x y) = (++) <$> fmap (',':) (lvToS x) <*> fmap (',':) (lvToS y)
    lodParam _ = return ""
    -- A bias is passed as a trailing extra argument instead of a
    -- function-name suffix.
    biasParam :: SampleLod v x -> ExprM String
    biasParam (SampleBias (S x)) = do x' <- x
                                      return $ ',':x'
    biasParam _ = return ""
    lodName (SampleLod _) = "Lod"
    lodName (SampleGrad _ _) = "Grad"
    lodName _ = ""
-- Assemble the GLSL texel-fetch call: integer coordinate, explicit
-- level of detail and an optional constant offset.
-- Fixed: the GLSL built-in is named @texelFetch@ \/ @texelFetchOffset@;
-- the previous code emitted @fetch@\/@fetchOffset@, which are not GLSL
-- functions, so the generated shader could not compile.
fetchFunc s off coord lod vToS civToS = do
    c <- vToS coord
    l <- unS lod
    return $ "texelFetch" ++ offName off ++ '(' : s ++ ',' : c ++ ',' : l ++ o ++ ")"
  where
    o = offParam off civToS
-- | Render an optional constant offset argument: empty for 'Nothing',
-- otherwise a leading comma followed by the rendered offset.
offParam :: Maybe t -> (t -> String) -> String
offParam mOff render = maybe "" (\off -> ',' : render off) mOff
-- | Function-name suffix used when a constant offset is supplied
-- (selects the GLSL @*Offset@ variant).
offName :: Maybe t -> String
offName = maybe "" (const "Offset")
----------------------------------------------------------------------------------
-- | A texture image is a reference to a 2D array of pixels in a texture. Some textures contain one 'Image' per level of detail while some contain several.
data Image f = Image TexName Int Int (V2 Int) (GLuint -> IO ()) -- the two Ints are the last two fields of FBOKey
instance Eq (Image f) where
    (==) = imageEquals
-- | Compare two images that don't necessarily have the same type.
-- Equality is by texture name and the two FBO key components; the size
-- and attach action are ignored.
imageEquals :: Image a -> Image b -> Bool
imageEquals (Image tn' k1' k2' _ _) (Image tn k1 k2 _ _) = tn' == tn && k1' == k1 && k2' == k2
-- | The action that attaches this image to a framebuffer attachment point.
getImageBinding :: Image t -> GLuint -> IO ()
getImageBinding (Image _ _ _ _ io) = io
-- | Build the FBO cache key for this image (reads the texture name ref).
getImageFBOKey :: Image t -> IO FBOKey
getImageFBOKey (Image tn k1 k2 _ _) = do tn' <- readIORef tn
                                         return $ FBOKey tn' k1 k2
-- | Retrieve the 2D size of an image
imageSize :: Image f -> V2 Int
imageSize (Image _ _ _ s _) = s
-- | Get an 'Image' from a specific level (and layer\/face where
-- applicable) of a texture, for use as a render target.
getTexture1DImage :: Texture1D os f -> Level -> Render os f' (Image f)
getTexture1DArrayImage :: Texture1DArray os f -> Level -> Int -> Render os f' (Image f)
getTexture2DImage :: Texture2D os f -> Level -> Render os f' (Image f)
getTexture2DArrayImage :: Texture2DArray os f -> Level -> Int -> Render os f' (Image f)
getTexture3DImage :: Texture3D os f -> Level -> Int -> Render os f' (Image f)
getTextureCubeImage :: TextureCube os f -> Level -> CubeSide -> Render os f' (Image f)
-- Record the texture as written by this render pass — see
-- 'registerRenderWriteTexture' for how that information is used.
registerRenderWriteTextureName tn = Render (lift $ lift $ lift $ readIORef tn) >>= registerRenderWriteTexture . fromIntegral
-- Each getter clamps the requested level to the texture's maximum level
-- and returns an 'Image' whose attach action binds the right level /
-- layer / face to the given framebuffer attachment point. Layer indices
-- are clamped to the last layer.
getTexture1DImage t@(Texture1D tn _ ls) l' =
    let l = min ls l'
    in do registerRenderWriteTextureName tn
          return $ Image tn 0 l (V2 (texture1DSizes t !! l) 1) $ \attP ->
              do { n <- readIORef tn; glFramebufferTexture1D GL_DRAW_FRAMEBUFFER attP GL_TEXTURE_1D n (fromIntegral l) }
getTexture1DArrayImage t@(Texture1DArray tn _ ls) l' y' =
    let l = min ls l'
        V2 x y = texture1DArraySizes t !! l
    in do registerRenderWriteTextureName tn
          return $ Image tn y' l (V2 x 1) $ \attP ->
              do { n <- readIORef tn; glFramebufferTextureLayer GL_DRAW_FRAMEBUFFER attP n (fromIntegral l) (fromIntegral $ min y' (y-1)) }
getTexture2DImage t@(Texture2D tn _ ls) l' =
    let l = min ls l'
    in do registerRenderWriteTextureName tn
          return $ Image tn 0 l (texture2DSizes t !! l) $ \attP ->
              do { n <- readIORef tn; glFramebufferTexture2D GL_DRAW_FRAMEBUFFER attP GL_TEXTURE_2D n (fromIntegral l) }
-- Renderbuffers have a single level; -1 marks them in the FBO key.
getTexture2DImage t@(RenderBuffer2D tn _) _ =
    return $ Image tn (-1) 0 (head $ texture2DSizes t) $ \attP ->
        do { n <- readIORef tn; glFramebufferRenderbuffer GL_DRAW_FRAMEBUFFER attP GL_RENDERBUFFER n }
getTexture2DArrayImage t@(Texture2DArray tn _ ls) l' z' =
    let l = min ls l'
        V3 x y z = texture2DArraySizes t !! l
    in do registerRenderWriteTextureName tn
          return $ Image tn z' l (V2 x y) $ \attP ->
              do { n <- readIORef tn; glFramebufferTextureLayer GL_DRAW_FRAMEBUFFER attP n (fromIntegral l) (fromIntegral $ min z' (z-1)) }
getTexture3DImage t@(Texture3D tn _ ls) l' z' =
    let l = min ls l'
        V3 x y z = texture3DSizes t !! l
    in do registerRenderWriteTextureName tn
          return $ Image tn z' l (V2 x y) $ \attP ->
              do { n <- readIORef tn; glFramebufferTextureLayer GL_DRAW_FRAMEBUFFER attP n (fromIntegral l) (fromIntegral $ min z' (z-1)) }
-- The cube face enum doubles as the second FBO key component.
getTextureCubeImage t@(TextureCube tn _ ls) l' s =
    let l = min ls l'
        x = textureCubeSizes t !! l
        s' = getGlCubeSide s
    in do registerRenderWriteTextureName tn
          return $ Image tn (fromIntegral s') l (V2 x x) $ \attP ->
              do { n <- readIORef tn; glFramebufferTexture2D GL_DRAW_FRAMEBUFFER attP s' n (fromIntegral l) }
-- | Map a 'CubeSide' to the corresponding GL cube-map face enum.
getGlCubeSide :: CubeSide -> GLenum
getGlCubeSide side = case side of
    CubePosX -> GL_TEXTURE_CUBE_MAP_POSITIVE_X
    CubeNegX -> GL_TEXTURE_CUBE_MAP_NEGATIVE_X
    CubePosY -> GL_TEXTURE_CUBE_MAP_POSITIVE_Y
    CubeNegY -> GL_TEXTURE_CUBE_MAP_NEGATIVE_Y
    CubePosZ -> GL_TEXTURE_CUBE_MAP_POSITIVE_Z
    CubeNegZ -> GL_TEXTURE_CUBE_MAP_NEGATIVE_Z
|
Teaspot-Studio/GPipe-Core
|
src/Graphics/GPipe/Internal/Texture.hs
|
mit
| 86,727 | 0 | 21 | 28,840 | 28,122 | 13,809 | 14,313 | -1 | -1 |
-- Problems/Problem013.hs
module Problems.Problem013 (p13) where
import Helpers.Numbers
-- | Print the answer to Project Euler problem 13.
main :: IO ()
main = print p13
-- | The first ten digits of the sum of the one hundred 50-digit numbers.
-- NOTE(review): assumes 'intToDigits' yields digits most-significant
-- first — confirm in Helpers.Numbers.
p13 :: Integer
p13 = read (concatMap show firstTen)
  where
    firstTen = take 10 (intToDigits (sum digitNumbers))
-- | The one hundred 50-digit numbers given in Project Euler problem 13.
digitNumbers :: [Integer]
digitNumbers = [37107287533902102798797998220837590246510135740250,
                46376937677490009712648124896970078050417018260538,
                74324986199524741059474233309513058123726617309629,
                91942213363574161572522430563301811072406154908250,
                23067588207539346171171980310421047513778063246676,
                89261670696623633820136378418383684178734361726757,
                28112879812849979408065481931592621691275889832738,
                44274228917432520321923589422876796487670272189318,
                47451445736001306439091167216856844588711603153276,
                70386486105843025439939619828917593665686757934951,
                62176457141856560629502157223196586755079324193331,
                64906352462741904929101432445813822663347944758178,
                92575867718337217661963751590579239728245598838407,
                58203565325359399008402633568948830189458628227828,
                80181199384826282014278194139940567587151170094390,
                35398664372827112653829987240784473053190104293586,
                86515506006295864861532075273371959191420517255829,
                71693888707715466499115593487603532921714970056938,
                54370070576826684624621495650076471787294438377604,
                53282654108756828443191190634694037855217779295145,
                36123272525000296071075082563815656710885258350721,
                45876576172410976447339110607218265236877223636045,
                17423706905851860660448207621209813287860733969412,
                81142660418086830619328460811191061556940512689692,
                51934325451728388641918047049293215058642563049483,
                62467221648435076201727918039944693004732956340691,
                15732444386908125794514089057706229429197107928209,
                55037687525678773091862540744969844508330393682126,
                18336384825330154686196124348767681297534375946515,
                80386287592878490201521685554828717201219257766954,
                78182833757993103614740356856449095527097864797581,
                16726320100436897842553539920931837441497806860984,
                48403098129077791799088218795327364475675590848030,
                87086987551392711854517078544161852424320693150332,
                59959406895756536782107074926966537676326235447210,
                69793950679652694742597709739166693763042633987085,
                41052684708299085211399427365734116182760315001271,
                65378607361501080857009149939512557028198746004375,
                35829035317434717326932123578154982629742552737307,
                94953759765105305946966067683156574377167401875275,
                88902802571733229619176668713819931811048770190271,
                25267680276078003013678680992525463401061632866526,
                36270218540497705585629946580636237993140746255962,
                24074486908231174977792365466257246923322810917141,
                91430288197103288597806669760892938638285025333403,
                34413065578016127815921815005561868836468420090470,
                23053081172816430487623791969842487255036638784583,
                11487696932154902810424020138335124462181441773470,
                63783299490636259666498587618221225225512486764533,
                67720186971698544312419572409913959008952310058822,
                95548255300263520781532296796249481641953868218774,
                76085327132285723110424803456124867697064507995236,
                37774242535411291684276865538926205024910326572967,
                23701913275725675285653248258265463092207058596522,
                29798860272258331913126375147341994889534765745501,
                18495701454879288984856827726077713721403798879715,
                38298203783031473527721580348144513491373226651381,
                34829543829199918180278916522431027392251122869539,
                40957953066405232632538044100059654939159879593635,
                29746152185502371307642255121183693803580388584903,
                41698116222072977186158236678424689157993532961922,
                62467957194401269043877107275048102390895523597457,
                23189706772547915061505504953922979530901129967519,
                86188088225875314529584099251203829009407770775672,
                11306739708304724483816533873502340845647058077308,
                82959174767140363198008187129011875491310547126581,
                97623331044818386269515456334926366572897563400500,
                42846280183517070527831839425882145521227251250327,
                55121603546981200581762165212827652751691296897789,
                32238195734329339946437501907836945765883352399886,
                75506164965184775180738168837861091527357929701337,
                62177842752192623401942399639168044983993173312731,
                32924185707147349566916674687634660915035914677504,
                99518671430235219628894890102423325116913619626622,
                73267460800591547471830798392868535206946944540724,
                76841822524674417161514036427982273348055556214818,
                97142617910342598647204516893989422179826088076852,
                87783646182799346313767754307809363333018982642090,
                10848802521674670883215120185883543223812876952786,
                71329612474782464538636993009049310363619763878039,
                62184073572399794223406235393808339651327408011116,
                66627891981488087797941876876144230030984490851411,
                60661826293682836764744779239180335110989069790714,
                85786944089552990653640447425576083659976645795096,
                66024396409905389607120198219976047599490197230297,
                64913982680032973156037120041377903785566085089252,
                16730939319872750275468906903707539413042652315011,
                94809377245048795150954100921645863754710598436791,
                78639167021187492431995700641917969777599028300699,
                15368713711936614952811305876380278410754449733078,
                40789923115535562561142322423255033685442488917353,
                44889911501440648020369068063960672322193204149535,
                41503128880339536053299340368006977710650566631954,
                81234880673210146739058568557934581403627822703280,
                82616570773948327592232845941706525094512325230608,
                22918802058777319719839450180888072429661980811197,
                77158542502016545090413245809786882778948721859617,
                72107838435069186155435662884062257473692284509516,
                20849603980134001723930671666823555245252804609722,
                53503534226472524250874054075591789781264330331690]
|
Sgoettschkes/learning
|
haskell/ProjectEuler/src/Problems/Problem013.hs
|
mit
| 5,833 | 0 | 9 | 534 | 373 | 240 | 133 | 106 | 1 |
{-# LANGUAGE CPP, NoImplicitPrelude #-}
-- | Compatibility re-export of "Data.Ratio".  On base >= 4.4 and < 4.9
-- this module hides base's 'numerator' and 'denominator' and exports its
-- own versions, defined below directly on the raw ':%' constructor; on
-- any other base version it simply re-exports "Data.Ratio" unchanged.
module Data.Ratio.Compat (
module Base
#if MIN_VERSION_base(4,4,0) && !(MIN_VERSION_base(4,9,0))
, denominator
, numerator
#endif
) where
#if !(MIN_VERSION_base(4,4,0)) || MIN_VERSION_base(4,9,0)
import Data.Ratio as Base
#else
-- Hide the base versions so the definitions below can take their place.
import Data.Ratio as Base hiding (
denominator
, numerator
)
-- Ratio(..) exposes the ':%' constructor pattern-matched below.
import GHC.Real (Ratio(..))
#endif
#if MIN_VERSION_base(4,4,0) && !(MIN_VERSION_base(4,9,0))
-- | Extract the numerator of the ratio in reduced form:
-- the numerator and denominator have no common factor and the denominator
-- is positive.
numerator :: Ratio a -> a
numerator (x :% _) = x
-- | Extract the denominator of the ratio in reduced form:
-- the numerator and denominator have no common factor and the denominator
-- is positive.
denominator :: Ratio a -> a
denominator (_ :% y) = y
#endif
|
haskell-compat/base-compat
|
base-compat/src/Data/Ratio/Compat.hs
|
mit
| 843 | 0 | 7 | 139 | 94 | 60 | 34 | 4 | 0 |
module MuevalSlack.Arguments (
getArguments
) where
import Control.Applicative ((<*>))
import Data.Monoid ((<>))
import Options.Applicative (Parser, execParser, strOption, long, short, metavar, info, helper, fullDesc)
-- | Parser for the single @--config@ / @-c@ command line option holding
-- the configuration file path.
parseArguments :: Parser FilePath
parseArguments =
    strOption (mconcat [long "config", short 'c', metavar "/path/to/file.config"])

-- | Parse the program's command line and return the config file path.
getArguments :: IO FilePath
getArguments = execParser (info (helper <*> parseArguments) fullDesc)
|
zalora/slack-mueval
|
src/MuevalSlack/Arguments.hs
|
mit
| 446 | 0 | 8 | 58 | 132 | 76 | 56 | 9 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE OverloadedLabels #-}
module GUI.Build (
-- *Types
GUIControl
-- *Functions
, makeGUI
) where
import Control.Concurrent.Chan(Chan)
import Data.Maybe(fromJust)
import Data.Text(Text)
import qualified Data.Text as T
import GI.Gdk (screenGetDefault)
import GI.Gtk hiding (MessageDialog)
import Paths_hrows(getDataFileName)
import GUI.BuildMonad
import GUI.CanBeCast
import GUI.Control
import GUI.DialogManager.Build
import GUI.Iteration
import GUI.ListingWindow.Build
import GUI.MainWindow.Build
import GUI.HKD
import Presenter.Input
-- | Initialise GTK, load the glade UI description and the CSS style
-- sheet, build the 'GUIControl', and run the per-window configuration
-- steps.  The input channel is wired into every sub-component.
makeGUI :: Chan Input -> IO GUIControl
makeGUI iChan = do
-- GTK must be initialised before any widget work below.
_ <- GI.Gtk.init Nothing
gladefn <- getDataFileName "src/hrows.glade"
builder <- builderNewFromFile $ T.pack gladefn
styleFile <- getDataFileName "src/hrows.css"
provider <- cssProviderNew
cssProviderLoadFromPath provider $ T.pack styleFile
-- NOTE(review): partial pattern — this crashes if no screen is
-- available (e.g. running without a display server).
Just screen <- screenGetDefault
styleContextAddProviderForScreen screen provider $ fromIntegral STYLE_PROVIDER_PRIORITY_APPLICATION
control <- prepareControl iChan builder
runBuild builder control $ do
configureMainWindow
configureListingWindow $ listingWindow control
configureDialogManager
return control
-- | Build the 'GUIControl' record from the glade 'Builder', wiring the
-- presenter input channel into each sub-component.  Each record field
-- is an @IO@ action; 'fromIO' (presumably the HKD runner from
-- "GUI.HKD" — confirm) executes them to produce the final record.
--
-- The original also defined a local @getObject@ helper that was never
-- referenced anywhere in the body; it has been removed.
prepareControl :: Chan Input -> Builder -> IO GUIControl
prepareControl iChan builder =
    fromIO GUIControl {
                  mainWindow = buildMainWindow iChan builder
                , listingWindow = buildListingWindow iChan builder
                , inputChan = return iChan
                , dialogManager = buildDialogManager builder
                }
-- | Input event announcing that the feature named @f@ is not
-- implemented yet (user-facing message is in Spanish, kept verbatim).
notImplementedDialog :: Text -> Input
notImplementedDialog f =
    toInput (MessageDialog (InformationMessage msg))
  where
    msg = T.concat ["Opción ", f, " no implementada"]
|
jvilar/hrows
|
lib/GUI/Build.hs
|
gpl-2.0
| 1,864 | 0 | 12 | 330 | 465 | 238 | 227 | 48 | 1 |
-- | Definición de los errores de matching.
module Equ.Matching.Error where
import Equ.PreExpr
-- | Errores de matching.
-- | Errors that can arise while matching expressions.
data MatchError = DoubleMatch Variable FlatExpr FlatExpr
-- ^ The variable was already matched to a different sub-expression
-- (see the 'Show' instance below for the exact wording).
| BindingVar Variable
-- ^ Attempted to match against a binding (quantified) variable.
| InequPreExpr FlatExpr FlatExpr
-- ^ Structurally different expressions.
| InequOperator Operator Operator
-- ^ Different operators.
| InequQuantifier Quantifier Quantifier
-- ^ Different quantifiers.
| SubTermsAC Operator [FlatExpr] [FlatExpr]
-- ^ Presumably: mismatched subterm lists of an AC operator — confirm.
| NOperands Operator [FlatExpr] [FlatExpr]
-- ^ Presumably: differing operand counts for an operator — confirm.
deriving Eq
-- | Pretty print de errores de matching.
-- | Human-readable rendering of matching errors.  The three
-- inequality constructors share the same @"a" =/= "b"@ wording, so it
-- is produced by a single local helper.
instance Show MatchError where
    show err = case err of
        DoubleMatch v pe pe' -> "Variable \"" ++ show v ++
                                "\" matched with \"" ++ show pe ++
                                "\" fail to match with \"" ++ show pe' ++ "\""
        BindingVar v         -> "Binding variable \"" ++ show v ++ "\""
        InequPreExpr e e'    -> ineq e e'
        InequOperator e e'   -> ineq e e'
        InequQuantifier e e' -> ineq e e'
        _                    -> "otro error"
      where
        ineq a b = "\"" ++ show a ++ "\" =/= \"" ++ show b ++ "\""
|
miguelpagano/equ
|
Equ/Matching/Error.hs
|
gpl-3.0
| 1,398 | 0 | 12 | 629 | 293 | 151 | 142 | 25 | 0 |
module LambdaLine.Util
( ProcessResponse
, cycle3
, cycle4
, deleteNulls
, getPromptType
, getTerminalWidth
, isResponseNull
, parseProcessResponse
, splitOnNewLine
, stdOutListAny
, trimString
)
where
import Control.Monad
import Data.Functor((<$>))
import System.Environment(getArgs)
import Data.List as L
import Data.List.Split as SP
import Data.Text as T
import System.Exit
-- | The triple produced by a finished external process:
-- exit code, stdout and stderr.
type ProcessResponse = IO (ExitCode, String, String)

-- | Move the first argument of a ternary function to the last
-- position: @cycle3 f y z x == f x y z@.
cycle3 :: (a -> b -> c -> d) -> b -> c -> a -> d
cycle3 f second third first = f first second third

-- | Move the first argument of a four-argument function to the last
-- position: @cycle4 f x y z w == f w x y z@.
cycle4 :: (a -> b -> c -> d -> e) -> b -> c -> d -> a -> e
cycle4 f second third fourth first = f first second third fourth

-- | Drop all empty lists from a list of lists.
deleteNulls :: [[a]] -> [[a]]
deleteNulls xss = [ xs | xs <- xss, not (L.null xs) ]
-- | First command line argument, interpreted as the prompt type.
-- NOTE(review): 'L.head' is partial — this crashes when the program is
-- run with no arguments.
getPromptType :: IO String
getPromptType = L.head <$> getArgs
-- | Second command line argument, expected to hold the terminal width.
-- NOTE(review): '(!!1)' is partial — crashes with fewer than two args.
getTerminalWidth :: IO String
getTerminalWidth = (!!1) <$> getArgs
-- | True when the trimmed response still contains at least one
-- non-empty line.
-- NOTE(review): despite the name, this returns True for a NON-null
-- response — the name reads inverted.  Callers appear to rely on the
-- current behaviour, so confirm before renaming or flipping it.
isResponseNull :: String -> Bool
isResponseNull = not . L.null . splitOnNewLine . trimString
-- | Wait for a process response and return its trimmed stdout on
-- success, or 'Nothing' when the process exited with a failure code.
parseProcessResponse :: ProcessResponse -> IO (Maybe String)
parseProcessResponse processResponse = do
  (code, out, _err) <- processResponse
  return $ case code of
    ExitSuccess   -> Just (trimString out)
    ExitFailure _ -> Nothing
-- | Split on newline characters, discarding the empty fragments.
splitOnNewLine :: String -> [String]
splitOnNewLine str = L.filter (not . L.null) (SP.splitOn "\n" str)

-- | Does the (optional) list of stdout lines contain anything?
stdOutListAny :: IO (Maybe [String]) -> IO (Maybe Bool)
stdOutListAny = fmap (fmap (not . L.null))

-- | Strip leading and trailing whitespace from a 'String' by going
-- through 'Data.Text'.
trimString :: String -> String
trimString = T.unpack . T.strip . T.pack
|
josiah14/lambdaline
|
LambdaLine/Util.hs
|
gpl-3.0
| 1,436 | 0 | 11 | 277 | 548 | 300 | 248 | 43 | 2 |
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Handler.SearchTrans where
import Import
import Data.Char (toLower)
$(deriveJSON defaultOptions ''Transaction)
-- | Return, as JSON, every transaction whose item or description
-- contains the query string (case-insensitively).
getSearchTransR :: String -> Handler Value
getSearchTransR query = do
    trans <- runDB $ selectList [] [] :: Handler [Entity Transaction]
    returnJson (filter matches trans)
  where
    needle = map toLower query
    -- True when the lowered item text followed by the lowered
    -- description text contains the lowered query.
    matches entity =
        let t        = entityVal entity
            haystack = map toLower (unpack (transactionItem t))
                       ++ map toLower (unpack (transactionDescription t))
        in isInfixOf needle haystack
|
weshack/thelist
|
TheList/Handler/SearchTrans.hs
|
gpl-3.0
| 484 | 0 | 23 | 71 | 182 | 92 | 90 | -1 | -1 |
module Logic.Config where
import Prelude hiding (FilePath)
import Types.Base
import Types.CliArguments
import Types.LocalConfig
import Types.AppConfig
-- | Resolve the effective 'Config' from (in order of priority):
-- an explicit save file on the command line, the local config file's
-- save file (when no positional args were given), or the default.
-- Any other shape of arguments yields the default file with verbosity
-- off.
build :: FilePath -> Maybe LocalConfig -> CliArgs -> Config
build defaultSaveFile localConfig cliArgs =
  case (localConfig, cliArgs) of
    (Nothing, CliArgs [] verbose) ->
      Config defaultSaveFile verbose
    (Just (LocalConfig (Just saveFile) _), CliArgs [] verbose) ->
      Config saveFile verbose
    (_, CliArgs [saveFile] verbose) ->
      Config saveFile verbose
    _ ->
      Config defaultSaveFile False
|
diegospd/pol
|
app/Logic/Config.hs
|
gpl-3.0
| 702 | 0 | 11 | 201 | 174 | 90 | 84 | 11 | 1 |
{-|
Module : Ranking.Glicko
License : GPL-3
Maintainer : [email protected]
Stability : experimental
-}
module Ranking.Glicko
( module Ranking.Glicko.Core
, module Ranking.Glicko.Inference
, module Ranking.Glicko.Types ) where
import Ranking.Glicko.Core
import Ranking.Glicko.Inference
import Ranking.Glicko.Types
|
Prillan/haskell-glicko
|
src/Ranking/Glicko.hs
|
gpl-3.0
| 333 | 0 | 5 | 52 | 48 | 33 | 15 | 7 | 0 |
{-# LANGUAGE DeriveGeneric, DeriveDataTypeable #-}
module Hive.Master.Messaging
where
-------------------------------------------------------------------------------
import Control.Distributed.Process
import Control.Applicative ((<$>))
import Hive.Types (Master (..), Ticket, Problem, Solution, History)
import Hive.Imports.MkBinary
-------------------------------------------------------------------------------
-- Protocol messages exchanged with the master.  The empty 'Binary'
-- instance bodies rely on the class's default method implementations
-- (presumably Generic-based, via "Hive.Imports.MkBinary" — confirm).
-- | Ask the master for a node; the ProcessId is who should receive the
-- 'ReceiveNode' reply (see 'getNode').
data GetNode = GetNode ProcessId deriving (Generic, Typeable)
instance Binary GetNode where
-- | Reply to 'GetNode' carrying the chosen node.
data ReceiveNode = ReceiveNode NodeId deriving (Generic, Typeable)
instance Binary ReceiveNode where
-- | Hand a node back to the master (see 'returnNode').
data ReturnNode = ReturnNode NodeId deriving (Generic, Typeable)
instance Binary ReturnNode where
-- | A node announces itself together with its worker count
-- (see 'nodeUp').
data NodeUp = NodeUp NodeId Int deriving (Generic, Typeable)
instance Binary NodeUp where
-- | Presumably: a node has gone away.  Not used in this module.
data NodeDown = NodeDown NodeId deriving (Generic, Typeable)
instance Binary NodeDown where
-- | Submit a problem; the ProcessId receives the resulting Ticket
-- (see 'request').
data Request = Request ProcessId Problem deriving (Generic, Typeable)
instance Binary Request where
-- | Presumably: report a finished ticket's solution.  Not used in this
-- module.
data TicketDone = TicketDone ProcessId Ticket Solution deriving (Generic, Typeable)
instance Binary TicketDone where
-- | Ask for the history between two tickets (see 'requestHistory').
data RequestHistory = RequestHistory ProcessId Ticket Ticket deriving (Generic, Typeable)
instance Binary RequestHistory where
-- | Reply to 'RequestHistory'.
data ReplyHistory = ReplyHistory History deriving (Generic, Typeable)
instance Binary ReplyHistory where
-- | Ask for the most recently issued ticket
-- (see 'requestLatestTicket').
data RequestLatestTicket = RequestLatestTicket ProcessId deriving (Generic, Typeable)
instance Binary RequestLatestTicket where
-- | Reply to 'RequestLatestTicket'.
data ReplyLatestTicket = ReplyLatestTicket Ticket deriving (Generic, Typeable)
instance Binary ReplyLatestTicket where
-- | Shut the master (or fake master) down.
data Terminate = Terminate deriving (Generic, Typeable)
instance Binary Terminate where
-- | Wipe the stored history (see 'deleteHistory').
data DeleteHistory = DeleteHistory deriving (Generic, Typeable)
instance Binary DeleteHistory where
-------------------------------------------------------------------------------
-- | Tell the master that a node with the given worker count is up.
nodeUp :: Master -> NodeId -> Int -> Process ()
nodeUp (Master masterPid) nid workers = send masterPid (NodeUp nid workers)

-- | Ask the master for a node on behalf of @asker@ and block until the
-- 'ReceiveNode' reply arrives.
getNode :: Master -> ProcessId -> Process NodeId
getNode (Master masterPid) asker = do
  send masterPid (GetNode asker)
  ReceiveNode nid <- expect
  return nid
-- | Spawn a local process that impersonates a master: it answers every
-- 'GetNode' with its own node and stops on 'Terminate'.  It is linked
-- to the given process, so it dies together with it.
getFakeMaster :: ProcessId -> Process Master
getFakeMaster pid = return . Master =<< spawnLocal (fakeMaster pid)
where
fakeMaster :: ProcessId -> Process ()
-- Note: this @pid@ shadows the outer one (they are the same value).
fakeMaster pid = do
link pid
listen
listen :: Process ()
-- Message loop: reply to GetNode with our own NodeId and recurse;
-- a Terminate message simply returns, ending the loop.
listen = receiveWait [ match $ \(GetNode asker) -> do
self <- processNodeId <$> getSelfPid
send asker (ReceiveNode self)
listen
, match $ \Terminate ->
return ()
]
-- | Ask the master to shut down.
terminateMaster :: Master -> Process ()
terminateMaster (Master masterPid) = send masterPid Terminate

-- | Hand a node back to the master's pool.
returnNode :: Master -> NodeId -> Process ()
returnNode (Master masterPid) nid = send masterPid (ReturnNode nid)

-- | Submit a problem to the master; the resulting 'Ticket' arrives as
-- a plain message, so we just 'expect' it.
request :: Master -> Problem -> Process Ticket
request (Master masterPid) problem =
  getSelfPid >>= \me -> send masterPid (Request me problem) >> expect
-- | Ask the master for the history between two tickets and wait for
-- the 'ReplyHistory' answer.
requestHistory :: Master -> Ticket -> Ticket -> Process History
requestHistory (Master masterPid) firstTicket lastTicket = do
  me <- getSelfPid
  send masterPid (RequestHistory me firstTicket lastTicket)
  ReplyHistory history <- expect
  return history

-- | Send a 'ReplyHistory' message to the given process.
replyHistory :: ProcessId -> History -> Process ()
replyHistory target history = send target (ReplyHistory history)

-- | Ask the master for the most recently issued ticket and wait for
-- the 'ReplyLatestTicket' answer.
requestLatestTicket :: Master -> Process Ticket
requestLatestTicket (Master masterPid) = do
  me <- getSelfPid
  send masterPid (RequestLatestTicket me)
  ReplyLatestTicket ticket <- expect
  return ticket

-- | Send a 'ReplyLatestTicket' message to the given process.
replyLatestTicket :: ProcessId -> Ticket -> Process ()
replyLatestTicket target ticket = send target (ReplyLatestTicket ticket)

-- | Ask the master to wipe its stored history.
deleteHistory :: Master -> Process ()
deleteHistory (Master masterPid) = send masterPid DeleteHistory
|
chrisbloecker/Hive
|
src/Hive/Master/Messaging.hs
|
gpl-3.0
| 3,882 | 0 | 15 | 705 | 1,088 | 551 | 537 | 79 | 1 |
module Model.Transaction where
import Import
import qualified Data.Map as M
import Model.User
-- | Render a link to the account on the other side of a transaction:
-- for a credit we look at the debit side and vice versa.  The account
-- id is looked up among both project and user accounts; a missing id
-- renders as plain "deposited"/"withdrawn" text.
renderOtherAccount :: Bool -> Transaction -> M.Map AccountId (Entity User) -> M.Map AccountId (Entity Project) -> Widget
renderOtherAccount is_credit transaction user_accounts project_accounts = do
let maybe_account_id = if is_credit
then transactionDebit transaction
else transactionCredit transaction
-- 'maybe Nothing (`M.lookup` m)' is just Maybe's bind: look the id up
-- when there is one.
maybe_project = maybe Nothing (`M.lookup` project_accounts) maybe_account_id
maybe_user = maybe Nothing (`M.lookup` user_accounts) maybe_account_id
-- An account may belong to a project, a user, neither — but never both.
toWidget $ case (maybe_project, maybe_user) of
(Just _, Just _) -> error "account belongs to both a project and a user — this shouldn't happen"
(Just (Entity _ project), Nothing) ->
[hamlet|
<a href="@{ProjectR (projectHandle project)}">
#{projectName project}
|]
(Nothing, Just (Entity user_id user)) ->
[hamlet|
<a href="@{UserR user_id}">
#{userDisplayName (Entity user_id user)}
|]
(Nothing, Nothing) ->
if is_credit
then
[hamlet|
deposited
|]
else
[hamlet|
withdrawn
|]
|
Happy0/snowdrift
|
Model/Transaction.hs
|
agpl-3.0
| 1,395 | 0 | 14 | 501 | 266 | 151 | 115 | -1 | -1 |
-- | Parsing of documentation nodes.
module Data.GI.GIR.Documentation
( Documentation(..)
, queryDocumentation
) where
import Data.Text (Text)
import Text.XML (Element)
import Data.GI.GIR.XMLUtils (firstChildWithLocalName, getElementContent)
-- | Documentation for a given element.
-- | Documentation for a given element.
data Documentation = Documentation {
-- | Raw documentation text as found in the GIR file.
docText :: Text
} deriving (Show, Eq)
-- | Parse the documentation node for the given element of the GIR file.
-- | Find the first @doc@ child of the element and, when it has text
-- content, wrap that text as 'Documentation'.
queryDocumentation :: Element -> Maybe Documentation
queryDocumentation element =
  Documentation <$> (firstChildWithLocalName "doc" element >>= getElementContent)
|
hamishmack/haskell-gi
|
lib/Data/GI/GIR/Documentation.hs
|
lgpl-2.1
| 623 | 0 | 8 | 103 | 121 | 72 | 49 | 12 | 1 |
{-#LANGUAGE DeriveDataTypeable#-}
module Data.P440.Domain.PNO where
import Data.P440.Domain.SimpleTypes
import Data.P440.Domain.ComplexTypes
import Data.Typeable (Typeable)
import Data.Text (Text)
-- 2.3 Инкассовое поручение
-- | File envelope: message id, producing program version and sender
-- contact details, plus the payment order itself.
data Файл = Файл {
идЭС :: GUID
,версПрог :: Text
,телОтпр :: Text
,должнОтпр :: Text
,фамОтпр :: Text
,поручНО :: ПоручНО
} deriving (Eq, Show, Typeable)
-- | The payment order proper.  Field names follow the (Russian) names
-- from the 440-P exchange format; Maybe marks optional fields.
data ПоручНО = ПоручНО {
номПоруч :: Text
,датаПодп :: Date
,видПлат :: Maybe Text
,сумПлат :: Text
,статус :: Maybe Text
,инннп :: Text
,кппнп :: Maybe КПП
,плательщ :: Text
,номСчПлИлиКЭСП :: Maybe НомСчПлИлиКЭСП
,банкПл :: Text
,бикбПл :: БИК
,номСчБПл :: Maybe Text
,номФ :: Text
,банкПол :: Text
,бикбПол :: БИК
,номСчБПол :: Maybe Text
,иннПол :: Text
,кппПол :: Maybe КПП
,получ :: Text
,номСчПол :: Maybe Text
,видОп :: Text
,назПлКод :: Maybe Text
,очерПл :: Text
,кодПл :: Maybe Text
,резПоле :: Maybe Text
,назнПл :: Text
,кгн :: Maybe Text
,укгн :: Maybe Text
,кбк :: Text
,октмо :: Text
,кодОсн :: Maybe Text
,срокУплТр :: Maybe Text
,номТреб :: Maybe Text
,датаТреб :: Maybe Date
,типПлат :: Text
,порВал :: Maybe ПорВал
} deriving (Eq, Show, Typeable)
-- | Either a payer account number or an electronic payment id (КЭСП).
data НомСчПлИлиКЭСП = Счет НомСч
| КЭСП Text
deriving (Eq, Show, Typeable)
-- | Currency-order reference: number, date and currency account number.
data ПорВал = ПорВал {
номПорВал :: Text
,датаПорВал :: Date
,номВалСч :: Text
} deriving (Eq, Show, Typeable)
|
Macil-dev/440P-old
|
src/Data/P440/Domain/PNO.hs
|
unlicense
| 2,131 | 134 | 8 | 589 | 1,041 | 546 | 495 | 60 | 0 |
module TrivialModule where
data Trivial = Trivial1 | Trivial2

-- | Two values are equal exactly when they are built from the same
-- constructor.
instance Eq Trivial where
  x == y = case (x, y) of
    (Trivial1, Trivial1) -> True
    (Trivial2, Trivial2) -> True
    _                    -> False
|
ocozalp/Haskellbook
|
chapter6/trivial.hs
|
unlicense
| 179 | 0 | 6 | 42 | 58 | 34 | 24 | 6 | 0 |
-- vim: ts=2 sw=2 et :
{-# LANGUAGE CPP #-}
{- |
Parse the output of ghci's @:history@ command.
-}
module GHCi.History.Parse
where
import GHCi.History
import qualified Parsing
import qualified GHCi.History.Parse.Common as C
#ifdef USE_PARSEC
import qualified Parsing.Parsec as P
#else
import qualified Parsing.ReadP as P
#endif
-- | Given a string containing the output of the ghci @:history@
-- command, parse it using "Text.Parsec", if available,
-- otherwise "Text.ParserCombinators.ReadP".
--
-- Returns either an error message, or a list of 'HistoryItem's.
--
-- Examples
--
-- >>> :{
-- let myHistory = unlines [
-- "-1 : fib (src/Stuff.hs:52:8-16)"
-- , "<end of history>" ]
-- :}
--
-- >>> parseHistory myHistory
-- Right [HistoryItem {histStepNum = -1, funcName = "fib", fileName = "src/Stuff.hs", startPos = FilePos {lineNum = 52, colNum = 8}, endPos = FilePos {lineNum = 52, colNum = 16}}]
--
-- | Run the @:history@ parser over the given string; the
-- "(interactive)" tag is used as the source name in error messages.
parseHistory :: String -> Either String [GHCi.History.HistoryItem]
parseHistory = P.parse C.history "(interactive)"
|
phlummox/ghci-history-parser
|
src/GHCi/History/Parse.hs
|
unlicense
| 1,065 | 0 | 8 | 195 | 96 | 68 | 28 | 9 | 1 |
module Main where
import qualified Data.FormulaBench
import Criterion.Main
import Criterion.Types
-- | Where the HTML benchmark report is written.
reportFilePath :: FilePath
reportFilePath = "mso-bench.html"

-- | Run every benchmark group with an HTML report enabled.
main :: IO ()
main = defaultMainWith cfg benchmarks
  where
    cfg = defaultConfig { reportFile = Just reportFilePath }
    benchmarks =
      [ bgroup "all"
          [ Data.FormulaBench.benchs
          ]
      ]
|
jokusi/mso
|
bench/mso-bench.hs
|
apache-2.0
| 306 | 0 | 9 | 50 | 75 | 43 | 32 | 10 | 1 |
module Graham.A269045Spec (main, spec) where
import Test.Hspec
import Graham.A269045 (a269045)
main :: IO ()
main = hspec spec

-- | Checks the sequence against its first twenty published terms.
spec :: Spec
spec = describe "A269045" $
  it "correctly computes the first 20 elements" $
    map a269045 [1 .. 20] `shouldBe` expectedValue
  where
    expectedValue = [1,2,3,4,6,8,9,10,12,15,16,18,20,24,25,27,28,30,32,35]
|
peterokagey/haskellOEIS
|
test/Graham/A269045Spec.hs
|
apache-2.0
| 360 | 0 | 10 | 59 | 160 | 95 | 65 | 10 | 1 |
-- Copyright 2020 Google LLC
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
{-# LANGUAGE ForeignFunctionInterface #-}
module Main where
import Foreign.C.Types (CInt(..))
foreign import ccall foo :: CInt -> CInt

-- | Call the C function @foo@ with 3 and print the result.
-- (Added the explicit type signature the original omitted; GHC warns
-- about missing top-level signatures under -Wall.)
main :: IO ()
main = print $ foo 3
|
google/cabal2bazel
|
bzl/tests/ffi/SimpleFFI.hs
|
apache-2.0
| 748 | 0 | 6 | 128 | 59 | 41 | 18 | 5 | 1 |
module Hyper.Canvas.Concurrent ( CState
, newCState
, stepCState
, enqueueDraws ) where
import System.IO (hPutStrLn, stderr)
import Data.Queue
import Control.Event.Handler (Handler)
import Control.Concurrent.STM
import Control.Concurrent
import Control.Applicative
import Control.Monad
import qualified Data.Map as M
import Hyper.Canvas.Types
import Hyper.Canvas.JS
-- | Per-channel state: queued frames with their matching delays, plus
-- the frame currently being shown and the absolute time at which the
-- next frame becomes due.
data DrawChan = DrawChan { drawQ :: TChan [Draw]
, delayQ :: TChan Double
, currentDraws :: TVar [Draw]
, nextTime :: TVar Double }
-- | A fresh channel: empty queues ('newFifo' from Data.Queue — its
-- TChan instance is assumed here), no current draws, next time 0.
newDrawChan :: IO DrawChan
newDrawChan = DrawChan
<$> newFifo
<*> newFifo
<*> newTVarIO []
<*> newTVarIO 0
-- | Global state: one 'DrawChan' per channel name, plus the frame list
-- most recently written out (used by 'updateDraws' to suppress
-- duplicate writes).
data CState = CState { dchans :: M.Map String DrawChan
, lastDraws :: TVar [Draw] }
-- | Build a 'CState' containing one fresh channel per given name.
newCState :: [String] -> IO CState
newCState chans = do empty <- CState M.empty <$> newTVarIO []
foldM addNewChan empty chans
-- startBrowserPageRun (stepCState cstate write)
-- | Register a fresh, empty draw channel under the given name.
addNewChan :: CState -> String -> IO CState
addNewChan cstate name = do
  chan <- newDrawChan
  return cstate { dchans = M.insert name chan (dchans cstate) }
-- | Queue a timed sequence of frames on the named channel; every frame
-- carries the same delay.  Unknown channel names are silently ignored.
enqueueDraws :: CState -> String -> Int -> [[Draw]] -> IO ()
enqueueDraws cstate chan delay draws =
  maybe (return ())
        (\dchan -> atomically (sequenceDraws dchan delay draws))
        (M.lookup chan (dchans cstate))

-- | Push every frame of the sequence, tagged with the common delay.
sequenceDraws :: DrawChan -> Int -> [[Draw]] -> STM ()
sequenceDraws dchan delay draws =
  mapM_ (pushToChan dchan (fromIntegral delay)) draws

-- | Enqueue one frame together with its delay.
pushToChan :: DrawChan -> Double -> [Draw] -> STM ()
pushToChan dchan delay draws = do
  enqueue (drawQ dchan) draws
  enqueue (delayQ dchan) delay
-- | One animation tick: advance every channel to the current time,
-- merge their frames, and hand them to @write@ — but only when the
-- merged frame list differs from the one written last tick.
stepCState :: CState -> ([Draw] -> IO ()) -> IO ()
stepCState cstate write =
do time <- now
-- 'orElse' falls back to an empty frame when stepping retries.
let getDraws = atomically (orElse (sequenceSteps time (dchans cstate))
(return [[]]))
allDraws <- concat <$> getDraws
newDraws <- atomically (updateDraws cstate allDraws)
case newDraws of
Just ds -> write ds -- >> hPutStrLn stderr ("writing: " ++ (show ds))
_ -> return ()
-- | Step every channel to the given time, collecting each channel's
-- current frame.
sequenceSteps :: Double -> M.Map String DrawChan -> STM [[Draw]]
sequenceSteps time dchans =
let s :: DrawChan -> STM [Draw]
s = stepChan time
in (sequence . fmap s . fmap snd . M.toList) dchans
-- | Record the new merged frame list; returns Nothing (no write
-- needed) when it equals the previously written one.
updateDraws :: CState -> [Draw] -> STM (Maybe [Draw])
updateDraws cstate newDraws =
do oldDraws <- readTVar (lastDraws cstate)
if newDraws /= oldDraws
then writeTVar (lastDraws cstate) newDraws
>> return (Just newDraws)
else return Nothing
-- | Advance one channel: keep showing the current frame until its due
-- time has passed, then try to dequeue the next frame.
stepChan :: Double -> DrawChan -> STM [Draw]
stepChan time dchan =
do next <- readTVar (nextTime dchan)
oldDraws <- readTVar (currentDraws dchan)
if time >= next
then attemptDeq dchan time oldDraws
else return oldDraws
-- | Pop the next frame and its delay; when both queues yield a value,
-- install the frame and schedule the one after it at @time + delay@.
-- If either queue is empty the current frame stays.
attemptDeq :: DrawChan -> Double -> [Draw] -> STM [Draw]
attemptDeq dchan time oldDraws =
do mNewDelay <- dequeue (delayQ dchan)
mNewDraws <- dequeue (drawQ dchan)
case (mNewDelay, mNewDraws) of
(Just newDelay, Just newDraws) ->
(writeTVar (nextTime dchan) (time + newDelay)
>> writeTVar (currentDraws dchan) newDraws
>> return newDraws)
_ -> return oldDraws
-- curDraws <- readTVar (currentDraws dchan)
-- mNewDelay <- dequeue (delayQ dchan)
-- mNewDraws <- dequeue (drawQ dchan)
-- case (mNewDelay, mNewDraws) of
-- (Just newDelay, Just newDraws) ->
-- if time >= next
-- then writeTVar (nextTime dchan) (time + newDelay)
-- >> writeTVar (currentDraws dchan) newDraws
-- >> return newDraws
-- else return curDraws
-- _ -> return curDraws
|
RoboNickBot/interactive-tree-demos
|
src/Hyper/Canvas/Concurrent.hs
|
bsd-2-clause
| 4,047 | 0 | 16 | 1,225 | 1,189 | 606 | 583 | 86 | 2 |
{-# LANGUAGE OverloadedStrings #-}
-- | NKJP parsing utilities.
module Text.NKJP.Utils
( P, Q
, fStrQ
, fSymQ
, idesQ
, idesQ'
) where
import Control.Applicative
import qualified Data.Text.Lazy as L
import qualified Text.HTML.TagSoup as S
import Text.XML.PolySoup hiding (P, Q)
import qualified Text.XML.PolySoup as P
import Text.NKJP.Ptr
-- | TEI NKJP parsing predicates.
-- | Parser over XML trees of lazy text.
type P a = P.P (XmlTree L.Text) a
-- | Query over XML trees of lazy text.
type Q a = P.Q (XmlTree L.Text) a
-- | Query the textual body of a @<f name=x><string>…</string></f>@
-- feature element; an empty body yields the empty text.
fStrQ :: L.Text -> Q L.Text
fStrQ x =
let checkName = named "f" *> hasAttrVal "name" x
-- | Body sometimes is empty.
safeHead [] = ""
safeHead (z:_) = z
in fmap safeHead $ checkName `joinR` do
first $ named "string" /> node text
-- | Query the @value@ attribute of a @<symbol>@ child of the
-- @<f name=x>@ element.
-- NOTE(review): this 'safeHead' calls 'error' when no symbol is found
-- — partial; confirm inputs always carry one.
fSymQ :: L.Text -> Q L.Text
fSymQ x =
let checkName = named "f" *> hasAttrVal "name" x
p = named "symbol" *> attr "value"
safeHead [] = error "fSymQ: empty head"
safeHead (z:_) = z
in safeHead <$> (checkName /> node p)
-- | Identifier and corresp.
-- | Pair the pointer parsed from the @corresp@ attribute with the
-- @xml:id@ attribute.
idesQ :: P.Q (S.Tag L.Text) (Ptr L.Text, L.Text)
idesQ = liftA2 (,) (readPtr <$> attr "corresp") (attr "xml:id")

-- | Like 'idesQ', but a missing @corresp@ attribute yields 'Nothing'
-- instead of failing.  (A provisional function, per the original.)
idesQ' :: P.Q (S.Tag L.Text) (Maybe (Ptr L.Text), L.Text)
idesQ' = liftA2 (,) (optional (readPtr <$> attr "corresp")) (attr "xml:id")
|
kawu/nkjp
|
src/Text/NKJP/Utils.hs
|
bsd-2-clause
| 1,352 | 0 | 12 | 348 | 476 | 257 | 219 | 37 | 2 |
{-# LANGUAGE NoImplicitPrelude, Arrows #-}
module Simulation.Continuous (
module Simulation.Continuous.Base
, module Simulation.Continuous.Signal
, ContinuousRK
, ContinuousEU
, integrator
, integratorS
, switch
, watch
, accumulator
, delay
, filterT
, filter1
, filter1Mod
, modFrac
) where
import NumericPrelude
import qualified Algebra.VectorSpace as VectorSpace
import qualified Algebra.RealField as RealField
import Simulation.Continuous.Base
import Simulation.Continuous.Signal
import Control.Arrow
import Data.Maybe (fromMaybe)
-- | Continuous system over 'Double' time on the RK4 signal type
-- (presumably fourth-order Runge-Kutta — see
-- "Simulation.Continuous.Signal").
type ContinuousRK a b = Continuous Double (RK4 a) (RK4 b)
-- | Continuous system over 'Double' time on the EU1 signal type
-- (presumably first-order Euler — see "Simulation.Continuous.Signal").
type ContinuousEU a b = Continuous Double (EU1 a) (EU1 b)
-- | Euler integrator with initial state @y0@.  Note the output is the
-- state from BEFORE the update: the step returns @(y1, y2)@ with
-- @y2 = y1 + dt *> x@ stored for the next step.
integrator :: VectorSpace.C t a => a -> Continuous t a a
integrator = \y0 -> ContinuousS y0 $ \dt x y1 -> let y2 = y1 + dt *> x in (y1, y2)
-- | Integrator over whole signals, delegating to the signal type's
-- 'integrate'.
integratorS :: (Integrable s, VectorSpace.C t a) => a -> Continuous t (s a) (s a)
integratorS = \x0 -> ContinuousS x0 integrate
-- | Run the system selected by the initial event @e@; whenever a new
-- event arrives on the second input, swap in the system @f event@.
switch :: (e -> Continuous t a b) -> e -> Continuous t (a, Maybe e) b
switch = \f e -> ContinuousS (f e) $ \dt (x, es) c -> simStep dt x (maybe c f es)
-- | Emit @Just y@ whenever the predicate holds between the previous
-- and current sample of the first input; the state is the last sample.
watch :: (a -> a -> Bool) -> a -> Continuous t (a, b) (Maybe b)
watch = \f x0 -> ContinuousS x0 $ \_ (x2, y) x1 -> (if f x1 x2 then Just y else Nothing, x2)
-- | Hold the most recent 'Just' value seen, starting from @x0@.
accumulator :: a -> Continuous t (Maybe a) a
accumulator = \x0 -> ContinuousS x0 $ \_ mx x -> let n = fromMaybe x mx in (n, n)
-- | One-step delay: output the previous input (initially @x0@).
delay :: a -> Continuous t a a
delay = \x0 -> ContinuousS x0 $ \_ x2 x1 -> (x1, x2)
-- | First-order filter: integrates @(x - y) / t@, i.e. an exponential
-- lag with time constant @t@ starting from @y0@.
filter1 :: VectorSpace.C t a => t -> a -> Continuous t a a
filter1 = \t y0 -> let k = recip t
in loop $ proc (x, y) -> do
y' <- integrator y0 -< k *> (x - y)
returnA -< (y', y')
-- | Like 'filter1' but on a circular domain with period @m@: the error
-- is wrapped into [-m/2, m/2) before integrating and the output is
-- reduced modulo @m@.
filter1Mod :: (VectorSpace.C t a, RealField.C a) => a -> t -> a -> Continuous t a a
filter1Mod = \m t y0 -> let k = recip t
two = fromRational 2
in loop $ proc (x, y) -> do
y' <- integrator y0 -< k *> (modFrac (x - y + m / two) m - m / two)
returnA -< let y'' = modFrac y' m in (y'', y'')
-- | Real-valued modulus: @modFrac x x0 = x - floor (x / x0) * x0@,
-- yielding a value in [0, x0) for positive @x0@.
modFrac :: RealField.C t => t -> t -> t
modFrac x x0 = x - floor (x / x0) * x0
-- | 'filter1' with the time constant supplied as a second input each
-- step, so it may vary over time.
filterT :: VectorSpace.C t a => a -> Continuous t (a, t) a
filterT = \y0 -> loop $
proc ((x, t), y) -> do
y' <- integrator y0 -< (recip t) *> (x - y)
returnA -< (y', y')
|
balodja/contisim
|
lib/Simulation/Continuous.hs
|
bsd-2-clause
| 2,348 | 3 | 21 | 638 | 1,064 | 568 | 496 | 55 | 2 |
{-# LANGUAGE DeriveDataTypeable, DeriveGeneric, RecordWildCards #-}
-- |
-- Module : Criterion.Main.Options
-- Copyright : (c) 2014 Bryan O'Sullivan
--
-- License : BSD-style
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : GHC
--
-- Benchmarking command-line configuration.
module Criterion.Main.Options
(
Mode(..)
, MatchType(..)
, defaultConfig
, parseWith
, describe
, versionInfo
) where
-- Temporary: to support pre-AMP GHC 7.8.4:
import Data.Monoid
import Control.Monad (when)
import Criterion.Analysis (validateAccessors)
import Criterion.Types (Config(..), Verbosity(..), measureAccessors,
measureKeys)
import Data.Char (isSpace, toLower)
import Data.Data (Data, Typeable)
import Data.Int (Int64)
import Data.List (isPrefixOf)
import Data.Version (showVersion)
import GHC.Generics (Generic)
import Options.Applicative
import Options.Applicative.Help (Chunk(..), tabulate)
import Options.Applicative.Help.Pretty ((.$.))
import Options.Applicative.Types
import Paths_criterion (version)
import Text.PrettyPrint.ANSI.Leijen (Doc, text)
import qualified Data.Map as M
import Prelude
-- | How to match a benchmark name.
-- | How to match a benchmark name.
-- Selected on the command line via @--match@; see 'parseWith'.
data MatchType = Prefix
-- ^ Match by prefix. For example, a prefix of
-- @\"foo\"@ will match @\"foobar\"@.
| Glob
-- ^ Match by Unix-style glob pattern.
deriving (Eq, Ord, Bounded, Enum, Read, Show, Typeable, Data,
Generic)
-- | Execution mode for a benchmark program.
-- | Execution mode for a benchmark program.
-- The modes map to command line flags in 'parseWith':
-- @--list@, @--version@, @--iters@, and the default run mode.
data Mode = List
-- ^ List all benchmarks.
| Version
-- ^ Print the version.
| RunIters Int64 MatchType [String]
-- ^ Run the given benchmarks, without collecting or
-- analysing performance numbers.
| Run Config MatchType [String]
-- ^ Run and analyse the given benchmarks.
deriving (Eq, Read, Show, Typeable, Data, Generic)
-- | Default benchmarking configuration.
-- | Default benchmarking configuration.
-- These values also serve as the per-option defaults in 'config'
-- (each field is fed to the matching option's 'value' modifier).
defaultConfig :: Config
defaultConfig = Config {
confInterval = 0.95
, forceGC = True
, timeLimit = 5
, resamples = 1000
, regressions = []
, rawDataFile = Nothing
, reportFile = Nothing
, csvFile = Nothing
, jsonFile = Nothing
, junitFile = Nothing
, verbosity = Normal
, template = "default"
}
-- | Parse a command line.
parseWith :: Config
-- ^ Default configuration to use if options are not
-- explicitly specified.
-> Parser Mode
-- The alternatives are tried in order: full run, --iters run,
-- --list, --version.
parseWith cfg =
(matchNames (Run <$> config cfg)) <|>
runIters <|>
(List <$ switch (long "list" <> short 'l' <> help "List benchmarks")) <|>
(Version <$ switch (long "version" <> help "Show version info"))
where
runIters = matchNames $
RunIters <$> option auto
(long "iters" <> short 'n' <> metavar "ITERS" <>
help "Run benchmarks, don't analyse")
-- Append the --match option and the positional benchmark names to
-- a parser expecting them.
matchNames wat = wat
<*> option match
(long "match" <> short 'm' <> metavar "MATCH" <> value Prefix <>
help "How to match benchmark names (\"prefix\" or \"glob\")")
<*> many (argument str (metavar "NAME..."))
-- | Parser for the full 'Config'.  The fields of the supplied 'Config'
-- (bound via RecordWildCards) act as each option's default 'value'.
-- The applicative chain must stay in the exact order of Config's
-- fields, since the record is rebuilt positionally.
config :: Config -> Parser Config
config Config{..} = Config
<$> option (range 0.001 0.999)
(long "ci" <> short 'I' <> metavar "CI" <> value confInterval <>
help "Confidence interval")
<*> (not <$> switch (long "no-gc" <> short 'G' <>
help "Do not collect garbage between iterations"))
<*> option (range 0.1 86400)
(long "time-limit" <> short 'L' <> metavar "SECS" <> value timeLimit <>
help "Time limit to run a benchmark")
<*> option (range 1 1000000)
(long "resamples" <> metavar "COUNT" <> value resamples <>
help "Number of bootstrap resamples to perform")
<*> many (option regressParams
(long "regress" <> metavar "RESP:PRED.." <>
help "Regressions to perform"))
<*> outputOption rawDataFile (long "raw" <>
help "File to write raw data to")
<*> outputOption reportFile (long "output" <> short 'o' <>
help "File to write report to")
<*> outputOption csvFile (long "csv" <>
help "File to write CSV summary to")
<*> outputOption jsonFile (long "json" <>
help "File to write JSON summary to")
<*> outputOption junitFile (long "junit" <>
help "File to write JUnit summary to")
<*> (toEnum <$> option (range 0 2)
(long "verbosity" <> short 'v' <> metavar "LEVEL" <>
value (fromEnum verbosity) <>
help "Verbosity level"))
<*> strOption (long "template" <> short 't' <> metavar "FILE" <>
value template <>
help "Template to use for report")
-- | An optional FILE-valued option; when a previous value exists it is
-- installed as the option's default.
outputOption :: Maybe String -> Mod OptionFields String -> Parser (Maybe String)
outputOption file m =
  optional . strOption $ m <> metavar "FILE" <> maybe mempty value file
-- | Read a value and check it lies within the inclusive interval
-- [lo, hi]; reports a readable error otherwise.
range :: (Show a, Read a, Ord a) => a -> a -> ReadM a
range lo hi = do
  s <- readerAsk
  case reads s of
    [(i, "")] ->
      if i >= lo && i <= hi
        then return i
        else readerError (show i ++ " is outside range " ++ show (lo, hi))
    _ -> readerError (show s ++ " is not a number")
-- | Read a 'MatchType'.  Any (case-insensitive) prefix of "pfx" or
-- "prefix" selects 'Prefix'; any prefix of "glob" selects 'Glob'.
match :: ReadM MatchType
match = do
  m <- readerAsk
  let lowered = map toLower m
  if lowered `isPrefixOf` "pfx" || lowered `isPrefixOf` "prefix"
    then return Prefix
    else if lowered `isPrefixOf` "glob"
      then return Glob
      else readerError $
           show m ++ " is not a known match type. Try \"prefix\" or \"glob\"."
-- | Read a regression specification of the form @RESP:PRED1,PRED2,…@,
-- yielding the predictor names and the responder name, validated via
-- 'validateAccessors'.
regressParams :: ReadM ([String], String)
regressParams = do
m <- readerAsk
-- repl turns commas into spaces so 'words' can split the predictors;
-- tidy trims whitespace from both ends of the responder.
let repl ',' = ' '
repl c = c
tidy = reverse . dropWhile isSpace . reverse . dropWhile isSpace
(r,ps) = break (==':') m
when (null r) $
readerError "no responder specified"
when (null ps) $
readerError "no predictors specified"
-- 'drop 1' removes the ':' left at the head of ps by 'break'.
let ret = (words . map repl . drop 1 $ ps, tidy r)
either readerError (const (return ret)) $ uncurry validateAccessors ret
-- | Flesh out a command line parser.
-- | Flesh out a command line parser.
-- The footer lists the regression metrics from 'regressionHelp'.
describe :: Config -> ParserInfo Mode
describe cfg = info (helper <*> parseWith cfg) $
header ("Microbenchmark suite - " <> versionInfo) <>
fullDesc <>
footerDoc (unChunk regressionHelp)
-- | A string describing the version of this benchmark (really, the
-- version of criterion that was used to build it).
versionInfo :: String
versionInfo = "built with criterion " ++ showVersion version
-- We sort not by name, but by likely frequency of use.
-- | Help text tabulating every known regression metric with its
-- description, for the --regress footer.
-- NOTE(review): 'M.!' is partial — it assumes every key in
-- 'measureKeys' is present in 'measureAccessors' (invariant of
-- Criterion.Types; confirm).
regressionHelp :: Chunk Doc
regressionHelp =
fmap (text "Regression metrics (for use with --regress):" .$.) $
tabulate [(text n,text d) | (n,(_,d)) <- map f measureKeys]
where f k = (k, measureAccessors M.! k)
|
iu-parfunc/criterion
|
Criterion/Main/Options.hs
|
bsd-2-clause
| 7,103 | 0 | 23 | 2,079 | 1,841 | 954 | 887 | 147 | 2 |
{-# LANGUAGE ExtendedDefaultRules #-}
{-# LANGUAGE OverloadedStrings #-}
import Data.Attoparsec.Text (parseOnly)
import Data.Maybe
import qualified Data.Text.IO as TIO
import Control.Applicative
import Options.Applicative
import Lucid
import Diff2Html
import Chunk
import Diff
import Style
-- | Read a patch file and parse it into diff chunks.
--
-- The original bound the parse result with a partial @Right r <-@
-- pattern, which dies with an opaque pattern-match failure on
-- malformed input; we now fail with the file name and the parser's
-- own error message.
parseChunks :: FilePath -> IO [Chunk]
parseChunks fname = do
  parsed <- parseOnly (many diff) <$> TIO.readFile fname
  case parsed of
    Left err -> fail ("parseChunks: cannot parse " ++ fname ++ ": " ++ err)
    Right r  -> return (toChunks r)
-- | Render the chunks as a standalone HTML document: the stylesheet
-- inlined in the head, the diff table in the body.
chunksToHtml :: [Chunk] -> Html ()
chunksToHtml chunks =
doctypehtml_ $ do
head_ $ style_ [] styleSheet
body_ $ chunksToTable chunks
-- | Command line options.
data Options = Options { patchFile :: FilePath
-- ^ The patch file to read.
, outputFile :: Maybe FilePath
-- ^ Where to write the HTML; 'main' defaults to @<patch>.html@.
}
-- | Command line parser: one positional patch-file argument plus an
-- optional output path.
-- NOTE(review): 'outputFile' is built with @option auto@ but carries
-- no long/short name, so it can never actually be supplied; and
-- 'auto' Read-parses the argument (a bare path would need quotes).
-- This looks like it was meant to be a named 'strOption' — confirm
-- the intended CLI before changing.
options :: Parser Options
options = Options <$> strArgument (help "Patch file")
<*> optional (option auto (help "HTML output"))
-- | Entry point: parse the options, read and parse the patch, and
-- write its HTML rendering to the requested output file (defaulting
-- to the patch path with ".html" appended).
main :: IO ()
main = do
  opts <- execParser (info (helper <*> options) mempty)
  chunks <- parseChunks (patchFile opts)
  let fallback = patchFile opts ++ ".html"
      target   = fromMaybe fallback (outputFile opts)
  renderToFile target (chunksToHtml chunks)
|
bgamari/diff-utils
|
Main.hs
|
bsd-3-clause
| 1,103 | 0 | 13 | 257 | 337 | 171 | 166 | 32 | 1 |
{-# OPTIONS_GHC -w #-}
{-# LANGUAGE OverloadedStrings #-}
module Lang.TopLevel.HParser where
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Text (Text)
import qualified Data.Text as T
import Lang.TopLevel.Tokenizer
-- parser produced by Happy Version 1.18.6
data HappyAbsSyn
= HappyTerminal (Token)
| HappyErrorToken Int
| HappyAbsSyn5 ([Command])
| HappyAbsSyn7 (Command)
| HappyAbsSyn8 ([Expr])
| HappyAbsSyn9 (Expr)
| HappyAbsSyn10 ([(Text, [Expr])])
| HappyAbsSyn11 ((Text, [Expr]))
{- to allow type-synonyms as our monads (likely
- with explicitly-specified bind and return)
- in Haskell98, it seems that with
- /type M a = .../, then /(HappyReduction M)/
- is not allowed. But Happy is a
- code-generator that can just substitute it.
type HappyReduction m =
Int
-> (Token)
-> HappyState (Token) (HappyStk HappyAbsSyn -> [(Token)] -> m HappyAbsSyn)
-> [HappyState (Token) (HappyStk HappyAbsSyn -> [(Token)] -> m HappyAbsSyn)]
-> HappyStk HappyAbsSyn
-> [(Token)] -> m HappyAbsSyn
-}
action_0,
action_1,
action_2,
action_3,
action_4,
action_5,
action_6,
action_7,
action_8,
action_9,
action_10,
action_11,
action_12,
action_13,
action_14,
action_15,
action_16,
action_17,
action_18,
action_19,
action_20,
action_21,
action_22,
action_23,
action_24,
action_25,
action_26,
action_27,
action_28,
action_29,
action_30,
action_31,
action_32,
action_33,
action_34,
action_35,
action_36,
action_37,
action_38,
action_39 :: () => Int -> ({-HappyReduction (Maybe) = -}
Int
-> (Token)
-> HappyState (Token) (HappyStk HappyAbsSyn -> [(Token)] -> (Maybe) HappyAbsSyn)
-> [HappyState (Token) (HappyStk HappyAbsSyn -> [(Token)] -> (Maybe) HappyAbsSyn)]
-> HappyStk HappyAbsSyn
-> [(Token)] -> (Maybe) HappyAbsSyn)
happyReduce_2,
happyReduce_3,
happyReduce_4,
happyReduce_5,
happyReduce_6,
happyReduce_7,
happyReduce_8,
happyReduce_9,
happyReduce_10,
happyReduce_11,
happyReduce_12,
happyReduce_13,
happyReduce_14,
happyReduce_15,
happyReduce_16,
happyReduce_17,
happyReduce_18,
happyReduce_19 :: () => ({-HappyReduction (Maybe) = -}
Int
-> (Token)
-> HappyState (Token) (HappyStk HappyAbsSyn -> [(Token)] -> (Maybe) HappyAbsSyn)
-> [HappyState (Token) (HappyStk HappyAbsSyn -> [(Token)] -> (Maybe) HappyAbsSyn)]
-> HappyStk HappyAbsSyn
-> [(Token)] -> (Maybe) HappyAbsSyn)
action_0 (12) = happyShift action_4
action_0 (13) = happyShift action_5
action_0 (14) = happyShift action_6
action_0 (18) = happyShift action_7
action_0 (19) = happyShift action_8
action_0 (7) = happyGoto action_12
action_0 _ = happyFail
action_1 (12) = happyShift action_4
action_1 (13) = happyShift action_5
action_1 (14) = happyShift action_6
action_1 (18) = happyShift action_7
action_1 (19) = happyShift action_8
action_1 (5) = happyGoto action_9
action_1 (6) = happyGoto action_10
action_1 (7) = happyGoto action_11
action_1 _ = happyReduce_5
action_2 (12) = happyShift action_4
action_2 (13) = happyShift action_5
action_2 (14) = happyShift action_6
action_2 (18) = happyShift action_7
action_2 (19) = happyShift action_8
action_2 (7) = happyGoto action_3
action_2 _ = happyFail
action_3 (12) = happyShift action_4
action_3 (13) = happyShift action_5
action_3 (14) = happyShift action_6
action_3 (18) = happyShift action_7
action_3 (19) = happyShift action_8
action_3 (5) = happyGoto action_13
action_3 (7) = happyGoto action_11
action_3 _ = happyFail
action_4 (18) = happyShift action_23
action_4 _ = happyFail
action_5 (18) = happyShift action_22
action_5 _ = happyFail
action_6 (16) = happyShift action_21
action_6 _ = happyFail
action_7 (17) = happyShift action_17
action_7 (18) = happyShift action_18
action_7 (19) = happyShift action_19
action_7 (20) = happyShift action_20
action_7 (8) = happyGoto action_15
action_7 (9) = happyGoto action_16
action_7 _ = happyReduce_12
action_8 (16) = happyShift action_14
action_8 _ = happyFail
action_9 _ = happyReduce_4
action_10 (23) = happyAccept
action_10 _ = happyFail
action_11 (12) = happyShift action_4
action_11 (13) = happyShift action_5
action_11 (14) = happyShift action_6
action_11 (18) = happyShift action_7
action_11 (19) = happyShift action_8
action_11 (5) = happyGoto action_13
action_11 (7) = happyGoto action_11
action_11 _ = happyReduce_3
action_12 (23) = happyAccept
action_12 _ = happyFail
action_13 _ = happyReduce_2
action_14 _ = happyReduce_9
action_15 (22) = happyShift action_30
action_15 (10) = happyGoto action_28
action_15 (11) = happyGoto action_29
action_15 _ = happyReduce_18
action_16 (17) = happyShift action_17
action_16 (18) = happyShift action_18
action_16 (19) = happyShift action_19
action_16 (20) = happyShift action_20
action_16 (8) = happyGoto action_27
action_16 (9) = happyGoto action_16
action_16 _ = happyReduce_12
action_17 _ = happyReduce_15
action_18 _ = happyReduce_14
action_19 _ = happyReduce_13
action_20 (17) = happyShift action_17
action_20 (18) = happyShift action_18
action_20 (19) = happyShift action_19
action_20 (20) = happyShift action_20
action_20 (8) = happyGoto action_26
action_20 (9) = happyGoto action_16
action_20 _ = happyReduce_12
action_21 _ = happyReduce_8
action_22 (15) = happyShift action_25
action_22 _ = happyFail
action_23 (15) = happyShift action_24
action_23 _ = happyFail
action_24 (19) = happyShift action_36
action_24 _ = happyFail
action_25 (19) = happyShift action_35
action_25 _ = happyFail
action_26 (21) = happyShift action_34
action_26 _ = happyFail
action_27 _ = happyReduce_11
action_28 (16) = happyShift action_33
action_28 _ = happyFail
action_29 (22) = happyShift action_30
action_29 (10) = happyGoto action_32
action_29 (11) = happyGoto action_29
action_29 _ = happyReduce_18
action_30 (17) = happyShift action_17
action_30 (18) = happyShift action_18
action_30 (19) = happyShift action_19
action_30 (20) = happyShift action_20
action_30 (8) = happyGoto action_31
action_30 (9) = happyGoto action_16
action_30 _ = happyReduce_12
action_31 _ = happyReduce_19
action_32 _ = happyReduce_17
action_33 _ = happyReduce_10
action_34 _ = happyReduce_16
action_35 (16) = happyShift action_38
action_35 _ = happyFail
action_36 (19) = happyShift action_37
action_36 _ = happyFail
action_37 (16) = happyShift action_39
action_37 _ = happyFail
action_38 _ = happyReduce_7
action_39 _ = happyReduce_6
happyReduce_2 = happySpecReduce_2 5 happyReduction_2
happyReduction_2 (HappyAbsSyn5 happy_var_2)
(HappyAbsSyn7 happy_var_1)
= HappyAbsSyn5
(happy_var_1 : happy_var_2
)
happyReduction_2 _ _ = notHappyAtAll
happyReduce_3 = happySpecReduce_1 5 happyReduction_3
happyReduction_3 (HappyAbsSyn7 happy_var_1)
= HappyAbsSyn5
([happy_var_1]
)
happyReduction_3 _ = notHappyAtAll
happyReduce_4 = happySpecReduce_1 6 happyReduction_4
happyReduction_4 (HappyAbsSyn5 happy_var_1)
= HappyAbsSyn5
(happy_var_1
)
happyReduction_4 _ = notHappyAtAll
happyReduce_5 = happySpecReduce_0 6 happyReduction_5
happyReduction_5 = HappyAbsSyn5
([]
)
happyReduce_6 = happyReduce 6 7 happyReduction_6
happyReduction_6 (_ `HappyStk`
(HappyTerminal (TokTerm happy_var_5)) `HappyStk`
(HappyTerminal (TokTerm happy_var_4)) `HappyStk`
_ `HappyStk`
(HappyTerminal (TokIdent happy_var_2)) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn7
(CmdFun happy_var_2 happy_var_4 happy_var_5
) `HappyStk` happyRest
happyReduce_7 = happyReduce 5 7 happyReduction_7
happyReduction_7 (_ `HappyStk`
(HappyTerminal (TokTerm happy_var_4)) `HappyStk`
_ `HappyStk`
(HappyTerminal (TokIdent happy_var_2)) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn7
(CmdTheorem happy_var_2 happy_var_4
) `HappyStk` happyRest
happyReduce_8 = happySpecReduce_2 7 happyReduction_8
happyReduction_8 _
_
= HappyAbsSyn7
(CmdRefl
)
happyReduce_9 = happySpecReduce_2 7 happyReduction_9
happyReduction_9 _
(HappyTerminal (TokTerm happy_var_1))
= HappyAbsSyn7
(CmdTerm happy_var_1
)
happyReduction_9 _ _ = notHappyAtAll
happyReduce_10 = happyReduce 4 7 happyReduction_10
happyReduction_10 (_ `HappyStk`
(HappyAbsSyn10 happy_var_3) `HappyStk`
(HappyAbsSyn8 happy_var_2) `HappyStk`
(HappyTerminal (TokIdent happy_var_1)) `HappyStk`
happyRest)
= HappyAbsSyn7
(CmdExpr happy_var_1 happy_var_2 (Map.fromList happy_var_3)
) `HappyStk` happyRest
happyReduce_11 = happySpecReduce_2 8 happyReduction_11
happyReduction_11 (HappyAbsSyn8 happy_var_2)
(HappyAbsSyn9 happy_var_1)
= HappyAbsSyn8
(happy_var_1 : happy_var_2
)
happyReduction_11 _ _ = notHappyAtAll
happyReduce_12 = happySpecReduce_0 8 happyReduction_12
happyReduction_12 = HappyAbsSyn8
([]
)
happyReduce_13 = happySpecReduce_1 9 happyReduction_13
happyReduction_13 (HappyTerminal (TokTerm happy_var_1))
= HappyAbsSyn9
(ExprTerm happy_var_1
)
happyReduction_13 _ = notHappyAtAll
happyReduce_14 = happySpecReduce_1 9 happyReduction_14
happyReduction_14 (HappyTerminal (TokIdent happy_var_1))
= HappyAbsSyn9
(ExprIdent happy_var_1
)
happyReduction_14 _ = notHappyAtAll
happyReduce_15 = happySpecReduce_1 9 happyReduction_15
happyReduction_15 (HappyTerminal (TokQuote happy_var_1))
= HappyAbsSyn9
(ExprString happy_var_1
)
happyReduction_15 _ = notHappyAtAll
happyReduce_16 = happySpecReduce_3 9 happyReduction_16
happyReduction_16 _
(HappyAbsSyn8 happy_var_2)
_
= HappyAbsSyn9
(ExprList happy_var_2
)
happyReduction_16 _ _ _ = notHappyAtAll
happyReduce_17 = happySpecReduce_2 10 happyReduction_17
happyReduction_17 (HappyAbsSyn10 happy_var_2)
(HappyAbsSyn11 happy_var_1)
= HappyAbsSyn10
(happy_var_1 : happy_var_2
)
happyReduction_17 _ _ = notHappyAtAll
happyReduce_18 = happySpecReduce_0 10 happyReduction_18
happyReduction_18 = HappyAbsSyn10
([]
)
happyReduce_19 = happySpecReduce_2 11 happyReduction_19
happyReduction_19 (HappyAbsSyn8 happy_var_2)
(HappyTerminal (TokKeyword happy_var_1))
= HappyAbsSyn11
((happy_var_1, happy_var_2)
)
happyReduction_19 _ _ = notHappyAtAll
-- Fetch the next token and dispatch on its table number; end of input
-- feeds the EOF number (23).  The numeric cases below mirror the
-- %token declarations of the grammar this parser was generated from.
happyNewToken action sts stk [] =
        action 23 23 notHappyAtAll (HappyState action) sts stk []
happyNewToken action sts stk (tk:tks) =
        let cont i = action i i tk (HappyState action) sts stk tks in
        case tk of {
        TokFun -> cont 12;
        TokTheorem -> cont 13;
        TokRefl -> cont 14;
        TokColon -> cont 15;
        TokPeriod -> cont 16;
        TokQuote happy_dollar_dollar -> cont 17;
        TokIdent happy_dollar_dollar -> cont 18;
        TokTerm happy_dollar_dollar -> cont 19;
        TokOpen -> cont 20;
        TokClose -> cont 21;
        TokKeyword happy_dollar_dollar -> cont 22;
        _ -> happyError' (tk:tks)
        }
happyError_ tk tks = happyError' (tk:tks)
-- Monad plumbing for the generated parser: this grammar is
-- instantiated at the Maybe monad, so a parse failure is Nothing
-- (via 'parseError' below).
happyThen :: () => Maybe a -> (a -> Maybe b) -> Maybe b
happyThen = (>>=)
happyReturn :: () => a -> Maybe a
happyReturn = (return)
happyThen1 m k tks = (>>=) m (\a -> k a tks)
happyReturn1 :: () => a -> b -> Maybe a
happyReturn1 = \a tks -> (return) a
happyError' :: () => [(Token)] -> Maybe a
happyError' = parseError
-- | Parse a single command (start symbol 7 / 'action_0').
parseCommand tks = happySomeParser where
  happySomeParser = happyThen (happyParse action_0 tks) (\x -> case x of {HappyAbsSyn7 z -> happyReturn z; _other -> notHappyAtAll })
-- | Parse a whole command sequence (start symbol 5 / 'action_1').
parseCommands tks = happySomeParser where
  happySomeParser = happyThen (happyParse action_1 tks) (\x -> case x of {HappyAbsSyn5 z -> happyReturn z; _other -> notHappyAtAll })
happySeq = happyDontSeq
-- | Top-level commands produced by the parser.
data Command = CmdTerm Text
             | CmdTheorem Text Text
             | CmdFun Text Text Text
             | CmdRefl
             | CmdExpr Text [Expr] (Map Text [Expr])
  deriving (Show)
-- | Argument expressions: identifiers, terms, string literals, and
-- bracketed lists thereof.
data Expr = ExprIdent Text
          | ExprTerm Text
          | ExprString Text
          | ExprList [Expr]
  deriving (Show)
-- | Happy error hook: any syntax error aborts the whole parse with
-- Nothing; no position or offending-token information is reported.
parseError :: [Token] -> Maybe a
parseError _ = Nothing
{-# LINE 1 "templates/GenericTemplate.hs" #-}
{-# LINE 1 "templates/GenericTemplate.hs" #-}
{-# LINE 1 "<built-in>" #-}
{-# LINE 1 "<command-line>" #-}
{-# LINE 1 "templates/GenericTemplate.hs" #-}
-- Id: GenericTemplate.hs,v 1.26 2005/01/14 14:47:22 simonmar Exp
{-# LINE 30 "templates/GenericTemplate.hs" #-}
{-# LINE 51 "templates/GenericTemplate.hs" #-}
{-# LINE 61 "templates/GenericTemplate.hs" #-}
{-# LINE 70 "templates/GenericTemplate.hs" #-}
infixr 9 `HappyStk`
data HappyStk a = HappyStk a (HappyStk a)
-----------------------------------------------------------------------------
-- starting the parse
happyParse start_state = happyNewToken start_state notHappyAtAll notHappyAtAll
-----------------------------------------------------------------------------
-- Accepting the parse
-- If the current token is (1), it means we've just accepted a partial
-- parse (a %partial parser). We must ignore the saved token on the top of
-- the stack in this case.
happyAccept (1) tk st sts (_ `HappyStk` ans `HappyStk` _) =
happyReturn1 ans
happyAccept j tk st sts (HappyStk ans _) =
(happyReturn1 ans)
-----------------------------------------------------------------------------
-- Arrays only: do the next action
{-# LINE 148 "templates/GenericTemplate.hs" #-}
-----------------------------------------------------------------------------
-- HappyState data type (not arrays)
newtype HappyState b c = HappyState
(Int -> -- token number
Int -> -- token number (yes, again)
b -> -- token semantic value
HappyState b c -> -- current state
[HappyState b c] -> -- state stack
c)
-----------------------------------------------------------------------------
-- Shifting a token
happyShift new_state (1) tk st sts stk@(x `HappyStk` _) =
let (i) = (case x of { HappyErrorToken (i) -> i }) in
-- trace "shifting the error token" $
new_state i i tk (HappyState (new_state)) ((st):(sts)) (stk)
happyShift new_state i tk st sts stk =
happyNewToken new_state ((st):(sts)) ((HappyTerminal (tk))`HappyStk`stk)
-- happyReduce is specialised for the common cases.
happySpecReduce_0 i fn (1) tk st sts stk
= happyFail (1) tk st sts stk
happySpecReduce_0 nt fn j tk st@((HappyState (action))) sts stk
= action nt j tk st ((st):(sts)) (fn `HappyStk` stk)
happySpecReduce_1 i fn (1) tk st sts stk
= happyFail (1) tk st sts stk
happySpecReduce_1 nt fn j tk _ sts@(((st@(HappyState (action))):(_))) (v1`HappyStk`stk')
= let r = fn v1 in
happySeq r (action nt j tk st sts (r `HappyStk` stk'))
happySpecReduce_2 i fn (1) tk st sts stk
= happyFail (1) tk st sts stk
happySpecReduce_2 nt fn j tk _ ((_):(sts@(((st@(HappyState (action))):(_))))) (v1`HappyStk`v2`HappyStk`stk')
= let r = fn v1 v2 in
happySeq r (action nt j tk st sts (r `HappyStk` stk'))
happySpecReduce_3 i fn (1) tk st sts stk
= happyFail (1) tk st sts stk
happySpecReduce_3 nt fn j tk _ ((_):(((_):(sts@(((st@(HappyState (action))):(_))))))) (v1`HappyStk`v2`HappyStk`v3`HappyStk`stk')
= let r = fn v1 v2 v3 in
happySeq r (action nt j tk st sts (r `HappyStk` stk'))
happyReduce k i fn (1) tk st sts stk
= happyFail (1) tk st sts stk
happyReduce k nt fn j tk st sts stk
= case happyDrop (k - ((1) :: Int)) sts of
sts1@(((st1@(HappyState (action))):(_))) ->
let r = fn stk in -- it doesn't hurt to always seq here...
happyDoSeq r (action nt j tk st1 sts1 r)
happyMonadReduce k nt fn (1) tk st sts stk
= happyFail (1) tk st sts stk
happyMonadReduce k nt fn j tk st sts stk =
happyThen1 (fn stk tk) (\r -> action nt j tk st1 sts1 (r `HappyStk` drop_stk))
where (sts1@(((st1@(HappyState (action))):(_)))) = happyDrop k ((st):(sts))
drop_stk = happyDropStk k stk
happyMonad2Reduce k nt fn (1) tk st sts stk
= happyFail (1) tk st sts stk
happyMonad2Reduce k nt fn j tk st sts stk =
happyThen1 (fn stk tk) (\r -> happyNewToken new_state sts1 (r `HappyStk` drop_stk))
where (sts1@(((st1@(HappyState (action))):(_)))) = happyDrop k ((st):(sts))
drop_stk = happyDropStk k stk
new_state = action
-- Drop n elements from a state stack.  Deliberately partial on a
-- too-short list: the generated tables guarantee sufficient depth.
happyDrop 0 l = l
happyDrop n (_ : rest) = happyDrop (n - 1) rest
-- Drop n semantic values from a HappyStk stack; partial on purpose,
-- like 'happyDrop' above -- the tables guarantee enough depth.
happyDropStk (0) l = l
happyDropStk n (x `HappyStk` xs) = happyDropStk (n - ((1)::Int)) xs
-----------------------------------------------------------------------------
-- Moving to a new state after a reduction
{-# LINE 246 "templates/GenericTemplate.hs" #-}
happyGoto action j tk st = action j j tk (HappyState action)
-----------------------------------------------------------------------------
-- Error recovery ((1) is the error token)
-- parse error if we are in recovery and we fail again
happyFail (1) tk old_st _ stk =
-- trace "failing" $
happyError_ tk
{- We don't need state discarding for our restricted implementation of
"error". In fact, it can cause some bogus parses, so I've disabled it
for now --SDM
-- discard a state
happyFail (1) tk old_st (((HappyState (action))):(sts))
(saved_tok `HappyStk` _ `HappyStk` stk) =
-- trace ("discarding state, depth " ++ show (length stk)) $
action (1) (1) tk (HappyState (action)) sts ((saved_tok`HappyStk`stk))
-}
-- Enter error recovery: generate an error token,
-- save the old token and carry on.
happyFail i tk (HappyState (action)) sts stk =
-- trace "entering error recovery" $
action (1) (1) tk (HappyState (action)) sts ( (HappyErrorToken (i)) `HappyStk` stk)
-- Internal happy errors:
notHappyAtAll :: a
notHappyAtAll = error "Internal Happy error\n"
-----------------------------------------------------------------------------
-- Hack to get the typechecker to accept our action functions
-----------------------------------------------------------------------------
-- Seq-ing. If the --strict flag is given, then Happy emits
-- happySeq = happyDoSeq
-- otherwise it emits
-- happySeq = happyDontSeq
-- Strict/lazy sequencing used by the reduction driver: the strict
-- variant forces the reduction result to WHNF first, the lazy variant
-- ignores it entirely.
happyDoSeq, happyDontSeq :: a -> b -> b
happyDoSeq x y = seq x y
happyDontSeq _ y = y
-----------------------------------------------------------------------------
-- Don't inline any functions from the template. GHC has a nasty habit
-- of deciding to inline happyGoto everywhere, which increases the size of
-- the generated parser quite a bit.
{-# LINE 311 "templates/GenericTemplate.hs" #-}
{-# NOINLINE happyShift #-}
{-# NOINLINE happySpecReduce_0 #-}
{-# NOINLINE happySpecReduce_1 #-}
{-# NOINLINE happySpecReduce_2 #-}
{-# NOINLINE happySpecReduce_3 #-}
{-# NOINLINE happyReduce #-}
{-# NOINLINE happyMonadReduce #-}
{-# NOINLINE happyGoto #-}
{-# NOINLINE happyFail #-}
-- end of Happy Template.
|
Alasdair/Mella
|
Lang/TopLevel/HParser.hs
|
bsd-3-clause
| 18,583 | 513 | 19 | 3,357 | 5,398 | 2,988 | 2,410 | 438 | 12 |
{-# LANGUAGE ScopedTypeVariables #-}
module Empty where
import Control.Applicative
import Test.Tasty
import Test.Tasty.QuickCheck as QC
import Text.Earley
-- | Properties pinning down how empty productions behave: a parser
-- built from 'empty' accepts nothing, and 'many empty' yields only the
-- trivial zero-length parse at position 0 -- in both cases the whole
-- input is left unconsumed and nothing is "expected".
tests :: TestTree
tests = testGroup "Empty productions"
  [ QC.testProperty "The empty production doesn't parse anything" $
    \(input :: String) ->
      allParses (parser (return empty :: forall r. Grammar r (Prod r () Char ()))) input
      == (,) [] Report { position = 0
                       , expected = []
                       , unconsumed = input
                       }
  , QC.testProperty "Many empty productions parse very little" $
    \(input :: String) ->
      allParses (parser (return $ many empty <* pure "blah" :: forall r. Grammar r (Prod r () Char [()]))) input
      == (,) [([], 0)] Report { position = 0
                              , expected = []
                              , unconsumed = input
                              }
  ]
|
sboosali/Earley
|
tests/Empty.hs
|
bsd-3-clause
| 932 | 0 | 19 | 329 | 278 | 154 | 124 | 20 | 1 |
{-|
Module : Data.Array.BitArray.ByteString
Copyright : (c) Claude Heiland-Allen 2012
License : BSD3
Maintainer : [email protected]
Stability : unstable
Portability : portable
Copy bit array data to and from ByteStrings.
-}
module Data.Array.BitArray.ByteString
(
-- * Immutable copying.
toByteString
, fromByteString
-- * Mutable copying.
, toByteStringIO
, fromByteStringIO
) where
import Data.Bits (shiftR, (.&.))
import Data.ByteString (ByteString)
import Data.Ix (Ix, rangeSize)
import Data.Word (Word8)
import Control.Monad (when)
import Foreign.ForeignPtr (withForeignPtr)
import Foreign.Marshal.Utils (copyBytes)
import Foreign.Ptr (castPtr)
import Foreign.Storable (peekByteOff, pokeByteOff)
import System.IO.Unsafe (unsafePerformIO)
import Compat (packCStringLen, unsafeUseAsCStringLen)
import Data.Bits.Bitwise (mask)
import Data.Array.BitArray (BitArray)
import Data.Array.BitArray.IO (IOBitArray)
import qualified Data.Array.BitArray.IO as IO
import Data.Array.BitArray.Internal (iobData)
-- | Copy an immutable bit array to a ByteString.  The most significant
-- bits of the last byte are padded with 0 unless the array was a
-- multiple of 8 bits in size.
toByteString :: Ix i => BitArray i -> ByteString
toByteString arr = unsafePerformIO (IO.unsafeThaw arr >>= toByteStringIO)
-- | Build an immutable bit array from a ByteString.  Much like
-- 'listArray' but with packed bits.
fromByteString :: Ix i => (i, i) {- ^ bounds -} -> ByteString {- ^ packed elems -} -> BitArray i
fromByteString bnds packed =
  unsafePerformIO (fromByteStringIO bnds packed >>= IO.unsafeFreeze)
-- | Copy to a ByteString. The most significant bits of the last byte
-- are padded with 0 unless the array was a multiple of 8 bits in size.
--
-- NOTE(review): when the bit count is not a multiple of 8 this masks
-- the padding bits of the last byte *in place*, i.e. it writes into
-- the array's own backing buffer before packing -- confirm callers
-- never rely on those padding bits.
toByteStringIO :: Ix i => IOBitArray i -> IO ByteString
toByteStringIO a = do
  bs <- IO.getBounds a
  let rs = rangeSize bs
      bytes = (rs + 7) `shiftR` 3       -- bytes needed, rounded up
      bits = rs .&. 7                   -- leftover bits in the last byte
      lastByte = bytes - 1
  withForeignPtr (iobData a) $ \p -> do
    when (bits /= 0) $ do
      b <- peekByteOff p lastByte
      pokeByteOff p lastByte (b .&. mask bits :: Word8)
    packCStringLen (castPtr p, bytes)
-- | Copy from a ByteString. Much like 'newListArray' but with packed bits.
--
-- Copies @min needed available@ bytes: a ByteString shorter than the
-- bounds require leaves the remaining bits at their initial False.
fromByteStringIO :: Ix i => (i, i) {- ^ bounds -} -> ByteString {- ^ packed elems -} -> IO (IOBitArray i)
fromByteStringIO bs s = do
  a <- IO.newArray bs False
  let rs = rangeSize bs
      bytes = (rs + 7) `shiftR` 3       -- bytes needed, rounded up
  unsafeUseAsCStringLen s $ \(src, len) ->
    withForeignPtr (iobData a) $ \dst ->
      copyBytes dst (castPtr src) (bytes `min` len)
  return a
|
ekmett/bitwise
|
src/Data/Array/BitArray/ByteString.hs
|
bsd-3-clause
| 2,558 | 0 | 17 | 501 | 643 | 354 | 289 | 47 | 1 |
-- Copyright: 2007-2012 Dino Morelli
-- License: BSD3 (see LICENSE)
-- Author: Dino Morelli <[email protected]>
module Util (
binPath, resourcesPath,
getProcessOutput, getBinaryOutput,
assertFalse
)
where
import System.IO
import System.Process
import Test.HUnit
-- | Path to the photoname binary under test, relative to the repo root.
binPath :: FilePath
binPath = "dist/build/photoname/photoname"
-- | Directory holding test fixture files, relative to the repo root.
resourcesPath :: FilePath
resourcesPath = "testsuite/resources"
{- | Run a process and capture its standard output.

   Caveats (kept from the original note): STDERR is not drained or
   captured, the exit code is not checked, and the lazy 'hGetContents'
   means the output is only fully read when forced by the caller.

   XXX Move this somewhere logical like Photoname.Util
-}
getProcessOutput :: FilePath -> [String] -> IO (String, ProcessHandle)
getProcessOutput path' args = do
   (_, outH, _, procH) <- runInteractiveProcess path' args Nothing Nothing
   output <- hGetContents outH
   return (output, procH)
-- | Run the photoname binary (at 'binPath') with the given arguments
-- and capture its standard output.
getBinaryOutput :: [String] -> IO (String, ProcessHandle)
getBinaryOutput args = getProcessOutput binPath args
-- | Dual of 'assertBool': the assertion succeeds when the condition
-- is False.
assertFalse :: String -> Bool -> Assertion
assertFalse lbl cond = assertBool lbl (not cond)
|
tkawachi/photoname
|
testsuite/Util.hs
|
bsd-3-clause
| 1,047 | 0 | 8 | 179 | 207 | 115 | 92 | 20 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeOperators #-}
module Servant.Ekg where
import Control.Concurrent.MVar
import Control.Exception
import Control.Monad
import qualified Data.HashMap.Strict as H
import Data.Monoid
import Data.Proxy
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Data.Time.Clock
import GHC.TypeLits
import Network.HTTP.Types (Method, Status (..))
import Network.Wai
import Servant.API
import System.Metrics
import qualified System.Metrics.Counter as Counter
import qualified System.Metrics.Distribution as Distribution
import qualified System.Metrics.Gauge as Gauge
-- | Track the number of requests currently being served: the gauge is
-- incremented when a request starts and decremented when it finishes;
-- 'bracket_' guarantees the decrement even if the app throws.
gaugeInflight :: Gauge.Gauge -> Middleware
gaugeInflight inflight application request respond =
    bracket_ (Gauge.inc inflight)
             (Gauge.dec inflight)
             (application request respond)
-- | Count responses with 2XX, 4XX, 5XX, and XXX response codes.
countResponseCodes
    -- | Counters for (2XX, 4XX, 5XX, everything else), in that order.
    :: (Counter.Counter, Counter.Counter, Counter.Counter, Counter.Counter)
    -> Middleware
countResponseCodes (c2XX, c4XX, c5XX, cXXX) application request respond =
    application request respond'
  where
    -- Wrap the continuation so the status is counted before the
    -- original respond runs.
    respond' res = count (responseStatus res) >> respond res
    count Status{statusCode = sc }
        | 200 <= sc && sc < 300 = Counter.inc c2XX
        | 400 <= sc && sc < 500 = Counter.inc c4XX
        | 500 <= sc && sc < 600 = Counter.inc c5XX
        | otherwise             = Counter.inc cXXX
-- | Record each request's wall-clock duration into the distribution,
-- in milliseconds.
responseTimeDistribution :: Distribution.Distribution -> Middleware
responseTimeDistribution dist application request respond =
    bracket getCurrentTime stop $ const $ application request respond
  where
    stop t1 = do
      t2 <- getCurrentTime
      let dt = diffUTCTime t2 t1
      -- NominalDiffTime is in seconds; scale to ms before recording.
      Distribution.add dist $ fromRational $ (*1000) $ toRational dt
-- | Per-endpoint metric handles, created lazily by 'monitorEndpoints'
-- the first time a given path is seen.
data Meters = Meters
    { metersInflight :: Gauge.Gauge        -- ^ requests in flight
    , metersC2XX     :: Counter.Counter    -- ^ 2XX responses
    , metersC4XX     :: Counter.Counter    -- ^ 4XX responses
    , metersC5XX     :: Counter.Counter    -- ^ 5XX responses
    , metersCXXX     :: Counter.Counter    -- ^ any other status
    , metersTime     :: Distribution.Distribution -- ^ response time (ms)
    }
-- | Middleware that meters every request per endpoint: in-flight
-- gauge, response-code counters and a response-time distribution.
-- Requests that match no known endpoint are lumped under "unknown".
monitorEndpoints
    :: HasEndpoint api
    => Proxy api
    -> Store
    -> MVar (H.HashMap Text Meters)  -- ^ cache of per-path 'Meters'
    -> Middleware
monitorEndpoints proxy store meters application request respond = do
    -- Metric name: dotted path pieces plus the HTTP method.
    let path = case getEndpoint proxy request of
            Nothing -> "unknown"
            Just (ps,method) -> T.intercalate "." $ ps <> [T.decodeUtf8 method]
    -- Look up (or lazily create, under the MVar lock) this path's meters.
    Meters{..} <- modifyMVar meters $ \ms -> case H.lookup path ms of
        Nothing -> do
            let prefix = "servant.path." <> path <> "."
            metersInflight <- createGauge (prefix <> "in_flight") store
            metersC2XX <- createCounter (prefix <> "responses.2XX") store
            metersC4XX <- createCounter (prefix <> "responses.4XX") store
            metersC5XX <- createCounter (prefix <> "responses.5XX") store
            metersCXXX <- createCounter (prefix <> "responses.XXX") store
            metersTime <- createDistribution (prefix <> "time_ms") store
            let m = Meters{..}
            return (H.insert path m ms, m)
        Just m -> return (ms,m)
    -- Stack the three metric middlewares around the application.
    let application' =
            responseTimeDistribution metersTime .
            countResponseCodes (metersC2XX, metersC4XX, metersC5XX, metersCXXX) .
            gaugeInflight metersInflight $
            application
    application' request respond
-- | Types that can map a WAI 'Request' onto the path pieces and method
-- of the servant endpoint it would hit; used to name EKG metrics.
class HasEndpoint a where
    getEndpoint :: Proxy a -> Request -> Maybe ([Text], Method)
-- Alternation: try the left API, then the right.
instance (HasEndpoint (a :: *), HasEndpoint (b :: *)) => HasEndpoint (a :<|> b) where
    getEndpoint _ req =
        getEndpoint (Proxy :: Proxy a) req `mplus`
        getEndpoint (Proxy :: Proxy b) req
-- Static path piece: must match the first path segment literally.
instance (KnownSymbol (path :: Symbol), HasEndpoint (sub :: *))
    => HasEndpoint (path :> sub) where
    getEndpoint _ req =
        case pathInfo req of
            p:ps | p == T.pack (symbolVal (Proxy :: Proxy path)) -> do
                (end, method) <- getEndpoint (Proxy :: Proxy sub) req{ pathInfo = ps }
                return (p:end, method)
            _ -> Nothing
-- Capture: consumes one segment, rendered as ":name" in the metric.
instance (KnownSymbol (capture :: Symbol), HasEndpoint (sub :: *))
    => HasEndpoint (Capture capture a :> sub) where
    getEndpoint _ req =
        case pathInfo req of
            _:ps -> do
                (end, method) <- getEndpoint (Proxy :: Proxy sub) req{ pathInfo = ps }
                let p = T.pack $ (':':) $ symbolVal (Proxy :: Proxy capture)
                return (p:end, method)
            _ -> Nothing
-- The combinators below consume no path segments; just delegate.
instance HasEndpoint (sub :: *) => HasEndpoint (Header h a :> sub) where
    getEndpoint _ = getEndpoint (Proxy :: Proxy sub)
instance HasEndpoint (sub :: *) => HasEndpoint (QueryParam (h :: Symbol) a :> sub) where
    getEndpoint _ = getEndpoint (Proxy :: Proxy sub)
instance HasEndpoint (sub :: *) => HasEndpoint (QueryParams (h :: Symbol) a :> sub) where
    getEndpoint _ = getEndpoint (Proxy :: Proxy sub)
instance HasEndpoint (sub :: *) => HasEndpoint (QueryFlag h :> sub) where
    getEndpoint _ = getEndpoint (Proxy :: Proxy sub)
instance HasEndpoint (sub :: *) => HasEndpoint (ReqBody cts a :> sub) where
    getEndpoint _ = getEndpoint (Proxy :: Proxy sub)
instance HasEndpoint (sub :: *) => HasEndpoint (RemoteHost :> sub) where
    getEndpoint _ = getEndpoint (Proxy :: Proxy sub)
instance HasEndpoint (sub :: *) => HasEndpoint (IsSecure :> sub) where
    getEndpoint _ = getEndpoint (Proxy :: Proxy sub)
instance HasEndpoint (sub :: *) => HasEndpoint (HttpVersion :> sub) where
    getEndpoint _ = getEndpoint (Proxy :: Proxy sub)
instance HasEndpoint (sub :: *) => HasEndpoint (Vault :> sub) where
    getEndpoint _ = getEndpoint (Proxy :: Proxy sub)
instance HasEndpoint (sub :: *) => HasEndpoint (WithNamedContext x y sub) where
    getEndpoint _ = getEndpoint (Proxy :: Proxy sub)
-- Leaf: matches only an exhausted path with the right HTTP method.
instance ReflectMethod method => HasEndpoint (Verb method status cts a) where
    getEndpoint _ req = case pathInfo req of
        [] | requestMethod req == method -> Just ([], method)
        _ -> Nothing
        where method = reflectMethod (Proxy :: Proxy method)
-- Raw endpoints are reported under a fixed "RAW" pseudo-method.
instance HasEndpoint (Raw) where
    getEndpoint _ _ = Just ([],"RAW")
|
orangefiredragon/bear
|
src/Servant/Ekg.hs
|
bsd-3-clause
| 6,603 | 0 | 19 | 1,754 | 2,045 | 1,068 | 977 | 136 | 3 |
{-# LANGUAGE TupleSections, OverloadedStrings #-}
module Handler.Molecule
( getMoleculeR
, postMoleculeR
) where
import Import
import Data.Conduit(($$))
import Data.Conduit.List(consume)
import Data.Text.Encoding(decodeUtf8)
import Ouch.Input.Smiles(readSmi)
import Ouch.Output.Smiles(writeSmiles)
import Ouch.Property.Composition(atomCount, molecularFormula, molecularWeight)
import Ouch.Structure.Molecule(Molecule)
import qualified Data.ByteString as B
import qualified Data.Text as T
import qualified Ouch.Property.Builder as OPB
import Text.Printf(printf)
-- | GET handler.  @cmd@ selects what to show: only "sample" yields
-- molecules (the built-in sample set with an empty file name); any
-- other value, including "upload", renders the page with nothing
-- loaded.
getMoleculeR :: String -> Handler RepHtml
getMoleculeR cmd = do
  let moleculesAndFilename =
        case cmd of
          "sample" -> Just (sampleMolecules, "")
          "upload" -> Nothing :: Maybe ([Molecule], Text)
          _ -> Nothing :: Maybe ([Molecule], Text)
  -- moleculesAndFilename / formWidget / formEnctype are consumed
  -- inside the spliced template below.
  (formWidget, formEnctype) <- generateFormPost importForm
  defaultLayout $ do
    $(widgetFile "molecule")
-- | POST handler: process the uploaded SMILES file (if the form
-- succeeded) and render the same page with the parsed molecules.
-- @cmd@ is accepted for route symmetry with 'getMoleculeR' but not
-- inspected here.
postMoleculeR :: String -> Handler RepHtml
postMoleculeR cmd = do
  ((result, formWidget), formEnctype) <- runFormPost importForm
  moleculesAndFilename <- processFile result
  defaultLayout $ do
    $(widgetFile "molecule")
-- | Single-field upload form asking for a SMILES file.
importForm :: Form FileInfo
importForm = renderDivs $ fileAFormReq "Choose a SMILES file:"
-- | On form success, read the uploaded file (one SMILES string per
-- line, UTF-8) and parse every line into a molecule, returning the
-- molecules together with the original file name.  Any other form
-- result yields Nothing.
processFile :: FormResult FileInfo -> Handler (Maybe ([Molecule], Text))
processFile formRes =
  case formRes of
    FormSuccess fi -> do
      -- consume the upload's conduit source into in-memory chunks
      molecules <- lift $ toMolecules <$> (fileSource fi $$ consume)
      let fName = fileName fi
      return . Just $ (molecules, fName)
    _ -> return Nothing
  where
    toMolecules :: [B.ByteString] -> [Molecule]
    toMolecules = map (readSmi . T.unpack) . smiles
      where
        -- decode the concatenated chunks and split into lines
        smiles :: [B.ByteString] -> [Text]
        smiles = T.lines . decodeUtf8 . B.concat
-- | Molecular formula rendered via the generic property printer.
showMolForm :: Molecule -> String
showMolForm = showProperty molecularFormula
-- | Atom count rendered via the generic property printer.
showAtomCount :: Molecule -> String
showAtomCount = showProperty atomCount
-- | Molecular weight formatted to two decimal places.
-- NOTE(review): the irrefutable DoubleValue pattern crashes if the
-- property ever yields a non-double value -- confirm molecularWeight
-- always evaluates to a DoubleValue.
showMolWeight :: Molecule -> String
showMolWeight mol = let (OPB.DoubleValue d) = getPropertyValue molecularWeight mol
                    in printf "%.2f\n" d
-- | Extract a property's value: either it is stored directly (Left)
-- or computed by applying the property's function to the molecule
-- (Right).
getPropertyValue :: OPB.Property -> Molecule -> OPB.Value
getPropertyValue prop m = either id ($ m) (OPB.value prop)
-- | Render a property's value for a molecule with 'show'.
showProperty :: OPB.Property -> Molecule -> String
showProperty p = show . getPropertyValue p
-- | Built-in sample set of compounds, given as SMILES strings and
-- parsed on demand (the list is lazy).
sampleMolecules :: [Molecule]
sampleMolecules = map readSmi
  [ "[H]C([H])([H])[H]"
  , "[H]N([H])[H]"
  , "[H]O[H]"
  , "[H][N+]([H])([H])[H]"
  , "F[H]"
  , "C=C"
  , "CC"
  , "OO"
  , "[OH-].[Na+]"
  , "O=C=O"
  ]
|
mkrauskopf/ouch-web
|
Handler/Molecule.hs
|
bsd-3-clause
| 2,705 | 0 | 15 | 617 | 760 | 410 | 350 | -1 | -1 |
{-# LANGUAGE CPP #-}
module Main where
import Control.Applicative
import Control.Monad
import Control.Monad.Reader
import Control.Monad.Error
import Control.Concurrent
import Control.Concurrent.STM
import qualified Data.Map as DM
import Data.Maybe
import qualified Data.Text as DT
import Data.Version
import Distribution.FreeBSD.Common
import Distribution.FreeBSD.Port hiding (normalize)
import Distribution.FreeBSD.Update
import Distribution.Package
import System.Directory
import System.FilePath.Posix
import System.IO
import Text.Printf
#ifdef STANDALONE
import Paths_hsporter
#endif
import Data.List
-- | Read the key=value configuration file and assemble the runtime
-- 'Cfg': directory paths, build options, the platform description
-- (GHC core libraries plus platform libraries) and the thread count.
--
-- NOTE(review): the (%!%) lookups and the 'read' on "threads"
-- presumably fail hard when a key is missing or malformed -- confirm
-- against the definition of (%!%).
getConfiguration :: FilePath -> IO Cfg
getConfiguration path = do
  contents <- (normalize . DT.lines . DT.pack) <$> readFile path
  let m = DM.fromList $ mapMaybe formatLine contents
  ghcConf <- getGhcConf
  catsConf <- getCategoriesConf
  ghcLibs <- readFile ghcConf
  platLibs <- getPlatformConf >>= readFile
  let platform = Platform $ ghcLibs ++ platLibs
  let baselibs = Distribution.FreeBSD.Update.getBaseLibs platform
  return $
    Cfg (m %!% "dbdir") (m %!% "portsdir") (m %!% "updatesdir")
      platform (BuildOpts ghcConf catsConf) baselibs
      (read $ m %!% "threads")
  where
    -- One "key = value" line becomes (lowercased key, value);
    -- lines without a '=' are dropped.
    formatLine line =
      case (DT.strip <$> DT.splitOn (DT.pack "=") line) of
        (key:val:_) -> Just (DT.unpack . DT.toLower $ key,DT.unpack val)
        _ -> Nothing
-- | Render the learned updates one per line, sorted; entries for which
-- the formatter yields Nothing are dropped.
showUpdates :: (PortUpdate -> Maybe String) -> HPM String
showUpdates format = do
  ups <- learnUpdates =<< initialize
  return (unlines (sort (mapMaybe format ups)))
-- | Fetch one package description, logging the package/version before
-- and a "done." marker after the download.
fetchCabalFile :: PV -> HPM ()
fetchCabalFile pv = do
  liftIO $ putStr (show pv)
  fetchCabalFile' pv
  liftIO $ putStrLn "done."
-- | Download the .cabal file for the given package/version into the
-- configured database directory, silently (no logging).
fetchCabalFile' :: PV -> HPM ()
fetchCabalFile' pv@(PV (PackageName pn,v)) = do
  let uri = getCabalURI (pn,showVersion v)
  dbdir <- asks cfgDbDir
  liftIO $ downloadFile uri >>= writeFile (dbdir </> cabal (show pv))
-- | Ensure @dir@ exists and is empty: remove it (and its contents) if
-- present, then recreate it.
--
-- Bug fix: 'removeDirectoryRecursive' throws when the directory does
-- not exist, so the first run against a fresh checkout crashed; the
-- removal is now guarded by 'doesDirectoryExist'.
resetDirectory :: FilePath -> IO ()
resetDirectory dir = do
  exists <- doesDirectoryExist dir
  when exists $ removeDirectoryRecursive dir
  createDirectoryIfMissing True dir
-- | Wipe the cabal-description cache directory and re-download the
-- description of every Hackage version that is not older than the one
-- currently in the ports tree.
downloadCabalFiles :: Ports -> HDM -> HPM ()
downloadCabalFiles (Ports ports) hdm = do
  dbdir <- asks cfgDbDir
  liftIO $ resetDirectory dbdir
  forM_ ports $ \(p,_,v) ->
    -- Inner binder renamed to v': the original shadowed the port's
    -- current version v, which was easy to misread.
    forM_ (filter (>= v) $ hdm %!% p) $ \v' ->
      fetchCabalFile (PV (p,v'))
-- | Scan the ports tree's hackage Makefile, read each listed port's
-- own Makefile, and write "name category version" lines (skipping
-- metaports) to 'portVersionsFile'.
--
-- NOTE(review): the 'head' / (!! 1) uses assume every Makefile has
-- well-formed PORTNAME= and PORTVERSION= lines; a malformed port
-- would crash the scan -- confirm that is acceptable here.
cachePortVersions :: HPM ()
cachePortVersions = do
  portsDir <- asks cfgPortsDir
  let mk = portsDir </> bsdHackageMk
  contents <- liftIO $ (normalize . DT.lines . DT.pack) <$> readFile mk
  versions <- fmap catMaybes $
    -- second word of each line is the port's directory
    forM (map (DT.unpack . (!! 1) . DT.words) contents) $ \d -> do
      port <- liftIO $ readFile $ portsDir </> d </> mkfile
      let cat = takeDirectory d
      let k = filter (not . null) . map words . lines $ port
      let pnLine = head $ filter ((== "PORTNAME=") . head) k
      let pvLine = head $ filter ((== "PORTVERSION=") . head) k
      let metaport = not . null $ filter ((== "METAPORT=") . head) k
      return $
        if (not metaport)
          then Just $ unwords [pnLine !! 1, cat, pvLine !! 1]
          else Nothing
  liftIO $ writeFile portVersionsFile $ unlines versions
-- | Download the Hackage package-index log into the local cache file.
cacheHackageDB :: IO ()
cacheHackageDB = downloadFile hackageLogURI >>= writeFile hackageLog
-- | Rebuild the local database: read the cached port versions, build the
-- Hackage database from the cached log and fetch the relevant .cabal files.
cacheDB :: HPM ()
cacheDB = do
  ports <- getPortVersions portVersionsFile
  hdm <- buildHackageDatabase hackageLog
  downloadCabalFiles ports hdm
-- | Refresh every local cache: port versions, the Hackage log and the
-- cached .cabal package descriptions, with progress messages on stdout.
cache :: HPM ()
cache = do
  liftIO $ do
    -- Fixed typo in the user-visible message (was "Colllecting:").
    putStrLn "Collecting:"
    putStr "Port information..."
  cachePortVersions
  liftIO $ do
    putStrLn "done."
    putStr "HackageDB information..."
    cacheHackageDB
    putStrLn "done."
    putStr "Cabal package descriptions..."
  cacheDB
  liftIO $ putStrLn "done."
-- | Generate updated port files for one learned update, provided the new
-- version is strictly newer than the currently ported one; otherwise a
-- silent no-op.
fetchPort :: PortUpdate -> HPM ()
fetchPort
  (PU { puPackage = p@(PackageName pn)
      , puCategory = Category ct
      , puOldVersion = v
      , puNewVersion = v1
      }) =
  when (v < v1) $ do
    buildopts <- asks cfgBuildOpts
    dbdir <- asks cfgDbDir
    updatesDir <- asks cfgUpdatesDir
    liftIO $ do
      -- NOTE(review): partial pattern match -- if the cached .cabal file
      -- for the new version is missing or unparsable, this 'Just' match
      -- fails at runtime.  (The bound 'pn' above is currently unused.)
      Just gpkg <- getDescriptionFromFile $ dbdir </> cabal (show $ PV (p,v1))
      (ppath,port) <- buildPort buildopts gpkg (Just ct)
      createPortFiles (updatesDir </> ppath) port
-- | Clear the updates directory, then regenerate port files for every
-- applicable update, downloading descriptions in parallel.
downloadUpdates :: HPM ()
downloadUpdates = do
  updatesDir <- asks cfgUpdatesDir
  liftIO $ do
    putStrLn "Update starts."
    -- Same remove-then-create sequence as before, via the shared helper.
    resetDirectory updatesDir
  updates <- learnUpdates =<< initialize
  parallel "Downloaded: %s" fetchPort updates
  liftIO $ putStrLn "Update finished."
-- | Run an HPM action under the configuration loaded from the default
-- configuration file, or print an error message when the file is absent.
runCfg :: HPM () -> IO ()
runCfg action =
  doesFileExist cfg >>= \present ->
    if present
      then getConfiguration cfg >>= runHPM action
      else putStrLn (printf "No \"%s\" found. Aborting." cfg)
-- | Locations of the three auxiliary configuration files.  When built
-- standalone (-DSTANDALONE) they are resolved through Cabal's data
-- directory; otherwise they are read from the current directory.  Note the
-- unusual top-level list pattern binding: all three names are bound
-- (lazily) from a single three-element list.
[getPlatformConf,getGhcConf,getCategoriesConf] =
#ifdef STANDALONE
  [ getDataFileName "platform.conf"
  , getDataFileName "ghc.conf"
  , getDataFileName "categories.conf"
  ]
#else
  [ return "platform.conf" :: IO FilePath
  , return "ghc.conf" :: IO FilePath
  , return "categories.conf" :: IO FilePath
  ]
#endif
-- | Print the human-readable overview of all available updates.
cmdPrintUpdates :: IO ()
cmdPrintUpdates = runCfg $
  showUpdates prettyUpdateLine >>= liftIO . putStrLn
-- | Print the compact (log-style) form of all available updates.
cmdPrintUpdateLogs :: IO ()
cmdPrintUpdateLogs = runCfg $
  showUpdates compactUpdateLine >>= liftIO . putStrLn
-- | Regenerate updated ports under the configured updates directory.
cmdDownloadUpdates :: IO ()
cmdDownloadUpdates = runCfg downloadUpdates
-- | Refresh only the cached port-version listing.
cmdUpdatePortVersions :: IO ()
cmdUpdatePortVersions = runCfg cachePortVersions
-- | A unit of work for the parallel worker pool; 'Done' is the poison pill
-- that tells a worker to terminate.
data Task a = Do a | Done
-- | A package name paired with one concrete version.
newtype PV = PV (PackageName,Version)
  deriving Eq
-- | Rendered as @name-version@, matching .cabal file naming.
instance Show PV where
  show (PV (PackageName n,v)) = printf "%s-%s" n (showVersion v)
-- | Endlessly drain the \"done\" channel, printing each finished item with
-- the given printf format and flushing stdout so progress stays visible.
displayDone :: Show a => String -> TChan a -> IO ()
displayDone fmt chan = forever step
  where
    step = do
      item <- atomically (readTChan chan)
      putStrLn (printf fmt (show item))
      hFlush stdout
-- | Pool worker: repeatedly take a job from the queue, run the action on
-- its payload and report it on the \"done\" channel; stop on 'Done'.
worker :: (a -> HPM ()) -> TChan a -> TChan (Task a) -> HPM ()
worker act done queue = go
  where
    go = liftIO (atomically (readTChan queue)) >>= dispatch
    dispatch Done     = return ()
    dispatch (Do job) = do
      act job
      liftIO . atomically $ writeTChan done job
      go
-- | Update a 'TVar' in place with a function (pre-dates stm's
-- 'modifyTVar').  NOTE(review): the new value is not forced, so repeated
-- updates build a thunk chain -- harmless for the small counters used here.
modifyTVar_ :: TVar a -> (a -> a) -> STM ()
modifyTVar_ tv f = readTVar tv >>= writeTVar tv . f
-- | Fork @k@ worker threads, each running the given HPM action under the
-- supplied configuration and decrementing the \"alive\" counter on exit.
-- NOTE(review): if the action throws, the decrement is skipped and
-- 'parallel' would block forever waiting for the counter -- TODO confirm
-- whether worker actions are expected to be exception-free.
forkTimes :: Int -> Cfg -> TVar Int -> HPM () -> IO ()
forkTimes k cfg alive act =
  replicateM_ k . forkIO $ do
    runHPM act cfg
    (atomically $ modifyTVar_ alive (subtract 1))
-- | Run @cmd@ over every element of @queue@ on a pool of 'cfgThreads'
-- worker threads, printing each completed item via the printf format.
-- Completion is awaited by blocking in STM until the worker counter
-- reaches zero.
parallel :: Show a => String -> (a -> HPM ()) -> [a] -> HPM ()
parallel fmt cmd queue = do
  cfg <- ask
  let k = cfgThreads cfg
  done <- liftIO $ newTChanIO
  jobs <- liftIO $ newTChanIO
  workers <- liftIO $ newTVarIO k
  liftIO $ do
    -- Printer thread; it simply blocks on the empty channel after the
    -- last worker finishes (its ThreadId is deliberately discarded).
    forkIO (displayDone fmt done)
    forkTimes k cfg workers (worker cmd done jobs)
    -- Enqueue all jobs, then one 'Done' poison pill per worker.
    atomically $ mapM_ (writeTChan jobs . Do) queue
    atomically $ replicateM_ k (writeTChan jobs Done)
    -- Block until every worker has decremented the counter.
    atomically $ do
      running <- readTVar workers
      check (running == 0)
-- | Refresh the Hackage log, then fetch the newest .cabal description of
-- every known port (in parallel), and afterwards fetch descriptions for
-- any of their dependencies that are not yet cached.
cmdGetLatestHackageVersions :: IO ()
cmdGetLatestHackageVersions = runCfg $ do
  liftIO $ putStrLn "Initializing..."
  liftIO $ cacheHackageDB
  Ports ports <- getPortVersions portVersionsFile
  hdm <- buildHackageDatabase hackageLog
  dbdir <- asks cfgDbDir
  liftIO $ resetDirectory dbdir
  -- For each port, pick the newest Hackage version; ports unknown to
  -- Hackage are reported and skipped.
  queue <- fmap catMaybes $ forM ports $ \(p@(PackageName pn),_,v) -> do
    let available = hdm %!% p
    if (not . null $ available)
      then
        return $ Just (PV (p, maximum available))
      else liftIO $ do
        putStrLn $ "Cannot be got: " ++ pn ++ ", " ++ showVersion v
        putStrLn $ "hdm: " ++ intercalate ", " (map showVersion (hdm %!% p))
        return Nothing
  liftIO $ putStrLn "Fetching new versions..."
  parallel "Fetched: %s" fetchCabalFile' queue
  core <- asks cfgBaseLibs
  cpm <- buildCabalDatabase
  -- Collect dependencies that are neither base libraries nor already
  -- present in the cabal database.  NOTE(review): the inner
  -- @forM ... >>= catMaybes@ runs in the *list* monad (the outer 'return'
  -- wraps a pure list computation) -- subtle, but type-checked as such.
  new <- fmap (nub . concat) $ forM (DM.toList cpm) $ \(_,gpkgd) -> do
    return $
      forM (getDependencies gpkgd) (\(Dependency pk _) -> do
        if (pk `elem` [name | (name,_) <- core])
          then return Nothing
          else do
            let available = hdm %!% pk
            let versions = repeat pk `zip` available
            return $
              case (catMaybes $ flip DM.lookup cpm <$> versions) of
                [] -> Just (PV (pk, maximum available))
                _ -> Nothing)
      >>= catMaybes
  when (not . null $ new) $ do
    liftIO $ putStrLn "Fetching new dependencies..."
    parallel "Fetched: %s" fetchCabalFile' new
-- | Remove every cached .cabal file belonging to the given package (any
-- version) from the database directory, then download the requested one.
fetchCabal :: PV -> HPM ()
fetchCabal pv@(PV (p@(PackageName name),version)) = do
  dbDir <- asks cfgDbDir
  files <- liftIO $ filter (f name) <$> getDirectoryContents dbDir
  liftIO $ mapM_ removeFile $ map (dbDir </>) files
  fetchCabalFile' pv
  where
    -- @f r x@: does file name @x@ look like @r-<version>...@?  The name is
    -- reversed and broken at the *last* '-' of the original string; the
    -- part after that dash (reversed back, minus the dash itself) must
    -- equal the package name @r@.
    f r x
      | null l = False
      | otherwise = (reverse $ tail l) == r
      where
        l = snd . break (== '-') . reverse $ x
-- | Fetch (re-download) the .cabal file for a specific package version,
-- with progress output.
cmdFetchCabal :: String -> String -> IO ()
cmdFetchCabal n v = do
  putStr (printf "Fetching %s-%s..." n v)
  runCfg $ fetchCabal (PV (PackageName n,toVersion v))
  putStrLn "done."
-- | Fetch the newest Hackage version of the given package.
-- NOTE(review): 'last' is partial -- a package unknown to the Hackage
-- database crashes here instead of reporting a friendly error.
cmdFetchLatestCabal :: String -> IO ()
cmdFetchLatestCabal n = runCfg $ do
  hdm <- buildHackageDatabase hackageLog
  let latest = last $ hdm %!% (PackageName n)
  liftIO $ cmdFetchCabal n (showVersion latest)
-- | Print all Hackage versions known for the given package name,
-- comma-separated.
cmdPrintCabalVersions :: String -> IO ()
cmdPrintCabalVersions name = runCfg $ do
  hdm <- buildHackageDatabase hackageLog
  let versions = intercalate ", " $ showVersion <$> hdm %!% (PackageName name)
  liftIO $ putStrLn versions
-- | Report whether the given package/version may be updated to: prints the
-- packages restricting it, the dependencies it cannot satisfy, or \"OK!\"
-- when neither list is non-empty.
cmdIsVersionAllowed :: String -> String -> IO ()
cmdIsVersionAllowed name version = runCfg $ do
  (hdm,cpm,vcm,_) <- initialize
  (rs,dp) <- isVersionAllowed hdm cpm vcm pk
  let restricted = [ p | ((PackageName p,_),_) <- rs ]
  let unsatisfied = [ d | (PackageName d,_) <- dp ]
  liftIO $ do
    when (not . null $ restricted) $
      putStrLn $ "Restricted by: " ++ intercalate ", " restricted
    when (not . null $ unsatisfied) $
      putStrLn $ "Unsatisfied by: " ++ intercalate ", " unsatisfied
    when (null restricted && null unsatisfied) $
      putStrLn "OK!"
  where
    pk = (PackageName name, toVersion version)
-- | All old package versions that may be pruned: updates that are blocked
-- (restricted or unsatisfied) but whose new version is strictly newer.
allPruneableUpdates :: HPM [PV]
allPruneableUpdates = do
  updates <- initialize >>= learnUpdates
  return $ mapMaybe toPrune updates
  where
    -- Equation order matters: an unblocked update (both lists empty) is
    -- never pruned; otherwise prune the *old* version when a newer one
    -- exists; everything else (including v >= v1, falling through the
    -- guard) yields Nothing.
    toPrune (PU { puRestrictedBy = [], puUnsatisfiedBy = [] }) = Nothing
    toPrune (PU { puPackage = p, puOldVersion = v, puNewVersion = v1 })
      | v < v1 = Just $ PV (p,v)
    toPrune _ = Nothing
-- | Package versions whose pruning would let the given package/version's
-- dependencies be satisfied.
pruneableFor :: String -> String -> HPM [PV]
pruneableFor p v = do
  cpm <- buildCabalDatabase
  hdm <- buildHackageDatabase hackageLog
  map PV <$> satisfyingDependencies hdm cpm (PackageName p, toVersion v)
-- | Print the packages the given query considers pruneable, or a friendly
-- message when there are none.
cmdShowPruneableBy :: HPM [PV] -> IO ()
cmdShowPruneableBy query = runCfg $ do
  names <- fmap show <$> query
  let report | null names = "There are no pruneable updates."
             | otherwise  = unlines names
  liftIO (putStrLn report)
-- | Re-fetch (prune) the .cabal files selected by the given query, in
-- parallel.
cmdPruneBy :: HPM [PV] -> IO ()
cmdPruneBy query = do
  putStrLn "Initializing..."
  runCfg (query >>= parallel "Pruned: %s" fetchCabal)
-- | The full update pipeline: refresh every cache, show the status
-- overview, then generate the actual port updates.
body :: HPM ()
body = do
  cache
  updates <- showUpdates prettyUpdateLine
  liftIO $ do
    putStrLn "== Port Status Overview =="
    putStrLn updates
    putStrLn "== Actual Updates =="
  downloadUpdates
-- | Name of the configuration file looked up in the current directory.
cfg :: FilePath
cfg = "hsupdater.conf"
-- | Entry point.  Previously this duplicated 'runCfg': it checked for the
-- configuration file itself and printed the same error message.  Reusing
-- 'runCfg' keeps the check and the message in exactly one place, with the
-- same observable behaviour.
main :: IO ()
main = runCfg body
|
freebsd-haskell/hsporter
|
src-hsupdater/Main.hs
|
bsd-3-clause
| 11,307 | 0 | 31 | 2,612 | 4,077 | 1,978 | 2,099 | 306 | 4 |
{-# language CPP #-}
-- | = Name
--
-- VK_KHR_external_semaphore - device extension
--
-- == VK_KHR_external_semaphore
--
-- [__Name String__]
-- @VK_KHR_external_semaphore@
--
-- [__Extension Type__]
-- Device extension
--
-- [__Registered Extension Number__]
-- 78
--
-- [__Revision__]
-- 1
--
-- [__Extension and Version Dependencies__]
--
-- - Requires Vulkan 1.0
--
-- - Requires @VK_KHR_external_semaphore_capabilities@
--
-- [__Deprecation state__]
--
-- - /Promoted/ to
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#versions-1.1-promotions Vulkan 1.1>
--
-- [__Contact__]
--
-- - James Jones
-- <https://github.com/KhronosGroup/Vulkan-Docs/issues/new?body=[VK_KHR_external_semaphore] @cubanismo%0A<<Here describe the issue or question you have about the VK_KHR_external_semaphore extension>> >
--
-- == Other Extension Metadata
--
-- [__Last Modified Date__]
-- 2016-10-21
--
-- [__IP Status__]
-- No known IP claims.
--
-- [__Interactions and External Dependencies__]
--
-- - Promoted to Vulkan 1.1 Core
--
-- [__Contributors__]
--
-- - Jason Ekstrand, Intel
--
-- - Jesse Hall, Google
--
-- - Tobias Hector, Imagination Technologies
--
-- - James Jones, NVIDIA
--
-- - Jeff Juliano, NVIDIA
--
-- - Matthew Netsch, Qualcomm Technologies, Inc.
--
-- - Ray Smith, ARM
--
-- - Chad Versace, Google
--
-- == Description
--
-- An application using external memory may wish to synchronize access to
-- that memory using semaphores. This extension enables an application to
-- create semaphores from which non-Vulkan handles that reference the
-- underlying synchronization primitive can be exported.
--
-- == Promotion to Vulkan 1.1
--
-- All functionality in this extension is included in core Vulkan 1.1, with
-- the KHR suffix omitted. The original type, enum and command names are
-- still available as aliases of the core functionality.
--
-- == New Structures
--
-- - Extending 'Vulkan.Core10.QueueSemaphore.SemaphoreCreateInfo':
--
-- - 'ExportSemaphoreCreateInfoKHR'
--
-- == New Enums
--
-- - 'SemaphoreImportFlagBitsKHR'
--
-- == New Bitmasks
--
-- - 'SemaphoreImportFlagsKHR'
--
-- == New Enum Constants
--
-- - 'KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME'
--
-- - 'KHR_EXTERNAL_SEMAPHORE_SPEC_VERSION'
--
-- - Extending
-- 'Vulkan.Core11.Enums.SemaphoreImportFlagBits.SemaphoreImportFlagBits':
--
-- - 'SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR'
--
-- - Extending 'Vulkan.Core10.Enums.StructureType.StructureType':
--
-- - 'STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR'
--
-- == Issues
--
-- 1) Should there be restrictions on what side effects can occur when
-- waiting on imported semaphores that are in an invalid state?
--
-- __RESOLVED__: Yes. Normally, validating such state would be the
-- responsibility of the application, and the implementation would be free
-- to enter an undefined state if valid usage rules were violated. However,
-- this could cause security concerns when using imported semaphores, as it
-- would require the importing application to trust the exporting
-- application to ensure the state is valid. Requiring this level of trust
-- is undesirable for many potential use cases.
--
-- 2) Must implementations validate external handles the application
-- provides as input to semaphore state import operations?
--
-- __RESOLVED__: Implementations must return an error to the application if
-- the provided semaphore state handle cannot be used to complete the
-- requested import operation. However, implementations need not validate
-- handles are of the exact type specified by the application.
--
-- == Version History
--
-- - Revision 1, 2016-10-21 (James Jones)
--
-- - Initial revision
--
-- == See Also
--
-- 'ExportSemaphoreCreateInfoKHR', 'SemaphoreImportFlagBitsKHR',
-- 'SemaphoreImportFlagsKHR'
--
-- == Document Notes
--
-- For more information, see the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#VK_KHR_external_semaphore Vulkan Specification>
--
-- This page is a generated document. Fixes and changes should be made to
-- the generator scripts, not directly.
module Vulkan.Extensions.VK_KHR_external_semaphore ( pattern STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR
, pattern SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR
, SemaphoreImportFlagsKHR
, SemaphoreImportFlagBitsKHR
, ExportSemaphoreCreateInfoKHR
, KHR_EXTERNAL_SEMAPHORE_SPEC_VERSION
, pattern KHR_EXTERNAL_SEMAPHORE_SPEC_VERSION
, KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME
, pattern KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME
) where
import Data.String (IsString)
import Vulkan.Core11.Promoted_From_VK_KHR_external_semaphore (ExportSemaphoreCreateInfo)
import Vulkan.Core11.Enums.SemaphoreImportFlagBits (SemaphoreImportFlagBits)
import Vulkan.Core11.Enums.SemaphoreImportFlagBits (SemaphoreImportFlags)
import Vulkan.Core11.Enums.SemaphoreImportFlagBits (SemaphoreImportFlags)
import Vulkan.Core11.Enums.SemaphoreImportFlagBits (SemaphoreImportFlagBits(SEMAPHORE_IMPORT_TEMPORARY_BIT))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO))
-- | KHR alias for the core Vulkan 1.1 structure-type tag
-- 'STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO' (this extension was
-- promoted to core in 1.1).
pattern STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR = STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO
-- | KHR alias for the core semaphore-import flag
-- 'SEMAPHORE_IMPORT_TEMPORARY_BIT'.
pattern SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR = SEMAPHORE_IMPORT_TEMPORARY_BIT
-- | KHR alias for the core 'SemaphoreImportFlags' bitmask type.
type SemaphoreImportFlagsKHR = SemaphoreImportFlags
-- | KHR alias for the core 'SemaphoreImportFlagBits' enum type.
type SemaphoreImportFlagBitsKHR = SemaphoreImportFlagBits
-- | KHR alias for the core 'ExportSemaphoreCreateInfo' structure.
type ExportSemaphoreCreateInfoKHR = ExportSemaphoreCreateInfo
-- | Extension revision, fixed at 1 (type level).
type KHR_EXTERNAL_SEMAPHORE_SPEC_VERSION = 1
-- | Extension revision, fixed at 1 (value level).
pattern KHR_EXTERNAL_SEMAPHORE_SPEC_VERSION :: forall a . Integral a => a
pattern KHR_EXTERNAL_SEMAPHORE_SPEC_VERSION = 1
-- | Registered extension name string (type level).
type KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME = "VK_KHR_external_semaphore"
-- | Registered extension name string (value level).
pattern KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME :: forall a . (Eq a, IsString a) => a
pattern KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME = "VK_KHR_external_semaphore"
|
expipiplus1/vulkan
|
src/Vulkan/Extensions/VK_KHR_external_semaphore.hs
|
bsd-3-clause
| 7,000 | 0 | 8 | 1,376 | 394 | 301 | 93 | -1 | -1 |
-----------------------------------------------------------------------------
-- |
-- Module : Berp.Base.LiftedIO
-- Copyright : (c) 2010 Bernie Pope
-- License : BSD-style
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : ghc
--
-- Lifted versions of standard IO functions. Allows them to be used in any
-- MonadIO context. Saves us from having to write "liftIO" everywhere.
--
-----------------------------------------------------------------------------
module Berp.Base.LiftedIO
( liftIO, putStr, putStrLn, putChar, IORef, readIORef
, writeIORef, newIORef, MonadIO, hFlush, getLine
, hPutStr, hPutStrLn, hPutChar
) where
import Prelude hiding (putStr, putStrLn, getLine, putChar)
import qualified Prelude as P (putStr, putStrLn, getLine, putChar)
import Control.Monad.Trans (liftIO, MonadIO)
import Data.IORef hiding (readIORef, writeIORef, newIORef)
import qualified Data.IORef as IORef (readIORef, writeIORef, newIORef)
import qualified System.IO as SIO (hFlush, Handle, hPutStr, hPutStrLn, hPutChar)
-- | 'SIO.hPutStr' lifted into any 'MonadIO'.
hPutStr :: MonadIO m => SIO.Handle -> String -> m ()
hPutStr handle = liftIO . SIO.hPutStr handle
-- | 'SIO.hPutStrLn' lifted into any 'MonadIO'.
hPutStrLn :: MonadIO m => SIO.Handle -> String -> m ()
hPutStrLn handle = liftIO . SIO.hPutStrLn handle
-- | 'P.putStr' lifted into any 'MonadIO'.
putStr :: MonadIO m => String -> m ()
putStr = liftIO . P.putStr
-- | 'P.putStrLn' lifted into any 'MonadIO'.
putStrLn :: MonadIO m => String -> m ()
putStrLn = liftIO . P.putStrLn
-- | 'P.putChar' lifted into any 'MonadIO'.
putChar :: MonadIO m => Char -> m ()
putChar = liftIO . P.putChar
-- | 'SIO.hPutChar' lifted into any 'MonadIO'.
hPutChar :: MonadIO m => SIO.Handle -> Char -> m ()
hPutChar h = liftIO . SIO.hPutChar h
-- | 'IORef.readIORef' lifted into any 'MonadIO'.
readIORef :: MonadIO m => IORef a -> m a
readIORef = liftIO . IORef.readIORef
-- | 'IORef.writeIORef' lifted into any 'MonadIO'.  (Locals renamed: the
-- first argument is the reference, the second the value to store.)
writeIORef :: MonadIO m => IORef a -> a -> m ()
writeIORef ref val = liftIO (IORef.writeIORef ref val)
-- | 'IORef.newIORef' lifted into any 'MonadIO'.
newIORef :: MonadIO m => a -> m (IORef a)
newIORef = liftIO . IORef.newIORef
-- | 'P.getLine' lifted into any 'MonadIO'.
getLine :: MonadIO m => m (String)
getLine = liftIO P.getLine
-- | 'SIO.hFlush' lifted into any 'MonadIO'.
hFlush :: MonadIO m => SIO.Handle -> m ()
hFlush = liftIO . SIO.hFlush
|
bjpop/berp
|
libs/src/Berp/Base/LiftedIO.hs
|
bsd-3-clause
| 1,958 | 0 | 9 | 334 | 585 | 322 | 263 | 32 | 1 |
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE RankNTypes #-}
-- {-# LANGUAGE PolyKinds #-}
{-# LANGUAGE PartialTypeSignatures #-}
{-# LANGUAGE InstanceSigs #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE GADTs #-}
-- {-# OPTIONS_GHC -fno-warn-missing-methods #-}
module Lib
(
) where
import Data.Comp
import Data.Comp.Derive
import Data.Comp.Ops
import Data.Comp.Render
import Data.Comp.Matching
import Data.Rewriting.Rules
import Data.Rewriting.HigherOrder
import Data.String(IsString(..))
import Data.Maybe(fromMaybe)
import Data.Function (on)
import Data.Monoid
import Data.Proxy
import qualified Data.Set as Set
import Derive
-- | A student record functor: four uniform fields (see 'rStudent' below,
-- which types them as id/name/score/major when building terms).
data STUDENT a = Student a a a a deriving Show
--data LIT t (a :: *) = L {unL :: t} deriving Show
-- | A literal of type @a@, phantom in the recursion parameter @t@.
data LIT a t where
  L :: a -> LIT a t
-- | Unwrap a literal.
unL :: LIT a t -> a
unL (L a) = a
deriving instance Show a => Show (LIT a a)
-- | Three-way major enumeration (a constant functor).
data MAJOR a = English | Math | Physics deriving (Show,Eq)
-- Derive the compdata boilerplate (Functor/Foldable/Traversable, EqF,
-- ShowF, smart constructors and constructor names) for all three signatures.
$(derive [makeFunctor,makeTraversable,makeFoldable,
          makeEqF,makeShowF,smartConstructors,makeShowConstr] [''STUDENT,''LIT,''MAJOR])
$(derive [smartRep] [''MAJOR])
$(derive [makeEqF,makeShowF,smartConstructors,makeShowConstr] [''WILD])
-- Rendering instances so terms can be drawn with Data.Comp.Render.
instance Render STUDENT
instance Show t => Render (LIT t)
instance Render WILD
instance Render MAJOR
instance (MetaRep f ~ MetaId) => Render (META f)
-- | Metavariables render as their numeric identifier.
instance (MetaRep f ~ MetaId) => ShowConstr (META f) where
  showConstr (Meta (MVar (MetaId rep))) = show rep
-- | The complete term signature used by the examples below.
type SIG = MAJOR :+: STUDENT :+: LIT String :+: LIT Int :+: LIT Float :+: ADDONS
type ADDONS = VAR :+: LAM :+: APP -- Not needed as written, but allow higher order rewrite rules.
-- | Smart-constructor interface for majors.
class Major f where
  english :: f a
  math :: f a
  physics :: f a
-- | Smart-constructor interface for building student terms with typed
-- fields (id, name, score, major).
class St f where
  rStudent :: f Int -> f String -> f Float -> f (MAJOR a) -> f a
-- | Build a student term in any representation @r f@ whose pattern functor
-- contains STUDENT; field representations are erased to @()@ holes first.
instance (Rep (r f),Functor (PF (r f)),STUDENT :<: PF (r f),f :<: SIG) => St (r f) where
  rStudent a b c d = toRep $ toCxt $ iStudent (prep a) (prep b) (prep c) (prep d)
    where prep = fmap (const ()) . deepInject . fromRep
-- | Project a literal out of a represented term, falling back to @def@
-- when the root is not a 'L' constructor.
extract def = maybe def unL . proj . (\(Term m) -> m) . fromRep
-- | Numeric literals/arithmetic on represented terms: operands are
-- 'extract'ed with the operation's identity (0 or 1) as the fallback for
-- non-literal terms, combined, and re-wrapped as a literal.
instance (Rep (r f),LIT a :<: PF (r f),LIT a :<: f,Num a) => Num (r f a) where
  fromInteger = toRep . iL . (id :: a -> a) . fromInteger
  signum (extract (0::a) -> a) = toRep $ iL (signum a)
  abs (extract (0::a) -> a) = toRep $ iL (abs a)
  (extract (0::a) -> a) + (extract (0::a) -> b) = toRep $ iL $ a + b
  (extract (1::a) -> a) * (extract (1::a) -> b) = toRep $ iL $ a * b
  (extract (0::a) -> a) - (extract (0::a) -> b) = toRep $ iL $ a - b
-- | Fractional literals, mirroring the 'Num' instance above.
instance (Rep (r f),LIT a :<: PF (r f),LIT a :<: f,Fractional a) => Fractional (r f a) where
  fromRational = toRep . iL . (id :: a -> a) . fromRational
  recip (extract (1::a) -> a) = toRep $ iL $ recip a
-- -}
-- | String literals on represented terms (enables OverloadedStrings).
instance (Rep (r f),LIT a :<: PF (r f),LIT a :<: f,IsString a) => IsString (r f a) where
  fromString = toRep . iL . (id :: a -> a) . fromString
-- | Closed contexts represent themselves; 'fromRep' forgets hole types.
instance Functor f => Rep (Cxt NoHole f) where
  type PF (Cxt NoHole f) = f
  toRep = toCxt
  fromRep = fmap (const ())
-- | Example term: student 3 "hi" 2 majoring in English.
e3 :: Term SIG
e3 = rStudent 3 "hi" 2 rEnglish
-- | Example term embedding 'e3' as its major field's context.
e4 :: Term SIG
e4 = rStudent 3 "hi" 2.0 (toCxt $ deepInject e3)
-- | Rewrite rule: any English student with id 3 (any name, wildcard score)
-- becomes a fixed Math student.
student_rule :: MetaId String -> Rule (LHS SIG) (RHS SIG)
student_rule x = rStudent 3 (meta x) __ rEnglish ===> rStudent 4 "matched" 3 rMath
-- | Rule arrow lifted over representations.
a ==> b = toRep a ===> toRep b
-- | Demo: draw 'e3', then draw it after one application of 'student_rule'.
main = do
  drawTerm e3
  drawTerm $ stripAnn $ applyFirst app [quantify (student_rule) ] $ prepare e3
  print "hi"
|
tomberek/RETE
|
src/RETE/Lib.hs
|
bsd-3-clause
| 3,789 | 0 | 13 | 791 | 1,595 | 831 | 764 | 89 | 1 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Snap.Test.Common
( coverEqInstance
, coverOrdInstance
, coverReadInstance
, coverShowInstance
, coverTypeableInstance
, forceSameType
, expectException
, expectExceptionH
, liftQ
, eatException
) where
------------------------------------------------------------------------------
import Control.DeepSeq
import Control.Exception (SomeException(..), evaluate)
import Control.Monad
import Control.Monad.CatchIO
import Control.Monad.Trans
import qualified Data.ByteString as S
import qualified Data.ByteString.Lazy as L
import Data.ByteString.Internal (c2w)
import Data.Typeable
import Prelude hiding (catch)
import Test.QuickCheck
import qualified Test.QuickCheck.Monadic as QC
import Test.QuickCheck.Monadic
------------------------------------------------------------------------------
-- Orphan instances (warning suppressed above): random strict ByteStrings
-- from arbitrary [Char], and lazy ByteStrings from up to 5 strict chunks.
instance Arbitrary S.ByteString where
    arbitrary = liftM (S.pack . map c2w) arbitrary
instance Arbitrary L.ByteString where
    arbitrary = do
      n <- choose(0,5)
      chunks <- replicateM n arbitrary
      return $ L.fromChunks chunks
------------------------------------------------------------------------------
-- | Run an action, forcing it only for effect and silently swallowing any
-- synchronous exception it throws (deliberate best-effort semantics).
eatException :: (MonadCatchIO m) => m a -> m ()
eatException a = (a >> return ()) `catch` handler
  where
    handler :: (MonadCatchIO m) => SomeException -> m ()
    handler _ = return ()
------------------------------------------------------------------------------
-- | Unify the types of two values, returning the second; the first is used
-- only to pin down the type (e.g. for 'readsPrec' results).
forceSameType :: a -> a -> a
forceSameType = const id
------------------------------------------------------------------------------
-- | Kill the false negative on derived show instances.
-- | Exercise all three 'Show' methods ('showsPrec', 'show', 'showList'),
-- fully forcing the results, so derived instances count as covered.
coverShowInstance :: (Monad m, Show a) => a -> m ()
coverShowInstance x = a `deepseq` b `deepseq` c `deepseq` return ()
  where
    a = showsPrec 0 x ""
    b = show x
    c = showList [x] ""
------------------------------------------------------------------------------
-- | Exercise 'readsPrec' and 'readList' of a 'Read' instance; parse
-- failures are expected and swallowed -- only coverage matters.  The
-- argument is used purely to fix the type via 'forceSameType'.
coverReadInstance :: (MonadIO m, Read a) => a -> m ()
coverReadInstance x = do
    liftIO $ eatException $ evaluate $ forceSameType [(x,"")] $ readsPrec 0 ""
    liftIO $ eatException $ evaluate $ forceSameType [([x],"")] $ readList ""
------------------------------------------------------------------------------
-- | Touch both methods of an 'Eq' instance ('==' and '/='), forcing each
-- result to WHNF, so derived instances count as covered.
coverEqInstance :: (Monad m, Eq a) => a -> m ()
coverEqInstance x = (x == x) `seq` (x /= x) `seq` return ()
------------------------------------------------------------------------------
-- | Exercise every 'Ord' method ((<), (>=), (>), (<=), 'compare', 'min',
-- 'max'), fully forcing the results, so derived instances count as covered.
coverOrdInstance :: (Monad m, Ord a) => a -> m ()
coverOrdInstance x = a `deepseq` b `deepseq` return ()
  where
    a = [ x < x
        , x >= x
        , x > x
        , x <= x
        , compare x x == EQ ]
    -- min/max applied to the Bool list itself, just to touch the methods.
    b = min a $ max a a
------------------------------------------------------------------------------
-- | Touch 'typeOf' of a 'Typeable' instance.
coverTypeableInstance :: (Monad m, Typeable a) => a -> m ()
coverTypeableInstance a = typeOf a `seq` return ()
------------------------------------------------------------------------------
-- | QuickCheck-monadic assertion that the given IO action throws (any
-- synchronous exception); the exception's rendering is forced to make
-- sure the exception really fired.
expectException :: IO a -> PropertyM IO ()
expectException m = do
    e <- liftQ $ try m
    case e of
      Left (z::SomeException) -> (length $ show z) `seq` return ()
      Right _ -> fail "expected exception, didn't get one"
------------------------------------------------------------------------------
-- | Plain-IO variant of 'expectException' for HUnit-style tests; 'fail'
-- raises an IOError when no exception was thrown.
expectExceptionH :: IO a -> IO ()
expectExceptionH act = do
    e <- try act
    case e of
      Left (z::SomeException) -> (length $ show z) `seq` return ()
      Right _ -> fail "expected exception, didn't get one"
------------------------------------------------------------------------------
-- | Lift a monadic action into QuickCheck's 'PropertyM'.
liftQ :: forall a m . (Monad m) => m a -> PropertyM m a
liftQ = QC.run
|
f-me/snap-core
|
test/suite/Snap/Test/Common.hs
|
bsd-3-clause
| 3,818 | 0 | 13 | 765 | 1,031 | 556 | 475 | 77 | 2 |
import Test.DocTest
-- | Entry point: run the doctests embedded in src/2010africa/B.hs.
main :: IO ()
main = doctest ["src/2010africa/B.hs"]
|
yuto-matsum/googlecodejam2016-hs
|
test/2010africa/BSpec.hs
|
bsd-3-clause
| 84 | 0 | 6 | 21 | 27 | 14 | 13 | 3 | 1 |
import Genome.Dna.Dna
import Genome.Dna.Kmer
import Test.Util.Util
import Test.Genome.Dna.Dna
import Test.Genome.Dna.Kmer
import Test.QuickCheck
import Test.HUnit
import Control.Monad
import qualified Data.Set as Set
import qualified Data.Map as M
{- For now the tests are defined here, along with the main.
Tests in general should fall in the same folder structure as the main library,
locationOfTest testName = test/dir/file.hs
where
dir = directoryContaining testName in src/
file.hs = fileContaining testName in src/dir/
-}
-- | Test driver: QuickCheck properties for DNA complement/reverse
-- complement and clump detection, interleaved with example-based checks
-- whose results are printed directly.
main :: IO ()
main = do
  putStrLn " tests"
  putStrLn "test that Char Base can be faithfully complemented"
  quickCheck (prop_idempotent_complement :: Char -> Bool)
  putStrLn "that Int Base can be faithfully complemented"
  quickCheck (prop_idempotent_complement :: Int -> Bool)
  putStrLn "that a sequence of Base Char can be faithfully reverse complemented"
  quickCheck (prop_idempotent_revcomp :: [Char] -> Bool)
  putStrLn "that a sequence of Base Int can be faithfully reverse complemented"
  quickCheck (prop_idempotent_revcomp :: [Int] -> Bool)
  putStrLn "-------------------------------------"
  putStrLn "that Ints are clumped correctly"
  -- NOTE(review): the @pss <- return $ ...@ bindings below are pure and
  -- would normally be @let@ bindings.
  do
    pss <- return $ map egtest_intsClumps intClumpEgs
    putStrLn (concatStrList pss)
  putStrLn "-------------------------------------"
  do
    pss <- return $ map egtest_baseSeqClumps clumerClumpEgs
    putStrLn (concatStrList pss)
  putStrLn "-------------------------------------"
  do
    pss <- return $ map egtest_clumpsRegCovSize clumpSizeEgs
    putStrLn (concatStrList pss)
  do
    putStrLn ""
    putStr "test pattern is repeated: "
    quickCheckWith stdArgs {maxSuccess = 100000} (
      test_isRepeated :: Int -> [Nucleotide] -> [Nucleotide] -> Bool
      )
  do
    putStrLn ""
    putStr "test occurences: "
    quickCheckWith stdArgs {maxSuccess = 100000} (
      test_occurences :: [Nucleotide] -> [Int] -> Bool
      )
  do
    putStrLn ""
    putStr "test ptrn count: "
    quickCheckWith stdArgs {maxSuccess = 1000} (
      test_ptrnCount :: [Nucleotide] -> Bool
      )
  do
    putStrLn ""
    putStr "test next occurence of ptrn: "
    quickCheckWith stdArgs {maxSuccess = 100000} (
      test_nextOcc :: [Nucleotide] -> Int -> Bool
      )
  do
    putStrLn ""
    putStr "test prop_clumpsRegionIsSizeL: "
    quickCheckWith stdArgs {maxSuccess = 1000} (
      prop_clumpsRegionIsSizeL :: Int -> Int -> Clumer Nucleotide -> Bool
      )
  do
    putStrLn ""
    putStr "test prop_clumpsSizeMinBound: "
    quickCheckWith stdArgs {maxSuccess = 1000} (
      prop_clumpsSizeMinBound :: Int -> Int -> Clumer Nucleotide -> Bool
      )
  where
    -- Join strings with newlines.  NOTE(review): starting from the empty
    -- accumulator means the result carries a *leading* newline before the
    -- first element; also a lazy 'foldl' is used where 'foldl'' (or
    -- 'unlines'/'intercalate') would be conventional.
    concatStrList :: [String] -> String
    concatStrList = foldl (\x y -> x ++ "\n" ++ y) []
|
visood/bioalgo
|
test/Spec.hs
|
bsd-3-clause
| 2,721 | 0 | 14 | 543 | 643 | 313 | 330 | 66 | 1 |
module Paths_simple_c_value (
version,
getBinDir, getLibDir, getDataDir, getLibexecDir,
getDataFileName
) where
import qualified Control.Exception as Exception
import Data.Version (Version(..))
import System.Environment (getEnv)
import Prelude
-- NOTE: Cabal-generated Paths module; the absolute directories below are
-- baked in at configure time on the build machine.
-- | 'Exception.catch' restricted to 'IOException'.
catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a
catchIO = Exception.catch
-- | Package version as declared in the .cabal file.
version :: Version
version = Version {versionBranch = [0,0,0,1], versionTags = []}
bindir, libdir, datadir, libexecdir :: FilePath
bindir     = "/Users/jonathan.fischoff/Library/Haskell/ghc-7.4.1/lib/simple-c-value-0.0.0.1/bin"
libdir     = "/Users/jonathan.fischoff/Library/Haskell/ghc-7.4.1/lib/simple-c-value-0.0.0.1/lib"
datadir    = "/Users/jonathan.fischoff/Library/Haskell/ghc-7.4.1/lib/simple-c-value-0.0.0.1/share"
libexecdir = "/Users/jonathan.fischoff/Library/Haskell/ghc-7.4.1/lib/simple-c-value-0.0.0.1/libexec"
getBinDir, getLibDir, getDataDir, getLibexecDir :: IO FilePath
-- Each getter consults an environment-variable override first, falling
-- back to the configure-time path when the variable is unset.
getBinDir = catchIO (getEnv "simple_c_value_bindir") (\_ -> return bindir)
getLibDir = catchIO (getEnv "simple_c_value_libdir") (\_ -> return libdir)
getDataDir = catchIO (getEnv "simple_c_value_datadir") (\_ -> return datadir)
getLibexecDir = catchIO (getEnv "simple_c_value_libexecdir") (\_ -> return libexecdir)
-- | Resolve a data file name relative to the (possibly overridden) data dir.
getDataFileName :: FilePath -> IO FilePath
getDataFileName name = do
  dir <- getDataDir
  return (dir ++ "/" ++ name)
|
jfischoff/simple-c-value
|
dist/build/autogen/Paths_simple_c_value.hs
|
bsd-3-clause
| 1,372 | 0 | 10 | 167 | 332 | 190 | 142 | 26 | 1 |
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Main where
import ClassyPrelude
import System.Exit
import Text.Parsec
import Data.Proxy
import Database.Persist
import Database.Persist.TH
import Database.Persist.Sql
import Data.ByteString.Base16 as B16
import Data.Time
import Network (PortID(..))
import Yesod.Helpers.Types
import Yesod.Helpers.Parsec
import Yesod.Helpers.FuzzyDay
import Yesod.Helpers.Utils
import Data.SafeCopy
import Data.Serialize
import Yesod.Helpers.SafeCopy
-- | Check that validating version @v@ against constraint @c@ yields @b@;
-- print a diagnostic and abort the test suite otherwise.
testVerValidate :: VerConstraint -> SimpleVersion -> Bool -> IO ()
testVerValidate c v b =
    -- Fixed typo in the diagnostic (was "constaint").
    unless (validateSimpleVersion c v == b) $ do
        putStrLn $ "constraint '" <> tshow c <> "' validate version '" <> tshow v
                    <> "' does not get expected result: " <> tshow b
        exitFailure
-- | Parse a version constraint from its textual form, aborting the test
-- suite with a diagnostic when parsing fails.
tryVerConstraintParse :: String -> IO VerConstraint
tryVerConstraintParse s =
    either report return (parse simpleParser "" s)
  where
    report err = do
        putStrLn $ "failed to parse " <> tshow s <> ": " <> tshow err
        exitFailure
-- | Run parser @p@ on @s@ and require that it succeeds with exactly
-- @expected@; otherwise print a diagnostic and abort the test suite.
testAnyCharParser :: (Eq a, Show a) => ParsecT String () Identity a -> String -> a -> IO ()
testAnyCharParser p s expected = do
    case parse p "" s of
        Left err -> do
            putStrLn $ "failed to parse " <> tshow s <> ": " <> tshow err
            exitFailure
        Right x -> do
            if x == expected
                then return ()
                else do
                    -- Fixed missing space before "to expected result".
                    putStrLn $ "failed to parse " <> tshow s <> " to expected result"
                    putStrLn $ "expected: " <> tshow expected
                    putStrLn $ "actual: " <> tshow x
                    exitFailure
-- | For every constructor of a bounded enum with a 'SimpleStringRep'
-- instance, check that parsing its own encoding round-trips.  The 'Proxy'
-- only selects the type.
testSimpleStringRepEnumBounded :: forall a. (Eq a, Show a, Enum a, Bounded a, SimpleStringRep a)
                                => Proxy a
                                -> IO ()
testSimpleStringRepEnumBounded _ =
    forM_ [minBound .. maxBound] $ \ (v :: a) -> do
        testAnyCharParser simpleParser (simpleEncode v) v
-- | Exercise version-constraint validation, both with directly constructed
-- constraints and with constraints parsed from their textual form.
testVerConstraint :: IO ()
testVerConstraint = do
    testVerValidate (VerWithOrder GT $ SimpleVersion [0, 9]) (SimpleVersion [1]) True
    testVerValidate (VerWithOrder GT $ SimpleVersion [1]) (SimpleVersion [1, 0]) True
    checkParsed "> 0.9" (SimpleVersion [1]) True
    checkParsed "> 1.0" (SimpleVersion [1, 0, 1]) True
    checkParsed ">= 1.0" (SimpleVersion [1, 0, 1]) True
    checkParsed "== 1.*" (SimpleVersion [1, 0, 1]) True
    checkParsed "/= 1.*" (SimpleVersion [1, 0, 1]) False
  where
    -- Parse the textual constraint, then validate the version against it.
    checkParsed s v expected = do
        c <- tryVerConstraintParse s
        testVerValidate c v expected
-- | Example cases for 'parseFileOrConnectPath': plain file paths parse to
-- 'Left', host:port/host:service forms to 'Right'.
test_parseFileOrNetworkPath :: IO ()
test_parseFileOrNetworkPath = do
  let f = testAnyCharParser parseFileOrConnectPath
  f "/path/to/some" $ Left "/path/to/some"
  -- f ":/path/to/some" $ Right ("localhost", UnixSocket "/path/to/some")
  f "127.0.0.1:80" $ Right ("127.0.0.1", PortNumber (fromIntegral (80::Int)))
  f "127.0.0.1:www" $ Right ("127.0.0.1", Service "www")
-- | Example cases for 'parseSeconds': plain numbers, minute'second" forms
-- (ASCII and typographic prime marks) and colon-separated clock forms.
test_parseSeconds :: IO ()
test_parseSeconds = do
  let f = testAnyCharParser parseSeconds
  f "10" 10
  f "10.1" 10.1
  f "1'20\"" 80
  f "1'20.1\"" 80.1
  f "1'20" 80
  f "1′20″" 80
  f "1′20" 80
  f "01:20" 80
  f "00:01:20" 80
-- | Example cases for 'parseIntWithGrouping' with ',' as the separator.
test_parseIntGrouping :: IO ()
test_parseIntGrouping = do
  let f = testAnyCharParser (parseIntWithGrouping ',')
  f "100,123" (100123 :: Int)
  f "2,100,123" (2100123 :: Int)
-- | Round-trip a value through SafeCopy serialisation, printing its hex
-- encoding, and abort the test suite when decoding fails or the decoded
-- value differs.  (Previously a differing value printed FAIL but still let
-- the suite exit successfully; it now calls 'exitFailure' like the
-- decode-failure branch.)
testAnySafeCopy :: (SafeCopy a, Eq a, Show a) => a -> IO ()
testAnySafeCopy x = do
    let bs = runPut $ safePut x
    putStrLn $ decodeUtf8 $ B16.encode bs
    putStrLn $ tshow x
    case runGet safeGet bs of
        Left err -> do
            putStrLn $ "FAIL: safeGet failed: " <> fromString err
            putStrLn $ "  original value: " <> tshow x
            exitFailure
        Right x2
            | x == x2 ->
                putStrLn $ "OK: " <> tshow x2
            | otherwise -> do
                putStrLn $ "FAIL: safeGet return different value: " <> tshow x2
                putStrLn $ "  original value: " <> tshow x
                exitFailure
-- | Minimal persistent entity whose only purpose is to give 'SafeCopyId'
-- a concrete key type for the round-trip tests.
share [mkPersist sqlSettings, mkMigrate "migrateAll"] [persistLowerCase|
Dummy
|]
-- | Delegate key (de)serialisation to the generic any-id helpers so that
-- 'SafeCopyId Dummy' values can be round-tripped.
instance SafeCopy (Key Dummy) where
  putCopy = putCopyAnyId
  getCopy = getCopyAnyId
-- | Round-trip a 'FuzzyDayY' value and 'SafeCopyId' keys of increasing
-- magnitude through SafeCopy serialisation.
testSafeCopy :: IO ()
testSafeCopy = do
  testAnySafeCopy (FuzzyDayY 2014)
  testAnySafeCopy (SafeCopyId (toSqlKey 1) :: SafeCopyId Dummy)
  testAnySafeCopy (SafeCopyId (toSqlKey 1134242) :: SafeCopyId Dummy)
  testAnySafeCopy (SafeCopyId (toSqlKey 113424224234) :: SafeCopyId Dummy)
-- | Parse separator-delimited groups of numbers, with and without a
-- trailing separator, for both ';' and newline separators, and for both
-- Int and Double element parsers.
testParseGroups :: IO ()
testParseGroups = do
    test_it (p_ints (char ';')) "1" [1]
    test_it (p_ints (char ';')) "1;2;3;" [1,2,3]
    test_it (p_ints (char ';')) "1;2;3" [1,2,3]
    test_it (p_ints newline) "1\n2\n3\n" [1,2,3]
    test_it (p_ints newline) "1\n2\n3" [1,2,3]
    test_it (p_double newline) "1.0\n2.0\n3.0" [1.0,2.0,3.0]
  where
    -- Parser for Ints separated (and optionally terminated) by @sep@;
    -- must consume the whole input.
    p_ints :: ParsecT String () Identity a
           -> ParsecT String () Identity [Int]
    p_ints sep = manySepEndBy sep simpleParser <* eof

    -- Same, for Doubles.
    p_double :: ParsecT String () Identity a
             -> ParsecT String () Identity [Double]
    p_double sep = manySepEndBy sep simpleParser <* eof

    -- Run parser @p@ over text @t@ and compare against @expected@;
    -- report and exit the process on parse error or mismatch.
    test_it :: (Eq a, Show a) => ParsecT String () Identity a -> String -> a -> IO ()
    test_it p t expected = do
      case parse p "" t of
        Left err -> do
          putStrLn $
            "FAIL: testParseGroups failed, parse error: "
            <> tshow err
            <> " text was: " <> tshow t
          exitFailure
        Right xs -> do
          if xs /= expected
            then do
              putStrLn $
                "FAIL: testParseGroups failed, not expected: "
                <> tshow xs
                <> ", expect " <> tshow expected
                <> ", text was: " <> tshow t
              exitFailure
            else return ()
-- | Fuzzy year/month(/day) parsing in several human-written formats,
-- including '.' / '/' separators with optional spaces and Chinese date
-- markers.  The commented case documents a known-unsupported input.
test_humanParseFuzzyDay :: IO ()
test_humanParseFuzzyDay = do
  let f = testAnyCharParser humanParseFuzzyDay
  f "2014.9" $ FuzzyDayYM 2014 9
  f "2014.09" $ FuzzyDayYM 2014 9
  f "2014/1" $ FuzzyDayYM 2014 1
  f "2014 /1" $ FuzzyDayYM 2014 1
  -- f "2014 . 1" $ FuzzyDayYM 2014 1
  f "2014 年 1" $ FuzzyDayYM 2014 1
  f "2014 年 1 月" $ FuzzyDayYM 2014 1
  f "2014 年 1 月 2 日" $ FuzzyDayYMD 2014 1 2
  let f2 = testAnyCharParser humanParseFuzzyDayRange
  f2 "2009.08 - 2012.03" (FuzzyDayYM 2009 8, FuzzyDayYM 2012 3)
-- | Parse a reference time string in the given time zone, then verify
-- that every alternative string parses to exactly the same UTC instant.
-- Exits the process on any parse failure or mismatch.
humanParseUTCTimeIt :: TimeZone -> String -> [String] -> IO ()
humanParseUTCTimeIt tz std_string strs = do
  -- Reference instant all other strings must agree with.
  utc_t0 <- case humanParseUTCTime tz std_string of
    Nothing -> do
      putStrLn $ "cannot parse time string: " <> fromString std_string
      exitFailure
    Just x -> return x
  forM_ strs $ \s -> do
    case humanParseUTCTime tz s of
      Nothing -> do
        putStrLn $ "cannot parse time string: " <> fromString s
        exitFailure
      Just utc_t -> do
        when (utc_t0 /= utc_t) $ do
          putStrLn $ "time parsed to: " <> tshow utc_t <>
                     "\ndoes not equal to expected: " <> tshow utc_t0
          exitFailure
-- | Equivalent representations of the same instant — zone-less local
-- time, a different numeric offset, and a Chinese date format — must all
-- parse to the reference +0800 timestamp.
test_humanParseUTCTime :: IO ()
test_humanParseUTCTime = do
  let tz = hoursToTimeZone 8
  humanParseUTCTimeIt tz "2015-01-02 12:53:46+0800"
    [ "2015-01-02 12:53:46"
    , "2015-01-02 13:53:46+0900"
    , "2015年01月02日12时53分46秒"
    ]
-- | Run every test group in sequence; each group exits the process with
-- a failure code on its first error, so reaching the end means success.
main :: IO ()
main = do
  testVerConstraint
  test_parseFileOrNetworkPath
  test_parseSeconds
  test_parseIntGrouping
  testSafeCopy
  testParseGroups
  test_humanParseFuzzyDay
  test_humanParseUTCTime
  testSimpleStringRepEnumBounded (Proxy :: Proxy Gender)
|
yoo-e/yesod-helpers
|
tests/test.hs
|
bsd-3-clause
| 8,773 | 0 | 22 | 2,839 | 2,388 | 1,146 | 1,242 | 209 | 3 |
{-# LANGUAGE TypeFamilies, TypeSynonymInstances, FlexibleInstances, MultiParamTypeClasses, FlexibleContexts, RankNTypes, GADTs, DeriveGeneric #-}
module QueryArrow.Cypher.Neo4j where
import QueryArrow.Config
import QueryArrow.DB.DB
import QueryArrow.Cypher.Mapping
import QueryArrow.Plugin
import QueryArrow.DB.AbstractDatabaseList
import QueryArrow.Data.Heterogeneous.List
import Data.Aeson
import GHC.Generics
import Data.Maybe
instance FromJSON Neo4jDBConfig
instance ToJSON Neo4jDBConfig
-- | Connection and mapping settings for a Neo4j-backed database,
-- (de)serialised from the plugin configuration via the generic
-- FromJSON/ToJSON instances above.
data Neo4jDBConfig = Neo4jDBConfig {
    -- db_name :: String,
    db_namespace :: String,    -- ^ namespace passed to the Cypher adapter
    db_host :: String,
    db_password :: String,
    db_port :: Int,
    db_username :: String,
    db_predicates :: String,   -- ^ predicates definition — presumably a file path; TODO confirm
    db_sql_mapping :: String   -- ^ SQL/Cypher mapping — presumably a file path; TODO confirm
} deriving (Show, Generic)
-- db
data Neo4jPlugin = Neo4jPlugin
-- | Build a Neo4j-backed 'AbstractDatabase' from the plugin's
-- database-specific configuration section.
instance Plugin Neo4jPlugin MapResultRow where
  getDB _ _ ps = do
    let fsconf = getDBSpecificConfig ps
    -- (host, port, user, password) tuple expected by the adapter.
    let conn = (db_host fsconf, db_port fsconf, db_username fsconf, db_password fsconf)
    db <- makeICATCypherDBAdapter (db_namespace fsconf) (db_predicates fsconf) (db_sql_mapping fsconf) conn
    return ( AbstractDatabase db)
|
xu-hao/QueryArrow
|
QueryArrow-db-cypher/src/QueryArrow/Cypher/Neo4j.hs
|
bsd-3-clause
| 1,140 | 0 | 12 | 170 | 256 | 142 | 114 | 29 | 0 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE EmptyDataDecls #-}
{-# LANGUAGE FlexibleContexts #-}
module Data.Var
(
-- * Different types of variables
Sig
, newSig
, Ref
, newRef
, Var
, newVar
-- * Generic operations
, Settable
, set
, Gettable
, get
, modify
, modifyWith
, Subscribable
, subscribe
, withUnsubscriber
-- * Specific operations
, subscribeWithOld
, subscribeChange
, subscribeAndRead
, subscribeChangeAndRead
, subscribeExclusive
, subscribeAndReadExclusive
, mapVar
, mergeVars
, mergeVars'
, tupleVars
, tupleVars'
, waitForN
, waitFor
, oneShot
, holdSig
) where
import Data.Maybe
import FFI
import Prelude
-- | A subscribable signal. Can have handlers subscribed to them, but doesn't
-- store a value.
data Sig a
-- | Make a new signal.
newSig :: Fay (Ptr (Sig a))
newSig = ffi "new Fay$$Sig()"
-- | A mutable reference, with no subscribers.
data Ref a
-- | Make a new mutable reference.
newRef :: Ptr a -> Fay (Ptr (Ref a))
newRef = ffi "new Fay$$Ref2(%1)"
-- | A reactive variable. Stores a value, and can have handlers subscribed to
-- changes.
data Var a
-- | Make a new reactive variable.
newVar :: Ptr a -> Fay (Ptr (Var a))
newVar = ffi "new Fay$$Var(%1)"
-- | All of the variable types can be set to a value.
class Settable v
instance Settable (Ref a)
instance Settable (Sig a)
instance Settable (Var a)
-- | Write to the value (if any), and call subscribers (if any).
set :: Settable (v a) => Ptr (v a) -> Ptr a -> Fay ()
set = ffi "Fay$$setValue(Fay$$_(%1), %2, Fay$$_)"
-- | 'Ref' and 'Var' store their last set value.
class Gettable v
instance Gettable (Ref a)
instance Gettable (Var a)
-- | Get the value of a 'Ref' or 'Var'.
get :: Gettable (v a) => Ptr (v a) -> Fay (Ptr a)
get = ffi "Fay$$_(%1).val"
-- | Modifies the current value with a pure function.
modify :: (Settable (v a), Gettable (v a)) => v a -> (a -> a) -> Fay ()
modify var f = do
  old <- get var
  set var (f old)
-- | Runs a 'Fay' action on the current value, and updates with the result.
modifyWith :: (Settable (v a), Gettable (v a)) => v a -> (a -> Fay a) -> Fay ()
modifyWith var act = do
  old <- get var
  new <- act old
  set var new
-- | 'Sig' and 'Var' have lists of subscribers that are notified when 'set' is
-- used.
class Settable v => Subscribable v
instance Subscribable (Sig a)
instance Subscribable (Var a)
-- | Subscribe to the value of a 'Sig' or 'Var'.
--
-- The result is an unsubscribe function.
subscribe :: Subscribable (v a) => Ptr (v a) -> Ptr (a -> Fay void) -> Fay (() -> Fay ())
subscribe = ffi "Fay$$subscribe(Fay$$_(%1), Fay$$_(%2))"
-- | Run the same subscribing action but provide an additional
-- unsubscribe parameter to the handler.
withUnsubscriber :: ((a -> Fay ()) -> Fay (() -> Fay ()))
                 -> (((() -> Fay ()) -> a -> Fay ()) -> Fay (() -> Fay ()))
withUnsubscriber f = \g -> do
  -- The unsubscribe function only exists once 'f' returns, but the
  -- handler may fire before then; stash it in a Ref and skip handler
  -- invocations that arrive while it is still Nothing.
  unsubscriber <- newRef Nothing
  unsubscribe <- f $ \v -> do munsubscriber <- get unsubscriber
                              whenJust munsubscriber $ \unsubscribe -> g unsubscribe v
  set unsubscriber (Just unsubscribe)
  return unsubscribe
-- | Subscribe to a 'Var', along with the previous value.
--
-- The result is an unsubscribe function.
subscribeWithOld :: Var a -> (a -> a -> Fay ()) -> Fay (() -> Fay ())
subscribeWithOld v f = do
  -- Shadow copy of the current value, so each notification can pass the
  -- old value alongside the new one.
  o <- get v >>= newRef
  subscribe v $ \x' -> do
    x <- get o
    set o x'
    f x x'
-- | Subscribe to a 'Var', but only call handler when it actually changes.
--
-- "Changes" means the new value differs from the previous one by '(==)'.
-- The result is an unsubscribe function.
subscribeChange :: Eq a => Var a -> (a -> Fay ()) -> Fay (() -> Fay ())
subscribeChange v f = subscribeWithOld v $ \x x' -> when (x /= x') $ f x'
-- | Subscribe to a 'Var', and call the function on the current value.
--
-- The handler fires once immediately with the value held right now, and
-- again on every subsequent 'set'.  The result is an unsubscribe
-- function.
subscribeAndRead :: Var a -> (a -> Fay void) -> Fay (() -> Fay ())
subscribeAndRead var handler = do
  current <- get var
  _ <- handler current
  subscribe var handler
-- | Subscribe to a 'Var', but only call handler when it actually changes, and
-- also initially on registration.
--
-- The result is an unsubscribe function.
subscribeChangeAndRead :: Eq a => Var a -> (a -> Fay ()) -> Fay (() -> Fay ())
subscribeChangeAndRead var handler = do
  current <- get var
  handler current
  subscribeChange var handler
-- | Given a change handler, returns a function that can be used to set a
-- subscribable without invoking the handler. This can be useful in
-- situations where the handler for a 'Var' causes an event which otherwise
-- ought to set the value of the 'Var'. An example of this is interfacing
-- with HTML input field change events.
--
-- The 'snd' part of the result is an unsubscribe function.
subscribeExclusive :: Subscribable (v a) => v a -> (a -> Fay ()) -> Fay (a -> Fay (), () -> Fay ())
subscribeExclusive v onChange = do
bracket <- getBracket
unsubscribe <- subscribe v $ bracket . onChange
return (\x -> bracket $ set v x, unsubscribe)
-- | Given a change handler, returns a function that can be used to set a var
-- without invoking the handler. The handler is called with the initial
-- value. This can be useful in situations where the handler for a 'Var'
-- causes an event which otherwise ought to set the value of the 'Var'. An
-- example of this is interfacing with HTML input field change events.
--
-- The 'snd' part of the result is an unsubscribe function.
subscribeAndReadExclusive :: Var a -> (a -> Fay ()) -> Fay (a -> Fay (), () -> Fay ())
subscribeAndReadExclusive v onChange = do
bracket <- getBracket
unsubscribe <- subscribeAndRead v $ bracket . onChange
return (\x -> bracket $ set v x, unsubscribe)
-- Utility used for 'subscribeExclusive', 'subscribeAndReadExclusive', and
-- 'mergeVars'.
--
-- Returns a "bracket" that runs an action with re-entrancy suppressed:
-- while the bracketed action runs, the flag in 'rhandle' is False, so a
-- nested invocation of the same bracket becomes a no-op.  This breaks
-- the feedback loop of a handler that sets the very var it subscribes
-- to.
getBracket :: Fay (Fay () -> Fay ())
getBracket = do
  rhandle <- newRef True
  return $ \f -> do
    handle <- get rhandle
    when handle $ do
      set rhandle False
      f
      set rhandle True
--TODO: mapVar variant that's bidirectional?
--TODO: return unsubscribe?
-- | Creates a 'Var' that updates whenever the source var is changed, applying
-- the provided function to compute the new value.
mapVar :: (a -> b) -> Var a -> Fay (Var b)
mapVar f source = do
  current <- get source
  derived <- newVar (f current)
  _ <- subscribe source (set derived . f)
  return derived
-- | Creates a 'Var' that updates whenever one of its source vars are changed.
-- If the 2nd argument is a 'Just' value, then it's used to set the source
-- vars when the variable is changed. Setting using a merged var is
-- sometimes preferred because both values are set before the subscribers
-- are called.
--
-- The 'snd' part of the result is an unsubscribe function.
mergeVars :: (a -> b -> c) -> Maybe (c -> (a, b)) -> Var a -> Var b
          -> Fay (Var c, Fay ())
mergeVars f mg va vb = do
  -- 'bracket' suppresses re-entrant updates: without it, setting vc from
  -- va's handler would fire vc's handler, which sets va again, and so on.
  bracket <- getBracket
  a0 <- get va
  b0 <- get vb
  vc <- newVar (f a0 b0)
  -- Propagate source -> merged.
  unsubscribeA <- subscribe va $ \a -> bracket $ do
    b <- get vb
    set vc (f a b)
  unsubscribeB <- subscribe vb $ \b -> bracket $ do
    a <- get va
    set vc (f a b)
  unsubscribe <- case mg of
    Nothing -> return $ unsubscribeA () >> unsubscribeB ()
    Just g -> do
      -- With a splitting function, also propagate merged -> sources.
      unsubscribeC <- subscribe vc $ \c -> bracket $ case g c of
        (a, b) -> do
          -- Set variables before broadcast.
          setInternal va a
          setInternal vb b
          broadcastInternal va a
          broadcastInternal vb b
      return $ unsubscribeA () >> unsubscribeB () >> unsubscribeC ()
  return (vc, unsubscribe)
setInternal :: Ptr (Var a) -> Ptr a -> Fay ()
setInternal = ffi "function() { Fay$$_(%1).val = %2; }()"
broadcastInternal :: Ptr (Var a) -> Ptr a -> Fay ()
broadcastInternal = ffi "Fay$$broadcastInternal(Fay$$_(%1), %2, Fay$$_)"
-- | Like 'mergeVars', but discards the unsubscribe function.
mergeVars' :: (a -> b -> c) -> Maybe (c -> (a, b)) -> Var a -> Var b
-> Fay (Var c)
mergeVars' f mg va vb = do
result <- mergeVars f mg va vb
case result of
(v, _) -> return v
-- | Creates a 'Var' that updates whenever one of its source vars are changed.
-- It can also be used to set both source vars at once.
--
-- See 'mergeVars' for more information. Note that when using nested tuples,
-- if you want all of the values to be set before broadcast, then they should
-- nest to the left.
tupleVars :: Var a -> Var b -> Fay (Var (a, b), Fay ())
tupleVars = mergeVars (\x y -> (x, y)) (Just id)
-- | Like 'tupleVars', but discards the unsubscribe function.
tupleVars' :: Var a -> Var b -> Fay (Var (a, b))
tupleVars' va vb = do
result <- tupleVars va vb
case result of
(v, _) -> return v
-- | Wait for n signals on the given signaller.
waitForN :: Int -> Fay (Fay void -> Fay (),Sig ())
waitForN n = do
sig <- newSig
count <- newVar (0 :: Int)
_ <- subscribe sig (const (modify count (+1)))
return (\m -> subscribeAndRead count (\i -> when (i == n) (m >> return ())) >> return (),sig)
-- | Wait for the given predicate to be satisfied on the var and then
-- unsubscribe.
waitFor :: Var a -> (a -> Bool) -> (a -> Fay ()) -> Fay ()
waitFor v p f = do
_ <- withUnsubscriber (subscribeAndRead v)
$ \unsubscribe x -> when (p x) $ unsubscribe () >> f x
return ()
-- | Make a one-shot variable subscription that immediately
-- unsubscribes after the event has triggered.
oneShot :: Subscribable (v a) => v a -> (a -> Fay ()) -> Fay ()
oneShot var handler = do
  _ <- withUnsubscriber (subscribe var) $ \unsub x -> do
         unsub ()
         handler x
  return ()
-- | Turn a sig into a var, by storing the last reported value.
holdSig :: a -> Sig a -> Fay (Var a)
holdSig initial sig = do
  var <- newVar initial
  _ <- subscribe sig (set var)
  return var
|
beni55/fay
|
fay-base/src/Data/Var.hs
|
bsd-3-clause
| 9,660 | 0 | 22 | 2,285 | 2,931 | 1,454 | 1,477 | -1 | -1 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE FlexibleInstances #-}
import Control.Concurrent (threadDelay)
import qualified Control.Exception as E
import Control.Lens hiding (argument)
import Control.Monad
import Control.Monad.Loops
import Control.Monad.State
import Daemon
import Data.Default
import Data.List
import Data.Maybe
import Data.Monoid
import qualified Data.Text as T
import qualified Data.Text.IO as T
import Message
import Options.Applicative hiding ((&), ParserResult(..))
import Pipes
import Server.Main
import System.Directory
import System.Exit
import System.FilePath
import System.IO
import Util
-- | Options that are shared by all subcommands
data SharedOptions = SharedOptions
{ _verbosity :: Integer -- ^ The verbosity level. Applies to client and server
, _neverStart :: Bool -- ^ If True, do not start the server when it's not running
, _disableWarnings :: Bool -- ^ Do not print compiler warning
, _logFile :: Maybe FilePath -- ^ Log file for the server
, _disableCabal :: Bool -- ^ Disable cabal support
, _socketFile :: FilePath -- ^ The socket file to use
, _serverTimeout :: Maybe Int -- ^ Number of seconds after which the server will suicide. 0 means no timeout.
} deriving Show
makeLenses ''SharedOptions
sharedOptions :: Parser SharedOptions
sharedOptions = SharedOptions
<$> option (short 'v' <> long "verbose" <> help "Verbosity level" <> metavar "LEVEL" <> value 0)
<*> switch (short 'r' <> long "disable-start" <> help "Do not start the server if it's not running")
<*> switch (short 'w' <> long "no-warnings" <> help "Do not print compiler warnings")
<*> optional (strOption (short 'f' <> long "log" <> help "logfile" <> metavar "FILE"))
<*> switch (short 'c' <> long "no-cabal" <> help "Do not use cabal to figure out the project settings")
<*> option (short 's' <> long "socket" <> help "The path to the socket file to use. Relative paths are relative to the project root if cabal support is enabled." <> value ".ghc-server.sock")
<*> optional (option (short 't' <> long "timeout" <> help "Number of seconds of idle time after which the server will exit. If set to 0, the server will never exit by itself."))
-- | Configure options for the client's pipeline.
data Config = Config
{ -- | Function to start the server in case it's not already running
_serverStarter :: Server Request Response -> FilePath -> IO ()
, _onSuccess :: IO () -- ^ Action to be executed if the server returned Success
, _onFailure :: IO () -- ^ Action to be executed if the server returned Failure
, _onStartFailure :: IO () -- ^ Action to be executed if the starting the server failed
, _request :: Request -- ^ The request which is sent to the server
, _searchCabal :: Bool -- ^ Search for a cabal configuration file and change the working directory to
-- that project root?
}
makeLenses ''Config
newtype ConfigO = ConfigO (SharedOptions -> Config)
makeIso ''ConfigO
instance Default ConfigO where
def = ConfigO d
where d options = Config starter (return ()) (return ()) startFailure (Multiple []) True
where starter process p = unless (options^.neverStart) $ do
logClient options 1 "Starting server"
forkUnixS (fromMaybe "/dev/null" $ options^.logFile) process p
startFailure = logClient options 0 "Failed to start server."
logClient :: SharedOptions -> Integer -> T.Text -> IO ()
logClient o i m = when (o^.verbosity >= i) $ T.hPutStrLn stderr $ "[Client] " <> m
requestConfig :: Request -> ConfigO
requestConfig r = def & from configO.mapped.request .~ r
-- | Build a subcommand whose configuration is a fixed value, i.e. a
-- subcommand that takes no further options of its own.
commandConfig :: String -> ConfigO -> InfoMod ConfigO -> Mod CommandFields ConfigO
commandConfig name conf = commandConfigM name (pure conf)
commandConfigM :: String -> Parser ConfigO -> InfoMod ConfigO -> Mod CommandFields ConfigO
commandConfigM c conf = command c . info conf
customConfig :: (SharedOptions -> State Config ()) -> ConfigO
customConfig f = ConfigO $ \opts -> execState (f opts) (review configO def opts)
adminCmd :: ParserInfo ConfigO
adminCmd = info (helper <*> cmd) $ fullDesc <> progDesc "command the server"
where cmd = subparser $ mconcat
[ commandConfigM "start" startParser $ briefDesc <> progDesc "start the server"
, commandConfig "stop" shutdownConfig $ briefDesc <> progDesc "stop the server"
, commandConfig "reset" resetConfig $ briefDesc <> progDesc "reset the GHC options used by the server and reload the cabal file"
, commandConfig "status" statusConfig $ briefDesc <> progDesc "check whether the server is running or not"
, commandConfigM "ghc" ghcParser $ fullDesc <> progDesc "add GHC options for compiling"
]
startParser = startConfig <$> switch (short 'd' <> long "no-daemon" <> help "Do not daemonize")
startConfig nd = customConfig $ \opts -> do
when nd $ serverStarter .= \s p ->
E.bracket (bindUnixSocket p) (maybe (return ()) $ closeUnixSocket p) $ \sock -> case sock of
Nothing -> logClient opts 0 "Race condition detected. Not starting server."
Just sock' -> startServer s sock'
onSuccess .= logClient opts 0 "Server is running now."
onFailure .= logClient opts 0 "Failed to start server."
shutdownConfig = customConfig $ \opts -> do
serverStarter .= \_ _ -> logClient opts 0 "Server is not running." >> exitSuccess
onSuccess .= do
whileM_ (doesFileExist $ opts^.socketFile) $ threadDelay 100000
logClient opts 0 "Server stopped."
onFailure .= logClient opts 0 "Failed to stop server."
request .= Shutdown
resetConfig = customConfig $ const $ request .= EnvChange ResetGhcArgs
statusConfig = customConfig $ \opts -> do
serverStarter .= \_ _ -> logClient opts 0 "Server is not running." >> exitFailure
onSuccess .= logClient opts 0 "Server is running."
ghcParser = requestConfig . EnvChange . AddGhcArgs . map ('-':)
<$> many (argument Just (metavar "FLAG" <> help "a GHC flag, without the leading dash"))
checkCmd :: FilePath -> ParserInfo ConfigO
checkCmd pwd = info (helper <*> cmd) $ fullDesc <> progDesc "check a file for errors and warnings"
where cmd = requestConfig . Command pwd . Check <$> argument Just (metavar "FILE" <> help "The file which to check for errors")
infoCmd :: FilePath -> ParserInfo ConfigO
infoCmd pwd = info (helper <*> cmd) $ fullDesc <> progDesc "print information about an identifier"
where cmd = fmap (requestConfig . Command pwd) $ Info
<$> argument Just (metavar "FILE" <> help "The file of the indentifier")
<*> argument (Just . T.pack) (metavar "IDENTIFIER" <> help "The identifier to retrieve information for")
-- | Subcommand for querying the type of an expression at a location.
-- NOTE(review): the parser body is 'undefined' — selecting the "type"
-- subcommand will crash at runtime.  Implement it or hide the command.
typeCmd :: FilePath -> ParserInfo ConfigO
typeCmd _ = info (helper <*> cmd) $ fullDesc <> progDesc "get the type of an expression at the given place"
  where cmd = undefined
cmds :: FilePath -> Parser (SharedOptions, ConfigO)
cmds pwd = (,) <$> sharedOptions <*> individual
where individual = subparser $ mconcat
[ command "check" $ checkCmd pwd
, command "info" $ infoCmd pwd
, command "type" $ typeCmd pwd
, command "admin" adminCmd
]
-- | Render a server message on the client side.  Log lines honour the
-- client's verbosity level; compiler warnings can be suppressed via the
-- @--no-warnings@ option; everything else goes to stdout with a tag.
renderMessage :: SharedOptions -> Message -> IO ()
renderMessage o (Log l v t) = when (o^.verbosity >= v) $ T.hPutStrLn stderr $ "[Server:" <> l <> "] " <> t
renderMessage _ (CompilerError t) = T.putStrLn t
renderMessage o (CompilerWarning t) = unless (o^.disableWarnings) $ T.putStrLn t
renderMessage _ (CompilerException t) = T.putStrLn $ "[Server:GHC Exception] " <> t
renderMessage _ (InternalError t) = T.putStrLn $ "[Server:Internal error] " <> t
renderMessage _ (UnimplementedError t) = T.putStrLn $ "[Server:Unimplemented] " <> t
-- | Walk upward from @path@ looking for a directory that contains a
-- @.cabal@ file; returns that directory (the project root) if found, or
-- 'Nothing' once the filesystem root is reached.
findCabal :: FilePath -> IO (Maybe FilePath)
findCabal path = do
  parent <- canonicalizePath $ path </> ".."
  entries <- getDirectoryContents path
  existing <- filterM doesFileExist (map (path </>) entries)
  if any (".cabal" `isSuffixOf`) existing
    then return (Just path)
    else if parent == path
           -- At the root: its parent is itself, so stop searching.
           then return Nothing
           else findCabal parent
-- | Entry point: parse the command line, assemble the request (adding
-- cabal-disable and suicide-timeout env changes as configured), move to
-- the cabal project root if applicable, then send the request over the
-- unix socket, starting the server on demand.
main :: IO ()
main = do
  pwd <- getCurrentDirectory
  (options, req') <- execParser (opts pwd)
  let req = review configO req' options
      -- Prepend a DisableCabal env change when cabal support is off.
      req'' | not $ options^.disableCabal = req^.request
            | otherwise = Multiple [EnvChange DisableCabal, req^.request]
      -- A timeout of 0 means "never exit by itself".
      addTimeoutOpt (Just t) | t == 0 = Multiple [EnvChange $ SuicideTimeout Nothing, req'']
                             | otherwise = Multiple [EnvChange $ SuicideTimeout $ Just t, req'']
      addTimeoutOpt _ = req''
      req''' = addTimeoutOpt (options^.serverTimeout)
  -- Change to the project root so relative paths resolve consistently.
  unless (options^.disableCabal) $
    when (req^.searchCabal) $ findCabal pwd >>= maybe (return ()) setCurrentDirectory
  r <- withUnixS ".ghc-server.sock" (req^.serverStarter $ serve) $ sendCommand (onError options) req''' $ client options
  case r of
    Nothing -> req^.onStartFailure
    Just res -> case res of
      Nothing -> hPutStrLn stderr "[Error] Server connection lost before end" >> exitWith (ExitFailure $ -2)
      Just s -> case s of
        Success -> req^.onSuccess
        Failure code -> req^.onFailure >> exitWith (ExitFailure code)
  where opts pwd = info (helper <*> cmds pwd)
          ( fullDesc
            <> progDesc "A persistent background ghc process"
          )
client :: SharedOptions -> Consumer Response IO Result
client options = do
lift $ logClient options 2 "Starting read loop"
for takeWhileRight $ lift . renderMessage options
onError :: SharedOptions -> String -> IO Bool
onError options x = fmap (const True) $ logClient options 0 $ T.pack $ "Failed to parse server response: " <> x
|
bennofs/ghc-server
|
src/Main.hs
|
bsd-3-clause
| 10,330 | 0 | 20 | 2,624 | 2,738 | 1,362 | 1,376 | -1 | -1 |
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- | Export to Org mode for Emacs.
module ICal.Org
(-- * Handy export functions
exportFromToFile
,parseFromObject
-- * Conversions
,documentParser
,buildDocument
-- * Types
,Event (..)
)
where
import Control.Applicative
import Control.Monad.IO.Class
import Control.Monad.Identity
import Data.Ord
import Data.List
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as M
import Data.Maybe
import Data.Monoid
import Data.Text (Text)
import qualified Data.Text as T
import Data.Text.Lazy.Builder (Builder)
import qualified Data.Text.Lazy.Builder as LT
import qualified Data.Text.Lazy.IO as LT
import Data.Time
import ICal
import ICal.Parser
import ICal.Types
-- | An Org mode section.
data Event =
Event {eventTitle :: !Text -- ^ Title of the section.
,eventStart :: !UTCTime -- ^ Date starts.
,eventDescription :: !(Maybe Text) -- ^ Contents of the section.
,eventEnd :: !(Maybe UTCTime) -- ^ Date ends.
,eventCreated :: !UTCTime -- ^ Date created.
}
deriving (Show)
-- | Handy exporting function.
exportFromToFile :: Day -> FilePath -> FilePath -> IO ()
exportFromToFile base from to =
do obj <- tokenizeObjectFromFile from
today <- getCurrentTime
case parseFromObject obj of
Left er -> error (show er)
Right es ->
LT.writeFile to
(LT.toLazyText (buildDocument base today es))
-- | Parse an iCalendar object into an Org mode document.
parseFromObject :: Object -> Either ParseError [Event]
parseFromObject s = runIdentity (parseEither s documentParser)
-- | Build an org-mode document.
buildDocument :: Day -> UTCTime -> [Event] -> Builder
buildDocument base today =
mconcat .
map build .
dropWhile (\e ->
utctDay (fromMaybe (eventStart e)
(eventEnd e)) <
base) .
sortBy (comparing eventStart)
where build event =
mconcat ["* " <> todo <> LT.fromText (eventTitle event)
,"\n"
," SCHEDULED: <" <> formatDate (eventStart event) <> ">"
,if fromMaybe (eventStart event)
(eventEnd event) >
today
then ""
else "\n - State \"DONE\" from \"TODO\" [" <>
formatDate
(fromMaybe (eventStart event)
(eventEnd event)) <>
"]\n"
,"\n"]
where formatDate =
LT.fromText .
T.pack . formatTime defaultTimeLocale "%Y-%m-%d"
todo =
if fromMaybe (eventStart event)
(eventEnd event) >
today
then "TODO "
else "DONE "
-- | Parse an org-mode document from the object.
documentParser :: Parser Identity Object [Event]
documentParser =
begin "VCALENDAR"
(do version <- property "VERSION"
unless (version == "2.0")
(parseError (GeneralProblem "Expected document version 2.0."))
scale <- property "CALSCALE"
unless (scale == "GREGORIAN")
(parseError (GeneralProblem "Need time gregorian scale."))
timezones <- fmap M.fromList (objects "VTIMEZONE" timeZoneParser)
events <- objects "VEVENT" (eventParser timezones)
return events)
-- | Parse a time zone.
--
-- NOTE(review): only the TZID key is read; the zone's offset rules in
-- the VTIMEZONE body are ignored and every zone is mapped to 'utc'.
-- Confirm this simplification is acceptable for the calendars consumed.
timeZoneParser :: Parser Identity [Object] (Text,TimeZone)
timeZoneParser =
  do key <- property "TZID"
     return (key,utc)
-- | Parse an event.
eventParser :: Map Text TimeZone -> Parser Identity [Object] Event
eventParser timezones =
do start <- property "DTSTART" >>= utcTimeParser timezones
end <- optional (property "DTEND" >>= utcTimeParser timezones)
created <- property "CREATED" >>= utcTimeParser timezones
description <- optional (property "DESCRIPTION")
summary <- property "SUMMARY"
return (Event {eventTitle = summary
,eventStart = start
,eventEnd = end
,eventDescription = description
,eventCreated = created})
-- | Parse a time field into a UTCTime.
--
-- Three input shapes are tried in order:
--
--   * @VALUE=DATE:yyyymmdd@    — a bare date, mapped to midnight
--   * @TZID=zone:datetime@     — a zoned local datetime
--   * @yyyymmddThhmmss[Z]@     — a plain datetime with trailing Z
utcTimeParser :: Map Text TimeZone -> Text -> Parser Identity s UTCTime
utcTimeParser timezones s =
  case T.stripPrefix "VALUE=DATE:" s of
    Just s' ->
      case justdate s' of
        Nothing ->
          parseError (GeneralProblem ("Unable to parse date from " <> s'))
        Just t -> return t
    Nothing ->
      case T.stripPrefix "TZID=" s of
        Just tzPlusDate ->
          -- Split "zone:datetime".  Note that the zone name ('tz') and
          -- the 'timezones' map are not consulted here: the datetime is
          -- parsed without applying any offset.
          case T.break (== ':') tzPlusDate of
            (tz,T.drop 1 -> date) ->
              case datetime "" date of
                Just t -> return t
                Nothing ->
                  parseError (GeneralProblem ("Couldn't parse: " <> date))
        Nothing ->
          case datetime "Z" s of
            Just t -> return t
            Nothing ->
              parseError (GeneralProblem ("Invalid date property: " <> s))
  where -- Parse "yyyymmddThhmmss" with optional literal suffix @z@.
        datetime z s' =
          parseTimeM True
                     defaultTimeLocale
                     ("%Y%m%dT%H%M%S" ++ z)
                     (T.unpack s')
        -- A bare date becomes second 0 of that day.
        justdate s' =
          fmap (\d -> UTCTime d 0)
               (parseTimeM True
                           defaultTimeLocale
                           "%Y%m%d"
                           (T.unpack s'))
|
chrisdone/ical
|
src/ICal/Org.hs
|
bsd-3-clause
| 5,771 | 0 | 21 | 2,085 | 1,343 | 697 | 646 | 149 | 6 |
module Main where
import Cartel
ver :: Version
ver = [0,30,0,10]
-- | Dependency on @name@ at or above the given version.
--
-- The version parameter is named @v@ rather than @ver@ to avoid
-- shadowing the top-level @ver@ binding (a -Wname-shadowing warning and
-- an easy source of confusion).
atLeast :: NonEmptyString -> [Word] -> Package
atLeast name v = package name (gtEq v)
base :: Package
base = closedOpen "base" [4,7,0,0] [5]
quickCheck :: Package
quickCheck = atLeast "QuickCheck" [2,7]
quickpull :: Package
quickpull = atLeast "quickpull" [0,4,0,0]
barecheck :: Package
barecheck = atLeast "barecheck" [0,2,0,6]
tasty :: Package
tasty = atLeast "tasty" [0,10]
tastyQuickcheck :: Package
tastyQuickcheck = atLeast "tasty-quickcheck" [0,8]
tastyTh :: Package
tastyTh = atLeast "tasty-th" [0,1]
properties :: Properties
properties = blank
{ name = "multiarg"
, version = ver
, cabalVersion = Just (1,18)
, buildType = Just simple
, license = Just bsd3
, licenseFile = "LICENSE"
, copyright = "Copyright 2011-2015 Omari Norman"
, author = "Omari Norman"
, maintainer = "[email protected]"
, stability = "Experimental"
, homepage = "https://github.com/massysett/multiarg"
, bugReports = "https://github.com/massysett/multiarg/issues"
, synopsis = "Command lines for options that take multiple arguments"
, description =
[ "multiarg helps you build command-line parsers for"
, "programs with options that take more than one argument."
, "See the documentation in the Multiarg module for details."
]
, category = "Console, Parsing"
, extraSourceFiles =
[ "ChangeLog", "README.md" ]
}
commonOptions :: HasBuildInfo a => [a]
commonOptions =
[ hsSourceDirs [ "lib" ]
, ghcOptions ["-Wall"]
, haskell2010
, buildDepends
[ base
]
]
library
:: [String]
-- ^ List of all modules
-> [LibraryField]
library ms = commonOptions ++
[ exposedModules ms
]
tests
:: [String]
-- ^ Library modules
-> [String]
-- ^ Test modules
-> Section
tests ms ts = testSuite "multiarg-tests" $ commonOptions ++
[ exitcodeStdio
, mainIs "multiarg-tests.hs"
, otherModules (ms ++ ts)
, hsSourceDirs [ "tests" ]
, otherExtensions ["TemplateHaskell"]
, testDepends
]
testDepends :: HasBuildInfo a => a
testDepends = buildDepends [ quickCheck, tasty, tastyQuickcheck, tastyTh ]
grover
:: FlagName
-- ^ Programs flag
-> [String]
-- ^ Library modules
-> [String]
-- ^ Test modules
-> Section
grover fl ms ts = executable "grover"
[ mainIs "grover-main.hs"
, condBlock (flag fl)
( buildable True
, commonOptions ++
[ testDepends
, otherModules (ms ++ ts)
, hsSourceDirs ["tests"]
]
)
[ buildable False ]
]
telly
:: FlagName
-- ^ Programs flag
-> [String]
-- ^ Library modules
-> [String]
-- ^ Test modules
-> Section
telly fl ms ts = executable "telly"
[ mainIs "telly-main.hs"
, condBlock (flag fl)
( buildable True
, commonOptions ++
[ testDepends
, otherModules (ms ++ ts)
, hsSourceDirs ["tests"]
]
)
[ buildable False ]
]
main :: IO ()
main = defaultMain $ do
ms <- modules "lib"
ts <- modules "tests"
fl <- makeFlag "programs" $ FlagOpts
{ flagDescription = "Build sample programs"
, flagDefault = False
, flagManual = True
}
return
( properties
, library ms
, [ githubHead "massysett" "multiarg"
, grover fl ms ts
, telly fl ms ts
, tests ms ts
]
)
|
massysett/multiarg
|
genCabal.hs
|
bsd-3-clause
| 3,328 | 0 | 13 | 809 | 920 | 519 | 401 | 110 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : TestSuite.Uninterpreted.Axioms
-- Copyright : (c) Levent Erkok
-- License : BSD3
-- Maintainer : [email protected]
-- Stability : experimental
--
-- Test suite for basic axioms and uninterpreted functions
-----------------------------------------------------------------------------
{-# LANGUAGE DeriveDataTypeable #-}
module TestSuite.Uninterpreted.Axioms(testSuite) where
import Data.SBV
import SBVTest
import Data.Generics
-- Test suite
testSuite :: SBVTestSuite
testSuite = mkTestSuite $ \_ -> test [
"unint-axioms" ~: assert =<< isThm p0
]
-- Example provided by Thomas DuBuisson:
data Bitstring = Bitstring () deriving (Eq, Ord, Data, Typeable, Read, Show)
instance SymWord Bitstring
instance HasKind Bitstring
type SBitstring = SBV Bitstring
a :: SBitstring -> SBool
a = uninterpret "a"
e :: SBitstring -> SBitstring -> SBitstring
e = uninterpret "e"
axE :: [String]
axE = [ "(assert (forall ((p Bitstring) (k Bitstring))"
, " (=> (and (a k) (a p)) (a (e k p)))))"
]
-- | Goal relying on the externally asserted SMT axiom 'axE': from the
-- constraints @a p@ and @a k@, conclude @a (e k p)@.  Provable only
-- because the axiom closes 'a' under 'e'.
p0 :: Symbolic SBool
p0 = do
  p <- free "p" :: Symbolic SBitstring
  k <- free "k" :: Symbolic SBitstring
  addAxiom "axE" axE
  constrain $ a p
  constrain $ a k
  return $ a (e k p)
|
Copilot-Language/sbv-for-copilot
|
SBVUnitTest/TestSuite/Uninterpreted/Axioms.hs
|
bsd-3-clause
| 1,315 | 0 | 10 | 256 | 283 | 151 | 132 | 27 | 1 |
{-# LANGUAGE OverloadedStrings,ScopedTypeVariables #-}
module Main where
import System.ZMQ4 hiding (message,source)
import System.Posix.Signals (installHandler, Handler(Catch), sigINT, sigTERM)
--import Control.Applicative
import Control.Concurrent
import Control.Monad
import Control.Exception (throw,catch,SomeException)
import qualified Data.Aeson as A
import Data.Foldable (for_)
import Data.Map (Map)
import qualified Data.Map as M
import qualified Data.Text as T
import Monto.Broker (Broker,Response,Server,ServerDependency)
import qualified Monto.Broker as B
import Monto.VersionMessage (VersionMessage)
import Monto.ProductMessage (ProductMessage)
import qualified Monto.VersionMessage as V
import qualified Monto.ProductMessage as P
import Options.Applicative
-- | ZeroMQ endpoint address, e.g. @"tcp://*:5000"@ — exact scheme
-- depends on the caller; not validated here.
type Addr = String
-- | Broker configuration as parsed from the command line.
data Options = Options
  { debug :: Bool
  , sink :: Addr
  , source :: Addr
  , servers :: [(Server,[ServerDependency],Addr)]
  }
-- | optparse-applicative parser for 'Options'. Note that @--servers@
-- is parsed via 'auto', i.e. with the 'Read' instance of the listed
-- triple type.
options :: Parser Options
options = Options
  <$> switch (long "debug" <> help "print messages that are transmitted over the broker")
  <*> strOption (long "sink" <> help "address of the sink")
  <*> strOption (long "source" <> help "address of the source")
  <*> option auto (long "servers" <> help "names, their dependencies and their ports" <> metavar "[(Server,[ServerDependency],Address)]")
-- | Parse options, wire up the ZeroMQ sockets (SUB source, PUB sink,
-- one PAIR socket per registered server), then pump messages until a
-- SIGINT/SIGTERM arrives. The broker state lives in an 'MVar' that is
-- updated by every receiving thread.
start :: IO ()
start = do
  opts <- execParser $ info (helper <*> options)
    ( fullDesc
   <> progDesc "Monto Broker"
    )
  withContext $ \ctx ->
    withSocket ctx Sub $ \src -> do
      bind src (source opts)
      -- Empty topic: subscribe to everything the source publishes.
      subscribe src ""
      putStrLn $ unwords ["listen on address", source opts, "for versions"]
      withSocket ctx Pub $ \snk -> do
        bind snk (sink opts)
        putStrLn $ unwords ["publish all products to sink on address", sink opts]
        withServers ctx B.empty (servers opts) $ \b0 sockets -> do
          broker <- newMVar b0
          -- Signal handlers fill this MVar; the main thread blocks on it.
          interrupted <- newEmptyMVar
          let stopExcecution = putMVar interrupted Interrupted
          _ <- installHandler sigINT (Catch stopExcecution) Nothing
          _ <- installHandler sigTERM (Catch stopExcecution) Nothing
          -- Forward decodable version messages from the source into the broker.
          sourceThread <- forkIO $ forever $ do
            msg <- A.decodeStrict <$> receive src
            for_ msg $ \msg' -> do
              when (debug opts) $ putStrLn $ unwords ["version", T.unpack (V.source msg'),"->", "broker"]
              modifyMVar_ broker $ onVersionMessage opts msg' sockets
          -- One thread per server: re-publish its raw output on the sink
          -- and, when it decodes as a product message, update the broker.
          threads <- forM (M.toList sockets) $ \(server,sckt) ->
            forkIO $ forever $ do
              rawMsg <- receive sckt
              send snk [] rawMsg
              let msg = A.decodeStrict rawMsg
              for_ msg $ \msg' -> do
                when (debug opts) $ putStrLn $ unwords [show server, T.unpack (P.source msg'), "->", "broker"]
                modifyMVar_ broker $ onProductMessage opts msg' sockets
          -- Block until a termination signal, then tear the workers down.
          _ <- readMVar interrupted
          killThread sourceThread
          forM_ threads killThread
-- | One PAIR socket per server, keyed by server identity.
type Sockets = Map Server (Socket Pair)
-- | Bind a PAIR socket for each configured server (registering it with
-- the broker state as we go) and run the continuation with the final
-- broker and socket map. Bind failures are logged and rethrown; note
-- that the sockets stay open for the whole continuation because the
-- recursion nests the 'withSocket' scopes.
withServers :: Context -> Broker -> [(Server,[ServerDependency],Addr)] -> (Broker -> Sockets -> IO b) -> IO b
withServers ctx b0 s k = go b0 s M.empty
  where
    go b ((server,deps,addr):rest) sockets = do
      withSocket ctx Pair $ \sckt -> do
        bind sckt addr `catch` \(e :: SomeException) -> do
          putStrLn $ unwords ["couldn't bind address", addr, "for server", show server]
          throw e
        putStrLn $ unwords ["listen on address", addr, "for", show server]
        go (B.registerServer server deps b) rest (M.insert server sckt sockets)
    go b [] sockets = k b sockets
main :: IO ()
main = start
-- | Feed a version message into the broker; see 'onMessage'.
onVersionMessage :: Options -> VersionMessage -> Sockets -> Broker -> IO Broker
{-# INLINE onVersionMessage #-}
onVersionMessage = onMessage B.newVersion
-- | Feed a product message into the broker; see 'onMessage'.
onProductMessage :: Options -> ProductMessage -> Sockets -> Broker -> IO Broker
{-# INLINE onProductMessage #-}
onProductMessage = onMessage B.newProduct
-- | Run a pure broker transition and send out whatever responses it
-- produced, returning the updated broker state.
onMessage :: (message -> Broker -> ([Response],Broker)) -> Options -> message -> Sockets -> Broker -> IO Broker
{-# INLINE onMessage #-}
onMessage handler opts msg sockets broker = do
  let (responses,broker') = handler msg broker
  sendResponses opts sockets responses
  return broker'
sendResponses :: Options -> Sockets -> [Response] -> IO ()
{-# INLINE sendResponses #-}
sendResponses opts sockets = mapM_ (sendResponse opts sockets)
-- | Encode a response's requests as a JSON array and send it on the
-- target server's PAIR socket. 'M.!' assumes every response targets a
-- registered server — an unknown server would throw.
sendResponse :: Options -> Sockets -> Response -> IO ()
{-# INLINE sendResponse #-}
sendResponse opts sockets (B.Response src server reqs) = do
  let response = A.encode $ A.toJSON $ map toJSON reqs
  send' (sockets M.! server) [] response
  when (debug opts) $ putStrLn $ unwords ["broker",showReqs, "->", show server]
  where
    toJSON req = case req of
      B.VersionMessage vers -> A.toJSON vers
      B.ProductMessage prod -> A.toJSON prod
    -- Human-readable request summary for --debug logging.
    showReqs = show $ flip map reqs $ \req ->
      case req of
        B.VersionMessage ver -> Print $ unwords ["version",T.unpack (V.source ver)]
        B.ProductMessage prod -> Print $ concat [T.unpack (P.product prod),"/",T.unpack (P.language prod)]
-- | Wrapper whose 'Show' instance is the identity, used to get
-- unquoted strings out of 'show' on lists above.
data Print = Print String
instance Show Print where
  show (Print s) = s
-- | Token placed in the interrupt MVar by the signal handlers.
data Interrupted = Interrupted
  deriving (Eq,Show)
|
svenkeidel/monto-broker
|
broker/Main.hs
|
bsd-3-clause
| 5,374 | 0 | 37 | 1,321 | 1,754 | 894 | 860 | 112 | 3 |
{-# LANGUAGE TemplateHaskell, DeriveGeneric #-}
{-|
Module : Database.DataType
Description : Contains a single enumeration type, ShapeType.
This is a small module that contains a single enumeration type.
Note that we can't include this in "Database.Tables" because of
a restriction on Template Haskell.
-}
module Database.DataType where
import Database.Persist.TH
import GHC.Generics
import Data.Aeson
-- | Defines a datatype of a shape, used in Shape json table.
-- | Defines a datatype of a shape, used in the Shape json table.
data ShapeType = BoolNode | Node | Hybrid | Region
     deriving (Show, Read, Eq, Generic)
-- | Template Haskell splice generating a 'PersistField' instance so
-- 'ShapeType' can be stored as a database column (values are persisted
-- via 'Show'/'Read').
derivePersistField "ShapeType"
-- | Generic-derived JSON encoding for 'ShapeType'.
instance ToJSON ShapeType
-- | Generic-derived JSON decoding for 'ShapeType'.
instance FromJSON ShapeType
-- | Categories of academic posts.
data PostType = Specialist | Major | Minor | Other
     deriving (Show, Read, Eq, Generic)
-- | Database-column support for 'PostType', as for 'ShapeType' above.
derivePersistField "PostType"
instance ToJSON PostType
instance FromJSON PostType
|
hermish/courseography
|
app/Database/DataType.hs
|
gpl-3.0
| 1,336 | 0 | 6 | 179 | 142 | 80 | 62 | 15 | 0 |
{-# LANGUAGE OverloadedStrings #-}
-- Module : Test.AWS.DeviceFarm
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
module Test.AWS.DeviceFarm
( tests
, fixtures
) where
import Network.AWS.DeviceFarm
import Test.AWS.Gen.DeviceFarm
import Test.Tasty
-- | No hand-written tests for this service yet.
tests :: [TestTree]
tests = []
-- | No recorded request/response fixtures yet.
fixtures :: [TestTree]
fixtures = []
|
fmapfmapfmap/amazonka
|
amazonka-devicefarm/test/Test/AWS/DeviceFarm.hs
|
mpl-2.0
| 752 | 0 | 5 | 201 | 73 | 50 | 23 | 11 | 1 |
{-# LANGUAGE Haskell98 #-}
{-# LINE 1 "Data/Attoparsec/Zepto.hs" #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE Trustworthy #-} -- Data.ByteString.Unsafe
{-# LANGUAGE BangPatterns #-}
-- |
-- Module : Data.Attoparsec.Zepto
-- Copyright : Bryan O'Sullivan 2007-2015
-- License : BSD3
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : unknown
--
-- A tiny, highly specialized combinator parser for 'B.ByteString'
-- strings.
--
-- While the main attoparsec module generally performs well, this
-- module is particularly fast for simple non-recursive loops that
-- should not normally result in failed parses.
--
-- /Warning/: on more complex inputs involving recursion or failure,
-- parsers based on this module may be as much as /ten times slower/
-- than regular attoparsec! You should /only/ use this module when you
-- have benchmarks that prove that its use speeds your code up.
module Data.Attoparsec.Zepto
(
Parser
, ZeptoT
, parse
, parseT
, atEnd
, string
, take
, takeWhile
) where
import Control.Applicative
import Control.Monad (MonadPlus(..), ap)
import qualified Control.Monad.Fail as Fail
import Control.Monad.IO.Class (MonadIO(..))
import Data.ByteString (ByteString)
import Data.Functor.Identity (Identity(runIdentity))
import Data.Monoid as Mon (Monoid(..))
import Data.Semigroup (Semigroup(..))
import Data.Word (Word8)
import Prelude hiding (take, takeWhile)
import qualified Data.ByteString as B
import qualified Data.ByteString.Unsafe as B
-- | Parser state: just the remaining input.
newtype S = S {
      input :: ByteString
    }
-- | Outcome of one parser step: failure message, or a (strict) value
-- plus the state left over.
data Result a = Fail String
              | OK !a S
-- | A simple parser.
--
-- This monad is strict in its state, and the monadic bind operator
-- ('>>=') evaluates each result to weak head normal form before
-- passing it along.
newtype ZeptoT m a = Parser {
      runParser :: S -> m (Result a)
    }
type Parser a = ZeptoT Identity a
-- Mapping preserves failure and rewraps success; the state threads
-- through unchanged.
instance Monad m => Functor (ZeptoT m) where
    fmap f m = Parser $ \s -> do
      result <- runParser m s
      case result of
        OK a s' -> return (OK (f a) s')
        Fail err -> return (Fail err)
    {-# INLINE fmap #-}
-- Lifted actions consume no input: the state is passed along as-is.
instance MonadIO m => MonadIO (ZeptoT m) where
    liftIO act = Parser $ \s -> do
      result <- liftIO act
      return (OK result s)
    {-# INLINE liftIO #-}
-- Bind stops at the first failure; 'fail' is routed to 'MonadFail'
-- for compatibility across base versions.
instance Monad m => Monad (ZeptoT m) where
    return = pure
    {-# INLINE return #-}
    m >>= k = Parser $ \s -> do
      result <- runParser m s
      case result of
        OK a s' -> runParser (k a) s'
        Fail err -> return (Fail err)
    {-# INLINE (>>=) #-}
    fail = Fail.fail
    {-# INLINE fail #-}
instance Monad m => Fail.MonadFail (ZeptoT m) where
    fail msg = Parser $ \_ -> return (Fail msg)
    {-# INLINE fail #-}
-- 'mplus' backtracks: the alternative is retried from the ORIGINAL
-- state, regardless of how much the first parser consumed.
instance Monad m => MonadPlus (ZeptoT m) where
    mzero = fail "mzero"
    {-# INLINE mzero #-}
    mplus a b = Parser $ \s -> do
      result <- runParser a s
      case result of
        ok@(OK _ _) -> return ok
        _ -> runParser b s
    {-# INLINE mplus #-}
instance (Monad m) => Applicative (ZeptoT m) where
    pure a = Parser $ \s -> return (OK a s)
    {-# INLINE pure #-}
    (<*>) = ap
    {-# INLINE (<*>) #-}
-- | Project a value out of the current state without consuming input.
gets :: Monad m => (S -> a) -> ZeptoT m a
gets f = Parser $ \s -> return (OK (f s) s)
{-# INLINE gets #-}
-- | Replace the current state.
put :: Monad m => S -> ZeptoT m ()
put s = Parser $ \_ -> return (OK () s)
{-# INLINE put #-}
-- | Run a parser.
-- | Run a pure parser over the given input, yielding either the
-- parsed value or the failure message.
parse :: Parser a -> ByteString -> Either String a
parse p bs = toEither (runIdentity (runParser p (S bs)))
  where
    -- Translate the internal 'Result' into the public 'Either' form.
    toEither (OK a _)   = Right a
    toEither (Fail err) = Left err
{-# INLINE parse #-}
-- | Run a parser on top of the given base monad.
-- | Run a parser on top of the given base monad; any leftover input
-- is discarded.
parseT :: Monad m => ZeptoT m a -> ByteString -> m (Either String a)
parseT p bs = do
  result <- runParser p (S bs)
  case result of
    OK a _ -> return (Right a)
    Fail err -> return (Left err)
{-# INLINE parseT #-}
-- '<>' is choice ('mplus'), so the Semigroup/Monoid/Alternative
-- instances all agree; 'mempty'/'empty' are the always-failing parser.
instance Monad m => Semigroup (ZeptoT m a) where
    (<>) = mplus
    {-# INLINE (<>) #-}
instance Monad m => Mon.Monoid (ZeptoT m a) where
    mempty = fail "mempty"
    {-# INLINE mempty #-}
    mappend = (<>)
    {-# INLINE mappend #-}
instance Monad m => Alternative (ZeptoT m) where
    empty = fail "empty"
    {-# INLINE empty #-}
    (<|>) = mplus
    {-# INLINE (<|>) #-}
-- | Consume input while the predicate returns 'True'.
-- | Consume input for as long as the predicate holds, returning the
-- consumed prefix; never fails (the prefix may be empty).
takeWhile :: Monad m => (Word8 -> Bool) -> ZeptoT m ByteString
takeWhile p =
  gets (B.span p . input) >>= \(matched, rest) -> do
    put (S rest)
    return matched
{-# INLINE takeWhile #-}
-- | Consume @n@ bytes of input.
-- | Consume exactly @n@ bytes of input, failing if fewer remain.
-- The length check above makes the unsafe (unchecked) slicing safe.
take :: Monad m => Int -> ZeptoT m ByteString
take !n = do
  s <- gets input
  if B.length s >= n
    then put (S (B.unsafeDrop n s)) >> return (B.unsafeTake n s)
    else fail "insufficient input"
{-# INLINE take #-}
-- | Match a string exactly.
-- | Match the given string exactly, consuming it from the input;
-- fails (without consuming) when the input does not start with it.
string :: Monad m => ByteString -> ZeptoT m ()
string s =
  gets input >>= \i ->
    if s `B.isPrefixOf` i
      -- Prefix check above guarantees the unchecked drop is in range.
      then put (S (B.unsafeDrop (B.length s) i))
      else fail "string"
{-# INLINE string #-}
-- | Indicate whether the end of the input has been reached.
-- | Indicate whether the end of the input has been reached.
-- The result is forced ('$!') so no thunk over the input is retained.
atEnd :: Monad m => ZeptoT m Bool
atEnd = do
  i <- gets input
  return $! B.null i
{-# INLINE atEnd #-}
|
phischu/fragnix
|
tests/packages/scotty/Data.Attoparsec.Zepto.hs
|
bsd-3-clause
| 5,239 | 0 | 17 | 1,390 | 1,527 | 809 | 718 | 132 | 2 |
{-# LANGUAGE GADTs, KindSignatures #-}
module Expr0 where
-- See #301
-- This one *does* use GADTs (Fct)
import Data.Kind (Type)
-- | Expression GADT; note 'Const' packs a 'Show' dictionary, which is
-- what the commentary below exercises.
data Expr :: Type -> Type where
  Const :: Show a => a -> Expr a
  Apply :: Fct a b -> Expr a -> Expr b
-- | Function-symbol GADT with fully determined index types.
data Fct :: Type -> Type -> Type where
  Succ :: Fct Int Int
  EqZero :: Fct Int Bool
  Add :: Fct Int (Int -> Int)
------------------------------
-- Sample terms: (S 41), (isZero (S 41)), (add (S 41)).
e1 :: Expr Int
e1 = Apply Succ (Const 41)
e2 :: Expr Bool
e2 = Apply EqZero e1
e3 :: Expr (Int -> Int)
e3 = Apply Add e1
------------------------------
-- | Evaluate an expression; total because the GADT indices pin every
-- case.
eval :: Expr a -> a
eval (Const c) = c
eval (Apply f a) = evalFct f $ eval a
-- | Denotation of each function symbol.
evalFct :: Fct a b -> a -> b
evalFct Succ = succ
evalFct EqZero = (0 ==)
evalFct Add = (+)
{- Up to here, everything works nicely:
\begin{verbatim}
*Expr0> eval e1
42
*Expr0> eval e2
False
*Expr0> eval e3 5
47
\end{verbatim}
But let us now try to define a |Show| instance.
For |Fct|, this is not a problem:
-}
instance Show (Fct a b) where
  show Succ = "S"
  show EqZero = "isZero"
  show Add = "add"
-- Matching 'Const' brings its packed 'Show' dictionary into scope,
-- so 'shows c' is well-typed here (see the commentary below for the
-- historical behaviour this file documents).
showsExpr :: Expr a -> ShowS
showsExpr (Const c) = shows c
showsExpr (Apply f a) =
  ('(' :) . shows f . (' ' :) . showsExpr a . (')' :)
instance Show (Expr a) where
  showsPrec _ (Const c) = shows c
  showsPrec _ (Apply f a) =
    ('(' :) . shows f . (' ' :) . shows a . (')' :)
{- But we used to get a complaint about the |Const| alternative (then
line 56) that documents that the constraint in the type of |Const|
must have been ignored:
\begin{verbatim}
No instance for (Show a)
arising from use of `shows' at Expr0.lhs:56:22-26
Probable fix: add (Show a) to the type signature(s) for `showsExpr'
In the definition of `showsExpr': showsExpr (Const c) = shows c
\end{verbatim}
But if we do that, the recursive call is of course still unsatisfied:
\begin{verbatim}
No instance for (Show a)
arising from use of `showsExpr' at Expr0.lhs:65:34-42
Probable fix: add (Show a) to the existential context for `Apply'
In the first argument of `(.)', namely `showsExpr a'
In the second argument of `(.)', namely `(showsExpr a) . ((')' :))'
In the second argument of `(.)', namely
`((' ' :)) . ((showsExpr a) . ((')' :)))'
\end{verbatim}
Following also the advice given in this last error message
actually makes GHC accept this, and then we can say:
\begin{verbatim}
*Expr0> showsExpr e1 ""
"(S 41)"
*Expr0> showsExpr e2 ""
"(isZero (S 41))"
\end{verbatim}
However, following this advice is counterintuitive
and should be unnecessary
since the |Show| instance for argument types
is only ever used in the const case.
We get:
\begin{verbatim}
*Expr0> showsExpr e3 ""
<interactive>:1:0:
No instance for (Show (Int -> Int))
arising from use of `showsExpr' at <interactive>:1:0-8
Probable fix: add an instance declaration for (Show (Int -> Int))
In the definition of `it': it = showsExpr e3 ""
\end{verbatim}
But of course we would expect the following:
\begin{verbatim}
*Expr0> showsExpr e3 ""
"(add (S 41))"
\end{verbatim}
\bigskip
The error messages are almost the same
if we define a |Show| instance directly
(line 90 was the |Const| alternative):
\begin{verbatim}
Could not deduce (Show a) from the context (Show (Expr a))
arising from use of `shows' at Expr0.lhs:90:26-30
Probable fix: add (Show a) to the class or instance method `showsPrec'
\end{verbatim}
-}
|
sdiehl/ghc
|
testsuite/tests/gadt/karl2.hs
|
bsd-3-clause
| 3,583 | 0 | 10 | 931 | 498 | 261 | 237 | -1 | -1 |
module B2 where
import Control.Parallel.Strategies (rseq,rpar,runEval)
-- test when already import strategies the import doesn't change.
-- here we need to add rpar and runEval to the import list
-- | Parallel quicksort: the two pivot partitions are sorted as sparks
-- ('rpar' inside 'runEval') and then concatenated.
--
-- Fixes over the previous version: added the missing empty-list base
-- case (the old code pattern-match-failed on @[]@), and corrected the
-- partition predicates — @filter (<) x xs@ applied 'filter' to three
-- arguments and did not type-check; the intended sections are
-- @(< x)@ and @(>= x)@.
qsort :: Ord a => [a] -> [a]
qsort [] = []
qsort (x:xs) = lsort_2 ++ [x] ++ hsort_2
  where
    lsort = qsort (filter (< x) xs)
    hsort = qsort (filter (>= x) xs)
    -- Spark both sub-sorts; the results are the same thunks, so the
    -- sorted output is identical to the sequential algorithm's.
    (lsort_2, hsort_2)
      = runEval
          (do ls <- rpar lsort
              hs <- rpar hsort
              return (ls, hs))
|
RefactoringTools/HaRe
|
old/testing/evalAddEvalMon/B2_TokOut.hs
|
bsd-3-clause
| 499 | 0 | 12 | 155 | 141 | 77 | 64 | 10 | 1 |
module Roman where
-- This is a simplified version of simplCore/should_compile/spec-inline.hs
--
-- It reproduces a problem where workers get specialized in different ways
-- depending on the values of uniques.
--
-- Compare:
--
-- $s$wgo_s1CN :: Int# -> Int -> Int#
-- [LclId, Arity=2, Str=DmdType <L,U><L,U>]
-- $s$wgo_s1CN =
-- \ (sc_s1CI :: Int#) (sc_s1CJ :: Int) ->
-- case tagToEnum# @ Bool (<=# sc_s1CI 0#) of _ [Occ=Dead] {
-- False ->
-- $wgo_s1BU (Just @ Int (I# (-# sc_s1CI 1#))) (Just @ Int sc_s1CJ);
-- True -> 0#
-- }
--
-- vs
--
-- $s$wgo_s18mTj :: Int -> Int# -> Int#
-- [LclId, Arity=2, Str=DmdType <L,U><L,U>]
-- $s$wgo_s18mTj =
-- \ (sc_s18mTn :: Int) (sc_s18mTo :: Int#) ->
-- case tagToEnum# @ Bool (<=# sc_s18mTo 0#) of _ [Occ=Dead] {
-- False ->
-- $wgo_s18mUc
-- (Just @ Int (I# (-# sc_s18mTo 1#))) (Just @ Int sc_s18mTn);
-- True -> 0#
-- }
-- | Count down from @n@ to 0 (always returning 0). The exact shape —
-- 'Maybe'-wrapped accumulators and the deliberately partial 'go' —
-- matters: this file is a determinism regression test for worker
-- specialisation (see the header comment), so do not "simplify" it.
foo :: Int -> Int
foo n =
    go (Just n) (Just (6::Int))
  where
    -- NOTE(review): 'go' is intentionally non-exhaustive; the missing
    -- 'Nothing' second-argument cases are unreachable from 'foo'.
    go Nothing (Just x) = go (Just 10) (Just x)
    go (Just n) (Just x)
      | n <= 0 = 0
      | otherwise = go (Just (n-1)) (Just x)
|
mcschroeder/ghc
|
testsuite/tests/determinism/simplCore/should_compile/spec-inline-determ.hs
|
bsd-3-clause
| 1,172 | 0 | 12 | 330 | 173 | 101 | 72 | 8 | 2 |
module Data.Aeson.Encode.Functions
(
brackets
, builder
, char7
, encode
, foldable
, list
, pairs
) where
import Data.Aeson.Encode.Builder
import Data.Aeson.Types.Class
import Data.Aeson.Types.Internal
import Data.ByteString.Builder (Builder, char7)
import Data.ByteString.Builder.Prim (primBounded)
import Data.Foldable (Foldable, foldMap)
import Data.Monoid ((<>), mempty)
import qualified Data.ByteString.Builder as B
import qualified Data.ByteString.Lazy as L
-- | Render a value's 'Encoding' as a bytestring 'Builder'.
builder :: ToJSON a => a -> Builder
builder = fromEncoding . toEncoding
{-# INLINE builder #-}
-- | Efficiently serialize a JSON value as a lazy 'L.ByteString'.
--
-- This is implemented in terms of the 'ToJSON' class's 'toEncoding' method.
encode :: ToJSON a => a -> L.ByteString
encode = B.toLazyByteString . builder
{-# INLINE encode #-}
-- | Encode a 'Foldable' as a JSON array.
foldable :: (Foldable t, ToJSON a) => t a -> Encoding
foldable = brackets '[' ']' . foldMap (Value . toEncoding)
{-# INLINE foldable #-}
-- | Encode a list as a JSON array: the empty list becomes @[]@, a
-- non-empty list is rendered head-first with comma-prefixed tail
-- elements.
list :: (ToJSON a) => [a] -> Encoding
list [] = emptyArray_
list (x:xs) = Encoding (char7 '[' <> builder x <> rest)
  where
    -- Fold the tail directly onto the closing bracket; by builder
    -- monoid associativity this yields exactly the same output as
    -- appending the separators and the bracket separately.
    rest = foldr (\v acc -> char7 ',' <> builder v <> acc) (char7 ']') xs
{-# INLINE list #-}
-- | Wrap a 'Series' in the given delimiters; an 'Empty' series is
-- rendered as just the two delimiter characters.
brackets :: Char -> Char -> Series -> Encoding
brackets begin end (Value v) = Encoding $
  char7 begin <> fromEncoding v <> char7 end
brackets begin end Empty = Encoding (primBounded (ascii2 (begin,end)) ())
-- | Encode a series of key/value pairs, separated by commas.
pairs :: Series -> Encoding
pairs s = brackets '{' '}' s
{-# INLINE pairs #-}
|
abbradar/aeson
|
Data/Aeson/Encode/Functions.hs
|
bsd-3-clause
| 1,657 | 0 | 12 | 354 | 466 | 262 | 204 | 40 | 1 |
<?xml version='1.0' encoding='UTF-8'?>
<!DOCTYPE helpset
PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN"
"http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="en-GB">
<title>Python Scripting</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
thc202/zap-extensions
|
addOns/jython/src/main/javahelp/org/zaproxy/zap/extension/jython/resources/help/helpset.hs
|
apache-2.0
| 974 | 81 | 67 | 169 | 417 | 213 | 204 | -1 | -1 |
{-# LANGUAGE TypeFamilies #-}
-- Trac #3346
module Foo where
-- | Embedding/projection pair with an associated result type
-- (regression test for #3346: RULES mentioning class methods with
-- associated types).
class EP a where
  type Result a
  from :: a -> Result a
  to :: Result a -> a
{-# RULES "rule1" forall x. to (from x) = x #-}
{-# RULES "rule2" forall x. from (to x) = x #-}
foo :: EP a => a -> a
-- This is typed in a way rather similarly to RULE rule1
foo x = to (from x)
-- 'bar' has an ambiguous type and is rightly rejected
-- bar :: forall a. Result a -> Result a
-- bar x = from (to x :: a)
|
frantisekfarka/ghc-dsi
|
testsuite/tests/typecheck/should_compile/T3346.hs
|
bsd-3-clause
| 472 | 0 | 8 | 125 | 82 | 46 | 36 | 10 | 1 |
-- Exercise list-literal pattern matching: the scrutinee [1,2,3,4,5]
-- matches the third alternative, so the program prints "OK.".
main = putStrLn message
  where
    message =
      case [1,2,3,4,5] of
        [1,2,3,4,6] -> "6!"
        [1,2,4,2,4] -> "a!"
        [1,2,3,4,5] -> "OK."
        _           -> "Broken."
|
seereason/ghcjs
|
test/fay/caseList.hs
|
mit
| 132 | 0 | 10 | 36 | 100 | 60 | 40 | 5 | 4 |
{-# LANGUAGE TypeOperators #-}
module Ticket75 where
-- Operator-named type constructor; the haddock reference below is the
-- fixture this test checks.
data a :- b = Q
-- | A reference to ':-'
f :: Int
f = undefined
|
DavidAlphaFox/ghc
|
utils/haddock/html-test/src/Ticket75.hs
|
bsd-3-clause
| 119 | 0 | 5 | 27 | 26 | 17 | 9 | 5 | 1 |
module Text.BraVal
( parser
, module Text.BraVal.Types
) where
import Text.BraVal.Types
import Data.Monoid ((<>))
import Control.Monad.Trans.Writer
import Control.Arrow ((>>>))
-- | One validation step over a stack of still-open brackets:
-- blanks are ignored, openers are pushed, a symbol matching the top
-- of the stack pops it, and anything else is reported through the
-- 'Writer' log (while leaving the stack unchanged).
insert :: (Symbolic a) => a -> [a] -> Writer [a] [a]
insert s a
    | isBlank s = pure a
    | isOpen s = pure (s:a)
    | (not . null) a && (head a) `isMatching` s = pure (tail a)
    | otherwise = curry writer a [s]
-- | Fold 'insert' over the whole token list, starting from an empty
-- stack; the final stack holds brackets left unclosed, the log holds
-- the mismatches.
parser :: (Symbolic a) => [a] -> Writer [a] [a]
parser = (fmap insert) >>> foldl (>>=) (pure mempty)
|
kindaro/BraVal
|
src/Text/BraVal.hs
|
isc
| 512 | 0 | 12 | 118 | 258 | 140 | 118 | 15 | 1 |
{-# LANGUAGE RecordWildCards #-}
module Control.Workflow.Main.Command
( SubParser(..)
, mkArgsParser
, runParser
, viewParser
, remoteParser
, deleteParser
, showParser
) where
import Options.Applicative
import Control.Workflow.Main.Types
import Control.Workflow.Main.Command.Run
import Control.Workflow.Main.Command.View
import Control.Workflow.Main.Command.Remote
import Control.Workflow.Main.Command.Delete
import Control.Workflow.Main.Command.Show
import Data.Proxy (Proxy(..))
import Control.Workflow.Coordinator
data SubParser a = SubParser
{ _subparser_name :: String -- ^ Name of the command.
, _subparser_desc :: String -- ^ Description of the command.
, _subparser :: Parser a -- ^ Arguments parser.
}
-- | The @run@ sub-command; the supplied function reads the coordinator
-- configuration (its parameters are presumably host/port/config-file —
-- confirm against 'Control.Workflow.Main.Command.Run').
runParser :: Coordinator coord
          => (String -> Int -> FilePath -> IO (Config coord)) -- ^ Config reader
          -> SubParser Command
runParser f = SubParser
    { _subparser_name = "run"
    , _subparser_desc = "Run workflow"
    , _subparser = run f }
-- | The @view@ sub-command: HTML visualization of the workflow.
viewParser :: SubParser Command
viewParser = SubParser
    { _subparser_name = "view"
    , _subparser_desc = "Produce HTML visualization of the workflow"
    , _subparser = view }
-- | The @remote@ sub-command: run as a worker for the given coordinator.
remoteParser :: Coordinator coord => Proxy coord -> SubParser Command
remoteParser proxy = SubParser
    { _subparser_name = "remote"
    , _subparser_desc = "Run workflow in the worker mode"
    , _subparser = remote proxy }
-- | The @delete@ sub-command: drop cached node results.
deleteParser :: SubParser Command
deleteParser = SubParser
    { _subparser_name = "delete"
    , _subparser_desc = "Delete node cache"
    , _subparser = delete }
-- | The @show@ sub-command: inspect cached node results.
showParser :: SubParser Command
showParser = SubParser
    { _subparser_name = "show"
    , _subparser_desc = "Show node cache"
    , _subparser = show' }
-- | Assemble the program's top-level argument parser from a list of
-- sub-commands, adding @--help@ support and the given header/description.
mkArgsParser :: String -- ^ Header of the Program helper.
             -> String -- ^ Description
             -> [SubParser a] -> ParserInfo a
mkArgsParser h descr cmd = info (helper <*> parser) $ fullDesc <> header h <> progDesc descr
  where
    parser = subparser $ mconcat $ map mkSubParser cmd
-- | Turn one 'SubParser' into an optparse-applicative command entry.
mkSubParser :: SubParser a -> Mod CommandFields a
mkSubParser SubParser{..} = command _subparser_name $
    info (helper <*> _subparser) $ fullDesc <> progDesc _subparser_desc
{-# INLINE mkSubParser #-}
|
kaizhang/SciFlow
|
SciFlow-app/src/Control/Workflow/Main/Command.hs
|
mit
| 2,313 | 0 | 13 | 518 | 517 | 299 | 218 | 58 | 1 |
module B9.RepositorySpec
( spec,
filterRepoImagesMapReturnsAllAndOnlyImagesSatisfieingTheFilterPredicate,
filterRepoImagesMapReturnsAllAndOnlyReposSatisfieingTheFilterPredicate,
allSharedImagesWithRepoReturnsAllNonEmptyRepos,
allSharedImagesWithRepoReturnsOnlyReposContainedInTheParameter,
allSharedImagesWithRepoReturnsAllImages,
allSharedImagesWithRepoReturnsOnlyPairsSuchThatTheImageIsContainedInTheRepository,
maxSharedImageOfAllReposReturnsNonNothingIfInputHasImages,
maxSharedImageOfAllReposReturnsTheMaximumImage,
maxSharedImageOfAllReposReturnsAValidRepoImagePair,
)
where
import B9.DiskImages
import B9.Repository
import Data.Foldable (any)
import qualified Data.Map as Map
import Data.Maybe
import qualified Data.Set as Set
import Test.Hspec
import Test.QuickCheck
-- | QuickCheck-backed hspec tree for the repository/image-set
-- operations. Each property below is either inline or delegates to one
-- of the named top-level properties defined later in this module.
spec :: HasCallStack => Spec
spec =
  describe
    "Repository"
    ( do
        describe
          "dropAllButNLatestSharedImages"
          ( do
              it
                "returns a set that is disjunct with the result of keepNLatestSharedImages"
                ( property
                    ( \(Positive n) sharedImages ->
                        let dropped = dropAllButNLatestSharedImages n sharedImages
                            kept = keepNLatestSharedImages n sharedImages
                         in dropped `Set.disjoint` kept
                    )
                )
              it
                "returns a set with at most n * number of unique names entries, for all n >= 0"
                ( property
                    ( \(Positive n) sharedImages ->
                        let actual = keepNLatestSharedImages n sharedImages
                            noUniqueNames =
                              Set.size (Set.map sharedImageName sharedImages)
                         in Set.size actual <= n * noUniqueNames
                    )
                )
          )
        describe
          "keepNLatestSharedImages"
          ( do
              it
                "returns a set with a plausible number of elements"
                ( property
                    ( \n sharedImages ->
                        let actual = keepNLatestSharedImages n sharedImages
                            noUniqueNames =
                              Set.size (Set.map sharedImageName sharedImages)
                         in label
                              "number of output elements < (max 0 n) * number of unqiue image names"
                              (Set.size actual <= max 0 n * noUniqueNames)
                              .&&. classify
                                (n > 0)
                                "n > 0"
                                ( n > 0
                                    ==> label
                                      "the output contains as many sharedImageNames as the input"
                                      (Set.size (Set.map sharedImageName actual) === noUniqueNames)
                                      .&&. label
                                        "keepNLatestSharedImages with n == 1 returns exactly as many elements as there are unique names"
                                        (Set.size (keepNLatestSharedImages 1 sharedImages) === noUniqueNames)
                                      .&&. label
                                        "the newest entry in the output is the newest entry in the input"
                                        (Set.lookupMax actual === Set.lookupMax sharedImages)
                                )
                    )
                )
              it
                "returns the input unaltered if n >= number of input images"
                ( property
                    ( \(Positive n) sharedImages ->
                        let actual = keepNLatestSharedImages n sharedImages
                         in n >= Set.size sharedImages ==> sharedImages === actual
                    )
                )
              it
                "returns a set with at most n * number of unique names entries, for all n >= 0"
                ( property
                    ( \(Positive n) sharedImages ->
                        let actual = keepNLatestSharedImages n sharedImages
                            noUniqueNames =
                              Set.size (Set.map sharedImageName sharedImages)
                         in Set.size actual <= n * noUniqueNames
                    )
                )
          )
        describe
          "filterRepoImagesMap"
          ( do
              it
                "returns all- and only repos matching the repo predicate"
                (property (withMaxSuccess 20 filterRepoImagesMapReturnsAllAndOnlyReposSatisfieingTheFilterPredicate))
              it
                "returns all- and only images matching the image predicate"
                (property (withMaxSuccess 20 filterRepoImagesMapReturnsAllAndOnlyImagesSatisfieingTheFilterPredicate))
              it "is idempotent" $
                property
                  ( withMaxSuccess
                      20
                      ( \(Fun _ repoPred) (Fun _ p) repoImgMap ->
                          let expected = filterRepoImagesMap repoPred p repoImgMap
                              actual = filterRepoImagesMap repoPred p expected
                           in expected === actual
                      )
                  )
          )
        describe
          "lookupCachedImages"
          ( it
              "returns only shared images that are cached"
              (property lookupCachedImagesReturnsOnlyImagesFromCache)
          )
        describe
          "allSharedImagesWithRepo"
          ( do
              it
                "returns all repositories that are not empty"
                (property allSharedImagesWithRepoReturnsAllNonEmptyRepos)
              it
                "returns only repositories that are in the input"
                (property allSharedImagesWithRepoReturnsOnlyReposContainedInTheParameter)
              it
                "returns all images"
                (property allSharedImagesWithRepoReturnsAllImages)
              it
                "returns only pairs where the image is in the repository"
                (property allSharedImagesWithRepoReturnsOnlyPairsSuchThatTheImageIsContainedInTheRepository)
          )
        describe
          "maxSharedImageOfAllRepos"
          ( do
              it
                "returns a non-Nothing value of the input has any images and Nothing otherwise"
                (property maxSharedImageOfAllReposReturnsNonNothingIfInputHasImages)
              it
                "returns the maximum of all images"
                (property maxSharedImageOfAllReposReturnsTheMaximumImage)
              it
                "returns a pair where the image is in the repository"
                (property maxSharedImageOfAllReposReturnsAValidRepoImagePair)
          )
    )
-- | True iff the predicate holds for at least one element of the
-- container but not for every element.
matchesSomeButNotAll :: Foldable t => (a -> Bool) -> t a -> Bool
matchesSomeButNotAll p xs = any p xs && not (all p xs)
-- | Filtering by an image predicate keeps exactly the satisfying
-- images: the kept set and the complement-filtered set partition the
-- full image set. 'any'/'all' are written infix with the predicate as
-- the left operand (e.g. @p `any` i == any p i@).
filterRepoImagesMapReturnsAllAndOnlyImagesSatisfieingTheFilterPredicate ::
  Fun SharedImage Bool -> RepoImagesMap -> Property
filterRepoImagesMapReturnsAllAndOnlyImagesSatisfieingTheFilterPredicate (Fun _ p) t =
  let i' = runCodeUnderTest p
      i'Complement = runCodeUnderTest (not . p)
      i = allSharedImages t
      runCodeUnderTest = allSharedImages . flip (filterRepoImagesMap (const True)) t
   in classify
        (not (p `any` i))
        "predicate matches no image"
        (null i' && i'Complement == i)
        .||. classify
          (p `matchesSomeButNotAll` i)
          "predicate matches some images"
          ( p `all` i'
              && not (p `any` i'Complement)
              && Set.union i' i'Complement == i
              && null (Set.intersection i' i'Complement)
          )
        .||. classify
          (p `all` i)
          "predicate matches all images"
          (null i'Complement && i' == i)
-- | Mirror of the image-predicate property above, for the repository
-- predicate: kept and complement-filtered repo sets partition the
-- full repository set.
filterRepoImagesMapReturnsAllAndOnlyReposSatisfieingTheFilterPredicate ::
  Fun Repository Bool -> RepoImagesMap -> Property
filterRepoImagesMapReturnsAllAndOnlyReposSatisfieingTheFilterPredicate (Fun _ p) t =
  let i' = runCodeUnderTest p
      i'Complement = runCodeUnderTest (not . p)
      i = allRepositories t
      runCodeUnderTest = allRepositories . flip (flip filterRepoImagesMap (const True)) t
   in classify
        (not (p `any` i))
        "predicate matches no repo"
        (null i' && i'Complement == i)
        .||. classify
          (p `matchesSomeButNotAll` i)
          "predicate matches some repos"
          ( p `all` i'
              && not (p `any` i'Complement)
              && Set.union i' i'Complement == i
              && null (Set.intersection i' i'Complement)
          )
        .||. classify
          (p `all` i)
          "predicate matches all repos"
          (null i'Complement && i' == i)
-- | Every image returned by 'lookupCachedImages' is present in the
-- 'Cache' repository's entry (a missing 'Cache' key counts as empty).
lookupCachedImagesReturnsOnlyImagesFromCache :: SharedImageName -> RepoImagesMap -> Property
lookupCachedImagesReturnsOnlyImagesFromCache sn table =
  let result = lookupCachedImages sn table
   in Set.intersection
        result
        (fromMaybe (Set.empty) (Map.lookup Cache table))
        === result
-- | The repositories paired up by 'allSharedImagesWithRepo' are
-- exactly those whose image set is non-empty.
allSharedImagesWithRepoReturnsAllNonEmptyRepos :: RepoImagesMap -> Property
allSharedImagesWithRepoReturnsAllNonEmptyRepos t =
  let nonEmptyRepos =
        Map.foldrWithKey
          (\repo imgs acc -> if null imgs then acc else Set.insert repo acc)
          Set.empty
          t
      reposReturned =
        Set.map snd (allSharedImagesWithRepo t)
   in reposReturned === nonEmptyRepos
-- | No invented repositories: every returned repo is a key of the input map.
allSharedImagesWithRepoReturnsOnlyReposContainedInTheParameter ::
  RepoImagesMap -> Bool
allSharedImagesWithRepoReturnsOnlyReposContainedInTheParameter t =
  let allReposReturned = Set.map snd (allSharedImagesWithRepo t)
   in allReposReturned `Set.isSubsetOf` allRepositories t
-- | Projecting the image component recovers the full image set.
allSharedImagesWithRepoReturnsAllImages :: RepoImagesMap -> Property
allSharedImagesWithRepoReturnsAllImages t =
  let allImagesReturned = Set.map fst (allSharedImagesWithRepo t)
   in allImagesReturned === allSharedImages t
-- | Each (image, repo) pair is consistent: the image really is stored
-- under that repository in the input map.
allSharedImagesWithRepoReturnsOnlyPairsSuchThatTheImageIsContainedInTheRepository ::
  RepoImagesMap -> Bool
allSharedImagesWithRepoReturnsOnlyPairsSuchThatTheImageIsContainedInTheRepository t =
  let validPair (i, r) =
        maybe False (Set.member i) (Map.lookup r t)
   in all validPair (allSharedImagesWithRepo t)
-- | 'maxSharedImageOfAllRepos' is 'Just' exactly when any image exists.
maxSharedImageOfAllReposReturnsNonNothingIfInputHasImages :: RepoImagesMap -> Property
maxSharedImageOfAllReposReturnsNonNothingIfInputHasImages t =
  isJust (maxSharedImageOfAllRepos t) =/= null (allSharedImages t)
-- | When images exist, the returned image is the global maximum.
maxSharedImageOfAllReposReturnsTheMaximumImage :: RepoImagesMap -> Property
maxSharedImageOfAllReposReturnsTheMaximumImage t =
  not (null (allSharedImages t))
    ==> fmap fst (maxSharedImageOfAllRepos t) === Just (maximum (allSharedImages t))
-- | The returned pair is internally consistent: the image is the
-- maximum element of the named repository's own set.
maxSharedImageOfAllReposReturnsAValidRepoImagePair :: RepoImagesMap -> Property
maxSharedImageOfAllReposReturnsAValidRepoImagePair t =
  case maxSharedImageOfAllRepos t of
    Just (i, r) ->
      label
        "got result"
        (Just i === (Map.lookup r t >>= Set.lookupMax))
    Nothing ->
      label "got no result" True
|
sheyll/b9-vm-image-builder
|
src/tests/B9/RepositorySpec.hs
|
mit
| 11,209 | 0 | 32 | 3,927 | 1,975 | 984 | 991 | -1 | -1 |
{-# LANGUAGE PatternSynonyms #-}
-- For HasCallStack compatibility
{-# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module JSDOM.Generated.HTMLTemplateElement
(getContent, HTMLTemplateElement(..), gTypeHTMLTemplateElement)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..))
import qualified Prelude (error)
import Data.Typeable (Typeable)
import Data.Traversable (mapM)
import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import JSDOM.Types
import Control.Applicative ((<$>))
import Control.Monad (void)
import Control.Lens.Operators ((^.))
import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import JSDOM.Enums
-- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLTemplateElement.content Mozilla HTMLTemplateElement.content documentation>
getContent ::
           (MonadDOM m) => HTMLTemplateElement -> m DocumentFragment
-- Generated binding: reads the JavaScript @content@ property and
-- converts it to a 'DocumentFragment' without a runtime type check.
getContent self
  = liftDOM ((self ^. js "content") >>= fromJSValUnchecked)
|
ghcjs/jsaddle-dom
|
src/JSDOM/Generated/HTMLTemplateElement.hs
|
mit
| 1,336 | 0 | 10 | 156 | 345 | 224 | 121 | 22 | 1 |
--5 Problem 15
--(**) Replicate the elements of a list a given number of times.
-- Problem 15: replicate every element of the list n times
-- (an n of 0 or less drops every element, as before).
replicate_me xs n = concat [ [ x | _ <- [1 .. n] ] | x <- xs ]
--Problem 16
--(**) Drop every N'th element from a list.
-- Problem 16: drop every n'th element from a list.
-- BUG FIX: the original prepended each kept chunk onto the accumulator,
-- so chunks came back in reverse order (dropNth [1..7] 3 [] yielded
-- [7,4,5,1,2] instead of [1,2,4,5,7]).  Appending the kept chunk keeps
-- the original left-to-right order.
dropNth [] n acc = acc
dropNth x n acc
  | length x < n = acc ++ x
  | length x == n = acc ++ take (n - 1) x
  | otherwise = dropNth (drop n x) n (acc ++ take (n - 1) x)
--7 Problem 17
--(*) Split a list into two parts; the length of the first part is given.
split_list x n = ((take n x) , (drop n x))
--(**) Extract a slice from a list.
--Given two indices, i and k, the slice is the list containing the elements between
--the i'th and k'th element of the original list (both limits included).
-- Problem 18: extract the slice from the i'th to the k'th element,
-- both 1-indexed and both limits included.
-- BUG FIX: the original rejected k == length x, yet the k'th element
-- exists whenever k <= length x; only k > length x is out of range.
splice x i k
  | k > length x = error "bad argument"
  | otherwise = drop (i-1) (take k x)
--9 Problem 19
--(**) Rotate a list N places to the left.
--Hint: Use the predefined functions length and (++).
-- Problem 19: rotate a list n places to the left by swapping the two
-- halves produced by 'splitAt' (same split as split_list).
rotate x n = back ++ front
  where
    (front, back) = splitAt n x
--10 Problem 20
--(*) Remove the K'th element from a list.
-- Problem 20: remove the n'th (1-indexed) element from a list.
-- BUG FIX: the original rejected n == length x, but removing the last
-- element is valid; only n > length x is out of range.
remove_kth x n
  | n > (length x) = error "too long"
  | otherwise = take (n-1) x ++ drop n x
|
sajit/learnyou
|
haskell/src/99questions/problem15plus.hs
|
mit
| 1,186 | 3 | 11 | 287 | 390 | 200 | 190 | 17 | 1 |
{-
Lab Session Software Testing 2013, Week 5
Tuba Kaya Chomette, Sander Leer, Martijn Stegeman
6 October 2013
-}
module Week5Sol_Q1_Q2
where
import Data.List
import Week5
import System.Random
import Control.Monad
--
-- Question 1 | Time spent: 1 hour
--
-- | Merge sort, insertion style: fold each element in as a singleton
-- list merged with the already-sorted remainder.
mergeSrt :: Ord a => [a] -> [a]
mergeSrt = foldr (\x sorted' -> merge [x] sorted') []
-- | Property: input and output have the same length.
lengthProp :: Ord a => [a] -> [a] -> Bool
lengthProp xs ys = length xs == length ys
-- | Property: the output is a sublist of the input (delegates to
-- Week5's 'sublist').
sublistProp1 :: Ord a => [a] -> [a] -> Bool
sublistProp1 = sublist
-- | Property: every element of xs occurs in ys.  NOTE(review): this is
-- one-way containment only, not a full permutation check (it ignores
-- multiplicities and the reverse direction).
permutation :: Eq a => [a] -> [a] -> Bool
permutation xs ys = all (`elem` ys) xs
-- | 'mergeSrt' wrapped in runtime assertions: the output must keep the
-- input's length, be a sublist of it, contain every input element, and
-- be sorted.  (The VVZ review notes below observe that the first two
-- checks are implied by the stronger ones.)
mergeSrtA :: Ord a => [a] -> [a]
mergeSrtA = assert1 lengthProp
 $ assert1 sublistProp1
 $ assert1 permutation
 $ post1 sorted mergeSrt
-- VVZ: you have four assertions (one of which only concerns a postcondition).
-- VVZ: are some of these assertions weaker than some others?
-- VVZ: can we simplify the function mergeSrtA by skipping some of them?
-- VVZ: (the answer is "YES, WE CAN")
-- VVZ: lengthProp and sublistProp1 are both weaker than permutation, and that one is equivalent to the last condition.
-- VVZ: if you didn't want to write it in a postcondition-style, it could have also been
-- VVZ: assert1 (\ _ ys -> sorted ys) mergeSrt
--
-- Question 2 | Time spent: 2 hours
--
-- | Split a list into two halves; the first half is the shorter one
-- when the length is odd.
split :: [a] -> ([a],[a])
split xs = splitAt (length xs `div` 2) xs
-- | Top-down merge sort: split the list in half, sort each half
-- recursively, and merge the results.
mergeSrt' :: Ord a => [a] -> [a]
mergeSrt' [] = []
mergeSrt' [x] = [x]
-- The length guard repeats the base cases (see VVZ note below); the
-- qualified 'split' avoids clashing with any imported name.
mergeSrt' xs | (length xs) < 2 = xs
             | otherwise = let (b,c) = (Week5Sol_Q1_Q2.split xs)
                           in (merge (mergeSrt' b) (mergeSrt' c))
-- VVZ: not a very lazy solution, the first two lines are covered by the third line anyway
-- | 'mergeSrt'' wrapped in the same runtime assertions as 'mergeSrtA':
-- length preserved, output a sublist of input, and output sorted.
mergeSrtA' :: Ord a => [a] -> [a]
mergeSrtA' = assert1 lengthProp
 $ assert1 sublistProp1
 $ post1 (\ ys -> (sorted ys)) mergeSrt'
-- VVZ: what is the difference between (\ ys -> (sorted ys)) and just sorted? Be lazier!
-- some random testing
-- works with the assertions
-- | Apply the (asserted) function to every input list, printing each
-- result; once the inputs are exhausted, report the number of tests.
-- A violated assertion inside p aborts the whole run.
autoTest :: Int -> ([Int] -> [Int]) -> [[Int]] -> IO ()
autoTest n _ [] = putStrLn (" " ++ show n ++ " tests passed")
autoTest n p (t:ts) = do
  print (p t)
  autoTest n p ts
-- various random functions from previous weeks
-- | An infinite list of pseudo-random Ints in the range [0, 15].
getIntList :: IO [Int]
getIntList = fmap (randomRs (0, 15)) newStdGen
-- | The first n pseudo-random Ints from 'getIntList'.
randomInts :: Int -> IO [Int]
randomInts n = fmap (take n) getIntList
-- | A single random Int in [0, n] from the global generator
-- ('randomRIO' is 'getStdRandom' applied to 'randomR').
getRandomInt :: Int -> IO Int
getRandomInt n = randomRIO (0, n)
-- | x random test lists, each of a random length in [0, 100].
getRandomInts :: Int -> IO [[Int]]
getRandomInts 0 = return []
getRandomInts x = do
  list <- Week5Sol_Q1_Q2.getRandomInt 100 >>= randomInts
  rest <- getRandomInts (x - 1)
  return (list : rest)
-- Run the asserted merge sort over 1000 random lists; any assertion
-- violation aborts the run with an error.
main = do xxs <- getRandomInts 1000
          autoTest 1000 mergeSrtA' xxs
|
stgm/prac-testing
|
week5/Week5Sol_Q1_Q2.hs
|
mit
| 2,789 | 4 | 12 | 651 | 909 | 473 | 436 | 51 | 1 |
{-# LANGUAGE PackageImports #-}
{-# OPTIONS_GHC -fno-warn-dodgy-exports -fno-warn-unused-imports #-}
-- | Reexports "Foreign.Marshal.Compat"
-- from a globally unique namespace.
module Foreign.Marshal.Compat.Repl (
module Foreign.Marshal.Compat
) where
import "this" Foreign.Marshal.Compat
|
haskell-compat/base-compat
|
base-compat/src/Foreign/Marshal/Compat/Repl.hs
|
mit
| 292 | 0 | 5 | 31 | 28 | 21 | 7 | 5 | 0 |
module JSONSchema.Draft4.Schema where
import Import hiding (mapMaybe)
import qualified Data.HashMap.Strict as HM
import Data.List.NonEmpty (NonEmpty)
import Data.Maybe (fromJust, isJust)
import Data.Scientific
import qualified Data.Set as Set
import qualified Data.Text as T
import qualified JSONSchema.Validator.Draft4 as D4
import JSONSchema.Validator.Utils
-- | In-memory form of a JSON Schema Draft 4 document.  Every official
-- validator keyword is an optional field ('Nothing' = keyword absent);
-- anything else ends up in '_schemaOther' so serialization round-trips.
data Schema = Schema
  { _schemaVersion :: Maybe Text
  , _schemaId :: Maybe Text
  , _schemaRef :: Maybe Text
  , _schemaDefinitions :: Maybe (HashMap Text Schema)
  -- ^ A standardized location for embedding schemas
  -- to be referenced from elsewhere in the document.
  , _schemaOther :: HashMap Text Value
  -- ^ Since the JSON document this schema was built from could
  -- contain schemas anywhere (not just in "definitions" or any
  -- of the other official keys) we save any leftover key/value
  -- pairs not covered by them here.
  --
  -- TODO: This field is the source of most of the complication in this
  -- module and needs to be removed. It should be doable, though it will
  -- involve some modification to the fetching code.
  , _schemaMultipleOf :: Maybe Scientific
  , _schemaMaximum :: Maybe Scientific
  , _schemaExclusiveMaximum :: Maybe Bool
  , _schemaMinimum :: Maybe Scientific
  , _schemaExclusiveMinimum :: Maybe Bool
  , _schemaMaxLength :: Maybe Int
  , _schemaMinLength :: Maybe Int
  , _schemaPattern :: Maybe Text
  , _schemaMaxItems :: Maybe Int
  , _schemaMinItems :: Maybe Int
  , _schemaUniqueItems :: Maybe Bool
  , _schemaItems :: Maybe (D4.Items Schema)
  -- Note that '_schemaAdditionalItems' is left out of 'runValidate'
  -- because its functionality is handled by '_schemaItems'. It always
  -- validates data unless 'Items' is present.
  , _schemaAdditionalItems :: Maybe (D4.AdditionalItems Schema)
  , _schemaMaxProperties :: Maybe Int
  , _schemaMinProperties :: Maybe Int
  , _schemaRequired :: Maybe (Set Text)
  , _schemaDependencies :: Maybe (HashMap Text (D4.Dependency Schema))
  , _schemaProperties :: Maybe (HashMap Text Schema)
  , _schemaPatternProperties :: Maybe (HashMap Text Schema)
  , _schemaAdditionalProperties :: Maybe (D4.AdditionalProperties Schema)
  , _schemaEnum :: Maybe (NonEmpty Value)
  , _schemaType :: Maybe D4.TypeValidator
  , _schemaAllOf :: Maybe (NonEmpty Schema)
  , _schemaAnyOf :: Maybe (NonEmpty Schema)
  , _schemaOneOf :: Maybe (NonEmpty Schema)
  , _schemaNot :: Maybe Schema
  } deriving (Eq, Show)
-- | The unconstrained schema: every validator unset and no extra keys,
-- so it accepts any JSON value.
emptySchema :: Schema
emptySchema = Schema
  { _schemaVersion = Nothing
  , _schemaId = Nothing
  , _schemaRef = Nothing
  , _schemaDefinitions = Nothing
  , _schemaOther = mempty
  , _schemaMultipleOf = Nothing
  , _schemaMaximum = Nothing
  , _schemaExclusiveMaximum = Nothing
  , _schemaMinimum = Nothing
  , _schemaExclusiveMinimum = Nothing
  , _schemaMaxLength = Nothing
  , _schemaMinLength = Nothing
  , _schemaPattern = Nothing
  , _schemaMaxItems = Nothing
  , _schemaMinItems = Nothing
  , _schemaUniqueItems = Nothing
  , _schemaItems = Nothing
  , _schemaAdditionalItems = Nothing
  , _schemaMaxProperties = Nothing
  , _schemaMinProperties = Nothing
  , _schemaRequired = Nothing
  , _schemaDependencies = Nothing
  , _schemaProperties = Nothing
  , _schemaPatternProperties = Nothing
  , _schemaAdditionalProperties = Nothing
  , _schemaEnum = Nothing
  , _schemaType = Nothing
  , _schemaAllOf = Nothing
  , _schemaAnyOf = Nothing
  , _schemaOneOf = Nothing
  , _schemaNot = Nothing
  }
instance FromJSON Schema where
  -- Each official keyword is bound to one letter, in the field order of
  -- 'Schema'.  The 'e' binding re-parses the object with every official
  -- key removed (keys of 'internalSchemaHashMap') so that only leftover
  -- pairs land in '_schemaOther'.
  parseJSON = withObject "Schema" $ \o -> do
    a <- o .:! "$schema"
    b <- o .:! "id"
    c <- o .:! "$ref"
    d <- o .:! "definitions"
    e <- parseJSON (Object (HM.difference o internalSchemaHashMap))
    f <- o .:! "multipleOf"
    g <- o .:! "maximum"
    h <- o .:! "exclusiveMaximum"
    i <- o .:! "minimum"
    j <- o .:! "exclusiveMinimum"
    k <- o .:! "maxLength"
    l <- o .:! "minLength"
    m <- o .:! "pattern"
    n <- o .:! "maxItems"
    o' <- o .:! "minItems"
    p <- o .:! "uniqueItems"
    q <- o .:! "items"
    r <- o .:! "additionalItems"
    s <- o .:! "maxProperties"
    t <- o .:! "minProperties"
    u <- o .:! "required"
    v <- o .:! "dependencies"
    w <- o .:! "properties"
    x <- o .:! "patternProperties"
    y <- o .:! "additionalProperties"
    z <- o .:! "enum"
    a2 <- o .:! "type"
    -- NonEmpty' is presumably a parsing wrapper from
    -- JSONSchema.Validator.Utils; unwrap it immediately.
    b2 <- fmap _unNonEmpty' <$> o .:! "allOf"
    c2 <- fmap _unNonEmpty' <$> o .:! "anyOf"
    d2 <- fmap _unNonEmpty' <$> o .:! "oneOf"
    e2 <- o .:! "not"
    pure Schema
      { _schemaVersion = a
      , _schemaId = b
      , _schemaRef = c
      , _schemaDefinitions = d
      , _schemaOther = e
      , _schemaMultipleOf = f
      , _schemaMaximum = g
      , _schemaExclusiveMaximum = h
      , _schemaMinimum = i
      , _schemaExclusiveMinimum = j
      , _schemaMaxLength = k
      , _schemaMinLength = l
      , _schemaPattern = m
      , _schemaMaxItems = n
      , _schemaMinItems = o'
      , _schemaUniqueItems = p
      , _schemaItems = q
      , _schemaAdditionalItems = r
      , _schemaMaxProperties = s
      , _schemaMinProperties = t
      , _schemaRequired = u
      , _schemaDependencies = v
      , _schemaProperties = w
      , _schemaPatternProperties = x
      , _schemaAdditionalProperties = y
      , _schemaEnum = z
      , _schemaType = a2
      , _schemaAllOf = b2
      , _schemaAnyOf = c2
      , _schemaOneOf = d2
      , _schemaNot = e2
      }
instance ToJSON Schema where
  -- | The way we resolve JSON Pointers to embedded schemas is by
  -- serializing the containing schema to a value and then resolving the
  -- pointer against it. This means that FromJSON and ToJSON must be
  -- isomorphic.
  --
  -- This influences the design choices in the library. E.g. right now
  -- there are two false values for "exclusiveMaximum" -- Nothing and
  -- Just False. We could have condensed them down by using () instead
  -- of Bool for "exclusiveMaximum". This would have made writing schemas
  -- in haskell easier, but we could no longer round trip through/from
  -- JSON without losing information.
  --
  -- Official keys take precedence over '_schemaOther' here, since
  -- 'HM.union' is left-biased.
  toJSON s = Object $ HM.union (mapMaybe ($ s) internalSchemaHashMap)
                               (toJSON <$> _schemaOther s)
    where
      -- 'mapMaybe' is provided by unordered-containers after
      -- unordered-container-2.6.0.0, but until that is a little older
      -- (and has time to get into Stackage etc.) we use our own
      -- implementation.
      mapMaybe :: (v1 -> Maybe v2) -> HashMap k v1 -> HashMap k v2
      mapMaybe f = fmap fromJust . HM.filter isJust . fmap f
-- | Internal. Separate from ToJSON because it's also used
-- by FromJSON to determine what keys aren't official schema
-- keys and therefor should be included in _schemaOther.
-- | Maps every official Draft 4 keyword to the accessor that serializes
-- the corresponding 'Schema' field ('Nothing' = keyword absent).  The
-- key set doubles as the "official keys" filter used by FromJSON.
internalSchemaHashMap :: HashMap Text (Schema -> Maybe Value)
internalSchemaHashMap = HM.fromList
  [ ("$schema", f _schemaVersion)
  , ("id", f _schemaId)
  , ("$ref", f _schemaRef)
  , ("definitions", f _schemaDefinitions)
  , ("multipleOf", f _schemaMultipleOf)
  , ("maximum", f _schemaMaximum)
  , ("exclusiveMaximum", f _schemaExclusiveMaximum)
  , ("minimum", f _schemaMinimum)
  , ("exclusiveMinimum", f _schemaExclusiveMinimum)
  , ("maxLength", f _schemaMaxLength)
  , ("minLength", f _schemaMinLength)
  , ("pattern", f _schemaPattern)
  , ("maxItems", f _schemaMaxItems)
  , ("minItems", f _schemaMinItems)
  , ("uniqueItems", f _schemaUniqueItems)
  , ("items", f _schemaItems)
  , ("additionalItems", f _schemaAdditionalItems)
  , ("maxProperties", f _schemaMaxProperties)
  , ("minProperties", f _schemaMinProperties)
  , ("required", f _schemaRequired)
  , ("dependencies", f _schemaDependencies)
  , ("properties", f _schemaProperties)
  , ("patternProperties", f _schemaPatternProperties)
  , ("additionalProperties", f _schemaAdditionalProperties)
  , ("enum", f _schemaEnum)
  , ("type", f _schemaType)
  -- The NonEmpty' wrapper restores the serialized form these keys are
  -- parsed from.
  , ("allOf", f (fmap NonEmpty' . _schemaAllOf))
  , ("anyOf", f (fmap NonEmpty' . _schemaAnyOf))
  , ("oneOf", f (fmap NonEmpty' . _schemaOneOf))
  , ("not", f _schemaNot)
  ]
  where
    -- Serialize the field's value, if present.
    f :: ToJSON a => (Schema -> Maybe a) -> Schema -> Maybe Value
    f = (fmap.fmap) toJSON
instance Arbitrary Schema where
  -- Sized generator: recursive fields shrink the size parameter (n/10)
  -- so generated schemas stay finite.
  arbitrary = sized f
    where
      -- 50/50 chance of Nothing vs a generated Just.
      maybeGen :: Gen a -> Gen (Maybe a)
      maybeGen a = oneof [pure Nothing, Just <$> a]
      -- Like maybeGen, but always Nothing once the size budget runs out.
      maybeRecurse :: Int -> Gen a -> Gen (Maybe a)
      maybeRecurse n a
        | n < 1 = pure Nothing
        | otherwise = maybeGen $ resize (n `div` 10) a
      f :: Int -> Gen Schema
      f n = do
        a <- maybeGen arbitraryText
        b <- maybeGen arbitraryText
        c <- maybeGen arbitraryText
        -- NOTE: The next two fields are empty to generate cleaner
        -- schemas, but note that this means we don't test the
        -- invertability of these fields.
        d <- pure Nothing -- _schemaDefinitions
        e <- pure mempty -- _otherPairs
        f' <- maybeGen arbitraryPositiveScientific
        g <- maybeGen arbitraryScientific
        h <- arbitrary
        i <- maybeGen arbitraryScientific
        j <- arbitrary
        k <- maybeGen (getPositive <$> arbitrary)
        l <- maybeGen (getPositive <$> arbitrary)
        m <- maybeGen arbitraryText
        n' <- maybeGen (getPositive <$> arbitrary)
        o <- maybeGen (getPositive <$> arbitrary)
        p <- arbitrary
        q <- maybeRecurse n arbitrary
        r <- maybeRecurse n arbitrary
        s <- maybeGen (getPositive <$> arbitrary)
        t <- maybeGen (getPositive <$> arbitrary)
        u <- maybeGen (Set.map T.pack <$> arbitrary)
        v <- maybeRecurse n arbitraryHashMap
        w <- maybeRecurse n arbitraryHashMap
        x <- maybeRecurse n arbitraryHashMap
        y <- maybeRecurse n arbitrary
        z <- maybeRecurse n ( fmap _unArbitraryValue . _unNonEmpty'
                              <$> arbitrary)
        a2 <- arbitrary
        b2 <- maybeRecurse n (_unNonEmpty' <$> arbitrary)
        c2 <- maybeRecurse n (_unNonEmpty' <$> arbitrary)
        d2 <- maybeRecurse n (_unNonEmpty' <$> arbitrary)
        e2 <- maybeRecurse n arbitrary
        pure Schema
          { _schemaVersion = a
          , _schemaId = b
          , _schemaRef = c
          , _schemaDefinitions = d
          , _schemaOther = e
          , _schemaMultipleOf = f'
          , _schemaMaximum = g
          , _schemaExclusiveMaximum = h
          , _schemaMinimum = i
          , _schemaExclusiveMinimum = j
          , _schemaMaxLength = k
          , _schemaMinLength = l
          , _schemaPattern = m
          , _schemaMaxItems = n'
          , _schemaMinItems = o
          , _schemaUniqueItems = p
          , _schemaItems = q
          , _schemaAdditionalItems = r
          , _schemaMaxProperties = s
          , _schemaMinProperties = t
          , _schemaRequired = u
          , _schemaDependencies = v
          , _schemaProperties = w
          , _schemaPatternProperties = x
          , _schemaAdditionalProperties = y
          , _schemaEnum = z
          , _schemaType = a2
          , _schemaAllOf = b2
          , _schemaAnyOf = c2
          , _schemaOneOf = d2
          , _schemaNot = e2
          }
|
seagreen/hjsonschema
|
src/JSONSchema/Draft4/Schema.hs
|
mit
| 13,911 | 0 | 16 | 5,658 | 2,654 | 1,455 | 1,199 | 254 | 1 |
module Absyn.Type where
import Util.PrettyPrint
import Prelude hiding (concat)
-- | Surface-syntax types of the language.
data Type
  = TName String              -- named type
  | TApp Type [Type]          -- type application: name<args>
  | TArrow [Type] Type        -- function type: (params) -> ret
  | TRecord [(String, Type)]  -- record type: {field: type, ...}
  | TVoid
  | TPlaceholder
-- | 'show' delegates to the pretty-printer so types display in surface
-- syntax rather than constructor form.
instance Show Type where
  show = Util.PrettyPrint.print
-- | Renders each 'Type' constructor back to surface syntax using the
-- project's pretty-printing combinators (note: 'concat' here is the
-- combinator from Util.PrettyPrint, since Prelude's is hidden).
instance PrettyPrint Type where
  pprint (TName name) =
    str $ pprName name
  -- Type application: name<arg1, arg2, ...>
  pprint (TApp ty args) =
    concat [ pprint ty
           , str "<"
           , interleave (str ", ") (map pprint args)
           , str ">"
           ]
  -- Function type: (p1, p2) -> ret
  pprint (TArrow params ret) =
    concat [ str "("
           , interleave (str ", ") (map pprint params)
           , str ")"
           , str " -> "
           , pprint ret
           ]
  -- Record type: {k1: t1, k2: t2}
  pprint (TRecord fields) =
    concat [ str "{"
           , fields'
           , str "}"
           ]
    where
      fields' = interleave (str ", ") $ map pprintField fields
      pprintField (key, ty) = concat [ str key, str ": ", pprint ty]
  pprint TVoid =
    str "Void"
  pprint TPlaceholder =
    str "#placeholder"
|
tadeuzagallo/verve-lang
|
src/Absyn/Type.hs
|
mit
| 1,032 | 0 | 11 | 367 | 352 | 183 | 169 | 36 | 0 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Main
( main
) where
-------------------------------------------------------------------------------
import Control.Applicative as A
import Data.IORef
import Data.List
import Data.Text (Text)
import qualified Data.Text as T
import Test.Tasty
import Test.Tasty.HUnit
import Test.Tasty.QuickCheck
-------------------------------------------------------------------------------
import Drifter
-------------------------------------------------------------------------------
-- | Entry point: run the whole drifter suite with tasty's default runner.
main :: IO ()
main = defaultMain tests
-- | All top-level test groups for the drifter package.
tests :: TestTree
tests =
  testGroup "drifter" [graphTests, typesTests, changeSequenceTests]
-------------------------------------------------------------------------------
-- | 'changeSequence' may rewire dependencies but must leave the list of
-- change names untouched.
changeSequenceTests :: TestTree
changeSequenceTests = testGroup "changeSequence"
  [
    -- Blind: Change has a Show instance but its output is noisy.
    testProperty "preserves list members" $ \((Blind cs) :: Blind [Change TestDB]) ->
      let cnames = changeName <$> cs
          cnames' = changeName <$> changeSequence cs
      in cnames === cnames'
  ]
-------------------------------------------------------------------------------
-- | Tests for "Drifter.Graph".
graphTests :: TestTree
graphTests = testGroup "Drifter.Graph" [resolveDependencyOrderTests]
-- | Tests for "Drifter.Types".
typesTests :: TestTree
typesTests = testGroup "Drifter.Types" [migrateTests]
-- | A list already chained by 'changeSequence' must be a fixed point of
-- 'resolveDependencyOrder'.
resolveDependencyOrderTests :: TestTree
resolveDependencyOrderTests = testGroup "resolveDependencyOrder"
  [
    testProperty "orders by dependency" $ \(UniqueChanges cs) ->
      let presorted = changeSequence cs
      in resolveDependencyOrder presorted === presorted
  ]
-- | 'migrate' must run the example migrations in dependency order; each
-- TestDB migration appends its number to the shared IORef (see the
-- Drifter instance below).
migrateTests :: TestTree
migrateTests = testGroup "migrate"
  [
    testCase "runs the given migrations" $ do
      runs <- newIORef []
      _ <- migrate (DBConnection $ TestDBConn runs) exampleChanges
      res <- readIORef runs
      res @?= [1,2,3]
  ]
-- | Three chained changes (c2 depends on c1, c3 on c2), deliberately
-- listed out of order so the tests must resolve dependencies.
exampleChanges :: [Change TestDB]
exampleChanges = [c2, c3, c1]
  where
    c1 = Change c1n Nothing [] (RunMigrationNumber 1)
    c1n = ChangeName "c1"
    c2n = ChangeName "c2"
    c3n = ChangeName "c3"
    -- c2 and c3 are record updates of c1, overriding name/deps/method.
    c2 = c1 { changeName = c2n, changeDependencies = [c1n], changeMethod = RunMigrationNumber 2}
    c3 = c1 { changeName = c3n, changeDependencies = [c2n], changeMethod = RunMigrationNumber 3}
-- Test backend: a "migration" just appends its number to an IORef, so
-- tests can observe execution order.
data TestDB
newtype TestDBConn = TestDBConn (IORef [Int])
data instance Method TestDB = RunMigrationNumber Int deriving (Show, Eq)
data instance DBConnection TestDB = DBConnection TestDBConn
instance Drifter TestDB where
  migrateSingle (DBConnection (TestDBConn runs)) Change { changeMethod = RunMigrationNumber mn} = do
    modifyIORef runs (++ [mn])
    return $ Right ()
-- Generator instances for the property tests.
instance Arbitrary Text where
  arbitrary = T.pack <$> arbitrary
deriving instance Arbitrary ChangeName
instance Arbitrary (Method TestDB) where
  arbitrary = RunMigrationNumber <$> arbitrary
instance Arbitrary (Change TestDB) where
  arbitrary = Change <$> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary
deriving instance Eq (Change TestDB)
deriving instance Show (Change TestDB)
-- | Change lists whose names are pairwise distinct, generated by
-- filtering with 'suchThat'.
newtype UniqueChanges = UniqueChanges [Change TestDB] deriving (Show)
instance Arbitrary UniqueChanges where
  arbitrary = UniqueChanges A.<$> arbitrary `suchThat` uniqueNames
    where
      uniqueNames cs = let names = map changeName cs
                       in nub names == names
|
AndrewRademacher/drifter
|
test/Main.hs
|
mit
| 3,731 | 0 | 14 | 818 | 834 | 450 | 384 | -1 | -1 |
{-# OPTIONS_GHC -Wall -fno-warn-missing-methods #-}
{-# LANGUAGE BangPatterns #-}
module HW06 where
import Data.List
-- Exercise 1 -----------------------------------------
-- Naive exponential-time Fibonacci with fib 0 = fib 1 = 1
-- (this file's indexing; 'fastFib' below uses fastFib 0 = 0).
fib :: Integer -> Integer
fib n
  | n == 0 || n == 1 = 1
  | otherwise = fib (n - 1) + fib (n - 2)
-- All Fibonacci numbers, computed (slowly) via the naive 'fib'.
fibs1 :: [Integer]
fibs1 = [fib i | i <- [0 ..]]
-- Exercise 2 -----------------------------------------
-- Fibonacci numbers by iterating pairs of consecutive terms; each
-- element takes constant work, unlike 'fibs1'.
fibs2 :: [Integer]
fibs2 = map fst (iterate (\(a, b) -> (b, a + b)) (1, 1))
-- Exercise 3 -----------------------------------------
data Stream a = Cons a (Stream a)
-- Show instance prints the first 10 elements followed by ellipsis
-- (no closing bracket is emitted).
instance Show a => Show (Stream a) where
    show s = "[" ++ intercalate ", " (map show $ take 10 $ streamToList s)
             ++ ",..."
-- Unfold a Stream into the equivalent infinite list.
streamToList :: Stream a -> [a]
streamToList (Cons h t) = h : streamToList t
-- Exercise 4 -----------------------------------------
-- Apply f to every element of the (infinite) stream.
instance Functor Stream where
  fmap f (Cons x xs) = Cons (f x) (fmap f xs)
-- Exercise 5 -----------------------------------------
-- The constant stream x, x, x, ... (tied into a knot so the tail is
-- shared rather than rebuilt).
sRepeat :: a -> Stream a
sRepeat x = let s = Cons x s in s
-- The stream x, f x, f (f x), ...
sIterate :: (a -> a) -> a -> Stream a
sIterate f x = Cons x (sIterate f (f x))
-- Alternate elements of the two streams, starting with the first;
-- note the swapped arguments in the recursive call.
sInterleave :: Stream a -> Stream a -> Stream a
sInterleave (Cons x xs) ys = Cons x (sInterleave ys xs)
-- The first n elements of the stream as a list ([] for n <= 0).
sTake :: Int -> Stream a -> [a]
sTake n (Cons x xs) =
  if n <= 0 then [] else x : sTake (n - 1) xs
-- Exercise 6 -----------------------------------------
-- The naturals 0, 1, 2, ... as a Stream.
nats :: Stream Integer
nats = sIterate succ 0
-- Ruler sequence 0,1,0,2,0,1,0,3,...: built by interleaving constant
-- streams sInterleave (sRepeat 0) (sInterleave (sRepeat 1) ...).
-- In acc, (sRepeat 0) is just some dummy value, since foldr over the
-- infinite list [0..] never reaches the accumulator.
ruler :: Stream Integer
ruler = foldr step acc [0..]
  where step a b = sInterleave (sRepeat a) b
        acc = sInterleave (sRepeat 0) $ sRepeat 0
-- Exercise 7 -----------------------------------------
-- | Implementation of C rand: a linear congruential generator,
-- x' = (1103515245 * x + 12345) mod 2^31, seeded by the argument.
rand :: Int -> Stream Int
rand = sIterate step
  where step x = (1103515245 * x + 12345) `mod` 2147483648
-- Exercise 8 -----------------------------------------
{- Total Memory in use: 236 MB -}
-- Two full passes over the list, which keeps the whole list alive.
minMaxSlow :: [Int] -> Maybe (Int, Int)
minMaxSlow xs =
  if null xs
    then Nothing -- no min or max if there are no elements
    else Just (minimum xs, maximum xs)
-- Exercise 9 -----------------------------------------
{- Total Memory in use: 1 MB -}
-- Single strict pass; bang patterns keep the running pair evaluated so
-- no thunk chain builds up.
minMax :: [Int] -> Maybe (Int, Int)
minMax [] = Nothing
minMax (x:xs) = Just (foldl' step (x, x) xs)
  where step (!lo, !hi) y = (min lo y, max hi y)
main :: IO ()
-- main = print $ minMax $ sTake 1000000 $ rand 7666532
-- Print the millionth Fibonacci number via matrix exponentiation.
main = print $ fastFib 1000000
-- Exercise 10 ----------------------------------------
-- 2x2 integer matrix, row-major: M a b c d = [[a, b], [c, d]].
data Matrix = M Integer Integer Integer Integer
    deriving Show
-- Only (*) is defined (the file disables missing-method warnings);
-- multiplication alone is what (^) uses for positive exponents.
instance Num Matrix where
    (M a b c d) * (M a' b' c' d')
      = M (a*a'+b*c') (a*b'+b*d') (c*a'+d*c') (c*b'+d*d')
-- Fibonacci in O(log n) multiplications: [[1,1],[1,0]]^n carries F(n)
-- in its off-diagonal entry.  Indexing here is fastFib 0 = 0, unlike
-- 'fib' above which starts at 1.
fastFib :: Int -> Integer
fastFib 0 = 0
fastFib 1 = 1
fastFib n
  | n < 0 = error "no negative input!"
  | otherwise = case (M 1 1 1 0) ^ n of
      (M _ fn _ _) -> fn
|
hanjoes/cis194
|
hw6/HW06.hs
|
mit
| 2,937 | 0 | 12 | 637 | 1,064 | 547 | 517 | 60 | 2 |
import Data.Bits
import Data.Char
import Data.List.Split
allhex s = all isHexDigit s
combineHex [x,y] = shiftL x 4 + y
-- Parse a hex string into byte values, two digits per byte.
hex2intList h = map combineHex (chunksOf 2 (map digitToInt h))
nibble2Hex n = (concat [['0'..'9'],['a'..'f']]) !! n
byte2hex b = [nibble2Hex (shiftR b 4)] ++ [nibble2Hex (b .&. 15)]
xorPair (x,y) = xor x y
-- XOR two equal-length hex strings byte-wise, returning the hex
-- rendering of the result.
doit l1 l2 = concatMap byte2hex xored
  where
    xored = zipWith xor (hex2intList l1) (hex2intList l2)
-- Run func on the two strings only when they are the same even length
-- and entirely hex; otherwise return an error message.
validate l1 l2 func
  | sameLength && bothHex && evenLength = func l1 l2
  | otherwise = "Lists not the same, even length or not all hex"
  where
    sameLength = length l1 == length l2
    bothHex = allhex l1 && allhex l2
    evenLength = length l1 `mod` 2 == 0
main :: IO ()
-- Read two hex strings from stdin and print their XOR (or a
-- validation error message).
main = do l1 <- getLine
          l2 <- getLine
          putStr ((validate l1 l2 doit) ++ "\n")
|
mozkeeler/cryptopals
|
set1/challenge2/fixedXOR.hs
|
mit
| 956 | 0 | 11 | 239 | 421 | 218 | 203 | 26 | 2 |
module CCTextParser where
import Text.ParserCombinators.Parsec
import Data.Functor
import Numeric
-- output type
-- | Attribute values that may appear in an item's parameter list.
data Value = IString String
           | INumber Double
           | IBool Bool
           deriving (Eq, Ord, Show)
-- | Key/value attribute pairs from an item header's parentheses.
type KVPair = [(String, Value)]
-- | One parsed element of the card text; Audio/Text/Pic carry their
-- attributes and raw content.
data ItemValue = ID String
               | Type String
               | Audio { attrs :: KVPair, content :: String }
               | Text { attrs :: KVPair, content :: String }
               | Pic { attrs :: KVPair, content :: String }
               | TR String
               | Opts [ItemValue]
               deriving (Eq, Ord, Show)
-- lexer and parser
-- | Parses a @[TYPE name]@ header, yielding the type name.
p_type :: CharParser () ItemValue
p_type = Type <$> (between (spaces *> char '[' <* spaces)
                           (spaces *> char ']')
                           (string "TYPE" *> (many1 space) *> (many1 (noneOf " ]"))))
-- | A @Pic(...):@ or @Pic:@ item followed by its raw content.
p_pic :: CharParser () ItemValue
p_pic = Pic <$> (string "Pic" *> p_params) <*> p_content
-- | An @Audio(...):@ or @Audio:@ item followed by its raw content.
p_audio :: CharParser () ItemValue
p_audio = Audio <$> (string "Audio" *> p_params) <*> p_content
-- | Bare content as a TR item (the @TR:@ marker is presumably consumed
-- by the caller -- confirm against the item dispatcher).
p_tr :: CharParser () ItemValue
p_tr = TR <$> p_content
-- | Parses an item header's parameter list: either @( k=v, ... ):@ or
-- a bare @:@, which means no attributes.
p_params :: CharParser () KVPair
p_params = (between (char '(' <* spaces)
                    (spaces *> string "):")
                    ((p_field <* spaces) `sepBy` (char ',' <* spaces))) <|> (char ':' $> [])
  where p_field = (,) <$> (p_name <* char '=' <* spaces) <*> p_value
-- | Raw content up to (but not consuming) the next item marker.
p_content :: CharParser () String
p_content = manyTill anyChar (lookAhead eoItem)
-- | An alphanumeric identifier.
p_name :: CharParser () String
p_name = many1 alphaNum
-- | An attribute value: boolean, then number, then bare string
-- (tried in that order with backtracking).
p_value :: CharParser () Value
p_value = choice $ try <$> [ IBool <$> p_bool
                           , INumber <$> p_number
                           , IString <$> p_string_value]
-- | Parses a boolean: requires (via 'p_coms' lookahead) that the
-- literal is followed by a delimiter, then optionally consumes it.
p_bool :: CharParser () Bool
p_bool = (p_coms "true" *> optional (string "true") $> True)
         <|> (p_coms "false" *> optional (string "false") $> False)
-- | Succeeds without consuming input when @prefix@ is followed by one
-- of the delimiters @)@, @,@, space, or newline.
p_coms :: String -> CharParser () String
p_coms prefix = choice $ try . lookAhead . string . (prefix ++) <$> [")", ",", " ", "\n"]
-- | A bare identifier value with surrounding whitespace skipped.
p_string_value :: CharParser () String
p_string_value = spaces *> p_name <* spaces
-- | Parses a signed decimal number by reading directly from the
-- remaining input and resetting the parser input past it.
p_number :: CharParser () Double
p_number = do
  s <- getInput
  case readSigned readFloat s of
    [(n, s')] -> n <$ setInput s'
    _ -> fail "Not a numer"
-- Lookahead alternatives marking the start of the next item (or EOF);
-- used by p_content to delimit raw content.
eoItem = choice $ try <$> [ string "Pic("
                          , string "Pic:"
                          , string "Audio:"
                          , string "Audio("
                          , string "Text:"
                          , string "Text("
                          , string "[TYPE"
                          , string "TR:"
                          , string "Opts:"
                          , eof $> "EOF"]
|
cloudorz/parsec_practice
|
cc_text_parse/src/CCTextParser.hs
|
mit
| 2,575 | 0 | 13 | 836 | 848 | 454 | 394 | 63 | 2 |
import Data.Char
import Control.Monad
-- Echo loop: prompt, read a line, print it upper-cased, repeat forever
-- (terminates only when getLine fails, e.g. on EOF).
main = forever $ do
    putStr "Input:"
    l <- getLine
    putStrLn $ map toUpper l
|
feliposz/learning-stuff
|
haskell/capslocker.hs
|
mit
| 125 | 0 | 9 | 32 | 45 | 21 | 24 | 6 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Kafkaesque.Queries.Log
( writeMessageSet
) where
import qualified Data.ByteString (length)
import Data.Int (Int32, Int64)
import Data.List (foldl')
import Data.Serialize.Put (runPut)
import qualified Database.PostgreSQL.Simple as PG
import Kafkaesque.Message (Message(..), putMessage)
-- | Lock (SELECT ... FOR UPDATE) and read the next log offset and total
-- byte count for a topic partition.  Still fails when the partition row
-- is missing, but with a descriptive message instead of the original
-- bare 'head' crash on the empty result list.
getNextOffsetsForUpdate :: PG.Connection -> Int32 -> Int32 -> IO (Int64, Int64)
getNextOffsetsForUpdate conn topicId partitionId = do
  let query =
        "SELECT next_offset, total_bytes FROM partitions WHERE topic_id = ? AND partition_id = ? FOR UPDATE"
  res <- PG.query conn query (topicId, partitionId) :: IO [(Int64, Int64)]
  case res of
    (offsets:_) -> return offsets
    [] ->
      error
        ("getNextOffsetsForUpdate: no partition row for topic_id=" ++
         show topicId ++ " partition_id=" ++ show partitionId)
-- | Serialize and insert a batch of messages starting at @baseOffset@,
-- threading a running byte offset.  Returns the next free log offset
-- and the new total byte count.  The fold accumulates row tuples as a
-- difference list (@f@) so rows come out in insertion order without
-- quadratic (++).
insertMessages ::
     PG.Connection
  -> Int32
  -> Int32
  -> Int64
  -> Int64
  -> [Message]
  -> IO (Int64, Int64)
insertMessages conn topicId partitionId baseOffset totalBytes messages = do
  let (newTuples, finalOffset, finalTotalBytes) =
        foldl'
          (\(f, logOffset, currentTotalBytes) message ->
             let messageBytes = runPut $ putMessage message
                 messageLen =
                   fromIntegral (Data.ByteString.length messageBytes) :: Int64
                 -- +12: presumably per-record framing overhead on top of
                 -- the payload bytes -- confirm against the read path.
                 endByteOffset = currentTotalBytes + messageLen + 12
                 tuple =
                   ( topicId
                   , partitionId
                   , PG.Binary messageBytes
                   , logOffset
                   , endByteOffset)
             in (f . (tuple :), logOffset + 1, endByteOffset))
          (id, baseOffset, totalBytes)
          messages
  let query =
        "INSERT INTO records (topic_id, partition_id, record, log_offset, byte_offset) VALUES (?, ?, ?, ?, ?)"
  _ <- PG.executeMany conn query $ newTuples []
  return (finalOffset, finalTotalBytes)
-- | Persist a partition's new next_offset and total_bytes, discarding
-- the affected-row count.
updatePartitionOffsets ::
     PG.Connection -> Int32 -> Int32 -> Int64 -> Int64 -> IO ()
updatePartitionOffsets conn topicId partitionId nextOffset totalBytes = do
  _ <-
    PG.execute
      conn
      "UPDATE partitions SET next_offset = ?, total_bytes = ? WHERE topic_id = ? AND partition_id = ?"
      (nextOffset, totalBytes, topicId, partitionId)
  return ()
-- | Append a batch of messages to a topic partition inside a single
-- transaction and return the offset assigned to the first message.
-- The FOR UPDATE row lock taken by 'getNextOffsetsForUpdate'
-- serializes concurrent writers for the same partition.
writeMessageSet :: PG.Connection -> Int32 -> Int32 -> [Message] -> IO Int64
writeMessageSet conn topicId partition messages =
  PG.withTransaction conn $ do
    (baseOffset, totalBytes) <- getNextOffsetsForUpdate conn topicId partition
    (finalOffset, finalTotalBytes) <-
      insertMessages conn topicId partition baseOffset totalBytes messages
    updatePartitionOffsets conn topicId partition finalOffset finalTotalBytes
    return baseOffset
|
cjlarose/kafkaesque
|
src/Kafkaesque/Queries/Log.hs
|
mit
| 2,600 | 0 | 20 | 626 | 648 | 345 | 303 | 59 | 1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.