code | repo_name | path | license | size | n_ast_errors | ast_max_depth | n_whitespaces | n_ast_nodes | n_ast_terminals | n_ast_nonterminals | loc | cycloplexity
---|---|---|---|---|---|---|---|---|---|---|---|---|
module Transformations (identity_t, trans_t, scale_t, uscale_t,
rotatex_t, rotatey_t, rotatez_t, applyTransform,
applyMatrixToPoint, applyMatrixToVector) where
import Types
identity :: Matrix
identity = ((1, 0, 0, 0),
(0, 1, 0, 0),
(0, 0, 1, 0),
(0, 0, 0, 1))
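-- A Transform (from Types) evidently pairs a matrix with its inverse; each
-- *_t builder below constructs the forward and inverse matrices together,
-- and the identity matrix is its own inverse.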
identity_t :: Transform
identity_t = (identity, identity)
trans :: Point -> Matrix
trans (x, y, z) = ((1, 0, 0, x),
(0, 1, 0, y),
(0, 0, 1, z),
(0, 0, 0, 1))
trans_t :: Point -> Transform
trans_t p = (trans p, trans p')
where
p' = neg p
neg (x, y, z) = (-x, -y, -z)
scale :: Point -> Matrix
scale (x, y, z) = ((x, 0, 0, 0),
(0, y, 0, 0),
(0, 0, z, 0),
(0, 0, 0, 1))
scale_t :: Point -> Transform
scale_t p = (scale p, scale p')
where
p' = inv p
inv (x, y, z) = (1/x, 1/y, 1/z)
uscale :: Double -> Matrix
uscale r = scale (r, r, r)
uscale_t :: Double -> Transform
uscale_t r = (uscale r, uscale (1/r))
rotatex :: Double -> Matrix
rotatex r = (( 1, 0, 0, 0),
( 0, c, -s, 0),
( 0, s, c, 0),
( 0, 0, 0, 1))
where c = cos r
s = sin r
rotatex_t :: Double -> Transform
rotatex_t r = (rotatex r, rotatex (-r))
rotatey :: Double -> Matrix
rotatey r = (( c, 0, s, 0),
( 0, 1, 0, 0),
(-s, 0, c, 0),
( 0, 0, 0, 1))
where c = cos r
s = sin r
rotatey_t :: Double -> Transform
rotatey_t r = (rotatey r, rotatey (-r))
rotatez :: Double -> Matrix
rotatez r = (( c, -s, 0, 0),
( s, c, 0, 0),
( 0, 0, 1, 0),
( 0, 0, 0, 1))
where c = cos r
s = sin r
rotatez_t :: Double -> Transform
rotatez_t r = (rotatez r, rotatez (-r))
rows :: Matrix -> (Vector, Vector, Vector, Vector)
rows = id
cols :: Matrix -> (Vector, Vector, Vector, Vector)
cols ((a1, a2, a3, a4),
(b1, b2, b3, b4),
(c1, c2, c3, c4),
(d1, d2, d3, d4)) = ((a1, b1, c1, d1),
(a2, b2, c2, d2),
(a3, b3, c3, d3),
(a4, b4, c4, d4))
dotProduct :: Vector -> Vector -> Double
dotProduct (a1, a2, a3, a4) (b1, b2, b3, b4) =
(a1 * b1 + a2 * b2 + a3 * b3 + a4 * b4)
mmult :: Matrix -> Matrix -> Matrix
mmult m1 m2 = ((a1, a2, a3, a4),
(b1, b2, b3, b4),
(c1, c2, c3, c4),
(d1, d2, d3, d4))
where
a1 = dotProduct row1 col1
a2 = dotProduct row1 col2
a3 = dotProduct row1 col3
a4 = dotProduct row1 col4
b1 = dotProduct row2 col1
b2 = dotProduct row2 col2
b3 = dotProduct row2 col3
b4 = dotProduct row2 col4
c1 = dotProduct row3 col1
c2 = dotProduct row3 col2
c3 = dotProduct row3 col3
c4 = dotProduct row3 col4
d1 = dotProduct row4 col1
d2 = dotProduct row4 col2
d3 = dotProduct row4 col3
d4 = dotProduct row4 col4
(row1, row2, row3, row4) = rows m1
(col1, col2, col3, col4) = cols m2
combineTransforms :: Transform -> Transform -> Transform
combineTransforms (m2, m2') (m1, m1') = (m2 `mmult` m1, m1' `mmult` m2')
applyTransform :: Transform -> Object -> Object
applyTransform t2 (Primitive k t1 s) = Primitive k (combineTransforms t2 t1) s
applyTransform t2 (Union o1 o2) = Union (applyTransform t2 o1) (applyTransform t2 o2)
applyTransform t2 (Intersection o1 o2) = Intersection (applyTransform t2 o1) (applyTransform t2 o2)
applyTransform t2 (Difference o1 o2) = Difference (applyTransform t2 o1) (applyTransform t2 o2)
applyMatrixToPoint :: Matrix -> Point -> Point
applyMatrixToPoint m (x, y, z) = (a1, a2, a3)
where
a1 = dotProduct row1 v
a2 = dotProduct row2 v
a3 = dotProduct row3 v
(row1, row2, row3, row4) = rows m
v = (x, y, z, 1)
applyMatrixToVector :: Matrix -> Point -> Point
applyMatrixToVector m (x, y, z) = (a1, a2, a3)
where
a1 = dotProduct row1 v
a2 = dotProduct row2 v
a3 = dotProduct row3 v
(row1, row2, row3, row4) = rows m
v = (x, y, z, 0)
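-- Worked examples, as the definitions above imply (Point and Vector being
-- plain tuples from Types):
--   applyMatrixToPoint  (trans (1, 2, 3)) (0, 0, 0)  ==  (1, 2, 3)
--   applyMatrixToVector (trans (1, 2, 3)) (1, 0, 0)  ==  (1, 0, 0)
-- Points carry a homogeneous coordinate of 1 and so are translated, while
-- vectors carry 0 and are unaffected by translation.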
| jchl/jtrace | src/Transformations.hs | mit | 4,212 | 0 | 12 | 1,452 | 1,883 | 1,092 | 791 | 117 | 1 |
-- file: ch4/InteractWith.hs
-- Save this in a source file, e.g. Interact.hs
import System.Environment (getArgs)
interactWith function inputFile outputFile = do
input <- readFile inputFile
writeFile outputFile (function input)
main = mainWith myFunction
where mainWith function = do
args <- getArgs
case args of
[input,output] -> interactWith function input output
_ -> putStrLn "error: exactly two arguments needed"
-- replace "id" with the name of our function below
myFunction = id
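-- For illustration only (a hypothetical substitution, not part of the
-- original Real World Haskell example): a concrete transformation such as
--
--   myFunction = unlines . map reverse . lines
--
-- would make the program reverse every line of the input file.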
| friedbrice/RealWorldHaskell | ch4/InteractWith.hs | gpl-2.0 | 555 | 0 | 12 | 141 | 115 | 57 | 58 | 11 | 2 |
{-# LANGUAGE TemplateHaskell, FlexibleInstances, MultiParamTypeClasses, DeriveFunctor #-}
module LanguageDef.Data.Relation where
import Utils.All
import LanguageDef.Utils.LocationInfo
import LanguageDef.Utils.ExceptionInfo
import LanguageDef.Utils.Checkable
import LanguageDef.Utils.Grouper
import LanguageDef.Data.Rule
import LanguageDef.Data.Expression hiding (choices')
import LanguageDef.Combiner
import LanguageDef.MetaSyntax (nl, nls, ident, typeIdent)
import Data.Map (Map, (!), filterWithKey)
import qualified Data.Map as M
import Control.Arrow ((&&&))
data Mode = In | Out
deriving (Show, Eq)
data Relation = Relation
{ _relSymbol :: Name
, _relTypes :: [(FQName, Mode)]
, _relPronounce :: Maybe String
, _relDocs :: MetaInfo
} deriving (Show, Eq)
makeLenses ''Relation
--------------------- BUILTIN RELATIONS -------------------
builtinRelationNames :: [String]
builtinRelationNames = [":"]
---------------------------- PARSING -----------------------
choices' nm = choices (["Relations"], nm)
relIdent :: Combiner FQName
relIdent = choices' "relIdent"
[ cmb (\head (tail, nm) -> (head:tail, nm))
capture (lit "." **> relIdent)
, capture |> (,) []
]
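-- relIdent (above) parses a possibly dot-qualified relation name into its
-- qualifier path and base name; judging from the combinators, "A.B.rel"
-- would yield (["A", "B"], "rel") and a bare "rel" would yield ([], "rel").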
mode = choices' "mode"
[ lit "in" |> const In
, lit "out" |> const Out
]
typ :: Combiner (FQName, Mode)
typ = choices' "type"
[ typeIdent <+> lit "(" **> mode <** lit ")"
]
types = choices' "types"
[ cmb (:) typ (skip **> types)
, typ |> (:[])
]
relDeclarationCore :: Combiner (Name, [(FQName, Mode)])
relDeclarationCore
= choices' "relDeclarationCore"
[ lit "(" **> capture <+> lit ")" **> lit ":" **> types
]
relPronounc = choices' "relPronounce"
[ lit ";" **> lit "Pronounced" **> lit "as" **> (capture |> Just) <+> nl
, nl |> (,) Nothing
]
relation :: Combiner Relation
relation = choices' "relation"
[ (nls <+> relDeclarationCore <+> relPronounc) & withLocation _asRel
, (relDeclarationCore <+> relPronounc) |> (,) [] & withLocation _asRel
]
_asRel :: LocationInfo
-> ([String], ((Name, [(FQName, Mode)]), (Maybe String, Maybe String)))
-> Relation
_asRel li (docs, ((symbol, types), (pronounce, extraComment)))
= let docs' = unlines docs ++ fromMaybe "" extraComment in
Relation symbol types pronounce $ MetaInfo li docs'
_relations :: Combiner [Relation]
_relations = choices' "relations"
[cmb (:) relation _relations
, relation |> (:[])
]
relations = _relations |> asGrouper ("relation", "relations") (get relSymbol)
commaSepExpr :: Combiner [Expression' ()]
commaSepExpr
= choices' "commaSepExpr"
[ cmb (:) expression (lit "," **> commaSepExpr)
, expression |> (:[])
]
conclusion :: Combiner (Conclusion' ())
conclusion
= choices' "conclusion"
[ (lit "(" **> relIdent <+> lit ")" **> commaSepExpr)
|> uncurry Conclusion
]
predicate :: Combiner (Predicate' ())
predicate
= choices' "predicate"
[ conclusion & withLocation' PredConcl
, expression & withLocation' PredExpr
]
predicates :: Combiner [Predicate' ()]
predicates
= choices' "predicates"
[ cmb (:) predicate (skip {-tabs-} **> predicates)
, predicate |> (:[])
]
line :: Combiner String
line = choices' "line"
[ skip **> lit "[" **> capture <** lit "]"
, skip |> const ""
]
rule = choices' "rule"
[ (nls <+> predicates <+> skip **>
line <+> skip **>
conclusion <** skip)
& withLocation _asRule
, (nls <+>
line <+> skip **>
conclusion <** skip)
& withLocation _asRule'
]
_asRule' :: LocationInfo -> ([String], (Name, Conclusion' a)) -> Rule' a
_asRule' li (docs, (name, concl))
= _asRule li (docs, ([], (name, concl)))
_asRule :: LocationInfo -> ([String], ([Predicate' a], (Name, Conclusion' a))) -> Rule' a
_asRule li (docs, (preds, (name, conclusion)))
= Rule preds conclusion name $ MetaInfo li (unlines docs)
_rules :: Combiner [Rule' ()]
_rules = choices' "rules"
[ cmb (:) rule _rules
, rule |> (:[])
]
rules = _rules |||>>> const () |> asGrouper ("rule", "rules") (get ruleName)
------------------------------------- CHECKS --------------------------------
{- | Checks the relation. Duplicate relation check is done by the grouper; unknown type check is done by the qualifier
>>> import LanguageDef.API
>>> loadAssetLangDef "TestInput/Faulty/Relations" ["UnknownTypeRelation"] & toCoParsable
"| While fully qualifiying the relation form \"~\" in TestInput/Faulty/Relations/UnknownTypeRelation.language at lines 11 - 13\n Error: \n \8226 The syntactic form \"x\" was not found within the namespace \n \8226 Perhaps you meant: a, \8868, \8869, UnknownTypeRelation.a, UnknownTypeRelation.\8868\n Error: \n \8226 The syntactic form \"x\" was not found within the namespace \n \8226 Perhaps you meant: a, \8868, \8869, UnknownTypeRelation.a, UnknownTypeRelation.\8868"
>>> loadAssetLangDef "TestInput/Faulty/Relations" ["DuplicateRelation"] & toCoParsable
"| While validating the relation declarations while validating \nError: \n \8226 The relation \"~\" is defined multiple times"
>>> loadAssetLangDef "TestInput/Faulty/Relations" ["AllOutRel"] & toCoParsable
"| While validating the relation declarations while validating \nError: \n \8226 Relation (~) should have at least one input type"
-}
instance Checkable Relation where
check relation
= do let modes = relation & get relTypes |> snd
assert' (In `elem` modes) $ "Relation "++inParens (get relSymbol relation) ++" should have at least one input type"
assert' (get relSymbol relation `notElem` builtinRelationNames)
$ "The relation symbol "++show (get relSymbol relation) ++" is builtin and should not be redeclared"
{- |
>>> import LanguageDef.API
>>> loadAssetLangDef "TestInput/Faulty/Relations" ["EmptyLine"] & toCoParsable
"| While validating the relation implementation while validating \nError: \n \8226 This rule has no name. Add a name after the line, in square brackets\n \n predicate\n ----------- [ name ]\n (~) args"
>>> loadAssetLangDef "TestInput/Faulty/Relations" ["NotDeclared"] & toCoParsable
"| While validating \nError: \n \8226 When rules are defined, a relation declaration section should be present"
>>> loadAssetLangDef "TestInput/Faulty/Relations" ["NotLocal"] & toCoParsable
"| While fully qualifiying the rule \"abc\" in TestInput/Faulty/Relations/NotLocal.language at lines 18 - 22\n| While fully qualifiying a conclusion using ([],\"~\") in TestInput/Faulty/Relations/NotLocal.language at line 21, columns 4 - 5\nError: \n \8226 The relation \"~\" was not found within the namespace \n \8226 Perhaps you meant: \8594, NotLocal.\8594"
-}
instance Checkable' (Grouper Relation) (Rule' a) where
check' relations rule
= do assert' (not $ null $ get ruleName rule)
"This rule has no name. Add a name after the line, in square brackets\n\n predicate\n----------- [ name ]\n (~) args"
let ruleAbout = rule & get (ruleConcl . conclRelName) & snd
assert' (ruleAbout `M.member` get grouperDict relations)
$ ["Rule", show $ get ruleName rule, "is about relation", inParens ruleAbout, "which is not declared in this document. Only locally declared relations can be implemented with rules"] & unwords
------------------------------------- TOPARSABLE -----------------------------
instance ToString Relation where
toParsable (Relation symb tps pronounce doc)
= [ toParsable doc
, "(" ++ symb ++ ")\t" ++
(tps |> _typeModeToPars & intercalate " × ") ++
maybe "" ("; Pronounced as " ++ ) pronounce
] & unlines
_typeModeToPars :: (FQName, Mode) -> String
_typeModeToPars (typ, mode)
= [showFQ typ, inParens $ toParsable mode] & unwords
instance ToString Mode where
toParsable In = "in"
toParsable Out = "out"
instance Infoable Relation where
getInfo r = AllInfo (get relSymbol r) "Relation" (get relDocs r) (toParsable r)
| pietervdvn/ALGT2 | src/LanguageDef/Data/Relation.hs | gpl-3.0 | 7,844 | 211 | 20 | 1,383 | 2,094 | 1,128 | 966 | 134 | 1 |
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE UndecidableInstances #-}
module Fs (adbFSOps,
LogFunction,
PathQualification(..),
qualifyPath,
emptyDirContents) where
import System.Fuse
import Classes
import qualified Data.ByteString.Char8 as B
import Adb (serialNo, ifAdbPresent, Device, MonadAdb, AdbFail)
import qualified Adb as Adb
import qualified System.Environment as Env
import System.Directory (removeFile)
import System.Random (randomRIO)
import Control.Monad (forM, void)
import Control.Monad.Except
import Control.Monad.Writer
import Control.Monad.Reader
import Control.Applicative ((<*))
import qualified Utils as U
import Types
import Parsers (Parser)
import qualified Parsers as P
import System.FilePath
import System.Console.GetOpt
import Foreign.C.Error
import System.Posix.Types
import System.Posix.Files
import System.Posix.IO
import Data.Maybe (fromMaybe, isJust)
import Data.List (find, intercalate)
import Prelude hiding (log)
type FileHandle = (DeviceId, FilePath)
type DeviceId = String
type FsCall a = IO (Either Error a)
type FilesList = [(FilePath, FileStat)]
type AdbFsCall = ExceptT Error (WriterT [String] IO)
type DeviceCall = ReaderT Device AdbFsCall
getFuseUID :: (MonadIO m) => m UserID
getFuseUID = fuseCtxUserID <$> (liftIO $ getFuseContext)
getFuseGID :: (MonadIO m) => m GroupID
getFuseGID = fuseCtxGroupID <$> (liftIO $ getFuseContext)
instance GetFuseContext AdbFsCall where
fuseUID = getFuseUID
fuseGID = getFuseGID
instance GetFuseContext DeviceCall where
fuseUID = getFuseUID
fuseGID = getFuseGID
instance WithCurrentDevice DeviceCall where
currentDevice = ask
callCurrentDevice args =
do device <- ask
liftAdbCall $ Adb.callForDevice device args
listDevices :: (MonadIO m) => m (Either String [Device])
listDevices = liftIO $ runExceptT Adb.listDevices
liftAdbCall :: (CanFail o, MonadIO o) => ExceptT String IO a -> o a
liftAdbCall action = do
value <- liftIO $ runExceptT action
case value of
Right r -> return r
-- TODO: logs?
Left e -> simpleError eINVAL
instance Logging DeviceCall where
writeLog = writeLogWriter
instance Logging AdbFsCall where
writeLog = writeLogWriter
writeLogWriter :: (MonadWriter [String] m) => String -> m ()
writeLogWriter l = tell [l]
-- instance (Monad m,) => Logging m where
-- writeLog l = tell [l]
withCleanupError :: (MonadError Error m) => m a -> m () -> m a
withCleanupError action cleanup =
action `catchError` (\e -> do
cleanup
throwError e)
instance CanFail DeviceCall where
failWith = throwError
withCleanup = withCleanupError
instance CanFail AdbFsCall where
failWith = throwError
withCleanup = withCleanupError
onDevice :: DeviceId -> DeviceCall a -> AdbFsCall a
onDevice deviceId action = findDevice deviceId >>= runReaderT action
simpleError :: (CanFail m) => Errno -> m a
simpleError code = failWith $ Error code
deviceCallInIO :: Device -> DeviceCall a -> IO (Either Error a, [String])
deviceCallInIO device action =
runWriterT $ runExceptT $ runReaderT action device
data LogLevel = LogSilent
| LogFailsOnly
| LogFull
data Option = LogLevel (Maybe LogLevel)
| LoggingFile String
type LogFunction = Either String String -> [String] -> IO ()
msgError :: (CanFail m, Logging m) => Errno -> String -> m a
msgError code msg = do
log $ "error: " ++ msg
failWith $ Error code
data FsEntry = FsEntry { fseOpen :: OpenMode -> OpenFileFlags -> AdbFsCall FileHandle
, fseRead :: FileHandle -> ByteCount -> FileOffset -> AdbFsCall B.ByteString
, fseWrite :: FileHandle -> B.ByteString -> FileOffset -> AdbFsCall ByteCount
, fseGetFileStat :: AdbFsCall FileStat
, fseOpenDirectory :: AdbFsCall Errno
, fseReadDirectory :: AdbFsCall FilesList
, fseSetFileMode :: FileMode -> AdbFsCall Errno
, fseSetOwnerAndGroup :: UserID -> GroupID -> AdbFsCall Errno
, fseReadSymlink :: AdbFsCall FilePath
, fseSetFileSize :: FileOffset -> AdbFsCall Errno
, fseCreateDevice :: EntryType -> FileMode -> DeviceID -> AdbFsCall Errno
, fseCreateDirectory :: AdbFsCall Errno
, fseRemoveLink :: AdbFsCall Errno
, fseRemoveDirectory :: AdbFsCall Errno}
data DeviceShellCall = DeviceShellCall [String]
deriving (Show, Eq)
log :: Logging m => String -> m ()
log l = writeLog l
logLn :: Logging m => String -> m ()
logLn ln = writeLog $ ln ++ "\n"
instance Show OpenMode where
show ReadOnly = "ro"
show WriteOnly = "wo"
show ReadWrite = "rw"
instance Show OpenFileFlags where
show (OpenFileFlags append exclusive noccty nonblock trunc) =
intercalate "-" $ map (\(b, s) -> if b then s else "|") [(append, "append")
, (exclusive, "exclusive")
, (noccty, "noccty")
, (nonblock, "nonblock")
, (trunc, "trunc")]
defaultFsEntry :: FsEntry
defaultFsEntry = FsEntry { fseOpen = const $ const $ noImpl
, fseRead = const $ const $ const $ noImpl
, fseWrite = const $ const $ const $ noImpl
, fseGetFileStat = noImpl
, fseOpenDirectory = noImpl
, fseReadDirectory = noImpl
, fseReadSymlink = noImpl
, fseSetFileMode = const $ noImpl
, fseSetOwnerAndGroup = const $ const $ noImpl
, fseSetFileSize = const $ noImpl
, fseCreateDevice = const $ const $ const $ noImpl
, fseCreateDirectory = noImpl
, fseRemoveDirectory = noImpl
, fseRemoveLink = noImpl}
where noImpl = simpleError eNOSYS
adbFSOps :: LogFunction -> FuseOperations FileHandle
adbFSOps logFunc =
defaultFuseOps { fuseGetFileStat = \path ->
run forResult $ method "fuseGetFileStat"
>> path `as` "path"
>> (fseGetFileStat $ pathToFsEntry path)
, fuseOpen = \path -> \mode -> \flags ->
run forResult $ method "fuseOpen"
>> path `as` "path" >> mode `as` "mode" >> flags `as` "flags"
>> (fseOpen (pathToFsEntry path) mode flags)
, fuseRead = \path -> \handle -> \count -> \offset ->
run forResult $ method "fuseRead"
>> path `as` "path" >> handle `as` "handle" >> count `as` "count" >> offset `as` "offset"
>> (fseRead (pathToFsEntry path) handle count offset)
, fuseWrite = \path -> \handle -> \bytes -> \offset ->
run forResult $ method "fuseWrite"
>> path `as` "path" >> handle `as` "handle" >> bytes `as` "bytes" >> offset `as` "offset"
>> (fseWrite (pathToFsEntry path) handle bytes offset)
, fuseOpenDirectory = \path ->
run forCode $ method "fuseOpenDirectory"
>> path `as` "path"
>> (fseOpenDirectory $ pathToFsEntry path)
, fuseReadDirectory = \path ->
run forResult $ method "fuseReadDirectory"
>> path `as` "path"
>> (fseReadDirectory $ pathToFsEntry path)
, fuseGetFileSystemStats = adbFsGetFileSystemStats
, fuseReadSymbolicLink = \path ->
run forResult $ method "fuseReadSymbolicLink"
>> path `as` "path"
>> (fseReadSymlink $ pathToFsEntry path)
, fuseSetFileSize = \path -> \size ->
run forCode $ method "fuseSetFileSize"
>> path `as` "path" >> size `as` "size"
>> (fseSetFileSize (pathToFsEntry path) size)
, fuseSetFileMode = \path -> \fileMode ->
run forCode $ method "fuseSetFileMode"
>> path `as` "path" >> fileMode `as` "fileMode"
>> (fseSetFileMode (pathToFsEntry path) fileMode)
, fuseSetOwnerAndGroup = \path -> \userId -> \groupId ->
run forCode $ method "fuseSetOwnerAndGroup"
>> path `as` "path" >> userId `as` "usedId" >> groupId `as` "groupId"
>> (fseSetOwnerAndGroup (pathToFsEntry path) userId groupId)
, fuseCreateDevice = \path -> \entryType -> \fileMode -> \deviceId ->
run forCode $ method "fuseCreateDevice"
>> path `as` "path" >> entryType `as` "entryType" >> fileMode `as` "fileMode" >> deviceId `as` "deviceId"
>> (fseCreateDevice (pathToFsEntry path) entryType fileMode deviceId)
, fuseRemoveLink = \path ->
run forCode $ method "fuseRemoveLink"
>> path `as` path
>> (fseRemoveLink (pathToFsEntry path))
, fuseRemoveDirectory = \path ->
run forCode $ method "fuseRemoveDirectory"
>> path `as` path
>> (fseRemoveDirectory (pathToFsEntry path))
, fuseCreateDirectory = \path -> \mode ->
run forCode $ method "fuseCreateDirectory"
>> path `as` "path" >> mode `as` "mode"
>> (fseCreateDirectory (pathToFsEntry path))
}
where method name = logLn $ "called method: " ++ name
a `as` n = logLn $ " param: " ++ n ++ ": " ++ (show a)
run :: (Show a) => (Either Error a -> e) -> AdbFsCall a -> IO e
run convert action = do
(result, logged) <- runWriterT $ runExceptT action
let resultMsg = case result of
Right result -> Right $ "call ok: " ++ (show result)
Left fail -> Left $ "call failed: " ++ (show fail)
logFunc resultMsg logged
return $ convert result
forCode = either eErrno id
forResult = either (Left . eErrno) Right
dirStat :: (GetFuseContext m) => m FileStat
dirStat = do
uid <- fuseUID
gid <- fuseGID
return $ FileStat { statEntryType = Directory
, statFileMode = mconcat
[ ownerReadMode
, ownerExecuteMode
, groupReadMode
, groupExecuteMode
, otherReadMode
, otherExecuteMode
]
, statLinkCount = 2
, statFileOwner = uid
, statFileGroup = gid
, statSpecialDeviceID = 0
, statFileSize = 4096
, statBlocks = 1
, statAccessTime = 0
, statModificationTime = 0
, statStatusChangeTime = 0}
emptyDirContents :: GetFuseContext m => m FilesList
emptyDirContents = dirsFromNames [".", ".."]
instance (Monoid (AdbFsCall FilesList)) where
mempty = return $ []
mappend l r = (++) <$> r <*> l
rootEntry = defaultFsEntry { fseGetFileStat = dirStat
, fseOpenDirectory = return eOK
, fseReadDirectory = emptyDirContents `mappend` dirsFromDevices }
deviceFsEntry deviceId path
= defaultFsEntry { fseOpen = const $ const $ return (deviceId, path)
, fseRead = \handle -> \count -> \offset -> onDevice deviceId $ deviceRead path count offset
, fseWrite = \handle -> \bytes -> \offset -> onDevice deviceId $ deviceWrite path bytes offset
, fseGetFileStat = onDevice deviceId $ deviceStat path
, fseOpenDirectory = return eOK
, fseReadDirectory = emptyDirContents `mappend` (onDevice deviceId $ deviceLs path)
, fseSetFileSize = \size -> onDevice deviceId $ deviceSetFileSize path size
, fseReadSymlink = onDevice deviceId $ deviceReadLink path
, fseSetFileMode = \mode -> return eOK
, fseCreateDevice = \entry -> \mode -> \id -> onDevice deviceId $ deviceCreateDevice path entry id
, fseCreateDirectory = onDevice deviceId $ deviceCreateDirectory path
, fseRemoveLink = onDevice deviceId $ deviceDeleteFile path
, fseRemoveDirectory = onDevice deviceId $ deviceDeleteDir path }
deviceRootEntry = defaultFsEntry { fseGetFileStat = dirStat
, fseOpenDirectory = return eOK
, fseReadDirectory = emptyDirContents `mappend` dirsOfDevicePseudoFs }
dirsFromNames :: GetFuseContext m => [String] -> m FilesList
dirsFromNames names = do
ds <- dirStat
return $ map ((\n -> (n, ds))) names
dirsOfDevicePseudoFs :: AdbFsCall FilesList
dirsOfDevicePseudoFs = dirsFromNames ["fs"]
randomFileIn :: FilePath -> IO FilePath
randomFileIn path = do
postfix <- randomRIO ((10000000000000000000, 100000000000000000000) :: (Integer, Integer))
return $ path </> ("adb_fuse_transport." ++ show (postfix))
dirsFromDevices :: AdbFsCall FilesList
dirsFromDevices = do
devices <- (either (const []) id) <$> listDevices
dirsFromNames $ map serialNo devices
qualifyPath :: String -> Maybe PathQualification
qualifyPath path = either (const Nothing) Just $ P.parse P.adbFsPath path
pathToFsEntry :: String -> FsEntry
pathToFsEntry path =
case qualifyPath path of
Nothing -> defaultFsEntry
Just (FsRoot) -> rootEntry
Just (Device deviceName) -> deviceRootEntry
Just (DeviceFs deviceName) -> deviceFsEntry deviceName ""
Just (InDeviceFs deviceName innerPath) -> deviceFsEntry deviceName innerPath
fsBlockSize :: Int
fsBlockSize = 1024 * 50
blockify :: ByteCount -> FileOffset -> U.Block
blockify = U.blockify $ fsBlockSize
adbFsGetFileSystemStats :: String -> IO (Either Errno FileSystemStats)
adbFsGetFileSystemStats str =
return $ Right $ FileSystemStats
{ fsStatBlockSize = fromIntegral fsBlockSize
, fsStatBlockCount = 1000000
, fsStatBlocksFree = 1000000
, fsStatBlocksAvailable = 1000000
, fsStatFileCount = 500
, fsStatFilesFree = 1000
, fsStatMaxNameLength = 255
}
data LsError = PermissionDenied String
findDevice :: DeviceId -> AdbFsCall Device
findDevice deviceId = do
devicesResponse <- listDevices
case devicesResponse of
Left error -> msgError eNOENT error
Right devices ->
case find ((deviceId ==) . serialNo) devices of
Just device -> return device
Nothing -> msgError eNOENT $ "No device: " ++ deviceId
quoteAdbCallArgs :: [String] -> [String]
quoteAdbCallArgs = map (foldr quote "")
where quote char acc
| char `elem` quotable = '\\' : d
| otherwise = d
where d = char : acc
quotable = ['\'', '"', ' ']
formatAdbShellCall :: [String] -> Char -> Char -> [String]
formatAdbShellCall inArgs okMarker failMarker
= quoteAdbCallArgs $ "shell" : (["(", "("] ++ inArgs ++ [")", "&&", echo okMarker, ")", "||", echo failMarker])
where echo m = "echo -n " ++ [m]
deviceShellCall :: (Logging m, WithCurrentDevice m) => DeviceShellCall -> m (Either String String)
deviceShellCall (DeviceShellCall inArgs) = do
device <- currentDevice
let args = formatAdbShellCall inArgs ok fail
ok = 't'
fail = 'f'
logLn $ "adb (" ++ (serialNo device) ++ " ) shell call: " ++ (show args)
rawResponse <- callCurrentDevice args
logLn $ "response: " ++ rawResponse
let (response, marker) = splitAt ((length rawResponse) - 1) rawResponse
resultType = if marker == [ok]
then Right
else Left
return $ resultType response
parseWith :: Parser a -> String -> DeviceCall a
parseWith parser string = case P.parse parser string of
Left err -> do
logLn $ "can't be parsed: " ++ (show err)
simpleError eINVAL
Right result -> return result
mapLeft :: (l -> nl) -> Either l r -> Either nl r
mapLeft f (Right value) = Right value
mapLeft f (Left error) = Left $ f error
failOnLeft :: (CanFail m, Logging m) => Either String a -> m a
failOnLeft (Right value) = return value
failOnLeft (Left fail) = logLn fail >> simpleError eINVAL
deviceCall :: (CanFail m, Logging m, WithCurrentDevice m) => [String] -> Parser a -> m a
deviceCall args parser = do
response <- callCurrentDevice $ quoteAdbCallArgs args
device <- currentDevice
logLn $ "adb (" ++ (serialNo device) ++ " ) call: " ++ (show args)
logLn $ "response: " ++ response
failOnLeft $ mapLeft (("response can't be parsed:" ++ ) . show) $ P.parse parser response
deviceLs :: String -> DeviceCall FilesList
deviceLs path = (deviceCall ["shell", "ls", "-al", path ++ "/"] $ P.rfseFromAdbLs) >>= mapM statFromRemoteFsEntry
deviceReadLink :: FilePath -> DeviceCall FilePath
deviceReadLink path = deviceCall ["shell", "realpath", "/" ++ path] $ upToRoot <$> P.filePathFromRealpathResponse
where upToRoot innerPath@(firstChar:_) = if firstChar == '/'
then pathRelativeToRoot innerPath
else innerPath
pathRelativeToRoot p = "." ++ (concat (take ((length $ splitPath path) - 2) $ repeat "/..")) ++ p
deviceStat :: (WithCurrentDevice m, Logging m, CanFail m, GetFuseContext m) => FilePath -> m FileStat
deviceStat path = do
let args = ["shell", "ls", "-ald", "/" ++ path]
statResult <- deviceCall args P.singleFileStat
case statResult of
Just s -> snd <$> statFromRemoteFsEntry s
Nothing -> simpleError eNOENT
withTempFile :: (CanFail m, MonadIO m) => FilePath -> (FilePath -> m ()) -> (FilePath -> m a) -> m a
withTempFile prefix delete action = do
tempFile <- liftIO $ randomFileIn prefix
(action tempFile) `withCleanup` (delete tempFile)
-- let performAndClean = do
-- result <- action tempFile
-- delete tempFile
-- return result
-- cleanupAndRethrow e = do
-- delete tempFile
-- throwError e
-- performAndClean `catchError` cleanupAndRethrow
withLocalTempFile :: (CanFail m, MonadIO m) => (FilePath -> m a) -> m a
withLocalTempFile = withTempFile "/tmp" (liftIO . removeFile)
withRemoteTempFile :: (WithCurrentDevice m, Logging m, CanFail m, MonadIO m) => (FilePath -> m a) -> m a
withRemoteTempFile action = withTempFile "/sdcard" remoteDelete action
remoteDelete :: (WithCurrentDevice m, CanFail m, Logging m) => FilePath -> m ()
remoteDelete filePath = deviceCall ["shell", "rm", "-f", filePath] P.emptyResponse
ddCommand :: FilePath -> Maybe FilePath -> U.Block -> [String]
ddCommand iF oF (U.Block { U.blckFirstBlock = firstBlock
, U.blckBlocksCount = blocksCount
, U.blckBlockSize = blockSize } )
= ([ "shell"
, "dd"
, "if=" ++ iF
, "bs=" ++ (show blockSize)
, "skip=" ++ (show firstBlock)
, "count=" ++ (show blocksCount)]
++ ofParam)
where ofParam = fromMaybe [] $ fmap (\x -> ["of=" ++ x]) oF
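-- For illustration (hypothetical values): a block starting at block 2 and
-- spanning 3 blocks of 1024 bytes, with no output file, yields
--   ["shell", "dd", "if=/sdcard/x", "bs=1024", "skip=2", "count=3"]
-- while passing (Just path) appends an extra "of=..." argument.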
pullToTempFile :: (WithCurrentDevice m, CanFail m, MonadIO m, Logging m) => FilePath -> (FilePath -> m a) -> m a
pullToTempFile remoteFilePath action =
withLocalTempFile $ \tempFilePath ->
do
-- FIXME: response not parsed
deviceCall ["pull", remoteFilePath, tempFilePath] P.acceptAnything
action tempFilePath
deviceRead :: (WithCurrentDevice m, CanFail m, MonadIO m, Logging m) => FilePath -> ByteCount -> FileOffset -> m B.ByteString
deviceRead path count offset = do
let [email protected] { U.blckFirstBlock = firstBlock
, U.blckBlocksCount = blocksCount
, U.blckOffsetInFirstBlock = skipInFirstBlock }
= blockify count offset
blockToResult = (B.take (fromIntegral count)) . (B.drop skipInFirstBlock) . B.pack
blockToResult <$> deviceCall (ddCommand path Nothing block) P.parseDDReadFile
-- withRemoteTempFile $ \onDeviceTempFile ->
-- do
-- -- FIXME: response not parsed
-- deviceCall (ddCommand path onDeviceTempFile block) P.acceptAnything
-- pullToTempFile onDeviceTempFile $ \localTempFile ->
-- do
-- d <- liftIO $ B.readFile localTempFile
-- return $ B.take (fromIntegral count) $ B.drop skipInFirstBlock d
deviceWrite :: FilePath -> B.ByteString -> FileOffset -> DeviceCall ByteCount
deviceWrite targetPath dataToWrite offset = do
-- at first we need to get original block containing the data to be
-- written, as we can only "dd" data to file
let [email protected] { U.blckFirstBlock = firstBlock
, U.blckBlocksCount = blocksCount
, U.blckOffsetInFirstBlock = inBlockOffset
, U.blckBlockSize = blockSize }
= blockify (fromIntegral dataSize) offset
dataSize = B.length dataToWrite
originalBlock <- deviceRead targetPath (fromIntegral (blocksCount * blockSize)) (fromIntegral (firstBlock * blockSize))
let transformedBlock = B.concat [ B.take inBlockOffset originalBlock
, dataToWrite
, B.drop (inBlockOffset + dataSize) originalBlock]
withLocalTempFile $ \localPath ->
withRemoteTempFile $ \remotePath -> do
liftIO $ B.writeFile localPath transformedBlock
deviceCall ["push", localPath, remotePath] P.acceptAnything
deviceCall (ddCommand remotePath (Just targetPath) block) $ P.acceptAnything
return $ fromIntegral dataSize
deviceSetFileSize :: FilePath -> FileOffset -> DeviceCall Errno
deviceSetFileSize path 0 = do
deviceCall ["shell", "dd", "of=" ++ path, "count=0"] P.acceptAnything
return eOK
deviceSetFileSize _ _ = return eINVAL
deviceCreateDevice :: FilePath -> EntryType -> DeviceID -> DeviceCall Errno
deviceCreateDevice path RegularFile _ = do
deviceCall ["shell", "touch", path] P.emptyResponse
return eOK
deviceCreateDevice _ _ _= simpleError eINVAL
deviceCreateDirectory :: FilePath -> DeviceCall Errno
deviceCreateDirectory path = do
deviceCall ["shell", "mkdir", path] P.emptyResponse
return eOK
deviceDeleteFile :: FilePath -> DeviceCall Errno
deviceDeleteFile path = do
deviceCall ["shell", "rm", "-f", path] P.emptyResponse
return eOK
deviceDeleteDir :: FilePath -> DeviceCall Errno
deviceDeleteDir path = do
deviceCall ["shell", "rm", "-fd", path] P.emptyResponse
return eOK
isFileMode :: FileMode -> FileMode -> Bool
isFileMode whatToCheck modeOfQuestion = (whatToCheck `intersectFileModes` fileTypeModes) == modeOfQuestion
statFromRemoteFsEntry :: (GetFuseContext m) => RemoteFsEntry -> m (FilePath, FileStat)
statFromRemoteFsEntry (RemoteFsEntry fileMode size name) = do
uid <- fuseUID
gid <- fuseGID
return (name,
FileStat { statEntryType = fromMaybe RegularFile $ snd <$> (find ((fileMode `isFileMode`) . fst)
[(blockSpecialMode, BlockSpecial)
, (characterSpecialMode, CharacterSpecial)
, (namedPipeMode, NamedPipe)
, (directoryMode, Directory)
, (symbolicLinkMode, SymbolicLink)
, (socketMode, Socket)])
, statFileMode = fileMode
, statLinkCount = 2
, statFileOwner = uid
, statFileGroup = gid
, statSpecialDeviceID = 0
, statFileSize = fromIntegral size
, statBlocks = 1
, statAccessTime = 0
, statModificationTime = 0
, statStatusChangeTime = 0})
| 7ocb/fuse_adb_fs | lib/Fs.hs | gpl-3.0 | 26,575 | 8 | 24 | 9,508 | 6,618 | 3,513 | 3,105 | 470 | 5 |
module OhBool.Utils where
import Data.Bits (Bits, shiftR, xor, popCount)
import Data.List (sortBy)
import Data.Function (on)
import Codec.Binary.Gray.Bits (binary)
safeHead :: [a] -> Maybe a
safeHead [] = Nothing
safeHead (x:_) = Just x
grayify :: (Ord a, Eq a) => [a] -> [a]
grayify xs = map snd $ sortBy (compare `on` fst) $ zipWith (\n e -> (binary n,e)) ([0..] :: [Int]) xs
hammingDistance :: (Eq a) => [a] -> [a] -> Int
hammingDistance = hammingDistance' 0
hammingDistance' :: (Eq a) => Int -> [a] -> [a] -> Int
hammingDistance' acc [] ys = acc + length ys
hammingDistance' acc xs [] = acc + length xs
hammingDistance' acc (x:xs) (y:ys) = if x == y
then hammingDistance' acc xs ys
else hammingDistance' (acc+1) xs ys
numHammingDistance :: Bits a => a -> a -> Int
numHammingDistance x y = popCount (x `xor` y)
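-- Worked examples (following directly from the definitions above):
--   hammingDistance "karolin" "kathrin" == 3
--   hammingDistance [1,2,3] [1,2,4,5] == 2   (one mismatch plus one leftover element)
--   numHammingDistance (11 :: Int) 13 == 2   (1011 `xor` 1101 == 0110)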
pairs :: Int -> [a] -> [(a,a)]
pairs n xs = zip xs (drop n xs)
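-- pairs n couples each element with the one n positions later, e.g.
--   pairs 1 [1,2,3,4] == [(1,2),(2,3),(3,4)]
--   pairs 2 "abcd" == [('a','c'),('b','d')]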
| RomainGehrig/OhBool | src/OhBool/Utils.hs | gpl-3.0 | 970 | 0 | 10 | 254 | 458 | 250 | 208 | 22 | 2 |
{-# LANGUAGE TypeFamilies #-}
module Data.FromTuple where
class FromTuple a where
type Tuple a :: *
fromTuple :: Tuple a -> Either String a
-- toTuple :: a -> Tuple a
| wavewave/qft | old/lib/Data/FromTuple.hs | gpl-3.0 | 178 | 0 | 8 | 42 | 41 | 23 | 18 | 5 | 0 |
module GeometryTest where
import Test.QuickCheck
import Control.Monad
import Geometry
-- | A data structure which represents both the "point list" and the
-- randomly selected parameters used to generate that list
data DottedLine = DottedLine Point Point Double [(Point, Point)]
deriving Show
instance Arbitrary DottedLine where
arbitrary = do
let start = (0,0)
end <- liftM2 (,) (choose (-10,10)) (choose (-10,10))
seg <- choose (0.1,0.5)
return $ DottedLine start end seg $ genDottedLine start end seg
-- | The start point in the list shall be exactly the one specified when
-- generating the list
prop_startPtIsCorrect :: DottedLine -> Bool
prop_startPtIsCorrect (DottedLine start end seg ((point,_):_)) =
point == start
-- | Every point except the first shall have the same per-coordinate sign
-- as the endpoint
prop_ptsShallHaveEqualSign :: DottedLine -> Bool
prop_ptsShallHaveEqualSign (DottedLine start end seg (p:ps)) =
foldl (checkSign end) True ps
-- | Each pair of points shall be half a segment length apart
prop_ptsShallGoInEqualSteps :: DottedLine -> Bool
prop_ptsShallGoInEqualSteps (DottedLine start end seg ps) =
foldl (checkLength (seg/2)) True ps
-- | Helper function to check that a reference point's sign matches, per
-- coordinate, that of the other points. Intended to be used together with foldl
checkSign :: Point -> Bool -> (Point, Point) -> Bool
checkSign (x, y) b ((x', y'), (x'', y'')) =
b && hasSameSign x x' x'' && hasSameSign y y' y''
where
hasSameSign x y z
| x >= 0 && y >= 0 && z >= 0 = True
| x < 0 && y < 0 && z < 0 = True
| otherwise = False
-- | Helper function to check that the distance between two points is equal
-- to a reference distance
checkLength :: Double -> Bool -> (Point, Point) -> Bool
checkLength seg b (p1,p2) = b && fuzzyEqual seg (lineLength p1 p2)
-- Helper function to calculate the distance between two points using the
-- Pythagorean theorem
lineLength :: Point -> Point -> Double
lineLength (x,y) (x',y') =
let dx = x'-x
dy = y'-y
in sqrt $ dx^2+dy^2
-- Helper function to perform a fuzzy comparison between two floating-point
-- numbers
fuzzyEqual :: (Ord a, Floating a) => a -> a -> Bool
fuzzyEqual a b = abs (a - b) < 0.000001
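-- The properties above can be run from GHCi with QuickCheck, for example
-- (assuming this module and Geometry are loaded):
--
--   quickCheck prop_startPtIsCorrect
--   quickCheck prop_ptsShallHaveEqualSign
--   quickCheck prop_ptsShallGoInEqualSteps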
| SneakingCat/fay-ticker | test/GeometryTest.hs | gpl-3.0 | 2,300 | 0 | 15 | 529 | 663 | 355 | 308 | 37 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.SecurityCenter.Organizations.Sources.TestIAMPermissions
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Returns the permissions that a caller has on the specified source.
--
-- /See:/ <https://console.cloud.google.com/apis/api/securitycenter.googleapis.com/overview Security Command Center API Reference> for @securitycenter.organizations.sources.testIamPermissions@.
module Network.Google.Resource.SecurityCenter.Organizations.Sources.TestIAMPermissions
(
-- * REST Resource
OrganizationsSourcesTestIAMPermissionsResource
-- * Creating a Request
, organizationsSourcesTestIAMPermissions
, OrganizationsSourcesTestIAMPermissions
-- * Request Lenses
, ostipXgafv
, ostipUploadProtocol
, ostipAccessToken
, ostipUploadType
, ostipPayload
, ostipResource
, ostipCallback
) where
import Network.Google.Prelude
import Network.Google.SecurityCenter.Types
-- | A resource alias for @securitycenter.organizations.sources.testIamPermissions@ method which the
-- 'OrganizationsSourcesTestIAMPermissions' request conforms to.
type OrganizationsSourcesTestIAMPermissionsResource =
"v1p1beta1" :>
CaptureMode "resource" "testIamPermissions" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] TestIAMPermissionsRequest :>
Post '[JSON] TestIAMPermissionsResponse
-- | Returns the permissions that a caller has on the specified source.
--
-- /See:/ 'organizationsSourcesTestIAMPermissions' smart constructor.
data OrganizationsSourcesTestIAMPermissions =
OrganizationsSourcesTestIAMPermissions'
{ _ostipXgafv :: !(Maybe Xgafv)
, _ostipUploadProtocol :: !(Maybe Text)
, _ostipAccessToken :: !(Maybe Text)
, _ostipUploadType :: !(Maybe Text)
, _ostipPayload :: !TestIAMPermissionsRequest
, _ostipResource :: !Text
, _ostipCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'OrganizationsSourcesTestIAMPermissions' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ostipXgafv'
--
-- * 'ostipUploadProtocol'
--
-- * 'ostipAccessToken'
--
-- * 'ostipUploadType'
--
-- * 'ostipPayload'
--
-- * 'ostipResource'
--
-- * 'ostipCallback'
organizationsSourcesTestIAMPermissions
:: TestIAMPermissionsRequest -- ^ 'ostipPayload'
-> Text -- ^ 'ostipResource'
-> OrganizationsSourcesTestIAMPermissions
organizationsSourcesTestIAMPermissions pOstipPayload_ pOstipResource_ =
OrganizationsSourcesTestIAMPermissions'
{ _ostipXgafv = Nothing
, _ostipUploadProtocol = Nothing
, _ostipAccessToken = Nothing
, _ostipUploadType = Nothing
, _ostipPayload = pOstipPayload_
, _ostipResource = pOstipResource_
, _ostipCallback = Nothing
}
-- | V1 error format.
ostipXgafv :: Lens' OrganizationsSourcesTestIAMPermissions (Maybe Xgafv)
ostipXgafv
= lens _ostipXgafv (\ s a -> s{_ostipXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
ostipUploadProtocol :: Lens' OrganizationsSourcesTestIAMPermissions (Maybe Text)
ostipUploadProtocol
= lens _ostipUploadProtocol
(\ s a -> s{_ostipUploadProtocol = a})
-- | OAuth access token.
ostipAccessToken :: Lens' OrganizationsSourcesTestIAMPermissions (Maybe Text)
ostipAccessToken
= lens _ostipAccessToken
(\ s a -> s{_ostipAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
ostipUploadType :: Lens' OrganizationsSourcesTestIAMPermissions (Maybe Text)
ostipUploadType
= lens _ostipUploadType
(\ s a -> s{_ostipUploadType = a})
-- | Multipart request metadata.
ostipPayload :: Lens' OrganizationsSourcesTestIAMPermissions TestIAMPermissionsRequest
ostipPayload
= lens _ostipPayload (\ s a -> s{_ostipPayload = a})
-- | REQUIRED: The resource for which the policy detail is being requested.
-- See the operation documentation for the appropriate value for this
-- field.
ostipResource :: Lens' OrganizationsSourcesTestIAMPermissions Text
ostipResource
= lens _ostipResource
(\ s a -> s{_ostipResource = a})
-- | JSONP
ostipCallback :: Lens' OrganizationsSourcesTestIAMPermissions (Maybe Text)
ostipCallback
= lens _ostipCallback
(\ s a -> s{_ostipCallback = a})
instance GoogleRequest
OrganizationsSourcesTestIAMPermissions
where
type Rs OrganizationsSourcesTestIAMPermissions =
TestIAMPermissionsResponse
type Scopes OrganizationsSourcesTestIAMPermissions =
'["https://www.googleapis.com/auth/cloud-platform"]
requestClient
OrganizationsSourcesTestIAMPermissions'{..}
= go _ostipResource _ostipXgafv _ostipUploadProtocol
_ostipAccessToken
_ostipUploadType
_ostipCallback
(Just AltJSON)
_ostipPayload
securityCenterService
where go
= buildClient
(Proxy ::
Proxy OrganizationsSourcesTestIAMPermissionsResource)
mempty
| brendanhay/gogol | gogol-securitycenter/gen/Network/Google/Resource/SecurityCenter/Organizations/Sources/TestIAMPermissions.hs | mpl-2.0 | 6,082 | 0 | 16 | 1,261 | 779 | 455 | 324 | 120 | 1 |
-- Copyright 2010 Leonid Movshovich <[email protected]>
-- This file is part of SPIM.
-- SPIM is free software: you can redistribute it and/or modify
-- it under the terms of the GNU Affero General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
-- SPIM is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU Affero General Public License for more details.
-- You should have received a copy of the GNU Affero General Public License
-- along with SPIM. If not, see <http://www.gnu.org/licenses/>.
module Main where
import IO
import qualified System.Environment as SysEnv
import qualified System.Directory as SysDir
import qualified System.Exit as Exit
import qualified SpimCommon as Spim
import qualified MIMEDir as MD
import qualified Data.List as List
main :: IO()
main = do repoDir:linkType:fromUID:toUIDs <- SysEnv.getArgs
isRepo <- Spim.isSpimRepo repoDir
if not isRepo
then do putStr ("Error: '" ++ repoDir ++ "' is not a spim repository")
Exit.exitWith (Exit.ExitFailure Spim.badRepoEC)
else do oldDir <- SysDir.getCurrentDirectory
SysDir.setCurrentDirectory repoDir
link <- Spim.loadLink linkType
let newLink = MD.add fromUID (List.intercalate "," toUIDs) link
Spim.saveLink newLink
SysDir.setCurrentDirectory oldDir
| event/spim | AddPILink.hs | agpl-3.0 | 1,647 | 0 | 16 | 409 | 233 | 128 | 105 | 20 | 2 |
module Uint(Int, Word, dflt_size) where
import qualified Prelude
import Data.Int(Int)
import Data.Word(Word)
import qualified Data.Bits
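-- dflt_size is the bit width of the native Word type (for example, 64 on a
-- typical 64-bit platform), obtained by asking Data.Bits.bitSize about a
-- zero Word value.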
dflt_size :: Prelude.Integer
dflt_size = Prelude.toInteger (bitSize_aux (0::Word)) where
bitSize_aux :: (Data.Bits.Bits a, Prelude.Bounded a) => a -> Int
bitSize_aux = Data.Bits.bitSize
| jwaldmann/ceta-postproc | CeTA-2.39/generated/Haskell/Uint.hs | lgpl-3.0 | 344 | 0 | 9 | 59 | 113 | 67 | 46 | 9 | 1 |
{- Copyright 2014 David Farrell <[email protected]>
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
- http://www.apache.org/licenses/LICENSE-2.0
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-}
module MaxChannels where
import IRC.Numeric
import IRC.Action
import qualified IRC.Server.Client as Client
import qualified IRC.Server.Environment as Env
import Config
import Plugin
plugin = defaultPlugin {handlers=[TransformHandler trans]}
trans :: TransformHSpec
trans env = env {Env.actions=map f (Env.actions env)}
where
maxChans = getConfigInt (Env.config env) "client" "max_channels"
channels = Client.channels (Env.client env)
f a@(ChanAction "Join" chan _) = if length channels < maxChans
then a
else GenericAction $ \e -> sendNumeric e numERR_TOOMANYCHANNELS [chan, "You have joined too many channels"]
>> return e
f a = a
| shockkolate/lambdircd | plugins.old/MaxChannels.hs | apache-2.0 | 1,288 | 0 | 12 | 242 | 207 | 116 | 91 | 17 | 3 |
module NinetyNine where
isPalindrome :: Eq a => [a] -> Bool
isPalindrome xs = foldr cmp True zipped
where zipped = zip xs (reverse xs)
cmp (a, b) acc = acc && (a == b)
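-- For comparison (a hypothetical alternative, not a change to the above):
-- the same predicate can be written directly as
--
--   isPalindrome' xs = xs == reverse xs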
| naphthalene/haskell-99onthewall | ten/6.hs | bsd-2-clause | 179 | 0 | 9 | 46 | 83 | 44 | 39 | 5 | 1 |
{-# LANGUAGE OverloadedStrings, ScopedTypeVariables #-}
module Handler.File where
import Import
import Handler.Utils
import Codec.Archive.Streaming
import Control.Monad
import Data.Digest.CRC32 (crc32)
import Data.Int (Int64)
import Data.Text.Encoding.Error
import Data.Text.Format
import Data.Time
import Data.Time.Clock.POSIX
import System.Directory
import System.FilePath
import System.Random
import qualified Blaze.ByteString.Builder.ByteString as Builder
import qualified Data.ByteString.Lazy as LBS
import qualified Data.List
import qualified Data.Text
import qualified Data.Text.Encoding
import qualified Data.Text.Lazy
import qualified Network.Wai.Parse as WAI
import qualified System.Locale
import qualified System.PosixCompat -- file sizes
-- upload
uploadDirectory :: FilePath -- FIXME: make this configurable
uploadDirectory = "/var/tmp/personallibrary/incoming"
randomFileName :: IO FilePath
randomFileName = do
createDirectoryIfMissing True uploadDirectory -- hack?
fname'base <- replicateM 20 (randomRIO ('a','z'))
let fname = uploadDirectory </> fname'base <.> "bin"
return fname
fileUploadForm :: Form (FileInfo, Textarea)
fileUploadForm = renderDivs $ (,)
<$> fileAFormReq "Choose a file"
<*> areq textareaField "What's on the file?" Nothing
getFileNewR :: AssetId -> Handler RepHtml
getFileNewR aid = do
_asset <- runDB $ get404 aid
(formWidget, formEnctype) <- generateFormPost fileUploadForm
defaultLayout $ do
setTitle "Upload new file."
$(widgetFile "file/new")
getFileSize :: FilePath -> IO Int64
getFileSize fp = do
stat <- System.PosixCompat.getFileStatus fp
let System.PosixCompat.COff size = System.PosixCompat.fileSize stat
return size
postFileNewR' :: AssetId -> Handler RepHtml
postFileNewR' aid = do
_asset <- runDB $ get404 aid
user <- requireAuth
((result, formWidget), formEnctype) <- runFormPost fileUploadForm
case result of
FormSuccess (fi,info) -> do
fn <- liftIO randomFileName
liftIO (LBS.writeFile fn (fileContent fi))
let crc = crc32 (fileContent fi)
fsize <- liftIO $ getFileSize fn
let newFile = File aid (entityKey user) fn info (fileName fi) (fileContentType fi) fsize crc
fid <- runDB $ insert newFile
redirect (FileViewR fid)
_ -> return ()
defaultLayout $ do
setTitle "Upload new file."
$(widgetFile "file/new")
postFileNewR :: AssetId -> Handler RepHtml
postFileNewR aid = do
_asset <- runDB $ get404 aid
user <- requireAuth
wreq <- waiRequest
yreq <- getRequest
(params, files) <- lift $ WAI.parseRequestBody WAI.tempFileBackEnd wreq
let lookupWaiPar p = Data.List.lookup (Data.Text.Encoding.encodeUtf8 p) params
lookupWaiFile f = Data.List.lookup (Data.Text.Encoding.encodeUtf8 f) files
token = lookupWaiPar "_token"
comment = lookupWaiPar "f3" -- FIXME -- hard coded string
file = lookupWaiFile "f2" -- FIXME -- hard coded string
tokenOK = (fmap Data.Text.Encoding.encodeUtf8 . reqToken $ yreq) == token
-- when (not tokenOK) (redirect $ FileNewR aid) -- FIXME -- is this correct way to handle it?
case (comment,file, tokenOK) of
(Just comm, Just waiFileInfo, True) -> do
fn <- liftIO randomFileName -- generate new name
liftIO $ renameFile (WAI.fileContent waiFileInfo) fn -- rename received file
fsize <- liftIO $ getFileSize fn -- get file size
crc <- liftIO (fmap crc32 (LBS.readFile fn))
let bs2t = Data.Text.Encoding.decodeUtf8With lenientDecode
comment'ta = Textarea (bs2t comm)
uid = entityKey user
declared'fname = bs2t (WAI.fileName waiFileInfo)
declared'ctype = bs2t (WAI.fileContentType waiFileInfo)
new = File aid uid fn comment'ta declared'fname declared'ctype fsize crc
fid <- runDB $ insert new
redirect (FileViewR fid)
_ -> do
(formWidget, formEnctype) <- generateFormPost fileUploadForm
defaultLayout $ do
setTitle "Upload new file."
$(widgetFile "file/new")
-- view attributes
getFileViewR :: FileId -> Handler RepHtml
getFileViewR fid = do
linkUser <- mkDisplayUserWidget
file <- runDB $ get404 fid
defaultLayout $ do
setTitle "File description."
$(widgetFile "file/view")
-- download
getFileGetR :: FileId -> Handler RepHtml
getFileGetR fid = do
file <- runDB $ get404 fid
redirect (FileGetNameR fid (fileOriginalName file))
getFileGetNameR :: FileId -> Text -> Handler ()
getFileGetNameR fid _name = do
file <- runDB $ get404 fid
setHeader "Content-Disposition" (Data.Text.concat ["attachment; filename=\"",(fileOriginalName file), "\";"])
setHeader "X-Content-Type-Options" "nosniff"
let encodeContentType ct = Data.Text.Encoding.encodeUtf8 ct
fct = encodeContentType $ fileOriginalContentType file
sendFile (fct) (filePath file)
-- batch download
getFilesForAssetGetR :: AssetId -> Handler RepPlain
getFilesForAssetGetR key = do
asset <- runDB $ get404 key
now <- liftIO getCurrentTime
now'posix <- liftIO getPOSIXTime
files <- runDB $ selectList [FileAsset ==. key] [Asc FileId]
let lt2lbs t = t2lbs . Data.Text.Lazy.toStrict $ t
t2lbs t = LBS.fromChunks . singletonList . Data.Text.Encoding.encodeUtf8 $ t
singletonList x = [x]
readFileFromDisk (Entity _ file) = LBS.readFile (filePath file)
fileEntry (Entity fid file) =
let name = Data.Text.Format.format "{}/{}" ((Shown fid), (fileOriginalName file))
-- fixme: error/warning for too big files
size = fromIntegral . fileSize $ file
e = Entry { eRelativePath = read . show $ name
, eCompressionMethod = NoCompression
, eLastModified = round now'posix
, eCRC32 = fileCrc file
, eCompressedSize = size
, eUncompressedSize = size
, eExtraField = LBS.empty
, eFileComment = t2lbs $ unTextarea $ fileComment file
, eInternalFileAttributes = 0
, eExternalFileAttributes = 0
}
in e
files'lazy <- liftIO $ mapM readFileFromDisk files
let entries = map fileEntry files
let comment = Data.Text.Format.format "File bundle for asset {} - {}" (Shown key, assetName asset)
archive =
Archive
{ zComment = lt2lbs comment
, zSignature = Nothing
, zEntries = entries
}
-- let now'fmt = Data.Time.formatTime System.Locale.defaultTimeLocale "%s" now
-- fname = Data.Text.Format.format "asset_{}_file_bundle_{}.zip" (Shown key, now'fmt)
-- cont'disp = Data.Text.Encoding.encodeUtf8 $ Data.Text.Format.format "attachment; filename=\"{}\";" [fname]
setHeader "Content-Disposition" "attachment; filename=archive-all.zip;" -- (Data.Text.Lazy.toStrict cont'disp)
setHeader "X-Content-Type-Options" "nosniff"
-- let h1 = ("Content-Disposition","attachment; filename=archive-all.zip;") -- fixme: provide filename different for each asset
-- h2 = ("X-Content-Type-Options","nosniff")
-- -- h3 = ("Content-Length","") -- fixme: calculate exact actual content length.
--
-- sendWaiResponse (responseLBS ok200 [h1,h2] (fromArchive files'lazy archive))
let content = ContentBuilder (Builder.fromLazyByteString (fromArchive files'lazy archive)) Nothing -- fixme: calculate response length
return (RepPlain content)
-- delete
fileDeleteForm = renderTable (const <$> areq areYouSureField "Are you sure?" (Just False))
where areYouSureField = check isSure boolField
isSure False = Left ("You must be sure to delete a file" :: Text)
isSure True = Right True
getFileDeleteR :: FileId -> Handler RepHtml
getFileDeleteR fid = do
file :: File <- runDB $ get404 fid
(fwidget, enctype) <- generateFormPost fileDeleteForm
defaultLayout $ do
setTitle "Deleting a file."
$(widgetFile "file/delete")
-- | delete file from db and disk.
deleteFile :: Entity File -> Handler ()
deleteFile (Entity fid file) = do
-- delete from db
runDB $ delete fid
-- delete from disk
liftIO (removeFile (filePath file))
postFileDeleteR :: FileId -> Handler RepHtml
postFileDeleteR fid = do
file :: File <- runDB $ get404 fid
((result,fwidget), enctype) <- runFormPost fileDeleteForm
case result of
FormSuccess _ -> do
deleteFile (Entity fid file)
defaultLayout [whamlet|
<p> <strong>File deleted.</strong> |]
_ -> defaultLayout $ do
setTitle "Deleting a file."
$(widgetFile "file/delete")
| Tener/personal-library-yesod | Handler/File.hs | bsd-2-clause | 8,791 | 0 | 18 | 2,068 | 2,206 | 1,109 | 1,097 | -1 | -1 |
{-# OPTIONS_GHC -Wwarn #-}
{-# LANGUAGE CPP, ScopedTypeVariables, Rank2Types #-}
{-# LANGUAGE LambdaCase #-}
-----------------------------------------------------------------------------
-- |
-- Module : Haddock
-- Copyright : (c) Simon Marlow 2003-2006,
-- David Waern 2006-2010,
-- Mateusz Kowalczyk 2014
-- License : BSD-like
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- Haddock - A Haskell Documentation Tool
--
-- Program entry point and top-level code.
-----------------------------------------------------------------------------
module Haddock (
haddock,
haddockWithGhc,
getGhcDirs,
readPackagesAndProcessModules,
withGhc
) where
import Data.Version
import Haddock.Backends.Xhtml
import Haddock.Backends.Xhtml.Themes (getThemes)
import Haddock.Backends.LaTeX
import Haddock.Backends.Hoogle
import Haddock.Backends.Hyperlinker
import Haddock.Interface
import Haddock.Parser
import Haddock.Types
import Haddock.Version
import Haddock.InterfaceFile
import Haddock.Options
import Haddock.Utils
import Control.Monad hiding (forM_)
import Control.Applicative
import Data.Foldable (forM_)
import Data.List (isPrefixOf)
import Control.Exception
import Data.Maybe
import Data.IORef
import Data.Map (Map)
import qualified Data.Map as Map
import System.IO
import System.Exit
#if defined(mingw32_HOST_OS)
import Foreign
import Foreign.C
import Data.Int
#endif
#ifdef IN_GHC_TREE
import System.FilePath
#else
import qualified GHC.Paths as GhcPaths
import Paths_haddock_api (getDataDir)
import System.Directory (doesDirectoryExist)
#endif
import GHC hiding (verbosity)
import Config
import DynFlags hiding (projectVersion, verbosity)
import StaticFlags (discardStaticFlags)
import Packages
import Panic (handleGhcException)
import Module
import FastString
--------------------------------------------------------------------------------
-- * Exception handling
--------------------------------------------------------------------------------
handleTopExceptions :: IO a -> IO a
handleTopExceptions =
handleNormalExceptions . handleHaddockExceptions . handleGhcExceptions
-- | Either returns normally or throws an ExitCode exception;
-- all other exceptions are turned into exit exceptions.
handleNormalExceptions :: IO a -> IO a
handleNormalExceptions inner =
(inner `onException` hFlush stdout)
`catches`
[ Handler (\(code :: ExitCode) -> exitWith code)
, Handler (\(ex :: AsyncException) ->
case ex of
StackOverflow -> do
putStrLn "stack overflow: use -g +RTS -K<size> to increase it"
exitFailure
_ -> do
putStrLn ("haddock: " ++ show ex)
exitFailure)
, Handler (\(ex :: SomeException) -> do
putStrLn ("haddock: internal error: " ++ show ex)
exitFailure)
]
handleHaddockExceptions :: IO a -> IO a
handleHaddockExceptions inner =
catches inner [Handler handler]
where
handler (e::HaddockException) = do
putStrLn $ "haddock: " ++ show e
exitFailure
handleGhcExceptions :: IO a -> IO a
handleGhcExceptions =
-- error messages propagated as exceptions
handleGhcException $ \e -> do
hFlush stdout
print (e :: GhcException)
exitFailure
-------------------------------------------------------------------------------
-- * Top level
-------------------------------------------------------------------------------
-- | Run Haddock with given list of arguments.
--
-- Haddock's own main function is defined in terms of this:
--
-- > main = getArgs >>= haddock
haddock :: [String] -> IO ()
haddock args = haddockWithGhc withGhc args
haddockWithGhc :: (forall a. [Flag] -> Ghc a -> IO a) -> [String] -> IO ()
haddockWithGhc ghc args = handleTopExceptions $ do
-- Parse command-line flags and handle some of them initially.
-- TODO: unify all of this (and some of what's in the 'render' function),
-- into one function that returns a record with a field for each option,
-- or which exits with an error or help message.
(flags, files) <- parseHaddockOpts args
shortcutFlags flags
qual <- case qualification flags of {Left msg -> throwE msg; Right q -> return q}
-- inject dynamic-too into flags before we proceed
flags' <- ghc flags $ do
df <- getDynFlags
case lookup "GHC Dynamic" (compilerInfo df) of
Just "YES" -> return $ Flag_OptGhc "-dynamic-too" : flags
_ -> return flags
unless (Flag_NoWarnings `elem` flags) $ do
hypSrcWarnings flags
forM_ (warnings args) $ \warning -> do
hPutStrLn stderr warning
ghc flags' $ do
dflags <- getDynFlags
if not (null files) then do
(packages, ifaces, homeLinks) <- readPackagesAndProcessModules flags files
-- Dump an "interface file" (.haddock file), if requested.
forM_ (optDumpInterfaceFile flags) $ \path -> liftIO $ do
writeInterfaceFile path InterfaceFile {
ifInstalledIfaces = map toInstalledIface ifaces
, ifLinkEnv = homeLinks
}
-- Render the interfaces.
liftIO $ renderStep dflags flags qual packages ifaces
else do
when (any (`elem` [Flag_Html, Flag_Hoogle, Flag_LaTeX]) flags) $
throwE "No input file(s)."
-- Get packages supplied with --read-interface.
packages <- liftIO $ readInterfaceFiles freshNameCache (readIfaceArgs flags)
-- Render even though there are no input files (usually contents/index).
liftIO $ renderStep dflags flags qual packages []
-- | Create warnings about potential misuse of -optghc
warnings :: [String] -> [String]
warnings = map format . filter (isPrefixOf "-optghc")
where
format arg = concat ["Warning: `", arg, "' means `-o ", drop 2 arg, "', did you mean `-", arg, "'?"]
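-- A rough sketch of the warning text (the flag below is a hypothetical misuse):
--
-- > warnings ["-optghc-Wall"]
-- >   == ["Warning: `-optghc-Wall' means `-o ptghc-Wall', did you mean `--optghc-Wall'?"]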
withGhc :: [Flag] -> Ghc a -> IO a
withGhc flags action = do
libDir <- fmap snd (getGhcDirs flags)
-- Catches all GHC source errors, then prints and re-throws them.
let handleSrcErrors action' = flip handleSourceError action' $ \err -> do
printException err
liftIO exitFailure
withGhc' libDir (ghcFlags flags) (\_ -> handleSrcErrors action)
readPackagesAndProcessModules :: [Flag] -> [String]
-> Ghc ([(DocPaths, InterfaceFile)], [Interface], LinkEnv)
readPackagesAndProcessModules flags files = do
-- Get packages supplied with --read-interface.
packages <- readInterfaceFiles nameCacheFromGhc (readIfaceArgs flags)
-- Create the interfaces -- this is the core part of Haddock.
let ifaceFiles = map snd packages
(ifaces, homeLinks) <- processModules (verbosity flags) files flags ifaceFiles
return (packages, ifaces, homeLinks)
renderStep :: DynFlags -> [Flag] -> QualOption -> [(DocPaths, InterfaceFile)] -> [Interface] -> IO ()
renderStep dflags flags qual pkgs interfaces = do
updateHTMLXRefs pkgs
let
ifaceFiles = map snd pkgs
installedIfaces = concatMap ifInstalledIfaces ifaceFiles
extSrcMap = Map.fromList $ do
((_, Just path), ifile) <- pkgs
iface <- ifInstalledIfaces ifile
return (instMod iface, path)
render dflags flags qual interfaces installedIfaces extSrcMap
-- | Render the interfaces with whatever backend is specified in the flags.
render :: DynFlags -> [Flag] -> QualOption -> [Interface] -> [InstalledInterface] -> Map Module FilePath -> IO ()
render dflags flags qual ifaces installedIfaces extSrcMap = do
let
title = fromMaybe "" (optTitle flags)
unicode = Flag_UseUnicode `elem` flags
pretty = Flag_PrettyHtml `elem` flags
opt_wiki_urls = wikiUrls flags
opt_contents_url = optContentsUrl flags
opt_index_url = optIndexUrl flags
odir = outputDir flags
opt_latex_style = optLaTeXStyle flags
opt_source_css = optSourceCssFile flags
opt_mathjax = optMathjax flags
visibleIfaces = [ i | i <- ifaces, OptHide `notElem` ifaceOptions i ]
-- /All/ visible interfaces including external package modules.
allIfaces = map toInstalledIface ifaces ++ installedIfaces
allVisibleIfaces = [ i | i <- allIfaces, OptHide `notElem` instOptions i ]
pkgMod = ifaceMod (head ifaces)
pkgKey = moduleUnitId pkgMod
pkgStr = Just (unitIdString pkgKey)
pkgNameVer = modulePackageInfo dflags flags pkgMod
(srcBase, srcModule, srcEntity, srcLEntity) = sourceUrls flags
srcModule'
| Flag_HyperlinkedSource `elem` flags = Just hypSrcModuleUrlFormat
| otherwise = srcModule
srcMap = mkSrcMap $ Map.union
(Map.map SrcExternal extSrcMap)
(Map.fromList [ (ifaceMod iface, SrcLocal) | iface <- ifaces ])
pkgSrcMap = Map.mapKeys moduleUnitId extSrcMap
pkgSrcMap'
| Flag_HyperlinkedSource `elem` flags =
Map.insert pkgKey hypSrcModuleNameUrlFormat pkgSrcMap
| Just srcNameUrl <- srcEntity = Map.insert pkgKey srcNameUrl pkgSrcMap
| otherwise = pkgSrcMap
-- TODO: Get these from the interface files as with srcMap
pkgSrcLMap'
| Flag_HyperlinkedSource `elem` flags =
Map.singleton pkgKey hypSrcModuleLineUrlFormat
| Just path <- srcLEntity = Map.singleton pkgKey path
| otherwise = Map.empty
sourceUrls' = (srcBase, srcModule', pkgSrcMap', pkgSrcLMap')
libDir <- getHaddockLibDir flags
prologue <- getPrologue dflags flags
themes <- getThemes libDir flags >>= either bye return
when (Flag_GenIndex `elem` flags) $ do
ppHtmlIndex odir title pkgStr
themes opt_contents_url sourceUrls' opt_wiki_urls
allVisibleIfaces pretty
copyHtmlBits odir libDir themes
when (Flag_GenContents `elem` flags) $ do
ppHtmlContents dflags odir title pkgStr
themes opt_mathjax opt_index_url sourceUrls' opt_wiki_urls
allVisibleIfaces True prologue pretty
(makeContentsQual qual)
copyHtmlBits odir libDir themes
when (Flag_Html `elem` flags) $ do
ppHtml dflags title pkgStr visibleIfaces odir
prologue
themes opt_mathjax sourceUrls' opt_wiki_urls
opt_contents_url opt_index_url unicode qual
pretty
copyHtmlBits odir libDir themes
-- TODO: we throw away Meta for both Hoogle and LaTeX right now,
-- might want to fix that if/when these two get some work on them
when (Flag_Hoogle `elem` flags) $ do
case pkgNameVer of
Nothing -> putStrLn . unlines $
[ "haddock: Unable to find a package providing module "
++ moduleNameString (moduleName pkgMod) ++ ", skipping Hoogle."
, ""
, " Perhaps try specifying the desired package explicitly"
++ " using the --package-name"
, " and --package-version arguments."
]
Just (PackageName pkgNameFS, pkgVer) ->
let pkgNameStr | unpackFS pkgNameFS == "main" && title /= [] = title
| otherwise = unpackFS pkgNameFS
in ppHoogle dflags pkgNameStr pkgVer title (fmap _doc prologue)
visibleIfaces odir
when (Flag_LaTeX `elem` flags) $ do
ppLaTeX title pkgStr visibleIfaces odir (fmap _doc prologue) opt_latex_style
libDir
when (Flag_HyperlinkedSource `elem` flags) $ do
ppHyperlinkedSource odir libDir opt_source_css pretty srcMap ifaces
-- | From GHC 7.10, this function has a potential to crash with a
-- nasty message such as @expectJust getPackageDetails@ because
-- package name and versions can no longer reliably be extracted in
-- all cases: if the package is not installed yet then this info is no
-- longer available. The @--package-name@ and @--package-version@
-- Haddock flags allow the user to specify this information and it is
-- returned here if present: if it is not present, the error will
-- occur. Nasty but that's how it is for now. Potential TODO.
modulePackageInfo :: DynFlags
-> [Flag] -- ^ Haddock flags are checked as they may
-- contain the package name or version
-- provided by the user which we
-- prioritise
-> Module -> Maybe (PackageName, Data.Version.Version)
modulePackageInfo dflags flags modu =
cmdline <|> pkgDb
where
cmdline = (,) <$> optPackageName flags <*> optPackageVersion flags
pkgDb = (\pkg -> (packageName pkg, packageVersion pkg)) <$> lookupPackage dflags (moduleUnitId modu)
-------------------------------------------------------------------------------
-- * Reading and dumping interface files
-------------------------------------------------------------------------------
readInterfaceFiles :: MonadIO m
=> NameCacheAccessor m
-> [(DocPaths, FilePath)]
-> m [(DocPaths, InterfaceFile)]
readInterfaceFiles name_cache_accessor pairs = do
catMaybes `liftM` mapM tryReadIface pairs
where
-- try to read an interface, warn if we can't
tryReadIface (paths, file) =
readInterfaceFile name_cache_accessor file >>= \case
Left err -> liftIO $ do
putStrLn ("Warning: Cannot read " ++ file ++ ":")
putStrLn (" " ++ err)
putStrLn "Skipping this interface."
return Nothing
Right f -> return $ Just (paths, f)
-------------------------------------------------------------------------------
-- * Creating a GHC session
-------------------------------------------------------------------------------
-- | Start a GHC session with the -haddock flag set. Also turn off
-- compilation and linking. Then run the given 'Ghc' action.
withGhc' :: String -> [String] -> (DynFlags -> Ghc a) -> IO a
withGhc' libDir flags ghcActs = runGhc (Just libDir) $ do
dynflags <- getSessionDynFlags
dynflags' <- parseGhcFlags (gopt_set dynflags Opt_Haddock) {
hscTarget = HscNothing,
ghcMode = CompManager,
ghcLink = NoLink
}
let dynflags'' = gopt_unset dynflags' Opt_SplitObjs
defaultCleanupHandler dynflags'' $ do
-- ignore the following return-value, which is a list of packages
-- that may need to be re-linked: Haddock doesn't do any
-- dynamic or static linking at all!
_ <- setSessionDynFlags dynflags''
ghcActs dynflags''
where
parseGhcFlags :: MonadIO m => DynFlags -> m DynFlags
parseGhcFlags dynflags = do
-- TODO: handle warnings?
-- NOTA BENE: We _MUST_ discard any static flags here, because we cannot
-- rely on Haddock to parse them, as it only parses the DynFlags. Yet if
-- we pass any, Haddock will fail. Since StaticFlags are global to the
-- GHC invocation, there's also no way to reparse/save them to set them
-- again properly.
--
-- This is a bit of a hack until we get rid of the rest of the remaining
-- StaticFlags. See GHC issue #8276.
let flags' = discardStaticFlags flags
(dynflags', rest, _) <- parseDynamicFlags dynflags (map noLoc flags')
if not (null rest)
then throwE ("Couldn't parse GHC options: " ++ unwords flags')
else return dynflags'
-------------------------------------------------------------------------------
-- * Misc
-------------------------------------------------------------------------------
getHaddockLibDir :: [Flag] -> IO String
getHaddockLibDir flags =
case [str | Flag_Lib str <- flags] of
[] -> do
#ifdef IN_GHC_TREE
getInTreeDir
#else
d <- getDataDir -- provided by Cabal
doesDirectoryExist d >>= \exists -> case exists of
True -> return d
False -> do
-- If directory does not exist then we are probably invoking from
-- ./dist/build/haddock/haddock so we use ./resources as a fallback.
doesDirectoryExist "resources" >>= \exists_ -> case exists_ of
True -> return "resources"
False -> die ("Haddock's resource directory (" ++ d ++ ") does not exist!\n")
#endif
fs -> return (last fs)
getGhcDirs :: [Flag] -> IO (String, String)
getGhcDirs flags = do
case [ dir | Flag_GhcLibDir dir <- flags ] of
[] -> do
#ifdef IN_GHC_TREE
libDir <- getInTreeDir
return (ghcPath, libDir)
#else
return (ghcPath, GhcPaths.libdir)
#endif
xs -> return (ghcPath, last xs)
where
#ifdef IN_GHC_TREE
ghcPath = "not available"
#else
ghcPath = GhcPaths.ghc
#endif
shortcutFlags :: [Flag] -> IO ()
shortcutFlags flags = do
usage <- getUsage
when (Flag_Help `elem` flags) (bye usage)
when (Flag_Version `elem` flags) byeVersion
when (Flag_InterfaceVersion `elem` flags) (bye (show binaryInterfaceVersion ++ "\n"))
when (Flag_CompatibleInterfaceVersions `elem` flags)
(bye (unwords (map show binaryInterfaceVersionCompatibility) ++ "\n"))
when (Flag_GhcVersion `elem` flags) (bye (cProjectVersion ++ "\n"))
when (Flag_PrintGhcPath `elem` flags) $ do
dir <- fmap fst (getGhcDirs flags)
bye $ dir ++ "\n"
when (Flag_PrintGhcLibDir `elem` flags) $ do
dir <- fmap snd (getGhcDirs flags)
bye $ dir ++ "\n"
when (Flag_UseUnicode `elem` flags && Flag_Html `notElem` flags) $
throwE "Unicode can only be enabled for HTML output."
when ((Flag_GenIndex `elem` flags || Flag_GenContents `elem` flags)
&& Flag_Html `elem` flags) $
throwE "-h cannot be used with --gen-index or --gen-contents"
when ((Flag_GenIndex `elem` flags || Flag_GenContents `elem` flags)
&& Flag_Hoogle `elem` flags) $
throwE "--hoogle cannot be used with --gen-index or --gen-contents"
when ((Flag_GenIndex `elem` flags || Flag_GenContents `elem` flags)
&& Flag_LaTeX `elem` flags) $
throwE "--latex cannot be used with --gen-index or --gen-contents"
where
byeVersion = bye $
"Haddock version " ++ projectVersion ++ ", (c) Simon Marlow 2006\n"
++ "Ported to use the GHC API by David Waern 2006-2008\n"
-- | Generate some warnings about potential misuse of @--hyperlinked-source@.
hypSrcWarnings :: [Flag] -> IO ()
hypSrcWarnings flags = do
when (hypSrc && any isSourceUrlFlag flags) $
hPutStrLn stderr $ concat
[ "Warning: "
, "--source-* options are ignored when "
, "--hyperlinked-source is enabled."
]
when (not hypSrc && any isSourceCssFlag flags) $
hPutStrLn stderr $ concat
[ "Warning: "
, "source CSS file is specified but "
, "--hyperlinked-source is disabled."
]
where
hypSrc = Flag_HyperlinkedSource `elem` flags
isSourceUrlFlag (Flag_SourceBaseURL _) = True
isSourceUrlFlag (Flag_SourceModuleURL _) = True
isSourceUrlFlag (Flag_SourceEntityURL _) = True
isSourceUrlFlag (Flag_SourceLEntityURL _) = True
isSourceUrlFlag _ = False
isSourceCssFlag (Flag_SourceCss _) = True
isSourceCssFlag _ = False
updateHTMLXRefs :: [(DocPaths, InterfaceFile)] -> IO ()
updateHTMLXRefs packages = do
writeIORef html_xrefs_ref (Map.fromList mapping)
writeIORef html_xrefs_ref' (Map.fromList mapping')
where
mapping = [ (instMod iface, html) | ((html, _), ifaces) <- packages
, iface <- ifInstalledIfaces ifaces ]
mapping' = [ (moduleName m, html) | (m, html) <- mapping ]
getPrologue :: DynFlags -> [Flag] -> IO (Maybe (MDoc RdrName))
getPrologue dflags flags =
case [filename | Flag_Prologue filename <- flags ] of
[] -> return Nothing
[filename] -> withFile filename ReadMode $ \h -> do
hSetEncoding h utf8
str <- hGetContents h
return . Just $! parseParas dflags str
_ -> throwE "multiple -p/--prologue options"
#ifdef IN_GHC_TREE
getInTreeDir :: IO String
getInTreeDir = getExecDir >>= \case
Nothing -> error "No GhcDir found"
Just d -> return (d </> ".." </> "lib")
getExecDir :: IO (Maybe String)
#if defined(mingw32_HOST_OS)
getExecDir = try_size 2048 -- plenty, PATH_MAX is 512 under Win32.
where
try_size size = allocaArray (fromIntegral size) $ \buf -> do
ret <- c_GetModuleFileName nullPtr buf size
case ret of
0 -> return Nothing
_ | ret < size -> fmap (Just . dropFileName) $ peekCWString buf
| otherwise -> try_size (size * 2)
foreign import stdcall unsafe "windows.h GetModuleFileNameW"
c_GetModuleFileName :: Ptr () -> CWString -> Word32 -> IO Word32
#else
getExecDir = return Nothing
#endif
#endif
| randen/haddock | haddock-api/src/Haddock.hs | bsd-2-clause | 20,662 | 0 | 23 | 4,904 | 4,500 | 2,328 | 2,172 | 335 | 6 |
{-# LANGUAGE DeriveDataTypeable, GeneralizedNewtypeDeriving #-}
module Propellor.Types.Dns where
import Propellor.Types.OS (HostName)
import Propellor.Types.Empty
import Propellor.Types.Info
import Data.Word
import Data.Monoid
import qualified Data.Map as M
import qualified Data.Set as S
import Data.List
import Data.String.Utils (split, replace)
type Domain = String
data IPAddr = IPv4 String | IPv6 String
deriving (Read, Show, Eq, Ord)
fromIPAddr :: IPAddr -> String
fromIPAddr (IPv4 addr) = addr
fromIPAddr (IPv6 addr) = addr
newtype AliasesInfo = AliasesInfo (S.Set HostName)
deriving (Show, Eq, Ord, Monoid, Typeable)
instance IsInfo AliasesInfo where
propagateInfo _ = False
toAliasesInfo :: [HostName] -> AliasesInfo
toAliasesInfo l = AliasesInfo (S.fromList l)
fromAliasesInfo :: AliasesInfo -> [HostName]
fromAliasesInfo (AliasesInfo s) = S.toList s
newtype DnsInfo = DnsInfo { fromDnsInfo :: S.Set Record }
deriving (Show, Eq, Ord, Monoid, Typeable)
toDnsInfo :: S.Set Record -> DnsInfo
toDnsInfo = DnsInfo
-- | DNS Info is propagated, so that e.g. aliases of a container
-- are reflected in the dns for the host where it runs.
instance IsInfo DnsInfo where
propagateInfo _ = True
-- | Represents a bind 9 named.conf file.
data NamedConf = NamedConf
{ confDomain :: Domain
, confDnsServerType :: DnsServerType
, confFile :: FilePath
, confMasters :: [IPAddr]
, confAllowTransfer :: [IPAddr]
, confLines :: [String]
}
deriving (Show, Eq, Ord)
data DnsServerType = Master | Secondary
deriving (Show, Eq, Ord)
-- | Represents a bind 9 zone file.
data Zone = Zone
{ zDomain :: Domain
, zSOA :: SOA
, zHosts :: [(BindDomain, Record)]
}
deriving (Read, Show, Eq)
-- | Every domain has a SOA record, which is big and complicated.
data SOA = SOA
{ sDomain :: BindDomain
-- ^ Typically ns1.your.domain
, sSerial :: SerialNumber
-- ^ The most important parameter is the serial number,
-- which must increase after each change.
, sRefresh :: Integer
, sRetry :: Integer
, sExpire :: Integer
, sNegativeCacheTTL :: Integer
}
deriving (Read, Show, Eq)
-- | Types of DNS records.
--
-- This is not a complete list, more can be added.
data Record
= Address IPAddr
| CNAME BindDomain
| MX Int BindDomain
| NS BindDomain
| TXT String
| SRV Word16 Word16 Word16 BindDomain
| SSHFP Int Int String
| INCLUDE FilePath
| PTR ReverseIP
deriving (Read, Show, Eq, Ord, Typeable)
-- | An in-addr.arpa record corresponding to an IPAddr.
type ReverseIP = String
reverseIP :: IPAddr -> ReverseIP
reverseIP (IPv4 addr) = intercalate "." (reverse $ split "." addr) ++ ".in-addr.arpa"
reverseIP addr@(IPv6 _) = reverse (intersperse '.' $ replace ":" "" $ fromIPAddr $ canonicalIP addr) ++ ".ip6.arpa"
-- | Converts an IP address (particularly IPv6) to canonical, fully
-- expanded form.
canonicalIP :: IPAddr -> IPAddr
canonicalIP (IPv4 addr) = IPv4 addr
canonicalIP (IPv6 addr) = IPv6 $ intercalate ":" $ map canonicalGroup $ split ":" $ replaceImplicitGroups addr
where
canonicalGroup g
| l <= 4 = replicate (4 - l) '0' ++ g
                | otherwise = error $ "IPv6 group " ++ g ++ " has more than 4 hex digits"
where
l = length g
emptyGroups n = iterate (++ ":") "" !! n
numberOfImplicitGroups a = 8 - length (split ":" $ replace "::" "" a)
replaceImplicitGroups a = concat $ aux $ split "::" a
where
aux [] = []
aux (x : xs) = x : emptyGroups (numberOfImplicitGroups a) : xs
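-- Rough examples of the two conversions above (addresses chosen arbitrarily):
--
-- > reverseIP (IPv4 "93.184.216.34") == "34.216.184.93.in-addr.arpa"
-- > canonicalIP (IPv6 "2001:db8::1") == IPv6 "2001:0db8:0000:0000:0000:0000:0000:0001"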
getIPAddr :: Record -> Maybe IPAddr
getIPAddr (Address addr) = Just addr
getIPAddr _ = Nothing
getCNAME :: Record -> Maybe BindDomain
getCNAME (CNAME d) = Just d
getCNAME _ = Nothing
getNS :: Record -> Maybe BindDomain
getNS (NS d) = Just d
getNS _ = Nothing
-- | Bind serial numbers are unsigned, 32 bit integers.
type SerialNumber = Word32
-- | Domains in the zone file must end with a period if they are absolute.
--
-- Let's use a type to keep absolute domains straight from relative
-- domains.
--
-- The RootDomain refers to the top level of the domain, so can be used
-- to add nameservers, MX's, etc to a domain.
data BindDomain = RelDomain Domain | AbsDomain Domain | RootDomain
deriving (Read, Show, Eq, Ord)
domainHostName :: BindDomain -> Maybe HostName
domainHostName (RelDomain d) = Just d
domainHostName (AbsDomain d) = Just d
domainHostName RootDomain = Nothing
newtype NamedConfMap = NamedConfMap (M.Map Domain NamedConf)
deriving (Eq, Ord, Show, Typeable)
instance IsInfo NamedConfMap where
propagateInfo _ = False
-- | Adding a Master NamedConf stanza for a particular domain always
-- overrides an existing Secondary stanza for that domain, while a
-- Secondary stanza is only added when there is no existing Master stanza.
instance Monoid NamedConfMap where
mempty = NamedConfMap M.empty
mappend (NamedConfMap old) (NamedConfMap new) = NamedConfMap $
M.unionWith combiner new old
where
combiner n o = case (confDnsServerType n, confDnsServerType o) of
(Secondary, Master) -> o
_ -> n
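-- A rough sketch of the override rule above; @d@, @masterConf@ and
-- @secondaryConf@ are hypothetical values for the same domain, with
-- @confDnsServerType masterConf == Master@ and
-- @confDnsServerType secondaryConf == Secondary@:
--
-- > let m = NamedConfMap (M.singleton d masterConf)
-- >     s = NamedConfMap (M.singleton d secondaryConf)
-- > in (m `mappend` s, s `mappend` m) -- both keep the Master stanza for d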
instance Empty NamedConfMap where
isEmpty (NamedConfMap m) = isEmpty m
fromNamedConfMap :: NamedConfMap -> M.Map Domain NamedConf
fromNamedConfMap (NamedConfMap m) = m
| np/propellor | src/Propellor/Types/Dns.hs | bsd-2-clause | 5,132 | 36 | 13 | 939 | 1,464 | 791 | 673 | 112 | 2 |
----------------------------------------------------------------------------
-- |
-- Module : ANoExportList
-- Copyright : (c) Sergey Vinokurov 2018
-- License : BSD3-style (see LICENSE)
-- Maintainer : [email protected]
----------------------------------------------------------------------------
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
module ANoExportList where
data FooA = FooA1
{ fooA1 :: Int
, fooA2 :: !Double
}
newtype BarA =
BarA1
{ unBarA :: [Double] }
pattern BazAP :: Double -> Double -> BarA
pattern BazAP x y = BarA1 [x, y]
quuxA :: Int -> Int
quuxA x = x
pattern FrobAP :: Int -> FooA
pattern FrobAP x = FooA1 { fooA1 = x, fooA2 = 0 }
data QuuxA =
QuuxA1 Int
| QuuxA2
pattern QuuxAP :: Int -> QuuxA
pattern QuuxAP n = QuuxA1 n
commonFunc :: Double -> Double
commonFunc x = x + x * x
$([d|
derivedA :: Int -> Int
derivedA x = x
|])
| sergv/tags-server | test-data/0012resolve_reexport_import_cycles/ANoExportList.hs | bsd-3-clause | 957 | 0 | 9 | 204 | 216 | 124 | 92 | 28 | 1 |
-- |
-- Module : $Header$
-- Copyright : (c) 2013-2014 Galois, Inc.
-- License : BSD3
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE PatternGuards #-}
module Cryptol.Symbolic where
import Control.Applicative
import Control.Monad (replicateM, when, zipWithM)
import Control.Monad.IO.Class
import Data.List (transpose)
import qualified Data.Map as Map
import Data.Maybe
import Data.Monoid (Monoid(..))
import Data.Traversable (traverse)
import qualified Control.Exception as X
import qualified Data.SBV as SBV
import Data.SBV.Provers.Prover hiding (verbose)
import Data.SBV.BitVectors.Data hiding (verbose)
import qualified Cryptol.ModuleSystem as M
import qualified Cryptol.ModuleSystem.Env as M
import Cryptol.Symbolic.BitVector
import Cryptol.Symbolic.Prims
import Cryptol.Symbolic.Value
import qualified Cryptol.Eval.Value as Eval
import qualified Cryptol.Eval.Type (evalType)
import qualified Cryptol.Eval.Env (EvalEnv(..))
import Cryptol.TypeCheck.AST
import Cryptol.Utils.PP
import Cryptol.Utils.Panic(panic)
-- External interface ----------------------------------------------------------
proverConfigs :: [(String, SBV.SMTConfig)]
proverConfigs =
[ ("cvc4" , SBV.cvc4 )
, ("yices" , SBV.yices )
, ("z3" , SBV.z3 )
, ("boolector", SBV.boolector)
, ("mathsat" , SBV.mathSAT )
, ("offline" , SBV.defaultSMTCfg )
]
proverNames :: [String]
proverNames = map fst proverConfigs
lookupProver :: String -> SBV.SMTConfig
lookupProver s =
case lookup s proverConfigs of
Just cfg -> cfg
-- should be caught by UI for setting prover user variable
Nothing -> panic "Cryptol.Symbolic" [ "invalid prover: " ++ s ]
-- | A prover result is either an error message, or potentially a
-- counterexample or satisfying assignment.
type ProverResult = Either String (Either [Type] [(Type, Expr, Eval.Value)])
satSMTResult :: SatResult -> SMTResult
satSMTResult (SatResult r) = r
thmSMTResult :: ThmResult -> SMTResult
thmSMTResult (ThmResult r) = r
-- | TODO: Clean up ProverResult; it has grown too much to be a proper datatype!
sat :: Bool
-> (String, Bool, Bool)
-> [DeclGroup]
-> Maybe FilePath
-> (Expr, Schema)
-> M.ModuleCmd ProverResult -- ^ Returns a list of arguments for a satisfying assignment
sat isSat (proverName, useSolverIte, verbose) edecls mfile (expr, schema) = protectStack useSolverIte $ \modEnv -> do
let extDgs = allDeclGroups modEnv ++ edecls
let prover = (lookupProver proverName) { smtFile = mfile }
let tyFn = if isSat then existsFinType else forallFinType
let runFn | isSat = fmap satSMTResult . SBV.satWith prover
| otherwise = fmap thmSMTResult . SBV.proveWith prover
case predArgTypes schema of
Left msg -> return (Right (Left msg, modEnv), [])
Right ts -> do when verbose $ putStrLn "Simulating..."
let env = evalDecls (emptyEnv useSolverIte) extDgs
let v = evalExpr env expr
result <- runFn $ do
args <- mapM tyFn ts
b <- return $! fromVBit (foldl fromVFun v args)
when verbose $ liftIO $ putStrLn $
"Calling " ++ proverName ++ "..."
return b
esatexprs <- case result of
SBV.Satisfiable {} ->
let Right (_, cws) = SBV.getModel result
(vs, _) = parseValues ts cws
sattys = unFinType <$> ts
satexprs = zipWithM Eval.toExpr sattys vs
in case zip3 sattys <$> satexprs <*> pure vs of
Nothing ->
panic "Cryptol.Symbolic.sat"
[ "unable to make assignment into expression" ]
Just tevs -> return $ Right (Right tevs)
SBV.Unsatisfiable {} ->
return $ Right (Left (unFinType <$> ts))
_ -> return $ Left (rshow result)
where rshow | isSat = show . SatResult
| otherwise = show . ThmResult
return (Right (esatexprs, modEnv), [])
satOffline :: Bool
-> Bool
-> Bool
-> [DeclGroup]
-> Maybe FilePath
-> (Expr, Schema)
-> M.ModuleCmd (Either String ())
satOffline isSat useIte vrb edecls mfile (expr, schema) =
protectStack useIte $ \modEnv -> do
let extDgs = allDeclGroups modEnv ++ edecls
let tyFn = if isSat then existsFinType else forallFinType
let filename = fromMaybe "standard output" mfile
case predArgTypes schema of
Left msg -> return (Right (Left msg, modEnv), [])
Right ts ->
do when vrb $ putStrLn "Simulating..."
let env = evalDecls (emptyEnv useIte) extDgs
let v = evalExpr env expr
let satWord | isSat = "satisfiability"
| otherwise = "validity"
txt <- compileToSMTLib True isSat $ do
args <- mapM tyFn ts
b <- return $! fromVBit (foldl fromVFun v args)
liftIO $ putStrLn $
"Writing to SMT-Lib file " ++ filename ++ "..."
return b
liftIO $ putStrLn $
"To determine the " ++ satWord ++
" of the expression, use an external SMT solver."
case mfile of
Just path -> writeFile path txt
Nothing -> putStr txt
return (Right (Right (), modEnv), [])
protectStack :: Bool
-> M.ModuleCmd (Either String a)
-> M.ModuleCmd (Either String a)
protectStack usingITE cmd modEnv = X.catchJust isOverflow (cmd modEnv) handler
where isOverflow X.StackOverflow = Just ()
isOverflow _ = Nothing
msg | usingITE = msgBase
| otherwise = msgBase ++ "\n" ++
"Using ':set iteSolver=on' might help."
msgBase = "Symbolic evaluation failed to terminate."
handler () = return (Right (Left msg, modEnv), [])
parseValues :: [FinType] -> [SBV.CW] -> ([Eval.Value], [SBV.CW])
parseValues [] cws = ([], cws)
parseValues (t : ts) cws = (v : vs, cws'')
where (v, cws') = parseValue t cws
(vs, cws'') = parseValues ts cws'
parseValue :: FinType -> [SBV.CW] -> (Eval.Value, [SBV.CW])
parseValue FTBit [] = panic "Cryptol.Symbolic.parseValue" [ "empty FTBit" ]
parseValue FTBit (cw : cws) = (Eval.VBit (SBV.fromCW cw), cws)
parseValue (FTSeq 0 FTBit) cws = (Eval.VWord (Eval.BV 0 0), cws)
parseValue (FTSeq n FTBit) (cw : cws)
| SBV.isBounded cw = (Eval.VWord (Eval.BV (toInteger n) (SBV.fromCW cw)), cws)
| otherwise = panic "Cryptol.Symbolic.parseValue" [ "unbounded concrete word" ]
parseValue (FTSeq n FTBit) cws = (Eval.VSeq True vs, cws')
where (vs, cws') = parseValues (replicate n FTBit) cws
parseValue (FTSeq n t) cws = (Eval.VSeq False vs, cws')
where (vs, cws') = parseValues (replicate n t) cws
parseValue (FTTuple ts) cws = (Eval.VTuple vs, cws')
where (vs, cws') = parseValues ts cws
parseValue (FTRecord fs) cws = (Eval.VRecord (zip ns vs), cws')
where (ns, ts) = unzip fs
(vs, cws') = parseValues ts cws
allDeclGroups :: M.ModuleEnv -> [DeclGroup]
allDeclGroups = concatMap mDecls . M.loadedModules
data FinType
= FTBit
| FTSeq Int FinType
| FTTuple [FinType]
| FTRecord [(Name, FinType)]
numType :: Type -> Maybe Int
numType (TCon (TC (TCNum n)) [])
| 0 <= n && n <= toInteger (maxBound :: Int) = Just (fromInteger n)
numType (TUser _ _ t) = numType t
numType _ = Nothing
finType :: Type -> Maybe FinType
finType ty =
case ty of
TCon (TC TCBit) [] -> Just FTBit
TCon (TC TCSeq) [n, t] -> FTSeq <$> numType n <*> finType t
TCon (TC (TCTuple _)) ts -> FTTuple <$> traverse finType ts
TRec fields -> FTRecord <$> traverse (traverseSnd finType) fields
TUser _ _ t -> finType t
_ -> Nothing
unFinType :: FinType -> Type
unFinType fty =
case fty of
FTBit -> tBit
FTSeq l ety -> tSeq (tNum l) (unFinType ety)
FTTuple ftys -> tTuple (unFinType <$> ftys)
FTRecord fs -> tRec (zip fns tys)
where
fns = fst <$> fs
tys = unFinType . snd <$> fs
predArgTypes :: Schema -> Either String [FinType]
predArgTypes schema@(Forall ts ps ty)
| null ts && null ps =
case go ty of
Just fts -> Right fts
Nothing -> Left $ "Not a valid predicate type:\n" ++ show (pp schema)
| otherwise = Left $ "Not a monomorphic type:\n" ++ show (pp schema)
where
go (TCon (TC TCBit) []) = Just []
go (TCon (TC TCFun) [ty1, ty2]) = (:) <$> finType ty1 <*> go ty2
go (TUser _ _ t) = go t
go _ = Nothing
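-- For instance (a sketch following the cases above), a monomorphic predicate
-- of type @[8] -> [8] -> Bit@ is expected to give
-- @Right [FTSeq 8 FTBit, FTSeq 8 FTBit]@, while a schema with type variables
-- or constraints is rejected as not monomorphic.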
forallFinType :: FinType -> Symbolic Value
forallFinType ty =
case ty of
FTBit -> VBit <$> SBV.forall_
FTSeq 0 FTBit -> return $ VWord (SBV.literal (bv 0 0))
FTSeq n FTBit -> VWord <$> (forallBV_ n)
FTSeq n t -> VSeq False <$> replicateM n (forallFinType t)
FTTuple ts -> VTuple <$> mapM forallFinType ts
FTRecord fs -> VRecord <$> mapM (traverseSnd forallFinType) fs
existsFinType :: FinType -> Symbolic Value
existsFinType ty =
case ty of
FTBit -> VBit <$> SBV.exists_
FTSeq 0 FTBit -> return $ VWord (SBV.literal (bv 0 0))
FTSeq n FTBit -> VWord <$> existsBV_ n
FTSeq n t -> VSeq False <$> replicateM n (existsFinType t)
FTTuple ts -> VTuple <$> mapM existsFinType ts
FTRecord fs -> VRecord <$> mapM (traverseSnd existsFinType) fs
-- Simulation environment ------------------------------------------------------
data Env = Env
{ envVars :: Map.Map QName Value
, envTypes :: Map.Map TVar TValue
, envIteSolver :: Bool
}
instance Monoid Env where
mempty = Env
{ envVars = Map.empty
, envTypes = Map.empty
, envIteSolver = False
}
mappend l r = Env
{ envVars = Map.union (envVars l) (envVars r)
, envTypes = Map.union (envTypes l) (envTypes r)
, envIteSolver = envIteSolver l || envIteSolver r
}
emptyEnv :: Bool -> Env
emptyEnv useIteSolver = Env Map.empty Map.empty useIteSolver
-- | Bind a variable in the evaluation environment.
bindVar :: (QName, Value) -> Env -> Env
bindVar (n, thunk) env = env { envVars = Map.insert n thunk (envVars env) }
-- | Lookup a variable in the environment.
lookupVar :: QName -> Env -> Maybe Value
lookupVar n env = Map.lookup n (envVars env)
-- | Bind a type variable of kind *.
bindType :: TVar -> TValue -> Env -> Env
bindType p ty env = env { envTypes = Map.insert p ty (envTypes env) }
-- | Lookup a type variable.
lookupType :: TVar -> Env -> Maybe TValue
lookupType p env = Map.lookup p (envTypes env)
-- Expressions -----------------------------------------------------------------
evalExpr :: Env -> Expr -> Value
evalExpr env expr =
case expr of
ECon econ -> evalECon econ
EList es ty -> VSeq (tIsBit ty) (map eval es)
ETuple es -> VTuple (map eval es)
ERec fields -> VRecord [ (f, eval e) | (f, e) <- fields ]
ESel e sel -> evalSel sel (eval e)
EIf b e1 e2 -> evalIf (fromVBit (eval b)) (eval e1) (eval e2)
where evalIf = if envIteSolver env then SBV.sBranch else SBV.ite
EComp ty e mss -> evalComp env (evalType env ty) e mss
EVar n -> case lookupVar n env of
Just x -> x
_ -> panic "Cryptol.Symbolic.evalExpr" [ "Variable " ++ show n ++ " not found" ]
-- TODO: how to deal with uninterpreted functions?
ETAbs tv e -> VPoly $ \ty -> evalExpr (bindType (tpVar tv) ty env) e
ETApp e ty -> fromVPoly (eval e) (evalType env ty)
EApp e1 e2 -> fromVFun (eval e1) (eval e2)
EAbs n _ty e -> VFun $ \x -> evalExpr (bindVar (n, x) env) e
EProofAbs _prop e -> eval e
EProofApp e -> eval e
ECast e _ty -> eval e
EWhere e ds -> evalExpr (evalDecls env ds) e
where
eval e = evalExpr env e
evalType :: Env -> Type -> TValue
evalType env ty = Cryptol.Eval.Type.evalType env' ty
where env' = Cryptol.Eval.Env.EvalEnv Map.empty (envTypes env)
evalSel :: Selector -> Value -> Value
evalSel sel v =
case sel of
TupleSel n _ ->
case v of
VTuple xs -> xs !! n -- 0-based indexing
VSeq b xs -> VSeq b (map (evalSel sel) xs)
VStream xs -> VStream (map (evalSel sel) xs)
VFun f -> VFun (\x -> evalSel sel (f x))
_ -> panic "Cryptol.Symbolic.evalSel" [ "Tuple selector applied to incompatible type" ]
RecordSel n _ ->
case v of
VRecord bs -> case lookup n bs of
Just x -> x
_ -> panic "Cryptol.Symbolic.evalSel" [ "Selector " ++ show n ++ " not found" ]
VSeq b xs -> VSeq b (map (evalSel sel) xs)
VStream xs -> VStream (map (evalSel sel) xs)
VFun f -> VFun (\x -> evalSel sel (f x))
_ -> panic "Cryptol.Symbolic.evalSel" [ "Record selector applied to non-record" ]
ListSel n _ -> case v of
VWord s -> VBit (SBV.sbvTestBit s n)
_ -> fromSeq v !! n -- 0-based indexing
-- Declarations ----------------------------------------------------------------
evalDecls :: Env -> [DeclGroup] -> Env
evalDecls = foldl evalDeclGroup
evalDeclGroup :: Env -> DeclGroup -> Env
evalDeclGroup env dg =
case dg of
NonRecursive d -> bindVar (evalDecl env d) env
Recursive ds -> let env' = foldr bindVar env lazyBindings
bindings = map (evalDecl env') ds
lazyBindings = [ (qname, copyBySchema env (dSignature d) v)
| (d, (qname, v)) <- zip ds bindings ]
in env' -- foldr bindVar env bindings
evalDecl :: Env -> Decl -> (QName, Value)
evalDecl env d = (dName d, evalExpr env (dDefinition d))
-- | Make a copy of the given value, building the spine based only on
-- the type without forcing the value argument. This lets us avoid
-- strictness problems when evaluating recursive definitions.
copyBySchema :: Env -> Schema -> Value -> Value
copyBySchema env0 (Forall params _props ty) = go params env0
where
go [] env v = logicUnary id id (evalType env ty) v
go (p : ps) env v =
VPoly (\t -> go ps (bindType (tpVar p) t env) (fromVPoly v t))
-- List Comprehensions ---------------------------------------------------------
-- | Evaluate a comprehension.
evalComp :: Env -> TValue -> Expr -> [[Match]] -> Value
evalComp env seqty body ms
| Just (len,el) <- isTSeq seqty = toSeq len el [ evalExpr e body | e <- envs ]
| otherwise = evalPanic "Cryptol.Eval" [ "evalComp given a non sequence"
, show seqty
]
-- XXX we could potentially print this as a number if the type was available.
where
-- generate a new environment for each iteration of each parallel branch
benvs = map (branchEnvs env) ms
-- take parallel slices of each environment. when the length of the list
-- drops below the number of branches, one branch has terminated.
allBranches es = length es == length ms
slices = takeWhile allBranches (transpose benvs)
-- join environments to produce environments at each step through the process.
envs = map mconcat slices
-- | Turn a list of matches into the final environments for each iteration of
-- the branch.
branchEnvs :: Env -> [Match] -> [Env]
branchEnvs env matches =
case matches of
[] -> [env]
m : ms -> do env' <- evalMatch env m
branchEnvs env' ms
-- | Turn a match into the list of environments it represents.
evalMatch :: Env -> Match -> [Env]
evalMatch env m = case m of
From n _ty expr -> [ bindVar (n, v) env | v <- fromSeq (evalExpr env expr) ]
Let d -> [ bindVar (evalDecl env d) env ]
| TomMD/cryptol | src/Cryptol/Symbolic.hs | bsd-3-clause | 16,295 | 232 | 23 | 4,814 | 4,655 | 2,505 | 2,150 | 318 | 18 |
{-# LANGUAGE DeriveDataTypeable #-}
-----------------------------------------------------------------------------
-- |
-- Module      :  XMonad.Hooks.MyWindowHistory
-- Copyright : (c) 2013 Dmitri Iouchtchenko
-- License : BSD3-style (see LICENSE)
--
-- Maintainer : Dmitri Iouchtchenko <[email protected]>
-- Stability : unstable
-- Portability : unportable
--
-- Keeps track of window viewing order.
--
-----------------------------------------------------------------------------
module XMonad.Hooks.MyWindowHistory
( -- * Usage
-- $usage
-- * Hooking
windowHistoryHook
-- * Querying
, windowHistory
) where
import XMonad
import XMonad.StackSet (currentTag)
import XMonad.StackSet as W
import qualified XMonad.Util.ExtensibleState as XS
import Control.Applicative
import Data.Maybe
import System.IO (hPutStrLn
,stderr)
-- $usage
-- To record the order in which you view windows, you can use this
-- module with the following in your @~\/.xmonad\/xmonad.hs@:
--
-- > import XMonad.Hooks.MyWindowHistory (windowHistoryHook)
--
-- Then add the hook to your 'logHook':
--
-- > main = xmonad $ def
-- > { ...
-- > , logHook = ... >> windowHistoryHook >> ...
-- > , ...
-- > }
--
-- To make use of the collected data, a query function is provided.
data WindowHistory =
    WindowHistory { history :: [Window] -- ^ Windows in reverse-chronological order.
}
deriving (Typeable, Read, Show)
instance ExtensionClass WindowHistory where
initialValue = WindowHistory []
extensionType = PersistentExtension
focus' :: Maybe (Stack a) -> Maybe a
focus' = maybe Nothing (Just . W.focus)
-- | A 'logHook' that keeps track of the order in which windows have
-- been viewed.
windowHistoryHook :: X ()
windowHistoryHook = do
  f <- focus' . W.stack . W.workspace . W.current <$> gets windowset
  case f of
    Nothing -> return ()
    Just w  -> XS.modify (makeFirst w)
-- | A list of windows in the order they have been viewed, with the most
-- recent first. No duplicates are present, but not all windows are
-- guaranteed to appear, and there may be windows that no longer exist.
windowHistory :: X [Window]
windowHistory = XS.gets history
-- | Cons the 'Window' onto the 'WindowHistory' if it is not
-- already there, or move it to the front if it is.
makeFirst :: Window -> WindowHistory -> WindowHistory
makeFirst w v = let (xs, ys) = break (w ==) $ history v
in v { history = w : (xs ++ drop 1 ys) }
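-- A rough sketch with numeric window ids (values chosen arbitrarily):
--
-- > history (makeFirst 3 (WindowHistory [1,2,3,4])) -- [3,1,2,4]
-- > history (makeFirst 9 (WindowHistory [1,2,3,4])) -- [9,1,2,3,4]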
| eb-gh-cr/XMonadContrib1 | XMonad/Hooks/MyWindowHistory.hs | bsd-3-clause | 2,753 | 0 | 15 | 716 | 403 | 235 | 168 | 33 | 2 |
-- | Data structures for an item (post or comment) and the
-- overall structure in terms of parents and children.
module Blog.Model.Entry where
import qualified Blog.FrontEnd.Urls as U
import Utilities
import qualified Blog.Constants as C
import Maybe
import List ( sortBy, isPrefixOf, intersperse)
import qualified Data.Map as M
import Data.Map ( (!) )
type ISO8601DatetimeString = String
type XhtmlString = String
-- | Overall data model for the runtime.
data Model = Model { -- |
by_permatitle :: M.Map String Item,
by_int_id :: M.Map Int Item,
child_map :: M.Map Int [Int],
all_items :: [Item],
next_id :: Int }
empty :: Model
empty = Model M.empty M.empty M.empty [] 0
data Kind = Post | Comment | Trackback
deriving (Show, Read, Eq)
build_model :: [Item] -> Model
build_model [] = empty
build_model items = Model (map_by permatitle sorted_items)
bid
(build_child_map sorted_items)
(sorted_items)
(n+1)
where
sorted_items = sort_by_created_reverse items
bid = (map_by internal_id sorted_items)
n = fst . M.findMax $ bid
build_child_map :: [Item] -> M.Map Int [Int]
build_child_map i = build_child_map_ (M.fromList $ (map (\x -> (internal_id x,[])) i)) i
-- Constructed to take advantage of the input being in sorted order.
build_child_map_ :: M.Map Int [Int] -> [Item] -> M.Map Int [Int]
build_child_map_ m [] = m
build_child_map_ m (i:is) = if (parent i == Nothing) then
build_child_map_ m is
else
build_child_map_ (M.insertWith (++) (unwrap $ parent i) [internal_id i] m) is
-- | Insert an item, presuming that all of its data other than
-- internal identifier have been correctly set.
insert :: Model -> Item -> (Item,Model)
insert m i = (i', m { by_permatitle = M.insert (permatitle i') i' $ by_permatitle m
, by_int_id = M.insert n i' $ by_int_id m
, child_map = M.insert (internal_id i') [] $
case parent i of
Nothing ->
child_map m
(Just p_id) ->
M.insert p_id (insert_comment_ m (item_by_id m p_id) i') $ child_map m
, all_items = insert_ after (all_items m) i'
, next_id = n + 1 } )
where
n = next_id m
i' = i { internal_id = n }
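-- A rough usage sketch; @m@ and @i@ are hypothetical, with every field of @i@
-- except 'internal_id' already populated:
--
-- > let (i', m') = insert m i -- i' carries the freshly assigned internal_id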
insert_comment_ :: Model -> Item -> Item -> [Int]
insert_comment_ m p c = map internal_id (insert_ before (children m p) c)
insert_ :: (Item -> Item -> Bool) -> [Item] -> Item -> [Item]
insert_ _ [] y = [y]
insert_ o s@(x:xs) y = if (x `o` y) then
(x:(insert_ o xs y))
else
(y:s)
after :: Item -> Item -> Bool
after a b = (created a) > (created b)
before :: Item -> Item -> Bool
before a b = (created a) < (created b)
-- | Apply a structure-preserving function, i.e., one that does not
-- change parent/child relationships or ids, to a specific item.
alter :: (Item -> Item) -> Model -> Item -> IO Model
alter f m i = do { ts <- now
; let i' = (f i) { updated = ts }
; return $ m { by_permatitle = M.insert (permatitle i') i' $ by_permatitle m
, by_int_id = M.insert (internal_id i') i' $ by_int_id m
, child_map = if (parent i == Nothing) then
child_map m
else
M.insert p_id resort_siblings $ child_map m
, all_items = insert_ after all_but i' } }
where
not_i = \item -> (internal_id item) /= (internal_id i)
all_but = filter not_i $ all_items m
p_id = unwrap $ parent i
p = item_by_id m p_id
resort_siblings = map internal_id (insert_ before (filter not_i $ children m p) i)
cloak :: Model -> Item -> IO Model
cloak = alter (\i -> i { visible = False })
uncloak :: Model -> Item -> IO Model
uncloak = alter (\i -> i { visible = True })
permatitle_exists :: Model -> String -> Bool
permatitle_exists = (flip M.member) . by_permatitle
max_id :: Model -> Int
max_id = fst . M.findMax . by_int_id
post_by_permatitle :: Model -> String -> Item
post_by_permatitle = (!) . by_permatitle
maybe_post_by_permatitle :: Model -> String -> Maybe Item
maybe_post_by_permatitle = (flip M.lookup) . by_permatitle
item_by_id :: Model -> Int -> Item
item_by_id = (!) . by_int_id
children :: Model -> Item -> [Item]
children m i = map (item_by_id m) ((child_map m) ! (internal_id i))
unwrap :: Maybe a -> a
unwrap (Just x) = x
unwrap Nothing = error "Can't unwrap nothing!"
data Author = Author { name :: String,
uri :: Maybe String,
email :: Maybe String,
show_email :: Bool
}
deriving ( Show,Read,Eq )
-- | General purpose runtime data structure for holding a post or
-- comment. For a comment, a number of the fields will be ignored
-- (e.g., comments and tags) until/if the presentation and syndication
-- system gets fancier.
data Item = Item { -- | an internal unique number for this post
internal_id :: Int,
-- | the kind of item that this represents
kind :: Kind,
-- | the title of the post, as it should be rendered on
-- the web or inserted in an Atom feed; this should be a
-- valid XHTML fragment.
title :: XhtmlString,
-- | the summary of the post, as it should be rendered on
-- the web or intersted into an Atom feed; this should be
-- a valid XHTML fragment.
summary :: Maybe XhtmlString,
-- | the body of the post as an XHTML fragment. This
-- will be wrapped in an XHTML @<div>@ when rendered on
-- the web or in a feed.
body :: XhtmlString,
-- | tags for the post, if any, expected to be in
-- alphabetical order and consisting of letters, digits,
-- dashes, and/or underscores.
tags :: [String],
-- | a generated UID for the post; this is expected to be
-- suitable for use as an Atom GUID. The expectation is
-- that it will be supplied by the implementation when
-- the post is ingested.
uid :: String,
-- | a permanent title for the item, consisting of only
-- lowercase letters, digits, and dashes.
permatitle :: String,
-- | the timestamp, as an ISO8601 datetime, when the post
-- came into being. This is never blank and would be
-- supplied by the implementation when the post is
-- ingested.
created :: ISO8601DatetimeString,
-- | the timestamp, as an ISO8601 datetime, when the post
-- was updated. Initially, this is equal to the value of
-- the 'created' field.
updated :: ISO8601DatetimeString,
-- | the author of the post, expected to be hardwired to
-- the author of the blog
author :: Author,
-- | whether or not the item is to be displayed.
visible :: Bool,
-- | this item's parent, if any.
parent :: Maybe Int
}
deriving ( Show, Read, Eq )
-- | Compute a permalink for the item relative to the supplied base URL.
permalink :: Model
-> Item -- ^ the item
-> String
permalink m i = U.post (relative_url m i)
relative_url :: Model -> Item -> String
relative_url m = _form_permalink . (ancestors m)
_form_permalink :: [Item] -> String
_form_permalink [] = ""
_form_permalink [i] = let s = permatitle i in
if (kind i == Post) then
"/" ++ s
else
"#" ++ s
_form_permalink (i:is) = if (kind i == Post) then
("/" ++ permatitle i) ++ (_form_permalink is)
else
(_form_permalink is)
ancestor_path :: Model -> Item -> String
ancestor_path m i = concat . (intersperse "/") . (map permatitle) $ ancestors m i
ancestors :: Model -> Item -> [Item]
ancestors m i = ancestors_ m [] (Just $ internal_id i)
ancestors_ :: Model -> [Item] -> Maybe Int -> [Item]
ancestors_ _ is Nothing = is
ancestors_ m is (Just i) = ancestors_ m (i':is) (parent i')
where
i' = item_by_id m i
lastUpdated :: [Item] -> ISO8601DatetimeString
lastUpdated ps = maximum (map updated ps)
drop_invisible :: [Item] -> [Item]
drop_invisible = filter visible
sort_by_created :: [Item] -> [Item]
sort_by_created = sortBy created_sort
created_sort :: Item -> Item -> Ordering
created_sort a b = compare (created a) (created b)
sort_by_created_reverse :: [Item] -> [Item]
sort_by_created_reverse = sortBy created_sort_reverse
created_sort_reverse :: Item -> Item -> Ordering
created_sort_reverse a b = compare (created b) (created a)
-- | Filter a list of items according to a date fragment
date_fragment_filter_ :: ISO8601DatetimeString -> [Item] -> [Item]
date_fragment_filter_ s = filter ((s `isPrefixOf`) . created)
-- | Filter a list of posts for those made in a specific year.
year_filter :: Int -- ^ year
-> [Item] -> [Item]
year_filter y = date_fragment_filter_ $ show y
-- | Filter a list of posts for those made in a specific month.
month_filter :: Int -- ^ year
-> Int -- ^ month
-> [Item] -> [Item]
month_filter y m | (0 < m) && (m < 13) = date_fragment_filter_ ((show y) ++ (pad_ m))
| otherwise = const []
-- | Filter a list of posts for those made on a specific day
day_filter :: Int -- ^ year
-> Int -- ^ month
-> Int -- ^ day
-> [Item] -> [Item]
day_filter y m d = date_fragment_filter_ ((show y) ++ (pad_ m) ++ (pad_ d))
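-- For example, @month_filter 2008 2@ keeps exactly those items whose
-- 'created' timestamp begins with the prefix "2008-02".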
-- | Utility function to zero pad months and days in date expressions.
pad_ :: Int -> String
pad_ i | i < 10 = "-0" ++ (show i)
| otherwise = ('-':(show i))
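-- A couple of concrete cases:
--
-- > pad_ 3 == "-03"
-- > pad_ 12 == "-12"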
-- to do: make this faster using the sortedness.
tags_filter :: [String] -> [Item] -> [Item]
tags_filter t p = foldl (flip ($)) p (map tag_filter t)
tag_filter :: String -> [Item] -> [Item]
tag_filter t = filter ((t `elem`) . tags)
plink_filterf :: String -> Item -> Bool
plink_filterf = flip $ (==) . permatitle
plink_filter :: String -> [Item] -> [Item]
plink_filter = filter . plink_filterf
ymd_plink_finder :: Int -> Int -> Int -> String -> [Item] -> [Item]
ymd_plink_finder y m d t = (plink_filter t) . (day_filter y m d)
all_posts :: Model -> [Item]
all_posts = (filter (\x -> Post == kind x)) . all_items
all_comments :: Model -> [Item]
all_comments = (filter (\x -> Comment == kind x)) . all_items
flatten :: Model -> [Item] -> [Item]
flatten m = flatten_ (children m)
flatten_ :: (a -> [a]) -> [a] -> [a]
flatten_ _ [] = []
flatten_ f (i:is) = (i:(flatten_ f (f i))) ++ (flatten_ f is)
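-- A small sketch of the traversal order, using a toy child function:
--
-- > flatten_ (\x -> if x < 3 then [x + 1] else []) [1 :: Int] -- [1,2,3]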
concat_comments :: Model -> [Item] -> [Item]
concat_comments m = (foldr (++) []) . (map $ children m)
(</>) :: String -> String -> String
s </> t = s ++ ('/':t)
to_string :: Item -> String
to_string i = concat [metadata i, "\n", body_block i, "\n", summary_block i]
metadata :: Item -> String
metadata i = unlines $ apply i [ ("internal_id",show . internal_id),
("parent", show . parent),
("title",title),
("tags",show_no_quotes . tags),
("permatitle",permatitle),
("kind",show . kind),
("uid",uid),
("created",created),
("updated",updated),
("author",show . author),
("visible",show . visible) ]
show_no_quotes :: [String] -> String
show_no_quotes = concat . (intersperse ", ")
apply :: Item -> [(String,(Item -> String))] -> [String]
apply _ [] = []
apply i (x:xs) = ((concat [fst x, ": ", (snd x) i]) : (apply i xs))
body_block :: Item -> String
body_block i = concat ["--- START BODY ---\n",
(body i),
"\n--- END BODY ---\n"]
summary_block :: Item -> String
summary_block i | summary i == Nothing = ""
| otherwise = concat ["--- START SUMMARY ---\n",
(unwrap $ summary i),
"\n--- END SUMMARY ---\n"]
default_author :: Author
default_author = Author C.author_name C.author_uri C.author_email True | prb/perpubplat | src/Blog/Model/Entry.hs | bsd-3-clause | 13,296 | 219 | 17 | 4,595 | 3,632 | 1,987 | 1,645 | 220 | 3 |
{-# LANGUAGE FlexibleContexts, FlexibleInstances, RankNTypes #-}
module Common.DataStructure.UnionFind (
UFSet
, make
, find
, union
) where
import Control.Monad (liftM2)
import Control.Monad.Primitive
import qualified Common.MonadRef as R
import qualified Data.Vector.Unboxed as V
import qualified Data.Vector.Unboxed.Mutable as MV
data (PrimMonad m, R.MonadRef r m) => UFSet r m = UFSet {
ufsSize :: r Int,
ufsSet :: MV.MVector (PrimState m) Int
}
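-- | Create a union-find structure over the elements @0 .. n-1@; every element
-- starts out as the representative of its own singleton set.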
make :: (PrimMonad m, R.MonadRef r m) => Int -> m (UFSet r m)
make n = liftM2 UFSet (R.new n) (V.thaw $ V.fromList [0 .. n - 1])
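-- | Return the representative of the set containing @u@, compressing the path
-- to the root on the way back.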
find :: (PrimMonad m, R.MonadRef r m) => UFSet r m -> Int -> m Int
find ufs u = do
f <- MV.unsafeRead (ufsSet ufs) u
if u == f
then return u
else do
f' <- find ufs f
MV.unsafeWrite (ufsSet ufs) u f'
return f'
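-- | Merge the sets containing @u@ and @v@; returns 'True' if they were
-- previously disjoint, 'False' if they already shared a representative.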
union :: (PrimMonad m, R.MonadRef r m) => UFSet r m -> Int -> Int -> m Bool
union ufs u v = do
u' <- find ufs u
v' <- find ufs v
if u' /= v'
then do
MV.unsafeWrite (ufsSet ufs) u' v'
R.modify_' (ufsSize ufs) pred
return True
else return False
| foreverbell/project-euler-solutions | lib/Common/DataStructure/UnionFind.hs | bsd-3-clause | 1,127 | 0 | 12 | 289 | 471 | 244 | 227 | 35 | 2 |
{-# LANGUAGE NoImplicitPrelude, OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
module FreeAgent.Server.ScheduleSpec (main, spec) where
import FreeAgent.AgentPrelude
import FreeAgent.Process
import FreeAgent.Core.Internal.Lenses
import FreeAgent.Core.Protocol.Schedule as Schedule
import FreeAgent.Core.Protocol.Executive.History (findResultsSince)
import FreeAgent.Core.Protocol.Executive (StoreAction(..))
import FreeAgent.Server.ManagedAgent (callServ)
import FreeAgent.TestHelper
import FreeAgent.Fixtures
import Control.Concurrent.Lifted (threadDelay)
import Control.Distributed.Process.Extras.Timer (Tick(..))
import Test.Hspec
main :: IO ()
main = hspec spec
spec :: Spec
spec = --parallel $
describe "Basic scheduler operations" $ do
it "is started by Core supervisor" $ testAgent (
do Just _ <- whereis serverName
return True
) `shouldReturn` True
it "can schedule and find an event" $ testAgent (
do Right () <- schedule (key testAction) "@hourly" Never
Right _ <- lookupEvent (key testAction)
Right () <- unschedule (key testAction)
return True
) `shouldReturn` True
it "won't schedule an older Event" $ testAgent (
do Right () <- schedule "test older" "@hourly" Never
Right old <- lookupEvent "test older"
Right () <- callServ $
ScheduleAddNewerEvent "test older"
"@hourly"
(Fixed 1 10)
(schedModified old)
Right stillold <- lookupEvent "test older"
Right _ <- unschedule "test older"
return (schedRetry stillold)
) `shouldReturn` Never
it "won't schedule a bogus cron format" $ testAgent (
do Right () <- schedule (key testAction) "whatever man" Never
Left (EventNotFound _) <- lookupEvent (key testAction)
return ()
) `shouldThrow` errorCall "Unable to parse cron formatted literal: whatever man"
it "won't find an absent event" $ testAgent (
do Left (EventNotFound key') <- lookupEvent "not here"
return key'
) `shouldReturn` "not here"
it "can remove an event" $ testAgent (
do Right () <- schedule "will delete" "@hourly" Never
Right _ <- lookupEvent "will delete"
Right () <- unschedule "will delete"
Left (EventNotFound key') <- lookupEvent "will delete"
return key'
) `shouldReturn` "will delete"
it "executes a cron scheduled action" $ testAgent (
do Right () <- callServ $ StoreAction (Action testAction)
Right () <- schedule (key testAction) "* * * * *" Never
-- reset the event so it can run now
Right () <- callServ $ ScheduleEnableEvents [key testAction]
threadDelay 10000
Right results' <- findResultsSince zeroDate
Right () <- unschedule (key testAction)
return (length results')
) `shouldReturn` 1
it "does not execute a disabled event" $ testAgent (
do Right () <- callServ $ StoreAction (Action testAction)
Right () <- schedule (key testAction) "* * * * *" Never
-- this really doesn't prove much, since the event won't
-- run till the next minute in any case
{-Right () <- callServ $ ScheduleDisableEvents [key testAction]-}
send serverName Tick
threadDelay 10000
Right rs <- findResultsSince zeroDate
Right () <- unschedule (key testAction)
return (length rs)
) `shouldReturn` 1
| jeremyjh/free-agent | core/test/FreeAgent/Server/ScheduleSpec.hs | bsd-3-clause | 4,117 | 0 | 18 | 1,478 | 937 | 445 | 492 | 77 | 1 |
module System.Process.Safe where
import Control.Applicative
import Control.Concurrent
import qualified Control.Exception as C
import Control.Monad
import Control.Monad.Error
import Data.Maybe
import GHC.IO.Exception
import System.Exit
import System.IO
import System.IO.Error
import System.Process
-- -----------------------------------------------------------------------------
--
-- | readProcessWithTimeout forks an external process, reads its standard
-- output strictly, blocking until the process terminates or the timeout
-- expires, and returns the output string.
--
-- Output is returned strictly, so this is not suitable for
-- interactive applications.
--
-- Users of this function should compile with @-threaded@ if they
-- want other Haskell threads to keep running while waiting on
-- the result of readProcessWithTimeout.
--
-- > > readProcessWithTimeout "date" [] [] 5000
-- > "Thu Feb 7 10:03:39 PST 2008\n"
--
-- The arguments are:
--
-- * The command to run, which must be in the $PATH, or an absolute path
--
-- * A list of separate command line arguments to the program
--
-- * A string to pass on the standard input to the program.
--
-- * A timeout in milliseconds; if the process is still running when it
--   expires, the process is terminated and the call raises an 'IOError'.
--
readProcessWithTimeout
:: FilePath -- ^ command to run
-> [String] -- ^ any arguments
-> String -- ^ standard input
-> Int -- ^ timeout in milliseconds
-> IO String -- ^ stdout
readProcessWithTimeout cmd args input microseconds = do
(Just inh, Just outh, _, pid) <-
createProcess (proc cmd args){ std_in = CreatePipe,
std_out = CreatePipe,
std_err = Inherit }
-- fork off a thread to start consuming the output
output <- hGetContents outh
outMVar <- newEmptyMVar
_ <- forkIO $ C.evaluate (length output) >> putMVar outMVar (Just ())
-- now write and flush any input
when (not (null input)) $ do hPutStr inh input; hFlush inh
hClose inh -- done with stdin
    -- kill on a timeout; the timeout argument is in milliseconds, while
    -- 'threadDelay' takes microseconds, hence the * 1000 below
forkIO $ do threadDelay $ microseconds * 1000
got <- isJust <$> tryTakeMVar outMVar
unless got $ do
terminateProcess pid
putMVar outMVar Nothing
-- wait on the output
success <- isJust <$> takeMVar outMVar
hClose outh
-- wait on the process
ex <- waitForProcess pid
case if success then ex else ExitFailure (-1) of
ExitSuccess -> return output
ExitFailure r ->
ioError (mkIOError GHC.IO.Exception.OtherError
("readProcess: " ++ cmd ++
' ':unwords (map show args) ++
" (exit " ++ show r ++ ")")
Nothing Nothing)
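-- A sketch of the timeout path (command and delay chosen arbitrarily): the
-- call below is expected to kill the sleep after roughly 100 milliseconds and
-- then raise an 'IOError', because the exit code is forced to @ExitFailure (-1)@:
--
-- > readProcessWithTimeout "sleep" ["10"] "" 100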
| zhuangzi/genrei | src/System/Process/Safe.hs | bsd-3-clause | 2,875 | 0 | 20 | 961 | 486 | 259 | 227 | 44 | 3 |
module Main where
import IncrementalQuery (
initializeCache, updateCache, Cache,
initializeCache2, updateCache2, Cache2,
Row(Order, LineItem), salesDomain, sales)
import Data.Monoid (
Sum)
import Criterion.Main (
defaultMain, bgroup, bench, whnf, nf, Benchmark, env)
main :: IO ()
main = defaultMain [
bgroup "initializeCache" (map benchmarkInitializeCache [10, 100]),
bgroup "updateCache" (map benchmarkUpdateCache [10, 100]),
bgroup "initializeCache2" (map benchmarkInitializeCache2 [10, 100, 1000]),
bgroup "updateCache2" (map benchmarkUpdateCache2 [10, 100, 1000])]
benchmarkInitializeCache :: Int -> Benchmark
benchmarkInitializeCache size =
bench (show size) (nf sizedCache size)
sizedCache :: Int -> Cache Row (Sum Int)
sizedCache size = initializeCache 2 (sizedSalesDomain size) sales
benchmarkInitializeCache2 :: Int -> Benchmark
benchmarkInitializeCache2 size =
bench (show size) (nf sizedCache2 size)
sizedCache2 :: Int -> Cache2 Row (Sum Int)
sizedCache2 size = initializeCache2 (sizedSalesDomain size) sales
sizedSalesDomain :: Int -> [Row]
sizedSalesDomain size = do
key <- [0 .. size]
[Order key 2, LineItem key 20]
benchmarkUpdateCache :: Int -> Benchmark
benchmarkUpdateCache size =
env (return (sizedCache size)) (\cache ->
bench (show size) (whnf (updateCache rowUpdate) cache))
benchmarkUpdateCache2 :: Int -> Benchmark
benchmarkUpdateCache2 size =
env (return (sizedCache2 size)) (\cache ->
bench (show size) (whnf (updateCache2 rowUpdate) cache))
rowUpdate :: Row
rowUpdate = Order 0 2
| phischu/incremental-query | bench/Main.hs | bsd-3-clause | 1,558 | 0 | 12 | 243 | 545 | 292 | 253 | 39 | 1 |
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE DataKinds #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Geometry.Instances.Real
-- Copyright : Copyright (C) 2015 Artem M. Chirkin <[email protected]>
-- License : BSD3
--
-- Maintainer : Artem M. Chirkin <[email protected]>
-- Stability : Experimental
-- Portability :
--
--
-----------------------------------------------------------------------------
module Data.Geometry.Instances.Real () where
import GHC.TypeLits (KnownNat)
import Data.Coerce (coerce)
import Data.Geometry.Types
import Data.Geometry.Prim.JSNum
import Data.Geometry.VectorMath
import Data.Geometry.Instances.Num ()
import Data.Geometry.Instances.Ord ()
instance (Num t, JSNum t, Real t, KnownNat n) => Real (Vector n t) where
{-# INLINE toRational #-}
toRational = toRational . indexVector 0
{-# RULES
"realToFrac/JSVectorNN" realToFrac = coerce :: Vector n a -> Vector n b
"realToFrac/JSVector23" realToFrac = realToFracVecNM :: Vector 2 a -> Vector 3 b
"realToFrac/JSVector24" realToFrac = realToFracVecNM :: Vector 2 a -> Vector 4 b
"realToFrac/JSVector25" realToFrac = realToFracVecNM :: Vector 2 a -> Vector 5 b
"realToFrac/JSVector26" realToFrac = realToFracVecNM :: Vector 2 a -> Vector 6 b
"realToFrac/JSVector32" realToFrac = realToFracVecNM :: Vector 3 a -> Vector 2 b
"realToFrac/JSVector34" realToFrac = realToFracVecNM :: Vector 3 a -> Vector 4 b
"realToFrac/JSVector35" realToFrac = realToFracVecNM :: Vector 3 a -> Vector 5 b
"realToFrac/JSVector36" realToFrac = realToFracVecNM :: Vector 3 a -> Vector 6 b
"realToFrac/JSVector42" realToFrac = realToFracVecNM :: Vector 4 a -> Vector 2 b
"realToFrac/JSVector43" realToFrac = realToFracVecNM :: Vector 4 a -> Vector 3 b
"realToFrac/JSVector45" realToFrac = realToFracVecNM :: Vector 4 a -> Vector 5 b
"realToFrac/JSVector46" realToFrac = realToFracVecNM :: Vector 4 a -> Vector 6 b
"realToFrac/JSVector52" realToFrac = realToFracVecNM :: Vector 5 a -> Vector 2 b
"realToFrac/JSVector53" realToFrac = realToFracVecNM :: Vector 5 a -> Vector 3 b
"realToFrac/JSVector54" realToFrac = realToFracVecNM :: Vector 5 a -> Vector 4 b
"realToFrac/JSVector56" realToFrac = realToFracVecNM :: Vector 5 a -> Vector 6 b
"realToFrac/JSVector62" realToFrac = realToFracVecNM :: Vector 6 a -> Vector 2 b
"realToFrac/JSVector63" realToFrac = realToFracVecNM :: Vector 6 a -> Vector 3 b
"realToFrac/JSVector64" realToFrac = realToFracVecNM :: Vector 6 a -> Vector 4 b
"realToFrac/JSVector65" realToFrac = realToFracVecNM :: Vector 6 a -> Vector 5 b
#-}
realToFracVecNM :: (KnownNat n, KnownNat m) => Vector n a -> Vector m b
realToFracVecNM = f . resizeVector
where f ::Vector m a -> Vector m b
f = coerce
instance (Num t, JSNum t, Real t, KnownNat n) => Real (Matrix n t) where
{-# INLINE toRational #-}
toRational = toRational . indexMatrix 0 0
{-# RULES
"realToFrac/JSMatrixNN" realToFrac = coerce :: Matrix n a -> Matrix n b
"realToFrac/JSMatrix23" realToFrac = realToFracMatNM :: Matrix 2 a -> Matrix 3 b
"realToFrac/JSMatrix24" realToFrac = realToFracMatNM :: Matrix 2 a -> Matrix 4 b
"realToFrac/JSMatrix25" realToFrac = realToFracMatNM :: Matrix 2 a -> Matrix 5 b
"realToFrac/JSMatrix26" realToFrac = realToFracMatNM :: Matrix 2 a -> Matrix 6 b
"realToFrac/JSMatrix32" realToFrac = realToFracMatNM :: Matrix 3 a -> Matrix 2 b
"realToFrac/JSMatrix34" realToFrac = realToFracMatNM :: Matrix 3 a -> Matrix 4 b
"realToFrac/JSMatrix35" realToFrac = realToFracMatNM :: Matrix 3 a -> Matrix 5 b
"realToFrac/JSMatrix36" realToFrac = realToFracMatNM :: Matrix 3 a -> Matrix 6 b
"realToFrac/JSMatrix42" realToFrac = realToFracMatNM :: Matrix 4 a -> Matrix 2 b
"realToFrac/JSMatrix43" realToFrac = realToFracMatNM :: Matrix 4 a -> Matrix 3 b
"realToFrac/JSMatrix45" realToFrac = realToFracMatNM :: Matrix 4 a -> Matrix 5 b
"realToFrac/JSMatrix46" realToFrac = realToFracMatNM :: Matrix 4 a -> Matrix 6 b
"realToFrac/JSMatrix52" realToFrac = realToFracMatNM :: Matrix 5 a -> Matrix 2 b
"realToFrac/JSMatrix53" realToFrac = realToFracMatNM :: Matrix 5 a -> Matrix 3 b
"realToFrac/JSMatrix54" realToFrac = realToFracMatNM :: Matrix 5 a -> Matrix 4 b
"realToFrac/JSMatrix56" realToFrac = realToFracMatNM :: Matrix 5 a -> Matrix 6 b
"realToFrac/JSMatrix62" realToFrac = realToFracMatNM :: Matrix 6 a -> Matrix 2 b
"realToFrac/JSMatrix63" realToFrac = realToFracMatNM :: Matrix 6 a -> Matrix 3 b
"realToFrac/JSMatrix64" realToFrac = realToFracMatNM :: Matrix 6 a -> Matrix 4 b
"realToFrac/JSMatrix65" realToFrac = realToFracMatNM :: Matrix 6 a -> Matrix 5 b
#-}
realToFracMatNM :: (KnownNat n, KnownNat m) => Matrix n a -> Matrix m b
realToFracMatNM = f . resizeMatrix
where f ::Matrix m a -> Matrix m b
f = coerce
| achirkin/fastvec | src/Data/Geometry/Instances/Real.hs | bsd-3-clause | 4,861 | 0 | 8 | 815 | 338 | 192 | 146 | 68 | 1 |
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeFamilies #-} -- needed for the associated 'Inf'/'Sup' instances below
module Tower.Bool
(
Bool(..)
, And(..)
, Or(..)
, Xor(..)
) where
import Protolude (Bool(..))
import qualified Protolude as P
import Tower.Ordering
import Tower.Magma
newtype And = And Bool
newtype Or = Or Bool
newtype Xor = Xor Bool
instance Magma And where
And a ⊕ And b = And (a P.&& b)
instance Magma Or where
Or a ⊕ Or b = Or (a P.|| b)
instance Magma Xor where
Xor True ⊕ Xor True = Xor False
Xor False ⊕ Xor False = Xor False
_ ⊕ _ = Xor True
instance Associative And
instance Associative Or
instance Associative Xor
instance Commutative And
instance Commutative Or
instance Commutative Xor
instance Idempotent Or
instance Idempotent And
instance Unital Or where unit = Or False
instance Unital And where unit = And True
instance Unital Xor where unit = Xor False
instance Invertible Xor where inv a = a
instance Homomorphic Or And where hom (Or x) = And (P.not x)
instance Homomorphic And Or where hom (And x) = Or (P.not x)
instance Isomorphic And Or where iso = (hom, hom)
instance Isomorphic Or And where iso = (hom, hom)
instance Semilattice And
instance Semilattice Or
instance Lattice Bool where
type Inf Bool = Or
type Sup Bool = And
| tonyday567/tower | src/Tower/Bool.hs | bsd-3-clause | 1,248 | 0 | 9 | 265 | 497 | 254 | 243 | -1 | -1 |
{-# LANGUAGE CPP, TypeFamilies, OverloadedStrings, DeriveDataTypeable, FlexibleContexts,
GeneralizedNewtypeDeriving, RecordWildCards, TemplateHaskell, MultiParamTypeClasses, FlexibleInstances #-}
module Data.Grob.Acid where
import Control.Monad.State (get, put)
import Control.Monad.Reader (ask)
import Data.Grob.Types
import Data.Acid (Update, Query, makeAcidic)
import Data.IxSet ( Indexable(..), (@=), getOne )
import qualified Data.IxSet as IxSet
initialBotState :: Bots
initialBotState = Bots
{
robots = empty
}
-- | insert a robot into the DB
newRobot :: Robot -> Update Bots Robot
newRobot robot = do
b@Bots{..} <- get
put $ b {
robots = IxSet.insert robot robots
}
return robot
-- | update the robot in the database (indexed by RobotId)
updateRobot :: Robot -> Update Bots ()
updateRobot updatedRobot =
do b@Bots{..} <- get
put $ b { robots = IxSet.updateIx (robotId updatedRobot) updatedRobot robots
}
-- | get a robot record by its ID (sha1 of its uri)
robotById :: RobotId -> Query Bots (Maybe Robot)
robotById rid =
do Bots{..} <- ask
return $ getOne $ robots @= rid
$(makeAcidic ''Bots
[ 'newRobot
, 'updateRobot
, 'robotById
])
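-- A minimal usage sketch (hedged): 'makeAcidic' above generates the event
-- types 'NewRobot', 'UpdateRobot' and 'RobotById', which are issued through
-- acid-state's 'openLocalState', 'update' and 'query' (all from "Data.Acid",
-- not imported in this module). The 'someRobot' argument and the required
-- SafeCopy instances are assumed to exist elsewhere.
--
-- > demo :: Robot -> IO ()
-- > demo someRobot = do
-- >   acid <- openLocalState initialBotState
-- >   _    <- update acid (NewRobot someRobot)
-- >   mBot <- query acid (RobotById (robotId someRobot))
-- >   case mBot of
-- >     Just _  -> putStrLn "robot found"
-- >     Nothing -> putStrLn "robot missing"
-- >   closeAcidState acid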
| ixmatus/grob | src/Data/Grob/Acid.hs | bsd-3-clause | 1,350 | 0 | 12 | 376 | 324 | 180 | 144 | 30 | 1 |
module Language.AVR.Shared where
import Control.Monad.Cont
import Control.Monad.Identity
import Control.Monad.State
import Data.Array (Ix(..))
import Data.SBV
import qualified Data.Map as M
data Register = R0
| R1
| R2
| R3
| R4
| R5
| R6
| R7
| R8
| R9
| R10
| R11
| R12
| R13
| R14
| R15
| R16
| R17
| R18
| R19
| R20
| R21
| R22
| R23
| R24
| R25
| R26
| R27
| R28
| R29
| R30
| R31
| X
| INCX
| SUBX
| XINC
| XSUB
| Y
| INCY
| SUBY
| YINC
| YSUB
| Z
| INCZ
| SUBZ
| ZINC
| ZSUB
deriving (Bounded, Enum, Eq, Ix, Ord, Show)
data StatusFlag = SC | SZ | SN | SV | SS | SH | ST | SI
deriving (Bounded, Enum, Eq, Ix, Ord, Show)
data Mode = Assembler
| Symbolic
deriving (Eq, Show)
data ExecutionState a = ExecutionState a
data LabelState a = LabelState {
labelMap :: M.Map String (AVRBackend a),
labelTarget :: Maybe (String, AVRBackend a),
callStack :: [AVRBackend a],
stateSnapshot :: Maybe a,
tmp :: Maybe (AVRBackend a),
callCount :: SInt16
}
type AVRBackend a = ContT () (AVRBackendBase a) ()
type AVRBackendBase a = StateT (LabelState a) (StateT a Identity)
programInternal start machineState labelState = runIdentity $ execStateT (
execStateT (runContT start (return . id)) labelState
) machineState
program start machineState = programInternal start machineState $ LabelState M.empty Nothing [] Nothing Nothing 0
| davnils/avr-asm-dsl | avr-lib/Language/AVR/Shared.hs | bsd-3-clause | 2,079 | 0 | 12 | 1,009 | 529 | 311 | 218 | 74 | 1 |
{-# LANGUAGE OverloadedStrings, ScopedTypeVariables #-}
module Main where
import Control.Monad
import Happstack.Server
import System.Environment
handlers :: ServerPart Response
handlers = msum
[ dir "pong" $ ok (toResponseBS "text/plain" "pong")
, serveFile (asContentType "image/png") "FiringGeometry.png"
]
main :: IO ()
main = do
args <- getArgs
let p = if length args == 0 then 8000 else read $ head args
simpleHTTP (nullConf {port = p, logAccess = Nothing}) handlers
| aslatter/happstack-wai | bench/SimpleHttp.hs | bsd-3-clause | 494 | 0 | 12 | 91 | 149 | 78 | 71 | 14 | 2 |
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TypeInType #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
{-# OPTIONS_GHC -fno-warn-unticked-promoted-constructors #-}
{-# OPTIONS_GHC -fno-warn-redundant-constraints #-}
module Generics.SOP.Record
( -- * A suitable representation for single-constructor records
FieldLabel
, RecordCode
, Record
, RecordRep
-- * Computing the record code
, RecordCodeOf
, IsRecord
, ValidRecordCode
, ExtractTypesFromRecordCode
, ExtractLabelsFromRecordCode
, RecombineRecordCode
-- * Conversion between a type and its record representation.
, toRecord
, fromRecord
-- * Utilities
, P(..)
, Snd
)
where
import Control.DeepSeq
import Generics.SOP.BasicFunctors
import Generics.SOP.NP
import Generics.SOP.NS
import Generics.SOP.Universe
import Generics.SOP.Sing
import Generics.SOP.Type.Metadata
import qualified GHC.Generics as GHC
import GHC.TypeLits
import GHC.Types
import Unsafe.Coerce
--------------------------------------------------------------------------
-- A suitable representation for single-constructor records.
--------------------------------------------------------------------------
-- | At the type level, we represent field labels using symbols.
type FieldLabel = Symbol
-- | The record code deviates from the normal SOP code in two
-- ways:
--
-- - There is only one list, because we require that there is
-- only a single constructor.
--
-- - In addition to the types of the fields, we store the labels
-- of the fields.
--
type RecordCode = [(FieldLabel, Type)]
-- | The record representation of a type is a record indexed
-- by the record code.
--
type RecordRep (a :: Type) = Record (RecordCodeOf a)
-- | The representation of a record is just a product indexed by
-- a record code, containing elements of the types indicated
-- by the code.
--
-- Note that the representation is deliberately chosen such that
-- it has the same run-time representation as the product part
-- of the normal SOP representation.
--
type Record (r :: RecordCode) = NP P r
--------------------------------------------------------------------------
-- Computing the record code
--------------------------------------------------------------------------
-- | This type-level function takes the type-level metadata provided
-- by generics-sop as well as the normal generics-sop code, and transforms
-- them into the record code.
--
-- Arguably, the record code is more usable than the representation
-- directly on offer by generics-sop. So it's worth asking whether
-- this representation should be included in generics-sop ...
--
-- The function will only reduce if the argument type actually is a
-- record, meaning it must have exactly one constructor, and that
-- constructor must have field labels attached to it.
--
type RecordCodeOf a = ToRecordCode_Datatype a (DatatypeInfoOf a) (Code a)
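-- As an illustration (a hedged example; the datatype below is assumed and is
-- not part of this module), for a single-constructor record such as
--
-- > data Pt = Pt { x :: Double, y :: Double }
--
-- with the usual generics-sop 'Generic' and 'HasDatatypeInfo' instances,
-- 'RecordCodeOf' reduces to
--
-- > RecordCodeOf Pt ~ '[ '("x", Double), '("y", Double) ]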
-- | Helper for 'RecordCodeOf', handling the datatype level. Both
-- datatypes and newtypes are acceptable. Newtypes are just handled
-- as one-constructor datatypes for this purpose.
--
type family
ToRecordCode_Datatype (a :: Type) (d :: DatatypeInfo) (c :: [[Type]]) :: RecordCode where
#if MIN_VERSION_generics_sop(0,5,0)
ToRecordCode_Datatype a (ADT _ _ cis _) c = ToRecordCode_Constructor a cis c
#else
ToRecordCode_Datatype a (ADT _ _ cis) c = ToRecordCode_Constructor a cis c
#endif
ToRecordCode_Datatype a (Newtype _ _ ci) c = ToRecordCode_Constructor a '[ ci ] c
-- | Helper for 'RecordCodeOf', handling the constructor level. Only
-- single-constructor types are acceptable, and the constructor must
-- contain field labels.
--
-- As an exception, we accept an empty record, even though it does
-- not explicitly define any field labels.
--
type family
ToRecordCode_Constructor (a :: Type) (cis :: [ConstructorInfo]) (c :: [[Type]]) :: RecordCode where
ToRecordCode_Constructor a '[ 'Record _ fis ] '[ ts ] = ToRecordCode_Field fis ts
ToRecordCode_Constructor a '[ 'Constructor _ ] '[ '[] ] = '[]
ToRecordCode_Constructor a '[] _ =
TypeError
( Text "The type `" :<>: ShowType a :<>: Text "' is not a record type."
:$$: Text "It has no constructors."
)
ToRecordCode_Constructor a ( _ : _ : _ ) _ =
TypeError
( Text "The type `" :<>: ShowType a :<>: Text "' is not a record type."
:$$: Text "It has more than one constructor."
)
ToRecordCode_Constructor a '[ _ ] _ =
TypeError
( Text "The type `" :<>: ShowType a :<>: Text "' is not a record type."
:$$: Text "It has no labelled fields."
)
-- | Helper for 'RecordCodeOf', handling the field level. At this point,
-- we simply zip the list of field names and the list of types.
--
type family ToRecordCode_Field (fis :: [FieldInfo]) (c :: [Type]) :: RecordCode where
ToRecordCode_Field '[] '[] = '[]
ToRecordCode_Field ( 'FieldInfo l : fis ) ( t : ts ) = '(l, t) : ToRecordCode_Field fis ts
-- * Relating the record code and the original code.
-- | The constraint @IsRecord a r@ states that the type 'a' is a record type
-- (i.e., has exactly one constructor and field labels) and that 'r' is the
-- record code associated with 'a'.
--
type IsRecord (a :: Type) (r :: RecordCode) =
IsRecord' a r (GetSingleton (Code a))
-- | The constraint @IsRecord' a r xs@ states that 'a' is a record type
-- with record code 'r', and that the types contained in 'r' correspond
-- to the list 'xs'.
--
-- If the record code computation is correct, then the record code of a
-- type is strongly related to the original generics-sop code. Extracting
-- the types out of 'r' should correspond to 'xs'. Recombining the
-- labels from 'r' with 'xs' should yield 'r' exactly. These sanity
-- properties are captured by 'ValidRecordCode'.
--
type IsRecord' (a :: Type) (r :: RecordCode) (xs :: [Type]) =
( Generic a, Code a ~ '[ xs ]
, RecordCodeOf a ~ r, ValidRecordCode r xs
)
-- | Relates a recordcode 'r' and a list of types 'xs', stating that
-- 'xs' is indeed the list of types contained in 'r'.
--
type ValidRecordCode (r :: RecordCode) (xs :: [Type]) =
( ExtractTypesFromRecordCode r ~ xs
, RecombineRecordCode (ExtractLabelsFromRecordCode r) xs ~ r
)
-- | Extracts all the types from a record code.
type family ExtractTypesFromRecordCode (r :: RecordCode) :: [Type] where
ExtractTypesFromRecordCode '[] = '[]
ExtractTypesFromRecordCode ( '(_, a) : r ) = a : ExtractTypesFromRecordCode r
-- | Extracts all the field labels from a record code.
type family ExtractLabelsFromRecordCode (r :: RecordCode) :: [FieldLabel] where
ExtractLabelsFromRecordCode '[] = '[]
ExtractLabelsFromRecordCode ( '(l, _) : r ) = l : ExtractLabelsFromRecordCode r
-- | Given a list of labels and types, recombines them into a record code.
--
-- An important aspect of this function is that it is defined by induction
-- on the list of types, and forces the list of field labels to be at least
-- as long.
--
type family RecombineRecordCode (ls :: [FieldLabel]) (ts :: [Type]) :: RecordCode where
RecombineRecordCode _ '[] = '[]
RecombineRecordCode ls (t : ts) = '(Head ls, t) : RecombineRecordCode (Tail ls) ts
--------------------------------------------------------------------------
-- Conversion between a type and its record representation.
--------------------------------------------------------------------------
-- | Convert a value into its record representation.
toRecord :: (IsRecord a _r) => a -> RecordRep a
toRecord = unsafeToRecord_NP . unZ . unSOP . from
-- | Convert an n-ary product into the corresponding record
-- representation. This is a no-op, and more efficiently
-- implemented using 'unsafeToRecord_NP'. It is included here
-- to demonstrate that it actually is type-correct and also
-- to make it more obvious that it is indeed a no-op.
--
_toRecord_NP :: (ValidRecordCode r xs) => NP I xs -> Record r
_toRecord_NP Nil = Nil
_toRecord_NP (I x :* xs) = P x :* _toRecord_NP xs
-- | Fast version of 'toRecord_NP'. Not actually unsafe as
-- long as the internal representations of 'NP' and 'Record'
-- are not changed.
--
unsafeToRecord_NP :: (ValidRecordCode r xs) => NP I xs -> Record r
unsafeToRecord_NP = unsafeCoerce
-- | Convert a record representation back into a value.
fromRecord :: (IsRecord a r) => RecordRep a -> a
fromRecord = to . SOP . Z . unsafeFromRecord_NP
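-- Continuing the assumed 'Pt' example from above (hedged sketch): 'toRecord'
-- and 'fromRecord' are mutual inverses on such a record type, so
--
-- > fromRecord (toRecord (Pt 1 2)) == Pt 1 2
--
-- holds (given an 'Eq' instance), and in between the value can be manipulated
-- as an ordinary 'NP' product of 'P'-wrapped fields.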
-- | Convert a record representation into an n-ary product. This is a no-op,
-- and more efficiently implemented using 'unsafeFromRecord_NP'.
--
-- It is also noteworthy that we let the resulting list drive the computation.
-- This is compatible with the definition of 'RecombineRecordCode' based on
-- the list of types.
--
_fromRecord_NP :: forall r xs . (ValidRecordCode r xs, SListI xs) => Record r -> NP I xs
_fromRecord_NP = case sList :: SList xs of
SNil -> const Nil
SCons -> \ r -> case r of
P x :* xs -> I x :* _fromRecord_NP xs
-- | Fast version of 'fromRecord_NP'. Not actually unsafe as
-- long as the internal representation of 'NP' and 'Record'
-- are not changed.
--
unsafeFromRecord_NP :: forall r xs . (ValidRecordCode r xs, SListI xs) => Record r -> NP I xs
unsafeFromRecord_NP = unsafeCoerce
--------------------------------------------------------------------------
-- Utilities
--------------------------------------------------------------------------
-- | Projection of the second component of a type-level pair,
-- wrapped in a newtype.
--
newtype P (p :: (a, Type)) = P (Snd p)
deriving (GHC.Generic)
deriving instance Eq a => Eq (P '(l, a))
deriving instance Ord a => Ord (P '(l, a))
deriving instance Show a => Show (P '(l, a))
instance NFData a => NFData (P '(l, a)) where
rnf (P x) = rnf x
-- | Type-level variant of 'snd'.
type family Snd (p :: (a, b)) :: b where
Snd '(a, b) = b
-- | Type-level variant of 'head'.
type family Head (xs :: [k]) :: k where
Head (x : xs) = x
-- | Type-level variant of 'tail'.
type family Tail (xs :: [k]) :: [k] where
Tail (x : xs) = xs
-- | Partial type-level function that extracts the only element
-- from a singleton type-level list.
--
type family GetSingleton (xs :: [k]) :: k where
GetSingleton '[ x ] = x
| kosmikus/records-sop | src/Generics/SOP/Record.hs | bsd-3-clause | 10,526 | 0 | 13 | 1,997 | 1,809 | 1,073 | 736 | -1 | -1 |
{-# LANGUAGE FlexibleContexts #-}
module Gun where
import Rumpus
majorScale = map (+60) [0,2,4,7,9]
randomNote = do
i <- randomRange (0, length majorScale - 1)
return (majorScale !! i)
start :: Start
start = do
removeChildren
setRepeatingAction (1/5) $ do
note <- randomNote
sendSynth "note" (Atom $ realToFrac note)
pose <- getPose
childID <- spawnEntity $ do
myPose ==> pose & translation +~
(pose ^. _m33) !* (V3 0 0.3 0)
myShape ==> Sphere
mySize ==> 0.03
myMass ==> 0.1
myColor ==> colorHSL (note / 12) 0.9 0.8
inEntity childID $ do
setLifetime 10
applyForce $ (pose ^. _m33) !* (V3 0 3.9 0)
| lukexi/rumpus | util/DevScenes/Shoo/Gun.hs | bsd-3-clause | 787 | 0 | 17 | 294 | 273 | 135 | 138 | 24 | 1 |
{-# LANGUAGE OverloadedStrings, PostfixOperators, NamedFieldPuns #-}
module Language.Naga.Prim where
import MPS.Env hiding ((<), (>), (+), (*), (/), Int, String, (^), apply, return, Bool)
import Prelude ()
import Language.Naga.Type
import qualified Prelude as P
import qualified Data.ByteString.UTF8 as B
import Data.Tree.Zipper
import Control.Monad.Reader (runReaderT)
import Control.Monad.State (execStateT, modify)
import Data.Default
import Data.Tree
import Control.Monad.Identity (runIdentity)
import Data.Map (union)
import qualified Data.Map as M
import Data.ByteString.UTF8 (toString, fromString)
import Control.Monad.Writer (Writer, execWriter, tell)
import Language.Naga.Operation
lt_ = Lambda $
PrimFunction
{
prim_name = "<"
, prim_body = prim - lt_op (P.<)
, is_infix = True
}
where
lt_op f (Int x:Int y:[]) z = (Bool - f x y, z)
add_ = Lambda $
PrimFunction
{
prim_name = "+"
, prim_body = prim - int_op (P.+)
, is_infix = True
}
subtract_ = Lambda $
PrimFunction
{
prim_name = "-"
, prim_body = prim - int_op (P.-)
, is_infix = True
}
assign__ = Lambda $
PrimFunction
{
prim_name = "="
, prim_body = prim f
, is_infix = True
}
where
f ((String x):y:[]) z = (Null, var x y z)
f _ _ = pe
var x y = modifyLabel add_binding
where
add_binding xs = union (to_h [(toString x, y)]) xs
int_op f ((Int x):(Int y):[]) z = (Int - f x y, z)
int_op _ _ _ = pe
| nfjinjing/naga | src/Language/Naga/Prim.hs | bsd-3-clause | 1,502 | 2 | 14 | 361 | 559 | 333 | 226 | 45 | 2 |
-- Quasicrystals demo.
--
-- Based on code from:
-- http://hackage.haskell.org/package/repa-examples
-- http://mainisusuallyafunction.blogspot.com/2011/10/quasicrystals-as-sums-of-waves-in-plane.html
--
module Main where
import Config
import Data.Word
import Data.Label
import Foreign.Ptr
import Control.Monad
import Control.Exception
import Criterion.Main ( defaultMain, bench, whnf )
import Foreign.ForeignPtr
import System.Environment
import System.IO.Unsafe
import Data.Array.Accelerate ( Array, Scalar, Exp, Acc, DIM2, Z(..), (:.)(..) )
import qualified Data.Array.Accelerate as A
import qualified Graphics.Gloss as G
import Data.Array.Accelerate.Array.Data ( ptrsOfArrayData )
import Data.Array.Accelerate.Array.Sugar ( Array(..) )
-- Types ----------------------------------------------------------------------
-- | Real value
type R = Float
-- | Point on the 2D plane.
type R2 = (R, R)
-- | Angle in radians.
type Angle = R
-- | Angle offset used for animation.
type Phi = Float
-- | Number of waves to sum for each pixel.
type Degree = Int
-- | Feature size of visualisation.
type Scale = Float
-- | Size of image to render.
type Size = Int
-- | How many times to duplicate each pixel / image zoom.
type Zoom = Int
-- | Type of the generated image data
type RGBA = Word32
type Bitmap = Array DIM2 RGBA
-- | Action to render a frame
type Render = Scalar Phi -> Bitmap
-- Point ----------------------------------------------------------------------
-- | Compute a single point of the visualisation.
quasicrystal :: Size -> Scale -> Degree -> Acc (Scalar Phi) -> Exp DIM2 -> Exp R
quasicrystal size scale degree phi p
= waves degree phi $ point size scale p
-- | Sum up all the waves at a particular point.
waves :: Degree -> Acc (Scalar Phi) -> Exp R2 -> Exp R
waves degree phi x = wrap $ waver degree 0
where
waver :: Int -> Exp Float -> Exp Float
waver n acc
| n == 0 = acc
| otherwise = waver (n - 1) (acc + wave (A.constant (fromIntegral n) * A.the phi) x)
wrap n
= let n_ = A.truncate n :: Exp Int
n' = n - A.fromIntegral n_
in
(n_ `rem` 2 A./=* 0) A.? (1-n', n')
-- | Generate the value for a single wave.
wave :: Exp Angle -> Exp R2 -> Exp R
wave th pt = (cos (cth*x + sth*y) + 1) / 2
where
(x,y) = A.unlift pt
cth = cos th
sth = sin th
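-- For reference, the same formula written with plain 'Float's (a hedged
-- sketch for exposition only; the real pipeline stays inside Accelerate):
--
-- > waveRef :: Angle -> R2 -> R
-- > waveRef th (x, y) = (cos (cos th * x + sin th * y) + 1) / 2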
-- | Convert an image point to a point on our wave plane.
point :: Size -> Scale -> Exp DIM2 -> Exp R2
point size scale ix = A.lift (adj x, adj y)
where
(Z:.x:.y) = A.unlift ix
denom = A.constant (fromIntegral size - 1)
adj n = A.constant scale * ((2 * A.fromIntegral n / denom) - 1)
-- Computation ----------------------------------------------------------------
-- | Compute a single frame
makeImage :: Size -> Scale -> Degree -> Acc (Scalar Phi) -> Acc Bitmap
makeImage size scale degree phi = arrPixels
where
-- Compute [0..1] values for the wave density at each point.
arrVals :: Acc (Array DIM2 Float)
arrVals = A.generate
(A.constant $ Z :. size :. size)
(quasicrystal size scale degree phi)
-- Convert the [0..1] values of wave density to an RGBA flat image
arrPixels :: Acc Bitmap
arrPixels = A.map rampColour arrVals
-- | Colour ramp from red to white, convert into RGBA
rampColour :: Exp Float -> Exp RGBA
rampColour v = ra + g + b
where
u = 0 `A.max` v `A.min` 1
ra = 0xFF0000FF
g = A.truncate ((0.4 + (u * 0.6)) * 0xFF) * 0x10000
b = A.truncate (u * 0xFF) * 0x100
-- Rendering ------------------------------------------------------------------
-- | Compute a single frame of the animation as a Gloss picture.
--frame :: Size -> Scale -> Zoom -> Degree -> Float -> G.Picture
frame :: Render -> Size -> Zoom -> Float -> G.Picture
frame render size zoom time = G.scale zoom' zoom' pic
where
-- Scale the time to be the phi value of the animation. The action seems to
-- slow down at increasing phi values, so we increase phi faster as time
-- moves on.
x = 1 + (time ** 1.5) * 0.005
-- lift to a singleton array, else we would generate new code with the
-- constant embedded at every frame
phi = A.fromList Z [pi / x]
-- Compute the image
arrPixels = render phi
-- Wrap the array data in a Foreign pointer and turn into a Gloss picture
{-# NOINLINE rawData #-}
rawData = let (Array _ adata) = arrPixels
((),ptr) = ptrsOfArrayData adata
in
unsafePerformIO $ newForeignPtr_ (castPtr ptr)
pic = G.bitmapOfForeignPtr
size size -- raw image size
rawData -- the image data
False -- don't cache this in texture memory
-- Zoom the image so we get a bigger window.
zoom' = fromIntegral zoom
-- Main -----------------------------------------------------------------------
main :: IO ()
main
= do (config, nops) <- processArgs =<< getArgs
let size = get optSize config
zoom = get optZoom config
scale = get optScale config
degree = get optDegree config
render = run config $ makeImage size scale degree
void . evaluate $ render (A.fromList Z [0])
if get optBench config
then withArgs nops $ defaultMain
[ bench "crystal" $ whnf render (A.fromList Z [1.0]) ]
else G.animateInWindow
"Quasicrystals"
(size * zoom, size * zoom)
(10, 10)
G.black
(frame render size zoom)
| blambo/accelerate-examples | examples/crystal/Main.hs | bsd-3-clause | 6,039 | 0 | 16 | 1,898 | 1,442 | 779 | 663 | 96 | 2 |
-- TP1
main :: IO()
main = undefined
--Question 3
sommeDeXaY :: Int -> Int -> Int
sommeDeXaY x y =
if x > y then
0
else
x + sommeDeXaY (x+1) y
-- Question 4
somme :: [Int] -> Int
somme [] = 0
somme (x:xs) = x + somme xs
-- Question 5
last' :: [a] -> a
last' xs = head (reverse xs)
init' :: [a] -> [a]
init' xs = reverse(tail(reverse xs))
-- Question 6
-- map
selectn :: [a] -> Int -> a
selectn [] _ = error "no element in the list"
selectn (x:_) 0 = x
selectn (_:xs) n = selectn xs (n-1)
concatDeuxListes :: [a] -> [a] -> [a]
concatDeuxListes [] xs = xs
concatDeuxListes [x] xs = x:xs
concatDeuxListes (x:xs) ys = concatDeuxListes [x] (concatDeuxListes xs ys)
concat' :: [[a]] -> [a]
concat' [] = []
concat' [xs] = xs
concat' (x:xs) = concatDeuxListes x (concat' xs)
map' :: (a->b) -> [a] -> [b]
map' _ [] = []
map' f [x] = [f x]
map' f (x:xs) = concatDeuxListes [f x] (map' f xs)
-- Question 7
-- If l is a list [a] and we have the declaration x = (!!) l, then invoking x n returns the n-th element of the list l.
-- Question 8
length' :: [a] -> Int
length' [] = 0
length' xs = somme (map (const 1) xs)
-- Question 9
applique :: (a -> a) -> a -> Int -> [a]
applique _ _ 0 = []
applique f x n = x:applique f (f x) (n-1)
applique' :: (a->a) -> a -> Int -> [a]
applique' f x n = take n (iterate f x)
-- Question 10
f' :: Int -> Int
f' x = x+1
listeEnt :: Int -> [Int]
listeEnt n = 0:applique' f' 1 n
| jecisc/TP_PF_L3 | PF-TP1/src/Main.hs | mit | 1,642 | 0 | 9 | 532 | 740 | 393 | 347 | 42 | 2 |
module SmallLibraryForMaybe where
-- Write the following functions. This may
-- take some time.
-- 1. Simple boolean checks for Maybe values
-- isJust (Just 1)
-- True
-- isJust Nothing
-- False
isJust :: Maybe a -> Bool
isJust Nothing = False
isJust _ = True
-- isNothing (Just 1)
-- False
-- isNothing Nothing
-- True
isNothing :: Maybe a -> Bool
isNothing Nothing = True
isNothing _ = False
-- 2. The following is the Maybe catamorphism. You can
-- turn a Maybe value into anything else with this
-- mayybe 0 (+1) Nothing
-- 0
-- mayybe 0 (+1) (Just 1)
-- 2
mayybe :: b
-> (a -> b)
-> Maybe a
-> b
mayybe seed _ Nothing = seed
mayybe _ f (Just x) = f x
-- 3. In case you want to provide a fallback value.
-- fromMaybe 0 Nothing
-- 0
-- fromMaybe 0 (Just 1)
-- 1
fromMaybe :: a -> Maybe a -> a
fromMaybe seed Nothing = seed
fromMaybe _ (Just x) = x
-- Try writing it in terms of the maybe catamorphism.
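-- One possible answer to the exercise above (a sketch, not the only way):
-- the seed covers the 'Nothing' case and 'id' covers 'Just'.
fromMaybe' :: a -> Maybe a -> a
fromMaybe' seed = mayybe seed id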
-- 4. Converting between List and Maybe.
-- listToMaybe [1, 2, 3]
-- Just 1
-- listToMaybe []
-- Nothing
listToMaybe :: [a] -> Maybe a
listToMaybe [] = Nothing
listToMaybe (x:_) = Just x
-- maybeToList (Just 1)
-- [1]
-- maybeToList Nothing
-- []
maybeToList :: Maybe a -> [a]
maybeToList Nothing = []
maybeToList (Just x) = [x]
-- 5. For when we want to drop the Nothing values from
-- our list.
-- catMaybes [Just 1, Nothing, Just 2]
-- [1, 2]
-- let xs = take 3 $ repeat Nothing
-- catMaybes xs
-- []
catMaybes :: [Maybe a] -> [a]
catMaybes [] = []
catMaybes (x:xs) =
case x of
(Just n) -> n : catMaybes xs
_ -> catMaybes xs
-- 6. You'll see this called "sequence" later.
-- flipMaybe [Just 1, Just 2, Just 3]
-- Just [1, 2, 3]
-- flipMaybe [Just 1, Nothing, Just 3]
-- Nothing
flipMaybe :: [Maybe a] -> Maybe [a]
flipMaybe list =
if not sawNothing
then Just result
else Nothing
where
(sawNothing, result) = foldr folder (False, []) list
folder m (b, acc) =
if b
then (True, [])
else case m of
(Just n) -> (False, n : acc)
_ -> (True, [])
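-- For comparison: the standard library's 'sequence' (or 'sequenceA')
-- specialised to @[Maybe a]@ behaves the same way as 'flipMaybe':
--
-- > sequence [Just 1, Just 2, Just 3] == Just [1, 2, 3]
-- > sequence [Just 1, Nothing, Just 3] == Nothing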
-- Next: SmallLibraryForEitherChapter12.hs | brodyberg/Notes | ProjectRosalind.hsproj/LearnHaskell/lib/HaskellBook/SmallLibraryForMaybeChapter12.hs | mit | 2,152 | 0 | 12 | 559 | 513 | 289 | 224 | 40 | 4 |
module End.Util.Animation where
import Graphics.UI.SDL
import Graphics.UI.SDL.Image
import Data.Word
import End.Collection
import End.Collection.Header
loadImage :: String -> Maybe (Word8, Word8, Word8) -> IO Surface
loadImage filename colorKey = load filename >>= displayFormat >>= setColorKey' colorKey
setColorKey' :: Maybe (Word8, Word8, Word8) -> Surface -> IO Surface
setColorKey' Nothing s = return s
setColorKey' (Just (r, g, b)) surface = mapRGB' surface r g b >>= setColorKey surface [SrcColorKey] >> return surface
mapRGB' :: Surface -> Word8 -> Word8 -> Word8 -> IO Pixel
mapRGB' = mapRGB . surfaceGetPixelFormat
--animationRectangles o = do
-- let max = o^.
-- create [0..o^.sprite.animation]
-- where create [] = []
-- create (xx:xs) = [] | kwrooijen/sdl-game | End/Util/Animation.hs | gpl-3.0 | 775 | 0 | 9 | 133 | 220 | 120 | 100 | 13 | 1 |
-- |
-- Module : Crypto.PubKey.RSA.Types
-- License : BSD-style
-- Maintainer : Vincent Hanquez <[email protected]>
-- Stability : experimental
-- Portability : Good
--
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Crypto.PubKey.RSA.Types
( Error(..)
, Blinder(..)
, PublicKey(..)
, PrivateKey(..)
, KeyPair(..)
, toPublicKey
, toPrivateKey
, private_size
, private_n
, private_e
) where
import Data.Data
import Crypto.Internal.Imports
-- | Blinder which is used to obfuscate the timing
-- of the decryption primitive (used by decryption and signing).
data Blinder = Blinder !Integer !Integer
deriving (Show,Eq)
-- | error possible during encryption, decryption or signing.
data Error =
MessageSizeIncorrect -- ^ the message to decrypt is not of the correct size (need to be == private_size)
| MessageTooLong -- ^ the message to encrypt is too long
| MessageNotRecognized -- ^ the message decrypted doesn't have a PKCS15 structure (0 2 .. 0 msg)
| SignatureTooLong -- ^ the message's digest is too long
| InvalidParameters -- ^ some parameters lead to breaking assumptions.
deriving (Show,Eq)
-- | Represent an RSA public key
data PublicKey = PublicKey
{ public_size :: Int -- ^ size of key in bytes
, public_n :: Integer -- ^ public p*q
, public_e :: Integer -- ^ public exponent e
} deriving (Show,Read,Eq,Data)
instance NFData PublicKey where
rnf (PublicKey sz n e) = rnf n `seq` rnf e `seq` sz `seq` ()
-- | Represent an RSA private key.
--
-- Only the pub and d fields are mandatory to fill in.
--
-- p, q, dP, dQ and qinv are by-products of RSA generation,
-- but recording them here massively speeds up the decrypt
-- and sign operations.
--
-- Implementations can leave the optional fields set to 0.
--
data PrivateKey = PrivateKey
{ private_pub :: PublicKey -- ^ public part of a private key (size, n and e)
, private_d :: Integer -- ^ private exponent d
, private_p :: Integer -- ^ p prime number
, private_q :: Integer -- ^ q prime number
, private_dP :: Integer -- ^ d mod (p-1)
, private_dQ :: Integer -- ^ d mod (q-1)
, private_qinv :: Integer -- ^ q^(-1) mod p
} deriving (Show,Read,Eq,Data)
instance NFData PrivateKey where
rnf (PrivateKey pub d p q dp dq qinv) =
rnf pub `seq` rnf d `seq` rnf p `seq` rnf q `seq` rnf dp `seq` rnf dq `seq` qinv `seq` ()
-- | get the size in bytes from a private key
private_size :: PrivateKey -> Int
private_size = public_size . private_pub
-- | get n from a private key
private_n :: PrivateKey -> Integer
private_n = public_n . private_pub
-- | get e from a private key
private_e :: PrivateKey -> Integer
private_e = public_e . private_pub
-- | Represent an RSA KeyPair
--
-- note the RSA private key already contains the public key, for efficiency
newtype KeyPair = KeyPair PrivateKey
deriving (Show,Read,Eq,Data,NFData)
-- | Public key of a RSA KeyPair
toPublicKey :: KeyPair -> PublicKey
toPublicKey (KeyPair priv) = private_pub priv
-- | Private key of a RSA KeyPair
toPrivateKey :: KeyPair -> PrivateKey
toPrivateKey (KeyPair priv) = priv
| vincenthz/cryptonite | Crypto/PubKey/RSA/Types.hs | bsd-3-clause | 3,283 | 0 | 13 | 770 | 569 | 347 | 222 | 59 | 1 |
module Cauterize.Dynamic.Meta
( module M
) where
import Cauterize.Dynamic.Meta.Gen as M
import Cauterize.Dynamic.Meta.Pack as M
import Cauterize.Dynamic.Meta.Unpack as M
import Cauterize.Dynamic.Meta.Types as M
| reiddraper/cauterize | src/Cauterize/Dynamic/Meta.hs | bsd-3-clause | 216 | 0 | 4 | 28 | 49 | 37 | 12 | 6 | 0 |
import Database.HaskellDB.FlatDB
import System.Environment
import System.IO
main :: IO ()
main = do args <- getArgs
case args of
[f] -> newDB f
_ -> hPutStrLn stderr "Usage: flatdb-create <file>" | chrisdone/haskelldb-demo | lib/haskelldb/driver-flat/tools/flatdb-create.hs | bsd-3-clause | 233 | 0 | 10 | 67 | 69 | 35 | 34 | 8 | 2 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveDataTypeable, RecordWildCards #-}
-- | Docker configuration
module Stack.Config.Docker where
import Stack.Prelude
import Data.List (find)
import qualified Data.Text as T
import Distribution.Version (simplifyVersionRange)
import Stack.Types.Version
import Stack.Types.Config
import Stack.Types.Docker
import Stack.Types.Resolver
-- | Add a default Docker tag name to a given base image.
addDefaultTag
:: MonadThrow m
=> String -- ^ base
-> Maybe Project
-> Maybe AbstractResolver
-> m String
addDefaultTag base mproject maresolver = do
let exc = throwM $ ResolverNotSupportedException mproject maresolver
lts <- case maresolver of
Just (ARResolver (RSLSynonym lts@(LTS _ _))) -> return lts
Just _aresolver -> exc
Nothing ->
case projectResolver <$> mproject of
Just (RSLSynonym lts@(LTS _ _)) -> return lts
_ -> exc
return $ base ++ ":" ++ show lts
-- | Interprets DockerOptsMonoid options.
dockerOptsFromMonoid
:: MonadThrow m
=> Maybe Project
-> Maybe AbstractResolver
-> DockerOptsMonoid
-> m DockerOpts
dockerOptsFromMonoid mproject maresolver DockerOptsMonoid{..} = do
let dockerImage =
case getFirst dockerMonoidRepoOrImage of
Nothing -> addDefaultTag "fpco/stack-build" mproject maresolver
Just (DockerMonoidImage image) -> pure image
Just (DockerMonoidRepo repo) ->
case find (`elem` (":@" :: String)) repo of
Nothing -> addDefaultTag repo mproject maresolver
-- Repo already specified a tag or digest, so don't append default
Just _ -> pure repo
let dockerEnable =
fromFirst (getAny dockerMonoidDefaultEnable) dockerMonoidEnable
dockerRegistryLogin =
fromFirst
(isJust (emptyToNothing (getFirst dockerMonoidRegistryUsername)))
dockerMonoidRegistryLogin
dockerRegistryUsername = emptyToNothing (getFirst dockerMonoidRegistryUsername)
dockerRegistryPassword = emptyToNothing (getFirst dockerMonoidRegistryPassword)
dockerAutoPull = fromFirstTrue dockerMonoidAutoPull
dockerDetach = fromFirstFalse dockerMonoidDetach
dockerPersist = fromFirstFalse dockerMonoidPersist
dockerContainerName = emptyToNothing (getFirst dockerMonoidContainerName)
dockerNetwork = emptyToNothing (getFirst dockerMonoidNetwork)
dockerRunArgs = dockerMonoidRunArgs
dockerMount = dockerMonoidMount
dockerMountMode = emptyToNothing (getFirst dockerMonoidMountMode)
dockerEnv = dockerMonoidEnv
dockerSetUser = getFirst dockerMonoidSetUser
dockerRequireDockerVersion =
simplifyVersionRange (getIntersectingVersionRange dockerMonoidRequireDockerVersion)
dockerStackExe = getFirst dockerMonoidStackExe
return DockerOpts{..}
where emptyToNothing Nothing = Nothing
emptyToNothing (Just s) | null s = Nothing
| otherwise = Just s
-- | Exceptions thrown by Stack.Docker.Config.
data StackDockerConfigException
= ResolverNotSupportedException !(Maybe Project) !(Maybe AbstractResolver)
-- ^ Only LTS resolvers are supported for default image tag.
deriving (Typeable)
-- | Exception instance for StackDockerConfigException.
instance Exception StackDockerConfigException
-- | Show instance for StackDockerConfigException.
instance Show StackDockerConfigException where
show (ResolverNotSupportedException mproject maresolver) =
concat
[ "Resolver not supported for Docker images:\n "
, case (mproject, maresolver) of
(Nothing, Nothing) -> "no resolver specified"
(_, Just aresolver) -> T.unpack $ utf8BuilderToText $ display aresolver
(Just project, Nothing) -> T.unpack $ utf8BuilderToText $ display $ projectResolver project
, "\nUse an LTS resolver, or set the '"
, T.unpack dockerImageArgName
, "' explicitly, in your configuration file."]
| juhp/stack | src/Stack/Config/Docker.hs | bsd-3-clause | 4,233 | 0 | 19 | 1,049 | 831 | 423 | 408 | 87 | 5 |
--------------------------------------------------------------------------------
-- | Module dealing with HTTP: request data types, encoding and decoding...
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
module Network.WebSockets.Http
( Headers
, RequestHead (..)
, Request (..)
, ResponseHead (..)
, Response (..)
, HandshakeException (..)
, encodeRequestHead
, encodeRequest
, decodeRequestHead
, encodeResponseHead
, encodeResponse
, decodeResponseHead
, decodeResponse
, response101
, response400
, getRequestHeader
, getResponseHeader
, getRequestSecWebSocketVersion
, getRequestSubprotocols
) where
--------------------------------------------------------------------------------
import qualified Blaze.ByteString.Builder as Builder
import qualified Blaze.ByteString.Builder.Char.Utf8 as Builder
import Control.Applicative (pure, (*>), (<$>), (<*),
(<*>))
import Control.Exception (Exception, throw)
import qualified Data.Attoparsec.ByteString as A
import Data.ByteString (ByteString)
import qualified Data.ByteString as B
import Data.ByteString.Char8 ()
import qualified Data.ByteString.Char8 as BC
import Data.ByteString.Internal (c2w)
import qualified Data.CaseInsensitive as CI
import Data.Dynamic (Typeable)
import Data.Monoid (mappend, mconcat)
--------------------------------------------------------------------------------
-- | Request headers
type Headers = [(CI.CI ByteString, ByteString)]
--------------------------------------------------------------------------------
-- | An HTTP request. The request body is not yet read.
data RequestHead = RequestHead
{ requestPath :: !B.ByteString
, requestHeaders :: Headers
, requestSecure :: Bool
} deriving (Show)
--------------------------------------------------------------------------------
-- | A request with a body
data Request = Request RequestHead B.ByteString
deriving (Show)
--------------------------------------------------------------------------------
-- | HTTP response, without body.
data ResponseHead = ResponseHead
{ responseCode :: !Int
, responseMessage :: !B.ByteString
, responseHeaders :: Headers
} deriving (Show)
--------------------------------------------------------------------------------
-- | A response including a body
data Response = Response ResponseHead B.ByteString
deriving (Show)
--------------------------------------------------------------------------------
-- | Error in case of failed handshake. Will be thrown as an 'Exception'.
--
-- TODO: This should probably be in the Handshake module, and is solely here to
-- prevent a cyclic dependency.
data HandshakeException
-- | We don't have a match for the protocol requested by the client.
-- todo: version parameter
= NotSupported
-- | The request was somehow invalid (missing headers or wrong security
-- token)
| MalformedRequest RequestHead String
    -- | The server's response was somehow invalid (missing headers or wrong
-- security token)
| MalformedResponse ResponseHead String
-- | The request was well-formed, but the library user rejected it.
-- (e.g. "unknown path")
| RequestRejected Request String
-- | for example "EOF came too early" (which is actually a parse error)
-- or for your own errors. (like "unknown path"?)
| OtherHandshakeException String
deriving (Show, Typeable)
--------------------------------------------------------------------------------
instance Exception HandshakeException
--------------------------------------------------------------------------------
encodeRequestHead :: RequestHead -> Builder.Builder
encodeRequestHead (RequestHead path headers _) =
Builder.copyByteString "GET " `mappend`
Builder.copyByteString path `mappend`
Builder.copyByteString " HTTP/1.1" `mappend`
Builder.fromByteString "\r\n" `mappend`
mconcat (map header headers) `mappend`
Builder.copyByteString "\r\n"
where
header (k, v) = mconcat $ map Builder.copyByteString
[CI.original k, ": ", v, "\r\n"]
--------------------------------------------------------------------------------
encodeRequest :: Request -> Builder.Builder
encodeRequest (Request head' body) =
encodeRequestHead head' `mappend` Builder.copyByteString body
--------------------------------------------------------------------------------
-- | Parse an initial request
decodeRequestHead :: Bool -> A.Parser RequestHead
decodeRequestHead isSecure = RequestHead
<$> requestLine
<*> A.manyTill decodeHeaderLine newline
<*> pure isSecure
where
space = A.word8 (c2w ' ')
newline = A.string "\r\n"
requestLine = A.string "GET" *> space *> A.takeWhile1 (/= c2w ' ')
<* space
<* A.string "HTTP/1.1" <* newline
--------------------------------------------------------------------------------
-- | Encode an HTTP upgrade response
encodeResponseHead :: ResponseHead -> Builder.Builder
encodeResponseHead (ResponseHead code msg headers) =
Builder.copyByteString "HTTP/1.1 " `mappend`
Builder.fromString (show code) `mappend`
Builder.fromChar ' ' `mappend`
Builder.fromByteString msg `mappend`
Builder.fromByteString "\r\n" `mappend`
mconcat (map header headers) `mappend`
Builder.copyByteString "\r\n"
where
header (k, v) = mconcat $ map Builder.copyByteString
[CI.original k, ": ", v, "\r\n"]
--------------------------------------------------------------------------------
encodeResponse :: Response -> Builder.Builder
encodeResponse (Response head' body) =
encodeResponseHead head' `mappend` Builder.copyByteString body
--------------------------------------------------------------------------------
-- | An upgrade response
response101 :: Headers -> B.ByteString -> Response
response101 headers = Response
(ResponseHead 101 "WebSocket Protocol Handshake"
(("Upgrade", "websocket") : ("Connection", "Upgrade") : headers))
--------------------------------------------------------------------------------
-- | Bad request
response400 :: Headers -> B.ByteString -> Response
response400 headers = Response (ResponseHead 400 "Bad Request" headers)
--------------------------------------------------------------------------------
-- | HTTP response parser
decodeResponseHead :: A.Parser ResponseHead
decodeResponseHead = ResponseHead
<$> fmap (read . BC.unpack) code
<*> message
<*> A.manyTill decodeHeaderLine newline
where
space = A.word8 (c2w ' ')
newline = A.string "\r\n"
code = A.string "HTTP/1.1" *> space *> A.takeWhile1 (/= c2w ' ') <* space
message = A.takeWhile1 (/= c2w '\r') <* newline
--------------------------------------------------------------------------------
decodeResponse :: A.Parser Response
decodeResponse = Response <$> decodeResponseHead <*> A.takeByteString
--------------------------------------------------------------------------------
getRequestHeader :: RequestHead
-> CI.CI ByteString
-> ByteString
getRequestHeader rq key = case lookup key (requestHeaders rq) of
Just t -> t
Nothing -> throw $ MalformedRequest rq $
"Header missing: " ++ BC.unpack (CI.original key)
--------------------------------------------------------------------------------
getResponseHeader :: ResponseHead
-> CI.CI ByteString
-> ByteString
getResponseHeader rsp key = case lookup key (responseHeaders rsp) of
Just t -> t
Nothing -> throw $ MalformedResponse rsp $
"Header missing: " ++ BC.unpack (CI.original key)
--------------------------------------------------------------------------------
-- | Get the @Sec-WebSocket-Version@ header
getRequestSecWebSocketVersion :: RequestHead -> Maybe B.ByteString
getRequestSecWebSocketVersion p =
lookup "Sec-WebSocket-Version" (requestHeaders p)
--------------------------------------------------------------------------------
-- | List of subprotocols specified by the client, in order of preference.
-- If the client did not specify a list of subprotocols, this will be the
-- empty list.
getRequestSubprotocols :: RequestHead -> [B.ByteString]
getRequestSubprotocols rh = maybe [] parse mproto
where
mproto = lookup "Sec-WebSocket-Protocol" $ requestHeaders rh
parse = filter (not . B.null) . BC.splitWith (\o -> o == ',' || o == ' ')
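-- For example (an illustration, not a test in this module): a request whose
-- header reads @Sec-WebSocket-Protocol: chat, superchat@ yields
-- @["chat", "superchat"]@, while a request without that header yields @[]@.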
--------------------------------------------------------------------------------
decodeHeaderLine :: A.Parser (CI.CI ByteString, ByteString)
decodeHeaderLine = (,)
<$> (CI.mk <$> A.takeWhile1 (/= c2w ':'))
<* A.word8 (c2w ':')
<* A.option (c2w ' ') (A.word8 (c2w ' '))
<*> A.takeWhile (/= c2w '\r')
<* A.string "\r\n"
| nsluss/websockets | src/Network/WebSockets/Http.hs | bsd-3-clause | 9,189 | 0 | 14 | 1,825 | 1,647 | 919 | 728 | 147 | 2 |
{-# LANGUAGE ScopedTypeVariables, EmptyDataDecls, DoRec #-}
import FRP.Sodium
import Control.Applicative
import Control.Monad
import Control.Monad.Trans
import Data.Char
import Data.IORef
import Test.HUnit
event1 = TestCase $ do
(ev, push) <- sync newEvent
outRef <- newIORef ""
sync $ do
push '?'
unlisten <- sync $ do
push 'h'
unlisten <- listen ev $ \letter -> modifyIORef outRef (++ [letter])
push 'e'
return unlisten
sync $ do
push 'l'
push 'l'
push 'o'
unlisten
sync $ do
push '!'
    out <- readIORef outRef
    assertEqual "event1" "hello" out
fmap1 = TestCase $ do
(ev, push) <- sync newEvent
outRef <- newIORef ""
sync $ do
listen (toUpper `fmap` ev) $ \letter -> modifyIORef outRef (++ [letter])
push 'h'
push 'e'
push 'l'
push 'l'
push 'o'
    out <- readIORef outRef
    assertEqual "fmap1" "HELLO" out
merge1 = TestCase $ do
(ev1, push1) <- sync newEvent
(ev2, push2) <- sync newEvent
let ev = merge ev1 ev2
outRef <- newIORef []
unlisten <- sync $ listen ev $ \a -> modifyIORef outRef (++ [a])
sync $ do
push1 "hello"
push2 "world"
sync $ push1 "people"
sync $ push1 "everywhere"
unlisten
assertEqual "merge1" ["hello","world","people","everywhere"] =<< readIORef outRef
filterJust1 = TestCase $ do
(ema, push) <- sync newEvent
outRef <- newIORef []
sync $ do
listen (filterJust ema) $ \a -> modifyIORef outRef (++ [a])
push (Just "yes")
push Nothing
push (Just "no")
assertEqual "filterJust1" ["yes", "no"] =<< readIORef outRef
filterE1 = TestCase $ do
(ec, push) <- sync newEvent
outRef <- newIORef ""
sync $ do
let ed = filterE isDigit ec
listen ed $ \a -> modifyIORef outRef (++ [a])
push 'a'
push '2'
push 'X'
push '3'
assertEqual "filterE1" "23" =<< readIORef outRef
gate1 = TestCase $ do
(c, pushc) <- sync newEvent
(pred, pushPred) <- sync $ newBehavior True
outRef <- newIORef []
unlisten <- sync $ listen (gate c pred) $ \a -> modifyIORef outRef (++ [a])
sync $ pushc 'H'
sync $ pushPred False
sync $ pushc 'O'
sync $ pushPred True
sync $ pushc 'I'
unlisten
assertEqual "gate1" "HI" =<< readIORef outRef
beh1 = TestCase $ do
outRef <- newIORef []
(push, unlisten) <- sync $ do
(beh, push) <- newBehavior "init"
unlisten <- listen (value beh) $ \a -> modifyIORef outRef (++ [a])
return (push, unlisten)
sync $ do
push "next"
unlisten
assertEqual "beh1" ["init", "next"] =<< readIORef outRef
beh2 = TestCase $ do
outRef <- newIORef []
(push, unlisten) <- sync $ do
(beh, push) <- newBehavior "init"
unlisten <- listen (value beh) $ \a -> modifyIORef outRef (++ [a])
return (push, unlisten)
unlisten
sync $ do
push "next"
assertEqual "beh2" ["init"] =<< readIORef outRef
beh3 = TestCase $ do
outRef <- newIORef []
(push, unlisten) <- sync $ do
(beh, push) <- newBehavior "init"
unlisten <- listen (value beh) $ \a -> modifyIORef outRef (++ [a])
return (push, unlisten)
sync $ do
push "first"
push "second"
unlisten
assertEqual "beh3" ["init", "second"] =<< readIORef outRef
-- | This demonstrates the fact that if there are multiple updates to a behaviour
-- in a given transaction, the last one prevails in the result of 'value beh'.
beh4 = TestCase $ do
outRef <- newIORef []
(push, unlisten) <- sync $ do
(beh, push) <- newBehavior "init"
unlisten <- listen (value beh) $ \a -> modifyIORef outRef (++ [a])
push "other"
return (push, unlisten)
sync $ do
push "first"
push "second"
unlisten
assertEqual "beh4" ["other", "second"] =<< readIORef outRef
-- | This demonstrates the fact that if there are multiple updates to a behaviour
-- in a given transaction, the last one prevails in the result of 'updates beh'.
beh5 = TestCase $ do
outRef <- newIORef []
(push, unlisten) <- sync $ do
(beh, push) <- newBehavior "init"
unlisten <- listen (updates beh) $ \a -> modifyIORef outRef (++ [a])
return (push, unlisten)
sync $ do
push "first"
push "second"
unlisten
assertEqual "beh4" ["second"] =<< readIORef outRef
beh6 = TestCase $ do
(ea, push) <- sync newEvent
outRef <- newIORef []
unlisten <- sync $ do
beh <- hold "init" ea
unlisten <- listen (map toUpper <$> value beh) $ \a -> modifyIORef outRef (++ [a])
push "other"
return unlisten
sync $ do
push "first"
push "second"
unlisten
assertEqual "beh5" ["OTHER", "SECOND"] =<< readIORef outRef
behConstant = TestCase $ do
outRef <- newIORef []
unlisten <- sync $ listen (value $ pure 'X') $ \a -> modifyIORef outRef (++ [a])
unlisten
assertEqual "behConstant" ['X'] =<< readIORef outRef
valueThenMap = TestCase $ do
(b, push) <- sync $ newBehavior 9
outRef <- newIORef []
unlisten <- sync $ listen (value . fmap (+100) $ b) $ \a -> modifyIORef outRef (++ [a])
sync $ push (2 :: Int)
sync $ push 7
unlisten
assertEqual "valueThenMap" [109,102,107] =<< readIORef outRef
-- | This is used for tests where value() produces a single initial value on listen,
-- and then we double that up by causing that single initial event to be repeated.
-- This needs testing separately, because the code must be done carefully to achieve this.
doubleUp :: Event a -> Event a
doubleUp e = merge e e
valueTwiceThenMap = TestCase $ do
(b, push) <- sync $ newBehavior 9
outRef <- newIORef []
unlisten <- sync $ listen (doubleUp . value . fmap (+100) $ b) $ \a -> modifyIORef outRef (++ [a])
sync $ push (2 :: Int)
sync $ push 7
unlisten
assertEqual "valueThenMap" [109,109,102,102,107,107] =<< readIORef outRef
valueThenCoalesce = TestCase $ do
(b, push) <- sync $ newBehavior 9
outRef <- newIORef []
unlisten <- sync $ listen (coalesce (\_ x -> x) . value $ b) $ \a -> modifyIORef outRef (++ [a])
sync $ push 2
sync $ push 7
unlisten
assertEqual "valueThenCoalesce" [9,2,7] =<< readIORef outRef
valueTwiceThenCoalesce = TestCase $ do
(b, push) <- sync $ newBehavior 9
outRef <- newIORef []
unlisten <- sync $ listen (coalesce (+) . doubleUp. value $ b) $ \a -> modifyIORef outRef (++ [a])
sync $ push 2
sync $ push 7
unlisten
assertEqual "valueThenCoalesce" [18,4,14] =<< readIORef outRef
valueThenSnapshot = TestCase $ do
(bi, pushi) <- sync $ newBehavior (9 :: Int)
(bc, pushc) <- sync $ newBehavior 'a'
outRef <- newIORef []
unlisten <- sync $ listen (flip (snapshot (flip const)) bc . value $ bi) $ \a -> modifyIORef outRef (++ [a])
sync $ pushc 'b'
sync $ pushi 2
sync $ pushc 'c'
sync $ pushi 7
unlisten
assertEqual "valueThenSnapshot" ['a','b','c'] =<< readIORef outRef
valueTwiceThenSnapshot = TestCase $ do
(bi, pushi) <- sync $ newBehavior (9 :: Int)
(bc, pushc) <- sync $ newBehavior 'a'
outRef <- newIORef []
unlisten <- sync $ listen (flip (snapshot (flip const)) bc . doubleUp . value $ bi) $ \a -> modifyIORef outRef (++ [a])
sync $ pushc 'b'
sync $ pushi 2
sync $ pushc 'c'
sync $ pushi 7
unlisten
assertEqual "valueThenSnapshot" ['a','a','b','b','c','c'] =<< readIORef outRef
valueThenMerge = TestCase $ do
(bi, pushi) <- sync $ newBehavior (9 :: Int)
(bj, pushj) <- sync $ newBehavior (2 :: Int)
outRef <- newIORef []
unlisten <- sync $ listen (mergeWith (+) (value bi) (value bj)) $ \a -> modifyIORef outRef (++ [a])
sync $ pushi 1
sync $ pushj 4
unlisten
assertEqual "valueThenMerge" [11,1,4] =<< readIORef outRef
valueThenFilter = TestCase $ do
(b, push) <- sync $ newBehavior (9 :: Int)
outRef <- newIORef []
unlisten <- sync $ listen (filterE (const True) . value $ b) $ \a -> modifyIORef outRef (++ [a])
sync $ push 2
sync $ push 7
unlisten
assertEqual "valueThenFilter" [9,2,7] =<< readIORef outRef
valueTwiceThenFilter = TestCase $ do
(b, push) <- sync $ newBehavior (9 :: Int)
outRef <- newIORef []
unlisten <- sync $ listen (filterE (const True) . doubleUp . value $ b) $ \a -> modifyIORef outRef (++ [a])
sync $ push 2
sync $ push 7
unlisten
assertEqual "valueThenFilter" [9,9,2,2,7,7] =<< readIORef outRef
valueThenOnce = TestCase $ do
(b, push) <- sync $ newBehavior (9 :: Int)
outRef <- newIORef []
unlisten <- sync $ listen (once . value $ b) $ \a -> modifyIORef outRef (++ [a])
sync $ push 2
sync $ push 7
unlisten
assertEqual "valueThenOnce" [9] =<< readIORef outRef
valueTwiceThenOnce = TestCase $ do
(b, push) <- sync $ newBehavior (9 :: Int)
outRef <- newIORef []
unlisten <- sync $ listen (once . doubleUp . value $ b) $ \a -> modifyIORef outRef (++ [a])
sync $ push 2
sync $ push 7
unlisten
assertEqual "valueThenOnce" [9] =<< readIORef outRef
-- | Test value being "executed" before listen. Somewhat redundant since this is
-- Haskell and "value b" is pure.
valueLateListen = TestCase $ do
(b, push) <- sync $ newBehavior (9 :: Int)
outRef <- newIORef []
let bv = value b
sync $ push 8
unlisten <- sync $ listen bv $ \a -> modifyIORef outRef (++ [a])
sync $ push 2
unlisten
assertEqual "valueLateListen" [8,2] =<< readIORef outRef
appl1 = TestCase $ do
(ea, pusha) <- sync newEvent
ba <- sync $ hold 0 ea
(eb, pushb) <- sync newEvent
bb <- sync $ hold 0 eb
let esum = (+) <$> ba <*> bb
outRef <- newIORef []
unlisten <- sync $ listen (value esum) $ \sum -> modifyIORef outRef (++ [sum])
sync $ pusha 5
sync $ pushb 100
sync $ pusha 10 >> pushb 200
unlisten
assertEqual "appl1" [0, 5, 105, 210] =<< readIORef outRef
snapshot1 = TestCase $ do
(ea, pusha) <- sync newEvent
(eb, pushb) <- sync newEvent
bb <- sync $ hold 0 eb
let ec = snapshot (,) ea bb
outRef <- newIORef []
unlisten <- sync $ listen ec $ \c -> modifyIORef outRef (++ [c])
sync $ pusha 'A'
sync $ pushb 50
sync $ pusha 'B'
sync $ pusha 'C' >> pushb 60
sync $ pusha 'D'
unlisten
assertEqual "snapshot1" [('A',0),('B',50),('C',50),('D',60)] =<< readIORef outRef
holdIsDelayed = TestCase $ do
(e, push) <- sync newEvent
h <- sync $ hold (0 :: Int) e
let pair = snapshot (\a b -> show a ++ " " ++ show b) e h
outRef <- newIORef []
unlisten <- sync $ listen pair $ \a -> modifyIORef outRef (++ [a])
sync $ push 2
sync $ push 3
unlisten
assertEqual "holdIsDelayed" ["2 0", "3 2"] =<< readIORef outRef
collect1 = TestCase $ do
(ea, push) <- sync newEvent
outRef <- newIORef []
unlisten <- sync $ do
ba <- hold 100 ea
sum <- collect (\a s -> (a+s, a+s)) 0 ba
listen (value sum) $ \sum -> modifyIORef outRef (++ [sum])
sync $ push 5
sync $ push 7
sync $ push 1
sync $ push 2
sync $ push 3
unlisten
assertEqual "collect1" [100, 105, 112, 113, 115, 118] =<< readIORef outRef
collect2 = TestCase $ do
outRef <- newIORef []
-- This is a bit of an edge case.
(unlisten, push) <- sync $ do
(ba, push) <- newBehavior 100
sum <- collect (\a s -> (a + s, a + s)) 0 ba
push 5
unlisten <- listen (value sum) $ \sum -> modifyIORef outRef (++ [sum])
return (unlisten, push)
sync $ push 7
sync $ push 1
unlisten
assertEqual "collect2" [105, 112, 113] =<< readIORef outRef
collectE1 = TestCase $ do
(ea, push) <- sync newEvent
outRef <- newIORef []
unlisten <- sync $ do
sum <- collectE (\a s -> (a+s, a+s)) 100 ea
listen sum $ \sum -> modifyIORef outRef (++ [sum])
sync $ push 5
sync $ push 7
sync $ push 1
sync $ push 2
sync $ push 3
unlisten
assertEqual "collectE1" [105, 112, 113, 115, 118] =<< readIORef outRef
collectE2 = TestCase $ do
(ea, push) <- sync newEvent
outRef <- newIORef []
-- This behaviour is a little bit odd but difficult to fix in the
-- implementation. However, it shouldn't be too much of a problem in
-- practice. Here we are defining it.
unlisten <- sync $ do
sum <- collectE (\a s -> (a + s, a + s)) 100 ea
push 5
listen sum $ \sum -> modifyIORef outRef (++ [sum])
sync $ push 7
sync $ push 1
unlisten
assertEqual "collectE2" [105, 112, 113] =<< readIORef outRef
switchE1 = TestCase $ do
(ea, pusha) <- sync newEvent
(eb, pushb) <- sync newEvent
(esw, pushsw) <- sync newEvent
outRef <- newIORef []
unlisten <- sync $ do
sw <- hold ea esw
let eo = switchE sw
unlisten <- listen eo $ \o -> modifyIORef outRef (++ [o])
pusha 'A'
pushb 'a'
return unlisten
sync $ pusha 'B' >> pushb 'b'
sync $ pushsw eb >> pusha 'C' >> pushb 'c'
sync $ pusha 'D' >> pushb 'd'
sync $ pusha 'E' >> pushb 'e' >> pushsw ea
sync $ pusha 'F' >> pushb 'f'
sync $ pusha 'G' >> pushb 'g' >> pushsw eb
sync $ pusha 'H' >> pushb 'h' >> pushsw ea
sync $ pusha 'I' >> pushb 'i' >> pushsw ea
unlisten
assertEqual "switchE1" "ABCdeFGhI" =<< readIORef outRef
switch1 = TestCase $ do
outRef <- newIORef []
(ba, bb, pusha, pushb, pushsw, unlisten) <- sync $ do
(ba, pusha) <- newBehavior 'A'
(bb, pushb) <- newBehavior 'a'
(bsw, pushsw) <- newBehavior ba
bo <- switch bsw
unlisten <- listen (value bo) $ \o -> modifyIORef outRef (++ [o])
return (ba, bb, pusha, pushb, pushsw, unlisten)
sync $ pusha 'B' >> pushb 'b'
sync $ pushsw bb >> pusha 'C' >> pushb 'c'
sync $ pusha 'D' >> pushb 'd'
sync $ pusha 'E' >> pushb 'e' >> pushsw ba
sync $ pusha 'F' >> pushb 'f'
sync $ pushsw bb
sync $ pushsw ba
sync $ pusha 'G' >> pushb 'g' >> pushsw bb
sync $ pusha 'H' >> pushb 'h' >> pushsw ba
sync $ pusha 'I' >> pushb 'i' >> pushsw ba
unlisten
assertEqual "switch1" "ABcdEFfFgHI" =<< readIORef outRef
once1 = TestCase $ do
(ea, pusha) <- sync newEvent
outRef <- newIORef []
unlisten <- sync $ do
listen (once ea) $ \a -> modifyIORef outRef (++ [a])
sync $ pusha 'A'
sync $ pusha 'B'
sync $ pusha 'C'
unlisten
assertEqual "once1" "A" =<< readIORef outRef
once2 = TestCase $ do
(ea, pusha) <- sync newEvent
outRef <- newIORef []
unlisten <- sync $ do
pusha 'A'
listen (once ea) $ \a -> modifyIORef outRef (++ [a])
sync $ pusha 'B'
sync $ pusha 'C'
unlisten
assertEqual "once2" "A" =<< readIORef outRef
data Page = Page { unPage :: Reactive (Char, Event Page) }
cycle1 = TestCase $ do
outRef <- newIORef []
(ep, push) <- sync newEvent
bo <- sync $ do
let initPair = ('a', ep)
rec
bPair <- hold initPair ePage
let ePage = execute $ unPage <$> switchE (snd <$> bPair)
return (fst <$> bPair)
unlisten <- sync $ listen (value bo) $ \o -> modifyIORef outRef (++ [o])
sync $ push (Page $ return ('b', ep))
sync $ push (Page $ return ('c', ep))
unlisten
assertEqual "cycle1" "abc" =<< readIORef outRef
mergeWith1 = TestCase $ do
outRef <- newIORef []
(ea, pushA) <- sync newEvent
(eb, pushB) <- sync newEvent
unlisten <- sync $ do
pushA 5
listen (mergeWith (+) ea eb) $ \o -> modifyIORef outRef (++ [o])
sync $ pushA 2
sync $ pushB 3
sync $ pushA 10 >> pushB 4
sync $ pushB 7 >> pushA 1
unlisten
assertEqual "mergeWith1" [5,2,3,14,8] =<< readIORef outRef
mergeWith2 = TestCase $ do
outRef <- newIORef []
(ea, pushA) <- sync newEvent
(eb, pushB) <- sync newEvent
unlisten <- sync $ do
pushA 5
unlisten <- listen (mergeWith (+) ea eb) $ \o -> modifyIORef outRef (++ [o])
pushB 99
return unlisten
unlisten
assertEqual "mergeWith2" [104] =<< readIORef outRef
mergeWith3 = TestCase $ do
outRef <- newIORef []
(ea, pushA) <- sync newEvent
(eb, pushB) <- sync newEvent
unlisten <- sync $ do
listen (mergeWith (+) ea eb) $ \o -> modifyIORef outRef (++ [o])
sync $ pushA 2
sync $ pushB 3 >> pushB 1 >> pushA 10
sync $ pushB 9 >> pushB 11 >> pushB 12
sync $ pushA 32 >> pushA 11 >> pushA 12
unlisten
assertEqual "mergeWith3" [2,14,32,55] =<< readIORef outRef
coalesce1 = TestCase $ do
outRef <- newIORef []
(ea, pushA) <- sync newEvent
(eb, pushB) <- sync newEvent
unlisten <- sync $ do
listen (coalesce (+) (merge ea eb)) $ \o -> modifyIORef outRef (++ [o])
sync $ pushA 2
sync $ pushA 5 >> pushB 6
unlisten
assertEqual "coalesce1" [2, 11] =<< readIORef outRef
split1 = TestCase $ do
outRef <- newIORef []
(ea, pushA) <- sync newEvent
let ewords = coalesce (++) $ split $ words <$> ea
unlisten <- sync $ listen ewords $ \o -> modifyIORef outRef (++ [o])
sync $ pushA "the common cormorant"
sync $ pushA "or shag"
unlisten
assertEqual "split1" ["the","common","cormorant","or","shag"] =<< readIORef outRef
split2 = TestCase $ do
outRef <- newIORef []
(ea, pushA) <- sync newEvent
let halve [] = []
halve [_] = []
halve str = [take (length str `div` 2) str,
drop (length str `div` 2) str]
ehalves = split $ (halve <$> ea) `merge` (halve <$> ehalves)
unlisten <- sync $ listen ehalves $ \o -> modifyIORef outRef (++ [o])
sync $ pushA "abcdefgh"
unlisten
assertEqual "split2" ["abcd","ab","a","b","cd","c","d",
"efgh","ef","e","f","gh","g","h"] =<< readIORef outRef
tests = test [ event1, fmap1, merge1, filterJust1, filterE1, gate1, beh1, beh2, beh3, beh4, beh5,
behConstant, valueThenMap, valueTwiceThenMap, valueThenCoalesce, valueTwiceThenCoalesce,
valueThenSnapshot, valueTwiceThenSnapshot, valueThenMerge, valueThenFilter,
valueTwiceThenFilter, valueThenOnce, valueTwiceThenOnce, valueLateListen,
holdIsDelayed, appl1, snapshot1, collect1, collect2, collectE1, collectE2, switchE1,
switch1, once1, once2, cycle1, split1, split2 {-, mergeWith1, mergeWith2, mergeWith3,
coalesce1-} ]
main = {-forever $ -} runTestTT tests
| kevintvh/sodium | haskell/examples/tests/unit-tests.hs | bsd-3-clause | 18,660 | 0 | 20 | 5,253 | 7,718 | 3,717 | 4,001 | 502 | 3 |
{-|
Module : Idris.PartialEval
Description : Implementation of a partial evaluator.
Copyright :
License : BSD3
Maintainer : The Idris Community.
-}
{-# LANGUAGE FlexibleContexts, PatternGuards #-}
module Idris.PartialEval(
partial_eval, getSpecApps, specType
, mkPE_TyDecl, mkPE_TermDecl, PEArgType(..)
, pe_app, pe_def, pe_clauses, pe_simple
) where
import Idris.AbsSyntax
import Idris.Core.CaseTree
import Idris.Core.Evaluate
import Idris.Core.TT
import Idris.Delaborate
import Control.Applicative
import Control.Monad.State
import Data.Maybe
import Debug.Trace
-- | Data type representing binding-time annotations for partial evaluation of arguments
data PEArgType = ImplicitS Name -- ^ Implicit static argument
| ImplicitD Name -- ^ Implicit dynamic argument
| ConstraintS -- ^ Implementation constraint
| ConstraintD -- ^ Implementation constraint
| ExplicitS -- ^ Explicit static argument
| ExplicitD -- ^ Explicit dynamic argument
| UnifiedD -- ^ Erasable dynamic argument (found under unification)
deriving (Eq, Show)
-- | A partially evaluated function. pe_app captures the lhs of the
-- new definition, pe_def captures the rhs, and pe_clauses is the
-- specialised implementation.
--
-- pe_simple is set if the result is always reducible, because in such
-- a case we'll also need to reduce the static argument
data PEDecl = PEDecl { pe_app :: PTerm, -- new application
pe_def :: PTerm, -- old application
pe_clauses :: [(PTerm, PTerm)], -- clauses of new application
pe_simple :: Bool -- if just one reducible clause
}
-- | Partially evaluates given terms under the given context.
-- It is an error if partial evaluation fails to make any progress.
-- Making progress is defined as: all of the names given with explicit
-- reduction limits (in practice, the function being specialised)
-- must have reduced at least once.
-- If we don't do this, we might end up making an infinite function after
-- applying the transformation.
partial_eval :: Context
-> [(Name, Maybe Int)]
-> [Either Term (Term, Term)]
-> Maybe [Either Term (Term, Term)]
partial_eval ctxt ns_in tms = mapM peClause tms where
ns = squash ns_in
squash ((n, Just x) : ns)
| Just (Just y) <- lookup n ns
= squash ((n, Just (x + y)) : drop n ns)
| otherwise = (n, Just x) : squash ns
squash (n : ns) = n : squash ns
squash [] = []
drop n ((m, _) : ns) | n == m = ns
drop n (x : ns) = x : drop n ns
drop n [] = []
-- If the term is not a clause, it is simply kept as is
peClause (Left t) = Just $ Left t
-- If the term is a clause, specialise the right hand side
peClause (Right (lhs, rhs))
= let (rhs', reductions) = specialise ctxt [] (map toLimit ns) rhs in
do when (length tms == 1) $ checkProgress ns reductions
return (Right (lhs, rhs'))
-- TMP HACK until I do PE by WHNF rather than using main evaluator
toLimit (n, Nothing) | isTCDict n ctxt = (n, 2)
toLimit (n, Nothing) = (n, 65536) -- somewhat arbitrary reduction limit
toLimit (n, Just l) = (n, l)
checkProgress ns [] = return ()
checkProgress ns ((n, r) : rs)
| Just (Just start) <- lookup n ns
= if start <= 1 || r < start then checkProgress ns rs else Nothing
| otherwise = checkProgress ns rs
-- | Specialises the type of a partially evaluated TT function returning
-- a pair of the specialised type and the types of expected arguments.
specType :: [(PEArgType, Term)] -> Type -> (Type, [(PEArgType, Term)])
specType args ty = let (t, args') = runState (unifyEq args ty) [] in
(st (map fst args') t, map fst args')
where
-- Specialise static argument in type by let-binding provided value instead
-- of expecting it as a function argument
st ((ExplicitS, v) : xs) (Bind n (Pi _ _ t _) sc)
= Bind n (Let t v) (st xs sc)
st ((ImplicitS _, v) : xs) (Bind n (Pi _ _ t _) sc)
= Bind n (Let t v) (st xs sc)
st ((ConstraintS, v) : xs) (Bind n (Pi _ _ t _) sc)
= Bind n (Let t v) (st xs sc)
-- Erase argument from function type
st ((UnifiedD, _) : xs) (Bind n (Pi _ _ t _) sc)
= st xs sc
-- Keep types as is
st (_ : xs) (Bind n (Pi rig i t k) sc)
= Bind n (Pi rig i t k) (st xs sc)
st _ t = t
-- Erase an implicit dynamic argument if an existing argument shares its value,
-- by substituting in the value of the previous argument
unifyEq (imp@(ImplicitD _, v) : xs) (Bind n (Pi rig i t k) sc)
= do amap <- get
case lookup imp amap of
Just n' ->
do put (amap ++ [((UnifiedD, Erased), n)])
sc' <- unifyEq xs (subst n (P Bound n' Erased) sc)
return (Bind n (Pi rig i t k) sc') -- erase later
_ -> do put (amap ++ [(imp, n)])
sc' <- unifyEq xs sc
return (Bind n (Pi rig i t k) sc')
unifyEq (x : xs) (Bind n (Pi rig i t k) sc)
= do args <- get
put (args ++ [(x, n)])
sc' <- unifyEq xs sc
return (Bind n (Pi rig i t k) sc')
unifyEq xs t = do args <- get
put (args ++ (zip xs (repeat (sUN "_"))))
return t
-- | Creates an Idris type declaration given current state and a
-- specialised TT function application type.
-- Can be used in combination with the output of 'specType'.
--
-- This should: specialise any static argument position, then generalise
-- over any function applications in the result.
mkPE_TyDecl :: IState -> [(PEArgType, Term)] -> Type -> PTerm
mkPE_TyDecl ist args ty = mkty args ty
where
mkty ((ExplicitD, v) : xs) (Bind n (Pi rig _ t k) sc)
= PPi expl n NoFC (delab ist (generaliseIn t)) (mkty xs sc)
mkty ((ConstraintD, v) : xs) (Bind n (Pi rig _ t k) sc)
| concreteInterface ist t = mkty xs sc
| interfaceConstraint ist t
= PPi constraint n NoFC (delab ist (generaliseIn t)) (mkty xs sc)
mkty ((ImplicitD _, v) : xs) (Bind n (Pi rig _ t k) sc)
= PPi impl n NoFC (delab ist (generaliseIn t)) (mkty xs sc)
mkty (_ : xs) t
= mkty xs t
mkty [] t = delab ist t
generaliseIn tm = evalState (gen tm) 0
gen tm | (P _ fn _, args) <- unApply tm,
isFnName fn (tt_ctxt ist)
= do nm <- get
put (nm + 1)
return (P Bound (sMN nm "spec") Erased)
gen (App s f a) = App s <$> gen f <*> gen a
gen tm = return tm
-- | Checks if a given argument is an interface constraint argument
interfaceConstraint :: Idris.AbsSyntax.IState -> TT Name -> Bool
interfaceConstraint ist v
| (P _ c _, args) <- unApply v = case lookupCtxt c (idris_interfaces ist) of
[_] -> True
_ -> False
| otherwise = False
-- | Checks if the given arguments of an interface constraint are all either constants
-- or references (i.e. that the constraint doesn't contain any complex terms).
concreteInterface :: IState -> TT Name -> Bool
concreteInterface ist v
| not (interfaceConstraint ist v) = False
| (P _ c _, args) <- unApply v = all concrete args
| otherwise = False
where concrete (Constant _) = True
concrete tm | (P _ n _, args) <- unApply tm
= case lookupTy n (tt_ctxt ist) of
[_] -> all concrete args
_ -> False
| otherwise = False
mkNewPats :: IState
-> [(Term, Term)] -- ^ definition to specialise
-> [(PEArgType, Term)] -- ^ arguments to specialise with
-> Name -- ^ New name
-> Name -- ^ Specialised function name
-> PTerm -- ^ Default lhs
-> PTerm -- ^ Default rhs
-> PEDecl
-- If all of the dynamic positions on the lhs are variables (rather than
-- patterns or constants) then we can just make a simple definition
-- directly applying the specialised function, since we know the
-- definition isn't going to block on any of the dynamic arguments
-- in this case
mkNewPats ist d ns newname sname lhs rhs | all dynVar (map fst d)
= PEDecl lhs rhs [(lhs, rhs)] True
where dynVar ap = case unApply ap of
(_, args) -> dynArgs ns args
dynArgs _ [] = True -- can definitely reduce from here
-- if Static, doesn't matter what the argument is
dynArgs ((ImplicitS _, _) : ns) (a : as) = dynArgs ns as
dynArgs ((ConstraintS, _) : ns) (a : as) = dynArgs ns as
dynArgs ((ExplicitS, _) : ns) (a : as) = dynArgs ns as
-- if Dynamic, it had better be a variable or we'll need to
-- do some more work
dynArgs (_ : ns) (V _ : as) = dynArgs ns as
dynArgs (_ : ns) (P _ _ _ : as) = dynArgs ns as
dynArgs _ _ = False -- and now we'll get stuck
mkNewPats ist d ns newname sname lhs rhs =
PEDecl lhs rhs (map mkClause d) False
where
mkClause :: (Term, Term) -> (PTerm, PTerm)
mkClause (oldlhs, oldrhs)
= let (_, as) = unApply oldlhs
lhsargs = mkLHSargs [] ns as
lhs = PApp emptyFC (PRef emptyFC [] newname) lhsargs
rhs = PApp emptyFC (PRef emptyFC [] sname)
(mkRHSargs ns lhsargs) in
(lhs, rhs)
mkLHSargs _ [] _ = []
-- dynamics don't appear on the LHS if they're implicit
mkLHSargs sub ((ExplicitD, t) : ns) (a : as)
= pexp (delab ist (substNames sub a)) : mkLHSargs sub ns as
mkLHSargs sub ((ImplicitD n, t) : ns) (a : as)
= pimp n (delab ist (substNames sub a)) True : mkLHSargs sub ns as
mkLHSargs sub ((ConstraintD, t) : ns) (a : as)
= pconst (delab ist (substNames sub a)) : mkLHSargs sub ns as
mkLHSargs sub ((UnifiedD, _) : ns) (a : as)
= mkLHSargs sub ns as
-- statics get dropped in any case
mkLHSargs sub ((ImplicitS _, t) : ns) (a : as)
= mkLHSargs (extend a t sub) ns as
mkLHSargs sub ((ExplicitS, t) : ns) (a : as)
= mkLHSargs (extend a t sub) ns as
mkLHSargs sub ((ConstraintS, t) : ns) (a : as)
= mkLHSargs (extend a t sub) ns as
mkLHSargs sub _ [] = [] -- no more LHS
extend (P _ n _) t sub = (n, t) : sub
extend _ _ sub = sub
--- 'as' are the LHS arguments
mkRHSargs ((ExplicitS, t) : ns) as = pexp (delab ist t) : mkRHSargs ns as
mkRHSargs ((ExplicitD, t) : ns) (a : as) = a : mkRHSargs ns as
-- Keep the implicits on the RHS, in case they got matched on
mkRHSargs ((ImplicitD n, t) : ns) (a : as) = a : mkRHSargs ns as
mkRHSargs ((ImplicitS n, t) : ns) as -- Dropped from LHS
= pimp n (delab ist t) True : mkRHSargs ns as
mkRHSargs ((ConstraintD, t) : ns) (a : as) = a : mkRHSargs ns as
mkRHSargs ((ConstraintS, t) : ns) as -- Dropped from LHS
= pconst (delab ist t) : mkRHSargs ns as
mkRHSargs (_ : ns) as = mkRHSargs ns as
mkRHSargs _ _ = []
mkSubst :: (Term, Term) -> Maybe (Name, Term)
mkSubst (P _ n _, t) = Just (n, t)
mkSubst _ = Nothing
-- | Creates a new declaration for a specialised function application.
-- Simple version at the moment: just create a version which is a direct
-- application of the function to be specialised.
-- More complex version to do: specialise the definition clause by clause
mkPE_TermDecl :: IState
-> Name
-> Name
-> PTerm -- ^ Type of specialised function
-> [(PEArgType, Term)]
-> PEDecl
mkPE_TermDecl ist newname sname specty ns
{- We need to erase the *dynamic* arguments
where their *name* appears in the *type* of a later argument
in specty.
i.e. if a later dynamic argument depends on an earlier dynamic
argument, we should infer the earlier one.
Then we need to erase names from the LHS which no longer appear
on the RHS.
-}
= let deps = getDepNames (eraseRet specty)
lhs = eraseDeps deps $
PApp emptyFC (PRef emptyFC [] newname) (mkp ns)
rhs = eraseDeps deps $
delab ist (mkApp (P Ref sname Erased) (map snd ns))
patdef = -- trace (showTmImpls specty ++ "\n" ++ showTmImpls lhs ++ "\n"
-- ++ showTmImpls rhs) $
lookupCtxtExact sname (idris_patdefs ist)
newpats = case patdef of
Nothing -> PEDecl lhs rhs [(lhs, rhs)] True
Just d -> mkNewPats ist (getPats d) ns
newname sname lhs rhs in
newpats where
getPats (ps, _) = map (\(_, lhs, rhs) -> (lhs, rhs)) ps
eraseRet (PPi p n fc ty sc) = PPi p n fc ty (eraseRet sc)
eraseRet _ = Placeholder
-- Get names used in later arguments; assume we've called eraseRet so there's
-- no names going to appear in return type
getDepNames (PPi _ n _ _ sc)
| n `elem` allNamesIn sc = n : getDepNames sc
| otherwise = getDepNames sc
getDepNames tm = []
mkp [] = []
mkp ((ExplicitD, tm) : tms) = pexp (delab ist tm) : mkp tms
mkp ((ImplicitD n, tm) : tms) = pimp n (delab ist tm) True : mkp tms
mkp (_ : tms) = mkp tms
eraseDeps ns tm = mapPT (deImp ns) tm
deImp ns (PApp fc t as) = PApp fc t (map (deImpArg ns) as)
deImp ns t = t
deImpArg ns a | pname a `elem` ns = a { getTm = Placeholder }
| otherwise = a
-- | Get specialised applications for a given function
getSpecApps :: IState
-> [Name]
-> Term
-> [(Name, [(PEArgType, Term)])]
getSpecApps ist env tm = ga env (explicitNames tm) where
-- staticArg env True _ tm@(P _ n _) _ | n `elem` env = Just (True, tm)
-- staticArg env True _ tm@(App f a) _ | (P _ n _, args) <- unApply tm,
-- n `elem` env = Just (True, tm)
staticArg env True imp tm n
| Just n <- imparg imp = (ImplicitS n, tm)
| constrarg imp = (ConstraintS, tm)
| otherwise = (ExplicitS, tm)
staticArg env False imp tm n
| Just nm <- imparg imp = (ImplicitD nm, (P Ref (sUN (show n ++ "arg")) Erased))
| constrarg imp = (ConstraintD, tm)
| otherwise = (ExplicitD, (P Ref (sUN (show n ++ "arg")) Erased))
imparg (PExp _ _ _ _) = Nothing
imparg (PConstraint _ _ _ _) = Nothing
imparg arg = Just (pname arg)
constrarg (PConstraint _ _ _ _) = True
constrarg arg = False
buildApp env [] [] _ _ = []
buildApp env (s:ss) (i:is) (a:as) (n:ns)
= let s' = staticArg env s i a n
ss' = buildApp env ss is as ns in
(s' : ss')
-- if we have a *defined* function that has static arguments,
-- it will become a specialised application
ga env tm@(App _ f a) | (P _ n _, args) <- unApply tm,
n `notElem` map fst (idris_metavars ist) =
ga env f ++ ga env a ++
case (lookupCtxtExact n (idris_statics ist),
lookupCtxtExact n (idris_implicits ist)) of
(Just statics, Just imps) ->
if (length statics == length args && or statics
&& specialisable (tt_ctxt ist) n) then
case buildApp env statics imps args [0..] of
args -> [(n, args)]
-- _ -> []
else []
_ -> []
ga env (Bind n (Let t v) sc) = ga env v ++ ga (n : env) sc
ga env (Bind n t sc) = ga (n : env) sc
ga env t = []
-- A function is only specialisable if there are no overlapping
-- cases in the case tree (otherwise the partial evaluation could
-- easily get stuck)
specialisable :: Context -> Name -> Bool
specialisable ctxt n = case lookupDefExact n ctxt of
Just (CaseOp _ _ _ _ _ cds) ->
noOverlap (snd (cases_compiletime cds))
_ -> False
noOverlap :: SC -> Bool
noOverlap (Case _ _ [DefaultCase sc]) = noOverlap sc
noOverlap (Case _ _ alts) = noOverlapAlts alts
noOverlap _ = True
-- There's an overlap if the case tree has a default case along with
-- some other cases. It's fine if there's a default case on its own.
noOverlapAlts (ConCase _ _ _ sc : rest)
= noOverlapAlts rest && noOverlap sc
noOverlapAlts (FnCase _ _ sc : rest) = noOverlapAlts rest
noOverlapAlts (ConstCase _ sc : rest)
= noOverlapAlts rest && noOverlap sc
noOverlapAlts (SucCase _ sc : rest)
= noOverlapAlts rest && noOverlap sc
noOverlapAlts (DefaultCase _ : _) = False
noOverlapAlts _ = True
| markuspf/Idris-dev | src/Idris/PartialEval.hs | bsd-3-clause | 17,181 | 2 | 20 | 5,699 | 5,481 | 2,833 | 2,648 | 275 | 24 |
--
-- Pretend long copyright notice is here.
--
module Comment1 where
main = print "Hello, complicated world!"
| charleso/intellij-haskforce | tests/gold/parser/Comment00001.hs | apache-2.0 | 115 | 0 | 5 | 20 | 15 | 10 | 5 | 2 | 1 |
module Tinc.GhcInfo where
import System.Process
import Tinc.GhcPkg
import Tinc.Types
import Tinc.Fail
data GhcInfo = GhcInfo {
ghcInfoPlatform :: String
, ghcInfoVersion :: String
, ghcInfoGlobalPackageDb :: Path PackageDb
} deriving (Eq, Show)
getGhcInfo :: IO GhcInfo
getGhcInfo = do
fields <- read <$> readProcess "ghc" ["--info"] ""
let lookupField :: String -> IO String
lookupField name = do
let err = "Output from `ghc --info` does not contain the field " ++ show name
maybe (dieLoc err) return (lookup name fields)
GhcInfo
<$> lookupField "Target platform"
<*> lookupField "Project version"
<*> (Path <$> lookupField "Global Package DB")
ghcFlavor :: GhcInfo -> String
ghcFlavor ghcInfo = ghcInfoPlatform ghcInfo ++ "-ghc-" ++ ghcInfoVersion ghcInfo
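-- A hedged illustration (not part of the original module): with a
-- hypothetical value such as
--
-- > GhcInfo "x86_64-unknown-linux" "8.0.2" (Path "/usr/lib/ghc-8.0.2/package.conf.d")
--
-- 'ghcFlavor' yields "x86_64-unknown-linux-ghc-8.0.2", i.e. the target
-- platform and compiler version joined with "-ghc-".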
| haskell-tinc/tinc | src/Tinc/GhcInfo.hs | bsd-3-clause | 845 | 0 | 16 | 199 | 227 | 115 | 112 | 23 | 1 |
{-
Copyright (C) 2007 John MacFarlane <[email protected]>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
{- |
Module : Text.Pandoc.Writers.Man
Copyright : Copyright (C) 2007 John MacFarlane
License : GNU GPL, version 2 or above
Maintainer : John MacFarlane <[email protected]>
Stability : alpha
Portability : portable
Conversion of 'Pandoc' documents to groff man page format.
-}
module Text.Pandoc.Writers.Man ( writeMan) where
import Text.Pandoc.Definition
import Text.Pandoc.Templates
import Text.Pandoc.Shared
import Text.Printf ( printf )
import Data.List ( isPrefixOf, intersperse, intercalate )
import Text.PrettyPrint.HughesPJ hiding ( Str )
import Control.Monad.State
type Notes = [[Block]]
data WriterState = WriterState { stNotes :: Notes
, stHasTables :: Bool }
-- | Convert Pandoc to Man.
writeMan :: WriterOptions -> Pandoc -> String
writeMan opts document = evalState (pandocToMan opts document) (WriterState [] False)
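-- A hedged usage sketch (not part of the original source): with standalone
-- output disabled, a minimal call could be
--
-- > writeMan defaultWriterOptions (Pandoc (Meta [] [] []) [Para [Str "hi"]])
--
-- assuming 'defaultWriterOptions' (from Text.Pandoc.Shared in this version
-- of pandoc) is in scope; it returns only the rendered body, without the
-- man-page template header that standalone mode would add.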
-- | Return groff man representation of document.
pandocToMan :: WriterOptions -> Pandoc -> State WriterState String
pandocToMan opts (Pandoc (Meta title authors date) blocks) = do
titleText <- inlineListToMan opts title
authors' <- mapM (inlineListToMan opts) authors
date' <- inlineListToMan opts date
let (cmdName, rest) = break (== ' ') $ render titleText
let (title', section) = case reverse cmdName of
(')':d:'(':xs) | d `elem` ['0'..'9'] ->
(text (reverse xs), char d)
xs -> (text (reverse xs), doubleQuotes empty)
let description = hsep $
map (doubleQuotes . text . removeLeadingTrailingSpace) $
splitBy '|' rest
body <- blockListToMan opts blocks
notes <- liftM stNotes get
notes' <- notesToMan opts (reverse notes)
let main = render $ body $$ notes'
hasTables <- liftM stHasTables get
let context = writerVariables opts ++
[ ("body", main)
, ("title", render title')
, ("section", render section)
, ("date", render date')
, ("description", render description) ] ++
[ ("has-tables", "yes") | hasTables ] ++
[ ("author", render a) | a <- authors' ]
if writerStandalone opts
then return $ renderTemplate context $ writerTemplate opts
else return main
-- | Return man representation of notes.
notesToMan :: WriterOptions -> [[Block]] -> State WriterState Doc
notesToMan opts notes =
if null notes
then return empty
else mapM (\(num, note) -> noteToMan opts num note) (zip [1..] notes) >>=
return . (text ".SH NOTES" $$) . vcat
-- | Return man representation of a note.
noteToMan :: WriterOptions -> Int -> [Block] -> State WriterState Doc
noteToMan opts num note = do
contents <- blockListToMan opts note
let marker = text "\n.SS [" <> text (show num) <> char ']'
return $ marker $$ contents
-- | Association list of characters to escape.
manEscapes :: [(Char, String)]
manEscapes = [('\160', "\\ "), ('\'', "\\[aq]")] ++ backslashEscapes "@\\"
-- | Escape special characters for Man.
escapeString :: String -> String
escapeString = escapeStringUsing manEscapes
-- | Escape a literal (code) section for Man.
escapeCode :: String -> String
escapeCode = escapeStringUsing (manEscapes ++ backslashEscapes "\t ")
-- We split inline lists into sentences, and print one sentence per
-- line. groff/troff treats the line-ending period differently.
-- See http://code.google.com/p/pandoc/issues/detail?id=148.
-- | Returns the first sentence in a list of inlines, and the rest.
breakSentence :: [Inline] -> ([Inline], [Inline])
breakSentence [] = ([],[])
breakSentence xs =
let isSentenceEndInline (Str ".") = True
isSentenceEndInline (Str "?") = True
isSentenceEndInline _ = False
(as, bs) = break isSentenceEndInline xs
in case bs of
[] -> (as, [])
[c] -> (as ++ [c], [])
(c:Space:cs) -> (as ++ [c], cs)
(Str ".":Str ")":cs) -> (as ++ [Str ".", Str ")"], cs)
(c:cs) -> (as ++ [c] ++ ds, es)
where (ds, es) = breakSentence cs
-- | Split a list of inlines into sentences.
splitSentences :: [Inline] -> [[Inline]]
splitSentences xs =
let (sent, rest) = breakSentence xs
in if null rest then [sent] else sent : splitSentences rest
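-- A hedged illustration (not part of the original source): given the inline
-- list [Str "Hi", Space, Str "there", Str ".", Space, Str "Bye"],
-- 'splitSentences' yields
--
-- > [ [Str "Hi", Space, Str "there", Str "."]
-- > , [Str "Bye"] ]
--
-- so each sentence can then be rendered on a line of its own.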
-- | Convert Pandoc block element to man.
blockToMan :: WriterOptions -- ^ Options
-> Block -- ^ Block element
-> State WriterState Doc
blockToMan _ Null = return empty
blockToMan opts (Plain inlines) =
liftM vcat $ mapM (wrapIfNeeded opts (inlineListToMan opts)) $
splitSentences inlines
blockToMan opts (Para inlines) = do
contents <- liftM vcat $ mapM (wrapIfNeeded opts (inlineListToMan opts)) $
splitSentences inlines
return $ text ".PP" $$ contents
blockToMan _ (RawHtml _) = return empty
blockToMan _ HorizontalRule = return $ text $ ".PP\n * * * * *"
blockToMan opts (Header level inlines) = do
contents <- inlineListToMan opts inlines
let heading = case level of
1 -> ".SH "
_ -> ".SS "
return $ text heading <> contents
blockToMan _ (CodeBlock _ str) = return $
text ".PP" $$ text "\\f[CR]" $$
text ((unlines . map (" " ++) . lines) (escapeCode str)) <> text "\\f[]"
blockToMan opts (BlockQuote blocks) = do
contents <- blockListToMan opts blocks
return $ text ".RS" $$ contents $$ text ".RE"
blockToMan opts (Table caption alignments widths headers rows) =
let aligncode AlignLeft = "l"
aligncode AlignRight = "r"
aligncode AlignCenter = "c"
aligncode AlignDefault = "l"
in do
caption' <- inlineListToMan opts caption
modify $ \st -> st{ stHasTables = True }
let iwidths = if all (== 0) widths
then repeat ""
else map (printf "w(%0.2fn)" . (70 *)) widths
-- 78n default width - 8n indent = 70n
let coldescriptions = text $ intercalate " "
(zipWith (\align width -> aligncode align ++ width)
alignments iwidths) ++ "."
colheadings <- mapM (blockListToMan opts) headers
let makeRow cols = text "T{" $$
(vcat $ intersperse (text "T}@T{") cols) $$
text "T}"
let colheadings' = if all null headers
then empty
else makeRow colheadings $$ char '_'
body <- mapM (\row -> do
cols <- mapM (blockListToMan opts) row
return $ makeRow cols) rows
return $ text ".PP" $$ caption' $$
text ".TS" $$ text "tab(@);" $$ coldescriptions $$
colheadings' $$ vcat body $$ text ".TE"
blockToMan opts (BulletList items) = do
contents <- mapM (bulletListItemToMan opts) items
return (vcat contents)
blockToMan opts (OrderedList attribs items) = do
let markers = take (length items) $ orderedListMarkers attribs
let indent = 1 + (maximum $ map length markers)
contents <- mapM (\(num, item) -> orderedListItemToMan opts num indent item) $
zip markers items
return (vcat contents)
blockToMan opts (DefinitionList items) = do
contents <- mapM (definitionListItemToMan opts) items
return (vcat contents)
-- | Convert bullet list item (list of blocks) to man.
bulletListItemToMan :: WriterOptions -> [Block] -> State WriterState Doc
bulletListItemToMan _ [] = return empty
bulletListItemToMan opts ((Para first):rest) =
bulletListItemToMan opts ((Plain first):rest)
bulletListItemToMan opts ((Plain first):rest) = do
first' <- blockToMan opts (Plain first)
rest' <- blockListToMan opts rest
let first'' = text ".IP \\[bu] 2" $$ first'
let rest'' = if null rest
then empty
else text ".RS 2" $$ rest' $$ text ".RE"
return (first'' $$ rest'')
bulletListItemToMan opts (first:rest) = do
first' <- blockToMan opts first
rest' <- blockListToMan opts rest
return $ text "\\[bu] .RS 2" $$ first' $$ rest' $$ text ".RE"
-- | Convert ordered list item (a list of blocks) to man.
orderedListItemToMan :: WriterOptions -- ^ options
-> String -- ^ order marker for list item
-> Int -- ^ number of spaces to indent
-> [Block] -- ^ list item (list of blocks)
-> State WriterState Doc
orderedListItemToMan _ _ _ [] = return empty
orderedListItemToMan opts num indent ((Para first):rest) =
orderedListItemToMan opts num indent ((Plain first):rest)
orderedListItemToMan opts num indent (first:rest) = do
first' <- blockToMan opts first
rest' <- blockListToMan opts rest
let num' = printf ("%" ++ show (indent - 1) ++ "s") num
let first'' = text (".IP \"" ++ num' ++ "\" " ++ show indent) $$ first'
let rest'' = if null rest
then empty
else text ".RS 4" $$ rest' $$ text ".RE"
return $ first'' $$ rest''
-- | Convert definition list item (label, list of blocks) to man.
definitionListItemToMan :: WriterOptions
-> ([Inline],[[Block]])
-> State WriterState Doc
definitionListItemToMan opts (label, defs) = do
labelText <- inlineListToMan opts label
contents <- if null defs
then return empty
else liftM vcat $ forM defs $ \blocks -> do
let (first, rest) = case blocks of
((Para x):y) -> (Plain x,y)
(x:y) -> (x,y)
[] -> error "blocks is null"
rest' <- liftM vcat $
mapM (\item -> blockToMan opts item) rest
first' <- blockToMan opts first
return $ first' $$ text ".RS" $$ rest' $$ text ".RE"
return $ text ".TP\n.B " <> labelText $+$ contents
-- | Convert list of Pandoc block elements to man.
blockListToMan :: WriterOptions -- ^ Options
-> [Block] -- ^ List of block elements
-> State WriterState Doc
blockListToMan opts blocks =
mapM (blockToMan opts) blocks >>= (return . vcat)
-- | Convert list of Pandoc inline elements to man.
inlineListToMan :: WriterOptions -> [Inline] -> State WriterState Doc
-- if list starts with ., insert a zero-width character \& so it
-- won't be interpreted as markup if it falls at the beginning of a line.
inlineListToMan opts lst@(Str "." : _) = mapM (inlineToMan opts) lst >>=
(return . (text "\\&" <>) . hcat)
inlineListToMan opts lst = mapM (inlineToMan opts) lst >>= (return . hcat)
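-- A hedged illustration (not part of the original source): because of the
-- special case above, an inline list such as [Str ".", Str "PP"] renders as
-- "\&.PP" rather than ".PP", so groff does not mistake it for a request at
-- the start of a line.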
-- | Convert Pandoc inline element to man.
inlineToMan :: WriterOptions -> Inline -> State WriterState Doc
inlineToMan opts (Emph lst) = do
contents <- inlineListToMan opts lst
return $ text "\\f[I]" <> contents <> text "\\f[]"
inlineToMan opts (Strong lst) = do
contents <- inlineListToMan opts lst
return $ text "\\f[B]" <> contents <> text "\\f[]"
inlineToMan opts (Strikeout lst) = do
contents <- inlineListToMan opts lst
return $ text "[STRIKEOUT:" <> contents <> char ']'
inlineToMan opts (Superscript lst) = do
contents <- inlineListToMan opts lst
return $ char '^' <> contents <> char '^'
inlineToMan opts (Subscript lst) = do
contents <- inlineListToMan opts lst
return $ char '~' <> contents <> char '~'
inlineToMan opts (SmallCaps lst) = inlineListToMan opts lst -- not supported
inlineToMan opts (Quoted SingleQuote lst) = do
contents <- inlineListToMan opts lst
return $ char '`' <> contents <> char '\''
inlineToMan opts (Quoted DoubleQuote lst) = do
contents <- inlineListToMan opts lst
return $ text "\\[lq]" <> contents <> text "\\[rq]"
inlineToMan opts (Cite _ lst) =
inlineListToMan opts lst
inlineToMan _ EmDash = return $ text "\\[em]"
inlineToMan _ EnDash = return $ text "\\[en]"
inlineToMan _ Apostrophe = return $ char '\''
inlineToMan _ Ellipses = return $ text "\\&..."
inlineToMan _ (Code str) =
return $ text $ "\\f[B]" ++ escapeCode str ++ "\\f[]"
inlineToMan _ (Str str) = return $ text $ escapeString str
inlineToMan opts (Math InlineMath str) = inlineToMan opts (Code str)
inlineToMan opts (Math DisplayMath str) = do
contents <- inlineToMan opts (Code str)
return $ text ".RS" $$ contents $$ text ".RE"
inlineToMan _ (TeX _) = return empty
inlineToMan _ (HtmlInline _) = return empty
inlineToMan _ (LineBreak) = return $ text "\n.PD 0\n.P\n.PD\n"
inlineToMan _ Space = return $ char ' '
inlineToMan opts (Link txt (src, _)) = do
linktext <- inlineListToMan opts txt
let srcSuffix = if isPrefixOf "mailto:" src then drop 7 src else src
return $ if txt == [Code srcSuffix]
then char '<' <> text srcSuffix <> char '>'
else linktext <> text " (" <> text src <> char ')'
inlineToMan opts (Image alternate (source, tit)) = do
let txt = if (null alternate) || (alternate == [Str ""]) ||
(alternate == [Str source]) -- to prevent autolinks
then [Str "image"]
else alternate
linkPart <- inlineToMan opts (Link txt (source, tit))
return $ char '[' <> text "IMAGE: " <> linkPart <> char ']'
inlineToMan _ (Note contents) = do
-- add to notes in state
modify $ \st -> st{ stNotes = contents : stNotes st }
notes <- liftM stNotes get
let ref = show $ (length notes)
return $ char '[' <> text ref <> char ']'
| kowey/pandoc-old | src/Text/Pandoc/Writers/Man.hs | gpl-2.0 | 14,294 | 0 | 22 | 3,856 | 4,273 | 2,118 | 2,155 | 258 | 7 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="bs-BA">
<title>Requester</title>
<maps>
<homeID>requester</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | thc202/zap-extensions | addOns/requester/src/main/javahelp/help_bs_BA/helpset_bs_BA.hs | apache-2.0 | 960 | 92 | 29 | 155 | 389 | 208 | 181 | -1 | -1 |
import Control.Concurrent
import GHC.Conc
import Debug.Trace
-- <<main
main = do
t <- myThreadId
labelThread t "main"
m <- newEmptyMVar
t <- forkIO $ putMVar m 'a'
labelThread t "a"
t <- forkIO $ putMVar m 'b'
labelThread t "b"
traceEventIO "before takeMVar"
takeMVar m
takeMVar m
-- >>
| prt2121/haskell-practice | parconc/mvar4.hs | apache-2.0 | 308 | 1 | 9 | 70 | 114 | 49 | 65 | 14 | 1 |
-- GSoC 2015 - Haskell bindings for OpenCog.
{-# LANGUAGE GADTs #-}
{-# LANGUAGE DataKinds #-}
-- | Simple example on using the Pattern Matcher.
import OpenCog.AtomSpace (AtomSpace,insert,get,remove,cogBind,
debug,runOnNewAtomSpace,printAtom,
Atom(..),TruthVal(..),noTv,stv)
import Control.Monad.IO.Class (liftIO)
findAnimals :: Atom
findAnimals = Link "BindLink"
[Node "VariableNode" "$var" noTv
,Link "InheritanceLink"
[Node "VariableNode" "$var" noTv
,Node "ConceptNode" "animal" noTv
] noTv
,Node "VariableNode" "$var" noTv
] noTv
main :: IO ()
main = runOnNewAtomSpace program
program :: AtomSpace ()
program = do
insert $ Link "InheritanceLink"
[Node "ConceptNode" "fox" noTv
,Node "ConceptNode" "animal" noTv
] noTv
insert $ Link "InheritanceLink"
[Node "ConceptNode" "cat" noTv
,Node "ConceptNode" "animal" noTv
] noTv
insert findAnimals
res <- cogBind findAnimals
liftIO $ putStrLn "Result: " >> case res of
Nothing -> print res
Just at -> printAtom at
liftIO $ putStrLn "-----AtomSpace state at the end:-----"
debug
liftIO $ putStrLn "-------------------------------------"
| inflector/atomspace | examples/haskell/example_pattern_matcher.hs | agpl-3.0 | 1,517 | 2 | 11 | 575 | 320 | 161 | 159 | 35 | 2 |
{-# LANGUAGE CPP, BangPatterns #-}
-----------------------------------------------------------------------------
-- | Separate module for HTTP actions, using a proxy server if one exists
-----------------------------------------------------------------------------
module Distribution.Client.HttpUtils (
DownloadResult(..),
configureTransport,
HttpTransport(..),
downloadURI,
transportCheckHttps,
remoteRepoCheckHttps,
remoteRepoTryUpgradeToHttps,
isOldHackageURI
) where
import Network.HTTP
( Request (..), Response (..), RequestMethod (..)
, Header(..), HeaderName(..), lookupHeader )
import Network.HTTP.Proxy ( Proxy(..), fetchProxy)
import Network.URI
( URI (..), URIAuth (..) )
import Network.Browser
( browse, setOutHandler, setErrHandler, setProxy
, setAuthorityGen, request, setAllowBasicAuth, setUserAgent )
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative
#endif
import qualified Control.Exception as Exception
import Control.Monad
( when, guard )
import qualified Data.ByteString.Lazy.Char8 as BS
import Data.List
( isPrefixOf, find, intercalate )
import Data.Maybe
( listToMaybe, maybeToList )
import qualified Paths_cabal_install (version)
import Distribution.Verbosity (Verbosity)
import Distribution.Simple.Utils
( die, info, warn, debug, notice, writeFileAtomic
, copyFileVerbose, withTempFile
, rawSystemStdInOut, toUTF8, fromUTF8, normaliseLineEndings )
import Distribution.Client.Utils
( readMaybe, withTempFileName )
import Distribution.Client.Types
( RemoteRepo(..) )
import Distribution.System
( buildOS, buildArch )
import Distribution.Text
( display )
import Data.Char
( isSpace )
import qualified System.FilePath.Posix as FilePath.Posix
( splitDirectories )
import System.FilePath
( (<.>) )
import System.Directory
( doesFileExist, renameFile )
import System.IO.Error
( isDoesNotExistError )
import Distribution.Simple.Program
( Program, simpleProgram, ConfiguredProgram, programPath
, ProgramInvocation(..), programInvocation
, getProgramInvocationOutput )
import Distribution.Simple.Program.Db
( ProgramDb, emptyProgramDb, addKnownPrograms
, configureAllKnownPrograms
, requireProgram, lookupProgram )
import Distribution.Simple.Program.Run
( IOEncoding(..), getEffectiveEnvironment )
import Numeric (showHex)
import System.Directory (canonicalizePath)
import System.IO (hClose, hPutStr)
import System.FilePath (takeFileName, takeDirectory)
import System.Random (randomRIO)
import System.Exit (ExitCode(..))
------------------------------------------------------------------------------
-- Downloading a URI, given an HttpTransport
--
data DownloadResult = FileAlreadyInCache
| FileDownloaded FilePath
deriving (Eq)
downloadURI :: HttpTransport
-> Verbosity
-> URI -- ^ What to download
-> FilePath -- ^ Where to put it
-> IO DownloadResult
downloadURI _transport verbosity uri path | uriScheme uri == "file:" = do
copyFileVerbose verbosity (uriPath uri) path
return (FileDownloaded path)
-- Can we store the hash of the file so we can safely return path when the
-- hash matches to avoid unnecessary computation?
downloadURI transport verbosity uri path = do
let etagPath = path <.> "etag"
targetExists <- doesFileExist path
etagPathExists <- doesFileExist etagPath
-- In rare cases the target file doesn't exist, but the etag does.
etag <- if targetExists && etagPathExists
then Just <$> readFile etagPath
else return Nothing
-- Only use the external http transports if we actually have to
-- (or have been told to do so)
let transport'
| uriScheme uri == "http:"
, not (transportManuallySelected transport)
= plainHttpTransport
| otherwise
= transport
withTempFileName (takeDirectory path) (takeFileName path) $ \tmpFile -> do
result <- getHttp transport' verbosity uri etag tmpFile
-- Only write the etag if we get a 200 response code.
-- A 304 still sends us an etag header.
case result of
(200, Just newEtag) -> writeFile etagPath newEtag
_ -> return ()
case fst result of
200 -> do
info verbosity ("Downloaded to " ++ path)
renameFile tmpFile path
return (FileDownloaded path)
304 -> do
notice verbosity "Skipping download: local and remote files match."
return FileAlreadyInCache
errCode -> die $ "Failed to download " ++ show uri
++ " : HTTP code " ++ show errCode
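-- A hedged usage sketch (not part of the original module): a typical caller
-- configures a transport first and then downloads, e.g.
--
-- > do transport <- configureTransport verbosity Nothing
-- >    _result   <- downloadURI transport verbosity uri "00-index.tar.gz"
-- >    return ()
--
-- (the target file name here is purely illustrative). The ".etag" file
-- written next to the target is what lets the 304 branch above skip the
-- download on a later run when the server copy is unchanged.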
------------------------------------------------------------------------------
-- Utilities for repo url management
--
remoteRepoCheckHttps :: HttpTransport -> RemoteRepo -> IO ()
remoteRepoCheckHttps transport repo
| uriScheme (remoteRepoURI repo) == "https:"
, not (transportSupportsHttps transport)
= die $ "The remote repository '" ++ remoteRepoName repo
++ "' specifies a URL that " ++ requiresHttpsErrorMessage
| otherwise = return ()
transportCheckHttps :: HttpTransport -> URI -> IO ()
transportCheckHttps transport uri
| uriScheme uri == "https:"
, not (transportSupportsHttps transport)
= die $ "The URL " ++ show uri
++ " " ++ requiresHttpsErrorMessage
| otherwise = return ()
requiresHttpsErrorMessage :: String
requiresHttpsErrorMessage =
"requires HTTPS however the built-in HTTP implementation "
++ "does not support HTTPS. The transport implementations with HTTPS "
++ "support are " ++ intercalate ", "
[ name | (name, _, True, _ ) <- supportedTransports ]
++ ". One of these will be selected automatically if the corresponding "
++ "external program is available, or one can be selected specifically "
++ "with the global flag --http-transport="
remoteRepoTryUpgradeToHttps :: HttpTransport -> RemoteRepo -> IO RemoteRepo
remoteRepoTryUpgradeToHttps transport repo
| remoteRepoShouldTryHttps repo
, uriScheme (remoteRepoURI repo) == "http:"
, not (transportSupportsHttps transport)
, not (transportManuallySelected transport)
= die $ "The builtin HTTP implementation does not support HTTPS, but using "
++ "HTTPS for authenticated uploads is recommended. "
++ "The transport implementations with HTTPS support are "
++ intercalate ", " [ name | (name, _, True, _ ) <- supportedTransports ]
++ ", but they require the corresponding external program to be "
++ "available. You can either make one available or use plain HTTP by "
++ "using the global flag --http-transport=plain-http (or putting the "
++ "equivalent in the config file). With plain HTTP, your password "
++ "is sent using HTTP digest authentication so it cannot be easily "
++ "intercepted, but it is not as secure as using HTTPS."
| remoteRepoShouldTryHttps repo
, uriScheme (remoteRepoURI repo) == "http:"
, transportSupportsHttps transport
= return repo {
remoteRepoURI = (remoteRepoURI repo) { uriScheme = "https:" }
}
| otherwise
= return repo
-- | Utility function for legacy support.
isOldHackageURI :: URI -> Bool
isOldHackageURI uri
= case uriAuthority uri of
Just (URIAuth {uriRegName = "hackage.haskell.org"}) ->
FilePath.Posix.splitDirectories (uriPath uri) == ["/","packages","archive"]
_ -> False
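-- A hedged illustration (not part of the original source): with 'parseURI'
-- from Network.URI in scope,
--
-- > fmap isOldHackageURI (parseURI "http://hackage.haskell.org/packages/archive")
--
-- gives Just True, while any other host, or any path other than
-- "/packages/archive", gives Just False.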
------------------------------------------------------------------------------
-- Setting up a HttpTransport
--
data HttpTransport = HttpTransport {
-- | GET a URI, with an optional ETag (to do a conditional fetch),
-- write the resource to the given file and return the HTTP status code,
-- and optional ETag.
getHttp :: Verbosity -> URI -> Maybe ETag -> FilePath
-> IO (HttpCode, Maybe ETag),
-- | POST a resource to a URI, with optional auth (username, password)
-- and return the HTTP status code and any redirect URL.
postHttp :: Verbosity -> URI -> String -> Maybe Auth
-> IO (HttpCode, String),
-- | POST a file resource to a URI using multipart\/form-data encoding,
-- with optional auth (username, password) and return the HTTP status
-- code and any error string.
postHttpFile :: Verbosity -> URI -> FilePath -> Maybe Auth
-> IO (HttpCode, String),
-- | Whether this transport supports https or just http.
transportSupportsHttps :: Bool,
-- | Whether this transport implementation was specifically chosen by
-- the user via configuration, or whether it was automatically selected.
-- Strictly speaking this is not a property of the transport itself but
-- about how it was chosen. Nevertheless it's convenient to keep here.
transportManuallySelected :: Bool
}
--TODO: why does postHttp return a redirect, but postHttpFile return errors?
type HttpCode = Int
type ETag = String
type Auth = (String, String)
noPostYet :: Verbosity -> URI -> String -> Maybe (String, String)
-> IO (Int, String)
noPostYet _ _ _ _ = die "Posting (for report upload) is not implemented yet"
supportedTransports :: [(String, Maybe Program, Bool,
ProgramDb -> Maybe HttpTransport)]
supportedTransports =
[ let prog = simpleProgram "curl" in
( "curl", Just prog, True
, \db -> curlTransport <$> lookupProgram prog db )
, let prog = simpleProgram "wget" in
( "wget", Just prog, True
, \db -> wgetTransport <$> lookupProgram prog db )
, let prog = simpleProgram "powershell" in
( "powershell", Just prog, True
, \db -> powershellTransport <$> lookupProgram prog db )
, ( "plain-http", Nothing, False
, \_ -> Just plainHttpTransport )
]
configureTransport :: Verbosity -> Maybe String -> IO HttpTransport
configureTransport verbosity (Just name) =
-- the user specifically selected a transport by name so we'll try and
-- configure that one
case find (\(name',_,_,_) -> name' == name) supportedTransports of
Just (_, mprog, _tls, mkTrans) -> do
progdb <- case mprog of
Nothing -> return emptyProgramDb
Just prog -> snd <$> requireProgram verbosity prog emptyProgramDb
-- ^^ if it fails, it'll fail here
let Just transport = mkTrans progdb
return transport { transportManuallySelected = True }
Nothing -> die $ "Unknown HTTP transport specified: " ++ name
++ ". The supported transports are "
++ intercalate ", "
[ name' | (name', _, _, _ ) <- supportedTransports ]
configureTransport verbosity Nothing = do
-- the user hasn't selected a transport, so we'll pick the first one we
-- can configure successfully, provided that it supports tls
-- for all the transports except plain-http we need to try and find
-- their external executable
progdb <- configureAllKnownPrograms verbosity $
addKnownPrograms
[ prog | (_, Just prog, _, _) <- supportedTransports ]
emptyProgramDb
let availableTransports =
[ (name, transport)
| (name, _, _, mkTrans) <- supportedTransports
, transport <- maybeToList (mkTrans progdb) ]
-- there's always one because the plain one is last and never fails
let (name, transport) = head availableTransports
debug verbosity $ "Selected http transport implementation: " ++ name
return transport { transportManuallySelected = False }
------------------------------------------------------------------------------
-- The HttpTransports based on external programs
--
curlTransport :: ConfiguredProgram -> HttpTransport
curlTransport prog =
HttpTransport gethttp posthttp posthttpfile True False
where
gethttp verbosity uri etag destPath = do
withTempFile (takeDirectory destPath)
"curl-headers.txt" $ \tmpFile tmpHandle -> do
hClose tmpHandle
let args = [ show uri
, "--output", destPath
, "--location"
, "--write-out", "%{http_code}"
, "--user-agent", userAgent
, "--silent", "--show-error"
, "--dump-header", tmpFile ]
++ concat
[ ["--header", "If-None-Match: " ++ t]
| t <- maybeToList etag ]
resp <- getProgramInvocationOutput verbosity
(programInvocation prog args)
headers <- readFile tmpFile
(code, _err, etag') <- parseResponse uri resp headers
return (code, etag')
posthttp = noPostYet
posthttpfile verbosity uri path auth = do
let args = [ show uri
, "--form", "package=@"++path
, "--write-out", "%{http_code}"
, "--user-agent", userAgent
, "--silent", "--show-error"
, "--header", "Accept: text/plain" ]
++ concat
[ ["--digest", "--user", uname ++ ":" ++ passwd]
| (uname,passwd) <- maybeToList auth ]
resp <- getProgramInvocationOutput verbosity
(programInvocation prog args)
(code, err, _etag) <- parseResponse uri resp ""
return (code, err)
-- on success these curl invocations produce an output like "200"
-- and on failure it has the server error response first
parseResponse uri resp headers =
let codeerr =
case reverse (lines resp) of
(codeLine:rerrLines) ->
case readMaybe (trim codeLine) of
Just i -> let errstr = unlines (reverse rerrLines)
in Just (i, errstr)
Nothing -> Nothing
[] -> Nothing
mb_etag :: Maybe ETag
mb_etag = listToMaybe $ reverse
[ etag
| ["ETag:", etag] <- map words (lines headers) ]
in case codeerr of
Just (i, err) -> return (i, err, mb_etag)
_ -> statusParseFail uri resp
wgetTransport :: ConfiguredProgram -> HttpTransport
wgetTransport prog =
HttpTransport gethttp posthttp posthttpfile True False
where
gethttp verbosity uri etag destPath = do
resp <- runWGet verbosity args
(code, _err, etag') <- parseResponse uri resp
return (code, etag')
where
args = [ show uri
, "--output-document=" ++ destPath
, "--user-agent=" ++ userAgent
, "--tries=5"
, "--timeout=15"
, "--server-response" ]
++ concat
[ ["--header", "If-None-Match: " ++ t]
| t <- maybeToList etag ]
posthttp = noPostYet
posthttpfile verbosity uri path auth =
withTempFile (takeDirectory path)
(takeFileName path) $ \tmpFile tmpHandle -> do
(body, boundary) <- generateMultipartBody path
BS.hPut tmpHandle body
hClose tmpHandle
let args = [ show uri
, "--post-file=" ++ tmpFile
, "--user-agent=" ++ userAgent
, "--server-response"
, "--header=Content-type: multipart/form-data; " ++
"boundary=" ++ boundary ]
++ concat
[ [ "--http-user=" ++ uname
, "--http-password=" ++ passwd ]
| (uname,passwd) <- maybeToList auth ]
resp <- runWGet verbosity args
(code, err, _etag) <- parseResponse uri resp
return (code, err)
runWGet verbosity args = do
-- wget returns its output on stderr rather than stdout
(_, resp, exitCode) <- getProgramInvocationOutputAndErrors verbosity
(programInvocation prog args)
-- wget returns exit code 8 for server "errors" like "304 not modified"
if exitCode == ExitSuccess || exitCode == ExitFailure 8
then return resp
else die $ "'" ++ programPath prog
++ "' exited with an error:\n" ++ resp
-- With the --server-response flag, wget produces output with the full
-- http server response with all headers, we want to find a line like
-- "HTTP/1.1 200 OK", but only the last one, since we can have multiple
-- requests due to redirects.
--
-- Unfortunately wget apparently cannot be persuaded to give us the body
-- of error responses, so we just return the human readable status message
-- like "Forbidden" etc.
parseResponse uri resp =
let codeerr = listToMaybe
[ (code, unwords err)
| (protocol:codestr:err) <- map words (reverse (lines resp))
, "HTTP/" `isPrefixOf` protocol
, code <- maybeToList (readMaybe codestr) ]
mb_etag :: Maybe ETag
mb_etag = listToMaybe
[ etag
| ["ETag:", etag] <- map words (reverse (lines resp)) ]
in case codeerr of
Just (i, err) -> return (i, err, mb_etag)
_ -> statusParseFail uri resp
powershellTransport :: ConfiguredProgram -> HttpTransport
powershellTransport prog =
HttpTransport gethttp posthttp posthttpfile True False
where
gethttp verbosity uri etag destPath =
withTempFile (takeDirectory destPath)
"psScript.ps1" $ \tmpFile tmpHandle -> do
hPutStr tmpHandle script
hClose tmpHandle
let args = ["-InputFormat", "None", "-File", tmpFile]
resp <- getProgramInvocationOutput verbosity
(programInvocation prog args)
parseResponse resp
where
script =
concatMap (++";\n") $
[ "$wc = new-object system.net.webclient"
, "$wc.Headers.Add(\"user-agent\","++escape userAgent++")"]
++ [ "$wc.Headers.Add(\"If-None-Match\"," ++ t ++ ")"
| t <- maybeToList etag ]
++ [ "Try {"
, "$wc.DownloadFile("++ escape (show uri) ++
"," ++ escape destPath ++ ")"
, "} Catch {Write-Error $_; Exit(5);}"
, "Write-Host \"200\""
, "Write-Host $wc.ResponseHeaders.Item(\"ETag\")"
, "Exit" ]
escape x = '"' : x ++ "\"" --TODO write/find real escape.
parseResponse x = case readMaybe . unlines . take 1 . lines $ trim x of
Just i -> return (i, Nothing) -- TODO extract real etag
Nothing -> statusParseFail uri x
posthttp = noPostYet
posthttpfile verbosity uri path auth =
withTempFile (takeDirectory path)
(takeFileName path) $ \tmpFile tmpHandle ->
withTempFile (takeDirectory path)
"psScript.ps1" $ \tmpScriptFile tmpScriptHandle -> do
(body, boundary) <- generateMultipartBody path
BS.hPut tmpHandle body
hClose tmpHandle
fullPath <- canonicalizePath tmpFile
hPutStr tmpScriptHandle (script fullPath boundary)
hClose tmpScriptHandle
let args = ["-InputFormat", "None", "-File", tmpScriptFile]
resp <- getProgramInvocationOutput verbosity
(programInvocation prog args)
parseResponse resp
where
script fullPath boundary =
concatMap (++";\n") $
[ "$wc = new-object system.net.webclient"
, "$wc.Headers.Add(\"user-agent\","++escape userAgent++")"
, "$wc.Headers.Add(\"Content-type\"," ++
"\"multipart/form-data; " ++
"boundary="++boundary++"\")" ]
++ [ "$wc.Credentials = new-object System.Net.NetworkCredential("
++ escape uname ++ "," ++ escape passwd ++ ",\"\")"
| (uname,passwd) <- maybeToList auth ]
++ [ "Try {"
, "$bytes = [System.IO.File]::ReadAllBytes("++escape fullPath++")"
, "$wc.UploadData("++ escape (show uri) ++ ",$bytes)"
, "} Catch {Write-Error $_; Exit(1);}"
, "Write-Host \"200\""
, "Exit" ]
escape x = show x
parseResponse x = case readMaybe . unlines . take 1 . lines $ trim x of
Just i -> return (i, x) -- TODO extract real etag
Nothing -> statusParseFail uri x
------------------------------------------------------------------------------
-- The builtin plain HttpTransport
--
plainHttpTransport :: HttpTransport
plainHttpTransport =
HttpTransport gethttp posthttp posthttpfile False False
where
gethttp verbosity uri etag destPath = do
let req = Request{
rqURI = uri,
rqMethod = GET,
rqHeaders = [ Header HdrIfNoneMatch t
| t <- maybeToList etag ],
rqBody = BS.empty
}
(_, resp) <- cabalBrowse verbosity Nothing (request req)
let code = convertRspCode (rspCode resp)
etag' = lookupHeader HdrETag (rspHeaders resp)
when (code==200) $
writeFileAtomic destPath $ rspBody resp
return (code, etag')
posthttp = noPostYet
posthttpfile verbosity uri path auth = do
(body, boundary) <- generateMultipartBody path
let headers = [ Header HdrContentType
("multipart/form-data; boundary="++boundary)
, Header HdrContentLength (show (BS.length body))
, Header HdrAccept ("text/plain")
]
req = Request {
rqURI = uri,
rqMethod = POST,
rqHeaders = headers,
rqBody = body
}
(_, resp) <- cabalBrowse verbosity auth (request req)
return (convertRspCode (rspCode resp), rspErrorString resp)
convertRspCode (a,b,c) = a*100 + b*10 + c
rspErrorString resp =
case lookupHeader HdrContentType (rspHeaders resp) of
Just contenttype
| takeWhile (/= ';') contenttype == "text/plain"
-> BS.unpack (rspBody resp)
_ -> rspReason resp
cabalBrowse verbosity auth act = do
p <- fixupEmptyProxy <$> fetchProxy True
Exception.handleJust
(guard . isDoesNotExistError)
(const . die $ "Couldn't establish HTTP connection. "
++ "Possible cause: HTTP proxy server is down.") $
browse $ do
setProxy p
setErrHandler (warn verbosity . ("http error: "++))
setOutHandler (debug verbosity)
setUserAgent userAgent
setAllowBasicAuth False
setAuthorityGen (\_ _ -> return auth)
act
fixupEmptyProxy (Proxy uri _) | null uri = NoProxy
fixupEmptyProxy p = p
------------------------------------------------------------------------------
-- Common stuff used by multiple transport impls
--
userAgent :: String
userAgent = concat [ "cabal-install/", display Paths_cabal_install.version
, " (", display buildOS, "; ", display buildArch, ")"
]
statusParseFail :: URI -> String -> IO a
statusParseFail uri r =
die $ "Failed to download " ++ show uri ++ " : "
++ "No Status Code could be parsed from response: " ++ r
-- Trim
trim :: String -> String
trim = f . f
where f = reverse . dropWhile isSpace
------------------------------------------------------------------------------
-- Multipart stuff partially taken from cgi package.
--
generateMultipartBody :: FilePath -> IO (BS.ByteString, String)
generateMultipartBody path = do
content <- BS.readFile path
boundary <- genBoundary
let !body = formatBody content (BS.pack boundary)
return (body, boundary)
where
formatBody content boundary =
BS.concat $
[ crlf, dd, boundary, crlf ]
++ [ BS.pack (show header) | header <- headers ]
++ [ crlf
, content
, crlf, dd, boundary, dd, crlf ]
headers =
[ Header (HdrCustom "Content-disposition")
("form-data; name=package; " ++
"filename=\"" ++ takeFileName path ++ "\"")
, Header HdrContentType "application/x-gzip"
]
crlf = BS.pack "\r\n"
dd = BS.pack "--"
genBoundary :: IO String
genBoundary = do
i <- randomRIO (0x10000000000000,0xFFFFFFFFFFFFFF) :: IO Integer
return $ showHex i ""
------------------------------------------------------------------------------
-- Compat utils
-- TODO: This is only here temporarily so we can release without also requiring
-- the latest Cabal lib. The function is also included in Cabal now.
getProgramInvocationOutputAndErrors :: Verbosity -> ProgramInvocation
-> IO (String, String, ExitCode)
getProgramInvocationOutputAndErrors verbosity
ProgramInvocation {
progInvokePath = path,
progInvokeArgs = args,
progInvokeEnv = envOverrides,
progInvokeCwd = mcwd,
progInvokeInput = minputStr,
progInvokeOutputEncoding = encoding
} = do
let utf8 = case encoding of IOEncodingUTF8 -> True; _ -> False
decode | utf8 = fromUTF8 . normaliseLineEndings
| otherwise = id
menv <- getEffectiveEnvironment envOverrides
(output, errors, exitCode) <- rawSystemStdInOut verbosity
path args
mcwd menv
input utf8
return (decode output, decode errors, exitCode)
where
input =
case minputStr of
Nothing -> Nothing
Just inputStr -> Just $
case encoding of
IOEncodingText -> (inputStr, False)
IOEncodingUTF8 -> (toUTF8 inputStr, True) -- use binary mode for utf8
| thoughtpolice/cabal | cabal-install/Distribution/Client/HttpUtils.hs | bsd-3-clause | 26,445 | 0 | 23 | 7,972 | 5,589 | 2,938 | 2,651 | 502 | 5 |
{-# LANGUAGE DeriveDataTypeable, RecordWildCards #-}
import Network.Wai.Application.Static
( StaticSettings (..), staticApp, defaultMimeType, defaultListing
, defaultMimeTypes, mimeTypeByExt
)
import Network.Wai.Handler.Warp (run)
import System.Environment (getArgs)
import System.Console.CmdArgs
import Text.Printf (printf)
import System.Directory (canonicalizePath)
import Control.Monad (unless)
import Network.Wai.Middleware.Autohead
import Network.Wai.Middleware.Debug
import Network.Wai.Middleware.Gzip
import qualified Data.Map as Map
import qualified Data.ByteString.Char8 as S8
import Control.Arrow (second)
data Args = Args
{ docroot :: FilePath
, index :: [FilePath]
, port :: Int
, noindex :: Bool
, quiet :: Bool
, verbose :: Bool
, mime :: [(String, String)]
}
deriving (Show, Data, Typeable)
defaultArgs = Args "." ["index.html", "index.htm"] 3000 False False False []
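-- cmdArgs derives the command-line flags from the record field names above,
-- so an invocation of this file server would likely look something like the
-- following (illustrative only; the binary name and exact flag spellings are
-- assumptions, not taken from the package):
--
-- > warp-static --docroot=./site --port=8080 --index=index.html --verbose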
main :: IO ()
main = do
Args {..} <- cmdArgs defaultArgs
let mime' = map (second S8.pack) mime
let mimeMap = Map.fromList mime' `Map.union` defaultMimeTypes
docroot' <- canonicalizePath docroot
args <- getArgs
unless quiet $ printf "Serving directory %s on port %d with %s index files.\n" docroot' port (if noindex then "no" else show index)
let middle = gzip False
. (if verbose then debug else id)
. autohead
run port $ middle $ staticApp StaticSettings
{ ssFolder = docroot
, ssIndices = if noindex then [] else index
, ssListing = Just defaultListing
, ssGetMimeType = return . mimeTypeByExt mimeMap defaultMimeType
}
| ygale/yesod | yesod-static/test/unicode/warp.hs | mit | 1,658 | 18 | 13 | 359 | 480 | 266 | 214 | 42 | 4 |
module Sudoku
(
Sudoku,
solve,
isSolved,
pPrint
) where
import Data.Maybe
import Data.List
import Data.List.Split
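-- A puzzle is a flat list of the 81 cells of a 9x9 grid in row-major order,
-- with 0 marking an empty cell. 'solve' backtracks over the first empty cell,
-- keeping only those digits that leave the cell's row, column and 3x3 box
-- free of duplicates.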
type Sudoku = [Int]
solve :: Sudoku -> Maybe Sudoku
solve sudoku
| isSolved sudoku = Just sudoku
| otherwise = do
index <- elemIndex 0 sudoku
let sudokus = [nextTest sudoku index i | i <- [1..9],
checkRow (nextTest sudoku index i) index,
checkColumn (nextTest sudoku index i) index,
checkBox (nextTest sudoku index i) index]
listToMaybe $ mapMaybe solve sudokus
where nextTest sudoku index i = take index sudoku ++ [i] ++ drop (index+1) sudoku
checkRow sudoku index = (length $ getRow sudoku index) == (length $ nub $ getRow sudoku index)
checkColumn sudoku index = (length $ getColumn sudoku index) == (length $ nub $ getColumn sudoku index)
checkBox sudoku index = (length $ getBox sudoku index) == (length $ nub $ getBox sudoku index)
getRow sudoku index = filter (/=0) $ (chunksOf 9 sudoku) !! (quot index 9)
getColumn sudoku index = filter (/=0) $ (transpose $ chunksOf 9 sudoku) !! (mod index 9)
getBox sudoku index = filter (/=0) $ (map concat $ concatMap transpose $ chunksOf 3 $ map (chunksOf 3) $ chunksOf 9 sudoku)
!! (3 * (quot index 27) + (quot (mod index 9) 3))
isSolved :: Sudoku -> Bool
isSolved sudoku
| product sudoku == 0 = False
| map (length . nub) sudokuRows /= map length sudokuRows = False
| map (length . nub) sudokuColumns /= map length sudokuColumns = False
| map (length . nub) sudokuBoxes /= map length sudokuBoxes = False
| otherwise = True
where sudokuRows = chunksOf 9 sudoku
sudokuColumns = transpose sudokuRows
sudokuBoxes = map concat $ concatMap transpose $ chunksOf 3 $ map (chunksOf 3) $ chunksOf 9 sudoku
pPrint :: Sudoku -> String
pPrint sudoku = intercalate "\n" $ map (intercalate " " . map show) $ chunksOf 9 sudoku | kyoungrok0517/linguist | samples/Haskell/Sudoku.hs | mit | 2,061 | 0 | 15 | 599 | 795 | 390 | 405 | 40 | 1 |
-- Copyright (c) 2000 Galois Connections, Inc.
-- All rights reserved. This software is distributed as
-- free software under the license in the file "LICENSE",
-- which is included in the distribution.
module RayTrace(module Illumination, module Surface) where
import Illumination
import Surface
| olsner/ghc | testsuite/tests/programs/galois_raytrace/RayTrace.hs | bsd-3-clause | 300 | 0 | 4 | 46 | 24 | 18 | 6 | 3 | 0 |
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Control.Exception ( bracket, catch )
import Control.Monad ( forever )
import System.Socket
import System.Socket.Family.Inet6
import System.Socket.Type.Stream
import System.Socket.Protocol.TCP
main :: IO ()
main = bracket
( socket :: IO (Socket Inet6 Stream TCP) )
( \s-> do
close s
putStrLn "Listening socket closed."
)
( \s-> do
setSocketOption s (ReuseAddress True)
setSocketOption s (V6Only False)
bind s (SocketAddressInet6 inet6Any 8080 0 0)
listen s 5
putStrLn "Listening socket ready..."
forever $ acceptAndHandle s `catch` \e-> print (e :: SocketException)
)
acceptAndHandle :: Socket Inet6 Stream TCP -> IO ()
acceptAndHandle s = bracket
( accept s )
( \(p, addr)-> do
close p
putStrLn $ "Closed connection to " ++ show addr
)
( \(p, addr)-> do
putStrLn $ "Accepted connection from " ++ show addr
sendAll p "Hello world!" msgNoSignal
)
| lpeterse/haskell-socket | examples/HelloWorldServer.hs | mit | 977 | 0 | 13 | 212 | 329 | 169 | 160 | 30 | 1 |
{-# htermination listToFM :: [(Ordering,b)] -> FiniteMap Ordering b #-}
import FiniteMap
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/full_haskell/FiniteMap_listToFM_11.hs | mit | 89 | 0 | 3 | 12 | 5 | 3 | 2 | 1 | 0 |
module MaybeLib where
import Data.Maybe as M (fromJust)
-- >>> isJust (Just 1)
-- True
-- >>> isJust Nothing
-- False
isJust :: Maybe a -> Bool
isJust Nothing = False
isJust (Just _) = True
-- >>> isNothing (Just 1)
-- False
-- >>> isNothing Nothing
-- True
isNothing :: Maybe a -> Bool
isNothing Nothing = True
isNothing (Just _) = False
-- >>> mayybee 0 (+1) Nothing
-- 0
-- >>> mayybee 0 (+1) (Just 1)
-- 2
mayybee :: b -> (a -> b) -> Maybe a -> b
mayybee initialValue _ Nothing = initialValue
mayybee _ f (Just number) = f number
-- >>> fromMaybe 0 Nothing
-- 0
-- >>> fromMaybe 0 (Just 1)
-- 1
fromMaybe :: a -> Maybe a -> a
fromMaybe def Nothing = def
fromMaybe _ (Just val) = val
-- >>> listToMaybe [1, 2, 3]
-- Just 1
-- >>> listToMaybe []
-- Nothing
listToMaybe :: [a] -> Maybe a
listToMaybe [] = Nothing
listToMaybe list = Just (head list)
-- >>> maybeToList (Just 1)
-- [1]
-- >>> maybeToList Nothing
-- []
maybeToList :: Maybe a -> [a]
maybeToList Nothing = []
maybeToList (Just val) = [val]
-- >>> catMaybes [Just 1, Nothing, Just 2]
-- [1,2]
-- >>> catMaybes [Nothing, Nothing, Nothing]
-- []
catMaybes :: [Maybe a] -> [a]
catMaybes list = map M.fromJust (filter isJust list)
-- >>> flipMaybe [Just 1, Just 2, Just 3]
-- Just [1,2,3]
-- >>> flipMaybe [Just 1, Nothing, Just 3]
-- Nothing
flipMaybe :: [Maybe a] -> Maybe [a]
flipMaybe list =
if difference > 0 then Nothing
else Just justValues
where
justValues = catMaybes list
difference = length list - length justValues | mikegehard/haskellBookExercises | chapter12/MaybeLib.hs | mit | 1,760 | 0 | 8 | 559 | 412 | 227 | 185 | 28 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module SoOSiM.Examples.Parser where
import Data.Aeson ((.:),eitherDecode,FromJSON(..),Value (..))
import Data.ByteString.Lazy as BS
import Data.Maybe (fromJust)
import Control.Applicative ((<$>),(<*>))
import Control.Monad (mzero)
import SoOSiM.Components.ResourceDescriptor
import SoOSiM.Components.SoOSApplicationGraph
data Example
= Example [ApplicationGraph] String [Resource]
instance FromJSON Example where
parseJSON (Object v) =
Example <$>
(v .: "Apps") <*>
(v .: "Distribution") <*>
(v .: "Platform")
parseJSON k = error $ "Parse error, not an object: " ++ show k
readExample ::
FilePath
-> IO Example
readExample fn = do
exampleBS <- BS.readFile fn
let example = either error id $ eitherDecode exampleBS
return $! example
| christiaanb/SoOSiM-components | src/SoOSiM/Examples/Parser.hs | mit | 838 | 0 | 11 | 167 | 247 | 140 | 107 | 25 | 1 |
module Proteome.System.Path where
import Path (Abs, File, Path, parseRelFile)
import Path.IO (findExecutable)
import Proteome.Data.GrepError (GrepError)
import qualified Proteome.Data.GrepError as GrepError (GrepError(NoSuchExecutable, NotInPath))
findExe ::
MonadIO m =>
MonadDeepError e GrepError m =>
Text ->
m (Path Abs File)
findExe exe = do
path <- hoistEitherAs parseError $ parseRelFile (toString exe)
hoistMaybe notInPath =<< findExecutable path
where
parseError =
GrepError.NoSuchExecutable exe
notInPath =
GrepError.NotInPath exe
| tek/proteome | packages/proteome/lib/Proteome/System/Path.hs | mit | 578 | 0 | 11 | 101 | 169 | 92 | 77 | -1 | -1 |
import Drawing
import Exercises
import Geometry (find,beyond,line_circle,circle_circle)
import Geometry.Utils (coarse_ne)
main = drawPicture myPicture
myPicture = version1
{--
grain = 0.02
left = -10
next = left + grain
right = -left
--}
version1 points =
drawPoints circle &
drawPoints parabola &
message "Fat Circle and Parabola"
where circle = [(x,y) | x<-range,
y<-range,
(let r2=x*x+y*y
in r2 > 1 && r2 < 4)]
parabola = [(x,x*x) | x<-range]
| alphalambda/k12math | prog/demo/ex17visible.hs | mit | 564 | 1 | 14 | 187 | 176 | 94 | 82 | 15 | 1 |
-- |
-- Module: Main
-- License: MIT
--
-- Run like:
-- >$ cabal-graphdeps mypackage | tred | dot -Tpng > ~/mypackage.png
module Main where
import Control.Applicative
import Control.Exception
import Control.Monad (foldM, forM_)
import Data.Char (isSpace)
import Data.List (isInfixOf, stripPrefix)
import Data.List.Split (splitWhen)
import qualified Data.Map as Map
import qualified Data.Set as Set
import Options
import qualified System.Directory as Filesystem
import System.Exit (exitFailure)
import qualified System.Process as Process
import qualified System.IO as IO
import System.IO.Temp (withSystemTempDirectory)
import qualified Text.Parsec as Parsec
import qualified Text.Parsec.String as Parsec
data MainOptions = MainOptions
{ optGlobalPackageDb :: String
, optFundamental :: [String]
, optExclude :: Set.Set String
}
instance Options MainOptions where
defineOptions = pure MainOptions
<*> simpleOption "global-package-db" ""
""
<*> defineOption (optionType_list ',' optionType_string) (\o -> o
{ optionLongFlags = ["fundamental-packages"]
, optionDefault = ["base"]
, optionDescription = "These packages and their dependencies should be considered fundamental to the package DB."
})
<*> defineOption (optionType_set ',' optionType_string) (\o -> o
{ optionLongFlags = ["exclude-packages"]
, optionDefault = Set.empty
, optionDescription = "These packages and their dependencies will be excluded when rendering the graph."
})
resolveDeps :: MainOptions -> Map.Map String (Set.Set String) -> String -> IO (Map.Map String (Set.Set String))
resolveDeps _ allDeps pkg | Map.member pkg allDeps = return allDeps
resolveDeps opts allDeps pkg | Set.member (extractPkgName pkg) (optExclude opts) = return allDeps
resolveDeps opts allDeps pkg = do
let header1 = "Resolving dependencies...\nIn order, the following would be installed (use -v for more details):\n"
let header2 = "Resolving dependencies...\nIn order, the following would be installed:\n"
IO.hPutStrLn IO.stderr ("[" ++ pkg ++ "]")
output <- Process.readProcess "cabal" ["--package-db=clear", "install", "--dry-run", "--ghc-pkg-options=--global-package-db=./empty-db", pkg] ""
if "All the requested packages are already installed:" `isInfixOf` output
then return allDeps
else do
trimmedOutput <- case stripPrefix header1 output of
Just stripped -> return stripped
Nothing -> case stripPrefix header2 output of
Just stripped -> return stripped
Nothing -> do
IO.hPutStrLn IO.stderr (show output)
error "unknown output format"
let newDeps = linesToDeps pkg (splitWhen (== '\n') trimmedOutput)
let insertedDeps = Map.insertWith Set.union pkg newDeps allDeps
foldM (resolveDeps opts) insertedDeps (Set.toList newDeps)
linesToDeps :: String -> [String] -> Set.Set String
linesToDeps pkg lines = Set.fromList $ do
line <- filter (not . null) lines
parsedLine <- case stringMatch lineOnlyVersion line of
Just _ -> return line
Nothing -> case stringMatch lineVersionWithNote line of
Just version -> return version
Nothing -> error ("can't parse line " ++ show line)
if parsedLine == pkg
then []
else [parsedLine]
stringMatch :: Parsec.Parser a -> String -> Maybe a
stringMatch parser input = case Parsec.parse parser "" input of
Left _ -> Nothing
Right x -> Just x
lineOnlyVersion :: Parsec.Parser ()
lineOnlyVersion = do
Parsec.skipMany1 alphaNumDot
Parsec.eof
lineVersionWithNote :: Parsec.Parser String
lineVersionWithNote = do
version <- Parsec.many1 alphaNumDot
_ <- Parsec.char ' '
_ <- Parsec.char '('
Parsec.skipMany1 Parsec.anyChar
Parsec.eof
return version
alphaNumDot :: Parsec.Parser Char
alphaNumDot = Parsec.lower <|> Parsec.upper <|> Parsec.digit <|> Parsec.oneOf "-."
renderDeps :: MainOptions -> Map.Map String (Set.Set String) -> String -> Set.Set String
renderDeps opts deps rootPkg = rendered where
(_, _, rendered) = loop rootPkg (rootPkg, Set.empty, Set.empty)
-- This package has already been visited, so we don't need to continue
-- any further.
loop pkg acc@(_, visited, _) | Set.member pkg visited = acc
-- map "foo-bar-baz-1.0" to "foo-bar-baz" for excluded packages
loop pkg acc | Set.member (extractPkgName pkg) (optExclude opts) = acc
loop pkg (parent, visited, lines) = let
pkgDeps = Set.toList (Map.findWithDefault Set.empty pkg deps)
visited' = Set.insert pkg visited
lines' = Set.union lines $ Set.fromList $ do
dep <- pkgDeps
-- map "foo-bar-baz-1.0" to "foo-bar-baz" for excluded packages
if Set.member (extractPkgName dep) (optExclude opts)
then []
else [show pkg ++ " -> " ++ show dep]
(_, visited'', lines'') = foldr loop (pkg, visited', lines') pkgDeps
in (parent, visited'', lines'')
extractPkgName :: String -> String
extractPkgName pkg = case stripPrefix "-" (dropWhile (/= '-') (reverse pkg)) of
Nothing -> pkg
Just rev -> reverse rev
printDeps :: MainOptions -> Map.Map String (Set.Set String) -> String -> IO ()
printDeps opts deps pkg = forM_ (Set.toAscList (renderDeps opts deps pkg)) putStrLn
readGhcPkgField :: String -> String -> IO String
readGhcPkgField pkgName fieldName = do
rawField <- Process.readProcess "ghc-pkg" ["field", pkgName, fieldName] ""
case stripPrefix (fieldName ++ ":") rawField of
Nothing -> error ("Unexpected output from ghc-pkg field: " ++ show rawField)
Just s -> return (dropWhile isSpace (dropWhileEnd isSpace s))
dropWhileEnd :: (a -> Bool) -> [a] -> [a]
dropWhileEnd p = foldr (\x xs -> if p x && null xs then [] else x : xs) []
initSandbox :: MainOptions -> IO ()
initSandbox opts = do
_ <- Process.readProcess "cabal" ["sandbox", "init"] ""
Filesystem.createDirectory "empty-db"
globalDb <- case optGlobalPackageDb opts of
-- Look for the global package DB
"" -> do
rawGlobalDb <- Process.readProcess "ghc-pkg" ["list", "--no-user-package-db"] ""
case lines rawGlobalDb of
firstLine:_ ->
-- ghc-pkg adds a ':' to the first line when not
-- run from a terminal
return (reverse (drop 1 (reverse firstLine)))
_ -> error "Unexpected output from ghc-pkg list"
path -> return path
-- these packages and their dependencies will be excluded from the
-- graph for being fundamental.
forM_ (optFundamental opts) $ \pkg -> do
-- find package id (used in .conf filename)
pkgId <- readGhcPkgField pkg "id"
let pkgConf = pkgId ++ ".conf"
-- try to figure out what the package depends on; in the GHC
-- installed on my system, ::depends includes all dependencies
-- of the package.
deps <- readGhcPkgField pkg "depends"
let splitDeps = words deps
let emptyPackageDbConfs = pkgConf : [s ++ ".conf" | s <- splitDeps]
forM_ emptyPackageDbConfs $ \conf -> Filesystem.copyFile (globalDb ++ "/" ++ conf) ("empty-db/" ++ conf)
withCurrentDirectory :: FilePath -> IO a -> IO a
withCurrentDirectory dir io = bracket
(do
cwd <- Filesystem.getCurrentDirectory
Filesystem.setCurrentDirectory dir
return cwd)
Filesystem.setCurrentDirectory
(\_ -> io)
main :: IO ()
main = runCommand $ \opts args -> do
rootPackageName <- case args of
[x] -> return x
_ -> do
IO.hPutStrLn IO.stderr "Usage: cabal-graphdeps <package-name>"
exitFailure
deps <- withSystemTempDirectory "cabal-graphdeps.d-" $ \dir -> withCurrentDirectory dir $ do
initSandbox opts
resolveDeps opts Map.empty rootPackageName
putStrLn "digraph {"
printDeps opts deps rootPackageName
putStrLn "}"
| jmillikin/cabal-graphdeps | src/Main.hs | mit | 7,535 | 363 | 10 | 1,400 | 2,113 | 1,198 | 915 | 152 | 4 |
module FrontEnd.SrcLoc where
import Control.Applicative
import Control.Monad.Identity
import Control.Monad.Writer
import Data.Foldable
import Data.Traversable
import Data.Binary
import Data.Generics
import PackedString
data SrcLoc = SrcLoc {
srcLocFileName :: PackedString,
srcLocLine :: {-# UNPACK #-} !Int,
srcLocColumn :: {-# UNPACK #-} !Int
}
deriving(Data,Typeable,Eq,Ord)
{-! derive: update, Binary !-}
data SrcSpan = SrcSpan { srcSpanBegin :: !SrcLoc, srcSpanEnd :: !SrcLoc }
deriving(Data,Typeable,Eq,Ord)
{-! derive: update, Binary !-}
-- Useful bogus file names used to indicate where non-file-based errors are.
fileNameCommandLine = packString "(command line)"
fileNameUnknown = packString "(unknown)"
fileNameGenerated = packString "(generated)"
bogusASrcLoc = SrcLoc fileNameUnknown (-1) (-1)
bogusSrcSpan = SrcSpan bogusASrcLoc bogusASrcLoc
instance Monoid SrcLoc where
mempty = bogusASrcLoc
mappend a b
| a == bogusASrcLoc = b
| otherwise = a
--------------------
-- haslocation class
--------------------
class HasLocation a where
srcLoc :: a -> SrcLoc
srcSpan :: a -> SrcSpan
srcSpan x = bogusSrcSpan { srcSpanBegin = slx, srcSpanEnd = slx } where slx = srcLoc x
srcLoc x = srcSpanBegin (srcSpan x)
instance HasLocation a => HasLocation [a] where
srcLoc xs = mconcat (map srcLoc xs)
instance HasLocation SrcLoc where
srcLoc x = x
instance HasLocation SrcSpan where
srcSpan x = x
instance HasLocation (SrcLoc,SrcLoc) where
srcSpan (x,y) = SrcSpan x y
instance HasLocation (Located a) where
srcSpan (Located x _) = x
data Located x = Located SrcSpan x
deriving(Ord,Show,Data,Typeable,Eq)
{-! derive: Binary !-}
fromLocated :: Located x -> x
fromLocated (Located _ x) = x
instance Functor Located where
fmap f (Located l x) = Located l (f x)
instance Foldable Located where
foldMap f (Located l x) = f x
instance Traversable Located where
traverse f (Located l x) = Located l <$> f x
located ss x = Located (srcSpan ss) x
-----------------------
-- srcloc monad classes
-----------------------
class Monad m => MonadSrcLoc m where
getSrcLoc :: m SrcLoc
getSrcSpan :: m SrcSpan
getSrcSpan = getSrcLoc >>= return . srcSpan
getSrcLoc = getSrcSpan >>= return . srcLoc
class MonadSrcLoc m => MonadSetSrcLoc m where
withSrcLoc :: SrcLoc -> m a -> m a
withSrcSpan :: SrcSpan -> m a -> m a
withSrcLoc sl a = withSrcSpan (srcSpan sl) a
withSrcSpan ss a = withSrcLoc (srcLoc ss) a
withLocation :: (HasLocation l,MonadSetSrcLoc m) => l -> m a -> m a
withLocation l = withSrcSpan (srcSpan l)
instance Monoid w => MonadSrcLoc (Writer w) where
getSrcLoc = return mempty
instance Monoid w => MonadSetSrcLoc (Writer w) where
withSrcLoc _ a = a
instance MonadSrcLoc Identity where
getSrcLoc = return mempty
instance MonadSetSrcLoc Identity where
withSrcLoc _ a = a
-----------------
-- show instances
-----------------
instance Show SrcLoc where
show (SrcLoc fn l c) = unpackPS fn ++ f l ++ f c where
f (-1) = ""
f n = ':':show n
instance Show SrcSpan where
show SrcSpan { srcSpanBegin = sl1, srcSpanEnd = sl2 }
| sl1 == sl2 = show sl1
| otherwise = show sl1 ++ "-" ++ show sl2
| dec9ue/jhc_copygc | src/FrontEnd/SrcLoc.hs | gpl-2.0 | 3,318 | 0 | 10 | 732 | 1,082 | 554 | 528 | 84 | 1 |
{-# LANGUAGE RecordWildCards, PatternGuards #-}
module ParseHints where
import Types
import Arches
import AtomIndex
import qualified IndexSet as IxS
import Data.List.Split
import System.IO
import Data.Functor
import System.FilePath
import System.Directory
import qualified Data.ByteString.Char8 as BS
import Data.Maybe
import Data.Char
import Data.List
{- The format is
# comment
hint <pkg1>/<ver1> <pkg2>/<ver2> ...
easy <pkg1>/<ver1> <pkg2>/<ver2> ...
force-hint <pkg1>/<ver1> <pkg2>/<ver2> ...
remove <pkg1>/<ver1> ...
force <pkg1>/<ver1> ...
block <pkg1> <pkg2> <pkg3> ...
block-all source
approve <pkg1>/<ver1> <pkg2>/<ver2> ...
unblock <pkg1>/<ver1> <pkg2>/<ver2> ...
urgent <pkg1>/<ver1> <pkg2>/<ver2> ...
age-days <days> <pkg1>/<ver1> <pkg2>/<ver2> ...
finished
-}
-- Copied from /srv/release.debian.org/britney/etc/britney2.conf for now
hintFiles = [
("vorlon" , hints_all),
("aba" , hints_all),
("he" , hints_all),
("luk" , hints_all),
("zobel" , hints_standard ++ ["force"]),
("pkern" , hints_standard ++ ["force"]),
("adsb" , hints_standard ++ ["force","force-hint"]),
("neilm" , hints_standard),
("mehdi" , hints_standard),
("jcristau" , hints_standard),
("faw" , hints_helpers),
("nthykier" , hints_helpers),
("freeze" , ["block","block-all","block-udeb"]),
("freeze-exception" , ["unblock","unblock-udeb"]),
("test-hints", hints_all) -- for the test suite
]
-- Copied from code/b2/britney.py
hints_helpers = ["easy", "hint", "remove", "block", "block-udeb", "unblock", "unblock-udeb", "approve"]
hints_standard = ["urgent", "age-days"] ++ hints_helpers
hints_all = ["force", "force-hint", "block-all"] ++ hints_standard
data HintSpec = HintSpec SourceName (Maybe DebianVersion) (Maybe Arch)
deriving (Show, Eq, Ord)
data Hint = Easy [HintSpec]
| Hint [HintSpec]
| ForceHint [HintSpec]
| Remove HintSpec
| Force HintSpec
| Block HintSpec
| BlockUdeb HintSpec
| BlockAll
| Approve HintSpec
| Unblock HintSpec
| UnblockUdeb HintSpec
| Urgent HintSpec
| AgeDays Age [HintSpec]
deriving (Show, Eq, Ord)
readHintFiles :: Config -> IO [Hint]
readHintFiles config | Nothing <- hintDir config = return []
readHintFiles config | Just dir <- hintDir config = concat <$> mapM (readHintFile dir) hintFiles
readHintFile :: FilePath -> (String, [String]) -> IO [Hint]
readHintFile dir (file,allowed) =
do ex <- doesFileExist (dir </> file)
if ex
then do
h <- openFile (dir </> file) ReadMode
hSetEncoding h char8
concatMap (readHintLine allowed) . untilFinished . lines <$> hGetContents h
else return []
untilFinished :: [String] -> [String]
untilFinished = takeWhile (\l -> not ("finished" `isPrefixOf` l))
readHintLine :: [String] -> String -> [Hint]
readHintLine allowed line =
case words line of
[] -> []
cmd:args | cmd `notElem` allowed -> []
| otherwise -> parseHint cmd args
parseHint "unblock" args = map Unblock $ mapMaybe parseHintSpec args
parseHint "unblock-udeb" args = map UnblockUdeb $ mapMaybe parseHintSpec args
parseHint "block" args = map Block $ mapMaybe parseHintSpec args
parseHint "block-udeb" args = map BlockUdeb $ mapMaybe parseHintSpec args
parseHint "remove" args = map Remove $ mapMaybe parseHintSpec args
parseHint "block-all" ["source"] = [BlockAll]
parseHint _ _ = []
parseHintSpec src = case splitOn "/" src of
[src] -> Just $ HintSpec (SourceName (BS.pack src)) Nothing Nothing
[src,version] -> Just $ HintSpec (SourceName (BS.pack src)) (Just (DebianVersion (BS.pack version))) Nothing
_ -> Nothing
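-- Informal examples of the parsing above (constructor arguments are shown as
-- plain strings for readability; the real fields are packed ByteStrings):
--
-- > parseHint "unblock" ["foo/1.0"]
-- > -- ~ [Unblock (HintSpec "foo" (Just "1.0") Nothing)]
-- > parseHint "block-all" ["source"] -- ~ [BlockAll]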
data HintResults = HintResults {
blockedSources :: IxS.Set Source
, removedSources :: IxS.Set Source
}
deriving (Show)
processHints :: Config -> AtomIndex -> SuiteInfo -> SuiteInfo -> GeneralInfo -> [Hint] -> HintResults
processHints config ai unstable testing general hints = HintResults {..}
where blockedSources = IxS.filter isReallyBlockedSource $ sources unstable `IxS.difference` sources testing
isReallyBlockedSource srcI =
((allBlocked || isBlockedSource srcI) && not (isUnblockedSource srcI))
|| (isBlockedUdebSource srcI && not (isUnblockedUdebSource srcI))
allBlocked = BlockAll `elem` hints
isUnblockedSource srcI = foldl' (isUnblockedBy (ai `lookupSrc` srcI)) False hints
isUnblockedBy src True _ = True
isUnblockedBy src False (Unblock hintSpec) = hintSpecApplies hintSpec src
isUnblockedBy src b _ = b
isBlockedSource srcI = foldl' (isBlockedBy (ai `lookupSrc` srcI)) False hints
isBlockedBy src True _ = True
isBlockedBy src False (Block hintSpec) = hintSpecApplies hintSpec src
isBlockedBy src b _ = b
isUnblockedUdebSource srcI = foldl' (isUnblockedUdebBy (ai `lookupSrc` srcI)) False hints
isUnblockedUdebBy src True _ = True
isUnblockedUdebBy src False (UnblockUdeb hintSpec) = hintSpecApplies hintSpec src
isUnblockedUdebBy src b _ = b
isBlockedUdebSource srcI = foldl' (isBlockedUdebBy (ai `lookupSrc` srcI)) False hints
isBlockedUdebBy src True _ = True
isBlockedUdebBy src False (BlockUdeb hintSpec) = hintSpecApplies hintSpec src
isBlockedUdebBy src b _ = b
removedSources = IxS.filter isRemovedSource $ sources unstable `IxS.union` sources testing
isRemovedSource srcI = foldl' (isRemovedBy (ai `lookupSrc` srcI)) False hints
isRemovedBy src True _ = True
isRemovedBy src False (Remove hintSpec) = hintSpecApplies hintSpec src
isRemovedBy src b _ = b
-- TODO: binNMU syntax
hintSpecApplies (HintSpec sn1 v1 Nothing) (Source name version) =
sn1 == name && maybe True (== version) v1
hintSpecApplies _ _ = False
| nomeata/sat-britney | ParseHints.hs | gpl-2.0 | 6,038 | 0 | 16 | 1,393 | 1,703 | 914 | 789 | 116 | 11 |
{-----------------------------------------------------------------
(c) 2009-2014 Markus Dittrich,
Pittsburgh Supercomputing Center &
Carnegie Mellon University
This program is free software; you can redistribute it
and/or modify it under the terms of the GNU General Public
License Version 3 as published by the Free Software Foundation.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License Version 3 for more details.
You should have received a copy of the GNU General Public
License along with this program; if not, write to the Free
Software Foundation, Inc., 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.
--------------------------------------------------------------------}
-- | RpnCalc defines the data structures and a calculator engine
-- for computing mathematical expressions that have been parsed
-- into reverse polish notation
module RpnCalc ( get_val_from_symbolTable
, rpn_compute
, try_evaluate_expression
) where
-- imports
import qualified Data.Map as M
import Prelude
-- local imports
import GenericModel
import RpnData
-- | computes an expression based on an RPN stack
-- molecule names are looked up in a MoleculeMap
-- NOTE: This function expects the RPNstack to be sanitized
-- with respect to the variables, i.e., all variables in
-- the stack are assumed to exist in the VariableMap
rpn_compute :: SymbolTable -> Double -> RpnStack -> Double
rpn_compute _ _ (RpnStack [(Number x)]) = x
rpn_compute symbols theTime (RpnStack xs) = num
where
(Number num) = head . foldl evaluate [] $ xs
-- evaluate unary function (sin, cos, ..)
evaluate ((Number x):ys) (UnaFunc f) =
(Number $ f x):ys
-- evaluate binary function (*,+,..)
evaluate ((Number x):(Number y):ys) (BinFunc f) =
(Number $ f y x):ys
-- extract current time
evaluate ys (Time) = (Number theTime):ys
-- extract molecule variable
evaluate ys (Variable x) =
(Number $ get_val_from_symbolTable x theTime symbols):ys
evaluate ys item = item:ys
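-- A minimal illustration of the evaluator above (assuming an empty symbol
-- table and the RpnData constructors used in the patterns); the stack is the
-- postfix form of 3 + 4:
--
-- > rpn_compute (SymbolTable M.empty M.empty) 0.0
-- > (RpnStack [Number 3, Number 4, BinFunc (+)])
--
-- evaluates to 7.0.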
-- | retrieve the value of a given symbol (either variable or molecule count) from
-- the symbol table
get_val_from_symbolTable :: String -> Double -> SymbolTable -> Double
get_val_from_symbolTable var aTime symbols =
case M.lookup var (molSymbols symbols) of
Just value -> fromIntegral value
Nothing -> case (M.!) (varSymbols symbols) var of
Constant c -> c
Function s -> rpn_compute symbols aTime s
-- | try to evaluate an RPN stack and return the result as a Double
-- if we can evaluate it during parse-time (i.e. if it doesn't contain
-- things like time and molecule count)
-- NOTE: We do _not_ want to substitute any molecule counts even
-- if we should know them; molecule counts are inherently dynamic
-- and need to be evaluated at run-time
try_evaluate_expression :: RpnStack -> VariableMap -> Either RpnStack Double
try_evaluate_expression stack varMap =
if can_evaluate stack
then
Right $ rpn_compute (SymbolTable M.empty varMap) 0.0 stack
else
Left stack
where
can_evaluate = null . filter unknown_element . toList
where
                unknown_element x = case x of
                  Time       -> True
                  Variable v -> not (v `M.member` varMap)
                  _          -> False
| haskelladdict/simgi | src/RpnCalc.hs | gpl-3.0 | 3,791 | 0 | 15 | 1,054 | 558 | 297 | 261 | 38 | 5 |
module Utils.Numeric (
compareDouble
) where
compareDouble :: Double -> Double -> Double -> Bool
compareDouble a b eps
| a == b
= True -- shortcut, handles infinities
| a == 0 || b == 0 || diff < minValue
-- a or b is zero or both are extremely close to it
-- relative error is less meaningful here
= diff < (eps * minValue)
| otherwise
-- use relative error
= diff / (absA + absB) < eps
where
absA = abs a
absB = abs b
diff = abs (a - b)
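    -- Illustrative calls (example values, not taken from a test suite):
    --
    -- > compareDouble 0.1 0.1 1e-9 -- True, via the equality shortcut
    -- > compareDouble 0.1 (0.1 + 1e-12) 1e-9 -- True, relative error ~5e-12
    -- > compareDouble 0.1 0.2 1e-9 -- False, relative error ~0.33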
minValue :: (RealFloat a) => a
minValue = x
where n = floatDigits x
b = floatRadix x
(l, _) = floatRange x
x = encodeFloat (b^n - 1) (l - n - 1) | thalerjonathan/phd | thesis/code/sir/src/test/Utils/Numeric.hs | gpl-3.0 | 684 | 0 | 12 | 230 | 235 | 124 | 111 | 19 | 1 |
module Ampersand.Core.ShowAStruct
(AStruct(..))
where
import Ampersand.Basics
import Ampersand.Core.A2P_Converters
import Ampersand.Core.AbstractSyntaxTree
import Ampersand.Core.ShowPStruct
class AStruct a where
showA :: a -> String
instance AStruct A_Context where
showA = showP . aCtx2pCtx
instance AStruct Expression where
showA = showP . aExpression2pTermPrim
instance AStruct A_Concept where
showA = showP . aConcept2pConcept
instance AStruct AClassify where
showA = showP . aClassify2pClassify
instance AStruct Rule where
showA = showP . aRule2pRule
instance AStruct Relation where
showA = showP . aRelation2pRelation
instance AStruct AAtomPair where
showA p = "("++showA (apLeft p)++","++ showA (apRight p)++")"
instance AStruct AAtomValue where
showA at = case at of
AAVString{} -> show (aavstr at)
AAVInteger _ i -> show i
AAVFloat _ f -> show f
AAVBoolean _ b -> show b
AAVDate _ day -> show day
AAVDateTime _ dt -> show dt
AtomValueOfONE -> "1"
instance AStruct ExplObj where
showA = showP . aExplObj2PRef2Obj
| AmpersandTarski/ampersand | src/Ampersand/Core/ShowAStruct.hs | gpl-3.0 | 1,157 | 0 | 12 | 281 | 341 | 174 | 167 | 33 | 0 |
-- Solutions for the exercises of the SLIDES "Programming In Haskell" by Graham Hutton: CHAPTER 4 (Defining Functions)
{-- (1) Consider a function safetail that behaves in the same way as tail, except that safetail maps the empty list to the empty list, whereas tail gives an error in this case. Define safetail using:
(a) a conditional expression;
(b) guarded equations;
(c) pattern matching.
Hint: the library function null :: [a] -> Bool can be used to test if a list is empty.
--}
-- the type of the function
safetail :: [a] -> [a]
-- (a) conditional expression
safetail xs = if null xs then [] else tail xs
-- (b) guarded equations
safetail' xs
| null xs = []
| otherwise = tail xs
-- (c) using pattern matching
safetail'' [] = []
safetail'' (_:xs) = xs
-- (2) Give three possible definitions for the logical or operator (||) using pattern matching.
or' :: Bool -> Bool -> Bool
or' _ True = True
or' True _ = True
or' False False = False
-- (3) Redefine the following version of (&&) using conditionals rather than patterns:
-- True && True = True
-- _ && _ = False
and' :: Bool -> Bool -> Bool
and' x y = if x == True then (if y == True then True else False) else False
-- (4) Do the same for the following version:
-- True && b = b
-- False && _ = False
and'' :: Bool -> Bool -> Bool
and'' x y = if x == True then y else False
| thalerjonathan/phd | coding/learning/haskell/grahambook/src/Slides/Chapter4.hs | gpl-3.0 | 1,364 | 0 | 8 | 297 | 239 | 130 | 109 | 15 | 3 |
module Scheme.Evaluator.IOPrimitivesSpec where
import Test.Hspec hiding (shouldThrow)
spec :: Spec
spec = do
describe "" $ do
it "" $ pending
| Altech/haScm | test/Scheme/Evaluator/IOPrimitivesSpec.hs | gpl-3.0 | 139 | 0 | 11 | 28 | 43 | 23 | 20 | 5 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module ProperDemo where
import qualified Data.Map.Strict as M
import Control.Monad.IO.Class (liftIO)
import Control.Monad.State (modify, get)
import Visnov
import VisnovDesc
import Sound
type Affinity = Int
main = do
putStrLn "Ok."
world <- loadWorld
runVisnov game world 0
pembertonLikes, pembertonDislikes :: Visnov Affinity ()
pembertonLikes = modify $ \ aff -> aff + 1
pembertonDislikes = modify $ \ aff -> aff - 1
loadWorld :: IO World
loadWorld = setupWorld charMap bgMap
where
charMap :: M.Map CharacterID (M.Map PoseID FilePath)
charMap = M.fromList [ ("duderton", M.singleton "blank_stare" "img/duderton.png")
, ("archibald", M.singleton "blank_stare" "img/archibald.png")
, ("pemberton", M.fromList [("blank_stare", "img/pemberton.png"), ("b", "img/pemberton_b.png")])
, ("box", M.singleton "base" "img/box.png")
]
bgMap :: M.Map BackgroundID FilePath
bgMap = M.fromList [ ("paris", "img/paris.png")
, ("space_wedge", "img/space_wedge.png")
]
onStage = pose "blank_stare"
game = do
liftIO $ playBackground "suave.wav"
setBackground "paris"
p <- getCharacter "pemberton"
as p $ do
onStage
say "The Proper Visual Novel EngineLibraryFramework makes it easy to write"
say "your own interactive novel experience!"
getChoice [ ("That sounds awesome!", awesome)
, ("It's called an ELF?", elf)
]
awesome = do
a <- getCharacter "archibald"
p <- getCharacter "pemberton"
as a $ do
onStage
say "I am skeptical of the computing machine's ability to capture a novel."
as p $ do
say "Prepare to be amazed."
getChoice [ ("I too am skeptical.", pembertonDislikes >> skeptic)
, ("I am very prepared.", pembertonLikes >> ready)
]
elf = do
p <- getCharacter "pemberton"
as p $ do
onStage
say "Yup!"
getChoice [("Cool!", awesome)]
skeptic = do
a <- getCharacter "archibald"
p <- getCharacter "pemberton"
as a $ do
onStage
say "Ah! A man with reason superior to this ape, Pemberton's!"
as p $ do
say "Trust me just a little bit, my fair collective of judges."
getChoice [ ("...", ready)]
ready = do
a <- getCharacter "archibald"
p <- getCharacter "pemberton"
affinity <- get
as p $ do
pose "b"
say "AWWWW YEAH! I HAVE TRANSFORMED INTO A HERSHEY KISS ANGEL!"
say "TOP THAT, BITCH!"
as a $ do
pose "blank_stare"
say "I am defeated."
if affinity > 0
then
as p $ do
say "I'm glad you believed in me, judges."
else
as p $ do
say "Don't you wish you trusted me?"
| exitmouse/proper | src/ProperDemo.hs | gpl-3.0 | 2,763 | 0 | 12 | 753 | 732 | 360 | 372 | 78 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.AndroidDeviceProvisioning.Customers.Devices.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Lists a customer\'s devices.
--
-- /See:/ <https://developers.google.com/zero-touch/ Android Device Provisioning Partner API Reference> for @androiddeviceprovisioning.customers.devices.list@.
module Network.Google.Resource.AndroidDeviceProvisioning.Customers.Devices.List
(
-- * REST Resource
CustomersDevicesListResource
-- * Creating a Request
, customersDevicesList
, CustomersDevicesList
-- * Request Lenses
, cParent
, cXgafv
, cUploadProtocol
, cAccessToken
, cUploadType
, cPageToken
, cPageSize
, cCallback
) where
import Network.Google.AndroidDeviceProvisioning.Types
import Network.Google.Prelude
-- | A resource alias for @androiddeviceprovisioning.customers.devices.list@ method which the
-- 'CustomersDevicesList' request conforms to.
type CustomersDevicesListResource =
"v1" :>
Capture "parent" Text :>
"devices" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "pageToken" Text :>
QueryParam "pageSize" (Textual Int64) :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] CustomerListDevicesResponse
-- | Lists a customer\'s devices.
--
-- /See:/ 'customersDevicesList' smart constructor.
data CustomersDevicesList =
CustomersDevicesList'
{ _cParent :: !Text
, _cXgafv :: !(Maybe Xgafv)
, _cUploadProtocol :: !(Maybe Text)
, _cAccessToken :: !(Maybe Text)
, _cUploadType :: !(Maybe Text)
, _cPageToken :: !(Maybe Text)
, _cPageSize :: !(Maybe (Textual Int64))
, _cCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'CustomersDevicesList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cParent'
--
-- * 'cXgafv'
--
-- * 'cUploadProtocol'
--
-- * 'cAccessToken'
--
-- * 'cUploadType'
--
-- * 'cPageToken'
--
-- * 'cPageSize'
--
-- * 'cCallback'
customersDevicesList
:: Text -- ^ 'cParent'
-> CustomersDevicesList
customersDevicesList pCParent_ =
CustomersDevicesList'
{ _cParent = pCParent_
, _cXgafv = Nothing
, _cUploadProtocol = Nothing
, _cAccessToken = Nothing
, _cUploadType = Nothing
, _cPageToken = Nothing
, _cPageSize = Nothing
, _cCallback = Nothing
}
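-- A possible usage sketch (the customer id is invented, and the lens
-- operators @&@ and @?~@ are assumed to be in scope, e.g. from the lens
-- package):
--
-- > customersDevicesList "customers/123456789" & cPageSize ?~ 10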
-- | Required. The customer managing the devices. An API resource name in the
-- format \`customers\/[CUSTOMER_ID]\`.
cParent :: Lens' CustomersDevicesList Text
cParent = lens _cParent (\ s a -> s{_cParent = a})
-- | V1 error format.
cXgafv :: Lens' CustomersDevicesList (Maybe Xgafv)
cXgafv = lens _cXgafv (\ s a -> s{_cXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
cUploadProtocol :: Lens' CustomersDevicesList (Maybe Text)
cUploadProtocol
= lens _cUploadProtocol
(\ s a -> s{_cUploadProtocol = a})
-- | OAuth access token.
cAccessToken :: Lens' CustomersDevicesList (Maybe Text)
cAccessToken
= lens _cAccessToken (\ s a -> s{_cAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
cUploadType :: Lens' CustomersDevicesList (Maybe Text)
cUploadType
= lens _cUploadType (\ s a -> s{_cUploadType = a})
-- | A token specifying which result page to return.
cPageToken :: Lens' CustomersDevicesList (Maybe Text)
cPageToken
= lens _cPageToken (\ s a -> s{_cPageToken = a})
-- | The maximum number of devices to show in a page of results. Must be
-- between 1 and 100 inclusive.
cPageSize :: Lens' CustomersDevicesList (Maybe Int64)
cPageSize
= lens _cPageSize (\ s a -> s{_cPageSize = a}) .
mapping _Coerce
-- | JSONP
cCallback :: Lens' CustomersDevicesList (Maybe Text)
cCallback
= lens _cCallback (\ s a -> s{_cCallback = a})
instance GoogleRequest CustomersDevicesList where
type Rs CustomersDevicesList =
CustomerListDevicesResponse
type Scopes CustomersDevicesList = '[]
requestClient CustomersDevicesList'{..}
= go _cParent _cXgafv _cUploadProtocol _cAccessToken
_cUploadType
_cPageToken
_cPageSize
_cCallback
(Just AltJSON)
androidDeviceProvisioningService
where go
= buildClient
(Proxy :: Proxy CustomersDevicesListResource)
mempty
| brendanhay/gogol | gogol-androiddeviceprovisioning/gen/Network/Google/Resource/AndroidDeviceProvisioning/Customers/Devices/List.hs | mpl-2.0 | 5,411 | 0 | 18 | 1,278 | 879 | 508 | 371 | 120 | 1 |
{-# LANGUAGE DeriveDataTypeable, RecordWildCards, ExistentialQuantification, FlexibleContexts #-}
module Main where
-- standard modules
import Prelude hiding (catch)
import Control.Exception
import Control.Monad
import Control.Parallel.Strategies
import qualified Data.Char as Char
import Data.List
import Data.Maybe
import qualified Data.Map as Map
import qualified Data.Set as Set
import Data.Ratio
import System.Console.CmdArgs
import System.Environment
import System.IO
import System.IO.Unsafe
import Text.Printf
-- local modules
import Basics
import Benchmark
import Calculus.All
import DecisionProcedure
import DecisionProcedure.All
import Helpful.String
--import Debug.Trace
--import Helpful.General
-- begin commandline option handling ------------------------------------------
data Options = Options { optMinRange :: Int
, optMaxRange :: Int
-- , optSampleSize :: Int
, optNumOfNets :: Int
, optTimeout :: Int
, optRelations :: String
, optCalculus :: String
-- , optNumOfNodes :: Int
, optDensity :: Float
, optAreal :: Int
, optBatch :: Bool
, optScenario :: Bool
} deriving (Show, Data, Typeable)
defaultOptions = Options
{ optMinRange = 5
&= opt (5 :: Int)
&= explicit
&= name "m"
&= name "minsize"
&= typ "MINSIZE"
&= help "Start with networks of size MINSIZE. (default = 5)"
, optMaxRange = (-1)
&= opt (-1 :: Int)
&= explicit
&= name "M"
&= name "maxsize"
&= typ "MAXSIZE"
&= help "Stop after networks of size MAXSIZE. (default = negative value = infinity)"
-- , optSampleSize = 1
-- &= opt (1 :: Int)
-- &= explicit
-- &= name "s"
-- &= name "samplesize"
-- &= typ "SAMPLESIZE"
-- &= help "Create SAMPLESIZE number of networks per adjustment step. (default = 1)"
, optNumOfNets = 5000
&= opt (5000 :: Int)
&= explicit
&= name "n"
&= name "networks"
&= typ "NUMBER OF NETWORKS"
&= help "Generate NUMBER OF NETWORKS networks per density. (default = 5000)"
, optTimeout = (-1)
&= opt ((-1) :: Int)
&= explicit
&= name "t"
&= name "timeout"
&= typ "TIMEOUT"
&= help "Timeout each decision procedure on each network after TIMEOUT seconds. (default = negative value = infinity)"
, optRelations = ""
&= opt ""
&= explicit
&= name "r"
&= name "relations"
&= typ "Relation(s)"
&= help "Only use these relations. (default = \"\")"
, optCalculus = ""
&= opt ""
&= explicit
&= name "c"
&= name "calculus"
&= typ "Calculus"
&= help "Use this calculus. Supported Calculi: Dipole-72, FlipFlop, OPRA-1, OPRA-4, OPRA-8, OPRA-10. (default = \"\")"
, optDensity = 0.5
&= opt (0.5 :: Float)
&= explicit
&= name "d"
&= name "density"
&= typ "INITIAL DENSITY"
&= help "Start with the density closest to the given value. (default = 0.5)"
, optAreal = 0
&= opt (0 :: Int)
&= explicit
&= name "a"
&= name "areal"
&= typ "NUMBER"
&= help "1 = Only use areal relations. 2 = Only use non-areal relations. Any other number = No restriction. (Default = 0)"
, optBatch = def
&= explicit
&= name "b"
&= name "batch"
&= help "Start in batch mode and don't wait for input."
, optScenario = def
&= explicit
&= name "s"
&= name "scenario"
&= help "Generate scenarios and don't search for a phase transition."
} &=
-- verbosity &=
-- help "Compares the results of semi-decision procedures for consistency of\
  help ("This program compares several semi-decision procedures for the " ++
"consistency of constraint networks using the given relations.") &=
helpArg [explicit, name "h", name "help", help "Show this message."] &=
-- versionArg [ignore] &=
versionArg [help "Show version information."] &=
program "compareAndAdjust" &=
summary "compare version 12.02.23, (K) André Scholz" &=
details [ ""
            -- "This program compares several semi-decision procedures for the consistency of constraint networks using the given relations."
-- , "To compare the procedures on 13 networks of density 0.3 with 5 nodes type:"
-- , "compare 13 5 0.3"
]
-- end commandline option handling --------------------------------------------
data Calc = forall b . (Calculus b, HasDecisionProcedure (ARel b)) => Calc b
instance Show Calc where
-- show (Calc a) = "Calc " ++ show a
show (Calc a) = show a
-- improve: put this list somewhere else and just use it here.
-- maybe make it more generic by having a list of known calculi and mapping the
-- appropriate function over it.
allBaseRels = concat
[ map Calc (Set.toList cBaserelations :: [FlipFlop])
-- , map Calc (Set.toList cBaserelations :: [Dipole24])
, map Calc (Set.toList cBaserelations :: [Dipole72])
-- , map Calc (Set.toList cBaserelations :: [Dipole80])
, map Calc (Set.toList cBaserelations :: [Opra1])
, map Calc (Set.toList cBaserelations :: [Opra2])
, map Calc (Set.toList cBaserelations :: [Opra3])
, map Calc (Set.toList cBaserelations :: [Opra4])
, map Calc (Set.toList cBaserelations :: [Opra8])
, map Calc (Set.toList cBaserelations :: [Opra10])
, map Calc (Set.toList cBaserelations :: [Opra16])
]
helperForCalculus str = case map Char.toUpper $ trim str of
"DIPOLE-72" -> Calc (Set.findMin cBaserelations :: Dipole72)
"DIPOLE72" -> Calc (Set.findMin cBaserelations :: Dipole72)
"FLIPFLOP" -> Calc (Set.findMin cBaserelations :: FlipFlop)
"FF" -> Calc (Set.findMin cBaserelations :: FlipFlop)
"LR" -> Calc (Set.findMin cBaserelations :: FlipFlop)
"OPRA-1" -> Calc (Set.findMin cBaserelations :: Opra1 )
"OPRA-2" -> Calc (Set.findMin cBaserelations :: Opra2 )
"OPRA-3" -> Calc (Set.findMin cBaserelations :: Opra3 )
"OPRA-4" -> Calc (Set.findMin cBaserelations :: Opra4 )
"OPRA-8" -> Calc (Set.findMin cBaserelations :: Opra8 )
"OPRA-10" -> Calc (Set.findMin cBaserelations :: Opra10 )
"OPRA-16" -> Calc (Set.findMin cBaserelations :: Opra16 )
"OPRA1" -> Calc (Set.findMin cBaserelations :: Opra1 )
"OPRA2" -> Calc (Set.findMin cBaserelations :: Opra2 )
"OPRA3" -> Calc (Set.findMin cBaserelations :: Opra3 )
"OPRA4" -> Calc (Set.findMin cBaserelations :: Opra4 )
"OPRA8" -> Calc (Set.findMin cBaserelations :: Opra8 )
"OPRA10" -> Calc (Set.findMin cBaserelations :: Opra10 )
"OPRA16" -> Calc (Set.findMin cBaserelations :: Opra16 )
otherwise -> error $
"Sorry, i don't know about the calculus \"" ++ str ++ "\""
main = do
hSetBuffering stdout NoBuffering
args <- getArgs
opts <- (if null args then withArgs ["--help"] else id) $ cmdArgs defaultOptions
optionHandler opts
optionHandler opts@Options{..} = do
let wordsOptRelations = words optRelations
if null optCalculus then do
when (null optRelations)
         (error $ "Which relations or calculus should I use?")
let typeHelperStr = head $ wordsOptRelations
let helperLst = filter
(\ (Calc a) -> cShowRel a == map Char.toLower typeHelperStr)
allBaseRels
when (null helperLst)
(error $ "\"" ++ typeHelperStr ++
"\" is not a valid relation in any known calculus.")
let boxedTypeHelper = head helperLst
unboxAndExec boxedTypeHelper wordsOptRelations opts
else if null optRelations then do
useWholeCalculusAndExec (helperForCalculus optCalculus) opts
else do
unboxAndExec (helperForCalculus optCalculus) wordsOptRelations opts
unboxAndExec (Calc typeHelper) wordsOptRelations opts@Options{..} = do
let rels =
( case optAreal of
1 -> intersect (tail $ typeHelper:cBaserelationsArealList)
2 -> intersect (tail $ typeHelper:cBaserelationsNonArealList)
_ -> id
) $ tail $ typeHelper:(map cReadRel wordsOptRelations)
-- force full evaluation of rels. Is there a better way to do this?
-- ( `seq` does not help here )
when (rels == rels) (return ())
exec rels opts
useWholeCalculusAndExec (Calc typeHelper) opts@Options{..} = do
let rels = tail $ typeHelper:case optAreal of
1 -> cBaserelationsArealList
2 -> cBaserelationsNonArealList
_ -> cBaserelationsList
exec rels opts
exec rels opts@Options{..} = do
let head' = head rels
let rank' = rank head'
let procedures' = procedures $ ARel head'
let startStr = "Starting a new Benchmarking"
++ ( if optBatch then
" (running in batch mode)"
else
" (press 'q' to quit)" )
++ "...\n"
-- restore:
-- startBenchString <- catch
-- (readFile "BENCHMARK.COLLECTION")
-- ((\e -> do
-- putStrLn startStr
-- return "fromList []"
-- ) :: SomeException -> IO String
-- )
-- delete:
putStrLn startStr
let startBenchString = "fromList []"
let startBenchRead = reads startBenchString
startBench <- if startBenchRead == [] || (snd $ head startBenchRead) /= "" then do
putStrLn startStr
return Map.empty
else
return $ fst $ head startBenchRead
bench <- markTheBench optScenario optBatch optMinRange optMaxRange optNumOfNets procedures' optTimeout rank' rels optDensity startBench
analyze bench
plotInconsistenciesPerSizeAndMethodInPercent bench
plotPercentageOfInconsistentNetworksPerDensity bench
plotSpeedPerSizeAndMethodSuccessOnly bench
plotSpeedPerSizeAndMethod bench
| spatial-reasoning/zeno | src/compareAndAdjust.hs | bsd-2-clause | 10,467 | 0 | 22 | 3,194 | 2,212 | 1,135 | 1,077 | 204 | 20 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveTraversable #-}
module Ermine.Parser where
import Control.Applicative
import Control.Lens
import Control.Monad
import Data.Foldable
import Data.Monoid
import qualified Data.Set as Set
import Data.Set (Set)
import Data.Traversable
import Data.Void
import Text.Parser.Char hiding (text)
import Text.Parser.Combinators
import Text.Parser.LookAhead
import Text.Parser.Token
import Text.PrettyPrint.ANSI.Leijen as Pretty hiding (line, (<>), (<$>), empty, braces)
import Text.Trifecta.Combinators
import Text.Trifecta.Delta
data LayoutContext
= IndentedLayout !Int
| BracedLayout
{ braceLeft :: String
, braceEndsWith :: Parser ()
, braceUnmatchedBy :: Parser Void
, braceRight :: String
}
data Location = Location Delta String deriving Show
class HasLocation t where
location :: Lens' t Location
instance HasLocation Location where
location = id
instance Eq Location where
Location a _ == Location b _ = a == b
instance Ord Location where
compare (Location a _) (Location b _) = compare a b
data ParseState = ParseState
{ _parserLayout :: [LayoutContext]
, _parserBol :: Bool
, _parserInput :: String
, _parserLocation :: {-# UNPACK #-} !Location
}
data Failure = Failure
{ _message :: [Doc]
, _expected :: Set String
} deriving Show
instance Monoid Failure where
mempty = Failure [] mempty
Failure as xs `mappend` Failure bs ys = Failure (as <|> bs) (mappend xs ys)
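-- | The four outcomes of running a parser, as the instances below use them:
-- 'Pure' and 'Fail' are success and failure without having consumed input
-- (so '<|>' may still try the other branch), while 'Commit' and 'Err' are
-- success and failure after consuming input, carrying the new 'ParseState'
-- or the error 'Location'; only 'try' turns an 'Err' back into a
-- recoverable 'Fail'.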
data ParseResult a
= Pure a {-# UNPACK #-} !Failure
| Fail {-# UNPACK #-} !Failure
| Commit a !ParseState (Set String)
| Err !Location {-# UNPACK #-} !Failure
deriving (Functor, Foldable, Traversable)
newtype Parser a = Parser { runParser :: ParseState -> ParseResult a }
deriving Functor
makeClassy ''Failure
makeLenses ''ParseState
layoutDepth :: Parser Int
layoutDepth = Parser $ \s -> case s^.parserLayout of
IndentedLayout n:_ -> Pure n mempty
_ -> Pure 0 mempty
layoutEndsWith :: Parser ()
layoutEndsWith = Parser $ \s -> runParser (views parserLayout go s) s where
go (BracedLayout _ p _ _:_) = p
go (IndentedLayout _ : xs) = go xs
go [] = eof
instance HasLocation ParseState where
location = parserLocation
bump :: Char -> String -> Location -> Location
bump '\n' xs (Location d _ ) = Location (d <> delta '\n') xs
bump c _ (Location d ys) = Location (d <> delta c) ys
err :: ParseState -> Failure -> ParseResult a
err s = Err (s^.location)
instance Applicative Parser where
pure a = Parser $ \_ -> Pure a mempty
(<*>) = ap
instance Monad Parser where
return a = Parser $ \_ -> Pure a mempty
Parser m >>= f = Parser $ \s -> case m s of
Fail e -> Fail e
Err x y -> Err x y
Pure a e -> case runParser (f a) s of
Pure b e' -> Pure b (e <> e')
Fail e' -> Fail (e <> e')
Commit b t xs -> Commit b t xs
Err x y -> Err x y
Commit a t xs -> case runParser (f a) t of
Pure b e -> Commit b t (xs <> e^.expected)
Fail e -> err t (over expected (xs<>) e)
Commit b u ys -> Commit b u ys
Err x y -> Err x y
fail e = Parser $ \s -> Fail (Failure [text e] mempty)
-- | race two parsers merging errors, and expected values, while taking the longer successful parse.
race :: Parser a -> Parser a -> Parser a
race (Parser m) (Parser n) = Parser $ \s -> case m s of
Pure a e -> case n s of -- old state
Pure a' e' -> Pure a (e <> e')
Fail e' -> Pure a (e <> e')
r -> r
Fail e -> case n s of -- old state
Pure a' e' -> Pure a' e'
Fail e' -> Fail (e <> e')
r -> r
l@(Commit a t xs) -> case n s of -- old state
r@(Commit a' t' xs') -> case _parserLocation t `compare` _parserLocation t' of
LT -> r
EQ -> Commit a t (xs <> xs')
GT -> l
Err el e
| el == _parserLocation t -> Commit a t (xs <> e^.expected)
-- fall through
_ -> l
l@(Err el e) -> case n s of -- old state
r@(Err el' e') -> case compare el el' of
LT -> r
EQ -> Err el (mappend e e')
GT -> l
r@(Commit a t xs)
| el == _parserLocation t -> Commit a t (e^.expected <> xs)
-- fall through
r -> r
instance Alternative Parser where
empty = Parser $ \_ -> Fail (Failure [] mempty)
Parser m <|> Parser n = Parser $ \s -> case m s of
Fail e -> case n s of
Pure a e' -> Pure a (e <> e')
Fail e' -> Fail (e <> e')
r -> r
r -> r
instance MonadPlus Parser where
mzero = empty
mplus = (<|>)
instance Parsing Parser where
try (Parser m) = Parser $ \s -> case m s of
Err{} -> Fail (Failure [] mempty)
r -> r
Parser m <?> n = Parser $ \s -> case m s of
Pure a (Failure r@(_:_) _) -> Pure a (Failure r (Set.singleton n))
Fail (Failure r _) -> Fail (Failure r (Set.singleton n))
l -> l
eof = Parser $ \s -> case s^.parserInput of
"" -> Pure () mempty
_ -> Fail (Failure [] (Set.singleton "EOF"))
notFollowedBy (Parser m) = Parser $ \s -> case m s of
Pure a _ -> Fail (Failure [text "unexpected" <+> text (show a)] mempty)
Commit a t _ -> err t (Failure [text "unexpected" <+> text (show a)] mempty)
_ -> Pure () mempty
unexpected s = Parser $ \_ -> Fail (Failure [text "unexpected" <+> text s] mempty)
-- carefully avoid flagging this as a mutation if we don't change state so we can backtrack.
setBol :: Bool -> Parser ()
setBol b = Parser $ \s -> if s^.parserBol == b
then Pure () mempty
else Commit () (s & parserBol .~ b) mempty
instance LookAheadParsing Parser where
lookAhead (Parser m) = Parser $ \s -> case m s of
Commit a _ _ -> Pure a mempty
Err{} -> Fail mempty
r -> r
-- satisfaction without changing beginning of line flag
rawSatisfy :: (Char -> Bool) -> Parser Char
rawSatisfy p = Parser $ \s -> case s^.parserInput of
c:cs
| p c -> Commit c (s & parserInput .~ cs & parserLocation %~ bump c cs) mempty
-- fall through
_ -> Fail mempty
instance CharParsing Parser where
satisfy p = Parser $ \s -> case s^.parserInput of
c:cs
| p c -> Commit c (s & parserInput .~ cs & parserLocation %~ bump c cs & parserBol .~ False) mempty
-- fall through
_ -> Fail mempty
data Token = VirtualSemi | VirtualBrace | Whitespace | Other
deriving (Eq,Ord,Show,Read)
layout :: Parser Token
layout = undefined
virtualLeftBrace :: Parser ()
virtualRightBrace :: Parser ()
virtualLeftBrace = undefined
virtualRightBrace = undefined
laidout :: Parser a -> Parser [a]
laidout p
= braces (semiSep p)
<|> between virtualLeftBrace virtualRightBrace (semiSep p)
parseState :: Parser ParseState
parseState = Parser $ \s -> Pure s mempty
-- stillOnside :: Parser ()
-- stillOnside = ps >>= \case
-- s -> column s >
instance TokenParsing Parser where
someSpace = do
Whitespace <- layout
return ()
nesting = undefined -- TODO: we need to update parsers for this, as we don't follow this scheme precisely
semi = undefined -- TODO
-- token = stillOnside *> p <* optional someSpace
{-
instance DeltaParsing P where
-}
{-
instance Errable P where
raiseErr e = P $ \_ -> raiseErr e
-}
| PipocaQuemada/ermine | src/Ermine/Parser.hs | bsd-2-clause | 7,320 | 0 | 19 | 1,933 | 2,815 | 1,428 | 1,387 | 192 | 16 |
-- #hide
-----------------------------------------------------------------------------
-- |
-- Module : Network.CGI.Multipart
-- Copyright : (c) Peter Thiemann 2001,2002
-- (c) Bjorn Bringert 2005-2006
-- License : BSD-style
--
-- Maintainer : Anders Kaseorg <[email protected]>
-- Stability : experimental
-- Portability : non-portable
--
-- Parsing of the multipart format from RFC2046.
-- Partly based on code from WASHMail.
--
-----------------------------------------------------------------------------
module Network.CGI.Multipart
(
-- * Multi-part messages
MultiPart(..), BodyPart(..)
, parseMultipartBody, hGetMultipartBody
, showMultipartBody
-- * Headers
, ContentType(..), ContentTransferEncoding(..)
, ContentDisposition(..)
, parseContentType
, getContentType
, getContentTransferEncoding
, getContentDisposition
) where
import Control.Monad
import Data.Int (Int64)
import Data.List (intersperse)
import Data.Maybe
import System.IO (Handle)
import Network.CGI.Header
import qualified Data.ByteString.Lazy.Char8 as BS
import Data.ByteString.Lazy.Char8 (ByteString)
--
-- * Multi-part stuff.
--
data MultiPart = MultiPart [BodyPart]
deriving (Show, Eq, Ord)
data BodyPart = BodyPart Headers ByteString
deriving (Show, Eq, Ord)
-- | Read a multi-part message from a 'ByteString'.
parseMultipartBody :: String -- ^ Boundary
-> ByteString -> MultiPart
parseMultipartBody b =
MultiPart . mapMaybe parseBodyPart . splitParts (BS.pack b)
-- | Read a multi-part message from a 'Handle'.
-- Fails on parse errors.
hGetMultipartBody :: String -- ^ Boundary
-> Handle
-> IO MultiPart
hGetMultipartBody b = liftM (parseMultipartBody b) . BS.hGetContents
parseBodyPart :: ByteString -> Maybe BodyPart
parseBodyPart s =
do
let (hdr,bdy) = splitAtEmptyLine s
hs <- parseM pHeaders "<input>" (BS.unpack hdr)
return $ BodyPart hs bdy
showMultipartBody :: String -> MultiPart -> ByteString
showMultipartBody b (MultiPart bs) =
unlinesCRLF $ foldr (\x xs -> d:showBodyPart x:xs) [c,BS.empty] bs
where d = BS.pack ("--" ++ b)
c = BS.pack ("--" ++ b ++ "--")
showBodyPart :: BodyPart -> ByteString
showBodyPart (BodyPart hs c) =
unlinesCRLF $ [BS.pack (n++": "++v) | (HeaderName n,v) <- hs] ++ [BS.empty,c]
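-- A minimal rendering sketch (boundary, header name and payload are made-up
-- values): note that the boundary is passed without the leading dashes;
-- 'showMultipartBody' adds them itself.
_multipartExample :: ByteString
_multipartExample =
    showMultipartBody "frontier"
        (MultiPart [BodyPart [(HeaderName "content-type", "text/plain")]
                             (BS.pack "hello")])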
--
-- * Splitting into multipart parts.
--
-- | Split a multipart message into the multipart parts.
splitParts :: ByteString -- ^ The boundary, without the initial dashes
-> ByteString
-> [ByteString]
splitParts b = spl . dropPreamble b
where
spl x = case splitAtBoundary b x of
Nothing -> []
Just (s1,d,s2) | isClose b d -> [s1]
| otherwise -> s1:spl s2
-- | Drop everything up to and including the first line starting
-- with the boundary.
dropPreamble :: ByteString -- ^ The boundary, without the initial dashes
-> ByteString
-> ByteString
dropPreamble b s | BS.null s = BS.empty
| isBoundary b s = dropLine s
| otherwise = dropPreamble b (dropLine s)
-- | Split a string at the first boundary line.
splitAtBoundary :: ByteString -- ^ The boundary, without the initial dashes
-> ByteString -- ^ String to split.
-> Maybe (ByteString,ByteString,ByteString)
-- ^ The part before the boundary, the boundary line,
-- and the part after the boundary line. The CRLF
-- before and the CRLF (if any) after the boundary line
-- are not included in any of the strings returned.
-- Returns 'Nothing' if there is no boundary.
splitAtBoundary b s = spl 0
where
spl i = case findCRLF (BS.drop i s) of
Nothing -> Nothing
Just (j,l) | isBoundary b s2 -> Just (s1,d,s3)
| otherwise -> spl (i+j+l)
where
s1 = BS.take (i+j) s
s2 = BS.drop (i+j+l) s
(d,s3) = splitAtCRLF s2
-- | Check whether a string starts with two dashes followed by
-- the given boundary string.
isBoundary :: ByteString -- ^ The boundary, without the initial dashes
-> ByteString
-> Bool
isBoundary b s = startsWithDashes s && b `BS.isPrefixOf` BS.drop 2 s
-- | Check whether a string for which 'isBoundary' returns true
-- has two dashes after the boundary string.
isClose :: ByteString -- ^ The boundary, without the initial dashes
-> ByteString
-> Bool
isClose b s = startsWithDashes (BS.drop (2+BS.length b) s)
-- | Checks whether a string starts with two dashes.
startsWithDashes :: ByteString -> Bool
startsWithDashes s = BS.pack "--" `BS.isPrefixOf` s
--
-- * RFC 2046 CRLF
--
crlf :: ByteString
crlf = BS.pack "\r\n"
unlinesCRLF :: [ByteString] -> ByteString
unlinesCRLF = BS.concat . intersperse crlf
-- | Drop everything up to and including the first CRLF.
dropLine :: ByteString -> ByteString
dropLine s = snd (splitAtCRLF s)
-- | Split a string at the first empty line. The CRLF (if any) before the
-- empty line is included in the first result. The CRLF after the
-- empty line is not included in the result.
-- If there is no empty line, the entire input is returned
-- as the first result.
splitAtEmptyLine :: ByteString -> (ByteString, ByteString)
splitAtEmptyLine s | startsWithCRLF s = (BS.empty, dropCRLF s)
| otherwise = spl 0
where
spl i = case findCRLF (BS.drop i s) of
Nothing -> (s, BS.empty)
Just (j,l) | startsWithCRLF s2 -> (s1, dropCRLF s2)
| otherwise -> spl (i+j+l)
where (s1,s2) = BS.splitAt (i+j+l) s
-- | Split a string at the first CRLF. The CRLF is not included
-- in any of the returned strings.
-- If there is no CRLF, the entire input is returned
-- as the first string.
splitAtCRLF :: ByteString -- ^ String to split.
-> (ByteString,ByteString)
splitAtCRLF s = case findCRLF s of
Nothing -> (s,BS.empty)
Just (i,l) -> (s1, BS.drop l s2)
where (s1,s2) = BS.splitAt i s
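-- For example: splitAtCRLF (BS.pack "a\r\nb") == (BS.pack "a", BS.pack "b")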
-- | Get the index and length of the first CRLF, if any.
findCRLF :: ByteString -- ^ String to split.
-> Maybe (Int64,Int64)
findCRLF s =
case findCRorLF s of
Nothing -> Nothing
Just j | BS.null (BS.drop (j+1) s) -> Just (j,1)
Just j -> case (BS.index s j, BS.index s (j+1)) of
('\n','\r') -> Just (j,2)
('\r','\n') -> Just (j,2)
_ -> Just (j,1)
findCRorLF :: ByteString -> Maybe Int64
findCRorLF s = BS.findIndex (\c -> c == '\n' || c == '\r') s
startsWithCRLF :: ByteString -> Bool
startsWithCRLF s = not (BS.null s) && (c == '\n' || c == '\r')
where c = BS.index s 0
-- | Drop an initial CRLF, if any. If the string is empty,
-- nothing is done. If the string does not start with CRLF,
-- the first character is dropped.
dropCRLF :: ByteString -> ByteString
dropCRLF s | BS.null s = BS.empty
| BS.null (BS.drop 1 s) = BS.empty
| c0 == '\n' && c1 == '\r' = BS.drop 2 s
| c0 == '\r' && c1 == '\n' = BS.drop 2 s
| otherwise = BS.drop 1 s
where c0 = BS.index s 0
c1 = BS.index s 1
| andersk/haskell-cgi | Network/CGI/Multipart.hs | bsd-3-clause | 7,530 | 0 | 15 | 2,163 | 1,870 | 997 | 873 | 123 | 5 |
module Language.Slambda.Read
( parseTerm
, readTerm
, parseProgram
, readProgram
) where
import Language.Slambda.Types
import Language.Slambda.Util
import Control.Monad (liftM, liftM2, when)
import Data.Char
import Data.List
import qualified Data.Map as Map
import Data.Map (Map)
import Text.ParserCombinators.ReadP
parseProgram :: String -> Maybe Program
parseProgram str =
case readP_to_S (do p <- program; eof; return p) str of
[(p, "")] -> Just p
[] -> Nothing
parses -> error $ foldr (.) id (showString "multiple parses:\n" : intersperse (showChar '\n') (map shows parses)) ""
readProgram :: ReadS Program
readProgram = readP_to_S program
parseTerm :: String -> Maybe Term
parseTerm str =
case readP_to_S (do t <- term; eof; return t) str of
[(t, "")] -> Just t
[] -> Nothing
parses -> error $ "multiple parses: " ++ show parses
readTerm :: ReadS Term
readTerm = readP_to_S term
spaces :: ReadP String
spaces = munch1 isSpace
specialChars :: [Char]
specialChars =
[ lambda
, primTag
, strictTag
, lambdaSep
, lparen
, rparen
, lbracket
, rbracket
, defSep
, commentStart
]
isVarInitChar, isVarRestChar :: Char -> Bool
isVarInitChar c = not (isDigit c) && isVarRestChar c && not (elem c [quote, bottom])
isVarRestChar c = not (isSpace c) && not (elem c specialChars)
var :: ReadP Var
var = do
first <- satisfy isVarInitChar
rest <- munch isVarRestChar
return $ Var (first:rest)
integer :: ReadP Integer
integer = do
ds <- munch1 isDigit
case reads ds of
[(n, "")] -> return n
[] -> pfail
_ -> error "multiple parses for integer"
quotedChar :: ReadP Char
quotedChar = do
char quote
c <- do
c <- get
case c of
'\\' -> do
c <- get
case c of
'n' -> return '\n'
't' -> return '\t'
'\'' -> return '\''
_ -> pfail
_ -> return c
char quote
return c
skipSC :: ReadP () -- spaces and comments
skipSC = do
do s <- look
skip s
where skip (c:s) | isSpace c = do _ <- get; skip s
| c == commentStart = do _ <- get; skipComment s
skip _ = do return ()
skipComment ('\n':s) = do _ <- get; skip s
skipComment (_:s) = do _ <- get; skipComment s
skipComment [] = do return ()
unitC :: ReadP Const
unitC = do
char lparen
skipSC
char rparen
return UnitC
bottomC :: ReadP Const
bottomC = do
char bottom
return BottomC
integerC :: ReadP Const
integerC =
liftM IntegerC integer
charC :: ReadP Const
charC =
liftM CharC quotedChar
constT :: ReadP Term
constT =
liftM ConstT $ choice [ bottomC
, unitC
, integerC
, charC
]
varT :: ReadP Term
varT = do
liftM VarT var
primT :: ReadP Term
primT = do
char primTag
name <- munch1 isVarRestChar
case Map.lookup name primsByName of
Just p -> return $ PrimT p
Nothing -> pfail
absT :: ReadP Term
absT = do
char lambda
skipSC
vs <- absVars
skipSC
char lambdaSep
skipSC
t <- term
return $ foldr (\(s, v) t -> AbsT s v t) t vs
where absVars = do
sv <- absVar
vs <- absVars'
return (sv:vs)
absVar = do
s <- option False (char strictTag >> skipSC >> return True)
v <- var
return (s, v)
absVars' = (spaces >> liftM2 (:) absVar absVars') <++ return []
letRecT :: ReadP Term
letRecT = do
between (char lbracket)
(char rbracket)
(do skipSC
ds <- definitions
skipSC
char lambdaSep
skipSC
t <- term
skipSC
return (LetRecT ds t))
nonAppT :: ReadP Term
nonAppT = do
choice [ constT
, varT
, primT
, absT
, letRecT
, between (char lparen >> skipSC)
(skipSC >> char rparen)
term
]
term :: ReadP Term
term = do
skipSC
t <- nonAppT
ts <- term'
return $ foldl1 AppT (t:ts)
where term' = (spaces >> liftM2 (:) nonAppT term') <++ return []
definition :: ReadP Def
definition = do
skipSC
v <- var
skipSC
string defineStr
t <- term
skipSC
return (v, t)
definitions :: ReadP [Def]
definitions = do
defs <- between emptyDefs
emptyDefs
(sepBy definition (char defSep >> emptyDefs))
when (not (null defs)) $ do
let vars = sort (map fst defs)
hasDup v [] = False
hasDup v (v':vs) = v == v' || hasDup v' vs
when (hasDup (head vars) (tail vars)) pfail
return defs
where emptyDefs = do
skipSpaces
optional (char defSep >> emptyDefs)
program :: ReadP Program
program = do
skipSC
ds <- definitions
skipSC
return ds
| pgavin/secdh | lib/Language/Slambda/Read.hs | bsd-3-clause | 4,937 | 0 | 18 | 1,704 | 1,813 | 882 | 931 | 191 | 5 |
module Util.JSON where
import Error
import Util.HandleIO
import qualified Data.Aeson as A
import qualified Data.ByteString.Lazy as BS
decodeFile :: A.FromJSON a => FilePath -> IO a
decodeFile f = io errMsg . errorDecode =<< BS.readFile f
where
errMsg = "couldn't parse file '" ++ f ++ "' to JSON"
errorDecode :: A.FromJSON a => BS.ByteString -> Error a
errorDecode = fromEither . A.eitherDecode
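-- A minimal usage sketch (the file name is an assumption); any type with a
-- 'FromJSON' instance works, 'A.Value' being the most permissive choice.
_decodeExample :: IO A.Value
_decodeExample = decodeFile "config.json"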
| kylcarte/wangtiles | src/Util/JSON.hs | bsd-3-clause | 405 | 0 | 8 | 73 | 125 | 68 | 57 | 10 | 1 |
data X
data Y
data Z
xz :: X -> Z
xz = undefined
yz :: Y -> Z
yz = undefined
xform :: (X, Y) -> (Z, Z)
xform (x, y) = (xz x, yz y) | pdmurray/haskell-book-ex | src/ch5/ch5.9.6.hs | bsd-3-clause | 134 | 0 | 6 | 40 | 85 | 49 | 36 | -1 | -1 |
{-# LANGUAGE
RankNTypes
, ScopedTypeVariables
#-}
module Linear.Grammar
( (.+.)
, (.*.)
, (.==.)
, (.<=.)
, (.>=.)
, module Linear.Grammar.Types.Inequalities
)where
import Linear.Grammar.Types.Syntax (LinAst (EAdd, ECoeff))
import Linear.Grammar.Types.Expressions (makeLinExpr)
import qualified Linear.Grammar.Types.Inequalities as Ineq
import Linear.Grammar.Types.Inequalities (standardForm)
-- | Addition operation
(.+.) :: LinAst k a -> LinAst k a -> LinAst k a
(.+.) = EAdd
infixl 6 .+.
-- | Multiplication operation
(.*.) :: LinAst k a -> a -> LinAst k a
(.*.) = ECoeff
infixl 5 .*.
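-- | Equality constraint between two expressions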
(.==.) :: Eq a => Num a => Ord k => LinAst k a -> LinAst k a -> Ineq.IneqExpr k a a
x .==. y = Ineq.EquExpr (makeLinExpr x) (makeLinExpr y)
infixl 7 .==.
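-- | Less-than-or-equal constraint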
(.<=.) :: Eq a => Num a => Ord k => LinAst k a -> LinAst k a -> Ineq.IneqExpr k a a
x .<=. y = Ineq.LteExpr (makeLinExpr x) (makeLinExpr y)
infixl 7 .<=.
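-- | Greater-than-or-equal constraint (flipped '.<=.')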
(.>=.) :: Eq a => Num a => Ord k => LinAst k a -> LinAst k a -> Ineq.IneqExpr k a a
(.>=.) = flip (.<=.)
infixl 7 .>=.
| athanclark/cassowary-haskell | src/Linear/Grammar.hs | bsd-3-clause | 1,050 | 0 | 11 | 217 | 427 | 238 | 189 | 29 | 1 |
module Onedrive.Internal.Response (json, lbs) where
import Control.Monad (void)
import Control.Monad.Catch (MonadThrow(throwM))
import Control.Monad.IO.Class (MonadIO(liftIO))
import Data.Aeson (FromJSON)
import Data.ByteString (empty)
import Data.ByteString.Lazy (ByteString)
import Network.HTTP.Conduit (HttpExceptionContent(StatusCodeException))
import Network.HTTP.Simple (HttpException(HttpExceptionRequest), Request, Response, httpJSONEither, httpLBS, getResponseStatus, getResponseBody)
import Network.HTTP.Types.Status (unauthorized401, ok200)
import Onedrive.Auth (authorizeRequest)
import Onedrive.Session (Session, tryRenewToken)
json :: (MonadThrow m, MonadIO m, FromJSON a) => Session -> Request -> m a
json session req =
doRequest True session req httpJSONEither processBody
where
processBody (Left e) = throwM e
processBody (Right res) = return res
lbs :: (MonadThrow m, MonadIO m) => Session -> Request -> m ByteString
lbs session req =
doRequest True session req httpLBS return
doRequest :: (MonadThrow m, MonadIO m) => Bool -> Session -> Request -> (Request -> m (Response b)) -> (b -> m a) -> m a
doRequest allowRenew session req getResponse processBody = do
authReq <- liftIO $ authorizeRequest session req
resp <- getResponse authReq
let
responseStatus =
getResponseStatus resp
throwException =
throwM $ HttpExceptionRequest req $ StatusCodeException (void resp) empty
if responseStatus == ok200
then
processBody $ getResponseBody resp
else
if responseStatus == unauthorized401 && allowRenew
then do
newAccessToken <- liftIO $ tryRenewToken session
case newAccessToken of
Nothing ->
throwException
Just _ ->
doRequest False session req getResponse processBody
else
throwException
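-- A minimal usage sketch (the helper name, URL and result type are
-- assumptions, not part of this module; 'parseRequest' lives in
-- Network.HTTP.Simple and is not in the import list above):
--
-- > fetchJson :: Data.Aeson.FromJSON a => Session -> String -> IO a
-- > fetchJson session url = json session =<< parseRequest url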
| asvyazin/hs-onedrive | src/Onedrive/Internal/Response.hs | bsd-3-clause | 1,829 | 0 | 14 | 337 | 554 | 297 | 257 | 40 | 4 |
import Control.Concurrent
import Control.Monad
import Hypervisor.Debug
import System.CPUTime
import Data.Time.Format
main :: IO ()
main = showTime 32
showTime :: Int -> IO ()
showTime 0 = return ()
showTime x = do
cputime <- getCPUTime
writeDebugConsole $ "[" ++ show x ++ "] Now is: \n"
writeDebugConsole $ " " ++ show cputime ++ "\n"
threadDelay 1000000 -- ~1sec
showTime (x - 1)
| GaloisInc/HaLVM | examples/Core/SimplerTime/SimplerTime.hs | bsd-3-clause | 397 | 0 | 9 | 79 | 143 | 70 | 73 | 15 | 1 |
import Text.Printf
import Vec3
main :: IO ()
main = do
let r0 = Vec3 150e09 0.0 0.0
let r1 = Vec3 0.0 0.0 0.0
let v0 = Vec3 0.0 29.658e03 0.0
let v1 = Vec3 0.0 0.0 0.0
let m0 = 5.98e24
let m1 = 1.98855e30
let n = 720 :: Int
let dt = (365.0*24.0*60.0*60.0) / fromIntegral n :: Double
printf "%12s, %12s, %12s\n" "x" "y" "z"
print r0
printOrbit $ orbit n dt r0 v0 m0 r1 v1 m1
g = 6.67e-11
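-- Gravitational force on the body at r1 due to the body at r2, in vector
-- form: F = -G*m1*m2*(r1 - r2) / |r1 - r2|^3, i.e. the inverse-square law
-- with the unit vector folded into the cube of the distance.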
grav :: Vec3 -> Vec3 -> Double -> Double -> Vec3
grav r1 r2 m1 m2 =
let r = vecSub r1 r2;
rm = vecMod r;
c = ((-1) * g * m1 * m2) / rm^3
in vecMuls c r
accel :: Double -> Vec3 -> Vec3
accel m f = vecMuls (1/m) f
applydt :: Double -> Vec3 -> Vec3 -> Vec3
applydt dt v c =
let dv = vecMuls dt c :: Vec3
in vecAdd v dv
orbit :: (Integral i) => i -> Double -> Vec3 -> Vec3 -> Double -> Vec3 -> Vec3 -> Double -> [(Vec3,Vec3)]
orbit n dt r0 v0 m0 r1 v1 m1
| n == 0 = []
| otherwise =
let f0 = grav r0 r1 m0 m1;
f1 = vecInv(f0);
a0 = accel m0 f0;
a1 = accel m1 f1;
nv0 = applydt dt v0 a0;
nv1 = applydt dt v1 a1;
nr0 = applydt dt r0 nv0;
nr1 = applydt dt r1 nv1
in (nr0, nr1) : orbit (n-1) dt nr0 nv0 m0 nr1 nv1 m1
printOrbit :: [(Vec3,Vec3)] -> IO ()
printOrbit [] = putStr ""
printOrbit l = do
print $ fst $ head l;
printOrbit $ tail l
| jguillaumes/orbit-examples | haskell/orbits.hs | bsd-3-clause | 1,340 | 0 | 16 | 411 | 693 | 346 | 347 | 46 | 1 |
{-# LANGUAGE RecursiveDo, RankNTypes, NamedFieldPuns, RecordWildCards #-}
module Distribution.Server.Features.RecentPackages (
RecentPackagesFeature(..),
RecentPackagesResource(..),
initRecentPackagesFeature,
) where
import Distribution.Server.Framework
import Distribution.Server.Features.Core
import Distribution.Server.Features.Users
import Distribution.Server.Features.PackageContents (PackageContentsFeature(..))
import Distribution.Server.Packages.Types
import Distribution.Server.Packages.Render
import qualified Distribution.Server.Packages.PackageIndex as PackageIndex
import qualified Distribution.Server.Framework.ResponseContentTypes as Resource
import Data.Time.Clock (getCurrentTime)
import Data.List (sortBy)
import Data.Ord (comparing)
-- the goal is to have the HTML modules import /this/ one, not the other way around
import qualified Distribution.Server.Pages.Recent as Pages
data RecentPackagesFeature = RecentPackagesFeature {
recentPackagesFeatureInterface :: HackageFeature,
recentPackagesResource :: RecentPackagesResource,
-- necessary information for the representation of a package resource
packageRender :: PkgInfo -> IO PackageRender
-- other informational hooks: perhaps a simplified CondTree so a browser script can dynamically change the package page based on flags
}
instance IsHackageFeature RecentPackagesFeature where
getFeatureInterface = recentPackagesFeatureInterface
data RecentPackagesResource = RecentPackagesResource {
-- replace with log resource
recentPackages :: Resource,
recentRevisions :: Resource
}
initRecentPackagesFeature :: ServerEnv
-> IO (UserFeature
-> CoreFeature
-> PackageContentsFeature
-> IO RecentPackagesFeature)
initRecentPackagesFeature env@ServerEnv{serverCacheDelay, serverVerbosity = verbosity} = do
return $ \user core@CoreFeature{packageChangeHook} packageContents -> do
-- recent caches. in lieu of an ActionLog
-- TODO: perhaps a hook, recentUpdated :: HookList ([PkgInfo] -> IO ())
rec let (feature, updateRecentCache) =
recentPackagesFeature env user core packageContents
cacheRecent
cacheRecent <- newAsyncCacheNF updateRecentCache
defaultAsyncCachePolicy {
asyncCacheName = "recent uploads and revisions (html,rss,html,rss)",
asyncCacheUpdateDelay = serverCacheDelay,
asyncCacheSyncInit = False,
asyncCacheLogVerbosity = verbosity
}
registerHookJust packageChangeHook isPackageChangeAny $ \_ ->
prodAsyncCache cacheRecent
return feature
recentPackagesFeature :: ServerEnv
-> UserFeature
-> CoreFeature
-> PackageContentsFeature
-> AsyncCache (Response, Response, Response, Response)
-> (RecentPackagesFeature, IO (Response, Response, Response, Response))
recentPackagesFeature env
UserFeature{..}
CoreFeature{..}
PackageContentsFeature{packageChangeLog}
cacheRecent
= (RecentPackagesFeature{..}, updateRecentCache)
where
recentPackagesFeatureInterface = (emptyHackageFeature "recentPackages") {
featureResources = map ($ recentPackagesResource) [recentPackages, recentRevisions]
, featureState = []
, featureCaches = [
CacheComponent {
            cacheDesc = "recent packages and revisions page (html, rss, html, rss)",
getCacheMemSize = memSize <$> readAsyncCache cacheRecent
}
]
, featurePostInit = syncAsyncCache cacheRecent
}
recentPackagesResource = RecentPackagesResource {
recentPackages = (extendResourcePath "/recent.:format" (corePackagesPage coreResource)) {
resourceGet = [
("html", const $ liftM (\(x,_,_,_) -> x) $ readAsyncCache cacheRecent)
, ("rss", const $ liftM (\(_,x,_,_) -> x) $ readAsyncCache cacheRecent)
]
},
recentRevisions = (extendResourcePath "/recent/revisions.:format" (corePackagesPage coreResource)) {
resourceGet = [
("html", const $ liftM (\(_,_,x,_) -> x) $ readAsyncCache cacheRecent)
, ("rss", const $ liftM (\(_,_,_,x) -> x) $ readAsyncCache cacheRecent)
]
}
}
packageRender pkg = do
users <- queryGetUserDb
changeLog <- packageChangeLog pkg
let showChangeLogLink = case changeLog of Right _ -> True ; _ -> False
doPackageRender users pkg showChangeLogLink
updateRecentCache = do
-- TODO: move the html version to the HTML feature
pkgIndex <- queryGetPackageIndex
users <- queryGetUserDb
now <- getCurrentTime
let recentChanges = sortBy (flip $ comparing pkgOriginalUploadTime)
(PackageIndex.allPackages pkgIndex)
xmlRepresentation = toResponse $ Resource.XHtml $ Pages.recentPage users recentChanges
rssRepresentation = toResponse $ Pages.recentFeed users (serverBaseURI env) now recentChanges
recentRevisions = sortBy (flip $ comparing revisionTime) .
filter isRevised $ (PackageIndex.allPackages pkgIndex)
revisionTime pkgInfo = pkgLatestUploadTime pkgInfo
isRevised pkgInfo = pkgNumRevisions pkgInfo > 1
xmlRevisions = toResponse $ Resource.XHtml $ Pages.revisionsPage users recentRevisions
rssRevisions = toResponse $ Pages.recentRevisionsFeed users (serverBaseURI env) now recentRevisions
return (xmlRepresentation, rssRepresentation, xmlRevisions, rssRevisions)
{-
data SimpleCondTree = SimpleCondNode [Dependency] [(Condition ConfVar, SimpleCondTree, SimpleCondTree)]
| SimpleCondLeaf
deriving (Show, Eq)
doMakeCondTree :: GenericPackageDescription -> [(String, SimpleCondTree)]
doMakeCondTree desc = map (\lib -> ("library", makeCondTree lib)) (maybeToList $ condLibrary desc)
++ map (\(exec, tree) -> (exec, makeCondTree tree)) (condExecutables desc)
where
makeCondTree (CondNode _ deps comps) = case deps of
[] -> SimpleCondLeaf
_ -> SimpleCondNode deps $ map makeCondComponents comps
makeCondComponents (cond, tree, mtree) = (cond, makeCondTree tree, maybe SimpleCondLeaf makeCondTree mtree)
-}
| snoyberg/hackage-server | Distribution/Server/Features/RecentPackages.hs | bsd-3-clause | 6,753 | 0 | 18 | 1,830 | 1,111 | 623 | 488 | 94 | 2 |
-------------------------------------------------------------------------------
{- LANGUAGE CPP #-}
#if OVERLOADED_STRINGS
{-# LANGUAGE FlexibleInstances #-}
{- LANGUAGE TypeSynonymInstances #-}
#endif
#define DO_TRACE 0
#if ABBREV_WN_AND_TN_CONCRETE_SYNTAX_TO_NUMBER_ALONE__SAFE_ONLY_TO_DEPTH_19 && ABBREV_WN_AND_TN_CONCRETE_SYNTAX_TO_SINGLE_DIGIT__CAN_ONLY_EXPRESS_DOWN_TO_DEPTH_9
#error Please set at most one of the flags ABBREV_WN_AND_TN_CONCRETE_SYNTAX_TO_NUMBER_ALONE__SAFE_ONLY_TO_DEPTH_19 and ABBREV_WN_AND_TN_CONCRETE_SYNTAX_TO_SINGLE_DIGIT__CAN_ONLY_EXPRESS_DOWN_TO_DEPTH_9 to True.
#endif
#define WARN_IGNORED_SUBPATTERNS 1
#define NEVER_IGNORE_SUBPATTERNS 0
-- Formerly DEBUG_WITH_DEEPSEQ_GENERICS.
-- Now also needed to force issuance of all compilePat warnings
-- (so not strictly a debugging flag anymore).
-- [Except it didn't work...]
--- #define NFDATA_INSTANCE_PATTERN 0 -- now a .cabal flag
#define DO_DERIVE_DATA_AND_TYPEABLE 0
#define DO_DERIVE_ONLY_TYPEABLE 1
#if DO_DERIVE_ONLY_TYPEABLE && DO_DERIVE_DATA_AND_TYPEABLE
#undef DO_DERIVE_ONLY_TYPEABLE
#warning DO_DERIVE_ONLY_TYPEABLE forced 0, due to DO_DERIVE_DATA_AND_TYPEABLE being 1.
#define DO_DERIVE_ONLY_TYPEABLE 0
#endif
-- Now specified via --flag=[-]USE_WWW_DEEPSEQ
--- #define USE_WW_DEEPSEQ 1
-------------------------------------------------------------------------------
#if DO_DERIVE_DATA_AND_TYPEABLE
{-# LANGUAGE DeriveDataTypeable #-}
#endif
-- XXX Only needed for something in Blah.hs.
-- Check into it, and see if can't get rid of the need
-- for Typeable instances in here!
#if DO_DERIVE_ONLY_TYPEABLE
{-# LANGUAGE DeriveDataTypeable #-}
#endif
#if NFDATA_INSTANCE_PATTERN
-- For testing only (controlling trace interleaving):
{-# LANGUAGE DeriveGeneric #-}
#endif
{- LANGUAGE DeriveFunctor #-}
-------------------------------------------------------------------------------
-- |
-- Module : Control.DeepSeq.Bounded.Compile
-- Copyright : Andrew G. Seniuk 2014-2015
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : Andrew Seniuk <[email protected]>
-- Stability : provisional
-- Portability : portable (H98)
--
-------------------------------------------------------------------------------
module Control.DeepSeq.Bounded.Compile
(
compilePat
, showPat
--- compilePat' ,
)
where
-------------------------------------------------------------------------------
import Control.DeepSeq.Bounded.Pattern
import Control.DeepSeq.Bounded.PatUtil ( liftPats )
import Data.Char ( isSpace )
import Data.Char ( isLower )
import Data.Char ( ord )
import Data.Char ( isDigit )
import Data.List ( intercalate )
import Data.List ( sort )
import Data.Maybe ( isNothing, fromJust )
import Data.Maybe ( isJust )
#if 0
#if DO_DERIVE_DATA_AND_TYPEABLE
import Data.Data ( Data )
import Data.Typeable ( Typeable )
#elif DO_DERIVE_ONLY_TYPEABLE
import Data.Typeable ( Typeable )
#endif
#if USE_WW_DEEPSEQ
import Control.DeepSeq ( NFData )
#endif
#endif
import Debug.Trace ( trace )
-- The only uses of force in this module are for debugging purposes
-- (including trying to get messages to be displayed in a timely
-- manner, although that problem has not been completely solved).
import Control.DeepSeq ( force )
#if 0
#if NFDATA_INSTANCE_PATTERN
-- for helping trace debugging
import qualified Control.DeepSeq.Generics as DSG
import qualified GHC.Generics as GHC ( Generic )
#endif
#endif
-- Want it in Pattern with other Pattern instances, but it
-- makes a cyclical dependency and we all know that is more
-- trouble than it's worth in GHC (and other Haskell compilers
-- I've known)...
#if OVERLOADED_STRINGS
import GHC.Exts( IsString(..) )
#endif
-------------------------------------------------------------------------------
-- (See note at import of IsString; this instance should be in Pattern.hs.)
#if OVERLOADED_STRINGS
--instance IsString (Rose PatNode) where
instance IsString Pattern where
fromString = compilePat -- aahhhh..... (20150204)
#endif
-------------------------------------------------------------------------------
#if DO_TRACE
mytrace = trace
#else
mytrace _ = id
#endif
-- XXX This still lacks support for the two condensed grammars:
-- ABBREV_WN_AND_TN_CONCRETE_SYNTAX_TO_NUMBER_ALONE__SAFE_ONLY_TO_DEPTH_19
-- ABBREV_WN_AND_TN_CONCRETE_SYNTAX_TO_SINGLE_DIGIT__CAN_ONLY_EXPRESS_DOWN_TO_DEPTH_9
compilePat' :: String -> Pattern
compilePat' s
| not $ null s' = error $ "\ncompilePat: input rejected: "
++ s
++ if isNothing mmsg then "" else "\nParser message: "
++ fromJust mmsg
++ "\nPatterns parsed so far: ["
++ intercalate ", " (map show pats)
++ "]"
| otherwise = case pats of
[] -> error $ "\ncompilePat: "
++ if null s then "empty input" else "vacuous input"
[pat] -> setPatternPatNodeUniqueIDs 0 pat
pats -> setPatternPatNodeUniqueIDs 0 $ liftPats pats
where (pats, mmsg, s') = compilePats s []
-- String in last component of result is unconsumed input.
compilePats :: String -> [Pattern] -> ([Pattern], Maybe String, String)
compilePats s acc
| null s_ltrim = (reverse acc, Nothing, s_ltrim)
| otherwise = case cpat s of
(Left "", s') -> (reverse acc, Nothing, s')
-- (Left "", s') -> compilePats s' acc
(Left msg, s') -> (reverse acc, Just msg, s')
(Right pat, s') -> compilePats s' (pat:acc)
where s_ltrim = dropWhile isSpace s
-- XXX Don't forget to do a post-pass to change W* nodes
-- to corresponding T* nodes, when : modifier was present!
-- Oops, guess I did it here; but the original idea would
-- be less cloning...
cpat :: String -> (Either String Pattern, String)
--cpat _ | trace "J-1: " $ False = undefined
cpat [] = (Left "unexpected end of input", [])
--cpat s | trace ("J-2: "++show s) $ False = undefined
cpat s
| null s'' = error "\ncompilePat: type constraint must precede a pattern node"
| isW = case c of
'.' -> (Right $ Node (WI as) [], cs)
'!' -> (Right $ Node (WS as) [], cs)
'*' -> case parseInt cs [] of
(Nothing, cs'') -> (Right $ Node (WW as ) [], cs'')
(Just is, cs'') -> (Right $ Node (WN $ asn is) [], cs'')
#if USE_CURLY_BRACE_INSTEAD_OF_PAREN_FOR_SUBPATTERNS
'{' ->
#else
'(' ->
#endif
if isNothing mmsg_subpats
then (Right $ Node (WR as) subpats, cs_subpats)
else (Left $ fromJust mmsg_subpats, cs_subpats)
#if USE_CURLY_BRACE_INSTEAD_OF_PAREN_FOR_SUBPATTERNS
'}' ->
#else
')' ->
#endif
(Left "", cs)
c -> error $ "\ncompilePat: unexpected character '" ++ [c] ++ "'"
| otherwise = case c of
'.' -> (Right $ Node (TI as) [], cs)
-- '!' -> (Right $ Node (TS as) [], cs)
'*' -> case parseInt cs [] of
(Nothing, cs'') -> (Right $ Node (TW as ) [], cs'')
(Just is, cs'') -> (Right $ Node (TN $ asn is) [], cs'')
#if USE_CURLY_BRACE_INSTEAD_OF_PAREN_FOR_SUBPATTERNS
'{' ->
#else
'(' ->
#endif
if isNothing mmsg_subpats
then (Right $ Node (TR as) subpats, cs_subpats)
else (Left $ fromJust mmsg_subpats, cs_subpats)
#if USE_CURLY_BRACE_INSTEAD_OF_PAREN_FOR_SUBPATTERNS
'}' ->
#else
')' ->
#endif
(Left "", cs)
c -> error $ "\ncompilePat: unexpected character '" ++ [c] ++ "'"
where
s' = dropWhile isSpace s
(c:cs) = s''
(as_mods, mmsg_mods, s'') = cmods s' -- collect any prefix modifiers
as = case mmsg_mods of
Nothing -> as_mods
Just msg -> error $ "\ncompilePat: " ++ msg
asn is = as { depth = read is :: Int }
isW = not $ doConstrainType as
(subpats, mmsg_subpats, cs_subpats) = compilePats cs []
-- Accumulate any prefix modifiers into an empty PatNodeAttrs structure.
cmods :: String -> (PatNodeAttrs, Maybe String, String)
cmods s = cmods' s emptyPatNodeAttrs
cmods' :: String -> PatNodeAttrs -> (PatNodeAttrs, Maybe String, String)
cmods' [] as = (as, Nothing, [])
--cmods' [] as = (as, Just "cmods': unexpected end of input", [])
cmods' s as = case c of
':' -> cmods' cs_types as_types
'@' -> cmods' cs_delay as_delay
#if USE_PAR_PATNODE
'=' -> cmods' cs_par as_par
#endif
#if USE_PSEQ_PATNODE
'>' -> cmods' cs_pseq as_pseq
#endif
#if USE_TRACE_PATNODE
'+' -> cmods' cs_trace as_trace
#endif
#if USE_PING_PATNODE
'^' -> cmods' cs_ping as_ping
#endif
#if USE_DIE_PATNODE
'/' -> cmods' cs_die as_die
#endif
#if USE_TIMING_PATNODE
'%' -> cmods' cs_timing as_timing
#endif
_ -> (as, Nothing, s)
where
s'@(c:cs) = dropWhile isSpace s
( cs_types , as_types ) = parse_type_constraints cs as
( cs_delay , as_delay ) = parse_delay cs as
#if USE_PAR_PATNODE
( cs_par , as_par ) = ( cs, as { doSpark = True } )
#endif
#if USE_PSEQ_PATNODE
( cs_pseq , as_pseq ) = parse_pseq cs as
#endif
#if USE_TRACE_PATNODE
( cs_trace , as_trace ) = ( cs, as { doTrace = True } )
#endif
#if USE_PING_PATNODE
( cs_ping , as_ping ) = ( cs, as { doPing = True } )
#endif
#if USE_DIE_PATNODE
( cs_die , as_die ) = ( cs, as { doDie = True } )
#endif
#if USE_TIMING_PATNODE
( cs_timing , as_timing ) = ( cs, as { doTiming = True } )
#endif
parse_type_constraints :: String -> PatNodeAttrs -> (String, PatNodeAttrs)
parse_type_constraints s'' as
--- | doConstrainType as = trace "\nwarning: type constraints (:...:) mod given multiple times to a single node, so aggregating type lists." $ (s', as')
| otherwise = (s', as')
where
s = dropWhile isSpace s''
as' = as { doConstrainType = True
, typeConstraints = typeConstraints as ++ tys }
(tys, s') = f s "" []
-- Take up to the next ';', ':', or '\\' and deal.
f :: String -> String -> [String] -> ([String],String)
f s'' tyacc tysacc
| null s' = error "\ncompilePat: type constraint list not ':'-terminated"
| '\\' == c = if null cs
then f cs (c:tyacc) tysacc
else if ':' == head cs -- note ty is already reversed
then f (tail cs) ((':':'\\':ty) ++ tyacc) tysacc
else f cs (('\\':ty) ++ tyacc) tysacc
| ':' == c = ( reverse $ (reverse $ tyacc ++ ty) : tysacc , dropWhile isSpace cs )
-- otherwise ';' == c
| otherwise = f cs "" $ (reverse $ tyacc ++ ty) : tysacc
where
s = dropWhile isSpace s''
(c:cs) = s'
(ty',s') = span (\c->c/=';'&&c/=':'&&c/='\\') s
ty = dropWhile isSpace $ reverse ty' -- yeah yeah
parse_delay :: String -> PatNodeAttrs -> (String, PatNodeAttrs)
parse_delay [] as = error "\nparse_delay: unexpected end of input"
parse_delay s'' as
--- | doDelay as = error "\ncompilePat: delay (@) modifier given multiple times to a single node"
--- | doDelay as = trace "\nwarning: delay (@) mod given multiple times to a single node, so summing." $ (s', as')
| isNothing mis = error $ "\nparse_delay: expected a digit 1-9 (not '" ++ [head s] ++ "')"
| otherwise = (s', as')
where
s = dropWhile isSpace s''
as' = as { doDelay = True -- (convenient to set both here)
, delayus = delayus as + i }
(mis, s') = parseInt s []
is = fromJust mis
i = read is :: Int
#if USE_PSEQ_PATNODE
parse_pseq :: String -> PatNodeAttrs -> (String, PatNodeAttrs)
parse_pseq s'' as
| doPseq as = error "\ncompilePat: pseq (>) modifier given multiple times to a single node"
| not ok = error "\ncompilePat: pseq permutation must cover an initial segment of abc..yz"
-- No harm in allowing it; as for testing arity mismatch, that is not
-- in the parser's purview (at least at this time). It is easily done
-- as a post-parsing check.
--- | null perm = error "\ncompilePat: empty pseq permutation"
| otherwise = (s', as')
where
s = dropWhile isSpace s''
as' = as { doPseq = True -- (convenient to set both here)
, pseqPerm = Just $ map (\c -> ord c - ord 'a') perm }
(perm, s') = span isLower s
ok = sort perm == take (length perm) ['a'..'z']
#endif
-------------------------------------------------------------------------------
-- XXX Doing this to ensure issuance of all warning messages
-- pertaining to the pattern to be compiled!
-- Which isn't quite working?!?.... [Never did resolve this.]
compilePat :: String -> Pattern
#if NFDATA_INSTANCE_PATTERN
compilePat s = force $ compilePat_ s
--compilePat s = let pat = force $! compilePat_ s in trace (show pat) $! pat
--compilePat s = let pat = force $ compilePat_ s in trace (show pat) $! pat
--compilePat s = let !pat = force $ compilePat_ s in trace (show pat) $ pat
--compilePat s = let pat = force $ compilePat_ s in trace (show pat) $ pat
#else
compilePat = compilePat_
#endif
compilePat_ :: String -> Pattern
compilePat_ str = compilePat' str
-------------------------------------------------------------------------------
-- | Inverse of 'compilePat'.
--
-- @showPat . compilePat patstring = patstring@
--
-- (up to optional whitespace, and canonical ordering of any attributes),
-- provided that @compilePat patstring@ succeeds.
{--}
-- /(And, only up to subpatterns elided from # ('WI' or 'TI') or from * ('WW', 'WN', 'TW', or 'TN') nodes, in case these are still accepted by the parser!)/
showPat :: Pattern -> String
showPat (Node pas chs)
| doDelay as = "@" ++ show (delayus as)
++ let as' = as { doDelay = False }
in showPat (Node (setPatNodeAttrs pas as') chs)
#if USE_PAR_PATNODE
| doSpark as = "=" ++ let as' = as { doSpark = False }
in showPat (Node (setPatNodeAttrs pas as') chs)
#endif
#if USE_PSEQ_PATNODE
| doPseq as = ">" ++ showPerm (pseqPerm as)
++ let as' = as { doPseq = False }
in showPat (Node (setPatNodeAttrs pas as') chs)
#endif
#if USE_TRACE_PATNODE
| doTrace as = "+" ++ let as' = as { doTrace = False }
in showPat (Node (setPatNodeAttrs pas as') chs)
#endif
#if USE_PING_PATNODE
| doPing as = "^" ++ let as' = as { doPing = False }
in showPat (Node (setPatNodeAttrs pas as') chs)
#endif
#if USE_DIE_PATNODE
| doDie as = "/" ++ let as' = as { doDie = False }
in showPat (Node (setPatNodeAttrs pas as') chs)
#endif
#if USE_TIMING_PATNODE
| doTiming as = "%" ++ let as' = as { doTiming = False }
in showPat (Node (setPatNodeAttrs pas as') chs)
#endif
| doConstrainType as
=
-- trace "showPat-doConstraintType HERE!" $
":" ++ treps_str
++ let as' = as { doConstrainType = False }
in showPat (Node (setPatNodeAttrs pas as') chs)
#if ABBREV_WN_AND_TN_CONCRETE_SYNTAX_TO_SINGLE_DIGIT__CAN_ONLY_EXPRESS_DOWN_TO_DEPTH_9
| WI{} <- pas = "0" ++ descend chs
#else
| WI{} <- pas = "." ++ descend chs
#endif
| WR{} <- pas = "" ++ descend chs ++ perhapsEmptySubpatterns
#if ABBREV_WN_AND_TN_CONCRETE_SYNTAX_TO_SINGLE_DIGIT__CAN_ONLY_EXPRESS_DOWN_TO_DEPTH_9
| WS{} <- pas = "1" ++ descend chs
#else
| WS{} <- pas = "!" ++ descend chs
#endif
#if ABBREV_WN_AND_TN_CONCRETE_SYNTAX_TO_NUMBER_ALONE__SAFE_ONLY_TO_DEPTH_19 || ABBREV_WN_AND_TN_CONCRETE_SYNTAX_TO_SINGLE_DIGIT__CAN_ONLY_EXPRESS_DOWN_TO_DEPTH_9
| WN{} <- pas = show n ++ descend chs
#else
| WN{} <- pas = "*" ++ show n ++ descend chs
#endif
#if USE_WW_DEEPSEQ
| WW{} <- pas = "*" ++ descend chs
#endif
#if ABBREV_WN_AND_TN_CONCRETE_SYNTAX_TO_SINGLE_DIGIT__CAN_ONLY_EXPRESS_DOWN_TO_DEPTH_9
| TI{} <- pas = "0" ++ descend chs
#else
| TI{} <- pas = "." ++ descend chs
#endif
| TR{} <- pas = "" ++ descend chs ++ perhapsEmptySubpatterns
--- #if ABBREV_WN_AND_TN_CONCRETE_SYNTAX_TO_SINGLE_DIGIT__CAN_ONLY_EXPRESS_DOWN_TO_DEPTH_9
--- | TS{} <- pas = "1" ++ descend chs
--- #else
--- | TS{} <- pas = "!" ++ descend chs
--- #endif
#if ABBREV_WN_AND_TN_CONCRETE_SYNTAX_TO_NUMBER_ALONE__SAFE_ONLY_TO_DEPTH_19 || ABBREV_WN_AND_TN_CONCRETE_SYNTAX_TO_SINGLE_DIGIT__CAN_ONLY_EXPRESS_DOWN_TO_DEPTH_9
| TN{} <- pas = show n ++ descend chs
#else
| TN{} <- pas = "*" ++ show n ++ descend chs
#endif
#if USE_WW_DEEPSEQ
| TW{} <- pas = "*" ++ descend chs
#endif
where
#if USE_CURLY_BRACE_INSTEAD_OF_PAREN_FOR_SUBPATTERNS
emptySubpatterns = "{}"
#else
emptySubpatterns = "()"
#endif
perhapsEmptySubpatterns = if null chs then emptySubpatterns else ""
as = getPatNodeAttrs pas
n = depth as
treps = typeConstraints as
treps_str = intercalate ";" treps ++ ":"
descend :: [Pattern] -> String
descend chs
| null chs = ""
#if USE_CURLY_BRACE_INSTEAD_OF_PAREN_FOR_SUBPATTERNS
| otherwise = "{" ++ concatMap showPat chs ++ "}"
#else
| otherwise = "(" ++ concatMap showPat chs ++ ")"
#endif
-------------------------------------------------------------------------------
parseInt :: String -> String -> ( Maybe String, String )
parseInt [] acc = ( if null acc then Nothing else Just acc , "" )
parseInt s@(c:cs) acc
-- These should be safe cutoffs without having to worry about exact figures.
--- DEPTH_USES_INT64 isn't implemented yet, this is just a note
--- for future consideration. (Should be in NFDataN if anywhere...).
--- I'm not ready to make this sweeping change yet.
--- #if DEPTH_USES_INT64
--- | length acc > 19 = error $ "compilePat: * followed by too many (>19) digits"
--- #else
| length acc > 9 = error $ "compilePat: * followed by too many (>9) digits"
--- #endif
| isDigit c = parseInt cs (acc++[c])
| otherwise = ( if null acc then Nothing else Just acc , s )
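-- For example: parseInt "12ab" [] == (Just "12", "ab")
--              parseInt "ab"   [] == (Nothing, "ab")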
-------------------------------------------------------------------------------
-- | Using String instead of TypeRep since I wasn't sure
-- how to avoid mandatory recursion to complete the latter.
-- (Probably it can be done -- ':~:' perhaps -- but I was
-- unsure and this is working for the moment.)
compileTypeReps :: String -> ([String], String)
--compileTypeReps :: String -> ([TypeRep], String)
compileTypeReps cs = (treps,cs')
where
(tnames, cs') = parseTyNames cs
parseTyNames :: String -> ([String], String)
parseTyNames s = (sps', s')
where
sps' = map (dropWhile pstop) sps
-- !_ = trace ("(sps,s') = " ++ show (sps,s')) ()
(sps,s') = splitPred psplit pstop s
-- (sps,s') = splitPred p s
pstop x = x == '{' || x == '}'
-- pstop x = x == '{'
psplit x = x == ' ' || pstop x
-- p x = x == ' ' || x == '{'
-- p x = not $ isAlphaNum x || x == '_' || x == '\''
#if 1
-- XXX In consideration of the recursion problem with mkTyConApp below,
-- try to use typeOf instead -- but, this won't work! Because we are
-- starting with a String encoding the ...
-- ... or will it? We have to compare two strings; one comes from
-- the user-supplied pattern string we're parsing; the other? We
-- are not "comparing equality" here, it will be done later; we're
-- only compiling a pattern... So if the treps remain strings
-- in a Pattern, until we're ready to make comparisons; it's
-- inefficient unfortunately, but I feel this will work.
-- More detail: B/c when it comes time to match the pattern,
-- you DO have a concrete value (of some type); it is THEN that
-- you apply (show . typeRepTyCon . typeOf) to it, and then
-- make your Eq String comparison. [This can be optimised later;
-- I'm concerned now with a proof-of-concept, without TH.]
treps = tnames
#else
treps = map mktrep tnames
-- XXX You need the recursion for (==) to work; that may not mean
-- we can't use it, but will need some form of pattern-matching,
-- as full equality is going to be disfunctional. (B/c user would
-- have to specify the fully-recursive pattern [when they want to
-- use wildcards or stop nodes down there] -- totally ridiculous.)
-- This could be what :~: is for? (It's recursive, but you perhaps
-- can use in patterns without going full depth?)
-- mkTyConApp (mkTyCon3 "base" "Data.Either" "Either") [typeRep (Proxy::Proxy Bool), typeRep (Proxy::Proxy Int)] == typeRep (Proxy :: Proxy (Either Bool Int))
mktrep :: String -> TypeRep
mktrep tname = trep
where
tcon = mkTyCon3 "" "" tname
trep = mkTyConApp tcon []
--mkTyCon3 :: 3xString -> TypeCon
--mkTyConApp :: TyCon -> [TypeRep] -> TypeRep
#endif
-------------------------------------------------------------------------------
-- Split on the psplit predicate, stop consuming the list
-- on the pstop predicate.
splitPred :: (a -> Bool) -> (a -> Bool) -> [a] -> ([[a]], [a])
splitPred psplit pstop list = splitPred' psplit pstop list []
splitPred' :: (a -> Bool) -> (a -> Bool) -> [a] -> [[a]] -> ([[a]], [a])
splitPred' psplit pstop list acc
| null first = {-trace "-1-" $-} (acc, rest)
| null rest = {-trace "-2-" $-} (acc', []) -- or (acc, rest), obv.
| pstop h = {-trace "-3-" $-} (acc', rest)
| otherwise = {-trace "-4-" $-} splitPred' psplit pstop t acc'
where
(first,rest) = break psplit list
(h:t) = rest
acc' = acc ++ [first]
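-- For example (as in the caller above, the split predicate also covers the
-- stop characters):
-- splitPred (\c -> c == ' ' || c == '{') (== '{') "ab cd{ef"
--   == (["ab","cd"], "{ef")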
-------------------------------------------------------------------------------
| phunehehe/deepseq-bounded | src/Control/DeepSeq/Bounded/Compile.hs | bsd-3-clause | 21,725 | 0 | 17 | 5,148 | 4,099 | 2,252 | 1,847 | 179 | 15 |
module Repository where
import Common (PackageName,VersionNumber,SourceRepository)
import Data.Version (showVersion,Version(Version))
import Distribution.Hackage.DB (readHackage')
import System.Directory (
doesFileExist,createDirectoryIfMissing,doesDirectoryExist)
import System.Cmd (rawSystem)
import Control.Monad (when,forM,void)
import Data.Map (Map)
import qualified Data.Map as Map (
map,keys,filterWithKey,toList,fromList,union)
packagesDigest :: [PackageName]
packagesDigest = packagesThatMightComeWithGHC
smallPackageSelection :: [PackageName]
smallPackageSelection = ["base","ghc-prim","integer","rts","integer-simple"]
packagesThatMightComeWithGHC :: [PackageName]
packagesThatMightComeWithGHC = smallPackageSelection ++ [
"array","bytestring","Cabal","containers","deepseq","directory","filepath",
"haskell2010","haskell98","hpc","old-locale","old-time","pretty","process",
"syb","template-haskell","time","unix","Win32"]
packagesThatMightBeInThePlatform :: [PackageName]
packagesThatMightBeInThePlatform = packagesThatMightComeWithGHC ++ [
"async","attoparsec","case-insensitive","cgi","fgl","GLUT","GLURaw",
"hashable","haskell-src","html","HTTP","HUnit","mtl","network","OpenGL",
"OpenGLRaw","parallel","parsec","QuickCheck","random","regex-base",
"regex-compat","regex-posix","split","stm","syb","text","transformers",
"unordered-containers","vector","xhtml","zlib","cabal-install","alex","happy","haddock"]
type Index = Map PackageName [VersionNumber]
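-- Download the Hackage index, restrict it to the packages in 'packagesDigest',
-- add the few packages that are not on Hackage, and then download and unpack
-- the corresponding source archives.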
loadRepository :: IO SourceRepository
loadRepository =
availablePackagesOnHackage >>=
return . pruneIndex packagesDigest >>=
return . Map.union packagesNotOnHackage >>=
getPackages
availablePackagesOnHackage :: IO Index
availablePackagesOnHackage = do
putStrLn "Downloading Index ..."
exists <- doesFileExist "data/index.tar"
when (not exists) (void (do
createDirectoryIfMissing True "data/"
void (rawSystem "wget" [
"-nv",
"-O","data/index.tar.gz",
"hackage.haskell.org/packages/index.tar.gz"])
rawSystem "gunzip" ["-f","data/index.tar.gz"]))
hackage <- readHackage' "data/index.tar"
return (Map.map Map.keys hackage)
pruneIndex :: [PackageName] -> Index -> Index
pruneIndex packagenames = Map.filterWithKey (\key _ -> key `elem` packagenames)
packagesNotOnHackage :: Index
packagesNotOnHackage = Map.fromList [
("ghc-prim",[Version [0,3,0,0] []]),
("integer-simple",[Version [0,1,0,1] []]),
("rts",[Version [0] []])]
getPackages :: Index -> IO SourceRepository
getPackages index = downloadPackages index >> extractPackages index
downloadPackages :: Index -> IO ()
downloadPackages index = do
putStrLn "Downloading Packages ..."
void (
forM (Map.toList index) (\(packagename,versionnumbers) -> do
forM versionnumbers (\versionnumber -> do
let directory = archiveFilePath packagename versionnumber
exists <- doesFileExist directory
when (not exists) (do
createDirectoryIfMissing True "data/archives"
void (rawSystem "wget" [
"-nv",
"-O",directory,
packageUrl packagename versionnumber])))))
packageIdentifier :: PackageName -> VersionNumber -> String
packageIdentifier packagename versionnumber = packagename ++ "-" ++ showVersion versionnumber
archiveFilePath :: PackageName -> VersionNumber -> FilePath
archiveFilePath packagename versionnumber = concat [
"data/archives/",
packageIdentifier packagename versionnumber,
".tar.gz"]
packageUrl :: PackageName -> VersionNumber -> String
packageUrl packagename versionnumber = concat [
"hackage.haskell.org/packages/archive/",
packagename,
"/",
showVersion versionnumber,
"/",
packageIdentifier packagename versionnumber,
".tar.gz"]
packageDirectory :: PackageName -> VersionNumber -> FilePath
packageDirectory packagename versionnumber = concat [
"data/packages/",
packagename,
"/",
packageIdentifier packagename versionnumber,
"/"]
extractPackages :: Index -> IO SourceRepository
extractPackages index = do
putStrLn "Extracting Packages ..."
packageList <- forM (Map.toList index) (\(packagename,versionnumbers) -> do
versionList <- forM versionnumbers (\versionnumber -> do
let directory = packageDirectory packagename versionnumber
targetDirectory = "data/packages/" ++ packagename
exists <- doesDirectoryExist directory
when (not exists) (do
createDirectoryIfMissing True targetDirectory
void (rawSystem "tar" [
"xzf",
archiveFilePath packagename versionnumber,
"-C",targetDirectory]))
return (versionnumber,directory))
return (packagename,(Map.fromList versionList)))
return (Map.fromList packageList)
| phischu/hackage-analysis | src/Repository.hs | bsd-3-clause | 5,029 | 0 | 27 | 985 | 1,273 | 697 | 576 | 110 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Main
where
import Control.Applicative hiding (many)
import Data.Monoid
import Data.List (foldl')
import Data.Attoparsec
import Data.Attoparsec.Char8 (char8)
import Test.QuickCheck
import qualified Network.IRC.Message as I
import System.Random (Random(..), RandomGen)
import Data.Word (Word8)
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as B hiding (map)
instance Arbitrary I.Params where
  -- XXX TODO fix me
arbitrary = I.Params <$> undefined <*> undefined
arbNick :: Gen B.ByteString
arbNick = BS.cons <$> letter <*> lnss
arbHost :: Gen B.ByteString
arbHost = foldr BS.cons mempty <$> xs
where c :: Gen Word8 -- "a-z" "A-Z" "-./0-9"
c = elements $ [97..122] ++ [65..90] ++ [45,48..57]
xs = do
a <- listOf1 c
b <- listOf1 c
let dot = [46] -- at least one '.'
return $ a ++ dot ++ b
arbServer :: Gen B.ByteString
arbServer = arbHost
arbMaybeUser :: Gen (Maybe B.ByteString)
arbMaybeUser = frequency [(1, pure Nothing), (2, Just <$> arbNick)]
arbMaybeHost :: Gen (Maybe B.ByteString)
arbMaybeHost = frequency [(1, pure Nothing), (2, Just <$> arbHost)]
lnss :: Gen B.ByteString
lnss = foldr BS.cons mempty <$> listOf lns
instance Random Word8 where
randomR = integralRandomR
random = randomR (minBound, maxBound)
integralRandomR (a,b) g = case randomR (c,d) g of
(x,h) -> (fromIntegral x, h)
where (c,d) = (fromIntegral a :: Integer
,fromIntegral b :: Integer)
letter :: Gen Word8
letter = frequency [(1, choose (65,90)), (1, choose (97,122)) ]
digit :: Gen Word8
digit = choose (48, 57)
number :: Gen Word8
number = digit
special :: Gen Word8
special = elements [45,91,93,96,92,94,95,123,124,125]
-- letter | number | special
lns :: Gen Word8
lns = frequency [(1, letter), (1, number), (1, special)]
instance Arbitrary I.Prefix where
arbitrary = oneof [ I.PrefixServer <$> arbServer
, I.PrefixNick <$> arbNick <*> arbMaybeUser <*> arbMaybeHost
]
instance Arbitrary I.Command where
arbitrary = elements
[ I.PASS ,I.NICK ,I.USER ,I.OPER ,I.MODE ,I.SERVICE ,I.QUIT ,I.SQUIT
, I.JOIN ,I.PART, I.NAMES ,I.KICK ,I.PRIVMSG ,I.NOTICE ,I.MOTD
, I.TIME ,I.WHO ,I.PING ,I.AWAY ,I.TOPIC ,I.PONG ,I.INVITE ,I.WHOIS
,I.ERROR ]
instance Arbitrary B.ByteString where
arbitrary = B.pack <$> arbitrary
instance Arbitrary I.Message where
arbitrary = I.Message <$> arbitrary <*> arbitrary <*> arbitrary
prop_message :: I.Message -> Bool
prop_message m =
Just m == maybeResult ( parse I.message (I.toByteString m))
prop_command :: I.Command -> Bool
prop_command c =
Just c == maybeResult (feed (parse I.command (B.pack (show c))) B.empty)
prop_prefix :: I.Prefix -> Bool
prop_prefix c =
Just c == maybeResult (feed (parse (char8 ':' *> I.prefix) (I.prefixToByteString c `B.append` " ")) B.empty)
prop_params :: I.Params -> Bool
prop_params ps =
Just ps == maybeResult (feed (parse I.params (I.paramsToByteString ps)) mempty)
main :: IO ()
main =
quickCheckWith (stdArgs { maxSuccess = 10000, maxSize = 200 }) $
(label "prop_message" prop_message)
.&. (label "prop_prefix" prop_prefix)
.&. (label "prop_command" prop_command)
.&. (label "prop_params" prop_params)
| bernstein/ircfs | tests/Tests.hs | bsd-3-clause | 3,403 | 0 | 14 | 748 | 1,272 | 700 | 572 | 83 | 1 |
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Guesswork.Estimate where
import Control.Arrow
import Data.Ord
import Data.List
import Guesswork.Types
import Guesswork.Math.Statistics
import qualified Guesswork.Transform as TRANSFORM
data (Sample a) => Estimated a =
Estimated { truths :: [Double]
, estimates :: [Double]
, samples :: [a]
, trace :: Trace
}
deriving (Show)
-- | Returns (element at the given index, rest).
takeBut :: [a] -> Int -> (a,[a])
takeBut xs index = if index < length xs
then (xs !! index, take index xs ++ drop (index+1) xs)
else error "takeBut: index too big."
fitnessAvg :: [Double] -> [Double] -> Double
fitnessAvg truths estimates = avg $ zipWith (\a b -> abs (a-b)) truths estimates
calcFitness = fitnessAvg
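-- Mean absolute error between truths and estimates; for example (assuming
-- 'avg' from Guesswork.Math.Statistics is the arithmetic mean):
-- fitnessAvg [1,2] [2,4] == 1.5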
| deggis/guesswork | src/Guesswork/Estimate.hs | bsd-3-clause | 887 | 0 | 11 | 240 | 251 | 146 | 105 | 22 | 2 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
module Language.Whippet.Typecheck (
module Language.Whippet.Typecheck.Errors
, module Language.Whippet.Typecheck.Lenses
, module Language.Whippet.Typecheck.Types
, Checkable (..)
, TypeCheck
, typecheck
) where
import Control.Lens
import qualified Data.Foldable as Foldable
import Data.Sequence (Seq)
import qualified Data.Sequence as Seq
import qualified Data.Text as Text
import qualified Language.Whippet.Parser as Parser
import Language.Whippet.Parser.Lenses
import Language.Whippet.Typecheck.CheckM
import Language.Whippet.Typecheck.Errors
import Language.Whippet.Typecheck.Lenses
import Language.Whippet.Typecheck.Types
import qualified Text.Trifecta as Trifecta
type TypeCheck = CheckM Type
resolve :: Parser.Type a -> TypeCheck
resolve (Parser.TyNominal _ (Parser.QualId path)) =
let joined = Text.intercalate "." (path ^.. traverse.identLabel)
in pure (TyNominal joined)
-- |Return the first of the two types if they unify, otherwise add a type error
-- to the type checking context and return a divergence marker.
unify :: Parser.Pos -> Type -> Type -> TypeCheck
-- Divergences are subsumed by the expected type in order to continue
-- type-checking.
unify _ t TyDiverge = success t
unify _ TyDiverge t = success t
unify p t1 t2
| t1 == t2 = success t1
| otherwise = unificationError p t1 t2
where
unificationError s t1 t2 = failure (Err s e)
where
e = ErrorUnification (UnificationError t1 t2)
-- * Type-checker
-- |Run the type checker.
typecheck :: Checkable a => a -> Either (Seq Err) Type
typecheck c =
let (t, state) = runCheckM (check c)
errs = state ^. checkStateErrs
in
if Seq.null errs
then Right t
else Left errs
-- |The 'Checkable' class implements the type-checking algorithm. If
-- type-checking fails, the result is a divergence marker.
class Checkable a where
check :: a -> TypeCheck
instance Parser.HasPos p => Checkable (Parser.Expr p) where
check e =
case e of
-- An annotation asserts that an expression has an expected type, then
-- propagates the type specified in the annotation.
Parser.EAnnotation a -> do
t1 <- resolve (a ^. annotationType)
t2 <- check (a ^. annotationExpr)
unify Parser.emptyPos t1 t2
Parser.ELit l -> check l
-- Parser.EApp a -> undefined
-- Parser.EHole i -> undefined
-- Parser.EIf i -> undefined
-- Parser.EFn ps -> undefined
-- Parser.ELet l -> undefined
-- Parser.EMatch m -> undefined
-- Parser.EVar i -> undefined
-- Parser.EOpen o e -> undefined
instance Parser.HasPos p => Checkable (Parser.Lit p) where
check Parser.LitInt {} =
success (TyNominal "Language.Whippet.Prim.Int")
check Parser.LitChar {} =
success (TyNominal "Language.Whippet.Prim.Char")
check Parser.LitScientific {} =
success (TyNominal "Language.Whippet.Prim.Scientific")
check Parser.LitRecord {} =
undefined
check Parser.LitString {} =
success (TyNominal "Language.Whippet.Prim.String")
-- An empty list literal has a polymorphic type.
check (Parser.LitList []) = do
var <- freshTyVar
success (TyApp (TyNominal "Language.Whippet.Prim.Seq") var)
-- Otherwise a list must have values of a homogeneous type.
check (Parser.LitList (x : xs)) = do
t1 <- check x
Foldable.traverse_ (\x -> check x >>= unify Parser.emptyPos t1) xs
success (TyApp (TyNominal "Language.Whippet.Prim.Seq") t1)
instance Checkable Parser.TopLevel where
check = undefined
instance Parser.HasPos p => Checkable (Parser.AST p) where
check = undefined
| chrisbarrett/whippet | src/Language/Whippet/Typecheck.hs | bsd-3-clause | 4,047 | 0 | 14 | 1,112 | 899 | 475 | 424 | -1 | -1 |
-------------------------------------------------------------------------------
---- |
---- Module : Main
---- Copyright : (c) Stefan Berthold 2014-2015
---- License : BSD3-style (see LICENSE)
----
---- Maintainer : [email protected]
---- Stability : unstable
---- Portability :
----
---- Alternative implementation for the command line interface of SmallCaps.
----
-------------------------------------------------------------------------------
module Main where
import Data.Default ( def )
import Data.Map ( insert )
import System.Environment ( withProgName )
import Data.Text as T ( Text, pack, length )
import Text.SmallCaps ( smallcaps )
import Text.SmallCaps.Config ( Config ( replaceFilter ), Profile, defaultProfile, small, footnote )
main :: IO ()
main = withProgName "twocase"
$ smallcaps twoConf twoProfile
twoFilter :: Text -> Bool
twoFilter = (<) 1 . T.length
twoConf :: Config
twoConf = def { replaceFilter = twoFilter }
twoSmall :: Config
twoSmall = small { replaceFilter = twoFilter }
twoFootnote :: Config
twoFootnote = footnote { replaceFilter = twoFilter }
twoProfile :: Profile
twoProfile = insert (pack "default") twoConf
$ insert (pack "small") twoSmall
$ insert (pack "footnote") twoFootnote
$ defaultProfile
-- vim: ft=haskell:sts=2:sw=2:et:nu:ai
| ZjMNZHgG5jMXw/smallcaps | src/twocase/twocase.hs | bsd-3-clause | 1,403 | 0 | 10 | 295 | 264 | 158 | 106 | 23 | 1 |
{-# Language RankNTypes #-}
{-# Language PatternSynonyms #-}
{-# Language TypeApplications #-}
{-# Language DataKinds #-}
{-# Language GADTs #-}
{-# Language OverloadedStrings #-}
module Mir.Compositional
where
import Data.Parameterized.Context (pattern Empty, pattern (:>))
import Data.Parameterized.NatRepr
import Data.Text (Text)
import qualified Data.Text as Text
import Lang.Crucible.Backend
import Lang.Crucible.CFG.Core
import Lang.Crucible.Simulator
import qualified What4.Expr.Builder as W4
import Crux
import Mir.DefId
import Mir.Generator (CollectionState)
import Mir.Intrinsics
import Mir.Compositional.Builder (builderNew)
import Mir.Compositional.Clobber (clobberGlobalsOverride)
compositionalOverrides ::
forall sym bak p t st fs args ret blocks rtp a r .
(IsSymInterface sym, sym ~ W4.ExprBuilder t st fs) =>
Maybe (SomeOnlineSolver sym bak) ->
CollectionState ->
Text ->
CFG MIR blocks args ret ->
Maybe (OverrideSim (p sym) sym MIR rtp a r ())
compositionalOverrides _symOnline cs name cfg
| (normDefId "crucible::method_spec::raw::builder_new" <> "::_inst") `Text.isPrefixOf` name
, Empty <- cfgArgTypes cfg
, MethodSpecBuilderRepr <- cfgReturnType cfg
= Just $ bindFnHandle (cfgHandle cfg) $ UseOverride $
mkOverride' "method_spec_builder_new" MethodSpecBuilderRepr $ do
msb <- builderNew cs (textId name)
return $ MethodSpecBuilder msb
| (normDefId "crucible::method_spec::raw::builder_add_arg" <> "::_inst") `Text.isPrefixOf` name
, Empty :> MethodSpecBuilderRepr :> MirReferenceRepr _tpr <- cfgArgTypes cfg
, MethodSpecBuilderRepr <- cfgReturnType cfg
= Just $ bindFnHandle (cfgHandle cfg) $ UseOverride $
mkOverride' "method_spec_builder_add_arg" MethodSpecBuilderRepr $ do
RegMap (Empty :> RegEntry _tpr (MethodSpecBuilder msb)
:> RegEntry (MirReferenceRepr tpr) argRef) <- getOverrideArgs
msb' <- msbAddArg tpr argRef msb
return $ MethodSpecBuilder msb'
| (normDefId "crucible::method_spec::raw::builder_set_return" <> "::_inst") `Text.isPrefixOf` name
, Empty :> MethodSpecBuilderRepr :> MirReferenceRepr _tpr <- cfgArgTypes cfg
, MethodSpecBuilderRepr <- cfgReturnType cfg
= Just $ bindFnHandle (cfgHandle cfg) $ UseOverride $
mkOverride' "method_spec_builder_set_return" MethodSpecBuilderRepr $ do
RegMap (Empty :> RegEntry _tpr (MethodSpecBuilder msb)
:> RegEntry (MirReferenceRepr tpr) argRef) <- getOverrideArgs
msb' <- msbSetReturn tpr argRef msb
return $ MethodSpecBuilder msb'
| normDefId "crucible::method_spec::raw::builder_gather_assumes" == name
, Empty :> MethodSpecBuilderRepr <- cfgArgTypes cfg
, MethodSpecBuilderRepr <- cfgReturnType cfg
= Just $ bindFnHandle (cfgHandle cfg) $ UseOverride $
mkOverride' "method_spec_builder_gather_assumes" MethodSpecBuilderRepr $ do
RegMap (Empty :> RegEntry _tpr (MethodSpecBuilder msb)) <- getOverrideArgs
msb' <- msbGatherAssumes msb
return $ MethodSpecBuilder msb'
| normDefId "crucible::method_spec::raw::builder_gather_asserts" == name
, Empty :> MethodSpecBuilderRepr <- cfgArgTypes cfg
, MethodSpecBuilderRepr <- cfgReturnType cfg
= Just $ bindFnHandle (cfgHandle cfg) $ UseOverride $
mkOverride' "method_spec_builder_gather_asserts" MethodSpecBuilderRepr $ do
RegMap (Empty :> RegEntry _tpr (MethodSpecBuilder msb)) <- getOverrideArgs
msb' <- msbGatherAsserts msb
return $ MethodSpecBuilder msb'
| normDefId "crucible::method_spec::raw::builder_finish" == name
, Empty :> MethodSpecBuilderRepr <- cfgArgTypes cfg
, MethodSpecRepr <- cfgReturnType cfg
= Just $ bindFnHandle (cfgHandle cfg) $ UseOverride $
mkOverride' "method_spec_builder_finish" MethodSpecRepr $ do
RegMap (Empty :> RegEntry _tpr (MethodSpecBuilder msb)) <- getOverrideArgs
msbFinish msb
| normDefId "crucible::method_spec::raw::clobber_globals" == name
, Empty <- cfgArgTypes cfg
, UnitRepr <- cfgReturnType cfg
= Just $ bindFnHandle (cfgHandle cfg) $ UseOverride $
mkOverride' "method_spec_clobber_globals" UnitRepr $ do
clobberGlobalsOverride cs
| normDefId "crucible::method_spec::raw::spec_pretty_print" == name
, Empty :> MethodSpecRepr <- cfgArgTypes cfg
, MirSliceRepr (BVRepr w) <- cfgReturnType cfg
, Just Refl <- testEquality w (knownNat @8)
= Just $ bindFnHandle (cfgHandle cfg) $ UseOverride $
mkOverride' "method_spec_spec_pretty_print" (MirSliceRepr $ BVRepr $ knownNat @8) $ do
RegMap (Empty :> RegEntry _tpr (MethodSpec ms _)) <- getOverrideArgs
msPrettyPrint ms
| normDefId "crucible::method_spec::raw::spec_enable" == name
, Empty :> MethodSpecRepr <- cfgArgTypes cfg
, UnitRepr <- cfgReturnType cfg
= Just $ bindFnHandle (cfgHandle cfg) $ UseOverride $
mkOverride' "method_spec_spec_enable" UnitRepr $ do
RegMap (Empty :> RegEntry _tpr (MethodSpec ms _)) <- getOverrideArgs
msEnable ms
| otherwise = Nothing
| GaloisInc/saw-script | crux-mir-comp/src/Mir/Compositional.hs | bsd-3-clause | 5,038 | 0 | 16 | 914 | 1,367 | 663 | 704 | 100 | 1 |
import System.Environment (getArgs)
main :: IO ()
main = do
[inpFile] <- getArgs
input <- readFile inpFile
putStr . unlines . map (show . sum . map (read . (:[]))) $ lines input
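-- Worked example: for an input file whose lines are "12345" and "007", the
-- program prints "15" and "7" (each line is reduced to the sum of its digits).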
| nikai3d/ce-challenges | easy/sum_digits.hs | bsd-3-clause | 191 | 0 | 16 | 46 | 94 | 47 | 47 | 6 | 1 |
module Main where
main :: IO ()
main = putStrLn "Hi from app"
| cdodev/scintilla | app/Main.hs | bsd-3-clause | 63 | 0 | 6 | 14 | 22 | 12 | 10 | 3 | 1 |
module GrMain where
import qualified System.Environment as S
import Data.Map (toList)
import Control.Monad (forM_)
import GraphReduction.Compiler (runProgram, compile)
import GraphReduction.Machine (eval, code, dump, stack, heap)
import Util.Heap (objects)
import Core.LambdaLift (lambdaLift)
import Core.Parser (parseProgramFromFile)
import Core.Pretty (prettyProgram)
-- testProg = "main = let x = if false 3 5; y = 2 in x; true = \\x y. x; false = \\x y. y; if = \\p x y. p x y;"
-- prog = parseProgram testProg
-- main = (putStrLn . show . runProgram . lambdaLift) prog
breakln = putStrLn "---------------------------"
printEval prog = do
forM_ (eval $ compile prog) $ \s -> do
breakln
putStr "Code: "
print $ code s
putStr "Dump: "
print $ dump s
putStr "Stack: "
print $ stack s
putStrLn "Heap Objs: "
putStrLn . unlines . map (\(x,y) -> show x ++ "\t\t" ++ show y) . toList . objects . heap $ s
main :: IO ()
main = do
args <- S.getArgs
if not (null args) then do
let file:_ = args
prog <- parseProgramFromFile file
let prettyprog = prettyProgram prog
lambdaprog = lambdaLift prog
putStrLn prettyprog
breakln
putStrLn $ prettyProgram lambdaprog
breakln
printEval lambdaprog
else do
putStrLn "Need a file to parse."
return ()
| WraithM/CoreCompiler | src/GrMain.hs | bsd-3-clause | 1,421 | 0 | 21 | 395 | 396 | 196 | 200 | 38 | 2 |
module UntypedSpec where
import Untyped.Eval
import Untyped.Syntax
import Untyped.Builtins
import Control.Monad.State
import Control.Monad.Except
import Test.Hspec
toplevel' :: [String] -> StateT Context Error String
toplevel' [] = return "empty expressions"
toplevel' [x] = do expr <- parseExpr x
result <- eval expr
return $ show result
toplevel' (x:xs) = do expr <- parseExpr x
_ <- eval expr
toplevel' xs
toplevel :: [String] -> IO (Either String String)
toplevel es = runExceptT $ evalStateT (toplevel' es) initialCtx
spec :: Spec
spec = describe "untyped test" $
it "should evaluate untyped expressions" $ do
r1 <- toplevel ["(set fac (fn (n) (if (eq n 1) 1 (* n (fac (- n 1))))))", "(fac 5)"]
r1 `shouldBe` Right "120"
r2 <- toplevel ["(set fib (fn (n) (if (eq n 0) 1 (if (eq n 1) 1 (+ (fib (- n 1)) (fib (- n 2)))))))", "(fib 5)"]
r2 `shouldBe` Right "8"
| zjhmale/Ntha | test/UntypedSpec.hs | bsd-3-clause | 1,014 | 0 | 10 | 299 | 276 | 138 | 138 | 24 | 1 |
{-# LANGUAGE RankNTypes, TypeOperators, ScopedTypeVariables,
DataKinds, TypeFamilies, PolyKinds,
GADTs #-}
{-# OPTIONS_GHC -fwarn-incomplete-patterns #-}
-----------------------------------------------------------------------------
-- |
-- Module : Language.Glambda.Eval
-- Copyright : (C) 2015 Richard Eisenberg
-- License : BSD-style (see LICENSE)
-- Maintainer : Richard Eisenberg ([email protected])
-- Stability : experimental
--
-- Glambda expression evaluators for checked expressions.
--
----------------------------------------------------------------------------
module Language.Glambda.Eval ( eval, step ) where
import Language.Glambda.Exp
import Language.Glambda.Token
import Language.Glambda.Shift
-- | Given a lambda and an expression, beta-reduce.
apply :: Val (arg -> res) -> Exp '[] arg -> Exp '[] res
apply (LamVal body) arg = subst arg body
-- | Apply an arithmetic operator to two values.
arith :: Val Int -> ArithOp ty -> Val Int -> Exp '[] ty
arith (IntVal n1) Plus (IntVal n2) = IntE (n1 + n2)
arith (IntVal n1) Minus (IntVal n2) = IntE (n1 - n2)
arith (IntVal n1) Times (IntVal n2) = IntE (n1 * n2)
arith (IntVal n1) Divide (IntVal n2) = IntE (n1 `div` n2)
arith (IntVal n1) Mod (IntVal n2) = IntE (n1 `mod` n2)
arith (IntVal n1) Less (IntVal n2) = BoolE (n1 < n2)
arith (IntVal n1) LessE (IntVal n2) = BoolE (n1 <= n2)
arith (IntVal n1) Greater (IntVal n2) = BoolE (n1 > n2)
arith (IntVal n1) GreaterE (IntVal n2) = BoolE (n1 >= n2)
arith (IntVal n1) Equals (IntVal n2) = BoolE (n1 == n2)
-- | Conditionally choose between two expressions
cond :: Val Bool -> Exp '[] t -> Exp '[] t -> Exp '[] t
cond (BoolVal True) e _ = e
cond (BoolVal False) _ e = e
-- | Unroll a `fix` one level
unfix :: Val (ty -> ty) -> Exp '[] ty
unfix (LamVal body) = subst (Fix (Lam body)) body
-- | A well-typed variable in an empty context is impossible.
impossibleVar :: Elem '[] x -> a
impossibleVar _ = error "GHC's typechecker failed"
-- GHC 7.8+ supports EmptyCase for this, but the warnings for that
-- construct don't work yet.
-- | Evaluate an expression, using big-step semantics.
eval :: Exp '[] t -> Val t
eval (Var v) = impossibleVar v
eval (Lam body) = LamVal body
eval (App e1 e2) = eval (apply (eval e1) e2)
eval (Arith e1 op e2) = eval (arith (eval e1) op (eval e2))
eval (Cond e1 e2 e3) = eval (cond (eval e1) e2 e3)
eval (Fix e) = eval (unfix (eval e))
eval (IntE n) = IntVal n
eval (BoolE b) = BoolVal b
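-- Illustrative reductions (not part of the original module):
--   eval (Cond (BoolE True) (IntE 1) (IntE 2))  ~>  IntVal 1
--   eval (Arith (IntE 2) Times (IntE 21))       ~>  IntVal 42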
-- | Step an expression, either to another expression or to a value.
step :: Exp '[] t -> Either (Exp '[] t) (Val t)
step (Var v) = impossibleVar v
step (Lam body) = Right (LamVal body)
step (App e1 e2) = case step e1 of
Left e1' -> Left (App e1' e2)
Right (LamVal body) -> Left (subst e2 body)
step (Arith e1 op e2) = case step e1 of
Left e1' -> Left (Arith e1' op e2)
Right v1 -> case step e2 of
Left e2' -> Left (Arith (val v1) op e2')
Right v2 -> Left (arith v1 op v2)
step (Cond e1 e2 e3) = case step e1 of
Left e1' -> Left (Cond e1' e2 e3)
Right v1 -> Left (cond v1 e2 e3)
step (Fix e) = case step e of
Left e' -> Left (Fix e')
Right v -> Left (unfix v)
step (IntE n) = Right (IntVal n)
step (BoolE b) = Right (BoolVal b)
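-- Single-step counterparts of the examples above (illustrative only):
--   step (Arith (IntE 1) Plus (IntE 2))  ~>  Left (IntE 3)
--   step (IntE 3)                        ~>  Right (IntVal 3)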
| ajnsit/glambda | src/Language/Glambda/Eval.hs | bsd-3-clause | 3,594 | 0 | 15 | 1,024 | 1,333 | 661 | 672 | 56 | 6 |
{-# LANGUAGE OverloadedStrings
, BangPatterns #-}
import System (getArgs)
import Data.List (groupBy, foldl', intercalate)
import Data.Maybe (fromJust, listToMaybe)
import Control.Applicative ((<$>))
import Control.Monad (forM_, msum)
import Data.Binary (Binary, put, get, encodeFile)
import qualified Data.Map as M
import qualified Data.Set as S
import qualified Data.Text as T
import qualified Data.Text.IO as T
import qualified Data.Text.Encoding as T
import Text.XML.PolySoup
instance Binary T.Text where
put = put . T.encodeUtf8
get = T.decodeUtf8 <$> get
-- LMF parser
type Ne = T.Text
type Type = T.Text
lmfP :: XmlParser String [(Ne, Type)]
lmfP = true ##> lexEntryP
lexEntryP :: XmlParser String [(Ne, Type)]
lexEntryP = tag "LexicalEntry" `joinR` do
many_ $ cut $ tag "feat"
words <- many wordP
!sense <- senseP
return [(x, sense) | !x <- words]
wordP :: XmlParser String Ne
wordP = head <$> (tag "Lemma" <|> tag "WordForm" /> featP "writtenForm")
senseP :: XmlParser String Type
senseP = head <$> (tag "Sense" //> featP "externalReference" <|> featP "label")
featP :: String -> XmlParser String T.Text
featP att = T.pack <$> cut (tag "feat" *> hasAttr "att" att *> getAttr "val")
-- Main program
main = do
[inPath, outPath] <- getArgs
entries <- parseXML lmfP <$> readFile inPath
encodeFile outPath $ mkDict entries
-- forM_ (M.toList $ mkDict entries) $ \(k, x) -> do
-- T.putStr k
-- putStr " => "
-- T.putStrLn $ T.intercalate " " x
mkDict :: [(Ne, Type)] -> M.Map Ne [Type]
mkDict xs
= fmap S.toList
$ fromListWith S.union
$ concatMap process xs
where
-- | Key k can be a multiword NE.
process (k, x) =
[(k, label) | k <- ks]
where
ks = T.words k
label = S.singleton $ if length ks == 1
then "e-" `T.append` x
else "p-" `T.append` x
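-- Illustrative example of the e-/p- labelling (not from the original source):
--   mkDict [("Paris", "city"), ("New York", "city")]
--     == M.fromList [("New", ["p-city"]), ("Paris", ["e-city"]), ("York", ["p-city"])]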
fromListWith :: Ord k => (a -> a -> a) -> [(k, a)] -> M.Map k a
fromListWith f xs =
let update m (!k, !x) = M.insertWith' f k x m
in foldl' update M.empty xs
| kawu/nerf-misc | util/Lmf2Map.hs | bsd-3-clause | 2,084 | 0 | 12 | 508 | 731 | 397 | 334 | -1 | -1 |
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE PackageImports #-}
module Main
where
import Control.Concurrent.Async
import Control.Monad
import System.Environment
import qualified Data.Primitive as P
import Control.Concurrent
import Control.Concurrent.Chan
import Control.Concurrent.STM
import Control.Concurrent.STM.TQueue
import Control.Concurrent.STM.TBQueue
import Control.Concurrent.MVar
import Data.IORef
import Criterion.Main
import Control.Exception(evaluate)
import qualified "chan-split-fast" Control.Concurrent.Chan.Split as S
import qualified "split-channel" Control.Concurrent.Chan.Split as SC
import Data.Primitive.MutVar
import Control.Monad.Primitive(PrimState)
import Data.Atomics.Counter
import Data.Atomics
#if MIN_VERSION_base(4,7,0)
#else
import qualified Data.Concurrent.Queue.MichaelScott as MS
#endif
import GHC.Conc
import Benchmarks
-- These tests initially taken from stm/bench/chanbench.hs, ported to
-- criterion, with some additions.
--
-- The original used CPP to avoid code duplication while also ensuring GHC
-- optimized the code in a realistic fashion. Here we just copy paste.
main = do
let n = 100000
--let n = 2000000 -- original suggested value, bugs if exceeded
procs <- getNumCapabilities
let procs_div2 = procs `div` 2
    if procs_div2 >= 1 then return ()
else error "Run with RTS +N2 or more"
mv <- newEmptyMVar -- This to be left empty after each test
mvFull <- newMVar undefined
-- --
-- mvWithFinalizer <- newEmptyMVar
-- mkWeakMVar mvWithFinalizer $ return ()
-- --
-- mvFinalizee <- newMVar 'a'
-- mvWithFinalizer <- newMVar ()
-- mkWeakMVar mvWithFinalizer $
-- modifyMVar_ mvFinalizee (const $ return 'b')
-- --
tmv <- newEmptyTMVarIO
tv <- newTVarIO undefined
ior <- newIORef undefined
mutv <- newMutVar undefined
counter_mvar <- newMVar (1::Int)
counter_ioref <- newIORef (1::Int)
counter_tvar <- newTVarIO (1::Int)
counter_atomic_counter <- newCounter (1::Int)
fill_empty_chan <- newChan
fill_empty_tchan <- newTChanIO
fill_empty_tqueue <- newTQueueIO
fill_empty_tbqueue <- newTBQueueIO maxBound
(fill_empty_fastI, fill_empty_fastO) <- S.newSplitChan
(fill_empty_splitchannelI, fill_empty_splitchannelO) <- SC.new
#if MIN_VERSION_base(4,7,0)
#else
fill_empty_lockfree <- MS.newQ
#endif
defaultMain $
[ bgroup "Var primitives" $
-- This gives us an idea of how long a lock is held by these atomic
-- ops, and the effects of retry/blocking scheduling behavior.
-- compare this with latency measure in Main1 to get the whole
-- picture:
-- Subtract the cost of:
-- - 2 context switches
-- - 4 newEmptyMVar
-- - 4 takeMVar
-- - 4 putMVar
-- TODO: also test with N green threads per core.
[ bgroup ("Throughput on "++(show n)++" concurrent atomic mods") $
-- just forks some threads all atomically modifying a variable:
let {-# INLINE mod_test #-}
mod_test = mod_test_n n
{-# INLINE mod_test_n #-}
mod_test_n n' = \threads modf -> do
dones <- replicateM threads newEmptyMVar ; starts <- replicateM threads newEmptyMVar
mapM_ (\(start1,done1)-> forkIO $ takeMVar start1 >> replicateM_ (n' `div` threads) modf >> putMVar done1 ()) $ zip starts dones
mapM_ (\v-> putMVar v ()) starts ; mapM_ (\v-> takeMVar v) dones
-- We use this payload to scale contention; on my machine
-- timesN values of 1,2,3,4 run at fairly consistent: 15ns,
-- 19ns, 29ns, and 37ns (note: 22.4ns for an atomicModifyIORef)
{-# NOINLINE payload #-}
payload timesN = (evaluate $ (foldr ($) 2 $ replicate timesN sqrt) :: IO Float)
varGroupPayload perProc numPL = [
bench "modifyMVar_" $ mod_test (procs*perProc) $
(modifyMVar_ counter_mvar (return . (+1)) >> payload numPL)
, bench "modifyMVarMasked_" $ mod_test (procs*perProc) $
(modifyMVarMasked_ counter_mvar (return . (+1)) >> payload numPL)
, bench "atomicModifyIORef'" $ mod_test (procs*perProc) $
(atomicModifyIORef' counter_ioref (\x-> (x+1,()) ) >> payload numPL)
, bench "atomically modifyTVar'" $ mod_test (procs*perProc) $
((atomically $ modifyTVar' counter_tvar ((+1))) >> payload numPL)
, bench "incrCounter (atomic-primops)" $ mod_test (procs*perProc) $
(incrCounter 1 counter_atomic_counter >> payload numPL)
, bench "atomicModifyIORefCAS (atomic-primops)" $ mod_test (procs*perProc) $
(atomicModifyIORefCAS counter_ioref (\x-> (x+1,()) ) >> payload numPL)
, bench "atomicModifyIORefCAS' (my CAS loop)" $ mod_test (procs*perProc) $
(atomicModifyIORefCAS' counter_ioref (\x-> (x+1,()) ) >> payload numPL)
]
in [ bgroup "1 thread per HEC, full contention" $
[ bench "modifyMVar_" $ mod_test procs $
(modifyMVar_ counter_mvar (return . (+1)))
, bench "modifyMVarMasked_" $ mod_test procs $
(modifyMVarMasked_ counter_mvar (return . (+1)))
, bench "atomicModifyIORef'" $ mod_test procs $
(atomicModifyIORef' counter_ioref (\x-> (x+1,()) ))
, bench "atomically modifyTVar'" $ mod_test procs $
(atomically $ modifyTVar' counter_tvar ((+1)))
, bench "incrCounter (atomic-primops)" $ mod_test procs $
(incrCounter 1 counter_atomic_counter)
, bench "atomicModifyIORefCAS (atomic-primops)" $ mod_test procs $
(atomicModifyIORefCAS counter_ioref (\x-> (x+1,()) ))
, bench "atomicModifyIORefCAS' (my CAS loop)" $ mod_test procs $
(atomicModifyIORefCAS' counter_ioref (\x-> (x+1,()) ))
-- I want to compare these with the same results above;
-- see also TVarExperiment:
-- , bench "atomicModifyIORef' x10" $ mod_test_n (10*n) procs $
-- (atomicModifyIORef' counter_ioref (\x-> (x+1,()) ))
-- , bench "atomically modifyTVar' x10" $ mod_test_n (10*n) procs $
-- (atomically $ modifyTVar' counter_tvar ((+1)))
]
, bgroup "2 threads per HEC, full contention" $
[ bench "modifyMVar_" $ mod_test (procs*2) $
(modifyMVar_ counter_mvar (return . (+1)))
, bench "modifyMVarMasked_" $ mod_test (procs*2) $
(modifyMVarMasked_ counter_mvar (return . (+1)))
-- WTF! This is suddenly giving me a stack overflow....
-- , bench "atomicModifyIORef'" $ mod_test (procs*2) $
-- (atomicModifyIORef' counter_ioref (\x-> (x+1,()) ))
, bench "atomically modifyTVar'" $ mod_test (procs*2) $
(atomically $ modifyTVar' counter_tvar ((+1)))
, bench "incrCounter (atomic-primops)" $ mod_test (procs*2) $
(incrCounter 1 counter_atomic_counter)
, bench "atomicModifyIORefCAS (atomic-primops)" $ mod_test (procs*2) $
(atomicModifyIORefCAS counter_ioref (\x-> (x+1,()) ))
]
{- COMMENTING, since the atomicModifyIORef' below is *again*
causing stack overflow for no apparent reason TODO why?
-- NOTE: adding more threads per-HEC at this point shows
-- little difference (very bad MVar locking behavior has
-- mostly disappeared)
--
-- test dialing back the contention:
, bgroup "1 threads per HEC, 1 payload" $
varGroupPayload 1 1
, bgroup "1 threads per HEC, 2 payload" $
varGroupPayload 1 2
, bgroup "1 threads per HEC, 4 payload" $
varGroupPayload 1 4
, bgroup "1 threads per HEC, 8 payload" $
varGroupPayload 1 8
-- this is an attempt to see if a somewhat random delay can
-- get rid of (some or all) the very slow runs; hypothesis
-- being that those runs get into some bad harmonics and
-- contention is slow to resolve.
, bgroup "1 thread per HEC, scattered payloads with IORefs" $
let benchRandPayloadIORef evry pyld =
bench ("atomicModifyIORef' "++(show evry)++" "++(show pyld)) $
mod_test procs $
(atomicModifyIORef' counter_ioref (\x-> (x+1,x) )
>>= \x-> if x `mod` evry == 0 then payload pyld else return 1)
in [ benchRandPayloadIORef 2 1
, benchRandPayloadIORef 2 4
, benchRandPayloadIORef 2 16
, benchRandPayloadIORef 8 1
, benchRandPayloadIORef 8 4
, benchRandPayloadIORef 8 16
, benchRandPayloadIORef 32 1
, benchRandPayloadIORef 32 4
, benchRandPayloadIORef 32 16
]
, bgroup "Test Payload" $
[ bench "payload x1" $ payload 1
, bench "payload x2" $ payload 2
, bench "payload x4" $ payload 4
, bench "payload x8" $ payload 8
]
-}
]
, bgroup "Misc" $
-- If the second shows some benefit on just two threads, then
-- it represents a useful technique for reducing contention:
[ bench "contentious atomic-maybe-modify IORef" $ atomicMaybeModifyIORef n
, bench "read first, then maybe contentious atomic-maybe-modify IORef" $ readMaybeAtomicModifyIORef n
, bench "readForCAS, then CAS (atomic-primops)" $ readMaybeCAS n
-- NOT RELEVANT:
-- , bench "Higher contention, contentious atomic-maybe-modify IORef" $ atomicMaybeModifyIORefHiC n
-- , bench "Higher contention, read first, then maybe contentious atomic-maybe-modify IORef" $ readMaybeAtomicModifyIORefHiC n
, bench "contentious atomic-maybe-modify TVar" $ atomicMaybeModifyTVar n
, bench "read first, then maybe contentious atomic-maybe-modify TVar" $ readMaybeAtomicModifyTVar n
-- we should expect these to be the same:
, bench "reads against atomicModifyIORefs" $ readsAgainstAtomicModifyIORefs n
, bench "reads against modifyIORefs" $ readsAgainstNonAtomicModify n
-- TODO how do these compare with STM?
]
]
-- TODO: define these in terms of numCapabilities:
-- 1 r thread 1 w thread: measuring r/w contention
    -- 2 w threads ONLY: measure w/w contention, THEN:
    -- 2 r threads ONLY: measure r/r contention
-- more threads: measuring descheduling bottlenecks, context switching overheads (+ above)
-- above better tested outside criterion, w/ eventlogging
-- also test equivalents of above on 8-core
, bgroup "Channel implementations" $
[ bgroup ("Operations on "++(show n)++" messages") $
[ bgroup "For scale" $
-- For TQueue style chans, test the cost of reverse
[ bench "reverse [1..n]" $ nf (\n'-> reverse [1..n']) n
, bench "reverse replicate n 1" $ nf (\n'-> replicate n' (1::Int)) n
]
, bgroup "Chan" $
-- this gives us a measure of effects of contention between
-- readers and writers when compared with single-threaded
-- version:
[ bench "async 1 writer 1 readers" $ runtestChanAsync 1 1 n
-- NOTE: this is a bit hackish, filling in one test and
-- reading in the other; make sure memory usage isn't
-- influencing mean:
--
-- This measures writer/writer contention, in this case I
-- think we see a lot of thread blocking/waiting delays
, bench ("async "++(show procs)++" writers") $ do
dones <- replicateM procs newEmptyMVar ; starts <- replicateM procs newEmptyMVar
mapM_ (\(start1,done1)-> forkIO $ takeMVar start1 >> replicateM_ (n `div` procs) (writeChan fill_empty_chan ()) >> putMVar done1 ()) $ zip starts dones
mapM_ (\v-> putMVar v ()) starts ; mapM_ (\v-> takeMVar v) dones
-- This measures reader/reader contention:
, bench ("async "++(show procs)++" readers") $ do
dones <- replicateM procs newEmptyMVar ; starts <- replicateM procs newEmptyMVar
mapM_ (\(start1,done1)-> forkIO $ takeMVar start1 >> replicateM_ (n `div` procs) (readChan fill_empty_chan) >> putMVar done1 ()) $ zip starts dones
mapM_ (\v-> putMVar v ()) starts ; mapM_ (\v-> takeMVar v) dones
-- This is measuring the effects of bottlenecks caused by
            -- descheduling, context-switching overhead (forced by
-- fairness properties in the case of MVar), as well as
-- all of the above; this is probably less than
-- informative. Try threadscope on a standalone test:
, bench "contention: async 100 writers 100 readers" $ runtestChanAsync 100 100 n
]
, bgroup "TChan" $
[ bench "async 1 writers 1 readers" $ runtestTChanAsync 1 1 n
-- This measures writer/writer contention:
{- LIVELOCK!!!
, bench ("async "++(show procs)++" writers") $ do
dones <- replicateM procs newEmptyMVar ; starts <- replicateM procs newEmptyMVar
mapM_ (\(start1,done1)-> forkIO $ takeMVar start1 >> replicateM_ (n `div` procs) (atomically $ writeTChan fill_empty_tchan ()) >> putMVar done1 ()) $ zip starts dones
mapM_ (\v-> putMVar v ()) starts ; mapM_ (\v-> takeMVar v) dones
-- This measures reader/reader contention:
, bench ("async "++(show procs)++" readers") $ do
dones <- replicateM procs newEmptyMVar ; starts <- replicateM procs newEmptyMVar
mapM_ (\(start1,done1)-> forkIO $ takeMVar start1 >> replicateM_ (n `div` procs) (atomically $ readTChan fill_empty_tchan) >> putMVar done1 ()) $ zip starts dones
mapM_ (\v-> putMVar v ()) starts ; mapM_ (\v-> takeMVar v) dones
, bench "contention: async 100 writers 100 readers" $ runtestTChanAsync 100 100 n
-}
]
, bgroup "TQueue" $
[ bench "async 1 writers 1 readers" $ runtestTQueueAsync 1 1 n
-- This measures writer/writer contention:
, bench ("async "++(show procs)++" writers") $ do
dones <- replicateM procs newEmptyMVar ; starts <- replicateM procs newEmptyMVar
mapM_ (\(start1,done1)-> forkIO $ takeMVar start1 >> replicateM_ (n `div` procs) (atomically $ writeTQueue fill_empty_tqueue ()) >> putMVar done1 ()) $ zip starts dones
mapM_ (\v-> putMVar v ()) starts ; mapM_ (\v-> takeMVar v) dones
-- This measures reader/reader contention:
, bench ("async "++(show procs)++" readers") $ do
dones <- replicateM procs newEmptyMVar ; starts <- replicateM procs newEmptyMVar
mapM_ (\(start1,done1)-> forkIO $ takeMVar start1 >> replicateM_ (n `div` procs) (atomically $ readTQueue fill_empty_tqueue) >> putMVar done1 ()) $ zip starts dones
mapM_ (\v-> putMVar v ()) starts ; mapM_ (\v-> takeMVar v) dones
, bench "contention: async 100 writers 100 readers" $ runtestTQueueAsync 100 100 n
]
, bgroup "TBQueue" $
[ bench "async 1 writers 1 readers" $ runtestTBQueueAsync 1 1 n
-- This measures writer/writer contention:
, bench ("async "++(show procs)++" writers") $ do
dones <- replicateM procs newEmptyMVar ; starts <- replicateM procs newEmptyMVar
mapM_ (\(start1,done1)-> forkIO $ takeMVar start1 >> replicateM_ (n `div` procs) (atomically $ writeTBQueue fill_empty_tbqueue ()) >> putMVar done1 ()) $ zip starts dones
mapM_ (\v-> putMVar v ()) starts ; mapM_ (\v-> takeMVar v) dones
-- This measures reader/reader contention:
, bench ("async "++(show procs)++" readers") $ do
dones <- replicateM procs newEmptyMVar ; starts <- replicateM procs newEmptyMVar
mapM_ (\(start1,done1)-> forkIO $ takeMVar start1 >> replicateM_ (n `div` procs) (atomically $ readTBQueue fill_empty_tbqueue) >> putMVar done1 ()) $ zip starts dones
mapM_ (\v-> putMVar v ()) starts ; mapM_ (\v-> takeMVar v) dones
, bench "contention: async 100 writers 100 readers" $ runtestTBQueueAsync 100 100 n
]
-- OTHER CHAN IMPLEMENTATIONS:
, bgroup "chan-split-fast" $
[ bench "async 1 writers 1 readers" $ runtestSplitChanAsync 1 1 n
-- This measures writer/writer contention:
, bench ("async "++(show procs)++" writers") $ do
dones <- replicateM procs newEmptyMVar ; starts <- replicateM procs newEmptyMVar
mapM_ (\(start1,done1)-> forkIO $ takeMVar start1 >> replicateM_ (n `div` procs) (S.writeChan fill_empty_fastI ()) >> putMVar done1 ()) $ zip starts dones
mapM_ (\v-> putMVar v ()) starts ; mapM_ (\v-> takeMVar v) dones
-- This measures reader/reader contention:
, bench ("async "++(show procs)++" readers") $ do
dones <- replicateM procs newEmptyMVar ; starts <- replicateM procs newEmptyMVar
mapM_ (\(start1,done1)-> forkIO $ takeMVar start1 >> replicateM_ (n `div` procs) (S.readChan fill_empty_fastO) >> putMVar done1 ()) $ zip starts dones
mapM_ (\v-> putMVar v ()) starts ; mapM_ (\v-> takeMVar v) dones
, bench "contention: async 100 writers 100 readers" $ runtestSplitChanAsync 100 100 n
]
, bgroup "split-channel" $
[ bench "async 1 writers 1 readers" $ runtestSplitChannelAsync 1 1 n
-- This measures writer/writer contention:
, bench ("async "++(show procs)++" writers") $ do
dones <- replicateM procs newEmptyMVar ; starts <- replicateM procs newEmptyMVar
mapM_ (\(start1,done1)-> forkIO $ takeMVar start1 >> replicateM_ (n `div` procs) (SC.send fill_empty_splitchannelI ()) >> putMVar done1 ()) $ zip starts dones
mapM_ (\v-> putMVar v ()) starts ; mapM_ (\v-> takeMVar v) dones
-- This measures reader/reader contention:
, bench ("async "++(show procs)++" readers") $ do
dones <- replicateM procs newEmptyMVar ; starts <- replicateM procs newEmptyMVar
mapM_ (\(start1,done1)-> forkIO $ takeMVar start1 >> replicateM_ (n `div` procs) (SC.receive fill_empty_splitchannelO) >> putMVar done1 ()) $ zip starts dones
mapM_ (\v-> putMVar v ()) starts ; mapM_ (\v-> takeMVar v) dones
, bench "contention: async 100 writers 100 readers" $ runtestSplitChannelAsync 100 100 n
]
-- michael-scott queue implementation, using atomic-primops
#if MIN_VERSION_base(4,7,0)
#else
, bgroup "lockfree-queue" $
[ bench "async 1 writer 1 readers" $ runtestLockfreeQueueAsync 1 1 n
, bench ("async "++(show procs)++" writers") $ do
dones <- replicateM procs newEmptyMVar ; starts <- replicateM procs newEmptyMVar
mapM_ (\(start1,done1)-> forkIO $ takeMVar start1 >> replicateM_ (n `div` procs) (MS.pushL fill_empty_lockfree ()) >> putMVar done1 ()) $ zip starts dones
mapM_ (\v-> putMVar v ()) starts ; mapM_ (\v-> takeMVar v) dones
, bench ("async "++(show procs)++" readers") $ do
dones <- replicateM procs newEmptyMVar ; starts <- replicateM procs newEmptyMVar
mapM_ (\(start1,done1)-> forkIO $ takeMVar start1 >> replicateM_ (n `div` procs) (msreadR fill_empty_lockfree) >> putMVar done1 ()) $ zip starts dones
mapM_ (\v-> putMVar v ()) starts ; mapM_ (\v-> takeMVar v) dones
, bench "contention: async 100 writers 100 readers" $ runtestLockfreeQueueAsync 100 100 n
]
#endif
-- Chase / Lev work-stealing queue
-- NOTE: we can have at most 1 writer (pushL); not a general-purpose queue, so don't do more tests
, bgroup "chaselev-dequeue" $
[ bench "async 1 writer 1 readers" $ runtestChaseLevQueueAsync_1_1 n
]
]
]
, bgroup "Arrays misc" $
-- be sure to subtract "cost" of 2 forkIO's and context switch
[ bench "baseline" $
do x <- newEmptyMVar
y <- newEmptyMVar
forkIO $ (replicateM_ 500 $ return ()) >> putMVar x ()
forkIO $ (replicateM_ 500 $ return ()) >> putMVar y ()
takeMVar x
takeMVar y
, bench "New 32-length MutableArrays x1000 across two threads" $
do x <- newEmptyMVar
y <- newEmptyMVar
forkIO $ (replicateM_ 500 $ (P.newArray 32 0 :: IO (P.MutableArray (PrimState IO) Int))) >> putMVar x ()
forkIO $ (replicateM_ 500 $ (P.newArray 32 0 :: IO (P.MutableArray (PrimState IO) Int))) >> putMVar y ()
takeMVar x
takeMVar y
, bench "New MVar x1000 across two threads" $
do x <- newEmptyMVar
y <- newEmptyMVar
forkIO $ (replicateM_ 500 $ (newEmptyMVar :: IO (MVar Int))) >> putMVar x ()
forkIO $ (replicateM_ 500 $ (newEmptyMVar :: IO (MVar Int))) >> putMVar y ()
takeMVar x
takeMVar y
]
]
-- to make sure the counter is actually being incremented!:
cntv <- readCounter counter_atomic_counter
putStrLn $ "Final counter val is "++(show cntv)
| jberryman/chan-benchmarks | MainN.hs | bsd-3-clause | 24,888 | 0 | 28 | 9,459 | 4,211 | 2,160 | 2,051 | 208 | 2 |
-- | Module containing the template data structure
--
{-# LANGUAGE GeneralizedNewtypeDeriving, DeriveDataTypeable #-}
module Hakyll.Web.Template.Internal
( Template (..)
, TemplateElement (..)
) where
import Control.Applicative ((<$>))
import Data.Binary (Binary, get, getWord8, put, putWord8)
import Data.Typeable (Typeable)
import Hakyll.Core.Writable
-- | Datatype used for template substitutions.
--
newtype Template = Template
{ unTemplate :: [TemplateElement]
}
deriving (Show, Eq, Binary, Typeable)
instance Writable Template where
-- Writing a template is impossible
write _ _ = return ()
-- | Elements of a template.
--
data TemplateElement
= Chunk String
| Key String
| Escaped
deriving (Show, Eq, Typeable)
instance Binary TemplateElement where
put (Chunk string) = putWord8 0 >> put string
put (Key key) = putWord8 1 >> put key
put (Escaped) = putWord8 2
get = getWord8 >>= \tag -> case tag of
0 -> Chunk <$> get
1 -> Key <$> get
2 -> return Escaped
_ -> error $ "Hakyll.Web.Template.Internal: "
++ "Error reading cached template"
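-- Illustrative correspondence, assuming Hakyll's usual "$key$" template syntax
-- (the parser lives in another module, so this is not taken from this file):
--   "Hello $name$!"  ~  Template [Chunk "Hello ", Key "name", Chunk "!"]
-- 'Escaped' stands for an escaped dollar sign ("$$") in the template source.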
| sol/hakyll | src/Hakyll/Web/Template/Internal.hs | bsd-3-clause | 1,193 | 0 | 12 | 309 | 303 | 171 | 132 | 28 | 0 |
{-# Language QuasiQuotes, OverloadedStrings #-}
{-|
Module : Client.Hook.Snotice
Description : Hook for sorting some service notices into separate windows.
Copyright : (c) Eric Mertens 2019
License : ISC
Maintainer : [email protected]
These sorting rules are based on the solanum server notices.
-}
module Client.Hook.Snotice
( snoticeHook
) where
import qualified Data.Text as Text
import Data.Text (Text)
import Data.List (find)
import Text.Regex.TDFA
import Text.Regex.TDFA.String
import Client.Hook
import Irc.Message
import Irc.Identifier (mkId, Identifier)
import Irc.UserInfo
import StrQuote (str)
snoticeHook :: MessageHook
snoticeHook = MessageHook "snotice" True remap
remap ::
IrcMsg -> MessageResult
remap (Notice (Source (UserInfo u "" "") _) _ msg)
| Just msg1 <- Text.stripPrefix "*** Notice -- " msg
, let msg2 = Text.filter (\x -> x /= '\x02' && x /= '\x0f') msg1
, Just (lvl, cat) <- characterize msg2
= if lvl < 1 then OmitMessage
else RemapMessage (Notice (Source (UserInfo u "" "*") "") cat msg1)
remap _ = PassMessage
toPattern :: (Int, String, String) -> (Int, Identifier, Regex)
toPattern (lvl, cat, reStr) =
case compile co eo reStr of
Left e -> error e
Right r -> (lvl, mkId (Text.pack ('~':cat)), r)
where
co = CompOption
{ caseSensitive = True
, multiline = False
, rightAssoc = True
, newSyntax = True
, lastStarGreedy = True }
eo = ExecOption
{ captureGroups = False }
characterize :: Text -> Maybe (Int, Identifier)
characterize txt =
do let s = Text.unpack txt
(lvl, cat, _) <- find (\(_, _, re) -> matchTest re s) patterns
pure (lvl, cat)
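-- Illustrative behaviour, derived from the pattern table below:
--   characterize "Client connecting: nick (user@host) ..."  ~>  Just (1, "~c")
--   characterize "Client exiting: nick (user@host) ..."     ~>  Just (0, "~c")
-- Since remap omits notices below level 1, only the first is rerouted to ~c.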
patterns :: [(Int, Identifier, Regex)]
patterns = map toPattern
[
-- PATTERN LIST, most common snotes
-- Client connecting, more complete regex: ^Client connecting: [^ ]+ \([^ ]+@[^ ]+\) \[[^ ]+\] \{[^ ]+\} \[.*\]$
(1, "c", [str|^Client connecting: |]),
-- Client exiting, more complete regex: ^Client exiting: [^ ]+ \([^ ]+@[^ ]+\) \[.*\] \[[^ ]+\]$
(0, "c", [str|^Client exiting: |]),
-- Nick change
(0, "c", [str|^Nick change: From |]),
-- Connection limit, more complete regex: ^Too many user connections for [^ ]+![^ ]+@[^ ]+$
(1, "u", [str|^Too many user connections for |]),
-- Join alerts, more complete regex: ^User [^ ]+ \([^ ]+@[^ ]+\) trying to join #[^ ]* is a possible spambot$
(1, "a", [str|^User [^ ]+ \([^ ]+\) trying to join #[^ ]* is a possible spambot|]),
-- Kline hitting user
(1, "k", [str|^K/DLINE active for|]),
-- Connection limit, more complete regex: ^Too many local connections for [^ ]+![^ ]+@[^ ]+$
(1, "u", [str|^Too many local connections for |]),
-- Global kline added, more complete regex: ^[^ ]+![^ ]+@[^ ]+\{[^ ]+\} added global [0-9]+ min. K-Line for \[[^ ]+\] \[.*\]$
(2, "k", [str|^[^ ]+ added global [0-9]+ min. K-Line for |]),
(2, "k", [str|^[^ ]+ added global [0-9]+ min. X-Line for |]),
-- Global kline expiring, more complete regex: ^Propagated ban for \[[^ ]+\] expired$
(0, "k", [str|^Propagated ban for |]),
-- Chancreate
(1, "u", [str|^[^ ]+ is creating new channel #|]),
-- m_filter
(0, "u", [str|^FILTER: |]),
(0, "m", [str|^New filters loaded.$|]),
(0, "m", [str|^Filtering enabled.$|]),
-- Failed login
(0, "f", [str|^Warning: [0-9]+ failed login attempts|]),
-- Temporary kline added, more complete regex: ^[^ ]+![^ ]+@[^ ]+\{[^ ]+\} added temporary [0-9]+ min. K-Line for \[[^ ]+\] \[.*\]$
(1, "k", [str|^OperServ![^ ]+\{services\.\} added temporary [0-9]+ min. K-Line for \[[^ ]+\] \[Joining #|]), -- klinechans
(1, "k", [str|^OperSyn![^ ]+\{syn\.\} added temporary [0-9]+ min. K-Line for \[[^ ]+\] \[You've been temporarily|]), -- lethals
(2, "k", [str|^[^ ]+ added temporary [0-9]+ min. K-Line for |]),
-- Nick collision
(1, "m", [str|^Nick collision on|]),
-- KILLs
(0, "k", [str|^Received KILL message for [^ ]+\. From NickServ |]),
(0, "k", [str|^Received KILL message for [^ ]+\. From syn Path: [^ ]+ \(Facility Blocked\)|]),
(1, "k", [str|^Received KILL message for [^ ]+\. From syn Path: [^ ]+ \(Banned\)|]),
(2, "k", [str|^Received KILL message|]),
-- Teporary kline expiring, more complete regex: ^Temporary K-line for \[[^ ]+\] expired$
(0, "k", [str|^Temporary K-line for |]),
-- PATTERN LIST, uncommon snotes. regex performance isn't very important beyond this point
(2, "a", [str|^Possible Flooder|]),
(0, "a", [str|^New Max Local Clients: [0-9]+$|]),
(1, "a", [str|^Excessive target change from|]),
(1, "f", [str|^Failed (OPER|CHALLENGE) attempt - host mismatch|]),
(3, "f", [str|^Failed (OPER|CHALLENGE) attempt|]), -- ORDER IMPORTANT - catch all failed attempts that aren't host mismatch
(1, "k", [str|^Rejecting [XK]-Lined user|]),
(1, "k", [str|^Disconnecting [XK]-Lined user|]),
(1, "k", [str|^KLINE active for|]),
(1, "k", [str|^XLINE active for|]),
(3, "k", [str|^KLINE over-ruled for |]),
(2, "k", [str|^[^ ]+ added global [0-9]+ min. K-Line from [^ ]+![^ ]+@[^ ]+\{[^ ]+\} for \[[^ ]+\] \[.*\]$|]),
(2, "k", [str|^[^ ]+ added global [0-9]+ min. X-Line from [^ ]+![^ ]+@[^ ]+\{[^ ]+\} for \[[^ ]+\] \[.*\]$|]),
(2, "k", [str|^[^ ]+![^ ]+@[^ ]+\{[^ ]+\} has removed the global K-Line for: \[.*\]$|]),
(2, "k", [str|^[^ ]+![^ ]+@[^ ]+\{[^ ]+\} has removed the temporary K-Line for: \[.*\]$|]),
(2, "k", [str|^[^ ]+![^ ]+@[^ ]+\{[^ ]+\} added temporary [0-9]+ min. D-Line for \[[^ ]+\] \[.*\]$|]),
(2, "k", [str|^[^ ]+![^ ]+@[^ ]+\{[^ ]+\} has removed the X-Line for:|]),
(2, "k", [str|^[^ ]+![^ ]+@[^ ]+\{[^ ]+\} is removing the X-Line for|]),
(2, "k", [str|^[^ ]+![^ ]+@[^ ]+\{[^ ]+\} has removed the temporary D-Line for:|]),
(0, "m", [str|^Received SAVE message for|]),
(0, "m", [str|^Ignored noop SAVE message for|]),
(0, "m", [str|^Ignored SAVE message for|]),
(0, "m", [str|^TS for #[^ ]+ changed from|]),
(0, "m", [str|^Nick change collision from |]),
(0, "m", [str|^Dropping IDENTIFIED|]),
(1, "m", [str|^Got signal SIGHUP, reloading ircd conf\. file|]),
(1, "m", [str|^Got SIGHUP; reloading|]),
(1, "m", [str|^Updating database by request of system console\.$|]),
(1, "m", [str|^Rehashing .* by request of system console\.$|]),
(2, "m", [str|^Updating database by request of [^ ]+( \([^ ]+\))?\.$|]),
(2, "m", [str|^Rehashing .* by request of [^ ]+( \([^ ]+\))?\.$|]),
(2, "m", [str|.* is rehashing server config file$|]),
(3, "m", [str|^".*", line [0-9+]|]), -- configuration syntax error!
(0, "m", [str|^Ignoring attempt from [^ ]+( \([^ ]+\))? to set login name for|]),
(1, "m", [str|^binding listener socket: 99 \(Cannot assign requested address\)$|]),
(2, "m", [str|^binding listener socket: |]),
(2, "o", [str|^OPERSPY [^ ]+![^ ]+@[^ ]+\{[^ ]+\} |]),
(2, "o", [str|^[^ ]+![^ ]+@[^ ]+\{[^ ]+\} is overriding |]),
(2, "o", [str|^[^ ]+![^ ]+@[^ ]+\{[^ ]+\} is using oper-override on |]),
(2, "o", [str|^[^ ]+ \([^ ]+@[^ ]+\) is now an operator$|]),
(1, "o", [str|^[^ ]+( \([^ ]+\))? dropped the nick |]),
(1, "o", [str|^[^ ]+( \([^ ]+\))? dropped the account |]),
(2, "o", [str|^[^ ]+( \([^ ]+\))? dropped the channel |]),
(1, "o", [str|^[^ ]+( \([^ ]+\))? set vhost |]),
(1, "o", [str|^[^ ]+( \([^ ]+\))? deleted vhost |]),
(2, "o", [str|^[^ ]+( \([^ ]+\))? is using MODE |]),
(3, "o", [str|^[^ ]+( \([^ ]+\))? froze the account |]),
(3, "o", [str|^[^ ]+( \([^ ]+\))? thawed the account |]),
(2, "o", [str|^[^ ]+( \([^ ]+\))? transferred foundership of #|]),
(2, "o", [str|^[^ ]+( \([^ ]+\))? marked the channel #|]),
(2, "o", [str|^[^ ]+( \([^ ]+\))? unmarked the channel #|]),
(2, "o", [str|^[^ ]+( \([^ ]+\))? is forcing flags change |]),
(3, "o", [str|^[^ ]+( \([^ ]+\))? is clearing channel #|]),
(2, "o", [str|^[^ ]+( \([^ ]+\))? closed the channel #|]),
(2, "o", [str|^[^ ]+( \([^ ]+\))? reopened the channel #|]),
(2, "o", [str|^[^ ]+( \([^ ]+\))? reopened #|]),
(3, "o", [str|^[^ ]+( \([^ ]+\))? reset the password for the account|]),
(3, "o", [str|^[^ ]+( \([^ ]+\))? enabled automatic klines on the channel|]),
(3, "o", [str|^[^ ]+( \([^ ]+\))? disabled automatic klines on the channel|]),
(3, "o", [str|^[^ ]+( \([^ ]+\))? forbade the nickname |]),
(3, "o", [str|^[^ ]+( \([^ ]+\))? unforbade the nickname |]),
(3, "o", [str|^[^ ]+( \([^ ]+\))? is removing oper class for |]),
(3, "o", [str|^[^ ]+( \([^ ]+\))? is changing oper class for |]),
(3, "o", [str|^[^ ]+( \([^ ]+\))? set the REGNOLIMIT option for the account |]),
(3, "o", [str|^[^ ]+( \([^ ]+\))? set the HOLD option for the account |]),
(3, "o", [str|^[^ ]+( \([^ ]+\))? returned the account |]),
(1, "o", [str|^Not kicking immune user |]),
(1, "o", [str|^Not kicking oper |]),
(1, "o", [str|^Overriding KICK from |]),
(1, "o", [str|^Overriding REMOVE from |]),
(1, "o", [str|^Server [^ ]+ split from |]),
(3, "o", [str|^Netsplit [^ ]+ <->|]),
(2, "o", [str|^Remote SQUIT|]),
(3, "o", [str|^ssld error for |]),
(1, "o", [str|^Finished synchronizing with network|]),
(3, "o", [str|^Link [^ ]+ notable TS delta|]),
(1, "o", [str|^End of burst \(emulated\) from |]),
(2, "o", [str|^Link with [^ ]+ established: |]),
(2, "o", [str|^Connection to [^ ]+ activated$|]),
(2, "o", [str|^Attempt to re-introduce|]),
(1, "o", [str|^Server [^ ]+ being introduced|]),
(2, "o", [str|^Netjoin [^ ]+ <->|]),
(2, "o", [str|^Error connecting to|]),
(1, "o", [str|^[^ ]+![^ ]+@[^ ]+ is sending resvs and xlines|]),
(3, "o", [str|^[^ ]+![^ ]+@[^ ]+\{[^ ]+\} is changing the privilege set|]),
(3, "o", [str|^[^ ]+![^ ]+@[^ ]+\{[^ ]+\} is opering |]),
(3, "o", [str|^[^ ]+![^ ]+@[^ ]+\{[^ ]+\} is deopering |]),
(3, "o", [str|^[^ ]+ is using DEHELPER |]),
(3, "o", [str|^[^ ]+![^ ]+@[^ ]+\{[^ ]+\} is clearing the nick delay table|]),
(3, "o", [str|^Module |]),
(3, "o", [str|^Cannot locate module |]),
(2, "o", [str|^[^ ]+![^ ]+@[^ ]+\{[^ ]+\} is adding a permanent X-Line for \[.*\]|]),
(2, "o", [str|^[^ ]+![^ ]+@[^ ]+\{[^ ]+\} added X-Line for \[.*\]|]),
(2, "o", [str|^[^ ]+![^ ]+@[^ ]+\{[^ ]+\} is adding a permanent RESV for \[.*\]|]),
(2, "o", [str|^[^ ]+![^ ]+@[^ ]+\{[^ ]+\} added RESV for \[.*\]|]),
(2, "o", [str|^[^ ]+![^ ]+@[^ ]+\{[^ ]+\} has removed the RESV for:|]),
(3, "o", [str|^[^ ]+ is an idiot\. Dropping |]), -- someone k-lined *@*
(3, "o", [str|^Rejecting email for |]), -- registering from a badmailed address won't trigger this, emailing broken?
(3, "o", [str|^ERROR |]),
(3, "o", [str|^No response from [^ ]+, closing link$|]),
(1, "u", [str|^Too many global connections for [^ ]+![^ ]+@[^ ]+$|]),
(0, "u", [str|^Invalid username: |]),
(0, "u", [str|^HTTP Proxy disconnected: |]),
(2, "u", [str|^Unauthorised client connection from |]),
(2, "u", [str|^[^ ]+( \([^ ]+\))? sent the password for the MARKED account|]),
(2, "u", [str|^Not restoring mark|])]
-- -}
| glguy/irc-core | src/Client/Hook/Snotice.hs | isc | 11,326 | 0 | 16 | 2,469 | 2,647 | 1,882 | 765 | 174 | 2 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE ViewPatterns #-}
module Database.Persist.Quasi
( parse
, PersistSettings (..)
, upperCaseSettings
, lowerCaseSettings
, nullable
#if TEST
, Token (..)
, tokenize
, parseFieldType
#endif
) where
import Prelude hiding (lines)
import Database.Persist.Types
import Data.Char
import Data.Maybe (mapMaybe, fromMaybe, maybeToList)
import Data.Text (Text)
import qualified Data.Text as T
import Control.Arrow ((&&&))
import qualified Data.Map as M
import Data.List (foldl')
import Data.Monoid (mappend)
import Control.Monad (msum, mplus)
data ParseState a = PSDone | PSFail String | PSSuccess a Text deriving Show
parseFieldType :: Text -> Either String FieldType
parseFieldType t0 =
case parseApplyFT t0 of
PSSuccess ft t'
| T.all isSpace t' -> Right ft
PSFail err -> Left $ "PSFail " ++ err
other -> Left $ show other
where
parseApplyFT t =
case goMany id t of
PSSuccess (ft:fts) t' -> PSSuccess (foldl' FTApp ft fts) t'
PSSuccess [] _ -> PSFail "empty"
PSFail err -> PSFail err
PSDone -> PSDone
parseEnclosed :: Char -> (FieldType -> FieldType) -> Text -> ParseState FieldType
parseEnclosed end ftMod t =
let (a, b) = T.break (== end) t
in case parseApplyFT a of
PSSuccess ft t' -> case (T.dropWhile isSpace t', T.uncons b) of
("", Just (c, t'')) | c == end -> PSSuccess (ftMod ft) (t'' `Data.Monoid.mappend` t')
(x, y) -> PSFail $ show (b, x, y)
x -> PSFail $ show x
parse1 t =
case T.uncons t of
Nothing -> PSDone
Just (c, t')
| isSpace c -> parse1 $ T.dropWhile isSpace t'
| c == '(' -> parseEnclosed ')' id t'
| c == '[' -> parseEnclosed ']' FTList t'
| isUpper c ->
let (a, b) = T.break (\x -> isSpace x || x `elem` ("()[]"::String)) t
in PSSuccess (getCon a) b
| otherwise -> PSFail $ show (c, t')
getCon t =
case T.breakOnEnd "." t of
(_, "") -> FTTypeCon Nothing t
("", _) -> FTTypeCon Nothing t
(a, b) -> FTTypeCon (Just $ T.init a) b
goMany front t =
case parse1 t of
PSSuccess x t' -> goMany (front . (x:)) t'
PSFail err -> PSFail err
PSDone -> PSSuccess (front []) t
-- _ ->
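-- Illustrative examples (not from the original source):
--   parseFieldType "Int"
--     == Right (FTTypeCon Nothing "Int")
--   parseFieldType "Maybe (Entity User)"
--     == Right (FTApp (FTTypeCon Nothing "Maybe")
--                     (FTApp (FTTypeCon Nothing "Entity") (FTTypeCon Nothing "User")))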
data PersistSettings = PersistSettings
{ psToDBName :: !(Text -> Text)
, psStrictFields :: !Bool
-- ^ Whether fields are by default strict. Default value: @True@.
--
-- @since 1.2
, psIdName :: !Text
-- ^ The name of the id column. Default value: @id@
-- The name of the id column can also be changed on a per-model basis
-- <https://github.com/yesodweb/persistent/wiki/Persistent-entity-syntax>
--
-- @since 2.0
}
defaultPersistSettings, upperCaseSettings, lowerCaseSettings :: PersistSettings
defaultPersistSettings = PersistSettings
{ psToDBName = id
, psStrictFields = True
, psIdName = "id"
}
upperCaseSettings = defaultPersistSettings
lowerCaseSettings = defaultPersistSettings
{ psToDBName =
let go c
| isUpper c = T.pack ['_', toLower c]
| otherwise = T.singleton c
in T.dropWhile (== '_') . T.concatMap go
}
-- | Parses a quasi-quoted syntax into a list of entity definitions.
parse :: PersistSettings -> Text -> [EntityDef]
parse ps = parseLines ps
. removeSpaces
. filter (not . empty)
. map tokenize
. T.lines
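-- Illustrative input (assuming the usual persistent model syntax; not taken
-- from this file):
--
--   Person
--       name Text
--       age Int Maybe
--
-- parses to one EntityDef for "Person" with two columns, where the trailing
-- Maybe attribute makes the age column nullable.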
-- | A token used by the parser.
data Token = Spaces !Int -- ^ @Spaces n@ are @n@ consecutive spaces.
| Token Text -- ^ @Token tok@ is token @tok@ already unquoted.
deriving (Show, Eq)
-- | Tokenize a string.
tokenize :: Text -> [Token]
tokenize t
| T.null t = []
| "--" `T.isPrefixOf` t = [] -- Comment until the end of the line.
| "#" `T.isPrefixOf` t = [] -- Also comment to the end of the line, needed for a CPP bug (#110)
| T.head t == '"' = quotes (T.tail t) id
| T.head t == '(' = parens 1 (T.tail t) id
| isSpace (T.head t) =
let (spaces, rest) = T.span isSpace t
in Spaces (T.length spaces) : tokenize rest
-- support mid-token quotes and parens
| Just (beforeEquals, afterEquals) <- findMidToken t
, not (T.any isSpace beforeEquals)
, Token next : rest <- tokenize afterEquals =
Token (T.concat [beforeEquals, "=", next]) : rest
| otherwise =
let (token, rest) = T.break isSpace t
in Token token : tokenize rest
where
findMidToken t' =
case T.break (== '=') t' of
(x, T.drop 1 -> y)
| "\"" `T.isPrefixOf` y || "(" `T.isPrefixOf` y -> Just (x, y)
_ -> Nothing
quotes t' front
| T.null t' = error $ T.unpack $ T.concat $
"Unterminated quoted string starting with " : front []
| T.head t' == '"' = Token (T.concat $ front []) : tokenize (T.tail t')
| T.head t' == '\\' && T.length t' > 1 =
quotes (T.drop 2 t') (front . (T.take 1 (T.drop 1 t'):))
| otherwise =
let (x, y) = T.break (`elem` ['\\','\"']) t'
in quotes y (front . (x:))
parens count t' front
| T.null t' = error $ T.unpack $ T.concat $
"Unterminated parens string starting with " : front []
| T.head t' == ')' =
if count == (1 :: Int)
then Token (T.concat $ front []) : tokenize (T.tail t')
else parens (count - 1) (T.tail t') (front . (")":))
| T.head t' == '(' =
parens (count + 1) (T.tail t') (front . ("(":))
| T.head t' == '\\' && T.length t' > 1 =
parens count (T.drop 2 t') (front . (T.take 1 (T.drop 1 t'):))
| otherwise =
let (x, y) = T.break (`elem` ['\\','(',')']) t'
in parens count y (front . (x:))
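-- Illustrative example (not from the original source):
--   tokenize "name Text  sql=full_name"
--     == [Token "name", Spaces 1, Token "Text", Spaces 2, Token "sql=full_name"]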
-- | A string of tokens is empty when it has only spaces. There
-- can't be two consecutive 'Spaces', so this takes /O(1)/ time.
empty :: [Token] -> Bool
empty [] = True
empty [Spaces _] = True
empty _ = False
-- | A line. We don't care about spaces in the middle of the
-- line. Also, we don't care about the amount of indentation.
data Line = Line { lineIndent :: Int
, tokens :: [Text]
}
-- | Remove leading spaces and remove spaces in the middle of the
-- tokens.
removeSpaces :: [[Token]] -> [Line]
removeSpaces =
map toLine
where
toLine (Spaces i:rest) = toLine' i rest
toLine xs = toLine' 0 xs
toLine' i = Line i . mapMaybe fromToken
fromToken (Token t) = Just t
fromToken Spaces{} = Nothing
-- | Divide lines into blocks and make entity definitions.
parseLines :: PersistSettings -> [Line] -> [EntityDef]
parseLines ps lines =
fixForeignKeysAll $ toEnts lines
where
toEnts (Line indent (name:entattribs) : rest) =
let (x, y) = span ((> indent) . lineIndent) rest
in mkEntityDef ps name entattribs x : toEnts y
toEnts (Line _ []:rest) = toEnts rest
toEnts [] = []
fixForeignKeysAll :: [UnboundEntityDef] -> [EntityDef]
fixForeignKeysAll unEnts = map fixForeignKeys unEnts
where
ents = map unboundEntityDef unEnts
entLookup = M.fromList $ map (\e -> (entityHaskell e, e)) ents
fixForeignKeys :: UnboundEntityDef -> EntityDef
fixForeignKeys (UnboundEntityDef foreigns ent) =
ent { entityForeigns = map (fixForeignKey ent) foreigns }
-- check the count and the sqltypes match and update the foreignFields with the names of the primary columns
fixForeignKey :: EntityDef -> UnboundForeignDef -> ForeignDef
fixForeignKey ent (UnboundForeignDef foreignFieldTexts fdef) =
case M.lookup (foreignRefTableHaskell fdef) entLookup of
Just pent -> case entityPrimary pent of
Just pdef ->
if length foreignFieldTexts /= length (compositeFields pdef)
then lengthError pdef
else let fds_ffs = zipWith (toForeignFields pent)
foreignFieldTexts
(compositeFields pdef)
in fdef { foreignFields = map snd fds_ffs
, foreignNullable = setNull $ map fst fds_ffs
}
Nothing ->
error $ "no explicit primary key fdef="++show fdef++ " ent="++show ent
Nothing ->
error $ "could not find table " ++ show (foreignRefTableHaskell fdef)
++ " fdef=" ++ show fdef ++ " allnames="
++ show (map (unHaskellName . entityHaskell . unboundEntityDef) unEnts)
++ "\n\nents=" ++ show ents
where
setNull :: [FieldDef] -> Bool
setNull [] = error "setNull: impossible!"
setNull (fd:fds) = let nullSetting = isNull fd in
if all ((nullSetting ==) . isNull) fds then nullSetting
else error $ "foreign key columns must all be nullable or non-nullable"
++ show (map (unHaskellName . fieldHaskell) (fd:fds))
isNull = (NotNullable /=) . nullable . fieldAttrs
toForeignFields pent fieldText pfd =
case chktypes fd haskellField (entityFields pent) pfh of
Just err -> error err
Nothing -> (fd, ((haskellField, fieldDB fd), (pfh, pfdb)))
where
fd = getFd (entityFields ent) haskellField
haskellField = HaskellName fieldText
(pfh, pfdb) = (fieldHaskell pfd, fieldDB pfd)
chktypes :: FieldDef -> HaskellName -> [FieldDef] -> HaskellName -> Maybe String
chktypes ffld _fkey pflds pkey =
if fieldType ffld == fieldType pfld then Nothing
else Just $ "fieldType mismatch: " ++ show (fieldType ffld) ++ ", " ++ show (fieldType pfld)
where
pfld = getFd pflds pkey
entName = entityHaskell ent
getFd [] t = error $ "foreign key constraint for: " ++ show (unHaskellName entName)
++ " unknown column: " ++ show t
getFd (f:fs) t
| fieldHaskell f == t = f
| otherwise = getFd fs t
lengthError pdef = error $ "found " ++ show (length foreignFieldTexts) ++ " fkeys and " ++ show (length (compositeFields pdef)) ++ " pkeys: fdef=" ++ show fdef ++ " pdef=" ++ show pdef
data UnboundEntityDef = UnboundEntityDef
{ _unboundForeignDefs :: [UnboundForeignDef]
, unboundEntityDef :: EntityDef
}
lookupKeyVal :: Text -> [Text] -> Maybe Text
lookupKeyVal key = lookupPrefix $ key `mappend` "="
lookupPrefix :: Text -> [Text] -> Maybe Text
lookupPrefix prefix = msum . map (T.stripPrefix prefix)
-- | Construct an entity definition.
mkEntityDef :: PersistSettings
-> Text -- ^ name
-> [Attr] -- ^ entity attributes
-> [Line] -- ^ indented lines
-> UnboundEntityDef
mkEntityDef ps name entattribs lines =
UnboundEntityDef foreigns $
EntityDef
entName
(DBName $ getDbName ps name' entattribs)
-- idField is the user-specified Id
-- otherwise useAutoIdField
-- but, adjust it if the user specified a Primary
(setComposite primaryComposite $ fromMaybe autoIdField idField)
entattribs
cols
uniqs
[]
derives
extras
isSum
where
entName = HaskellName name'
(isSum, name') =
case T.uncons name of
Just ('+', x) -> (True, x)
_ -> (False, name)
(attribs, extras) = splitExtras lines
attribPrefix = flip lookupKeyVal entattribs
    idName | Just _ <- attribPrefix "id" = error "id= is deprecated, add a field named 'Id' and use sql="
| otherwise = Nothing
(idField, primaryComposite, uniqs, foreigns) = foldl' (\(mid, mp, us, fs) attr ->
let (i, p, u, f) = takeConstraint ps name' cols attr
squish xs m = xs `mappend` maybeToList m
in (just1 mid i, just1 mp p, squish us u, squish fs f)) (Nothing, Nothing, [],[]) attribs
derives = concat $ mapMaybe takeDerives attribs
cols :: [FieldDef]
cols = mapMaybe (takeColsEx ps) attribs
autoIdField = mkAutoIdField ps entName (DBName `fmap` idName) idSqlType
idSqlType = maybe SqlInt64 (const $ SqlOther "Primary Key") primaryComposite
setComposite Nothing fd = fd
setComposite (Just c) fd = fd { fieldReference = CompositeRef c }
just1 :: (Show x) => Maybe x -> Maybe x -> Maybe x
just1 (Just x) (Just y) = error $ "expected only one of: "
`mappend` show x `mappend` " " `mappend` show y
just1 x y = x `mplus` y
mkAutoIdField :: PersistSettings -> HaskellName -> Maybe DBName -> SqlType -> FieldDef
mkAutoIdField ps entName idName idSqlType = FieldDef
{ fieldHaskell = HaskellName "Id"
-- this should be modeled as a Maybe
-- but that sucks for non-ID field
-- TODO: use a sumtype FieldDef | IdFieldDef
, fieldDB = fromMaybe (DBName $ psIdName ps) idName
, fieldType = FTTypeCon Nothing $ keyConName $ unHaskellName entName
, fieldSqlType = idSqlType
-- the primary field is actually a reference to the entity
, fieldReference = ForeignRef entName defaultReferenceTypeCon
, fieldAttrs = []
, fieldStrict = True
}
defaultReferenceTypeCon :: FieldType
defaultReferenceTypeCon = FTTypeCon (Just "Data.Int") "Int64"
keyConName :: Text -> Text
keyConName entName = entName `mappend` "Id"
splitExtras :: [Line] -> ([[Text]], M.Map Text [[Text]])
splitExtras [] = ([], M.empty)
splitExtras (Line indent [name]:rest)
| not (T.null name) && isUpper (T.head name) =
let (children, rest') = span ((> indent) . lineIndent) rest
(x, y) = splitExtras rest'
in (x, M.insert name (map tokens children) y)
splitExtras (Line _ ts:rest) =
let (x, y) = splitExtras rest
in (ts:x, y)
takeColsEx :: PersistSettings -> [Text] -> Maybe FieldDef
takeColsEx = takeCols (\ft perr -> error $ "Invalid field type " ++ show ft ++ " " ++ perr)
takeCols :: (Text -> String -> Maybe FieldDef) -> PersistSettings -> [Text] -> Maybe FieldDef
takeCols _ _ ("deriving":_) = Nothing
takeCols onErr ps (n':typ:rest)
| not (T.null n) && isLower (T.head n) =
case parseFieldType typ of
Left err -> onErr typ err
Right ft -> Just FieldDef
{ fieldHaskell = HaskellName n
, fieldDB = DBName $ getDbName ps n rest
, fieldType = ft
, fieldSqlType = SqlOther $ "SqlType unset for " `mappend` n
, fieldAttrs = rest
, fieldStrict = fromMaybe (psStrictFields ps) mstrict
, fieldReference = NoReference
}
where
(mstrict, n)
| Just x <- T.stripPrefix "!" n' = (Just True, x)
| Just x <- T.stripPrefix "~" n' = (Just False, x)
| otherwise = (Nothing, n')
takeCols _ _ _ = Nothing
getDbName :: PersistSettings -> Text -> [Text] -> Text
getDbName ps n [] = psToDBName ps n
getDbName ps n (a:as) = fromMaybe (getDbName ps n as) $ T.stripPrefix "sql=" a
takeConstraint :: PersistSettings
-> Text
-> [FieldDef]
-> [Text]
-> (Maybe FieldDef, Maybe CompositeDef, Maybe UniqueDef, Maybe UnboundForeignDef)
takeConstraint ps tableName defs (n:rest) | not (T.null n) && isUpper (T.head n) = takeConstraint'
where
takeConstraint'
| n == "Unique" = (Nothing, Nothing, Just $ takeUniq ps tableName defs rest, Nothing)
| n == "Foreign" = (Nothing, Nothing, Nothing, Just $ takeForeign ps tableName defs rest)
| n == "Primary" = (Nothing, Just $ takeComposite defs rest, Nothing, Nothing)
| n == "Id" = (Just $ takeId ps tableName (n:rest), Nothing, Nothing, Nothing)
| otherwise = (Nothing, Nothing, Just $ takeUniq ps "" defs (n:rest), Nothing) -- retain compatibility with original unique constraint
takeConstraint _ _ _ _ = (Nothing, Nothing, Nothing, Nothing)
-- TODO: this is hacky (the double takeCols, the setFieldDef stuff, and setIdName).
-- Need to re-work the takeCols function.
takeId :: PersistSettings -> Text -> [Text] -> FieldDef
takeId ps tableName (n:rest) = fromMaybe (error "takeId: impossible!") $ setFieldDef $
takeCols (\_ _ -> addDefaultIdType) ps (field:rest `mappend` setIdName)
where
field = case T.uncons n of
Nothing -> error "takeId: empty field"
Just (f, ield) -> toLower f `T.cons` ield
addDefaultIdType = takeColsEx ps (field : keyCon : rest `mappend` setIdName)
setFieldDef = fmap (\fd ->
let refFieldType = if fieldType fd == FTTypeCon Nothing keyCon
then defaultReferenceTypeCon
else fieldType fd
in fd { fieldReference = ForeignRef (HaskellName tableName) $ refFieldType
})
keyCon = keyConName tableName
-- this will be ignored if there is already an existing sql=
-- TODO: I think there is a ! ignore syntax that would screw this up
setIdName = ["sql=" `mappend` psIdName ps]
takeId _ tableName _ = error $ "empty Id field for " `mappend` show tableName
takeComposite :: [FieldDef]
-> [Text]
-> CompositeDef
takeComposite fields pkcols
= CompositeDef
(map (getDef fields) pkcols)
attrs
where
(_, attrs) = break ("!" `T.isPrefixOf`) pkcols
getDef [] t = error $ "Unknown column in primary key constraint: " ++ show t
getDef (d:ds) t
| fieldHaskell d == HaskellName t =
if nullable (fieldAttrs d) /= NotNullable
then error $ "primary key column cannot be nullable: " ++ show t
else d
| otherwise = getDef ds t
-- Unique UppercaseConstraintName list of lowercasefields
takeUniq :: PersistSettings
-> Text
-> [FieldDef]
-> [Text]
-> UniqueDef
takeUniq ps tableName defs (n:rest)
| not (T.null n) && isUpper (T.head n)
= UniqueDef
(HaskellName n)
(DBName $ psToDBName ps (tableName `T.append` n))
(map (HaskellName &&& getDBName defs) fields)
attrs
where
(fields,attrs) = break ("!" `T.isPrefixOf`) rest
getDBName [] t = error $ "Unknown column in unique constraint: " ++ show t
getDBName (d:ds) t
| fieldHaskell d == HaskellName t = fieldDB d
| otherwise = getDBName ds t
takeUniq _ tableName _ xs = error $ "invalid unique constraint on table[" ++ show tableName ++ "] expecting an uppercase constraint name xs=" ++ show xs
data UnboundForeignDef = UnboundForeignDef
{ _unboundFields :: [Text] -- ^ fields in other entity
, _unboundForeignDef :: ForeignDef
}
takeForeign :: PersistSettings
-> Text
-> [FieldDef]
-> [Text]
-> UnboundForeignDef
takeForeign ps tableName _defs (refTableName:n:rest)
| not (T.null n) && isLower (T.head n)
= UnboundForeignDef fields $ ForeignDef
(HaskellName refTableName)
(DBName $ psToDBName ps refTableName)
(HaskellName n)
(DBName $ psToDBName ps (tableName `T.append` n))
[]
attrs
False
where
(fields,attrs) = break ("!" `T.isPrefixOf`) rest
takeForeign _ tableName _ xs = error $ "invalid foreign key constraint on table[" ++ show tableName ++ "] expecting a lower case constraint name xs=" ++ show xs
takeDerives :: [Text] -> Maybe [Text]
takeDerives ("deriving":rest) = Just rest
takeDerives _ = Nothing
nullable :: [Text] -> IsNullable
nullable s
| "Maybe" `elem` s = Nullable ByMaybeAttr
| "nullable" `elem` s = Nullable ByNullableAttr
| otherwise = NotNullable
| psibi/persistent | persistent/Database/Persist/Quasi.hs | mit | 20,226 | 0 | 20 | 6,166 | 6,362 | 3,285 | 3,077 | 401 | 13 |
module Sound.Tidal.Blofeld where
import Sound.Tidal.Stream (makeI, makeF)
import Sound.Tidal.MIDI.Control
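-- | Controller mapping for the Waldorf Blofeld synthesizer: each entry pairs a
-- Tidal parameter name with the MIDI CC (or SysEx) number that drives it.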
keys :: ControllerShape
keys = ControllerShape {params = [
mCC "portamento" 5,
mCC "expression" 11,
CC "lfoshape" 15 (0, 5) 0 passThru, -- 0..5 - sine,triangle,square,saw,random,sample&hold
mCC "lforate" 16,
CC "lfosync" 17 (0, 1) 0 passThru, -- 0 off, 1 on
mCC "lfodelay" 18,
CC "octave" 27 (16, 112) 0 passThru, -- 16, 28, 40 .. 112 - 128' .. 1/2'
CC "semitone" 28 (52, 76) 0.5 passThru, -- 52 .. 76 - -12 - +12 semitones
mCC "detune" 29,
mCC "osc1fm" 30,
SysEx "osc1fmsrc" 6 (0, 11) 0 passThru,
CC "osc1shape" 31 (0, 5) 0 passThru, -- 0..5 - pulse, saw, tri, sine, alt 1, alt 2
mCC "osc1pw" 33,
mCC "osc1pwm" 34,
SysEx "osc1pwmsrc" 10 (0, 30) 0 passThru,
mCC "osc1vol" 52,
mCC "osc1pan" 53,
mCC "ringmod" 54,
mCC "ringpan" 55,
mCC "noise" 60,
mCC "noisepan" 61,
mCC "noisecol" 62,
mCC "kcutoff" 69,
mCC "attack" 101,
mCC "decay" 102,
mCC "sustain" 103,
mCC "release" 106
],
duration = ("dur", 0.05),
velocity = ("vel", 0.5),
latency = 0.1}
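-- The bindings below expose each controller parameter as a pattern parameter
-- ('makeI' for integer-valued parameters, 'makeF' for floating-point ones).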
oscKeys = toOscShape keys
note = makeI oscKeys "note"
dur = makeF oscKeys "dur"
portamento = makeF oscKeys "portamento"
expression = makeF oscKeys "expression"
octave = makeF oscKeys "octave"
semitone = makeF oscKeys "semitone"
detune = makeF oscKeys "detune"
kcutoff = makeF oscKeys "kcutoff"
lforate = makeF oscKeys "lforate"
lfoshape = makeF oscKeys "lfoshape"
lfodelay = makeF oscKeys "lfodelay"
lfosync = makeF oscKeys "lfosync"
attack = makeF oscKeys "attack"
decay = makeF oscKeys "decay"
sustain = makeF oscKeys "sustain"
release = makeF oscKeys "release"
osc1fm = makeF oscKeys "osc1fm"
osc1fmsrc = makeF oscKeys "osc1fmsrc"
osc1shape = makeF oscKeys "osc1shape"
osc1pw = makeF oscKeys "osc1pw"
osc1pwm = makeF oscKeys "osc1pwm"
osc1pwmsrc = makeF oscKeys "osc1pwmsrc"
osc1vol = makeF oscKeys "osc1vol"
osc1pan = makeF oscKeys "osc1pan"
ringmod = makeF oscKeys "ringmod"
ringpan = makeF oscKeys "ringpan"
noise = makeF oscKeys "noise"
noisepan = makeF oscKeys "noisepan"
noisecol = makeF oscKeys "noisecol"
| kindohm/tidal-midi | Sound/Tidal/MIDI/Blofeld.hs | gpl-3.0 | 2,958 | 0 | 9 | 1,221 | 679 | 361 | 318 | 65 | 1 |
module Main where
import HplProducts.Test hiding (main)
main :: IO ()
main = do return ()
| alessandroleite/hephaestus-pl | src/meta-hephaestus/HplDrivers/LoadTest.hs | lgpl-3.0 | 92 | 0 | 8 | 18 | 38 | 21 | 17 | 4 | 1 |
-- | Example of defining FFI functions.
--
-- The `ffi' method is currently incompatible with 'RebindableSyntax',
-- so these are defined in another module.
module FFIExample where
import Data.Text (Text)
import DOM
import FFI
onKeyUp :: Element -> Fay () -> Fay ()
onKeyUp = ffi "%1.onkeyup=%2"
setInnerHTML :: Element -> Text -> Fay ()
setInnerHTML = ffi "%1.innerHTML=%2"
| lubomir/dot-race | fay/FFIExample.hs | bsd-3-clause | 379 | 0 | 8 | 64 | 81 | 45 | 36 | 8 | 1 |
{-# LANGUAGE TypeFamilies, FlexibleContexts, FlexibleInstances, MultiParamTypeClasses, RankNTypes, GADTs, ScopedTypeVariables, FunctionalDependencies, RecursiveDo, UndecidableInstances, GeneralizedNewtypeDeriving, StandaloneDeriving, EmptyDataDecls, NoMonomorphismRestriction, TypeOperators, DeriveDataTypeable, PackageImports, TemplateHaskell, LambdaCase, BangPatterns, ConstraintKinds #-}
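-- | Cross-implementation test: the same event-network builders are run
-- against the pure reference implementation and the Spider implementation,
-- and their observable outputs are compared.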
module Reflex.Test.CrossImpl (test) where
import Prelude hiding (mapM, mapM_, sequence, sequence_, foldl, and)
import Reflex.Class
import Reflex.Host.Class
import Reflex.Dynamic
import qualified Reflex.Spider.Internal as S
import qualified Reflex.Pure as P
import Control.Monad.Ref
import Control.Monad.Identity hiding (mapM, mapM_, forM, forM_, sequence, sequence_)
import qualified Data.Set as Set
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
import Control.Arrow (second, (&&&))
import Data.Traversable
import Data.Foldable
import Control.Monad.State.Strict hiding (mapM, mapM_, forM, forM_, sequence, sequence_)
import Control.Monad.Writer hiding (mapM, mapM_, forM, forM_, sequence, sequence_)
import Data.Dependent.Map (DSum (..))
import System.Mem
import System.Exit
import System.IO.Unsafe
mapToPureBehavior :: Map Int a -> Behavior PureReflexDomain a
mapToPureBehavior m = P.Behavior $ \t -> case Map.lookupLE t m of
Nothing -> error $ "mapToPureBehavior: no value for time " <> show t
Just (_, v) -> v
mapToPureEvent :: Map Int a -> Event PureReflexDomain a
mapToPureEvent m = P.Event $ flip Map.lookup m
relevantTestingTimes :: (Map Int a, Map Int b) -> [Int]
relevantTestingTimes (b, e) = case Set.minView &&& Set.maxView $ Map.keysSet b `Set.union` Map.keysSet e of
(Just (t0, _), Just (t1, _)) -> [t0..t1+1] -- We need to go one step past the last time to see the result of the final event
_ -> [] -- Doesn't actually make much sense
type PureReflexDomain = P.Pure Int
type TimeM = (->) Int
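-- | Run a builder against the pure implementation: the input maps become
-- time-indexed Behavior/Event values, and the outputs are sampled at every
-- relevant time step.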
testPure :: (t ~ PureReflexDomain, m ~ TimeM) => ((Behavior t a, Event t b) -> m (Behavior t c, Event t d)) -> (Map Int a, Map Int b) -> (Map Int c, Map Int d)
testPure builder (b, e) =
let (P.Behavior b', P.Event e') = ($ 0) $ builder (mapToPureBehavior b, mapToPureEvent e)
relevantTimes = relevantTestingTimes (b, e)
e'' = Map.mapMaybe id $ Map.fromList $ map (id &&& e') relevantTimes
b'' = Map.fromList $ map (id &&& b') relevantTimes
in (b'', e'')
class MapMSignals a a' t t' | a -> t, a' -> t', a t' -> a', a' t -> a where
mapMSignals :: Monad m => (forall b. Behavior t b -> m (Behavior t' b)) -> (forall b. Event t b -> m (Event t' b)) -> a -> m a'
instance MapMSignals (Behavior t a) (Behavior t' a) t t' where
mapMSignals fb _ = fb
instance MapMSignals (Event t a) (Event t' a) t t' where
mapMSignals _ fe = fe
instance (MapMSignals a a' t t', MapMSignals b b' t t') => MapMSignals (a, b) (a', b') t t' where
mapMSignals fb fe (a, b) = liftM2 (,) (mapMSignals fb fe a) (mapMSignals fb fe b)
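-- | Run the same builder in the Spider host: input triggers are fired at each
-- time step, the output behavior is sampled, and output event occurrences are
-- collected.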
testSpider :: (forall m t. TestCaseConstraint t m => (Behavior t a, Event t b) -> m (Behavior t c, Event t d)) -> (Map Int a, Map Int b) -> (Map Int c, Map Int d)
testSpider builder (bMap, eMap) = unsafePerformIO $ S.runSpiderHost $ do
(re, reTrigger) <- newEventWithTriggerRef
(rb, rbTrigger) <- newEventWithTriggerRef
b <- runHostFrame $ hold (error "testSpider: No value for input behavior yet") rb
(b', e') <- runHostFrame $ builder (b, re)
e'Handle <- subscribeEvent e' --TODO: This should be unnecessary
let times = relevantTestingTimes (bMap, eMap)
liftIO performGC
outputs <- forM times $ \t -> do
forM_ (Map.lookup t bMap) $ \val -> mapM_ (\ rbt -> fireEvents [rbt :=> val]) =<< readRef rbTrigger
bOutput <- sample b'
eOutput <- liftM join $ forM (Map.lookup t eMap) $ \val -> do
mret <- readRef reTrigger
let firing = case mret of
Just ret -> [ret :=> val]
Nothing -> []
fireEventsAndRead firing $ sequence =<< readEvent e'Handle
liftIO performGC
return (t, (bOutput, eOutput))
return (Map.fromList $ map (second fst) outputs, Map.mapMaybe id $ Map.fromList $ map (second snd) outputs)
tracePerf :: Show a => a -> b -> b
tracePerf = flip const
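-- | Compare the pure and Spider results for a single builder and report
-- whether they agree.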
testAgreement :: (Eq c, Eq d, Show c, Show d) => (forall m t. TestCaseConstraint t m => (Behavior t a, Event t b) -> m (Behavior t c, Event t d)) -> (Map Int a, Map Int b) -> IO Bool
testAgreement builder inputs = do
let identityResult = testPure builder inputs
tracePerf "---------" $ return ()
let spiderResult = testSpider builder inputs
tracePerf "---------" $ return ()
let resultsAgree = identityResult == spiderResult
if resultsAgree
then do putStrLn "Success:"
print identityResult
else do putStrLn "Failure:"
putStrLn $ "Pure result: " <> show identityResult
putStrLn $ "Spider result: " <> show spiderResult
return resultsAgree
type TestCaseConstraint t m = (Reflex t, MonadSample t m, MonadHold t m, MonadFix m, MonadFix (PushM t))
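-- | A test case bundles time-indexed inputs with a builder that must work for
-- any Reflex implementation satisfying 'TestCaseConstraint'.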
data TestCase = forall a b c d. (Eq c, Eq d, Show c, Show d) => TestCase (Map Int a, Map Int b) (forall m t. TestCaseConstraint t m => (Behavior t a, Event t b) -> m (Behavior t c, Event t d))
testCases :: [(String, TestCase)]
testCases =
[ (,) "hold" $ TestCase (Map.singleton 0 "asdf", Map.fromList [(1, "qwer"), (2, "lkj")]) $ \(_, e) -> do
b' <- hold "123" e
return (b', e)
, (,) "count" $ TestCase (Map.singleton 0 (), Map.fromList [(1, ()), (2, ()), (3, ())]) $ \(_, e) -> do
e' <- liftM updated $ count e
b' <- hold (0 :: Int) e'
return (b', e')
, (,) "onceE-1" $ TestCase (Map.singleton 0 "asdf", Map.fromList [(1, "qwer"), (2, "lkj")]) $ \(b, e) -> do
e' <- onceE $ leftmost [e, e]
return (b, e')
, (,) "switch-1" $ TestCase (Map.singleton 0 "asdf", Map.fromList [(1, "qwer"), (2, "lkj")]) $ \(b, e) -> do
let e' = fmap (const e) e
b' <- hold never e'
let e'' = switch b'
return (b, e'')
, (,) "switch-2" $ TestCase (Map.singleton 0 "asdf", Map.fromList [(1, "qwer"), (2, "lkj")]) $ \(b, e) -> do
let e' = flip pushAlways e $ const $ do
let ea = fmap (const "a") e
let eb = fmap (const "b") e
let eab = leftmost [ea, eb]
liftM switch $ hold eab never
e'' = coincidence e'
return (b, e'')
, (,) "switch-3" $ TestCase (Map.singleton 0 "asdf", Map.fromList [(1, "qwer"), (2, "lkj")]) $ \(b, e) -> do
let e' = flip pushAlways e $ const $ do
let ea = fmap (const "a") e
let eb = fmap (const "b") e
let eab = leftmost [ea, eb]
liftM switch $ hold eab (fmap (const e) e)
e'' = coincidence e'
return (b, e'')
, (,) "switch-4" $ TestCase (Map.singleton 0 "asdf", Map.fromList [(1, "qwer"), (2, "lkj")]) $ \(b, e) -> do
let e' = leftmost [e, e]
e'' <- liftM switch $ hold e' (fmap (const e) e)
return (b, e'')
, (,) "switchPromptly-1" $ TestCase (Map.singleton 0 (0 :: Int), Map.fromList [(1, "qwer"), (2, "lkj")]) $ \(b, e) -> do
let e' = fmap (const e) e
e'' <- switchPromptly never e'
return (b, e'')
, (,) "switchPromptly-2" $ TestCase (Map.singleton 0 (0 :: Int), Map.fromList [(1, "qwer"), (2, "lkj")]) $ \(b, e) -> do
let e' = fmap (const e) e
e'' <- switchPromptly never $ leftmost [e', e']
return (b, e'')
, (,) "switchPromptly-3" $ TestCase (Map.singleton 0 (0 :: Int), Map.fromList [(1, "qwer"), (2, "lkj")]) $ \(b, e) -> do
let e' = leftmost [e, e]
e'' <- switchPromptly never (fmap (const e) e')
return (b, e'')
, (,) "switchPromptly-4" $ TestCase (Map.singleton 0 (0 :: Int), Map.fromList [(1, "qwer"), (2, "lkj"), (3, "asdf")]) $ \(b, e) -> do
let e' = leftmost [e, e]
e'' <- switchPromptly never (fmap (const e') e)
return (b, e'')
, (,) "switch-5" $ TestCase (Map.singleton 0 (0 :: Int), Map.fromList [(1, "qwer"), (2, "lkj")]) $ \(b, e) -> do
let e' = leftmost [e, e]
e'' <- liftM switch $ hold never (fmap (const e') e)
return (b, e'')
, (,) "switchPromptly-5" $ TestCase (Map.singleton 0 (0 :: Int), Map.fromList [(1, "qwer"), (2, "lkj")]) $ \(b, e) -> do
let e' = flip push e $ \_ -> do
return . Just =<< onceE e
e'' <- switchPromptly never e'
return (b, e'')
, (,) "switchPromptly-6" $ TestCase (Map.singleton 0 (0 :: Int), Map.fromList [(1, "qwer"), (2, "lkj")]) $ \(b, e) -> do
let e' = flip pushAlways e $ \_ -> do
switchPromptly e never
e'' <- switchPromptly never e'
return (b, e'')
, (,) "coincidence-1" $ TestCase (Map.singleton 0 (0 :: Int), Map.fromList [(1, "qwer"), (2, "lkj")]) $ \(b, e) -> do
let e' = flip pushAlways e $ \_ -> return $ fmap id e
e'' = coincidence e'
return (b, e'')
, (,) "coincidence-2" $ TestCase (Map.singleton 0 (0 :: Int), Map.fromList [(1, "qwer"), (2, "lkj")]) $ \(b, e) -> do
let e' = flip pushAlways e $ \_ -> return $ leftmost [e, e]
e'' = coincidence e'
return (b, e'')
, (,) "coincidence-3" $ TestCase (Map.singleton 0 (0 :: Int), Map.fromList [(1, "qwer"), (2, "lkj")]) $ \(b, e) -> do
let e' = flip pushAlways e $ \_ -> return $ coincidence $ fmap (const e) e
e'' = coincidence e'
return (b, e'')
, (,) "coincidence-4" $ TestCase (Map.singleton 0 (0 :: Int), Map.fromList [(1, "qwer"), (2, "lkj"), (3, "asdf")]) $ \(b, e) -> do
let e' = flip pushAlways e $ \_ -> onceE e
e'' = coincidence e'
return (b, e'')
, (,) "coincidence-5" $ TestCase (Map.singleton 0 (0 :: Int), Map.fromList [(1, "qwer")]) $ \(b, e) -> do
let eChild = flip pushAlways e $ const $ do
let eNewValues = leftmost [e, e]
return $ coincidence $ fmap (const eNewValues) eNewValues
e' = coincidence eChild
return (b, e')
, (,) "coincidence-6" $ TestCase (Map.singleton 0 (0 :: Int), Map.fromList [(1, "qwer")]) $ \(b, e) -> do
let eChild = flip pushAlways e $ const $ do
let e' = coincidence $ fmap (const e) e
return $ leftmost [e', e']
e'' = coincidence eChild
return (b, e'')
, (,) "coincidence-7" $ TestCase (Map.singleton 0 (0 :: Int), Map.fromList [(1, "qwer"), (2, "lkj"), (3, "asdf")]) $ \(b, e) -> do
let e' = leftmost [e, e]
eCoincidences = coincidence $ fmap (const e') e
return (b, eCoincidences)
, (,) "holdWhileFiring" $ TestCase (Map.singleton 0 "zxc", Map.fromList [(1, "qwer"), (2, "lkj")]) $ \(b, e) -> do
eo <- onceE e
bb <- hold b $ pushAlways (const $ hold "asdf" eo) eo
let b' = pull $ sample =<< sample bb
return (b', e)
, (,) "joinDyn" $ TestCase (Map.singleton 0 (0 :: Int), Map.fromList [(1, "qwer"), (2, "lkj")]) $ \(b, e) -> do
bb <- hold "b" e
bd <- hold never . fmap (const e) =<< onceE e
eOuter <- liftM (pushAlways sample . fmap (const bb)) $ onceE e
let eInner = switch bd
e' = leftmost [eOuter, eInner]
return (b, e')
]
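-- | Run every test case and exit with a non-zero status if any pair of
-- implementations disagrees.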
test :: IO ()
test = do
results <- forM testCases $ \(name, TestCase inputs builder) -> do
putStrLn $ "Test: " <> name
testAgreement builder inputs
exitWith $ if and results
then ExitSuccess
else ExitFailure 1
| k0001/reflex | test/Reflex/Test/CrossImpl.hs | bsd-3-clause | 11,406 | 0 | 25 | 2,840 | 5,250 | 2,754 | 2,496 | -1 | -1 |