| code | repo_name | path | license | size | n_ast_errors | ast_max_depth | n_whitespaces | n_ast_nodes | n_ast_terminals | n_ast_nonterminals | loc | cycloplexity |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
| string (lengths 5..1.03M) | string (lengths 5..90) | string (lengths 4..158) | string (15 classes) | int64 (5..1.03M) | int64 (0..53.9k) | int64 (2..4.17k) | int64 (0..365k) | int64 (3..317k) | int64 (1..171k) | int64 (1..146k) | int64 (-1..37.3k) | int64 (-1..1.31k) |
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ExtendedDefaultRules #-}
{-# LANGUAGE FlexibleContexts #-}
{-# OPTIONS_GHC -fno-warn-type-defaults #-}
-- | A JSON API which describes itself.
module Descriptive.JSON
(-- * Consumers
parse
,object
,key
,keyMaybe
,array
,string
,integer
,double
,bool
,null
-- * Annotations
,label
-- * Description
,Doc(..)
)
where
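-- Usage sketch (a hypothetical @Submit@ record; assumes the 'Applicative'
-- instance for 'Consumer' from "Descriptive"):
--
-- > data Submit = Submit Text Integer deriving (Show)
-- >
-- > submit :: Monad m => Consumer Value (Doc d) m Submit
-- > submit =
-- >   object "Submit"
-- >          (Submit <$> key "name" (string "Name")
-- >                  <*> key "age" (integer "Age"))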
import Descriptive
import Descriptive.Internal
import Control.Monad.State.Strict
import Data.Scientific
import Data.Function
import Data.Aeson hiding (Value(Object,Null,Array),object)
import Data.Aeson.Types (Value,parseMaybe)
import qualified Data.Aeson.Types as Aeson
import Data.Bifunctor
import Data.Data
import Data.Monoid
import Data.Text (Text)
import Data.Vector ((!))
import Data.Vector (Vector)
import qualified Data.Vector as V
import Prelude hiding (null)
-- | Description of parseable things.
data Doc a
= Integer !Text
| Double !Text
| Text !Text
| Boolean !Text
| Null !Text
| Object !Text
| Key !Text
| Array !Text
| Label !a
deriving (Eq,Show,Typeable,Data)
-- | Consume an object.
object :: Monad m
=> Text -- ^ Description of what the object is.
-> Consumer Object (Doc d) m a -- ^ An object consumer.
-> Consumer Value (Doc d) m a
object desc =
wrap (\d ->
do s <- get
runSubStateT (const mempty)
(const s)
(liftM (Wrap doc) d))
(\_ p ->
do v <- get
case fromJSON v of
Error{} ->
return (Continued (Unit doc))
Success (o :: Object) ->
do s <- get
runSubStateT
(const o)
(const s)
(do r <- p
case r of
Failed e ->
return (Continued (Wrap doc e))
Continued e ->
return (Continued (Wrap doc e))
Succeeded a ->
return (Succeeded a)))
where doc = Object desc
-- | Consume from object at the given key.
key :: Monad m
=> Text -- ^ The key to lookup.
-> Consumer Value (Doc d) m a -- ^ A value consumer of the object at the key.
-> Consumer Object (Doc d) m a
key k =
wrap (\d ->
do s <- get
runSubStateT toJSON
(const s)
(liftM (Wrap doc) d))
(\_ p ->
do s <- get
case parseMaybe (const (s .: k))
() of
Nothing ->
return (Continued (Unit doc))
Just (v :: Value) ->
do r <-
runSubStateT (const v)
(const s)
p
return (bimap (Wrap doc) id r))
where doc = Key k
-- | Optionally consume from object at the given key, only if it
-- exists.
keyMaybe :: Monad m
=> Text -- ^ The key to lookup.
-> Consumer Value (Doc d) m a -- ^ A value consumer of the object at the key.
-> Consumer Object (Doc d) m (Maybe a)
keyMaybe k =
wrap (\d ->
do s <- get
runSubStateT toJSON
(const s)
(liftM (Wrap doc) d))
(\_ p ->
do s <- get
case parseMaybe (const (s .: k))
() of
Nothing ->
return (Succeeded Nothing)
Just (v :: Value) ->
do r <-
runSubStateT (const v)
(const s)
p
return (bimap (Wrap doc) Just r))
where doc = Key k
-- | Consume an array.
array :: Monad m
=> Text -- ^ Description of this array.
-> Consumer Value (Doc d) m a -- ^ Consumer for each element in the array.
-> Consumer Value (Doc d) m (Vector a)
array desc =
wrap (\d -> liftM (Wrap doc) d)
(\_ p ->
do s <- get
case fromJSON s of
Error{} ->
return (Continued (Unit doc))
Success (o :: Vector Value) ->
fix (\loop i acc ->
if i < V.length o
then do r <-
runSubStateT (const (o ! i))
(const s)
p
case r of
Failed e ->
return (Continued (Wrap doc e))
Continued e ->
return (Continued (Wrap doc e))
Succeeded a ->
loop (i + 1)
(a : acc)
else return (Succeeded (V.fromList (reverse acc))))
0
[])
where doc = Array desc
-- | Consume a string.
string :: Monad m
=> Text -- ^ Description of what the string is for.
-> Consumer Value (Doc d) m Text
string doc =
consumer (return d)
(do s <- get
case fromJSON s of
Error{} -> return (Continued d)
Success a ->
return (Succeeded a))
where d = Unit (Text doc)
-- | Consume an integer.
integer :: Monad m
=> Text -- ^ Description of what the integer is for.
-> Consumer Value (Doc d) m Integer
integer doc =
consumer (return d)
(do s <- get
case s of
Number a
| Right i <- floatingOrInteger a ->
return (Succeeded i)
_ -> return (Continued d))
where d = Unit (Integer doc)
-- | Consume a double.
double :: Monad m
=> Text -- ^ Description of what the double is for.
-> Consumer Value (Doc d) m Double
double doc =
consumer (return d)
(do s <- get
case s of
Number a ->
return (Succeeded (toRealFloat a))
_ -> return (Continued d))
where d = Unit (Double doc)
-- | Parse a boolean.
bool :: Monad m
=> Text -- ^ Description of what the bool is for.
-> Consumer Value (Doc d) m Bool
bool doc =
consumer (return d)
(do s <- get
case fromJSON s of
Error{} -> return (Continued d)
Success a ->
return (Succeeded a))
where d = Unit (Boolean doc)
-- | Expect null.
null :: Monad m
=> Text -- ^ What the null is for.
-> Consumer Value (Doc d) m ()
null doc =
consumer (return d)
(do s <- get
case fromJSON s of
Success Aeson.Null ->
return (Succeeded ())
_ -> return (Continued d))
where d = Unit (Null doc)
-- | Wrap a consumer with a label e.g. a type tag.
label :: Monad m
=> d -- ^ Some label.
-> Consumer s (Doc d) m a -- ^ A value consumer.
-> Consumer s (Doc d) m a
label desc =
wrap (liftM (Wrap doc))
(\_ p ->
do r <- p
case r of
Failed e ->
return (Failed (Wrap doc e))
Continued e ->
return (Continued (Wrap doc e))
k -> return k)
where doc = Label desc
-- | Parse from a consumer.
parse :: Monad m
=> d -- ^ Description of what it expects.
-> (a -> StateT s m (Maybe b)) -- ^ Attempt to parse the value.
-> Consumer s d m a -- ^ Consumer to add validation to.
-> Consumer s d m b -- ^ A new validating consumer.
parse d' check =
wrap (liftM wrapper)
(\d p ->
do s <- get
r <- p
case r of
(Failed e) -> return (Failed e)
(Continued e) ->
return (Continued e)
(Succeeded a) ->
do r' <- check a
case r' of
Nothing ->
do doc <- withStateT (const s) d
return (Continued (wrapper doc))
Just a' -> return (Succeeded a'))
where wrapper = Wrap d'
| chrisdone/descriptive | src/Descriptive/JSON.hs | bsd-3-clause | 8,887 | 0 | 26 | 4,143 | 2,507 | 1,255 | 1,252 | 266 | 5 |
{-|
Module : MZinHaskell
Description : Integration of MiniZinc 2.0 in Haskell
License : BSD3
Maintainer : Klara Marntirosian <[email protected]>
Stability : experimental
This module provides IO functionality for running the Haskell representation of a
MiniZinc model and getting back the solutions in Haskell values.
-}
module Interfaces.MZinHaskell (
-- iTestModel,
iRunModel,
runModel,
Interfaces.MZPrinter.layout,
-- testModelWithData,
-- testModelWithParser,
writeData
) where
import Data.List
import Data.Char
import System.Process
import System.FilePath
import System.Directory
import Interfaces.MZAuxiliary
import Interfaces.MZASTBase (MZModel, Item(Comment))
import Interfaces.MZAST (GItem(..))
import Interfaces.MZPrinter
import Interfaces.FZSolutionParser (Solution, trySolutionsDefault, getAllSolutions)
import Text.Parsec.Error
import Text.Parsec.String (Parser)
{-
-- | Same as `testModel` but accepts one more argument for the data of the model.
testModelWithData
:: [GItem 'OK] -- ^ The model
-> [GItem 'OK] -- ^ The data to be used by the model
-> FilePath -- ^ Path of the file in which the FlatZinc translation will be printed (without ".fzn" extension)
-> Int -- ^ Chosen solver (@1@ for the G12/FD built-in solver or @2@ for choco3)
-> Int -- ^ Number of solutions to be returned
-> IO (Either ParseError [Solution])
testModelWithData model mdata path solver num =
let fdata = [Comment' "Model\'s data"] ++ mdata ++ [Comment' "End of model\'s data"]
in testModel (fdata ++ model) path solver num
-}
-- | Same as `testModel`, but interactive.
--
-- Interactively runs a constraint model and outputs its solution(s). The
-- function first prompts the user for the working directory, in which the
-- FlatZinc file will be created, and then for a name for the constraint model,
-- after which the created FlatZinc file is named. It also asks the user to
-- choose between the supported solvers and the desired number of solutions. Returns
-- either a parse error or a list of solutions of the constraint model. The length
-- of the list is at most equal to the number of solutions requested.
iRunModel :: [GItem a] -> IO (Either ParseError [Solution])
iRunModel m = do
putStrLn "Enter working directory:"
dirpath <- getLine
putStr "Enter model\'s name: "
name <- getLine
putStr "Choose a solver from the list below:\r\n\t1. G12/FD\r\n\t2. choco3\r\n\r\nEnter the number associated with the solver: "
str_solver <- getLine
putStr $ if (str_solver /= "2")
then "Number of solutions to be returned: "
else "Return all solutions? Y/N: "
str_ns <- getLine;
let solver = read str_solver
ns = if (solver == 2)
then if (read ("\"" ++ str_ns ++ "\"") == "Y")
then 0
else 1
else read str_ns
path = joinPath [dirpath, name]
runModel m path solver ns
-- | Runs a model and parses its solution(s). Use this function if the model contains no
-- @output@ item, so that the solutions have the default format.
runModel :: [GItem a] -- ^ The model
-> FilePath -- ^ The path of the file in which the FlatZinc translation will be printed (without ".fzn" extension)
-> Int -- ^ The chosen solver (@1@ for the G12/FD built-in solver or @2@ for choco3)
-> Int -- ^ The number of solutions to be returned
-> IO (Either ParseError [Solution])
runModel = testModelWithParser trySolutionsDefault
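-- Example invocation (a sketch; @myModel@ is a hypothetical model built with
-- the "Interfaces.MZAST" combinators):
--
-- > result <- runModel myModel "/tmp/mymodel" 1 3
-- > either print (mapM_ print) result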
-- | Runs a model and parses its solution(s) using the specified parser. Use
-- this function if the model outputs its solutions in a format other than the
-- default.
testModelWithParser :: Parser [Solution] -- ^ The parser with which solutions will be
-- parsed
-> [GItem a] -- ^ The model
-> FilePath -- ^ The path of the file in which the FlatZinc
-- translation will be printed (without ".fzn"
-- extension)
-> Int -- ^ The chosen solver (@1@ for the G12/FD
-- built-in solver or @2@ for choco3)
-> Int -- ^ The number of solutions to be returned
-> IO (Either ParseError [Solution])
testModelWithParser p m mpath s n = do
-- Get configuration and set filepaths
configuration <- parseConfig
let mz_dir = addTrailingPathSeparator $ case minizinc configuration of
"" -> "."
str -> str
let mzn_fp = spaceFix $ mpath ++ ".mzn"
let fzn_fp = spaceFix $ mpath ++ ".fzn"
let res_fp = spaceFix $ mpath ++ ".res"
-- Write mzn file
writeFile mzn_fp (layout m)
let mzn2fzn = proc (mz_dir ++ "mzn2fzn") ["-O-"
,"-o", fzn_fp
, mzn_fp]
(ec1, out1, err1) <- readCreateProcessWithExitCode mzn2fzn ""
res <- case err1 of
"" -> case s of
-- G12/FD solver
1 -> do
let fz_options = ["-b", "fd"]
++ case (n > 0) of
True -> ["-n", show n]
_ -> []
++ [fzn_fp]
let flatzinc = proc (mz_dir ++ "flatzinc") fz_options
(ec2, out2, err2) <- readCreateProcessWithExitCode flatzinc ""
return $ case err2 of
"" -> out2
_ -> "flatzinc error: " ++ err2 ++ "."
-- Choco solver
2 -> let antlr = antlr_path configuration
chocoParser = chocoparser configuration
chocoSolver = chocosolver configuration
all_or_first = if (n == 0) then "-a " else ""
in readCreateProcess (shell $ "java -cp ." ++ (intercalate [searchPathSeparator] [chocoSolver, chocoParser, antlr]) ++ " org.chocosolver.parser.flatzinc.ChocoFZN " ++ all_or_first ++ mpath ++ ".fzn") ""
_ -> readIO ("mzn2fzn error: " ++ err1 ++ ".")
writeFile res_fp res
  -- Comment out the lines below to keep the generated files for debugging
removeFile res_fp
removeFile mzn_fp
removeFile fzn_fp
return $ getAllSolutions p res
-- | Writes the model's data file. The 'MZModel' of the argument must contain
-- only 'Interfaces.MZASTBase.Assign' items.
writeData :: MZModel -> IO ()
writeData m = do
putStrLn "Enter datafile's filepath:"
datapath <- getLine
writeFile datapath (Prelude.show $ printModel m)
| GRACeFUL-project/haskelzinc | src/Interfaces/MZinHaskell.hs | bsd-3-clause | 6,814 | 0 | 25 | 2,125 | 1,012 | 536 | 476 | 91 | 7 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
-- |
-- Module : Data.Array.Nikola.Util.Generic
-- Copyright : (c) Geoffrey Mainland 2012
-- License : BSD-style
--
-- Maintainer : Geoffrey Mainland <[email protected]>
-- Stability : experimental
-- Portability : non-portable
module Data.Array.Nikola.Util.Generic (
Traversal,
TraversableFamily(..),
Fold,
foldFam
) where
import Control.Applicative (Applicative)
import Data.Functor.Constant
import Data.Monoid
type Traversal fam f = forall a. fam a -> a -> f a
class TraversableFamily fam where
traverseFam :: Applicative f => Traversal fam f -> Traversal fam f
type Fold fam m = forall a. fam a -> a -> m
foldFam :: (TraversableFamily fam, Monoid m) => Fold fam m -> Fold fam m
foldFam child w a = getConstant $ traverseFam (\v b -> Constant $ child v b) w a
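-- A minimal sketch of a 'TraversableFamily' instance (hypothetical @Exp@ type
-- and @ExpFam@ family, for illustration; needs GADTs):
--
-- > data Exp = Lit Int | Add Exp Exp
-- >
-- > data ExpFam a where
-- >   ExpA :: ExpFam Exp
-- >
-- > instance TraversableFamily ExpFam where
-- >   traverseFam child ExpA (Add a b) = Add <$> child ExpA a <*> child ExpA b
-- >   traverseFam _     ExpA e         = pure e
-- >
-- > -- 'foldFam' then folds over the immediate children, e.g. counting them:
-- > countChildren :: Exp -> Int
-- > countChildren = getSum . foldFam (\ExpA _ -> Sum 1) ExpA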
| mainland/nikola | src/Data/Array/Nikola/Util/Generic.hs | bsd-3-clause | 1,125 | 0 | 10 | 203 | 227 | 132 | 95 | 23 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
module InteractivePrompt where
import System.Console.CmdArgs
import System.Directory
import System.FilePath
import System.Exit
import Control.Monad
import Control.Applicative
import GetURLs
import GPTracklists
import Data.Aeson
import qualified Data.ByteString.Lazy as B
import Control.Concurrent.Async
-- Fetches multiple GP show tracklists from URLs listed in a file and writes them to a file in JSON format
-- The URLs in the file are assumed to have the format of 200 mixes per page from mixesdb.com
-- e.g. http://www.mixesdb.com/db/index.php?title=Category%3AGilles+Peterson&pagefrom=1980
createGPShowFromCategoryURL :: String -> FilePath -> IO ()
createGPShowFromCategoryURL url_ outputFilePath_ = do
urls <- readCategoryURL url_
let airDates = map getAirDate urls
tracklists <- mapConcurrently getTracklistFromURL urls
let gpShows = createGPShows airDates tracklists
let outputResult = encode gpShows
B.writeFile outputFilePath_ outputResult
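-- Example (a sketch; the output path is a placeholder, the URL format is the
-- one described above):
--
-- > createGPShowFromCategoryURL
-- >   "http://www.mixesdb.com/db/index.php?title=Category%3AGilles+Peterson&pagefrom=1980"
-- >   "gp-shows.json"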
data MyOptions = MyOptions
{
url :: Maybe String,
outputFilePath :: Maybe FilePath
}
deriving (Data, Typeable, Show, Eq)
myProgOpts :: MyOptions
myProgOpts = MyOptions
{
url = Nothing &= typ "URL" &= help "link to category url"
, outputFilePath = Nothing &= typ "file path" &= help "path to output json file"
}
getOpts :: IO MyOptions
getOpts = cmdArgs $ myProgOpts
&= summary _PROGRAM_INFO
&= help _PROGRAM_ABOUT
&= helpArg [explicit, name "help", name "h"]
&= program _PROGRAM_NAME
_PROGRAM_NAME :: String
_PROGRAM_NAME = "Gilles Peterson Web Crawler"
_PROGRAM_VERSION :: String
_PROGRAM_VERSION = "0.1"
_PROGRAM_INFO :: String
_PROGRAM_INFO = _PROGRAM_NAME ++ " version " ++ _PROGRAM_VERSION
_PROGRAM_ABOUT :: String
_PROGRAM_ABOUT = "Scrapes tracklists of the yearly category urls of Gilles Peterson shows from mixesdb.com, e.g http://bit.ly/1wgd2MX and outputs the result to a JSON file"
optionHandler :: MyOptions -> IO()
optionHandler (MyOptions Nothing _) = print "URL required"
optionHandler (MyOptions _ Nothing) = print "Filepath required"
optionHandler (MyOptions (Just url_) (Just outputFilePath_)) = do
let dir = takeDirectory outputFilePath_
let filePathGood = all ($ outputFilePath_) [isAbsolute, isValid, hasExtension]
fileInputGood <- pure (&&) <*> pure filePathGood <*> doesDirectoryExist dir
unless fileInputGood $ putStrLn ("the path for the outputfile " ++ outputFilePath_ ++ ": is invalid") >> exitWith (ExitFailure 1)
let urlGood = not (null url_)
unless urlGood $ putStrLn "empty url" >> exitWith (ExitFailure 1)
createGPShowFromCategoryURL url_ outputFilePath_
| shaurya0/GPWebCrawler | src/InteractivePrompt.hs | bsd-3-clause | 2,693 | 0 | 12 | 474 | 605 | 307 | 298 | 54 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
module Templates.Containers where
import Text.InterpolatedString.Perl6 (q)
import Types
containerIndexTemplate :: Template
containerIndexTemplate = Template "index.js" template
where template = [q|import COMPONENTContainer from './COMPONENTContainer';
export default COMPONENTContainer;
|]
containerTemplate :: Template
containerTemplate = Template "COMPONENTContainer.js" [q|// @flow
/*
NOTE: This file was auto-generated for a component
named "COMPONENT"; it is intended to be modified as
needed to be useful.
*/
import {connect} from 'react-redux';
import COMPONENT from './COMPONENT';
const mapStateToProps = (state: Object) => {
return {
};
};
const mapDispatchToProps = (dispatch) => {
return {
fn: () => {
dispatch();
},
}
};
const COMPONENTContainer = connect(
mapStateToProps,
mapDispatchToProps,
)(COMPONENT);
export default COMPONENTContainer;
|]
| tpoulsen/generate-component | src/Templates/Containers.hs | bsd-3-clause | 995 | 0 | 6 | 186 | 67 | 43 | 24 | 10 | 1 |
{-# LANGUAGE ExistentialQuantification
, ScopedTypeVariables #-}
module Rad.QL.Define.Field where
import Control.Monad.Trans.Class
import qualified Data.ByteString as B
import Data.ByteString.Builder
import Data.Maybe (fromJust, fromMaybe)
import Data.Monoid ((<>))
import qualified Data.Trie as Trie
import Rad.QL.Internal.Builders
import Rad.QL.Internal.Types
import Rad.QL.AST
import Rad.QL.Define.Util
import Rad.QL.Query
import Rad.QL.Types
class HasFields d m a where
fieldSingleton :: GraphQLFieldDef m a -> d m a ()
field :: forall d m a b. (HasFields d m a, GraphQLValue m b)
=> Name -> FieldDefM a m b -> d m a ()
field n fdef = fieldSingleton fdef'
where fdef' = GraphQLFieldDef
{ fieldDef = def
, fieldResolver = res
}
def = FieldDef n (fdDesc fdef)
(fdArgs fdef)
(graphQLValueTypeRef (undefined :: m b))
(graphQLValueTypeDef (undefined :: m b))
(fdDepr fdef)
res :: QArgs -> a -> QSelectionSet -> Result m
res args = resultM . fdFunc fdef args
resultM :: (Monad m, GraphQLValue m a) => FieldResult m a -> QSelectionSet -> Result m
resultM (Intercept x y) _ = SubResult (x, y)
resultM (Result x ) s = graphQLValueResolve s x
resultM (Cont c ) s = SubResultM (c >>= withErrs (graphQLValueResolve s))
where withErrs fn (Left err) = return err
withErrs fn (Right x) = case fn x of
SubResult a -> return a
SubResultM m -> m
-- special type name resolver
resolveTypeName :: (Monad m) => Name -> FieldRunner m a
resolveTypeName tn _ _ _ = pure $ buildString tn
-- filler token
resolve :: ()
resolve = ()
infixl 1 $->, $->>
($->) :: (Applicative m) => () -> (a -> b) -> FieldDefM a m b
_ $-> f = f <$> self
($->>) :: (Functor m) => () -> (a -> m b) -> FieldDefM a m b
_ $->> f = fieldDefM $ \_ x -> Cont $ Right <$> f x
-- this should be a monad transformer...
-- Field definition monad
data FieldResult m a = Intercept Builder [ B.ByteString ]
| Result a
| Cont (m (Either (Builder, [B.ByteString]) a))
interceptErr :: B.ByteString -> FieldResult m a
interceptErr err = Intercept buildNull [err]
instance (Functor m) => Functor (FieldResult m) where
fmap f (Intercept x y) = Intercept x y
fmap f (Result x) = Result (f x)
fmap f (Cont c) = Cont $ fmap f <$> c
instance (Applicative m) => Applicative (FieldResult m) where
pure = Result
-- intercepts cancel
(Intercept x y) <*> _ = Intercept x y
_ <*> (Intercept x y) = Intercept x y
-- field results are immediately resolved
(Result f) <*> x = f <$> x
f <*> (Result x) = ($ x) <$> f
-- both sides blocked, cry yourself to sleep
(Cont f) <*> (Cont x) = Cont $ applyConts <$> f <*> x
where applyConts (Left k) _ = Left k
applyConts _ (Left k) = Left k
applyConts (Right f) (Right x) = Right (f x)
instance (Monad m) => Monad (FieldResult m) where
(Intercept x y) >>= _ = Intercept x y
(Result x ) >>= k = k x
(Cont x ) >>= k = Cont $ ((fmap k <$> x) >>= unwrapCont)
where unwrapCont (Left err ) = return $ Left err
unwrapCont (Right (Intercept x y)) = return $ Left (x, y)
unwrapCont (Right (Result x )) = return $ Right x
unwrapCont (Right (Cont m )) = m
fieldErr :: B.ByteString -> FieldDefM a m b
fieldErr err = fieldDefM $ \_ _ -> interceptErr err
data FieldDefM a m b = FieldDefM
{ fdDesc :: Description
, fdDepr :: Description
, fdArgs :: ArgumentsDef
, fdFunc :: QArgs -> a -> FieldResult m b
}
infixl 5 <.>
(<.>) :: (Applicative m)
=> (a -> b -> FieldResult m (c -> d))
-> (a -> b -> FieldResult m c)
-> (a -> b -> FieldResult m d)
f1 <.> f2 = \x y -> f1 x y <*> f2 x y
instance (Functor m) => Functor (FieldDefM a m) where
fmap f x = x { fdFunc = \y -> fmap f . fdFunc x y }
fieldDefM :: (QArgs -> a -> FieldResult m b) -> FieldDefM a m b
fieldDefM f = FieldDefM
{ fdDesc = ""
, fdDepr = ""
, fdArgs = []
, fdFunc = f
}
instance (Applicative m) => Applicative (FieldDefM a m) where
f <*> x = x
{ fdDesc = fdDesc f <> fdDesc x
, fdDepr = fdDepr f <> fdDepr x
, fdArgs = fdArgs f <> fdArgs x
, fdFunc = fdFunc f <.> fdFunc x
}
pure x = fieldDefM $ \_ _ -> pure x
instance (Monad m) => Monad (FieldDefM a m) where
m >>= k = m { fdFunc = f' }
where f = fdFunc m
f' x y = (k <$> f x y) >>= \k' -> fdFunc k' x y
m >> k = k { fdDesc = fdDesc m <> fdDesc k
, fdDepr = fdDepr m <> fdDepr k
, fdArgs = fdArgs m <> fdArgs k
}
instance MonadTrans (FieldDefM a) where
lift m = fieldDefM $ \_ _ -> Cont $ Right <$> m
instance (Monad m) => Describable (FieldDefM a m) where
describe d = (pure ()) { fdDesc = d }
instance (Monad m) => Deprecatable (FieldDefM a m) where
deprecate d = (pure ()) { fdDepr = d }
data Validation = OK | ERR B.ByteString deriving (Eq, Show)
assert :: (Applicative m) => B.ByteString -> Bool -> FieldDefM a m ()
assert message cond | cond = pure ()
| otherwise = fieldErr message
validate :: (Applicative m) => b -> (b -> Validation) -> FieldDefM a m ()
validate x f | ERR e <- f x = fieldErr e
| otherwise = pure ()
-- | ARGUMENTS
-- TODO: refactor this into its own thing so we can do mutations
-- arg builders
type Arg m a b = FieldDefM a m b
getArg :: (GraphQLScalar b) => Name -> QArgs -> Maybe b
getArg n = deserialize -- deserialize result
. fromMaybe QEmpty -- create empty entry for nullable case
. qArgsLookup n -- try to get the value
self :: (Applicative m) => FieldDefM a m a
self = fieldDefM $ \_ -> pure . id
arg :: forall m a b. (GraphQLValue m b, GraphQLScalar b) => Name -> FieldDefM a m b
arg n = getter { fdArgs = [def] }
where getter = fieldDefM $ \args _ ->
maybe (interceptErr "Invalid argument")
pure
$ getArg n args
def = InputValueDef n "" t td Nothing
td = graphQLValueTypeDef (undefined :: m b)
t = graphQLValueTypeRef (undefined :: m b)
-- argument combinators
infixl 8 |=
(|=) :: (GraphQLValue m b, GraphQLScalar b)
=> FieldDefM a m (Maybe b) -> b -> FieldDefM a m b
a |= v = fromMaybe v <$> a
infixl 7 @>
(@>) :: (GraphQLValue m b, GraphQLScalar b)
=> FieldDefM a m b -> B.ByteString -> FieldDefM a m b
a @> d = a { fdArgs = [def $ head (fdArgs a)] }
where def (InputValueDef n desc t td def) = InputValueDef n (desc <> d) t td def
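-- Usage sketch (a hypothetical @greeting@ field; assumes OverloadedStrings,
-- scalar/value instances for a text-like type, and an enclosing object
-- definition that accepts 'field'):
--
-- > field "greeting" $ do
-- >   name <- arg "name" |= "world" @> "the name to greet"
-- >   resolve $-> \_parent -> "hello, " <> name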
| jqyu/bustle-chi | src/Rad/QL/Define/Field.hs | bsd-3-clause | 6,813 | 0 | 13 | 2,081 | 2,726 | 1,417 | 1,309 | -1 | -1 |
{-# LANGUAGE BangPatterns #-}
module Main (main) where
import Control.Monad (void, forever)
import Criterion.Main
import Series.Types
import Series.Combinators
import Series.Prelude
import qualified Series.FreeT.Prelude as F
import qualified Series.Prelude.Direct as N
import qualified Series.Producer.Prelude as Pr
import qualified Series.List.Prelude as L
import qualified Control.Monad.Trans.Free as F
import qualified Remorse.FreeT.Prelude as E
import qualified Remorse.FreeT as E
import Prelude hiding (map, filter, drop, take, sum
, iterate, repeat, replicate, splitAt
, takeWhile, enumFrom, enumFromTo
, mapM, scanr, span)
import qualified Prelude as P
import Data.Functor.Identity
import Pipes hiding (yield)
import qualified Pipes
import qualified Pipes.Prelude as PP
import qualified Data.Vector.Unboxed as V
value :: Int
value = 100
big :: Int
big = 10000000
-- -------------------
-- long composition
-- -------------------
long_fused :: Int -> Int
long_fused n = runIdentity $ sum (
(take n
(drop 100
(map (\x -> 3*x + 1)
(filter even
((iterate (\x -> x+1) (10 :: Int) ) :: Series (Of Int) Identity ())
)))))
{-# INLINE long_fused #-}
long_fused_free :: Int -> Int
long_fused_free n = runIdentity $ F.sum (
(F.take n
(F.drop 100
(F.map (\x -> 3*x + 1)
(F.filter even
((F.iterate (\x -> x+1) (10 :: Int) ) :: F.FreeT (Of Int) Identity ())
)))))
{-# INLINE long_fused_free #-}
long_fused_pipes :: Int -> Int
long_fused_pipes n = runIdentity $ Pr.sum (
(Pr.take n
(Pr.drop 100
(Pr.map (\x -> 3*x + 1)
(Pr.filter even
((Pr.iterate (\x -> x+1) (10 :: Int) ) :: Producer Int Identity ())
)))))
{-# INLINE long_fused_pipes #-}
--
long_fused_list :: Int -> Int
long_fused_list n = L.sum (
(L.take n
(L.drop 100
(L.map (\x -> 3*x + 1)
(L.filter even
((L.iterate (\x -> x+1) (10 :: Int) ) :: [Int])
)))))
{-# INLINE long_fused_list #-}
long_naive :: Int -> Int
long_naive n = runIdentity $ N.sum (
(N.take n
(N.drop 100
(N.map (\x -> 3*x + 1)
(N.filter even
((N.iterate (\x -> x+1) (10 :: Int) ) )
)))))
{-# INLINE long_naive #-}
long_list :: Int -> Int
long_list n = P.sum (
(P.take n
(P.drop 100
(P.map (\x -> 3*x + 1)
(P.filter even
((P.iterate (\x -> x+1) (10 :: Int) ) )
)))))
{-# INLINE long_list #-}
long_vector :: Int -> Int
long_vector n = V.sum (
(V.take n
(V.drop 100
(V.map (\x -> 3*x + 1)
(V.filter even
((V.iterateN (n*2+300) (\x -> x+1) (10 :: Int) ) )
)))))
{-# INLINE long_vector #-}
pipe_naive :: Int -> Int
pipe_naive n = runIdentity $
PP.sum $ each (P.iterate (\x -> x+1) (10 :: Int) )
>-> PP.filter even
>-> PP.map (\x -> 3*x + 1)
>-> PP.drop 100
>-> PP.take n
{-# INLINE pipe_naive #-}
long_fused_remorse :: Int -> Int
long_fused_remorse n = runIdentity $ E.sum_ (
(E.take n
(E.drop 100
(E.map (\x -> 3*x + 1)
(E.filter even
((E.iterate (\x -> x+1) (10 :: Int) ) :: E.FreeT (E.Of Int) Identity ())
)))))
{-# INLINE long_fused_remorse #-}
-- -------------------
-- longish compositions
-- -------------------
longish_naive :: Int -> Int
longish_naive n = runIdentity $ N.sum (
(N.take n
(N.drop 100
(N.map (\x -> 3*x + 1)
((N.iterate (\x -> x+1) (10 :: Int) ) )
))))
{-# INLINE longish_naive #-}
longish_list :: Int -> Int
longish_list n = P.sum (
(P.take n
(P.drop 100
(P.map (\x -> 3*x + 1)
((P.iterate (\x -> x+1) (10 :: Int) ) )
))))
{-# INLINE longish_list #-}
longish_vector :: Int -> Int
longish_vector n = V.sum (
(V.take n
(V.drop 100
(V.map (\x -> 3*x + 1)
((V.iterateN (n*2+300) (\x -> x+1) (10 :: Int) ) )
))))
{-# INLINE longish_vector #-}
longish_pipe :: Int -> Int
longish_pipe n = runIdentity $
PP.sum $ each (P.iterate (\x -> x+1) (10 :: Int) )
>-> PP.map (\x -> 3*x + 1)
>-> PP.drop 100
>-> PP.take n
{-# INLINE longish_pipe #-}
longish_remorse :: Int -> Int
longish_remorse n = runIdentity $ E.sum_ (
(E.take n
(E.drop 100
(E.map (\x -> 3*x + 1)
((E.iterate (\x -> x+1) (10 :: Int) ) :: E.FreeT (E.Of Int) Identity ())
))))
{-# INLINE longish_remorse #-}
--
-- -------------------
-- shortish compositions
-- -------------------
shortish_naive :: Int -> Int
shortish_naive n = runIdentity $ N.sum (
(N.take n
(N.map (\x -> 3*x + 1)
((N.iterate (\x -> x+1) (10 :: Int) ) )
)))
{-# INLINE shortish_naive #-}
shortish_list :: Int -> Int
shortish_list n = P.sum (
(P.take n
(P.map (\x -> 3*x + 1)
((P.iterate (\x -> x+1) (10 :: Int) ) )
)))
{-# INLINE shortish_list #-}
shortish_vector :: Int -> Int
shortish_vector n = V.sum (
(V.take n
(V.map (\x -> 3*x + 1)
((V.iterateN (n*2+300) (\x -> x+1) (10 :: Int) ) )
)))
{-# INLINE shortish_vector #-}
shortish_pipe :: Int -> Int
shortish_pipe n = runIdentity $
PP.sum $ each (P.iterate (\x -> x+1) (10 :: Int) )
>-> PP.map (\x -> 3*x + 1)
>-> PP.take n
{-# INLINE shortish_pipe #-}
shortish_remorse :: Int -> Int
shortish_remorse n = runIdentity $ E.sum_ (
(E.take n
(E.map (\x -> 3*x + 1)
((E.iterate (\x -> x+1) (10 :: Int) ) :: E.FreeT (E.Of Int) Identity ())
)))
{-# INLINE shortish_remorse #-}
-- -------------------
-- shorter composition
-- -------------------
short_naive :: Int -> Int
short_naive = \n -> runIdentity $ N.sum (N.take n (N.iterate (\x -> x+1) (10 :: Int) :: Series (Of Int) Identity ()))
{-# INLINE short_naive #-}
short_free :: Int -> Int
short_free = \n -> runIdentity $ F.sum (F.take n (F.iterate (\x -> x+1) (10 :: Int) :: F.FreeT (Of Int) Identity ()))
{-# INLINE short_free #-}
short_fused :: Int -> Int
short_fused = \n -> runIdentity $ sum (take n (iterate (\x -> x+1) (10 :: Int) :: Series (Of Int) Identity ()))
{-# INLINE short_fused #-}
short_producer :: Int -> Int
short_producer = \n -> runIdentity $ Pr.sum (Pr.take n (Pr.iterate (\x -> x+1) (10 :: Int) :: Producer Int Identity ()))
{-# INLINE short_producer #-}
short_fused_list :: Int -> Int
short_fused_list = \n -> L.sum (L.take n
( L.iterate (\x -> x+1) (10 :: Int) ))
{-# INLINE short_fused_list #-}
short_list :: Int -> Int
short_list = \n -> P.sum (P.take n (P.iterate (\x -> x+1) (10 :: Int)))
{-# INLINE short_list #-}
short_vector :: Int -> Int
short_vector = \n -> V.sum (V.take n (V.iterateN (n*2) (\x -> x+1) (10 :: Int)))
{-# INLINE short_vector #-}
short_producer_naive :: Int -> Int
short_producer_naive = \n -> runIdentity $
PP.sum (each (P.iterate (\x -> x+1) (10 :: Int) )
>-> PP.take n
)
{-# INLINE short_producer_naive #-}
short_remorse :: Int -> Int
short_remorse = \n -> runIdentity $
E.sum_ (E.take n (E.iterate (\x -> x+1) (10 :: Int) :: E.FreeT (E.Of Int) Identity ()))
{-# INLINE short_remorse #-}
-- -------------------
-- simple sum
-- -------------------
rN :: Int -> Int
rN n = runIdentity (N.sum (N.replicate n 1))
{-# INLINE rN #-}
rF :: Int -> Int
rF n = runIdentity (sum (replicate n 1))
{-# INLINE rF #-}
rFr :: Int -> Int
rFr n = runIdentity (F.sum (F.replicate n 1))
{-# INLINE rFr #-}
rPr :: Int -> Int
rPr n = runIdentity (Pr.sum (Pr.replicate n 1))
{-# INLINE rPr #-}
rPrN :: Int -> Int
rPrN n = runIdentity (PP.sum (each (P.replicate n 1)))
{-# INLINE rPrN #-}
rL :: Int -> Int
rL n = P.sum (P.replicate n 1)
{-# INLINE rL #-}
rl :: Int -> Int
rl n = L.sum (L.replicate n 1)
{-# INLINE rl #-}
rV :: Int -> Int
rV n = V.sum (V.replicate n 1)
{-# INLINE rV #-}
rFu :: Int -> Int
rFu n = runIdentity (E.sum_ (E.replicate n 1))
{-# INLINE rFu #-}
-- -----
-- enum
-- -----
enumFromStep_ :: Int -> Int -> Int -> Producer Int Identity ()
enumFromStep_ n1 s n2 = loop n1 n2
where
loop !step 0 = return ()
loop step n = do
Pipes.yield $! step
loop (step + s) $! (n - 1)
{-# INLINABLE enumFromStep_ #-}
z :: Int
z = 0
enum_naive n = runIdentity (N.sum (N.map (+7) (N.enumFromStepN z 10 (n*3))))
{-# INLINE enum_naive #-}
enum_free n = runIdentity (F.sum (F.map (+7) (F.enumFromStepN z 10 (n*3))))
{-# INLINE enum_free #-}
enum_pipe n = runIdentity (Pr.sum (Pr.map (+7) (Pr.enumFromStepN z 10 (n*3))))
{-# INLINE enum_pipe #-}
enum_pipe_naive n = runIdentity (PP.sum (enumFromStep_ z 10 (n*30) >-> PP.map (+7)))
{-# INLINE enum_pipe_naive #-}
enum_fused n = runIdentity (sum (map (+7) (enumFromStepN z 10 (n*3))))
{-# INLINE enum_fused #-}
enum_vector n = V.sum (V.map (+7) (V.enumFromStepN z 10 (n*3)))
{-# INLINE enum_vector #-}
enum_list n = P.sum (P.map (+7) (P.take (n*3) [z, 10 ..]))
{-# INLINE enum_list #-}
enum_list_fused n = L.sum (L.map (+7) (L.take (n*3) (L.enumFromStepN z 10 (n*3))))
{-# INLINE enum_list_fused #-}
--
enum_naive_dot = runIdentity . N.sum . N.map (+7) . N.enumFromStepN z 10 . (*3)
{-# INLINE enum_naive_dot #-}
enum_free_dot = runIdentity . F.sum . F.map (+7) . F.enumFromStepN z 10 . (*3)
{-# INLINE enum_free_dot #-}
enum_pipe_dot = runIdentity . Pr.sum . Pr.map (+7) . Pr.enumFromStepN z 10 . (*3)
{-# INLINE enum_pipe_dot #-}
enum_fused_dot = runIdentity . sum . map (+7) . enumFromStepN z 10 . (*3)
{-# INLINE enum_fused_dot #-}
enum_vector_dot = V.sum . V.map (+7) . V.enumFromStepN z 10 . (*3)
{-# INLINE enum_vector_dot #-}
enum_list_dot = P.sum . P.map (+7) . (\n -> [z, 10 .. n*30])
{-# INLINE enum_list_dot #-}
enum_list_fused_dot = L.sum . L.map (+7) . L.enumFromStepN z 10 . (*3)
{-# INLINE enum_list_fused_dot #-}
goo :: Int -> Int
goo = runIdentity . sum . take 1000 . forever . yield
{-# INLINE goo #-}
gooo :: Int -> Int
gooo = runIdentity . N.sum . N.take 1000 . forever . (\a -> Construct (a :> Done ()))
{-# INLINE gooo #-}
goooo :: Int -> Int
goooo = runIdentity . sum . replicate 1000
{-# INLINE goooo #-}
main :: IO ()
main =
defaultMain
[
-- bgroup "sum.map.enumFromTo"
-- [ bench "vector" $ whnf enum_vector value
-- , bench "series" $ whnf enum_naive value
-- -- , bench "pipes" $ whnf enum_pipe_naive value
-- , bench "list" $ whnf enum_list value
-- , bench "FOLDING/series" $ whnf enum_fused value
-- , bench "FOLDING/freet" $ whnf enum_free value
-- , bench "FOLDING/pipes" $ whnf enum_pipe value
-- , bench "FOLDING/list" $ whnf enum_list_fused value
-- ]
-- , bgroup "sum.map.enumFromTo.pointfree"
-- [ bench "vector" $ whnf enum_vector_dot value
-- , bench "series" $ whnf enum_naive_dot value
-- , bench "list" $ whnf enum_list_dot value
-- , bench "FOLDING/series" $ whnf enum_fused_dot value
-- , bench "FOLDING/freet" $ whnf enum_fused_dot value
-- , bench "FOLDING/pipes" $ whnf enum_pipe_dot value
-- , bench "FOLDING/list" $ whnf enum_list_fused_dot value
-- ]
-- ,
bgroup "sum.replicate"
[ bench "vector" $ whnf rV value
, bench "series" $ whnf rN value
, bench "pipes" $ whnf rPrN value
, bench "list" $ whnf rL value
-- , bench "remorse" $ whnf rFu value
, bench "FOLDING/series" $ whnf rF value
, bench "FOLDING/freet" $ whnf rFr value
, bench "FOLDING/list" $ whnf rl value
-- , bench "FOLDING/pipes" $ whnf rPr value
]
, bgroup "sum.take.iterate"
[ bench "vector" $ whnf short_vector value
, bench "series" $ whnf short_naive value
-- , bench "pipes" $ whnf short_producer_naive value
, bench "list" $ whnf short_list value
-- , bench "remorse" $ whnf short_remorse value
, bench "FOLDING/series" $ whnf short_fused value
, bench "FOLDING/freet" $ whnf short_free value
, bench "FOLDING/list" $ whnf short_fused_list value
-- , bench "FOLDING/pipes" $ whnf short_producer value
]
, bgroup "sum.take.map.iterate"
[bench "vector" $ whnf shortish_vector value
, bench "series" $ whnf shortish_naive value
-- , bench "remorse" $ whnf shortish_remorse value
, bench "list" $ whnf shortish_list value
, bench "FOLDING/series" $ whnf short_fused value
, bench "FOLDING/list" $ whnf short_fused_list value
-- , bench "FOLDING/freet" $ whnf short_fused_free value
-- , bench "FOLDING/pipes" $ whnf short_fused_pipes value
]
, bgroup "sum.take.map.drop.iterate"
[bench "vector" $ whnf longish_vector value
, bench "series" $ whnf longish_naive value
-- , bench "remorse" $ whnf longish_remorse value
, bench "list" $ whnf longish_list value
, bench "FOLDING/series" $ whnf long_fused value
, bench "FOLDING/list" $ whnf long_fused_list value
, bench "FOLDING/freet" $ whnf long_fused_free value
-- , bench "FOLDING/pipes" $ whnf long_fused_pipes value
]
, bgroup "sum.take.map.drop.filter.iterate"
[bench "vector" $ whnf long_vector value
, bench "series" $ whnf long_naive value
-- , bench "remorse" $ whnf long_fused_remorse value
, bench "list" $ whnf long_list value
, bench "FOLDING/series" $ whnf long_fused value
-- , bench "FOLDING/list" $ whnf long_fused_list value
, bench "FOLDING/freet" $ whnf long_fused_free value
-- , bench "FOLDING/pipes" $ whnf long_fused_pipes value
]
]
| michaelt/series | benchmarks/benchmarks.hs | bsd-3-clause | 14,211 | 0 | 21 | 4,052 | 4,920 | 2,665 | 2,255 | 308 | 2 |
module Main (main) where
import qualified Data.ByteString.Lazy as BL
import Data.Binary.Get
import Data.Binary.Put
import Codec.Tracker.S3M
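-- Round-trips an S3M module: parse it from stdin with 'getModule',
-- re-serialize it with 'putModule', and write the result to stdout.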
main :: IO ()
main = BL.putStrLn . runPut <$> putModule <$> runGet getModule =<< BL.getContents
| riottracker/modfile | examples/putS3M.hs | bsd-3-clause | 278 | 0 | 9 | 73 | 74 | 44 | 30 | 7 | 1 |
import Data.Matrix
import qualified Data.Vector as V
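-- Project Euler, Problem 11: find the greatest product of `size` adjacent
-- numbers (within a row, column, or diagonal) in the 20x20 grid read from
-- "./20-20grid.txt", by scanning every `size` x `size` submatrix.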
intGrid :: [Char] -> [[Int]]
intGrid = fmap (fmap read . words) . lines
toMatrix :: String -> Matrix Int
toMatrix = fromLists . intGrid
size = 4 :: Int
subs :: Matrix a -> [Matrix a]
subs m = do
i <- [1..nrows m - size + 1]
j <- [1..ncols m - size + 1]
return $ submatrix i (i+size-1) j (j+size-1) m
nozero :: Matrix Int -> Bool
nozero = any (==0) . toList
products :: Matrix Int -> [Int]
products m = fmap product $ r : d : cs ++ rs
where
rs = fmap (\i -> getRow i m) [1..nrows m]
cs = fmap (\i -> getCol i m) [1..ncols m]
d = getDiag m
r = V.fromList $ fmap (\i -> getElem i (n - i + 1) m) [1..n]
n = min (nrows m) (ncols m)
main = do
s <- readFile "./20-20grid.txt"
let m = toMatrix $ s
print . maximum . fmap (maximum . products) . subs $ m
| mskmysht/ProjectEuler | src/Problem11.hs | bsd-3-clause | 848 | 0 | 14 | 226 | 463 | 238 | 225 | 25 | 1 |
{-# LANGUAGE ForeignFunctionInterface #-}
module Main where
import System.Event (EventManager)
import GHC.Conc.Sync (sharedCAF)
import Foreign.Ptr (Ptr)
import Data.IORef (IORef, newIORef, readIORef)
import System.IO.Unsafe (unsafePerformIO)
import Control.Monad
import Control.Concurrent
import System.Timeout.Resetable.ADT
--------------------------------------------------------------------------------
main :: IO ()
main = do
Just mgr <- readIORef eventManager
k <- register mgr 500000 $ putStrLn "Timeout!"
pause k
replicateM_ 10 $ do
threadDelay 100000
reset k
-- cancel k
threadDelay 1000000
--------------------------------------------------------------------------------
{-# NOINLINE eventManager #-}
eventManager :: IORef (Maybe EventManager)
eventManager = unsafePerformIO $ do
em <- newIORef Nothing
sharedCAF em getOrSetSystemEventThreadEventManagerStore
foreign import ccall unsafe "getOrSetSystemEventThreadEventManagerStore"
getOrSetSystemEventThreadEventManagerStore :: Ptr a -> IO (Ptr a)
-- The End ---------------------------------------------------------------------
| basvandijk/resettable-timeouts | test.hs | bsd-3-clause | 1,162 | 0 | 10 | 179 | 237 | 123 | 114 | 26 | 1 |
module Channel where
import qualified Data.HashMap.Lazy as HM
import qualified User as U
import Control.Concurrent.MVar
import Control.Concurrent
import Utility
data Channel = Channel {
name :: String,
topic :: MVar String,
users :: MVar [MVar U.User]
}
-- sendToChannel takes the channel, a user to exclude, and the message itself, and
-- sends the message to all users in the channel except the given one
sendToChannel :: Channel -> MVar U.User -> String -> IO ()
sendToChannel chan origin msg = do
users <- readMVar (users chan)
let dumpRes _ = return ()
_ <- forkFinally (sendToChannelAsync users origin msg) dumpRes
return ()
sendToChannelAsync :: [MVar U.User] -> MVar U.User -> String -> IO ()
sendToChannelAsync users origin msg = mapM_ sendToUser users
where
sendToUser use = if use == origin then return ()
else do
targetUser <- takeMVar use
sendSafe (U.handle targetUser) msg
createChannel :: String -> String -> IO Channel
createChannel nam top = do
newTopic <- newMVar top
userList <- newMVar []
return $ Channel nam newTopic userList
changeTopic :: Channel -> String -> IO ()
changeTopic chan newTopic = do
oldTopic <- takeMVar $ topic chan
putMVar (topic chan) newTopic
addUserToChannel :: Channel -> MVar U.User -> IO ()
addUserToChannel chan userVar = do
userList <- takeMVar $ users chan
putMVar (users chan) (userVar : userList)
removeUserFromChannel :: Channel -> MVar U.User -> IO ()
removeUserFromChannel chan userVar = do
userList <- takeMVar $ users chan
let newUserList = filter (/=userVar) userList
putMVar (users chan) newUserList
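-- Usage sketch (assuming a @senderVar :: MVar U.User@ already in hand):
--
-- > chan <- createChannel "#haskell" "All things Haskell"
-- > addUserToChannel chan senderVar
-- > sendToChannel chan senderVar "hello"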
| allonsy/chirp | src/Channel.hs | bsd-3-clause | 1,614 | 0 | 13 | 320 | 548 | 268 | 280 | 40 | 2 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE UndecidableInstances #-}
module Data.CSV.Conduit
(
-- * Main Interface
decodeCSV
, readCSVFile
, writeCSVFile
, transformCSV
, transformCSV'
, mapCSVFile
, writeHeaders
-- Types
, CSV (..)
, CSVSettings (..)
, defCSVSettings
, MapRow
, Row
-- * Re-exported For Convenience
, runResourceT
) where
-------------------------------------------------------------------------------
import Control.Exception
import Control.Monad.Morph
import Control.Monad.Primitive
import Control.Monad.ST
import Control.Monad.Trans
import Control.Monad.Trans.Resource (MonadResource, MonadThrow,
runExceptionT,
runResourceT)
import Data.Attoparsec.Types (Parser)
import qualified Data.ByteString as B
import Data.ByteString.Char8 (ByteString)
import qualified Data.ByteString.Char8 as B8
import Data.ByteString.Internal (c2w)
import Data.Conduit
import Data.Conduit.Attoparsec
import Data.Conduit.Binary (sinkFile, sinkIOHandle,
sourceFile)
import qualified Data.Conduit.List as C
import qualified Data.Map as M
import Data.String
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Data.Vector as V
import qualified Data.Vector.Generic as GV
import qualified Data.Vector.Generic.Mutable as GMV
import System.IO
-------------------------------------------------------------------------------
import Data.CSV.Conduit.Conversion (FromNamedRecord (..),
Named (..),
ToNamedRecord (..),
runParser)
import qualified Data.CSV.Conduit.Parser.ByteString as BSP
import qualified Data.CSV.Conduit.Parser.Text as TP
import Data.CSV.Conduit.Types
-------------------------------------------------------------------------------
-------------------------------------------------------------------------------
-- | Represents types 'r' that are CSV-like and can be converted
-- to/from an underlying stream of type 's'. There is nothing scary
-- about the type:
--
-- @s@ represents stream types that can be converted to\/from CSV rows.
-- Examples are 'ByteString', 'Text' and 'String'.
--
-- @r@ represents the target CSV row representations that this library
-- can work with. Examples are the 'Row' types, the 'Record' type and
-- the 'MapRow' family of types. We can also convert directly to
-- complex Haskell types using the 'Data.CSV.Conduit.Conversion'
-- module that was borrowed from the cassava package, which was itself
-- inspired by the aeson package.
--
--
-- Example #1: Basics Using Convenience API
--
-- >import Data.Conduit
-- >import Data.Conduit.Binary
-- >import Data.Conduit.List as CL
-- >import Data.CSV.Conduit
-- >
-- >myProcessor :: Conduit (Row Text) m (Row Text)
-- >myProcessor = CL.map reverse
-- >
-- >test = runResourceT $
-- > transformCSV defCSVSettings
-- > (sourceFile "input.csv")
-- > myProcessor
-- > (sinkFile "output.csv")
--
--
-- Example #2: Basics Using Conduit API
--
-- >import Data.Conduit
-- >import Data.Conduit.Binary
-- >import Data.CSV.Conduit
-- >
-- >myProcessor :: Conduit (MapRow Text) m (MapRow Text)
-- >myProcessor = undefined
-- >
-- >test = runResourceT $
-- > sourceFile "test/BigFile.csv" $=
-- > intoCSV defCSVSettings $=
-- > myProcessor $=
-- > (writeHeaders defCSVSettings >> fromCSV defCSVSettings) $$
-- > sinkFile "test/BigFileOut.csv"
class CSV s r where
-----------------------------------------------------------------------------
-- | Convert a CSV row into strict ByteString equivalent.
rowToStr :: CSVSettings -> r -> s
-----------------------------------------------------------------------------
-- | Turn a stream of 's' into a stream of CSV row type. An example
-- would be parsing a ByteString stream as rows of 'MapRow' 'Text'.
intoCSV :: (MonadThrow m) => CSVSettings -> Conduit s m r
-----------------------------------------------------------------------------
-- | Turn a stream of CSV row type back into a stream of 's'. An
-- example would be rendering a stream of 'Row' 'ByteString' rows as
-- 'Text'.
fromCSV :: Monad m => CSVSettings -> Conduit r m s
------------------------------------------------------------------------------
-- | 'Row' instance using 'ByteString'
instance CSV ByteString (Row ByteString) where
rowToStr s !r =
let
sep = B.pack [c2w (csvSep s)]
wrapField !f = case csvQuoteChar s of
Just !x -> (x `B8.cons` escape x f) `B8.snoc` x
_ -> f
escape c str = B8.intercalate (B8.pack [c,c]) $ B8.split c str
in B.intercalate sep . map wrapField $ r
intoCSV set = intoCSVRow (BSP.row set)
fromCSV set = fromCSVRow set
------------------------------------------------------------------------------
-- | 'Row' instance using 'Text'
instance CSV Text (Row Text) where
rowToStr s !r =
let
sep = T.pack [csvSep s]
wrapField !f = case csvQuoteChar s of
Just !x -> x `T.cons` escape x f `T.snoc` x
_ -> f
escape c str = T.intercalate (T.pack [c,c]) $ T.split (== c) str
in T.intercalate sep . map wrapField $ r
intoCSV set = intoCSVRow (TP.row set)
fromCSV set = fromCSVRow set
-------------------------------------------------------------------------------
-- | 'Row' instance using 'Text' based on 'ByteString' stream
instance CSV ByteString (Row Text) where
rowToStr s r = T.encodeUtf8 $ rowToStr s r
intoCSV set = intoCSV set =$= C.map (map T.decodeUtf8)
fromCSV set = fromCSV set =$= C.map T.encodeUtf8
-------------------------------------------------------------------------------
-- | 'Row' instance using 'String' based on 'ByteString' stream.
-- Please note this uses the ByteString operations underneath and has
-- lots of unnecessary overhead. Included for convenience.
instance CSV ByteString (Row String) where
rowToStr s r = rowToStr s $ map B8.pack r
intoCSV set = intoCSV set =$= C.map (map B8.unpack)
fromCSV set = C.map (map B8.pack) =$= fromCSV set
-- | Support for parsing rows in the 'Vector' form.
instance (CSV s (Row s)) => CSV s (V.Vector s) where
rowToStr s r = rowToStr s . V.toList $ r
intoCSV set = intoCSV set =$= C.map (V.fromList)
fromCSV set = C.map (V.toList) =$= fromCSV set
-------------------------------------------------------------------------------
fromCSVRow :: (Monad m, IsString s, CSV s r)
=> CSVSettings -> Conduit r m s
fromCSVRow set = awaitForever $ \row -> mapM_ yield [rowToStr set row, "\n"]
-------------------------------------------------------------------------------
intoCSVRow :: (MonadThrow m, AttoparsecInput i) => Parser i (Maybe o) -> Conduit i m o
intoCSVRow p = parse =$= puller
where
parse = {-# SCC "conduitParser_p" #-} conduitParser p
puller = {-# SCC "puller" #-}
awaitForever $ \ (_, mrow) -> maybe (return ()) yield mrow
-------------------------------------------------------------------------------
-- | Generic 'MapRow' instance; any stream type with a 'Row' instance
-- automatically gets a 'MapRow' instance.
instance (CSV s (Row s'), Ord s', IsString s) => CSV s (MapRow s') where
rowToStr s r = rowToStr s . M.elems $ r
intoCSV set = intoCSVMap set
fromCSV set = fromCSVMap set
-------------------------------------------------------------------------------
intoCSVMap :: (Ord a, MonadThrow m, CSV s [a])
=> CSVSettings -> Conduit s m (MapRow a)
intoCSVMap set = intoCSV set =$= (headers >>= converter)
where
headers = do
mrow <- await
case mrow of
Nothing -> return []
Just [] -> headers
Just hs -> return hs
converter hs = awaitForever $ yield . toMapCSV hs
toMapCSV !hs !fs = M.fromList $ zip hs fs
-- | Conversion of stream directly to/from a custom complex haskell
-- type.
instance (FromNamedRecord a, ToNamedRecord a, CSV s (MapRow ByteString)) =>
CSV s (Named a) where
rowToStr s a = rowToStr s . toNamedRecord . getNamed $ a
intoCSV set = intoCSV set =$= C.mapMaybe go
where
go x = either (const Nothing) (Just . Named) $
runParser (parseNamedRecord x)
fromCSV set = C.map go =$= fromCSV set
where
go = toNamedRecord . getNamed
-------------------------------------------------------------------------------
fromCSVMap :: (Monad m, IsString s, CSV s [a])
=> CSVSettings -> Conduit (M.Map k a) m s
fromCSVMap set = awaitForever push
where
push r = mapM_ yield [rowToStr set (M.elems r), "\n"]
-------------------------------------------------------------------------------
-- | Write headers AND the row into the output stream, once. If you
-- don't call this while using the 'MapRow' family of row types, then your
-- resulting output will NOT have any headers in it.
--
-- Usage: Just chain this using the 'Monad' instance in your pipeline:
--
-- > ... =$= writeHeaders settings >> fromCSV settings $$ sinkFile "..."
writeHeaders
:: (Monad m, CSV s (Row r), IsString s)
=> CSVSettings
-> Conduit (MapRow r) m s
writeHeaders set = do
mrow <- await
case mrow of
Nothing -> return ()
Just row -> mapM_ yield [ rowToStr set (M.keys row)
, "\n"
, rowToStr set (M.elems row)
, "\n" ]
---------------------------
-- Convenience Functions --
---------------------------
-------------------------------------------------------------------------------
-- | Read the entire contents of a CSV file into memory.
-- readCSVFile
-- :: (GV.Vector v a, CSV ByteString a)
-- => CSVSettings
-- -- ^ Settings to use in deciphering stream
-- -> FilePath
-- -- ^ Input file
-- -> IO (v a)
readCSVFile :: (MonadIO m, CSV ByteString a) => CSVSettings -> FilePath -> m (V.Vector a)
readCSVFile set fp = liftIO . runResourceT $ sourceFile fp $= intoCSV set $$ hoist lift (sinkVector 10)
-------------------------------------------------------------------------------
-- | A simple way to decode a CSV string. Don't be alarmed by the
-- polymorphic nature of the signature. 's' is the type for the string
-- and 'v' is a kind of 'Vector' here.
--
-- For example for 'ByteString':
--
-- >>> s <- LB.readFile "my.csv"
-- >>> decodeCSV 'def' s :: Vector (Vector ByteString)
--
-- will just work.
decodeCSV
:: (GV.Vector v a, CSV s a)
=> CSVSettings
-> s
-> Either SomeException (v a)
decodeCSV set bs = runST $ runExceptionT $ C.sourceList [bs] $= intoCSV set $$ hoist lift (sinkVector 10)
-------------------------------------------------------------------------------
-- | Write CSV data into a file. As we use a 'ByteString' sink, you'll
-- need to get your data into a 'ByteString' stream type.
writeCSVFile
:: (CSV ByteString a)
=> CSVSettings
-- ^ CSV Settings
-> FilePath
-- ^ Target file
-> IOMode
-- ^ Write vs. append mode
-> [a]
-- ^ List of rows
-> IO ()
writeCSVFile set fo fmode rows = runResourceT $ do
C.sourceList rows $= fromCSV set $$
sinkIOHandle (openFile fo fmode)
-------------------------------------------------------------------------------
-- | Map over the rows of a CSV file. Provided for convenience for
-- historical reasons.
--
-- An easy way to run this function would be 'runResourceT' after
-- feeding it all the arguments.
mapCSVFile
:: (MonadResource m, MonadThrow m, CSV ByteString a, CSV ByteString b)
=> CSVSettings
-- ^ Settings to use both for both input and output
-> (a -> [b])
-- ^ A mapping function
-> FilePath
-- ^ Input file
-> FilePath
-- ^ Output file
-> m ()
mapCSVFile set f fi fo =
transformCSV set (sourceFile fi) (C.concatMap f) (sinkFile fo)
-------------------------------------------------------------------------------
-- | Like transformCSV' but uses the same settings for both input and
-- output.
transformCSV
:: (MonadThrow m, CSV s a, CSV s' b)
=> CSVSettings
-- ^ Settings to be used for both input and output
-> Source m s
-- ^ A raw stream data source. Ex: 'sourceFile inFile'
-> Conduit a m b
-- ^ A transforming conduit
-> Sink s' m ()
-- ^ A raw stream data sink. Ex: 'sinkFile outFile'
-> m ()
transformCSV set = transformCSV' set set
-------------------------------------------------------------------------------
-- | General purpose CSV transformer. Apply a list-like processing
-- function from 'Data.Conduit.List' to the rows of a CSV stream. You
-- need to provide a stream data source, a transformer and a stream
-- data sink.
--
-- An easy way to run this function would be 'runResourceT' after
-- feeding it all the arguments.
--
-- Example - map a function over the rows of a CSV file:
--
-- > transformCSV setIn setOut (sourceFile inFile) (C.map f) (sinkFile outFile)
transformCSV'
:: (MonadThrow m, CSV s a, CSV s' b)
=> CSVSettings
-- ^ Settings to be used for input
-> CSVSettings
-- ^ Settings to be used for output
-> Source m s
-- ^ A raw stream data source. Ex: 'sourceFile inFile'
-> Conduit a m b
-- ^ A transforming conduit
-> Sink s' m ()
-- ^ A raw stream data sink. Ex: 'sinkFile outFile'
-> m ()
transformCSV' setIn setOut source c sink =
source $=
intoCSV setIn $=
c $=
fromCSV setOut $$
sink
------------------
-- Vector Utils --
------------------
-------------------------------------------------------------------------------
-- | An efficient sink that incrementally grows a vector from the input stream
sinkVector :: (PrimMonad m, GV.Vector v a) => Int -> ConduitM a o m (v a)
sinkVector by = do
v <- lift $ GMV.new by
go 0 v
where
-- i is the index of the next element to be written by go
-- also exactly the number of elements in v so far
go i v = do
res <- await
case res of
Nothing -> do
v' <- lift $ GV.freeze $ GMV.slice 0 i v
return $! v'
Just x -> do
v' <- case GMV.length v == i of
True -> lift $ GMV.grow v by
False -> return v
lift $ GMV.write v' i x
go (i+1) v'
| mohsen3/csv-conduit | src/Data/CSV/Conduit.hs | bsd-3-clause | 15,361 | 0 | 20 | 3,977 | 2,874 | 1,557 | 1,317 | 207 | 3 |
{-# LANGUAGE PackageImports #-}
-- http://graphics.cs.brown.edu/games/quake/quake3.html#RenderPatch
module Q3Patch where
import Control.Monad
import "linear" Linear
import Data.Vector (Vector,(!))
import qualified Data.Vector as V
import qualified Data.Vector.Mutable as MV
import BSP
tessellate :: Vector DrawVertex -> Int -> (Vector DrawVertex,Vector Int)
tessellate controls level = (v,stripsI)
where
plus (DrawVertex p1 d1 l1 n1 c1) (DrawVertex p2 d2 l2 n2 c2) = DrawVertex (p1 + p2) (d1 + d2) (l1 + l2) (n1 + n2) (c1 + c2)
mult (DrawVertex p d l n c) f = DrawVertex (p ^* f) (d ^* f) (l ^* f) (n ^* f) (c ^* f)
mix a c0 c1 c2 = let b = 1 - a in (c0 `mult` (b * b)) `plus` (c1 `mult` (2 * b * a)) `plus` (c2 `mult` (a * a))
l1 = level + 1
v = V.create $ do
vertex <- MV.new (l1*l1)
forM_ [0..level] $ \i -> let a = fromIntegral i / fromIntegral level in MV.write vertex i $ mix a (controls ! 0) (controls ! 3) (controls ! 6)
forM_ [1..level] $ \i -> do
let a = fromIntegral i / fromIntegral level
c0 = mix a (controls ! 0) (controls ! 1) (controls ! 2)
c1 = mix a (controls ! 3) (controls ! 4) (controls ! 5)
c2 = mix a (controls ! 6) (controls ! 7) (controls ! 8)
forM_ [0..level] $ \j -> let a' = fromIntegral j / fromIntegral level in MV.write vertex (i * l1 + j) $ mix a' c0 c1 c2
return vertex
-- merge triangle strips using degenerate triangles
idx row col2 | col2 `mod` 2 == 1 = (row + 1) * l1 + col2 `div` 2
| otherwise = row * l1 + col2 `div` 2
strips = [V.generate (l1*2) (idx row) | row <- [0..level-1]]
separate (a:b:c:xs) = a:b:c:separate (b:c:xs)
separate [] = []
trisI = V.concat [V.fromList $ separate $ V.toList s | s <- strips]
stripsI = V.concat [V.concat [h,s,l] | s <- strips -- concatenated triangle strips using degenerated triangles
, let h = V.singleton $ V.head s -- degenerate triangles will be shown in line polygon mode
, let l = V.singleton $ V.last s
]
{-
tess c l = [f u v | v <- [0..l], u <- [0..l]]
mo = Mat3 (Vec3 1 (-2) 1)
(Vec3 (-2) 2 0)
(Vec3 1 0 0)
------------------------
m = Mat3 (Vec3 1 0 0)
(Vec3 (-2) 2 0)
(Vec3 1 (-2) 1)
m' = transpose m
cm c f = Mat3 (Vec3 (a 0) (a 1) (a 2))
(Vec3 (a 3) (a 4) (a 5))
(Vec3 (a 6) (a 7) (a 8))
where a n = f $ c ! n
fn u v c f = Vec3 1 u u^2 *. m .*. cm c f .*. m' .* Vec3 1 v v^2
p u v c = Vec3 (fn u v c _1) (fn u v c _2) (fn u v c _3)
tess c l = [p u v c | u <- [0..l], v <- [0..l]]
-}
| csabahruska/gpipe-quake3 | Q3Patch.hs | bsd-3-clause | 2,718 | 0 | 22 | 859 | 979 | 519 | 460 | 33 | 2 |
module W3C.TurtleTest where
import Test.Tasty
import qualified Test.Tasty.HUnit as TU
import Data.Maybe (fromJust)
import qualified Data.Text as T
import W3C.Manifest
import W3C.W3CAssertions
import Data.RDF.Types
import Data.RDF.Query
import Text.RDF.RDF4H.TurtleParser
import Text.RDF.RDF4H.NTriplesParser
import Data.RDF.Graph.TriplesList
tests :: Manifest -> TestTree
tests = runManifestTests mfEntryToTest
mfEntryToTest :: TestEntry -> TestTree
mfEntryToTest (TestTurtleEval nm _ _ act' res') =
let act = (UNode . fromJust . fileSchemeToFilePath) act'
res = (UNode . fromJust . fileSchemeToFilePath) res'
parsedRDF = parseFile testParser (nodeURI act) >>= return . fromEither :: IO TriplesList
expectedRDF = parseFile NTriplesParser (nodeURI res) >>= return . fromEither :: IO TriplesList
in TU.testCase (T.unpack nm) $ assertIsIsomorphic parsedRDF expectedRDF
mfEntryToTest (TestTurtleNegativeEval nm _ _ act') =
let act = (UNode . fromJust . fileSchemeToFilePath) act'
rdf = parseFile testParser (nodeURI act) :: IO (Either ParseFailure TriplesList)
in TU.testCase (T.unpack nm) $ assertIsNotParsed rdf
mfEntryToTest (TestTurtlePositiveSyntax nm _ _ act') =
let act = (UNode . fromJust . fileSchemeToFilePath) act'
rdf = parseFile testParser (nodeURI act) :: IO (Either ParseFailure TriplesList)
in TU.testCase (T.unpack nm) $ assertIsParsed rdf
mfEntryToTest (TestTurtleNegativeSyntax nm _ _ act') =
let act = (UNode . fromJust . fileSchemeToFilePath) act'
rdf = parseFile testParser (nodeURI act) :: IO (Either ParseFailure TriplesList)
in TU.testCase (T.unpack nm) $ assertIsNotParsed rdf
mfEntryToTest x = error $ "unknown TestEntry pattern in mfEntryToTest: " ++ show x
mfBaseURITurtle :: BaseUrl
mfBaseURITurtle = BaseUrl "http://www.w3.org/2013/TurtleTests/"
testParser :: TurtleParser
testParser = TurtleParser (Just mfBaseURITurtle) Nothing
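-- Usage sketch (illustrative; 'readManifest' is a hypothetical loader name,
-- the real manifest-loading entry point lives in "W3C.Manifest"):
--
-- > main = defaultMain . tests =<< readManifest "TurtleTests/manifest.ttl"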
|
jutaro/rdf4h
|
testsuite/tests/W3C/TurtleTest.hs
|
bsd-3-clause
| 1,921 | 0 | 13 | 301 | 593 | 307 | 286 | 38 | 1 |
-- Copyright 2019 Google LLC
--
-- Use of this source code is governed by a BSD-style
-- license that can be found in the LICENSE file or at
-- https://developers.google.com/open-source/licenses/bsd
-- | This module defines custom types for defining names of various
-- syntax terms.
--
-- These types are all instances of 'Data.String.IsString'. For ease of use,
-- we recommend enabling the @OverloadedStrings@ extension.
{-# LANGUAGE CPP #-}
module GHC.SourceGen.Name
( -- * RdrNameStr
RdrNameStr(..)
, RawNameSpace(..)
, rdrNameStrToString
, qual
, unqual
-- * OccNameStr
, OccNameStr
, occNameStrToString
, occNameStrNamespace
, occNameToStr
, nameToStr
-- * ModuleNameStr
, ModuleNameStr(..)
, moduleNameStrToString
) where
#if MIN_VERSION_ghc(9,0,0)
import GHC.Data.FastString (unpackFS)
import GHC.Unit.Module (moduleNameString)
import GHC.Types.Name.Occurrence (OccName, occNameFS, occNameSpace, isVarNameSpace)
import GHC.Types.Name (Name, nameOccName)
#else
import FastString (unpackFS)
import Module (moduleNameString)
import OccName (OccName, occNameFS, occNameSpace, isVarNameSpace)
import Name (Name, nameOccName)
#endif
import GHC.SourceGen.Name.Internal
unqual :: OccNameStr -> RdrNameStr
unqual = UnqualStr
qual :: ModuleNameStr -> OccNameStr -> RdrNameStr
qual = QualStr
moduleNameStrToString :: ModuleNameStr -> String
moduleNameStrToString = moduleNameString . unModuleNameStr
occNameStrToString :: OccNameStr -> String
occNameStrToString (OccNameStr _ s) = unpackFS s
occNameStrNamespace :: OccNameStr -> RawNameSpace
occNameStrNamespace (OccNameStr n _) = n
rdrNameStrToString :: RdrNameStr -> String
rdrNameStrToString (UnqualStr o) = occNameStrToString o
rdrNameStrToString (QualStr m o) =
moduleNameStrToString m ++ '.' : occNameStrToString o
-- | Converts a GHC 'OccName' to an 'OccNameStr'. Ignores whether the input
-- came from the namespace of types or of values.
occNameToStr :: OccName -> OccNameStr
occNameToStr o = OccNameStr n (occNameFS o)
where
n = if isVarNameSpace $ occNameSpace o
then Value
else Constructor
-- | Converts from a GHC 'Name' to an 'OccNameStr'. Ignores whether
-- the input came from the namespace of types or of values, as well
-- as any other information about where the name came from.
nameToStr :: Name -> OccNameStr
nameToStr = occNameToStr . nameOccName
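-- Usage sketch (illustrative, assuming the @OverloadedStrings@ extension
-- recommended above, so string literals give 'ModuleNameStr'/'OccNameStr'):
--
-- > rdrNameStrToString (qual "Data.Map" "lookup")  -- "Data.Map.lookup"
-- > rdrNameStrToString (unqual "x")                -- "x"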
|
google/ghc-source-gen
|
src/GHC/SourceGen/Name.hs
|
bsd-3-clause
| 2,433 | 0 | 9 | 430 | 370 | 221 | 149 | 41 | 2 |
{-# LANGUAGE DeriveFoldable, DeriveFunctor, DeriveTraversable, NoMonomorphismRestriction, ScopedTypeVariables, TupleSections #-}
module TypeCheck where
import Control.Monad
import Data.Either (Either)
import Control.Monad.State
import Control.Monad.Trans.Either (runEitherT, EitherT(..), left)
import Data.Foldable (Foldable)
import qualified Data.Map as M
import Data.Monoid ((<>))
import Data.Traversable (Traversable)
import System.IO.Unsafe (unsafePerformIO) -- TODO: REMOVE
import AST
import Interface
checkAST :: Monad m => UnnAST -> EitherT String m (InterfaceMap, AnnA)
checkAST (Fix ast) = do
interfaces <- return $ buildInterface (Fix ast)
case interfaces of
Just interfaces' -> do
let state = (M.empty, Nothing, interfaces')
ast' <- EitherT $ return $ evalState (runEitherT $ check ast) state
return (interfaces', ast')
Nothing -> do
left "Failed to build class interface"
type CheckState = (M.Map AId AVarType, Maybe (AId, InterfaceEntry), InterfaceMap)
type MCheck = EitherT String (State CheckState)
withSnapshot action = do
snap <- lift get
ret <- action
lift $ put snap
return ret
reserve :: AId -> AVarType -> MCheck ()
reserve name kind = do
(prev, _, interfaces) <- lift get
let addEntry = lift . modify $ \(_, _2, _3) -> (M.insert name kind prev, _2, _3)
case (kind, M.lookup name prev) of
(_, Just val) -> left $ errorMsg val
(TypeAppDefined classRef, _) -> do
when (M.lookup classRef interfaces == Nothing) (left $ missingType classRef)
addEntry
(_, Nothing) -> addEntry
where
errorMsg val = "Identifier '" <> name <> "'" <>
" already declared as type '" <> show val <> "'"
missingType kind = "Identifier declared with invalid type " <> kind
unFix (Fix f) = f
mapUnfix = map unFix
getVarType :: AId -> MCheck AVarType
getVarType name = do
(locals, (Just (_, (interface, _))), _3) <- get
case (M.lookup name $ M.union locals interface) of
Nothing -> left $ "Invalid variable reference '" <> name <> "'"
Just t -> return t
arrayMapping = M.fromList [(TypeIntegerArray, TypeInteger), (TypeStringArray, TypeString)]
newtype Ann tag a = Ann (tag, AEntry a)
deriving (Functor, Foldable, Traversable)
type AnnA = Fix (Ann AVarType)
check :: AEntry UnnAST -> MCheck AnnA
------------------- Programs -------------------
check (AProgram classes) = do
res <- mapM (check . unFix) classes
return . Fix . Ann $ (TypeVoid, AProgram res)
------------------- Classes -------------------
check (AClass name vars methods) = do
(_, _, interface) <- get
let Just interface' = M.lookup name interface
modify $ \(_1, _, _3) -> (_1, Just (name, interface'), _3)
res <- mapM (check . unFix) methods
modify $ \(_1, _, _3) -> (_1, Nothing, _3)
return . Fix . Ann $ (TypeVoid, AClass name vars' res) -- TODO: need to handle vars
where
vars' = map (\(Fix (AVar k n)) -> Fix (Ann (k, AVar k n))) vars
check (AVar kind name) = do
reserve name kind
return . Fix . Ann $ (TypeVoid, AVar kind name) -- TODO: Check if actual type is needed
------------------- Methods -------------------
check (AMethod refRet name args vars code (Fix retExpr)) = withSnapshot $ do
args' <- mapM check $ mapUnfix args
vars' <- mapM check $ mapUnfix vars
code' <- mapM (check . unFix) code
ret@(Fix (Ann (inferredRet, _))) <- check retExpr
when (inferredRet /= refRet) $
left $ "Invalid type of return expression (" <>
show refRet <> " indicated, " <>
show inferredRet <> " used)"
return . Fix . Ann $ (TypeVoid, AMethod refRet name args' vars' code' ret)
------------------- Statements -------------------
check (AStatScope s) = withSnapshot $ do
res <- forM s $ \(Fix s') -> do
subRes@(Fix (Ann (statType, _))) <- check s'
when (statType /= TypeVoid) $
left $ "Invalid statement in scope block"
return subRes
return . Fix . Ann $ (TypeVoid, AStatScope res)
check (AIf (Fix guard) (Fix s1) (Fix s2)) = withSnapshot $ do
guard'@(Fix (Ann (guardType, _))) <- check guard
when (guardType /= TypeBoolean) $
left $ "Non-boolean type of guard in if-statement"
[body1, body2] <- forM [s1, s2] $ \body -> do
subRes@(Fix (Ann (bodyType, _))) <- check body
when (bodyType /= TypeVoid) $
left $ "Invalid statement body in if-statement"
return subRes
return . Fix . Ann $ (TypeVoid, AIf guard' body1 body2)
check (AWhile (Fix guard) (Fix s)) = withSnapshot $ do
guard'@(Fix (Ann (guardType, _))) <- check guard
when (guardType /= TypeBoolean) $
left $ "Non-boolean type of guard in while-statement"
s'@(Fix (Ann (bodyType, _))) <- check s
when (bodyType /= TypeVoid) $
left $ "Invalid loop body in while-statement"
return . Fix . Ann $ (TypeVoid, AWhile guard' s')
check (APrint (Fix e)) = do
subRes@(Fix (Ann (inner, _))) <- check e
when (not $ elem inner [TypeInteger, TypeBoolean]) $
left $ "Invalid print call on type " <> show inner
return . Fix . Ann $ (TypeVoid, APrint subRes)
check (AAssignment (Fix name) (Fix e)) = do
res1@(Fix (Ann (outerType, _))) <- check name
res2@(Fix (Ann (asssnType, _))) <- check e
when (outerType /= asssnType) $
left $ "Assignment with incompatible types, variable " <> show name
return . Fix . Ann $ (TypeVoid, AAssignment res1 res2)
check (AIndexedAssignment (Fix name) (Fix e1) (Fix e2)) = do
res1@(Fix (Ann (outerType, _))) <- check name
when (outerType /= TypeIntegerArray && outerType /= TypeStringArray) $
left $ "Invalid [] access to non-array variable " <> show name
res2@(Fix (Ann (innerType, _))) <- check e1
when (innerType /= TypeInteger) $
left $ "Invalid non-integer index into variable " <> show name
res3@(Fix (Ann (asssnType, _))) <- check e2
when (M.lookup outerType arrayMapping /= Just asssnType) $
left $ "Invalid non-array-type assignment into variable " <> show name
return . Fix . Ann $ (TypeVoid, AIndexedAssignment res1 res2 res3)
------------------- Expressions -------------------
check (AExprOp op (Fix e1) (Fix e2)) = do
e1'@(Fix (Ann (t1, _))) <- check e1
e2'@(Fix (Ann (t2, _))) <- check e2
res <- inf op t1 t2
return . Fix . Ann $ (res, AExprOp op e1' e2')
where
inf OperandLess TypeInteger TypeInteger = return $ TypeBoolean
inf OperandLessEqual TypeInteger TypeInteger = return $ TypeBoolean
inf OperandEqual TypeInteger TypeInteger = return $ TypeBoolean
inf OperandEqual TypeBoolean TypeBoolean = return $ TypeBoolean
inf OperandEqual (TypeAppDefined t1) (TypeAppDefined t2) = do
when (t1 /= t2) $ left "Invalid objects to == operator"
return $ TypeBoolean
inf OperandEqual TypeIntegerArray TypeIntegerArray = return $ TypeBoolean
inf OperandLogicalAnd TypeBoolean TypeBoolean = return $ TypeBoolean
inf OperandLogicalOr TypeBoolean TypeBoolean = return $ TypeBoolean
inf _ TypeInteger TypeInteger = return $ TypeInteger
inf _ t1 t2 = left $
"Invalid operator '" <> show op <> "'" <>
" on types " <> show t1 <> " and " <> show t2
check (AExprList (Fix e1) (Fix e2)) = do
e2'@(Fix (Ann (innerType, _))) <- check e2
when (innerType /= TypeInteger) $
left "Invalid expression of type in [] operation"
e1'@(Fix (Ann (outerType, _))) <- check e1
res <- case M.lookup outerType arrayMapping of
Nothing -> left "Invalid [] operation on non-array type"
Just resType -> return resType
return . Fix . Ann $ (res, AExprList e1' e2')
check (AExprLength e) = do
e'@(Fix (Ann (exprType, _))) <- check $ unFix e
when (exprType /= TypeIntegerArray && exprType /= TypeStringArray) $
left "Invalid .length access to non-array type"
return . Fix . Ann $ (TypeInteger, AExprLength e')
check (AExprInvocation (Fix expr) name args) = do
-- validate object reference
expr'@(Fix (Ann (exprType, _))) <- check expr
className <- case exprType of
TypeAppDefined className -> return $ className
t -> left $ "Invalid method-invocation on type: " <> show t
-- lookup method in interface map
(_, _, interfaces) <- get
let methodRef = M.lookup className interfaces >>= M.lookup name . snd
-- validate inferred argument types
args' <- mapM (check . unFix) args
retType <- case methodRef of
Nothing -> left $
"Invalid method reference, couldn't find " <> name <> " in " <> className
Just (iRet, iArgs) -> do
let types = map (\(Fix (Ann (t, _))) -> t) args'
when (types /= iArgs) $ left "Invalid type(s) of method argument."
return iRet
return . Fix . Ann $ (retType, AExprInvocation expr' name args')
check (AExprInt i) = do
when (i > maxInt) $
left $ "Integer literal out of range: " <> show i
return . Fix . Ann $ (TypeInteger, AExprInt i)
where
maxInt = 2 ^ 31 - 1
check AExprTrue = return . Fix . Ann $ (TypeBoolean, AExprTrue)
check AExprFalse = return . Fix . Ann $ (TypeBoolean, AExprFalse)
check (AExprIdentifier name) = do
res <- getVarType name
return . Fix . Ann $ (res, AExprIdentifier name)
check AExprThis = do
(_, Just (thisClass, _), _) <- get
return . Fix . Ann $ (TypeAppDefined thisClass, AExprThis)
check (AExprIntArray (Fix e)) = do
e'@(Fix (Ann (subExpr, _))) <- check e
when (subExpr /= TypeInteger) $ left "Invalid subexpr in new int []"
return . Fix . Ann $ (TypeIntegerArray, AExprIntArray e')
check (AExprNewObject name) = do
(_, _, interfaces) <- get
res <- case M.lookup name interfaces of
Nothing -> left $ "Missing class declaration of " <> name
Just _ -> return $ TypeAppDefined name
return . Fix . Ann $ (res, AExprNewObject name)
check (AExprNegation (Fix e)) = do
e'@(Fix (Ann (t, _))) <- check e
when (t /= TypeBoolean) $
left $ "Negation of non-boolean variable" <> show e
return . Fix . Ann $ (t, AExprNegation e')
check AExprVoid = return . Fix . Ann $ (TypeVoid, AExprVoid)
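-- Usage sketch (illustrative; assumes 'program :: UnnAST' was produced by the
-- parser elsewhere in this project, and 'compile' is a stand-in for whatever
-- consumes the annotated AST):
--
-- > result <- runEitherT (checkAST program)
-- > case result of
-- >   Left err                      -> putStrLn ("type error: " ++ err)
-- >   Right (interfaces, annotated) -> compile interfaces annotated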
|
davnils/minijava-compiler
|
src/TypeCheck.hs
|
bsd-3-clause
| 10,190 | 5 | 23 | 2,341 | 3,873 | 1,953 | 1,920 | 205 | 14 |
{-# LANGUAGE BangPatterns #-}
module Data.Picture.Drawing.ShapeMatrix.EdgeMatrix (
EdgeMatrix,
addEdge
) where
import Control.Monad
import Data.Pair
import Data.Matrix
import Data.Picture.Drawing.ShapeMatrix.ShapeMatrix
import Data.Picture.Drawing.Line
newtype EdgeMatrix = EdgeMatrix { runEM :: Matrix Double }
instance ShapeMatrix EdgeMatrix where
drawColor color _ _ (EdgeMatrix m) = forM_ [Pair (get3DPoint m i) (get3DPoint m (i+1)) | i <- [0,2.. cols m - 1]] . \pic (Pair (Triple !x1 !y1 !z1) (Triple !x2 !y2 !z2)) -> drawColorLine color (round x1) (round y1) z1 (round x2) (round y2) z2 pic
unwrap = runEM
wrap = EdgeMatrix
instance Monoid EdgeMatrix where
mempty = wrap empty
mappend = liftDraw2 mergeCols
addEdge :: Triple Double -> Triple Double -> EdgeMatrix -> EdgeMatrix
addEdge p1 p2 = wrap . addP p2 . addP p1 . unwrap
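-- Usage sketch (illustrative; the endpoint coordinates are made up):
--
-- > let em = addEdge (Triple 0 0 0) (Triple 100 50 0) (mempty :: EdgeMatrix)
--
-- Each call appends one edge (two point columns) to the matrix, which
-- 'drawColor' later walks two columns at a time to draw lines.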
|
jbaum98/graphics
|
src/Data/Picture/Drawing/ShapeMatrix/EdgeMatrix.hs
|
bsd-3-clause
| 862 | 0 | 13 | 154 | 320 | 168 | 152 | 19 | 1 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE ImplicitParams #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TupleSections #-}
{-# OPTIONS_GHC -Wall #-}
-- | Concurrent multi-queue
--
-- Intended for qualified import
--
-- > import Network.Broadcast.OutboundQueue.ConcurrentMultiQueue (MultiQueue)
-- > import qualified Network.Broadcast.OutboundQueue.ConcurrentMultiQueue as MQ
--
-- This module provides a data structure which I've termed a
-- /concurrent multi-queue/:
--
-- * Queue because elements are only inserted at the back and removed from
-- the front (though not necessarily the very front).
-- * Multi-queue because it is a collection of queues which can share elements.
-- * Concurrent because multiple threads can read the multi-queue concurrently
-- (though not modify it).
--
-- Every queue itself is basically a fairly standard imperative doubly-linked
-- list, except that since nodes are shared, every node has a bunch of back
-- pointers and a bunch of next pointers. These " pointers " are 'IORef's, not
-- 'MVar's; instead, each queue has a single @MVar ()@ which serves as a
-- write lock for the entire queue.
--
-- > | \
-- > | -\
-- > | -\
-- > +---|---+ +----\--+ +-------+
-- > | | | | -\| | |
-- > --------|-----------------\--------------->
-- > | | | | | \ | |
-- > +---|---+ +-------+ -\+-------+
-- > | -\
-- > | -\
-- > +---|---+ +-------+ +----\--+
-- > | | | | | | -\|
-- > --------|--------------------------------->
-- > | | | | | | /|
-- > +---|---+ +-------+ +----/--+
-- > | /-
-- > | /-
-- > +---|---+ +-------+ /--------+
-- > | | | | | /- | |
-- > --------|-----------------/--------------->
-- > | | | | /- | |
-- > +---|---+ +----/--+ +-------+
-- > | /-
-- > | /-
-- > v <
module Network.Broadcast.OutboundQueue.ConcurrentMultiQueue
( MultiQueue -- opaque
, new
, size
, sizeBy
, enqueue
, dequeue
, remove
, removeFront
, removeAllIn
-- * Tests
, snapshot
, tests
) where
import Control.Concurrent
import Control.Exception (Exception, throwIO)
import Control.Lens
import Control.Monad
import Data.IORef
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
import Data.Maybe (fromMaybe, isJust)
import GHC.Stack
{-------------------------------------------------------------------------------
Concurrent multi-queue
SKETCH OF CORRECTNESS (LINEARIZABILITY) PROOF
I'm going to take single-threaded correctness for granted, and focus only
on potential interference between concurrent operations.
enqueue/enqueue
These cannot possibly interfere with each other, since 'enqueue' holds the
write lock for the entirety of the update. □
dequeue/dequeue
Dequeue finds a node in a queue which has not yet been deleted (without
taking any locks). It then takes the write lock and checks again if the node
has not yet been deleted. If it has, it releases the lock again and
continues its search. If it hasn't, it updates the next and back pointers
of its predecessor and successor and returns the node's value.
How might two dequeues interfere with each other? In general, the
queue will look something like
> (..) ~ A ~ B ~ C ~ D ~ E ~ F <-> (..)
We do a case analysis on which two nodes will be dequeued:
Two nodes far enough apart (at least two nodes in between; e.g., B and E):
In this case there is no interference; the first dequeue will update
A and C, and the second dequeue will update D and F. □
Two identical nodes (e.g., both pick B):
In this case the first dequeue will succeed and the second dequeue will
find after it acquires the lock that the node has already been deleted,
and it will continue searching. □
Two nodes with a single node in between (e.g., B and D):
Suppose B gets dequeued first. The predecessor of C becomes A:
> (..) ~ A ~ C ~ D ~ E ~ F <-> (..)
Now when D is dequeued, the successor of C becomes E:
> (..) ~ A ~ C ~ E ~ F <-> (..)
There is no interference, as one dequeue only modifies the predecessor and
the other only modifies the successor.
The same argument holds, mutatis mutandis, when D is dequeued first. □
Two adjacent nodes (e.g., B and C):
Suppose B gets dequeued first. The predecessor of C becomes A:
> (..) ~ A ~ C ~ D ~ E ~ F <-> (..)
Now when we dequeue C, we need to make sure that the predecessor of D
becomes A, not B; this will be the case if we read these next and back
pointers after having obtained the lock.
The same argument holds, mutatis mutandis, when C is dequeued first. □
enqueue/dequeue
If an element is not found because it was enqueued during the search,
then that is okay: we can consider the enqueue to have happened after the
dequeue.
We do have to worry about what happens when the enqueue and the dequeue
both modify the end of the queue. If the queue looks something like
> (..) ~ X ~ Y ~ Z
We do a case analysis on which node we dequeue:
Dequeueing the end of the queue (Z):
If the enqueue happens first, adding a new node N at the end
> (..) ~ X ~ Y ~ Z ~ N
then there is no problem; enqueuing happens with the write lock taken
and dequeue will wait for that lock to be released before modifying Z.
If the dequeue happens first, it will modify the queue with the write
lock taken, so that when the enqueue proceeds it will see the queue
> (..) ~ X ~ Y
instead. Again, all good. □
Dequeueing any other item:
Suppose we dequeue Y. This will modify the back pointer of Z, and
enqueue will modify the forward pointer of Z. No interference.
No interference is possible when dequeuing even earlier elements. □
-------------------------------------------------------------------------------}
{-------------------------------------------------------------------------------
Node
-------------------------------------------------------------------------------}
-- | Node in the linked list
data Node k a = Node {
-- | The value stored in the node
_value :: a
-- | Mark whether or not the node has been removed from the linked list
--
-- This is used for concurrency, as discussed above.
, _deleted :: Bool
-- | Links to previous and next node for each list the node is part of
--
-- Invariants:
--
-- * There must be an entry in the map for each list the node is part of
-- * The kind of link must match the 'Ends' recorded for the list
-- (i.e., if the node is listed as the front of the list, the 'Links'
-- must be 'Front' or 'Singleton').
, _links :: Map k (Links k a)
}
-- | Links from a node to its (previous and next) neighbours
data Links k a =
Middle (PNode k a) (PNode k a)
| Front (PNode k a)
| Back (PNode k a)
| Singleton
-- | Pointer to a node
type PNode k a = IORef (Node k a)
makeLenses ''Node
{-------------------------------------------------------------------------------
Statistics about the queue
-------------------------------------------------------------------------------}
-- | Statistics about the queue
data Stats k = Stats {
_countPerKey :: Map k Int
, _totalCount :: Int
}
makeLenses ''Stats
emptyStats :: Stats k
emptyStats = Stats {
_countPerKey = Map.empty
, _totalCount = 0
}
modifyCounts :: Ord k => (Int -> Int) -> [k] -> Stats k -> Stats k
modifyCounts f = go
where
go [] !stats' = stats' & totalCount %~ f
go (k:ks') !stats' = go ks' $ stats' & countPerKey . ix k %~ f
increaseCounts, decreaseCounts :: Ord k => [k] -> Stats k -> Stats k
increaseCounts = modifyCounts (\n -> n + 1)
decreaseCounts = modifyCounts (\n -> n - 1)
{-------------------------------------------------------------------------------
Multiqueue proper
-------------------------------------------------------------------------------}
-- | Front and back of a queue
data Ends k a = Ends { _front :: PNode k a, _back :: PNode k a }
makeLenses ''Ends
-- | We use a single write lock to protect modifications to the queue
type Lock k = MVar (Stats k)
-- | Concurrent multi-queue
data MultiQueue k a = MultiQueue {
pEnds :: IORef (Map k (Ends k a))
, writeLock :: Lock k
}
{-------------------------------------------------------------------------------
Lenses
Although the lens definitions for 'Links' are pretty straight-forward, it's
not immediately obvious that they don't break the lens laws, so I've spelled
out the proof for 'next':
> view next (set next mn l) == n.
>
> Need to prove: g (s l mn) == mn
>
> Case mn == Nothing. Case analysis on l.
>
> g (s (Middle p _n) Nothing) == g (Back p) == Nothing
> g (s (Front _n) Nothing) == g (Singleton) == Nothing
> g (s (Back p ) Nothing) == g (Back p) == Nothing
> g (s Singleton Nothing) == g (Singleton) == Nothing
>
> Case mn == Just n'. Case analysis on l.
>
> g (s (Middle p _n) (Just n')) == g (Middle p n') == Just n'
> g (s (Front _n) (Just n')) == g (Front n') == Just n'
> g (s (Back p ) (Just n')) == g (Middle p n') == Just n'
> g (s Singleton (Just n')) == g (Front n') == Just n' □
>
> set next (view next l) l == l
>
> Need to prove: s l (g l) == l
>
> Case analysis on l.
>
> s (Middle _p n) (g (Middle _p n)) == s (Middle _p n) (Just n) == Middle _p n
> s (Front n) (g (Front n)) == s (Front n) (Just n) == Front n
> s (Back _p ) (g (Back _p )) == s (Back _p ) Nothing == Back _p
> s Singleton (g Singleton ) == s Singleton Nothing == Singleton □
>
> set next mn' (set next mn l) == set next mn' l
>
> Need to prove: s (s l mn) mn' == s l mn'
>
> Case mn == Nothing, mn' = Nothing. Case analysis on l.
>
> s (s (Middle p _n) Nothing) Nothing == s (Back p) Nothing == Back p == s (Middle p _n) Nothing
> s (s (Front _n) Nothing) Nothing == s Singleton Nothing == Singleton == s (Front _n) Nothing
> s (s (Back p ) Nothing) Nothing == s (Back p) Nothing == Back p == s (Back p ) Nothing
> s (s Singleton Nothing) Nothing == s Singleton Nothing == Singleton == s Singleton Nothing
>
> Case mn == Just n', mn' = Nothing. Case analysis on l.
>
> s (s (Middle p _n) (Just n')) Nothing == s (Middle p n') Nothing == Back p == s (Middle p _n) Nothing
> s (s (Front _n) (Just n')) Nothing == s (Front n') Nothing == Singleton == s (Front _n) Nothing
> s (s (Back p ) (Just n')) Nothing == s (Middle p n') Nothing == Back p == s (Back p ) Nothing
> s (s Singleton (Just n')) Nothing == s (Front n') Nothing == Singleton == s Singleton Nothing
>
> Case mn = Nothing, mn' = n''. Case analysis on l.
>
> s (s (Middle p _n) Nothing) (Just n'') = s (Back p) (Just n'') == Middle p n'' == s (Middle p _n) (Just n'')
> s (s (Front _n) Nothing) (Just n'') = s Singleton (Just n'') == Front n'' == s (Front _n) (Just n'')
> s (s (Back p ) Nothing) (Just n'') = s (Back p) (Just n'') == Middle p n'' == s (Back p ) (Just n'')
> s (s Singleton Nothing) (Just n'') = s Singleton (Just n'') == Front n'' == s Singleton (Just n'')
>
> Case mn = Just n', mn' = Just n''. Case analysis on l.
>
> s (s (Middle p _n) (Just n')) (Just n'') == s (Middle p n') (Just n'') == Middle p n'' == s (Middle p _n) (Just n'')
> s (s (Front _n) (Just n')) (Just n'') == s (Front n') (Just n'') == Front n'' == s (Front _n) (Just n'')
> s (s (Back p ) (Just n')) (Just n'') == s (Middle p n') (Just n'') == Middle p n'' == s (Back p ) (Just n'')
> s (s Singleton (Just n')) (Just n'') == s (Front n') (Just n'') == Front n'' == s Singleton (Just n'') □
-------------------------------------------------------------------------------}
-- | Links from this node in a particular queue
--
-- NOTE: This should /ONLY/ be used if (externally) it is known that this node
-- is indeed part of this queue.
linksAt :: Ord k => k -> Lens' (Node k a) (Links k a)
linksAt k = unsafeSingular $ links . ix k
-- | Next pointer, if any.
--
-- The getter returns the next pointer, if one exists. The setter overrides or
-- removes the next pointer, possibly changing the type of link in the process
-- (for instance, removing the next of a middle node turns it into a back node).
next :: Lens' (Links k a) (Maybe (PNode k a))
next = lens g s
where
g :: Links k a -> Maybe (PNode k a)
g (Middle _p n) = Just n
g (Front n) = Just n
g (Back _p ) = Nothing
g Singleton = Nothing
s :: Links k a -> Maybe (PNode k a) -> Links k a
s (Middle p _n) Nothing = Back p
s (Front _n) Nothing = Singleton
s (Back p ) Nothing = Back p
s Singleton Nothing = Singleton
s (Middle p _n) (Just n') = Middle p n'
s (Front _n) (Just n') = Front n'
s (Back p ) (Just n') = Middle p n'
s Singleton (Just n') = Front n'
-- | Previous node, if any.
--
-- See detailed discussion at 'next'.
prev :: Lens' (Links k a) (Maybe (PNode k a))
prev = lens g s
where
g :: Links k a -> Maybe (PNode k a)
g (Middle p _n) = Just p
g (Front _n) = Nothing
g (Back p ) = Just p
g Singleton = Nothing
s :: Links k a -> Maybe (PNode k a) -> Links k a
s (Middle _p n) Nothing = Front n
s (Front n) Nothing = Front n
s (Back _p ) Nothing = Singleton
s Singleton Nothing = Singleton
s (Middle _p n) (Just p') = Middle p' n
s (Front n) (Just p') = Middle p' n
s (Back _p ) (Just p') = Back p'
s Singleton (Just p') = Back p'
{-------------------------------------------------------------------------------
Construction and modification
-------------------------------------------------------------------------------}
new :: IO (MultiQueue k a)
new = do
pEnds <- newIORef Map.empty
writeLock <- newMVar emptyStats
return MultiQueue{..}
enqueue :: forall k a. Ord k => MultiQueue k a -> [k] -> a -> IO ()
enqueue MultiQueue{..} ks a = modifyMVar_ writeLock $ \stats -> do
ends <- readIORef pEnds
let -- Find the back of queue @k@
backOfQueue :: k -> Maybe (PNode k a)
backOfQueue k = _back <$> Map.lookup k ends
-- Construct 'Links' for the new node given the previous node
-- (the new node is always inserted at the end of the queue)
mkLink :: Maybe (PNode k a) -> Links k a
mkLink Nothing = Singleton
mkLink (Just p) = Back p
newNode :: Node k a
newNode = Node {
_value = a
, _deleted = False
, _links = Map.fromList $ map (\k -> (k, mkLink (backOfQueue k))) ks
}
pNewBack :: PNode k a <- newIORef newNode
let updateNexts :: [k] -> Map k (Ends k a) -> IO (Map k (Ends k a))
updateNexts [] ends' = return ends'
updateNexts (k:ks') ends' =
case ends' ^. at k of
Nothing -> do
let pNewFront = pNewBack -- First node in this particular queue
updateNexts ks' $ ends' & at k .~ Just (Ends pNewFront pNewBack)
Just (Ends _pOldFront pOldBack) -> do
modIORef pOldBack $ linksAt k . next .~ Just pNewBack
updateNexts ks' $ ends' & ix k . back .~ pNewBack
writeIORef pEnds =<< updateNexts ks ends
return $! increaseCounts ks stats
-- | Total size of the queue
size :: MultiQueue k a -> IO Int
size MultiQueue{..} =
view totalCount <$> readMVar writeLock
-- | Size of the specified queue
sizeBy :: Ord k => k -> MultiQueue k a -> IO Int
sizeBy k MultiQueue{..} =
fromMaybe 0 . view (countPerKey . at k) <$> readMVar writeLock
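-- A minimal end-to-end sketch (illustrative; mirrors 'test1'/'test2' at the
-- bottom of this module, with 'job' a stand-in payload and 'dequeue' defined
-- just below):
--
-- > q  <- MQ.new
-- > MQ.enqueue q ["fast", "slow"] job      -- visible in both queues
-- > n  <- MQ.sizeBy "fast" q               -- 1
-- > mj <- MQ.dequeue "fast" (const True) q -- removes it from both queues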
-- | Remove a node from the queues
--
-- The node is located starting at queue @k@, but removed from /all/ queues.
dequeue :: Ord k => k -> (a -> Bool) -> MultiQueue k a -> IO (Maybe a)
dequeue k p qs@MultiQueue{..} =
maybe (return Nothing) (dequeueFrom k p qs) =<< find k p qs
-- Dequeue starting at a particular node
dequeueFrom :: Ord k
=> k -> (a -> Bool) -> MultiQueue k a -> PNode k a -> IO (Maybe a)
dequeueFrom k p qs@MultiQueue{..} pNode = do
-- We try to remove the node from the queue, returning 'Nothing' if the node
-- was already deleted by someone else
ma <- modifyMVar writeLock $ \stats -> do
cur <- readIORef pNode
if cur ^. deleted
then return (stats, Nothing)
else do
modIORef pNode $ deleted .~ True
forM_ (Map.toList (cur ^. links)) $ \(k', linksK') -> case linksK' of
Middle pPrev pNext -> do
modIORef pPrev $ links . ix k' . next .~ Just pNext
modIORef pNext $ links . ix k' . prev .~ Just pPrev
Front pNext -> do
modIORef pNext $ links . ix k' . prev .~ Nothing
modIORef pEnds $ ix k' . front .~ pNext
Back pPrev -> do
modIORef pPrev $ links . ix k' . next .~ Nothing
modIORef pEnds $ ix k' . back .~ pPrev
Singleton ->
modIORef pEnds $ at k' .~ Nothing
return (
decreaseCounts (Map.keys (cur ^. links)) stats
, Just (cur ^. value)
)
case ma of
Just a -> return (Just a)
Nothing -> findFrom k p pNode
>>= maybe (return Nothing) (dequeueFrom k p qs)
-- | Remove a node from the queue (if a matching node can be found)
remove :: Ord k => k -> (a -> Bool) -> MultiQueue k a -> IO ()
remove k p qs = void $ dequeue k p qs
-- | Remove the first element from the queue (if such an element exists)
removeFront :: Ord k => k -> MultiQueue k a -> IO ()
removeFront k = remove k (const True)
-- | Remove all elements in the given queue
--
-- The queue may not be empty when 'removeAllIn' returns if there were
-- concurrent enqueues.
removeAllIn :: Ord k => k -> MultiQueue k a -> IO ()
removeAllIn k qs = go
where
go :: IO ()
go = do
ma <- dequeue k (const True) qs
when (isJust ma) go
-- | Locate a node satisfying a predicate
--
-- Returns the node pointer. Internal function.
find :: Ord k => k -> (a -> Bool) -> MultiQueue k a -> IO (Maybe (PNode k a))
find k p MultiQueue{..} = do
ends <- readIORef pEnds
maybe (return Nothing) (findFrom k p . _front) (ends ^. at k)
-- | Locate a node satisfying a predicate, starting from a given node
findFrom :: Ord k => k -> (a -> Bool) -> PNode k a -> IO (Maybe (PNode k a))
findFrom k p pNode = do
node <- readIORef pNode
if not (node ^. deleted) && p (node ^. value)
then return $ Just pNode
else maybe (return Nothing) (findFrom k p) (node ^. linksAt k . next)
{-------------------------------------------------------------------------------
Auxiliary
-------------------------------------------------------------------------------}
modIORef :: IORef a -> (a -> a) -> IO ()
modIORef ref f = atomicModifyIORef' ref ((, ()) . f)
{-------------------------------------------------------------------------------
Tests
TODO: QuickCheck? Mocking? Test framework?
-------------------------------------------------------------------------------}
-- | Take a snapshot of the queue
--
-- We take the lock so that the queue cannot be modified.
snapshot :: forall k a. Ord k => MultiQueue k a -> IO [(k, [a])]
snapshot MultiQueue{..} = withMVar writeLock $ \_stats -> do
ends <- readIORef pEnds
mapM (\(k, Ends fr _) -> (k,) <$> snapshotQueue k fr) $ Map.toList ends
where
snapshotQueue :: k -> PNode k a -> IO [a]
snapshotQueue k pNode = do
node <- readIORef pNode
as <- maybe (return []) (snapshotQueue k) (node ^. linksAt k . next)
return (node ^. value : as)
-- | Very simple test, single queue
test1 :: IO ()
test1 = do
q :: MultiQueue () Char <- new
enqueue q [()] 'A'
assertEq [((), "A")] =<< snapshot q
enqueue q [()] 'B'
assertEq [((), "AB")] =<< snapshot q
enqueue q [()] 'C'
assertEq [((), "ABC")] =<< snapshot q
assertEq (Just 'A') =<< dequeue () (const True) q
assertEq [((), "BC")] =<< snapshot q
assertEq (Just 'B') =<< dequeue () (const True) q
assertEq [((), "C")] =<< snapshot q
assertEq (Just 'C') =<< dequeue () (const True) q
assertEq [] =<< snapshot q
assertEq Nothing =<< dequeue () (const True) q
assertEq [] =<< snapshot q
-- | Two intersecting queues
--
-- > X
-- > |
-- > A -- O -- C
-- > |
-- > Y
test2 :: IO ()
test2 = do
q :: MultiQueue Bool Char <- new
enqueue q [True] 'A'
assertEq [(True,"A")] =<< snapshot q
enqueue q [False] 'X'
assertEq [(False,"X"),(True,"A")] =<< snapshot q
enqueue q [True, False] 'O'
assertEq [(False,"XO"),(True,"AO")] =<< snapshot q
enqueue q [True] 'C'
assertEq [(False,"XO"),(True,"AOC")] =<< snapshot q
enqueue q [False] 'Y'
assertEq [(False,"XOY"),(True,"AOC")] =<< snapshot q
assertEq (Just 'O') =<< dequeue True (== 'O') q
assertEq [(False,"XY"),(True,"AC")] =<< snapshot q
assertEq Nothing =<< dequeue True (== 'X') q
assertEq [(False,"XY"),(True,"AC")] =<< snapshot q
assertEq (Just 'X') =<< dequeue False (== 'X') q
assertEq [(False,"Y"),(True,"AC")] =<< snapshot q
tests :: IO ()
tests = do
test1
test2
assertEq :: HasCallStack => Eq a => a -> a -> IO ()
assertEq expected actual =
if expected == actual
then return ()
else throwIO $ AssertionFailure ?callStack
data AssertionFailure = AssertionFailure CallStack
deriving (Show)
instance Exception AssertionFailure
|
input-output-hk/pos-haskell-prototype
|
networking/src/Network/Broadcast/OutboundQueue/ConcurrentMultiQueue.hs
|
mit
| 22,790 | 0 | 26 | 6,596 | 4,307 | 2,193 | 2,114 | -1 | -1 |
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE CPP #-}
module IHaskell.Test.Parser (testParser) where
import Prelude
import Data.String.Here (hereLit)
import Test.Hspec
import Test.Hspec.Contrib.HUnit
import Test.HUnit (assertBool, assertFailure)
import IHaskell.Test.Util (ghc, strip)
import IHaskell.Eval.Parser (parseString, getModuleName, unloc, layoutChunks, Located(..),
CodeBlock(..), DirectiveType(..), StringLoc(..))
import IHaskell.Eval.ParseShell (parseShell)
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative ((<$>))
#endif
parses :: String -> IO [CodeBlock]
parses str = map unloc <$> ghc (parseString str)
like :: (Show a, Eq a) => IO a -> a -> IO ()
like parser desired = parser >>= (`shouldBe` desired)
is :: String -> (String -> CodeBlock) -> IO ()
is string blockType = do
result <- ghc $ parseString string
map unloc result `shouldBe` [blockType $ strip string]
testParser :: Spec
testParser = do
testLayoutChunks
testModuleNames
testParseString
testParseShell
testLayoutChunks :: Spec
testLayoutChunks = describe "Layout Chunk" $ do
it "chunks 'a string'" $
map unloc (layoutChunks "a string") `shouldBe` ["a string"]
it "chunks 'a\\n string'" $
map unloc (layoutChunks "a\n string") `shouldBe` ["a\n string"]
it "chunks 'a\\n string\\nextra'" $
map unloc (layoutChunks "a\n string\nextra") `shouldBe` ["a\n string", "extra"]
it "chunks strings with too many lines" $
map unloc (layoutChunks "a\n\nstring") `shouldBe` ["a", "string"]
it "parses multiple exprs" $ do
let text = [hereLit|
first
second
third
fourth
|]
layoutChunks text `shouldBe` [ Located 2 "first"
, Located 4 "second"
, Located 5 "third"
, Located 7 "fourth"
]
it "deals with quasiquotes" $ do
let parsesAsBlocks strs = map unloc (layoutChunks $ unlines strs) `shouldBe` strs
parsesAsBlocks ["let x = [q|a quasiquote|]"]
parsesAsBlocks ["let x = [q|a quasiquote|]", "3"]
parsesAsBlocks ["let x = [q|a quasiquote\n|]"]
parsesAsBlocks ["let x = [q|\na quasiquote\n|]"]
parsesAsBlocks ["let x = \"[q|doesn't matter\""]
parsesAsBlocks ["[q|q<-[1..10]]"]
parsesAsBlocks ["[q|x|] [q|x|]"]
parsesAsBlocks ["[q|\nx\n|] [q|x|]"]
testModuleNames :: Spec
testModuleNames = describe "Get Module Name" $ do
it "parses simple module names" $
"module A where\nx = 3" `named` ["A"]
it "parses module names with dots" $
"module A.B where\nx = 3" `named` ["A", "B"]
it "parses module names with exports" $
"module A.B.C ( x ) where x = 3" `named` ["A", "B", "C"]
it "errors when given unnamed modules" $ do
ghc (getModuleName "x = 3") `shouldThrow` anyException
where
named str result = do
res <- ghc $ getModuleName str
res `shouldBe` result
testParseShell :: Spec
testParseShell =
describe "Parsing Shell Commands" $ do
test "A" ["A"]
test ":load A" [":load", "A"]
test ":!l ~/Downloads/MyFile\\ Has\\ Spaces.txt"
[":!l", "~/Downloads/MyFile\\ Has\\ Spaces.txt"]
test ":!l \"~/Downloads/MyFile Has Spaces.txt\" /Another/File\\ WithSpaces.doc"
[":!l", "~/Downloads/MyFile Has Spaces.txt", "/Another/File\\ WithSpaces.doc"]
where
test string expected =
it ("parses " ++ string ++ " correctly") $
string `shouldParseTo` expected
shouldParseTo xs ys =
case parseShell xs of
Right xs' -> xs' `shouldBe` ys
Left e -> assertFailure $ "parseShell returned error: \n" ++ show e
testParseString :: Spec
testParseString = describe "Parser" $ do
it "parses empty strings" $
parses "" `like` []
it "parses simple imports" $
"import Data.Monoid" `is` Import
it "parses simple arithmetic" $
"3 + 5" `is` Expression
it "parses :type" $
parses ":type x\n:ty x" `like` [Directive GetType "x", Directive GetType "x"]
it "parses :info" $
parses ":info x\n:in x" `like` [Directive GetInfo "x", Directive GetInfo "x"]
it "parses :help and :?" $
parses ":? x\n:help x" `like` [Directive GetHelp "x", Directive GetHelp "x"]
it "parses :set x" $
parses ":set x" `like` [Directive SetDynFlag "x"]
it "parses :extension x" $
parses ":ex x\n:extension x" `like` [Directive SetExtension "x", Directive SetExtension "x"]
it "fails to parse :nope" $
parses ":nope goodbye" `like` [ParseError (Loc 1 1) "Unknown directive: 'nope'."]
it "parses number followed by let stmt" $
parses "3\nlet x = expr" `like` [Expression "3", Statement "let x = expr"]
it "parses let x in y" $
"let x = 3 in x + 3" `is` Expression
it "parses a data declaration" $
"data X = Y Int" `is` Declaration
it "parses number followed by type directive" $
parses "3\n:t expr" `like` [Expression "3", Directive GetType "expr"]
it "parses a <- statement" $
"y <- print 'no'" `is` Statement
it "parses a <- stmt followed by let stmt" $
parses "y <- do print 'no'\nlet x = expr" `like` [ Statement "y <- do print 'no'"
, Statement "let x = expr"
]
it "parses <- followed by let followed by expr" $
parses "y <- do print 'no'\nlet x = expr\nexpression" `like` [ Statement "y <- do print 'no'"
, Statement "let x = expr"
, Expression "expression"
]
it "parses two print statements" $
parses "print yes\nprint no" `like` [Expression "print yes", Expression "print no"]
it "parses a pattern-maching function declaration" $
"fun [] = 10" `is` Declaration
it "parses a function decl followed by an expression" $
parses "fun [] = 10\nprint 'h'" `like` [Declaration "fun [] = 10", Expression "print 'h'"]
it "parses list pattern matching fun decl" $
"fun (x : xs) = 100" `is` Declaration
it "parses two pattern matches as the same declaration" $
"fun [] = 10\nfun (x : xs) = 100" `is` Declaration
it "parses a type signature followed by a declaration" $
"fun :: [a] -> Int\nfun [] = 10\nfun (x : xs) = 100" `is` Declaration
it "parases a simple module" $
"module A where x = 3" `is` Module
it "parses a module with an export" $
"module B (x) where x = 3" `is` Module
it "breaks when a let is incomplete" $
parses "let x = 3 in" `like` [ ParseError (Loc 1 13)
"parse error (possibly incorrect indentation or mismatched brackets)"
]
it "breaks without data kinds" $
parses "data X = 3" `like` [dataKindsError]
it "parses statements after imports" $ do
parses "import X\nprint 3" `like` [Import "import X", Expression "print 3"]
parses "import X\n\nprint 3" `like` [Import "import X", Expression "print 3"]
it "ignores blank lines properly" $
[hereLit|
test arg = hello
where
x = y
z = w
|] `is` Declaration
it "doesn't break on long strings" $ do
let longString = concat $ replicate 20 "hello "
("img ! src \"" ++ longString ++ "\" ! width \"500\"") `is` Expression
it "parses do blocks in expression" $ do
[hereLit|
show (show (do
Just 10
Nothing
Just 100))
|] `is` Expression
it "correctly locates parsed items" $ do
ghc (parseString
[hereLit|
first
second
|]) >>= (`shouldBe` [Located 2 (Expression "first"), Located 4 (Expression "second")])
where
dataKindsError = ParseError (Loc 1 10) msg
#if MIN_VERSION_ghc(7, 10, 0)
msg = "Cannot parse data constructor in a data/newtype declaration: 3"
#elif MIN_VERSION_ghc(7, 8, 0)
msg = "Illegal literal in type (use DataKinds to enable): 3"
#else
msg = "Illegal literal in type (use -XDataKinds to enable): 3"
#endif
|
thomasjm/IHaskell
|
src/tests/IHaskell/Test/Parser.hs
|
mit
| 8,248 | 0 | 18 | 2,384 | 1,873 | 959 | 914 | 158 | 2 |
{- |
Module : ./Adl/Print.hs
Description : pretty printing ADL syntax
Copyright : (c) Stef Joosten, Christian Maeder DFKI GmbH 2010
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : portable
-}
module Adl.Print (adlGA) where
import Adl.As
import Common.AS_Annotation
import Common.Doc
import Common.DocUtils
import Common.GlobalAnnotations
import Common.Id
import Data.List
import qualified Data.Map as Map
instance Pretty Concept where
pretty c = case c of
C s -> pretty s
_ -> text $ show c
instance Pretty RelType where
pretty (RelType c1 c2) = case (c1, c2) of
(Anything, Anything) -> empty
_ | c1 == c2 -> brackets $ pretty c1
_ -> brackets $ hcat [pretty c1, cross, pretty c2]
instance Pretty Relation where
pretty (Sgn n t) = let s = tokStr n in
(if isBRel s then keyword s else pretty n)
<> pretty t
pOp :: UnOp -> Id
pOp o = case o of
Co -> converseId
Cp -> minusId
_ -> stringToId $ show o
instance Pretty UnOp where
pretty = idDoc . stringToId . show
inOp :: MulOp -> Id
inOp = stringToId . show
instance Pretty MulOp where
pretty o = let i = idDoc (inOp o) in case o of
Fc -> i
Fd -> i
_ -> space <> i <> space
prettyParen :: (Rule -> Bool) -> Rule -> Doc
prettyParen p e = (if p e then parens else id) $ pretty e
minusId :: Id
minusId = mkId [mkSimpleId $ show Cp, placeTok]
converseId :: Id
converseId = mkId [placeTok, mkSimpleId $ show Co]
displayMap :: DisplayMap
displayMap = Map.fromList $ map ( \ (i, l) -> (i, Map.singleton DF_LATEX l))
[ (minusId, [mkSimpleId "\\overline{", placeTok, mkSimpleId "}"])
, (converseId, [mkSimpleId "\\widetilde{", placeTok, mkSimpleId "}"])
, (inOp Fi, [mkSimpleId "\\cap"])
, (inOp Fu, [mkSimpleId "\\cup"])
, (inOp Fd, [mkSimpleId "\\dag"])
, (inOp Ri, [mkSimpleId "\\vdash"])
, (inOp Rr, [mkSimpleId "\\dashv"])
, (inOp Re, [mkSimpleId "\\equiv"])
, (stringToId $ show Co, [mkSimpleId "\\breve{~}"])
, (pOp K0, [mkSimpleId "\\texttt{*}"])
, (pOp K1, [mkSimpleId "\\texttt{+}"])
]
adlGA :: GlobalAnnos
adlGA = emptyGlobalAnnos
{ display_annos = displayMap }
instance Pretty Rule where
pretty e = useGlobalAnnos adlGA $ case e of
Tm r -> pretty r
MulExp o es ->
fcat . punctuate (pretty o) $ map
(prettyParen (\ a -> case a of
MulExp p _ -> p >= o || o == Rr && p == Ri
_ -> False)) es
UnExp o r -> (if o == Cp
then idApplDoc (pOp o) . (: [])
else (<> pretty o))
$ prettyParen (\ a -> case a of
MulExp _ _ -> True
UnExp p _ -> o /= Cp && p == Cp
_ -> False) r
instance Pretty Prop where
pretty = text . showUp
instance Pretty RangedProp where
pretty = pretty . propProp
instance Pretty Object where
pretty (Object n e as os) = sep
[ fsep [commentText (tokStr n) <> colon, pretty e]
, if null as then empty else fsep $ keyword "ALWAYS" : map pretty as
, if null os then empty else equals <+> brackets (ppWithCommas os) ]
instance Pretty RuleKind where
pretty = keyword . showRuleKind
instance Pretty RuleHeader where
pretty h = case h of
Always -> empty
RuleHeader k t -> keyword
(if k == SignalOn then "SIGNAL" else "RULE")
<+> pretty t <+> pretty k
instance Pretty KeyAtt where
pretty (KeyAtt mt e) = sep [case mt of
Nothing -> empty
Just t -> pretty t <> colon
, pretty e]
instance Pretty KeyDef where
pretty (KeyDef l c atts) = fsep
[ keyword "KEY"
, pretty l <> colon
, pretty c
, parens $ ppWithCommas atts ]
instance Pretty Pair where
pretty (Pair x y) = parens $ ppWithCommas [x, y]
prettyContent :: [Pair] -> Doc
prettyContent = brackets . vcat . punctuate semi . map pretty
instance Pretty PatElem where
pretty e = case e of
Pr k r -> pretty k <+> pretty r
Pg c1 c2 -> fsep [keyword "GEN", pretty c1, keyword "ISA", pretty c2]
Pk k -> pretty k
Pm ps (Sgn n (RelType c1 c2)) b ->
let u = rProp Uni
t = rProp Tot
f = elem u ps && elem t ps
ns = if f then delete t $ delete u ps else ps
in fsep
[ pretty n, text "::", pretty c1
, if f then funArrow else cross, pretty c2
, if null ns then empty else brackets $ ppWithCommas ns]
<> if b then empty else dot
Plug p o -> sep [keyword $ showUp p, pretty o]
Population b r l -> let d = prettyContent l in
if b then equals <+> d <> dot else fsep
[ keyword "POPULATION"
, pretty r
, keyword "CONTAINS"
, d ]
instance Pretty Context where
pretty (Context m ps) = let l = vcat $ map pretty ps in case m of
Nothing -> l
Just t -> vcat
[keyword "CONTEXT" <+> structId (tokStr t), l, keyword "ENDCONTEXT"]
|
spechub/Hets
|
Adl/Print.hs
|
gpl-2.0
| 4,856 | 0 | 23 | 1,326 | 1,915 | 978 | 937 | 132 | 3 |
module ExprLambdaDupVar where
main = \a a -> 0
|
roberth/uu-helium
|
test/staticerrors/ExprLambdaDupVar.hs
|
gpl-3.0
| 49 | 0 | 5 | 11 | 16 | 10 | 6 | 2 | 1 |
-- GSoC 2015 - Haskell bindings for OpenCog.
{-# LANGUAGE EmptyDataDecls #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE UndecidableInstances #-}
-- | This Module defines the relation between different atom types.
module OpenCog.AtomSpace.Inheritance (
type (<~)
, Children
) where
import GHC.Exts (Constraint)
import OpenCog.AtomSpace.AtomType (AtomType(..),Up(..),Down(..))
import Data.Typeable (Typeable)
-- | 'In' type level function to check if a type belongs to a list.
type family In a (b :: [AtomType]) :: Bool where
In a (a ': b) = 'True
In a (b ': c) = In a c
In a '[] = 'False
-- | 'FUp' type level function to get the list of all the ancestors
-- of a given atom type.
type family FUp a b :: [AtomType] where
FUp (x ': xs) a = x ': FUp xs (x ': a)
FUp '[] (x ': xs) = FUp (Up x) xs
FUp '[] '[] = '[]
-- | 'FDown' type level function to get the list of all descendants
-- of a given atom type.
type family FDown a b :: [AtomType] where
FDown (x ': xs) a = x ': FDown xs (x ': a)
FDown '[] (x ': xs) = FDown (Down x) xs
FDown '[] '[] = '[]
type Children a = FDown '[a] '[]
-- | 'IsParent'' is a predicate to decide if atom type b is an ancestor
-- of atom type a.
type family IsParent' a b :: Bool where
IsParent' a b = (In b (FUp '[a] '[]))
-- | 'IsParent' is a constraint asserting that 'b' is an ancestor of 'a'.
type IsParent a b = IsParent' a b ~ 'True
-- | 'ParConst' builds a list of constraints to assert that all the members of
-- the list are ancestors of a.
type family ParConst a (b :: [AtomType]) :: Constraint where
ParConst a '[] = Typeable a
ParConst a (b ': c) = (IsParent a b,ParConst a c)
-- | '<~' builds a list of constraints to assert that all the ancestors of b
-- (including b itself) are ancestors of a.
infix 9 <~
type a <~ b = ParConst a (FUp '[b] '[])
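-- Worked sketch (illustrative; 'ParentT' stands in for a real 'AtomType'
-- constructor and 'f' is a hypothetical function):
--
-- > f :: (a <~ 'ParentT) => proxy a -> ()
-- > f _ = ()
--
-- The constraint expands, via 'ParConst' over 'FUp', into one 'IsParent'
-- constraint per ancestor of 'ParentT', so 'f' only accepts atom types that
-- are descendants of 'ParentT' (including 'ParentT' itself).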
|
jswiergo/atomspace
|
opencog/haskell/OpenCog/AtomSpace/Inheritance.hs
|
agpl-3.0
| 2,218 | 0 | 11 | 597 | 585 | 339 | 246 | -1 | -1 |
-- Copyright (C) 2016-2017 Red Hat, Inc.
--
-- This library is free software; you can redistribute it and/or
-- modify it under the terms of the GNU Lesser General Public
-- License as published by the Free Software Foundation; either
-- version 2.1 of the License, or (at your option) any later version.
--
-- This library is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- Lesser General Public License for more details.
--
-- You should have received a copy of the GNU Lesser General Public
-- License along with this library; if not, see <http://www.gnu.org/licenses/>.
module BDCS.Files(insertFiles,
associateFilesWithBuild,
associateFilesWithSource,
associateFilesWithPackage,
files,
filesC,
getFile,
getKeyValuesForFile,
groupIdToFiles,
groupIdToFilesC,
pathToGroupId,
sourceIdToFiles,
sourceIdToFilesC)
where
import Control.Monad.IO.Class(MonadIO)
import Control.Monad.Trans.Resource(MonadResource)
import Data.Conduit((.|), Conduit, Source, toProducer)
import qualified Data.Conduit.List as CL
import qualified Data.Text as T
import Database.Esqueleto
import BDCS.DB
import BDCS.Utils.Conduit(awaitWith)
{-# ANN getKeyValuesForFile ("HLint: ignore Use ." :: String) #-}
{-# ANN groupIdToFiles ("HLint: ignore Use ." :: String) #-}
{-# ANN sourceIdToFiles ("HLint: ignore Use ." :: String) #-}
{-# ANN pathToGroupId ("HLint: ignore Use ." :: String) #-}
insertFiles :: MonadIO m => [Files] -> SqlPersistT m [Key Files]
insertFiles = mapM insert
associateFilesWithBuild :: MonadIO m => [Key Files] -> Key Builds -> SqlPersistT m [Key BuildFiles]
associateFilesWithBuild fs build =
mapM (\(fID, bID) -> insert $ BuildFiles bID fID)
(zip fs $ repeat build)
associateFilesWithSource :: MonadIO m => [Key Files] -> Key Sources -> SqlPersistT m [Key SourceFiles]
associateFilesWithSource fs source =
mapM (\(fID, sID) -> insert $ SourceFiles sID fID)
(zip fs $ repeat source)
associateFilesWithPackage :: MonadIO m => [Key Files] -> Key KeyVal -> SqlPersistT m [Key FileKeyValues]
associateFilesWithPackage fs package =
mapM (\(fID, pID) -> insert $ FileKeyValues fID pID)
(zip fs $ repeat package)
files :: MonadIO m => SqlPersistT m [Files]
files = do
results <- select $ from $ \file -> do
orderBy [asc (file ^. FilesPath)]
return file
return $ map entityVal results
filesC :: MonadResource m => Source (SqlPersistT m) Files
filesC = do
let source = selectSource $ from $ \file -> do
orderBy [asc (file ^. FilesPath)]
return file
source .| CL.map entityVal
getFile :: MonadIO m => Key Files -> SqlPersistT m (Maybe Files)
getFile key = firstEntityResult $
select $ from $ \file -> do
where_ $ file ^. FilesId ==. val key
limit 1
return file
getKeyValuesForFile :: MonadIO m => T.Text -> SqlPersistT m [KeyVal]
getKeyValuesForFile path = do
results <- select $ from $ \(file `InnerJoin` file_key_val `InnerJoin` key_val) -> do
on $ file ^. FilesId ==. file_key_val ^. FileKeyValuesFile_id &&.
key_val ^. KeyValId ==. file_key_val ^. FileKeyValuesKey_val_id
where_ $ file ^. FilesPath ==. val path
return key_val
return $ map entityVal results
groupIdToFiles :: MonadResource m => Key Groups -> Source (SqlPersistT m) Files
groupIdToFiles groupid = do
let source = selectSource $ from $ \(fs `InnerJoin` group_files) -> do
on $ fs ^. FilesId ==. group_files ^. GroupFilesFile_id
where_ $ group_files ^. GroupFilesGroup_id ==. val groupid
return fs
source .| CL.map entityVal
groupIdToFilesC :: MonadResource m => Conduit (Key Groups) (SqlPersistT m) Files
groupIdToFilesC = awaitWith $ \groupid -> toProducer (groupIdToFiles groupid) >> groupIdToFilesC
sourceIdToFiles :: MonadResource m => Key Sources -> Source (SqlPersistT m) Files
sourceIdToFiles sourceid = do
let source = selectSource $ from $ \(fs `InnerJoin` source_files) -> do
on $ fs ^. FilesId ==. source_files ^. SourceFilesFile_id
where_ $ source_files ^. SourceFilesSource_id ==. val sourceid
return fs
source .| CL.map entityVal
sourceIdToFilesC :: MonadResource m => Conduit (Key Sources) (SqlPersistT m) Files
sourceIdToFilesC = awaitWith $ \sourceid -> toProducer (sourceIdToFiles sourceid) >> sourceIdToFilesC
pathToGroupId :: MonadIO m => T.Text -> SqlPersistT m [Key Groups]
pathToGroupId path = do
vals <- select $ distinct $ from $ \(group_files `InnerJoin` fs) -> do
on $ group_files ^. GroupFilesFile_id ==. fs ^. FilesId
where_ $ fs ^. FilesPath ==. val path
return $ group_files ^. GroupFilesGroup_id
return $ map unValue vals
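-- Usage sketch (illustrative; runs inside an already-open
-- SqlPersistT/ResourceT context, in a caller that also imports
-- "Data.Conduit" and "Data.Conduit.List"):
--
-- > allFiles <- runConduit (groupIdToFiles gid .| CL.consume)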
|
atodorov/bdcs
|
src/BDCS/Files.hs
|
lgpl-2.1
| 5,258 | 0 | 19 | 1,390 | 1,347 | 686 | 661 | 90 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-missing-fields #-}
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
{-# OPTIONS_GHC -fno-warn-name-shadowing #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-----------------------------------------------------------------
-- Autogenerated by Thrift
-- --
-- DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
-- @generated
-----------------------------------------------------------------
module Includes_Consts where
import Prelude ( Bool(..), Enum, Float, IO, Double, String, Maybe(..),
Eq, Show, Ord,
concat, error, fromIntegral, fromEnum, length, map,
maybe, not, null, otherwise, return, show, toEnum,
enumFromTo, Bounded, minBound, maxBound, seq, succ,
pred, enumFrom, enumFromThen, enumFromThenTo,
(.), (&&), (||), (==), (++), ($), (-), (>>=), (>>))
import qualified Control.Applicative as Applicative (ZipList(..))
import Control.Applicative ( (<*>) )
import qualified Control.DeepSeq as DeepSeq
import qualified Control.Exception as Exception
import qualified Control.Monad as Monad ( liftM, ap, when )
import qualified Data.ByteString.Lazy as BS
import Data.Functor ( (<$>) )
import qualified Data.Hashable as Hashable
import qualified Data.Int as Int
import Data.List
import qualified Data.Maybe as Maybe (catMaybes)
import qualified Data.Text.Lazy.Encoding as Encoding ( decodeUtf8, encodeUtf8 )
import qualified Data.Text.Lazy as LT
import qualified Data.Typeable as Typeable ( Typeable )
import qualified Data.HashMap.Strict as Map
import qualified Data.HashSet as Set
import qualified Data.Vector as Vector
import qualified Test.QuickCheck.Arbitrary as Arbitrary ( Arbitrary(..) )
import qualified Test.QuickCheck as QuickCheck ( elements )
import qualified Thrift
import qualified Thrift.Types as Types
import qualified Thrift.Serializable as Serializable
import qualified Thrift.Arbitraries as Arbitraries
import qualified Transitive_Types as Transitive_Types
import qualified Includes_Types
exampleIncluded :: Includes_Types.Included
exampleIncluded = Includes_Types.default_Included{Includes_Types.included_MyIntField = 2, Includes_Types.included_MyTransitiveField = Transitive_Types.default_Foo{Transitive_Types.foo_a = 2}}
includedConstant :: Int.Int64
includedConstant = 42
|
getyourguide/fbthrift
|
thrift/compiler/test/fixtures/includes/gen-hs/Includes_Consts.hs
|
apache-2.0
| 2,526 | 0 | 8 | 430 | 483 | 343 | 140 | 44 | 1 |
module BitcoinCore.Transaction.Optcodes (OPCODE(..), opcodeTable) where
import Data.Tuple (swap)
import Data.List (lookup)
import Data.Maybe (fromMaybe)
import Test.QuickCheck.Arbitrary (Arbitrary(..))
import Test.QuickCheck.Gen (elements)
data OPCODE
= OP_FALSE
| OP_PUSHDATA1
| OP_PUSHDATA2
| OP_PUSHDATA4
| OP_1NEGATE
| OP_TRUE
| OP_2
| OP_3
| OP_4
| OP_5
| OP_6
| OP_7
| OP_8
| OP_9
| OP_10
| OP_11
| OP_12
| OP_13
| OP_14
| OP_15
| OP_16
| OP_NOP
| OP_IF
| OP_NOTIF
| OP_ELSE
| OP_ENDIF
| OP_VERIFY
| OP_RETURN
| OP_TOALTSTACK
| OP_FROMALTSTACK
| OP_IFDUP
| OP_DEPTH
| OP_DROP
| OP_DUP
| OP_NIP
| OP_OVER
| OP_PICK
| OP_ROLL
| OP_ROT
| OP_SWAP
| OP_TUCK
| OP_2DROP
| OP_2DUP
| OP_3DUP
| OP_2OVER
| OP_2ROT
| OP_2SWAP
| OP_CAT
| OP_SUBSTR
| OP_LEFT
| OP_RIGHT
| OP_SIZE
| OP_INVERT
| OP_AND
| OP_OR
| OP_XOR
| OP_EQUAL
| OP_EQUALVERIFY
| OP_1ADD
| OP_1SUB
| OP_2MUL
| OP_2DIV
| OP_NEGATE
| OP_ABS
| OP_NOT
| OP_0NOTEQUAL
| OP_ADD
| OP_SUB
| OP_MUL
| OP_DIV
| OP_MOD
| OP_LSHIFT
| OP_RSHIFT
| OP_BOOLAND
| OP_BOOLOR
| OP_NUMEQUAL
| OP_NUMEQUALVERIFY
| OP_NUMNOTEQUAL
| OP_LESSTHAN
| OP_GREATERTHAN
| OP_LESSTHANOREQUAL
| OP_GREATERTHANOREQUAL
| OP_MIN
| OP_MAX
| OP_WITHIN
| OP_RIPEMD160
| OP_SHA1
| OP_SHA256
| OP_HASH160
| OP_HASH256
| OP_CODESEPERATOR
| OP_CHECKSIG
| OP_CHECKSIGVERIFY
| OP_CHECKMULTISIG
| OP_CHECKMULTISIGVERIFY
| OP_CHECKLOCKTIMEVERIFY
| OP_CHECKSEQUENCEVERIFY
| OP_PUBKEYHASH
| OP_PUBKEY
| OP_INVALIDOPCODE
deriving (Eq, Show)
instance Enum OPCODE where
fromEnum opcode = fromMaybe
(error $ "Unable to lookup opcode " ++ show opcode)
(lookup opcode opcodeTable)
toEnum i = fromMaybe
(error $ "Unable to lookup opcode with i " ++ show i)
(lookup i $ map swap opcodeTable)
opcodeTable :: [(OPCODE, Int)]
opcodeTable =
[ (OP_FALSE, 0)
, (OP_PUSHDATA1, 76)
, (OP_PUSHDATA2, 77)
, (OP_PUSHDATA4, 78)
, (OP_1NEGATE, 79)
, (OP_TRUE, 81)
, (OP_2, 82)
, (OP_3, 83)
, (OP_4, 84)
, (OP_5, 85)
, (OP_6, 86)
, (OP_7, 87)
, (OP_8, 88)
, (OP_9, 89)
, (OP_10, 90)
, (OP_11, 91)
, (OP_12, 92)
, (OP_13, 93)
, (OP_14, 94)
, (OP_15, 95)
, (OP_16, 96)
, (OP_NOP, 97)
, (OP_IF, 99)
, (OP_NOTIF, 100)
, (OP_ELSE, 103)
, (OP_ENDIF, 104)
, (OP_VERIFY, 105)
, (OP_RETURN, 106)
, (OP_TOALTSTACK, 107)
, (OP_FROMALTSTACK, 108)
, (OP_IFDUP, 115)
, (OP_DEPTH, 116)
, (OP_DROP, 117)
, (OP_DUP, 118)
, (OP_NIP, 119)
, (OP_OVER, 120)
, (OP_PICK, 121)
, (OP_ROLL, 122)
, (OP_ROT, 123)
, (OP_SWAP, 124)
, (OP_TUCK, 125)
, (OP_2DROP, 109)
, (OP_2DUP, 110)
, (OP_3DUP, 111)
, (OP_2OVER, 112)
, (OP_2ROT, 113)
, (OP_2SWAP, 114)
, (OP_CAT, 126)
, (OP_SUBSTR, 127)
, (OP_LEFT, 128)
, (OP_RIGHT, 129)
, (OP_SIZE, 130)
, (OP_INVERT, 131)
, (OP_AND, 132)
, (OP_OR, 133)
, (OP_XOR, 134)
, (OP_EQUAL, 135)
, (OP_EQUALVERIFY, 136)
, (OP_1ADD, 139)
, (OP_1SUB, 140)
, (OP_2MUL, 141)
, (OP_2DIV, 142)
, (OP_NEGATE, 143)
, (OP_ABS, 144)
, (OP_NOT, 145)
, (OP_0NOTEQUAL, 146)
, (OP_ADD, 147)
, (OP_SUB, 148)
, (OP_MUL, 149)
, (OP_DIV, 150)
, (OP_MOD, 151)
, (OP_LSHIFT, 152)
, (OP_RSHIFT, 153)
, (OP_BOOLAND, 154)
, (OP_BOOLOR, 155)
, (OP_NUMEQUAL, 156)
, (OP_NUMEQUALVERIFY, 157)
, (OP_NUMNOTEQUAL, 158)
, (OP_LESSTHAN, 159)
, (OP_GREATERTHAN, 160)
, (OP_LESSTHANOREQUAL, 161)
, (OP_GREATERTHANOREQUAL, 162)
, (OP_MIN, 163)
, (OP_MAX, 164)
, (OP_WITHIN, 165)
, (OP_RIPEMD160, 166)
, (OP_SHA1, 167)
, (OP_SHA256, 168)
, (OP_HASH160, 169)
, (OP_HASH256, 170)
, (OP_CODESEPERATOR, 171)
, (OP_CHECKSIG, 172)
, (OP_CHECKSIGVERIFY, 173)
, (OP_CHECKMULTISIG, 174)
, (OP_CHECKMULTISIGVERIFY, 175)
, (OP_CHECKLOCKTIMEVERIFY, 177)
, (OP_CHECKSEQUENCEVERIFY, 178)
, (OP_PUBKEYHASH, 253)
, (OP_PUBKEY, 254)
, (OP_INVALIDOPCODE, 255)]
instance Arbitrary OPCODE where
arbitrary = do
let opcodes = map fst opcodeTable
elements opcodes
|
clample/lamdabtc
|
backend/src/BitcoinCore/Transaction/Optcodes.hs
|
bsd-3-clause
| 4,136 | 0 | 11 | 1,027 | 1,427 | 924 | 503 | 221 | 1 |
{-# LANGUAGE Haskell98 #-}
{-# LINE 1 "src/Text/Read/Compat.hs" #-}
{-# LANGUAGE CPP, NoImplicitPrelude #-}
module Text.Read.Compat (
-- * The 'Read' class
Read(..),
ReadS,
-- * Haskell 2010 functions
reads,
read,
readParen,
lex,
-- * New parsing functions
module Text.ParserCombinators.ReadPrec,
L.Lexeme(..),
lexP,
parens,
readListDefault,
readListPrecDefault,
readEither,
readMaybe
) where
import Text.Read
import Text.ParserCombinators.ReadPrec
import qualified Text.Read.Lex as L
|
phischu/fragnix
|
tests/packages/scotty/Text.Read.Compat.hs
|
bsd-3-clause
| 591 | 0 | 5 | 164 | 91 | 64 | 27 | 21 | 0 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="da-DK">
<title>ToDo-List</title>
<maps>
<homeID>todo</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
denniskniep/zap-extensions
|
addOns/todo/src/main/javahelp/help_da_DK/helpset_da_DK.hs
|
apache-2.0
| 955 | 77 | 67 | 155 | 408 | 207 | 201 | -1 | -1 |
{-# LANGUAGE MultiParamTypeClasses, FlexibleContexts #-}
module Opaleye.SQLite.Internal.Binary where
import Opaleye.SQLite.Internal.Column (Column(Column))
import qualified Opaleye.SQLite.Internal.Tag as T
import qualified Opaleye.SQLite.Internal.PackMap as PM
import qualified Opaleye.SQLite.Internal.HaskellDB.PrimQuery as HPQ
import Data.Profunctor (Profunctor, dimap)
import Data.Profunctor.Product (ProductProfunctor, empty, (***!))
import qualified Data.Profunctor.Product as PP
import Data.Profunctor.Product.Default (Default, def)
import Control.Applicative (Applicative, pure, (<*>))
import Control.Arrow ((***))
extractBinaryFields :: T.Tag -> (HPQ.PrimExpr, HPQ.PrimExpr)
-> PM.PM [(HPQ.Symbol, (HPQ.PrimExpr, HPQ.PrimExpr))]
HPQ.PrimExpr
extractBinaryFields = PM.extractAttr "binary"
newtype Binaryspec columns columns' =
Binaryspec (PM.PackMap (HPQ.PrimExpr, HPQ.PrimExpr) HPQ.PrimExpr
(columns, columns) columns')
runBinaryspec :: Applicative f => Binaryspec columns columns'
-> ((HPQ.PrimExpr, HPQ.PrimExpr) -> f HPQ.PrimExpr)
-> (columns, columns) -> f columns'
runBinaryspec (Binaryspec b) = PM.traversePM b
binaryspecColumn :: Binaryspec (Column a) (Column a)
binaryspecColumn = Binaryspec (PM.PackMap (\f (Column e, Column e')
-> fmap Column (f (e, e'))))
instance Default Binaryspec (Column a) (Column a) where
def = binaryspecColumn
-- {
-- Boilerplate instance definitions. Theoretically, these are derivable.
instance Functor (Binaryspec a) where
fmap f (Binaryspec g) = Binaryspec (fmap f g)
instance Applicative (Binaryspec a) where
pure = Binaryspec . pure
Binaryspec f <*> Binaryspec x = Binaryspec (f <*> x)
instance Profunctor Binaryspec where
dimap f g (Binaryspec b) = Binaryspec (dimap (f *** f) g b)
instance ProductProfunctor Binaryspec where
empty = PP.defaultEmpty
(***!) = PP.defaultProfunctorProduct
-- }
|
bergmark/haskell-opaleye
|
opaleye-sqlite/src/Opaleye/SQLite/Internal/Binary.hs
|
bsd-3-clause
| 2,088 | 0 | 13 | 458 | 598 | 341 | 257 | 38 | 1 |
{-# OPTIONS -cpp #-}
{-# LANGUAGE LambdaCase #-}
module Main where
import Control.Concurrent (forkIO, threadDelay)
import Control.Concurrent.MVar (putMVar, takeMVar, newEmptyMVar)
import Control.Monad
import Control.Exception
import Data.Maybe (isNothing)
import System.Environment (getArgs)
import System.Exit
import System.IO (hPutStrLn, stderr)
#if !defined(mingw32_HOST_OS)
import System.Posix hiding (killProcess)
import System.IO.Error hiding (try,catch)
#endif
#if defined(mingw32_HOST_OS)
import System.Process
import WinCBindings
import Foreign
import System.Win32.DebugApi
import System.Win32.Types
import System.Win32.Console.CtrlHandler
#endif
main :: IO ()
main = do
args <- getArgs
case args of
[secs,cmd] ->
case reads secs of
[(secs', "")] -> run secs' cmd
_ -> die ("Can't parse " ++ show secs ++ " as a number of seconds")
_ -> die ("Bad arguments " ++ show args)
run :: Int -> String -> IO ()
#if !defined(mingw32_HOST_OS)
run secs cmd = do
m <- newEmptyMVar
mp <- newEmptyMVar
installHandler sigINT (Catch (putMVar m Nothing)) Nothing
forkIO $ do threadDelay (secs * 1000000)
putMVar m Nothing
forkIO $ do ei <- try $ do pid <- systemSession cmd
return pid
putMVar mp ei
case ei of
Left _ -> return ()
Right pid -> do
r <- getProcessStatus True False pid
putMVar m r
ei_pid_ph <- takeMVar mp
case ei_pid_ph of
Left e -> do hPutStrLn stderr
("Timeout:\n" ++ show (e :: IOException))
exitWith (ExitFailure 98)
Right pid -> do
r <- takeMVar m
case r of
Nothing -> do
killProcess pid
exitWith (ExitFailure 99)
Just (Exited r) -> exitWith r
Just (Terminated s) -> raiseSignal s
Just _ -> exitWith (ExitFailure 1)
systemSession cmd =
forkProcess $ do
createSession
executeFile "/bin/sh" False ["-c", cmd] Nothing
-- need to use exec() directly here, rather than something like
-- System.Process.system, because we are in a forked child and some
-- pthread libraries get all upset if you start doing certain
-- things in a forked child of a pthread process, such as forking
-- more threads.
killProcess pid = do
ignoreIOExceptions (signalProcessGroup sigTERM pid)
checkReallyDead 10
where
checkReallyDead 0 = hPutStrLn stderr "checkReallyDead: Giving up"
checkReallyDead (n+1) =
do threadDelay (3*100000) -- 3/10 sec
m <- tryJust (guard . isDoesNotExistError) $
getProcessStatus False False pid
case m of
Right Nothing -> return ()
Left _ -> return ()
_ -> do
ignoreIOExceptions (signalProcessGroup sigKILL pid)
checkReallyDead n
ignoreIOExceptions :: IO () -> IO ()
ignoreIOExceptions io = io `catch` ((\_ -> return ()) :: IOException -> IO ())
#else
run secs cmd =
let escape '\\' = "\\\\"
escape '"' = "\\\""
escape c = [c]
cmd' = "sh -c \"" ++ concatMap escape cmd ++ "\"" in
alloca $ \p_startupinfo ->
alloca $ \p_pi ->
withTString cmd' $ \cmd'' ->
do job <- createJobObjectW nullPtr nullPtr
b_info <- setJobParameters job
unless b_info $ errorWin "setJobParameters"
ioPort <- createCompletionPort job
when (ioPort == nullPtr) $ errorWin "createCompletionPort, cannot continue."
-- We're explicitly turning off handle inheritance to prevent misc handles
-- from being inherited by the child. Notably we don't want the I/O Completion
-- Ports and Job handles to be inherited. So we mark them as non-inheritable.
setHandleInformation job cHANDLE_FLAG_INHERIT 0
setHandleInformation ioPort cHANDLE_FLAG_INHERIT 0
-- Now create the process suspended so we can add it to the job and then resume.
-- This is so we don't miss any events on the receiving end of the I/O port.
let creationflags = cCREATE_SUSPENDED
b <- createProcessW nullPtr cmd'' nullPtr nullPtr True
creationflags
nullPtr nullPtr p_startupinfo p_pi
unless b $ errorWin "createProcessW"
pi <- peek p_pi
b_assign <- assignProcessToJobObject job (piProcess pi)
unless b_assign $ errorWin "assignProcessToJobObject, cannot continue."
let handleInterrupt action =
action `onException` terminateJobObject job 99
handleCtrl _ = do
terminateJobObject job 99
closeHandle ioPort
closeHandle job
exitWith (ExitFailure 99)
return True
withConsoleCtrlHandler handleCtrl $
handleInterrupt $ do
resumeThread (piThread pi)
-- The program is now running
let handle = piProcess pi
let millisecs = secs * 1000
rc <- waitForJobCompletion job ioPort (fromIntegral millisecs)
closeHandle ioPort
if not rc
then do terminateJobObject job 99
closeHandle job
exitWith (ExitFailure 99)
else alloca $ \p_exitCode ->
do terminateJobObject job 0
             -- Ensure it's all really dead.
closeHandle job
r <- getExitCodeProcess handle p_exitCode
if r
then peek p_exitCode >>= \case
0 -> exitWith ExitSuccess
e -> exitWith $ ExitFailure (fromIntegral e)
else errorWin "getExitCodeProcess"
#endif
|
sdiehl/ghc
|
testsuite/timeout/timeout.hs
|
bsd-3-clause
| 6,100 | 1 | 19 | 2,164 | 873 | 433 | 440 | -1 | -1 |
import System.IO (hFlush, stdout)
import System.Environment (getArgs)
import Control.Monad (mapM)
import Control.Monad.Except (runExceptT)
import Control.Monad.Trans (liftIO)
import qualified Data.Map as Map
import qualified Data.Traversable as DT
import Readline (readline, load_history)
import Types
import Reader (read_str)
import Printer (_pr_str)
import Env (Env, env_new, env_bind, env_get, env_set)
import Core as Core
-- read
mal_read :: String -> IOThrows MalVal
mal_read str = read_str str
-- eval
is_pair (MalList x _:xs) = True
is_pair (MalVector x _:xs) = True
is_pair _ = False
quasiquote :: MalVal -> MalVal
quasiquote ast =
case ast of
(MalList (MalSymbol "unquote" : a1 : []) _) -> a1
(MalList (MalList (MalSymbol "splice-unquote" : a01 : []) _ : rest) _) ->
MalList [(MalSymbol "concat"), a01, quasiquote (MalList rest Nil)] Nil
(MalVector (MalList (MalSymbol "splice-unquote" : a01 : []) _ : rest) _) ->
MalList [(MalSymbol "concat"), a01, quasiquote (MalVector rest Nil)] Nil
(MalList (a0 : rest) _) -> MalList [(MalSymbol "cons"),
quasiquote a0,
quasiquote (MalList rest Nil)] Nil
(MalVector (a0 : rest) _) -> MalList [(MalSymbol "cons"),
quasiquote a0,
quasiquote (MalVector rest Nil)] Nil
_ -> MalList [(MalSymbol "quote"), ast] Nil
eval_ast :: MalVal -> Env -> IOThrows MalVal
eval_ast sym@(MalSymbol _) env = env_get env sym
eval_ast ast@(MalList lst m) env = do
new_lst <- mapM (\x -> (eval x env)) lst
return $ MalList new_lst m
eval_ast ast@(MalVector lst m) env = do
new_lst <- mapM (\x -> (eval x env)) lst
return $ MalVector new_lst m
eval_ast ast@(MalHashMap lst m) env = do
new_hm <- DT.mapM (\x -> (eval x env)) lst
return $ MalHashMap new_hm m
eval_ast ast env = return ast
let_bind :: Env -> [MalVal] -> IOThrows Env
let_bind env [] = return env
let_bind env (b:e:xs) = do
evaled <- eval e env
x <- liftIO $ env_set env b evaled
let_bind env xs
apply_ast :: MalVal -> Env -> IOThrows MalVal
apply_ast ast@(MalList [] _) env = do
return ast
apply_ast ast@(MalList (MalSymbol "def!" : args) _) env = do
case args of
(a1@(MalSymbol _): a2 : []) -> do
evaled <- eval a2 env
liftIO $ env_set env a1 evaled
_ -> throwStr "invalid def!"
apply_ast ast@(MalList (MalSymbol "let*" : args) _) env = do
case args of
(a1 : a2 : []) -> do
params <- (_to_list a1)
let_env <- liftIO $ env_new $ Just env
let_bind let_env params
eval a2 let_env
_ -> throwStr "invalid let*"
apply_ast ast@(MalList (MalSymbol "quote" : args) _) env = do
case args of
a1 : [] -> return a1
_ -> throwStr "invalid quote"
apply_ast ast@(MalList (MalSymbol "quasiquote" : args) _) env = do
case args of
a1 : [] -> eval (quasiquote a1) env
_ -> throwStr "invalid quasiquote"
apply_ast ast@(MalList (MalSymbol "do" : args) _) env = do
case args of
([]) -> return Nil
_ -> do
el <- eval_ast (MalList args Nil) env
case el of
(MalList lst _) -> return $ last lst
apply_ast ast@(MalList (MalSymbol "if" : args) _) env = do
case args of
(a1 : a2 : a3 : []) -> do
cond <- eval a1 env
if cond == MalFalse || cond == Nil
then eval a3 env
else eval a2 env
(a1 : a2 : []) -> do
cond <- eval a1 env
if cond == MalFalse || cond == Nil
then return Nil
else eval a2 env
_ -> throwStr "invalid if"
apply_ast ast@(MalList (MalSymbol "fn*" : args) _) env = do
case args of
(a1 : a2 : []) -> do
params <- (_to_list a1)
return $ (_malfunc a2 env (MalList params Nil)
(\args -> do
fn_env1 <- liftIO $ env_new $ Just env
fn_env2 <- liftIO $ env_bind fn_env1 params args
eval a2 fn_env2))
_ -> throwStr "invalid fn*"
apply_ast ast@(MalList _ _) env = do
el <- eval_ast ast env
case el of
(MalList ((Func (Fn f) _) : rest) _) ->
f $ rest
(MalList ((MalFunc {ast=ast, env=fn_env, params=(MalList params Nil)}) : rest) _) -> do
fn_env1 <- liftIO $ env_new $ Just fn_env
fn_env2 <- liftIO $ env_bind fn_env1 params rest
eval ast fn_env2
el ->
throwStr $ "invalid apply: " ++ (show el)
eval :: MalVal -> Env -> IOThrows MalVal
eval ast env = do
case ast of
(MalList _ _) -> apply_ast ast env
_ -> eval_ast ast env
-- print
mal_print :: MalVal -> String
mal_print exp = show exp
-- repl
rep :: Env -> String -> IOThrows String
rep env line = do
ast <- mal_read line
exp <- eval ast env
return $ mal_print exp
repl_loop :: Env -> IO ()
repl_loop env = do
line <- readline "user> "
case line of
Nothing -> return ()
Just "" -> repl_loop env
Just str -> do
res <- runExceptT $ rep env str
out <- case res of
Left (StringError str) -> return $ "Error: " ++ str
Left (MalValError mv) -> return $ "Error: " ++ (show mv)
Right val -> return val
putStrLn out
hFlush stdout
repl_loop env
main = do
args <- getArgs
load_history
repl_env <- env_new Nothing
-- core.hs: defined using Haskell
(mapM (\(k,v) -> (env_set repl_env (MalSymbol k) v)) Core.ns)
env_set repl_env (MalSymbol "eval") (_func (\[ast] -> eval ast repl_env))
env_set repl_env (MalSymbol "*ARGV*") (MalList [] Nil)
-- core.mal: defined using the language itself
runExceptT $ rep repl_env "(def! not (fn* (a) (if a false true)))"
runExceptT $ rep repl_env "(def! load-file (fn* (f) (eval (read-string (str \"(do \" (slurp f) \")\")))))"
if length args > 0 then do
env_set repl_env (MalSymbol "*ARGV*") (MalList (map MalString (drop 1 args)) Nil)
runExceptT $ rep repl_env $ "(load-file \"" ++ (args !! 0) ++ "\")"
return ()
else
repl_loop repl_env
|
0gajun/mal
|
haskell/step7_quote.hs
|
mpl-2.0
| 6,464 | 0 | 21 | 2,140 | 2,468 | 1,218 | 1,250 | 158 | 13 |
{-# LANGUAGE NoImplicitPrelude #-}
module Stack.Options.HaddockParser where
import Options.Applicative
import Options.Applicative.Args
import Stack.Options.Utils
import Stack.Prelude
import Stack.Types.Config
-- | Parser for haddock arguments.
haddockOptsParser :: Bool -> Parser HaddockOptsMonoid
haddockOptsParser hide0 =
HaddockOptsMonoid <$> fmap (fromMaybe [])
(optional
(argsOption
(long "haddock-arguments" <>
metavar "HADDOCK_ARGS" <>
help "Arguments passed to the haddock program" <>
hide)))
where hide = hideMods hide0
|
MichielDerhaeg/stack
|
src/Stack/Options/HaddockParser.hs
|
bsd-3-clause
| 771 | 0 | 15 | 301 | 120 | 65 | 55 | 17 | 1 |
module ShouldFail where
import ImpExp_Exp
single :: [a] -> Maybe a
single (Single x) = Just x
single _ = Nothing
|
siddhanathan/ghc
|
testsuite/tests/patsyn/should_compile/ImpExp_Imp.hs
|
bsd-3-clause
| 115 | 0 | 7 | 23 | 47 | 25 | 22 | 5 | 1 |
{-# LANGUAGE QuasiQuotes #-}
module Main where
main :: IO ()
main = p undefined
where
p = \parse -> case () of
[parse||] -> return ()
_ -> return ()
|
urbanslug/ghc
|
testsuite/tests/quasiquotation/qq004/qq004.hs
|
bsd-3-clause
| 196 | 0 | 12 | 78 | 67 | 37 | 30 | 7 | 2 |
{-# htermination (zipWith :: (b -> c -> a) -> (List b) -> (List c) -> (List a)) #-}
import qualified Prelude
data MyBool = MyTrue | MyFalse
data List a = Cons a (List a) | Nil
zipWith :: (a -> b -> c) -> (List a) -> (List b) -> (List c);
zipWith z (Cons a as) (Cons b bs) = Cons (z a b) (zipWith z as bs);
zipWith vv vw vx = Nil;
|
ComputationWithBoundedResources/ara-inference
|
doc/tpdb_trs/Haskell/basic_haskell/zipWith_1.hs
|
mit
| 355 | 0 | 9 | 102 | 149 | 80 | 69 | 6 | 1 |
{-# LANGUAGE FlexibleInstances #-}
module Language.Lambda.Util.PrettyPrint where
import qualified Data.List as L
class PrettyPrint a where
prettyPrint :: a -> String
instance PrettyPrint String where
prettyPrint = id
newtype PDoc s = PDoc [s]
deriving (Eq, Show)
instance PrettyPrint s => PrettyPrint (PDoc s) where
prettyPrint (PDoc ls) = concatMap prettyPrint ls
instance Monoid (PDoc s) where
mempty = empty
(PDoc p1) `mappend` (PDoc p2) = PDoc $ p1 ++ p2
instance Functor PDoc where
fmap f (PDoc ls) = PDoc (fmap f ls)
empty :: PDoc s
empty = PDoc []
add :: s -> PDoc s -> PDoc s
add s (PDoc ps) = PDoc (s:ps)
append :: [s] -> PDoc s -> PDoc s
append = mappend . PDoc
between :: PDoc s -> s -> s -> PDoc s -> PDoc s
between (PDoc str) start end pdoc = PDoc ((start:str) ++ [end]) `mappend` pdoc
betweenParens :: PDoc String -> PDoc String -> PDoc String
betweenParens doc = between doc "(" ")"
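-- e.g. prettyPrint (betweenParens (PDoc ["f x"]) empty) == "(f x)"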
intercalate :: [[s]] -> [s] -> PDoc [s] -> PDoc [s]
intercalate ss sep = add $ L.intercalate sep ss
addSpace :: PDoc String -> PDoc String
addSpace = add [space]
space :: Char
space = ' '
lambda :: Char
lambda = 'λ'
upperLambda :: Char
upperLambda = 'Λ'
|
sgillespie/lambda-calculus
|
src/Language/Lambda/Util/PrettyPrint.hs
|
mit
| 1,190 | 0 | 10 | 252 | 520 | 271 | 249 | 36 | 1 |
module Examples where
import Text.Blaze.Html5 hiding (nav,map,progress)
import Text.Blaze.Html5.Attributes hiding (content)
import Text.Blaze.Html.Renderer.Pretty
import Data.Monoid
import Utils
import Components
import Grid
import Templates
import Models
import CDN
import Javascript
import Ratchet
{-
This example is pretty messy, but it should get the idea across. Some of the building of the inner HTML components should be delegated to utility functions.
-}
ex1InnerHtml = innerHtml
where
n = nav [a (toHtml "Foo") ! href (toValue "#"), a (toHtml "Bar") ! href (toValue "#")] Pills
jc = (h1 (toHtml "This is the jumbotron heading!") >> p (toHtml "This is the jumbotron body. There could be some additional text here. Notice how the heading and body are within a container.") >> bootstrapButton Primary (toHtml "Button"))
j = jumbotron (container jc)
b = h1 (toHtml "Container heading") >> p (toHtml "This should be in a container.")
als = concatHtml [alert (toHtml (show t)) t | t <- [Success .. Danger] ]
btnExpl = h2 (toHtml "These are the button types from Large to ExtraSmall")
btns = btnExpl >> (concatHtml . toHtmlRows) [concatHtml [bootstrapButton_ t (toHtml (show t)) size | t <- [Default .. Link]] | size <- [Large .. ExtraSmall]]
innerHtml = (n >> j >> (container (b >> als >> btns)))
ex1 = rawTemplate_ ex1InnerHtml 0
toLink text path = a (toHtml text) ! href (toValue path)
stringLsAsHtml :: [String] -> [Html]
stringLsAsHtml strLs = map toHtml strLs
latinStrings = ["Cras justo odio", "Dapibus ac facilisis in","Morbi leo risus","Porta ac consectetur a","Vestibulum at eros"]
latinHtmlLs = stringLsAsHtml latinStrings
{-
Generate a bunch of components found in the Components module. The documentation for this corresponds to Bootstrap 3/Components.
-}
componentExample =
let
btns = mconcat [bootstrapButton infoType h | infoType <- [Default .. Link], h <- (take 2 latinHtmlLs) ]
bs = [bootstrapButton infoType h | (infoType,h) <- (zip [Default .. Link] latinHtmlLs)]
bg = buttonGroup bs
bars = mconcat $ map (\pb -> mconcat [progress pb [],progress pb ["striped"],progress pb ["striped","active"]]) $ [progressBar noHtml infoType width | infoType <- [Primary .. Danger], width <- [25,100]]
lsGrp = listGroup latinHtmlLs
pgr = pager [toLink "Previous" "#", toLink "Next" "#"]
pgs = pagination $ [toLink "<<" "#"] ++ [toLink (show i) "#" | i <- [1..5]] ++ [toLink ">>" "#"]
bcumbs = breadcrumbs [toLink "Foo" "#", toLink "Bar" "#", toLink "Baz" "#"]
pnls = mconcat [panel (toHtml "This is a heading") (toHtml "This is the body") noHtml (toHtml "This is the footer") infoType | infoType <- [Default .. Danger]]
-- Wrap the html in a container.
allHtml = container $ mconcat [btns,bg,bars,lsGrp,pnls,pgs,pgr,bcumbs]
in
allHtml
simpleModal = modal (h4 (toHtml "Modal heading")) (p (toHtml "Modal body")) footerBtns
where
footerBtns = mconcat [bootstrapButton Default (toHtml "Close"), bootstrapButton Primary (toHtml "Save Changes")]
mtgImages = ["http://media.wizards.com/images/magic/daily/wallpapers/Wallpapper_JOU_PW01_Miller_1920x1080.jpg","http://media.wizards.com/images/magic/daily/wallpapers/WeightoftheUnderworld_BNG_1920x1080_Wallpaper.jpg","http://media.wizards.com/images/magic/daily/wallpapers/Sunbond_BNG_1920x1080_Wallpaper.jpg"]
mtgImagesMobile = [
"http://media.wizards.com/images/magic/daily/wallpapers/Wallpaper_JOU_05_Barger_iPhone.jpg",
"http://media.wizards.com/images/magic/daily/wallpapers/Wallpaper_KeyArt_JOU_iPhone.jpg",
"http://media.wizards.com/images/magic/daily/wallpapers/Wallpapper_JOU_PW01_Miller_iPhone.jpg"
]
simpleCarousel = carousel "myCarousel" mtgImages
{-
Use all available bootswatch themes with the given inner Html content.
-}
allBootswatchThemes :: Html -> [Html]
allBootswatchThemes innerHtml = [rawTemplate_ innerHtml i | i <-[0..(length cssLinks)-1]]
saveAsAllBootswatchThemes rootDir name innerHtml = sequence_ [saveHtmlFile (rootDir++"/"++(name ++ (show i))++".html") h | (i,h) <- (zip [0..(length cssLinks)-1] (allBootswatchThemes innerHtml))]
-- Ratchet
allRatchetPlatforms :: Html -> [Html]
allRatchetPlatforms innerHtml = [ratchetTemplate platform innerHtml | platform <- [Android .. Standard]]
saveAllRatchetPlatforms rootDir name innerHtml = sequence_ [saveHtmlFile (rootDir++"/"++(name ++ "-"++(show platform))++".html") h | (platform,h) <- (zip [Android .. Standard] (allRatchetPlatforms innerHtml))]
basicRatchet = innerHtml
where
topNav = tabBar $ mconcat [
tabItem (ratchicon iconName >> (tabLabel (toHtml name))) (toValue "#")
| (iconName,name) <- zip
["home","person","star-filled","search","gear"]
["Home","Profile","Favorites","Search","Settings"]
]
tvi1 = mconcat [
if t == "Divider"
then
tableViewDivider (toHtml t)
else
tableViewCell (toHtml t >> toggle True) | t <- ["Item 1", "Divider", "Item 2", "Item 3", "Divider", "Item 4"]
]
tbView = h2 (toHtml "Table Views") >> tableView tvi1
blockBtns = mconcat [
ratchetButton [it,Block] (toHtml (show it)) | it <- ratchetInfoTypes
] >> mconcat [
ratchetButton [it,Block,Outlined] (toHtml (show it)) | it <- ratchetInfoTypes
]
btns = h2 (toHtml "Buttons") >> mconcat [
bootstrapButton_ infoType (toHtml (show infoType)) Normal
| infoType <- ratchetInfoTypes
] >> blockBtns
ctnt = content $ mconcat [
tbView,
btns
]
innerHtml = mconcat[
topNav,
ctnt
]
sliderEx = ratchetTemplate Standard (easySlider (map toValue mtgImagesMobile))
generateRatchetExs :: IO ()
generateRatchetExs = do
saveAllRatchetPlatforms "examples/ratchet" "basic" basicRatchet
saveHtmlFile "examples/ratchet/sliderEx.html" sliderEx
main = do
saveHtmlFile "examples/ex1.html" ex1
saveAsAllBootswatchThemes "examples/ex1s" "ex" ex1InnerHtml
saveHtmlFile "examples/components.html" (rawTemplate_ componentExample 0)
saveAsAllBootswatchThemes "examples/components" "components" componentExample
saveHtmlFile "examples/modal.html" (rawTemplate_ simpleModal 0)
saveHtmlFile "examples/carousel.html" (rawTemplate_ simpleCarousel 0)
|
lnunno/blaze-bootstrap3
|
Bootstrap3/Examples.hs
|
mit
| 7,393 | 0 | 17 | 2,210 | 1,810 | 957 | 853 | 92 | 2 |
module Proteome.Data.PersistBuffers where
import Data.Aeson (FromJSON, ToJSON (toEncoding), defaultOptions, genericToEncoding)
import Path (Abs, File, Path)
data PersistBuffers =
PersistBuffers {
current :: Maybe (Path Abs File),
buffers :: [Path Abs File]
}
deriving stock (Eq, Generic, Show)
deriving anyclass (FromJSON)
instance ToJSON PersistBuffers where
toEncoding = genericToEncoding defaultOptions
|
tek/proteome
|
packages/proteome/lib/Proteome/Data/PersistBuffers.hs
|
mit
| 427 | 0 | 11 | 69 | 126 | 73 | 53 | -1 | -1 |
module ProjectEuler.Problem120
( problem
, f
) where
import ProjectEuler.Types
problem :: Problem
problem = pureProblem 120 Solved result
{-
Idea:
let B(n,k) denote "n choose k" (a.k.a binomial coefficient)
(a + 1)^n = sum{ B(n,i)*a^(n-i)*1^i, i = 0 to n }
(a - 1)^n = sum{ B(n,i)*a^(n-i)*(-1)^i, i = 0 to n }
notice that these two expansions have the same number of terms, and when i is odd,
some terms will cancel each other, leaving:
(a + 1)^n + (a - 1)^n = 2 * sum{ B(n,i)*a^(n-i), i = 0 to n && i is even }
note that a^k mod a^2 = 0 for all k >= 2, therefore:
[(a + 1)^n + (a - 1)^n] (mod a^2)
= 2 * sum{ B(n,i)*a^(n-i), i = 0 to n && i is even } (mod a^2)
= 2 * sum{ B(n,i)*a^(n-i), i = 0 to n && i is even && n - i < 2} (mod a^2)
(n - i < 2 restricts i to only 2 possible values: i = n or i = n - 1.)
= 2 * sum{ B(n,i)*a^(n-i), i = n-1 or n && i is even } (mod a^2)
when n is even:
[(a + 1)^n + (a - 1)^n] (mod a^2)
= 2 * sum{ B(n,i)*a^(n-i), i = n } (mod a^2)
= 2 * B(n,n) (mod a^2)
= 2 (mod a^2)
when n is odd:
[(a + 1)^n + (a - 1)^n] (mod a^2)
= 2 * sum{ B(n,i)*a^(n-i), i = n-1 } (mod a^2)
= 2 * B(n,n-1) * a (mod a^2)
= 2 * n * a (mod a^2)
-}
{-
Efficient implementation of ( (a+1)^n + (a-1)^n ) mod (a^2)
-}
f :: Integral i => i -> Int -> i
f a n =
if even n
then 2
else (2 * fromIntegral n * a) `rem` (a * a)
{-# INLINE f #-}
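-- A quick sanity check of the shortcut above, e.g. in GHCi:
-- f 5 3 == ((5 + 1)^3 + (5 - 1)^3) `mod` (5 * 5)   -- both sides evaluate to 5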
findMax :: Int -> Int
findMax a = maximum $ 2 : [f a n | n <- [1,3..a*2-1]]
result :: Int
result = sum $ findMax <$> [3..1000]
|
Javran/Project-Euler
|
src/ProjectEuler/Problem120.hs
|
mit
| 1,542 | 0 | 11 | 428 | 187 | 103 | 84 | 16 | 2 |
#!/usr/bin/env stack
-- stack --resolver lts-10.0 --install-ghc runghc --package turtle
-- for randomly picking a language to use
module Main where
import System.Random
languages :: [String]
languages = words "Python3 C# JavaScript Ruby Scala Kotlin"
langCount :: Int
langCount = length languages
main :: IO ()
main = do
g <- newStdGen
let (ind, _) = randomR (0, langCount-1) g
putStrLn $ languages !! ind
|
Javran/leetcode
|
WhatLang.hs
|
mit
| 427 | 0 | 12 | 84 | 105 | 57 | 48 | 11 | 1 |
list = [(x,y) | x <- [1,2,3], y <- [4,5,6]]
list' = do
x <- [1,2,3]
y <- [4,5,6]
return (x,y)
list'' =
[1,2,3] >>= (\x -> [4,5,6] >>= (\y -> return (x,y)))
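-- All three definitions produce the same list:
-- [(1,4),(1,5),(1,6),(2,4),(2,5),(2,6),(3,4),(3,5),(3,6)]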
|
raventid/coursera_learning
|
haskell/stepik/5.4list_as_monad_lecture.hs
|
mit
| 166 | 0 | 12 | 43 | 154 | 90 | 64 | 7 | 1 |
paren '(' = 1
paren ')' = -1
(.&&.) f g a = (f a) && (g a)
isBalanced = (((==0) . last) .&&. all (>=0)) . scanl1 (+) . map paren
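-- e.g. isBalanced "(())" == True, isBalanced "())(" == False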
main = do
content <- getContents
print $ length $ filter isBalanced $ lines content
|
MAPSuio/spring-challenge16
|
balanced_parens/larstvei.hs
|
mit
| 220 | 0 | 11 | 54 | 128 | 66 | 62 | 7 | 1 |
eachLine f = unlines . map f . lines
main :: IO ()
main = do
input <- getLine
let times = read input :: Int
interact (eachLine $ solver "" [])
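-- Note: main reads the first line (the expression count) but never uses its value;
-- interact then feeds each remaining line through solver, one expression per line.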
-- Op stack, symbol stack, expression -> answer
solver :: String -> [String] -> String -> String
solver (o:op) (x:(y:symbol)) [] =
solver op ((y ++ x ++ [o]) : symbol) []
solver "" (x:_) [] = x
solver op symbol ('(':expr) =
solver ('(':op) symbol expr
solver op symbol (')':expr) =
let
(o:(_:op')) = op -- discard left parenthesis
(x:(y:sym')) = symbol
newSym = (y ++ x ++ [o]) : sym'
in
solver op' newSym expr
solver "" symbol ('-':expr) =
solver "-" symbol expr
solver "" symbol ('*':expr) =
solver "*" symbol expr
solver "" symbol ('/':expr) =
solver "/" symbol expr
solver "" symbol ('^':expr) =
solver "^" symbol expr
solver "" symbol ('+':expr) =
solver "+" symbol expr
solver op symbol ('+':expr) =
let
(o:op') = op
(x:(y:sym')) = symbol
newSym = (y ++ x ++ [o]) : sym'
in
if o == '-' || o == '*' || o == '/' || o == '^' || o == '+'
then solver op' newSym ('+':expr)
else solver ('+':op) symbol expr
solver op symbol ('-':expr) =
let
(o:op') = op
(x:(y:sym')) = symbol
newSym = (y ++ x ++ [o]) : sym'
in
if o == '-' || o == '*' || o == '/' || o == '^'
then solver op' newSym ('-':expr)
else solver ('-':op) symbol expr
solver op symbol ('*':expr) =
let
(o:op') = op
(x:(y:sym')) = symbol
newSym = (y ++ x ++ [o]) : sym'
in
if o == '*' || o == '/' || o == '^'
then solver op' newSym ('*':expr)
else solver ('*':op) symbol expr
solver op symbol ('/':expr) =
let
(o:op') = op
(x:(y:sym')) = symbol
newSym = (y ++ x ++ [o]) : sym'
in
if o == '/' || o == '^'
    then solver op' newSym ('/':expr)
else solver ('/':op) symbol expr
solver op symbol ('^':expr) =
let
(o:op') = op
(x:(y:sym')) = symbol
newSym = (y ++ x ++ [o]) : sym'
in
if o == '^'
    then solver op' newSym ('^':expr)
else solver ('^':op) symbol expr
solver op symbol (alpha : expr) =
solver op ([alpha] : symbol) expr
|
pollow/OJSol
|
SPOJ/onp.hs
|
mit
| 2,139 | 0 | 16 | 606 | 1,164 | 606 | 558 | 70 | 6 |
{-# LANGUAGE OverloadedStrings #-}
module PrepareDatabase where
{-
Parses the relevant part of UnicodeData.txt
and prepares a representation for efficient querying.
-}
import Control.Monad
import Data.Bifunctor
import Data.Char
import Data.Function
import Data.List
import qualified Data.Map.Strict as M
import qualified Data.Set as S
import qualified Data.Text as T
import Numeric
import System.Exit
type Ranged =
Either
(Int, Int) -- [l .. r] (both inclusive)
Int
gcLitTable :: M.Map T.Text GeneralCategory
gcLitTable = M.fromList $ zip (T.words abbrs) [minBound .. maxBound]
where
abbrs =
"Lu Ll Lt Lm Lo \
\Mn Mc Me \
\Nd Nl No \
\Pc Pd Ps Pe Pi Pf Po \
\Sm Sc Sk So \
\Zs Zl Zp \
\Cc Cf Cs Co Cn"
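-- Assuming Data.Char's constructor order (UppercaseLetter .. NotAssigned), which follows
-- the same Unicode ordering as the abbreviations above, e.g.
-- gcLitTable M.! "Lu" == UppercaseLetter and gcLitTable M.! "Nd" == DecimalNumber.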
verifyAndProcess :: T.Text -> IO [(Ranged, GeneralCategory)]
verifyAndProcess raw = do
let dieIf flag reason = when flag $ do
putStrLn reason
exitFailure
rawLines = T.lines raw
rows = fmap extract rawLines
where
extract rawLine = (code :: Int, desc, gc)
where
[(code, "")] = readHex (T.unpack rawCode)
rawCode : desc : gc : _ = T.splitOn ";" rawLine
groupped :: [(T.Text, Either (Int, Int) Int)]
groupped = norm <$> groupBy zCmp rows
where
norm [(c, _, gc)] = (gc, Right c)
norm [(c0, _, gc0), (c1, _, gc1)]
| gc0 == gc1
&& T.dropEnd (T.length "First>") gc0
== T.dropEnd (T.length "Last>") gc1 =
(gc0, Left (c0, c1))
norm _ = error "invalid"
zCmp (_, desc0, _) (_, desc1, _) =
"First>" `T.isSuffixOf` desc0
&& "Last>" `T.isSuffixOf` desc1
gpMinus (Left (_a, b)) (Left (c, _d)) = b - c
gpMinus (Left (_a, b)) (Right c) = b - c
gpMinus (Right a) (Left (b, _c)) = a - b
gpMinus (Right a) (Right b) = a - b
isIncr = and $ zipWith isStrictIncr gs (tail gs)
where
isStrictIncr l r = gpMinus l r < 0
gs = fmap snd groupped
dieIf
(not isIncr)
"Data rows are not strictly ascending."
let gcGroupped :: [(T.Text, [Either (Int, Int) Int])]
gcGroupped =
(\ts -> (fst . head $ ts, fmap snd ts)) <$> groupBy ((==) `on` fst) groupped
merge acc [] = reverse acc
merge [] (x : xs) = merge [x] xs
merge (u : us) (x : xs) = case (u, x) of
(Left (a, b), Left (c, d)) ->
if b + 1 == c then merge (Left (a, d) : us) xs else merge (x : u : us) xs
(Left (a, b), Right c) ->
if b + 1 == c then merge (Left (a, c) : us) xs else merge (x : u : us) xs
(Right a, Left (b, c)) ->
if a + 1 == b then merge (Left (a, c) : us) xs else merge (x : u : us) xs
(Right a, Right b) ->
if a + 1 == b then merge (Left (a, b) : us) xs else merge (x : u : us) xs
gcGroupped' :: [(T.Text, [Either (Int, Int) Int])]
gcGroupped' = (fmap . second) (merge []) gcGroupped
dieIf
(S.member "Cn" (S.fromList $ fmap fst gcGroupped'))
"No character should be in 'Cn' category"
putStrLn $ "Raw rows in total: " <> show (length rows)
putStrLn $ "Rows after range groupping: " <> show (sum (fmap (length . snd) gcGroupped))
let revGroups :: [(Ranged, GeneralCategory)]
revGroups = concatMap (\(gc, xs) -> [(x, gcLitTable M.! gc) | x <- xs]) gcGroupped'
putStrLn $
"Final item count (consecutive ranges with same category): " <> show (length revGroups)
pure revGroups
|
Javran/misc
|
unicode-data/src/PrepareDatabase.hs
|
mit
| 3,524 | 0 | 20 | 1,093 | 1,413 | 760 | 653 | 78 | 15 |
{-# LANGUAGE DeriveGeneric #-}
module D20.Internal.Character.Feat where
import GHC.Generics
import D20.Internal.Character.Skill
import D20.Internal.Character.Ability
import D20.Internal.Character.FeatEffect
import qualified Data.Map as M
-- TODO: this is ok until we start to need custom prerequisites.
data FeatPrerequisite
= FeatPrerequisite FeatReference
| AbilityPrerequisite Ability
Int
| SkillPrerequisite Skill
Int
| BaseAttackBonusPrerequisite Int
deriving (Show,Generic)
type FeatName = String
data Feat =
Feat {getFeatName :: FeatName
,prerequisites :: [FeatPrerequisite]
,benefit :: FeatEffect
,normal :: Maybe FeatEffect
,special :: Maybe String}
deriving (Show,Generic)
data FeatReference =
FeatReference FeatName
deriving (Show,Ord,Eq,Generic)
{-
Every basic class offers a selection of bonus feats to choose from. A character gains a bonus feat upon attaining each even-numbered level in a class. These bonus feats are in addition to the feats that all characters receive as they attain new levels. Some feats have prerequisites that must be met before a character can select them.
-}
data BonusFeat
class HasFeats a where
getFeats :: a -> [Feat]
resolveFeatReference :: FeatReference -> Feat
resolveFeatReference reference = case flip M.lookup featLookup reference of
Just feat -> feat
Nothing -> error $ "Missing definition for " ++ show reference
-- TODO find a way to load this from a config.
featLookup :: M.Map FeatReference Feat
featLookup = M.empty
|
elkorn/d20
|
src/D20/Internal/Character/Feat.hs
|
mit
| 1,581 | 0 | 9 | 312 | 277 | 160 | 117 | -1 | -1 |
{-# OPTIONS_GHC -O0 #-}
module Handler.Admin.Group where
import Import
import Handler.Admin.Modlog (addModlogEntry)
import qualified Data.Text as T (intercalate)
-------------------------------------------------------------------------------------------------------------
groupsForm :: Html -> MForm Handler (FormResult GroupConfigurationForm, Widget)
groupsForm extra = do
(nameRes , nameView ) <- mreq textField "" Nothing
(manageThreadRes , manageThreadView) <- mreq checkBoxField "" Nothing
(manageBoardRes , manageBoardView ) <- mreq checkBoxField "" Nothing
(manageUsersRes , manageUsersView ) <- mreq checkBoxField "" Nothing
(manageConfigRes , manageConfigView) <- mreq checkBoxField "" Nothing
(deletePostsRes , deletePostsView ) <- mreq checkBoxField "" Nothing
(managePanelRes , managePanelView ) <- mreq checkBoxField "" Nothing
(manageBanRes , manageBanView ) <- mreq checkBoxField "" Nothing
(editPostsRes , editPostsView ) <- mreq checkBoxField "" Nothing
(aMarkupRes , aMarkupView ) <- mreq checkBoxField "" Nothing
(shadowEditRes , shadowEditView ) <- mreq checkBoxField "" Nothing
(viewModlogRes , viewModlogView ) <- mreq checkBoxField "" Nothing
(viewIPAndIDRes , viewIPAndIDView ) <- mreq checkBoxField "" Nothing
(hellbanningRes , hellbanningView ) <- mreq checkBoxField "" Nothing
(ratingRes , ratingView ) <- mreq checkBoxField "" Nothing
(applControlRes , applControlView ) <- mreq checkBoxField "" Nothing
(wordfilterRes , wordfilterView ) <- mreq checkBoxField "" Nothing
(reportsRes , reportsView ) <- mreq checkBoxField "" Nothing
let result = GroupConfigurationForm <$> nameRes <*>
manageThreadRes <*> manageBoardRes <*> manageUsersRes <*>
manageConfigRes <*> deletePostsRes <*> managePanelRes <*>
manageBanRes <*> editPostsRes <*> shadowEditRes <*>
aMarkupRes <*> viewModlogRes <*> viewIPAndIDRes <*>
hellbanningRes <*> ratingRes <*> applControlRes <*>
wordfilterRes <*> reportsRes
widget = $(widgetFile "admin/groups-form")
return (result, widget)
getManageGroupsR :: Handler Html
getManageGroupsR = do
groups <- map entityVal <$> runDB (selectList ([]::[Filter Group]) [])
(formWidget, _) <- generateFormPost groupsForm
defaultLayout $ do
defaultTitleMsg MsgGroups
$(widgetFile "admin/groups")
postManageGroupsR :: Handler Html
postManageGroupsR = do
((result, _), _) <- runFormPost groupsForm
let msgRedirect msg = setMessageI msg >> redirect ManageGroupsR
case result of
FormFailure [] -> msgRedirect MsgBadFormData
FormFailure xs -> msgRedirect (MsgError $ T.intercalate "; " xs)
FormMissing -> msgRedirect MsgNoFormData
FormSuccess (GroupConfigurationForm name manageThread manageBoard manageUsers
manageConfig deletePostsP managePanel manageBan editPosts shadowEdit
aMarkup viewModLog viewIPAndID hellbanning changeFileRating applControl wordfilter reports
) -> do
let permissions = [(ManageThreadP,manageThread), (ManageBoardP,manageBoard ), (ManageUsersP,manageUsers)
,(ManageConfigP,manageConfig), (DeletePostsP,deletePostsP), (ManagePanelP,managePanel)
,(ManageBanP ,manageBan ), (EditPostsP ,editPosts ), (AdditionalMarkupP,aMarkup)
,(ShadowEditP ,shadowEdit ) , (ViewModlogP ,viewModLog ), (ViewIPAndIDP,viewIPAndID )
,(HellBanP,hellbanning) , (ChangeFileRatingP, changeFileRating), (AppControlP,applControl)
,(WordfilterP,wordfilter) , (ReportsP , reports)
]
newGroup = Group { groupName = name
, groupPermissions = map fst $ filter snd permissions
}
g <- runDB $ getBy $ GroupUniqName name
if isJust g
then (addModlogEntry $ MsgModlogUpdateGroup name) >> (void $ runDB $ replace (entityKey $ fromJust g) newGroup)
else (addModlogEntry $ MsgModlogAddGroup name) >> (void $ runDB $ insert newGroup)
msgRedirect MsgGroupAddedOrUpdated
getDeleteGroupsR :: Text -> Handler ()
getDeleteGroupsR group = do
delGroup <- runDB $ selectFirst [GroupName ==. group] []
when (isNothing delGroup) $ setMessageI MsgGroupDoesNotExist >> redirect ManageGroupsR
usrGroup <- getMaybeGroup =<< maybeAuth
when (isNothing usrGroup) $ notFound
groups <- map (groupPermissions . entityVal) <$> runDB (selectList ([]::[Filter Group]) [])
when ((ManageUsersP `notElem` groupPermissions (entityVal $ fromJust delGroup) ) || ((>1) $ length $ filter (ManageUsersP `elem`) groups)) $ do
void $ runDB $ deleteWhere [GroupName ==. group]
addModlogEntry $ MsgModlogDelGroup group
setMessageI MsgGroupDeleted >> redirect ManageGroupsR
setMessageI MsgYouAreTheOnlyWhoCanManageUsers >> redirect ManageGroupsR
|
ahushh/Monaba
|
monaba/src/Handler/Admin/Group.hs
|
mit
| 5,090 | 0 | 27 | 1,194 | 1,356 | 698 | 658 | -1 | -1 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module DemoContractAST where
import Data.Time
import Data.List (transpose)
import Data.Monoid (Monoid (..))
import System.Locale
import Text.XML.HaXml.Types
import Text.XML.HaXml.XmlContent.Parser
import Common (DiffDateTime, adjustDateTime, DateTime, FeeCalc)
import Calendar
newtype Location = Location String deriving Show
newtype Commodity = Commodity String deriving Show
type Volume = Double
type Price = Double
data Market = Market Commodity Unit Location deriving Show
data OptionDirection = CallOption | PutOption deriving Show
newtype Unit = Unit String deriving Show
newtype Currency = Currency String deriving Show
newtype CashFlowType = CashFlowType String deriving Show
data Product a = Product a DeliverySchedule deriving Show
type SegmentedSchedule = [DeliverySchedule]
newtype DeliverySchedule = DeliverySchedule [DeliveryScheduleBlock] deriving (Monoid, Show)
data DeliveryScheduleBlock = DeliveryScheduleBlock
{ startDateTime :: DateTime
, endDateTime :: DateTime
, deliveryDays :: [DateTime]
, deliveryShape :: DeliveryShape
} deriving Show
deliverySchedule :: DateTime -> DateTime
-> Calendar -- ^ What days to deliver on
-> DeliveryShape
-> DeliverySchedule
deliverySchedule start end cal shape =
DeliverySchedule [DeliveryScheduleBlock start end days shape]
where
days = calendarDaysInPeriod cal (start, end)
newtype DeliveryShape = DeliveryShape [DiffTime] deriving (Monoid, Show)
deliverAtTimeOfDay :: Int -> Int -> DeliveryShape
deliverAtTimeOfDay hour minute = DeliveryShape
[timeOfDayToTime (TimeOfDay hour minute 0)]
deliverAtMidnight :: DeliveryShape
deliverAtMidnight = deliverAtTimeOfDay 0 0
complexDeliveryShape :: [DeliveryShape] -> DeliveryShape
complexDeliveryShape = mconcat
data Contract
= NamedContract String Contract
| AndContract [Contract]
| GiveContract Contract
| ProductBasedContract ProductBasedContract
| OptionContract
{ optionDirection :: OptionDirection
, premium :: PremiumPrice
, exerciseDetails :: ExerciseDetails
, underlying :: Contract
}
deriving Show
data ProductBasedContract
= PhysicalContract Double (Product Market)
| FinancialContract Double (Product Currency) (Maybe CashFlowType)
deriving Show
type StrikePrice = (Double, Currency)
type PremiumPrice = (Double, Currency)
data ExerciseDetails
= European StrikePrice (DateTime, DateTime)
| American StrikePrice (DateTime, DateTime)
| Bermudan StrikePrice [(DateTime, DateTime)] Int
deriving Show
--------------------------------------------------------------------------------
forward :: FeeCalc -- ^Fees
-> Market -- ^Market, e.g. NBP Gas
-> Volume -- ^Notional volume
-> Price -- ^Price
-> Currency -- ^Payment currency
-> CashFlowType -- ^Cashflow type
-> DeliverySchedule -- ^Schedule for physical settlement
-> DeliverySchedule -- ^Schedule for financial settlement
-> Contract
forward _fee market vol pr cur cft pSch fSch = NamedContract "Forward" $
AndContract [ ProductBasedContract physicalLeg
, GiveContract $ ProductBasedContract financialLeg]
where
physicalLeg = PhysicalContract vol physicalProduct
physicalProduct = Product market pSch
financialLeg = FinancialContract (vol * pr) financialProduct (Just cft)
financialProduct = Product cur fSch
-- * Option template
-- | Basic option template. Flexibility is achieved through 'ExerciseDetails'.
option :: ExerciseDetails -- ^Details of when and at what price the option can be exercised
-> OptionDirection -- ^Direction of the option (call or put)
-> PremiumPrice
-> Contract -- ^Underlying asset
-> Contract
option exerciseDetails direction premium underlyingContract =
OptionContract direction premium exerciseDetails underlyingContract
namedContract :: String -> Contract -> Contract
namedContract = NamedContract
europeanExercise :: (DateTime, DateTime) -> StrikePrice -> ExerciseDetails
europeanExercise = flip European
americanExercise :: (DateTime, DateTime) -> StrikePrice -> ExerciseDetails
americanExercise = flip American
allOf :: [Contract] -> Contract
allOf = AndContract
commoditySpreadOption ::
[( Market
, Volume
, Price, Currency, CashFlowType
, SegmentedSchedule
, FeeCalc
)] -- ^List of underlying legs
-> DiffDateTime -- ^Exercise start date offset (relative to leg)
-> DiffDateTime -- ^Exercise stop date offset (relative to leg)
-> OptionDirection -- ^Option direction (put or call)
-> StrikePrice -- ^Strike price of the option
-> CashFlowType -- ^Cashflow type of the strike price
-> PremiumPrice
-> Contract
commoditySpreadOption legs exerciseDiffTimeStart exerciseDiffTimeStop opDir strikePrice cftype premium =
allOf
[ legOption groupedLeg $
allOf [ forward fee m vol pr cur cft seg seg
| (m, vol, pr, cur, cft, seg, fee) <- groupedLeg ]
| groupedLeg <- groupedLegs ]
where
groupedLegs :: [[(Market, Volume, Price, Currency, CashFlowType, DeliverySchedule, FeeCalc)]]
groupedLegs =
transpose
[ [ (m, vol, pr, cur, cft, seg, fee) | seg <- sch ]
| (m, vol, pr, cur, cft, sch, fee) <- legs ]
legOption :: [(Market, Volume, Price, Currency, CashFlowType, DeliverySchedule, FeeCalc)] -> Contract -> Contract
legOption groupedLeg underlying =
option exerciseDetails opDir premium underlying
where
exerciseDetails = europeanExercise exerciseTime strikePrice
exerciseTime = ( adjustDateTime earliestDeliveryTime exerciseDiffTimeStart
, adjustDateTime earliestDeliveryTime exerciseDiffTimeStop
)
where
earliestDeliveryTime =
minimum [ earliestDelivery seg | (_, _, _, _, _, seg, _) <- groupedLeg ]
earliestDelivery :: DeliverySchedule -> DateTime
earliestDelivery (DeliverySchedule blocks) =
minimum [ day | block <- blocks, day <- deliveryDays block]
--------------------------------------------------------------------------------
toXml :: XmlContent' a => Bool -> a -> Document ()
toXml _ value =
Document (Prolog (Just (XMLDecl "1.0" Nothing Nothing))
[] Nothing [])
emptyST
( case toContents' value of
[] -> Elem "empty" [] []
[CElem e ()] -> e
(CElem _ ():_) -> error "too many XML elements in document" )
[]
class XmlContent' a where
toContents' :: a -> [Content ()]
instance XmlContent' Contract where
toContents' (NamedContract name contract) =
[ CElem (Elem "Contract" [mkAttr "type" name] (toContents' contract)) () ]
toContents' (AndContract contracts) =
[ mkElemC "And" (concatMap toContents' contracts) ]
toContents' (GiveContract contract) =
[ mkElemC "Give" (toContents' contract) ]
toContents' (ProductBasedContract contract) =
toContents' contract
toContents' (OptionContract dir (premiumQty, premiumCcy) det und) =
[ mkElemC "Option" $ concat
[ toContents' dir
, toContents' det
, [ mkElemC "Premium" $ concat
[ [ mkElemC "Quantity" (toText . show $ premiumQty) ]
, toContents' premiumCcy
]
]
, [ mkElemC "Underlying" (toContents' und) ]
]
]
instance XmlContent' OptionDirection where
toContents' CallOption = [mkElemC "OptionDirection" $ toText "Call"]
toContents' PutOption = [mkElemC "OptionDirection" $ toText "Put"]
instance XmlContent' ExerciseDetails where
toContents' (European strike window) =
[ CElem (Elem "ExerciseDetails" [mkAttr "type" "European"] $ concat
[ [ mkExerciseWindow [window]
, mkStrikePrice strike
, mkElemC "Premium" [] ]
]) () ]
toContents' (American strike window) =
[ CElem (Elem "ExerciseDetails" [mkAttr "type" "American"] $ concat
[ [ mkExerciseWindow [window]
, mkStrikePrice strike
, mkElemC "Premium" [] ]
]) () ]
toContents' (Bermudan strike windows _limit) =
[ CElem (Elem "ExerciseDetails" [mkAttr "type" "American"] $ concat
[ [ mkExerciseWindow windows
, mkStrikePrice strike
, mkElemC "Premium" [] ]
]) () ]
mkStrikePrice (strikeQty, strikeCcy) =
mkElemC "StrikePrice" $ concat
[ [ mkElemC "Quantity" (toText . show $ strikeQty) ]
, toContents' strikeCcy
]
mkExerciseWindow :: [(DateTime, DateTime)] -> Content ()
mkExerciseWindow windows =
mkElemC "ExerciseWindow" $ map mkExerciseWindowElement windows
mkExerciseWindowElement :: (DateTime, DateTime) -> Content ()
mkExerciseWindowElement (startDateTime, endDateTime) =
mkElemC "ExerciseWindowElement"
[ mkElemC "StartDateTime" $ toText . showDate $ startDateTime
, mkElemC "EndDateTime" $ toText . showDate $ endDateTime
]
instance XmlContent' Currency where
toContents' (Currency ccy) = [mkElemC "Currency" (toText ccy)]
instance XmlContent' ProductBasedContract where
toContents' (PhysicalContract qty prod) =
[ CElem (Elem "Contract" [mkAttr "type" "Physical"] $ concat
[ toContents' prod
, [ mkElemC "Quantity" $ toText . show $ qty ]
]) () ]
-- TODO: Is there a need for the mCashFlowType here?
toContents' (FinancialContract qty prod _mCashFlowType) =
[ CElem (Elem "Contract" [mkAttr "type" "Financial"] $ concat
[ toContents' prod
, [ mkElemC "Quantity" $ toText . show $ qty ]
]) () ]
instance XmlContent' a => XmlContent' (Product a) where
toContents' (Product market schedule) =
[ mkElemC "Product" $ toContents' market ++ toContents' schedule ]
instance XmlContent' Market where
toContents' (Market commodity unit location) =
[ mkElemC "Market" $ concat
[ toContents' commodity
, toContents' unit
, toContents' location
]
]
instance XmlContent' Commodity where
toContents' (Commodity commodity) = [mkElemC "Commodity" $ toText commodity ]
instance XmlContent' Unit where
toContents' (Unit unit) = [mkElemC "Unit" $ toText unit ]
instance XmlContent' Location where
toContents' (Location location) = [mkElemC "Location" $ toText location ]
instance XmlContent' DeliverySchedule where
toContents' (DeliverySchedule scheds) =
[ mkElemC "DeliverySchedule" $ concatMap toContents' scheds ]
instance XmlContent' DeliveryScheduleBlock where
toContents' (DeliveryScheduleBlock startDateTime endDateTime days shape) =
[ mkElemC "DeliveryScheduleElement" $ concat
[ [ mkElemC "StartDateTime" $ toText . showDate $ startDateTime ]
, [ mkElemC "EndDateTime" $ toText . showDate $ endDateTime ]
, map (mkElemC "DeliveryDay" . toText . showDay) days
, toContents' shape
]
]
instance XmlContent' DeliveryShape where
toContents' (DeliveryShape times) =
[ mkElemC "DeliveryShape" $ concatMap
(return . mkElemC "DeliveryShapeElement" . toText . showTime) times
]
showDate = formatTime defaultTimeLocale "%d/%m/%Y %H:%M:%S"
showDay = formatTime defaultTimeLocale "%d/%m/%Y"
showTime = formatTime defaultTimeLocale "%H:%M:%S" . timeToTimeOfDay
|
netrium/Netrium
|
examples/DemoContractAST.hs
|
mit
| 11,807 | 0 | 18 | 2,969 | 2,861 | 1,532 | 1,329 | 239 | 3 |
module Rebase.Data.Profunctor.Sieve
(
module Data.Profunctor.Sieve
)
where
import Data.Profunctor.Sieve
|
nikita-volkov/rebase
|
library/Rebase/Data/Profunctor/Sieve.hs
|
mit
| 107 | 0 | 5 | 12 | 23 | 16 | 7 | 4 | 0 |
import Math.NumberTheory.Primes.Sieve (primes)
import Math.NumberTheory.Primes.Testing (isPrime)
import Data.Time.Clock
-- Returns the number of decimal digits of a number
nbrDigits :: Integral a => a -> Int
nbrDigits = (+1) . floor . logBase 10 . fromIntegral
-- Returns the number obtained by concatenating two numbers one after the other
(#) :: Integral a => a -> a -> a
n1 # n2 = n1 * 10^nbrDigits n2 + n2
-- Check if a pair of numbers is valid
isValidPair :: Integer -> Integer -> Bool
isValidPair n1 n2 = isPrime (n1#n2) && isPrime (n2#n1)
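-- e.g. 67 # 3 == 673, and isValidPair 3 7 == True since 37 and 73 are both prime.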
type ExtendedSet = ([Integer], Integer)
-- The list of all valid sets.
validSets :: [[Integer]]
validSets = [] : [2] : [3] : merge (drop 2 set1) (drop 2 set2)
where
set1, set2 :: [[Integer]]
set1 = [[], [3]] ++ (generate set1 $ filter (\n -> rem n 3 == 1) (drop 2 primes))
set2 = [[], [3]] ++ (generate set2 $ filter (\n -> rem n 3 == 2) (drop 2 primes))
generate :: [[Integer]] -> [Integer] -> [[Integer]]
generate ([]:vsets) primes = [head primes] : generate vsets primes
generate (vset:vsets) (p:ps)
| vset == [p] = generate validSets ps
| isNewValidSet = (p:vset) : generate vsets (p:ps)
| otherwise = generate vsets (p:ps)
where
isNewValidSet = and $ map (isValidPair p) vset
merge :: [[Integer]] -> [[Integer]] -> [[Integer]]
merge lss@(ls:lss') rss@(rs:rss')
| head ls < head rs = ls : merge lss' rss
| otherwise = rs : merge lss rss'
euler :: Int -> ([Integer], Integer)
euler size = pickupBest (head candidates'') candidates''
where
candidates :: [[Integer]]
candidates = filter (\ls -> size == length ls) validSets
-- All the valid sets with the required number of elements
upBound :: Integer
upBound = sum $ head candidates
-- The sum of the first candidate is de facto an upper bound
candidates' :: [[Integer]]
candidates' = takeWhile (\set -> head set < upBound) candidates
    -- Refine the candidates by keeping only those whose greatest element
    -- (i.e. the head) is smaller than the upper bound
candidates'' :: [ExtendedSet]
candidates'' = zip candidates' $ map sum candidates'
-- Zip each refined candidate with its sum
pickupBest :: ExtendedSet -> [ExtendedSet] -> ExtendedSet
pickupBest (cur, sum) [] = (cur, sum)
pickupBest (cur, sum) ((cur', sum'):candidates')
| sum'<sum = pickupBest (cur', sum') candidates'
| otherwise = pickupBest (cur , sum ) candidates'
main = do
t1 <- getCurrentTime
putStr $ "3: " ++ (show $ euler 3) ++ " => "
t2 <- getCurrentTime
print $ (diffUTCTime t2 t1)
t1 <- getCurrentTime
putStr $ "4: " ++ (show $ euler 4) ++ " => "
t2 <- getCurrentTime
print $ (diffUTCTime t2 t1)
t1 <- getCurrentTime
putStr $ "5: " ++ (show $ euler 5) ++ " => "
t2 <- getCurrentTime
print $ (diffUTCTime t2 t1)
t1 <- getCurrentTime
putStr $ "6: " ++ (show $ euler 6) ++ " => "
t2 <- getCurrentTime
print $ (diffUTCTime t2 t1)
{-
Y:\>euler_0060.exe
3: ([67,37,3],107) => 0.0490017s
4: ([673,109,7,3],792) => 0.281016s
5: ([8389,6733,5701,5197,13],26033) => 890.1959162s
-}
|
dpieroux/euler
|
0/0060b.hs
|
mit
| 3,201 | 23 | 16 | 779 | 914 | 526 | 388 | 58 | 2 |
{
module Parser (
parseExpr,
parseTokens,
) where
import Lexer
import Syntax
}
|
JohnKossa/Contractual-C
|
Parser.hs
|
mit
| 83 | 2 | 4 | 17 | 23 | 15 | 8 | -1 | -1 |
module AoC.Day11 (
search,
shortinput,
fullinput,
fullinput2
) where
-- import Debug.Trace
import Control.Monad
import qualified Data.List as L
import Data.Vector (Vector, (!), concat)
import Data.Hashable
import Data.HashSet (HashSet)
import qualified Data.HashSet as H
import qualified Data.Vector as V
import Text.Parsec.Char
import Text.Parsec.Text
import Text.Parsec.Combinator
data Floors = Floors Int (Vector String) (Vector Int) Int
deriving (Show, Eq)
instance Hashable Floors where
hashWithSalt i (Floors _ _ fs e) = i * sum (V.zipWith power fs (V.fromList [0..(length fs)]))
power :: Int -> Int -> Int
power d p = d * 10 ^ p
targetFloor = 3
shortinput = Floors 2 (V.fromList ["hydrogen", "lithium"]) (V.fromList [0, 1, 0, 2]) 0
fullinput = Floors 5
(V.fromList ["promethium", "cobalt", "curium", "ruthenium", "plutonium"])
(V.fromList [ 0, 0, 2, 1, 2, 1, 2, 1, 2, 1 ])
0
fullinput2 = Floors 7
(V.fromList ["promethium", "cobalt", "curium", "ruthenium", "plutonium", "elerium", "dilithium"])
(V.fromList [ 0, 0, 2, 1, 2, 1, 2, 1, 2, 1 , 1, 1, 1, 1])
0
-- For the nth component type produce the vector index containing the floor of the microchip
offsetToChipIdx :: Int -> Int
offsetToChipIdx n = n * 2
-- For the nth component type produce the vector index containing the floor of the generator
offsetToGeneratorIdx :: Int -> Int
offsetToGeneratorIdx = (+1) . offsetToChipIdx
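-- e.g. for the component at position 1 ("cobalt" in fullinput), the chip's floor is
-- stored at index 2 and the generator's floor at index 3 of the floors vector.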
isChipSafe :: Floors -> Int -> Bool
isChipSafe (Floors len _ fs e) i = fs ! chip == fs ! generator || notElem (fs ! chip) otherGeneratorFloors
where
otherGeneratorFloors = fmap (fs !) otherGenerators
chip = offsetToChipIdx i
generator = offsetToGeneratorIdx i
otherGenerators = fmap offsetToGeneratorIdx (L.delete i [0..(len - 1)])
isValid :: Floors -> Bool
isValid conf = all (isChipSafe conf) [0..(len - 1)]
where
(Floors len _ _ _) = conf
isGoal :: Floors -> Bool
isGoal (Floors _ _ fs e) = e == targetFloor && V.all (targetFloor==) fs
add :: Vector Int -> Vector Int -> Vector Int
add = V.zipWith (+)
down :: Int
down = -1
up :: Int
up = 1
data Direction = Up | Down
data Move = Single Int | Pair Int Int deriving (Eq, Show)
data Update = Update (Vector Int) Int deriving (Eq, Show)
diff :: Direction -> Int
diff Up = up
diff Down = down
delta :: Int -> Direction -> Move -> Update
delta n d (Pair i1 i2) =
Update (V.update (V.replicate n 0) (V.fromList [(i1, dir), (i2, dir)])) dir
where dir = diff d
delta n d (Single i) =
Update (V.update (V.replicate n 0) (V.fromList [(i, dir)])) dir
where dir = diff d
apply :: Floors -> Update -> Floors
apply (Floors len names fs e) (Update v i) = Floors len names (add v fs) (i + e)
successorsTo :: Floors -> [Floors]
successorsTo current = filter isValid (apply current <$> (posUpdates ++ negUpdates))
where
(Floors len _ fs e) = current
upper = delta (len * 2) Up
downer = delta (len * 2) Down
moveable = filter onTargetFloor [0..(len * 2) - 1]
moveablePairs = [(x, y) | x <- moveable, y <- moveable, x < y]
posUpdates = if e < 3 then (upper . Single <$> moveable) ++ (upper . uncurry Pair <$> moveablePairs) else []
negUpdates = if e > 0 then (downer . Single <$> moveable) ++ (downer . uncurry Pair <$> moveablePairs) else []
onTargetFloor i = fs ! i == e
search :: Floors -> (Int, Floors)
search = run H.empty 0 . H.singleton
where
run :: HashSet Floors -> Int -> HashSet Floors -> (Int, Floors)
run seen dist next =
let
fringe = H.difference next seen
found = H.filter isGoal fringe
in
if null found
then run (H.union fringe seen) (dist + 1) (H.fromList (H.toList fringe >>= successorsTo) )
else (dist, head (H.toList found))
|
rzeigler/advent-of-code-2016
|
Lib/AoC/Day11.hs
|
mit
| 3,931 | 0 | 16 | 1,000 | 1,570 | 855 | 715 | 87 | 3 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module AutoFilterTests
( tests
) where
import Test.SmallCheck.Series
import Test.Tasty (testGroup, TestTree)
import Test.Tasty.SmallCheck (testProperty)
import Codec.Xlsx
import Codec.Xlsx.Parser.Internal
import Codec.Xlsx.Writer.Internal
import Common
import Test.SmallCheck.Series.Instances ()
tests :: TestTree
tests =
testGroup
"Types.AutFilter tests"
[ testProperty "fromCursor . toElement == id" $ \(autoFilter :: AutoFilter) ->
[autoFilter] == fromCursor (cursorFromElement $ toElement (n_ "autoFilter") autoFilter)
]
|
qrilka/xlsx
|
test/AutoFilterTests.hs
|
mit
| 628 | 0 | 15 | 95 | 138 | 83 | 55 | 18 | 1 |
-- hmonads.hs
import qualified Control.Monad
import System.Environment (getArgs)
main :: IO ()
-- main = getLine >>= putStrLn
-- main = putStrLn =<< getLine
-- main = putStrLn Control.Monad.>=> putStrLn
main = putStrLn "Hello" >> putStrLn "How are you"
|
gitrookie/functionalcode
|
code/Haskell/snippets/hmonads.hs
|
mit
| 263 | 0 | 6 | 49 | 46 | 26 | 20 | 4 | 1 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedLists #-}
--{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Irg.Lab2.Geometry.Matrix where
import qualified Irg.Lab2.Geometry.Vector as V
import qualified Data.Vector as V2
type Number = Float
type Vector = V.Vector Number
type Matrix = V.Vector (V.Vector Number)
get :: Int -> Int -> Matrix -> Number
get i j mat = V.get j (V.get i mat)
set :: Int -> Int -> Number -> Matrix -> Matrix
set i j val mat = V.set i (V.set j val (V.get i mat)) mat
getRowsCount :: Matrix -> Int
getRowsCount = length
getColsCount :: Matrix -> Int
getColsCount = length . V.get 0
transpose :: Matrix -> Matrix
transpose mat = V2.fromList $ map (`getColumn` mat) [0..getColsCount mat - 1]
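-- Illustrative example (not part of the original module):
-- >>> toList (transpose (fromList [[1,2,3],[4,5,6]]))
-- [[1.0,4.0],[2.0,5.0],[3.0,6.0]]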
add :: Matrix -> Matrix -> Matrix
add = V2.zipWith (V2.zipWith (+))
sub :: Matrix -> Matrix -> Matrix
sub = V2.zipWith (V2.zipWith (-))
mult :: Number -> Matrix -> Matrix
mult num = fmap (fmap (* num))
multiply :: Matrix -> Matrix -> Matrix
multiply mat1 mat2 = fmap (\x -> fmap (V.scalarProduct x) (transpose mat2)) mat1
determinant :: Matrix -> Number
determinant mat
| getRowsCount mat < 2 = get 0 0 mat
| otherwise = foldl (\s (x, i) -> s + (if odd i then 1 else -1) * x*determinant (subMatrix 0 i mat)) 0 $ zip (head $ toList mat) [0..]
subMatrix :: Int -> Int -> Matrix -> Matrix
subMatrix row col mat = V.dropElem row $ fmap (V.dropElem col) mat
invert :: Matrix -> Matrix
invert mat
| getRowsCount mat == 3 = mult (-1/determinant mat) $ fromList [[d 1 1,d 1 2,d 1 3],[d 2 1,d 2 2,d 2 3],[d 3 1,d 3 2,d 3 3]]
| otherwise = error "Unsupported matrix"
where
d r c = determinant $ fromList [[a (r+1) (c+1), a (r+1) (c+2)], [a (r+2) (c+1), a (r+2) (c+2)]]
a r c = get ((r-1) `mod` 3) ((c-1) `mod` 3) mat
fromList :: [[Number]] -> Matrix
fromList = V.fromList . map V.fromList
toList :: Matrix -> [[Number]]
toList = V.toList . fmap V.toList
toVector :: Matrix -> Vector
toVector mat
| getRowsCount mat == 1 = getRow 0 mat
| getColsCount mat == 1 = getColumn 0 mat
| otherwise = error "Invalid matrix"
fromHorizontal :: Vector -> Matrix
fromHorizontal = fmap return
getRow :: Int -> Matrix -> Vector
getRow = V.get
getColumn :: Int -> Matrix -> Vector
getColumn i = fmap (V2.! i)
|
DominikDitoIvosevic/Uni
|
IRG/src/Irg/Lab2/Geometry/Matrix.hs
|
mit
| 2,306 | 0 | 15 | 470 | 1,101 | 577 | 524 | 54 | 2 |
module NLP.OPUS.Raw (parseRaw) where
import NLP.OPUS.Util (mkCorpus)
import NLP.OPUS.Types
import Data.Text (Text, pack)
import Data.List (transpose)
import Text.XML.HXT.Expat
import Text.XML.HXT.Core
-- | Raw OPUS format parser
-- Input is a list of pairs: language name - raw file path.
parseRaw :: [(Lang, FilePath)] -> IO Corpus
parseRaw langFiles =
fmap (mkCorpus . consistentCheck . rawConverter) $
mapM runParser files
where
files = map snd langFiles
langs = map fst langFiles
rawConverter = map (zip langs) . transpose
runParser f = runX $
readDocument [ withValidate no
, withExpat yes
] f >>> rawParser
consistentCheck c
| all ((== length (head c)) . length) c = c
| otherwise = error "unaligned source"
rawParser :: ArrowXml a => a XmlTree Text
rawParser = getChildren >>>
hasName "DOC" //> hasName "s" >>> getChildren >>> getText >>^ pack
|
akru/nlp-opus
|
src/NLP/OPUS/Raw.hs
|
mit
| 963 | 0 | 16 | 247 | 292 | 155 | 137 | 24 | 1 |
module Gui where
import Data.Map
import Graphics.UI.SDL
import Graphics.UI.SDL.TTF.Render
import Attacker
import Bunker
import Dataset
import Utils
-- Display the main screen elements
displayMainScreen appData = do
-- Display the background
applySurface 0 0 (getBackgroundImg appData) (getScreen appData)
-- Display the main title
txt <- renderTextSolid (getFontTitle appData) "Space Intruders" (getFontColor appData)
applySurface 150 100 txt (getScreen appData)
-- Display points table caption
txt <- renderTextSolid (getFontMenu appData) "*SCORE ADVANCE TABLE*" (getFontColor appData)
applySurface 325 250 txt (getScreen appData)
-- Display the spaceship points
txt <- renderTextSolid (getFontMenu appData) "= ? MYSTERY" (getFontColor appData)
applySurface 455 330 txt (getScreen appData)
-- Display the octopus points
txt <- renderTextSolid (getFontMenu appData) "= 30 POINTS" (getFontColor appData)
applySurface 455 390 txt (getScreen appData)
-- Display the crab points
txt <- renderTextSolid (getFontMenu appData) "= 20 POINTS" (getFontColor appData)
applySurface 455 450 txt (getScreen appData)
-- Display the squid points
txt <- renderTextSolid (getFontMenu appData) "= 10 POINTS" (getFontColor appData)
applySurface 455 510 txt (getScreen appData)
-- Display points table icons
applySurface 390 330 (getSpaceshipImg appData) (getScreen appData)
applySurface 390 390 (getOctopusImg appData) (getScreen appData)
applySurface 390 450 (getCrabImg appData) (getScreen appData)
applySurface 395 510 (getSquidImg appData) (getScreen appData)
-- Display the instructions
txt <- renderTextSolid (getFontMenu appData) "PRESS ENTER TO PLAY" (getFontColor appData)
applySurface 345 700 txt (getScreen appData)
-- Refresh the screen to show all elements
Graphics.UI.SDL.flip (getScreen appData)
-- Display in game elements such as the statistics, attackers, the player or the baseline
displayInGameScreen gameData appData = do
-- Display the background
applySurface 0 0 (getBackgroundImg appData) (getScreen appData)
-- Display the status
-- Level
txt <- renderTextSolid (getFontStatus appData) ("LEVEL: " ++ (show (getLevel gameData))) (getFontColor appData)
applySurface 20 20 txt (getScreen appData)
-- Score
txt <- renderTextSolid (getFontStatus appData) ("SCORE: " ++ (show (getScore gameData))) (getFontColor appData)
applySurface (512-((surfaceGetWidth txt) `quot` 2)) 20 txt (getScreen appData)
-- Life
txt <- renderTextSolid (getFontStatus appData) ("LIFE: " ++ (show (getPlayerLife gameData))) (getFontColor appData)
applySurface (1004-surfaceGetWidth txt) 20 txt (getScreen appData)
-- Draw all bunkers
displayBunker (getBunkerTypeList gameData) (getBunkerStateList gameData) appData (getBunkerPositionList gameData)
-- Display attackers
displayAttacker (getAttackerTypeList gameData) appData (getAliveAttackerPositionList (getAttackerAliveList gameData) (getAttackerPositionList gameData))
-- Display an eventual spaceship
applySurface (fst (getSpaceshipPosition gameData)) (snd (getSpaceshipPosition gameData)) (getSpaceshipImg appData) (getScreen appData)
-- Draw the bullets
displayBullet (getBulletList gameData) appData
-- Draw the player
applySurface (fst (getPlayerPosition gameData)) (755-(surfaceGetHeight (getPlayerImg appData))) (getPlayerImg appData) (getScreen appData)
-- Draw the baseline
applySurface 0 765 (getBaselineImg appData) (getScreen appData)
-- Refresh the screen to show all elements
Graphics.UI.SDL.flip (getScreen appData)
-- Display the game over screen elements
displayGameOverScreen gameData appData = do
-- Display the background
applySurface 0 0 (getBackgroundImg appData) (getScreen appData)
-- Display the main title
txt <- renderTextSolid (getFontTitle appData) "GAME OVER" (getFontColor appData)
applySurface (512-((surfaceGetWidth txt) `quot` 2)) 100 txt (getScreen appData)
-- Display the final score
txt <- renderTextSolid (getFontMenu appData) "*FINAL SCORE*" (getFontColor appData)
applySurface (512-((surfaceGetWidth txt) `quot` 2)) 250 txt (getScreen appData)
-- Display the score
txt <- renderTextSolid (getFontMenu appData) ((show (getScore gameData))++" POINTS") (getFontColor appData)
applySurface (512-((surfaceGetWidth txt) `quot` 2)) 330 txt (getScreen appData)
-- Display the level
txt <- renderTextSolid (getFontMenu appData) ("LEVEL: "++(show (getLevel gameData))) (getFontColor appData)
applySurface (512-((surfaceGetWidth txt) `quot` 2)) 390 txt (getScreen appData)
-- Display the instructions
txt <- renderTextSolid (getFontMenu appData) "PRESS ENTER TO PLAY AGAIN" (getFontColor appData)
applySurface (512-((surfaceGetWidth txt) `quot` 2)) 700 txt (getScreen appData)
-- Refresh the screen to show all elements
Graphics.UI.SDL.flip (getScreen appData)
-- Display all attackers
displayAttacker attackerTypeList appData [] = putStr ""
displayAttacker attackerTypeList appData (x:xs) = do
-- Display by type
if attackerTypeEq (fromList attackerTypeList ! (fst x)) Crab
then applySurface (fst (snd x)) (snd (snd x)) (getCrabImg appData) (getScreen appData)
else if attackerTypeEq (fromList attackerTypeList ! (fst x)) Octopus
then applySurface (fst (snd x)) (snd (snd x)) (getOctopusImg appData) (getScreen appData)
else if attackerTypeEq (fromList attackerTypeList ! (fst x)) Squid
then applySurface (fst (snd x)) (snd (snd x)) (getSquidImg appData) (getScreen appData)
else applySurface (fst (snd x)) (snd (snd x)) (getSpaceshipImg appData) (getScreen appData)
-- Loop through all remaining attackers to display
displayAttacker attackerTypeList appData xs
-- Display all bunkers
displayBunker bunkerTypeList bunkerStateList appData [] = putStr ""
displayBunker bunkerTypeList bunkerStateList appData (x:xs) = do
-- Display by type and by state
    -- A cleaner approach (e.g. a lookup table keyed on bunker type and state) would reduce the amount of code
if bunkerTypeEq (fromList bunkerTypeList ! (fst x)) TopLeft
then if bunkerStateEq (fromList bunkerStateList ! (fst x)) Initial
then applySurface (fst (snd x)) (snd (snd x)) (getBunkerTopLeft0Img appData) (getScreen appData)
else if bunkerStateEq (fromList bunkerStateList ! (fst x)) Minor
then applySurface (fst (snd x)) (snd (snd x)) (getBunkerTopLeft1Img appData) (getScreen appData)
else if bunkerStateEq (fromList bunkerStateList ! (fst x)) Partial
then applySurface (fst (snd x)) (snd (snd x)) (getBunkerTopLeft2Img appData) (getScreen appData)
else if bunkerStateEq (fromList bunkerStateList ! (fst x)) Major
then applySurface (fst (snd x)) (snd (snd x)) (getBunkerTopLeft3Img appData) (getScreen appData)
else applySurface (fst (snd x)) (snd (snd x)) (getBunkerDestroyedImg appData) (getScreen appData)
else if bunkerTypeEq (fromList bunkerTypeList ! (fst x)) TopRight
then if bunkerStateEq (fromList bunkerStateList ! (fst x)) Initial
then applySurface (fst (snd x)) (snd (snd x)) (getBunkerTopRight0Img appData) (getScreen appData)
else if bunkerStateEq (fromList bunkerStateList ! (fst x)) Minor
then applySurface (fst (snd x)) (snd (snd x)) (getBunkerTopRight1Img appData) (getScreen appData)
else if bunkerStateEq (fromList bunkerStateList ! (fst x)) Partial
then applySurface (fst (snd x)) (snd (snd x)) (getBunkerTopRight2Img appData) (getScreen appData)
else if bunkerStateEq (fromList bunkerStateList ! (fst x)) Major
then applySurface (fst (snd x)) (snd (snd x)) (getBunkerTopRight3Img appData) (getScreen appData)
else applySurface (fst (snd x)) (snd (snd x)) (getBunkerDestroyedImg appData) (getScreen appData)
else if bunkerTypeEq (fromList bunkerTypeList ! (fst x)) CenterLeft
then if bunkerStateEq (fromList bunkerStateList ! (fst x)) Initial
then applySurface (fst (snd x)) (snd (snd x)) (getBunkerCenterLeft0Img appData) (getScreen appData)
else if bunkerStateEq (fromList bunkerStateList ! (fst x)) Minor
then applySurface (fst (snd x)) (snd (snd x)) (getBunkerCenterLeft1Img appData) (getScreen appData)
else if bunkerStateEq (fromList bunkerStateList ! (fst x)) Partial
then applySurface (fst (snd x)) (snd (snd x)) (getBunkerCenterLeft2Img appData) (getScreen appData)
else if bunkerStateEq (fromList bunkerStateList ! (fst x)) Major
then applySurface (fst (snd x)) (snd (snd x)) (getBunkerCenterLeft3Img appData) (getScreen appData)
else applySurface (fst (snd x)) (snd (snd x)) (getBunkerDestroyedImg appData) (getScreen appData)
else if bunkerTypeEq (fromList bunkerTypeList ! (fst x)) CenterRight
then if bunkerStateEq (fromList bunkerStateList ! (fst x)) Initial
then applySurface (fst (snd x)) (snd (snd x)) (getBunkerCenterRight0Img appData) (getScreen appData)
else if bunkerStateEq (fromList bunkerStateList ! (fst x)) Minor
then applySurface (fst (snd x)) (snd (snd x)) (getBunkerCenterRight1Img appData) (getScreen appData)
else if bunkerStateEq (fromList bunkerStateList ! (fst x)) Partial
then applySurface (fst (snd x)) (snd (snd x)) (getBunkerCenterRight2Img appData) (getScreen appData)
else if bunkerStateEq (fromList bunkerStateList ! (fst x)) Major
then applySurface (fst (snd x)) (snd (snd x)) (getBunkerCenterRight3Img appData) (getScreen appData)
else applySurface (fst (snd x)) (snd (snd x)) (getBunkerDestroyedImg appData) (getScreen appData)
else if bunkerTypeEq (fromList bunkerTypeList ! (fst x)) Plain
then if bunkerStateEq (fromList bunkerStateList ! (fst x)) Initial
then applySurface (fst (snd x)) (snd (snd x)) (getBunkerPlain0Img appData) (getScreen appData)
else if bunkerStateEq (fromList bunkerStateList ! (fst x)) Minor
then applySurface (fst (snd x)) (snd (snd x)) (getBunkerPlain1Img appData) (getScreen appData)
else if bunkerStateEq (fromList bunkerStateList ! (fst x)) Partial
then applySurface (fst (snd x)) (snd (snd x)) (getBunkerPlain2Img appData) (getScreen appData)
else if bunkerStateEq (fromList bunkerStateList ! (fst x)) Major
then applySurface (fst (snd x)) (snd (snd x)) (getBunkerPlain3Img appData) (getScreen appData)
else applySurface (fst (snd x)) (snd (snd x)) (getBunkerDestroyedImg appData) (getScreen appData)
else applySurface (fst (snd x)) (snd (snd x)) (getBunkerDestroyedImg appData) (getScreen appData)
-- Loop through all remaining bunker parts to display
displayBunker bunkerTypeList bunkerStateList appData xs
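-- Illustrative sketch of the lookup-table alternative mentioned above (not
-- part of the original code); 'bunkerImgFor' is a hypothetical helper that
-- replaces the nested if/else chain with one equation per (type, state) pair:
-- bunkerImgFor appData TopLeft Initial = getBunkerTopLeft0Img appData
-- bunkerImgFor appData TopLeft Minor   = getBunkerTopLeft1Img appData
-- ...
-- bunkerImgFor appData _ _             = getBunkerDestroyedImg appData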
-- Display all bullets
displayBullet [] appData = putStr ""
displayBullet (x:xs) appData = do
applySurface (fst (fst x)) (snd (fst x)) (getBulletImg appData) (getScreen appData)
-- Loop through all remaining bullets to display
displayBullet xs appData
|
joeyinbox/space-invaders-haskell
|
src/Gui.hs
|
gpl-2.0
| 11,551 | 0 | 20 | 2,494 | 3,778 | 1,882 | 1,896 | 125 | 26 |
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE RecordWildCards #-}
-------------------------------------------------------------------------------
-- Module : Domain.Concrete.State
-- Copyright : (c) 2015-17 Marcelo Sousa
--
-- The domain for the concrete semantics.
-------------------------------------------------------------------------------
module Domain.Concrete.State where
import Data.Hashable
import Data.IntMap (IntMap)
import Data.List
import Data.Map (Map)
import Data.Set (Set)
import Domain.Concrete.Value
import Domain.MemAddr
import Domain.Util
import Language.SimpleC.AST
import Language.SimpleC.Util
import Model.GCS
import Util.Generic hiding (safeLookup)
import qualified Data.IntMap as IM
import qualified Data.Map as M
import qualified Data.Set as S
-- | Concrete Memory Cell
type ConMCell = MemCell SymId () ConValue
instance Show ConMCell where
show (MCell ty val) = show val
-- | Concrete Heap
type ConHeap = Map MemAddrBase ConMCell
-- | Concrete state
data Sigma =
  Sigma
  {
    heap :: ConHeap
  , th_states :: ThStates
  , num_th :: Int
  , is_bot :: Bool
  }
  deriving (Show,Eq,Ord)
-- | A thread state is a control and local data
type ThStates = Map TId ThState
type Locals = Map SymId ConValue
data ThState =
ThState
{
pos :: Pos
, id :: SymId
, locals :: Locals
}
deriving (Show,Eq,Ord)
bot_th_state :: Pos -> SymId -> ThState
bot_th_state pos id = ThState pos id M.empty
bot_sigma :: Sigma
bot_sigma = Sigma M.empty M.empty 1 False
bot_state :: CState
bot_state = CState S.empty
-- | Initial state which is not bottom
empty_state :: CState
empty_state = CState $ S.singleton bot_sigma
-- | Set the position in the cfg of a thread
set_pos :: CState -> TId -> (Pos,SymId) -> CState
set_pos (CState st) tid npos = CState $ S.map (\s -> set_pos_s s tid npos) st
set_pos_s :: Sigma -> TId -> (Pos,SymId) -> Sigma
set_pos_s st@Sigma{..} tid (npos,sid) =
let th_st' =
case M.lookup tid th_states of
-- if nothing, it could be the beginning
Nothing -> ThState npos sid M.empty
Just t@ThState{..} ->
let pos' = npos
in t { pos = pos' }
th_states' = M.insert tid th_st' th_states
in st { th_states = th_states' }
inc_num_th :: Sigma -> (Int,Sigma)
inc_num_th s@Sigma{..} =
let n = num_th + 1
in (num_th,s { num_th = n })
-- | The collecting domain
newtype CState = CState { sts :: Set Sigma }
deriving (Show,Eq)
join_cstate :: CState -> CState -> CState
join_cstate (CState s1) (CState s2) = CState (s1 `S.union` s2)
-- | Checks for state subsumption
-- 1. Check bottoms
-- 2. Check if the number of threads
-- is greater or equal
-- 3. Check the heap
-- 4. Check the thread states
subsumes_concrete :: Sigma -> Sigma -> Bool
subsumes_concrete st1 st2 =
case check_bottoms (is_bot st1) (is_bot st2) of
Just r -> r
Nothing ->
if (num_th st1) < (num_th st2)
then False
else
let sts1 = th_states st1
hp1 = heap st1
in if M.foldrWithKey' (\tid th b -> check_threads tid th sts1 && b) True (th_states st2)
then M.foldrWithKey' (\mid mcell b -> check_heap mid mcell hp1 && b) True (heap st2)
else False
where
check_bottoms b1 b2 =
if b1
then Just b2
else if b2
then Just True
else Nothing
check_threads tid th2 sts1 =
case M.lookup tid sts1 of
Nothing -> False
Just th1 ->
let lcs1 = locals th1
in if pos th1 == pos th2
then M.foldrWithKey' (\sym vals b -> check_locals sym vals lcs1 && b) True (locals th2)
else False
check_locals :: SymId -> ConValue -> Map SymId ConValue -> Bool
check_locals sym val2 lcs1 =
case M.lookup sym lcs1 of
Nothing -> False
Just val1 -> val2 <= val1
check_heap mid cell2 hp1 =
case M.lookup mid hp1 of
Nothing -> False
Just cell1 ->
let r = ty cell1 == ty cell2
val1 = val cell1
val2 = val cell2
in r && val2 <= val1
instance Projection Sigma where
controlPart st@Sigma{..} = M.map pos th_states
subsumes a b = subsumes_concrete a b
isBottom = is_bot
-- toThSym st@Sigma{..} tid =
-- case M.lookup tid th_states of
-- Nothing -> error "toThSym sigma failed"
-- Just s@ThState{..} -> id
instance Projection CState where
controlPart (CState a) =
if S.null a
then error "control part of bottom state"
else let s = S.map controlPart a
in if S.size s > 1
then error "more than one control vector in the set"
else S.elemAt 0 s
subsumes (CState a) (CState b) = S.isSubsetOf b a
isBottom (CState a) = S.null a
-- toThSym (CState a) tid = toThSym (S.elemAt 0 a) tid
-- | API for modifying the state
-- | insert_heap: inserts an element to the heap
insert_heap :: Sigma -> SymId -> STy -> ConValues -> CState
insert_heap st sym ty vals =
if null vals
then error "insert_heap: no values"
else let sts = map (insert_heap_sigma st sym ty) vals
in CState $ S.fromList sts
insert_heap_sigma :: Sigma -> SymId -> STy -> ConValue -> Sigma
insert_heap_sigma st@Sigma{..} sym ty val =
let cell = MCell ty val
heap' = M.insert sym cell heap
in st { heap = heap' }
modify_heap :: Sigma -> SymId -> ConValue -> Sigma
modify_heap st@Sigma{..} id val =
let heap' = M.update (update_conmcell val) id heap
in st {heap = heap'}
update_conmcell :: ConValue -> ConMCell -> Maybe ConMCell
update_conmcell nval c@MCell{..} = Just $ c { val = nval }
-- | insert_local: inserts an element to local state
insert_local :: Sigma -> TId -> SymId -> ConValues -> CState
insert_local st tid sym vals =
if null vals
then error "insert_local: no values"
else let sts = map (insert_local_sigma st tid sym) vals
in CState $ S.fromList sts
insert_local_sigma :: Sigma -> TId -> SymId -> ConValue -> Sigma
insert_local_sigma st@Sigma{..} tid sym val =
case M.lookup tid th_states of
Nothing -> error "insert_local_sigma: tid not found in th_states"
Just s@ThState{..} ->
let locals' = M.insert sym val locals
s' = s { locals = locals' }
th_states' = M.insert tid s' th_states
in st { th_states = th_states' }
-- | modify the state: receives a MemAddrs and a
-- ConValue and assigns the ConValue to the MemAddrs
modify_state :: Scope -> Sigma -> MemAddrs -> ConValues -> CState
modify_state scope st addrs vals =
case addrs of
MemAddrTop -> error "modify_state: top addrs, need to traverse everything"
MemAddrs l -> case l of
[] -> error "modify_state: list of addresses is empty"
[a@MemAddr{..}] ->
if null vals
then error "modify_state: null vals"
else let sts = map (modify_local_sigma scope st base) vals
in CState $ S.fromList sts
_ -> error "modify_state: list of addresses contains more than one"
modify_local_sigma :: Scope -> Sigma -> SymId -> ConValue -> Sigma
modify_local_sigma scope st@Sigma{..} sym val =
-- First search in the heap
case M.lookup sym heap of
Nothing ->
-- If not in the heap, search in the thread
case scope of
Global -> error "modify_state: id is not the heap and scope is global"
Local i -> insert_local_sigma st i sym val
Just _ -> modify_heap st sym val
insert_thread :: Sigma -> SymId -> Pos -> (TId,CState)
insert_thread s sym pos =
let (tid,s'@Sigma{..}) = inc_num_th s
th = bot_th_state pos sym
th_states' = M.insert tid th th_states
ns = s' { th_states = th_states' }
in (tid,CState $ S.singleton ns)
checkBoolVals :: ConValues -> (Bool,Bool)
checkBoolVals vals = (any isTrue vals, any isFalse vals)
isTrue :: ConValue -> Bool
isTrue val = case val of
ConVal v -> case v of
VBool b -> b
_ -> False
_ -> False
isFalse :: ConValue -> Bool
isFalse val = case val of
ConVal v -> case v of
VBool b -> not b
_ -> False
_ -> False
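-- Illustrative example (not part of the original module):
-- checkBoolVals [ConVal (VBool True), ConVal (VBool False)] == (True, True)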
-- Arithmetic operations in ConValue
add_conval, sub_conval, mult_conval :: ConValue -> ConValue -> ConValue
add_conval c1 c2 = case (c1,c2) of
(ConVal v1,ConVal v2) -> ConVal $ add_value v1 v2
_ -> error "add_conval: not ConVal"
sub_conval c1 c2 = case (c1,c2) of
(ConVal v1,ConVal v2) -> ConVal $ sub_value v1 v2
_ -> error "sub_ConVal: not ConVal"
mult_conval c1 c2 = case (c1,c2) of
(ConVal v1,ConVal v2) -> ConVal $ mult_value v1 v2
_ -> error "mult_ConVal: not ConVal"
divs_conval c1 c2 = case (c1,c2) of
(ConVal v1,ConVal v2) -> ConVal $ div_value v1 v2
_ -> error "div_ConVal: not ConVal"
rmdr_conval c1 c2 = case (c1,c2) of
(ConVal v1,ConVal v2) -> ConVal $ rmd_value v1 v2
_ -> error "rmdr_ConVal: not ConVal"
minus_conval c1 = case c1 of
ConVal v1 -> ConVal $ minus_value v1
_ -> error "minus_conval: not ConVal"
-- Boolean operations in ConValue
neg_conval c1 = case c1 of
ConVal v -> ConVal $ neg_value v
le_conval c1 c2 = case (c1,c2) of
(ConVal v1,ConVal v2) -> ConVal $ le_value v1 v2
_ -> error "le_conval: not conVal"
gr_conval c1 c2 = case (c1,c2) of
(ConVal v1,ConVal v2) -> ConVal $ gr_value v1 v2
_ -> error "gr_conval: not conVal"
leq_conval c1 c2 = case (c1,c2) of
(ConVal v1,ConVal v2) -> ConVal $ lor_value (gr_value v1 v2) (eq_value v1 v2)
_ -> error "leq_conval: not conVal"
geq_conval c1 c2 = case (c1,c2) of
(ConVal v1,ConVal v2) -> ConVal $ lor_value (gr_value v1 v2) (eq_value v1 v2)
_ -> error "geq_conval: not conVal"
eq_conval c1 c2 = case (c1,c2) of
(ConVal v1,ConVal v2) -> ConVal $ eq_value v1 v2
_ -> error "eq_conval: not conVal"
neq_conval c1 c2 = case (c1,c2) of
(ConVal v1,ConVal v2) -> ConVal $ neg_value $ eq_value v1 v2
_ -> error "neq_conval: not conVal"
land_conval c1 c2 = case (c1,c2) of
(ConVal v1,ConVal v2) -> ConVal $ land_value v1 v2
_ -> error "neq_conval: not conVal"
lor_conval c1 c2 = case (c1,c2) of
(ConVal v1,ConVal v2) -> ConVal $ lor_value v1 v2
_ -> error "neq_conval: not conVal"
instance Hashable CState where
hash (CState sts) = hash $ S.toList sts
hashWithSalt s (CState sts) = hashWithSalt s $ S.toList sts
instance Hashable Sigma where
hash s@Sigma{..} = hash (heap,th_states,num_th,is_bot)
hashWithSalt s st@Sigma{..} = hashWithSalt s (heap,th_states,num_th,is_bot)
instance Hashable ConHeap where
hash = hash . M.toList
hashWithSalt s h = hashWithSalt s $ M.toList h
instance Hashable ThStates where
hash = hash . M.toList
hashWithSalt s th = hashWithSalt s $ M.toList th
instance Hashable ThState where
hash th@ThState{..} = hash (pos,id,locals)
hashWithSalt s th@ThState{..} = hashWithSalt s (pos,id,locals)
instance Hashable Locals where
hash = hash . M.toList
hashWithSalt s h = hashWithSalt s $ M.toList h
instance Hashable ConMCell where
hash m@MCell{..} = hash val
hashWithSalt s m@MCell{..} = hashWithSalt s val
instance Hashable ConValue where
hash v = case v of
ConVal val -> hash val
ConMemAddr mem -> hash mem
_ -> error "hash not supported"
hashWithSalt s v = case v of
ConVal val -> hashWithSalt s val
ConMemAddr mem -> hashWithSalt s mem
_ -> error "hash not supported"
|
marcelosousa/poet
|
src/Domain/Powerset/State.hs
|
gpl-2.0
| 11,344 | 0 | 18 | 2,732 | 3,779 | 1,945 | 1,834 | 268 | 10 |
{-
Copyright 2012 liquid_amber
This file is part of PicasaDB.
PicasaDB is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
PicasaDB is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with PicasaDB. If not, see <http://www.gnu.org/licenses/>.
-}
import Control.Monad ((>=>), (<=<))
import Data.PicasaDB
import Data.PicasaDB.Reader
import qualified Data.ByteString.Lazy as BL
import qualified Data.Binary.Get as G
import System.Environment (getArgs)
readPMPDBFile :: FilePath -> IO PMPDB
readPMPDBFile = return . G.runGet getPMPDB <=< BL.readFile
main = do
args <- getArgs
mapM_ (readPMPDBFile >=> listPMPDB) args
|
liquidamber/PicasaDB
|
pmpDB.hs
|
gpl-3.0
| 1,041 | 0 | 9 | 166 | 117 | 69 | 48 | -1 | -1 |
module Config where
import Data.Configurator(load,require,Worth(Required))
import Data.Configurator.Types as C
import Data.Text(unpack,pack)
import Data.Maybe(fromJust,fromMaybe)
import Network.URI
import Transforms
import Types as T
configure::FilePath -> IO T.Config
configure path = do
  cfg <- load [Required path]
cache <- require cfg "cache" :: IO FilePath
  output <- require cfg "output" :: IO FilePath
feedNames <- require cfg "feeds" :: IO [String]
feeds <- mapM (getFeed cfg) feedNames
return T.Config{feeds = feeds, cache = cache, rssStore = output}
getFeed::C.Config -> String -> IO Feed
getFeed cfg feed = do
url <- require cfg (pack (feed++".url")) :: IO String
let uri = fromMaybe (error (feed ++ ".url is not a valid url :-("))
(parseURI url)
transformNames <- require cfg (pack $ feed++".transforms") :: IO [String]
transforms <- mapM (\t -> case t of
"fetchfull" -> return fetchfull
"regexsnip" -> do
start_re <- require cfg (pack $ feed++".regexsnip.start_re") :: IO String
end_re <- require cfg (pack $ feed++".regexsnip.end_re") :: IO String
return (regexsnip start_re end_re)
_ -> error "Unknown transform?"
) transformNames
  -- TODO: use a Maybe here to get rid of the warning
return Feed{name = feed, items = [], feedurl = url, transforms = transforms}
instance C.Configured [String] where
convert (C.List vals) = Just $ map (fromJust . fmap unpack . convert) vals
convert _ = Nothing
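-- Illustrative configurator file this module expects (names and URLs are
-- hypothetical, not from this repository):
-- cache = "/tmp/pipefeed-cache"
-- output = "feeds.rss"
-- feeds = ["hn"]
-- hn
-- {
--   url = "https://news.ycombinator.com/rss"
--   transforms = ["fetchfull"]
-- }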
|
bcorrigan/PipeFeed
|
src/Config.hs
|
gpl-3.0
| 2,202 | 0 | 20 | 1,023 | 551 | 284 | 267 | 34 | 3 |
{-# LANGUAGE CPP #-}
-- |
-- Module : Packages
-- Copyright : (C) 2017-2019 Jens Petersen
--
-- Maintainer : Jens Petersen <[email protected]>
-- Stability : alpha
--
-- Explanation: Cloning and pulling package git repos
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
#if (defined(MIN_VERSION_base) && MIN_VERSION_base(4,13,0))
#else
import Control.Applicative (optional, some, (<|>)
#if (defined(MIN_VERSION_base) && MIN_VERSION_base(4,8,0))
#else
, pure, (<$>), (<*>)
#endif
)
#endif
import Control.Monad (filterM, unless, when, (>=>))
import Control.Monad.Extra (mapMaybeM, unlessM, whenJustM)
import qualified Data.ByteString.Char8 as B
import qualified Data.ByteString.Lazy.Char8 as BL
import Data.Char (isDigit)
import Data.Maybe
import Data.List
import Data.List.Split (splitOn)
import Network.HTTP.Simple
import Network.HTTP.Types
import System.Directory (doesDirectoryExist, doesFileExist,
getCurrentDirectory, getHomeDirectory,
#if (defined(MIN_VERSION_directory) && MIN_VERSION_directory(1,2,5))
listDirectory,
#else
getDirectoryContents,
#endif
setCurrentDirectory)
import System.FilePath ((</>), (<.>), takeFileName)
import System.IO (BufferMode(..), hIsTerminalDevice, hSetBuffering, stdout)
--import System.Posix.Env (getEnv)
import Text.CSV (parseCSV)
import Distribution.Fedora (Dist(..), distBranch, distRepo, distUpdates,
getLatestFedoraDist, getRawhideDist)
--import Distribution.Fedora.Branch (Branch(..))
import SimpleCmd ((+-+), cmd, cmd_, cmdLines, cmdMaybe, cmdQuiet, {-cmdSilent,-}
grep_, removePrefix, removeSuffix, shell_, warning)
import SimpleCmd.Git
import SimpleCmdArgs
import Build (build, readBuildCmd)
import Dist (distArg, distRemote, hackageRelease, ltsStream)
import Koji (rpkg)
import Paths_fedora_haskell_tools (version)
import RPM (buildRequires, haskellSrcPkgs, Package, pkgDir,
repoquery, rpmspec)
import Utils (checkPkgsGit, withCurrentDirectory)
#if (defined(MIN_VERSION_directory) && MIN_VERSION_directory(1,2,5))
#else
listDirectory :: FilePath -> IO [FilePath]
listDirectory path =
filter f <$> getDirectoryContents path
where f filename = filename /= "." && filename /= ".."
#endif
main :: IO ()
main = do
hSetBuffering stdout NoBuffering
cwd <- getCurrentDirectory
branched <- getLatestFedoraDist
simpleCmdArgs (Just version) "Fedora Haskell packages tool"
"Fedora packages maintenance tool" $
subcommands . sort $
[ Subcommand "checkout" "fedpkg switch-branch" $
repoAction_ branched True False (return ()) <$> distArg <*> pkgArgs
, Subcommand "clone" "clone repos" $
clone branched <$> branching <*> distArg <*> pkgArgs
, Subcommand "clone-new" "clone new packages" $
cloneNew branched <$> branching <*> distArg
, Subcommand "cblrpm" "Run cblrpm command" $
cblrpm branched <$> strOptionWith 'c' "cmd" "CMD" "command to execute" <*> distArg <*> pkgArgs
, Subcommand "cmd" "arbitrary command (with args)" $
execCmd branched <$> strOptionWith 'c' "cmd" "CMD" "command to execute" <*> distArg <*> pkgArgs
, Subcommand "count" "count number of packages" $
(repoqueryHaskellPkgs branched False >=> (print . length)) <$> distArg
, Subcommand "depends" "cabal-depends" $
repoAction branched False (Output cabalDepends) <$> distArg <*> pkgArgs
, Subcommand "diff" "git diff" $
gitDiff branched <$> optional gitFormat
<*> optional (strOptionWith 'w' "with-branch" "BRANCH" "Branch to compare")
<*> distArg <*> pkgArgs
, Subcommand "diff-origin" "git diff origin" $
gitDiffOrigin branched <$> distArg <*> pkgArgs
-- , Subcommand "diff-branch" "compare branch with master" $
-- repoAction branched True (Header False compareRawhide) <$> distArg <*> pkgArgs
, Subcommand "diffstat" "Show diffstat output" $
repoAction branched False (Output (const diffStat)) <$> distArg <*> pkgArgs
, Subcommand "hackage-upload" "upload Hackage distro data" $
hackageUpload branched <$> switchRefresh
, Subcommand "hackage-compare" "compare with Hackage distro data" $
hackageCompare branched <$> switchRefresh
-- more or less the same as 'pushed'
, Subcommand "head-origin" "packages with head in sync with origin" $
headOrigin branched <$> distArg <*> pkgArgs
, Subcommand "leaf" "list leaf packages (slow!)" $
leaves branched <$> switchWith 'v' "deps" "show also deps" <*> distArg <*> pkgArgs
, Subcommand "list" "list packages that BR ghc-Cabal-devel" $
(repoqueryHaskellPkgs branched False >=> putStrList) <$> distArg
, Subcommand "merge" "git merge" $
merge branched <$> strOptionWith 'f' "from" "BRANCH" "specify branch to merge from" <*> distArg <*> pkgArgs
, Subcommand "missing" "missing dependency source packages" $
missingDeps branched <$> distArg <*> pkgArgs
, Subcommand "new" "unbuilt packages" $
(newPackages branched >=> putStrList) <$> distArg
, Subcommand "old-packages" "packages not in repoquery" $
oldPackages branched <$> distArg <*> pkgArgs
, Subcommand "prep" "fedpkg prep" $
prep branched <$> distArg <*> pkgArgs
, Subcommand "stackage-compare" "compare with stackage" $
stackageCompare branched <$> streamOpt <*> stackageOpts <*> distArg <*> pkgArgs
, Subcommand "commit" "fedpkg commit" $
commit branched <$> strOptionWith 'm' "message" "COMMITMSG" "commit message" <*> distArg <*> pkgArgs
, Subcommand "fetch" "git fetch repos" $
repoAction_ branched True False (git_ "fetch" []) <$> distArg <*> pkgArgs
, Subcommand "pull" "git pull repos" $
repoAction_ branched True False (git_ "pull" ["--rebase"]) <$> distArg <*> pkgArgs
, Subcommand "push" "git push repos" $
repoAction_ branched True False (git_ "push" []) <$> distArg <*> pkgArgs
, Subcommand "pushed" "show git pushed packages" $
pushed branched <$> distArg <*> pkgArgs
, Subcommand "refresh" "cabal-rpm refresh" $
refresh branched <$> switchWith 'n' "dry-run" "Show patch but don't apply" <*> distArg <*> pkgArgs
, Subcommand "remaining" "remaining packages to be built in TAG" $
remaining <$> switchWith 'c' "count" "show many packages left" <*> strArg "TAG" <*> pkgArgs
, Subcommand "subpkgs" "list subpackages" $
repoAction branched True (Header True (\ p -> rpmspec [] (Just "%{name}-%{version}") (p <.> "spec") >>= putStrList)) <$> distArg <*> pkgArgs
, Subcommand "subpackaged" "list subpackaged libraries" $
subpackaged branched <$> switchWith 'V' "show-versions" "Show versions" <*> distArg <*> pkgArgs
, Subcommand "tagged" "list koji DIST tagged builds" $
listTagged_ <$> switchWith 's' "short" "list packages not builds" <*> strArg "TAG"
, Subcommand "unbranched" "packages without this branch" $
unbranched branched <$> distArg <*> pkgArgs
, Subcommand "unpushed" "show unpushed commits" $
unpushed branched <$> switchWith 's' "short" "no log" <*> distArg <*> pkgArgs
, Subcommand "update" "cabal-rpm update" $
update branched <$> streamOpt <*> distArg <*> pkgArgs
, Subcommand "verrel" "show nvr of packages" $
verrel branched <$> distArg <*> pkgArgs] ++
map (buildCmd cwd) [ ("install", "build locally and install")
, ("mock", "build in mock")
, ("chain", "build deps recursively in Koji")
, ("koji", "build in Koji (deprecated) without checking dependencies")
, ("pending", "show planned changes")
, ("changed", "show changed pkgs")
, ("built", "show pkgs whose NVR already built")
, ("bump", "bump release for NVRs already built")
, ("not-installed", "list packages not locally installed")
]
where
pkgArgs = some (removeSuffix "/" <$> strArg "PKG...")
branching = switchWith 'B' "branches" "clone branch dirs (fedpkg clone -B)"
gitFormat :: Parser DiffFormat
gitFormat =
flagWith' DiffShort 's' "short" "Just output package name" <|>
DiffContext <$> optionWith auto 'u' "unified" "CONTEXT" "Lines of context"
buildCmd cwd (c, desc) =
Subcommand c desc $
build cwd Nothing False (readBuildCmd c) <$> distArg <*> pkgArgs
switchRefresh = switchWith 'r' "refresh" "repoquery --refresh"
streamOpt = strOptionalWith 's' "stream" "STACKAGESTREAM" ("Stackage stream [" ++ ltsStream ++ "]") ltsStream
stackageOpts :: Parser StkgOpt
stackageOpts =
flagWith' StkgMissing 'm' "missing" "only list missing packages" <|>
flagWith StkgAll StkgOnly 'o' "only" "only Stackage packages"
data DiffFormat =
DiffShort | DiffContext Int
deriving (Eq)
data StkgOpt = StkgAll | StkgOnly | StkgMissing
deriving Eq
putStrList :: [String] -> IO ()
putStrList =
putStr . unlines
-- should make separate rhel client so -B does not need dist
clone :: Dist -> Bool -> Dist -> [Package] -> IO ()
clone _ True dist pkgs = cloneAllBranches dist pkgs
clone branched False dist pkgs =
repoAction_ branched True False (return ()) dist pkgs
cloneNew :: Dist -> Bool -> Dist -> IO ()
cloneNew branched True dist = do
rawhide <- getRawhideDist
newPackages branched rawhide >>= cloneAllBranches dist
cloneNew branched False dist =
newPackages branched dist >>= repoAction_ branched True False (return ()) dist
execCmd :: Dist -> String -> Dist -> [Package] -> IO ()
execCmd _ "" _ _ = error "CMD string must be given"
execCmd branched cs dist pkgs =
repoAction_ branched True True (shell_ cs) dist pkgs
gitDiff :: Dist -> Maybe DiffFormat -> Maybe String -> Dist -> [Package] -> IO ()
gitDiff branched (Just DiffShort) mbrnch =
repoAction branched False (Header False doGitDiff)
where
doGitDiff pkg = do
let branch = maybeToList mbrnch
out <- git "diff" branch
unless (null out) $ putStrLn pkg
gitDiff branched fmt mbrnch =
repoAction branched False (Output (const doGitDiff))
where
doGitDiff = do
let branch = maybeToList mbrnch
contxt = case fmt of
(Just (DiffContext n)) -> ["--unified=" ++ show n]
_ -> []
out <- git "diff" $ branch ++ contxt
return $ if null out then "" else out
gitDiffOrigin :: Dist -> Dist -> [Package] -> IO ()
gitDiffOrigin branched dist =
repoAction branched False (Output (const (git "diff" [distRemote branched dist]))) dist
stackageCompare :: Dist -> String -> StkgOpt -> Dist -> [Package] -> IO ()
stackageCompare branched stream opt dist =
repoAction branched True (Header False compareStackage) dist
where
compareStackage :: Package -> IO ()
compareStackage p = do
nvr <- cmd (rpkg dist) ["verrel"]
stkg <- cmdMaybe "stackage" ["package", stream, removePrefix "ghc-" p]
let same = isJust stkg && (fromJust stkg ++ "-") `isInfixOf` nvr
unless same $
if opt == StkgMissing
then when (isNothing stkg) $ putStrLn p
else
if isNothing stkg
then unless (opt == StkgOnly) $
putStrLn $ stream ++ " missing: " ++ removePrefix "ghc-" p
else do
putStrLn nvr
putStrLn $ replicate (length (dropVerrel nvr) + 1) ' ' ++ fromJust stkg +-+ "(" ++ stream ++ ")"
diffStat :: IO String
diffStat = git "diff" ["--stat"]
hackageUpload :: Dist -> Bool -> IO ()
hackageUpload branched refreshData = do
csv <- repoqueryHackageCSV hackageRelease
home <- getHomeDirectory
[username, password] <- map B.pack . words <$> readFile (home </> ".fedora/hackage.auth")
req <- setRequestBasicAuth username password .
setRequestBodyLBS (BL.pack csv) .
addRequestHeader hContentType (B.pack "text/csv") .
setRequestMethod methodPut <$>
parseRequestThrow "https://hackage.haskell.org/distro/Fedora/packages.csv"
resp <- httpLbs req
BL.putStrLn $ getResponseBody resp
where
repoqueryHackageCSV :: Dist -> IO String
repoqueryHackageCSV dist = do
pkgs <- repoqueryHackages branched hackageRelease
-- Hackage csv chokes on a final newline
intercalate "\n" . sort . map (replace "\"ghc-" "\"") . lines <$> repoquery dist (["--repo=fedora", "--repo=updates", "--latest-limit=1", "--qf=\"%{name}\",\"%{version}\",\"https://src.fedoraproject.org/rpms/%{source_name}\""] ++ ["--refresh" | refreshData] ++ pkgs)
hackageCompare :: Dist -> Bool -> IO ()
hackageCompare branched refreshData =
repoqueryHackages branched hackageRelease >>=
compareHackage hackageRelease
where
compareHackage :: Dist -> [Package] -> IO ()
compareHackage dist pkgs' = do
req <- addRequestHeader hContentType (B.pack "text/csv") <$>
parseRequestThrow "https://hackage.haskell.org/distro/Fedora/packages.csv"
hck <- getResponseBody <$> httpLbs req
let hackage = sort . either (error "Malformed Hackage csv") (map mungeHackage) $ parseCSV "packages.csv" (BL.unpack hck)
repoquery dist (["--repo=fedora", "--repo=updates", "--latest-limit=1", "--qf=%{name},%{version}"] ++ ["--refresh" | refreshData] ++ pkgs') >>=
compareSets True hackage . sort . map mungeRepo . lines
mungeHackage :: [String] -> PkgVer
mungeHackage [n,v,_] = PV n v
mungeHackage _ = error "Malformed Hackage csv"
mungeRepo :: String -> PkgVer
mungeRepo s | ',' `elem` s =
let (p,v) = break (== ',') s in
PV (removePrefix "ghc-" p) (tail v)
| otherwise = error "Malformed repoquery output"
compareSets :: Bool -> [PkgVer] -> [PkgVer] -> IO ()
compareSets _ [] [] = return ()
compareSets _ [] (f:fs) = do
putStrLn (show f +-+ "(new)")
compareSets False [] fs
compareSets all' (h:hs) [] = do
when all' $ putStrLn ("Removed:" +-+ show h)
compareSets all' hs []
compareSets all' (h:hs) (f:fs) | h == f = compareSets all' hs fs
| h < f = do
when all' $ putStrLn $ "Removed:" +-+ show h
compareSets all' hs (f:fs)
| h > f = do
putStrLn $ show f +-+ "(new)"
compareSets all' (h:hs) fs
| otherwise = do
putStrLn $ pvPkg h ++ ":" +-+ pvVer h +-+ "->" +-+ pvVer f
compareSets all' hs fs
headOrigin :: Dist -> Dist -> [Package] -> IO ()
headOrigin branched dist =
repoAction branched False (Header False gitHeadAtOrigin) dist
where
gitHeadAtOrigin :: Package -> IO ()
gitHeadAtOrigin pkg = do
-- use gitDiffQuiet
same <- gitBool "diff" ["--quiet", distRemote branched dist ++ "..HEAD"]
when same $ putStrLn pkg
-- FIXME does not take static requires into account
leaves :: Dist -> Bool -> Dist -> [Package] -> IO ()
leaves branched verb =
repoAction branched True (Header verb checkLeafPkg)
where
-- FIXME: make a dependency cache
checkLeafPkg :: Package -> IO ()
checkLeafPkg pkg = do
dir <- takeFileName <$> getCurrentDirectory
let branchdir = dir /= pkg
top = if branchdir then "../.." else ".."
spec = pkg <.> "spec"
subpkgs <- rpmspec ["--builtrpms"] (Just "%{name}") spec
allpkgs <- listDirectory top
let other = map (\ p -> top </> p </> (if branchdir then dir else "") </> p <.> "spec") $ allpkgs \\ [pkg]
found <- filterM (dependsOn subpkgs) other
if null found
then putStrLn pkg
else when verb $ putStrList found
where
dependsOn :: [Package] -> Package -> IO Bool
dependsOn subpkgs p = do
file <- doesFileExist p
if file
then do
deps <- buildRequires p
return $ any (`elem` deps) subpkgs
else return False
merge :: Dist -> String -> Dist -> [Package] -> IO ()
merge branched branch =
repoAction_ branched True False (git_ "merge" [branch])
missingDeps :: Dist -> Dist -> [Package] -> IO ()
missingDeps branched dist =
repoAction branched True (Output checkForMissingDeps) dist
where
checkForMissingDeps :: Package -> IO String
checkForMissingDeps pkg = do
dir <- takeFileName <$> getCurrentDirectory
let top = if dir == pkg then ".." else "../.."
spec = pkg <.> "spec"
hasSpec <- doesFileExist spec
if hasSpec
then do
deps <- buildRequires (pkg <.> "spec") >>= haskellSrcPkgs top dist
unlines <$> filterM (noPkgDir top) deps
else putStrLn ("no " ++ pkg ++ ".spec file found!") >> return ""
where
noPkgDir :: FilePath -> Package -> IO Bool
noPkgDir top dep =
not <$> doesDirectoryExist (top </> dep)
oldPackages :: Dist -> Dist -> [Package] -> IO ()
oldPackages branched dist pkgs = do
repopkgs <- repoqueryHaskellPkgs branched True dist
putStrList (pkgs \\ repopkgs)
prep :: Dist -> Dist -> [Package] -> IO ()
prep branched dist =
repoAction_ branched True True (cmd_ (rpkg dist) ["prep"]) dist
commit :: Dist -> String -> Dist -> [Package] -> IO ()
commit branched logmsg dist =
repoAction branched False (Output (const commitChanges)) dist
where
commitChanges :: IO String
commitChanges = do
nochgs <- gitDiffQuiet []
if nochgs
then return ""
else cmd (rpkg dist) ["commit", "-m", logmsg]
unpushed :: Dist -> Bool -> Dist -> [Package] -> IO ()
unpushed branched nolog dist =
repoAction branched True (Header False gitLogOneLine) dist
where
gitLogOneLine :: Package -> IO ()
gitLogOneLine pkg = do
out <- git "log" [distRemote branched dist ++ "..HEAD", "--pretty=oneline"]
unless (null out) $
putStrLn $ pkg ++ if nolog then "" else (unwords . map replaceHash . words) out
where
replaceHash h = if length h /= 40 then h else ":"
pushed :: Dist -> Dist -> [Package] -> IO ()
pushed branched dist =
repoAction branched True (Header False checkPushed) dist
where
checkPushed :: Package -> IO ()
checkPushed pkg = do
out <- git "log" [distRemote branched dist ++ "..HEAD", "--pretty=oneline"]
when (null out) $ putStrLn pkg
verrel :: Dist -> Dist -> [Package] -> IO ()
verrel branched dist =
repoAction_ branched False True (cmd_ (rpkg dist) ["verrel"]) dist
data PkgVer = PV { pvPkg :: String, pvVer :: String}
deriving (Eq)
instance Show PkgVer
where
show (PV p v) = p ++ "-" ++ v
instance Ord PkgVer
where
compare (PV p _) (PV p' _) = compare p p'
replace :: Eq a => [a] -> [a] -> [a] -> [a]
replace a b s@(x:xs) =
if a `isPrefixOf` s
then b ++ replace a b (drop (length a) s)
else x:replace a b xs
replace _ _ [] = []
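-- Illustrative example (not part of the original source):
-- >>> replace "ghc-" "" "ghc-aeson"
-- "aeson"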
repoqueryHaskellPkgs :: Dist -> Bool -> Dist -> IO [Package]
repoqueryHaskellPkgs branched verbose dist = do
when verbose $ do
tty <- hIsTerminalDevice stdout
when tty $ warning "Getting packages from repoquery"
let repo = distRepo branched dist
updates = maybeToList $ distUpdates branched dist
bin <- words <$> repoquery dist (["--repo=" ++ repo ++ "-source"] ++ ["--repo=" ++ u ++ "-source" | u <- updates] ++ ["--qf=%{name}", "--whatrequires", "ghc-Cabal-*"])
when (null bin) $ error "No packages using ghc-Cabal-devel found!"
return $ sort $ filter (not . isGhcXY) $ nub bin
where
isGhcXY :: String -> Bool
isGhcXY p = let prefix = head (splitOn "-" p)
in all (\c -> isDigit c || c == '.') (removePrefix "ghc" prefix)
repoqueryHackages :: Dist -> Dist -> IO [Package]
repoqueryHackages branched dist = do
srcs <- repoqueryHaskellPkgs branched False dist
libs <- repoqueryHaskellLibs False
let binsrcs = filter (not . ("ghc-" `isPrefixOf`)) srcs
sublibs = libs \\ map ("ghc-" ++) binsrcs
return $ sort $ nub (delete "haskell-platform" srcs ++ sublibs)
where
repoqueryHaskellLibs :: Bool -> IO [Package]
repoqueryHaskellLibs verbose = do
when verbose $ putStrLn "Getting libraries from repoquery"
let repo = distRepo branched dist
updates = maybeToList $ distUpdates branched dist
bin <- words <$> repoquery dist (["--repo=" ++ repo] ++ ["--repo=" ++ u | u <- updates] ++ ["--qf=%{name}", "--whatprovides", "libHS*-ghc*.so()(64bit)"])
when (null bin) $ error "No libHS*.so providers found!"
return $ sort $ filter ("ghc-" `isPrefixOf`) $ nub bin
newPackages :: Dist -> Dist -> IO [Package]
newPackages branched dist = do
ps <- repoqueryHaskellPkgs branched True dist
pps <- cmdLines "pagure" ["list", "--namespace", "rpms", "ghc*"]
local <- listDirectory "."
filterM (\ d -> not <$> doesFileExist (d </> "dead.package")) $ nub (pps ++ ps) \\ (local ++ ["Agda-stdlib", "ghc", "ghc-rpm-macros", "ghc-srpm-macros", "haskell-platform"])
haveSshKey :: IO Bool
haveSshKey = do
home <- getHomeDirectory
doesFileExist $ home </> ".ssh/id_rsa"
cloneAllBranches :: Dist -> [Package] -> IO ()
cloneAllBranches _ [] = return ()
cloneAllBranches dist (pkg:rest) = do
withCurrentDirectory "." $ do
putStrLn $ "\n==" +-+ pkg +-+ "=="
-- muser <- getEnv "USER"
haveSSH <- haveSshKey
dirExists <- doesDirectoryExist pkg
unless dirExists $
cmd_ (rpkg dist) $ ["clone"] ++ ["-a" | not haveSSH] ++ ["-B", pkg]
singleDir <- isGitDir pkg
when singleDir $
error "branch checkout already exists!"
cloneAllBranches dist rest
data Action = Output (Package -> IO String) | Header Bool (Package -> IO ())
showHeader :: Action -> Bool
showHeader (Header b _) = b
showHeader (Output _) = False
repoAction :: Dist -> Bool -> Action -> Dist -> [Package] -> IO ()
repoAction _ _ _ _ [] = return ()
repoAction branched needsSpec action dist (pkg:rest) = do
withCurrentDirectory "." $ do
let branch = distBranch branched dist
when (showHeader action) $
putStrLn $ "\n==" +-+ pkg +-+ branch +-+ "=="
-- muser <- getEnv "USER"
haveSSH <- haveSshKey
fileExists <- doesFileExist pkg
if fileExists
then error $ pkg +-+ "is a file"
else do
dirExists <- doesDirectoryExist pkg
unless dirExists $
cmd_ (rpkg dist) $ ["clone"] ++ ["-a" | not haveSSH] ++ ["-b", branch, pkg]
singleDir <- isGitDir pkg
unless singleDir $ do
branchDir <- doesDirectoryExist $ pkg </> branch
unless branchDir $
withCurrentDirectory pkg $
cmd_ (rpkg dist) ["clone", "-b", branch, pkg, branch]
wd <- pkgDir pkg branch ""
setCurrentDirectory wd
pkggit <- do
gd <- isGitDir "."
if gd
then checkPkgsGit
else return False
unless pkggit $
error $ "not a Fedora pkg git dir!:" +-+ wd
actual <- gitBranch
when (branch /= actual) $
whenJustM (cmdMaybe (rpkg dist) ["switch-branch", branch]) $ \ out ->
when (showHeader action) $ putStrLn out
currentBranch <- gitBranch
when (branch == currentBranch) $
unlessM (doesFileExist "dead.package") $ do
let spec = pkg <.> "spec"
hasSpec <- doesFileExist spec
-- FIXME: silence for cmds that only output package names (eg unpushed -s)
when (not hasSpec && showHeader action) $
putStrLn "No spec file!"
unless (needsSpec && not hasSpec) $
case action of
Header _ act -> act pkg
Output act -> do
out <- act pkg
unless (null out) $ do
putStrLn $ "\n==" +-+ pkg +-+ branch +-+ "=="
putStrLn out
repoAction branched needsSpec action dist rest
-- io independent of package
repoAction_ :: Dist -> Bool -> Bool -> IO () -> Dist -> [Package] -> IO ()
repoAction_ branched header needsSpec action =
repoAction branched needsSpec (Header header (const action))
-- compareRawhide :: Package -> IO ()
-- compareRawhide p = do
-- let spec = p <.> "spec"
-- nvr <- removeDisttag . unwords <$> rpmspec ["--srpm"] (Just "%{name}-%{version}-%{release}") spec
-- nvr' <- withBranch "master" $ do
-- haveSpec <- doesFileExist spec
-- unless haveSpec $ cmdSilent "git" ["pull"]
-- removeDisttag . unwords <$> rpmspec ["--srpm"] (Just "%{name}-%{version}-%{release}") spec
-- if nvr == nvr'
-- then putStrLn nvr
-- else do
-- putStrLn nvr
-- putStrLn nvr'
-- putStrLn ""
-- where
-- removeDisttag = reverse . tail . dropWhile (/= '.') . reverse
isFromHackage :: Package -> IO Bool
isFromHackage pkg =
grep_ "hackage.haskell.org/package/" $ pkg <.> "spec"
update :: Dist -> String -> Dist -> [Package] -> IO ()
update branched stream =
repoAction branched True (Header True doUpdate)
where
doUpdate :: Package -> IO ()
doUpdate pkg = do
hckg <- isFromHackage pkg
if hckg
then cmd_ "cabal-rpm" ["update", "-s", stream]
else putStrLn "skipping since not hackage"
refresh :: Dist -> Bool -> Dist -> [Package] -> IO ()
refresh branched dryrun =
repoAction branched True (Header True refreshPkg)
where
refreshPkg :: Package -> IO ()
refreshPkg pkg = do
hckg <- isFromHackage pkg
if hckg
then cmd_ "cabal-rpm" $ "refresh" : ["--dry-run" | dryrun]
else putStrLn "skipping since not hackage"
listTagged_ :: Bool -> String -> IO ()
listTagged_ short tag =
listTagged short tag >>= putStrList
listTagged :: Bool -> String -> IO [String]
listTagged short tag = do
builds <- map (head . words) <$> cmdLines "koji" ["list-tagged", "--quiet", tag]
return $ nub $ map (if short then dropVerrel else id) builds
dropVerrel :: String -> String
dropVerrel nvr =
let parts = splitOn "-" nvr in
intercalate "-" $ take (length parts - 2) parts
remaining :: Bool -> String -> [Package] -> IO ()
remaining count tag pkgs = do
built <- listTagged True tag
let left = pkgs \\ built
if count
then print $ length left
else cmd_ "rpmbuild-order" $ ["sort", "-p"] ++ left
cabalDepends :: Package -> IO String
cabalDepends p = do
hckg <- isFromHackage p
if hckg then do
vr <- removePrefix "ghc-" . head <$>
rpmspec ["--srpm"] (Just "%{name}-%{version}") (p <.> "spec")
setCurrentDirectory vr
cmdQuiet "cabal-depends" ["--not-build", "--unique"]
else return ""
cblrpm :: Dist -> String -> Dist -> [Package] -> IO ()
cblrpm _ "" = error "CMD string must be given"
cblrpm branched cs =
repoAction branched True (Header True doCblRpm)
where
doCblRpm :: Package -> IO ()
doCblRpm p = do
hckg <- isFromHackage p
when hckg $
cmd_ "cblrpm" [cs]
unbranched :: Dist -> Dist -> [Package] -> IO ()
unbranched branched dist =
mapM_ checkBranch
where
checkBranch :: Package -> IO ()
checkBranch pkg =
withCurrentDirectory pkg $ do
dead <- doesFileExist "dead.package"
unless dead $ do
let distbranch = distBranch branched dist
branch <- gitBool "show-ref" ["--verify", "--quiet", "refs/heads/" ++ distbranch]
unless branch $ do
remotebranch <- gitBool "ls-remote" ["--exit-code", "--refs", "origin", distbranch]
unless remotebranch $
putStrLn pkg
subpackaged :: Dist -> Bool -> Dist -> [Package] -> IO ()
subpackaged branched versions dist pkgs = do
repoAction branched True (Output listSubpkgs) dist pkgs
where
listSubpkgs pkg = do
msubpkgs <- fmap (drop 2 . words) <$> cmdMaybe "grep" ["%global subpkgs", pkg <.> "spec"]
case msubpkgs of
Nothing -> return ""
Just subpkgs ->
intercalate "\n" <$> mapMaybeM expand subpkgs
where
expand subpkg =
let macro = (init . drop 2) subpkg in
fmap (removeVersion . last . words) <$> cmdMaybe "grep" ["%global " ++ macro, pkg <.> "spec"]
removeVersion nv =
if versions then nv else init (dropWhileEnd (/= '-') nv)
#if !MIN_VERSION_simple_cmd(0,2,2)
-- | 'gitBool c args' runs git command and return result
gitBool :: String -- ^ git command
-> [String] -- ^ arguments
-> IO Bool -- ^ result
gitBool c args = do
mout <- cmdMaybe "git" (c:args)
return $ isJust mout
#endif
|
fedora-haskell/fedora-haskell-tools
|
fhpkg.hs
|
gpl-3.0
| 28,759 | 1 | 28 | 7,224 | 8,384 | 4,185 | 4,199 | -1 | -1 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE ViewPatterns #-}
{-# OPTIONS -fwarn-unused-imports #-}
module Main
where
import ClassyPrelude
import Data.Serialize as Cereal
data Message = Message MagicValue ByteString Word32 Word32 ByteString
parseMessage :: Cereal.Get Message
parseMessage = fmap Message Cereal.get <*>
Cereal.getByteString 12 <*>
Cereal.getWord32be <*>
Cereal.getWord32be <*>
(Cereal.remaining >>= Cereal.getByteString)
data MagicValue = MagicValue -- (extensible)
deriving (Eq, Show)
instance Cereal.Serialize MagicValue where
get = Cereal.getBytes 4 >>= f
where
f "\xE9\xBE\xB4\xD9" = return MagicValue
f bad = fail $ "Cereal.Serialize: bad magic value: " <> show bad
put MagicValue = Cereal.putByteString "\xE9\xBE\xB4\xD9"
newtype VInt = VInt Word64
instance Cereal.Serialize VInt where
get = do
b <- Cereal.getWord8
case b of
0xfd -> VInt . fromIntegral <$> Cereal.getWord16be
0xfe -> VInt . fromIntegral <$> Cereal.getWord32be
0xff -> fmap VInt Cereal.getWord64be
a -> return $ VInt $ fromIntegral a
put (VInt x) | x < 0xfd = putWord8 $ fromIntegral x
put (VInt x) | x < 0xffff = putWord8 0xfd >> putWord16be (fromIntegral x)
put (VInt x) | x < 0xffffffff = putWord8 0xfe >> putWord32be (fromIntegral x)
put (VInt x) = putWord8 0xff >> putWord64be x
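-- Illustrative encodings produced by the instance above (not part of the
-- original source); multi-byte lengths are written big-endian here:
--   put (VInt 0x05)     ->  05
--   put (VInt 0x0123)   ->  fd 01 23
--   put (VInt 0x12345)  ->  fe 00 01 23 45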
newtype VString = VString ByteString
instance Cereal.Serialize VString where
get = do
(VInt (fromIntegral -> len)) <- get
VString <$> getByteString len
put (VString x) = do
let len = VInt $ fromIntegral $ length x
put len
putByteString x
newtype VIntList = VL [VInt]
instance Cereal.Serialize VIntList where
get = do
(VInt (fromIntegral -> len)) <- get
VL <$> sequence (replicate len get)
put (VL x) = do
let len = VInt $ fromIntegral $ length x
put len
mapM_ put x
data MessageType =
Version
| Verack
| Addr
| Inv
| Getdata
| ObjectType ObjectType
deriving (Eq, Ord, Show)
data ObjectType =
Getpubkey
| Pubkey
| Msg
| Broadcast
deriving (Eq, Ord, Show)
|
berdario/hitmessage
|
hitmessage.hs
|
agpl-3.0
| 3,295 | 0 | 12 | 1,099 | 716 | 367 | 349 | 84 | 1 |
{-
Bustle.Loader: loads logs using one of the two sub-loaders
Copyright © 2011–2012 Collabora Ltd.
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
-}
module Bustle.Loader
( readLog
, LoadError(..)
-- * This function bothers me, but it's used by the live recorder for now...
, isRelevant
)
where
import Control.Monad.Except
import Control.Arrow (second)
import qualified Bustle.Loader.Pcap as Pcap
import Bustle.Types
import Bustle.Util (io)
data LoadError = LoadError FilePath String
-- this nested case stuff is ugly, but it's less ugly than it looked with
-- combinators to turn IO (Either a b) into ErrorT LoadError IO b using various
-- a -> LoadError functions.
readLog :: MonadIO io
=> FilePath
-> ExceptT LoadError io ([String], Log)
readLog f = do
pcapResult <- io $ Pcap.readPcap f
case pcapResult of
Right ms -> return $ second (filter (isRelevant . deEvent)) ms
Left ioe -> throwError $ LoadError f (show ioe)
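-- Hypothetical usage sketch (not part of the original module): running the
-- loader in IO and reporting either the failure or the warnings it produced.
loadAndReport :: FilePath -> IO ()
loadAndReport path = do
    result <- runExceptT (readLog path)
    case result of
        Left (LoadError f err) -> putStrLn (f ++ ": " ++ err)
        Right (warnings, _log) -> mapM_ putStrLn warnings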
isRelevant :: Event
-> Bool
isRelevant (NOCEvent _) = True
isRelevant (MessageEvent m) = case m of
Signal {} -> not senderIsBus
MethodCall {} -> none3
MethodReturn {} -> none3
Error {} -> none3
where
-- FIXME: really? Maybe we should allow people to be interested in,
-- say, binding to signals?
senderIsBus = sender m == busDriver
destIsBus = destination m == busDriver
busDriver = O (OtherName dbusName)
none bs = not $ or bs
none3 = none [senderIsBus, destIsBus]
|
wjt/bustle
|
Bustle/Loader.hs
|
lgpl-2.1
| 2,181 | 0 | 15 | 483 | 350 | 187 | 163 | 31 | 4 |
{-# LANGUAGE FlexibleContexts
, FlexibleInstances
, FunctionalDependencies
, MultiParamTypeClasses
, QuasiQuotes
, TemplateHaskell
#-}
module Avro.Records
( FieldDesc
( FieldDesc
, fieldName
, fieldDoc
, fieldDefaultValue
, fieldOrder
, fieldAliases
)
, Order (Ascending, Descending, Ignore)
, Field (Field, fieldDesc, fieldSchema)
, RecordDesc
( RecordDesc
, recordName
, recordDoc
, recordAliases
)
, field
, record
, name
, doc
, defaultValue
, order
, aliases
, desc
, schema
, withDoc
, withDefault
, withOrder
, withAlias
)
where
import Control.Lens ((&), (.~), (%~))
import Control.Lens.TH (makeLensesWith, abbreviatedFields)
import Data.ByteString (ByteString)
data FieldDesc a
= FieldDesc
{ fieldName :: ByteString
, fieldDoc :: Maybe ByteString
, fieldDefaultValue :: Maybe a
, fieldOrder :: Order
, fieldAliases :: [ByteString]
}
deriving (Eq, Show)
data Order = Ascending | Descending | Ignore
deriving (Eq, Show)
data Field s a
= Field
{ fieldDesc :: FieldDesc a
, fieldSchema :: s a
}
field :: ByteString -> FieldDesc a
field name
= FieldDesc
{ fieldName = name
, fieldDoc = Nothing
, fieldDefaultValue = Nothing
, fieldOrder = Ascending
, fieldAliases = []
}
data RecordDesc
= RecordDesc
{ recordName :: ByteString
, recordDoc :: Maybe ByteString
, recordAliases :: [ByteString]
}
deriving (Eq, Show)
record :: ByteString -> RecordDesc
record name
= RecordDesc
{ recordName = name
, recordDoc = Nothing
, recordAliases = []
}
$(makeLensesWith abbreviatedFields ''FieldDesc)
$(makeLensesWith abbreviatedFields ''Field)
$(makeLensesWith abbreviatedFields ''RecordDesc)
withDoc :: HasDoc r (Maybe ByteString) => r -> ByteString -> r
f `withDoc` d = f & doc .~ Just d
withDefault :: HasDefaultValue r (Maybe a) => r -> a -> r
f `withDefault` d = f & defaultValue .~ Just d
withOrder :: HasOrder r Order => r -> Order -> r
f `withOrder` o = f & order .~ o
withAlias :: HasAliases r [a] => r -> a -> r
f `withAlias` a = f & aliases %~ (a:)
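-- Illustrative only (not in the original source): with @OverloadedStrings@
-- enabled for the 'ByteString' literals, a field description can be built up
-- from the combinators above, e.g.
--
-- > exampleField :: FieldDesc Int
-- > exampleField =
-- >   field "count"
-- >     `withDoc` "number of retries"
-- >     `withDefault` 0
-- >     `withOrder` Ignore
-- >     `withAlias` "retries"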
|
cumber/havro
|
src/Avro/Records.hs
|
lgpl-3.0
| 2,279 | 0 | 9 | 659 | 626 | 370 | 256 | 91 | 1 |
import Control.Monad
a = do
x <- [1..50]
guard $ '7' `elem` show x
return x
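-- Illustrative note: the do-block above is equivalent to the list
-- comprehension [x | x <- [1..50], '7' `elem` show x], i.e. [7,17,27,37,47].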
|
Crazycolorz5/Haskell-Code
|
Comprehensions.hs
|
unlicense
| 78 | 0 | 8 | 18 | 45 | 22 | 23 | 5 | 1 |
module TreapTest where
import qualified Treap as T
import Test.Hspec
import Test.QuickCheck
import System.Random
main :: IO ()
main = hspec $
describe "empty" $
it "is a valid Treap" $
verbose $ property $ \seed -> T.checkTreap (T.newStdGenTreap seed)
|
songpp/my-haskell-playground
|
tests/TreapTest.hs
|
apache-2.0
| 279 | 0 | 10 | 67 | 84 | 46 | 38 | 10 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Openshift.V1.PodTemplateSpec where
import GHC.Generics
import Kubernetes.V1.ObjectMeta
import Openshift.V1.PodSpec
import qualified Data.Aeson
-- | PodTemplateSpec describes the data a pod should have when created from a template
data PodTemplateSpec = PodTemplateSpec
{ metadata :: Maybe ObjectMeta -- ^ Standard object's metadata. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#metadata
, spec :: Maybe PodSpec -- ^ Specification of the desired behavior of the pod. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#spec-and-status
} deriving (Show, Eq, Generic)
instance Data.Aeson.FromJSON PodTemplateSpec
instance Data.Aeson.ToJSON PodTemplateSpec
|
minhdoboi/deprecated-openshift-haskell-api
|
openshift/lib/Openshift/V1/PodTemplateSpec.hs
|
apache-2.0
| 887 | 0 | 9 | 111 | 100 | 61 | 39 | 16 | 0 |
-- Copyright (c) 2010 - Seweryn Dynerowicz
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
module Policy.UsablePath
( UsablePath(..)
) where
import Algebra.Semiring
import LaTeX
data UsablePath = U Int
deriving (Eq)
instance Show UsablePath where
show (U 0) = "X"
show (U 1) = "V"
instance Semiring UsablePath where
add (U a) (U b) = U (max a b)
zero = (U 0)
mul (U a) (U b) = U (min a b)
unit = (U 1)
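-- Illustrative only (not in the original file): over the two policy values the
-- semiring behaves like Boolean algebra, joining alternatives with 'add' (OR)
-- and composing path segments with 'mul' (AND).
exampleUsable :: UsablePath
exampleUsable = add (mul (U 1) (U 0)) (U 1) -- evaluates to U 1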
instance LaTeX UsablePath where
toLaTeX (U 0) = "\\ko"
toLaTeX (U 1) = "\\ok"
|
sdynerow/SemiringsLibrary
|
Policy/UsablePath.hs
|
apache-2.0
| 1,010 | 0 | 8 | 201 | 219 | 120 | 99 | 17 | 0 |
module BrownPLT.TypedJS.Unification
( unify
, unifyList
, subst
) where
import BrownPLT.TypedJS.Prelude
import BrownPLT.TypedJS.PrettyPrint (renderType)
import BrownPLT.TypedJS.Infrastructure
import BrownPLT.TypedJS.TypeDefinitions
import qualified Data.Map as M
doesOccurIn :: String
-> Type
-> Bool
doesOccurIn x ty = occ ty
where occField (_, _, ty) = occ ty
occArg (ArgType argTys opt) = any occ argTys || maybe False occ opt
occ ty = case ty of
TArguments arg -> occArg arg
TAny -> False
TObject _ argTys fields -> any occ argTys || any occField fields
TArrow thisTy args retTy -> occ thisTy || occArg args || occ retTy
TId y | x == y -> True
| otherwise -> False
TIx _ -> False
TApp _ tys -> any occ tys
TUnion ty1 ty2 -> occ ty1 || occ ty2
TExists ty -> occ ty
TForall ty -> occ ty
TNamedForall y ty | x == y -> False
| otherwise -> occ ty
TIntersect ty1 ty2 -> occ ty1 || occ ty2
TConstr _ argTys initTy retTy ->
any occ argTys || occ initTy || occ retTy
type Subst = Map String Type
subst :: Subst -> Type -> Type
subst s ty = everywhere (mkT f) ty
where f :: Type -> Type
f (TId x) = case M.lookup x s of
Just ty -> ty
Nothing -> TId x
f ty = ty
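-- Illustrative only (not in the original source): applying a substitution,
-- here mapping the type variable "a" to TAny inside a union type.
exampleSubst :: Type
exampleSubst = subst (M.singleton "a" TAny) (TUnion (TId "a") TAny)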
extendSubst :: Subst -> String -> Type -> Subst
extendSubst s x ty = M.insert x ty (M.map (subst (M.singleton x ty)) s)
unifyAllM :: EnvM m
=> [Type]
-> [Type]
-> Subst
-> m Subst
unifyAllM xs ys s = case (xs, ys) of
([], []) -> return s
(x:xs, y:ys) -> do
s <- unifyM (subst s x) (subst s y) s
unifyAllM xs ys s
otherwise -> fail "unification failed: lists of uneven lengths"
unifyArgsM :: EnvM m
=> ArgType
-> ArgType
-> Subst
-> m Subst
unifyArgsM (ArgType args1 opt1) (ArgType args2 opt2) s = do
s <- unifyAllM args1 args2 s
return s
unifyFieldsM :: EnvM m
=> [Field]
-> [Field]
-> Subst
-> m Subst
unifyFieldsM [] [] s = return s
unifyFieldsM _ [] s = return s -- permit extra fields on LHS
unifyFieldsM [] _ s = fail "unification failed on an object"
unifyFieldsM ((f1, ro1, ty1):xs) ((f2, ro2, ty2):ys) s
| f1 < f2 = unifyFieldsM xs ((f2, ro2, ty2):ys) s -- extra field on LHS
  | f2 < f1 = fail "unification failed: extra field on RHS"
| otherwise = case (ro1, ro2) of
(True, False) -> fail "unification failed: field r/w permissions"
(False, False) -> do
s <- unifyM (subst s ty1) (subst s ty2) s
s <- unifyM (subst s ty2) (subst s ty1) s
unifyFieldsM xs ys s
(_, True) -> do
s <- unifyM (subst s ty1) (subst s ty2) s
unifyFieldsM xs ys s
unifyM :: EnvM m
=> Type
-> Type
-> Subst
-> m Subst
unifyM ty1 ty2 s = case (ty1, ty2) of
(TObject brand1 argTys1 fields1, TObject brand2 argTys2 fields2) -> do
r <- isSubbrand brand1 brand2
unless r $ fail $ printf
"unification failed: %s is not a sub-brand of %s" brand1 brand2
s <- unifyAllM argTys1 argTys2 s
s <- unifyFieldsM fields1 fields2 s
return s
(TAny, TAny) -> return s
(TArguments arg1, TArguments arg2) -> unifyArgsM arg1 arg2 s
(TArrow thisTy1 args1 retTy1, TArrow thisTy2 args2 retTy2) -> do
s <- unifyM thisTy2 thisTy1 s
s <- unifyArgsM args2 args1 s
unifyM retTy1 retTy2 s
(TIx x, TIx y) -> do
unless (x == y) $ fail $ printf
"unification failed: cannot unify bound variables %s and %s"
(show x) (show y)
return s
(TApp constr1 argTys1, TApp constr2 argTys2) -> do
unless (constr1 == constr2) $ fail $ printf
"cannot unify %s with %s" constr1 constr2
unifyAllM argTys1 argTys2 s
(TId v1, TId v2)
| v1 == v2 -> return s
| otherwise -> case (v1, v2) of
('#':_, _) -> return (extendSubst s v1 (TId v2))
otherwise -> return (extendSubst s v2 (TId v1))
(TId v1, ty2)
| v1 `doesOccurIn` ty2 -> fail $ printf
"unification failed: %s occurs in %s" v1 (renderType ty2)
| otherwise -> return (extendSubst s v1 ty2)
(ty1, TId v2)
| v2 `doesOccurIn` ty1 -> fail $ printf
"unification failed: %s occurs in %s" v2 (renderType ty1)
| otherwise -> return (extendSubst s v2 ty1)
(TUnion ty11 ty12, TUnion ty21 ty22) -> do
s <- unifyM ty11 ty21 s
unifyM (subst s ty12) (subst s ty22) s
(TIntersect ty11 ty12, TIntersect ty21 ty22) -> do
s <- unifyM ty11 ty21 s
unifyM (subst s ty12) (subst s ty22) s
(TExists ty1, TExists ty2) -> unifyM ty1 ty2 s
(TForall ty1, ty2) -> unifyM ty1 ty2 s
(TNamedForall x ty1, TNamedForall y ty2) ->
unifyM ty1 (subst (M.singleton y (TId x)) ty2) (extendSubst s y (TId x))
otherwise -> fail $ printf "unification failed: cannot unify\n%s\nwith\n%s"
(renderType ty1) (renderType ty2)
unifyList :: EnvM m
=> [Type]
-> [Type]
-> m Subst
unifyList tys1 tys2 = unifyAllM tys1 tys2 M.empty
unify :: EnvM m
=> Type
-> Type
-> m Subst
unify t1 t2 = do
unifyM t1 t2 M.empty
|
brownplt/strobe-old
|
src/BrownPLT/TypedJS/Unification.hs
|
bsd-2-clause
| 5,287 | 0 | 16 | 1,651 | 2,120 | 1,029 | 1,091 | 145 | 16 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ViewPatterns #-}
-- | Tag a Store instance with structural version info to ensure we're
-- reading a compatible format.
module Data.Store.VersionTagged
( versionedEncodeFile
, versionedDecodeOrLoad
, versionedDecodeFile
, storeVersionConfig
) where
import Control.Applicative
import Control.Monad.IO.Unlift
import Control.Monad.Logger
import qualified Data.ByteString as BS
import Data.Data (Data)
import qualified Data.Map as M
import Data.Monoid ((<>))
import qualified Data.Set as S
import Data.Store
import Data.Store.Core (unsafeEncodeWith)
import Data.Store.Version
import qualified Data.Text as T
import Language.Haskell.TH
import Path
import Path.IO (ensureDir)
import Prelude
versionedEncodeFile :: Data a => VersionConfig a -> Q Exp
versionedEncodeFile vc = [e| storeEncodeFile $(encodeWithVersionQ vc) $(decodeWithVersionQ vc) |]
versionedDecodeOrLoad :: Data a => VersionConfig a -> Q Exp
versionedDecodeOrLoad vc = [| versionedDecodeOrLoadImpl $(encodeWithVersionQ vc) $(decodeWithVersionQ vc) |]
versionedDecodeFile :: Data a => VersionConfig a -> Q Exp
versionedDecodeFile vc = [e| versionedDecodeFileImpl $(decodeWithVersionQ vc) |]
-- | Write to the given file.
storeEncodeFile :: (Store a, MonadIO m, MonadLogger m, Eq a)
=> (a -> (Int, Poke ()))
-> Peek a
-> Path Abs File
-> a
-> m ()
storeEncodeFile pokeFunc peekFunc fp x = do
let fpt = T.pack (toFilePath fp)
$logDebug $ "Encoding " <> fpt
ensureDir (parent fp)
let (sz, poker) = pokeFunc x
encoded = unsafeEncodeWith poker sz
assert (decodeExWith peekFunc encoded == x) $ liftIO $ BS.writeFile (toFilePath fp) encoded
$logDebug $ "Finished writing " <> fpt
-- | Read from the given file. If the read fails, run the given action and
-- write that back to the file. Always starts the file off with the
-- version tag.
versionedDecodeOrLoadImpl :: (Store a, Eq a, MonadUnliftIO m, MonadLogger m)
=> (a -> (Int, Poke ()))
-> Peek a
-> Path Abs File
-> m a
-> m a
versionedDecodeOrLoadImpl pokeFunc peekFunc fp mx = do
let fpt = T.pack (toFilePath fp)
$logDebug $ "Trying to decode " <> fpt
mres <- versionedDecodeFileImpl peekFunc fp
case mres of
Just x -> do
$logDebug $ "Success decoding " <> fpt
return x
_ -> do
$logDebug $ "Failure decoding " <> fpt
x <- mx
storeEncodeFile pokeFunc peekFunc fp x
return x
versionedDecodeFileImpl :: (Store a, MonadUnliftIO m, MonadLogger m)
=> Peek a
-> Path loc File
-> m (Maybe a)
versionedDecodeFileImpl peekFunc fp = do
mbs <- liftIO (Just <$> BS.readFile (toFilePath fp)) `catch` \(err :: IOException) -> do
$logDebug ("Exception ignored when attempting to load " <> T.pack (toFilePath fp) <> ": " <> T.pack (show err))
return Nothing
case mbs of
Nothing -> return Nothing
Just bs ->
liftIO (Just <$> decodeIOWith peekFunc bs) `catch` \(err :: PeekException) -> do
let fpt = T.pack (toFilePath fp)
$logDebug ("Error while decoding " <> fpt <> ": " <> T.pack (show err) <> " (this might not be an error, when switching between stack versions)")
return Nothing
storeVersionConfig :: String -> String -> VersionConfig a
storeVersionConfig name hash = (namedVersionConfig name hash)
{ vcIgnore = S.fromList
[ "Data.Vector.Unboxed.Base.Vector GHC.Types.Word"
, "Data.ByteString.Internal.ByteString"
]
, vcRenames = M.fromList
[ ( "Data.Maybe.Maybe", "GHC.Base.Maybe")
, ( "Stack.Types.Compiler.CVActual"
, "Stack.Types.Compiler.'CVActual"
)
, ( "Stack.Types.Compiler.CVWanted"
, "Stack.Types.Compiler.'CVWanted"
)
]
}
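-- Hypothetical usage sketch (not part of the original module); the cache type
-- and version hash below are invented for illustration:
--
-- > buildCacheVC :: VersionConfig BuildCache
-- > buildCacheVC = storeVersionConfig "build-v1" "a1b2c3d4"
-- >
-- > -- writing and reading back a cache file:
-- > -- $(versionedEncodeFile buildCacheVC) cacheFile cacheValue
-- > -- $(versionedDecodeFile buildCacheVC) cacheFile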
|
martin-kolinek/stack
|
src/Data/Store/VersionTagged.hs
|
bsd-3-clause
| 4,305 | 0 | 20 | 1,196 | 1,052 | 544 | 508 | 93 | 2 |
module WithTiming.Programs (basic, allowAnyExitCode) where
import qualified Data.Text as T
import System.Exit (ExitCode (..))
import WithTiming.Prediction (showDiff)
import WithTiming.Program
successful :: ExitCode -> Bool
successful ExitSuccess = True
successful _ = False
showSeconds :: Integer -> String
showSeconds = showDiff . fromInteger
-- | A typical example of a program constructed in the 'Program' type.
basic :: Key -> T.Text -> Program time ExitCode
basic key command = do
prev <- readPrevious key
predict prev
start <- beginTimer
exitCode <- execute command
if (successful exitCode) then do
time <- secondsSince start
inform $ "Command executed successfully in " ++ showSeconds time ++ "."
writeResult key time
else
inform "Command failed! Not recording results."
return exitCode
-- | A program that informs the user of abnormal exit codes, but records the results nonetheless.
allowAnyExitCode :: Key -> T.Text -> Program time ExitCode
allowAnyExitCode key command = do
prev <- readPrevious key
predict prev
start <- beginTimer
exitCode <- execute command
time <- secondsSince start
case exitCode of
ExitSuccess -> inform $ "Command executed successfully in " ++ showSeconds time ++ "."
ExitFailure num -> inform $ "Command exited abnormally (" ++ show num ++ ") in " ++ showSeconds time ++ "."
writeResult key time
return exitCode
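-- Illustrative only (not in the original file): values like 'basic key cmd'
-- are pure descriptions in the 'Program' DSL; an interpreter defined elsewhere
-- (hypothetical name) would execute them, e.g.
--
-- > runShellProgram (basic "build" "stack build") >>= print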
|
holguinj/with-timing
|
src/WithTiming/Programs.hs
|
bsd-3-clause
| 1,468 | 0 | 14 | 327 | 371 | 179 | 192 | 34 | 2 |
{-# LANGUAGE PackageImports #-}
module Control.Applicative (module M) where
import "base" Control.Applicative as M
|
silkapp/base-noprelude
|
src/Control/Applicative.hs
|
bsd-3-clause
| 120 | 0 | 4 | 18 | 21 | 15 | 6 | 3 | 0 |
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE PolyKinds #-}
module Arith1 (TmBool(..), TmNat(..), TmArith(..)) where
import Lib
import Text.Parsec hiding (runP)
import Text.PrettyPrint hiding (char, space)
import Data.Typeable
import GHC.TypeLits
import qualified Data.Map as M
-- TmBool
data TmBool e l where
TmTrue :: TmBool e 0
TmFalse :: TmBool e 0
TmIf :: e 0 -> e 0 -> e 0 -> TmBool e 0
instance HFunctor TmBool where
hfmap _ TmTrue = TmTrue
hfmap _ TmFalse = TmFalse
hfmap f (TmIf e1 e2 e3) = TmIf (f e1) (f e2) (f e3)
instance MyShow TmBool where
showMe TmTrue = "true"
showMe TmFalse = "false"
showMe (TmIf (In _ e1) (In _ e2) (In _ e3)) =
"if (" ++ showMe e1 ++ ") then (" ++ showMe e2 ++ ") else (" ++ showMe e3 ++ ")"
parseTmBool :: NewParser TmBool fs 0
parseTmBool e p =
(keyword "true" >> pure (In e TmTrue)) <|>
(keyword "false" >> pure (In e TmFalse)) <|>
do { keyword "if"; e1 <- p !!! (Proxy :: Proxy 0);
keyword "then"; e2 <- p !!! (Proxy :: Proxy 0);
keyword "else"; e3 <- p !!! (Proxy :: Proxy 0);
return $ In e (TmIf e1 e2 e3)}
instance Syntax TmBool 0 where
keywords _ _ = ["true", "false", "if", "then", "else"]
parseF _ = parseTmBool
-- TmNat
data TmNat e l where
TmZero :: TmNat e 0
TmSucc :: e 0 -> TmNat e 0
TmPred :: e 0 -> TmNat e 0
instance HFunctor TmNat where
hfmap _ TmZero = TmZero
hfmap f (TmSucc e) = TmSucc (f e)
hfmap f (TmPred e) = TmPred (f e)
instance MyShow TmNat where
showMe TmZero = "0"
showMe (TmSucc (In _ e)) = "succ (" ++ showMe e ++ ")"
showMe (TmPred (In _ e)) = "pred (" ++ showMe e ++ ")"
parseTmNat :: NewParser TmNat fs 0
parseTmNat e p =
(keyword "0" >> pure (In e TmZero)) <|>
(keyword "succ" >> (In e . TmSucc) <$> (p !!! (Proxy :: Proxy 0))) <|>
(keyword "pred" >> (In e . TmPred) <$> (p !!! (Proxy :: Proxy 0)))
instance Syntax TmNat 0 where
keywords _ _ = ["0", "succ", "pred"]
parseF _ = parseTmNat
-- TmArith
data TmArith e l where
TmIsZero :: e 0 -> TmArith e 0
instance HFunctor TmArith where
hfmap f (TmIsZero e) = TmIsZero (f e)
instance MyShow TmArith where
showMe (TmIsZero (In _ e)) = "iszero (" ++ showMe e ++ ")"
parseTmArith :: NewParser TmArith fs 0
parseTmArith e p = keyword "iszero" >> (In e . TmIsZero <$> (p !!! (Proxy :: Proxy 0)))
instance Syntax TmArith 0 where
keywords _ _ = ["iszero"]
parseF _ = parseTmArith
-- s :: [Int] -> Syntactic '[TmBool, TmNat, TmArith] '[0, 0, 0]
-- s (x : y : z : zs) = CCons x $ CCons y $ CCons z $ CVoid
-- r = testRun s 3 (Proxy :: Proxy 0)
-- r' = test (s [1,2,3]) (Proxy :: Proxy 0)
-- [("0", "0"),
-- ("succ (pred 0)", "succ (pred (0))"),
-- ("iszero (pred (succ (succ 0)))", "iszero (pred (succ (succ (0))))")]
|
hy-zhang/parser
|
experimental/Arith1.hs
|
bsd-3-clause
| 3,238 | 0 | 12 | 849 | 1,102 | 578 | 524 | 74 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Mime where
import Type
jsonMime :: Mime
jsonMime = "application/json"
{-# INLINE jsonMime #-}
cssMime :: Mime
cssMime = "text/css"
{-# INLINE cssMime #-}
jsMime :: Mime
jsMime = "application/javascript"
{-# INLINE jsMime #-}
|
g0v/encoding-mashup-server
|
src/Mime.hs
|
bsd-3-clause
| 271 | 0 | 4 | 43 | 41 | 27 | 14 | 12 | 1 |
{-# LANGUAGE DeriveDataTypeable, DeriveFunctor, FlexibleInstances, GADTs,
OverloadedStrings, RankNTypes, RecordWildCards #-}
-- |
-- Module : Network.Wreq.Internal.Types
-- Copyright : (c) 2014 Bryan O'Sullivan
--
-- License : BSD-style
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : GHC
--
-- HTTP client types.
module Network.Wreq.Internal.Types
(
-- * Client configuration
Options(..)
, Mgr
, Auth(..)
, AWSAuthVersion(..)
, StatusChecker
-- * Request payloads
, Payload(..)
, Postable(..)
, Putable(..)
-- ** URL-encoded forms
, FormParam(..)
, FormValue(..)
-- * Headers
, ContentType
, Link(..)
-- * Errors
, JSONError(..)
-- * Request types
, Req(..)
, reqURL
-- * Sessions
, Session(..)
, Run
, Body(..)
-- * Caches
, CacheEntry(..)
) where
import Control.Exception (Exception, SomeException)
import Data.IORef (IORef)
import Data.Monoid ((<>), mconcat)
import Data.Text (Text)
import Data.Time.Clock (UTCTime)
import Data.Typeable (Typeable)
import Network.HTTP.Client (CookieJar, Manager, ManagerSettings, Request,
RequestBody)
import Network.HTTP.Client.Internal (Response, Proxy)
import Network.HTTP.Types (Header, Status, ResponseHeaders)
import Prelude hiding (head)
import qualified Data.ByteString.Char8 as S
import qualified Data.ByteString.Lazy as L
import qualified Network.HTTP.Client as HTTP
-- | A MIME content type, e.g. @\"application/octet-stream\"@.
type ContentType = S.ByteString
type Mgr = Either ManagerSettings Manager
-- | Options for configuring a client.
data Options = Options {
manager :: Mgr
-- ^ Either configuration for a 'Manager', or an actual 'Manager'.
--
-- If only 'ManagerSettings' are provided, then by default a new
-- 'Manager' will be created for each request.
--
-- /Note/: when issuing HTTP requests using 'Options'-based
    -- functions from the "Network.Wreq.Session" module
-- ('Network.Wreq.Session.getWith', 'Network.Wreq.Session.putWith',
-- etc.), this field will be ignored.
--
-- An example of using a specific manager:
--
-- @
--import "Network.HTTP.Client" ('Network.HTTP.Client.withManager')
--
--'Network.HTTP.Client.withManager' $ \\mgr -> do
-- let opts = 'Network.Wreq.defaults' { 'manager' = Right mgr }
-- 'Network.Wreq.getWith' opts \"http:\/\/httpbin.org\/get\"
-- @
--
-- An example of changing settings (this will use a separate
    -- 'Manager' for every request, so it makes sense only if you're issuing
    -- a tiny handful of requests):
--
-- @
--import "Network.HTTP.Client" ('Network.HTTP.Client.defaultManagerSettings')
--
--let settings = 'Network.HTTP.Client.defaultManagerSettings' { managerConnCount = 5 }
-- opts = 'Network.Wreq.defaults' { 'manager' = Left settings }
--'Network.Wreq.getWith' opts \"http:\/\/httpbin.org\/get\"
-- @
, proxy :: Maybe Proxy
-- ^ Host name and port for a proxy to use, if any.
, auth :: Maybe Auth
-- ^ Authentication information.
--
-- Example (note the use of TLS):
--
-- @
--let opts = 'Network.Wreq.defaults' { 'auth' = 'Network.Wreq.basicAuth' \"user\" \"pass\" }
--'Network.Wreq.getWith' opts \"https:\/\/httpbin.org\/basic-auth\/user\/pass\"
-- @
, headers :: [Header]
-- ^ Additional headers to send with each request.
--
-- @
--let opts = 'Network.Wreq.defaults' { 'headers' = [(\"Accept\", \"*\/*\")] }
--'Network.Wreq.getWith' opts \"http:\/\/httpbin.org\/get\"
-- @
, params :: [(Text, Text)]
-- ^ Key-value pairs to assemble into a query string to add to the
-- end of a URL.
--
-- For example, given:
--
-- @
--let opts = 'Network.Wreq.defaults' { params = [(\"sort\", \"ascending\"), (\"key\", \"name\")] }
--'Network.Wreq.getWith' opts \"http:\/\/httpbin.org\/get\"
-- @
--
-- This will generate a URL of the form:
--
-- >http://httpbin.org/get?sort=ascending&key=name
, redirects :: Int
-- ^ The maximum number of HTTP redirects to follow before giving up
-- and throwing an exception.
--
-- In this example, a 'Network.HTTP.Client.HttpException' will be
-- thrown with a 'Network.HTTP.Client.TooManyRedirects' constructor,
-- because the maximum number of redirects allowed will be exceeded:
--
-- @
--let opts = 'Network.Wreq.defaults' { 'redirects' = 3 }
--'Network.Wreq.getWith' opts \"http:\/\/httpbin.org\/redirect/5\"
-- @
, cookies :: Maybe CookieJar
-- ^ Cookies to set when issuing requests.
--
-- /Note/: when issuing HTTP requests using 'Options'-based
    -- functions from the "Network.Wreq.Session" module
-- ('Network.Wreq.Session.getWith', 'Network.Wreq.Session.putWith',
-- etc.), this field will be used only for the /first/ HTTP request
-- to be issued during a 'Network.Wreq.Session.Session'. Any changes
    -- made for subsequent requests will be ignored.
, checkStatus :: Maybe StatusChecker
-- ^ Function that checks the status code and potentially returns an
-- exception.
--
-- This defaults to 'Nothing', which will just use the default of
-- 'Network.HTTP.Client.Request' which throws a 'StatusException' if
-- the status is not 2XX.
} deriving (Typeable)
-- | A function that checks the result of an HTTP request and
-- potentially returns an exception.
type StatusChecker = Status -> ResponseHeaders -> CookieJar
-> Maybe SomeException
-- | Supported authentication types.
--
-- Do not use HTTP authentication unless you are using TLS encryption.
-- These authentication tokens can easily be captured and reused by an
-- attacker if transmitted in the clear.
data Auth = BasicAuth S.ByteString S.ByteString
-- ^ Basic authentication. This consists of a plain
-- username and password.
| OAuth2Bearer S.ByteString
-- ^ An OAuth2 bearer token. This is treated by many
-- services as the equivalent of a username and password.
| OAuth2Token S.ByteString
-- ^ A not-quite-standard OAuth2 bearer token (that seems
-- to be used only by GitHub). This is treated by whoever
-- accepts it as the equivalent of a username and
-- password.
| AWSAuth AWSAuthVersion S.ByteString S.ByteString
-- ^ Amazon Web Services request signing
-- AWSAuthVersion key secret
| OAuth1 S.ByteString S.ByteString S.ByteString S.ByteString
-- ^ OAuth1 request signing
-- OAuth1 consumerToken consumerSecret token secret
deriving (Eq, Show, Typeable)
data AWSAuthVersion = AWSv4
-- ^ AWS request signing version 4
deriving (Eq, Show)
instance Show Options where
show (Options{..}) = concat [
"Options { "
, "manager = ", case manager of
Left _ -> "Left _"
Right _ -> "Right _"
, ", proxy = ", show proxy
, ", auth = ", show auth
, ", headers = ", show headers
, ", params = ", show params
, ", redirects = ", show redirects
, ", cookies = ", show cookies
, " }"
]
-- | A type that can be converted into a POST request payload.
class Postable a where
postPayload :: a -> Request -> IO Request
-- ^ Represent a value in the request body (and perhaps the
-- headers) of a POST request.
-- | A type that can be converted into a PUT request payload.
class Putable a where
putPayload :: a -> Request -> IO Request
-- ^ Represent a value in the request body (and perhaps the
-- headers) of a PUT request.
-- | A product type for representing more complex payload types.
data Payload where
Raw :: ContentType -> RequestBody -> Payload
deriving (Typeable)
-- | A type that can be rendered as the value portion of a key\/value
-- pair for use in an @application\/x-www-form-urlencoded@ POST
-- body. Intended for use with the 'FormParam' type.
--
-- The instances for 'String', strict 'Data.Text.Text', and lazy
-- 'Data.Text.Lazy.Text' are all encoded using UTF-8 before being
-- URL-encoded.
--
-- The instance for 'Maybe' gives an empty string on 'Nothing',
-- and otherwise uses the contained type's instance.
class FormValue a where
renderFormValue :: a -> S.ByteString
-- ^ Render the given value.
-- | A key\/value pair for an @application\/x-www-form-urlencoded@
-- POST request body.
data FormParam where
(:=) :: (FormValue v) => S.ByteString -> v -> FormParam
instance Show FormParam where
show (a := b) = show a ++ " := " ++ show (renderFormValue b)
infixr 3 :=
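-- Example (illustrative, not part of this module): form parameters are
-- normally handed to the higher-level request functions, e.g.
--
-- > post "http://httpbin.org/post" ["num" := 31337, "name" := ("colin" :: Text)]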
-- | The error type used by 'Network.Wreq.asJSON' and
-- 'Network.Wreq.asValue' if a failure occurs when parsing a response
-- body as JSON.
data JSONError = JSONError String
deriving (Show, Typeable)
instance Exception JSONError
-- | An element of a @Link@ header.
data Link = Link {
linkURL :: S.ByteString
, linkParams :: [(S.ByteString, S.ByteString)]
} deriving (Eq, Show, Typeable)
-- | A request that is ready to be submitted.
data Req = Req Mgr Request
-- | Return the URL associated with the given 'Req'.
--
-- This includes the port number if not standard, and the query string
-- if one exists.
reqURL :: Req -> S.ByteString
reqURL (Req _ req) = mconcat [
if https then "https" else "http"
, "://"
, HTTP.host req
  , case (HTTP.port req, https) of
      (80, False) -> ""
      (443, True) -> ""
      (p, _) -> S.pack (':' : show p)
, HTTP.path req
, case HTTP.queryString req of
qs | S.null qs -> ""
| otherwise -> "?" <> qs
]
where https = HTTP.secure req
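-- For instance (illustrative, not in the original source): a plain-HTTP
-- request to httpbin.org on port 80 renders without a port component, while a
-- request on port 8080 renders with an explicit ":8080" after the host.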
-- | A function that runs a request and returns the associated
-- response.
type Run body = Req -> IO (Response body)
-- | A session that spans multiple requests. This is responsible for
-- cookie management and TCP connection reuse.
data Session = Session {
seshCookies :: Maybe (IORef CookieJar)
, seshManager :: Manager
, seshRun :: Session -> Run Body -> Run Body
}
instance Show Session where
show _ = "Session"
data CacheEntry body = CacheEntry {
entryCreated :: UTCTime
, entryExpires :: Maybe UTCTime
, entryResponse :: Response body
} deriving (Functor)
data Body = NoBody
| StringBody L.ByteString
| ReaderBody HTTP.BodyReader
instance Show (CacheEntry body) where
show _ = "CacheEntry"
|
bitemyapp/wreq
|
Network/Wreq/Internal/Types.hs
|
bsd-3-clause
| 10,487 | 1 | 14 | 2,406 | 1,388 | 864 | 524 | 125 | 4 |
{-# LANGUAGE OverloadedStrings #-}
module Data.Patron where
import Snap
import Database.SQLite.Simple
import qualified Data.Text as T
import Data.Aeson
import Data.Int
------------------------------------------------------------------------------
import Db.Utils
-- | A Patron of the library, with contact info.
data Patron = Patron
{ pId :: Maybe Int64
, pNumber :: Int64
} deriving (Eq,Show)
instance FromRow Patron where
fromRow = Patron
<$> field -- ^ pId
<*> field -- ^ pNumber
-- Schema --------------------------------------------------------------------
fldPId, fldPNumber :: Field
fldPId = "id"
fldPNumber = "number"
patronTableName :: T.Text
patronTableName = "patrons"
patronTableFields :: [(T.Text,T.Text)]
patronTableFields =
[ ( fldPId , "INTEGER PRIMARY KEY" )
, ( fldPNumber , "INTEGER NOT NULL" )
, ( fldTimestamp , timestampDBType )
]
-- DB Ops --------------------------------------------------------------------
createPatronTable :: Connection -> IO ()
createPatronTable conn = createTable conn patronTableName patronTableFields
getPatrons :: Connection -> IO [Patron]
getPatrons conn = getRows conn patronTableName
[ fldPId , fldPNumber ]
savePatron :: Connection -> Patron -> IO Patron
savePatron conn pat = do
pid <- maybe newPatron updatePatron (pId pat)
return pat { pId = Just pid }
where
flds = [ fldPNumber ]
vals = ( Only $ pNumber pat )
newPatron = newRow conn patronTableName flds vals
updatePatron = updateRow conn patronTableName flds vals fldPId
deletePatron :: Connection -> Int64 -> IO [Patron]
deletePatron conn = deleteRow conn patronTableName fldPId
-- JSON ----------------------------------------------------------------------
instance FromJSON Patron where
parseJSON (Object v) = Patron
<$> v .:? fldPId
<*> v .:# fldPNumber
parseJSON _ = mzero
instance ToJSON Patron where
toJSON pat = object
[ fldPId .= pId_ pat
, fldPNumber .= pNumber pat
]
where
pId_ = maybe (noIdError "Patron") id . pId
|
kylcarte/qclib
|
src/Data/Patron.hs
|
bsd-3-clause
| 2,142 | 0 | 11 | 477 | 505 | 276 | 229 | 51 | 1 |
{-# LANGUAGE DeriveGeneric, GeneralizedNewtypeDeriving #-}
-- | Creation of items on the server. Types and operations that don't involve
-- server state nor our custom monads.
module Game.LambdaHack.Server.ItemRev
( ItemKnown(..), NewItem(..), ItemRev, UniqueSet
, newItemKind, newItem
-- * Item discovery types
, DiscoveryKindRev, emptyDiscoveryKindRev, serverDiscos
-- * The @FlavourMap@ type
, FlavourMap, emptyFlavourMap, dungeonFlavourMap
-- * Important implementation parts, exposed for tests
, rollFlavourMap
#ifdef EXPOSE_INTERNAL
-- * Internal operations
, buildItem, keepMetaGameInformation
#endif
) where
import Prelude ()
import Game.LambdaHack.Core.Prelude
import Data.Binary
import qualified Data.EnumMap.Strict as EM
import qualified Data.EnumSet as ES
import qualified Data.HashMap.Strict as HM
import Data.Hashable (Hashable)
import Data.Vector.Binary ()
import qualified Data.Vector.Unboxed as U
import GHC.Generics (Generic)
import Game.LambdaHack.Common.Item
import qualified Game.LambdaHack.Common.ItemAspect as IA
import Game.LambdaHack.Common.Kind
import Game.LambdaHack.Common.Types
import Game.LambdaHack.Content.ItemKind (ItemKind)
import qualified Game.LambdaHack.Content.ItemKind as IK
import qualified Game.LambdaHack.Core.Dice as Dice
import Game.LambdaHack.Core.Frequency
import Game.LambdaHack.Core.Random
import qualified Game.LambdaHack.Definition.Ability as Ability
import Game.LambdaHack.Definition.Defs
import Game.LambdaHack.Definition.Flavour
-- | The essential item properties, used for the @ItemRev@ hash table
-- from items to their ids, needed to assign ids to newly generated items.
-- All the other meaningful properties can be derived from them.
-- Note: item seed instead of @AspectRecord@ is not enough,
-- because different seeds may result in the same @AspectRecord@
-- and we don't want such items to be distinct in UI and elsewhere.
data ItemKnown = ItemKnown ItemIdentity IA.AspectRecord (Maybe FactionId)
deriving (Show, Eq, Generic)
instance Binary ItemKnown
instance Hashable ItemKnown
data NewItem =
NewItem (GroupName ItemKind) ItemKnown ItemFull ItemQuant
| NoNewItem
-- | Reverse item map, for item creation, to keep items and item identifiers
-- in bijection.
type ItemRev = HM.HashMap ItemKnown ItemId
type UniqueSet = ES.EnumSet (ContentId ItemKind)
-- | Build an item with the given kind and aspects.
buildItem :: COps -> IA.AspectRecord -> FlavourMap
-> DiscoveryKindRev -> ContentId ItemKind
-> Item
buildItem COps{coitem} arItem (FlavourMap flavourMap)
(DiscoveryKindRev discoRev) ikChosen =
let jkind = case IA.aPresentAs arItem of
Just grp ->
let kindHidden = ouniqGroup coitem grp
in IdentityCovered
(toItemKindIx $ discoRev U.! contentIdIndex ikChosen)
kindHidden
Nothing -> IdentityObvious ikChosen
jfid = Nothing -- the default
jflavour = toEnum $ fromEnum $ flavourMap U.! contentIdIndex ikChosen
in Item{..}
-- | Roll an item kind based on given @Freqs@ and kind rarities
newItemKind :: COps -> UniqueSet -> Freqs ItemKind
-> Dice.AbsDepth -> Dice.AbsDepth -> Int
-> Frequency (GroupName ItemKind, ContentId IK.ItemKind, ItemKind)
newItemKind COps{coitem, coItemSpeedup} uniqueSet itemFreq
(Dice.AbsDepth ldepth) (Dice.AbsDepth totalDepth) lvlSpawned =
assert (any (\(_, n) -> n > 0) itemFreq) $
-- Effective generation depth of actors (not items) increases with spawns.
-- Up to 10 spawns, no effect. With 20 spawns, depth + 5, and then
-- each 10 spawns adds 5 depth.
let numSpawnedCoeff = max 0 $ lvlSpawned `div` 2 - 5
ldSpawned = ldepth + numSpawnedCoeff
f _ _ acc _ ik _ | ik `ES.member` uniqueSet = acc
f !itemGroup !q !acc !p !ik !kind =
-- Don't consider lvlSpawned for uniques, except those that have
-- @Unique@ under @Odds@.
let ld = if IA.checkFlag Ability.Unique
$ IA.kmMean $ getKindMean ik coItemSpeedup
then ldepth
else ldSpawned
rarity = linearInterpolation ld totalDepth (IK.irarity kind)
!fr = q * p * rarity
in (fr, (itemGroup, ik, kind)) : acc
g (!itemGroup, !q) = ofoldlGroup' coitem itemGroup (f itemGroup q) []
freqDepth = concatMap g itemFreq
in toFreq "newItemKind" freqDepth
-- | Given item kind frequency, roll item kind, generate item aspects
-- based on level and put together the full item data set.
newItem :: COps
-> Frequency (GroupName ItemKind, ContentId IK.ItemKind, ItemKind)
-> FlavourMap -> DiscoveryKindRev
-> Dice.AbsDepth -> Dice.AbsDepth
-> Rnd NewItem
newItem cops freq flavourMap discoRev levelDepth totalDepth =
if nullFreq freq
then return NoNewItem -- e.g., rare tile has a unique embed, only first time
else do
(itemGroup, itemKindId, itemKind) <- frequency freq
-- Number of new items/actors unaffected by number of spawned actors.
itemN <- castDice levelDepth totalDepth (IK.icount itemKind)
arItem <- IA.rollAspectRecord (IK.iaspects itemKind) levelDepth totalDepth
let itemBase = buildItem cops arItem flavourMap discoRev itemKindId
itemIdentity = jkind itemBase
!itemK = max 1 itemN
!itemTimer = [itemTimerZero | IA.checkFlag Ability.Periodic arItem]
-- enable optimization in @applyPeriodicLevel@
itemSuspect = False
-- Bonuses on items/actors unaffected by number of spawned actors.
itemDisco = ItemDiscoFull arItem
itemFull = ItemFull {..}
itemKnown = ItemKnown itemIdentity arItem (jfid itemBase)
itemQuant = if itemK == 1 && null itemTimer
then quantSingle
else (itemK, itemTimer)
return $! NewItem itemGroup itemKnown itemFull itemQuant
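-- Illustrative only (not part of the original source): a caller rolls a kind
-- frequency first and then materialises the item from it, roughly
--
-- > let freq = newItemKind cops uniqueSet itemFreq ldepth totalDepth lvlSpawned
-- > newItemResult <- newItem cops freq flavourMap discoRev ldepth totalDepth
--
-- run inside the server's 'Rnd' computation; the names above are assumed to
-- come from the surrounding server state.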
-- | The reverse map to @DiscoveryKind@, needed for item creation.
-- This is total and never changes, hence implemented as vector.
-- Morally, it's indexed by @ContentId ItemKind@ and elements are @ItemKindIx@.
newtype DiscoveryKindRev = DiscoveryKindRev (U.Vector Word16)
deriving (Show, Binary)
emptyDiscoveryKindRev :: DiscoveryKindRev
emptyDiscoveryKindRev = DiscoveryKindRev U.empty
serverDiscos :: COps -> DiscoveryKindRev
-> Rnd (DiscoveryKind, DiscoveryKindRev)
serverDiscos COps{coitem} (DiscoveryKindRev discoRevFromPreviousGame) = do
let ixs = [0..toEnum (olength coitem - 1)]
shuffled <-
if U.null discoRevFromPreviousGame
then shuffle ixs
else shuffleExcept (keepMetaGameInformation coitem discoRevFromPreviousGame)
(olength coitem)
ixs
let udiscoRev = U.fromListN (olength coitem) shuffled
f :: ContentId ItemKind -> Word16 -> (ItemKindIx, ContentId ItemKind)
f ik ikx = (toItemKindIx ikx, ik)
-- Not @fromDistinctAscList@, because it's the reverse map.
discoS = EM.fromList $ zipWith f [toEnum 0 ..] $ U.toList udiscoRev
return (discoS, DiscoveryKindRev udiscoRev)
-- | Keep in a vector the information that is retained from playthrough
-- to playthrough. The information being, e.g., @ItemKindIx@ or @Flavour@.
-- The information is morally indexed by @ContentId ItemKind@ and its @Enum@
-- instance fits in @Word16@.
keepMetaGameInformation :: ContentData ItemKind
-> U.Vector Word16
-> U.Vector Word16
keepMetaGameInformation coitem informationFromPreviousGame =
let inMetaGame :: ContentId ItemKind -> Bool
inMetaGame kindId =
IK.SetFlag Ability.MetaGame `elem` IK.iaspects (okind coitem kindId)
keepMeta :: Int -> Word16 -> Word16
keepMeta i ix = if inMetaGame (toEnum i)
then ix
else invalidInformationCode
in U.imap keepMeta informationFromPreviousGame
-- | Flavours assigned by the server to item kinds, in this particular game.
-- This is total and never changes, hence implemented as vector.
-- Morally, it's indexed by @ContentId ItemKind@ and elements are @Flavour@.
newtype FlavourMap = FlavourMap (U.Vector Word16)
deriving (Show, Binary)
emptyFlavourMap :: FlavourMap
emptyFlavourMap = FlavourMap U.empty
-- | Assigns flavours to item kinds. Ensures no flavour is repeated for the same
-- symbol, except for items with only one permitted flavour.
rollFlavourMap
:: U.Vector Word16
-> Rnd ( EM.EnumMap (ContentId ItemKind) Flavour
, EM.EnumMap (ContentSymbol ItemKind) (ES.EnumSet Flavour) )
-> ContentId ItemKind -> ItemKind
-> Rnd ( EM.EnumMap (ContentId ItemKind) Flavour
, EM.EnumMap (ContentSymbol ItemKind) (ES.EnumSet Flavour) )
rollFlavourMap uFlavMeta !rnd !key !ik = case IK.iflavour ik of
[] -> error "empty iflavour"
[flavour] -> do
(!assocs, !availableMap) <- rnd
return ( EM.insert key flavour assocs
, availableMap )
flvs -> do
(!assocs, !availableMap) <- rnd
let a0 = uFlavMeta U.! toEnum (fromEnum key)
if a0 == invalidInformationCode then do
if length flvs < 6 then do -- too few to even attempt unique assignment
flavour <- oneOf flvs
return ( EM.insert key flavour assocs
, availableMap )
else do
let available = availableMap EM.! IK.isymbol ik
proper = ES.fromList flvs `ES.intersection` available
assert (not (ES.null proper)
`blame` "not enough flavours for items"
`swith` (flvs, available, ik, availableMap)) $ do
flavour <- oneOf $ ES.elems proper
let availableReduced = ES.delete flavour available
return ( EM.insert key flavour assocs
, EM.insert (IK.isymbol ik) availableReduced availableMap )
else return ( EM.insert key (toEnum $ fromEnum a0) assocs
, availableMap )
-- | Randomly chooses flavour for all item kinds for this game.
dungeonFlavourMap :: COps -> FlavourMap -> Rnd FlavourMap
dungeonFlavourMap COps{coitem} (FlavourMap flavourMapFromPreviousGame) = do
let uFlavMeta = if U.null flavourMapFromPreviousGame
then U.replicate (olength coitem) invalidInformationCode
else keepMetaGameInformation coitem flavourMapFromPreviousGame
flavToAvailable :: EM.EnumMap Char (ES.EnumSet Flavour) -> Int -> Word16
-> EM.EnumMap Char (ES.EnumSet Flavour)
flavToAvailable em i fl =
let ik = okind coitem (toEnum i)
setBase = EM.findWithDefault (ES.fromList stdFlavList)
(IK.isymbol ik)
em
setMeta = if fl == invalidInformationCode
then setBase
else ES.delete (toEnum $ fromEnum fl) setBase
in EM.insert (IK.isymbol ik) setMeta em
availableMap = U.ifoldl' flavToAvailable EM.empty uFlavMeta
(assocsFlav, _) <- ofoldlWithKey' coitem (rollFlavourMap uFlavMeta)
(return (EM.empty, availableMap))
let uFlav = U.fromListN (olength coitem)
$ map (toEnum . fromEnum) $ EM.elems assocsFlav
return $! FlavourMap uFlav
|
LambdaHack/LambdaHack
|
engine-src/Game/LambdaHack/Server/ItemRev.hs
|
bsd-3-clause
| 11,370 | 0 | 24 | 2,795 | 2,526 | 1,327 | 1,199 | -1 | -1 |
{-# LANGUAGE PatternGuards #-}
module Spec.StripExtensions
( stripWSIExtensions
) where
import Spec.Spec
import Spec.Command
import Spec.Type
import Write.TypeConverter(cTypeDependencyNames)
import Write.Utils
import Data.Maybe(catMaybes)
-- | 'stripWSIExtensions' removes everything that depends upon any windowing
-- system headers
stripWSIExtensions :: Spec -> Spec
stripWSIExtensions spec =
let allTypes = sTypes spec
allCommands = sCommands spec
platformTypes = catMaybes . fmap typeDeclToPlatformType $ sTypes spec
disallowedPlatformTypes = filter (isDisallowedTypeName . ptName) $
platformTypes
disallowedTypes = transitiveClosure (reverseDependencies allTypes)
(==)
(APlatformType <$>
disallowedPlatformTypes)
disallowedTypeNames = catMaybes . fmap typeDeclTypeName $ disallowedTypes
isInDisallowed = flip elem disallowedTypes
allowedTypeDecls = filter (not . isInDisallowed) allTypes
isInDisallowedNames = flip elem disallowedTypeNames
allowedCommands = filter
(not . any isInDisallowedNames . commandDependencies)
allCommands
allowedExtensions = []
in spec{ sTypes = allowedTypeDecls
, sCommands = allowedCommands
, sExtensions = allowedExtensions
}
commandDependencies :: Command -> [String]
commandDependencies c = concatMap cTypeDependencyNames types
where types = cReturnType c : fmap pType (cParameters c)
--
-- Not exactly optimal in terms of complexity, but wins out in code simplicity
--
-- | Given all the type decls filter them by whether they contain this name
reverseDependencies :: [TypeDecl] -> TypeDecl -> [TypeDecl]
reverseDependencies ts t = filter (flip dependsOn t) ts
-- | 'dependsOn x y' returns True if x depends on y
dependsOn :: TypeDecl -> TypeDecl -> Bool
dependsOn x y
| Just dependeeName <- typeDeclTypeName y
, dependees <- typeDeclDependees x
= elem dependeeName dependees
dependsOn _ _ = False
-- | Is the type one which we can't supply
isDisallowedTypeName :: String -> Bool
isDisallowedTypeName = not . flip elem allowedTypes
-- | A list of all types which are not part of any WSI extension
allowedTypes :: [String]
allowedTypes = [ "void"
, "char"
, "float"
, "uint8_t"
, "uint32_t"
, "uint64_t"
, "int32_t"
, "size_t"
]
typeDeclDependees :: TypeDecl -> [String]
typeDeclDependees (AnInclude _) = []
typeDeclDependees (ADefine _) = []
typeDeclDependees (ABaseType _) = []
typeDeclDependees (APlatformType _) = []
typeDeclDependees (ABitmaskType bmt) = cTypeDependencyNames $ bmtCType bmt
typeDeclDependees (AHandleType ht) = cTypeDependencyNames $ htCType ht
typeDeclDependees (AnEnumType _) = []
typeDeclDependees (AFuncPointerType fpt) = cTypeDependencyNames $ fptCType fpt
typeDeclDependees (AStructType st) = concatMap (cTypeDependencyNames . smCType)
$ stMembers st
typeDeclDependees (AUnionType ut) = concatMap (cTypeDependencyNames . smCType)
$ utMembers ut
|
oldmanmike/vulkan
|
generate/src/Spec/StripExtensions.hs
|
bsd-3-clause
| 3,419 | 0 | 13 | 986 | 703 | 370 | 333 | 66 | 1 |
module Main where
import Control.Applicative
import Control.Monad
import Control.Monad.Cont
-- import Control.Arrow( (***) )
import Control.Lens
-- import Data.Maybe( fromJust )
import System.Environment( getArgs )
import System.Exit( exitSuccess )
import System.Random( newStdGen, {- split, -} randoms )
import Text.Printf( printf )
-- import Model
-- import User
-- import Random( randomBools )
-- import Interface( UserID(..), MsgQueueLen(..) )
import PoissonModel
import Random
import Signals
import GiaStation
-- import TreeCSMACD
-- import User
main :: IO ()
main = mainPoisson
mainPoisson :: IO ()
mainPoisson = do
n : sub : args <- getArgs
let
noiseR = read n
subSnr = read sub
lambdas = [0.1, 0.11 ..]
-- lambdas = [0.1, 0.2, 0.25, 0.3] ++ [0.31, 0.32 ..]
-- snrs = [1000000, 8, 7] ++ [6.9, 6.8 .. 5.0]
-- subSnrs = [0.1, 0.2 .. 3.0]
subSnrs = [subSnr]
baseSnr = fromDb 10
forM_ subSnrs $ \subSnr -> do
putStrLn "#SSNR LAM QS SNR MEAN DELAYS TRANS GENER NCONF"
withBreak $ \break -> do
forM_ lambdas $ \lambda -> do
-- forM_ [1,2,3,4,5,6,7,8,9,10] $ \nConf -> do
gen <- liftIO $ newStdGen -- bool transmission gen
poissonGen <- liftIO $ newStdGen -- poisson gen
randGen <- liftIO $ randoms <$> newStdGen
let numGen = poissonStream lambda poissonGen
-- lambda = 0.65 :: Double
-- numGen = repeat 1 -- one message generated each time
noiseGen = repeat noiseR
nsteps = 1000000 -- 10KK
l = 424 -- #bits, dunno why
qs = probError baseSnr l
model = runPoissonModel nsteps $ initPoissonModel {-nConf-} numGen gen noiseGen baseSnr subSnr randGen
dlays = view delays model
ngene = view totalGenerated model
ntran = view transmitted model
nconf = view nConflicts model
-- nFalseConf = view (station . falseConflicts) model
-- lConf = view lenConf model
meanD = fromIntegral dlays / fromIntegral ntran :: Double
liftIO $ void $ printf "%4.2f %.2f %8.4f %8.2f %10.5f %10d %10d %10d %10d\n" subSnr lambda qs baseSnr (meanD - 0.5) dlays ntran ngene nconf
-- void $ printf "%d\t%5.3f\t%5.3f\t%d\t%d\n" nConf (fromIntegral dlays / fromIntegral ntran :: Double)
-- (fromIntegral lConf / fromIntegral nconf :: Double) lConf nconf
when (meanD > 30.0) $ break ()
putStrLn "\n\n"
where withBreak = (`runContT` return) . callCC
-- print $ model^.curConflictLen
-- print $ model^.curConflictNum
-- let Just (Undef lbl inp) = leftUndef . fromJust $ view (station . GiaStation.tree) model
-- print $ all not $ tail lbl
-- print inp
-- mapM_ (print . cleanUser) $ model ^.. activeUsers.traversed
|
ownclo/alohas
|
app/Main.hs
|
bsd-3-clause
| 3,084 | 0 | 23 | 1,014 | 497 | 272 | 225 | 46 | 1 |
{-# LANGUAGE LambdaCase #-}
module Day07 where
import Control.Monad.State.Lazy
import Data.Bits
import Data.Map.Strict ((!))
import Data.Word
import qualified Data.Map.Strict as Map
import Text.Parsec hiding (State)
import Text.Parsec.String
type Instruction = (Ident, Expr)
data Expr
= Atom Atom
| And Atom Atom
| Or Atom Atom
| LShift Ident Int
| RShift Ident Int
| Not Atom
deriving (Show)
data Atom
= Lit Signal
| Ref Ident
deriving (Show)
type Ident = String
type Signal = Word16
type Env = Map.Map Ident Expr
findSignalAtA :: String -> Signal
findSignalAtA s = evalState (evalIdent "a") $ parseEnv s
findSignalAtAWithModifiedB :: String -> Signal
findSignalAtAWithModifiedB s = evalState (evalIdent "a") env'
where
env = parseEnv s
env' = Map.insert "b" (Atom (Lit 956)) env
-- Memoized evaluation of the environment
evalIdent :: Ident -> State Env Signal
evalIdent i = lookupIdent i >>= evalExpr
evalExpr :: Expr -> State Env Signal
evalExpr = \case
Atom a -> evalAtom a
Or a b -> (.|.) <$> evalAtom a <*> evalAtom b
And a b -> (.&.) <$> evalAtom a <*> evalAtom b
LShift i b -> (`shift` b) <$> evalIdent i
RShift i b -> (`shift` (-b)) <$> evalIdent i
Not a -> complement <$> evalAtom a
evalAtom :: Atom -> State Env Signal
evalAtom = \case
Lit s -> return s
Ref i -> do
s <- evalIdent i
memoizeIdent i s
return s
lookupIdent :: Ident -> State Env Expr
lookupIdent i = gets (! i)
memoizeIdent :: Ident -> Signal -> State Env ()
memoizeIdent i s = modify (Map.insert i (Atom (Lit s)))
-- Parsing AST
parseEnv :: String -> Env
parseEnv s = Map.fromList is
where
Right is = parse instructions "" s
instructions = instruction `sepEndBy` endOfLine
instruction :: Parser Instruction
instruction = do
e <- expr
void $ string " -> "
i <- ident
return (i, e)
expr = try andExpr
<|> try orExpr
<|> try lShiftExpr
<|> try rShiftExpr
<|> try notExpr
<|> atomExpr
atomExpr = Atom <$> atom
atom = try litAtom <|> refAtom
litAtom = Lit <$> (read <$> many1 digit)
refAtom = Ref <$> ident
andExpr = And <$> atom <*> (string " AND " *> atom)
orExpr = Or <$> atom <*> (string " OR " *> atom)
lShiftExpr = LShift <$> ident <*> (string " LSHIFT " *> num)
rShiftExpr = RShift <$> ident <*> (string " RSHIFT " *> num)
notExpr = Not <$> (string "NOT " *> atom)
num = read <$> many1 digit
ident = many1 lower
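-- Illustrative only (not part of the original solution): a two-wire circuit.
--
-- > findSignalAtA "123 -> b\nb AND 456 -> a\n" == 72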
|
patrickherrmann/advent
|
src/Day07.hs
|
bsd-3-clause
| 2,460 | 0 | 12 | 614 | 921 | 478 | 443 | -1 | -1 |
{-# LANGUAGE CPP, GeneralizedNewtypeDeriving, NoImplicitPrelude, PackageImports,
FlexibleContexts, StandaloneDeriving, UndecidableInstances #-}
{-# OPTIONS -Wall #-}
module Language.Paraiso.Tuning.Genetic
(
Genome, Species(..),
makeSpecies,
readGenome, overwriteGenome,
mutate, cross, triangulate,
generateIO
) where
import qualified "mtl" Control.Monad.State as State
import qualified Data.Graph.Inductive as FGL
import qualified Data.Vector as V
import Data.Vector ((!))
import qualified Language.Paraiso.Annotation as Anot
import qualified Language.Paraiso.Annotation.Allocation as Alloc
import qualified Language.Paraiso.Annotation.SyncThreads as Sync
import qualified Language.Paraiso.Generator.Native as Native
import qualified Language.Paraiso.OM as OM
import qualified Language.Paraiso.OM.Graph as OM
import qualified Language.Paraiso.Optimization as Opt
import Language.Paraiso.Prelude hiding (Boolean(..))
import qualified Language.Paraiso.Generator as Gen (generateIO)
import qualified Prelude as Prelude
import NumericPrelude hiding ((++))
import System.Random
import qualified Text.Read as Read
data Species v g =
Species {
setup :: Native.Setup v g,
machine :: OM.OM v g Anot.Annotation
}
deriving instance (Show (Native.Setup v g), Show (OM.OM v g Anot.Annotation))
=> Show (Species v g)
makeSpecies :: Native.Setup v g -> OM.OM v g Anot.Annotation -> Species v g
makeSpecies = Species
generateIO :: (Opt.Ready v g) => Species v g -> IO [(FilePath, Text)]
generateIO (Species s om) = Gen.generateIO s om
newtype Genome = Genome [Bool] deriving (Eq)
instance Show Genome where
show (Genome xs) = show $ toDNA xs
instance Read Genome where
readPrec = fmap (Genome . fromDNA) Read.readPrec
toDNA :: [Bool] -> String
toDNA xss@(x:xs)
| even $ length xss = 'C' : inner xss
| not x = 'A' : inner xs
| otherwise = 'T' : inner xs
where
inner [] = ""
inner (x:y:xs) = inner1 x y : inner xs
inner _ = error "parity conservation law is broken"
inner1 False False = 'A'
inner1 False True = 'C'
inner1 True False = 'G'
inner1 True True = 'T'
fromDNA :: String -> [Bool]
fromDNA xss@(x:xs)
| x == 'C' = inner xs
| x == 'A' = False : inner xs
| x == 'T' = True : inner xs
| otherwise = error "bad DNA"
where
inner = concat . map inner1
inner1 :: Char -> [Bool]
inner1 'A' = [False, False]
inner1 'C' = [False, True ]
inner1 'G' = [True , False]
inner1 'T' = [True , True ]
inner1 _ = error "bad DNA"
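-- Illustrative round trip (not in the original source): booleans are paired
-- into the letters A/C/G/T behind a leading parity marker, e.g.
--
-- > toDNA [False, True] == "CC"
-- > fromDNA "CC" == [False, True]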
mutate :: Genome -> IO Genome
mutate original@(Genome xs) = do
let oldVector = V.fromList xs
n = length xs
logN :: Double
logN = log (fromIntegral n)
-- 20% of the mutations are single point mutation
logRand <- randomRIO (-0.2 * logN, logN)
mutaCoin <- randomRIO (0, 1::Double)
let randN :: Int
randN = Prelude.max 1 $ ceiling $ exp logRand
randRanges = V.replicate randN (0, n - 1)
randUpd range = do
idx <- randomRIO range
let newVal
-- 50% are negating mutations
| mutaCoin < 0.5 || randN <= 1 = not $ oldVector ! idx
-- 25% are all True mutation
| mutaCoin < 0.75 = True
-- other 25% are all False mutation
| otherwise = False
return (idx, newVal)
randUpds <- V.mapM randUpd randRanges
let pureMutant = Genome $ V.toList $ V.update oldVector randUpds
if randN > 8
then return pureMutant >>= cross original >>= cross original
else return pureMutant
cross :: Genome -> Genome -> IO Genome
cross (Genome xs0) (Genome ys0) = do
swapCoin <- randomRIO (0,1)
let
(xs,ys) = if swapCoin < (0.5::Double) then (xs0, ys0) else (ys0, xs0)
n = Prelude.max (length xs) (length ys)
vx = V.fromList $ take n $ xs ++ repeat False
vy = V.fromList $ take n $ ys ++ repeat False
atLeast :: Int -> IO Int
atLeast n = do
coin <- randomRIO (0,1)
if coin < (0.5 :: Double)
then return n
else atLeast (n+1)
randN <- atLeast 1
let randRanges = replicate randN (-1, n + 1)
crossPoints <- mapM randomRIO randRanges
let vz = V.generate n $ \i ->
if odd $ length $ filter (<i) crossPoints then vx!i else vy!i
return $ Genome $ V.toList $ vz
triangulate :: Genome -> Genome -> Genome -> IO Genome
triangulate (Genome base) (Genome left) (Genome right) = do
return $ Genome $ zipWith3 f base left right
where
f b l r = if b/=l then l else r
readGenome :: Species v g -> Genome
readGenome spec =
encode $ do
let (x,y) = Native.cudaGridSize $ setup spec
putInt 16 x
putInt 16 y
let om = machine spec
kerns = OM.kernels om
V.mapM_ (putGraph . OM.dataflow) kerns
overwriteGenome :: (Opt.Ready v g) => Genome -> Species v g -> Species v g
overwriteGenome dna oldSpec =
decode dna $ do
-- load cuda grid topology from the genome
x <- getInt 16
y <- getInt 16
let oldSetup = setup oldSpec
oldOM = machine oldSpec
oldKernels = OM.kernels oldOM
oldFlags = OM.globalAnnotation $ OM.setup $ oldOM
let overwriteKernel kern = do
let graph = OM.dataflow kern
newGraph <- overwriteGraph graph
return $ kern{OM.dataflow = newGraph}
-- load manifesto from the genome
newKernels <- V.mapM overwriteKernel oldKernels
let newGrid = (x,y)
newSetup = oldSetup {Native.cudaGridSize = newGrid}
-- reset the optimization flag and cause the optimization again
newFlags = Anot.set Opt.Unoptimized oldFlags
newOM = oldOM
{ OM.kernels = newKernels,
OM.setup = (OM.setup oldOM){ OM.globalAnnotation = newFlags }
}
return $ Species newSetup (Opt.optimize Opt.O3 newOM)
newtype Get a = Get { getGet :: State.State [Bool] a }
deriving (Prelude.Functor, Prelude.Applicative, Monad)
newtype Put a = Put { getPut :: State.State [Bool] a }
deriving (Prelude.Functor, Prelude.Applicative, Monad)
get :: Get Bool
get = Get $ do
dat <- State.get
case dat of
(x:xs) -> do
State.put xs
return x
_ -> return False -- When the data is depleted default to False
put :: Bool -> Put ()
put x = Put $ do
xs <- State.get
State.put $ x:xs
decode :: Genome -> Get a -> a
decode (Genome dna) m = State.evalState (getGet m) dna
encode :: Put a -> Genome
encode m = Genome $ reverse $ State.execState (getPut m) []
putInt :: Int -> Int -> Put ()
putInt bit n
| bit <= 0 = return ()
| n >= val = do
put True
putInt (bit-1) (n-val)
| otherwise = do
put False
putInt (bit-1) n
where
val :: Int
val = 2^(fromIntegral $ bit-1)
getInt :: Int -> Get Int
getInt bit
| bit <= 0 = return 0
| otherwise = do
x <- get
y <- getInt (bit-1)
return $ y + 2^(fromIntegral $ bit-1) * (if x then 1 else 0)
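-- For example (illustrative, not part of the original module), a 16-bit round
-- trip: decode (encode (putInt 16 12345)) (getInt 16) == 12345.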
putGraph :: OM.Graph v g Anot.Annotation -> Put ()
putGraph graph = do
V.mapM_ put focus
V.mapM_ put2 focus2
where
focus =
V.map (isManifest . OM.getA . snd) $
V.filter (hasChoice . OM.getA . snd) idxNodes
-- idxNodes :: V.Vector (FGL.Node, OM.Node v g a)
idxNodes = V.fromList $ FGL.labNodes graph
hasChoice :: Anot.Annotation -> Bool
hasChoice anot =
case Anot.toMaybe anot of
Just (Alloc.AllocationChoice _) -> True
_ -> False
isManifest :: Anot.Annotation -> Bool
isManifest anot =
case Anot.toMaybe anot of
Just Alloc.Manifest -> True
_ -> False
focus2 =
V.map (getSyncBools . OM.getA . snd) $
V.filter (isValue . snd) idxNodes
isValue nd = case nd of
OM.NValue _ _ -> True
_ -> False
getSyncBools :: Anot.Annotation -> (Bool, Bool)
getSyncBools xs = let ys = Anot.toList xs in
(Sync.Pre `elem` ys, Sync.Post `elem` ys)
put2 (a,b) = put a >> put b
overwriteGraph :: OM.Graph v g Anot.Annotation -> Get (OM.Graph v g Anot.Annotation)
overwriteGraph graph = do
ovs <- V.mapM getAt focusIndices
ovs2 <- V.mapM getAt2 focus2Indices
return $ overwrite ovs2 $ overwrite ovs graph
where
overwrite ovs =
let updater :: V.Vector (Anot.Annotation -> Anot.Annotation)
updater =
flip V.update ovs $
V.map (const id) idxNodes in
OM.imap $ \idx anot -> updater ! idx $ anot
getAt idx = do
ret <- get
return (idx, Anot.set $ if ret then Alloc.Manifest else Alloc.Delayed)
getAt2 idx = do
a <- get
b <- get
return (idx, (if a then Anot.set Sync.Pre else id) . (if b then Anot.set Sync.Post else id))
focusIndices =
V.map fst $
V.filter (hasChoice . OM.getA . snd) idxNodes
focus2Indices =
V.map fst $
V.filter (isValue . snd) idxNodes
idxNodes = V.fromList $ FGL.labNodes graph
hasChoice :: Anot.Annotation -> Bool
hasChoice anot =
case Anot.toMaybe anot of
Just (Alloc.AllocationChoice _) -> True
_ -> False
isManifest :: Anot.Annotation -> Bool
isManifest anot =
case Anot.toMaybe anot of
Just Alloc.Manifest -> True
_ -> False
isValue nd = case nd of
OM.NValue _ _ -> True
_ -> False
{-
overwriteGraph :: OM.Graph v g Anot.Annotation -> Get (OM.Graph v g Anot.Annotation)
overwriteGraph graph = do
ovs <- V.mapM getAt focusIndices
return $ overwritten ovs
where
overwritten ovs =
let newManifest :: V.Vector (Maybe Alloc.Allocation)
newManifest =
flip V.update ovs $
V.map (const Nothing) anots in
flip OM.imap graph $ \idx anot ->
case newManifest ! idx of
Nothing -> anot
Just x -> Anot.set x anot
getAt idx = do
ret <- get
return (idx, Just $ if ret then Alloc.Manifest else Alloc.Delayed)
focusIndices =
V.map fst $
V.filter (hasChoice . snd) anots
anots :: V.Vector (FGL.Node, Anot.Annotation)
anots = V.fromList $ map (\(n, lab) -> (n, OM.getA lab)) $ FGL.labNodes graph
hasChoice :: Anot.Annotation -> Bool
hasChoice anot =
case Anot.toMaybe anot of
Just (Alloc.AllocationChoice _) -> True
_ -> False
isManifest :: Anot.Annotation -> Bool
isManifest anot =
case Anot.toMaybe anot of
Just Alloc.Manifest -> True
_ -> False
-}
|
nushio3/Paraiso
|
Language/Paraiso/Tuning/Genetic.hs
|
bsd-3-clause
| 10,733 | 0 | 20 | 3,174 | 3,464 | 1,775 | 1,689 | 248 | 7 |
-- |
-- Module : Network.Machine.Protocol.NNTP
-- Copyright : Lodvær 2015
-- License : BSD3
--
-- Maintainer : Lodvær <[email protected]>
-- Stability : provisional
-- Portability : unknown
--
-- NNTP machines.
module Network.Machine.Protocol.NNTP where
-- TODO
|
lodvaer/machines-network
|
src/Network/Machine/Protocol/NNTP.hs
|
bsd-3-clause
| 285 | 0 | 3 | 59 | 19 | 17 | 2 | 1 | 0 |
module GW.API.Recipe where
import ClassyPrelude
import GW.API.DbService
import Data.Aeson
data Ingredient =
Ingredient
{ ingredientItemId :: Int
, ingredientCount :: Int
} deriving (Show)
instance FromJSON Ingredient where
  parseJSON (Object v) =
    Ingredient <$>
      (v .: "item_id") <*>
      (v .: "count")
  -- non-object JSON values fail to parse instead of crashing on a
  -- non-exhaustive pattern match
  parseJSON _ = mempty
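-- Illustrative use (added, not in the original source): with the instance
-- above an ingredient decodes straight from the API JSON via aeson, e.g.
--
-- > decode "{\"item_id\": 1, \"count\": 10}" :: Maybe Ingredient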
data Recipe =
RecipeService
| Recipe
{ recipeId :: Int
, recipeType :: String
, recipeOutputItemId :: Int
, recipeOutputItemCount :: Int
, recipeTimeToCraftMS :: Int
, recipeDiciplines :: [String]
, recipeMinRating :: Int
, recipeFlags :: [String]
, recipeIngredients :: [Ingredient]
} deriving (Show)
instance FromJSON Recipe where
  parseJSON (Object v) =
    Recipe <$>
      (v .: "id") <*>
      (v .: "type") <*>
      (v .: "output_item_id") <*>
      (v .: "output_item_count") <*>
      (v .: "time_to_craft_ms") <*>
      (v .: "disciplines") <*>
      (v .: "min_rating") <*>
      (v .: "flags") <*>
      (v .: "ingredients")
  -- non-object JSON values fail to parse instead of crashing on a
  -- non-exhaustive pattern match
  parseJSON _ = mempty
instance ApiDbService Recipe where
serviceName _ = "recipes"
|
andgate/gwtrader
|
src/GW/API/Recipe.hs
|
bsd-3-clause
| 1,062 | 0 | 16 | 264 | 312 | 180 | 132 | 41 | 0 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
-- | This module provides the Jat monad, which gives access to the current
-- 'Program' and counters.
module Jat.JatM
(
JatM (..)
, Jat
, initJat
, evalJat
, getProgram
, freshVarIdx
, nextVarIdx
, freshKey
, module Control.Monad
)
where
import Jinja.Program (Program)
import Control.Applicative
import Control.Monad.State.Lazy
import Control.Monad (liftM,liftM2,foldM,mapM,sequence)
import Control.Monad.Identity
-- | The Jat monad.
newtype JatM m a = JatM (StateT JatST m a)
deriving (Functor, Applicative, Monad, MonadIO, MonadState JatST)
-- | The Jat monad with base monad 'Identity'.
type Jat a = JatM Identity a
data JatST = JatST {
varcounter::Int
, keycounter::Int
, program::Program
}
-- | Sets the 'Program' and resets the counters.
initJat :: Program -> JatST
initJat p = JatST {
varcounter = 0
, keycounter = 0
, program = p
}
-- | Evaluates the monad.
evalJat :: Monad m => JatM m a -> JatST -> m a
evalJat (JatM a) = evalStateT a
--withProgram :: Monad m => (Program -> JatM m a) -> JatM m a
--withProgram f = gets program >>= f
-- | Returns the initialised 'Program'.
getProgram :: Monad m => JatM m Program
getProgram = gets program
-- | Returns a fresh key from the node counter.
freshKey :: Monad m => JatM m Int
freshKey = do
st <- get
let i = keycounter st
put $ st{ keycounter=i+1 }
return i
-- | Returns a fresh key from the variable counter.
freshVarIdx :: Monad m => JatM m Int
freshVarIdx = do
st <- get
let i = varcounter st
put $ st{ varcounter=i+1 }
return i
-- | Returns the next variable counter, but does not increment it.
nextVarIdx :: Monad m => JatM m Int
nextVarIdx = gets varcounter
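-- Usage sketch (added for illustration; 'p' stands for any loaded 'Program'):
-- in the Identity-based 'Jat' monad the two counters start at 0 and are
-- incremented independently of each other.
--
-- > runIdentity $ evalJat (liftM2 (,) freshVarIdx freshKey) (initJat p)
-- > -- == (0, 0)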
|
ComputationWithBoundedResources/jat
|
src/Jat/JatM.hs
|
bsd-3-clause
| 1,746 | 0 | 10 | 387 | 453 | 251 | 202 | 47 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Data.Text.Wrap where
import Data.Text (Text)
import qualified Data.Text as T
import Data.Text.ICU.Types
-- | A type representing the possible errors produced by
-- running one of the wrapping functions with invalid inputs
data WrapError = InvalidWidth | PlaceholderTooLarge
deriving (Show, Eq)
-- | A collection of config information
data WrapperConfig =
WrapperConfig { width :: Int -- ^ Maximum length of a wrapped line
, expandTabs :: Bool -- ^ If true, all tabs will be replaced with spaces
, tabsize :: Int -- ^ Number of spaces to use for a tab if 'expandTabs' is true
, replaceWhitespace :: Bool -- ^ Replace all whitespace with spaces before wrapping
, dropWhitespace :: Bool -- ^ Drop whitespace around lines
, initialIndent :: Text -- ^ Text prepended to the first line
, subsequentIndent :: Text -- ^ Text prepended to lines besides the first
, fixSentenceEndings :: Bool -- ^ Attempt to ensure sentences end in two spaces
, breakLongWords :: Bool -- ^ Put words longer than width on multiple lines
, breakOnHyphens :: Bool -- ^ Break on hyphens as well as spaces
, maxLines :: Maybe Int -- ^ If not 'Nothing', truncate to this many lines
, placeholder :: Text -- ^ Text placed after truncated text
, locale :: LocaleName -- ^ Locale of the text, defaults to current locale
}
-- | Default config settings
defaultConfig :: WrapperConfig
defaultConfig = WrapperConfig { width = 70
, expandTabs = True
, tabsize = 8
, replaceWhitespace = True
, dropWhitespace = True
, initialIndent = ""
, subsequentIndent = ""
, fixSentenceEndings = False
, breakLongWords = True
, breakOnHyphens = True
, maxLines = Nothing
, placeholder = " [...]"
, locale = Current
}
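-- Intended usage sketch (added; the functions below are still 'undefined'
-- stubs, so this shows the planned API rather than current behaviour):
-- configurations are built by record update on 'defaultConfig', e.g.
--
-- > wrap defaultConfig { width = 20 } "some longer piece of input text"
-- > -- Right ["some longer piece of", "input text"]  (once implemented)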
-- | Wraps the input text, returning a list of lines no more than 'width'
-- characters long
wrap :: WrapperConfig -> Text -> Either WrapError [Text]
wrap = undefined
-- | Like 'wrap', but concatenates the lines and adds newlines
fill :: WrapperConfig -> Text -> Either WrapError Text
fill = undefined
-- | Truncates input to no more than 'width' characters
shorten :: WrapperConfig -> Text -> Either WrapError Text
shorten = undefined
-- | Remove common leading whitespace from all lines
dedent :: Text -> Text
dedent text = undefined
-- | Remove common leading whitespace from all lines
-- Finds line breaks based on the given locale
dedentLocale :: LocaleName -> Text -> Text
dedentLocale locale text = undefined
-- | Add 'prefix' to all lines matching the given predicate
indent :: Maybe (Text -> Bool) -> Text -> Text -> Text
indent pred prefix text = undefined
-- | Add 'prefix' to all lines matching the given predicate
-- Finds line breaks based on the given locale
indentWithLocale :: LocaleName -> Maybe (Text -> Bool) -> Text -> Text -> Text
indentWithLocale local pred prefix text = undefined
|
Hrothen/textwrap
|
src/Data/Text/Wrap.hs
|
bsd-3-clause
| 3,514 | 0 | 9 | 1,194 | 457 | 281 | 176 | 49 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Web.Orion.Config where
import Data.Configurator
import Data.Configurator.Types
import Data.List
import Control.Monad
import System.Environment
import System.Directory
import System.IO
defaultCfgFileName :: String
defaultCfgFileName = "orion.cfg"
defaultCfgFileContents :: String
defaultCfgFileContents = intercalate "\n"
[ "port = 9988"
, "cookie_life = " ++ show (60 * 10 :: Int) -- in seconds.
, "users-db-filepath = users.db"
, "new-user-acl-level = 100"
]
getCfg :: IO Config
getCfg = do
-- Get the path to our config file.
args <- getArgs
let cfgFilePath = case args of
fp:_ -> fp
[] -> defaultCfgFileName
fileExists <- doesFileExist cfgFilePath
-- Create it if it doesn't exist.
unless fileExists $ do
cwd <- getCurrentDirectory
let d = cwd ++ "/" ++ defaultCfgFileName
putStrLn $ unwords ["Creating default config file at",d]
h <- openFile d WriteMode
hPutStr h defaultCfgFileContents
hClose h
-- Get our config file.
fmap fst $ autoReload autoConfig [Required cfgFilePath]
getCfgPort :: Config -> IO Int
getCfgPort cfg = lookupDefault 9988 cfg "port"
getCfgCookieLife :: Config -> IO Integer
getCfgCookieLife cfg = lookupDefault (10 * 60) cfg "cookie_life"
getCfgUserDBFilePath :: Config -> IO FilePath
getCfgUserDBFilePath cfg = lookupDefault "users.db" cfg "users-db-filepath"
getCfgNewUserAclLevel :: Config -> IO Integer
getCfgNewUserAclLevel cfg = lookupDefault 100 cfg "new-user-acl-level"
getCfgBaseUrl :: Config -> IO String
getCfgBaseUrl cfg = lookupDefault "http://127.0.0.1:9988" cfg "base-url"
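-- Minimal usage sketch (added for illustration, not in the original file):
--
-- > main :: IO ()
-- > main = do
-- >   cfg <- getCfg
-- >   port <- getCfgPort cfg
-- >   putStrLn $ "orion will listen on port " ++ show port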
|
schell/orion
|
src/Web/Orion/Config.hs
|
bsd-3-clause
| 1,727 | 0 | 14 | 385 | 402 | 203 | 199 | 42 | 2 |
module Data.Text.Benchmarks.Micro.DecodeUtf8
( benchmark
) where
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as BL
import qualified Data.Text as T
import qualified Data.Text.IO as T
import qualified Data.Text.Encoding as T
import qualified Data.Text.Lazy as TL
import qualified Data.Text.Lazy.Encoding as TL
import qualified Data.Text.Lazy.IO as TL
import qualified Codec.Binary.UTF8.Generic as U8
import Control.DeepSeq (rnf)
import System.IO (IOMode (ReadMode), openFile, hGetContents, hSetEncoding, utf8)
import Criterion.Main (Benchmark, bgroup, bench)
benchmark :: FilePath -> Benchmark
benchmark fp = bgroup "DecodeUtf8"
[ bench "Strict" $ do
bs <- B.readFile fp
rnf (T.decodeUtf8 bs) `seq` return ()
, bench "StrictLength" $ do
bs <- B.readFile fp
rnf (T.length $ T.decodeUtf8 bs) `seq` return ()
, bench "StrictInitLength" $ do
bs <- B.readFile fp
rnf (T.length $ T.init $ T.decodeUtf8 bs) `seq` return ()
, bench "StrictIO" $ do
h <- openFile fp ReadMode
hSetEncoding h utf8
t <- T.hGetContents h
rnf t `seq` return ()
, bench "StrictLengthIO" $ do
h <- openFile fp ReadMode
hSetEncoding h utf8
t <- T.hGetContents h
rnf (T.length t) `seq` return ()
, bench "Lazy" $ do
bs <- BL.readFile fp
rnf (TL.decodeUtf8 bs) `seq` return ()
, bench "LazyLength" $ do
bs <- BL.readFile fp
rnf (TL.length $ TL.decodeUtf8 bs) `seq` return ()
, bench "LazyInitLength" $ do
bs <- BL.readFile fp
rnf (TL.length $ TL.init $ TL.decodeUtf8 bs) `seq` return ()
, bench "LazyIO" $ do
h <- openFile fp ReadMode
hSetEncoding h utf8
t <- TL.hGetContents h
rnf t `seq` return ()
, bench "LazyLengthIO" $ do
h <- openFile fp ReadMode
hSetEncoding h utf8
t <- TL.hGetContents h
rnf (TL.length t) `seq` return ()
, bench "String" $ do
h <- openFile fp ReadMode
hSetEncoding h utf8
t <- hGetContents h
rnf t `seq` return ()
, bench "StringLength" $ do
h <- openFile fp ReadMode
hSetEncoding h utf8
t <- hGetContents h
rnf (length t) `seq` return ()
, bench "LazyStringUtf8" $ do
s <- U8.toString `fmap` BL.readFile fp
rnf s `seq` return ()
, bench "LazyStringUtf8Length" $ do
s <- U8.toString `fmap` BL.readFile fp
rnf (length s) `seq` return ()
, bench "StrictStringUtf8" $ do
s <- U8.toString `fmap` B.readFile fp
rnf s `seq` return ()
, bench "StrictStringUtf8Length" $ do
s <- U8.toString `fmap` B.readFile fp
rnf (length s) `seq` return ()
]
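-- Illustrative driver (added, not part of this module): with an extra import
-- of 'defaultMain' from Criterion.Main the group can be run as
--
-- > main :: IO ()
-- > main = defaultMain [benchmark "path/to/some-utf8-file.txt"]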
|
JensTimmerman/text-benchmarks
|
src/Data/Text/Benchmarks/Micro/DecodeUtf8.hs
|
bsd-3-clause
| 2,804 | 0 | 15 | 827 | 1,060 | 531 | 529 | 76 | 1 |
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE LambdaCase #-}
--{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
--{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE UndecidableInstances #-} -- not really needed
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE MultiParamTypeClasses #-}
module LensRef
( -- * Reference context
Monad'
, MonadTrace (..), TraceT, runTraceT
, Reversible (..), ReversibleT, postponed, reversible, neut, runRev
, SimpleRefs (..), modSimpleRef, memoRead
, RefContext
-- * References
, Ref
, unitRef
, joinRef
, lensMap
-- * RefReader
, RefReader
, readRef
-- * RefCreator
, RefCreator
, readerToCreator
, runRefCreator
, extendRef, newRef
, stabilize_, stabilize
, delayPrev, delay_ -- uses previous
, generator_, onChange', joinCreator, generator', onChangeMemo'
-- * RefWriter
, RefWriter
, creatorToWriter, readerToWriter
, writeRef, modRef
) where
import Data.Monoid
--import Data.Function
import Data.Maybe
import Data.String
import qualified Data.Set as Set
import qualified Data.Map as Map
import Control.Applicative
import Control.Arrow
import Control.Monad.State
import Control.Monad.Reader
import Control.Monad.Writer
--import Control.Monad.Identity
import Control.Monad.Except
import Lens.Family2
import Lens.Family2.State
import Lens.Family2.Stock
import Unsafe.Coerce
import Utils
infixr 8 `lensMap`
-------------------------------------------------------------------------------- timed values
data Value a
= Value Time{-known present-} a{-past value-} (Maybe a){-present value; Nothing: past value propagated-}
createValue :: Time -> a -> Value a
createValue t v = Value t (error $ "undefined past value created at " ++ show t) $ Just v
getValue :: MonadError String m => Time -> Value a -> m (a, Time, Value a)
getValue time v@(Value t v1 v2)
| time == prevTime t = return (v1, prevTime t, v)
| time == t = return (v2', maybe (prevTime t) (const t) v2, v)
| time > t = return (v2', prevTime time, Value time v2' Nothing) -- automatic value propagation
| otherwise = throwError $ "undefined past value read at " ++ show time ++ "; minimum was " ++ show (prevTime t)
where
v2' = fromMaybe v1 v2
setValue :: MonadError String m => Time -> a -> Value a -> m (Value a)
setValue time v3 (Value t v1 v2)
| time > t = return $ Value time (fromMaybe v1 v2) $ Just v3 -- automatic value propagation
| otherwise = throwError $ "past value set at " ++ show time ++ "; minimum was " ++ show (nextTime t)
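-- Note (added for clarity): a read at the stored time (or later) propagates
-- the present value forward automatically, a read at the immediately
-- preceding time returns the past value, anything earlier is an error, and
-- writes are only accepted at a strictly later time.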
-------------------------------------------------------------------------------- RefReader --> Signal
-- TODO: rename to Signal?
newtype RefReader m a = RefReader { runRefReader_ :: ReaderT Bool (WriterT (Time, Set.Set PathId) (Backtrack m)) a }
deriving (Functor, Applicative, Monad)
instance (IsString str, Monad' s) => IsString (RefReader s str) where
fromString = pure . fromString
instance (Monad' s, Num a) => Num (RefReader s a) where
(+) = liftA2 (+)
(*) = liftA2 (*)
negate = fmap negate
abs = fmap abs
signum = fmap signum
fromInteger = pure . fromInteger
instance (Monad' s, Fractional a) => Fractional (RefReader s a) where
recip = fmap recip
fromRational = pure . fromRational
instance (Monad' s, Floating a) => Floating (RefReader s a) where
pi = pure pi
exp = fmap exp
sqrt = fmap sqrt
log = fmap log
(**) = liftA2 (**)
logBase = liftA2 logBase
sin = fmap sin
tan = fmap tan
cos = fmap cos
asin = fmap asin
atan = fmap atan
acos = fmap acos
sinh = fmap sinh
tanh = fmap tanh
cosh = fmap cosh
asinh = fmap asinh
atanh = fmap atanh
acosh = fmap acosh
-----------
runRefReader :: Monad' m => RefReader m a -> Backtrack m (a, Time)
runRefReader (RefReader m) = (id *** fst) <$> runWriterT (flip runReaderT True{- TODO!!!-} m)
simpleRead :: Monad' m => RefReader m a -> Backtrack m a
simpleRead = fmap fst . runRefReader
readRef :: Monad' m => Ref m a -> RefReader m a
readRef (Ref (r, _)) = r
whenChanged :: (Monad' m, Monoid b) => Bool -> RefReader m a -> (a -> Backtrack m b) -> Backtrack m b
whenChanged check r m = do
present <- lift $ getTime
(a, t) <- runRefReader r
when' (not check || t == present) $ m a
-- TODO: make safer (make idempotent)
previous :: Monad' m => RefReader m a -> RefReader m a
previous m = RefReader $ ReaderT $ \r -> mapWriterT (mapExceptT inPast) $ flip runReaderT r $ runRefReader_ m
where
inPast :: Monad m => RefCreator m b -> RefCreator m b
inPast (RefCreator m) = RefCreator $ local (prevTime *** id) m
previousValue :: Monad' m => RefReader m a -> Backtrack m a
previousValue = simpleRead . previous
-------------------------------------------------------------------------------- Ref
-- TODO: change to Laarhoven style (inside a newtype wrapper)?
newtype Ref m a = Ref (RefReader m a, a -> Backtrack m ())
unitRef :: Monad' m => Ref m ()
unitRef = Ref (return (), \() -> return ())
lensMap :: Monad' m => Lens' a b -> Ref m a -> Ref m b
lensMap k (Ref (g, s)) = Ref ((^. k) <$> g, (previousValue g >>=) . (s .) . (k .~))
joinRef :: Monad' m => RefReader m (Ref m a) -> Ref m a
joinRef rr = Ref (join $ readRef <$> rr, (simpleRead rr >>=) . (. writeRefSafe) . flip ($))
writeRefSafe :: Ref m a -> a -> Backtrack m ()
writeRefSafe (Ref (_, w)) = w
writeRefSafe' r v = writeRefSafe r v >> return (Just ())
-------------------------------------------------------------------------------- RefCreator
newtype RefCreator m a = RefCreator { runRefCreator'' :: StateT (TriggerList m) (ReaderT (Time, Context_ m) m) a }
deriving (Functor, Applicative, Monad, MonadFix)
deriving instance MonadWriter w m => MonadWriter w (RefCreator m)
--deriving instance MonadReader r m => MonadReader r (RefCreator m)
--deriving instance MonadState s m => MonadState s (RefCreator m)
instance MonadTrans RefCreator where
lift = RefCreator . lift . lift
instance MonadTrace m => MonadTrace (RefCreator m) where
traceM = RefCreator . traceM
instance Reversible m => Reversible (RefCreator m) where
restore m = RefCreator $ restore $ runRefCreator'' m <&> (runRefCreator'' +++ id)
instance SimpleRefs m => SimpleRefs (RefCreator m) where
type SimpleRef (RefCreator m) = SimpleRef m
newSimpleRef = lift . newSimpleRef
readSimpleRef = lift . readSimpleRef
writeSimpleRef r = lift . writeSimpleRef r
type Backtrack m = Exc (RefCreator m)
getTime :: Monad m => RefCreator m Time
getTime = RefCreator $ asks fst
createVal :: Monad m => a -> RefCreator m (Value a)
createVal x = liftM (flip createValue x) getTime
-- may fail
readerToCreator :: Monad' m => RefReader m a -> RefCreator m a
readerToCreator = assert "rToC" . simpleRead
runRefCreator :: forall a m . SimpleRefs m => ((forall b . RefWriter m b -> m b) -> RefCreator m a) -> m a
runRefCreator f = do
st <- newSimpleRef (nextTime mempty, mempty)
let g :: forall b . RefWriter m b -> m b
g x = modSimpleRef st $ \s -> do
(y, s') <- flip runReaderT inMain $ flip runStateT s $ runRefWriter x
return (s', y)
modSimpleRef st $ \(t, trs) -> do
(x, trs') <- flip runReaderT (t, inMain) $ flip runStateT trs $ runRefCreator'' $ f g
return ((t, trs'), x)
-------------------------------------------------------------------------------- RefWriter
newtype RefWriter m a = RefWriter { runRefWriter :: StateT (Time, TriggerList m) (ReaderT (Context_ m) m) a }
deriving (Functor, Applicative, Monad, MonadFix)
deriving instance MonadWriter w m => MonadWriter w (RefWriter m)
instance MonadTrans RefWriter where
lift = RefWriter . lift . lift
instance MonadTrace m => MonadTrace (RefWriter m) where
traceM = RefWriter . traceM
instance SimpleRefs m => SimpleRefs (RefWriter m) where
type SimpleRef (RefWriter m) = SimpleRef m
newSimpleRef = lift . newSimpleRef
readSimpleRef = lift . readSimpleRef
writeSimpleRef r = lift . writeSimpleRef r
creatorToWriter :: Monad' m => RefCreator m a -> RefWriter m a
creatorToWriter = RefWriter . (\f -> StateT $ \(time, st) -> ReaderT $ \i -> fmap (id *** (,) time) $ flip runReaderT (time, i) $ runStateT f st) . runRefCreator''
writeRef :: forall m a . (Reversible m, MonadTrace m, MonadFix m) => Ref m a -> a -> RefWriter m ()
writeRef r x = do
traceM " write"
RefWriter $ _1 %= nextTime
creatorToWriter $ do
assert "impossible" $ writeRefSafe r x
assert "can't schedule network" $ runTriggers 1
where
runTriggers :: Int -> Backtrack m ()
runTriggers k = do
present <- lift $ getTime
trs <- activeTriggers
void $ foldr (orElse $ show k) (return Nothing) $ trs <&> \t -> do
lift $ RefCreator $ tpTime t .= present
traceM $ " run " ++ show t
runTrigger t >>= \case
Nothing -> return Nothing
Just () -> runTriggers (k + 1) >> return (Just ())
-- | derived
readerToWriter :: Monad' m => RefReader m a -> RefWriter m a
readerToWriter = creatorToWriter . readerToCreator
-- | derived
modRef :: (Reversible m, MonadTrace m, MonadFix m) => Ref m a -> (a -> a) -> RefWriter m ()
r `modRef` f = readerToWriter (readRef r) >>= writeRef r . f
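-- Usage sketch (added for illustration; assumes a reference context 'm'
-- satisfying the constraints below, e.g. a test monad built on IO):
--
-- > counterDemo :: (SimpleRefs m, MonadTrace m, Reversible m, MonadFix m) => m Int
-- > counterDemo = runRefCreator $ \runWriter -> do
-- >   r <- newRef (0 :: Int)
-- >   lift $ runWriter $ r `modRef` (+ 1)
-- >   readerToCreator $ readRef r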
-------------------------------------------------------------------------------- Context
data Context_ m = Context_
{ cName :: String -- TODO: make it more efficient?
, _contextTriggers :: Lens' (TriggerList m) (TriggerList m)
}
instance Show (Context_ m) where show = cName
inMain :: Context_ m
inMain = Context_ "I" id
inContext :: (MonadTrace m, IsTrigger t) => String -> Lens' (t m) (TriggerList m) -> TPath t m -> RefCreator m b -> RefCreator m b
inContext sh k tp (RefCreator m) = do
let i = Context_ (show tp ++ sh) (tpSnd tp . k)
traceM $ " context " ++ show i
b <- RefCreator $ local (id *** const i) m
traceM $ " end contex " ++ show i
return b
-------------------------------------------------------------------------------- triggers
class IsTrigger t where
type Val t :: *
val :: Lens' (t m) (Value (Val t))
collectChildren :: t m -> [Trigger_ m]
runTrigger_ :: (MonadTrace m, MonadFix m) => (forall b . RefReader m b -> (b -> Backtrack m (Maybe ())) -> e) -> (e -> e -> e) -> TPath t m -> t m -> e
runTrigger :: (Reversible m, MonadTrace m, MonadFix m) => Trigger_ m -> Backtrack m (Maybe ())
runTrigger (Trg _ i tr) = runTrigger_ (whenChanged True) (orElse $ show i) i tr
-- TODO: make it more efficient
type PathId = String
data TPath t m = TPath
{ tpFst :: PathId
, _tpSnd1 :: Lens' (TriggerList m) (Trigger_ m)
, _tpSnd2 :: Lens' (Trigger_ m) (t m)
}
instance Show (TPath t m) where show = tpFst
tpSnd :: TPath t m -> Lens' (TriggerList m) (t m)
tpSnd (TPath _ i j) = i . j
data Trigger_ m = forall t . IsTrigger t => Trg Time (TPath t m) (t m) -- Time: when the trigger was last run
type TriggerList m = Seq (Trigger_ m)
tpTime :: Trigger_ m -> Lens' (TriggerList m) Time
tpTime (Trg _ ~(TPath _ i _) _) = i . (\tr (Trg ti p x) -> tr ti <&> \ti -> Trg ti p x)
instance Show (Trigger_ m) where show (Trg _ p _) = show p
{-
filterTriggers :: Reversible m => [(Path, Trigger_ m)] -> Backtrack m [(Path, Trigger_ m)]
filterTriggers trs = return trs -- do
take' . map snd . filter (fst) <$> mapM f trs
where
take' [] = trs
take' x = take 1 x
f (i, tr) = do
present <- getTime
(x, (t, ids)) <- runWriterT $ flip runReaderT False $ runRefReader_ $ runTrigger' i tr
let active = t == present
return (active, (i, tr))
-}
{-
gather information about every trigger:
(what it would read first if it ran now, what it has read, {- what it was sensitive to -}) -> what it writes
=> topological sort by 'what it has read'
=> pick the ones without sources
=> pick the ones without sources based on 'what it would read'
=> pick the smallest one
=> execute it
=> start again from the beginning
-}
activeTriggers :: Monad' m => Backtrack m [Trigger_ m]
activeTriggers = do
present <- lift $ getTime
let g :: [Time] -> Trigger_ m -> [([Time], Trigger_ m)]
g ts x@(Trg t _ tr) = (t', x): concatMap (g t') (collectChildren tr)
where
t' = t: ts
ok _ts@(t:_) = t < present -- && all (zipWith () ts $ tail ts)
ok _ = error "impossible"
trs <- lift $ RefCreator get
return $ map snd $ filter (ok . fst) $ concatMap (g []) $ toList' trs
addTrigger' :: forall m t x . (MonadTrace m, MonadFix m, IsTrigger t)
=> (x -> t m) -> (TPath t m -> RefCreator m x) -> RefCreator m (Ref m (Val t))
addTrigger' mk ft = do
Context_ rn r <- RefCreator $ asks snd
i <- RefCreator $ length' <$> use r
let p = TPath (rn ++ "_" ++ show i) (r . at' i) trg_
present <- getTime
traceM $ " create " ++ show p
void $ mfix $ \ta -> do
RefCreator $ r %= (`snoc'` Trg present p (mk ta))
ft p
return $ refOf p
where
trg_ :: Lens' (Trigger_ m) (t m)
trg_ tr (Trg time p t) = tr (unsafeCoerce t) <&> \t -> Trg time (unsafeCoerce p) t
addTrigger'' :: forall m t . (MonadTrace m, IsTrigger t)
=> t m -> RefCreator m (Ref m (Val t))
addTrigger'' mk = do
Context_ rn r <- RefCreator $ asks snd
i <- RefCreator $ length' <$> use r
let p = TPath (rn ++ "_" ++ show i) (r . at' i) trg_
present <- getTime
traceM $ " create " ++ show p
RefCreator $ r %= (`snoc'` Trg present p mk)
return $ refOf p
where
trg_ :: Lens' (Trigger_ m) (t m)
trg_ tr (Trg time p t) = tr (unsafeCoerce t) <&> \t -> Trg time (unsafeCoerce p) t
refOf :: (MonadTrace m, IsTrigger t) => TPath t m -> Ref m (Val t)
refOf i = Ref (get', set') where
get' = RefReader $ ReaderT $ \_setTime -> (mapWriterT $ fmap (fmap $ flip (,) $ Set.singleton $ tpFst i) . mapExceptT RefCreator) $
(mapWriterT . mapExceptT . zoom) (tpSnd i . val) $ do
time <- asks fst
v <- get
(val, creation, v') <- getValue time v
put v' -- TODO: use setTime
tell creation
return val
set' a = do
traceM $ " wr " ++ show i
mapExceptT RefCreator $ (mapExceptT . zoom) (tpSnd i . val) $ do
time <- asks fst
v <- get
put =<< setValue time a v
-------------------------------------------------------------------------------- concrete triggers
--------------------------------------------------------------------------------
data ExtendRef b a m = ExtendRef
{ _erValue :: Value a
, _erRef :: Ref m b
, _erLens :: Lens' a b
}
instance IsTrigger (ExtendRef b a) where
type Val (ExtendRef b a) = a
val tr ~ExtendRef{..} = tr _erValue <&> \_erValue -> ExtendRef{..}
collectChildren _ = mempty
runTrigger_ whenChanged orElse i ~ExtendRef{..} =
do whenChanged (readRef $ refOf i) $ writeRefSafe' _erRef . (^. _erLens)
`orElse`
do whenChanged (readRef _erRef) $ \b -> writeRefSafe' (refOf i) . (_erLens .~ b) =<< previousValue (readRef $ refOf i)
extendRef :: (MonadTrace m, MonadFix m) => Ref m b -> Lens' a b -> a -> RefCreator m (Ref m a)
extendRef _erRef _erLens a = do
_erValue <- createVal . flip (set _erLens) a =<< readerToCreator (readRef _erRef)
addTrigger'' ExtendRef{..}
---------- derived
newRef :: (MonadTrace m, MonadFix m) => a -> RefCreator m (Ref m a)
newRef = extendRef unitRef (\tr a -> tr () <&> \() -> a)
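-- Note (added for clarity): 'newRef' reuses 'extendRef' against 'unitRef'
-- with a lens focusing the () inside the new value, so a fresh reference is
-- just an extension of the unit reference that carries the whole value as
-- the added state.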
--------------------------------------------------------------------------------
data Stabilize a m = Stabilize
{ _sValue :: Value a
, _sRefR :: RefReader m a
, _ocEq :: a -> a -> Bool
}
instance IsTrigger (Stabilize a) where
type Val (Stabilize a) = a
val tr ~Stabilize{..} = tr _sValue <&> \_sValue -> Stabilize{..}
collectChildren _ = mempty
runTrigger_ whenChanged _ i ~Stabilize{..} = whenChanged _sRefR $ \a -> do
old <- previousValue $ readRef $ refOf i
when' (not $ _ocEq a old) $ writeRefSafe' (refOf i) a
stabilize_ :: (MonadTrace m, MonadFix m) => (a -> a -> Bool) -> a -> RefReader m a -> RefCreator m (Ref m a)
stabilize_ _ocEq a _sRefR = do
_sValue <- createVal a
addTrigger'' Stabilize{..}
-- | derived, but uses internal function 'previous'
delayPrev :: (MonadTrace m, MonadFix m) => a -> RefReader m (b -> a) -> RefReader m b -> RefCreator m (RefReader m a)
delayPrev a f r = readRef <$> stabilize_ (\_ _ -> False) a (f <*> previous r)
---------- derived
stabilize :: (MonadTrace m, MonadFix m, Eq a) => RefReader m a -> RefCreator m (RefReader m a)
stabilize r = readerToCreator r >>= \a -> readRef <$> stabilize_ (==) a r
delay_ :: (MonadTrace m, MonadFix m) => a -> RefReader m a -> RefCreator m (RefReader m a)
delay_ v1 r = delayPrev v1 (const id <$> r) r
--------------------------------------------------------------------------------
-- TODO: bidirectional stabilize for Ref
--------------------------------------------------------------------------------
-- TODO: relay
--------------------------------------------------------------------------------
data OnChange c a m = OnChange
{ _ocValue :: Value a
, _ocRefR :: RefReader m (RefCreator m a)
, _ocChildren :: c (TriggerList m)
}
ocChildren :: Lens' (OnChange c a m) (c (TriggerList m))
ocChildren tr ~OnChange{..} = tr _ocChildren <&> \_ocChildren -> OnChange{..}
ocBody_ :: (MonadTrace m, IsSeq c) => Int -> TPath (OnChange c a) m -> RefCreator m b -> RefCreator m b
ocBody_ idx = inContext ("C" ++ show idx) (ocChildren . at' idx)
instance IsSeq c => IsTrigger (OnChange c a) where
type Val (OnChange c a) = a
val tr ~OnChange{..} = tr _ocValue <&> \_ocValue -> OnChange{..}
collectChildren ~OnChange{..} = concatMap toList' $ toList' _ocChildren
runTrigger_ whenChanged _ i ~OnChange{..}
= whenChanged _ocRefR $ writeRefSafe' (refOf i) <=< lift . ocBody
where
ocBody a = do
idx <- RefCreator $ tpSnd i . ocChildren %%= \ch -> if (==0) $ length' $ last' ch
then (length' ch - 1, ch)
else (length' ch, ch `snoc'` mempty)
ocBody_ idx i a
generator_ :: forall m b. (MonadTrace m, MonadFix m) => Bool -> RefCreator m b -> RefReader m (RefCreator m b) -> RefCreator m (Ref m b)
generator_ True first _ocRefR = addTrigger' (\_ocValue -> OnChange{_ocChildren = singleton mempty :: FakeSeq (TriggerList m), ..}) $
\i -> createVal =<< ocBody_ 0 i first
generator_ False first _ocRefR = addTrigger' (\_ocValue -> OnChange{_ocChildren = singleton mempty :: Seq (TriggerList m), ..}) $
\i -> createVal =<< ocBody_ 0 i first
---------- derived
onChange' :: (MonadTrace m, MonadFix m) => RefReader m (RefCreator m b) -> RefCreator m (Ref m b)
onChange' r = readerToCreator r >>= \i -> generator_ True i r
joinCreator :: (MonadTrace m, MonadFix m) => RefReader m (RefCreator m b) -> RefCreator m (RefReader m b)
joinCreator = fmap readRef . onChange'
generator' :: (MonadTrace m, MonadFix m) => RefReader m (RefCreator m b) -> RefCreator m (RefReader m b)
generator' r = readerToCreator r >>= \i -> readRef <$> generator_ False i r
onChangeMemo' :: (SimpleRefs m, MonadTrace m, MonadFix m, Ord a) => RefReader m a -> (a -> RefCreator m b) -> RefCreator m (RefReader m b)
onChangeMemo' r f = do
memo <- newSimpleRef mempty
generator' $ r <&> \a -> modSimpleRef memo $ \ma -> case Map.lookup a ma of
Just b -> return (ma, b)
Nothing -> do
b <- f a
return (Map.insert a b ma, b)
{- deprecated
onChange :: (MonadTrace m, MonadFix m) => RefReader m a -> (a -> RefCreator m b) -> RefCreator m (RefReader m b)
onChange r f = joinCreator $ f <$> r
onChange_ :: (MonadTrace m, MonadFix m) => RefReader m a -> (a -> RefCreator m b) -> RefCreator m (Ref m b)
onChange_ r f = onChange' $ f <$> r
onChangeEq :: (Eq a, MonadTrace m, MonadFix m) => RefReader m a -> (a -> RefCreator m b) -> RefCreator m (RefReader m b)
onChangeEq r f = stabilize r >>= joinCreator . fmap f
onChangeEq_ :: (Eq a, MonadTrace m, MonadFix m) => RefReader m a -> (a -> RefCreator m b) -> RefCreator m (Ref m b)
onChangeEq_ r f = stabilize r >>= onChange' . fmap f
onChangeEqOld :: (Eq a, MonadTrace m, MonadFix m) => RefReader m a -> (a -> a -> RefCreator m b) -> RefCreator m (RefReader m b)
onChangeEqOld r f = do
v <- readerToCreator r
stabilize r >>= \r' -> delay_ v r' >>= \r'' -> joinCreator $ f <$> r'' <*> r'
onChangeMemo_ :: (SimpleRefs m, MonadTrace m, MonadFix m, Ord a) => RefReader m a -> (a -> RefCreator m b) -> RefCreator m (RefReader m b)
onChangeMemo_ r a = stabilize r >>= flip onChangeMemo' a
onChangeMemo :: (SimpleRefs m, MonadTrace m, MonadFix m, Ord a) => RefReader m a -> (a -> RefCreator m (RefCreator m b)) -> RefCreator m (RefReader m b)
onChangeMemo r f = stabilize r >>= flip onChangeMemo' f >>= joinCreator
-}
--------------------------------------------------------------------------------
{-
runTrigger' :: Monad' m => Path -> Trigger m -> RefReader m (Backtrack m (Maybe ()))
runTrigger' = runTrigger_ (<&>) sor
sor :: Monad' m => RefReader m a -> RefReader m a -> RefReader m a
sor r1 r2 = do
present <- RefReader $ lift $ asks fst
(_, (t, _)) <- RefReader $ lift $ lift $ runWriterT $ flip runReaderT False $ runRefReader_ r1
if t == present then r1 else r2
-}
|
divipp/lensref
|
src/LensRef.hs
|
bsd-3-clause
| 22,143 | 0 | 21 | 5,352 | 7,295 | 3,715 | 3,580 | -1 | -1 |
{-# LANGUAGE DeriveDataTypeable #-}
-- | This module defines a uniform interface for interacting with
-- compressed archives (zip files and tarballs). The interface is
-- small:
--
-- 1) Read an archive using either the 'readArchive' helper (which
-- reads a file off of disk) or 'decodeArchive' (which works on
-- ByteStrings).
--
-- 2) List files in the archive using 'archiveEntries'
--
-- 3) Extract files using 'entryContent', which returns a ByteString
--
-- Example:
--
-- > archive <- readArchive "/tmp/gsl-1.15.tar.gz"
-- > let Just configScript = entryContent archive "configure"
module Codec.Archive (
-- * Types
ArchiveIndex,
ArchiveFormat(..),
ArchiveException(..),
-- * Constructors
readArchive,
-- * Accessors
archiveEntries,
entryContent,
entryContentSuffix
) where
import Control.Exception
import Data.ByteString.Lazy ( ByteString )
import qualified Data.ByteString.Lazy as LBS
import Data.Char ( toLower )
import Data.List ( find, isSuffixOf )
import Data.Map ( Map )
import qualified Data.Map as M
import Data.Typeable ( Typeable )
import System.FilePath
import qualified System.Process as Proc
import qualified Codec.Archive.Tar as Tar
import qualified Codec.Archive.Zip as Zip
data ArchiveFormat = Tar
| Zip
| TarGz
| TarBz2
| TarXz
-- | The abstract handle representing a compressed archive
data ArchiveIndex = TarArchive !(Map FilePath Tar.Entry)
| ZipArchive !Zip.Archive
-- | The errors that the library can report
data ArchiveException = UnrecognizedFormatError String -- ^ The format of the archive could not be determined (or is unsupported)
                       | TarDecodeError Tar.FormatError -- ^ An error occurred while decoding a tar archive
| TarEntryIsNotFile String -- ^ The named tar archive entry is not a normal file
deriving (Show, Typeable)
instance Exception ArchiveException
-- | Read an archive and guess its format based on its filename.
-- Non-standard extensions (or missing extensions) will fail.
readArchive :: FilePath -> IO ArchiveIndex
readArchive p = do
let fmt = classifyArchive p
c <- decompressIfNeeded fmt p
return $! decodeArchive fmt c
decompressIfNeeded :: ArchiveFormat -> FilePath -> IO ByteString
decompressIfNeeded fmt fp =
case fmt of
TarGz -> doDecompress "gunzip" fp
TarBz2 -> doDecompress "bunzip2" fp
TarXz -> doDecompress "unxz" fp
_ -> LBS.readFile fp
where
doDecompress fltr p = do
let p0 = Proc.proc fltr ["-c", p]
p1 = p0 { Proc.std_out = Proc.CreatePipe }
(_, Just hOut, _, _) <- Proc.createProcess p1
LBS.hGetContents hOut
classifyArchive :: FilePath -> ArchiveFormat
classifyArchive p = case splitExtension (map toLower p) of
(_, ".zip") -> Zip
(_, ".tbz2") -> TarBz2
(_, ".tgz") -> TarGz
(rest, ".bz2") ->
case takeExtension rest of
".tar" -> TarBz2
_ -> throw $ UnrecognizedFormatError p
(rest, ".gz") ->
case takeExtension rest of
".tar" -> TarGz
_ -> throw $ UnrecognizedFormatError p
(rest, ".xz") ->
case takeExtension rest of
".tar" -> TarXz
_ -> throw $ UnrecognizedFormatError p
_ -> throw $ UnrecognizedFormatError p
-- | Read an archive from a ByteString, with a given archive format.
decodeArchive :: ArchiveFormat -> ByteString -> ArchiveIndex
decodeArchive Zip content = ZipArchive zarch
where
zarch = Zip.toArchive content
-- otherwise it is a decompressed tar
decodeArchive _ content = TarArchive entryMap
where
es = Tar.read content
entryMap = Tar.foldEntries fillMap M.empty (throw . TarDecodeError) es
fillMap e m = M.insert (Tar.entryPath e) e m
-- | Retrieve the list of all files in the archive
archiveEntries :: ArchiveIndex -> [FilePath]
archiveEntries (TarArchive ix) = M.keys ix
archiveEntries (ZipArchive zarch) = Zip.filesInArchive zarch
-- | Retrieve the contents of the named file from the archive. If the
-- requested file is not in the archive, this function returns Nothing
entryContent :: ArchiveIndex -> FilePath -> Maybe ByteString
entryContent (TarArchive ix) p = do
e <- M.lookup p ix
case Tar.entryContent e of
Tar.NormalFile bs _ -> return bs
_ -> throw $ TarEntryIsNotFile p
entryContent (ZipArchive zarch) p = do
e <- Zip.findEntryByPath p zarch
return (Zip.fromEntry e)
-- | Get the archive entry (if any) such that the file path of the
-- archive entry is a *suffix* of @p@. This is useful to ignore some
-- absolute prefix attached to @p@ that would match the path in the
-- archive if it was extracted at the correct location.
entryContentSuffix :: ArchiveIndex -> FilePath -> Maybe ByteString
entryContentSuffix (TarArchive ix) p = do
(_, e) <- find (matchPrefix p) (M.assocs ix)
case Tar.entryContent e of
Tar.NormalFile bs _ -> return bs
_ -> throw $ TarEntryIsNotFile p
where
matchPrefix fp (arcPath, _) = arcPath `isSuffixOf` fp
entryContentSuffix (ZipArchive zarch) p = do
e <- find (matchPrefix p) (Zip.zEntries zarch)
return (Zip.fromEntry e)
where
matchPrefix fp e = Zip.eRelativePath e `isSuffixOf` fp
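-- Illustrative example (added), following the module header example: the
-- suffix match lets callers pass a path with a build-tree prefix that does
-- not exist inside the archive, e.g.
--
-- > archive <- readArchive "/tmp/gsl-1.15.tar.gz"
-- > let Just configScript = entryContentSuffix archive "/tmp/build/gsl-1.15/configure"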
|
travitch/archive-inspection
|
src/Codec/Archive.hs
|
bsd-3-clause
| 5,206 | 0 | 14 | 1,139 | 1,181 | 628 | 553 | 102 | 10 |
module Test.Spec.TxMetaScenarios (
txMetaScenarioA
, txMetaScenarioB
, txMetaScenarioC
, txMetaScenarioD
, txMetaScenarioE
, txMetaScenarioF
, txMetaScenarioG
, txMetaScenarioH
, txMetaScenarioI
, txMetaScenarioJ
, bracketActiveWalletTxMeta
, TxScenarioRet
) where
import Universum
import qualified Data.Set as Set
import Data.Time.Units (fromMicroseconds)
import qualified Cardano.Wallet.API.V1.Types as V1
import qualified Cardano.Wallet.Kernel as Kernel
import Cardano.Wallet.Kernel.DB.TxMeta.Types
import qualified Cardano.Wallet.Kernel.Diffusion as Kernel
import Cardano.Wallet.Kernel.Internal
import qualified Cardano.Wallet.Kernel.Keystore as Keystore
import Cardano.Wallet.Kernel.NodeStateAdaptor
(MockNodeStateParams (..), SecurityParameter (..),
mockNodeState)
import Cardano.Wallet.WalletLayer.Kernel.Transactions
import Pos.Chain.Genesis (Config (..))
import Pos.Core
import Pos.Core.Chrono
import Pos.Core.Slotting (EpochIndex (..), LocalSlotIndex (..),
SlotId (..))
import Pos.Crypto (ProtocolMagic)
import Pos.Infra.InjectFail (mkFInjects)
import Pos.Util (withCompileInfo)
import Test.Hspec
import Test.Infrastructure.Genesis
import Test.Pos.Configuration (withDefUpdateConfiguration,
withProvidedMagicConfig)
import UTxO.Context
import UTxO.DSL
import Wallet.Inductive
{-# ANN module ("HLint: ignore Reduce duplication" :: Text) #-}
-- | A Payment from P0 to P1 with change returned to P0
paymentWithChangeFromP0ToP1 :: forall h. Hash h Addr
=> GenesisValues h Addr -> Transaction h Addr
paymentWithChangeFromP0ToP1 GenesisValues{..} = Transaction {
trFresh = 0
, trIns = Set.fromList [ fst initUtxoP0 ]
, trOuts = [ Output p1 1000
, Output p0 (initBalP0 - 1 * (1000 + fee)) -- change
]
, trFee = fee
, trHash = 1
, trExtra = []
}
where
fee = overestimate txFee 1 2
-- | A payment from P1 to P0 with change returned to P1.
paymentWithChangeFromP1ToP0 :: forall h. Hash h Addr
=> GenesisValues h Addr -> Transaction h Addr
paymentWithChangeFromP1ToP0 GenesisValues{..} = Transaction {
trFresh = 0
, trIns = Set.fromList [ fst initUtxoP1 ]
, trOuts = [ Output p0 1000
, Output p1 (initBalP1 - 1 * (1000 + fee)) -- change
]
, trFee = fee
, trHash = 1
, trExtra = []
}
where
fee = overestimate txFee 1 2
-- | A payment from P0 to himself.
paymentWithChangeFromP0ToP0 :: forall h. Hash h Addr
=> GenesisValues h Addr -> Transaction h Addr
paymentWithChangeFromP0ToP0 GenesisValues{..} = Transaction {
trFresh = 0
, trIns = Set.fromList [ fst initUtxoP0 ]
, trOuts = [ Output p0 1000
, Output p0 (initBalP1 - 1 * (1000 + fee))
]
, trFee = fee
, trHash = 1
, trExtra = []
}
where
fee = overestimate txFee 1 2
-- | A single large payment from P0 with several outputs (to P1, R0 and R1) and change back to P0.
bigPaymentWithChange :: forall h. Hash h Addr
=> GenesisValues h Addr -> Transaction h Addr
bigPaymentWithChange GenesisValues{..} = Transaction {
trFresh = 0
, trIns = Set.fromList [ fst initUtxoP0 ]
, trOuts = [ Output p1 1000
, Output r0 2000
, Output r1 3000
, Output p0 (initBalP0 - 1 * (6000 + fee))
]
, trFee = fee
, trHash = 1
, trExtra = []
}
where
fee = overestimate txFee 1 4
-- | Two payments from P0 to P1 with change returned to P0.
-- (t0 uses change address p0 and t1 uses p0b)
-- The second payment spends the change of the first payment.
repeatPaymentWithChangeFromP0ToP1 :: forall h. Hash h Addr
=> GenesisValues h Addr
-> Addr
-> (Transaction h Addr, Transaction h Addr)
repeatPaymentWithChangeFromP0ToP1 genVals@GenesisValues{..} changeAddr =
(t0,t1)
where
fee = overestimate txFee 1 2
t0 = paymentWithChangeFromP0ToP1 genVals
t1 = Transaction {
trFresh = 0
, trIns = Set.fromList [ Input (hash t0) 1 ]
, trOuts = [ Output p1 1000
, Output changeAddr (initBalP0 - 2 * (1000 + fee)) -- change
]
, trFee = fee
, trHash = 2
, trExtra = []
}
type TxScenarioRet h = (MockNodeStateParams, Inductive h Addr, PassiveWallet -> IO ())
-- | Scenario A
-- Empty case
txMetaScenarioA :: GenesisValues h Addr
-> TxScenarioRet h
txMetaScenarioA GenesisValues{..} = (nodeStParams1, ind, lengthCheck 0)
where
ind = Inductive {
inductiveBoot = boot
, inductiveOurs = Set.singleton p0 -- define the owner of the wallet: Poor actor 0
, inductiveEvents = OldestFirst [
]
}
-- | Scenario B
-- A single pending payment.
txMetaScenarioB :: forall h. Hash h Addr
=> GenesisValues h Addr
-> TxScenarioRet h
txMetaScenarioB genVals@GenesisValues{..} = (nodeStParams1, ind, check)
where
t0 = paymentWithChangeFromP0ToP1 genVals
ind = Inductive {
inductiveBoot = boot
, inductiveOurs = Set.singleton p0
, inductiveEvents = OldestFirst [
NewPending t0
]
}
check = checkWithTxs $ \ txs -> do
let fees = overestimate txFee 1 2
let props = map (\t -> (V1.txConfirmations t, V1.txAmount t, V1.txType t,V1.txDirection t, V1.txStatus t)) txs
props `shouldMatchList` [(0,V1.V1 . Coin $ fees + 1000,V1.ForeignTransaction,V1.OutgoingTransaction,V1.Applying)]
-- | Scenario C
-- A single pending payment and then confirmation.
txMetaScenarioC :: forall h. Hash h Addr
=> GenesisValues h Addr
-> TxScenarioRet h
txMetaScenarioC genVals@GenesisValues{..} = (nodeStParams1, ind, check)
where
t0 = paymentWithChangeFromP0ToP1 genVals
ind = Inductive {
inductiveBoot = boot
, inductiveOurs = Set.singleton p0
, inductiveEvents = OldestFirst [
NewPending t0
, ApplyBlock $ OldestFirst [t0]
]
}
check = checkWithTxs $ \ txs -> do
let fees = overestimate txFee 1 2
let props = map (\t -> (V1.txConfirmations t, V1.txAmount t, V1.txType t,V1.txDirection t, V1.txStatus t)) txs
props `shouldMatchList` [(3,V1.V1 . Coin $ fees + 1000,V1.ForeignTransaction,V1.OutgoingTransaction,V1.InNewestBlocks)]
-- | Scenario D
-- Two confirmed payments from P0 to P1, using `change` addresses P0 and P0b respectively
txMetaScenarioD :: forall h. Hash h Addr
=> GenesisValues h Addr
-> TxScenarioRet h
txMetaScenarioD genVals@GenesisValues{..} = (nodeStParams1, ind, check)
where
(t0,t1) = repeatPaymentWithChangeFromP0ToP1 genVals p0b
ind = Inductive {
inductiveBoot = boot
, inductiveOurs = Set.fromList [p0,p0b]
, inductiveEvents = OldestFirst [
NewPending t0
, ApplyBlock $ OldestFirst [t0]
, ApplyBlock $ OldestFirst [t1]
]
}
check = checkWithTxs $ \ txs -> do
-- txs `shouldBe` []
let fees = overestimate txFee 1 2
let props = map (\t -> (V1.txConfirmations t, V1.txAmount t, V1.txType t,V1.txDirection t, V1.txStatus t)) txs
props `shouldMatchList` [(3,V1.V1 . Coin $ fees + 1000,V1.ForeignTransaction,V1.OutgoingTransaction,V1.InNewestBlocks)
,(2,V1.V1 . Coin $ fees + 1000,V1.ForeignTransaction,V1.OutgoingTransaction,V1.InNewestBlocks)
]
-- | Scenario E
-- ScenarioD + Rollback
--
-- This scenario exercises Rollback behaviour.
txMetaScenarioE :: forall h. Hash h Addr
=> GenesisValues h Addr
-> TxScenarioRet h
txMetaScenarioE genVals@GenesisValues{..} = (nodeStParams1, ind, check)
where
(t0,t1) = repeatPaymentWithChangeFromP0ToP1 genVals p0b
ind = Inductive {
inductiveBoot = boot
, inductiveOurs = Set.fromList [p0,p0b] -- define the owner of the wallet: Poor actor 0
, inductiveEvents = OldestFirst [
NewPending t0
, ApplyBlock $ OldestFirst [t0] -- confirms t0 and updates block metadata
, ApplyBlock $ OldestFirst [t1] -- confirms t1 and updates block metadata
, Rollback -- rolls back t1, so it should be V1.WontApply
, ApplyBlock $ OldestFirst []
, ApplyBlock $ OldestFirst []
]
}
check = checkWithTxs $ \ txs -> do
let fees = overestimate txFee 1 2
let props = map (\t -> (V1.txConfirmations t, V1.txAmount t, V1.txType t,V1.txDirection t, V1.txStatus t)) txs
props `shouldMatchList` [(3,V1.V1 . Coin $ fees + 1000,V1.ForeignTransaction,V1.OutgoingTransaction,V1.InNewestBlocks)
,(0,V1.V1 . Coin $ fees + 1000,V1.ForeignTransaction,V1.OutgoingTransaction,V1.WontApply)
]
-- | Scenario F
-- A payment from P1 to P0's single address.
-- This should create IncomingTransactions.
txMetaScenarioF :: forall h. Hash h Addr
=> GenesisValues h Addr
-> TxScenarioRet h
txMetaScenarioF genVals@GenesisValues{..} = (nodeStParams1, ind, check)
where
t0 = paymentWithChangeFromP1ToP0 genVals
ind = Inductive {
inductiveBoot = boot
, inductiveOurs = Set.singleton p0 -- define the owner of the wallet: Poor actor 0
, inductiveEvents = OldestFirst [
ApplyBlock $ OldestFirst [t0] -- confirms t0 and updates block metadata
]
}
check = checkWithTxs $ \ txs -> do
let props = map (\t -> (V1.txConfirmations t, V1.txAmount t, V1.txType t,V1.txDirection t, V1.txStatus t)) txs
props `shouldMatchList` [(3,V1.V1 . Coin $ 1000,V1.ForeignTransaction,V1.IncomingTransaction,V1.InNewestBlocks)]
-- | Scenario G
-- A single pending payment and then confirmation.
-- This tests different node parameters.
txMetaScenarioG :: forall h. Hash h Addr
=> GenesisValues h Addr
-> TxScenarioRet h
txMetaScenarioG genVals@GenesisValues{..} = (nodeStParams2, ind, check)
where
t0 = paymentWithChangeFromP0ToP1 genVals
ind = Inductive {
inductiveBoot = boot
, inductiveOurs = Set.singleton p0 -- define the owner of the wallet: Poor actor 0
, inductiveEvents = OldestFirst [
NewPending t0
, ApplyBlock $ OldestFirst [t0] -- confirms t0 and updates block metadata
]
}
check = checkWithTxs $ \ txs -> do
let fees = overestimate txFee 1 2
let props = map (\t -> (V1.txAmount t, V1.txType t,V1.txDirection t, V1.txStatus t)) txs
props `shouldMatchList` [(V1.V1 . Coin $ fees + 1000,V1.ForeignTransaction,V1.OutgoingTransaction,V1.Persisted)]
-- | Scenario H
-- A single pending payment to itself and then confirmation.
-- This should be a Local Tx.
txMetaScenarioH :: forall h. Hash h Addr
=> GenesisValues h Addr
-> TxScenarioRet h
txMetaScenarioH genVals@GenesisValues{..} = (nodeStParams1, ind, check)
where
t0 = paymentWithChangeFromP0ToP0 genVals
ind = Inductive {
inductiveBoot = boot
, inductiveOurs = Set.singleton p0 -- define the owner of the wallet: Poor actor 0
, inductiveEvents = OldestFirst [
NewPending t0
, ApplyBlock $ OldestFirst [t0] -- confirms t0 and updates block metadata
]
}
check = checkWithTxs $ \ txs -> do
let fees = overestimate txFee 1 2
let props = map (\t -> (V1.txAmount t, V1.txType t,V1.txDirection t, V1.txStatus t)) txs
props `shouldMatchList` [(V1.V1 . Coin $ fees,V1.LocalTransaction,V1.OutgoingTransaction,V1.InNewestBlocks)]
-- | Scenario I. This is like Scenario C with rollbacks.
-- Results should not change.
txMetaScenarioI :: forall h. Hash h Addr
=> GenesisValues h Addr
-> TxScenarioRet h
txMetaScenarioI genVals@GenesisValues{..} = (nodeStParams1, ind, check)
where
t0 = paymentWithChangeFromP0ToP1 genVals
ind = Inductive {
inductiveBoot = boot
, inductiveOurs = Set.singleton p0 -- define the owner of the wallet: Poor actor 0
, inductiveEvents = OldestFirst [
NewPending t0
, ApplyBlock $ OldestFirst [t0] -- confirms t0 and updates block metadata
, Rollback
, ApplyBlock $ OldestFirst [t0]
, Rollback
, ApplyBlock $ OldestFirst [t0]
]
}
check = checkWithTxs $ \ txs -> do
let fees = overestimate txFee 1 2
let props = map (\t -> (V1.txConfirmations t, V1.txAmount t, V1.txType t,V1.txDirection t, V1.txStatus t)) txs
props `shouldMatchList` [(3,V1.V1 . Coin $ fees + 1000,V1.ForeignTransaction,V1.OutgoingTransaction,V1.InNewestBlocks)]
-- | Scenario J
-- A single payment with 4 outputs.
txMetaScenarioJ :: forall h. Hash h Addr
=> GenesisValues h Addr
-> TxScenarioRet h
txMetaScenarioJ genVals@GenesisValues{..} = (nodeStParams1, ind, check)
where
t0 = bigPaymentWithChange genVals
ind = Inductive {
inductiveBoot = boot
, inductiveOurs = Set.singleton p0 -- define the owner of the wallet: Poor actor 0
, inductiveEvents = OldestFirst [
NewPending t0
]
}
check = checkWithTxs $ \ txs -> do
let fees = overestimate txFee 1 4
let props = map (\t -> (V1.txConfirmations t, V1.txAmount t, V1.txType t,V1.txDirection t, V1.txStatus t)) txs
props `shouldMatchList` [(0,V1.V1 . Coin $ fees + 6000,V1.ForeignTransaction,V1.OutgoingTransaction,V1.Applying)]
lengthCheck :: Int -> PassiveWallet -> IO ()
lengthCheck n pw = do
let db = pw ^. Kernel.walletMeta
meta <- getAllTxMetas db
length meta `shouldBe` n
checkWithTxs :: ([V1.Transaction] -> IO ()) -> PassiveWallet -> IO ()
checkWithTxs check pw = do
let db = pw ^. Kernel.walletMeta
metas <- getAllTxMetas db
txs <- do
mapOfEitherTx <- mapM (toTransaction pw) metas
let eiTxs = sequence mapOfEitherTx
return $ fromRight (error ("Account not found")) eiTxs
check txs
nodeStParams1 :: MockNodeStateParams
nodeStParams1 =
withDefUpdateConfiguration $
withCompileInfo $
MockNodeStateParams {
mockNodeStateTipSlotId = SlotId (EpochIndex 0) (UnsafeLocalSlotIndex 4)
, mockNodeStateSlotStart = const $ Right getSomeTimestamp
, mockNodeStateSecurityParameter = SecurityParameter 2160
, mockNodeStateNextEpochSlotDuration = fromMicroseconds 200
, mockNodeStateNtpDrift = const V1.TimeInfoUnavailable
, mockNodeStateSyncProgress = (Just 100, 100)
, mockNodeStateCreationTimestamp = getSomeTimestamp
}
nodeStParams2 :: MockNodeStateParams
nodeStParams2 =
withDefUpdateConfiguration $
withCompileInfo $
MockNodeStateParams {
mockNodeStateTipSlotId = SlotId (EpochIndex 1) (UnsafeLocalSlotIndex 1)
, mockNodeStateSlotStart = const $ Right getSomeTimestamp
, mockNodeStateSecurityParameter = SecurityParameter 2160
, mockNodeStateNextEpochSlotDuration = fromMicroseconds 200
, mockNodeStateNtpDrift = const V1.TimeInfoUnavailable
, mockNodeStateSyncProgress = (Just 100, 100)
, mockNodeStateCreationTimestamp = getSomeTimestamp
}
-- | Initialize active wallet in a manner suitable for generator-based testing.
-- This is different from the one in Kernel, because it is parametrised over
-- the NodeStateParameters. This is important for Transactions, because
-- dynamic TxMeta depend on the state of the Node and we want to be flexible
-- there for better testing.
bracketActiveWalletTxMeta :: ProtocolMagic -> MockNodeStateParams -> (Kernel.ActiveWallet -> IO a) -> IO a
bracketActiveWalletTxMeta pm stateParams test =
withProvidedMagicConfig pm $ \genesisConfig _ _ -> do
bracketPassiveWalletTxMeta (configProtocolMagic genesisConfig) stateParams $ \passive ->
Kernel.bracketActiveWallet passive
diffusion
$ \active -> test active
-- | Initialize passive wallet in a manner suitable for the unit tests
bracketPassiveWalletTxMeta :: ProtocolMagic -> MockNodeStateParams -> (Kernel.PassiveWallet -> IO a) -> IO a
bracketPassiveWalletTxMeta pm stateParams postHook = do
Keystore.bracketTestKeystore $ \keystore -> do
mockFInjects <- mkFInjects mempty
Kernel.bracketPassiveWallet
pm
Kernel.UseInMemory
logMessage
keystore
(mockNodeState stateParams)
mockFInjects
postHook
where
logMessage _ _ = return ()
-- TODO: Decide what we want to do with submitted transactions
diffusion :: Kernel.WalletDiffusion
diffusion = Kernel.WalletDiffusion {
walletSendTx = \_tx -> return False
, walletGetSubscriptionStatus = return mempty
}
getSomeTimestamp :: Pos.Core.Timestamp
getSomeTimestamp = Pos.Core.Timestamp $ fromMicroseconds 12340000
|
input-output-hk/pos-haskell-prototype
|
wallet/test/unit/Test/Spec/TxMetaScenarios.hs
|
mit
| 17,887 | 0 | 19 | 5,284 | 4,367 | 2,392 | 1,975 | -1 | -1 |
-- |
-- Module : Network.OAuth.Util
-- Copyright : (c) Joseph Abrahamson 2013
-- License : MIT
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : non-portable
--
module Network.OAuth.Util where
import qualified Data.ByteString as S
import Network.HTTP.Types (urlEncode)
pctEncode :: S.ByteString -> S.ByteString
pctEncode = urlEncode True
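-- Illustrative example (added): spaces and other reserved characters are
-- percent-encoded, e.g.
--
-- >>> pctEncode "foo bar"
-- "foo%20bar"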
|
ibotty/oauthenticated
|
src/Network/OAuth/Util.hs
|
mit
| 395 | 0 | 6 | 81 | 56 | 38 | 18 | 5 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveFunctor #-}
module Data.Prop where
import Control.Arrow ((&&&))
import Control.Monad (void)
import Data.Tuple.HT
import Numeric.Natural
import qualified Numeric.Natural.Prelude as N
import Numeric.Natural.QuickCheck ()
import Data.Typeable (Typeable)
import Data.Maybe (fromJust)
import Control.Applicative
import Data.List (nub, sort)
import Test.QuickCheck
import Text.Read (readMaybe)
import Data.List.NonEmpty (NonEmpty (..))
import qualified Data.List.NonEmpty as NE
import qualified Text.Read as R
import qualified Text.ParserCombinators.ReadP as RP
import qualified Text.ParserCombinators.ReadPrec as RPC
import Data.Name
-- $setup
-- >>> import Text.Show.Functions ()
-- >>> :set -XOverloadedStrings
-- >>> :set -XScopedTypeVariables
-- >>> import Data.Maybe
-- >>> import Data.List
data Prop a
= Prop a
| Imply (Prop a) (Prop a)
deriving (Eq, Ord, Typeable, Functor)
type PropS = Prop Name
type PropI = Prop Natural
instance Arbitrary a => Arbitrary (Prop a) where
arbitrary = frequency
[ (,) 2 $ Prop <$> arbitrary
, (,) 1 $ Imply <$> arbitrary <*> arbitrary
]
instance CoArbitrary a => CoArbitrary (Prop a) where
coarbitrary (Prop a) = coarbitrary a
coarbitrary (Imply a b) = coarbitrary a . coarbitrary b
isAtom :: Prop a -> Bool
isAtom (Prop _) = True
isAtom _ = False
isArrow :: Prop a -> Bool
isArrow = not . isAtom
implyLeft :: Prop a -> Maybe (Prop a)
implyLeft (Imply p _) = Just p
implyLeft _ = Nothing
implyRight :: Prop a -> Maybe (Prop a)
implyRight (Imply _ q) = Just q
implyRight _ = Nothing
instance Show a => Show (Prop a) where
show (Prop a) = show a
show (Imply p q) = paren (show p) ++ " -> " ++ show q where
paren = if isAtom p then id else ("(" ++) . (++ ")")
instance Read a => Read (Prop a) where
readPrec = RPC.lift p where
p = parens $ do
q <- p'
qs <- RP.many (arrow >> p')
return $ foldr1 Imply (q:qs)
p' = paren p RP.<++ atom
arrow = do
RP.skipSpaces
void $ RP.string "->"
return Imply
atom = Prop <$> RP.readS_to_P reads
paren = RP.between (RP.skipSpaces >> RP.char '(') (RP.skipSpaces >> RP.char ')')
parens x = x RP.<++ paren (parens x)
-- prop> \ p q r -> (p ~> q ~> r) == (p ~> (q ~> r))
(~>) :: Prop a -> Prop a -> Prop a
(~>) = Imply
infixr ~>
-- | >>> readPropI "2" == Prop 2
-- True
-- >>> readPropS "p -> q" == (Prop "p" `Imply` Prop "q")
-- True
-- >>> readPropI "0 -> (1 -> 2) -> 3" == (Prop 0 `Imply` ((Prop 1 `Imply` Prop 2) `Imply` Prop 3))
-- True
--
-- prop> \ p -> (read . show) p == (p :: PropI)
-- prop> \ p -> (read . show) p == (p :: PropS)
readProp :: Read a => String -> Prop a
readProp = read
readPropS :: String -> PropS
readPropS = read
readPropI :: String -> PropI
readPropI = read
readPropMay :: Read a => String -> Maybe (Prop a)
readPropMay = readMaybe
-- | >>> show ((Prop 0 `Imply` Prop 1) `Imply` (Prop 0 `Imply` Prop 2))
-- "(0 -> 1) -> 0 -> 2"
showProp :: Show a => Prop a -> String
showProp = show
-- | prop> \ x -> (True `imply`) x == id x
-- >>> False `imply` undefined
-- True
--
-- prop> \ p q -> p `imply` q == (not p || q)
-- prop> \ p q -> p `imply` q == (p <= q)
imply :: Bool -> Bool -> Bool
imply p q = not p || q
foldProp :: (a -> b) -> (b -> b -> b) -> Prop a -> b
foldProp f g = rec where
rec (Prop a) = f a
rec (Imply a b) = rec a `g` rec b
-- | prop> \ x xs -> let ps = map Prop (x:xs) in (chain . foldr1 Imply) ps == ps
chain :: Prop a -> [Prop a]
chain p@(Prop _) = [p]
chain (Imply p q) = p : chain q
chain' :: Prop a -> NonEmpty (Prop a)
chain' = NE.fromList . chain
-- | prop> \ xs -> (isJust . unchainMay) xs == (not . null) xs
unchainMay :: [Prop a] -> Maybe (Prop a)
unchainMay [] = Nothing
unchainMay xs = Just $ foldr1 Imply xs
-- | prop> \ x -> (unchain . chain') x == x
--
-- >>> let x = readPropS "p -> q" :| [] in (chain' . unchain) x == x
-- False
unchain :: NonEmpty (Prop a) -> Prop a
unchain = fromJust . unchainMay . NE.toList
-- | prop> \ p q f -> evaluate f (Prop p ~> Prop q) == (f p `imply` f q)
-- prop> \ f -> evaluate f (readPropS "((p -> q) -> p) -> p")
evaluate :: (a -> Bool) -> Prop a -> Bool
evaluate v = foldProp v imply
-- | >>> sort . atoms $ readPropS "p -> q -> r"
-- [p,q,r]
atoms :: Eq a => Prop a -> [a]
atoms = nub . foldProp (: []) (++)
-- | >>> atomCount (readPropS "foo -> (bar -> baz) -> qux")
-- 4
atomCount :: Eq a => Prop a -> Natural
atomCount = N.length . atoms
-- | >>> normalize (readPropS "e -> (b -> d) -> b") == readPropI "0 -> (1 -> 2) -> 1"
-- True
--
-- prop> \ p -> normalize p == (normalize . normalize) p
-- prop> \ p -> atomCount (normalize p) == atomCount p
normalize :: Eq a => Prop a -> PropI
normalize p = (fromJust . (`N.elemIndex` atoms p)) <$> p
-- | >>> normalize' (readPropS "f -> (b -> c) -> c") == readPropI "2 -> (0 -> 1) -> 1"
-- True
--
-- prop> \ p -> normalize' p == (normalize' . normalize') p
-- prop> \ p -> atomCount (normalize' p) == atomCount p
normalize' :: Ord a => Prop a -> PropI
normalize' p = (fromJust . (`N.elemIndex` sort (atoms p))) <$> p
-- | O(2^n)
-- prop> \ n -> (n < 8) `imply` (2 ^ n == length (valuations' n))
valuations :: Eq a => [a] -> [a -> Maybe Bool]
valuations xs = map (\ f x -> f =<< x `N.elemIndex` xs) $ valuations' (N.length xs)
valuations' :: Natural -> [Natural -> Maybe Bool]
valuations' n = map N.atMay $ N.replicateM n [True,False]
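-- A small illustrative check (hypothetical doctest; it assumes N.atMay and
-- N.replicateM behave like the list functions of the same name):
-- >>> map (\ f -> map (fromJust . f) [0,1]) (valuations' 2)
-- [[True,True],[True,False],[False,True],[False,False]]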
-- | prop> \ p v -> (atomCount p < 4) `imply` (tautology p `imply` evaluate v p)
-- >>> tautology $ readPropS "p -> q -> p"
-- True
-- >>> tautology $ readPropS "(p -> q) -> p"
-- False
tautology :: Eq a => Prop a -> Bool
tautology = tautology' . normalize
tautology' :: PropI -> Bool
tautology' p = all (`evaluate` normalize p) (map (fromJust .) . valuations' . N.length $ atoms p)
-- | prop> \ p -> support [] p == tautology p
-- prop> \ p qs -> tautology p `imply` support qs p
support :: Eq a => [Prop a] -> Prop a -> Bool
support hs p = all (`evaluate` p') $ filter (\ v -> all (evaluate v) hs') (map (fromJust .) . valuations' $ N.length as) where
as = nub $ concatMap atoms (p : hs)
ix = fmap (fromJust . (`N.elemIndex` as))
(p':hs') = map ix (p:hs)
-- | prop> \ p -> not . null $ split p
-- prop> \ t -> let p = Prop t in split p == [([], p)]
--
-- prop> \ t ts -> let ps = map Prop (t:ts) in length (split (foldr1 (~>) ps)) == length ps
-- >>> split (readPropS "p -> q -> r")
-- [([],p -> q -> r),([p],q -> r),([p,q],r)]
split :: Eq a => Prop a -> [([Prop a], Prop a)]
split = nub . f where
f (Prop n) = [([], Prop n)]
f (Imply p q) = ([], p ~> q) : ([p], q) : map (mapFst (p :)) (f q)
-- | prop> \ p -> not . null $ conclusions p
conclusions :: Eq a => Prop a -> [Prop a]
conclusions = nub . map snd . split
-- | >>> split1 (readPropS "a -> (b -> c) -> d")
-- ([a,b -> c],d)
split1 :: Prop a -> ([Prop a], Prop a)
split1 = (init &&& last) . chain
|
solorab/proof-haskell
|
Data/Prop.hs
|
mit
| 7,049 | 0 | 14 | 1,716 | 2,157 | 1,160 | 997 | 123 | 2 |
{-# LANGUAGE CPP #-}
{- |
Module : $Header$
Description : utility functions that can't be found in the libraries
Copyright : (c) Klaus Luettich, Uni Bremen 2002-2006
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : portable
Utility functions that can't be found in the libraries
but should be shared across Hets.
-}
module Common.Utils
( isSingleton
, replace
, hasMany
, number
, combine
, trim
, trimLeft
, trimRight
, toSnakeCase
, nubOrd
, nubOrdOn
, atMaybe
, readMaybe
, mapAccumLM
, mapAccumLCM
, concatMapM
, composeMap
, keepMins
, splitOn
, splitPaths
, splitBy
, splitByList
, numberSuffix
, basename
, dirname
, fileparse
, stripDir
, stripSuffix
, makeRelativeDesc
, getEnvSave
, getEnvDef
, filterMapWithList
, timeoutSecs
, executeProcess
, timeoutCommand
, withinDirectory
, writeTempFile
, getTempFile
, getTempFifo
, readFifo
, verbMsg
, verbMsgLn
, verbMsgIO
, verbMsgIOLn
) where
import Data.Char
import Data.List
import Data.Maybe
import qualified Data.Map as Map
import qualified Data.Set as Set
import System.Directory
import System.Environment
import System.Exit
import System.FilePath (joinPath, makeRelative, equalFilePath, takeDirectory)
import System.IO
import System.IO.Error (isEOFError)
import System.Process
import System.Timeout
#ifdef UNIX
import System.Posix.Files (createNamedPipe, unionFileModes,
ownerReadMode, ownerWriteMode)
import System.Posix.IO (OpenMode (ReadWrite), defaultFileFlags,
openFd, closeFd, fdRead)
import Control.Concurrent (threadDelay, forkIO, killThread)
import Control.Concurrent.MVar (MVar, newEmptyMVar, takeMVar, putMVar)
import Control.Exception as Exception
import System.IO.Unsafe (unsafeInterleaveIO)
#endif
import Control.Monad
{- | Writes the message to the given handle unless the verbosity is less than
the message level. -}
verbMsg :: Handle -- ^ Output handle
-> Int -- ^ global verbosity
-> Int -- ^ message level
-> String -- ^ the message
-> IO ()
verbMsg hdl v lvl = when (lvl <= v) . hPutStr hdl
-- | Same as 'verbMsg' but with a newline at the end
verbMsgLn :: Handle -> Int -> Int -> String -> IO ()
verbMsgLn hdl v lvl = when (lvl <= v) . hPutStrLn hdl
-- | 'verbMsg' with stdout as handle
verbMsgIO :: Int -> Int -> String -> IO ()
verbMsgIO = verbMsg stdout
-- | 'verbMsgLn' with stdout as handle
verbMsgIOLn :: Int -> Int -> String -> IO ()
verbMsgIOLn = verbMsgLn stdout
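{- A hypothetical use of the helpers above, assuming a global verbosity of 2
was read from the command line; the message is printed because its level (1)
is not greater than 2:
verbMsgIOLn 2 1 "parsing finished" -}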
{- | replace all occurrences of the first (non-empty sublist) argument
with the second argument in the third (list) argument. -}
replace :: Eq a => [a] -> [a] -> [a] -> [a]
replace sl r = case sl of
[] -> error "Common.Utils.replace: empty list"
_ -> concat . unfoldr (\ l -> case l of
[] -> Nothing
hd : tl -> Just $ case stripPrefix sl l of
Nothing -> ([hd], tl)
Just rt -> (r, rt))
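{- Illustrative applications of 'replace' (the expected results follow from
the definition above):
replace "ab" "x" "abcabd" == "xcxd"
replace ", " "; " "a, b, c" == "a; b; c" -}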
-- | add indices to a list starting from one
number :: [a] -> [(a, Int)]
number = flip zip [1 ..]
-- | /O(1)/ test if the set's size is one
isSingleton :: Set.Set a -> Bool
isSingleton s = Set.size s == 1
-- | /O(1)/ test if the set's size is greater than one
hasMany :: Set.Set a -> Bool
hasMany s = Set.size s > 1
{- | Transform a list @[l1, l2, ... ln]@ to (in sloppy notation)
@[[x1, x2, ... xn] | x1 <- l1, x2 <- l2, ... xn <- ln]@
(this is just the 'sequence' function!) -}
combine :: [[a]] -> [[a]]
combine = sequence
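-- A concrete case (this follows directly from 'sequence' on lists):
-- combine [[1,2],[3,4]] == [[1,3],[1,4],[2,3],[2,4]]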
-- see http://www.haskell.org/pipermail/haskell-cafe/2009-November/069490.html
-- | trims a string both on left and right hand side
trim :: String -> String
trim = trimRight . trimLeft
-- | trims a string only on the left side
trimLeft :: String -> String
trimLeft = dropWhile isSpace
-- | trims a string only on the right side
trimRight :: String -> String
trimRight = foldr (\ c cs -> if isSpace c && null cs then [] else c : cs) ""
{-
Convert CamelCased or mixedCases 'String' to a 'String' with underscores,
the \"snake\" 'String'.
It also considers SCREAMINGCamelCase:
`toSnakeCase "SomeSCREAMINGCamelCase" == "some_screaming_camel_case"`
-}
toSnakeCase :: String -> String
toSnakeCase c = if hasBump c then unScream c else mkSnake c where
hasBump s = case s of
a : b : _ -> isUpper a && isLower b
_ -> False
unScream s = case s of
a : r -> toLower a : mkSnake r
_ -> s
mkSnake s = let newSnake t = '_' : unScream t in case s of
a : r@(b : _) | hasBump [b, a] -> a : newSnake r
_ | hasBump s -> newSnake s
_ -> unScream s
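-- Another illustrative input/output pair, derived from the rules above:
-- toSnakeCase "fooBar" == "foo_bar"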
{- | The 'nubWith' function accepts as an argument a \"stop list\" update
function and an initial stop list. The stop list is a set of list elements
that 'nubWith' uses as a filter to remove duplicate elements. The stop list
is normally initially empty. The stop list update function is given a list
element a and the current stop list b, and returns 'Nothing' if the element is
already in the stop list, else 'Just' b', where b' is a new stop list updated
to contain a. -}
nubWith :: (a -> b -> Maybe b) -> b -> [a] -> [a]
nubWith f s es = case es of
[] -> []
e : rs -> case f e s of
Just s' -> e : nubWith f s' rs
Nothing -> nubWith f s rs
nubOrd :: Ord a => [a] -> [a]
nubOrd = nubOrdOn id
nubOrdOn :: Ord b => (a -> b) -> [a] -> [a]
nubOrdOn g = let f a s = let e = g a in
if Set.member e s then Nothing else Just (Set.insert e s)
in nubWith f Set.empty
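{- Examples for the two wrappers above (expected results, given the
definitions): the first occurrence of each element/key wins and input order
is preserved.
nubOrd [3,1,3,2,1] == [3,1,2]
nubOrdOn fst [(1,'a'),(1,'b'),(2,'c')] == [(1,'a'),(2,'c')] -}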
-- | safe variant of !!
atMaybe :: [a] -> Int -> Maybe a
atMaybe l i = if i < 0 then Nothing else case l of
[] -> Nothing
x : r -> if i == 0 then Just x else atMaybe r (i - 1)
readMaybe :: Read a => String -> Maybe a
readMaybe s = case filter (all isSpace . snd) $ reads s of
[(a, _)] -> Just a
_ -> Nothing
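{- Illustrative checks for the two safe helpers above:
atMaybe "abc" 1 == Just 'b'
atMaybe "abc" 5 == Nothing
readMaybe "42" == Just (42 :: Int)
readMaybe "42x" == (Nothing :: Maybe Int) -}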
-- | generalization of mapAccumL to monads
mapAccumLM :: Monad m
=> (acc -> x -> m (acc, y))
{- ^ Function taking accumulator and list element,
returning new accumulator and modified list element -}
-> acc -- ^ Initial accumulator
-> [x] -- ^ Input list
-> m (acc, [y]) -- ^ Final accumulator and result list
mapAccumLM f s l = case l of
[] -> return (s, [])
x : xs -> do
(s', y) <- f s x
(s'', ys) <- mapAccumLM f s' xs
return (s'', y : ys)
-- | generalization of mapAccumL to monads with combine function
mapAccumLCM :: (Monad m) => (a -> b -> c) -> (acc -> a -> m (acc, b))
-> acc -> [a] -> m (acc, [c])
mapAccumLCM g f s l = case l of
[] -> return (s, [])
x : xs -> do
(s', y) <- f s x
(s'', ys) <- mapAccumLCM g f s' xs
return (s'', g x y : ys)
{- | Monadic version of concatMap
taken from http://darcs.haskell.org/ghc/compiler/utils/MonadUtils.hs -}
concatMapM :: Monad m => (a -> m [b]) -> [a] -> m [b]
concatMapM f = liftM concat . mapM f
-- | composition of arbitrary maps
composeMap :: Ord a => Map.Map a b -> Map.Map a a -> Map.Map a a -> Map.Map a a
composeMap s m1 m2 = if Map.null m2 then m1 else Map.intersection
(Map.foldWithKey ( \ i j ->
let k = Map.findWithDefault j j m2 in
if i == k then Map.delete i else Map.insert i k) m2 m1) s
-- | keep only minimal elements
keepMins :: (a -> a -> Bool) -> [a] -> [a]
keepMins lt l = case l of
[] -> []
x : r -> let s = filter (not . lt x) r
m = keepMins lt s
in if any (`lt` x) s then m
else x : m
{- |
A function inspired by the perl function split. A list is split
on a separator element into smaller non-empty lists.
The separator element is dropped from the resulting list.
-}
splitOn :: Eq a => a -- ^ separator
-> [a] -- ^ list to split
-> [[a]]
splitOn x = filter (not . null) . splitBy x
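-- For instance ('splitBy', used above, is defined further below):
-- splitOn ',' ",a,,b," == ["a","b"]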
-- | split a colon (or on windows semicolon) separated list of paths
splitPaths :: String -> [FilePath]
splitPaths = splitOn
#ifdef UNIX
':'
#else
';'
#endif
{- |
Same as splitOn but empty lists are kept. Even the empty list is split into
a singleton list containing the empty list.
-}
splitBy :: Eq a => a -- ^ separator
-> [a] -- ^ list to split
-> [[a]]
splitBy c l = let (p, q) = break (c ==) l in p : case q of
[] -> []
_ : r -> splitBy c r
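-- Contrast with 'splitOn': here the empty chunks are kept, e.g.
-- splitBy ',' ",a,,b," == ["","a","","b",""]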
{- | Same as splitBy but the separator is a sublist not only one element.
Note that the separator must be non-empty. -}
splitByList :: Eq a => [a] -> [a] -> [[a]]
splitByList sep l = case l of
[] -> [[]]
h : t -> case stripPrefix sep l of
Just suf -> [] : splitByList sep suf
Nothing -> let f : r = splitByList sep t in (h : f) : r
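-- A quick check of the multi-element separator variant:
-- splitByList ", " "a, b, c" == ["a","b","c"]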
{- | If the given string is terminated by a decimal number
this number and the nonnumber prefix is returned. -}
numberSuffix :: String -> Maybe (String, Int)
numberSuffix s =
let f a r@(x, y, b) =
let b' = isDigit a
y' = y + x * digitToInt a
out | not b = r
| b && b' = (x * 10, y', b')
| otherwise = (x, y, False)
in out
in case foldr f (1, 0, True) s of
(1, _, _) ->
Nothing
(p, n, _) ->
Just (take (1 + length s - length (show p)) s, n)
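{- Expected behaviour on two sample inputs (derived from the fold above):
numberSuffix "foo123" == Just ("foo", 123)
numberSuffix "foo" == Nothing -}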
{- |
A function inspired by a perl function from the standard perl-module
File::Basename. It removes the directory part of a filepath.
-}
basename :: FilePath -> FilePath
basename = snd . stripDir
{- |
A function inspired by a perl function from the standard perl-module
File::Basename. It gives the directory part of a filepath.
-}
dirname :: FilePath -> FilePath
dirname = fst . stripDir
{- |
A function inspired by a perl function from the standard perl-module
File::Basename. It splits a filepath into the basename, the
directory and gives the suffix that matched from the list of
suffixes. If a suffix matched it is removed from the basename.
-}
fileparse :: [String] -- ^ list of suffixes
-> FilePath
-> (FilePath, FilePath, Maybe String)
-- ^ (basename,directory,matched suffix)
fileparse sufs fp = let (path, base) = stripDir fp
(base', suf) = stripSuffix sufs base
in (base', path, suf)
stripDir :: FilePath -> (FilePath, FilePath)
stripDir =
(\ (x, y) -> (if null y then "./" else reverse y, reverse x))
. break (== '/') . reverse
stripSuffix :: [String] -> FilePath -> (FilePath, Maybe String)
stripSuffix suf fp = case filter isJust $ map (stripSuf fp) suf of
Just (x, s) : _ -> (x, Just s)
_ -> (fp, Nothing)
where stripSuf f s | s `isSuffixOf` f =
Just (take (length f - length s) f, s)
| otherwise = Nothing
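{- Examples for the two helpers above (POSIX-style paths assumed):
stripDir "dir/sub/file.txt" == ("dir/sub/", "file.txt")
stripSuffix [".txt", ".hs"] "file.txt" == ("file", Just ".txt") -}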
{- |
This function generalizes makeRelative in that it computes also a relative
path with descents such as ../../test.txt
-}
makeRelativeDesc :: FilePath -- ^ path to a directory
-> FilePath -- ^ to be computed relatively to given directory
-> FilePath -- ^ resulting relative path
makeRelativeDesc dp fp = f dp fp []
where f "" y l = joinPath $ l ++ [y]
f x y l = let y' = makeRelative x y
in if equalFilePath y y'
then f (takeDirectory x) y $ ".." : l
else joinPath $ l ++ [y']
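-- e.g., on POSIX-style absolute paths:
-- makeRelativeDesc "/a/b/c" "/a/d/test.txt" == "../../d/test.txt"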
{- | filter a map according to a given list of keys (it doesn't hurt
if a key is not present in the map) -}
filterMapWithList :: Ord k => [k] -> Map.Map k e -> Map.Map k e
filterMapWithList = filterMapWithSet . Set.fromList
{- | filter a map according to a given set of keys (it doesn't hurt if
a key is not present in the map) -}
filterMapWithSet :: Ord k => Set.Set k -> Map.Map k e -> Map.Map k e
filterMapWithSet s = Map.filterWithKey (\ k _ -> Set.member k s)
{- | get, parse and check an environment variable; provide the default
value, only if the environment variable is not set or the
parse-check-function returns Nothing -}
getEnvSave :: a -- ^ default value
-> String -- ^ name of environment variable
-> (String -> Maybe a) -- ^ parse and check value of variable
-> IO a
getEnvSave defValue envVar readFun =
liftM (maybe defValue (fromMaybe defValue . readFun)
. lookup envVar) getEnvironment
-- | get environment variable
getEnvDef :: String -- ^ environment variable
-> String -- ^ default value
-> IO String
getEnvDef envVar defValue = getEnvSave defValue envVar Just
-- | the timeout function taking seconds instead of microseconds
timeoutSecs :: Int -> IO a -> IO (Maybe a)
timeoutSecs time = timeout $ let
m = 1000000
u = div maxBound m
in if time > u then maxBound else
if time < 1 then 100000 -- 1/10 of a second
else m * time
-- | like readProcessWithExitCode, but checks the command argument first
executeProcess
:: FilePath -- ^ command to run
-> [String] -- ^ any arguments
-> String -- ^ standard input
-> IO (ExitCode, String, String) -- ^ exitcode, stdout, stderr
executeProcess cmd args input = do
mp <- findExecutable cmd
case mp of
Nothing -> return (ExitFailure 127, "", "command not found: " ++ cmd)
Just exe -> readProcessWithExitCode exe args input
-- | runs a command with timeout
timeoutCommand :: Int -> FilePath -> [String]
-> IO (Maybe (ExitCode, String, String))
timeoutCommand time cmd args =
timeoutSecs time $
executeProcess cmd args "" -- no input from stdin
{- | runs an action in a different directory without changing the current
directory globally. -}
withinDirectory :: FilePath -> IO a -> IO a
withinDirectory p a = do
d <- getCurrentDirectory
setCurrentDirectory p
r <- a
setCurrentDirectory d
return r
-- | calls openTempFile but directly writes content and closes the file
writeTempFile :: String -- ^ Content
-> FilePath -- ^ Directory in which to create the file
-> String -- ^ File name template
-> IO FilePath -- ^ create file
writeTempFile str tmpDir file = do
(tmpFile, hdl) <- openTempFile tmpDir file
hPutStr hdl str
hFlush hdl
hClose hdl
return tmpFile
-- | create file in temporary directory (the first argument is the content)
getTempFile :: String -- ^ Content
-> String -- ^ File name template
-> IO FilePath -- ^ create file
getTempFile str file = do
tmpDir <- getTemporaryDirectory
writeTempFile str tmpDir file
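{- Hypothetical usage sketch (the content and the name template are made up):
do fp <- getTempFile "some contents" "hets-tmp.txt"
-- ... use fp ...
removeFile fp -}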
#ifdef UNIX
getTempFifo :: String -> IO FilePath
getTempFifo f = do
tmpDir <- getTemporaryDirectory
(tmpFile, hdl) <- openTempFile tmpDir f
hClose hdl
removeFile tmpFile
createNamedPipe tmpFile $ unionFileModes ownerReadMode ownerWriteMode
return tmpFile
#else
getTempFifo :: String -> IO FilePath
getTempFifo _ = return ""
#endif
#ifdef UNIX
type Pipe = (IO (), MVar String)
#endif
#ifdef UNIX
openFifo :: FilePath -> IO Pipe
openFifo fp = do
mvar <- newEmptyMVar
let readF fd = forever (fmap fst (fdRead fd 100) >>= putMVar mvar)
`Exception.catch`
\ e -> const (threadDelay 100) (e :: Exception.IOException)
let reader = forever $ do
fd <- openFd fp ReadWrite Nothing defaultFileFlags
readF fd `Exception.catch`
\ e -> closeFd fd >>
if isEOFError e then reader
else throwIO (e :: Exception.IOException)
return (reader, mvar)
readFifo' :: MVar String -> IO [String]
readFifo' mvar = do
x <- unsafeInterleaveIO $ takeMVar mvar
xs <- unsafeInterleaveIO $ readFifo' mvar
return $ x : xs
readFifo :: FilePath -> IO ([String], IO ())
readFifo fp = do
(reader, pipe) <- openFifo fp
tid <- forkIO reader
l <- readFifo' pipe
m <- newEmptyMVar
forkIO $ takeMVar m >> killThread tid
return (l, putMVar m ())
#else
readFifo :: FilePath -> IO ([String], IO ())
readFifo _ = return ([], return ())
#endif
|
mariefarrell/Hets
|
Common/Utils.hs
|
gpl-2.0
| 16,000 | 0 | 20 | 4,271 | 4,396 | 2,308 | 2,088 | 286 | 6 |
{-# LANGUAGE GADTs #-}
module HN.Optimizer.Utils where
import Compiler.Hoopl
import qualified Data.Map as M
import Data.Maybe
import HN.Optimizer.ClassyLattice
import HN.Optimizer.Node
rewriteExitF :: (DefinitionNode -> f -> Maybe DefinitionNode) -> Node e x -> f -> Maybe (Graph Node e x)
rewriteExitF _ (Entry _) _ = Nothing
rewriteExitF rewriteDefinition (Exit n) f = mkLast . Exit <$> rewriteDefinition n f
rewriteExitB :: (DefinitionNode -> FactBase f -> Maybe DefinitionNode) -> Node e x -> Fact x f -> Maybe (Graph Node e x)
rewriteExitB _ (Entry _) _ = Nothing
rewriteExitB rf (Exit dn) f = mkLast . Exit <$> rf dn f
transferExitF :: (DefinitionNode -> f -> FactBase f) -> Node e x -> f -> Fact x f
transferExitF _ (Entry _) f = f
transferExitF tf (Exit n) f = tf n f
transferExitB :: (DefinitionNode -> FactBase f -> f) -> Node e x -> Fact x f -> f
transferExitB _ (Entry _) f = f
transferExitB tf (Exit n) f = tf n f
mergeFact label current base = let
update fact = (fact, M.insert label fact base)
in case M.lookup label base of
Nothing -> update current
Just baseFact -> case join (OldFact baseFact) (NewFact current) of
Nothing -> (baseFact, base)
Just newFact -> update newFact
transferMapExitB :: Lattice f => (DefinitionNode -> FactBase f -> f) -> Node e x -> Fact x (MapFact f) -> MapFact f
transferMapExitB _ (Entry l) (curFact, factBase) = mergeFact l curFact factBase
transferMapExitB tf (Exit dn) f = (tf dn (mapMap fst $ convertFactBase f), bot)
type MapFact f = (f, M.Map Label f)
transferMapExitF :: Lattice f => (DefinitionNode -> f -> [(Label, f)]) -> Node e x -> MapFact f -> Fact x (MapFact f)
transferMapExitF _ (Entry l) (curFact, factBase) = mergeFact l curFact factBase
transferMapExitF tf nn @ (Exit n) (f, m) = distributeFact nn $ (,) bot $ foldr (uncurry $ M.insertWith mereJoin) m $ tf n f
noTransferMapF :: Lattice f => FwdTransfer Node (MapFact f)
noTransferMapF = mkFTransfer $ transferMapExitF (\_ _ -> [])
noTransferMapB :: Lattice f => BwdTransfer Node (MapFact f)
noTransferMapB = mkBTransfer $ transferMapExitB (\_ _ -> bot)
convertFactBase :: Lattice f => FactBase (MapFact f) -> FactBase (MapFact f)
convertFactBase f = mapSquare $ foldr foo M.empty $ concatMap ff $ mapToList f where
ff (l, (f, m)) = (l, f) : M.toList m
foo (l, f) = M.insertWith mereJoin l f
mapSquare1 :: Lattice f => M.Map Label f -> Label -> MapFact f
mapSquare1 m l = (fromMaybe bot $ M.lookup l m, m)
mapSquare :: Lattice f => M.Map Label f -> FactBase (MapFact f)
mapSquare m = mapFromList $ map ff $ M.keys m where
ff l = (l, mapSquare1 m l)
|
kayuri/HNC
|
HN/Optimizer/Utils.hs
|
lgpl-3.0
| 2,596 | 1 | 13 | 496 | 1,193 | 596 | 597 | 46 | 3 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TupleSections #-}
module HERMIT.Dictionary.Remembered
( -- * Remembering definitions.
externals
, prefixRemembered
, rememberR
, unfoldRememberedR
, foldRememberedR
, foldAnyRememberedR
, compileRememberedT
) where
import Control.Monad
import qualified Data.Map as Map
import Data.List (isPrefixOf)
import Data.Monoid
import HERMIT.Context
import HERMIT.Core
import HERMIT.External
import HERMIT.GHC hiding ((<>), (<+>), nest, ($+$))
import HERMIT.Kure
import HERMIT.Lemma
import HERMIT.Monad
import HERMIT.PrettyPrinter.Common
import HERMIT.Dictionary.Fold hiding (externals)
import HERMIT.Dictionary.Reasoning hiding (externals)
------------------------------------------------------------------------------
externals :: [External]
externals =
[ external "remember" (promoteCoreT . rememberR :: LemmaName -> TransformH LCore ())
[ "Remember the current binding, allowing it to be folded/unfolded in the future." ] .+ Context
, external "unfold-remembered" (promoteExprR . unfoldRememberedR Obligation :: LemmaName -> RewriteH LCore)
[ "Unfold a remembered definition." ] .+ Deep .+ Context
, external "fold-remembered" (promoteExprR . foldRememberedR Obligation :: LemmaName -> RewriteH LCore)
[ "Fold a remembered definition." ] .+ Context .+ Deep
, external "fold-any-remembered" (promoteExprR foldAnyRememberedR :: RewriteH LCore)
[ "Attempt to fold any of the remembered definitions." ] .+ Context .+ Deep
, external "show-remembered" (promoteCoreT . showLemmasT (Just "remembered-") :: PrettyPrinter -> PrettyH LCore)
[ "Display all remembered definitions." ]
]
------------------------------------------------------------------------------
prefixRemembered :: LemmaName -> LemmaName
prefixRemembered = ("remembered-" <>)
-- | Remember a binding with a name for later use. Allows us to look at past definitions.
rememberR :: (AddBindings c, ExtendPath c Crumb, ReadPath c Crumb, HasLemmas m, MonadCatch m)
=> LemmaName -> Transform c m Core ()
rememberR nm = prefixFailMsg "remember failed: " $ do
Def v e <- setFailMsg "not applied to a binding." $ defOrNonRecT idR idR Def
insertLemmaT (prefixRemembered nm) $ Lemma (mkClause [] (varToCoreExpr v) e) Proven NotUsed
-- | Unfold a remembered definition (like unfoldR, but looks in stash instead of context).
unfoldRememberedR :: (LemmaContext c, ReadBindings c, HasLemmas m, MonadCatch m)
=> Used -> LemmaName -> Rewrite c m CoreExpr
unfoldRememberedR u = prefixFailMsg "Unfolding remembered definition failed: " . forwardT . lemmaBiR u . prefixRemembered
-- | Fold a remembered definition (like foldR, but looks in stash instead of context).
foldRememberedR :: (LemmaContext c, ReadBindings c, HasLemmas m, MonadCatch m)
=> Used -> LemmaName -> Rewrite c m CoreExpr
foldRememberedR u = prefixFailMsg "Folding remembered definition failed: " . backwardT . lemmaBiR u . prefixRemembered
-- | Fold any of the remembered definitions.
foldAnyRememberedR :: (LemmaContext c, ReadBindings c, HasLemmas m, MonadCatch m)
=> Rewrite c m CoreExpr
foldAnyRememberedR = setFailMsg "Fold failed: no definitions could be folded."
$ compileRememberedT >>= runFoldR
-- | Compile all remembered definitions into something that can be run with `runFoldR`
compileRememberedT :: (LemmaContext c, HasLemmas m, Monad m) => Transform c m x CompiledFold
compileRememberedT = do
qs <- liftM (map lemmaC . Map.elems . Map.filterWithKey (\ k _ -> "remembered-" `isPrefixOf` show k)) getLemmasT
return $ compileFold $ concatMap (map flipEquality . toEqualities) qs -- fold rhs to lhs
|
ku-fpg/hermit
|
src/HERMIT/Dictionary/Remembered.hs
|
bsd-2-clause
| 4,025 | 0 | 15 | 855 | 839 | 452 | 387 | 61 | 1 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE CPP #-}
-- | SHA256 digest
module Distribution.Server.Features.Security.SHA256 (
SHA256Digest
, sha256
, sha256DigestBytes
) where
-- stdlibs
import Control.Applicative
import Control.DeepSeq
import Control.Monad
import qualified Data.ByteString as BS
import qualified Data.ByteString.Base16 as B16
import Data.SafeCopy
import Data.Serialize as Ser
#if MIN_VERSION_binary(0,8,3)
import Data.ByteString.Builder.Extra as BS
#endif
import qualified Data.Binary as Bin
import qualified Data.Binary.Put as Bin
import qualified Data.ByteString.Char8 as BS.Char8
import qualified Data.ByteString.Lazy as BS.Lazy
import Data.Word
-- cryptohash
import qualified Crypto.Hash.SHA256 as SHA256
-- hackage
import Distribution.Server.Framework.MemSize
import Distribution.Server.Util.ReadDigest
-- | SHA256 digest
data SHA256Digest = SHA256Digest {-# UNPACK #-} !Word64 {-# UNPACK #-} !Word64
{-# UNPACK #-} !Word64 {-# UNPACK #-} !Word64
deriving (Eq)
instance NFData SHA256Digest where
rnf !_ = () -- 'SHA256Digest' has only strict primitive fields, hence WHNF==NF
-- internal convenience helper
-- fails if input has wrong length; callers must ensure correct length
sha256digestFromBS :: BS.ByteString -> SHA256Digest
sha256digestFromBS bs = case runGet getSHA256NoPfx bs of
Left e -> error ("sha256digestFromBS: " ++ e)
Right d -> d
-- | 'Data.Serialize.Get' helper to read a raw 32byte SHA256Digest w/o
-- any length-prefix
getSHA256NoPfx :: Get SHA256Digest
getSHA256NoPfx = SHA256Digest <$> getWord64be
<*> getWord64be
<*> getWord64be
<*> getWord64be
-- | The 'Show' instance for 'SHA256Digest' prints the underlying digest
-- (without showing the newtype wrapper)
--
-- For legacy reasons, this instance emits the base16 encoded digest
-- string without surrounding quotation marks
instance Show SHA256Digest where
show = BS.Char8.unpack . B16.encode . sha256DigestBytes
instance ReadDigest SHA256Digest where
-- NOTE: This differs in an important way from the 'Serialize' instance:
-- the base16-encoded digest doesn't have a length prefix
readDigest str =
case B16.decode (BS.Char8.pack str) of
(d,rest) | BS.null rest
, BS.length d == 32 -> Right $! sha256digestFromBS d
| otherwise -> Left $ "Could not decode SHA256 " ++
show str
-- | Compute SHA256 digest
sha256 :: BS.Lazy.ByteString -> SHA256Digest
sha256 = sha256digestFromBS . SHA256.hashlazy
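-- Sanity check (the expected value is the well-known SHA256 digest of the
-- empty input):
-- show (sha256 BS.Lazy.empty)
-- == "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"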
instance MemSize SHA256Digest where
memSize _ = 5
instance SafeCopy SHA256Digest where
-- use default Serialize instance
-- For legacy reasons this length-prefixes the serialised digest
instance Serialize SHA256Digest where
put (SHA256Digest w1 w2 w3 w4) =
do put (32 :: Int)
putWord64be w1
putWord64be w2
putWord64be w3
putWord64be w4
get = do lenpfx <- get
unless (lenpfx == (32 :: Int)) $
fail "Bytestring of the wrong length"
getSHA256NoPfx
-- | Export 'SHA256Digest' as raw 32-byte 'BS.ByteString' digest-value
sha256DigestBytes :: SHA256Digest -> BS.ByteString
sha256DigestBytes =
toBs . putSHA256Digest
where
putSHA256Digest :: SHA256Digest -> Bin.PutM ()
putSHA256Digest (SHA256Digest w1 w2 w3 w4)
= Bin.putWord64be w1 >> Bin.putWord64be w2 >>
Bin.putWord64be w3 >> Bin.putWord64be w4
toBs :: Bin.Put -> BS.ByteString
#if MIN_VERSION_binary(0,8,3)
-- with later binary versions we can control the buffer size precisely:
toBs = BS.Lazy.toStrict
. BS.toLazyByteStringWith (BS.untrimmedStrategy 32 0) BS.Lazy.empty
. Bin.execPut
#else
toBs = BS.Lazy.toStrict . Bin.runPut
#endif
|
agrafix/hackage-server
|
Distribution/Server/Features/Security/SHA256.hs
|
bsd-3-clause
| 4,193 | 0 | 14 | 1,192 | 719 | 400 | 319 | 69 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE ViewPatterns #-}
module Domain.Tenant where
import Domain.Base
import Opaleye
import Data.Text hiding (head)
import Control.Monad.IO.Class (liftIO)
import Control.Lens
import qualified Database.PostgreSQL.Simple as PGS
import Control.Monad.Reader (runReaderT)
import Data.String.Conv
import Control.Monad.Catch
import Database.PostgreSQL.Simple.Errors
import Database.PostgreSQL.Simple (SqlError)
--
-- Tenant creation
--
data TenantCreationError = DuplicateBackofficeDomainError Text deriving Show
-- NOTE: Going with simple data-types for now (i.e. not creating different
-- data-types for Active/Inactive tenant). Let's see how this pans out.
-- tenantPgToApp :: TenantPGRead -> Tenant
-- tenantPgToApp pgTenant =
createTenant :: NewTenant -> AppM (Either TenantCreationError Tenant)
createTenant newTenant = do
conn <- askDbConnection
liftIO $ catchViolation catcher (createTenant_ conn)
where
createTenant_ conn = (Right . head) <$> runInsertManyReturning conn tenantTable [newTenantToPg newTenant] id
-- TODO: Figure out how to write this in a pattern-matching style
catcher _ (UniqueViolation s) | s == toS "idx_unique_tenants_backoffice_domain" = return $ Left $ DuplicateBackofficeDomainError $ newTenant ^. backofficeDomain
catcher sqlError _ = throwM sqlError
-- activateTenant :: TenantId -> AppM (Tenant)
-- activateTenant tenantId@(TenantId tid) = do
-- conn <- askDbConnection
-- liftIO $ do
-- _ <- runUpdate conn tenantTable
-- (\tenant -> tenant & status .~ TenantActive)
-- (\tenant -> (tenant ^. key .== pgInt8 tid))
-- head <$> runQuery conn (tenantById tenantId)
--
-- main
--
testHarness = do
conn <- PGS.connect PGS.defaultConnectInfo{
PGS.connectUser = "servant_opaleye"
,PGS.connectPassword = "123"
,PGS.connectDatabase = "servant_opaleye"
}
let newTenant = Tenant{
tenantKey = ()
,tenantCreatedAt = ()
,tenantUpdatedAt = ()
,tenantStatus = ()
,tenantOwnerId = Nothing
,tenantName = toS "Vacation Lab4"
,tenantBackofficeDomain = toS "http://app.vacatinlabs.com/vl4"
}
runReaderT (createTenant newTenant) AppConfig{appConfigDbPool=conn}
-- where
-- action = do
-- conn <- askDbConnection
-- liftIO $ (runQuery conn (tenantById $ TenantId 3) :: IO [Tenant])
|
meditans/haskell-webapps
|
ServantOpaleye/Domain/Tenant.hs
|
mit
| 2,439 | 0 | 12 | 455 | 412 | 240 | 172 | 38 | 2 |
module Shift.Meta
( Meta (..)
, vector
, meta
)
where
-- -- $Id$
import Util.Size
import ToDoc
import Shift.Type
import Shift.Verify
import Shift.Computer (find, ffind, zustands_folge, next0)
import Reporter
vector :: Meta -> Int -> Pins
vector me k = do
(x, d) <- zip (start me) (diff me)
return $ x + k * d
meta :: Int -> Meta -> Reporter Int
meta limit me = do
inform $ text "Sie haben eingesandt:" <+> toDoc me
newline
let shs = do
(k, (q,p)) <- zip [0..] $ qps me
return $ Shift { pins = vector me k
, vorperiode = q
, periode = p
}
inform $ text "Ich prüfe die Shift-Instanzen"
inform $ toDoc shs
sequence_ $ map (silent . verify limit) shs
inform $ text "OK"
newline
let ps = map periode shs
inform $ text "Ich untersuche das Wachstum der Periodenlängen."
delta 0 ps
delta :: Int -> [Int] -> Reporter Int
delta deg ps = do
inform $ fsep [ text "Betrachte die", toDoc deg <> text "-te"
, text "Differenzenfolge", toDoc ps
]
when ( length ps < deg )
$ reject $ text "Diese Folge ist zu kurz,"
<+> text "ich kann das Wachstum nicht bestimmen."
if ( all (== 0) ps )
then do
inform $ text "Diese Folge ist konstant 0,"
inform $ text "deswegen ist die Original-Folge wahrscheinlich"
inform $ text "ein Polynom vom Grad" <+> toDoc (deg-1)
return (deg-1)
else do
when ( any ( < 0 ) ps )
$ reject $ text "Einige Elemente sind < 0"
delta (deg + 1) $ zipWith (-) (tail ps) ps
|
Erdwolf/autotool-bonn
|
src/Shift/Meta.hs
|
gpl-2.0
| 1,562 | 14 | 19 | 462 | 555 | 283 | 272 | 48 | 2 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE CPP #-}
module Utils
( globalAdmin
, globalHost
, globalHttpPort
, shell
, withGlobalConn
, ShellFailure(..)
) where
#if __GLASGOW_HASKELL__ < 710
import Control.Applicative
#endif
import Control.Exception
import Control.Monad
import Data.Typeable
import Data.Yaml.TH (decodeFile)
import Internal
import System.Exit
import System.IO.Unsafe (unsafePerformIO)
import System.Timeout
import qualified Network.Riak as Riak
import qualified Network.Riak.Basic as B
import Network.Riak.Connection.Pool (Pool, create, withConnection)
import qualified System.Process as Process
config :: Config
config = $$(decodeFile "tests/test.yaml")
-- | The global riak-admin string, configured in test.yaml.
globalAdmin :: String
globalAdmin = configAdmin config
globalHost :: String
globalHost = configHost config
globalHttpPort :: Int
globalHttpPort = configHttpPort config
-- | Run action in some Riak connection
withGlobalConn :: (B.Connection -> IO a) -> IO a
withGlobalConn = withConnection pool
-- | The global riak pool that all tests share.
pool :: Pool
pool = unsafePerformIO (create client 1 1 1)
where
client = Riak.defaultClient
{ Riak.host = globalHost
, Riak.port = show (configProtoPort config)
}
{-# NOINLINE pool #-}
data ShellFailure
= ShellFailure Int String
| ShellTimeout String
deriving (Show, Typeable)
instance Exception ShellFailure
-- | Run a shell command (inheriting stdin, stdout, and stderr), and throw an
-- exception if it fails. Time out after 30 seconds.
shell :: String -> IO ()
shell s =
timeout (30*1000*1000) act >>= \case
Nothing -> throw (ShellTimeout s)
_ -> pure ()
where
act :: IO ()
act =
bracketOnError
(do
(_, _, _, h) <- Process.createProcess (Process.shell s)
pure h)
Process.terminateProcess
(Process.waitForProcess >=> \case
ExitSuccess -> pure ()
ExitFailure n -> throw (ShellFailure n s))
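-- Hypothetical example (the riak-admin subcommand is illustrative only):
-- shell (globalAdmin ++ " bucket-type list")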
|
k-bx/riak-haskell-client
|
tests/Utils.hs
|
apache-2.0
| 2,086 | 0 | 15 | 425 | 500 | 281 | 219 | -1 | -1 |
-- | Internal types used by the library.
module Stack.Types.Internal where
import Control.Concurrent.MVar
import Control.Monad.Logger (LogLevel)
import Data.Text (Text)
import Network.HTTP.Client.Conduit (Manager,HasHttpManager(..))
import Stack.Types.Config
-- | Monadic environment.
data Env config =
Env {envConfig :: !config
,envLogLevel :: !LogLevel
,envTerminal :: !Bool
,envReExec :: !Bool
,envManager :: !Manager
,envSticky :: !Sticky}
instance HasStackRoot config => HasStackRoot (Env config) where
getStackRoot = getStackRoot . envConfig
instance HasPlatform config => HasPlatform (Env config) where
getPlatform = getPlatform . envConfig
instance HasConfig config => HasConfig (Env config) where
getConfig = getConfig . envConfig
instance HasBuildConfig config => HasBuildConfig (Env config) where
getBuildConfig = getBuildConfig . envConfig
instance HasEnvConfig config => HasEnvConfig (Env config) where
getEnvConfig = getEnvConfig . envConfig
instance HasHttpManager (Env config) where
getHttpManager = envManager
class HasLogLevel r where
getLogLevel :: r -> LogLevel
instance HasLogLevel (Env config) where
getLogLevel = envLogLevel
instance HasLogLevel LogLevel where
getLogLevel = id
class HasTerminal r where
getTerminal :: r -> Bool
instance HasTerminal (Env config) where
getTerminal = envTerminal
class HasReExec r where
getReExec :: r -> Bool
instance HasReExec (Env config) where
getReExec = envReExec
newtype Sticky = Sticky
{ unSticky :: Maybe (MVar (Maybe Text))
}
class HasSticky r where
getSticky :: r -> Sticky
instance HasSticky (Env config) where
getSticky = envSticky
|
duplode/stack
|
src/Stack/Types/Internal.hs
|
bsd-3-clause
| 1,692 | 0 | 11 | 307 | 471 | 255 | 216 | 57 | 0 |
#!/usr/bin/env runhaskell
import Data.List
import System
main = do
args <- getArgs
case args of
["gather", rtsStat, combinatorrentStat, timeStat] ->
gatherStats rtsStat combinatorrentStat timeStat
["present", database] -> presentStats database
gatherStats rtsStat combinatorrentStat timeStat = do
tStat <- readTimes timeStat
cStat <- readCombinatorrentStat combinatorrentStat
rStat <- readRtsStat rtsStat
putStrLn $ show (tStat ++ cStat ++ rStat)
readRtsStat :: FilePath -> IO [(String, String)]
readRtsStat fp = do
cts <- readFile fp
return $ read . unlines . tail . lines $ cts
readCombinatorrentStat :: FilePath -> IO [(String, String)]
readCombinatorrentStat fp = do
cts <- readFile fp
let d = read cts :: [(String, String)]
return $ map (\(k, v) -> (k, show v)) d
readTimes :: FilePath -> IO [(String, String)]
readTimes timeStat = do
contents <- readFile timeStat
let [s, e] = (map read . lines $ contents) :: [Integer]
return [("start_time", show s)
,("end_time" , show e)]
presentStats db = do
cts <- readFile db
let ls = map read . lines $ cts
putStrLn "#Start\tEnd\tMaxBytesUsed\tPeakMegabytesAlloc\tMutCPU\tGCCPU\tUploaded\tDownloaded"
let formatted = map (format ["start_time", "end_time", "max_bytes_used",
"peak_megabytes_allocated",
"mutator_cpu_seconds",
"GC_cpu_seconds",
"uploaded", "downloaded"]) ls
mapM_ putStrLn formatted
format :: [String] -> [(String, String)] -> String
format cols row = concat $ intersperse "\t" entries
where entries = map (\c -> case find ((==c) . fst) row of
Nothing -> error "Column doesn't exist"
Just x -> snd x) cols
|
beni55/combinatorrent
|
tools/postproc.hs
|
bsd-2-clause
| 1,903 | 1 | 13 | 563 | 606 | 306 | 300 | 43 | 2 |
module Text.Parsec.String.Expr (buildExpressionParser
,Operator(..)
,OperatorTable
,E.Assoc(..)
)where
{-
Wrappers for the Text.Parsec.Expr module with simplified types.
-}
import Text.Parsec.String (Parser)
import qualified Text.Parsec.Expr as E
-- not sure if this is necessary, or whether a type alias would be good
-- enough
data Operator a = Infix (Parser (a -> a -> a)) E.Assoc
| Prefix (Parser (a -> a))
| Postfix (Parser (a -> a))
type OperatorTable a = [[Operator a]]
buildExpressionParser :: OperatorTable a -> Parser a -> Parser a
buildExpressionParser t = E.buildExpressionParser (map (map f) t)
where
f (Infix p a) = E.Infix p a
f (Prefix p) = E.Prefix p
f (Postfix p) = E.Postfix p
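{- A minimal, hypothetical usage sketch (the names 'table' and 'expr' and the
extra imports are illustrative, not part of this module): a left-associative
integer expression parser with '+' and '*', with '*' binding tighter because
its row comes first in the table.

import Text.Parsec (char, digit, many1)

table :: OperatorTable Integer
table = [ [Infix (char '*' >> return (*)) E.AssocLeft]
        , [Infix (char '+' >> return (+)) E.AssocLeft] ]

expr :: Parser Integer
expr = buildExpressionParser table (fmap read (many1 digit))
-}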
|
EliuX/AdHocParser
|
src/Text/Parsec/String/Expr.hs
|
bsd-3-clause
| 867 | 0 | 11 | 288 | 250 | 139 | 111 | 15 | 3 |