code stringlengths 5 to 1.03M | repo_name stringlengths 5 to 90 | path stringlengths 4 to 158 | license stringclasses 15 values | size int64 5 to 1.03M | n_ast_errors int64 0 to 53.9k | ast_max_depth int64 2 to 4.17k | n_whitespaces int64 0 to 365k | n_ast_nodes int64 3 to 317k | n_ast_terminals int64 1 to 171k | n_ast_nonterminals int64 1 to 146k | loc int64 -1 to 37.3k | cycloplexity int64 -1 to 1.31k |
---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE FlexibleInstances,
FlexibleContexts #-}
-- -----------------------------------------------------------------------------
module Obsidian.MonadObsidian.GenCuda where
import qualified Data.Map as Map
import qualified Data.Set as Set
import Data.List
import Control.Monad.State
import Obsidian.MonadObsidian.Types
--import MyWriter
import Obsidian.MonadObsidian.GPUMonad
import Obsidian.MonadObsidian.Exp
import Obsidian.MonadObsidian.IC
import Obsidian.MonadObsidian.MM
import Obsidian.MonadObsidian.Tools
import Obsidian.MonadObsidian.Printing
import Obsidian.MonadObsidian.Arr
import System.Directory
import System.FilePath
import System.Process
-- -----------------------------------------------------------------------------
type ICLive = [(Statement,Set.Set Name)]
-- -----------------------------------------------------------------------------
-- TODO: rediscover what I am doing here
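-- Backward liveness analysis over the intermediate code: the IC is walked in
-- reverse and each statement is annotated with the set of array names whose
-- contents are still needed ("live") at that point, so later passes know when
-- an array's storage can be reused.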
liveness :: IC -> Set.Set Name -> ICLive
liveness ic names = reverse $ analyze (reverse ic) names
where
analyze [] _ = []
analyze (x@((nom,_) ::= d2):xs) ns =
let ns' = Set.delete nom $ (ns `Set.union` getArrayNames d2 )
in (x, (Set.insert nom ns')) : analyze xs ns'
analyze (x:xs) ns = (x,ns) : analyze xs ns
getArrayNames :: DExp -> Set.Set Name
-- NOTE: Only add arrays to the set
getArrayNames (Index x []) = Set.empty
getArrayNames (Index x ixs) = Set.singleton x
getArrayNames (Op2 _ a b) = (getArrayNames a `Set.union` getArrayNames b)
getArrayNames (Op1 _ a) = getArrayNames a
getArrayNames (If a b c) = (getArrayNames a `Set.union`
getArrayNames b `Set.union`
getArrayNames c)
getArrayNames _ = Set.empty
-- -----------------------------------------------------------------------------
type ICT = [(Statement,Int)]
type MemoryMap = Map.Map Name (Address,Type)
sharedMem = newMem
globalMem = newMem
threads :: Name -> SymbolTable -> Int
threads name table = case Map.lookup name table of
Nothing -> error "error: threads"
Just a -> snd a
bytes :: Name -> SymbolTable -> Int
bytes name table = case Map.lookup name table of
Nothing -> error "error: bytes"
Just a -> (snd a) * sizeof (elemType (fst a))
typeof :: Name -> Map.Map Name (Type,a) -> Type
typeof name table = case Map.lookup name table of
Nothing -> error ("error: typeof " ++ name)
Just a -> fst a
sizeof :: Type -> Int
sizeof Int = 4
sizeof Float = 4
sizeof Bool = 4
sizeof _ = error "sizeof: only element types"
name :: DExp -> Name
name (Index x _) = x
ptrStr :: Type -> String
ptrStr t = typeStr t ++ " *"
-- c type string
typeStr :: Type -> String
typeStr Float = "float"
typeStr Int = "int"
typeStr Bool = "int"
typeStr _ = error "typeStr: only element types"
-- printf specifier string
pfSpecStr :: Type -> String
pfSpecStr Float = "%f"
pfSpecStr Int = "%d"
pfSpecStr Bool = "%d"
pfSpecStr _ = error "pfSpecStr: only element types"
-- -----------------------------------------------------------------------------
data MState = MState {shared :: Memory,
global :: Memory,
ct :: SymbolTable,
mm :: MemoryMap}
type GenState a = State MState a
-- -----------------------------------------------------------------------------
-- new version of genMemoryMap
genMemoryMap :: ICLive -> Set.Set Name -> GenState (ICT,MemoryMap)
genMemoryMap ic set = genMemoryMap' ic set []
genMemoryMap' :: ICLive -> Set.Set Name -> ICT -> GenState (ICT,MemoryMap)
genMemoryMap' [] _ ict =
do
mstate <- get;
return (reverse ict,mm mstate) --REVERSE IT
genMemoryMap' ((a@((nom,_) ::= d2),ns):xs) prev_names ict =
do
freeDead prev_names ns -- updates memorymap
allocFor nom -- updates memorymap
mstate <- get
genMemoryMap' xs ns ((a,threads nom (ct mstate)):ict)
genMemoryMap' ((x,ns):xs) prev_names ict =
do
freeDead prev_names ns
genMemoryMap' xs ns ((x,0):ict)
-- -----------------------------------------------------------------------------
freeDead :: Set.Set Name -> Set.Set Name -> GenState ()
freeDead prev curr =
do
mstate <- get
let (smem,gmem,nt,table) = (shared mstate,
global mstate,
mm mstate,
ct mstate)
(smem',gmem') =
freeAll smem gmem table nt
(Set.toList (prev `Set.difference` curr))
put (mstate {shared = smem', global = gmem'})
return ()
where
freeAll smem gmem _ _ [] = (smem,gmem)
freeAll smem gmem ct nt (x:xs) =
if ("source" `isPrefixOf` x)
then freeAll smem gmem ct nt xs
else freeAll smem' gmem' ct nt xs
where
address =
case Map.lookup x nt of
Nothing -> error $ "error: freeDead, "++
show x ++
"not found in nt"
Just a -> fst a
(smem',gmem') =
case typeof x ct of
Global_Array t -> (smem,free gmem address)
Shared_Array t -> (free smem address,gmem)
Constant_Array t -> error "Impossible to free constant arrays"
-- -----------------------------------------------------------------------------
allocFor :: Name -> GenState ()
allocFor n1 =
do
mstate <- get
let nt = mm mstate
table = ct mstate
(smem,gmem) = (shared mstate,global mstate)
--(smem',gmem') = freeDead' smem gmem table nt prev_names ns
(smem',gmem',addr) =
case typeof n1 table of
Global_Array _ ->
let (m,a) = allocate gmem (bytes n1 table)
in (smem,m,a)
Shared_Array _ ->
let (m,a) = allocate smem (bytes n1 table)
in (m,gmem,a)
Constant_Array _ ->
error "Impossible to allocate constant arrays"
nt' = Map.insert n1 (addr,typeof n1 table) nt
put (mstate {shared = smem',global = gmem',mm = nt'})
-- -----------------------------------------------------------------------------
mm_IC :: ICT -> MemoryMap -> ICT
mm_IC [] _ = []
mm_IC (((nom,d1) ::= d2,i):stms) mm =
((rename nom mm,d1) ::= (renameAll d2 mm),i):(mm_IC stms mm)
--mm_IC ((Cond (E d1) j,i):stms) mm =
-- ((Cond (E (renameAll d1 mm)) j),i):(mm_IC stms mm)
mm_IC ((x,i):stms) mm = (x,i):(mm_IC stms mm)
-- -----------------------------------------------------------------------------
rename :: Name -> MemoryMap -> Name
rename n mm = if (not ("source" `isPrefixOf` n))
then n'
else n
where
(j,at) = case Map.lookup n mm of
Nothing -> error $ "error: "++ n ++" not found in MemoryMap"
Just a -> a
n' = case at of
Global_Array t ->
"(("++ptrStr t++")(gbase+"++ show j++"))"
Shared_Array t ->
"(("++ptrStr t++")(sbase+"++ show j++"))"
Constant_Array t ->
"(("++ptrStr t++")(cbase+"++ show j++"))"
renameAll a@(Index n []) mm = a
renameAll (Index n is) mm =
Index (rename n mm) (map (\x -> renameAll x mm) is)
renameAll (Op2 op d1 d2) mm = Op2 op (renameAll d1 mm) (renameAll d2 mm)
renameAll (Op1 op d1) mm = Op1 op (renameAll d1 mm)
renameAll (If d1 d2 d3) mm = If (renameAll d1 mm)
(renameAll d2 mm)
(renameAll d3 mm)
renameAll x _ = x
threadsIC :: ICT -> Int -> IC
threadsIC [] j = []
threadsIC ((d1 ::= d2,i):stms) j =
case (compare i j) of
LT -> (
IfThen(E (Op2 Lt (Index "tid" []) (LitInt i)))
[(d1 ::= d2)]
):(threadsIC stms j)
EQ -> (d1 ::= d2):(threadsIC stms j)
GT -> error "threadsIC: Impossible"
threadsIC ((x,i):stms) j = x:(threadsIC stms j)
-- -----------------------------------------------------------------------------
kernelHead :: Name -> Env -> SymbolTable -> String
kernelHead kname env ct = header
where header = "__global__ static void " ++ kname ++ "(" ++
inputs ++"char *gbase){\n" ++
sbase ++ tid ++ lengths
tid = "const int tid = threadIdx.x;\n"
sbase = "extern __shared__ char sbase[] __attribute__ ((aligned(4)));\n"
ls (x,y)= "const int " ++ x ++
" __attribute__ ((unused)) = " ++ show y ++ ";\n"
lengths = unwords $ map ls env
inputs = unwords (map (\x -> (ptrStr ((elemType . typeof x) ct)) ++ x++",") sources)
sources = filter (\x -> "source" `isPrefixOf` x) (Map.keys ct)
genCudaKernel :: Name -> Env -> IC -> GenState String
genCudaKernel name env ic = do
mstate <- get
return $ (kernelHead name env (ct mstate)) ++ (printICKernel ic) ++ "\n}\n"
-- -----------------------------------------------------------------------------
-- test leading towards an "execute function"
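-- The pipeline below: run liveness analysis on the IC, lay out shared/global
-- memory for the intermediate arrays (genMemoryMap), rewrite array names into
-- offsets in the sbase/gbase buffers (mm_IC), guard statements that use fewer
-- threads than the launch size (threadsIC), and finally emit the CUDA kernel
-- plus a small main() that copies the inputs over, launches the kernel and
-- prints the result.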
test :: (Input (Exp a), Sources' (GArr (Exp a))) =>
(GArr (Exp a) -> GPU (GArr (Exp b))) -> [Exp a] -> String
test inp inputs =
let
-- generate C style array
len = length inputs
cInputs = strListToCArrayStr $ map render inputs
-- names of the result arrays, used to read back result
-- the results are also alive when leaving the program
nms = names $ getResult inp len
res = Set.fromList nms
-- perform liveness analysis.
icl = liveness (getIC inp len) res
-- evaluate the symbolic table
env = [("n0",len)]
st = symtab $ getState inp len
concreteT = st -- evalSymTab env st
((ict,memMap),mstate) =
runState (genMemoryMap icl Set.empty)
(MState sharedMem globalMem concreteT Map.empty)
nthreads = maximum [snd x | x <- ict] -- number of threads needed
ic = mm_IC ict memMap
--ic' = (map fst ic) -- threadsIC ic nthreads
ic' = threadsIC ic nthreads
(kernel,mstate') = runState (genCudaKernel "generated" env ic') mstate
inputType = elemType $ typeof "source0" concreteT
outputType = elemType $ typeof (head nms) st
olen = threads (head nms) concreteT
sharedMemoryNeeded = size (shared mstate')
globalMemoryNeeded = size (global mstate')
result_pos = case Map.lookup (head nms) memMap of
Nothing -> error "test: result array not found in memory map"
Just a -> "(gbase + " ++ show (fst a) ++ ")"
--generate code that launches kernel
in "/*\n" ++ "number of threads:" ++ show nthreads ++ "\n" ++ cInputs ++ "\n" ++
"*/\n\n\n" ++ includes ++ kernel ++
cmain (
" "++ typeStr inputType ++ " values[" ++ show len ++ "] = " ++ cInputs ++ ";\n" ++
" "++ typeStr outputType ++" result[" ++ show olen ++ "];\n" ++
" char *gbase;\n" ++
" " ++ typeStr inputType ++" * dvalues;\n" ++
" cudaMalloc((void**)&dvalues, sizeof("++typeStr inputType++") * "++ show len ++" ); \n" ++
" cudaMemcpy(dvalues, values, sizeof("++typeStr inputType++") * "++show len ++", cudaMemcpyHostToDevice);\n" ++
" cudaMalloc((void**)&gbase," ++ show globalMemoryNeeded ++ "); \n" ++
" " ++ runKernel "generated" nthreads sharedMemoryNeeded ++
" cudaMemcpy(result," ++ result_pos ++", sizeof("++typeStr outputType++") * "++ show olen ++" , cudaMemcpyDeviceToHost);\n" ++
" cudaFree(dvalues);\n" ++
" cudaFree(gbase);\n" ++
" for(int i = 0; i < " ++ show olen ++ "; i++){\n" ++
" printf(\"" ++ pfSpecStr outputType ++ " \",result[i]);\n" ++
" }\n"
)
-- -----------------------------------------------------------------------------
includes = "#include <stdio.h>\n#include <stdlib.h>\n"
cmain str = "int main(int argc, char **argv){\n" ++ str ++ "\n}\n"
-- -----------------------------------------------------------------------------
runKernel :: Name -> Int -> Int -> String
runKernel name threads sm = name ++ "<<<1, " ++ show threads ++
"," ++ show sm ++ ">>>(dvalues,gbase);\n"
-- -----------------------------------------------------------------------------
class Input a where
render :: a -> String
instance Input (Exp Int) where
render (E (LitInt a)) = show a
instance Input (Exp Float) where
render (E (LitFloat a)) = show a
render (E(Op2 Sub (LitFloat 0.0) (LitFloat a))) = "-"++show a
class Output a where
scan :: String -> a
instance Output (Exp Int) where
scan str = (E (LitInt (read str :: Int)))
instance Output (Exp Float) where
scan str = (E (LitFloat (read str :: Float)))
strListToCArrayStr :: [String] -> String
strListToCArrayStr xs = '{':((concat $ Data.List.intersperse "," xs) ++ "}")
listToCArrayStr :: Show a => [a] -> String
listToCArrayStr xs = '{':((concat $ Data.List.intersperse "," (map show xs)) ++ "}")
-- -----------------------------------------------------------------------------
data ExecMode = EMU | GPU
execute :: (Input (Exp a) , Output (Exp b),Sources' (GArr (Exp a))) =>
(GArr (Exp a) -> GPU (GArr (Exp b))) ->
[Exp a] -> IO [Exp b]
execute = execute' GPU
execute' :: (Input (Exp a) , Output (Exp b),Sources' (GArr (Exp a))) =>
ExecMode ->
(GArr (Exp a) -> GPU (GArr (Exp b))) ->
[Exp a] -> IO [Exp b]
execute' _ _ [] = return [] -- UGLY FIX
execute' mode program list = exec
where
exec = do
tmp_dir <- getTemporaryDirectory
let fullpath = tmp_dir ++ pathSeparator : "GPU-HASKELL"
createDirectoryIfMissing False fullpath
-- genCudaProjectL n (div n threads) fullpath name program list
writeFile (fullpath ++ (pathSeparator : name) ++ ".cu")
(test program list)
writeFile (fullpath ++ (pathSeparator : "Makefile"))
(genMakefile name)
working_dir <- getCurrentDirectory
setCurrentDirectory fullpath
pid1 <- case mode of
GPU -> runCommand "make -s 2> messages.txt"
EMU -> runCommand "make emulation -s 2> messages.txt"
waitForProcess pid1 -- make sure the executable is generated
pid2 <- runCommand (fullpath ++ pathSeparator:name ++ " > output.txt")
waitForProcess pid2 -- make sure output is generated
result <- readOutput (fullpath ++ pathSeparator:"output.txt" )
setCurrentDirectory working_dir
--removeDirectoryRecursive fullpath
return result
n = length list
name = "generated"
readOutput file =
do
string <- readFile file
let strings = words string
return (map scan strings)
-- -----------------------------------------------------------------------------
--"CUDA_INSTALL_PATH := /usr/local/cuda\n" ++
--"CUDA_SDK_PATH := /home/ian/NVIDIA_CUDA_SDK\n" ++
-- -----------------------------------------------------------------------------
-- genMakefile now uses env variables CUDA_INSTALL_PATH and
-- CUDA_SDK_PATH(not used at the moment, maybe remove)
genMakefile :: Name -> String
genMakefile name =
"TARGET := " ++ name ++ "\nCOMMON_PATH := $(CUDA_SDK_PATH)/common\n\n\
\LIBPATHS := -L$(CUDA_INSTALL_PATH)/lib -L$(COMMON_PATH)/lib -L$(CUDA_\
\SDK_PATH)/lib\nINCPATHS := -I$(CUDA_INSTALL_PATH)/include -I$(COMMON_\
\PATH)/inc\nLIBRARIES := -lcuda -lcudart\n\nLIBRARIESEMU :\
\=-lcudart\n\nNVCC := nvcc \n\nall: $(TARGET)\n$(TARGET): \
\$(TARGET).cu\n\t$(NVCC) -o $(TARGET) $(TARGET).cu $(INCPATHS) $(LIBPA\
\THS) $(LIBRARIES)\n\nemulation: $(TARGET).cu\n\t$(NVCC) -deviceemu -o\
\ $(TARGET) $(TARGET).cu $(INCPATHS) $(LIBPATHS) $(LIBRARIESEMU)\n\ncl\
\ean:\n\trm $(TARGET)\n"
{-
genMakefile :: Name -> String
genMakefile name =
"TARGET := " ++ name ++ "\nCOMMON_PATH := $(CUDA_SDK_PATH)/common\n\n\
\LIBPATHS := -L$(CUDA_INSTALL_PATH)/lib -L$(COMMON_PATH)/lib -L$(CUDA_\
\SDK_PATH)/lib\nINCPATHS := -I$(CUDA_INSTALL_PATH)/include -I$(COMMON_\
\PATH)/inc\nLIBRARIES := -lcuda -lGL -lGLU\n\nLIBRARIESEMU :\
\=-lGL -lGLU\n\nNVCC := nvcc \n\nall: $(TARGET)\n$(TARGET): \
\$(TARGET).cu\n\t$(NVCC) -o $(TARGET) $(TARGET).cu $(INCPATHS) $(LIBPA\
\THS) $(LIBRARIES)\n\nemulation: $(TARGET).cu\n\t$(NVCC) -deviceemu -o\
\ $(TARGET) $(TARGET).cu $(INCPATHS) $(LIBPATHS) $(LIBRARIESEMU)\n\ncl\
\ean:\n\trm $(TARGET)\n"
-}
-- -----------------------------------------------------------------------------
-- EXPERIMENT
-- -----------------------------------------------------------------------------
-- EXPERIMENT
-- -----------------------------------------------------------------------------
executep :: (Input (Exp a) ,
Input (Exp b) ,
Output (Exp c),
Output (Exp d),
Sources' (GArr (Exp a)),
Sources' (GArr (Exp b))) =>
ExecMode ->
(GArr (Exp a,Exp b) -> GPU (GArr (Exp c,Exp d))) ->
[(Exp a,Exp b)] -> IO [(Exp c,Exp d)]
executep _ _ [] = return [] -- UGLY FIX
executep mode program list = exec
where
exec = do
tmp_dir <- getTemporaryDirectory
let fullpath = tmp_dir ++ pathSeparator : "GPU-HASKELL"
createDirectoryIfMissing False fullpath
-- genCudaProjectL n (div n threads) fullpath name program list
writeFile (fullpath ++ (pathSeparator : name) ++ ".cu")
(test2 program list)
writeFile (fullpath ++ (pathSeparator : "Makefile"))
(genMakefile name)
working_dir <- getCurrentDirectory
setCurrentDirectory fullpath
pid1 <- case mode of
GPU -> runCommand "make -s"
EMU -> runCommand "make emulation -s"
waitForProcess pid1 -- make sure the executable is generated
pid2 <- runCommand (fullpath ++ pathSeparator:name ++ " > output.txt")
waitForProcess pid2 -- make sure output is generated
result <- readOutput (fullpath ++ pathSeparator:"output.txt" )
setCurrentDirectory working_dir
--removeDirectoryRecursive fullpath
return result
n = length list
name = "generated"
--readOutput :: Name -> IO [(Exp b,Exp c)]
readOutput file =
do
string <- readFile file
let strings = words string
[s1,s2] = splits "SEPARATOR" strings
eb = map scan (s1)
ec = map scan (s2)
return (zip eb ec)
splits sep xs = case eat sep xs [] of
(s,[]) -> [s]
(s,ss) -> s : splits sep ss
where
eat sep [] acc = (acc,[])
eat sep (x:xs) acc | x == sep = (acc,xs)
| otherwise = eat sep xs (acc ++ [x])
-- -----------------------------------------------------------------------------
test2 :: (Input (Exp a),
Input (Exp b),
Sources' (GArr (Exp a)),
Sources' (GArr (Exp b))) =>
(GArr (Exp a,Exp b) -> GPU (GArr (Exp c,Exp d))) -> [(Exp a,Exp b)] -> String
test2 inp inputs =
let
-- generate C style array
len = length inputs
(i1,i2) = unzip inputs
cInputs1 = strListToCArrayStr $ map render i1
cInputs2 = strListToCArrayStr $ map render i2
-- names of the result arrays, used to read back result
-- the results are also alive when leaving the program
--nms = names $ getResult inp
[nm1,nm2] = names $ getResult inp len
res = Set.fromList [nm1,nm2]
-- perform liveness analysis.
icl = liveness (getIC inp len) res
-- evaluate the symbolic table
env = [("n0",len),("n1",len)]
st = symtab $ getState inp len
concreteT = st -- evalSymTab env st
((ict,memMap),mstate) =
runState (genMemoryMap icl Set.empty)
(MState sharedMem globalMem concreteT Map.empty)
nthreads = maximum [snd x | x <- ict] -- number of threads needed
ic = mm_IC ict memMap
--ic' = (map fst ic) -- threadsIC ic nthreads
ic' = threadsIC ic nthreads
(kernel,mstate') = runState (genCudaKernel "generated" env ic') mstate
inputType1 = elemType $ typeof "source0" concreteT
inputType2 = elemType $ typeof "source1" concreteT
outputType1 = elemType $ typeof nm1 st
outputType2 = elemType $ typeof nm2 st
olen = threads nm1 concreteT
sharedMemoryNeeded = size (shared mstate')
globalMemoryNeeded = size (global mstate')
result_pos1 = case Map.lookup nm1 memMap of
Nothing -> error "test2: result array 1 not found in memory map"
Just a -> "(gbase + " ++ show (fst a) ++ ")"
result_pos2 = case Map.lookup nm2 memMap of
Nothing -> error "test2: result array 2 not found in memory map"
Just a -> "(gbase + " ++ show (fst a) ++ ")"
--generate code that launches kernel
in "/*\n" ++ "number of threads:" ++ show nthreads ++ "\n" ++
cInputs1 ++ "\n" ++
cInputs2 ++ "\n" ++
"*/\n\n\n" ++ includes ++ kernel ++
cmain (
" "++ typeStr inputType1 ++ " values1[" ++ show len ++ "] = " ++ cInputs1 ++ ";\n" ++
" "++ typeStr inputType2 ++ " values2[" ++ show len ++ "] = " ++ cInputs2 ++ ";\n" ++
" "++ typeStr outputType1 ++ " result1[" ++ show olen ++ "];\n" ++
" "++ typeStr outputType2 ++ " result2[" ++ show olen ++ "];\n" ++
" char *gbase;\n" ++
" " ++typeStr inputType1 ++" * dvalues1;\n" ++
" " ++typeStr inputType2 ++" * dvalues2;\n" ++
" cudaMalloc((void**)&dvalues1, sizeof("++typeStr inputType1++") * "++ show len ++" ); \n" ++
" cudaMemcpy(dvalues1, values1, sizeof("++typeStr inputType1++") * "++ show len ++", cudaMemcpyHostToDevice);\n" ++
" cudaMalloc((void**)&dvalues2, sizeof("++typeStr inputType2++") * "++ show len ++" ); \n" ++
" cudaMemcpy(dvalues2, values2, sizeof("++typeStr inputType2++") * "++ show len ++", cudaMemcpyHostToDevice);\n" ++
" cudaMalloc((void**)&gbase," ++ show globalMemoryNeeded ++ "); \n" ++
" " ++ runKernelp "generated" nthreads sharedMemoryNeeded ++
" cudaMemcpy(result1," ++ result_pos1 ++", sizeof("++typeStr outputType1++") * "++ show olen ++" , cudaMemcpyDeviceToHost);\n" ++
" cudaMemcpy(result2," ++ result_pos2 ++", sizeof("++typeStr outputType2++") * "++ show olen ++" , cudaMemcpyDeviceToHost);\n" ++
" cudaFree(dvalues1);\n" ++
" cudaFree(dvalues2);\n" ++
" cudaFree(gbase);\n" ++
" for(int i = 0; i < " ++ show olen ++ "; i++){\n" ++
" printf(\"" ++ pfSpecStr outputType1 ++ " \",result1[i]);\n" ++
" }\n" ++
" printf(\"\\nSEPARATOR\\n\");\n" ++
" for(int i = 0; i < " ++ show olen ++ "; i++){\n" ++
" printf(\"" ++ pfSpecStr outputType1 ++ " \",result2[i]);\n" ++
" }\n"
)
-- -----------------------------------------------------------------------------
runKernelp :: Name -> Int -> Int -> String
runKernelp name threads sm = name ++ "<<<1, " ++ show threads ++
"," ++ show sm ++ ">>>(dvalues1,dvalues2,gbase);\n"
-- -----------------------------------------------------------------------------
| svenssonjoel/MonadObsidian | Obsidian/MonadObsidian/GenCuda.hs | bsd-3-clause | 24,626 | 0 | 97 | 7,613 | 6,479 | 3,298 | 3,181 | 416 | 6 |
{-|
Module : Data.Weave.Tagged
Copyright : (c) 2013 Craig Roche
License : BSD-style
Maintainer : [email protected]
Stability : stable
Portability : portable
-}
module Data.Weave.Tagged where
import Control.Arrow ( (***) )
import Control.Applicative
import Data.Traversable
import Data.Foldable ( Foldable, foldMap )
import Data.Monoid
import Data.Bifunctor
import Data.Foldable ( toList )
import Data.Weave
newtype Tagged t a = Tagged { runTagged :: Weave a (t, a) }
deriving Show
instance Functor (Tagged t) where
fmap = fmapDefault
instance Bifunctor Tagged where
bimap f g = Tagged . bimap g (f *** g) . runTagged
instance Monoid (Tagged t a) where
mempty = Tagged mempty
Tagged t `mappend` Tagged u = Tagged (t `mappend` u)
instance Traversable (Tagged t) where
traverse f = fmap fromAssocList . (traverse.traversePair) f . toAssocList
where
traversePair f (b, a) = (,) b <$> f a
instance Foldable (Tagged t) where
foldMap = foldMapDefault
-- * Constructors
-- | A value without a tag.
untag :: a -> Tagged t a
untag = Tagged . weave
-- | A value annotated with a tag.
tag :: t -> a -> Tagged t a
tag t a = Tagged $ pure (t, a)
-- | A list of untagged values
list :: [a] -> Tagged t a
list = foldMap untag
-- | A list of tag-value pairs
pairs :: [(t, a)] -> Tagged t a
pairs = foldMap (uncurry tag)
-- | Create a 'Tagged' from a list of values with optional tags
fromAssocList :: [(Maybe t, a)] -> Tagged t a
fromAssocList = mconcat . map f
where
f (Nothing, a) = untag a
f (Just t, a) = tag t a
-- * Destructors
-- | Reduce a 'Tagged' to a list, using a function on tagless values and a
-- function on tagged values.
tagged :: (a -> b) -> (t -> a -> b) -> Tagged t a -> [b]
tagged f g = flatten . bimap f (uncurry g) . runTagged
taggedA :: (Applicative f)
=> (a -> f b)
-> (t -> a -> f b)
-> Tagged t a
-> f [b]
taggedA f g = sequenceA . tagged f g
-- | Reduce a 'Tagged' to a list by discarding all tags.
discardTags :: Tagged t a -> [a]
discardTags = tagged id (curry snd)
-- | Similar to 'discardTags', but discards the content instead of the tags
-- and uses the tags in their place.
joinTags :: Tagged a a -> [a]
joinTags = tagged id (curry fst)
-- | Convert a 'Tagged' into a list of tag-value pairs.
toAssocList :: Tagged t a -> [(Maybe t, a)]
toAssocList = tagged ((,) Nothing) ((,) . Just)
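-- A small usage sketch (illustrative; it assumes the underlying 'Weave'
-- preserves construction order):
--
-- > toAssocList (untag 'a' <> tag "T" 'b' <> untag 'c')
-- >     == [(Nothing,'a'), (Just "T",'b'), (Nothing,'c')]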
-- | Create an association list of tags and values, discarding the tagless
-- values.
listPairs :: Tagged t a -> [(t, a)]
listPairs = toList . runTagged
-- * Transformations
--liftWeave :: (Weave a (t, a) -> Weave b (s, b)) -> Tagged t a -> Tagged s b
--liftWeave f = Tagged . f . runTagged
-- | Map over all tags. This is a specialization of 'Data.Bifunctor.first'
mapTags :: (t -> u) -> Tagged t a -> Tagged u a
mapTags = Data.Bifunctor.first
reduceByTag :: (Applicative f) => (t -> f a) -> Tagged t a -> f [a]
reduceByTag f = sequenceA . tagged pure (const . f)
reduceByTagM :: (Monad m) => (t -> m a) -> Tagged t a -> m [a]
reduceByTagM f = Prelude.sequence . tagged return (const . f)
{-
traverseTaggeds :: (Applicative f) => (t -> f a) -> Tagged t a -> f [a]
traverseTaggeds f = transcribeA (f . fst)
traverseTaggedsM :: (Monad m) => (Name -> m a) -> Tagged a -> m a
traverseTaggedsM f = unwrapMonad . traverseTaggeds (WrapMonad . f)
-}
| cdxr/weave | Data/Weave/Tagged.hs | bsd-3-clause | 3,384 | 0 | 11 | 773 | 1,004 | 540 | 464 | 57 | 2 |
{-# LANGUAGE OverlappingInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE FlexibleInstances #-}
module Tronkell.Data.Parse where
import Tronkell.Types as T
import Tronkell.Game.Types as GT
import Tronkell.Server.Types as ST
import Data.Text as Text
import qualified Data.Aeson as A
import Data.Aeson.Types as AT ((.=), (.:), typeMismatch)
instance A.ToJSON T.Coordinate where
toJSON (x,y) = A.object [ "x" .= x, "y" .= y]
instance A.ToJSON T.Orientation where
toJSON = A.String . Text.pack . show
instance A.ToJSON GT.GameConfig where
toJSON (GameConfig w h playerSpeed ticksPerSec) =
A.object [ "width" .= w
, "height" .= h
, "player-speed" .= playerSpeed
, "ticks-per-second" .= ticksPerSec ]
instance A.ToJSON PlayerNick where
toJSON (PlayerNick nick) = A.String nick
instance A.ToJSON PlayerId where
toJSON = A.Number . fromInteger . toInteger . getPlayerId
instance A.ToJSON PlayerStatus where
toJSON = A.String . Text.pack . show
instance A.ToJSON Player where
toJSON (Player pid nick status coord orient trail) =
A.object [ "id" .= pid
, "nick" .= nick
, "status" .= status
, "coordinate" .= coord
, "orientation" .= orient
, "trail" .= trail ]
instance A.ToJSON UserID where
toJSON = A.Number . fromInteger . toInteger . getUserID
instance A.ToJSON OutMessage where
toJSON msg =
case msg of
ST.GameReady config players ->
A.object [ "type" .= A.String "GameReady"
, "config" .= config
, "players" .= players
]
ST.PlayerMoved uId coord orien ->
A.object [ "type" .= A.String "PlayerMoved"
, "id" .= uId
, "coordinate" .= coord
, "orientation" .= orien
]
ST.PlayerDied uId coord ->
A.object [ "type" .= A.String "PlayerDied"
, "id" .= uId
, "coordinate" .= coord
]
ST.GameEnded winnerId ->
A.object [ "type" .= A.String "GameEnded"
, "winnerId" .=
case winnerId of
Just wId -> A.toJSON wId
Nothing -> A.Null
]
ServerMsg m ->
A.object [ "type" .= A.String "ServerMsg"
, "message" .= m
]
PlayerRegisterId uId ->
A.object [ "type" .= A.String "PlayerRegisterId"
, "id" .= uId
]
type JsonInMessage = UserID -> InMessage
instance A.FromJSON JsonInMessage where
parseJSON (A.Object v) = do
objType <- v .: "type"
case objType of
"Ready" -> return PlayerReady
"Exit" -> return PlayerExit
"Left" -> return PlayerTurnLeft
"Right" -> return PlayerTurnRight
"Name" -> do
name <- v .: "name"
return (flip PlayerName name)
j -> AT.typeMismatch "InMessage" j -- should be wrong data error.
parseJSON invalid = AT.typeMismatch "InMessage" invalid
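-- Illustrative wire format implied by the instances above: an incoming
-- message looks roughly like {"type":"Name","name":"alice"} or {"type":"Left"},
-- while an outgoing 'PlayerDied' renders roughly as
-- {"type":"PlayerDied","id":1,"coordinate":{"x":2,"y":3}}.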
| nilenso/tronkell | src/Tronkell/Data/Parse.hs | bsd-3-clause | 3,190 | 0 | 16 | 1,116 | 845 | 441 | 404 | 78 | 0 |
{-|
Module: FacesWidget
Description: Specialization of the thumbnail widget for Peterson's face-selection task
Copyright: (c) Greg Hale, 2016
License: BSD3
Maintainer: [email protected]
Stability: experimental
Portability: GHCJS
-}
{-# language RankNTypes #-}
{-# language DeriveGeneric #-}
{-# language CPP #-}
{-# language FlexibleContexts #-}
{-# language GADTs #-}
{-# language DeriveGeneric #-}
{-# language ScopedTypeVariables #-}
{-# language OverloadedStrings #-}
module FacesWidget where
-------------------------------------------------------------------------------
import Control.Monad.IO.Class (MonadIO, liftIO)
import Data.Default
import qualified Data.Text as T
import Reflex
import Reflex.Dom
import qualified Data.Map as Mapp
import Data.Map (Map)
import GHC.Generics
import GHCJS.DOM.EventM (on)
#ifdef ghcjs_HOST_OS
import GHCJS.DOM.Element (getBoundingClientRect)
import GHCJS.DOM.ClientRect (getTop, getLeft)
#endif
import GHCJS.DOM.HTMLElement
-- import GHCJS.DOM.MouseEvent (Mousemove)
-------------------------------------------------------------------------------
import Canvas2D
import Thumbnail
data PicUrl = PicUrl T.Text
data FaceLoc = FaceLoc
{ faceCenterX :: Double
, faceCenterY :: Double
, faceWidth :: Double
, faceHeight :: Double
} deriving (Eq, Ord, Show, Generic)
-------------------------------------------------------------------------------
data FacesWidgetConfig t = FacesWidgetConfig
{ facesWidgetConfig_attributes :: Dynamic t (Map T.Text T.Text)
, facesWidgetConfig_initialFaces :: Map Int FaceLoc
, facesWidgetConfig_setFace :: Event t (Int, Maybe FaceLoc)
, facesWidgetConfig_intialPic :: PicUrl
, facesWidgetConfig_setPic :: Event t PicUrl
, facesWidgetConfig_select :: Event t Int
}
data ZoomRect = ZoomRect
{ zrCenter :: (Double, Double)
, zrWidth :: Double
}
data FacesWidget t = FacesWidget
{ facesWidget_faces :: Dynamic t (Map Int FaceLoc)
, facesWidget_canvas :: El t
, facesWidget_selection :: Dynamic t (Maybe (Int, FaceLoc))
}
instance Reflex t => Default (FacesWidgetConfig t) where
def = FacesWidgetConfig (constDyn mempty) mempty
never (PicUrl "") never never
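-- | Convert mousemove events on an element into coordinates relative to the
-- element's bounding client rectangle.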
widgetEventCoords :: MonadWidget t m => El t -> m (Event t (Maybe (Double,Double)))
widgetEventCoords el = do
let moveFunc (x,y) = do
Just cr <- getBoundingClientRect (_element_raw el)
t <- realToFrac <$> (getTop cr :: IO Float)
l <- realToFrac <$> (getLeft cr :: IO Float)
return $ Just (fromIntegral x - l, fromIntegral y - t)
performEvent $ fmap (liftIO . moveFunc) (domEvent Mousemove el)
facesWidget :: forall t m.MonadWidget t m => FacesWidgetConfig t -> m (FacesWidget t)
facesWidget (FacesWidgetConfig attrs faces0 dFaces pic0 dPic sel) =
elClass "div" "faces-widget" $ do
-- imgAttrs <- holdDyn pic0 dPic >>= mapDyn (\(PicUrl url) -> "src" =: url)
imgAttrs <- fmap ((\(PicUrl url) -> "src" =: url) <$>) $ holdDyn pic0 dPic
sourcePic <- fst <$> elDynAttr' "img" imgAttrs (return ())
sourceCoords <- widgetEventCoords sourcePic
zoomArea <- canvas $ undefined
undefined
#ifndef ghcjs_HOST_OS
getBoundingClientRect = undefined
getTop :: MonadIO m => ClientRect -> m Float
getTop = error "getTop only available in ghcjs"
getLeft :: MonadIO m => ClientRect -> m Float
getLeft = error "getLeft only available in ghcjs"
data ClientRect
#endif
| CBMM/petersonfaces | petersonfaces-frontend/src/FacesWidget.hs | bsd-3-clause | 3,580 | 0 | 16 | 744 | 837 | 455 | 382 | -1 | -1 |
--- Soundex code calculator
--- Copyright © 2008 Bart Massey
--- ALL RIGHTS RESERVED
--- This software is licensed under the "3-clause ('new')
--- BSD License". Please see the file COPYING provided with
--- this distribution for license terms.
-- |Soundex is a phonetic coding algorithm.
-- It transforms word into a similarity hash based on an
-- approximation of its sounds. Thus, similar-sounding
-- words tend to have the same hash.
--
-- This implementation is based on a number of sources,
-- including a description of soundex at
-- <http://wikipedia.org/wiki/Soundex>
-- and in Knuth's "The Art of Computer Programming" 2nd ed
-- v1 pp394-395. A very helpful reference on the details
-- and differences among soundex algorithms is "Soundex:
-- The True Story",
-- <http://west-penwith.org.uk/misc/soundex.htm>
-- accessed 11 September 2008.
--
-- This code was originally written for the "thimk" spelling suggestion
-- application in Nickle (http://nickle.org) in July 2002
-- based on a description from
-- http://www.geocities.com/Heartland/Hills/3916/soundex.html
-- which is now
-- http://www.searchforancestors.com/soundex.html
-- The code was ported September 2008; the Soundex variants were also
-- added at this time.
module Text.PhoneticCode.Soundex (soundex, soundexSimple,
soundexNARA, soundexCodes)
where
import Data.List
import Data.Char
import Data.Array.IArray
-- |Array of soundex codes for single characters. The
-- array maps uppercase letters (only) to a character
-- representing a code in the range ['1'..'7'] or '?'. Code
-- '7' is returned as a coding convenience for
-- American/Miracode/NARA/Knuth soundex.
soundexCodes :: Array Char Char
soundexCodes = accumArray updater '?' ('A', 'Z') codes where
updater '?' c = c
updater _ c = error ("updater called twice on " ++ [c])
groups = [('1', "BFPV"),
('2', "CGJKQSXZ"),
('3', "DT"),
('4', "L"),
('5', "MN"),
('6', "R"),
('7', "HW")]
codes = concatMap make_codes groups
make_codes (i, s) = zip s (repeat i)
-- | Utility function: id except for point substitution.
subst :: Eq a => a -> a -> a -> a
subst from to source
| from == source = to
| otherwise = source
-- | Compute a "full" soundex code; i.e., do not drop any
-- encodable characters from the result. The leading
-- character of the code will be folded to uppercase.
-- Non-alphabetics are not encoded. If no alphabetics are
-- present, the soundex code will be "0".
--
-- The two commonly encountered forms of soundex are Simplified
-- and another known as American, Miracode, NARA or Knuth. This
-- code will calculate either---passing True gets NARA, and False
-- gets Simplified.
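-- As an illustration of the difference, "Ashcroft" codes to A226 under the
-- Simplified rules but A261 under NARA: NARA drops the 'h' entirely, so the
-- adjacent 's' and 'c' (both code 2) collapse into a single 2.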
soundex :: Bool -> String -> String
soundex nara = filter (/= '?')
. encode
. map toUpper
. dropWhile (not . isAlpha)
where
narify
| nara = filter (/= '7')
| otherwise = map (subst '7' '?')
filter_multiples = map head . group
--- The second clause of encode originally had a bug
--- correctly predicted by STTS (ref above)!
encode "" = "0"
encode as@(a : _) = (a :)
. drop 1
. filter_multiples
. narify
. map unsound $ as
unsound c | c >= 'A' && c <= 'Z' = soundexCodes ! c
unsound _ = '?'
soundex_truncated :: Bool -> String -> String
soundex_truncated nara = take 4 . (++ repeat '0') . soundex nara
--- | This is the simple variant of `soundex`. It gives the
--- first four characters of the full soundex code, zero-padded
--- as needed.
soundexSimple :: String -> String
soundexSimple = soundex_truncated False
--- | This is the most common US census variant of `soundex`,
--- compatible with most existing calculators. It gives the
--- first four characters of the full soundex code, zero-padded
--- as needed.
soundexNARA :: String -> String
soundexNARA = soundex_truncated True
--- Some tests from the web and from Knuth that this
--- software passes.
---
-- soundexTest = and [
-- soundexSimple "Lloyd" == "L300",
-- soundexSimple "Woolcock" == "W422",
-- soundexSimple "Donnell" == "D540",
-- soundexSimple "Baragwanath" == "B625",
-- soundexSimple "Williams" == "W452",
-- soundexSimple "Ashcroft" == "A226",
-- soundexNARA "Ashcroft" == "A261",
-- soundexSimple "Euler" == "E460",
-- soundexSimple "Ellery" == "E460",
-- soundexSimple "Gauss" == "G200",
-- soundexSimple "Ghosh" == "G200",
-- soundexSimple "Hilbert" == "H416",
-- soundexSimple "Heilbronn" == "H416",
-- soundexSimple "Knuth" == "K530",
-- soundexSimple "Kant" == "K530",
-- soundexSimple "Ladd" == "L300",
-- soundexSimple "Lukasiewicz" == "L222",
-- soundexSimple "Lissajous" == "L222"]
| BartMassey/phonetic-code | Text/PhoneticCode/Soundex.hs | bsd-3-clause | 4,958 | 0 | 12 | 1,189 | 611 | 361 | 250 | 45 | 3 |
-----------------------------------------------------------------------------
-- |
-- Module : Data.SBV.Utils.TDiff
-- Copyright : (c) Levent Erkok
-- License : BSD3
-- Maintainer : [email protected]
-- Stability : experimental
--
-- Runs an IO computation printing the time it took to run it
-----------------------------------------------------------------------------
module Data.SBV.Utils.TDiff
( Timing(..)
, showTDiff
)
where
import Data.Time (NominalDiffTime)
import Data.IORef (IORef)
import Data.List (intercalate)
import Data.Ratio
import GHC.Real (Ratio((:%)))
import Numeric (showFFloat)
-- | Specify how to save timing information, if at all.
data Timing = NoTiming | PrintTiming | SaveTiming (IORef NominalDiffTime)
-- | Show 'NominalDiffTime' in human readable form. 'NominalDiffTime' is
-- essentially picoseconds (10^-12 seconds). We show it so that
-- it's represented at the day:hour:minute:second.XXX granularity.
showTDiff :: NominalDiffTime -> String
showTDiff diff
| denom /= 1 -- Should never happen! But just in case.
= show diff
| True
= intercalate ":" fields
where total, denom :: Integer
total :% denom = (picoFactor % 1) * toRational diff
-- there are 10^12 pico-seconds in a second
picoFactor :: Integer
picoFactor = (10 :: Integer) ^ (12 :: Integer)
[s2p, m2s, h2m, d2h] = drop 1 $ scanl (*) 1 [picoFactor, 60, 60, 24]
(days, days') = total `divMod` d2h
(hours, hours') = days' `divMod` h2m
(minutes, seconds') = hours' `divMod` m2s
(seconds, picos) = seconds' `divMod` s2p
secondsPicos = show seconds
++ dropWhile (/= '.') (showFFloat (Just 3) (fromIntegral picos * (10**(-12) :: Double)) "s")
aboveSeconds = map (\(t, v) -> show v ++ [t]) $ dropWhile (\p -> snd p == 0) [('d', days), ('h', hours), ('m', minutes)]
fields = aboveSeconds ++ [secondsPicos]
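-- A worked example of the divMod chain above: a difference of 93784.5 seconds
-- (1 day, 2 hours, 3 minutes, 4.5 seconds) renders as
--
-- > showTDiff (93784.5 :: NominalDiffTime) == "1d:2h:3m:4.500s"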
| josefs/sbv | Data/SBV/Utils/TDiff.hs | bsd-3-clause | 2,018 | 0 | 16 | 493 | 494 | 292 | 202 | 29 | 1 |
-- Code reused from http://hackage.haskell.org/package/deepseq-generics
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TupleSections #-}
module Main (main) where
import Control.Concurrent.MVar
import Control.Exception
import Control.Monad
import Data.Bits
import Data.IORef
import Data.Typeable
import Data.Word
import GHC.Generics
import System.IO.Unsafe (unsafePerformIO)
-- import Test.Framework (defaultMain, testGroup, testCase)
import Test.Framework
import Test.Framework.Providers.HUnit
import Test.HUnit
-- IUT
import Control.DeepSeq
-- needed for GHC-7.4 compatibility
#if !MIN_VERSION_base(4,6,0)
atomicModifyIORef' :: IORef a -> (a -> (a,b)) -> IO b
atomicModifyIORef' ref f = do
b <- atomicModifyIORef ref
(\x -> let (a, b) = f x
in (a, a `seq` b))
b `seq` return b
#endif
----------------------------------------------------------------------------
-- simple hacky abstraction for testing forced evaluation via `rnf`-like functions
seqStateLock :: MVar ()
seqStateLock = unsafePerformIO $ newMVar ()
{-# NOINLINE seqStateLock #-}
withSeqState :: Word64 -> IO () -> IO ()
withSeqState expectedState act = withMVar seqStateLock $ \() -> do
0 <- resetSeqState
() <- act
st <- resetSeqState
unless (st == expectedState) $
assertFailure ("withSeqState: actual seq-state ("++show st++") doesn't match expected value ("++
show expectedState++")")
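-- The mechanism: forcing a 'SeqSet i' value sets bit i in the global
-- 'seqState' bitmask, so 'withSeqState expected act' verifies that 'act'
-- forced exactly the fields it was supposed to and nothing else.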
seqState :: IORef Word64
seqState = unsafePerformIO $ newIORef 0
{-# NOINLINE seqState #-}
resetSeqState :: IO Word64
resetSeqState = atomicModifyIORef' seqState (0,)
-- |Set flag and raise an exception if the flag is already set
setSeqState :: Int -> IO ()
setSeqState i | 0 <= i && i < 64 = atomicModifyIORef' seqState go
| otherwise = error "seqSeqState: flag index must be in [0..63]"
where
go x | testBit x i = error ("setSeqState: flag #"++show i++" already set")
| otherwise = (setBit x i, ())
-- weird type whose NFData instance calls 'setSeqState' when rnf-ed
data SeqSet = SeqSet !Int | SeqIgnore
deriving Show
instance NFData SeqSet where
rnf (SeqSet i) = unsafePerformIO $ setSeqState i
rnf (SeqIgnore) = ()
{-# NOINLINE rnf #-}
-- |Exception to be thrown for testing 'seq'/'rnf'
data RnfEx = RnfEx deriving (Eq, Show, Typeable)
instance Exception RnfEx
instance NFData RnfEx where rnf e = throw e
assertRnfEx :: () -> IO ()
assertRnfEx v = handleJust isWanted (const $ return ()) $ do
() <- evaluate v
assertFailure "failed to trigger expected RnfEx exception"
where isWanted = guard . (== RnfEx)
----------------------------------------------------------------------------
case_1, case_2, case_3, case_4_1, case_4_2, case_4_3, case_4_4 :: Test.Framework.Test
newtype Case1 = Case1 Int
deriving (Generic)
instance NFData Case1
case_1 = testCase "Case1" $ do
assertRnfEx $ rnf $ (Case1 (throw RnfEx))
----
data Case2 = Case2 Int
deriving (Generic)
instance NFData Case2
case_2 = testCase "Case2" $ do
assertRnfEx $ rnf $ (Case2 (throw RnfEx))
----
data Case3 = Case3 RnfEx
deriving (Generic)
instance NFData Case3
case_3 = testCase "Case3" $ do
assertRnfEx $ rnf $ Case3 RnfEx
----
data Case4 a = Case4a
| Case4b a a
| Case4c a (Case4 a)
deriving (Generic)
instance NFData a => NFData (Case4 a)
case_4_1 = testCase "Case4.1" $ withSeqState 0x0 $ do
evaluate $ rnf $ (Case4a :: Case4 SeqSet)
case_4_2 = testCase "Case4.2" $ withSeqState 0x3 $ do
evaluate $ rnf $ (Case4b (SeqSet 0) (SeqSet 1) :: Case4 SeqSet)
case_4_3 = testCase "Case4.3" $ withSeqState (bit 55) $ do
evaluate $ rnf $ (Case4b SeqIgnore (SeqSet 55) :: Case4 SeqSet)
case_4_4 = testCase "Case4.4" $ withSeqState 0xffffffffffffffff $ do
evaluate $ rnf $ (genCase 63)
where
genCase n | n > 1 = Case4c (SeqSet n) (genCase (n-1))
| otherwise = Case4b (SeqSet 0) (SeqSet 1)
----------------------------------------------------------------------------
main :: IO ()
main = defaultMain [tests]
where
tests = testGroup "" [case_1, case_2, case_3, case_4_1, case_4_2, case_4_3, case_4_4]
| DavidAlphaFox/ghc | libraries/deepseq/tests/Main.hs | bsd-3-clause | 4,291 | 0 | 16 | 923 | 1,245 | 647 | 598 | 95 | 1 |
module OneTimePad (solve) where
import Data.List (tails, isInfixOf)
import Data.Maybe (listToMaybe)
import Numeric (showHex)
import qualified Data.ByteString as B
import Data.ByteString.Char8 (pack)
import qualified Crypto.Hash.MD5 as MD5
import qualified Data.NibbleString as NS
windows :: Int -> [a] -> [[a]]
windows n xs = take (length xs - n + 1) . map (take n) . tails $ xs
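-- e.g. windows 3 "abcde" == ["abc","bcd","cde"]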
toNibbleList :: B.ByteString -> [NS.Nibble]
toNibbleList bs = B.unpack bs >>= NS.byte2Nibbles
allHashes :: String -> [B.ByteString]
allHashes prefix = map hash [0..]
where
prefixCtx = MD5.update MD5.init $ pack prefix
hash = MD5.finalize . MD5.update prefixCtx . pack . show
findTriplet :: [NS.Nibble] -> Maybe NS.Nibble
findTriplet ns = listToMaybe triplets
where
triplets = map head . filter (\[a, b, c] -> a == b && b == c) $ windows 3 ns
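-- An index counts as a key when its hash contains a triplet whose character
-- also appears five times in a row somewhere in the next 1000 hashes.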
keys' :: [[NS.Nibble]] -> Int -> [(Int, [NS.Nibble])]
keys' (h:hs) i = if isKey then (i, h) : keys' hs (i + 1) else keys' hs (i + 1)
where
isKey = maybe False (\c -> any (isInfixOf (replicate 5 c)) (take 1000 hs)) $ findTriplet h
keys :: [B.ByteString] -> [(Int, [NS.Nibble])]
keys hashes = keys' (map toNibbleList hashes) 0
stretchHash :: B.ByteString -> B.ByteString
stretchHash = MD5.hash . pack . concatMap (`showHex` "") . toNibbleList
solve :: String -> IO ()
solve input = do
let prefix = head . lines $ input
let ks = keys $ allHashes prefix
print . fst $ ks !! 63
let stretchedHashes = map (\x -> iterate stretchHash x !! 2016) $ allHashes prefix
let stretchedKeys = keys stretchedHashes
print . fst $ stretchedKeys !! 63
| cjlarose/advent-2016 | src/OneTimePad.hs | bsd-3-clause | 1,596 | 0 | 15 | 311 | 712 | 379 | 333 | 34 | 2 |
--------------------------------------------------------------------------
-- Copyright (c) 2007-2015 ETH Zurich.
-- All rights reserved.
--
-- This file is distributed under the terms in the attached LICENSE file.
-- If you do not find this file, copies can be found by writing to:
-- ETH Zurich D-INFK, Universitaetstasse 6, CH-8092 Zurich. Attn: Systems Group.
--
-- Arguments to major Hake targets
--
--------------------------------------------------------------------------
module Args where
import HakeTypes
import TreeDB
import Data.Maybe as Maybe
data Args = Args {
buildFunction :: TreeDB -> String -> Args -> HRule,
target :: String,
driverType :: String,
cFiles :: [String],
generatedCFiles :: [String],
cxxFiles :: [String],
generatedCxxFiles :: [String],
assemblyFiles :: [String],
flounderDefs :: [String],
flounderBindings :: [String], -- built stubs for all enabled backends
flounderExtraDefs :: [(String, [String])],
flounderExtraBindings :: [(String, [String])], -- build stubs for specific backends
flounderTHCDefs :: [String], -- TODO: this can probably be subsumed into the above?
flounderTHCStubs :: [String], -- TODO: this can probably be subsumed into the above?
mackerelDevices :: [String],
addCFlags :: [String],
addCxxFlags :: [String],
omitCFlags :: [String],
omitCxxFlags :: [String],
addIncludes :: [String],
addGeneratedIncludes :: [String],
omitIncludes :: [String],
addLinkFlags :: [String],
addLibraries :: [String],
addModules :: [String],
addGeneratedDependencies :: [String],
architectures :: [String],
skateSchemaDefs :: [String], -- just the Skate Schema headers
skateSchemas :: [String], -- Schema headers and functions
installDirs :: InstallDirs,
libraryOs :: Maybe Args -- Select a library OS
}
data InstallDirs = InstallDirs {
bindir :: String,
libdir :: String
}
defaultArgs = Args {
buildFunction = defaultBuildFn,
target = "",
driverType = "",
cFiles = [],
generatedCFiles = [],
cxxFiles = [],
generatedCxxFiles = [],
assemblyFiles = [],
flounderDefs = [],
flounderBindings = [],
flounderExtraDefs = [],
flounderExtraBindings = [],
flounderTHCDefs = [],
flounderTHCStubs = [],
mackerelDevices = [],
addCFlags = [],
addCxxFlags = [],
omitCFlags = [],
omitCxxFlags = [],
addIncludes = [],
addGeneratedIncludes = [],
omitIncludes = [],
addLinkFlags = [],
addLibraries = [],
addModules = [],
addGeneratedDependencies = [],
architectures = allArchitectures,
skateSchemaDefs = [],
skateSchemas = [],
installDirs = InstallDirs {
bindir = "/sbin",
libdir = "/lib"
},
-- default libos needs to be selected in application macro!
libraryOs = Maybe.Nothing
}
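-- Illustrative use: concrete targets are normally built from 'defaultArgs'
-- via record update, e.g. (hypothetical field values)
-- defaultArgs { target = "ahcid", cFiles = [ "main.c" ], addLibraries = [ "ahci" ] }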
makeTarget :: Maybe Args -> String
makeTarget args = Args.target (Maybe.fromJust args)
allArchitectures = [ "x86_64", "armv7", "armv8", "k1om" ]
allArchitectureFamilies = [ "x86_64", "arm", "k1om" ]
-- architectures that currently support THC
thcArchitectures = ["x86_64" ]
-- all known flounder backends that we might want to generate defs for
allFlounderBackends
= [ "lmp", "ump", "ump_ipi", "loopback", "rpcclient", "msgbuf", "multihop", "ahci", "local" ]
defaultBuildFn :: TreeDB -> String -> Args -> HRule
defaultBuildFn _ f _ =
Error ("Bad use of default Args in " ++ f)
showArgs :: String -> Args -> String
showArgs prefix a =
prefix ++ "Args:"
++ "\n target: " ++ (show $ target a)
++ "\n cFiles: " ++ (show $ cFiles a)
++ "\n generatedCFiles: " ++ (show $ generatedCFiles a)
++ "\n cxxFiles: " ++ (show $ cxxFiles a)
++ "\n generatedCxxFiles " ++ (show $ generatedCxxFiles a)
++ "\n assemblyFiles: " ++ (show $ assemblyFiles a)
++ "\n flounderDefs: " ++ (show $ flounderDefs a)
++ "\n flounderBindings: " ++ (show $ flounderBindings a)
++ "\n flounderExtraDefs: " ++ (show $ flounderExtraDefs a)
++ "\n flounderExtraBindings: " ++ (show $ flounderExtraBindings a)
++ "\n flounderTHCDefs: " ++ (show $ flounderTHCDefs a)
++ "\n flounderTHCStubs: " ++ (show $ flounderTHCStubs a)
++ "\n addCFlags: " ++ (show $ addCFlags a)
++ "\n addCxxFlags: " ++ (show $ addCxxFlags a)
++ "\n omitCFlags: " ++ (show $ omitCFlags a)
++ "\n omitCxxFlags: " ++ (show $ omitCxxFlags a)
++ "\n addIncludes: " ++ (show $ addIncludes a)
++ "\n omitIncludes: " ++ (show $ omitIncludes a)
++ "\n addLinkFlags: " ++ (show $ addLinkFlags a)
++ "\n addLibraries: " ++ (show $ addLibraries a)
++ "\n addModules: " ++ (show $ addModules a)
++ "\n addDeps: " ++ (show $ addGeneratedDependencies a)
++ "\n architectures: " ++ (show $ architectures a)
++ "\n skateSchemaDefs: " ++ (show $ skateSchemaDefs a)
++ "\n skateSchemas: " ++ (show $ skateSchemas a)
++ "\n"
| kishoredbn/barrelfish | hake/Args.hs | mit | 5,328 | 0 | 57 | 1,476 | 1,248 | 732 | 516 | 112 | 1 |
-- | Failures which can happen in Poll.
module Pos.Chain.Update.Poll.Failure
( PollVerFailure (..)
, reportUnexpectedError
) where
import Universum hiding (id, last)
import Formatting (bprint, build, int, sformat, stext, (%))
import qualified Formatting.Buildable
import Serokell.Data.Memory.Units (Byte, memory)
import Pos.Chain.Block.Header (HeaderHash)
import Pos.Chain.Update.ApplicationName (ApplicationName)
import Pos.Chain.Update.BlockVersion (BlockVersion)
import Pos.Chain.Update.BlockVersionData (BlockVersionData)
import Pos.Chain.Update.BlockVersionModifier (BlockVersionModifier)
import Pos.Chain.Update.SoftwareVersion (NumSoftwareVersion)
import Pos.Chain.Update.Vote (UpAttributes, UpId)
import Pos.Core (Coin, EpochIndex, ScriptVersion, StakeholderId,
coinF)
import Pos.Core.Reporting (MonadReporting, reportError)
import Pos.Crypto (shortHashF)
-- | PollVerFailure represents all possible errors which can
-- appear in Poll data verification.
data PollVerFailure
=
-- | 'BlockVersionModifier' for this 'BlockVersion' is already known and
-- the one we saw doesn't match it.
-- PollInconsistentBVM
-- pibExpected
-- pibFound
-- pibUpId
PollInconsistentBVM !BlockVersionModifier !BlockVersionModifier !UpId
-- | 'BlockVersion' is already adopted and 'BlockVersionData' associated
-- with it differs from the one we saw.
-- PollAlreadyAdoptedDiffers
-- paadAdopted
-- paadProposed
-- paadUpId
| PollAlreadyAdoptedDiffers !BlockVersionData !BlockVersionModifier !UpId
-- | Proposed script version must be the same as adopted one or
-- greater by one, but this rule is violated.
-- PollWrongScriptVersion
-- pwsvAdopted
-- pwsvProposed
-- pwsvUpId
| PollWrongScriptVersion !ScriptVersion !ScriptVersion !UpId
-- | A proposal tried to increase the block size limit more than it was
-- allowed to
-- PollLargeMaxBlockSize
-- plmbsMaxPossible
-- plmbsFound
-- plmbsUpId
| PollLargeMaxBlockSize !Byte !Byte !UpId
-- | A proposal attempted to change the end of the bootstrap era
-- post factum
-- PollBootstrapEraInvalidChange
-- pbeicLast
-- pbeicAdopted
-- pbeicProposed
-- pbeicUpId
| PollBootstrapEraInvalidChange !EpochIndex !EpochIndex !EpochIndex !UpId
| PollNotFoundScriptVersion !BlockVersion
| PollProposalAlreadyActive !UpId
-- | PollSmallProposalStake
-- pspsThreshold
-- pspsActual
-- pspsUpId
| PollSmallProposalStake !Coin !Coin !UpId
-- | PollNotRichman
-- pnrStakeholder
-- pnrThreshold
-- pnrStake
| PollNotRichman !StakeholderId !Coin !(Maybe Coin)
-- | PollUnknownProposal
-- pupStakeholder
-- pupProposal
| PollUnknownProposal !StakeholderId !UpId
| PollUnknownStakes !EpochIndex
-- PollWrongSoftwareVersion
-- pwsvStored
-- pwsvApp
-- pwsvGiven
-- pwsvUpId
| PollWrongSoftwareVersion
!(Maybe NumSoftwareVersion)
!ApplicationName
!NumSoftwareVersion
!UpId
-- | PollProposalIsDecided
-- ppidUpId
-- ppidStakeholder
| PollProposalIsDecided !UpId !StakeholderId
-- | PollExtraRevote
-- perUpId
-- perStakeholder
-- perDecision
| PollExtraRevote !UpId !StakeholderId !Bool
-- | PollWrongHeaderBlockVersion
-- pwhpvGiven
-- pwhpvAdopted
| PollWrongHeaderBlockVersion !BlockVersion !BlockVersion
-- | PollBadBlockVersion
-- pbpvUpId
-- pbpvGiven
-- pbpvAdopted
| PollBadBlockVersion !UpId !BlockVersion !BlockVersion
-- | PollTooLargeProposal
-- ptlpUpId
-- ptlpSize
-- ptlpLimit
| PollTooLargeProposal !UpId !Byte !Byte
-- | PollMoreThanOneProposalPerEpoch
-- ptopFrom
-- ptopUpId
| PollMoreThanOneProposalPerEpoch !StakeholderId !UpId
-- | PollUnknownAttributesInProposal
-- puapUpId
-- puapAttrs
| PollUnknownAttributesInProposal !UpId !UpAttributes
-- | PollTipMismatch
-- ptmTipDB
-- ptmTipMemory
| PollTipMismatch !HeaderHash !HeaderHash
| PollInvalidUpdatePayload !Text
| PollInternalError !Text
| PollUpdateVersionNoChange !BlockVersion !NumSoftwareVersion
instance Buildable PollVerFailure where
build (PollInconsistentBVM pibExpected pibFound pibUpId) =
bprint ("proposal "%shortHashF%" contains block version"%
" which is already competing and its"%
" BlockVersionModifier is different"%
" (expected "%build%", proposed "%build%")")
pibUpId pibExpected pibFound
build (PollAlreadyAdoptedDiffers paadAdopted paadProposed paadUpId) =
bprint ("proposal "%shortHashF%" contains block version"%
" which is already adopted and its"%
" BlockVersionModifier doesn't correspond to the adopted"%
" BlockVersionData (adopted "%build%", proposed "%build%")")
paadUpId paadAdopted paadProposed
build (PollWrongScriptVersion pwsvAdopted pwsvProposed pwsvUpId) =
bprint ("proposal "%shortHashF%" contains script version"%
" which is neither same not greater by one than the"%
" adopted one (adopted one is "%int%
", proposed one is "%int%")")
pwsvUpId pwsvAdopted pwsvProposed
build (PollLargeMaxBlockSize maxPossible found upId) =
bprint ("proposal "%build%" tried to increase max block size"%
" beyond what is allowed"%
" (expected max. "%memory%", found "%memory%")")
upId maxPossible found
build (PollBootstrapEraInvalidChange last adopted proposed upId) =
bprint ("proposal "%build%" tried to change the end of the bootstrap"%
" era to epoch"%build%", but the bootstrap era has ended with"%
" unlock stakes epoch "%build%", and now the epoch is "%
build%".")
upId proposed adopted last
build (PollProposalAlreadyActive upId) =
bprint ("proposal "%build%" was already proposed") upId
build (PollNotFoundScriptVersion pv) =
bprint ("not found script version for protocol version "%build) pv
build (PollSmallProposalStake threshold actual upId) =
bprint ("proposal "%build%
" doesn't have enough stake from positive votes "%
"(threshold is "%coinF%", proposal has "%coinF%")")
upId threshold actual
build (PollNotRichman id threshold stake) =
bprint ("voter "%build%" is not richman (his stake is "%stext%", but"%
" threshold is "%coinF%")")
id (maybe "negligible" (sformat coinF) stake) threshold
build (PollUnknownProposal stakeholder proposal) =
bprint (build%" has voted for unkown proposal "%build)
stakeholder proposal
build (PollUnknownStakes epoch) =
bprint ("stake distribution for epoch "%build%" is unknown") epoch
build (PollWrongSoftwareVersion pwsvStored pwsvApp pwsvGiven pwsvUpId) =
bprint ("proposal "%build%" has wrong software version for app "%
build%" (last known is "%stext%", proposal contains "%int%")")
pwsvUpId pwsvApp (maybe "unknown" pretty pwsvStored) pwsvGiven
build (PollProposalIsDecided
ppidUpId
ppidStakeholder) =
bprint ("proposal "%build%" is in decided state, but stakeholder "%
build%" has voted for it")
ppidUpId ppidStakeholder
build (PollExtraRevote perUpId perStakeholder perDecision) =
bprint ("stakeholder "%build%" vote "%stext%" proposal "
%build%" more than once")
perStakeholder (bool "against" "for" perDecision) perUpId
build (PollWrongHeaderBlockVersion pwhpvGiven pwhpvAdopted) =
bprint ("wrong protocol version has been seen in header: "%
build%" (current adopted is "%build%"), "%
"this version is smaller than last adopted "%
"or is not confirmed")
pwhpvGiven pwhpvAdopted
build (PollBadBlockVersion pbpvUpId pbpvGiven pbpvAdopted) =
bprint ("proposal "%build%" has bad protocol version: "%
build%" (current adopted is "%build%")")
pbpvUpId pbpvGiven pbpvAdopted
build (PollTooLargeProposal ptlpUpId ptlpSize ptlpLimit) =
bprint ("update proposal "%shortHashF%" exceeds maximal size ("%
int%" > "%int%")")
ptlpUpId ptlpSize ptlpLimit
build (PollMoreThanOneProposalPerEpoch ptopFrom ptopUpId) =
bprint ("stakeholder "%shortHashF%
" proposed second proposal "%shortHashF%" in epoch")
ptopFrom ptopUpId
build (PollUnknownAttributesInProposal puapUpId puapAttrs) =
bprint ("proposal "%shortHashF%" has unknown attributes "%build)
puapUpId puapAttrs
build (PollTipMismatch ptmTipMemory ptmTipDB) =
bprint ("tip we store in US mem-state ("%shortHashF%
") differs from the tip we store in DB ("%build%")")
ptmTipMemory ptmTipDB
build (PollInvalidUpdatePayload msg) =
bprint ("invalid update payload: "%stext) msg
build (PollInternalError msg) =
bprint ("internal error: "%stext) msg
build (PollUpdateVersionNoChange blockVer softVer) =
bprint ("update did not increment the block version ("%build
%") or the software version ("%build%").")
blockVer softVer
-- | Report an error if it's unexpected.
--
-- If tips are different, we report error, because it's suspicious and
-- we want to review logs. If it's internal error, we definitely want
-- to investigate it.
reportUnexpectedError
:: ( Monad m, MonadReporting m )
=> m (Either PollVerFailure a)
-> m (Either PollVerFailure a)
reportUnexpectedError action = do
res <- action
-- REPORT:ERROR Internal error in update system or tips mismatch.
res <$
case res of
Left (PollInternalError msg) ->
reportError $
"Internal error occurred in update system: " <> msg
Left (err@(PollTipMismatch {})) -> reportError (pretty err)
_ -> pass
| input-output-hk/pos-haskell-prototype | chain/src/Pos/Chain/Update/Poll/Failure.hs | mit | 10,591 | 0 | 17 | 2,906 | 1,820 | 970 | 850 | 262 | 3 |
module Main where
import Application.Boot
main = boot
| EPashkin/gamenumber-freegame | src/Main.hs | gpl-3.0 | 56 | 0 | 4 | 10 | 14 | 9 | 5 | 3 | 1 |
{-# LANGUAGE DataKinds #-}
-- GSoC 2015 - Haskell bindings for OpenCog.
-- | This Module offers useful functions for working on an AtomSpace.
module OpenCog.AtomSpace.Utils (
showAtom
, printAtom
, genInsert
, genGet
) where
import OpenCog.AtomSpace.Types (Atom(..),TruthVal(..))
import OpenCog.AtomSpace.Internal (fromTVRaw,toRaw,AtomRaw(..))
import OpenCog.AtomSpace.Api (get,insert)
import OpenCog.AtomSpace.Types
import OpenCog.AtomSpace.AtomType (AtomType(..))
import OpenCog.AtomSpace.Env (AtomSpace)
import Data.Functor ((<$>))
import Data.Typeable (Typeable)
-- | 'showTV' shows a truth value in opencog notation.
showTV :: TruthVal -> String
showTV (SimpleTV a b ) = "(stv "++show a++" "++show b++")"
showTV (CountTV a b c ) = "(ctv "++show a++" "++show b++" "++show c++")"
showTV (IndefTV a b c d e) = "(itv "++show a++" "++show b++" "
++show c++" "++show d++" "
++show e++")"
showTV (FuzzyTV a b ) = "(ftv "++show a++" "++show b++")"
showTV (ProbTV a b c ) = "(ptv "++show a++" "++show b++" "++show c++")"
showTV' :: Maybe TruthVal -> String
showTV' (Just tv) = showTV tv
showTV' Nothing = ""
-- | 'showAtom' shows an atom in opencog notation (indented notation).
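-- The rendering is one line per atom: the atom type, its truth value and,
-- for nodes, the quoted name, with link children indented two spaces per
-- nesting level.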
showAtom :: Typeable a => Atom a -> String
showAtom at = concatWNewline $ list 0 $ toRaw at
where
list :: Int -> AtomRaw -> [String]
list lv at = case at of
Link atype lraw tv -> let showtv = showTV $ fromTVRaw tv
in [tab lv $ concatWSpaces [atype,showtv]]
++ concat (map (list (lv+1)) lraw)
Node atype aname tv -> let showtv = showTV $ fromTVRaw tv
in [tab lv $ concatWSpaces [atype,showtv
,"\""++aname++"\""]]
concatWNewline :: [String] -> String
concatWNewline [] = []
concatWNewline (x:xs) = foldr1 (\a b -> a++"\n"++b) (x:xs)
concatWSpaces :: [String] -> String
concatWSpaces [] = []
concatWSpaces (x:xs) = foldr1 (\a b -> if a /= ""
then a++" "++b
else b) (x:xs)
tab :: Int -> String -> String
tab 0 s = s
tab lv s = " "++ tab (lv-1) s
-- | 'printAtom' prints the given atom on stdout.
printAtom :: Typeable a => Atom a -> IO ()
printAtom at = putStrLn $ showAtom at
genInsert :: Gen a -> AtomSpace ()
genInsert a = appGen insert a
genGet :: Gen AtomT -> AtomSpace (Maybe (Gen AtomT))
genGet (Gen a) = do
res <- get a
case res of
Just x -> return $ Just $ Gen x
Nothing -> return $ Nothing
| cosmoharrigan/atomspace | opencog/haskell/OpenCog/AtomSpace/Utils.hs | agpl-3.0 | 2,787 | 0 | 19 | 904 | 1,007 | 519 | 488 | 56 | 6 |
-- https://projecteuler.net/problem=6
square :: Num a => a -> a
square x = x*x
sumOfSquares :: Num a => [a] -> a
sumOfSquares xs = sum (map square xs)
squareOfSum :: Num a => [a] -> a
squareOfSum xs = square (sum xs)
diffOfSums :: Num a => [a] -> a
diffOfSums xs = (squareOfSum xs) - (sumOfSquares xs)
-- diffOfSums [1..100]
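-- Worked example with the first ten natural numbers (plain arithmetic, for
-- illustration): sumOfSquares [1..10] = 385, squareOfSum [1..10] = 55^2 = 3025,
-- so diffOfSums [1..10] = 3025 - 385 = 2640.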
| nothiphop/project-euler | 006/solution.hs | apache-2.0 | 329 | 0 | 7 | 66 | 148 | 75 | 73 | 8 | 1 |
--------------------------------------------------------------------------------
module Language.Haskell.Stylish.Step.Records.Tests
( tests
) where
--------------------------------------------------------------------------------
import Test.Framework (Test, testGroup)
import Test.Framework.Providers.HUnit (testCase)
import Test.HUnit (Assertion, (@=?))
--------------------------------------------------------------------------------
import Language.Haskell.Stylish.Step.Records
import Language.Haskell.Stylish.Tests.Util
--------------------------------------------------------------------------------
tests :: Test
tests = testGroup "Language.Haskell.Stylish.Step.Records.Tests"
[ testCase "case 01" case01
, testCase "case 02" case02
]
--------------------------------------------------------------------------------
case01 :: Assertion
case01 = expected @=? testStep step input
where
input = unlines
[ "data Foo = Foo"
, " { foo :: Int"
, " , barqux :: String"
, " } deriving (Show)"
]
expected = unlines
[ "data Foo = Foo"
, " { foo :: Int"
, " , barqux :: String"
, " } deriving (Show)"
]
--------------------------------------------------------------------------------
case02 :: Assertion
case02 = input @=? testStep step input
where
-- Don't attempt to align this since a field spans multiple lines
input = unlines
[ "data Foo = Foo"
, " { foo :: Int"
, " , barqux"
, " :: String"
, " } deriving (Show)"
]
| silkapp/stylish-haskell | tests/Language/Haskell/Stylish/Step/Records/Tests.hs | bsd-3-clause | 1,719 | 0 | 8 | 462 | 214 | 132 | 82 | 31 | 1 |
{-# LANGUAGE Trustworthy #-}
-- ----------------------------------------------------------------------------
-- | This module provides scalable event notification for file
-- descriptors and timeouts.
--
-- This module should be considered GHC internal.
--
-- ----------------------------------------------------------------------------
module GHC.Event
( -- * Types
EventManager
-- * Creation
, getSystemEventManager
, new
, getSystemTimerManager
-- * Registering interest in I/O events
, Event
, evtRead
, evtWrite
, IOCallback
, FdKey(keyFd)
, registerFd
, registerFd_
, unregisterFd
, unregisterFd_
, closeFd
-- * Registering interest in timeout events
, TimeoutCallback
, TimeoutKey
, registerTimeout
, updateTimeout
, unregisterTimeout
) where
import GHC.Event.Manager
import GHC.Event.TimerManager (TimeoutCallback, TimeoutKey, registerTimeout,
updateTimeout, unregisterTimeout)
import GHC.Event.Thread (getSystemEventManager, getSystemTimerManager)
| ryantm/ghc | libraries/base/GHC/Event.hs | bsd-3-clause | 1,099 | 0 | 5 | 249 | 124 | 86 | 38 | 29 | 0 |
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -Wall #-}
import Data.ListLike
import Data.ListLike.IO
import qualified Data.ByteString.Lazy.Char8 as BS
import Prelude hiding (putStrLn)
greetings :: [BS.ByteString]
greetings = ["おはよう", "こんばんわ", "さよなら"]
punkt :: BS.ByteString
punkt = "、"
main :: IO ()
main = do
putStrLn $ BS.intercalate punkt greetings
| nushio3/Paraiso | attic/Unicode/ByteString.hs | bsd-3-clause | 388 | 0 | 9 | 54 | 98 | 59 | 39 | 13 | 1 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE LambdaCase #-}
module GHC.Prof.Parser
( profile
, timestamp
, title
, commandLine
, totalTime
, totalAlloc
, topCostCentres
, aggregatedCostCentre
, costCentres
, costCentre
) where
import Control.Applicative
import Control.Monad
import Data.Char (isDigit, isSpace)
import Data.Foldable (asum, foldl')
import Data.Maybe
import Data.Time
import Data.Text (Text)
import Data.Attoparsec.Text as A
import qualified Data.Set as Set
import qualified Data.Text as T
import qualified Data.Text.Read as TR
import Control.Monad.Extras (seqM)
import GHC.Prof.Types
#if MIN_VERSION_containers(0, 5, 0)
import qualified Data.IntMap.Strict as IntMap
import qualified Data.Map.Strict as Map
#else
import qualified Data.IntMap as IntMap
import qualified Data.Map as Map
#endif
-- | Parse a GHC time-allocation profiling report
profile :: Parser Profile
profile = do
skipHorizontalSpace
profileTimestamp <- timestamp; skipSpace
void title; skipSpace
profileCommandLine <- commandLine; skipSpace
profileTotalTime <- totalTime; skipSpace
profileTotalAlloc <- totalAlloc; skipSpace
profileTopCostCentres <- topCostCentres; skipSpace
profileCostCentreTree <- costCentres; skipSpace
endOfInput
return $! Profile {..}
-- | Parse the timestamp in a header as local time
timestamp :: Parser LocalTime
timestamp = do
parseDayOfTheWeek >> skipSpace
month <- parseMonth; skipSpace
day <- parseDay; skipSpace
tod <- parseTimeOfDay; skipSpace
year <- parseYear; skipSpace
return $! LocalTime
{ localDay = fromGregorian year month day
, localTimeOfDay = tod
}
where
parseYear = decimal
parseMonth = A.take 3 >>= nameToInt
where
nameToInt name = case name of
"Jan" -> return 1; "Feb" -> return 2; "Mar" -> return 3
"Apr" -> return 4; "May" -> return 5; "Jun" -> return 6
"Jul" -> return 7; "Aug" -> return 8; "Sep" -> return 9
"Oct" -> return 10; "Nov" -> return 11; "Dec" -> return 12
_ -> fail $ "timestamp.toNum: invalid month - " ++ show name
parseDay = decimal
parseTimeOfDay = TimeOfDay
<$> decimal <* string ":"
<*> decimal
<*> pure 0
parseDayOfTheWeek = takeTill isSpace
title :: Parser Text
title = string "Time and Allocation Profiling Report (Final)"
commandLine :: Parser Text
commandLine = A.takeWhile $ not . isEndOfLine
totalTime :: Parser TotalTime
totalTime = do
void $ string "total time ="; skipSpace
elapsed <- rational
void $ string " secs"; skipSpace
(ticks, resolution, processors) <- parens $ (,,)
<$> decimal <* string " ticks @ "
<*> picoSeconds
<*> optional (string ", " *> decimal <* many1 (notChar ')'))
return $! TotalTime
{ totalTimeElapsed = elapsed
, totalTimeTicks = ticks
, totalTimeResolution = picosecondsToDiffTime resolution
, totalTimeProcessors = processors
}
where
picoSeconds = asum
[ ((10 `pow` 3)*) <$> decimal <* string " us"
, ((10 `pow` 6)*) <$> decimal <* string " ms"
]
pow :: Integer -> Int -> Integer
pow = (^)
totalAlloc :: Parser TotalAlloc
totalAlloc = do
string "total alloc =" >> skipSpace
!n <- groupedDecimal
string " bytes" >> skipSpace
parens $ void $ string "excludes profiling overheads"
return TotalAlloc { totalAllocBytes = n }
where
groupedDecimal = do
ds <- decimal `sepBy` char ','
return $! foldl' go 0 ds
where
go z n = z * 1000 + n
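-- Illustration of how the comma-grouped total is folded in 'totalAlloc'
-- (hypothetical input, not a real report line): "1,234,567" is parsed as
-- [1,234,567] and folded to ((0*1000 + 1)*1000 + 234)*1000 + 567 = 1234567.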
newtype HeaderParams = HeaderParams
{ headerHasSrc :: Bool -- ^ SRC column exists
} deriving Show
header :: Parser HeaderParams
header = do
optional_ $ do
string "individual" >> skipHorizontalSpace
string "inherited" >> skipSpace
string "COST CENTRE" >> skipHorizontalSpace
string "MODULE" >> skipHorizontalSpace
headerHasSrc <- option False $ True <$ string "SRC"; skipHorizontalSpace
optional_ $ string "no." >> skipHorizontalSpace
optional_ $ string "entries" >> skipHorizontalSpace
string "%time" >> skipHorizontalSpace
string "%alloc" >> skipHorizontalSpace
optional_ $ do
string "%time" >> skipHorizontalSpace
string "%alloc" >> skipHorizontalSpace
optional_ $ do
string "ticks" >> skipHorizontalSpace
string "bytes" >> skipHorizontalSpace
return HeaderParams
{..}
topCostCentres :: Parser [AggregatedCostCentre]
topCostCentres = do
params <- header; skipSpace
aggregatedCostCentre params `sepBy1` endOfLine
aggregatedCostCentre :: HeaderParams -> Parser AggregatedCostCentre
aggregatedCostCentre HeaderParams {..} = AggregatedCostCentre
<$> symbol <* skipHorizontalSpace -- name
<*> symbol <* skipHorizontalSpace -- module
<*> source <* skipHorizontalSpace -- src
<*> pure Nothing -- entries
<*> scientific <* skipHorizontalSpace -- %time
<*> scientific <* skipHorizontalSpace -- %alloc
<*> optional decimal <* skipHorizontalSpace -- ticks
<*> optional decimal <* skipHorizontalSpace -- bytes
where
source
| headerHasSrc = Just <$> sourceSpan
| otherwise = pure Nothing
costCentres :: Parser CostCentreTree
costCentres = do
params <- header; skipSpace
costCentreTree params
costCentre :: HeaderParams -> Parser CostCentre
costCentre params = do
name <- symbol; skipHorizontalSpace
(modName, src, no, (entries, indTime, indAlloc, inhTime, inhAlloc, optInfo))
<- validCostCentre params <|> jammedCostCentre
return $! CostCentre
{ costCentreName = name
, costCentreModule = modName
, costCentreSrc = src
, costCentreNo = no
, costCentreEntries = entries
, costCentreIndTime = indTime
, costCentreIndAlloc = indAlloc
, costCentreInhTime = inhTime
, costCentreInhAlloc = inhAlloc
, costCentreTicks = fst <$> optInfo
, costCentreBytes = snd <$> optInfo
}
where
validCostCentre HeaderParams {..} = do
modName <- symbol; skipHorizontalSpace
src <- if headerHasSrc
then do
!sym <- sourceSpan
return $ Just sym
else pure Nothing
skipHorizontalSpace
no <- decimal; skipHorizontalSpace
vals <- metrics
return (modName, src, no, vals)
-- Workaround for https://ghc.haskell.org/trac/ghc/ticket/8811.
-- This bug had been fixed before the SRC column was implemented so
-- @sourceSpan@ isn't parsed here.
    -- Caveat: This parser can be confused if the module name contains digits and
-- the digits are jammed with the cost centre number. In such cases, all
-- the digits are parsed as a number of entries.
jammedCostCentre = do
jammed <- symbol; skipHorizontalSpace
let modName = T.dropWhileEnd isDigit jammed
no <- either fail (return . fst) $ TR.decimal $ T.takeWhileEnd isDigit jammed
vals <- metrics
return (modName, Nothing, no, vals)
metrics = do
entries <- decimal; skipHorizontalSpace
indTime <- scientific; skipHorizontalSpace
indAlloc <- scientific; skipHorizontalSpace
inhTime <- scientific; skipHorizontalSpace
inhAlloc <- scientific; skipHorizontalSpace
optInfo <- optional $ do
!ticks <- decimal; skipHorizontalSpace
!bytes <- decimal
return (ticks, bytes)
return (entries, indTime, indAlloc, inhTime, inhAlloc, optInfo)
costCentreTree :: HeaderParams -> Parser CostCentreTree
costCentreTree params = buildTree <$> costCentreList
where
costCentreList = nestedCostCentre `sepBy1` endOfLine
nestedCostCentre = (,)
<$> nestLevel
<*> costCentre params
<* skipHorizontalSpace
nestLevel = howMany space
type Level = Int
-- | TreePath represents a path to a node in a cost centre tree.
--
-- Invariant: @'treePathLevel' == length 'treePath'@
data TreePath = TreePath
{ treePathLevel :: !Level
-- ^ Current depth of the path
, treePath :: [CostCentreNo]
-- ^ Path to the node
}
push :: CostCentreNo -> TreePath -> TreePath
push ccNo path@TreePath {..} = path
{ treePathLevel = treePathLevel + 1
, treePath = ccNo:treePath
}
popTo :: Level -> TreePath -> TreePath
popTo level path@TreePath {..} = path
{ treePathLevel = level
, treePath = drop (treePathLevel - level) treePath
}
currentNo :: TreePath -> Maybe CostCentreNo
currentNo TreePath {treePath} = listToMaybe treePath
buildTree :: [(Level, CostCentre)] -> CostCentreTree
buildTree = snd . foldl' go (TreePath 0 [], emptyCostCentreTree)
where
go
:: (TreePath, CostCentreTree)
-> (Level, CostCentre)
-> (TreePath, CostCentreTree)
go (!path, !CostCentreTree {..}) (level, node) = (path', tree')
where
ccNo = costCentreNo node
parentPath = popTo level path
parentNo = currentNo parentPath
path' = push ccNo parentPath
tree' = CostCentreTree
{ costCentreNodes = IntMap.insert ccNo node costCentreNodes
, costCentreParents = maybe costCentreParents
(\parent -> IntMap.insert ccNo parent costCentreParents)
parentNo
, costCentreChildren = maybe costCentreChildren
(\parent -> IntMap.insertWith Set.union parent
(Set.singleton node)
costCentreChildren)
parentNo
, costCentreCallSites = Map.insertWith Set.union
(costCentreName node, costCentreModule node)
(Set.singleton node)
costCentreCallSites
, costCentreAggregate = Map.alter
(Just . updateCostCentre)
(costCentreModule node)
costCentreAggregate
}
aggregate = AggregatedCostCentre
{ aggregatedCostCentreName = costCentreName node
, aggregatedCostCentreModule = costCentreModule node
, aggregatedCostCentreSrc = costCentreSrc node
, aggregatedCostCentreEntries = Just $! costCentreEntries node
, aggregatedCostCentreTime = costCentreIndTime node
, aggregatedCostCentreAlloc = costCentreIndAlloc node
, aggregatedCostCentreTicks = costCentreTicks node
, aggregatedCostCentreBytes = costCentreBytes node
}
updateCostCentre
:: Maybe (Map.Map Text AggregatedCostCentre)
-> Map.Map Text AggregatedCostCentre
updateCostCentre = \case
Nothing -> Map.singleton (costCentreName node) aggregate
Just costCentreByName ->
Map.insertWith
addCostCentre
(costCentreName node)
aggregate
costCentreByName
addCostCentre x y = x
{ aggregatedCostCentreEntries = seqM $ (+)
<$> aggregatedCostCentreEntries x
<*> aggregatedCostCentreEntries y
, aggregatedCostCentreTime =
aggregatedCostCentreTime x + aggregatedCostCentreTime y
, aggregatedCostCentreAlloc =
aggregatedCostCentreAlloc x + aggregatedCostCentreAlloc y
, aggregatedCostCentreTicks = seqM $ (+)
<$> aggregatedCostCentreTicks x
<*> aggregatedCostCentreTicks y
, aggregatedCostCentreBytes = seqM $ (+)
<$> aggregatedCostCentreBytes x
<*> aggregatedCostCentreBytes y
}
howMany :: Parser a -> Parser Int
howMany p = loop 0
where
loop !n = (p >> loop (succ n)) <|> return n
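-- For example, @howMany space@ consumes leading spaces and returns how many
-- it saw; 'costCentreTree' uses exactly that to measure the nesting level of
-- each cost centre line.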
parens :: Parser a -> Parser a
parens p = string "(" *> p <* string ")"
symbol :: Parser Text
symbol = A.takeWhile $ not . isSpace
sourceSpan :: Parser Text
sourceSpan = asum
[ T.pack <$> angleBrackets
, symbol
]
where
angleBrackets = (:) <$> char '<' <*> manyTill anyChar (char '>')
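-- 'sourceSpan' accepts the SRC field in two shapes: an angle-bracketed token,
-- which may contain spaces (for instance "<built-in>", an assumption about
-- typical GHC output), or any whitespace-free token such as a source span.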
skipHorizontalSpace :: Parser ()
skipHorizontalSpace = void $ A.takeWhile isHorizontalSpace
optional_ :: Parser a -> Parser ()
optional_ = void . optional
| maoe/ghc-time-alloc-prof | src/GHC/Prof/Parser.hs | bsd-3-clause | 11,895 | 0 | 19 | 2,871 | 2,958 | 1,540 | 1,418 | 294 | 13 |
{-|
Module : Main
Copyright : (c) Henry J. Wylde, 2015
License : BSD3
Maintainer : [email protected]
-}
module Main (
-- * Main
main
) where
import Options.Applicative
import Pipes
import Prelude hiding (log)
import qualified Qux.Command.Build as Build
import qualified Qux.Command.Check as Check
import qualified Qux.Command.Compile as Compile
import qualified Qux.Command.Dependencies as Dependencies
import qualified Qux.Command.Print as Print
import Qux.Options
import Qux.Worker
main :: IO ()
main = customExecParser quxPrefs quxInfo >>= handle
handle :: Options -> IO ()
handle options = runWorkerT $ (logOptions options >> worker) >-> quietFilter options >-> verboseFilter options
where
worker = case argCommand options of
Build options -> Build.handle options
Check options -> Check.handle options
Compile options -> Compile.handle options
Dependencies options -> Dependencies.handle options
Print options -> Print.handle options
logOptions :: Options -> WorkerT IO ()
logOptions = log Debug . show
quietFilter :: Monad m => Options -> Pipe Message Message m r
quietFilter options
| optQuiet options = requirePriority Error
| otherwise = cat
verboseFilter :: MonadIO m => Options -> Pipe Message Message m r
verboseFilter options
| optVerbose options = prependPriority >-> prependTimestamp
| otherwise = requirePriority Info
| hjwylde/qux | app/Main.hs | bsd-3-clause | 1,562 | 0 | 11 | 420 | 381 | 197 | 184 | 32 | 5 |
ignore "Eta reduce"
| markus1189/discrimination | HLint.hs | bsd-2-clause | 20 | 0 | 5 | 3 | 7 | 2 | 5 | -1 | -1 |
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Client.Configure
-- Copyright : (c) David Himmelstrup 2005,
-- Duncan Coutts 2005
-- License : BSD-like
--
-- Maintainer : [email protected]
-- Portability : portable
--
-- High level interface to configuring a package.
-----------------------------------------------------------------------------
module Distribution.Client.Configure (
configure,
configureSetupScript,
chooseCabalVersion,
) where
import Distribution.Client.Dependency
import Distribution.Client.Dependency.Types (AllowNewer(..), isAllowNewer)
import qualified Distribution.Client.InstallPlan as InstallPlan
import Distribution.Client.InstallPlan (InstallPlan)
import Distribution.Client.IndexUtils as IndexUtils
( getSourcePackages, getInstalledPackages )
import Distribution.Client.Setup
( ConfigExFlags(..), configureCommand, filterConfigureFlags )
import Distribution.Client.Types as Source
import Distribution.Client.SetupWrapper
( setupWrapper, SetupScriptOptions(..), defaultSetupScriptOptions )
import Distribution.Client.Targets
( userToPackageConstraint )
import qualified Distribution.Client.ComponentDeps as CD
import Distribution.Package (PackageId)
import Distribution.Client.JobControl (Lock)
import Distribution.Simple.Compiler
( Compiler, CompilerInfo, compilerInfo, PackageDB(..), PackageDBStack )
import Distribution.Simple.Program (ProgramConfiguration )
import Distribution.Simple.Setup
( ConfigFlags(..), fromFlag, toFlag, flagToMaybe, fromFlagOrDefault )
import Distribution.Simple.PackageIndex (InstalledPackageIndex)
import Distribution.Simple.Utils
( defaultPackageDesc )
import qualified Distribution.InstalledPackageInfo as Installed
import Distribution.Package
( Package(..), InstalledPackageId, packageName
, Dependency(..), thisPackageVersion
)
import qualified Distribution.PackageDescription as PkgDesc
import Distribution.PackageDescription.Parse
( readPackageDescription )
import Distribution.PackageDescription.Configuration
( finalizePackageDescription )
import Distribution.Version
( anyVersion, thisVersion )
import Distribution.Simple.Utils as Utils
( notice, info, debug, die )
import Distribution.System
( Platform )
import Distribution.Verbosity as Verbosity
( Verbosity )
import Distribution.Version
( Version(..), VersionRange, orLaterVersion )
#if !MIN_VERSION_base(4,8,0)
import Data.Monoid (Monoid(..))
#endif
import Data.Maybe (isJust, fromMaybe)
-- | Choose the Cabal version such that the setup scripts compiled against this
-- version will support the given command-line flags.
chooseCabalVersion :: ConfigExFlags -> Maybe Version -> VersionRange
chooseCabalVersion configExFlags maybeVersion =
maybe defaultVersionRange thisVersion maybeVersion
where
-- Cabal < 1.19.2 doesn't support '--exact-configuration' which is needed
-- for '--allow-newer' to work.
allowNewer = fromFlagOrDefault False $
fmap isAllowNewer (configAllowNewer configExFlags)
defaultVersionRange = if allowNewer
then orLaterVersion (Version [1,19,2] [])
else anyVersion
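-- For example, 'chooseCabalVersion' with '--allow-newer' in effect and no
-- explicit Cabal version requested yields the range ">= 1.19.2"; without
-- '--allow-newer' any Cabal version is acceptable, unless a specific version
-- was given.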
-- | Configure the package found in the local directory
configure :: Verbosity
-> PackageDBStack
-> [Repo]
-> Compiler
-> Platform
-> ProgramConfiguration
-> ConfigFlags
-> ConfigExFlags
-> [String]
-> IO ()
configure verbosity packageDBs repos comp platform conf
configFlags configExFlags extraArgs = do
installedPkgIndex <- getInstalledPackages verbosity comp packageDBs conf
sourcePkgDb <- getSourcePackages verbosity repos
progress <- planLocalPackage verbosity comp platform configFlags configExFlags
installedPkgIndex sourcePkgDb
notice verbosity "Resolving dependencies..."
maybePlan <- foldProgress logMsg (return . Left) (return . Right)
progress
case maybePlan of
Left message -> do
info verbosity $
"Warning: solver failed to find a solution:\n"
++ message
++ "Trying configure anyway."
setupWrapper verbosity (setupScriptOptions installedPkgIndex Nothing) Nothing
configureCommand (const configFlags) extraArgs
Right installPlan -> case InstallPlan.ready installPlan of
[pkg@(ReadyPackage (SourcePackage _ _ (LocalUnpackedPackage _) _) _ _ _)] -> do
configurePackage verbosity
(InstallPlan.planPlatform installPlan)
(InstallPlan.planCompiler installPlan)
(setupScriptOptions installedPkgIndex (Just pkg))
configFlags pkg extraArgs
_ -> die $ "internal error: configure install plan should have exactly "
++ "one local ready package."
where
setupScriptOptions :: InstalledPackageIndex -> Maybe ReadyPackage -> SetupScriptOptions
setupScriptOptions =
configureSetupScript
packageDBs
comp
platform
conf
(fromFlagOrDefault
(useDistPref defaultSetupScriptOptions)
(configDistPref configFlags))
(chooseCabalVersion
configExFlags
(flagToMaybe (configCabalVersion configExFlags)))
Nothing
False
logMsg message rest = debug verbosity message >> rest
configureSetupScript :: PackageDBStack
-> Compiler
-> Platform
-> ProgramConfiguration
-> FilePath
-> VersionRange
-> Maybe Lock
-> Bool
-> InstalledPackageIndex
-> Maybe ReadyPackage
-> SetupScriptOptions
configureSetupScript packageDBs
comp
platform
conf
distPref
cabalVersion
lock
forceExternal
index
mpkg
= SetupScriptOptions {
useCabalVersion = cabalVersion
, useCompiler = Just comp
, usePlatform = Just platform
, usePackageDB = packageDBs'
, usePackageIndex = index'
, useProgramConfig = conf
, useDistPref = distPref
, useLoggingHandle = Nothing
, useWorkingDir = Nothing
, setupCacheLock = lock
, useWin32CleanHack = False
, forceExternalSetupMethod = forceExternal
-- If we have explicit setup dependencies, list them; otherwise, we give
-- the empty list of dependencies; ideally, we would fix the version of
-- Cabal here, so that we no longer need the special case for that in
      -- `compileSetupExecutable` in `externalSetupMethod`. However, we don't
      -- yet know the version of Cabal at this point; we only find that out
      -- there. Therefore, for now, we just leave this blank.
, useDependencies = fromMaybe [] explicitSetupDeps
, useDependenciesExclusive = isJust explicitSetupDeps
}
where
-- When we are compiling a legacy setup script without an explicit
-- setup stanza, we typically want to allow the UserPackageDB for
-- finding the Cabal lib when compiling any Setup.hs even if we're doing
    -- a global install. However, we also allow looking in a specific
    -- package db.
packageDBs' :: PackageDBStack
index' :: Maybe InstalledPackageIndex
(packageDBs', index') =
case packageDBs of
(GlobalPackageDB:dbs) | UserPackageDB `notElem` dbs
, Nothing <- explicitSetupDeps
-> (GlobalPackageDB:UserPackageDB:dbs, Nothing)
-- but if the user is using an odd db stack, don't touch it
_otherwise -> (packageDBs, Just index)
explicitSetupDeps :: Maybe [(InstalledPackageId, PackageId)]
explicitSetupDeps = do
ReadyPackage (SourcePackage _ gpkg _ _) _ _ deps <- mpkg
-- Check if there is an explicit setup stanza
_buildInfo <- PkgDesc.setupBuildInfo (PkgDesc.packageDescription gpkg)
-- Return the setup dependencies computed by the solver
return [ ( Installed.installedPackageId deppkg
, Installed.sourcePackageId deppkg
)
| deppkg <- CD.setupDeps deps
]
-- | Make an 'InstallPlan' for the unpacked package in the current directory,
-- and all its dependencies.
--
planLocalPackage :: Verbosity -> Compiler
-> Platform
-> ConfigFlags -> ConfigExFlags
-> InstalledPackageIndex
-> SourcePackageDb
-> IO (Progress String String InstallPlan)
planLocalPackage verbosity comp platform configFlags configExFlags installedPkgIndex
(SourcePackageDb _ packagePrefs) = do
pkg <- readPackageDescription verbosity =<< defaultPackageDesc verbosity
solver <- chooseSolver verbosity (fromFlag $ configSolver configExFlags) (compilerInfo comp)
let -- We create a local package and ask to resolve a dependency on it
localPkg = SourcePackage {
packageInfoId = packageId pkg,
Source.packageDescription = pkg,
packageSource = LocalUnpackedPackage ".",
packageDescrOverride = Nothing
}
testsEnabled = fromFlagOrDefault False $ configTests configFlags
benchmarksEnabled =
fromFlagOrDefault False $ configBenchmarks configFlags
resolverParams =
removeUpperBounds (fromFlagOrDefault AllowNewerNone $
configAllowNewer configExFlags)
. addPreferences
-- preferences from the config file or command line
[ PackageVersionPreference name ver
| Dependency name ver <- configPreferences configExFlags ]
. addConstraints
-- version constraints from the config file or command line
-- TODO: should warn or error on constraints that are not on direct
-- deps or flag constraints not on the package in question.
(map userToPackageConstraint (configExConstraints configExFlags))
. addConstraints
-- package flags from the config file or command line
[ PackageConstraintFlags (packageName pkg)
(configConfigurationsFlags configFlags) ]
. addConstraints
-- '--enable-tests' and '--enable-benchmarks' constraints from
-- command line
[ PackageConstraintStanzas (packageName pkg) $
[ TestStanzas | testsEnabled ] ++
[ BenchStanzas | benchmarksEnabled ]
]
$ standardInstallPolicy
installedPkgIndex
(SourcePackageDb mempty packagePrefs)
[SpecificSourcePackage localPkg]
return (resolveDependencies platform (compilerInfo comp) solver resolverParams)
-- | Call an installer for a 'SourcePackage' but override the configure
-- flags with the ones given by the 'ReadyPackage'. In particular the
-- 'ReadyPackage' specifies an exact 'FlagAssignment' and exactly
-- versioned package dependencies. So we ignore any previous partial flag
-- assignment or dependency constraints and use the new ones.
--
-- NB: when updating this function, don't forget to also update
-- 'installReadyPackage' in D.C.Install.
configurePackage :: Verbosity
-> Platform -> CompilerInfo
-> SetupScriptOptions
-> ConfigFlags
-> ReadyPackage
-> [String]
-> IO ()
configurePackage verbosity platform comp scriptOptions configFlags
(ReadyPackage (SourcePackage _ gpkg _ _) flags stanzas deps) extraArgs =
setupWrapper verbosity
scriptOptions (Just pkg) configureCommand configureFlags extraArgs
where
configureFlags = filterConfigureFlags configFlags {
configConfigurationsFlags = flags,
-- We generate the legacy constraints as well as the new style precise
-- deps. In the end only one set gets passed to Setup.hs configure,
-- depending on the Cabal version we are talking to.
configConstraints = [ thisPackageVersion (packageId deppkg)
| deppkg <- CD.nonSetupDeps deps ],
configDependencies = [ (packageName (Installed.sourcePackageId deppkg),
Installed.installedPackageId deppkg)
| deppkg <- CD.nonSetupDeps deps ],
-- Use '--exact-configuration' if supported.
configExactConfiguration = toFlag True,
configVerbosity = toFlag verbosity,
configBenchmarks = toFlag (BenchStanzas `elem` stanzas),
configTests = toFlag (TestStanzas `elem` stanzas)
}
pkg = case finalizePackageDescription flags
(const True)
platform comp [] (enableStanzas stanzas gpkg) of
Left _ -> error "finalizePackageDescription ReadyPackage failed"
Right (desc, _) -> desc
| corngood/cabal | cabal-install/Distribution/Client/Configure.hs | bsd-3-clause | 13,298 | 0 | 20 | 3,678 | 2,109 | 1,164 | 945 | 227 | 3 |
{-|
Module: Tidl.Generate.C
Description: Generation primitive (C target) for TIDL
Copyright 2015, Google Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
module Tidl.Generate.C where
import Tidl.Cfg
import Tidl.Ast
import Control.Monad
import Data.Char
import qualified Data.Map.Strict as Map
import Text.PrettyPrint
import Tidl.Generate
import Tidl.Generate.Pretty
data TargetC = TargetC
instance GenTarget TargetC where
genAll _ ast modname = [header ast modname, impl ast modname]
arrayLenFixed :: String -> String -> String
arrayLenFixed sname fname = map toUpper sname ++ "_" ++ map toUpper fname ++ "_LENGTH"
arrayLenMax :: String -> String -> String
arrayLenMax sname fname = map toUpper sname ++ "_" ++ map toUpper fname ++ "_MAX_LENGTH"
arrayLen :: String -> String
arrayLen fname = fname ++ "_length"
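-- Naming sketch with a hypothetical struct "packet" and field "payload":
-- arrayLenFixed "packet" "payload" == "PACKET_PAYLOAD_LENGTH"
-- arrayLenMax "packet" "payload" == "PACKET_PAYLOAD_MAX_LENGTH"
-- arrayLen "payload" == "payload_length"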
enumSerialize ename = ename ++ "_serialize"
enumDeserialize ename = ename ++ "_deserialize"
structInit sname = sname ++ "_init"
structCleanup sname = sname ++ "_cleanup"
structSerialize sname = sname ++ "_serialize"
structDeserialize sname = sname ++ "_deserialize"
arrayResize sname fname = sname ++ "_" ++ fname ++ "_resize"
intType :: IntKind -> IntSize -> String
intType IKSigned IS8 = "int8_t"
intType IKSigned IS16 = "int16_t"
intType IKSigned IS32 = "int32_t"
intType IKSigned IS64 = "int64_t"
intType IKUnsigned IS8 = "uint8_t"
intType IKUnsigned IS16 = "uint16_t"
intType IKUnsigned IS32 = "uint32_t"
intType IKUnsigned IS64 = "uint64_t"
fieldType ftype
= case ftype of
FEnum ename -> "enum " ++ ename
FStruct sname -> "struct " ++ sname
FString -> "char*"
FInt ik is -> intType ik is
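-- For instance (hypothetical type names):
-- fieldType (FStruct "point") == "struct point"
-- fieldType (FInt IKUnsigned IS16) == "uint16_t"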
header :: AST -> String -> Rendered
header ast modname
= let hdr_def = "__" ++ (map toUpper modname) ++ "_H__"
out_h_name = modname ++ ".h"
in Rendered
{ path = ["include"]
, fname = out_h_name
, contents = runPrinter $
do sayln' $ "/**"
sayln' $ " * @file " ++ out_h_name
sayln' $ " */"
sayln' $ ""
sayln' $ "#ifndef " ++ hdr_def
sayln' $ "#define " ++ hdr_def
sayln' $ ""
sayln' $ ""
sayln' $ "#include <tidl.h>"
sayln' $ ""
sayln' $ ""
sayln' $ "/* Start forward declarations */"
sayln' $ ""
mapM_ forwardDeclaration (ast_defs ast)
sayln' $ ""
sayln' $ "/* End forward declarations */"
sayln' $ ""
sayln' $ ""
sayln' $ "/* Start array length constants */"
sayln' $ ""
mapM_ arrayLengthConstant (ast_defs ast)
sayln' $ ""
sayln' $ "/* End array length constants */"
sayln' $ ""
sayln' $ ""
sayln' $ "/* Start type definitions */"
sayln' $ ""
mapM_ (\x -> typeDefinition x >> sayln' "") (ast_defs ast)
sayln' $ ""
sayln' $ "/* End type definitions */"
sayln' $ ""
sayln' $ ""
sayln' $ "/* Start function prototypes */"
sayln' $ ""
mapM_ (\x -> functionProtos x >> sayln' "") (ast_defs ast)
sayln' $ ""
sayln' $ "/* End function prototypes */"
sayln' $ ""
sayln' $ ""
sayln' $ "#endif /* " ++ hdr_def ++ " */"
sayln' $ ""
}
forwardDeclaration :: TopLevel -> Printer ()
forwardDeclaration (TLEnum{ enum_name = ename }) = sayln' $ "enum " ++ ename ++ ";"
forwardDeclaration (TLStruct{ struct_name = sname }) = sayln' $ "struct " ++ sname ++ ";"
arrayLengthConstant :: TopLevel -> Printer ()
arrayLengthConstant (TLEnum{ }) = return ()
arrayLengthConstant (TLStruct{ struct_name = sname, struct_fields = fields })
= mapM_ arrayLengthConstant' fields
where arrayLengthConstant' field
= case field of
StructField{ field_array = Scalar }
-> return ()
StructField{ field_name = fname, field_array = Fixed l }
-> sayln' $ "#define " ++ arrayLenFixed sname fname ++ " " ++ show l
StructField{ field_name = fname, field_array = Max l }
-> sayln' $ "#define " ++ arrayLenMax sname fname ++ " " ++ show l
typeDefinition :: TopLevel -> Printer ()
typeDefinition (TLEnum{ enum_name = ename, enum_labels = labels })
= do sayln' $ "enum " ++ ename ++ " {"
mapM_ (\EnumLabel{ label_name = lname, label_val = lval }
-> sayln' $ " " ++ lname ++ " = " ++ show lval ++ ","
) labels
sayln' $ "};"
typeDefinition (TLStruct{ struct_name = sname, struct_fields = fields })
= do sayln' $ "struct " ++ sname ++ " {"
mapM_ fieldDefinition fields
sayln' $ "};"
where fieldDefinition (StructField{ field_name = fname, field_array = farray, field_type = ftype })
= case farray of
Scalar -> sayln' $ " " ++ fieldType ftype ++ " " ++ fname ++ ";"
Fixed _ -> sayln' $ " " ++ fieldType ftype ++ " " ++ fname ++ "[" ++ arrayLenFixed sname fname ++ "];"
Max _ -> do sayln' $ " " ++ intType IKUnsigned IS32 ++ " " ++ arrayLen fname ++ ";"
sayln' $ " " ++ fieldType ftype ++ "* " ++ fname ++ ";"
functionProtos :: TopLevel -> Printer ()
functionProtos (TLEnum{ enum_name = ename })
= do sayln' $ enumSerializeProto ename ++ ";"
sayln' $ enumDeserializeProto ename ++ ";"
functionProtos (TLStruct{ struct_name = sname, struct_fields = fields })
= do sayln' $ structInitProto sname ++ ";"
sayln' $ structCleanupProto sname ++ ";"
sayln' $ structSerializeProto sname ++ ";"
sayln' $ structDeserializeProto sname ++ ";"
mapM_ (\f -> sayln' $ arrayResizeProto sname f ++ ";") (filter isResizable fields)
enumSerializeProto ename = "int " ++ enumSerialize ename ++ "(uint8_t *out, uint32_t out_len, uint32_t *pos, enum " ++ ename ++ " in)"
enumDeserializeProto ename = "int " ++ enumDeserialize ename ++ "(uint8_t *in, uint32_t in_len, uint32_t *pos, enum " ++ ename ++ " *out)"
structInitProto sname = "void " ++ structInit sname ++ "(struct " ++ sname ++ " *in)"
structCleanupProto sname = "void " ++ structCleanup sname ++ "(struct " ++ sname ++ " *in)"
structSerializeProto sname = "int " ++ structSerialize sname ++ "(uint8_t *out, uint32_t out_len, uint32_t *pos, struct " ++ sname ++ " *in)"
structDeserializeProto sname = "int " ++ structDeserialize sname ++ "(uint8_t *in, uint32_t in_len, uint32_t *pos, struct " ++ sname ++ " *out)"
arrayResizeProto sname (StructField{ field_name = fname, field_array = farray })
= case farray of
Max _ -> "int " ++ arrayResize sname fname ++ "(struct " ++ sname ++ " *in, " ++ intType IKUnsigned IS32 ++ " nsize)"
_ -> undefined
impl :: AST -> String -> Rendered
impl ast modname
= let out_h_name = modname ++ ".h"
out_c_name = modname ++ ".c"
in Rendered
{ path = []
, fname = out_c_name
, contents = runPrinter $
do sayln' $ "/**"
sayln' $ " * @file " ++ out_c_name
sayln' $ " */"
sayln' $ ""
sayln' $ ""
sayln' $ "#include <string.h>"
sayln' $ "#include <stdlib.h>"
sayln' $ "#include <" ++ out_h_name ++ ">"
sayln' $ ""
sayln' $ ""
mapM_ (\x -> functionImpls x >> sayln' "") (ast_defs ast)
sayln' $ ""
sayln' $ ""
}
functionImpls :: TopLevel -> Printer ()
functionImpls (TLEnum{ enum_name = ename, enum_labels = labels })
= do enumSerializeImpl ename labels >> sayln' ""
enumDeserializeImpl ename labels >> sayln' ""
functionImpls (TLStruct{ struct_name = sname, struct_fields = fields })
= do structInitImpl sname fields >> sayln' ""
structCleanupImpl sname fields >> sayln' ""
structSerializeImpl sname fields >> sayln' ""
structDeserializeImpl sname fields >> sayln' ""
mapM_ (\f -> arrayResizeImpl sname f >> sayln' "") (filter isResizable fields)
serialize :: FieldType -> String
serialize (FEnum ename) = enumSerialize ename
serialize (FStruct sname) = structSerialize sname
serialize FString = "string_serialize"
serialize (FInt IKSigned IS8) = "int8_serialize"
serialize (FInt IKSigned IS16) = "int16_serialize"
serialize (FInt IKSigned IS32) = "int32_serialize"
serialize (FInt IKSigned IS64) = "int64_serialize"
serialize (FInt IKUnsigned IS8) = "uint8_serialize"
serialize (FInt IKUnsigned IS16) = "uint16_serialize"
serialize (FInt IKUnsigned IS32) = "uint32_serialize"
serialize (FInt IKUnsigned IS64) = "uint64_serialize"
deserialize :: FieldType -> String
deserialize (FEnum ename) = enumDeserialize ename
deserialize (FStruct sname) = structDeserialize sname
deserialize FString = "string_deserialize"
deserialize (FInt IKSigned IS8) = "int8_deserialize"
deserialize (FInt IKSigned IS16) = "int16_deserialize"
deserialize (FInt IKSigned IS32) = "int32_deserialize"
deserialize (FInt IKSigned IS64) = "int64_deserialize"
deserialize (FInt IKUnsigned IS8) = "uint8_deserialize"
deserialize (FInt IKUnsigned IS16) = "uint16_deserialize"
deserialize (FInt IKUnsigned IS32) = "uint32_deserialize"
deserialize (FInt IKUnsigned IS64) = "uint64_deserialize"
enumSerializeImpl ename labels
= do sayln' $ enumSerializeProto ename ++ " {"
sayln' $ " int ret = 0;"
sayln' $ " "
sayln' $ " switch (in) {"
flip mapM_ labels $ \EnumLabel{ label_name = lname, label_val = lval }
-> do sayln' $ " case " ++ lname ++ ":"
sayln' $ " ret = " ++ serialize (FInt IKSigned IS32) ++ "(out, out_len, pos, " ++ show lval ++ ");"
sayln' $ " break;"
sayln' $ " default: ret = -1;"
sayln' $ " }"
sayln' $ " "
sayln' $ " return ret;"
sayln' $ "}"
enumDeserializeImpl ename labels
= do sayln' $ enumDeserializeProto ename ++ " {"
sayln' $ " int ret = 0;"
sayln' $ " "
sayln' $ " " ++ intType IKSigned IS32 ++ " temp = 0;"
sayln' $ " ret = " ++ deserialize (FInt IKSigned IS32) ++ "(in, in_len, pos, &temp);"
sayln' $ " if (ret) return ret;"
sayln' $ " switch (temp) {"
flip mapM_ labels $ \EnumLabel{ label_name = lname, label_val = lval }
-> do sayln' $ " case " ++ show lval ++ ":"
sayln' $ " *out = " ++ lname ++ ";"
sayln' $ " break;"
sayln' $ " default: ret = -1;"
sayln' $ " }"
sayln' $ " "
sayln' $ " return ret;"
sayln' $ "}"
structInitImpl sname fields
= do sayln' $ structInitProto sname ++ " {"
sayln' $ " memset(in, 0, sizeof(*in));"
sayln' $ "}"
structCleanupImpl sname fields
= do sayln' $ structCleanupProto sname ++ " {"
flip mapM_ fields $ \(StructField{ field_name = fname, field_array = farray, field_type = ftype })
-> case farray of
Scalar ->
case ftype of
FStruct csname ->
sayln' $ " " ++ structCleanup csname ++ "(&in->" ++ fname ++ ");";
FString ->
do sayln' $ " if (in->" ++ fname ++ ") {"
sayln' $ " free(in->" ++ fname ++ ");"
sayln' $ " in->" ++ fname ++ " = NULL;"
sayln' $ " }"
_ -> return ()
Fixed _ ->
case ftype of
FStruct csname ->
do sayln' $ " for (int i = 0; i < " ++ arrayLenFixed sname fname ++ "; i++)"
sayln' $ " " ++ structCleanup csname ++ "(&in->" ++ fname ++ "[i]);";
FString ->
do sayln' $ " for (int i = 0; i < " ++ arrayLenFixed sname fname ++ "; i++) {"
sayln' $ " if (in->" ++ fname ++ "[i]) {"
sayln' $ " free(in->" ++ fname ++ "[i]);"
sayln' $ " in->" ++ fname ++ "[i] = NULL;"
sayln' $ " }"
sayln' $ " }"
_ -> return ()
Max _ ->
do case ftype of
FStruct csname ->
do sayln' $ " for (int i = 0; i < in->" ++ arrayLen fname ++ "; i++)"
sayln' $ " " ++ structCleanup csname ++ "(&in->" ++ fname ++ "[i]);";
FString ->
do sayln' $ " for (int i = 0; i < in->" ++ arrayLen fname ++ "; i++) {"
sayln' $ " if (in->" ++ fname ++ "[i]) {"
sayln' $ " free(in->" ++ fname ++ "[i]);"
sayln' $ " in->" ++ fname ++ "[i] = NULL;"
sayln' $ " }"
sayln' $ " }"
_ -> return ()
sayln' $ " if (in->" ++ fname ++ ") {"
sayln' $ " free(in->" ++ fname ++ ");"
sayln' $ " in->" ++ fname ++ " = NULL;"
sayln' $ " }"
sayln' $ " " ++ structInit sname ++ "(in);"
sayln' $ "}"
structSerializeImpl sname fields
= do sayln' $ structSerializeProto sname ++ " {"
sayln' $ " int ret = 0;"
sayln' $ " "
flip mapM_ fields $ \(StructField{ field_name = fname, field_array = farray, field_type = ftype })
-> case farray of
Scalar ->
case ftype of
FStruct csname ->
do sayln' $ " ret = " ++ serialize ftype ++ "(out, out_len, pos, &in->" ++ fname ++ ");"
sayln' $ " if (ret) return ret;"
_ ->
do sayln' $ " ret = " ++ serialize ftype ++ "(out, out_len, pos, in->" ++ fname ++ ");"
sayln' $ " if (ret) return ret;"
Fixed _ ->
do sayln' $ " for (int i = 0; i < " ++ arrayLenFixed sname fname ++ "; i++) {"
case ftype of
FStruct csname ->
do sayln' $ " ret = " ++ serialize ftype ++ "(out, out_len, pos, &in->" ++ fname ++ "[i]);"
sayln' $ " if (ret) return ret;"
_ ->
do sayln' $ " ret = " ++ serialize ftype ++ "(out, out_len, pos, in->" ++ fname ++ "[i]);"
sayln' $ " if (ret) return ret;"
sayln' $ " }"
Max _ ->
do sayln' $ " ret = " ++ serialize (FInt IKUnsigned IS32) ++ "(out, out_len, pos, in->" ++ arrayLen fname ++ ");"
sayln' $ " if (ret) return ret;"
sayln' $ " for (int i = 0; i < in->" ++ arrayLen fname ++ "; i++) {"
case ftype of
FStruct csname ->
do sayln' $ " ret = " ++ serialize ftype ++ "(out, out_len, pos, &in->" ++ fname ++ "[i]);"
sayln' $ " if (ret) return ret;"
_ ->
do sayln' $ " ret = " ++ serialize ftype ++ "(out, out_len, pos, in->" ++ fname ++ "[i]);"
sayln' $ " if (ret) return ret;"
sayln' $ " }"
sayln' $ " "
sayln' $ " return ret;"
sayln' $ "}"
structDeserializeImpl sname fields
= do sayln' $ structDeserializeProto sname ++ " {"
sayln' $ " int ret = 0;"
when (any isResizable fields) (sayln' $ " " ++ intType IKUnsigned IS32 ++ " temp = 0;")
sayln' $ " "
flip mapM_ fields $ \(StructField{ field_name = fname, field_array = farray, field_type = ftype })
-> case farray of
Scalar ->
do sayln' $ " ret = " ++ deserialize ftype ++ "(in, in_len, pos, &out->" ++ fname ++ ");"
sayln' $ " if (ret) return ret;"
Fixed _ ->
do sayln' $ " for (int i = 0; i < " ++ arrayLenFixed sname fname ++ "; i++) {"
sayln' $ " ret = " ++ deserialize ftype ++ "(in, in_len, pos, &out->" ++ fname ++ "[i]);"
sayln' $ " if (ret) return ret;"
sayln' $ " }"
Max _ ->
do sayln' $ " ret = " ++ deserialize (FInt IKUnsigned IS32) ++ "(in, in_len, pos, &temp);"
sayln' $ " if (ret) return ret;"
sayln' $ " ret = " ++ arrayResize sname fname ++ "(out, temp);"
sayln' $ " if (ret) return ret;"
sayln' $ " for (int i = 0; i < out->" ++ arrayLen fname ++ "; i++) {"
sayln' $ " ret = " ++ deserialize ftype ++ "(in, in_len, pos, &out->" ++ fname ++ "[i]);"
sayln' $ " if (ret) return ret;"
sayln' $ " }"
sayln' $ " "
sayln' $ " return ret;"
sayln' $ "}"
arrayResizeImpl sname field@(StructField{ field_name = fname, field_array = farray, field_type = ftype })
= case farray of
Scalar -> undefined
Fixed _ -> undefined
Max _ -> do sayln' $ arrayResizeProto sname field ++ " {"
sayln' $ " if (nsize > " ++ arrayLenMax sname fname ++ ") return -1;"
sayln' $ " "
sayln' $ " if (0 == nsize) {"
sayln' $ " if (in->" ++ fname ++ ") {"
sayln' $ " free(in->" ++ fname ++ ");"
sayln' $ " in->" ++ fname ++ " = NULL;"
sayln' $ " in->" ++ arrayLen fname ++ " = 0;"
sayln' $ " }"
sayln' $ " return 0;"
sayln' $ " }"
sayln' $ " "
sayln' $ " " ++ fieldType ftype ++ "* temp = (" ++ fieldType ftype ++ "*)malloc(sizeof(*temp) * nsize);"
sayln' $ " if (!temp) return -1;"
sayln' $ " memset(temp, 0, sizeof(*temp) * nsize);"
sayln' $ " "
sayln' $ " " ++ intType IKUnsigned IS32 ++ " csize = in->" ++ arrayLen fname ++ ";"
sayln' $ " if (csize > nsize) csize = nsize;"
sayln' $ " memcpy(temp, in->" ++ fname ++ ", csize);"
sayln' $ " "
sayln' $ " free(in->" ++ fname ++ ");"
sayln' $ " in->" ++ fname ++ " = temp;";
sayln' $ " in->" ++ arrayLen fname ++ " = nsize;";
sayln' $ " return 0;"
sayln' $ "}"
| AnttiLukats/orp | software/tools/tidl/src/Tidl/Generate/C.hs | apache-2.0 | 18,884 | 0 | 23 | 6,493 | 4,950 | 2,370 | 2,580 | 372 | 9 |
module FunIn4 where
--A new definition can be introduced to denote an identified sub-expression.
--The newly introduced definition may be a function binding
--or a simple constant binding. The new binding will be put at the end of the
--local 'where' or 'let' clause depending on the scope of the highlighted source.
--In this example: Introduce a new definition to denote 'x*5*z'.
--This example aims to test the layout adjustment.
foo x=x* 5*z*w where z=3
w=5 {-there is
a comment-}
main=foo 10
| SAdams601/HaRe | old/testing/introNewDef/FunIn4.hs | bsd-3-clause | 524 | 0 | 7 | 113 | 51 | 31 | 20 | 4 | 1 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE UnboxedTuples #-}
module T12513 where
import Language.Haskell.TH.Lib
import Language.Haskell.TH.Syntax
f :: $([t| (# Int #) |]) -> Int
f x = x
g :: $(unboxedTupleT 1 `appT` conT ''Int) -> Int
g x = x
| ezyang/ghc | testsuite/tests/th/T12513.hs | bsd-3-clause | 249 | 0 | 10 | 44 | 80 | 48 | 32 | 9 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeFamilies #-}
-- | Turn GroupStreams that operate on the entire input into do-loops,
-- thus aiding subsequent optimisation. It is very important that
-- this is run *after* any access-pattern-related optimisation,
-- because this pass will destroy information.
module Futhark.Optimise.Unstream
( unstream )
where
import Control.Applicative
import Control.Monad.State
import Control.Monad.Reader
import Data.List
import Prelude
import Futhark.MonadFreshNames
import Futhark.Representation.Kernels
import Futhark.Pass
import Futhark.Tools
unstream :: Pass Kernels Kernels
unstream =
Pass { passName = "unstream"
, passDescription = "Remove whole-array streams in kernels"
, passFunction = intraproceduralTransformation optimiseFunDef
}
optimiseFunDef :: MonadFreshNames m => FunDef Kernels -> m (FunDef Kernels)
optimiseFunDef fundec = do
body' <- modifyNameSource $ runState $
runReaderT m (scopeOfFParams (funDefParams fundec))
return fundec { funDefBody = body' }
where m = optimiseBody $ funDefBody fundec
type UnstreamM = ReaderT (Scope Kernels) (State VNameSource)
optimiseBody :: Body Kernels -> UnstreamM (Body Kernels)
optimiseBody (Body () stms res) =
localScope (scopeOf stms) $
Body () <$> (concat <$> mapM optimiseStm stms) <*> pure res
optimiseStm :: Stm Kernels -> UnstreamM [Stm Kernels]
optimiseStm (Let pat aux (Op (Kernel desc space ts body))) = do
stms' <- localScope (scopeOfKernelSpace space) $
runBinder_ $ optimiseInKernelStms $ kernelBodyStms body
return [Let pat aux $ Op $ Kernel desc space ts $ body { kernelBodyStms = stms' }]
optimiseStm (Let pat aux e) =
pure <$> (Let pat aux <$> mapExpM optimise e)
where optimise = identityMapper { mapOnBody = \scope -> localScope scope . optimiseBody }
type InKernelM = Binder InKernel
optimiseInKernelStms :: [Stm InKernel] -> InKernelM ()
optimiseInKernelStms = mapM_ optimiseInKernelStm
optimiseInKernelStm :: Stm InKernel -> InKernelM ()
optimiseInKernelStm (Let pat aux (Op (GroupStream w max_chunk lam accs arrs)))
| max_chunk == w = do
let GroupStreamLambda chunk_size chunk_offset acc_params arr_params body = lam
letBindNames'_ [chunk_size] $ BasicOp $ SubExp $ constant (1::Int32)
loop_body <- insertStmsM $ do
forM_ (zip arr_params arrs) $ \(p,a) ->
letBindNames'_ [paramName p] $
BasicOp $ Index a $ fullSlice (paramType p)
[DimSlice (Var chunk_offset) (Var chunk_size) (constant (1::Int32))]
optimiseInBody body
-- Accumulators are updated in-place and must hence be unique.
let merge = zip (map (fmap (`toDecl` Unique)) acc_params) accs
certifying (stmAuxCerts aux) $
letBind_ pat $ DoLoop [] merge (ForLoop chunk_offset Int32 w []) loop_body
optimiseInKernelStm (Let pat aux e) =
addStm =<< (Let pat aux <$> mapExpM optimise e)
where optimise = identityMapper
{ mapOnBody = \scope -> localScope scope . optimiseInBody }
optimiseInBody :: Body InKernel -> InKernelM (Body InKernel)
optimiseInBody body = do
stms' <- collectStms_ $ optimiseInKernelStms $ bodyStms body
return body { bodyStms = stms' }
| ihc/futhark | src/Futhark/Optimise/Unstream.hs | isc | 3,232 | 0 | 20 | 636 | 976 | 492 | 484 | 62 | 1 |
module SmallBitSet where
import Prelude hiding (null)
import Data.Foldable (foldl')
import Data.Bits
newtype SmallBitSet = SmallBitSet { setRep :: Int }
deriving (Eq, Ord)
instance Show SmallBitSet where
  showsPrec p x = showParen (p >= 11)
$ showString "fromList "
. shows (toList x)
{-# INLINE null #-}
null :: SmallBitSet -> Bool
null (SmallBitSet x) = x == 0
toList :: SmallBitSet -> [Int]
toList (SmallBitSet x) = [ i | i <- [0..finiteBitSize x - 1 - countLeadingZeros x], testBit x i ]
fromList :: [Int] -> SmallBitSet
fromList xs = SmallBitSet (foldl' setBit 0 xs)
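-- A round-trip sketch (any small, ascending indices work the same way):
--
-- >>> toList (fromList [0,3,5])
-- [0,3,5]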
{-# INLINE member #-}
member :: Int -> SmallBitSet -> Bool
member i (SmallBitSet x) = testBit x i
{-# INLINE insert #-}
insert :: Int -> SmallBitSet -> SmallBitSet
insert i (SmallBitSet x) = SmallBitSet (setBit x i)
{-# INLINE delete #-}
delete :: Int -> SmallBitSet -> SmallBitSet
delete i (SmallBitSet x) = SmallBitSet (clearBit x i)
{-# INLINE difference #-}
difference :: SmallBitSet -> SmallBitSet -> SmallBitSet
difference (SmallBitSet x) (SmallBitSet y) =
SmallBitSet (x .&. complement y)
{-# INLINE (\\) #-}
(\\) :: SmallBitSet -> SmallBitSet -> SmallBitSet
(\\) = difference
infix 5 \\
{-# INLINE intersection #-}
intersection :: SmallBitSet -> SmallBitSet -> SmallBitSet
intersection (SmallBitSet x) (SmallBitSet y) = SmallBitSet (x .&. y)
{-# INLINE union #-}
union :: SmallBitSet -> SmallBitSet -> SmallBitSet
union (SmallBitSet x) (SmallBitSet y) = SmallBitSet (x .|. y)
{-# INLINE singleton #-}
singleton :: Int -> SmallBitSet
singleton i = SmallBitSet (bit i)
{-# INLINE empty #-}
empty :: SmallBitSet
empty = SmallBitSet 0
| glguy/advent2016 | lib/SmallBitSet.hs | isc | 1,664 | 0 | 11 | 318 | 553 | 294 | 259 | 46 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
-- | Almost all compiler pipelines end with an 'Action', which does
-- something with the result of the pipeline.
module Futhark.Actions
( printAction,
printAliasesAction,
callGraphAction,
impCodeGenAction,
kernelImpCodeGenAction,
multicoreImpCodeGenAction,
metricsAction,
compileCAction,
compileCtoWASMAction,
compileOpenCLAction,
compileCUDAAction,
compileMulticoreAction,
compileMulticoreToWASMAction,
compilePythonAction,
compilePyOpenCLAction,
)
where
import Control.Monad
import Control.Monad.IO.Class
import Data.List (intercalate)
import Data.Maybe (fromMaybe)
import qualified Data.Text as T
import qualified Data.Text.IO as T
import Futhark.Analysis.Alias
import Futhark.Analysis.CallGraph (buildCallGraph)
import Futhark.Analysis.Metrics
import qualified Futhark.CodeGen.Backends.CCUDA as CCUDA
import qualified Futhark.CodeGen.Backends.COpenCL as COpenCL
import qualified Futhark.CodeGen.Backends.MulticoreC as MulticoreC
import qualified Futhark.CodeGen.Backends.MulticoreWASM as MulticoreWASM
import qualified Futhark.CodeGen.Backends.PyOpenCL as PyOpenCL
import qualified Futhark.CodeGen.Backends.SequentialC as SequentialC
import qualified Futhark.CodeGen.Backends.SequentialPython as SequentialPy
import qualified Futhark.CodeGen.Backends.SequentialWASM as SequentialWASM
import qualified Futhark.CodeGen.ImpGen.GPU as ImpGenGPU
import qualified Futhark.CodeGen.ImpGen.Multicore as ImpGenMulticore
import qualified Futhark.CodeGen.ImpGen.Sequential as ImpGenSequential
import Futhark.Compiler.CLI
import Futhark.IR
import Futhark.IR.GPUMem (GPUMem)
import Futhark.IR.MCMem (MCMem)
import Futhark.IR.Prop.Aliases
import Futhark.IR.SOACS (SOACS)
import Futhark.IR.SeqMem (SeqMem)
import Futhark.Util (runProgramWithExitCode, unixEnvironment)
import Futhark.Version (versionString)
import System.Directory
import System.Exit
import System.FilePath
import qualified System.Info
-- | Print the result to stdout.
printAction :: ASTRep rep => Action rep
printAction =
Action
{ actionName = "Prettyprint",
actionDescription = "Prettyprint the resulting internal representation on standard output.",
actionProcedure = liftIO . putStrLn . pretty
}
-- | Print the result to stdout, alias annotations.
printAliasesAction :: (ASTRep rep, CanBeAliased (Op rep)) => Action rep
printAliasesAction =
Action
{ actionName = "Prettyprint",
actionDescription = "Prettyprint the resulting internal representation on standard output.",
actionProcedure = liftIO . putStrLn . pretty . aliasAnalysis
}
-- | Print call graph to stdout.
callGraphAction :: Action SOACS
callGraphAction =
Action
{ actionName = "call-graph",
actionDescription = "Prettyprint the callgraph of the result to standard output.",
actionProcedure = liftIO . putStrLn . pretty . buildCallGraph
}
-- | Print metrics about AST node counts to stdout.
metricsAction :: OpMetrics (Op rep) => Action rep
metricsAction =
Action
{ actionName = "Compute metrics",
actionDescription = "Print metrics on the final AST.",
actionProcedure = liftIO . putStr . show . progMetrics
}
-- | Convert the program to sequential ImpCode and print it to stdout.
impCodeGenAction :: Action SeqMem
impCodeGenAction =
Action
{ actionName = "Compile imperative",
actionDescription = "Translate program into imperative IL and write it on standard output.",
actionProcedure = liftIO . putStrLn . pretty . snd <=< ImpGenSequential.compileProg
}
-- | Convert the program to GPU ImpCode and print it to stdout.
kernelImpCodeGenAction :: Action GPUMem
kernelImpCodeGenAction =
Action
{ actionName = "Compile imperative kernels",
actionDescription = "Translate program into imperative IL with kernels and write it on standard output.",
actionProcedure = liftIO . putStrLn . pretty . snd <=< ImpGenGPU.compileProgOpenCL
}
-- | Convert the program to CPU multicore ImpCode and print it to stdout.
multicoreImpCodeGenAction :: Action MCMem
multicoreImpCodeGenAction =
Action
{ actionName = "Compile to imperative multicore",
actionDescription = "Translate program into imperative multicore IL and write it on standard output.",
actionProcedure = liftIO . putStrLn . pretty . snd <=< ImpGenMulticore.compileProg
}
-- Lines that we prepend (in comments) to generated code.
headerLines :: [T.Text]
headerLines = T.lines $ "Generated by Futhark " <> T.pack versionString
cHeaderLines :: [T.Text]
cHeaderLines = map ("// " <>) headerLines
pyHeaderLines :: [T.Text]
pyHeaderLines = map ("# " <>) headerLines
cPrependHeader :: T.Text -> T.Text
cPrependHeader = (T.unlines cHeaderLines <>)
pyPrependHeader :: T.Text -> T.Text
pyPrependHeader = (T.unlines pyHeaderLines <>)
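-- For example, @cPrependHeader "int x;"@ yields
-- "// Generated by Futhark VERSION\nint x;", with VERSION standing in for the
-- actual 'versionString'; 'pyPrependHeader' does the same with "# " comments.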
cmdCC :: String
cmdCC = fromMaybe "cc" $ lookup "CC" unixEnvironment
cmdCFLAGS :: [String] -> [String]
cmdCFLAGS def = maybe def words $ lookup "CFLAGS" unixEnvironment
runCC :: String -> String -> [String] -> [String] -> FutharkM ()
runCC cpath outpath cflags_def ldflags = do
ret <-
liftIO $
runProgramWithExitCode
cmdCC
( [cpath, "-o", outpath]
++ cmdCFLAGS cflags_def
++
-- The default LDFLAGS are always added.
ldflags
)
mempty
case ret of
Left err ->
externalErrorS $ "Failed to run " ++ cmdCC ++ ": " ++ show err
Right (ExitFailure code, _, gccerr) ->
externalErrorS $
cmdCC ++ " failed with code "
++ show code
++ ":\n"
++ gccerr
Right (ExitSuccess, _, _) ->
return ()
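-- A sketch of how the environment influences 'runCC': with CC=clang and
-- CFLAGS="-O2 -g" set, compiling prog.c runs roughly
-- "clang prog.c -o prog -O2 -g <ldflags>"; with neither set, "cc" and the
-- caller-supplied default flags are used. (The concrete command line here is
-- illustrative only.)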
-- | The @futhark c@ action.
compileCAction :: FutharkConfig -> CompilerMode -> FilePath -> Action SeqMem
compileCAction fcfg mode outpath =
Action
{ actionName = "Compile to sequential C",
actionDescription = "Compile to sequential C",
actionProcedure = helper
}
where
helper prog = do
cprog <- handleWarnings fcfg $ SequentialC.compileProg prog
let cpath = outpath `addExtension` "c"
hpath = outpath `addExtension` "h"
jsonpath = outpath `addExtension` "json"
case mode of
ToLibrary -> do
let (header, impl, manifest) = SequentialC.asLibrary cprog
liftIO $ T.writeFile hpath $ cPrependHeader header
liftIO $ T.writeFile cpath $ cPrependHeader impl
liftIO $ T.writeFile jsonpath manifest
ToExecutable -> do
liftIO $ T.writeFile cpath $ SequentialC.asExecutable cprog
runCC cpath outpath ["-O3", "-std=c99"] ["-lm"]
ToServer -> do
liftIO $ T.writeFile cpath $ SequentialC.asServer cprog
runCC cpath outpath ["-O3", "-std=c99"] ["-lm"]
-- | The @futhark opencl@ action.
compileOpenCLAction :: FutharkConfig -> CompilerMode -> FilePath -> Action GPUMem
compileOpenCLAction fcfg mode outpath =
Action
{ actionName = "Compile to OpenCL",
actionDescription = "Compile to OpenCL",
actionProcedure = helper
}
where
helper prog = do
cprog <- handleWarnings fcfg $ COpenCL.compileProg prog
let cpath = outpath `addExtension` "c"
hpath = outpath `addExtension` "h"
jsonpath = outpath `addExtension` "json"
extra_options
| System.Info.os == "darwin" =
["-framework", "OpenCL"]
| System.Info.os == "mingw32" =
["-lOpenCL64"]
| otherwise =
["-lOpenCL"]
case mode of
ToLibrary -> do
let (header, impl, manifest) = COpenCL.asLibrary cprog
liftIO $ T.writeFile hpath $ cPrependHeader header
liftIO $ T.writeFile cpath $ cPrependHeader impl
liftIO $ T.writeFile jsonpath manifest
ToExecutable -> do
liftIO $ T.writeFile cpath $ cPrependHeader $ COpenCL.asExecutable cprog
runCC cpath outpath ["-O", "-std=c99"] ("-lm" : extra_options)
ToServer -> do
liftIO $ T.writeFile cpath $ cPrependHeader $ COpenCL.asServer cprog
runCC cpath outpath ["-O", "-std=c99"] ("-lm" : extra_options)
-- | The @futhark cuda@ action.
compileCUDAAction :: FutharkConfig -> CompilerMode -> FilePath -> Action GPUMem
compileCUDAAction fcfg mode outpath =
Action
{ actionName = "Compile to CUDA",
actionDescription = "Compile to CUDA",
actionProcedure = helper
}
where
helper prog = do
cprog <- handleWarnings fcfg $ CCUDA.compileProg prog
let cpath = outpath `addExtension` "c"
hpath = outpath `addExtension` "h"
jsonpath = outpath `addExtension` "json"
extra_options =
[ "-lcuda",
"-lcudart",
"-lnvrtc"
]
case mode of
ToLibrary -> do
let (header, impl, manifest) = CCUDA.asLibrary cprog
liftIO $ T.writeFile hpath $ cPrependHeader header
liftIO $ T.writeFile cpath $ cPrependHeader impl
liftIO $ T.writeFile jsonpath manifest
ToExecutable -> do
liftIO $ T.writeFile cpath $ cPrependHeader $ CCUDA.asExecutable cprog
runCC cpath outpath ["-O", "-std=c99"] ("-lm" : extra_options)
ToServer -> do
liftIO $ T.writeFile cpath $ cPrependHeader $ CCUDA.asServer cprog
runCC cpath outpath ["-O", "-std=c99"] ("-lm" : extra_options)
-- | The @futhark multicore@ action.
compileMulticoreAction :: FutharkConfig -> CompilerMode -> FilePath -> Action MCMem
compileMulticoreAction fcfg mode outpath =
Action
{ actionName = "Compile to multicore",
actionDescription = "Compile to multicore",
actionProcedure = helper
}
where
helper prog = do
cprog <- handleWarnings fcfg $ MulticoreC.compileProg prog
let cpath = outpath `addExtension` "c"
hpath = outpath `addExtension` "h"
jsonpath = outpath `addExtension` "json"
case mode of
ToLibrary -> do
let (header, impl, manifest) = MulticoreC.asLibrary cprog
liftIO $ T.writeFile hpath $ cPrependHeader header
liftIO $ T.writeFile cpath $ cPrependHeader impl
liftIO $ T.writeFile jsonpath manifest
ToExecutable -> do
liftIO $ T.writeFile cpath $ cPrependHeader $ MulticoreC.asExecutable cprog
runCC cpath outpath ["-O3", "-std=c99"] ["-lm", "-pthread"]
ToServer -> do
liftIO $ T.writeFile cpath $ cPrependHeader $ MulticoreC.asServer cprog
runCC cpath outpath ["-O3", "-std=c99"] ["-lm", "-pthread"]
pythonCommon ::
(CompilerMode -> String -> prog -> FutharkM (Warnings, T.Text)) ->
FutharkConfig ->
CompilerMode ->
FilePath ->
prog ->
FutharkM ()
pythonCommon codegen fcfg mode outpath prog = do
let class_name =
case mode of
ToLibrary -> takeBaseName outpath
_ -> "internal"
pyprog <- handleWarnings fcfg $ codegen mode class_name prog
case mode of
ToLibrary ->
liftIO $ T.writeFile (outpath `addExtension` "py") $ pyPrependHeader pyprog
_ -> liftIO $ do
T.writeFile outpath $ "#!/usr/bin/env python3\n" <> pyPrependHeader pyprog
perms <- liftIO $ getPermissions outpath
setPermissions outpath $ setOwnerExecutable True perms
-- | The @futhark python@ action.
compilePythonAction :: FutharkConfig -> CompilerMode -> FilePath -> Action SeqMem
compilePythonAction fcfg mode outpath =
Action
{ actionName = "Compile to PyOpenCL",
actionDescription = "Compile to Python with OpenCL",
actionProcedure = pythonCommon SequentialPy.compileProg fcfg mode outpath
}
-- | The @futhark pyopencl@ action.
compilePyOpenCLAction :: FutharkConfig -> CompilerMode -> FilePath -> Action GPUMem
compilePyOpenCLAction fcfg mode outpath =
Action
{ actionName = "Compile to PyOpenCL",
actionDescription = "Compile to Python with OpenCL",
actionProcedure = pythonCommon PyOpenCL.compileProg fcfg mode outpath
}
cmdEMCFLAGS :: [String] -> [String]
cmdEMCFLAGS def = maybe def words $ lookup "EMCFLAGS" unixEnvironment
runEMCC :: String -> String -> FilePath -> [String] -> [String] -> [String] -> Bool -> FutharkM ()
runEMCC cpath outpath classpath cflags_def ldflags expfuns lib = do
ret <-
liftIO $
runProgramWithExitCode
"emcc"
( [cpath, "-o", outpath]
++ ["-lnodefs.js"]
++ ["-s", "--extern-post-js", classpath]
++ ( if lib
then ["-s", "EXPORT_NAME=loadWASM"]
else []
)
++ ["-s", "WASM_BIGINT"]
++ cmdCFLAGS cflags_def
++ cmdEMCFLAGS [""]
++ [ "-s",
"EXPORTED_FUNCTIONS=["
++ intercalate "," ("'_malloc'" : "'_free'" : expfuns)
++ "]"
]
-- The default LDFLAGS are always added.
++ ldflags
)
mempty
case ret of
Left err ->
externalErrorS $ "Failed to run emcc: " ++ show err
Right (ExitFailure code, _, emccerr) ->
externalErrorS $
"emcc failed with code "
++ show code
++ ":\n"
++ emccerr
Right (ExitSuccess, _, _) ->
return ()
-- | The @futhark wasm@ action.
compileCtoWASMAction :: FutharkConfig -> CompilerMode -> FilePath -> Action SeqMem
compileCtoWASMAction fcfg mode outpath =
Action
{ actionName = "Compile to sequential C",
actionDescription = "Compile to sequential C",
actionProcedure = helper
}
where
helper prog = do
(cprog, jsprog, exps) <- handleWarnings fcfg $ SequentialWASM.compileProg prog
case mode of
ToLibrary -> do
writeLibs cprog jsprog
liftIO $ T.appendFile classpath SequentialWASM.libraryExports
runEMCC cpath mjspath classpath ["-O3", "-msimd128"] ["-lm"] exps True
_ -> do
-- Non-server executables are not supported.
writeLibs cprog jsprog
liftIO $ T.appendFile classpath SequentialWASM.runServer
runEMCC cpath outpath classpath ["-O3", "-msimd128"] ["-lm"] exps False
writeLibs cprog jsprog = do
let (h, imp, _) = SequentialC.asLibrary cprog
liftIO $ T.writeFile hpath h
liftIO $ T.writeFile cpath imp
liftIO $ T.writeFile classpath jsprog
cpath = outpath `addExtension` "c"
hpath = outpath `addExtension` "h"
mjspath = outpath `addExtension` "mjs"
classpath = outpath `addExtension` ".class.js"
-- | The @futhark wasm-multicore@ action.
compileMulticoreToWASMAction :: FutharkConfig -> CompilerMode -> FilePath -> Action MCMem
compileMulticoreToWASMAction fcfg mode outpath =
Action
{ actionName = "Compile to sequential C",
actionDescription = "Compile to sequential C",
actionProcedure = helper
}
where
helper prog = do
(cprog, jsprog, exps) <- handleWarnings fcfg $ MulticoreWASM.compileProg prog
case mode of
ToLibrary -> do
writeLibs cprog jsprog
liftIO $ T.appendFile classpath MulticoreWASM.libraryExports
runEMCC cpath mjspath classpath ["-O3", "-msimd128"] ["-lm", "-pthread"] exps True
_ -> do
-- Non-server executables are not supported.
writeLibs cprog jsprog
liftIO $ T.appendFile classpath MulticoreWASM.runServer
runEMCC cpath outpath classpath ["-O3", "-msimd128"] ["-lm", "-pthread"] exps False
writeLibs cprog jsprog = do
let (h, imp, _) = MulticoreC.asLibrary cprog
liftIO $ T.writeFile hpath h
liftIO $ T.writeFile cpath imp
liftIO $ T.writeFile classpath jsprog
cpath = outpath `addExtension` "c"
hpath = outpath `addExtension` "h"
mjspath = outpath `addExtension` "mjs"
classpath = outpath `addExtension` ".class.js"
| HIPERFIT/futhark | src/Futhark/Actions.hs | isc | 15,931 | 0 | 19 | 3,909 | 3,786 | 2,018 | 1,768 | -1 | -1 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE OverloadedStrings #-}
module Text.Html.PigLet.Th1
( defTemplate
, composeTemplate
, genTemplate
, addAttr
, maybeAttr
, maybeContent
, maybeVal
, var
)
where
import Text.HTML.TagSoup
import qualified Text.Blaze.Html5 as H
import qualified Text.Blaze.Html5.Attributes as HA
import Text.Blaze.Html.Renderer.Pretty (renderHtml)
import Language.Haskell.TH
import Data.Monoid
import Data.String.Utils (join)
import qualified Data.Set as S
import Control.Applicative
import GHC.Exts (IsString (..))
import Util.BlazeFromHtml hiding (main)
import Util.GenerateHtmlCombinators hiding (main)
import Text.Html.PigLet.Html5Defs
import Text.Html.PigLet.HtmlMod
-- TODO: R
-- 1. All html5 tags
-- 2. Better selector
type Args = [String]
type HtmlTemplate = Q Template
data Template = Template Args HtmlMod
readTemplate :: [String] -> FilePath -> Q Template
readTemplate params file = (Template params . makeHtmlMod) <$>
runIO (readFile file)
defTemplate :: [String] -> FilePath -> [HtmlMod -> HtmlMod]
-> Q Template
defTemplate params file trans = transformTemplate trans <$>
readTemplate params file
composeTemplate :: Selector -> HtmlTemplate -> HtmlTemplate -> HtmlTemplate
composeTemplate selector = liftA2 (mergeTemplate selector)
defSnippet :: [String]
-> FilePath
-> HtmlTemplate
-> Selector
-> [HtmlMod -> HtmlMod]
-> HtmlTemplate
defSnippet params file parent selector trans = composeTemplate selector parent
$ transformTemplate trans <$> readTemplate params file
mergeTemplate :: Selector -> Template -> Template -> Template
mergeTemplate selector (Template pa pm) (Template ca cm) =
Template (pa <> ca) (mergeTree selector cm pm)
transformTemplate :: [HtmlMod -> HtmlMod] -> Template -> Template
transformTemplate trans (Template args hm) =
Template args (foldr1 (.) trans $ hm)
genTemplate :: HtmlTemplate -> ExpQ
genTemplate t = do
Template args mods <- t
lamE (map (varP . mkName) args) [| $(genCode mods) |]
var :: String -> ExpQ
var = varE . mkName
-- makeTemplate :: FilePath -> [String] -> [(HtmlMod -> HtmlMod)] -> ExpQ
-- makeTemplate file args trans = runIO (readFile file) >>=
-- transformHtml trans args
-- transformHtml :: [(HtmlMod -> HtmlMod)] -> [String] -> String -> ExpQ
-- transformHtml trans args htmlStr =
-- lamE (map (varP . mkName) args) [| $(genCode $ foldr ($) hm trans) |]
-- where hm = html2HtmlMod $ htmlTree html5 htmlStr
makeHtmlMod :: String -> HtmlMod
makeHtmlMod = html2HtmlMod . htmlTree html5
htmlTree :: HtmlVariant -> String -> Html
htmlTree variant = removeEmptyText . fst . makeTree variant False [] .
parseTagsOptions parseOptions { optTagPosition = True }
genCode :: HtmlMod -> ExpQ
genCode (HtmlText str) = [| H.toHtml (str :: String) |]
genCode (HtmlParent tag attrs children modn) =
genParent tag attrs children modn
genCode (HtmlLeaf tag attrs modn) =
genLeaf tag attrs modn
genCode (HtmlBlock htmls) =
[| $(foldr genHtmls [| mempty |] htmls) |]
genHtmls :: HtmlMod -> ExpQ -> ExpQ
genHtmls html code = [| $(genCode html) <> $code |]
genParent :: String -> Attrs -> HtmlMod -> ModNode -> ExpQ
genParent _ _ _ (ModNode _ (SetContent expr)) = expr
genParent tag attrs _ (ModNode attrMods (EmbedContent expr)) =
[| $(getHtmlParent tag) H.! genAttrs $attrMods attrTuples
$ $expr |]
where attrTuples = map (\ (k, vs) -> (k, S.toList vs)) attrs
genParent tag attrs child (ModNode attrMods NotTouched) =
[| $(getHtmlParent tag) H.! genAttrs $attrMods attrTuples $
$(genCode child) |]
where attrTuples = map (\ (k, vs) -> (k, S.toList vs)) attrs
genLeaf :: String -> Attrs -> ModNode -> ExpQ
genLeaf _ _ (ModNode _ (SetContent expr)) = expr
genLeaf tag attrs (ModNode attrMods _) =
[| $(getHtmlLeaf tag) H.! genAttrs $attrMods attrTuples |]
where attrTuples = map (\ (k, vs) -> (k, S.toList vs)) attrs
type AttrT = (String, [String])
type AttrsT = [AttrT]
genAttrs :: (AttrsT -> AttrsT) -> AttrsT -> H.Attribute
genAttrs trans =
mconcat . map (\(n, v) -> case lookup n html5Attr1 of
Nothing -> error $ "Looking up " ++ n
Just f -> f
$ fromString $ join " " v) . trans
addAttr :: (String, String) -> AttrsT -> AttrsT
addAttr (k, v) attrs = maybe ((k, [v]):attrs)
(\ vs -> if v `elem` vs
then (k, vs) : delKey k attrs
else (k, v:vs) : delKey k attrs)
(lookup k attrs)
where delKey key kvs = filter (not . (== key) . fst) kvs
maybeAttr :: Maybe a -> (String, String) -> AttrsT -> AttrsT
maybeAttr m kv = maybe id (const $ addAttr kv) m
maybeVal :: (Show a) => Maybe a -> String -> AttrsT -> AttrsT
maybeVal v k = maybe id (\x -> addAttr (k, show x)) v
maybeContent :: (H.ToMarkup a) => Maybe a -> H.Html
maybeContent = maybe mempty H.toHtml
| kkspeed/PigLet | src/Text/Html/PigLet/Th1.hs | mit | 5,340 | 1 | 16 | 1,460 | 1,526 | 838 | 688 | 108 | 2 |
{-# LANGUAGE LambdaCase, MultiWayIf, RecordWildCards, OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-name-shadowing #-}
module IRC.Bot ( runIRCBot ) where
import qualified Network.IRC.Bot as IRC
import qualified Network.IRC.Bot.Part.Ping as IRC
import qualified Network.IRC.Bot.Part.NickUser as IRC
import qualified Network.IRC.Bot.Part.Channels as IRC
import Network.IRC.Base hiding ( encode )
import Network.IRC.Commands
import qualified Data.Set as S
import qualified Data.ByteString.UTF8 as Utf8
import qualified Data.Text.Encoding as T
import qualified Data.ByteString as B
import Pipes
import Data.Maybe
import IRC.Socket
import IRC.Types
import Data.Serialize
import Control.Concurrent
import Control.Concurrent.STM
import Control.Exception
import Control.Monad
import Data.Foldable
utf8 :: String -> B.ByteString
utf8 = Utf8.fromString
communicatorPart :: IRC.BotMonad m
=> MVar (IRCMessage -> IO ())
-> TVar (S.Set B.ByteString)
-> TChan IRCMessage
-> m ()
communicatorPart mvar tvar tchan = do
chan <- IRC.askOutChan
_ <- liftIO $ tryPutMVar mvar (\case
PrivateMessage {..} ->
writeChan chan $ IRC.toMessage $
IRC.PrivMsg Nothing [T.encodeUtf8 target] (T.encodeUtf8 content)
Join channel -> do
atomically $ modifyTVar tvar (S.insert (T.encodeUtf8 channel))
writeChan chan (joinChan $ T.encodeUtf8 channel)
Part channel -> do
atomically $ modifyTVar tvar (S.delete (T.encodeUtf8 channel))
writeChan chan (part $ T.encodeUtf8 channel))
mesg <- IRC.askMessage
maybe_nname <- IRC.askSenderNickName
guard (isJust maybe_nname)
let Just nname = maybe_nname
me <- IRC.whoami
if | msg_command mesg == "JOIN" &&
nname == me ->
liftIO $ atomically $ writeTChan tchan $
Join $ T.decodeUtf8 $ head $ msg_params mesg
| msg_command mesg == "PART" &&
nname == me ->
liftIO $ atomically $ writeTChan tchan $
Part $ T.decodeUtf8 $ head $ msg_params mesg
| msg_command mesg == "PRIVMSG" -> do
privmsg <- IRC.privMsg
reply_to <- IRC.replyTo
case reply_to of
Nothing -> return ()
Just rep ->
liftIO $ atomically $ writeTChan tchan $
PrivateMessage (T.decodeUtf8 nname)
(T.decodeUtf8 $ head $
IRC.receivers privmsg)
(T.decodeUtf8 rep)
(T.decodeUtf8 $ IRC.msg privmsg)
| otherwise -> return ()
listener :: MVar (IRCMessage -> IO ()) -> TChan IRCMessage -> IO ()
listener mvar chan = do
sender <- takeMVar mvar
serve "127.0.0.1" "27315" $ \sock _ -> mask $ \restore -> do
bcast <- atomically $ dupTChan chan
tid <- forkIO $ restore $ do
runEffect $
fromSocket sock >->
receiveIRCMessages >->
(forever $ await >>= liftIO . sender)
flip finally (killThread tid) $ restore $ forever $ do
next_msg <- atomically $ readTChan bcast
send sock (encode next_msg)
runIRCBot :: IO ()
runIRCBot = withSocketsDo $ mask $ \restore -> do
mvar <- newEmptyMVar
chan <- newTChanIO
tid <- forkIO $ listener mvar chan
(tvar, part) <- IRC.initChannelsPart S.empty
(tids, _) <- IRC.simpleBot
(IRC.nullBotConf { IRC.host = "irc.freenode.org"
, IRC.nick = utf8 "Pinobot"
, IRC.commandPrefix = "@"
, IRC.user =
IRC.nullUser { IRC.username = utf8 "pino"
, IRC.realname = utf8 "Pinobot"
, IRC.hostname = "trankesbel" } })
[IRC.nickUserPart, IRC.pingPart, part, communicatorPart mvar tvar chan]
finally (restore $ forever $ threadDelay 10000000) $ do
killThread tid
for_ tids killThread
| UnNetHack/pinobot | lib/IRC/Bot.hs | mit | 4,175 | 0 | 22 | 1,395 | 1,191 | 603 | 588 | 99 | 7 |
module BinaryTrees
( Tree,
leaf,
compBalTree,
isSymmTree,
lst2Tree,
genTestTree,
heightBalTree
) where
import Data.Maybe
import Data.List
data Tree a = Empty | Branch a (Tree a) (Tree a) deriving (Show, Eq)
leaf :: a -> Tree a
leaf x = Branch x Empty Empty
--Problem 55: Construct completely balanced binary trees.
--In a completely balanced binary tree, the following property holds for every Branch: the number of nodes in its left subtree and the number of nodes in its right subtree are almost equal, which means their difference is not greater than one.
compBalTree :: a -> Int -> [Tree a]
compBalTree _ 0 = [Empty]
compBalTree x n
| odd n = [Branch x l r | l <- compBalTree x $ (n - 1) `quot` 2, r <- compBalTree x $ (n - 1) `quot` 2]
| even n = concat [[Branch x p q, Branch x q p] | p <- compBalTree x $ (n - 1) `quot` 2, q <- compBalTree x $ n `quot` 2]
| otherwise = []
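--Illustrative check, not part of the original solutions: on four nodes there
--are exactly four completely balanced trees, one per shape of the two-node
--subtree on either side of the root.
demoCompBal :: Bool
demoCompBal = length (compBalTree 'x' 4) == 4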
--Problem 56: Symmetric binary trees.
--Let us call a binary tree symmetric if you can draw a vertical line through the root Branch and then the right subtree is the mirror image of the left subtree.
--Write a predicate symmetric/1 to check whether a given binary tree is symmetric.
isSymmTree :: Tree a -> Bool
isSymmTree x = isEqTree x $ invertTree x where
invertTree :: Tree a -> Tree a
invertTree Empty = Empty
invertTree (Branch n y z) = Branch n (invertTree z) (invertTree y)
isEqTree :: Tree a -> Tree b -> Bool
isEqTree Empty Empty = True
isEqTree Empty Branch {} = False
isEqTree Branch {} Empty = False
isEqTree (Branch _ a b) (Branch _ c d) = isEqTree a c && isEqTree b d
--Problem 57: Binary search trees (dictionaries).
--Write a predicate to construct a binary search tree from a list of integer numbers.
lst2Tree :: (Ord a) => [a] -> Tree a
lst2Tree [] = Empty
lst2Tree xs = process xs Empty where
insertTree :: (Ord a) => a -> Tree a -> Tree a
insertTree n Empty = leaf n
insertTree n (Branch m p q) = if m > n then Branch m (insertTree n p) q else Branch m p (insertTree n q)
process :: (Ord a) => [a] -> Tree a -> Tree a
process y a = foldl (flip insertTree) a y
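--Illustrative check, not part of the original solutions: inserting 5, 3, 8
--and 1 in that order puts 5 at the root, 3 (with left child 1) on the left
--and 8 on the right.
demoLst2Tree :: Bool
demoLst2Tree = lst2Tree [5, 3, 8, 1] == Branch 5 (Branch 3 (leaf 1) Empty) (leaf 8)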
--Problem 58: Generate-and-test paradigm.
--Apply the generate-and-test paradigm to construct all symmetric, completely balanced binary trees with a given number of nodes.
genTestTree :: a -> Int -> [Tree a]
genTestTree x n = filter isSymmTree $ compBalTree x n
--Problem 59: Construct height balanced binary trees.
--Construct all height balanced trees for a given height.
heightBalTree :: a -> Int -> [Tree a]
heightBalTree _ 0 = [Empty]
heightBalTree x 1 = [leaf x]
heightBalTree x n = [Branch x l r | (hl, hr) <- [(n - 1, n - 2), (n - 1, n - 1), (n - 2, n - 1)], l <- heightBalTree x hl, r <- heightBalTree x hr]
--Problem 60: Construct height-balanced binary trees with a given number of nodes.
heightBalTreeNodes :: a -> Int -> [Tree a]
heightBalTreeNodes x n = concatMap filterToTrees [minHeight .. maxHeight] where
--filterToTrees :: Int -> [Tree a]
filterToTrees = filter ((n ==) . countNodes) . heightBalTree x
minNodesSequence :: [Int]
minNodesSequence = 0 : 1 : zipWith ((+) . (1 +)) minNodesSequence (tail minNodesSequence)
minHeight :: Int
minHeight = ceiling $ logBase 2 $ fromIntegral $ n + 1
maxHeight :: Int
maxHeight = fromJust (findIndex (> n) minNodesSequence) - 1
countNodes :: Tree a -> Int
countNodes Empty = 0
countNodes (Branch _ l r) = countNodes l + countNodes r + 1
| 5hubh4m/99-haskell-problems | BinaryTrees.hs | mit | 3,457 | 0 | 13 | 734 | 1,156 | 601 | 555 | 55 | 5 |
module ChessSerializer() where
import ChessPiece
import qualified Data.Map as Map
import Data.List.Split
import Data.Bits
import Data.Word
import Data.Maybe
import Data.Serialize
import qualified Data.ByteString as BStr
instance Serialize Board where
put = put . encodeBoard
get = fmap decodeBoard get
instance Serialize PieceColor
instance Serialize ChessGame
encodePiece :: Maybe ChessPiece -> Word8
encodePiece (Just (ChessPiece pType color)) = fromIntegral(2 * fromEnum pType + fromEnum color + 1)
encodePiece Nothing = 0
decodePiece :: Word8 -> Maybe ChessPiece
decodePiece 0 = Nothing
decodePiece n =
let
n' = fromIntegral n - 1
color = toEnum $ fromIntegral n' `mod` 2
pType = toEnum $ (n' - (fromEnum color)) `div` 2
in Just (ChessPiece pType color)
positions :: [Position]
positions = [(i,j)|i<-[0..7],j<-[0..7]]
encodeBoard :: Board -> BStr.ByteString
encodeBoard (Board board) =
BStr.pack $ map (\ [x,y] ->(shiftL (encodePiece x) 4) .|. encodePiece y) $
chunksOf 2 $ map (\pos -> Map.lookup pos board ) positions
decodeBoard :: BStr.ByteString -> Board
decodeBoard str =
Board $ Map.fromList $ mapMaybe removeNoneValue $ concat $
zipWith zip (chunksOf 2 positions) (map unpackNumber $ BStr.unpack str)
removeNoneValue :: (a, Maybe b) -> Maybe (a,b)
removeNoneValue (k,v) =
case v of
Just v' -> Just (k, v')
Nothing -> Nothing
unpackNumber :: Word8 -> [Maybe ChessPiece]
unpackNumber n =
let
a = shiftR n 4
b = n .&. 0xF
in map decodePiece [a,b]
| bruno-cadorette/IFT630-TP3 | ChessSerializer.hs | mit | 1,566 | 0 | 15 | 336 | 608 | 320 | 288 | 46 | 2 |
module Main where
import Test.Hspec
import PrimSpec
import TypeSpec
main :: IO ()
main = mapM_ hspec [ primSpec
, typeSpec
]
| DimaSamoz/mezzo | test/Main.hs | mit | 168 | 0 | 6 | 65 | 41 | 24 | 17 | 7 | 1 |
module Rebase.Control.Monad.Trans.RWS.Lazy
(
module Control.Monad.Trans.RWS.Lazy
)
where
import Control.Monad.Trans.RWS.Lazy
| nikita-volkov/rebase | library/Rebase/Control/Monad/Trans/RWS/Lazy.hs | mit | 128 | 0 | 5 | 12 | 29 | 22 | 7 | 4 | 0 |
{-# LANGUAGE TypeFamilies #-}
module Database.Toy.Internal.Util.HasFileIO where
import Database.Toy.Internal.Prelude hiding (Handle)
import qualified Data.ByteString.Char8 as B
import qualified System.IO as S
class Monad m => HasFileIO m where
type Handle m :: *
hOpen :: S.FilePath -> S.IOMode -> m (Handle m)
hClose :: Handle m -> m ()
hGet :: Handle m -> Int -> m ByteString
hPut :: Handle m -> ByteString -> m ()
hSeek :: Handle m -> S.SeekMode -> Integer -> m ()
instance HasFileIO IO where
type Handle IO = S.Handle
hOpen = S.openBinaryFile
hClose = S.hClose
hGet = B.hGet
hPut = B.hPut
hSeek = S.hSeek
| dancingrobot84/toydb | src/Database/Toy/Internal/Util/HasFileIO.hs | mit | 662 | 0 | 11 | 155 | 227 | 125 | 102 | 19 | 0 |
module Y2020.M07.D01.Exercise where
import Data.Graph
{--
Okay, yesterday we converted a highly connected, cyclical set of arcs. Today
we'll look at pathing through a graph's nodes, but let's do this with a simpler,
not-so-connected graph for today's exercise.
Find all acyclic* paths from node a to node b in the graph g.
* acyclic in that a node is visited only once in the path.
--}
data Node = R | S | T | U | V
deriving (Eq, Ord, Show)
data Arc = Arc Node Node -- no distance-edges in this graph
deriving (Eq, Ord, Show)
graphArcs :: [Arc]
graphArcs = [Arc S R, Arc S U, Arc U S, Arc U R, Arc V U]
-- n.b.: node T is not connected to any other node
graph :: [Arc] -> Graph -- should be same-ish as yesterday
graph arcs = undefined
path :: Graph -> Node -> Node -> [[Arc]]
path g a b = undefined
| geophf/1HaskellADay | exercises/HAD/Y2020/M07/D01/Exercise.hs | mit | 829 | 0 | 9 | 185 | 185 | 105 | 80 | 12 | 1 |
{-|
Module: Data.Conf.PrettyPrint
Description: Pretty-printer for 'Data.Conf'
Copyright: (c) Copyright Pedro Tacla Yamada 2016
License: MIT
Maintainer: [email protected]
Stability: experimental
Portability: unknown
Pretty-printer for "Data.Conf". Declares a 'Pretty' instance for
'ConfStatement'.
-}
{-# LANGUAGE OverloadedStrings #-}
module Data.Conf.PrettyPrint
(
pPrintConf
, Pretty (..)
, Doc (..)
)
where
import Data.Char (isSpace)
import Data.Conf.Types
import Data.Text (Text)
import qualified Data.Text as Text
import Text.PrettyPrint.HughesPJClass
import Debug.Trace
-- | Pretty-prints a 'Conf' to a 'Doc'
--
-- 'pPrint' restricted to 'Conf'
--
-- @
-- print (pPrintConf c)
-- @
--
-- Because of https://github.com/haskell/pretty/issues/26, it's not easy to
-- prevent trailing spaces while generating the output. This function patches
-- it at the end so there're no trailing spaces.
--
-- See "Text.PrettyPrint"
pPrintConf :: Conf -> Doc
pPrintConf c =
let d = pPrint c
ds = init (unlines (map stripEnd (lines (show d))))
in text ds
where
stripEnd = reverse . dropWhile isSpace . reverse
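-- | A minimal usage sketch, not part of the original module: 'writePrettyConf'
-- is an assumed helper name, and the 'Conf' value is assumed to come from the
-- parser in "Data.Conf".
writePrettyConf :: FilePath -> Conf -> IO ()
writePrettyConf fp = writeFile fp . show . pPrintConf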
instance Pretty ConfStatement where
pPrint s = case s of
ConfStatementEmptyLine -> text ""
ConfStatementComment (Comment c) ->
"#" <> ttext c
ConfStatementBlock (Block ks ss) ->
thsep ks <+> "{"
$+$ nest 2 (pPrintList (PrettyLevel 0) ss) $+$
"}"
ConfStatementExpression (Expression t ts) ->
ttext t <+> thsep ts <> ";"
pPrintList _ ss = foldl ($+$) empty (map pPrint ss)
thsep :: [Text] -> Doc
thsep = hsep . map ttext
ttext :: Text -> Doc
ttext = text . Text.unpack
| beijaflor-io/haskell-language-conf | src/Data/Conf/PrettyPrint.hs | mit | 1,876 | 0 | 17 | 512 | 376 | 204 | 172 | 34 | 1 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UnicodeSyntax #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
-- -------------------------------------------------------------------------- --
-- |
-- Module: Data.ByteString.Random.MWC
-- Copyright: (c) Lars Kuhtz <[email protected]> 2017
-- License: MIT
-- Maintainer: [email protected]
-- Stability: experimental
module Data.ByteString.Random.MWC
( random
, randomGen
) where
import Data.ByteString (ByteString)
import Numeric.Natural (Natural)
import System.Random.MWC (uniform, GenIO, withSystemRandom)
-- internal imports
import Data.ByteString.Random.Internal
-- -------------------------------------------------------------------------- --
instance (g ~ GenIO) ⇒ RandomWords g where
uniformW8 = uniform
{-# INLINE uniformW8 #-}
uniformW64 = uniform
{-# INLINE uniformW64 #-}
{-# SPECIALIZE generate ∷ GenIO → Natural → IO ByteString #-}
-- -------------------------------------------------------------------------- --
randomGen
∷ GenIO
-- ^ PRNG
→ Natural
-- ^ Length of the result bytestring in bytes
→ IO ByteString
randomGen = generate
{-# INLINE randomGen #-}
-- $setup
-- >>> import qualified Data.ByteString as B
-- >>> import Test.QuickCheck
-- | Generate a random bytestring of length n. The PRNG is seeded
-- from the system randomness source.
--
-- prop> ioProperty $ ((fromIntegral n ===) . B.length) <$> random n
-- prop> n > 4 ==> ioProperty $ (/=) <$> random n <*> random n
--
random
∷ Natural
-- ^ Length of the result bytestring in bytes
→ IO ByteString
random = withSystemRandom . flip randomGen
| larskuhtz/random-bytestring | src/Data/ByteString/Random/MWC.hs | mit | 1,686 | 0 | 7 | 287 | 164 | 108 | 56 | 27 | 1 |
-- Pattern Matching
-- Matches patterns from top to bottom
lucky :: (Integral a) => a -> String
lucky 7 = "SEVEN!"
lucky x = "Not seven. LOL EZ"
-- Recursive factorial definition
factorial :: (Integral a) => a -> a
factorial 0 = 1
factorial a = a * factorial (a - 1)
-- Non-exhaustive pattern match: this fails if we don't pass either "a", "b" or "c"
broName :: Char -> String
broName 'a' = "Albroth"
broName 'b' = "Broseph"
broName 'c' = "Charbro"
-- Triple handling
first :: (a, b, c) -> a
first (a, _, _) = a
second :: (a, b, c) -> b
second (_, b, _) = b
third :: (a, b, c) -> c
third (_, _, c) = c
-- Creating our own version of head
head' :: [a] -> a
head' [] = error "CAN'T GET HEAD OF NOTHING!"
head' (x:_) = x
-- Converting length to phrases
tell :: (Show a) => [a] -> String
tell [] = "This list is empty..."
tell (x:[]) = "This list has one element, which is: " ++ show x
tell (x:y:[]) = "This list has two elements, which are: " ++ show x ++ ", " ++ show y
tell _ = "This list is too long. I'm tired."
-- Reimplementing length recursively
length' :: (Num b) => [a] -> b
length' [] = 0
length' (_:y) = 1 + length' y
| lucasfcosta/haskell-experiences | Chapter 4/patternMatching.hs | mit | 1,128 | 0 | 9 | 249 | 424 | 232 | 192 | 27 | 1 |
module Control.Concurrent.Lock (
Lock,
Acquisition,
Condition,
new,
newAcquired,
acquire,
release,
registerCondition,
await,
signal,
signalAll,
destroy
) where
import Control.Concurrent
import Control.Concurrent.Condition
import Control.Concurrent.Lock.Internal
import Control.Exception
import qualified Data.Vector as V
data Conditions = Conditions
{ lastNotified :: Int
, conditions :: V.Vector (MVar ())
}
signalImpl :: Lock -> IO ()
signalImpl lock = mask_ $ do
conds <- takeMVar $ lockConditions lock
let cs = conditions conds
notifyIndex = (1 + lastNotified conds) `mod` V.length cs
chosen = V.unsafeIndex (conditions conds) notifyIndex
tryPutMVar chosen ()
putMVar (lockConditions lock) (conds { lastNotified = notifyIndex })
signalAllImpl :: Lock -> IO ()
signalAllImpl lock = mask_ $ do
-- Considered using readMVar, but don't want to allow
-- conditions to be concurrently added while signalling?
conds <- takeMVar $ lockConditions lock
V.forM_ (conditions conds) (flip tryPutMVar ())
putMVar (lockConditions lock) conds
data Lock = Lock { lock :: MVar ()
, lockConditions :: MVar Conditions
}
new :: IO Lock
new = Lock <$> newMVar () <*> newMVar (Conditions 0 V.empty)
newAcquired :: IO (Lock, Acquisition Lock)
newAcquired = do
l <- new
a <- acquire l
return (l, a)
acquire :: Lock -> IO (Acquisition Lock)
acquire l = do
val <- takeMVar $ lock l
return $ Acquisition (putMVar (lock l) val)
tryAcquire :: Lock -> IO (Maybe (Acquisition Lock))
tryAcquire l = do
mVal <- tryTakeMVar $ lock l
return $ case mVal of
Nothing -> Nothing
Just val -> Just $ Acquisition (putMVar (lock l) val)
locked :: Lock -> IO Bool
locked = isEmptyMVar . lock
registerCondition :: Lock -> IO (Condition Lock)
registerCondition l = mask_ $ do
conds <- takeMVar $ lockConditions l
newCond <- newEmptyMVar
putMVar (lockConditions l) $ conds { conditions = V.snoc (conditions conds) newCond }
let destroyImpl = modifyMVarMasked_ (lockConditions l) $ \laterConds -> do
putMVar newCond ()
let updatedConditions = V.filter (/= newCond) (conditions laterConds)
return $ laterConds { conditions = updatedConditions
, lastNotified = if lastNotified laterConds > V.length updatedConditions
then 0
else lastNotified laterConds
}
-- release lock, block until signal, then reacquire lock
awaitImpl = mask_ (putMVar (lock l) () >> takeMVar newCond >> takeMVar (lock l))
return $ Condition awaitImpl (signalImpl l) (signalAllImpl l) destroyImpl
| iand675/disruptor | src/Control/Concurrent/Lock.hs | mit | 2,757 | 0 | 21 | 707 | 888 | 448 | 440 | 69 | 2 |
-- |This module was taken from:
-- "Erwig, M., Kollmansberger, S.: Functional pearls: Probabilistic functional
-- programming in Haskell. J. Funct. Program. 16, 21–34 (January 2006)
-- "
module MMM.Core.Probability where
import qualified System.Random
import Data.List (sort,sortBy,transpose)
import Control.Monad
import System.IO.Unsafe (unsafePerformIO)
import MMM.Core.ListUtils
import MMM.Core.Show
import MMM.Core.FuncComb
{- TO DO:
* create export list
* extend Dist by a constructor for continuous distributions:
C (Float -> Float)
* prove correctness of |||
-}
------------------------------------------------------------------------------
-- CONTENTS:
--
-- 0 AUXILIARY DEFINITIONS
-- 1 DETERMINISTIC AND PROBABILISTIC VALUES
-- 2 RANDOMIZED VALUES
-- 3 DETERMINISTIC AND PROBABILISTIC GENERATORS
-- 4 RANDOMIZED GENERATORS
-- 5 ITERATORS AND SIMULATORS
-- 6 TRACING
------------------------------------------------------------------------------
------------------------------------------------------------------------------
-- 0 AUXILIARY DEFINITIONS
--
-- Event
-- Probability
------------------------------------------------------------------------------
--
-- Events
--
type Event a = a -> Bool
oneOf :: Eq a => [a] -> Event a
oneOf = flip elem
just :: Eq a => a -> Event a
just = oneOf . singleton
--
-- Probabilities
--
newtype Probability = P ProbRep
type ProbRep = Float
precision :: Int
precision = 1
showPfix :: ProbRep -> String
showPfix f | precision==0 = showR 3 (round (f*100))++"%"
| otherwise = showR (4+precision) (fromIntegral (round (f*100*d))/d)++"%"
where d = 10^precision
-- -- mixed precision
-- --
-- showP :: ProbRep -> String
-- showP f | f>=0.1 = showR 3 (round (f*100))++"%"
-- | otherwise = show (f*100)++"%"
-- fixed precision
--
showP :: ProbRep -> String
showP = showPfix
instance Show Probability where
show (P p) = showP p
errorMargin :: ProbRep
errorMargin = 0.00001
--
-- Monad composition
--
-- (>@>) binary composition
-- sequ composition of a list of monadic functions
--
(>@>) :: Monad m => (a -> m b) -> (b -> m c) -> a -> m c
f >@> g = (>>= g) . f
sequ :: Monad m => [a -> m a] -> a -> m a
sequ = foldl (>@>) return
------------------------------------------------------------------------------
-- 1 DETERMINISTIC AND PROBABILISTIC VALUES
--
-- Dist probability disribution
-- Spread functions to convert a list of values into a distribution
------------------------------------------------------------------------------
--
-- Distributions
--
newtype Dist a = D {unD :: [(a,ProbRep)]}
instance Monad Dist where
return x = D [(x,1)]
d >>= f = D [(y,q*p) | (x,p) <- unD d, (y,q) <- unD (f x)]
fail _ = D []
-- note: mzero is a zero for >>= and a unit for mplus
--
instance MonadPlus Dist where
mzero = D []
mplus d d' | isZero d || isZero d' = mzero
| otherwise = unfoldD $ choose 0.5 d d'
isZero :: Dist a -> Bool
isZero (D d) = null d
instance Strong Dist where
rstr (x, b) = do a <- x; return (a, b)
lstr (b, x) = do a <- x; return (b, a)
instance Functor Dist where
fmap f (D d) = D [(f x,p) | (x,p) <- d]
instance (Ord a,Eq a) => Eq (Dist a) where
D xs == D ys = map fst (norm' xs)==map fst (norm' ys) &&
all (\((_,p),(_,q))->abs (p-q)<errorMargin) (zip (norm' xs) (norm' ys))
-- auxiliary functions for constructing and working with distributions
--
onD :: ([(a,ProbRep)] -> [(a,ProbRep)]) -> Dist a -> Dist a
onD f = D . f . unD
sizeD :: Dist a -> Int
sizeD = length . unD
checkD :: Dist a -> Dist a
checkD (D d) | abs (1-sumP d) < errorMargin = D d
| otherwise = error ("Illegal distribution: total probability = "++show (sumP d))
mkD :: [(a,ProbRep)] -> Dist a
mkD = checkD . D
sumP :: [(a,ProbRep)] -> ProbRep
sumP = sum . map snd
sortP :: [(a,ProbRep)] -> [(a,ProbRep)]
sortP = sortBy (\x y->compare (snd y) (snd x))
-- normalization = grouping
--
normBy :: Ord a => (a -> a -> Bool) -> Dist a -> Dist a
normBy f = onD $ accumBy f . sort
accumBy :: Num b => (a -> a -> Bool) -> [(a,b)] -> [(a,b)]
accumBy f ((x,p):ys@((y,q):xs)) | f x y = accumBy f ((x,p+q):xs)
| otherwise = (x,p):accumBy f ys
accumBy _ xs = xs
norm :: Ord a => Dist a -> Dist a
norm = normBy (==)
norm' :: Ord a => [(a,ProbRep)] -> [(a,ProbRep)]
norm' = accumBy (==) . sort
-- pretty printing
--
instance (Ord a,Show a) => Show (Dist a) where
show (D []) = "Impossible"
show (D xs) = concatMap (\(x,p)->showR w x++' ':showP p++"\n") (sortP (norm' xs))
where w = maximum (map (length.show.fst) xs)
--
-- Operations on distributions
--
-- product of independent distributions
--
joinWith :: (a -> b -> c) -> Dist a -> Dist b -> Dist c
joinWith f (D d) (D d') = D [ (f x y,p*q) | (x,p) <- d, (y,q) <- d']
prod :: Dist a -> Dist b -> Dist (a,b)
prod = joinWith (,)
-- distribution generators
--
type Spread a = [a] -> Dist a
certainly :: Trans a
certainly = return
impossible :: Dist a
impossible = mzero
choose :: ProbRep -> a -> a -> Dist a
choose p x y = enum [p,1-p] [x,y]
enum :: [ProbRep] -> Spread a
enum ps xs = mkD $ zip xs ps
enumPC :: [ProbRep] -> Spread a
enumPC ps = enum (map (/100) ps)
relative :: [Int] -> Spread a
relative ns = enum (map (\n->fromIntegral n/fromIntegral (sum ns)) ns)
shape :: (Float -> Float) -> Spread a
shape _ [] = impossible
shape f xs = scale (zip xs ps)
where incr = 1 / fromIntegral ((length xs) - 1)
ps = map f (iterate (+incr) 0)
linear :: Float -> Spread a
linear c = shape (c*)
uniform :: Spread a
uniform = shape (const 1)
negexp :: Spread a
negexp = shape (\x -> exp (-x))
normal :: Spread a
normal = shape (normalCurve 0.5 0.5)
normalCurve :: Float -> Float -> Float -> Float
normalCurve mean stddev x = 1 / sqrt (2 * pi) * exp (-1/2 * u^2)
where u = (x - mean) / stddev
-- extracting and mapping the domain of a distribution
--
extract :: Dist a -> [a]
extract = map fst . unD
mapD :: (a -> b) -> Dist a -> Dist b
mapD = fmap
-- unfold a distribution of distributions into one distribution
--
unfoldD :: Dist (Dist a) -> Dist a
unfoldD (D d) = D [ (x,p*q) | (d',q) <- d, (x,p) <- unD d' ]
-- conditional distribution
--
cond :: Dist Bool -> Dist a -> Dist a -> Dist a
cond b d d' = unfoldD $ choose p d d'
where P p = truth b
truth :: Dist Bool -> Probability
truth (D ((b,p):_)) = P (if b then p else 1-p)
-- conditional probability
--
(|||) :: Dist a -> Event a -> Dist a
(|||) = flip filterD
-- filtering distributions
--
data Select a = Case a | Other
deriving (Eq,Ord,Show)
above :: Ord a => ProbRep -> Dist a -> Dist (Select a)
above p (D d) = D (map (\(x,q)->(Case x,q)) d1++[(Other,sumP d2)])
where (d1,d2) = span (\(_,q)->q>=p) (sortP (norm' d))
scale :: [(a,ProbRep)] -> Dist a
scale xs = D (map (\(x,p)->(x,p/q)) xs)
where q = sumP xs
filterD :: (a -> Bool) -> Dist a -> Dist a
filterD p = scale . filter (p . fst) . unD
-- selecting from distributions
--
selectP :: Dist a -> ProbRep -> a
selectP (D d) p = scanP p d
scanP :: ProbRep -> [(a,ProbRep)] -> a
scanP p ((x,q):ps) | p<=q || null ps = x
| otherwise = scanP (p-q) ps
infix 8 ??
(??) :: Event a -> Dist a -> Probability
(??) p = P . sumP . filter (p . fst) . unD
-- TO DO: generalize Float to arbitrary Num type
--
class ToFloat a where
toFloat :: a -> Float
instance ToFloat Float where toFloat = id
instance ToFloat Int where toFloat = fromIntegral
instance ToFloat Integer where toFloat = fromIntegral
class FromFloat a where
fromFloat :: Float -> a
instance FromFloat Float where fromFloat = id
instance FromFloat Int where fromFloat = round
instance FromFloat Integer where fromFloat = round
-- expected :: ToFloat a => Dist a -> Float
-- expected = sum . map (\(x,p)->toFloat x*p) . unD
class Expected a where
expected :: a -> Float
-- instance ToFloat a => Expected a where
-- expected = toFloat
instance Expected Float where expected = id
instance Expected Int where expected = toFloat
instance Expected Integer where expected = toFloat
instance Expected a => Expected [a] where
expected xs = sum (map expected xs) / toFloat (length xs)
instance Expected a => Expected (Dist a) where
expected = sum . map (\(x,p)->expected x*p) . unD
instance Expected a => Expected (IO a) where
expected r = expected (System.IO.Unsafe.unsafePerformIO r)
-- statistical analyses
--
variance :: Expected a => Dist a -> Float
variance d@(D ps) = sum $ map (\(x,p)->p*sqr (expected x - ex)) ps
where sqr x = x * x
ex = expected d
stddev :: Expected a => Dist a -> Float
stddev = sqrt . variance
------------------------------------------------------------------------------
-- 2 RANDOMIZED VALUES
--
-- R random value
-- RDist random distribution
------------------------------------------------------------------------------
--
-- Random values
--
type R a = IO a
printR :: Show a => R a -> R ()
printR = (>>= print)
instance Show (IO a) where
show _ = ""
pick :: Dist a -> R a
-- pick d = do {p <- Random.randomRIO (0,1); return (selectP p d)}
pick d = System.Random.randomRIO (0,1) >>= return . selectP d
--
-- Randomized distributions
--
type RDist a = R (Dist a)
rAbove :: Ord a => ProbRep -> RDist a -> RDist (Select a)
rAbove p rd = do D d <- rd
let (d1,d2) = span (\(_,q)->q>=p) (sortP (norm' d))
return (D (map (\(x,q)->(Case x,q)) d1++[(Other,sumP d2)]))
------------------------------------------------------------------------------
-- 3 DETERMINISTIC AND PROBABILISTIC GENERATORS
--
-- Change deterministic generator
-- Trans probabilistic generator
-- SpreadC functions to convert a list of changes into a transition
-- SpreadT functions to convert a list of transitions into a transition
------------------------------------------------------------------------------
--
-- transitions
--
type Change a = a -> a
type Trans a = a -> Dist a
idT :: Trans a
idT = certainlyT id
-- mapT maps a change function to the result of a transformation
-- (mapT is somehow a lifted form of mapD)
-- The restricted type of f results from the fact that the
-- argument to t cannot be changed to b in the result Trans type.
--
mapT :: Change a -> Trans a -> Trans a
mapT f t = mapD f . t
-- unfold a distribution of transitions into one transition
--
-- NOTE: The argument transitions must be independent
--
unfoldT :: Dist (Trans a) -> Trans a
unfoldT (D d) x = D [ (y,p*q) | (f,p) <- d, (y,q) <- unD (f x) ]
-- spreading changes into transitions
--
type SpreadC a = [Change a] -> Trans a
certainlyT :: Change a -> Trans a
certainlyT f = certainly . f
-- certainlyT = (certainly .)
-- certainlyT = maybeC 1
maybeT :: ProbRep -> Change a -> Trans a
maybeT p f = enumT [p,1-p] [f,id]
liftC :: Spread a -> [Change a] -> Trans a
liftC s cs x = s [f x | f <- cs]
-- liftC s cs x = s $ map ($ x) cs
uniformT = liftC uniform
normalT = liftC normal
linearT c = liftC (linear c)
enumT xs = liftC (enum xs)
-- spreading transitions into transitions
--
type SpreadT a = [Trans a] -> Trans a
liftT :: Spread (Trans a) -> [Trans a] -> Trans a
liftT s = unfoldT . s
uniformTT = liftT uniform
normalTT = liftT normal
linearTT c = liftT (linear c)
enumTT xs = liftT (enum xs)
------------------------------------------------------------------------------
-- 4 RANDOMIZED GENERATORS
--
-- RChange random change
-- RTrans random transition
------------------------------------------------------------------------------
--
-- Randomized changes
--
type RChange a = a -> R a
random :: Trans a -> RChange a
random t = pick . t
-- random = (pick .)
--
-- Randomized transitions
--
type RTrans a = a -> RDist a
type ApproxDist a = R [a]
-- rDist converts a list of randomly generated values into
-- a distribution by taking equal weights for all values
--
rDist :: Ord a => [R a] -> RDist a
rDist = fmap (norm . uniform) . sequence
------------------------------------------------------------------------------
-- 5 ITERATION AND SIMULATION
--
-- Iterate class defining *.
-- Sim class defining ~.
------------------------------------------------------------------------------
{-
Naming convention:
* takes n :: Int and a generator and iterates the generator n times
. produces a single result
.. produces a trace
~ takes k :: Int [and n :: Int] and a generator and simulates
the [n-fold repetition of the] generator k times
n *. t iterates t and produces a distribution
n *.. t iterates t and produces a trace
k ~. t simulates t and produces a distribution
(k,n) ~*. t simulates the n-fold repetition of t and produces a distribution
(k,n) ~.. t simulates the n-fold repetition of t and produces a trace
-}
-- Iteration captures three iteration strategies:
-- iter builds an n-fold composition of a (randomized) transition
-- while and until implement conditional repetitions
--
-- The class Iterate allows the overloading of iteration for different
-- kinds of generators, namely transitions and random changes:
--
-- Trans a = a -> Dist a ==> c = Dist
-- RChange a = a -> R a ==> c = R = IO
--
class Iterate c where
(*.) :: Int -> (a -> c a) -> (a -> c a)
while :: (a -> Bool) -> (a -> c a) -> (a -> c a)
until :: (a -> Bool) -> (a -> c a) -> (a -> c a)
until p = while (not.p)
infix 8 *.
-- iteration of transitions
--
instance Iterate Dist where
n *. t = head . (n *.. t)
while p t x = if p x then t x >>= while p t else certainly x
-- iteration of random changes
--
instance Iterate IO where
n *. r = (>>= return . head) . rWalk n r
while p t x = do {l <- t x; if p l then while p t l else return l}
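-- A small sketch, not part of the original module ('die', 'addDie' and
-- 'twoDice' are illustrative names): a die roll is a uniform distribution,
-- adding a roll is a transition, and iterating that transition twice from 0
-- gives the distribution of the sum of two dice; 'norm' groups equal sums.
--
die :: Dist Int
die = uniform [1..6]

addDie :: Trans Int
addDie x = mapD (+x) die

twoDice :: Dist Int
twoDice = norm ((2 *. addDie) 0)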
-- Simulation means to repeat a random change many times and
-- to accumulate all results into a distribution. Therefore,
-- simulation can be regarded as an approximation of distributions
-- through randomization.
--
-- The Sim class contains two functions:
--
-- ~. returns the final randomized transition
-- ~.. returns the whole trace
--
-- The Sim class allows the overloading of simulation for different
-- kinds of generators, namely transitions and random changes:
--
-- Trans a = a -> Dist a ==> c = Dist
-- RChange a = a -> R a ==> c = R = IO
--
class Sim c where
(~.) :: Ord a => Int -> (a -> c a) -> RTrans a
(~..) :: Ord a => (Int,Int) -> (a -> c a) -> RExpand a
(~*.) :: Ord a => (Int,Int) -> (a -> c a) -> RTrans a
infix 6 ~.
infix 6 ~..
-- simulation for transitions
--
instance Sim Dist where
(~.) x = (~.) x . random
(~..) x = (~..) x . random
(~*.) x = (~*.) x . random
-- simulation for random changes
--
instance Sim IO where
(~.) n t = rDist . replicate n . t
(~..) (k,n) t = mergeTraces . replicate k . rWalk n t
(~*.) (k,n) t = k ~. n *. t
infix 8 ~*.
--(~*.) :: (Iterate c,Sim c,Ord a) => (Int,Int) -> (a -> c a) -> RTrans a
--(k,n) ~*. t =
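-- A small sketch, not part of the original module ('coinStep' and 'simCoins'
-- are illustrative names): each step adds 0 or 1 with equal probability, so
-- simulating the 10-fold iteration of the step 1000 times approximates the
-- binomial distribution of heads in 10 coin tosses.
--
coinStep :: Trans Int
coinStep x = uniform [x, x+1]

simCoins :: RDist Int
simCoins = ((1000,10) ~*. coinStep) 0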
------------------------------------------------------------------------------
-- 6 TRACING
--
-- (R)Trace
-- (R)Space
-- (R)Walk
-- (R)Expand
------------------------------------------------------------------------------
type Trace a = [a]
type Space a = Trace (Dist a)
type Walk a = a -> Trace a
type Expand a = a -> Space a
-- >>: composes the result of a transition with a space
-- (transition is composed on the left)
--
-- (a -> m a) -> (a -> [m a]) -> (a -> [m a])
(>>:) :: Trans a -> Expand a -> Expand a
f >>: g = \x -> let ds@(D d:_)=g x in
D [ (z,p*q) | (y,p) <- d, (z,q) <- unD (f y)]:ds
infix 6 >>:
-- walk is a bounded version of the predefined function iterate
--
walk :: Int -> Change a -> Walk a
walk n f = take n . iterate f
-- *.. is identical to *., but returns the list of all intermediate
-- distributions
--
(*..) :: Int -> Trans a -> Expand a
0 *.. _ = singleton . certainly
1 *.. t = singleton . t
n *.. t = t >>: (n-1) *.. t
infix 8 *..
type RTrace a = R (Trace a)
type RSpace a = R (Space a)
type RWalk a = a -> RTrace a
type RExpand a = a -> RSpace a
-- (a -> m a) -> (a -> m [a]) -> (a -> m [a])
composelR :: RChange a -> RWalk a -> RWalk a
composelR f g x = do {rs@(r:_) <- g x; s <- f r; return (s:rs)}
-- rWalk computes a list of values by
-- randomly selecting one value from a distribution in each step.
--
rWalk :: Int -> RChange a -> RWalk a
rWalk 0 _ = return . singleton
rWalk 1 t = (>>= return . singleton) . t
rWalk n t = composelR t (rWalk (n-1) t)
-- mergeTraces converts a list of RTraces into a list of randomized
-- distributions, i.e., an RSpace, by creating a randomized
-- distribution for each list position across all traces
--
mergeTraces :: Ord a => [RTrace a] -> RSpace a
mergeTraces = fmap (zipListWith (norm . uniform)) . sequence
where
zipListWith :: ([a] -> b) -> [[a]] -> [b]
zipListWith f = map f . transpose
{-
LAWS
const . pick = random . const
-}
| VictorCMiraldo/mmm | MMM/Core/Probability.hs | mit | 17,302 | 0 | 16 | 4,100 | 5,845 | 3,142 | 2,703 | 269 | 2 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-missing-fields #-}
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
{-# OPTIONS_GHC -fno-warn-name-shadowing #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-----------------------------------------------------------------
-- Autogenerated by Thrift Compiler (0.9.0) --
-- --
-- DO NOT EDIT UNLESS YOU ARE SURE YOU KNOW WHAT YOU ARE DOING --
-----------------------------------------------------------------
module Database.HBase.Internal.Thrift.Hbase_Client(enableTable,disableTable,isTableEnabled,compact,majorCompact,getTableNames,getColumnDescriptors,getTableRegions,createTable,deleteTable,get,getVer,getVerTs,getRow,getRowWithColumns,getRowTs,getRowWithColumnsTs,getRows,getRowsWithColumns,getRowsTs,getRowsWithColumnsTs,mutateRow,mutateRowTs,mutateRows,mutateRowsTs,atomicIncrement,deleteAll,deleteAllTs,deleteAllRow,increment,incrementRows,deleteAllRowTs,scannerOpenWithScan,scannerOpen,scannerOpenWithStop,scannerOpenWithPrefix,scannerOpenTs,scannerOpenWithStopTs,scannerGet,scannerGetList,scannerClose,getRowOrBefore,getRegionInfo) where
import Data.IORef
import Prelude ( Bool(..), Enum, Double, String, Maybe(..),
Eq, Show, Ord,
return, length, IO, fromIntegral, fromEnum, toEnum,
(.), (&&), (||), (==), (++), ($), (-) )
import Control.Exception
import Data.ByteString.Lazy
import Data.Hashable
import Data.Int
import Data.Text.Lazy ( Text )
import qualified Data.Text.Lazy as TL
import Data.Typeable ( Typeable )
import qualified Data.HashMap.Strict as Map
import qualified Data.HashSet as Set
import qualified Data.Vector as Vector
import Thrift
import Thrift.Types ()
import Database.HBase.Internal.Thrift.Hbase_Types
import Database.HBase.Internal.Thrift.Hbase
seqid = newIORef 0
enableTable (ip,op) arg_tableName = do
send_enableTable op arg_tableName
recv_enableTable ip
send_enableTable op arg_tableName = do
seq <- seqid
seqn <- readIORef seq
writeMessageBegin op ("enableTable", M_CALL, seqn)
write_EnableTable_args op (EnableTable_args{f_EnableTable_args_tableName=Just arg_tableName})
writeMessageEnd op
tFlush (getTransport op)
recv_enableTable ip = do
(fname, mtype, rseqid) <- readMessageBegin ip
if mtype == M_EXCEPTION then do
x <- readAppExn ip
readMessageEnd ip
throw x
else return ()
res <- read_EnableTable_result ip
readMessageEnd ip
case f_EnableTable_result_io res of
Nothing -> return ()
Just _v -> throw _v
return ()
disableTable (ip,op) arg_tableName = do
send_disableTable op arg_tableName
recv_disableTable ip
send_disableTable op arg_tableName = do
seq <- seqid
seqn <- readIORef seq
writeMessageBegin op ("disableTable", M_CALL, seqn)
write_DisableTable_args op (DisableTable_args{f_DisableTable_args_tableName=Just arg_tableName})
writeMessageEnd op
tFlush (getTransport op)
recv_disableTable ip = do
(fname, mtype, rseqid) <- readMessageBegin ip
if mtype == M_EXCEPTION then do
x <- readAppExn ip
readMessageEnd ip
throw x
else return ()
res <- read_DisableTable_result ip
readMessageEnd ip
case f_DisableTable_result_io res of
Nothing -> return ()
Just _v -> throw _v
return ()
isTableEnabled (ip,op) arg_tableName = do
send_isTableEnabled op arg_tableName
recv_isTableEnabled ip
send_isTableEnabled op arg_tableName = do
seq <- seqid
seqn <- readIORef seq
writeMessageBegin op ("isTableEnabled", M_CALL, seqn)
write_IsTableEnabled_args op (IsTableEnabled_args{f_IsTableEnabled_args_tableName=Just arg_tableName})
writeMessageEnd op
tFlush (getTransport op)
recv_isTableEnabled ip = do
(fname, mtype, rseqid) <- readMessageBegin ip
if mtype == M_EXCEPTION then do
x <- readAppExn ip
readMessageEnd ip
throw x
else return ()
res <- read_IsTableEnabled_result ip
readMessageEnd ip
case f_IsTableEnabled_result_success res of
Just v -> return v
Nothing -> do
case f_IsTableEnabled_result_io res of
Nothing -> return ()
Just _v -> throw _v
throw (AppExn AE_MISSING_RESULT "isTableEnabled failed: unknown result")
compact (ip,op) arg_tableNameOrRegionName = do
send_compact op arg_tableNameOrRegionName
recv_compact ip
send_compact op arg_tableNameOrRegionName = do
seq <- seqid
seqn <- readIORef seq
writeMessageBegin op ("compact", M_CALL, seqn)
write_Compact_args op (Compact_args{f_Compact_args_tableNameOrRegionName=Just arg_tableNameOrRegionName})
writeMessageEnd op
tFlush (getTransport op)
recv_compact ip = do
(fname, mtype, rseqid) <- readMessageBegin ip
if mtype == M_EXCEPTION then do
x <- readAppExn ip
readMessageEnd ip
throw x
else return ()
res <- read_Compact_result ip
readMessageEnd ip
case f_Compact_result_io res of
Nothing -> return ()
Just _v -> throw _v
return ()
majorCompact (ip,op) arg_tableNameOrRegionName = do
send_majorCompact op arg_tableNameOrRegionName
recv_majorCompact ip
send_majorCompact op arg_tableNameOrRegionName = do
seq <- seqid
seqn <- readIORef seq
writeMessageBegin op ("majorCompact", M_CALL, seqn)
write_MajorCompact_args op (MajorCompact_args{f_MajorCompact_args_tableNameOrRegionName=Just arg_tableNameOrRegionName})
writeMessageEnd op
tFlush (getTransport op)
recv_majorCompact ip = do
(fname, mtype, rseqid) <- readMessageBegin ip
if mtype == M_EXCEPTION then do
x <- readAppExn ip
readMessageEnd ip
throw x
else return ()
res <- read_MajorCompact_result ip
readMessageEnd ip
case f_MajorCompact_result_io res of
Nothing -> return ()
Just _v -> throw _v
return ()
getTableNames (ip,op) = do
send_getTableNames op
recv_getTableNames ip
send_getTableNames op = do
seq <- seqid
seqn <- readIORef seq
writeMessageBegin op ("getTableNames", M_CALL, seqn)
write_GetTableNames_args op (GetTableNames_args{})
writeMessageEnd op
tFlush (getTransport op)
recv_getTableNames ip = do
(fname, mtype, rseqid) <- readMessageBegin ip
if mtype == M_EXCEPTION then do
x <- readAppExn ip
readMessageEnd ip
throw x
else return ()
res <- read_GetTableNames_result ip
readMessageEnd ip
case f_GetTableNames_result_success res of
Just v -> return v
Nothing -> do
case f_GetTableNames_result_io res of
Nothing -> return ()
Just _v -> throw _v
throw (AppExn AE_MISSING_RESULT "getTableNames failed: unknown result")
getColumnDescriptors (ip,op) arg_tableName = do
send_getColumnDescriptors op arg_tableName
recv_getColumnDescriptors ip
send_getColumnDescriptors op arg_tableName = do
seq <- seqid
seqn <- readIORef seq
writeMessageBegin op ("getColumnDescriptors", M_CALL, seqn)
write_GetColumnDescriptors_args op (GetColumnDescriptors_args{f_GetColumnDescriptors_args_tableName=Just arg_tableName})
writeMessageEnd op
tFlush (getTransport op)
recv_getColumnDescriptors ip = do
(fname, mtype, rseqid) <- readMessageBegin ip
if mtype == M_EXCEPTION then do
x <- readAppExn ip
readMessageEnd ip
throw x
else return ()
res <- read_GetColumnDescriptors_result ip
readMessageEnd ip
case f_GetColumnDescriptors_result_success res of
Just v -> return v
Nothing -> do
case f_GetColumnDescriptors_result_io res of
Nothing -> return ()
Just _v -> throw _v
throw (AppExn AE_MISSING_RESULT "getColumnDescriptors failed: unknown result")
getTableRegions (ip,op) arg_tableName = do
send_getTableRegions op arg_tableName
recv_getTableRegions ip
send_getTableRegions op arg_tableName = do
seq <- seqid
seqn <- readIORef seq
writeMessageBegin op ("getTableRegions", M_CALL, seqn)
write_GetTableRegions_args op (GetTableRegions_args{f_GetTableRegions_args_tableName=Just arg_tableName})
writeMessageEnd op
tFlush (getTransport op)
recv_getTableRegions ip = do
(fname, mtype, rseqid) <- readMessageBegin ip
if mtype == M_EXCEPTION then do
x <- readAppExn ip
readMessageEnd ip
throw x
else return ()
res <- read_GetTableRegions_result ip
readMessageEnd ip
case f_GetTableRegions_result_success res of
Just v -> return v
Nothing -> do
case f_GetTableRegions_result_io res of
Nothing -> return ()
Just _v -> throw _v
throw (AppExn AE_MISSING_RESULT "getTableRegions failed: unknown result")
createTable (ip,op) arg_tableName arg_columnFamilies = do
send_createTable op arg_tableName arg_columnFamilies
recv_createTable ip
send_createTable op arg_tableName arg_columnFamilies = do
seq <- seqid
seqn <- readIORef seq
writeMessageBegin op ("createTable", M_CALL, seqn)
write_CreateTable_args op (CreateTable_args{f_CreateTable_args_tableName=Just arg_tableName,f_CreateTable_args_columnFamilies=Just arg_columnFamilies})
writeMessageEnd op
tFlush (getTransport op)
recv_createTable ip = do
(fname, mtype, rseqid) <- readMessageBegin ip
if mtype == M_EXCEPTION then do
x <- readAppExn ip
readMessageEnd ip
throw x
else return ()
res <- read_CreateTable_result ip
readMessageEnd ip
case f_CreateTable_result_io res of
Nothing -> return ()
Just _v -> throw _v
case f_CreateTable_result_ia res of
Nothing -> return ()
Just _v -> throw _v
case f_CreateTable_result_exist res of
Nothing -> return ()
Just _v -> throw _v
return ()
deleteTable (ip,op) arg_tableName = do
send_deleteTable op arg_tableName
recv_deleteTable ip
send_deleteTable op arg_tableName = do
seq <- seqid
seqn <- readIORef seq
writeMessageBegin op ("deleteTable", M_CALL, seqn)
write_DeleteTable_args op (DeleteTable_args{f_DeleteTable_args_tableName=Just arg_tableName})
writeMessageEnd op
tFlush (getTransport op)
recv_deleteTable ip = do
(fname, mtype, rseqid) <- readMessageBegin ip
if mtype == M_EXCEPTION then do
x <- readAppExn ip
readMessageEnd ip
throw x
else return ()
res <- read_DeleteTable_result ip
readMessageEnd ip
case f_DeleteTable_result_io res of
Nothing -> return ()
Just _v -> throw _v
return ()
get (ip,op) arg_tableName arg_row arg_column arg_attributes = do
send_get op arg_tableName arg_row arg_column arg_attributes
recv_get ip
send_get op arg_tableName arg_row arg_column arg_attributes = do
seq <- seqid
seqn <- readIORef seq
writeMessageBegin op ("get", M_CALL, seqn)
write_Get_args op (Get_args{f_Get_args_tableName=Just arg_tableName,f_Get_args_row=Just arg_row,f_Get_args_column=Just arg_column,f_Get_args_attributes=Just arg_attributes})
writeMessageEnd op
tFlush (getTransport op)
recv_get ip = do
(fname, mtype, rseqid) <- readMessageBegin ip
if mtype == M_EXCEPTION then do
x <- readAppExn ip
readMessageEnd ip
throw x
else return ()
res <- read_Get_result ip
readMessageEnd ip
case f_Get_result_success res of
Just v -> return v
Nothing -> do
case f_Get_result_io res of
Nothing -> return ()
Just _v -> throw _v
throw (AppExn AE_MISSING_RESULT "get failed: unknown result")
getVer (ip,op) arg_tableName arg_row arg_column arg_numVersions arg_attributes = do
send_getVer op arg_tableName arg_row arg_column arg_numVersions arg_attributes
recv_getVer ip
send_getVer op arg_tableName arg_row arg_column arg_numVersions arg_attributes = do
seq <- seqid
seqn <- readIORef seq
writeMessageBegin op ("getVer", M_CALL, seqn)
write_GetVer_args op (GetVer_args{f_GetVer_args_tableName=Just arg_tableName,f_GetVer_args_row=Just arg_row,f_GetVer_args_column=Just arg_column,f_GetVer_args_numVersions=Just arg_numVersions,f_GetVer_args_attributes=Just arg_attributes})
writeMessageEnd op
tFlush (getTransport op)
recv_getVer ip = do
(fname, mtype, rseqid) <- readMessageBegin ip
if mtype == M_EXCEPTION then do
x <- readAppExn ip
readMessageEnd ip
throw x
else return ()
res <- read_GetVer_result ip
readMessageEnd ip
case f_GetVer_result_success res of
Just v -> return v
Nothing -> do
case f_GetVer_result_io res of
Nothing -> return ()
Just _v -> throw _v
throw (AppExn AE_MISSING_RESULT "getVer failed: unknown result")
getVerTs (ip,op) arg_tableName arg_row arg_column arg_timestamp arg_numVersions arg_attributes = do
send_getVerTs op arg_tableName arg_row arg_column arg_timestamp arg_numVersions arg_attributes
recv_getVerTs ip
send_getVerTs op arg_tableName arg_row arg_column arg_timestamp arg_numVersions arg_attributes = do
seq <- seqid
seqn <- readIORef seq
writeMessageBegin op ("getVerTs", M_CALL, seqn)
write_GetVerTs_args op (GetVerTs_args{f_GetVerTs_args_tableName=Just arg_tableName,f_GetVerTs_args_row=Just arg_row,f_GetVerTs_args_column=Just arg_column,f_GetVerTs_args_timestamp=Just arg_timestamp,f_GetVerTs_args_numVersions=Just arg_numVersions,f_GetVerTs_args_attributes=Just arg_attributes})
writeMessageEnd op
tFlush (getTransport op)
recv_getVerTs ip = do
(fname, mtype, rseqid) <- readMessageBegin ip
if mtype == M_EXCEPTION then do
x <- readAppExn ip
readMessageEnd ip
throw x
else return ()
res <- read_GetVerTs_result ip
readMessageEnd ip
case f_GetVerTs_result_success res of
Just v -> return v
Nothing -> do
case f_GetVerTs_result_io res of
Nothing -> return ()
Just _v -> throw _v
throw (AppExn AE_MISSING_RESULT "getVerTs failed: unknown result")
getRow (ip,op) arg_tableName arg_row arg_attributes = do
send_getRow op arg_tableName arg_row arg_attributes
recv_getRow ip
send_getRow op arg_tableName arg_row arg_attributes = do
seq <- seqid
seqn <- readIORef seq
writeMessageBegin op ("getRow", M_CALL, seqn)
write_GetRow_args op (GetRow_args{f_GetRow_args_tableName=Just arg_tableName,f_GetRow_args_row=Just arg_row,f_GetRow_args_attributes=Just arg_attributes})
writeMessageEnd op
tFlush (getTransport op)
recv_getRow ip = do
(fname, mtype, rseqid) <- readMessageBegin ip
if mtype == M_EXCEPTION then do
x <- readAppExn ip
readMessageEnd ip
throw x
else return ()
res <- read_GetRow_result ip
readMessageEnd ip
case f_GetRow_result_success res of
Just v -> return v
Nothing -> do
case f_GetRow_result_io res of
Nothing -> return ()
Just _v -> throw _v
throw (AppExn AE_MISSING_RESULT "getRow failed: unknown result")
getRowWithColumns (ip,op) arg_tableName arg_row arg_columns arg_attributes = do
send_getRowWithColumns op arg_tableName arg_row arg_columns arg_attributes
recv_getRowWithColumns ip
send_getRowWithColumns op arg_tableName arg_row arg_columns arg_attributes = do
seq <- seqid
seqn <- readIORef seq
writeMessageBegin op ("getRowWithColumns", M_CALL, seqn)
write_GetRowWithColumns_args op (GetRowWithColumns_args{f_GetRowWithColumns_args_tableName=Just arg_tableName,f_GetRowWithColumns_args_row=Just arg_row,f_GetRowWithColumns_args_columns=Just arg_columns,f_GetRowWithColumns_args_attributes=Just arg_attributes})
writeMessageEnd op
tFlush (getTransport op)
recv_getRowWithColumns ip = do
(fname, mtype, rseqid) <- readMessageBegin ip
if mtype == M_EXCEPTION then do
x <- readAppExn ip
readMessageEnd ip
throw x
else return ()
res <- read_GetRowWithColumns_result ip
readMessageEnd ip
case f_GetRowWithColumns_result_success res of
Just v -> return v
Nothing -> do
case f_GetRowWithColumns_result_io res of
Nothing -> return ()
Just _v -> throw _v
throw (AppExn AE_MISSING_RESULT "getRowWithColumns failed: unknown result")
getRowTs (ip,op) arg_tableName arg_row arg_timestamp arg_attributes = do
send_getRowTs op arg_tableName arg_row arg_timestamp arg_attributes
recv_getRowTs ip
send_getRowTs op arg_tableName arg_row arg_timestamp arg_attributes = do
seq <- seqid
seqn <- readIORef seq
writeMessageBegin op ("getRowTs", M_CALL, seqn)
write_GetRowTs_args op (GetRowTs_args{f_GetRowTs_args_tableName=Just arg_tableName,f_GetRowTs_args_row=Just arg_row,f_GetRowTs_args_timestamp=Just arg_timestamp,f_GetRowTs_args_attributes=Just arg_attributes})
writeMessageEnd op
tFlush (getTransport op)
recv_getRowTs ip = do
(fname, mtype, rseqid) <- readMessageBegin ip
if mtype == M_EXCEPTION then do
x <- readAppExn ip
readMessageEnd ip
throw x
else return ()
res <- read_GetRowTs_result ip
readMessageEnd ip
case f_GetRowTs_result_success res of
Just v -> return v
Nothing -> do
case f_GetRowTs_result_io res of
Nothing -> return ()
Just _v -> throw _v
throw (AppExn AE_MISSING_RESULT "getRowTs failed: unknown result")
getRowWithColumnsTs (ip,op) arg_tableName arg_row arg_columns arg_timestamp arg_attributes = do
send_getRowWithColumnsTs op arg_tableName arg_row arg_columns arg_timestamp arg_attributes
recv_getRowWithColumnsTs ip
send_getRowWithColumnsTs op arg_tableName arg_row arg_columns arg_timestamp arg_attributes = do
seq <- seqid
seqn <- readIORef seq
writeMessageBegin op ("getRowWithColumnsTs", M_CALL, seqn)
write_GetRowWithColumnsTs_args op (GetRowWithColumnsTs_args{f_GetRowWithColumnsTs_args_tableName=Just arg_tableName,f_GetRowWithColumnsTs_args_row=Just arg_row,f_GetRowWithColumnsTs_args_columns=Just arg_columns,f_GetRowWithColumnsTs_args_timestamp=Just arg_timestamp,f_GetRowWithColumnsTs_args_attributes=Just arg_attributes})
writeMessageEnd op
tFlush (getTransport op)
recv_getRowWithColumnsTs ip = do
(fname, mtype, rseqid) <- readMessageBegin ip
if mtype == M_EXCEPTION then do
x <- readAppExn ip
readMessageEnd ip
throw x
else return ()
res <- read_GetRowWithColumnsTs_result ip
readMessageEnd ip
case f_GetRowWithColumnsTs_result_success res of
Just v -> return v
Nothing -> do
case f_GetRowWithColumnsTs_result_io res of
Nothing -> return ()
Just _v -> throw _v
throw (AppExn AE_MISSING_RESULT "getRowWithColumnsTs failed: unknown result")
getRows (ip,op) arg_tableName arg_rows arg_attributes = do
send_getRows op arg_tableName arg_rows arg_attributes
recv_getRows ip
send_getRows op arg_tableName arg_rows arg_attributes = do
seq <- seqid
seqn <- readIORef seq
writeMessageBegin op ("getRows", M_CALL, seqn)
write_GetRows_args op (GetRows_args{f_GetRows_args_tableName=Just arg_tableName,f_GetRows_args_rows=Just arg_rows,f_GetRows_args_attributes=Just arg_attributes})
writeMessageEnd op
tFlush (getTransport op)
recv_getRows ip = do
(fname, mtype, rseqid) <- readMessageBegin ip
if mtype == M_EXCEPTION then do
x <- readAppExn ip
readMessageEnd ip
throw x
else return ()
res <- read_GetRows_result ip
readMessageEnd ip
case f_GetRows_result_success res of
Just v -> return v
Nothing -> do
case f_GetRows_result_io res of
Nothing -> return ()
Just _v -> throw _v
throw (AppExn AE_MISSING_RESULT "getRows failed: unknown result")
getRowsWithColumns (ip,op) arg_tableName arg_rows arg_columns arg_attributes = do
send_getRowsWithColumns op arg_tableName arg_rows arg_columns arg_attributes
recv_getRowsWithColumns ip
send_getRowsWithColumns op arg_tableName arg_rows arg_columns arg_attributes = do
seq <- seqid
seqn <- readIORef seq
writeMessageBegin op ("getRowsWithColumns", M_CALL, seqn)
write_GetRowsWithColumns_args op (GetRowsWithColumns_args{f_GetRowsWithColumns_args_tableName=Just arg_tableName,f_GetRowsWithColumns_args_rows=Just arg_rows,f_GetRowsWithColumns_args_columns=Just arg_columns,f_GetRowsWithColumns_args_attributes=Just arg_attributes})
writeMessageEnd op
tFlush (getTransport op)
recv_getRowsWithColumns ip = do
(fname, mtype, rseqid) <- readMessageBegin ip
if mtype == M_EXCEPTION then do
x <- readAppExn ip
readMessageEnd ip
throw x
else return ()
res <- read_GetRowsWithColumns_result ip
readMessageEnd ip
case f_GetRowsWithColumns_result_success res of
Just v -> return v
Nothing -> do
case f_GetRowsWithColumns_result_io res of
Nothing -> return ()
Just _v -> throw _v
throw (AppExn AE_MISSING_RESULT "getRowsWithColumns failed: unknown result")
getRowsTs (ip,op) arg_tableName arg_rows arg_timestamp arg_attributes = do
send_getRowsTs op arg_tableName arg_rows arg_timestamp arg_attributes
recv_getRowsTs ip
send_getRowsTs op arg_tableName arg_rows arg_timestamp arg_attributes = do
seq <- seqid
seqn <- readIORef seq
writeMessageBegin op ("getRowsTs", M_CALL, seqn)
write_GetRowsTs_args op (GetRowsTs_args{f_GetRowsTs_args_tableName=Just arg_tableName,f_GetRowsTs_args_rows=Just arg_rows,f_GetRowsTs_args_timestamp=Just arg_timestamp,f_GetRowsTs_args_attributes=Just arg_attributes})
writeMessageEnd op
tFlush (getTransport op)
recv_getRowsTs ip = do
(fname, mtype, rseqid) <- readMessageBegin ip
if mtype == M_EXCEPTION then do
x <- readAppExn ip
readMessageEnd ip
throw x
else return ()
res <- read_GetRowsTs_result ip
readMessageEnd ip
case f_GetRowsTs_result_success res of
Just v -> return v
Nothing -> do
case f_GetRowsTs_result_io res of
Nothing -> return ()
Just _v -> throw _v
throw (AppExn AE_MISSING_RESULT "getRowsTs failed: unknown result")
getRowsWithColumnsTs (ip,op) arg_tableName arg_rows arg_columns arg_timestamp arg_attributes = do
send_getRowsWithColumnsTs op arg_tableName arg_rows arg_columns arg_timestamp arg_attributes
recv_getRowsWithColumnsTs ip
send_getRowsWithColumnsTs op arg_tableName arg_rows arg_columns arg_timestamp arg_attributes = do
seq <- seqid
seqn <- readIORef seq
writeMessageBegin op ("getRowsWithColumnsTs", M_CALL, seqn)
write_GetRowsWithColumnsTs_args op (GetRowsWithColumnsTs_args{f_GetRowsWithColumnsTs_args_tableName=Just arg_tableName,f_GetRowsWithColumnsTs_args_rows=Just arg_rows,f_GetRowsWithColumnsTs_args_columns=Just arg_columns,f_GetRowsWithColumnsTs_args_timestamp=Just arg_timestamp,f_GetRowsWithColumnsTs_args_attributes=Just arg_attributes})
writeMessageEnd op
tFlush (getTransport op)
recv_getRowsWithColumnsTs ip = do
(fname, mtype, rseqid) <- readMessageBegin ip
if mtype == M_EXCEPTION then do
x <- readAppExn ip
readMessageEnd ip
throw x
else return ()
res <- read_GetRowsWithColumnsTs_result ip
readMessageEnd ip
case f_GetRowsWithColumnsTs_result_success res of
Just v -> return v
Nothing -> do
case f_GetRowsWithColumnsTs_result_io res of
Nothing -> return ()
Just _v -> throw _v
throw (AppExn AE_MISSING_RESULT "getRowsWithColumnsTs failed: unknown result")
mutateRow (ip,op) arg_tableName arg_row arg_mutations arg_attributes = do
send_mutateRow op arg_tableName arg_row arg_mutations arg_attributes
recv_mutateRow ip
send_mutateRow op arg_tableName arg_row arg_mutations arg_attributes = do
seq <- seqid
seqn <- readIORef seq
writeMessageBegin op ("mutateRow", M_CALL, seqn)
write_MutateRow_args op (MutateRow_args{f_MutateRow_args_tableName=Just arg_tableName,f_MutateRow_args_row=Just arg_row,f_MutateRow_args_mutations=Just arg_mutations,f_MutateRow_args_attributes=Just arg_attributes})
writeMessageEnd op
tFlush (getTransport op)
recv_mutateRow ip = do
(fname, mtype, rseqid) <- readMessageBegin ip
if mtype == M_EXCEPTION then do
x <- readAppExn ip
readMessageEnd ip
throw x
else return ()
res <- read_MutateRow_result ip
readMessageEnd ip
case f_MutateRow_result_io res of
Nothing -> return ()
Just _v -> throw _v
case f_MutateRow_result_ia res of
Nothing -> return ()
Just _v -> throw _v
return ()
mutateRowTs (ip,op) arg_tableName arg_row arg_mutations arg_timestamp arg_attributes = do
send_mutateRowTs op arg_tableName arg_row arg_mutations arg_timestamp arg_attributes
recv_mutateRowTs ip
send_mutateRowTs op arg_tableName arg_row arg_mutations arg_timestamp arg_attributes = do
seq <- seqid
seqn <- readIORef seq
writeMessageBegin op ("mutateRowTs", M_CALL, seqn)
write_MutateRowTs_args op (MutateRowTs_args{f_MutateRowTs_args_tableName=Just arg_tableName,f_MutateRowTs_args_row=Just arg_row,f_MutateRowTs_args_mutations=Just arg_mutations,f_MutateRowTs_args_timestamp=Just arg_timestamp,f_MutateRowTs_args_attributes=Just arg_attributes})
writeMessageEnd op
tFlush (getTransport op)
recv_mutateRowTs ip = do
(fname, mtype, rseqid) <- readMessageBegin ip
if mtype == M_EXCEPTION then do
x <- readAppExn ip
readMessageEnd ip
throw x
else return ()
res <- read_MutateRowTs_result ip
readMessageEnd ip
case f_MutateRowTs_result_io res of
Nothing -> return ()
Just _v -> throw _v
case f_MutateRowTs_result_ia res of
Nothing -> return ()
Just _v -> throw _v
return ()
mutateRows (ip,op) arg_tableName arg_rowBatches arg_attributes = do
send_mutateRows op arg_tableName arg_rowBatches arg_attributes
recv_mutateRows ip
send_mutateRows op arg_tableName arg_rowBatches arg_attributes = do
seq <- seqid
seqn <- readIORef seq
writeMessageBegin op ("mutateRows", M_CALL, seqn)
write_MutateRows_args op (MutateRows_args{f_MutateRows_args_tableName=Just arg_tableName,f_MutateRows_args_rowBatches=Just arg_rowBatches,f_MutateRows_args_attributes=Just arg_attributes})
writeMessageEnd op
tFlush (getTransport op)
recv_mutateRows ip = do
(fname, mtype, rseqid) <- readMessageBegin ip
if mtype == M_EXCEPTION then do
x <- readAppExn ip
readMessageEnd ip
throw x
else return ()
res <- read_MutateRows_result ip
readMessageEnd ip
case f_MutateRows_result_io res of
Nothing -> return ()
Just _v -> throw _v
case f_MutateRows_result_ia res of
Nothing -> return ()
Just _v -> throw _v
return ()
mutateRowsTs (ip,op) arg_tableName arg_rowBatches arg_timestamp arg_attributes = do
send_mutateRowsTs op arg_tableName arg_rowBatches arg_timestamp arg_attributes
recv_mutateRowsTs ip
send_mutateRowsTs op arg_tableName arg_rowBatches arg_timestamp arg_attributes = do
seq <- seqid
seqn <- readIORef seq
writeMessageBegin op ("mutateRowsTs", M_CALL, seqn)
write_MutateRowsTs_args op (MutateRowsTs_args{f_MutateRowsTs_args_tableName=Just arg_tableName,f_MutateRowsTs_args_rowBatches=Just arg_rowBatches,f_MutateRowsTs_args_timestamp=Just arg_timestamp,f_MutateRowsTs_args_attributes=Just arg_attributes})
writeMessageEnd op
tFlush (getTransport op)
recv_mutateRowsTs ip = do
(fname, mtype, rseqid) <- readMessageBegin ip
if mtype == M_EXCEPTION then do
x <- readAppExn ip
readMessageEnd ip
throw x
else return ()
res <- read_MutateRowsTs_result ip
readMessageEnd ip
case f_MutateRowsTs_result_io res of
Nothing -> return ()
Just _v -> throw _v
case f_MutateRowsTs_result_ia res of
Nothing -> return ()
Just _v -> throw _v
return ()
atomicIncrement (ip,op) arg_tableName arg_row arg_column arg_value = do
send_atomicIncrement op arg_tableName arg_row arg_column arg_value
recv_atomicIncrement ip
send_atomicIncrement op arg_tableName arg_row arg_column arg_value = do
seq <- seqid
seqn <- readIORef seq
writeMessageBegin op ("atomicIncrement", M_CALL, seqn)
write_AtomicIncrement_args op (AtomicIncrement_args{f_AtomicIncrement_args_tableName=Just arg_tableName,f_AtomicIncrement_args_row=Just arg_row,f_AtomicIncrement_args_column=Just arg_column,f_AtomicIncrement_args_value=Just arg_value})
writeMessageEnd op
tFlush (getTransport op)
recv_atomicIncrement ip = do
(fname, mtype, rseqid) <- readMessageBegin ip
if mtype == M_EXCEPTION then do
x <- readAppExn ip
readMessageEnd ip
throw x
else return ()
res <- read_AtomicIncrement_result ip
readMessageEnd ip
case f_AtomicIncrement_result_success res of
Just v -> return v
Nothing -> do
case f_AtomicIncrement_result_io res of
Nothing -> return ()
Just _v -> throw _v
case f_AtomicIncrement_result_ia res of
Nothing -> return ()
Just _v -> throw _v
throw (AppExn AE_MISSING_RESULT "atomicIncrement failed: unknown result")
deleteAll (ip,op) arg_tableName arg_row arg_column arg_attributes = do
send_deleteAll op arg_tableName arg_row arg_column arg_attributes
recv_deleteAll ip
send_deleteAll op arg_tableName arg_row arg_column arg_attributes = do
seq <- seqid
seqn <- readIORef seq
writeMessageBegin op ("deleteAll", M_CALL, seqn)
write_DeleteAll_args op (DeleteAll_args{f_DeleteAll_args_tableName=Just arg_tableName,f_DeleteAll_args_row=Just arg_row,f_DeleteAll_args_column=Just arg_column,f_DeleteAll_args_attributes=Just arg_attributes})
writeMessageEnd op
tFlush (getTransport op)
recv_deleteAll ip = do
(fname, mtype, rseqid) <- readMessageBegin ip
if mtype == M_EXCEPTION then do
x <- readAppExn ip
readMessageEnd ip
throw x
else return ()
res <- read_DeleteAll_result ip
readMessageEnd ip
case f_DeleteAll_result_io res of
Nothing -> return ()
Just _v -> throw _v
return ()
deleteAllTs (ip,op) arg_tableName arg_row arg_column arg_timestamp arg_attributes = do
send_deleteAllTs op arg_tableName arg_row arg_column arg_timestamp arg_attributes
recv_deleteAllTs ip
send_deleteAllTs op arg_tableName arg_row arg_column arg_timestamp arg_attributes = do
seq <- seqid
seqn <- readIORef seq
writeMessageBegin op ("deleteAllTs", M_CALL, seqn)
write_DeleteAllTs_args op (DeleteAllTs_args{f_DeleteAllTs_args_tableName=Just arg_tableName,f_DeleteAllTs_args_row=Just arg_row,f_DeleteAllTs_args_column=Just arg_column,f_DeleteAllTs_args_timestamp=Just arg_timestamp,f_DeleteAllTs_args_attributes=Just arg_attributes})
writeMessageEnd op
tFlush (getTransport op)
recv_deleteAllTs ip = do
(fname, mtype, rseqid) <- readMessageBegin ip
if mtype == M_EXCEPTION then do
x <- readAppExn ip
readMessageEnd ip
throw x
else return ()
res <- read_DeleteAllTs_result ip
readMessageEnd ip
case f_DeleteAllTs_result_io res of
Nothing -> return ()
Just _v -> throw _v
return ()
deleteAllRow (ip,op) arg_tableName arg_row arg_attributes = do
send_deleteAllRow op arg_tableName arg_row arg_attributes
recv_deleteAllRow ip
send_deleteAllRow op arg_tableName arg_row arg_attributes = do
seq <- seqid
seqn <- readIORef seq
writeMessageBegin op ("deleteAllRow", M_CALL, seqn)
write_DeleteAllRow_args op (DeleteAllRow_args{f_DeleteAllRow_args_tableName=Just arg_tableName,f_DeleteAllRow_args_row=Just arg_row,f_DeleteAllRow_args_attributes=Just arg_attributes})
writeMessageEnd op
tFlush (getTransport op)
recv_deleteAllRow ip = do
(fname, mtype, rseqid) <- readMessageBegin ip
if mtype == M_EXCEPTION then do
x <- readAppExn ip
readMessageEnd ip
throw x
else return ()
res <- read_DeleteAllRow_result ip
readMessageEnd ip
case f_DeleteAllRow_result_io res of
Nothing -> return ()
Just _v -> throw _v
return ()
increment (ip,op) arg_increment = do
send_increment op arg_increment
recv_increment ip
send_increment op arg_increment = do
seq <- seqid
seqn <- readIORef seq
writeMessageBegin op ("increment", M_CALL, seqn)
write_Increment_args op (Increment_args{f_Increment_args_increment=Just arg_increment})
writeMessageEnd op
tFlush (getTransport op)
recv_increment ip = do
(fname, mtype, rseqid) <- readMessageBegin ip
if mtype == M_EXCEPTION then do
x <- readAppExn ip
readMessageEnd ip
throw x
else return ()
res <- read_Increment_result ip
readMessageEnd ip
case f_Increment_result_io res of
Nothing -> return ()
Just _v -> throw _v
return ()
incrementRows (ip,op) arg_increments = do
send_incrementRows op arg_increments
recv_incrementRows ip
send_incrementRows op arg_increments = do
seq <- seqid
seqn <- readIORef seq
writeMessageBegin op ("incrementRows", M_CALL, seqn)
write_IncrementRows_args op (IncrementRows_args{f_IncrementRows_args_increments=Just arg_increments})
writeMessageEnd op
tFlush (getTransport op)
recv_incrementRows ip = do
(fname, mtype, rseqid) <- readMessageBegin ip
if mtype == M_EXCEPTION then do
x <- readAppExn ip
readMessageEnd ip
throw x
else return ()
res <- read_IncrementRows_result ip
readMessageEnd ip
case f_IncrementRows_result_io res of
Nothing -> return ()
Just _v -> throw _v
return ()
deleteAllRowTs (ip,op) arg_tableName arg_row arg_timestamp arg_attributes = do
send_deleteAllRowTs op arg_tableName arg_row arg_timestamp arg_attributes
recv_deleteAllRowTs ip
send_deleteAllRowTs op arg_tableName arg_row arg_timestamp arg_attributes = do
seq <- seqid
seqn <- readIORef seq
writeMessageBegin op ("deleteAllRowTs", M_CALL, seqn)
write_DeleteAllRowTs_args op (DeleteAllRowTs_args{f_DeleteAllRowTs_args_tableName=Just arg_tableName,f_DeleteAllRowTs_args_row=Just arg_row,f_DeleteAllRowTs_args_timestamp=Just arg_timestamp,f_DeleteAllRowTs_args_attributes=Just arg_attributes})
writeMessageEnd op
tFlush (getTransport op)
recv_deleteAllRowTs ip = do
(fname, mtype, rseqid) <- readMessageBegin ip
if mtype == M_EXCEPTION then do
x <- readAppExn ip
readMessageEnd ip
throw x
else return ()
res <- read_DeleteAllRowTs_result ip
readMessageEnd ip
case f_DeleteAllRowTs_result_io res of
Nothing -> return ()
Just _v -> throw _v
return ()
scannerOpenWithScan (ip,op) arg_tableName arg_scan arg_attributes = do
send_scannerOpenWithScan op arg_tableName arg_scan arg_attributes
recv_scannerOpenWithScan ip
send_scannerOpenWithScan op arg_tableName arg_scan arg_attributes = do
seq <- seqid
seqn <- readIORef seq
writeMessageBegin op ("scannerOpenWithScan", M_CALL, seqn)
write_ScannerOpenWithScan_args op (ScannerOpenWithScan_args{f_ScannerOpenWithScan_args_tableName=Just arg_tableName,f_ScannerOpenWithScan_args_scan=Just arg_scan,f_ScannerOpenWithScan_args_attributes=Just arg_attributes})
writeMessageEnd op
tFlush (getTransport op)
recv_scannerOpenWithScan ip = do
(fname, mtype, rseqid) <- readMessageBegin ip
if mtype == M_EXCEPTION then do
x <- readAppExn ip
readMessageEnd ip
throw x
else return ()
res <- read_ScannerOpenWithScan_result ip
readMessageEnd ip
case f_ScannerOpenWithScan_result_success res of
Just v -> return v
Nothing -> do
case f_ScannerOpenWithScan_result_io res of
Nothing -> return ()
Just _v -> throw _v
throw (AppExn AE_MISSING_RESULT "scannerOpenWithScan failed: unknown result")
scannerOpen (ip,op) arg_tableName arg_startRow arg_columns arg_attributes = do
send_scannerOpen op arg_tableName arg_startRow arg_columns arg_attributes
recv_scannerOpen ip
send_scannerOpen op arg_tableName arg_startRow arg_columns arg_attributes = do
seq <- seqid
seqn <- readIORef seq
writeMessageBegin op ("scannerOpen", M_CALL, seqn)
write_ScannerOpen_args op (ScannerOpen_args{f_ScannerOpen_args_tableName=Just arg_tableName,f_ScannerOpen_args_startRow=Just arg_startRow,f_ScannerOpen_args_columns=Just arg_columns,f_ScannerOpen_args_attributes=Just arg_attributes})
writeMessageEnd op
tFlush (getTransport op)
recv_scannerOpen ip = do
(fname, mtype, rseqid) <- readMessageBegin ip
if mtype == M_EXCEPTION then do
x <- readAppExn ip
readMessageEnd ip
throw x
else return ()
res <- read_ScannerOpen_result ip
readMessageEnd ip
case f_ScannerOpen_result_success res of
Just v -> return v
Nothing -> do
case f_ScannerOpen_result_io res of
Nothing -> return ()
Just _v -> throw _v
throw (AppExn AE_MISSING_RESULT "scannerOpen failed: unknown result")
scannerOpenWithStop (ip,op) arg_tableName arg_startRow arg_stopRow arg_columns arg_attributes = do
send_scannerOpenWithStop op arg_tableName arg_startRow arg_stopRow arg_columns arg_attributes
recv_scannerOpenWithStop ip
send_scannerOpenWithStop op arg_tableName arg_startRow arg_stopRow arg_columns arg_attributes = do
seq <- seqid
seqn <- readIORef seq
writeMessageBegin op ("scannerOpenWithStop", M_CALL, seqn)
write_ScannerOpenWithStop_args op (ScannerOpenWithStop_args{f_ScannerOpenWithStop_args_tableName=Just arg_tableName,f_ScannerOpenWithStop_args_startRow=Just arg_startRow,f_ScannerOpenWithStop_args_stopRow=Just arg_stopRow,f_ScannerOpenWithStop_args_columns=Just arg_columns,f_ScannerOpenWithStop_args_attributes=Just arg_attributes})
writeMessageEnd op
tFlush (getTransport op)
recv_scannerOpenWithStop ip = do
(fname, mtype, rseqid) <- readMessageBegin ip
if mtype == M_EXCEPTION then do
x <- readAppExn ip
readMessageEnd ip
throw x
else return ()
res <- read_ScannerOpenWithStop_result ip
readMessageEnd ip
case f_ScannerOpenWithStop_result_success res of
Just v -> return v
Nothing -> do
case f_ScannerOpenWithStop_result_io res of
Nothing -> return ()
Just _v -> throw _v
throw (AppExn AE_MISSING_RESULT "scannerOpenWithStop failed: unknown result")
scannerOpenWithPrefix (ip,op) arg_tableName arg_startAndPrefix arg_columns arg_attributes = do
send_scannerOpenWithPrefix op arg_tableName arg_startAndPrefix arg_columns arg_attributes
recv_scannerOpenWithPrefix ip
send_scannerOpenWithPrefix op arg_tableName arg_startAndPrefix arg_columns arg_attributes = do
seq <- seqid
seqn <- readIORef seq
writeMessageBegin op ("scannerOpenWithPrefix", M_CALL, seqn)
write_ScannerOpenWithPrefix_args op (ScannerOpenWithPrefix_args{f_ScannerOpenWithPrefix_args_tableName=Just arg_tableName,f_ScannerOpenWithPrefix_args_startAndPrefix=Just arg_startAndPrefix,f_ScannerOpenWithPrefix_args_columns=Just arg_columns,f_ScannerOpenWithPrefix_args_attributes=Just arg_attributes})
writeMessageEnd op
tFlush (getTransport op)
recv_scannerOpenWithPrefix ip = do
(fname, mtype, rseqid) <- readMessageBegin ip
if mtype == M_EXCEPTION then do
x <- readAppExn ip
readMessageEnd ip
throw x
else return ()
res <- read_ScannerOpenWithPrefix_result ip
readMessageEnd ip
case f_ScannerOpenWithPrefix_result_success res of
Just v -> return v
Nothing -> do
case f_ScannerOpenWithPrefix_result_io res of
Nothing -> return ()
Just _v -> throw _v
throw (AppExn AE_MISSING_RESULT "scannerOpenWithPrefix failed: unknown result")
scannerOpenTs (ip,op) arg_tableName arg_startRow arg_columns arg_timestamp arg_attributes = do
send_scannerOpenTs op arg_tableName arg_startRow arg_columns arg_timestamp arg_attributes
recv_scannerOpenTs ip
send_scannerOpenTs op arg_tableName arg_startRow arg_columns arg_timestamp arg_attributes = do
seq <- seqid
seqn <- readIORef seq
writeMessageBegin op ("scannerOpenTs", M_CALL, seqn)
write_ScannerOpenTs_args op (ScannerOpenTs_args{f_ScannerOpenTs_args_tableName=Just arg_tableName,f_ScannerOpenTs_args_startRow=Just arg_startRow,f_ScannerOpenTs_args_columns=Just arg_columns,f_ScannerOpenTs_args_timestamp=Just arg_timestamp,f_ScannerOpenTs_args_attributes=Just arg_attributes})
writeMessageEnd op
tFlush (getTransport op)
recv_scannerOpenTs ip = do
(fname, mtype, rseqid) <- readMessageBegin ip
if mtype == M_EXCEPTION then do
x <- readAppExn ip
readMessageEnd ip
throw x
else return ()
res <- read_ScannerOpenTs_result ip
readMessageEnd ip
case f_ScannerOpenTs_result_success res of
Just v -> return v
Nothing -> do
case f_ScannerOpenTs_result_io res of
Nothing -> return ()
Just _v -> throw _v
throw (AppExn AE_MISSING_RESULT "scannerOpenTs failed: unknown result")
scannerOpenWithStopTs (ip,op) arg_tableName arg_startRow arg_stopRow arg_columns arg_timestamp arg_attributes = do
send_scannerOpenWithStopTs op arg_tableName arg_startRow arg_stopRow arg_columns arg_timestamp arg_attributes
recv_scannerOpenWithStopTs ip
send_scannerOpenWithStopTs op arg_tableName arg_startRow arg_stopRow arg_columns arg_timestamp arg_attributes = do
seq <- seqid
seqn <- readIORef seq
writeMessageBegin op ("scannerOpenWithStopTs", M_CALL, seqn)
write_ScannerOpenWithStopTs_args op (ScannerOpenWithStopTs_args{f_ScannerOpenWithStopTs_args_tableName=Just arg_tableName,f_ScannerOpenWithStopTs_args_startRow=Just arg_startRow,f_ScannerOpenWithStopTs_args_stopRow=Just arg_stopRow,f_ScannerOpenWithStopTs_args_columns=Just arg_columns,f_ScannerOpenWithStopTs_args_timestamp=Just arg_timestamp,f_ScannerOpenWithStopTs_args_attributes=Just arg_attributes})
writeMessageEnd op
tFlush (getTransport op)
recv_scannerOpenWithStopTs ip = do
(fname, mtype, rseqid) <- readMessageBegin ip
if mtype == M_EXCEPTION then do
x <- readAppExn ip
readMessageEnd ip
throw x
else return ()
res <- read_ScannerOpenWithStopTs_result ip
readMessageEnd ip
case f_ScannerOpenWithStopTs_result_success res of
Just v -> return v
Nothing -> do
case f_ScannerOpenWithStopTs_result_io res of
Nothing -> return ()
Just _v -> throw _v
throw (AppExn AE_MISSING_RESULT "scannerOpenWithStopTs failed: unknown result")
scannerGet (ip,op) arg_id = do
send_scannerGet op arg_id
recv_scannerGet ip
send_scannerGet op arg_id = do
seq <- seqid
seqn <- readIORef seq
writeMessageBegin op ("scannerGet", M_CALL, seqn)
write_ScannerGet_args op (ScannerGet_args{f_ScannerGet_args_id=Just arg_id})
writeMessageEnd op
tFlush (getTransport op)
recv_scannerGet ip = do
(fname, mtype, rseqid) <- readMessageBegin ip
if mtype == M_EXCEPTION then do
x <- readAppExn ip
readMessageEnd ip
throw x
else return ()
res <- read_ScannerGet_result ip
readMessageEnd ip
case f_ScannerGet_result_success res of
Just v -> return v
Nothing -> do
case f_ScannerGet_result_io res of
Nothing -> return ()
Just _v -> throw _v
case f_ScannerGet_result_ia res of
Nothing -> return ()
Just _v -> throw _v
throw (AppExn AE_MISSING_RESULT "scannerGet failed: unknown result")
scannerGetList (ip,op) arg_id arg_nbRows = do
send_scannerGetList op arg_id arg_nbRows
recv_scannerGetList ip
send_scannerGetList op arg_id arg_nbRows = do
seq <- seqid
seqn <- readIORef seq
writeMessageBegin op ("scannerGetList", M_CALL, seqn)
write_ScannerGetList_args op (ScannerGetList_args{f_ScannerGetList_args_id=Just arg_id,f_ScannerGetList_args_nbRows=Just arg_nbRows})
writeMessageEnd op
tFlush (getTransport op)
recv_scannerGetList ip = do
(fname, mtype, rseqid) <- readMessageBegin ip
if mtype == M_EXCEPTION then do
x <- readAppExn ip
readMessageEnd ip
throw x
else return ()
res <- read_ScannerGetList_result ip
readMessageEnd ip
case f_ScannerGetList_result_success res of
Just v -> return v
Nothing -> do
case f_ScannerGetList_result_io res of
Nothing -> return ()
Just _v -> throw _v
case f_ScannerGetList_result_ia res of
Nothing -> return ()
Just _v -> throw _v
throw (AppExn AE_MISSING_RESULT "scannerGetList failed: unknown result")
scannerClose (ip,op) arg_id = do
send_scannerClose op arg_id
recv_scannerClose ip
send_scannerClose op arg_id = do
seq <- seqid
seqn <- readIORef seq
writeMessageBegin op ("scannerClose", M_CALL, seqn)
write_ScannerClose_args op (ScannerClose_args{f_ScannerClose_args_id=Just arg_id})
writeMessageEnd op
tFlush (getTransport op)
recv_scannerClose ip = do
(fname, mtype, rseqid) <- readMessageBegin ip
if mtype == M_EXCEPTION then do
x <- readAppExn ip
readMessageEnd ip
throw x
else return ()
res <- read_ScannerClose_result ip
readMessageEnd ip
case f_ScannerClose_result_io res of
Nothing -> return ()
Just _v -> throw _v
case f_ScannerClose_result_ia res of
Nothing -> return ()
Just _v -> throw _v
return ()
getRowOrBefore (ip,op) arg_tableName arg_row arg_family = do
send_getRowOrBefore op arg_tableName arg_row arg_family
recv_getRowOrBefore ip
send_getRowOrBefore op arg_tableName arg_row arg_family = do
seq <- seqid
seqn <- readIORef seq
writeMessageBegin op ("getRowOrBefore", M_CALL, seqn)
write_GetRowOrBefore_args op (GetRowOrBefore_args{f_GetRowOrBefore_args_tableName=Just arg_tableName,f_GetRowOrBefore_args_row=Just arg_row,f_GetRowOrBefore_args_family=Just arg_family})
writeMessageEnd op
tFlush (getTransport op)
recv_getRowOrBefore ip = do
(fname, mtype, rseqid) <- readMessageBegin ip
if mtype == M_EXCEPTION then do
x <- readAppExn ip
readMessageEnd ip
throw x
else return ()
res <- read_GetRowOrBefore_result ip
readMessageEnd ip
case f_GetRowOrBefore_result_success res of
Just v -> return v
Nothing -> do
case f_GetRowOrBefore_result_io res of
Nothing -> return ()
Just _v -> throw _v
throw (AppExn AE_MISSING_RESULT "getRowOrBefore failed: unknown result")
getRegionInfo (ip,op) arg_row = do
send_getRegionInfo op arg_row
recv_getRegionInfo ip
send_getRegionInfo op arg_row = do
seq <- seqid
seqn <- readIORef seq
writeMessageBegin op ("getRegionInfo", M_CALL, seqn)
write_GetRegionInfo_args op (GetRegionInfo_args{f_GetRegionInfo_args_row=Just arg_row})
writeMessageEnd op
tFlush (getTransport op)
recv_getRegionInfo ip = do
(fname, mtype, rseqid) <- readMessageBegin ip
if mtype == M_EXCEPTION then do
x <- readAppExn ip
readMessageEnd ip
throw x
else return ()
res <- read_GetRegionInfo_result ip
readMessageEnd ip
case f_GetRegionInfo_result_success res of
Just v -> return v
Nothing -> do
case f_GetRegionInfo_result_io res of
Nothing -> return ()
Just _v -> throw _v
throw (AppExn AE_MISSING_RESULT "getRegionInfo failed: unknown result")
| danplubell/hbase-haskell | src/Database/HBase/Internal/Thrift/Hbase_Client.hs | mit | 45,793 | 0 | 16 | 7,854 | 13,200 | 6,121 | 7,079 | 1,125 | 5 |
module Text.Regex.PCRE.Gsub
(gsub) where
import Text.Regex.PCRE ((=~), MatchText)
import Data.Array ((!))
import Data.Char (isDigit)
-------------------------------------------------------------------------------
-- | Returns a copy of the given input string with all the occurrences of
-- regexp pattern substituted for the third argument.
--
-- If the replacement string contains back references of the form `\\d`
-- (where `d` is a group number), they will be replaced with the captured
-- groups of the regexp match
--
-- Examples:
--
-- > gsub "hello world" "world$" "mundo" => "hello mundo"
-- > gsub "hello world " "world$" "mundo" => "hello world "
-- > gsub "hallo world" "(.*) world" "\\1-\\1" => "hallo-hallo mundo"
--
gsub :: String -- ^ Input String
-> String -- ^ Regexp Pattern
-> String -- ^ Replacement String
-> String -- ^ Output String
gsub text pattern replacement = replaceMatches 0 matches text
where
matches = (text =~ pattern :: [MatchText String])
rl = length replacement
replaceMatches _ [] text = text
replaceMatches accum (m:ms) text = replaceMatches accum' ms text'
where
(o, l) = snd (m ! 0)
(pre, post) = splitAt (o + accum) text
accum' = accum + (rl - l + 1)
post' = drop l post
text' = concat [ pre
, replacePlaceholder (fst $ m ! 0) pattern replacement
, post' ]
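-- A minimal usage sketch (illustration only, not part of the original module):
-- it simply evaluates the back-reference example from the documentation above.
-- The helper name 'gsubUsageExample' is hypothetical.
gsubUsageExample :: String
gsubUsageExample = gsub "hallo world" "(.*) world" "\\1-\\1"  -- "hallo-hallo"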
replacePlaceholder :: String -> String -> String -> String
replacePlaceholder expr pattern sub =
concat $ zipWith f ('_':sub) sub
where
matches = (expr =~ pattern :: [MatchText String]) !! 0
f :: Char -> Char -> String
f '\\' i
| isDigit i = fst $ matches ! read [i]
| otherwise = '\\':[i]
f _ '\\' = []
f _ x = [x]
| HaskVan/regex-pcre-gsub | src/Text/Regex/PCRE/Gsub.hs | mit | 1,880 | 0 | 14 | 547 | 450 | 250 | 200 | 31 | 4 |
module SCCPragmas where
x = {-# SCC "wibble" #-} 3 | Pnom/haskell-ast-pretty | Test/examples/SCCPragmas.hs | mit | 51 | 0 | 4 | 10 | 10 | 7 | 3 | 2 | 1 |
{-# language DeriveDataTypeable #-}
{-# language TemplateHaskell #-}
{-# language LambdaCase #-}
module Flow.Common.Shape where
import Flow.Common.Data
import Autolib.Reader
import Autolib.ToDoc
import Autolib.Reporter
import Data.Typeable
data Condition
= No_Loops
| No_Labels -- ^ program contains no labels
| No_Gotos -- ^ program contains no gotos
| Simple_Branches -- ^ just "if (b) goto foo;"
| Simple_Loops -- ^ no break or continue
| Flat
| And [ Condition ]
deriving ( Eq, Ord, Typeable )
derives [makeReader,makeToDoc] [''Condition]
check :: Condition -> Statement -> Reporter ()
check c s = let { ss = substatements s } in case c of
And cs -> forM_ cs $ \ c -> check c s
No_Loops -> whine "Das Programm darf keine Schleifen enthalten."
$ filter ( \ case While {} -> True ; _ -> False ) ss
No_Labels -> whine "Das Programm darf keine Marken enthalten."
$ filter ( \ case Label {} -> True ; _ -> False ) ss
No_Gotos -> whine "Das Programm darf keine Sprünge enthalten."
$ filter ( \ case Goto {} -> True ; _ -> False ) ss
Simple_Branches -> whine "Alle Verzweigungen sollen die Form 'if (b) goto l' haben."
$ filter ( \ case
Branch c y n -> case (y,n) of (Goto _, Nothing) -> False; _ -> True
_ -> False ) ss
Simple_Loops -> whine "Alle Schleifen sollen einfach sein (ohne break/continue)."
$ filter ( \ case Break _ -> True ; Continue _ -> True ; _ -> False ) ss
Flat -> case s of
Block ss -> whine "Der Block darf keine Blöcke enthalten."
$ filter ( \ case Block _ -> True ; _ -> False ) $ ss >>= substatements
_ -> reject $ text "Das Programm soll ein Block sein."
whine msg bad = when (not $ null bad) $ reject $
text msg </> ( text "Folgende Teilprogramme verletzen diese Bedingung:"
</> vcat ( map toDoc bad ) )
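-- Illustrative value (assumed helper, not referenced elsewhere in this module):
-- a combined shape constraint that 'check' would test, demanding a flat block
-- without gotos.
exampleCondition :: Condition
exampleCondition = And [ Flat, No_Gotos ]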
| marcellussiegburg/autotool | collection/src/Flow/Common/Shape.hs | gpl-2.0 | 1,952 | 0 | 21 | 551 | 551 | 285 | 266 | 41 | 16 |
{-#LANGUAGE TemplateHaskell, DeriveDataTypeable #-}
module Crystal.Config where
import Control.Lens hiding (enum)
import Control.Lens.TH
import Data.Generics
import System.Console.CmdArgs.Implicit
data TSType = Dumb | Smart deriving (Show, Eq, Data, Typeable)
data Config =
Config { _cfgTypeSys :: TSType,
_cfgInputFile :: String,
_cfgCheckMobility :: Bool,
_cfgCheckSimplification :: Bool,
_cfgMobilityStats :: Bool,
_cfgDumpTypes :: Bool,
_cfgDumpTree :: Bool,
_cfgDumpImmediately :: Bool,
_cfgAnnotateLabels :: Bool,
_cfgMutate :: Bool
} deriving (Show, Eq, Data, Typeable)
$(makeLenses ''Config)
defaultArgs = Config { _cfgTypeSys = enum [ Smart &= help "Smart type system" &= name "S", Dumb &= help "Dynamic type system" &= name "D" ]
, _cfgInputFile = def &= args &= typFile
, _cfgCheckMobility = enum [ True &= help "Test mobility (default)" &= name "mobility" &= explicit,
False &= help "Disable mobility" &= name "no-mobility" &= explicit ]
, _cfgCheckSimplification = enum [ True &= help "Test simplification (default)" &= name "simplification" &= explicit,
False &= help "Disable simplification" &= name "no-simplification" &= explicit ]
, _cfgMobilityStats = enum [ False &= help "Don't generate mobility stats (default)" &= name "no-stats" &= explicit,
True &= help "Generate mobility stats" &= name "stats" &= explicit ]
, _cfgDumpTypes = enum [ False &= help "Don't dump top-level types (default)" &= name "no-types" &= explicit,
True &= help "Dump top-level types" &= name "types" &= explicit ]
, _cfgDumpTree = enum [ False &= name "no-tree" &= ignore,
True &= help "Dump results of intermediate steps" &= name "tree" &= explicit ]
, _cfgDumpImmediately = enum [ False &= ignore,
True &= help "Dump results immediately" &= name "i" &= explicit ]
, _cfgAnnotateLabels = enum [ False &= ignore,
True &= help "Annotate expressions with labels" &= name "@" &= explicit ]
, _cfgMutate = enum [ False &= name "no-mutate" &= ignore,
True &= help "Mutate program before performing blame prediction" &= name "mutate" &= explicit]
}
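-- A minimal, assumed entry-point sketch (not part of this module) showing how
-- the annotated record above is typically consumed via cmdargs; the name
-- 'exampleLoadConfig' is hypothetical.
exampleLoadConfig :: IO Config
exampleLoadConfig = cmdArgs defaultArgs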
| Botje/crystal | Crystal/Config.hs | gpl-2.0 | 2,759 | 0 | 12 | 1,031 | 595 | 315 | 280 | 38 | 1 |
{-# LANGUAGE OverloadedStrings #-}
-- import Debug.Trace
import System.Exit
-- import System.Posix.Directory
import qualified Data.Text as T
-- import qualified Data.Text.IO as T
import Control.Monad
-- import qualified Data.ByteString.Lazy.Char8 as BS8
import qualified Data.Map.Lazy as Map
import Data.Time.Calendar
import Data.Ssheet
----------------------------------------------------------------------
main :: IO ()
main =
mapM_ testIt [("tests/simple.xlsx", simpleExpected),
("tests/simple.xls", simpleExpected),
("tests/simple.csv", simpleExpectedCsv),
("tests/dates.xlsx", datesExpected),
("tests/dates1904.xlsx", dates1904Expected)
]
where
testIt (source, expected) = testSimple source expected
----------------------------------------------------------------------
showError :: [T.Text] -> IO ()
showError err = putStrLn $ "Errors: " ++ show (length err) ++ "\n" ++ T.unpack (T.unlines err)
----------------------------------------------------------------------
testSimple :: FilePath -> Ssheet -> IO ()
testSimple source expected = do
putStrLn $ "Testing " ++ source
either_ssheet <- ssheetReadFile ssheetDefaultOptions source
case either_ssheet of
Left err -> do
showError err
exitFailure
Right ssheet -> do
unless (ssheet == expected) (error $ source ++ ": unexpected data extracted:\n" ++ (ssheetJsonPrettyPrintToString ssheet))
simpleExpected :: Ssheet
simpleExpected = [Sheet "Sheet1" (Map.fromList [(0,Map.fromList [(0,CellString "A1")]),
(1,Map.fromList [(0,CellFloat 42.0)]),
(2,Map.fromList [(0,CellFloat 42.42)]),
(3,Map.fromList [(0,CellDate (fromGregorian 2014 12 16))])])]
simpleExpectedCsv :: Ssheet
simpleExpectedCsv = [Sheet "" (Map.fromList [(0,Map.fromList [(0,CellString "A1")]),
(1,Map.fromList [(0,CellString "42")]),
(2,Map.fromList [(0,CellString "42.42")]),
(3,Map.fromList [(0,CellString "2014-12-16")])])]
datesExpected :: Ssheet
datesExpected = [Sheet "Sheet1" (Map.fromList [(0, Map.fromList [(0, CellDate (fromGregorian 2015 3 8))]),
(1, Map.fromList [(0, CellDate (fromGregorian 2015 3 8))]),
(2, Map.fromList [(0, CellDate (fromGregorian 2015 3 8))]),
(3, Map.fromList [(0, CellDate (fromGregorian 2015 3 8))]),
(4, Map.fromList [(0, CellDate (fromGregorian 2015 3 8))]),
(5, Map.fromList [(0, CellDate (fromGregorian 2015 3 8))]),
(6, Map.fromList [(0, CellDate (fromGregorian 2015 3 8))]),
(7, Map.fromList [(0, CellDate (fromGregorian 2015 3 8))]),
(8, Map.fromList [(0, CellDate (fromGregorian 2015 3 8))]),
(9, Map.fromList [(0, CellDate (fromGregorian 2015 3 8))]),
(10, Map.fromList [(0, CellDate (fromGregorian 2015 3 8))])])]
dates1904Expected :: Ssheet
dates1904Expected = [Sheet "Sheet1" (Map.fromList [(0, Map.fromList [(0, CellDate (fromGregorian 2014 10 15))]),
(1, Map.fromList [(0, CellDate (fromGregorian 2014 10 15))])
])]
----------------------------------------------------------------------
| skepner/ssheet | tests/Test.hs | gpl-2.0 | 3,844 | 0 | 16 | 1,300 | 1,110 | 615 | 495 | 52 | 2 |
module Test.EventChain.LHEParse where
-- from other packages from others
import Data.Conduit
import qualified Data.Conduit.List as CL
import Data.Maybe (catMaybes)
import System.Directory
import System.FilePath
import System.IO
-- from this package
import HEP.Automation.EventChain.FileDriver
--
import Paths_evchain
-- |
test_parse_unzip :: IO Bool
test_parse_unzip = do
dir <- getDataDir
let fn = dir </> "test" </> "resources" </> "pp_gogo_events.lhe"
h <- openFile fn ReadMode
lst <- evtsHandle False h $$ CL.consume
return $ (length (catMaybes lst)) == 2
-- |
test_parse_zip :: IO Bool
test_parse_zip = do
dir <- getDataDir
let fn = dir </> "test" </> "resources" </> "pp_gogo_events.lhe.gz"
h <- openFile fn ReadMode
lst <- evtsHandle True h $$ CL.consume
return $ (length (catMaybes lst)) == 2
| wavewave/evchain | test/Test/EventChain/LHEParse.hs | gpl-3.0 | 890 | 0 | 12 | 205 | 249 | 130 | 119 | 23 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module User where
import Control.Monad.Reader (ask)
import Control.Monad.State (get, put)
import Crypto.Scrypt
import Data.Acid (Query, Update)
import qualified Data.ByteString.Lazy as B
import Data.Digest.Pure.SHA
import Data.IxSet hiding (delete)
import Data.List (delete)
import qualified Data.Morgue.Agenda as A
import qualified Data.Morgue.Outline as O
import Data.Morgue.Options
import Data.Text (pack, unpack)
import Data.Time
import System.Entropy
import Group (toGroupFileList)
import Types
import Util
-- = Utility functions
-- TODO: maybe incorporate user data (or use JWT and STFU)
-- | generate a user's API key
genApiKey :: IO ApiKey
genApiKey = pack . showDigest <$> (hmacSha256 <$>
(B.fromStrict <$> getEntropy 256) <*>
(B.fromStrict <$> getEntropy 256))
-- | transform a SignUpRequest as passed to our application to a version
-- suited for pure computation
toSignUpRequest :: SignUpRequest' -> IO SignUpRequest
toSignUpRequest req =
SignUpRequest (suRqCreds' req) <$> genApiKey <*> newSalt
-- | transform a SignInRequest as passed to our application to a version
-- suited for pure computation
toSignInRequest :: SignInRequest' -> IO SignInRequest
toSignInRequest req =
SignInRequest (siRqCreds' req) <$> genApiKey
-- | transform a ProcessingRequest as passed to our application to a version
-- suited for pure computation
toProcessingRequest :: ProcessingRequest' -> IO ProcessingRequest
toProcessingRequest req =
ProcessingRequest (prRqUser' req) (prRqOptions' req) (prRqFiles' req) <$>
getCurrentTime <*> getCurrentTimeZone
-- | store a user, updating it if told so
packUser :: Bool -> InternalUser -> Update Morgue InternalUser
packUser update user = do
morgue <- get
put $ morgue { allUsers = func user (allUsers morgue) }
return user
where func | update = updateIx (iUserName user)
| otherwise = insert
-- | store a new user
storeUser :: InternalUser -> Update Morgue InternalUser
storeUser = packUser False
-- | update an existing user
updateUser :: InternalUser -> Update Morgue InternalUser
updateUser = packUser True
-- | conversion for public access
toUser :: InternalUser -> User
toUser = User <$> iUserName <*> iApiKey
-- = Signing up
-- | provide data needed for signup. We don't do any IO here
signUpProvider :: SignUpRequest -> Query Morgue SignUpData
signUpProvider req = do
morgue <- ask
return (req, getOne $ allUsers morgue @= cName (suRqCreds req))
-- | process data from a sign up request
makeUser :: SignUpData -> ApiResponse InternalUser
makeUser (SignUpRequest creds key salt, Nothing) = success $
InternalUser (cName creds) key encPass []
where encPass = B.fromStrict . getEncryptedPass $
encryptPass' salt (Pass . B.toStrict $ cPass creds)
makeUser (_, Just _) = failure UserExists
-- = Logging in
-- | provide data from a login request
signInProvider :: SignInRequest -> Query Morgue SignInData
signInProvider (SignInRequest creds key) = do
morgue <- ask
return ( cPass creds
, getOne $ allUsers morgue @= cName creds
, key
)
-- | process user data in order to login
loginUser :: SignInData -> ApiResponse InternalUser
loginUser (encPass, Just user@(InternalUser _ _ pass _), newApiKey)
| verified = success $ user { iApiKey = newApiKey }
| otherwise = failure AuthError
where verified = verifyPass'
(Pass $ B.toStrict pass)
(EncryptedPass $ B.toStrict encPass)
loginUser (_, Nothing, _) = failure AuthError
-- = Uploading a file
-- | provide data from a user-push request
pushUProvider :: PushURequest -> Query Morgue PushUData
pushUProvider (PushURequest user file) = do
morgue <- ask
return (getOne $ allUsers morgue @= user, file)
-- | add a file to a user's files
addFileToUser :: PushUData -> ApiResponse InternalUser
addFileToUser (Just user, file)
| file `elem` iUserFiles user = failure FileExists
| otherwise = success $ user { iUserFiles = file : iUserFiles user }
addFileToUser (Nothing, _) = failure AuthError
-- | get a user's last file
getLastFile :: InternalUser -> FileName
getLastFile = fileName . head . iUserFiles
-- = Deleting a file
-- | provide data from a user-delete request
deleteUProvider :: DeleteURequest -> Query Morgue DeleteUData
deleteUProvider (DeleteURequest user fName) = do
morgue <- ask
return (getOne $ allUsers morgue @= user, fName)
-- | delete a file from a user's datastore
deleteFileFromUser :: DeleteUData -> ApiResponse InternalUser
deleteFileFromUser (Nothing, _) = failure AuthError
deleteFileFromUser (Just user, fName)
| oldLen == length newFiles = failure $ NoSuchFile fName
| otherwise = success $ user { iUserFiles = newFiles }
where newFiles = delete (File fName "") (iUserFiles user)
oldLen = length $ iUserFiles user
-- = Pulling a file
-- | provide data from a user-pull request
pullUProvider :: PullURequest -> Query Morgue PullUData
pullUProvider (PullURequest user fName) = do
morgue <- ask
return (getOne $ allUsers morgue @= user, fName)
-- | get a file from a user, looking it up by name
getFileFromUser :: PullUData -> ApiResponse File
getFileFromUser (Just user, fName) =
case filter ((==fName) . fileName) $ iUserFiles user of
[file] -> success file
_ -> failure $ NoSuchFile fName
getFileFromUser (Nothing, _) = failure AuthError
-- = Listing available files
-- | provide data from a user-list request
listProvider :: ListRequest -> Query Morgue ListData
listProvider req = do
morgue <- ask
return ( getOne $ allUsers morgue @= user
, toList $ allGroups morgue @= userName user
)
where user = lRqUser req
-- | list all files from a user and a list of groups
toFileList :: ListData -> ApiResponse FileList
toFileList (Just user, groups) = success $
FileList (map fileName $ iUserFiles user)
(map toGroupFileList groups)
toFileList (Nothing, _) = failure AuthError
-- = processing files to an agenda
-- | provide data from a processing request
processingProvider :: ProcessingRequest -> Query Morgue ProcessingData
processingProvider (ProcessingRequest user opts fList tz time) = do
morgue <- ask
return ( getOne $ allUsers morgue @= user
, toList $ allGroups morgue @= user
, fList
, opts
, tz
, time
)
-- | process files
processFiles :: ProcessingData -> ApiResponse String
processFiles (Just user, groups, FileList uFiles gFiles, opts, time, tz) =
processor <$> files
where opts' = convertOptions opts
files = (unpack . mconcat . mconcat) <$>
((:) <$> matchFiles (iUserFiles user) uFiles <*>
(matchGroups groups gFiles >>= mapM (uncurry matchFiles)))
processor = case opts of
SAgendaOptions{} -> A.getAgenda opts' tz time
SOutlineOptions{} -> O.getOutline opts'
processFiles (Nothing, _, _, _, _, _) = failure AuthError
-- = patching files
-- | provide data from a patching request
patchUProvider :: PatchURequest -> Query Morgue PatchUData
patchUProvider (PatchURequest user fName patch) = do
morgue <- ask
return ( getOne $ allUsers morgue @= user
, fName
, patch
)
-- | process a patch in an impure fashion ;(
processUPatch :: PatchUData -> IO (ApiResponse (InternalUser, File))
processUPatch (Nothing, _, _) = return $ failure AuthError
processUPatch (Just user@(InternalUser _ _ _ uFiles), fName, patch) =
case matchFiles uFiles [fName] of
ApiResponse (Right [content]) -> do
newFile <- patchFile (File fName content) patch
return $ success
(user { iUserFiles = replaceFile uFiles newFile }, newFile)
ApiResponse (Right _) -> return . failure $ NoSuchFile fName
ApiResponse (Left err) -> return $ failure err
| ibabushkin/morgue-server | src/User.hs | gpl-3.0 | 7,959 | 0 | 15 | 1,740 | 2,130 | 1,107 | 1,023 | 149 | 3 |
-----------------------------------------------------------------------------
--
-- Module : Main
-- Copyright :
-- License : AllRightsReserved
--
-- Maintainer :
-- Stability :
-- Portability :
--
-- |
--
-----------------------------------------------------------------------------
module Main ( main ) where
import Check3D as C3D
import Check2D as C2D
import CheckCommon as C2C
import Test.QuickCheck
main = do
C2C.runChecker
print "=========== Common test finished =================="
C2D.runChecker
print "=========== 2D test finished ======================"
C3D.runChecker
print "=========== 3D test finished ======================"
| lostbean/DeUni | profile/Check.hs | gpl-3.0 | 672 | 0 | 7 | 105 | 80 | 49 | 31 | 12 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE PatternGuards #-}
-- Copyright : (c) 2019 Robert Künnemann
-- License : GPL v3 (see LICENSE)
--
-- Maintainer : Robert Künnemann <[email protected]>
-- Portability : GHC only
--
-- Compute a function that maps positions in a process to where they will need
-- to move to ensure local progress wherever possible
module Sapic.ProgressFunction (
pfFrom
,pf
,ProgressFunction
,pfRange
,pfInv
) where
-- import Data.Maybe
-- import Data.Foldable
-- import Control.Exception
-- import Control.Monad.Fresh
import Data.Typeable
import Control.Monad.Catch
import Control.Monad
-- import Sapic.Exceptions
-- import Theory
import Theory.Sapic
-- import Sapic.Exceptions
import Sapic.ProcessUtils
-- import Theory.Model.Rule
-- import Data.Typeable
import qualified Data.Set as S
import qualified Data.List as L
-- import Control.Monad.Trans.FastFresh
import qualified Data.Map.Strict as M
type ProgressFunction = M.Map ProcessPosition (S.Set (S.Set ProcessPosition))
-- | prepend position list pos to each element of the set
(<.>) :: Ord a => [a] -> S.Set [a] -> S.Set [a]
(<.>) pos set = S.map (\pos' -> pos ++ pos' ) set
-- | prepend position list pos to each element of each set in a set of sets
(<..>) :: Ord a => [a] -> S.Set (S.Set [a]) -> S.Set (S.Set [a])
(<..>) pos setset = S.map (\set' -> pos <.> set') setset
-- -- | Combinators that are exclusive, i.e., only one child can be visited
-- isExclusive (Cond _) = True
-- isExclusive (CondEq _ _) = True
-- isExclusive (Lookup _ _) = True
-- isExclusive _ = False
-- | Actions that are blocking
isBlockingAct :: SapicAction -> Bool
isBlockingAct Rep = True
isBlockingAct (ChIn _ _) = True
isBlockingAct _ = False
-- | determine whether process is blocking
blocking :: AnProcess ann -> Bool
blocking (ProcessNull _) = True
blocking (ProcessAction ac _ _ ) = isBlockingAct ac
blocking (ProcessComb NDC _ pl pr) = blocking pl && blocking pr
blocking _ = False
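-- Tiny illustration (assumed helper, not used elsewhere): the terminated
-- process is blocking by definition.
blockingNullExample :: Bool
blockingNullExample = blocking (ProcessNull ())   -- True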
-- | next position to jump to
next :: (Num a, Ord a) => AnProcess ann -> S.Set [a]
next (ProcessNull _) = S.empty
next (ProcessAction _ _ _ ) = S.singleton [1]
next (ProcessComb NDC _ pl pr) = nextOrChild pl [1] `S.union` nextOrChild pr [2]
where nextOrChild p' pos = if blocking p' then
pos <.> next p'
else S.singleton pos
next ProcessComb{} = S.fromList $ [[1],[2]]
-- | next position to jump but consider empty position for null process, used in pi
next0 :: (Num a, Ord a) => AnProcess ann -> S.Set [a]
next0 (ProcessNull _) = S.singleton []
next0 (ProcessAction _ _ _ ) = S.singleton [1]
next0 (ProcessComb NDC _ pl pr) = next0OrChild pl [1] `S.union` next0OrChild pr [2]
where next0OrChild p' pos = if blocking p' then
pos <.> next0 p'
else S.singleton pos
next0 ProcessComb{} = S.fromList [[1],[2]]
-- blocking0 (ProcessAction ac _ _ )
-- | isBlocking ac = Just (S.singleton [1])
-- | otherwise = Nothing
-- blocking0 (ProcessComb NDC _ pl pr)
-- | (Just sl) <- blocking0 pl, (Just sr) <- blocking0 pr = Just (([1] <.>sl) `S.union` ([2]<.>sr))
-- | otherwise = Nothing
-- blocking0 _ = Nothing
pfFrom :: (MonadCatch m, Show ann, Typeable ann) => AnProcess ann -> m (S.Set ProcessPosition)
pfFrom process = from' process True
where
from' proc b
| ProcessNull _ <- proc = return S.empty
| otherwise = do
-- | (ProcessAction ac _ p' ) <- proc =
-- singletonOrEmpty (conditionAction proc b) `S.union` [1]<.> from' p' (isBlocking ac)
-- | (ProcessComb comb _ pl pr) <- proc =
res <- foldM (addWithRecursive proc) S.empty (next proc)
return $ singletonOrEmpty (conditionAction proc b) `S.union` res
-- `S.union` ([1] <.> from' pl False)
-- `S.union` ([2] <.> from' pr False)
singletonOrEmpty True = S.singleton []
singletonOrEmpty False = S.empty
conditionAction proc b = not (blocking proc) && b -- condition to add singleton set is given, see Def. 14 in paper
addWithRecursive proc accu pos = do
p' <- processAt proc pos
res <- from' p' (blocking proc)
return $ accu `S.union` (pos <.> res)
-- | Combine set of sets of position so that they describe alternatives (see comment for progressTo)
-- combine x y = { union of xi and yi | xi in x and yi in y}
combine :: Ord a => S.Set (S.Set a) -> S.Set (S.Set a) -> S.Set (S.Set a)
combine x y = S.foldr (combine_with y) S.empty x
-- | Take x_i, take union with y_i for all y_i in y and add result to accumulator set1.
combine_with :: Ord a => S.Set (S.Set a) -> S.Set a -> S.Set (S.Set a) -> S.Set (S.Set a)
combine_with y x_i set1 = S.foldr (\y_i set2 -> (x_i `S.union` y_i) `S.insert` set2) set1 y
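-- Worked illustration (assumed helper, not used elsewhere): with
-- x = {{[1]},{[2]}} and y = {{[3]}}, combine x y = {{[1],[3]},{[2],[3]}},
-- i.e. each alternative of x is merged with each alternative of y.
combineExample :: S.Set (S.Set [Int])
combineExample = combine (S.fromList [S.singleton [1], S.singleton [2]])
                         (S.singleton (S.singleton [3]))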
-- | Given a process p, find set of set of positions describing the conjunctive
-- normal form of the positions that we need to go to.
-- For example: {{p1},{p2,p3}} means we need to go to p1 AND to either p2 or p3.
-- Correspond to f in Def. 15
-- TODO This is massively refactored code. Remove stuff that's commented out once everything works.
f :: (Show ann, MonadCatch m, Typeable ann) => (AnProcess ann) -> m (S.Set (S.Set ProcessPosition))
f p -- corresponds to f within generate progressfunction.ml
| blocking p = return $ ss []
| (ProcessComb Parallel _ pl pr) <- p = do
ll <- f pl
lr <- f pr
return $ S.union ([1] <..> ll) ([2] <..> lr)
| otherwise = foldM combineWithRecursive
(S.singleton S.empty) -- accumulator, set of sets of position
-- not that the Singleton set of the empty set is
-- the neutral element with respect to combine
-- the empty set combined with anything gives an emptyset
(next0 p) -- list of p∈next^0(proc)
-- | (ProcessNull _) <- p = return $ ss []
-- | (ProcessAction Rep _ _ ) <-p = return $ ss []
-- | (ProcessAction (ChIn _ _) _ _) <-p = return $ ss []
-- | (ProcessComb comb _ pl pr) <- p
-- , isExclusive comb = foldM combineWithRecursive
-- S.empty -- accumulator, set of sets of position
-- (next0 proc) -- list of p∈next^0(proc)
-- | (ProcessComb NDC _ pl pr) <- p
-- , Just psl <- blocking0 pl, Just psr <- blocking0 pr = return $ ss []
-- | (ProcessComb NDC _ pl pr) <- p
-- , Just psl <- blocking0 pl, Nothing <- blocking0 pr = do
-- lr <- f pr
-- foldM combineWithRecursive
-- ([2] <..> lr) -- accumulator start with rhs positions
-- ([1] <.> psl) -- fold over lhs positions
-- | (ProcessComb NDC _ pl pr) <- p
-- , Nothing <- blocking0 pl, Just psr <- blocking0 pr = do
-- ll <- f pl
-- foldM combineWithRecursive ([1] <..> ll) ([2] <.> psr)
-- | (ProcessComb NDC _ pl pr) <- p
-- , Nothing <- blocking0 pl, Nothing <- blocking0 pr = do
-- ll <- f pl
-- lr <- f pr
-- return $ combine ([1] <..> ll) ([2] <..> lr)
-- | (ProcessComb Parallel _ pl pr) <- p = do
-- ll <- f pl
-- lr <- f pr
-- return $ S.union ([1] <..> ll) ([2] <..> lr)
-- | (ProcessAction _ _ p') <- p = do l' <- f p'
-- return $ [1] <..> l'
where ss x = S.singleton ( S.singleton x) -- shortcut for singleton set of singleton set
combineWithRecursive acc pos = do -- combine pss with positions from recursive call (case of nested NDCs)
proc' <- (processAt p pos)
lpos <- f proc'
return $ combine (pos <..> lpos) acc
-- | Compute progress function of proc
pf :: (Show ann, MonadCatch m, Typeable ann) => AnProcess ann -> ProcessPosition -> m (S.Set (S.Set ProcessPosition))
pf proc pos = do proc' <- processAt proc pos
res <- f proc'
return $ pos <..> res
flatten :: Ord a => S.Set (S.Set a) -> S.Set a
flatten = S.foldr S.union S.empty
pfRange' :: (Show ann, Typeable ann, MonadCatch m) => AnProcess ann -> m (S.Set (ProcessPosition, ProcessPosition))
pfRange' proc = do
froms <- pfFrom proc
res <- foldM mapFlat S.empty froms
return res
where
mapFlat acc pos = do res <- flatten <$> pf proc pos
return (acc `S.union` S.map (\r -> (r,pos)) res)
pfRange :: (Show ann, Typeable ann, MonadCatch m) => AnProcess ann -> m (S.Set ProcessPosition)
pfRange proc = do
set <- pfRange' proc
return $ S.map fst set
pfInv :: (Show ann, Typeable ann, MonadCatch m) => AnProcess ann -> m (ProcessPosition -> Maybe ProcessPosition)
pfInv proc = do
set <- pfRange' proc
return $ \x -> snd <$> L.find (\(to,_) -> to == x ) (S.toList set)
| kmilner/tamarin-prover | lib/sapic/src/Sapic/ProgressFunction.hs | gpl-3.0 | 9,426 | 0 | 15 | 2,968 | 2,085 | 1,092 | 993 | 100 | 2 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Games.Types.Sum
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.Games.Types.Sum where
import Network.Google.Prelude
-- | Collection of players being retrieved
data PlayersListCollection
= PlayedWith
-- ^ @played_with@
-- Retrieve a list of players you have played a multiplayer game (realtime
-- or turn-based) with recently.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable PlayersListCollection
instance FromHttpApiData PlayersListCollection where
parseQueryParam = \case
"played_with" -> Right PlayedWith
x -> Left ("Unable to parse PlayersListCollection from: " <> x)
instance ToHttpApiData PlayersListCollection where
toQueryParam = \case
PlayedWith -> "played_with"
instance FromJSON PlayersListCollection where
parseJSON = parseJSONText "PlayersListCollection"
instance ToJSON PlayersListCollection where
toJSON = toJSONText
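-- A brief usage sketch (added for illustration; not part of the generated
-- module): the HTTP and JSON instances above just round-trip the wire
-- representation of the enum, e.g.
--
-- >>> toQueryParam PlayedWith
-- "played_with"
-- >>> parseQueryParam "played_with" :: Either Text PlayersListCollection
-- Right PlayedWith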
-- | The collection of scores you\'re requesting.
data ScoresListCollection
= Public
-- ^ @PUBLIC@
-- List all scores in the public leaderboard.
| Social
-- ^ @SOCIAL@
-- List only social scores.
| Social1P
-- ^ @SOCIAL_1P@
-- List only social scores, not respecting the fACL.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable ScoresListCollection
instance FromHttpApiData ScoresListCollection where
parseQueryParam = \case
"PUBLIC" -> Right Public
"SOCIAL" -> Right Social
"SOCIAL_1P" -> Right Social1P
x -> Left ("Unable to parse ScoresListCollection from: " <> x)
instance ToHttpApiData ScoresListCollection where
toQueryParam = \case
Public -> "PUBLIC"
Social -> "SOCIAL"
Social1P -> "SOCIAL_1P"
instance FromJSON ScoresListCollection where
parseJSON = parseJSONText "ScoresListCollection"
instance ToJSON ScoresListCollection where
toJSON = toJSONText
-- | Restrict application details returned to the specific platform.
data ApplicationsGetPlatformType
= Android
-- ^ @ANDROID@
-- Retrieve applications that can be played on Android.
| Ios
-- ^ @IOS@
-- Retrieve applications that can be played on iOS.
| WebApp
-- ^ @WEB_APP@
-- Retrieve applications that can be played on desktop web.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable ApplicationsGetPlatformType
instance FromHttpApiData ApplicationsGetPlatformType where
parseQueryParam = \case
"ANDROID" -> Right Android
"IOS" -> Right Ios
"WEB_APP" -> Right WebApp
x -> Left ("Unable to parse ApplicationsGetPlatformType from: " <> x)
instance ToHttpApiData ApplicationsGetPlatformType where
toQueryParam = \case
Android -> "ANDROID"
Ios -> "IOS"
WebApp -> "WEB_APP"
instance FromJSON ApplicationsGetPlatformType where
parseJSON = parseJSONText "ApplicationsGetPlatformType"
instance ToJSON ApplicationsGetPlatformType where
toJSON = toJSONText
-- | The collection of scores you\'re requesting.
data ScoresListWindowCollection
= SLWCPublic
-- ^ @PUBLIC@
-- List all scores in the public leaderboard.
| SLWCSocial
-- ^ @SOCIAL@
-- List only social scores.
| SLWCSocial1P
-- ^ @SOCIAL_1P@
-- List only social scores, not respecting the fACL.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable ScoresListWindowCollection
instance FromHttpApiData ScoresListWindowCollection where
parseQueryParam = \case
"PUBLIC" -> Right SLWCPublic
"SOCIAL" -> Right SLWCSocial
"SOCIAL_1P" -> Right SLWCSocial1P
x -> Left ("Unable to parse ScoresListWindowCollection from: " <> x)
instance ToHttpApiData ScoresListWindowCollection where
toQueryParam = \case
SLWCPublic -> "PUBLIC"
SLWCSocial -> "SOCIAL"
SLWCSocial1P -> "SOCIAL_1P"
instance FromJSON ScoresListWindowCollection where
parseJSON = parseJSONText "ScoresListWindowCollection"
instance ToJSON ScoresListWindowCollection where
toJSON = toJSONText
-- | The time span for the scores and ranks you\'re requesting.
data ScoresListWindowTimeSpan
= AllTime
-- ^ @ALL_TIME@
-- List the all-time top scores.
| Daily
-- ^ @DAILY@
-- List the top scores for the current day.
| Weekly
-- ^ @WEEKLY@
-- List the top scores for the current week.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable ScoresListWindowTimeSpan
instance FromHttpApiData ScoresListWindowTimeSpan where
parseQueryParam = \case
"ALL_TIME" -> Right AllTime
"DAILY" -> Right Daily
"WEEKLY" -> Right Weekly
x -> Left ("Unable to parse ScoresListWindowTimeSpan from: " <> x)
instance ToHttpApiData ScoresListWindowTimeSpan where
toQueryParam = \case
AllTime -> "ALL_TIME"
Daily -> "DAILY"
Weekly -> "WEEKLY"
instance FromJSON ScoresListWindowTimeSpan where
parseJSON = parseJSONText "ScoresListWindowTimeSpan"
instance ToJSON ScoresListWindowTimeSpan where
toJSON = toJSONText
-- | The time span for the scores and ranks you\'re requesting.
data ScoresGetTimeSpan
= SGTSAll
-- ^ @ALL@
-- Get the high scores for all time spans. If this is used, maxResults
-- values will be ignored.
| SGTSAllTime
-- ^ @ALL_TIME@
-- Get the all time high score.
| SGTSDaily
-- ^ @DAILY@
-- List the top scores for the current day.
| SGTSWeekly
-- ^ @WEEKLY@
-- List the top scores for the current week.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable ScoresGetTimeSpan
instance FromHttpApiData ScoresGetTimeSpan where
parseQueryParam = \case
"ALL" -> Right SGTSAll
"ALL_TIME" -> Right SGTSAllTime
"DAILY" -> Right SGTSDaily
"WEEKLY" -> Right SGTSWeekly
x -> Left ("Unable to parse ScoresGetTimeSpan from: " <> x)
instance ToHttpApiData ScoresGetTimeSpan where
toQueryParam = \case
SGTSAll -> "ALL"
SGTSAllTime -> "ALL_TIME"
SGTSDaily -> "DAILY"
SGTSWeekly -> "WEEKLY"
instance FromJSON ScoresGetTimeSpan where
parseJSON = parseJSONText "ScoresGetTimeSpan"
instance ToJSON ScoresGetTimeSpan where
toJSON = toJSONText
-- | The types of ranks to return. If the parameter is omitted, no ranks will
-- be returned.
data ScoresGetIncludeRankType
= SGIRTAll
-- ^ @ALL@
-- Retrieve public and social ranks.
| SGIRTPublic
-- ^ @PUBLIC@
-- Retrieve public ranks, if the player is sharing their gameplay activity
-- publicly.
| SGIRTSocial
-- ^ @SOCIAL@
-- Retrieve the social rank.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable ScoresGetIncludeRankType
instance FromHttpApiData ScoresGetIncludeRankType where
parseQueryParam = \case
"ALL" -> Right SGIRTAll
"PUBLIC" -> Right SGIRTPublic
"SOCIAL" -> Right SGIRTSocial
x -> Left ("Unable to parse ScoresGetIncludeRankType from: " <> x)
instance ToHttpApiData ScoresGetIncludeRankType where
toQueryParam = \case
SGIRTAll -> "ALL"
SGIRTPublic -> "PUBLIC"
SGIRTSocial -> "SOCIAL"
instance FromJSON ScoresGetIncludeRankType where
parseJSON = parseJSONText "ScoresGetIncludeRankType"
instance ToJSON ScoresGetIncludeRankType where
toJSON = toJSONText
-- | The time span for the scores and ranks you\'re requesting.
data ScoresListTimeSpan
= SLTSAllTime
-- ^ @ALL_TIME@
-- List the all-time top scores.
| SLTSDaily
-- ^ @DAILY@
-- List the top scores for the current day.
| SLTSWeekly
-- ^ @WEEKLY@
-- List the top scores for the current week.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable ScoresListTimeSpan
instance FromHttpApiData ScoresListTimeSpan where
parseQueryParam = \case
"ALL_TIME" -> Right SLTSAllTime
"DAILY" -> Right SLTSDaily
"WEEKLY" -> Right SLTSWeekly
x -> Left ("Unable to parse ScoresListTimeSpan from: " <> x)
instance ToHttpApiData ScoresListTimeSpan where
toQueryParam = \case
SLTSAllTime -> "ALL_TIME"
SLTSDaily -> "DAILY"
SLTSWeekly -> "WEEKLY"
instance FromJSON ScoresListTimeSpan where
parseJSON = parseJSONText "ScoresListTimeSpan"
instance ToJSON ScoresListTimeSpan where
toJSON = toJSONText
-- | The collection of categories for which data will be returned.
data MetagameListCategoriesByPlayerCollection
= All
-- ^ @all@
-- Retrieve data for all categories. This is the default.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable MetagameListCategoriesByPlayerCollection
instance FromHttpApiData MetagameListCategoriesByPlayerCollection where
parseQueryParam = \case
"all" -> Right All
x -> Left ("Unable to parse MetagameListCategoriesByPlayerCollection from: " <> x)
instance ToHttpApiData MetagameListCategoriesByPlayerCollection where
toQueryParam = \case
All -> "all"
instance FromJSON MetagameListCategoriesByPlayerCollection where
parseJSON = parseJSONText "MetagameListCategoriesByPlayerCollection"
instance ToJSON MetagameListCategoriesByPlayerCollection where
toJSON = toJSONText
-- | Tells the server to return only achievements with the specified state.
-- If this parameter isn\'t specified, all achievements are returned.
data AchievementsListState
= ALSAll
-- ^ @ALL@
-- List all achievements. This is the default.
| ALSHidden
-- ^ @HIDDEN@
-- List only hidden achievements.
| ALSRevealed
-- ^ @REVEALED@
-- List only revealed achievements.
| ALSUnlocked
-- ^ @UNLOCKED@
-- List only unlocked achievements.
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic)
instance Hashable AchievementsListState
instance FromHttpApiData AchievementsListState where
parseQueryParam = \case
"ALL" -> Right ALSAll
"HIDDEN" -> Right ALSHidden
"REVEALED" -> Right ALSRevealed
"UNLOCKED" -> Right ALSUnlocked
x -> Left ("Unable to parse AchievementsListState from: " <> x)
instance ToHttpApiData AchievementsListState where
toQueryParam = \case
ALSAll -> "ALL"
ALSHidden -> "HIDDEN"
ALSRevealed -> "REVEALED"
ALSUnlocked -> "UNLOCKED"
instance FromJSON AchievementsListState where
parseJSON = parseJSONText "AchievementsListState"
instance ToJSON AchievementsListState where
toJSON = toJSONText
| rueshyna/gogol | gogol-games/gen/Network/Google/Games/Types/Sum.hs | mpl-2.0 | 11,267 | 0 | 11 | 2,622 | 1,814 | 968 | 846 | 212 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Partners.Companies.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Gets a company.
--
-- /See:/ <https://developers.google.com/partners/ Google Partners API Reference> for @partners.companies.get@.
module Network.Google.Resource.Partners.Companies.Get
(
-- * REST Resource
CompaniesGetResource
-- * Creating a Request
, companiesGet
, CompaniesGet
-- * Request Lenses
, cgXgafv
, cgCurrencyCode
, cgUploadProtocol
, cgCompanyId
, cgOrderBy
, cgAccessToken
, cgUploadType
, cgAddress
, cgRequestMetadataPartnersSessionId
, cgRequestMetadataLocale
, cgView
, cgRequestMetadataExperimentIds
, cgRequestMetadataUserOverridesIPAddress
, cgRequestMetadataTrafficSourceTrafficSubId
, cgRequestMetadataUserOverridesUserId
, cgRequestMetadataTrafficSourceTrafficSourceId
, cgCallback
) where
import Network.Google.Partners.Types
import Network.Google.Prelude
-- | A resource alias for @partners.companies.get@ method which the
-- 'CompaniesGet' request conforms to.
type CompaniesGetResource =
"v2" :>
"companies" :>
Capture "companyId" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "currencyCode" Text :>
QueryParam "upload_protocol" Text :>
QueryParam "orderBy" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "address" Text :>
QueryParam "requestMetadata.partnersSessionId" Text
:>
QueryParam "requestMetadata.locale" Text :>
QueryParam "view" Text :>
QueryParams "requestMetadata.experimentIds" Text
:>
QueryParam
"requestMetadata.userOverrides.ipAddress"
Text
:>
QueryParam
"requestMetadata.trafficSource.trafficSubId"
Text
:>
QueryParam
"requestMetadata.userOverrides.userId"
Text
:>
QueryParam
"requestMetadata.trafficSource.trafficSourceId"
Text
:>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] GetCompanyResponse
-- | Gets a company.
--
-- /See:/ 'companiesGet' smart constructor.
data CompaniesGet =
CompaniesGet'
{ _cgXgafv :: !(Maybe Xgafv)
, _cgCurrencyCode :: !(Maybe Text)
, _cgUploadProtocol :: !(Maybe Text)
, _cgCompanyId :: !Text
, _cgOrderBy :: !(Maybe Text)
, _cgAccessToken :: !(Maybe Text)
, _cgUploadType :: !(Maybe Text)
, _cgAddress :: !(Maybe Text)
, _cgRequestMetadataPartnersSessionId :: !(Maybe Text)
, _cgRequestMetadataLocale :: !(Maybe Text)
, _cgView :: !(Maybe Text)
, _cgRequestMetadataExperimentIds :: !(Maybe [Text])
, _cgRequestMetadataUserOverridesIPAddress :: !(Maybe Text)
, _cgRequestMetadataTrafficSourceTrafficSubId :: !(Maybe Text)
, _cgRequestMetadataUserOverridesUserId :: !(Maybe Text)
, _cgRequestMetadataTrafficSourceTrafficSourceId :: !(Maybe Text)
, _cgCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'CompaniesGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cgXgafv'
--
-- * 'cgCurrencyCode'
--
-- * 'cgUploadProtocol'
--
-- * 'cgCompanyId'
--
-- * 'cgOrderBy'
--
-- * 'cgAccessToken'
--
-- * 'cgUploadType'
--
-- * 'cgAddress'
--
-- * 'cgRequestMetadataPartnersSessionId'
--
-- * 'cgRequestMetadataLocale'
--
-- * 'cgView'
--
-- * 'cgRequestMetadataExperimentIds'
--
-- * 'cgRequestMetadataUserOverridesIPAddress'
--
-- * 'cgRequestMetadataTrafficSourceTrafficSubId'
--
-- * 'cgRequestMetadataUserOverridesUserId'
--
-- * 'cgRequestMetadataTrafficSourceTrafficSourceId'
--
-- * 'cgCallback'
companiesGet
:: Text -- ^ 'cgCompanyId'
-> CompaniesGet
companiesGet pCgCompanyId_ =
CompaniesGet'
{ _cgXgafv = Nothing
, _cgCurrencyCode = Nothing
, _cgUploadProtocol = Nothing
, _cgCompanyId = pCgCompanyId_
, _cgOrderBy = Nothing
, _cgAccessToken = Nothing
, _cgUploadType = Nothing
, _cgAddress = Nothing
, _cgRequestMetadataPartnersSessionId = Nothing
, _cgRequestMetadataLocale = Nothing
, _cgView = Nothing
, _cgRequestMetadataExperimentIds = Nothing
, _cgRequestMetadataUserOverridesIPAddress = Nothing
, _cgRequestMetadataTrafficSourceTrafficSubId = Nothing
, _cgRequestMetadataUserOverridesUserId = Nothing
, _cgRequestMetadataTrafficSourceTrafficSourceId = Nothing
, _cgCallback = Nothing
}
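-- A hypothetical usage sketch (added; not part of the generated module): the
-- company id is the only required argument, everything else defaults to
-- 'Nothing' and can be filled in with the lenses below, e.g.
--
-- > companiesGet "12345" & cgOrderBy ?~ "address" & cgAddress ?~ "some address"
--
-- where '&' and '?~' are the usual lens operators (assumed to be in scope).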
-- | V1 error format.
cgXgafv :: Lens' CompaniesGet (Maybe Xgafv)
cgXgafv = lens _cgXgafv (\ s a -> s{_cgXgafv = a})
-- | If the company\'s budget is in a different currency code than this one,
-- then the converted budget is converted to this currency code.
cgCurrencyCode :: Lens' CompaniesGet (Maybe Text)
cgCurrencyCode
= lens _cgCurrencyCode
(\ s a -> s{_cgCurrencyCode = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
cgUploadProtocol :: Lens' CompaniesGet (Maybe Text)
cgUploadProtocol
= lens _cgUploadProtocol
(\ s a -> s{_cgUploadProtocol = a})
-- | The ID of the company to retrieve.
cgCompanyId :: Lens' CompaniesGet Text
cgCompanyId
= lens _cgCompanyId (\ s a -> s{_cgCompanyId = a})
-- | How to order addresses within the returned company. Currently, only
-- \`address\` and \`address desc\` are supported, which sort the addresses
-- from closest to farthest and from farthest to closest distance from the
-- given address, respectively.
cgOrderBy :: Lens' CompaniesGet (Maybe Text)
cgOrderBy
= lens _cgOrderBy (\ s a -> s{_cgOrderBy = a})
-- | OAuth access token.
cgAccessToken :: Lens' CompaniesGet (Maybe Text)
cgAccessToken
= lens _cgAccessToken
(\ s a -> s{_cgAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
cgUploadType :: Lens' CompaniesGet (Maybe Text)
cgUploadType
= lens _cgUploadType (\ s a -> s{_cgUploadType = a})
-- | The address to use for sorting the company\'s addresses by proximity. If
-- not given, the geo-located address of the request is used. Used when
-- order_by is set.
cgAddress :: Lens' CompaniesGet (Maybe Text)
cgAddress
= lens _cgAddress (\ s a -> s{_cgAddress = a})
-- | Google Partners session ID.
cgRequestMetadataPartnersSessionId :: Lens' CompaniesGet (Maybe Text)
cgRequestMetadataPartnersSessionId
= lens _cgRequestMetadataPartnersSessionId
(\ s a -> s{_cgRequestMetadataPartnersSessionId = a})
-- | Locale to use for the current request.
cgRequestMetadataLocale :: Lens' CompaniesGet (Maybe Text)
cgRequestMetadataLocale
= lens _cgRequestMetadataLocale
(\ s a -> s{_cgRequestMetadataLocale = a})
-- | The view of \`Company\` resource to be returned. This must not be
-- \`COMPANY_VIEW_UNSPECIFIED\`.
cgView :: Lens' CompaniesGet (Maybe Text)
cgView = lens _cgView (\ s a -> s{_cgView = a})
-- | Experiment IDs the current request belongs to.
cgRequestMetadataExperimentIds :: Lens' CompaniesGet [Text]
cgRequestMetadataExperimentIds
= lens _cgRequestMetadataExperimentIds
(\ s a -> s{_cgRequestMetadataExperimentIds = a})
. _Default
. _Coerce
-- | IP address to use instead of the user\'s geo-located IP address.
cgRequestMetadataUserOverridesIPAddress :: Lens' CompaniesGet (Maybe Text)
cgRequestMetadataUserOverridesIPAddress
= lens _cgRequestMetadataUserOverridesIPAddress
(\ s a ->
s{_cgRequestMetadataUserOverridesIPAddress = a})
-- | Second level identifier to indicate where the traffic comes from. An
-- identifier has multiple letters created by a team which redirected the
-- traffic to us.
cgRequestMetadataTrafficSourceTrafficSubId :: Lens' CompaniesGet (Maybe Text)
cgRequestMetadataTrafficSourceTrafficSubId
= lens _cgRequestMetadataTrafficSourceTrafficSubId
(\ s a ->
s{_cgRequestMetadataTrafficSourceTrafficSubId = a})
-- | Logged-in user ID to impersonate instead of the user\'s ID.
cgRequestMetadataUserOverridesUserId :: Lens' CompaniesGet (Maybe Text)
cgRequestMetadataUserOverridesUserId
= lens _cgRequestMetadataUserOverridesUserId
(\ s a ->
s{_cgRequestMetadataUserOverridesUserId = a})
-- | Identifier to indicate where the traffic comes from. An identifier has
-- multiple letters created by a team which redirected the traffic to us.
cgRequestMetadataTrafficSourceTrafficSourceId :: Lens' CompaniesGet (Maybe Text)
cgRequestMetadataTrafficSourceTrafficSourceId
= lens _cgRequestMetadataTrafficSourceTrafficSourceId
(\ s a ->
s{_cgRequestMetadataTrafficSourceTrafficSourceId =
a})
-- | JSONP
cgCallback :: Lens' CompaniesGet (Maybe Text)
cgCallback
= lens _cgCallback (\ s a -> s{_cgCallback = a})
instance GoogleRequest CompaniesGet where
type Rs CompaniesGet = GetCompanyResponse
type Scopes CompaniesGet = '[]
requestClient CompaniesGet'{..}
= go _cgCompanyId _cgXgafv _cgCurrencyCode
_cgUploadProtocol
_cgOrderBy
_cgAccessToken
_cgUploadType
_cgAddress
_cgRequestMetadataPartnersSessionId
_cgRequestMetadataLocale
_cgView
(_cgRequestMetadataExperimentIds ^. _Default)
_cgRequestMetadataUserOverridesIPAddress
_cgRequestMetadataTrafficSourceTrafficSubId
_cgRequestMetadataUserOverridesUserId
_cgRequestMetadataTrafficSourceTrafficSourceId
_cgCallback
(Just AltJSON)
partnersService
where go
= buildClient (Proxy :: Proxy CompaniesGetResource)
mempty
| brendanhay/gogol | gogol-partners/gen/Network/Google/Resource/Partners/Companies/Get.hs | mpl-2.0 | 11,234 | 0 | 27 | 2,984 | 1,602 | 920 | 682 | 238 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
-- |
-- Module : Network.Google.Types
-- Copyright : (c) 2015-2016 Brendan Hay <[email protected]>
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : provisional
-- Portability : non-portable (GHC extensions)
--
module Network.Google.Types where
import Control.Exception.Lens (exception)
import Control.Lens
import Control.Monad.Catch
import Control.Monad.Trans.Resource
import Data.Aeson
import Data.ByteString (ByteString)
import Data.Coerce
import Data.Conduit
import Data.Data
import Data.DList (DList)
import Data.Foldable (foldMap, foldl')
import Data.String
import Data.Semigroup
import Data.Text (Text)
import Data.Text.Lazy.Builder (Builder)
import GHC.Generics
import GHC.TypeLits
import Network.HTTP.Client (HttpException, RequestBody (..))
import Network.HTTP.Media hiding (Accept)
import Network.HTTP.Types hiding (Header)
import Servant.API hiding (Stream)
import qualified Data.ByteString.Char8 as BS8
import qualified Data.ByteString.Lazy as LBS
import qualified Data.CaseInsensitive as CI
import qualified Data.Conduit.Combinators as Conduit
import qualified Data.DList as DList
import qualified Data.Text.Encoding as Text
import qualified Data.Text.Lazy.Builder as Build
import qualified Network.HTTP.Types as HTTP
data AltJSON = AltJSON deriving (Eq, Ord, Show, Read, Generic, Typeable)
data AltMedia = AltMedia deriving (Eq, Ord, Show, Read, Generic, Typeable)
data Multipart = Multipart deriving (Eq, Ord, Show, Read, Generic, Typeable)
instance ToHttpApiData AltJSON where toQueryParam = const "json"
instance ToHttpApiData AltMedia where toQueryParam = const "media"
instance ToHttpApiData Multipart where toQueryParam = const "multipart"
-- | An OAuth2 scope.
newtype OAuthScope = OAuthScope Text
deriving
( Eq
, Ord
, Show
, Read
, IsString
, Generic
, Typeable
, FromHttpApiData
, ToHttpApiData
, FromJSON
, ToJSON
)
-- | An OAuth2 access token.
newtype AccessToken = AccessToken Text
deriving
( Eq
, Ord
, Show
, Read
, IsString
, Generic
, Typeable
, FromHttpApiData
, ToHttpApiData
, FromJSON
, ToJSON
)
-- | An OAuth2 refresh token.
newtype RefreshToken = RefreshToken Text
deriving
( Eq
, Ord
, Show
, Read
, IsString
, Generic
, Typeable
, FromHttpApiData
, ToHttpApiData
, FromJSON
, ToJSON
)
-- | A client identifier.
newtype ClientId = ClientId Text
deriving
( Eq
, Ord
, Show
, Read
, IsString
, Generic
, Typeable
, FromHttpApiData
, ToHttpApiData
, FromJSON
, ToJSON
)
-- | A service identifier.
newtype ServiceId = ServiceId Text
deriving
( Eq
, Ord
, Show
, Read
, IsString
, Generic
, Typeable
, FromHttpApiData
, ToHttpApiData
, FromJSON
, ToJSON
)
-- | An opaque client secret.
newtype GSecret = GSecret Text
deriving
( Eq
, Ord
, Read
, IsString
, Generic
, Typeable
, FromHttpApiData
, ToHttpApiData
, FromJSON
, ToJSON
)
instance Show GSecret where
show = const "*****"
newtype MediaDownload a = MediaDownload a
data MediaUpload a = MediaUpload a GBody
_Coerce :: (Coercible a b, Coercible b a) => Iso' a b
_Coerce = iso coerce coerce
-- | Not a lawful 'Iso': it identifies 'Nothing' with 'mempty'. It exists for
-- ease of composition with the current 'Lens . Iso' chaining used to hide
-- internal types from the user.
_Default :: Monoid a => Iso' (Maybe a) a
_Default = iso f Just
where
f (Just x) = x
f Nothing = mempty
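-- Behaviour sketch (added for illustration):
--
-- >>> view _Default (Nothing :: Maybe [Int])
-- []
-- >>> view _Default (Just [1,2,3])
-- [1,2,3]
-- >>> review _Default ([] :: [Int])
-- Just []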
type Stream = ConduitM () ByteString (ResourceT IO) ()
data Error
= TransportError HttpException
| SerializeError SerializeError
| ServiceError ServiceError
deriving (Show, Typeable)
instance Exception Error
data SerializeError = SerializeError'
{ _serializeId :: !ServiceId
, _serializeHeaders :: [HTTP.Header]
, _serializeStatus :: !Status
, _serializeMessage :: !String
, _serializeBody :: !(Maybe LBS.ByteString)
} deriving (Eq, Show, Typeable)
data ServiceError = ServiceError'
{ _serviceId :: !ServiceId
, _serviceStatus :: !Status
, _serviceHeaders :: ![HTTP.Header]
, _serviceBody :: !(Maybe LBS.ByteString)
} deriving (Eq, Show, Typeable)
class AsError a where
    -- | A general Gogol error.
_Error :: Prism' a Error
{-# MINIMAL _Error #-}
    -- | An error occurred while communicating over HTTP with a remote service.
_TransportError :: Prism' a HttpException
    -- | A serialisation error occurred when attempting to deserialise a response.
_SerializeError :: Prism' a SerializeError
-- | A service specific error returned by the remote service.
_ServiceError :: Prism' a ServiceError
_TransportError = _Error . _TransportError
_SerializeError = _Error . _SerializeError
_ServiceError = _Error . _ServiceError
instance AsError SomeException where
_Error = exception
instance AsError Error where
_Error = id
_TransportError = prism TransportError $ \case
TransportError e -> Right e
x -> Left x
_SerializeError = prism SerializeError $ \case
SerializeError e -> Right e
x -> Left x
_ServiceError = prism ServiceError $ \case
ServiceError e -> Right e
x -> Left x
data ServiceConfig = ServiceConfig
{ _svcId :: !ServiceId
, _svcHost :: !ByteString
, _svcPath :: !Builder
, _svcPort :: !Int
, _svcSecure :: !Bool
, _svcTimeout :: !(Maybe Seconds)
}
defaultService :: ServiceId -> ByteString -> ServiceConfig
defaultService i h = ServiceConfig
{ _svcId = i
, _svcHost = h
, _svcPath = mempty
, _svcPort = 443
, _svcSecure = True
, _svcTimeout = Just 70
}
-- | The remote host name, used for both the IP address to connect to and the
-- host request header.
serviceHost :: Lens' ServiceConfig ByteString
serviceHost = lens _svcHost (\s a -> s { _svcHost = a })
-- | The remote port to connect to.
--
-- Defaults to @443@.
servicePort :: Lens' ServiceConfig Int
servicePort = lens _svcPort (\s a -> s { _svcPort = a })
-- | A path prefix that is prepended to any sent HTTP request.
--
-- Defaults to @mempty@.
servicePath :: Lens' ServiceConfig Builder
servicePath = lens _svcPath (\s a -> s { _svcPath = a })
-- | Whether to use HTTPS/SSL.
--
-- Defaults to @True@.
serviceSecure :: Lens' ServiceConfig Bool
serviceSecure = lens _svcSecure (\s a -> s { _svcSecure = a })
-- | Number of seconds to wait for a response.
serviceTimeout :: Lens' ServiceConfig (Maybe Seconds)
serviceTimeout = lens _svcTimeout (\s a -> s { _svcTimeout = a })
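-- An illustrative configuration (added; the service id and host are made up):
-- 'defaultService' tweaked with the lenses above.
exampleServiceConfig :: ServiceConfig
exampleServiceConfig =
  defaultService (ServiceId "example:v1") "example.googleapis.com"
    & servicePort    .~ 8443 -- talk to a non-standard HTTPS port
    & serviceTimeout ?~ 30   -- wait at most 30 seconds per response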
-- | A single part of a (potentially multipart) request body.
--
-- /Note:/ The 'IsString' instance defaults to a @text/plain@ MIME type.
data GBody = GBody !MediaType !RequestBody
instance IsString GBody where
fromString = GBody ("text" // "plain") . fromString
-- | A lens into the 'MediaType' of a request 'Body'.
bodyContentType :: Lens' GBody MediaType
bodyContentType = lens (\(GBody m _) -> m) (\(GBody _ b) m -> GBody m b)
-- | An intermediary request builder.
data Request = Request
{ _rqPath :: !Builder
, _rqQuery :: !(DList (ByteString, Maybe ByteString))
, _rqHeaders :: !(DList (HeaderName, ByteString))
, _rqBody :: ![GBody]
}
instance Monoid Request where
mempty = Request mempty mempty mempty mempty
mappend = (<>)
instance Semigroup Request where
a <> b = Request
(_rqPath a <> "/" <> _rqPath b)
(_rqQuery a <> _rqQuery b)
(_rqHeaders a <> _rqHeaders b)
(_rqBody b <> _rqBody a)
appendPath :: Request -> Builder -> Request
appendPath rq x = rq { _rqPath = _rqPath rq <> "/" <> x }
appendPaths :: ToHttpApiData a => Request -> [a] -> Request
appendPaths rq = appendPath rq . foldMap (mappend "/" . buildText)
appendQuery :: Request -> ByteString -> Maybe Text -> Request
appendQuery rq k v = rq
{ _rqQuery = DList.snoc (_rqQuery rq) (k, Text.encodeUtf8 <$> v)
}
appendHeader :: Request -> HeaderName -> Maybe Text -> Request
appendHeader rq _ Nothing = rq
appendHeader rq k (Just v) = rq
{ _rqHeaders = DList.snoc (_rqHeaders rq) (k, Text.encodeUtf8 v)
}
setBody :: Request -> [GBody] -> Request
setBody rq bs = rq { _rqBody = bs }
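-- A small illustrative sketch (added): composing the helpers above. The path,
-- query and body values are made up; the body uses the 'IsString' instance
-- for 'GBody' defined earlier (a @text/plain@ part).
exampleRequest :: Request
exampleRequest =
  setBody
    (appendQuery (appendPath mempty "volumes") "q" (Just "haskell"))
    ["hello"]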
-- | A materialised 'http-client' request and associated response parser.
data GClient a = GClient
{ _cliAccept :: !(Maybe MediaType)
, _cliMethod :: !Method
, _cliCheck :: !(Status -> Bool)
, _cliService :: !ServiceConfig
, _cliRequest :: !Request
, _cliResponse :: !(Stream -> ResourceT IO (Either (String, LBS.ByteString) a))
}
clientService :: Lens' (GClient a) ServiceConfig
clientService = lens _cliService (\s a -> s { _cliService = a })
mime :: FromStream c a
=> Proxy c
-> Method
-> [Int]
-> Request
-> ServiceConfig
-> GClient a
mime p = gClient (fromStream p) (Just (contentType p))
discard :: Method
-> [Int]
-> Request
-> ServiceConfig
-> GClient ()
discard = gClient (\b -> runConduit (b .| Conduit.sinkNull) >> pure (Right ())) Nothing
gClient :: (Stream -> ResourceT IO (Either (String, LBS.ByteString) a))
-> Maybe MediaType
-> Method
-> [Int]
-> Request
-> ServiceConfig
-> GClient a
gClient f cs m statuses rq s = GClient
{ _cliAccept = cs
, _cliMethod = m
, _cliCheck = \status -> fromEnum status `elem` statuses
, _cliService = s
, _cliRequest = rq
, _cliResponse = f
}
class Accept c => ToBody c a where
toBody :: Proxy c -> a -> GBody
instance ToBody OctetStream ByteString where
toBody p = GBody (contentType p) . RequestBodyBS
instance ToBody OctetStream LBS.ByteString where
toBody p = GBody (contentType p) . RequestBodyLBS
instance ToBody PlainText ByteString where
toBody p = GBody (contentType p) . RequestBodyBS
instance ToBody PlainText LBS.ByteString where
toBody p = GBody (contentType p) . RequestBodyLBS
instance ToJSON a => ToBody JSON a where
toBody p = GBody (contentType p) . RequestBodyLBS . encode
class Accept c => FromStream c a where
fromStream :: Proxy c
-> Stream
-> ResourceT IO (Either (String, LBS.ByteString) a)
instance FromStream OctetStream Stream where
fromStream Proxy = pure . Right
instance FromJSON a => FromStream JSON a where
fromStream Proxy s = do
bs <- sinkLBS s
case eitherDecode bs of
Left e -> pure $! Left (e, bs)
Right x -> pure $! Right x
class GoogleRequest a where
type Rs a :: *
type Scopes a :: [Symbol]
requestClient :: a -> GClient (Rs a)
class GoogleClient fn where
type Fn fn :: *
buildClient :: Proxy fn -> Request -> Fn fn
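-- An illustrative sketch (added; the resource below is hypothetical): given a
-- Servant-style resource type, the instances that follow derive a client
-- function of the matching shape.
type ExampleGetThing =
     "v1" :> "things" :> Capture "thingId" Text :> Get '[JSON] Value

exampleGetThing :: Text -> ServiceConfig -> GClient Value
exampleGetThing = buildClient (Proxy :: Proxy ExampleGetThing) mempty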
-- | Multiple path captures, with @[xs]@ forming @/<x1>/<x2>/<x3>/...@.
data Captures (s :: Symbol) a
deriving (Typeable)
-- | Form a Google style sub-resource, such as @/<capture>:<mode>@.
data CaptureMode (s :: Symbol) (m :: Symbol) a
deriving (Typeable)
data MultipartRelated (cs :: [*]) m
deriving (Typeable)
instance ( ToBody c m
, GoogleClient fn
) => GoogleClient (MultipartRelated (c ': cs) m :> fn) where
type Fn (MultipartRelated (c ': cs) m :> fn) = m -> GBody -> Fn fn
buildClient Proxy rq m b =
buildClient (Proxy :: Proxy fn) $
setBody rq [toBody (Proxy :: Proxy c) m, b]
instance GoogleClient fn => GoogleClient (AltMedia :> fn) where
type Fn (AltMedia :> fn) = GBody -> Fn fn
buildClient Proxy rq b =
buildClient (Proxy :: Proxy fn) $
setBody rq [b]
instance (KnownSymbol s, GoogleClient fn) => GoogleClient (s :> fn) where
type Fn (s :> fn) = Fn fn
buildClient Proxy rq = buildClient (Proxy :: Proxy fn) $
appendPath rq (buildSymbol (Proxy :: Proxy s))
instance (GoogleClient a, GoogleClient b) => GoogleClient (a :<|> b) where
type Fn (a :<|> b) = Fn a :<|> Fn b
buildClient Proxy rq =
buildClient (Proxy :: Proxy a) rq
:<|> buildClient (Proxy :: Proxy b) rq
instance ( KnownSymbol s
, ToHttpApiData a
, GoogleClient fn
) => GoogleClient (Capture s a :> fn) where
type Fn (Capture s a :> fn) = a -> Fn fn
buildClient Proxy rq = buildClient (Proxy :: Proxy fn)
. appendPath rq
. buildText
instance ( KnownSymbol s
, ToHttpApiData a
, GoogleClient fn
) => GoogleClient (Captures s a :> fn) where
type Fn (Captures s a :> fn) = [a] -> Fn fn
buildClient Proxy rq = buildClient (Proxy :: Proxy fn)
. appendPaths rq
instance ( KnownSymbol s
, KnownSymbol m
, ToHttpApiData a
, GoogleClient fn
) => GoogleClient (CaptureMode s m a :> fn) where
type Fn (CaptureMode s m a :> fn) = a -> Fn fn
buildClient Proxy rq x = buildClient (Proxy :: Proxy fn)
. appendPath rq
$ buildText x <> ":" <> buildSymbol (Proxy :: Proxy m)
instance ( KnownSymbol s
, ToHttpApiData a
, GoogleClient fn
) => GoogleClient (QueryParam s a :> fn) where
type Fn (QueryParam s a :> fn) = Maybe a -> Fn fn
buildClient Proxy rq mx = buildClient (Proxy :: Proxy fn) $
case mx of
Nothing -> rq
Just x -> appendQuery rq k v
where
k = byteSymbol (Proxy :: Proxy s)
v = Just (toQueryParam x)
instance ( KnownSymbol s
, ToHttpApiData a
, GoogleClient fn
) => GoogleClient (QueryParams s a :> fn) where
type Fn (QueryParams s a :> fn) = [a] -> Fn fn
buildClient Proxy rq = buildClient (Proxy :: Proxy fn) . foldl' go rq
where
go r = appendQuery r k . Just . toQueryParam
k = byteSymbol (Proxy :: Proxy s)
instance ( KnownSymbol s
, ToHttpApiData a
, GoogleClient fn
) => GoogleClient (Header s a :> fn) where
type Fn (Header s a :> fn) = Maybe a -> Fn fn
buildClient Proxy rq mx = buildClient (Proxy :: Proxy fn) $
case mx of
Nothing -> rq
Just x -> appendHeader rq (CI.mk k) v
where
k = byteSymbol (Proxy :: Proxy s)
v = Just (toQueryParam x)
instance ( ToBody c a
, GoogleClient fn
) => GoogleClient (ReqBody (c ': cs) a :> fn) where
type Fn (ReqBody (c ': cs) a :> fn) = a -> Fn fn
buildClient Proxy rq x =
buildClient (Proxy :: Proxy fn) $
setBody rq [toBody (Proxy :: Proxy c) x]
instance {-# OVERLAPPABLE #-}
FromStream c a => GoogleClient (Get (c ': cs) a) where
type Fn (Get (c ': cs) a) = ServiceConfig -> GClient a
buildClient Proxy = mime (Proxy :: Proxy c) methodGet [200, 203]
instance {-# OVERLAPPING #-}
GoogleClient (Get (c ': cs) ()) where
type Fn (Get (c ': cs) ()) = ServiceConfig -> GClient ()
buildClient Proxy = discard methodGet [204]
instance {-# OVERLAPPABLE #-}
(FromStream c a, cs' ~ (c ': cs)) => GoogleClient (Post cs' a) where
type Fn (Post cs' a) = ServiceConfig -> GClient a
buildClient Proxy = mime (Proxy :: Proxy c) methodPost [200, 201]
instance {-# OVERLAPPING #-}
GoogleClient (Post cs ()) where
type Fn (Post cs ()) = ServiceConfig -> GClient ()
buildClient Proxy = discard methodPost [204]
instance {-# OVERLAPPABLE #-}
FromStream c a => GoogleClient (Put (c ': cs) a) where
type Fn (Put (c ': cs) a) = ServiceConfig -> GClient a
buildClient Proxy = mime (Proxy :: Proxy c) methodPut [200, 201]
instance {-# OVERLAPPING #-}
GoogleClient (Put (c ': cs) ()) where
type Fn (Put (c ': cs) ()) = ServiceConfig -> GClient ()
buildClient Proxy = discard methodPut [204]
instance {-# OVERLAPPABLE #-}
FromStream c a => GoogleClient (Patch (c ': cs) a) where
type Fn (Patch (c ': cs) a) = ServiceConfig -> GClient a
buildClient Proxy = mime (Proxy :: Proxy c) methodPatch [200, 201]
instance {-# OVERLAPPING #-}
GoogleClient (Patch (c ': cs) ()) where
type Fn (Patch (c ': cs) ()) = ServiceConfig -> GClient ()
buildClient Proxy = discard methodPatch [204]
instance {-# OVERLAPPABLE #-}
FromStream c a => GoogleClient (Delete (c ': cs) a) where
type Fn (Delete (c ': cs) a) = ServiceConfig -> GClient a
buildClient Proxy = mime (Proxy :: Proxy c) methodDelete [200, 202]
instance {-# OVERLAPPING #-}
GoogleClient (Delete (c ': cs) ()) where
type Fn (Delete (c ': cs) ()) = ServiceConfig -> GClient ()
buildClient Proxy = discard methodDelete [204]
sinkLBS :: Stream -> ResourceT IO LBS.ByteString
sinkLBS = runConduit . (.| Conduit.sinkLazy)
buildText :: ToHttpApiData a => a -> Builder
buildText = Build.fromText . toQueryParam
buildSymbol :: forall n proxy. KnownSymbol n => proxy n -> Builder
buildSymbol = Build.fromString . symbolVal
byteSymbol :: forall n proxy. KnownSymbol n => proxy n -> ByteString
byteSymbol = BS8.pack . symbolVal
-- | An integral value representing seconds.
newtype Seconds = Seconds Int
deriving
( Eq
, Ord
, Read
, Show
, Enum
, Num
, Bounded
, Integral
, Real
, Data
, Typeable
, Generic
)
seconds :: Seconds -> Int
seconds (Seconds n)
| n < 0 = 0
| otherwise = n
microseconds :: Seconds -> Int
microseconds = (1000000 *) . seconds
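-- Behaviour sketch (added): negative durations are clamped to zero before the
-- conversion to microseconds, e.g.
--
-- >>> seconds (Seconds (-5))
-- 0
-- >>> microseconds (Seconds 30)
-- 30000000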
newtype GFieldMask = GFieldMask Text
deriving
( Eq
, Ord
, Show
, Read
, IsString
, Generic
, Data
, Typeable
, FromHttpApiData
, ToHttpApiData
, FromJSON
, ToJSON
)
| brendanhay/gogol | core/src/Network/Google/Types.hs | mpl-2.0 | 19,033 | 0 | 16 | 5,477 | 5,674 | 3,014 | 2,660 | -1 | -1 |
{-# LANGUAGE BangPatterns #-}
func = do
let !forced = some
pure ()
| lspitzner/brittany | data/Test325.hs | agpl-3.0 | 71 | 0 | 10 | 17 | 26 | 11 | 15 | 4 | 1 |
{--------------------------------------------------------------------------------
A utility to view images
--------------------------------------------------------------------------------}
module Main where
import Char( toLower )
import Graphics.UI.WXCore
defaultWidth,defaultHeight :: Int
defaultWidth = 300
defaultHeight = 300
main
= run imageViewer
imageViewer
= do -- variable that holds the current bitmap
vbitmap <- varCreate Nothing
-- create file menu: we use standard Id's but could also use any other identifier, like 1 or 27.
fm <- menuCreate "" 0
menuAppend fm wxID_OPEN "&Open..\tCtrl+O" "Open image" False
menuAppend fm wxID_CLOSE "&Close\tCtrl+C" "Close image" False
menuAppendSeparator fm
menuAppend fm wxID_ABOUT "&About.." "About ImageViewer" False {- not checkable -}
menuAppend fm wxID_EXIT "&Quit\tCtrl+Q" "Quit the viewer" False
-- disable close
menuEnable fm wxID_CLOSE False
-- create menu bar
m <- menuBarCreate 0
menuBarAppend m fm "&File"
-- create top frame
f <- frameCreateTopFrame "Image Viewer"
windowSetClientSize f (sz defaultWidth defaultHeight)
-- coolness: set a custom icon
topLevelWindowSetIconFromFile f "../bitmaps/eye.ico"
-- put a scrolled window inside the frame to paint the image on
-- note that 'wxNO_FULL_REPAINT_ON_RESIZE' is needed to prevent flicker on resize.
s <- scrolledWindowCreate f idAny rectNull (wxHSCROLL + wxVSCROLL + wxNO_FULL_REPAINT_ON_RESIZE + wxCLIP_CHILDREN)
-- set paint event handler:
windowOnPaint s (onPaint vbitmap)
-- connect menu
frameSetMenuBar f m
evtHandlerOnMenuCommand f wxID_OPEN (onOpen f vbitmap fm s)
evtHandlerOnMenuCommand f wxID_CLOSE (onClose f vbitmap fm s)
evtHandlerOnMenuCommand f wxID_ABOUT (onAbout f)
evtHandlerOnMenuCommand f wxID_EXIT (onQuit f)
windowAddOnDelete f (close f vbitmap)
-- show it
windowShow f
windowRaise f
return ()
where
onAbout f
= infoDialog f "About 'Image Viewer'" "This is a wxHaskell demo"
onQuit f
= do windowClose f True {- force close -}
return ()
onOpen f vbitmap fm s
= do mbfname <- fileOpenDialog f False True "Open image" imageFiles "" ""
case mbfname of
Nothing
-> return ()
Just fname
-> do bm <- bitmapCreateFromFile fname -- can fail with exception
close f vbitmap
varSet vbitmap (Just bm)
menuEnable fm wxID_CLOSE True
                   -- and then reset the scrollbars and resize around the picture
w <- bitmapGetWidth bm
h <- bitmapGetHeight bm
oldsz <- windowGetClientSize f
let newsz = (sizeMin (sz w h) oldsz)
windowSetClientSize f newsz
scrolledWindowSetScrollbars s 1 1 w h 0 0 False
-- and repaint explicitly (to delete previous stuff)
view <- windowGetViewRect s
withClientDC s (\dc -> onPaint vbitmap dc view)
`catch` (\err -> return ())
where
imageFiles
= [("Image files",["*.bmp","*.jpg","*.gif","*.png"])
,("Portable Network Graphics (*.png)",["*.png"])
,("BMP files (*.bmp)",["*.bmp"])
,("JPG files (*.jpg)",["*.jpg"])
,("GIF files (*.gif)",["*.gif"])
]
onClose f vbitmap fm s
= do close f vbitmap
menuEnable fm wxID_CLOSE False
-- explicitly delete the old bitmap
withClientDC s dcClear
           -- and then reset the scrollbars
scrolledWindowSetScrollbars s 1 1 0 0 0 0 False
close f vbitmap
= do mbBitmap <- varSwap vbitmap Nothing
case mbBitmap of
Nothing -> return ()
Just bm -> bitmapDelete bm
onPaint vbitmap dc viewArea
= do mbBitmap <- varGet vbitmap
case mbBitmap of
Nothing -> return ()
Just bm -> do dcDrawBitmap dc bm pointZero False {- use mask? -}
return () | thielema/wxhaskell | samples/wxcore/ImageViewer.hs | lgpl-2.1 | 4,344 | 0 | 21 | 1,443 | 919 | 438 | 481 | 80 | 4 |
import Data.List.Split
import Data.List
import System.IO
import System.Exit
import Debug.Trace
data Device = Device {idD:: String
, user_agent::String
, fall_back::String} deriving(Show)
data Group = Group {idG:: String}deriving(Show)
data Capability = Capability { name::String
, value:: String}deriving(Show)
setDevice ::[String] -> Device
setDevice [] = Device "" "" ""
setDevice all@(a:b:cs) = do
Device a b (head cs)
getIdDevice :: Device -> String
getIdDevice (Device did ua fb) = did
setGroup ::[String] -> Group
setGroup [] = Group ""
setGroup lista = do
Group (head lista)
getIdGroup :: Group -> String
getIdGroup (Group gid) = gid
setCapability ::[String] -> Capability
setCapability [] = Capability "" ""
setCapability all@(x:xs) = do
Capability x (head xs)
getNameCapability :: Capability -> String
getNameCapability (Capability name value) = name
deviceFunction :: [String]->[String]
deviceFunction [] = []
deviceFunction all@(x:xs) = do
if x=="id"
then [head xs]++deviceFunction xs
else if x == "user_agent"
then [head xs]++deviceFunction xs
else if x == "fall_back"
then [head xs]++deviceFunction xs
else deviceFunction xs
groupFunction :: [String]->[String]
groupFunction [] = []
groupFunction all@(x:xs) = do
if x=="id"
then [head xs]++groupFunction xs
else groupFunction xs
capabilityFunction :: [String]->[String]
capabilityFunction [] = []
capabilityFunction all@(x:xs) = do
if x=="name"
then [head xs]++capabilityFunction xs
else if x == "value"
then [head xs]++capabilityFunction xs
else capabilityFunction xs
main = do
putStrLn "\t\t\t**************************************"
putStrLn "\t\t\t ANALIZADOR SEMANTICO"
putStrLn "\t\t\t José Antonio Vélez Gómez"
putStrLn "\t\t\t Leonel Fernando Ramirez Gonzalez"
putStrLn "\t\t\t Kevin Guillermo Campuzano Castillo"
putStrLn "\t\t\t**************************************"
contentFile <- readFile "wurfl-2.3.xml"
file <- lineFile contentFile
removeLines file
lineFile :: String -> IO [String]
lineFile cadenaArchivo = return (lines cadenaArchivo)
menuPrincipal ::[String] ->String-> IO()
menuPrincipal [] _ = putStrLn "\t\t\tNo Devices exist with that Capability"
menuPrincipal list nameCapability= do
putStrLn "\n\t\t\t\tINGRESE LA OPCION"
putStrLn "\t\t1) Cuantos Devices tienen la Capability ingresada"
putStrLn "\t\t2) Cuales Devices tienen la Capability ingresada"
putStrLn "\t\t3) Desea SALIR"
putStr "\t\tIngrese su opcion: "
number <- getLine
if number == "1"
then putStrLn $ "Existen "++show(length list) ++" Device"
else if number == "2"
then putStrLn $ "\tLos Devices que tienen la Capability :"++show(nameCapability)++"\n\n"++show( list)
else if number == "3"
then exitSuccess
else menuPrincipal list nameCapability
removeLines :: [String] -> IO ()
removeLines [] = return ()
removeLines (x:xs) = do
if isInfixOf "<devices" x
then do
putStr "\nIngrese todas los Capability que desee consultar en el Device: "
nameCapability <- getLine
let listC = listCapability nameCapability
let number = length listC
let listNew = removeEmpty xs
let list = listDevice listNew (Device "" "" "") (Group "") listC number number
menuPrincipal list nameCapability
else
removeLines xs
suman ::Int->Int->Int
suman n m = n+m
listCapability :: String -> [String]
listCapability [] = []
listCapability cadena = removeEmpty( splitOneOf(",; \"") cadena)
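-- Illustrative example (added): capability names may be separated by commas,
-- semicolons, spaces or quotes, and empty fragments are dropped, e.g.
-- listCapability "png, gif; bmp" == ["png","gif","bmp"]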
removeEmpty ::[String]->[String]
removeEmpty [] = []
removeEmpty (x:xs)=do
if x==""
then []++removeEmpty xs
else [x]++removeEmpty xs
listDevice :: [String] -> Device -> Group -> [String]->Int ->Int -> [String]
listDevice [] _ _ _ _ _= []
listDevice (x:xs) device group capabilityUser@(c:cs) n1 n2= do
let lista = splitOneOf ("<>= \\\"\t\n") x
let lista1 = removeEmpty lista
if ( (head lista1) == "device" )
then do
let device = setDevice( deviceFunction( lista1 ) )
[]++listDevice xs device group capabilityUser n2 n2
else if ( (head lista1) == "group" )
then do
let group = setGroup( groupFunction( lista1 ) )
[]++listDevice xs device group capabilityUser n1 n2
else if ( (head lista1) == "capability" )
then do
let capability = setCapability( capabilityFunction( lista1 ) )
let nameCapability = getNameCapability(capability)
let idDevice = getIdDevice(device)
if elem nameCapability capabilityUser
then do
if(n1 <= 1 )
then [idDevice]++listDevice xs device group capabilityUser n2 n2
else []++listDevice xs device group capabilityUser (n1-1) n2
else []++listDevice xs device group capabilityUser n1 n2
else []++listDevice xs device group capabilityUser n1 n2 | josanvel/AnalizadorSintacticoXML | ProyectoXML/ProyectoXml.hs | unlicense | 4,984 | 75 | 15 | 1,131 | 1,746 | 889 | 857 | 130 | 6 |
module Main where
import Lib
main :: IO ()
main = do
hf <- rHasFoo
hfhb <- rHasFooHasBar
hfhbhx <- rHasFooHasBarHasX
hy <- rHasY
hy' <- rHasY'
hy'' <- rHasY''
print hf
print hfhb
print hfhbhx
print hy
print hy'
print hy''
| haroldcarr/learn-haskell-coq-ml-etc | haskell/topic/lens/hc-usage/app/Main.hs | unlicense | 263 | 0 | 7 | 83 | 100 | 43 | 57 | 16 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Openshift.V1.Role where
import GHC.Generics
import Data.Text
import Kubernetes.V1.ObjectMeta
import Openshift.V1.PolicyRule
import qualified Data.Aeson
-- |
data Role = Role
{ kind :: Maybe Text -- ^ Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#types-kinds
, apiVersion :: Maybe Text -- ^ APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#resources
, metadata :: Maybe ObjectMeta -- ^
, rules :: [PolicyRule] -- ^ all the rules for this role
} deriving (Show, Eq, Generic)
instance Data.Aeson.FromJSON Role
instance Data.Aeson.ToJSON Role
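-- A minimal usage sketch (added; not part of the generated module): every
-- field except 'rules' is optional, so a bare role can be built and encoded
-- with the generic Aeson instances above.
exampleRole :: Role
exampleRole = Role
  { kind = Just "Role" -- REST resource kind
  , apiVersion = Just "v1" -- API schema version
  , metadata = Nothing -- no ObjectMeta attached
  , rules = [] -- no policy rules yet
  }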
| minhdoboi/deprecated-openshift-haskell-api | openshift/lib/Openshift/V1/Role.hs | apache-2.0 | 1,156 | 0 | 9 | 175 | 125 | 77 | 48 | 19 | 0 |
{-# LANGUAGE DataKinds, EmptyDataDecls, TypeOperators, UndecidableInstances #-}
module HaskHOL.Lib.IndTypes.Context
( IndTypesType
, IndTypesThry
, IndTypesCtxt
, ctxtIndTypes
) where
import HaskHOL.Core
import HaskHOL.Deductive hiding (newDefinition)
import HaskHOL.Lib.Recursion
import HaskHOL.Lib.Pair
import HaskHOL.Lib.IndTypesPre.Context
import HaskHOL.Lib.IndTypes.Pre
import HaskHOL.Lib.IndTypes.Base
data IndTypesThry
type instance IndTypesThry == IndTypesThry = 'True
instance CtxtName IndTypesThry where
ctxtName _ = "IndTypesCtxt"
type instance PolyTheory IndTypesType b = IndTypesCtxt b
type family IndTypesCtxt a :: Constraint where
IndTypesCtxt a = (Typeable a, IndTypesPreCtxt a, IndTypesContext a ~ 'True)
type IndTypesType = ExtThry IndTypesThry IndTypesPreType
type family IndTypesContext a :: Bool where
IndTypesContext BaseThry = 'False
IndTypesContext (ExtThry a b) = IndTypesContext b || (a == IndTypesThry)
ctxtIndTypes :: TheoryPath IndTypesType
ctxtIndTypes = extendTheory ctxtIndTypesPre $(thisModule') $
do ctxt <- parseContext
let indDefFun = defineTypeRaw <=< (parseInductiveTypeSpecification ctxt)
(sindth, srecth) <- indDefFun [txt| sum = INL A | INR B |]
mapM_ (newRecursiveDefinition srecth)
[ ("OUTL", [txt| OUTL (INL x :A+B) = x |])
, ("OUTR", [txt| OUTR (INR y :A+B) = y |])
]
addIndDef ("sum", (2, sindth, srecth))
-- Stage3
(oindth, orecth) <- indDefFun "option = NONE | SOME A"
(lindth, lrecth) <- indDefFun "list = NIL | CONS A list"
mapM_ addIndDef [ ("option", (2, oindth, orecth))
, ("list", (2, lindth, lrecth))
]
void $ newDefinition
("ISO", [txt| ISO (f:A->B) (g:B->A) <=>
(!x. f(g x) = x) /\ (!y. g(f y) = y) |])
acid1 <- openLocalStateHOL (InductiveTypes mapEmpty)
updateHOL acid1 (PutInductiveTypes $ mapFromList
[ ("list = NIL | CONS A list", (lindth, lrecth))
, ("option = NONE | SOME A", (oindth, orecth))
, ("sum = INL A | INR B", (sindth, srecth))
])
closeAcidStateHOL acid1
boolth <- ruleTAUT [txt| (T <=> F) <=> F |]
acid2 <- openLocalStateHOL (DistinctnessStore [])
updateHOL acid2 (PutDistinctnessStore [("bool", boolth)])
closeAcidStateHOL acid2
mapM_ extendRectypeNet =<< liftM mapToAscList getIndDefs
extendBasicConvs
[ ("convMATCH_SEQPATTERN_TRIV",
("_MATCH x (_SEQPATTERN r s)", "HaskHOL.Lib.IndTypes"))
, ("convMATCH_SEQPATTERN_TRIV",
("_FUNCTION (_SEQPATTERN r s) x", "HaskHOL.Lib.IndTypes"))
, ("convMATCH_ONEPATTERN_TRIV",
([txt| _MATCH x (\y z. P y z) |], "HaskHOL.Lib.IndTypes"))
, ("convMATCH_ONEPATTERN_TRIV",
([txt|_FUNCTION (\y z. P y z) x|], "HaskHOL.Lib.IndTypes"))
]
| ecaustin/haskhol-math | src/HaskHOL/Lib/IndTypes/Context.hs | bsd-2-clause | 3,035 | 1 | 14 | 827 | 688 | 396 | 292 | -1 | -1 |
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FunctionalDependencies #-}
--------------------------------------------------------------------
-- |
-- Copyright : (c) Edward Kmett and Dan Doel 2012
-- License : BSD2
-- Maintainer: Edward Kmett <[email protected]>
-- Stability : experimental
-- Portability: non-portable (DeriveDataTypeable)
--
-- This module provides the AST for Terms
--------------------------------------------------------------------
module Ermine.Syntax.Term
(
-- * Terms
Term(..)
, bindTerm
-- * Hard Terms
, HardTerm(..)
, Terminal(..)
-- * Bindings
, BodyBound(..)
, WhereBound(..)
, AsDecl(..)
, Bodies(..)
, HasBodies(..)
, Binding(..)
, HasBinding(..)
, BindingType(..)
, _Implicit
, _Explicit
, fullAnnotation
, Body(..)
, bodyPatterns
, bodyGuarded
, bodyWhere
, bodyDecls
) where
import Bound
import Bound.Scope
import Bound.Var
import Control.Lens
import Control.Applicative
import Control.Monad.Identity
import Data.Bifoldable
import qualified Data.Binary as Binary
import Data.Binary (Binary)
import Data.Bitraversable
import Data.Bytes.Get
import Data.Bytes.Put
import Data.Bytes.Serial
import Data.Foldable
import Data.Function (on)
import Data.IntMap hiding (map)
import Data.Map hiding (map)
import Data.Monoid
import Data.String
import qualified Data.Serialize as Serialize
import Data.Serialize (Serialize)
import Data.Tagged
import Data.Void
import Data.Word
import Ermine.Diagnostic
import Ermine.Syntax
import Ermine.Syntax.Global
import Ermine.Syntax.Kind hiding (Var)
import Ermine.Syntax.Pattern
import Ermine.Syntax.Literal
import Ermine.Syntax.Scope
import Ermine.Syntax.Type hiding (App, Loc, Var, Tuple)
import GHC.Generics
import Prelude.Extras
-- import Text.Trifecta.Diagnostic.Rendering.Prim
-- | Simple terms that can be compared with structural equality.
data HardTerm
= Lit Literal
| DataCon !Global (Type Void Void)
| Tuple !Word64 -- (,,)
| Hole -- ^ A placeholder that can take any type. Easy to 'Remember'.
deriving (Eq, Show, Generic)
-- | This class provides a prism to match against or inject a 'HardTerm'.
class Terminal t where
hardTerm :: Prism' t HardTerm
litTerm :: Literal -> t
litTerm = review hardTerm . Lit
hole :: t
hole = review hardTerm Hole
instance Terminal HardTerm where
hardTerm = id
-- | Indicate if a definition is explicitly bound with a type annotation or implicitly bound without.
data BindingType t
= Explicit t
| Implicit
deriving (Eq, Show, Functor, Foldable, Traversable, Generic)
-- | Bound variables in a declaration are rather complicated. One can refer
-- to any of the following:
-- 1. Definitions in the same declaration sequence
-- 2. Variables bound in a pattern
-- 3. Definitions in a where clause
-- the 'BodyBound' type captures these three cases in the respective constructors.
data BodyBound = BodyDecl Word64
| BodyPat PatternPath
| BodyWhere Word64
deriving (Eq,Ord,Show,Read,Generic)
data WhereBound = WhereDecl Word64
| WherePat PatternPath
deriving (Eq,Ord,Show,Read,Generic)
class AsDecl t where
_Decl :: Prism' t Word64
instance AsDecl BodyBound where
_Decl = prism BodyDecl $ \ xs -> case xs of
BodyDecl d -> Right d
x -> Left x
instance AsDecl WhereBound where
_Decl = prism WhereDecl $ \ xs -> case xs of
WhereDecl d -> Right d
x -> Left x
-- | A body is the right hand side of a definition. This isn't a term because it has to perform simultaneous
-- matches on multiple patterns with backtracking.
-- Each Body contains a list of where clause bindings to which the body and
-- guards can refer.
data Body t a = Body
{ _bodyPatterns :: [Pattern t]
, _bodyGuarded :: Guarded (Scope BodyBound (Term t) a)
, _bodyWhere :: [Binding t (Var WhereBound a)]
} deriving (Eq, Show, Functor, Foldable, Traversable)
bodyDecls :: Traversal' (Body t a) Word64
bodyDecls f (Body ps g bs) = Body ps <$> (traverse.traverseBound._Decl) f g <*> (traverse.traverse._B._Decl) f bs
instance Bifunctor Body where
bimap = bimapDefault
instance Bifoldable Body where
bifoldMap = bifoldMapDefault
instance Bitraversable Body where
bitraverse f g (Body ps ss wh) =
Body <$> traverse (traverse f) ps
<*> traverse (bitraverseScope f g) ss
<*> traverse (bitraverse f (traverse g)) wh
data Bodies t a = Bodies
{ _loc :: !Rendering
, _cases :: [Body t a]
} deriving (Functor, Foldable, Traversable)
instance (Eq t, Eq a) => Eq (Bodies t a) where
(==) = (==) `on` _cases
instance (Show t, Show a) => Show (Bodies t a) where
show (Bodies _ xs) =
"Binding { _loc = emptyRendering,\
\ _cases = " ++ show xs ++ "}"
instance Bifunctor Bodies where
bimap = bimapDefault
instance Bifoldable Bodies where
bifoldMap = bifoldMapDefault
instance Bitraversable Bodies where
bitraverse f g (Bodies l xs) = Bodies l <$> traverse (bitraverse f g) xs
-- | A Binding provides its source location as a rendering, whether it is
-- explicitly or implicitly bound, and a list of right-hand-side bindings.
data Binding t a = Binding
{ _bindingType :: !(BindingType t)
, _bindingBodies :: Bodies t a
} deriving (Show, Functor, Foldable, Traversable)
fullAnnotation :: Fold (Binding (Annot k t) a) (Type k t)
fullAnnotation = folding $ \case
Binding (Explicit (Annot [] [] sc)) _ ->
Just . instantiateKinds (imposs "kind") . instantiate (imposs "type") $ sc
_ -> Nothing
where imposs s = error $ "fullAnnotation: dangling " ++ s
instance (Eq t, Eq a) => Eq (Binding t a) where
Binding t bs == Binding t' bs' = t == t' && bs == bs'
instance Bifunctor Binding where
bimap = bimapDefault
instance Bifoldable Binding where
bifoldMap = bifoldMapDefault
instance Bitraversable Binding where
bitraverse f g (Binding t xs) = Binding <$> traverse f t <*> bitraverse f g xs
-- | Terms in the Ermine language.
data Term t a
= Var a
| App !(Term t a) !(Term t a)
| HardTerm !HardTerm
| Sig !(Term t a) t
| Lam [Pattern t] !(Scope PatternPath (Term t) a)
| Case !(Term t a) [Alt t (Term t) a]
| Let [Binding t a] !(Scope Word64 (Term t) a)
| Loc !Rendering !(Term t a) -- ^ informational link to the location the term came from
| Remember !Int !(Term t a) -- ^ Used to provide hole support.
deriving (Show, Functor, Foldable, Traversable)
instance IsString a => IsString (Term t a) where
fromString = Var . fromString
instance Variable (Term t) where
_Var = prism Var $ \t -> case t of
Var a -> Right a
_ -> Left t
{-# INLINE _Var #-}
instance App (Term t) where
_App = prism (uncurry App) $ \t -> case t of
App l r -> Right (l,r)
_ -> Left t
{-# INLINE _App #-}
instance (p ~ Tagged, f ~ Identity) => Tup p f (Term t a) where
_Tup = unto hither
where hither [x] = x
hither l = apps (HardTerm . Tuple . fromIntegral $ length l) l
instance Terminal (Term t a) where
hardTerm = prism HardTerm $ \t -> case t of
HardTerm a -> Right a
_ -> Left t
instance (Eq t, Eq a) => Eq (Term t a) where
Loc _ l == r = l == r
l == Loc _ r = l == r
Remember _ l == r = l == r -- ?
l == Remember _ r = l == r -- ?
Var a == Var b = a == b
Sig e t == Sig e' t' = e == e' && t == t'
Lam p b == Lam p' b' = p == p' && b == b'
HardTerm t == HardTerm t' = t == t'
Case b as == Case b' as' = b == b' && as == as'
App a b == App c d = a == c && b == d
Let bs e == Let bs' e' = bs == bs' && e == e' -- this is rather inflexible
_ == _ = False
instance Bifunctor Term where
bimap = bimapDefault
instance Bifoldable Term where
bifoldMap = bifoldMapDefault
instance Bitraversable Term where
bitraverse f g = tm where
tm (Var a) = Var <$> g a
tm (Sig e t) = Sig <$> tm e <*> f t
tm (Lam ps b) = Lam <$> traverse (traverse f) ps <*> bitraverseScope f g b
tm (HardTerm t) = pure (HardTerm t)
tm (App l r) = App <$> tm l <*> tm r
tm (Loc r b) = Loc r <$> tm b
tm (Remember i b) = Remember i <$> tm b
tm (Case b as) = Case <$> tm b <*> traverse (bitraverseAlt f g) as
tm (Let bs ss) = Let <$> traverse (bitraverse f g) bs <*> bitraverseScope f g ss
{-# INLINE bitraverse #-}
instance Eq t => Eq1 (Term t)
instance Show t => Show1 (Term t)
instance Eq2 Term
instance Show2 Term
-- | Perform simultaneous substitution on terms and type annotations.
bindTerm :: (t -> t') -> (a -> Term t' b) -> Term t a -> Term t' b
bindTerm _ g (Var a) = g a
bindTerm f g (App l r) = App (bindTerm f g l) (bindTerm f g r)
bindTerm f g (Sig e t) = Sig (bindTerm f g e) (f t)
bindTerm _ _ (HardTerm t) = HardTerm t
bindTerm f g (Lam ps (Scope b)) = Lam (fmap f <$> ps) (Scope (bimap f (fmap (bindTerm f g)) b))
bindTerm f g (Loc r b) = Loc r (bindTerm f g b)
bindTerm f g (Remember i b) = Remember i (bindTerm f g b)
bindTerm f g (Case b as) = Case (bindTerm f g b) (bindAlt f g <$> as)
bindTerm f g (Let bs (Scope b)) = Let (bindBinding f g <$> bs) (Scope (bimap f (fmap (bindTerm f g)) b))
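-- A small illustration of 'bindTerm' (not part of the original module): the
-- first argument maps type annotations, the second substitutes free term
-- variables. For instance, replacing the free variable "x" by an application
-- while leaving annotations untouched could look like:
--
-- > substX :: Term t String -> Term t String
-- > substX = bindTerm id (\v -> if v == "x" then App (Var "f") (Var "y") else Var v)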
bindBody :: (t -> t') -> (a -> Term t' b) -> Body t a -> Body t' b
bindBody f g (Body ps gs wh) =
let s (Scope b) = Scope $ bimap f (fmap $ bindTerm f g) b in
Body (fmap f <$> ps)
(s <$> gs)
(fmap (bindBinding f (unvar (pure . B) (fmap F . g))) wh)
bindBinding :: (t -> t') -> (a -> Term t' b) -> Binding t a -> Binding t' b
bindBinding f g (Binding bt (Bodies r bs)) = Binding (f <$> bt) $ Bodies r $ bindBody f g <$> bs
bindAlt :: (t -> t') -> (a -> Term t' b) -> Alt t (Term t) a -> Alt t' (Term t') b
bindAlt f g (Alt p gs) =
let s (Scope b) = Scope $ bimap f (fmap $ bindTerm f g) b in
Alt (fmap f p) (fmap s gs)
instance Applicative (Term t) where
pure = Var
(<*>) = ap
instance Monad (Term t) where
return = Var
m >>= g = bindTerm id g m
------------------------------------------------------------------------------
-- Variables
------------------------------------------------------------------------------
instance HasKindVars t t' k k' => HasKindVars (Term t a) (Term t' a) k k' where
kindVars f = bitraverse (kindVars f) pure
instance HasTypeVars t t' tv tv' => HasTypeVars (Term t a) (Term t' a) tv tv' where
typeVars f = bitraverse (typeVars f) pure
-- | Provides a traversal of term variables for variable->variable substitution or extracting free variables.
class HasTermVars s t a b | s -> a, t -> b, s b -> t, t a -> s where
termVars :: Traversal s t a b
instance HasTermVars (Term t a) (Term t b) a b where
termVars = traverse
instance HasTermVars s t a b => HasTermVars [s] [t] a b where
termVars = traverse.termVars
instance HasTermVars s t a b => HasTermVars (IntMap s) (IntMap t) a b where
termVars = traverse.termVars
instance HasTermVars s t a b => HasTermVars (Map k s) (Map k t) a b where
termVars = traverse.termVars
--------------------------------------------------------------------
-- Serialization
--------------------------------------------------------------------
instance Serial HardTerm
instance Binary HardTerm where put = serialize ; get = deserialize
instance Serialize HardTerm where put = serialize ; get = deserialize
instance Serial BodyBound
instance Binary BodyBound where put = serialize ; get = deserialize
instance Serialize BodyBound where put = serialize ; get = deserialize
instance Serial WhereBound
instance Binary WhereBound where put = serialize ; get = deserialize
instance Serialize WhereBound where put = serialize ; get = deserialize
instance Serial1 BindingType where
serializeWith f (Explicit a) = putWord8 0 >> f a
serializeWith _ Implicit = putWord8 1
deserializeWith m = getWord8 >>= \a -> case a of
0 -> Explicit <$> m
1 -> return Implicit
_ -> fail "BindingType.deserializeWith: unexpected case"
instance Serial t => Serial (BindingType t) where
serialize = serialize1 ; deserialize = deserialize1
instance Binary t => Binary (BindingType t) where
put = serializeWith Binary.put
get = deserializeWith Binary.get
instance Serialize t => Serialize (BindingType t) where
put = serializeWith Serialize.put
get = deserializeWith Serialize.get
instance Serial2 Bodies where
serializeWith2 pt pa (Bodies _ xs) =
serializeWith (serializeWith2 pt pa) xs
deserializeWith2 gt ga = Bodies <$> return mempty
<*> deserializeWith (deserializeWith2 gt ga)
instance Serial t => Serial1 (Bodies t) where
serializeWith = serializeWith2 serialize
deserializeWith = deserializeWith2 deserialize
instance (Serial t, Serial v) => Serial (Bodies t v) where
serialize = serialize1 ; deserialize = deserialize1
instance (Binary t, Binary v) => Binary (Bodies t v) where
put = serializeWith2 Binary.put Binary.put
get = deserializeWith2 Binary.get Binary.get
instance Serial2 Binding where
serializeWith2 pt pa (Binding bt body) =
serializeWith pt bt *> serializeWith2 pt pa body
deserializeWith2 gt ga = Binding <$> deserializeWith gt
<*> deserializeWith2 gt ga
instance Serial t => Serial1 (Binding t) where
serializeWith = serializeWith2 serialize
deserializeWith = deserializeWith2 deserialize
instance (Serial t, Serial v) => Serial (Binding t v) where
serialize = serialize1 ; deserialize = deserialize1
instance (Binary t, Binary v) => Binary (Binding t v) where
put = serializeWith2 Binary.put Binary.put
get = deserializeWith2 Binary.get Binary.get
instance Serial2 Body where
serializeWith2 pt pa (Body pats g bs) =
serializeWith (serializeWith pt) pats *>
serializeWith (serializeScope3 serialize (serializeWith2 pt) pa) g *>
serializeWith (serializeWith2 pt (serializeWith pa)) bs
deserializeWith2 gt ga =
Body <$> deserializeWith (deserializeWith gt)
<*> deserializeWith (deserializeScope3 deserialize (deserializeWith2 gt) ga)
<*> deserializeWith (deserializeWith2 gt (deserializeWith ga))
instance Serial t => Serial1 (Body t) where
serializeWith = serializeWith2 serialize
deserializeWith = deserializeWith2 deserialize
instance (Serial t, Serial v) => Serial (Body t v) where
serialize = serialize1 ; deserialize = deserialize1
instance (Binary t, Binary a) => Binary (Body t a) where
put = serializeWith2 Binary.put Binary.put
get = deserializeWith2 Binary.get Binary.get
instance (Serialize t, Serialize a) => Serialize (Body t a) where
put = serializeWith2 Serialize.put Serialize.put
get = deserializeWith2 Serialize.get Serialize.get
instance Serial2 Term where
serializeWith2 pt pa = go
where
go (Var a) = putWord8 0 *> pa a
go (App t1 t2) = putWord8 1 *> go t1 *> go t2
go (HardTerm h) = putWord8 2 *> serialize h
go (Sig t1 t) = putWord8 3 *> go t1 *> pt t
go (Lam ps s) =
putWord8 4 *> serializeWith (serializeWith pt) ps
*> serializeScope3 serialize (serializeWith2 pt) pa s
go (Case t alts) =
putWord8 5 *> go t *> serializeWith (serializeAlt3 pt (serializeWith2 pt) pa) alts
go (Let bs s) =
putWord8 6 *> serializeWith (serializeWith2 pt pa) bs
*> serializeScope3 serialize (serializeWith2 pt) pa s
go (Loc _ t) = putWord8 7 *> go t
go (Remember i t) = putWord8 8 *> serialize i *> go t
{-# INLINE serializeWith2 #-}
deserializeWith2 gt ga = go
where
go = getWord8 >>= \b -> case b of
0 -> Var <$> ga
1 -> App <$> go <*> go
2 -> HardTerm <$> deserialize
3 -> Sig <$> go <*> gt
4 -> Lam <$> deserializeWith (deserializeWith gt)
<*> deserializeScope3 deserialize (deserializeWith2 gt) ga
5 -> Case <$> go <*> deserializeWith (deserializeAlt3 gt (deserializeWith2 gt) ga)
6 -> Let <$> deserializeWith (deserializeWith2 gt ga)
<*> deserializeScope3 deserialize (deserializeWith2 gt) ga
7 -> Loc <$> return mempty <*> go
8 -> Remember <$> deserialize <*> go
_ -> fail $ "getTerm: Unexpected constructor code: " ++ show b
{-# INLINE deserializeWith2 #-}
instance Serial t => Serial1 (Term t) where
serializeWith = serializeWith2 serialize
deserializeWith = deserializeWith2 deserialize
instance (Serial t, Serial v) => Serial (Term t v) where
serialize = serialize1
deserialize = deserialize1
instance (Binary t, Binary v) => Binary (Term t v) where
put = serializeWith2 Binary.put Binary.put
get = deserializeWith2 Binary.get Binary.get
instance (Serialize t, Serialize v) => Serialize (Term t v) where
put = serializeWith2 Serialize.put Serialize.put
get = deserializeWith2 Serialize.get Serialize.get
makeClassy ''Bodies
makeClassy ''Binding
makePrisms ''BindingType
makeLenses ''Body
instance HasBodies (Binding t a) t a where
bodies = bindingBodies
| PipocaQuemada/ermine | src/Ermine/Syntax/Term.hs | bsd-2-clause | 17,352 | 0 | 18 | 3,954 | 6,042 | 3,088 | 2,954 | 410 | 2 |
{-# LANGUAGE EmptyDataDecls #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Main where
import Test.Framework
import Test.Framework.Providers.HUnit
import Test.Framework.Providers.QuickCheck2
import Test.HUnit
import Test.QuickCheck
import TypeLevel.NaturalNumber
import Data.Eq.Approximate
instance Arbitrary value => Arbitrary (AbsolutelyApproximateValue tolerance value) where
arbitrary = fmap AbsolutelyApproximateValue arbitrary
instance Arbitrary value => Arbitrary (RelativelyApproximateValue zerotol reltol value) where
arbitrary = fmap RelativelyApproximateValue arbitrary
type A = AbsolutelyApproximateValue (Digits N5) Double
wrapA :: Double -> A
wrapA = AbsolutelyApproximateValue
unwrapA :: A -> Double
unwrapA = unwrapAbsolutelyApproximateValue
type R = RelativelyApproximateValue (Digits N7) (Digits N5) Double
wrapR :: Double -> R
wrapR = RelativelyApproximateValue
unwrapR :: R -> Double
unwrapR = unwrapRelativelyApproximateValue
main = defaultMain
[testGroup "Absolutely approximate values"
[testGroup "Num operations"
[testProperty "+" $ \a b -> wrapA (unwrapA a + unwrapA b) == a + b
,testProperty "-" $ \a b -> wrapA (unwrapA a - unwrapA b) == a - b
,testProperty "*" $ \a b -> wrapA (unwrapA a * unwrapA b) == a * b
]
,testGroup "Eq operations"
[testProperty "Inside range" $ \a -> a == a + wrapA 1e-6
,testProperty "Outside range" $ \a -> a /= a + wrapA 1e-4
]
]
,testGroup "Relatively approximate values"
[testGroup "Num operations"
[testProperty "+" $ \a b -> wrapR (unwrapR a + unwrapR b) == a + b
,testProperty "-" $ \a b -> wrapR (unwrapR a - unwrapR b) == a - b
,testProperty "*" $ \a b -> wrapR (unwrapR a * unwrapR b) == a * b
]
,testGroup "Eq operations"
[testGroup "Non-zero"
[testProperty "Inside range" $ \a -> a /= 0 ==> a == a + (a * wrapR 1e-6)
,testProperty "Outside range" $ \a -> a /= 0 ==> a /= a + (a * wrapR 1e-4)
]
,testGroup "Zero"
[testCase "Inside range" $
assertBool "Is the value equal to zero within the tolerance?" $
0 == wrapR 1e-8 && wrapR 1e-8 == 0
,testCase "Outside range" $
assertBool "Is the value not equal to zero within the tolerance?" $
0 /= wrapR 1e-6 && wrapR 1e-6 /= 0
,testCase "Both inside range" $
assertBool "Is the value equal to zero within the tolerance?" $
wrapR 1e-20 == wrapR 1e-8 && wrapR 1e-8 == wrapR 1e-20
]
]
]
]
| gcross/approximate-equality | test.hs | bsd-2-clause | 2,792 | 0 | 18 | 868 | 775 | 392 | 383 | -1 | -1 |
{- Miserere mei Deus
secundum magnam
misericordiam tuam
-}
{-# LANGUAGE GADTs,ScopedTypeVariables #-}
{-| Implements the native Promela target. -}
module Language.GTL.Target.Promela
(verifyModel) where
import Language.GTL.Model
import Language.GTL.Expression as GTL
import Language.Promela.Syntax as Pr
import Language.GTL.Buchi
import Language.GTL.Types
import Language.GTL.Target.Common
import Language.GTL.ErrorRefiner
import Language.GTL.Restriction
import Control.Monad.Identity
import Data.Set as Set hiding (foldl)
import Data.Map as Map hiding (foldl)
import Data.List (elemIndex,genericLength,genericIndex)
import Data.Foldable
import Prelude hiding (foldl,concat,foldl1,mapM)
import Data.Maybe
import Data.Int
import Misc.ProgramOptions as Opts
import Misc.VerificationEnvironment
-- | Do a complete verification of a given GTL file
verifyModel :: Opts.Options -- ^ Options
-> String -- ^ Name of the GTL file without extension
-> GTLSpec String -- ^ The GTL file contents
-> IO ()
verifyModel opts name spec = do
let pr = translateSpec spec
model = buildTargetModel spec
traceFiles <- runVerification opts name pr
parseTraces opts name traceFiles (traceToAtoms model)
-- | Given a list of transitions, give a list of atoms that have to hold for each transition.
traceToAtoms :: TargetModel -- ^ The program to work on
-> [(String,Integer,Integer)] -- ^ The transitions, given in the form (model,state,transition-number)
-> Trace
traceToAtoms model trace = fmap transitionToAtoms trace
where
transitionToAtoms :: (String,Integer,Integer) -> [TypedExpr (String, String)]
transitionToAtoms (mdl,st,t) =
let stateMachine = tprocAutomaton $ (tmodelProcs model)!mdl
trans = (baTransitions stateMachine)!st
(ats,_) = trans `genericIndex` t
in tcOriginal ats
translateTarget :: Bool -> TargetModel -> [Pr.Module]
translateTarget use_ltl tm = var_decls ++ procs ++ init ++ verify
where
allP = Map.keys (tmodelProcs tm)
var_decls = [ Pr.Decl $ Pr.Declaration Nothing (convertType tp) [(varString mdl var idx l,Nothing,case inits of
Nothing -> Nothing
Just dset -> Just $ translateConstant tp (unfix $ head $ Set.toList dset)
)]
| ((mdl,var,idx),lvl,tp,inits) <- tmodelVars tm,
l <- [0..lvl]
] ++
(if Set.null clocks
then []
else [Pr.Decl $ Pr.Declaration Nothing TypeInt [ ("timer"++show clk,Nothing,Just $ Pr.ConstExpr $ Pr.ConstInt (-1)) | clk <- Set.toList clocks ]]) ++
[ Pr.Decl $ Pr.Declaration Nothing TypeInt [ ("_count_"++mdl,Nothing,Nothing) | mdl <- allP ]
, Pr.Decl $ Pr.Declaration (Just False) TypeInt [ ("_minimum",Nothing,Nothing) ]
]
procs = [ Pr.ProcType { proctypeActive = Nothing
, proctypeName = pname
, proctypeArguments = []
, proctypePriority = Nothing
, proctypeProvided = Nothing
, proctypeSteps = fmap Pr.toStep $
[ prIf [ [ translateTransition allP (Set.toList clocks) pname cycle_time ist n trg cond ]
| ist <- Set.toList $ baInits buchi,
((cond,trg),n) <- zip ((baTransitions buchi)!ist) [0..]
]
] ++
[ Pr.StmtLabel ("st"++show st) $ if Prelude.null trans
then Pr.StmtExpr $ ExprAny $ Pr.ConstExpr $ ConstBool False
else prIf [ [ translateTransition allP (Set.toList clocks) pname cycle_time st n trg cond ]
| ((cond,trg),n) <- zip trans [0..]
]
| (st,trans) <- Map.toList (baTransitions buchi)
]
}
| (pname,TargetProc buchi cycle_time) <- Map.toList $ tmodelProcs tm ]
init = [Pr.Init Nothing
[Pr.toStep $ prAtomic $ [Pr.StmtSkip] ++
{-concat [ case def of
Nothing -> [] -- XXX
Just (Fix p) -> outputTAssign [(tvar,lvl)] (translateConstant tp p)
| (tvar,lvl,tp,def) <- tmodelInits tm
] ++-}
[ Pr.StmtRun iname []
| iname <- Map.keys (tmodelProcs tm)
]]
]
clocks = Set.fromList $ automatonClocks id ltl_aut
ltl_aut = getVerifyAutomaton tm
verify = if use_ltl
then [Pr.LTL Nothing (translateVerify (tmodelVerify tm))]
else [translateVerifyAutomaton ltl_aut]
translateTransition :: [String] -> [Integer] -> String -> Integer -> Integer -> Integer -> Integer -> TransitionConditions -> Pr.Statement
translateTransition (y:ys) clks pname cy st n trg cond
= prAtomic $ [Pr.StmtExpr $ Pr.ExprAny $ (case translateTExprs (tcAtoms cond) of
(Nothing,[]) -> cond0
(Just r,[]) -> BinExpr Pr.BinAnd cond0 r
) ]++
(catMaybes [ translateTRestr tvars restr
| (tvars,restr) <- tcOutputs cond ])++
[Pr.StmtPrintf ("TRANSITION "++pname++" "++show st++" "++show n++"\n") []
,prDStep ([ StmtAssign (VarRef ("_count_"++pname) Nothing Nothing) (BinExpr Pr.BinPlus (RefExpr (VarRef ("_count_"++pname) Nothing Nothing)) (ConstExpr (ConstInt cy)))
, StmtAssign (VarRef "_minimum" Nothing Nothing) (RefExpr (VarRef ("_count_"++y) Nothing Nothing))
] ++
[ prIf [ [ StmtExpr $ ExprAny $ BinExpr Pr.BinLT (RefExpr (VarRef ("_count_"++v) Nothing Nothing)) (RefExpr (VarRef "_minimum" Nothing Nothing))
, StmtAssign (VarRef "_minimum" Nothing Nothing) (RefExpr (VarRef ("_count_"++v) Nothing Nothing))
]
, [ StmtElse ]
]
| v <- ys ] ++
[ StmtAssign (VarRef ("_count_"++v) Nothing Nothing) (BinExpr Pr.BinMinus (RefExpr (VarRef ("_count_"++v) Nothing Nothing)) (RefExpr (VarRef "_minimum" Nothing Nothing)))
| v <- y:ys
] ++
[ prIf [ [ StmtExpr $ ExprAny $ BinExpr Pr.BinGTE (RefExpr clk_var) (RefExpr (VarRef "_minimum" Nothing Nothing)),
StmtAssign clk_var (BinExpr Pr.BinMinus (RefExpr clk_var) (RefExpr (VarRef "_minimum" Nothing Nothing)))
],
[ StmtElse,
StmtAssign clk_var (ConstExpr (ConstInt (-1)))
]
]
| clk <- clks,
let clk_var = VarRef ("timer"++show clk) Nothing Nothing ]
)
,Pr.StmtGoto ("st"++show trg)]
where
cond0 = BinExpr Pr.BinEquals (RefExpr (VarRef ("_count_"++pname) Nothing Nothing)) (ConstExpr (ConstInt 0))
translateVerify :: TypedExpr TargetVar -> LTLExpr
translateVerify e = case getValue $ unfix e of
BinBoolExpr op lhs rhs -> LTLBin (case op of
And -> LTLAnd
Or -> LTLOr
Implies -> LTLImplication
Until NoTime -> LTLUntil
UntilOp NoTime -> LTLUntilOp) (translateVerify lhs) (translateVerify rhs)
UnBoolExpr op ne -> LTLUn (case op of
Not -> LTLNot
Always -> LTLAlways
Next NoTime -> LTLNext
Finally NoTime -> LTLEventually) (translateVerify ne)
_ -> let (Just re,[]) = translateTExpr e
in LTLNormalExpr re
translateVerifyAutomaton :: BA [TypedExpr TargetVar] Integer -> Pr.Module
translateVerifyAutomaton buchi = prNever $
[prIf [ buildTrans mcond resets trg
| ist <- Set.toList $ baInits buchi,
(cond,trg) <- (baTransitions buchi)!ist,
let (mcond,resets) = translateTExprs cond
]
] ++
[ (if Set.member st (baFinals buchi)
then Pr.StmtLabel ("accept"++show st)
else id) $
Pr.StmtLabel ("st"++show st) $
prIf [ buildTrans mcond resets trg
| (cond,trg) <- trans,
let (mcond,resets) = translateTExprs cond
]
| (st,trans) <- Map.toList $ baTransitions buchi ]
where
buildTrans mcond resets trg = [ prAtomic $ (case mcond of
Nothing -> []
Just rcond -> [Pr.StmtExpr $ Pr.ExprAny rcond]) ++
[ Pr.StmtAssign (Pr.VarRef ("timer"++show clk) Nothing Nothing) (Pr.ConstExpr $ Pr.ConstInt val)
| (clk,val) <- resets ] ++
[Pr.StmtGoto ("st"++show trg)]
]
translateTExprs :: [TypedExpr TargetVar] -> (Maybe Pr.AnyExpression,[(Integer,Integer)])
translateTExprs [] = (Nothing,[])
translateTExprs xs = foldl1 (\(ce,cc) (ce',cc') -> (case ce of
Nothing -> ce'
Just rce -> case ce' of
Nothing -> Just rce
Just rce' -> Just $ Pr.BinExpr Pr.BinAnd rce rce',cc++cc')) (fmap translateTExpr xs)
translateConstant :: GTLType -> GTLValue r -> Pr.AnyExpression
translateConstant _ (GTLIntVal x) = Pr.ConstExpr $ Pr.ConstInt x
translateConstant _ (GTLByteVal x) = Pr.ConstExpr $ Pr.ConstInt (fromIntegral x)
translateConstant _ (GTLBoolVal x) = Pr.ConstExpr $ Pr.ConstBool x
translateConstant (Fix (GTLEnum xs)) (GTLEnumVal x)
= let Just i = elemIndex x xs
in Pr.ConstExpr $ Pr.ConstInt $ fromIntegral i
translateTExpr :: TypedExpr TargetVar -> (Maybe Pr.AnyExpression,[(Integer,Integer)])
translateTExpr e = case getValue $ unfix e of
Var (mdl,var,i) lvl _ -> (Just $ Pr.RefExpr (varName mdl var i lvl),[])
Value val -> (Just $ translateConstant (getType $ unfix e) val,[])
BinBoolExpr op lhs rhs -> let (l,cl) = translateTExpr lhs
(r,cr) = translateTExpr rhs
in (case op of
And -> case l of
Nothing -> case r of
Nothing -> Nothing
Just rr -> Just rr
Just rl -> case r of
Nothing -> Just rl
Just rr -> Just $ Pr.BinExpr Pr.BinAnd rl rr
Or -> case l of
Nothing -> Nothing
Just rl -> case r of
Nothing -> Nothing
Just rr -> Just $ Pr.BinExpr Pr.BinOr rl rr
Implies -> case l of
Nothing -> r
Just rl -> case r of
Nothing -> Nothing
Just rr -> Just $ Pr.BinExpr Pr.BinOr (Pr.UnExpr Pr.UnLNot rl) rr,cl++cr)
BinRelExpr op lhs rhs -> let (Just l,[]) = translateTExpr lhs
(Just r,[]) = translateTExpr rhs
in (Just $ Pr.BinExpr (case op of
GTL.BinLT -> Pr.BinLT
GTL.BinLTEq -> Pr.BinLTE
GTL.BinGT -> Pr.BinGT
GTL.BinGTEq -> Pr.BinGTE
GTL.BinEq -> Pr.BinEquals
GTL.BinNEq -> Pr.BinNotEquals) l r,[])
BinIntExpr op lhs rhs -> let (Just l,[]) = translateTExpr lhs
(Just r,[]) = translateTExpr rhs
in (Just $ Pr.BinExpr (case op of
OpPlus -> Pr.BinPlus
OpMinus -> Pr.BinMinus
OpMult -> Pr.BinMult
OpDiv -> Pr.BinDiv) l r,[])
UnBoolExpr op ne -> let (e,c) = translateTExpr ne
in case e of
Nothing -> (Nothing,c)
Just re -> (Just $ Pr.UnExpr (case op of
Not -> Pr.UnLNot) re,c)
ClockReset clk val -> (Nothing,[(clk,val)])
ClockRef clk -> (Just $ Pr.BinExpr Pr.BinGTE (Pr.RefExpr $ Pr.VarRef ("timer"++show clk) Nothing Nothing) (Pr.ConstExpr (Pr.ConstInt 0)),[])
-- | Assigns variables including changing their respective history.
outputTAssign :: [(TargetVar,Integer)] -> Pr.AnyExpression -> [Pr.Statement]
outputTAssign [] _ = []
outputTAssign (((inst,var,idx),lvl):rest) expr
= (assign inst var idx lvl expr) ++
(outputTAssign rest (Pr.RefExpr (varName inst var idx 0)))
-- | Only performs assignments to variables at time 0 (the history is not updated).
outputTAssignNow :: [(TargetVar,Integer)] -> Pr.AnyExpression -> [Pr.Statement]
outputTAssignNow [] _ = []
outputTAssignNow (((inst,var,idx),lvl):rest) expr
= (assignNow inst var idx lvl expr) ++
(outputTAssign rest (Pr.RefExpr (varName inst var idx 0)))
translateTRestr :: [(TargetVar,Integer)] -> Restriction TargetVar -> Maybe Pr.Statement
translateTRestr tvars restr
= let checkNEquals to = case unequals restr of
[] -> Nothing
xs -> Just $ foldl1 (Pr.BinExpr Pr.BinAnd) (fmap (\x -> let (Just p,[]) = translateTExpr x
in Pr.BinExpr Pr.BinNotEquals to p) xs)
checkEquals to = case equals restr of
[] -> Nothing
xs -> Just $ foldl1 (Pr.BinExpr Pr.BinAnd) (fmap (\x -> let (Just p,[]) = translateTExpr x
in Pr.BinExpr Pr.BinEquals to p) xs)
checkAllowed to = case allowedValues restr of
Nothing -> Nothing
Just s -> Just $ if Set.null s
then Pr.ConstExpr $ Pr.ConstBool False
else foldl1 (Pr.BinExpr Pr.BinOr) (fmap (\i -> Pr.BinExpr Pr.BinEquals to (translateConstant (restrictionType restr) i)
) (Set.toList s)
)
checkNAllowed to = if Set.null (forbiddenValues restr)
then Nothing
else Just $ foldl1 (Pr.BinExpr Pr.BinAnd) (fmap (\i -> Pr.BinExpr Pr.BinNotEquals to
(translateConstant (restrictionType restr) i)
) (Set.toList $ forbiddenValues restr))
checkUppers to = case upperLimits restr of
[] -> Nothing
_ -> Just $ foldl1 (Pr.BinExpr Pr.BinAnd) (fmap (\(incl,expr) -> let (Just p,[]) = translateTExpr expr
in Pr.BinExpr (if incl
then Pr.BinLTE
else Pr.BinLT) to p)
(upperLimits restr))
checkLowers to = case lowerLimits restr of
[] -> Nothing
_ -> Just $ foldl1 (Pr.BinExpr Pr.BinAnd) (fmap (\(incl,expr) -> let (Just p,[]) = translateTExpr expr
in Pr.BinExpr (if incl
then Pr.BinGTE
else Pr.BinGT) to p)
(lowerLimits restr))
build f = foldl (\cur el -> case el of
Nothing -> cur
Just rel -> case cur of
Nothing -> Just rel
Just rcur -> Just (f rel rcur)) Nothing
in case equals restr of
[] -> case allowedValues restr of
Just r -> let rr = Set.difference r (forbiddenValues restr)
check v = build (Pr.BinExpr Pr.BinAnd) (fmap (\f -> f (translateConstant (restrictionType restr) v)) [checkNEquals,checkUppers,checkLowers])
in case catMaybes [ case ((case check v of
Nothing -> []
Just chk -> [ Pr.StmtExpr $ Pr.ExprAny chk ])++
(outputTAssign tvars (translateConstant (restrictionType restr) v))) of
[] -> Nothing
p -> Just p
| v <- Set.toList rr ] of
[] -> Nothing
p -> Just $ prIf p
Nothing -> case buildTGenerator (restrictionType restr)
(fmap (\(t,e) -> let (Just p,[]) = translateTExpr e in (t,p)) $ upperLimits restr)
(fmap (\(t,e) -> let (Just p,[]) = translateTExpr e in (t,p)) $ lowerLimits restr)
(\v -> build (Pr.BinExpr Pr.BinAnd) (fmap (\f -> f v) [checkNEquals,checkNAllowed])) tvars of
[] -> Nothing
[x] -> Just x
xs -> Just $ prSequence xs
_ -> case catMaybes [ case ((case build (Pr.BinExpr Pr.BinAnd) (fmap (\f -> f tv) [checkAllowed,checkNEquals,checkNAllowed,checkUppers,checkLowers]) of
Nothing -> []
Just chk -> [Pr.StmtExpr $ Pr.ExprAny chk])++
(outputTAssign tvars tv)) of
[] -> Nothing
p -> Just p
| v <- equals restr
, let (Just tv,[]) = translateTExpr v ] of
[] -> Nothing
[[p]] -> Just p
p -> Just $ prIf p
buildTGenerator :: GTLType -> [(Bool,Pr.AnyExpression)] -> [(Bool,Pr.AnyExpression)] -> (Pr.AnyExpression -> Maybe Pr.AnyExpression) -> [(TargetVar,Integer)] -> [Pr.Statement]
buildTGenerator tp upper lower check to
= let rupper e = case upper of
[] -> Pr.BinExpr Pr.BinLT e (Pr.ConstExpr $ Pr.ConstInt (case baseType tp of
Fix (GTLEnum xs) -> (genericLength xs)-1
Fix GTLInt -> fromIntegral (maxBound::Int32)
Fix GTLBool -> 1
))
_ -> foldl1 (Pr.BinExpr Pr.BinAnd) $
fmap (\(inc,expr) -> Pr.BinExpr Pr.BinLT e (if inc
then expr
else Pr.BinExpr Pr.BinMinus expr (Pr.ConstExpr $ Pr.ConstInt 1))
) upper
rlower = fmap (\(inc,expr) -> if inc
then expr
else Pr.BinExpr Pr.BinPlus expr (Pr.ConstExpr $ Pr.ConstInt 1)) lower
in case to of
[] -> []
((inst,var,idx),lvl):fs
-> let trg = Pr.RefExpr (varName inst var idx 0)
in [minimumAssignment (Pr.ConstExpr $ Pr.ConstInt (case baseType tp of
Fix (GTLEnum _) -> 0
Fix GTLInt -> fromIntegral (minBound::Int32)
Fix GTLBool -> 0
)
)
(\x -> case assign inst var idx lvl x of
[stp] -> stp
stps -> prSequence stps)
rlower]++
[prDo $ [[Pr.StmtExpr $ Pr.ExprAny $ rupper trg]++
(outputTAssignNow to (Pr.BinExpr Pr.BinPlus trg (Pr.ConstExpr $ Pr.ConstInt 1)))
]++(case check trg of
Nothing -> [[Pr.StmtBreak]]
Just rcheck -> [[Pr.StmtExpr $ Pr.ExprAny rcheck
,Pr.StmtBreak]
,[Pr.StmtElse,Pr.StmtSkip]
])
]
translateSpec :: GTLSpec String -> [Pr.Module]
translateSpec spec = translateTarget False (buildTargetModel spec)
convertType :: GTLType -> Pr.Typename
convertType (Fix GTLInt) = Pr.TypeInt
convertType (Fix GTLBool) = Pr.TypeBool
convertType (Fix (GTLEnum _)) = Pr.TypeInt
convertType (Fix (GTLNamed _ tp)) = convertType tp
convertType tp = error $ "Promela target can't use type "++show tp++" yet."
varName :: String -> String -> [Integer] -> Integer -> Pr.VarRef
varName mdl var idx lvl = VarRef (varString mdl var idx lvl) Nothing Nothing
varString :: String -> String -> [Integer] -> Integer -> String
varString mdl var idx lvl = mdl ++ "_" ++ var ++ concat [ "_"++show i | i <- idx] ++ "_"++show lvl
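-- For illustration (not part of the original module): the naming scheme
-- flattens the module name, variable name, indices and history level into a
-- single identifier, e.g.
--
-- > varString "m" "x" [2,3] 1 == "m_x_2_3_1"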
-- | Performs an assignment to a variable, including updating its history.
assign :: String -> String -> [Integer] -> Integer -> Pr.AnyExpression -> [Pr.Statement]
assign mdl var idx lvl expr
= foldl (\stmts cl -> Pr.StmtAssign (varName mdl var idx cl) (if cl==0
then expr
else RefExpr (varName mdl var idx (cl-1))):stmts)
[]
[0..lvl]
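-- Illustrative example (not part of the original module): for history depth 2
-- the generated statements shift the history before writing the new value,
-- i.e. @assign "m" "x" [] 2 e@ corresponds to the Promela sequence
--
-- > m_x_2 = m_x_1; m_x_1 = m_x_0; m_x_0 = e;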
-- | Only performs the assignment for the current moment (the history is not updated).
assignNow :: String -> String -> [Integer] -> Integer -> Pr.AnyExpression -> [Pr.Statement]
assignNow mdl var idx lvl expr
= [Pr.StmtAssign (varName mdl var idx 0) expr]
minimumAssignment :: Pr.AnyExpression -> (Pr.AnyExpression -> Pr.Statement) -> [Pr.AnyExpression] -> Pr.Statement
minimumAssignment def f [] = f def
minimumAssignment _ f (x:xs) = minimumAssignment' x xs
where
minimumAssignment' x [] = f x
minimumAssignment' x (y:ys) = prIf [ [Pr.StmtExpr $ Pr.ExprAny $ Pr.BinExpr Pr.BinLT x y
,minimumAssignment' x ys
]
, [Pr.StmtElse
,minimumAssignment' y ys
]
]
| hguenther/gtl | lib/Language/GTL/Target/Promela.hs | bsd-3-clause | 24,836 | 0 | 28 | 11,442 | 6,923 | 3,566 | 3,357 | 348 | 26 |
{-# LANGUAGE OverloadedStrings #-}
-- | Visualize (x,y,z) co-ordinates as a 3D scatter plot.
module Web.Lightning.Plots.Scatter3
(
Scatter3Plot(..)
, Visualization (..)
, scatter3Plot
)
where
--------------------------------------------------------------------------------
import Control.Monad.Reader
import Data.Aeson
import Data.Default.Class
import qualified Web.Lightning.Routes as R
import Web.Lightning.Types.Lightning
import Web.Lightning.Types.Visualization (Visualization (..))
import Web.Lightning.Utilities
--------------------------------------------------------------------------------
-- | Scatter Plot 3D parameters
data Scatter3Plot =
Scatter3Plot { sptX :: [Double]
-- ^ List of x points.
, sptY :: [Double]
-- ^ List of y points.
, sptZ :: [Double]
-- ^ List of z points.
, sptColors :: Maybe [Int]
-- ^ List of rgb values to set colors.
, sptGroups :: Maybe [Int]
-- ^ List to set colors via groups.
, sptSize :: Maybe [Int]
-- ^ List to set point sizes.
, sptAlpha :: Maybe [Double]
                 -- ^ List of alpha values to set fill and stroke opacity.
}
deriving (Show, Eq)
instance Default Scatter3Plot where
def = Scatter3Plot [] [] [] Nothing Nothing Nothing Nothing
instance ToJSON Scatter3Plot where
toJSON (Scatter3Plot xs ys zs cs gs ss as) =
omitNulls [ "points" .= getPoints3 xs ys zs
, "color" .= cs
, "group" .= gs
, "size" .= ss
, "alpha" .= as
]
instance ValidatablePlot Scatter3Plot where
validatePlot (Scatter3Plot xs ys zs c g s a) = do
(xs', ys', zs') <- validateCoordinates3 xs ys zs
c' <- validateColor c
s' <- validateSize s
a' <- validateAlpha a
return $ Scatter3Plot xs' ys' zs' c' g s' a'
-- | Submits a request to the specified lightning-viz server to create
-- a 3D scatter plot.
--
-- <http://lightning-viz.org/visualizations/scatter-3/ Scatter Visualization>
scatter3Plot :: Monad m => Scatter3Plot
-- ^ Scatter plot to create
-> LightningT m Visualization
-- ^ Transformer stack with created visualization.
scatter3Plot scatter3Plt = do
url <- ask
viz <- sendPlot "scatter-3" scatter3Plt R.plot
return $ viz { vizBaseUrl = Just url }
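-- An illustrative usage sketch (not part of the original module). The plot
-- record is typically built from its 'Default' instance and then passed to
-- 'scatter3Plot' inside a 'LightningT' computation:
--
-- > examplePlot :: Scatter3Plot
-- > examplePlot = def { sptX = [1, 2, 3]
-- >                   , sptY = [4, 5, 6]
-- >                   , sptZ = [7, 8, 9]
-- >                   }
-- >
-- > -- viz <- scatter3Plot examplePlot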
| cmoresid/lightning-haskell | src/Web/Lightning/Plots/Scatter3.hs | bsd-3-clause | 2,617 | 0 | 10 | 857 | 487 | 274 | 213 | 44 | 1 |
module Add (add) where
import System.Hclip (setClipboard)
import System.Random
import Util
import VaultData
getNewPassword :: IO String
getNewPassword = do
newPass <- genNewPassword defaultLen <$> newStdGen
flushStr $ "Password (default: " ++ newPass ++ " ):"
password <- getLine
if null password
then do
setClipboard newPass
putStrLn $ newPass ++ "\t\t - copied to clipboard"
return newPass
else return password
where
defaultLen = 20
getEntryInfo :: Vault -> IO (Maybe VaultEntry)
getEntryInfo vault = do
putStr "Enter "
masterPass <- getMasterPassword
if verifyMasterPass masterPass vault
then do
flushStr "Service: "
service <- getLine
flushStr "Username: "
username <- getLine
password <- getNewPassword
flushStr "Tags (separate by whitespace): "
tagsStr <- getLine
putStrLn "Note: (control-d to finish)"
note <- getContents
passIV <- genNewSalt <$> newStdGen
noteIV <- genNewSalt <$> newStdGen
let tags = words tagsStr
passwordB16 = encryptToB16 masterPass password passIV
noteB16 = encryptToB16 masterPass note noteIV
passIVB16 = encodeToStrB16 passIV
noteIVB16 = encodeToStrB16 noteIV
return $
Just $
VaultEntry
service
username
passwordB16
passIVB16
noteB16
noteIVB16
tags
else do
putStrLn "Incorrect password!"
return Nothing
addEntry :: Maybe VaultEntry -> Vault -> Vault
addEntry (Just entry) (Vault s m es) = Vault s m $ es ++ [entry]
addEntry Nothing v = v
add :: [String] -> IO ()
add [fileName] = do
jsonStr <- readFile fileName
let v = parseVault jsonStr
newEntry <- getEntryInfo v
updateFile fileName $ prettyPrintedStr $ addEntry newEntry v
| oahziur/yige-pass | src/Add.hs | bsd-3-clause | 2,100 | 0 | 13 | 773 | 499 | 233 | 266 | 61 | 2 |
{-# OPTIONS_GHC -fglasgow-exts #-}
{--
Copyright (c) 2006, Peng Li
2006, Stephan A. Zdancewic
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the copyright owners nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
--}
module Network.TCP.Type.Base where
import Data.Time.Clock.POSIX (POSIXTime,getPOSIXTime)
import Foreign
import Foreign.C
import System.IO.Unsafe
import Control.Exception
import qualified Data.ByteString as S
import qualified Data.ByteString.Lazy as L
to_Int x = (fromIntegral x)::Int
to_Int8 x = (fromIntegral x)::Int8
to_Int16 x = (fromIntegral x)::Int16
to_Int32 x = (fromIntegral x)::Int32
to_Int64 x = (fromIntegral x)::Int64
to_Word x = (fromIntegral x)::Word
to_Word8 x = (fromIntegral x)::Word8
to_Word16 x = (fromIntegral x)::Word16
to_Word32 x = (fromIntegral x)::Word32
to_Word64 x = (fromIntegral x)::Word64
{-# INLINE to_Int #-}
{-# INLINE to_Int8 #-}
{-# INLINE to_Int16 #-}
{-# INLINE to_Int32 #-}
{-# INLINE to_Int64 #-}
{-# INLINE to_Word #-}
{-# INLINE to_Word8 #-}
{-# INLINE to_Word16 #-}
{-# INLINE to_Word32 #-}
{-# INLINE to_Word64 #-}
-- Port numbers, IP addresses
type Port = Word16
newtype IPAddr = IPAddr Word32 deriving (Eq,Ord)
newtype TCPAddr = TCPAddr (IPAddr, Port) deriving (Eq,Ord)
newtype SocketID = SocketID (Port, TCPAddr) deriving (Eq,Ord,Show)
instance Show IPAddr where
show (IPAddr w) = (show $ w .&. 255) ++ "." ++
(show $ (w `shiftR` 8) .&. 255) ++ "." ++
(show $ (w `shiftR` 16) .&. 255) ++ "." ++
(show $ (w `shiftR` 24) .&. 255)
instance Show TCPAddr where
show (TCPAddr (ip,pt)) = (show ip) ++ ":" ++ (show pt)
get_ip :: TCPAddr -> IPAddr
get_ip (TCPAddr (i,p)) = i
get_port :: TCPAddr -> Port
get_port (TCPAddr (i,p)) = p
get_remote_addr :: SocketID -> TCPAddr
get_remote_addr (SocketID (p,a)) = a
get_local_port :: SocketID -> Port
get_local_port (SocketID (p,a)) = p
{-# INLINE get_ip #-}
{-# INLINE get_port #-}
{-# INLINE get_remote_addr #-}
{-# INLINE get_local_port #-}
-- TCP Sequence numbers
class (Eq a) => Seq32 a where
seq_val :: a -> Word32
seq_lt :: a -> a -> Bool
seq_leq :: a -> a -> Bool
seq_gt :: a -> a -> Bool
seq_geq :: a -> a -> Bool
seq_plus :: (Integral n) => a -> n -> a
seq_minus :: (Integral n) => a -> n -> a
seq_diff :: (Integral n) => a -> a -> n
instance Seq32 Word32 where
seq_val w = w
seq_lt x y = (to_Int32 (x-y)) < 0
seq_leq x y = (to_Int32 (x-y)) <= 0
seq_gt x y = (to_Int32 (x-y)) > 0
seq_geq x y = (to_Int32 (x-y)) >= 0
seq_plus s i = assert (i>=0) $ s + (to_Word32 i)
seq_minus s i = assert (i>=0) $ s - (to_Word32 i)
seq_diff s t = let res=fromIntegral $ to_Int32 (s-t) in assert (res>=0) res
{-# INLINE seq_val #-}
{-# INLINE seq_lt #-}
{-# INLINE seq_leq #-}
{-# INLINE seq_gt #-}
{-# INLINE seq_geq #-}
{-# INLINE seq_plus #-}
{-# INLINE seq_minus #-}
{-# INLINE seq_diff #-}
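-- Illustrative examples of the modular (wrap-around) sequence arithmetic
-- above, not part of the original module:
--
-- > seq_lt (0xFFFFFFF0 :: Word32) 16 == True        -- 16 lies "after" the wrap
-- > seq_plus (0xFFFFFFFF :: Word32) (1 :: Int) == 0
-- > seq_diff (0 :: Word32) (0xFFFFFFFF :: Word32) == (1 :: Int)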
newtype SeqLocal = SeqLocal Word32 deriving (Eq,Show,Seq32)
newtype SeqForeign = SeqForeign Word32 deriving (Eq,Show,Seq32)
newtype Timestamp = Timestamp Word32 deriving (Eq,Show,Seq32)
instance Ord SeqLocal where
(<) = seq_lt
(>) = seq_gt
(<=) = seq_leq
(>=) = seq_geq
{-# INLINE (<) #-}
{-# INLINE (>) #-}
{-# INLINE (<=) #-}
{-# INLINE (>=) #-}
instance Ord SeqForeign where
(<) = seq_lt
(>) = seq_gt
(<=) = seq_leq
(>=) = seq_geq
{-# INLINE (<) #-}
{-# INLINE (>) #-}
{-# INLINE (<=) #-}
{-# INLINE (>=) #-}
instance Ord Timestamp where
(<) = seq_lt
(>) = seq_gt
(<=) = seq_leq
(>=) = seq_geq
{-# INLINE (<) #-}
{-# INLINE (>) #-}
{-# INLINE (<=) #-}
{-# INLINE (>=) #-}
seq_flip_ltof (SeqLocal w) = SeqForeign w
seq_flip_ftol (SeqForeign w) = SeqLocal w
fseq_val :: SeqForeign -> Word32
fseq_val (SeqForeign w32) = w32
{-# INLINE seq_flip_ltof #-}
{-# INLINE seq_flip_ftol #-}
-- | Clock time, in microseconds.
type Time = Int64
seconds_to_time :: RealFrac a => a -> Time
seconds_to_time f = round (f * 1000*1000)
{-# INLINE seconds_to_time #-}
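-- For example (illustrative, not part of the original module):
--
-- > seconds_to_time 1.5 == 1500000   -- 1.5 s expressed in microseconds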
get_current_time :: IO Time
get_current_time = posixtime_to_time `fmap` getPOSIXTime
posixtime_to_time :: POSIXTime -> Time
posixtime_to_time = seconds_to_time . toRational
---------------------------------------------------------------------------
type Buffer = S.ByteString
buf_len :: Buffer -> Int
buf_len = S.length
buffer_ok :: Buffer -> Bool
buffer_ok _ = True
buffer_empty :: Buffer
buffer_empty = S.empty
buffer_to_string :: Buffer -> IO String
buffer_to_string = return . map (toEnum . fromEnum) . S.unpack
string_to_buffer :: String -> IO Buffer
string_to_buffer = return . S.pack . map (toEnum . fromEnum)
buffer_split :: Int -> Buffer -> (Buffer,Buffer)
buffer_split = S.splitAt
buffer_take = S.take
buffer_drop = S.drop
buffer_merge :: Buffer -> Buffer -> [Buffer]
buffer_merge bs1 bs2
| S.length bs1 == 0 = [bs2]
| S.length bs2 == 0 = [bs1]
| otherwise = [bs1,bs2]
type BufferChain = L.ByteString
bufc_length :: BufferChain -> Int
bufc_length = fromIntegral . L.length
bufferchain_empty = L.empty
bufferchain_singleton b
| S.null b = L.empty
| otherwise = L.fromChunks [b]
bufferchain_add bs bc = bufferchain_singleton bs `L.append` bc
bufferchain_get :: BufferChain -> Int -> BufferChain
bufferchain_get bc ix = L.take 1 (L.drop (fromIntegral ix) bc)
bufferchain_append bc bs = bc `L.append` bufferchain_singleton bs
bufferchain_concat :: BufferChain -> BufferChain -> BufferChain
bufferchain_concat = L.append
bufferchain_head :: BufferChain -> Buffer
bufferchain_head = head . L.toChunks
bufferchain_tail :: BufferChain -> BufferChain
bufferchain_tail = L.fromChunks . tail . L.toChunks
bufferchain_take :: Int -> BufferChain -> BufferChain
bufferchain_take = L.take . fromIntegral
bufferchain_drop :: Int -> BufferChain -> BufferChain
bufferchain_drop = L.drop . fromIntegral
bufferchain_split_at :: Int -> BufferChain -> (BufferChain,BufferChain)
bufferchain_split_at = L.splitAt . fromIntegral
bufferchain_collapse :: BufferChain -> IO Buffer
bufferchain_collapse = return . S.concat . L.toChunks
-- bufferchain_output bc@(BufferChain lst len) (ptr::Ptr CChar) =
-- copybuf ptr lst
-- where copybuf ptrDest [] = return ()
-- copybuf ptrDest (x:xs) =
-- withForeignPtr (buf_ptr x)
-- (\ptrSrc -> do
-- copyArray ptrDest (ptrSrc `plusPtr` (buf_offset x)) (buf_len x)
-- copybuf (ptrDest `plusPtr` (buf_len x)) xs
-- )
bufferchain_ok :: BufferChain -> Bool
bufferchain_ok _ = True
| Tener/HaNS | src/Network/TCP/Type/Base.hs | bsd-3-clause | 7,999 | 0 | 15 | 1,623 | 1,938 | 1,082 | 856 | 169 | 1 |
module Control.Concurrent.Latch (
Latch,
newCountDownLatch,
onLatch,
awaitLatch,
) where
import Control.Concurrent.STM
import Control.Monad
data Latch = CountDownLatch (TVar Int)
-- | A latch that will allow the waiting thread(s) to continue as soon as onLatch
-- has been called (at least) n times.
newCountDownLatch :: Integral n => n -> STM Latch
newCountDownLatch = liftM CountDownLatch . newTVar . fromIntegral
-- | Signal that the latch has been reached by a thread. Removes one 'token'.
onLatch :: Latch -> STM ()
onLatch (CountDownLatch v) = readTVar v >>= next
where next n | n > 0 = writeTVar v (n-1)
| otherwise = return ()
-- | Wait until onLatch has been called the specified number of times.
awaitLatch :: Latch -> STM ()
awaitLatch (CountDownLatch v) = readTVar v >>= ck
where ck 0 = return ()
ck _ = retry
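-- An illustrative usage sketch (not part of the original module); 'doWork'
-- stands for any IO action, and 'forkIO' comes from Control.Concurrent:
--
-- > example :: IO ()
-- > example = do
-- >   latch <- atomically (newCountDownLatch (2 :: Int))
-- >   _ <- forkIO (doWork >> atomically (onLatch latch))
-- >   _ <- forkIO (doWork >> atomically (onLatch latch))
-- >   atomically (awaitLatch latch)  -- blocks until both workers have signalled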
| msiegenthaler/haskell-xbee | Control/Concurrent/Latch.hs | bsd-3-clause | 857 | 0 | 10 | 199 | 212 | 110 | 102 | 17 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Josh.Bootstrap (bootstrap) where
import Prelude hiding (lookup)
import Data.Configurator
import Data.Configurator.Types
import System.Directory
import System.Process
import System.Exit
import qualified Data.Text as T
import System.FilePath
import Control.Monad
import System.Environment.Executable (getExecutablePath)
import Data.List
import Data.Maybe
import System.Posix.Env (setEnv)
sh :: String -> IO ()
sh cmd = do
ec <- system cmd
if ec == ExitSuccess then return ()
else error $ "Failed: " ++ cmd
compileBaseLib :: FilePath -> FilePath -> FilePath -> Value -> IO ()
compileBaseLib base_dir jcab jreg (String lib') = do
b <- doesDirectoryExist dir
unless b $ sh $ "cd " ++ base_dir ++ " && git clone " ++ lib
fls <- getDirectoryContents dir
if isJust $ find (isSuffixOf ".conf") fls
then return ()
else compile $ elem "ghc-prim.cabal" fls
where lib = drop_git_suffix $ T.unpack lib'
dir = base_dir ++ "/" ++ takeFileName lib
cd = "cd " ++ dir ++ " && "
compile b = do when b $ sh $ cd ++ "cabal configure"
sh $ cd ++ jcab ++ " configure && " ++ jcab ++ " build"
sh $ cd ++ jreg
drop_git_suffix l = if isSuffixOf ".git" l then reverse $ drop 4 $ reverse l else l
compileBaseLib _ _ _ e = error $ "Bad value: " ++ show e
writeSH :: FilePath -> String -> IO FilePath
writeSH fp ln = do
writeFile fp $ unlines $ [ "#!/bin/sh", ln ]
p <- getPermissions fp
setPermissions fp (p { executable = True })
canonicalizePath fp
cmmBuilds :: String -> String -> String -> [String] -> [String]
cmmBuilds ghc ghc_build lib_dir ss = (map go ss) ++ [cat] where
go f = ghc ++ " -I" ++ ghc_build ++ "/rts -c -O2 "
++ ghc_build ++ "/rts/" ++ f ++ ".cmm -o " ++ o f
cat = "cat " ++ (intercalate " " $ map o ss) ++ " > " ++ lib_dir ++ "/rts/libHSrts.a"
o f = lib_dir ++ "/rts/" ++ (takeFileName f) ++ ".o"
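-- Illustrative output (not part of the original module): with a single .cmm
-- file the generated shell commands are one compile step followed by the
-- archive step, e.g.
--
-- > cmmBuilds "ghc" "/bld" "/lib" ["Apply"] ==
-- >   [ "ghc -I/bld/rts -c -O2 /bld/rts/Apply.cmm -o /lib/rts/Apply.o"
-- >   , "cat /lib/rts/Apply.o > /lib/rts/libHSrts.a" ]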
bootstrap :: FilePath -> IO ()
bootstrap fp = do
config <- load [ Required fp ]
lib_dir' <- require config "lib_dir"
createDirectoryIfMissing True $ lib_dir' ++ "/package.conf.d"
lib_dir <- canonicalizePath lib_dir'
let pkg_dir = lib_dir ++ "/package.conf.d"
let bin_dir = lib_dir ++ "/bin"
ghc_build <- require config "ghc_build_dir"
let ghc = ghc_build ++ "/inplace/bin/ghc-stage2 -fjavascript"
let ghc_pkg = ghc_build ++ "/inplace/lib/bin/ghc-pkg"
createDirectoryIfMissing True $ lib_dir ++ "/rts"
writeFile (lib_dir ++ "/rts/rts.conf") $ unlines [
"name: rts"
, "version: 1.0"
, "id: builtin_rts"
, "license: BSD3"
, "exposed: True"
, "library-dirs: " ++ lib_dir ++ "/rts"
, "hs-libraries: HSrts"
, "include-dirs: " ++ ghc_build ++ "/rts/dist/build"
, " " ++ ghc_build ++ "/includes"
, " " ++ ghc_build ++ "/includes/dist-derivedconstants/header"
]
exe <- getExecutablePath
createDirectoryIfMissing True bin_dir
jghc <- writeSH (bin_dir ++ "/josh-ghc") $ concat [
"GHC_PACKAGE_PATH=", pkg_dir
, "\nexport GHC_PACKAGE_PATH\n"
, "exec ", ghc, " -pgml ", exe, " -pgmc "
, exe, " ${1+\"$@\"}" ]
mapM_ sh $ cmmBuilds ghc ghc_build lib_dir
[ "dist/build/AutoApply", "Apply", "HeapStackCheck", "StgMiscClosures"
, "Updates", "PrimOps", "Exception", "StgStartup", "StgStdThunks" ]
sh $ "gcc -traditional -P -E -I" ++ ghc_build ++ "/includes"
++ " -I" ++ ghc_build ++ "/includes/dist-derivedconstants/header -x c -o "
++ lib_dir ++ "/rts/libptr.a etc/ptr.js"
sh $ "gcc -traditional -P -E -I" ++ ghc_build ++ "/includes"
++ " -I" ++ ghc_build ++ "/includes/dist-derivedconstants/header -x c -o "
++ lib_dir ++ "/rts/trace.js etc/trace.js"
sh $ ghc_pkg ++ " --force --global-package-db"
++ " " ++ pkg_dir ++ " update " ++ lib_dir ++ "/rts/rts.conf"
jpkg <- writeSH (bin_dir ++ "/josh-pkg") $ concat [
"exec ", ghc_pkg, " --global-package-db=", pkg_dir, " ${1+\"$@\"}" ]
jcab <- writeSH (bin_dir ++ "/josh-cabal") $ concat [
"exec cabal --with-ghc=", jghc, " --with-ghc-pkg=", jpkg
, " --with-hsc2hs=", ghc_build, "/inplace/bin/hsc2hs"
, " --with-ld=", exe, " ${1+\"$@\"}" ]
jreg <- writeSH (bin_dir ++ "/josh-register") $ unlines [
"cabal register --gen-pkg-config --inplace"
, jpkg ++ " register *.conf" ]
setEnv "GHC_BUILD_DIR" ghc_build False
let base_dir = lib_dir ++ "/libs"
createDirectoryIfMissing True base_dir
(List base_libs) <- require config "base_libs"
mapM_ (compileBaseLib base_dir jcab jreg) base_libs
| bosu/josh | Josh/Bootstrap.hs | bsd-3-clause | 4,950 | 0 | 14 | 1,385 | 1,342 | 678 | 664 | 103 | 3 |
{-# LANGUAGE TypeFamilies, MultiParamTypeClasses, FlexibleInstances, UndecidableInstances, FlexibleContexts, DeriveGeneric #-}
module QueryArrow.Translation where
import QueryArrow.DB.DB
import QueryArrow.Syntax.Term
import QueryArrow.Syntax.Type
import QueryArrow.Semantics.TypeChecker
import QueryArrow.Semantics.Value
import QueryArrow.Syntax.Utils
import QueryArrow.QueryPlan
import QueryArrow.Rewriting
import QueryArrow.Config
import QueryArrow.Utils
import QueryArrow.ListUtils
import QueryArrow.Plugin
import QueryArrow.RuleParser
import Prelude hiding (lookup)
import Data.ByteString.Lazy.UTF8 (toString)
import Data.Map.Strict (foldrWithKey, elems, lookup, unionWithKey)
import Control.Monad.Except
import Control.Monad.Trans.Reader
import Control.Monad.Trans.Except
import Control.Monad.Trans.State
import qualified Data.ByteString.Lazy as B
import Text.ParserCombinators.Parsec hiding (State)
import Data.Namespace.Namespace
import Algebra.SemiBoundedLattice
import Algebra.Lattice
import Data.Set (toAscList, Set)
import Data.Monoid
import Data.Aeson
import GHC.Generics
import System.Log.Logger (debugM)
import Language.Preprocessor.Cpphs (runCpphs, defaultCpphsOptions, CpphsOptions(..), defaultBoolOptions, BoolOptions(..))
-- exec query from dbname
-- getAllResults2 :: (MonadIO m, MonadBaseControl IO m, IResultRow row, Num (ElemType row), Ord (ElemType row)) => [AbstractDatabase row Formula] -> MSet Var -> Formula -> m [row]
-- getAllResults2 dbs rvars query = do
-- qp <- prepareQuery' dbs rvars query bottom
-- let (_, stream) = execQueryPlan ([], pure mempty) qp
-- getAllResultsInStream stream
-- queryPlan2 :: AbstractDBList row -> Set Var -> MSet Var -> Formula -> QueryPlan2
-- queryPlan2 dbs vars vars2 formula =
-- let qp = formulaToQueryPlan dbs formula
-- qp1 = simplifyQueryPlan qp
-- qp2 = calculateVars vars vars2 qp1 in
-- optimizeQueryPlan dbs qp2
-- rewriteQuery' :: (MonadIO m) => [InsertRewritingRule] -> [InsertRewritingRule] -> [InsertRewritingRule] -> MSet Var -> Formula -> Set Var -> m Formula
-- rewriteQuery' qr ir dr rvars qu0 vars = do
-- liftIO $ infoM "QA" ("original query: " ++ show qu0)
-- let qu = rewriteQuery qr ir dr rvars qu0 vars
-- liftIO $ infoM "QA" ("rewritten query: " ++ show qu)
-- return qu
--
defaultRewritingLimit :: Int
defaultRewritingLimit = 100
type RewritingRuleSets = ([InsertRewritingRule], [InsertRewritingRule], [InsertRewritingRule])
type RewritingRuleTSets = ([InsertRewritingRuleT], [InsertRewritingRuleT], [InsertRewritingRuleT])
rewriteQuery :: [InsertRewritingRuleT] -> [InsertRewritingRuleT] -> [InsertRewritingRuleT] -> MSet Var -> FormulaT -> Set Var -> FormulaT
rewriteQuery qr ir dr vars form ext = runNew (do
registerVars (toAscList ((case vars of
Include vs -> vs
Exclude vs -> vs) \/ freeVars form))
rewrites defaultRewritingLimit ext qr ir dr form)
data TransDB db = TransDB String db [Pred] RewritingRuleTSets
instance (IDatabaseUniformDBFormula FormulaT db) => IDatabase0 (TransDB db) where
type DBFormulaType (TransDB db) = FormulaT
getName (TransDB name _ _ _ ) = name
getPreds (TransDB _ _ predmap _ ) = predmap
supported _ _ _ _ = True
instance (IDatabaseUniformDBFormula FormulaT db) => IDatabase1 (TransDB db) where
type DBQueryType (TransDB db) = DBQueryType db
translateQuery (TransDB _ db _ (qr, ir, dr) ) vars2 qu vars =
let qu' = rewriteQuery qr ir dr (Include vars2) qu vars in do
qu' <- translateQuery db vars2 qu' vars
return qu'
instance (IDatabase db) => IDatabase2 (TransDB db) where
newtype ConnectionType (TransDB db) = TransDBConnection (ConnectionType db)
dbOpen (TransDB _ db _ _ ) = TransDBConnection <$> dbOpen db
instance (IDatabaseUniformDBFormula FormulaT db) => IDatabase (TransDB db)
instance (IDatabase db) => IDBConnection0 (ConnectionType (TransDB db)) where
dbClose (TransDBConnection db ) = dbClose db
dbBegin (TransDBConnection db) = dbBegin db
dbCommit (TransDBConnection db) = dbCommit db
dbPrepare (TransDBConnection db) = dbPrepare db
dbRollback (TransDBConnection db) = dbRollback db
instance (IDatabase db) => IDBConnection (ConnectionType (TransDB db)) where
type QueryType (ConnectionType (TransDB db)) = QueryType (ConnectionType db)
type StatementType (ConnectionType (TransDB db)) = StatementType (ConnectionType db)
prepareQuery (TransDBConnection db) = prepareQuery db
-- exec (TransDB _ dbs _ _ _ _ _) qp vars stream = snd (execQueryPlan dbs (vars, stream ) qp)
getRewriting :: PredMap -> ICATTranslationConnInfo -> IO (RewritingRuleSets, PredMap, PredMap)
getRewriting predmap ps = do
debugM "QA" ("loading rewriting rules from " ++ rewriting_file_path ps)
d0 <- toString <$> B.readFile (rewriting_file_path ps)
d1 <- runCpphs defaultCpphsOptions{includes = include_file_path ps, boolopts = defaultBoolOptions {locations = False}} (rewriting_file_path ps) d0
case runParser rulesp () (rewriting_file_path ps) d1 of
Left err -> error ("getRewriting: " ++ show err)
Right actions ->
return (processActions predmap actions)
typecheckRules :: PredTypeMap -> RewritingRuleSets -> Either String RewritingRuleTSets
typecheckRules ptm (qr, ir, dr) = do
qr' <- mapM (\r ->
case runNew (runReaderT (evalStateT (runExceptT (typecheck r)) (mempty, mempty)) ptm) of
Right a -> return a
Left err -> Left ("typecheckRules: rewrite rule " ++ show r ++ " type error\n" ++ err)) qr
ir' <- mapM (\r ->
case runNew (runReaderT (evalStateT (runExceptT (typecheck r)) (mempty, mempty)) ptm) of
Right a -> return a
Left err -> Left ("typecheckRules: insert rewrite rule " ++ show r ++ " type error\n" ++ err)) ir
dr' <- mapM (\r ->
case runNew (runReaderT (evalStateT (runExceptT (typecheck r)) (mempty, mempty)) ptm) of
Right a -> return a
Left err -> Left ("typecheckRules: delete rewrite rule " ++ show r ++ " type error\n" ++ err)) dr
return (qr', ir', dr')
transDB :: (IDatabase db, DBFormulaType db ~ FormulaT, RowType (StatementType (ConnectionType db)) ~ MapResultRow) => String -> db -> ICATTranslationConnInfo -> IO (TransDB db)
transDB name sumdb transinfo = do
let predmap0 = constructDBPredMap sumdb
debugM "QA" ("predmap = " ++ show predmap0)
-- trace ("preds:\n" ++ intercalate "\n" (map show (elems predmap0))) $ return ()
(rewriting, predmap, exports) <- getRewriting predmap0 transinfo
let exportmap = allObjects exports
debugM "QA" ("exportmap = " ++ show exportmap)
let (rules0, exportedpreds) = foldrWithKey (\key pred1@(Pred pn predtype@(PredType _ paramTypes)) (rules0', exportedpreds') ->
if key /= pn
then
let pred0 = Pred key predtype
params = map (\i -> VarExpr (Var ("var" ++ show i))) [0..length paramTypes - 1]
atom0 = Atom key params
atom1 = Atom pn params in
((if null (outputOnlyComponents predtype params)
then ([InsertRewritingRule atom0 (FAtomic atom1)], [InsertRewritingRule atom0 (FInsert (Lit Pos atom1))], [InsertRewritingRule atom0 (FInsert (Lit Neg atom1))])
else ([InsertRewritingRule atom0 (FAtomic atom1)], [], [])) <> rules0', pred0 : exportedpreds')
else
(rules0', pred1 : exportedpreds')) (([], [], []), []) exportmap
-- trace (intercalate "\n" (map show (exports))) $ return ()
-- trace (intercalate "\n" (map show (predmap1))) $ return ()
let repeats = findRepeats exportedpreds
unless (null repeats) $ error ("more than one export for predicates " ++ show repeats)
let rules1@(qr, ir, dr) = rules0 <> rewriting
let checkPatterns rules = do
let repeats1 = findRepeats (map (\(InsertRewritingRule (Atom p _) _) -> p) rules)
unless (null repeats1) $ error ("more than one definition for predicates " ++ show repeats1)
checkPatterns qr
checkPatterns ir
checkPatterns dr
mapM_ (debugM "QA" . show) qr
mapM_ (debugM "QA" . show) ir
mapM_ (debugM "QA" . show) dr
let ptm = constructPredTypeMap (elems (allObjects predmap) ++ exportedpreds)
let checkNoOutput (InsertRewritingRule (Atom p args) _) =
case lookup p ptm of
Nothing -> error "error"
Just pt ->
unless (null (outputOnlyComponents pt args)) $ error "rule pattern contains output parameters"
mapM_ checkNoOutput ir
mapM_ checkNoOutput dr
case typecheckRules ptm rules1 of
Left err -> error err
Right rules1' ->
return (TransDB name sumdb exportedpreds rules1')
data ICATTranslationConnInfo = ICATCacheConnInfo {
rewriting_file_path :: String,
include_file_path :: [String],
trans_db_plugin :: ICATDBConnInfo
} deriving (Show, Generic)
instance ToJSON ICATTranslationConnInfo
instance FromJSON ICATTranslationConnInfo
data TransPlugin = TransPlugin
instance Plugin TransPlugin MapResultRow where
getDB _ getDB0 ps = do
let fsconf = getDBSpecificConfig ps
db0 <- getDB0 (trans_db_plugin fsconf)
case db0 of
AbstractDatabase db -> AbstractDatabase <$> transDB (qap_name ps) db fsconf
| xu-hao/QueryArrow | QueryArrow-db-trans/src/QueryArrow/Translation.hs | bsd-3-clause | 9,991 | 0 | 26 | 2,568 | 2,598 | 1,341 | 1,257 | 148 | 5 |
--
-- Photon:
--
module Aya.Photon
(
) where
import Aya.Algebra
import Data.Maybe (fromJust, isNothing)
----------
-- TYPE --
----------
-- Wavelength
data Wavelength = Red | Green | Blue deriving Eq
-- Behaivior
data Behaivior = Diffuse | Specular | Absorbed
-- Photon
data Photon = Photon Wavelength Point3 Direction3
tracePhoton :: Photon -> [Primitive] -> Material -> Int -> [Double] -> [Photon]
tracePhoton _ _ _ _ [] = []
tracePhoton ph@(Photon wl p d) prims mate0 depth (x:xs)
  | depth > maxDepth  = []
  | isNothing ray     = []
  | isNothing dt      = []
  | btype == Absorbed = []
  -- The recursion below completes the original, unfinished "mark:" clause.
  -- Continuing the trace from the hit point with the unchanged direction is
  -- an assumption; a full tracer would sample a new direction here.
  | otherwise = mark ++ tracePhoton (Photon wl pt d) prims mate1 (depth + 1) xs
  where
    ray = photonToRay ph
    -- Assumption: psearch takes the primitives and a Ray (the original passed
    -- fromJust and ray as two separate arguments, which cannot typecheck).
    dt  = psearch prims (fromJust ray)
    dt' = fromJust dt
    (pt, mate1, _) = targetMaterial dt' ray mate0
    btype = selectBehaivior ph mate1 x
    mark = if btype == Diffuse then [Photon wl pt d] else []
photonToRay :: Photon -> Maybe Ray
photonToRay (Photon _ p d) = initRay p d
selectBehaivior :: Photon -> Material -> Double -> Behaivior
selectBehaivior _ph mate1 x
  | x < rhod        = Diffuse
  | x < rhod + rhos = Specular
  | otherwise       = Absorbed
  where
    -- mate1 is the material of the hit target
    rhod = rho_d mate1
    rhos = rho_s mate1
selectWavelen :: Photon -> Color -> Double
selectWavelen (Photon w _ _) (Color r g b)
| w == Red = r
| w == Green = g
| w == Blue = b
| eiji-a/aya | src/Aya/Photon.hs | bsd-3-clause | 1,298 | 6 | 11 | 346 | 499 | 259 | 240 | -1 | -1 |
module Parser.InternalSpec where
import Test.Hspec
import Test.QuickCheck (property, (==>))
import Text.ParserCombinators.Parsec (parse)
import Data
import Parser.Internal (parseAtom, parseDottedList,
parseList, parseNumber,
parseQuoted, parseString)
import SpecUtils
sourceName :: String
sourceName = "Parser.Internal.Spec"
specs :: Spec
specs =
describe "Parser.Internal" $ do
describe "Strings" $ do
it "parses the empty string" $ do
(parse parseString sourceName "\"\"" == (Right (String "")))
it "parses spaces" $ do
(parse parseString sourceName "\" \"" == (Right (String " ")))
it "parses arbitrary expr strings" $ property $
\(ALispValString s) -> (parse parseString sourceName (show s)) == (Right (String s))
describe "Numbers" $ do
it "parses positive numbers" $ property $
\k -> (k >= 0) ==> (parse parseNumber sourceName (show k)) == (Right (Number k))
it "parses negative numbers" $ property $
\k -> (k < 0) ==> (parse parseNumber sourceName (show k)) == (Right (List [Atom "-", (Number (-k))]))
describe "Atoms" $ do
it "parses arbitrary atoms" $
property $ \(ALispValAtom s) -> (parse parseAtom sourceName s) == Right (Atom s)
describe "Booleans" $ do
it "parses the #t symbol" $ do
parse parseAtom sourceName "#t" `shouldBe` (Right (Bool True))
it "parses the #f symbol" $ do
parse parseAtom sourceName "#f" `shouldBe` (Right (Bool False))
describe "Quoted Forms" $ do
it "parses quoted booleans" $ property $
\b -> (parse parseQuoted sourceName ("'" ++ (show (Bool b)))) == (Right (List [Atom "quote", (Bool b)]))
it "parses quoted positive numbers" $ property $
\k -> (k >= 0) ==> (parse parseQuoted sourceName ("'" ++ (show (Number k)))) == (Right (List [Atom "quote", (Number k)]))
it "parses quoted negative numbers" $ property $
\k -> (k < 0) ==> (parse parseQuoted sourceName ("'" ++ (show (Number k)))) == (Right (List [Atom "quote", (List [Atom "-", Number (-k)])]))
it "parses quoted exprs" $ property $
\(ALispVal lispVal) ->
lispValWithPositiveIntegers lispVal ==>
(parse parseQuoted sourceName ("'" ++ (show lispVal))) == (Right (List [Atom "quote", lispVal]))
describe "Lists" $ do
it "parses the empty list" $ do
parse parseList sourceName "()" `shouldBe` (Right (List []))
it "parses the one element list" $ do
parse parseList sourceName "(0)" `shouldBe` (Right (List [Number 0]))
it "parses the one element list" $ do
parse parseList sourceName "(1)" `shouldBe` (Right (List [Number 1]))
it "parses the one element nested list" $ do
parse parseList sourceName "((1))" `shouldBe` (Right (List [(List [Number 1])]))
-- FIXME: too hard to generate elements without negative numbers
it "parses lists" $ property $
\(ALispValList lispVals) ->
all lispValWithPositiveIntegers lispVals ==>
parse parseList sourceName (show (List lispVals)) == Right (List lispVals)
describe "Dotted Lists" $ do
it "parses the 2 elements dotted list" $ do
parse parseDottedList sourceName "(1 . 2)" `shouldBe` (Right (DottedList [Number 1] (Number 2)))
-- FIXME: too hard to generate elements without negative numbers
it "parses dotted lists" $ property $
\(ALispValList lispVals) (ALispVal lispVal) ->
length lispVals > 0 && all lispValWithPositiveIntegers (lispVal:lispVals) ==>
parse parseDottedList sourceName (show (DottedList lispVals lispVal)) == Right (DottedList lispVals lispVal)
| Chouffe/skelme | test/Parser/InternalSpec.hs | bsd-3-clause | 3,874 | 0 | 24 | 1,067 | 1,321 | 655 | 666 | 65 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
----------------------------------------------------------------------
-- |
-- Module : XMonad.Actions.GroupNavigation
-- Copyright : (c) [email protected]
-- License : BSD3-style (see LICENSE)
--
-- Maintainer : [email protected]
-- Stability : unstable
-- Portability : unportable
--
-- Provides methods for cycling through groups of windows across
-- workspaces, ignoring windows that do not belong to this group. A
-- group consists of all windows matching a user-provided boolean
-- query.
--
-- Also provides a method for jumping back to the most recently used
-- window in any given group.
--
----------------------------------------------------------------------
module XMonad.Actions.GroupNavigation ( -- * Usage
-- $usage
Direction (..)
, nextMatch
, nextMatchOrDo
, nextMatchWithThis
, historyHook
) where
import Control.Monad.Reader
import Data.Foldable as Fold
import Data.Map as Map
import Data.Sequence as Seq
import Data.Set as Set
import Graphics.X11.Types
import Prelude hiding (concatMap, drop, elem, filter, null, reverse)
import XMonad.Core
import XMonad.ManageHook
import XMonad.Operations (windows, withFocused)
import qualified XMonad.StackSet as SS
import qualified XMonad.Util.ExtensibleState as XS
{- $usage
Import the module into your @~\/.xmonad\/xmonad.hs@:
> import XMonad.Actions.GroupNavigation
To support cycling forward and backward through all xterm windows, add
something like this to your keybindings:
> , ((modm , xK_t), nextMatch Forward (className =? "XTerm"))
> , ((modm .|. shiftMask, xK_t), nextMatch Backward (className =? "XTerm"))
These key combinations do nothing if there is no xterm window open.
If you rather want to open a new xterm window if there is no open
xterm window, use 'nextMatchOrDo' instead:
> , ((modm , xK_t), nextMatchOrDo Forward (className =? "XTerm") (spawn "xterm"))
> , ((modm .|. shiftMask, xK_t), nextMatchOrDo Backward (className =? "XTerm") (spawn "xterm"))
You can use 'nextMatchWithThis' with an arbitrary query to cycle
through all windows for which this query returns the same value as the
current window. For example, to cycle through all windows in the same
window class as the current window use:
> , ((modm , xK_f), nextMatchWithThis Forward className)
> , ((modm , xK_b), nextMatchWithThis Backward className)
Finally, you can define keybindings to jump to the most recent window
matching a certain Boolean query. To do this, you need to add
'historyHook' to your logHook:
> main = xmonad $ defaultConfig { logHook = historyHook }
Then the following keybindings, for example, allow you to return to
the most recent xterm or emacs window or simply to the most recent
window:
> , ((modm .|. controlMask, xK_e), nextMatch History (className =? "Emacs"))
> , ((modm .|. controlMask, xK_t), nextMatch History (className =? "XTerm"))
> , ((modm , xK_BackSpace), nextMatch History (return True))
Again, you can use 'nextMatchOrDo' instead of 'nextMatch' if you want
to execute an action if no window matching the query exists. -}
--- Basic cyclic navigation based on queries -------------------------
-- | The direction in which to look for the next match
data Direction = Forward -- ^ Forward from current window or workspace
| Backward -- ^ Backward from current window or workspace
| History -- ^ Backward in history
-- | Focuses the next window for which the given query produces the
-- same result as the currently focused window. Does nothing if there
-- is no focused window (i.e., the current workspace is empty).
nextMatchWithThis :: Eq a => Direction -> Query a -> X ()
nextMatchWithThis dir qry = withFocused $ \win -> do
prop <- runQuery qry win
nextMatch dir (qry =? prop)
-- | Focuses the next window that matches the given boolean query.
-- Does nothing if there is no such window. This is the same as
-- 'nextMatchOrDo' with alternate action @return ()@.
nextMatch :: Direction -> Query Bool -> X ()
nextMatch dir qry = nextMatchOrDo dir qry (return ())
-- | Focuses the next window that matches the given boolean query. If
-- there is no such window, perform the given action instead.
nextMatchOrDo :: Direction -> Query Bool -> X () -> X ()
nextMatchOrDo dir qry act = orderedWindowList dir
>>= focusNextMatchOrDo qry act
-- Produces the action to perform depending on whether there's a
-- matching window
focusNextMatchOrDo :: Query Bool -> X () -> Seq Window -> X ()
focusNextMatchOrDo qry act = findM (runQuery qry)
>=> maybe act (windows . SS.focusWindow)
-- Returns the list of windows ordered by workspace as specified in
-- ~/.xmonad/xmonad.hs
orderedWindowList :: Direction -> X (Seq Window)
orderedWindowList History = liftM (\(HistoryDB w ws) -> maybe ws (ws |>) w) XS.get
orderedWindowList dir = withWindowSet $ \ss -> do
wsids <- asks (Seq.fromList . workspaces . config)
let wspcs = orderedWorkspaceList ss wsids
wins = dirfun dir
$ Fold.foldl' (><) Seq.empty
$ fmap (Seq.fromList . SS.integrate' . SS.stack) wspcs
cur = SS.peek ss
return $ maybe wins (rotfun wins) cur
where
dirfun Backward = Seq.reverse
dirfun _ = id
rotfun wins x = rotate $ rotateTo (== x) wins
-- Returns the ordered workspace list as specified in ~/.xmonad/xmonad.hs
orderedWorkspaceList :: WindowSet -> Seq String -> Seq WindowSpace
orderedWorkspaceList ss wsids = rotateTo isCurWS wspcs'
where
wspcs = SS.workspaces ss
wspcsMap = Fold.foldl' (\m ws -> Map.insert (SS.tag ws) ws m) Map.empty wspcs
wspcs' = fmap (\wsid -> wspcsMap ! wsid) wsids
isCurWS ws = SS.tag ws == SS.tag (SS.workspace $ SS.current ss)
--- History navigation, requires a layout modifier -------------------
-- The state extension that holds the history information
data HistoryDB = HistoryDB (Maybe Window) -- currently focused window
(Seq Window) -- previously focused windows
deriving (Read, Show, Typeable)
instance ExtensionClass HistoryDB where
initialValue = HistoryDB Nothing Seq.empty
extensionType = PersistentExtension
-- | Action that needs to be executed as a logHook to maintain the
-- focus history of all windows as the WindowSet changes.
historyHook :: X ()
historyHook = XS.get >>= updateHistory >>= XS.put
-- Updates the history in response to a WindowSet change
updateHistory :: HistoryDB -> X HistoryDB
updateHistory (HistoryDB oldcur oldhist) = withWindowSet $ \ss -> do
let newcur = SS.peek ss
wins = Set.fromList $ SS.allWindows ss
newhist = flt (flip Set.member wins) (ins oldcur oldhist)
return $ HistoryDB newcur (del newcur newhist)
where
ins x xs = maybe xs (<| xs) x
del x xs = maybe xs (\x' -> flt (/= x') xs) x
--- Two replacements for Seq.filter and Seq.breakl available only in
--- containers-0.3.0.0, which only ships with ghc 6.12. Once we
--- decide to no longer support ghc < 6.12, these should be replaced
--- with Seq.filter and Seq.breakl.
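--- For example (illustrative): flt even (Seq.fromList [1,2,3,4]) yields
--- fromList [2,4], and brkl even (Seq.fromList [1,3,4,5]) yields
--- (fromList [1,3], fromList [4,5]).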
flt :: (a -> Bool) -> Seq a -> Seq a
flt p = Fold.foldl (\xs x -> if p x then xs |> x else xs) Seq.empty
brkl :: (a -> Bool) -> Seq a -> (Seq a, Seq a)
brkl p xs = flip Seq.splitAt xs
$ snd
$ Fold.foldr (\x (i, j) -> if p x then (i-1, i-1) else (i-1, j)) (l, l) xs
where
l = Seq.length xs
--- Some sequence helpers --------------------------------------------
-- Rotates the sequence by one position
rotate :: Seq a -> Seq a
rotate xs = rotate' (viewl xs)
where
rotate' EmptyL = Seq.empty
rotate' (x' :< xs') = xs' |> x'
-- Rotates the sequence until an element matching the given condition
-- is at the beginning of the sequence.
rotateTo :: (a -> Bool) -> Seq a -> Seq a
rotateTo cond xs = let (lxs, rxs) = brkl cond xs in rxs >< lxs
--- A monadic find ---------------------------------------------------
-- Applies the given action to every sequence element in turn until
-- the first element is found for which the action returns true. The
-- remaining elements in the sequence are ignored.
findM :: Monad m => (a -> m Bool) -> Seq a -> m (Maybe a)
findM cond xs = findM' cond (viewl xs)
where
findM' _ EmptyL = return Nothing
findM' qry (x' :< xs') = do
isMatch <- qry x'
if isMatch
then return (Just x')
else findM qry xs'
| csstaub/xmonad-contrib | XMonad/Actions/GroupNavigation.hs | bsd-3-clause | 8,784 | 0 | 17 | 2,094 | 1,556 | 830 | 726 | 90 | 3 |
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
-----------------------------------------------------------------------------
-- |
-- Module : Minecraft.PrettyPrinting
-- Copyright : (c) Tamar Christina 2012
-- License : BSD3
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- Contains the basic pretty printing classes for this package along with
-- basic defaults to be used.
--
-----------------------------------------------------------------------------
module Minecraft.PrettyPrinting
( -- * Pretty printing operations
ppList
-- * Pretty printing classes
, Pretty(..)
) where
import Data.Word
import Data.Int
import Data.List
import Text.PrettyPrint
class Pretty a where
pp :: a -> Doc
ppRender :: a -> String
ppRender = render . pp
ppStyle :: Style -> a -> String
ppStyle = \style -> renderStyle style . pp
instance Pretty Int where
pp = int
instance Pretty Int8 where
pp = int . (fromIntegral :: Int8 -> Int)
instance Pretty Int16 where
pp = int . (fromIntegral :: Int16 -> Int)
instance Pretty Int32 where
pp = int . (fromIntegral :: Int32 -> Int)
instance Pretty Int64 where
pp = int . (fromIntegral :: Int64 -> Int)
instance Pretty Float where
pp = (<> text "f") . float
instance Pretty Double where
pp = double
instance Pretty String where
pp = text
instance Pretty a => Pretty [a] where
pp = brackets . hcat . intersperse comma . map pp
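-- | Pretty print a list in rows of at most @width@ elements per row;
-- e.g. (illustrative) @ppList 2 [1,2,3::Int]@ renders roughly as
--
-- > [1,2
-- > ,3
-- > ]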
ppList :: Pretty a => Int -> [a] -> Doc
ppList width = vcat
. appendComma
. map (hcat . intersperse comma . map pp)
. blow'
where blow' :: [a] -> [[a]]
blow' xs = case splitAt width xs of
(px, []) -> [px]
(px, pm) -> px : blow' pm
appendComma :: [Doc] -> [Doc]
appendComma [] = []
appendComma (x:xs) = ((text "[" <> x) : map (comma <>) xs) ++ [text "]"] | Mistuke/CraftGen | Minecraft/PrettyPrinting.hs | bsd-3-clause | 2,007 | 0 | 12 | 533 | 548 | 304 | 244 | 46 | 3 |
{-# LANGUAGE GeneralizedNewtypeDeriving, FlexibleContexts, FlexibleInstances, ScopedTypeVariables #-}
module Network.EasyBitcoin.Internal.Keys
where
import Network.EasyBitcoin.Internal.Words(Word256,FieldN,FieldP,Word160)
import Network.EasyBitcoin.Internal.InstanciationHelpers
import Network.EasyBitcoin.Internal.ByteString
import Network.EasyBitcoin.Internal.CurveConstants
import Network.EasyBitcoin.Internal.HashFunctions
import Network.EasyBitcoin.NetworkParams
import Data.Binary
import Control.Applicative
import Control.Monad
import Data.Aeson
newtype PrvKey net = PrvKey FieldN deriving (Eq, Ord,Num,Enum,Real,Integral)
data PubKey net = PubKey {pubKeyPoint::Point} deriving Eq
instance Binary (PubKey net) where
get = fmap fromCompressed get
put = put.Compressed True
derivePubKey_ :: PrvKey net -> PubKey net
derivePubKey_ (PrvKey k) = PubKey $ mulPoint k curveG
addPrvKeys_ :: PrvKey net -> Word256 -> PrvKey net
addPrvKeys_ key i = key + fromIntegral i
addPubKeys_ :: PubKey net -> Word256 -> PubKey net
addPubKeys_ (PubKey pub) i = PubKey $ addPoint pub $ mulPoint (fromIntegral i) curveG
-- | Computes the key identifier of an extended private key.
xPrvID :: PrvKey net -> Word160
xPrvID = xPubID . derivePubKey_
-- | Computes the key identifier of an extended public key.
xPubID :: PubKey net -> Word160
xPubID = hash160 . hash256BS . encode' . Compressed True
------------------------------------------------------------------------------------------------------------------
data Compressed key = Compressed{ isCompressed :: Bool
, fromCompressed :: key
}
instance Show (Compressed (PubKey net)) where
show = showAsBinary
instance Read (Compressed (PubKey net)) where
readsPrec = readsPrecAsBinary
instance (BlockNetwork net) => Show (Compressed (PrvKey net)) where
show = showAsBinary58
instance (BlockNetwork net) => Read (Compressed (PrvKey net)) where
readsPrec = readsPrecAsBinary58
instance Binary (Compressed (PubKey net)) where
get = do index <- getWord8
case index of
2 -> Compressed True . PubKey <$> (compressedWith True =<< get)
3 -> Compressed True . PubKey <$> (compressedWith False =<< get)
4 -> Compressed False . PubKey <$> (makePoint <$> get <*> get)
_ -> fail $ "Get: Invalid public key encoding: "
where
compressedWith isEven x = let a = x ^ (3 :: Integer) + (curveA * x) + curveB
ys = filter ((== isEven) . even) (quadraticResidue a)
in case ys of
y:_ -> return $ makePoint x y
_ -> fail $ "No ECC point for x = " ++ (show x)
put (Compressed comp (PubKey point)) = let (x,y) = getAffine point
in case comp of
True
| even y -> putWord8 2 >> put x
| otherwise -> putWord8 3 >> put x
False -> putWord8 4 >> put x >> put y
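-- Modular square roots computed via the exponent (p+1)/4; this shortcut is
-- only valid because the curve prime satisfies p `mod` 4 == 3, which holds
-- for Bitcoin's secp256k1 parameters.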
quadraticResidue :: FieldP -> [FieldP]
quadraticResidue x = guard (y^(2 :: Int) == x) >> [y, (-y)]
where
q = (curveP + 1) `div` 4
y = x^q
instance (BlockNetwork net) => Binary (Compressed (PrvKey net)) where
get = get_
where
get_ :: forall x. (BlockNetwork x) => Get (Compressed (PrvKey x))
get_ = let params = valuesOf:: Params x
in getPriv (wifFormat params)
put = put_
where
put_ :: forall x. (BlockNetwork x) => Compressed (PrvKey x) -> Put
put_ = let params = valuesOf:: Params x
in putPriv (wifFormat params)
---- wifFormatMainNet
---- wifFormatTestNet3
getPriv prefix = do mark <- getWord8
payload <- fromIntegral <$> (get::Get Word256)
compressed <- (getWord8 >>= (guard.(==0x01)) >> return True ) <|> (return False)
guard (mark == prefix)
return (Compressed compressed$PrvKey payload)
putPriv prefix (Compressed c (PrvKey k)) = case c of
True -> putWord8 prefix >> put (fromIntegral k::Word256) >> putWord8 0x01
False -> putWord8 prefix >> put (fromIntegral k::Word256)
--------------------------------------------------------------------------------------------------------------------
data Point = Point !FieldP !FieldP !FieldP deriving(Show) -- add extra field for performance
-- Use the Jacobian in the correct way... memoization of the right values for faster comparison?
-- this equal instance is wrong...
instance Eq Point where
(Point x1 y1 z1) == (Point x2 y2 z2) = a == b && c == d
where
a = x1*z2 ^ (2 :: Int)
b = x2*z1 ^ (2 :: Int)
c = y1*z2 ^ (3 :: Int)
d = y2*z1 ^ (3 :: Int)
getAffine :: Point -> (FieldP, FieldP)
getAffine (Point x y z) = (x/z ^ (2 :: Int), y/z ^ (3 :: Int))
-- create the Q point here?
-- Elliptic curve point addition
addPoint :: Point -> Point -> Point
addPoint p1@(Point x1 y1 z1) (Point x2 y2 z2) = Point x3 y3 z3
where
u1 = x1*z2 ^ (2 :: Int)
u2 = x2*z1 ^ (2 :: Int)
s1 = y1*z2 ^ (3 :: Int)
s2 = y2*z1 ^ (3 :: Int)
h = u2 - u1
r = s2 - s1
x3 = r ^ (2 :: Int) - h ^ (3 :: Int) - 2*u1*h ^ (2 :: Int)
y3 = r*(u1 * h ^ (2 :: Int) - x3) - s1 * h ^ (3 :: Int)
z3 = h * z1 * z2
-- Elliptic curve point doubling
doublePoint :: Point -> Point
doublePoint (Point x y z) = Point x' y' z'
where
s = 4*x*y ^ (2 :: Int)
m = 3*x ^ (2 :: Int) + curveA * z ^ (4 :: Int)
x' = m ^ (2 :: Int) - 2*s
y' = m*(s - x') - 8*y ^ (4 :: Int)
z' = 2*y*z
mulPoint :: FieldN -> Point -> Point
mulPoint 0 p = error "please change this!!"-- p -- this is not correct...
mulPoint 1 p = p
mulPoint n p | odd n = addPoint p (mulPoint (n-1) p)
| otherwise = mulPoint (n `div` 2) (doublePoint p)
-------------------------------------------------------------------------------------
-------------------------------------------------------------------------------------
-- change the name
curveG :: Point
curveG = makePoint (fromInteger $ fst pairG)
(fromInteger $ snd pairG)
curveA :: FieldP
curveA = fromInteger integerA
curveB :: FieldP
curveB = fromInteger integerB
----------------------------------------------------------------------------------------
----------------------------------------------------------------------------------------
makePoint :: FieldP -> FieldP -> Point
makePoint x y = Point x y 1 | vwwv/easy-bitcoin | Network/EasyBitcoin/Internal/Keys.hs | bsd-3-clause | 7,194 | 2 | 15 | 2,334 | 2,169 | 1,149 | 1,020 | 123 | 2 |
{-# LANGUAGE TypeOperators #-}
-- | Module for rays and ray calculations
module Ray (
Depth
, Ray (..)
, intersectB
, intersectWorld
, intersectP
, intersectLights
, phongShader
) where
import World
import Vector
import qualified Data.Array.Repa as R
import Control.Monad.State
-- |
type Depth = Int
-- | Data type for creating rays
data Ray = Ray {
dir :: DoubleVector
,point :: DoubleVector
}
deriving (Show)
-- | Should return the first object that was intersected and the intersection
-- point
intersectWorld :: Ray -> World -> IO (Maybe (Object, DoubleVector))
intersectWorld ray@Ray{dir = d, point= o} w'@World{items = w} = do
let r' = Ray{ dir = normalize d, point = o }
objs <- filterM (\x -> intersectB r' x) w
case objs of
[] -> return Nothing
_ -> do
intp <- mapM (intersectP r') objs
let index = findShortest o intp
return $ Just (objs !! index , intp !! index)
-- | Calculates shadow rays and returns a combined color of the visible lights
-- and an intensity factor for how much light the point is exposed to
intersectLights :: DoubleVector -> DoubleVector -> DoubleVector -> World
-> Double -> IO(Double, Color)
intersectLights cP hitp norm w@World{lights = []} _ = return (0,(0,0,0))
intersectLights cP hitp norm w@World{items = o, lights = (l:ls)} shin= do
res <- intersectLight cP hitp norm w l shin
res2 <-intersectLights cP hitp norm (World{items = o , lights = ls}) shin
let fres =(((fst res)+fst(res2)) /2.0,(cadd (snd res)(snd res2)))
return $ fres
-- Calculates the shadowray for one specific light and returns the color of the
-- light and the intensity
intersectLight ::DoubleVector -> DoubleVector -> DoubleVector -> World -> Light
-> Double-> IO(Double, Color)
intersectLight
cPos hitp norm (w@World{items = o}) l@Light{lpos = pos, lcolor=lc} shin = do
let directionToL = R.computeUnboxedS $ ( R.zipWith (-) pos hitp )
let cPos2htp = R.computeUnboxedS $ ( R.zipWith (-) cPos hitp)
let dir'= normalize directionToL
obj <- intersectWorld Ray{point = hitp, dir = dir'} w
case obj of
Nothing -> do
let halfDir' = fun directionToL cPos2htp
specang1' <- dotProd halfDir' norm
let temp' = (maximum [specang1', 0])**shin
return(temp',cmul lc temp')
Just (obj,hitpoint) -> do
let llenght = vLength directionToL
let olenght = (vLength $ fun2 hitp hitpoint)
case llenght > olenght of
True -> return (0.0, (0,0,0))
False -> do
let halfDir = fun directionToL cPos2htp
specang1 <- dotProd halfDir norm
let temp = (maximum [specang1, 0])**shin
return(temp,cmul lc temp)
where fun a b = (normalize $ R.computeUnboxedS $ R.zipWith (+) a b)
fun2 a b = ( R.computeUnboxedS $ R.zipWith (-) a b)
-- | Intersection Tests
-- | Intersection test between a ray and an object; returns the hit point.
-- Expects that `intersectB` has been run first so no complex solutions arise.
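-- For the sphere case, with a normalized direction d, origin o, center c and
-- radius r, the hit distances follow the standard quadratic form
-- t = -(d . (o - c)) +- sqrt((d . (o - c))^2 - |o - c|^2 + r^2).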
intersectP :: Ray -> Object -> IO DoubleVector
intersectP
ray@Ray{dir=d , point=o} obj@Object{shape=s@Sphere{spos=c, radius = r}} = do
let d' = normalize d
loc <- (dotProd d' $ R.computeUnboxedS $ R.zipWith (-) o c)
let p = - loc
let q1 = sqrt ((loc*loc) - ((dist o c)*(dist o c)) + (r*r))
let q2 = -(sqrt $ (loc*loc) - ((dist o c)*(dist o c)) + (r*r))
case ((p + q1) > (p + q2)) of
True -> do
return $ R.computeUnboxedS $ R.map ((p+q2)*) d'
False ->do
return $ R.computeUnboxedS $ R.map ((p+q1)*) d'
intersectP
ray@Ray{dir=d , point=o} obj@Object{shape=s@Plane{ppos=c, pnormal = n}} = do
let d' = normalize d
denum <- dotProd d' n
let sub = R.computeUnboxedS $ R.zipWith (-) c o
l' <- dotProd sub n
return $ R.computeUnboxedS $ R.map ((l'/denum)*) d'
-- | intersectB performs an intersection test and returns whether the object
-- is hit or not
intersectB :: Ray -> Object -> IO Bool
intersectB
ray@Ray{dir=d , point=o} obj@Object{shape = s@Sphere{spos=c, radius = r}}=do
let sub' = R.computeUnboxedS $ R.zipWith (-) o c
let d' = normalize d
s1' <- (dotProd d' sub')
case s1' < 0 of
False -> return False
True -> do
d1 <- dotProd sub' sub'
let sndcheck = d1 - (r*r)
case sndcheck > 0 of
False -> return False
True -> do
let s1 = s1'*s1'
let s2 = (vLength sub')*(vLength sub')
case (s1-s2 + (r*r)) > 0 of
False -> return False
True -> return True
intersectB
ray@Ray{dir=d , point=o} obj@Object{shape = s@Plane{ppos=c, pnormal = n}}=do
let d' = normalize d
s1 <- (dotProd d' n)
case s1 /= 0 of
False -> return False
True -> do
let sub = R.computeUnboxedS $ R.zipWith (-) c o
l' <- dotProd sub n
case (l'/s1) > 0 of
False -> return False
True -> return True
-- | Test function to run a "standard" Phong shader; gives a cartoonish result
-- with bad performance, only used for testing and reference imagery
phongShader :: Ray -> World -> Color -> DoubleVector -> Double -> IO Color
phongShader ray@Ray{dir=d, point=p}
w@World{lights=(l@Light{lpos = lpos1, lcolor = col}:ls)}
(r,g,b) norm shadow = do
let ambient = ( r*0.1,g*0.1,b*0.1)
let specular = col
let lightDir = R.computeUnboxedS $ R.zipWith (-) lpos1 p
lamb1 <- dotProd lightDir norm
let labertian = dmax lamb1 0.0
case labertian > 0.0 of
False -> return ambient
True -> do
let halfDir=normalize $ R.computeUnboxedS $ R.zipWith (-) lightDir d
specang1 <- dotProd halfDir norm
let specAngle = dmax specang1 0.0
let specular = specAngle ** 16.0
return $ (cadd (cmul(cadd (cmul (r,g,b) labertian)
(cmul col specular)) shadow) ambient )
-- | Convenience function ported from GLSL
dmax :: Double -> Double -> Double
dmax d1 d2 | d1 > d2 = d1
| otherwise = d2
-- | Convenience function ported from HLSL
saturate :: Double -> Double
saturate d | d >= 1.0 = 1.0
| d <= 0.0 = 0.0
| otherwise = d | axhav/AFPLAB3 | Ray.hs | bsd-3-clause | 6,698 | 0 | 23 | 2,161 | 2,465 | 1,279 | 1,186 | 138 | 6 |
{-# LANGUAGE GADTs, EmptyDataDecls, MultiParamTypeClasses, TypeFamilies #-}
{-# LANGUAGE DeriveDataTypeable, StandaloneDeriving #-}
{-# LANGUAGE DeriveGeneric #-}
module Language.Nextgen.Syntax where
import Data.Data (Data)
import Data.Typeable (Typeable, Typeable3)
import Data.Generics (Generic)
import Data.Generics.Uniplate.Direct
data HasHoles
data Complete
deriving instance Typeable HasHoles
deriving instance Data HasHoles
--deriving instance Generic HasHoles
deriving instance Typeable Complete
deriving instance Data Complete
--deriving instance Generic Complete
type family Holes a b :: *
canHaveHolesT :: a -> b -> Holes a b
canHaveHolesT _ _ = undefined
type instance Holes HasHoles Complete = HasHoles
type instance Holes Complete HasHoles = HasHoles
type instance Holes HasHoles HasHoles = HasHoles
type instance Holes Complete Complete = Complete
data EInt
data EBool
deriving instance Typeable EInt
deriving instance Data EInt
deriving instance Typeable EBool
deriving instance Data EBool
data Expression k t a where
EQuote :: a -> String -> Expression HasHoles EInt a
IntLit :: a -> Int -> Expression Complete EInt a
BoolLit :: a -> Bool -> Expression EBool Complete a
EArith :: a -> ArithOp -> Expression k1 EInt a -> Expression k2 EInt a ->
Expression (Holes k1 k2) EInt a
-- EComp :: a -> CompOp -> Expression a EInt k1 -> Expression a EInt k2 ->
-- Expression a EBool (Holes k1 k2)
-- EBool :: a -> BoolOp -> Expression a EBool k1 -> Expression a EBool k2 ->
-- Expression a EBool (Holes k1 k2)
-- VarRef :: a -> String -> Expression a EInt Complete
instance Uniplate (Expression k t a) where
uniplate (EQuote a q) = plate EQuote |- a |- q
uniplate (IntLit a i) = plate IntLit |- a |- i
uniplate (BoolLit a b) = plate BoolLit |- a |- b
uniplate (EArith a op e1 e2) = plate EArith |- a |- op |* e1 |* e2
--deriving instance Typeable3 Expression
--deriving instance Data (Expression a t k)
--deriving instance Generic (Expression EInt Complete)
-- data Statement a k where
-- SAssign :: a -> Var a k1 -> Expression a t k2 -> Statement a (Holes k1 k2)
-- SIf :: a -> Expression a EBool k1 -> Statement a k2 -> Statement a k3 ->
-- Statement a (Holes (Holes k1 k2) k3)
-- SWhile :: a -> Expression a EBool k1 -> Statement a k2 ->
-- Statement a (Holes k1 k2)
data Var a k where
Var :: a -> String -> Var a Complete
VarQuote :: a -> String -> Var a HasHoles
data ArithOp = OpAdd
| OpSub
| OpMul
| OpDiv
data CompOp = OpEq
| OpNEq
| OpLEq
| OpL
| OpGEq
| OpG
data BoolOp = OpAnd
| OpOr
| OpImply | achudnov/language-nextgen | Language/Nextgen/Syntax.hs | bsd-3-clause | 2,858 | 0 | 12 | 791 | 576 | 320 | 256 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Ivory.Tower.HAL.Bus.CAN.Sched
( CANTask()
, canTask
, canScheduler
) where
import Control.Monad (forM, forM_)
import Data.Bits (shiftL)
import Ivory.Language
import Ivory.Stdlib
import Ivory.Tower
import Ivory.Tower.HAL.Bus.CAN
import Ivory.Tower.HAL.Bus.Interface
[ivory|
struct can_transmit_result
{ task_idx :: Stored Uint8
; task_success :: Stored IBool
}
|]
-- | One step in inserting a new task into a priority-sorted array. If
-- the new task should go before the existing task, then exchange the
-- two; afterward, the existing task becomes a "new" task to insert
-- before the next element in the array.
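--
-- For example (illustrative): folding this step while inserting priority 5
-- into the sorted priorities [3, 7, maxBound] leaves 3 in place, writes 5
-- over the slot holding 7 and carries 7 forward into the sentinel slot,
-- giving [3, 5, 7].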
shiftUp :: Def ('[ Ref s0 ('Stored Uint8)
, Ref s1 ('Stored Uint32), Ref s2 ('Stored Uint8)
, Ref s3 ('Stored Uint32), Ref s4 ('Stored Uint8)
] ':-> IBool)
shiftUp = proc "shift_task_up" $ \ insert_position new_prio new_task current_prio current_task -> body $ do
new <- deref new_prio
when (new ==? maxBound) $ ret true
current <- deref current_prio
assert (new /=? current)
ifte_ (new >? current) (insert_position %= (+ 1)) $ do
temp_task <- deref current_task
refCopy current_prio new_prio
refCopy current_task new_task
store new_prio current
store new_task temp_task
ret false
-- | One step in removing an old task from a sorted array. When we reach
-- the target element of the array, pull the next element down to this
-- one, overwriting it; afterward, the new deletion target is the
-- now-duplicated task that we just pulled forward.
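--
-- For example (illustrative): removing task B from [A, B, C, sentinel]
-- copies C over B's slot, then targets the duplicated C, which is in turn
-- overwritten by the sentinel, giving [A, C, sentinel].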
shiftDown :: Def ('[ Ref s0 ('Stored Uint8)
, Ref s1 ('Stored Uint32), Ref s2 ('Stored Uint8)
, ConstRef s3 ('Stored Uint32), ConstRef s4 ('Stored Uint8)
] ':-> IBool)
shiftDown = proc "shift_task_down" $ \ target_ref current_prio current_task next_prio next_task -> body $ do
target <- deref target_ref
when (target ==? maxBound) $ ret true
current <- deref current_task
when (current ==? target) $ do
refCopy current_prio next_prio
refCopy current_task next_task
comment "We just duplicated the next task, so arrange to delete that next."
refCopy target_ref next_task
ret false
-- | These definitions do not depend on the number of mailboxes or
-- tasks, so factor them into a separate module to reduce code
-- duplication.
schedulerHelperModule :: Module
schedulerHelperModule = package "can_scheduler_helper" $ do
defStruct (Proxy :: Proxy "can_transmit_result")
incl shiftUp
incl shiftDown
-- | Pass this data to 'canScheduler' to tie the corresponding client to
-- a given collection of multiplexed hardware mailboxes.
data CANTask = CANTask
{ canTaskReq :: ChanOutput ('Struct "can_message")
, canTaskRes :: ChanInput ('Stored IBool)
, canTaskAbortReq :: ChanOutput ('Stored IBool)
}
-- | Construct a virtual CAN transmit mailbox that a client can use as
-- if it had sole ownership of the bus. The returned 'CANTask' must be
-- passed to an instance of 'canScheduler'; otherwise, the virtual
-- mailbox will discard requests sent to it and will never complete
-- them.
canTask :: Tower e (CANTask, AbortableTransmit ('Struct "can_message") ('Stored IBool))
canTask = do
(abortableTransmit, canTaskReq) <- channel
(canTaskRes, abortableComplete) <- channel
(abortableAbort, canTaskAbortReq) <- channel
return (CANTask { .. }, AbortableTransmit { .. })
-- | Multiplex a collection of CAN transmit tasks onto a collection of
-- hardware transmit mailboxes. The transmit mailboxes must ensure that
-- the highest-priority message queued on any of them is sent first.
canScheduler :: [AbortableTransmit ('Struct "can_message") ('Stored IBool)]
-> [CANTask]
-> Tower e ()
canScheduler mailboxes tasks = do
(doTaskComplete, taskCompleteChan) <- channel
(doTaskAbort, taskAbortChan) <- channel
towerModule schedulerHelperModule
towerDepends schedulerHelperModule
monitor "can_scheduler" $ do
-- Maintain a priority queue of tasks. The priority is stored in
-- state variables in prio_queue, and the corresponding task index
-- in task_queue. We use linear-time insertion in sorted order to
-- maintain the priority queue invariants, rather than a more
-- complex data structure like a heap.
--
-- The state variables are allocated in separate groups to minimize
-- padding waste, since task-ID is 8-bit and priority is 32-bit.
--
-- The highest-priority tasks are those which *should* be in the
-- hardware mailboxes right now. However, since we have to wait for
-- completion notifications after hardware aborts, the current
-- contents of each mailbox can lag behind the current set of
-- highest priority tasks.
--
-- The sentinels are global only because we place references to them
-- in a list that also contains references to the queues themselves,
-- and Ivory's embedding in Haskell's type system requires the list
-- elements to all have the same scope.
prio_queue <- forM (zipWith const [(0 :: Int) ..] tasks) $ \ idx -> do
stateInit ("prio_" ++ show idx) $ ival maxBound
sentinel_prio <- fmap constRef $ stateInit "prio_sentinel" $ ival maxBound
task_queue <- forM (zipWith const [(0 :: Int) ..] tasks) $ \ idx -> do
stateInit ("task_" ++ show idx) $ ival maxBound
sentinel_task <- fmap constRef $ stateInit "task_sentinel" $ ival maxBound
-- For each mailbox, we need to track which task is actually in the
-- mailbox right now. That's the task to notify when that mailbox
-- reports completion. One extra value (maxBound) is reserved to
-- indicate that the mailbox is currently empty.
mbox_states <- forM (zip [0..] mailboxes) $ \ (idx, mbox) -> do
current <- stateInit ("current_task_in_" ++ show idx) $ ival maxBound
return (idx, mbox, current)
-- If we queue up two aborts to the same mailbox, then there could
-- be an interleaving of threads with this sequence:
-- 1. Mailbox processes abort #1.
-- 2. Hardware reports completion.
-- 3. We queue the next request on this mailbox.
-- 4. Mailbox processes abort #2, aborting the wrong request.
-- We don't want to spuriously abort requests, so keep a flag per
-- mailbox to record whether we already have a pending abort there.
mbox_aborting <- stateInit "mbox_aborting" (izero :: Init ('Stored Uint8))
task_states <- forM (zip [0..] tasks) $ \ (idx, task) -> do
-- We buffer one request from each task. They aren't allowed to
-- send another until we send them a completion notification,
-- although they can trigger that early by sending us an abort
-- request. A sentinel message ID (maxBound) indicates that there
-- is no pending request from this task.
last_request <- stateInit ("last_request_for_" ++ show idx) $ istruct
[ can_message_id .= ival (fromRep maxBound) ]
return (idx, task, last_request)
-- Global properties
-- - If a task ID is in some 'current' variable in 'mbox_states',
-- then we're waiting for it to complete in some hardware mailbox.
--
-- - If it's in the high-priority elements of task_queue, then the
-- task is in the "on-hardware" state.
-- - If it's in the low-priority elements of task_queue, then the
-- task is in the "reschedule" state, waiting to be moved out of
-- the mailbox to make room for an incoming higher-priority task.
-- - If it's missing from task_queue, then the task is in the
-- "abort" state, waiting for the hardware to notify us of its
-- final disposition.
--
-- - If a task ID is not in a mailbox:
--
-- - If it's in task_queue somewhere, then it's in the "schedule"
-- state, waiting for the current mailbox contents to get sent
-- so that there's an empty mailbox to put this task into.
-- - Otherwise the task is idle. Its "last_request" variable must
-- have a can_message_id of 'maxBound'.
--
-- See sched.dot in this directory for a state machine graph
-- describing the legal transitions between the above five states.
-- Procedures for manipulating the priority queue:
-- Return whether the given task is currently in the task queue.
-- Used to determine whether a task which completed unsuccessfully
-- (which can only happen if we abort its mailbox) is in
-- "reschedule" or "abort" state.
let isTaskQueued = proc "is_task_queued" $ \ task -> body $ do
forM_ task_queue $ \ current_task -> do
current <- deref current_task
when (current ==? maxBound) $ ret false
when (current ==? task) $ ret true
ret false
-- Return whether the given task is currently in a hardware mailbox.
-- Used in nextTask to determine whether a high-priority task is in
-- "on-hardware" or "schedule" state.
let isTaskCurrent = proc "is_task_current" $ \ task -> body $ do
forM_ mbox_states $ \ (_, _, current) -> do
current_task <- deref current
when (task ==? current_task) $ ret true
ret false
-- Return a reference to the given task's current request. The task
-- ID must be a valid index in task_states.
let getTaskRequest = proc "get_task_request" $ \ task -> body $ do
let ((last_idx, _, last_task) : ts) = reverse task_states
forM_ (reverse ts) $ \ (idx, _, last_request) -> do
when (task ==? fromInteger idx) $ ret (constRef last_request)
assert (task ==? fromInteger last_idx)
ret (constRef last_task)
-- Select the highest-priority task which is not already in a
-- hardware mailbox, or return 'maxBound' if there is no such task.
-- Used when a mailbox reports completion so we're ready to place a
-- new request in it.
--
-- It doesn't make sense to choose a task which would be in the
-- "reschedule" state immediately, so we only look at the tasks
-- which are in the high-priority portion of the queue. Taking one
-- of those tasks from not-current to current implies a transition
-- from "schedule" to "on-hardware" state.
let nextTask = proc "next_task" $ body $ do
forM_ (zipWith const task_queue mbox_states) $ \ task -> do
target_task <- deref task
comment "Stop at the end of the list."
when (target_task ==? maxBound) $ ret maxBound
comment "Skip tasks that are already on the hardware."
is_current <- call isTaskCurrent target_task
unless is_current $ ret target_task
ret maxBound
-- Add a task to the task_queue, possibly returning a reschedule
-- request for some lower-priority task which should be bounced out
-- of a hardware mailbox, or a schedule request to place this task
-- into a currently-empty mailbox.
let insertTask :: Def ('[ Uint8
, Ref s0 ('Stored Uint8)
, Ref s1 ('Stored Uint8)
, Ref s2 ('Struct "can_message")
, ConstRef s3 ('Struct "can_message")
] ':-> IBool)
insertTask = proc "insert_task" $ \ task resched_task resched_mbox last_request req -> body $ do
comment "Task must not have an outstanding request already."
last_id <- deref $ last_request ~> can_message_id
assert (toRep last_id ==? maxBound)
comment "Save this request until we can deliver it."
refCopy last_request req
insert_position_ref <- local (izero :: Init ('Stored Uint8))
new_prio <- local =<< do
req_id <- deref $ req ~> can_message_id
return $ ival $ toRep req_id
new_task <- local $ ival task
let checkPlace (current_prio, current_task) next = do
done <- call shiftUp insert_position_ref new_prio new_task current_prio current_task
unless done next
foldr checkPlace (return ()) $ zip prio_queue task_queue
comment "Check if we overflowed the queue and still have a task left to insert."
final_task <- deref new_task
assert (final_task ==? maxBound)
insert_position <- deref insert_position_ref
let positions = fromIntegral (length tasks)
let mbox_count = fromIntegral (length mailboxes)
assert (insert_position <? positions)
when (positions <=? mbox_count .|| insert_position <? mbox_count) $ do
comment "Priority is high enough to get a mailbox immediately."
bounce_task <- case drop (length mailboxes) task_queue of
[] -> do
comment "No more tasks than mailboxes, so there must be a free mailbox."
return maxBound
bounce_task : _ -> do
comment "Reschedule the task that we just shoved out of the high-priority group."
deref bounce_task
when (bounce_task ==? maxBound) $ do
comment "Put this new task in a free mailbox."
store resched_task task
conds <- forM mbox_states $ \ (mbox_idx, _, current) -> do
pending_task <- deref current
return
(pending_task ==? bounce_task ==> do
store resched_mbox (fromInteger mbox_idx)
ret true
)
cond_ conds
ret false
-- Remove a task from task_queue, either because it has completed,
-- or because its client has requested to abort it.
let removeTask = proc "remove_task" $ \ initial_task -> body $ do
target <- local $ ival initial_task
let current_queue = zip prio_queue task_queue
let next_queue = [ (constRef prio, constRef task) | (prio, task) <- drop 1 current_queue ] ++ [(sentinel_prio, sentinel_task)]
let checkPlace ((current_prio, current_task), (next_prio, next_task)) next = do
done <- call shiftDown target current_prio current_task next_prio next_task
unless done next
foldr checkPlace (return ()) (zip current_queue next_queue)
final_task <- deref target
when (initial_task ==? final_task) $ do
comment "Task not found, hopefully because it was previously aborted."
ret false
comment "Task found; check that we reached the end of the list."
assert (final_task ==? maxBound)
ret true
monitorModuleDef $ do
incl getTaskRequest
incl isTaskQueued
incl nextTask
incl insertTask
incl removeTask
private $ do
incl isTaskCurrent
-- Channel handlers:
forM_ mbox_states $ \ (idx, mbox, current) -> do
-- Handle a transmit-complete event. On entry, the task must be in
-- "on-hardware", "reschedule", or "abort" states. On exit, the
-- task will be "idle" if it completed successfully, or either
-- "schedule" or "idle" if unsuccessful. In addition, if any other
-- task was in "schedule" state, on exit the highest-priority of
-- those will be in "on-hardware" state.
--
-- We can't check the state precondition at runtime because the
-- response from the driver doesn't indicate which message
-- completed, only the status of the given mailbox. Instead we
-- assume that we've maintained the invariants on our "current"
-- state variables.
handler (abortableComplete mbox) ("mailbox_" ++ show idx ++ "_complete") $ do
taskComplete <- emitter doTaskComplete 1
sendReq <- emitter (abortableTransmit mbox) 1
callbackV $ \ success -> do
current_task <- deref current
assert (current_task /=? maxBound)
res <- fmap constRef $ local $ istruct
[ task_idx .= ival current_task
, task_success .= ival success
]
ifte_ success
(do
comment "On success, always report back to the task."
call_ removeTask current_task
emit taskComplete res
) (do
comment "On failure: did the task abort, or are we rescheduling?"
still_queued <- call isTaskQueued current_task
unless still_queued $ do
comment "Task aborted and is no longer queued. Complete it."
emit taskComplete res
)
store current maxBound
let bit = fromInteger (1 `shiftL` fromInteger idx)
already_aborting <- deref mbox_aborting
store mbox_aborting (already_aborting .& iComplement bit)
next <- call nextTask
when (next /=? maxBound) $ do
req <- call getTaskRequest next
store current next
emit sendReq req
-- Handle a taskAbort event. On entry, the task may be in any state,
-- including abort or idle. On exit, if the task was current, then
-- its new state is "abort"; otherwise, it is "idle".
handler taskAbortChan "task_abort" $ do
emitters <- forM mbox_states $ \ (idx, mbox, current) -> do
e <- emitter (abortableAbort mbox) 1
return (idx, current, e)
taskComplete <- emitter doTaskComplete 1
callbackV $ \ task -> do
removed <- call removeTask task
when removed $ do
-- If this task is current in some mailbox, abort that mailbox.
abort_msg <- fmap constRef $ local $ ival true
already_aborting <- deref mbox_aborting
current_conds <- forM emitters $ \ (idx, current, e) -> do
current_task <- deref current
return $ (task ==? current_task ==>) $ do
let bit = fromInteger (1 `shiftL` fromInteger idx)
when ((already_aborting .& bit) ==? 0) $ do
store mbox_aborting (already_aborting .| bit)
emit e abort_msg
-- Otherwise, we hadn't handed the task off to the hardware yet,
-- so we can immediately report that it wasn't sent, without
-- waiting for a hardware abort.
cond_ $ current_conds ++ [ true ==> do
res <- fmap constRef $ local $ istruct
[ task_idx .= ival task
, task_success .= ival false
]
emit taskComplete res
]
forM_ task_states $ \ (idx, task, last_request) -> do
-- Handle a taskRequest event. On entry, the task must be in
-- "idle" state. On exit, it will either be in "schedule" or
-- "on-hardware" states, according to whether there's a free
-- mailbox available.
handler (canTaskReq task) ("task_" ++ show idx ++ "_request") $ do
emitters <- forM mbox_states $ \ (mbox_idx, mbox, current) -> do
sendReq <- emitter (abortableTransmit mbox) 1
abort <- emitter (abortableAbort mbox) 1
return (mbox_idx, current, sendReq, abort)
callback $ \ req -> do
resched_task <- local $ ival maxBound
resched_mbox <- local $ ival maxBound
needs_resched <- call insertTask (fromInteger idx) resched_task resched_mbox last_request req
when needs_resched $ do
target_task <- deref resched_task
mailbox <- deref resched_mbox
ifte_ (target_task ==? maxBound)
(do
abortReq <- fmap constRef $ local $ ival true
already_aborting <- deref mbox_aborting
cond_
[ mailbox ==? fromInteger mbox_idx ==> do
let bit = fromInteger (1 `shiftL` fromInteger mbox_idx)
when ((already_aborting .& bit) ==? 0) $ do
store mbox_aborting (already_aborting .| bit)
emit abort abortReq
| (mbox_idx, _, _, abort) <- emitters
]
) (do
taskReq <- call getTaskRequest target_task
cond_
[ mailbox ==? fromInteger mbox_idx ==> do
store current target_task
emit sendReq taskReq
| (mbox_idx, current, sendReq, _) <- emitters
]
)
-- Delegate taskAbort events to the common task abort handler,
-- above. This only adds the internal task ID to the request.
-- Note: aborts can race with completion, and this handler
-- delegates to another handler, which ends this critical section.
-- So this handler should not modify any monitor state; the common
-- abort handler is atomic with respect to other task aborts and
-- any completions.
handler (canTaskAbortReq task) ("task_" ++ show idx ++ "_abort") $ do
taskAbort <- emitter doTaskAbort 1
callback $ const $ emitV taskAbort $ fromInteger idx
-- Deliver a task-complete notification to its client, and record
-- that this task is now allowed to submit another request. This
-- must be triggered when and only when the task has just
-- transitioned to the "idle" state from any other state.
--
-- However, it doesn't need to run immediately in the same critical
-- section as that state transition, because: (1) further task abort
-- requests will no-op in the "idle" state; (2) a well-behaved
-- client won't send another request until this handler emits its
-- completion notice; (3) a poorly-behaved client will trigger an
-- assert in its canTaskReq handler.
--
-- So this code is factored out to a separate handler because it's
-- safe to do so and this reduces code duplication elsewhere.
handler taskCompleteChan "task_complete" $ do
emitters <- forM task_states $ \ (idx, task, last_request) -> do
e <- emitter (canTaskRes task) 1
return (idx, e, last_request)
callback $ \ res -> do
task <- deref $ res ~> task_idx
assert (task <? fromIntegral (length tasks))
cond_
[ task ==? fromInteger idx ==> do
store (last_request ~> can_message_id) $ fromRep maxBound
emit e $ res ~> task_success
| (idx, e, last_request) <- emitters
]
| GaloisInc/tower | tower-hal/src/Ivory/Tower/HAL/Bus/CAN/Sched.hs | bsd-3-clause | 22,670 | 18 | 43 | 6,524 | 4,117 | 2,057 | 2,060 | 285 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module NML.Attributes (
AdaptationFunction, adaptationFunction,
Encoding, encoding, mkEncoding,
noReturnTraffic,
Label, label,
) where
import Control.Monad
import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.Maybe (MaybeT(..), runMaybeT)
import Data.Maybe (listToMaybe, fromMaybe)
import Data.Text
import NML.Query
import NML.RDFRep
import NML.Types
import Util
newtype AdaptationFunction = AdaptationFunction { unpackAdaptationFunction :: GUID } deriving (Eq, Ord, Show)
-- | get the 'AdaptationFunction' for a 'NetworkObject' if it has one
adaptationFunction :: NetworkObject -> NMLReader (Maybe AdaptationFunction)
adaptationFunction n = case n of
(Service Deadaptation _) -> q n
(Service Adaptation _) -> q n
_ -> return Nothing
where
q = queryAttrib "nml:adaptationFunction" AdaptationFunction
newtype Encoding = Encoding { unpackEncoding :: GUID } deriving (Eq, Show)
-- | get the 'Encoding' for a 'NetworkObject' if it has one
encoding :: NetworkObject -> NMLReader (Maybe Encoding)
encoding n = case n of
(Single Link _) -> q n
(Single Port _) -> q n
(Group LinkGroup _) -> q n
(Group PortGroup _) -> q n
(Service Switch _) -> q n
_ -> return Nothing
where
q = queryAttrib "nml:encoding" Encoding
mkEncoding :: GUID -> Encoding
mkEncoding = Encoding
-- | get the 'noReturnTraffic' for a 'NetworkObject' if it has one or
-- False if it doesn't
noReturnTraffic :: NetworkObject -> NMLReader Bool
noReturnTraffic n = case n of
(Single Link _) -> liftM (fromMaybe False) (q n)
_ -> return False
where
q = queryAttrib "nml:noReturnTraffic" (=="true")
type LabelType = GUID
type LabelValue = Text
-- | Label is implemented as an attribute here to simplify the implementation;
-- it is important to note that NML specifies it as a class on equal footing
-- with e.g. NetworkObject
newtype Label = Label { unpackLabel :: (GUID, LabelType, LabelValue) } deriving (Ord, Show)
instance Eq Label where
a == b = let (_, ta, va) = unpackLabel a
(_, tb, vb) = unpackLabel b
in (ta == tb) && (va == vb)
label :: NetworkObject -> NMLReader (Maybe Label)
label n = case n of
(Single Link _) -> q n
(Single Port _) -> q n
_ -> return Nothing
where
q n = runMaybeT $
labelGuid n >>= \guid ->
labelType guid >>= \type' ->
labelValue guid >>= \value ->
maybeZero $ Just $ Label $ (guid, type', value)
labelGuid :: NetworkObject -> MaybeT NMLReader GUID
labelGuid n = MaybeT $ liftM listToMaybe $ baseQueryWrap (id,toRDF,id)
(return . unpackUNode) Object (Just $ toRDF n) (Just "nml:hasLabel" :: Maybe Text) Nothing
labelType :: GUID -> MaybeT NMLReader LabelType
labelType = MaybeT . queryAttrib "nml:type" id
labelValue :: GUID -> MaybeT NMLReader LabelValue
labelValue = MaybeT . queryAttrib "nml:value" id
-- | utility function to simplify attribute queries
queryAttrib :: (RDFRep r) => Text -> (Text -> a) -> r -> NMLReader (Maybe a)
queryAttrib attrib wrap n = liftM listToMaybe $
baseQueryWrap (id,toRDF,id) pack Object (Just $ toRDF n)
(Just attrib)
Nothing
where
pack = return . wrap . unpackUNode
| maertsen/netPropCheck | NML/Attributes.hs | bsd-3-clause | 3,775 | 0 | 15 | 1,223 | 1,003 | 535 | 468 | 69 | 6 |
-- |
-- Module : Data.ASN1.Serialize
-- License : BSD-style
-- Maintainer : Vincent Hanquez <[email protected]>
-- Stability : experimental
-- Portability : unknown
--
module Data.ASN1.Serialize (getHeader, putHeader) where
import qualified Data.ByteString as B
import Data.ASN1.Get
import Data.ASN1.Internal
import Data.ASN1.Types
import Data.ASN1.Types.Lowlevel
import Data.Bits
import Data.Word
import Control.Applicative ((<$>))
import Control.Monad
-- | parse an ASN1 header
getHeader :: Get ASN1Header
getHeader = do
(cl,pc,t1) <- parseFirstWord <$> getWord8
tag <- if t1 == 0x1f then getTagLong else return t1
len <- getLength
return $ ASN1Header cl tag pc len
-- | Parse the first word of a header
parseFirstWord :: Word8 -> (ASN1Class, Bool, ASN1Tag)
parseFirstWord w = (cl,pc,t1)
where cl = toEnum $ fromIntegral $ (w `shiftR` 6)
pc = testBit w 5
t1 = fromIntegral (w .&. 0x1f)
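-- For example (illustrative): the identifier octet 0x30 of a DER SEQUENCE
-- decodes to (Universal, True, 0x10); the class sits in bits 7-6, the
-- constructed flag in bit 5 and the tag number in bits 4-0.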
{- when the first tag is 0x1f, the tag is in long form, where
- we get bytes while the 7th bit is set. -}
getTagLong :: Get ASN1Tag
getTagLong = do
t <- fromIntegral <$> getWord8
when (t == 0x80) $ error "not canonical encoding of tag"
if testBit t 7
then loop (clearBit t 7)
else return t
where loop n = do
t <- fromIntegral <$> getWord8
if testBit t 7
then loop (n `shiftL` 7 + clearBit t 7)
else return (n `shiftL` 7 + t)
{- get the asn1 length which is either short form if 7th bit is not set,
 - indefinite form if the 7th bit is set and every other bit is clear,
- or long form otherwise, where the next bytes will represent the length
-}
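-- For example (illustrative): the bytes 0x82 0x01 0x23 decode to
-- LenLong 2 0x123, a single byte 0x26 decodes to LenShort 0x26, and
-- 0x80 decodes to LenIndefinite.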
getLength :: Get ASN1Length
getLength = do
l1 <- fromIntegral <$> getWord8
if testBit l1 7
then case clearBit l1 7 of
0 -> return LenIndefinite
len -> do
lw <- getBytes len
return (LenLong len $ uintbs lw)
else
return (LenShort l1)
where
{- uintbs return the unsigned int represented by the bytes -}
uintbs = B.foldl (\acc n -> (acc `shiftL` 8) + fromIntegral n) 0
-- | putHeader encodes an ASN1 header into a marshalled value
putHeader :: ASN1Header -> B.ByteString
putHeader (ASN1Header cl tag pc len) = B.concat
[ B.singleton word1
, if tag < 0x1f then B.empty else tagBS
, lenBS]
where cli = shiftL (fromIntegral $ fromEnum cl) 6
pcval = shiftL (if pc then 0x1 else 0x0) 5
tag0 = if tag < 0x1f then fromIntegral tag else 0x1f
word1 = cli .|. pcval .|. tag0
lenBS = B.pack $ putLength len
tagBS = putVarEncodingIntegral tag
{- | putLength encodes a length into an ASN1 length.
- see getLength for the encoding rules -}
putLength :: ASN1Length -> [Word8]
putLength (LenShort i)
| i < 0 || i > 0x7f = error "putLength: short length is not between 0x0 and 0x80"
| otherwise = [fromIntegral i]
putLength (LenLong _ i)
| i < 0 = error "putLength: long length is negative"
| otherwise = lenbytes : lw
where
lw = bytesOfUInt $ fromIntegral i
lenbytes = fromIntegral (length lw .|. 0x80)
putLength (LenIndefinite) = [0x80]
| mboes/hs-asn1 | encoding/Data/ASN1/Serialize.hs | bsd-3-clause | 3,246 | 0 | 16 | 902 | 853 | 452 | 401 | 65 | 4 |
module Matrix(
modelMatrix
, cameraMatrix
, projMatrix
) where
import Linear
import qualified LambdaCube.Linear as LC
-- | Convert from linear matrix format to LambdaCube format
convLC :: M44 Float -> LC.M44F
convLC (V4 !a !b !c !d) = LC.V4 (cv a) (cv b) (cv c) (cv d)
where
cv (V4 !x !y !z !w) = LC.V4 x y z w
-- | Model matrix, maps from local model coords to world coords
modelMatrix :: Float -> LC.M44F
modelMatrix t = convLC . quatMatrix $ axisAngle (normalize $ V3 1 1 3) t
-- | Camera matrix, maps from world coords to camera coords
cameraMatrix :: Float -> LC.M44F
cameraMatrix _ = convLC $ lookAt eye (V3 0 0 0) (V3 0 1 0)
where eye = V3 5 2 5 -- rotate (axisAngle (V3 0 1 0) t) (V3 5 2 5)
-- | Projection matrix, maps from camera coords to device normalized coords
projMatrix :: Float -> LC.M44F
projMatrix !aspect = convLC $ perspective (pi/3) aspect 0.1 100
-- | Transform quaternion to rotation matrix
quatMatrix :: Quaternion Float -> M44 Float
quatMatrix q@(Quaternion !w (V3 !x !y !z)) = V4
(V4 m00 m01 m02 0)
(V4 m10 m11 m12 0)
(V4 m20 m21 m22 0)
(V4 0 0 0 1)
where
s = 2 / norm q
x2 = x * s
y2 = y * s
z2 = z * s
xx = x * x2
xy = x * y2
xz = x * z2
yy = y * y2
yz = y * z2
zz = z * z2
wx = w * x2
wy = w * y2
wz = w * z2
m00 = 1 - (yy + zz)
m10 = xy - wz
m20 = xz + wy
m01 = xy + wz
m11 = 1 - (xx + zz)
m21 = yz - wx
m02 = xz - wy
m12 = yz + wx
m22 = 1 - (xx + yy)
| Teaspot-Studio/gore-and-ash-lambdacube | examples/03/Matrix.hs | bsd-3-clause | 1,509 | 0 | 11 | 457 | 621 | 323 | 298 | -1 | -1 |
module Data.SouSiT (
-- * Sink
Sink,
Fetch,
input,
inputOr,
inputMap,
inputMaybe,
skip,
liftSink,
liftFetch,
-- * Source
Source,
transfer,
FeedSource,
SimpleSource(..),
feedToSink,
($$),
(=+=),
(=+|=),
-- * Transform
Transform,
(=$=),
(=$),
($=)
) where
import Data.SouSiT.Sink
import Data.SouSiT.Source
import Data.SouSiT.Transform
import Control.Monad.Identity
type Fetch i a = Sink i Identity a
-- | Lift the (pure) fetch sink into any monad.
liftFetch :: Monad m => Fetch i a -> Sink i m a
liftFetch = liftSink (return . runIdentity)
| msiegenthaler/SouSiT | Data/SouSiT.hs | bsd-3-clause | 638 | 0 | 7 | 181 | 173 | 112 | 61 | 29 | 1 |
module Codec.Grib.Parser (
parser
) where
import Control.Applicative ((<$>),
(<*>),
(*>),
(<*))
import Control.Monad (void)
import Data.Attoparsec.ByteString (Parser,
anyWord8,
endOfInput,
string,
take,
word8)
import Data.Attoparsec.ByteString.Utils (anyInt16,
anyWord16,
anyWord32,
anyWord64,
word16,
word32)
import Data.Attoparsec.Combinator (many1)
import Prelude hiding (take)
import Codec.Grib.Types (Grib(Grib),
Section0(Section0),
Section1(Section1),
Section3(Section3),
Section4(Section4),
Section5(Section5),
Section7(Section7))
parser :: Parser Grib
parser = Grib <$> section0 <*> section1 <*> many1 sections <* section8 <* endOfInput
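-- The top-level parser can be run with attoparsec's runners, e.g.
-- (illustrative) @parseOnly parser bytes :: Either String Grib@ for a strict
-- ByteString holding one complete GRIB message.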
section0 :: Parser Section0
section0 = header *> (Section0 <$> anyWord8 <*> anyWord64)
where
header = string "GRIB" *>
word16 0xffff *>
word8 0
section1 :: Parser Section1
section1 = word32 21 *>
word8 1 *>
word16 34 *>
word16 0 *>
word8 2 *>
word8 1 *>
word8 1 *>
(Section1 <$> anyWord16
<*> anyWord8
<*> anyWord8
<*> anyWord8
<*> anyWord8
<*> anyWord8) <*
word8 0 <*
word8 1
sections :: Parser (Section3, [(Section4, Section5, Section7)])
sections = (,) <$> section3 <*> many1 sections2
sections2 :: Parser (Section4, Section5, Section7)
sections2 = (,,) <$> section4 <*> section5 <* section6 <*> section7
section3 :: Parser Section3
section3 = word32 72 *>
word8 3 *>
word8 0 *>
anyWord32 *>
word8 0 *>
word8 0 *>
word16 0 *>
word8 6 *>
word8 0xff *>
word32 0xffffffff *>
word8 0xff *>
word32 0xffffffff *>
word8 0xff *>
word32 0xffffffff *>
(Section3 <$> anyWord32
<*> anyWord32 <* anyWord32 <* anyWord32
<*> anyWord32
<*> anyWord32 <* anyWord8
<*> anyWord32
<*> anyWord32
<*> anyWord32
<*> anyWord32) <*
anyWord8
section4 :: Parser Section4
section4 = anyWord32 >>= \len ->
word8 4 *>
word16 0 *>
anyWord16 *>
(Section4 <$> anyWord8
<*> anyWord8 <* anyWord8 <* word8 0x1f <* word8 0xff <* anyWord16 <* anyWord8 <* anyWord8
<*> anyWord32
<*> anyWord8 <* anyWord8
<*> anyWord32) <*
take (fromIntegral len - 28)
section5 :: Parser Section5
section5 = word32 21 *>
word8 5 *>
(Section5 <$> anyWord32 <* word16 0
<*> anyWord32
<*> anyInt16
<*> anyInt16
<*> anyWord8) <*
word8 0
section6 :: Parser ()
section6 = void $ word32 6 *>
word8 6 *>
word8 0xff
section7 :: Parser Section7
section7 = anyWord32 >>= \len ->
word8 7 *>
(Section7 <$> take (fromIntegral len - 5))
section8 :: Parser ()
section8 = void $ string "7777"
| snakamura/zeepaardje | Codec/Grib/Parser.hs | bsd-3-clause | 3,877 | 0 | 21 | 1,876 | 901 | 478 | 423 | 109 | 1 |
module GenTestCases where
import Data.List
-- Parsable examples
sample1 = "C1 = Int32 Int32 Int8"
sample2 = "C2 = C1 Int8"
sample3 = "C3 = Int8 | Int16 Int32"
data Info = Info String [[String]] Bool deriving (Show)
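-- Splits the token list into sum alternatives at "|" tokens, e.g. (illustrative):
-- getSums ["Int8","|","Int16","Int32"] == [["Int8"],["Int16","Int32"]]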
getSums :: [String] -> [[String]]
getSums strs = go strs [] []
where go [] [] acc = reverse acc
go [] s1 acc = reverse ((reverse s1):acc)
go ("|":ss) s1 acc = go ss [] ((reverse s1):acc)
go (s:ss) s1 acc = go ss (s:s1) acc
-- Parses the text.
getInfo :: String -> Maybe Info
getInfo str = case words str of
(('*':type_name):"=":f1:fields) -> Just $ Info type_name (getSums $ f1:fields) True
(type_name:"=":f1:fields) -> Just $ Info type_name (getSums $ f1:fields) False
[type_name] -> Just $ Info type_name [] False
otherwise -> Nothing
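-- e.g. (illustrative): getInfo sample3 == Just (Info "C3" [["Int8"],["Int16","Int32"]] False);
-- a leading '*' on the type name sets the Bool flag to True instead.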
-------------
-- Haskell --
-------------
-- name (type, is_prim)
data HSStruct = HSStruct String [[(String, Bool)]]
instance Show HSStruct where
show (HSStruct name types) = concat [fst_line, deriv_line]
where fst_line = case length types of
0 -> concat ["data ", name, " = ", name, "\n"]
1 -> concat ["data ", name, " = ", name, s_ix 0, " ", concat $ intersperse " " (map fst $ head types), "\n"]
n -> concat ["data ", name, " = ", name, s_ix 1, " ", concat $ intersperse " " (map fst $ head types), "\n"]
++ concat next_lines
next_lines = case length types of
0 -> []
1 -> []
n -> zipWith (\i t -> next_fn i $ map fst t) [2..] (tail types)
deriv_line = concat [" deriving (Show, Eq, Generic, GStorable)\n\n"]
next_fn ix fields = concat [replicate (length name + 6) ' ',"| ", name, s_ix ix, " ", concat $ intersperse " " fields, "\n"]
infoToHSStruct :: Info -> HSStruct
infoToHSStruct (Info name types _) = HSStruct name hs_types
where hs_types = map (map (\t -> (t, t `elem` hs_prims))) types
s_ix ix = if ix > 0 then '_':show ix else ""
six ix = if ix > 0 then show ix else ""
genCheckable :: HSStruct -> String
-- genCheckable (HSStruct name types) = concat [fst_line, fields_line, offsets_line, size_line, alignment_line, new_line]
genCheckable (HSStruct name types) = concat [fst_line, fields_line, offsets_lines, size_line, alignment_line, new_line]
where fst_line = concat ["instance Checkable ", name, " where\n"]
fields_line = concat [" checkFields ptr1 ptr2 = (==1) <$> checkFields",name," ptr1 ptr2\n"]
-- offsets_line = concat [" checkOffsets _ offs = (==1) <$> checkOffsets",name," offs\n"]
size_line = concat [" getSize a = fromIntegral <$> getSize",name,"\n"]
alignment_line = concat [" getAlignment a = fromIntegral <$> getAlignment",name,"\n"]
--- news
new_line' = case length types of
0 -> new_fn 0 []
1 -> new_fn 0 $ head types
n -> concat $ zipWith new_fn [1..] types
new_line = new_line' ++ "\n"
new_fn ix tps = concat [" new ",pattern_match ix tps, " = do\n", do_vars ix tps]
---
offsets_lines = case length types of
0 -> offsets_fn 0 []
1 -> offsets_fn 0 $ head types
n -> concat $ zipWith offsets_fn [1..] types
offsets_fn 0 tps = concat [" checkOffsets _ offs = (==1) <$> checkOffsets",name," offs\n"]
offsets_fn ix tps = concat [" checkOffsets ", pattern_match ix tps, " offs = (==1) <$> checkOffsets",name, s_ix ix, " offs\n"]
---
variables tps = map return $ take (length tps) ['a'..'z']
is_prs tps = map snd tps
constr_vars var ptrs = concat $ intersperse " " $ zipWith (\v is_pr -> if is_pr then v else "ptr_"++v) var ptrs
do_vars ix tps = do
let vars = variables tps
prs = is_prs tps
constants = constr_vars vars prs
the_vars = map fst $ filter (not.snd) $ zip vars prs
ptr_vars = map (\v -> "ptr_"++v) the_vars
alloc_lines = zipWith (\ptr var -> concat [" ", ptr, " <- newStorable " , var,"\n"]) ptr_vars the_vars
the_new_line= concat [" ptr <- new",name,s_ix ix, " ", constants,"\n"]
free_lines = map (\ptr -> concat [" free ",ptr,"\n"]) ptr_vars
papa_line = " return ptr\n"
concat $ [concat alloc_lines, the_new_line, concat free_lines, papa_line]
pattern_match ix tps = concat ["(",name,s_ix ix," ", concat $ intersperse " " $ variables tps, ")"]
genArbitrary :: HSStruct -> String
genArbitrary (HSStruct name []) = genArbitrary' name []
genArbitrary (HSStruct name [prod]) = genArbitrary' name prod
genArbitrary (HSStruct name types) = concat [fst_line, snd_line,"\n"]
where fst_line = concat ["instance Arbitrary ", name, " where \n"]
snd_line = concat [" arbitrary = oneof ", list_lines]
list_lines = concat $ intersperse " " fillings
fillings = ('[':' ':f):(map (\x -> ',':' ':x) illings) ++ ["]"]
(f:illings) = zipWith (\ix t -> filling ix t ++ "\n") [1..] types
filling ix tps = concat [name, s_ix ix, " <$> ", arbitraries tps]
arbitraries tps = concat $ intersperse " <*> "$ take (length tps) $ repeat "arbitrary"
genArbitrary' :: String -> [(String, Bool)] -> String
genArbitrary' name tps = concat [fst_line, snd_line, "\n"]
where fst_line = concat ["instance Arbitrary ", name, " where \n"]
snd_line = case tps of
[] -> concat [" arbitrary = return ",name, "\n"]
tps -> concat [" arbitrary = ",name, " <$> ", arbitraries, "\n"]
arbitraries = concat $ intersperse " <*> "$ take (length tps) $ repeat "arbitrary"
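-- For orientation (illustrative only, not from the original file): for the
-- product type parsed from sample1, 'genArbitrary' falls through to
-- genArbitrary' "C1" [("Int32",True),("Int32",True),("Int8",True)] and emits
--
--   instance Arbitrary C1 where
--       arbitrary = C1 <$> arbitrary <*> arbitrary <*> arbitrary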
genFFI :: HSStruct -> String
genFFI (HSStruct name types) = concat [new_lines, fields_line, offsets_lines, size_line, alignment]
where new_lines = case length types of
0 -> new_fn 0 []
1 -> new_fn 0 $ head types
n -> concat $ zipWith new_fn [1..] types
new_fn ix tps = concat [beginning, "new", name, s_ix ix," :: ", arguments tps,"\n"]
types' tps = map (\(t,b) -> if b then t else "Ptr " ++ t) tps
arguments tps = concat [concat (intersperse " -> " (types' tps ++ ["IO (Ptr "++ name++ ")"]))]
fields_line = concat [beginning, "checkFields" ,name, " :: Ptr ", name, " -> Ptr ",name, " -> IO Int8\n"]
offsets_lines = case length types of
0 -> offsets_un
1 -> offsets_un
n -> concat $ offsets_un:map offsets_fn [1..length types]
offsets_un = concat [beginning, "checkOffsets",name, " :: Ptr Int16 -> IO Int8\n"]
offsets_fn ix = concat [beginning, "checkOffsets",name, s_ix ix, " :: Ptr Int16 -> IO Int8\n"]
size_line = concat [beginning, "getSize" ,name, " :: IO Int16\n"]
alignment = concat [beginning, "getAlignment",name, " :: IO Int16\n\n"]
beginning = "foreign import ccall "
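-- Sketch of the generated FFI block (illustrative only, inferred from the
-- code above): for the single-constructor type C1 of sample1, 'genFFI'
-- produces roughly
--
--   foreign import ccall newC1 :: Int32 -> Int32 -> Int8 -> IO (Ptr C1)
--   foreign import ccall checkFieldsC1 :: Ptr C1 -> Ptr C1 -> IO Int8
--   foreign import ccall checkOffsetsC1 :: Ptr Int16 -> IO Int8
--   foreign import ccall getSizeC1 :: IO Int16
--   foreign import ccall getAlignmentC1 :: IO Int16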
headerHS = ["{-# LANGUAGE ForeignFunctionInterface #-}"
,"{-# LANGUAGE CApiFFI #-}"
,"{-# LANGUAGE CPP #-}"
,"{-# LANGUAGE DeriveGeneric #-}"
,"{-# LANGUAGE DeriveAnyClass #-}"
,"{-# LANGUAGE FlexibleContexts #-}"
,"{-# LANGUAGE FlexibleInstances #-}"
,"{-# LANGUAGE ScopedTypeVariables #-}"
,"{-# LANGUAGE TypeOperators #-}"
,"{-# LANGUAGE DataKinds #-}"
,"{-# LANGUAGE UndecidableInstances #-}"
,""
,"module TestCases where"
,""
,"import GHC.Generics hiding (C1,S1)"
,"import GHC.TypeLits "
,"import Foreign.C.Types"
,"import Foreign.Storable"
,"import Foreign.Storable.Generic"
,"import Foreign.Ptr (Ptr)"
,"import Foreign.ForeignPtr (ForeignPtr, mallocForeignPtr)"
,"import Foreign.Marshal.Alloc (malloc, free)"
,"import Foreign.Marshal.Array (mallocArray, pokeArray)"
,""
,"import Foreign.Storable.Generic.Tools"
,"import Foreign.Storable.Generic.Internal"
,"import Data.Int"
,"import Control.Monad (sequence, liftM)"
,"import System.Exit"
,""
,"import Test.QuickCheck"
,"-- The module tests the memory alignment of Storable data-types "
,"-- derived with generic-storable package. "
,""
,"-- Adding parametric polimorphism to reduce the boilerplate."
,"class (Storable a) => Checkable a where"
," -- | Checks whether the fields are the same"
," checkFields :: Ptr a -> Ptr a -> IO Bool "
," -- | Checks whether the offsets are the same"
," checkOffsets :: a -> Ptr Int16 -> IO Bool "
," -- | Checks whether the size is the same"
," getSize :: a -> IO Int "
," -- | Checks whether the alignment is the same"
," getAlignment :: a-> IO Int "
," "
," new :: a -> IO (Ptr a)"
,""
,""
,"newStorable :: Storable a => a -> IO (Ptr a)"
,"newStorable val = do"
," ptr <- malloc"
," poke ptr val"
," return ptr"
,""
,""
,"class SumOffsets' f where"
," sumOffsets' :: f p -> [Offset]"
,""
,"instance (SumOffsets' f) => SumOffsets' (M1 D t f) where"
," sumOffsets' (M1 v) = sumOffsets' v"
,""
,"instance (GStorable' f, SumOffsets' f) => SumOffsets' (M1 C t f) where"
," sumOffsets' (M1 v) = internalOffsets v"
,""
,"instance (SumOffsets' f, SumOffsets' g) => SumOffsets' (f :+: g) where"
," sumOffsets' (L1 v) = sumOffsets' v"
," sumOffsets' (R1 v) = sumOffsets' v"
,""
,"instance SumOffsets' (M1 S t f) where"
," sumOffsets' _ = undefined"
,"instance SumOffsets' (f :*: g) where"
," sumOffsets' _ = undefined"
,"instance SumOffsets' (K1 i a) where"
," sumOffsets' _ = undefined"
,"instance SumOffsets' (U1) where"
," sumOffsets' _ = undefined"
,"instance SumOffsets' (V1) where"
," sumOffsets' _ = undefined"
,""
,""
,"goffsets :: (SumOffsets' (Rep a), GStorable a, Generic a) => a -> [Int16]"
,"goffsets v = map fromIntegral $ sumOffsets' (from v)"
-- ,"goffsets :: (GStorable' (Rep a), GStorable a, Generic a) => a -> [Int16]"
-- ,"goffsets v = map fromIntegral $ internalOffsets (from v)"
,""
]
-------
-- C --
-------
-- | name [((type, is_prim), fieldname)]
data CStruct = CStruct String [[((String,Bool),String)]]
cstructArguments :: CStruct -> [[String]]
cstructArguments (CStruct _ structs) = map cstructArguments' structs
cstructArguments' :: [((String,Bool),String)] -> [String]
cstructArguments' types_names = map (\(t,n) -> concat [type_str t, " ", n]) types_names
where type_str (tn, False) = concat [tn, "*"]
type_str (tn, True)= tn
accessAsPointer :: ((String,Bool),String) -> String
accessAsPointer ((_,False),n) = "*" ++ n
accessAsPointer ((_, True),n) = n
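-- Small example of the two helpers above (illustrative only): primitive
-- fields are passed by value, non-primitive ones by pointer, e.g.
--
--   cstructArguments' [(("HsInt32",True),"a"), (("C1",False),"b")]
--     == ["HsInt32 a", "C1* b"]
--   map accessAsPointer [(("HsInt32",True),"a"), (("C1",False),"b")]
--     == ["a", "*b"]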
instance Show CStruct where
show cs@(CStruct name types_names) = structs
where structs = case length types_names of
0 -> struct_def 0 []
1 -> struct_def 0 $ head types_names
n -> concat $ (zipWith struct_def [1..] types_names) ++ [union]
struct_def ix tps = concat [beginning ix, field_lines tps, end ix]
-- struct things
beginning ix = concat ["typedef struct ", name, s_ix ix ,"{\n"] :: String
args tps = map (\((t,_),n) -> concat [t, " ", n]) tps
field_lines tps = concat $ map (\arg -> concat [" ",arg, ";\n"]) $ args tps
end ix = concat ["} ", name,s_ix ix, ";\n\n"]
-- union things
union = concat [ union_beginning, union_fill, union_end
, stunion_beginning, tag_line, union_line, stunion_end
]
stunion_beginning = concat ["typedef struct ", name, " {\n"]
stunion_end = concat ["} ",name,";\n\n"]
tag_line = concat [" HsWord8 tag;\n"]
union_line = concat [" ",name,"_union val;\n"]
union_beginning = concat ["typedef union ", name, "_union","{\n"]
union_fill = concat $ zipWith union_fill_fn [1..length types_names] $ map return ['a'..'z']
union_end = concat ["} ",name,"_union;\n\n"]
union_fill_fn ix n= concat [" ", name, s_ix ix, " ", n,";\n"]
infoToCStruct :: Info -> CStruct
infoToCStruct (Info name field_names _) = CStruct name $ map types_names field_names
where types_names tps = zip (c_types tps) names
c_types tps = map (\n -> (toC n, (toC n) `elem` c_prims) ) tps
names = map return ['a'..'z']
genConstructor :: CStruct -> String
genConstructor (CStruct name []) = genConstructor' name []
genConstructor (CStruct name [one]) = genConstructor' name $ one
genConstructor (CStruct name sums) = concat $ zipWith (\ix ft -> genConstructorUnion' ix name ft) [1..] sums
genConstructorUnion' :: Int -> String -> [((String, Bool),String)] -> String
genConstructorUnion' ix name types_names = concat [fst_line, snd_line, trd_line, concat middle_lines, prelast_line, last_line]
where fst_line = concat [name, " * new", name, s_ix ix, "(", args, "){\n"]
args = concat $ intersperse ", "$ cstructArguments' types_names
snd_line = concat [" ",name, " * ret = (",name,"*) malloc(sizeof(",name,"));\n"]
trd_line = concat [" ret->tag = ",show (ix-1), ";\n"]
middle_lines = zipWith (\n1 n2 -> concat [" ret->val.",union_val, ".",n1," = ",n2, ";\n"]) (map snd types_names) (map accessAsPointer types_names)
prelast_line = " return ret;\n"
last_line = "}\n\n"
union_val = (map return ['a'..'z']) !! (ix - 1)
genConstructor' :: String -> [((String, Bool),String)] -> String
genConstructor' name types_names = concat [fst_line, snd_line, concat middle_lines, prelast_line, last_line]
where fst_line = concat [name, " * new", name,"(", args, "){\n"]
args = concat $ intersperse ", "$ cstructArguments' types_names
snd_line = concat [" ",name, " * ret = (",name,"*) malloc(sizeof(",name,"));\n"]
middle_lines = zipWith (\n1 n2 -> concat [" ret->",n1," = ",n2, ";\n"]) (map snd types_names) (map accessAsPointer types_names)
prelast_line = " return ret;\n"
last_line = "}\n\n"
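-- For reference (illustrative only): for the C1 struct of sample1,
-- genConstructor' emits C code along the lines of
--
--   C1 * newC1(HsInt32 a, HsInt32 b, HsInt8 c){
--       C1 * ret = (C1*) malloc(sizeof(C1));
--       ret->a = a;
--       ret->b = b;
--       ret->c = c;
--       return ret;
--   }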
genPoke :: CStruct -> String
genPoke (CStruct name []) = genPoke' name []
genPoke (CStruct name [one]) = genPoke' name $ one
genPoke (CStruct name sums) = concat $ zipWith (\ix ft -> genPokeUnion' ix name ft) [1..] sums
genPokeUnion' :: Int -> String -> [((String,Bool),String)] -> String
genPokeUnion' ix name types_names = concat [fst_line, snd_line, concat middle_lines, last_line]
where fst_line = concat ["void poke",name,s_ix ix,"(", args, "){\n"]
args = concat $ intersperse ", " $(concat [name, "* un"]) :cstructArguments' types_names
snd_line = concat [" un->tag = ", show $ ix - 1, ";\n"]
middle_lines = zipWith (\n1 n2 -> concat [" un->val.",union_val,".",n1," = ", n2, ";\n"]) (map snd types_names) (map accessAsPointer types_names)
last_line = "}\n\n"
union_val = (map return ['a'..'z']) !! (ix - 1)
genPoke' :: String -> [((String,Bool),String)] -> String
genPoke' name types_names = concat [fst_line, concat middle_lines, last_line]
where fst_line = concat ["void poke",name,"(", args, "){\n"]
args = concat $ intersperse ", " $(concat [name, "* val"]) :cstructArguments' types_names
middle_lines = zipWith (\n1 n2 -> concat [" val->",n1," = ", n2, ";\n"]) (map snd types_names) (map accessAsPointer types_names)
last_line = "}\n\n"
genCheckOffsets :: CStruct -> String
genCheckOffsets (CStruct name []) = genCheckOffsets' name []
genCheckOffsets (CStruct name [one]) = genCheckOffsets' name $ one
genCheckOffsets (CStruct name sums) = concat $ offsets ++ [genCheckOffsetsUnion' name]
where offsets = zipWith (\ix ft -> genCheckOffsets' (name ++ s_ix ix) ft) [1..] sums
genCheckOffsetsUnion' :: String -> String
genCheckOffsetsUnion' name = concat [fst_line, snd_line, trd_line, fth_line, last_line]
where fst_line = concat ["int checkOffsets",name,"(HsInt16 *offs){\n"]
snd_line = concat [" int t = offsetof(",name,", tag) == offs[0];\n"]
trd_line = concat [" int v = offsetof(",name,", val) == offs[1];\n"]
fth_line = concat [" return t && v;\n"]
last_line = "}\n\n"
genCheckOffsets' :: String -> [((String,Bool),String)] -> String
genCheckOffsets' name types_names = concat [fst_line, concat middle_lines, prelst_line, last_line]
where fst_line = concat ["int checkOffsets",name,"(HsInt16 *offs){\n"]
names_ixs = zip (map snd types_names) (map show [0,1..])
middle_lines = map (\(n,i) -> concat [" int ",n," = offsetof(",name,", ",n,") == offs[",i,"];\n"]) names_ixs
prelst_line = if length types_names > 0
then concat [" return ",concat $ intersperse " && " $ map snd types_names, ";\n"]
else " return 1;\n"
last_line = "}\n\n"
genCheckFields :: CStruct -> String
genCheckFields (CStruct name []) = genCheckFields' name []
genCheckFields (CStruct name [one]) = genCheckFields' name $ one
genCheckFields (CStruct name sums) = concat $ offsets ++ [genCheckFieldsUnion' (length sums) name]
where offsets = zipWith (\ix ft -> genCheckFields' (name ++ s_ix ix) ft) [1..] sums
genCheckFieldsUnion' :: Int -> String -> String
genCheckFieldsUnion' n name = concat [fst_line, snd_line, mid_lines, prelst_line, last_line]
where fst_line = concat ["int checkFields",name,"(",name,"* s1, ", name,"* s2){\n"]
snd_line = concat [" if (s1->tag != s2->tag) return 0;\n"]
mid_lines = concat $ map mid_fn [1..n]
mid_fn ix = concat [" if (s1->tag == ", show (ix-1),") return checkFields",name++s_ix ix
,"(&s1->val.",u_val ix,",&s2->val.",u_val ix,");\n"]
prelst_line = concat [" return 0;\n"]
last_line = "}\n\n"
u_val ix = (map return ['a'..'z']) !! (ix - 1)
genCheckFields' :: String -> [((String,Bool),String)] -> String
genCheckFields' name types_names = concat [fst_line, concat middle_lines, prelst_line, last_line]
where fst_line = concat ["int checkFields",name,"(",name,"* s1, ", name,"* s2){\n"]
names = map snd types_names
middle_lines = map (\tp@((_,_),n) -> concat [" int ",n," = ", as_prim tp, ";\n"]) types_names
where as_prim ((_,True ),n) = concat ["s1->",n, " == s2->",n]
as_prim ((t,False),n) = concat ["checkFields",t,"(&(s1->",n,"),&(s2->",n,"))"]
prelst_line = if length types_names > 0
then concat [" return ",concat $ intersperse " && " names, ";\n"]
else " return 1; \n"
last_line = "}\n\n"
genGetSize :: CStruct -> String
genGetSize (CStruct name _) = concat [fst_line, middle_line, last_line]
where fst_line = concat ["HsInt16 getSize", name,"() {\n"]
middle_line = concat [" return sizeof(",name,");\n"]
last_line = "}\n\n";
genGetAlignment :: CStruct -> String
genGetAlignment (CStruct name _) = concat [fst_line, middle_line, last_line]
where fst_line = concat ["HsInt16 getAlignment", name,"() {\n"]
middle_line = concat [" return alignof(",name,");\n"]
last_line = "}\n\n";
headerC = ["#include <stddef.h>"
,"#include <stdio.h>"
,"#include <stdalign.h>"
,"#include <stdlib.h>"
,"#include \"HsFFI.h\""
]
-----------
-- Hspec --
-----------
headerTest = ["{-# LANGUAGE ScopedTypeVariables #-}"
,"{-# LANGUAGE FlexibleContexts #-}"
,"{-# LANGUAGE CPP #-}"
,"module Main where"
,""
,""
,"-- Tested module."
,"import TestCases"
,""
,"-- Test libraries"
,"import Test.Hspec"
,"import Test.QuickCheck hiding (getSize)"
,""
,"-- Helpers"
,"import Foreign.Marshal.Array"
,"import Foreign.Storable"
,""
,""
,"same_alignment a = getAlignment a `shouldReturn` alignment a"
,"same_size a = getSize a `shouldReturn` sizeOf a"
,"same_offsets a = do"
," let offsets = goffsets a"
," ptr <- mallocArray $ length offsets"
," pokeArray ptr offsets"
," checkOffsets a ptr `shouldReturn` True"
,""
,"same_fields a = do"
," ptr1 <- newStorable a"
," ptr2 <- new a"
," checkFields ptr1 ptr2 `shouldReturn` True"
]
genTests :: [String] -> String
genTests types = (++) fst_line $ concat $ map (" "++) $ concat [sizes_lines, aligns_lines, off_lines, fields_lines]
where fst_line = "main = hspec $ do\n"
size n = concat ["it \"", n, "\" $ property $ (same_size :: ", n," -> Expectation)\n"]
align n = concat ["it \"", n, "\" $ property $ (same_alignment :: ", n," -> Expectation)\n"]
off n = concat ["it \"", n, "\" $ property $ (same_offsets :: ", n," -> Expectation)\n"]
fields n = concat ["it \"", n, "\" $ property $ (same_fields :: ", n," -> Expectation)\n"]
sizes_lines = ("describe \"Test for same size\" $ do\n" ): (map (\t -> " " ++ size t) types)
aligns_lines = ("describe \"Test for same alignment\" $ do\n" ): (map (\t -> " " ++ align t) types)
off_lines = ("describe \"Test for same offsets\" $ do\n" ): (map (\t -> " " ++ off t) types)
fields_lines = ("describe \"Test for same fields\" $ do\n" ): (map (\t -> " " ++ fields t) types)
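-- The generated test driver looks roughly like this (illustrative only;
-- shown for a single type name "C1"):
--
--   main = hspec $ do
--     describe "Test for same size" $ do
--       it "C1" $ property $ (same_size :: C1 -> Expectation)
--     describe "Test for same alignment" $ do
--       it "C1" $ property $ (same_alignment :: C1 -> Expectation)
--     describe "Test for same offsets" $ do
--       it "C1" $ property $ (same_offsets :: C1 -> Expectation)
--     describe "Test for same fields" $ do
--       it "C1" $ property $ (same_fields :: C1 -> Expectation)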
genTestsS :: [String] -> String
genTestsS types = concat $ map (" "++) $ concat [sizes_lines, aligns_lines, off_lines, fields_lines]
where fst_line = "main = hspec $ do\n"
size n = concat ["it \"", n, "\" $ property $ (same_size :: ", n," -> Expectation)\n"]
align n = concat ["it \"", n, "\" $ property $ (same_alignment :: ", n," -> Expectation)\n"]
off n = concat ["it \"", n, "\" $ property $ (same_offsets :: ", n," -> Expectation)\n"]
fields n = concat ["it \"", n, "\" $ property $ (same_fields :: ", n," -> Expectation)\n"]
sizes_lines = ("describe \"Test for same size - sums\" $ do\n" ): (map (\t -> " " ++ size t) types)
aligns_lines = ("describe \"Test for same alignment - sums\" $ do\n" ): (map (\t -> " " ++ align t) types)
off_lines = ("describe \"Test for same offsets - sums\" $ do\n" ): (map (\t -> " " ++ off t) types)
fields_lines = ("describe \"Test for same fields - sums\" $ do\n" ): (map (\t -> " " ++ fields t) types)
-- -------
-- -------
--
toC :: String -> String
toC "Int64" = "HsInt64"
toC "Int32" = "HsInt32"
toC "Int16" = "HsInt16"
toC "Int8" = "HsInt8"
toC "Double" = "HsDouble"
toC "Float" = "HsFloat"
toC v = v
c_prims = ["HsInt64", "HsInt32", "HsInt16", "HsInt8","HsDouble", "HsFloat"]
hs_prims = ["Int64", "Int32", "Int16", "Int8", "Double", "Float"]
-- Generates C structs and related functions
stuffC info = do
let c_struct = infoToCStruct info
c_cons = genConstructor c_struct
c_checkOffs = genCheckOffsets c_struct
c_checkFields = genCheckFields c_struct
c_size = genGetSize c_struct
c_alignment = genGetAlignment c_struct
c_poke = genPoke c_struct
concat [show c_struct, c_cons, c_poke, c_checkOffs, c_checkFields, c_size, c_alignment]
-- Generates Haskell datatypes and related instances
stuffHS info = do
let hs_struct = infoToHSStruct info
datatype = show hs_struct
instanced = genCheckable hs_struct
arbitrary = genArbitrary hs_struct
ffi = genFFI hs_struct
concat [datatype, instanced, arbitrary, ffi]
wrapInIfdefs :: String -> String
wrapInIfdefs s = concat ["#ifdef GSTORABLE_SUMTYPES\n", s, "#endif"]
-- Generates the files.
genFiles filename = do
file <- readFile filename
let cases = lines file
infos = [i | Just i <- map getInfo cases]
infosP = filter (\(Info _ _ t) -> not t) infos
infosS = filter (\(Info _ _ t) -> t) infos
header_hs = concat $ map (++"\n") headerHS
header_test = concat $ map (++"\n") headerTest
header_c = concat $ map (++"\n") headerC
hs_codeP = concat $ map stuffHS infosP
hs_codeS = concat $ map stuffHS infosS
test_codeP = genTests $ map (\(Info n _ _) -> n) infosP
test_codeS = genTestsS $ map (\(Info n _ _) -> n) infosS
c_code = concat $ map stuffC infos
writeFile "MemoryCSpec.hs" (header_test ++ test_codeP ++ wrapInIfdefs test_codeS)
writeFile "TestCases.hs" (header_hs ++ hs_codeP ++ wrapInIfdefs hs_codeS)
writeFile "cbits/TestCases.c" (header_c ++ c_code)
-- Default usage
main = genFiles "TestCases"
| mkloczko/derive-storable | test/Basic/GenTestCases.hs | mit | 26,360 | 0 | 18 | 7,990 | 7,228 | 3,976 | 3,252 | 426 | 7 |
{-# LANGUAGE CPP #-}
{- |
Module : ./VSE/Prove.hs
Description : Interface to the VSE prover
Copyright : (c) C. Maeder, DFKI 2008
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : needs POSIX
calls an adaptation of VSE II to hets
-}
module VSE.Prove where
import Logic.Prover
import VSE.As
import VSE.Ana
import VSE.ToSExpr
import Common.AS_Annotation
import Common.IO
import Common.ProverTools
import Common.SExpr
import Common.Utils
import Data.Char
import Data.Maybe
import Data.List
import qualified Data.Map as Map
import System.Process
import System.IO
import Text.ParserCombinators.Parsec
#ifdef TAR_PACKAGE
import Control.Monad
import System.Directory
import qualified Codec.Archive.Tar as Tar
#endif
vseProverName :: String
vseProverName = "VSE"
mkVseProofStatus :: String -> [String] -> ProofStatus ()
mkVseProofStatus n axs = (openProofStatus n vseProverName ())
{ goalStatus = Proved True
, usedAxioms = axs }
vse :: Prover VSESign Sentence VSEMor () ()
vse = (mkProverTemplate vseProverName () prove)
{ proverUsable = vseBinary >>= checkBinary }
nameP :: String
nameP = "SPECIFICATION-NAMES"
linksP :: String
linksP = "IN-LINKS"
sigP :: String
sigP = "SIG"
lemsP :: String
lemsP = "LEMMABASE"
prx :: String -> String
prx = ("(API::GET-" ++)
data MaybeChar = Wait | Stop | JustChar Char
vseErrFile :: String
vseErrFile = "hetsvse.out"
readUntilMatchParen :: ProcessHandle -> Handle -> String -> IO String
readUntilMatchParen = readUntilMatchParenAux 10000
readUntilMatchParenAux :: Int -> ProcessHandle -> Handle -> String -> IO String
readUntilMatchParenAux n cp h str =
let os = length $ filter (== '(') str
cs = length $ filter (== ')') str
in if n < 1 then do
appendFile vseErrFile $ "readUntilMatchParen failed after\n" ++ str ++ "\n"
return ""
else if os == cs && os > 0 then return str
else do
mc <- myGetChar cp h
case mc of
Wait -> do
appendFile vseErrFile "waiting for character ...\n"
readUntilMatchParenAux (n - 1) cp h str
Stop -> return str
JustChar c -> readUntilMatchParen cp h $ c : str
myGetChar :: ProcessHandle -> Handle -> IO MaybeChar
myGetChar cp h = do
mc <- catchIOException Wait (fmap JustChar $ hGetChar h)
case mc of
Wait -> do
ms <- getProcessExitCode cp
return $ case ms of
Nothing -> mc
Just _ -> Stop
_ -> return mc
readMyMsg :: ProcessHandle -> Handle -> String -> IO String
readMyMsg = readMyMsgAux 1000
readMyMsgAux :: Int -> ProcessHandle -> Handle -> String -> IO String
readMyMsgAux n cp h expect = if n < 1 then do
appendFile vseErrFile $ "gave up waiting for: " ++ expect ++ "\n"
return ""
else do
mc <- myGetChar cp h
case mc of
Wait -> do
appendFile vseErrFile "waiting for first character ...\n"
readMyMsgAux (n - 1) cp h expect
Stop -> return ""
JustChar c -> do
r <- readUntilMatchParen cp h [c]
let rr = reverse r
if isPrefixOf (prx expect) $ dropWhile (/= '(') rr
then return rr else do
appendFile vseErrFile $ "waiting for '" ++ expect ++ "' got:\n" ++ rr
++ "\ntrying again\n"
readMyMsgAux (n - 1) cp h expect -- try again
sendMyMsg :: Handle -> String -> IO ()
sendMyMsg cp str = catchIOException () $ hPutStrLn cp str >> hFlush cp
readRest :: ProcessHandle -> Handle -> String -> IO String
readRest cp out str = do
mc <- myGetChar cp out
case mc of
Wait -> readRest cp out str
Stop -> return str
JustChar c -> readRest cp out $ c : str
parseSymbol :: Parser SExpr
parseSymbol = skipWhite
$ fmap SSymbol $ many1 $ satisfy $ \ c -> not (isSpace c || elem c "()")
parseList :: Parser SExpr
parseList = do
skipWhite $ char '('
l <- many parseSExpr
skipWhite $ char ')'
return $ SList l
parseSExpr :: Parser SExpr
parseSExpr = parseList <|> parseSymbol
skipWhite :: Parser a -> Parser a
skipWhite p = do
a <- p
spaces
return a
skipJunk :: Parser ()
skipJunk = skipMany $ satisfy (/= '(')
parseSExprs :: Parser [SExpr]
parseSExprs = do
skipJunk
sepEndBy parseSExpr skipJunk
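-- For example (illustrative, not from the original module): running
-- 'parseSExprs' on the input "(a (b c)) noise (d)" yields the two expressions
--   SList [SSymbol "a", SList [SSymbol "b", SSymbol "c"]]
--   SList [SSymbol "d"]
-- with the text between them consumed by 'skipJunk'.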
findState :: SExpr -> Maybe (String, String)
findState sexpr = case sexpr of
SList (SSymbol "API::SET-SENTENCE" : SSymbol nodeStr :
SList (SSymbol "API::ASENTENCE" : SSymbol senStr :
SSymbol "API::OBLIGATION" : SSymbol "API::PROVED" : _) : _)
| isPrefixOf "API::" nodeStr && isPrefixOf "API::" senStr
-> Just (drop 5 nodeStr, drop 5 senStr)
_ -> Nothing
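-- A concrete shape that 'findState' recognises (illustrative only):
--
--   findState (SList [ SSymbol "API::SET-SENTENCE", SSymbol "API::Node1"
--                    , SList [ SSymbol "API::ASENTENCE", SSymbol "API::Goal1"
--                            , SSymbol "API::OBLIGATION", SSymbol "API::PROVED" ] ])
--     == Just ("Node1", "Goal1")
--
-- 'readLemmas' below then collects such pairs into a map from node name to
-- the list of proved sentences.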
specDir :: String
specDir = "specifications"
allSpecFile :: String
allSpecFile = "all-specifications"
allSpecInDir :: String
allSpecInDir = specDir ++ "/" ++ allSpecFile
#ifdef TAR_PACKAGE
createVSETarFile :: FilePath -> IO ()
createVSETarFile tar = do
hasSpecDir <- doesDirectoryExist specDir
hasAllSpecFile <- doesFileExist allSpecFile
if (hasSpecDir && hasAllSpecFile) then do
renameFile allSpecFile allSpecInDir
Tar.create (tar ++ ".tar") "" [specDir]
else putStrLn $ "hetsvse did not create: "
++ if hasSpecDir then allSpecFile else specDir
moveVSEFiles :: FilePath -> IO ()
moveVSEFiles str = do
let tarFile = str ++ ".tar"
hasTarFile <- doesFileExist tarFile
hasSpecDir <- doesDirectoryExist specDir
hasAllSpecFile <- doesFileExist allSpecFile
when (hasSpecDir && hasAllSpecFile) $ do
createVSETarFile (specDir ++ ".bak")
removeDirectoryRecursive specDir
when hasTarFile $ do
Tar.extract "" tarFile
renameFile allSpecInDir allSpecFile
#endif
vseBinary :: IO String
vseBinary = getEnvDef "HETS_VSE" "hetsvse"
prepareAndCallVSE :: IO (Handle, Handle, ProcessHandle)
prepareAndCallVSE = do
vseBin <- vseBinary
(inp, out, _, cp) <-
runInteractiveProcess vseBin ["-std"] Nothing Nothing
readMyMsg cp out nameP
return (inp, out, cp)
readFinalVSEOutput :: ProcessHandle -> Handle
-> IO (Maybe (Map.Map String [String]))
readFinalVSEOutput cp out = do
ms <- getProcessExitCode cp
case ms of
Just _ -> do
appendFile vseErrFile "hetsvse unavailable\n"
return Nothing
Nothing -> do
revres <- readRest cp out ""
let res = reverse revres
writeFile "hetsvse-debug.txt" res
case parse parseSExprs vseErrFile res of
Right l -> return $ Just $ readLemmas l
Left e -> do
print e
appendFile vseErrFile $ res ++ "\n"
return Nothing
readLemmas :: [SExpr] -> Map.Map String [String]
readLemmas =
foldr (\ (node, sen) -> Map.insertWith (++) node [sen]) Map.empty
. mapMaybe findState
prove :: String -> Theory VSESign Sentence () -> a -> IO [ProofStatus ()]
prove ostr (Theory sig thsens) _freedefs = do
let str = map (\ c -> if c == '/' then '-' else c) ostr
oSens = toNamedList thsens
(fsig, sens) = addUniformRestr sig oSens
(disAxs, disGoals) = partition isAxiom oSens
rMap = Map.fromList $ map (\ SenAttr { senAttr = n } ->
(map toUpper $ transString n, n)) disGoals
#ifdef TAR_PACKAGE
moveVSEFiles str
#endif
(inp, out, cp) <- prepareAndCallVSE
sendMyMsg inp $ "(" ++ str ++ ")"
readMyMsg cp out linksP
sendMyMsg inp "nil"
readMyMsg cp out sigP
sendMyMsg inp $ show $ prettySExpr $ vseSignToSExpr fsig
readMyMsg cp out lemsP
sendMyMsg inp $ show $ prettySExpr $ SList $ map (namedSenToSExpr fsig) sens
ms <- readFinalVSEOutput cp out
#ifdef TAR_PACKAGE
createVSETarFile str
#endif
case ms of
Nothing -> return []
Just lemMap -> return
$ foldr (\ s r -> case Map.lookup s rMap of
Nothing -> r
Just n -> mkVseProofStatus n (map senAttr disAxs) : r) []
$ Map.findWithDefault [] (map toUpper str) lemMap
| spechub/Hets | VSE/Prove.hs | gpl-2.0 | 7,774 | 0 | 21 | 1,801 | 2,548 | 1,253 | 1,295 | 189 | 5 |
-- | Export lamdu version for --help
{-# LANGUAGE CPP, TemplateHaskell, NamedFieldPuns #-}
{-# OPTIONS -O0 #-}
#ifndef DEV_BUILD
{-# OPTIONS -fforce-recomp #-}
#endif
module Lamdu.Version
( VersionInfo(..), currentVersionInfo, currentVersionInfoStr
) where
import Data.Time (getZonedTime, formatTime, defaultTimeLocale)
import Language.Haskell.TH (runIO, stringE)
import qualified System.Process.Git as Git
import Prelude.Compat
data VersionInfo = VersionInfo
{ version :: !String
, gitCommit :: !String
, gitStatus :: !String
, gitDirty :: !Bool
}
_curdate :: String
_curdate =
$(runIO (formatTime defaultTimeLocale "%y-%m-%d" <$> getZonedTime) >>= stringE)
_rc :: String -> String
_rc ver = ver ++ "-rc-" ++ _curdate
currentVersionInfo :: VersionInfo
currentVersionInfo =
VersionInfo
{ version =
#ifdef DEV_BUILD
"<devel>"
#else
"0.8"
#endif
, gitCommit = $(Git.hash)
, gitStatus = $(Git.status)
, gitDirty = $(Git.dirty)
}
currentVersionInfoStr :: String
currentVersionInfoStr =
concat
[ "Lamdu ", version
, "\n built from git revision: ", gitCommit
, statusLine
]
where
statusLine
| null gitStatus = ""
| otherwise = "\n status:" ++ gitStatus
VersionInfo{version, gitCommit, gitStatus} = currentVersionInfo
| lamdu/lamdu | src/main/Lamdu/Version.hs | gpl-3.0 | 1,386 | 0 | 11 | 338 | 298 | 175 | 123 | 45 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.StorageGateway.DisableGateway
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Disables a gateway when the gateway is no longer functioning. For
-- example, if your gateway VM is damaged, you can disable the gateway so
-- you can recover virtual tapes.
--
-- Use this operation for a gateway-VTL that is not reachable or not
-- functioning.
--
-- Once a gateway is disabled it cannot be enabled.
--
-- /See:/ <http://docs.aws.amazon.com/storagegateway/latest/APIReference/API_DisableGateway.html AWS API Reference> for DisableGateway.
module Network.AWS.StorageGateway.DisableGateway
(
-- * Creating a Request
disableGateway
, DisableGateway
-- * Request Lenses
, dGatewayARN
-- * Destructuring the Response
, disableGatewayResponse
, DisableGatewayResponse
-- * Response Lenses
, disrsGatewayARN
, disrsResponseStatus
) where
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
import Network.AWS.StorageGateway.Types
import Network.AWS.StorageGateway.Types.Product
-- | DisableGatewayInput
--
-- /See:/ 'disableGateway' smart constructor.
newtype DisableGateway = DisableGateway'
{ _dGatewayARN :: Text
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DisableGateway' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dGatewayARN'
disableGateway
:: Text -- ^ 'dGatewayARN'
-> DisableGateway
disableGateway pGatewayARN_ =
DisableGateway'
{ _dGatewayARN = pGatewayARN_
}
-- | Undocumented member.
dGatewayARN :: Lens' DisableGateway Text
dGatewayARN = lens _dGatewayARN (\ s a -> s{_dGatewayARN = a});
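-- A minimal usage sketch (not part of the generated module; the ARN below is
-- a made-up placeholder). The request value is normally handed to the generic
-- amazonka 'send' function; here we only build it and read back its field:
--
-- >>> let rq = disableGateway "arn:aws:storagegateway:us-east-1:123456789012:gateway/sgw-12A3456B"
-- >>> rq ^. dGatewayARN
-- "arn:aws:storagegateway:us-east-1:123456789012:gateway/sgw-12A3456B"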
instance AWSRequest DisableGateway where
type Rs DisableGateway = DisableGatewayResponse
request = postJSON storageGateway
response
= receiveJSON
(\ s h x ->
DisableGatewayResponse' <$>
(x .?> "GatewayARN") <*> (pure (fromEnum s)))
instance ToHeaders DisableGateway where
toHeaders
= const
(mconcat
["X-Amz-Target" =#
("StorageGateway_20130630.DisableGateway" ::
ByteString),
"Content-Type" =#
("application/x-amz-json-1.1" :: ByteString)])
instance ToJSON DisableGateway where
toJSON DisableGateway'{..}
= object
(catMaybes [Just ("GatewayARN" .= _dGatewayARN)])
instance ToPath DisableGateway where
toPath = const "/"
instance ToQuery DisableGateway where
toQuery = const mempty
-- | DisableGatewayOutput
--
-- /See:/ 'disableGatewayResponse' smart constructor.
data DisableGatewayResponse = DisableGatewayResponse'
{ _disrsGatewayARN :: !(Maybe Text)
, _disrsResponseStatus :: !Int
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DisableGatewayResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'disrsGatewayARN'
--
-- * 'disrsResponseStatus'
disableGatewayResponse
:: Int -- ^ 'disrsResponseStatus'
-> DisableGatewayResponse
disableGatewayResponse pResponseStatus_ =
DisableGatewayResponse'
{ _disrsGatewayARN = Nothing
, _disrsResponseStatus = pResponseStatus_
}
-- | The unique Amazon Resource Name of the disabled gateway.
disrsGatewayARN :: Lens' DisableGatewayResponse (Maybe Text)
disrsGatewayARN = lens _disrsGatewayARN (\ s a -> s{_disrsGatewayARN = a});
-- | The response status code.
disrsResponseStatus :: Lens' DisableGatewayResponse Int
disrsResponseStatus = lens _disrsResponseStatus (\ s a -> s{_disrsResponseStatus = a});
| fmapfmapfmap/amazonka | amazonka-storagegateway/gen/Network/AWS/StorageGateway/DisableGateway.hs | mpl-2.0 | 4,451 | 0 | 13 | 978 | 589 | 355 | 234 | 77 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : Haddock.Backends.Html
-- Copyright : (c) Simon Marlow 2003-2006,
-- David Waern 2006-2009,
-- Mark Lentczner 2010,
-- Mateusz Kowalczyk 2013
-- License : BSD-like
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
-----------------------------------------------------------------------------
{-# LANGUAGE CPP #-}
module Haddock.Backends.Xhtml (
ppHtml, copyHtmlBits,
ppHtmlIndex, ppHtmlContents,
) where
import Prelude hiding (div)
import Haddock.Backends.Xhtml.Decl
import Haddock.Backends.Xhtml.DocMarkup
import Haddock.Backends.Xhtml.Layout
import Haddock.Backends.Xhtml.Names
import Haddock.Backends.Xhtml.Themes
import Haddock.Backends.Xhtml.Types
import Haddock.Backends.Xhtml.Utils
import Haddock.ModuleTree
import Haddock.Types
import Haddock.Version
import Haddock.Utils
import Text.XHtml hiding ( name, title, p, quote )
import Haddock.GhcUtils
import Control.Monad ( when, unless )
import Data.Char ( toUpper )
import Data.List ( sortBy, groupBy, intercalate, isPrefixOf )
import Data.Maybe
import System.FilePath hiding ( (</>) )
import System.Directory
import Data.Map ( Map )
import qualified Data.Map as Map hiding ( Map )
import qualified Data.Set as Set hiding ( Set )
import Data.Function
import Data.Ord ( comparing )
import DynFlags (Language(..))
import GHC hiding ( NoLink, moduleInfo )
import Name
import Module
--------------------------------------------------------------------------------
-- * Generating HTML documentation
--------------------------------------------------------------------------------
ppHtml :: DynFlags
-> String -- ^ Title
-> Maybe String -- ^ Package
-> [Interface]
-> FilePath -- ^ Destination directory
-> Maybe (MDoc GHC.RdrName) -- ^ Prologue text, maybe
-> Themes -- ^ Themes
-> SourceURLs -- ^ The source URL (--source)
-> WikiURLs -- ^ The wiki URL (--wiki)
-> Maybe String -- ^ The contents URL (--use-contents)
-> Maybe String -- ^ The index URL (--use-index)
-> Bool -- ^ Whether to use unicode in output (--use-unicode)
-> QualOption -- ^ How to qualify names
-> Bool -- ^ Output pretty html (newlines and indenting)
-> IO ()
ppHtml dflags doctitle maybe_package ifaces odir prologue
themes maybe_source_url maybe_wiki_url
maybe_contents_url maybe_index_url unicode
qual debug = do
let
visible_ifaces = filter visible ifaces
visible i = OptHide `notElem` ifaceOptions i
when (isNothing maybe_contents_url) $
ppHtmlContents dflags odir doctitle maybe_package
themes maybe_index_url maybe_source_url maybe_wiki_url
(map toInstalledIface visible_ifaces)
False -- we don't want to display the packages in a single-package contents
prologue debug (makeContentsQual qual)
when (isNothing maybe_index_url) $
ppHtmlIndex odir doctitle maybe_package
themes maybe_contents_url maybe_source_url maybe_wiki_url
(map toInstalledIface visible_ifaces) debug
mapM_ (ppHtmlModule odir doctitle themes
maybe_source_url maybe_wiki_url
maybe_contents_url maybe_index_url unicode qual debug) visible_ifaces
copyHtmlBits :: FilePath -> FilePath -> Themes -> IO ()
copyHtmlBits odir libdir themes = do
let
libhtmldir = joinPath [libdir, "html"]
copyCssFile f = copyFile f (combine odir (takeFileName f))
copyLibFile f = copyFile (joinPath [libhtmldir, f]) (joinPath [odir, f])
mapM_ copyCssFile (cssFiles themes)
mapM_ copyLibFile [ jsFile, framesFile ]
headHtml :: String -> Maybe String -> Themes -> Html
headHtml docTitle miniPage themes =
header << [
meta ! [httpequiv "Content-Type", content "text/html; charset=UTF-8"],
thetitle << docTitle,
styleSheet themes,
script ! [src jsFile, thetype "text/javascript"] << noHtml,
script ! [thetype "text/javascript"]
-- NB: Within XHTML, the content of script tags needs to be
-- a <![CDATA[ section. Will break if the miniPage name could
-- have "]]>" in it!
<< primHtml (
"//<![CDATA[\nwindow.onload = function () {pageLoad();"
++ setSynopsis ++ "};\n//]]>\n")
]
where
setSynopsis = maybe "" (\p -> "setSynopsis(\"" ++ p ++ "\");") miniPage
srcButton :: SourceURLs -> Maybe Interface -> Maybe Html
srcButton (Just src_base_url, _, _, _) Nothing =
Just (anchor ! [href src_base_url] << "Source")
srcButton (_, Just src_module_url, _, _) (Just iface) =
let url = spliceURL (Just $ ifaceOrigFilename iface)
(Just $ ifaceMod iface) Nothing Nothing src_module_url
in Just (anchor ! [href url] << "Source")
srcButton _ _ =
Nothing
wikiButton :: WikiURLs -> Maybe Module -> Maybe Html
wikiButton (Just wiki_base_url, _, _) Nothing =
Just (anchor ! [href wiki_base_url] << "User Comments")
wikiButton (_, Just wiki_module_url, _) (Just mdl) =
let url = spliceURL Nothing (Just mdl) Nothing Nothing wiki_module_url
in Just (anchor ! [href url] << "User Comments")
wikiButton _ _ =
Nothing
contentsButton :: Maybe String -> Maybe Html
contentsButton maybe_contents_url
= Just (anchor ! [href url] << "Contents")
where url = fromMaybe contentsHtmlFile maybe_contents_url
indexButton :: Maybe String -> Maybe Html
indexButton maybe_index_url
= Just (anchor ! [href url] << "Index")
where url = fromMaybe indexHtmlFile maybe_index_url
bodyHtml :: String -> Maybe Interface
-> SourceURLs -> WikiURLs
-> Maybe String -> Maybe String
-> Html -> Html
bodyHtml doctitle iface
maybe_source_url maybe_wiki_url
maybe_contents_url maybe_index_url
pageContent =
body << [
divPackageHeader << [
unordList (catMaybes [
srcButton maybe_source_url iface,
wikiButton maybe_wiki_url (ifaceMod <$> iface),
contentsButton maybe_contents_url,
indexButton maybe_index_url])
! [theclass "links", identifier "page-menu"],
nonEmptySectionName << doctitle
],
divContent << pageContent,
divFooter << paragraph << (
"Produced by " +++
(anchor ! [href projectUrl] << toHtml projectName) +++
(" version " ++ projectVersion)
)
]
moduleInfo :: Interface -> Html
moduleInfo iface =
let
info = ifaceInfo iface
doOneEntry :: (String, HaddockModInfo GHC.Name -> Maybe String) -> Maybe HtmlTable
doOneEntry (fieldName, field) =
field info >>= \a -> return (th << fieldName <-> td << a)
entries :: [HtmlTable]
entries = mapMaybe doOneEntry [
("Copyright",hmi_copyright),
("License",hmi_license),
("Maintainer",hmi_maintainer),
("Stability",hmi_stability),
("Portability",hmi_portability),
("Safe Haskell",hmi_safety),
("Language", lg)
] ++ extsForm
where
lg inf = case hmi_language inf of
Nothing -> Nothing
Just Haskell98 -> Just "Haskell98"
Just Haskell2010 -> Just "Haskell2010"
extsForm
| OptShowExtensions `elem` ifaceOptions iface =
let fs = map (dropOpt . show) (hmi_extensions info)
in case map stringToHtml fs of
[] -> []
[x] -> extField x -- don't use a list for a single extension
xs -> extField $ unordList xs ! [theclass "extension-list"]
| otherwise = []
where
extField x = return $ th << "Extensions" <-> td << x
dropOpt x = if "Opt_" `isPrefixOf` x then drop 4 x else x
in
case entries of
[] -> noHtml
_ -> table ! [theclass "info"] << aboves entries
--------------------------------------------------------------------------------
-- * Generate the module contents
--------------------------------------------------------------------------------
ppHtmlContents
:: DynFlags
-> FilePath
-> String
-> Maybe String
-> Themes
-> Maybe String
-> SourceURLs
-> WikiURLs
-> [InstalledInterface] -> Bool -> Maybe (MDoc GHC.RdrName)
-> Bool
-> Qualification -- ^ How to qualify names
-> IO ()
ppHtmlContents dflags odir doctitle _maybe_package
themes maybe_index_url
maybe_source_url maybe_wiki_url ifaces showPkgs prologue debug qual = do
let tree = mkModuleTree dflags showPkgs
[(instMod iface, toInstalledDescription iface) | iface <- ifaces]
html =
headHtml doctitle Nothing themes +++
bodyHtml doctitle Nothing
maybe_source_url maybe_wiki_url
Nothing maybe_index_url << [
ppPrologue qual doctitle prologue,
ppModuleTree qual tree
]
createDirectoryIfMissing True odir
writeFile (joinPath [odir, contentsHtmlFile]) (renderToString debug html)
-- XXX: think of a better place for this?
ppHtmlContentsFrame odir doctitle themes ifaces debug
ppPrologue :: Qualification -> String -> Maybe (MDoc GHC.RdrName) -> Html
ppPrologue _ _ Nothing = noHtml
ppPrologue qual title (Just doc) =
divDescription << (h1 << title +++ docElement thediv (rdrDocToHtml qual doc))
ppModuleTree :: Qualification -> [ModuleTree] -> Html
ppModuleTree qual ts =
divModuleList << (sectionName << "Modules" +++ mkNodeList qual [] "n" ts)
mkNodeList :: Qualification -> [String] -> String -> [ModuleTree] -> Html
mkNodeList qual ss p ts = case ts of
[] -> noHtml
_ -> unordList (zipWith (mkNode qual ss) ps ts)
where
ps = [ p ++ '.' : show i | i <- [(1::Int)..]]
mkNode :: Qualification -> [String] -> String -> ModuleTree -> Html
mkNode qual ss p (Node s leaf pkg srcPkg short ts) =
htmlModule <+> shortDescr +++ htmlPkg +++ subtree
where
modAttrs = case (ts, leaf) of
(_:_, False) -> collapseControl p True "module"
(_, _ ) -> [theclass "module"]
cBtn = case (ts, leaf) of
(_:_, True) -> thespan ! collapseControl p True "" << spaceHtml
(_, _ ) -> noHtml
-- We only need an explicit collapser button when the module name
-- is also a leaf, and so is a link to a module page. Indeed, the
-- spaceHtml is a minor hack and does upset the layout a fraction.
htmlModule = thespan ! modAttrs << (cBtn +++
if leaf
then ppModule (mkModule (stringToPackageKey (fromMaybe "" pkg))
(mkModuleName mdl))
else toHtml s
)
mdl = intercalate "." (reverse (s:ss))
shortDescr = maybe noHtml (origDocToHtml qual) short
htmlPkg = maybe noHtml (thespan ! [theclass "package"] <<) srcPkg
subtree = mkNodeList qual (s:ss) p ts ! collapseSection p True ""
-- | Turn a module tree into a flat list of full module names. E.g.,
-- @
-- A
-- +-B
-- +-C
-- @
-- becomes
-- @["A", "A.B", "A.B.C"]@
flatModuleTree :: [InstalledInterface] -> [Html]
flatModuleTree ifaces =
map (uncurry ppModule' . head)
. groupBy ((==) `on` fst)
. sortBy (comparing fst)
$ mods
where
mods = [ (moduleString mdl, mdl) | mdl <- map instMod ifaces ]
ppModule' txt mdl =
anchor ! [href (moduleHtmlFile mdl), target mainFrameName]
<< toHtml txt
ppHtmlContentsFrame :: FilePath -> String -> Themes
-> [InstalledInterface] -> Bool -> IO ()
ppHtmlContentsFrame odir doctitle themes ifaces debug = do
let mods = flatModuleTree ifaces
html =
headHtml doctitle Nothing themes +++
miniBody << divModuleList <<
(sectionName << "Modules" +++
ulist << [ li ! [theclass "module"] << m | m <- mods ])
createDirectoryIfMissing True odir
writeFile (joinPath [odir, frameIndexHtmlFile]) (renderToString debug html)
--------------------------------------------------------------------------------
-- * Generate the index
--------------------------------------------------------------------------------
ppHtmlIndex :: FilePath
-> String
-> Maybe String
-> Themes
-> Maybe String
-> SourceURLs
-> WikiURLs
-> [InstalledInterface]
-> Bool
-> IO ()
ppHtmlIndex odir doctitle _maybe_package themes
maybe_contents_url maybe_source_url maybe_wiki_url ifaces debug = do
let html = indexPage split_indices Nothing
(if split_indices then [] else index)
createDirectoryIfMissing True odir
when split_indices $ do
mapM_ (do_sub_index index) initialChars
-- Let's add a single large index as well for those who don't know exactly what they're looking for:
let mergedhtml = indexPage False Nothing index
writeFile (joinPath [odir, subIndexHtmlFile merged_name]) (renderToString debug mergedhtml)
writeFile (joinPath [odir, indexHtmlFile]) (renderToString debug html)
where
indexPage showLetters ch items =
headHtml (doctitle ++ " (" ++ indexName ch ++ ")") Nothing themes +++
bodyHtml doctitle Nothing
maybe_source_url maybe_wiki_url
maybe_contents_url Nothing << [
if showLetters then indexInitialLetterLinks else noHtml,
if null items then noHtml else
divIndex << [sectionName << indexName ch, buildIndex items]
]
indexName ch = "Index" ++ maybe "" (\c -> " - " ++ [c]) ch
merged_name = "All"
buildIndex items = table << aboves (map indexElt items)
-- an arbitrary heuristic:
-- too large, and a single-page will be slow to load
-- too small, and we'll have lots of letter-indexes with only one
-- or two members in them, which seems inefficient or
-- unnecessarily hard to use.
split_indices = length index > 150
indexInitialLetterLinks =
divAlphabet <<
unordList (map (\str -> anchor ! [href (subIndexHtmlFile str)] << str) $
[ [c] | c <- initialChars
, any ((==c) . toUpper . head . fst) index ] ++
[merged_name])
-- todo: what about names/operators that start with Unicode
-- characters?
-- Exports beginning with '_' can be listed near the end,
-- presumably they're not as important... but would be listed
-- with non-split index!
initialChars = [ 'A'..'Z' ] ++ ":!#$%&*+./<=>?@\\^|-~" ++ "_"
do_sub_index this_ix c
= unless (null index_part) $
writeFile (joinPath [odir, subIndexHtmlFile [c]]) (renderToString debug html)
where
html = indexPage True (Just c) index_part
index_part = [(n,stuff) | (n,stuff) <- this_ix, toUpper (head n) == c]
index :: [(String, Map GHC.Name [(Module,Bool)])]
index = sortBy cmp (Map.toAscList full_index)
where cmp (n1,_) (n2,_) = comparing (map toUpper) n1 n2
-- for each name (a plain string), we have a number of original HsNames that
-- it can refer to, and for each of those we have a list of modules
-- that export that entity. Each of the modules exports the entity
-- in a visible or invisible way (hence the Bool).
full_index :: Map String (Map GHC.Name [(Module,Bool)])
full_index = Map.fromListWith (flip (Map.unionWith (++)))
(concatMap getIfaceIndex ifaces)
getIfaceIndex iface =
[ (getOccString name
, Map.fromList [(name, [(mdl, name `Set.member` visible)])])
| name <- instExports iface ]
where
mdl = instMod iface
visible = Set.fromList (instVisibleExports iface)
indexElt :: (String, Map GHC.Name [(Module,Bool)]) -> HtmlTable
indexElt (str, entities) =
case Map.toAscList entities of
[(nm,entries)] ->
td ! [ theclass "src" ] << toHtml str <->
indexLinks nm entries
many_entities ->
td ! [ theclass "src" ] << toHtml str <-> td << spaceHtml </>
aboves (zipWith (curry doAnnotatedEntity) [1..] many_entities)
doAnnotatedEntity :: (Integer, (Name, [(Module, Bool)])) -> HtmlTable
doAnnotatedEntity (j,(nm,entries))
= td ! [ theclass "alt" ] <<
toHtml (show j) <+> parens (ppAnnot (nameOccName nm)) <->
indexLinks nm entries
ppAnnot n | not (isValOcc n) = toHtml "Type/Class"
| isDataOcc n = toHtml "Data Constructor"
| otherwise = toHtml "Function"
indexLinks nm entries =
td ! [ theclass "module" ] <<
hsep (punctuate comma
[ if visible then
linkId mdl (Just nm) << toHtml (moduleString mdl)
else
toHtml (moduleString mdl)
| (mdl, visible) <- entries ])
--------------------------------------------------------------------------------
-- * Generate the HTML page for a module
--------------------------------------------------------------------------------
ppHtmlModule
:: FilePath -> String -> Themes
-> SourceURLs -> WikiURLs
-> Maybe String -> Maybe String -> Bool -> QualOption
-> Bool -> Interface -> IO ()
ppHtmlModule odir doctitle themes
maybe_source_url maybe_wiki_url
maybe_contents_url maybe_index_url unicode qual debug iface = do
let
mdl = ifaceMod iface
aliases = ifaceModuleAliases iface
mdl_str = moduleString mdl
real_qual = makeModuleQual qual aliases mdl
html =
headHtml mdl_str (Just $ "mini_" ++ moduleHtmlFile mdl) themes +++
bodyHtml doctitle (Just iface)
maybe_source_url maybe_wiki_url
maybe_contents_url maybe_index_url << [
divModuleHeader << (moduleInfo iface +++ (sectionName << mdl_str)),
ifaceToHtml maybe_source_url maybe_wiki_url iface unicode real_qual
]
createDirectoryIfMissing True odir
writeFile (joinPath [odir, moduleHtmlFile mdl]) (renderToString debug html)
ppHtmlModuleMiniSynopsis odir doctitle themes iface unicode real_qual debug
ppHtmlModuleMiniSynopsis :: FilePath -> String -> Themes
-> Interface -> Bool -> Qualification -> Bool -> IO ()
ppHtmlModuleMiniSynopsis odir _doctitle themes iface unicode qual debug = do
let mdl = ifaceMod iface
html =
headHtml (moduleString mdl) Nothing themes +++
miniBody <<
(divModuleHeader << sectionName << moduleString mdl +++
miniSynopsis mdl iface unicode qual)
createDirectoryIfMissing True odir
writeFile (joinPath [odir, "mini_" ++ moduleHtmlFile mdl]) (renderToString debug html)
ifaceToHtml :: SourceURLs -> WikiURLs -> Interface -> Bool -> Qualification -> Html
ifaceToHtml maybe_source_url maybe_wiki_url iface unicode qual
= ppModuleContents qual exports +++
description +++
synopsis +++
divInterface (maybe_doc_hdr +++ bdy)
where
exports = numberSectionHeadings (ifaceRnExportItems iface)
-- todo: if something has only sub-docs, or fn-args-docs, should
-- it be measured here and thus prevent omitting the synopsis?
has_doc ExportDecl { expItemMbDoc = (Documentation mDoc mWarning, _) } = isJust mDoc || isJust mWarning
has_doc (ExportNoDecl _ _) = False
has_doc (ExportModule _) = False
has_doc _ = True
no_doc_at_all = not (any has_doc exports)
description | isNoHtml doc = doc
| otherwise = divDescription $ sectionName << "Description" +++ doc
where doc = docSection Nothing qual (ifaceRnDoc iface)
-- omit the synopsis if there are no documentation annotations at all
synopsis
| no_doc_at_all = noHtml
| otherwise
= divSynposis $
paragraph ! collapseControl "syn" False "caption" << "Synopsis" +++
shortDeclList (
mapMaybe (processExport True linksInfo unicode qual) exports
) ! (collapseSection "syn" False "" ++ collapseToggle "syn")
-- if the documentation doesn't begin with a section header, then
-- add one ("Documentation").
maybe_doc_hdr
= case exports of
[] -> noHtml
ExportGroup {} : _ -> noHtml
_ -> h1 << "Documentation"
bdy =
foldr (+++) noHtml $
mapMaybe (processExport False linksInfo unicode qual) exports
linksInfo = (maybe_source_url, maybe_wiki_url)
miniSynopsis :: Module -> Interface -> Bool -> Qualification -> Html
miniSynopsis mdl iface unicode qual =
divInterface << concatMap (processForMiniSynopsis mdl unicode qual) exports
where
exports = numberSectionHeadings (ifaceRnExportItems iface)
processForMiniSynopsis :: Module -> Bool -> Qualification -> ExportItem DocName
-> [Html]
processForMiniSynopsis mdl unicode qual ExportDecl { expItemDecl = L _loc decl0 } =
((divTopDecl <<).(declElem <<)) <$> case decl0 of
TyClD d -> let b = ppTyClBinderWithVarsMini mdl d in case d of
(FamDecl decl) -> [ppTyFamHeader True False decl unicode qual]
(DataDecl{}) -> [keyword "data" <+> b]
(SynDecl{}) -> [keyword "type" <+> b]
(ClassDecl {}) -> [keyword "class" <+> b]
SigD (TypeSig lnames (L _ _) _) ->
map (ppNameMini Prefix mdl . nameOccName . getName . unLoc) lnames
_ -> []
processForMiniSynopsis _ _ qual (ExportGroup lvl _id txt) =
[groupTag lvl << docToHtml Nothing qual (mkMeta txt)]
processForMiniSynopsis _ _ _ _ = []
ppNameMini :: Notation -> Module -> OccName -> Html
ppNameMini notation mdl nm =
anchor ! [ href (moduleNameUrl mdl nm)
, target mainFrameName ]
<< ppBinder' notation nm
ppTyClBinderWithVarsMini :: Module -> TyClDecl DocName -> Html
ppTyClBinderWithVarsMini mdl decl =
let n = tcdName decl
ns = tyvarNames $ tcdTyVars decl -- it's safe to use tcdTyVars, see code above
in ppTypeApp n [] ns (\is_infix -> ppNameMini is_infix mdl . nameOccName . getName) ppTyName
ppModuleContents :: Qualification -> [ExportItem DocName] -> Html
ppModuleContents qual exports
| null sections = noHtml
| otherwise = contentsDiv
where
contentsDiv = divTableOfContents << (
sectionName << "Contents" +++
unordList sections)
(sections, _leftovers{-should be []-}) = process 0 exports
process :: Int -> [ExportItem DocName] -> ([Html],[ExportItem DocName])
process _ [] = ([], [])
process n items@(ExportGroup lev id0 doc : rest)
| lev <= n = ( [], items )
| otherwise = ( html:secs, rest2 )
where
html = linkedAnchor (groupId id0)
<< docToHtmlNoAnchors (Just id0) qual (mkMeta doc) +++ mk_subsections ssecs
(ssecs, rest1) = process lev rest
(secs, rest2) = process n rest1
process n (_ : rest) = process n rest
mk_subsections [] = noHtml
mk_subsections ss = unordList ss
-- we need to assign a unique id to each section heading so we can hyperlink
-- them from the contents:
numberSectionHeadings :: [ExportItem DocName] -> [ExportItem DocName]
numberSectionHeadings = go 1
where go :: Int -> [ExportItem DocName] -> [ExportItem DocName]
go _ [] = []
go n (ExportGroup lev _ doc : es)
= ExportGroup lev (show n) doc : go (n+1) es
go n (other:es)
= other : go n es
processExport :: Bool -> LinksInfo -> Bool -> Qualification
-> ExportItem DocName -> Maybe Html
processExport _ _ _ _ ExportDecl { expItemDecl = L _ (InstD _) } = Nothing -- Hide empty instances
processExport summary _ _ qual (ExportGroup lev id0 doc)
= nothingIf summary $ groupHeading lev id0 << docToHtml (Just id0) qual (mkMeta doc)
processExport summary links unicode qual (ExportDecl decl doc subdocs insts fixities splice)
= processDecl summary $ ppDecl summary links decl doc insts fixities subdocs splice unicode qual
processExport summary _ _ qual (ExportNoDecl y [])
= processDeclOneLiner summary $ ppDocName qual Prefix True y
processExport summary _ _ qual (ExportNoDecl y subs)
= processDeclOneLiner summary $
ppDocName qual Prefix True y
+++ parenList (map (ppDocName qual Prefix True) subs)
processExport summary _ _ qual (ExportDoc doc)
= nothingIf summary $ docSection_ Nothing qual doc
processExport summary _ _ _ (ExportModule mdl)
= processDeclOneLiner summary $ toHtml "module" <+> ppModule mdl
nothingIf :: Bool -> a -> Maybe a
nothingIf True _ = Nothing
nothingIf False a = Just a
processDecl :: Bool -> Html -> Maybe Html
processDecl True = Just
processDecl False = Just . divTopDecl
processDeclOneLiner :: Bool -> Html -> Maybe Html
processDeclOneLiner True = Just
processDeclOneLiner False = Just . divTopDecl . declElem
groupHeading :: Int -> String -> Html -> Html
groupHeading lev id0 = groupTag lev ! [identifier (groupId id0)]
groupTag :: Int -> Html -> Html
groupTag lev
| lev == 1 = h1
| lev == 2 = h2
| lev == 3 = h3
| otherwise = h4
| adamse/haddock | haddock-api/src/Haddock/Backends/Xhtml.hs | bsd-2-clause | 25,236 | 1 | 20 | 6,537 | 6,887 | 3,553 | 3,334 | 485 | 7 |
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE RecordWildCards #-}
module Main where
import Network.Wai.Handler.Warp hiding (run)
import System.IO
import System.IO.Temp
import WithCli
import Network.Wai.Ghcjs
main :: IO ()
main = withCliModified mods run
where
mods =
AddShortOption "port" 'p' :
AddShortOption "sourceDirs" 'i' :
AddShortOption "mainIs" 'm' :
[]
data Options
= Options {
port :: Int,
mainIs :: String,
sourceDirs :: [FilePath]
}
deriving (Show, Generic, HasArguments)
run :: Options -> IO ()
run Options{..} = do
let settings =
setPort port $
setBeforeMainLoop (hPutStrLn stderr ("listening on " ++ show port ++ "...")) $
defaultSettings
withSystemTempDirectory "serve-ghcjs" $ \ buildDir -> do
app <- mkDevelopmentApp (BuildConfig mainIs Nothing sourceDirs "." Cabal buildDir)
runSettings settings app
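-- A possible invocation, using only the short options that are explicitly
-- declared above (the long option names are derived by withCli and are not
-- spelled out here), e.g.:
--
--   serve-ghcjs -p 8080 -m Main.hs -i src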
| soenkehahn/wai-ghcjs | drivers/serve-ghcjs.hs | bsd-3-clause | 989 | 0 | 18 | 260 | 263 | 139 | 124 | 31 | 1 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE MonadComprehensions #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RebindableSyntax #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE ViewPatterns #-}
-- TPC-H Q4
module Queries.TPCH.Standard.Q4
( q4
, q4a
, q4b
, q4Default
, q4aDefault
, q4bDefault
) where
import qualified Data.Time.Calendar as C
import Database.DSH
import Queries.TPCH.BuildingBlocks
import Schema.TPCH
--------------------------------------------------------------------------------
q4Default :: Q [(Text, Integer)]
q4Default = q4 $ C.fromGregorian 1993 7 1
q4aDefault :: Q [(Text, Integer)]
q4aDefault = q4a $ C.fromGregorian 1993 7 1
q4bDefault :: Q [(Text, Integer)]
q4bDefault = q4b $ C.fromGregorian 1993 7 1
--------------------------------------------------------------------------------
-- | Is at least one of the orders' items overdue?
hasOverdueItem :: Q Order -> Q Bool
hasOverdueItem o = or [ l_commitdateQ l < l_receiptdateQ l
| l <- lineitems
, l_orderkeyQ l == o_orderkeyQ o
]
-- | An order is problematic if at least one of its items was overdue.
problematicOrders :: Interval -> Q [Text]
problematicOrders interval =
[ o_orderpriorityQ o
| o <- orders
, o_orderdateQ o `inInterval` interval
, hasOverdueItem o
]
-- | TPC-H Query Q4 (abstraction-heavy DSH style)
-- Compute the number of problematic orders per priority level.
q4 :: Day -> Q [(Text, Integer)]
q4 startDate =
sortWith fst [ tup2 op (length g) | (view -> (op, g)) <- groupWithKey id oids]
where
interval = Interval startDate (C.addGregorianMonthsRollOver 3 startDate)
oids = problematicOrders interval
--------------------------------------------------------------------------------
-- | TPC-H Query Q4 (literal transcription with 'null')
q4a :: Day -> Q [(Text, Integer)]
q4a startDate =
sortWith fst
$ map (\(view -> (k, g)) -> pair k (length g))
$ groupWithKey id
[ o_orderpriorityQ o
| o <- orders
, o_orderdateQ o >= toQ startDate
, o_orderdateQ o < toQ endDate
, not $ null [ toQ ()
| l <- lineitems
, l_orderkeyQ l == o_orderkeyQ o
, l_commitdateQ l < l_receiptdateQ l
]
]
where
endDate = C.addGregorianMonthsRollOver 3 startDate
--------------------------------------------------------------------------------
-- | TPC-H Query Q4 (literal transcription with 'any')
q4b :: Day -> Q [(Text, Integer)]
q4b startDate =
sortWith fst
$ map (\(view -> (k, g)) -> pair k (length g))
$ groupWithKey id
[ o_orderpriorityQ o
| o <- orders
, o_orderdateQ o >= toQ startDate
, o_orderdateQ o < toQ endDate
, any (\l -> l_commitdateQ l < l_receiptdateQ l
&& l_orderkeyQ l == o_orderkeyQ o)
lineitems
]
where
endDate = C.addGregorianMonthsRollOver 3 startDate
| ulricha/dsh-example-queries | Queries/TPCH/Standard/Q4.hs | bsd-3-clause | 3,191 | 0 | 15 | 812 | 769 | 405 | 364 | 69 | 1 |
module Main where
-- the MPFR interval type:
import qualified Numeric.AERN.MPFRBasis.Interval as MI
-- numerical comparison abstraction and operators:
import qualified Numeric.AERN.NumericOrder as NumOrd
import Numeric.AERN.NumericOrder.OpsDefaultEffort
-- refinement order abstraction and operators:
import qualified Numeric.AERN.RefinementOrder as RefOrd
import Numeric.AERN.RefinementOrder.OpsDefaultEffort
-- real arithmetic operators and imprecision measure:
--import Numeric.AERN.RealArithmetic.ExactOps
import Numeric.AERN.RealArithmetic.Measures
import qualified Numeric.AERN.RealArithmetic.RefinementOrderRounding as ArithInOut
--import Numeric.AERN.RealArithmetic.RefinementOrderRounding.OpsDefaultEffort
-- generic tools for controlling effort and formatting:
import Numeric.AERN.Basics.Effort
import Numeric.AERN.Basics.ShowInternals
import System.IO
import System.Environment
-- convenience type synonyms:
type RealApprox = MI.MI
type Precision = MI.Precision
main =
do
-- print each line asap:
hSetBuffering stdout LineBuffering
-- boilerplate to process arguments:
[digitsS] <- getArgs
let digits = read digitsS -- desired accuracy in decimal digits
-- sqrt(2) to the given number of digits:
putStrLn $ "findRootDigits(fn(x)=x^2-2, leftEndpoint=0, rightEndpoint=2, digits=" ++ show digits ++ ") = "
putStrLn $ showRealApprox digits $ findRootDigits digits (100,()) xSquareMinus2 0 2
-- log(3) to the given number of digits:
putStrLn $ "findRootDigits(fn(x)=exp(x)-3, leftEndpoint=0, rightEndpoint=2, digits=" ++ show digits ++ ") = "
putStrLn $ showRealApprox digits $ findRootDigits digits (100,()) expXMinus3 0 2
where
showRealApprox digits = showInternals shouldShowInternals
where
shouldShowInternals = (digitsW+2, False)
digitsW = fromIntegral digits
xSquareMinus2 _eff x = x * x - 2
-- the effort parameter is not required in this case, using the unit type ()
expXMinus3 _eff x = (exp x) - 3
-- the effort parameter is not required in this case, using the unit type ()
findRootDigits ::
(EffortIndicator effort)
=>
Int ->
(Precision, effort) ->
(effort -> RealApprox -> RealApprox) ->
RealApprox ->
RealApprox ->
RealApprox
findRootDigits digits initEff fn leftEndpoint rightEndpoint =
snd $ last $
iterateUntilAccurate maxIncrements (maxImprecision digits) initEff $
\eff -> findRoot eff fn leftEndpoint rightEndpoint
where
maxIncrements = 100
maxImprecision :: Int -> RealApprox
maxImprecision digits = (ensurePrecision 10 10)^^(-digits)
findRoot ::
(Precision, effort) ->
(effort -> RealApprox -> RealApprox) ->
RealApprox ->
RealApprox ->
RealApprox
findRoot (prec, effFn) fn leftEndpoint rightEndpoint =
case oppositeSigns leftSign rightSign of
True ->
shrinkLRandMerge (leftEndpointPrec, leftSign) (rightEndpointPrec, rightSign)
False ->
error $
"findRoot: cannot establish that the given function fn has opposite signs at the endpoints:"
++ "\n fn(" ++ show leftEndpointPrec ++ ") = " ++ show (fn effFn leftEndpointPrec)
++ "\n fn(" ++ show rightEndpointPrec ++ ") = " ++ show (fn effFn rightEndpointPrec)
where
leftSign = getSign leftEndpointPrec
rightSign = getSign rightEndpointPrec
getSign x =
case (fnAtX >? 0, fnAtX <? 0) of
(Just True, _) -> Just Positive
(_, Just True) -> Just Negative
_ -> Nothing
where
fnAtX = fn effFn x
leftEndpointPrec = ensurePrecision prec leftEndpoint
rightEndpointPrec = ensurePrecision prec rightEndpoint
oppositeSigns (Just Positive) (Just Negative) = True
oppositeSigns (Just Negative) (Just Positive) = True
oppositeSigns _ _ = False
shrinkLRandMerge lAndSign@(l,lSign) rAndSign@(r,rSign) =
case findMidPointWithSign l r of
Nothing -> l </\> r
Just (mAndSign@(_, mSign))
| oppositeSigns lSign mSign -> shrinkLRandMerge lAndSign mAndSign
| oppositeSigns mSign rSign -> shrinkLRandMerge mAndSign rAndSign
findMidPointWithSign l r =
case potentialSplitPoints of
[] -> Nothing
mAndSign : _ -> Just mAndSign
where
potentialSplitPoints =
filter hasSign $ map addSign $ pointsInBetween l r
addSign x = (x, getSign x)
hasSign (_, Nothing) = False
hasSign _ = True
pointsInBetween l r =
-- -- ensure the points are exact and in the interior of the interval [l,r]:
-- filter betweenLR $
-- map getLeftEndpoint $
[
(l * 2 + r) / 3
,
(l + r * 2) / 3
]
-- where
-- betweenLR x =
-- ((l <? x) == Just True)
-- &&
-- ((x <? r) == Just True)
-- getLeftEndpoint x =
-- fst $ RefOrd.getEndpointsOut x
data Sign = Positive | Negative
ensurePrecision :: Precision -> RealApprox -> RealApprox
ensurePrecision prec x =
(ArithInOut.convertOutEff prec (0:: Int)) + x
--expEffort :: Int -> ArithInOut.ExpEffortIndicator RealApprox
--expEffort n =
-- (a, Int1To10 n)
-- where
-- (a, _) = ArithInOut.expDefaultEffort a
| michalkonecny/aern | aern-mpfr/demos/IntermediateValue.hs | bsd-3-clause | 5,422 | 0 | 16 | 1,412 | 1,127 | 609 | 518 | 94 | 9 |
-- |
-- Module : Crypto.PubKey.EdDSA
-- License : BSD-style
-- Maintainer : Olivier Chéron <[email protected]>
-- Stability : experimental
-- Portability : unknown
--
-- EdDSA signature generation and verification, implemented in Haskell and
-- parameterized with elliptic curve and hash algorithm. Only edwards25519 is
-- supported at the moment.
--
-- The module provides \"context\" and \"prehash\" variants defined in
-- <https://tools.ietf.org/html/rfc8032 RFC 8032>.
--
-- This implementation is most useful when wanting to customize the hash
-- algorithm. See module "Crypto.PubKey.Ed25519" for faster Ed25519 with
-- SHA-512.
--
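-- A minimal usage sketch with the edwards25519 instance, assuming SHA-512
-- from "Crypto.Hash.Algorithms" as the parameter hash (any hash with a
-- 64-byte digest satisfies the size constraint); the @roundTrip@ helper is
-- only illustrative:
--
-- > {-# LANGUAGE OverloadedStrings #-}
-- > import Crypto.ECC (Curve_Edwards25519)
-- > import Crypto.Hash.Algorithms (SHA512 (..))
-- > import Data.ByteString (ByteString)
-- > import Data.Proxy (Proxy (..))
-- >
-- > roundTrip :: IO Bool
-- > roundTrip = do
-- >     let prx = Proxy :: Proxy Curve_Edwards25519
-- >         msg = "hello" :: ByteString
-- >     sec <- generateSecretKey prx
-- >     let pub = toPublic prx SHA512 sec
-- >         sig = sign prx sec pub msg
-- >     return (verify prx pub msg sig)
--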
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
module Crypto.PubKey.EdDSA
( SecretKey
, PublicKey
, Signature
-- * Curves with EdDSA implementation
, EllipticCurveEdDSA(CurveDigestSize)
, publicKeySize
, secretKeySize
, signatureSize
-- * Smart constructors
, signature
, publicKey
, secretKey
-- * Methods
, toPublic
, sign
, signCtx
, signPh
, verify
, verifyCtx
, verifyPh
, generateSecretKey
) where
import Data.Bits
import Data.ByteArray (ByteArray, ByteArrayAccess, Bytes, ScrubbedBytes, View)
import qualified Data.ByteArray as B
import Data.ByteString (ByteString)
import Data.Proxy
import Crypto.ECC
import qualified Crypto.ECC.Edwards25519 as Edwards25519
import Crypto.Error
import Crypto.Hash (Digest)
import Crypto.Hash.IO
import Crypto.Random
import GHC.TypeLits (KnownNat, Nat)
import Crypto.Internal.Builder
import Crypto.Internal.Compat
import Crypto.Internal.Imports
import Crypto.Internal.Nat (integralNatVal)
import Foreign.Storable
-- API
-- | An EdDSA Secret key
newtype SecretKey curve = SecretKey ScrubbedBytes
deriving (Show,Eq,ByteArrayAccess,NFData)
-- | An EdDSA public key
newtype PublicKey curve hash = PublicKey Bytes
deriving (Show,Eq,ByteArrayAccess,NFData)
-- | An EdDSA signature
newtype Signature curve hash = Signature Bytes
deriving (Show,Eq,ByteArrayAccess,NFData)
-- | Elliptic curves with an implementation of EdDSA
class ( EllipticCurveBasepointArith curve
, KnownNat (CurveDigestSize curve)
) => EllipticCurveEdDSA curve where
-- | Size of the digest for this curve (in bytes)
type CurveDigestSize curve :: Nat
-- | Size of secret keys for this curve (in bytes)
secretKeySize :: proxy curve -> Int
-- hash with specified parameters
hashWithDom :: (HashAlgorithm hash, ByteArrayAccess ctx, ByteArrayAccess msg)
=> proxy curve -> hash -> Bool -> ctx -> Builder -> msg -> Bytes
-- conversion between scalar, point and public key
pointPublic :: proxy curve -> Point curve -> PublicKey curve hash
publicPoint :: proxy curve -> PublicKey curve hash -> CryptoFailable (Point curve)
encodeScalarLE :: ByteArray bs => proxy curve -> Scalar curve -> bs
decodeScalarLE :: ByteArrayAccess bs => proxy curve -> bs -> CryptoFailable (Scalar curve)
-- how to use bits in a secret key
scheduleSecret :: ( HashAlgorithm hash
, HashDigestSize hash ~ CurveDigestSize curve
)
=> proxy curve
-> hash
-> SecretKey curve
-> (Scalar curve, View Bytes)
-- | Size of public keys for this curve (in bytes)
publicKeySize :: EllipticCurveEdDSA curve => proxy curve -> Int
publicKeySize prx = signatureSize prx `div` 2
-- | Size of signatures for this curve (in bytes)
signatureSize :: forall proxy curve . EllipticCurveEdDSA curve
=> proxy curve -> Int
signatureSize _ = integralNatVal (Proxy :: Proxy (CurveDigestSize curve))
-- Constructors
-- | Try to build a public key from a bytearray
publicKey :: ( EllipticCurveEdDSA curve
, HashAlgorithm hash
, HashDigestSize hash ~ CurveDigestSize curve
, ByteArrayAccess ba
)
=> proxy curve -> hash -> ba -> CryptoFailable (PublicKey curve hash)
publicKey prx _ bs
| B.length bs == publicKeySize prx =
CryptoPassed (PublicKey $ B.convert bs)
| otherwise =
CryptoFailed CryptoError_PublicKeySizeInvalid
-- | Try to build a secret key from a bytearray
secretKey :: (EllipticCurveEdDSA curve, ByteArrayAccess ba)
=> proxy curve -> ba -> CryptoFailable (SecretKey curve)
secretKey prx bs
| B.length bs == secretKeySize prx =
CryptoPassed (SecretKey $ B.convert bs)
| otherwise =
CryptoFailed CryptoError_SecretKeyStructureInvalid
-- | Try to build a signature from a bytearray
signature :: ( EllipticCurveEdDSA curve
, HashAlgorithm hash
, HashDigestSize hash ~ CurveDigestSize curve
, ByteArrayAccess ba
)
=> proxy curve -> hash -> ba -> CryptoFailable (Signature curve hash)
signature prx _ bs
| B.length bs == signatureSize prx =
CryptoPassed (Signature $ B.convert bs)
| otherwise =
CryptoFailed CryptoError_SecretKeyStructureInvalid
-- Conversions
-- | Generate a secret key
generateSecretKey :: (EllipticCurveEdDSA curve, MonadRandom m)
=> proxy curve -> m (SecretKey curve)
generateSecretKey prx = SecretKey <$> getRandomBytes (secretKeySize prx)
-- | Create a public key from a secret key
toPublic :: ( EllipticCurveEdDSA curve
, HashAlgorithm hash
, HashDigestSize hash ~ CurveDigestSize curve
)
=> proxy curve -> hash -> SecretKey curve -> PublicKey curve hash
toPublic prx alg priv =
let p = pointBaseSmul prx (secretScalar prx alg priv)
in pointPublic prx p
secretScalar :: ( EllipticCurveEdDSA curve
, HashAlgorithm hash
, HashDigestSize hash ~ CurveDigestSize curve
)
=> proxy curve -> hash -> SecretKey curve -> Scalar curve
secretScalar prx alg priv = fst (scheduleSecret prx alg priv)
-- EdDSA signature generation & verification
-- | Sign a message using the key pair
sign :: ( EllipticCurveEdDSA curve
, HashAlgorithm hash
, HashDigestSize hash ~ CurveDigestSize curve
, ByteArrayAccess msg
)
=> proxy curve -> SecretKey curve -> PublicKey curve hash -> msg -> Signature curve hash
sign prx = signCtx prx emptyCtx
-- | Verify a message
verify :: ( EllipticCurveEdDSA curve
, HashAlgorithm hash
, HashDigestSize hash ~ CurveDigestSize curve
, ByteArrayAccess msg
)
=> proxy curve -> PublicKey curve hash -> msg -> Signature curve hash -> Bool
verify prx = verifyCtx prx emptyCtx
-- | Sign a message using the key pair under context @ctx@
signCtx :: ( EllipticCurveEdDSA curve
, HashAlgorithm hash
, HashDigestSize hash ~ CurveDigestSize curve
, ByteArrayAccess ctx
, ByteArrayAccess msg
)
=> proxy curve -> ctx -> SecretKey curve -> PublicKey curve hash -> msg -> Signature curve hash
signCtx prx = signPhCtx prx False
-- | Verify a message under context @ctx@
verifyCtx :: ( EllipticCurveEdDSA curve
, HashAlgorithm hash
, HashDigestSize hash ~ CurveDigestSize curve
, ByteArrayAccess ctx
, ByteArrayAccess msg
)
=> proxy curve -> ctx -> PublicKey curve hash -> msg -> Signature curve hash -> Bool
verifyCtx prx = verifyPhCtx prx False
-- | Sign a prehashed message using the key pair under context @ctx@
signPh :: ( EllipticCurveEdDSA curve
, HashAlgorithm hash
, HashDigestSize hash ~ CurveDigestSize curve
, ByteArrayAccess ctx
)
=> proxy curve -> ctx -> SecretKey curve -> PublicKey curve hash -> Digest prehash -> Signature curve hash
signPh prx = signPhCtx prx True
-- | Verify a prehashed message under context @ctx@
verifyPh :: ( EllipticCurveEdDSA curve
, HashAlgorithm hash
, HashDigestSize hash ~ CurveDigestSize curve
, ByteArrayAccess ctx
)
=> proxy curve -> ctx -> PublicKey curve hash -> Digest prehash -> Signature curve hash -> Bool
verifyPh prx = verifyPhCtx prx True
signPhCtx :: forall proxy curve hash ctx msg .
( EllipticCurveEdDSA curve
, HashAlgorithm hash
, HashDigestSize hash ~ CurveDigestSize curve
, ByteArrayAccess ctx
, ByteArrayAccess msg
)
=> proxy curve -> Bool -> ctx -> SecretKey curve -> PublicKey curve hash -> msg -> Signature curve hash
signPhCtx prx ph ctx priv pub msg =
let alg = undefined :: hash
(s, prefix) = scheduleSecret prx alg priv
digR = hashWithDom prx alg ph ctx (bytes prefix) msg
r = decodeScalarNoErr prx digR
pR = pointBaseSmul prx r
bsR = encodePoint prx pR
sK = getK prx ph ctx pub bsR msg
sS = scalarAdd prx r (scalarMul prx sK s)
in encodeSignature prx (bsR, pR, sS)
verifyPhCtx :: ( EllipticCurveEdDSA curve
, HashAlgorithm hash
, HashDigestSize hash ~ CurveDigestSize curve
, ByteArrayAccess ctx
, ByteArrayAccess msg
)
=> proxy curve -> Bool -> ctx -> PublicKey curve hash -> msg -> Signature curve hash -> Bool
verifyPhCtx prx ph ctx pub msg sig =
case doVerify of
CryptoPassed verified -> verified
CryptoFailed _ -> False
where
doVerify = do
(bsR, pR, sS) <- decodeSignature prx sig
nPub <- pointNegate prx `fmap` publicPoint prx pub
let sK = getK prx ph ctx pub bsR msg
pR' = pointsSmulVarTime prx sS sK nPub
return (pR == pR')
emptyCtx :: Bytes
emptyCtx = B.empty
getK :: forall proxy curve hash ctx msg .
( EllipticCurveEdDSA curve
, HashAlgorithm hash
, HashDigestSize hash ~ CurveDigestSize curve
, ByteArrayAccess ctx
, ByteArrayAccess msg
)
=> proxy curve -> Bool -> ctx -> PublicKey curve hash -> Bytes -> msg -> Scalar curve
getK prx ph ctx (PublicKey pub) bsR msg =
let alg = undefined :: hash
digK = hashWithDom prx alg ph ctx (bytes bsR <> bytes pub) msg
in decodeScalarNoErr prx digK
encodeSignature :: EllipticCurveEdDSA curve
=> proxy curve
-> (Bytes, Point curve, Scalar curve)
-> Signature curve hash
encodeSignature prx (bsR, _, sS) = Signature $ buildAndFreeze $
bytes bsR <> bytes bsS <> zero len0
where
bsS = encodeScalarLE prx sS :: Bytes
len0 = signatureSize prx - B.length bsR - B.length bsS
decodeSignature :: ( EllipticCurveEdDSA curve
, HashDigestSize hash ~ CurveDigestSize curve
)
=> proxy curve
-> Signature curve hash
-> CryptoFailable (Bytes, Point curve, Scalar curve)
decodeSignature prx (Signature bs) = do
let (bsR, bsS) = B.splitAt (publicKeySize prx) bs
pR <- decodePoint prx bsR
sS <- decodeScalarLE prx bsS
return (bsR, pR, sS)
-- implementations are supposed to decode any scalar up to the size of the digest
decodeScalarNoErr :: (EllipticCurveEdDSA curve, ByteArrayAccess bs)
=> proxy curve -> bs -> Scalar curve
decodeScalarNoErr prx = unwrap "decodeScalarNoErr" . decodeScalarLE prx
unwrap :: String -> CryptoFailable a -> a
unwrap name (CryptoFailed _) = error (name ++ ": assumption failed")
unwrap _ (CryptoPassed x) = x
-- Ed25519 implementation
instance EllipticCurveEdDSA Curve_Edwards25519 where
type CurveDigestSize Curve_Edwards25519 = 64
secretKeySize _ = 32
hashWithDom _ alg ph ctx bss
| not ph && B.null ctx = digestDomMsg alg bss
| otherwise = digestDomMsg alg (dom <> bss)
where dom = bytes ("SigEd25519 no Ed25519 collisions" :: ByteString) <>
byte (if ph then 1 else 0) <>
byte (fromIntegral $ B.length ctx) <>
bytes ctx
pointPublic _ = PublicKey . Edwards25519.pointEncode
publicPoint _ = Edwards25519.pointDecode
encodeScalarLE _ = Edwards25519.scalarEncode
decodeScalarLE _ = Edwards25519.scalarDecodeLong
scheduleSecret prx alg priv =
(decodeScalarNoErr prx clamped, B.dropView hashed 32)
where
hashed = digest alg $ \update -> update priv
clamped :: Bytes
clamped = B.copyAndFreeze (B.takeView hashed 32) $ \p -> do
b0 <- peekElemOff p 0 :: IO Word8
b31 <- peekElemOff p 31 :: IO Word8
pokeElemOff p 31 ((b31 .&. 0x7F) .|. 0x40)
pokeElemOff p 0 (b0 .&. 0xF8)
{-
Optimize hashing by limiting the number of roundtrips between Haskell and C.
Hash "update" functions do not use unsafe FFI calls, so it is better to concatenate
small fragments together and call the update function once.
Using the IO hash interface avoids context buffer copies.
Data type Digest is not used directly but converted to Bytes early. Any use of
withByteArray on the unpinned Digest backend would require copy through a
pinned trampoline.
-}
digestDomMsg :: (HashAlgorithm alg, ByteArrayAccess msg)
=> alg -> Builder -> msg -> Bytes
digestDomMsg alg bss bs = digest alg $ \update ->
update (buildAndFreeze bss :: Bytes) >> update bs
digest :: HashAlgorithm alg
=> alg
-> ((forall bs . ByteArrayAccess bs => bs -> IO ()) -> IO ())
-> Bytes
digest alg fn = B.convert $ unsafeDoIO $ do
mc <- hashMutableInitWith alg
fn (hashMutableUpdate mc)
hashMutableFinalize mc
| vincenthz/cryptonite | Crypto/PubKey/EdDSA.hs | bsd-3-clause | 14,161 | 0 | 16 | 4,063 | 3,385 | 1,713 | 1,672 | 259 | 2 |
import Control.Exception
import Data.Compact
import Data.Compact.Internal
import qualified Data.Map as Map
import Data.Time.Clock
import Text.Printf
import System.Environment
import System.Mem
import Control.DeepSeq
-- Benchmark compact against compactWithSharing. e.g.
-- ./compact_bench 1000000
main = do
[n] <- map read <$> getArgs
let m = Map.fromList [(x,[x*1000..x*1000+10]) | x <- [1..(n::Integer)]]
evaluate (force m)
timeIt "compact" $ compact m >>= compactSize >>= print
timeIt "compactWithSharing" $ compactWithSharing m >>= compactSize >>= print
timeIt :: String -> IO a -> IO a
timeIt str io = do
performMajorGC
t0 <- getCurrentTime
a <- io
t1 <- getCurrentTime
printf "%s: %.2f\n" str (realToFrac (t1 `diffUTCTime` t0) :: Double)
return a
| olsner/ghc | libraries/compact/tests/compact_bench.hs | bsd-3-clause | 780 | 0 | 15 | 132 | 275 | 141 | 134 | 23 | 1 |
{-# LANGUAGE Haskell2010 #-}
{-# LINE 1 "dist/dist-sandbox-261cd265/build/System/Posix/DynamicLinker/Common.hs" #-}
{-# LINE 1 "System/Posix/DynamicLinker/Common.hsc" #-}
{-# LINE 2 "System/Posix/DynamicLinker/Common.hsc" #-}
{-# LANGUAGE Safe #-}
{-# LINE 6 "System/Posix/DynamicLinker/Common.hsc" #-}
-----------------------------------------------------------------------------
-- |
-- Module : System.Posix.DynamicLinker.Common
-- Copyright : (c) Volker Stolz <[email protected]> 2003
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : non-portable (requires POSIX)
--
-- Dynamic linker support through dlopen()
-----------------------------------------------------------------------------
module System.Posix.DynamicLinker.Common (
module System.Posix.DynamicLinker.Prim,
dlsym,
dlerror,
dlclose,
undl,
throwDLErrorIf,
Module(..)
)
-- Usage:
-- ******
--
-- Let's assume you want to open a local shared library \'foo\' (.\/libfoo.so)
-- offering a function
-- @char \* mogrify (char\*,int)@
-- and invoke @str = mogrify("test",1)@:
--
--
-- type Fun = CString -> Int -> IO CString
-- foreign import dynamic unsafe fun__ :: FunPtr Fun -> Fun
--
-- withDL "libfoo.so" [RTLD_NOW] \$ \\ mod -> do
-- funptr <- dlsym mod "mogrify"
-- let fun = fun__ funptr
-- withCString "test" \$ \\ str -> do
-- strptr <- fun str 1
-- strstr <- peekCString strptr
-- ...
--
where
{-# LINE 54 "System/Posix/DynamicLinker/Common.hsc" #-}
import System.Posix.DynamicLinker.Prim
import Foreign
import Foreign.C
dlclose :: DL -> IO ()
dlclose (DLHandle h) = throwDLErrorIf_ "dlclose" (/= 0) $ c_dlclose h
dlclose h = error $ "dlclose: invalid argument" ++ (show h)
dlerror :: IO String
dlerror = c_dlerror >>= peekCString
-- |'dlsym' returns the address binding of the symbol described in @symbol@,
-- as it occurs in the shared object identified by @source@.
dlsym :: DL -> String -> IO (FunPtr a)
dlsym source symbol = do
withCAString symbol $ \ s -> do
throwDLErrorIf "dlsym" (== nullFunPtr) $ c_dlsym (packDL source) s
-- |'undl' obtains the raw handle. You mustn't do something like
-- @withDL mod flags $ liftM undl >>= \ p -> use p@
undl :: DL -> Ptr ()
undl = packDL
throwDLErrorIf :: String -> (a -> Bool) -> IO a -> IO a
throwDLErrorIf s p f = do
r <- f
if (p r)
then dlerror >>= \ err -> ioError (userError ( s ++ ": " ++ err))
else return r
throwDLErrorIf_ :: String -> (a -> Bool) -> IO a -> IO ()
throwDLErrorIf_ s p f = throwDLErrorIf s p f >> return ()
-- abstract handle for dynamically loaded module (EXPORTED)
--
newtype Module = Module (Ptr ())
| phischu/fragnix | tests/packages/scotty/System.Posix.DynamicLinker.Common.hs | bsd-3-clause | 2,756 | 0 | 15 | 533 | 450 | 258 | 192 | 34 | 2 |
{-# LANGUAGE Haskell98 #-}
{-# LINE 1 "Utils/Containers/Internal/BitQueue.hs" #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE BangPatterns #-}
-----------------------------------------------------------------------------
-- |
-- Module : Utils.Containers.Internal.BitQueue
-- Copyright : (c) David Feuer 2016
-- License : BSD-style
-- Maintainer : [email protected]
-- Portability : portable
--
-- = WARNING
--
-- This module is considered __internal__.
--
-- The Package Versioning Policy __does not apply__.
--
-- The contents of this module may change __in any way whatsoever__
-- and __without any warning__ between minor versions of this package.
--
-- Authors importing this module are expected to track development
-- closely.
--
-- = Description
--
-- An extremely light-weight, fast, and limited representation of a string of
-- up to (2*WORDSIZE - 2) bits. In fact, there are two representations,
-- misleadingly named bit queue builder and bit queue. The builder supports
-- only `emptyQB`, creating an empty builder, and `snocQB`, enqueueing a bit.
-- The bit queue builder is then turned into a bit queue using `buildQ`, after
-- which bits can be removed one by one using `unconsQ`. If the size limit is
-- exceeded, further operations will silently produce nonsense.
-----------------------------------------------------------------------------
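--
-- A minimal usage sketch (illustrative values only): enqueue two bits,
-- build the queue, and read them back in FIFO order.
--
-- > let q = buildQ (emptyQB `snocQB` True `snocQB` False)
-- > toListQ q            -- [True,False]
-- > fmap fst (unconsQ q) -- Just True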
module Utils.Containers.Internal.BitQueue
( BitQueue
, BitQueueB
, emptyQB
, snocQB
, buildQ
, unconsQ
, toListQ
) where
import Utils.Containers.Internal.BitUtil (shiftLL, shiftRL, wordSize)
import Data.Bits ((.|.), (.&.), testBit)
import Data.Bits (countTrailingZeros)
-- A bit queue builder. We represent a double word using two words
-- because we don't currently have access to proper double words.
data BitQueueB = BQB {-# UNPACK #-} !Word
{-# UNPACK #-} !Word
newtype BitQueue = BQ BitQueueB deriving Show
-- Intended for debugging.
instance Show BitQueueB where
show (BQB hi lo) = "BQ"++
show (map (testBit hi) [(wordSize - 1),(wordSize - 2)..0]
++ map (testBit lo) [(wordSize - 1),(wordSize - 2)..0])
-- | Create an empty bit queue builder. This is represented as a single guard
-- bit in the most significant position.
emptyQB :: BitQueueB
emptyQB = BQB (1 `shiftLL` (wordSize - 1)) 0
{-# INLINE emptyQB #-}
-- Shift the double word to the right by one bit.
shiftQBR1 :: BitQueueB -> BitQueueB
shiftQBR1 (BQB hi lo) = BQB hi' lo' where
lo' = (lo `shiftRL` 1) .|. (hi `shiftLL` (wordSize - 1))
hi' = hi `shiftRL` 1
{-# INLINE shiftQBR1 #-}
-- | Enqueue a bit. This works by shifting the queue right one bit,
-- then setting the most significant bit as requested.
{-# INLINE snocQB #-}
snocQB :: BitQueueB -> Bool -> BitQueueB
snocQB bq b = case shiftQBR1 bq of
BQB hi lo -> BQB (hi .|. (fromIntegral (fromEnum b) `shiftLL` (wordSize - 1))) lo
-- | Convert a bit queue builder to a bit queue. This shifts in a new
-- guard bit on the left, and shifts right until the old guard bit falls
-- off.
{-# INLINE buildQ #-}
buildQ :: BitQueueB -> BitQueue
buildQ (BQB hi 0) = BQ (BQB 0 lo') where
zeros = countTrailingZeros hi
lo' = ((hi `shiftRL` 1) .|. (1 `shiftLL` (wordSize - 1))) `shiftRL` zeros
buildQ (BQB hi lo) = BQ (BQB hi' lo') where
zeros = countTrailingZeros lo
lo1 = (lo `shiftRL` 1) .|. (hi `shiftLL` (wordSize - 1))
hi1 = (hi `shiftRL` 1) .|. (1 `shiftLL` (wordSize - 1))
lo' = (lo1 `shiftRL` zeros) .|. (hi1 `shiftLL` (wordSize - zeros))
hi' = hi1 `shiftRL` zeros
-- Test if the queue is empty, which occurs when there's
-- nothing left but a guard bit in the least significant
-- place.
nullQ :: BitQueue -> Bool
nullQ (BQ (BQB 0 1)) = True
nullQ _ = False
{-# INLINE nullQ #-}
-- | Dequeue an element, or discover the queue is empty.
unconsQ :: BitQueue -> Maybe (Bool, BitQueue)
unconsQ q | nullQ q = Nothing
unconsQ (BQ bq@(BQB _ lo)) = Just (hd, BQ tl)
where
!hd = (lo .&. 1) /= 0
!tl = shiftQBR1 bq
{-# INLINE unconsQ #-}
-- | Convert a bit queue to a list of bits by unconsing.
-- This is used to test that the queue functions properly.
toListQ :: BitQueue -> [Bool]
toListQ bq = case unconsQ bq of
Nothing -> []
Just (hd, tl) -> hd : toListQ tl
| phischu/fragnix | tests/packages/scotty/Utils.Containers.Internal.BitQueue.hs | bsd-3-clause | 4,687 | 0 | 15 | 1,266 | 889 | 519 | 370 | 59 | 2 |
{-
Copyright 2015 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE NoImplicitPrelude #-}
module Foreign.C.String (module M) where
import "base" Foreign.C.String as M
| Ye-Yong-Chi/codeworld | codeworld-base/src/Foreign/C/String.hs | apache-2.0 | 747 | 0 | 4 | 136 | 25 | 19 | 6 | 4 | 0 |
module Settings.Builders.Common (
module Base,
module Expression,
module Oracles.Flag,
module Oracles.Setting,
module Settings,
module UserSettings,
cIncludeArgs, ldArgs, cArgs, cWarnings,
packageDatabaseArgs, bootPackageDatabaseArgs
) where
import Hadrian.Haskell.Cabal.Type
import Base
import Expression
import Oracles.Flag
import Oracles.Setting
import Packages
import Settings
import UserSettings
cIncludeArgs :: Args
cIncludeArgs = do
pkg <- getPackage
path <- getBuildPath
incDirs <- getContextData includeDirs
depDirs <- getContextData depIncludeDirs
stage <- getStage
iconvIncludeDir <- getSetting IconvIncludeDir
gmpIncludeDir <- getSetting GmpIncludeDir
ffiIncludeDir <- getSetting FfiIncludeDir
    libdwIncludeDir <- getSetting LibdwIncludeDir
libPath <- expr $ stageLibPath stage
mconcat [ notStage0 ||^ package compiler ? arg "-Iincludes"
, arg $ "-I" ++ libPath
, arg $ "-I" ++ path
, pure . map ("-I"++) . filter (/= "") $ [iconvIncludeDir, gmpIncludeDir]
, flag UseSystemFfi ? arg ("-I" ++ ffiIncludeDir)
, flag WithLibdw ? if not (null libdwIncludeDir) then arg ("-I" ++ libdwIncludeDir) else mempty
-- Add @incDirs@ in the build directory, since some files generated
-- with @autoconf@ may end up in the build directory.
, pure [ "-I" ++ path -/- dir | dir <- incDirs ]
-- Add @incDirs@ in the package directory for include files shipped
-- with the package.
, pure [ "-I" ++ pkgPath pkg -/- dir | dir <- incDirs ]
, pure [ "-I" ++ unifyPath dir | dir <- depDirs ] ]
ldArgs :: Args
ldArgs = mempty
cArgs :: Args
cArgs = mempty
-- TODO: should be in a different file
cWarnings :: Args
cWarnings = mconcat
[ arg "-Wall"
, flag CcLlvmBackend ? arg "-Wno-unknown-pragmas"
, notM (flag CcLlvmBackend) ? not windowsHost ? arg "-Werror=unused-but-set-variable"
, notM (flag CcLlvmBackend) ? arg "-Wno-error=inline" ]
packageDatabaseArgs :: Args
packageDatabaseArgs = do
stage <- getStage
dbPath <- expr (packageDbPath stage)
expr (need [dbPath -/- packageDbStamp])
prefix <- ifM (builder Ghc) (return "-package-db ") (return "--package-db=")
arg $ prefix ++ dbPath
bootPackageDatabaseArgs :: Args
bootPackageDatabaseArgs = do
stage <- getStage
dbPath <- expr $ packageDbPath stage
expr $ need [dbPath -/- packageDbStamp]
stage0 ? packageDatabaseArgs
| sdiehl/ghc | hadrian/src/Settings/Builders/Common.hs | bsd-3-clause | 2,567 | 0 | 13 | 638 | 658 | 339 | 319 | 61 | 2 |
--------------------------------------------------------------------------
-- Copyright (c) 2007-2010, ETH Zurich.
-- All rights reserved.
--
-- This file is distributed under the terms in the attached LICENSE file.
-- If you do not find this file, copies can be found by writing to:
-- ETH Zurich D-INFK, Haldeneggsteig 4, CH-8092 Zurich. Attn: Systems Group.
--
-- Architectural definitions for Barrelfish on ARMv6K ISA.
--
-- The build target is the Realview target on qemu-system-arm with the
-- arm11mpcore CPU.
--
--------------------------------------------------------------------------
module ARM11MP where
import HakeTypes
import Path
import qualified Config
import qualified ARMv5
import qualified ArchDefaults
-------------------------------------------------------------------------
--
-- Architecture specific definitions for ARMv6
--
-------------------------------------------------------------------------
arch = "arm11mp"
archFamily = "arm"
compiler = "arm-none-linux-gnueabi-gcc"
objcopy = "arm-none-linux-gnueabi-objcopy"
objdump = "arm-none-linux-gnueabi-objdump"
ar = "arm-none-linux-gnueabi-ar"
ranlib = "arm-none-linux-gnueabi-ranlib"
cxxcompiler = "arm-none-linux-gnueabi-g++"
ourCommonFlags = [ Str "-fno-unwind-tables",
Str "-Wno-packed-bitfield-compat",
Str "-mcpu=mpcore",
Str "-mapcs",
Str "-mabi=aapcs-linux",
Str "-ffixed-r9",
Str "-DTHREAD_REGISTER=R9" ]
cFlags = ArchDefaults.commonCFlags
++ ArchDefaults.commonFlags
++ ourCommonFlags
cxxFlags = ArchDefaults.commonCxxFlags
++ ArchDefaults.commonFlags
++ ourCommonFlags
cDefines = ArchDefaults.cDefines options
ourLdFlags = ARMv5.ourLdFlags
ldFlags = ArchDefaults.ldFlags arch ++ ourLdFlags
ldCxxFlags = ArchDefaults.ldCxxFlags arch ++ ourLdFlags
stdLibs = ArchDefaults.stdLibs arch ++ [ Str "-lgcc" ]
options = (ArchDefaults.options arch archFamily) {
optFlags = cFlags,
optCxxFlags = cxxFlags,
optDefines = cDefines,
optLdFlags = ldFlags,
optLdCxxFlags = ldCxxFlags,
optLibs = stdLibs,
optInterconnectDrivers = ["lmp"],
optFlounderBackends = ["lmp"]
}
--
-- Compilers
--
cCompiler = ArchDefaults.cCompiler arch compiler
cxxCompiler = ArchDefaults.cxxCompiler arch cxxcompiler
makeDepend = ArchDefaults.makeDepend arch compiler
makeCxxDepend = ArchDefaults.makeCxxDepend arch cxxcompiler
cToAssembler = ArchDefaults.cToAssembler arch compiler
assembler = ArchDefaults.assembler arch compiler
archive = ArchDefaults.archive arch
linker = ArchDefaults.linker arch compiler
cxxlinker = ArchDefaults.cxxlinker arch cxxcompiler
--
-- The kernel is "different"
--
kernelCFlags = [ Str s | s <- [ "-fno-builtin",
"-fno-unwind-tables",
"-nostdinc",
"-std=c99",
"-mcpu=mpcore",
"-mapcs",
"-mabi=aapcs-linux",
"-fPIE",
"-U__linux__",
"-Wall",
"-Wshadow",
"-Wstrict-prototypes",
"-Wold-style-definition",
"-Wmissing-prototypes",
"-Wmissing-declarations",
"-Wmissing-field-initializers",
"-Wredundant-decls",
"-Werror",
"-imacros deputy/nodeputy.h",
"-fpie",
"-fno-stack-check",
"-ffreestanding",
"-fomit-frame-pointer",
"-mno-long-calls",
"-Wmissing-noreturn",
"-mno-apcs-stack-check",
"-mno-apcs-reentrant",
"-msingle-pic-base",
"-mpic-register=r10",
"-DPIC_REGISTER=R10",
"-ffixed-r9",
"-DTHREAD_REGISTER=R9" ]]
kernelLdFlags = [ Str "-Wl,-N",
NStr "-Wl,-Map,", Out arch "kernel.map",
Str "-fno-builtin",
Str "-nostdlib",
Str "-Wl,--fatal-warnings"
]
--
-- Link the kernel (CPU Driver)
--
linkKernel :: Options -> [String] -> [String] -> String -> HRule
linkKernel opts objs libs kbin =
let linkscript = "/kernel/linker.lds"
kbootable = kbin ++ ".bin"
in
Rules [ Rule ([ Str compiler, Str Config.cOptFlags,
NStr "-T", In BuildTree arch linkscript,
Str "-o", Out arch kbin
]
++ (optLdFlags opts)
++
[ In BuildTree arch o | o <- objs ]
++
[ In BuildTree arch l | l <- libs ]
++
[ Str "-lgcc" ]
),
-- Edit ELF header so qemu-system-arm will treat it as a Linux kernel
Rule [ In SrcTree "src" "/tools/arm-mkbootelf.sh",
Str objdump, In BuildTree arch kbin, Out arch (kbootable)],
-- Generate kernel assembly dump
Rule [ Str (objdump ++ " -d -M reg-names-raw"),
In BuildTree arch kbin, Str ">", Out arch (kbin ++ ".asm")],
Rule [ Str "cpp",
NStr "-I", NoDep SrcTree "src" "/kernel/include/arch/arm",
Str "-D__ASSEMBLER__",
Str "-P", In SrcTree "src" "/kernel/arch/arm/linker.lds.in",
Out arch linkscript
]
]
| utsav2601/cmpe295A | hake/ARM11MP.hs | mit | 6,131 | 0 | 17 | 2,348 | 905 | 508 | 397 | 110 | 1 |
module LocalSpecTyVar where
foo = go
where
{-@ go :: xs:[a] -> {v:[a] | (len v) > (len xs)} @-}
go [] = []
go (x:xs) = x : go xs
| ssaavedra/liquidhaskell | tests/todo/LocalSpecTyVar.hs | bsd-3-clause | 148 | 0 | 9 | 50 | 46 | 25 | 21 | 4 | 2 |
{-# LANGUAGE Rank2Types #-}
module Haddock.Syb
( everything, everythingWithState, everywhere
, mkT
, combine
) where
import Data.Data
import Control.Applicative
-- | Perform a query on each level of a tree.
--
-- This is stolen directly from SYB package and copied here to not introduce
-- additional dependencies.
everything :: (r -> r -> r) -> (forall a. Data a => a -> r)
-> (forall a. Data a => a -> r)
everything k f x = foldl k (f x) (gmapQ (everything k f) x)
-- | Perform a query with state on each level of a tree.
--
-- This is the same as 'everything' but allows for stateful computations. In
-- SYB it is called @everythingWithContext@ but I find this name somewhat
-- nicer.
everythingWithState :: s -> (r -> r -> r)
-> (forall a. Data a => a -> s -> (r, s))
-> (forall a. Data a => a -> r)
everythingWithState s k f x =
let (r, s') = f x s
in foldl k r (gmapQ (everythingWithState s' k f) x)
-- | Apply transformation on each level of a tree.
--
-- Just like 'everything', this is stolen from SYB package.
everywhere :: (forall a. Data a => a -> a) -> (forall a. Data a => a -> a)
everywhere f = f . gmapT (everywhere f)
-- | Create generic transformation.
--
-- Another function stolen from SYB package.
mkT :: (Typeable a, Typeable b) => (b -> b) -> (a -> a)
mkT f = case cast f of
Just f' -> f'
Nothing -> id
-- | Combine two queries into one using alternative combinator.
combine :: Alternative f => (forall a. Data a => a -> f r)
-> (forall a. Data a => a -> f r)
-> (forall a. Data a => a -> f r)
combine f g x = f x <|> g x
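-- A small illustration of these combinators (hypothetical helpers, assuming
-- 'Data' instances for the traversed values and 'cast' from "Data.Typeable"):
--
-- > countInts :: Data a => a -> Int
-- > countInts = everything (+) (\x -> maybe 0 (const 1) (cast x :: Maybe Int))
-- >
-- > bumpInts :: Data a => a -> a
-- > bumpInts = everywhere (mkT ((+ 1) :: Int -> Int))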
| Helkafen/haddock | haddock-api/src/Haddock/Syb.hs | bsd-2-clause | 1,690 | 0 | 13 | 470 | 535 | 285 | 250 | 26 | 2 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
module Stack.BuildPlanSpec where
import Stack.BuildPlan
import Control.Monad.Logger
import Control.Exception hiding (try)
import Control.Monad.Catch (try)
import Data.Monoid
import qualified Data.Map as Map
import qualified Data.Set as Set
import Network.HTTP.Conduit (Manager)
import Prelude -- Fix redundant import warnings
import System.Directory
import System.IO.Temp
import System.Environment
import Test.Hspec
import Stack.Config
import Stack.Types
import Stack.Types.StackT
data T = T
{ manager :: Manager
}
setup :: IO T
setup = do
manager <- newTLSManager
unsetEnv "STACK_YAML"
return T{..}
teardown :: T -> IO ()
teardown _ = return ()
main :: IO ()
main = hspec spec
spec :: Spec
spec = beforeAll setup $ afterAll teardown $ do
let logLevel = LevelDebug
let loadConfig' m = runStackLoggingT m logLevel False False (loadConfig mempty Nothing)
let loadBuildConfigRest m = runStackLoggingT m logLevel False False
let inTempDir action = do
currentDirectory <- getCurrentDirectory
withSystemTempDirectory "Stack_BuildPlanSpec" $ \tempDir -> do
let enterDir = setCurrentDirectory tempDir
let exitDir = setCurrentDirectory currentDirectory
bracket_ enterDir exitDir action
it "finds missing transitive dependencies #159" $ \T{..} -> inTempDir $ do
-- Note: this test is somewhat fragile, depending on packages on
-- Hackage remaining in a certain state. If it fails, confirm that
-- github still depends on failure.
writeFile "stack.yaml" "resolver: lts-2.9"
LoadConfig{..} <- loadConfig' manager
bconfig <- loadBuildConfigRest manager (lcLoadBuildConfig Nothing)
runStackT manager logLevel bconfig False False $ do
menv <- getMinimalEnvOverride
mbp <- loadMiniBuildPlan $ LTS 2 9
eres <- try $ resolveBuildPlan
menv
mbp
(const False)
(Map.fromList
[ ($(mkPackageName "github"), Set.empty)
])
case eres of
Left (UnknownPackages _ unknown _) -> do
case Map.lookup $(mkPackageName "github") unknown of
Nothing -> error "doesn't list github as unknown"
Just _ -> return ()
{- Currently not implemented, see: https://github.com/fpco/stack/issues/159#issuecomment-107809418
case Map.lookup $(mkPackageName "failure") unknown of
Nothing -> error "failure not listed"
Just _ -> return ()
-}
_ -> error $ "Unexpected result from resolveBuildPlan: " ++ show eres
return ()
describe "shadowMiniBuildPlan" $ do
let version = $(mkVersion "1.0.0") -- unimportant for this test
pn = either throw id . parsePackageNameFromString
mkMPI deps = MiniPackageInfo
{ mpiVersion = version
, mpiFlags = Map.empty
, mpiPackageDeps = Set.fromList $ map pn $ words deps
, mpiToolDeps = Set.empty
, mpiExes = Set.empty
, mpiHasLibrary = True
}
go x y = (pn x, mkMPI y)
resourcet = go "resourcet" ""
conduit = go "conduit" "resourcet"
conduitExtra = go "conduit-extra" "conduit"
text = go "text" ""
attoparsec = go "attoparsec" "text"
aeson = go "aeson" "text attoparsec"
mkMBP pkgs = MiniBuildPlan
{ mbpCompilerVersion = GhcVersion version
, mbpPackages = Map.fromList pkgs
}
mbpAll = mkMBP [resourcet, conduit, conduitExtra, text, attoparsec, aeson]
test name input shadowed output extra =
it name $ const $
shadowMiniBuildPlan input (Set.fromList $ map pn $ words shadowed)
`shouldBe` (output, Map.fromList extra)
test "no shadowing" mbpAll "" mbpAll []
test "shadow something that isn't there" mbpAll "does-not-exist" mbpAll []
test "shadow a leaf" mbpAll "conduit-extra"
(mkMBP [resourcet, conduit, text, attoparsec, aeson])
[]
test "shadow direct dep" mbpAll "conduit"
(mkMBP [resourcet, text, attoparsec, aeson])
[conduitExtra]
test "shadow deep dep" mbpAll "resourcet"
(mkMBP [text, attoparsec, aeson])
[conduit, conduitExtra]
test "shadow deep dep and leaf" mbpAll "resourcet aeson"
(mkMBP [text, attoparsec])
[conduit, conduitExtra]
test "shadow deep dep and direct dep" mbpAll "resourcet conduit"
(mkMBP [text, attoparsec, aeson])
[conduitExtra]
| akhileshs/stack | src/test/Stack/BuildPlanSpec.hs | bsd-3-clause | 5,093 | 0 | 25 | 1,681 | 1,143 | 584 | 559 | 105 | 3 |
qsort [] = []
qsort (p:ps) = qsort (filter (\x -> x <= p) ps)
            ++ p : qsort (filter (\x -> x > p) ps)
| MartinThoma/LaTeX-examples | documents/Programmierparadigmen/scripts/haskell/qsort.hs | mit | 115 | 0 | 13 | 36 | 88 | 46 | 42 | 3 | 1 |
module Driver
( parseFile
, parseString
, compileScheme
, compileC
, renderC
, compiler
,) where
import Control.Monad
import Control.Error
import System.Posix.User
import System.Cmd
import Language.C.DSL (pretty, CExtDecl)
import Utils.Error
import AST
import Parser
import RewriteToplevels
import CPS
import OptimizeCPS
import ClosureConvert
import CodeGen
-- | The big compilation function, chains together each section of the
-- compiler and returns a list of C declarations
compileScheme :: [SDec UserPrim] -> Compiler [CExtDecl]
compileScheme = addPrimops >=> makeMain >=> cpsify >=> optimizeCPS >=> closConvert >=> codegen
where addPrimops = return . (++prims)
-- | Pretty print the C code with proper includes
renderC :: [CExtDecl] -> String
renderC = ("#include <stdlib.h>\n#include \"rts.h\"\n"++) . unlines . map (show . pretty)
-- | A hacky way to automatically compile the C
-- code using GCC, assumes the RTS is in ~/.c_of_scheme/
compileC :: [CExtDecl] -> IO ()
compileC code = do
UserEntry{homeDirectory = hd} <- getRealUserID >>= getUserEntryForID
let cFile = "out.c"
cBits = hd ++ "/.c_of_scheme/"
rts = cBits ++ "rts.c"
gc = cBits ++ "gc.c"
writeFile cFile (renderC code)
output <- system $ "gcc -g -I" ++ unwords [cBits, cFile, rts, gc] ++ " `pkg-config --cflags --libs glib-2.0`"
print output -- Prints ExitSuccess or failure, the only output of the compiler ATM (usability!)
-- | The IO part of the compiler, handling loading and parsing files
-- and joining together all the compilation glue of compileScheme and friends
compiler :: [FilePath] -> IO ()
compiler files = do
res <- mapM parseFile files :: IO ([Compiler [SDec UserPrim]])
let compRes = runCompiler (joinFiles res >>= compileScheme)
case compRes of
Right source -> compileC source
Left e -> errLn (presentError e)
where joinFiles = fmap concat . sequence
-- | List of primitives wrapped in fully eta-converted
-- functions. These will be properly CPS converted.
prims :: [SDec UserPrim]
prims = [ Def (SVar "+") $ Lam [a, b] [Plus # [a', b']]
, Def (SVar "-") $ Lam [a, b] [Sub # [a', b']]
, Def (SVar "*") $ Lam [a, b] [Mult # [a', b']]
, Def (SVar "/") $ Lam [a, b] [Div # [a', b']]
, Def (SVar "eq?") $ Lam [a, b] [Eq # [a', b']]
, Def (SVar "cons") $ Lam [a, b] [Cons # [a', b']]
, Def (SVar "display") $ Lam [a] [Display # [a']]
, Def (SVar "car") $ Lam [a] [Car # [a']]
, Def (SVar "cdr") $ Lam [a] [Cdr # [a']]]
where a = SVar "a"
b = SVar "b"
a' = Var a
b' = Var b
f # args = App (Prim f) args
| jozefg/c_of_scheme | src/Driver.hs | mit | 2,748 | 0 | 13 | 697 | 876 | 473 | 403 | 58 | 2 |