| column | type | min | max |
|---|---|---|---|
| code | string (length) | 5 | 1.03M |
| repo_name | string (length) | 5 | 90 |
| path | string (length) | 4 | 158 |
| license | string (15 classes) | n/a | n/a |
| size | int64 | 5 | 1.03M |
| n_ast_errors | int64 | 0 | 53.9k |
| ast_max_depth | int64 | 2 | 4.17k |
| n_whitespaces | int64 | 0 | 365k |
| n_ast_nodes | int64 | 3 | 317k |
| n_ast_terminals | int64 | 1 | 171k |
| n_ast_nonterminals | int64 | 1 | 146k |
| loc | int64 | -1 | 37.3k |
| cycloplexity | int64 | -1 | 1.31k |
{-# LANGUAGE MultiParamTypeClasses, FlexibleInstances, BangPatterns,
PatternGuards #-}
{-# OPTIONS_GHC -fwarn-incomplete-patterns #-}
module Idris.Core.Evaluate(normalise, normaliseTrace, normaliseC, normaliseAll, toValue, quoteTerm,
rt_simplify, simplify, specialise, hnf, convEq, convEq',
Def(..), CaseInfo(..), CaseDefs(..),
Accessibility(..), Totality(..), PReason(..), MetaInformation(..),
Context, initContext, ctxtAlist, next_tvar,
addToCtxt, setAccess, setTotal, setMetaInformation, addCtxtDef, addTyDecl,
addDatatype, addCasedef, simplifyCasedef, addOperator,
lookupNames, lookupTyName, lookupTyNameExact, lookupTy, lookupTyExact,
lookupP, lookupP_all, lookupDef, lookupNameDef, lookupDefExact, lookupDefAcc, lookupDefAccExact, lookupVal,
mapDefCtxt,
lookupTotal, lookupNameTotal, lookupMetaInformation, lookupTyEnv, isTCDict, isDConName, canBeDConName, isTConName, isConName, isFnName,
Value(..), Quote(..), initEval, uniqueNameCtxt, uniqueBindersCtxt, definitions,
isUniverse) where
import Debug.Trace
import Control.Applicative hiding (Const)
import Control.Monad.State -- not Strict!
import qualified Data.Binary as B
import Data.Binary hiding (get, put)
import Data.Maybe (listToMaybe)
import Idris.Core.TT
import Idris.Core.CaseTree
data EvalState = ES { limited :: [(Name, Int)],
nexthole :: Int,
blocking :: Bool }
deriving Show
type Eval a = State EvalState a
data EvalOpt = Spec
| HNF
| Simplify
| AtREPL
| RunTT
deriving (Show, Eq)
initEval = ES [] 0 False
-- VALUES (as HOAS) ---------------------------------------------------------
-- | A HOAS representation of values
data Value = VP NameType Name Value
| VV Int
-- True for Bool indicates safe to reduce
| VBind Bool Name (Binder Value) (Value -> Eval Value)
-- For frozen let bindings when simplifying
| VBLet Int Name Value Value Value
| VApp Value Value
| VType UExp
| VUType Universe
| VErased
| VImpossible
| VConstant Const
| VProj Value Int
-- | VLazy Env [Value] Term
| VTmp Int
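-- An illustrative sketch (not from the original source; the binder name "x"
-- and the type argument 'ty' are assumptions): the identity function
-- \x => x at domain type ty could be built as
--
--   idVal :: Value -> Value
--   idVal ty = VBind True (sUN "x") (Lam ty) (\x -> return x)
--
-- Because the body is an ordinary Haskell function (Value -> Eval Value),
-- beta reduction is plain function application; the Quote class later in
-- this module converts such values back into first-order TT terms.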
instance Show Value where
show x = show $ evalState (quote 100 x) initEval
instance Show (a -> b) where
show x = "<<fn>>"
-- THE EVALUATOR ------------------------------------------------------------
-- The environment is assumed to be "locally named" - i.e., not de Bruijn
-- indexed.
-- i.e. it's an intermediate environment that we have while type checking or
-- while building a proof.
-- | Normalise fully type checked terms (so, assume all names/let bindings resolved)
normaliseC :: Context -> Env -> TT Name -> TT Name
normaliseC ctxt env t
= evalState (do val <- eval False ctxt [] (map finalEntry env) t []
quote 0 val) initEval
normaliseAll :: Context -> Env -> TT Name -> TT Name
normaliseAll ctxt env t
= evalState (do val <- eval False ctxt [] (map finalEntry env) t [AtREPL]
quote 0 val) initEval
normalise :: Context -> Env -> TT Name -> TT Name
normalise = normaliseTrace False
normaliseTrace :: Bool -> Context -> Env -> TT Name -> TT Name
normaliseTrace tr ctxt env t
= evalState (do val <- eval tr ctxt [] (map finalEntry env) (finalise t) []
quote 0 val) initEval
toValue :: Context -> Env -> TT Name -> Value
toValue ctxt env t
= evalState (eval False ctxt [] (map finalEntry env) t []) initEval
quoteTerm :: Value -> TT Name
quoteTerm val = evalState (quote 0 val) initEval
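-- A hedged note on how the functions above relate: evaluation followed by
-- quotation is what normalisation does, so roughly
--
--   quoteTerm (toValue ctxt env t)  ~=~  normaliseC ctxt env t
--
-- (normalise/normaliseTrace additionally apply 'finalise' to the input term,
-- and normaliseAll evaluates with the AtREPL option.)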
-- Return a specialised name, and an updated list of reductions available,
-- so that the caller can tell how much specialisation was achieved.
specialise :: Context -> Env -> [(Name, Int)] -> TT Name ->
(TT Name, [(Name, Int)])
specialise ctxt env limits t
= let (tm, st) =
runState (do val <- eval False ctxt []
(map finalEntry env) (finalise t)
[Spec]
quote 0 val) (initEval { limited = limits }) in
(tm, limited st)
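-- A usage sketch (the name 'f' below is hypothetical):
--
--   specialise ctxt env [(f, 3)] tm
--
-- evaluates 'tm' with the Spec option, allowing 'f' to be unfolded while the
-- per-name counters are tracked in the evaluator state; the returned list is
-- the state's 'limited' field afterwards, which the caller can compare
-- against the input limits to see how much unfolding was actually spent
-- (see 'usable', 'deduct' and 'reinstate' below for the bookkeeping).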
-- | Like normalise, but we only reduce functions that are marked as okay to
-- inline (and probably shouldn't reduce lets?)
-- 20130908: now only used to reduce for totality checking. Inlining should
-- be done elsewhere.
simplify :: Context -> Env -> TT Name -> TT Name
simplify ctxt env t
= evalState (do val <- eval False ctxt [(sUN "lazy", 0),
(sUN "force", 0),
(sUN "Force", 0),
(sUN "assert_smaller", 0),
(sUN "assert_total", 0),
(sUN "par", 0),
(sUN "prim__syntactic_eq", 0),
(sUN "fork", 0)]
(map finalEntry env) (finalise t)
[Simplify]
quote 0 val) initEval
-- | Simplify for run-time (i.e. basic inlining)
rt_simplify :: Context -> Env -> TT Name -> TT Name
rt_simplify ctxt env t
= evalState (do val <- eval False ctxt [(sUN "lazy", 0),
(sUN "force", 0),
(sUN "Force", 0),
(sUN "par", 0),
(sUN "prim__syntactic_eq", 0),
(sUN "prim_fork", 0)]
(map finalEntry env) (finalise t)
[RunTT]
quote 0 val) initEval
-- | Reduce a term to head normal form
hnf :: Context -> Env -> TT Name -> TT Name
hnf ctxt env t
= evalState (do val <- eval False ctxt []
(map finalEntry env)
(finalise t) [HNF]
quote 0 val) initEval
-- unbindEnv env (quote 0 (eval ctxt (bindEnv env t)))
finalEntry :: (Name, Binder (TT Name)) -> (Name, Binder (TT Name))
finalEntry (n, b) = (n, fmap finalise b)
bindEnv :: EnvTT n -> TT n -> TT n
bindEnv [] tm = tm
bindEnv ((n, Let t v):bs) tm = Bind n (NLet t v) (bindEnv bs tm)
bindEnv ((n, b):bs) tm = Bind n b (bindEnv bs tm)
unbindEnv :: EnvTT n -> TT n -> TT n
unbindEnv [] tm = tm
unbindEnv (_:bs) (Bind n b sc) = unbindEnv bs sc
unbindEnv env tm = error $ "Impossible case occurred: couldn't unbind env."
usable :: Bool -- specialising
-> Name -> [(Name, Int)] -> Eval (Bool, [(Name, Int)])
-- usable _ _ ns@((MN 0 "STOP", _) : _) = return (False, ns)
usable False n [] = return (True, [])
usable True n ns
= do ES ls num b <- get
if b then return (False, ns)
else case lookup n ls of
Just 0 -> return (False, ns)
Just i -> return (True, ns)
_ -> return (False, ns)
usable False n ns
= case lookup n ns of
Just 0 -> return (False, ns)
Just i -> return $ (True, (n, abs (i-1)) : filter (\ (n', _) -> n/=n') ns)
_ -> return $ (True, (n, 100) : filter (\ (n', _) -> n/=n') ns)
fnCount :: Int -> Name -> Eval ()
fnCount inc n
= do ES ls num b <- get
case lookup n ls of
Just i -> do put $ ES ((n, (i - inc)) :
filter (\ (n', _) -> n/=n') ls) num b
_ -> return ()
setBlock :: Bool -> Eval ()
setBlock b = do ES ls num _ <- get
put (ES ls num b)
deduct = fnCount 1
reinstate = fnCount (-1)
-- | Evaluate in a context of locally named things (i.e. not de Bruijn indexed,
-- such as we might have during construction of a proof)
-- The (Name, Int) pair in the arguments is the maximum depth of unfolding of
-- a name. The corresponding pair in the state is the maximum number of
-- unfoldings overall.
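-- For illustration: in a call such as
--
--   eval False ctxt [(sUN "f", 0)] env tm []
--
-- the pair (f, 0) prevents 'f' from ever being unfolded (this is how
-- 'simplify' and 'rt_simplify' above keep names like lazy, par and
-- assert_total from reducing), while a positive count is decremented by
-- 'usable' on each unfolding and so bounds the unfolding depth for that name.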
eval :: Bool -> Context -> [(Name, Int)] -> Env -> TT Name ->
[EvalOpt] -> Eval Value
eval traceon ctxt ntimes genv tm opts = ev ntimes [] True [] tm where
spec = Spec `elem` opts
simpl = Simplify `elem` opts
runtime = RunTT `elem` opts
atRepl = AtREPL `elem` opts
hnf = HNF `elem` opts
-- returns 'True' if the function should block
-- normal evaluation should return false
blockSimplify (CaseInfo inl always dict) n stk
| RunTT `elem` opts
= if always then False
else not (inl || dict) || elem n stk
| Simplify `elem` opts
= (not (inl || dict) || elem n stk)
|| (n == sUN "prim__syntactic_eq")
| otherwise = False
getCases cd | simpl = cases_totcheck cd
| runtime = cases_runtime cd
| otherwise = cases_compiletime cd
ev ntimes stk top env (P _ n ty)
| Just (Let t v) <- lookup n genv = ev ntimes stk top env v
ev ntimes_in stk top env (P Ref n ty)
| not top && hnf = liftM (VP Ref n) (ev ntimes stk top env ty)
| otherwise
= do (u, ntimes) <- usable spec n ntimes_in
if u then
do let val = lookupDefAcc n (spec || atRepl) ctxt
case val of
[(Function _ tm, _)] | sUN "assert_total" `elem` stk ->
ev ntimes (n:stk) True env tm
[(Function _ tm, Public)] ->
ev ntimes (n:stk) True env tm
[(Function _ tm, Hidden)] ->
ev ntimes (n:stk) True env tm
[(TyDecl nt ty, _)] -> do vty <- ev ntimes stk True env ty
return $ VP nt n vty
[(CaseOp ci _ _ _ _ cd, acc)]
| (acc /= Frozen || sUN "assert_total" `elem` stk) &&
null (fst (cases_totcheck cd)) -> -- unoptimised version
let (ns, tree) = getCases cd in
if blockSimplify ci n stk
then liftM (VP Ref n) (ev ntimes stk top env ty)
else -- traceWhen runtime (show (n, ns, tree)) $
do c <- evCase ntimes n (n:stk) top env ns [] tree
case c of
(Nothing, _) -> liftM (VP Ref n) (ev ntimes stk top env ty)
(Just v, _) -> return v
_ -> liftM (VP Ref n) (ev ntimes stk top env ty)
else liftM (VP Ref n) (ev ntimes stk top env ty)
ev ntimes stk top env (P nt n ty)
= liftM (VP nt n) (ev ntimes stk top env ty)
ev ntimes stk top env (V i)
| i < length env && i >= 0 = return $ snd (env !! i)
| otherwise = return $ VV i
ev ntimes stk top env (Bind n (Let t v) sc)
| not runtime || occurrences n sc < 2
= do v' <- ev ntimes stk top env v --(finalise v)
sc' <- ev ntimes stk top ((n, v') : env) sc
wknV (-1) sc'
| otherwise
= do t' <- ev ntimes stk top env t
v' <- ev ntimes stk top env v --(finalise v)
-- use Tmp as a placeholder, then make it a variable reference
-- again when evaluation finished
hs <- get
let vd = nexthole hs
put (hs { nexthole = vd + 1 })
sc' <- ev ntimes stk top ((n, VP Bound (sMN vd "vlet") VErased) : env) sc
return $ VBLet vd n t' v' sc'
ev ntimes stk top env (Bind n (NLet t v) sc)
= do t' <- ev ntimes stk top env (finalise t)
v' <- ev ntimes stk top env (finalise v)
sc' <- ev ntimes stk top ((n, v') : env) sc
return $ VBind True n (Let t' v') (\x -> return sc')
ev ntimes stk top env (Bind n b sc)
= do b' <- vbind env b
let n' = uniqueName n (map fst genv ++ map fst env)
return $ VBind True -- (vinstances 0 sc < 2)
n' b' (\x -> ev ntimes stk False ((n', x):env) sc)
where vbind env t
= fmapMB (\tm -> ev ntimes stk top env (finalise tm)) t
-- block reduction immediately under codata (and not forced)
ev ntimes stk top env
(App _ (App _ (App _ d@(P _ (UN dly) _) l@(P _ (UN lco) _)) t) arg)
| dly == txt "Delay" && lco == txt "LazyCodata" && not (simpl || atRepl)
= do let (f, _) = unApply arg
let ntimes' = case f of
P _ fn _ -> (fn, 0) : ntimes
_ -> ntimes
when spec $ setBlock True
d' <- ev ntimes' stk False env d
l' <- ev ntimes' stk False env l
t' <- ev ntimes' stk False env t
arg' <- ev ntimes' stk False env arg
when spec $ setBlock False
evApply ntimes' stk top env [l',t',arg'] d'
-- Treat "assert_total" specially, as long as it's defined!
ev ntimes stk top env (App _ (App _ (P _ n@(UN at) _) _) arg)
| [(CaseOp _ _ _ _ _ _, _)] <- lookupDefAcc n (spec || atRepl) ctxt,
at == txt "assert_total" && not simpl
= ev ntimes (n : stk) top env arg
ev ntimes stk top env (App _ f a)
= do f' <- ev ntimes stk False env f
a' <- ev ntimes stk False env a
evApply ntimes stk top env [a'] f'
ev ntimes stk top env (Proj t i)
= do -- evaluate dictionaries if it means the projection works
t' <- ev ntimes stk top env t
-- tfull' <- reapply ntimes stk top env t' []
return (doProj t' (getValArgs t'))
where doProj t' (VP (DCon _ _ _) _ _, args)
| i >= 0 && i < length args = args!!i
doProj t' _ = VProj t' i
ev ntimes stk top env (Constant c) = return $ VConstant c
ev ntimes stk top env Erased = return VErased
ev ntimes stk top env Impossible = return VImpossible
ev ntimes stk top env (TType i) = return $ VType i
ev ntimes stk top env (UType u) = return $ VUType u
evApply ntimes stk top env args (VApp f a)
= evApply ntimes stk top env (a:args) f
evApply ntimes stk top env args f
= apply ntimes stk top env f args
reapply ntimes stk top env f@(VP Ref n ty) args
= let val = lookupDefAcc n (spec || atRepl) ctxt in
case val of
[(CaseOp ci _ _ _ _ cd, acc)] ->
let (ns, tree) = getCases cd in
do c <- evCase ntimes n (n:stk) top env ns args tree
case c of
(Nothing, _) -> return $ unload env (VP Ref n ty) args
(Just v, rest) -> evApply ntimes stk top env rest v
_ -> case args of
(a : as) -> return $ unload env f (a : as)
[] -> return f
reapply ntimes stk top env (VApp f a) args
= reapply ntimes stk top env f (a : args)
reapply ntimes stk top env v args = return v
apply ntimes stk top env (VBind True n (Lam t) sc) (a:as)
= do a' <- sc a
app <- apply ntimes stk top env a' as
wknV 1 app
apply ntimes_in stk top env f@(VP Ref n ty) args
| not top && hnf = case args of
[] -> return f
_ -> return $ unload env f args
| otherwise
= do (u, ntimes) <- usable spec n ntimes_in
if u then
do let val = lookupDefAcc n (spec || atRepl) ctxt
case val of
[(CaseOp ci _ _ _ _ cd, acc)]
| acc /= Frozen || sUN "assert_total" `elem` stk ->
-- unoptimised version
let (ns, tree) = getCases cd in
if blockSimplify ci n stk
then return $ unload env (VP Ref n ty) args
else -- traceWhen runtime (show (n, ns, tree)) $
do c <- evCase ntimes n (n:stk) top env ns args tree
case c of
(Nothing, _) -> return $ unload env (VP Ref n ty) args
(Just v, rest) -> evApply ntimes stk top env rest v
[(Operator _ i op, _)] ->
if (i <= length args)
then case op (take i args) of
Nothing -> return $ unload env (VP Ref n ty) args
Just v -> evApply ntimes stk top env (drop i args) v
else return $ unload env (VP Ref n ty) args
_ -> case args of
[] -> return f
_ -> return $ unload env f args
else case args of
(a : as) -> return $ unload env f (a:as)
[] -> return f
apply ntimes stk top env f (a:as) = return $ unload env f (a:as)
apply ntimes stk top env f [] = return f
-- specApply stk env f@(VP Ref n ty) args
-- = case lookupCtxt n statics of
-- [as] -> if or as
-- then trace (show (n, map fst (filter (\ (_, s) -> s) (zip args as)))) $
-- return $ unload env f args
-- else return $ unload env f args
-- _ -> return $ unload env f args
-- specApply stk env f args = return $ unload env f args
unload :: [(Name, Value)] -> Value -> [Value] -> Value
unload env f [] = f
unload env f (a:as) = unload env (VApp f a) as
evCase ntimes n stk top env ns args tree
| length ns <= length args
= do let args' = take (length ns) args
let rest = drop (length ns) args
when spec $ deduct n
t <- evTree ntimes stk top env (zip ns args') tree
when spec $ case t of
Nothing -> reinstate n -- Blocked, count n again
Just _ -> return ()
-- (zipWith (\n , t) -> (n, t)) ns args') tree
return (t, rest)
| otherwise = return (Nothing, args)
evTree :: [(Name, Int)] -> [Name] -> Bool ->
[(Name, Value)] -> [(Name, Value)] -> SC -> Eval (Maybe Value)
evTree ntimes stk top env amap (UnmatchedCase str) = return Nothing
evTree ntimes stk top env amap (STerm tm)
= do let etm = pToVs (map fst amap) tm
etm' <- ev ntimes stk (not (conHeaded tm))
(amap ++ env) etm
return $ Just etm'
evTree ntimes stk top env amap (ProjCase t alts)
= do t' <- ev ntimes stk top env t
doCase ntimes stk top env amap t' alts
evTree ntimes stk top env amap (Case _ n alts)
= case lookup n amap of
Just v -> doCase ntimes stk top env amap v alts
_ -> return Nothing
evTree ntimes stk top env amap ImpossibleCase = return Nothing
doCase ntimes stk top env amap v alts =
do c <- chooseAlt env v (getValArgs v) alts amap
case c of
Just (altmap, sc) -> evTree ntimes stk top env altmap sc
_ -> do c' <- chooseAlt' ntimes stk env v (getValArgs v) alts amap
case c' of
Just (altmap, sc) -> evTree ntimes stk top env altmap sc
_ -> return Nothing
conHeaded tm@(App _ _ _)
| (P (DCon _ _ _) _ _, args) <- unApply tm = True
conHeaded t = False
chooseAlt' ntimes stk env _ (f, args) alts amap
= do f' <- apply ntimes stk True env f args
chooseAlt env f' (getValArgs f')
alts amap
chooseAlt :: [(Name, Value)] -> Value -> (Value, [Value]) -> [CaseAlt] ->
[(Name, Value)] ->
Eval (Maybe ([(Name, Value)], SC))
chooseAlt env _ (VP (DCon i a _) _ _, args) alts amap
| Just (ns, sc) <- findTag i alts = return $ Just (updateAmap (zip ns args) amap, sc)
| Just v <- findDefault alts = return $ Just (amap, v)
chooseAlt env _ (VP (TCon i a) _ _, args) alts amap
| Just (ns, sc) <- findTag i alts
= return $ Just (updateAmap (zip ns args) amap, sc)
| Just v <- findDefault alts = return $ Just (amap, v)
chooseAlt env _ (VConstant c, []) alts amap
| Just v <- findConst c alts = return $ Just (amap, v)
| Just (n', sub, sc) <- findSuc c alts
= return $ Just (updateAmap [(n',sub)] amap, sc)
| Just v <- findDefault alts = return $ Just (amap, v)
chooseAlt env _ (VP _ n _, args) alts amap
| Just (ns, sc) <- findFn n alts = return $ Just (updateAmap (zip ns args) amap, sc)
chooseAlt env _ (VBind _ _ (Pi i s k) t, []) alts amap
| Just (ns, sc) <- findFn (sUN "->") alts
= do t' <- t (VV 0) -- we know it's not in scope or it's not a pattern
return $ Just (updateAmap (zip ns [s, t']) amap, sc)
chooseAlt _ _ _ alts amap
| Just v <- findDefault alts
= if (any fnCase alts)
then return $ Just (amap, v)
else return Nothing
| otherwise = return Nothing
fnCase (FnCase _ _ _) = True
fnCase _ = False
-- Replace old variable names in the map with new matches
-- (This is possibly unnecessary since we make unique names and don't
-- allow repeated variables...?)
updateAmap newm amap
= newm ++ filter (\ (x, _) -> not (elem x (map fst newm))) amap
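-- A small worked case: if amap is [(x, vOld), (y, w)] and the new matches
-- are [(x, vNew)], the result is [(x, vNew), (y, w)]: the fresh binding for
-- x shadows the stale one and unrelated entries are kept.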
findTag i [] = Nothing
findTag i (ConCase n j ns sc : xs) | i == j = Just (ns, sc)
findTag i (_ : xs) = findTag i xs
findFn fn [] = Nothing
findFn fn (FnCase n ns sc : xs) | fn == n = Just (ns, sc)
findFn fn (_ : xs) = findFn fn xs
findDefault [] = Nothing
findDefault (DefaultCase sc : xs) = Just sc
findDefault (_ : xs) = findDefault xs
findSuc c [] = Nothing
findSuc (BI val) (SucCase n sc : _)
| val /= 0 = Just (n, VConstant (BI (val - 1)), sc)
findSuc c (_ : xs) = findSuc c xs
findConst c [] = Nothing
findConst c (ConstCase c' v : xs) | c == c' = Just v
findConst (AType (ATInt ITNative)) (ConCase n 1 [] v : xs) = Just v
findConst (AType ATFloat) (ConCase n 2 [] v : xs) = Just v
findConst (AType (ATInt ITChar)) (ConCase n 3 [] v : xs) = Just v
findConst StrType (ConCase n 4 [] v : xs) = Just v
findConst (AType (ATInt ITBig)) (ConCase n 6 [] v : xs) = Just v
findConst (AType (ATInt (ITFixed ity))) (ConCase n tag [] v : xs)
| tag == 7 + fromEnum ity = Just v
findConst c (_ : xs) = findConst c xs
getValArgs tm = getValArgs' tm []
getValArgs' (VApp f a) as = getValArgs' f (a:as)
getValArgs' f as = (f, as)
-- tmpToV i vd (VLetHole j) | vd == j = return $ VV i
-- tmpToV i vd (VP nt n v) = liftM (VP nt n) (tmpToV i vd v)
-- tmpToV i vd (VBind n b sc) = do b' <- fmapMB (tmpToV i vd) b
-- let sc' = \x -> do x' <- sc x
-- tmpToV (i + 1) vd x'
-- return (VBind n b' sc')
-- tmpToV i vd (VApp f a) = liftM2 VApp (tmpToV i vd f) (tmpToV i vd a)
-- tmpToV i vd x = return x
instance Eq Value where
(==) x y = getTT x == getTT y
where getTT v = evalState (quote 0 v) initEval
class Quote a where
quote :: Int -> a -> Eval (TT Name)
instance Quote Value where
quote i (VP nt n v) = liftM (P nt n) (quote i v)
quote i (VV x) = return $ V x
quote i (VBind _ n b sc) = do sc' <- sc (VTmp i)
b' <- quoteB b
liftM (Bind n b') (quote (i+1) sc')
where quoteB t = fmapMB (quote i) t
quote i (VBLet vd n t v sc)
= do sc' <- quote i sc
t' <- quote i t
v' <- quote i v
let sc'' = pToV (sMN vd "vlet") (addBinder sc')
return (Bind n (Let t' v') sc'')
quote i (VApp f a) = liftM2 (App MaybeHoles) (quote i f) (quote i a)
quote i (VType u) = return $ TType u
quote i (VUType u) = return $ UType u
quote i VErased = return $ Erased
quote i VImpossible = return $ Impossible
quote i (VProj v j) = do v' <- quote i v
return (Proj v' j)
quote i (VConstant c) = return $ Constant c
quote i (VTmp x) = return $ V (i - x - 1)
wknV :: Int -> Value -> Eval Value
wknV i (VV x) | x >= i = return $ VV (x - 1)
wknV i (VBind red n b sc) = do b' <- fmapMB (wknV i) b
return $ VBind red n b' (\x -> do x' <- sc x
wknV (i + 1) x')
wknV i (VApp f a) = liftM2 VApp (wknV i f) (wknV i a)
wknV i t = return t
isUniverse :: Term -> Bool
isUniverse (TType _) = True
isUniverse (UType _) = True
isUniverse _ = False
isUsableUniverse :: Term -> Bool
isUsableUniverse (UType NullType) = False
isUsableUniverse x = isUniverse x
convEq' ctxt hs x y = evalStateT (convEq ctxt hs x y) (0, [])
convEq :: Context -> [Name] -> TT Name -> TT Name -> StateT UCs TC Bool
convEq ctxt holes topx topy = ceq [] topx topy where
ceq :: [(Name, Name)] -> TT Name -> TT Name -> StateT UCs TC Bool
ceq ps (P xt x _) (P yt y _)
| x `elem` holes || y `elem` holes = return True
| x == y || (x, y) `elem` ps || (y,x) `elem` ps = return True
| otherwise = sameDefs ps x y
ceq ps x (Bind n (Lam t) (App _ y (V 0)))
= ceq ps x (substV (P Bound n t) y)
ceq ps (Bind n (Lam t) (App _ x (V 0))) y
= ceq ps (substV (P Bound n t) x) y
ceq ps x (Bind n (Lam t) (App _ y (P Bound n' _)))
| n == n' = ceq ps x y
ceq ps (Bind n (Lam t) (App _ x (P Bound n' _))) y
| n == n' = ceq ps x y
ceq ps (Bind n (PVar t) sc) y = ceq ps sc y
ceq ps x (Bind n (PVar t) sc) = ceq ps x sc
ceq ps (Bind n (PVTy t) sc) y = ceq ps sc y
ceq ps x (Bind n (PVTy t) sc) = ceq ps x sc
ceq ps (V x) (V y) = return (x == y)
ceq ps (V x) (P _ y _)
| x >= 0 && length ps > x = return (fst (ps!!x) == y)
| otherwise = return False
ceq ps (P _ x _) (V y)
| y >= 0 && length ps > y = return (x == snd (ps!!y))
| otherwise = return False
ceq ps (Bind n xb xs) (Bind n' yb ys)
= liftM2 (&&) (ceqB ps xb yb) (ceq ((n,n'):ps) xs ys)
where
ceqB ps (Let v t) (Let v' t') = liftM2 (&&) (ceq ps v v') (ceq ps t t')
ceqB ps (Guess v t) (Guess v' t') = liftM2 (&&) (ceq ps v v') (ceq ps t t')
ceqB ps (Pi i v t) (Pi i' v' t') = liftM2 (&&) (ceq ps v v') (ceq ps t t')
ceqB ps b b' = ceq ps (binderTy b) (binderTy b')
ceq ps (App _ fx ax) (App _ fy ay) = liftM2 (&&) (ceq ps fx fy) (ceq ps ax ay)
ceq ps (Constant x) (Constant y) = return (x == y)
ceq ps (TType x) (TType y) = do (v, cs) <- get
put (v, ULE x y : cs)
return True
ceq ps (UType AllTypes) x = return (isUsableUniverse x)
ceq ps x (UType AllTypes) = return (isUsableUniverse x)
ceq ps (UType u) (UType v) = return (u == v)
ceq ps Erased _ = return True
ceq ps _ Erased = return True
ceq ps x y = return False
caseeq ps (Case _ n cs) (Case _ n' cs') = caseeqA ((n,n'):ps) cs cs'
where
caseeqA ps (ConCase x i as sc : rest) (ConCase x' i' as' sc' : rest')
= do q1 <- caseeq (zip as as' ++ ps) sc sc'
q2 <- caseeqA ps rest rest'
return $ x == x' && i == i' && q1 && q2
caseeqA ps (ConstCase x sc : rest) (ConstCase x' sc' : rest')
= do q1 <- caseeq ps sc sc'
q2 <- caseeqA ps rest rest'
return $ x == x' && q1 && q2
caseeqA ps (DefaultCase sc : rest) (DefaultCase sc' : rest')
= liftM2 (&&) (caseeq ps sc sc') (caseeqA ps rest rest')
caseeqA ps [] [] = return True
caseeqA ps _ _ = return False
caseeq ps (STerm x) (STerm y) = ceq ps x y
caseeq ps (UnmatchedCase _) (UnmatchedCase _) = return True
caseeq ps _ _ = return False
sameDefs ps x y = case (lookupDef x ctxt, lookupDef y ctxt) of
([Function _ xdef], [Function _ ydef])
-> ceq ((x,y):ps) xdef ydef
([CaseOp _ _ _ _ _ xd],
[CaseOp _ _ _ _ _ yd])
-> let (_, xdef) = cases_compiletime xd
(_, ydef) = cases_compiletime yd in
caseeq ((x,y):ps) xdef ydef
_ -> return False
-- SPECIALISATION -----------------------------------------------------------
-- We need too much control to be able to do this by tweaking the main
-- evaluator
spec :: Context -> Ctxt [Bool] -> Env -> TT Name -> Eval (TT Name)
spec ctxt statics genv tm = error "spec undefined"
-- CONTEXTS -----------------------------------------------------------------
{-| A definition is either a simple function (just an expression with a type),
a constant, which could be a data or type constructor, an axiom or an
as-yet-undefined function, or an Operator.
An Operator is a function which explains how to reduce.
A CaseOp is a function defined by a simple case tree -}
data Def = Function !Type !Term
| TyDecl NameType !Type
| Operator Type Int ([Value] -> Maybe Value)
| CaseOp CaseInfo
!Type
![Type] -- argument types
![Either Term (Term, Term)] -- original definition
![([Name], Term, Term)] -- simplified for totality check definition
!CaseDefs
-- [Name] SC -- Compile time case definition
-- [Name] SC -- Run time case definitions
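-- An illustrative sketch (not from the original source; 'addTy' and the
-- choice of primitive are assumptions): a binary addition primitive on
-- machine integers could be stored as
--
--   Operator addTy 2 $ \args -> case args of
--     [VConstant (I x), VConstant (I y)] -> Just (VConstant (I (x + y)))
--     _                                  -> Nothing
--
-- where returning Nothing when the arguments are not yet canonical leaves
-- the application unreduced (see the Operator branch of 'apply' above).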
data CaseDefs = CaseDefs {
cases_totcheck :: !([Name], SC),
cases_compiletime :: !([Name], SC),
cases_inlined :: !([Name], SC),
cases_runtime :: !([Name], SC)
}
data CaseInfo = CaseInfo {
case_inlinable :: Bool, -- decided by machine
case_alwaysinline :: Bool, -- decided by %inline flag
tc_dictionary :: Bool
}
{-!
deriving instance Binary Def
!-}
{-!
deriving instance Binary CaseInfo
!-}
{-!
deriving instance Binary CaseDefs
!-}
{-!
deriving instance NFData Def
!-}
{-!
deriving instance NFData CaseInfo
!-}
{-!
deriving instance NFData CaseDefs
!-}
instance Show Def where
show (Function ty tm) = "Function: " ++ show (ty, tm)
show (TyDecl nt ty) = "TyDecl: " ++ show nt ++ " " ++ show ty
show (Operator ty _ _) = "Operator: " ++ show ty
show (CaseOp (CaseInfo inlc inla inlr) ty atys ps_in ps cd)
= let (ns, sc) = cases_compiletime cd
(ns_t, sc_t) = cases_totcheck cd
(ns', sc') = cases_runtime cd in
"Case: " ++ show ty ++ " " ++ show ps ++ "\n" ++
"TOTALITY CHECK TIME:\n\n" ++
show ns_t ++ " " ++ show sc_t ++ "\n\n" ++
"COMPILE TIME:\n\n" ++
show ns ++ " " ++ show sc ++ "\n\n" ++
"RUN TIME:\n\n" ++
show ns' ++ " " ++ show sc' ++ "\n\n" ++
(if inlc then "Inlinable" else "Not inlinable") ++
(if inla then " Aggressively\n" else "\n")
-------
-- Frozen => doesn't reduce
-- Hidden => doesn't reduce and invisible to type checker
data Accessibility = Public | Frozen | Hidden
deriving (Show, Eq)
{-!
deriving instance NFData Accessibility
!-}
-- | The result of totality checking
data Totality = Total [Int] -- ^ well-founded arguments
| Productive -- ^ productive
| Partial PReason
| Unchecked
| Generated
deriving Eq
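-- Illustration (hedged): a value such as Total [0] records the argument
-- positions judged to be decreasing (here, presumably the first argument),
-- while Partial NotCovering marks a definition with missing cases; the Show
-- instance below gives the user-facing wording for each constructor.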
{-!
deriving instance NFData Totality
!-}
-- | Reasons why a function may not be total
data PReason = Other [Name] | Itself | NotCovering | NotPositive | UseUndef Name
| ExternalIO | BelieveMe | Mutual [Name] | NotProductive
deriving (Show, Eq)
{-!
deriving instance NFData PReason
!-}
instance Show Totality where
show (Total args) = "Total" -- ++ show args ++ " decreasing arguments"
show Productive = "Productive" -- ++ show args ++ " decreasing arguments"
show Unchecked = "not yet checked for totality"
show (Partial Itself) = "possibly not total as it is not well founded"
show (Partial NotCovering) = "not total as there are missing cases"
show (Partial NotPositive) = "not strictly positive"
show (Partial ExternalIO) = "an external IO primitive"
show (Partial NotProductive) = "not productive"
show (Partial BelieveMe) = "not total due to use of believe_me in proof"
show (Partial (Other ns)) = "possibly not total due to: " ++ showSep ", " (map show ns)
show (Partial (Mutual ns)) = "possibly not total due to recursive path " ++
showSep " --> " (map show ns)
show (Partial (UseUndef n)) = "possibly not total because it uses the undefined name " ++ show n
show Generated = "auto-generated"
{-!
deriving instance Binary Accessibility
!-}
{-!
deriving instance Binary Totality
!-}
{-!
deriving instance Binary PReason
!-}
-- Possible attached meta-information for a definition in context
data MetaInformation =
EmptyMI -- ^ No meta-information
| DataMI [Int] -- ^ Meta information for a data declaration with position of parameters
deriving (Eq, Show)
-- | Contexts used for global definitions and for proof state. They contain
-- universe constraints and existing definitions.
data Context = MkContext {
next_tvar :: Int,
definitions :: Ctxt (Def, Accessibility, Totality, MetaInformation)
} deriving Show
-- | The initial empty context
initContext = MkContext 0 emptyContext
mapDefCtxt :: (Def -> Def) -> Context -> Context
mapDefCtxt f (MkContext t !defs) = MkContext t (mapCtxt f' defs)
where f' (!d, a, t, m) = (f d, a, t, m)
-- | Get the definitions from a context
ctxtAlist :: Context -> [(Name, Def)]
ctxtAlist ctxt = map (\(n, (d, a, t, m)) -> (n, d)) $ toAlist (definitions ctxt)
veval ctxt env t = evalState (eval False ctxt [] env t []) initEval
addToCtxt :: Name -> Term -> Type -> Context -> Context
addToCtxt n tm ty uctxt
= let ctxt = definitions uctxt
!ctxt' = addDef n (Function ty tm, Public, Unchecked, EmptyMI) ctxt in
uctxt { definitions = ctxt' }
setAccess :: Name -> Accessibility -> Context -> Context
setAccess n a uctxt
= let ctxt = definitions uctxt
!ctxt' = updateDef n (\ (d, _, t, m) -> (d, a, t, m)) ctxt in
uctxt { definitions = ctxt' }
setTotal :: Name -> Totality -> Context -> Context
setTotal n t uctxt
= let ctxt = definitions uctxt
!ctxt' = updateDef n (\ (d, a, _, m) -> (d, a, t, m)) ctxt in
uctxt { definitions = ctxt' }
setMetaInformation :: Name -> MetaInformation -> Context -> Context
setMetaInformation n m uctxt
= let ctxt = definitions uctxt
!ctxt' = updateDef n (\ (d, a, t, _) -> (d, a, t, m)) ctxt in
uctxt { definitions = ctxt' }
addCtxtDef :: Name -> Def -> Context -> Context
addCtxtDef n d c = let ctxt = definitions c
!ctxt' = addDef n (d, Public, Unchecked, EmptyMI) $! ctxt in
c { definitions = ctxt' }
addTyDecl :: Name -> NameType -> Type -> Context -> Context
addTyDecl n nt ty uctxt
= let ctxt = definitions uctxt
!ctxt' = addDef n (TyDecl nt ty, Public, Unchecked, EmptyMI) ctxt in
uctxt { definitions = ctxt' }
addDatatype :: Datatype Name -> Context -> Context
addDatatype (Data n tag ty unique cons) uctxt
= let ctxt = definitions uctxt
ty' = normalise uctxt [] ty
!ctxt' = addCons 0 cons (addDef n
(TyDecl (TCon tag (arity ty')) ty, Public, Unchecked, EmptyMI) ctxt) in
uctxt { definitions = ctxt' }
where
addCons tag [] ctxt = ctxt
addCons tag ((n, ty) : cons) ctxt
= let ty' = normalise uctxt [] ty in
addCons (tag+1) cons (addDef n
(TyDecl (DCon tag (arity ty') unique) ty, Public, Unchecked, EmptyMI) ctxt)
-- FIXME: Too many arguments! Refactor all these Bools.
--
-- Issue #1724 on the issue tracker.
-- https://github.com/idris-lang/Idris-dev/issues/1724
addCasedef :: Name -> ErasureInfo -> CaseInfo ->
Bool -> SC -> -- default case
Bool -> Bool ->
[Type] -> -- argument types
[Int] -> -- inaccessible arguments
[Either Term (Term, Term)] ->
[([Name], Term, Term)] -> -- totality
[([Name], Term, Term)] -> -- compile time
[([Name], Term, Term)] -> -- inlined
[([Name], Term, Term)] -> -- run time
Type -> Context -> TC Context
addCasedef n ei ci@(CaseInfo inline alwaysInline tcdict)
tcase covering reflect asserted argtys inacc
ps_in ps_tot ps_inl ps_ct ps_rt ty uctxt
= do let ctxt = definitions uctxt
access = case lookupDefAcc n False uctxt of
[(_, acc)] -> acc
_ -> Public
totalityTime <- simpleCase tcase covering reflect CompileTime emptyFC inacc argtys ps_tot ei
compileTime <- simpleCase tcase covering reflect CompileTime emptyFC inacc argtys ps_ct ei
inlined <- simpleCase tcase covering reflect CompileTime emptyFC inacc argtys ps_inl ei
runtime <- simpleCase tcase covering reflect RunTime emptyFC inacc argtys ps_rt ei
ctxt' <- case (totalityTime, compileTime, inlined, runtime) of
(CaseDef args_tot sc_tot _,
CaseDef args_ct sc_ct _,
CaseDef args_inl sc_inl _,
CaseDef args_rt sc_rt _) ->
let inl = alwaysInline -- tcdict
inlc = (inl || small n args_ct sc_ct) && (not asserted)
inlr = inl || small n args_rt sc_rt
cdef = CaseDefs (args_tot, sc_tot)
(args_ct, sc_ct)
(args_inl, sc_inl)
(args_rt, sc_rt)
op = (CaseOp (ci { case_inlinable = inlc })
ty argtys ps_in ps_tot cdef,
access, Unchecked, EmptyMI)
in return $ addDef n op ctxt
-- other -> tfail (Msg $ "Error adding case def: " ++ show other)
return uctxt { definitions = ctxt' }
-- simplify a definition for totality checking
simplifyCasedef :: Name -> ErasureInfo -> Context -> TC Context
simplifyCasedef n ei uctxt
= do let ctxt = definitions uctxt
ctxt' <- case lookupCtxt n ctxt of
[(CaseOp ci ty atys [] ps _, acc, tot, metainf)] ->
return ctxt -- nothing to simplify (or already done...)
[(CaseOp ci ty atys ps_in ps cd, acc, tot, metainf)] ->
do let ps_in' = map simpl ps_in
pdef = map debind ps_in'
CaseDef args sc _ <- simpleCase False (STerm Erased) False CompileTime emptyFC [] atys pdef ei
return $ addDef n (CaseOp ci
ty atys ps_in' ps (cd { cases_totcheck = (args, sc) }),
acc, tot, metainf) ctxt
_ -> return ctxt
return uctxt { definitions = ctxt' }
where
depat acc (Bind n (PVar t) sc)
= depat (n : acc) (instantiate (P Bound n t) sc)
depat acc x = (acc, x)
debind (Right (x, y)) = let (vs, x') = depat [] x
(_, y') = depat [] y in
(vs, x', y')
debind (Left x) = let (vs, x') = depat [] x in
(vs, x', Impossible)
simpl (Right (x, y)) = Right (x, simplify uctxt [] y)
simpl t = t
addOperator :: Name -> Type -> Int -> ([Value] -> Maybe Value) ->
Context -> Context
addOperator n ty a op uctxt
= let ctxt = definitions uctxt
ctxt' = addDef n (Operator ty a op, Public, Unchecked, EmptyMI) ctxt in
uctxt { definitions = ctxt' }
tfst (a, _, _, _) = a
lookupNames :: Name -> Context -> [Name]
lookupNames n ctxt
= let ns = lookupCtxtName n (definitions ctxt) in
map fst ns
-- | Get the list of pairs of fully-qualified names and their types that match some name
lookupTyName :: Name -> Context -> [(Name, Type)]
lookupTyName n ctxt = do
(name, def) <- lookupCtxtName n (definitions ctxt)
ty <- case tfst def of
(Function ty _) -> return ty
(TyDecl _ ty) -> return ty
(Operator ty _ _) -> return ty
(CaseOp _ ty _ _ _ _) -> return ty
return (name, ty)
-- | Get the pair of a fully-qualified name and its type, if there is a unique one matching the name used as a key.
lookupTyNameExact :: Name -> Context -> Maybe (Name, Type)
lookupTyNameExact n ctxt = listToMaybe [ (nm, v) | (nm, v) <- lookupTyName n ctxt, nm == n ]
-- | Get the types that match some name
lookupTy :: Name -> Context -> [Type]
lookupTy n ctxt = map snd (lookupTyName n ctxt)
-- | Get the single type that matches some name precisely
lookupTyExact :: Name -> Context -> Maybe Type
lookupTyExact n ctxt = fmap snd (lookupTyNameExact n ctxt)
isConName :: Name -> Context -> Bool
isConName n ctxt = isTConName n ctxt || isDConName n ctxt
isTConName :: Name -> Context -> Bool
isTConName n ctxt
= case lookupDefExact n ctxt of
Just (TyDecl (TCon _ _) _) -> True
_ -> False
-- | Check whether a resolved name is certainly a data constructor
isDConName :: Name -> Context -> Bool
isDConName n ctxt
= case lookupDefExact n ctxt of
Just (TyDecl (DCon _ _ _) _) -> True
_ -> False
-- | Check whether any overloading of a name is a data constructor
canBeDConName :: Name -> Context -> Bool
canBeDConName n ctxt
= or $ do def <- lookupCtxt n (definitions ctxt)
case tfst def of
(TyDecl (DCon _ _ _) _) -> return True
_ -> return False
isFnName :: Name -> Context -> Bool
isFnName n ctxt
= case lookupDefExact n ctxt of
Just (Function _ _) -> True
Just (Operator _ _ _) -> True
Just (CaseOp _ _ _ _ _ _) -> True
_ -> False
isTCDict :: Name -> Context -> Bool
isTCDict n ctxt
= case lookupDefExact n ctxt of
Just (Function _ _) -> False
Just (Operator _ _ _) -> False
Just (CaseOp ci _ _ _ _ _) -> tc_dictionary ci
_ -> False
lookupP :: Name -> Context -> [Term]
lookupP = lookupP_all False False
lookupP_all :: Bool -> Bool -> Name -> Context -> [Term]
lookupP_all all exact n ctxt
= do (n', def) <- names
p <- case def of
(Function ty tm, a, _, _) -> return (P Ref n' ty, a)
(TyDecl nt ty, a, _, _) -> return (P nt n' ty, a)
(CaseOp _ ty _ _ _ _, a, _, _) -> return (P Ref n' ty, a)
(Operator ty _ _, a, _, _) -> return (P Ref n' ty, a)
case snd p of
Hidden -> if all then return (fst p) else []
_ -> return (fst p)
where
names = let ns = lookupCtxtName n (definitions ctxt) in
if exact
then filter (\ (n', d) -> n' == n) ns
else ns
lookupDefExact :: Name -> Context -> Maybe Def
lookupDefExact n ctxt = tfst <$> lookupCtxtExact n (definitions ctxt)
lookupDef :: Name -> Context -> [Def]
lookupDef n ctxt = tfst <$> lookupCtxt n (definitions ctxt)
lookupNameDef :: Name -> Context -> [(Name, Def)]
lookupNameDef n ctxt = mapSnd tfst $ lookupCtxtName n (definitions ctxt)
where mapSnd f [] = []
mapSnd f ((x,y):xys) = (x, f y) : mapSnd f xys
lookupDefAcc :: Name -> Bool -> Context ->
[(Def, Accessibility)]
lookupDefAcc n mkpublic ctxt
= map mkp $ lookupCtxt n (definitions ctxt)
-- io_bind is a special case for REPL prettiness
where mkp (d, a, _, _) = if mkpublic && (not (n == sUN "io_bind" || n == sUN "io_return"))
then (d, Public) else (d, a)
lookupDefAccExact :: Name -> Bool -> Context ->
Maybe (Def, Accessibility)
lookupDefAccExact n mkpublic ctxt
= fmap mkp $ lookupCtxtExact n (definitions ctxt)
-- io_bind is a special case for REPL prettiness
where mkp (d, a, _, _) = if mkpublic && (not (n == sUN "io_bind" || n == sUN "io_return"))
then (d, Public) else (d, a)
lookupTotal :: Name -> Context -> [Totality]
lookupTotal n ctxt = map mkt $ lookupCtxt n (definitions ctxt)
where mkt (d, a, t, m) = t
lookupMetaInformation :: Name -> Context -> [MetaInformation]
lookupMetaInformation n ctxt = map mkm $ lookupCtxt n (definitions ctxt)
where mkm (d, a, t, m) = m
lookupNameTotal :: Name -> Context -> [(Name, Totality)]
lookupNameTotal n = map (\(n, (_, _, t, _)) -> (n, t)) . lookupCtxtName n . definitions
lookupVal :: Name -> Context -> [Value]
lookupVal n ctxt
= do def <- lookupCtxt n (definitions ctxt)
case tfst def of
(Function _ htm) -> return (veval ctxt [] htm)
(TyDecl nt ty) -> return (VP nt n (veval ctxt [] ty))
_ -> []
lookupTyEnv :: Name -> Env -> Maybe (Int, Type)
lookupTyEnv n env = li n 0 env where
li n i [] = Nothing
li n i ((x, b): xs)
| n == x = Just (i, binderTy b)
| otherwise = li n (i+1) xs
-- | Create a unique name given context and other existing names
uniqueNameCtxt :: Context -> Name -> [Name] -> Name
uniqueNameCtxt ctxt n hs
| n `elem` hs = uniqueNameCtxt ctxt (nextName n) hs
| [_] <- lookupTy n ctxt = uniqueNameCtxt ctxt (nextName n) hs
| otherwise = n
uniqueBindersCtxt :: Context -> [Name] -> TT Name -> TT Name
uniqueBindersCtxt ctxt ns (Bind n b sc)
= let n' = uniqueNameCtxt ctxt n ns in
Bind n' (fmap (uniqueBindersCtxt ctxt (n':ns)) b) (uniqueBindersCtxt ctxt ns sc)
uniqueBindersCtxt ctxt ns (App s f a) = App s (uniqueBindersCtxt ctxt ns f) (uniqueBindersCtxt ctxt ns a)
uniqueBindersCtxt ctxt ns t = t
| ExNexu/Idris-dev | src/Idris/Core/Evaluate.hs | bsd-3-clause | 47,973 | 0 | 28 | 17,899 | 16,924 | 8,627 | 8,297 | 860 | 80 |
{-# LANGUAGE ForeignFunctionInterface #-}
module TestStub019 where
foreign export ccall f :: Int -> Int
f :: Int -> Int
f n = n + 1
| ghc-android/ghc | testsuite/tests/driver/F019.hs | bsd-3-clause | 132 | 0 | 6 | 26 | 40 | 23 | 17 | 5 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# OPTIONS_GHC -funbox-strict-fields #-}
module Worklist where
import Control.Monad.Trans.State.Strict
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Maybe (fromMaybe)
import Control.Monad (forM_)
newtype TransferFunction node lattice a
= TFM (State (WorklistState node lattice) a)
deriving (Functor, Applicative, Monad)
type ChangeDetector node lattice
= Set node -> lattice -> lattice -> Bool
newtype DataFlowFramework node lattice
= DFF { getTransfer :: node -> (TransferFunction node lattice lattice, ChangeDetector node lattice) }
eqChangeDetector :: Eq lattice => ChangeDetector node lattice
eqChangeDetector _ = (/=)
alwaysChangeDetector :: ChangeDetector node lattice
alwaysChangeDetector _ _ _ = True
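-- For illustration: 'eqChangeDetector' ignores which references changed and
-- simply compares the old and new lattice values, e.g.
--
--   eqChangeDetector Set.empty 1 (2 :: Int) == True   -- changed: re-enqueue referrers
--   eqChangeDetector Set.empty 1 (1 :: Int) == False  -- unchanged: stop propagating
--
-- whereas 'alwaysChangeDetector' re-enqueues referrers unconditionally.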
frameworkWithEqChangeDetector
:: Eq lattice
=> (node -> TransferFunction node lattice lattice)
-> DataFlowFramework node lattice
frameworkWithEqChangeDetector transfer = DFF transfer'
where
transfer' node = (transfer node, eqChangeDetector)
data NodeInfo node lattice
= NodeInfo
{ value :: !(Maybe lattice) -- ^ the value at this node. can be Nothing only when a loop was detected
, references :: !(Set node) -- ^ nodes this value depends on
, referrers :: !(Set node) -- ^ nodes depending on this value
} deriving (Show, Eq)
emptyNodeInfo :: NodeInfo node lattice
emptyNodeInfo = NodeInfo Nothing Set.empty Set.empty
type Graph node lattice = Map node (NodeInfo node lattice)
data WorklistState node lattice
= WorklistState
{ graph :: !(Graph node lattice)
, callStack :: !(Set node)
, referencedNodes :: !(Set node)
, loopBreakers :: !(Set node)
, framework :: !(DataFlowFramework node lattice)
}
zoomGraph :: State (Graph node lattice) a -> State (WorklistState node lattice) a
zoomGraph modifyGraph = state $ \st ->
let (res, g) = runState modifyGraph (graph st)
in (res, st { graph = g })
zoomReferencedNodes :: State (Set node) a -> State (WorklistState node lattice) a
zoomReferencedNodes modifier = state $ \st ->
let (res, rn) = runState modifier (referencedNodes st)
in (res, st { referencedNodes = rn })
zoomLoopBreakers :: State (Set node) a -> State (WorklistState node lattice) a
zoomLoopBreakers modifier = state $ \st ->
let (res, lb) = runState modifier (loopBreakers st)
in (res, st { loopBreakers = lb })
initialWorklistState :: DataFlowFramework node lattice -> WorklistState node lattice
initialWorklistState = WorklistState Map.empty Set.empty Set.empty Set.empty
dependOn :: Ord node => node -> TransferFunction node lattice (Maybe lattice)
dependOn node = TFM $ do
loopDetected <- Set.member node <$> gets callStack
maybeNodeInfo <- Map.lookup node <$> gets graph
zoomReferencedNodes (modify' (Set.insert node)) -- save that we depend on this value
case maybeNodeInfo of
Nothing | loopDetected -> do
-- We have to revisit these later
zoomLoopBreakers (modify' (Set.insert node))
return Nothing
Nothing -> fmap (\(val, _, _) -> Just val) (recompute node)
Just info -> return (value info)
data Diff a
= Diff
{ added :: !(Set a)
, removed :: !(Set a)
}
computeDiff :: Ord a => Set a -> Set a -> Diff a
computeDiff from to = Diff (to `Set.difference` from) (from `Set.difference` to)
updateGraphNode
:: Ord node
=> node
-> lattice
-> Set node
-> State (WorklistState node lattice) (NodeInfo node lattice)
updateGraphNode node val refs = zoomGraph $ do
-- if we are lucky (e.g. no refs changed), we get away with one map access
-- first update `node`s NodeInfo
let newInfo = emptyNodeInfo { value = Just val, references = refs }
let merger _ new old = new { referrers = referrers old }
oldInfo <- fromMaybe emptyNodeInfo <$> state (Map.insertLookupWithKey merger node newInfo)
-- Now compute the diff of changed references
let diff = computeDiff (references oldInfo) refs
-- finally register/unregister at all references as referrer.
let updater f dep = modify' (Map.alter (Just . f . fromMaybe emptyNodeInfo) dep)
let addReferrer ni = ni { referrers = Set.insert node (referrers ni) }
let removeReferrer ni = ni { referrers = Set.delete node (referrers ni) }
forM_ (added diff) (updater addReferrer)
forM_ (removed diff) (updater removeReferrer)
return oldInfo
recompute
:: Ord node
=> node
-> State (WorklistState node lattice) (lattice, NodeInfo node lattice, ChangeDetector node lattice)
recompute node = do
oldState <- get
put $ oldState
{ referencedNodes = Set.empty
, callStack = Set.insert node (callStack oldState)
}
let (TFM transfer, changeDetector) = getTransfer (framework oldState) node
val <- transfer
refs <- gets referencedNodes
oldInfo <- updateGraphNode node val refs
modify' $ \st -> st
{ referencedNodes = referencedNodes oldState
, callStack = callStack oldState
}
return (val, oldInfo, changeDetector)
enqueue :: Ord node => node -> Set node -> Map node (Set node) -> Map node (Set node)
enqueue reference referrers_ = Map.unionWith Set.union referrersMap
where
referrersMap = Map.fromSet (\_ -> Set.singleton reference) referrers_
dequeue :: Map node (Set node) -> Maybe ((node, Set node), Map node (Set node))
dequeue = Map.maxViewWithKey
lookupReferrers :: Ord node => node -> Graph node lattice -> Set node
lookupReferrers node = maybe Set.empty referrers . Map.lookup node
work :: Ord node => Map node (Set node) -> State (WorklistState node lattice) ()
work nodes =
case dequeue nodes of
Nothing -> return ()
Just ((node, changedRefs), nodes') -> do
modify' $ \st -> st { loopBreakers = Set.empty }
(newVal, oldInfo, detectChange) <- recompute node
-- We have to enqueue all referrers to loop breakers, e.g. nodes which we
-- returned `Nothing` from `dependOn` to break cyclic dependencies.
-- Their referrers probably aren't carrying safe values, so we have to
-- revisit them. This looks expensive, but loopBreakers should be pretty
-- rare later on.
g <- gets graph
lbs <- gets loopBreakers
let nodes'' = Set.foldr (\lb -> enqueue lb (lookupReferrers lb g)) nodes' lbs
case value oldInfo of
Just oldVal | not (detectChange changedRefs oldVal newVal) -> work nodes''
_ -> work (enqueue node (referrers oldInfo) nodes'')
runFramework
:: Ord node
=> DataFlowFramework node lattice
-> Set node
-> Map node lattice
runFramework framework_ interestingNodes = run framework_
where
st = work (Map.fromSet (const Set.empty) interestingNodes)
run = Map.mapMaybe value . graph . execState st . initialWorklistState
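-- A small usage sketch, added for illustration; 'exampleFramework' and
-- 'exampleResult' are not part of the original module. Every node n > 0
-- depends on node n - 1 and the lattice value is just the depth, so the
-- solver has to chase dependencies transitively from the requested node.
exampleFramework :: DataFlowFramework Int Int
exampleFramework = frameworkWithEqChangeDetector transfer
  where
    transfer :: Int -> TransferFunction Int Int Int
    transfer n
      | n <= 0 = return 0
      | otherwise = do
          -- dependOn returns Nothing only if a dependency cycle is detected
          prev <- dependOn (n - 1)
          return (maybe 0 (+ 1) prev)

-- Expected result: a map sending each reached node 0..5 to its depth 0..5.
exampleResult :: Map Int Int
exampleResult = runFramework exampleFramework (Set.singleton 5)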
| sgraf812/worklist | src/Worklist.hs | isc | 6,756 | 0 | 19 | 1,323 | 2,195 | 1,113 | 1,082 | 157 | 3 |
module Helpers (eiFilter, eiMap, numberToDigits) where
eiFilter :: (a -> Bool) -> [a] -> [Either a a]
eiFilter f xs = map (conditional f) xs
where
conditional f x
| f x = Right x
| otherwise = Left x
eiMap :: (a -> c) -> (b -> d) -> [Either a b] -> [Either c d]
eiMap _ _ [] = []
eiMap fL fR (x:xs) = f x : eiMap fL fR xs
where
f (Left x) = Left $ fL x
f (Right y) = Right $ fR y
numberToDigits :: Integer -> [Integer]
numberToDigits = (map (read . return)) . show
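-- Illustrative examples (not part of the original module):
--
--   eiFilter even [1, 2, 3] == [Left 1, Right 2, Left 3]
--   eiMap show negate [Left 1, Right 2] == [Left "1", Right (-2)]
--   numberToDigits 123 == [1, 2, 3]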
| kindaro/cis194 | src/Helpers.hs | isc | 504 | 0 | 10 | 144 | 277 | 141 | 136 | 13 | 2 |
module Algebra.Vector where
import Control.Exception.Base
import Data.List as List
import qualified Data.List.Extensions as ListExt
import qualified Data.Map as Map
import Data.Ratio as Ratio
import Data.Ratio.Extensions as RatioExt
import Data.Tuple.Extensions as TupleExt
import Prelude.Extensions as PreludeExt
type Vector = [Rational]
fromList = id
toList = id
fromArray = ((.) Algebra.Vector.fromList Map.elems)
toArray = ((.) ListExt.toArray0 Algebra.Vector.toList)
size = List.length
sameSize = \a b -> ((==) (size a) (size b))
element = \v index -> ((!!) v index)
zero = \n -> (List.replicate n (toRational 0))
isZero = \v -> (all ((==) (0::Rational)) v)
notZero = ((.) not isZero)
map = List.map
map2 = ListExt.map2
add :: Vector -> Vector -> Vector
add = (map2 (+))
sum = (List.foldr add (zero 2))
subtract :: Vector -> Vector -> Vector
subtract = (map2 (-))
scale :: Rational -> Vector -> Vector
scale = \scalar v -> (Algebra.Vector.map ((*) scalar) v)
scaleTo = \magnitude v -> let
preconditions = ((>) length 0)
length = (Algebra.Vector.length v)
result = (scale ((/) magnitude length) v)
in (assert preconditions result)
scaleToOrZero = \magnitude v -> let
length = (Algebra.Vector.length v)
scaled = (scale ((/) magnitude length) v)
in (ifElse ((==) length 0) v scaled)
negate = \v -> (scale (Prelude.negate 1) v)
dotProduct = \a b -> (List.sum (map2 (*) a b))
angle = \a b -> let
dot = (dotProduct a b)
lengths = ((*) (Algebra.Vector.length a) (Algebra.Vector.length b))
in (toRational (acos (fromRational ((/) dot lengths))))
isParallel = \a b -> let
pairs = (zip (toList b) (toList a))
(zero, nonzero) = (List.partition ((.) ((==) 0) second2) pairs)
both_zeros = (and (List.map ((.) ((==) 0) first2) zero))
scalars = (List.map (uncurry (/)) nonzero)
in ((&&) both_zeros (ListExt.allEqual scalars), (ifElse (List.null scalars) (0::Rational) (head scalars)))
lengthSquared = \v -> (dotProduct v v)
length = \v -> (toRational (sqrt (fromRational (lengthSquared v))))
distanceSquared = \a b -> (lengthSquared (Algebra.Vector.subtract b a))
distance = \a b -> (Algebra.Vector.length (Algebra.Vector.subtract b a))
normalize = \v -> (scale ((/) (toRational 1) (Algebra.Vector.length v)) v)
normalizeOrZero = \v -> (ifElse ((==) (lengthSquared v) (toRational 0)) (zero (size v)) (normalize v))
projectionScalar = \base vector -> let
base_squared = (lengthSquared base)
in (ifElse ((==) base_squared 0) 0 ((/) (dotProduct base vector) base_squared))
projection = \base vector -> (scale (projectionScalar base vector) base)
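-- A worked example (illustrative): projecting [3, 4] onto the x-axis [1, 0]
-- gives projectionScalar [1,0] [3,4] == 3 and projection [1,0] [3,4] == [3,0];
-- a zero-length base yields scalar 0 instead of dividing by zero.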
setPrecision = \precision -> (List.map (RatioExt.setPrecision precision))
setPrecision10 = (Algebra.Vector.setPrecision ((%) 1 10000000000))
| stevedonnelly/haskell | code/Algebra/Vector.hs | mit | 2,770 | 0 | 17 | 488 | 1,252 | 707 | 545 | 62 | 1 |
module Rebase.GHC.STRef
(
module GHC.STRef
)
where
import GHC.STRef
| nikita-volkov/rebase | library/Rebase/GHC/STRef.hs | mit | 71 | 0 | 5 | 12 | 20 | 13 | 7 | 4 | 0 |
{-# htermination readsPrec :: Int -> String -> [(Bool,String)] #-}
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/full_haskell/Prelude_readsPrec_8.hs | mit | 67 | 0 | 2 | 10 | 3 | 2 | 1 | 1 | 0 |
{-
GCBackend: Flounder stub generator for generic code
Part of Flounder: a message passing IDL for Barrelfish
Copyright (c) 2007-2010, ETH Zurich.
All rights reserved.
This file is distributed under the terms in the attached LICENSE file.
If you do not find this file, copies can be found by writing to:
ETH Zurich D-INFK, Universit\"atstr. 6, CH-8092 Zurich. Attn: Systems Group.
-}
module GCBackend where
import Data.Char
import qualified CAbsSyntax as C
import Syntax (Interface (Interface))
import GHBackend (flounder_backends, export_fn_name, bind_fn_name, accept_fn_name, connect_fn_name)
import BackendCommon
import LMP (lmp_bind_type, lmp_bind_fn_name)
import qualified UMP (bind_type, bind_fn_name)
import qualified UMP_IPI (bind_type, bind_fn_name)
import qualified Multihop (m_bind_type, m_bind_fn_name)
-- name of the bind continuation function
bind_cont_name :: String -> String
bind_cont_name ifn = ifscope ifn "bind_continuation_direct"
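-- For illustration (assuming 'ifscope' from BackendCommon joins the interface
-- name and suffix with an underscore): for a hypothetical interface "foo"
-- this yields the C identifier "foo_bind_continuation_direct".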
-- name of an alternative bind continuation function
bind_cont_name2 :: String -> String
bind_cont_name2 ifn = ifscope ifn "bind_contination_multihop"
compile :: String -> String -> Interface -> String
compile infile outfile interface =
unlines $ C.pp_unit $ stub_body infile interface
stub_body :: String -> Interface -> C.Unit
stub_body infile (Interface ifn descr _) = C.UnitList [
intf_preamble infile ifn descr,
C.Blank,
C.Include C.Standard "barrelfish/barrelfish.h",
C.Include C.Standard "flounder/flounder_support.h",
C.Include C.Standard ("if/" ++ ifn ++ "_defs.h"),
C.Blank,
C.MultiComment [ "Export function" ],
export_fn_def ifn,
C.Blank,
C.MultiComment [ "Accept function (Export over already shared frame)" ],
accept_fn_def ifn,
C.Blank,
C.MultiComment [ "Generic bind function" ],
-- the two bind functions use the idc drivers in a different order
bind_cont_def ifn (bind_cont_name ifn) (bind_backends ifn (bind_cont_name ifn)),
bind_cont_def ifn (bind_cont_name2 ifn) (multihop_bind_backends ifn (bind_cont_name2 ifn)),
bind_fn_def ifn,
connect_fn_def ifn]
export_fn_def :: String -> C.Unit
export_fn_def n =
C.FunctionDef C.NoScope (C.TypeName "errval_t") (export_fn_name n) params [
localvar (C.Ptr $ C.Struct $ export_type n) "e"
(Just $ C.Call "malloc" [C.SizeOfT $ C.Struct $ export_type n]),
C.If (C.Binary C.Equals exportvar (C.Variable "NULL"))
[C.Return $ C.Variable "LIB_ERR_MALLOC_FAIL"] [],
C.SBlank,
C.SComment "fill in common parts of export struct",
C.StmtList [C.Ex $ C.Assignment dste (C.Variable srcn) | (dste, srcn) <- [
(exportvar `C.DerefField` "connect_cb", "connect_cb"),
(exportvar `C.DerefField` "waitset", "ws"),
(exportvar `C.DerefField` "st", "st"),
(commonvar `C.FieldOf` "export_callback", "export_cb"),
(commonvar `C.FieldOf` "flags", "flags"),
(commonvar `C.FieldOf` "connect_cb_st", "e"),
(commonvar `C.FieldOf` "export_cb_st", "st")]],
C.SBlank,
C.SComment "fill in connect handler for each enabled backend",
C.StmtList [
C.SIfDef ("CONFIG_FLOUNDER_BACKEND_" ++ (map toUpper drv))
[C.Ex $ C.Assignment
(commonvar `C.FieldOf` (drv_connect_callback drv))
(C.Variable $ drv_connect_handler_name drv n)] []
| drv <- flounder_backends ],
C.SBlank,
C.Return $ C.Call "idc_export_service" [C.AddressOf commonvar]
]
where
params = [ C.Param (C.Ptr $ C.TypeName "void") "st",
C.Param (C.Ptr $ C.TypeName "idc_export_callback_fn") "export_cb",
C.Param (C.Ptr $ C.TypeName $ connect_callback_name n) "connect_cb",
C.Param (C.Ptr $ C.Struct "waitset") "ws",
C.Param (C.TypeName "idc_export_flags_t") "flags"]
exportvar = C.Variable "e"
commonvar = exportvar `C.DerefField` "common"
-- XXX: UMP_IPI uses the UMP connect callback
drv_connect_callback "ump_ipi" = drv_connect_callback "ump"
drv_connect_callback drv = drv ++ "_connect_callback"
accept_fn_def :: String -> C.Unit
accept_fn_def n =
C.FunctionDef C.NoScope (C.TypeName "errval_t") (accept_fn_name n) params [
C.Return $ C.Call (drv_accept_fn_name "ump" n) [(C.Variable intf_frameinfo_var), C.Variable "st", C.Variable intf_cont_var, C.Variable "ws", C.Variable "flags"]
]
where
params = [ C.Param (C.Ptr $ C.Struct $ intf_frameinfo_type n) intf_frameinfo_var,
C.Param (C.Ptr $ C.TypeName "void") "st",
-- C.Param (C.Ptr $ C.TypeName "idc_export_callback_fn") "export_cb",
C.Param (C.Ptr $ C.TypeName $ intf_bind_cont_type n) intf_cont_var,
C.Param (C.Ptr $ C.Struct "waitset") "ws",
C.Param (C.TypeName "idc_export_flags_t") "flags"]
connect_fn_def :: String -> C.Unit
connect_fn_def n =
C.FunctionDef C.NoScope (C.TypeName "errval_t") (connect_fn_name n) params [
C.Return $ C.Call (drv_connect_fn_name "ump" n) [C.Variable intf_frameinfo_var, C.Variable intf_cont_var, C.Variable "st", C.Variable "ws", C.Variable "flags"]
]
where
params = [ C.Param (C.Ptr $ C.Struct $ intf_frameinfo_type n) intf_frameinfo_var,
C.Param (C.Ptr $ C.TypeName $ intf_bind_cont_type n) intf_cont_var,
C.Param (C.Ptr $ C.TypeName "void") "st",
C.Param (C.Ptr $ C.Struct "waitset") "ws",
C.Param (C.TypeName "idc_bind_flags_t") "flags"]
-- bind continuation function
bind_cont_def :: String -> String -> [BindBackend] -> C.Unit
bind_cont_def ifn fn_name backends =
C.FunctionDef C.Static C.Void fn_name params [
C.SComment "This bind cont function uses the different backends in the following order:",
C.SComment $ unwords $ map flounder_backend backends,
C.SBlank,
localvar (C.Ptr $ C.Struct "flounder_generic_bind_attempt") "b"
(Just $ C.Variable "st"),
C.Switch driver_num cases
[C.Ex $ C.Call "assert" [C.Unary C.Not $ C.StringConstant "invalid state"]],
C.SBlank,
C.Label "out",
C.Ex $ C.CallInd (C.Cast (C.Ptr $ C.TypeName $ intf_bind_cont_type ifn)
(bindst `C.DerefField` "callback"))
[bindst `C.DerefField` "st", errvar, C.Variable intf_bind_var],
C.Ex $ C.Call "free" [bindst]
]
where
params = [ C.Param (C.Ptr $ C.Void) "st",
C.Param (C.TypeName "errval_t") "err",
C.Param (C.Ptr $ C.Struct $ intf_bind_type ifn) intf_bind_var]
driver_num = bindst `C.DerefField` "driver_num"
bindst = C.Variable "b"
cases = [ C.Case (C.NumConstant $ toInteger n) (mkcase n)
| n <- [0 .. length backends] ]
mkcase n
| n == 0 = try_next
| n == length backends = [
C.SIfDef config_prev_driver
[C.If (test_cb_success prev_backend)
-- success!
[success_callback]
-- failure, but clean up attempt
[C.StmtList $ cleanup_bind prev_backend,
C.If (C.Unary C.Not $ test_cb_try_next prev_backend)
[fail_callback errvar]
[]]
]
[],
fail_callback (C.Variable "FLOUNDER_ERR_GENERIC_BIND_NO_MORE_DRIVERS")
]
| otherwise = [
C.SIfDef config_prev_driver
[C.If (test_cb_success prev_backend)
-- success!
[success_callback]
-- failure, cleanup and decide whether to continue
[C.StmtList $ cleanup_bind prev_backend,
C.If (test_cb_try_next prev_backend)
[C.Goto ("try_next_" ++ show n)]
[C.SComment "report permanent failure to user",
fail_callback errvar]
],
C.Label ("try_next_" ++ show n)
] [],
-- previous driver not enabled, just try the next
C.StmtList try_next]
where
prev_backend = backends !! (n - 1)
next_backend = backends !! n
config_prev_driver = "CONFIG_FLOUNDER_BACKEND_"
++ (map toUpper (flounder_backend prev_backend))
config_next_driver = "CONFIG_FLOUNDER_BACKEND_"
++ (map toUpper (flounder_backend next_backend))
try_next = [C.Ex $ C.PostInc driver_num,
C.SIfDef config_next_driver
[C.SComment "try next backend",
C.StmtList $ start_bind next_backend,
C.If (C.Call "err_is_fail" [errvar])
-- bind attempt failed
[C.StmtList $ cleanup_bind next_backend,
fail_callback errvar]
[C.ReturnVoid]]
[C.SComment "skip non-enabled backend (fall through)"]]
fail_callback err = C.StmtList $
(if err /= errvar
then [C.Ex $ C.Assignment errvar err]
else [])
++ [
C.Ex $ C.Assignment (C.Variable intf_bind_var) (C.Variable "NULL"),
C.Goto "out"]
success_callback = C.Goto "out"
bind_fn_def :: String -> C.Unit
bind_fn_def n =
C.FunctionDef C.NoScope (C.TypeName "errval_t") (bind_fn_name n) params [
C.SComment "allocate state",
localvar (C.Ptr $ C.Struct "flounder_generic_bind_attempt") "b"
(Just $ C.Call "malloc" [C.SizeOfT $ C.Struct "flounder_generic_bind_attempt"]),
C.If (C.Binary C.Equals (C.Variable "b") (C.Variable "NULL"))
[C.Return $ C.Variable "LIB_ERR_MALLOC_FAIL"] [],
C.SBlank,
C.SComment "fill in binding state",
C.StmtList [C.Ex $ C.Assignment (C.Variable "b" `C.DerefField` dstf) srce
| (dstf, srce) <- [
("iref", C.Variable "iref"),
("waitset", C.Variable "waitset"),
("driver_num", C.NumConstant 0),
("callback", C.Variable intf_cont_var),
("st", C.Variable "st"),
("flags", C.Variable "flags")]],
C.SBlank,
C.If (C.Binary C.BitwiseAnd (C.Variable "flags") (C.Variable "IDC_BIND_FLAG_MULTIHOP"))
[C.Ex $ C.Call (bind_cont_name2 n) [C.Variable "b", C.Variable "SYS_ERR_OK", C.Variable "NULL"]]
[C.Ex $ C.Call (bind_cont_name n) [C.Variable "b", C.Variable "SYS_ERR_OK", C.Variable "NULL"]],
C.SBlank,
C.Return $ C.Variable "SYS_ERR_OK"
]
where
params = [ C.Param (C.TypeName "iref_t") "iref",
C.Param (C.Ptr $ C.TypeName $ intf_bind_cont_type n) intf_cont_var,
C.Param (C.Ptr $ C.TypeName "void") "st",
C.Param (C.Ptr $ C.Struct "waitset") "waitset",
C.Param (C.TypeName "idc_bind_flags_t") "flags" ]
----------------------------------------------------------------------------
-- everything that we need to know about a backend to attempt a generic bind
----------------------------------------------------------------------------
data BindBackend = BindBackend {
flounder_backend :: String, -- name of the flounder backend
start_bind :: [C.Stmt], -- code to attempt a bind
test_cb_success :: C.Expr, -- expression to test if a bind succeeded (in the callback)
test_cb_try_next :: C.Expr, -- expression to test if a bind might succeed with another backend
cleanup_bind :: [C.Stmt] -- code to cleanup a failed bind
}
-- the available bind backends
-- Caution: order of list matters (we will try to bind in that order)
bind_backends :: String -> String -> [BindBackend]
bind_backends ifn cont_fn_name = map (\i -> i ifn (C.Variable cont_fn_name))
[lmp_bind_backend,
ump_ipi_bind_backend,
ump_bind_backend,
multihop_bind_backend]
-- backends in different order (prefer multihop over ump, etc.)
multihop_bind_backends :: String -> String -> [BindBackend]
multihop_bind_backends ifn cont_fn_name = map (\i -> i ifn (C.Variable cont_fn_name))
[lmp_bind_backend,
multihop_bind_backend,
ump_ipi_bind_backend,
ump_bind_backend]
bindst = C.Variable "b"
binding = bindst `C.DerefField` "binding"
iref = bindst `C.DerefField` "iref"
waitset = bindst `C.DerefField` "waitset"
flags = bindst `C.DerefField` "flags"
lmp_bind_backend ifn cont =
BindBackend {
flounder_backend = "lmp",
start_bind = [
C.Ex $ C.Assignment binding $
C.Call "malloc" [C.SizeOfT $ C.Struct $ lmp_bind_type ifn],
C.Ex $ C.Call "assert" [C.Binary C.NotEquals binding (C.Variable "NULL")],
C.Ex $ C.Assignment errvar $
C.Call (lmp_bind_fn_name ifn) [binding, iref, cont, C.Variable "b", waitset,
flags,
C.Variable "DEFAULT_LMP_BUF_WORDS"]
],
test_cb_success = C.Call "err_is_ok" [errvar],
test_cb_try_next = C.Binary C.Equals (C.Call "err_no" [errvar])
(C.Variable "MON_ERR_IDC_BIND_NOT_SAME_CORE"),
cleanup_bind = [ C.Ex $ C.Call "free" [binding] ]
}
ump_bind_backend ifn cont =
BindBackend {
flounder_backend = "ump",
start_bind = [
C.Ex $ C.Assignment binding $
C.Call "malloc" [C.SizeOfT $ C.Struct $ UMP.bind_type ifn],
C.Ex $ C.Call "assert" [C.Binary C.NotEquals binding (C.Variable "NULL")],
C.Ex $ C.Assignment errvar $
C.Call (UMP.bind_fn_name ifn) [binding, iref, cont, C.Variable "b", waitset,
flags,
C.Variable "DEFAULT_UMP_BUFLEN",
C.Variable "DEFAULT_UMP_BUFLEN"]
],
test_cb_success = C.Call "err_is_ok" [errvar],
test_cb_try_next = C.Variable "true",
cleanup_bind = [ C.Ex $ C.Call "free" [binding] ]
}
ump_ipi_bind_backend ifn cont =
BindBackend {
flounder_backend = "ump_ipi",
start_bind = [
C.Ex $ C.Assignment binding $
C.Call "malloc" [C.SizeOfT $ C.Struct $ UMP_IPI.bind_type ifn],
C.Ex $ C.Call "assert" [C.Binary C.NotEquals binding (C.Variable "NULL")],
C.Ex $ C.Assignment errvar $
C.Call (UMP_IPI.bind_fn_name ifn) [binding, iref, cont, C.Variable "b", waitset,
flags,
C.Variable "DEFAULT_UMP_BUFLEN",
C.Variable "DEFAULT_UMP_BUFLEN"]
],
test_cb_success = C.Call "err_is_ok" [errvar],
test_cb_try_next = C.Variable "true",
cleanup_bind = [ C.Ex $ C.Call "free" [binding] ]
}
multihop_bind_backend ifn cont =
BindBackend {
flounder_backend = "multihop",
start_bind = [C.Ex $ C.Assignment binding $
C.Call "malloc" [C.SizeOfT $ C.Struct $ Multihop.m_bind_type ifn],
C.Ex $ C.Call "assert" [C.Binary C.NotEquals binding (C.Variable "NULL")],
C.Ex $ C.Assignment errvar $
C.Call (Multihop.m_bind_fn_name ifn) [binding, iref, cont, C.Variable "b", waitset, flags]],
test_cb_success = C.Call "err_is_ok" [errvar],
test_cb_try_next = C.Variable "true",
cleanup_bind = [ C.Ex $ C.Call "free" [binding] ]
}
| joe9/barrelfish | tools/flounder/GCBackend.hs | mit | 16,534 | 6 | 18 | 5,388 | 4,301 | 2,275 | 2,026 | 268 | 2 |
{-# htermination addToFM_C :: (b -> b -> b) -> FiniteMap (Ratio Int) b -> (Ratio Int) -> b -> FiniteMap (Ratio Int) b #-}
import FiniteMap
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/full_haskell/FiniteMap_addToFM_C_7.hs | mit | 139 | 0 | 3 | 27 | 5 | 3 | 2 | 1 | 0 |
module CLaSH.DepCore.Type where
import Bound (Scope (..))
import Bound.Name (Name, name, instantiate1Name)
import Control.Comonad (extract)
import CLaSH.DepCore.Environment (Env (..), declarePat, extendEnv)
import CLaSH.DepCore.Term (Alt (..), Binder (..), FastString, Term (..), Type)
import CLaSH.DepCore.Util (whnf)
inferType :: Eq a => Show n => Show a
=> Env n a -- ^ Environment
-> Term n a -- ^ Term
-> Type n a -- ^ Inferred type
inferType env (Var a) = lookupTy env a
inferType _ (Universe i) = Universe (i+1)
inferType env (App e1 e2) = if s == whnf env te
then instantiate1Name e2 t
else error "Mismatch"
where
te = inferType env e2
(_,s,t) = inferPi env (inferType env e1)
inferType env (Bind (Pi b) (Scope s)) = Universe (max k1 k2)
where
ty = extract b
env' = extendEnv inferType env ty Nothing
k1 = inferUniverse env (inferType env ty)
k2 = inferUniverse env' (inferType env' s)
inferType env (Bind (Lam b) (Scope s)) = Bind (Pi b) (Scope (inferType env' s))
where
env' = extendEnv inferType env (extract b) Nothing
inferType env (Bind (Let ty tm) (Scope e2)) = instantiate1Name e1 (Scope t)
where
e1 = extract tm
env' = extendEnv inferType env (extract ty) (Just e1)
t = inferType env' e2
inferType env (Case scrut alts) = undefined
where
sty = inferType env scrut
(tc,params) = inferTCon env sty
checkAlt (Alt n pat s) = undefined
where
decls = declarePat pat (TCon tc params)
inferPi :: Eq a => Show n => Show a
=> Env n a
-> Type n a
-> (n,Type n a,Scope (Name n ()) (Type n) a)
inferPi env ty = case whnf env ty of
Bind (Pi b) s -> (name b, extract b, s)
  ty' -> error ("Function expected: " ++ show ty')
inferUniverse :: Eq a => Show n => Show a
=> Env n a
-> Type n a
-> Integer
inferUniverse env ty = case whnf env ty of
Universe i -> i
ty' -> error ("Type expected: " ++ show ty')
inferTCon :: Eq a => Show n => Show a
=> Env n a
-> Type n a
-> (FastString, [Type n a])
inferTCon env ty = case whnf env ty of
(TCon n params) -> (n,params)
ty' -> error ("TCon expected: " ++ show ty')
| christiaanb/NewCore | src/CLaSH/DepCore/Type.hs | mit | 2,564 | 0 | 14 | 942 | 1,004 | 511 | 493 | -1 | -1 |
-----------------------------------------------------------------------------
-- |
-- Module : Data.Types
-- License : MIT (see the LICENSE file)
-- Maintainer : Felix Klein ([email protected])
--
-- Types of the different expressions, semantics and targets.
--
-----------------------------------------------------------------------------
{-# LANGUAGE
LambdaCase
, MultiParamTypeClasses
, TypeSynonymInstances
, FlexibleInstances
#-}
-----------------------------------------------------------------------------
module Data.Types
( Target(..)
, Semantics(..)
, SignalType(..)
, IdType(..)
, SignalDecType(..)
) where
-----------------------------------------------------------------------------
import Data.Convertible
( Convertible(..)
, ConvertError(..)
)
import Data.Expression
( ExprPos
, Expr
)
import Data.Char
( toLower
)
import Control.Arrow
( (>>>)
)
-----------------------------------------------------------------------------
-- | Target types.
data Target =
TargetMealy
-- ^ Mealy machine target
| TargetMoore
-- ^ Moore machine target
deriving (Eq, Ord)
-----------------------------------------------------------------------------
instance Convertible Target String where
safeConvert = return . \case
TargetMealy -> "mealy"
TargetMoore -> "moore"
-----------------------------------------------------------------------------
instance Convertible String Target where
safeConvert = map toLower >>> \case
"mealy" -> return TargetMealy
"moore" -> return TargetMoore
str -> Left ConvertError
{ convSourceValue = str
, convSourceType = "String"
, convDestType = "Target"
, convErrorMessage = "Unknown target"
}
-----------------------------------------------------------------------------
-- | Semantic types.
data Semantics =
SemanticsMealy
-- ^ Standard Mealy machine semantics.
| SemanticsMoore
-- ^ Standard Moore machine semantics.
| SemanticsStrictMealy
    -- ^ Mealy machine semantics with strict environment assumptions.
| SemanticsStrictMoore
    -- ^ Moore machine semantics with strict environment assumptions.
deriving (Eq, Ord)
-----------------------------------------------------------------------------
instance Convertible Semantics String where
safeConvert = return . \case
SemanticsMealy -> "mealy"
SemanticsMoore -> "moore"
SemanticsStrictMealy -> "mealy,strict"
SemanticsStrictMoore -> "moore,strict"
-----------------------------------------------------------------------------
instance Convertible String Semantics where
safeConvert = map toLower >>> \case
"mealy" -> return SemanticsMealy
"moore" -> return SemanticsMoore
"mealy,strict" -> return SemanticsStrictMealy
"moore,strict" -> return SemanticsStrictMoore
str -> Left ConvertError
{ convSourceValue = str
, convSourceType = "String"
, convDestType = "Semantics"
, convErrorMessage = "Unknown semantics"
}
-----------------------------------------------------------------------------
-- | Signal types.
data SignalType =
STInput
| STOutput
| STGeneric
deriving (Eq)
-----------------------------------------------------------------------------
-- | Signal declaration types.
data SignalDecType a =
SDSingle (a,ExprPos)
| SDBus (a,ExprPos) (Expr a)
| SDEnum (a,ExprPos) (a,ExprPos)
-----------------------------------------------------------------------------
-- | Expression types.
data IdType =
TEmptySet
| TSignal SignalType
| TBus SignalType
| TTypedBus SignalType String Int
| TEnum String Int
| TNumber
| TBoolean
| TLtl
| TPattern
| TPoly Int
| TSet IdType
deriving (Eq)
-----------------------------------------------------------------------------
instance Show IdType where
show x = case x of
TEmptySet -> "empty set"
TSignal STInput -> "input signal"
TSignal STOutput -> "output signal"
TSignal STGeneric -> "signal"
TBus STInput -> "input bus"
TBus STOutput -> "output bus"
TBus STGeneric -> "bus"
TTypedBus STInput t _ -> t ++ " input bus"
TTypedBus STOutput t _ -> t ++ " output bus"
TTypedBus STGeneric t _ -> t ++ " bus"
TEnum t _ -> t
TNumber -> "numerical"
TBoolean -> "boolean"
TLtl -> "ltl"
TPattern -> "pattern"
TPoly y -> "a" ++ show y
TSet y -> show y ++ " set"
-----------------------------------------------------------------------------
| reactive-systems/syfco | src/lib/Data/Types.hs | mit | 4,749 | 0 | 12 | 1,064 | 765 | 432 | 333 | 102 | 0 |
{-# LANGUAGE DisambiguateRecordFields #-}
{-# LANGUAGE RecordWildCards #-}
module Config where
import Data.Colour
data Config = Config {
dhdx :: Double,
dhdy :: Double,
saturation :: Double,
value :: Double,
style :: Style
}
data Style = Normal
| Background {fgConfig :: TrueColour}
| BackgroundFull {_fgbf :: TrueColour}
defaultConfig :: Config
defaultConfig = Config 5 5 1 1 Normal
defaultBGConfig :: Config
defaultBGConfig = Config 5 5 1 1 (Background (0,0,0))
defaultBGFConfig :: Config
defaultBGFConfig = Config 5 5 1 1 (BackgroundFull (0,0,0))
-- TODO : nicer type-level handling
sanitise :: Config -> Config
sanitise = mkConfig . UnsafeConfig
newtype UnsafeConfig = UnsafeConfig Config
mkConfig :: UnsafeConfig -> Config
mkConfig (UnsafeConfig c@(Config {..})) =
c { saturation = fitRange 0 1 saturation,
value = fitRange 0 1 value,
style = sanitiseStyle style
}
sanitiseStyle :: Style -> Style
sanitiseStyle Normal = Normal
sanitiseStyle (Background (r,g,b)) =
Background (f r, f g, f b) where f = fitRange 0 255
sanitiseStyle (BackgroundFull (r,g,b)) =
BackgroundFull (f r, f g, f b) where f = fitRange 0 255
fitRange :: Ord a => a -> a -> a -> a
fitRange lBound uBound n
| n < lBound = lBound
| uBound < n = uBound
| otherwise = n
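-- e.g. (sketch) fitRange 0 255 300 == 255 and fitRange 0 255 (-1) == 0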
| lesguillemets/holcat | src/Config.hs | mit | 1,387 | 0 | 11 | 354 | 468 | 256 | 212 | 38 | 1 |
{-# LANGUAGE LambdaCase #-}
data Exp a
= Lam a (Exp a)
| Var a
| App (Exp a) (Exp a)
example :: Exp a -> a
example = \case
Lam a b -> a
Var a -> a
App a b -> example a
main = return ()
| riwsky/wiwinwlh | src/lambdacase.hs | mit | 202 | 0 | 8 | 66 | 107 | 54 | 53 | 11 | 3 |
{-# LANGUAGE InstanceSigs #-}
module FunctionApplicative where
import Control.Applicative (liftA2)
newtype HumanName =
HumanName String
deriving (Eq, Show)
newtype DogName =
DogName String
deriving (Eq, Show)
newtype Address =
Address String
deriving (Eq, Show)
data Person =
Person {
humanName :: HumanName
, dogName :: DogName
, address :: Address
} deriving (Eq, Show)
data Dog =
Dog {
dogsname :: DogName
, dogsAddress :: Address
} deriving (Eq, Show)
pers :: Person
pers =
Person (HumanName "Big Bird")
(DogName "Barkley")
(Address "Sesame Street")
chris :: Person
chris = Person (HumanName "Chris Allen")
(DogName "Papu")
(Address "Austin")
-- without Reader
getDog :: Person -> Dog
getDog p =
Dog (dogName p) (address p)
-- with Reader
getDogR :: Person -> Dog
getDogR =
Dog <$> dogName <*> address
-- with Reader alternative
getDogR' :: Person -> Dog
getDogR' = liftA2 Dog dogName address
-- 1.
myLiftA2 :: Applicative f => (a -> b -> c) -> f a -> f b -> f c
myLiftA2 f a b = f <$> a <*> b
-- 2.
newtype Reader r a = Reader { runReader :: r -> a }
asks :: (r -> a) -> Reader r a
asks f = Reader f
-- 3.
-- (.) ::(b->c)->(a->b)->(a->c)
instance Functor (Reader r) where
fmap :: (a -> b) -> Reader r a -> Reader r b
fmap f (Reader ra) = Reader $ f . ra
instance Applicative (Reader r) where
pure :: a -> Reader r a
pure a = Reader $ const a
--(<*>) :: (r -> a -> b) -> (r -> a) -> (r -> b)
(<*>) :: Reader r (a -> b) -> Reader r a -> Reader r b
(Reader rab) <*> (Reader ra) = Reader $ \r -> (rab r) (ra r)
-- 4.
getDogR'' :: Reader Person Dog
getDogR'' = Reader $ liftA2 Dog dogName address
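-- A quick sanity check (sketch): the Reader-based version should agree
-- with the plain function version on the sample person defined above.
getDogsAgree :: Bool
getDogsAgree = runReader getDogR'' pers == getDog pers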
| NickAger/LearningHaskell | HaskellProgrammingFromFirstPrinciples/Chapter22.hsproj/FunctionApplicative.hs | mit | 1,777 | 0 | 10 | 494 | 634 | 340 | 294 | 55 | 1 |
--------------------------------------------------------------------------------
-- Lattice paths
-- Problem 15
-- Starting in the top left corner of a 2×2 grid, and only being able to move
-- to the right and down, there are exactly 6 routes to the bottom right corner.
-- How many such routes are there through a 20×20 grid?
--------------------------------------------------------------------------------
import Math.Combinatorics.Exact.Binomial
solve grids = choose (2*grids) grids
main = do
print $ solve 2
print $ solve 20
{-
This is equivalent to Pascal's triangle, just turned sideways, constrained
to even numbered rows, with only paths to the center position allowed.
-}
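-- e.g. solve 2 == choose 4 2 == 6, matching the 2x2 example above.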
| bertdouglas/euler-haskell | 001-050/15a.hs | mit | 694 | 0 | 8 | 106 | 61 | 34 | 27 | 5 | 1 |
{-# LANGUAGE TypeOperators, GADTs #-}
-- | Some basic type definitions and re-exporting universally used
-- libraries.
module Core.Types
( Row, Col
, (:->)
, (:~>), unsafeWeakLens, weak, (<#)
, Dir4 (..)
, Dir8 (..)
, move4, opposite4, manhattan, clock, anticlock, direction
, (?)
, ix, ixD
-- * Functor fixpoints and anamorphisms
, Fix (..)
, step, eternal, fixF
-- * Topped Types
, Topped (..)
-- * Standard Terminal Colors
, Color (..)
-- * Re-exports
, module Control.Category
, module Data.Lens.Common
) where
import Prelude hiding ((.), id)
import Control.Category
import Data.Array.IArray
import Data.Lens.Common
import qualified Data.Serialize as S
import Test.QuickCheck
-- | A row in an array. Used because @('Row', 'Col')@ is more readable than @(Int, Int)@.
type Row = Int
-- | A column in an array. Used because @('Row', 'Col')@ is more readable than @(Int, Int)@.
type Col = Int
-- | fclabels style type operator for data-lens
type a :-> b = Lens a b
-- | Weak Lenses: Weak lenses @(a :~> b)@ are a subset of lenses that
-- only form a semigroupoid, and not a category. By design there is no
-- id lens, so one cannot access the entire state.
--
-- The intent is to forbid the following (bad) situation:
--
-- > action = do
-- > level <- access id
-- > monsterRef <- spawn monster
-- > id != level
-- > kill monsterRef -- monsterRef no longer exists!
--
-- Notice that by accessing the entire state (level), we can
-- essentially /time travel/ by resetting the state back to a previous
-- state after getting references pointing to the current state!
--
-- While weak lenses do form a semigroupoid, we don't define an
-- instance because all the weak lenses have type @Level :~> a@ where
-- a is not a @Level@. Instead, we use @(<#)@ to combine Weak lenses
-- with ordinary lenses.
--
-- Note that (currently) nothing particularly bad happens if you try
-- to use a undefined reference, and the above code could even make
-- sense as part of a time-travel game mechanic. One could set all
-- undefined monsters as some kind of Dr Who-esque temporal paradox
-- monster, and then trying to access an undefined monster would
-- create such a paradox monster. At the very least I think keeping
-- lenses into the game level somehow separate will potentially be
-- useful if I need to optimize the game in the future by adding
-- mutable state using IOArrays or similar.
newtype a :~> b = WL { weak :: a :-> b }
-- | Introduce weak lenses from ordinary lenses. It's marked unsafe
-- because @unsafeWeakLens id@ would make @(:~>)@ a category, which
-- breaks the design contract for weak lenses.
unsafeWeakLens :: a :-> b -> a :~> b
unsafeWeakLens = WL
infixr 8 <#
-- | Weak lenses can be combined with ordinary lenses using '(<#)'. It
-- satisfies the law:
--
-- > f <# g <# h = (f . g) <# h
--
(<#) :: (b :-> c) -> (a :~> b) -> (a :~> c)
f <# (WL g) = WL (f . g)
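-- For example (a sketch; 'monsterHP' and 'monsterAt' are hypothetical
-- lenses, not ones defined in this module):
--
-- > monsterHP :: Monster :-> Int
-- > monsterAt :: Level :~> Monster
--
-- Then @monsterHP <# monsterAt :: Level :~> Int@ focuses one monster's
-- hit points without ever exposing the whole 'Level'.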
-- | Eight way directions
--
-- Abyss is built on 4 way movement, so we don't use these much except
-- in @Core.FOV@.
data Dir8 = N8 | NE8 | E8 | SE8 | S8 | SW8 | W8 | NW8 deriving (Eq, Enum, Bounded, Show)
instance Arbitrary Dir8 where
arbitrary = arbitraryBoundedEnum
instance S.Serialize Dir8 where
put d = S.put (fromEnum d)
get = toEnum <$> S.get
-- | Four way directions
data Dir4 = N4 | E4 | S4 | W4 deriving (Eq, Enum, Show)
instance S.Serialize Dir4 where
put N4 = S.putWord8 0x00
put E4 = S.putWord8 0x01
put S4 = S.putWord8 0x02
put W4 = S.putWord8 0x03
get = (toEnum . fromIntegral) <$> S.getWord8
-- | Move a @('Row', 'Col')@ pair one unit in a specified direction.
move4 :: Dir4 -> (Row, Col) -> (Row, Col)
move4 N4 (r, c) = (r - 1, c)
move4 E4 (r, c) = (r, c + 1)
move4 S4 (r, c) = (r + 1, c)
move4 W4 (r, c) = (r, c - 1)
direction :: (Row, Col) -> (Row, Col) -> Maybe Dir4
direction (r1, c1) (r2, c2)
| r1 < r2 = Just S4
| r1 > r2 = Just N4
| c1 > c2 = Just W4
| c1 < c2 = Just E4
| otherwise = Nothing
-- | Return the opposite compass facing direction.
opposite4 :: Dir4 -> Dir4
opposite4 N4 = S4
opposite4 E4 = W4
opposite4 S4 = N4
opposite4 W4 = E4
-- | The manhattan (taxicab) distance between two locations.
manhattan :: (Row, Col) -> (Row, Col) -> Int
manhattan (r1, c1) (r2, c2) = abs (r1 - r2) + abs (c1 - c2)
clock :: Dir4 -> Dir4
clock N4 = E4
clock E4 = S4
clock S4 = W4
clock W4 = N4
anticlock :: Dir4 -> Dir4
anticlock N4 = W4
anticlock E4 = N4
anticlock S4 = E4
anticlock W4 = S4
(?) :: (IArray a e, Ix i) => a i e -> i -> Maybe e
arr ? i | bounds arr `inRange` i = Just (arr ! i)
| otherwise = Nothing
ix :: (IArray a e, Ix i) => i -> a i e :-> e
ix i = lens (! i) (\v arr -> arr // [(i, v)])
ixD :: (IArray a e, Ix i) => e -> a i e -> i -> e
ixD d arr i | bounds arr `inRange` i = arr ! i
| otherwise = d
-- | A @Fix f@ is a function @s -> f s@ with some state @s@. Notice
-- that we use GADTs/existential types to hide the state @s@. The
-- state is therefore /encapsulated/ within the type. They are mostly
-- used to implement transition systems for things like AI routines and
-- effects occurring over time, where the internal state is used to
-- represent either the state in the transition system, or the
-- progress of the effect respectively.
--
-- More abstractly we can think of @Fix f@ as a fixpoint of the
-- functor @f@ given by its anamorphism. I suspect (but have not
-- proved) that this is isomorphic to the traditional definition:
--
-- > data FixA f = Fix { unFix :: (f (FixA f)) }
--
-- as we can define:
--
-- > ana :: Functor f => (a -> f a) -> (a -> FixA f)
-- > ana f = Fix . fmap (ana f) . f
-- >
-- > data FixB f where
-- > Ana :: (s -> f s) -> s -> FixB f
-- >
-- > bToA :: Functor f => FixB f -> FixA f
-- > bToA (Ana f x) = ana f x
-- >
-- > aToB :: FixA f -> FixB f
-- > aToB f = Ana unFix f
data Fix f where
Ana :: (s -> f s) -> s -> Fix f
instance Applicative f => Monoid (Fix f) where
mempty = Ana pure ()
mappend (Ana f x) (Ana g y) = Ana h (x, y)
where
h (a, b) = (,) <$> f a <*> g b
step :: Functor f => Fix f -> f (Fix f)
step (Ana f x) = Ana f <$> f x
fixF :: Functor f => f (Fix f) -> Fix f
fixF = Ana (fmap step)
eternal :: Functor f => f () -> Fix f
eternal g = Ana (const g) ()
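-- A tiny example (sketch): a counter whose hidden state is an 'Int' and
-- which can always take another step:
--
-- > counter :: Fix Maybe
-- > counter = Ana (\n -> Just (n + 1)) (0 :: Int)
--
-- Each 'step' then yields @Just@ of a new 'Fix' with the count advanced,
-- while the count itself stays encapsulated.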
-- | @Topped a@ is simply the type @a@ with a designated top element
-- @Top@ with the @Bounded@ and @Ord@ typeclasses defined
-- appropriately.
data Topped a = Top | NotTop !a deriving (Eq, Show)
instance Bounded a => Bounded (Topped a) where
minBound = NotTop minBound
maxBound = Top
instance Ord a => Ord (Topped a) where
_ <= Top = True
Top <= _ = False
(NotTop x) <= (NotTop y) = x <= y
data Color = Black | Red | Green | Yellow | Blue | Magenta | Cyan | White
| jameshsmith/HRL | Server/Core/Types.hs | mit | 6,771 | 0 | 10 | 1,573 | 1,712 | 967 | 745 | -1 | -1 |
module Y2016.M09.D14.Exercise where
import Data.Map (Map)
{--
This problem is from Master's Varity Puzzles from PennyPress, page 33.
Today we'll do a little domino arithmetic.
A domino tile is a tile with the playing side divided in half with each half
marked with zero up to 6 (inclusive) marks
https://en.wikipedia.org/wiki/Dominoes
So, an example tile:
_________________
| * * | * * * |
| * | |
| * * | * * * |
-----------------
is the (5,6) domino.
As a domino can be played in any direction, the (5,6) and (6,5) domino are the
same domino.
There's a hint for you.
Okay, we're not going to play a game of dominos (not today, anyway), we're
going to use some domino tiles in an unorthodox manner.
Given the following dominos:
--}
type Domino = (Int, Int)
dominos :: [Domino]
dominos = [(5,6), (2,5), (6,6), (4,2)]
{--
Now, here's the unorthodoxy: we're going to align the dominos so that they form
decimal numbers and perform 'domino multiplication'.
Here's the alignment:
(x, y) (z,
* a)
-------------
(b, c) (d, e)
That is to say, domino (x,y) forms the 100's and 10's digits of the multiplicand,
the second domino (z, a) forms the 1's digit of the multiplicand and is also
the 1's digit of the multiplier.
The other two dominos (b,c) and (d,e) form the solution. So
xyz * a = bcde
Solve this problem for positions "xyzabcde" with the dominos supplied:
--}
solver :: [Domino] -> [Map Char Int]
solver tiles = undefined
| geophf/1HaskellADay | exercises/HAD/Y2016/M09/D14/Exercise.hs | mit | 1,496 | 0 | 7 | 335 | 109 | 69 | 40 | 7 | 1 |
{-# LANGUAGE NamedFieldPuns #-}
module Hickory.Utils.Projection where
import Hickory.Math.Matrix
import Hickory.Math.VectorMatrix
import Hickory.Types
import Hickory.Math.Vector
import Control.Lens ((^.))
import Linear (V2, V3(..), _xy, zero)
unproject :: Mat44 -> Size Int -> Scalar -> V2 Scalar -> V2 Scalar
unproject mat ss z pos = lerpUnproject pos z mat (realToFrac <$> viewportFromSize ss) ^. _xy
-- Useful for transforming a difference in touch coordinates into
-- a world difference vector
unprojectDelta :: V2 Scalar -> Scalar -> Mat44 -> Size Int -> V2 Scalar
unprojectDelta p depth mat ss = unproject mat ss depth p - unproject mat ss depth zero
project :: Mat44 -> Size Int -> V3 Scalar -> V3 Scalar
project mat ss = viewProject mat (realToFrac <$> viewportFromSize ss)
| asivitz/Hickory | Hickory/Utils/Projection.hs | mit | 787 | 0 | 9 | 127 | 250 | 132 | 118 | 14 | 1 |
module Audit where
import Data.Char
import Test.QuickCheck
type Audit a = (a,String)
-- Given a function, debug produces a function logging information
-- about the computed value for each argument
debug :: (Show a, Show b) => (a -> b) -> (a -> Audit b)
debug f x = (y, show x ++ " |-> " ++ show y ++ "; ")
where
y = f x
ord' :: Char -> Audit Int
ord' = debug ord
chr' :: Int -> Audit Char
chr' = debug chr
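-- For example (sketch of the log format produced by 'debug'):
--
-- >>> ord' 'a'
-- (97,"'a' |-> 97; ")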
-- Exercise 1
bind :: (a -> Audit b) -> (Audit a -> Audit b)
bind = undefined
-- Using # instead of * for composition to avoid ambiguities
(#) :: (b -> Audit c) -> (a -> Audit b) -> (a -> Audit c)
g' # f' = bind g' . f'
-- Exercise 2
unit :: a -> Audit a
unit = undefined
-- lift --- lifting functions
lift :: (a -> b) -> (a -> Audit b)
lift f = unit . f
-- Test that (for a given value x) lift g # lift f = lift (g.f)
-- For simplicity we restrict to Float functions as in the tutorial
check_lift :: (Float -> Float) -> (Float -> Float) -> Float -> Bool
check_lift f g x = undefined
test_lift :: IO ()
test_lift = quickCheck $ check_lift (+2) (*3)
-- Exercise Ten (a): Rewrite the module to make Audit an instance of
-- the Monad typeclass
-- Note: You first need to make it an instance of Functor and Applicative
| PavelClaudiuStefan/FMI | An_3_Semestru_1/ProgramareDeclarativa/Extra/Laborator/Laborator 9/Audit.hs | cc0-1.0 | 1,241 | 0 | 9 | 283 | 393 | 214 | 179 | 23 | 1 |
module Test where
import Probability
model = do
xs <- iid 10 (categorical [0.1, 0.2, 0.3, 0.4])
return ["xs" %=% xs]
main = do
mcmc model
| bredelings/BAli-Phy | tests/prob_prog/categorical/sample.hs | gpl-2.0 | 161 | 0 | 11 | 49 | 66 | 35 | 31 | 7 | 1 |
import qualified Data.Map as Map
import Data.List (group, sort)
import Control.Arrow
import Data.Maybe
-- 26
combinations _ [] = []
combinations n al@(x: xs)
| n >= length al = [al]
| n == 1 = map (: []) al
| otherwise = combinations n xs ++ map (\a -> x: a) (combinations (n - 1) xs)
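-- e.g. (sketch) combinations 2 "abc" == ["bc","ab","ac"]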
test26 = do print $ combinations 3 "abcdef"
-- 28
predf f x y = (f x) > (f y)
pred' = predf length
lsortg _ [] = []
lsortg predf (x: xs) =
lsortg predf [y | y <- xs, predf x y]
++ [x] ++
lsortg predf [y | y <- xs, not (predf x y)]
lsort = lsortg pred'
test28 = do print $ lsort ["ab0","d0","fg1","d1","ijkl","m2","o"]
stat s = map (head &&& length) . group . sort $ map length s
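-- e.g. (sketch) stat ["ab0","d0","fg1"] == [(2,1),(3,2)]
-- i.e. one string of length 2 and two of length 3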
pred0 m = predf (\x -> fromJust (lookup (length x) m))
lsort0 x = lsortg (pred0 (stat x)) x
test280 = do print $ stat ["ab0","d0","fg1","d1","ijkl","m2","o"]
test281 = do print $ lsort0 ["ab0","d0","fg1","d1","ijkl","m2","o"]
main = test281
| liuyang1/H99 | 20.hs | gpl-2.0 | 934 | 0 | 12 | 206 | 526 | 276 | 250 | 25 | 1 |
module Block
( defaultBlockHeight
, defaultBlockLength
, defaultBlockWidth
, blockHeight
, blockColor
, blockType
, blockVertices
, blockIndices
, blockNormals
, renderBlock
) where
import Control.Applicative
import Data.Monoid
import Control.Lens
import Control.Wire.Core
import Graphics.Rendering.OpenGL
import Graphics.GLUtil
import Linear
import Resource
import Types
import Util
defaultBlockLength :: GLf
defaultBlockLength = 1.0
defaultBlockWidth :: GLf
defaultBlockWidth = 4.0
defaultBlockHeight :: GLf
defaultBlockHeight = 2.0
blockVertices :: [V3 GLf]
blockVertices = V3 <$> [l, -l] <*> [h, -h] <*> [w, -w]
where
l = defaultBlockLength / 2.0
w = defaultBlockWidth / 2.0
h = defaultBlockHeight / 2.0
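-- (sketch) the applicative product above enumerates all 8 corner points
-- of an axis-aligned box centred on the origin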
blockIndices :: [V3 GLuint]
blockIndices = [ V3 5 3 1 -- front
, V3 5 7 2
, V3 4 1 0 -- top
, V3 4 5 1
, V3 4 2 0 -- back
, V3 4 6 2
, V3 6 7 3 -- bottom
, V3 3 2 6
, V3 0 1 2 -- left
, V3 1 3 2
, V3 5 7 6 -- right
, V3 6 4 5 ]
blockNormals :: [V3 GLf]
blockNormals = blockVertices
blockHeight :: Block -> GLf
blockHeight (Start _ h _) = h
blockHeight (Block _ h _) = h
blockHeight (EmptyBlock) = 0.0
blockHeight (Goal _ h _) = h
blockColor :: Block -> String
blockColor (Start c _ _) = c
blockColor (Block c _ _) = c
blockColor (Goal c _ _) = c
blockColor (EmptyBlock ) = "#0000ff"
blockType :: Block -> BlockRenderType
blockType (Start _ _ t) = t
blockType (Block _ _ t) = t
blockType (Goal _ _ t) = t
blockType (EmptyBlock ) = BlockRenderWire
instance Entity Block where
wire = mkConst (Left mempty)
collide = mkSF_ snd
aabb = aabbBlock
aabbBlock :: Object Block -> AABB
aabbBlock (Object pos@(V3 x y z) _ block) = AABB pos (V3 maxX maxY maxZ)
where
h = blockHeight block
maxX = x + defaultBlockWidth
maxY = y + h
maxZ = z - defaultBlockHeight
renderBlock :: GLsizei -> M44 GLf -> ShaderProgram -> Object Block -> Runtime ()
renderBlock i vp _ (Object _ _ EmptyBlock) = return ()
renderBlock i vp prg (Object pos vel block) = io $ do
asUniform mvp $ getUniform prg "mvp"
asUniform color $ getUniform prg "color"
drawIndexedTris i
where
mvp = vp !*! model
model = mkTransformationMat im33 pos
im33 :: V3 (V3 GLf)
im33 = identity
color = color4f_ $ blockColor block
| felixsch/drivingthesky | src/Block.hs | gpl-2.0 | 2,504 | 0 | 10 | 734 | 878 | 465 | 413 | 83 | 1 |
module Player where
import Utils
-- Hard Reset Player information
hardResetPlayer :: (Int, Position)
hardResetPlayer = (3, (190, 727)) | joeyinbox/space-invaders-haskell | src/Player.hs | gpl-2.0 | 138 | 0 | 6 | 21 | 36 | 24 | 12 | 4 | 1 |
{-# LANGUAGE CPP #-}
module Portage.Dependency.Print
(
dep2str
, dep2str_noindent
) where
import Portage.Version
import Portage.Use
import Portage.PackageId
import qualified Distribution.Pretty as DP (Pretty(..))
import qualified Text.PrettyPrint as Disp
import Text.PrettyPrint ( vcat, nest, render )
import Text.PrettyPrint as PP ((<>))
import Portage.Dependency.Types
dispSlot :: SlotDepend -> Disp.Doc
dispSlot AnySlot = Disp.empty
dispSlot AnyBuildTimeSlot = Disp.text ":="
dispSlot (GivenSlot slot) = Disp.text (':' : slot)
dispLBound :: PackageName -> LBound -> Disp.Doc
dispLBound pn (StrictLB v) = Disp.char '>' PP.<> DP.pretty pn <-> DP.pretty v
dispLBound pn (NonstrictLB v) = Disp.text ">=" PP.<> DP.pretty pn <-> DP.pretty v
dispLBound _pn ZeroB = error "unhandled 'dispLBound ZeroB'"
dispUBound :: PackageName -> UBound -> Disp.Doc
dispUBound pn (StrictUB v) = Disp.char '<' PP.<> DP.pretty pn <-> DP.pretty v
dispUBound pn (NonstrictUB v) = Disp.text "<=" PP.<> DP.pretty pn <-> DP.pretty v
dispUBound _pn InfinityB = error "unhandled 'dispUBound Infinity'"
dispDAttr :: DAttr -> Disp.Doc
dispDAttr (DAttr s u) = dispSlot s PP.<> dispUses u
dep2str :: Int -> Dependency -> String
dep2str start_indent = render . nest start_indent . showDepend
dep2str_noindent :: Dependency -> String
dep2str_noindent = render . showDepend
(<->) :: Disp.Doc -> Disp.Doc -> Disp.Doc
a <-> b = a PP.<> Disp.char '-' PP.<> b
sp :: Disp.Doc
sp = Disp.char ' '
sparens :: Disp.Doc -> Disp.Doc
sparens doc = Disp.parens (sp PP.<> valign doc PP.<> sp)
valign :: Disp.Doc -> Disp.Doc
valign d = nest 0 d
showDepend :: Dependency -> Disp.Doc
showDepend (DependAtom (Atom pn range dattr))
= case range of
-- any version
DRange ZeroB InfinityB -> DP.pretty pn PP.<> dispDAttr dattr
DRange ZeroB ub -> dispUBound pn ub PP.<> dispDAttr dattr
DRange lb InfinityB -> dispLBound pn lb PP.<> dispDAttr dattr
-- TODO: handle >=foo-0 special case
-- TODO: handle =foo-x.y.* special case
DRange lb ub -> showDepend (DependAtom (Atom pn (DRange lb InfinityB) dattr))
PP.<> Disp.char ' '
PP.<> showDepend (DependAtom (Atom pn (DRange ZeroB ub) dattr))
DExact v -> Disp.char '~' PP.<> DP.pretty pn <-> DP.pretty v { versionRevision = 0 } PP.<> dispDAttr dattr
showDepend (DependIfUse u td fd) = valign $ vcat [td_doc, fd_doc]
where td_doc
| is_empty_dependency td = Disp.empty
| otherwise = DP.pretty u PP.<> Disp.char '?' PP.<> sp PP.<> sparens (showDepend td)
fd_doc
| is_empty_dependency fd = Disp.empty
| otherwise = Disp.char '!' PP.<> DP.pretty u PP.<> Disp.char '?' PP.<> sp PP.<> sparens (showDepend fd)
showDepend (DependAnyOf deps) = Disp.text "||" PP.<> sp PP.<> sparens (vcat $ map showDependInAnyOf deps)
showDepend (DependAllOf deps) = valign $ vcat $ map showDepend deps
-- needs special grouping
showDependInAnyOf :: Dependency -> Disp.Doc
showDependInAnyOf d@(DependAllOf _deps) = sparens (showDepend d)
-- both lower and upper bounds are present thus needs 2 atoms
-- TODO: '=foo-x.y.*' will take only one atom, not two
showDependInAnyOf d@(DependAtom (Atom _pn (DRange lb ub) _dattr))
| lb /= ZeroB && ub /= InfinityB
= sparens (showDepend d)
-- rest are fine
showDependInAnyOf d = showDepend d
| gentoo-haskell/hackport | Portage/Dependency/Print.hs | gpl-3.0 | 3,576 | 0 | 16 | 877 | 1,202 | 600 | 602 | 64 | 5 |
{-
This source file is a part of the noisefunge programming environment.
Copyright (C) 2015 Rev. Johnny Healey <[email protected]>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
-}
{-# LANGUAGE TemplateHaskell #-}
module Language.NoiseFunge.Beat (Tempo(Tempo), bpm, subbeats,
Beat(Beat), beat, subbeat,
(##),
Beats, beats) where
import Control.Lens
import Data.Binary
import Data.Default
data Tempo = Tempo {
_bpm :: !Word32,
_subbeats :: !Word32
} deriving (Read, Show, Eq, Ord)
$(makeLenses ''Tempo)
data Beat = Beat {
_beat :: !Word32,
_subbeat :: !Word32
} deriving (Read, Eq, Ord)
$(makeLenses ''Beat)
instance Binary Beat where
get = Beat <$> get <*> get
put (Beat x y) = put x >> put y
instance Default Beat where
def = Beat 0 0
instance Show Beat where
show (Beat x y) = shows x . showChar '|' . shows y $ []
type Beats = [Beat]
(##) :: Tempo -> Beat -> Beat
(Tempo _ sb) ## (Beat b s) = nb where
s' = s + 1
b' = b + 1
nb = if s' == sb
then (Beat b' 0)
else (Beat b s')
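-- For example (sketch): with 4 subbeats per beat, advancing past the
-- last subbeat rolls over into the next beat:
--
-- > Tempo 120 4 ## Beat 0 1 == Beat 0 2
-- > Tempo 120 4 ## Beat 0 3 == Beat 1 0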
beats :: Tempo -> Beats
beats t = iterate (t ##) def
| revnull/noisefunge | src/Language/NoiseFunge/Beat.hs | gpl-3.0 | 1,825 | 0 | 9 | 516 | 409 | 222 | 187 | 48 | 2 |
{-# LANGUAGE PatternSynonyms #-}
{-|
Module : Accents
Description : Accents
Copyright : (c) Frédéric BISSON, 2015
License : GPL-3
Maintainer : [email protected]
Stability : experimental
Portability : POSIX
Accents
-}
module Minitel.Constants.Accents where
import qualified Minitel.Constants.C0 as C0
import Minitel.Type.MNatural (MNat)
default (MNat)
-- * Accents (for the VideoTex mode)
pattern Accent x = [C0.SS2, x]
pattern AddAccent x y = [C0.SS2, x, y]
pattern Cedilla = Accent 0x4b
pattern Grave = Accent 0x41
pattern Acute = Accent 0x42
pattern Circumflex = Accent 0x43
pattern Umlaut = Accent 0x48
pattern AddCedilla x = AddAccent 0x4b x
pattern AddGrave x = AddAccent 0x41 x
pattern AddAcute x = AddAccent 0x42 x
pattern AddCircumflex x = AddAccent 0x43 x
pattern AddUmlaut x = AddAccent 0x48 x
| Zigazou/HaMinitel | src/Minitel/Constants/Accents.hs | gpl-3.0 | 864 | 0 | 7 | 185 | 214 | 109 | 105 | 17 | 0 |
{- scan
Gregory W. Schwartz
Executes the SCAN algorithm to cluster a list of records
http://research.google.com/pubs/pub36940.html
-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE QuasiQuotes #-}
module Main where
-- Standard
import Data.Maybe
import qualified Data.Map as Map
import qualified Data.Vector as V
import Control.Monad
-- Cabal
import qualified Data.Text as T
import qualified Data.Text.IO as T
import Pipes
import qualified Pipes.Prelude as P
import qualified Pipes.Text as PT
import qualified Pipes.Text.IO as PT
import qualified Pipes.Prelude.Text as PT
import qualified Foreign.R as R
import Foreign.R (SEXP, SEXPTYPE)
import Language.R.Instance as R
import Language.R.QQ
import qualified H.Prelude as H
import Options.Applicative
-- Local
import Types
import Utility
import B1Matrix
import BMatrix
import HierarchicalClustering
import Print
-- | Command line arguments
data Options = Options { input :: Maybe String
, output :: Maybe String
, outputTree :: Maybe String
, q :: Int
, alphabetString :: Maybe String
}
-- | Command line options
options :: Parser Options
options = Options
<$> optional ( strOption
( long "input"
<> short 'i'
<> metavar "FILE"
<> help "The input file, each line is a record."
)
)
<*> optional ( strOption
( long "output"
<> short 'o'
<> metavar "FILE"
<> help "The output file."
)
)
<*> optional ( strOption
( long "output-tree"
<> short 'o'
<> metavar "FILE"
<> help "The output file for the structure of the tree."
)
)
<*> option auto
( long "q"
<> short 'q'
<> metavar "[3] | INT"
<> value 3
<> help "The length of the qgrams."
)
<*> optional ( strOption
( long "alphabet"
<> short 'a'
<> metavar "STRING"
<> help "The alphabet (all characters) used in the records.\
             \ For instance, for records of just capital characters with\
\ periods and commas,\
\ the alphabet would be \"ABCDEFGHIJKLMNOPQRSTUVWXYZ.,\".\
\ If empty, uses the (very large and slow)\
\ maxBound for characters (not supported until R supports\
\ 64 bit integers)."
)
)
mainFunc :: Options -> IO ()
mainFunc opts = do
let alphabet = fmap getAlphabet . alphabetString $ opts
readPipes = case input opts of
Nothing -> PT.stdinLn
(Just x) -> PT.readFileLn x
contents <- fmap nub' . PT.runSafeT . runEffect $ P.toListM $ readPipes
>-> P.filter (not . T.null)
>-> P.map (\ !x -> ( Record x
, getB1Row alphabet (Q $ q opts) (Record x)
)
)
let recordsOnly = V.fromList . fmap fst $ contents
recordIDs = V.enumFromN 1 . V.length $ recordsOnly
records = V.zip recordIDs recordsOnly
numQGrams = case alphabet of
Nothing -> (fromEnum (maxBound :: Char)) ^ (q opts)
(Just (Alphabet x)) -> (Map.size x) ^ (q opts)
R.withEmbeddedR R.defaultConfig $ R.runRegion $ do
rB <- mToRM
. unB
. getB
. getB2
. getB1 alphabet (Q $ q opts)
. map snd
$ contents
[r| suppressMessages(library("irlba")) |]
clusterTree <- cluster 0 (ID 1) records rB
-- Output clusters
case output opts of
Nothing ->
H.io . T.putStrLn . printClusters . ClusterTree $ clusterTree
(Just x) ->
H.io . T.writeFile x . printClusters . ClusterTree $ clusterTree
-- Output tree
case (outputTree opts) of
Nothing -> return ()
(Just x) -> H.io
. T.writeFile x
. T.pack
. show
. printTree
. ClusterTree
$ clusterTree
return ()
main :: IO ()
main = execParser opts >>= mainFunc
where
opts = info (helper <*> options)
( fullDesc
<> progDesc "Executes the SCAN algorithm to cluster a list of records"
<> header "scan, Gregory W. Schwartz" )
| GregorySchwartz/scan | app/Main.hs | gpl-3.0 | 4,741 | 0 | 20 | 1,916 | 1,035 | 540 | 495 | 106 | 5 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.RegionTargetHTTPProxies.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Returns the specified TargetHttpProxy resource in the specified region.
-- Gets a list of available target HTTP proxies by making a list() request.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.regionTargetHttpProxies.get@.
module Network.Google.Resource.Compute.RegionTargetHTTPProxies.Get
(
-- * REST Resource
RegionTargetHTTPProxiesGetResource
-- * Creating a Request
, regionTargetHTTPProxiesGet
, RegionTargetHTTPProxiesGet
-- * Request Lenses
, rthttppgProject
, rthttppgTargetHTTPProxy
, rthttppgRegion
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.regionTargetHttpProxies.get@ method which the
-- 'RegionTargetHTTPProxiesGet' request conforms to.
type RegionTargetHTTPProxiesGetResource =
"compute" :>
"v1" :>
"projects" :>
Capture "project" Text :>
"regions" :>
Capture "region" Text :>
"targetHttpProxies" :>
Capture "targetHttpProxy" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] TargetHTTPProxy
-- | Returns the specified TargetHttpProxy resource in the specified region.
-- Gets a list of available target HTTP proxies by making a list() request.
--
-- /See:/ 'regionTargetHTTPProxiesGet' smart constructor.
data RegionTargetHTTPProxiesGet =
RegionTargetHTTPProxiesGet'
{ _rthttppgProject :: !Text
, _rthttppgTargetHTTPProxy :: !Text
, _rthttppgRegion :: !Text
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'RegionTargetHTTPProxiesGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rthttppgProject'
--
-- * 'rthttppgTargetHTTPProxy'
--
-- * 'rthttppgRegion'
regionTargetHTTPProxiesGet
:: Text -- ^ 'rthttppgProject'
-> Text -- ^ 'rthttppgTargetHTTPProxy'
-> Text -- ^ 'rthttppgRegion'
-> RegionTargetHTTPProxiesGet
regionTargetHTTPProxiesGet pRthttppgProject_ pRthttppgTargetHTTPProxy_ pRthttppgRegion_ =
RegionTargetHTTPProxiesGet'
{ _rthttppgProject = pRthttppgProject_
, _rthttppgTargetHTTPProxy = pRthttppgTargetHTTPProxy_
, _rthttppgRegion = pRthttppgRegion_
}
-- | Project ID for this request.
rthttppgProject :: Lens' RegionTargetHTTPProxiesGet Text
rthttppgProject
= lens _rthttppgProject
(\ s a -> s{_rthttppgProject = a})
-- | Name of the TargetHttpProxy resource to return.
rthttppgTargetHTTPProxy :: Lens' RegionTargetHTTPProxiesGet Text
rthttppgTargetHTTPProxy
= lens _rthttppgTargetHTTPProxy
(\ s a -> s{_rthttppgTargetHTTPProxy = a})
-- | Name of the region scoping this request.
rthttppgRegion :: Lens' RegionTargetHTTPProxiesGet Text
rthttppgRegion
= lens _rthttppgRegion
(\ s a -> s{_rthttppgRegion = a})
instance GoogleRequest RegionTargetHTTPProxiesGet
where
type Rs RegionTargetHTTPProxiesGet = TargetHTTPProxy
type Scopes RegionTargetHTTPProxiesGet =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/compute",
"https://www.googleapis.com/auth/compute.readonly"]
requestClient RegionTargetHTTPProxiesGet'{..}
= go _rthttppgProject _rthttppgRegion
_rthttppgTargetHTTPProxy
(Just AltJSON)
computeService
where go
= buildClient
(Proxy :: Proxy RegionTargetHTTPProxiesGetResource)
mempty
| brendanhay/gogol | gogol-compute/gen/Network/Google/Resource/Compute/RegionTargetHTTPProxies/Get.hs | mpl-2.0 | 4,464 | 0 | 16 | 972 | 468 | 280 | 188 | 82 | 1 |
module IsPalindrome where
isPalindrome :: String -> Bool
isPalindrome s = reverse s == s | thewoolleyman/haskellbook | 04/09/chad/IsPalindrome.hs | unlicense | 89 | 0 | 6 | 15 | 28 | 15 | 13 | 3 | 1 |
take 2 [1, 3, 5, 7, 9]
| kujua/erlang-elixir-imperative-bookcompanion | code examples/example-14-24.hs | apache-2.0 | 23 | 0 | 6 | 7 | 24 | 13 | 11 | -1 | -1 |
-- Copyright 2014 (c) Diego Souza <[email protected]>
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
module Leela.HZMQ.ZHelpers where
import System.ZMQ4
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as L
import Control.Concurrent.STM
retryUnless :: TVar Bool -> a -> STM a
retryUnless tvar a = do
expired <- readTVar tvar
if expired then retry else return a
sendAll' :: (Sender a) => Socket a -> [L.ByteString] -> IO ()
sendAll' _ [] = return ()
sendAll' fh [chk] = send' fh [] chk
sendAll' fh (chk:msg) = do
send' fh [SendMore] chk
sendAll' fh msg
sendAll :: (Sender a) => Socket a -> [B.ByteString] -> IO ()
sendAll _ [] = return ()
sendAll fh [chk] = send fh [] chk
sendAll fh (chk:msg) = do
send fh [SendMore] chk
sendAll fh msg
ms :: Int -> Int
ms = (* 1000)
setHWM :: (Int, Int) -> Socket a -> IO ()
setHWM (rcvQueue, sndQueue) fh = do
setReceiveHighWM (restrict rcvQueue) fh
setSendHighWM (restrict sndQueue) fh
config :: Socket a -> IO ()
config fh = do
setLinger (restrict 0) fh
setTcpKeepAlive On fh
setImmediate True fh
configAndConnect :: Socket a -> String -> IO ()
configAndConnect fh addr = do
config fh
connect fh addr
configAndBind :: Socket a -> String -> IO ()
configAndBind fh addr = do
config fh
bind fh addr
| locaweb/leela | src/warpdrive/src/Leela/HZMQ/ZHelpers.hs | apache-2.0 | 1,853 | 0 | 9 | 395 | 559 | 283 | 276 | 40 | 2 |
{-# LANGUAGE OverloadedStrings, CPP #-}
import CabalMeta
import OmniConfig
import Shelly
import Paths_cabal_meta
import qualified Data.Text as T
import Control.Monad (forM_)
import Data.Maybe (isNothing, isJust)
import Data.Text (Text)
import Data.Version (showVersion)
import Prelude hiding (FilePath)
headDef :: a -> [a] -> a
headDef d [] = d
headDef _ (x:_) = x
help :: Text
help = T.intercalate "\n" [
"cabal-meta is a cabal wrapper for installing multiple packages at once that may not be on hackage"
,"run with:"
,""
," cabal-meta [--[no-]dev] install [cabal install arguments]"
,""
," --dev means use cabal-dev instead of cabal"
,""
,"You can also set options through the CABAL_META_OPTS environment variable or the ~/.cabal-meta/opts file"
]
cabal_install_ :: CabalExe -> [Text] -> Sh ()
cabal_install_ cabal = command_ (progName cabal) ["install"]
data CabalExe = Cabal | CabalDev deriving Show
progName :: CabalExe -> FilePath
progName Cabal = "cabal"
progName CabalDev = "cabal-dev"
assertCabalDependencies :: CabalExe -> IO Bool
assertCabalDependencies Cabal = shelly $ do
whenM (test_e "cabal-dev") $ do
echo help
echo "\n\ncabal-dev/ folder found. use the --dev option"
quietExit 1
mPath <- which "cabal-src-install"
if isNothing mPath
then warn >> return False
else return True
where
warn = echo "\nWARNING: cabal-src not installed. run:\n cabal install cabal-src\n"
assertCabalDependencies CabalDev = do
mcd <- shelly $ which "cabal-dev"
case mcd of
Just _ -> return False
Nothing -> error "--dev requires cabal-dev to be installed"
main :: IO ()
main = do
allArgs <- fmap (filter $ not . T.null) $
allProgramOpts [commandLine, environment "cabal-meta",
homeOptFile "cabal-meta"]
when ("--version" `elem` allArgs) $ do
putStrLn $ "cabal-meta " ++ showVersion version
shelly $ exit 0
let (mDev, noDevArgs) = checkNegatedOpt "dev" allArgs
let isDev = isJust mDev
let cabal = if isDev then CabalDev else Cabal
unless (headDef "" noDevArgs == "install") $ do
putStrLn $ T.unpack help
putStrLn $ "using cabal: " ++ show cabal
shelly $
if (headDef "" noDevArgs == "--help") then exit 0 else quietExit 1
installSrc <- assertCabalDependencies cabal
let (_:args) = noDevArgs
shelly $ verbosely $ do
packageSources <- readPackages True "."
let installs = packageList packageSources
echo "Installing packages:"
mapM_ echo $ map (T.intercalate " ") installs
cabal_install_ cabal $ args ++ concat installs
case (cabal, installSrc) of
(Cabal, True) ->
forM_ (unstablePackages packageSources) $ \pkg ->
chdir (diskPath pkg) $ run "cabal-src-install" ["--src-only"]
_ -> return ()
return ()
| yesodweb/cabal-meta | main.hs | bsd-2-clause | 2,832 | 0 | 18 | 633 | 818 | 404 | 414 | 76 | 4 |
module Main where
import Data.Range.Range
import Lib
import Text.Megaparsec
import Text.Megaparsec.String
main = do contents <- readFile "input.txt"
let rs = parseLines range contents
print $ head $ difference [fullrange] rs
print $ length $ difference [fullrange] rs
where fullrange = SpanRange 0 4294967295
range :: Parser (Range Int)
range = do n1 <- number
string "-"
n2 <- number
return $ SpanRange n1 n2
| shapr/adventofcode2016 | src/Twenty/Main.hs | bsd-3-clause | 527 | 0 | 10 | 182 | 154 | 76 | 78 | 15 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE UndecidableInstances #-}
module PersistKeyInstances where
import Database.Persist.Postgresql
import Servant
import Data.Text (pack)
import Data.Text.Read (Reader, decimal)
instance ToBackendKey SqlBackend a => FromText (Key a) where
fromText = either (\ _ -> Nothing) (Just . toSqlKey . fromIntegral . fst)
. (decimal :: Reader Integer)
instance ToBackendKey SqlBackend a => ToText (Key a) where
toText = pack . show . fromSqlKey
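-- For example (sketch, for any entity with a ToBackendKey SqlBackend
-- instance): fromText "42" yields Just (toSqlKey 42), and
-- toText (toSqlKey 42) == "42".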
| hectorhon/autotrace2 | src/PersistKeyInstances.hs | bsd-3-clause | 498 | 0 | 11 | 87 | 144 | 80 | 64 | 12 | 0 |
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.Raw.ARB.TransformFeedbackInstanced
-- Copyright : (c) Sven Panne 2015
-- License : BSD3
--
-- Maintainer : Sven Panne <[email protected]>
-- Stability : stable
-- Portability : portable
--
-- The <https://www.opengl.org/registry/specs/ARB/transform_feedback_instanced.txt ARB_transform_feedback_instanced> extension.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.Raw.ARB.TransformFeedbackInstanced (
-- * Functions
glDrawTransformFeedbackInstanced,
glDrawTransformFeedbackStreamInstanced
) where
import Graphics.Rendering.OpenGL.Raw.Functions
| phaazon/OpenGLRaw | src/Graphics/Rendering/OpenGL/Raw/ARB/TransformFeedbackInstanced.hs | bsd-3-clause | 768 | 0 | 4 | 81 | 40 | 33 | 7 | 4 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE EmptyDataDecls #-}
module Astro.Orbit.Types where
import Numeric.Units.Dimensional.Prelude
import qualified Prelude
-- Parameters.
newtype SemiMajorAxis a = SMA { sma :: Length a } deriving (Show, Eq, Ord)
newtype SemiLatusRectum a = SLR { slr :: Length a } deriving (Show, Eq, Ord)
-- | Eccentricity. Should be >= 0.
newtype Eccentricity a = Ecc { ecc :: Dimensionless a } deriving (Show, Eq, Ord)
-- Angles
-- ======
data True
data Mean
data Eccentric
newtype Anomaly t a = Anom { anom :: Angle a } deriving (Show)
newtype Longitude t a = Long { long :: Angle a } deriving (Show)
-- Rename the above to the "argument of longitude".
--newtype ArgumentOfLatitude t a = AoLat { argLat :: Angle a } deriving (Show)
--newtype ArgumentOfLongitude t a = AoLon { argLon :: Angle a } deriving (Show)
--newtype ArgumentOfPerigee t a = AoP { app :: Angle a } deriving (Show)
-- Maneuvers
-- =========
data Maneuver a = ImpulsiveRTN { dvr :: Velocity a
, dvt :: Velocity a
, dvn :: Velocity a
} deriving (Show, Eq, Ord)
| bjornbm/astro | src/Astro/Orbit/Types.hs | bsd-3-clause | 1,158 | 0 | 9 | 298 | 237 | 149 | 88 | -1 | -1 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
{-# LANGUAGE GADTs #-}
{-# LANGUAGE NoRebindableSyntax #-}
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Time.EN.AU.Rules
( rules
) where
import Data.Maybe
import Prelude
import Duckling.Dimensions.Types
import Duckling.Numeral.Helpers (parseInt)
import Duckling.Regex.Types
import Duckling.Time.Computed (easterSunday)
import Duckling.Time.Helpers
import Duckling.Time.Types (TimeData (..))
import Duckling.Types
import qualified Duckling.Time.Types as TTime
import qualified Duckling.TimeGrain.Types as TG
ruleDDMM :: Rule
ruleDDMM = Rule
{ name = "dd/mm"
, pattern =
[ regex "(3[01]|[12]\\d|0?[1-9])\\s?[/-]\\s?(1[0-2]|0?[1-9])"
]
, prod = \tokens -> case tokens of
(Token RegexMatch (GroupMatch (dd:mm:_)):_) -> do
d <- parseInt dd
m <- parseInt mm
tt $ monthDay m d
_ -> Nothing
}
ruleDDMMYYYY :: Rule
ruleDDMMYYYY = Rule
{ name = "dd/mm/yyyy"
, pattern =
[ regex "(3[01]|[12]\\d|0?[1-9])[-/\\s](1[0-2]|0?[1-9])[-/\\s](\\d{2,4})"
]
, prod = \tokens -> case tokens of
(Token RegexMatch (GroupMatch (dd:mm:yy:_)):_) -> do
y <- parseInt yy
d <- parseInt dd
m <- parseInt mm
tt $ yearMonthDay y m d
_ -> Nothing
}
-- Clashes with HHMMSS, hence only 4-digit years
ruleDDMMYYYYDot :: Rule
ruleDDMMYYYYDot = Rule
{ name = "dd.mm.yyyy"
, pattern =
[ regex "(3[01]|[12]\\d|0?[1-9])\\.(1[0-2]|0?[1-9])\\.(\\d{4})"
]
, prod = \tokens -> case tokens of
(Token RegexMatch (GroupMatch (dd:mm:yy:_)):_) -> do
y <- parseInt yy
d <- parseInt dd
m <- parseInt mm
tt $ yearMonthDay y m d
_ -> Nothing
}
rulePeriodicHolidays :: [Rule]
rulePeriodicHolidays = mkRuleHolidays
-- Fixed dates, year over year
[ ( "ANZAC Day", "anzac day", monthDay 4 25 )
, ( "Australia Day", "(ana|anniversary|australia|foundation) day"
, monthDay 1 26 )
, ( "Harmony Day", "harmony day", monthDay 3 21 )
, ( "National Sorry Day", "national sorry day", monthDay 5 26 )
, ( "Queensland Day", "queensland day", monthDay 6 6 )
, ( "Remembrance Day", "remembrance day", monthDay 11 11 )
, ( "Take our Daughters and Sons to Work Day"
, "take our daughters and sons to work day", monthDay 1 5 )
-- Fixed day/week/month, year over year
, ( "Adelaide Cup", "adelaide cup", nthDOWOfMonth 2 1 3 )
, ( "Administrative Professionals' Day"
, "(administrative professional|secretarie|admin)('?s'?)? day"
, nthDOWOfMonth 1 5 5 )
, ( "Canberra Day", "canberra day", nthDOWOfMonth 2 1 3 )
, ( "Eight Hours Day", "eight hours day", nthDOWOfMonth 2 1 3 )
, ( "Father's Day", "father'?s?'? day", nthDOWOfMonth 1 7 9 )
, ( "Labour Day", "labour day", nthDOWOfMonth 1 1 10 )
, ( "Melbourne Cup Day", "melbourne cup day", nthDOWOfMonth 1 2 11 )
, ( "Mother's Day", "mother'?s?'? day", nthDOWOfMonth 2 7 5 )
, ( "National Close the Gap Day", "national close the gap day"
, nthDOWOfMonth 3 4 3 )
, ( "National Tree Day", "(arbor|national tree) day"
, predLastOf (dayOfWeek 7) (month 6) )
, ( "National Schools Tree Day", "national schools tree day"
, predLastOf (dayOfWeek 5) (month 6) )
, ( "New South Wales Bank Holiday", "new south wales bank holiday"
, nthDOWOfMonth 1 1 8 )
, ( "Picnic Day", "(northern territory )?picnic day", nthDOWOfMonth 1 1 8 )
, ( "Recreation Day", "recreation day", nthDOWOfMonth 1 1 10 )
, ( "Thanksgiving Day", "thanks?giving( day)?", nthDOWOfMonth 4 4 11 )
, ( "Western Australia Day", "western australia day", nthDOWOfMonth 1 1 6 )
-- Other
, ( "Reconciliation Day", "reconciliation\\s+day"
, predNthAfter 0 (dayOfWeek 1) (monthDay 5 26) )
]
rulePeriodicHolidays' :: [Rule]
rulePeriodicHolidays' = mkRuleHolidays'
-- Fixed day/week/month, year over year
-- Week from Sunday of July until following Sunday that has the second Friday
[ ( "NAIDOC Week"
, "(naidoc|national aboriginal and islander day observance committee) week"
, let fri = nthDOWOfMonth 2 5 7
start = cycleNthAfter False TG.Day (- 5) fri
end = cycleNthAfter False TG.Day 2 fri
in interval TTime.Open start end )
-- 3 days ending on the second Monday of February
, ( "Royal Hobart Regatta", "royal hobart regatta"
, let end = nthDOWOfMonth 2 1 2
in interval TTime.Open (cycleNthAfter False TG.Day (- 2) end) end )
-- Other
-- Wednesday of the Royal Queensland Show
-- Starts on the first Friday of August if it's not before August 5th
-- Otherwise on the second Friday of August
, ( "Royal Queensland Show Day"
, "(royal (national agricultural|queensland)|rna) show day|ekka day"
, let tentative = nthDOWOfMonth 1 5 8
alternative = nthDOWOfMonth 2 5 8
in do
forbidden <- interval TTime.Open (monthDay 8 1) (monthDay 8 4)
start <- intersectWithReplacement forbidden tentative alternative
return $ cycleNthAfter False TG.Day 5 start )
-- Starts on the first Friday of August if it's not before August 5th
-- Otherwise on the second Friday of August
, ( "Royal Queensland Show"
, "ekka|(royal (national agricultural|queensland)|rna) show"
, let tentative = nthDOWOfMonth 1 5 8
alternative = nthDOWOfMonth 2 5 8
in do
forbidden <- interval TTime.Open (monthDay 8 1) (monthDay 8 4)
start <- intersectWithReplacement forbidden tentative alternative
interval TTime.Open start $ cycleNthAfter False TG.Day 9 start )
]
ruleComputedHolidays :: [Rule]
ruleComputedHolidays = mkRuleHolidays
[ ( "Easter Tuesday", "easter\\s+tue(sday)?"
, cycleNthAfter False TG.Day 2 easterSunday )
]
rules :: [Rule]
rules =
[ ruleDDMM
, ruleDDMMYYYY
, ruleDDMMYYYYDot
]
++ ruleComputedHolidays
++ rulePeriodicHolidays
++ rulePeriodicHolidays'
| facebookincubator/duckling | Duckling/Time/EN/AU/Rules.hs | bsd-3-clause | 6,034 | 0 | 19 | 1,354 | 1,441 | 794 | 647 | 125 | 2 |
module Bench.BigTerms where
import qualified Bench.LocallyNameless as LN
import qualified Bench.DeBruijn as DB
import Untyped
{-
We define several forms of "big terms", each parameterized by a natural number N:
Exponential terms:
\f:(a -> a) -> ((a -> a) -> a). \g:(a -> ... -> a -> a). EXP N []
where
EXP 0 [x1,...,xn] = g x1 ... xn
EXP (k+1) xs = f (\xk:a. EXP k xk:xs) (\xk:a. EXP k xk:xs)
Linear terms:
\f:(a -> a) -> a. \g:(a -> ... -> a -> a).
f (\x1:a. f (\x2:a. ... f (\xN:a. g xN ... x1)))
Linear terms version 2:
\f:(a -> a) -> (a -> a). \g:(a -> ... -> a -> a).
f (\x1:a . f (\x2:a. ... (f (\xN:a. g xN ... x1) xN) ... x2) x1)
Linear terms version 3:
\f:(a -> a) -> (a -> a). \g:(a -> ... -> a -> a).
f (\x1:a . f (\x2:a. ... (f (\xN:a. g xN ... xN) xN) ... x2) x1)
Linear terms version 4:
\f:(a -> a) -> (a -> a). \g:(a -> ... -> a -> a).
f (\x1:a . f (\x2:a. ... (f (\xN:a. g x1 ... x1) xN) ... x2) x1)
-}
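-- For intuition (a sketch, up to the names chosen for bound variables):
-- bigUntyped 2 builds
--   \f. \g. f (\x2. f (\x1. g x1 x2) (\x1. g x1 x2)) (\x2. f (\x1. g x1 x2) (\x1. g x1 x2))
-- which grows exponentially in N, whereas bigLinearUntyped 2 builds
--   \f. \g. f (\x2. f (\x1. g x1 x2))
-- which grows linearly.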
name i = "x" ++ show i
a = Base 0
infixr -->
a --> b = Arrow a b
-- the exponential one
{-
bigLN n = LN.Lam $ LN.Lam $ go n
where go 0 = foldl (LN.:@) (LN.BVar n) (map LN.BVar [0..n-1])
go i = let r = LN.Lam (go (i-1))
in LN.BVar (n-i+1) LN.:@ r LN.:@ r
bigDB n = DB.Lam $ DB.Lam $ go n
where go 0 = foldl (DB.:@) (DB.Var n) (map DB.Var [0..n-1])
go i = let r = DB.Lam (go (i-1))
in DB.Var (n-i+1) DB.:@ r DB.:@ r
-}
bigUntyped n = ULam "f" ((a --> a) --> (a --> a) --> a) $ ULam "g" (foldr (-->) a (replicate n a)) $ go n []
where go 0 xs = foldl UApp (UVar "g") xs
go i xs = let x = name i
r = ULam x a (go (i-1) (UVar x:xs))
in UVar "f" `UApp` r `UApp` r
-- a linear one
{-
bigLinearLN n = LN.Lam $ LN.Lam $ go n
where go 0 = foldl (LN.:@) (LN.BVar n) (map LN.BVar [0..n-1])
go i = let r = LN.Lam (go (i-1))
in LN.BVar (n-i+1) LN.:@ r
bigLinearDB n = DB.Lam $ DB.Lam $ go n
where go 0 = foldl (DB.:@) (DB.Var n) (map DB.Var [0..n-1])
go i = let r = DB.Lam (go (i-1))
in DB.Var (n-i+1) DB.:@ r
-}
bigLinearUntyped n = ULam "f" ((a --> a) --> a) $ ULam "g" (foldr (-->) a (replicate n a)) $ go n []
where go 0 xs = foldl UApp (UVar "g") xs
go i xs = let x = name i
r = ULam x a (go (i-1) (UVar x:xs))
in UVar "f" `UApp` r
-- linear version 2
{-
bigLinearLN2 n = LN.Lam $ LN.Lam $ LN.Lam $ go n
where go 0 = foldl (LN.:@) (LN.BVar (n+1)) (map LN.BVar [0..n])
go i = let r = LN.Lam (go (i-1))
in LN.BVar (n-i+2) LN.:@ r LN.:@ LN.BVar 0
bigLinearDB2 n = DB.Lam $ DB.Lam $ DB.Lam $ go n
where go 0 = foldl (DB.:@) (DB.Var (n+1)) (map DB.Var [0..n])
go i = let r = DB.Lam (go (i-1))
in DB.Var (n-i+2) DB.:@ r DB.:@ DB.Var 0
-}
bigLinearUntyped2 n = ULam "f" ((a --> a) --> a --> a) $ ULam "g" (foldr (-->) a (replicate (n+1) a)) $ ULam "x" a $ go n [UVar "x"]
where go 0 xs = foldl UApp (UVar "g") xs
go i xs = let x = name i
r = ULam x a (go (i-1) (UVar x:xs))
in UVar "f" `UApp` r `UApp` head xs
-- linear version 3
{-
bigLinearLN3 n = LN.Lam $ LN.Lam $ LN.Lam $ go n
where go 0 = foldl (LN.:@) (LN.BVar (n+1)) (replicate (n+1) $ LN.BVar 0)
go i = let r = LN.Lam (go (i-1))
in LN.BVar (n-i+2) LN.:@ r LN.:@ LN.BVar 0
bigLinearDB3 n = DB.Lam $ DB.Lam $ DB.Lam $ go n
where go 0 = foldl (DB.:@) (DB.Var (n+1)) (replicate (n+1) $ DB.Var 0)
go i = let r = DB.Lam (go (i-1))
in DB.Var (n-i+2) DB.:@ r DB.:@ DB.Var 0
-}
bigLinearUntyped3 n = ULam "f" ((a --> a) --> a --> a) $ ULam "g" (foldr (-->) a (replicate (n+1) a)) $ ULam "x" a $ go n (UVar "x")
where go 0 lastX = foldl UApp (UVar "g") (replicate (n+1) lastX)
go i lastX = let x = name i
r = ULam x a (go (i-1) $ UVar x)
in UVar "f" `UApp` r `UApp` lastX
-- linear version 4
bigLinearUntyped4 n = ULam "f" ((a --> a) --> a --> a) $ ULam "g" (foldr (-->) a (replicate (n+1) a)) $ ULam "x" a $ go n (UVar "x")
where go 0 lastX = foldl UApp (UVar "g") (replicate (n+1) (UVar "x"))
go i lastX = let x = name i
r = ULam x a (go (i-1) $ UVar x)
in UVar "f" `UApp` r `UApp` lastX
| eddywestbrook/hobbits | archival/Bench/BigTerms.hs | bsd-3-clause | 4,391 | 0 | 16 | 1,426 | 1,024 | 525 | 499 | 33 | 2 |
-- |
-- Copyright : (c) 2011 Simon Meier
-- License : BSD3-style (see LICENSE)
--
-- Maintainer : Simon Meier <[email protected]>
-- Stability : experimental
-- Portability : tested on GHC only
--
-- Hexadecimal encoding of nibbles (4-bit) and octets (8-bit) as ASCII
-- characters.
--
-- The current implementation is based on a table based encoding inspired by
-- the code in the 'base64-bytestring' library by Bryan O'Sullivan. In our
-- benchmarks on a 32-bit machine it turned out to be the fastest
-- implementation option.
--
module Codec.Bounded.Encoding.Internal.Base16 (
EncodingTable
, upperTable
, lowerTable
, encode4_as_8
, encode8_as_16h
, encode8_as_8_8
) where
import Control.Applicative
import Foreign
import qualified Codec.Bounded.Encoding.Internal.Region as R
-- Creating the encoding tables
-------------------------------
-- | An encoding table for Base16 encoding.
newtype EncodingTable = EncodingTable (ForeignPtr Word8)
tableFromList :: [Word8] -> IO EncodingTable
tableFromList = fmap (EncodingTable . fst) . R.fromList
unsafeIndex :: EncodingTable -> Int -> IO Word8
unsafeIndex (EncodingTable table) = peekElemOff (unsafeForeignPtrToPtr table)
{-# NOINLINE upperAlphabet #-}
upperAlphabet :: EncodingTable
upperAlphabet = unsafePerformIO $
tableFromList $ map (fromIntegral . fromEnum) $ ['0'..'9'] ++ ['A'..'F']
{-# NOINLINE lowerAlphabet #-}
lowerAlphabet :: EncodingTable
lowerAlphabet = unsafePerformIO $
tableFromList $ map (fromIntegral . fromEnum) $ ['0'..'9'] ++ ['a'..'f']
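-- | Build the 512-entry table for the given alphabet: for every octet @x@ its
-- two hex digits end up at offsets @2 * x@ (high nibble) and @2 * x + 1@ (low
-- nibble).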
base16EncodingTable :: EncodingTable -> IO EncodingTable
base16EncodingTable alphabet = do
xs <- sequence $ concat $ [ [ix j, ix k] | j <- [0..15], k <- [0..15] ]
tableFromList xs
where
ix = unsafeIndex alphabet
-- | The encoding table for hexadecimal values with upper-case characters;
-- e.g., DEADBEEF.
{-# NOINLINE upperTable #-}
upperTable :: EncodingTable
upperTable = unsafePerformIO $ base16EncodingTable upperAlphabet
-- | The encoding table for hexadecimal values with lower-case characters;
-- e.g., deadbeef.
{-# NOINLINE lowerTable #-}
lowerTable :: EncodingTable
lowerTable = unsafePerformIO $ base16EncodingTable lowerAlphabet
-- Encoding nibbles and octets
------------------------------
-- | Encode a nibble as an octet.
--
-- > encode4_as_8 lowerTable 10 = fromIntegral (char 'a')
--
{-# INLINE encode4_as_8 #-}
encode4_as_8 :: EncodingTable -> Word8 -> IO Word8
encode4_as_8 table x = unsafeIndex table (2 * fromIntegral x + 1)
-- | Encode an octet as 16bit word comprising both encoded nibbles ordered
-- according to the host endianness. Writing these 16bit to memory will write
-- the nibbles in the correct order (i.e. big-endian).
{-# INLINE encode8_as_16h #-}
encode8_as_16h :: EncodingTable -> Word8 -> IO Word16
encode8_as_16h (EncodingTable table) =
peekElemOff (castPtr $ unsafeForeignPtrToPtr table) . fromIntegral
-- | Encode an octet as a big-endian ordered tuple of octets; i.e.,
--
-- > encode8_as_8_8 lowerTable 10
-- > = (fromIntegral (chr '0'), fromIntegral (chr 'a'))
--
{-# INLINE encode8_as_8_8 #-}
encode8_as_8_8 :: EncodingTable -> Word8 -> IO (Word8, Word8)
encode8_as_8_8 table x =
(,) <$> unsafeIndex table i <*> unsafeIndex table (i + 1)
where
i = 2 * fromIntegral x
| meiersi/system-io-write | src/Codec/Bounded/Encoding/Internal/Base16.hs | bsd-3-clause | 3,310 | 0 | 12 | 552 | 565 | 321 | 244 | 46 | 1 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverlappingInstances #-}
-- only export instances
module Web.Leo.Pretty() where
import Web.Leo.Types
import Data.List
import qualified Text.PrettyPrint.ANSI.Leijen as P
import qualified Text.PrettyPrint.Boxes as B
instance P.Pretty Language where
pretty De = P.string "German"
pretty En = P.string "English"
pretty Fr = P.string "French"
pretty Es = P.string "Spanish"
pretty It = P.string "Italian"
pretty Ch = P.string "Chinese"
pretty Ru = P.string "Russian"
    pretty Pt = P.string "Portuguese"
pretty Pl = P.string "Polish"
pretty Unknown = P.string "Unknown"
instance P.Pretty [QueryResult] where
pretty r = render . hhcat $ map (B.vcat B.left . map B.text) $ accColumns r
where render = P.string . B.render
hhcat = B.hsep 2 B.left
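-- Combine the per-result column pairs into a single pair of columns, stacking
-- each result's two columns and separating consecutive results with a blank cell.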
accColumns :: [QueryResult] -> [[String]]
accColumns [] = [ ["No results"] ]
accColumns (x:[]) = toColumns x
accColumns (x:xs) = foldr proc (toColumns x) xs
where proc qr acc = [ head acc ++ [""] ++ (head $ toColumns qr)
, last acc ++ [""] ++ (last $ toColumns qr) ]
toColumns :: QueryResult -> [[String]]
toColumns n = transpire (resultHeader n) (fromResult n)
transpire :: [[String]] -> [(Entry,Entry)] -> [[String]]
transpire header xs = transpose $ (header ++ body)
where body = map (\(l,r) -> [processResult l, processResult r]) xs
processResult :: Entry -> String
processResult = intercalate ", " . getResult
fromResult :: QueryResult -> [(Entry, Entry)]
fromResult (Nouns xs) = xs
fromResult (Verbs xs) = xs
fromResult (Phrase xs) = xs
fromResult (Praep xs) = xs
fromResult (AdjAdvs xs) = xs
fromResult (Examples xs) = xs
fromResult None = []
resultHeader :: QueryResult -> [[String]]
resultHeader (Nouns _) = (prettier "Nouns" : [""]) : emptyL
resultHeader (Verbs _) = (prettier "Verbs" : [""]) : emptyL
resultHeader (Phrase _) = (prettier "Phrases" : [""]) : emptyL
resultHeader (Praep _) = (prettier "Praepositions" : [""]) : emptyL
resultHeader (AdjAdvs _) = (prettier "Adjectives/Adverbs" : [""]) : emptyL
resultHeader (Examples _) = (prettier "Examples" : [""]) : emptyL
resultHeader None = (prettier "Unknown" : [""]) : emptyL
prettier :: String -> String
prettier = show . P.underline . P.bold . P.text
emptyL :: [[String]]
emptyL = ["",""] : []
| krgn/leo | src/Web/Leo/Pretty.hs | bsd-3-clause | 2,518 | 0 | 12 | 628 | 970 | 513 | 457 | 55 | 1 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -w #-}
module Main where
import Mellow
import Vision.Image as I
import qualified Vision.Image.Filter.Internal as I
import Vision.Primitive
import Control.Concurrent
import Control.Monad
import Data.IORef
import Data.Time
import Data.Char (isSpace)
import Data.Maybe (listToMaybe)
import Data.Word
-- import Vision.Image.Storage.DevIL -- requires friday-devil too
import Control.Concurrent.MVar
import qualified Data.Vector.Storable as V
import System.IO
import System.Exit
import Control.DeepSeq
import Vision.Image.JuicyPixels (toJuicyRGBA, toFridayRGBA)
import Graphics.Gloss.Juicy (fromImageRGBA8)
import Graphics.Gloss.Interface.IO.Game
-- | The world consists of the psycState and a render method. The
-- PsycState includes the wacky frame, input frame, processing frames,
-- background elimination process, colors, information for deleting frame
-- older than some number of steps.
-- 'renderMethod' just selects one of these many states to render
data World = World { psycState :: PsycState Depth
, renderMethod :: RenderMethod
}
newtype RenderMethod = RenderCustom (PsycState Depth -> RGBA)
renderInput, renderPsyc, renderOutlines :: RenderMethod
renderInput = RenderCustom (depthToRGBA . inputFrame)
renderPsyc = RenderCustom psycFrame
renderOutlines = RenderCustom outlinesFrame
renderBlurred = RenderCustom (rgbaBlur 8 . psycFrame)
depthToRGBA :: Depth -> RGBA
depthToRGBA d = I.fromFunction (shape d) (\p -> let val = floor $ 255.0 * (fromIntegral (d!p) / 2047)
in RGBAPixel val val val 255)
blackRGBA :: RGBA
blackRGBA = I.fromFunction (Z :. 640 :. 480) (const (RGBAPixel 0 0 0 0))
main :: IO ()
main = mellow (initialWorld []) update (return . render) handleEvent
update :: Depth -> World -> IO World
update i w =
let new = psyc (psycState w) i
in new `deepseq` return w { psycState = new }
render :: World -> RGBA
render (World {..}) | RenderCustom f <- renderMethod =
case psycState of
PSInit {} -> blackRGBA
PS {} -> horizontalFlip (f psycState)
-- | Maximum number of frames held for funky rendering effect.
maxCaps :: Int
maxCaps = 6
initialWorld :: [Depth] -> World
initialWorld xs =
let psyc0 = initialPsycState xs
in World psyc0 renderBlurred
initialPsycState :: [Depth] -> PsycState Depth
initialPsycState [] = PSInit []
initialPsycState xs =
let cleans = fmap clean xs
clean :: Depth -> Depth
clean = I.map (\p -> if p < 100 then 4095 else p)
in maybe (error "Impossible") id $ initPsyc (depthFilter cleans) maxCaps xs
depthFilter :: [Depth] -> Depth -> Grey
depthFilter ds@(d0:_) = depthMask `seq` \t ->
let bods = getBodies t
e = erode 4 bods :: Grey
d = dilate 4 bods
dBig = dilate 6 d
in dBig .- d
where
(Z :. h :. w) = shape d0
(.-) :: Grey -> Grey -> Grey
(.-) o i = fromFunction (shape o) (\p -> if i!p /= 0 then 0 else o!p)
getBodies :: Depth -> Grey
getBodies testImage =
I.fromFunction (shape testImage) (\p -> if (testImage ! p) < (depthMask ! p) then 255 else 0)
-- Min: best so far, square artifacts persist
depthMask :: Depth
!depthMask = manifest $ I.map (\x -> if x < 100 then 4095 else (floor . (0.97 *) . fromIntegral $ x) ) $ minPF (Z :. 25 :. 25) $ foldl1 (zipImage min) ds
-- Harmonic: works so so
-- !depthMask = I.fromFunction (shape d0) (\p -> (0.9 *) $ harmonicMean $ V.map fromIntegral $ V.fromList $ fmap (!p) ds)
zipImage :: (Word16 -> Word16 -> Word16) -> Depth -> Depth -> Depth
zipImage f a b = fromFunction (shape a) (\p -> f (a ! p) (b ! p))
--------------------------------------------------------------------------------
-- Event Handling (key presses)
-- * 'esc' quit
-- * 'r' reset the background (not currently handled below)
-- * 's' save the frame (the saving code is commented out)
-- * 'p' display psychedelic image
-- * 'i' display input image
-- * 'o' display outline image
-- * 'b' display blurred psychedelic image
handleEvent :: Event -> World -> IO World
handleEvent (EventKey (SpecialKey KeyEsc) _ _ _) st = exitSuccess
handleEvent (EventKey (Char 's') Down _ _) st =
do -- now <- getCurrentTime
-- let nowString = filter (not . isSpace) (show now)
-- if Devil is imported _ <- save JPG (nowString ++ ".jpg") (render st)
-- putStrLn $ "Saved " ++ show now
return st
handleEvent (EventKey (Char 'i') Down _ _) st = return st { renderMethod = renderInput }
handleEvent (EventKey (Char 'o') Down _ _) st = return st { renderMethod = renderOutlines }
handleEvent (EventKey (Char 'p') Down _ _) st = return st { renderMethod = renderPsyc }
handleEvent (EventKey (Char 'b') Down _ _) st = return st { renderMethod = renderBlurred }
handleEvent _ st = return st
--------------------------------------------------------------------------------
-- Psychedelic Image Creation
-- PsycState contains enough information to
-- 1) Ingest new images by removing the background and performing contour tracing.
-- 2) Delete old outlines from the rendered frame without re-drawing all of history.
-- 3) The frame as it exists today, for adding new overlays and deleting old.
data PsycState inImg
= PSInit [inImg]
| PS { getOutlines :: inImg -> Grey -- Removes background, yielding row contours of an image
, history :: [(Grey,RGBAPixel)] -- oldest-to-newest queue of contours for use in deletion
, inputFrame :: inImg -- Most recent input frame
       , psycFrame :: RGBA -- Current psychedelic image before final processing (for deleting old outlines)
, outlinesFrame :: RGBA
, colors :: [RGBAPixel] -- Infinite list of colors for drawing
}
-- The state retains N frames, each new frame is given the next color in the list.
defaultColors :: [RGBAPixel]
defaultColors = cycle [RGBAPixel 0x3f 0x63 0xad 0xff
,RGBAPixel 0x40 0x64 0xae 0xff
,RGBAPixel 0xae 0x09 0xea 0xff
,RGBAPixel 0xd1 0x03 0xe2 0xff
,RGBAPixel 0xea 0x2f 0xa7 0xff
,RGBAPixel 0xf4 0x72 0x5a 0xff
,RGBAPixel 0xfa 0xae 0x1c 0xff
,RGBAPixel 0xee 0xfe 0x84 0xff
,RGBAPixel 0x75 0xf0 0x7b 0xff
,RGBAPixel 0x0a 0xd0 0x6e 0xff
,RGBAPixel 0x07 0xa9 0x80 0xff
,RGBAPixel 0x2b 0x79 0xa0 0xff
]
instance NFData a => NFData (PsycState a) where
rnf (PS a b c r o d) = r `deepseq` o `deepseq` a `seq` b `seq` ()
rnf _ = ()
-- Make a PsycState based on background samples and number of history frames to retain
initPsyc :: MaskedImage inImg => (inImg -> Grey) -> Int -> [inImg] -> Maybe (PsycState inImg)
initPsyc _ _ [] = Nothing
initPsyc getOutlines nr xs@(firstX:_) =
let history = replicate nr (frame0,background)
frame0 = fromFunction sz (const 0)
psycFrame = fromFunction sz (const (RGBAPixel 0 0 0 0))
inputFrame = firstX
colors = defaultColors
in Just PS { .. }
where
contours :: Grey -> Grey
contours i0 =
let i = erode 3 i0 :: Grey
d3 = dilate 4 i :: Grey
d7 = dilate 3 d3 :: Grey
v3 p = d3 ! p
v7 p = d7 ! p
in fromFunction sz (\idx -> if v7 idx /= 0 then (if v3 idx /= 0 then 0 else 255) else 0)
sz = shape (head xs)
-- Update the PsycState with a new image, yielding a new 'frame' for
-- rendering as well as updating the 'history'.
ingest :: PsycState Depth -> Depth -> PsycState Depth
ingest (PSInit xs) img | length xs < neededInitFrames = PSInit (img:xs)
| otherwise = ingest (initialPsycState xs) img
ingest orig@(PS {..}) img = orig { history = hist
, psycFrame = compositeFrame
, inputFrame = img
, outlinesFrame = I.map (\p -> let x = fromIntegral p in RGBAPixel x x x x) os
, colors = newColors
}
where
os = getOutlines img
hist = history ++ [(os,currColor)]
compositeFrame = drawPsychedelic os currColor psycFrame
(currColor:newColors) = colors
drawPsychedelic :: Grey -> RGBAPixel -> RGBA -> RGBA
drawPsychedelic os color@(RGBAPixel r g b _) frame =
fromFunction (shape frame)
(\pnt -> case os ! pnt of
0 -> frame ! pnt
1 -> background
o -> color)
rgbaBlur :: Int -> RGBA -> RGBA
rgbaBlur i = onComponents (blur i)
where
onComponents :: (Grey -> Grey) -> RGBA -> RGBA
onComponents f img = fromFunction (shape img) (\p -> RGBAPixel (fromIntegral $ r!p) (fromIntegral $ g!p) (fromIntegral $ b!p) (rgbaAlpha (img!p)))
where
(redChan,greenChan,blueChan) = (I.map (fromIntegral . rgbaRed) img, I.map (fromIntegral . rgbaGreen) img, I.map (fromIntegral . rgbaBlue) img)
r = f redChan
g = f greenChan
b = f blueChan
-- | The number of frames needed to compute a useful depth mask to perform
-- background removal (depends on hardware and stability of mounting).
neededInitFrames :: Int
neededInitFrames = 300
-- | Ingest an image and age-out old images in one step.
psyc :: PsycState Depth -> Depth -> PsycState Depth
psyc (PSInit xs) i | length xs < neededInitFrames = PSInit (i:xs)
| otherwise = psyc (initialPsycState xs) i
psyc st img = garbageCollect (ingest st img)
where
-- The early version just take the most-recent X frames, eventually this
-- should be a time-driven collection so frame-rate doesn't have such an
-- impact.
garbageCollect :: NFData inImg => PsycState inImg -> PsycState inImg
garbageCollect ps@(PS {..}) =
let newPF = deleteOne (listToMaybe history) psycFrame
in newPF `deepseq` ps { history = (drop 1 history)
, psycFrame = newPF }
deleteOne :: Maybe (Grey,RGBAPixel) -> RGBA -> RGBA
deleteOne Nothing f = f
deleteOne (Just (ps,color)) f =
let mk pnt = if f ! pnt == color && (ps ! pnt /= 0) -- XXX delete color regardless of alpha
then background
else f ! pnt
in fromFunction (shape f) mk
background :: RGBAPixel
background = RGBAPixel 0 0 0 0
minPF :: (FromFunction src, Image src, Integral (FromFunctionPixel src), FromFunctionPixel src ~ ImagePixel src, SeparatelyFiltrable src src (ImagePixel src), Integral (ImagePixel src)) => Size -> src -> src
minPF sz img = I.apply (minFilter sz) img
{-# INLINE minPF #-}
-- | Computes the minimum of a region
--
-- This is similar to 'blur' but with a rectangular kernel and a 'Fractional'
-- result.
minFilter :: (Integral src)
=> Size -> I.SeparableFilter src () src src
minFilter size =
I.Filter size I.KernelAnchorCenter (I.SeparableKernel vert horiz) (\_ _ -> ())
(I.FilterFold (const 4095)) post I.BorderReplicate
where
vert _ _ !val !acc = min acc val
horiz _ _ !acc' !acc = min acc acc'
post _ _ _ !acc = acc
{-# INLINE minFilter #-}
| TomMD/mellow | examples/Psychedelic.hs | bsd-3-clause | 11,545 | 0 | 18 | 3,275 | 3,195 | 1,701 | 1,494 | 190 | 4 |
{- | all datatypes for the algebraic trees and transformation
the backstage stuff -}
module Polynomial (
-- * Classes
Rev (reverse),
-- * Types
Sort ( C2N,
N2C),
Polynomial (Poly),
-- * Functions
eval,
scalePoly,
norm1,
norm2,
snorm
)
where
import Prelude hiding (reverse)
import qualified Prelude as P (reverse)
-- | denotes all reversible things
class Rev a where
-- | reverse should have the property /reverse.reverse = id/
reverse :: a -> a
instance Rev [a] where
reverse = P.reverse
-- | Sort denotes the sorting of a polynomial
data Sort = C2N -- const term to leading coefficient
| N2C -- leading coefficient to const term
deriving (Eq, Show)
instance Rev (Sort) where
reverse C2N = N2C
reverse N2C = C2N
-- | Poly sp p generates a polynomial
data Polynomial a = Poly { sort :: Sort -- mostly denoted /sp/: the sorting of p
, coeff :: [a] -- and p the list of coefficients
} deriving (Show)
instance Functor (Polynomial) where
fmap f (Poly sp p) = Poly sp (fmap f p)
instance Rev (Polynomial a) where
reverse (Poly C2N p) = Poly N2C (reverse p)
reverse (Poly N2C p) = Poly C2N (reverse p)
instance (Eq a) => Eq (Polynomial a) where
Poly sp p == Poly sq q = sp == sq && p == q
instance (Num a) => Num (Polynomial a) where
-- (+)
Poly C2N p + Poly C2N q = Poly C2N (zipWith_ (+) p q)
Poly N2C p + Poly C2N q = Poly C2N (zipWith_ (+) (reverse p) q)
Poly C2N p + Poly N2C q = Poly C2N (zipWith_ (+) p (reverse q))
Poly N2C p + Poly N2C q = Poly C2N (zipWith_ (+) (reverse p) (reverse q))
-- (-)
Poly C2N p - Poly C2N q = Poly C2N (zipWith_ (-) p q)
Poly N2C p - Poly C2N q = Poly C2N (zipWith_ (-) (reverse p) q)
Poly C2N p - Poly N2C q = Poly C2N (zipWith_ (-) p (reverse q))
Poly N2C p - Poly N2C q = Poly C2N (zipWith_ (-) (reverse p) (reverse q))
-- (*)
Poly C2N p * Poly C2N [] = Poly C2N []
Poly C2N p * Poly C2N (q1 : qs) = Poly C2N (foldr mul [] p)
where mul 0 bs = 0 : bs
mul a bs = (a * q1) : zipWith_ (+) (map (a *) qs) bs
p * Poly C2N q = reverse p * Poly C2N q
Poly C2N p * q = Poly C2N p * reverse q
p * q = reverse p * reverse q
-- negate
negate = fmap negate
-- abs - abs of the leading coefficient
abs p = p * signum p
-- fromInteger
fromInteger i = Poly C2N [fromInteger i]
-- signum
signum (Poly _ []) = Poly C2N []
signum (Poly N2C (p1 : ps)) = Poly C2N [ signum p1 ]
signum p = signum $ reverse p
scalePoly :: (Num a) => a -> Polynomial a -> Polynomial a
-- ^ is scalar multiplication - within the vector space of polynomials
scalePoly t = fmap (* t )
norm1 :: Num a => Polynomial a -> a
-- ^ the 1-Norm = $\sum_{i=1}^n |a_i|$
norm1 (Poly _ p) = sum $ map abs p
norm2 :: (Floating a, Num a) => Polynomial a -> a
-- ^ the traditional Euclidean norm
norm2 (Poly _ p) = sqrt $ sum $ map (^ 2) p
snorm :: (Ord a, Num a) => Polynomial a -> a
-- ^ supremum/maximum norm - also handy sometimes
snorm (Poly _ p) = maximum $ map (abs) p
eval :: Num a => a -> Polynomial a -> a
-- ^ evaluates an /a/-polynomial to an /a/-value
eval n (Poly N2C p) = _eval 0 n p
  where _eval acc n (x : []) = acc * n + x
_eval acc n (x : xs) = _eval (acc * n + x) n xs
eval n p = eval n (reverse p)
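-- A quick sanity check for 'eval' (illustrative): with the leading coefficient
-- first, eval 2 (Poly N2C [3,2,1]) computes 3*2^2 + 2*2 + 1 = 17 via Horner's scheme.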
zipWith_ :: (a -> a -> a) -> [a] -> [a] -> [a]
-- ^ internal helper - like zipWith but appends the longer tail
zipWith_ fun = _zipWith_ fun []
_zipWith_ :: (a -> a -> a) -> [a] -> [a] -> [a] -> [a]
_zipWith_ _ tmp p [] = tmp ++ p
_zipWith_ _ tmp [] q = tmp ++ q
_zipWith_ fun tmp (p1 : ps) (q1 : qs) = _zipWith_ fun _tmp ps qs
where _tmp = tmp ++ [ fun p1 q1 ]
| epsilonhalbe/Algebra-Alchemy | Polynomial.hs | bsd-3-clause | 3,894 | 0 | 12 | 1,215 | 1,599 | 820 | 779 | 80 | 2 |
{-# LANGUAGE TemplateHaskell #-}
module Data.Db where
import Database.Persist.TH
import Data.Type
derivePersistField "Type"
| showpoint/refs | src/Data/Db.hs | bsd-3-clause | 131 | 0 | 5 | 20 | 24 | 14 | 10 | 5 | 0 |
module GPipeFPSRender where
import Control.Monad
import Data.List
import Data.Trie (Trie)
import Data.Vec.LinAlg.Transform3D
import Data.Vec.Nat
import Foreign
import Graphics.GPipe
import System.Directory
import System.FilePath.Posix
import qualified Data.ByteString.Char8 as SB
import qualified Data.Trie as T
import qualified Data.Vec as Vec
import qualified Data.Vect as Vect
import qualified Data.Vector as V
import qualified Data.Vector.Mutable as MV
import qualified Data.Vector.Storable as SV
import BSPLoader
import GPipeFPSMaterial
import GPipeUtils
type VertexData = (Vec.Vec3 (Vertex Float),{-Vec.Vec3 (Vertex Float), -}Vec.Vec2 (Vertex Float),Vec.Vec2 (Vertex Float),Vec.Vec4 (Vertex Float))
type Mesh = PrimitiveStream Triangle VertexData
type FB = FrameBuffer RGBAFormat DepthFormat ()
-- time -> worldProjection -> inFrameBuffer -> resultFrameBuffer
type SurfaceRenderer = Float -> Vertex Float -> Vec.Mat44 (Vertex Float) -> FB -> FB
type Renderer = Texture2D RGBAFormat -> Mesh -> SurfaceRenderer
type RGBFun = Vertex Float -> VertexData -> Vec.Vec3 (Vertex Float)
type AlphaFun = Vertex Float -> VertexData -> Vertex Float
type TCFun = Vertex Float -> VertexData -> Vec.Vec2 (Vertex Float)
type TexFun = Texture2D RGBAFormat -> Float -> Texture2D RGBAFormat
type SampleFun = Texture2D RGBAFormat -> Vec.Vec2 (Fragment Float) -> Color RGBAFormat (Fragment Float)
type VertexDeformer = Vertex Float -> Vec.Vec3 (Vertex Float) -> Vec.Vec3 (Vertex Float)
{-
identity - RGBA 1 1 1 1
identity_lighting (identity_light_byte = ilb) - RGBA ilb ilb ilb ilb
lighting_diffuse - ??? check: RB_CalcDiffuseColor
exact_vertex - vertex color
const - constant color
vertex (identity_light = il, vertex_color*il) - RGBA (r*il) (g*il) (b*il) a
one_minus_vertex = (identity_light = il, vertex_color*il) - RGBA ((1-r)*il) ((1-g)*il) ((1-b)*il) a
fog - fog color
waveform (c = clamp 0 1 (wave_value * identity_light)) - RGBA c c c 1
entity - entity's shaderRGB
one_minus_entity - 1 - entity's shaderRGB
-}
rgbExactVertex _ (_,_,_,r:.g:.b:._:.()) = r:.g:.b:.()
rgbIdentity _ _ = toGPU (1:.1:.1:.())
rgbIdentityLighting _ _ = toGPU (identityLight:.identityLight:.identityLight:.())
rgbConst r g b _ _ = toGPU (r:.g:.b:.())
rgbVertex _ (_,_,_,r:.g:.b:._:.()) = f r:.f g:.f b:.()
where
f a = toGPU identityLight * a
rgbOneMinusVertex _ (_,_,_,r:.g:.b:._:.()) = f r:.f g:.f b:.()
where
f a = 1 - toGPU identityLight * a
convRGBGen a = case a of
-- RGB_Wave w
RGB_Const r g b -> rgbConst r g b
RGB_Identity -> rgbIdentity
RGB_IdentityLighting -> rgbIdentityLighting
-- RGB_Entity
-- RGB_OneMinusEntity
RGB_ExactVertex -> rgbExactVertex
RGB_Vertex -> rgbVertex
-- RGB_LightingDiffuse
RGB_OneMinusVertex -> rgbOneMinusVertex
_ -> rgbIdentity
{-
identity - alpha = 1
const - constant alpha
wave - clamped waveform
lightingspecular - ??? check: RB_CalcSpecularAlpha
entity - entity's shaderRGBA's alpha
oneminusentity - 1 - entity's shaderRGBA's alpha
vertex - vertex alpha
oneminusvertex - 1 - vertex alpha
portal - ???
-}
alphaIdentity _ _ = 1
alphaConst a _ _ = toGPU a
alphaVertex _ (_,_,_,_:._:._:.a:.()) = a
alphaOneMinusVertex _ (_,_,_,_:._:._:.a:.()) = 1 - a
convAlphaGen a = case a of
-- A_Wave w
A_Const a -> alphaConst a
-- A_Portal
A_Identity -> alphaIdentity
-- A_Entity
-- A_OneMinusEntity
A_Vertex -> alphaVertex
-- A_LightingSpecular
A_OneMinusVertex -> alphaOneMinusVertex
_ -> alphaIdentity
tgBase (_,uv,_,_) = uv
tgLightmap (_,_,uv,_) = uv
tgVector u v (p,_,_,_) = (dot p (toGPU u)):.(dot p (toGPU v)):.()
convTCGen a = case a of
TG_Base -> tgBase
TG_Lightmap -> tgLightmap
-- TG_Environment -- TODO, check: RB_CalcEnvironmentTexCoords
TG_Vector u v -> tgVector u v
_ -> tgBase
tmScroll su sv t (u:.v:.()) = fract' (u+t*toGPU su):.fract' (v+t*toGPU sv):.()
tmScale su sv _ (u:.v:.()) = (u*toGPU su):.(v*toGPU sv):.()
convTCMod a = case a of
--TM_EntityTranslate
--TM_Rotate Float
TM_Scroll u v -> tmScroll u v
TM_Scale u v -> tmScale u v
-- TM_Stretch Wave
-- TM_Transform Float Float Float Float Float Float
-- TM_Turb Float Float Float Float
_ -> \_ uv -> uv
shaderRenderer :: CommonAttrs -> (Int,Renderer)
shaderRenderer ca = (caSort ca, \lm obj time' time cWorldProjection fb -> foldl' (\f r -> r lm obj time' time cWorldProjection f) fb $ map (stage ca) $ caStages ca)
stage ca sa = stageRenderer (saDepthFunc sa) depthWrite blend vertexFun rgbGen alphaGen tcFun texFun sampleFun
where
-- tcGen = undefined
-- tcMod = undefined
alphaGen = convAlphaGen $ saAlphaGen sa
rgbGen = convRGBGen $ saRGBGen sa
mipmap = not $ caNoMipMaps ca
vertexFun t v = v
tcFun t vd = foldl' (\uv f -> f t uv) ((convTCGen $ saTCGen sa) vd) (map convTCMod $ saTCMod sa)
depthWrite = if NoBlending == blend then True else True --saDepthWrite sa
blend = case saBlend sa of
Nothing -> NoBlending
Just b -> Blend (FuncAdd,FuncAdd) (b,(SrcAlpha,OneMinusSrcAlpha)) (RGBA (0:.0:.0:.()) 1)
texFun = case saTexture sa of
ST_Map t -> \_ _ -> loadQ3Texture mipmap $ SB.unpack t
ST_ClampMap t -> \_ _ -> loadQ3Texture mipmap $ SB.unpack t
ST_AnimMap f l -> \_ t -> let
txl = map (loadQ3Texture mipmap . SB.unpack) l
i = floor $ (fromIntegral $ length l) * fract' (t*f)
in txl !! i
ST_Lightmap -> \lm _ -> lm
ST_WhiteImage -> \_ _ -> whiteImage
sampleFun = case saTexture sa of
ST_ClampMap _ -> \t uv -> sample (Sampler Linear Clamp) t uv
ST_WhiteImage -> \_ _ -> RGBA (1:.1:.1:.()) 1
_ -> \t uv -> sample (Sampler Linear Wrap) t uv
stageRenderer :: ComparisonFunction -> Bool -> Blending -> VertexDeformer -> RGBFun -> AlphaFun -> TCFun -> TexFun -> SampleFun -> Renderer
stageRenderer depthFun depthWrite blending vertexFun rgbFun alphaFun tcFun texFun sampleFun lmTex obj time' time cWorldProjection fb =
paintColorRastDepth depthFun depthWrite blending (RGBA (Vec.vec True) True) (rast obj) fb
where
rast obj = fmap frag $ rasterizeBack $ fmap vert obj
vert vd@(v3,_,_,_) = (cWorldProjection `multmv` v4,(rgbFun time vd, alphaFun time vd, tcFun time vd))
where
v4 = Vec.snoc (vertexFun time v3) 1
frag (rgb,a,uv) = RGBA (rgb * rgb') (a * a')
where
RGBA rgb' a' = sampleFun (texFun lmTex time') uv
renderSurfaces :: Float -> Vertex Float -> Vec.Mat44 (Vertex Float) -> V.Vector (Int,(Int,SurfaceRenderer)) -> FB
renderSurfaces time' time worldProjection faces = V.foldl' (foldl' (\fb (_,fun) -> fun time' time worldProjection fb)) cleanFB $ batch $ sorted
where
maxSort = 256
cleanFB = newFrameBufferColorDepth (RGBA (0:.0:.0:.()) 1) 1000
sorted = V.accumulate (\l e -> e:l) (V.replicate maxSort []) faces
batch v = V.map (sortBy (\(a,_) (b,_) -> a `compare` b)) v
{-
#define LIGHTMAP_2D -4 // shader is for 2D rendering
#define LIGHTMAP_BY_VERTEX -3 // pre-lit triangle models
#define LIGHTMAP_WHITEIMAGE -2
#define LIGHTMAP_NONE -1
-}
imageRenderer lmidx txName = shaderRenderer $ defaultCommonAttrs {caStages = sa:if lmidx < 0 then [] else saLM:[]}
where
sa = defaultStageAttrs
{ saTexture = ST_Map txName
-- , saBlend = Just (SrcColor,Zero)
-- , saBlend = Just (SrcColor,DstColor)
}
saLM = defaultStageAttrs
{ saTexture = ST_Lightmap
, saTCGen = TG_Lightmap
-- , saBlend = Just (SrcColor,One)
, saBlend = Just (SrcColor,DstColor)
}
compileBSP :: Trie CommonAttrs -> BSPLevel -> V.Vector (Int,(Int,SurfaceRenderer))
compileBSP shaderMap bsp = V.map convertSurface $ blSurfaces bsp
where
lightmaps = V.map (textureFromByteString True 3 128 128 . lmMap) $ blLightmaps bsp
shaders = V.map (\s -> T.lookup (shName s) shaderMap) $ blShaders bsp
convertSurface sf = (shidx,(srShaderNum sf,sh (lightmap $ srLightmapNum sf) geom))
where
shaderName = shName $ (blShaders bsp) V.! (srShaderNum sf)
(shidx,sh) = case shaders V.! srShaderNum sf of
Just s -> shaderRenderer s
Nothing -> imageRenderer (srLightmapNum sf) shaderName
geom :: Mesh
geom = case srSurfaceType sf of
Planar -> toIndexedGPUStream TriangleList v i
TriangleSoup -> toIndexedGPUStream TriangleList v i
Patch -> toGPUStream TriangleList $ concatMap (pointToCube (0:.1:.0:.1:.())) v
Flare -> toGPUStream TriangleList $ concatMap (pointToCube (1:.0:.0:.1:.())) v
v = V.toList $ V.take (srNumVertices sf) $ V.drop (srFirstVertex sf) vertices
i = V.toList $ V.take (srNumIndices sf) $ V.drop (srFirstIndex sf) indices
lightmap lidx | 0 <= lidx && lidx < V.length lightmaps = lightmaps V.! lidx
| otherwise = whiteImage
vertices = V.map convertVertex $ blDrawVertices bsp
indices = blDrawIndices bsp
convertVertex (DrawVertex p dt lt n c) = (v3 p,v2 dt,v2 lt,v4 c)
v2 (Vect.Vec2 i j) = i:.j:.()
v3 (Vect.Vec3 i j k) = i:.j:.k:.()
v4 (Vect.Vec4 i j k l) = i:.j:.k:.l:.()
isClusterVisible :: BSPLevel -> Int -> Int -> Bool
isClusterVisible bl a b
| a >= 0 = 0 /= (visSet .&. (shiftL 1 (b .&. 7)))
| otherwise = True
where
Visibility nvecs szvecs vecs = blVisibility bl
i = a * szvecs + (shiftR b 3)
visSet = vecs V.! i
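-- Walk the BSP tree from node i (start at the root, index 0), descending on the
-- side of each splitting plane the camera is on; negative indices encode leaves,
-- so the result is the index of the leaf containing camPos.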
findLeafIdx bl camPos i
| i >= 0 = if dist >= 0 then findLeafIdx bl camPos f else findLeafIdx bl camPos b
| otherwise = (-i) - 1
where
node = blNodes bl V.! i
(f,b) = ndChildren node
plane = blPlanes bl V.! ndPlaneNum node
dist = plNormal plane `Vect.dotprod` camPos - plDist plane
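-- Keep only the surfaces referenced by leaves that are both in the PVS of the
-- camera's leaf and inside the view frustum.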
cullSurfaces bsp cam frust surfaces = case leafIdx < 0 || leafIdx >= V.length leaves of
True -> unsafePerformIO $ print ("findLeafIdx error") >> return surfaces
False -> unsafePerformIO $ print ("findLeafIdx ok",leafIdx,camCluster) >> return (V.ifilter (\i _ -> surfaceMask V.! i) surfaces)
where
leafIdx = findLeafIdx bsp cam 0
leaves = blLeaves bsp
camCluster = lfCluster $ leaves V.! leafIdx
visibleLeafs = V.filter (\a -> (isClusterVisible bsp camCluster $ lfCluster a) && inFrustum a) leaves
surfaceMask = unsafePerformIO $ do
let leafSurfaces = blLeafSurfaces bsp
mask <- MV.replicate (V.length surfaces) False
V.forM_ visibleLeafs $ \l ->
V.forM_ (V.slice (lfFirstLeafSurface l) (lfNumLeafSurfaces l) leafSurfaces) $ \i ->
MV.write mask i True
V.unsafeFreeze mask
inFrustum a = boxInFrustum (lfMaxs a) (lfMins a) frust
-- Utility code
tableTexture :: [Float] -> Texture1D LuminanceFormat
tableTexture t = unsafePerformIO $ SV.unsafeWith (SV.fromList t) $ \p -> newTexture FloatFormat Luminance16 (length t) [castPtr p]
funcTableSize = 1024 :: Float
sinTexture = tableTexture [sin (i*2*pi/(funcTableSize-1)) | i <- [0..funcTableSize-1]]
squareTexture = tableTexture [if i < funcTableSize / 2 then 1 else -1 | i <- [0..funcTableSize-1]]
sawToothTexture = tableTexture [i / funcTableSize | i <- [0..funcTableSize-1]]
inverseSawToothTexture = tableTexture $ reverse [i / funcTableSize | i <- [0..funcTableSize-1]]
triangleTexture = tableTexture $ l1 ++ map ((-1)*) l1
where
n = funcTableSize / 4
l0 = [i / n | i <- [0..n-1]]
l1 = l0 ++ reverse l0
whiteImage = textureFromByteString False 4 8 8 $ SB.replicate (8*8*4) '\255'
defaultImage = textureFromByteString True 4 16 16 $ SB.pack $ concatMap (replicate 4) [if e x || e y then '\255' else '\32' | y <- [0..15], x <- [0..15]]
where
e 0 = True
e 15 = True
e _ = False
loadQ3Texture :: Bool -> String -> Texture2D RGBAFormat
loadQ3Texture mipmap name' = unsafePerformIO $ do
let name = "fps/" ++ name'
n1 = replaceExtension name "tga"
n2 = replaceExtension name "jpg"
b0 <- doesFileExist name
b1 <- doesFileExist n1
b2 <- doesFileExist n2
return $ maybe defaultImage id $ textureFromFile mipmap $ if b0 then name else if b1 then n1 else n2
| csabahruska/GFXDemo | GPipeFPSRender.hs | bsd-3-clause | 12,288 | 0 | 20 | 2,827 | 4,450 | 2,294 | 2,156 | 196 | 9 |
{-# LANGUAGE OverloadedStrings #-}
module AWS.Types where
| IanConnolly/aws-sdk-fork | AWS/Types.hs | bsd-3-clause | 59 | 0 | 3 | 8 | 7 | 5 | 2 | 2 | 0 |
{-# LANGUAGE LambdaCase, RecordWildCards, TupleSections,
NoMonomorphismRestriction #-}
module Mote.Suggest where
import Control.Applicative
import Control.Monad
import Control.Monad.Error
import Data.Function (on)
import qualified Data.List as List
import qualified Data.Map as M
import Data.Maybe
import Mote.GhcUtil
import Mote.Holes
import Mote.Refine
import Mote.Types
import Mote.Util
import GHC
import RdrName
import TcRnMonad (TcRn)
import Type
import Module (packageIdString)
import Name (isInternalName, nameModule_maybe)
import TcEvidence (HsWrapper (..))
eitherToMaybe = either (const Nothing) Just
-- TODO: Penalize for universally quantified variables that appear in the
-- refineTarget. Or rather,
--
-- Think I should order lexicographically.
-- First by specificity, then locality (is this hole local, module local, project
-- local, or from an external package)
-- TODO: If the goal is of function type A -> B, suggest things that take
-- an argument of type A and then we can refine by composition
-- TODO: Suggest things where the goal type appears as a type parameter in
-- some type constructor application
data Locality = Hole | Module | Project | External deriving (Eq, Ord)
type Score = (Int, Int, Locality)
vagueness :: RefineMatch -> Int
vagueness (RefineMatch {..}) = go refineWrapper where
go :: HsWrapper -> Int
go = \case
WpCompose x0 x1 -> go x0 + go x1
WpCast {} -> 0
WpEvLam {} -> 1
WpEvApp {} -> 1
WpTyLam {} -> 1
WpTyApp {} -> 1
WpLet {} -> 1
WpHole -> 0
-- should really count the number of "hard to get" arg tys,
-- but this spills over into proof search
burdensomeness :: RefineMatch -> Int
burdensomeness (RefineMatch {..}) = length refineArgTys
locality :: Name -> Locality
locality n = case nameModule_maybe n of
Just mod -> case packageIdString (modulePackageId mod) of
"main" -> Project
_ -> External
Nothing -> if isInternalName n then Module else External
-- TODO: push foralls in
-- TODO: concatMap for tuple types
-- We would like to recurse over args in which this tycon is
-- covariantly functorial. The Haskell convention is for the TyCon to be
-- so at least in the last argument if it's a functor at all.
innerArgs :: Type -> [Type]
innerArgs t = case splitAppTys t of
(_, []) -> [t]
(_, args) -> innerArgs (last args) -- Proof search strategy is about finding ways to make this descent real
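-- For example (a sketch): applied to a type like @IO (Maybe Int)@, 'innerArgs'
-- keeps descending through the last argument and yields just @[Int]@.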
matchInnerArgs :: Type -> Type -> TcRn [RefineMatch]
matchInnerArgs goalTy ty = mapMaybeM (refineMatch goalTy) (innerArgs ty)
score :: Bool -> Type -> Type -> Name -> TcRn (Maybe (Score, (Name, Type)))
score hole goalTy ty n = do
let loc = if hole then Hole else locality n
score' rm = (vagueness rm, burdensomeness rm, loc)
let attempts = ty : innerArgs ty
goals = goalTy : innerArgs goalTy
-- TODO: tlm style
fmap (fmap (,(n,ty)) . maximumMay)
. fmap catMaybes
$ sequence (liftA2 (\t g -> fmap score' <$> refineMatch g t) attempts goals)
where
maximumMay = \case { [] -> Nothing; xs -> Just (maximum xs) }
suggestions :: TypecheckedModule -> HoleInfo -> M [(Name, Type)]
suggestions tcmod hi = do
gblScope <- lift getNamesInScope
  -- not sure if it's strictly necessary to do this in the Tc environment of the
-- hole
gblSuggestions <- mapMaybeM gblScore gblScope
-- TODO: tlm style
lclSuggestions <- inHoleEnv tcmod hi $
discardConstraints . fmap catMaybes . forM (holeEnv hi) $ \(id, ty) ->
score True goalTy ty (getName id)
return
. map snd
. List.sortBy (compare `on` fst)
$ (lclSuggestions ++ gblSuggestions)
where
goalTy = holeType hi
maybeErr ex = fmap Just ex `catchError` \_ -> return Nothing
gblScore n = fmap join . maybeErr . inHoleEnv tcmod hi . discardConstraints $ do
ty <- tcRnExprTc . noLoc . HsVar $ Exact n
score False goalTy ty n
getAndMemoizeSuggestions :: Ref MoteState -> AugmentedHoleInfo -> M [(Name, Type)]
getAndMemoizeSuggestions stRef ahi =
case Mote.Types.suggestions ahi of
Just suggs -> return suggs
Nothing -> do
fdata@(FileData {..}) <- getFileDataErr stRef
let hi = holeInfo ahi
suggs <- Mote.Suggest.suggestions typecheckedModule hi
saveInCurrentHole hi suggs
gModifyRef stRef (\s ->
s {
fileData = Just (
fdata {
holesInfo =
M.update (\ahi' -> Just $ ahi' { Mote.Types.suggestions = Just suggs })
(holeSpan hi) holesInfo})})
return suggs
where
saveInCurrentHole hi suggs =
fmap currentHole (gReadRef stRef) >>= \case
Nothing -> return ()
Just ahi' ->
if holeSpan (holeInfo ahi') == holeSpan hi
then gModifyRef stRef (\s ->
s { currentHole = Just (ahi' { Mote.Types.suggestions = Just suggs }) })
else return ()
| imeckler/mote | Mote/Suggest.hs | bsd-3-clause | 5,158 | 0 | 25 | 1,411 | 1,382 | 728 | 654 | 102 | 8 |
{-# LANGUAGE ForeignFunctionInterface #-}
-- |
-- Module : System.Crypto.Random
-- Copyright : (c) Austin Seipp 2011-2013
-- License : BSD3
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- Cross-platform access to cryptographically secure randomness. Use
-- 'randombytes' to generate nonces or secret keys.
--
-- On Unix machines, this uses the @\/dev\/urandom@ device. On
-- Windows, it uses the @CryptGenRandom@ API.
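--
-- A typical use is simply (illustrative):
--
-- > nonce <- randombytes 24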
--
module System.Crypto.Random
( randombytes -- :: Int -> IO ByteString
) where
import Data.Word
import Foreign.C.Types
import Foreign.Ptr
import Data.ByteString (ByteString)
import Data.ByteString.Internal (create)
-- | Generate a random @'ByteString'@ from your system entropy source.
randombytes :: Int -> IO ByteString
randombytes n
| n < 0 = error "Crypto.NaCl.Random.randomBytes: invalid length"
| otherwise = create n $ \out ->
c_randombytes out (fromIntegral n) >> return ()
foreign import ccall unsafe "randombytes"
c_randombytes :: Ptr Word8 -> CULLong -> IO Int
| thoughtpolice/hs-nacl | src/System/Crypto/Random.hs | bsd-3-clause | 1,143 | 0 | 11 | 257 | 169 | 97 | 72 | 15 | 1 |
{-# LANGUAGE TemplateHaskell #-}
module Application where
import Data.Lens.Template
import Snap.Snaplet
import Snap.Snaplet.Heist
import Text.SyntaxHighlight.TextMate (TMSyntaxFile)
data App = App {
_heist :: Snaplet (Heist App),
_cspmSynytaxFile :: TMSyntaxFile
}
type AppHandler = Handler App App
makeLens ''App
instance HasHeist App where
heistLens = subSnaplet heist
| tomgr/webcspm | src/Application.hs | bsd-3-clause | 406 | 0 | 11 | 81 | 98 | 56 | 42 | 13 | 0 |
{-# LANGUAGE TemplateHaskell #-}
module Main where
import Test.Tasty
import Test.Tasty.TH (defaultMainGenerator)
--import Test.Tasty.SmallCheck as SC
import Test.Tasty.QuickCheck as QC
import Test.Tasty.HUnit
import Test.QuickCheck.Arbitrary (Arbitrary)
import Test.QuickCheck.Property (forAll, Property, (==>))
import Test.QuickCheck (arbitrary, oneof, choose, Gen)
import System.FilePath(hasTrailingPathSeparator)
import Data.List(intersperse, isInfixOf)
import Data.Char
import Denominate.Internal
main :: IO ()
main = $(defaultMainGenerator)
instance Arbitrary FileType where
arbitrary = oneof $ map return [Directory, File]
randPathGen :: Gen [Char]
randPathGen =
do numSegments <- choose (0 :: Int, 8 :: Int)
words <- mapM (\_ -> choose (0 :: Int, 10 :: Int) >>= randWordGen) [0..numSegments]
return $ concat $ intersperse "/" words
randWordGen :: Int -> Gen [Char]
randWordGen len = mapM (\_ -> randCharGen) [0..len]
randCharGen :: Gen Char
randCharGen = oneof $ map return chars
where chars = letters ++ map toUpper letters ++ punc
letters = "abcdefghijklmnopqrstuvwxyz"
punc = "..............................-----_____,?`~!@#$%^&*()=+[]{}|'\"<>?"
lastSlashOf :: String -> Int
lastSlashOf = lastIndexOf '/'
lastPeriodOf :: String -> Int
lastPeriodOf = lastIndexOf '.'
lastPathPart :: String -> [Char]
lastPathPart path = if n > -1 then drop (n+1) path else path
where n = lastSlashOf path
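-- | Index of the last occurrence of the character in the string, or -1 if it
-- does not occur.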
lastIndexOf :: Char -> String -> Int
lastIndexOf chr str = lastIndexOf' str 0 (-1)
where
lastIndexOf' [] _ highest = highest
lastIndexOf' (c:cs) i highest =
case c == chr of
True -> lastIndexOf' cs (i+1) i
False -> lastIndexOf' cs (i+1) highest
convert :: TypedFilePath -> FilePath
convert = normalizeFilename defaultFilenameConverter
hasLetter :: [Char] -> Bool
hasLetter = any isLetter
hasLegalChar :: [Char] -> Bool
hasLegalChar = any (\c -> isLetter c || c == '-')
lastDirPartHasLetter :: String -> Bool
lastDirPartHasLetter path = hasLetter $ drop n path
where n = lastSlashOf path
lastPathPartHasExt :: String -> Bool
lastPathPartHasExt path = not $ null (ext filename)
where
n = lastSlashOf path
filename = drop n path
hasInitialDot :: [Char] -> Bool
hasInitialDot p = char1 p == "."
char1 :: String -> String
char1 = take 1
isInitialGarbageChar :: Char -> Bool
isInitialGarbageChar c = not (isLetter c || c == '.')
hasInitialGarbageChar :: [Char] -> Bool
hasInitialGarbageChar s | null s = False
| otherwise = isInitialGarbageChar $ head s
hasTrailingGarbageChar :: [Char] -> Bool
hasTrailingGarbageChar s | null s = False
| otherwise = not $ isLetter $ last s
stripExt :: String -> [Char]
stripExt fname =
case n > 0 of
True -> take n fname
False -> fname
where n = lastPeriodOf fname
ext :: String -> [Char]
ext fname =
case lastDotIndex < 1 of
True -> ""
False -> drop (lastDotIndex + 1) fname
where
lastDotIndex = lastIndexOf '.' fname
-- PROPERTIES:
-- Only the filename or the very last directory name (everything before
-- the last slash) should ever change.
prop_changesOnlyLastPart :: FileType -> Property
prop_changesOnlyLastPart ftype =
forAll randPathGen f
where
f p = take 2 p /= "./"
&& not (isInfixOf "//" p)
&& not (hasTrailingPathSeparator p) ==> test p
test path = take n path == take n result
where n = lastSlashOf path + 1
result = convert (ftype, path)
-- every char in last part of a directory will be either a lowercase letter
-- or a hyphen if there is at least one letter in the last part of the
-- original path, with the possible exception of an initial dot
prop_dirLastPartLegalChars :: Property
prop_dirLastPartLegalChars =
forAll randPathGen f
where
f p = lastDirPartHasLetter p ==> test p
test path =
let dirResult = lastPathPart $ result path
initChar = head dirResult
in if not (null dirResult)
then initChar == '.' || initChar == '-' ||(isLower initChar) &&
all (\c -> c == '-' || isLower c) (tail dirResult)
else True
result p = convert (Directory, p)
-- if the original filename without extension has at least one letter,
-- then the new filename without extension should consist of nothing but
-- lowercase letters and hyphens.
prop_fileLastPartLegalChars :: Property
prop_fileLastPartLegalChars =
forAll randPathGen (\p -> hasLetter (extractFilename p) ==> test p)
where
test path = all (\c -> isLetter c || c == '.' || c == '-') (newFileNoExt path)
newFileNoExt p = extractFilename $ convert (File, p)
extractFilename = stripExt . lastPathPart
-- the extension of a file should only be lowercased, with no other
-- changes made.
prop_fileExtOnlyLowercased :: Property
prop_fileExtOnlyLowercased =
forAll randPathGen (\p -> lastPathPartHasExt p ==> test p)
where
test path = f result == map toLower (f path)
where result = convert (File, path)
f = ext . lastPathPart
-- a file that begins with a '.' should not have the '.' removed
prop_fileInitialDotUnchanged :: Property
prop_fileInitialDotUnchanged =
forAll randPathGen test
where
test p = hasInitialDot (lastPathPart p) ==>
hasInitialDot (lastPathPart $ convert (File, p))
-- the length of the converted path is never longer than the original path
prop_pathNeverLongerAfterConvert :: FileType -> Property
prop_pathNeverLongerAfterConvert ft =
forAll randPathGen (\p -> length (convert (ft, p)) <= length p)
-- there should always be the same number of letters in the path
-- before and after, because letters are never removed.
prop_numLettersBeforeAndAfterAreEqual :: FileType -> Property
prop_numLettersBeforeAndAfterAreEqual ft =
forAll randPathGen (\p -> f (convert (ft, p)) == f p)
where
f p = length $ filter isLetter p
-- the letters in the original should be equal to, and in the
-- same order as, the letters in the converted path, except
-- for case differences.
prop_lettersBeforeAndAfterAreEqual :: FileType -> Property
prop_lettersBeforeAndAfterAreEqual ft =
forAll randPathGen (\p -> f (convert (ft, p)) == f p)
where
f p = map toLower $ filter isLetter p
-- if the file has at least one letter in the filename without extension,
-- then the first character of the converted filename will be a non-garbage
-- character (letter or '.').
prop_initialFileGarbageIsRemoved :: Property
prop_initialFileGarbageIsRemoved =
forAll randPathGen test
where
test p = hasLetter origLastPathPartNoExt ==>
not (hasInitialGarbageChar newLastPathPartNoExt)
where
origLastPathPartNoExt = stripExt $ lastPathPart p
newLastPathPartNoExt = stripExt $ lastPathPart $ convert (File, p)
-- likewise for directory, but we don't consider extensions at all
prop_initialDirGarbageIsRemoved :: Property
prop_initialDirGarbageIsRemoved =
forAll randPathGen test
where
test p = hasLetter origLastPathPart ==>
not (hasInitialGarbageChar newLastPathPart)
where
origLastPathPart = lastPathPart p
newLastPathPart = lastPathPart $ convert (Directory, p)
-- if the file has at least one letter in the filename without extension,
-- then the last character of the converted filename will be a non-garbage
-- character (letter or '.').
prop_trailingFileGarbageIsRemoved :: Property
prop_trailingFileGarbageIsRemoved =
forAll randPathGen test
where
test p = hasLetter origLastPathPartNoExt ==>
not (hasTrailingGarbageChar newLastPathPartNoExt)
where
origLastPathPartNoExt = stripExt $ lastPathPart p
newLastPathPartNoExt = stripExt $ lastPathPart $ convert (File, p)
-- likewise for directory, but we don't consider extensions at all
prop_trailingDirGarbageIsRemoved :: Property
prop_trailingDirGarbageIsRemoved =
forAll randPathGen test
where
test p = hasLetter origLastPathPart ==>
not (hasTrailingGarbageChar newLastPathPart)
where
origLastPathPart = lastPathPart p
newLastPathPart = lastPathPart $ convert (Directory, p)
test_dirAndFile_currentDir = [
testCase "test_dirAndFile_currentDir with './'"
(assertEqual "for dirAndFile \"./test\","
("", "test")
$ dirAndFile "./test"),
testCase "test_dirAndFile_currentDir without './'"
(assertEqual "for dirAndFile \"test\","
("", "test")
$ dirAndFile "test")
]
test_dirAndFile_absolutePath = [
testCase "test_dirAndFile_absolutePath"
(assertEqual "for dirAndFile \"/foo/bar/baz.txt\""
("/foo/bar", "baz.txt")
$ dirAndFile "/foo/bar/baz.txt")
]
| eukaryote/denominate | tests/test.hs | bsd-3-clause | 8,882 | 0 | 15 | 1,999 | 2,174 | 1,138 | 1,036 | 171 | 3 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE ViewPatterns #-}
-- | Parsing command line targets
module Stack.Build.Target
( -- * Types
ComponentName
, UnresolvedComponent (..)
, RawTarget (..)
, LocalPackageView (..)
, SimpleTarget (..)
, NeedTargets (..)
-- * Parsers
, parseRawTarget
, parseTargets
) where
import Control.Applicative
import Control.Arrow (second)
import Control.Monad.Catch (MonadCatch, throwM)
import Control.Monad.IO.Class
import Data.Either (partitionEithers)
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe (mapMaybe)
import Data.Monoid
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Text (Text)
import qualified Data.Text as T
import Path
import Path.Extra (rejectMissingDir)
import Path.IO
import Prelude -- Fix redundant import warnings
import Stack.Types
-- | The name of a component, which applies to executables, test suites, and benchmarks
type ComponentName = Text
newtype RawInput = RawInput { unRawInput :: Text }
-- | Either a fully resolved component, or a component name that could be
-- either an executable, test, or benchmark
data UnresolvedComponent
= ResolvedComponent !NamedComponent
| UnresolvedComponent !ComponentName
deriving (Show, Eq, Ord)
-- | Raw command line input, without checking against any databases or list of
-- locals. Does not deal with directories
data RawTarget (a :: RawTargetType) where
RTPackageComponent :: !PackageName -> !UnresolvedComponent -> RawTarget a
RTComponent :: !ComponentName -> RawTarget a
RTPackage :: !PackageName -> RawTarget a
RTPackageIdentifier :: !PackageIdentifier -> RawTarget 'HasIdents
deriving instance Show (RawTarget a)
deriving instance Eq (RawTarget a)
deriving instance Ord (RawTarget a)
data RawTargetType = HasIdents | NoIdents
-- | If this function returns @Nothing@, the input should be treated as a
-- directory.
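--
-- Some illustrative inputs (informal; constructors as defined above):
--
-- > "foo-1.2.3"   ~ RTPackageIdentifier
-- > "foo"         ~ RTPackage
-- > ":bar"        ~ RTComponent "bar"
-- > "foo:lib"     ~ RTPackageComponent _ (ResolvedComponent CLib)
-- > "foo:exe:bar" ~ RTPackageComponent _ (ResolvedComponent (CExe "bar"))
-- > "foo:bar"     ~ RTPackageComponent _ (UnresolvedComponent "bar")
-- > "some/dir"    ~ Nothing (treated as a directory)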
parseRawTarget :: Text -> Maybe (RawTarget 'HasIdents)
parseRawTarget t =
(RTPackageIdentifier <$> parsePackageIdentifierFromString s)
<|> (RTPackage <$> parsePackageNameFromString s)
<|> (RTComponent <$> T.stripPrefix ":" t)
<|> parsePackageComponent
where
s = T.unpack t
parsePackageComponent =
case T.splitOn ":" t of
[pname, "lib"]
| Just pname' <- parsePackageNameFromString (T.unpack pname) ->
Just $ RTPackageComponent pname' $ ResolvedComponent CLib
[pname, cname]
| Just pname' <- parsePackageNameFromString (T.unpack pname) ->
Just $ RTPackageComponent pname' $ UnresolvedComponent cname
[pname, typ, cname]
| Just pname' <- parsePackageNameFromString (T.unpack pname)
, Just wrapper <- parseCompType typ ->
Just $ RTPackageComponent pname' $ ResolvedComponent $ wrapper cname
_ -> Nothing
parseCompType t' =
case t' of
"exe" -> Just CExe
"test" -> Just CTest
"bench" -> Just CBench
_ -> Nothing
-- | A view of a local package needed for resolving components
data LocalPackageView = LocalPackageView
{ lpvVersion :: !Version
, lpvRoot :: !(Path Abs Dir)
, lpvCabalFP :: !(Path Abs File)
, lpvComponents :: !(Set NamedComponent)
, lpvExtraDep :: !TreatLikeExtraDep
}
-- | Same as @parseRawTarget@, but also takes directories into account.
parseRawTargetDirs :: (MonadIO m, MonadCatch m)
=> Path Abs Dir -- ^ current directory
-> Map PackageName LocalPackageView
-> Text
-> m (Either Text [(RawInput, RawTarget 'HasIdents)])
parseRawTargetDirs root locals t =
case parseRawTarget t of
Just rt -> return $ Right [(ri, rt)]
Nothing -> do
mdir <- forgivingAbsence (resolveDir root (T.unpack t))
>>= rejectMissingDir
case mdir of
Nothing -> return $ Left $ "Directory not found: " `T.append` t
Just dir ->
case mapMaybe (childOf dir) $ Map.toList locals of
[] -> return $ Left $
"No local directories found as children of " `T.append`
t
names -> return $ Right $ map ((ri, ) . RTPackage) names
where
ri = RawInput t
childOf dir (name, lpv) =
if (dir == lpvRoot lpv || isParentOf dir (lpvRoot lpv)) && not (lpvExtraDep lpv)
then Just name
else Nothing
data SimpleTarget
= STUnknown
| STNonLocal
| STLocalComps !(Set NamedComponent)
| STLocalAll
deriving (Show, Eq, Ord)
resolveIdents :: Map PackageName Version -- ^ snapshot
-> Map PackageName Version -- ^ extra deps
-> Map PackageName LocalPackageView
-> (RawInput, RawTarget 'HasIdents)
-> Either Text ((RawInput, RawTarget 'NoIdents), Map PackageName Version)
resolveIdents _ _ _ (ri, RTPackageComponent x y) = Right ((ri, RTPackageComponent x y), Map.empty)
resolveIdents _ _ _ (ri, RTComponent x) = Right ((ri, RTComponent x), Map.empty)
resolveIdents _ _ _ (ri, RTPackage x) = Right ((ri, RTPackage x), Map.empty)
resolveIdents snap extras locals (ri, RTPackageIdentifier (PackageIdentifier name version)) =
case mfound of
Just (foundPlace, foundVersion) | foundVersion /= version -> Left $ T.pack $ concat
[ "Specified target version "
, versionString version
, " for package "
, packageNameString name
, " does not match "
, foundPlace
, " version "
, versionString foundVersion
]
_ -> Right
( (ri, RTPackage name)
, case mfound of
-- Add to extra deps since we didn't have it already
Nothing -> Map.singleton name version
-- Already had it, don't add to extra deps
Just _ -> Map.empty
)
where
mfound = mlocal <|> mextra <|> msnap
mlocal = (("local", ) . lpvVersion) <$> Map.lookup name locals
mextra = ("extra-deps", ) <$> Map.lookup name extras
msnap = ("snapshot", ) <$> Map.lookup name snap
resolveRawTarget :: Map PackageName Version -- ^ snapshot
-> Map PackageName Version -- ^ extra deps
-> Map PackageName LocalPackageView
-> (RawInput, RawTarget 'NoIdents)
-> Either Text (PackageName, (RawInput, SimpleTarget))
resolveRawTarget snap extras locals (ri, rt) =
go rt
where
go (RTPackageComponent name ucomp) =
case Map.lookup name locals of
Nothing -> Left $ T.pack $ "Unknown local package: " ++ packageNameString name
Just lpv ->
case ucomp of
ResolvedComponent comp
| comp `Set.member` lpvComponents lpv ->
Right (name, (ri, STLocalComps $ Set.singleton comp))
| otherwise -> Left $ T.pack $ concat
[ "Component "
, show comp
, " does not exist in package "
, packageNameString name
]
UnresolvedComponent comp ->
case filter (isCompNamed comp) $ Set.toList $ lpvComponents lpv of
[] -> Left $ T.concat
[ "Component "
, comp
, " does not exist in package "
, T.pack $ packageNameString name
]
[x] -> Right (name, (ri, STLocalComps $ Set.singleton x))
matches -> Left $ T.concat
[ "Ambiguous component name "
, comp
, " for package "
, T.pack $ packageNameString name
, ": "
, T.pack $ show matches
]
go (RTComponent cname) =
let allPairs = concatMap
(\(name, lpv) -> map (name,) $ Set.toList $ lpvComponents lpv)
(Map.toList locals)
in case filter (isCompNamed cname . snd) allPairs of
[] -> Left $ "Could not find a component named " `T.append` cname
[(name, comp)] ->
Right (name, (ri, STLocalComps $ Set.singleton comp))
matches -> Left $ T.concat
[ "Ambiugous component name "
, cname
, ", matches: "
, T.pack $ show matches
]
go (RTPackage name) =
case Map.lookup name locals of
Just _lpv -> Right (name, (ri, STLocalAll))
Nothing ->
case Map.lookup name extras of
Just _ -> Right (name, (ri, STNonLocal))
Nothing ->
case Map.lookup name snap of
Just _ -> Right (name, (ri, STNonLocal))
Nothing -> Right (name, (ri, STUnknown))
isCompNamed :: Text -> NamedComponent -> Bool
isCompNamed _ CLib = False
isCompNamed t1 (CExe t2) = t1 == t2
isCompNamed t1 (CTest t2) = t1 == t2
isCompNamed t1 (CBench t2) = t1 == t2
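-- | Group the resolved targets by package and merge them: several component
-- targets for the same package are unioned, while any other overlap between
-- targets for one package is reported as an error.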
simplifyTargets :: [(PackageName, (RawInput, SimpleTarget))]
-> ([Text], Map PackageName SimpleTarget)
simplifyTargets =
mconcat . map go . Map.toList . Map.fromListWith (++) . fmap (second return)
where
go :: (PackageName, [(RawInput, SimpleTarget)])
-> ([Text], Map PackageName SimpleTarget)
go (_, []) = error "Stack.Build.Target.simplifyTargets: the impossible happened"
go (name, [(_, st)]) = ([], Map.singleton name st)
go (name, pairs) =
case partitionEithers $ map (getLocalComp . snd) pairs of
([], comps) -> ([], Map.singleton name $ STLocalComps $ Set.unions comps)
_ ->
let err = T.pack $ concat
[ "Overlapping targets provided for package "
, packageNameString name
, ": "
, show $ map (unRawInput . fst) pairs
]
in ([err], Map.empty)
getLocalComp (STLocalComps comps) = Right comps
getLocalComp _ = Left ()
-- | Do we need at least one target (e.g. `stack build`), or do we allow none?
data NeedTargets
= NeedTargets
| AllowNoTargets
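-- | Parse the command line targets: default to all non-extra-dep local
-- packages when none are given, expand directory targets, resolve package
-- identifiers (collecting new extra-deps), resolve each raw target and merge
-- the results, throwing a 'TargetParseException' if any step reports errors.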
parseTargets :: (MonadCatch m, MonadIO m)
=> NeedTargets -- ^ need at least one target
-> Bool -- ^ using implicit global project?
-> Map PackageName Version -- ^ snapshot
-> Map PackageName Version -- ^ extra deps
-> Map PackageName LocalPackageView
-> Path Abs Dir -- ^ current directory
-> [Text] -- ^ command line targets
-> m (Map PackageName Version, Map PackageName SimpleTarget)
parseTargets needTargets implicitGlobal snap extras locals currDir textTargets' = do
let textTargets =
if null textTargets'
then map (T.pack . packageNameString) $ Map.keys $ Map.filter (not . lpvExtraDep) locals
else textTargets'
erawTargets <- mapM (parseRawTargetDirs currDir locals) textTargets
let (errs1, rawTargets) = partitionEithers erawTargets
(errs2, unzip -> (rawTargets', newExtras)) = partitionEithers $
map (resolveIdents snap extras locals) $ concat rawTargets
(errs3, targetTypes) = partitionEithers $
map (resolveRawTarget snap extras locals) rawTargets'
(errs4, targets) = simplifyTargets targetTypes
errs = concat [errs1, errs2, errs3, errs4]
if null errs
then if Map.null targets
then case needTargets of
AllowNoTargets ->
return (Map.empty, Map.empty)
NeedTargets ->
throwM $ TargetParseException
$ if implicitGlobal
then ["The specified targets matched no packages.\nPerhaps you need to run 'stack init'?"]
else ["The specified targets matched no packages"]
else return (Map.unions newExtras, targets)
else throwM $ TargetParseException errs
| narrative/stack | src/Stack/Build/Target.hs | bsd-3-clause | 13,228 | 0 | 21 | 4,853 | 3,208 | 1,695 | 1,513 | 286 | 12 |
{-# LANGUAGE CPP, ViewPatterns #-}
#ifndef CABAL_VERSION_CHECK
#error This module has to be compiled via the Setup.hs program which generates the gtk2hs-macros.h file
#endif
-- | Build a Gtk2hs package.
--
module Gtk2HsSetup (
gtk2hsUserHooks,
getPkgConfigPackages,
checkGtk2hsBuildtools,
typeGenProgram,
signalGenProgram,
c2hsLocal
) where
import Distribution.Simple
import Distribution.Simple.PreProcess
import Distribution.InstalledPackageInfo ( importDirs,
showInstalledPackageInfo,
libraryDirs,
extraLibraries,
extraGHCiLibraries )
import Distribution.Simple.PackageIndex ( lookupInstalledPackageId )
import Distribution.PackageDescription as PD ( PackageDescription(..),
updatePackageDescription,
BuildInfo(..),
emptyBuildInfo, allBuildInfo,
Library(..),
libModules, hasLibs)
import Distribution.Simple.LocalBuildInfo (LocalBuildInfo(withPackageDB, buildDir, localPkgDescr, installedPkgs, withPrograms),
InstallDirs(..),
componentPackageDeps,
absoluteInstallDirs)
import Distribution.Simple.Compiler ( Compiler(..) )
import Distribution.Simple.Program (
Program(..), ConfiguredProgram(..),
rawSystemProgramConf, rawSystemProgramStdoutConf, programName, programPath,
c2hsProgram, pkgConfigProgram, gccProgram, requireProgram, ghcPkgProgram,
simpleProgram, lookupProgram, rawSystemProgramStdout, ProgArg)
import Distribution.ModuleName ( ModuleName, components, toFilePath )
import Distribution.Simple.Utils
import Distribution.Simple.Setup (CopyFlags(..), InstallFlags(..), CopyDest(..),
defaultCopyFlags, ConfigFlags(configVerbosity),
fromFlag, toFlag, RegisterFlags(..), flagToMaybe,
fromFlagOrDefault, defaultRegisterFlags)
import Distribution.Simple.BuildPaths ( autogenModulesDir )
import Distribution.Simple.Install ( install )
import Distribution.Simple.Register ( generateRegistrationInfo, registerPackage )
import Distribution.Text ( simpleParse, display )
import System.FilePath
import System.Exit (exitFailure)
import System.Directory ( doesFileExist, getDirectoryContents, doesDirectoryExist )
import Distribution.Version (Version(..))
import Distribution.Verbosity
import Control.Monad (when, unless, filterM, liftM, forM, forM_)
import Data.Maybe ( isJust, isNothing, fromMaybe, maybeToList )
import Data.List (isPrefixOf, isSuffixOf, stripPrefix, nub)
import Data.Char (isAlpha, isNumber)
import qualified Data.Map as M
import qualified Data.Set as S
import qualified Distribution.Simple.LocalBuildInfo as LBI
import Distribution.Simple.Compiler (compilerVersion)
import Control.Applicative ((<$>))
#if CABAL_VERSION_CHECK(1,17,0)
import Distribution.Simple.Program.Find ( defaultProgramSearchPath )
onDefaultSearchPath f a b = f a b defaultProgramSearchPath
libraryConfig lbi = case [clbi | (LBI.CLibName, clbi, _) <- LBI.componentsConfigs lbi] of
[clbi] -> Just clbi
_ -> Nothing
#else
onDefaultSearchPath = id
libraryConfig = LBI.libraryConfig
#endif
-- the name of the c2hs pre-compiled header file
precompFile = "precompchs.bin"
gtk2hsUserHooks = simpleUserHooks {
hookedPrograms = [typeGenProgram, signalGenProgram, c2hsLocal],
hookedPreProcessors = [("chs", ourC2hs)],
confHook = \pd cf ->
(fmap adjustLocalBuildInfo (confHook simpleUserHooks pd cf)),
postConf = \args cf pd lbi -> do
genSynthezisedFiles (fromFlag (configVerbosity cf)) pd lbi
postConf simpleUserHooks args cf pd lbi,
buildHook = \pd lbi uh bf -> fixDeps pd >>= \pd ->
buildHook simpleUserHooks pd lbi uh bf,
copyHook = \pd lbi uh flags -> copyHook simpleUserHooks pd lbi uh flags >>
installCHI pd lbi (fromFlag (copyVerbosity flags)) (fromFlag (copyDest flags)),
instHook = \pd lbi uh flags ->
#if defined(mingw32_HOST_OS) || defined(__MINGW32__)
installHook pd lbi uh flags >>
installCHI pd lbi (fromFlag (installVerbosity flags)) NoCopyDest,
regHook = registerHook
#else
instHook simpleUserHooks pd lbi uh flags >>
installCHI pd lbi (fromFlag (installVerbosity flags)) NoCopyDest
#endif
}
------------------------------------------------------------------------------
-- Lots of stuff for windows ghci support
------------------------------------------------------------------------------
getDlls :: [FilePath] -> IO [FilePath]
getDlls dirs = filter ((== ".dll") . takeExtension) . concat <$>
mapM getDirectoryContents dirs
fixLibs :: [FilePath] -> [String] -> [String]
fixLibs dlls = concatMap $ \ lib ->
case filter (isLib lib) dlls of
dll:_ -> [dropExtension dll]
_ -> if lib == "z" then [] else [lib]
where
isLib lib dll =
case stripPrefix ("lib"++lib) dll of
Just ('.':_) -> True
Just ('-':n:_) | isNumber n -> True
_ -> False
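-- A rough illustration of 'fixLibs' (hypothetical file and library names):
-- > fixLibs ["libgtk-3-0.dll"] ["gtk-3", "z", "foo"] == ["libgtk-3-0", "foo"]
-- the DLL that matches a library replaces its plain name, "z" is dropped
-- entirely, and names without a matching DLL are passed through unchanged.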
-- The following code is a big copy-and-paste job from the sources of
-- Cabal 1.8 just to be able to fix a field in the package file. Yuck.
installHook :: PackageDescription -> LocalBuildInfo
-> UserHooks -> InstallFlags -> IO ()
installHook pkg_descr localbuildinfo _ flags = do
let copyFlags = defaultCopyFlags {
copyDistPref = installDistPref flags,
copyDest = toFlag NoCopyDest,
copyVerbosity = installVerbosity flags
}
install pkg_descr localbuildinfo copyFlags
let registerFlags = defaultRegisterFlags {
regDistPref = installDistPref flags,
regInPlace = installInPlace flags,
regPackageDB = installPackageDB flags,
regVerbosity = installVerbosity flags
}
when (hasLibs pkg_descr) $ register pkg_descr localbuildinfo registerFlags
registerHook :: PackageDescription -> LocalBuildInfo
-> UserHooks -> RegisterFlags -> IO ()
registerHook pkg_descr localbuildinfo _ flags =
if hasLibs pkg_descr
then register pkg_descr localbuildinfo flags
else setupMessage verbosity
"Package contains no library to register:" (packageId pkg_descr)
where verbosity = fromFlag (regVerbosity flags)
register :: PackageDescription -> LocalBuildInfo
-> RegisterFlags -- ^Install in the user's database?; verbose
-> IO ()
register pkg@(library -> Just lib )
lbi@(libraryConfig -> Just clbi) regFlags
= do
installedPkgInfoRaw <- generateRegistrationInfo
verbosity pkg lib lbi clbi inplace distPref
dllsInScope <- getSearchPath >>= (filterM doesDirectoryExist) >>= getDlls
let libs = fixLibs dllsInScope (extraLibraries installedPkgInfoRaw)
installedPkgInfo = installedPkgInfoRaw {
extraGHCiLibraries = libs }
-- Three different modes:
case () of
_ | modeGenerateRegFile -> die "Generate Reg File not supported"
| modeGenerateRegScript -> die "Generate Reg Script not supported"
| otherwise -> registerPackage verbosity
installedPkgInfo pkg lbi inplace
#if CABAL_VERSION_CHECK(1,10,0)
packageDbs
#else
packageDb
#endif
where
modeGenerateRegFile = isJust (flagToMaybe (regGenPkgConf regFlags))
modeGenerateRegScript = fromFlag (regGenScript regFlags)
inplace = fromFlag (regInPlace regFlags)
packageDbs = nub $ withPackageDB lbi
++ maybeToList (flagToMaybe (regPackageDB regFlags))
packageDb = registrationPackageDB packageDbs
distPref = fromFlag (regDistPref regFlags)
verbosity = fromFlag (regVerbosity regFlags)
register _ _ regFlags = notice verbosity "No package to register"
where
verbosity = fromFlag (regVerbosity regFlags)
------------------------------------------------------------------------------
-- This is a hack for Cabal-1.8, It is not needed in Cabal-1.9.1 or later
------------------------------------------------------------------------------
adjustLocalBuildInfo :: LocalBuildInfo -> LocalBuildInfo
adjustLocalBuildInfo lbi =
let extra = (Just libBi, [])
libBi = emptyBuildInfo { includeDirs = [ autogenModulesDir lbi
, buildDir lbi ] }
in lbi { localPkgDescr = updatePackageDescription extra (localPkgDescr lbi) }
------------------------------------------------------------------------------
-- Processing .chs files with our local c2hs.
------------------------------------------------------------------------------
ourC2hs :: BuildInfo -> LocalBuildInfo -> PreProcessor
ourC2hs bi lbi = PreProcessor {
platformIndependent = False,
runPreProcessor = runC2HS bi lbi
}
runC2HS :: BuildInfo -> LocalBuildInfo ->
(FilePath, FilePath) -> (FilePath, FilePath) -> Verbosity -> IO ()
runC2HS bi lbi (inDir, inFile) (outDir, outFile) verbosity = do
  -- obtain the header file name; it is needed if we don't have the precompiled header yet
header <- case lookup "x-c2hs-header" (customFieldsBI bi) of
Just h -> return h
Nothing -> die ("Need x-c2hs-Header definition in the .cabal Library section "++
"that sets the C header file to process .chs.pp files.")
-- c2hs will output files in out dir, removing any leading path of the input file.
-- Thus, append the dir of the input file to the output dir.
let (outFileDir, newOutFile) = splitFileName outFile
let newOutDir = outDir </> outFileDir
-- additional .chi files might be needed that other packages have installed;
-- we assume that these are installed in the same place as .hi files
let chiDirs = [ dir |
ipi <- maybe [] (map fst . componentPackageDeps) (libraryConfig lbi),
dir <- maybe [] importDirs (lookupInstalledPackageId (installedPkgs lbi) ipi) ]
(gccProg, _) <- requireProgram verbosity gccProgram (withPrograms lbi)
rawSystemProgramConf verbosity c2hsLocal (withPrograms lbi) $
map ("--include=" ++) (outDir:chiDirs)
++ [ "--cpp=" ++ programPath gccProg, "--cppopts=-E" ]
++ ["--cppopts=" ++ opt | opt <- getCppOptions bi lbi]
++ ["--output-dir=" ++ newOutDir,
"--output=" ++ newOutFile,
"--precomp=" ++ buildDir lbi </> precompFile,
header, inDir </> inFile]
getCppOptions :: BuildInfo -> LocalBuildInfo -> [String]
getCppOptions bi lbi
= nub $
["-I" ++ dir | dir <- PD.includeDirs bi]
++ [opt | opt@('-':c:_) <- PD.cppOptions bi ++ PD.ccOptions bi, c `elem` "DIU"]
++ ["-D__GLASGOW_HASKELL__="++show (ghcDefine . versionBranch . compilerVersion $ LBI.compiler lbi)]
where
ghcDefine (v1:v2:_) = v1 * 100 + v2
ghcDefine _ = __GLASGOW_HASKELL__
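    -- For example, with GHC 7.10.x this produces -D__GLASGOW_HASKELL__=710.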
installCHI :: PackageDescription -- ^information from the .cabal file
-> LocalBuildInfo -- ^information from the configure step
-> Verbosity -> CopyDest -- ^flags sent to copy or install
-> IO ()
installCHI pkg@PD.PackageDescription { library = Just lib } lbi verbosity copydest = do
let InstallDirs { libdir = libPref } = absoluteInstallDirs pkg lbi copydest
-- cannot use the recommended 'findModuleFiles' since it fails if there exists
  -- a module that does not have a .chi file
mFiles <- mapM (findFileWithExtension' ["chi"] [buildDir lbi] . toFilePath)
(PD.libModules lib)
let files = [ f | Just f <- mFiles ]
installOrdinaryFiles verbosity libPref files
installCHI _ _ _ _ = return ()
------------------------------------------------------------------------------
-- Generating the type hierarchy and signal callback .hs files.
------------------------------------------------------------------------------
typeGenProgram :: Program
typeGenProgram = simpleProgram "gtk2hsTypeGen"
signalGenProgram :: Program
signalGenProgram = simpleProgram "gtk2hsHookGenerator"
c2hsLocal :: Program
c2hsLocal = (simpleProgram "gtk2hsC2hs") {
programFindVersion = findProgramVersion "--version" $ \str ->
-- Invoking "gtk2hsC2hs --version" gives a string like:
-- C->Haskell Compiler, version 0.13.4 (gtk2hs branch) "Bin IO", 13 Nov 2004
case words str of
(_:_:_:ver:_) -> ver
_ -> ""
}
genSynthezisedFiles :: Verbosity -> PackageDescription -> LocalBuildInfo -> IO ()
genSynthezisedFiles verb pd lbi = do
cPkgs <- getPkgConfigPackages verb lbi pd
let xList = maybe [] (customFieldsBI . libBuildInfo) (library pd)
++customFieldsPD pd
typeOpts :: String -> [ProgArg]
typeOpts tag = concat [ map (\val -> '-':'-':drop (length tag) field++'=':val) (words content)
| (field,content) <- xList,
tag `isPrefixOf` field,
field /= (tag++"file")]
++ [ "--tag=" ++ tag
| PackageIdentifier name (Version (major:minor:_) _) <- cPkgs
, let name' = filter isAlpha (display name)
, tag <- name'
: [ name' ++ "-" ++ show major ++ "." ++ show digit
| digit <- [0,2..minor] ]
]
signalsOpts :: [ProgArg]
signalsOpts = concat [ map (\val -> '-':'-':drop 10 field++'=':val) (words content)
| (field,content) <- xList,
"x-signals-" `isPrefixOf` field,
field /= "x-signals-file"]
genFile :: Program -> [ProgArg] -> FilePath -> IO ()
genFile prog args outFile = do
res <- rawSystemProgramStdoutConf verb prog (withPrograms lbi) args
rewriteFile outFile res
forM_ (filter (\(tag,_) -> "x-types-" `isPrefixOf` tag && "file" `isSuffixOf` tag) xList) $
\(fileTag, f) -> do
let tag = reverse (drop 4 (reverse fileTag))
info verb ("Ensuring that class hierarchy in "++f++" is up-to-date.")
genFile typeGenProgram (typeOpts tag) f
case lookup "x-signals-file" xList of
Nothing -> return ()
Just f -> do
info verb ("Ensuring that callback hooks in "++f++" are up-to-date.")
genFile signalGenProgram signalsOpts f
writeFile "gtk2hs_macros.h" $ generateMacros cPkgs
-- Based on Cabal/Distribution/Simple/Build/Macros.hs
generateMacros :: [PackageId] -> String
generateMacros cPkgs = concat $
"/* DO NOT EDIT: This file is automatically generated by Gtk2HsSetup.hs */\n\n" :
[ concat
["/* package ",display pkgid," */\n"
,"#define VERSION_",pkgname," ",show (display version),"\n"
,"#define MIN_VERSION_",pkgname,"(major1,major2,minor) (\\\n"
," (major1) < ",major1," || \\\n"
," (major1) == ",major1," && (major2) < ",major2," || \\\n"
," (major1) == ",major1," && (major2) == ",major2," && (minor) <= ",minor,")"
,"\n\n"
]
| pkgid@(PackageIdentifier name version) <- cPkgs
, let (major1:major2:minor:_) = map show (versionBranch version ++ repeat 0)
pkgname = map fixchar (display name)
]
where fixchar '-' = '_'
fixchar '.' = '_'
fixchar c = c
--FIXME: Cabal should tell us the selected pkg-config package versions in the
-- LocalBuildInfo or equivalent.
-- In the mean time, ask pkg-config again.
getPkgConfigPackages :: Verbosity -> LocalBuildInfo -> PackageDescription -> IO [PackageId]
getPkgConfigPackages verbosity lbi pkg =
sequence
[ do version <- pkgconfig ["--modversion", display pkgname]
case simpleParse version of
Nothing -> die "parsing output of pkg-config --modversion failed"
Just v -> return (PackageIdentifier pkgname v)
| Dependency pkgname _ <- concatMap pkgconfigDepends (allBuildInfo pkg) ]
where
pkgconfig = rawSystemProgramStdoutConf verbosity
pkgConfigProgram (withPrograms lbi)
------------------------------------------------------------------------------
-- Dependency calculation amongst .chs files.
------------------------------------------------------------------------------
-- Given all files of the package, find those that end in .chs and extract the
-- .chs files they depend upon. Then return the PackageDescription with these
-- files rearranged so that files which are needed by other files are built first.
fixDeps :: PackageDescription -> IO PackageDescription
fixDeps pd@PD.PackageDescription {
    PD.library = Just lib@PD.Library {
PD.exposedModules = expMods,
        PD.libBuildInfo = bi@PD.BuildInfo {
PD.hsSourceDirs = srcDirs,
PD.otherModules = othMods
}}} = do
let findModule m = findFileWithExtension [".chs.pp",".chs"] srcDirs
(joinPath (components m))
mExpFiles <- mapM findModule expMods
mOthFiles <- mapM findModule othMods
-- tag all exposed files with True so we throw an error if we need to build
  -- an exposed module before an internal module (we cannot express this)
let modDeps = zipWith (ModDep True []) expMods mExpFiles++
zipWith (ModDep False []) othMods mOthFiles
modDeps <- mapM extractDeps modDeps
let (expMods, othMods) = span mdExposed $ sortTopological modDeps
badOther = map (fromMaybe "<no file>" . mdLocation) $
filter (not . mdExposed) expMods
unless (null badOther) $
die ("internal chs modules "++intercalate "," badOther++
" depend on exposed chs modules; cabal needs to build internal modules first")
return pd { PD.library = Just lib {
PD.exposedModules = map mdOriginal expMods,
PD.libBuildInfo = bi { PD.otherModules = map mdOriginal othMods }
}}
data ModDep = ModDep {
mdExposed :: Bool,
mdRequires :: [ModuleName],
mdOriginal :: ModuleName,
mdLocation :: Maybe FilePath
}
instance Show ModDep where
show x = show (mdLocation x)
instance Eq ModDep where
ModDep { mdOriginal = m1 } == ModDep { mdOriginal = m2 } = m1==m2
instance Ord ModDep where
compare ModDep { mdOriginal = m1 } ModDep { mdOriginal = m2 } = compare m1 m2
-- Extract the dependencies of this file. This is intentionally rather naive as it
-- ignores CPP conditionals. We just require everything, which means that the
-- existence of a .chs module may not depend on some CPP condition.
extractDeps :: ModDep -> IO ModDep
extractDeps md@ModDep { mdLocation = Nothing } = return md
extractDeps md@ModDep { mdLocation = Just f } = withUTF8FileContents f $ \con -> do
let findImports acc (('{':'#':xs):xxs) = case (dropWhile (' ' ==) xs) of
('i':'m':'p':'o':'r':'t':' ':ys) ->
case simpleParse (takeWhile ('#' /=) ys) of
Just m -> findImports (m:acc) xxs
Nothing -> die ("cannot parse chs import in "++f++":\n"++
"offending line is {#"++xs)
-- no more imports after the first non-import hook
_ -> return acc
findImports acc (_:xxs) = findImports acc xxs
findImports acc [] = return acc
mods <- findImports [] (lines con)
return md { mdRequires = mods }
-- Find a total order of the set of modules that are partially ordered by their
-- dependencies on each other. The function returns the sorted list of modules;
-- modules that are required but not present in the input set are ignored.
sortTopological :: [ModDep] -> [ModDep]
sortTopological ms = reverse $ fst $ foldl visit ([], S.empty) (map mdOriginal ms)
where
set = M.fromList (map (\m -> (mdOriginal m, m)) ms)
visit (out,visited) m
| m `S.member` visited = (out,visited)
| otherwise = case m `M.lookup` set of
Nothing -> (out, m `S.insert` visited)
Just md -> (md:out', visited')
where
(out',visited') = foldl visit (out, m `S.insert` visited) (mdRequires md)
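-- Schematic example (module names only, other fields elided): with input
-- [B requiring A, A requiring nothing], the result lists A's entry before
-- B's, so that dependencies are built first.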
-- Check whether the user has installed gtk2hs-buildtools correctly.
checkGtk2hsBuildtools :: [Program] -> IO ()
checkGtk2hsBuildtools programs = do
programInfos <- mapM (\ prog -> do
location <- onDefaultSearchPath programFindLocation prog normal
return (programName prog, location)
) programs
let printError name = do
putStrLn $ "Cannot find " ++ name ++ "\n"
++ "Please install `gtk2hs-buildtools` first and check that the install directory is in your PATH (e.g. HOME/.cabal/bin)."
exitFailure
forM_ programInfos $ \ (name, location) ->
when (isNothing location) (printError name)
| keithodulaigh/Hets | glade-0.12.5.0/Gtk2HsSetup.hs | gpl-2.0 | 21,148 | 0 | 23 | 5,392 | 4,960 | 2,644 | 2,316 | -1 | -1 |
{-# LANGUAGE BangPatterns, DeriveDataTypeable, DeriveGeneric, FlexibleInstances, MultiParamTypeClasses, OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module Network.Riak.Protocol.TsPutResponse (TsPutResponse(..)) where
import Prelude ((+), (/), (++), (.))
import qualified Prelude as Prelude'
import qualified Data.Typeable as Prelude'
import qualified GHC.Generics as Prelude'
import qualified Data.Data as Prelude'
import qualified Text.ProtocolBuffers.Header as P'
data TsPutResponse = TsPutResponse{}
deriving (Prelude'.Show, Prelude'.Eq, Prelude'.Ord, Prelude'.Typeable, Prelude'.Data, Prelude'.Generic)
instance P'.Mergeable TsPutResponse where
mergeAppend TsPutResponse TsPutResponse = TsPutResponse
instance P'.Default TsPutResponse where
defaultValue = TsPutResponse
instance P'.Wire TsPutResponse where
wireSize ft' self'@(TsPutResponse)
= case ft' of
10 -> calc'Size
11 -> P'.prependMessageSize calc'Size
_ -> P'.wireSizeErr ft' self'
where
calc'Size = 0
wirePutWithSize ft' self'@(TsPutResponse)
= case ft' of
10 -> put'Fields
11 -> put'FieldsSized
_ -> P'.wirePutErr ft' self'
where
put'Fields = P'.sequencePutWithSize []
put'FieldsSized
= let size' = Prelude'.fst (P'.runPutM put'Fields)
put'Size
= do
P'.putSize size'
Prelude'.return (P'.size'WireSize size')
in P'.sequencePutWithSize [put'Size, put'Fields]
wireGet ft'
= case ft' of
10 -> P'.getBareMessageWith (P'.catch'Unknown' P'.discardUnknown update'Self)
11 -> P'.getMessageWith (P'.catch'Unknown' P'.discardUnknown update'Self)
_ -> P'.wireGetErr ft'
where
update'Self wire'Tag old'Self
= case wire'Tag of
_ -> let (field'Number, wire'Type) = P'.splitWireTag wire'Tag in P'.unknown field'Number wire'Type old'Self
instance P'.MessageAPI msg' (msg' -> TsPutResponse) TsPutResponse where
getVal m' f' = f' m'
instance P'.GPB TsPutResponse
instance P'.ReflectDescriptor TsPutResponse where
getMessageInfo _ = P'.GetMessageInfo (P'.fromDistinctAscList []) (P'.fromDistinctAscList [])
reflectDescriptorInfo _
= Prelude'.read
"DescriptorInfo {descName = ProtoName {protobufName = FIName \".Protocol.TsPutResponse\", haskellPrefix = [MName \"Network\",MName \"Riak\"], parentModule = [MName \"Protocol\"], baseName = MName \"TsPutResponse\"}, descFilePath = [\"Network\",\"Riak\",\"Protocol\",\"TsPutResponse.hs\"], isGroup = False, fields = fromList [], descOneofs = fromList [], keys = fromList [], extRanges = [], knownKeys = fromList [], storeUnknown = False, lazyFields = False, makeLenses = False, jsonInstances = False}"
instance P'.TextType TsPutResponse where
tellT = P'.tellSubMessage
getT = P'.getSubMessage
instance P'.TextMsg TsPutResponse where
textPut msg = Prelude'.return ()
textGet = Prelude'.return P'.defaultValue
where | tmcgilchrist/riak-haskell-client | protobuf/src/Network/Riak/Protocol/TsPutResponse.hs | apache-2.0 | 3,024 | 1 | 17 | 600 | 648 | 344 | 304 | 58 | 0 |
-- Test for https://gitlab.haskell.org/ghc/ghc/issues/2533
import System.Environment
import Data.List
main = do
(n:_) <- getArgs
print (genericTake (read n) "none taken")
print (genericDrop (read n) "none dropped")
print (genericSplitAt (read n) "none split")
| sdiehl/ghc | libraries/base/tests/genericNegative001.hs | bsd-3-clause | 264 | 0 | 11 | 36 | 91 | 44 | 47 | 7 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE PackageImports #-}
module Main where
import Gauge.Main
import Crypto.Hash.Algorithms as Crypto
import "cryptonite" Crypto.KDF.PBKDF2 as Crypto
import "fastpbkdf2" Crypto.KDF.PBKDF2 as Fast
import Data.ByteString as B
password :: ByteString
password = "password"
salt :: ByteString
salt = "salt"
runBench :: Int
-> (ByteString -> ByteString -> ByteString)
-> (ByteString -> ByteString -> ByteString)
-> (ByteString -> ByteString -> ByteString)
-> Benchmark
runBench iter cryptonite fastCryptonite fastBinding =
bgroup (show iter)
[ bench "cryptonite" $ whnf (cryptonite password) salt
, bench "cryptonite-fast" $ whnf (fastCryptonite password) salt
, bench "fastpbkdf2-hs" $ whnf (fastBinding password) salt
]
makeBench :: (Parameters -> ByteString -> ByteString -> ByteString)
-> (Parameters -> ByteString -> ByteString -> ByteString)
-> (ByteString -> ByteString -> Int -> Int -> ByteString)
-> [Benchmark]
makeBench cryptonite fastCryptonite fastBinding =
[ runBench 1
(cryptonite (Parameters 1 32))
(fastCryptonite (Parameters 1 32))
(\p s -> fastBinding p s 1 32)
, runBench 10000
(cryptonite (Parameters 10000 32))
(fastCryptonite (Parameters 10000 32))
(\p s -> fastBinding p s 10000 32)
]
main :: IO ()
main = defaultMain
[ bgroup "SHA1" $ makeBench
(Crypto.generate (Crypto.prfHMAC Crypto.SHA1))
(Crypto.fastPBKDF2_SHA1)
(Fast.fastpbkdf2_hmac_sha1)
, bgroup "SHA256" $ makeBench
(Crypto.generate (Crypto.prfHMAC Crypto.SHA256))
(Crypto.fastPBKDF2_SHA256)
(Fast.fastpbkdf2_hmac_sha256)
, bgroup "SHA512" $ makeBench
(Crypto.generate (Crypto.prfHMAC Crypto.SHA512))
(Crypto.fastPBKDF2_SHA512)
(Fast.fastpbkdf2_hmac_sha512)
]
| vincenthz/cryptonite | benchs/PBKDF2.hs | bsd-3-clause | 1,981 | 0 | 13 | 480 | 559 | 298 | 261 | 50 | 1 |
{- |
This module allows you to use general literals like 'left' and 'center',
 and the overloading will resolve to the appropriate type.
-}
module Graphics.Storyboard.Literals where
-----------------------------------------------------------------------------
-- Short cut literal/DSL-keyword classes
class LR a where
left :: a
right :: a
class TB a where
top :: a
bottom :: a
class Center a where
center :: a
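-- For example (illustrative): the literal 'left' resolves to 'L' where a
-- 'Side' is expected, to 'HL' for a 'Horizontal', and to 'JustLeft' for an
-- 'Alignment', according to the instances below.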
-----------------------------------------------------------------------------
data Side = T | B | L | R
deriving (Eq,Ord,Show)
instance LR Side where
left = L
right = R
instance TB Side where
top = T
bottom = B
-----------------------------------------------------------------------------
data Vertical = VT | VC | VB
deriving (Eq,Ord,Show)
instance TB Vertical where
top = VT
bottom = VB
instance Center Vertical where
center = VC
-----------------------------------------------------------------------------
data Horizontal = HL | HC | HR
deriving (Eq,Ord,Show)
instance LR Horizontal where
left = HL
right = HR
instance Center Horizontal where
center = HC
-----------------------------------------------------------------------------
-- http://en.wikipedia.org/wiki/Typographic_alignment
data Alignment = JustLeft | JustCenter | JustRight | Justified | Truncated
deriving (Eq,Ord,Show)
instance LR Alignment where
left = JustLeft
right = JustRight
instance Center Alignment where
center = JustCenter
justified :: Alignment
justified = Justified
truncated :: Alignment
truncated = Truncated
| tonymorris/story-board | src/Graphics/Storyboard/Literals.hs | bsd-3-clause | 1,585 | 0 | 6 | 279 | 337 | 193 | 144 | 42 | 1 |
module Main where
import Test.DocTest
main :: IO ()
main = doctest [
"-idist/build/autogen/"
, "-optP-include"
, "-optPdist/build/autogen/cabal_macros.h"
, "Network/Wai/Handler/Warp.hs"
]
| beni55/wai | warp/test/doctests.hs | mit | 202 | 0 | 6 | 35 | 39 | 23 | 16 | 8 | 1 |
module LetIn1 where
--A definition can be lifted from a where or let into the surrounding binding group.
--Lifting a definition widens the scope of the definition.
--In this example, lift 'sq' in 'sumSquares'
--This example aims to test lifting a definition from a let clause to a where clause,
--and the elimination of the keywords 'let' and 'in'
sumSquares x y = sq x + sq y
where pow=2
sq 0=0
sq z=z^pow
anotherFun 0 y = sq y
where sq x = x^2
| kmate/HaRe | old/testing/liftOneLevel/LetIn1_TokOut.hs | bsd-3-clause | 575 | 0 | 7 | 215 | 83 | 44 | 39 | 7 | 2 |
{-# LANGUAGE OverloadedStrings, FlexibleInstances #-}
{-# LANGUAGE LambdaCase #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Documentation.Haddock.ParserSpec (main, spec) where
import Data.String
import qualified Documentation.Haddock.Parser as Parse
import Documentation.Haddock.Types
import Documentation.Haddock.Doc (docAppend)
import Test.Hspec
import Test.QuickCheck
infixr 6 <>
(<>) :: Doc id -> Doc id -> Doc id
(<>) = docAppend
type Doc id = DocH () id
instance IsString (Doc String) where
fromString = DocString
instance IsString a => IsString (Maybe a) where
fromString = Just . fromString
parseParas :: String -> MetaDoc () String
parseParas = overDoc Parse.toRegular . Parse.parseParas
parseString :: String -> Doc String
parseString = Parse.toRegular . Parse.parseString
hyperlink :: String -> Maybe String -> Doc String
hyperlink url = DocHyperlink . Hyperlink url
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "parseString" $ do
let infix 1 `shouldParseTo`
shouldParseTo :: String -> Doc String -> Expectation
shouldParseTo input ast = parseString input `shouldBe` ast
it "is total" $ do
property $ \xs ->
(length . show . parseString) xs `shouldSatisfy` (> 0)
context "when parsing text" $ do
it "can handle unicode" $ do
"灼眼のシャナ" `shouldParseTo` "灼眼のシャナ"
it "accepts numeric character references" $ do
"foo bar baz λ" `shouldParseTo` "foo bar baz λ"
it "accepts hexadecimal character references" $ do
"e" `shouldParseTo` "e"
it "allows to backslash-escape characters except \\r" $ do
property $ \case
'\r' -> "\\\r" `shouldParseTo` DocString "\\"
x -> ['\\', x] `shouldParseTo` DocString [x]
context "when parsing strings contaning numeric character references" $ do
it "will implicitly convert digits to characters" $ do
"AAAA" `shouldParseTo` "AAAA"
"灼眼のシャナ"
`shouldParseTo` "灼眼のシャナ"
it "will implicitly convert hex encoded characters" $ do
"eeee" `shouldParseTo` "eeee"
context "when parsing identifiers" $ do
it "parses identifiers enclosed within single ticks" $ do
"'foo'" `shouldParseTo` DocIdentifier "foo"
it "parses identifiers enclosed within backticks" $ do
"`foo`" `shouldParseTo` DocIdentifier "foo"
it "parses a word with an one of the delimiters in it as DocString" $ do
"don't" `shouldParseTo` "don't"
it "doesn't pass pairs of delimiters with spaces between them" $ do
"hel'lo w'orld" `shouldParseTo` "hel'lo w'orld"
it "don't use apostrophe's in the wrong place's" $ do
" don't use apostrophe's in the wrong place's" `shouldParseTo`
"don't use apostrophe's in the wrong place's"
it "doesn't parse empty identifiers" $ do
"``" `shouldParseTo` "``"
it "can parse infix identifiers" $ do
"``infix``" `shouldParseTo` "`" <> DocIdentifier "infix" <> "`"
context "when parsing URLs" $ do
it "parses a URL" $ do
"<http://example.com/>" `shouldParseTo` hyperlink "http://example.com/" Nothing
it "accepts an optional label" $ do
"<http://example.com/ some link>" `shouldParseTo` hyperlink "http://example.com/" "some link"
it "does not accept newlines in label" $ do
"<foo bar\nbaz>" `shouldParseTo` "<foo bar\nbaz>"
-- new behaviour test, this will be now consistent with other markup
it "allows us to escape > inside the URL" $ do
"<http://examp\\>le.com>" `shouldParseTo`
hyperlink "http://examp>le.com" Nothing
"<http://exa\\>mp\\>le.com>" `shouldParseTo`
hyperlink "http://exa>mp>le.com" Nothing
-- Likewise in label
"<http://example.com f\\>oo>" `shouldParseTo`
hyperlink "http://example.com" "f>oo"
it "parses inline URLs" $ do
"foo <http://example.com/> bar" `shouldParseTo`
"foo " <> hyperlink "http://example.com/" Nothing <> " bar"
it "doesn't allow for multi-line link tags" $ do
"<ba\nz aar>" `shouldParseTo` "<ba\nz aar>"
context "when parsing markdown links" $ do
it "parses a simple link" $ do
"[some label](url)" `shouldParseTo`
hyperlink "url" "some label"
it "allows whitespace between label and URL" $ do
"[some label] \t (url)" `shouldParseTo`
hyperlink "url" "some label"
it "allows newlines in label" $ do
"[some\n\nlabel](url)" `shouldParseTo`
hyperlink "url" "some\n\nlabel"
it "allows escaping in label" $ do
"[some\\] label](url)" `shouldParseTo`
hyperlink "url" "some] label"
it "strips leading and trailing whitespace from label" $ do
"[ some label ](url)" `shouldParseTo`
hyperlink "url" "some label"
it "rejects whitespace in URL" $ do
"[some label]( url)" `shouldParseTo`
"[some label]( url)"
context "when URL is on a separate line" $ do
it "allows URL to be on a separate line" $ do
"[some label]\n(url)" `shouldParseTo`
hyperlink "url" "some label"
it "allows leading whitespace" $ do
"[some label]\n \t (url)" `shouldParseTo`
hyperlink "url" "some label"
it "rejects additional newlines" $ do
"[some label]\n\n(url)" `shouldParseTo`
"[some label]\n\n(url)"
context "when autolinking URLs" $ do
it "autolinks HTTP URLs" $ do
"http://example.com/" `shouldParseTo` hyperlink "http://example.com/" Nothing
it "autolinks HTTPS URLs" $ do
"https://www.example.com/" `shouldParseTo` hyperlink "https://www.example.com/" Nothing
it "autolinks FTP URLs" $ do
"ftp://example.com/" `shouldParseTo` hyperlink "ftp://example.com/" Nothing
it "does not include a trailing comma" $ do
"http://example.com/, Some other sentence." `shouldParseTo`
hyperlink "http://example.com/" Nothing <> ", Some other sentence."
it "does not include a trailing dot" $ do
"http://example.com/. Some other sentence." `shouldParseTo`
hyperlink "http://example.com/" Nothing <> ". Some other sentence."
it "does not include a trailing exclamation mark" $ do
"http://example.com/! Some other sentence." `shouldParseTo`
hyperlink "http://example.com/" Nothing <> "! Some other sentence."
it "does not include a trailing question mark" $ do
"http://example.com/? Some other sentence." `shouldParseTo`
hyperlink "http://example.com/" Nothing <> "? Some other sentence."
it "autolinks URLs occuring mid-sentence with multiple ‘/’s" $ do
"foo https://example.com/example bar" `shouldParseTo`
"foo " <> hyperlink "https://example.com/example" Nothing <> " bar"
context "when parsing images" $ do
let image :: String -> Maybe String -> Doc String
image uri = DocPic . Picture uri
it "accepts markdown syntax for images" $ do
"" `shouldParseTo` image "url" "label"
it "accepts Unicode" $ do
"" `shouldParseTo` image "url" "灼眼のシャナ"
it "supports deprecated picture syntax" $ do
"<<baz>>" `shouldParseTo` image "baz" Nothing
it "supports title for deprecated picture syntax" $ do
"<<b a z>>" `shouldParseTo` image "b" "a z"
context "when parsing anchors" $ do
it "parses a single word anchor" $ do
"#foo#" `shouldParseTo` DocAName "foo"
it "parses a multi word anchor" $ do
"#foo bar#" `shouldParseTo` DocAName "foo bar"
it "parses a unicode anchor" $ do
"#灼眼のシャナ#" `shouldParseTo` DocAName "灼眼のシャナ"
it "does not accept newlines in anchors" $ do
"#foo\nbar#" `shouldParseTo` "#foo\nbar#"
it "accepts anchors mid-paragraph" $ do
"Hello #someAnchor# world!"
`shouldParseTo` "Hello " <> DocAName "someAnchor" <> " world!"
it "does not accept empty anchors" $ do
"##" `shouldParseTo` "##"
context "when parsing emphasised text" $ do
it "emphasises a word on its own" $ do
"/foo/" `shouldParseTo` DocEmphasis "foo"
it "emphasises inline correctly" $ do
"foo /bar/ baz" `shouldParseTo` "foo " <> DocEmphasis "bar" <> " baz"
it "emphasises unicode" $ do
"/灼眼のシャナ/" `shouldParseTo` DocEmphasis "灼眼のシャナ"
it "does not emphasise multi-line strings" $ do
" /foo\nbar/" `shouldParseTo` "/foo\nbar/"
it "does not emphasise the empty string" $ do
"//" `shouldParseTo` "//"
it "parses escaped slashes literally" $ do
"/foo\\/bar/" `shouldParseTo` DocEmphasis "foo/bar"
it "recognizes other markup constructs within emphasised text" $ do
"/foo @bar@ baz/" `shouldParseTo`
DocEmphasis ("foo " <> DocMonospaced "bar" <> " baz")
it "allows other markup inside of emphasis" $ do
"/__inner bold__/" `shouldParseTo` DocEmphasis (DocBold "inner bold")
it "doesn't mangle inner markup unicode" $ do
"/__灼眼のシャナ A__/" `shouldParseTo` DocEmphasis (DocBold "灼眼のシャナ A")
it "properly converts HTML escape sequences" $ do
"/AAAA/" `shouldParseTo` DocEmphasis "AAAA"
it "allows to escape the emphasis delimiter inside of emphasis" $ do
"/empha\\/sis/" `shouldParseTo` DocEmphasis "empha/sis"
context "when parsing monospaced text" $ do
it "parses simple monospaced text" $ do
"@foo@" `shouldParseTo` DocMonospaced "foo"
it "parses inline monospaced text" $ do
"foo @bar@ baz" `shouldParseTo` "foo " <> DocMonospaced "bar" <> " baz"
it "allows to escape @" $ do
"@foo \\@ bar@" `shouldParseTo` DocMonospaced "foo @ bar"
it "accepts unicode" $ do
"@foo 灼眼のシャナ bar@" `shouldParseTo` DocMonospaced "foo 灼眼のシャナ bar"
it "accepts other markup in monospaced text" $ do
"@/foo/@" `shouldParseTo` DocMonospaced (DocEmphasis "foo")
it "requires the closing @" $ do
"@foo /bar/ baz" `shouldParseTo` "@foo " <> DocEmphasis "bar" <> " baz"
context "when parsing bold strings" $ do
it "allows for a bold string on its own" $ do
"__bold string__" `shouldParseTo`
DocBold "bold string"
it "bolds inline correctly" $ do
"hello __everyone__ there" `shouldParseTo`
"hello "
<> DocBold "everyone" <> " there"
it "bolds unicode" $ do
"__灼眼のシャナ__" `shouldParseTo`
DocBold "灼眼のシャナ"
it "does not do __multi-line\\n bold__" $ do
" __multi-line\n bold__" `shouldParseTo` "__multi-line\n bold__"
it "allows other markup inside of bold" $ do
"__/inner emphasis/__" `shouldParseTo`
(DocBold $ DocEmphasis "inner emphasis")
it "doesn't mangle inner markup unicode" $ do
"__/灼眼のシャナ A/__" `shouldParseTo`
(DocBold $ DocEmphasis "灼眼のシャナ A")
it "properly converts HTML escape sequences" $ do
"__AAAA__" `shouldParseTo`
DocBold "AAAA"
it "allows to escape the bold delimiter inside of bold" $ do
"__bo\\__ld__" `shouldParseTo`
DocBold "bo__ld"
it "doesn't allow for empty bold" $ do
"____" `shouldParseTo` "____"
context "when parsing module strings" $ do
it "should parse a module on its own" $ do
"\"Module\"" `shouldParseTo`
DocModule "Module"
it "should parse a module inline" $ do
"This is a \"Module\"." `shouldParseTo`
"This is a " <> DocModule "Module" <> "."
it "can accept a simple module name" $ do
"\"Hello\"" `shouldParseTo` DocModule "Hello"
it "can accept a module name with dots" $ do
"\"Hello.World\"" `shouldParseTo` DocModule "Hello.World"
it "can accept a module name with unicode" $ do
"\"Hello.Worldλ\"" `shouldParseTo` DocModule "Hello.Worldλ"
it "parses a module name with a trailing dot as regular quoted string" $ do
"\"Hello.\"" `shouldParseTo` "\"Hello.\""
it "parses a module name with a space as regular quoted string" $ do
"\"Hello World\"" `shouldParseTo` "\"Hello World\""
it "parses a module name with invalid characters as regular quoted string" $ do
"\"Hello&[{}(=*)+]!\"" `shouldParseTo` "\"Hello&[{}(=*)+]!\""
it "accepts a module name with unicode" $ do
"\"Foo.Barλ\"" `shouldParseTo` DocModule "Foo.Barλ"
it "treats empty module name as regular double quotes" $ do
"\"\"" `shouldParseTo` "\"\""
it "accepts anchor reference syntax as DocModule" $ do
"\"Foo#bar\"" `shouldParseTo` DocModule "Foo#bar"
it "accepts old anchor reference syntax as DocModule" $ do
"\"Foo\\#bar\"" `shouldParseTo` DocModule "Foo\\#bar"
describe "parseParas" $ do
let infix 1 `shouldParseTo`
shouldParseTo :: String -> Doc String -> Expectation
shouldParseTo input ast = _doc (parseParas input) `shouldBe` ast
it "is total" $ do
property $ \xs ->
(length . show . parseParas) xs `shouldSatisfy` (> 0)
context "when parsing @since" $ do
it "adds specified version to the result" $ do
parseParas "@since 0.5.0" `shouldBe`
MetaDoc { _meta = Meta { _version = Just [0,5,0] }
, _doc = DocEmpty }
it "ignores trailing whitespace" $ do
parseParas "@since 0.5.0 \t " `shouldBe`
MetaDoc { _meta = Meta { _version = Just [0,5,0] }
, _doc = DocEmpty }
it "does not allow trailing input" $ do
parseParas "@since 0.5.0 foo" `shouldBe`
MetaDoc { _meta = Meta { _version = Nothing }
, _doc = DocParagraph "@since 0.5.0 foo" }
context "when given multiple times" $ do
it "gives last occurrence precedence" $ do
(parseParas . unlines) [
"@since 0.5.0"
, "@since 0.6.0"
, "@since 0.7.0"
] `shouldBe` MetaDoc { _meta = Meta { _version = Just [0,7,0] }
, _doc = DocEmpty }
context "when parsing text paragraphs" $ do
let filterSpecial = filter (`notElem` (".(=#-[*`\v\f\n\t\r\\\"'_/@<> " :: String))
it "parses an empty paragraph" $ do
"" `shouldParseTo` DocEmpty
it "parses a simple text paragraph" $ do
"foo bar baz" `shouldParseTo` DocParagraph "foo bar baz"
it "accepts markup in text paragraphs" $ do
"foo /bar/ baz" `shouldParseTo` DocParagraph ("foo " <> DocEmphasis "bar" <> " baz")
it "preserve all regular characters" $ do
property $ \xs -> let input = filterSpecial xs in (not . null) input ==>
input `shouldParseTo` DocParagraph (DocString input)
it "separates paragraphs by empty lines" $ do
unlines [
"foo"
, " \t "
, "bar"
] `shouldParseTo` DocParagraph "foo" <> DocParagraph "bar"
context "when a pragraph only contains monospaced text" $ do
it "turns it into a code block" $ do
"@foo@" `shouldParseTo` DocCodeBlock "foo"
context "when a paragraph starts with a markdown link" $ do
it "correctly parses it as a text paragraph (not a definition list)" $ do
"[label](url)" `shouldParseTo`
DocParagraph (hyperlink "url" "label")
it "can be followed by an other paragraph" $ do
"[label](url)\n\nfoobar" `shouldParseTo`
DocParagraph (hyperlink "url" "label") <> DocParagraph "foobar"
context "when paragraph contains additional text" $ do
it "accepts more text after the link" $ do
"[label](url) foo bar baz" `shouldParseTo`
DocParagraph (hyperlink "url" "label" <> " foo bar baz")
it "accepts a newline right after the markdown link" $ do
"[label](url)\nfoo bar baz" `shouldParseTo`
DocParagraph (hyperlink "url" "label" <> " foo bar baz")
it "can be followed by an other paragraph" $ do
"[label](url)foo\n\nbar" `shouldParseTo`
DocParagraph (hyperlink "url" "label" <> "foo") <> DocParagraph "bar"
context "when parsing birdtracks" $ do
it "parses them as a code block" $ do
unlines [
">foo"
, ">bar"
, ">baz"
] `shouldParseTo` DocCodeBlock "foo\nbar\nbaz"
it "ignores leading whitespace" $ do
unlines [
" >foo"
, " \t >bar"
, " >baz"
]
`shouldParseTo` DocCodeBlock "foo\nbar\nbaz"
it "strips one leading space from each line of the block" $ do
unlines [
"> foo"
, "> bar"
, "> baz"
] `shouldParseTo` DocCodeBlock "foo\n bar\nbaz"
it "ignores empty lines when stripping spaces" $ do
unlines [
"> foo"
, ">"
, "> bar"
] `shouldParseTo` DocCodeBlock "foo\n\nbar"
context "when any non-empty line does not start with a space" $ do
it "does not strip any spaces" $ do
unlines [
">foo"
, "> bar"
] `shouldParseTo` DocCodeBlock "foo\n bar"
it "ignores nested markup" $ do
unlines [
">/foo/"
] `shouldParseTo` DocCodeBlock "/foo/"
it "treats them as regular text inside text paragraphs" $ do
unlines [
"foo"
, ">bar"
] `shouldParseTo` DocParagraph "foo\n>bar"
context "when parsing code blocks" $ do
it "accepts a simple code block" $ do
unlines [
"@"
, "foo"
, "bar"
, "baz"
, "@"
] `shouldParseTo` DocCodeBlock "foo\nbar\nbaz\n"
it "ignores trailing whitespace after the opening @" $ do
unlines [
"@ "
, "foo"
, "@"
] `shouldParseTo` DocCodeBlock "foo\n"
it "rejects code blocks that are not closed" $ do
unlines [
"@"
, "foo"
] `shouldParseTo` DocParagraph "@\nfoo"
it "accepts nested markup" $ do
unlines [
"@"
, "/foo/"
, "@"
] `shouldParseTo` DocCodeBlock (DocEmphasis "foo" <> "\n")
it "allows to escape the @" $ do
unlines [
"@"
, "foo"
, "\\@"
, "bar"
, "@"
] `shouldParseTo` DocCodeBlock "foo\n@\nbar\n"
it "accepts horizontal space before the @" $ do
unlines [ " @"
, "foo"
, ""
, "bar"
, "@"
] `shouldParseTo` DocCodeBlock "foo\n\nbar\n"
it "strips a leading space from a @ block if present" $ do
unlines [ " @"
, " hello"
, " world"
, " @"
] `shouldParseTo` DocCodeBlock "hello\nworld\n"
unlines [ " @"
, " hello"
, ""
, " world"
, " @"
] `shouldParseTo` DocCodeBlock "hello\n\nworld\n"
it "only drops whitespace if there's some before closing @" $ do
unlines [ "@"
, " Formatting"
, " matters."
, "@"
]
`shouldParseTo` DocCodeBlock " Formatting\n matters.\n"
it "accepts unicode" $ do
"@foo 灼眼のシャナ bar@" `shouldParseTo` DocCodeBlock "foo 灼眼のシャナ bar"
it "requires the closing @" $ do
"@foo /bar/ baz"
`shouldParseTo` DocParagraph ("@foo " <> DocEmphasis "bar" <> " baz")
context "when parsing examples" $ do
it "parses a simple example" $ do
">>> foo" `shouldParseTo` DocExamples [Example "foo" []]
it "parses an example with result" $ do
unlines [
">>> foo"
, "bar"
, "baz"
] `shouldParseTo` DocExamples [Example "foo" ["bar", "baz"]]
it "parses consecutive examples" $ do
unlines [
">>> fib 5"
, "5"
, ">>> fib 10"
, "55"
] `shouldParseTo` DocExamples [
Example "fib 5" ["5"]
, Example "fib 10" ["55"]
]
it ("requires an example to be separated"
++ " from a previous paragraph by an empty line") $ do
"foobar\n\n>>> fib 10\n55" `shouldParseTo`
DocParagraph "foobar"
<> DocExamples [Example "fib 10" ["55"]]
it "parses bird-tracks inside of paragraphs as plain strings" $ do
let xs = "foo\n>>> bar"
xs `shouldParseTo` DocParagraph (DocString xs)
it "skips empty lines in front of an example" $ do
"\n \n\n>>> foo" `shouldParseTo` DocExamples [Example "foo" []]
it "terminates example on empty line" $ do
unlines [
">>> foo"
, "bar"
, " "
, "baz"
]
`shouldParseTo`
DocExamples [Example "foo" ["bar"]] <> DocParagraph "baz"
it "parses a <BLANKLINE> result as an empty result" $ do
unlines [
">>> foo"
, "bar"
, "<BLANKLINE>"
, "baz"
]
`shouldParseTo` DocExamples [Example "foo" ["bar", "", "baz"]]
it "accepts unicode in examples" $ do
">>> 灼眼\nシャナ" `shouldParseTo` DocExamples [Example "灼眼" ["シャナ"]]
context "when prompt is prefixed by whitespace" $ do
it "strips the exact same amount of whitespace from result lines" $ do
unlines [
" >>> foo"
, " bar"
, " baz"
] `shouldParseTo` DocExamples [Example "foo" ["bar", "baz"]]
it "preserves additional whitespace" $ do
unlines [
" >>> foo"
, " bar"
] `shouldParseTo` DocExamples [Example "foo" [" bar"]]
it "keeps original if stripping is not possible" $ do
unlines [
" >>> foo"
, " bar"
] `shouldParseTo` DocExamples [Example "foo" [" bar"]]
context "when parsing paragraphs nested in lists" $ do
it "can nest the same type of list" $ do
"* foo\n\n * bar" `shouldParseTo`
DocUnorderedList [ DocParagraph "foo"
<> DocUnorderedList [DocParagraph "bar"]]
it "can nest another type of list inside" $ do
"* foo\n\n 1. bar" `shouldParseTo`
DocUnorderedList [ DocParagraph "foo"
<> DocOrderedList [DocParagraph "bar"]]
it "can nest a code block inside" $ do
"* foo\n\n @foo bar baz@" `shouldParseTo`
DocUnorderedList [ DocParagraph "foo"
<> DocCodeBlock "foo bar baz"]
"* foo\n\n @\n foo bar baz\n @" `shouldParseTo`
DocUnorderedList [ DocParagraph "foo"
<> DocCodeBlock "foo bar baz\n"]
it "can nest more than one level" $ do
"* foo\n\n * bar\n\n * baz\n qux" `shouldParseTo`
DocUnorderedList [ DocParagraph "foo"
<> DocUnorderedList [ DocParagraph "bar"
<> DocUnorderedList [DocParagraph "baz\nqux"]
]
]
it "won't fail on not fully indented paragraph" $ do
"* foo\n\n * bar\n\n * qux\nquux" `shouldParseTo`
DocUnorderedList [ DocParagraph "foo"
<> DocUnorderedList [ DocParagraph "bar" ]
, DocParagraph "qux\nquux"]
it "can nest definition lists" $ do
"[a]: foo\n\n [b]: bar\n\n [c]: baz\n qux" `shouldParseTo`
DocDefList [ ("a", "foo"
<> DocDefList [ ("b", "bar"
<> DocDefList [("c", "baz\nqux")])
])
]
it "can come back to top level with a different list" $ do
"* foo\n\n * bar\n\n1. baz" `shouldParseTo`
DocUnorderedList [ DocParagraph "foo"
<> DocUnorderedList [ DocParagraph "bar" ]
]
<> DocOrderedList [ DocParagraph "baz" ]
it "allows arbitrary initial indent of a list" $ do
unlines
[ " * foo"
, " * bar"
, ""
, " * quux"
, ""
, " * baz"
]
`shouldParseTo`
DocUnorderedList
[ DocParagraph "foo"
, DocParagraph "bar"
<> DocUnorderedList [ DocParagraph "quux" ]
, DocParagraph "baz"
]
it "definition lists can come back to top level with a different list" $ do
"[foo]: foov\n\n [bar]: barv\n\n1. baz" `shouldParseTo`
DocDefList [ ("foo", "foov"
<> DocDefList [ ("bar", "barv") ])
]
<> DocOrderedList [ DocParagraph "baz" ]
it "list order is preserved in presence of nesting + extra text" $ do
"1. Foo\n\n > Some code\n\n2. Bar\n\nSome text"
`shouldParseTo`
DocOrderedList [ DocParagraph "Foo" <> DocCodeBlock "Some code"
, DocParagraph "Bar"
]
<> DocParagraph (DocString "Some text")
"1. Foo\n\n2. Bar\n\nSome text"
`shouldParseTo`
DocOrderedList [ DocParagraph "Foo"
, DocParagraph "Bar"
]
<> DocParagraph (DocString "Some text")
context "when parsing properties" $ do
it "can parse a single property" $ do
"prop> 23 == 23" `shouldParseTo` DocProperty "23 == 23"
it "can parse multiple subsequent properties" $ do
unlines [
"prop> 23 == 23"
, "prop> 42 == 42"
]
`shouldParseTo`
DocProperty "23 == 23" <> DocProperty "42 == 42"
it "accepts unicode in properties" $ do
"prop> 灼眼のシャナ ≡ 愛" `shouldParseTo`
DocProperty "灼眼のシャナ ≡ 愛"
it "can deal with whitespace before and after the prop> prompt" $ do
" prop> xs == (reverse $ reverse xs) " `shouldParseTo`
DocProperty "xs == (reverse $ reverse xs)"
context "when parsing unordered lists" $ do
it "parses a simple list" $ do
unlines [
" * one"
, " * two"
, " * three"
]
`shouldParseTo` DocUnorderedList [
DocParagraph "one"
, DocParagraph "two"
, DocParagraph "three"
]
it "ignores empty lines between list items" $ do
unlines [
"* one"
, ""
, "* two"
]
`shouldParseTo` DocUnorderedList [
DocParagraph "one"
, DocParagraph "two"
]
it "accepts an empty list item" $ do
"*" `shouldParseTo` DocUnorderedList [DocParagraph DocEmpty]
it "accepts multi-line list items" $ do
unlines [
"* point one"
, " more one"
, "* point two"
, "more two"
]
`shouldParseTo` DocUnorderedList [
DocParagraph "point one\n more one"
, DocParagraph "point two\nmore two"
]
it "accepts markup in list items" $ do
"* /foo/" `shouldParseTo` DocUnorderedList [DocParagraph (DocEmphasis "foo")]
it "requires empty lines between list and other paragraphs" $ do
unlines [
"foo"
, ""
, "* bar"
, ""
, "baz"
]
`shouldParseTo` DocParagraph "foo" <> DocUnorderedList [DocParagraph "bar"] <> DocParagraph "baz"
context "when parsing ordered lists" $ do
it "parses a simple list" $ do
unlines [
" 1. one"
, " (1) two"
, " 3. three"
]
`shouldParseTo` DocOrderedList [
DocParagraph "one"
, DocParagraph "two"
, DocParagraph "three"
]
it "ignores empty lines between list items" $ do
unlines [
"1. one"
, ""
, "2. two"
]
`shouldParseTo` DocOrderedList [
DocParagraph "one"
, DocParagraph "two"
]
it "accepts an empty list item" $ do
"1." `shouldParseTo` DocOrderedList [DocParagraph DocEmpty]
it "accepts multi-line list items" $ do
unlines [
"1. point one"
, " more one"
, "1. point two"
, "more two"
]
`shouldParseTo` DocOrderedList [
DocParagraph "point one\n more one"
, DocParagraph "point two\nmore two"
]
it "accepts markup in list items" $ do
"1. /foo/" `shouldParseTo` DocOrderedList [DocParagraph (DocEmphasis "foo")]
it "requires empty lines between list and other paragraphs" $ do
unlines [
"foo"
, ""
, "1. bar"
, ""
, "baz"
]
`shouldParseTo` DocParagraph "foo" <> DocOrderedList [DocParagraph "bar"] <> DocParagraph "baz"
context "when parsing definition lists" $ do
it "parses a simple list" $ do
unlines [
" [foo]: one"
, " [bar]: two"
, " [baz]: three"
]
`shouldParseTo` DocDefList [
("foo", "one")
, ("bar", "two")
, ("baz", "three")
]
it "ignores empty lines between list items" $ do
unlines [
"[foo]: one"
, ""
, "[bar]: two"
]
`shouldParseTo` DocDefList [
("foo", "one")
, ("bar", "two")
]
it "accepts an empty list item" $ do
"[foo]:" `shouldParseTo` DocDefList [("foo", DocEmpty)]
it "accepts multi-line list items" $ do
unlines [
"[foo]: point one"
, " more one"
, "[bar]: point two"
, "more two"
]
`shouldParseTo` DocDefList [
("foo", "point one\n more one")
, ("bar", "point two\nmore two")
]
it "accepts markup in list items" $ do
"[foo]: /foo/" `shouldParseTo` DocDefList [("foo", DocEmphasis "foo")]
it "accepts markup for the label" $ do
"[/foo/]: bar" `shouldParseTo` DocDefList [(DocEmphasis "foo", "bar")]
it "requires empty lines between list and other paragraphs" $ do
unlines [
"foo"
, ""
, "[foo]: bar"
, ""
, "baz"
]
`shouldParseTo` DocParagraph "foo" <> DocDefList [("foo", "bar")] <> DocParagraph "baz"
it "dose not require the colon (deprecated - this will be removed in a future release)" $ do
unlines [
" [foo] one"
, " [bar] two"
, " [baz] three"
]
`shouldParseTo` DocDefList [
("foo", "one")
, ("bar", "two")
, ("baz", "three")
]
context "when parsing consecutive paragraphs" $ do
it "will not capture irrelevant consecutive lists" $ do
unlines [ " * bullet"
, ""
, ""
, " - different bullet"
, ""
, ""
, " (1) ordered"
, " "
, " 2. different bullet"
, " "
, " [cat]: kitten"
, " "
, " [pineapple]: fruit"
] `shouldParseTo`
DocUnorderedList [ DocParagraph "bullet"
, DocParagraph "different bullet"]
<> DocOrderedList [ DocParagraph "ordered"
, DocParagraph "different bullet"
]
<> DocDefList [ ("cat", "kitten")
, ("pineapple", "fruit")
]
context "when parsing function documentation headers" $ do
it "can parse a simple header" $ do
"= Header 1\nHello." `shouldParseTo`
(DocHeader (Header 1 "Header 1"))
<> DocParagraph "Hello."
it "allow consecutive headers" $ do
"= Header 1\n== Header 2" `shouldParseTo`
DocHeader (Header 1 "Header 1")
<> DocHeader (Header 2 "Header 2")
it "accepts markup in the header" $ do
"= /Header/ __1__\nFoo" `shouldParseTo`
DocHeader (Header 1 (DocEmphasis "Header" <> " " <> DocBold "1"))
<> DocParagraph "Foo"
| adamse/haddock | haddock-library/test/Documentation/Haddock/ParserSpec.hs | bsd-2-clause | 33,574 | 0 | 27 | 11,627 | 6,528 | 3,190 | 3,338 | 730 | 2 |
module ParseCertAttrs(parseCertAttr,CertAttrs) where
import CertAttrs
import OneLineAttrs
import Monad(mplus)
import MUtils
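-- | Parse a single one-line attribute description. The attributes must
-- contain a "name" and a "type"; the "label" falls back to the "name", and
-- the "default" value is optional.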
parseCertAttr :: String -> Maybe (Name,CertAttr)
parseCertAttr s = certAttrs =<< parseOneLineAttrs s
certAttrs attrs = (,) # attr "name" <# certAttrsFields
where
certAttrsFields = Attr # atype <# label <# return adefault
atype = atypeP =<< attr "type"
label = attr "label" `mplus` attr "name"
adefault = attr "default"
atypeP s =
case break (=='/') s of
("string","/file") -> return File
("string",_) -> return String
("nat","") -> return Nat
("bool","") -> return Bool
_ -> fail $ "Unknown attribute type: "++s
-- Error message is lost when using the Maybe monad.
attr a = lookup a attrs
| forste/haReFork | tools/property/pfe/ParseCertAttrs.hs | bsd-3-clause | 761 | 1 | 11 | 157 | 252 | 134 | 118 | 20 | 5 |
{-# LANGUAGE OverloadedStrings #-}
import Sound.Tidal.Context
import Sound.Tidal.Vis
import Data.Colour
render :: [Pattern ColourD] -> IO ()
render xs = mapM_ (\(n, p) -> vPDF (show n ++ ".pdf") (300,100) p) $ zip [0..] xs
main = do render [a,b,c,d,e,f,g]
return ()
a = density 16 $ every 2 rev $ every 3 (superimpose (iter 4)) $ rev "[black blue darkblue, grey lightblue]"
b = flip darken <$> "[black blue orange, red green]*16" <*> sinewave1
c = density 10 $ flip darken
<$> "[black blue, grey ~ navy, cornflowerblue blue]*2"
<*> (slow 5 $ (*) <$> sinewave1 <*> (slow 2 triwave1))
d = every 2 rev $ density 10 $ (blend'
<$> "blue navy"
<*> "orange [red, orange, purple]"
<*> (slow 6 $ sinewave1)
)
where blend' a b c = blend c a b
e = density 32 $ (flip over
<$> ("[grey olive, black ~ brown, darkgrey]")
<*> (withOpacity
<$> "[beige, lightblue white darkgreen, beige]"
<*> ((*) <$> (slow 8 $ slow 4 sinewave1) <*> (slow 3 $ sinewave1)))
)
f = density 2 $ (flip darken
<$> (density 8 $ "[black blue, grey ~ navy, cornflowerblue blue]*2")
<*> sinewave1
)
g = density 2 $
do let x = "[skyblue olive, grey ~ navy, cornflowerblue green]"
coloura <- density 8 x
colourb <- density 4 x
slide <- slow 2 sinewave1
return $ blend slide coloura colourb
| lennart/Tidal | vis/examples/example.hs | gpl-3.0 | 1,588 | 0 | 15 | 573 | 503 | 254 | 249 | 32 | 1 |
{-# LANGUAGE CPP, RecordWildCards, MultiParamTypeClasses #-}
-- |
-- Package configuration information: essentially the interface to Cabal, with
-- some utilities
--
-- (c) The University of Glasgow, 2004
--
module PackageConfig (
-- $package_naming
-- * UnitId
packageConfigId,
-- * The PackageConfig type: information about a package
PackageConfig,
InstalledPackageInfo(..),
ComponentId(..),
SourcePackageId(..),
PackageName(..),
Version(..),
defaultPackageConfig,
sourcePackageIdString,
packageNameString,
pprPackageConfig,
) where
#include "HsVersions.h"
import GHC.PackageDb
import Data.Version
import FastString
import Outputable
import Module
import Unique
-- -----------------------------------------------------------------------------
-- Our PackageConfig type is the InstalledPackageInfo from ghc-boot,
-- which is similar to a subset of the InstalledPackageInfo type from Cabal.
type PackageConfig = InstalledPackageInfo
SourcePackageId
PackageName
Module.UnitId
Module.ModuleName
Module.Module
-- TODO: there's no need for these to be FastString, as we don't need the uniq
-- feature, but ghc doesn't currently have convenient support for any
-- other compact string types, e.g. plain ByteString or Text.
newtype ComponentId = ComponentId FastString deriving (Eq, Ord)
newtype SourcePackageId = SourcePackageId FastString deriving (Eq, Ord)
newtype PackageName = PackageName FastString deriving (Eq, Ord)
instance BinaryStringRep ComponentId where
fromStringRep = ComponentId . mkFastStringByteString
toStringRep (ComponentId s) = fastStringToByteString s
instance BinaryStringRep SourcePackageId where
fromStringRep = SourcePackageId . mkFastStringByteString
toStringRep (SourcePackageId s) = fastStringToByteString s
instance BinaryStringRep PackageName where
fromStringRep = PackageName . mkFastStringByteString
toStringRep (PackageName s) = fastStringToByteString s
instance Uniquable ComponentId where
getUnique (ComponentId n) = getUnique n
instance Uniquable SourcePackageId where
getUnique (SourcePackageId n) = getUnique n
instance Uniquable PackageName where
getUnique (PackageName n) = getUnique n
instance Outputable ComponentId where
ppr (ComponentId str) = ftext str
instance Outputable SourcePackageId where
ppr (SourcePackageId str) = ftext str
instance Outputable PackageName where
ppr (PackageName str) = ftext str
defaultPackageConfig :: PackageConfig
defaultPackageConfig = emptyInstalledPackageInfo
sourcePackageIdString :: PackageConfig -> String
sourcePackageIdString pkg = unpackFS str
where
SourcePackageId str = sourcePackageId pkg
packageNameString :: PackageConfig -> String
packageNameString pkg = unpackFS str
where
PackageName str = packageName pkg
pprPackageConfig :: PackageConfig -> SDoc
pprPackageConfig InstalledPackageInfo {..} =
vcat [
field "name" (ppr packageName),
field "version" (text (showVersion packageVersion)),
field "id" (ppr unitId),
field "exposed" (ppr exposed),
field "exposed-modules" (ppr exposedModules),
field "hidden-modules" (fsep (map ppr hiddenModules)),
field "trusted" (ppr trusted),
field "import-dirs" (fsep (map text importDirs)),
field "library-dirs" (fsep (map text libraryDirs)),
field "hs-libraries" (fsep (map text hsLibraries)),
field "extra-libraries" (fsep (map text extraLibraries)),
field "extra-ghci-libraries" (fsep (map text extraGHCiLibraries)),
field "include-dirs" (fsep (map text includeDirs)),
field "includes" (fsep (map text includes)),
field "depends" (fsep (map ppr depends)),
field "cc-options" (fsep (map text ccOptions)),
field "ld-options" (fsep (map text ldOptions)),
field "framework-dirs" (fsep (map text frameworkDirs)),
field "frameworks" (fsep (map text frameworks)),
field "haddock-interfaces" (fsep (map text haddockInterfaces)),
field "haddock-html" (fsep (map text haddockHTMLs))
]
where
field name body = text name <> colon <+> nest 4 body
-- -----------------------------------------------------------------------------
-- UnitId (package names, versions and dep hash)
-- $package_naming
-- #package_naming#
-- Mostly the compiler deals in terms of 'UnitId's, which are md5 hashes
-- of a package ID, keys of its dependencies, and Cabal flags. You're expected
-- to pass in the unit id in the @-this-unit-id@ flag. However, for
-- wired-in packages like @base@ & @rts@, we don't necessarily know what the
-- version is, so these are handled specially; see #wired_in_packages#.
-- | Get the GHC 'UnitId' right out of a Cabalish 'PackageConfig'
packageConfigId :: PackageConfig -> UnitId
packageConfigId = unitId
| tjakway/ghcjvm | compiler/main/PackageConfig.hs | bsd-3-clause | 5,188 | 0 | 11 | 1,208 | 1,007 | 533 | 474 | 84 | 1 |
{-# LANGUAGE OverloadedStrings, RecordWildCards, TemplateHaskell, TupleSections #-}
-- | Extensions to Aeson parsing of objects.
module Data.Aeson.Extended (
module Export
-- * Extended failure messages
, (.:)
, (.:?)
-- * JSON Parser that emits warnings
, WarningParser
, JSONWarning (..)
, withObjectWarnings
, jsonSubWarnings
, jsonSubWarningsT
, jsonSubWarningsTT
, logJSONWarnings
, tellJSONField
, unWarningParser
, (..:)
, (..:?)
, (..!=)
) where
import Control.Monad.Logger (MonadLogger, logWarn)
import Control.Monad.Trans (lift)
import Control.Monad.Trans.Writer.Strict (WriterT, mapWriterT, runWriterT, tell)
import Data.Aeson as Export hiding ((.:), (.:?))
import qualified Data.Aeson as A
import Data.Aeson.Types hiding ((.:), (.:?))
import qualified Data.HashMap.Strict as HashMap
import Data.Monoid
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Text (unpack, Text)
import qualified Data.Text as T
import Data.Traversable
import qualified Data.Traversable as Traversable
import Prelude -- Fix redundant import warnings
-- | Extends @.:@ warning to include field name.
(.:) :: FromJSON a => Object -> Text -> Parser a
(.:) o p = modifyFailure (("failed to parse field '" <> unpack p <> "': ") <>) (o A..: p)
{-# INLINE (.:) #-}
-- | Extends @.:?@ warning to include field name.
(.:?) :: FromJSON a => Object -> Text -> Parser (Maybe a)
(.:?) o p = modifyFailure (("failed to parse field '" <> unpack p <> "': ") <>) (o A..:? p)
{-# INLINE (.:?) #-}
-- | 'WarningParser' version of @.:@.
(..:)
:: FromJSON a
=> Object -> Text -> WarningParser a
o ..: k = tellJSONField k >> lift (o .: k)
-- | 'WarningParser' version of @.:?@.
(..:?)
:: FromJSON a
=> Object -> Text -> WarningParser (Maybe a)
o ..:? k = tellJSONField k >> lift (o .:? k)
-- | 'WarningParser' version of @.!=@.
(..!=) :: WarningParser (Maybe a) -> a -> WarningParser a
wp ..!= d =
flip mapWriterT wp $
\p ->
do a <- fmap snd p
fmap (, a) (fmap fst p .!= d)
-- | Tell warning parser about an expected field, so it doesn't warn about it.
tellJSONField :: Text -> WarningParser ()
tellJSONField key = tell (mempty { wpmExpectedFields = Set.singleton key})
-- | 'WarningParser' version of 'withObject'.
withObjectWarnings :: String
-> (Object -> WarningParser a)
-> Value
-> Parser (a, [JSONWarning])
withObjectWarnings expected f =
withObject expected $
\obj ->
do (a,w) <- runWriterT (f obj)
let unrecognizedFields =
Set.toList
(Set.difference
(Set.fromList (HashMap.keys obj))
(wpmExpectedFields w))
return
( a
, wpmWarnings w ++
case unrecognizedFields of
[] -> []
_ -> [JSONUnrecognizedFields expected unrecognizedFields])
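-- For illustration, a hypothetical use (the @Config@ type and its fields are
-- made up for this sketch and are not part of the module):
--
-- > data Config = Config { configName :: Text, configRetries :: Int }
-- >
-- > instance FromJSON (Config, [JSONWarning]) where
-- >   parseJSON = withObjectWarnings "Config" $ \o ->
-- >     Config <$> o ..: "name" <*> o ..:? "retries" ..!= 3
--
-- Any unexpected keys in the object are reported as 'JSONUnrecognizedFields'.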
-- | Convert a 'WarningParser' to a 'Parser'.
unWarningParser :: WarningParser a -> Parser a
unWarningParser wp = do
(a,_) <- runWriterT wp
return a
-- | Log JSON warnings.
logJSONWarnings
:: MonadLogger m
=> FilePath -> [JSONWarning] -> m ()
logJSONWarnings fp =
mapM_ (\w -> $logWarn ("Warning: " <> T.pack fp <> ": " <> T.pack (show w)))
-- | Handle warnings in a sub-object.
jsonSubWarnings :: WarningParser (a, [JSONWarning]) -> WarningParser a
jsonSubWarnings f = do
(result,warnings) <- f
tell
(mempty
{ wpmWarnings = warnings
})
return result
-- | Handle warnings in a @Traversable@ of sub-objects.
jsonSubWarningsT
:: Traversable t
=> WarningParser (t (a, [JSONWarning])) -> WarningParser (t a)
jsonSubWarningsT f =
Traversable.mapM (jsonSubWarnings . return) =<< f
-- | Handle warnings in a @Maybe Traversable@ of sub-objects.
jsonSubWarningsTT
:: (Traversable t, Traversable u)
=> WarningParser (u (t (a, [JSONWarning])))
-> WarningParser (u (t a))
jsonSubWarningsTT f =
Traversable.mapM (jsonSubWarningsT . return) =<< f
-- | JSON parser that warns about unexpected fields in objects.
type WarningParser a = WriterT WarningParserMonoid Parser a
-- | Monoid used by 'WarningParser' to track expected fields and warnings.
data WarningParserMonoid = WarningParserMonoid
{ wpmExpectedFields :: !(Set Text)
, wpmWarnings :: [JSONWarning]
}
instance Monoid WarningParserMonoid where
mempty = WarningParserMonoid Set.empty []
mappend a b =
WarningParserMonoid
{ wpmExpectedFields = Set.union
(wpmExpectedFields a)
(wpmExpectedFields b)
, wpmWarnings = wpmWarnings a ++ wpmWarnings b
}
-- | Warning output from 'WarningParser'.
data JSONWarning = JSONUnrecognizedFields String [Text]
instance Show JSONWarning where
show (JSONUnrecognizedFields obj [field]) =
"Unrecognized field in " <> obj <> ": " <> T.unpack field
show (JSONUnrecognizedFields obj fields) =
"Unrecognized fields in " <> obj <> ": " <> T.unpack (T.intercalate ", " fields)
| mathhun/stack | src/Data/Aeson/Extended.hs | bsd-3-clause | 5,153 | 0 | 19 | 1,282 | 1,385 | 759 | 626 | 120 | 2 |
{-# LANGUAGE TypeFamilies #-}
-- This should fail, I think, because of the loopy equality,
-- but the error message is hopeless
module Simple13 where
type family F a
same :: a -> a -> a
same = undefined
mkf :: a -> [F a]
mkf p = undefined
foo :: a ~ [F a] => a -> a
foo p = same p (mkf p)
| spacekitteh/smcghc | testsuite/tests/indexed-types/should_compile/Simple13.hs | bsd-3-clause | 296 | 0 | 9 | 73 | 97 | 53 | 44 | 9 | 1 |
{-# LANGUAGE BangPatterns, FlexibleInstances, TypeFamilies,
TypeSynonymInstances, GADTs #-}
{-# OPTIONS_GHC -fno-warn-orphans -fno-warn-warnings-deprecations #-}
-- |
-- Module : Data.Attoparsec.ByteString.Char8
-- Copyright : Bryan O'Sullivan 2007-2014
-- License : BSD3
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : unknown
--
-- Simple, efficient, character-oriented combinator parsing for
-- 'B.ByteString' strings, loosely based on the Parsec library.
module Data.Attoparsec.ByteString.Char8
(
-- * Character encodings
-- $encodings
-- * Parser types
Parser
, A.Result
, A.IResult(..)
, I.compareResults
-- * Running parsers
, A.parse
, A.feed
, A.parseOnly
, A.parseWith
, A.parseTest
-- ** Result conversion
, A.maybeResult
, A.eitherResult
-- * Parsing individual characters
, char
, char8
, anyChar
, notChar
, satisfy
-- ** Lookahead
, peekChar
, peekChar'
-- ** Special character parsers
, digit
, letter_iso8859_15
, letter_ascii
, space
-- ** Fast predicates
, isDigit
, isDigit_w8
, isAlpha_iso8859_15
, isAlpha_ascii
, isSpace
, isSpace_w8
-- *** Character classes
, inClass
, notInClass
-- * Efficient string handling
, I.string
, stringCI
, skipSpace
, skipWhile
, I.take
, scan
, takeWhile
, takeWhile1
, takeTill
-- ** String combinators
-- $specalt
, (.*>)
, (<*.)
-- ** Consume all remaining input
, I.takeByteString
, I.takeLazyByteString
-- * Text parsing
, I.endOfLine
, isEndOfLine
, isHorizontalSpace
-- * Numeric parsers
, decimal
, hexadecimal
, signed
, Number(..)
-- * Combinators
, try
, (<?>)
, choice
, count
, option
, many'
, many1
, many1'
, manyTill
, manyTill'
, sepBy
, sepBy'
, sepBy1
, sepBy1'
, skipMany
, skipMany1
, eitherP
, I.match
-- * State observation and manipulation functions
, I.endOfInput
, I.atEnd
) where
import Control.Applicative ((*>), (<*), (<$>), (<|>))
import Data.Attoparsec.ByteString.FastSet (charClass, memberChar)
import Data.Attoparsec.ByteString.Internal (Parser)
import Data.Attoparsec.Combinator
import Data.Attoparsec.Number (Number(..))
import Data.Bits (Bits, (.|.), shiftL)
import Data.ByteString.Internal (c2w, w2c)
import Data.Int (Int8, Int16, Int32, Int64)
import Data.String (IsString(..))
import Data.Word
import Prelude hiding (takeWhile)
import qualified Data.Attoparsec.ByteString as A
import qualified Data.Attoparsec.ByteString.Internal as I
import qualified Data.Attoparsec.Internal as I
import qualified Data.ByteString as B8
import qualified Data.ByteString.Char8 as B
instance (a ~ B.ByteString) => IsString (Parser a) where
fromString = I.string . B.pack
-- $encodings
--
-- This module is intended for parsing text that is
-- represented using an 8-bit character set, e.g. ASCII or
-- ISO-8859-15. It /does not/ make any attempt to deal with character
-- encodings, multibyte characters, or wide characters. In
-- particular, all attempts to use characters above code point U+00FF
-- will give wrong answers.
--
-- Code points below U+0100 are simply translated to and from their
-- numeric values, so e.g. the code point U+00A4 becomes the byte
-- @0xA4@ (which is the Euro symbol in ISO-8859-15, but the generic
-- currency sign in ISO-8859-1). Haskell 'Char' values above U+00FF
-- are truncated, so e.g. U+1D6B7 is truncated to the byte @0xB7@.
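--
-- For example (illustrative, assuming @OverloadedStrings@):
--
-- > A.parseOnly (char8 '\x1D6B7') "\xB7" == Right 0xB7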
-- ASCII-specific but fast, oh yes.
toLower :: Word8 -> Word8
toLower w | w >= 65 && w <= 90 = w + 32
| otherwise = w
-- | Satisfy a literal string, ignoring case.
stringCI :: B.ByteString -> Parser B.ByteString
stringCI = I.stringTransform (B8.map toLower)
{-# INLINE stringCI #-}
-- | Consume input as long as the predicate returns 'True', and return
-- the consumed input.
--
-- This parser requires the predicate to succeed on at least one byte
-- of input: it will fail if the predicate never returns 'True' or if
-- there is no input left.
takeWhile1 :: (Char -> Bool) -> Parser B.ByteString
takeWhile1 p = I.takeWhile1 (p . w2c)
{-# INLINE takeWhile1 #-}
-- | The parser @satisfy p@ succeeds for any byte for which the
-- predicate @p@ returns 'True'. Returns the byte that is actually
-- parsed.
--
-- >digit = satisfy isDigit
-- > where isDigit c = c >= '0' && c <= '9'
satisfy :: (Char -> Bool) -> Parser Char
satisfy = I.satisfyWith w2c
{-# INLINE satisfy #-}
-- | Match a letter, in the ISO-8859-15 encoding.
letter_iso8859_15 :: Parser Char
letter_iso8859_15 = satisfy isAlpha_iso8859_15 <?> "letter_iso8859_15"
{-# INLINE letter_iso8859_15 #-}
-- | Match a letter, in the ASCII encoding.
letter_ascii :: Parser Char
letter_ascii = satisfy isAlpha_ascii <?> "letter_ascii"
{-# INLINE letter_ascii #-}
-- | A fast alphabetic predicate for the ISO-8859-15 encoding
--
-- /Note/: For all character encodings other than ISO-8859-15, and
-- almost all Unicode code points above U+00A3, this predicate gives
-- /wrong answers/.
isAlpha_iso8859_15 :: Char -> Bool
isAlpha_iso8859_15 c = (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') ||
(c >= '\166' && moby c)
where moby = notInClass "\167\169\171-\179\182\183\185\187\191\215\247"
{-# NOINLINE moby #-}
{-# INLINE isAlpha_iso8859_15 #-}
-- | A fast alphabetic predicate for the ASCII encoding
--
-- /Note/: For all character encodings other than ASCII, and
-- almost all Unicode code points above U+007F, this predicate gives
-- /wrong answers/.
isAlpha_ascii :: Char -> Bool
isAlpha_ascii c = (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z')
{-# INLINE isAlpha_ascii #-}
-- | Parse a single digit.
digit :: Parser Char
digit = satisfy isDigit <?> "digit"
{-# INLINE digit #-}
-- | A fast digit predicate.
isDigit :: Char -> Bool
isDigit c = c >= '0' && c <= '9'
{-# INLINE isDigit #-}
-- | A fast digit predicate.
isDigit_w8 :: Word8 -> Bool
isDigit_w8 w = w >= 48 && w <= 57
{-# INLINE isDigit_w8 #-}
-- | Match any character.
anyChar :: Parser Char
anyChar = satisfy $ const True
{-# INLINE anyChar #-}
-- | Match any character, to perform lookahead. Returns 'Nothing' if
-- end of input has been reached. Does not consume any input.
--
-- /Note/: Because this parser does not fail, do not use it with
-- combinators such as 'many', because such parsers loop until a
-- failure occurs. Careless use will thus result in an infinite loop.
peekChar :: Parser (Maybe Char)
peekChar = (fmap w2c) `fmap` I.peekWord8
{-# INLINE peekChar #-}
-- | Match any character, to perform lookahead. Does not consume any
-- input, but will fail if end of input has been reached.
peekChar' :: Parser Char
peekChar' = w2c `fmap` I.peekWord8'
{-# INLINE peekChar' #-}
-- | Fast predicate for matching ASCII space characters.
--
-- /Note/: This predicate only gives correct answers for the ASCII
-- encoding. For instance, it does not recognise U+00A0 (non-breaking
-- space) as a space character, even though it is a valid ISO-8859-15
-- byte. For a Unicode-aware and only slightly slower predicate,
-- use 'Data.Char.isSpace'
isSpace :: Char -> Bool
isSpace c = (c == ' ') || ('\t' <= c && c <= '\r')
{-# INLINE isSpace #-}
-- | Fast 'Word8' predicate for matching ASCII space characters.
isSpace_w8 :: Word8 -> Bool
isSpace_w8 w = (w == 32) || (9 <= w && w <= 13)
{-# INLINE isSpace_w8 #-}
-- | Parse a space character.
--
-- /Note/: This parser only gives correct answers for the ASCII
-- encoding. For instance, it does not recognise U+00A0 (non-breaking
-- space) as a space character, even though it is a valid ISO-8859-15
-- byte.
space :: Parser Char
space = satisfy isSpace <?> "space"
{-# INLINE space #-}
-- | Match a specific character.
char :: Char -> Parser Char
char c = satisfy (== c) <?> [c]
{-# INLINE char #-}
-- | Match a specific character, but return its 'Word8' value.
char8 :: Char -> Parser Word8
char8 c = I.satisfy (== c2w c) <?> [c]
{-# INLINE char8 #-}
-- | Match any character except the given one.
notChar :: Char -> Parser Char
notChar c = satisfy (/= c) <?> "not " ++ [c]
{-# INLINE notChar #-}
-- | Match any character in a set.
--
-- >vowel = inClass "aeiou"
--
-- Range notation is supported.
--
-- >halfAlphabet = inClass "a-nA-N"
--
-- To add a literal \'-\' to a set, place it at the beginning or end
-- of the string.
inClass :: String -> Char -> Bool
inClass s = (`memberChar` mySet)
where mySet = charClass s
{-# INLINE inClass #-}
-- | Match any character not in a set.
notInClass :: String -> Char -> Bool
notInClass s = not . inClass s
{-# INLINE notInClass #-}
-- | Consume input as long as the predicate returns 'True', and return
-- the consumed input.
--
-- This parser does not fail. It will return an empty string if the
-- predicate returns 'False' on the first byte of input.
--
-- /Note/: Because this parser does not fail, do not use it with
-- combinators such as 'many', because such parsers loop until a
-- failure occurs. Careless use will thus result in an infinite loop.
takeWhile :: (Char -> Bool) -> Parser B.ByteString
takeWhile p = I.takeWhile (p . w2c)
{-# INLINE takeWhile #-}
-- | A stateful scanner. The predicate consumes and transforms a
-- state argument, and each transformed state is passed to successive
-- invocations of the predicate on each byte of the input until one
-- returns 'Nothing' or the input ends.
--
-- This parser does not fail. It will return an empty string if the
-- predicate returns 'Nothing' on the first byte of input.
--
-- /Note/: Because this parser does not fail, do not use it with
-- combinators such as 'many', because such parsers loop until a
-- failure occurs. Careless use will thus result in an infinite loop.
scan :: s -> (s -> Char -> Maybe s) -> Parser B.ByteString
scan s0 p = I.scan s0 (\s -> p s . w2c)
{-# INLINE scan #-}
-- | Consume input as long as the predicate returns 'False'
-- (i.e. until it returns 'True'), and return the consumed input.
--
-- This parser does not fail. It will return an empty string if the
-- predicate returns 'True' on the first byte of input.
--
-- /Note/: Because this parser does not fail, do not use it with
-- combinators such as 'many', because such parsers loop until a
-- failure occurs. Careless use will thus result in an infinite loop.
takeTill :: (Char -> Bool) -> Parser B.ByteString
takeTill p = I.takeTill (p . w2c)
{-# INLINE takeTill #-}
-- | Skip past input for as long as the predicate returns 'True'.
skipWhile :: (Char -> Bool) -> Parser ()
skipWhile p = I.skipWhile (p . w2c)
{-# INLINE skipWhile #-}
-- | Skip over white space.
skipSpace :: Parser ()
skipSpace = I.skipWhile isSpace_w8
{-# INLINE skipSpace #-}
-- $specalt
--
-- If you enable the @OverloadedStrings@ language extension, you can
-- use the '*>' and '<*' combinators to simplify the common task of
-- matching a statically known string, then immediately parsing
-- something else.
--
-- Instead of writing something like this:
--
-- @
--'I.string' \"foo\" '*>' wibble
-- @
--
-- Using @OverloadedStrings@, you can omit the explicit use of
-- 'I.string', and write a more compact version:
--
-- @
-- \"foo\" '*>' wibble
-- @
--
-- (Note: the '.*>' and '<*.' combinators that were originally
-- provided for this purpose are obsolete and unnecessary, and will be
-- removed in the next major version.)
-- | /Obsolete/. A type-specialized version of '*>' for
-- 'B.ByteString'. Use '*>' instead.
(.*>) :: B.ByteString -> Parser a -> Parser a
s .*> f = I.string s *> f
{-# DEPRECATED (.*>) "This is no longer necessary, and will be removed. Use '*>' instead." #-}
-- | /Obsolete/. A type-specialized version of '<*' for
-- 'B.ByteString'. Use '<*' instead.
(<*.) :: Parser a -> B.ByteString -> Parser a
f <*. s = f <* I.string s
{-# DEPRECATED (<*.) "This is no longer necessary, and will be removed. Use '<*' instead." #-}
-- | A predicate that matches either a carriage return @\'\\r\'@ or
-- newline @\'\\n\'@ character.
isEndOfLine :: Word8 -> Bool
isEndOfLine w = w == 13 || w == 10
{-# INLINE isEndOfLine #-}
-- | A predicate that matches either a space @\' \'@ or horizontal tab
-- @\'\\t\'@ character.
isHorizontalSpace :: Word8 -> Bool
isHorizontalSpace w = w == 32 || w == 9
{-# INLINE isHorizontalSpace #-}
-- | Parse and decode an unsigned hexadecimal number. The hex digits
-- @\'a\'@ through @\'f\'@ may be upper or lower case.
--
-- This parser does not accept a leading @\"0x\"@ string.
hexadecimal :: (Integral a, Bits a) => Parser a
hexadecimal = B8.foldl' step 0 `fmap` I.takeWhile1 isHexDigit
where
isHexDigit w = (w >= 48 && w <= 57) ||
(w >= 97 && w <= 102) ||
(w >= 65 && w <= 70)
step a w | w >= 48 && w <= 57 = (a `shiftL` 4) .|. fromIntegral (w - 48)
| w >= 97 = (a `shiftL` 4) .|. fromIntegral (w - 87)
| otherwise = (a `shiftL` 4) .|. fromIntegral (w - 55)
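-- For example (illustrative, assuming @OverloadedStrings@):
--
-- > A.parseOnly hexadecimal "ff" == Right (255 :: Int)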
{-# SPECIALISE hexadecimal :: Parser Int #-}
{-# SPECIALISE hexadecimal :: Parser Int8 #-}
{-# SPECIALISE hexadecimal :: Parser Int16 #-}
{-# SPECIALISE hexadecimal :: Parser Int32 #-}
{-# SPECIALISE hexadecimal :: Parser Int64 #-}
{-# SPECIALISE hexadecimal :: Parser Integer #-}
{-# SPECIALISE hexadecimal :: Parser Word #-}
{-# SPECIALISE hexadecimal :: Parser Word8 #-}
{-# SPECIALISE hexadecimal :: Parser Word16 #-}
{-# SPECIALISE hexadecimal :: Parser Word32 #-}
{-# SPECIALISE hexadecimal :: Parser Word64 #-}
-- | Parse and decode an unsigned decimal number.
decimal :: Integral a => Parser a
decimal = B8.foldl' step 0 `fmap` I.takeWhile1 isDig
where isDig w = w >= 48 && w <= 57
step a w = a * 10 + fromIntegral (w - 48)
{-# SPECIALISE decimal :: Parser Int #-}
{-# SPECIALISE decimal :: Parser Int8 #-}
{-# SPECIALISE decimal :: Parser Int16 #-}
{-# SPECIALISE decimal :: Parser Int32 #-}
{-# SPECIALISE decimal :: Parser Int64 #-}
{-# SPECIALISE decimal :: Parser Integer #-}
{-# SPECIALISE decimal :: Parser Word #-}
{-# SPECIALISE decimal :: Parser Word8 #-}
{-# SPECIALISE decimal :: Parser Word16 #-}
{-# SPECIALISE decimal :: Parser Word32 #-}
{-# SPECIALISE decimal :: Parser Word64 #-}
-- | Parse a number with an optional leading @\'+\'@ or @\'-\'@ sign
-- character.
signed :: Num a => Parser a -> Parser a
{-# SPECIALISE signed :: Parser Int -> Parser Int #-}
{-# SPECIALISE signed :: Parser Int8 -> Parser Int8 #-}
{-# SPECIALISE signed :: Parser Int16 -> Parser Int16 #-}
{-# SPECIALISE signed :: Parser Int32 -> Parser Int32 #-}
{-# SPECIALISE signed :: Parser Int64 -> Parser Int64 #-}
{-# SPECIALISE signed :: Parser Integer -> Parser Integer #-}
signed p = (negate <$> (char8 '-' *> p))
<|> (char8 '+' *> p)
<|> p
| DavidAlphaFox/ghc | utils/haddock/haddock-library/vendor/attoparsec-0.12.1.1/Data/Attoparsec/ByteString/Char8.hs | bsd-3-clause | 14,921 | 0 | 12 | 3,122 | 2,236 | 1,340 | 896 | 234 | 1 |
-- Mark II lazy wheel-sieve.
-- Colin Runciman ([email protected]); March 1996.
-- See article "Lazy wheel sieves and spirals of primes" (to appear, JFP).
import System.Environment
primes :: [Int]
primes = spiral wheels primes squares
spiral (Wheel s ms ns:ws) ps qs =
foldr turn0 (roll s) ns
where
roll o = foldr (turn o) (foldr (turn o) (roll (o+s)) ns) ms
turn0 n rs =
if n<q then n:rs else sp
turn o n rs =
let n' = o+n in
if n'==2 || n'<q then n':rs else dropWhile (<n') sp
sp = spiral ws (tail ps) (tail qs)
q = head qs
squares :: [Int]
squares = [p*p | p <- primes]
data Wheel = Wheel Int [Int] [Int]
wheels :: [Wheel]
wheels = Wheel 1 [1] [] :
zipWith3 nextSize wheels primes squares
nextSize (Wheel s ms ns) p q =
Wheel (s*p) ms' ns'
where
(xs, ns') = span (<=q) (foldr turn0 (roll (p-1) s) ns)
ms' = foldr turn0 xs ms
roll 0 _ = []
roll t o = foldr (turn o) (foldr (turn o) (roll (t-1) (o+s)) ns) ms
turn0 n rs =
if n`mod`p>0 then n:rs else rs
turn o n rs =
let n' = o+n in
if n'`mod`p>0 then n':rs else rs
main = do
[arg] <- getArgs
print (primes!!((read arg) :: Int))
| beni55/ghcjs | test/nofib/imaginary/wheel-sieve2/Main.hs | mit | 1,159 | 0 | 13 | 300 | 613 | 322 | 291 | 33 | 4 |
module T2182ghci_C(T(..)) where
import T2182ghci_B
| urbanslug/ghc | testsuite/tests/ghci/scripts/T2182ghci_C.hs | bsd-3-clause | 51 | 0 | 5 | 5 | 16 | 11 | 5 | 2 | 0 |
module Main where
main :: IO ()
main =
do ws <- loadInput
print (sum (part1 <$> ws))
print (sum (part2 <$> ws))
loadInput :: IO [String]
loadInput = lines <$> readFile "input8.txt"
part1 :: String -> Int
part1 str = 2 + sum (aux (init (tail str)))
where
aux ('\\':'"' :xs) = 1 : aux xs
aux ('\\':'\\' :xs) = 1 : aux xs
aux ('\\':'x':_:_:xs) = 3 : aux xs
aux (_ :xs) = aux xs
aux [] = []
part2 :: String -> Int
part2 str = 2 + count isExpand str
where
isExpand x = x `elem` "\\\""
count :: (a -> Bool) -> [a] -> Int
count p xs = length (filter p xs)
| glguy/advent2015 | Day8.hs | isc | 612 | 0 | 12 | 182 | 321 | 163 | 158 | 20 | 5 |
{- |
Module : Haskdown
   Copyright : Copyright (C) 2014-2014 Reeze Xia
License : MIT License
-}
module Haskdown
(
--
  module Haskdown.Parser
  , module Haskdown.Generator
) where
import Haskdown.Parser
import Haskdown.Generator | reeze/haskdown | src/Haskdown.hs | mit | 306 | 1 | 5 | 117 | 25 | 17 | 8 | -1 | -1 |
module Sandbox.Recursion (length') where
length' :: (Num b) => [a] -> b
length' [] = 0
length' (_:xs) = 1 + length' xs | olkinn/my-haskell-sandbox | src/Sandbox/Recursion.hs | mit | 120 | 0 | 7 | 24 | 64 | 35 | 29 | 4 | 1 |
module J2S.AI
( module AI
) where
import J2S.AI.MaxN as AI
import J2S.AI.MinMax as AI
import J2S.AI.Random as AI
import J2S.AI.Types as AI
| berewt/J2S | src/J2S/AI.hs | mit | 187 | 0 | 4 | 71 | 44 | 32 | 12 | 6 | 0 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE FlexibleInstances #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
import Test.QuickCheck
import Test.Framework.TH
import Test.Framework.Providers.QuickCheck2
import qualified Data.Vector.Storable as V
import qualified Data.List as L
import Control.Applicative
import Control.Arrow
import qualified Data.KDTree as KD
import qualified Data.KDTree.Internal.Common as KD
import qualified Data.KDTree.Internal.LinSearch as LS
import Linear
--------------------------------------------------
-- Arbitrary instances
instance Arbitrary a => Arbitrary (V3 a) where
arbitrary = V3 <$> arbitrary <*> arbitrary <*> arbitrary
instance (V.Storable a, Arbitrary a) => Arbitrary (V.Vector a) where
arbitrary = V.fromList . getNonEmpty <$> arbitrary
shrink v = [V.init v, V.tail v]
instance Arbitrary KD.BucketSize where
arbitrary = KD.BucketSize . getPositive <$> arbitrary
instance Arbitrary (KD.Dim KD.V3D) where
arbitrary = elements [KD.V3X, KD.V3Y, KD.V3Z]
--------------------------------------------------
-- tree building properties
prop_verify :: (KD.BucketSize, KD.VV3D) -> Bool
prop_verify (b,vs) = KD.verify . KD.kdtree b $ vs
--------------------------------------------------
-- nearest neighbor properties
prop_nns_leaf :: (V3 Double,KD.VV3D) -> Bool
prop_nns_leaf (p,vs) = treeSearch == linSearch
where treeSearch = KD.nearestNeighbors p . KD.kdtree 1000000 $ vs
linSearch = LS.nearestNeighbors p vs
prop_nns_node :: (V3 Double,KD.VV3D) -> Bool
prop_nns_node (p,vs) = treeSearch == linSearch
where treeSearch = KD.nearestNeighbors p . KD.kdtree 1 $ vs
linSearch = LS.nearestNeighbors p vs
--------------------------------------------------
prop_nns :: (KD.BucketSize,V3 Double,KD.VV3D) -> Bool
prop_nns (b,p,vs) = treeSearch == linSearch
where treeSearch = KD.nearestNeighbors p . KD.kdtree b $ vs
linSearch = LS.nearestNeighbors p vs
prop_nn :: (KD.BucketSize,V3 Double,KD.VV3D) -> Bool
prop_nn (b,p,vs) = treeSearch == linSearch
where treeSearch = KD.nearestNeighbor p . KD.kdtree b $ vs
linSearch = LS.nearestNeighbor p vs
prop_nr :: (KD.BucketSize,V3 Double,Double,KD.VV3D) -> Bool
prop_nr (b,p,r,vs) = treeSearch == linSearch
where treeSearch = KD.pointsAround r p . KD.kdtree b $ vs
linSearch = LS.pointsAround r p vs
--------------------------------------------------
prop_merge :: (KD.BucketSize, KD.VV3D, KD.VV3D) -> Bool
prop_merge (bs, vs0, vs1) = KD.verify $ KD.merge bs (KD.kdtree bs vs0) (KD.kdtree bs vs1)
--------------------------------------------------
prop_partition :: (KD.Dim KD.V3D, Ordering, KD.BucketSize, KD.V3D, KD.VV3D) -> Bool
prop_partition (dim,ord,bs,q,vs) = L.null (tsA L.\\ lsA) && L.null (tsB L.\\ lsB)
where (tsA, tsB) = both KD.toList
. KD.partition dim ord q
$ KD.kdtree bs vs
(lsA, lsB) = both V.toList . LS.partition dim ord q $ vs
both f = f *** f
prop_partition_leaf :: (KD.Dim KD.V3D, Ordering, KD.V3D, KD.VV3D) -> Bool
prop_partition_leaf (dim,ord,q,vs) = prop_partition (dim,ord,1000000,q,vs)
prop_partition_node :: (KD.Dim KD.V3D, Ordering, KD.V3D, KD.VV3D) -> Bool
prop_partition_node (dim,ord,q,vs) = prop_partition (dim,ord,1,q,vs)
--------------------------------------------------
prop_select :: (KD.Dim KD.V3D, Ordering, KD.BucketSize, KD.V3D, KD.VV3D) -> Bool
prop_select (dim,ord,bs,q,vs) = L.null $ treeSearch L.\\ linSearch
where treeSearch = KD.toList
. KD.select dim ord q
$ KD.kdtree bs vs
linSearch = V.toList . LS.select dim ord q $ vs
prop_delete :: (KD.Dim KD.V3D, Ordering, KD.BucketSize, KD.V3D, KD.VV3D) -> Bool
prop_delete (dim,ord,bs,q,vs) = L.null $ treeSearch L.\\ linSearch
where treeSearch = KD.toList
. KD.delete dim ord q
$ KD.kdtree bs vs
linSearch = V.toList . LS.delete dim ord q $ vs
prop_update :: (KD.Dim KD.V3D, Ordering, KD.BucketSize, KD.V3D, KD.VV3D) -> Bool
prop_update (dim,ord,bs,q,vs) = L.null $ treeSearch L.\\ linSearch
where treeSearch = KD.toList
. KD.update bs dim ord q updateFun
$ KD.kdtree bs vs
linSearch = V.toList . LS.update dim ord q updateFun $ vs
updateFun = (+1)
--------------------------------------------------
--------------------------------------------------
-- main function / test generator
main :: IO ()
main = $defaultMainGenerator
| fhaust/kdtree | test/MainTest.hs | mit | 4,523 | 0 | 10 | 869 | 1,541 | 846 | 695 | 79 | 1 |
module Main where
import System.Environment (getArgs)
import System.IO (readFile)
import Data.Char (chr, ord)
import Control.Applicative (many, (<$), (<|>), (<*>), (<*), (*>), (<$>) )
import Control.Monad (foldM, void)
import Text.ParserCombinators.ReadP hiding (many)
data BrainFuck = PtrInc | PtrDec | ValInc | ValDec | Put | Get | Loop [BrainFuck]
deriving (Show)
main :: IO ()
main = do
args <- getArgs
case args of
[] -> return ()
[path] -> do
src <- readFile path
run $ parse $ filter (`elem` "+-><.,[]") src
putChar '\n'
parse :: String -> [BrainFuck]
parse = fst . last . readP_to_S (many bf) where
bf = ValInc <$ char '+'
<|> ValDec <$ char '-'
<|> PtrInc <$ char '>'
<|> PtrDec <$ char '<'
<|> Put <$ char '.'
<|> Get <$ char ','
<|> char '[' *> (Loop <$> many bf) <* char ']'
<|> pfail
type Tape = ([Int], Int, [Int])
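-- The tape is a zipper: cells ahead of the pointer, the current cell, and the
-- cells already passed. 'next' moves the pointer forward ('>'), 'prev' moves it
-- back ('<'), and 'adjust' updates the current cell strictly.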
next :: Tape -> Tape
next (x:xs, y, ys) = (xs, x, y:ys)
prev :: Tape -> Tape
prev (xs, x, y:ys) = (x:xs, y, ys)
adjust :: (Int -> Int) -> Tape -> Tape
adjust f (xs, x, ys) = let y = f x in seq y (xs, y, ys)
run :: [BrainFuck] -> IO ()
run = void . foldM run' (repeat 0, 0, repeat 0) where
run' :: Tape -> BrainFuck -> IO Tape
run' tape@(_, x, _) inst = case inst of
ValInc -> return $ adjust succ tape
ValDec -> return $ adjust pred tape
PtrInc -> return $ next tape
PtrDec -> return $ prev tape
Put -> tape <$ putChar (chr $ (`mod` 128) $ abs x)
Get -> getChar >>= \ c -> return $ adjust (const $ ord c) tape
loop@(Loop insts)
| x == 0 -> return tape
| otherwise -> foldM run' tape insts >>= flip run' loop
| KeizoBookman/bf-haskell | Main.hs | mit | 1,740 | 0 | 22 | 512 | 803 | 433 | 370 | 48 | 7 |
-- Problems/Problem012Spec.hs
module Problems.Problem012Spec (main, spec) where
import Test.Hspec
import Problems.Problem012
main :: IO()
main = hspec spec
spec :: Spec
spec = describe "Problem 12" $
it "Should evaluate to 76576500" $
p12 `shouldBe` 76576500
| Sgoettschkes/learning | haskell/ProjectEuler/tests/Problems/Problem012Spec.hs | mit | 274 | 0 | 8 | 51 | 73 | 41 | 32 | 9 | 1 |
#!/usr/bin/env stack
{- stack --system-ghc --resolver lts-18.18 script -}
module Main where
import System.Environment
import System.Exit
import System.IO
import Control.Arrow ((&&&))
import Data.Char
import Data.List
import Data.Function (on)
import Text.Printf
main :: IO ()
main = do
d <- getContents
let xs = filter (\x -> snd x > 0) $ map wordFilter $ wordCount d
display $ sndSort xs
display :: [(String, Int)] -> IO ()
display [] = return ()
display (x:xs) = do
printf "%-8d %s\n" (snd x) (fst x)
display xs
fstSort :: Ord a => [(a, b)] -> [(a, b)]
fstSort = sortBy (flip compare `on` fst)
lengthF :: (String, Int) -> (String, Int)
lengthF (a, b) = if b > 1 then (a, b) else (a, 0)
sndSort :: Ord b => [(a, b)] -> [(a, b)]
sndSort = sortBy (flip compare `on` snd)
wordCount :: String -> [(String, Int)]
wordCount = map (head &&& length) . group . sort . words . map toLower
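-- For example (illustrative): wordCount "A a b" == [("a",2),("b",1)]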
wordF :: (String, Int) -> (String, Int)
wordF (a, b) = case a of
"to" -> (a, 0)
"and" -> (a, 0)
"for" -> (a, 0)
"be" -> (a, 0)
"are" -> (a, 0)
"in" -> (a, 0)
"the" -> (a, 0)
"of" -> (a, 0)
"with" -> (a, 0)
"will" -> (a, 0)
"where" -> (a, 0)
"which" -> (a, 0)
"week" -> (a, 0)
"was" -> (a, 0)
_ -> (a, b)
wordFilter :: (String, Int) -> (String, Int)
wordFilter a = lengthF $ wordL $ wordF a
wordL :: (String, Int) -> (String, Int)
wordL (a, b) = if length a > 1 then (a, b) else (a, 0)
| joelelmercarlson/stack | haskell/wordcount.hs | mit | 1,703 | 0 | 16 | 611 | 752 | 424 | 328 | 48 | 15 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
module Hpack.UtilSpec (main, spec) where
import Data.Aeson
import Data.Aeson.QQ
import Data.Aeson.Types
import Helper
import System.Directory
import Hpack.Config
import Hpack.Util
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "sort" $ do
it "sorts lexicographically" $ do
sort ["foo", "Foo"] `shouldBe` ["Foo", "foo" :: String]
describe "parseMain" $ do
it "accepts source file" $ do
parseMain "Main.hs" `shouldBe` ("Main.hs", [])
it "accepts literate source file" $ do
parseMain "Main.lhs" `shouldBe` ("Main.lhs", [])
it "accepts module" $ do
parseMain "Foo" `shouldBe` ("Foo.hs", ["-main-is Foo"])
it "accepts hierarchical module" $ do
parseMain "Foo.Bar.Baz" `shouldBe` ("Foo/Bar/Baz.hs", ["-main-is Foo.Bar.Baz"])
it "accepts qualified identifier" $ do
parseMain "Foo.bar" `shouldBe` ("Foo.hs", ["-main-is Foo.bar"])
describe "toModule" $ do
it "maps .hs paths to module names" $ do
toModule ["Foo", "Bar", "Baz.hs"] `shouldBe` Just "Foo.Bar.Baz"
it "maps .lhs paths to module names" $ do
toModule ["Foo", "Bar", "Baz.lhs"] `shouldBe` Just "Foo.Bar.Baz"
it "maps .hsc paths to module names" $ do
toModule ["Foo", "Bar", "Baz.hsc"] `shouldBe` Just "Foo.Bar.Baz"
it "rejects invalid module names" $ do
toModule ["resources", "hello.hs"] `shouldBe` Nothing
describe "getFilesRecursive" $ do
it "gets all files from given directory and all its subdirectories" $ do
inTempDirectoryNamed "test" $ do
touch "foo/bar"
touch "foo/baz"
touch "foo/foobar/baz"
actual <- getFilesRecursive "foo"
actual `shouldMatchList` [
["bar"]
, ["baz"]
, ["foobar", "baz"]
]
describe "List" $ do
let invalid = [aesonQQ|{
name: "hpack",
gi: "sol/hpack",
ref: "master"
}|]
parseError :: String -> Either String (List Dependency)
parseError prefix = Left (prefix ++ ": neither key \"git\" nor key \"github\" present")
context "when parsing single values" $ do
it "returns the value in a singleton list" $ do
fromJSON (toJSON $ Number 23) `shouldBe` Success (List [23 :: Int])
it "returns error messages from element parsing" $ do
parseEither parseJSON invalid `shouldBe` parseError "Error in $"
context "when parsing a list of values" $ do
it "returns the list" $ do
fromJSON (toJSON [Number 23, Number 42]) `shouldBe` Success (List [23, 42 :: Int])
it "propagates parse error messages of invalid elements" $ do
parseEither parseJSON (toJSON [String "foo", invalid]) `shouldBe` parseError "Error in $[1]"
describe "tryReadFile" $ do
it "reads file" $ do
inTempDirectory $ do
writeFile "foo" "bar"
tryReadFile "foo" `shouldReturn` Just "bar"
it "returns Nothing if file does not exist" $ do
inTempDirectory $ do
tryReadFile "foo" `shouldReturn` Nothing
describe "expandGlobs" $ around withTempDirectory $ do
it "accepts simple files" $ \dir -> do
touch (dir </> "foo.js")
expandGlobs dir ["foo.js"] `shouldReturn` ([], ["foo.js"])
it "removes duplicates" $ \dir -> do
touch (dir </> "foo.js")
expandGlobs dir ["foo.js", "*.js"] `shouldReturn` ([], ["foo.js"])
it "rejects directories" $ \dir -> do
touch (dir </> "foo")
createDirectory (dir </> "bar")
expandGlobs dir ["*"] `shouldReturn` ([], ["foo"])
it "rejects character ranges" $ \dir -> do
touch (dir </> "foo1")
touch (dir </> "foo2")
touch (dir </> "foo[1,2]")
expandGlobs dir ["foo[1,2]"] `shouldReturn` ([], ["foo[1,2]"])
context "when expanding *" $ do
it "expands by extension" $ \dir -> do
let files = [
"files/foo.js"
, "files/bar.js"
, "files/baz.js"]
mapM_ (touch . (dir </>)) files
touch (dir </> "files/foo.hs")
expandGlobs dir ["files/*.js"] `shouldReturn` ([], sort files)
it "rejects dot-files" $ \dir -> do
touch (dir </> "foo/bar")
touch (dir </> "foo/.baz")
expandGlobs dir ["foo/*"] `shouldReturn` ([], ["foo/bar"])
it "accepts dot-files when explicitly asked to" $ \dir -> do
touch (dir </> "foo/bar")
touch (dir </> "foo/.baz")
expandGlobs dir ["foo/.*"] `shouldReturn` ([], ["foo/.baz"])
it "matches at most one directory component" $ \dir -> do
touch (dir </> "foo/bar/baz.js")
touch (dir </> "foo/bar.js")
expandGlobs dir ["*/*.js"] `shouldReturn` ([], ["foo/bar.js"])
context "when expanding **" $ do
it "matches arbitrary many directory components" $ \dir -> do
let file = "foo/bar/baz.js"
touch (dir </> file)
expandGlobs dir ["**/*.js"] `shouldReturn` ([], [file])
context "when a pattern does not match anything" $ do
it "warns" $ \dir -> do
expandGlobs dir ["foo"] `shouldReturn`
(["Specified pattern \"foo\" for extra-source-files does not match any files"], [])
context "when a pattern only matches a directory" $ do
it "warns" $ \dir -> do
createDirectory (dir </> "foo")
expandGlobs dir ["foo"] `shouldReturn`
(["Specified pattern \"foo\" for extra-source-files does not match any files"], [])
| yamadapc/hpack-convert | test/Hpack/UtilSpec.hs | mit | 5,575 | 0 | 21 | 1,524 | 1,638 | 818 | 820 | 121 | 1 |
module Main where
import qualified AoC201601 as AoC01
import qualified AoC201602 as AoC02
import qualified AoC201603 as AoC03
import qualified AoC201604 as AoC04
import qualified AoC201605 as AoC05
import qualified AoC201606 as AoC06
import qualified AoC201607 as AoC07
import qualified AoC201608 as AoC08
import qualified AoC201609 as AoC09
main :: IO ()
main = do
AoC01.runDay
AoC02.runDay
AoC03.runDay
AoC04.runDay
AoC05.runDay
AoC06.runDay
AoC07.runDay
AoC08.runDay
AoC09.runDay
| rickerbh/AoC | AoC2016/app/Main.hs | mit | 503 | 0 | 7 | 84 | 118 | 74 | 44 | 21 | 1 |
module Main where
import qualified Prelude as P
import Lib
import Derivation
import System.IO
import Data.List as L
import System.Random.Shuffle
import Control.Monad as M
import Data.Time
haskCoq :: [a] -> List a
haskCoq [] = Nil
haskCoq (h : hs) = Cons h (haskCoq hs)
coqNat :: P.Int -> Nat
coqNat n
| n P.== 0 = O
| P.otherwise = S (coqNat (n P.- 1))
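-- For example (illustrative): coqNat 0 = O and coqNat 2 = S (S O)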
candEq :: Cand -> Cand -> Sumbool
candEq c d
| c P.== d = Left
| P.otherwise = Right
{- Wikipedia example -}
charCand :: P.Char -> Cand
charCand 'A' = A
charCand 'B' = B
charCand 'C' = C
charCand 'D' = D
--charCand 'E' = E
balfun :: [(Cand, Nat)] -> Ballot
balfun ((A, b1) : (B, b2) : (C, b3) : (D, b4) {-: (E, b5) -}: _) = f where
f :: Cand -> Nat
f A = b1
f B = b2
f C = b3
f D = b4
--f E = b5
createCand :: IO ()
createCand = do
let t = (P.replicate 4 "ABCD") P.++ (P.replicate 3 "BCAD") P.++ (P.replicate 2 "CABD") P.++ (P.replicate 1 "ACBD")
v <- shuffleM t
writeFile ("votes_customized.txt") (P.unlines v)
main :: IO ()
main = do
{- call this function to create list of ballots -}
--createCand
r <- readFile "votes_customized.txt"
let votes = final_count candEq P.. haskCoq P.. P.map balfun P..
P.map (P.map (\(y, z) -> (charCand y, coqNat z)))
P.. P.map L.sort P.. P.map (\x -> P.zip x [1..]) P.. P.lines P.$ r
P.print votes
| mukeshtiwari/formalized-voting | paper-code/schulze-voting/app/Main.hs | mit | 1,372 | 0 | 21 | 344 | 638 | 334 | 304 | 44 | 4 |
{-# LANGUAGE OverloadedStrings #-}
module Cryptogram where
import Prelude
import qualified Prelude as P
import Data.HashMap.Lazy
import qualified Data.HashSet as HS
import Data.Word (Word8)
import Data.ByteString hiding (empty)
import qualified Data.ByteString as BS
canonicalize :: ByteString -> [Int]
canonicalize s = canonicalize_ s empty 0
canonicalize_ :: ByteString -> HashMap Word8 Int -> Int -> [Int]
canonicalize_ "" _ _ = []
canonicalize_ s m next = let c = BS.head s in
if c `member` m
then (m ! c) : (canonicalize_ (BS.tail s) m next)
else next : (canonicalize_ (BS.tail s) (insert c next m) (next+1))
-- Lower case letters are arbitrary while all others are treated as fixed
data Symbol = Option Int | Fixed Word8 deriving (Eq, Show, Ord)
isUpperLetter :: Word8 -> Bool
isUpperLetter x = x >= 65 && x <= 90
isLowerLetter :: Word8 -> Bool
isLowerLetter x = x >= 97 && x <= 122
toLower :: Word8 -> Word8
toLower x = if isUpperLetter x then x + 32 else x
getPattern :: ByteString -> [Symbol]
getPattern s = getPattern_ s empty 0
getPattern_ :: ByteString -> HashMap Word8 Int -> Int -> [Symbol]
getPattern_ "" _ _ = []
getPattern_ s m next = let c = BS.head s in
if isLowerLetter c
then if c `member` m
then (Option (m ! c)) : (getPattern_ (BS.tail s) m next)
else (Option next) : (getPattern_ (BS.tail s) (insert c next m) (next+1))
else (Fixed $ toLower c) : getPattern_ (BS.tail s) m next
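-- For example (illustrative, with OverloadedStrings):
-- getPattern "aBa" == [Option 0, Fixed 98, Option 0]
-- i.e. the two lower-case 'a's share slot 0 and 'B' is fixed to byte 98 ('b').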
fixedSet :: [Symbol] -> HS.HashSet Word8
fixedSet [] = HS.empty
fixedSet ((Option _):ss) = fixedSet ss
fixedSet ((Fixed c):ss) = HS.insert c (fixedSet ss)
matchesPattern :: [Symbol] -> ByteString -> Bool
matchesPattern pattern = matchesPattern_ pattern (fixedSet pattern) empty 0
matchesPattern' :: [Symbol] -> HS.HashSet Word8 -> ByteString -> Bool
matchesPattern' pattern fixed = matchesPattern_ pattern fixed empty 0
matchesPattern_ :: [Symbol] -> HS.HashSet Word8 -> HashMap Word8 Int -> Int -> ByteString -> Bool
matchesPattern_ [] _ _ _ str = BS.null str
matchesPattern_ ((Option ind):ss) fixed m next str = not (BS.null str) && let c = BS.head str in
if c `member` m
then (m ! c) == ind && not (c `HS.member` fixed) && matchesPattern_ ss fixed m next (BS.tail str)
else next == ind && not (c `HS.member` fixed) && matchesPattern_ ss fixed (insert c next m) (next+1) (BS.tail str)
matchesPattern_ ((Fixed s):ss) fixed m next str = not (BS.null str) && BS.head str == s && matchesPattern_ ss fixed m next (BS.tail str)
| bhamrick/puzzlib | Cryptogram.hs | mit | 2,495 | 0 | 14 | 490 | 1,076 | 562 | 514 | 49 | 3 |
{-# LANGUAGE NoImplicitPrelude, TypeSynonymInstances, FlexibleInstances, TemplateHaskellQuotes, OverloadedStrings, PostfixOperators #-}
{-# OPTIONS_GHC -fno-warn-missing-signatures -fno-warn-type-defaults -fno-warn-orphans #-} -- Rankable
{-# OPTIONS_GHC -O0 -fno-cse -fno-full-laziness #-} -- preserve "lexical" sharing for observed sharing
module Commands.Plugins.Spiros.Number where
import Commands.Plugins.Spiros.Extra
import Commands.Mixins.DNS13OSX9
import GHC.Stack
import Digit
import Prelude.Spiros()
instance Rankable Digit -- omg this stupid typeclass must die
instance Rankable Decimal
type Decimal = [Digit]
fromDecimal :: (Integral a) => Decimal -> a
fromDecimal = fmap fromDigit > fromDigits 10
fromDigit :: (Num a) => Digit -> a
fromDigit (Digit i) = fromIntegral i
digitizedNumber :: R Decimal
digitizedNumber = 'digitizedNumber <=> (digit__ -++)-- TODO remove
digits__ :: R Decimal
digits__ = 'digits__ <=> (digit__ -++)
digit__ :: R Digit-- TODO renamed the one in Correct.Grammar to avoid conflict with this
digit__ = vocab
[ "zero" -: Digit 0 -- disyllabic
, "oh" -: Digit 0 -- sometimes more fluent
-- , "nil" -: Digit 0 -- monosyllabic
, "one" -: Digit 1
, "two" -: Digit 2
, "three" -: Digit 3
, "four" -: Digit 4
, "five" -: Digit 5
, "six" -: Digit 6
-- , "sev" -: Digit 7 -- monosyllabic
, "seven" -: Digit 7 -- disyllabic
, "eight" -: Digit 8
, "nine" -: Digit 9
]
--------------------------------------------------------------------------------
englishNumericRHS :: DNSEarleyRHS Char
englishNumericRHS = vocab
[ "zero"-: '0'
, "one"-: '1'
, "two"-: '2'
, "three"-: '3'
, "four"-: '4'
, "five"-: '5'
, "six"-: '6'
, "seven"-: '7'
, "eight"-: '8'
, "nine"-: '9'
]
-- | @('read' <$> digits :: R_ 'Int')@ is total.
digits :: R String
digits = 'digits <=> (digit-++) -- TODO
digit :: (HasCallStack) => R Char
digit = 'digit <=> (head . show) <$> digitRHS -- TODO errorWithCallStack
digitRHS :: (Num a) => R a
digitRHS = vocab
[ "nil" -: 0 -- monosyllabic
, "zero" -: 0 -- disyllabic
, "one" -: 1
, "two" -: 2
, "three" -: 3
, "four" -: 4
, "five" -: 5
, "six" -: 6
, "sev" -: 7 -- monosyllabic
, "seven" -: 7 -- disyllabic
, "eight" -: 8
, "nine" -: 9
]
--------------------------------------------------------------------------------
number :: R Number
number = 'number <=> numberRHS
numberRHS :: (Num a) => R a
numberRHS = digitRHS <|> vocab
[ "ten"-: 10
, "eleven"-: 11
, "twelve"-: 12
, "thirteen"-: 13
, "fourteen"-: 14
, "fifteen"-: 15
, "sixteen"-: 16
, "seventeen"-: 17
, "eighteen"-: 18
, "nineteen"-: 19
, "twenty"-: 20
, "twenty-one"-: 21
, "twenty-two"-: 22
, "twenty-three"-: 23
, "twenty-four"-: 24
, "twenty-five"-: 25
, "twenty-six"-: 26
, "twenty-seven"-: 27
, "twenty-eight"-: 28
, "twenty-nine"-: 29
, "thirty"-: 30
, "thirty-one"-: 31
, "thirty-two"-: 32
, "thirty-three"-: 33
, "thirty-four"-: 34
, "thirty-five"-: 35
, "thirty-six"-: 36
, "thirty-seven"-: 37
, "thirty-eight"-: 38
, "thirty-nine"-: 39
, "forty"-: 40
, "forty-one"-: 41
, "forty-two"-: 42
, "forty-three"-: 43
, "forty-four"-: 44
, "forty-five"-: 45
, "forty-six"-: 46
, "forty-seven"-: 47
, "forty-eight"-: 48
, "forty-nine"-: 49
, "fifty"-: 50
, "fifty-one"-: 51
, "fifty-two"-: 52
, "fifty-three"-: 53
, "fifty-four"-: 54
, "fifty-five"-: 55
, "fifty-six"-: 56
, "fifty-seven"-: 57
, "fifty-eight"-: 58
, "fifty-nine"-: 59
, "sixty"-: 60
, "sixty-one"-: 61
, "sixty-two"-: 62
, "sixty-three"-: 63
, "sixty-four"-: 64
, "sixty-five"-: 65
, "sixty-six"-: 66
, "sixty-seven"-: 67
, "sixty-eight"-: 68
, "sixty-nine"-: 69
, "seventy"-: 70
, "seventy-one"-: 71
, "seventy-two"-: 72
, "seventy-three"-: 73
, "seventy-four"-: 74
, "seventy-five"-: 75
, "seventy-six"-: 76
, "seventy-seven"-: 77
, "seventy-eight"-: 78
, "seventy-nine"-: 79
, "eighty"-: 80
, "eighty-one"-: 81
, "eighty-two"-: 82
, "eighty-three"-: 83
, "eighty-four"-: 84
, "eighty-five"-: 85
, "eighty-six"-: 86
, "eighty-seven"-: 87
, "eighty-eight"-: 88
, "eighty-nine"-: 89
, "ninety"-: 90
, "ninety-one"-: 91
, "ninety-two"-: 92
, "ninety-three"-: 93
, "ninety-four"-: 94
, "ninety-five"-: 95
, "ninety-six"-: 96
, "ninety-seven"-: 97
, "ninety-eight"-: 98
, "ninety-nine"-: 99
, "one-hundred"-: 100
]
| sboosali/commands | commands-spiros/config/Commands/Plugins/Spiros/Number.hs | mit | 4,526 | 0 | 8 | 994 | 1,257 | 714 | 543 | -1 | -1 |
module Tak.Editor.Cursor where
import qualified Tak.Buffer.Line as L
import Tak.Util
import Tak.Types
import Tak.Buffer
insertPos :: SimpleEditor -> Pos
insertPos ed = posWithinBuffer (buffer ed) (cursorPos ed)
screenPos :: SimpleEditor -> Pos
screenPos se = let iPos = insertPos se
Pos y x = iPos
screenLine = (line iPos) - (lineScroll se)
isOnScreen = screenLine >= 0
width = L.lineWidth (L.take x (lineAt y $ buffer se))
in if isOnScreen
then iPos { line = screenLine, row = width }
else Pos 0 0
fixScroll = fixScrollN 0
fixScrollCentered ed = fixScrollN (viewHeight ed `div` 2) ed
fixScrollN offset ed =
let cp = cursorPos ed
l = line cp
ls = lineScroll ed
h = viewHeight ed
isBefore = l < ls
isAfter = l >= ls + h
newLineScroll
| isBefore = l
| isAfter = l - h + 1 + offset
| otherwise = ls
in ed { lineScroll = newLineScroll }
cursorDown ed =
let cp = cursorPos ed
nextLinePos = min (lastLineIdx $ buffer ed) (line cp + 1)
in fixScroll $ ed { cursorPos = cp { line = nextLinePos }}
cursorUp ed =
let cp = cursorPos ed
nextLinePos = max 0 (line cp - 1)
in fixScroll $ ed { cursorPos = cp { line = nextLinePos }}
cursorLeft ed =
let cp = insertPos ed
l = line cp
r = row cp
lenOfLineBefore = L.length $ lineAt (l - 1) $ buffer ed
in if r > 0
then ed { cursorPos = cp { row = r - 1 }}
else if l > 0
then fixScroll $ ed { cursorPos = Pos (l - 1) lenOfLineBefore }
else ed
cursorRight ed =
let cp = insertPos ed
l = line cp
r = row cp
lenCurLine = L.length $ lineAt l $ buffer ed
in if r < lenCurLine
then ed { cursorPos = cp { row = r + 1 } }
else if l < (lastLineIdx $ buffer ed)
then fixScroll $ ed { cursorPos = Pos (l + 1) 0 }
else ed
cursorEndOfLine ed =
let cp = cursorPos ed
nextRowPos = (L.length $ lineAt (line cp) (buffer ed))
in ed { cursorPos = cp { row = nextRowPos }}
cursorBeginningOfLine ed = ed { cursorPos = (cursorPos ed) { row = 0 } }
cursorPageDown ed =
let cp = cursorPos ed
l = line cp
pageLen = (viewHeight ed) - 3
lastBufLineIdx = lastLineIdx $ buffer ed
in fixScroll $ ed { cursorPos = cp { line = min (l + pageLen) lastBufLineIdx } }
cursorPageUp ed =
let cp = cursorPos ed
l = line cp
pageLen = (viewHeight ed) - 3
in fixScroll $ ed { cursorPos = cp { line = max 0 (l - pageLen) } }
updateCursor :: (Buffer -> Pos -> Pos) -> SimpleEditor -> SimpleEditor
updateCursor f ed = fixScroll $ ed { cursorPos = f (buffer ed) (insertPos ed) }
cursorNextPara = updateCursor posNextPara
cursorPrevPara = updateCursor posPrevPara
cursorNextWord = updateCursor posNextWord
cursorPrevWord = updateCursor posPrevWord
cursorFirstPos = updateCursor posFirstPos
cursorLastPos = updateCursor posLastPos
| sixohsix/tak | src/Tak/Editor/Cursor.hs | mit | 3,003 | 0 | 14 | 910 | 1,130 | 590 | 540 | 82 | 3 |
module Main where
import QLib
main :: IO ()
main = quantumRun
| byronwasti/Quantum-Computer-Simulation | haskell/app/Main.hs | mit | 65 | 0 | 6 | 15 | 22 | 13 | 9 | 4 | 1 |
module Test.Misc where
import Test.Hspec
import Control.Exception
import Text.Parsec.String
import Text.Parsec.Prim
import Language.Java.Parser
shouldParse :: (Show a, Eq a) => Parser a -> [(String, a)] -> Expectation
shouldParse parser =
mapM_ (\(source, expected) ->
case parse parser "" source of
Left _ -> expectationFailure ("Test case \"" ++
source ++ "\" failed")
Right result -> result `shouldBe` expected)
shouldFailOn :: (Show a, Eq a) => Parser a -> [String] -> Expectation
shouldFailOn parser =
mapM_ (\source ->
case parse parser "" source of
Left _ -> True `shouldBe` True
Right _ -> expectationFailure ("Test case \"" ++ source ++ "\" failed"))
to :: a -> b -> (a, b)
to = (,)
shouldParseJ p =
mapM_ (\(source, expected) ->
parseJava p source `shouldBe` expected)
shouldFailOnJ p =
mapM_ (\source ->
evaluate (parseJava p source)
`shouldThrow` anyErrorCall)
| evansb/jasper | test/Test/Misc.hs | mit | 1,088 | 0 | 14 | 351 | 349 | 189 | 160 | 28 | 2 |
{-# LANGUAGE ForeignFunctionInterface #-}
module System.Environment.FindBin
( __Bin__
, getProgPath
) where
import Foreign (Ptr, alloca, peek, peekElemOff)
import Foreign.C (CInt, CString, peekCString)
import System.Directory (canonicalizePath, findExecutable)
import System.FilePath (takeDirectory, takeBaseName)
import System.IO.Unsafe (unsafePerformIO)
{-# NOINLINE __Bin__ #-}
-- | Unsafe (/constant/) version of 'getProgPath'.
__Bin__ :: String
__Bin__ = let path = unsafePerformIO getProgPath
in length path `seq` path
-- | Get the full directory to the running program.
getProgPath :: IO String
getProgPath = alloca $ \p_argc -> alloca $ \p_argv -> do
getProgArgv p_argc p_argv
argv <- peek p_argv
findBin =<< peekCString =<< peekElemOff argv 0
where
directoryOf x = do
x' <- canonicalizePath x
let path = takeDirectory x'
return (length path `seq` path)
findBin s = case takeDirectory s of
"" -> do
                -- This should work for ghci as well, as long as nobody names
-- their executable file "<interactive>"...
rv <- findExecutable s
case rv of
Just fullName -> directoryOf fullName
_ -> alloca $ \p_argc' -> alloca $ \p_argv' -> do
-- Here we are in the "runghc"/"runhaskell" land. Fun!
getFullProgArgv p_argc' p_argv'
argc' <- peek p_argc'
argv' <- peek p_argv'
prog <- peekCString =<< peekElemOff argv' 0
s' <- case (takeBaseName prog) of
"runghc" -> peekCString =<< peekElemOff argv' (fromEnum argc'-1)
"runhaskell" -> peekCString =<< peekElemOff argv' (fromEnum argc'-1)
_ -> return prog
canon <- canonicalizePath s
canon' <- canonicalizePath s'
if canon == canon'
then findBin canon
else findBin s'
_ -> directoryOf s
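-- A typical use (illustrative sketch):
--
-- > main :: IO ()
-- > main = do
-- >   dir <- getProgPath
-- >   putStrLn ("This executable lives in " ++ dir)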
foreign import ccall unsafe "getFullProgArgv"
getFullProgArgv :: Ptr CInt -> Ptr (Ptr CString) -> IO ()
foreign import ccall unsafe "getProgArgv"
getProgArgv :: Ptr CInt -> Ptr (Ptr CString) -> IO ()
| soenkehahn/findbin | src/System/Environment/FindBin.hs | mit | 2,331 | 0 | 29 | 782 | 562 | 281 | 281 | 46 | 6 |
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.SVGZoomEvent
(js_getZoomRectScreen, getZoomRectScreen, js_getPreviousScale,
getPreviousScale, js_getPreviousTranslate, getPreviousTranslate,
js_getNewScale, getNewScale, js_getNewTranslate, getNewTranslate,
SVGZoomEvent, castToSVGZoomEvent, gTypeSVGZoomEvent)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSVal(..), JSString)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSVal(..), FromJSVal(..))
import GHCJS.Marshal.Pure (PToJSVal(..), PFromJSVal(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.JSFFI.Generated.Enums
foreign import javascript unsafe "$1[\"zoomRectScreen\"]"
js_getZoomRectScreen :: SVGZoomEvent -> IO (Nullable SVGRect)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGZoomEvent.zoomRectScreen Mozilla SVGZoomEvent.zoomRectScreen documentation>
getZoomRectScreen ::
(MonadIO m) => SVGZoomEvent -> m (Maybe SVGRect)
getZoomRectScreen self
= liftIO (nullableToMaybe <$> (js_getZoomRectScreen (self)))
foreign import javascript unsafe "$1[\"previousScale\"]"
js_getPreviousScale :: SVGZoomEvent -> IO Float
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGZoomEvent.previousScale Mozilla SVGZoomEvent.previousScale documentation>
getPreviousScale :: (MonadIO m) => SVGZoomEvent -> m Float
getPreviousScale self = liftIO (js_getPreviousScale (self))
foreign import javascript unsafe "$1[\"previousTranslate\"]"
js_getPreviousTranslate :: SVGZoomEvent -> IO (Nullable SVGPoint)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGZoomEvent.previousTranslate Mozilla SVGZoomEvent.previousTranslate documentation>
getPreviousTranslate ::
(MonadIO m) => SVGZoomEvent -> m (Maybe SVGPoint)
getPreviousTranslate self
= liftIO (nullableToMaybe <$> (js_getPreviousTranslate (self)))
foreign import javascript unsafe "$1[\"newScale\"]" js_getNewScale
:: SVGZoomEvent -> IO Float
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGZoomEvent.newScale Mozilla SVGZoomEvent.newScale documentation>
getNewScale :: (MonadIO m) => SVGZoomEvent -> m Float
getNewScale self = liftIO (js_getNewScale (self))
foreign import javascript unsafe "$1[\"newTranslate\"]"
js_getNewTranslate :: SVGZoomEvent -> IO (Nullable SVGPoint)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGZoomEvent.newTranslate Mozilla SVGZoomEvent.newTranslate documentation>
getNewTranslate ::
(MonadIO m) => SVGZoomEvent -> m (Maybe SVGPoint)
getNewTranslate self
= liftIO (nullableToMaybe <$> (js_getNewTranslate (self))) | manyoo/ghcjs-dom | ghcjs-dom-jsffi/src/GHCJS/DOM/JSFFI/Generated/SVGZoomEvent.hs | mit | 3,191 | 30 | 10 | 420 | 689 | 404 | 285 | 46 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Day9 (day9, day9', run) where
import Data.Attoparsec.Text
( Parser
, char
, decimal
, endOfLine
, isHorizontalSpace
, many'
, string
, parseOnly
, takeTill
)
import Data.Function (on)
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map (fromList)
import Data.Monoid (Sum(..))
import Data.Ord (comparing)
import Data.Text (Text, pack, unpack)
import Graph
-- Parsing types and type synonyms for the problem at hand
type City = Text
type Distance = Sum Int
data Source = Source
{ start :: City
, end :: City
, distance :: Distance
} deriving Show
type SourceData = EdgeWeightMap City Distance
compileMap :: [Source] -> SourceData
compileMap = Map.fromList . map toPair
where
toPair (Source s e d) = (Edge (Node s) (Node e), d)
parseSourceData :: Parser SourceData
parseSourceData = do
sources <- many' parseSource
return $ compileMap sources
parseSource :: Parser Source
parseSource = do
a <- takeTill isHorizontalSpace
string " to "
b <- takeTill isHorizontalSpace
string " = "
dist <- decimal
endOfLine
return $ Source a b (Sum dist)
-- Begin actual implementation
day9 :: String -> Int
day9 = day9Impl (compare `on` negate)
day9' :: String -> Int
day9' = day9Impl compare
day9Impl :: (Sum Int -> Sum Int -> Ordering) -> String -> Int
day9Impl comp input = case parseOnly parseSourceData . pack $ input of
(Left _) -> -1
(Right sourceData) -> case bestTraversal sourceData comp of
Nothing -> -2
(Just t) -> getSum . traversalWeight $ t
-- Input
run :: IO ()
run = do
putStrLn "Day 9 results: "
input <- readFile "inputs/day9.txt"
putStrLn $ " " ++ show (day9 input)
putStrLn $ " " ++ show (day9' input)
| brianshourd/adventOfCode2015 | src/Day9.hs | mit | 1,826 | 0 | 12 | 444 | 597 | 318 | 279 | 59 | 3 |
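-- A small sketch of the line parser above, assuming parseSource is in scope
-- (it is not exported from Day9, so this would sit in the same module or a
-- GHCi session with the module loaded).
demoParseSource :: IO ()
demoParseSource =
  case parseOnly parseSource (pack "London to Dublin = 464\n") of
    Left err -> putStrLn ("parse failed: " ++ err)
    Right s  -> print s  -- roughly: Source {start = "London", end = "Dublin", distance = Sum {getSum = 464}}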
{-# LANGUAGE QuasiQuotes #-}
module Main (main) where
import Control.Applicative ((<$>))
import Data.String.Interpolate (i)
import System.Cmd (system)
import System.Exit (ExitCode(..))
import Test.Assert (runAssertions)
import Paths_assertions (getDataFileName)
is0 :: ExitCode -> Bool
is0 = (== ExitSuccess)
is1 :: ExitCode -> Bool
is1 = (== ExitFailure 1)
quietly :: String -> IO ExitCode
quietly = system . (++"&>/dev/null")
main :: IO ()
main = getDataFileName "test/fixtures" >>= \fixtures -> do
greenTest <- is0 <$> quietly [i|runhaskell #{fixtures}/Green.hs|]
redTest <- is1 <$> quietly [i|runhaskell #{fixtures}/Red.hs|]
mixedTest <- is1 <$> quietly [i|runhaskell #{fixtures}/Mixed.hs|]
runAssertions $
[ ("When all tests in a suite pass, it should exit with 0.", greenTest)
, ("When some but not all of the tests in a suite pass, it should exit with 1.", mixedTest)
, ("When all of the tests in a suite fail, it should exit with 1.", redTest)
]
| fmap/assertions | test/Assert.hs | mit | 986 | 0 | 11 | 173 | 254 | 150 | 104 | 23 | 1 |
module Format.Format where
--import qualified Brick.Main as Brick
import Control.Monad (when, zipWithM)
import Data.List.Split
import qualified Data.List as List
import Text.Printf
type Title = String
type Summary = String
type ArxivId = String
data Entry = Entry Title Summary ArxivId deriving Show
-- ANSI color escape code
colorCode :: String
colorCode = "\x1b["
-- A format string for printf for printing text in color
-- Format is: color code| color number | bold code | color code terminator | string
-- ... | %d | %s | m | %s
simpleColorFormatString :: String
simpleColorFormatString = colorCode ++ "%d%sm%s" ++ colorCode ++ "0m"
-- List of supported color names
colorNames :: [String]
colorNames = ["black", "red", "green", "yellow", "blue", "magenta", "cyan", "white"]
-- |Format a string with a (possibly bolded) color
formatWithColor :: String -> String -> Bool -> String
formatWithColor string color bold =
case List.elemIndex color colorNames of
Just i -> printf simpleColorFormatString (i + 30) boldCode string
where boldCode = if bold then ";1"
else ""
Nothing -> string
entryMarker :: String
entryMarker = formatWithColor "▶ " "green" True
formatTest :: Entry -> IO ()
formatTest (Entry title summary id) = do
let idString = formatWithColor id "yellow" True
putStrLn (entryMarker ++ "\"" ++ title ++ "\" (arxiv id: " ++ idString ++ ")")
mapM_ (putStrLn . (\l -> " " ++ l)) (splitOn "\n" summary)
printTest :: IO ()
printTest = do
mapM_ (putStrLn . (\n -> formatWithColor "LolLeR" n False)) colorNames
mapM_ (putStrLn . (\n -> formatWithColor "LolLeR" n True)) colorNames
testEntries :: [Entry]
testEntries = [Entry "Title1" "This oirgoi\nbrgjlrg\nlsnrg" "075329",
Entry "Title2" "oubrsg\nbkrg\nubdglinf\noir" "93203"]
-- |The main loop for browsing arXiv digests
main :: IO ()
main = do renderEntry (Entry "Title1" "This oirgoi\nbrgjlrg\nlsnrg" "075329") True
renderEntry (Entry "Title1" "This oirgoi\nbrgjlrg\nlsnrg" "075329") False
{-main = sequence (renderEntries 0 testEntries)-}
{-main = runCurses $ do-}
{- setEcho False-}
{- window <- defaultWindow-}
{- userLoop window-}
{-userLoop :: Window -> IO ()-}
{-userLoop window = do-}
{- eventHandler-}
{- updateWindow window-}
{- renderEntries testEntries-}
{- render-}
{- userLoop-}
{-handleEvents :: Window -> IO ()-}
{-handleEvents window = do-}
{- event <- getEvent window Nothing-}
{- case event of-}
{- Just event' -> print event'-}
{- Nothing -> return ()-}
renderEntry :: Entry -> Bool -> IO ()
renderEntry (Entry title summary id) expand = do
let idString = formatWithColor id "yellow" True
putStrLn (entryMarker ++ "\"" ++ title ++ "\" (arxiv id: " ++ idString ++ ")")
when expand $ mapM_ (putStrLn . (\l -> " " ++ l)) (splitOn "\n" summary)
-- TODO: Why does this not work with sequence?
renderEntries :: Int -> [Entry] -> IO [()]
renderEntries activeIndex =
zipWithM (\i e -> renderEntry e (i == activeIndex)) [0..]
| andreasbock/arxivd | src/Format/Format.hs | mit | 3,174 | 0 | 13 | 731 | 729 | 397 | 332 | 47 | 3 |
module UUID
( randomUUID
) where
import Control.Applicative ((<$>))
import qualified Data.ByteString.Char8 as B (ByteString, pack)
import Data.UUID ()
import Data.UUID.V4 (nextRandom)
randomUUID :: IO B.ByteString
randomUUID = (B.pack . show) <$> nextRandom
| shak-mar/botstrats | common/UUID.hs | mit | 260 | 0 | 8 | 35 | 85 | 53 | 32 | 8 | 1 |
module Rebase.Control.Monad.Trans.State
(
module Control.Monad.Trans.State
)
where
import Control.Monad.Trans.State
| nikita-volkov/rebase | library/Rebase/Control/Monad/Trans/State.hs | mit | 119 | 0 | 5 | 12 | 26 | 19 | 7 | 4 | 0 |
{-
- IOUtil.hs - Unicode console IO
-
- http://stackoverflow.com/questions/10779149/unicode-console-i-o-in-haskell-on-windows
- Retrieved January 13 2015
-
-}
{-# LANGUAGE ForeignFunctionInterface #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE NoImplicitPrelude #-}
module IOUtil (
IOUtil.interact,
IOUtil.putChar, IOUtil.putStr, IOUtil.putStrLn, IOUtil.print,
IOUtil.getChar, IOUtil.getLine, IOUtil.getContents, IOUtil.readIO,
IOUtil.readLn,
ePutChar, ePutStr, ePutStrLn, ePrint,
trace, traceIO
) where
#ifdef mingw32_HOST_OS
import System.Win32.Types (BOOL, HANDLE, DWORD, LPDWORD, LPWSTR, LPCWSTR, LPVOID)
import Foreign.C.Types (CWchar)
import Foreign hiding (unsafePerformIO)
import Prelude hiding (getContents, putStr, putStrLn) --(IO, Read, Show, String)
--import qualified System.IO
import qualified System.IO (getContents)
import System.IO hiding (getContents, putStr, putStrLn)
import System.IO.Unsafe (unsafePerformIO)
import Data.Char (ord)
{- <http://msdn.microsoft.com/en-us/library/ms683231(VS.85).aspx>
HANDLE WINAPI GetStdHandle(DWORD nStdHandle);
returns INVALID_HANDLE_VALUE, NULL, or a valid handle -}
foreign import stdcall unsafe "GetStdHandle" win32GetStdHandle :: DWORD -> IO (HANDLE)
std_OUTPUT_HANDLE = -11 :: DWORD -- all DWORD arithmetic is performed modulo 2^n
std_ERROR_HANDLE = -12 :: DWORD
{- <http://msdn.microsoft.com/en-us/library/aa364960(VS.85).aspx>
DWORD WINAPI GetFileType(HANDLE hFile); -}
foreign import stdcall unsafe "GetFileType" win32GetFileType :: HANDLE -> IO (DWORD)
_FILE_TYPE_CHAR = 0x0002 :: DWORD
_FILE_TYPE_REMOTE = 0x8000 :: DWORD
{- <http://msdn.microsoft.com/en-us/library/ms683167(VS.85).aspx>
BOOL WINAPI GetConsoleMode(HANDLE hConsole, LPDWORD lpMode); -}
foreign import stdcall unsafe "GetConsoleMode" win32GetConsoleMode :: HANDLE -> LPDWORD -> IO (BOOL)
_INVALID_HANDLE_VALUE = (intPtrToPtr $ -1) :: HANDLE
is_a_console :: HANDLE -> IO (Bool)
is_a_console handle
= if (handle == _INVALID_HANDLE_VALUE) then return False
else do ft <- win32GetFileType handle
if ((ft .&. complement _FILE_TYPE_REMOTE) /= _FILE_TYPE_CHAR) then return False
else do ptr <- malloc
cm <- win32GetConsoleMode handle ptr
free ptr
return cm
real_stdout :: IO (Bool)
real_stdout = is_a_console =<< win32GetStdHandle std_OUTPUT_HANDLE
real_stderr :: IO (Bool)
real_stderr = is_a_console =<< win32GetStdHandle std_ERROR_HANDLE
{- BOOL WINAPI WriteConsoleW(HANDLE hOutput, LPWSTR lpBuffer, DWORD nChars,
LPDWORD lpCharsWritten, LPVOID lpReserved); -}
foreign import stdcall unsafe "WriteConsoleW" win32WriteConsoleW
:: HANDLE -> LPWSTR -> DWORD -> LPDWORD -> LPVOID -> IO (BOOL)
data ConsoleInfo = ConsoleInfo Int (Ptr CWchar) (Ptr DWORD) HANDLE
writeConsole :: ConsoleInfo -> [Char] -> IO ()
writeConsole (ConsoleInfo bufsize buf written handle) string
= let fillbuf :: Int -> [Char] -> IO ()
fillbuf i [] = emptybuf buf i []
fillbuf i remain@(first:rest)
| i + 1 < bufsize && ordf <= 0xffff = do pokeElemOff buf i asWord
fillbuf (i+1) rest
| i + 1 < bufsize && ordf > 0xffff = do pokeElemOff buf i word1
pokeElemOff buf (i+1) word2
fillbuf (i+2) rest
| otherwise = emptybuf buf i remain
where ordf = ord first
asWord = fromInteger (toInteger ordf) :: CWchar
sub = ordf - 0x10000
word1' = ((shiftR sub 10) .&. 0x3ff) + 0xD800
word2' = (sub .&. 0x3FF) + 0xDC00
word1 = fromInteger . toInteger $ word1'
word2 = fromInteger . toInteger $ word2'
emptybuf :: (Ptr CWchar) -> Int -> [Char] -> IO ()
emptybuf _ 0 [] = return ()
emptybuf _ 0 remain = fillbuf 0 remain
emptybuf ptr nLeft remain
= do let nLeft' = fromInteger . toInteger $ nLeft
ret <- win32WriteConsoleW handle ptr nLeft' written nullPtr
nWritten <- peek written
let nWritten' = fromInteger . toInteger $ nWritten
if ret && (nWritten > 0)
then emptybuf (ptr `plusPtr` (nWritten' * szWChar)) (nLeft - nWritten') remain
else fail "WriteConsoleW failed.\n"
in fillbuf 0 string
szWChar = sizeOf (0 :: CWchar)
makeConsoleInfo :: DWORD -> Handle -> IO (Either ConsoleInfo Handle)
makeConsoleInfo nStdHandle fallback
= do handle <- win32GetStdHandle nStdHandle
is_console <- is_a_console handle
let bufsize = 10000
if not is_console then return $ Right fallback
else do buf <- mallocBytes (szWChar * bufsize)
written <- malloc
return . Left $ ConsoleInfo bufsize buf written handle
{-# NOINLINE stdoutConsoleInfo #-}
stdoutConsoleInfo :: Either ConsoleInfo Handle
stdoutConsoleInfo = unsafePerformIO $ makeConsoleInfo std_OUTPUT_HANDLE stdout
{-# NOINLINE stderrConsoleInfo #-}
stderrConsoleInfo :: Either ConsoleInfo Handle
stderrConsoleInfo = unsafePerformIO $ makeConsoleInfo std_ERROR_HANDLE stderr
interact :: (String -> String) -> IO ()
interact f = do s <- getContents
putStr (f s)
conPutChar ci = writeConsole ci . replicate 1
conPutStr = writeConsole
conPutStrLn ci = writeConsole ci . ( ++ "\n")
putChar :: Char -> IO ()
putChar = (either conPutChar hPutChar ) stdoutConsoleInfo
putStr :: String -> IO ()
putStr = (either conPutStr hPutStr ) stdoutConsoleInfo
putStrLn :: String -> IO ()
putStrLn = (either conPutStrLn hPutStrLn) stdoutConsoleInfo
print :: Show a => a -> IO ()
print = putStrLn . show
getChar = System.IO.getChar
getLine = System.IO.getLine
getContents = System.IO.getContents
readIO :: Read a => String -> IO a
readIO = System.IO.readIO
readLn :: Read a => IO a
readLn = System.IO.readLn
ePutChar :: Char -> IO ()
ePutChar = (either conPutChar hPutChar ) stderrConsoleInfo
ePutStr :: String -> IO ()
ePutStr = (either conPutStr hPutStr ) stderrConsoleInfo
ePutStrLn :: String -> IO ()
ePutStrLn = (either conPutStrLn hPutStrLn) stderrConsoleInfo
ePrint :: Show a => a -> IO ()
ePrint = ePutStrLn . show
#else
import qualified System.IO
import Prelude (IO, Read, Show, String)
interact = System.IO.interact
putChar = System.IO.putChar
putStr = System.IO.putStr
putStrLn = System.IO.putStrLn
getChar = System.IO.getChar
getLine = System.IO.getLine
getContents = System.IO.getContents
ePutChar = System.IO.hPutChar System.IO.stderr
ePutStr = System.IO.hPutStr System.IO.stderr
ePutStrLn = System.IO.hPutStrLn System.IO.stderr
print :: Show a => a -> IO ()
print = System.IO.print
readIO :: Read a => String -> IO a
readIO = System.IO.readIO
readLn :: Read a => IO a
readLn = System.IO.readLn
ePrint :: Show a => a -> IO ()
ePrint = System.IO.hPrint System.IO.stderr
#endif
trace :: String -> a -> a
trace string expr = unsafePerformIO $ do
traceIO string
return expr
traceIO :: String -> IO ()
traceIO = ePutStrLn | SwiftsNamesake/ElegantChess | IOUtil.hs | mit | 7,491 | 0 | 16 | 1,982 | 1,754 | 914 | 840 | 36 | 1 |
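-- A minimal caller, assuming the IOUtil module above is importable. On a
-- real Windows console the output goes through WriteConsoleW; elsewhere it
-- falls back to System.IO, so Unicode text survives either way.
module DemoIOUtil where

import qualified IOUtil

demoIOUtil :: IO ()
demoIOUtil = do
  IOUtil.putStrLn "Unicode console test: αβγ ✓"
  IOUtil.ePutStrLn "this line goes to stderr"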
module Auth0.API
( module Auth0.API.Types
,
) where
import Auth0.API.Authentification
import Auth0.API.Management
import Auth0.API.Config
import Auth0.API.Types
| kelecorix/api-auth0 | src/Auth0/API.hs | mit | 179 | 0 | 5 | 34 | 40 | 27 | 13 | 6 | 0 |
isBigGang :: Int -> (Bool, String)
isBigGang x = let n = 9 in (x > n, "Compared the gang size to " ++ show n ++ ".")
applyLog :: (a, String) -> (a -> (b, String)) -> (b, String)
applyLog (x,log) f = let (y, newLog) = f x in (y, log++newLog) | RAFIRAF/HASKELL | For a Few Monads More/bigBang.hs | mit | 241 | 0 | 10 | 53 | 141 | 77 | 64 | 4 | 1 |
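-- A minimal usage sketch, assuming the two definitions above are in scope
-- (same file or GHCi); applyLog threads the log through the second component.
mainBigGang :: IO ()
mainBigGang = do
  print ((3, "Smallish gang. ") `applyLog` isBigGang)
  -- (False,"Smallish gang. Compared the gang size to 9.")
  print ((30, "A freaking platoon. ") `applyLog` isBigGang)
  -- (True,"A freaking platoon. Compared the gang size to 9.")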
{-# LANGUAGE OverloadedStrings #-}
module Data.GiveYouAHead.JSON
(
USettings(..),
getUSettings
) where
import Data.GiveYouAHead(USettings(..))
import Data.Aeson(FromJSON(..),ToJSON(..),decode,Value(..),object,(.:),(.=))
import Control.Applicative()
import Data.ByteString.Lazy.Char8(pack)
import GiveYouAHead.Common(readF)
instance FromJSON USettings where
parseJSON (Object v) = USettings
<$> (v .: "SystemShell")
instance ToJSON USettings where
toJSON (USettings sysSh) =
object ["SystemShell" .= sysSh]
getUSettings :: FilePath -> IO (Maybe USettings)
getUSettings = (>>= return.decode.pack).readF
| Qinka/GiveYouAHead | lib/Data/GiveYouAHead/JSON.hs | mit | 742 | 0 | 8 | 198 | 206 | 124 | 82 | 18 | 1 |
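-- A hedged sketch of decoding with the FromJSON instance above; it assumes
-- Data.GiveYouAHead.JSON is importable and that USettings carries just the
-- "SystemShell" field the instance reads.
module DemoUSettings where

import Data.Aeson (decode)
import Data.ByteString.Lazy.Char8 (pack)
import Data.GiveYouAHead.JSON (USettings (..))

demoUSettings :: Maybe USettings
demoUSettings = decode (pack "{\"SystemShell\": \"/bin/sh\"}")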
module Calico.Text.IO
( module Calico.Text
, putText
, putText'
, putTextLn
, putTextLn'
, hPutText
, hPutText'
, hPutTextLn
, hPutTextLn'
) where
import Prelude ()
import Calico.Base
import Calico.Text
import Calico.IO
import qualified Data.Text.IO as IO
putText :: Text -> IO ()
putText = IO.putStr
putText' :: Text -> IO ()
putText' = (>> hFlush stdout) . putText
putTextLn :: Text -> IO ()
putTextLn = IO.putStrLn
putTextLn' :: Text -> IO ()
putTextLn' = (>> hFlush stdout) . putTextLn
hPutText :: Handle -> Text -> IO ()
hPutText = IO.hPutStr
hPutText' :: Handle -> Text -> IO ()
hPutText' h = (>> hFlush h) . hPutText h
hPutTextLn :: Handle -> Text -> IO ()
hPutTextLn = IO.hPutStrLn
hPutTextLn' :: Handle -> Text -> IO ()
hPutTextLn' h = (>> hFlush h) . hPutTextLn h
| Rufflewind/calico-hs | Calico/Text/IO.hs | mit | 802 | 0 | 8 | 165 | 301 | 166 | 135 | 31 | 1 |
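-- A sketch of why the primed variants exist, assuming the module above is
-- importable: putText' flushes stdout, so a prompt appears before getLine blocks.
module DemoCalico where

import Data.Text (pack)
import Calico.Text.IO (putText', putTextLn)

demoPrompt :: IO ()
demoPrompt = do
  putText' (pack "name> ")          -- flushed immediately
  name <- getLine
  putTextLn (pack ("hello, " ++ name))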
-----------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.Plot.Gtk.UI
-- Copyright : (c) Sumit Sahrawat
-- License : GPL-2
--
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
--
-- Functions to build and make changes to 'Figure's
--
--------------------------------------------------------------------------------
module Graphics.Rendering.Plot.Gtk.UI.Figure where
--------------------------------------------------------------------------------
-- Standard Libraries
import Control.Monad (unless, when)
import Data.IORef (IORef, readIORef)
import Data.Maybe (fromJust, isJust)
--------------------------------------------------------------------------------
-- Other Libraries
-- import Data.Colour.SRGB (sRGB24read)
import Graphics.Rendering.Plot
import Graphics.UI.Gtk (Adjustment,
adjustmentGetValue)
import Numeric.LinearAlgebra (linspace, cmap)
--------------------------------------------------------------------------------
-- Custom Modules
import Graphics.Rendering.Plot.Gtk.UI.Settings
--------------------------------------------------------------------------------
updateFigureText :: (Text () -> Figure ())
-> Maybe String -> Double -> Figure ()
updateFigureText withSomething txt size =
case txt of
Nothing -> return ()
Just str -> withSomething . unless (null str) $ do
setText str
setFontSize size
--------------------------------------------------------------------------------
updateAxis :: AxisType -> Bool -> (AxisPosn, AxisSide) -> Maybe (Double, Double)
-> Scale -> Maybe String -> FontSize -> Plot ()
updateAxis axis state location range scale label size = do
let (position, side) = location
when state $ addAxis axis position
(when (isJust label) $ withAxisLabel $ do setText . fromJust $ label
setFontSize size)
maybe (setRangeFromData axis side scale)
(uncurry (setRange axis side scale))
range
--------------------------------------------------------------------------------
figurePlot :: ([Double] -> Double -> Double)
-> IORef FigureSettings
-> [Adjustment]
-> IO (Figure ())
figurePlot g iofset adjs = do
fset <- readIORef iofset
vars <- mapM adjustmentGetValue adjs
let rate = samplingRate fset
range = fromJust $ xRange fset
samples = round $ (\(x, y) -> rate * (y - x)) range
domain = linspace samples range
func = g vars
stype = plotType fset
dset = [(stype, domain, cmap func domain)]
return $ buildFigure dset fset
--------------------------------------------------------------------------------
buildFigure :: Dataset a => a -> FigureSettings -> Figure ()
buildFigure dset fset = do
withTextDefaults $ setFontFamily (fontFamily fset)
let str = plotTitle fset
size = plotTitleSize fset
in updateFigureText withTitle str size
let str = subTitle fset
size = subTitleSize fset
in updateFigureText withSubTitle str size
setPlots 1 1
withPlot (1, 1) $ do
setDataset dset
-- X-Axis
let state = showXAxis fset
label = xLabel fset
size = xLabelSize fset
loc = xLocation fset
range = xRange fset
scale = plotScaleX fset
in updateAxis XAxis state loc range scale label size
-- Y-Axis
let state = showYAxis fset
label = yLabel fset
size = yLabelSize fset
loc = yLocation fset
range = yRange fset
scale = plotScaleY fset
in updateAxis YAxis state loc range scale label size
--------------------------------------------------------------------------------
| sumitsahrawat/plot-gtk-ui | lib/Graphics/Rendering/Plot/Gtk/UI/Figure.hs | gpl-2.0 | 4,133 | 0 | 16 | 1,153 | 899 | 460 | 439 | 68 | 2 |
{-# LANGUAGE TemplateHaskell #-}
module Hint.Interpreter.Context
( Command
, Environment
, Macros
, Context(..)
, Error(..)
, initial
, raise
, command
, macroses
, environment
, history
, promptString
, output
, welcome
, farewell
, promptLarge
, promptSmall
, message
, context
) where
import Control.Lens hiding (Context)
type Command = String
data Environment = Environment
{ _promptLarge:: String
, _promptSmall :: String
, _welcome :: String
, _farewell :: String
} deriving (Show)
type Macros = String
data Context = Context
{ _command :: Command
, _macroses :: [Macros]
, _environment :: Environment
, _history :: [Command]
, _promptString :: String
, _output :: String
} deriving (Show)
data Error = Error
{ _message :: String
, _context :: Context
}
makeLenses ''Context
makeLenses ''Environment
makeLenses ''Error
instance Show Error where
show e =
"ERROR: " ++ (e^.message) ++
"\nCONTEXT: " ++ show (e^.context)
initial :: Context
initial = Context
{ _command = ""
, _macroses = []
, _environment = Environment
{ _promptLarge = ">> "
, _promptSmall = "> ... "
, _welcome = "Hello!"
, _farewell = "Goodbye!"
}
, _history = []
, _promptString = ">> "
, _output = ""
}
raise :: String -> Context -> Error
raise string c = Error
{ _message = string
, _context = c
}
| go1dshtein/hint | src/haskell/main/Hint/Interpreter/Context.hs | gpl-2.0 | 1,525 | 0 | 10 | 471 | 401 | 244 | 157 | 64 | 1 |
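-- A small usage sketch for the lenses generated above (a guess at typical
-- usage, not part of the original module); makeLenses derives `environment`,
-- `welcome`, `promptString`, and friends, so fields read and update via lens
-- operators.
module DemoContext where

import Control.Lens ((^.), (&), (.~))
import Hint.Interpreter.Context

demoContext :: IO ()
demoContext = do
  putStrLn (initial ^. environment . welcome)   -- prints "Hello!"
  let c = initial & promptString .~ "? "
  print (raise "unterminated macro" c)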
module Main (
main
) where
import Text.Pandoc.JSON
import Text.Pandoc
import System.Process
import Data.List
import System.IO.Temp
import System.IO
import System.Directory
-- | Merges the code block and its output into a Pandoc 'Block'
combine :: Block -> String -> Block
combine cb out = Div ("", [], []) ([cb] ++ bs)
where (Pandoc _ bs) = readDoc out
readDoc :: String -> Pandoc
readDoc s = case readMarkdown def s of
Right doc -> doc
Left err -> error (show err)
output :: Block -> IO Block
output cb@(CodeBlock (_, ["haskell"], namevals) xs) =
case lookup "runWith" namevals of
Just c -> do
(tmpFile, hFile) <- openTempFile "./" "*.hs"
hPutStr hFile xs
hClose hFile
let ([cName], params) = splitAt 1 $ words c
out <- readProcess cName (params ++ [tmpFile]) []
removeFile tmpFile
return $ combine cb out
Nothing -> return cb
output x = return x
main :: IO ()
main = toJSONFilter output
| iemxblog/pandoc-code-block-output | Main.hs | gpl-2.0 | 1,043 | 0 | 14 | 309 | 374 | 190 | 184 | 31 | 2 |
-- #hide
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.GL.PointParameter
-- Copyright : (c) Sven Panne 2002-2009
-- License : BSD-style (see the file libraries/OpenGL/LICENSE)
--
-- Maintainer : [email protected]
-- Stability : stable
-- Portability : portable
--
-- This is a purely internal module for setting point parameters.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.GL.PointParameter (
PointParameter(..), pointParameterf, pointParameterfv
) where
import Foreign.Ptr
import Graphics.Rendering.OpenGL.Raw.ARB.Compatibility (
gl_POINT_DISTANCE_ATTENUATION, gl_POINT_SIZE_MAX, gl_POINT_SIZE_MIN )
import Graphics.Rendering.OpenGL.Raw.Core31
--------------------------------------------------------------------------------
data PointParameter =
PointSizeMin
| PointSizeMax
| PointFadeThresholdSize
| PointDistanceAttenuation
marshalPointParameter :: PointParameter -> GLenum
marshalPointParameter x = case x of
PointSizeMin -> gl_POINT_SIZE_MIN
PointSizeMax -> gl_POINT_SIZE_MAX
PointFadeThresholdSize -> gl_POINT_FADE_THRESHOLD_SIZE
PointDistanceAttenuation -> gl_POINT_DISTANCE_ATTENUATION
--------------------------------------------------------------------------------
pointParameterf :: PointParameter -> GLfloat -> IO ()
pointParameterf = glPointParameterf . marshalPointParameter
pointParameterfv :: PointParameter -> Ptr GLfloat -> IO ()
pointParameterfv = glPointParameterfv . marshalPointParameter
| ducis/haAni | hs/common/Graphics/Rendering/OpenGL/GL/PointParameter.hs | gpl-2.0 | 1,625 | 0 | 8 | 191 | 191 | 118 | 73 | 21 | 4 |
-- Construct all height-balanced trees
-- with a given number of nodes n
import Data.List
data Tree a = Empty | Branch a (Tree a) (Tree a) deriving (Show, Eq)
p59 :: a -> Int -> [Tree a]
p59 x n = trees !! n where
trees = [Empty] : [Branch x Empty Empty] : zipWith combine (tail trees) trees where
combine t st = [ Branch x l r | (lt, rt) <- [(t,t), (t,st), (st,t)],
l <- lt, r <- rt ]
p60 :: Char -> Int -> [Tree Char]
p60 _ 0 = [Empty]
p60 x n = concatMap filteredTrees [minH .. maxH] where
filteredTrees = filter ( (n==) . countN ) . p59 x
minNodes = 0 : 1 : zipWith (\x y -> x + y + 1) minNodes (tail minNodes)
minH = ceiling $ logBase 2 $ fromIntegral (n+1)
maxH = (fromJ $ findIndex (>n) minNodes) - 1 where
fromJ (Just j) = j
countN Empty = 0
countN (Branch _ l r) = 1 + (countN l) + (countN r)
| yalpul/CENG242 | H99/54-60/p60.hs | gpl-3.0 | 942 | 0 | 13 | 323 | 439 | 234 | 205 | 17 | 2 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TupleSections #-}
-- | A module for generating widgets that show (and allow editing of) Karnaugh maps.
module Dep.Ui.Karnaugh (
karnaugh1Widget,karnaughWidget
) where
import Control.Arrow(first,second)
import Control.Monad(mapM,(>=>))
import Data.Bits(shiftL,shiftR)
import Data.IORef(readIORef)
import Data.List
import Data.Tuple(swap)
import qualified Data.Text as T
import Debug.Trace
import Graphics.Vty.Image
import Graphics.Vty.Input.Events
import Graphics.Vty.Prelude
import Graphics.Vty.Widgets.All
import Graphics.Vty.Widgets.Events
import Dep.Algorithms()
import Dep.Printing()
import Dep.Structures
import Dep.Ui.Utils(KeyContextHandler(..),swapAttr,Decorator(..),UiDecorator(..),alternate,matrixImg,composeImg,flattenImg,handleKeyWidget,WidgetKeyHandling(..),uiCurX,uiCurY,shiftCursorWithPosition)
import Dep.Ui.Utils.Boxes(hBoxes,setBoxesSpacing)
import Dep.Ui.Utils.Scrollable(autoScrollable)
import Dep.Utils(selectBool,burst,burstInner,burstItems,concatReplicate,(<&|>),(<&->))
import Dep.Algorithms.Comb(synthetizeFun)
type Inr a = a -> a
type Opr a = a -> a -> a
type Tu2 a = (a,a)
type Ta2 a = [Tu2 a]
instance KeyContextHandler CombFunc (Decorator CombFunc UiDecorator) where
handleKeyCtx k m c fnc = Just $ specifyModMay pth tos fnc
where dc = decorators c
pth = cursmt 4 (uiCurX dc,uiCurY dc)
tos = handleKeyCtx k m c :: BitTh -> Maybe BitTh
instance Show CombFunc where
show (CF ct i) = show (cttab ct)++'/':show i
instance WidgetKeyHandling CombFunc
displayKarnaugh :: Widget (Decorator CombFunc UiDecorator) -> DisplayRegion -> RenderContext -> IO Image
displayKarnaugh w d c = do
r <- readIORef w
Decorator (CF (CT n tr) vi) (_:_:Option d _ _:_) <- getState w
return $ dispK c d n tr vi r
dispK c d n tr vi r = (twig <|> kMark '\x251c' '\x2500' '\x2524' (<|>) (<->) norm nw 0) <-> (string norm (replicate (nwh-nh) ' ') <|> kMark '\x252c' '\x2502' '\x2534' (<->) (<|>) norm nh 1 <|> outerKgh norm d n vi tr)
where nh = div n 2
nw = n-nh
nwh = max nw nh
norm = normalAttr c
twig = matrixImg twigyx norm nwh nwh
where twigyx 0 x | x > 0 = (!!) (show (vi-1) ++ repeat ' ') (x-1) --TODO: fix show twig
| otherwise = 'f'
twigyx y x | y == x = '\\'
| otherwise = ' '
kMark :: Char -> Char -> Char -> Opr Image -> Opr Image -> Attr -> Int -> Int -> Image
kMark c0 c1 c2 fw fh a n0 = kMark0 n2
where n2 = shiftL 1 n0
w = 2*n2+1
br s n i = foldl1 fw . map (char a) $ take w $ (++) (replicate s ' ') $ cycle $ burstInner c0 c1 c2 (show i) (n+1) ++ replicate (n-1) ' '
kMark0 :: Int -> Int -> Image
kMark0 n i = fh (br n n i) $ kMark1 (div n 2) $ i+2
kMark1 :: Int -> Int -> Image
kMark1 1 _ = emptyImage
kMark1 n i = fh (br n (2*n) i) $ kMark1 (div n 2) $ i+2
outerKgh :: Attr -> Bool -> Int -> Int -> Three [BitTh] -> Image
outerKgh a@(Attr aa _ ac) c n v t = composeImg $ flattenImg $ lyr [(a,(:) tp $ zipWith (\i x -> li i : x ++ [ri i]) (cycle [False,True]) inr++[bt])]
where inr = innerKgh n v t
iw = length $ head inr
tp = '\x250f' : take iw (cycle "\x2501\x252f") ++ "\x2513"
bt = '\x2517' : take iw (cycle "\x2501\x2537") ++ "\x251b"
li True = '\x2520'
li _ = '\x2503'
ri True = '\x2528'
ri _ = '\x2503'
lyr = selectBool c ((++) $ zip (map (\i -> Attr aa (SetTo $ ISOColor i) ac) (cycle [1..6])) $ map (uncurry (printRmb n) . rmb n) $ reverse $ terms $ synthetizeFun (CF (CT n t) v)) id --reverse is used to make small regions more visible
innerKgh :: Int -> Int -> Three [BitTh] -> [String]
innerKgh n0 v = dykghW n0
where v1 = v-1
dykghW = dykgh dykghH (zipWith3 (\j x -> (++) x . f j . reverse) (cycle [False,True]))
f True = (:) '\x253c'
f _ = (:) '\x2502'
dykghH = dykgh dykghW (\x -> (++) x . (:) (take (length $ head x) $ cycle "\x2500\x253c") . reverse)
dykgh :: (Int -> Three [BitTh] -> [String]) -> Opr [String] -> Int -> Three [BitTh] -> [String]
dykgh f _ 0 (ThLeaf a) = [[show a !! v1]]
dykgh f m n t = uncurry m $ decap t
where fn = f (n-1)
decap :: Three [BitTh] -> ([String],[String])
decap l@(ThLeaf _) = (ls,ls)
where ls = fn l
decap (ThDirect l) = (ls,ls)
where ls = fn l
decap (ThNode la lb) = (fn la,fn lb)
karnaugh1Widget :: CombFunc -> IO (Widget (Decorator CombFunc UiDecorator))
karnaugh1Widget cf@(CF (CT n _) _) = newWidget (Decorator cf [CursorX 0 (shiftL 1 (div (n+1) 2)-1) 0,CursorY 0 (shiftL 1 (div n 2)-1) 0,Option True (KChar 'c') 0]) $ \x -> x {
growHorizontal_ = const $ return False,
growVertical_ = const $ return False,
render_ = displayKarnaugh,
keyEventHandler = handleKeyWidget,
getCursorPosition_ = kCurPos
}
kCurPos :: Widget (Decorator CombFunc UiDecorator) -> IO (Maybe (Int,Int))
kCurPos wg = do
Decorator (CF (CT n _) _) (CursorX _ _ cx:CursorY _ _ cy:_) <- getState wg
shiftCursorWithPosition wg $ let m = 1+div (n+1) 2 in Just (2*cx+m,2*cy+m)
karnaughWidget ct = do
es <- mapM (karnaugh1Widget . CF ct) [1..ysize ct]
(e,fg) <- hBoxes es
setBoxesSpacing e 1
w <- autoScrollable e
return (w,fg)
rmb :: Int -> [Int] -> Tu2 (Ta2 Int)
rmb n = rmbW n 1
where mgrmb :: Int -> [(Int,Int)] -> [(Int,Int)] -> [(Int,Int)]
mgrmb d [] ys = swaprmb (shiftL d 1) ys
mgrmb _ xs [] = xs
mgrmb d xs ys = init xs++f (last xs) ysmh
where (ysmh:ysms) = swaprmb (shiftL d 1) ys
f (xa,dx) (ya,dy) | xa+dx >= ya = (xa,max dx $ ya+dy-xa) : ysms
| otherwise = (xa,dx) : (ya,dy) : ysms
swaprmb :: Int -> [(Int,Int)] -> [(Int,Int)]
swaprmb d = swaprmb' []
where swaprmb' :: [(Int,Int)] -> [(Int,Int)] -> [(Int,Int)]
swaprmb' ts ((xa,dx):xs) = swaprmb' ((d-xa-dx,dx):ts) xs
swaprmb' ts _ = ts
rmbW :: Int -> Int -> [Int] -> Tu2 (Ta2 Int)
rmbW = rmbWH rmbH (\w d b -> ([(0,b)],[(d,w)])) snd second
rmbH :: Int -> Int -> [Int] -> Tu2 (Ta2 Int)
rmbH = rmbWH rmbW (\w d b -> ([(d,w)],[(0,b)])) fst first
rmbWH :: (Int -> Int -> [Int] -> Tu2 (Ta2 Int)) -> (Int -> Int -> Int -> Tu2 (Ta2 Int)) -> (Tu2 (Ta2 Int) -> Ta2 Int) -> (Inr (Ta2 Int) -> Tu2 (Ta2 Int) -> Tu2 (Ta2 Int)) -> Int -> Int -> [Int] -> Tu2 (Ta2 Int)
rmbWH f _ _ _ 0 _ _ = ([],[])
rmbWH f _ s m n i xs@(x:xs2@(_:_)) | i == (-x) = irs
| i == x = m (mgrmb w []) irs
where w = shiftL 1 $ shiftR (n-1) 1
irf = f (n-1) (i+1)
irs = irf xs2
rmbWH f g s m n i xs@(x:_) | i == (-x) = g w 0 b
| i == x = g w w b
where w = shiftL 1 $ shiftR (n-1) 1
b = shiftL 1 $ shiftR n 1
rmbWH f _ s m n i xs@(_:_) = m (mgrmb w $ s ir) ir
where w = shiftL 1 $ shiftR (n-1) 1
ir = f (n-1) (i+1) xs
rmbWH _ _ _ _ _ _ _ = ([],[])
printRmb :: Int -> Ta2 Int -> Ta2 Int -> [String] --TODO: modulo check
printRmb n ys xs = burstItems l0 l1 l2 l3 h (outlineG ys)
where xsg = outlineG xs
nh = shiftR n 1
nw = n-nh
w = 1+shiftL 2 nw
h = 1+shiftL 2 nh
l0 = replicate w ' '
l1 = burstItems ' ' '\x250c' '\x2500' '\x2510' w xsg
l2 = burstItems ' ' '\x2502' ' ' '\x2502' w xsg
l3 = burstItems ' ' '\x2514' '\x2500' '\x2518' w xsg
outlineG :: [(Int,Int)] -> [(Int,Int)]
outlineG = map (\(x,y) -> (2*x,2*y+1))
cursmt :: Int -> (Int,Int) -> BitThSeq --TODO: optimize
cursmt = mt mth ntow fst first
where ntow n = shiftL 1 $ shiftR (n+1) 1
mth = mt cursmt ntow snd second
mt _ _ _ _ 0 _ = []
mt r w s m n c | sc >= hl = T : rin (m (const $ wn-sc-1) c)
| otherwise = F : rin c
where sc = s c
wn = w n
hl = shiftR wn 1
rin = r (n-1)
| KommuSoft/dep-software | Dep.Ui.Karnaugh.hs | gpl-3.0 | 8,760 | 0 | 22 | 2,945 | 4,064 | 2,136 | 1,928 | 166 | 8 |