column              type           range
code                stringlengths  5 .. 1.03M
repo_name           stringlengths  5 .. 90
path                stringlengths  4 .. 158
license             stringclasses  15 values
size                int64          5 .. 1.03M
n_ast_errors        int64          0 .. 53.9k
ast_max_depth       int64          2 .. 4.17k
n_whitespaces       int64          0 .. 365k
n_ast_nodes         int64          3 .. 317k
n_ast_terminals     int64          1 .. 171k
n_ast_nonterminals  int64          1 .. 146k
loc                 int64          -1 .. 37.3k
cycloplexity        int64          -1 .. 1.31k
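The schema above maps naturally onto a record type. Below is a minimal sketch of one dataset row as a Haskell record, assuming only the column names and value ranges listed in the schema; the module name, record name, and the per-field comments (inferred from the column names) are illustrative and not part of the dataset.

module CodeSampleRow where

-- One row of the dataset, field for field. String/Int stand in for the
-- viewer's stringlengths/stringclasses/int64 column types.
data CodeSampleRow = CodeSampleRow
  { code             :: String   -- flattened source text (5 .. ~1.03M characters)
  , repoName         :: String   -- "owner/repo"
  , path             :: FilePath -- file path inside the repository
  , license          :: String   -- one of 15 license classes
  , size             :: Int      -- source size in bytes
  , nAstErrors       :: Int      -- AST error count
  , astMaxDepth      :: Int      -- maximum AST depth
  , nWhitespaces     :: Int      -- whitespace count
  , nAstNodes        :: Int      -- total AST nodes (terminals + nonterminals in the rows below)
  , nAstTerminals    :: Int      -- leaf nodes
  , nAstNonterminals :: Int      -- inner nodes
  , loc              :: Int      -- lines of code; -1 in some rows, presumably not computed
  , cycloplexity     :: Int      -- complexity metric; -1 in some rows, presumably not computed
  } deriving (Show, Eq)
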
module Data.HashPSQ.Tests
    ( tests
    ) where

import Prelude hiding (lookup)

import Data.Maybe (isNothing)
import Test.Framework (Test)
import Test.Framework.Providers.HUnit (testCase)
import Test.HUnit (Assertion, assert)

import Data.HashPSQ.Internal
import qualified Data.OrdPSQ as OrdPSQ
import Data.PSQ.Tests.Util

--------------------------------------------------------------------------------
-- Index of tests
--------------------------------------------------------------------------------

tests :: [Test]
tests =
    [ testCase "showBucket" test_showBucket
    , testCase "toBucket"   test_toBucket
    ]

--------------------------------------------------------------------------------
-- Unit tests
--------------------------------------------------------------------------------

test_showBucket :: Assertion
test_showBucket = assert $ length (coverShowInstance bucket) > 0
  where
    bucket :: Bucket Int Int Char
    bucket = B 1 'a' OrdPSQ.empty

test_toBucket :: Assertion
test_toBucket = assert $ isNothing $
    toBucket (OrdPSQ.empty :: OrdPSQ.OrdPSQ Int Int Char)
repo_name: meiersi/psqueues-old
path: tests/Data/HashPSQ/Tests.hs
license: bsd-3-clause
size: 1,267
n_ast_errors: 0
ast_max_depth: 9
n_whitespaces: 315
n_ast_nodes: 215
n_ast_terminals: 127
n_ast_nonterminals: 88
loc: 22
cycloplexity: 1

{-# LANGUAGE PatternGuards #-} module Idris.CaseSplit(splitOnLine, replaceSplits, getClause, getProofClause, mkWith, nameMissing, getUniq, nameRoot) where -- splitting a variable in a pattern clause import Idris.AbsSyntax import Idris.AbsSyntaxTree (Idris, IState, PTerm) import Idris.ElabDecls import Idris.Delaborate import Idris.Parser import Idris.Error import Idris.Output import Idris.Elab.Value import Idris.Elab.Term import Idris.Core.TT import Idris.Core.Typecheck import Idris.Core.Evaluate import Data.Maybe import Data.Char import Data.List (isPrefixOf, isSuffixOf) import Control.Monad import Control.Monad.State.Strict import Text.Parser.Combinators import Text.Parser.Char(anyChar) import Text.Trifecta(Result(..), parseString) import Text.Trifecta.Delta import Debug.Trace {- Given a pattern clause and a variable 'n', elaborate the clause and find the type of 'n'. Make new pattern clauses by replacing 'n' with all the possibly constructors applied to '_', and replacing all other variables with '_' in order to resolve other dependencies. Finally, merge the generated patterns with the original, by matching. Always take the "more specific" argument when there is a discrepancy, i.e. names over '_', patterns over names, etc. -} -- Given a variable to split, and a term application, return a list of -- variable updates split :: Name -> PTerm -> Idris [[(Name, PTerm)]] split n t' = do ist <- getIState -- Make sure all the names in the term are accessible mapM_ (\n -> setAccessibility n Public) (allNamesIn t') -- ETyDecl rather then ELHS because there'll be explicit type -- matching (tm, ty, pats) <- elabValBind recinfo ETyDecl True (addImplPat ist t') -- ASSUMPTION: tm is in normal form after elabValBind, so we don't -- need to do anything special to find out what family each argument -- is in logLvl 4 ("Elaborated:\n" ++ show tm ++ " : " ++ show ty ++ "\n" ++ show pats) -- iputStrLn (show (delab ist tm) ++ " : " ++ show (delab ist ty)) -- iputStrLn (show pats) let t = mergeUserImpl (addImplPat ist t') (delab ist tm) let ctxt = tt_ctxt ist case lookup n pats of Nothing -> ifail $ show n ++ " is not a pattern variable" Just ty -> do let splits = findPats ist ty logLvl 1 ("New patterns " ++ showSep ", " (map showTmImpls splits)) let newPats_in = zipWith (replaceVar ctxt n) splits (repeat t) logLvl 4 ("Working from " ++ show t) logLvl 4 ("Trying " ++ showSep "\n" (map (showTmImpls) newPats_in)) newPats <- mapM elabNewPat newPats_in logLvl 3 ("Original:\n" ++ show t) logLvl 3 ("Split:\n" ++ (showSep "\n" (map show (mapMaybe id newPats)))) logLvl 3 "----" let newPats' = mergeAllPats ist n t (mapMaybe id newPats) logLvl 1 ("Name updates " ++ showSep "\n" (map (\ (p, u) -> show u ++ " " ++ show p) newPats')) return (map snd newPats') data MergeState = MS { namemap :: [(Name, Name)], invented :: [(Name, Name)], explicit :: [Name], updates :: [(Name, PTerm)] } addUpdate :: Name -> Idris.AbsSyntaxTree.PTerm -> State MergeState () addUpdate n tm = do ms <- get put (ms { updates = ((n, stripNS tm) : updates ms) } ) inventName :: Idris.AbsSyntaxTree.IState -> Maybe Name -> Name -> State MergeState Name inventName ist ty n = do ms <- get let supp = case ty of Nothing -> [] Just t -> getNameHints ist t let nsupp = case n of MN i n | not (tnull n) && thead n == '_' -> mkSupply (supp ++ varlist) MN i n -> mkSupply (UN n : supp ++ varlist) UN n | thead n == '_' -> mkSupply (supp ++ varlist) x -> mkSupply (x : supp) let badnames = map snd (namemap ms) ++ map snd (invented ms) ++ explicit ms case lookup n 
(invented ms) of Just n' -> return n' Nothing -> do let n' = uniqueNameFrom nsupp badnames put (ms { invented = (n, n') : invented ms }) return n' mkSupply :: [Name] -> [Name] mkSupply ns = mkSupply' ns (map nextName ns) where mkSupply' xs ns' = xs ++ mkSupply ns' varlist :: [Name] varlist = map (sUN . (:[])) "xyzwstuv" -- EB's personal preference :) stripNS :: Idris.AbsSyntaxTree.PTerm -> Idris.AbsSyntaxTree.PTerm stripNS tm = mapPT dens tm where dens (PRef fc hls n) = PRef fc hls (nsroot n) dens t = t mergeAllPats :: IState -> Name -> PTerm -> [PTerm] -> [(PTerm, [(Name, PTerm)])] mergeAllPats ist cv t [] = [] mergeAllPats ist cv t (p : ps) = let (p', MS _ _ _ u) = runState (mergePat ist t p Nothing) (MS [] [] (filter (/=cv) (patvars t)) []) ps' = mergeAllPats ist cv t ps in ((p', u) : ps') where patvars (PRef _ _ n) = [n] patvars (PApp _ _ as) = concatMap (patvars . getTm) as patvars (PPatvar _ n) = [n] patvars _ = [] mergePat :: IState -> PTerm -> PTerm -> Maybe Name -> State MergeState PTerm -- If any names are unified, make sure they stay unified. Always prefer -- user provided name (first pattern) mergePat ist (PPatvar fc n) new t = mergePat ist (PRef fc [] n) new t mergePat ist old (PPatvar fc n) t = mergePat ist old (PRef fc [] n) t mergePat ist orig@(PRef fc _ n) new@(PRef _ _ n') t | isDConName n' (tt_ctxt ist) = do addUpdate n new return new | otherwise = do ms <- get case lookup n' (namemap ms) of Just x -> do addUpdate n (PRef fc [] x) return (PRef fc [] x) Nothing -> do put (ms { namemap = ((n', n) : namemap ms) }) return (PRef fc [] n) mergePat ist (PApp _ _ args) (PApp fc f args') t = do newArgs <- zipWithM mergeArg args (zip args' (argTys ist f)) return (PApp fc f newArgs) where mergeArg x (y, t) = do tm' <- mergePat ist (getTm x) (getTm y) t case x of (PImp _ _ _ _ _) -> return (y { machine_inf = machine_inf x, getTm = tm' }) _ -> return (y { getTm = tm' }) mergePat ist (PRef fc _ n) tm ty = do tm <- tidy ist tm ty addUpdate n tm return tm mergePat ist x y t = return y mergeUserImpl :: PTerm -> PTerm -> PTerm mergeUserImpl x y = x argTys :: IState -> PTerm -> [Maybe Name] argTys ist (PRef fc hls n) = case lookupTy n (tt_ctxt ist) of [ty] -> map (tyName . 
snd) (getArgTys ty) ++ repeat Nothing _ -> repeat Nothing where tyName (Bind _ (Pi _ _ _) _) = Just (sUN "->") tyName t | (P _ n _, _) <- unApply t = Just n | otherwise = Nothing argTys _ _ = repeat Nothing tidy :: IState -> PTerm -> Maybe Name -> State MergeState PTerm tidy ist orig@(PRef fc hls n) ty = do ms <- get case lookup n (namemap ms) of Just x -> return (PRef fc [] x) Nothing -> case n of (UN _) -> return orig _ -> do n' <- inventName ist ty n return (PRef fc [] n') tidy ist (PApp fc f args) ty = do args' <- zipWithM tidyArg args (argTys ist f) return (PApp fc f args') where tidyArg x ty' = do tm' <- tidy ist (getTm x) ty' return (x { getTm = tm' }) tidy ist tm ty = return tm -- mapPT tidyVar tm -- where tidyVar (PRef _ _) = Placeholder -- tidyVar t = t elabNewPat :: PTerm -> Idris (Maybe PTerm) elabNewPat t = idrisCatch (do (tm, ty) <- elabVal recinfo ELHS t i <- getIState return (Just (delab i tm))) (\e -> do i <- getIState logLvl 5 $ "Not a valid split:\n" ++ pshow i e return Nothing) findPats :: IState -> Type -> [PTerm] findPats ist t | (P _ n _, _) <- unApply t = case lookupCtxt n (idris_datatypes ist) of [ti] -> map genPat (con_names ti) _ -> [Placeholder] where genPat n = case lookupCtxt n (idris_implicits ist) of [args] -> PApp emptyFC (PRef emptyFC [] n) (map toPlaceholder args) _ -> error $ "Can't happen (genPat) " ++ show n toPlaceholder tm = tm { getTm = Placeholder } findPats ist t = [Placeholder] replaceVar :: Context -> Name -> PTerm -> PTerm -> PTerm replaceVar ctxt n t (PApp fc f pats) = PApp fc f (map substArg pats) where subst :: PTerm -> PTerm subst orig@(PPatvar _ v) | v == n = t | otherwise = Placeholder subst orig@(PRef _ _ v) | v == n = t | isDConName v ctxt = orig subst (PRef _ _ _) = Placeholder subst (PApp fc (PRef _ _ t) pats) | isTConName t ctxt = Placeholder -- infer types subst (PApp fc f pats) = PApp fc f (map substArg pats) subst x = x substArg arg = arg { getTm = subst (getTm arg) } replaceVar ctxt n t pat = pat splitOnLine :: Int -- ^ line number -> Name -- ^ variable -> FilePath -- ^ name of file -> Idris [[(Name, PTerm)]] splitOnLine l n fn = do -- let (before, later) = splitAt (l-1) (lines inp) -- i <- getIState cl <- getInternalApp fn l logLvl 3 ("Working with " ++ showTmImpls cl) tms <- split n cl -- iputStrLn (showSep "\n" (map show tms)) return tms -- "" -- not yet done... replaceSplits :: String -> [[(Name, PTerm)]] -> Idris [String] replaceSplits l ups = updateRHSs 1 (map (rep (expandBraces l)) ups) where rep str [] = str ++ "\n" rep str ((n, tm) : ups) = rep (updatePat False (show n) (nshow tm) str) ups updateRHSs i [] = return [] updateRHSs i (x : xs) = do (x', i') <- updateRHS (null xs) i x xs' <- updateRHSs i' xs return (x' : xs') updateRHS last i ('?':'=':xs) = do (xs', i') <- updateRHS last i xs return ("?=" ++ xs', i') updateRHS last i ('?':xs) = do let (nm, rest_in) = span (not . (\x -> isSpace x || x == ')' || x == '(')) xs let rest = if last then rest_in else case span (not . 
(=='\n')) rest_in of (_, restnl) -> restnl (nm', i') <- getUniq nm i return ('?':nm' ++ rest, i') updateRHS last i (x : xs) = do (xs', i') <- updateRHS last i xs return (x : xs', i') updateRHS last i [] = return ("", i) -- TMP HACK: If there are Nats, we don't want to show as numerals since -- this isn't supported in a pattern, so special case here nshow (PRef _ _ (UN z)) | z == txt "Z" = "Z" nshow (PApp _ (PRef _ _ (UN s)) [x]) | s == txt "S" = "(S " ++ addBrackets (nshow (getTm x)) ++ ")" nshow t = show t -- if there's any {n} replace with {n=n} -- but don't replace it in comments expandBraces ('{' : '-' : xs) = '{' : '-' : xs expandBraces ('{' : xs) = let (brace, (_:rest)) = span (/= '}') xs in if (not ('=' `elem` brace)) then ('{' : brace ++ " = " ++ brace ++ "}") ++ expandBraces rest else ('{' : brace ++ "}") ++ expandBraces rest expandBraces (x : xs) = x : expandBraces xs expandBraces [] = [] updatePat start n tm [] = [] updatePat start n tm ('{':rest) = let (space, rest') = span isSpace rest in '{' : space ++ updatePat False n tm rest' updatePat start n tm done@('?':rest) = done updatePat True n tm xs@(c:rest) | length xs > length n = let (before, after@(next:_)) = splitAt (length n) xs in if (before == n && not (isAlphaNum next)) then addBrackets tm ++ updatePat False n tm after else c : updatePat (not (isAlphaNum c)) n tm rest updatePat start n tm (c:rest) = c : updatePat (not ((isAlphaNum c) || c == '_')) n tm rest addBrackets tm | ' ' `elem` tm , not (isPrefixOf "(" tm) , not (isSuffixOf ")" tm) = "(" ++ tm ++ ")" | otherwise = tm getUniq :: (Show t, Num t) => [Char] -> t -> Idris ([Char], t) getUniq nm i = do ist <- getIState let n = nameRoot [] nm ++ "_" ++ show i case lookupTy (sUN n) (tt_ctxt ist) of [] -> return (n, i+1) _ -> getUniq nm (i+1) nameRoot acc nm | all isDigit nm = showSep "_" acc nameRoot acc nm = case span (/='_') nm of (before, ('_' : after)) -> nameRoot (acc ++ [before]) after _ -> showSep "_" (acc ++ [nm]) getClause :: Int -- ^ line number that the type is declared on -> Name -- ^ Function name -> Name -- ^ User given name -> FilePath -- ^ Source file name -> Idris String getClause l fn un fp = do i <- getIState case lookupCtxt fn (idris_classes i) of [c] -> return (mkClassBodies i (class_methods c)) _ -> do ty_in <- getInternalApp fp l let ty = case ty_in of PTyped n t -> t x -> x ist <- get let ap = mkApp ist ty [] return (show un ++ " " ++ ap ++ "= ?" ++ show un ++ "_rhs") where mkApp :: IState -> PTerm -> [Name] -> String mkApp i (PPi (Exp _ _ False) (MN _ _) _ ty sc) used = let n = getNameFrom i used ty in show n ++ " " ++ mkApp i sc (n : used) mkApp i (PPi (Exp _ _ False) (UN n) _ ty sc) used | thead n == '_' = let n = getNameFrom i used ty in show n ++ " " ++ mkApp i sc (n : used) mkApp i (PPi (Exp _ _ False) n _ _ sc) used = show n ++ " " ++ mkApp i sc (n : used) mkApp i (PPi _ _ _ _ sc) used = mkApp i sc used mkApp i _ _ = "" getNameFrom i used (PPi _ _ _ _ _) = uniqueNameFrom (mkSupply [sUN "f", sUN "g"]) used getNameFrom i used (PApp fc f as) = getNameFrom i used f getNameFrom i used (PRef fc _ f) = case getNameHints i f of [] -> uniqueNameFrom (mkSupply [sUN "x", sUN "y", sUN "z"]) used ns -> uniqueNameFrom (mkSupply ns) used getNameFrom i used _ = uniqueNameFrom (mkSupply [sUN "x", sUN "y", sUN "z"]) used -- write method declarations, indent with 4 spaces mkClassBodies :: IState -> [(Name, (FnOpts, PTerm))] -> String mkClassBodies i ns = showSep "\n" (zipWith (\(n, (_, ty)) m -> " " ++ def (show (nsroot n)) ++ " " ++ mkApp i ty [] ++ "= ?" 
++ show un ++ "_rhs_" ++ show m) ns [1..]) def n@(x:xs) | not (isAlphaNum x) = "(" ++ n ++ ")" def n = n getProofClause :: Int -- ^ line number that the type is declared -> Name -- ^ Function name -> FilePath -- ^ Source file name -> Idris String getProofClause l fn fp = do ty_in <- getInternalApp fp l let ty = case ty_in of PTyped n t -> t x -> x return (mkApp ty ++ " = ?" ++ show fn ++ "_rhs") where mkApp (PPi _ _ _ _ sc) = mkApp sc mkApp rt = "(" ++ show rt ++ ") <== " ++ show fn -- Purely syntactic - turn a pattern match clause into a with and a new -- match clause mkWith :: String -> Name -> String mkWith str n = let str' = replaceRHS str "with (_)" in str' ++ "\n" ++ newpat str where replaceRHS [] str = str replaceRHS ('?':'=': rest) str = str replaceRHS ('=': rest) str | not ('=' `elem` rest) = str replaceRHS (x : rest) str = x : replaceRHS rest str newpat ('>':patstr) = '>':newpat patstr newpat patstr = " " ++ replaceRHS patstr "| with_pat = ?" ++ show n ++ "_rhs" -- Replace _ with names in missing clauses nameMissing :: [PTerm] -> Idris [PTerm] nameMissing ps = do ist <- get newPats <- mapM nm ps let newPats' = mergeAllPats ist (sUN "_") (base (head ps)) newPats return (map fst newPats') where base (PApp fc f args) = PApp fc f (map (fmap (const (PRef fc [] (sUN "_")))) args) base t = t nm ptm = do mptm <- elabNewPat ptm case mptm of Nothing -> return ptm Just ptm' -> return ptm'
repo_name: shlevy/Idris-dev
path: src/Idris/CaseSplit.hs
license: bsd-3-clause
size: 18,135
n_ast_errors: 0
ast_max_depth: 23
n_whitespaces: 7,169
n_ast_nodes: 6,368
n_ast_terminals: 3,178
n_ast_nonterminals: 3,190
loc: 345
cycloplexity: 18

{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
-------------------------------------------------------------------
-- |
-- Module       : Irreverent.Bitbucket.Json.Pipelines.UpdateConfig
-- Copyright    : (C) 2017 Irreverent Pixel Feats
-- License      : BSD-style (see the file /LICENSE.md)
-- Maintainer   : Dom De Re
--
-------------------------------------------------------------------
module Irreverent.Bitbucket.Json.Pipelines.UpdateConfig (
  -- * Types
    UpdatePipelinesConfigJsonV2(..)
  -- * Functions
  , updatePipelineConfigFromJson
  ) where

import Irreverent.Bitbucket.Core.Data.Pipelines.UpdateConfig (UpdatePipelinesConfig(..))

import Ultra.Data.Aeson (FromJSON(..), ToJSON(..), (.=), (.:), object)

import Preamble

newtype UpdatePipelinesConfigJsonV2 = UpdatePipelinesConfigJsonV2 {
    fromUpdatePipelinesConfigJsonV2 :: UpdatePipelinesConfig
  } deriving (Show, Eq)

updatePipelineConfigFromJson :: UpdatePipelinesConfigJsonV2 -> UpdatePipelinesConfig
updatePipelineConfigFromJson = fromUpdatePipelinesConfigJsonV2

instance ToJSON UpdatePipelinesConfigJsonV2 where
  --toJSON :: a -> Value
  toJSON (UpdatePipelinesConfigJsonV2 cfg) = object
    [ "enabled" .= upcEnabled cfg
    ]

instance FromJSON UpdatePipelinesConfigJsonV2 where
  --parseJSON :: Value -> Parser a
  parseJSON v = UpdatePipelinesConfigJsonV2 <$> do
    o <- parseJSON v
    UpdatePipelinesConfig <$> (o .: "enabled")
repo_name: irreverent-pixel-feats/bitbucket
path: bitbucket-json/src/Irreverent/Bitbucket/Json/Pipelines/UpdateConfig.hs
license: bsd-3-clause
size: 1,456
n_ast_errors: 0
ast_max_depth: 11
n_whitespaces: 213
n_ast_nodes: 208
n_ast_terminals: 130
n_ast_nonterminals: 78
loc: 22
cycloplexity: 1

module Proplists where

import Data.List
import Data.Char

type Property = (String, PropVal)
type Proplist = [ Property ]

data PropVal = Int Integer
             | Str String
             | Undefined
             | Obj Proplist
             | Arr [PropVal]
             deriving (Show, Eq)

testList :: Proplist
testList = [("a",Int 1), ("b", Str "Foo")]

testList2 :: Proplist
testList2 = [("b", Str "Bar"),("c", Undefined)]

fromArr a = Arr a
fromStr s = Str s
fromObj o = Obj o
fromInt i = Int i
repo_name: tty/joy-of-coding
path: src/Proplists.hs
license: bsd-3-clause
size: 442
n_ast_errors: 0
ast_max_depth: 7
n_whitespaces: 89
n_ast_nodes: 185
n_ast_terminals: 106
n_ast_nonterminals: 79
loc: 15
cycloplexity: 1

import System.Environment (getArgs)
import Data.Bits ((.|.))
import Data.List.Split (splitOn)

lak :: Int -> Int -> [Int] -> [Int]
lak _ _ [] = []
lak x y (z:zs) | z == x    = y : lak x y zs
               | otherwise = z : lak x y zs

lake :: Int -> [Int] -> [String] -> [[String]] -> Int
lake i xs [] []       = 0
lake i xs [] (zs:zss) = lake i (tail xs ++ [0]) zs zss
lake i (x:xs) (y:ys) zss
    | y == "#"                     = lake i (xs ++ [0]) ys zss
    | ll > 0 && rl > 0 && ll /= rl = (-1) + lake i (lak rl ll xs ++ [ll]) ys zss
    | ll > 0                       = lake i (xs ++ [ll]) ys zss
    | rl > 0                       = lake i (xs ++ [rl]) ys zss
    | otherwise                    = 1 + lake (succ i) (xs ++ [i]) ys zss
  where ll = (.|.) x ((.|.) (head xs) (last xs))
        rl = xs!!1

lakes :: [[String]] -> Int
lakes (xs:xss) = lake 1 (replicate (length xs + 2) 0) xs xss

main :: IO ()
main = do
    [inpFile] <- getArgs
    input <- readFile inpFile
    putStr . unlines . map (show . lakes . map words . splitOn "|") $ lines input
repo_name: nikai3d/ce-challenges
path: hard/lakes.hs
license: bsd-3-clause
size: 1,284
n_ast_errors: 0
ast_max_depth: 13
n_whitespaces: 597
n_ast_nodes: 631
n_ast_terminals: 324
n_ast_nonterminals: 307
loc: 24
cycloplexity: 1

{-# LANGUAGE NegativeLiterals #-}

module Singletons.NegativeLiterals where

import Data.Singletons.Base.TH
import Numeric.Natural (Natural)

$(singletons [d|
  f :: Natural
  f = -1
  |])
repo_name: goldfirere/singletons
path: singletons-base/tests/compile-and-dump/Singletons/NegativeLiterals.hs
license: bsd-3-clause
size: 189
n_ast_errors: 0
ast_max_depth: 7
n_whitespaces: 30
n_ast_nodes: 38
n_ast_terminals: 24
n_ast_nonterminals: 14
loc: -1
cycloplexity: -1

module IRC.UrlHandler where

import IRC
import MissileLauncher
import Text.ParserCombinators.Parsec
import Network.Curl
import Text.Regex
import Control.Monad

urlHandler = defaultHandler { _onMessage = onMessage }

-- The message handler
onMessage :: IRCConnection -> String -> String -> String -> IO ()
onMessage conn chan nick msg = do
  let url = matchRegex (mkRegex "(https?://[^ \r\n$]+)") msg
  case url of
    Nothing -> return ()
    Just [url] -> do
      (code, text) <- curlGetString url [CurlFollowLocation True]
      let title = matchRegex (mkRegex "<[tT][iI][tT][lL][eE].*?>\\s*(.*?)\\s*<\\/[tT][iI][tT][lL][eE]") text
      case title of
        Nothing -> return ()
        Just [title] -> sendMessage conn chan title
repo_name: Catchouli/robo
path: src/IRC/UrlHandler.hs
license: bsd-3-clause
size: 806
n_ast_errors: 0
ast_max_depth: 17
n_whitespaces: 212
n_ast_nodes: 218
n_ast_terminals: 111
n_ast_nonterminals: 107
loc: 19
cycloplexity: 3

module Parser where import Text.Parsec hiding ((<|>), many, optional) import Text.Parsec.Combinator hiding (optional) import Text.Parsec.String import Text.Parsec.Token import Text.Parsec.Char import Data.Monoid import Control.Applicative import Control.Monad import RewriteSystem parseRuleFile :: FilePath -> IO ([(String, [String])], Rules Char) parseRuleFile fp = do x <- parseFromFile entry fp case x of Left e -> error (show e) Right rs -> let ck = validate (snd rs) in case ck of ([], []) -> return rs ([], xs) -> error $ "Not all positional place holders in RHS are in LHS, eg: 1<>a -> 1<>a<>2. \nAffected rules: " <> (show xs) (xs, []) -> error $ "Positional place holders cannot be placed after each other, eg: 1<>2<>b -> b <> 1.\nAffected rules: " <> (show x) (ds,ps) -> error $ "Positional place holders cannot be placed after each other, eg: 2<>1<>a -> 1<>a<>2\n Affected rules: " <> (show ds) <> "\n and not all place holders in RHS are in LHS, eg: 1<>2<>a -> 1<>b<>3\nAffected rules: " <> (show ps) entry :: Parser ([(String, [String])], Rules Char) entry = whiteSpace transformDef *> (reserved transformDef "start" *> ((,) [] <$> rules)) <|> transforms transforms :: Parser ([(String, [String])], Rules Char) transforms = do tr <- manyTill (whiteSpace transformDef *> transform <* whiteSpace transformDef) $ reserved transformDef "start" whiteSpace transformDef rs <- rules return (tr, rs) transform :: Parser (String, [String]) transform = (,) <$> (keys <* reservedOp transformDef ":") <*> values values = do x <- identifier transformDef whiteSpace transformDef rest <- option [] (reservedOp transformDef "," *> values) return (x:rest) keys = let res = reserved transformDef in res "transforms" *> return "transforms" validate :: Rules Char -> ([Int], [Int]) validate (Rules xs) = let (sp, pr) = unzip $ fmap work xs in (fp $ sp `zip` [1..], fp $ pr `zip` [1..]) where work (ls,rs) = (noSeqPos ls, posRsInLs ls rs) fp = fmap snd . filter (\(x,y) -> (not x)) noSeqPos :: [Match p] -> Bool noSeqPos (x:y:xs) = not (bothPos x y) && noSeqPos (y:xs) noSeqPos [x] = True noSeqPos [] = True posRsInLs :: Eq p => [Match p] -> [Match p] -> Bool posRsInLs ls rs = let posses = filter fpos rs in and $ fmap (`elem` ls) posses where fpos (Positional _) = True fpos _ = False bothPos (Positional _) (Positional _) = True bothPos _ _ = False rules :: Parser (Rules Char) rules = Rules <$> manyTill (whiteSpace rulesDef *> rule) eof rule :: Parser ([Match Char], [Match Char]) rule = ((,) <$> rule1 <* reservedOp rulesDef "->") <*> rule1 <* optional newline rule1 :: Parser [Match Char] rule1 = try position <|> do ids <- fmap Match <$> identifier rulesDef whiteSpace transformDef rest <- option [] (reservedOp rulesDef "<>" *> position) return (ids <> rest) position :: Parser [Match Char] position = do x <- Positional <$> natural rulesDef rest <- option [] $ reservedOp rulesDef "<>" *> rule1 return (x:rest) transformDef :: TokenParser st transformDef = makeTokenParser $ LanguageDef { commentStart = "{-", commentEnd = "-}", commentLine = "--", nestedComments = True, identStart = alphaNum, identLetter = alphaNum, opStart = oneOf ":.,", opLetter = oneOf "", reservedNames = ["transforms"], reservedOpNames = [":",",","."], caseSensitive = True } rulesDef :: TokenParser st rulesDef = makeTokenParser $ LanguageDef { commentStart = "{-", commentEnd = "-}", commentLine = "--", nestedComments = True, identStart = oneOf $ ['a' .. 'z'] <> ['A' .. 'Z'] <> ['0' .. '9'], identLetter = oneOf $ ['a' .. 'z'] <> ['A' .. 'Z'] <> ['0' .. 
'9'], opStart = oneOf "-,", opLetter = char '>', reservedNames = [], reservedOpNames = ["->", "<>"], caseSensitive = True }
repo_name: edgarklerks/rewrite_systems
path: Parser.hs
license: bsd-3-clause
size: 4,342
n_ast_errors: 0
ast_max_depth: 19
n_whitespaces: 1,289
n_ast_nodes: 1,489
n_ast_terminals: 793
n_ast_nonterminals: 696
loc: 96
cycloplexity: 5

module ImportNotAtTop where

main = 3 + 4 + 4

import Int

f = 4
repo_name: roberth/uu-helium
path: test/staticerrors/ImportNotAtTop.hs
license: gpl-3.0
size: 65
n_ast_errors: 0
ast_max_depth: 6
n_whitespaces: 18
n_ast_nodes: 25
n_ast_terminals: 15
n_ast_nonterminals: 10
loc: -1
cycloplexity: -1

module Haskus.System.PCI.Types
   ( Vendor (..)
   , Device (..)
   , Class (..)
   , SubClass (..)
   )
where

import Data.IntMap.Strict

data Vendor = Vendor
   { vendorName    :: String
   , vendorDevices :: IntMap Device
   } deriving (Show)

data Device = Device
   { deviceName       :: String
   , deviceSubDevices :: IntMap String
   } deriving (Show)

data Class = Class
   { className       :: String
   , classSubClasses :: IntMap SubClass
   } deriving (Show)

data SubClass = SubClass
   { subclassName       :: String
   , subclassInterfaces :: IntMap String
   } deriving (Show)
repo_name: hsyl20/ViperVM
path: haskus-system/src/lib/Haskus/System/PCI/Types.hs
license: bsd-3-clause
size: 594
n_ast_errors: 0
ast_max_depth: 9
n_whitespaces: 159
n_ast_nodes: 168
n_ast_terminals: 103
n_ast_nonterminals: 65
loc: 22
cycloplexity: 0

-- Copyright (c) 2016 Eric McCorkle. All rights reserved. -- -- Redistribution and use in source and binary forms, with or without -- modification, are permitted provided that the following conditions -- are met: -- 1. Redistributions of source code must retain the above copyright -- notice, this list of conditions and the following disclaimer. -- 2. Redistributions in binary form must reproduce the above copyright -- notice, this list of conditions and the following disclaimer in the -- documentation and/or other materials provided with the distribution. -- 3. Neither the name of the author nor the names of any contributors -- may be used to endorse or promote products derived from this software -- without specific prior written permission. -- -- THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS'' -- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED -- TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -- PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS -- OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -- LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF -- USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -- ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT -- OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF -- SUCH DAMAGE. {-# OPTIONS_GHC -Wall -Werror #-} {-# LANGUAGE MultiParamTypeClasses, FlexibleContexts, FlexibleInstances, UndecidableInstances #-} module Control.Monad.Positions( MonadPositions(..), PositionsT, Positions, runPositionsT, runPositions, mapPositionsT ) where import Control.Applicative import Control.Monad.Artifacts.Class import Control.Monad.CommentBuffer.Class import Control.Monad.Comments.Class import Control.Monad.Cont import Control.Monad.Except import Control.Monad.Gensym.Class import Control.Monad.GraphBuilder.Class import Control.Monad.Journal import Control.Monad.Keywords.Class import Control.Monad.Loader.Class import Control.Monad.Messages.Class import Control.Monad.Positions.Class import Control.Monad.Reader import Control.Monad.ScopeBuilder.Class import Control.Monad.SourceFiles.Class import Control.Monad.SourceBuffer.Class import Control.Monad.State import Control.Monad.Symbols.Class import Control.Monad.Writer import Data.Array import Data.Position.Filename import Data.Position.Point data Info = Info { pointInfoArray :: !(Array Point PointInfo), fileInfoArray :: !(Array Filename FileInfo) } newtype PositionsT m a = PositionsT { unpackPositionsT :: (ReaderT Info m) a } type Positions a = PositionsT IO a -- | Execute the computation represented by a Positions monad. runPositions :: Positions a -- ^ The Positions monad to execute. -> (Point, Point) -- ^ The low and high range of the symbols. -> [(Point, PointInfo)] -- ^ The mapping of symbols. The mapping to the lowest -- index is taken as the null symbol. -> (Filename, Filename) -- ^ The low and high range of the symbols. -> [(Filename, FileInfo)] -- ^ The mapping of symbols. The mapping to the lowest -- index is taken as the null symbol. -> IO a runPositions = runPositionsT -- | Execute the computation wrapped in a PositionsT monad transformer. runPositionsT :: Monad m => PositionsT m a -- ^ The PositionsT monad to execute. -> (Point, Point) -- ^ The low and high range of the symbols. 
The lowest -- index is used as the index of the null symbol. -> [(Point, PointInfo)] -- ^ The mapping of symbols to indexes. The mapping to the -- lowest index is taken as the null symbol. -> (Filename, Filename) -- ^ The low and high range of the symbols. -> [(Filename, FileInfo)] -- ^ The mapping of symbols. The mapping to the lowest -- index is taken as the null symbol. -> m a runPositionsT s posbounds positions filebounds file = runReaderT (unpackPositionsT s) Info { pointInfoArray = array posbounds positions, fileInfoArray = array filebounds file } mapPositionsT :: (Monad m, Monad n) => (m a -> n b) -> PositionsT m a -> PositionsT n b mapPositionsT f = PositionsT . mapReaderT f . unpackPositionsT pointInfo' :: Monad m => Point -> (ReaderT Info m) PointInfo pointInfo' pos = liftM ((! pos) . pointInfoArray) ask fileInfo' :: Monad m => Filename -> (ReaderT Info m) FileInfo fileInfo' pos = liftM ((! pos) . fileInfoArray) ask instance Monad m => Monad (PositionsT m) where return = PositionsT . return s >>= f = PositionsT $ unpackPositionsT s >>= unpackPositionsT . f instance Monad m => Applicative (PositionsT m) where pure = return (<*>) = ap instance (Monad m, Alternative m) => Alternative (PositionsT m) where empty = lift empty s1 <|> s2 = PositionsT (unpackPositionsT s1 <|> unpackPositionsT s2) instance Functor (PositionsT m) where fmap = fmap instance Monad m => MonadPositions (PositionsT m) where pointInfo = PositionsT . pointInfo' fileInfo = PositionsT . fileInfo' instance MonadIO m => MonadIO (PositionsT m) where liftIO = PositionsT . liftIO instance MonadTrans PositionsT where lift = PositionsT . lift instance MonadArtifacts path m => MonadArtifacts path (PositionsT m) where artifact path = lift . artifact path artifactBytestring path = lift . artifactBytestring path artifactLazyBytestring path = lift . artifactLazyBytestring path instance MonadCommentBuffer m => MonadCommentBuffer (PositionsT m) where startComment = lift startComment appendComment = lift . appendComment finishComment = lift finishComment addComment = lift . addComment saveCommentsAsPreceeding = lift . saveCommentsAsPreceeding clearComments = lift clearComments instance MonadComments m => MonadComments (PositionsT m) where preceedingComments = lift . preceedingComments instance MonadCont m => MonadCont (PositionsT m) where callCC f = PositionsT (callCC (\c -> unpackPositionsT (f (PositionsT . c)))) instance (MonadError e m) => MonadError e (PositionsT m) where throwError = lift . throwError m `catchError` h = PositionsT (unpackPositionsT m `catchError` (unpackPositionsT . h)) instance MonadEdgeBuilder nodety m => MonadEdgeBuilder nodety (PositionsT m) where addEdge src dst = lift . addEdge src dst instance MonadGensym m => MonadGensym (PositionsT m) where symbol = lift . symbol unique = lift . unique instance (Monoid w, MonadJournal w m) => MonadJournal w (PositionsT m) where journal = lift . journal history = lift history clear = lift clear instance MonadKeywords p t m => MonadKeywords p t (PositionsT m) where mkKeyword p = lift . mkKeyword p instance MonadLoader path info m => MonadLoader path info (PositionsT m) where load = lift . load instance MonadMessages msg m => MonadMessages msg (PositionsT m) where message = lift . message instance MonadNodeBuilder nodety m => MonadNodeBuilder nodety (PositionsT m) where addNode = lift . addNode instance MonadScopeStack m => MonadScopeStack (PositionsT m) where enterScope = lift . 
enterScope finishScope = lift finishScope instance MonadScopeBuilder tmpscope m => MonadScopeBuilder tmpscope (PositionsT m) where getScope = lift getScope setScope = lift . setScope instance MonadSourceFiles m => MonadSourceFiles (PositionsT m) where sourceFile = lift . sourceFile instance MonadSourceBuffer m => MonadSourceBuffer (PositionsT m) where linebreak = lift . linebreak startFile fname = lift . startFile fname finishFile = lift finishFile instance MonadState s m => MonadState s (PositionsT m) where get = lift get put = lift . put instance MonadSymbols m => MonadSymbols (PositionsT m) where nullSym = lift nullSym allNames = lift allNames allSyms = lift allSyms name = lift . name instance MonadReader r m => MonadReader r (PositionsT m) where ask = lift ask local f = mapPositionsT (local f) instance MonadWriter w m => MonadWriter w (PositionsT m) where tell = lift . tell listen = mapPositionsT listen pass = mapPositionsT pass instance MonadPlus m => MonadPlus (PositionsT m) where mzero = lift mzero mplus s1 s2 = PositionsT (mplus (unpackPositionsT s1) (unpackPositionsT s2)) instance MonadFix m => MonadFix (PositionsT m) where mfix f = PositionsT (mfix (unpackPositionsT . f))
repo_name: saltlang/compiler-toolbox
path: src/Control/Monad/Positions.hs
license: bsd-3-clause
size: 8,898
n_ast_errors: 0
ast_max_depth: 15
n_whitespaces: 1,941
n_ast_nodes: 1,981
n_ast_terminals: 1,061
n_ast_nonterminals: 920
loc: 157
cycloplexity: 1

module GHC.Driver.Monad (module GhcMonad) where

import GhcMonad
repo_name: google/ghc-source-gen
path: compat/GHC/Driver/Monad.hs
license: bsd-3-clause
size: 64
n_ast_errors: 0
ast_max_depth: 4
n_whitespaces: 7
n_ast_nodes: 16
n_ast_terminals: 11
n_ast_nonterminals: 5
loc: 2
cycloplexity: 0

-- |
-- Module      : Crypto.Hash.%%MODULENAME%%
-- License     : BSD-style
-- Maintainer  : Vincent Hanquez <[email protected]>
-- Stability   : experimental
-- Portability : unknown
--
-- module containing the binding functions to work with the
-- %%MODULENAME%% cryptographic hash.
--
{-# LANGUAGE ForeignFunctionInterface #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeFamilies #-}
module Crypto.Hash.%%MODULENAME%% ( %%MODULENAME%% (..) ) where

import Crypto.Hash.Types
import Foreign.Ptr (Ptr)
import Data.Data
import Data.Typeable
import Data.Word (Word8, Word32)

-- | %%MODULENAME%% cryptographic hash algorithm
data %%MODULENAME%% = %%MODULENAME%%
    deriving (Show,Data,Typeable)

instance HashAlgorithm %%MODULENAME%% where
    type HashBlockSize           %%MODULENAME%% = %%BLOCK_SIZE_BYTES%%
    type HashDigestSize          %%MODULENAME%% = %%DIGEST_SIZE_BYTES%%
    type HashInternalContextSize %%MODULENAME%% = %%CTX_SIZE_BYTES%%
    hashBlockSize  _          = %%BLOCK_SIZE_BYTES%%
    hashDigestSize _          = %%DIGEST_SIZE_BYTES%%
    hashInternalContextSize _ = %%CTX_SIZE_BYTES%%
    hashInternalInit          = c_%%HASHNAME%%_init
    hashInternalUpdate        = c_%%HASHNAME%%_update
    hashInternalFinalize      = c_%%HASHNAME%%_finalize

foreign import ccall unsafe "cryptonite_%%HASHNAME%%_init"
    c_%%HASHNAME%%_init :: Ptr (Context a)-> IO ()

foreign import ccall "cryptonite_%%HASHNAME%%_update"
    c_%%HASHNAME%%_update :: Ptr (Context a) -> Ptr Word8 -> Word32 -> IO ()

foreign import ccall unsafe "cryptonite_%%HASHNAME%%_finalize"
    c_%%HASHNAME%%_finalize :: Ptr (Context a) -> Ptr (Digest a) -> IO ()
repo_name: tekul/cryptonite
path: gen/template/hash.hs
license: bsd-3-clause
size: 1,738
n_ast_errors: 21
ast_max_depth: 15
n_whitespaces: 353
n_ast_nodes: 351
n_ast_terminals: 216
n_ast_nonterminals: 135
loc: -1
cycloplexity: -1

{-# LANGUAGE CPP, NoImplicitPrelude #-} {-# LANGUAGE ForeignFunctionInterface #-} -- | Miscellaneous information about the system environment. module System.Environment.Compat ( getArgs , getProgName , getEnv , lookupEnv , setEnv , unsetEnv , withArgs , withProgName , getEnvironment ) where import System.Environment #if !(MIN_VERSION_base(4,7,0)) import Prelude.Compat # ifdef mingw32_HOST_OS import Control.Monad import Foreign.C import Foreign.Safe import GHC.Windows # else import qualified System.Posix.Env as Posix # endif # ifdef mingw32_HOST_OS # if defined(i386_HOST_ARCH) # define WINDOWS_CCONV stdcall # elif defined(x86_64_HOST_ARCH) # define WINDOWS_CCONV ccall # else # error Unknown mingw32 arch # endif foreign import WINDOWS_CCONV unsafe "windows.h GetLastError" c_GetLastError:: IO DWORD eRROR_ENVVAR_NOT_FOUND :: DWORD eRROR_ENVVAR_NOT_FOUND = 203 # endif # if !(MIN_VERSION_base(4,6,0)) -- | Return the value of the environment variable @var@, or @Nothing@ if -- there is no such value. -- -- For POSIX users, this is equivalent to 'System.Posix.Env.getEnv'. lookupEnv :: String -> IO (Maybe String) lookupEnv k = lookup k `fmap` getEnvironment # endif -- | @setEnv name value@ sets the specified environment variable to @value@. -- -- On Windows setting an environment variable to the /empty string/ removes -- that environment variable from the environment. For the sake of -- compatibility we adopt that behavior. In particular -- -- @ -- setEnv name \"\" -- @ -- -- has the same effect as -- -- @ -- `unsetEnv` name -- @ -- -- If you don't care about Windows support and want to set an environment -- variable to the empty string use @System.Posix.Env.setEnv@ from the @unix@ -- package instead. -- -- Throws `Control.Exception.IOException` if @name@ is the empty string or -- contains an equals sign. -- -- Note that setting Unicode values may not work correctly on versions of GHC -- prior to 7.2. setEnv :: String -> String -> IO () setEnv key value_ | null value = unsetEnv key | otherwise = setEnv_ key value where -- NOTE: Anything that follows NUL is ignored on both POSIX and Windows. -- We still strip it manually so that the null check above succeds if a -- value starts with NUL, and `unsetEnv` is called. This is important for -- two reasons. -- -- * On POSIX setting an environment variable to the empty string does not -- remove it. -- -- * On Windows setting an environment variable to the empty string -- removes that environment variable. A subsequent call to -- GetEnvironmentVariable will then return 0, but the calling thread's -- last-error code will not be updated, and hence a call to GetLastError -- may not return ERROR_ENVVAR_NOT_FOUND. The failed lookup will then -- result in a random error instead of the expected -- `isDoesNotExistError` (this is at least true for Windows XP, SP 3). -- Explicitly calling `unsetEnv` prevents this. value = takeWhile (/= '\NUL') value_ setEnv_ :: String -> String -> IO () # ifdef mingw32_HOST_OS setEnv_ key value = withCWString key $ \k -> withCWString value $ \v -> do success <- c_SetEnvironmentVariable k v unless success (throwGetLastError "setEnv") foreign import WINDOWS_CCONV unsafe "windows.h SetEnvironmentVariableW" c_SetEnvironmentVariable :: LPTSTR -> LPTSTR -> IO Bool # else setEnv_ k v = Posix.setEnv k v True # endif -- | @unsetEnv name@ removes the specified environment variable from the -- environment of the current process. -- -- Throws `Control.Exception.IOException` if @name@ is the empty string or -- contains an equals sign. 
unsetEnv :: String -> IO () # ifdef mingw32_HOST_OS unsetEnv key = withCWString key $ \k -> do success <- c_SetEnvironmentVariable k nullPtr unless success $ do -- We consider unsetting an environment variable that does not exist not as -- an error, hence we ignore eRROR_ENVVAR_NOT_FOUND. err <- c_GetLastError unless (err == eRROR_ENVVAR_NOT_FOUND) $ do throwGetLastError "unsetEnv" # else unsetEnv = Posix.unsetEnv # endif #endif
repo_name: beni55/base-compat
path: src/System/Environment/Compat.hs
license: mit
size: 4,200
n_ast_errors: 46
ast_max_depth: 7
n_whitespaces: 849
n_ast_nodes: 504
n_ast_terminals: 307
n_ast_nonterminals: 197
loc: 26
cycloplexity: 1

{-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE LambdaCase #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- Module : Network.AWS.ElasticBeanstalk.SwapEnvironmentCNAMEs -- Copyright : (c) 2013-2014 Brendan Hay <[email protected]> -- License : This Source Code Form is subject to the terms of -- the Mozilla Public License, v. 2.0. -- A copy of the MPL can be found in the LICENSE file or -- you can obtain it at http://mozilla.org/MPL/2.0/. -- Maintainer : Brendan Hay <[email protected]> -- Stability : experimental -- Portability : non-portable (GHC extensions) -- -- Derived from AWS service descriptions, licensed under Apache 2.0. -- | Swaps the CNAMEs of two environments. -- -- <http://docs.aws.amazon.com/elasticbeanstalk/latest/api/API_SwapEnvironmentCNAMEs.html> module Network.AWS.ElasticBeanstalk.SwapEnvironmentCNAMEs ( -- * Request SwapEnvironmentCNAMEs -- ** Request constructor , swapEnvironmentCNAMEs -- ** Request lenses , secnameDestinationEnvironmentId , secnameDestinationEnvironmentName , secnameSourceEnvironmentId , secnameSourceEnvironmentName -- * Response , SwapEnvironmentCNAMEsResponse -- ** Response constructor , swapEnvironmentCNAMEsResponse ) where import Network.AWS.Prelude import Network.AWS.Request.Query import Network.AWS.ElasticBeanstalk.Types import qualified GHC.Exts data SwapEnvironmentCNAMEs = SwapEnvironmentCNAMEs { _secnameDestinationEnvironmentId :: Maybe Text , _secnameDestinationEnvironmentName :: Maybe Text , _secnameSourceEnvironmentId :: Maybe Text , _secnameSourceEnvironmentName :: Maybe Text } deriving (Eq, Ord, Read, Show) -- | 'SwapEnvironmentCNAMEs' constructor. -- -- The fields accessible through corresponding lenses are: -- -- * 'secnameDestinationEnvironmentId' @::@ 'Maybe' 'Text' -- -- * 'secnameDestinationEnvironmentName' @::@ 'Maybe' 'Text' -- -- * 'secnameSourceEnvironmentId' @::@ 'Maybe' 'Text' -- -- * 'secnameSourceEnvironmentName' @::@ 'Maybe' 'Text' -- swapEnvironmentCNAMEs :: SwapEnvironmentCNAMEs swapEnvironmentCNAMEs = SwapEnvironmentCNAMEs { _secnameSourceEnvironmentId = Nothing , _secnameSourceEnvironmentName = Nothing , _secnameDestinationEnvironmentId = Nothing , _secnameDestinationEnvironmentName = Nothing } -- | The ID of the destination environment. -- -- Condition: You must specify at least the 'DestinationEnvironmentID' or the 'DestinationEnvironmentName'. You may also specify both. You must specify the 'SourceEnvironmentId' with -- the 'DestinationEnvironmentId'. secnameDestinationEnvironmentId :: Lens' SwapEnvironmentCNAMEs (Maybe Text) secnameDestinationEnvironmentId = lens _secnameDestinationEnvironmentId (\s a -> s { _secnameDestinationEnvironmentId = a }) -- | The name of the destination environment. -- -- Condition: You must specify at least the 'DestinationEnvironmentID' or the 'DestinationEnvironmentName'. You may also specify both. You must specify the 'SourceEnvironmentName' with -- the 'DestinationEnvironmentName'. secnameDestinationEnvironmentName :: Lens' SwapEnvironmentCNAMEs (Maybe Text) secnameDestinationEnvironmentName = lens _secnameDestinationEnvironmentName (\s a -> s { _secnameDestinationEnvironmentName = a }) -- | The ID of the source environment. -- -- Condition: You must specify at least the 'SourceEnvironmentID' or the 'SourceEnvironmentName'. You may also specify both. 
If you specify the 'SourceEnvironmentId', you must -- specify the 'DestinationEnvironmentId'. secnameSourceEnvironmentId :: Lens' SwapEnvironmentCNAMEs (Maybe Text) secnameSourceEnvironmentId = lens _secnameSourceEnvironmentId (\s a -> s { _secnameSourceEnvironmentId = a }) -- | The name of the source environment. -- -- Condition: You must specify at least the 'SourceEnvironmentID' or the 'SourceEnvironmentName'. You may also specify both. If you specify the 'SourceEnvironmentName', you -- must specify the 'DestinationEnvironmentName'. secnameSourceEnvironmentName :: Lens' SwapEnvironmentCNAMEs (Maybe Text) secnameSourceEnvironmentName = lens _secnameSourceEnvironmentName (\s a -> s { _secnameSourceEnvironmentName = a }) data SwapEnvironmentCNAMEsResponse = SwapEnvironmentCNAMEsResponse deriving (Eq, Ord, Read, Show, Generic) -- | 'SwapEnvironmentCNAMEsResponse' constructor. swapEnvironmentCNAMEsResponse :: SwapEnvironmentCNAMEsResponse swapEnvironmentCNAMEsResponse = SwapEnvironmentCNAMEsResponse instance ToPath SwapEnvironmentCNAMEs where toPath = const "/" instance ToQuery SwapEnvironmentCNAMEs where toQuery SwapEnvironmentCNAMEs{..} = mconcat [ "DestinationEnvironmentId" =? _secnameDestinationEnvironmentId , "DestinationEnvironmentName" =? _secnameDestinationEnvironmentName , "SourceEnvironmentId" =? _secnameSourceEnvironmentId , "SourceEnvironmentName" =? _secnameSourceEnvironmentName ] instance ToHeaders SwapEnvironmentCNAMEs instance AWSRequest SwapEnvironmentCNAMEs where type Sv SwapEnvironmentCNAMEs = ElasticBeanstalk type Rs SwapEnvironmentCNAMEs = SwapEnvironmentCNAMEsResponse request = post "SwapEnvironmentCNAMEs" response = nullResponse SwapEnvironmentCNAMEsResponse
repo_name: kim/amazonka
path: amazonka-elasticbeanstalk/gen/Network/AWS/ElasticBeanstalk/SwapEnvironmentCNAMEs.hs
license: mpl-2.0
size: 5,714
n_ast_errors: 0
ast_max_depth: 9
n_whitespaces: 1,013
n_ast_nodes: 552
n_ast_terminals: 336
n_ast_nonterminals: 216
loc: 70
cycloplexity: 1

{-# Language RankNTypes, PatternSynonyms, TypeOperators, DataKinds, PolyKinds,
             KindSignatures, GADTs #-}

module T15694 where

import Data.Kind
import Data.Type.Equality

data Ctx :: Type -> Type where
 E     :: Ctx(Type)
 (:&:) :: a -> Ctx(as) -> Ctx(a -> as)

data ApplyT(kind::Type) :: kind -> Ctx(kind) -> Type where
 AO :: a -> ApplyT(Type) a E
 AS :: ApplyT(ks) (f a) ctx
    -> ApplyT(k -> ks) f (a:&:ctx)

pattern ASSO
  :: () =>
     forall ks k (f :: k -> ks) (a1 :: k) (ctx :: Ctx ks) (ks1 :: Type) k1
            (a2 :: k1) (ctx1 :: Ctx ks1) a3.
     (kind ~ (k -> ks), a ~~ f, b ~~ (a1 :&: ctx), ks ~ (k1 -> ks1),
      ctx ~~ (a2 :&: E), ks1 ~ Type, f a1 a2 ~~ a3)
  => a3 -> ApplyT kind a b
pattern ASSO a = AS (AS (AO a))
repo_name: sdiehl/ghc
path: testsuite/tests/patsyn/should_fail/T15694.hs
license: bsd-3-clause
size: 831
n_ast_errors: 0
ast_max_depth: 12
n_whitespaces: 284
n_ast_nodes: 362
n_ast_terminals: 204
n_ast_nonterminals: 158
loc: -1
cycloplexity: -1

{-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE ConstraintKinds #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE StandaloneDeriving #-} module PlaceHolder where import Type ( Type ) import Outputable import Name import NameSet import RdrName import Var import Coercion import ConLike (ConLike) import FieldLabel import SrcLoc (Located) import TcEvidence ( HsWrapper ) import Data.Data hiding ( Fixity ) import BasicTypes (Fixity) {- %************************************************************************ %* * \subsection{Annotating the syntax} %* * %************************************************************************ -} -- NB: These are intentionally open, allowing API consumers (like Haddock) -- to declare new instances -- | used as place holder in PostTc and PostRn values data PlaceHolder = PlaceHolder deriving (Data) -- | Types that are not defined until after type checking type family PostTc id ty -- Note [Pass sensitive types] type instance PostTc Id ty = ty type instance PostTc Name ty = PlaceHolder type instance PostTc RdrName ty = PlaceHolder -- | Types that are not defined until after renaming type family PostRn id ty -- Note [Pass sensitive types] type instance PostRn Id ty = ty type instance PostRn Name ty = ty type instance PostRn RdrName ty = PlaceHolder placeHolderKind :: PlaceHolder placeHolderKind = PlaceHolder placeHolderFixity :: PlaceHolder placeHolderFixity = PlaceHolder placeHolderType :: PlaceHolder placeHolderType = PlaceHolder placeHolderTypeTc :: Type placeHolderTypeTc = panic "Evaluated the place holder for a PostTcType" placeHolderNames :: PlaceHolder placeHolderNames = PlaceHolder placeHolderNamesTc :: NameSet placeHolderNamesTc = emptyNameSet placeHolderHsWrapper :: PlaceHolder placeHolderHsWrapper = PlaceHolder {- Note [Pass sensitive types] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Since the same AST types are re-used through parsing,renaming and type checking there are naturally some places in the AST that do not have any meaningful value prior to the pass they are assigned a value. Historically these have been filled in with place holder values of the form panic "error message" This has meant the AST is difficult to traverse using standard generic programming techniques. The problem is addressed by introducing pass-specific data types, implemented as a pair of open type families, one for PostTc and one for PostRn. These are then explicitly populated with a PlaceHolder value when they do not yet have meaning. In terms of actual usage, we have the following PostTc id Kind PostTc id Type PostRn id Fixity PostRn id NameSet TcId and Var are synonyms for Id Unfortunately the type checker termination checking conditions fail for the DataId constraint type based on this, so even though it is safe the UndecidableInstances pragma is required where this is used. -} type DataId id = ( DataIdPost id , DataIdPost (NameOrRdrName id) ) type DataIdPost id = ( Data id , Data (PostRn id NameSet) , Data (PostRn id Fixity) , Data (PostRn id Bool) , Data (PostRn id Name) , Data (PostRn id (Located Name)) , Data (PostRn id [Name]) , Data (PostRn id id) , Data (PostTc id Type) , Data (PostTc id Coercion) , Data (PostTc id id) , Data (PostTc id [Type]) , Data (PostTc id ConLike) , Data (PostTc id [ConLike]) , Data (PostTc id HsWrapper) , Data (PostTc id [FieldLabel]) ) -- |Follow the @id@, but never beyond Name. 
This is used in a 'HsMatchContext', -- for printing messages related to a 'Match' type family NameOrRdrName id where NameOrRdrName Id = Name NameOrRdrName Name = Name NameOrRdrName RdrName = RdrName -- |Constraint type to bundle up the requirement for 'OutputableBndr' on both -- the @id@ and the 'NameOrRdrName' type for it type OutputableBndrId id = ( OutputableBndr id , OutputableBndr (NameOrRdrName id) )
repo_name: snoyberg/ghc
path: compiler/hsSyn/PlaceHolder.hs
license: bsd-3-clause
size: 4,085
n_ast_errors: 0
ast_max_depth: 10
n_whitespaces: 850
n_ast_nodes: 589
n_ast_terminals: 339
n_ast_nonterminals: 250
loc: 70
cycloplexity: 1

module WithRenamingIn2 where

sumSquares pow y
  = (let pow_1 = 2 in pow ^ pow_1) + (sq y)

sq x = x ^ pow
  where pow = 2
repo_name: kmate/HaRe
path: old/testing/unfoldDef/WithRenamingIn2_AstOut.hs
license: bsd-3-clause
size: 124
n_ast_errors: 0
ast_max_depth: 10
n_whitespaces: 35
n_ast_nodes: 61
n_ast_terminals: 32
n_ast_nonterminals: 29
loc: 4
cycloplexity: 1

module T12192 where
repo_name: ezyang/ghc
path: testsuite/tests/driver/T12192.hs
license: bsd-3-clause
size: 20
n_ast_errors: 0
ast_max_depth: 2
n_whitespaces: 3
n_ast_nodes: 4
n_ast_terminals: 3
n_ast_nonterminals: 1
loc: 1
cycloplexity: 0

import Test.Cabal.Prelude
-- Test unneed version bound on internal build-tools deps
main = setupAndCabalTest $ do
    setup' "configure" []
    assertOutputContains "extraneous version range" =<< setup' "sdist" []
repo_name: mydaum/cabal
path: cabal-testsuite/PackageTests/InternalVersions/BuildToolsExtra/setup.test.hs
license: bsd-3-clause
size: 222
n_ast_errors: 0
ast_max_depth: 10
n_whitespaces: 43
n_ast_nodes: 44
n_ast_terminals: 21
n_ast_nonterminals: 23
loc: 5
cycloplexity: 1

{-# OPTIONS_GHC -Wall -fwarn-tabs -Werror #-}

-- check that all warnings are emitted before failing due to -Werror (#1666)

-- missing type sig warning (type checker)
main = do let main = main   -- shadowing warning (renamer)
          putStrLn "hello"  -- tab warning (lexer)

f [] = []
f [] = [] -- overlapping pattern
-- incomplete pattern
repo_name: forked-upstream-packages-for-ghcjs/ghc
path: testsuite/tests/driver/werror.hs
license: bsd-3-clause
size: 341
n_ast_errors: 0
ast_max_depth: 10
n_whitespaces: 70
n_ast_nodes: 51
n_ast_terminals: 28
n_ast_nonterminals: 23
loc: 6
cycloplexity: 1

{-# LANGUAGE DeriveFunctor #-} {-# LANGUAGE RankNTypes #-} {-# LANGUAGE UndecidableInstances #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE NoMonomorphismRestriction #-} {-# LANGUAGE OverloadedStrings #-} -- convert account transactions in csv format to ledger entries -- pesco, 2009 http://www.khjk.org/log/2009/oct.html module Buchhaltung.Common (module Buchhaltung.Common ,module Buchhaltung.Utils ,module Buchhaltung.Types ,textstrip ) where import Buchhaltung.Types import Buchhaltung.Utils import Control.Applicative ((<$>)) import Control.Arrow import Control.Lens (Traversal', Lens', lens) import Control.Monad.RWS.Strict import Control.Monad.Reader import Control.Monad.Writer import qualified Data.Aeson as A import Data.Char import qualified Data.Csv as CSV import Data.Csv.Parser import Data.Decimal import Data.Foldable import qualified Data.HashMap.Strict as HM import Data.List import Data.List.NonEmpty (NonEmpty(..)) import qualified Data.List.NonEmpty as E import qualified Data.ListLike as L import qualified Data.ListLike.String as L import qualified Data.Map.Strict as M import Data.Maybe import Data.Ord import qualified Data.Set as S import qualified Data.Text as T import qualified Data.Text.IO as T import qualified Data.Text.Encoding as T import qualified Data.Text.Lazy as TL import Data.Text.Lazy.Encoding import qualified Data.Text.Lazy.Encoding as S import Data.Time.Calendar import Data.Time.Format import Data.Traversable (traverse) import qualified Data.Vector as V import Hledger (textstrip) import Hledger.Data hiding (at) import Hledger.Query import Hledger.Read import Hledger.Reports (defreportopts) import Hledger.Reports.EntriesReport (entriesReport) import System.IO import Text.Parsec import qualified Text.Parsec.Text as T import qualified Text.PrettyPrint.Boxes as P import Text.Printf -- * CONFIGURATION -- * CSV PARSER readcsv :: Char -> T.Text -> [[T.Text]] readcsv sep = map (readcsvrow sep) . T.lines readcsvrow :: Char -> T.Text -> [T.Text] readcsvrow sep s = either (error.msg.show) id (parse (p_csvrow sep) "stdin" s) where msg = printf "CSV (sep %c) Parsing error:\n\n%v\n\n%s" sep s p_csvrow :: Char -> T.Parser [T.Text] p_csvrow sep = sepBy1 (p_csvfield sep) (char sep) p_csvfield :: Char -> T.Parser T.Text p_csvfield sep = fmap T.pack $ between (char '"') (char '"') p_csvstring <|> many (noneOf [sep]) p_csvstring :: T.Parser String p_csvstring = many (noneOf "\"" <|> (string escapedDoubleQuotes >> return '"')) escapedDoubleQuotes = "\\\"" parens :: Parsec T.Text () Int parens = ( do char ('('::Char) m <- parens char ')' n <- parens return $ max (m+1) n ) <|> return 0 -- * dbacl output parser testdbacl = parseTest ( dbacl_parser [ "Aktiva:Transfer:Visa" ,"Aktiva:Transfer" ] ) ("Aktiva:Transfer 134.32 Aktiva:Transfer:Visa Aktiva:Transfer:Visa 9129.73 a " :: String) -- | parse dbacl output (see testdbacl for the format of dbacl's output) dbacl_parse :: [AccountName] -> String -> Either ParseError [(AccountName,String)] dbacl_parse accounts = fmap conv . 
parse (dbacl_parser sorted) "" where conv = fmap $ second L.unwords sorted = sortBy (flip $ comparing T.length) $ accounts dbacl_parser :: [AccountName] -> Parsec String () [(AccountName, [String])] dbacl_parser accounts = weiter [] where weiter :: [(AccountName, [String])] -> Parsec String () [(AccountName, [String])] weiter res = choice ((map (cat res) accounts) ++ [info res] ) cat res y = do newc <- try $ do string $ T.unpack y space return y spaces weiter $ (newc,[]) : res info ((c,i):res) = do w <- try $ manyTill anyChar (many1 space) weiter $ (c,i <> [w]) : res <|> do { w <- many anyChar; return ((c,i++[w]):res) } info [] = fail "empty list in dbacl_parser: This was not planned" -- * Utilities idx :: (Eq a, Show a) => [a] -> a -> Int idx xs x = maybe (error (show x++": CSV Field not found")) id (findIndex (==x) xs) -- * Dates -- | Read the journal file again, before applying Changes (to not -- overwrite possible changes, that were made in the mean time) -- saveChanges :: String -- ^ journal path -- -> (Journal-> (Journal, Integer)) -- -> IO Journal saveChanges :: (MonadReader (Options User config env) m, MonadIO m) => Maybe Journal -- this journal will be also changed and then returned -> (Journal -> (Journal, Integer)) -- ^ modifier, returning number of changed -> m Journal saveChanges journal change = do journalPath <- absolute =<< readLedger imported liftIO $ do ej <- readJournalFile definputopts -- ignore balance assertions journalPath -- print $ length todos -- putStr $ unlines $ show <$> todos -- either error (print.length.jtxns) ej let (j, n) = either error change ej if n == 0 then putStrLn "\nNo transactions were changed!\n" else do let res = showTransactions j writeFile journalPath res putStrLn $ "\n"++ show n ++" Transactions were changed" return $ maybe j (\j -> let (j2, m) = change j in if (n == m) then j2 else error $ printf "Error 123, see source code. Solution: Use a proper database instead of a file. read: %d passed: %d" n m ) journal mixed' :: Amount -> MixedAmount mixed' = mixed . (:[]) showTransactions :: Hledger.Data.Journal -> [Char] showTransactions = concatMap showTransactionUnelided . entriesReport defreportopts Hledger.Query.Any -- * Lenses jTrans :: Lens' Journal [Transaction] jTrans = lens jtxns $ \j y->j{jtxns=y} tPosts :: Lens' Transaction [Posting] tPosts = lens tpostings $ \t y -> t{tpostings=y} pAcc :: Lens' Posting AccountName pAcc = lens paccount $ \p y -> p{paccount=y} -- | replaces every matching transaction in the given journal counts -- the number of changed transactions changeTransaction :: [(Transaction, Transaction)] -> Journal -> (Journal, Integer) changeTransaction ts = countUpdates (jTrans . traverse) h where h t1 = asum $ fmap g ts where g (t2, tNew) = guard (t1 == t2) *> Just tNew -- | Update a traversal and count the number of updates countUpdates :: Traversal' s a -> (a -> Maybe a) -> s -> (s, Integer) countUpdates trav mod = second getSum . runWriter . trav g where g x = maybe (return x) ((tell (Sum 1) >>) . return) $ mod x -- instance Monoid.Monoid Integer where -- mempty = 0 -- mappend = (+) data WithSource a = WithSource { wTx :: Transaction , wIdx :: Int -- ^ index of the posting with source , wPosting :: Posting , wSource :: Source , wInfo :: a } deriving (Functor) -- instance Hashable Day where -- hash = fromInteger . toModifiedJulianDay -- hashWithSalt salt = hashWithSalt salt . 
toModifiedJulianDay -- instance Hashable Transaction where -- instance Hashable Posting where -- instance Hashable PostingType where -- instance Hashable MixedAmount where -- instance Hashable Amount where -- | extracts the source line from a Transaction extractSource :: ImportTag -> Transaction -> Either String (WithSource ()) extractSource tag' tx = left (<> "\nComments: " <> T.unpack (L.unlines $ pcomment <$> ps)) $ g $ asum $ zipWith f [0..] ps where f i p = fmap ((,,) i p) . E.nonEmpty . tail . T.splitOn tag $ pcomment p tag = commentPrefix tag' g Nothing = Left $ printf "no comment with matching tag '%s' found." tag g (Just (i,p,n)) = do source <- A.eitherDecode' . S.encodeUtf8 . TL.fromStrict . E.head $ n return $ WithSource tx i p source () ps = tpostings tx injectSource :: ImportTag -> Source -> Transaction -> Transaction injectSource tag source t = t {tpostings = reverse $ p1{pcomment = commentPrefix tag <> TL.toStrict (json source) } : rest} where (p1 : rest) = reverse $ tpostings t -- instance MonadReader (Options user Config env) m => ReaderM user env m commentPrefix :: ImportTag -> T.Text commentPrefix (ImportTag tag) = tag <> ": " trimnl :: T.Text -> T.Text trimnl = mconcat . T.lines -- * make CSV data easier to handle -- http://hackage.haskell.org/package/cassava-0.4.1.0/docs/Data-Csv.html#t:NamedRecord -- parseCsv :: CSV.FromField a => String -> V.Vector (HM.HashMap B.ByteString a) type MyRecord = HM.HashMap T.Text T.Text stripCsv :: ([T.Text], [MyRecord]) -> ([T.Text], [MyRecord]) stripCsv = fmap textstrip *** fmap (HM.fromList . fmap (textstrip *** textstrip ) . HM.toList) parseCsv :: Char -- ^ separator -> TL.Text -> ([T.Text], [MyRecord]) parseCsv sep = either error ((fmap T.decodeUtf8 . V.toList) *** V.toList) . CSV.decodeByNameWith CSV.defaultDecodeOptions { decDelimiter = fromIntegral $ ord sep } . 
encodeUtf8 getCsvCreditDebit :: T.Text -> T.Text -> MyRecord -> T.Text getCsvCreditDebit creditColumn debitColumn record = if T.any isDigit creditValue then "-" <> creditValue else debitValue where creditValue = getCsv creditColumn record debitValue = getCsv debitColumn record getCsvConcat :: [T.Text] -> MyRecord -> T.Text getCsvConcat fields record = L.unwords $ flip getCsv record <$> fields getCsvConcatDescription :: env -> [Description env] -> MyRecord -> T.Text getCsvConcatDescription env x record = L.unwords $ g <$> x where g (Field f) = getCsv f record g (Const t) = t g (Read f) = f env getCsv :: T.Text -> MyRecord -> T.Text getCsv c x = lookupErrD (show (HM.keys x)) HM.lookup c x -- * Import Types data ImportedEntry' a s = ImportedEntry { ieT :: Transaction -- ^ transaction without postings (they will be inserted later) ,iePostings :: a ,ieSource :: s -- ^ source to check for duplicates and for Bayesian matching } deriving Show type ImportedEntry = ImportedEntry' [(AccountId, T.Text, Maybe T.Text, Bool)] Source -- ^ postings of [acount,amount]: only ImportedEntry with one -- posting is currently implemented in the statists functionality of -- Add.hs (See PROBLEM1) as well in the duplicates algorithm in 'addNew' type FilledEntry = ImportedEntry' () Source fromFilled :: FilledEntry -> Entry fromFilled x = x{ieSource = Right $ ieSource x} type Entry = ImportedEntry' () (Either String Source) -- | helper function to create transaction for ImportedEntry genTrans :: Day -> Maybe Day -> T.Text -> Transaction genTrans date date2 desc = nulltransaction{tdate=date, tdescription=desc, tdate2=date2} normalizeMixedAmountWith :: (Amount -> Decimal) -> MixedAmount -> MixedAmount normalizeMixedAmountWith f (Mixed ams) = Mixed $ g <$> ams where g a = a{aquantity = normalizeDecimal $ f a} type Importer env = Either T.Text Handle -> CommonM (env, Maybe Version) [ImportedEntry] windoof :: Handle -> IO T.Text windoof h = do hSetNewlineMode h universalNewlineMode hSetEncoding h latin1 T.hGetContents h parseDate :: String -> T.Text -> Day parseDate format = parseTimeOrError True defaultTimeLocale format . T.unpack parseDateM :: Monad m => String -> T.Text -> m Day parseDateM format = parseTimeM True defaultTimeLocale format . T.unpack parseDateDE = parseDate "%d.%m.%Y" parseDateUS = parseDate "%m/%d/%Y" -- | retrieval function type Getter a = MyRecord -> a data CsvPostingImport = CsvPosting { cAccount :: Getter T.Text , cAmount :: Getter T.Text , cSuffix :: Maybe (Getter T.Text) , cNegate :: Getter Bool -- ^ Amount parsable by 'mamoumtp\'' } data CsvImport env = CSV { cFilter :: MyRecord -> Bool -- ^ should this csv line be processed? , cDate :: Getter Day , cStrip :: Bool , cVDate :: Getter (Maybe Day) , cBank :: env -> Getter T.Text , cHeader :: [T.Text] , cBayes :: [T.Text] , cDescription :: [Description env] , cVersion :: Version , cSeparator :: Char , cPostings :: [env -> CsvPostingImport] , cGetContents :: Handle -> IO T.Text } data Description env = Field T.Text | Const T.Text | Read (env -> T.Text) toField (Field t) = Just t toField _ = Nothing data CheckedCsvImport a = UnsafeCSV { cRaw :: CsvImport a } -- deriving (Show) toVersionedCSV :: SFormat DefaultVersion -> [CsvImport a] -> VersionedCSV a toVersionedCSV format headers = sequence $ (,) format $ fromListUnique $ (cVersion . cRaw &&& id) . checkRawCSV format <$> headers type VersionedCSV env = forall m. 
MonadError Msg m => m (SFormat DefaultVersion, M.Map Version (CheckedCsvImport env)) -- ^ (format with default version, _) data DefaultVersion = DefaultVersion { fromDefaultVersion :: Version } checkRawCSV :: SFormat b -> CsvImport a -> CheckedCsvImport a checkRawCSV format rh = if null missing then UnsafeCSV rh else error $ printf ("format '%s', version '%s': The configured header misses the following " ++ "fields required for Bayes or Description:\n%s") (fName format) (cVersion rh) $ unlines $ uncurry (printf "%s: %s") <$> missing where [head, bayes, desc] = S.fromList . ($rh) <$> [cHeader, cBayes, mapMaybe toField . cDescription] missing = concatMap (uncurry zip <&> repeat *** (fmap T.unpack . toList . flip S.difference head)) [("cBayes", bayes), ("cDescription", desc)] :: [(String, String)] -- * Pretty Printing table :: [Int] -- ^ max width -> [T.Text] -- ^ Header -> [[T.Text]] -- ^ list of cols -> P.Box table w h = table1 . table2 w h table1 :: NonEmpty [P.Box] -- ^ list of rows -> P.Box table1 (header :| rows) = P.punctuateH P.top (P.vcat P.top $ replicate (ml P.rows cols2) $ P.text " | ") cols2 where h colHead col = P.vcat P.left $ colHead : sep : col where sep = text' $ L.replicate (ml P.cols $ colHead : col) '-' ml f = maximum . fmap f cols2 = zipWith h header $ transpose rows table2 :: [Int] -- ^ max width -> [T.Text] -- ^ Header -> [[T.Text]] -- ^ list of cols -> NonEmpty [P.Box] -- ^ list of rows table2 widths header cols = toRow <$> (header :| transpose cols) where toRow = g . zipWith asd widths asd w = P.para P.left w . T.unpack g row = P.alignVert P.top mr <$> row where mr = maximum $ P.rows <$> row mlen :: L.ListLike l e => [l] -> Int mlen = maximum . fmap L.length text' :: T.Text -> P.Box text' = P.text . T.unpack loadJournal :: (MonadError Msg m, MonadIO m) => [Ledgers -> Maybe FilePath] -> Options User config env -> m Journal loadJournal journals options = do liftIO $ printf "(Reading journal from \n%s)\n...\n\n" $ intercalateL "\n" $ show <$> jfiles journal <- liftIO $ -- to conquer problems with the `instance Monoid Journal` right mconcat' . sequence <$> mapM (readJournalFile definputopts) jfiles either (throwError . T.pack) return journal where jfiles = runReader (catMaybes <$> mapM (mapM absolute <=< readLedger) journals) options jfiles :: [FilePath]
johannesgerer/buchhaltung
src/Buchhaltung/Common.hs
mit
16,087
0
18
4,240
4,550
2,455
2,095
-1
-1
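The countUpdates helper in Buchhaltung/Common.hs above is the core of the change-counting logic: it runs a partial update through a lens Traversal' and tallies how many targets actually changed. Below is a minimal, self-contained sketch of that idea, assuming only the lens and mtl packages; the example traversal, the bumpEven update function and main are illustrative and not taken from the project.

{-# LANGUAGE RankNTypes #-}

import Control.Lens (Traversal')
import Control.Monad.Writer (runWriter, tell)
import Data.Monoid (Sum (..))

-- Run a partial update through a traversal and count how many targets changed.
countUpdates :: Traversal' s a -> (a -> Maybe a) -> s -> (s, Integer)
countUpdates trav upd = fmap getSum . runWriter . trav step
  where
    -- Keep the old value when the update declines; otherwise log one change.
    step x = maybe (pure x) (\x' -> tell (Sum 1) >> pure x') (upd x)

-- Example: bump the even elements of a list and count how many were touched.
main :: IO ()
main = print (countUpdates traverse bumpEven [1, 2, 3, 4 :: Int])
  where
    bumpEven n
      | even n    = Just (n + 10)
      | otherwise = Nothing
-- prints ([1,12,3,14],2): two of the four elements were updated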
module FeatureModel.Parsers.GenericParser ( parseFeatureModel, parseInstanceModel, FmFormat ( FMPlugin, FMIde, FMGrammar, SXFM, SPLOT ) ) where import FeatureModel.Types import qualified BasicTypes as Core -- modules related to the FMPlugin parser import FeatureModel.Parsers.FMPlugin.XmlFeatureParser import FeatureModel.Parsers.FMPlugin.XmlFeatureModel (xmlFeature2FeatureTree, xml2FeatureConfiguration) -- modules related to the FMIde parser import FeatureModel.Parsers.FMIde.FMIde2FeatureModel import FeatureModel.Parsers.FMIde.AbsFMIde import FeatureModel.Parsers.FMIde.SkelFMIde import FeatureModel.Parsers.FMIde.ErrM import FeatureModel.Parsers.FMIde.LexFMIde import FeatureModel.Parsers.FMIde.ParFMIde -- modules related to the FMGrammar parser import qualified FeatureModel.Parsers.FMGrammar.Grammar2FeatureModel as GFMG import qualified FeatureModel.Parsers.FMGrammar.LexFMGrammar as LFMG import qualified FeatureModel.Parsers.FMGrammar.SkelFMGrammar as SFMG import qualified FeatureModel.Parsers.FMGrammar.AbsFMGrammar as AFMG import qualified FeatureModel.Parsers.FMGrammar.ParFMGrammar as PFMG import qualified FeatureModel.Parsers.FMGrammar.ErrM as EFMG --modules related to the SPLOT parser import FeatureModel.Parsers.SPLOT.NewSPLOT2FeatureModel import qualified FeatureModel.Parsers.SPLOT.LexSPLOT as LexSPLOT import FeatureModel.Parsers.SPLOT.SkelSPLOT import qualified FeatureModel.Parsers.SPLOT.AbsSPLOT as AbsSPLOT import qualified FeatureModel.Parsers.SPLOT.ParSPLOT as ParSPLOT import qualified FeatureModel.Parsers.SPLOT.ErrM as ErrSPLOT -- modules related to the SXFM parser import qualified FeatureModel.Parsers.SXFM.ParsecSXFM as ParsecSXFM import Text.ParserCombinators.Parsec import qualified Text.ParserCombinators.Parsec.Token as P import Text.ParserCombinators.Parsec.Language( haskellStyle ) import Text.XML.HXT.Core import Text.XML.HXT.RelaxNG data FmFormat = FMPlugin | FMIde | FMGrammar | SXFM | SPLOT -- The top most function for parsing feature models -- in different formats. -- parseFeatureModel (schema, fileName) format = do x <- readFile (fileName) case (format) of -- FMPlugin -> do -- fm <- translateFMPToFm schema fileName -- return fm -- -- FMIde -> do -- let fm = translateFMIdeToFm (pGrammar (myLexer x)) -- return fm -- -- FMGrammar -> do -- let fm = translateFMGrammarToFm (PFMG.pFMGrammar (PFMG.myLexer x)) -- return fm SPLOT -> do let fm = translateFMSPLOTToFm (ParSPLOT.pSPLOTModel (ParSPLOT.myLexer x)) return fm -- SXFM -> do -- r <- parseFromFile ParsecSXFM.parseFeatureModel fileName ; -- case (r) of -- Left err -> return $ Core.Fail (show err) -- Right f -> do let fm = f -- return $ Core.Success fm -- -- | -- Parse a feature configuration. This parser -- is based on the exported instance models from -- FMPlugin -- parseInstanceModel schema fileName = do errs <- checkXMLFile schema fileName case errs of [] -> do instanceModel <- parseInstanceModel' fileName return $ instanceModel otherwise -> do let errs' = concat $ map show errs return $ Core.Fail errs' parseInstanceModel' fileName = do i <- runX ( xunpickleDocument xpFeatureConfiguration [ withValidate yes , withTrace 1 , withRemoveWS yes , withPreserveComment yes ] (Core.createURI fileName) ) case i of [x] -> do return $ Core.Success (xml2FeatureConfiguration x) otherwise -> return $ Core.Fail "Error parsing instance configuration. Try to check it before parsing." 
translateFMIdeToFm (Ok g) = Core.Success (grammarToFeatureModel g) translateFMIdeToFm (Bad s) = Core.Fail s translateFMGrammarToFm (EFMG.Ok g) = Core.Success (GFMG.grammarToFeatureModel g) translateFMGrammarToFm (EFMG.Bad s) = Core.Fail s translateFMSPLOTToFm (ErrSPLOT.Ok g) = Core.Success (splotToFeatureModel g) translateFMSPLOTToFm (ErrSPLOT.Bad s) = Core.Fail s translateFMPToFm schema fileName = do errs <- checkXMLFile schema fileName case errs of [] -> do u <- runX ( xunpickleDocument xpFeature [ withValidate yes , withTrace 1 , withRemoveWS yes , withPreserveComment yes ] (Core.createURI fileName)); case u of [x] -> return $ Core.Success (FeatureModel { fmTree = (xmlFeature2FeatureTree x), fmConstraints = [] }) otherwise -> return $ Core.Fail "Error parsing feature model. Try to check it before parsing." -- errors found after checking the FMPlugin file otherwise -> return $ Core.Fail ("Error parsing feature model. " ++ (concat [show e | e <- errs])) checkXMLFile schema fileName = do errs <- runX ( errorMsgCollect >>> readDocument [ withValidate yes , withRelaxNG (Core.createURI schema) , withErrors yes ] (Core.createURI fileName) >>> getErrorMessages ) ; return errs
hephaestus-pl/hephaestus
alexandre/feature-modeling/src/FeatureModel/Parsers/GenericParser.hs
mit
5,323
2
21
1,301
1,018
582
436
93
3
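The translate*ToFm helpers in GenericParser.hs above all follow the same shape: pattern-match a BNFC-generated Ok/Bad parse result and wrap it as a success or failure of the application's result type. Here is a small, self-contained sketch of that pattern with stand-in types: Err mirrors the ErrM type BNFC generates, ParseResult stands in for Core.Success/Core.Fail, and the example data is invented.

-- Err mirrors the result type BNFC's ErrM module generates; ParseResult stands
-- in for Core.Success / Core.Fail.
data Err a = Ok a | Bad String

data ParseResult b = Success b | Fail String
  deriving Show

-- The shared shape of the translate*ToFm helpers: convert on Ok, report on Bad.
translateWith :: (a -> b) -> Err a -> ParseResult b
translateWith convert (Ok tree) = Success (convert tree)
translateWith _       (Bad msg) = Fail msg

main :: IO ()
main = do
  print (translateWith length (Ok ["root", "featureA", "featureB"]))
  print (translateWith length (Bad "syntax error at line 3" :: Err [String]))
-- prints: Success 3
--         Fail "syntax error at line 3"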
{-# LANGUAGE DeriveDataTypeable #-} import CompatibilityShims import Superscripts (superScriptNum) {- - Sub all unqualified usages of xK_{...}: :%s/\W\@<=\%(Key\.\)\@<!\%(xK\)\@=/Key./g -} import Numeric (showHex) import XMonad -- (ExtensionClass(..), xmonad) import XMonad.Actions.CycleWS import XMonad.Actions.DynamicWorkspaces import XMonad.Actions.GridSelect import XMonad.Actions.SwapWorkspaces import XMonad.Actions.Warp (warpToScreen) import XMonad.Hooks.DynamicLog import XMonad.Hooks.EwmhDesktops import XMonad.Hooks.ManageDocks (avoidStruts, docks) import XMonad.Hooks.UrgencyHook import XMonad.Layout.Grid import qualified XMonad.Layout.Fullscreen as FS import XMonad.Layout.WindowNavigation import XMonad.Util.Run import qualified XMonad.Util.ExtensibleState as XS import Data.Monoid import System.Exit import qualified XMonad.Layout.LayoutCombinators as LC import XMonad.Layout.Groups (group) import XMonad.Layout.Groups.Examples (zoomRowG) import XMonad.Layout.Groups.Wmii import XMonad.Layout.MessageControl (ignore, unEscape) import XMonad.Layout.Named (named) import XMonad.Layout.Renamed (renamed, Rename(CutWordsLeft)) import XMonad.Layout.Simplest import XMonad.Layout.Tabbed (addTabs) import XMonad.Operations (rescreen) import System.Directory (getHomeDirectory) import GHC.IO.Handle (hClose, hFlush) import GHC.IO.Handle.Types (Handle) import qualified XMonad.StackSet as W import qualified Data.Map as M import XMonad.Prompt import XMonad.Prompt.Workspace (workspacePrompt) import qualified Graphics.X11.Xlib.Atom as Atom import qualified Graphics.X11.Xlib.Extras as Xtras import qualified Graphics.X11.Types as Key import Graphics.X11.ExtraTypes.XF86 -- for myDynamicLogWithPP import Codec.Binary.UTF8.String (encodeString) import Data.List (find, intersperse, sortBy) import Data.Maybe (isJust, catMaybes) import System.Posix (getProcessGroupIDOf) import System.Posix.Signals (signalProcessGroup, sigTERM) import System.Posix.Types (ProcessGroupID, ProcessID) import XMonad.Util.NamedWindows import XMonad.Util.WorkspaceCompare (getWsCompareByTag, WorkspaceSort) import Control.Applicative (liftA) import qualified Control.Exception as E import Control.Monad (when) import Data.Set ((\\), toList, fromList) -- The preferred terminal program, which is used in a binding below and by -- certain contrib modules. -- myTerminal = "term" -- Whether focus follows the mouse pointer. myFocusFollowsMouse :: Bool myFocusFollowsMouse = True -- Width of the window border in pixels. -- myBorderWidth = 1 -- modMask lets you specify which modkey you want to use. The default -- is mod1Mask ("left alt"). You may also consider using mod3Mask -- ("right alt"), which does not conflict with emacs keybindings. The -- "windows key" is usually mod4Mask. -- myModMask = mod3Mask -- The default number of workspaces (virtual screens) and their names. -- By default we use numeric strings, but any string may be used as a -- workspace name. The number of workspaces is determined by the length -- of this list. -- -- A tagging example: -- -- > workspaces = ["web", "irc", "code" ] ++ map show [4..9] -- myWorkspaces = map show [1..10] -- Border colors for unfocused and focused windows, respectively. 
-- myNormalBorderColor = "#285577" myFocusedBorderColor = "#ff9900" myUrgentColor = "orange" myPrompt = def { bgColor = statusColorBG , fgColor = statusColorNormalFG , bgHLight = statusColorNormalFG , fgHLight = statusColorBG , borderColor = myNormalBorderColor , position = Top } ------------------------------------------------------------------------ -- Key bindings. Add, modify or remove key bindings here. -- -- probably-broken window movement commands. Using two competing -- metaphors (tabs and stacks), so we do both actions. -- windowDown = do sendMessage $ Go D focusDown windowUp = do sendMessage $ Go U focusUp data WarpViewStyle = Warp | Greedy deriving (Typeable, Show, Read, Eq, Enum, Bounded) instance ExtensionClass WarpViewStyle where initialValue = Warp warpViewCycle :: WarpViewStyle -> WarpViewStyle warpViewCycle x | x == maxBound = minBound | otherwise = succ x -- | -- Change to the specified workspace. If the newly-selected workspace was -- visible, but not primary, before the change, warp the mouse pointer to it. warpView :: WorkspaceId -> X () warpView tag = do XState { windowset = old } <- get style <- XS.get :: X WarpViewStyle case style of Greedy -> windows $ W.greedyView tag Warp -> (windows $ W.view tag) >> warpIfVisible tag old warpIfVisible :: String -> WindowSet -> X () warpIfVisible tag old = do let byTag = ((tag ==) . W.tag . W.workspace) case find byTag (W.visible old) of Just s -> warpToScreen (W.screen s) 0.4 0.5 _ -> return () currentWindows :: XState -> [Window] currentWindows = W.integrate' . W.stack . W.workspace . W.current . windowset shiftAll :: WorkspaceId -> X () shiftAll tag = gets currentWindows >>= windows . sendAll where sendAll :: [Window] -> WindowSet -> WindowSet sendAll = flip $ foldl sendWindow sendWindow :: WindowSet -> Window -> WindowSet sendWindow = flip $ W.shiftWin tag fixXinerama :: X () fixXinerama = do rescreen windows $ \wins@(W.StackSet { W.visible = vis, W.hidden = hid }) -> let present = fromList $ W.tag <$> W.workspaces wins wanted = fromList $ show <$> [1..10] missing = toList $ wanted \\ present base = W.workspace (W.current wins) withTag tag = base { W.tag = tag, W.stack = Nothing } in wins { W.hidden = W.hidden wins ++ (withTag <$> missing) } raiseWindowByClass :: String -> X () raiseWindowByClass cls = withDisplay $ \d -> do XConf{ theRoot = r } <- ask (_, _, lowToHigh) <- io $ Xtras.queryTree d r case (reverse lowToHigh) of (top:r:rs) -> do tclass <- classOf top when (tclass /= Just cls) $ withDisplay $ \d -> do win <- findFirst (r:rs) maybe (pure ()) (io . raiseWindow d) win _ -> pure () where findFirst :: [Window] -> X (Maybe Window) findFirst [] = pure Nothing findFirst (w:ws) = do c <- classOf w case c of Just x | x == cls -> return $ Just w _ -> findFirst ws classOf :: Window -> X (Maybe String) classOf win = do cs <- (classes win) `catchX` (pure []) case cs of (_:c:_) -> return $ Just c _ -> pure Nothing classes :: Window -> X [String] classes win = withDisplay $ \d -> io $ do p <- Xtras.getTextProperty d win Atom.wM_CLASS Xtras.wcTextPropertyToTextList d p myKeys conf@(XConfig {XMonad.modMask = modm}) = M.fromList $ -- launch a terminal [ ((modm, xK_Return), spawn $ "in-cwd " ++ XMonad.terminal conf) , ((modm .|. 
shiftMask, xK_Return), spawn $ "in-cwd uxterm") , ((mod1Mask, xK_Return), spawn $ "in-cwd " ++ XMonad.terminal conf) , ((mod4Mask, xK_Return), spawn $ "LC_ALL=en_US.UTF-8 term -e /bin/bash -l") , ((mod4Mask, xK_t), spawn "tmux-choose") -- launch dmenu , ((mod1Mask, xK_space), spawn "dmenu_run") -- other launchers ---- process monitoring , ((mod4Mask, xK_h), spawn $ XMonad.terminal conf ++ " -e htop") , ((mod4Mask, xK_i), spawn $ XMonad.terminal conf ++ " -e sudo iotop") , ((mod4Mask, xK_p), spawn $ XMonad.terminal conf ++ " -e sudo powertop") ---- web browsers , ((modm, xK_c), spawn "chromium --new-window") , ((mod4Mask, xK_c), spawn "chromium --incognito") , ((mod4Mask, xK_s), spawn "scratch") ---- ssh , ((modm, xK_u), spawn "ssh-choose -r -1") , ((modm, xK_slash), spawn "ssh-choose -r") ---- screensaver/sleep , ((modm, xK_Scroll_Lock), spawn "xscreensaver-command -activate") , ((mod4Mask, xK_Scroll_Lock), spawn "xscreensaver-command -activate") , ((mod4Mask, xK_l), spawn "xscreensaver-command -activate") , ((0, xF86XK_ScreenSaver), spawn "xscreensaver-command -activate") , ((0, xF86XK_Sleep), spawn "slp") , ((mod4Mask .|. shiftMask, xK_l), spawn "slp") , ((mod4Mask .|. controlMask, xK_l), spawn "blank") ---- screenshot , ((0, xK_Print), spawn "screenshot") , ((mod4Mask, xK_Multi_key), spawn "screenshot") ---- monitor toggle , ((0, xF86XK_Display), spawn "mon --toggle") , ((shiftMask, xF86XK_Display), spawn "mon --cycle") ---- mpd , ((0, xF86XK_AudioPlay), spawn "music toggle") , ((0, xF86XK_AudioPause), spawn "music toggle") {- headset sends "Pause" -} , ((0, xF86XK_AudioStop), spawn "music stop") , ((0, xF86XK_AudioNext), spawn "music next") , ((0, xF86XK_AudioPrev), spawn "music prev") , ((0, xF86XK_AudioMute), spawn "toggle-pulse-mute") , ((mod4Mask, xF86XK_AudioRaiseVolume), spawn "sound-check") , let speakerTest = spawn "speaker-test -t wav -c2 -l1" in ((mod4Mask .|. shiftMask, xF86XK_AudioRaiseVolume), speakerTest) , ((0, xF86XK_AudioLowerVolume), spawn "pavucontrol") ---- brightness , ((modm, xK_F5), spawn "brightness = 1") , ((modm, xF86XK_MonBrightnessDown), spawn "brightness = 1") , ((modm .|. shiftMask, xK_F5), spawn "brightness down") , ((modm, xK_F6), spawn "brightness max") , ((modm, xF86XK_MonBrightnessUp), spawn "brightness max") , ((modm .|. shiftMask, xK_F6), spawn "brightness up") ---- pickers -- `run` a command , ((mod4Mask, xK_r), spawn "pick run") -- `vim` by file(s) being edited , ((mod4Mask, xK_v), spawn "pick kak") , ((mod4Mask .|. shiftMask, xK_v), spawn "pick vim") -- `zsh` by command or working directory , ((mod4Mask, xK_z), spawn "pick zsh") -- `mpc` by song , ((mod4Mask, xK_m), spawn "pick mpc") -- `window` , ((mod4Mask, xK_w), spawn "pick window") -- `wifi` ('shift+' because it's less common) , ((mod4Mask .|. shiftMask, xK_w), spawn "wifi-chooser") -- launch gmrun , ((modm .|. shiftMask, xK_p), spawn "gmrun") -- close focused window , ((modm .|. shiftMask, xK_c), kill) -- Rotate through the available layout algorithms , ((mod4Mask, xK_space), sendMessage NextLayout) -- Reset the layouts on the current workspace to default , ((mod4Mask .|. shiftMask, xK_space), setLayout $ XMonad.layoutHook conf) -- Resize viewed windows to the correct size , ((modm, xK_n), refresh) -- Swap the focused window and the master window , ((mod4Mask .|. shiftMask, xK_Return), windows W.swapMaster) -- Increment the number of windows in the master area , ((modm .|. shiftMask, xK_comma), sendMessage (IncMasterN 1)) -- Deincrement the number of windows in the master area , ((modm .|. 
shiftMask, xK_period), sendMessage (IncMasterN (-1))) -- Quit xmonad , ((modm .|. shiftMask, xK_q), io (exitWith ExitSuccess)) -- Restart xmonad , ((modm, xK_q), spawn "xmonad --recompile && xmonad --restart") , ((modm, xK_r), spawn "xmonad --recompile") ] ++ -- Directional movement [ ((modm, xK_h), sendMessage $ Go L) , ((modm, xK_j), windowDown) , ((modm, xK_k), windowUp) , ((modm, xK_l), sendMessage $ Go R) , ((modm .|. shiftMask, xK_j), sendMessage $ Swap D) , ((modm .|. shiftMask, xK_j), swapDown) , ((modm .|. shiftMask, xK_k), sendMessage $ Swap U) , ((modm .|. shiftMask, xK_k), swapUp) ] ++ -- wmii style layouts [ ((modm, xK_s), groupToTabbedLayout) , ((modm, xK_m), groupToFullLayout) , ((modm, xK_f), groupToNextLayout) , ((modm, xK_d), groupToVerticalLayout) , ((modm .|. shiftMask, xK_h), moveToGroupUp False) , ((modm .|. shiftMask, xK_l), moveToGroupDown False) , ((modm, xK_space), withFocused float) , ((modm .|. shiftMask, xK_space), withFocused $ windows . W.sink) ] ++ -- named workspaces [ ((modm, xK_t), selectWorkspace myPrompt) , ((modm .|. shiftMask, xK_t), withWorkspace myPrompt (windows . W.shift)) , ((modm, xK_p), moveTo Prev NonEmptyWS) , ((modm .|. shiftMask, xK_p), swapTo Prev) , ((modm, xK_n), moveTo Next NonEmptyWS) , ((modm .|. shiftMask, xK_n), swapTo Next) , ((modm .|. shiftMask, xK_minus), removeEmptyWorkspace) ] ++ -- fix issue with Xinerama when (dis-/)connecting laptop dock [ ((mod4Mask, xK_q), fixXinerama) ] ++ -- -- mod-[1..9,0,i], Switch to workspace N -- mod-shift-[1..9,0,i], Move client to workspace N -- "0" maps to N=10 -- "i" for "IM" maps to N=7 (force of habit) -- let workspaces = show <$> [1 .. 9] ++ [10, 7] keys = [xK_1 .. xK_9] ++ [xK_0, xK_i] in [ ((modm .|. mask, key), action workspace) | (workspace, key) <- zip workspaces keys , (mask, action) <- [ (0, warpView) , (shiftMask, windows . W.shift) , (mod4Mask, windows . W.greedyView) , (mod4Mask .|. shiftMask, shiftAll) ] ] ++ -- Toggle the state for warpView actions [ ((mod4Mask .|. shiftMask, xK_1), XS.modify warpViewCycle >> refresh) ] ++ -- Ctrl+Win+[q,w,e,r,t,...] = warp to screen 0,1,2,... [ ((mod4Mask .|. controlMask, key), warpToScreen screen 0.5 0.5) | (key, screen) <- zip [xK_q, xK_w, xK_e, xK_r, xK_t, xK_y] [0..]] ++ -- Grid Select [ ((mod4Mask, xK_g), goToSelected def) ] ------------------------------------------------------------------------ -- Mouse bindings: default actions bound to mouse events -- type WinFunc = Window -> X () modsMouseBindings :: ([KeyMask], Button, WinFunc) -> [((KeyMask, Button), WinFunc)] modsMouseBindings (ms, b, f) = map (\m -> ((m, b), f)) $ ms myMouseBindings (XConfig {XMonad.modMask = modm}) = M.fromList $ concat $ map modsMouseBindings $ -- mod-button1, Set the window to floating mode and move by dragging [ (mainmods, button1, (\w -> focus w >> mouseMoveWindow w)) , (mainmods, button2, (\w -> focus w >> windows W.shiftMaster)) -- mod-button3, Set the window to floating mode and resize by dragging , (mainmods, button3, (\w -> focus w >> mouseResizeWindow w)) -- alt + scroll = workspace up/down , (mainmods, button4, (\w -> windowUp)) , (mainmods, button5, (\w -> windowDown)) ] where mainmods = [modm, mod1Mask] ------------------------------------------------------------------------ -- Layouts: -- You can specify and transform your layouts by modifying these values. 
-- If you change layout bindings be sure to use 'mod-shift-space' after -- restarting (with 'mod-q') to reset your layout state to the new -- defaults, as xmonad preserves your old layout settings by default. -- -- The available layouts. Note that each layout is separated by |||, -- which denotes layout choice. -- myLayoutHook = windowNavigation $ FS.fullscreenFocus $ myWmii ||| wmiiLike ||| Grid where -- Default proportion of screen occupied by master pane ratio = 1/2 -- Percent of screen to increment by when resizing panes delta = 3/100 wmiiLike = wmii shrinkText def -- myWmii is basically the same as stock wmii, -- but it defaults to tabs first myWmii = group innerLayout zoomRowG where column = named "Column" $ avoidStruts $ Tall 0 delta ratio tabs = named "Tabs" $ avoidStruts $ Simplest innerLayout = renamed [CutWordsLeft 3] $ addTabs shrinkText def $ ignore NextLayout $ ignore (LC.JumpToLayout "") $ unEscape $ tabs LC.||| column LC.||| Full ------------------------------------------------------------------------ -- Window rules: -- Execute arbitrary actions and WindowSet manipulations when managing -- a new window. You can use this to, for example, always float a -- particular program, or have a client always appear on a particular -- workspace. -- -- To find the property name associated with a program, use -- > xprop | grep WM_CLASS -- and click on the client you're interested in. -- -- To match on the WM_NAME, you can use 'title' in the same way that -- 'className' and 'resource' are used below. -- myManageHook = FS.fullscreenManageHook <+> composeAll [ className =? "MPlayer" --> doFloat , resource =? "desktop_window" --> doIgnore , resource =? "kdesktop" --> doIgnore , className =? "Pinentry" --> doFloat , className =? "Pavucontrol" --> doFloat , className =? "Vncviewer" --> doFloat , className =? "sun-awt-X11-XFramePeer" --> doFloat , className =? "java-lang-Thread" --> doFloat , className =? "xfreerdp" --> unfloat , title =? "QEMU" --> doFloat , title =? "Authy" --> doFloat ] where unfloat = ask >>= doF . W.sink ------------------------------------------------------------------------ -- Event handling -- * EwmhDesktops users should change this to ewmhDesktopsEventHook -- -- Defines a custom handler function for X Events. The function should -- return (All True) if the default handler is to be run afterwards. To -- combine event hooks use mappend or mconcat from Data.Monoid. -- myEventHook = FS.fullscreenEventHook ------------------------------------------------------------------------ -- Status bars and logging -- Perform an arbitrary action on each internal state change or X event. -- See the 'XMonad.Hooks.DynamicLog' extension for examples. -- myLayoutDisplay :: String -> String myLayoutDisplay "Tall" = "[]=" myLayoutDisplay "Full" = "[M]" myLayoutDisplay "Tabs by ZoomRow" = "tabs" myLayoutDisplay other = wrap "(layout:" ")" other statusColorNormalFG = "white" statusColorSubdued = "gray60" statusColorBG = "#285577" statusBarProc :: String -> String statusBarProc xmonadDir = "dzen2 -dock -expand right -w 980 -x 0 -fn 'DejaVu Sans Mono'" statusBarColor = dzenColor statusNormalColor = statusBarColor statusColorNormalFG statusColorBG escapeStatusCodes :: String -> String escapeStatusCodes title = foldl (\acc c -> acc ++ case c of '{' -> "(" '}' -> ")" a -> [a]) [] title -- | Output a list of strings, ignoring empty ones and separating the -- rest with the given separator. sepBy :: String -- ^ separator -> [String] -- ^ fields to output -> String sepBy sep = concat . 
intersperse sep . filter (not . null) -- | Format the current status using the supplied pretty-printing format, -- and write it to stdout. myDynamicLogWithPP :: PP -> X () myDynamicLogWithPP pp = myDynamicLogString pp >>= io . ppOutput pp -- | Format the workspace information, given a workspace sorting function, -- a list of urgent windows, a pretty-printer format, and the current -- WindowSet. myPprWindowSet :: WorkspaceSort -> [Window] -> PP -> WindowSet -> String myPprWindowSet sort' urgents pp s = sepBy (ppWsSep pp) . map fmt . sort' $ map W.workspace (W.current s : W.visible s) ++ W.hidden s where this = W.currentTag s visibles = map (W.tag . W.workspace) (W.visible s) fmt w = printer pp (W.tag w ++ superScriptNum (nws w)) where printer | any (\x -> maybe False (== W.tag w) (W.findTag x s)) urgents = ppUrgent | W.tag w == this = ppCurrent | W.tag w `elem` visibles = ppVisible | isJust (W.stack w) = ppHidden | otherwise = ppHiddenNoWindows nws = length . W.integrate' . W.stack -- | The same as 'dynamicLogWithPP', except it simply returns the status -- as a formatted string without actually printing it to stdout, to -- allow for further processing, or use in some application other than -- a status bar. myDynamicLogString :: PP -> X String myDynamicLogString pp = do winset <- gets windowset urgents <- readUrgents sort' <- ppSort pp style <- XS.get :: X WarpViewStyle -- layout description let ld = description . W.layout . W.workspace . W.current $ winset -- workspace list let ws = pprWindowSet sort' urgents pp winset -- let ws = show ((ppSort pp) $ map W.tag $ W.hidden $ winset) -- let ws = show (W.allWindows winset) -- let ws = show $ map length $ map W.tag $ W.hidden $ winset -- let ws = show $ map (\x -> ((length . W.integrate' . W.stack) x, W.tag x) :: (Int,String)) $ sortBy getWsCompareByTag $ (W.workspaces winset) -- window title wt <- maybe (return "") (fmap show . getName) . W.peek $ winset -- run extra loggers, ignoring any that generate errors. extras <- mapM (flip catchX (return Nothing)) $ ppExtras pp -- Description of current `warpView` style let warpStyle = statusNormalColor $ show style return $ encodeString . sepBy (ppSep pp) . ppOrder pp $ [ ws , warpStyle , ppLayout pp ld , ppTitle pp wt ] ++ catMaybes extras withLogHandlePP :: PP -> Handle -> PP withLogHandlePP pp proc = pp { ppOutput = hPutStrLn proc . escapeStatusCodes } withLogHandlePPX :: X PP -> Handle -> X PP withLogHandlePPX xpp proc = do pp <- xpp return (withLogHandlePP pp proc) myLogPP = def { ppCurrent = statusBarColor myNormalBorderColor statusColorNormalFG , ppHidden = statusNormalColor , ppHiddenNoWindows = const "" , ppUrgent = statusBarColor myNormalBorderColor myUrgentColor , ppTitle = statusNormalColor . shorten 120 , ppLayout = statusNormalColor . 
myLayoutDisplay , ppSep = statusNormalColor " │ " , ppWsSep = statusNormalColor " " } dropcolon :: String -> String dropcolon s = if takeWhile (/= ':') s == s then s else drop 1 $ dropWhile (/= ':') s myLogHook :: Handle -> X () myLogHook statusproc = do raiseWindowByClass "Dunst" myDynamicLogWithPP (withLogHandlePP myLogPP statusproc) wrapIn :: String -> String -> String wrapIn q string = q ++ string ++ q quote :: String -> String quote = wrapIn "'" arg :: String -> String -> [String] arg flag value = [("-" ++ flag), quote value] externalStatusCmd :: String externalStatusCmd = let fg = statusColorNormalFG bg = statusColorBG cmd arg0 args = unwords $ [arg0] ++ args status = cmd "status" $ quote <$> [fg, bg] dzenOptions = ["-dock", "-ta", "r", "-w", "800", "-x", "972"] fontName = "DejaVu Sans Mono" flags = [("fn", fontName), ("fg", fg), ("bg", bg)] displayOptions = (unwords . uncurry arg) <$> flags dzen2 = cmd "dzen2" $ dzenOptions ++ displayOptions in status ++ " | " ++ dzen2 ------------------------------------------------------------------------ -- Startup hook -- Perform an arbitrary action each time xmonad starts or is restarted -- with mod-q. Used by, e.g., XMonad.Layout.PerWorkspace to initialize -- per-workspace layout choices. -- -- By default, do nothing. myStartupHook = ewmhDesktopsStartup ------------------------------------------------------------------------ -- Shutdown hook -- Perform an arbitrary action each time xmonad exits. -- Normal shutdown occurs by not catching an ExitSuccess exception. -- For the hook, first run the action, then rethrow the error. myShutdownHook :: IO () -> E.SomeException -> IO () myShutdownHook pre e = pre >> E.throw e -- |Kill the statusbar handlers by closing one's pipe and killing the other. killStatusProcs :: Handle -> ProcessID -> IO () killStatusProcs h pid = do E.handle printException $ hFlush h E.handle printException $ hClose h pgid <- getPGID pid E.handle printException $ either E.throw (signalProcessGroup sigTERM) pgid where getPGID :: ProcessID -> IO (Either E.SomeException ProcessGroupID) getPGID = E.try . getProcessGroupIDOf printException :: E.SomeException -> IO () printException = putStrLn . show -- |Modifies an XConfig to install a handler for XMONAD_RESTART events. withRestartHook :: IO () -> XConfig l -> XConfig l withRestartHook handler conf@XConfig { handleEventHook = orig } = conf { handleEventHook = handleRestartEvent handler >> orig } -- |Run a hook when the restart message is received. handleRestartEvent :: IO () -> Event -> X () -- Process ClientMessageEvent to check its type. handleRestartEvent onrestart e@ClientMessageEvent {ev_message_type = msgT} = do restartAtom <- getAtom "XMONAD_RESTART" if (msgT == restartAtom) then io onrestart else return () -- Ignore everything else. handleRestartEvent _ _ = return () ------------------------------------------------------------------------ -- Now run xmonad with all the defaults we set up. -- Run xmonad with the settings you specify. No need to modify this. 
-- main = do homeDir <- getHomeDirectory statusproc <- spawnPipe $ statusBarProc (homeDir ++ "/.xmonad") barPid <- spawnPID externalStatusCmd let statusKiller = killStatusProcs statusproc barPid E.handle (myShutdownHook statusKiller) $ xmonad $ docks $ ewmh $ withRestartHook statusKiller $ withUrgencyHook NoUrgencyHook $ def { -- simple stuff terminal = myTerminal, focusFollowsMouse = myFocusFollowsMouse, borderWidth = myBorderWidth, modMask = myModMask, workspaces = myWorkspaces, normalBorderColor = myNormalBorderColor, focusedBorderColor = myFocusedBorderColor, -- key bindings keys = myKeys, mouseBindings = myMouseBindings, -- hooks, layouts layoutHook = myLayoutHook, manageHook = myManageHook, handleEventHook = myEventHook, logHook = myLogHook statusproc, startupHook = myStartupHook }
benizi/dotfiles
.xmonad/xmonad.hs
mit
26,147
0
21
6,107
6,379
3,603
2,776
428
5
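The xmonad configuration above wires its keybindings by building a Data.Map from (modifier, key) pairs to X () actions and installing it through the keys field of XConfig. Below is a minimal, self-contained sketch of that pattern, assuming only the xmonad package; the terminal command and the three bindings are illustrative, and a real configuration would merge with, rather than replace, the default bindings.

import XMonad
import qualified Data.Map as M

-- Bind a handful of actions; note this replaces the default key map entirely,
-- so a real configuration would union it with the defaults instead.
myKeys :: XConfig Layout -> M.Map (KeyMask, KeySym) (X ())
myKeys conf@(XConfig {XMonad.modMask = modm}) = M.fromList
  [ ((modm,               xK_Return), spawn (XMonad.terminal conf)) -- launch a terminal
  , ((modm .|. shiftMask, xK_c     ), kill)                         -- close focused window
  , ((modm,               xK_space ), sendMessage NextLayout)       -- cycle layouts
  ]

main :: IO ()
main = xmonad def
  { terminal = "xterm"
  , modMask  = mod4Mask
  , keys     = myKeys
  }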
{-# LANGUAGE LambdaCase #-} {-# LANGUAGE OverloadedStrings #-} module Hummingbird.Logging ( Config (..) , Appender (..) , setup ) where -------------------------------------------------------------------------------- -- | -- Module : Hummingbird.Logging -- Copyright : (c) Lars Petersen 2017 -- License : MIT -- -- Maintainer : [email protected] -- Stability : experimental -------------------------------------------------------------------------------- import Control.Monad (forM_) import Data.Aeson import Data.Aeson.Types import System.IO import qualified System.Log.Formatter as Log import qualified System.Log.Handler as Log hiding (setLevel) import qualified System.Log.Handler.Simple as Log import qualified System.Log.Handler.Syslog as Log import qualified System.Log.Logger as Log data Config = Config { level :: Log.Priority , appenders :: [Appender] } deriving (Eq, Ord, Show) data Appender = SyslogAppender | ConsoleAppender deriving (Eq, Ord, Show) instance FromJSON Config where parseJSON (Object v) = Config <$> pLevel <*> v .: "appenders" where pLevel = do s <- v .: "level" :: Parser String case s of "DEBUG" -> pure Log.DEBUG "INFO" -> pure Log.INFO "NOTICE" -> pure Log.NOTICE "WARNING" -> pure Log.WARNING "ERROR" -> pure Log.ERROR "CRITICAL" -> pure Log.CRITICAL "ALERT" -> pure Log.ALERT "EMERGENCY" -> pure Log.EMERGENCY _ -> fail "Expected DEBUG, INFO, WARNING, ERROR etc." parseJSON invalid = typeMismatch "Config" invalid instance FromJSON Appender where parseJSON (Object v) = do t <- v .: "type" :: Parser String case t of "syslog" -> pure SyslogAppender "console" -> pure ConsoleAppender _ -> fail "Expected 'syslog' or 'console'." parseJSON invalid = typeMismatch "Appender" invalid setup :: Config -> IO () setup config = do Log.removeAllHandlers Log.updateGlobalLogger Log.rootLoggerName (Log.setLevel $ level config) forM_ (appenders config) $ \case SyslogAppender -> do s <- Log.openlog "hummingbird" [Log.PID] Log.USER Log.DEBUG Log.updateGlobalLogger Log.rootLoggerName (Log.addHandler s) ConsoleAppender -> do lh <- Log.streamHandler stderr Log.DEBUG let h = Log.setFormatter lh (Log.simpleLogFormatter "[$time : $loggername : $prio] $msg") Log.updateGlobalLogger Log.rootLoggerName (Log.addHandler h) Log.infoM "hummingbird" "Started hummingbird MQTT message broker."
lpeterse/haskell-hummingbird
src/Hummingbird/Logging.hs
mit
2,702
0
19
686
640
336
304
62
2
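The FromJSON instances in Hummingbird.Logging above accept a JSON object with a "level" string and a list of appenders carrying a "type" field. The following usage sketch assumes the aeson package and a project in which Hummingbird.Logging is importable; the JSON literal is only an assumed example of a valid configuration.

import Data.Aeson (eitherDecode)
import qualified Data.ByteString.Lazy.Char8 as BL
import Hummingbird.Logging (Config, setup)

-- A JSON snippet of the shape the FromJSON instances above accept.
sampleConfig :: BL.ByteString
sampleConfig = BL.pack
  "{\"level\":\"INFO\",\"appenders\":[{\"type\":\"console\"},{\"type\":\"syslog\"}]}"

main :: IO ()
main =
  case eitherDecode sampleConfig :: Either String Config of
    Left err  -> putStrLn ("could not parse logging config: " ++ err)
    Right cfg -> setup cfg  -- installs the console and syslog handlers at INFO level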
skips :: [a] -> [[a]]
skips []   = []
skips list = skipsAux [1..(length list)] list

skipsAux :: [Int] -> [a] -> [[a]]
skipsAux [] _  = []
skipsAux _  [] = []
{- 1) Create a list of tuples [(index, value)]
   2) Filter tuples whose indexes are multiples of a particular value
   3) Extract the value out of the tuples via map
   4) Keep doing this until the list of multiples is exhausted -}
skipsAux (x:xs) list =
  (map (\(a,b) -> b) (filter (\(a,b) -> a `mod` x == 0) (zip [1..(length list)] list))) : (skipsAux xs list)
julitopower/HaskellLearning
hopscotch.hs
mit
594
0
14
189
211
116
95
10
1
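skipsAux above keeps every n-th element by zipping with indexes and filtering on divisibility. For comparison, here is a hedged alternative sketch of the same algorithm with the index-keeping logic pulled into a helper; the behaviour is intended to match the original, e.g. skips "ABCD" == ["ABCD","BD","C","D"].

skips :: [a] -> [[a]]
skips xs = map (`every` xs) [1 .. length xs]
  where
    -- every n keeps the n-th, 2n-th, 3n-th, ... elements of a list
    every :: Int -> [a] -> [a]
    every n = map snd . filter ((== 0) . (`mod` n) . fst) . zip [1 ..]

main :: IO ()
main = do
  print (skips "ABCD")         -- ["ABCD","BD","C","D"]
  print (skips ([] :: [Bool])) -- []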
module Y2017.M02.D09.Exercise where

{--
Today's problem is inspired by

http://rosalind.info/problems/fibo/

Fibonacci Numbers solved by 2850 as of February 8th, 2017

Problem

The Fibonacci numbers 0,1,1,2,3,5,8,13,21,34,... are generated by the
following simple rule

F(0) = 0
F(1) = 1
F(n) = F(n-1) + F(n-2) for n > 1

Given: A positive integer n <= 25
Return: The value of F(n)
--}

sample :: Integer
sample = 6

result :: Integer
result = 8

fibo :: Integer -> Integer
fibo n = undefined

-- So here's the thing: if you define fibo doubly-recursively, as shown above,
-- you can get an answer within a second for fibo 25. Try it.

big :: Integer
big = 25

-- What is the value of fibo big?
-- Not a problem on systems today with GHC, because GHC is that good.

{-- BONUS -----------------------------------------------------------------

but: --}

really :: Integer
really = 100

{--
What is the value of fibo really?

That's a problem, isn't it, because fibo is in exponential time if defined
doubly-recursively.

But here's the thing: if you know F(n), you already know F(n-1) ... so why
recompute that subtree when you've already just computed it?

So, use that knowledge. Retain it. Define a fibonacci function that returns
the fibonacci number at n in linear time by retaining the previous [0..n-1]
fibonacci numbers.

This is what we call dynamic programming.
--}

fibr :: [Integer] -> Integer -> Integer
fibr fibs n = undefined

-- Of course you need to seed your fibonacci computer for it to work. What
-- shall your seed be?

seed :: [Integer]
seed = undefined

-- What are the values of map (fibr seed) [sample, big, really]?
-- Do they return in a timely manner?
geophf/1HaskellADay
exercises/HAD/Y2017/M02/D09/Exercise.hs
mit
1,662
0
6
309
115
72
43
15
1
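The bonus part of the exercise above asks for a linear-time Fibonacci that retains previously computed values. One well-known way to do that in Haskell is the lazy self-referential list below; note it does not use the fibr/seed signature the exercise defines, so treat it as an illustrative sketch of the dynamic-programming idea rather than the intended answer.

-- F(n) for all n, built by retaining every previously computed value.
fibs :: [Integer]
fibs = 0 : 1 : zipWith (+) fibs (tail fibs)

fiboLinear :: Int -> Integer
fiboLinear n = fibs !! n

main :: IO ()
main = mapM_ (print . fiboLinear) [6, 25, 100]
-- prints 8, 75025 and 354224848179261915075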
{-| Low-Level IPC flows for interacting with the serf process. - Serf process can be started and shutdown with `start` and `stop`. - You can ask the serf what it's last event was with `serfLastEventBlocking`. - A running serf can be asked to compact it's heap or take a snapshot. - You can scry into a running serf. - A running serf can be asked to execute a boot sequence, replay from existing events, and run a ship with `boot`, `replay`, and `run`. The `run` and `replay` flows will do batching of events to keep the IPC pipe full. ``` |% :: +writ: from king to serf :: +$ writ $% $: %live $% [%cram eve=@] [%exit cod=@] [%save eve=@] [%meld ~] [%pack ~] == == :: sam=[gang (each path $%([%once @tas @tas path] [beam @tas beam]))] [%peek mil=@ sam=*] [%play eve=@ lit=(list ?((pair @da ovum) *))] [%work mil=@ job=(pair @da ovum)] == :: +plea: from serf to king :: +$ plea $% [%live ~] [%ripe [pro=%1 hon=@ nok=@] eve=@ mug=@] [%slog pri=@ tank] [%flog cord] $: %peek $% [%done dat=(unit (cask))] [%bail dud=goof] == == $: %play $% [%done mug=@] [%bail eve=@ mug=@ dud=goof] == == $: %work $% [%done eve=@ mug=@ fec=(list ovum)] [%swap eve=@ mug=@ job=(pair @da ovum) fec=(list ovum)] [%bail lud=(list goof)] == == == -- ``` -} module Urbit.Vere.Serf.IPC ( Serf , start , stop , serfLastEventBlocking , snapshot , compact , scry , boot , replay , run , swim , sendSIGINT , module Urbit.Vere.Serf.Types ) where import Urbit.Prelude hiding ((<|)) import Data.Bits import Data.Conduit import System.Process import Urbit.Vere.Serf.Types import Urbit.Vere.Serf.IPC.Types import Control.Monad.STM (retry) import Control.Monad.Trans.Resource (MonadResource, allocate, runResourceT) import Data.Sequence (Seq((:<|), (:|>))) import Foreign.Marshal.Alloc (alloca) import Foreign.Ptr (castPtr) import Foreign.Storable (peek, poke) import RIO.Prelude (decodeUtf8Lenient) import System.Posix.Signals (sigINT, sigKILL, signalProcess) import Urbit.Arvo (FX) import Urbit.Arvo.Event import Urbit.Noun.Time (Wen) import qualified Data.ByteString as BS import qualified Data.ByteString.Unsafe as BS import qualified System.IO.Error as IO import qualified Urbit.Noun.Time as Time -- Serf API -------------------------------------------------------------------- data Serf = Serf { serfSend :: Handle , serfRecv :: Handle , serfProc :: ProcessHandle , serfSlog :: Slog -> IO () , serfLock :: MVar (Maybe SerfState) } -- Access Current Serf State --------------------------------------------------- serfLastEventBlocking :: Serf -> IO EventId serfLastEventBlocking Serf{serfLock} = readMVar serfLock >>= \case Nothing -> throwIO SerfNotRunning Just ss -> pure (ssLast ss) -- Low Level IPC Functions ----------------------------------------------------- fromRightExn :: (Exception e, MonadIO m) => Either a b -> (a -> e) -> m b fromRightExn (Left m) exn = throwIO (exn m) fromRightExn (Right x) _ = pure x -- TODO Support Big Endian sendLen :: Serf -> Int -> IO () sendLen s i = do w <- evaluate (fromIntegral i :: Word64) withWord64AsByteString w (hPut (serfSend s)) where withWord64AsByteString :: Word64 -> (ByteString -> IO a) -> IO a withWord64AsByteString w k = alloca $ \wp -> do poke wp w bs <- BS.unsafePackCStringLen (castPtr wp, 8) k bs sendBytes :: Serf -> ByteString -> IO () sendBytes s bs = handle onIOError $ do sendLen s (length bs) hPut (serfSend s) bs hFlush (serfSend s) where onIOError :: IOError -> IO () onIOError = const (throwIO SerfConnectionClosed) recvBytes :: Serf -> Word64 -> IO ByteString recvBytes serf = BS.hGet (serfRecv serf) . 
fromIntegral recvLen :: Serf -> IO Word64 recvLen w = do bs <- BS.hGet (serfRecv w) 8 case length bs of 8 -> BS.unsafeUseAsCString bs (peek @Word64 . castPtr) _ -> throwIO SerfConnectionClosed recvResp :: Serf -> IO ByteString recvResp serf = do len <- recvLen serf recvBytes serf len -- Send Writ / Recv Plea ------------------------------------------------------- sendWrit :: Serf -> Writ -> IO () sendWrit s = sendBytes s . jamBS . toNoun recvPlea :: Serf -> IO Plea recvPlea w = do b <- recvResp w n <- fromRightExn (cueBS b) (const $ BadPleaAtom $ bytesAtom b) p <- fromRightExn (fromNounErr @Plea n) (\(p, m) -> BadPleaNoun n p m) pure p recvPleaHandlingSlog :: Serf -> IO Plea recvPleaHandlingSlog serf = loop where loop = recvPlea serf >>= \case PSlog info -> serfSlog serf info >> loop PFlog (Cord ofni) -> serfSlog serf (0, Tank $ Leaf $ Tape $ ofni) >> loop other -> pure other -- Higher-Level IPC Functions -------------------------------------------------- recvRipe :: Serf -> IO SerfInfo recvRipe serf = recvPleaHandlingSlog serf >>= \case PRipe ripe -> pure ripe plea -> throwIO (UnexpectedPlea (toNoun plea) "expecting %play") recvPlay :: Serf -> IO Play recvPlay serf = recvPleaHandlingSlog serf >>= \case PPlay play -> pure play plea -> throwIO (UnexpectedPlea (toNoun plea) "expecting %play") recvLive :: Serf -> IO () recvLive serf = recvPleaHandlingSlog serf >>= \case PLive () -> pure () plea -> throwIO (UnexpectedPlea (toNoun plea) "expecting %live") recvWork :: Serf -> IO Work recvWork serf = do recvPleaHandlingSlog serf >>= \case PWork work -> pure work plea -> throwIO (UnexpectedPlea (toNoun plea) "expecting %work") recvPeek :: Serf -> IO (Maybe (Term, Noun)) recvPeek serf = do recvPleaHandlingSlog serf >>= \case PPeek (SDone peek) -> pure peek -- XX surface error content PPeek (SBail dud) -> pure Nothing plea -> throwIO (UnexpectedPlea (toNoun plea) "expecting %peek") -- Request-Response Points -- These don't touch the lock ----------------------- sendSnapshotRequest :: Serf -> EventId -> IO () sendSnapshotRequest serf eve = do sendWrit serf (WLive $ LSave eve) recvLive serf sendCompactionRequest :: Serf -> IO () sendCompactionRequest serf = do sendWrit serf (WLive $ LPack ()) recvLive serf sendScryRequest :: Serf -> Gang -> ScryReq -> IO (Maybe (Term, Noun)) sendScryRequest serf g r = do sendWrit serf (WPeek 0 g r) recvPeek serf sendShutdownRequest :: Serf -> Atom -> IO () sendShutdownRequest serf exitCode = do sendWrit serf (WLive $ LExit exitCode) pure () -- Starting the Serf ----------------------------------------------------------- compileFlags :: [Flag] -> Word compileFlags = foldl' (\acc flag -> setBit acc (fromEnum flag)) 0 readStdErr :: Handle -> (Text -> IO ()) -> IO () -> IO () readStdErr h onLine onClose = loop where loop = do IO.tryIOError (BS.hGetLine h >>= onLine . 
decodeUtf8Lenient) >>= \case Left exn -> onClose Right () -> loop start :: Config -> IO (Serf, SerfInfo) start (Config exePax pierPath flags onSlog onStdr onDead) = do (Just i, Just o, Just e, p) <- createProcess pSpec void $ async (readStdErr e onStdr onDead) vLock <- newEmptyMVar let serf = Serf i o p onSlog vLock info <- recvRipe serf putMVar vLock (Just $ siStat info) pure (serf, info) where diskKey = "" config = show (compileFlags flags) rock = "0" -- XX support loading from rock cache = "50000" -- XX support memo-cache size args = ["serf", pierPath, diskKey, config, cache, rock] pSpec = (proc exePax args) { std_in = CreatePipe , std_out = CreatePipe , std_err = CreatePipe } -- Taking the SerfState Lock --------------------------------------------------- takeLock :: MonadIO m => Serf -> m SerfState takeLock serf = io $ do takeMVar (serfLock serf) >>= \case Nothing -> putMVar (serfLock serf) Nothing >> throwIO SerfNotRunning Just ss -> pure ss serfLockTaken :: MonadResource m => Serf -> m (IORef (Maybe SerfState), SerfState) serfLockTaken serf = snd <$> allocate take release where take = (,) <$> newIORef Nothing <*> takeLock serf release (rv, _) = do mRes <- readIORef rv when (mRes == Nothing) (forcefullyKillSerf serf) putMVar (serfLock serf) mRes withSerfLock :: MonadResource m => Serf -> (SerfState -> m (SerfState, a)) -> m a withSerfLock serf act = do (vState , initialState) <- serfLockTaken serf (newState, result ) <- act initialState writeIORef vState (Just newState) pure result withSerfLockIO :: Serf -> (SerfState -> IO (SerfState, a)) -> IO a withSerfLockIO s a = runResourceT (withSerfLock s (io . a)) -- SIGINT ---------------------------------------------------------------------- sendSIGINT :: Serf -> IO () sendSIGINT serf = do getPid (serfProc serf) >>= \case Nothing -> pure () Just pid -> do io $ signalProcess sigINT pid -- Killing the Serf ------------------------------------------------------------ {-| Ask the serf to shutdown. If it takes more than 2s, kill it with SIGKILL. -} stop :: HasLogFunc e => Serf -> RIO e () stop serf = do race_ niceKill (wait2sec >> forceKill) where wait2sec = threadDelay 2_000_000 niceKill = do logTrace "Asking serf to shut down" io (gracefullyKillSerf serf) logTrace "Serf went down when asked." forceKill = do logTrace "Serf taking too long to go down, kill with fire (SIGTERM)." io (forcefullyKillSerf serf) logTrace "Serf process killed with SIGTERM." {-| Kill the serf by taking the lock, then asking for it to exit. -} gracefullyKillSerf :: Serf -> IO () gracefullyKillSerf serf@Serf{..} = do finalState <- takeMVar serfLock sendShutdownRequest serf 0 waitForProcess serfProc pure () {-| Kill the serf by sending it a SIGKILL. -} forcefullyKillSerf :: Serf -> IO () forcefullyKillSerf serf = do getPid (serfProc serf) >>= \case Nothing -> pure () Just pid -> do io $ signalProcess sigKILL pid io $ void $ waitForProcess (serfProc serf) -- Flows for Interacting with the Serf ----------------------------------------- {-| Ask the serf to write a snapshot to disk. -} snapshot :: Serf -> IO () snapshot serf = withSerfLockIO serf $ \ss -> do sendSnapshotRequest serf (ssLast ss) pure (ss, ()) {-| Ask the serf to de-duplicate and de-fragment it's heap. -} compact :: Serf -> IO () compact serf = withSerfLockIO serf $ \ss -> do sendCompactionRequest serf pure (ss, ()) {-| Peek into the serf state. 
-} scry :: Serf -> Gang -> ScryReq -> IO (Maybe (Term, Noun)) scry serf g r = withSerfLockIO serf $ \ss -> do (ss,) <$> sendScryRequest serf g r {-| Given a list of boot events, send them to to the serf in a single %play message. They must all be sent in a single %play event so that the serf can determine the length of the boot sequence. -} boot :: Serf -> [Noun] -> IO (Maybe PlayBail) boot serf@Serf {..} seq = do withSerfLockIO serf $ \ss -> do sendWrit serf (WPlay 1 seq) recvPlay serf >>= \case PBail bail -> pure (ss, Just bail) PDone mug -> pure (SerfState (fromIntegral $ length seq) mug, Nothing) {-| Given a stream of nouns (from the event log), feed them into the serf in batches of size `batchSize`. - On `%bail` response, return early. - On IPC errors, kill the serf and rethrow. - On success, return `Nothing`. -} replay :: forall m . (MonadResource m, MonadUnliftIO m, MonadIO m) => Int -> (Int -> IO ()) -> Serf -> ConduitT Noun Void m (Maybe PlayBail) replay batchSize cb serf = do withSerfLock serf $ \ss -> do (r, ss') <- loop ss pure (ss', r) where loop :: SerfState -> ConduitT Noun Void m (Maybe PlayBail, SerfState) loop ss@(SerfState lastEve lastMug) = do awaitBatch batchSize >>= \case [] -> pure (Nothing, SerfState lastEve lastMug) evs -> do let nexEve = lastEve + 1 let newEve = lastEve + fromIntegral (length evs) io $ sendWrit serf (WPlay nexEve evs) io (recvPlay serf) >>= \case PBail bail -> pure (Just bail, SerfState lastEve lastMug) PDone newMug -> do io (cb $ length evs) loop (SerfState newEve newMug) {-| TODO If this is slow, use a mutable vector instead of reversing a list. -} awaitBatch :: Monad m => Int -> ConduitT i o m [i] awaitBatch = go [] where go acc 0 = pure (reverse acc) go acc n = await >>= \case Nothing -> pure (reverse acc) Just x -> go (x:acc) (n-1) -- Special Replay for Collecting FX -------------------------------------------- {-| This does event-log replay using the running IPC flow so that we can collect effects. We don't tolerate replacement events or bails since we are actually replaying the log, so we just throw exceptions in those cases. -} swim :: forall m . (MonadIO m, MonadUnliftIO m, MonadResource m) => Serf -> ConduitT (Wen, Ev) (EventId, FX) m () swim serf = do withSerfLock serf $ \SerfState {..} -> do (, ()) <$> loop ssHash ssLast where loop :: Mug -> EventId -> ConduitT (Wen, Ev) (EventId, FX) m SerfState loop mug eve = await >>= \case Nothing -> do pure (SerfState eve mug) Just (wen, evn) -> do io (sendWrit serf (WWork 0 wen evn)) io (recvWork serf) >>= \case WBail goofs -> do throwIO (BailDuringReplay eve goofs) WSwap eid hash (wen, noun) fx -> do throwIO (SwapDuringReplay eid hash (wen, noun) fx) WDone eid hash fx -> do yield (eid, fx) loop hash eid -- Running Ship Flow ----------------------------------------------------------- {-| TODO Don't take snapshot until event log has processed current event. 
-} run :: Serf -> Int -> STM EventId -> STM RunReq -> ((Fact, FX) -> STM ()) -> (Maybe Ev -> STM ()) -> IO () run serf maxBatchSize getLastEvInLog onInput sendOn spin = topLoop where topLoop :: IO () topLoop = atomically onInput >>= \case RRWork workErr -> doWork workErr RRSave () -> doSave RRKill () -> doKill RRPack () -> doPack RRScry g r k -> doScry g r k doPack :: IO () doPack = compact serf >> topLoop waitForLog :: IO () waitForLog = do serfLast <- serfLastEventBlocking serf atomically $ do logLast <- getLastEvInLog when (logLast < serfLast) retry doSave :: IO () doSave = waitForLog >> snapshot serf >> topLoop doKill :: IO () doKill = waitForLog >> snapshot serf >> pure () doScry :: Gang -> ScryReq -> (Maybe (Term, Noun) -> IO ()) -> IO () doScry g r k = (scry serf g r >>= k) >> topLoop doWork :: EvErr -> IO () doWork firstWorkErr = do que <- newTBMQueueIO 1 () <- atomically (writeTBMQueue que firstWorkErr) tWork <- async (processWork serf maxBatchSize que onWorkResp spin) -- Avoid wrapping all subsequent runs of the event loop in an exception -- handler which retains tWork. nexSt <- flip onException (cancel tWork) $ do nexSt <- workLoop que wait tWork pure nexSt nexSt workLoop :: TBMQueue EvErr -> IO (IO ()) workLoop que = atomically onInput >>= \case RRKill () -> atomically (closeTBMQueue que) >> pure doKill RRSave () -> atomically (closeTBMQueue que) >> pure doSave RRPack () -> atomically (closeTBMQueue que) >> pure doPack RRScry g r k -> atomically (closeTBMQueue que) >> pure (doScry g r k) RRWork workErr -> atomically (writeTBMQueue que workErr) >> workLoop que onWorkResp :: Wen -> EvErr -> Work -> IO () onWorkResp wen (EvErr evn err) = \case WDone eid hash fx -> do io $ err (RunOkay eid fx) atomically $ sendOn ((Fact eid hash wen (toNoun evn)), fx) WSwap eid hash (wen, noun) fx -> do io $ err (RunSwap eid hash wen noun fx) atomically $ sendOn (Fact eid hash wen noun, fx) WBail goofs -> do io $ err (RunBail goofs) {-| Given: - A stream of incoming requests - A sequence of in-flight requests that haven't been responded to - A maximum number of in-flight requests. Wait until the number of in-fligh requests is smaller than the maximum, and then take the next item from the stream of requests. -} pullFromQueueBounded :: Int -> TVar (Seq a) -> TBMQueue b -> STM (Maybe b) pullFromQueueBounded maxSize vInFlight queue = do inFlight <- length <$> readTVar vInFlight if inFlight >= maxSize then retry else readTBMQueue queue {-| Given - `maxSize`: The maximum number of jobs to send to the serf before getting a response. - `q`: A bounded queue (which can be closed) - `onResp`: a callback to call for each response from the serf. - `spin`: a callback to tell the terminal driver which event is currently being processed. Pull jobs from the queue and send them to the serf (eagerly, up to `maxSize`) and call the callback with each response from the serf. When the queue is closed, wait for the serf to respond to all pending work, and then return. Whenever the serf is idle, call `spin Nothing` and whenever the serf is working on an event, call `spin (Just ev)`. 
-} processWork :: Serf -> Int -> TBMQueue EvErr -> (Wen -> EvErr -> Work -> IO ()) -> (Maybe Ev -> STM ()) -> IO () processWork serf maxSize q onResp spin = do vDoneFlag <- newTVarIO False vInFlightQueue <- newTVarIO empty recvThread <- async (recvLoop serf vDoneFlag vInFlightQueue spin) flip onException (print "KILLING: processWork" >> cancel recvThread) $ do loop vInFlightQueue vDoneFlag wait recvThread where loop :: TVar (Seq (Ev, Work -> IO ())) -> TVar Bool -> IO () loop vInFlight vDone = do atomically (pullFromQueueBounded maxSize vInFlight q) >>= \case Nothing -> do atomically (writeTVar vDone True) Just evErr@(EvErr ev _) -> do now <- Time.now let cb = onResp now evErr atomically $ modifyTVar' vInFlight (:|> (ev, cb)) sendWrit serf (WWork 0 now ev) loop vInFlight vDone {-| Given: - `vDone`: A flag that no more work will be sent to the serf. - `vWork`: A list of work requests that have been sent to the serf, haven't been responded to yet. If the serf has responded to all work requests, and no more work is going to be sent to the serf, then return. If we are going to send more work to the serf, but the queue is empty, then wait. If work requests have been sent to the serf, take the first one, wait for a response from the serf, call the associated callback, and repeat the whole process. -} recvLoop :: Serf -> TVar Bool -> TVar (Seq (Ev, Work -> IO ())) -> (Maybe Ev -> STM ()) -> IO () recvLoop serf vDone vWork spin = do withSerfLockIO serf \SerfState {..} -> do loop ssLast ssHash where loop eve mug = do atomically $ do whenM (null <$> readTVar vWork) $ do spin Nothing atomically takeCallback >>= \case Nothing -> pure (SerfState eve mug, ()) Just (curEve, cb) -> do atomically (spin (Just curEve)) recvWork serf >>= \case work@(WDone eid hash _) -> cb work >> loop eid hash work@(WSwap eid hash _ _) -> cb work >> loop eid hash work@(WBail _) -> cb work >> loop eve mug takeCallback :: STM (Maybe (Ev, Work -> IO ())) takeCallback = do ((,) <$> readTVar vDone <*> readTVar vWork) >>= \case (False, Empty ) -> retry (True , Empty ) -> pure Nothing (_ , (e, x) :<| xs) -> writeTVar vWork xs $> Just (e, x) (_ , _ ) -> error "impossible"
urbit/urbit
pkg/hs/urbit-king/lib/Urbit/Vere/Serf/IPC.hs
mit
19,993
0
24
5,096
5,827
2,870
2,957
-1
-1
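The bounded in-flight logic in the Urbit.Vere.Serf.IPC entry above (pullFromQueueBounded) is a small reusable back-pressure idiom. Below is a minimal, self-contained sketch of the same pattern, assuming only the stm and stm-chans packages the original already uses; the module and helper names are invented for illustration.

module BoundedPull (pullBounded, markSent, markDone) where

import Control.Concurrent.STM (STM, TVar, modifyTVar', readTVar, retry)
import Control.Concurrent.STM.TBMQueue (TBMQueue, readTBMQueue)
import Data.Sequence (Seq, (|>))
import qualified Data.Sequence as Seq

-- Block while 'maxSize' or more jobs are still unanswered; otherwise pull
-- the next job ('Nothing' once the queue has been closed and drained).
pullBounded :: Int -> TVar (Seq job) -> TBMQueue job -> STM (Maybe job)
pullBounded maxSize vInFlight queue = do
  inFlight <- Seq.length <$> readTVar vInFlight
  if inFlight >= maxSize
    then retry
    else readTBMQueue queue

-- Record that a job was sent to the worker.
markSent :: TVar (Seq job) -> job -> STM ()
markSent vInFlight job = modifyTVar' vInFlight (|> job)

-- Record that the oldest outstanding job was answered.
markDone :: TVar (Seq job) -> STM ()
markDone vInFlight = modifyTVar' vInFlight (Seq.drop 1)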
{-#LANGUAGE AllowAmbiguousTypes #-} {-#LANGUAGE ScopedTypeVariables #-} import Criterion.Main import Criterion.Types import TestCases import Foreign.Marshal.Alloc import Foreign.Marshal.Array (mallocArray, peekArray, pokeArray) import Foreign.Ptr import Foreign.Storable import Foreign.Storable.Generic import Foreign.Storable.Generic.Internal import GHC.Generics (from, to) import GHC.Exts import Control.DeepSeq import Data.Proxy mallocFree :: forall a. (Storable a) => a -> IO () mallocFree a = do ptr <- mallocBytes $ (sizeOf a) free ptr singularTests = -- [ bgroup "mallocfree" $ -- [ bgroup "Handwritten" $ -- [ bench "C1" $ nfIO (mallocFree c1hw_def) -- , bench "C2" $ nfIO (mallocFree c2hw_def) -- , bench "C3" $ nfIO (mallocFree c3hw_def) -- , bench "C4" $ nfIO (mallocFree c4hw_def) -- , bench "C5" $ nfIO (mallocFree c5hw_def) -- ] -- , bgroup "GStorable" $ -- [ bench "C1" $ nfIO (mallocFree c1_def) -- , bench "C2" $ nfIO (mallocFree c2_def) -- , bench "C3" $ nfIO (mallocFree c3_def) -- , bench "C4" $ nfIO (mallocFree c4_def) -- , bench "C5" $ nfIO (mallocFree c5_def) -- ] -- ] [ bgroup "sizeOf" $ [ bgroup "Handwritten" $ [ bench "C0" $ nf sizeOf c0hw_def , bench "C1" $ nf sizeOf c1hw_def , bench "C2" $ nf sizeOf c2hw_def , bench "C3" $ nf sizeOf c3hw_def , bench "C4" $ nf sizeOf c4hw_def , bench "C5" $ nf sizeOf c5hw_def ] , bgroup "GStorable" $ [ bench "C0" $ nf sizeOf c0_def , bench "C1" $ nf sizeOf c1_def , bench "C2" $ nf sizeOf c2_def , bench "C3" $ nf sizeOf c3_def , bench "C4" $ nf sizeOf c4_def , bench "C5" $ nf sizeOf c5_def ] ] , bgroup "alignment" $ [ bgroup "Handwritten" $ [ bench "C0" $ nf alignment c0hw_def , bench "C1" $ nf alignment c1hw_def , bench "C2" $ nf alignment c2hw_def , bench "C3" $ nf alignment c3hw_def , bench "C4" $ nf alignment c4hw_def , bench "C5" $ nf alignment c5hw_def ] , bgroup "GStorable" $ [ bench "C0" $ nf alignment c0_def , bench "C1" $ nf alignment c1_def , bench "C2" $ nf alignment c2_def , bench "C3" $ nf alignment c3_def , bench "C4" $ nf alignment c4_def , bench "C5" $ nf alignment c5_def ] ] , bgroup "peek" $ [ bgroup "Handwritten" $ [ env (malloc :: IO (Ptr C0hw)) $ \ptr -> bench "C0" $ nfIO (peek ptr) , env (malloc :: IO (Ptr C1hw)) $ \ptr -> bench "C1" $ nfIO (peek ptr) , env (malloc :: IO (Ptr C2hw)) $ \ptr -> bench "C2" $ nfIO (peek ptr) , env (malloc :: IO (Ptr C3hw)) $ \ptr -> bench "C3" $ nfIO (peek ptr) , env (malloc :: IO (Ptr C4hw)) $ \ptr -> bench "C4" $ nfIO (peek ptr) , env (malloc :: IO (Ptr C5hw)) $ \ptr -> bench "C5" $ nfIO (peek ptr) ] , bgroup "GStorable" $ [ env (malloc :: IO (Ptr C0)) $ \ptr -> bench "C0" $ nfIO (peek ptr) , env (malloc :: IO (Ptr C1)) $ \ptr -> bench "C1" $ nfIO (peek ptr) , env (malloc :: IO (Ptr C2)) $ \ptr -> bench "C2" $ nfIO (peek ptr) , env (malloc :: IO (Ptr C3)) $ \ptr -> bench "C3" $ nfIO (peek ptr) , env (malloc :: IO (Ptr C4)) $ \ptr -> bench "C4" $ nfIO (peek ptr) , env (malloc :: IO (Ptr C5)) $ \ptr -> bench "C5" $ nfIO (peek ptr) ] ] , bgroup "poke" $ [ bgroup "Handwritten" $ [ env malloc $ \ptr -> bench "C0" $ nfIO (poke ptr c0hw_def) , env malloc $ \ptr -> bench "C1" $ nfIO (poke ptr c1hw_def) , env malloc $ \ptr -> bench "C2" $ nfIO (poke ptr c2hw_def) , env malloc $ \ptr -> bench "C3" $ nfIO (poke ptr c3hw_def) , env malloc $ \ptr -> bench "C4" $ nfIO (poke ptr c4hw_def) , env malloc $ \ptr -> bench "C5" $ nfIO (poke ptr c5hw_def) ] , bgroup "GStorable" $ [ env malloc $ \ptr -> bench "C0" $ nfIO (poke ptr c0_def) , env malloc $ \ptr -> bench "C1" $ nfIO (poke ptr c1_def) , env malloc $ 
\ptr -> bench "C2" $ nfIO (poke ptr c2_def) , env malloc $ \ptr -> bench "C3" $ nfIO (poke ptr c3_def) , env malloc $ \ptr -> bench "C4" $ nfIO (poke ptr c4_def) , env malloc $ \ptr -> bench "C5" $ nfIO (poke ptr c5_def) ] ] ] -- Our benchmark harness. main = defaultMain $ singularTests
mkloczko/derive-storable
benchmark/Main.hs
mit
4,702
0
15
1,611
1,608
807
801
81
1
module ElasticSearch.Query where data Query = Query match :: Query match = undefined multiMatch :: Query multiMatch = undefined bool :: Query bool = undefined boosting :: Query boosting = undefined common :: Query common = undefined constantScore :: Query constantScore = undefined disMax :: Query disMax = undefined field :: Query field = undefined filtered :: Query filtered = undefined fuzzyLikeThis :: Query fuzzyLikeThis = undefined fuzzyLikeThisField :: Query fuzzyLikeThisField = undefined functionScore :: Query functionScore = undefined fuzzy :: Query fuzzy = undefined geoShape :: Query geoShape = undefined hasChild :: Query hasChild = undefined hasParent :: Query hasParent = undefined ids :: Query ids = undefined indices :: Query indices = undefined matchAll :: Query matchAll = undefined moreLikeThis :: Query moreLikeThis = undefined moreLikeThisField :: Query moreLikeThisField = undefined nested :: Query nested = undefined prefix :: Query prefix = undefined queryString :: Query queryString = undefined range :: Query range = undefined regexp :: Query regexp = undefined spanFirst :: Query spanFirst = undefined spanMultiTerm :: Query spanMultiTerm = undefined spanNear :: Query spanNear = undefined spanNot :: Query spanNot = undefined spanOr :: Query spanOr = undefined spanTerm :: Query spanTerm = undefined term :: Query term = undefined terms :: Query terms = undefined topChildren :: Query topChildren = undefined wildcard :: Query wildcard = undefined text :: Query text = undefined
SaneApp/elastic-search-api
src/ElasticSearch/Query.hs
mit
1,549
0
5
269
383
230
153
76
1
{- | Module : Language.Egison.Parser Licence : MIT This module provides the parser interface. -} module Language.Egison.Parser ( -- * Parse readTopExprs , readTopExpr , readExprs , readExpr , parseTopExpr -- * Parse a file , loadLibraryFile , loadFile -- * Parser utils (for translator) , removeShebang , readUTF8File ) where import Control.Monad.Except (lift, liftIO, throwError) import Control.Monad.Reader (asks, local) import Control.Monad.State (unless) import System.Directory (doesFileExist, getHomeDirectory) import System.IO import Language.Egison.AST import Language.Egison.CmdOptions import Language.Egison.Data import qualified Language.Egison.Parser.NonS as NonS import qualified Language.Egison.Parser.SExpr as SExpr import Language.Egison.RState import Paths_egison (getDataFileName) readTopExprs :: String -> EvalM [TopExpr] readTopExprs expr = do isSExpr <- asks optSExpr if isSExpr then either (throwError . Parser) return (SExpr.parseTopExprs expr) else do r <- lift . lift $ NonS.parseTopExprs expr either (throwError . Parser) return r parseTopExpr :: String -> RuntimeM (Either String TopExpr) parseTopExpr expr = do isSExpr <- asks optSExpr if isSExpr then return (SExpr.parseTopExpr expr) else NonS.parseTopExpr expr readTopExpr :: String -> EvalM TopExpr readTopExpr expr = do isSExpr <- asks optSExpr if isSExpr then either (throwError . Parser) return (SExpr.parseTopExpr expr) else do r <- lift . lift $ NonS.parseTopExpr expr either (throwError . Parser) return r readExprs :: String -> EvalM [Expr] readExprs expr = do isSExpr <- asks optSExpr if isSExpr then either (throwError . Parser) return (SExpr.parseExprs expr) else do r <- lift . lift $ NonS.parseExprs expr either (throwError . Parser) return r readExpr :: String -> EvalM Expr readExpr expr = do isSExpr <- asks optSExpr if isSExpr then either (throwError . Parser) return (SExpr.parseExpr expr) else do r <- lift . lift $ NonS.parseExpr expr either (throwError . Parser) return r -- |Load a libary file loadLibraryFile :: FilePath -> EvalM [TopExpr] loadLibraryFile file = do homeDir <- liftIO getHomeDirectory doesExist <- liftIO $ doesFileExist $ homeDir ++ "/.egison/" ++ file if doesExist then loadFile $ homeDir ++ "/.egison/" ++ file else liftIO (getDataFileName file) >>= loadFile -- |Load a file loadFile :: FilePath -> EvalM [TopExpr] loadFile file = do doesExist <- liftIO $ doesFileExist file unless doesExist $ throwError $ Default ("file does not exist: " ++ file) input <- liftIO $ readUTF8File file let useSExpr = checkIfUseSExpr file exprs <- local (\opt -> opt { optSExpr = useSExpr }) (readTopExprs (removeShebang useSExpr input)) concat <$> mapM recursiveLoad exprs where recursiveLoad (Load file) = loadLibraryFile file recursiveLoad (LoadFile file) = loadFile file recursiveLoad expr = return [expr] removeShebang :: Bool -> String -> String removeShebang useSExpr cs@('#':'!':_) = if useSExpr then ';' : cs else "--" ++ cs removeShebang _ cs = cs readUTF8File :: FilePath -> IO String readUTF8File name = do h <- openFile name ReadMode hSetEncoding h utf8 hGetContents h checkIfUseSExpr :: String -> Bool checkIfUseSExpr file = drop (length file - 5) file == ".segi"
egison/egison
hs-src/Language/Egison/Parser.hs
mit
3,672
0
13
955
1,053
540
513
86
3
module Physie.Colors( colorsWhite , colorsBlack , colorsRed ) where import Graphics.UI.SDL.Color (Color (..)) colorsBlack :: Color colorsBlack = Color 0 0 0 255 colorsWhite :: Color colorsWhite = Color 255 255 255 255 colorsRed :: Color colorsRed = Color 255 0 0 255
pmiddend/physie
src/Physie/Colors.hs
mit
297
0
6
73
90
52
38
11
1
{-# LANGUAGE AllowAmbiguousTypes #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE TypeApplications #-} module Test.Smoke.Types.Base where import Control.Monad (when) import Data.Aeson import Data.Aeson.Types (Parser, typeMismatch) import Data.Default import Data.String (IsString) import Data.Text (Text) import qualified Data.Text as Text import Data.Vector (Vector) import qualified Data.Vector as Vector import Test.Smoke.Paths newtype SuiteName = SuiteName { unSuiteName :: String } deriving (Eq, Ord, Show) newtype TestName = TestName { unTestName :: String } deriving (Eq, Ord, Show) newtype WorkingDirectory = WorkingDirectory { unWorkingDirectory :: ResolvedPath Dir } deriving (Eq, Show) newtype Args = Args { unArgs :: Vector String } deriving (Eq, Show, Semigroup, Monoid, FromJSON) newtype Script = Script { unScript :: Text } deriving (Eq, Show, FromJSON) data CommandLine = CommandLine (RelativePath File) Args deriving (Eq, Show) instance FromJSON CommandLine where parseJSON = withArray "command line" $ \v -> do line <- mapM parseJSON v when (Vector.null line) $ fail "empty command line" return $ CommandLine (parseFile (Vector.head line)) (Args (Vector.tail line)) data Command = CommandArgs CommandLine | CommandScript (Maybe CommandLine) Script deriving (Eq, Show) instance FromJSON Command where parseJSON (Object v) = CommandScript <$> v .:? "shell" <*> v .: "script" parseJSON (String script) = return $ CommandScript Nothing (Script script) parseJSON args@(Array _) = CommandArgs <$> parseJSON args parseJSON invalid = typeMismatch "command" invalid newtype FixtureName = FixtureName { unFixtureName :: String } deriving (Eq, Show, IsString) class FixtureType a where fixtureName :: FixtureName serializeFixture :: a -> Text deserializeFixture :: Text -> a parseFixtureJSON :: forall a. FixtureType a => Value -> Parser a parseFixtureJSON = withText (unFixtureName (fixtureName @a)) (return . deserializeFixture) instance FixtureType Text where fixtureName = "text" serializeFixture = id deserializeFixture = id newtype Status = Status { unStatus :: Int } deriving (Eq, Show) instance Default Status where def = Status 0 instance FromJSON Status where parseJSON number@(Number _) = Status <$> parseJSON number parseJSON invalid = typeMismatch "status" invalid newtype StdIn = StdIn { unStdIn :: Text } deriving (Eq, Show) instance Default StdIn where def = StdIn Text.empty instance FixtureType StdIn where fixtureName = "stdin" serializeFixture = unStdIn deserializeFixture = StdIn instance FromJSON StdIn where parseJSON = parseFixtureJSON newtype StdOut = StdOut { unStdOut :: Text } deriving (Eq, Show) instance Default StdOut where def = StdOut Text.empty instance FixtureType StdOut where fixtureName = "stdout" serializeFixture = unStdOut deserializeFixture = StdOut instance FromJSON StdOut where parseJSON = parseFixtureJSON newtype StdErr = StdErr { unStdErr :: Text } deriving (Eq, Show) instance Default StdErr where def = StdErr Text.empty instance FixtureType StdErr where fixtureName = "stderr" serializeFixture = unStdErr deserializeFixture = StdErr instance FromJSON StdErr where parseJSON = parseFixtureJSON newtype TestFileContents = TestFileContents { unTestFileContents :: Text } deriving (Eq, Show) instance FixtureType TestFileContents where fixtureName = "files" serializeFixture = unTestFileContents deserializeFixture = TestFileContents instance FromJSON TestFileContents where parseJSON = parseFixtureJSON
SamirTalwar/Smoke
src/lib/Test/Smoke/Types/Base.hs
mit
3,747
0
16
680
1,029
572
457
113
1
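The FromJSON instance for Command in the Smoke entry above accepts three spellings of a command. A small sketch of what each shape decodes to, assuming a reasonably recent aeson (which accepts scalar top-level JSON values); the command strings are made up.

{-# LANGUAGE OverloadedStrings #-}

import Data.Aeson (decode)
import Test.Smoke.Types.Base (Command (..))

-- A bare JSON string becomes a script with no explicit shell.
asScript :: Maybe Command
asScript = decode "\"echo hello\""
-- Just (CommandScript Nothing (Script "echo hello"))

-- A JSON array becomes an executable plus its arguments.
asArgs :: Maybe Command
asArgs = decode "[\"echo\", \"hello\"]"
-- Just (CommandArgs (CommandLine ... (Args ["hello"])))

-- An object may name a shell explicitly alongside the script.
asShellScript :: Maybe Command
asShellScript = decode "{\"script\": \"echo hello\"}"
-- Just (CommandScript Nothing (Script "echo hello"))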
module Eval where import Syntax import TypeCheck import Unbound.Generics.LocallyNameless import Control.Monad.Except type Eval a = ExceptT Error FreshM a eval :: Term -> Eval Term eval TmTrue = return TmTrue eval TmFalse = return TmFalse eval v@Var{} = return v eval a@Abs{} = return a eval (If b t1 t2) = do b' <- eval b case b' of TmTrue -> eval t1 TmFalse -> eval t2 x -> throwError $ show x eval (App e1 e2) = do v1 <- eval e1 v2 <- eval e2 case v1 of (Abs bnd _) -> do (x, body) <- unbind bnd let body' = subst x v2 body eval body' _ -> throwError "application of non-lambda" runEval :: Term -> Either Error Term runEval t = do _ <- runTypeOf t runFreshM . runExceptT . eval $ t
kellino/TypeSystems
simpleBool/Eval.hs
mit
797
0
15
250
323
156
167
30
4
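A tiny usage sketch for the evaluator above, assuming the Term constructors matched in eval (If, TmTrue, TmFalse) are exported by Syntax, as the open import suggests.

import Eval (runEval)
import Syntax (Term (..))

-- Type checked first by runEval, then evaluated; expected result: Right TmFalse.
example = runEval (If TmTrue TmFalse TmTrue)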
{- | Module : Game.Physics Copyright : Copyright (c) 2016 Michael Litchard License : MIT Maintainer : Michael Litchard Stability : experimental Portability: not portable This module provides the functions describing the physics. -} module Game.Physics ( Speed , direction , position , Position (..) , Target (..) , ) where import Data.Vector.V3 import Data.Vector.Class type Speed = Scalar newtype Target = Target Vector3 deriving Show newtype Position = Position Vector3 deriving Show newtype Direction = Direction Vector3 deriving Show direction :: Position -> Target -> Direction direction (Position pos) (Target tar) = Direction $ vnormalise $ tar - pos position :: Position -> Direction -> Scalar -> Position position (Position position) (Direction direction) speed = Position $ velocity + position where velocity = direction |* speed
mlitchard/cosmos
library/Game/Physics.hs
mit
1,003
0
7
290
191
111
80
19
1
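A short worked example for the kinematics helpers above; it assumes the Vector3 constructor exported by Data.Vector.V3 (AC-Vector), and the numbers are arbitrary.

import Data.Vector.V3 (Vector3 (..))
import Game.Physics (Position (..), Target (..), direction, position)

-- Move 2.5 units from the origin towards a target on the x axis.
-- direction normalises (10,0,0) to (1,0,0), so the result is Position (Vector3 2.5 0 0).
step :: Position
step = position start (direction start goal) 2.5
  where
    start = Position (Vector3 0 0 0)
    goal  = Target (Vector3 10 0 0)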
----------------------------------------------------------------------------- -- | -- Module : Core.AST -- Copyright : (c) Leonidas Lampropoulos, 2016, -- -- License : ?? -- -- Standard AST for the Core language for generators. -- Heavily influenced by Language.Haskell.Src -- ----------------------------------------------------------------------------- {-# LANGUAGE ViewPatterns #-} module Outer.AST where import Common.SrcLoc import qualified Common.Types as CT import Data.List -- | Types of Identifiers type ConId = String type TyConId = String type VarId = String type TyVarId = String type ClassId = String type OTcType = CT.TcType TyConId VarId type OScheme = CT.Scheme TyConId TyVarId type OTcEnv = CT.TcEnv VarId ConId TyConId TyVarId -- | Primes (@'@) mark an intermediate representation of types that -- distinguishes rigid variables from flexible ones. data TyVarId' = Flexible TyVarId | Rigid TyVarId deriving (Eq, Ord, Show) type OTcType' = CT.TcType TyConId TyVarId' type OScheme' = CT.Scheme TyConId TyVarId' type OTcEnv' = CT.TcEnv VarId ConId TyConId TyVarId' -- | Program is a list of top-level declarations type Prg = [Decl] -- | Constructor declaration data ConDecl = ConDecl ConId [OTcType] -- ^ ordinary data constructor deriving (Eq, Ord, Show) -- | A top level declaration data Decl = DataDecl SrcLoc TyConId [TyVarId] [ConDecl] -- ^ Datatype declaration | TypeSig SrcLoc VarId [(ClassId, OTcType)] OTcType -- ^ TcType signature declaration | FunDecl SrcLoc VarId [(VarId,Maybe Int)] Exp (Maybe OTcType') -- ^ Function declaration. | IncludeDecl String | ClassDecl SrcLoc ClassId TyVarId [(VarId, OTcType)] | InstanceDecl SrcLoc ClassId OTcType [(ClassId, OTcType)] [(VarId, [(VarId, Maybe Int)], Exp, Maybe OTcType')] deriving (Eq, Ord, Show) -- | Core Language Expressions. Expose boolean primitives data Exp = Var (VarId, Maybe OTcType') -- ^ variable and (maybe) type | Con ConId -- ^ data constructor | Lit Literal -- ^ literal constant | Unop Op1 Exp -- ^ unary operators | Conj Exp Exp -- ^ conjunction | Disj (Maybe Exp) Exp (Maybe Exp) Exp -- ^ disjunction | Binop Exp Op2 Exp -- ^ infix application | App Exp Exp -- ^ function application | If Exp Exp Exp -- ^ if expression | Case Exp [Alt] -- ^ case expression | Let Binds Exp -- ^ local declarations let ... in ... | Fix Exp -- ^ Fixpoint | FixN Int Exp -- ^ Indexed fixpoint | Fun [(VarId, Maybe Int)] Exp -- ^ Anonymous functions | Fresh VarId OTcType Exp Exp -- ^ Generate Fresh Variable of some type with some depth limit | Inst Exp VarId -- ^ Post-fix Instantiation point | TRACE VarId Exp -- ^ Trace a variable (debugging) | Collect Exp Exp -- ^ Collect statistics deriving (Eq, Ord, Show) -- | Binding groups are just lists of declarations type Binds = [Decl] -- | Alternatives in a case expression data Alt = Alt { altLoc :: SrcLoc , altWeight :: Maybe Exp , altPat :: Pat , altExp :: Exp } -- ^ A possibly weighted alternative in a case expression deriving (Eq, Ord, Show) -- | Implicit weights are equal to 1. altWeight' :: Alt -> Exp altWeight' (altWeight -> Just n) = n altWeight' _ = litIntE 1 -- | Helper constructor for literal expressions. litIntE :: Int -> Exp litIntE = Lit . 
LitInt

-- | let' x = e in e'
letE :: VarId -> Exp -> Exp -> Exp
letE x e e' = Case e [Alt noLoc (Just $ litIntE 1) (PVar x) e']

-- | Constant literals
data Literal = LitInt Int -- ^ integer literals
  deriving (Eq, Ord, Show)

-- | Patterns for case expressions
data Pat = PVar VarId       -- ^ variable
         | PLit Literal     -- ^ literal constant
         | PApp ConId [Pat] -- ^ constructor and argument patterns
         | PWild            -- ^ wildcard pattern
  deriving (Eq, Ord, Show)

isDefaultPat :: Pat -> Bool
isDefaultPat (PVar _) = True
isDefaultPat PWild = True
isDefaultPat _ = False

-- | Binary operators
data Op2 = OpPlus | OpMinus | OpTimes | OpDiv | OpMod | OpEq | OpNe | OpLt | OpGt | OpLe | OpGe
  deriving (Eq, Ord, Show)

-- | Unary operators
data Op1 = OpNeg | OpNot
  deriving (Eq, Ord, Show)

-- | Pre-defined constructors
list_tycon_name :: TyConId
list_tycon_name = "List"

nil_con_name, cons_con_name :: ConId
nil_con_name = "Nil"
cons_con_name = "Cons"

tuple_tycon_name :: Int -> TyConId
tuple_tycon_name n = "Tuple " ++ show n

tuple_con_name :: Int -> ConId
tuple_con_name n = "#" ++ show n

tc_int_tycon, tc_bool_tycon, tc_unit_tycon :: CT.TcType TyConId v
tc_int_tycon = CT.TcCon "Int" 0 []
tc_bool_tycon = CT.TcCon "Bool" 0 []
tc_unit_tycon = CT.TcCon "Unit" 0 []

-- | Smart constructor that simplifies constants.
times :: Exp -> Exp -> Exp
times (Binop e1 OpTimes e2) e3 = times e1 . times e2 $ e3
times (Lit (LitInt n)) (Lit (LitInt m)) = litIntE (n * m)
times (Lit (LitInt n)) (Binop (Lit (LitInt m)) OpTimes e3) = Binop (litIntE (n * m)) OpTimes e3
times (Lit (LitInt 1)) e3 = e3
times e1 (Binop (Lit (LitInt n)) OpTimes e2) = Binop (litIntE n) OpTimes (Binop e1 OpTimes e2)
times e1 e2 = Binop e1 OpTimes e2

-- | Smart constructor that simplifies constants.
plus :: Exp -> Exp -> Exp
plus (Lit (LitInt n)) (Lit (LitInt m)) = litIntE (n + m)
plus e1 e2 = Binop e1 OpPlus e2

-- | Expression of a long sum.
sumE :: [Exp] -> Exp
sumE es = foldl1' plus es
QuickChick/Luck
luck/src/Outer/AST.hs
mit
5,842
0
11
1,619
1,463
826
637
115
1
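A few worked reductions for the smart constructors in the Outer.AST entry above, using only names defined there; "x" is an arbitrary variable name.

import Outer.AST

-- Constant folding performed by the smart constructors:
six :: Exp
six = times (litIntE 2) (litIntE 3)                  -- Lit (LitInt 6)

unchanged :: Exp
unchanged = times (litIntE 1) (Var ("x", Nothing))   -- the unit 1 is dropped

sumOfThree :: Exp
sumOfThree = sumE [litIntE 1, litIntE 2, litIntE 3]  -- Lit (LitInt 6)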
{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE StrictData #-} {-# LANGUAGE TupleSections #-} -- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-apigatewayv2-routeresponse.html module Stratosphere.Resources.ApiGatewayV2RouteResponse where import Stratosphere.ResourceImports -- | Full data type definition for ApiGatewayV2RouteResponse. See -- 'apiGatewayV2RouteResponse' for a more convenient constructor. data ApiGatewayV2RouteResponse = ApiGatewayV2RouteResponse { _apiGatewayV2RouteResponseApiId :: Val Text , _apiGatewayV2RouteResponseModelSelectionExpression :: Maybe (Val Text) , _apiGatewayV2RouteResponseResponseModels :: Maybe Object , _apiGatewayV2RouteResponseResponseParameters :: Maybe Object , _apiGatewayV2RouteResponseRouteId :: Val Text , _apiGatewayV2RouteResponseRouteResponseKey :: Val Text } deriving (Show, Eq) instance ToResourceProperties ApiGatewayV2RouteResponse where toResourceProperties ApiGatewayV2RouteResponse{..} = ResourceProperties { resourcePropertiesType = "AWS::ApiGatewayV2::RouteResponse" , resourcePropertiesProperties = hashMapFromList $ catMaybes [ (Just . ("ApiId",) . toJSON) _apiGatewayV2RouteResponseApiId , fmap (("ModelSelectionExpression",) . toJSON) _apiGatewayV2RouteResponseModelSelectionExpression , fmap (("ResponseModels",) . toJSON) _apiGatewayV2RouteResponseResponseModels , fmap (("ResponseParameters",) . toJSON) _apiGatewayV2RouteResponseResponseParameters , (Just . ("RouteId",) . toJSON) _apiGatewayV2RouteResponseRouteId , (Just . ("RouteResponseKey",) . toJSON) _apiGatewayV2RouteResponseRouteResponseKey ] } -- | Constructor for 'ApiGatewayV2RouteResponse' containing required fields as -- arguments. apiGatewayV2RouteResponse :: Val Text -- ^ 'agvrrApiId' -> Val Text -- ^ 'agvrrRouteId' -> Val Text -- ^ 'agvrrRouteResponseKey' -> ApiGatewayV2RouteResponse apiGatewayV2RouteResponse apiIdarg routeIdarg routeResponseKeyarg = ApiGatewayV2RouteResponse { _apiGatewayV2RouteResponseApiId = apiIdarg , _apiGatewayV2RouteResponseModelSelectionExpression = Nothing , _apiGatewayV2RouteResponseResponseModels = Nothing , _apiGatewayV2RouteResponseResponseParameters = Nothing , _apiGatewayV2RouteResponseRouteId = routeIdarg , _apiGatewayV2RouteResponseRouteResponseKey = routeResponseKeyarg } -- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-apigatewayv2-routeresponse.html#cfn-apigatewayv2-routeresponse-apiid agvrrApiId :: Lens' ApiGatewayV2RouteResponse (Val Text) agvrrApiId = lens _apiGatewayV2RouteResponseApiId (\s a -> s { _apiGatewayV2RouteResponseApiId = a }) -- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-apigatewayv2-routeresponse.html#cfn-apigatewayv2-routeresponse-modelselectionexpression agvrrModelSelectionExpression :: Lens' ApiGatewayV2RouteResponse (Maybe (Val Text)) agvrrModelSelectionExpression = lens _apiGatewayV2RouteResponseModelSelectionExpression (\s a -> s { _apiGatewayV2RouteResponseModelSelectionExpression = a }) -- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-apigatewayv2-routeresponse.html#cfn-apigatewayv2-routeresponse-responsemodels agvrrResponseModels :: Lens' ApiGatewayV2RouteResponse (Maybe Object) agvrrResponseModels = lens _apiGatewayV2RouteResponseResponseModels (\s a -> s { _apiGatewayV2RouteResponseResponseModels = a }) -- | 
http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-apigatewayv2-routeresponse.html#cfn-apigatewayv2-routeresponse-responseparameters agvrrResponseParameters :: Lens' ApiGatewayV2RouteResponse (Maybe Object) agvrrResponseParameters = lens _apiGatewayV2RouteResponseResponseParameters (\s a -> s { _apiGatewayV2RouteResponseResponseParameters = a }) -- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-apigatewayv2-routeresponse.html#cfn-apigatewayv2-routeresponse-routeid agvrrRouteId :: Lens' ApiGatewayV2RouteResponse (Val Text) agvrrRouteId = lens _apiGatewayV2RouteResponseRouteId (\s a -> s { _apiGatewayV2RouteResponseRouteId = a }) -- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-apigatewayv2-routeresponse.html#cfn-apigatewayv2-routeresponse-routeresponsekey agvrrRouteResponseKey :: Lens' ApiGatewayV2RouteResponse (Val Text) agvrrRouteResponseKey = lens _apiGatewayV2RouteResponseRouteResponseKey (\s a -> s { _apiGatewayV2RouteResponseRouteResponseKey = a })
frontrowed/stratosphere
library-gen/Stratosphere/Resources/ApiGatewayV2RouteResponse.hs
mit
4,587
0
15
469
619
352
267
52
1
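A construction sketch for the generated resource above. Literal is stratosphere's plain-value wrapper for Val; the module assumed to re-export it and the id strings are illustrative assumptions.

{-# LANGUAGE OverloadedStrings #-}

import Control.Lens ((&), (?~))
import Stratosphere.Resources.ApiGatewayV2RouteResponse
import Stratosphere.Values (Val (Literal))  -- assumed home of the Val wrapper

myRouteResponse :: ApiGatewayV2RouteResponse
myRouteResponse =
  apiGatewayV2RouteResponse
    (Literal "my-api-id")    -- agvrrApiId
    (Literal "my-route-id")  -- agvrrRouteId
    (Literal "$default")     -- agvrrRouteResponseKey
    & agvrrModelSelectionExpression ?~ Literal "$request.body.action"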
{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE StrictData #-} {-# LANGUAGE TupleSections #-} -- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-kinesisanalyticsv2-application-applicationcodeconfiguration.html module Stratosphere.ResourceProperties.KinesisAnalyticsV2ApplicationApplicationCodeConfiguration where import Stratosphere.ResourceImports import Stratosphere.ResourceProperties.KinesisAnalyticsV2ApplicationCodeContent -- | Full data type definition for -- KinesisAnalyticsV2ApplicationApplicationCodeConfiguration. See -- 'kinesisAnalyticsV2ApplicationApplicationCodeConfiguration' for a more -- convenient constructor. data KinesisAnalyticsV2ApplicationApplicationCodeConfiguration = KinesisAnalyticsV2ApplicationApplicationCodeConfiguration { _kinesisAnalyticsV2ApplicationApplicationCodeConfigurationCodeContent :: KinesisAnalyticsV2ApplicationCodeContent , _kinesisAnalyticsV2ApplicationApplicationCodeConfigurationCodeContentType :: Val Text } deriving (Show, Eq) instance ToJSON KinesisAnalyticsV2ApplicationApplicationCodeConfiguration where toJSON KinesisAnalyticsV2ApplicationApplicationCodeConfiguration{..} = object $ catMaybes [ (Just . ("CodeContent",) . toJSON) _kinesisAnalyticsV2ApplicationApplicationCodeConfigurationCodeContent , (Just . ("CodeContentType",) . toJSON) _kinesisAnalyticsV2ApplicationApplicationCodeConfigurationCodeContentType ] -- | Constructor for -- 'KinesisAnalyticsV2ApplicationApplicationCodeConfiguration' containing -- required fields as arguments. kinesisAnalyticsV2ApplicationApplicationCodeConfiguration :: KinesisAnalyticsV2ApplicationCodeContent -- ^ 'kavaaccCodeContent' -> Val Text -- ^ 'kavaaccCodeContentType' -> KinesisAnalyticsV2ApplicationApplicationCodeConfiguration kinesisAnalyticsV2ApplicationApplicationCodeConfiguration codeContentarg codeContentTypearg = KinesisAnalyticsV2ApplicationApplicationCodeConfiguration { _kinesisAnalyticsV2ApplicationApplicationCodeConfigurationCodeContent = codeContentarg , _kinesisAnalyticsV2ApplicationApplicationCodeConfigurationCodeContentType = codeContentTypearg } -- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-kinesisanalyticsv2-application-applicationcodeconfiguration.html#cfn-kinesisanalyticsv2-application-applicationcodeconfiguration-codecontent kavaaccCodeContent :: Lens' KinesisAnalyticsV2ApplicationApplicationCodeConfiguration KinesisAnalyticsV2ApplicationCodeContent kavaaccCodeContent = lens _kinesisAnalyticsV2ApplicationApplicationCodeConfigurationCodeContent (\s a -> s { _kinesisAnalyticsV2ApplicationApplicationCodeConfigurationCodeContent = a }) -- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-kinesisanalyticsv2-application-applicationcodeconfiguration.html#cfn-kinesisanalyticsv2-application-applicationcodeconfiguration-codecontenttype kavaaccCodeContentType :: Lens' KinesisAnalyticsV2ApplicationApplicationCodeConfiguration (Val Text) kavaaccCodeContentType = lens _kinesisAnalyticsV2ApplicationApplicationCodeConfigurationCodeContentType (\s a -> s { _kinesisAnalyticsV2ApplicationApplicationCodeConfigurationCodeContentType = a })
frontrowed/stratosphere
library-gen/Stratosphere/ResourceProperties/KinesisAnalyticsV2ApplicationApplicationCodeConfiguration.hs
mit
3,251
0
13
221
262
153
109
30
1
{-# LANGUAGE ViewPatterns #-} module Quark.Optimize (optimize) where import Quark.Type import qualified Data.Sequence as Seq import Data.Sequence (viewl, viewr, (><), (<|)) import Data.Sequence (ViewL(..)) import Data.Sequence (ViewR(..)) --- Utils --- recQProgOpt :: (QProg -> QProg) -> (QItem -> QItem) recQProgOpt opt = rec_opt where rec_opt (QQuote p b) = QQuote p $ opt $ fmap rec_opt b rec_opt x = x partialApplySub :: (QItem, QItem) -> QItem -> QItem partialApplySub (from, to) (QQuote p b) = QQuote (fmap no_rec_sub p) (fmap (partialApplySub (from, to)) b) where no_rec_sub x = if x == from then to else x partialApplySub (from, to) x = if x == from then to else x --- Optimization --- redundantCall_O :: QProg -> QProg redundantCall_O (viewl -> (QQuote (viewl -> EmptyL) b) :< (viewl -> (QFunc "call") :< rest)) = b >< (redundantCall_O rest) redundantCall_O (viewl -> x :< rest) = x <| (redundantCall_O rest) redundantCall_O (viewl -> EmptyL) = Seq.empty partialApply_O :: QProg -> QProg partialApply_O (viewl -> x :< (viewl -> z :< (viewl -> (QFunc "call") :< rest))) = case z of QQuote (viewr -> rest_pattern :> (QVar v)) b -> case x of QFunc _ -> no_opt _ -> (partialApplySub (QVar v, x) (QQuote rest_pattern b)) <| (QFunc "call") <| (partialApply_O rest) QQuote (viewr -> rest_pattern :> v) b -> if v == x then (QQuote rest_pattern b) <| (QFunc "call") <| (partialApply_O rest) else no_opt _ -> no_opt where no_opt = x <| z <| (QFunc "call") <| (partialApply_O rest) partialApply_O (viewl -> x :< rest) = x <| (partialApply_O rest) partialApply_O (viewl -> EmptyL) = Seq.empty -- reduces a quark item to a less complex but semantically equivalent version optimize :: QItem -> QItem optimize x = if x == x' then x else optimize x' where opts = recQProgOpt partialApply_O . recQProgOpt redundantCall_O x' = opts x
henrystanley/Quark
Quark/Optimize.hs
cc0-1.0
1,882
0
16
365
748
403
345
35
5
{- Copyright (C) 2017 WATANABE Yuki <[email protected]> This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. -} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE Safe #-} {-| Copyright : (C) 2017 WATANABE Yuki License : GPL-2 Portability : non-portable (flexible contexts) This module defines functions for printing parsed syntax. The functions in this module print in multi-line format including here document contents, unlike the Show instances implemented in Flesh.Language.Syntax, which print in single-line format without here document contents. -} module Flesh.Language.Syntax.Print ( PrintState(..), initPrintState, PrintS, runPrint, Printable(..), ListPrintable(..)) where import Control.Monad (unless, when) import Control.Monad.State.Strict ( MonadState, State, evalState, get, modify', put, state) import Control.Monad.Writer.Lazy (Endo(Endo), MonadWriter, tell) import Data.List.NonEmpty (NonEmpty((:|)), toList) import Data.Text (unpack) import Flesh.Language.Syntax import Flesh.Text.Show -- | Intermediate state used while constructing a printer function. data PrintState = PrintState { -- | Number of spaces at the beginning of lines. indent :: !Int, -- | Function to print here document contents at the next newline. hereDoc :: ShowS} -- | Initial state to initiate the PrintS monad. initPrintState :: PrintState initPrintState = PrintState 0 id -- | Monad to construct ShowS functions. -- -- The outer State monad carries PrintState, from which the ShowS function -- results in the inner PrintT monad. type PrintS = PrintT (State PrintState) -- | Runs the PrintS state with 'initPrintState'. runPrint :: PrintS a -> ShowS runPrint = flip evalState initPrintState . execPrintT -- | Shows the argument. printShows :: (MonadWriter (Endo String) m, Show a) => a -> m () printShows = tell' . shows -- | Shows the argument character. printChar :: MonadWriter (Endo String) m => Char -> m () printChar = tell' . showChar -- | Shows the argument string. printString :: MonadWriter (Endo String) m => String -> m () printString = tell' . showString -- | Appends the given here document content to the current 'hereDoc'. appendHereDoc :: MonadState PrintState m => ShowS -> m () appendHereDoc s = modify' (\(PrintState i h) -> PrintState i (h . s)) -- | Shows the current hereDoc and clears it. printHereDoc :: (MonadState PrintState m, MonadWriter (Endo String) m) => m () printHereDoc = state (\(PrintState i h) -> (Endo h, PrintState i id)) >>= tell -- | Shows as many spaces as the indent of the current state. printIndent :: (MonadState PrintState m, MonadWriter (Endo String) m) => m () printIndent = do s <- get printString $ replicate (indent s) ' ' -- | Combination of @showChar '\n'@ and printHereDoc and printIndent. printNewline :: (MonadState PrintState m, MonadWriter (Endo String) m) => m () printNewline = do printChar '\n' printHereDoc printIndent -- | Temporarily increments the 'indent' while performing the given monad. 
indented :: MonadState PrintState m => m a -> m a indented m = do before <- get put PrintState {indent = indent before + 2, hereDoc = hereDoc before} x <- m after <- get put PrintState {indent = indent before, hereDoc = hereDoc after} return x -- TODO: Consider using Lens. -- | Class of printable syntax. class Printable s where -- | Prints the given syntax. prints :: (MonadState PrintState m, MonadWriter (Endo String) m) => s -> m () -- | Class of printable lists of syntax. class ListPrintable s where -- | Prints the given list of syntax. printList :: (MonadState PrintState m, MonadWriter (Endo String) m) => [s] -> m () instance Printable Redirection where prints r@FileRedirection {} = printShows r prints r@(HereDoc op cntnt) = do appendHereDoc $ showContent . showDelimiter . showChar '\n' printShows r where showContent = showList $ map snd cntnt showDelimiter = showList $ snd $ unquoteToken $ delimiter op instance ListPrintable Redirection where printList [] = return () printList [r] = prints r printList (r:rs) = foldl printSpaceAnd (prints r) rs where printSpaceAnd mrs' r' = do () <- mrs' showSpace' prints r' printsIndentedLists :: (MonadState PrintState m, MonadWriter (Endo String) m) => CommandList -> m () printsIndentedLists ls = do indented $ do printNewline printList $ toList ls printIndent printDoGroup :: (MonadState PrintState m, MonadWriter (Endo String) m) => CommandList -> m () printDoGroup c = do printString "do" printsIndentedLists c printString "done" printsWhileUntilTail :: (MonadState PrintState m, MonadWriter (Endo String) m) => CommandList -> CommandList -> m () printsWhileUntilTail c b = do printsIndentedLists c printDoGroup b instance Printable CompoundCommand where prints (Grouping ls) = do printString "{" printsIndentedLists ls printString "}" prints (Subshell ls) = do printChar '(' printsIndentedLists ls printChar ')' prints (For name optwords ls) = do printString "for " printShows name printForWords optwords printNewline printDoGroup ls where printForWords Nothing = return () printForWords (Just ws) = do printString " in " printShows ws prints (Case w is) = do printString "case " printShows w printString " in" indented $ sequence_ $ printCaseItem <$> is printNewline printString "esac" where printCaseItem (p :| ps, ls) = do printNewline printChar '(' printShows p sequence_ $ printPattern <$> ps printChar ')' indented $ do printNewline unless (null ls) $ do printList ls printIndent printString ";;" printPattern p = do printString " | " printShows p prints (If its me) = do printsIfThenList its maybePrintsElse me printString "fi" where printsIfThenList (ifthen :| elifthens) = do printsIfThen ifthen printsElifThenList elifthens printsElifThenList [] = return () printsElifThenList (h:t) = do printString "el" printsIfThen h printsElifThenList t printsIfThen (c, t) = do printString "if" printsIndentedLists c printString "then" printsIndentedLists t maybePrintsElse Nothing = return () maybePrintsElse (Just e) = do printString "else" printsIndentedLists e prints (While c b) = do printString "while" printsWhileUntilTail c b prints (Until c b) = do printString "until" printsWhileUntilTail c b instance Printable Command where prints (SimpleCommand [] [] []) = return () prints c@(SimpleCommand _ _ []) = printShows c prints (SimpleCommand [] [] rs) = printList rs prints (SimpleCommand ts as rs) = do prints (SimpleCommand ts as []) showSpace' printList rs prints (CompoundCommand (_, cc) []) = prints cc prints (CompoundCommand (_, cc) rs) = do prints cc showSpace' printList rs 
prints (FunctionDefinition name cmd) = do printString $ unpack name printString "()" printNewline prints cmd instance Printable Pipeline where prints p = do when (isNegated p) (printString "! ") foldl printPipeAnd (prints c) cs where c :| cs = pipeCommands p printPipeAnd mcs' c' = do () <- mcs' printString " |" printNewline prints c' instance Printable ConditionalPipeline where prints (ConditionalPipeline (c, p)) = do printShows c printNewline prints p instance ListPrintable ConditionalPipeline where printList [] = return () printList [p] = prints p printList (p:ps) = do prints p showSpace' printList ps printAndOrHeadTail :: (MonadState PrintState m, MonadWriter (Endo String) m) => Pipeline -> [ConditionalPipeline] -> m () printAndOrHeadTail h [] = prints h printAndOrHeadTail h t = do prints h showSpace' printList t instance Printable AndOrList where prints (AndOrList p ps asy) = do printAndOrHeadTail p ps when asy $ printChar '&' printChar '\n' printHereDoc instance ListPrintable AndOrList where printList [] = return () printList [a] = prints a printList (a:as) = do prints a printIndent printList as -- vim: set et sw=2 sts=2 tw=78:
magicant/flesh
src/Flesh/Language/Syntax/Print.hs
gpl-2.0
9,282
0
16
2,338
2,479
1,199
1,280
217
1
{- | Module : $Header$ Description : Tools for CommonLogic static analysis Copyright : (c) Eugen Kuksa, Uni Bremen 2011 License : GPLv2 or higher, see LICENSE.txt Maintainer : [email protected] Stability : experimental Portability : portable Tools for CommonLogic static analysis -} module CommonLogic.Tools ( freeName -- finds a free discourse name , indvC_text -- retrieves all discourse names from a text , indvC_sen -- retrieves all discourse names from a sentence , indvC_term -- retrieves all discourse names from a term , prd_text -- retrieves all predicates from a text , setUnion_list -- maps function @f@ to the list @ts@ and unifies the results ) where import Data.Char (intToDigit) import Data.Set (Set) import qualified Data.Set as Set import CommonLogic.AS_CommonLogic import Common.Id ------------------------------------------------------------------------------- -- Misc functions -- ------------------------------------------------------------------------------- -- | Finds a free discourse name (appends "_" at the end until free name found) -- given the set of all discourse names freeName :: (String, Int) -> Set NAME -> (NAME, Int) freeName (s, i) ns = if Set.member n ns then freeName (s, i+1) ns else (n, i+1) where n = mkSimpleId (s++"_"++[intToDigit i]) ------------------------------------------------------------------------------- -- Functions to compute the set of individual constants (discourse items), -- -- these work by recursing into all subelements -- ------------------------------------------------------------------------------- -- | maps @f@ to @ts@ and unifies the results setUnion_list :: (Ord b) => (a -> Set b) -> [a] -> Set b setUnion_list f ts = foldl Set.union Set.empty $ map f ts -- | retrieves the individual constants from a text indvC_text :: TEXT -> Set NAME indvC_text t = case t of Text ps _ -> setUnion_list indvC_phrase ps Named_text _ txt _ -> indvC_text txt -- | retrieves the individual constants from a phrase indvC_phrase :: PHRASE -> Set NAME indvC_phrase p = case p of Module m -> indvC_module m Sentence s -> indvC_sen s Comment_text _ t _ -> indvC_text t _ -> Set.empty -- | retrieves the individual constants from a module indvC_module :: MODULE -> Set NAME indvC_module m = case m of Mod _ t _ -> indvC_text t Mod_ex _ _ t _ -> indvC_text t -- | retrieves the individual constants from a sentence indvC_sen :: SENTENCE -> Set NAME indvC_sen s = case s of Quant_sent q _ -> indvC_quantsent q Bool_sent b _ -> indvC_boolsent b Atom_sent a _ -> indvC_atomsent a Comment_sent _ c _ -> indvC_sen c Irregular_sent i _ -> indvC_sen i -- | retrieves the individual constants from a qantified sentence indvC_quantsent ::QUANT_SENT -> Set NAME indvC_quantsent q = case q of Universal noss s -> quant noss s Existential noss s -> quant noss s where quant :: [NAME_OR_SEQMARK] -> SENTENCE -> Set NAME quant nss s = Set.difference (indvC_sen s) $ setUnion_list nameof nss nameof :: NAME_OR_SEQMARK -> Set NAME nameof nsm = case nsm of Name n -> Set.singleton n SeqMark _ -> Set.empty --FIXME: what to do with seqmarks? 
-- | retrieves the individual constants from a boolean sentence indvC_boolsent :: BOOL_SENT -> Set NAME indvC_boolsent b = case b of Conjunction ss -> setUnion_list indvC_sen ss Disjunction ss -> setUnion_list indvC_sen ss Negation s -> indvC_sen s Implication s1 s2 -> setUnion_list indvC_sen [s1,s2] Biconditional s1 s2 -> setUnion_list indvC_sen [s1,s2] -- | retrieves the individual constants from an atom indvC_atomsent :: ATOM -> Set NAME indvC_atomsent a = case a of Equation t1 t2 -> indvC_term t1 `Set.union` indvC_term t2 Atom t ts -> if null ts then indvC_term t --constant else setUnion_list indvC_termSeq ts -- arguments -- | retrieves the individual constants from a term indvC_term :: TERM -> Set NAME indvC_term t = case t of Name_term n -> Set.singleton n Funct_term _ ts _ -> setUnion_list indvC_termSeq ts -- arguments Comment_term t1 _ _ -> indvC_term t1 -- | retrieves the individual constant from a single argument indvC_termSeq :: TERM_SEQ -> Set NAME indvC_termSeq t = case t of Term_seq txt -> indvC_term txt Seq_marks _ -> Set.empty -- FIXME: what to do with seqmarks? ------------------------------------------------------------------------------ -- Functions to compute the set of predicates, these work by recursing -- -- into all subelements -- ------------------------------------------------------------------------------ unifyPredicates :: (a -> Set.Set NAME) -> [a] -> Set.Set NAME unifyPredicates prd_item = foldl (\ns i -> Set.union ns (prd_item i)) Set.empty -- | Retrieves all predicates from a text prd_text :: TEXT -> Set.Set NAME prd_text t = case t of Text ps _ -> prd_phrases ps Named_text _ nt _ -> prd_text nt prd_phrases :: [PHRASE] -> Set.Set NAME prd_phrases = unifyPredicates prd_phrase prd_phrase :: PHRASE -> Set.Set NAME prd_phrase p = case p of Module m -> prd_module m Sentence s -> prd_sentence s Importation i -> prd_importation i Comment_text _ t _ -> prd_text t prd_module :: MODULE -> Set.Set NAME prd_module m = case m of Mod _ t _ -> prd_text t Mod_ex _ _ t _ -> prd_text t prd_sentence :: SENTENCE -> Set.Set NAME prd_sentence s = case s of Quant_sent q _ -> prd_quantSent q Bool_sent b _ -> prd_boolSent b Atom_sent a _ -> prd_atomSent a Comment_sent _ c _ -> prd_sentence c Irregular_sent i _ -> prd_sentence i prd_importation :: IMPORTATION -> Set.Set NAME prd_importation (Imp_name n) = prd_name n prd_quantSent :: QUANT_SENT -> Set.Set NAME prd_quantSent q = case q of Universal noss s -> Set.union (unifyPredicates prd_nameOrSeqmark noss) $ prd_sentence s Existential noss s -> Set.union (unifyPredicates prd_nameOrSeqmark noss) $ prd_sentence s --TODO SequenceMarker Handling prd_nameOrSeqmark :: NAME_OR_SEQMARK -> Set.Set NAME prd_nameOrSeqmark nos = case nos of Name n -> prd_name n SeqMark s -> prd_seqmark s prd_boolSent :: BOOL_SENT -> Set.Set NAME prd_boolSent b = case b of Conjunction ss -> unifyPredicates prd_sentence ss Disjunction ss -> unifyPredicates prd_sentence ss Negation s -> prd_sentence s Implication s1 s2 -> unifyPredicates prd_sentence [s1,s2] Biconditional s1 s2 -> unifyPredicates prd_sentence [s1,s2] prd_atomSent :: ATOM -> Set.Set NAME prd_atomSent a = case a of Equation t1 t2 -> unifyPredicates prd_term [t1,t2] Atom t tseq -> if null tseq then prd_term t else Set.union (prd_termSeqs tseq) $ prd_add_term t prd_term :: TERM -> Set.Set NAME prd_term t = case t of Name_term n -> prd_name n Funct_term ft tseqs _ -> prd_add_term ft `Set.union` prd_termSeqs tseqs Comment_term ct _ _ -> prd_term ct prd_name :: NAME -> Set.Set NAME prd_name _ = Set.empty 
prd_seqmark :: SEQ_MARK -> Set.Set NAME prd_seqmark _ = Set.empty prd_termSeqs :: [TERM_SEQ] -> Set.Set NAME prd_termSeqs = unifyPredicates prd_termSeq prd_termSeq :: TERM_SEQ -> Set.Set NAME prd_termSeq tsec = case tsec of Term_seq t -> prd_term t Seq_marks s -> prd_seqmark s prd_add_term :: TERM -> Set.Set NAME prd_add_term t = case t of Name_term n -> prd_add_name n Funct_term ft tseqs _ -> prd_add_term ft `Set.union` prd_termSeqs tseqs Comment_term ct _ _ -> prd_term ct prd_add_name :: NAME -> Set.Set NAME prd_add_name = Set.singleton
nevrenato/Hets_Fork
CommonLogic/Tools.hs
gpl-2.0
8,269
0
12
2,220
2,086
1,023
1,063
168
5
module HFlint.NMod.Base where import Control.DeepSeq ( NFData(..) ) import HFlint.NMod.FFI instance Show (NMod ctxProxy) where show (NMod a) = show a instance Eq (NMod ctxProxy) where {-# INLINE (==) #-} (NMod a) == (NMod a') = a == a' instance Ord (NMod ctxProxy) where {-# INLINE compare #-} compare (NMod a) (NMod a') = compare a a' instance NFData (NMod ctxProxy) where rnf (NMod a) = seq a ()
martinra/hflint
src/HFlint/NMod/Base.hs
gpl-3.0
418
0
8
89
177
92
85
13
0
module Substitution ( Substitution, comboSubs, applySubs, applyOneSubs, foteSet2Subs, mapTuple, headTuple ) where import ReadPrintTerms (Term(..), isVariable, occursAt) import FOTEset (FOTEset) import Data.Tuple (swap) ------------------------------------------------------------------------------- type Substitution = [(Term, Term)] --the second Term is expected to have "Variable" as it's value constructor --the first Term value cannot contain the same variable as the second Term value --when the first Term value is constructed by a "Function" value constructor. ------------------------------------------------------------------------------ foteSet2Subs :: FOTEset -> Substitution foteSet2Subs = map swap mapTuple :: (a -> b) -> (a, a) -> (b, b) --mapTuple :: ([Term] -> [Term]) -> ([Term],[Term]) -> ([Term],[Term]) mapTuple f (x, y) = (f x , f y) headTuple :: ([a],[b]) -> (a, b) headTuple (as, bs) = (head as, head bs) comboSubs :: Maybe Substitution -> Maybe Substitution -> Maybe Substitution --the context of this function makes the first argument be (Just _) comboSubs _ Nothing = Nothing comboSubs subs1 (Just []) = subs1 comboSubs (Just []) subs2@(Just (_:_)) = subs2 comboSubs (Just subs1@(_:_)) (Just subs2@(_:_)) = Just (s1 ++ subs2) where tsc1 = map fst subs1 -- tsc stands for terms of substitution component vsc1 = map snd subs1 -- vsc stands for variables of substitution component tsc1' = applySubs subs2 tsc1 s1 = zip tsc1' vsc1 ------------------------------------------------------------------------------ applySubs :: Substitution -> [Term] -> [Term] -- the context of using this function provides that termsList is not empty applySubs [] termsList = termsList applySubs (sub:[]) termsList = map (applyOneSubs sub) termsList applySubs (sub1:s@(_:_)) termsList = applySubs s (map (applyOneSubs sub1) termsList) ---------------------------------------------------------------- applyOneSubs :: (Term,Term) -> Term -> Term applyOneSubs (term, v1@(Variable _)) v2@(Variable _) | v1 `occursAt` v2 = term | otherwise = v2 -- occurs check of v1 in term is not needed since this case has been -- covered in definition of unifyTerms applyOneSubs ( _ , Variable _) c@(Constant _) = c applyOneSubs s@(term, Variable _) (Function n a tms) = Function n a (map (applyOneSubs s) tms) applyOneSubs _ _ = undefined ---------------------------------------------------------------- --The following code are not used ------------------------------------------------------------------------------- --reduceSubs :: Substitution -> Substitution --reduceSubs [] = [] --reduceSubs s@[(_,_)] = s --reduceSubs s@(subs1:subs2:subss) -- | or (map isVar2VarSubs s) == False = s -- | = s -- | otherwise -- where -- v2vSubs = takeVar2Var s -- combinable_check = or (map (combinable v2vSubs) s) -- v2vSubss = filter isVar2VarSubs s ------------------------------------------------------------------------------ --isVar2VarSubs :: (Term,Term) -> Bool --tell whether a substitution is from a variable to a variable --only if yes then this substitutuion is possible to be combined with --another substitution --isVar2VarSubs (t1, t2) = (isVariable t1) && (isVariable t2) ----------------------------------------------------------------------------- --takeVar2Var :: Substitution -> (Term, Term) --return earliest variable-to-variable substitutuion --context of this function provides there is at least such one in list --takeVar2Var subss = -- let -- (_ , next) = break isVar2VarSubs subss -- in head next 
------------------------------------------------------------------------------ --combineSubs :: (Term,Term) -> (Term,Term) -> (Term,Term) --try to combine two substitution: if they can't be combined --return the second argument otherwise return the composition --note that all second elements of substitution tuples are variable --combineSubs (Variable varTo, Variable varFrom) subs@(termTo, Variable varTo') -- | varTo == varTo' = (termTo, Variable varFrom) -- | otherwise = subs --combineSubs _ _ = undefined --combinable :: (Term,Term) -> (Term,Term) -> Bool --combinable (Variable varTo, Variable varFrom) subs@(termTo, Variable varTo') -- | varTo == varTo' = True -- | otherwise = False --combinable _ _ = undefined ----------------------------------------------------------------
YueLiPicasso/unification
Substitution.hs
gpl-3.0
4,772
0
10
1,056
715
416
299
40
1
{- Copyright (C) 2014 Richard Larocque <[email protected]> This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -} module Wiki.DumpReader(Page,getPages) where import qualified Data.Text as T import qualified Data.ByteString.Lazy as LB import Text.XML.Expat.SAX import Data.Maybe import Wiki.Types type SAXType = SAXEvent String T.Text saxProcessDoc :: [SAXType] -> [Page] saxProcessDoc = catMaybes . saxProcessDoc' saxProcessDoc' :: [SAXType] -> [Maybe Page] saxProcessDoc' [] = [] saxProcessDoc' xs = case saxFindTag "page" xs of Just (page, rest) -> (processPageContents page):(saxProcessDoc' rest) Nothing -> [] processPageContents :: [SAXType] -> Maybe Page processPageContents xs = do (title, s1) <- saxFindText "title" xs (text, _) <- saxFindText "text" s1 return $ Page title text isEndTag :: String -> SAXType -> Bool isEndTag name (EndElement etag) = etag == name isEndTag _ _ = False saxFindTag :: String -> [SAXType] -> Maybe ([SAXType], [SAXType]) saxFindTag _ [] = Nothing saxFindTag needle ((StartElement tag _):xs) | needle == tag = Just $ span (not.(isEndTag needle)) xs saxFindTag needle (_:xs) = saxFindTag needle xs saxFindText :: String -> [SAXType] -> Maybe (T.Text, [SAXType]) saxFindText needle xs = do (in_tag, rest) <- saxFindTag needle xs return (saxTextContents in_tag, rest) saxTextContents :: [SAXType] -> T.Text saxTextContents xs = T.concat $ mapMaybe getText xs where getText (CharacterData txt) = Just txt getText _ = Nothing getPages :: String -> IO [Page] getPages filename = do xml_text <- LB.readFile filename return $ saxProcessDoc $ parse defaultParseOptions xml_text
richardlarocque/latin-db-builder
Wiki/DumpReader.hs
gpl-3.0
2,274
3
11
377
594
312
282
39
2
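A minimal driver for getPages above; the dump file name is a placeholder.

import Wiki.DumpReader (getPages)

main :: IO ()
main = do
  pages <- getPages "enwiktionary-pages-articles.xml"  -- placeholder path
  putStrLn ("parsed " ++ show (length pages) ++ " pages")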
--samenvoegen :: Ord a => [a]->[a]->[a]
samenvoegen :: [Int]->[Int]->[Int]
samenvoegen [] l = l
samenvoegen l [] = l
samenvoegen (x:xs) (y:ys)
  | x<y = (x : samenvoegen xs (y:ys))
  | x>y = (y : samenvoegen (x:xs) ys)
  | otherwise = (x : samenvoegen xs ys)

hamming :: [Int]
hamming = 1:(samenvoegen (samenvoegen [2*a|a<-hamming] [3*a|a<-hamming]) [5*a|a<-hamming])
jorenverspeurt/joren-assignments-haskell
hamming.hs
gpl-3.0
410
0
12
110
232
121
111
8
1
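samenvoegen is Dutch for "merge": it merges two ascending lists while dropping duplicates, which keeps the hamming stream productive. A quick check, assuming the two definitions above are in scope:

main :: IO ()
main = do
  print (samenvoegen [1,3,5] [2,3,4])  -- [1,2,3,4,5]
  print (take 10 hamming)              -- [1,2,3,4,5,6,8,9,10,12]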
module JunctionSimulation where import Hammer.VTK (writeMultiVTKfile) import Linear.Vect import Texture.Bingham import Texture.Orientation import Texture.Symmetry import VirMat.Core.FlexMicro import VirMat.Distributions.Texture.ODFSampling import VirMat.Distributions.GrainSize.GrainQuery import VirMat.IO.Import.CommandLine import VirMat.IO.Import.Types import VirMat.Run2D import VirMat.Run3D import VirMat.Types go2D :: JobRequest -> IO () go2D jobReq = do simul <- runVirMat2D jobReq let fm :: FlexMicro Vec2 () fm = mkFlexMicro $ grainSet simul fmMorph = add2DGrainMorph 2 fm showLeng = RenderGrainProp ("Length", \_ x -> fmap (getLength . grainLength . fst) x) showArea = RenderGrainProp ("Area", \_ x -> fmap (getArea . grainArea . fst) x) showNeig = RenderGrainProp ("Neighbors", \_ x -> fmap (grainNeighbors . fst) x) showall = [showGrainID, showLeng, showArea, showNeig] writeMultiVTKfile "virmat-2d.vtu" True $ renderFlexMicro showall 1 fmMorph writeFM "fm.vtu" fm 2 let quads = IM.elems $ mapCP0D fm ps = V.map controlPoint . V.fromList . IM.elems . controlPoints $ fm let upQ = replicate 10 updateQuads quadFM = L.foldl' (\acc f -> f acc) fm upQ let ts = getAllGBTriangles fm 0 ns = V.map getNormalTri ts plot = renderPoleFigureGB Lambert ns in renderSVGFile "pf.svg" (sizeSpec (Just 200, Nothing)) plot print "Get Stable Quadriple junctions..." writeFM "fmQuad.vtu" quadFM 2 print "first angles" print $ listAngle fm print "first angles" print $ listAngle quadFM getNormalTri :: (Vec3, Vec3, Vec3) -> Vec3 getNormalTri (a, b, c) = let ba = b &- a ca = c &- a in normalize $ ba &^ ca updateQuads fm = let -- TODO solve the need for ps ps = V.map controlPoint . V.fromList . IM.elems . controlPoints $ fm func acc i = acc { controlPoints = IM.adjust (func2 i) i (controlPoints acc)} func2 i old = old { controlPoint = controlPoint old &+ (0.3 *& (calcForceAtQuad $ getPatchsAtQuadrs ps fm i))} quadriP = mapCP0D fm in IM.foldl' func fm quadriP getPatchsAtQuadrs ps fm id = case IM.lookup id (controlPoints fm) of Nothing -> [] Just cp -> let sIDs = S.elems $ surfaceMembers cp func x = do s <- M.lookup x (surfaces fm) (patch, pos) <- findVertex (patchs s) id tn1 <- normalize <$> tan1 (evalPatch ps patch) pos tn2 <- normalize <$> tan2 (evalPatch ps patch) pos return (tn1 &+ tn2) in mapMaybe func sIDs calcForceAtQuad ts | L.length ts >= 6 = L.foldl' (&+) zero ts | otherwise = L.foldl' (&+) zero ts --zero listAngle fm = concat . IM.elems . IM.map (getAngle fm) $ (mapCP0D fm) getAngle fm id = let ps = V.map controlPoint . V.fromList . IM.elems . controlPoints $ fm func x = do s <- M.lookup x (surfaces fm) (patch, pos) <- findVertex (patchs s) id tn1 <- normalize <$> tan1 (evalPatch ps patch) pos tn2 <- normalize <$> tan2 (evalPatch ps patch) pos return (acos $ tn1 &. tn2) in case IM.lookup id (controlPoints fm) of Nothing -> [] Just cp -> let sIDs = S.elems $ surfaceMembers cp in mapMaybe func sIDs
lostbean/VirMat
src/JunctionSimulation.hs
gpl-3.0
3,386
0
18
940
1,220
606
614
83
2
{-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE Rank2Types #-} module Reviewer.LinkRange where import ClassyPrelude import Control.Lens(makeLenses,(^.),Getter,to) data LinkRange = LinkRange { _linkThis :: Int , _linksTotal :: Int } $(makeLenses ''LinkRange) linkPercentage :: Int -> Getter LinkRange Int linkPercentage m = to (\lr -> lr ^. linkThis * m `div` lr ^. linksTotal) visualizeLinkRange :: LinkRange -> Text visualizeLinkRange r = let m = 80 pt = r ^. linkPercentage m in "[" <> (replicate pt '=') <> replicate (m - pt) ' ' <> "] " <> pack (show (r ^. linkThis)) <> "/" <> pack (show (r ^. linksTotal))
pmiddend/reviewer
src/Reviewer/LinkRange.hs
gpl-3.0
650
0
14
140
232
125
107
16
1
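A worked example for the progress-bar module above (the counts are arbitrary): linkPercentage scales the current index onto an m-wide range, and visualizeLinkRange renders an 80-cell bar.

import Control.Lens ((^.))
import Data.Text (Text)
import Reviewer.LinkRange

-- 30 of 120 links: 25 on a 100-wide scale, 20 filled cells in the 80-wide bar.
asPercent :: Int
asPercent = LinkRange 30 120 ^. linkPercentage 100  -- 25

asBar :: Text
asBar = visualizeLinkRange (LinkRange 30 120)
-- "[====================<60 spaces>] 30/120"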
{-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeOperators #-} {-# OPTIONS_GHC -fno-warn-duplicate-exports #-} {-# OPTIONS_GHC -fno-warn-unused-binds #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- | -- Module : Network.Google.Resource.Healthcare.Projects.Locations.DataSets.GetIAMPolicy -- Copyright : (c) 2015-2016 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- -- Gets the access control policy for a resource. Returns an empty policy -- if the resource exists and does not have a policy set. -- -- /See:/ <https://cloud.google.com/healthcare Cloud Healthcare API Reference> for @healthcare.projects.locations.datasets.getIamPolicy@. module Network.Google.Resource.Healthcare.Projects.Locations.DataSets.GetIAMPolicy ( -- * REST Resource ProjectsLocationsDataSetsGetIAMPolicyResource -- * Creating a Request , projectsLocationsDataSetsGetIAMPolicy , ProjectsLocationsDataSetsGetIAMPolicy -- * Request Lenses , pldsgipOptionsRequestedPolicyVersion , pldsgipXgafv , pldsgipUploadProtocol , pldsgipAccessToken , pldsgipUploadType , pldsgipResource , pldsgipCallback ) where import Network.Google.Healthcare.Types import Network.Google.Prelude -- | A resource alias for @healthcare.projects.locations.datasets.getIamPolicy@ method which the -- 'ProjectsLocationsDataSetsGetIAMPolicy' request conforms to. type ProjectsLocationsDataSetsGetIAMPolicyResource = "v1" :> CaptureMode "resource" "getIamPolicy" Text :> QueryParam "options.requestedPolicyVersion" (Textual Int32) :> QueryParam "$.xgafv" Xgafv :> QueryParam "upload_protocol" Text :> QueryParam "access_token" Text :> QueryParam "uploadType" Text :> QueryParam "callback" Text :> QueryParam "alt" AltJSON :> Get '[JSON] Policy -- | Gets the access control policy for a resource. Returns an empty policy -- if the resource exists and does not have a policy set. -- -- /See:/ 'projectsLocationsDataSetsGetIAMPolicy' smart constructor. data ProjectsLocationsDataSetsGetIAMPolicy = ProjectsLocationsDataSetsGetIAMPolicy' { _pldsgipOptionsRequestedPolicyVersion :: !(Maybe (Textual Int32)) , _pldsgipXgafv :: !(Maybe Xgafv) , _pldsgipUploadProtocol :: !(Maybe Text) , _pldsgipAccessToken :: !(Maybe Text) , _pldsgipUploadType :: !(Maybe Text) , _pldsgipResource :: !Text , _pldsgipCallback :: !(Maybe Text) } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'ProjectsLocationsDataSetsGetIAMPolicy' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'pldsgipOptionsRequestedPolicyVersion' -- -- * 'pldsgipXgafv' -- -- * 'pldsgipUploadProtocol' -- -- * 'pldsgipAccessToken' -- -- * 'pldsgipUploadType' -- -- * 'pldsgipResource' -- -- * 'pldsgipCallback' projectsLocationsDataSetsGetIAMPolicy :: Text -- ^ 'pldsgipResource' -> ProjectsLocationsDataSetsGetIAMPolicy projectsLocationsDataSetsGetIAMPolicy pPldsgipResource_ = ProjectsLocationsDataSetsGetIAMPolicy' { _pldsgipOptionsRequestedPolicyVersion = Nothing , _pldsgipXgafv = Nothing , _pldsgipUploadProtocol = Nothing , _pldsgipAccessToken = Nothing , _pldsgipUploadType = Nothing , _pldsgipResource = pPldsgipResource_ , _pldsgipCallback = Nothing } -- | Optional. 
The policy format version to be returned. Valid values are 0, -- 1, and 3. Requests specifying an invalid value will be rejected. -- Requests for policies with any conditional bindings must specify version -- 3. Policies without any conditional bindings may specify any valid value -- or leave the field unset. To learn which resources support conditions in -- their IAM policies, see the [IAM -- documentation](https:\/\/cloud.google.com\/iam\/help\/conditions\/resource-policies). pldsgipOptionsRequestedPolicyVersion :: Lens' ProjectsLocationsDataSetsGetIAMPolicy (Maybe Int32) pldsgipOptionsRequestedPolicyVersion = lens _pldsgipOptionsRequestedPolicyVersion (\ s a -> s{_pldsgipOptionsRequestedPolicyVersion = a}) . mapping _Coerce -- | V1 error format. pldsgipXgafv :: Lens' ProjectsLocationsDataSetsGetIAMPolicy (Maybe Xgafv) pldsgipXgafv = lens _pldsgipXgafv (\ s a -> s{_pldsgipXgafv = a}) -- | Upload protocol for media (e.g. \"raw\", \"multipart\"). pldsgipUploadProtocol :: Lens' ProjectsLocationsDataSetsGetIAMPolicy (Maybe Text) pldsgipUploadProtocol = lens _pldsgipUploadProtocol (\ s a -> s{_pldsgipUploadProtocol = a}) -- | OAuth access token. pldsgipAccessToken :: Lens' ProjectsLocationsDataSetsGetIAMPolicy (Maybe Text) pldsgipAccessToken = lens _pldsgipAccessToken (\ s a -> s{_pldsgipAccessToken = a}) -- | Legacy upload protocol for media (e.g. \"media\", \"multipart\"). pldsgipUploadType :: Lens' ProjectsLocationsDataSetsGetIAMPolicy (Maybe Text) pldsgipUploadType = lens _pldsgipUploadType (\ s a -> s{_pldsgipUploadType = a}) -- | REQUIRED: The resource for which the policy is being requested. See the -- operation documentation for the appropriate value for this field. pldsgipResource :: Lens' ProjectsLocationsDataSetsGetIAMPolicy Text pldsgipResource = lens _pldsgipResource (\ s a -> s{_pldsgipResource = a}) -- | JSONP pldsgipCallback :: Lens' ProjectsLocationsDataSetsGetIAMPolicy (Maybe Text) pldsgipCallback = lens _pldsgipCallback (\ s a -> s{_pldsgipCallback = a}) instance GoogleRequest ProjectsLocationsDataSetsGetIAMPolicy where type Rs ProjectsLocationsDataSetsGetIAMPolicy = Policy type Scopes ProjectsLocationsDataSetsGetIAMPolicy = '["https://www.googleapis.com/auth/cloud-platform"] requestClient ProjectsLocationsDataSetsGetIAMPolicy'{..} = go _pldsgipResource _pldsgipOptionsRequestedPolicyVersion _pldsgipXgafv _pldsgipUploadProtocol _pldsgipAccessToken _pldsgipUploadType _pldsgipCallback (Just AltJSON) healthcareService where go = buildClient (Proxy :: Proxy ProjectsLocationsDataSetsGetIAMPolicyResource) mempty
brendanhay/gogol
gogol-healthcare/gen/Network/Google/Resource/Healthcare/Projects/Locations/DataSets/GetIAMPolicy.hs
mpl-2.0
6,831
0
16
1,395
806
471
335
125
1
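The generated request above is normally assembled with its smart constructor and then adjusted through the exported lenses. The sketch below shows only that construction step; it assumes the gogol-healthcare and lens packages, the resource path is a hypothetical placeholder, and actually executing the request would go through gogol's separate runtime (e.g. its Env/send machinery), which is not shown here.

{-# LANGUAGE OverloadedStrings #-}
module GetIAMPolicyExample where

import Control.Lens ((&), (?~))
import Network.Google.Resource.Healthcare.Projects.Locations.DataSets.GetIAMPolicy

-- Build a getIamPolicy request for a (made-up) dataset resource and ask for
-- policy format version 3, which is required for conditional bindings.
exampleRequest :: ProjectsLocationsDataSetsGetIAMPolicy
exampleRequest =
  projectsLocationsDataSetsGetIAMPolicy
    "projects/my-project/locations/us-central1/datasets/my-dataset"
    & pldsgipOptionsRequestedPolicyVersion ?~ 3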
{-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeOperators #-} {-# OPTIONS_GHC -fno-warn-duplicate-exports #-} {-# OPTIONS_GHC -fno-warn-unused-binds #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- | -- Module : Network.Google.Resource.Healthcare.Projects.Locations.DataSets.FhirStores.Create -- Copyright : (c) 2015-2016 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- -- Creates a new FHIR store within the parent dataset. -- -- /See:/ <https://cloud.google.com/healthcare Cloud Healthcare API Reference> for @healthcare.projects.locations.datasets.fhirStores.create@. module Network.Google.Resource.Healthcare.Projects.Locations.DataSets.FhirStores.Create ( -- * REST Resource ProjectsLocationsDataSetsFhirStoresCreateResource -- * Creating a Request , projectsLocationsDataSetsFhirStoresCreate , ProjectsLocationsDataSetsFhirStoresCreate -- * Request Lenses , pldsfscParent , pldsfscXgafv , pldsfscUploadProtocol , pldsfscAccessToken , pldsfscUploadType , pldsfscPayload , pldsfscFhirStoreId , pldsfscCallback ) where import Network.Google.Healthcare.Types import Network.Google.Prelude -- | A resource alias for @healthcare.projects.locations.datasets.fhirStores.create@ method which the -- 'ProjectsLocationsDataSetsFhirStoresCreate' request conforms to. type ProjectsLocationsDataSetsFhirStoresCreateResource = "v1" :> Capture "parent" Text :> "fhirStores" :> QueryParam "$.xgafv" Xgafv :> QueryParam "upload_protocol" Text :> QueryParam "access_token" Text :> QueryParam "uploadType" Text :> QueryParam "fhirStoreId" Text :> QueryParam "callback" Text :> QueryParam "alt" AltJSON :> ReqBody '[JSON] FhirStore :> Post '[JSON] FhirStore -- | Creates a new FHIR store within the parent dataset. -- -- /See:/ 'projectsLocationsDataSetsFhirStoresCreate' smart constructor. data ProjectsLocationsDataSetsFhirStoresCreate = ProjectsLocationsDataSetsFhirStoresCreate' { _pldsfscParent :: !Text , _pldsfscXgafv :: !(Maybe Xgafv) , _pldsfscUploadProtocol :: !(Maybe Text) , _pldsfscAccessToken :: !(Maybe Text) , _pldsfscUploadType :: !(Maybe Text) , _pldsfscPayload :: !FhirStore , _pldsfscFhirStoreId :: !(Maybe Text) , _pldsfscCallback :: !(Maybe Text) } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'ProjectsLocationsDataSetsFhirStoresCreate' with the minimum fields required to make a request. 
-- -- Use one of the following lenses to modify other fields as desired: -- -- * 'pldsfscParent' -- -- * 'pldsfscXgafv' -- -- * 'pldsfscUploadProtocol' -- -- * 'pldsfscAccessToken' -- -- * 'pldsfscUploadType' -- -- * 'pldsfscPayload' -- -- * 'pldsfscFhirStoreId' -- -- * 'pldsfscCallback' projectsLocationsDataSetsFhirStoresCreate :: Text -- ^ 'pldsfscParent' -> FhirStore -- ^ 'pldsfscPayload' -> ProjectsLocationsDataSetsFhirStoresCreate projectsLocationsDataSetsFhirStoresCreate pPldsfscParent_ pPldsfscPayload_ = ProjectsLocationsDataSetsFhirStoresCreate' { _pldsfscParent = pPldsfscParent_ , _pldsfscXgafv = Nothing , _pldsfscUploadProtocol = Nothing , _pldsfscAccessToken = Nothing , _pldsfscUploadType = Nothing , _pldsfscPayload = pPldsfscPayload_ , _pldsfscFhirStoreId = Nothing , _pldsfscCallback = Nothing } -- | The name of the dataset this FHIR store belongs to. pldsfscParent :: Lens' ProjectsLocationsDataSetsFhirStoresCreate Text pldsfscParent = lens _pldsfscParent (\ s a -> s{_pldsfscParent = a}) -- | V1 error format. pldsfscXgafv :: Lens' ProjectsLocationsDataSetsFhirStoresCreate (Maybe Xgafv) pldsfscXgafv = lens _pldsfscXgafv (\ s a -> s{_pldsfscXgafv = a}) -- | Upload protocol for media (e.g. \"raw\", \"multipart\"). pldsfscUploadProtocol :: Lens' ProjectsLocationsDataSetsFhirStoresCreate (Maybe Text) pldsfscUploadProtocol = lens _pldsfscUploadProtocol (\ s a -> s{_pldsfscUploadProtocol = a}) -- | OAuth access token. pldsfscAccessToken :: Lens' ProjectsLocationsDataSetsFhirStoresCreate (Maybe Text) pldsfscAccessToken = lens _pldsfscAccessToken (\ s a -> s{_pldsfscAccessToken = a}) -- | Legacy upload protocol for media (e.g. \"media\", \"multipart\"). pldsfscUploadType :: Lens' ProjectsLocationsDataSetsFhirStoresCreate (Maybe Text) pldsfscUploadType = lens _pldsfscUploadType (\ s a -> s{_pldsfscUploadType = a}) -- | Multipart request metadata. pldsfscPayload :: Lens' ProjectsLocationsDataSetsFhirStoresCreate FhirStore pldsfscPayload = lens _pldsfscPayload (\ s a -> s{_pldsfscPayload = a}) -- | The ID of the FHIR store that is being created. The string must match -- the following regex: \`[\\p{L}\\p{N}_\\-\\.]{1,256}\`. pldsfscFhirStoreId :: Lens' ProjectsLocationsDataSetsFhirStoresCreate (Maybe Text) pldsfscFhirStoreId = lens _pldsfscFhirStoreId (\ s a -> s{_pldsfscFhirStoreId = a}) -- | JSONP pldsfscCallback :: Lens' ProjectsLocationsDataSetsFhirStoresCreate (Maybe Text) pldsfscCallback = lens _pldsfscCallback (\ s a -> s{_pldsfscCallback = a}) instance GoogleRequest ProjectsLocationsDataSetsFhirStoresCreate where type Rs ProjectsLocationsDataSetsFhirStoresCreate = FhirStore type Scopes ProjectsLocationsDataSetsFhirStoresCreate = '["https://www.googleapis.com/auth/cloud-platform"] requestClient ProjectsLocationsDataSetsFhirStoresCreate'{..} = go _pldsfscParent _pldsfscXgafv _pldsfscUploadProtocol _pldsfscAccessToken _pldsfscUploadType _pldsfscFhirStoreId _pldsfscCallback (Just AltJSON) _pldsfscPayload healthcareService where go = buildClient (Proxy :: Proxy ProjectsLocationsDataSetsFhirStoresCreateResource) mempty
brendanhay/gogol
gogol-healthcare/gen/Network/Google/Resource/Healthcare/Projects/Locations/DataSets/FhirStores/Create.hs
mpl-2.0
6,556
0
18
1,413
862
502
360
134
1
{-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeOperators #-} {-# OPTIONS_GHC -fno-warn-duplicate-exports #-} {-# OPTIONS_GHC -fno-warn-unused-binds #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- | -- Module : Network.Google.Resource.Books.Onboarding.ListCategoryVolumes -- Copyright : (c) 2015-2016 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- -- List available volumes under categories for onboarding experience. -- -- /See:/ <https://code.google.com/apis/books/docs/v1/getting_started.html Books API Reference> for @books.onboarding.listCategoryVolumes@. module Network.Google.Resource.Books.Onboarding.ListCategoryVolumes ( -- * REST Resource OnboardingListCategoryVolumesResource -- * Creating a Request , onboardingListCategoryVolumes , OnboardingListCategoryVolumes -- * Request Lenses , olcvXgafv , olcvUploadProtocol , olcvLocale , olcvAccessToken , olcvMaxAllowedMaturityRating , olcvUploadType , olcvCategoryId , olcvPageToken , olcvPageSize , olcvCallback ) where import Network.Google.Books.Types import Network.Google.Prelude -- | A resource alias for @books.onboarding.listCategoryVolumes@ method which the -- 'OnboardingListCategoryVolumes' request conforms to. type OnboardingListCategoryVolumesResource = "books" :> "v1" :> "onboarding" :> "listCategoryVolumes" :> QueryParam "$.xgafv" Xgafv :> QueryParam "upload_protocol" Text :> QueryParam "locale" Text :> QueryParam "access_token" Text :> QueryParam "maxAllowedMaturityRating" OnboardingListCategoryVolumesMaxAllowedMaturityRating :> QueryParam "uploadType" Text :> QueryParams "categoryId" Text :> QueryParam "pageToken" Text :> QueryParam "pageSize" (Textual Word32) :> QueryParam "callback" Text :> QueryParam "alt" AltJSON :> Get '[JSON] Volume2 -- | List available volumes under categories for onboarding experience. -- -- /See:/ 'onboardingListCategoryVolumes' smart constructor. data OnboardingListCategoryVolumes = OnboardingListCategoryVolumes' { _olcvXgafv :: !(Maybe Xgafv) , _olcvUploadProtocol :: !(Maybe Text) , _olcvLocale :: !(Maybe Text) , _olcvAccessToken :: !(Maybe Text) , _olcvMaxAllowedMaturityRating :: !(Maybe OnboardingListCategoryVolumesMaxAllowedMaturityRating) , _olcvUploadType :: !(Maybe Text) , _olcvCategoryId :: !(Maybe [Text]) , _olcvPageToken :: !(Maybe Text) , _olcvPageSize :: !(Maybe (Textual Word32)) , _olcvCallback :: !(Maybe Text) } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'OnboardingListCategoryVolumes' with the minimum fields required to make a request. 
-- -- Use one of the following lenses to modify other fields as desired: -- -- * 'olcvXgafv' -- -- * 'olcvUploadProtocol' -- -- * 'olcvLocale' -- -- * 'olcvAccessToken' -- -- * 'olcvMaxAllowedMaturityRating' -- -- * 'olcvUploadType' -- -- * 'olcvCategoryId' -- -- * 'olcvPageToken' -- -- * 'olcvPageSize' -- -- * 'olcvCallback' onboardingListCategoryVolumes :: OnboardingListCategoryVolumes onboardingListCategoryVolumes = OnboardingListCategoryVolumes' { _olcvXgafv = Nothing , _olcvUploadProtocol = Nothing , _olcvLocale = Nothing , _olcvAccessToken = Nothing , _olcvMaxAllowedMaturityRating = Nothing , _olcvUploadType = Nothing , _olcvCategoryId = Nothing , _olcvPageToken = Nothing , _olcvPageSize = Nothing , _olcvCallback = Nothing } -- | V1 error format. olcvXgafv :: Lens' OnboardingListCategoryVolumes (Maybe Xgafv) olcvXgafv = lens _olcvXgafv (\ s a -> s{_olcvXgafv = a}) -- | Upload protocol for media (e.g. \"raw\", \"multipart\"). olcvUploadProtocol :: Lens' OnboardingListCategoryVolumes (Maybe Text) olcvUploadProtocol = lens _olcvUploadProtocol (\ s a -> s{_olcvUploadProtocol = a}) -- | ISO-639-1 language and ISO-3166-1 country code. Default is en-US if -- unset. olcvLocale :: Lens' OnboardingListCategoryVolumes (Maybe Text) olcvLocale = lens _olcvLocale (\ s a -> s{_olcvLocale = a}) -- | OAuth access token. olcvAccessToken :: Lens' OnboardingListCategoryVolumes (Maybe Text) olcvAccessToken = lens _olcvAccessToken (\ s a -> s{_olcvAccessToken = a}) -- | The maximum allowed maturity rating of returned volumes. Books with a -- higher maturity rating are filtered out. olcvMaxAllowedMaturityRating :: Lens' OnboardingListCategoryVolumes (Maybe OnboardingListCategoryVolumesMaxAllowedMaturityRating) olcvMaxAllowedMaturityRating = lens _olcvMaxAllowedMaturityRating (\ s a -> s{_olcvMaxAllowedMaturityRating = a}) -- | Legacy upload protocol for media (e.g. \"media\", \"multipart\"). olcvUploadType :: Lens' OnboardingListCategoryVolumes (Maybe Text) olcvUploadType = lens _olcvUploadType (\ s a -> s{_olcvUploadType = a}) -- | List of category ids requested. olcvCategoryId :: Lens' OnboardingListCategoryVolumes [Text] olcvCategoryId = lens _olcvCategoryId (\ s a -> s{_olcvCategoryId = a}) . _Default . _Coerce -- | The value of the nextToken from the previous page. olcvPageToken :: Lens' OnboardingListCategoryVolumes (Maybe Text) olcvPageToken = lens _olcvPageToken (\ s a -> s{_olcvPageToken = a}) -- | Number of maximum results per page to be included in the response. olcvPageSize :: Lens' OnboardingListCategoryVolumes (Maybe Word32) olcvPageSize = lens _olcvPageSize (\ s a -> s{_olcvPageSize = a}) . mapping _Coerce -- | JSONP olcvCallback :: Lens' OnboardingListCategoryVolumes (Maybe Text) olcvCallback = lens _olcvCallback (\ s a -> s{_olcvCallback = a}) instance GoogleRequest OnboardingListCategoryVolumes where type Rs OnboardingListCategoryVolumes = Volume2 type Scopes OnboardingListCategoryVolumes = '["https://www.googleapis.com/auth/books"] requestClient OnboardingListCategoryVolumes'{..} = go _olcvXgafv _olcvUploadProtocol _olcvLocale _olcvAccessToken _olcvMaxAllowedMaturityRating _olcvUploadType (_olcvCategoryId ^. _Default) _olcvPageToken _olcvPageSize _olcvCallback (Just AltJSON) booksService where go = buildClient (Proxy :: Proxy OnboardingListCategoryVolumesResource) mempty
brendanhay/gogol
gogol-books/gen/Network/Google/Resource/Books/Onboarding/ListCategoryVolumes.hs
mpl-2.0
7,147
0
22
1,666
1,066
613
453
153
1
{-
    Habit of Fate, a game to incentivize habit formation.

    Copyright (C) 2017 Gregory Crosswhite

    This program is free software: you can redistribute it and/or modify
    it under version 3 of the terms of the GNU Affero General Public License.

    This program is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
    GNU Affero General Public License for more details.

    You should have received a copy of the GNU Affero General Public License
    along with this program. If not, see <https://www.gnu.org/licenses/>.
-}

{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE UnicodeSyntax #-}

module HabitOfFate.Server.Requests.Web.GetQuestStatus (handler) where

import HabitOfFate.Prelude

import Network.HTTP.Types.Status (ok200)
import Web.Scotty (ScottyM)
import qualified Web.Scotty as Scotty

import HabitOfFate.Data.Markdown
import HabitOfFate.Server.Common
import HabitOfFate.Server.Requests.Shared.GetQuestStatus
import HabitOfFate.Server.Transaction

handler ∷ Environment → ScottyM ()
handler environment =
  Scotty.get "/status" <<< webTransaction environment $
    getQuestStatus
    <&>
    ( renderMarkdownToHtml
      >>>
      const
      >>>
      renderTopOnlyPageResult "Habit of Fate - Quest Status" (\_ → []) [] Nothing ok200
    )
gcross/habit-of-fate
sources/library/HabitOfFate/Server/Requests/Web/GetQuestStatus.hs
agpl-3.0
1,446
0
11
273
155
93
62
23
1
-- Copyright (C) 2016-2017 Red Hat, Inc.
--
-- This library is free software; you can redistribute it and/or
-- modify it under the terms of the GNU Lesser General Public
-- License as published by the Free Software Foundation; either
-- version 2.1 of the License, or (at your option) any later version.
--
-- This library is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-- Lesser General Public License for more details.
--
-- You should have received a copy of the GNU Lesser General Public
-- License along with this library; if not, see <http://www.gnu.org/licenses/>.

module BDCS.Exceptions(DBException(..), throwIfNothing, throwIfNothingOtherwise) where

import Control.Exception(Exception, throw)
import Data.Data(Typeable)

-- A general purpose exception type for dealing with things that go wrong when working
-- with the database.  This could be broken out into a lot more type constructors to
-- make for an actually useful exception system.  In general, I dislike Haskell exceptions
-- but runSqlite will roll back the entire transaction if an exception is raised.  That's
-- a good reason to use them.
data DBException = DBException String
                 | MissingRPMTag String
 deriving(Eq, Typeable)

instance Exception DBException

instance Show DBException where
    show (DBException s)   = show s
    show (MissingRPMTag s) = "Missing required tag in RPM: " ++ s

throwIfNothing :: Exception e => Maybe a -> e -> a
throwIfNothing (Just v) _   = v
throwIfNothing _        exn = throw exn

throwIfNothingOtherwise :: Exception e => Maybe a -> e -> (a -> b) -> b
throwIfNothingOtherwise (Just v) _   fn = fn v
throwIfNothingOtherwise _        exn _  = throw exn
dashea/bdcs
importer/BDCS/Exceptions.hs
lgpl-2.1
1,867
0
10
392
260
144
116
18
1
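A small usage sketch for the two helpers above; the tag table and tag names are invented for illustration, and only the BDCS.Exceptions API shown here is assumed.

module ExceptionsExample where

import BDCS.Exceptions (DBException(..), throwIfNothing, throwIfNothingOtherwise)

-- Pretend these tags were pulled out of an RPM header.
rpmTags :: [(String, String)]
rpmTags = [("Name", "bash"), ("Version", "4.4")]

-- Throws MissingRPMTag if the tag is absent, which runSqlite would turn into
-- a rolled-back transaction as described in the module comment above.
requireTag :: String -> String
requireTag tag = lookup tag rpmTags `throwIfNothing` MissingRPMTag tag

-- Same idea, but runs a continuation on the value when it is present.
tagLength :: String -> Int
tagLength tag = throwIfNothingOtherwise (lookup tag rpmTags) (MissingRPMTag tag) length

main :: IO ()
main = do
    print (requireTag "Name")    -- "bash"
    print (tagLength "Version")  -- 3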
------------------------------------------------------------------------------ -- Copyright 2012 Microsoft Corporation. -- -- This is free software; you can redistribute it and/or modify it under the -- terms of the Apache License, Version 2.0. A copy of the License can be -- found in the file "license.txt" at the root of this distribution. ----------------------------------------------------------------------------- {- Map type names to type definition schemes. -} ----------------------------------------------------------------------------- module Kind.Newtypes( -- * Type newtypes Newtypes, DataInfo(..) , newtypesEmpty, newtypesExtend, newtypesLookup, newtypesFind , newtypesNew, newtypesCompose , newtypesIsEmpty , newtypesTypeDefs , extractNewtypes -- * Pretty -- , ppNewtypes ) where import qualified Common.NameMap as M import Common.Failure( failure ) import Common.Name import Common.Syntax ( Visibility(..)) import Type.Type import Type.Pretty import Lib.PPrint import qualified Data.List as L import qualified Core.Core as Core {-------------------------------------------------------------------------- Newtype map --------------------------------------------------------------------------} -- | Newtypes: a map from newtype names to newtype information newtype Newtypes = Newtypes (M.NameMap DataInfo) newtypesEmpty :: Newtypes newtypesEmpty = Newtypes M.empty newtypesIsEmpty :: Newtypes -> Bool newtypesIsEmpty (Newtypes m) = M.null m newtypesNew :: [DataInfo] -> Newtypes newtypesNew infos = Newtypes (M.fromList [(dataInfoName info, info) | info <- infos]) newtypesCompose :: Newtypes -> Newtypes -> Newtypes newtypesCompose (Newtypes m1) (Newtypes m2) = Newtypes (M.union m2 m1) -- ASSUME: left-biased union newtypesTypeDefs :: Newtypes -> M.NameMap DataInfo newtypesTypeDefs (Newtypes m) = m newtypesExtend :: Name -> DataInfo -> Newtypes -> Newtypes newtypesExtend name info (Newtypes m) = Newtypes (M.insert name info m) newtypesLookup :: Name -> Newtypes -> Maybe DataInfo newtypesLookup name (Newtypes m) = M.lookup name m newtypesFind :: Name -> Newtypes -> DataInfo newtypesFind name syn = case newtypesLookup name syn of Nothing -> failure ("Kind.Newtypes.newtypesFind: unknown newtype: " ++ show name) Just x -> x -- | Extract data infos from core extractNewtypes :: Core.Core -> Newtypes extractNewtypes core = newtypesNew (concatMap extractTypeDefGroup (Core.coreProgTypeDefs core)) extractTypeDefGroup (Core.TypeDefGroup tdefs) = concatMap extractTypeDef tdefs extractTypeDef :: Core.TypeDef -> [DataInfo] extractTypeDef tdef = case tdef of Core.Data dataInfo Public conViss -> [dataInfo] _ -> [] {-------------------------------------------------------------------------- Pretty printing TODO: redo --------------------------------------------------------------------------} instance Show Newtypes where show = show . pretty instance Pretty Newtypes where pretty syns = ppNewtypes Type.Pretty.defaultEnv syns ppNewtypes showOptions (Newtypes m) = vcat [fill 8 (pretty name) <> colon <+> -- text "rank" <+> pretty rank <> colon <+> ppDataInfo defaultEnv True dataInfo | (name,dataInfo) <- L.sortBy (\(n1,_) (n2,_) -> compare (show n1) (show n2)) $ M.toList m]
lpeterse/koka
src/Kind/Newtypes.hs
apache-2.0
3,533
0
15
724
733
398
335
63
2
module Miscellaneous.A328863Spec (main, spec) where

import Test.Hspec
import Miscellaneous.A328863 (a328863)

main :: IO ()
main = hspec spec

spec :: Spec
spec = describe "A328863" $
  it "correctly computes the first 20 elements" $
    map a328863 [1..20] `shouldBe` expectedValue
  where
    expectedValue = [1,2,4,6,9,14,19,27,37,50,66,89,115,151,195,252,321,412,520,660]
peterokagey/haskellOEIS
test/Miscellaneous/A328863Spec.hs
apache-2.0
376
0
8
57
154
92
62
10
1
-- -- Copyright : (c) T.Mishima 2014 -- License : Apache-2.0 -- {-# LANGUAGE BangPatterns #-} {-# LANGUAGE CPP, ForeignFunctionInterface #-} module Main where import Bindings.OculusRift import Bindings.OculusRift.Types import Control.Exception ( bracket ) import Debug.Trace ( traceIO ) import Foreign.C.String ( peekCString ) import Foreign.Storable ( peek ) import Data.Maybe ( isJust,fromJust ) import Data.Bits import Control.Monad ( forM_, forM ) import Control.Concurrent (threadDelay) import GLFWWindow import GLView import Bindings.OculusRift.Types import Graphics.Rendering.OpenGL as GL import Graphics.GLUtil import Foreign.Ptr (nullPtr) import Linear.V4 import Control.Applicative ------------- #if defined(mingw32_HOST_OS) import Bindings.Utils.Windows #endif ------------- winSize = (1920,1080) main :: IO () main = bracket (do !b <- ovr_Initialize !ghmd <- initGLFW winSize "oculus test" False return (b,ghmd)) (\ (_,ghmd) -> do ovr_Shutdown exitGLFW ghmd traceIO "exit") (\ (b,ghmd) -> if b then do traceIO "init OK" bracket (do !maxIdx <- ovrHmd_Detect traceIO $ "detect = " ++ show maxIdx !hmd <- if maxIdx > 0 then ovrHmd_Create (maxIdx - 1) else Just <$> ovrHmd_CreateDebug ovrHmd_DK2 return hmd) (\ hmd' -> if isJust hmd' then do ovrHmd_Destroy $ fromJust hmd' traceIO "destroy hmd" else do traceIO "hmd is Null" return ()) (mainProcess ghmd) else traceIO "init NG") mainProcess _ Nothing = traceIO "create hmd NG" mainProcess ghmd hmd' = do !glhdl <- initGL let hmd = fromJust hmd' traceIO $ "create hmd OK : " ++ show hmd !msg <- ovrHmd_GetLastError hmd traceIO $ "GetLastError = " ++ msg ++ " Msg End" traceIO " == Print HmdDesc ==" hmdDesc <- castToOvrHmdDesc hmd printHmdDesc hmdDesc traceIO " ===================" !r <- ovrHmd_ConfigureTracking hmd ( ovrTrackingCap_Orientation .|. ovrTrackingCap_MagYawCorrection .|. ovrTrackingCap_Position) ovrTrackingCap_None traceIO $ "ConfigureTracking : " ++ show r -- #if defined(mingw32_HOST_OS) !hwnd <- getWindowHandle "oculus test" traceIO $ "windowHandle : " ++ show hwnd -- !hdc <- getWinDC hwnd !ba <- ovrHmd_AttachToWindow hmd hwnd Nothing Nothing let nativeWindow = Just hwnd traceIO $ "AttachToWindow : " ++ show (ba,hwnd) #else let nativeWindow = Nothing #endif recommenedTex0Size <- ovrHmd_GetDefaultFovTextureSize hmd ovrEye_Left 1.0 recommenedTex1Size <- ovrHmd_GetDefaultFovTextureSize hmd ovrEye_Right 1.0 traceIO $ "recommentedTexSize L : " ++ show recommenedTex0Size ++ " R : " ++ show recommenedTex1Size let !renderTargetSizeW = (si_w recommenedTex0Size) + (si_w recommenedTex1Size) !renderTargetSizeH = max (si_h recommenedTex0Size) (si_h recommenedTex1Size) twidth = fromIntegral renderTargetSizeW theight = fromIntegral renderTargetSizeH !tex <- genColorTexture 0 twidth theight !fbo <- genColorFrameBuffer tex twidth theight -- let !eyeTexture = genEyeTextureData tex renderTargetSizeW renderTargetSizeH !hd = OvrRenderAPIConfigHeader ovrRenderAPI_OpenGL (resolution hmdDesc) 0 -- 1 !apiconf = OvrRenderAPIConfig hd nativeWindow Nothing -- (Just hdc) !caps = ovrDistortionCap_Vignette -- .|. ovrDistortionCap_SRGB .|. ovrDistortionCap_Overdrive .|. ovrDistortionCap_TimeWarp -- .|. ovrDistortionCap_ProfileNoSpinWaits .|. ovrDistortionCap_HqDistortion -- .|. ovrDistortionCap_ComputeShader -- -- .|. ovrDistortionCap_NoRestore -- .|. 
ovrDistortionCap_FlipInput traceIO $ "OvrEyeTexture : " ++ show eyeTexture traceIO $ "OvrRenderAPIConfigHeader : " ++ show hd traceIO $ "render caps : " ++ show caps !lfv <- ovrHmd_GetDefaultFov hmd ovrEye_Left !rfv <- ovrHmd_GetDefaultFov hmd ovrEye_Right !(bret, eyeRD) <- ovrHmd_ConfigureRendering hmd (Just apiconf) caps [lfv,rfv] traceIO $ "ConfigureRendering : " ++ show (bret,eyeRD) -- ovrHmd_SetEnabledCaps hmd ( -- ovrHmdCap_Present -- .|. ovrHmdCap_Available -- .|. ovrHmdCap_Captured -- ovrHmdCap_ExtendDesktop -- .|. ovrHmdCap_DisplayOff ovrHmdCap_LowPersistence .|. ovrHmdCap_DynamicPrediction -- .|. ovrHmdCap_NoMirrorToWindow -- .|. ovrHmdCap_NoVSync ) -- !tis <- ovr_GetTimeInSeconds -- traceIO $ "GetTimeInSeconds : " ++ (show tis) msg2 <- ovrHmd_GetLastError hmd traceIO $ "GetLastError 2 = " ++ msg2 ++ " Msg End" printError ovrHmd_RecenterPose hmd tex <- loadTextureObj "test/sample_tex.png" mainLoop hmd ghmd glhdl (eyeTexture,tex,fbo) eyeRD tex 0 -- ovrHmd_ConfigureRendering hmd Nothing caps [lfv,rfv] return () where genColorTexture textureUnitNo width height = do tex <- genObjectName withTexturesAt Texture2D [(tex,textureUnitNo)] $ do texImage2D Texture2D NoProxy 0 RGBA' (TextureSize2D width height) 0 (PixelData RGBA UnsignedByte nullPtr) textureFilter Texture2D $= ((Nearest, Nothing), Nearest) texture2DWrap $= (Repeated, ClampToEdge) --textureBorderColor Texture2D $= Color4 1.0 0.0 0.0 (0.0::GLfloat) --textureMaxAnisotropy Texture2D $= 1.0 return tex genColorFrameBuffer tex width height = do traceIO $ "tex size = " ++ (show (width,height)) !fbo <- genObjectName :: IO FramebufferObject bindFramebuffer Framebuffer $= fbo !rbo <- genObjectName :: IO RenderbufferObject bindRenderbuffer Renderbuffer $= rbo renderbufferStorage Renderbuffer DepthComponent' (RenderbufferSize width height) framebufferRenderbuffer Framebuffer DepthAttachment Renderbuffer rbo framebufferTexture2D Framebuffer (ColorAttachment 0) Texture2D tex 0 drawBuffers $= [FBOColorAttachment 0] -- unbind bindRenderbuffer Renderbuffer $= noRenderbufferObject bindFramebuffer Framebuffer $= defaultFramebufferObject return fbo genEyeTextureData tex width height = [ OvrTexture hd0 texID , OvrTexture hd1 texID ] where texID = (\ (TextureObject t') -> t' ) tex vpSize = OvrSizei (div width 2) height hd0 = OvrTextureHeader { apiT = ovrRenderAPI_OpenGL , textureSize = OvrSizei width height , renderViewport = OvrRecti (OvrVector2i 0 0) vpSize } hd1 = OvrTextureHeader { apiT = ovrRenderAPI_OpenGL , textureSize = OvrSizei width height , renderViewport = OvrRecti (OvrVector2i (div width 2) 0) vpSize } mainLoop hmd glfwHdl glhdl (eyeTexture,texobj,fbo) eyeRD tex frameNo = do pollGLFW --threadDelay 10000 --threadDelay 1000 --threadDelay 1000000 dt <- getDeltTime glfwHdl exitflg' <- getExitReqGLFW glfwHdl --ts <- ovrHmd_GetTrackingState hmd =<< ovr_GetTimeInSeconds --traceIO $ show ts ovrHmd_BeginFrame hmd frameNo bindFramebuffer Framebuffer $= fbo let (winW,winH) = winSize withViewport (Position 0 0) (Size (fromIntegral winW) (fromIntegral winH)) $ clear [GL.ColorBuffer, GL.DepthBuffer] (poseL:poseR:_) <- ovrHmd_GetEyePoses hmd frameNo $ map hmdToEyeViewOffset eyeRD renderPose <- forM [(ovrEye_Left,0,poseL),(ovrEye_Right,1,poseR)] $ \ (eyeType,i,pose) -> do --pose <- ovrHmd_GetHmdPosePerEye hmd eyeType (OvrMatrix4f m) <- ovrMatrix4f_Projection (fov (eyeRD !! 
i)) 0.1 20 True let pm = (\ [v1,v2,v3,v4] -> V4 v1 v2 v3 v4) $ map (\ l -> (\ [a,b,c,d] -> V4 a b c d) $ map realToFrac l ) m (OvrQuatf qx qy qz qw) = orientation pose --traceIO $ "pose : " ++ (show eyeType) ++ " : " ++ (show pose) --textureBinding Texture2D $= Just texobj let fov' = fov $ head eyeRD vPos = if eyeType == ovrEye_Left then Position 0 0 else Position 1182 0 withViewport vPos (Size 1182 1461) $ render glhdl tex pm (qx,qy,qz,qw) flush return pose bindFramebuffer Framebuffer $= defaultFramebufferObject --traceIO $ "renderPose = " ++ (show renderPose) --traceIO $ "eyeTexture = " ++ (show eyeTexture) ovrHmd_EndFrame hmd renderPose eyeTexture --swapBuff glfwHdl --msg <- ovrHmd_GetLastError hmd --traceIO $ "GetLastError 3 = " ++ msg ++ " Msg End" --printError if exitflg' then return () else mainLoop hmd glfwHdl glhdl (eyeTexture,texobj,fbo) eyeRD tex (frameNo + 1)
lukexi/bindings-Oculus
test/case2/Main.hs
apache-2.0
8,940
0
22
2,367
2,131
1,062
1,069
180
4
{-# LANGUAGE GeneralizedNewtypeDeriving, FlexibleInstances, TypeSynonymInstances #-} module Type where import qualified Data.Map as M import Data.Monoid (Monoid(..)) import Prelude import Error import Pretty newtype TVar = TV String deriving (Show, Eq, Ord) instance Pretty TVar where ppr _ (TV x) = text x data Type = TVar TVar -- ^ Variable | TCon String -- ^ Constant | TCns String Type -- ^ Type constructor | Type :-> Type -- ^ Arrow deriving (Eq, Ord, Show) infixr 0 :-> instance Pretty Type where ppr p (a :-> b) = parensIf (isArrow a) (ppr p a) <+> text "->" <+> ppr p b where isArrow (:->){} = True isArrow _ = False ppr p (TVar a) = ppcolor Vivid Magenta $ ppr p a ppr _ (TCon a) = ppcolor Vivid Blue $ text a ppr p (TCns x a) = ppcolor Vivid Blue $ text x <+> ppr p a pptype :: Type -> String pptype = render . ppr 0 ppsignature :: (String, Scheme) -> String ppsignature (a, b) = a ++ " : " ++ ppscheme b data Scheme = Forall [TVar] Type deriving (Show, Eq, Ord) instance Pretty Scheme where ppr p (Forall [] t) = ppr p t ppr p (Forall ts t) = text "forall" <+> hcat (punctuate space exvars) <> text "." <+> ppr p t where exvars = fmap (ppcolor Vivid Magenta . ppr p) ts ppscheme :: Scheme -> String ppscheme = render . ppr 0 newtype Subst = Subst (M.Map TVar Type) deriving (Eq, Ord, Show, Monoid) instance Pretty Subst where ppr _ (Subst s) = vcat (punctuate space (map pprSub $ M.toList s)) where pprSub (a, b) = ppr 0 a <+> text "~" <+> ppr 0 b ppsubst :: Subst -> String ppsubst = render . ppr 0 typeNum :: Type typeNum = TCon "Num" typeDate :: Type typeDate = TCon "Date" typeText :: Type typeText = TCon "Text" typeBool :: Type typeBool = TCon "Bool" typeTimeUnit :: Type typeTimeUnit = TCon "TimeUnit" typeWeekStart :: Type typeWeekStart = TCon "WeekStart" typeA :: Type typeA = TVar (TV "a") typeB :: Type typeB = TVar (TV "a") typeList :: Type -> Type typeList = TCns "List" -- | Number of argument needed to fully apply a function typeArgCnt :: Type -> Int typeArgCnt x = cnt x - 1 where cnt (_ :-> b) = 1 + cnt b cnt (TVar _) = 1 cnt (TCon _) = 1 cnt (TCns {}) = 1 -- | (Expected type, Actual type) type Constraint = (Type, Type) instance Pretty Constraint where ppr p (a, b) = ppr p a <+> text " ~ " <+> ppr p b instance Pretty [Constraint] where ppr p cs = vcat (punctuate space (map (ppr p) cs)) ppconstraint :: Constraint -> String ppconstraint = render . ppr 0 ppconstraints :: [Constraint] -> String ppconstraints = render . ppr 0 -- | Inference errors data TypeError = UnificationFail Pos Type Type | InfiniteType TVar Type | UnboundVariable Pos String | Ambigious [Constraint] | UnificationMismatch [Type] [Type] | EmptyList instance Show TypeError where show (UnificationFail p a b) = "Type error at " ++ show p ++ ":\n" ++ "Cannot unify types. Expected\n\t" ++ pptype a ++ "\nbut got\n\t" ++ pptype b show (InfiniteType (TV a) b) = "Cannot construct the infinite type: " ++ a ++ " = " ++ pptype b show (Ambigious cs) = concat ["Cannot not match expected type: '" ++ pptype a ++ "' with actual type: '" ++ pptype b ++ "'\n" | (a,b) <- cs] show (UnboundVariable p a) = "Error at " ++ show p ++ ":\n" ++ "Not in scope: \"" ++ a ++ "\"" show EmptyList = "Empty list" show (UnificationMismatch _ _) = "Unification Mismatch" instance Error TypeError where showError = show
ahodgen/archer-calc
src/Type.hs
bsd-2-clause
3,833
0
12
1,168
1,340
694
646
100
4
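The sketch below exercises the pretty-printers defined above. It assumes only this Type module; the rendered strings in the comments are indicative rather than verbatim, because ppr wraps type names in ANSI colour codes via ppcolor.

module TypeExample where

import Type

-- List Num -> Bool, built from the helpers above.
sampleType :: Type
sampleType = typeList typeNum :-> typeBool

-- forall a. a -> a
idScheme :: Scheme
idScheme = Forall [TV "a"] (typeA :-> typeA)

main :: IO ()
main = do
    putStrLn (pptype sampleType)             -- roughly: List Num -> Bool
    putStrLn (ppscheme idScheme)             -- roughly: forall a. a -> a
    putStrLn (ppsignature ("id", idScheme))  -- roughly: id : forall a. a -> a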
{-# LANGUAGE DataKinds, RecordWildCards, TypeOperators #-} module Sprockell where import CLaSH.Prelude hiding (Word) {------------------------------------------------------------- | SPROCKELL: Simple PROCessor in hasKELL :-) | | [email protected] | October 28, 2012 -------------------------------------------------------------} -- Types type Word = Signed 16 type RegBankSize = 8 type ProgMemSize = 128 type DataMemSize = 128 type RegBank = Vec RegBankSize Word type ProgMem = Vec ProgMemSize Assembly type DataMem = Vec DataMemSize Word type RegBankAddr = Unsigned 3 type ProgMemAddr = Unsigned 7 type DataMemAddr = Unsigned 7 -- value to be put in Register Bank data RegValue = RAddr DataMemAddr | RImm Word deriving (Eq,Show) -- value to be put in data memory data MemValue = MAddr RegBankAddr | MImm Word deriving (Eq,Show) data LdCode = NoLoad | LdImm | LdAddr | LdAlu deriving (Eq,Show) data StCode = NoStore | StImm | StReg deriving (Eq,Show) data SPCode = None | Up | Down deriving (Eq,Show) data JmpCode = NoJump -- No jump | UA -- UnConditional - Absolute | UR -- UnConditional - Relative | CA -- Conditional - Absolute | CR -- Conditional - Relative | Back -- Back from subroutine deriving (Eq,Show) data MachCode = MachCode { ldCode :: LdCode -- 0/1: load from dmem to rbank? , stCode :: StCode -- storeCode , spCode :: SPCode , opCode :: OpCode -- opCode , immvalueR :: Word -- value from Immediate - to regbank , immvalueS :: Word -- value from Immediate - to store , fromreg0 :: RegBankAddr -- ibid, first parameter of Compute , fromreg1 :: RegBankAddr -- ibid, second parameter of Compute , fromaddr :: DataMemAddr -- address in dmem , toreg :: RegBankAddr -- ibid, third parameter of Compute , toaddr :: DataMemAddr -- address in dmem , wen :: Bool -- enable signal for store , jmpCode :: JmpCode -- 0/1: indicates a jump , jumpN :: ProgMemAddr -- which instruction to jump to } deriving (Eq,Show) data OpCode = NoOp | Id | Incr | Decr -- no corresponding functions in prog.language | Neg | Not -- unary operations | Add | Sub | Mul | Equal | NEq | Gt | Lt | And | Or -- binary operations deriving (Eq,Show) data Assembly = Compute OpCode RegBankAddr RegBankAddr RegBankAddr -- Compute opCode r0 r1 r2: go to "alu", -- do "opCode" on regs r0, r1, and put result in reg r2 | Jump JmpCode ProgMemAddr -- JumpAbs n: set program counter to n | Load RegValue RegBankAddr -- Load (Addr a) r : from "memory a" to "regbank r" -- Load (Imm v) r : put "Int v" in "regbank r" | Store MemValue DataMemAddr -- Store (Addr r) a: from "regbank r" to "memory a" -- Store (Imm v) r: put "Int v" in "memory r" | Push RegBankAddr -- push a value on the stack | Pop RegBankAddr -- pop a value from the stack | EndProg -- end of program, handled bij exec function | Debug Word deriving (Eq,Show) --record type for internal state of processor data PState = PState { regbank :: RegBank -- register bank , dmem :: DataMem -- main memory, data memory , cnd :: Bool -- condition register (whether condition was true) , pc :: ProgMemAddr , sp :: DataMemAddr } deriving (Eq, Show) -- move reg0 reg1 = Compute Id reg0 zeroreg reg1 -- wait = Jump UR 0 nullcode = MachCode { ldCode = NoLoad , stCode = NoStore , spCode = None , opCode = NoOp , immvalueR = 0 , immvalueS = 0 , fromreg0 = 0 , fromreg1 = 0 , fromaddr = 0 , toreg = 0 , toaddr = 0 , wen = False , jmpCode = NoJump , jumpN = 0 } -- {------------------------------------------------------------- -- | some constants -- -------------------------------------------------------------} -- zeroreg = 0 :: 
RegBankAddr -- regA = 1 :: RegBankAddr -- regB = 2 :: RegBankAddr -- endreg = 3 :: RegBankAddr -- for FOR-loop -- stepreg = 4 :: RegBankAddr -- ibid jmpreg = 5 :: RegBankAddr -- for jump instructions -- pcreg = 7 :: RegBankAddr -- pc is added at the end of the regbank => regbank0 -- sp0 = 20 :: DataMemAddr -- TODO: get sp0 from compiler, add OS tobit True = 1 tobit False = 0 oddB = (== 1) . lsb -- wmax :: Word -> Word -> Word -- wmax w1 w2 = if w1 > w2 then w1 else w2 -- (<~) :: RegBank -> (RegBankAddr, Word) -> RegBank -- xs <~ (0, x) = xs -- xs <~ (7, x) = xs -- xs <~ (i, x) = xs' -- where -- addr = i -- xs' = vreplace xs (fromUnsigned addr) x -- (<~~) :: DataMem -> (Bool, DataMemAddr, Word) -> DataMem -- xs <~~ (False, i, x) = xs -- xs <~~ (True, i , x) = vreplace xs i x {------------------------------------------------------------- | The actual Sprockell -------------------------------------------------------------} decode :: (ProgMemAddr, DataMemAddr) -> Assembly -> MachCode decode (pc, sp) instr = case instr of Compute c i0 i1 i2 -> nullcode {ldCode = LdAlu, opCode = c, fromreg0 = i0, fromreg1=i1, toreg=i2} Jump jc n -> nullcode {jmpCode = jc, fromreg0 = jmpreg, jumpN = n} Load (RImm n) j -> nullcode {ldCode = LdImm, immvalueR = n, toreg = j} Load (RAddr i) j -> nullcode {ldCode = LdAddr, fromaddr = i, toreg = j} Store (MAddr i) j -> nullcode {stCode = StReg, fromreg0 = i, toaddr = j, wen = True} Store (MImm n) j -> nullcode {stCode = StImm, immvalueS = n, toaddr = j, wen = True} Push r -> nullcode {stCode = StReg, fromreg0 = r, toaddr = sp + 1, spCode = Up, wen = True} Pop r -> nullcode {ldCode = LdAddr, fromaddr = sp, toreg = r, spCode = Down} EndProg -> nullcode Debug _ -> nullcode alu :: OpCode -> (Word, Word) -> (Word, Bool) alu opCode (x, y) = (z, cnd) where (z, cnd) = (app opCode x y, oddB z) app opCode = case opCode of Id -> \x y -> x -- identity function on first argument Incr -> \x y -> x + 1 -- increment first argument with 1 Decr -> \x y -> x - 1 -- decrement first argument with 1 Neg -> \x y -> -x Add -> (+) -- goes without saying Sub -> (-) Mul -> (*) Equal -> (tobit.).(==) -- test for equality; result 0 or 1 NEq -> (tobit.).(/=) -- test for inequality Gt -> (tobit.).(>) Lt -> (tobit.).(<) And -> (*) Or -> \x y -> 0 Not -> \x y -> 1-x NoOp -> \x y -> 0 -- result will always be 0 -- load :: RegBank -> LdCode -> RegBankAddr -> (Word, Word, Word) -> RegBank -- load regbank ldCode toreg (immvalueR, mval, z) = regbank' -- where -- v = case ldCode of -- NoLoad -> 0 -- LdImm -> immvalueR -- LdAddr -> mval -- LdAlu -> z -- regbank' = regbank <~ (toreg, v) -- store :: DataMem -> StCode -> (Bool, DataMemAddr) -> (Word, Word) -> DataMem -- store dmem stCode (wen, toaddr) (immvalueS, x) = dmem' -- where -- v = case stCode of -- NoStore -> 0 -- StImm -> immvalueS -- StReg -> x -- dmem' = dmem <~~ (wen, toaddr, v) -- pcUpd :: (JmpCode, Bool) -> (ProgMemAddr, ProgMemAddr, Word) -> ProgMemAddr -- pcUpd (jmpCode, cnd) (pc, jumpN, x) = pc' -- where -- pc' = case jmpCode of -- NoJump -> inc pc -- UA -> jumpN -- UR -> pc + jumpN -- CA -> if cnd then jumpN else inc pc -- CR -> if cnd then pc + jumpN else inc pc -- Back -> bv2u (vdrop d9 (s2bv x)) -- inc i = i + 1 -- spUpd :: SPCode -> DataMemAddr -> DataMemAddr -- spUpd spCode sp = case spCode of -- Up -> sp + 1 -- Down -> sp - 1 -- None -> sp -- -- ====================================================================================== -- -- Putting it all together -- sprockell :: ProgMem -> (State PState) -> Bit -> (State PState, Bit) -- 
sprockell prog (State state) inp = (State (PState {dmem = dmem',regbank = regbank',cnd = cnd',pc = pc',sp = sp'}), outp) -- where -- PState{..} = state -- MachCode{..} = decode (pc,sp) (prog ! (fromUnsigned pc)) -- regbank0 = vreplace regbank (fromUnsigned pcreg) (pc2wrd pc) -- (x,y) = (regbank0 ! (fromUnsigned fromreg0) , regbank0 ! (fromUnsigned fromreg1)) -- mval = dmem ! fromaddr -- (z,cnd') = alu opCode (x,y) -- regbank' = load regbank ldCode toreg (immvalueR,mval,z) -- dmem' = store dmem stCode (wen,toaddr) (immvalueS,x) -- pc' = pcUpd (jmpCode,cnd) (pc,jumpN,x) -- sp' = spUpd spCode sp -- outp = inp -- pc2wrd pca = bv2s (u2bv (resizeUnsigned pca :: Unsigned 16)) -- prog1 = vcopy EndProg -- initstate = PState { -- regbank = vcopy 0, -- dmem = vcopy 0, -- cnd = False, -- pc = 0, -- sp = sp0 -- } -- sprockellL = sprockell prog1 ^^^ initstate topEntity = alu
ggreif/clash-compiler
examples/Sprockell.hs
bsd-2-clause
11,235
4
11
4,777
1,467
923
544
123
15
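For the pure pieces of the Sprockell model above, a few worked results may help; the values below follow directly from the definitions of decode and alu and assume nothing beyond this file and CLaSH's Signed/Unsigned numeric instances.

-- decode (0, 20) (Load (RImm 5) 3)
--   ==> nullcode { ldCode = LdImm, immvalueR = 5, toreg = 3 }
--
-- decode (0, 20) (Push 2)
--   ==> nullcode { stCode = StReg, fromreg0 = 2, toaddr = 21, spCode = Up, wen = True }
--
-- alu Add (3, 4)  ==> (7, True)    -- 7 has lsb 1, so the condition flag is set
-- alu Gt  (3, 4)  ==> (0, False)   -- tobit False = 0, which is even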
-- | Project Euler No. 3
--
-- The prime factors of 13195 are 5, 7, 13 and 29.
--
-- What is the largest prime factor of the number 600851475143 ?

primes = 2 : filter ((==1) . length . primeFactors) [3,5..]

primeFactors n = factor n primes
  where factor n (p:ps)
          | p*p > n        = [n]
          | n `mod` p == 0 = p : factor (n `div` p) (p:ps)
          | otherwise      = factor n ps

main :: IO ()
main = do
  let result = last (primeFactors 600851475143)
  putStrLn (show result)
mazelife/project_euler
three.hs
bsd-3-clause
502
0
12
151
193
100
93
10
1
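Two quick checks of the factoriser above, written doctest-style; the first reproduces the 13195 example from the header comment, and the second is easy to confirm by multiplying the factors back together.

-- >>> primeFactors 13195
-- [5,7,13,29]
--
-- >>> primeFactors 600851475143
-- [71,839,1471,6857]            -- 71 * 839 * 1471 * 6857 = 600851475143
--
-- so `last (primeFactors 600851475143)` -- and hence main -- prints 6857.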
{-# LANGUAGE TemplateHaskell, RecordWildCards,ScopedTypeVariables,FlexibleContexts #-} module Network.AuthorizeNet.Response where import qualified Data.ByteString.Lazy as BSL import qualified Data.Text as T import Network.AuthorizeNet.Request import Network.AuthorizeNet.TH import Network.AuthorizeNet.Types import Network.AuthorizeNet.Util -- | The API responses are documented at http://developer.authorize.net/api/reference/index.html data AuthenticateTestResponse = AuthenticateTestResponse { authenticateTestResponse_refId :: Maybe T.Text, authenticateTestResponse_messages :: Messages, authenticateTestResponse_sessionToken :: Maybe T.Text } deriving (Eq, Show) data CreateCustomerProfileResponse = CreateCustomerProfileResponse { createCustomerProfileResponse_refId :: Maybe T.Text, createCustomerProfileResponse_messages :: Messages, createCustomerProfileResponse_sessionToken :: Maybe T.Text, -- | The CustomerProfileId should be present on success. Save this for later. createCustomerProfileResponse_customerProfileId :: Maybe CustomerProfileId, createCustomerProfileResponse_customerPaymentProfileIdList :: ArrayOfNumericString, createCustomerProfileResponse_customerShippingAddressIdList :: ArrayOfNumericString, -- | I believe these are returned by the bank when Authorize.NET attempts to validate the information createCustomerProfileResponse_validationDirectResponseList :: ArrayOfString } deriving (Eq, Show) mkAuthenticateTestResponse :: Messages -> AuthenticateTestResponse mkAuthenticateTestResponse messages = AuthenticateTestResponse Nothing messages Nothing data GetCustomerProfileResponse = GetCustomerProfileResponse { getCustomerProfileResponse_refId :: Maybe T.Text, getCustomerProfileResponse_messages :: Messages, getCustomerProfileResponse_sessionToken :: Maybe T.Text, getCustomerProfileResponse_profile :: CustomerProfileMasked, getCustomerProfileResponse_subscriptionIds :: Maybe SubscriptionIdList } deriving (Eq, Show) data GetCustomerProfileIdsResponse = GetCustomerProfileIdsResponse { getCustomerProfileIdsResponse_refId :: Maybe T.Text, getCustomerProfileIdsResponse_messages :: Messages, getCustomerProfileIdsResponse_sessionToken :: Maybe T.Text, getCustomerProfileIdsResponse_ids :: ArrayOfNumericString } deriving (Eq, Show) data UpdateCustomerProfileResponse = UpdateCustomerProfileResponse { updateCustomerProfileResponse_refId :: Maybe T.Text, updateCustomerProfileResponse_messages :: Messages, updateCustomerProfileResponse_sessionToken :: Maybe T.Text } deriving (Eq, Show) data DeleteCustomerProfileResponse = DeleteCustomerProfileResponse { deleteCustomerProfileResponse_refId :: Maybe T.Text, deleteCustomerProfileResponse_messages :: Messages, deleteCustomerProfileResponse_sessionToken :: Maybe T.Text } deriving (Eq, Show) data CreateCustomerPaymentProfileResponse = CreateCustomerPaymentProfileResponse { createCustomerPaymentProfileResponse_refId :: Maybe T.Text, createCustomerPaymentProfileResponse_messages :: Messages, createCustomerPaymentProfileResponse_sessionToken :: Maybe T.Text, createCustomerPaymentProfileResponse_customerPaymentProfileId :: Maybe CustomerPaymentProfileId, createCustomerPaymentProfileResponse_validationDirectResponse :: Maybe T.Text } deriving (Eq, Show) data GetCustomerPaymentProfileResponse = GetCustomerPaymentProfileResponse { getCustomerPaymentProfileResponse_refId :: Maybe T.Text, getCustomerPaymentProfileResponse_messages :: Messages, getCustomerPaymentProfileResponse_sessionToken :: Maybe T.Text, getCustomerPaymentProfileResponse_paymentProfile :: Maybe 
CustomerPaymentProfileMasked } deriving (Eq, Show) data GetCustomerPaymentProfileListResponse = GetCustomerPaymentProfileListResponse { getCustomerPaymentProfileListResponse_refId :: Maybe T.Text, getCustomerPaymentProfileListResponse_messages :: Messages, getCustomerPaymentProfileListResponse_sessionToken :: Maybe T.Text, getCustomerPaymentProfileListResponse_totalNumInResultSet :: NumericString, getCustomerPaymentProfileListResponse_paymentProfiles :: Maybe ArrayOfCustomerPaymentProfileListItem } deriving (Eq, Show) data ValidateCustomerPaymentProfileResponse = ValidateCustomerPaymentProfileResponse { validateCustomerPaymentProfileResponse_refId :: Maybe T.Text, validateCustomerPaymentProfileResponse_messages :: Messages, validateCustomerPaymentProfileResponse_sessionToken :: Maybe T.Text, validateCustomerPaymentProfileResponse_directResponse :: Maybe T.Text } deriving (Eq, Show) data UpdateCustomerPaymentProfileResponse = UpdateCustomerPaymentProfileResponse { updateCustomerPaymentProfileResponse_refId :: Maybe T.Text, updateCustomerPaymentProfileResponse_messages :: Messages, updateCustomerPaymentProfileResponse_sessionToken :: Maybe T.Text, updateCustomerPaymentProfileResponse_validationDirectResponse :: Maybe T.Text } deriving (Eq, Show) data DeleteCustomerPaymentProfileResponse = DeleteCustomerPaymentProfileResponse { deleteCustomerPaymentProfileResponse_refId :: Maybe T.Text, deleteCustomerPaymentProfileResponse_messages :: Messages, deleteCustomerPaymentProfileResponse_sessionToken :: Maybe T.Text } deriving (Eq, Show) data GetHostedProfilePageResponse = GetHostedProfilePageResponse { getHostedProfilePageResponse_refId :: Maybe T.Text, getHostedProfilePageResponse_messages :: Messages, getHostedProfilePageResponse_sessionToken :: Maybe T.Text, getHostedProfilePageResponse_token :: Maybe T.Text } deriving (Eq, Show) data CreateProfileResponse = CreateProfileResponse { createProfileResponse_refId :: Maybe T.Text, createProfileResponse_messages :: Messages, createProfileResponse_sessionToken :: Maybe T.Text, createProfileResponse_customerProfileId :: Maybe CustomerProfileId, createProfileResponse_customerPaymentProfileIdList :: Maybe (ArrayOfNumericString), createProfileResponse_customerShippingAddressIdList :: Maybe (ArrayOfNumericString) } deriving (Eq, Show) data TransactionResponse = TransactionResponse { transactionResponse_responseCode :: Maybe T.Text, transactionResponse_rawResponseCode :: Maybe T.Text, transactionResponse_authCode :: Maybe T.Text, transactionResponse_avsResultCode :: Maybe T.Text, transactionResponse_cvvResultCode :: Maybe T.Text, transactionResponse_cavvResultCode :: Maybe T.Text, transactionResponse_transId :: Maybe T.Text, transactionResponse_refTransID :: Maybe T.Text, transactionResponse_transHash :: Maybe T.Text, transactionResponse_testRequest :: Maybe T.Text, transactionResponse_accountNumber :: Maybe T.Text, transactionResponse_entryMode :: Maybe T.Text, transactionResponse_accountType :: Maybe T.Text, transactionResponse_splitTenderId :: Maybe T.Text, transactionResponse_prePaidCard :: Maybe PrePaidCard, -- | I've observed both a <messages>(in the actual output) and a single <message> tag(in the example output). 
transactionResponse_message :: Maybe TransactionResponse_message, transactionResponse_messages :: Maybe ArrayOfTransactionResponseMessage, transactionResponse_errors :: Maybe ArrayOfTransactionResponseError, transactionResponse_splitTenderPayments :: Maybe ArrayOfTransactionResponseSplitTenderPayment, transactionResponse_userFields :: Maybe ArrayOfUserField, transactionResponse_shipTo :: Maybe NameAndAddress, transactionResponse_secureAcceptance :: Maybe SecureAcceptance, transactionResponse_emvResponse :: Maybe EmvResponse } deriving (Eq, Show) mkTransactionResponse :: TransactionResponse mkTransactionResponse = TransactionResponse Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing data CreateTransactionResponse = CreateTransactionResponse { createTransactionResponse_refId :: Maybe T.Text, createTransactionResponse_messages :: Messages, createTransactionResponse_sessionToken :: Maybe T.Text, createTransactionResponse_transactionResponse :: TransactionResponse, createTransactionResponse_profileResponse :: Maybe CreateProfileResponse } deriving (Eq, Show)
MichaelBurge/haskell-authorize-net
src/Network/AuthorizeNet/Response.hs
bsd-3-clause
8,742
0
10
1,370
1,282
726
556
132
1
module HEP.Data.LHCO.PipesUtil
    ( getLHCOEvent
    , eventFromHandle
    , eventFromBS
    ) where

import Control.Monad.Trans.State.Strict
import Data.ByteString.Char8            (ByteString)
import Pipes
import Pipes.Attoparsec                 (parse)
import Pipes.ByteString                 (fromHandle)
import System.IO                        (Handle)

import HEP.Data.LHCO.Parser             (lhcoEvent)
import HEP.Data.LHCO.Type               (Event)

getLHCOEvent :: Monad m => Producer ByteString m () -> Producer Event m ()
getLHCOEvent s = do
    (r, s') <- lift $ runStateT (parse lhcoEvent) s
    case r of
        Just (Right ev) -> yield ev >> getLHCOEvent s'
        _               -> return ()

eventFromBS :: Monad m => ByteString -> Producer Event m ()
eventFromBS = getLHCOEvent . yield

eventFromHandle :: MonadIO m => Handle -> Producer Event m ()
eventFromHandle = getLHCOEvent . fromHandle
cbpark/lhco-tools
src/HEP/Data/LHCO/PipesUtil.hs
bsd-3-clause
1,044
0
12
385
276
149
127
21
2
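A minimal consumer sketch for the producers above; it assumes only this module plus the pipes package, the file name is a hypothetical placeholder, and the events are merely counted so that no Show instance for Event is needed.

module LHCOExample where

import qualified Pipes.Prelude as P
import System.IO (IOMode (ReadMode), withFile)

import HEP.Data.LHCO.PipesUtil (eventFromHandle)

main :: IO ()
main =
    withFile "events.lhco" ReadMode $ \h -> do   -- hypothetical input file
        n <- P.length (eventFromHandle h)        -- drain the producer, counting parsed events
        putStrLn ("parsed " ++ show n ++ " events")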
{-# LANGUAGE ViewPatterns #-} -- -- This module gives certain backend objects types on the front so that they -- are easier to work with, eg Files, Folders etc -- module ThirdLight.Assets.Types ( ID(..), -- unique ID for each unique "asset" AssetUID(..), -- asset type to distinguish overlapping ids: AssetType(..), -- regular files File(..), -- regular folders Folder(..), FolderType(..), -- file links Link(..), -- folder links FolderLink(..), SharerType(..), -- context details (shares UID with backing folder) Context(..), ContextType(..), -- fake folders (Users, Group..) PseudoFolder(..), -- sum type to hold anything "assetish" Asset(..), -- class w/props available on everything "assetish" AssetLike(..) ) where import qualified Data.Aeson as Json import Data.Aeson ((.:),(.=),parseJSON) import qualified Data.Text as Text import Data.Text (Text) import Control.Applicative (empty,(<|>)) -- -- ID's -- newtype ID = ID { unwrapId :: Text } deriving (Show, Eq, Ord) instance Json.FromJSON ID where parseJSON (Json.String s) = return (ID s) parseJSON (Json.Number n) = return $ ID $ Text.pack $ show $ floor n parseJSON _ = empty instance Json.ToJSON ID where toJSON (ID txt) = Json.String txt -- -- Unique Ids for Assets -- data AssetUID = AssetID AssetType ID | UsersID | GroupsID | TopID deriving (Show, Eq, Ord) instance Json.FromJSON AssetUID where parseJSON obj@(Json.Object o) = fromTypeIdHash <|> fromSomeAsset where fromSomeAsset = parseJSON obj >>= \(a :: Asset) -> return (assetUid a) fromTypeIdHash = AssetID <$> o .: "type" <*> o .: "id" parseJSON _ = empty instance Json.ToJSON AssetUID where toJSON (AssetID ty i) = Json.object ["type" .= ty, "id" .= i] toJSON UsersID = Json.object ["type" .= ("users" :: Text)] toJSON GroupsID = Json.object ["type" .= ("groups" :: Text)] toJSON TopID = Json.object ["type" .= ("top" :: Text)] data AssetType = IsFile | IsFolder | IsLink | IsFolderLink deriving (Eq, Show, Ord) instance Json.FromJSON AssetType where parseJSON (Json.String s) = case s of "folder" -> return IsFolder "container" -> return IsFolder "file" -> return IsFile "link" -> return IsLink "folderlink" -> return IsFolderLink _ -> empty parseJSON _ = empty instance Json.ToJSON AssetType where toJSON IsFile = Json.String "file" toJSON IsFolder = Json.String "folder" toJSON IsLink = Json.String "link" toJSON IsFolderLink = Json.String "folderlink" -- -- Regular files -- data File = File { fileName :: Text , fileParentId :: ID , fileId :: ID , fileParentType :: FolderType , filePreviewUrl :: Text } deriving (Eq, Show) instance Json.FromJSON File where parseJSON (Json.Object o) = File <$> o .: "filename" <*> o .: "parentId" <*> o .: "id" <*> o .: "parentType" <*> o .: "previewUrl" parseJSON _ = empty instance AssetLike File where assetName = fileName assetUid = AssetID IsFile . fileId assetParentUid = Just . AssetID IsFolder . 
fileParentId -- -- Folders/smartfolders/collections -- data Folder = Folder { folderName :: Text , folderDescription :: Text , folderParentId :: Maybe ID , folderParentType :: Maybe FolderType , folderId :: ID , folderType :: FolderType , folderContext :: Text } deriving (Eq, Show) instance Json.FromJSON Folder where parseJSON (Json.Object o) = Folder <$> o .: "name" <*> o .: "description" <*> o .: "parentId" <*> o .: "parentType" <*> o .: "id" <*> o .: "folderType" <*> o .: "context" parseJSON _ = empty instance AssetLike Folder where assetName = folderName assetUid f = AssetID IsFolder (folderId f) assetParentUid f = case folderParentId f of Just id' -> Just (AssetID IsFolder id') Nothing -> Just (ctxParent $ folderContext f) data FolderType = ContextFolder | SmartFolder | Collection | NormalFolder | PublicFolder deriving (Eq, Show, Ord) instance Json.FromJSON FolderType where parseJSON (Json.String s) = case s of "contextfolder" -> return ContextFolder "smartfolder" -> return SmartFolder "folder" -> return NormalFolder "collection" -> return Collection "publicfolder" -> return PublicFolder _ -> empty parseJSON _ = empty -- -- File Links -- data Link = Link { linkedFileId :: ID , linkParentType :: FolderType , linkParentId :: ID , linkId :: ID } deriving (Eq, Show) instance Json.FromJSON Link where parseJSON (Json.Object o) = Link <$> o .: "linkedFileId" <*> o .: "parentType" <*> o .: "parentId" <*> o .: "id" parseJSON _ = empty instance AssetLike Link where assetName = const "" assetUid = AssetID IsLink . linkId assetParentUid = Just . AssetID IsFolder . linkParentId -- -- Folder links -- data FolderLink = FolderLink { flinkName :: Text , flinkParentId :: ID , flinkId :: ID , flinkedFolderId :: ID , flinkSharerType :: SharerType , flinkSharerId :: ID } deriving (Eq, Show) instance Json.FromJSON FolderLink where parseJSON (Json.Object o) = FolderLink <$> (o .: "name" <|> return "") <*> o .: "parentId" <*> o .: "id" <*> o .: "linkedFolderId" <*> o .: "sharerType" <*> o .: "sharerId" parseJSON _ = empty instance AssetLike FolderLink where assetName = flinkName assetUid = AssetID IsFolderLink . flinkId assetParentUid = Just . AssetID IsFolder . flinkParentId data SharerType = UserShare | GroupShare deriving (Eq, Show, Ord) instance Json.FromJSON SharerType where parseJSON (Json.String s) = case s of "user" -> return UserShare "group" -> return GroupShare _ -> empty parseJSON _ = empty -- -- a root context -- data Context = Context { contextDomain :: Text , contextName :: Text , contextId :: ID , contextType :: ContextType , contextBackingFolder :: ID } deriving (Eq, Show) instance AssetLike Context where assetName = contextName -- the unique ID for this is identical to that of the -- backing folder; they are two sides of the same coin. assetUid = AssetID IsFolder . contextBackingFolder -- this should match the parent of the corresponding -- backing folder, so we go through the same call rather -- than base decision on contexttype: assetParentUid = Just . ctxParent . unwrapId . contextId instance Json.FromJSON Context where parseJSON (Json.Object o) = Context <$> o .: "domain" <*> o .: "name" <*> o .: "id" <*> o .: "type" <*> o .: "backingFolderId" parseJSON _ = empty data ContextType = UserContext | GroupContext | DomainContext deriving (Eq, Show, Ord) instance Json.FromJSON ContextType where parseJSON (Json.String s) = case s of "user" -> return UserContext "group" -> return GroupContext "domain" -> return DomainContext _ -> empty parseJSON _ = empty -- -- Fake folders. 
These only exist for our file hierarchy -- data PseudoFolder = Users | Groups | Top deriving (Eq, Show, Ord) instance AssetLike PseudoFolder where assetName Users = "Users" assetName Groups = "Groups" assetName Top = "Top" assetUid Users = UsersID assetUid Groups = GroupsID assetUid Top = TopID assetParentUid Top = Nothing assetParentUid _ = Just TopID -- -- Aggregate the various asset types into one, so that we -- can use them alongside each other. -- data Asset = FileAsset File | FolderAsset Folder -- these guys come in two parts, so some -- assembly required! | LinkAsset File Link | FolderLinkAsset Folder FolderLink | ContextAsset Folder Context -- fake folders: | PseudoFolder PseudoFolder deriving (Eq, Show) instance Json.FromJSON Asset where parseJSON o@(Json.Object _) = FileAsset <$> parseJSON o <|> FolderAsset <$> parseJSON o parseJSON _ = empty instance AssetLike Asset where -- specific definitions needed for links, -- and folderlinks, since they pull info from -- more than one place: assetName (LinkAsset f _) = assetName f assetName (FolderLinkAsset f _) = assetName f assetName a = runOnAsset assetName a assetUid (LinkAsset _ l) = assetUid l assetUid (FolderLinkAsset _ l) = assetUid l assetUid a = runOnAsset assetUid a assetParentUid (LinkAsset _ l) = assetParentUid l assetParentUid (FolderLinkAsset _ l) = assetParentUid l assetParentUid a = runOnAsset assetParentUid a runOnAsset :: (forall a. AssetLike a => a -> b) -> Asset -> b runOnAsset fn (FileAsset f) = fn f runOnAsset fn (FolderAsset f) = fn f runOnAsset fn (PseudoFolder f) = fn f runOnAsset fn (ContextAsset _ c) = fn c runOnAsset _ _ = error "runOnAsset called on a variant it shouldn't have been" -- -- A general interface to pull things out of the assets that -- are common to all of them -- class AssetLike a where assetName :: a -> Text assetUid :: a -> AssetUID assetParentUid :: a -> Maybe AssetUID ctxParent :: Text -> AssetUID ctxParent (Text.isPrefixOf "user" -> True) = UsersID ctxParent (Text.isPrefixOf "group" -> True) = GroupsID ctxParent _ = UsersID
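-- A minimal usage sketch, not part of the original module: 'exampleFile' and
-- 'exampleFileUid' are hypothetical names used only to illustrate the
-- AssetLike interface on a hand-built File value.
exampleFile :: File
exampleFile = File
    { fileName       = Text.pack "report.pdf"
    , fileParentId   = ID (Text.pack "42")
    , fileId         = ID (Text.pack "1001")
    , fileParentType = NormalFolder
    , filePreviewUrl = Text.pack "https://example.com/preview/1001"
    }

-- assetUid exampleFile       == AssetID IsFile (ID "1001")
-- assetParentUid exampleFile == Just (AssetID IsFolder (ID "42"))
exampleFileUid :: AssetUID
exampleFileUid = assetUid exampleFile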
jsdw/hs-thirdlight-api
src/ThirdLight/Assets/Types.hs
bsd-3-clause
10,427
0
19
3,329
2,616
1,410
1,206
-1
-1
{- A binomial option pricing model Assume a put option with a strike price of $110 on a stock currently trading at $100, expiring in one year. The annual risk-free rate is 5%. The stock price is expected to either increase by 20% or decrease by 15% every six months. The task is to estimate the price of the put option. -} import Control.Monad import Control.Monad.Trans import Simulation.Aivika.Trans import Simulation.Aivika.Lattice import Simulation.Aivika.Experiment.Histogram -- the lattice size n = 50 -- the up and down factors u0 = 1.2 d0 = 0.85 -- corrected factors for the lattice size u = exp (log u0 / (fromIntegral n / 2)) d = exp (log d0 / (fromIntegral n / 2)) -- initial stock price s0 = 100.0 -- strike price for put option strikePrice = 110.0 -- risk free rate r = 0.05 specs = Specs { spcStartTime = 0.0, spcStopTime = 1.0, spcDT = 0.1, spcMethod = RungeKutta4, spcGeneratorType = SimpleGenerator } model :: Simulation LIO Double model = do -- stock price s <- newRef s0 -- calculate the stock price tree runEventInStartTime $ enqueueEventWithLatticeTimes $ do k <- liftComp latticeMemberIndex k0 <- liftComp latticeParentMemberIndex case k0 of Nothing -> return () Just k0 | k == k0 -> modifyRef s (\x -> x * u) Just k0 | k == k0 + 1 -> modifyRef s (\x -> x * d) -- the lattice time step dt <- liftParameter latticeTimeStep -- calculate the up move probability let p = (exp (- r * dt) - d) / (u - d) -- estimate the option price in the end time let leaf :: Estimate LIO Double leaf = do x <- readObservable s -- this is a put option return $ max (strikePrice - x) 0 -- estimate the option price by the forecast let reduce :: Double -> Double -> Estimate LIO Double reduce x1 x2 = return $ exp (- r * dt) * (p * x1 + (1 - p) * x2) price <- foldEstimate reduce leaf runEstimateInStartTime price main :: IO () main = do lat <- newRandomLattice n e <- runLIO lat $ runSimulation model specs putStrLn "Estimation:" putStrLn (show e)
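-- A minimal sketch, not part of the original example: the single-step
-- risk-neutral valuation that 'reduce' applies at every lattice node, written
-- out directly with the same formulas used in 'model' above. 'onePeriodPut'
-- is a hypothetical helper name used only for illustration.
onePeriodPut :: Double  -- current stock price
             -> Double  -- lattice time step
             -> Double  -- estimated put price after one step
onePeriodPut s dt =
  let p        = (exp (- r * dt) - d) / (u - d)   -- up-move probability, as in 'model'
      payoff x = max (strikePrice - x) 0          -- put payoff
  in exp (- r * dt) * (p * payoff (s * u) + (1 - p) * payoff (s * d))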
dsorokin/aivika-lattice
examples/BinomialPricingModel.hs
bsd-3-clause
2,292
0
17
733
569
290
279
51
3
{-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE LambdaCase #-} {-# LANGUAGE OverloadedStrings #-} {-# OPTIONS -Wall #-} -- | -- Module : Network.Mail.Client.Gmail -- License : BSD3 -- Maintainer : Enzo Haussecker -- Stability : Experimental -- Portability : Unknown -- -- A dead simple SMTP Client for sending Gmail. module Network.Mail.Client.Gmail ( -- ** Sending sendGmail -- ** Exceptions , GmailException(..) ) where import Control.Applicative ((<$>)) import Control.Monad (forever, forM) import Control.Monad.IO.Class (MonadIO(..)) import Control.Monad.Trans.Resource (ResourceT, runResourceT) import Control.Exception (Exception, bracket, throw) import Data.Attoparsec.ByteString.Char8 as P import Data.ByteString.Char8 as B (ByteString, pack) import Data.ByteString.Base64.Lazy (encode) import Data.ByteString.Lazy.Char8 as L (ByteString, hPutStrLn, readFile) import Data.ByteString.Lazy.Search (replace) import Data.Conduit import Data.Conduit.Attoparsec (sinkParser) import Data.Default (def) import Data.Monoid ((<>)) import Data.Text as S (Text, pack) import Data.Text.Lazy as L (Text, fromChunks) import Data.Text.Lazy.Encoding (encodeUtf8) import Data.Typeable (Typeable) import Network (PortID(PortNumber), connectTo) import Network.Mail.Mime hiding (renderMail) import Network.TLS import Network.TLS.Extra (ciphersuite_all) import Prelude hiding (any, readFile) import System.FilePath (takeExtension, takeFileName) import System.IO hiding (readFile) import System.Timeout (timeout) data GmailException = ParseError String | Timeout deriving (Show, Typeable) instance Exception GmailException -- | -- Send an email from your Gmail account using the -- simple message transfer protocol with transport -- layer security. If you have 2-step verification -- enabled on your account, then you will need to -- retrieve an application specific password before -- using this function. Below is an example using -- ghci, where Alice sends an Excel spreadsheet to -- Bob. 
-- -- > >>> :set -XOverloadedStrings -- > >>> :module Network.Mail.Mime Network.Mail.Client.Gmail -- > >>> sendGmail "alice" "password" (Address (Just "Alice") "[email protected]") [Address (Just "Bob") "[email protected]"] [] [] "Excel Spreadsheet" "Hi Bob,\n\nThe Excel spreadsheet is attached.\n\nRegards,\n\nAlice" ["Spreadsheet.xls"] 10000000 -- sendGmail :: L.Text -- ^ username -> L.Text -- ^ password -> Address -- ^ from -> [Address] -- ^ to -> [Address] -- ^ cc -> [Address] -- ^ bcc -> S.Text -- ^ subject -> L.Text -- ^ body -> [FilePath] -- ^ attachments -> Int -- ^ timeout (in microseconds) -> IO () sendGmail user pass from to cc bcc subject body attachments micros = do let handle = connectTo "smtp.gmail.com" $ PortNumber 587 bracket handle hClose $ \ hdl -> do recvSMTP hdl micros "220" sendSMTP hdl "EHLO" recvSMTP hdl micros "250" sendSMTP hdl "STARTTLS" recvSMTP hdl micros "220" let context = contextNew hdl params bracket context cClose $ \ ctx -> do handshake ctx sendSMTPS ctx "EHLO" recvSMTPS ctx micros "250" sendSMTPS ctx "AUTH LOGIN" recvSMTPS ctx micros "334" sendSMTPS ctx username recvSMTPS ctx micros "334" sendSMTPS ctx password recvSMTPS ctx micros "235" sendSMTPS ctx sender recvSMTPS ctx micros "250" sendSMTPS ctx recipient recvSMTPS ctx micros "250" sendSMTPS ctx "DATA" recvSMTPS ctx micros "354" sendSMTPS ctx =<< mail recvSMTPS ctx micros "250" sendSMTPS ctx "QUIT" recvSMTPS ctx micros "221" where username = encode $ encodeUtf8 user password = encode $ encodeUtf8 pass sender = "MAIL FROM: " <> angleBracket [from] recipient = "RCPT TO: " <> angleBracket (to ++ cc ++ bcc) mail = renderMail from to cc bcc subject body attachments -- | -- Consume the SMTP packet stream. sink :: B.ByteString -> Sink (Maybe B.ByteString) (ResourceT IO) () sink code = (awaitForever $ maybe (throw Timeout) yield) =$= sinkParser (parser code) -- | -- Parse the SMTP packet stream. parser :: B.ByteString -> P.Parser () parser code = do reply <- P.take 3 if code /= reply then throw . ParseError $ "Expected SMTP reply code " ++ show code ++ ", but received SMTP reply code " ++ show reply ++ "." else anyChar >>= \ case ' ' -> return () '-' -> manyTill anyChar endOfLine >> parser code _ -> throw $ ParseError "Unexpected character." -- | -- Define the TLS client parameters. params :: ClientParams params = (defaultParamsClient "smtp.gmail.com" "587") { clientSupported = def { supportedCiphers = ciphersuite_all } , clientShared = def { sharedValidationCache = noValidate } } where noValidate = ValidationCache (\_ _ _ -> return ValidationCachePass) -- This is not secure! (\_ _ _ -> return ()) -- | -- Terminate the TLS connection. cClose :: Context -> IO () cClose ctx = bye ctx >> contextClose ctx -- | -- Display the first email address in the -- given list using angle bracket formatting. angleBracket :: [Address] -> L.ByteString angleBracket = \ case [] -> ""; (Address _ email:_) -> "<" <> encodeUtf8 (fromChunks [email]) <> ">" -- | -- Send an unencrypted message. sendSMTP :: Handle -> L.ByteString -> IO () sendSMTP = L.hPutStrLn -- | -- Send an encrypted message. sendSMTPS :: Context -> L.ByteString -> IO () sendSMTPS ctx msg = sendData ctx $ msg <> "\r\n" -- | -- Receive an unencrypted message. recvSMTP :: Handle -> Int -> B.ByteString -> IO () recvSMTP hdl micros code = runResourceT $ source $$ sink code where source = forever $ liftIO chunk >>= yield chunk = timeout micros $ append <$> hGetLine hdl append = flip (<>) "\n" . B.pack -- | -- Receive an encrypted message.
recvSMTPS :: Context -> Int -> B.ByteString -> IO () recvSMTPS ctx micros code = runResourceT $ source $$ sink code where source = forever $ liftIO chunk >>= yield chunk = timeout micros $ recvData ctx -- | -- Render an email using the RFC 2822 message format. renderMail :: Address -- ^ from -> [Address] -- ^ to -> [Address] -- ^ cc -> [Address] -- ^ bcc -> S.Text -- ^ subject -> L.Text -- ^ body -> [FilePath] -- ^ attachments -> IO L.ByteString renderMail from to cc bcc subject body attach = do parts <- forM attach $ \ path -> do content <- readFile path let mime = getMime $ takeExtension path file = Just . S.pack $ takeFileName path return $! [Part mime Base64 file [] content] let plain = [Part "text/plain; charset=utf-8" QuotedPrintableText Nothing [] $ encodeUtf8 body] mail <- renderMail' . Mail from to cc bcc headers $ plain : parts return $! replace "\n." ("\n.." :: L.ByteString) mail <> "\r\n.\r\n" where headers = [("Subject", subject)] -- | -- Get the mime type for the given file extension. getMime :: String -> S.Text getMime = \ case ".3dm" -> "x-world/x-3dmf" ".3dmf" -> "x-world/x-3dmf" ".a" -> "application/octet-stream" ".aab" -> "application/x-authorware-bin" ".aam" -> "application/x-authorware-map" ".aas" -> "application/x-authorware-seg" ".abc" -> "text/vnd.abc" ".acgi" -> "text/html" ".afl" -> "video/animaflex" ".ai" -> "application/postscript" ".aif" -> "audio/aiff" ".aifc" -> "audio/aiff" ".aiff" -> "audio/aiff" ".aim" -> "application/x-aim" ".aip" -> "text/x-audiosoft-intra" ".ani" -> "application/x-navi-animation" ".aos" -> "application/x-nokia-9000-communicator-add-on-software" ".aps" -> "application/mime" ".arc" -> "application/octet-stream" ".arj" -> "application/arj" ".art" -> "image/x-jg" ".asf" -> "video/x-ms-asf" ".asm" -> "text/x-asm" ".asp" -> "text/asp" ".asx" -> "application/x-mplayer2" ".au" -> "audio/basic" ".avi" -> "application/x-troff-msvideo" ".avs" -> "video/avs-video" ".bcpio" -> "application/x-bcpio" ".bin" -> "application/mac-binary" ".bm" -> "image/bmp" ".bmp" -> "image/bmp" ".boo" -> "application/book" ".book" -> "application/book" ".boz" -> "application/x-bzip2" ".bsh" -> "application/x-bsh" ".bz" -> "application/x-bzip" ".bz2" -> "application/x-bzip2" ".c" -> "text/plain" ".c++" -> "text/plain" ".cat" -> "application/vnd.ms-pki.seccat" ".cc" -> "text/plain" ".ccad" -> "application/clariscad" ".cco" -> "application/x-cocoa" ".cdf" -> "application/cdf" ".cer" -> "application/pkix-cert" ".cha" -> "application/x-chat" ".chat" -> "application/x-chat" ".class" -> "application/java" ".com" -> "application/octet-stream" ".conf" -> "text/plain" ".cpio" -> "application/x-cpio" ".cpp" -> "text/x-c" ".cpt" -> "application/mac-compactpro" ".crl" -> "application/pkcs-crl" ".crt" -> "application/pkix-cert" ".csh" -> "application/x-csh" ".css" -> "application/x-pointplus" ".cxx" -> "text/plain" ".dcr" -> "application/x-director" ".deepv" -> "application/x-deepv" ".def" -> "text/plain" ".der" -> "application/x-x509-ca-cert" ".dif" -> "video/x-dv" ".dir" -> "application/x-director" ".dl" -> "video/dl" ".doc" -> "application/msword" ".dot" -> "application/msword" ".dp" -> "application/commonground" ".drw" -> "application/drafting" ".dump" -> "application/octet-stream" ".dv" -> "video/x-dv" ".dvi" -> "application/x-dvi" ".dwf" -> "drawing/x-dwf (old)" ".dwg" -> "application/acad" ".dxf" -> "application/dxf" ".dxr" -> "application/x-director" ".el" -> "text/x-script.elisp" ".elc" -> "application/x-bytecode.elisp (compiled elisp)" ".env" -> "application/x-envoy" 
".eps" -> "application/postscript" ".es" -> "application/x-esrehber" ".etx" -> "text/x-setext" ".evy" -> "application/envoy" ".exe" -> "application/octet-stream" ".f" -> "text/plain" ".f77" -> "text/x-fortran" ".f90" -> "text/plain" ".fdf" -> "application/vnd.fdf" ".fif" -> "application/fractals" ".fli" -> "video/fli" ".flo" -> "image/florian" ".flx" -> "text/vnd.fmi.flexstor" ".fmf" -> "video/x-atomic3d-feature" ".for" -> "text/plain" ".fpx" -> "image/vnd.fpx" ".frl" -> "application/freeloader" ".funk" -> "audio/make" ".g" -> "text/plain" ".g3" -> "image/g3fax" ".gif" -> "image/gif" ".gl" -> "video/gl" ".gsd" -> "audio/x-gsm" ".gsm" -> "audio/x-gsm" ".gsp" -> "application/x-gsp" ".gss" -> "application/x-gss" ".gtar" -> "application/x-gtar" ".gz" -> "application/x-compressed" ".gzip" -> "application/x-gzip" ".h" -> "text/plain" ".hdf" -> "application/x-hdf" ".help" -> "application/x-helpfile" ".hgl" -> "application/vnd.hp-hpgl" ".hh" -> "text/plain" ".hlb" -> "text/x-script" ".hlp" -> "application/hlp" ".hpg" -> "application/vnd.hp-hpgl" ".hpgl" -> "application/vnd.hp-hpgl" ".hqx" -> "application/binhex" ".hs" -> "text/x-haskell" ".hta" -> "application/hta" ".htc" -> "text/x-component" ".htm" -> "text/html" ".html" -> "text/html" ".htmls" -> "text/html" ".htt" -> "text/webviewhtml" ".htx" -> "text/html" ".ice" -> "x-conference/x-cooltalk" ".ico" -> "image/x-icon" ".idc" -> "text/plain" ".ief" -> "image/ief" ".iefs" -> "image/ief" ".iges" -> "application/iges" ".igs" -> "application/iges" ".ima" -> "application/x-ima" ".imap" -> "application/x-httpd-imap" ".inf" -> "application/inf" ".ins" -> "application/x-internett-signup" ".ip" -> "application/x-ip2" ".isu" -> "video/x-isvideo" ".it" -> "audio/it" ".iv" -> "application/x-inventor" ".ivr" -> "i-world/i-vrml" ".ivy" -> "application/x-livescreen" ".jam" -> "audio/x-jam" ".jav" -> "text/plain" ".java" -> "text/plain" ".jcm" -> "application/x-java-commerce" ".jfif" -> "image/jpeg" ".jfif-tbnl" -> "image/jpeg" ".jpe" -> "image/jpeg" ".jpeg" -> "image/jpeg" ".jpg" -> "image/jpeg" ".jps" -> "image/x-jps" ".js" -> "application/x-javascript" ".jut" -> "image/jutvision" ".kar" -> "audio/midi" ".ksh" -> "application/x-ksh" ".la" -> "audio/nspaudio" ".lam" -> "audio/x-liveaudio" ".latex" -> "application/x-latex" ".lha" -> "application/lha" ".lhx" -> "application/octet-stream" ".list" -> "text/plain" ".lma" -> "audio/nspaudio" ".log" -> "text/plain" ".lsp" -> "application/x-lisp" ".lst" -> "text/plain" ".lsx" -> "text/x-la-asf" ".ltx" -> "application/x-latex" ".lzh" -> "application/octet-stream" ".lzx" -> "application/lzx" ".m" -> "text/plain" ".m1v" -> "video/mpeg" ".m2a" -> "audio/mpeg" ".m2v" -> "video/mpeg" ".m3u" -> "audio/x-mpequrl" ".man" -> "application/x-troff-man" ".map" -> "application/x-navimap" ".mar" -> "text/plain" ".mbd" -> "application/mbedlet" ".mc$" -> "application/x-magic-cap-package-1.0" ".mcd" -> "application/mcad" ".mcf" -> "image/vasa" ".mcp" -> "application/netmc" ".me" -> "application/x-troff-me" ".mht" -> "message/rfc822" ".mhtml" -> "message/rfc822" ".mid" -> "application/x-midi" ".midi" -> "application/x-midi" ".mif" -> "application/x-frame" ".mime" -> "message/rfc822" ".mjf" -> "audio/x-vnd.audioexplosion.mjuicemediafile" ".mjpg" -> "video/x-motion-jpeg" ".mm" -> "application/base64" ".mme" -> "application/base64" ".mod" -> "audio/mod" ".moov" -> "video/quicktime" ".mov" -> "video/quicktime" ".movie" -> "video/x-sgi-movie" ".mp2" -> "audio/mpeg" ".mp3" -> "audio/mpeg3" ".mpa" -> "audio/mpeg" ".mpc" -> 
"application/x-project" ".mpe" -> "video/mpeg" ".mpeg" -> "video/mpeg" ".mpg" -> "audio/mpeg" ".mpga" -> "audio/mpeg" ".mpp" -> "application/vnd.ms-project" ".mpt" -> "application/x-project" ".mpv" -> "application/x-project" ".mpx" -> "application/x-project" ".mrc" -> "application/marc" ".ms" -> "application/x-troff-ms" ".mv" -> "video/x-sgi-movie" ".my" -> "audio/make" ".mzz" -> "application/x-vnd.audioexplosion.mzz" ".nap" -> "image/naplps" ".naplps" -> "image/naplps" ".nc" -> "application/x-netcdf" ".ncm" -> "application/vnd.nokia.configuration-message" ".nif" -> "image/x-niff" ".niff" -> "image/x-niff" ".nix" -> "application/x-mix-transfer" ".nsc" -> "application/x-conference" ".nvd" -> "application/x-navidoc" ".o" -> "application/octet-stream" ".oda" -> "application/oda" ".omc" -> "application/x-omc" ".omcd" -> "application/x-omcdatamaker" ".omcr" -> "application/x-omcregerator" ".p" -> "text/x-pascal" ".p10" -> "application/pkcs10" ".p12" -> "application/pkcs-12" ".p7a" -> "application/x-pkcs7-signature" ".p7c" -> "application/pkcs7-mime" ".p7m" -> "application/pkcs7-mime" ".p7r" -> "application/x-pkcs7-certreqresp" ".p7s" -> "application/pkcs7-signature" ".part" -> "application/pro_eng" ".pas" -> "text/pascal" ".pbm" -> "image/x-portable-bitmap" ".pcl" -> "application/vnd.hp-pcl" ".pct" -> "image/x-pict" ".pcx" -> "image/x-pcx" ".pdb" -> "chemical/x-pdb" ".pdf" -> "application/pdf" ".pfunk" -> "audio/make" ".pgm" -> "image/x-portable-graymap" ".pic" -> "image/pict" ".pict" -> "image/pict" ".pkg" -> "application/x-newton-compatible-pkg" ".pko" -> "application/vnd.ms-pki.pko" ".pl" -> "text/plain" ".plx" -> "application/x-pixclscript" ".pm" -> "image/x-xpixmap" ".pm4" -> "application/x-pagemaker" ".pm5" -> "application/x-pagemaker" ".png" -> "image/png" ".pnm" -> "application/x-portable-anymap" ".pot" -> "application/mspowerpoint" ".pov" -> "model/x-pov" ".ppa" -> "application/vnd.ms-powerpoint" ".ppm" -> "image/x-portable-pixmap" ".pps" -> "application/mspowerpoint" ".ppt" -> "application/mspowerpoint" ".ppz" -> "application/mspowerpoint" ".pre" -> "application/x-freelance" ".prt" -> "application/pro_eng" ".ps" -> "application/postscript" ".psd" -> "application/octet-stream" ".pvu" -> "paleovu/x-pv" ".pwz" -> "application/vnd.ms-powerpoint" ".py" -> "text/x-script.phyton" ".pyc" -> "applicaiton/x-bytecode.python" ".qcp" -> "audio/vnd.qcelp" ".qd3" -> "x-world/x-3dmf" ".qd3d" -> "x-world/x-3dmf" ".qif" -> "image/x-quicktime" ".qt" -> "video/quicktime" ".qtc" -> "video/x-qtc" ".qti" -> "image/x-quicktime" ".qtif" -> "image/x-quicktime" ".ra" -> "audio/x-pn-realaudio" ".ram" -> "audio/x-pn-realaudio" ".ras" -> "application/x-cmu-raster" ".rast" -> "image/cmu-raster" ".rexx" -> "text/x-script.rexx" ".rf" -> "image/vnd.rn-realflash" ".rgb" -> "image/x-rgb" ".rm" -> "application/vnd.rn-realmedia" ".rmi" -> "audio/mid" ".rmm" -> "audio/x-pn-realaudio" ".rmp" -> "audio/x-pn-realaudio" ".rng" -> "application/ringing-tones" ".rnx" -> "application/vnd.rn-realplayer" ".roff" -> "application/x-troff" ".rp" -> "image/vnd.rn-realpix" ".rpm" -> "audio/x-pn-realaudio-plugin" ".rt" -> "text/richtext" ".rtf" -> "application/rtf" ".rtx" -> "application/rtf" ".rv" -> "video/vnd.rn-realvideo" ".s" -> "text/x-asm" ".s3m" -> "audio/s3m" ".saveme" -> "application/octet-stream" ".sbk" -> "application/x-tbook" ".scm" -> "application/x-lotusscreencam" ".sdml" -> "text/plain" ".sdp" -> "application/sdp" ".sdr" -> "application/sounder" ".sea" -> "application/sea" ".set" -> "application/set" ".sgm" -> "text/sgml" 
".sgml" -> "text/sgml" ".sh" -> "application/x-bsh" ".shar" -> "application/x-bsh" ".shtml" -> "text/html" ".sid" -> "audio/x-psid" ".sit" -> "application/x-sit" ".skd" -> "application/x-koan" ".skm" -> "application/x-koan" ".skp" -> "application/x-koan" ".skt" -> "application/x-koan" ".sl" -> "application/x-seelogo" ".smi" -> "application/smil" ".smil" -> "application/smil" ".snd" -> "audio/basic" ".sol" -> "application/solids" ".spc" -> "application/x-pkcs7-certificates" ".spl" -> "application/futuresplash" ".spr" -> "application/x-sprite" ".sprite" -> "application/x-sprite" ".src" -> "application/x-wais-source" ".ssi" -> "text/x-server-parsed-html" ".ssm" -> "application/streamingmedia" ".sst" -> "application/vnd.ms-pki.certstore" ".step" -> "application/step" ".stl" -> "application/sla" ".stp" -> "application/step" ".sv4cpio" -> "application/x-sv4cpio" ".sv4crc" -> "application/x-sv4crc" ".svf" -> "image/vnd.dwg" ".svr" -> "application/x-world" ".swf" -> "application/x-shockwave-flash" ".t" -> "application/x-troff" ".talk" -> "text/x-speech" ".tar" -> "application/x-tar" ".tbk" -> "application/toolbook" ".tcl" -> "application/x-tcl" ".tcsh" -> "text/x-script.tcsh" ".tex" -> "application/x-tex" ".texi" -> "application/x-texinfo" ".texinfo" -> "application/x-texinfo" ".text" -> "application/plain" ".tgz" -> "application/gnutar" ".tif" -> "image/tiff" ".tiff" -> "image/tiff" ".tr" -> "application/x-troff" ".tsi" -> "audio/tsp-audio" ".tsp" -> "application/dsptype" ".tsv" -> "text/tab-separated-values" ".turbot" -> "image/florian" ".txt" -> "text/plain" ".uil" -> "text/x-uil" ".uni" -> "text/uri-list" ".unis" -> "text/uri-list" ".unv" -> "application/i-deas" ".uri" -> "text/uri-list" ".uris" -> "text/uri-list" ".ustar" -> "application/x-ustar" ".uu" -> "application/octet-stream" ".uue" -> "text/x-uuencode" ".vcd" -> "application/x-cdlink" ".vcs" -> "text/x-vcalendar" ".vda" -> "application/vda" ".vdo" -> "video/vdo" ".vew" -> "application/groupwise" ".viv" -> "video/vivo" ".vivo" -> "video/vivo" ".vmd" -> "application/vocaltec-media-desc" ".vmf" -> "application/vocaltec-media-file" ".voc" -> "audio/voc" ".vos" -> "video/vosaic" ".vox" -> "audio/voxware" ".vqe" -> "audio/x-twinvq-plugin" ".vqf" -> "audio/x-twinvq" ".vql" -> "audio/x-twinvq-plugin" ".vrml" -> "application/x-vrml" ".vrt" -> "x-world/x-vrt" ".vsd" -> "application/x-visio" ".vst" -> "application/x-visio" ".vsw" -> "application/x-visio" ".w60" -> "application/wordperfect6.0" ".w61" -> "application/wordperfect6.1" ".w6w" -> "application/msword" ".wav" -> "audio/wav" ".wb1" -> "application/x-qpro" ".wbmp" -> "image/vnd.wap.wbmp" ".web" -> "application/vnd.xara" ".wiz" -> "application/msword" ".wk1" -> "application/x-123" ".wmf" -> "windows/metafile" ".wml" -> "text/vnd.wap.wml" ".wmlc" -> "application/vnd.wap.wmlc" ".wmls" -> "text/vnd.wap.wmlscript" ".wmlsc" -> "application/vnd.wap.wmlscriptc" ".word" -> "application/msword" ".wp" -> "application/wordperfect" ".wp5" -> "application/wordperfect" ".wp6" -> "application/wordperfect" ".wpd" -> "application/wordperfect" ".wq1" -> "application/x-lotus" ".wri" -> "application/mswrite" ".wrl" -> "application/x-world" ".wrz" -> "model/vrml" ".wsc" -> "text/scriplet" ".wsrc" -> "application/x-wais-source" ".wtk" -> "application/x-wintalk" ".xbm" -> "image/x-xbitmap" ".xdr" -> "video/x-amt-demorun" ".xgz" -> "xgl/drawing" ".xif" -> "image/vnd.xiff" ".xl" -> "application/excel" ".xla" -> "application/excel" ".xlb" -> "application/excel" ".xlc" -> "application/excel" ".xld" -> 
"application/excel" ".xlk" -> "application/excel" ".xll" -> "application/excel" ".xlm" -> "application/excel" ".xls" -> "application/excel" ".xlt" -> "application/excel" ".xlv" -> "application/excel" ".xlw" -> "application/excel" ".xm" -> "audio/xm" ".xml" -> "application/xml" ".xmz" -> "xgl/movie" ".xpix" -> "application/x-vnd.ls-xpix" ".xpm" -> "image/x-xpixmap" ".x-png" -> "image/png" ".xsr" -> "video/x-amt-showrun" ".xwd" -> "image/x-xwd" ".xyz" -> "chemical/x-pdb" ".z" -> "application/x-compress" ".zip" -> "application/x-compressed" ".zoo" -> "application/octet-stream" ".zsh" -> "text/x-script.zsh" _ -> "application/octet-stream"
nikita-volkov/smtps-gmail
Network/Mail/Client/Gmail.hs
bsd-3-clause
24,843
0
17
6,923
4,523
2,329
2,194
590
449
{-# LANGUAGE RecordWildCards #-} {-# LANGUAGE NamedFieldPuns #-} {-# LANGUAGE BangPatterns #-} -- | TUF security features module Distribution.Server.Features.Security ( initSecurityFeature ) where -- Standard libraries import Control.Exception import Data.Time import qualified Data.ByteString.Lazy.Char8 as BS.Lazy -- Hackage import Distribution.Server.Features.Core import Distribution.Server.Features.Security.Backup import Distribution.Server.Features.Security.Layout import Distribution.Server.Features.Security.ResponseContentTypes import Distribution.Server.Features.Security.State import Distribution.Server.Features.Security.FileInfo import Distribution.Server.Framework import Distribution.Server.Packages.Index import Distribution.Server.Packages.Types -- Hackage security import Hackage.Security.Util.Some import qualified Hackage.Security.Server as Sec import qualified Hackage.Security.Util.Path as Sec data SecurityFeature = SecurityFeature { securityFeatureInterface :: HackageFeature } instance IsHackageFeature SecurityFeature where getFeatureInterface SecurityFeature{..} = securityFeatureInterface initSecurityFeature :: ServerEnv -> IO (CoreFeature -> IO SecurityFeature) initSecurityFeature env = do securityState <- securityStateComponent env (serverStateDir env) return $ \coreFeature -> do -- Update the security state whenever the main package index changes registerHook (indexUpdatedHook coreFeature) $ \_ -> updateIndexFileInfo coreFeature securityState -- Add package metadata whenever a package is added/changed -- -- For package changes we just add a new metadata file to the index, -- which will override any previous one. -- -- TODO: We cannot deal with deletes (they are a problem elsewhere too) -- -- NOTE: this hook is in general _not atomic_ with the package index update related to it. -- It is atomic _only_ in the PackageChangeAdd case. As most other significant cases are no-ops -- at the moment for adding index entries, this should be ok. (The exception is updated tarball -- but this is only used for the mirror client). -- -- If in the future more stuff is registered here, we may need to change code elsewhere -- to ensure that it is added atomically as well... 
registerHook (preIndexUpdateHook coreFeature) $ \chg -> do let (ents,msg) = case chg of PackageChangeAdd pkg -> (indexEntriesFor pkg,"PackageChangeAdd") PackageChangeInfo s _ new -> case s of PackageUpdatedTarball -> (indexEntriesFor new,"PackageChangeInfo:PackageUpdatedTarball") -- .cabal file is not recorded in the TUF metadata -- (until we have author signing anyway) PackageUpdatedCabalFile -> ([],"PackageChangeInfo:PackageUpdatedCabalFile") -- the uploader is not included in the TUF metadata PackageUpdatedUploader -> ([],"PackageChangeInfo:PackageUpdatedUploader") -- upload time is not included in the TUF metadata -- (it is recorded in the MetadataEntry because we use it for -- the tarball construction, but it doesn't affect the contents -- of the TUF metadata) PackageUpdatedUploadTime -> ([],"PackageChangeInfo:PackageUpdatedUploadTime") PackageChangeDelete _ -> ([],"PackageChangeDelete") PackageChangeIndexExtra{} -> ([],"PackageChangeIndexExtra") loginfo maxBound (mconcat ["TUF preIndexUpdateHook invoked (", msg, ", n = ", show (length ents), ")"]) return ents return $ securityFeature env securityState where indexEntriesFor :: PkgInfo -> [TarIndexEntry] indexEntriesFor pkgInfo = case pkgLatestTarball pkgInfo of Nothing -> [] Just (_tarball, (uploadTime, _uploadUserId), latestRev) -> [MetadataEntry (pkgInfoId pkgInfo) latestRev uploadTime] -- | The main security feature -- -- Missing resources (for Phase 2 of the security work): -- -- * Top-level targets.json (currently top-level targets.json is not -- required because it's hardcoded in the clients) -- * Other targets.json files for OOT targets -- -- Note that even once we have author signing, per-package targets.json file -- do not get their own resource, but are instead recorded in the tarball. 
securityFeature :: ServerEnv -> StateComponent AcidState SecurityState -> SecurityFeature securityFeature env securityState = SecurityFeature{..} where securityFeatureInterface = (emptyHackageFeature "security") { featureDesc = "TUF Security" , featureState = [abstractAcidStateComponent securityState] , featureReloadFiles = updateRootMirrorsAndKeys env securityState , featurePostInit = updateRootMirrorsAndKeys env securityState >> setupResignCronJob env securityState , featureResources = [ resourceTimestamp , resourceSnapshot , resourceRoot , resourceMirrors ] } resourceTimestamp = (secResourceAt Sec.repoLayoutTimestamp) { resourceDesc = [(GET, "Get TUF timestamp")] , resourceGet = [("json", serveFromState securityTimestamp)] } resourceSnapshot = (secResourceAt Sec.repoLayoutSnapshot) { resourceDesc = [(GET, "Get TUF snapshot")] , resourceGet = [("json", serveFromState securitySnapshot)] } resourceRoot = (secResourceAt Sec.repoLayoutRoot) { resourceDesc = [(GET, "Get TUF root")] , resourceGet = [("json", serveFromState securityRoot)] } resourceMirrors = (secResourceAt Sec.repoLayoutMirrors) { resourceDesc = [(GET, "Get TUF mirrors")] , resourceGet = [("json", serveFromState securityMirrors)] } serveFromState :: (IsTUFFile a, ToMessage a) => (SecurityStateFiles -> a) -> DynamicPath -> ServerPartE Response serveFromState file _ = do msfiles <- queryState securityState GetSecurityFiles case msfiles of Nothing -> errNotFound "Security files not available" [MText $ "The repository is not currently using TUF " ++ "security so the security files are not " ++ "available."] Just sfiles -> do let tufFile = file sfiles eTag = ETag $ show (tufFileHashMD5 tufFile) -- Higher max-age values result in higher cache hit ratios, but also -- in higher likelihood of cache incoherence problems (and of course in -- higher likelihood of caches being out of date with updates to the -- central server). cacheControl [Public, NoTransform, maxAgeMinutes 1] eTag enableRange return $ toResponse tufFile securityStateComponent :: ServerEnv -> FilePath -> IO (StateComponent AcidState SecurityState) securityStateComponent env stateDir = do let stateFile = stateDir </> "db" </> "TUF" st <- logTiming (serverVerbosity env) "Loaded SecurityState" $ openLocalStateFrom stateFile initialSecurityState return StateComponent { stateDesc = "TUF specific state" , stateHandle = st , getState = query st GetSecurityState , putState = update st .
ReplaceSecurityState , resetState = securityStateComponent env , backupState = \_ -> securityBackup , restoreState = securityRestore } updateIndexFileInfo :: CoreFeature -> StateComponent AcidState SecurityState -> IO () updateIndexFileInfo coreFeature securityState = do IndexTarballInfo{..} <- queryGetIndexTarballInfo coreFeature let !tarGzFileInfo = fileInfo indexTarballIncremGz !tarFileInfo = fileInfo indexTarballIncremUn now <- getCurrentTime updateState securityState (SetTarGzFileInfo tarGzFileInfo tarFileInfo now) updateRootMirrorsAndKeys :: ServerEnv -> StateComponent AcidState SecurityState -> IO () updateRootMirrorsAndKeys env securityState = do mbRootMirrorsAndKeys <- loadRootMirrorsAndKeys env st <- queryState securityState GetSecurityState case mbRootMirrorsAndKeys of Just (root, mirrors, snapshotKey, timestampKey) | anyChange st root mirrors snapshotKey timestampKey -> do loginfo (serverVerbosity env) "Security files changed, updating" now <- getCurrentTime updateState securityState (SetRootMirrorsAndKeys root mirrors snapshotKey timestampKey now) _ -> loginfo (serverVerbosity env) "Security files unchanged" where anyChange SecurityState{ securityStateFiles = Nothing } _ _ _ _ = True anyChange SecurityState{ securityStateFiles = Just SecurityStateFiles{..} } root mirrors snapshotKey timestampKey = securityRoot /= root || securityMirrors /= mirrors || securitySnapshotKey /= snapshotKey || securityTimestampKey /= timestampKey loadRootMirrorsAndKeys :: ServerEnv -> IO (Maybe (Root, Mirrors, Some Sec.Key, Some Sec.Key)) loadRootMirrorsAndKeys env = do anyExist <- (\s t r m -> s || t || r || m) <$> Sec.doesFileExist (onDiskSnapshotKey env) <*> Sec.doesFileExist (onDiskTimestampKey env) <*> Sec.doesFileExist (onDiskRoot env) <*> Sec.doesFileExist (onDiskMirrors env) if not anyExist then return Nothing else do snapshotKey <- readKey (onDiskSnapshotKey env) timestampKey <- readKey (onDiskTimestampKey env) root <- Root <$> getTUFFile (onDiskRoot env) mirrors <- Mirrors <$> getTUFFile (onDiskMirrors env) --TODO: check sanity before updating return (Just (root, mirrors, snapshotKey, timestampKey)) setupResignCronJob :: ServerEnv -> StateComponent AcidState SecurityState -> IO () setupResignCronJob env securityState = addCronJob (serverCron env) CronJob { cronJobName = "Resign TUF data" , cronJobFrequency = DailyJobFrequency , cronJobOneShot = False , cronJobAction = do now <- getCurrentTime updateState securityState (ResignSnapshotAndTimestamp maxAge now) } where maxAge = 60 * 60 * 23 -- Don't resign if unchanged and younger than ~1 day readKey :: Sec.Path Sec.Absolute -> IO (Some Sec.Key) readKey fp = do mKey <- Sec.readJSON_NoKeys_NoLayout fp case mKey of Left err -> throwIO err Right key -> return key getTUFFile :: Sec.Path Sec.Absolute -> IO TUFFile getTUFFile file = Sec.withFile file Sec.ReadMode $ \h -> evaluate . mkTUFFile =<< BS.Lazy.hGetContents h
edsko/hackage-server
Distribution/Server/Features/Security.hs
bsd-3-clause
11,319
0
25
3,149
2,063
1,096
967
179
8
module Network.Orchid.Core.Format where import Data.ByteString.Lazy (ByteString) import Data.FileStore (FileStore) -- Formats produce proper UTF-8 text or binary docs. (TODO: ascii for tex?) data Output = TextOutput String | BinaryOutput ByteString -- Wiki format description data type. data WikiFormat = WikiFormat { postfix :: String , mime :: String , handler :: FileStore -> FilePath -- Working dir. -> FilePath -- Document name. -> String -- Contents. -> IO Output }
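-- A minimal sketch, not part of the original module: a trivial format value
-- that returns the stored contents unchanged as text. 'plainFormat' is a
-- hypothetical name used only to illustrate how the WikiFormat record is
-- meant to be filled in.
plainFormat :: WikiFormat
plainFormat = WikiFormat
  { postfix = ".txt"
  , mime    = "text/plain"
  , handler = \_store _workDir _docName contents ->
      return (TextOutput contents)
  }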
sebastiaanvisser/orchid
src/Network/Orchid/Core/Format.hs
bsd-3-clause
555
0
13
153
94
58
36
15
0
import System import MyApp1.TokyoCabinet main :: IO () main = do args <- getArgs case head args of "-h" -> (new :: IO HDB) >>= main' "-b" -> (new :: IO BDB) >>= main' "-f" -> (new :: IO FDB) >>= main' _ -> putStrLn "./myapp1 [-h|-b|-f]" where main' tc = do let ext = defaultExtension tc v <- flip runTCM tc $ do open ("foo" ++ ext) [OWRITER, OCREAT] put "100" "bar" putcat "100" "bar" v <- get "100" close return v print (v :: Maybe String)
tom-lpsd/tokyocabinet-haskell
examples/myapp1.hs
bsd-3-clause
639
0
15
286
216
102
114
19
4
-- | Interval-based implementation of preview polling -- module Hakyll.Web.Preview.Poll ( previewPoll ) where import Control.Applicative ((<$>)) import Control.Concurrent (threadDelay) import Control.Monad (filterM) import System.Time (getClockTime) import System.Directory (getModificationTime, doesFileExist) import Hakyll.Core.Configuration -- | A preview thread that periodically recompiles the site. -- previewPoll :: HakyllConfiguration -- ^ Configuration -> IO [FilePath] -- ^ Updating action -> IO () -- ^ Can block forever previewPoll _ update = do time <- getClockTime loop time =<< update where delay = 1000000 loop time files = do threadDelay delay files' <- filterM doesFileExist files filesTime <- case files' of [] -> return time _ -> maximum <$> mapM getModificationTime files' if filesTime > time || files' /= files then loop filesTime =<< update else loop time files'
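-- A minimal usage sketch, not part of the original module: runs the poll loop
-- with a dummy update action. 'examplePoll' is a hypothetical name, and the
-- configuration value is assumed to be built elsewhere.
examplePoll :: HakyllConfiguration -> IO ()
examplePoll conf = previewPoll conf $ do
    putStrLn "Recompiling site..."
    return ["_site/index.html", "css/default.css"]  -- files to watch for changes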
sol/hakyll
src/Hakyll/Web/Preview/Poll.hs
bsd-3-clause
1,046
0
14
285
239
128
111
24
3
-- ------------------------------------------------------------ {- | Module : Yuuko.Text.XML.HXT.DTDValidation.TypeDefs Copyright : Copyright (C) 2008 Uwe Schmidt License : MIT Maintainer : Uwe Schmidt ([email protected]) Stability : experimental Portability: portable This module provides functions for validating the DTD of XML documents represented as XmlTree. Unlike other popular XML validation tools the validation process returns a list of errors instead of aborting after the first error was found. Unlike validation of the document, the DTD branch is traversed four times: - Validation of Notations - Validation of Unparsed Entities - Validation of Element declarations - Validation of Attribute declarations -} -- ------------------------------------------------------------ module Yuuko.Text.XML.HXT.DTDValidation.DTDValidation ( removeDoublicateDefs , validateDTD ) where import Yuuko.Text.XML.HXT.DTDValidation.TypeDefs import Yuuko.Text.XML.HXT.DTDValidation.AttributeValueValidation -- | -- Validate a DTD. -- -- - returns : a functions which takes the DTD subset of the XmlTree, checks -- if the DTD is valid and returns a list of errors validateDTD :: XmlArrow validateDTD -- dtdPart = isDTDDoctype `guards` ( listA getChildren >>> ( validateParts $<< (getNotationNames &&& getElemNames) ) ) where validateParts notationNames elemNames = validateNotations <+> validateEntities notationNames <+> validateElements elemNames <+> validateAttributes elemNames notationNames getNotationNames :: LA [XmlTree] [String] getNotationNames = listA $ unlistA >>> isDTDNotation >>> getDTDAttrValue a_name getElemNames :: LA [XmlTree] [String] getElemNames = listA $ unlistA >>> isDTDElement >>> getDTDAttrValue a_name -- ------------------------------------------------------------ checkName :: String -> SLA [String] XmlTree XmlTree -> SLA [String] XmlTree XmlTree checkName name msg = ifA ( getState >>> isA (name `elem`) ) msg (nextState (name:) >>> none) -- ------------------------------------------------------------ -- | -- Validation of Notations, checks if all notation names are unique. -- Validity constraint: Unique Notation Name (4.7 \/ p.44 in Spec) -- -- * 1.parameter dtdPart : the children of the @DOCTYPE@ node -- -- - returns : a list of errors validateNotations :: LA XmlTrees XmlTree validateNotations = fromSLA [] ( unlistA >>> isDTDNotation >>> (checkForUniqueNotation $< getDTDAttrl) ) where checkForUniqueNotation :: Attributes -> SLA [String] XmlTree XmlTree checkForUniqueNotation al = checkName name $ err ( "Notation "++ show name ++ " was already specified." ) where name = dtd_name al -- | -- Validation of Entities. -- -- 1. Issues a warning if entities are declared multiple times. -- -- Optional warning: (4.2 \/ p.35 in Spec) -- -- -- 2. Validates that a notation is declared for an unparsed entity. 
-- -- Validity constraint: Notation Declared (4.2.2 \/ p.36 in Spec) -- -- * 1.parameter dtdPart : the children of the @DOCTYPE@ node -- -- - 2.parameter notationNames : list of all notation names declared in the DTD -- -- - returns : a list of errors validateEntities :: [String] -> LA XmlTrees XmlTree validateEntities notationNames = ( fromSLA [] ( unlistA >>> isDTDEntity >>> (checkForUniqueEntity $< getDTDAttrl) ) ) <+> ( unlistA >>> isUnparsedEntity >>> (checkNotationDecl $< getDTDAttrl) ) where -- Check if entities are declared multiple times checkForUniqueEntity :: Attributes -> SLA [String] XmlTree XmlTree checkForUniqueEntity al = checkName name $ warn ( "Entity "++ show name ++ " was already specified. " ++ "First declaration will be used." ) where name = dtd_name al -- Find unparsed entities for which no notation is specified checkNotationDecl :: Attributes -> XmlArrow checkNotationDecl al | notationName `elem` notationNames = none | otherwise = err ( "The notation " ++ show notationName ++ " must be declared " ++ "when referenced in the unparsed entity declaration for " ++ show upEntityName ++ "." ) where notationName = lookup1 k_ndata al upEntityName = dtd_name al -- | -- Validation of Element declarations. -- -- 1. Validates that an element is not declared multiple times. -- -- Validity constraint: Unique Element Type Declaration (3.2 \/ p.21 in Spec) -- -- -- 2. Validates that an element name only appears once in a mixed-content declaration. -- -- Validity constraint: No Duplicate Types (3.2 \/ p.21 in Spec) -- -- -- 3. Issues a warning if an element mentioned in a content model is not declared in the -- DTD. -- -- Optional warning: (3.2 \/ p.21 in Spec) -- -- * 1.parameter dtdPart : the children of the @DOCTYPE@ node -- -- - 2.parameter elemNames : list of all element names declared in the DTD -- -- - returns : a list of errors validateElements :: [String] -> LA XmlTrees XmlTree validateElements elemNames -- dtdPart = ( fromSLA [] ( unlistA >>> isDTDElement >>> (checkForUniqueElement $< getDTDAttrl) ) ) <+> ( unlistA >>> isMixedContentElement >>> (checkMixedContent $< getDTDAttrl) ) <+> ( unlistA >>> isDTDElement >>> (checkContentModel elemNames $< getDTDAttrl) ) where -- Validates that an element is not declared multiple times checkForUniqueElement :: Attributes -> SLA [String] XmlTree XmlTree checkForUniqueElement al = checkName name $ err ( "Element type " ++ show name ++ " must not be declared more than once." ) where name = dtd_name al -- Validates that an element name only appears once in a mixed-content declaration checkMixedContent :: Attributes -> XmlArrow checkMixedContent al = fromSLA [] ( getChildren >>> getChildren >>> isDTDName >>> (check $< getDTDAttrl) ) where elemName = dtd_name al check al' = checkName name $ err ( "The element type " ++ show name ++ " was already specified in the mixed-content model of the element declaration " ++ show elemName ++ "." ) where name = dtd_name al' -- Issues a warning if an element mentioned in a content model is not -- declared in the DTD. 
checkContentModel :: [String] -> Attributes -> XmlArrow checkContentModel names al | cm `elem` [v_children, v_mixed] = getChildren >>> checkContent | otherwise = none where elemName = dtd_name al cm = dtd_type al checkContent :: XmlArrow checkContent = choiceA [ isDTDName :-> ( checkName' $< getDTDAttrl ) , isDTDContent :-> ( getChildren >>> checkContent ) , this :-> none ] where checkName' al' | childElemName `elem` names = none | otherwise = warn ( "The element type "++ show childElemName ++ ", used in content model of element "++ show elemName ++ ", is not declared." ) where childElemName = dtd_name al' -- | -- Validation of Attribute declarations. -- -- (1) Issues a warning if an attribute is declared for an element type not itself -- declared. -- -- Optional warning: (3.3 \/ p. 24 in Spec) -- -- -- 2. Issues a warning if more than one definition is provided for the same -- attribute of a given element type. First declaration is binding, later -- definitions are ignored. -- -- Optional warning: (3.3 \/ p.24 in Spec) -- -- -- 3. Issues a warning if the same Nmtoken occurs more than once in enumerated -- attribute types of a single element type. -- -- Optional warning: (3.3.1 \/ p.27 in Spec) -- -- -- 4. Validates that an element type has not more than one ID attribute defined. -- -- Validity constraint: One ID per Element Type (3.3.1 \/ p.26 in Spec) -- -- -- 5. Validates that an element type has not more than one NOTATION attribute defined. -- -- Validity constraint: One Notation per Element Type (3.3.1 \/ p.27 in Spec) -- -- -- 6. Validates that an ID attribute has the type #IMPLIED or #REQUIRED. -- -- Validity constraint: ID Attribute Default (3.3.1 \/ p.26 in Spec) -- -- -- 7. Validates that all referenced notations are declared. -- -- Validity constraint: Notation Attributes (3.3.1 \/ p.27 in Spec) -- -- -- 8. Validates that notations are not declared for EMPTY elements. -- -- Validity constraint: No Notation on Empty Element (3.3.1 \/p.27 in Spec) -- -- -- 9. Validates that the default value matches the lexical constraints of its type. -- -- Validity constraint: Attribute default legal (3.3.2 \/ p.28 in Spec) -- -- -- * 1.parameter dtdPart : the children of the @DOCTYPE@ node -- -- - 2.parameter elemNames : list of all element names declared in the DTD -- -- - 3.parameter notationNames : list of all notation names declared in the DTD -- -- - returns : a list of errors validateAttributes :: [String] -> [String] -> LA XmlTrees XmlTree validateAttributes elemNames notationNames = -- 1. Find attributes for which no elements are declared ( runCheck this (checkDeclaredElements elemNames) ) <+> -- 2. Find attributes which are declared more than once ( runNameCheck this checkForUniqueAttributeDeclaration ) <+> -- 3. Find enumerated attribute types whose nmtokens are declared more than once ( runCheck (isEnumAttrType `orElse` isNotationAttrType) checkEnumeratedTypes ) <+> -- 4. Validate that there exists only one ID attribute for an element ( runNameCheck isIdAttrType checkForUniqueId ) <+> -- 5. Validate that there exists only one NOTATION attribute for an element ( runNameCheck isNotationAttrType checkForUniqueNotation ) <+> -- 6. Validate that ID attributes have the type #IMPLIED or #REQUIRED ( runCheck isIdAttrType checkIdKindConstraint ) <+> -- 7. Validate that all referenced notations are declared ( runCheck isNotationAttrType (checkNotationDeclaration notationNames) ) <+> -- 8.
Validate that notations are not declared for EMPTY elements ( checkNoNotationForEmptyElements $< listA ( unlistA >>> isEmptyElement >>> getDTDAttrValue a_name ) ) <+> -- 9. Validate that the default value matches the lexical constraints of it's type ( checkDefaultValueTypes $< this ) where -- ------------------------------------------------------------ -- control structures runCheck select check = unlistA >>> isDTDAttlist >>> select >>> (check $< getDTDAttrl) runNameCheck select check = fromSLA [] $ runCheck select check -------------------------------------------------------------------------- -- 1. Find attributes for which no elements are declared checkDeclaredElements :: [String] -> Attributes -> XmlArrow checkDeclaredElements elemNames' al | en `elem` elemNames' = none | otherwise = warn ( "The element type \""++ en ++ "\" used in dclaration "++ "of attribute \""++ an ++"\" is not declared." ) where en = dtd_name al an = dtd_value al -------------------------------------------------------------------------- -- 2. Find attributes which are declared more than once checkForUniqueAttributeDeclaration :: Attributes -> SLA [String] XmlTree XmlTree checkForUniqueAttributeDeclaration al = checkName name $ warn ( "Attribute \""++ aname ++"\" for element type \""++ ename ++"\" is already declared. First "++ "declaration will be used." ) where ename = dtd_name al aname = dtd_value al name = ename ++ "|" ++ aname -------------------------------------------------------------------------- -- 3. Find enumerated attribute types which nmtokens are declared more than once checkEnumeratedTypes :: Attributes -> XmlArrow checkEnumeratedTypes al = fromSLA [] ( getChildren >>> isDTDName >>> (checkForUniqueType $< getDTDAttrl) ) where checkForUniqueType :: Attributes -> SLA [String] XmlTree XmlTree checkForUniqueType al' = checkName nmtoken $ warn ( "Nmtoken \""++ nmtoken ++"\" should not "++ "occur more than once in attribute \""++ dtd_value al ++ "\" for element \""++ dtd_name al ++ "\"." ) where nmtoken = dtd_name al' -------------------------------------------------------------------------- -- 4. Validate that there exists only one ID attribute for an element checkForUniqueId :: Attributes -> SLA [String] XmlTree XmlTree checkForUniqueId al = checkName ename $ err ( "Element \""++ ename ++ "\" already has attribute of type "++ "ID, another attribute \""++ dtd_value al ++ "\" of type ID is "++ "not permitted." ) where ename = dtd_name al -------------------------------------------------------------------------- -- 5. Validate that there exists only one NOTATION attribute for an element checkForUniqueNotation :: Attributes -> SLA [String] XmlTree XmlTree checkForUniqueNotation al = checkName ename $ err ( "Element \""++ ename ++ "\" already has attribute of type "++ "NOTATION, another attribute \""++ dtd_value al ++ "\" of type NOTATION "++ "is not permitted." ) where ename = dtd_name al -------------------------------------------------------------------------- -- 6. Validate that ID attributes have the type #IMPLIED or #REQUIRED checkIdKindConstraint :: Attributes -> XmlArrow checkIdKindConstraint al | attKind `elem` [k_implied, k_required] = none | otherwise = err ( "ID attribute \""++ dtd_value al ++"\" must have a declared default "++ "of \"#IMPLIED\" or \"REQUIRED\"") where attKind = dtd_kind al -------------------------------------------------------------------------- -- 7. 
Validate that all referenced notations are declared checkNotationDeclaration :: [String] -> Attributes -> XmlArrow checkNotationDeclaration notations al = getChildren >>> isDTDName >>> (checkNotations $< getDTDAttrl) where checkNotations :: Attributes -> XmlArrow checkNotations al' | notation `elem` notations = none | otherwise = err ( "The notation \""++ notation ++"\" must be declared when "++ "referenced in the notation type list for attribute \""++ dtd_value al ++ "\" of element \""++ dtd_name al ++"\"." ) where notation = dtd_name al' -------------------------------------------------------------------------- -- 8. Validate that notations are not declared for EMPTY elements checkNoNotationForEmptyElements :: [String] -> LA XmlTrees XmlTree checkNoNotationForEmptyElements emptyElems = unlistA >>> isDTDAttlist >>> isNotationAttrType >>> (checkNoNotationForEmptyElement $< getDTDAttrl) where checkNoNotationForEmptyElement :: Attributes -> XmlArrow checkNoNotationForEmptyElement al | ename `elem` emptyElems = err ( "Attribute \""++ dtd_value al ++"\" of type NOTATION must not be "++ "declared on the element \""++ ename ++"\" declared EMPTY." ) | otherwise = none where ename = dtd_name al -------------------------------------------------------------------------- -- 9. Validate that default values meet the lexical constraints of the attribute types checkDefaultValueTypes :: XmlTrees -> LA XmlTrees XmlTree checkDefaultValueTypes dtdPart' = unlistA >>> isDTDAttlist >>> isDefaultAttrKind >>> (checkAttributeValue dtdPart' $< this) -- ------------------------------------------------------------ -- | -- Removes doublicate declarations from the DTD, which first declaration is -- binding. This is the case for ATTLIST and ENTITY declarations. -- -- - returns : A function that replaces the children of DOCTYPE nodes by a list -- where all multiple declarations are removed. removeDoublicateDefs :: XmlArrow removeDoublicateDefs = replaceChildren ( fromSLA [] ( getChildren >>> choiceA [ isDTDAttlist :-> (removeDoubleAttlist $< getDTDAttrl) , isDTDEntity :-> (removeDoubleEntity $< getDTDAttrl) , this :-> this ] ) ) `when` isDTDDoctype where checkName' n' = ifA ( getState >>> isA (n' `elem`) ) none (this >>> perform (nextState (n':))) removeDoubleAttlist :: Attributes -> SLA [String] XmlTree XmlTree removeDoubleAttlist al = checkName' elemAttr where elemAttr = elemName ++ "|" ++ attrName attrName = dtd_value al elemName = dtd_name al removeDoubleEntity :: Attributes -> SLA [String] XmlTree XmlTree removeDoubleEntity al = checkName' (dtd_name al) -- ------------------------------------------------------------
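-- |
-- A minimal usage sketch (illustration only, not part of this module):
-- assuming @XmlArrow@ is the list arrow over @XmlTree@ defined in the
-- TypeDefs module and a @doctype@ subtree is at hand, the validation errors
-- can be collected with @runLA@ from the underlying list-arrow library;
-- 'dtdErrors' is a hypothetical name.
--
-- > dtdErrors :: XmlTree -> XmlTrees
-- > dtdErrors doctype = runLA validateDTD doctype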
nfjinjing/yuuko
src/Yuuko/Text/XML/HXT/DTDValidation/DTDValidation.hs
bsd-3-clause
17,540
442
21
4,392
2,825
2,101
724
286
1
module Perceptron where import qualified Prelude andb :: Prelude.Bool -> Prelude.Bool -> Prelude.Bool andb b1 b2 = case b1 of { Prelude.True -> b2; Prelude.False -> Prelude.False} negb :: Prelude.Bool -> Prelude.Bool negb = (Prelude.not) data Nat = O | S Nat data Option a = Some a | None snd :: ((,) a1 a2) -> a2 snd p = case p of { (,) _ y -> y} data Comparison = Eq | Lt | Gt compOpp :: Comparison -> Comparison compOpp r = case r of { Eq -> Eq; Lt -> Gt; Gt -> Lt} add :: Nat -> Nat -> Nat add n m = case n of { O -> m; S p -> S (add p m)} data Positive = XI Positive | XO Positive | XH data Z = Z0 | Zpos Positive | Zneg Positive eqb :: Prelude.Bool -> Prelude.Bool -> Prelude.Bool eqb = (Prelude.==) succ :: Positive -> Positive succ x = case x of { XI p -> XO (succ p); XO p -> XI p; XH -> XO XH} add0 :: Positive -> Positive -> Positive add0 x y = case x of { XI p -> case y of { XI q -> XO (add_carry p q); XO q -> XI (add0 p q); XH -> XO (succ p)}; XO p -> case y of { XI q -> XI (add0 p q); XO q -> XO (add0 p q); XH -> XI p}; XH -> case y of { XI q -> XO (succ q); XO q -> XI q; XH -> XO XH}} add_carry :: Positive -> Positive -> Positive add_carry x y = case x of { XI p -> case y of { XI q -> XI (add_carry p q); XO q -> XO (add_carry p q); XH -> XI (succ p)}; XO p -> case y of { XI q -> XO (add_carry p q); XO q -> XI (add0 p q); XH -> XO (succ p)}; XH -> case y of { XI q -> XI (succ q); XO q -> XO (succ q); XH -> XI XH}} pred_double :: Positive -> Positive pred_double x = case x of { XI p -> XI (XO p); XO p -> XI (pred_double p); XH -> XH} data Mask = IsNul | IsPos Positive | IsNeg succ_double_mask :: Mask -> Mask succ_double_mask x = case x of { IsNul -> IsPos XH; IsPos p -> IsPos (XI p); IsNeg -> IsNeg} double_mask :: Mask -> Mask double_mask x = case x of { IsPos p -> IsPos (XO p); x0 -> x0} double_pred_mask :: Positive -> Mask double_pred_mask x = case x of { XI p -> IsPos (XO (XO p)); XO p -> IsPos (XO (pred_double p)); XH -> IsNul} sub_mask :: Positive -> Positive -> Mask sub_mask x y = case x of { XI p -> case y of { XI q -> double_mask (sub_mask p q); XO q -> succ_double_mask (sub_mask p q); XH -> IsPos (XO p)}; XO p -> case y of { XI q -> succ_double_mask (sub_mask_carry p q); XO q -> double_mask (sub_mask p q); XH -> IsPos (pred_double p)}; XH -> case y of { XH -> IsNul; _ -> IsNeg}} sub_mask_carry :: Positive -> Positive -> Mask sub_mask_carry x y = case x of { XI p -> case y of { XI q -> succ_double_mask (sub_mask_carry p q); XO q -> double_mask (sub_mask p q); XH -> IsPos (pred_double p)}; XO p -> case y of { XI q -> double_mask (sub_mask_carry p q); XO q -> succ_double_mask (sub_mask_carry p q); XH -> double_pred_mask p}; XH -> IsNeg} sub :: Positive -> Positive -> Positive sub x y = case sub_mask x y of { IsPos z -> z; _ -> XH} mul :: Positive -> Positive -> Positive mul x y = case x of { XI p -> add0 y (XO (mul p y)); XO p -> XO (mul p y); XH -> y} size_nat :: Positive -> Nat size_nat p = case p of { XI p0 -> S (size_nat p0); XO p0 -> S (size_nat p0); XH -> S O} compare_cont :: Comparison -> Positive -> Positive -> Comparison compare_cont r x y = case x of { XI p -> case y of { XI q -> compare_cont r p q; XO q -> compare_cont Gt p q; XH -> Gt}; XO p -> case y of { XI q -> compare_cont Lt p q; XO q -> compare_cont r p q; XH -> Gt}; XH -> case y of { XH -> r; _ -> Lt}} compare :: Positive -> Positive -> Comparison compare = compare_cont Eq ggcdn :: Nat -> Positive -> Positive -> (,) Positive ((,) Positive Positive) ggcdn n a b = case n of { O -> (,) XH ((,) a b); S n0 -> case a of 
{ XI a' -> case b of { XI b' -> case compare a' b' of { Eq -> (,) a ((,) XH XH); Lt -> case ggcdn n0 (sub b' a') a of { (,) g p -> case p of { (,) ba aa -> (,) g ((,) aa (add0 aa (XO ba)))}}; Gt -> case ggcdn n0 (sub a' b') b of { (,) g p -> case p of { (,) ab bb -> (,) g ((,) (add0 bb (XO ab)) bb)}}}; XO b0 -> case ggcdn n0 a b0 of { (,) g p -> case p of { (,) aa bb -> (,) g ((,) aa (XO bb))}}; XH -> (,) XH ((,) a XH)}; XO a0 -> case b of { XI _ -> case ggcdn n0 a0 b of { (,) g p -> case p of { (,) aa bb -> (,) g ((,) (XO aa) bb)}}; XO b0 -> case ggcdn n0 a0 b0 of { (,) g p -> (,) (XO g) p}; XH -> (,) XH ((,) a XH)}; XH -> (,) XH ((,) XH b)}} ggcd :: Positive -> Positive -> (,) Positive ((,) Positive Positive) ggcd a b = ggcdn (add (size_nat a) (size_nat b)) a b double :: Z -> Z double x = case x of { Z0 -> Z0; Zpos p -> Zpos (XO p); Zneg p -> Zneg (XO p)} succ_double :: Z -> Z succ_double x = case x of { Z0 -> Zpos XH; Zpos p -> Zpos (XI p); Zneg p -> Zneg (pred_double p)} pred_double0 :: Z -> Z pred_double0 x = case x of { Z0 -> Zneg XH; Zpos p -> Zpos (pred_double p); Zneg p -> Zneg (XI p)} pos_sub :: Positive -> Positive -> Z pos_sub x y = case x of { XI p -> case y of { XI q -> double (pos_sub p q); XO q -> succ_double (pos_sub p q); XH -> Zpos (XO p)}; XO p -> case y of { XI q -> pred_double0 (pos_sub p q); XO q -> double (pos_sub p q); XH -> Zpos (pred_double p)}; XH -> case y of { XI q -> Zneg (XO q); XO q -> Zneg (pred_double q); XH -> Z0}} add1 :: Z -> Z -> Z add1 x y = case x of { Z0 -> y; Zpos x' -> case y of { Z0 -> x; Zpos y' -> Zpos (add0 x' y'); Zneg y' -> pos_sub x' y'}; Zneg x' -> case y of { Z0 -> x; Zpos y' -> pos_sub y' x'; Zneg y' -> Zneg (add0 x' y')}} opp :: Z -> Z opp x = case x of { Z0 -> Z0; Zpos x0 -> Zneg x0; Zneg x0 -> Zpos x0} mul0 :: Z -> Z -> Z mul0 x y = case x of { Z0 -> Z0; Zpos x' -> case y of { Z0 -> Z0; Zpos y' -> Zpos (mul x' y'); Zneg y' -> Zneg (mul x' y')}; Zneg x' -> case y of { Z0 -> Z0; Zpos y' -> Zneg (mul x' y'); Zneg y' -> Zpos (mul x' y')}} compare0 :: Z -> Z -> Comparison compare0 x y = case x of { Z0 -> case y of { Z0 -> Eq; Zpos _ -> Lt; Zneg _ -> Gt}; Zpos x' -> case y of { Zpos y' -> compare x' y'; _ -> Gt}; Zneg x' -> case y of { Zneg y' -> compOpp (compare x' y'); _ -> Lt}} sgn :: Z -> Z sgn z = case z of { Z0 -> Z0; Zpos _ -> Zpos XH; Zneg _ -> Zneg XH} leb :: Z -> Z -> Prelude.Bool leb x y = case compare0 x y of { Gt -> Prelude.False; _ -> Prelude.True} abs :: Z -> Z abs z = case z of { Zneg p -> Zpos p; x -> x} to_pos :: Z -> Positive to_pos z = case z of { Zpos p -> p; _ -> XH} ggcd0 :: Z -> Z -> (,) Z ((,) Z Z) ggcd0 a b = case a of { Z0 -> (,) (abs b) ((,) Z0 (sgn b)); Zpos a0 -> case b of { Z0 -> (,) (abs a) ((,) (sgn a) Z0); Zpos b0 -> case ggcd a0 b0 of { (,) g p -> case p of { (,) aa bb -> (,) (Zpos g) ((,) (Zpos aa) (Zpos bb))}}; Zneg b0 -> case ggcd a0 b0 of { (,) g p -> case p of { (,) aa bb -> (,) (Zpos g) ((,) (Zpos aa) (Zneg bb))}}}; Zneg a0 -> case b of { Z0 -> (,) (abs a) ((,) (sgn a) Z0); Zpos b0 -> case ggcd a0 b0 of { (,) g p -> case p of { (,) aa bb -> (,) (Zpos g) ((,) (Zneg aa) (Zpos bb))}}; Zneg b0 -> case ggcd a0 b0 of { (,) g p -> case p of { (,) aa bb -> (,) (Zpos g) ((,) (Zneg aa) (Zneg bb))}}}} zeq_bool :: Z -> Z -> Prelude.Bool zeq_bool x y = case compare0 x y of { Eq -> Prelude.True; _ -> Prelude.False} data Q = Qmake Z Positive qnum :: Q -> Z qnum q = case q of { Qmake qnum0 _ -> qnum0} qden :: Q -> Positive qden q = case q of { Qmake _ qden0 -> qden0} qeq_bool :: Q -> Q -> Prelude.Bool qeq_bool x y 
= zeq_bool (mul0 (qnum x) (Zpos (qden y))) (mul0 (qnum y) (Zpos (qden x))) qle_bool :: Q -> Q -> Prelude.Bool qle_bool x y = leb (mul0 (qnum x) (Zpos (qden y))) (mul0 (qnum y) (Zpos (qden x))) qplus :: Q -> Q -> Q qplus x y = Qmake (add1 (mul0 (qnum x) (Zpos (qden y))) (mul0 (qnum y) (Zpos (qden x)))) (mul (qden x) (qden y)) qmult :: Q -> Q -> Q qmult x y = Qmake (mul0 (qnum x) (qnum y)) (mul (qden x) (qden y)) qred :: Q -> Q qred q = case q of { Qmake q1 q2 -> case snd (ggcd0 q1 (Zpos q2)) of { (,) r1 r2 -> Qmake r1 (to_pos r2)}} map :: (a1 -> a2) -> Nat -> (([]) a1) -> ([]) a2 map = (\g _ l -> Prelude.map g l) map2 :: (a1 -> a2 -> a3) -> Nat -> (([]) a1) -> (([]) a2) -> ([]) a3 map2 = (\g _ l1 l2 -> Prelude.map (\(x,y) -> g x y) (Prelude.zip l1 l2)) fold_left :: (a2 -> a1 -> a2) -> a2 -> Nat -> (([]) a1) -> a2 fold_left = (\g a _ l -> Prelude.foldl g a l) qplus0 :: Q -> Q -> Q qplus0 a b = qred (qplus a b) qmult0 :: Q -> Q -> Q qmult0 a b = qred (qmult a b) type Qvec = ([]) Q qvec_plus :: Nat -> Qvec -> Qvec -> ([]) Q qvec_plus n v1 v2 = map2 qplus0 n v1 v2 qvec_dot :: Nat -> Qvec -> Qvec -> Q qvec_dot n v1 v2 = fold_left qplus0 (Qmake Z0 XH) n (map2 qmult0 n v1 v2) class0 :: Q -> Prelude.Bool class0 i = qle_bool (Qmake Z0 XH) i correct_class :: Q -> Prelude.Bool -> Prelude.Bool correct_class i l = andb (eqb l (class0 i)) (negb (qeq_bool i (Qmake Z0 XH))) qvec_mult_class :: Nat -> Prelude.Bool -> Qvec -> Qvec qvec_mult_class n l f = case l of { Prelude.True -> f; Prelude.False -> map (qmult0 (Qmake (opp (Zpos XH)) XH)) n f} consb :: Nat -> Qvec -> ([]) Q consb n v = (\a _ v -> a : v) (Qmake (Zpos XH) XH) n v inner_perceptron :: Nat -> (([]) ((,) Qvec Prelude.Bool)) -> Qvec -> Option Qvec inner_perceptron n t w = case t of { [] -> None; (:) p t' -> case p of { (,) f l -> case correct_class (qvec_dot (S n) w (consb n f)) l of { Prelude.True -> inner_perceptron n t' w; Prelude.False -> case inner_perceptron n t' (qvec_plus (S n) w (qvec_mult_class (S n) l (consb n f))) of { Some w' -> Some w'; None -> Some (qvec_plus (S n) w (qvec_mult_class (S n) l (consb n f)))}}}} perceptron :: Nat -> Nat -> (([]) ((,) Qvec Prelude.Bool)) -> Qvec -> Option Qvec perceptron n e t w = case e of { O -> None; S e' -> case inner_perceptron n t w of { Some w' -> perceptron n e' t w'; None -> Some w}} inner_perceptron_MCE :: Nat -> (([]) ((,) Qvec Prelude.Bool)) -> Qvec -> Option ((,) (([]) ((,) Qvec Prelude.Bool)) Qvec) inner_perceptron_MCE n t w = case t of { [] -> None; (:) p t' -> case p of { (,) f l -> case correct_class (qvec_dot (S n) w (consb n f)) l of { Prelude.True -> inner_perceptron_MCE n t' w; Prelude.False -> case inner_perceptron_MCE n t' (qvec_plus (S n) w (qvec_mult_class (S n) l (consb n f))) of { Some p0 -> case p0 of { (,) l0 w' -> Some ((,) ((:) ((,) f l) l0) w')}; None -> Some ((,) ((:) ((,) f l) []) (qvec_plus (S n) w (qvec_mult_class (S n) l (consb n f))))}}}} gas :: (Nat -> a1) -> a1 gas = (\f -> let infiniteGas = S infiniteGas in f infiniteGas) fueled_perceptron :: Nat -> Nat -> (([]) ((,) Qvec Prelude.Bool)) -> Qvec -> Option Qvec fueled_perceptron n _ t w = gas (\fuel -> perceptron n fuel t w)
tm507211/CoqPerceptron
Benchmarks/hs/Perceptron.hs
bsd-3-clause
11,938
22
29
4,121
6,408
3,338
3,070
477
10
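The extracted perceptron above is called entirely through the Coq-derived representations (Nat, Positive, Z, Q, Option), so driving it from hand-written Haskell means building those values explicitly. Below is a minimal usage sketch, not part of the original file: it assumes the Perceptron module above (including the Nat and Option constructors defined earlier in it) is in scope together with the ordinary Prelude, and uses a made-up one-dimensional training set (feature +1 labelled True, feature -1 labelled False), which is linearly separable and therefore terminates despite the unbounded gas.

-- Rational constants in the extracted Q representation (numerator Z, denominator Positive).
qZero, qOne, qMinusOne :: Q
qZero     = Qmake Z0 XH              -- 0/1
qOne      = Qmake (Zpos XH) XH       -- 1/1
qMinusOne = Qmake (Zneg XH) XH       -- (-1)/1

-- Toy training set: one feature per sample.
toySamples :: [(Qvec, Bool)]
toySamples = [([qOne], True), ([qMinusOne], False)]

-- Train with one feature (n = S O); the weight vector carries one extra slot for the
-- bias that consb prepends, and the second (fuel) argument is ignored by fueled_perceptron.
trainToy :: Option Qvec
trainToy = fueled_perceptron (S O) O toySamples [qZero, qZero]

main :: IO ()
main = case trainToy of
  Some w -> putStrLn ("converged; weight vector has " ++ show (length w) ++ " entries")
  None   -> putStrLn "did not converge"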
module Data.Genetics.Core where

import Control.Monad
import Control.Monad.Random
import Data.Function
import Data.Ord
import qualified Data.List as L

import Data.Genetics.Class

type Population a = [a]

data EvOptions a = EvOptions{
    ePopSize  :: Int
  , eMaxGen   :: Int
  , eMutaRate :: Double
  , eElites   :: Double
  , eTarget   :: Double
  , eGMC      :: GMCOptions a
  }

randomPopulation :: Evolvable a => EvOptions a -> IO (Population a)
randomPopulation EvOptions{..} = replicateM ePopSize $ generate eGMC

evolve :: Evolvable a => EvOptions a -> EvData a -> IO (Population a)
evolve opts datum = do
  pop <- randomPopulation opts
  evolve' 0 opts datum pop

evolve' :: Evolvable a => Int -> EvOptions a -> EvData a -> Population a -> IO (Population a)
evolve' n evo@(EvOptions{..}) datum pop =
  if n >= eMaxGen then return pop
  else do
    next <- oneStep evo datum pop
    if snd (getBestFit datum next) >= eTarget
      then return next
      else evolve' (n+1) evo datum pop

stepEvolution :: Evolvable a => Int -> EvOptions a -> EvData a -> Population a -> IO [(a, Double)]
stepEvolution _ _ _ [] = return []
stepEvolution 0 _ datum pop =
  return $ map (\(a,f) -> if(isNaN f) then (a,0) else (a,f)) $ indiFit datum <$> pop
stepEvolution n evo datum pop = do
  next <- oneStep evo datum pop
  stepEvolution (n-1) evo datum next

oneStep :: Evolvable a => EvOptions a -> EvData a -> Population a -> IO (Population a)
oneStep _ _ [] = return []
oneStep EvOptions{..} datum pop =
  if(length matingPool < 2) then return [] else do
    kids <- replicateM n $ do
      p1 <- fromList matingPool
      p2 <- fromList matingPool
      (c,d) <- crossover eGMC (p1,p2)
      c' <- mayMutate c
      d' <- mayMutate d
      sequence [c',d']
    return $ fst $ L.splitAt ePopSize $ (++) elites $ concat kids
  where
    n = ePopSize `div` 2 + ePopSize `mod` 2
    e = (*) eElites $ fromIntegral $ length matingPool
    matingPool = foldl (\acc i -> let f = fitness datum i in
                   if(isNaN f) then (i, toRational 0):acc else (i, toRational f):acc) [] pop
    elites = fst $ L.splitAt (ceiling e) $ map fst $
             L.sortBy (compare `on` (Down . snd)) $ matingPool
    mr = toRational eMutaRate
    mayMutate e = fromList [(mutate eGMC e, mr), (return e, 1 - mr)]

indiFit :: Evolvable a => EvData a -> a -> (a,Double)
indiFit datum indi = (indi, fitness datum indi)

getBestFit :: Evolvable a => EvData a -> Population a -> (a,Double)
getBestFit datum pop = head $ L.sortBy (compare `on` snd) $ indiFit datum <$> pop
Teaspot-Studio/genmus
src/Data/Genetics/Core.hs
bsd-3-clause
2,545
0
15
601
1,129
576
553
-1
-1
module GameInfo where

import Control.DeepSeq
import Data.Map (Map)

data GameInfo = GameInfo { name :: String,
                           state :: GameState,
                           turn :: Int,
                           timeToHost :: Int,
                           era :: Maybe Era,
                           nations :: [Nation],
                           mods :: [ModInfo] }
  deriving (Eq, Read, Show)

instance NFData GameInfo where
  rnf game = name game `seq` state game `seq` turn game `seq` timeToHost game
             `seq` era game `seq` nations game `seq` mods game `seq` ()

data GameState = Waiting | Running
  deriving(Eq, Read, Show)

data Era = Early | Middle | Late
  deriving (Eq, Read, Show)

data Nation = Nation { nationId :: !Int,
                       player :: !Player,
                       submitted :: !Bool,
                       connected :: !Bool }
  deriving (Eq, Read, Show)

data Player = Empty
            | Human
            | AI
            | Closed
            | DefeatedThisTurn
            | DefeatedEarlier
  deriving (Eq, Read, Show)

data ModInfo = ModInfo { modName :: !String,
                         modMajorVersion :: !Int,
                         modMinorVersion :: !Int }
  deriving (Eq, Show, Read)

nationName :: Int -> String
nationName 0 = "EA Arcoscephale"
nationName 1 = "EA Ermor"
nationName 2 = "EA Ulm"
nationName 3 = "EA Marverni"
nationName 4 = "EA Sauromatia"
nationName 5 = "EA T'ien Ch'i"
nationName 6 = "EA Machaka"
nationName 7 = "EA Mictlan"
nationName 8 = "EA Abysia"
nationName 9 = "EA Caelum"
nationName 10 = "EA C'tis"
nationName 11 = "EA Pangaea"
nationName 12 = "EA Agartha"
nationName 13 = "EA Tir na n'Og"
nationName 14 = "EA Fomoria"
nationName 15 = "EA Vanheim"
nationName 16 = "EA Helheim"
nationName 17 = "EA Niefelheim"
nationName 18 = "EA Kailasa"
nationName 19 = "EA Yomi"
nationName 20 = "EA Hinnom"
nationName 21 = "EA Atlantis"
nationName 22 = "EA R'lyeh"
nationName 26 = "EA Oceania"
nationName 68 = "EA Lanka"

nationName 27 = "MA Arcoscephale"
nationName 28 = "MA Ermor"
nationName 29 = "MA Pythium"
nationName 30 = "MA Man"
nationName 31 = "MA Ulm"
nationName 32 = "MA Marignon"
nationName 33 = "MA Mictlan"
nationName 34 = "MA T'ien Ch'i"
nationName 35 = "MA Machaka"
nationName 36 = "MA Agartha"
nationName 37 = "MA Abysia"
nationName 38 = "MA Caelum"
nationName 39 = "MA C'tis"
nationName 40 = "MA Pangaea"
nationName 41 = "MA Vanheim"
nationName 42 = "MA Jotunheim"
nationName 43 = "MA Bandar Log"
nationName 44 = "MA Shinuyama"
nationName 45 = "MA Ashdod"
nationName 46 = "MA Atlantis"
nationName 47 = "MA R'lyeh"
nationName 48 = "MA Oceania"
nationName 69 = "MA Eriu"

nationName 49 = "LA Arcoscephale"
nationName 50 = "LA Ermor"
nationName 51 = "LA Man"
nationName 52 = "LA Ulm"
nationName 53 = "LA Marignon"
nationName 54 = "LA Mictlan"
nationName 55 = "LA T'ien Ch'i"
nationName 56 = "LA Jomon"
nationName 57 = "LA Agartha"
nationName 58 = "LA Abysia"
nationName 59 = "LA Caelum"
nationName 60 = "LA C'tis"
nationName 61 = "LA Pangaea"
nationName 62 = "LA Midgård"
nationName 63 = "LA Utgård"
nationName 64 = "LA Patala"
nationName 65 = "LA Gath"
nationName 66 = "LA Atlantis"
nationName 67 = "LA R'lyeh"
nationName 70 = "LA Pythium"
nationName 71 = "LA Bogarus"

nationName 23 = "AI Special monsters 1"
nationName 24 = "AI Special monsters 2"
nationName 25 = "AI Independents"

nationName n = "Nation " ++ show n
Ornedan/dom3statusbot
GameInfo.hs
bsd-3-clause
3,679
0
13
1,166
962
508
454
130
1
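Since GameInfo's types are plain records and nationName is total, a status line per nation needs nothing beyond pattern matching. The following is an illustrative sketch rather than part of the original module; it assumes GameInfo is imported, and the two nations are invented for the example (ids 0 and 27 are EA and MA Arcoscephale in the table above).

-- Render one line of a status report for a nation.
describeNation :: Nation -> String
describeNation n =
  nationName (nationId n)
    ++ " (" ++ show (player n) ++ ")"
    ++ (if submitted n then " [turn in]" else " [waiting]")

-- Invented example data: a human EA Arcoscephale and an AI MA Arcoscephale.
exampleNations :: [Nation]
exampleNations =
  [ Nation { nationId = 0,  player = Human, submitted = True,  connected = True  }
  , Nation { nationId = 27, player = AI,    submitted = False, connected = False }
  ]

main :: IO ()
main = mapM_ (putStrLn . describeNation) exampleNations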
{-# LANGUAGE CPP #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.Freeze -- Copyright : (c) David Himmelstrup 2005 -- Duncan Coutts 2011 -- License : BSD-like -- -- Maintainer : [email protected] -- Stability : provisional -- Portability : portable -- -- The cabal freeze command ----------------------------------------------------------------------------- module Distribution.Client.Freeze ( freeze, getFreezePkgs ) where import Distribution.Client.Config ( SavedConfig(..) ) import Distribution.Client.Types import Distribution.Client.Targets import Distribution.Client.Dependency import Distribution.Client.IndexUtils as IndexUtils ( getSourcePackages, getInstalledPackages ) import Distribution.Client.SolverInstallPlan ( SolverInstallPlan, SolverPlanPackage ) import qualified Distribution.Client.SolverInstallPlan as SolverInstallPlan import Distribution.Client.Setup ( GlobalFlags(..), FreezeFlags(..), ConfigExFlags(..) , RepoContext(..) ) import Distribution.Client.Sandbox.PackageEnvironment ( loadUserConfig, pkgEnvSavedConfig, showPackageEnvironment, userPackageEnvironmentFile ) import Distribution.Client.Sandbox.Types ( SandboxPackageInfo(..) ) import Distribution.Solver.Types.ConstraintSource import Distribution.Solver.Types.LabeledPackageConstraint import Distribution.Solver.Types.OptionalStanza import Distribution.Solver.Types.PkgConfigDb import Distribution.Solver.Types.SolverId import Distribution.Package ( Package, packageId, packageName, packageVersion ) import Distribution.Simple.Compiler ( Compiler, compilerInfo, PackageDBStack ) import Distribution.Simple.PackageIndex (InstalledPackageIndex) import Distribution.Simple.Program ( ProgramDb ) import Distribution.Simple.Setup ( fromFlag, fromFlagOrDefault, flagToMaybe ) import Distribution.Simple.Utils ( die, notice, debug, writeFileAtomic ) import Distribution.System ( Platform ) import Distribution.Text ( display ) import Distribution.Verbosity ( Verbosity ) import Control.Monad ( when ) import qualified Data.ByteString.Lazy.Char8 as BS.Char8 #if !MIN_VERSION_base(4,8,0) import Data.Monoid ( mempty ) #endif import Data.Version ( showVersion ) import Distribution.Version ( thisVersion ) -- ------------------------------------------------------------ -- * The freeze command -- ------------------------------------------------------------ -- | Freeze all of the dependencies by writing a constraints section -- constraining each dependency to an exact version. -- freeze :: Verbosity -> PackageDBStack -> RepoContext -> Compiler -> Platform -> ProgramDb -> Maybe SandboxPackageInfo -> GlobalFlags -> FreezeFlags -> IO () freeze verbosity packageDBs repoCtxt comp platform progdb mSandboxPkgInfo globalFlags freezeFlags = do pkgs <- getFreezePkgs verbosity packageDBs repoCtxt comp platform progdb mSandboxPkgInfo globalFlags freezeFlags if null pkgs then notice verbosity $ "No packages to be frozen. " ++ "As this package has no dependencies." else if dryRun then notice verbosity $ unlines $ "The following packages would be frozen:" : formatPkgs pkgs else freezePackages verbosity globalFlags pkgs where dryRun = fromFlag (freezeDryRun freezeFlags) -- | Get the list of packages whose versions would be frozen by the @freeze@ -- command. 
getFreezePkgs :: Verbosity -> PackageDBStack -> RepoContext -> Compiler -> Platform -> ProgramDb -> Maybe SandboxPackageInfo -> GlobalFlags -> FreezeFlags -> IO [SolverPlanPackage] getFreezePkgs verbosity packageDBs repoCtxt comp platform progdb mSandboxPkgInfo globalFlags freezeFlags = do installedPkgIndex <- getInstalledPackages verbosity comp packageDBs progdb sourcePkgDb <- getSourcePackages verbosity repoCtxt pkgConfigDb <- readPkgConfigDb verbosity progdb pkgSpecifiers <- resolveUserTargets verbosity repoCtxt (fromFlag $ globalWorldFile globalFlags) (packageIndex sourcePkgDb) [UserTargetLocalDir "."] sanityCheck pkgSpecifiers planPackages verbosity comp platform mSandboxPkgInfo freezeFlags installedPkgIndex sourcePkgDb pkgConfigDb pkgSpecifiers where sanityCheck pkgSpecifiers = do when (not . null $ [n | n@(NamedPackage _ _) <- pkgSpecifiers]) $ die $ "internal error: 'resolveUserTargets' returned " ++ "unexpected named package specifiers!" when (length pkgSpecifiers /= 1) $ die $ "internal error: 'resolveUserTargets' returned " ++ "unexpected source package specifiers!" planPackages :: Verbosity -> Compiler -> Platform -> Maybe SandboxPackageInfo -> FreezeFlags -> InstalledPackageIndex -> SourcePackageDb -> PkgConfigDb -> [PackageSpecifier UnresolvedSourcePackage] -> IO [SolverPlanPackage] planPackages verbosity comp platform mSandboxPkgInfo freezeFlags installedPkgIndex sourcePkgDb pkgConfigDb pkgSpecifiers = do solver <- chooseSolver verbosity (fromFlag (freezeSolver freezeFlags)) (compilerInfo comp) notice verbosity "Resolving dependencies..." installPlan <- foldProgress logMsg die return $ resolveDependencies platform (compilerInfo comp) pkgConfigDb solver resolverParams return $ pruneInstallPlan installPlan pkgSpecifiers where resolverParams = setMaxBackjumps (if maxBackjumps < 0 then Nothing else Just maxBackjumps) . setIndependentGoals independentGoals . setReorderGoals reorderGoals . setCountConflicts countConflicts . setShadowPkgs shadowPkgs . setStrongFlags strongFlags . addConstraints [ let pkg = pkgSpecifierTarget pkgSpecifier pc = PackageConstraintStanzas pkg stanzas in LabeledPackageConstraint pc ConstraintSourceFreeze | pkgSpecifier <- pkgSpecifiers ] . maybe id applySandboxInstallPolicy mSandboxPkgInfo $ standardInstallPolicy installedPkgIndex sourcePkgDb pkgSpecifiers logMsg message rest = debug verbosity message >> rest stanzas = [ TestStanzas | testsEnabled ] ++ [ BenchStanzas | benchmarksEnabled ] testsEnabled = fromFlagOrDefault False $ freezeTests freezeFlags benchmarksEnabled = fromFlagOrDefault False $ freezeBenchmarks freezeFlags reorderGoals = fromFlag (freezeReorderGoals freezeFlags) countConflicts = fromFlag (freezeCountConflicts freezeFlags) independentGoals = fromFlag (freezeIndependentGoals freezeFlags) shadowPkgs = fromFlag (freezeShadowPkgs freezeFlags) strongFlags = fromFlag (freezeStrongFlags freezeFlags) maxBackjumps = fromFlag (freezeMaxBackjumps freezeFlags) -- | Remove all unneeded packages from an install plan. -- -- A package is unneeded if it is either -- -- 1) the package that we are freezing, or -- -- 2) not a dependency (directly or transitively) of the package we are -- freezing. This is useful for removing previously installed packages -- which are no longer required from the install plan. -- -- Invariant: @pkgSpecifiers@ must refer to packages which are not -- 'PreExisting' in the 'SolverInstallPlan'. 
pruneInstallPlan :: SolverInstallPlan -> [PackageSpecifier UnresolvedSourcePackage] -> [SolverPlanPackage] pruneInstallPlan installPlan pkgSpecifiers = removeSelf pkgIds $ SolverInstallPlan.dependencyClosure installPlan pkgIds where pkgIds = [ PlannedId (packageId pkg) | SpecificSourcePackage pkg <- pkgSpecifiers ] removeSelf [thisPkg] = filter (\pp -> packageId pp /= packageId thisPkg) removeSelf _ = error $ "internal error: 'pruneInstallPlan' given " ++ "unexpected package specifiers!" freezePackages :: Package pkg => Verbosity -> GlobalFlags -> [pkg] -> IO () freezePackages verbosity globalFlags pkgs = do pkgEnv <- fmap (createPkgEnv . addFrozenConstraints) $ loadUserConfig verbosity "" (flagToMaybe . globalConstraintsFile $ globalFlags) writeFileAtomic userPackageEnvironmentFile $ showPkgEnv pkgEnv where addFrozenConstraints config = config { savedConfigureExFlags = (savedConfigureExFlags config) { configExConstraints = map constraint pkgs } } constraint pkg = (pkgIdToConstraint $ packageId pkg, ConstraintSourceUserConfig userPackageEnvironmentFile) where pkgIdToConstraint pkgId = UserConstraintVersion (packageName pkgId) (thisVersion $ packageVersion pkgId) createPkgEnv config = mempty { pkgEnvSavedConfig = config } showPkgEnv = BS.Char8.pack . showPackageEnvironment formatPkgs :: Package pkg => [pkg] -> [String] formatPkgs = map $ showPkg . packageId where showPkg pid = name pid ++ " == " ++ version pid name = display . packageName version = showVersion . packageVersion
sopvop/cabal
cabal-install/Distribution/Client/Freeze.hs
bsd-3-clause
9,795
0
20
2,491
1,724
925
799
184
3
{-# LANGUAGE CPP, ConstraintKinds, DeriveDataTypeable, FlexibleContexts, MultiWayIf, NamedFieldPuns, OverloadedStrings, PackageImports, RankNTypes, RecordWildCards, ScopedTypeVariables, TemplateHaskell, TupleSections #-} -- | Run commands in Docker containers module Stack.Docker (cleanup ,CleanupOpts(..) ,CleanupAction(..) ,dockerCleanupCmdName ,dockerCmdName ,dockerHelpOptName ,dockerPullCmdName ,entrypoint ,preventInContainer ,pull ,reexecWithOptionalContainer ,reset ,reExecArgName ,StackDockerException(..) ) where import Control.Applicative import Control.Concurrent.MVar.Lifted (MVar,modifyMVar_,newMVar) import Control.Exception.Lifted import Control.Monad import Control.Monad.Catch (MonadThrow,throwM,MonadCatch) import Control.Monad.IO.Class (MonadIO,liftIO) import Control.Monad.Logger (MonadLogger,logError,logInfo,logWarn) import Control.Monad.Reader (MonadReader,runReaderT) import Control.Monad.Trans.Control (MonadBaseControl) import Control.Monad.Writer (execWriter,runWriter,tell) import qualified Crypto.Hash as Hash (Digest, MD5, hash) import Data.Aeson.Extended (FromJSON(..),(.:),(.:?),(.!=),eitherDecode) import Data.ByteString.Builder (stringUtf8,charUtf8,toLazyByteString) import qualified Data.ByteString.Char8 as BS import qualified Data.ByteString.Lazy.Char8 as LBS import Data.Char (isSpace,toUpper,isAscii,isDigit) import Data.Conduit.List (sinkNull) import Data.List (dropWhileEnd,intercalate,isPrefixOf,isInfixOf,foldl') import Data.List.Extra (trim, nubOrd) import Data.Map.Strict (Map) import qualified Data.Map.Strict as Map import Data.Maybe import Data.Ord (Down(..)) import Data.Streaming.Process (ProcessExitedUnsuccessfully(..)) import Data.Text (Text) import qualified Data.Text as T import qualified Data.Text.Encoding as T import Data.Time (UTCTime,LocalTime(..),diffDays,utcToLocalTime,getZonedTime,ZonedTime(..)) import Data.Version (showVersion) import GHC.Exts (sortWith) import Path import Path.Extra (toFilePathNoTrailingSep) import Path.IO hiding (canonicalizePath) import qualified Paths_stack as Meta import Prelude -- Fix redundant import warnings import Stack.Config (getInContainer) import Stack.Constants import Stack.Docker.GlobalDB import Stack.Types.PackageIndex import Stack.Types.Version import Stack.Types.Config import Stack.Types.Docker import Stack.Types.Internal import Stack.Types.StackT import Stack.Setup (ensureDockerStackExe) import System.Directory (canonicalizePath,getHomeDirectory) import System.Environment (getEnv,getEnvironment,getProgName,getArgs,getExecutablePath) import System.Exit (exitSuccess, exitWith, ExitCode(..)) import qualified System.FilePath as FP import System.IO (stderr,stdin,stdout,hIsTerminalDevice, hClose) import System.IO.Error (isDoesNotExistError) import System.IO.Unsafe (unsafePerformIO) import qualified System.PosixCompat.User as User import qualified System.PosixCompat.Files as Files import System.Process (CreateProcess(..), StdStream(..), waitForProcess) import System.Process.PagerEditor (editByteString) import System.Process.Read import System.Process.Run import Text.Printf (printf) #ifndef WINDOWS import Control.Concurrent (threadDelay) import qualified Control.Monad.Trans.Control as Control import System.Posix.Signals import qualified System.Posix.User as PosixUser #endif -- | If Docker is enabled, re-runs the currently running OS command in a Docker container. -- Otherwise, runs the inner action. 
-- -- This takes an optional release action which should be taken IFF control is -- transfering away from the current process to the intra-container one. The main use -- for this is releasing a lock. After launching reexecution, the host process becomes -- nothing but an manager for the call into docker and thus may not hold the lock. reexecWithOptionalContainer :: (StackM env m, HasConfig env) => Maybe (Path Abs Dir) -> Maybe (m ()) -> IO () -> Maybe (m ()) -> Maybe (m ()) -> m () reexecWithOptionalContainer mprojectRoot = execWithOptionalContainer mprojectRoot getCmdArgs where getCmdArgs docker envOverride imageInfo isRemoteDocker = do config <- view configL deUser <- if fromMaybe (not isRemoteDocker) (dockerSetUser docker) then liftIO $ do duUid <- User.getEffectiveUserID duGid <- User.getEffectiveGroupID duGroups <- nubOrd <$> User.getGroups duUmask <- Files.setFileCreationMask 0o022 -- Only way to get old umask seems to be to change it, so set it back afterward _ <- Files.setFileCreationMask duUmask return (Just DockerUser{..}) else return Nothing args <- fmap (["--" ++ reExecArgName ++ "=" ++ showVersion Meta.version ,"--" ++ dockerEntrypointArgName ,show DockerEntrypoint{..}] ++) (liftIO getArgs) case dockerStackExe (configDocker config) of Just DockerStackExeHost | configPlatform config == dockerContainerPlatform -> do exePath <- liftIO getExecutablePath cmdArgs args exePath | otherwise -> throwM UnsupportedStackExeHostPlatformException Just DockerStackExeImage -> do progName <- liftIO getProgName return (FP.takeBaseName progName, args, [], []) Just (DockerStackExePath path) -> do exePath <- liftIO $ canonicalizePath (toFilePath path) cmdArgs args exePath Just DockerStackExeDownload -> exeDownload args Nothing | configPlatform config == dockerContainerPlatform -> do (exePath,exeTimestamp,misCompatible) <- liftIO $ do exePath <- liftIO getExecutablePath exeTimestamp <- resolveFile' exePath >>= getModificationTime isKnown <- liftIO $ getDockerImageExe config (iiId imageInfo) exePath exeTimestamp return (exePath, exeTimestamp, isKnown) case misCompatible of Just True -> cmdArgs args exePath Just False -> exeDownload args Nothing -> do e <- try $ sinkProcessStderrStdout Nothing envOverride "docker" [ "run" , "-v" , exePath ++ ":" ++ "/tmp/stack" , iiId imageInfo , "/tmp/stack" , "--version"] sinkNull sinkNull let compatible = case e of Left (ProcessExitedUnsuccessfully _ _) -> False Right _ -> True liftIO $ setDockerImageExe config (iiId imageInfo) exePath exeTimestamp compatible if compatible then cmdArgs args exePath else exeDownload args Nothing -> exeDownload args exeDownload args = do exePath <- ensureDockerStackExe dockerContainerPlatform cmdArgs args (toFilePath exePath) cmdArgs args exePath = do let mountPath = hostBinDir FP.</> FP.takeBaseName exePath return (mountPath, args, [], [Mount exePath mountPath]) -- | If Docker is enabled, re-runs the OS command returned by the second argument in a -- Docker container. Otherwise, runs the inner action. -- -- This takes an optional release action just like `reexecWithOptionalContainer`. 
execWithOptionalContainer :: (StackM env m, HasConfig env) => Maybe (Path Abs Dir) -> GetCmdArgs env m -> Maybe (m ()) -> IO () -> Maybe (m ()) -> Maybe (m ()) -> m () execWithOptionalContainer mprojectRoot getCmdArgs mbefore inner mafter mrelease = do config <- view configL inContainer <- getInContainer isReExec <- view reExecL if | inContainer && not isReExec && (isJust mbefore || isJust mafter) -> throwM OnlyOnHostException | inContainer -> liftIO (do inner exitSuccess) | not (dockerEnable (configDocker config)) -> do fromMaybeAction mbefore liftIO inner fromMaybeAction mafter liftIO exitSuccess | otherwise -> do fromMaybeAction mrelease runContainerAndExit getCmdArgs mprojectRoot (fromMaybeAction mbefore) (fromMaybeAction mafter) where fromMaybeAction Nothing = return () fromMaybeAction (Just hook) = hook -- | Error if running in a container. preventInContainer :: (MonadIO m,MonadThrow m) => m () -> m () preventInContainer inner = do inContainer <- getInContainer if inContainer then throwM OnlyOnHostException else inner -- | Run a command in a new Docker container, then exit the process. runContainerAndExit :: (StackM env m, HasConfig env) => GetCmdArgs env m -> Maybe (Path Abs Dir) -- ^ Project root (maybe) -> m () -- ^ Action to run before -> m () -- ^ Action to run after -> m () runContainerAndExit getCmdArgs mprojectRoot before after = do config <- view configL let docker = configDocker config envOverride <- getEnvOverride (configPlatform config) checkDockerVersion envOverride docker (env,isStdinTerminal,isStderrTerminal,homeDir) <- liftIO $ (,,,) <$> getEnvironment <*> hIsTerminalDevice stdin <*> hIsTerminalDevice stderr <*> (parseAbsDir =<< getHomeDirectory) isStdoutTerminal <- view terminalL let dockerHost = lookup "DOCKER_HOST" env dockerCertPath = lookup "DOCKER_CERT_PATH" env bamboo = lookup "bamboo_buildKey" env jenkins = lookup "JENKINS_HOME" env msshAuthSock = lookup "SSH_AUTH_SOCK" env muserEnv = lookup "USER" env isRemoteDocker = maybe False (isPrefixOf "tcp://") dockerHost image = dockerImage docker when (isRemoteDocker && maybe False (isInfixOf "boot2docker") dockerCertPath) ($logWarn "Warning: Using boot2docker is NOT supported, and not likely to perform well.") maybeImageInfo <- inspect envOverride image imageInfo@Inspect{..} <- case maybeImageInfo of Just ii -> return ii Nothing | dockerAutoPull docker -> do pullImage envOverride docker image mii2 <- inspect envOverride image case mii2 of Just ii2 -> return ii2 Nothing -> throwM (InspectFailedException image) | otherwise -> throwM (NotPulledException image) sandboxDir <- projectDockerSandboxDir projectRoot let ImageConfig {..} = iiConfig imageEnvVars = map (break (== '=')) icEnv platformVariant = show $ hashRepoName image stackRoot = configStackRoot config sandboxHomeDir = sandboxDir </> homeDirName isTerm = not (dockerDetach docker) && isStdinTerminal && isStdoutTerminal && isStderrTerminal keepStdinOpen = not (dockerDetach docker) && -- Workaround for https://github.com/docker/docker/issues/12319 -- This is fixed in Docker 1.9.1, but will leave the workaround -- in place for now, for users who haven't upgraded yet. 
(isTerm || (isNothing bamboo && isNothing jenkins)) hostBinDirPath <- parseAbsDir hostBinDir newPathEnv <- augmentPath [ hostBinDirPath , sandboxHomeDir </> $(mkRelDir ".local/bin")] (T.pack <$> lookupImageEnv "PATH" imageEnvVars) (cmnd,args,envVars,extraMount) <- getCmdArgs docker envOverride imageInfo isRemoteDocker pwd <- getCurrentDir liftIO (do updateDockerImageLastUsed config iiId (toFilePath projectRoot) mapM_ ensureDir [sandboxHomeDir, stackRoot]) -- Since $HOME is now mounted in the same place in the container we can -- just symlink $HOME/.ssh to the right place for the stack docker user let sshDir = homeDir </> sshRelDir sshDirExists <- doesDirExist sshDir sshSandboxDirExists <- liftIO (Files.fileExist (toFilePathNoTrailingSep (sandboxHomeDir </> sshRelDir))) when (sshDirExists && not sshSandboxDirExists) (liftIO (Files.createSymbolicLink (toFilePathNoTrailingSep sshDir) (toFilePathNoTrailingSep (sandboxHomeDir </> sshRelDir)))) containerID <- (trim . decodeUtf8) <$> readDockerProcess envOverride (Just projectRoot) (concat [["create" ,"--net=host" ,"-e",inContainerEnvVar ++ "=1" ,"-e",stackRootEnvVar ++ "=" ++ toFilePathNoTrailingSep stackRoot ,"-e",platformVariantEnvVar ++ "=dk" ++ platformVariant ,"-e","HOME=" ++ toFilePathNoTrailingSep sandboxHomeDir ,"-e","PATH=" ++ T.unpack newPathEnv ,"-e","PWD=" ++ toFilePathNoTrailingSep pwd ,"-v",toFilePathNoTrailingSep homeDir ++ ":" ++ toFilePathNoTrailingSep homeDir ,"-v",toFilePathNoTrailingSep stackRoot ++ ":" ++ toFilePathNoTrailingSep stackRoot ,"-v",toFilePathNoTrailingSep projectRoot ++ ":" ++ toFilePathNoTrailingSep projectRoot ,"-v",toFilePathNoTrailingSep sandboxHomeDir ++ ":" ++ toFilePathNoTrailingSep sandboxHomeDir ,"-w",toFilePathNoTrailingSep pwd] ,case muserEnv of Nothing -> [] Just userEnv -> ["-e","USER=" ++ userEnv] ,case msshAuthSock of Nothing -> [] Just sshAuthSock -> ["-e","SSH_AUTH_SOCK=" ++ sshAuthSock ,"-v",sshAuthSock ++ ":" ++ sshAuthSock] -- Disable the deprecated entrypoint in FP Complete-generated images ,["--entrypoint=/usr/bin/env" | isJust (lookupImageEnv oldSandboxIdEnvVar imageEnvVars) && (icEntrypoint == ["/usr/local/sbin/docker-entrypoint"] || icEntrypoint == ["/root/entrypoint.sh"])] ,concatMap (\(k,v) -> ["-e", k ++ "=" ++ v]) envVars ,concatMap mountArg (extraMount ++ dockerMount docker) ,concatMap (\nv -> ["-e", nv]) (dockerEnv docker) ,case dockerContainerName docker of Just name -> ["--name=" ++ name] Nothing -> [] ,["-t" | isTerm] ,["-i" | keepStdinOpen] ,dockerRunArgs docker ,[image] ,[cmnd] ,args]) before #ifndef WINDOWS runInBase <- Control.liftBaseWith $ \run -> return (void . 
run) oldHandlers <- forM [sigINT,sigABRT,sigHUP,sigPIPE,sigTERM,sigUSR1,sigUSR2] $ \sig -> do let sigHandler = runInBase $ do readProcessNull Nothing envOverride "docker" ["kill","--signal=" ++ show sig,containerID] when (sig `elem` [sigTERM,sigABRT]) $ do -- Give the container 30 seconds to exit gracefully, then send a sigKILL to force it liftIO $ threadDelay 30000000 readProcessNull Nothing envOverride "docker" ["kill",containerID] oldHandler <- liftIO $ installHandler sig (Catch sigHandler) Nothing return (sig, oldHandler) #endif let cmd = Cmd Nothing "docker" envOverride (concat [["start"] ,["-a" | not (dockerDetach docker)] ,["-i" | keepStdinOpen] ,[containerID]]) e <- finally (try $ callProcess' (\cp -> cp { delegate_ctlc = False }) cmd) (do unless (dockerPersist docker || dockerDetach docker) $ catch (readProcessNull Nothing envOverride "docker" ["rm","-f",containerID]) (\(_::ReadProcessException) -> return ()) #ifndef WINDOWS forM_ oldHandlers $ \(sig,oldHandler) -> liftIO $ installHandler sig oldHandler Nothing #endif ) case e of Left (ProcessExitedUnsuccessfully _ ec) -> liftIO (exitWith ec) Right () -> do after liftIO exitSuccess where -- This is using a hash of the Docker repository (without tag or digest) to ensure -- binaries/libraries aren't shared between Docker and host (or incompatible Docker images) hashRepoName :: String -> Hash.Digest Hash.MD5 hashRepoName = Hash.hash . BS.pack . takeWhile (\c -> c /= ':' && c /= '@') lookupImageEnv name vars = case lookup name vars of Just ('=':val) -> Just val _ -> Nothing mountArg (Mount host container) = ["-v",host ++ ":" ++ container] projectRoot = fromMaybeProjectRoot mprojectRoot sshRelDir = $(mkRelDir ".ssh/") -- | Clean-up old docker images and containers. cleanup :: (StackM env m, HasConfig env) => CleanupOpts -> m () cleanup opts = do config <- view configL let docker = configDocker config envOverride <- getEnvOverride (configPlatform config) checkDockerVersion envOverride docker let runDocker = readDockerProcess envOverride Nothing imagesOut <- runDocker ["images","--no-trunc","-f","dangling=false"] danglingImagesOut <- runDocker ["images","--no-trunc","-f","dangling=true"] runningContainersOut <- runDocker ["ps","-a","--no-trunc","-f","status=running"] restartingContainersOut <- runDocker ["ps","-a","--no-trunc","-f","status=restarting"] exitedContainersOut <- runDocker ["ps","-a","--no-trunc","-f","status=exited"] pausedContainersOut <- runDocker ["ps","-a","--no-trunc","-f","status=paused"] let imageRepos = parseImagesOut imagesOut danglingImageHashes = Map.keys (parseImagesOut danglingImagesOut) runningContainers = parseContainersOut runningContainersOut ++ parseContainersOut restartingContainersOut stoppedContainers = parseContainersOut exitedContainersOut ++ parseContainersOut pausedContainersOut inspectMap <- inspects envOverride (Map.keys imageRepos ++ danglingImageHashes ++ map fst stoppedContainers ++ map fst runningContainers) (imagesLastUsed,curTime) <- liftIO ((,) <$> getDockerImagesLastUsed config <*> getZonedTime) let planWriter = buildPlan curTime imagesLastUsed imageRepos danglingImageHashes stoppedContainers runningContainers inspectMap plan = toLazyByteString (execWriter planWriter) plan' <- case dcAction opts of CleanupInteractive -> liftIO (editByteString (intercalate "-" [stackProgName ,dockerCmdName ,dockerCleanupCmdName ,"plan"]) plan) CleanupImmediate -> return plan CleanupDryRun -> do liftIO (LBS.hPut stdout plan) return LBS.empty mapM_ (performPlanLine envOverride) (reverse (filter filterPlanLine 
(lines (LBS.unpack plan')))) allImageHashesOut <- runDocker ["images","-aq","--no-trunc"] liftIO (pruneDockerImagesLastUsed config (lines (decodeUtf8 allImageHashesOut))) where filterPlanLine line = case line of c:_ | isSpace c -> False _ -> True performPlanLine envOverride line = case filter (not . null) (words (takeWhile (/= '#') line)) of [] -> return () (c:_):t:v:_ -> do args <- if | toUpper c == 'R' && t == imageStr -> do $logInfo (concatT ["Removing image: '",v,"'"]) return ["rmi",v] | toUpper c == 'R' && t == containerStr -> do $logInfo (concatT ["Removing container: '",v,"'"]) return ["rm","-f",v] | otherwise -> throwM (InvalidCleanupCommandException line) e <- try (readDockerProcess envOverride Nothing args) case e of Left ex@ProcessFailed{} -> $logError (concatT ["Could not remove: '",v,"': ", show ex]) Left e' -> throwM e' Right _ -> return () _ -> throwM (InvalidCleanupCommandException line) parseImagesOut = Map.fromListWith (++) . map parseImageRepo . drop 1 . lines . decodeUtf8 where parseImageRepo :: String -> (String, [String]) parseImageRepo line = case words line of repo:tag:hash:_ | repo == "<none>" -> (hash,[]) | tag == "<none>" -> (hash,[repo]) | otherwise -> (hash,[repo ++ ":" ++ tag]) _ -> throw (InvalidImagesOutputException line) parseContainersOut = map parseContainer . drop 1 . lines . decodeUtf8 where parseContainer line = case words line of hash:image:rest -> (hash,(image,last rest)) _ -> throw (InvalidPSOutputException line) buildPlan curTime imagesLastUsed imageRepos danglingImageHashes stoppedContainers runningContainers inspectMap = do case dcAction opts of CleanupInteractive -> do buildStrLn (concat ["# STACK DOCKER CLEANUP PLAN" ,"\n#" ,"\n# When you leave the editor, the lines in this plan will be processed." ,"\n#" ,"\n# Lines that begin with 'R' denote an image or container that will be." ,"\n# removed. You may change the first character to/from 'R' to remove/keep" ,"\n# and image or container that would otherwise be kept/removed." ,"\n#" ,"\n# To cancel the cleanup, delete all lines in this file." ,"\n#" ,"\n# By default, the following images/containers will be removed:" ,"\n#"]) buildDefault dcRemoveKnownImagesLastUsedDaysAgo "Known images last used" buildDefault dcRemoveUnknownImagesCreatedDaysAgo "Unknown images created" buildDefault dcRemoveDanglingImagesCreatedDaysAgo "Dangling images created" buildDefault dcRemoveStoppedContainersCreatedDaysAgo "Stopped containers created" buildDefault dcRemoveRunningContainersCreatedDaysAgo "Running containers created" buildStrLn (concat ["#" ,"\n# The default plan can be adjusted using command-line arguments." ,"\n# Run '" ++ unwords [stackProgName, dockerCmdName, dockerCleanupCmdName] ++ " --help' for details." ,"\n#"]) _ -> buildStrLn (unlines ["# Lines that begin with 'R' denote an image or container that will be." 
,"# removed."]) buildSection "KNOWN IMAGES (pulled/used by stack)" imagesLastUsed buildKnownImage buildSection "UNKNOWN IMAGES (not managed by stack)" (sortCreated (Map.toList (foldl' (\m (h,_) -> Map.delete h m) imageRepos imagesLastUsed))) buildUnknownImage buildSection "DANGLING IMAGES (no named references and not depended on by other images)" (sortCreated (map (,()) danglingImageHashes)) buildDanglingImage buildSection "STOPPED CONTAINERS" (sortCreated stoppedContainers) (buildContainer (dcRemoveStoppedContainersCreatedDaysAgo opts)) buildSection "RUNNING CONTAINERS" (sortCreated runningContainers) (buildContainer (dcRemoveRunningContainersCreatedDaysAgo opts)) where buildDefault accessor description = case accessor opts of Just days -> buildStrLn ("# - " ++ description ++ " at least " ++ showDays days ++ ".") Nothing -> return () sortCreated = sortWith (\(_,_,x) -> Down x) . mapMaybe (\(h,r) -> case Map.lookup h inspectMap of Nothing -> Nothing Just ii -> Just (h,r,iiCreated ii)) buildSection sectionHead items itemBuilder = do let (anyWrote,b) = runWriter (forM items itemBuilder) when (or anyWrote) $ do buildSectionHead sectionHead tell b buildKnownImage (imageHash,lastUsedProjects) = case Map.lookup imageHash imageRepos of Just repos@(_:_) -> do case lastUsedProjects of (l,_):_ -> forM_ repos (buildImageTime (dcRemoveKnownImagesLastUsedDaysAgo opts) l) _ -> forM_ repos buildKeepImage forM_ lastUsedProjects buildProject buildInspect imageHash return True _ -> return False buildUnknownImage (hash, repos, created) = case repos of [] -> return False _ -> do forM_ repos (buildImageTime (dcRemoveUnknownImagesCreatedDaysAgo opts) created) buildInspect hash return True buildDanglingImage (hash, (), created) = do buildImageTime (dcRemoveDanglingImagesCreatedDaysAgo opts) created hash buildInspect hash return True buildContainer removeAge (hash,(image,name),created) = do let disp = name ++ " (image: " ++ image ++ ")" buildTime containerStr removeAge created disp buildInspect hash return True buildProject (lastUsedTime, projectPath) = buildInfo ("Last used " ++ showDaysAgo lastUsedTime ++ " in " ++ projectPath) buildInspect hash = case Map.lookup hash inspectMap of Just Inspect{iiCreated,iiVirtualSize} -> buildInfo ("Created " ++ showDaysAgo iiCreated ++ maybe "" (\s -> " (size: " ++ printf "%g" (fromIntegral s / 1024.0 / 1024.0 :: Float) ++ "M)") iiVirtualSize) Nothing -> return () showDays days = case days of 0 -> "today" 1 -> "yesterday" n -> show n ++ " days ago" showDaysAgo oldTime = showDays (daysAgo oldTime) daysAgo oldTime = let ZonedTime (LocalTime today _) zone = curTime LocalTime oldDay _ = utcToLocalTime zone oldTime in diffDays today oldDay buildImageTime = buildTime imageStr buildTime t removeAge time disp = case removeAge of Just d | daysAgo time >= d -> buildStrLn ("R " ++ t ++ " " ++ disp) _ -> buildKeep t disp buildKeep t d = buildStrLn (" " ++ t ++ " " ++ d) buildKeepImage = buildKeep imageStr buildSectionHead s = buildStrLn ("\n#\n# " ++ s ++ "\n#\n") buildInfo = buildStrLn . (" # " ++) buildStrLn l = do buildStr l tell (charUtf8 '\n') buildStr = tell . stringUtf8 imageStr = "image" containerStr = "container" -- | Inspect Docker image or container. 
inspect :: (MonadIO m,MonadLogger m,MonadBaseControl IO m,MonadCatch m) => EnvOverride -> String -> m (Maybe Inspect) inspect envOverride image = do results <- inspects envOverride [image] case Map.toList results of [] -> return Nothing [(_,i)] -> return (Just i) _ -> throwM (InvalidInspectOutputException "expect a single result") -- | Inspect multiple Docker images and/or containers. inspects :: (MonadIO m, MonadLogger m, MonadBaseControl IO m, MonadCatch m) => EnvOverride -> [String] -> m (Map String Inspect) inspects _ [] = return Map.empty inspects envOverride images = do maybeInspectOut <- try (readDockerProcess envOverride Nothing ("inspect" : images)) case maybeInspectOut of Right inspectOut -> -- filtering with 'isAscii' to workaround @docker inspect@ output containing invalid UTF-8 case eitherDecode (LBS.pack (filter isAscii (decodeUtf8 inspectOut))) of Left msg -> throwM (InvalidInspectOutputException msg) Right results -> return (Map.fromList (map (\r -> (iiId r,r)) results)) Left (ProcessFailed _ _ _ err) | "Error: No such image" `LBS.isPrefixOf` err -> return Map.empty Left e -> throwM e -- | Pull latest version of configured Docker image from registry. pull :: (StackM env m, HasConfig env) => m () pull = do config <- view configL let docker = configDocker config envOverride <- getEnvOverride (configPlatform config) checkDockerVersion envOverride docker pullImage envOverride docker (dockerImage docker) -- | Pull Docker image from registry. pullImage :: (MonadLogger m,MonadIO m,MonadThrow m) => EnvOverride -> DockerOpts -> String -> m () pullImage envOverride docker image = do $logInfo (concatT ["Pulling image from registry: '",image,"'"]) when (dockerRegistryLogin docker) (do $logInfo "You may need to log in." callProcess $ Cmd Nothing "docker" envOverride (concat [["login"] ,maybe [] (\n -> ["--username=" ++ n]) (dockerRegistryUsername docker) ,maybe [] (\p -> ["--password=" ++ p]) (dockerRegistryPassword docker) ,[takeWhile (/= '/') image]])) -- We redirect the stdout of the process to stderr so that the output -- of @docker pull@ will not interfere with the output of other -- commands when using --auto-docker-pull. See issue #2733. 
let stdoutToStderr cp = cp { std_out = UseHandle stderr , std_err = UseHandle stderr , std_in = CreatePipe } (Just hin, _, _, ph) <- createProcess' "pullImage" stdoutToStderr $ Cmd Nothing "docker" envOverride ["pull",image] liftIO (hClose hin) ec <- liftIO (waitForProcess ph) case ec of ExitSuccess -> return () ExitFailure _ -> throwM (PullFailedException image) -- | Check docker version (throws exception if incorrect) checkDockerVersion :: (MonadIO m, MonadLogger m, MonadBaseControl IO m, MonadCatch m) => EnvOverride -> DockerOpts -> m () checkDockerVersion envOverride docker = do dockerExists <- doesExecutableExist envOverride "docker" unless dockerExists (throwM DockerNotInstalledException) dockerVersionOut <- readDockerProcess envOverride Nothing ["--version"] case words (decodeUtf8 dockerVersionOut) of (_:_:v:_) -> case parseVersionFromString (stripVersion v) of Just v' | v' < minimumDockerVersion -> throwM (DockerTooOldException minimumDockerVersion v') | v' `elem` prohibitedDockerVersions -> throwM (DockerVersionProhibitedException prohibitedDockerVersions v') | not (v' `withinRange` dockerRequireDockerVersion docker) -> throwM (BadDockerVersionException (dockerRequireDockerVersion docker) v') | otherwise -> return () _ -> throwM InvalidVersionOutputException _ -> throwM InvalidVersionOutputException where minimumDockerVersion = $(mkVersion "1.6.0") prohibitedDockerVersions = [] stripVersion v = takeWhile (/= '-') (dropWhileEnd (not . isDigit) v) -- | Remove the project's Docker sandbox. reset :: (MonadIO m, MonadReader env m, HasConfig env) => Maybe (Path Abs Dir) -> Bool -> m () reset maybeProjectRoot keepHome = do dockerSandboxDir <- projectDockerSandboxDir projectRoot liftIO (removeDirectoryContents dockerSandboxDir [homeDirName | keepHome] []) where projectRoot = fromMaybeProjectRoot maybeProjectRoot -- | The Docker container "entrypoint": special actions performed when first entering -- a container, such as switching the UID/GID to the "outside-Docker" user's. entrypoint :: (MonadIO m, MonadBaseControl IO m, MonadCatch m, MonadLogger m) => Config -> DockerEntrypoint -> m () entrypoint config@Config{..} DockerEntrypoint{..} = modifyMVar_ entrypointMVar $ \alreadyRan -> do -- Only run the entrypoint once unless alreadyRan $ do envOverride <- getEnvOverride configPlatform homeDir <- parseAbsDir =<< liftIO (getEnv "HOME") -- Get the UserEntry for the 'stack' user in the image, if it exists estackUserEntry0 <- liftIO $ tryJust (guard . isDoesNotExistError) $ User.getUserEntryForName stackUserName -- Switch UID/GID if needed, and update user's home directory case deUser of Nothing -> return () Just (DockerUser 0 _ _ _) -> return () Just du -> updateOrCreateStackUser envOverride estackUserEntry0 homeDir du case estackUserEntry0 of Left _ -> return () Right ue -> do -- If the 'stack' user exists in the image, copy any build plans and package indices from -- its original home directory to the host's stack root, to avoid needing to download them origStackHomeDir <- parseAbsDir (User.homeDirectory ue) let origStackRoot = origStackHomeDir </> $(mkRelDir ("." 
++ stackProgName)) buildPlanDirExists <- doesDirExist (buildPlanDir origStackRoot) when buildPlanDirExists $ do (_, buildPlans) <- listDir (buildPlanDir origStackRoot) forM_ buildPlans $ \srcBuildPlan -> do let destBuildPlan = buildPlanDir configStackRoot </> filename srcBuildPlan exists <- doesFileExist destBuildPlan unless exists $ do ensureDir (parent destBuildPlan) copyFile srcBuildPlan destBuildPlan forM_ configPackageIndices $ \pkgIdx -> do msrcIndex <- flip runReaderT (config{configStackRoot = origStackRoot}) $ do srcIndex <- configPackageIndex (indexName pkgIdx) exists <- doesFileExist srcIndex return $ if exists then Just srcIndex else Nothing case msrcIndex of Nothing -> return () Just srcIndex -> do flip runReaderT config $ do destIndex <- configPackageIndex (indexName pkgIdx) exists <- doesFileExist destIndex unless exists $ do ensureDir (parent destIndex) copyFile srcIndex destIndex return True where updateOrCreateStackUser envOverride estackUserEntry homeDir DockerUser{..} = do case estackUserEntry of Left _ -> do -- If no 'stack' user in image, create one with correct UID/GID and home directory readProcessNull Nothing envOverride "groupadd" ["-o" ,"--gid",show duGid ,stackUserName] readProcessNull Nothing envOverride "useradd" ["-oN" ,"--uid",show duUid ,"--gid",show duGid ,"--home",toFilePathNoTrailingSep homeDir ,stackUserName] Right _ -> do -- If there is already a 'stack' user in the image, adjust its UID/GID and home directory readProcessNull Nothing envOverride "usermod" ["-o" ,"--uid",show duUid ,"--home",toFilePathNoTrailingSep homeDir ,stackUserName] readProcessNull Nothing envOverride "groupmod" ["-o" ,"--gid",show duGid ,stackUserName] forM_ duGroups $ \gid -> do readProcessNull Nothing envOverride "groupadd" ["-o" ,"--gid",show gid ,"group" ++ show gid] -- 'setuid' to the wanted UID and GID liftIO $ do User.setGroupID duGid #ifndef WINDOWS PosixUser.setGroups duGroups #endif User.setUserID duUid _ <- Files.setFileCreationMask duUmask return () stackUserName = "stack"::String -- | MVar used to ensure the Docker entrypoint is performed exactly once entrypointMVar :: MVar Bool {-# NOINLINE entrypointMVar #-} entrypointMVar = unsafePerformIO (newMVar False) -- | Remove the contents of a directory, without removing the directory itself. -- This is used instead of 'FS.removeTree' to clear bind-mounted directories, since -- removing the root of the bind-mount won't work. removeDirectoryContents :: Path Abs Dir -- ^ Directory to remove contents of -> [Path Rel Dir] -- ^ Top-level directory names to exclude from removal -> [Path Rel File] -- ^ Top-level file names to exclude from removal -> IO () removeDirectoryContents path excludeDirs excludeFiles = do isRootDir <- doesDirExist path when isRootDir (do (lsd,lsf) <- listDir path forM_ lsd (\d -> unless (dirname d `elem` excludeDirs) (removeDirRecur d)) forM_ lsf (\f -> unless (filename f `elem` excludeFiles) (removeFile f))) -- | Produce a strict 'S.ByteString' from the stdout of a -- process. Throws a 'ReadProcessException' exception if the -- process fails. Logs process's stderr using @$logError@. readDockerProcess :: (MonadIO m, MonadLogger m, MonadBaseControl IO m, MonadCatch m) => EnvOverride -> Maybe (Path Abs Dir) -> [String] -> m BS.ByteString readDockerProcess envOverride mpwd = readProcessStdout mpwd envOverride "docker" -- | Name of home directory within docker sandbox. 
homeDirName :: Path Rel Dir homeDirName = $(mkRelDir "_home/") -- | Directory where 'stack' executable is bind-mounted in Docker container hostBinDir :: FilePath hostBinDir = "/opt/host/bin" -- | Convenience function to decode ByteString to String. decodeUtf8 :: BS.ByteString -> String decodeUtf8 bs = T.unpack (T.decodeUtf8 bs) -- | Convenience function constructing message for @$log*@. concatT :: [String] -> Text concatT = T.pack . concat -- | Fail with friendly error if project root not set. fromMaybeProjectRoot :: Maybe (Path Abs Dir) -> Path Abs Dir fromMaybeProjectRoot = fromMaybe (throw CannotDetermineProjectRootException) -- | Environment variable that contained the old sandbox ID. -- | Use of this variable is deprecated, and only used to detect old images. oldSandboxIdEnvVar :: String oldSandboxIdEnvVar = "DOCKER_SANDBOX_ID" -- | Options for 'cleanup'. data CleanupOpts = CleanupOpts { dcAction :: !CleanupAction , dcRemoveKnownImagesLastUsedDaysAgo :: !(Maybe Integer) , dcRemoveUnknownImagesCreatedDaysAgo :: !(Maybe Integer) , dcRemoveDanglingImagesCreatedDaysAgo :: !(Maybe Integer) , dcRemoveStoppedContainersCreatedDaysAgo :: !(Maybe Integer) , dcRemoveRunningContainersCreatedDaysAgo :: !(Maybe Integer) } deriving (Show) -- | Cleanup action. data CleanupAction = CleanupInteractive | CleanupImmediate | CleanupDryRun deriving (Show) -- | Parsed result of @docker inspect@. data Inspect = Inspect {iiConfig :: ImageConfig ,iiCreated :: UTCTime ,iiId :: String ,iiVirtualSize :: Maybe Integer} deriving (Show) -- | Parse @docker inspect@ output. instance FromJSON Inspect where parseJSON v = do o <- parseJSON v Inspect <$> o .: "Config" <*> o .: "Created" <*> o .: "Id" <*> o .:? "VirtualSize" -- | Parsed @Config@ section of @docker inspect@ output. data ImageConfig = ImageConfig {icEnv :: [String] ,icEntrypoint :: [String]} deriving (Show) -- | Parse @Config@ section of @docker inspect@ output. instance FromJSON ImageConfig where parseJSON v = do o <- parseJSON v ImageConfig <$> fmap join (o .:? "Env") .!= [] <*> fmap join (o .:? "Entrypoint") .!= [] -- | Function to get command and arguments to run in Docker container type GetCmdArgs env m = (StackM env m, HasConfig env) => DockerOpts -> EnvOverride -> Inspect -> Bool -> m (FilePath,[String],[(String,String)],[Mount])
Fuuzetsu/stack
src/Stack/Docker.hs
bsd-3-clause
43,091
0
34
14,339
9,594
4,877
4,717
848
22
import System

header = "#include <stdio.h>\n\ntypedef struct info_table {\n\tconst int srt;\n\tconst int type;\n\tconst int args;\n} info_table;\n\nint empty_entry() {return 1;}"

table :: String -> Int -> String
table name id =
  "const info_table " ++ name ++ "_info_tb __attribute__ " ++
  "((section (\".text,\\\"ax\\\",@progbits\\n\\t.subsection "++ (show id) ++
  " #\"))) = {-1,1," ++ (show id) ++ "};"

function_dec :: String -> Int -> String
function_dec name id =
  "int " ++ name ++ "_entry() __attribute__ ((section" ++
  "(\".text,\\\"ax\\\",@progbits\\n\\t.subsection " ++ (show id) ++ " #\")));"

function_def :: String -> String -> String
function_def prev name =
  "int " ++ name ++ "_entry()\n{\n\tint d = " ++ prev ++ "_entry();\n\tprintf(\"prev = %d\\n\", d);" ++
  "\n\tint *p = (int*) (&" ++ name ++ "_entry) - 1;\n\treturn *p;\n}"

mkFunction :: String -> String -> Int -> String
mkFunction prev name id =
  let tbl = table name id
      fun = function_dec name (id + 1)
      imp = function_def prev name
  in fun ++ "\n" ++ imp ++ "\n\n" ++ tbl

genFunctions :: Int -> IO String
genFunctions n = genFuncs' [1..n] "empty"
  where
    genFuncs' [] prev = return prev
    genFuncs' xs prev =
      let x = 2 * (head xs)
          n = "a" ++ (show x)
      in do
        putStrLn $ mkFunction prev n x
        genFuncs' (tail xs) n

main = do
  n:_ <- getArgs
  putStrLn header
  last <- genFunctions (read n)
  putStrLn $ "int main()\n{\n\t" ++ last ++ "_entry();\n\treturn 1;\n}\n"
  return ()
dterei/Scraps
ghc/testAsSubLimit.hs
bsd-3-clause
1,743
0
13
557
442
217
225
34
2
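Because header and genFunctions are ordinary definitions, the generator above can also be driven from other code instead of the command line. The sketch below is illustrative only and assumes the definitions above are in scope; it reproduces what main does for an argument of 3, emitting a chain of three functions pinned into successive .text subsections.

-- Emit the complete C program for a chain of three generated functions,
-- mirroring `runghc testAsSubLimit.hs 3`.
emitChain :: IO ()
emitChain = do
  putStrLn header
  lastName <- genFunctions 3
  putStrLn $ "int main()\n{\n\t" ++ lastName ++ "_entry();\n\treturn 1;\n}\n"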
#!/usr/bin/env runghc module Main where import Control.DeepSeq import Control.Exception import Test.Tasty import Test.Tasty.HUnit import Graphics.OpenSCAD import Data.Colour (withOpacity) import Data.List.NonEmpty (fromList) import Data.Monoid ((<>), Monoid,mconcat, mempty, mappend) assertRaises :: (Show a, Control.Exception.Exception e, Show e, Eq e) => String -> e -> IO a -> IO () assertRaises msg selector action = let thetest e = if e == selector then return () else assertFailure $ msg ++ "\nReceived unexpected exception: " ++ (show e) ++ "\ninstead of exception: " ++ (show selector) in do r <- Control.Exception.try action case r of Left e -> thetest e Right _ -> assertFailure $ msg ++ "\nReceived no exception, but was expecting exception: " ++ (show selector) assertError err code = assertRaises "Check error" (ErrorCall err) . evaluate $ deepseq (show code) () sw = concat . words st n e a = testCase n $ (sw $ render a) @?= (sw e) {- About the test result values. Running "cabal test" does not verify that the results do the intended thing in OpenSCAD. Possibly we'll add shell tests for that at some point, but not yet. For now, if you change or add strings, please manually copy them into OpenSCAD and make sure they do what you want the Model data structure that they are testing does. -} tests = testGroup "Tests" [ testGroup "3d-primitives" [ testGroup "Spheres" [ st "1" "sphere(1.0);" $ sphere 1 def, st "2" "sphere(2.0,$fn=100);" (sphere 2 $ fn 100), st "3" "sphere(2.0,$fa=5.0);" (sphere 2 $ fa 5), st "4" "sphere(2.0,$fs=0.1);" (sphere 2 $ fs 0.1) ], testGroup "Boxes" [ st "box" "cube([1.0,2.0,3.0]);" $ box 1 2 3, st "cube" "cube([2.0,2.0,2.0]);" $ cube 2 ], testGroup "Cylinders" [ st "1" "cylinder(r=1.0,h=2.0);" $ cylinder 1 2 def, st "2" "cylinder(r=1.0,h=2.0,$fs=0.6);" (cylinder 1 2 $ fs 0.6), st "3" "cylinder(r=1.0,h=2.0,$fn=10);" (cylinder 1 2 $ fn 10), st "4" "cylinder(r=1.0,h=2.0,$fa=30.0);" (cylinder 1 2 $ fa 30) ], testGroup "Oblique-Cylinders" [ st "1" "cylinder(r1=1.0,h=2.0,r2=2.0);" $ obCylinder 1 2 2 def, st "2" "cylinder(r1=1.0,h=2.0,r2=2.0,$fs=0.6);" (obCylinder 1 2 2 $ fs 0.6), st "3" "cylinder(r1=1.0,h=2.0,r2=2.0,$fn=10);" (obCylinder 1 2 2 $ fn 10), st "4" "cylinder(r1=1.0,h=2.0,r2=2.0,$fa=30.0);" (obCylinder 1 2 2 $ fa 30) ], testGroup "Misc" [ st "import" "import(\"test.stl\");" (solid $ importFile "test.stl"), st "polyhedron 1" "polyhedron(points=[[10.0,10.0,0.0],[10.0,-10.0,0.0],[0.0,0.0,10.0],[-10.0,-10.0,0.0],[-10.0,10.0,0.0]],triangles=[[0,1,2],[1,3,2],[3,4,2],[4,0,2],[1,0,4],[3,1,4]],convexity=1);" $ polyhedron 1 [[(10, 10, 0), (10, -10, 0), (0, 0, 10)], [(10, -10, 0), (-10, -10, 0), (0, 0, 10)], [(-10, -10, 0), (-10, 10, 0), (0, 0, 10)], [(-10, 10, 0), (10, 10, 0), (0, 0, 10)], [(10, -10, 0), (10, 10, 0), (-10, 10, 0)], [(-10, -10, 0), (10, -10, 0), (-10, 10, 0)]], st "polyhedron 2" "polyhedron(points=[[10.0,10.0,0.0],[10.0,-10.0,0.0],[0.0,0.0,10.0],[-10.0,-10.0,0.0],[-10.0,10.0,0.0]],faces=[[0,1,2],[1,3,2],[3,4,2],[4,0,2],[4,3,1,0]],convexity=1);" $ polyhedron 1 [[(10, 10, 0), (10, -10, 0), (0, 0, 10)], [(10, -10, 0), (-10, -10, 0), (0, 0, 10)], [(-10, -10, 0), (-10, 10, 0), (0, 0, 10)], [(-10, 10, 0), (10, 10, 0), (0, 0, 10)], [(-10, 10, 0), (-10, -10, 0), (10, -10, 0), (10, 10, 0)]], st "unsafePolyhedron" "polyhedron(points=[[10.0,10.0,0.0],[10.0,-10.0,0.0],[-10.0,-10.0,0.0],[-10.0,10.0,0.0],[0.0,0.0,10.0]],faces=[[0,1,4],[1,2,4],[2,3,4],[3,0,4],[1,0,3],[2,1,3]],convexity=1);" (unsafePolyhedron 1 [(10.0,10.0,0.0),(10.0,-10.0,0.0),(-10.0,-10.0,0.0), 
(-10.0,10.0,0.0),(0.0,0.0,10)] $ Faces [[0,1,4],[1,2,4],[2,3,4],[3,0,4],[1,0,3], [2,1,3]]) ], testGroup "Linear-Extrusion" [ st "1" "linear_extrude(height=10.0,twist=0.0,scale=[1.0,1.0],slices=10,convexity=10)circle(1.0);" (linearExtrude 10 0 (1, 1) 10 10 def $ circle 1 def), st "2" "linear_extrude(height=10.0,twist=100.0,scale=[1.0,1.0],slices=10,convexity=10)translate([2.0,0.0])circle(1.0);" (linearExtrude 10 100 (1, 1) 10 10 def $ translate (2, 0) $ circle 1 def), st "3" "linear_extrude(height=10.0,twist=500.0,scale=[1.0,1.0],slices=10,convexity=10)translate([2.0,0.0])circle(1.0);" (linearExtrude 10 500 (1, 1) 10 10 def $ translate (2, 0) $ circle 1 def), st "4" "linear_extrude(height=10.0,twist=360.0,scale=[1.0,1.0],slices=100,convexity=10)translate([2.0,0.0])circle(1.0);" (linearExtrude 10 360 (1, 1) 100 10 def $ translate (2, 0) $ circle 1 def), st "5" "linear_extrude(height=10.0,twist=360.0,scale=[1.0,1.0],slices=100,convexity=10,$fn=100)translate([2.0,0.0])circle(1.0);" (linearExtrude 10 360 (1, 1) 100 10 (fn 100) $ translate (2, 0) $ circle 1 def), st "6" "linear_extrude(height=10.0,twist=0.0,scale=[3.0,3.0],slices=100,convexity=10)translate([2.0,0.0])circle(1.0);" (linearExtrude 10 0 (3, 3) 100 10 def $ translate (2, 0) $ circle 1 def), st "7" "linear_extrude(height=10.0,twist=0.0,scale=[1.0,5.0],slices=100,convexity=10,$fn=100)translate([2.0,0.0])circle(1.0);" (linearExtrude 10 0 (1, 5) 100 10 (fn 100) $ translate (2, 0) $ circle 1 def) ], testGroup "Rotated-Extrusion" [ st "1" "rotate_extrude(convexity=10)translate([2.0,0.0])circle(1.0);" (rotateExtrude 10 def $ translate (2, 0) $ circle 1 def), st "2" "rotate_extrude(convexity=10,$fn=100)translate([2.0,0.0])circle(1.0,$fn=100);" (rotateExtrude 10 (fn 100) $ translate (2, 0) $ circle 1 $ fn 100) ], testGroup "Surface" [ st "Normal" "surface(file=\"test.dat\",convexity=5);" $ surface "test.dat" False 5, st "Inverted" "surface(file=\"test.dat\",invert=true,convexity=5);" $ surface "test.dat" True 5 -- Requires 2014.QX ] ], testGroup "2d-primitives" [ testGroup "Squares" [ st "rectangle" "square([2.0,3.0]);" $ rectangle 2 3, st "square" "square([2.0,2.0]);" $ square 2 ], testGroup "Circles" [ st "1" "circle(1.0);" $ circle 1 def, st "2" "circle(2.0,$fn=100);" (circle 2 $ fn 100), st "3" "circle(2.0,$fa=5.0);" (circle 2 $ fa 5), st "4" "circle(2.0,$fs=0.1);" (circle 2 $ fs 0.1) ], testGroup "Misc" [ st "import" "import(\"test.dxf\");" (solid $ importFile "test.dxf"), st "polygon" "polygon(points=[[0.0,0.0],[100.0,0.0],[0.0,100.0],[10.0,10.0],[80.0,10.0],[10.0,80.0]],paths=[[0,1,2],[3,4,5]],convexity=10);" $ polygon 10 [[(0,0),(100,0),(0,100)],[(10,10),(80,10),(10,80)]], st "unsafePolygon" "polygon(points=[[0.0,0.0],[100.0,0.0],[0.0,100.0],[10.0,10.0],[80.0,10.0],[10.0,80.0]], paths=[[0,1,2],[3,4,5]],convexity=1);" (unsafePolygon 1 [(0,0),(100,0),(0,100),(10,10),(80,10),(10,80)] [[0,1,2],[3,4,5]]), st "projection" "projection(cut=false)scale([10.0,10.0,10.0])difference(){translate([0.0,0.0,1.0])cube([1.0,1.0,1.0]);translate([0.25,0.25,0.0])cube([0.5,0.5,3.0]);}" (projection False . scale (10, 10, 10) . 
difference (up 1 (cube 1)) $ translate (0.25, 0.25, 0) (box 0.5 0.5 3)) ] ], testGroup "Transformations" [ testGroup "Size changes" [ st "scale 1" "scale([0.5,1.0,2.0])cube([1.0,1.0,1.0]);" (scale (0.5, 1, 2) $ cube 1), st "scale 2" "scale([0.5,2.0])square([1.0,1.0]);" (scale (0.5, 2) $ rectangle 1 1), st "resize 1" "resize([10.0,20.0])square([2.0,2.0]);" (resize (10, 20) $ square 2), st "resize 2" "resize([10.0,20.0,30.0])cube([2.0,2.0,2.0]);" (resize (10, 20, 30) $ cube 2) ], testGroup "Rotations" [ st "1" "rotate([180.0,0.0,0.0])cube([2.0,2.0,2.0]);" (rotate (180, 0, 0) $ cube 2), st "2" "rotate([0.0,180.0,0.0])cube([2.0,2.0,2.0]);" (rotate (0, 180, 0) $ cube 2), st "3" "rotate([0.0,180.0,180.0])cube([2.0,2.0,2.0]);" (rotate (0, 180, 180) $ cube 2), st "4" "rotate(90.0)square([2.0,1.0]);" (rotate2d 90 $ rectangle 2 1), st "5" "rotate(180.0)square([2.0,1.0]);" (rotate2d 180 $ rectangle 2 1) ], testGroup "Mirrors" [ st "1" "mirror([1.0,0.0,0.0])cube([2.0,2.0,2.0]);" (mirror (1, 0, 0) $ cube 2), st "2" "mirror([0.0,1.0,0.0])cube([2.0,2.0,2.0]);" (mirror (0, 1, 0) $ cube 2), st "3" "rotate([0.0,1.0,1.0])cube([2.0,2.0,2.0]);" (rotate (0, 1, 1) $ cube 2), st "4" "mirror([1.0,0.0])square([2.0,1.0]);" (mirror (1, 0) $ rectangle 2 1), st "2" "mirror([0.0,1.0])square([2.0,1.0]);" (mirror (0, 1) $ rectangle 2 1) ], st "multmatrix" "multmatrix([[1.0,0.0,0.0,10.0],[0.0,1.0,0.0,20.0],[0.0,0.0,1.0,30.0],[0.0,0.0,0.0,1.0]])cylinder(r=2.0,h=3.0);" (multMatrix ( (1, 0, 0, 10), (0, 1, 0, 20), (0, 0, 1, 30), (0, 0, 0, 1) ) $ cylinder 2 3 def), testGroup "Colors" [ st "color 1" "color([1.0,0.0,0.0])cube([1.0,1.0,1.0]);" (color red $ cube 1), st "color 2" "color([1.0,0.0,0.0])square([1.0,1.0]);" (color red $ square 1), st "transparent 1" "color([1.0,0.0,0.0,0.7])cube([1.0,1.0,1.0]);" (transparent (red `withOpacity` 0.7) $ cube 1), st "transparent 2" "color([1.0,0.0,0.0,0.7])square([1.0,1.0]);" (transparent (red `withOpacity` 0.7) $ square 1) ] ], testGroup "Facets" [ st "facet 1" "assign($fn=100){sphere(2.0,$fn=100);}" (var (fn 100) [sphere 2 $ fn 100]), st "facet 2" "assign($fa=5.0){sphere(2.0,$fa=5.0);}" (var (fa 5) [sphere 2 $ fa 5]), st "facet 3" "assign($fs=0.1){sphere(2.0,$fs=0.1);}" (var (fs 0.1) [sphere 2 $ fs 0.1]) ], testGroup "Modifiers" [ st "ignore" "%sphere(1.0);" (ignore (sphere 1 def)), st "debug" "#sphere(1.0);" (debug (sphere 1 def)) ], testGroup "Errors" [ testCase "Polygon Pointcount" . assertError "Polygon has fewer than 3 points." $ polygon 1 [[(0, 0), (0, 1)]], testCase "Polygon Linearity" . assertError "Points in polygon are collinear." $ polygon 1 [[(0, 0), (0, 1), (0, 2)]], testCase "Polyhedron Linearity" . assertError "Some face has collinear points." $ polyhedron 1 [[(0, 0, 0), (1, 0, 0), (2, 0, 0)]], testCase "Polyhedron Planarity" . assertError "Some face isn't coplanar." $ polyhedron 1 [[(10, 10, 0), (10, -10, 0), (0, 10, 10)], [(10, -10, 0), (-10, -10, 0), (0, 0, 10)], [(-10, -10, 0), (-10, 10, 0), (0, 0, 10)], [(-10, 10, 0), (10, 10, 0), (0, 0, 10)], [(-10, 10, 0), (-10, -10, 0), (10, -10, 0), (0, 0, -10)]], testCase "Polyhedron Edges" . assertError "Some edges are not in two faces." $ polyhedron 1 [[(10, 10, 0), (10, -10, 0), (0, 0, 10)], [(10, -10, 0), (-10, -10, 0), (0, 0, 10)], [(-10, -10, 0), (-10, 10, 0), (0, 0, 10)], [(-10, 10, 0), (10, 10, 0), (0, 0, 10)], [(10, -10, 0), (10, 10, 0), (-10, 10, 0)], [(-10, -10, 0), (10, -10, 0), (-10, 20, 0)]], testCase "Polyhedron Faces" . assertError "Some faces have different orientation." 
$ polyhedron 1 [[(10, 10, 0), (10, -10, 0), (0, 0, 10)], [(10, -10, 0), (-10, -10, 0), (0, 0, 10)], [(-10, -10, 0), (-10, 10, 0), (0, 0, 10)], [(-10, 10, 0), (10, 10, 0), (0, 0, 10)], [(10, -10, 0), (10, 10, 0), (-10, 10, 0)], [(10, -10, 0), (-10, -10, 0), (-10, 10, 0)]], testCase "Polyhedron Orientation" . assertError "Face orientations are counterclockwise." $ polyhedron 1 [[(10, -10, 0), (10, 10, 0), (0, 0, 10)], [(-10, -10, 0), (10, -10, 0), (0, 0, 10)], [(-10, 10, 0), (-10, -10, 0), (0, 0, 10)], [(10, 10, 0), (-10, 10, 0), (0, 0, 10)], [(10, 10, 0), (10, -10, 0), (-10, 10, 0)], [(10, -10, 0), (-10, -10, 0), (-10, 10, 0)]] ], testGroup "Combinations" [ st "union" "union(){cube([1.0,1.0,1.0]);sphere(1.1,$fs=0.1);}" (union [cube 1, sphere 1.1 $ fs 0.1]), st "difference" "difference(){cube([1.0,1.0,1.0]);sphere(1.1,$fs=0.1);}" (difference (cube 1) . sphere 1.1 $ fs 0.1), st "intersection" "intersection(){cube([1.0,1.0,1.0]);sphere(1.1,$fs=0.1);}" (intersection [cube 1, sphere 1.1 $ fs 0.1]), st "minkowski" "minkowski(){cube([10.0,10.0,10.0]);cylinder(r=2.0,h=1.1,$fn=50);}" (minkowski [cube 10, cylinder 2 1.1 $ fn 50]), st "hull" "hull(){translate([15.0,10.0])circle(10.0);circle(10.0);}" (hull [circle 10 def # translate (15, 10), circle 10 def]) ], testGroup "Haskell" [ st "# 3d" "translate([-3.0,-3.0,-3.0])color([1.0,0.0,0.0])cube([3.0,3.0,3.0]);" (cube 3 # color red # translate (-3, -3, -3)), st "# 2d" "translate([3.0,3.0])color([1.0,0.6470588235294119,0.0])square([2.0,2.0]);" (square 2 # color orange # translate (3, 3)), st "Monoid 1 3d" "union(){cube([1.0,1.0,1.0]);sphere(1.1,$fs=0.1);}" (cube 1 <> sphere 1.1 (fs 0.1)), st "Monoid 1 2d" "union(){square([1.0,1.0]);circle(1.1,$fs=0.1);}" (square 1 <> circle 1.1 (fs 0.1)), st "Monoid 2 3d" "union(){cube([1.0,1.0,1.0]);sphere(1.1,$fs=0.1);}" (mconcat [cube 1, sphere 1.1 $ fs 0.1]), st "Monoid 2 2d" "union(){square([1.0,1.0]);circle(1.1,$fs=0.1);}" (mconcat [square 1, circle 1.1 $ fs 0.1]), st "Monoid 3 3d" "sphere(1.1,$fs=0.1);" (mconcat [sphere 1.1 $ fs 0.1]), st "Monoid 3 2d" "square([1.0,1.0]);" (mconcat [square 1]), st "Monoid 4 3d" "cube([0.0,0.0,0.0]);" (solid mempty), st "Monoid 4 2d" "cube([0.0,0.0,0.0]);" (mempty :: Model2d), st "Monoid 5 3d" "union(){cube([1.0,1.0,1.0]);sphere(1.1,$fs=0.1);}" (mappend (cube 1) $ sphere 1.1 (fs 0.1)), st "Monoid 5 2d" "union(){square([1.0,1.0]);circle(1.1,$fs=0.1);}" (mappend (square 1) $ circle 1.1 (fs 0.1)) ] ] main = defaultMain tests
andreyk0/HSOpenSCAD
UnitTest.hs
bsd-3-clause
15,180
0
19
4,157
5,092
2,841
2,251
262
3
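-- An illustrative extra group in the same style as the suite above, using only
-- primitives already exercised in this file. The expected string is inferred from
-- the renderings shown here and, per the note at the top of the file, should still
-- be verified by pasting it into OpenSCAD; it is not taken from the library's docs.
extraTests :: TestTree
extraTests = testGroup "Extra"
  [ st "shifted sphere" "translate([1.0,2.0,3.0])sphere(1.0);"
       (translate (1, 2, 3) $ sphere 1 def) ]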
{-#LANGUAGE TypeFamilies#-} {-#LANGUAGE TemplateHaskell#-} {-#LANGUAGE QuasiQuotes#-} {-#LANGUAGE OverloadedStrings#-} module HsVerilog.Type where import qualified Data.Text as T import qualified Data.Map as M class Verilog a where toVerilog :: a -> T.Text data Range = Range Integer Integer | RangeBit Integer | Bit deriving (Show,Read,Eq) data Signal = Signal { sname :: T.Text , sbits :: Range , sval :: Integer } deriving (Show,Read,Eq) type InstanceName = T.Text data Instance = Instance { iname :: InstanceName , icircuit :: Circuit } deriving (Show,Read,Eq) data Stim = Posedge Signal | Negedge Signal deriving (Show,Read,Eq) data Always = Always { alsig :: Signal , alstim :: [Stim] , alexp :: Exp } deriving (Show,Read,Eq) data Assign = Assign { assig :: Signal , asexp :: Exp } deriving (Show,Read,Eq) data Exp = If Exp Exp Exp | Mux Exp Exp Exp | Not Exp | Or Exp Exp | BitOr Exp Exp | And Exp Exp | BitAnd Exp Exp | Add Exp Exp | Sub Exp Exp | Mul Exp Exp | Div Exp Exp | Eq Exp Exp | S Signal | C Integer | NonBlockAssign Exp Exp | BlockAssign Exp Exp deriving (Show,Read,Eq) instance Num Exp where fromInteger a = C a (+) a b = Add a b (-) a b = Sub a b (*) a b = Mul a b data Circuit = Circuit { cname :: T.Text , cinput :: [Signal] , coutput :: [Signal] , cinout :: [Signal] , creg :: [Always] , cassign :: [Assign] , cinstance :: [Instance] , cinstanceConnect :: M.Map InstanceName [(Signal,Signal)] } deriving (Show,Read,Eq)
junjihashimoto/hsverilog
src/HsVerilog/Type.hs
bsd-3-clause
1,539
0
11
359
585
341
244
70
0
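-- A minimal sketch of how the types above compose, assuming the HsVerilog.Type
-- module is in scope: an 8-bit counter register updated on the positive clock
-- edge. The names and widths are illustrative only, and rendering the circuit
-- would additionally need a Verilog instance for Circuit, which is not shown here.
{-# LANGUAGE OverloadedStrings #-}
import qualified Data.Map as M
import HsVerilog.Type

clk, cnt :: Signal
clk = Signal { sname = "clk", sbits = Bit,       sval = 0 }
cnt = Signal { sname = "cnt", sbits = Range 7 0, sval = 0 }

counter :: Circuit
counter = Circuit
  { cname            = "counter"
  , cinput           = [clk]
  , coutput          = [cnt]
  , cinout           = []
    -- cnt <= cnt + 1 at every posedge clk; the literal 1 becomes C 1 via the Num instance for Exp
  , creg             = [Always cnt [Posedge clk] (NonBlockAssign (S cnt) (S cnt + 1))]
  , cassign          = []
  , cinstance        = []
  , cinstanceConnect = M.empty
  }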
module Playground18 where import Playground17 import qualified Data.Map as Map import Data.Maybe import Data.List possibleDrawers :: [Int] possibleDrawers = [1 .. 50] getDrawerContents :: [Int] -> Map.Map Int a -> [Maybe a] getDrawerContents ids' catalog = map getIdsContents ids' where getIdsContents = \ drawerId -> Map.lookup drawerId catalog availableOrgans :: [Maybe Organ] availableOrgans = getDrawerContents possibleDrawers organCatalog countOrgan :: Organ -> [Maybe Organ] -> Int countOrgan organ available = length (filter (\x -> x == (Just organ)) available) isSomething :: Maybe Organ -> Bool isSomething Nothing = False isSomething (Just _) = True justTheOrgans :: [Maybe Organ] justTheOrgans = filter isSomething availableOrgans justTheOrgans' :: [Maybe Organ] justTheOrgans' = filter isJust availableOrgans showOrgan :: Maybe Organ -> String showOrgan Nothing = "" showOrgan (Just o) = show o organList :: [String] organList = map showOrgan justTheOrgans cleanList :: [Char] cleanList = intercalate ", " organList data Container = Vat Organ | Cooler Organ | Bag Organ instance Show Container where show (Vat organ) = show organ ++ " in a vat" show (Cooler organ) = show organ ++ " in a cooler" show (Bag organ) = show organ ++ " in a bag" data Location = Lab | Kitchen | Bathroom deriving Show organToContainer :: Organ -> Container organToContainer Brain = Vat Brain organToContainer Heart = Cooler Heart organToContainer organ = Bag organ placeInLocation :: Container -> (Location,Container) placeInLocation (Vat a) = (Lab, Vat a) placeInLocation (Cooler a) = (Lab, Cooler a) placeInLocation (Bag a) = (Kitchen, Bag a) process :: Organ -> (Location, Container) process organ = placeInLocation (organToContainer organ) report ::(Location,Container) -> String report (location,container) = show container ++ " in the " ++ show location processAndReport :: Maybe Organ -> String processAndReport (Just organ) = report (process organ) processAndReport Nothing = "error, is not found" processRequest :: Int -> Map.Map Int Organ -> String processRequest id' catalog = processAndReport organ where organ = Map.lookup id' catalog
stefanocerruti/haskell-primer-alpha
src/Playground18.hs
bsd-3-clause
2,236
0
12
416
732
383
349
54
1
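-- A small driver for the functions above, assuming Playground17 exports the Organ
-- type (with at least Brain and Heart, as used in organToContainer) together with
-- organCatalog. The expected strings in the comments follow from the Show
-- instances defined in this module.
demo :: IO ()
demo = do
  putStrLn (processAndReport (Just Brain))  -- "Brain in a vat in the Lab"
  putStrLn (processAndReport (Just Heart))  -- "Heart in a cooler in the Lab"
  putStrLn (processAndReport Nothing)       -- "error, is not found"
  putStrLn cleanList                        -- organs found in the catalog, comma separated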
{-# LANGUAGE OverloadedStrings, RecordWildCards #-} module DB.CardUserSession.Queries where import Prelude hiding (id) import Control.Monad.IO.Class (liftIO) import Data.Functor ((<$>)) import Data.Maybe (listToMaybe) import Data.Monoid import Data.String (fromString) import Data.UUID hiding (fromString) import Data.Text (Text) import Database.PostgreSQL.Simple import Web.Scotty import DB.CardUser.Model import DB.CardUser.Queries import DB.CardUserSession.Model getUserFromSession :: Connection -> UUID -> ActionM (Maybe CardUser) getUserFromSession conn uuid = do liftIO $ listToMaybe <$> query conn lookupUserQuery (Only uuid) where lookupUserQuery :: Query lookupUserQuery = fromString $ mconcat [ "SELECT * FROM carduser WHERE id = " , "(SELECT user_id FROM carduser_session " , " WHERE id = ? LIMIT 1)" ] createSession :: Connection -> Int -- ^ User ID -> ActionM (Maybe CardUserSession) createSession conn userId = liftIO $ listToMaybe <$> query conn createQuery (Only userId) where createQuery = "INSERT INTO carduser_session (id, user_id, expires) VALUES (gen_random_uuid(), ?, NOW() + '2 WEEKS') RETURNING *" deleteSession :: Connection -> UUID -> ActionM Bool deleteSession conn sessionId = do let deleteQuery = "DELETE FROM carduser_session WHERE id = ?" numRows <- liftIO $ execute conn deleteQuery (Only sessionId) return $ if numRows == 1 then True else False loginUser :: Connection -> Text -- ^ Username -> Text -- ^ Password (plaintext) -> ActionM (Maybe CardUserSession) loginUser conn uname passwd = do user <- getUserByCredentials conn uname passwd case user of Nothing -> return Nothing Just (CardUser{..}) -> createSession conn id
ppseafield/backend-flashcard
src/DB/CardUserSession/Queries.hs
bsd-3-clause
1,983
0
13
554
432
232
200
45
2
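-- A hypothetical Scotty route showing how loginUser above might be wired up. The
-- path, parameter names, and response bodies are invented for illustration, the
-- caller is assumed to supply an open Connection, and a real handler would also
-- hand the session id back to the client (for example in a cookie).
{-# LANGUAGE OverloadedStrings #-}
import Database.PostgreSQL.Simple (Connection)
import Network.HTTP.Types.Status (status401)
import Web.Scotty

loginRoute :: Connection -> ScottyM ()
loginRoute conn =
  post "/login" $ do
    uname    <- param "username"
    passwd   <- param "password"
    msession <- loginUser conn uname passwd
    case msession of
      Nothing -> status status401 >> text "invalid credentials"
      Just _  -> text "logged in"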
module Pos.Infra.Communication.Relay ( module Pos.Infra.Communication.Relay.Class , module Pos.Infra.Communication.Relay.Logic , module Pos.Infra.Communication.Relay.Types , module Pos.Infra.Communication.Relay.Util ) where import Pos.Infra.Communication.Relay.Class import Pos.Infra.Communication.Relay.Logic import Pos.Infra.Communication.Relay.Types import Pos.Infra.Communication.Relay.Util
input-output-hk/pos-haskell-prototype
infra/src/Pos/Infra/Communication/Relay.hs
mit
471
0
5
100
78
59
19
9
0
type A = Int data B = B A deriving Show main = show (B 100)
roberth/uu-helium
test/correct/DerivableSyn.hs
gpl-3.0
61
0
7
17
33
18
15
3
1
{-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} {-# OPTIONS_GHC -fno-warn-unused-binds #-} {-# OPTIONS_GHC -fno-warn-unused-matches #-} -- Derived from AWS service descriptions, licensed under Apache 2.0. -- | -- Module : Network.AWS.OpsWorks.RegisterInstance -- Copyright : (c) 2013-2015 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- -- Registers instances with a specified stack that were created outside of -- AWS OpsWorks. -- -- We do not recommend using this action to register instances. The -- complete registration operation has two primary steps, installing the -- AWS OpsWorks agent on the instance and registering the instance with the -- stack. 'RegisterInstance' handles only the second step. You should -- instead use the AWS CLI 'register' command, which performs the entire -- registration operation. For more information, see -- <http://docs.aws.amazon.com/opsworks/latest/userguide/registered-instances-register.html Registering an Instance with an AWS OpsWorks Stack>. -- -- __Required Permissions__: To use this action, an IAM user must have a -- Manage permissions level for the stack or an attached policy that -- explicitly grants permissions. For more information on user permissions, -- see -- <http://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html Managing User Permissions>. -- -- /See:/ <http://docs.aws.amazon.com/opsworks/latest/APIReference/API_RegisterInstance.html AWS API Reference> for RegisterInstance. module Network.AWS.OpsWorks.RegisterInstance ( -- * Creating a Request registerInstance , RegisterInstance -- * Request Lenses , riPrivateIP , riHostname , riInstanceIdentity , riPublicIP , riRsaPublicKeyFingerprint , riRsaPublicKey , riStackId -- * Destructuring the Response , registerInstanceResponse , RegisterInstanceResponse -- * Response Lenses , rirsInstanceId , rirsResponseStatus ) where import Network.AWS.OpsWorks.Types import Network.AWS.OpsWorks.Types.Product import Network.AWS.Prelude import Network.AWS.Request import Network.AWS.Response -- | /See:/ 'registerInstance' smart constructor. data RegisterInstance = RegisterInstance' { _riPrivateIP :: !(Maybe Text) , _riHostname :: !(Maybe Text) , _riInstanceIdentity :: !(Maybe InstanceIdentity) , _riPublicIP :: !(Maybe Text) , _riRsaPublicKeyFingerprint :: !(Maybe Text) , _riRsaPublicKey :: !(Maybe Text) , _riStackId :: !Text } deriving (Eq,Read,Show,Data,Typeable,Generic) -- | Creates a value of 'RegisterInstance' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'riPrivateIP' -- -- * 'riHostname' -- -- * 'riInstanceIdentity' -- -- * 'riPublicIP' -- -- * 'riRsaPublicKeyFingerprint' -- -- * 'riRsaPublicKey' -- -- * 'riStackId' registerInstance :: Text -- ^ 'riStackId' -> RegisterInstance registerInstance pStackId_ = RegisterInstance' { _riPrivateIP = Nothing , _riHostname = Nothing , _riInstanceIdentity = Nothing , _riPublicIP = Nothing , _riRsaPublicKeyFingerprint = Nothing , _riRsaPublicKey = Nothing , _riStackId = pStackId_ } -- | The instance\'s private IP address. riPrivateIP :: Lens' RegisterInstance (Maybe Text) riPrivateIP = lens _riPrivateIP (\ s a -> s{_riPrivateIP = a}); -- | The instance\'s hostname. 
riHostname :: Lens' RegisterInstance (Maybe Text) riHostname = lens _riHostname (\ s a -> s{_riHostname = a}); -- | An InstanceIdentity object that contains the instance\'s identity. riInstanceIdentity :: Lens' RegisterInstance (Maybe InstanceIdentity) riInstanceIdentity = lens _riInstanceIdentity (\ s a -> s{_riInstanceIdentity = a}); -- | The instance\'s public IP address. riPublicIP :: Lens' RegisterInstance (Maybe Text) riPublicIP = lens _riPublicIP (\ s a -> s{_riPublicIP = a}); -- | The instances public RSA key fingerprint. riRsaPublicKeyFingerprint :: Lens' RegisterInstance (Maybe Text) riRsaPublicKeyFingerprint = lens _riRsaPublicKeyFingerprint (\ s a -> s{_riRsaPublicKeyFingerprint = a}); -- | The instances public RSA key. This key is used to encrypt communication -- between the instance and the service. riRsaPublicKey :: Lens' RegisterInstance (Maybe Text) riRsaPublicKey = lens _riRsaPublicKey (\ s a -> s{_riRsaPublicKey = a}); -- | The ID of the stack that the instance is to be registered with. riStackId :: Lens' RegisterInstance Text riStackId = lens _riStackId (\ s a -> s{_riStackId = a}); instance AWSRequest RegisterInstance where type Rs RegisterInstance = RegisterInstanceResponse request = postJSON opsWorks response = receiveJSON (\ s h x -> RegisterInstanceResponse' <$> (x .?> "InstanceId") <*> (pure (fromEnum s))) instance ToHeaders RegisterInstance where toHeaders = const (mconcat ["X-Amz-Target" =# ("OpsWorks_20130218.RegisterInstance" :: ByteString), "Content-Type" =# ("application/x-amz-json-1.1" :: ByteString)]) instance ToJSON RegisterInstance where toJSON RegisterInstance'{..} = object (catMaybes [("PrivateIp" .=) <$> _riPrivateIP, ("Hostname" .=) <$> _riHostname, ("InstanceIdentity" .=) <$> _riInstanceIdentity, ("PublicIp" .=) <$> _riPublicIP, ("RsaPublicKeyFingerprint" .=) <$> _riRsaPublicKeyFingerprint, ("RsaPublicKey" .=) <$> _riRsaPublicKey, Just ("StackId" .= _riStackId)]) instance ToPath RegisterInstance where toPath = const "/" instance ToQuery RegisterInstance where toQuery = const mempty -- | Contains the response to a 'RegisterInstanceResult' request. -- -- /See:/ 'registerInstanceResponse' smart constructor. data RegisterInstanceResponse = RegisterInstanceResponse' { _rirsInstanceId :: !(Maybe Text) , _rirsResponseStatus :: !Int } deriving (Eq,Read,Show,Data,Typeable,Generic) -- | Creates a value of 'RegisterInstanceResponse' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'rirsInstanceId' -- -- * 'rirsResponseStatus' registerInstanceResponse :: Int -- ^ 'rirsResponseStatus' -> RegisterInstanceResponse registerInstanceResponse pResponseStatus_ = RegisterInstanceResponse' { _rirsInstanceId = Nothing , _rirsResponseStatus = pResponseStatus_ } -- | The registered instance\'s AWS OpsWorks ID. rirsInstanceId :: Lens' RegisterInstanceResponse (Maybe Text) rirsInstanceId = lens _rirsInstanceId (\ s a -> s{_rirsInstanceId = a}); -- | The response status code. rirsResponseStatus :: Lens' RegisterInstanceResponse Int rirsResponseStatus = lens _rirsResponseStatus (\ s a -> s{_rirsResponseStatus = a});
fmapfmapfmap/amazonka
amazonka-opsworks/gen/Network/AWS/OpsWorks/RegisterInstance.hs
mpl-2.0
7,483
0
13
1,615
1,085
650
435
128
1
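-- A hedged sketch of building a request value with the generated lenses above;
-- the stack id, hostname, and address are placeholders, and actually sending the
-- request (for example with amazonka's send) is environment-specific and omitted.
{-# LANGUAGE OverloadedStrings #-}
import Control.Lens ((&), (?~))

req :: RegisterInstance
req = registerInstance "my-stack-id"
        & riHostname  ?~ "registered-host"
        & riPrivateIP ?~ "10.0.0.12"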
{-# LANGUAGE FlexibleContexts #-} -- | This module collects several function for converting Hagl values to -- strings and printing out the current state of an execution. Note that -- some other pretty printing functions are located in the modules they are -- specific to. module Hagl.Print where import Control.Monad (liftM,liftM2,unless) import Control.Monad.IO.Class import Hagl.Lists import Hagl.Payoff import Hagl.Game import Hagl.History import Hagl.Exec -- -- * Generic printing functions -- -- | Print a value from within a `MonadIO` monad. print :: (MonadIO m, Show a) => m a -> m () print = (>>= printStr . show) -- | Print a value + newline from within a `MonadIO` monad. printLn :: (MonadIO m, Show a) => m a -> m () printLn = (>>= printStrLn . show) -- | Print a string from within a `MonadIO` monad. printStr :: MonadIO m => String -> m () printStr = liftIO . putStr -- | Print a string + newline from within a `MonadIO` monad. printStrLn :: MonadIO m => String -> m () printStrLn = liftIO . putStrLn -- -- * Pure showing functions -- -- | String representation of a transcript. showTranscript :: (Game g, Show (Move g)) => ByPlayer (Player g) -> Transcript (Move g) -> String showTranscript ps = unlines . map mv . reverse where mv (Just i, m) = " " ++ show (forPlayer i ps) ++ "'s move: " ++ show m mv (Nothing, m) = " Chance: " ++ show m -- | String representation of a move summary. showMoveSummary :: (Game g, Show (Move g)) => ByPlayer (Player g) -> MoveSummary (Move g) -> String showMoveSummary ps mss = (unlines . map row) (zip (everyPlayer ps) (map everyTurn (everyPlayer mss))) where row (p,ms) = " " ++ show p ++ " moves: " ++ showSeq (reverse (map show ms)) -- | Generate a string showing a set of players' scores. scoreString :: ByPlayer (Player g) -> Payoff -> String scoreString (ByPlayer ps) (ByPlayer vs) = unlines [" "++show p++": "++showFloat v | (p,v) <- zip ps vs] -- -- * Printing game execution state -- -- | Print the current location in the game tree. printLocation :: (GameM m g, Show (Move g), Show (TreeType g (State g) (Move g))) => m () printLocation = do l <- location printStrLn (show l) -- | Print the moves from the current location. printMovesFromHere :: (GameM m g, Show (Move g), DiscreteGame g) => m () printMovesFromHere = do l <- location (printStrLn . show . movesFrom) l -- | Print a payoff or nothing. printMaybePayoff :: GameM m g => Maybe Payoff -> m () printMaybePayoff Nothing = return () printMaybePayoff (Just p) = printStrLn $ " Payoff: " ++ showPayoffAsList p -- | Print the transcript of the current game iteration, or if the game -- has just finished, print the transcript of the last iteration. printTranscript :: (GameM m g, Show (Move g)) => m () printTranscript = do new <- isNewGame n <- gameNumber printTranscriptOfGame (if new then n-1 else n) -- | Print transcript of the given game. printTranscriptOfGame :: (GameM m g, Show (Move g)) => Int -> m () printTranscriptOfGame n = do printStrLn $ "Iteration " ++ show n ++ ":" (t,(_,p)) <- liftM (forGame n) history ps <- players printStr (showTranscript ps t) printMaybePayoff p -- | Print transcripts of all completed games. printTranscripts :: (GameM m g, Show (Move g)) => m () printTranscripts = do n <- numCompleted mapM_ printTranscriptOfGame [1..n] -- | Print summary of the last game. printSummary :: (GameM m g, Show (Move g)) => m () printSummary = numCompleted >>= printSummaryOfGame -- | Print summary of every completed game. 
printSummaries :: (GameM m g, Show (Move g)) => m () printSummaries = numCompleted >>= \n -> mapM_ printSummaryOfGame [1..n] -- | Print the summary of the indicated game. printSummaryOfGame :: (GameM m g, Show (Move g)) => Int -> m () printSummaryOfGame n = do (mss,pay) <- liftM (forGame n) summaries ps <- players printStrLn $ "Summary of Game "++show n++":" printStr $ showMoveSummary ps mss printMaybePayoff pay -- | Print the current score. printScore :: (GameM m g, Show (Move g)) => m Payoff printScore = do printStrLn "Score:" printStr =<< liftM2 scoreString players score score
pparkkin/Hagl
Hagl/Print.hs
bsd-3-clause
4,275
0
12
945
1,349
693
656
71
2
{-# LANGUAGE TemplateHaskell #-} import Network.Socket import Network.Wai.Ghcjs import Network.Wai.Handler.Warp import System.IO main :: IO () main = do (port, socket) <- openFreePort app <- $(serveGhcjs $ BuildConfig { mainFile = "Main.hs", customIndexFile = Nothing, sourceDirs = ["."], projectDir = "client", projectExec = Stack, buildDir = "builds" }) let settings = setBeforeMainLoop (do print port hFlush stdout) $ defaultSettings runSettingsSocket settings socket app openFreePort :: IO (Port, Socket) openFreePort = do s <- socket AF_INET Stream defaultProtocol localhost <- inet_addr "127.0.0.1" bind s (SockAddrInet aNY_PORT localhost) listen s 1 port <- socketPort s return (fromIntegral port, s)
soenkehahn/wai-shake
test/test-project/Main.hs
bsd-3-clause
833
0
15
222
242
124
118
29
1
{-# LANGUAGE DeriveFunctor, DeriveFoldable, DeriveTraversable #-} module Distribution.Client.Dependency.Modular.Tree ( FailReason(..) , POption(..) , Tree(..) , TreeF(..) , ana , anaM , cata , cataM , choices , inn , innM , lchoices , para , trav ) where import Control.Monad hiding (mapM, sequence) import Data.Foldable import Data.Traversable import Prelude hiding (foldr, mapM, sequence) import Distribution.Client.Dependency.Modular.Dependency import Distribution.Client.Dependency.Modular.Flag import Distribution.Client.Dependency.Modular.Package import Distribution.Client.Dependency.Modular.PSQ (PSQ) import qualified Distribution.Client.Dependency.Modular.PSQ as P import Distribution.Client.Dependency.Modular.Version import Distribution.Client.Dependency.Types ( ConstraintSource(..) ) -- | Type of the search tree. Inlining the choice nodes for now. data Tree a = PChoice QPN a (PSQ POption (Tree a)) | FChoice QFN a Bool Bool (PSQ Bool (Tree a)) -- Bool indicates whether it's weak/trivial, second Bool whether it's manual | SChoice QSN a Bool (PSQ Bool (Tree a)) -- Bool indicates whether it's trivial | GoalChoice (PSQ (OpenGoal ()) (Tree a)) -- PSQ should never be empty | Done RevDepMap | Fail (ConflictSet QPN) FailReason deriving (Eq, Show, Functor) -- Above, a choice is called trivial if it clearly does not matter. The -- special case of triviality we actually consider is if there are no new -- dependencies introduced by this node. -- -- A (flag) choice is called weak if we do want to defer it. This is the -- case for flags that should be implied by what's currently installed on -- the system, as opposed to flags that are used to explicitly enable or -- disable some functionality. -- | A package option is a package instance with an optional linking annotation -- -- The modular solver has a number of package goals to solve for, and can only -- pick a single package version for a single goal. In order to allow to -- install multiple versions of the same package as part of a single solution -- the solver uses qualified goals. For example, @0.P@ and @1.P@ might both -- be qualified goals for @P@, allowing to pick a difference version of package -- @P@ for @0.P@ and @1.P@. -- -- Linking is an essential part of this story. In addition to picking a specific -- version for @1.P@, the solver can also decide to link @1.P@ to @0.P@ (or -- vice versa). Teans that @1.P@ and @0.P@ really must be the very same package -- (and hence must have the same build time configuration, and their -- dependencies must also be the exact same). -- -- See <http://www.well-typed.com/blog/2015/03/qualified-goals/> for details. data POption = POption I (Maybe PP) deriving (Eq, Show) data FailReason = InconsistentInitialConstraints | Conflicting [Dep QPN] | CannotInstall | CannotReinstall | Shadowed | Broken | GlobalConstraintVersion VR ConstraintSource | GlobalConstraintInstalled ConstraintSource | GlobalConstraintSource ConstraintSource | GlobalConstraintFlag ConstraintSource | ManualFlag | BuildFailureNotInIndex PN | MalformedFlagChoice QFN | MalformedStanzaChoice QSN | EmptyGoalChoice | Backjump | MultipleInstances | DependenciesNotLinked String deriving (Eq, Show) -- | Functor for the tree type. 
data TreeF a b = PChoiceF QPN a (PSQ POption b) | FChoiceF QFN a Bool Bool (PSQ Bool b) | SChoiceF QSN a Bool (PSQ Bool b) | GoalChoiceF (PSQ (OpenGoal ()) b) | DoneF RevDepMap | FailF (ConflictSet QPN) FailReason deriving (Functor, Foldable, Traversable) out :: Tree a -> TreeF a (Tree a) out (PChoice p i ts) = PChoiceF p i ts out (FChoice p i b m ts) = FChoiceF p i b m ts out (SChoice p i b ts) = SChoiceF p i b ts out (GoalChoice ts) = GoalChoiceF ts out (Done x ) = DoneF x out (Fail c x ) = FailF c x inn :: TreeF a (Tree a) -> Tree a inn (PChoiceF p i ts) = PChoice p i ts inn (FChoiceF p i b m ts) = FChoice p i b m ts inn (SChoiceF p i b ts) = SChoice p i b ts inn (GoalChoiceF ts) = GoalChoice ts inn (DoneF x ) = Done x inn (FailF c x ) = Fail c x innM :: Monad m => TreeF a (m (Tree a)) -> m (Tree a) innM (PChoiceF p i ts) = liftM (PChoice p i ) (sequence ts) innM (FChoiceF p i b m ts) = liftM (FChoice p i b m) (sequence ts) innM (SChoiceF p i b ts) = liftM (SChoice p i b ) (sequence ts) innM (GoalChoiceF ts) = liftM (GoalChoice ) (sequence ts) innM (DoneF x ) = return $ Done x innM (FailF c x ) = return $ Fail c x -- | Determines whether a tree is active, i.e., isn't a failure node. active :: Tree a -> Bool active (Fail _ _) = False active _ = True -- | Determines how many active choices are available in a node. Note that we -- count goal choices as having one choice, always. choices :: Tree a -> Int choices (PChoice _ _ ts) = P.length (P.filter active ts) choices (FChoice _ _ _ _ ts) = P.length (P.filter active ts) choices (SChoice _ _ _ ts) = P.length (P.filter active ts) choices (GoalChoice _ ) = 1 choices (Done _ ) = 1 choices (Fail _ _ ) = 0 -- | Variant of 'choices' that only approximates the number of choices, -- using 'llength'. lchoices :: Tree a -> Int lchoices (PChoice _ _ ts) = P.llength (P.filter active ts) lchoices (FChoice _ _ _ _ ts) = P.llength (P.filter active ts) lchoices (SChoice _ _ _ ts) = P.llength (P.filter active ts) lchoices (GoalChoice _ ) = 1 lchoices (Done _ ) = 1 lchoices (Fail _ _ ) = 0 -- | Catamorphism on trees. cata :: (TreeF a b -> b) -> Tree a -> b cata phi x = (phi . fmap (cata phi) . out) x trav :: (TreeF a (Tree b) -> TreeF b (Tree b)) -> Tree a -> Tree b trav psi x = cata (inn . psi) x -- | Paramorphism on trees. para :: (TreeF a (b, Tree a) -> b) -> Tree a -> b para phi = phi . fmap (\ x -> (para phi x, x)) . out cataM :: Monad m => (TreeF a b -> m b) -> Tree a -> m b cataM phi = phi <=< mapM (cataM phi) <=< return . out -- | Anamorphism on trees. ana :: (b -> TreeF a b) -> b -> Tree a ana psi = inn . fmap (ana psi) . psi anaM :: Monad m => (b -> m (TreeF a b)) -> b -> m (Tree a) anaM psi = return . inn <=< mapM (anaM psi) <=< psi
randen/cabal
cabal-install/Distribution/Client/Dependency/Modular/Tree.hs
bsd-3-clause
6,861
0
11
2,013
1,930
1,022
908
-1
-1
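-- If added to this module, a small fold like the following counts the fully
-- solved (Done) leaves of a search tree; it is only a sketch, and it relies on
-- the derived Foldable instance for TreeF to add up the counts of the subtrees.
countDone :: Tree a -> Int
countDone = cata go
  where
    go (DoneF _) = 1             -- a solved leaf counts once
    go t         = foldr (+) 0 t -- sum over all child trees (Data.Foldable.foldr)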
module Main where import Test.Invariant import Test.Tasty import Test.Tasty.QuickCheck main :: IO () main = defaultMain $ testGroup "Tests" [ testGroup "Arity 1" [ testProperty "Idempotence" $ idempotent (* (0 :: Int)) , testProperty "Idempotence" $ (/=0) &> not . idempotent (* (2 :: Int)) , testProperty "Idempotence" $ idempotent (abs :: Double -> Double) , testProperty "Idempotence" $ idempotent (signum :: Int -> Int) , testProperty "Distributivity" $ ((*) :: Int -> Int -> Int) `distributesOver` (+) , testProperty "Distributivity" $ (*) `distributesOver` ((+) :: Int -> Int -> Int) ] ]
knupfer/test-invariant
test/Test.hs
bsd-3-clause
697
0
14
202
224
128
96
-1
-1
{-# LANGUAGE Haskell98 #-} {-# LINE 1 "Data/Text/Internal/Unsafe.hs" #-} {-# LANGUAGE CPP, MagicHash, UnboxedTuples #-} {-# OPTIONS_HADDOCK not-home #-} -- | -- Module : Data.Text.Internal.Unsafe -- Copyright : (c) 2009, 2010, 2011 Bryan O'Sullivan -- License : BSD-style -- Maintainer : [email protected] -- Stability : experimental -- Portability : portable -- -- /Warning/: this is an internal module, and does not have a stable -- API or name. Functions in this module may not check or enforce -- preconditions expected by public modules. Use at your own risk! -- -- A module containing /unsafe/ operations, for /very very careful/ use -- in /heavily tested/ code. module Data.Text.Internal.Unsafe ( inlineInterleaveST , inlinePerformIO ) where import GHC.ST (ST(..)) import GHC.IO (IO(IO)) import GHC.Base (realWorld#) -- | Just like unsafePerformIO, but we inline it. Big performance gains as -- it exposes lots of things to further inlining. /Very unsafe/. In -- particular, you should do no memory allocation inside an -- 'inlinePerformIO' block. On Hugs this is just @unsafePerformIO@. -- {-# INLINE inlinePerformIO #-} inlinePerformIO :: IO a -> a inlinePerformIO (IO m) = case m realWorld# of (# _, r #) -> r -- | Allow an 'ST' computation to be deferred lazily. When passed an -- action of type 'ST' @s@ @a@, the action will only be performed when -- the value of @a@ is demanded. -- -- This function is identical to the normal unsafeInterleaveST, but is -- inlined and hence faster. -- -- /Note/: This operation is highly unsafe, as it can introduce -- externally visible non-determinism into an 'ST' action. inlineInterleaveST :: ST s a -> ST s a inlineInterleaveST (ST m) = ST $ \ s -> let r = case m s of (# _, res #) -> res in (# s, r #) {-# INLINE inlineInterleaveST #-}
phischu/fragnix
tests/packages/scotty/Data.Text.Internal.Unsafe.hs
bsd-3-clause
1,895
1
12
407
206
129
77
18
1
{-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE TypeSynonymInstances #-} module IHaskell.Display.Widgets.Float.BoundedFloat.BoundedFloatText ( -- * The BoundedFloatText -- Widget BoundedFloatText, -- * Constructor mkBoundedFloatText) where -- To keep `cabal repl` happy when running from the ihaskell repo import Prelude import Data.Aeson import qualified Data.HashMap.Strict as HM import Data.IORef (newIORef) import qualified Data.Scientific as Sci import Data.Text (Text) import IHaskell.Display import IHaskell.Eval.Widgets import IHaskell.IPython.Message.UUID as U import IHaskell.Display.Widgets.Types import IHaskell.Display.Widgets.Common -- | 'BoundedFloatText' represents an BoundedFloatText widget from IPython.html.widgets. type BoundedFloatText = IPythonWidget BoundedFloatTextType -- | Create a new widget mkBoundedFloatText :: IO BoundedFloatText mkBoundedFloatText = do -- Default properties, with a random uuid uuid <- U.random let widgetState = WidgetState $ defaultBoundedFloatWidget "FloatTextView" stateIO <- newIORef widgetState let widget = IPythonWidget uuid stateIO -- Open a comm for this widget, and store it in the kernel state widgetSendOpen widget $ toJSON widgetState -- Return the widget return widget instance IHaskellDisplay BoundedFloatText where display b = do widgetSendView b return $ Display [] instance IHaskellWidget BoundedFloatText where getCommUUID = uuid comm widget (Object dict1) _ = do let key1 = "sync_data" :: Text key2 = "value" :: Text Just (Object dict2) = HM.lookup key1 dict1 Just (Number value) = HM.lookup key2 dict2 setField' widget FloatValue (Sci.toRealFloat value) triggerChange widget
artuuge/IHaskell
ihaskell-display/ihaskell-widgets/src/IHaskell/Display/Widgets/Float/BoundedFloat/BoundedFloatText.hs
mit
1,918
0
13
423
353
192
161
40
1
<?xml version="1.0" encoding="UTF-8"?> <!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd"> <helpset version="2.0" xml:lang="pl-PL"> <title>Active Scan Rules - Blpha | ZAP Extension</title> <maps> <homeID>top</homeID> <mapref location="map.jhm"/> </maps> <view> <name>TOC</name> <label>Zawartość</label> <type>org.zaproxy.zap.extension.help.ZapTocView</type> <data>toc.xml</data> </view> <view> <name>Index</name> <label>Indeks</label> <type>javax.help.IndexView</type> <data>index.xml</data> </view> <view> <name>Search</name> <label>Szukaj</label> <type>javax.help.SearchView</type> <data engine="com.sun.java.help.search.DefaultSearchEngine"> JavaHelpSearch </data> </view> <view> <name>Favorites</name> <label>Ulubione</label> <type>javax.help.FavoritesView</type> </view> </helpset>
veggiespam/zap-extensions
addOns/wavsepRpt/src/main/javahelp/org/zaproxy/zap/extension/wavsepRpt/resources/help_pl_PL/helpset_pl_PL.hs
apache-2.0
990
89
29
163
410
218
192
-1
-1
-- (c) The University of Glasgow 2012 {-# LANGUAGE CPP, DataKinds, DeriveDataTypeable, GADTs, KindSignatures, ScopedTypeVariables, StandaloneDeriving, RoleAnnotations #-} -- | Module for coercion axioms, used to represent type family instances -- and newtypes module CoAxiom ( BranchFlag, Branched, Unbranched, BranchIndex, Branches, manyBranches, unbranched, fromBranches, numBranches, mapAccumBranches, CoAxiom(..), CoAxBranch(..), toBranchedAxiom, toUnbranchedAxiom, coAxiomName, coAxiomArity, coAxiomBranches, coAxiomTyCon, isImplicitCoAxiom, coAxiomNumPats, coAxiomNthBranch, coAxiomSingleBranch_maybe, coAxiomRole, coAxiomSingleBranch, coAxBranchTyVars, coAxBranchRoles, coAxBranchLHS, coAxBranchRHS, coAxBranchSpan, coAxBranchIncomps, placeHolderIncomps, Role(..), fsFromRole, CoAxiomRule(..), Eqn, BuiltInSynFamily(..), trivialBuiltInFamily ) where import {-# SOURCE #-} TypeRep ( Type ) import {-# SOURCE #-} TyCon ( TyCon ) import Outputable import FastString import Name import Unique import Var import Util import Binary import Pair import BasicTypes import Data.Typeable ( Typeable ) import SrcLoc import qualified Data.Data as Data import Data.Array import Data.List ( mapAccumL ) #include "HsVersions.h" {- Note [Coercion axiom branches] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ In order to allow closed type families, an axiom needs to contain an ordered list of alternatives, called branches. The kind of the coercion built from an axiom is determined by which index is used when building the coercion from the axiom. For example, consider the axiom derived from the following declaration: type family F a where F [Int] = Bool F [a] = Double F (a b) = Char This will give rise to this axiom: axF :: { F [Int] ~ Bool ; forall (a :: *). F [a] ~ Double ; forall (k :: BOX) (a :: k -> *) (b :: k). F (a b) ~ Char } The axiom is used with the AxiomInstCo constructor of Coercion. If we wish to have a coercion showing that F (Maybe Int) ~ Char, it will look like axF[2] <*> <Maybe> <Int> :: F (Maybe Int) ~ Char -- or, written using concrete-ish syntax -- AxiomInstCo axF 2 [Refl *, Refl Maybe, Refl Int] Note that the index is 0-based. For type-checking, it is also necessary to check that no previous pattern can unify with the supplied arguments. After all, it is possible that some of the type arguments are lambda-bound type variables whose instantiation may cause an earlier match among the branches. We wish to prohibit this behavior, so the type checker rules out the choice of a branch where a previous branch can unify. See also [Apartness] in FamInstEnv.hs. For example, the following is malformed, where 'a' is a lambda-bound type variable: axF[2] <*> <a> <Bool> :: F (a Bool) ~ Char Why? Because a might be instantiated with [], meaning that branch 1 should apply, not branch 2. This is a vital consistency check; without it, we could derive Int ~ Bool, and that is a Bad Thing. Note [Branched axioms] ~~~~~~~~~~~~~~~~~~~~~~ Although a CoAxiom has the capacity to store many branches, in certain cases, we want only one. These cases are in data/newtype family instances, newtype coercions, and type family instances. Furthermore, these unbranched axioms are used in a variety of places throughout GHC, and it would difficult to generalize all of that code to deal with branched axioms, especially when the code can be sure of the fact that an axiom is indeed a singleton. At the same time, it seems dangerous to assume singlehood in various places through GHC. 
The solution to this is to label a CoAxiom with a phantom type variable declaring whether it is known to be a singleton or not. The branches are stored using a special datatype, declared below, that ensures that the type variable is accurate. ************************************************************************ * * Branches * * ************************************************************************ -} type BranchIndex = Int -- The index of the branch in the list of branches -- Counting from zero -- promoted data type data BranchFlag = Branched | Unbranched type Branched = 'Branched deriving instance Typeable 'Branched type Unbranched = 'Unbranched deriving instance Typeable 'Unbranched -- By using type synonyms for the promoted constructors, we avoid needing -- DataKinds and the promotion quote in client modules. This also means that -- we don't need to export the term-level constructors, which should never be used. newtype Branches (br :: BranchFlag) = MkBranches { unMkBranches :: Array BranchIndex CoAxBranch } deriving Typeable type role Branches nominal manyBranches :: [CoAxBranch] -> Branches Branched manyBranches brs = ASSERT( snd bnds >= fst bnds ) MkBranches (listArray bnds brs) where bnds = (0, length brs - 1) unbranched :: CoAxBranch -> Branches Unbranched unbranched br = MkBranches (listArray (0, 0) [br]) toBranched :: Branches br -> Branches Branched toBranched = MkBranches . unMkBranches toUnbranched :: Branches br -> Branches Unbranched toUnbranched (MkBranches arr) = ASSERT( bounds arr == (0,0) ) MkBranches arr fromBranches :: Branches br -> [CoAxBranch] fromBranches = elems . unMkBranches branchesNth :: Branches br -> BranchIndex -> CoAxBranch branchesNth (MkBranches arr) n = arr ! n numBranches :: Branches br -> Int numBranches (MkBranches arr) = snd (bounds arr) + 1 -- | The @[CoAxBranch]@ passed into the mapping function is a list of -- all previous branches, reversed mapAccumBranches :: ([CoAxBranch] -> CoAxBranch -> CoAxBranch) -> Branches br -> Branches br mapAccumBranches f (MkBranches arr) = MkBranches (listArray (bounds arr) (snd $ mapAccumL go [] (elems arr))) where go :: [CoAxBranch] -> CoAxBranch -> ([CoAxBranch], CoAxBranch) go prev_branches cur_branch = ( cur_branch : prev_branches , f prev_branches cur_branch ) {- ************************************************************************ * * Coercion axioms * * ************************************************************************ Note [Storing compatibility] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ During axiom application, we need to be aware of which branches are compatible with which others. The full explanation is in Note [Compatibility] in FamInstEnv. (The code is placed there to avoid a dependency from CoAxiom on the unification algorithm.) Although we could theoretically compute compatibility on the fly, this is silly, so we store it in a CoAxiom. Specifically, each branch refers to all other branches with which it is incompatible. This list might well be empty, and it will always be for the first branch of any axiom. CoAxBranches that do not (yet) belong to a CoAxiom should have a panic thunk stored in cab_incomps. The incompatibilities are properly a property of the axiom as a whole, and they are computed only when the final axiom is built. During serialization, the list is converted into a list of the indices of the branches. -} -- | A 'CoAxiom' is a \"coercion constructor\", i.e. a named equality axiom. 
-- If you edit this type, you may need to update the GHC formalism -- See Note [GHC Formalism] in coreSyn/CoreLint.hs data CoAxiom br = CoAxiom -- Type equality axiom. { co_ax_unique :: Unique -- unique identifier , co_ax_name :: Name -- name for pretty-printing , co_ax_role :: Role -- role of the axiom's equality , co_ax_tc :: TyCon -- the head of the LHS patterns , co_ax_branches :: Branches br -- the branches that form this axiom , co_ax_implicit :: Bool -- True <=> the axiom is "implicit" -- See Note [Implicit axioms] -- INVARIANT: co_ax_implicit == True implies length co_ax_branches == 1. } deriving Typeable data CoAxBranch = CoAxBranch { cab_loc :: SrcSpan -- Location of the defining equation -- See Note [CoAxiom locations] , cab_tvs :: [TyVar] -- Bound type variables; not necessarily fresh -- See Note [CoAxBranch type variables] , cab_roles :: [Role] -- See Note [CoAxBranch roles] , cab_lhs :: [Type] -- Type patterns to match against , cab_rhs :: Type -- Right-hand side of the equality , cab_incomps :: [CoAxBranch] -- The previous incompatible branches -- See Note [Storing compatibility] } deriving ( Data.Data, Data.Typeable ) toBranchedAxiom :: CoAxiom br -> CoAxiom Branched toBranchedAxiom (CoAxiom unique name role tc branches implicit) = CoAxiom unique name role tc (toBranched branches) implicit toUnbranchedAxiom :: CoAxiom br -> CoAxiom Unbranched toUnbranchedAxiom (CoAxiom unique name role tc branches implicit) = CoAxiom unique name role tc (toUnbranched branches) implicit coAxiomNumPats :: CoAxiom br -> Int coAxiomNumPats = length . coAxBranchLHS . (flip coAxiomNthBranch 0) coAxiomNthBranch :: CoAxiom br -> BranchIndex -> CoAxBranch coAxiomNthBranch (CoAxiom { co_ax_branches = bs }) index = branchesNth bs index coAxiomArity :: CoAxiom br -> BranchIndex -> Arity coAxiomArity ax index = length $ cab_tvs $ coAxiomNthBranch ax index coAxiomName :: CoAxiom br -> Name coAxiomName = co_ax_name coAxiomRole :: CoAxiom br -> Role coAxiomRole = co_ax_role coAxiomBranches :: CoAxiom br -> Branches br coAxiomBranches = co_ax_branches coAxiomSingleBranch_maybe :: CoAxiom br -> Maybe CoAxBranch coAxiomSingleBranch_maybe (CoAxiom { co_ax_branches = MkBranches arr }) | snd (bounds arr) == 0 = Just $ arr ! 0 | otherwise = Nothing coAxiomSingleBranch :: CoAxiom Unbranched -> CoAxBranch coAxiomSingleBranch (CoAxiom { co_ax_branches = MkBranches arr }) = arr ! 0 coAxiomTyCon :: CoAxiom br -> TyCon coAxiomTyCon = co_ax_tc coAxBranchTyVars :: CoAxBranch -> [TyVar] coAxBranchTyVars = cab_tvs coAxBranchLHS :: CoAxBranch -> [Type] coAxBranchLHS = cab_lhs coAxBranchRHS :: CoAxBranch -> Type coAxBranchRHS = cab_rhs coAxBranchRoles :: CoAxBranch -> [Role] coAxBranchRoles = cab_roles coAxBranchSpan :: CoAxBranch -> SrcSpan coAxBranchSpan = cab_loc isImplicitCoAxiom :: CoAxiom br -> Bool isImplicitCoAxiom = co_ax_implicit coAxBranchIncomps :: CoAxBranch -> [CoAxBranch] coAxBranchIncomps = cab_incomps -- See Note [Compatibility checking] in FamInstEnv placeHolderIncomps :: [CoAxBranch] placeHolderIncomps = panic "placeHolderIncomps" {- Note [CoAxBranch type variables] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ In the case of a CoAxBranch of an associated type-family instance, we use the *same* type variables (where possible) as the enclosing class or instance. Consider class C a b where type F x b type F [y] b = ... -- Second param must be b instance C Int [z] where type F Int [z] = ... 
-- Second param must be [z] In the CoAxBranch in the instance decl (F Int [z]) we use the same 'z', so that it's easy to check that that type is the same as that in the instance header. Similarly in the CoAxBranch for the default decl for F in the class decl, we use the same 'b' to make the same check easy. So, unlike FamInsts, there is no expectation that the cab_tvs are fresh wrt each other, or any other CoAxBranch. Note [CoAxBranch roles] ~~~~~~~~~~~~~~~~~~~~~~~ Consider this code: newtype Age = MkAge Int newtype Wrap a = MkWrap a convert :: Wrap Age -> Int convert (MkWrap (MkAge i)) = i We want this to compile to: NTCo:Wrap :: forall a. Wrap a ~R a NTCo:Age :: Age ~R Int convert = \x -> x |> (NTCo:Wrap[0] NTCo:Age[0]) But, note that NTCo:Age is at role R. Thus, we need to be able to pass coercions at role R into axioms. However, we don't *always* want to be able to do this, as it would be disastrous with type families. The solution is to annotate the arguments to the axiom with roles, much like we annotate tycon tyvars. Where do these roles get set? Newtype axioms inherit their roles from the newtype tycon; family axioms are all at role N. Note [CoAxiom locations] ~~~~~~~~~~~~~~~~~~~~~~~~ The source location of a CoAxiom is stored in two places in the datatype tree. * The first is in the location info buried in the Name of the CoAxiom. This span includes all of the branches of a branched CoAxiom. * The second is in the cab_loc fields of the CoAxBranches. In the case of a single branch, we can extract the source location of the branch from the name of the CoAxiom. In other cases, we need an explicit SrcSpan to correctly store the location of the equation giving rise to the FamInstBranch. Note [Implicit axioms] ~~~~~~~~~~~~~~~~~~~~~~ See also Note [Implicit TyThings] in HscTypes * A CoAxiom arising from data/type family instances is not "implicit". That is, it has its own IfaceAxiom declaration in an interface file * The CoAxiom arising from a newtype declaration *is* "implicit". That is, it does not have its own IfaceAxiom declaration in an interface file; instead the CoAxiom is generated by type-checking the newtype declaration -} instance Eq (CoAxiom br) where a == b = case (a `compare` b) of { EQ -> True; _ -> False } a /= b = case (a `compare` b) of { EQ -> False; _ -> True } instance Ord (CoAxiom br) where a <= b = case (a `compare` b) of { LT -> True; EQ -> True; GT -> False } a < b = case (a `compare` b) of { LT -> True; EQ -> False; GT -> False } a >= b = case (a `compare` b) of { LT -> False; EQ -> True; GT -> True } a > b = case (a `compare` b) of { LT -> False; EQ -> False; GT -> True } compare a b = getUnique a `compare` getUnique b instance Uniquable (CoAxiom br) where getUnique = co_ax_unique instance Outputable (CoAxiom br) where ppr = ppr . getName instance NamedThing (CoAxiom br) where getName = co_ax_name instance Typeable br => Data.Data (CoAxiom br) where -- don't traverse? toConstr _ = abstractConstr "CoAxiom" gunfold _ _ = error "gunfold" dataTypeOf _ = mkNoRepType "CoAxiom" {- ************************************************************************ * * Roles * * ************************************************************************ Roles are defined here to avoid circular dependencies. -} -- See Note [Roles] in Coercion -- defined here to avoid cyclic dependency with Coercion data Role = Nominal | Representational | Phantom deriving (Eq, Data.Data, Data.Typeable) -- These names are slurped into the parser code. 
Changing these strings -- will change the **surface syntax** that GHC accepts! If you want to -- change only the pretty-printing, do some replumbing. See -- mkRoleAnnotDecl in RdrHsSyn fsFromRole :: Role -> FastString fsFromRole Nominal = fsLit "nominal" fsFromRole Representational = fsLit "representational" fsFromRole Phantom = fsLit "phantom" instance Outputable Role where ppr = ftext . fsFromRole instance Binary Role where put_ bh Nominal = putByte bh 1 put_ bh Representational = putByte bh 2 put_ bh Phantom = putByte bh 3 get bh = do tag <- getByte bh case tag of 1 -> return Nominal 2 -> return Representational 3 -> return Phantom _ -> panic ("get Role " ++ show tag) {- ************************************************************************ * * CoAxiomRule Rules for building Evidence * * ************************************************************************ Conditional axioms. The general idea is that a `CoAxiomRule` looks like this: forall as. (r1 ~ r2, s1 ~ s2) => t1 ~ t2 My intention is to reuse these for both (~) and (~#). The short-term plan is to use this datatype to represent the type-nat axioms. In the longer run, it may be good to unify this and `CoAxiom`, as `CoAxiom` is the special case when there are no assumptions. -} -- | A more explicit representation for `t1 ~ t2`. type Eqn = Pair Type -- | For now, we work only with nominal equality. data CoAxiomRule = CoAxiomRule { coaxrName :: FastString , coaxrTypeArity :: Int -- number of type argumentInts , coaxrAsmpRoles :: [Role] -- roles of parameter equations , coaxrRole :: Role -- role of resulting equation , coaxrProves :: [Type] -> [Eqn] -> Maybe Eqn -- ^ coaxrProves returns @Nothing@ when it doesn't like -- the supplied arguments. When this happens in a coercion -- that means that the coercion is ill-formed, and Core Lint -- checks for that. } deriving Typeable instance Data.Data CoAxiomRule where -- don't traverse? toConstr _ = abstractConstr "CoAxiomRule" gunfold _ _ = error "gunfold" dataTypeOf _ = mkNoRepType "CoAxiomRule" instance Uniquable CoAxiomRule where getUnique = getUnique . coaxrName instance Eq CoAxiomRule where x == y = coaxrName x == coaxrName y instance Ord CoAxiomRule where compare x y = compare (coaxrName x) (coaxrName y) instance Outputable CoAxiomRule where ppr = ppr . coaxrName -- Type checking of built-in families data BuiltInSynFamily = BuiltInSynFamily { sfMatchFam :: [Type] -> Maybe (CoAxiomRule, [Type], Type) , sfInteractTop :: [Type] -> Type -> [Eqn] , sfInteractInert :: [Type] -> Type -> [Type] -> Type -> [Eqn] } -- Provides default implementations that do nothing. trivialBuiltInFamily :: BuiltInSynFamily trivialBuiltInFamily = BuiltInSynFamily { sfMatchFam = \_ -> Nothing , sfInteractTop = \_ _ -> [] , sfInteractInert = \_ _ _ _ -> [] }
ml9951/ghc
compiler/types/CoAxiom.hs
bsd-3-clause
18,558
0
14
4,558
2,462
1,367
1,095
199
1
-- | You don't need to import this module to enable bash completion. -- -- See -- <http://github.com/pcapriotti/optparse-applicative/wiki/Bash-Completion the wiki> -- for more information on bash completion. module Options.Applicative.BashCompletion ( bashCompletionParser ) where import Control.Applicative ((<$>), (<*>), many) import Data.Foldable (asum) import Data.List (isPrefixOf) import Data.Maybe (fromMaybe, listToMaybe) import Options.Applicative.Builder import Options.Applicative.Common import Options.Applicative.Internal import Options.Applicative.Types bashCompletionParser :: ParserInfo a -> ParserPrefs -> Parser CompletionResult bashCompletionParser pinfo pprefs = complParser where failure opts = CompletionResult { execCompletion = \progn -> unlines <$> opts progn } complParser = asum [ failure <$> ( bashCompletionQuery pinfo pprefs <$> (many . strOption) (long "bash-completion-word" `mappend` internal) <*> option auto (long "bash-completion-index" `mappend` internal) ) , failure <$> (bashCompletionScript <$> strOption (long "bash-completion-script" `mappend` internal)) ] bashCompletionQuery :: ParserInfo a -> ParserPrefs -> [String] -> Int -> String -> IO [String] bashCompletionQuery pinfo pprefs ws i _ = case runCompletion compl pprefs of Just (Left (SomeParser p)) -> list_options p Just (Right c) -> run_completer c _ -> return [] where list_options = fmap concat . sequence . mapParser (const opt_completions) opt_completions opt = case optMain opt of OptReader ns _ _ -> return $ show_names ns FlagReader ns _ -> return $ show_names ns ArgReader rdr -> run_completer (crCompleter rdr) CmdReader ns _ -> return $ filter_names ns show_name :: OptName -> String show_name (OptShort c) = '-':[c] show_name (OptLong name) = "--" ++ name show_names :: [OptName] -> [String] show_names = filter_names . map show_name filter_names :: [String] -> [String] filter_names = filter is_completion run_completer :: Completer -> IO [String] run_completer c = runCompleter c (fromMaybe "" (listToMaybe ws'')) (ws', ws'') = splitAt i ws is_completion :: String -> Bool is_completion = case ws'' of w:_ -> isPrefixOf w _ -> const True compl = runParserInfo pinfo (drop 1 ws') bashCompletionScript :: String -> String -> IO [String] bashCompletionScript prog progn = return [ "_" ++ progn ++ "()" , "{" , " local cmdline" , " CMDLINE=(--bash-completion-index $COMP_CWORD)" , "" , " for arg in ${COMP_WORDS[@]}; do" , " CMDLINE=(${CMDLINE[@]} --bash-completion-word $arg)" , " done" , "" , " COMPREPLY=( $(" ++ prog ++ " \"${CMDLINE[@]}\") )" , "}" , "" , "complete -o filenames -F _" ++ progn ++ " " ++ progn ]
d12frosted/optparse-applicative-kb
Options/Applicative/BashCompletion.hs
bsd-3-clause
2,980
0
16
727
797
422
375
68
8
{-# LANGUAGE CPP #-} #if __GLASGOW_HASKELL__ {-# LANGUAGE DeriveDataTypeable, StandaloneDeriving #-} #endif #if !defined(TESTING) && __GLASGOW_HASKELL__ >= 703 {-# LANGUAGE Trustworthy #-} #endif ----------------------------------------------------------------------------- -- | -- Module : Data.Set.Base -- Copyright : (c) Daan Leijen 2002 -- License : BSD-style -- Maintainer : [email protected] -- Stability : provisional -- Portability : portable -- -- An efficient implementation of sets. -- -- These modules are intended to be imported qualified, to avoid name -- clashes with Prelude functions, e.g. -- -- > import Data.Set (Set) -- > import qualified Data.Set as Set -- -- The implementation of 'Set' is based on /size balanced/ binary trees (or -- trees of /bounded balance/) as described by: -- -- * Stephen Adams, \"/Efficient sets: a balancing act/\", -- Journal of Functional Programming 3(4):553-562, October 1993, -- <http://www.swiss.ai.mit.edu/~adams/BB/>. -- -- * J. Nievergelt and E.M. Reingold, -- \"/Binary search trees of bounded balance/\", -- SIAM journal of computing 2(1), March 1973. -- -- Note that the implementation is /left-biased/ -- the elements of a -- first argument are always preferred to the second, for example in -- 'union' or 'insert'. Of course, left-biasing can only be observed -- when equality is an equivalence relation instead of structural -- equality. ----------------------------------------------------------------------------- -- [Note: Using INLINABLE] -- ~~~~~~~~~~~~~~~~~~~~~~~ -- It is crucial to the performance that the functions specialize on the Ord -- type when possible. GHC 7.0 and higher does this by itself when it sees th -- unfolding of a function -- that is why all public functions are marked -- INLINABLE (that exposes the unfolding). -- [Note: Using INLINE] -- ~~~~~~~~~~~~~~~~~~~~ -- For other compilers and GHC pre 7.0, we mark some of the functions INLINE. -- We mark the functions that just navigate down the tree (lookup, insert, -- delete and similar). That navigation code gets inlined and thus specialized -- when possible. There is a price to pay -- code growth. The code INLINED is -- therefore only the tree navigation, all the real work (rebalancing) is not -- INLINED by using a NOINLINE. -- -- All methods marked INLINE have to be nonrecursive -- a 'go' function doing -- the real work is provided. -- [Note: Type of local 'go' function] -- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -- If the local 'go' function uses an Ord class, it sometimes heap-allocates -- the Ord dictionary when the 'go' function does not have explicit type. -- In that case we give 'go' explicit type. But this slightly decrease -- performance, as the resulting 'go' function can float out to top level. -- [Note: Local 'go' functions and capturing] -- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -- As opposed to IntSet, when 'go' function captures an argument, increased -- heap-allocation can occur: sometimes in a polymorphic function, the 'go' -- floats out of its enclosing function and then it heap-allocates the -- dictionary and the argument. Maybe it floats out too late and strictness -- analyzer cannot see that these could be passed on stack. -- -- For example, change 'member' so that its local 'go' function is not passing -- argument x and then look at the resulting code for hedgeInt. -- [Note: Order of constructors] -- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -- The order of constructors of Set matters when considering performance. 
-- Currently in GHC 7.0, when type has 2 constructors, a forward conditional -- jump is made when successfully matching second constructor. Successful match -- of first constructor results in the forward jump not taken. -- On GHC 7.0, reordering constructors from Tip | Bin to Bin | Tip -- improves the benchmark by up to 10% on x86. module Data.Set.Base ( -- * Set type Set(..) -- instance Eq,Ord,Show,Read,Data,Typeable -- * Operators , (\\) -- * Query , null , size , member , notMember , lookupLT , lookupGT , lookupLE , lookupGE , isSubsetOf , isProperSubsetOf -- * Construction , empty , singleton , insert , delete -- * Combine , union , unions , difference , intersection -- * Filter , filter , partition , split , splitMember -- * Map , map , mapMonotonic -- * Folds , foldr , foldl -- ** Strict folds , foldr' , foldl' -- ** Legacy folds , fold -- * Min\/Max , findMin , findMax , deleteMin , deleteMax , deleteFindMin , deleteFindMax , maxView , minView -- * Conversion -- ** List , elems , toList , fromList -- ** Ordered list , toAscList , toDescList , fromAscList , fromDistinctAscList -- * Debugging , showTree , showTreeWith , valid -- Internals (for testing) , bin , balanced , join , merge ) where import Prelude hiding (filter,foldl,foldr,null,map) import qualified Data.List as List import Data.Monoid (Monoid(..)) import qualified Data.Foldable as Foldable import Data.Typeable import Control.DeepSeq (NFData(rnf)) #if __GLASGOW_HASKELL__ import GHC.Exts ( build ) import Text.Read import Data.Data #endif -- Use macros to define strictness of functions. -- STRICT_x_OF_y denotes an y-ary function strict in the x-th parameter. -- We do not use BangPatterns, because they are not in any standard and we -- want the compilers to be compiled by as many compilers as possible. #define STRICT_1_OF_2(fn) fn arg _ | arg `seq` False = undefined #define STRICT_1_OF_3(fn) fn arg _ _ | arg `seq` False = undefined {-------------------------------------------------------------------- Operators --------------------------------------------------------------------} infixl 9 \\ -- -- | /O(n+m)/. See 'difference'. (\\) :: Ord a => Set a -> Set a -> Set a m1 \\ m2 = difference m1 m2 #if __GLASGOW_HASKELL__ >= 700 {-# INLINABLE (\\) #-} #endif {-------------------------------------------------------------------- Sets are size balanced trees --------------------------------------------------------------------} -- | A set of values @a@. -- See Note: Order of constructors data Set a = Bin {-# UNPACK #-} !Size !a !(Set a) !(Set a) | Tip type Size = Int instance Ord a => Monoid (Set a) where mempty = empty mappend = union mconcat = unions instance Foldable.Foldable Set where fold Tip = mempty fold (Bin _ k l r) = Foldable.fold l `mappend` k `mappend` Foldable.fold r foldr = foldr foldl = foldl foldMap _ Tip = mempty foldMap f (Bin _ k l r) = Foldable.foldMap f l `mappend` f k `mappend` Foldable.foldMap f r #if __GLASGOW_HASKELL__ {-------------------------------------------------------------------- A Data instance --------------------------------------------------------------------} -- This instance preserves data abstraction at the cost of inefficiency. -- We omit reflection services for the sake of data abstraction. 
instance (Data a, Ord a) => Data (Set a) where gfoldl f z set = z fromList `f` (toList set) toConstr _ = error "toConstr" gunfold _ _ = error "gunfold" dataTypeOf _ = mkNoRepType "Data.Set.Set" dataCast1 f = gcast1 f #endif {-------------------------------------------------------------------- Query --------------------------------------------------------------------} -- | /O(1)/. Is this the empty set? null :: Set a -> Bool null Tip = True null (Bin {}) = False {-# INLINE null #-} -- | /O(1)/. The number of elements in the set. size :: Set a -> Int size Tip = 0 size (Bin sz _ _ _) = sz {-# INLINE size #-} -- | /O(log n)/. Is the element in the set? member :: Ord a => a -> Set a -> Bool member = go where STRICT_1_OF_2(go) go _ Tip = False go x (Bin _ y l r) = case compare x y of LT -> go x l GT -> go x r EQ -> True #if __GLASGOW_HASKELL__ >= 700 {-# INLINABLE member #-} #else {-# INLINE member #-} #endif -- | /O(log n)/. Is the element not in the set? notMember :: Ord a => a -> Set a -> Bool notMember a t = not $ member a t #if __GLASGOW_HASKELL__ >= 700 {-# INLINABLE notMember #-} #else {-# INLINE notMember #-} #endif -- | /O(log n)/. Find largest element smaller than the given one. -- -- > lookupLT 3 (fromList [3, 5]) == Nothing -- > lookupLT 5 (fromList [3, 5]) == Just 3 lookupLT :: Ord a => a -> Set a -> Maybe a lookupLT = goNothing where STRICT_1_OF_2(goNothing) goNothing _ Tip = Nothing goNothing x (Bin _ y l r) | x <= y = goNothing x l | otherwise = goJust x y r STRICT_1_OF_3(goJust) goJust _ best Tip = Just best goJust x best (Bin _ y l r) | x <= y = goJust x best l | otherwise = goJust x y r #if __GLASGOW_HASKELL__ >= 700 {-# INLINABLE lookupLT #-} #else {-# INLINE lookupLT #-} #endif -- | /O(log n)/. Find smallest element greater than the given one. -- -- > lookupGT 4 (fromList [3, 5]) == Just 5 -- > lookupGT 5 (fromList [3, 5]) == Nothing lookupGT :: Ord a => a -> Set a -> Maybe a lookupGT = goNothing where STRICT_1_OF_2(goNothing) goNothing _ Tip = Nothing goNothing x (Bin _ y l r) | x < y = goJust x y l | otherwise = goNothing x r STRICT_1_OF_3(goJust) goJust _ best Tip = Just best goJust x best (Bin _ y l r) | x < y = goJust x y l | otherwise = goJust x best r #if __GLASGOW_HASKELL__ >= 700 {-# INLINABLE lookupGT #-} #else {-# INLINE lookupGT #-} #endif -- | /O(log n)/. Find largest element smaller or equal to the given one. -- -- > lookupLE 2 (fromList [3, 5]) == Nothing -- > lookupLE 4 (fromList [3, 5]) == Just 3 -- > lookupLE 5 (fromList [3, 5]) == Just 5 lookupLE :: Ord a => a -> Set a -> Maybe a lookupLE = goNothing where STRICT_1_OF_2(goNothing) goNothing _ Tip = Nothing goNothing x (Bin _ y l r) = case compare x y of LT -> goNothing x l EQ -> Just y GT -> goJust x y r STRICT_1_OF_3(goJust) goJust _ best Tip = Just best goJust x best (Bin _ y l r) = case compare x y of LT -> goJust x best l EQ -> Just y GT -> goJust x y r #if __GLASGOW_HASKELL__ >= 700 {-# INLINABLE lookupLE #-} #else {-# INLINE lookupLE #-} #endif -- | /O(log n)/. Find smallest element greater or equal to the given one. 
-- -- > lookupGE 3 (fromList [3, 5]) == Just 3 -- > lookupGE 4 (fromList [3, 5]) == Just 5 -- > lookupGE 6 (fromList [3, 5]) == Nothing lookupGE :: Ord a => a -> Set a -> Maybe a lookupGE = goNothing where STRICT_1_OF_2(goNothing) goNothing _ Tip = Nothing goNothing x (Bin _ y l r) = case compare x y of LT -> goJust x y l EQ -> Just y GT -> goNothing x r STRICT_1_OF_3(goJust) goJust _ best Tip = Just best goJust x best (Bin _ y l r) = case compare x y of LT -> goJust x y l EQ -> Just y GT -> goJust x best r #if __GLASGOW_HASKELL__ >= 700 {-# INLINABLE lookupGE #-} #else {-# INLINE lookupGE #-} #endif {-------------------------------------------------------------------- Construction --------------------------------------------------------------------} -- | /O(1)/. The empty set. empty :: Set a empty = Tip {-# INLINE empty #-} -- | /O(1)/. Create a singleton set. singleton :: a -> Set a singleton x = Bin 1 x Tip Tip {-# INLINE singleton #-} {-------------------------------------------------------------------- Insertion, Deletion --------------------------------------------------------------------} -- | /O(log n)/. Insert an element in a set. -- If the set already contains an element equal to the given value, -- it is replaced with the new value. -- See Note: Type of local 'go' function insert :: Ord a => a -> Set a -> Set a insert = go where go :: Ord a => a -> Set a -> Set a STRICT_1_OF_2(go) go x Tip = singleton x go x (Bin sz y l r) = case compare x y of LT -> balanceL y (go x l) r GT -> balanceR y l (go x r) EQ -> Bin sz x l r #if __GLASGOW_HASKELL__ >= 700 {-# INLINABLE insert #-} #else {-# INLINE insert #-} #endif -- Insert an element to the set only if it is not in the set. -- Used by `union`. -- See Note: Type of local 'go' function insertR :: Ord a => a -> Set a -> Set a insertR = go where go :: Ord a => a -> Set a -> Set a STRICT_1_OF_2(go) go x Tip = singleton x go x t@(Bin _ y l r) = case compare x y of LT -> balanceL y (go x l) r GT -> balanceR y l (go x r) EQ -> t #if __GLASGOW_HASKELL__ >= 700 {-# INLINABLE insertR #-} #else {-# INLINE insertR #-} #endif -- | /O(log n)/. Delete an element from a set. -- See Note: Type of local 'go' function delete :: Ord a => a -> Set a -> Set a delete = go where go :: Ord a => a -> Set a -> Set a STRICT_1_OF_2(go) go _ Tip = Tip go x (Bin _ y l r) = case compare x y of LT -> balanceR y (go x l) r GT -> balanceL y l (go x r) EQ -> glue l r #if __GLASGOW_HASKELL__ >= 700 {-# INLINABLE delete #-} #else {-# INLINE delete #-} #endif {-------------------------------------------------------------------- Subset --------------------------------------------------------------------} -- | /O(n+m)/. Is this a proper subset? (ie. a subset but not equal). isProperSubsetOf :: Ord a => Set a -> Set a -> Bool isProperSubsetOf s1 s2 = (size s1 < size s2) && (isSubsetOf s1 s2) #if __GLASGOW_HASKELL__ >= 700 {-# INLINABLE isProperSubsetOf #-} #endif -- | /O(n+m)/. Is this a subset? -- @(s1 `isSubsetOf` s2)@ tells whether @s1@ is a subset of @s2@. 
isSubsetOf :: Ord a => Set a -> Set a -> Bool isSubsetOf t1 t2 = (size t1 <= size t2) && (isSubsetOfX t1 t2) #if __GLASGOW_HASKELL__ >= 700 {-# INLINABLE isSubsetOf #-} #endif isSubsetOfX :: Ord a => Set a -> Set a -> Bool isSubsetOfX Tip _ = True isSubsetOfX _ Tip = False isSubsetOfX (Bin _ x l r) t = found && isSubsetOfX l lt && isSubsetOfX r gt where (lt,found,gt) = splitMember x t #if __GLASGOW_HASKELL__ >= 700 {-# INLINABLE isSubsetOfX #-} #endif {-------------------------------------------------------------------- Minimal, Maximal --------------------------------------------------------------------} -- | /O(log n)/. The minimal element of a set. findMin :: Set a -> a findMin (Bin _ x Tip _) = x findMin (Bin _ _ l _) = findMin l findMin Tip = error "Set.findMin: empty set has no minimal element" -- | /O(log n)/. The maximal element of a set. findMax :: Set a -> a findMax (Bin _ x _ Tip) = x findMax (Bin _ _ _ r) = findMax r findMax Tip = error "Set.findMax: empty set has no maximal element" -- | /O(log n)/. Delete the minimal element. deleteMin :: Set a -> Set a deleteMin (Bin _ _ Tip r) = r deleteMin (Bin _ x l r) = balanceR x (deleteMin l) r deleteMin Tip = Tip -- | /O(log n)/. Delete the maximal element. deleteMax :: Set a -> Set a deleteMax (Bin _ _ l Tip) = l deleteMax (Bin _ x l r) = balanceL x l (deleteMax r) deleteMax Tip = Tip {-------------------------------------------------------------------- Union. --------------------------------------------------------------------} -- | The union of a list of sets: (@'unions' == 'foldl' 'union' 'empty'@). unions :: Ord a => [Set a] -> Set a unions = foldlStrict union empty #if __GLASGOW_HASKELL__ >= 700 {-# INLINABLE unions #-} #endif -- | /O(n+m)/. The union of two sets, preferring the first set when -- equal elements are encountered. -- The implementation uses the efficient /hedge-union/ algorithm. -- Hedge-union is more efficient on (bigset `union` smallset). union :: Ord a => Set a -> Set a -> Set a union Tip t2 = t2 union t1 Tip = t1 union t1 t2 = hedgeUnion NothingS NothingS t1 t2 #if __GLASGOW_HASKELL__ >= 700 {-# INLINABLE union #-} #endif hedgeUnion :: Ord a => MaybeS a -> MaybeS a -> Set a -> Set a -> Set a hedgeUnion _ _ t1 Tip = t1 hedgeUnion blo bhi Tip (Bin _ x l r) = join x (filterGt blo l) (filterLt bhi r) hedgeUnion _ _ t1 (Bin _ x Tip Tip) = insertR x t1 -- According to benchmarks, this special case increases -- performance up to 30%. It does not help in difference or intersection. hedgeUnion blo bhi (Bin _ x l r) t2 = join x (hedgeUnion blo bmi l (trim blo bmi t2)) (hedgeUnion bmi bhi r (trim bmi bhi t2)) where bmi = JustS x #if __GLASGOW_HASKELL__ >= 700 {-# INLINABLE hedgeUnion #-} #endif {-------------------------------------------------------------------- Difference --------------------------------------------------------------------} -- | /O(n+m)/. Difference of two sets. -- The implementation uses an efficient /hedge/ algorithm comparable with /hedge-union/. 
difference :: Ord a => Set a -> Set a -> Set a difference Tip _ = Tip difference t1 Tip = t1 difference t1 t2 = hedgeDiff NothingS NothingS t1 t2 #if __GLASGOW_HASKELL__ >= 700 {-# INLINABLE difference #-} #endif hedgeDiff :: Ord a => MaybeS a -> MaybeS a -> Set a -> Set a -> Set a hedgeDiff _ _ Tip _ = Tip hedgeDiff blo bhi (Bin _ x l r) Tip = join x (filterGt blo l) (filterLt bhi r) hedgeDiff blo bhi t (Bin _ x l r) = merge (hedgeDiff blo bmi (trim blo bmi t) l) (hedgeDiff bmi bhi (trim bmi bhi t) r) where bmi = JustS x #if __GLASGOW_HASKELL__ >= 700 {-# INLINABLE hedgeDiff #-} #endif {-------------------------------------------------------------------- Intersection --------------------------------------------------------------------} -- | /O(n+m)/. The intersection of two sets. -- Elements of the result come from the first set, so for example -- -- > import qualified Data.Set as S -- > data AB = A | B deriving Show -- > instance Ord AB where compare _ _ = EQ -- > instance Eq AB where _ == _ = True -- > main = print (S.singleton A `S.intersection` S.singleton B, -- > S.singleton B `S.intersection` S.singleton A) -- -- prints @(fromList [A],fromList [B])@. intersection :: Ord a => Set a -> Set a -> Set a intersection Tip _ = Tip intersection _ Tip = Tip intersection t1 t2 = hedgeInt NothingS NothingS t1 t2 #if __GLASGOW_HASKELL__ >= 700 {-# INLINABLE intersection #-} #endif hedgeInt :: Ord a => MaybeS a -> MaybeS a -> Set a -> Set a -> Set a hedgeInt _ _ _ Tip = Tip hedgeInt _ _ Tip _ = Tip hedgeInt blo bhi (Bin _ x l r) t2 = let l' = hedgeInt blo bmi l (trim blo bmi t2) r' = hedgeInt bmi bhi r (trim bmi bhi t2) in if x `member` t2 then join x l' r' else merge l' r' where bmi = JustS x #if __GLASGOW_HASKELL__ >= 700 {-# INLINABLE hedgeInt #-} #endif {-------------------------------------------------------------------- Filter and partition --------------------------------------------------------------------} -- | /O(n)/. Filter all elements that satisfy the predicate. filter :: (a -> Bool) -> Set a -> Set a filter _ Tip = Tip filter p (Bin _ x l r) | p x = join x (filter p l) (filter p r) | otherwise = merge (filter p l) (filter p r) -- | /O(n)/. Partition the set into two sets, one with all elements that satisfy -- the predicate and one with all elements that don't satisfy the predicate. -- See also 'split'. partition :: (a -> Bool) -> Set a -> (Set a,Set a) partition _ Tip = (Tip, Tip) partition p (Bin _ x l r) = case (partition p l, partition p r) of ((l1, l2), (r1, r2)) | p x -> (join x l1 r1, merge l2 r2) | otherwise -> (merge l1 r1, join x l2 r2) {---------------------------------------------------------------------- Map ----------------------------------------------------------------------} -- | /O(n*log n)/. -- @'map' f s@ is the set obtained by applying @f@ to each element of @s@. -- -- It's worth noting that the size of the result may be smaller if, -- for some @(x,y)@, @x \/= y && f x == f y@ map :: (Ord a, Ord b) => (a->b) -> Set a -> Set b map f = fromList . List.map f . toList #if __GLASGOW_HASKELL__ >= 700 {-# INLINABLE map #-} #endif -- | /O(n)/. The -- -- @'mapMonotonic' f s == 'map' f s@, but works only when @f@ is monotonic. 
-- /The precondition is not checked./ -- Semi-formally, we have: -- -- > and [x < y ==> f x < f y | x <- ls, y <- ls] -- > ==> mapMonotonic f s == map f s -- > where ls = toList s mapMonotonic :: (a->b) -> Set a -> Set b mapMonotonic _ Tip = Tip mapMonotonic f (Bin sz x l r) = Bin sz (f x) (mapMonotonic f l) (mapMonotonic f r) {-------------------------------------------------------------------- Fold --------------------------------------------------------------------} -- | /O(n)/. Fold the elements in the set using the given right-associative -- binary operator. This function is an equivalent of 'foldr' and is present -- for compatibility only. -- -- /Please note that fold will be deprecated in the future and removed./ fold :: (a -> b -> b) -> b -> Set a -> b fold = foldr {-# INLINE fold #-} -- | /O(n)/. Fold the elements in the set using the given right-associative -- binary operator, such that @'foldr' f z == 'Prelude.foldr' f z . 'toAscList'@. -- -- For example, -- -- > toAscList set = foldr (:) [] set foldr :: (a -> b -> b) -> b -> Set a -> b foldr f z = go z where go z' Tip = z' go z' (Bin _ x l r) = go (f x (go z' r)) l {-# INLINE foldr #-} -- | /O(n)/. A strict version of 'foldr'. Each application of the operator is -- evaluated before using the result in the next application. This -- function is strict in the starting value. foldr' :: (a -> b -> b) -> b -> Set a -> b foldr' f z = go z where STRICT_1_OF_2(go) go z' Tip = z' go z' (Bin _ x l r) = go (f x (go z' r)) l {-# INLINE foldr' #-} -- | /O(n)/. Fold the elements in the set using the given left-associative -- binary operator, such that @'foldl' f z == 'Prelude.foldl' f z . 'toAscList'@. -- -- For example, -- -- > toDescList set = foldl (flip (:)) [] set foldl :: (a -> b -> a) -> a -> Set b -> a foldl f z = go z where go z' Tip = z' go z' (Bin _ x l r) = go (f (go z' l) x) r {-# INLINE foldl #-} -- | /O(n)/. A strict version of 'foldl'. Each application of the operator is -- evaluated before using the result in the next application. This -- function is strict in the starting value. foldl' :: (a -> b -> a) -> a -> Set b -> a foldl' f z = go z where STRICT_1_OF_2(go) go z' Tip = z' go z' (Bin _ x l r) = go (f (go z' l) x) r {-# INLINE foldl' #-} {-------------------------------------------------------------------- List variations --------------------------------------------------------------------} -- | /O(n)/. An alias of 'toAscList'. The elements of a set in ascending order. -- Subject to list fusion. elems :: Set a -> [a] elems = toAscList {-------------------------------------------------------------------- Lists --------------------------------------------------------------------} -- | /O(n)/. Convert the set to a list of elements. Subject to list fusion. toList :: Set a -> [a] toList = toAscList -- | /O(n)/. Convert the set to an ascending list of elements. Subject to list fusion. toAscList :: Set a -> [a] toAscList = foldr (:) [] -- | /O(n)/. Convert the set to a descending list of elements. Subject to list -- fusion. toDescList :: Set a -> [a] toDescList = foldl (flip (:)) [] -- List fusion for the list generating functions. #if __GLASGOW_HASKELL__ -- The foldrFB and foldlFB are foldr and foldl equivalents, used for list fusion. -- They are important to convert unfused to{Asc,Desc}List back, see mapFB in prelude. 
foldrFB :: (a -> b -> b) -> b -> Set a -> b foldrFB = foldr {-# INLINE[0] foldrFB #-} foldlFB :: (a -> b -> a) -> a -> Set b -> a foldlFB = foldl {-# INLINE[0] foldlFB #-} -- Inline elems and toList, so that we need to fuse only toAscList. {-# INLINE elems #-} {-# INLINE toList #-} -- The fusion is enabled up to phase 2 included. If it does not succeed, -- convert in phase 1 the expanded to{Asc,Desc}List calls back to -- to{Asc,Desc}List. In phase 0, we inline fold{lr}FB (which were used in -- a list fusion, otherwise it would go away in phase 1), and let compiler do -- whatever it wants with to{Asc,Desc}List -- it was forbidden to inline it -- before phase 0, otherwise the fusion rules would not fire at all. {-# NOINLINE[0] toAscList #-} {-# NOINLINE[0] toDescList #-} {-# RULES "Set.toAscList" [~1] forall s . toAscList s = build (\c n -> foldrFB c n s) #-} {-# RULES "Set.toAscListBack" [1] foldrFB (:) [] = toAscList #-} {-# RULES "Set.toDescList" [~1] forall s . toDescList s = build (\c n -> foldlFB (\xs x -> c x xs) n s) #-} {-# RULES "Set.toDescListBack" [1] foldlFB (\xs x -> x : xs) [] = toDescList #-} #endif -- | /O(n*log n)/. Create a set from a list of elements. fromList :: Ord a => [a] -> Set a fromList = foldlStrict ins empty where ins t x = insert x t #if __GLASGOW_HASKELL__ >= 700 {-# INLINABLE fromList #-} #endif {-------------------------------------------------------------------- Building trees from ascending/descending lists can be done in linear time. Note that if [xs] is ascending that: fromAscList xs == fromList xs --------------------------------------------------------------------} -- | /O(n)/. Build a set from an ascending list in linear time. -- /The precondition (input list is ascending) is not checked./ fromAscList :: Eq a => [a] -> Set a fromAscList xs = fromDistinctAscList (combineEq xs) where -- [combineEq xs] combines equal elements with [const] in an ordered list [xs] combineEq xs' = case xs' of [] -> [] [x] -> [x] (x:xx) -> combineEq' x xx combineEq' z [] = [z] combineEq' z (x:xs') | z==x = combineEq' z xs' | otherwise = z:combineEq' x xs' #if __GLASGOW_HASKELL__ >= 700 {-# INLINABLE fromAscList #-} #endif -- | /O(n)/. Build a set from an ascending list of distinct elements in linear time. -- /The precondition (input list is strictly ascending) is not checked./ fromDistinctAscList :: [a] -> Set a fromDistinctAscList xs = create const (length xs) xs where -- 1) use continutations so that we use heap space instead of stack space. -- 2) special case for n==5 to create bushier trees. create c 0 xs' = c Tip xs' create c 5 xs' = case xs' of (x1:x2:x3:x4:x5:xx) -> c (bin x4 (bin x2 (singleton x1) (singleton x3)) (singleton x5)) xx _ -> error "fromDistinctAscList create 5" create c n xs' = seq nr $ create (createR nr c) nl xs' where nl = n `div` 2 nr = n - nl - 1 createR n c l (x:ys) = create (createB l x c) n ys createR _ _ _ [] = error "fromDistinctAscList createR []" createB l x c r zs = c (bin x l r) zs {-------------------------------------------------------------------- Eq converts the set to a list. 
In a lazy setting, this actually seems one of the faster methods to compare two trees and it is certainly the simplest :-) --------------------------------------------------------------------} instance Eq a => Eq (Set a) where t1 == t2 = (size t1 == size t2) && (toAscList t1 == toAscList t2) {-------------------------------------------------------------------- Ord --------------------------------------------------------------------} instance Ord a => Ord (Set a) where compare s1 s2 = compare (toAscList s1) (toAscList s2) {-------------------------------------------------------------------- Show --------------------------------------------------------------------} instance Show a => Show (Set a) where showsPrec p xs = showParen (p > 10) $ showString "fromList " . shows (toList xs) {-------------------------------------------------------------------- Read --------------------------------------------------------------------} instance (Read a, Ord a) => Read (Set a) where #ifdef __GLASGOW_HASKELL__ readPrec = parens $ prec 10 $ do Ident "fromList" <- lexP xs <- readPrec return (fromList xs) readListPrec = readListPrecDefault #else readsPrec p = readParen (p > 10) $ \ r -> do ("fromList",s) <- lex r (xs,t) <- reads s return (fromList xs,t) #endif {-------------------------------------------------------------------- Typeable/Data --------------------------------------------------------------------} #include "Typeable.h" INSTANCE_TYPEABLE1(Set,setTc,"Set") {-------------------------------------------------------------------- NFData --------------------------------------------------------------------} instance NFData a => NFData (Set a) where rnf Tip = () rnf (Bin _ y l r) = rnf y `seq` rnf l `seq` rnf r {-------------------------------------------------------------------- Utility functions that return sub-ranges of the original tree. Some functions take a `Maybe value` as an argument to allow comparisons against infinite values. These are called `blow` (Nothing is -\infty) and `bhigh` (here Nothing is +\infty). We use MaybeS value, which is a Maybe strict in the Just case. [trim blow bhigh t] A tree that is either empty or where [x > blow] and [x < bhigh] for the value [x] of the root. [filterGt blow t] A tree where for all values [k]. [k > blow] [filterLt bhigh t] A tree where for all values [k]. [k < bhigh] [split k t] Returns two trees [l] and [r] where all values in [l] are <[k] and all keys in [r] are >[k]. [splitMember k t] Just like [split] but also returns whether [k] was found in the tree. --------------------------------------------------------------------} data MaybeS a = NothingS | JustS !a {-------------------------------------------------------------------- [trim blo bhi t] trims away all subtrees that surely contain no values between the range [blo] to [bhi]. The returned tree is either empty or the key of the root is between @blo@ and @bhi@. 
--------------------------------------------------------------------} trim :: Ord a => MaybeS a -> MaybeS a -> Set a -> Set a trim NothingS NothingS t = t trim (JustS lx) NothingS t = greater lx t where greater lo (Bin _ x _ r) | x <= lo = greater lo r greater _ t' = t' trim NothingS (JustS hx) t = lesser hx t where lesser hi (Bin _ x l _) | x >= hi = lesser hi l lesser _ t' = t' trim (JustS lx) (JustS hx) t = middle lx hx t where middle lo hi (Bin _ x _ r) | x <= lo = middle lo hi r middle lo hi (Bin _ x l _) | x >= hi = middle lo hi l middle _ _ t' = t' #if __GLASGOW_HASKELL__ >= 700 {-# INLINABLE trim #-} #endif {-------------------------------------------------------------------- [filterGt b t] filter all values >[b] from tree [t] [filterLt b t] filter all values <[b] from tree [t] --------------------------------------------------------------------} filterGt :: Ord a => MaybeS a -> Set a -> Set a filterGt NothingS t = t filterGt (JustS b) t = filter' b t where filter' _ Tip = Tip filter' b' (Bin _ x l r) = case compare b' x of LT -> join x (filter' b' l) r EQ -> r GT -> filter' b' r #if __GLASGOW_HASKELL__ >= 700 {-# INLINABLE filterGt #-} #endif filterLt :: Ord a => MaybeS a -> Set a -> Set a filterLt NothingS t = t filterLt (JustS b) t = filter' b t where filter' _ Tip = Tip filter' b' (Bin _ x l r) = case compare x b' of LT -> join x l (filter' b' r) EQ -> l GT -> filter' b' l #if __GLASGOW_HASKELL__ >= 700 {-# INLINABLE filterLt #-} #endif {-------------------------------------------------------------------- Split --------------------------------------------------------------------} -- | /O(log n)/. The expression (@'split' x set@) is a pair @(set1,set2)@ -- where @set1@ comprises the elements of @set@ less than @x@ and @set2@ -- comprises the elements of @set@ greater than @x@. split :: Ord a => a -> Set a -> (Set a,Set a) split _ Tip = (Tip,Tip) split x (Bin _ y l r) = case compare x y of LT -> let (lt,gt) = split x l in (lt,join y gt r) GT -> let (lt,gt) = split x r in (join y l lt,gt) EQ -> (l,r) #if __GLASGOW_HASKELL__ >= 700 {-# INLINABLE split #-} #endif -- | /O(log n)/. Performs a 'split' but also returns whether the pivot -- element was found in the original set. splitMember :: Ord a => a -> Set a -> (Set a,Bool,Set a) splitMember _ Tip = (Tip, False, Tip) splitMember x (Bin _ y l r) = case compare x y of LT -> let (lt, found, gt) = splitMember x l in (lt, found, join y gt r) GT -> let (lt, found, gt) = splitMember x r in (join y l lt, found, gt) EQ -> (l, True, r) #if __GLASGOW_HASKELL__ >= 700 {-# INLINABLE splitMember #-} #endif {-------------------------------------------------------------------- Utility functions that maintain the balance properties of the tree. All constructors assume that all values in [l] < [x] and all values in [r] > [x], and that [l] and [r] are valid trees. In order of sophistication: [Bin sz x l r] The type constructor. [bin x l r] Maintains the correct size, assumes that both [l] and [r] are balanced with respect to each other. [balance x l r] Restores the balance and size. Assumes that the original tree was balanced and that [l] or [r] has changed by at most one element. [join x l r] Restores balance and size. Furthermore, we can construct a new tree from two trees. Both operations assume that all values in [l] < all values in [r] and that [l] and [r] are valid: [glue l r] Glues [l] and [r] together. Assumes that [l] and [r] are already balanced with respect to each other. [merge l r] Merges two trees and restores balance. 
Note: in contrast to Adam's paper, we use (<=) comparisons instead of (<) comparisons in [join], [merge] and [balance]. Quickcheck (on [difference]) showed that this was necessary in order to maintain the invariants. It is quite unsatisfactory that I haven't been able to find out why this is actually the case! Fortunately, it doesn't hurt to be a bit more conservative. --------------------------------------------------------------------} {-------------------------------------------------------------------- Join --------------------------------------------------------------------} join :: a -> Set a -> Set a -> Set a join x Tip r = insertMin x r join x l Tip = insertMax x l join x l@(Bin sizeL y ly ry) r@(Bin sizeR z lz rz) | delta*sizeL < sizeR = balanceL z (join x l lz) rz | delta*sizeR < sizeL = balanceR y ly (join x ry r) | otherwise = bin x l r -- insertMin and insertMax don't perform potentially expensive comparisons. insertMax,insertMin :: a -> Set a -> Set a insertMax x t = case t of Tip -> singleton x Bin _ y l r -> balanceR y l (insertMax x r) insertMin x t = case t of Tip -> singleton x Bin _ y l r -> balanceL y (insertMin x l) r {-------------------------------------------------------------------- [merge l r]: merges two trees. --------------------------------------------------------------------} merge :: Set a -> Set a -> Set a merge Tip r = r merge l Tip = l merge l@(Bin sizeL x lx rx) r@(Bin sizeR y ly ry) | delta*sizeL < sizeR = balanceL y (merge l ly) ry | delta*sizeR < sizeL = balanceR x lx (merge rx r) | otherwise = glue l r {-------------------------------------------------------------------- [glue l r]: glues two trees together. Assumes that [l] and [r] are already balanced with respect to each other. --------------------------------------------------------------------} glue :: Set a -> Set a -> Set a glue Tip r = r glue l Tip = l glue l r | size l > size r = let (m,l') = deleteFindMax l in balanceR m l' r | otherwise = let (m,r') = deleteFindMin r in balanceL m l r' -- | /O(log n)/. Delete and find the minimal element. -- -- > deleteFindMin set = (findMin set, deleteMin set) deleteFindMin :: Set a -> (a,Set a) deleteFindMin t = case t of Bin _ x Tip r -> (x,r) Bin _ x l r -> let (xm,l') = deleteFindMin l in (xm,balanceR x l' r) Tip -> (error "Set.deleteFindMin: can not return the minimal element of an empty set", Tip) -- | /O(log n)/. Delete and find the maximal element. -- -- > deleteFindMax set = (findMax set, deleteMax set) deleteFindMax :: Set a -> (a,Set a) deleteFindMax t = case t of Bin _ x l Tip -> (x,l) Bin _ x l r -> let (xm,r') = deleteFindMax r in (xm,balanceL x l r') Tip -> (error "Set.deleteFindMax: can not return the maximal element of an empty set", Tip) -- | /O(log n)/. Retrieves the minimal key of the set, and the set -- stripped of that element, or 'Nothing' if passed an empty set. minView :: Set a -> Maybe (a, Set a) minView Tip = Nothing minView x = Just (deleteFindMin x) -- | /O(log n)/. Retrieves the maximal key of the set, and the set -- stripped of that element, or 'Nothing' if passed an empty set. maxView :: Set a -> Maybe (a, Set a) maxView Tip = Nothing maxView x = Just (deleteFindMax x) {-------------------------------------------------------------------- [balance x l r] balances two trees with value x. The sizes of the trees should balance after decreasing the size of one of them. (a rotation). [delta] is the maximal relative difference between the sizes of two trees, it corresponds with the [w] in Adams' paper. 
[ratio] is the ratio between an outer and inner sibling of the heavier subtree in an unbalanced setting. It determines whether a double or single rotation should be performed to restore balance. It is correspondes with the inverse of $\alpha$ in Adam's article. Note that according to the Adam's paper: - [delta] should be larger than 4.646 with a [ratio] of 2. - [delta] should be larger than 3.745 with a [ratio] of 1.534. But the Adam's paper is errorneous: - it can be proved that for delta=2 and delta>=5 there does not exist any ratio that would work - delta=4.5 and ratio=2 does not work That leaves two reasonable variants, delta=3 and delta=4, both with ratio=2. - A lower [delta] leads to a more 'perfectly' balanced tree. - A higher [delta] performs less rebalancing. In the benchmarks, delta=3 is faster on insert operations, and delta=4 has slightly better deletes. As the insert speedup is larger, we currently use delta=3. --------------------------------------------------------------------} delta,ratio :: Int delta = 3 ratio = 2 -- The balance function is equivalent to the following: -- -- balance :: a -> Set a -> Set a -> Set a -- balance x l r -- | sizeL + sizeR <= 1 = Bin sizeX x l r -- | sizeR > delta*sizeL = rotateL x l r -- | sizeL > delta*sizeR = rotateR x l r -- | otherwise = Bin sizeX x l r -- where -- sizeL = size l -- sizeR = size r -- sizeX = sizeL + sizeR + 1 -- -- rotateL :: a -> Set a -> Set a -> Set a -- rotateL x l r@(Bin _ _ ly ry) | size ly < ratio*size ry = singleL x l r -- | otherwise = doubleL x l r -- rotateR :: a -> Set a -> Set a -> Set a -- rotateR x l@(Bin _ _ ly ry) r | size ry < ratio*size ly = singleR x l r -- | otherwise = doubleR x l r -- -- singleL, singleR :: a -> Set a -> Set a -> Set a -- singleL x1 t1 (Bin _ x2 t2 t3) = bin x2 (bin x1 t1 t2) t3 -- singleR x1 (Bin _ x2 t1 t2) t3 = bin x2 t1 (bin x1 t2 t3) -- -- doubleL, doubleR :: a -> Set a -> Set a -> Set a -- doubleL x1 t1 (Bin _ x2 (Bin _ x3 t2 t3) t4) = bin x3 (bin x1 t1 t2) (bin x2 t3 t4) -- doubleR x1 (Bin _ x2 t1 (Bin _ x3 t2 t3)) t4 = bin x3 (bin x2 t1 t2) (bin x1 t3 t4) -- -- It is only written in such a way that every node is pattern-matched only once. -- -- Only balanceL and balanceR are needed at the moment, so balance is not here anymore. -- In case it is needed, it can be found in Data.Map. -- Functions balanceL and balanceR are specialised versions of balance. -- balanceL only checks whether the left subtree is too big, -- balanceR only checks whether the right subtree is too big. -- balanceL is called when left subtree might have been inserted to or when -- right subtree might have been deleted from. 
balanceL :: a -> Set a -> Set a -> Set a balanceL x l r = case r of Tip -> case l of Tip -> Bin 1 x Tip Tip (Bin _ _ Tip Tip) -> Bin 2 x l Tip (Bin _ lx Tip (Bin _ lrx _ _)) -> Bin 3 lrx (Bin 1 lx Tip Tip) (Bin 1 x Tip Tip) (Bin _ lx ll@(Bin _ _ _ _) Tip) -> Bin 3 lx ll (Bin 1 x Tip Tip) (Bin ls lx ll@(Bin lls _ _ _) lr@(Bin lrs lrx lrl lrr)) | lrs < ratio*lls -> Bin (1+ls) lx ll (Bin (1+lrs) x lr Tip) | otherwise -> Bin (1+ls) lrx (Bin (1+lls+size lrl) lx ll lrl) (Bin (1+size lrr) x lrr Tip) (Bin rs _ _ _) -> case l of Tip -> Bin (1+rs) x Tip r (Bin ls lx ll lr) | ls > delta*rs -> case (ll, lr) of (Bin lls _ _ _, Bin lrs lrx lrl lrr) | lrs < ratio*lls -> Bin (1+ls+rs) lx ll (Bin (1+rs+lrs) x lr r) | otherwise -> Bin (1+ls+rs) lrx (Bin (1+lls+size lrl) lx ll lrl) (Bin (1+rs+size lrr) x lrr r) (_, _) -> error "Failure in Data.Map.balanceL" | otherwise -> Bin (1+ls+rs) x l r {-# NOINLINE balanceL #-} -- balanceR is called when right subtree might have been inserted to or when -- left subtree might have been deleted from. balanceR :: a -> Set a -> Set a -> Set a balanceR x l r = case l of Tip -> case r of Tip -> Bin 1 x Tip Tip (Bin _ _ Tip Tip) -> Bin 2 x Tip r (Bin _ rx Tip rr@(Bin _ _ _ _)) -> Bin 3 rx (Bin 1 x Tip Tip) rr (Bin _ rx (Bin _ rlx _ _) Tip) -> Bin 3 rlx (Bin 1 x Tip Tip) (Bin 1 rx Tip Tip) (Bin rs rx rl@(Bin rls rlx rll rlr) rr@(Bin rrs _ _ _)) | rls < ratio*rrs -> Bin (1+rs) rx (Bin (1+rls) x Tip rl) rr | otherwise -> Bin (1+rs) rlx (Bin (1+size rll) x Tip rll) (Bin (1+rrs+size rlr) rx rlr rr) (Bin ls _ _ _) -> case r of Tip -> Bin (1+ls) x l Tip (Bin rs rx rl rr) | rs > delta*ls -> case (rl, rr) of (Bin rls rlx rll rlr, Bin rrs _ _ _) | rls < ratio*rrs -> Bin (1+ls+rs) rx (Bin (1+ls+rls) x l rl) rr | otherwise -> Bin (1+ls+rs) rlx (Bin (1+ls+size rll) x l rll) (Bin (1+rrs+size rlr) rx rlr rr) (_, _) -> error "Failure in Data.Map.balanceR" | otherwise -> Bin (1+ls+rs) x l r {-# NOINLINE balanceR #-} {-------------------------------------------------------------------- The bin constructor maintains the size of the tree --------------------------------------------------------------------} bin :: a -> Set a -> Set a -> Set a bin x l r = Bin (size l + size r + 1) x l r {-# INLINE bin #-} {-------------------------------------------------------------------- Utilities --------------------------------------------------------------------} foldlStrict :: (a -> b -> a) -> a -> [b] -> a foldlStrict f = go where go z [] = z go z (x:xs) = let z' = f z x in z' `seq` go z' xs {-# INLINE foldlStrict #-} {-------------------------------------------------------------------- Debugging --------------------------------------------------------------------} -- | /O(n)/. Show the tree that implements the set. The tree is shown -- in a compressed, hanging format. showTree :: Show a => Set a -> String showTree s = showTreeWith True False s {- | /O(n)/. The expression (@showTreeWith hang wide map@) shows the tree that implements the set. If @hang@ is @True@, a /hanging/ tree is shown otherwise a rotated tree is shown. If @wide@ is 'True', an extra wide version is shown. 
> Set> putStrLn $ showTreeWith True False $ fromDistinctAscList [1..5] > 4 > +--2 > | +--1 > | +--3 > +--5 > > Set> putStrLn $ showTreeWith True True $ fromDistinctAscList [1..5] > 4 > | > +--2 > | | > | +--1 > | | > | +--3 > | > +--5 > > Set> putStrLn $ showTreeWith False True $ fromDistinctAscList [1..5] > +--5 > | > 4 > | > | +--3 > | | > +--2 > | > +--1 -} showTreeWith :: Show a => Bool -> Bool -> Set a -> String showTreeWith hang wide t | hang = (showsTreeHang wide [] t) "" | otherwise = (showsTree wide [] [] t) "" showsTree :: Show a => Bool -> [String] -> [String] -> Set a -> ShowS showsTree wide lbars rbars t = case t of Tip -> showsBars lbars . showString "|\n" Bin _ x Tip Tip -> showsBars lbars . shows x . showString "\n" Bin _ x l r -> showsTree wide (withBar rbars) (withEmpty rbars) r . showWide wide rbars . showsBars lbars . shows x . showString "\n" . showWide wide lbars . showsTree wide (withEmpty lbars) (withBar lbars) l showsTreeHang :: Show a => Bool -> [String] -> Set a -> ShowS showsTreeHang wide bars t = case t of Tip -> showsBars bars . showString "|\n" Bin _ x Tip Tip -> showsBars bars . shows x . showString "\n" Bin _ x l r -> showsBars bars . shows x . showString "\n" . showWide wide bars . showsTreeHang wide (withBar bars) l . showWide wide bars . showsTreeHang wide (withEmpty bars) r showWide :: Bool -> [String] -> String -> String showWide wide bars | wide = showString (concat (reverse bars)) . showString "|\n" | otherwise = id showsBars :: [String] -> ShowS showsBars bars = case bars of [] -> id _ -> showString (concat (reverse (tail bars))) . showString node node :: String node = "+--" withBar, withEmpty :: [String] -> [String] withBar bars = "| ":bars withEmpty bars = " ":bars {-------------------------------------------------------------------- Assertions --------------------------------------------------------------------} -- | /O(n)/. Test if the internal set structure is valid. valid :: Ord a => Set a -> Bool valid t = balanced t && ordered t && validsize t ordered :: Ord a => Set a -> Bool ordered t = bounded (const True) (const True) t where bounded lo hi t' = case t' of Tip -> True Bin _ x l r -> (lo x) && (hi x) && bounded lo (<x) l && bounded (>x) hi r balanced :: Set a -> Bool balanced t = case t of Tip -> True Bin _ _ l r -> (size l + size r <= 1 || (size l <= delta*size r && size r <= delta*size l)) && balanced l && balanced r validsize :: Set a -> Bool validsize t = (realsize t == Just (size t)) where realsize t' = case t' of Tip -> Just 0 Bin sz _ l r -> case (realsize l,realsize r) of (Just n,Just m) | n+m+1 == sz -> Just sz _ -> Nothing
technogeeky/d-A
include/containers-0.5.0.0/Data/Set/Base.hs
gpl-3.0
48,659
0
21
12,805
10,463
5,390
5,073
-1
-1
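-- Not part of the record above: a minimal usage sketch of the /left-bias/ the
-- module header describes, written against the public Data.Set wrapper (which
-- re-exports this Base module in containers). The AB type mirrors the example
-- already given in the intersection docs; with a non-structural Eq/Ord the
-- bias becomes observable.
module LeftBiasDemo where

import qualified Data.Set as S

data AB = A | B deriving Show
instance Eq  AB where _ == _ = True
instance Ord AB where compare _ _ = EQ

main :: IO ()
main = do
  print (S.singleton A `S.union` S.singleton B)  -- fromList [A]: elements of the first set win
  print (S.insert A (S.singleton B))             -- fromList [A]: insert replaces an "equal" element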
{-# LANGUAGE OverloadedStrings #-} module Lamdu.GUI.ExpressionEdit.ListEdit(make) where import Control.Applicative ((<$>), (<|>), Applicative(..)) import Control.Lens.Operators import Control.MonadA (MonadA) import Data.Monoid (Monoid(..)) import Lamdu.GUI.ExpressionGui (ExpressionGui) import Lamdu.GUI.ExpressionGui.Monad (ExprGuiM, holePickersAction) import qualified Control.Lens as Lens import qualified Graphics.UI.Bottle.EventMap as E import qualified Graphics.UI.Bottle.Widget as Widget import qualified Lamdu.Config as Config import qualified Lamdu.GUI.ExpressionEdit.EventMap as ExprEventMap import qualified Lamdu.GUI.ExpressionGui as ExpressionGui import qualified Lamdu.GUI.ExpressionGui.Monad as ExprGuiM import qualified Lamdu.GUI.WidgetEnvT as WE import qualified Lamdu.GUI.WidgetIds as WidgetIds import qualified Lamdu.Sugar.Types as Sugar make :: MonadA m => Sugar.List m (ExprGuiM.SugarExpr m) -> Sugar.Payload Sugar.Name m ExprGuiM.Payload -> Widget.Id -> ExprGuiM m (ExpressionGui m) make list pl = ExpressionGui.stdWrapParentExpr pl $ makeUnwrapped pl list makeBracketLabel :: MonadA m => String -> Widget.Id -> ExprGuiM m (ExpressionGui f) makeBracketLabel label myId = do config <- ExprGuiM.widgetEnv WE.readConfig ExpressionGui.fromValueWidget <$> ExpressionGui.makeColoredLabel (Config.listBracketTextSize config) (Config.listBracketColor config) label myId lastLens :: Lens.Traversal' [a] a lastLens = Lens.taking 1 . Lens.backwards $ Lens.traversed makeUnwrapped :: MonadA m => Sugar.Payload Sugar.Name m ExprGuiM.Payload -> Sugar.List m (ExprGuiM.SugarExpr m) -> Widget.Id -> ExprGuiM m (ExpressionGui m) makeUnwrapped pl list myId = ExprGuiM.assignCursor myId cursorDest $ do bracketOpenLabel <- makeBracketLabel "[" bracketsIdForAnim bracketCloseLabel <- makeBracketLabel "]" bracketsIdForAnim config <- ExprGuiM.widgetEnv WE.readConfig let addFirstElemEventMap = actionEventMap (Config.listAddItemKeys config) "Add First Item" Sugar.addFirstItem onFirstBracket mItem itemPl label = do let hg = itemPl ^. Sugar.plData . ExprGuiM.plHoleGuids jumpHolesEventMap <- hg & case mItem of Just item | Lens.has (Sugar.liExpr . Sugar.rBody . Sugar._BodyHole) item -> ExprGuiM.hgMNextHole %~ (storedGuid <|>) where storedGuid = item ^? Sugar.liExpr . Sugar.rPayload . Sugar.plActions . Lens._Just . Sugar.storedGuid _ -> id & ExprEventMap.jumpHolesEventMap [] ExpressionGui.makeFocusableView firstBracketId label <&> ExpressionGui.egWidget %~ Widget.weakerEvents (mappend addFirstElemEventMap jumpHolesEventMap) case Sugar.lValues list of [] -> onFirstBracket Nothing pl $ ExpressionGui.hbox [bracketOpenLabel, bracketCloseLabel] firstValue : nextValues -> do (_, firstEdit) <- makeItem firstValue nextEdits <- mapM makeItem nextValues bracketOpen <- onFirstBracket (Just firstValue) (firstValue ^. Sugar.liExpr . Sugar.rPayload) bracketOpenLabel let nilDeleteEventMap = actionEventMap (Config.delKeys config) "Replace nil with hole" Sugar.replaceNil addLastEventMap = maybe mempty ( Widget.keysEventMapMovesCursor (Config.listAddItemKeys config) (E.Doc ["Edit", "List", "Add Last Item"]) . fmap WidgetIds.fromGuid ) $ Sugar.lValues list ^? lastLens . Sugar.liMActions . Lens._Just . Sugar.itemAddNext closerEventMap = mappend nilDeleteEventMap addLastEventMap bracketClose <- ExpressionGui.makeFocusableView closeBracketId bracketCloseLabel <&> ExpressionGui.egWidget %~ Widget.weakerEvents closerEventMap return . 
ExpressionGui.hbox $ concat [[bracketOpen, firstEdit], nextEdits >>= pairToList, [bracketClose]] where bracketsIdForAnim = WidgetIds.fromGuid $ Sugar.lNilGuid list pairToList (x, y) = [x, y] closeBracketId = Widget.joinId myId ["close-bracket"] itemId = WidgetIds.fromGuid . (^. Sugar.liExpr . Sugar.rPayload . Sugar.plGuid) actionEventMap keys doc actSelect = maybe mempty ( Widget.keysEventMapMovesCursor keys (E.Doc ["Edit", "List", doc]) . fmap WidgetIds.fromGuid . actSelect) $ Sugar.lMActions list firstBracketId = Widget.joinId myId ["first-bracket"] cursorDest = maybe firstBracketId itemId $ Sugar.lValues list ^? Lens.traversed makeItem :: MonadA m => Sugar.ListItem m (ExprGuiM.SugarExpr m) -> ExprGuiM m (ExpressionGui m, ExpressionGui m) makeItem item = do config <- ExprGuiM.widgetEnv WE.readConfig let mkItemEventMap resultPickers Sugar.ListItemActions { Sugar._itemAddNext = addItem , Sugar._itemDelete = delItem } = mconcat [ E.keyPresses (Config.listAddItemKeys config) (doc resultPickers) $ mappend <$> holePickersAction resultPickers <*> (Widget.eventResultFromCursor . WidgetIds.fromGuid <$> addItem) , Widget.keysEventMapMovesCursor (Config.delKeys config) (E.Doc ["Edit", "List", "Delete Item"]) $ WidgetIds.fromGuid <$> delItem ] (pair, resultPickers) <- ExprGuiM.listenResultPickers $ Lens.sequenceOf Lens.both ( fmap ExpressionGui.fromValueWidget . ExpressionGui.makeColoredLabel (Config.listCommaTextSize config) (Config.listCommaColor config) ", " $ Widget.augmentId ',' itemWidgetId , ExprGuiM.makeSubexpression 0 itemExpr ) return $ pair & Lens._2 . ExpressionGui.egWidget %~ Widget.weakerEvents (maybe mempty (mkItemEventMap resultPickers) (item ^. Sugar.liMActions)) where itemExpr = item ^. Sugar.liExpr itemWidgetId = WidgetIds.fromGuid $ itemExpr ^. Sugar.rPayload . Sugar.plGuid doc [] = E.Doc ["Edit", "List", "Add Next Item"] doc _ = E.Doc ["Edit", "List", "Pick Result and Add Next Item"]
schell/lamdu
Lamdu/GUI/ExpressionEdit/ListEdit.hs
gpl-3.0
6,068
0
27
1,296
1,658
860
798
-1
-1
import Control.Exception
import GHC.Compact
import qualified Data.Map as Map
import Data.Time.Clock
import Text.Printf
import System.Environment
import System.Mem
import Control.DeepSeq

-- Benchmark compact against compactWithSharing. e.g.
-- ./compact_bench 1000000

main = do
  [n] <- map read <$> getArgs
  let m = Map.fromList [(x,[x*1000..x*1000+10]) | x <- [1..(n::Integer)]]
  evaluate (force m)
  timeIt "compact" $ compact m >>= compactSize >>= print
  timeIt "compactWithSharing" $ compactWithSharing m >>= compactSize >>= print

timeIt :: String -> IO a -> IO a
timeIt str io = do
  performMajorGC
  t0 <- getCurrentTime
  a <- io
  t1 <- getCurrentTime
  printf "%s: %.2f\n" str (realToFrac (t1 `diffUTCTime` t0) :: Double)
  return a
ezyang/ghc
libraries/ghc-compact/tests/compact_bench.hs
bsd-3-clause
750
1
15
130
276
138
138
22
1
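-- Not part of the benchmark above: a smaller sketch of the same GHC.Compact
-- calls on a fixed structure, also reading the value back out (this assumes
-- ghc-compact's getCompact accessor in addition to the functions used above).
module CompactSketch where

import GHC.Compact
import qualified Data.Map as Map

main :: IO ()
main = do
  let m = Map.fromList [(x, replicate 3 x) | x <- [1 .. 1000 :: Integer]]
  c  <- compact m              -- deep-copies m into a compact region (no sharing kept)
  cs <- compactWithSharing m   -- slower copy that preserves sharing inside m
  print =<< compactSize c
  print =<< compactSize cs
  print (Map.size (getCompact c))  -- the compacted value behaves like an ordinary Map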
{-# OPTIONS_GHC -fno-warn-redundant-constraints #-}

module Tc170_Aux where

class ReadMode mode

data Attr m w a = Attr (w -> IO a) (w -> a -> IO ())

mapAttr :: ReadMode m => (a -> b) -> (a -> b -> a) -> Attr m w a -> Attr m w b
mapAttr get set (Attr getter setter)
    = Attr (\w -> do a <- getter w; return (get a))
           (\w b -> do a <- getter w; setter w (set a b))

data Rect = Rect
data Point = Point

topLeft = undefined
rectMoveTo = undefined

class Dimensions w where
  frame :: ReadMode m => Attr m w Rect
  position :: ReadMode m => Attr m w Point
  position = mapAttr (\f -> topLeft f) (\f p -> rectMoveTo p f) frame
urbanslug/ghc
testsuite/tests/typecheck/should_compile/Tc170_Aux.hs
bsd-3-clause
651
0
12
174
310
158
152
-1
-1
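-- Not part of the GHC test file above: a hedged sketch of how the class is
-- meant to be used -- a hypothetical widget type only supplies 'frame' and
-- inherits 'position' from the default method, which goes through 'mapAttr'.
module Tc170_Demo where

import Tc170_Aux

data Mode = Mode
instance ReadMode Mode

data Label = Label

instance Dimensions Label where
  frame = Attr (\_ -> return Rect) (\_ _ -> return ())
  -- 'position' comes for free: mapAttr reads the Rect, projects it with
  -- topLeft, and writes it back with rectMoveTo (both stubs in Tc170_Aux).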
{-# LANGUAGE NoMonomorphismRestriction, FlexibleContexts, TypeFamilies #-}
module Main where

import Diagrams.Prelude
import Diagrams.Backend.SVG.CmdLine

rearrange :: [a] -> [a]
rearrange = uncurry (++) . divide
  where
    divide [] = ([],[])
    divide (x:y:as) = (x:xs', y:ys')
      where (xs',ys') = divide as
    divide _ = error "list length is odd"

main :: IO ()
main = mainWith (vsep 1 (replicate 4 (hrule 1 # lc blue)) :: Diagram B)
Javran/misc
butterfly/src/Main.hs
mit
453
0
12
97
182
99
83
12
3
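-- Not part of the file above: 'rearrange' splits a list into its even- and
-- odd-positioned elements and concatenates the halves; worked out from the
-- definition, rearrange [1..6] is [1,3,5,2,4,6]. A hedged sketch of using it
-- inside the module (hsep is the horizontal counterpart of the vsep already
-- used in main):
butterflyish :: Diagram B
butterflyish = hsep 0.5 (rearrange [square 1, circle 1, square 2, circle 2])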
module Ch07Spec where

import Test.Hspec
import Ch07

spec :: Spec
spec = do
  describe "indexJ" $
    it "select the nth element" $ do
      indexJ 2 yeah `shouldBe` Just 'a'
      indexJ 5 yeah `shouldBe` Nothing
  describe "dropJ" $
    it "drops first n elements" $ do
      jlToList (dropJ 1 yeah) `shouldBe` "eah"
      jlToList (dropJ 4 yeah) `shouldBe` []
  describe "takeJ" $
    it "takes the first n elements" $ do
      jlToList (takeJ 1 yeah) `shouldBe` "y"
      jlToList (takeJ 3 yeah) `shouldBe` "yea"
isaiah/cis194
test/Ch07Spec.hs
mit
518
0
13
135
186
91
95
17
1
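-- Not part of the spec above: 'yeah', 'indexJ', 'dropJ', 'takeJ' and
-- 'jlToList' all come from the Ch07 module, which is not shown here. Read off
-- the assertions, the intended behaviour appears to be: 'yeah' flattens to
-- the string "yeah", indexJ is zero-based, and takeJ/dropJ mirror take/drop
-- on the flattened list, e.g. (inferred, not asserted above):
--
-- >>> jlToList yeah
-- "yeah"
-- >>> indexJ 0 yeah
-- Just 'y'
-- >>> jlToList (takeJ 2 yeah)
-- "ye"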
-- roman2decimal
module Roman2Decimal
  ( RomanAlg(..), romanAlgToDecimal
  , RomanNum, romanToDecimal
  , strToRoman, romanStrToDecimal
  ) where

data RomanAlg = I | V | X | L | C | D | M
  deriving(Eq, Ord, Show, Read)

type RomanNum = [RomanAlg]

romanAlgToDecimal :: RomanAlg -> Int
romanAlgToDecimal c = case c of
  I -> 1
  V -> 5
  X -> 10
  L -> 50
  C -> 100
  D -> 500
  M -> 1000

romanToDecimal :: RomanNum -> Int
romanToDecimal = loop 0 . map romanAlgToDecimal
  where loop m (x:xs@(y:_)) | x < y = loop (m - x) xs
        loop m (y:ys) = loop (m + y) ys
        loop m [] = m

romanStrToDecimal :: String -> Int
romanStrToDecimal = romanToDecimal . strToRoman

strToRoman :: String -> RomanNum
strToRoman = read . surround "[" "]" . interpose ","

surround :: String -> String -> String -> String
surround l r s = l ++ s ++ r

interpose :: String -> String -> String
interpose c = init . concatMap (:c)
yamadapc/general-roman2decimal
haskell/Roman2Decimal.hs
mit
1,094
0
12
395
378
204
174
32
7
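-- Not part of the module above: a few hand-checked calls. Subtractive pairs
-- fall out of the 'loop' in romanToDecimal, which subtracts any digit that is
-- strictly smaller than its right-hand neighbour.
--
-- >>> strToRoman "XIV"
-- [X,I,V]
-- >>> romanStrToDecimal "XIV"
-- 14
-- >>> romanStrToDecimal "MCMXCIV"
-- 1994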
{-
Copyright (c) 2015 Nils 'bash0r' Jonsson

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
-}

{- |
Module      : $Header$
Description : The bitwise operators of the EDSL.

Author      : Nils 'bash0r' Jonsson
Copyright   : (c) 2015 Nils 'bash0r' Jonsson
License     : MIT

Maintainer  : [email protected]
Stability   : unstable
Portability : non-portable (Portability is untested.)

The bitwise operators of the EDSL.
-}
module Language.JavaScript.DSL.Operators.Bitwise
( (.&)
, (.|)
, (.^)
) where

import qualified Language.JavaScript.DOM as DOM

import Language.JavaScript.DSL.TypeAliases

-- | Creates a bitwise and expression.
infixl 2 .&
(.&) :: Expression -> Expression -> Expression
(.&) = DOM.BinaryExpression DOM.BitwiseAnd

-- | Creates a bitwise or expression.
infixl 2 .|
(.|) :: Expression -> Expression -> Expression
(.|) = DOM.BinaryExpression DOM.BitwiseOr

-- | Creates a bitwise xor expression.
infixl 2 .^
(.^) :: Expression -> Expression -> Expression
(.^) = DOM.BinaryExpression DOM.BitwiseXOr
project-horizon/framework
src/lib/Language/JavaScript/DSL/Operators/Bitwise.hs
mit
2,014
0
6
346
149
97
52
15
1
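-- Not part of the module above: all three operators are declared infixl 2, so
-- chains group to the left without parentheses. A hedged sketch compiled
-- against the module above; the operands are left abstract because the DOM
-- literal constructors are not shown here.
groupsLeft :: Expression -> Expression -> Expression -> Expression
groupsLeft a b c = a .& b .| c   -- parsed as (a .& b) .| c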
-- Esolang: MiniBitMove
-- https://www.codewars.com/kata/587c0138110b20624e000253
module Haskell.SylarDoom.MiniBitMove where

import Data.List (foldl')
import Data.Foldable(toList)
import qualified Data.Sequence as Seq

interpreter :: String -> String -> String
interpreter tape array =
    toList
  . fst
  . foldl' (\(a, i) c -> if c == '0' then (a, succ i) else (Seq.adjust flip' i a, i))
           (Seq.fromList array, 0)
  . take tl
  . cycle
  $ tape'
  where
    tape' = filter (`elem` "01") tape
    tl = (* length tape') . succ . (length array `div`) . length . filter (== '0') $ tape'
    flip' '0' = '1'
    flip' '1' = '0'
    flip' c = c
gafiatulin/codewars
src/6 kyu/MiniBitMove.hs
mit
651
0
15
148
241
136
105
11
4
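-- Not part of the solution above: one hand-traced call. In the tape language,
-- '0' moves the pointer right and '1' flips the bit under it; the fold runs
-- through succ (length array `div` zeroes-in-tape) copies of the filtered
-- tape, and an out-of-range flip is a no-op thanks to Seq.adjust.
--
-- >>> interpreter "01" "1010"
-- "1101"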
module Either where

lefts' :: [Either a b] -> [a]
lefts' = foldr (\a b -> leftToList a ++ b) []

rights' :: [Either a b] -> [b]
rights' = foldr (\a b -> rightToList a ++ b) []

partitionEithers' :: [Either a b] -> ([a], [b])
partitionEithers' = foldr (\a (b, c) -> (leftToList a ++ b, rightToList a ++ c)) ([], [])

eitherMaybe' :: (b -> c) -> Either a b -> Maybe c
eitherMaybe' _ (Left _)  = Nothing
eitherMaybe' f (Right x) = Just $ f x

either' :: (a -> c) -> (b -> c) -> Either a b -> c
either' f _ (Left x)  = f x
either' _ f (Right x) = f x

eitherMaybe'' :: (b -> c) -> Either a b -> Maybe c
eitherMaybe'' f = either' (const Nothing) (Just . f)

leftToList :: Either a b -> [a]
leftToList (Left a) = [a]
leftToList _        = []

rightToList :: Either a b -> [b]
rightToList (Right b) = [b]
rightToList _         = []
JoshuaGross/haskell-learning-log
Code/Haskellbook/Either.hs
mit
810
0
10
184
474
248
226
21
1
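-- Not part of the module above: quick checks, with results read directly off
-- the definitions.
--
-- >>> lefts' [Left 1, Right 'a', Left 2, Right 'b']
-- [1,2]
-- >>> rights' [Left 1, Right 'a', Left 2, Right 'b']
-- "ab"
-- >>> partitionEithers' [Left 1, Right 'a', Left 2, Right 'b']
-- ([1,2],"ab")
-- >>> eitherMaybe'' (+1) (Right 2)
-- Just 3
-- >>> eitherMaybe'' (+1) (Left "oops")
-- Nothing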