code | repo_name | path | license | size | n_ast_errors | ast_max_depth | n_whitespaces | n_ast_nodes | n_ast_terminals | n_ast_nonterminals | loc | cycloplexity
---|---|---|---|---|---|---|---|---|---|---|---|---|
module Syntax.ANF
(
)
where
import Text.PrettyPrint.Leijen
data
| kvanberendonck/ssa-to-cps | src/Syntax/ANF.hs | bsd-3-clause | 76 | 1 | 5 | 20 | 17 | 11 | 6 | -1 | -1 |
{-# LANGUAGE FlexibleInstances, MultiParamTypeClasses, TemplateHaskell, FlexibleContexts #-}
module TypeSystem.Rule where
import Utils.Utils
import Utils.ToString
import TypeSystem.Types
import TypeSystem.Syntax
import TypeSystem.ParseTree
import TypeSystem.Expression
import TypeSystem.Relation
import Data.Maybe
import qualified Data.Map as M
import Data.Map (Map)
import Data.List as L
import Control.Arrow ((&&&))
import Lens.Micro hiding ((&))
import Lens.Micro.TH
{- Predicates for a rule -}
data Predicate = TermIsA Name TypeName
| Same Expression Expression
| Needed Conclusion
deriving (Show, Ord, Eq)
fromTermIsA :: Predicate -> Maybe (Name, TypeName)
fromTermIsA (TermIsA n tn) = Just (n, tn)
fromTermIsA _ = Nothing
fromSame :: Predicate -> Maybe (Expression, Expression)
fromSame (Same e1 e2) = Just (e1, e2)
fromSame _ = Nothing
fromNeeded :: Predicate -> Maybe Conclusion
fromNeeded (Needed c) = Just c
fromNeeded _ = Nothing
instance Refactorable TypeName Predicate where
refactor ftn (TermIsA n tn) = TermIsA n (ftn tn)
refactor ftn (Same e1 e2) = Same (refactor ftn e1) (refactor ftn e2)
refactor ftn (Needed concl) = Needed $ refactor ftn concl
instance Refactorable FunctionName Predicate where
refactor _ isA@TermIsA{} = isA
refactor ffn (Same e1 e2) = Same (refactor ffn e1) (refactor ffn e2)
refactor ffn (Needed concl) = Needed $ refactor ffn concl
instance Refactorable RelationSymbol Predicate where
refactor _ isA@TermIsA{} = isA
refactor _ same@Same{} = same
refactor frs (Needed concl) = Needed $ refactor frs concl
{- A generic conclusion that can be drawn. E.g. given these parsetrees, this relation holds. -}
data ConclusionA a = RelationMet { _conclusionRel :: Relation
, _conclusionArgs :: [a]
} deriving (Show, Ord, Eq)
instance Functor ConclusionA where
fmap f (RelationMet r args)
= RelationMet r (args |> f)
instance (Refactorable TypeName a) => Refactorable TypeName (ConclusionA a) where
refactor ftn (RelationMet rel args)
= RelationMet (refactor ftn rel) (args |> refactor ftn)
instance (Refactorable FunctionName a) => Refactorable FunctionName (ConclusionA a) where
refactor ffn (RelationMet rel args)
= RelationMet rel (args |> refactor ffn)
instance Refactorable RelationSymbol (ConclusionA a) where
refactor frs (RelationMet rel args)
= RelationMet (refactor frs rel) args
-- Proof for a conclusion; it should be valid using the given parse trees
type Conclusion' = ConclusionA ParseTree
-- Prototype for a conclusion. Might be valid, given a specific parsetree as value.
type Conclusion = ConclusionA Expression
{-
A rule is an expression, often of the form:
If these predicates are met, then this relation is valid.
Note that these can be transformed to also produce values, giving the modes of the relations used
-}
data Rule = Rule { _ruleName :: Name
, _rulePreds :: [Predicate]
, _ruleConcl :: Conclusion
} deriving (Show, Ord, Eq)
{-
A property states something which is always true, given the predicates.
It looks a lot like a rule, except that the conclusion might be ORred
(e.g. given predicates A and B, either C or D is valid)
-}
data Property = Property { _propName :: Name
, _propPreds :: [Predicate]
, _propConcl :: [Predicate] -- CHOICE of conclusions; at least one should be true (if the propPreds are all true)
} deriving (Show, Ord, Eq)
newtype Rules = Rules {_rules :: Map Symbol [Rule]}
deriving (Show, Eq)
makeLenses ''ConclusionA
makeLenses ''Rule
makeLenses ''Property
makeLenses ''Rules
_ruleAsProp :: Rule -> Property
_ruleAsProp (Rule rel pred concl)
= Property rel pred [Needed concl]
getRulesOnName :: Rules -> Map Name Rule
getRulesOnName rls
= rls & get rules & M.elems & concat |> (get ruleName &&& id) & M.fromList
fromRulesOnName :: Syntax -> Map k Rule -> Either String Rules
fromRulesOnName syntax rules
= rules & M.elems & makeRules syntax
rulesOnName :: Lens' Rules (Map Name Rule)
rulesOnName = lens getRulesOnName (\_ rulesDict -> _makeRules $ M.elems rulesDict)
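-- Group a list of rules into a 'Rules' map, keyed on the symbol of the
-- relation that each rule's conclusion is about.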
_makeRules :: [Rule] -> Rules
_makeRules rules
= let sortedRules = rules |> ((\r -> r & get ruleConcl & get conclusionRel & get relSymbol) &&& id) & merge
in Rules $ M.fromList sortedRules
makeRules :: Syntax -> [Rule] -> Either String Rules
makeRules s rules
= do checkNoDuplicates (rules |> get ruleName) (\dups -> "Multiple rules have the name "++showComma dups)
let rules' = _makeRules rules
check' s rules'
return rules'
makeProperties :: Syntax -> [Property] -> Either String [Property]
makeProperties s props
= do checkNoDuplicates (props |> get propName) (\dups -> "Multiple properties have the name "++showComma dups)
props |+> check' s
return props
instance Refactorable TypeName Rule where
refactor ftn rule
= rule & over (rulePreds . each) (refactor ftn)
& over ruleConcl (refactor ftn)
instance Refactorable FunctionName Rule where
refactor ffn rule
= rule & over (rulePreds . each) (refactor ffn)
& over ruleConcl (refactor ffn)
instance Refactorable RelationSymbol Rule where
refactor ffn rule
= rule & over (rulePreds . each) (refactor ffn)
& over ruleConcl (refactor ffn)
instance Refactorable RuleName Rule where
refactor frn rule
= rule & over ruleName (unliftRuleName frn)
instance Refactorable TypeName Rules where
refactor f rls = rls & over rules (||>> refactor f)
instance Refactorable FunctionName Rules where
refactor f rls = rls & over rules (||>> refactor f)
instance Refactorable RelationSymbol Rules where
-- rules are ordered over symbols, so we map the keys too
refactor f rls = rls & over rules (||>> refactor f)
& over rules (M.mapKeys $ unliftRelationSymbol f)
instance Refactorable RuleName Rules where
-- rules are ordered on symbol, so no changes here
refactor f rls = rls & over rules (||>> refactor f)
instance Refactorable TypeName Property where
refactor ftn prop
= prop & over (propPreds . each) (refactor ftn)
& over (propConcl . each) (refactor ftn)
instance Refactorable FunctionName Property where
refactor ffn prop
= prop & over (propPreds . each) (refactor ffn)
& over (propConcl . each) (refactor ffn)
instance Refactorable RelationSymbol Property where
refactor ffn prop
= prop & over (propPreds . each) (refactor ffn)
& over (propConcl . each) (refactor ffn)
------------------------------ CHECKS --------------------------------
instance Check' Syntax Rules where
check' syntax (Rules rules)
= rules & M.elems & concat |> check' syntax & allRight_
instance Check' Syntax Rule where
check' syntax rule
= inMsg ("While typechecking the rule "++show (get ruleName rule)) $
typeCheckProperty syntax $ _ruleAsProp rule
instance Check' Syntax Property where
check' syntax prop
= inMsg ("While typechecking the property "++show (get propName prop)) $
typeCheckProperty syntax prop
typeCheckProperty :: Syntax -> Property -> Either String ()
typeCheckProperty syntax (Property nm preds concls)
= do predTypings <- mapi preds |> (\(i, p) -> inMsg ("In predicate "++show i) $
typeCheckPredicate syntax p) & allRight
predTyping <- inMsg "In the combination of typings generated by all the predicates" $
mergeContexts syntax predTypings
conclTypings <- inMsg "In the conclusion"
(concls |+> typeCheckPredicate syntax)
conclTyping <- inMsg "In the combinations of typings generated by the choices in the conclusion" $
mergeContexts syntax conclTypings
finalTyping <- inMsg "While matching the predicate typing and the conclusion typing" $
mergeContext syntax predTyping conclTyping
return ()
typeCheckPredicate :: Syntax -> Predicate -> Either String (Map Name TypeName)
typeCheckPredicate syntax (TermIsA e tp)
= return $ M.singleton e tp
typeCheckPredicate syntax (Same e1 e2)
= do t1 <- expectedTyping syntax e1
t2 <- expectedTyping syntax e2
mergeContext syntax t1 t2
typeCheckPredicate syntax (Needed concl)
= typeCheckConclusion syntax concl
typeCheckConclusion :: Syntax -> Conclusion -> Either String (Map Name TypeName)
typeCheckConclusion syntax (RelationMet relation exprs)
= inMsg "While typechecking the conclusion" $
do let types = relation & relType -- Types of the relation
let modes = relation & relModes
assert Left (length types == length exprs) $
"Expected "++show (length types)++" expressions as arguments to the relation "++
show (get relSymbol relation)++" : "++show types++", but only got "++show (length exprs)++" arguments"
let usagesForMode mode
= filterMode mode relation exprs -- we get the expressions that are used for INput or OUTput
|> expectedTyping syntax & allRight -- how are these typed? Either String [Map Name TypeName]
>>= mergeContexts syntax -- merge these, crash for input/output contradictions
pats <- usagesForMode In
usages <- usagesForMode Out
mergeContext syntax pats usages
------------------------------- TO STRING ------------------------------
instance ToString Predicate where
toParsable = showPredicateWith toParsable toParsable
toCoParsable = showPredicateWith toCoParsable toCoParsable
debug = showPredicateWith debug debug
showPredicateWith :: (Expression -> String) -> (Conclusion -> String) -> Predicate -> String
showPredicateWith se sc (TermIsA e tp)
= se (MVar tp e) ++ ": "++ tp
showPredicateWith se sc (Same e1 e2)
= se e1 ++ " = "++ se e2++" : "++ typeOf e1
showPredicateWith se sc (Needed concl)
= sc concl
instance ToString Rule where
toParsable = showPropertyWith toParsable . _ruleAsProp
toCoParsable = showPropertyWith toCoParsable . _ruleAsProp
debug = showPropertyWith debug . _ruleAsProp
instance ToString Property where
toParsable = showPropertyWith toParsable
toCoParsable = showPropertyWith toCoParsable
debug = showPropertyWith debug
showPropertyWith :: (Predicate -> String) -> Property -> String
showPropertyWith sp (Property nm predicates conclusion)
= let
predicates' = predicates |> sp & intercalate "\t"
conclusion' = conclusion |> sp & intercalate " | "
nm' = " \t[" ++ nm ++ "]"
line = replicate (2 + max (length' 1 predicates') (length' 1 conclusion')) '-'
in
["", " " ++ predicates', line ++ " " ++ nm', " "++ conclusion'] & unlines
instance ToString' [Relation] Rules where
show' = const show
toParsable' = showRulesWith toParsable
toCoParsable' = showRulesWith toCoParsable
debug' = showRulesWith debug
showRulesWith :: (Rule -> String) -> [Relation] -> Rules -> String
showRulesWith sr relations (Rules rules)
= let relationOf nm = relations & filter ((==) nm . get relSymbol) & head
relationOrder symbol = fromMaybe (length relations) (elemIndex symbol (relations |> get relSymbol))
in
rules & M.toList & sortOn (relationOrder . fst) |> (\(symbol, rules) ->
"\n" ++ inHeader "# " ("Rules about "++toCoParsable (relationOf symbol)) '-'
(rules |> sr & unlines)
)
& intercalate "\n\n"
instance (ToString a) => ToString (ConclusionA a) where
toParsable = showConclusionWith toParsable
toCoParsable = showConclusionWith toCoParsable
debug = showConclusionWith debug
showConclusionWith showArg (RelationMet rel [arg])
= inParens (get relSymbol rel) ++ " " ++ showArg arg
showConclusionWith showArg (RelationMet rel (arg1:args))
= showArg arg1 ++ " " ++ get relSymbol rel ++ " " ++ (args |> showArg & commas)
| pietervdvn/ALGT | src/TypeSystem/Rule.hs | bsd-3-clause | 11,490 | 515 | 18 | 2,161 | 3,742 | 1,965 | 1,777 | 230 | 1 |
{-# LANGUAGE UnicodeSyntax #-}
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Grammar
import GrammarLexer
import GrammarProcessing
import Generator
import System.Environment
import System.IO
import Data.Either.Combinators
import Filesystem.Path
import Filesystem.Path.CurrentOS
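-- Given the parser name and the contents of a grammar file, lex and parse the
-- grammar, compute its FIRST and FOLLOW sets, and render the generated parser
-- source.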
generateParser ∷ String → String → Either String String
generateParser pname input = do
(header, stateData, custom, pgr, lgr) ← mapLeft show $ runLexer input >>= parseGrammarFile
(pgr', first, follow) ← processGrammar pgr
let config = GC { parserName = pname
, parserHeader = header
, parserState = stateData
, parserCustom = custom
, pGrammar = pgr'
, lGrammar = lgr
, gFIRST = first
, gFOLLOW = follow
}
generateParserSource config
main :: IO ()
main = do
args ← getArgs
if length args == 0 then putStrLn "No file name specified" >> return ()
else do
let filename = head args
file ← openFile filename ReadMode
input ← hGetContents file
case generateParser (encodeString $ basename $ decodeString filename) input of
Left err → putStrLn err
Right res → putStrLn res
| flyingleafe/parser-gen | app/Main.hs | bsd-3-clause | 1,289 | 0 | 14 | 373 | 329 | 173 | 156 | 36 | 3 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeSynonymInstances #-}
module Language.PiSigma.Equality
( eq )
where
import Control.Monad
import Language.PiSigma.Evaluate
import Language.PiSigma.Syntax
class Equal a where
eq :: a -> a -> Eval ()
instance Equal (Clos Term) where
eq t u = do t' <- eval t
u' <- eval u
eq t' u'
-- unused?
-- eq' :: Env e => Clos (Term,Term) -> Eval e ()
-- eq' ((t,u),s) = eq (t,s) (u,s)
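-- eqBind compares two binders up to alpha equivalence: it declares one fresh
-- identifier for the bound variable of the first closure, points the bound
-- variable of the second closure at that same identifier, and then compares
-- the two bodies in the extended scopes.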
eqBind :: (Closure a) =>
(a -> a -> Eval ()) ->
Bind a -> Bind a -> Eval ()
eqBind eqArg (x0,c0) (x1,c1) =
do let s0 = getScope c0
s1 = getScope c1
(i,s0') <- decl' x0 s0
let s1' = extendScope s1 x1 (i,Nothing)
let c0' = putScope c0 s0'
c1' = putScope c1 s1'
eqArg c0' c1'
instance (Equal a,Closure a) => Equal (Bind a) where
eq = eqBind eq
instance Equal Val where
eq (Ne t0) (Ne t1) = eq t0 t1
eq (VQ ps0 ((a0,(x0,b0)),s0)) (VQ ps1 ((a1,(x1,b1)),s1))
| ps0 == ps1 =
do eq (a0,s0) (a1,s1)
eq (x0,(b0,s0)) (x1,(b1,s1))
eq (VLam xt0) (VLam xt1) = eq xt0 xt1
eq (VPair ((t0,u0),s0)) (VPair ((t1,u1),s1)) =
do eq (t0,s0) (t1,s1)
eq (u0,s0) (u1,s1)
eq (VBox b) (VBox b') = eq b b'
eq (VLift a) (VLift a') = eq a a'
eq (VRec a) (VRec a') = eq a a'
eq (VFold a) (VFold a') = eq a a'
eq v0 v1 | v0 == v1 = return () -- Type, Label, Enum
| otherwise = fail "Different values"
{- eqBox implements alpha equality -}
eqBox :: Clos Term -> Clos Term -> Eval ()
--eqBox c c' | c == c' = return ()
eqBox (Var l x,s) (Var l' y,s') =
do x' <- getId l x s
y' <- getId l' y s'
eq x' y'
eqBox (Let _ p t,s) c =
do s' <- evalProg (p,s)
eqBox (t,s') c
eqBox c c'@(Let _ _ _,_) = eqBox c' c
eqBox (Q _ ps (a,(x,b)),s) (Q _ ps' (a',(x',b')),s')
| ps == ps' =
do eqBox (a,s) (a',s')
eq (x,Boxed (b,s)) (x',Boxed (b',s'))
eqBox (Lam _ (x,t),s) (Lam _ (x',t'),s') =
eq (x,Boxed (t,s)) (x',Boxed (t',s'))
eqBox (App t u,s) (App t' u',s') =
do eqBox (t,s) (t',s')
eqBox (u,s) (u',s')
eqBox (Pair _ t u,s) (Pair _ t' u',s') =
do eqBox (t,s) (t',s')
eqBox (u,s) (u',s')
eqBox (Split _ t (x,(y,u)),s) (Split _ t' (x',(y',u')),s') =
do eqBox (t,s) (t',s')
eq (x,(y,Boxed (u,s))) (x',(y',Boxed (u',s')))
eqBox (Case _ t bs,s) (Case _ t' bs',s') =
do eqBox (t,s) (t',s')
zipWithM_ (\ (l,t'') (l',t''') ->
if l==l' then eqBox (t'',s) (t''',s')
else fail "eqBox case") bs bs'
eqBox (Lift _ t,s) (Lift _ t',s') = eqBox (t,s) (t',s')
eqBox (Box _ t,s) (Box _ t',s') = eqBox (t,s) (t',s')
eqBox (Force _ t,s) (Force _ t',s') = eqBox (t,s) (t',s')
eqBox (Rec _ t,s) (Rec _ t',s') = eqBox (t,s) (t',s')
eqBox (Fold _ t,s) (Fold _ t',s') = eqBox (t,s) (t',s')
eqBox (Unfold _ t (x, u), s) (Unfold _ t' (x', u'), s') =
do eqBox (t,s) (t',s')
eq (x,Boxed (u,s)) (x',Boxed (u',s'))
eqBox (t,_) (t',_) | t == t' = return () -- Type, Label, Enum
| otherwise = fail "Different terms"
instance Equal Boxed where
eq (Boxed c) (Boxed c') = eqBox c c'
instance Equal Id where
eq i0 i1
| i0 == i1 = return ()
| otherwise = do ei0 <- lookupId i0
ei1 <- lookupId i1
case (ei0,ei1) of
(Id j0, Id j1) -> unless (j0 == j1)
(fail "Different variables")
(Closure t0, Closure t1) ->
letn i0 (Id i0)
(letn i1 (Id i0)
(eq t0 t1))
_ -> fail "Variable vs neutral"
instance Equal Ne where
eq (NVar i0) (NVar i1) = eq i0 i1
eq (t0 :.. u0) (t1 :.. u1) =
do eq t0 t1
eq u0 u1
eq (NSplit t0 xyu0) (NSplit t1 xyu1) =
do eq t0 t1
eq xyu0 xyu1
eq (NCase t0 (lus0,s0)) (NCase t1 (lus1,s1)) =
do eq t0 t1
let eqBranches [] [] = return ()
eqBranches ((l0,u0):lus0') ((l1,u1):lus1')
| l0 == l1 = do
eq (u0,s0) (u1,s1)
eqBranches lus0' lus1'
eqBranches _ _ = fail "Case: branches differ"
eqBranches lus0 lus1
eq (NForce t) (NForce t') = eq t t'
eq (NUnfold t xu) (NUnfold t' xu') = do
eq t t'
eq xu xu'
eq t u = fail ("Different neutrals:\n"++ show t ++"\n/=\n"++ show u ++"\n")
| zlizta/PiSigma | src/Language/PiSigma/Equality.hs | bsd-3-clause | 4,701 | 10 | 16 | 1,728 | 2,511 | 1,312 | 1,199 | 118 | 2 |
-- | Input and output.
--
-- TODO: input.
module Propane.IO
( saveRaster
, saveRastimation
) where
import qualified Data.Array.Repa as R
import qualified Data.Array.Repa.IO.DevIL as D
import qualified Data.Foldable as F
import Control.Monad
import Control.Concurrent.Spawn
import Control.Exception
import System.FilePath
import System.Directory
import Text.Printf
import Propane.Types
import Propane.IO.Lock ( lock )
errStr :: String -> String
errStr = ("Propane.IO: " ++)
-- | Save the @'Raster'@ to a given file.
--
-- The file format is specified by the filename, and can
-- be any of the formats supported by the DevIL library.
--
-- Note that DevIL silently refuses to overwrite an existing
-- file.
saveRaster :: FilePath -> Raster -> IO ()
saveRaster name (Raster img) = do
evaluate (R.deepSeqArray img ())
lock $ D.runIL (D.writeImage name img)
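-- An illustrative call (the raster value and the output path are placeholders,
-- not part of this module):
--
-- > saveRaster "out/image.png" someRaster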
-- | Save the @'Rastimation'@ to a sequence of frames in
-- the given directory.
--
-- The frames will be PNG files with names like
--
-- >00000000.png
-- >00000001.png
--
-- etc, in frame order.
--
-- Files are written concurrently, and there is no guarantee
-- about which files exist, until the IO action completes.
--
-- Note that DevIL silently refuses to overwrite an existing
-- file.
saveRastimation :: FilePath -> Rastimation -> IO ()
saveRastimation dir (Rastimation frames) = do
createDirectoryIfMissing True dir
-- Check existence, to give better error messages
e <- doesDirectoryExist dir
when (not e)
(throwIO . ErrorCall $ errStr ("directory does not exist: " ++ dir))
let go :: Int -> Raster -> IO (IO ())
go i img = spawn $ saveRaster (dir </> printf "%08d.png" i) img
zipWithM go [0..] (F.toList frames) >>= sequence_
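-- An illustrative call (the directory name and the animation value are
-- placeholders, not part of this module):
--
-- > saveRastimation "frames" someRastimation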
| kmcallister/propane | Propane/IO.hs | bsd-3-clause | 1,768 | 0 | 15 | 354 | 378 | 213 | 165 | 29 | 1 |
-- TODO: Review and possibly move elsewhere. This code was part of the
-- RecentPackages (formerly "Check") feature, but that caused some cyclic
-- dependencies.
module Distribution.Server.Packages.Render (
-- * Package render
PackageRender(..)
, DependencyTree
, IsBuildable (..)
, doPackageRender
-- * Utils
, categorySplit,
) where
import Data.Maybe (catMaybes, isJust, maybeToList)
import Control.Monad (guard)
import Control.Arrow (second)
import Data.Char (toLower, isSpace)
import qualified Data.Map as Map
import qualified Data.Vector as Vec
import Data.Ord (comparing)
import Data.List (sortBy, intercalate)
import Data.Time.Clock (UTCTime)
import System.FilePath.Posix ((</>), (<.>))
-- Cabal
import Distribution.PackageDescription
import Distribution.PackageDescription.Configuration
import Distribution.Package
import Distribution.Text
import Distribution.Version
import Distribution.ModuleName as ModuleName
-- hackage-server
import Distribution.Server.Framework.CacheControl (ETag)
import Distribution.Server.Packages.Types
import Distribution.Server.Packages.ModuleForest
import qualified Distribution.Server.Users.Users as Users
import Distribution.Server.Users.Types
import qualified Data.TarIndex as TarIndex
import Data.TarIndex (TarIndex, TarEntryOffset)
-- This should provide the caller with enough information to encode the package information
-- in its particular format (text, html, json) with minimal effort on its part.
-- This is why some fields of PackageDescription are preprocessed, and others aren't.
data PackageRender = PackageRender {
rendPkgId :: PackageIdentifier,
rendDepends :: [Dependency],
rendExecNames :: [String],
rendLibraryDeps :: Maybe DependencyTree,
rendExecutableDeps :: [(String, DependencyTree)],
rendLicenseName :: String,
rendLicenseFiles :: [FilePath],
rendMaintainer :: Maybe String,
rendCategory :: [String],
rendRepoHeads :: [(RepoType, String, SourceRepo)],
rendModules :: Maybe TarIndex -> Maybe ModuleForest,
rendHasTarball :: Bool,
rendChangeLog :: Maybe (FilePath, ETag, TarEntryOffset, FilePath),
rendReadme :: Maybe (FilePath, ETag, TarEntryOffset, FilePath),
rendUploadInfo :: (UTCTime, Maybe UserInfo),
rendUpdateInfo :: Maybe (Int, UTCTime, Maybe UserInfo),
rendPkgUri :: String,
rendFlags :: [Flag],
-- rendOther contains other useful fields which are merely strings, possibly empty
-- for example: description, home page, copyright, author, stability
-- If PackageRender is the One True Resource Representation, should they
-- instead be fields of PackageRender?
rendOther :: PackageDescription
} deriving (Show)
doPackageRender :: Users.Users -> PkgInfo -> PackageRender
doPackageRender users info = PackageRender
{ rendPkgId = pkgInfoId info
, rendDepends = flatDependencies genDesc
, rendExecNames = map exeName (executables flatDesc)
, rendLibraryDeps = depTree libBuildInfo `fmap` condLibrary genDesc
, rendExecutableDeps = second (depTree buildInfo) `map` condExecutables genDesc
, rendLicenseName = display (license desc) -- maybe make this a bit more human-readable
, rendLicenseFiles = licenseFiles desc
, rendMaintainer = case maintainer desc of
"None" -> Nothing
"none" -> Nothing
"" -> Nothing
person -> Just person
, rendCategory = case category desc of
[] -> []
str -> categorySplit str
, rendRepoHeads = catMaybes (map rendRepo $ sourceRepos desc)
, rendModules = \docindex ->
fmap (moduleForest
. map (\m -> (m, moduleHasDocs docindex m))
. exposedModules)
(library flatDesc)
, rendHasTarball = not . Vec.null $ pkgTarballRevisions info
, rendChangeLog = Nothing -- populated later
, rendReadme = Nothing -- populated later
, rendUploadInfo = let (utime, uid) = pkgOriginalUploadInfo info
in (utime, Users.lookupUserId uid users)
, rendUpdateInfo = let maxrevision = Vec.length (pkgMetadataRevisions info) - 1
(utime, uid) = pkgLatestUploadInfo info
uinfo = Users.lookupUserId uid users
in if maxrevision > 0
then Just (maxrevision, utime, uinfo)
else Nothing
, rendPkgUri = pkgUri
, rendFlags = genPackageFlags genDesc
, rendOther = desc
}
where
genDesc = pkgDesc info
flatDesc = flattenPackageDescription genDesc
desc = packageDescription genDesc
pkgUri = "/package/" ++ display (pkgInfoId info)
depTree :: (a -> BuildInfo) -> CondTree ConfVar [Dependency] a -> DependencyTree
depTree getBuildInfo = mapTreeData isBuildable . mapTreeConstrs simplifyDeps
where
simplifyDeps = sortDeps . combineDepsBy intersectVersionIntervals
isBuildable ctData = if buildable $ getBuildInfo ctData
then Buildable
else NotBuildable
moduleHasDocs :: Maybe TarIndex -> ModuleName -> Bool
moduleHasDocs Nothing = const False
moduleHasDocs (Just doctar) = isJust . TarIndex.lookup doctar
. moduleDocTarPath (packageId genDesc)
moduleDocTarPath :: PackageId -> ModuleName -> FilePath
moduleDocTarPath pkgid modname =
display pkgid ++ "-docs" </>
intercalate "-" (ModuleName.components modname) <.> "html"
rendRepo r = do
guard $ repoKind r == RepoHead
ty <- repoType r
loc <- repoLocation r
return (ty, loc, r)
type DependencyTree = CondTree ConfVar [Dependency] IsBuildable
data IsBuildable = Buildable
| NotBuildable
deriving (Eq, Show)
{-------------------------------------------------------------------------------
Util
-------------------------------------------------------------------------------}
categorySplit :: String -> [String]
categorySplit xs | all isSpace xs = []
categorySplit xs = map (dropWhile isSpace) $ splitOn ',' xs
where
splitOn x ys = front : case back of
[] -> []
(_:ys') -> splitOn x ys'
where (front, back) = break (== x) ys
-----------------------------------------------------------------------
--
-- Flatten the dependencies of a GenericPackageDescription into a
-- simple summary form. Library and executable dependency ranges
-- are combined using intersection, except for dependencies within
-- if and else branches, which are unioned together.
--
flatDependencies :: GenericPackageDescription -> [Dependency]
flatDependencies =
sortOn (\(Dependency pkgname _) -> map toLower (display pkgname))
. pkgDeps
where
pkgDeps :: GenericPackageDescription -> [Dependency]
pkgDeps pkg = fromMap $ Map.unionsWith intersectVersions $
map condTreeDeps (maybeToList $ condLibrary pkg)
++ map (condTreeDeps . snd) (condExecutables pkg)
where
fromMap = map fromPair . Map.toList
fromPair (pkgname, Versions _ ver) =
Dependency pkgname $ fromVersionIntervals ver
condTreeDeps :: CondTree v [Dependency] a -> PackageVersions
condTreeDeps (CondNode _ ds comps) =
Map.unionsWith intersectVersions $
toMap ds : map fromComponent comps
where
fromComponent (_, then_part, else_part) =
unionDeps (condTreeDeps then_part)
(maybe Map.empty condTreeDeps else_part)
toMap = Map.fromListWith intersectVersions . map toPair
toPair (Dependency pkgname ver) =
(pkgname, Versions All $ toVersionIntervals ver)
unionDeps :: PackageVersions -> PackageVersions -> PackageVersions
unionDeps ds1 ds2 = Map.unionWith unionVersions
(Map.union ds1 defaults) (Map.union ds2 defaults)
where
defaults = Map.map (const notSpecified) $ Map.union ds1 ds2
notSpecified = Versions Some $ toVersionIntervals noVersion
-- | Version intervals for a dependency that also indicate whether the
-- dependency has been specified on all branches. For example, package x's
-- version intervals use 'All' while package y's version intervals use
-- 'Some':
--
-- > if flag(f)
-- > build-depends: x < 1, y < 1
-- > else
-- > build-depends: x >= 1
--
-- This distinction affects the intersection of intervals.
data Versions = Versions Branches VersionIntervals
data Branches = All | Some deriving Eq
type PackageVersions = Map.Map PackageName Versions
unionVersions :: Versions -> Versions -> Versions
unionVersions (Versions b1 v1) (Versions b2 v2) =
let b3 = if b1 == Some || b2 == Some
then Some
else All
in Versions b3 $ unionVersionIntervals v1 v2
intersectVersions :: Versions -> Versions -> Versions
intersectVersions (Versions Some v1) (Versions Some v2) =
Versions Some $ unionVersionIntervals v1 v2
intersectVersions (Versions Some _) v@(Versions All _) = v
intersectVersions v@(Versions All _) (Versions Some _) = v
intersectVersions (Versions All v1) (Versions All v2) =
Versions All $ intersectVersionIntervals v1 v2
sortDeps :: [Dependency] -> [Dependency]
sortDeps = sortOn $ \(Dependency pkgname _) -> map toLower (display pkgname)
combineDepsBy :: (VersionIntervals -> VersionIntervals -> VersionIntervals)
-> [Dependency] -> [Dependency]
combineDepsBy f =
map (\(pkgname, ver) -> Dependency pkgname (fromVersionIntervals ver))
. Map.toList
. Map.fromListWith f
. map (\(Dependency pkgname ver) -> (pkgname, toVersionIntervals ver))
-- Same as @sortBy (comparing f)@, but without recomputing @f@.
sortOn :: Ord b => (a -> b) -> [a] -> [a]
sortOn f xs = map snd (sortBy (comparing fst) [(f x, x) | x <- xs])
| chrisdotcode/hackage-server | Distribution/Server/Packages/Render.hs | bsd-3-clause | 10,234 | 0 | 16 | 2,637 | 2,362 | 1,293 | 1,069 | 174 | 8 |
module Code07 where
import Data.List (minimumBy)
import Data.Ord (comparing)
data Tree = Leaf Int | Fork Tree Tree
deriving (Eq,Show)
fringe :: Tree -> [Int]
fringe (Leaf x) = [x]
fringe (Fork xt yt) = fringe xt ++ fringe yt
treeRec :: [Int] -> Tree
treeRec [x] = Leaf x
treeRec xs = Fork (treeRec ys) (treeRec zs)
where n = length xs `div` 2
(ys,zs) = splitAt n xs
treeIter :: [Int] -> Tree
treeIter = head . iter . map Leaf
where
iter [] = []
iter [t] = [t]
iter (xt:yt:ts) = iter (Fork xt yt : iter ts)
-- First steps
cost :: Tree -> Int
cost (Leaf x) = x
cost (Fork u v) = 1 + (cost u `max` cost v)
minBy0 :: Ord a => (Tree -> a) -> [Tree] -> Tree
minBy0 = minimumBy . comparing
mincostTree0 :: [Int] -> Tree
mincostTree0 = minBy0 cost . trees0
trees0 :: [Int] -> [Tree]
trees0 [x] = [Leaf x]
trees0 (x:xs) = concatMap (prefixes x) (trees0 xs)
prefixes :: Int -> Tree -> [Tree]
prefixes x t@(Leaf y) = [Fork (Leaf x) t]
prefixes x t@(Fork u v) = [Fork (Leaf x) t]
++ [Fork u' v | u' <- prefixes x u ]
--
foldrn :: (a -> b -> b) -> (a -> b) -> [a] -> b
foldrn f g [x] = g x
foldrn f g (x:xs) = f x (foldrn f g xs)
mincostTree1 :: [Int] -> Tree
mincostTree1 = minBy0 cost . trees1
trees1 :: [Int] -> [Tree]
trees1 = foldrn (concatMap . prefixes) (wrap . Leaf)
wrap :: a -> [a]
wrap x = [x]
--
type Forest = [Tree]
mincostTree2 :: [Int] -> Tree
mincostTree2 = minBy0 cost . trees2
trees2 :: [Int] -> [Tree]
trees2 = map rollup . forests
forests :: [Int] -> [Forest]
forests = foldrn (concatMap . prefixes2) (wrap . wrap . Leaf)
prefixes2 :: Int -> Forest -> [Forest]
prefixes2 x ts = [ Leaf x : rollup (take k ts) : drop k ts
| k <- [1 .. length ts]]
rollup :: Forest -> Tree
rollup = foldl1 Fork
--
mincostTree3 :: [Int] -> Tree
mincostTree3 = minBy3 cost . trees2
minBy3 :: Ord b => (a -> b) -> [a] -> a
minBy3 f = foldl1 (cmp3 f)
cmp3 :: Ord b => (a -> b) -> a -> a -> a
cmp3 f u v = if f u <= f v then u else v
--
-- cmp u v = if u -<= v then u else v
-- Fusion
{-
h (foldrn f g xs) = foldrn f' g' xs
h (g x) = g' x
h (f x y) = f' x (h y)
h (foldrn f g xs) ~> foldrn f' g' xs
-}
-- cost' = map cost . reverse . spine
insert' :: Int -> [Tree] -> [Tree]
insert' x ts = Leaf x : split' x ts
split' :: Int -> [Tree] -> [Tree]
split' x [u] = [u]
split' x (u:v:ts) = if x `max` cost u < cost v then u:v:ts
else split' x (Fork u v : ts)
-- Final Algorithm
mincostTree :: [Int] -> Tree
mincostTree = foldl1 Fork . map snd . foldrn insert (wrap . leaf)
insert :: Int -> [(Int, Tree)] -> [(Int, Tree)]
insert x ts = leaf x : split x ts
split :: Int -> [(Int, Tree)] -> [(Int, Tree)]
split x [u] = [u]
split x (u:v:ts) = if x `max` fst u < fst v then u:v:ts
else split x (fork u v : ts)
leaf :: Int -> (Int, Tree)
leaf x = (x, Leaf x)
fork :: (Int, Tree) -> (Int, Tree) -> (Int, Tree)
fork (a,u) (b,v) = (1 + (a `max` b), Fork u v)
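-- An illustrative sanity check (not part of the original derivation): for a
-- small non-empty list, the final algorithm should preserve the fringe and
-- agree with the brute-force search over all trees.
checkMinCost :: [Int] -> Bool
checkMinCost xs = fringe t == xs && cost t == minimum (map cost (trees0 xs))
  where t = mincostTree xs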
| sampou-org/pfad | Code/Code07.hs | bsd-3-clause | 3,241 | 0 | 10 | 1,057 | 1,556 | 837 | 719 | 77 | 3 |
-- Hesthoppning
-- Problem ID: hesthoppning Time limit: 7 seconds Memory limit: 1024 MB
-- Input: first line: the size of the grid: N and M, separated by a space.
-- N rows of M characters: '.' = empty, '#' = stone, 'H' = horse
-- The paddock is surrounded by a fence. It is guaranteed that the input always contains exactly two 'H' cells.
-- Output: your program should print one word on a single line: "JA" if the horses can meet on some cell and "NEJ" otherwise.
import qualified Data.Map.Strict as Map
import Data.Map (Map)
data Ruta = Tom | Sten | Hest | Besökt
deriving (Eq, Show)
type Rad = Int
type Kolumn = Int
type Pos = (Rad,Kolumn)
type Karta = Map Pos Ruta
-- Allowed knight jumps from a given position
hesthoppFrån :: Karta -> Pos -> [Pos]
hesthoppFrån karta p = [p2 | p2 <- allaHestHoppFrån p, braPlats karta p2]
braPlats karta p2 = Map.findWithDefault Sten p2 karta == Tom
-- All eight knight jumps from a given position
allaHestHoppFrån :: Pos -> [Pos]
allaHestHoppFrån (r,k) = map (\(a,b)->(r+a,k+b)) [ ( 2,1), ( 2,-1)
, ( 1,2), ( 1,-2)
, (-1,2), (-1,-2)
, (-2,1), (-2,-1)
]
byggKarta :: [String] -> Karta
byggKarta css = Map.fromList (concat numreradePlatser)
where numreradeRader :: [(Rad,String)]
numreradeRader = numrera css
numreradePlatser :: [[(Pos,Ruta)]]
numreradePlatser = map (\(r,cs) -> paraMed r (numrera cs)) numreradeRader
paraMed r = map (\(k, c) -> ((r, k), char2Ruta c))
char2Ruta :: Char -> Ruta
char2Ruta 'H' = Hest
char2Ruta '.' = Tom
char2Ruta '#' = Sten
char2Ruta c = error ("char2Ruta: otillåtet tecken: " ++ [c])
ruta2Char :: Ruta -> Char
ruta2Char Hest = 'H'
ruta2Char Tom = '.'
ruta2Char Sten = '#'
ruta2Char Besökt = '*'
numrera :: [a] -> [(Int, a)]
numrera = zip [1..]
----------------------------------------------------------------
-- Utgå från (en karta som visar alla platser nåbara efter n steg) och
-- (en lista på intressanta punkter). Fyll i alla punkter som är
-- nåbara från någon av de intressanta punkterna. Då får vi (kartan
-- med punkter nåbara efter n+1 steg) samt (nya intressanta punkter).
solution :: [String] -> String
solution = jaNej . fst . leta . byggKarta
jaNej :: Bool -> String
jaNej b = if b then "JA" else "NEJ"
leta :: Karta -> (Bool, [Pos])
leta karta = (check slutKarta, ps)
where (slutKarta, ps) = head (dropWhile (not.done) (iterate steg startPar))
(startPar, check) = prepare karta
done (k, ps) = null ps || check k
prepare :: Karta -> ((Karta, [Pos]), Karta -> Bool)
prepare karta = (startPar, check)
where startPar = (Map.adjust (const Tom) slutPos karta, [startPos])
(startPos,Hest):(slutPos,Hest):_ = filter ((Hest==).snd) (Map.assocs karta)
done (k, ps) = null ps || check k
check k = Just Besökt == Map.lookup slutPos k
steg :: (Karta, [Pos]) -> (Karta, [Pos])
steg (karta, intressant) = (nyKarta, nyaPos)
where nyaPos = uniq [p | rk <- intressant, p <- hesthoppFrån karta rk]
nyKarta = foldr (Map.adjust (const Besökt)) karta nyaPos
-- uniq :: Ord a => [a] -> [a]
uniq :: [Pos] -> [Pos]
uniq [] = []
uniq (x:xs) = x : (uniq lt ++ uniq gt)
where lt = filter (<x) xs
gt = filter (>x) xs
main :: IO ()
main = do
nOchM <- getLine
let n, m :: Int
(n, rest):_ = reads nOchM
(m, _):_ = reads rest
råKarta <- sequence (replicate n getLine)
putStrLn (solution råKarta)
----------------
test1 = ["H.H","...",".#."]
test2 = ["H#H","...",".#."]
test3 = "H..":replicate 100 "..."++["H.."]
test n = ('H':replicate (n-1) '.') : replicate (n-2) (replicate n '.') ++ [replicate (n-1) '.' ++ "H"]
showKarta :: Karta -> String
showKarta karta = unlines [[maybe ' ' ruta2Char (Map.lookup (r,k) karta) | r <- [1..mr]] | k <- [1..mk]]
where mr = maximum (map fst (Map.keys karta))
mk = maximum (map snd (Map.keys karta))
poss2Karta :: [Pos] -> Karta
poss2Karta ps = Map.fromList (zip ps (repeat Besökt))
showPoss :: [Pos] -> String
showPoss = showKarta . poss2Karta
{-
let (startPar, check) = prepare (byggKarta (test 200))
let qs = iterate steg startPar
let is = map snd qs
-}
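-- A small sanity check for the examples above (hand-checked, not part of the
-- original submission): the horses can meet in test1, while the stones block
-- every knight path in test2.
testSolutions :: Bool
testSolutions = solution test1 == "JA" && solution test2 == "NEJ"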
| patrikja/progolymp | onlinekval/2015/hesthoppning.hs | bsd-3-clause | 4,346 | 151 | 9 | 1,052 | 1,561 | 865 | 696 | 79 | 2 |
--
-- Copyright (c) 2009-2011, ERICSSON AB
-- All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are met:
--
-- * Redistributions of source code must retain the above copyright notice,
-- this list of conditions and the following disclaimer.
-- * Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
-- * Neither the name of the ERICSSON AB nor the names of its contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-- IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-- DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-- FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-- DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-- SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-- CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-- OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
--
{-# LANGUAGE RecordWildCards #-}
module Feldspar.Compiler.Backend.C.Platforms
( availablePlatforms
, c99
, c99OpenMp
, c99Wool
, tic64x
, extend
, deepCopy
) where
import Data.Maybe (fromMaybe)
import Feldspar.Compiler.Backend.C.Options
import Feldspar.Compiler.Imperative.Representation
import Feldspar.Compiler.Imperative.Frontend
availablePlatforms :: [Platform]
availablePlatforms = [ c99, c99OpenMp, c99Wool, tic64x ]
c99 :: Platform
c99 = Platform {
name = "c99",
types =
[ (MachineVector 1 (NumType Signed S8), "int8_t")
, (MachineVector 1 (NumType Signed S16), "int16_t")
, (MachineVector 1 (NumType Signed S32), "int32_t")
, (MachineVector 1 (NumType Signed S64), "int64_t")
, (MachineVector 1 (NumType Unsigned S8), "uint8_t")
, (MachineVector 1 (NumType Unsigned S16), "uint16_t")
, (MachineVector 1 (NumType Unsigned S32), "uint32_t")
, (MachineVector 1 (NumType Unsigned S64), "uint64_t")
, (MachineVector 1 BoolType, "bool")
, (MachineVector 1 FloatType, "float")
, (MachineVector 1 DoubleType, "double")
, (MachineVector 1 (ComplexType (MachineVector 1 FloatType)), "float complex")
, (MachineVector 1 (ComplexType (MachineVector 1 DoubleType)),"double complex")
] ,
values =
[ (MachineVector 1 (ComplexType (MachineVector 1 FloatType)), \cx -> "(" ++ showRe cx ++ "+" ++ showIm cx ++ "i)")
, (MachineVector 1 (ComplexType (MachineVector 1 DoubleType)), \cx -> "(" ++ showRe cx ++ "+" ++ showIm cx ++ "i)")
, (MachineVector 1 BoolType, \b -> if boolValue b then "true" else "false")
] ,
includes =
[ "feldspar_c99.h"
, "feldspar_array.h"
, "feldspar_future.h"
, "ivar.h"
, "taskpool.h"
, "<stdint.h>"
, "<string.h>"
, "<math.h>"
, "<stdbool.h>"
, "<complex.h>"],
varFloating = True
}
c99OpenMp :: Platform
c99OpenMp = c99 { name = "c99OpenMp"
, varFloating = False
}
c99Wool :: Platform
c99Wool = c99 { name = "c99Wool"
, includes = "wool.h":includes c99
, varFloating = False
}
tic64x :: Platform
tic64x = Platform {
name = "tic64x",
types =
[ (MachineVector 1 (NumType Signed S8), "char")
, (MachineVector 1 (NumType Signed S16), "short")
, (MachineVector 1 (NumType Signed S32), "int")
, (MachineVector 1 (NumType Signed S40), "long")
, (MachineVector 1 (NumType Signed S64), "long long")
, (MachineVector 1 (NumType Unsigned S8), "unsigned char")
, (MachineVector 1 (NumType Unsigned S16), "unsigned short")
, (MachineVector 1 (NumType Unsigned S32), "unsigned")
, (MachineVector 1 (NumType Unsigned S40), "unsigned long")
, (MachineVector 1 (NumType Unsigned S64), "unsigned long long")
, (MachineVector 1 BoolType, "int")
, (MachineVector 1 FloatType, "float")
, (MachineVector 1 DoubleType, "double")
, (MachineVector 1 (ComplexType (MachineVector 1 FloatType)), "complexOf_float")
, (MachineVector 1 (ComplexType (MachineVector 1 DoubleType)),"complexOf_double")
] ,
values =
[ (MachineVector 1 (ComplexType (MachineVector 1 FloatType)), \cx -> "complex_fun_float(" ++ showRe cx ++ "," ++ showIm cx ++ ")")
, (MachineVector 1 (ComplexType (MachineVector 1 DoubleType)), \cx -> "complex_fun_double(" ++ showRe cx ++ "," ++ showIm cx ++ ")")
, (MachineVector 1 BoolType, \b -> if boolValue b then "1" else "0")
] ,
includes = [ "feldspar_tic64x.h", "feldspar_array.h", "<c6x.h>", "<string.h>"
, "<math.h>"],
varFloating = True
}
showRe, showIm :: Constant t -> String
showRe = showConstant . realPartComplexValue
showIm = showConstant . imagPartComplexValue
showConstant :: Constant t -> String
showConstant (DoubleConst c) = show c ++ "f"
showConstant (FloatConst c) = show c ++ "f"
showConstant c = show c
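-- | Generate the statements that copy the remaining value parameters into the
-- first one: constant and native arrays are copied element by element (the
-- latter guarded by 'safetyLimit'), structs field by field, plain scalars by a
-- single assignment, and several array sources are concatenated into the
-- destination via copyArray/copyArrayPos.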
deepCopy :: Options -> [ActualParameter ()] -> [Program ()]
deepCopy opts [ValueParameter arg1, ValueParameter arg2]
| arg1 == arg2
= []
| ConstExpr ArrayConst{..} <- arg2
= initArray (Just arg1) (litI32 $ toInteger $ length arrayValues)
: zipWith (\i c -> Assign (Just $ ArrayElem arg1 (litI32 i)) (ConstExpr c)) [0..] arrayValues
| NativeArray{} <- typeof arg2
, l@(ConstExpr (IntConst n _)) <- arrayLength arg2
= if n < safetyLimit opts
then initArray (Just arg1) l:map (\i -> Assign (Just $ ArrayElem arg1 (litI32 i)) (ArrayElem arg2 (litI32 i))) [0..(n-1)]
else error $ unlines ["Internal compiler error: array size (" ++ show n ++ ") too large for deepcopy", show arg1, show arg2]
| StructType _ fts <- typeof arg2
= concatMap (deepCopyField . fst) fts
| not (isArray (typeof arg1))
= [Assign (Just arg1) arg2]
where deepCopyField fld = deepCopy opts [ ValueParameter $ StructField arg1 fld
, ValueParameter $ StructField arg2 fld]
deepCopy _ (ValueParameter arg1 : ins'@(ValueParameter in1:ins))
| isArray (typeof arg1)
= [ initArray (Just arg1) expDstLen, copyFirstSegment ] ++
flattenCopy (ValueParameter arg1) ins argnLens arg1len
where expDstLen = foldr ePlus (litI32 0) aLens
copyFirstSegment = if arg1 == in1
then Empty
else call "copyArray" [ ValueParameter arg1
, ValueParameter in1]
aLens@(arg1len:argnLens) = map (\(ValueParameter src) -> arrayLength src) ins'
deepCopy _ _ = error "Multiple scalar arguments to copy"
flattenCopy :: ActualParameter () -> [ActualParameter ()] -> [Expression ()] ->
Expression () -> [Program ()]
flattenCopy _ [] [] _ = []
flattenCopy dst (t:ts) (l:ls) cLen = call "copyArrayPos" [dst, ValueParameter cLen, t]
: flattenCopy dst ts ls (ePlus cLen l)
ePlus :: Expression () -> Expression () -> Expression ()
ePlus (ConstExpr (IntConst 0 _)) e = e
ePlus e (ConstExpr (IntConst 0 _)) = e
ePlus e1 e2 = binop (MachineVector 1 (NumType Signed S32)) "+" e1 e2
extend :: Platform -> String -> Type -> String
extend Platform{..} s t = s ++ "_fun_" ++ fromMaybe (show t) (lookup t types)
| emwap/feldspar-compiler | lib/Feldspar/Compiler/Backend/C/Platforms.hs | bsd-3-clause | 8,147 | 0 | 16 | 2,113 | 2,225 | 1,197 | 1,028 | 128 | 3 |
{-# LANGUAGE TemplateHaskell #-}
module SyntaxHighlighting.AsSVGPt where
{- Draws parsetrees as images -}
import Utils.Utils
import Utils.Image
import TypeSystem
import SyntaxHighlighting.Coloring
import SyntaxHighlighting.Renderer
import Text.Blaze.Svg11 ((!), stringValue)
import qualified Text.Blaze.Svg11 as S
import qualified Text.Blaze.Svg11.Attributes as A
import qualified Data.Text as Text
import Data.Text (Text)
import Data.Maybe
import qualified Data.Map as M
import Lens.Micro hiding ((&))
import Lens.Micro.TH
import Control.Monad
import Control.Monad.State hiding (get)
import Control.Arrow ((&&&))
import Text.PrettyPrint.ANSI.Leijen
data SVGRenderer = SVGRenderer FullColoring SyntaxStyle
data Point = Point
{ _pointName :: Text
, _contents :: Text
, _x :: X
, _y :: Y
, _showUnderDot :: Bool
, _style :: Maybe Name
} deriving (Show)
data PPS = PPS { _cs :: ColorScheme
, _fs :: Int -- fontsize
, _hDiff :: Y -- size between layers
, _currName :: Text -- current running name, for unique identification (used to draw the lines)
, _conns :: [(Text, Text)] -- Connections
}
makeLenses ''Point
makeLenses ''PPS
instance Renderer SVGRenderer where
create = SVGRenderer
name _ = "SVG"
renderParseTree pt (SVGRenderer fc style)
= text $ parseTreeSVG style 1 fc pt
renderParseTree' pt (SVGRenderer fc style)
= text $ parseTreeSVG style 1 fc (deAnnot pt)
renderParseTreeDebug pt (SVGRenderer fc style)
= error "No renderPTDebug supported"
renderString styleName str _
= error "No renderString supported"
supported _ = properties
properties :: [String]
properties = ["foreground-color","background-color","svg-line-color","svg-line-thickness","svg-dotsize","svg-fontsize"]
toSVGColorScheme :: Name -> FullColoring -> ColorScheme
toSVGColorScheme style fc
= let property p def = getProperty fc style p & fromMaybe def
property' p def = property p (Right def) & either (const def) id
properti p def = property p (Left def)
& either id (const def) :: Int
fg = property' "foreground-color" "#000000"
bg = property' "background-color" "#ffffff"
lineColor = property' "svg-line-color" fg
lineThickness = properti "svg-line-thickness" 1
dotSize = properti "svg-dotsize" 4
fontSize = properti "svg-fontsize" 20
in
CS fg bg lineColor fontSize lineThickness dotSize
coor :: Point -> (X, Y)
coor p = (get x p, get y p)
parseTreeSVG :: SyntaxStyle -> Int -> FullColoring -> ParseTree -> String
parseTreeSVG style factor fc pt
= let pt' = determineStyle' style pt
cs = toSVGColorScheme "" fc
((points, w, h), state)
= runState (pointPositions pt') $ startState cs
pointsDict = points |> (get pointName &&& (get x &&& get y))
& M.fromList
connections = get conns state
svg = do S.rect ! A.width (intValue w) ! A.height (intValue h) ! A.fill (stringValue $ get bg cs)
connections |+> uncurry (drawLineBetween cs False pointsDict)
points |+> renderPoint fc
pass
in
packageSVG (w*factor, h*factor) (w, h) svg
renderPoint :: FullColoring -> Point -> S.Svg
renderPoint fc point
= let cs = fc & toSVGColorScheme (get style point & fromMaybe "") in
annotatedDot cs (get showUnderDot point)
(get contents point, (get x point, get y point))
startState cs
= let fs = get fontSize cs in
PPS cs fs (fs * 3) (Text.pack "_") []
get' = gets . get
withName :: Int -> State PPS a -> State PPS a
withName i st = do oldName <- get' currName
let newName = mappend oldName $ Text.pack ("." ++ show i)
modify (set currName newName)
a <- st
modify (set currName oldName)
return a
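-- pointPositions lays out a (styled) parse tree: it yields one labelled point
-- per node together with the total width and height of the drawing, while the
-- PPS state accumulates hierarchical point names and the parent-child
-- connections that are later drawn as lines.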
pointPositions :: ParseTreeA (Maybe Name) -> State PPS ([Point], W, H)
pointPositions (MLiteral style _ content)
= do w <- get' fs
ds <- get' (cs . dotSize)
let w' = w * length content
n <- get' currName
let p = Point n (Text.pack content)
(w' `div` 2) (ds*2) True
style
return ([p], w', 2*(ds + w))
pointPositions (MInt style mi i)
= pointPositions (MLiteral style mi $ show i)
pointPositions (PtSeq style mi@(tpName, choice) pts)
= do (pointss, ws, hs)
<- pts |> pointPositions
& mapi |+> uncurry withName
|> unzip3
nm <- get' currName
let conns' = zip (repeat nm) $ pointss |> head |> get pointName
modify (over conns (conns' ++))
yDiff <- get' hDiff
let points' = pointss
& mapi |> uncurry (movePointsRelative ws)
& concat
|> over y (yDiff +)
h <- get' fs
ds <- get' $ cs . dotSize
let width = sum ws
let node = Point nm (Text.pack $ tpName ++ "." ++ show choice)
(width `div` 2)
(2 * (ds + h))
False
style
return (node:points', width, yDiff + maximum hs)
movePointsRelative :: [W] -> Int -> [Point] -> [Point]
movePointsRelative widest index (top:children)
= let topPos = sum (take index widest) + ((widest !! index) `div` 2)
xDiff = topPos - get x top
in
(top:children) |> over x (+ xDiff)
| pietervdvn/ALGT | src/SyntaxHighlighting/AsSVGPt.hs | bsd-3-clause | 4,973 | 288 | 12 | 1,078 | 1,977 | 1,096 | 881 | 134 | 1 |
{-# LANGUAGE OverloadedStrings, DeriveDataTypeable , NoMonomorphismRestriction #-}
import MFlow.Wai.Blaze.Html.All hiding (footer, retry,step, push)
import Control.Monad.State
import Data.Monoid
import Control.Applicative
import Control.Concurrent
import Control.Workflow as WF
import Control.Workflow.Stat
import Control.Concurrent.STM
import Data.Typeable
import Data.TCache.DefaultPersistence
import Data.Persistent.Collection
import Data.ByteString.Lazy.Char8(pack,unpack)
import Data.Map as M (fromList)
import Data.List(isPrefixOf)
import Data.Maybe
import Debug.Trace
import System.IO.Unsafe
(!>) = flip trace
-- buy or reserve
-- not in stock
-- reserve the book
-- if it is in stock, remove the reservation after a while
-- if it is in stock and reserved, buy it
data Book= Book{btitle :: String, stock,reserved :: Int} deriving (Read,Show, Eq,Typeable)
instance Indexable Book where key= btitle
instance Serializable Book where
serialize= pack. show
deserialize= read . unpack
keyBook= "booktitle" :: String
rbook= getDBRef $ keyBook
stm= liftIO . atomically
reservetime= 5* 24 * 60 * 60 -- five days waiting for reserve and five days reserved
data RouteOptions= Buy | Other | Reserve | NoReserve deriving (Typeable,Show)
main= do
enterStock 30 rbook
restartWorkflows $ M.fromList [("buyreserve", buyReserve reservetime)]
runNavigation "" . transientNav $ do
op <- page $ wlink Buy "buy or reserve the book" <++ br <|> wlink Other "Do other things"
case op of
Other -> page $ "doing other things" ++> wlink () "home"
Buy -> do
reserved <- stm $ do
mr <- readDBRef rbook
case mr of
Nothing -> return False
Just r ->
if reserved r > 0 then return True
else if stock r > 0 then reserveIt rbook >> return True
else return False
if reserved then page $ buyIt keyBook
else reserveOffline keyBook
buyIt keyBook= do
mh <- getHistory "buyreserve" keyBook
p "there is one book for you in stock "
++> case mh of
Nothing -> p "The book was in stock and reserved online right now"
Just hist ->
let histmarkup= mconcat[p << l | l <- hist]
in h2 "History of your reserve:"
<> histmarkup
++> wlink keyBook "buy?"
`waction` (\keyBook -> do
stm $ buy rbook
page $ "bought! " ++> wlink () "home"
delWF "buyreserve" keyBook)
reserveOffline keyBook = do
v <- getState "buyreserve" (buyReserve reservetime) keyBook
case v of
Left AlreadyRunning -> lookReserve keyBook
Left err -> error $ show err
Right (name, f, stat) -> do
r <- page $ wlink Reserve "not in stock. Press to reserve it when available in\
\ the next five days. It will be reserved for five days "
<|> br
++> wlink NoReserve "no thanks, go to home"
case r of
Reserve -> do
liftIO $ forkIO $ runWF1 name (buyReserve reservetime keyBook) stat True
return ()
NoReserve -> return()
lookReserve keyBook= do
hist <- getHistory "buyreserve" keyBook `onNothing ` return ["No workflow log"]
let histmarkup= mconcat[p << l | l <- hist]
page $ do
mr <- stm $ readDBRef rbook
if mr== Nothing
|| fmap stock mr == Just 0
&& fmap reserved mr == Just 0
then
"Sorry, not available but you already demanded a reservation when the book\
\ enter in stock"
++> wlink () << p "press here to go home if the book has not arrived"
<++ p "you can refresh or enter this url to verify availability"
<> h2 "status of your request for reservation upto now:"
<> histmarkup
else
h2 "Good! things changed: the book arrived and was reserved"
++> buyIt keyBook
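-- buyReserve is the persistent workflow behind an offline reservation: it
-- waits up to `timereserve` seconds for the book to enter stock (reserving it
-- and queueing a notification mail as soon as it does), then holds the
-- reservation for another `timereserve` seconds and releases it if the book
-- has not been bought by then. Every step is logged so the user can inspect
-- the history of the request.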
buyReserve timereserve keyBook= do
let rbook = getDBRef keyBook
logWF $ "You requested the reserve for: "++ keyBook
t <- getTimeoutFlag timereserve -- $ 5 * 24 * 60 * 60
r <- WF.step . atomically $ (reserveAndMailIt rbook >> return True)
`orElse` (waitUntilSTM t >> return False)
if not r
then do
logWF "reservation period ended, no stock available"
return ()
else do
logWF "The book entered in stock, reserved "
t <- getTimeoutFlag timereserve -- $ 5 * 24 *60 * 60
r <- WF.step . atomically $ (waitUntilSTM t >> return False)
`orElse` (testBought rbook >> return True)
if r
then do
logWF "Book was bought at this time"
else do
logWF "Reserved for a time, but reserve period ended"
WF.step . atomically $ unreserveIt rbook
return ()
userMail= "[email protected]"
mailQueue= "mailqueue"
reserveAndMailIt rbook= do
let qref = getQRef mailQueue
pushSTM qref ( userMail :: String
, "your book "++ keyObjDBRef rbook ++ " received" :: String
, "Hello, your book...." :: String)
reserveIt rbook
reserveIt rbook = do
mr <- readDBRef rbook
case mr of
Nothing -> retry
Just (Book t s r) ->
if s >0 then writeDBRef rbook $ Book t (s-1) (r+1)
else retry
unreserveIt rbook= do
mr <- readDBRef rbook
case mr of
Nothing -> error "unreserveIt: where is the book?"
Just (Book t s r) ->
if r >0 then writeDBRef rbook $ Book t (s+1) (r-1)
else return()
enterStock delay rbook= forkIO $ loop enter
where
loop f= f >> loop f
enter= do
threadDelay $ delay * 1000000
atomically $ do
Book _ n r <- readDBRef rbook `onNothing` return (Book keyBook 0 0)
writeDBRef rbook $ Book "booktitle" (n +1) r
!> "Added 1 more book to the stock"
buy rbook= do
mr <- readDBRef rbook
case mr of
Nothing -> error "Not in stock"
Just (Book t n n') ->
if n' > 0 !> show mr then writeDBRef rbook $ Book t n (n'-1)
!> "There is in Stock and reserved, BOUGHT"
else if n > 0 then
writeDBRef rbook $ Book t (n-1) 0
!> "No reserved, but stock available, BOUGHT"
else error "buy: neither stock nor reserve"
testBought rbook= do
mr <- readDBRef rbook
case mr of
Nothing -> retry !> ("testbought: the register does not exist: " ++ show rbook)
Just (Book t stock reserve) ->
case reserve of
0 -> return()
n -> retry
stopRestart delay timereserve th= do
threadDelay $ delay * 1000000
killThread th !> "workflow KILLED"
syncCache
atomically flushAll
restartWorkflows ( fromList [("buyreserve", buyReserve timereserve)] ) !> "workflow RESTARTED"
getHistory name x= liftIO $ do
let wfname= keyWF name x
let key= keyResource stat0{wfName=wfname}
atomically $ flushKey key
mh <- atomically . readDBRef . getDBRef $ key
case mh of
Nothing -> return Nothing
Just h -> return . Just
. catMaybes
. map eitherToMaybe
. map safeFromIDyn
$ versions h :: IO (Maybe [String])
where
eitherToMaybe (Right r)= Just r
eitherToMaybe (Left _) = Nothing
| agocorona/MFlow | tests/workflow.hs | bsd-3-clause | 7,629 | 5 | 25 | 2,512 | 2,127 | 1,030 | 1,097 | 181 | 6 |
{-# LANGUAGE TypeSynonymInstances, FlexibleInstances, GADTs, ScopedTypeVariables, FlexibleContexts, TemplateHaskell #-}
{-|
This module contains functionality related to K3's let-bound polymorphism
model.
-}
module Language.K3.TypeSystem.Polymorphism
( generalize
, polyinstantiate
) where
import Control.Applicative
import Control.Monad.Reader
import Control.Monad.Trans.List
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Monoid
import Data.Set (Set)
import qualified Data.Set as Set
import Language.K3.Core.Common
import Language.K3.Utils.Pretty
import Language.K3.Utils.TemplateHaskell.Transform
import Language.K3.TypeSystem.Closure
import qualified Language.K3.TypeSystem.ConstraintSetLike as CSL
import Language.K3.TypeSystem.Data
import Language.K3.TypeSystem.Monad.Iface.FreshVar
import Language.K3.TypeSystem.Morphisms.ExtractVariables
import Language.K3.TypeSystem.Morphisms.ReplaceVariables
import Language.K3.Utils.Logger
$(loggingFunctions)
-- * Generalization
-- |Generalizes a type to produce a quantified type.
generalize :: TNormEnv -> QVar -> ConstraintSet -> NormalQuantType
generalize env qa cs =
let cs' = calculateClosure cs in
let reachableQVars = Set.map SomeQVar $ Set.fromList $
runReader (runListT $ reachableFromQVar qa) cs' in
let freeEnvVars = Set.unions $ map openVars $ Map.elems env in
let quantSet = extractVariables cs' `Set.difference` reachableQVars
`Set.difference` freeEnvVars in
QuantType quantSet qa cs
where
openVars :: NormalQuantType -> Set AnyTVar
openVars (QuantType bound var cs'') =
Set.insert (SomeQVar var) (extractVariables cs'') `Set.difference` bound
type Reachability = ListT (Reader ConstraintSet) QVar
reachableUnions :: [Reachability] -> Reachability
reachableUnions xs = do
cs <- ask
let rs = map (\x -> runReader (runListT x) cs) xs
ListT $ return $ concat rs
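-- |Enumerates the qualified variables reachable from a given variable by
-- descending through the types that lower-bound it in the constraint set,
-- keeping a variable only when a mutability qualifier (@TMut@) appears among
-- its qualifier bounds; 'generalize' refuses to quantify over these variables.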
reachableFromQVar :: QVar -> Reachability
reachableFromQVar qa =
reachableUnions [immediate, lowerBound]
where
immediate = do
cs <- ask
tqs <- ListT $ return $ csQuery cs $ QueryTQualSetByQVarUpperBound qa
guard $ TMut `Set.member` tqs
return qa
lowerBound = do
cs <- ask
t <- ListT $ return $ csQuery cs $ QueryTypeByQVarUpperBound qa
reachableFromType t
reachableFromType :: ShallowType -> Reachability
reachableFromType t = case t of
SOption qa -> reachableFromQVar qa
SIndirection qa -> reachableFromQVar qa
STuple qas -> reachableUnions $ map reachableFromQVar qas
SRecord m _ _ -> reachableUnions $ map reachableFromQVar $ Map.elems m
_ -> mzero
-- * Polyinstantiation
-- |Polyinstantiates a quantified type.
polyinstantiate
:: forall m e c.
( Show c, Pretty c, FreshVarI m, CSL.ConstraintSetLike e c
, CSL.ConstraintSetLikePromotable ConstraintSet c
, Transform ReplaceVariables c)
=> UID -- ^The span at which this polyinstantiation occurred.
-> QuantType c -- ^The type to polyinstantiate.
-> m (QVar, c) -- ^The result of polyinstantiation.
polyinstantiate inst qt@(QuantType boundSet qa cs) = do
_debug $ boxToString $
["Polyinstantiating quantified type: "] %+ prettyLines qt
(qvarMap,uvarMap) <- mconcat <$> mapM freshMap (Set.toList boundSet)
let (qa',cs') = replaceVariables qvarMap uvarMap (qa,cs)
let cs'' = cs' `CSL.union` CSL.promote
(csFromList (map (uncurry PolyinstantiationLineageConstraint)
$ Map.toList qvarMap))
_debug $ boxToString $
["Polyinstantiated "] %+ prettyLines qt %$
indent 2 (["to: "] %+ prettyLines qa' %+ ["\\"] %+ prettyLines cs'')
return (qa',cs'')
where
freshMap :: AnyTVar -> m (Map QVar QVar, Map UVar UVar)
freshMap var =
case var of
SomeQVar qa' -> do
qa'' <- freshQVar $ TVarPolyinstantiationOrigin qa' inst
return (Map.singleton qa' qa'', Map.empty)
SomeUVar a' -> do
a'' <- freshUVar $ TVarPolyinstantiationOrigin a' inst
return (Map.empty, Map.singleton a' a'')
| DaMSL/K3 | src/Language/K3/TypeSystem/Polymorphism.hs | apache-2.0 | 4,172 | 0 | 18 | 903 | 1,132 | 589 | 543 | 89 | 5 |
module DebuggerUtils (
dataConInfoPtrToName,
) where
import ByteCodeItbls
import FastString
import TcRnTypes
import TcRnMonad
import IfaceEnv
import CgInfoTbls
import SMRep
import Module
import OccName
import Name
import Outputable
import Constants
import MonadUtils ()
import Util
import Data.Char
import Foreign
import Data.List
#include "HsVersions.h"
-- | Given a data constructor in the heap, find its Name.
-- The info tables for data constructors have a field which records
-- the source name of the constructor as a Ptr Word8 (UTF-8 encoded
-- string). The format is:
--
-- > Package:Module.Name
--
-- We use this string to look up the interpreter's internal representation of
-- the name using 'lookupOrig'.
--
dataConInfoPtrToName :: Ptr () -> TcM (Either String Name)
dataConInfoPtrToName x = do
theString <- liftIO $ do
let ptr = castPtr x :: Ptr StgInfoTable
conDescAddress <- getConDescAddress ptr
peekArray0 0 conDescAddress
let (pkg, mod, occ) = parse theString
pkgFS = mkFastStringByteList pkg
modFS = mkFastStringByteList mod
occFS = mkFastStringByteList occ
occName = mkOccNameFS OccName.dataName occFS
modName = mkModule (fsToPackageId pkgFS) (mkModuleNameFS modFS)
return (Left$ showSDoc$ ppr modName <> dot <> ppr occName )
`recoverM` (Right `fmap` lookupOrig modName occName)
where
{- To find the string in the constructor's info table we need to consider
the layout of info tables relative to the entry code for a closure.
An info table can be next to the entry code for the closure, or it can
be separate. The former (faster) is used in registerised versions of ghc,
and the latter (portable) is for non-registerised versions.
The diagrams below show where the string is to be found relative to
the normal info table of the closure.
1) Code next to table:
--------------
| | <- pointer to the start of the string
--------------
| | <- the (start of the) info table structure
| |
| |
--------------
| entry code |
| .... |
In this case the pointer to the start of the string can be found in
the memory location _one word before_ the first entry in the normal info
table.
2) Code NOT next to table:
--------------
info table structure -> | *------------------> --------------
| | | entry code |
| | | .... |
--------------
ptr to start of str -> | |
--------------
In this case the pointer to the start of the string can be found
in the memory location: info_table_ptr + info_table_size
-}
getConDescAddress :: Ptr StgInfoTable -> IO (Ptr Word8)
getConDescAddress ptr
| ghciTablesNextToCode = do
offsetToString <- peek $ ptr `plusPtr` (- wORD_SIZE)
return $ (ptr `plusPtr` stdInfoTableSizeB) `plusPtr` (fromIntegral (offsetToString :: StgWord))
| otherwise =
peek $ intPtrToPtr $ (ptrToIntPtr ptr) + fromIntegral stdInfoTableSizeB
-- parsing names is a little bit fiddly because we have a string in the form:
-- pkg:A.B.C.foo, and we want to split it into three parts: ("pkg", "A.B.C", "foo").
-- Thus we split at the leftmost colon and the rightmost occurrence of the dot.
-- It would be easier if the string was in the form pkg:A.B.C:foo, but alas
-- this is not the conventional way of writing Haskell names. We stick with
-- convention, even though it makes the parsing code more troublesome.
-- Warning: this code assumes that the string is well formed.
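-- For example (a sketch; the UTF-8 encoded [Word8] values are shown as plain
-- strings for readability, assuming a well-formed input):
--   parse "bytestring:Data.ByteString.Char8.pack"
--     ~ ("bytestring", "Data.ByteString.Char8", "pack")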
parse :: [Word8] -> ([Word8], [Word8], [Word8])
parse input
= ASSERT (all (>0) (map length [pkg, mod, occ])) (pkg, mod, occ)
where
dot = fromIntegral (ord '.')
(pkg, rest1) = break (== fromIntegral (ord ':')) input
(mod, occ)
= (concat $ intersperse [dot] $ reverse modWords, occWord)
where
(modWords, occWord) = ASSERT (length rest1 > 0) (parseModOcc [] (tail rest1))
parseModOcc :: [[Word8]] -> [Word8] -> ([[Word8]], [Word8])
-- We only look for dots if str could start with a module name,
-- i.e. if it starts with an upper case character.
-- Otherwise we might think that "X.:->" is the module name in
-- "X.:->.+", whereas actually "X" is the module name and
-- ":->.+" is a constructor name.
parseModOcc acc str@(c : _)
| isUpper $ chr $ fromIntegral c
= case break (== dot) str of
(top, []) -> (acc, top)
(top, _ : bot) -> parseModOcc (top : acc) bot
parseModOcc acc str = (acc, str)
| mcmaniac/ghc | compiler/ghci/DebuggerUtils.hs | bsd-3-clause | 5,002 | 0 | 15 | 1,526 | 784 | 432 | 352 | 55 | 3 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE UndecidableInstances #-}
-- | Provides functions for manipulating articulation.
module Music.Score.Articulation (
-- ** Articulation type functions
Articulation,
SetArticulation,
Accentuation,
Separation,
Articulated(..),
-- ** Accessing articulation
HasArticulations(..),
HasArticulation(..),
HasArticulations',
HasArticulation',
articulation',
articulations',
-- * Manipulating articulation
-- ** Accents
accent,
marcato,
accentLast,
marcatoLast,
accentAll,
marcatoAll,
-- ** Phrasing and separation
staccatissimo,
staccato,
separated,
portato,
legato,
legatissimo,
tenuto,
spiccato,
-- * Articulation transformer
ArticulationT(..),
-- * Context
varticulation,
addArtCon,
) where
import Control.Applicative
import Control.Comonad
import Control.Lens hiding (above, below, transform)
import Data.AffineSpace
import Data.Foldable
import Data.Functor.Couple
import Data.Semigroup
import Data.Typeable
import Data.VectorSpace hiding (Sum)
import Data.Functor.Context
import Music.Score.Part
import Music.Time
import Music.Time.Internal.Transform
import Music.Dynamics.Literal
import Music.Pitch.Literal
import Music.Score.Harmonics
import Music.Score.Part
import Music.Score.Phrases
import Music.Score.Slide
import Music.Score.Text
import Music.Score.Ties
-- |
-- The articulation type of @s@.
--
type family Articulation (s :: *) :: *
-- |
-- Substitute the articulation type of @s@ with @b@.
--
type family SetArticulation (b :: *) (s :: *) :: *
type ArticulationLensLaws' s t a b = (
Articulation (SetArticulation a s) ~ a,
SetArticulation (Articulation t) s ~ t,
SetArticulation a (SetArticulation b s) ~ SetArticulation a s
)
type ArticulationLensLaws s t = ArticulationLensLaws' s t (Articulation s) (Articulation t)
-- |
-- Class of types that provide a single articulation.
--
class (HasArticulations s t) => HasArticulation s t where
-- | Access the articulation.
articulation :: Lens s t (Articulation s) (Articulation t)
-- |
-- Class of types that provide an articulation traversal.
--
class (Transformable (Articulation s),
Transformable (Articulation t),
ArticulationLensLaws s t) => HasArticulations s t where
-- | Traverse the articulations.
articulations :: Traversal s t (Articulation s) (Articulation t)
type HasArticulation' a = HasArticulation a a
type HasArticulations' a = HasArticulations a a
-- |
-- Monomorphic version of 'articulation'.
--
articulation' :: (HasArticulation s t, s ~ t) => Lens' s (Articulation s)
articulation' = articulation
-- |
-- Monomorphic version of 'articulations'.
--
articulations' :: (HasArticulations s t, s ~ t) => Traversal' s (Articulation s)
articulations' = articulations
#define PRIM_ARTICULATION_INSTANCE(TYPE) \
\
type instance Articulation TYPE = TYPE; \
type instance SetArticulation a TYPE = a; \
\
instance (Transformable a, a ~ Articulation a, SetArticulation TYPE a ~ TYPE) \
=> HasArticulation TYPE a where { \
articulation = ($) } ; \
\
instance (Transformable a, a ~ Articulation a, SetArticulation TYPE a ~ TYPE) \
=> HasArticulations TYPE a where { \
articulations = ($) } ; \
PRIM_ARTICULATION_INSTANCE(())
PRIM_ARTICULATION_INSTANCE(Bool)
PRIM_ARTICULATION_INSTANCE(Ordering)
PRIM_ARTICULATION_INSTANCE(Char)
PRIM_ARTICULATION_INSTANCE(Int)
PRIM_ARTICULATION_INSTANCE(Integer)
PRIM_ARTICULATION_INSTANCE(Float)
PRIM_ARTICULATION_INSTANCE(Double)
type instance Articulation (c,a) = Articulation a
type instance SetArticulation b (c,a) = (c,SetArticulation b a)
type instance Articulation [a] = Articulation a
type instance SetArticulation b [a] = [SetArticulation b a]
type instance Articulation (Maybe a) = Articulation a
type instance SetArticulation b (Maybe a) = Maybe (SetArticulation b a)
type instance Articulation (Either c a) = Articulation a
type instance SetArticulation b (Either c a) = Either c (SetArticulation b a)
type instance Articulation (Event a) = Articulation a
type instance SetArticulation g (Event a) = Event (SetArticulation g a)
type instance Articulation (Placed a) = Articulation a
type instance SetArticulation g (Placed a) = Placed (SetArticulation g a)
type instance Articulation (Note a) = Articulation a
type instance SetArticulation g (Note a) = Note (SetArticulation g a)
type instance Articulation (Voice a) = Articulation a
type instance SetArticulation b (Voice a) = Voice (SetArticulation b a)
type instance Articulation (Track a) = Articulation a
type instance SetArticulation b (Track a) = Track (SetArticulation b a)
type instance Articulation (Score a) = Articulation a
type instance SetArticulation b (Score a) = Score (SetArticulation b a)
instance HasArticulation a b => HasArticulation (c, a) (c, b) where
articulation = _2 . articulation
instance HasArticulations a b => HasArticulations (c, a) (c, b) where
articulations = traverse . articulations
instance HasArticulations a b => HasArticulations [a] [b] where
articulations = traverse . articulations
instance HasArticulations a b => HasArticulations (Maybe a) (Maybe b) where
articulations = traverse . articulations
instance HasArticulations a b => HasArticulations (Either c a) (Either c b) where
articulations = traverse . articulations
instance (HasArticulations a b) => HasArticulations (Event a) (Event b) where
articulations = from event . whilstL articulations
instance (HasArticulation a b) => HasArticulation (Event a) (Event b) where
articulation = from event . whilstL articulation
instance (HasArticulations a b) => HasArticulations (Placed a) (Placed b) where
articulations = _Wrapped . whilstLT articulations
instance (HasArticulation a b) => HasArticulation (Placed a) (Placed b) where
articulation = _Wrapped . whilstLT articulation
instance (HasArticulations a b) => HasArticulations (Note a) (Note b) where
articulations = _Wrapped . whilstLD articulations
instance (HasArticulation a b) => HasArticulation (Note a) (Note b) where
articulation = _Wrapped . whilstLD articulation
instance HasArticulations a b => HasArticulations (Voice a) (Voice b) where
articulations = traverse . articulations
{-
type instance Articulation (Chord a) = Articulation a
type instance SetArticulation b (Chord a) = Chord (SetArticulation b a)
instance HasArticulations a b => HasArticulations (Chord a) (Chord b) where
articulations = traverse . articulations
-}
instance HasArticulations a b => HasArticulations (Track a) (Track b) where
articulations = traverse . articulations
instance HasArticulations a b => HasArticulations (Score a) (Score b) where
articulations =
_Wrapped . _2 -- into NScore
. _Wrapped
. traverse
. from event -- this needed?
. whilstL articulations
type instance Articulation (Couple c a) = Articulation a
type instance SetArticulation g (Couple c a) = Couple c (SetArticulation g a)
type instance Articulation (TextT a) = Articulation a
type instance SetArticulation g (TextT a) = TextT (SetArticulation g a)
type instance Articulation (HarmonicT a) = Articulation a
type instance SetArticulation g (HarmonicT a) = HarmonicT (SetArticulation g a)
type instance Articulation (TieT a) = Articulation a
type instance SetArticulation g (TieT a) = TieT (SetArticulation g a)
type instance Articulation (SlideT a) = Articulation a
type instance SetArticulation g (SlideT a) = SlideT (SetArticulation g a)
instance (HasArticulations a b) => HasArticulations (Couple c a) (Couple c b) where
articulations = _Wrapped . articulations
instance (HasArticulation a b) => HasArticulation (Couple c a) (Couple c b) where
articulation = _Wrapped . articulation
instance (HasArticulations a b) => HasArticulations (TextT a) (TextT b) where
articulations = _Wrapped . articulations
instance (HasArticulation a b) => HasArticulation (TextT a) (TextT b) where
articulation = _Wrapped . articulation
instance (HasArticulations a b) => HasArticulations (HarmonicT a) (HarmonicT b) where
articulations = _Wrapped . articulations
instance (HasArticulation a b) => HasArticulation (HarmonicT a) (HarmonicT b) where
articulation = _Wrapped . articulation
instance (HasArticulations a b) => HasArticulations (TieT a) (TieT b) where
articulations = _Wrapped . articulations
instance (HasArticulation a b) => HasArticulation (TieT a) (TieT b) where
articulation = _Wrapped . articulation
instance (HasArticulations a b) => HasArticulations (SlideT a) (SlideT b) where
articulations = _Wrapped . articulations
instance (HasArticulation a b) => HasArticulation (SlideT a) (SlideT b) where
articulation = _Wrapped . articulation
type family Accentuation (a :: *) :: *
type family Separation (a :: *) :: *
type instance Accentuation () = ()
type instance Separation () = ()
type instance Accentuation (a, b) = a
type instance Separation (a, b) = b
-- |
-- Class of types whose articulation has fractional accentuation and separation components.
--
class (
Fractional (Accentuation a),
Fractional (Separation a),
AffineSpace (Accentuation a),
AffineSpace (Separation a)
) => Articulated a where
accentuation :: Lens' a (Accentuation a)
separation :: Lens' a (Separation a)
instance (AffineSpace a, AffineSpace b, Fractional a, Fractional b) => Articulated (a, b) where
accentuation = _1
separation = _2
accent :: (HasPhrases' s b, HasArticulations' b, Articulation b ~ a, Articulated a) => s -> s
accent = set (phrases . _head . articulations . accentuation) 1
marcato :: (HasPhrases' s b, HasArticulations' b, Articulation b ~ a, Articulated a) => s -> s
marcato = set (phrases . _head . articulations . accentuation) 2
accentLast :: (HasPhrases' s b, HasArticulations' b, Articulation b ~ a, Articulated a) => s -> s
accentLast = set (phrases . _last . articulations . accentuation) 1
marcatoLast :: (HasPhrases' s b, HasArticulations' b, Articulation b ~ a, Articulated a) => s -> s
marcatoLast = set (phrases . _last . articulations . accentuation) 2
accentAll :: (HasArticulations' s, Articulation s ~ a, Articulated a) => s -> s
accentAll = set (articulations . accentuation) 1
marcatoAll :: (HasArticulations' s, Articulation s ~ a, Articulated a) => s -> s
marcatoAll = set (articulations . accentuation) 2
tenuto :: (HasArticulations' s, Articulation s ~ a, Articulated a) => s -> s
tenuto = id
spiccato :: (HasArticulations' s, Articulation s ~ a, Articulated a) => s -> s
spiccato = id
legatissimo :: (HasArticulations' s, Articulation s ~ a, Articulated a) => s -> s
legatissimo = set (articulations . separation) (-2)
legato :: (HasArticulations' s, Articulation s ~ a, Articulated a) => s -> s
legato = set (articulations . separation) (-1)
separated :: (HasArticulations' s, Articulation s ~ a, Articulated a) => s -> s
separated = set (articulations . separation) 0
portato :: (HasArticulations' s, Articulation s ~ a, Articulated a) => s -> s
portato = set (articulations . separation) 0.5
staccato :: (HasArticulations' s, Articulation s ~ a, Articulated a) => s -> s
staccato = set (articulations . separation) 1
staccatissimo :: (HasArticulations' s, Articulation s ~ a, Articulated a) => s -> s
staccatissimo = set (articulations . separation) 2
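-- For example (a sketch): @accent s@ accentuates only the first note of each
-- phrase in @s@ and @accentLast s@ the last one, whereas @accentAll s@ sets the
-- accentuation of every note; the separation functions above (@legato@,
-- @staccato@, ...) likewise apply to every note.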
newtype ArticulationT n a = ArticulationT { getArticulationT :: (n, a) }
deriving (
Eq, Ord, Show, Typeable, Functor, Applicative, Monad,
Comonad, Transformable, Monoid, Semigroup
)
instance (Monoid n, Num a) => Num (ArticulationT n a) where
(+) = liftA2 (+)
(*) = liftA2 (*)
(-) = liftA2 (-)
abs = fmap abs
signum = fmap signum
fromInteger = pure . fromInteger
instance (Monoid n, Fractional a) => Fractional (ArticulationT n a) where
recip = fmap recip
fromRational = pure . fromRational
instance (Monoid n, Floating a) => Floating (ArticulationT n a) where
pi = pure pi
sqrt = fmap sqrt
exp = fmap exp
log = fmap log
sin = fmap sin
cos = fmap cos
asin = fmap asin
atan = fmap atan
acos = fmap acos
sinh = fmap sinh
cosh = fmap cosh
asinh = fmap asinh
atanh = fmap atanh
acosh = fmap acos
instance (Monoid n, Enum a) => Enum (ArticulationT n a) where
toEnum = pure . toEnum
fromEnum = fromEnum . extract
instance (Monoid n, Bounded a) => Bounded (ArticulationT n a) where
minBound = pure minBound
maxBound = pure maxBound
-- instance (Monoid n, Num a, Ord a, Real a) => Real (ArticulationT n a) where
-- toRational = toRational . extract
--
-- instance (Monoid n, Real a, Enum a, Integral a) => Integral (ArticulationT n a) where
-- quot = liftA2 quot
-- rem = liftA2 rem
-- toInteger = toInteger . extract
instance Wrapped (ArticulationT p a) where
type Unwrapped (ArticulationT p a) = (p, a)
_Wrapped' = iso getArticulationT ArticulationT
instance Rewrapped (ArticulationT p a) (ArticulationT p' b)
type instance Articulation (ArticulationT p a) = p
type instance SetArticulation p' (ArticulationT p a) = ArticulationT p' a
instance (Transformable p, Transformable p')
=> HasArticulation (ArticulationT p a) (ArticulationT p' a) where
articulation = _Wrapped . _1
instance (Transformable p, Transformable p')
=> HasArticulations (ArticulationT p a) (ArticulationT p' a) where
articulations = _Wrapped . _1
deriving instance (IsPitch a, Monoid n) => IsPitch (ArticulationT n a)
deriving instance (IsInterval a, Monoid n) => IsInterval (ArticulationT n a)
deriving instance Reversible a => Reversible (ArticulationT p a)
instance (Tiable n, Tiable a) => Tiable (ArticulationT n a) where
isTieEndBeginning (ArticulationT (_,a)) = isTieEndBeginning a
toTied (ArticulationT (d,a)) = (ArticulationT (d1,a1), ArticulationT (d2,a2))
where
(a1,a2) = toTied a
(d1,d2) = toTied d
-- TODO move
addArtCon :: (
HasPhrases s t a b, HasArticulation' a, HasArticulation a b, Articulation a ~ d, Articulation b ~ Ctxt d
) => s -> t
addArtCon = over (phrases.varticulation) withContext
varticulation = lens (fmap $ view articulation) (flip $ zipVoiceWithNoScale (set articulation))
| music-suite/music-score | src/Music/Score/Articulation.hs | bsd-3-clause | 14,942 | 0 | 10 | 3,341 | 4,500 | 2,414 | 2,086 | -1 | -1 |
-- |
-- Module : Basement.FinalPtr
-- License : BSD-style
-- Maintainer : Vincent Hanquez <[email protected]>
-- Stability : experimental
-- Portability : portable
--
-- A smaller ForeignPtr reimplementation that works in any prim monad.
--
-- Here be dragons.
--
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE UnboxedTuples #-}
{-# LANGUAGE CPP #-}
module Basement.FinalPtr
( FinalPtr(..)
, finalPtrSameMemory
, castFinalPtr
, toFinalPtr
, toFinalPtrForeign
, touchFinalPtr
, withFinalPtr
, withUnsafeFinalPtr
, withFinalPtrNoTouch
) where
import GHC.Ptr
import qualified GHC.ForeignPtr as GHCF
import GHC.IO
import Basement.Monad
import Basement.Compat.Primitive
import Basement.Compat.Base
import Control.Monad.ST (runST)
-- | Create a pointer with an associated finalizer
data FinalPtr a = FinalPtr (Ptr a)
| FinalForeign (GHCF.ForeignPtr a)
instance Show (FinalPtr a) where
show f = runST $ withFinalPtr f (pure . show)
instance Eq (FinalPtr a) where
(==) f1 f2 = runST (equal f1 f2)
instance Ord (FinalPtr a) where
compare f1 f2 = runST (compare_ f1 f2)
-- | Check whether two final ptrs point to the same memory bits
--
-- it stands to reason that, provided a final ptr is still being referenced
-- (and thus its memory is still valid), if two final ptrs have the
-- same address, they should be the same final ptr
finalPtrSameMemory :: FinalPtr a -> FinalPtr b -> Bool
finalPtrSameMemory (FinalPtr p1) (FinalPtr p2) = p1 == castPtr p2
finalPtrSameMemory (FinalForeign p1) (FinalForeign p2) = p1 == GHCF.castForeignPtr p2
finalPtrSameMemory (FinalForeign _) (FinalPtr _) = False
finalPtrSameMemory (FinalPtr _) (FinalForeign _) = False
-- | create a new FinalPtr from a Pointer
toFinalPtr :: PrimMonad prim => Ptr a -> (Ptr a -> IO ()) -> prim (FinalPtr a)
toFinalPtr ptr finalizer = unsafePrimFromIO (primitive makeWithFinalizer)
where
makeWithFinalizer s =
case compatMkWeak# ptr () (finalizer ptr) s of { (# s2, _ #) -> (# s2, FinalPtr ptr #) }
-- | Create a new FinalPtr from a ForeignPtr
toFinalPtrForeign :: GHCF.ForeignPtr a -> FinalPtr a
toFinalPtrForeign fptr = FinalForeign fptr
-- | Cast a finalized pointer from type a to type b
castFinalPtr :: FinalPtr a -> FinalPtr b
castFinalPtr (FinalPtr a) = FinalPtr (castPtr a)
castFinalPtr (FinalForeign a) = FinalForeign (GHCF.castForeignPtr a)
withFinalPtrNoTouch :: FinalPtr p -> (Ptr p -> a) -> a
withFinalPtrNoTouch (FinalPtr ptr) f = f ptr
withFinalPtrNoTouch (FinalForeign fptr) f = f (GHCF.unsafeForeignPtrToPtr fptr)
{-# INLINE withFinalPtrNoTouch #-}
-- | Looks at the raw pointer inside a FinalPtr, making sure the
-- data pointed to by the pointer is not finalized during the call to 'f'
withFinalPtr :: PrimMonad prim => FinalPtr p -> (Ptr p -> prim a) -> prim a
withFinalPtr (FinalPtr ptr) f = do
r <- f ptr
primTouch ptr
pure r
withFinalPtr (FinalForeign fptr) f = do
r <- f (GHCF.unsafeForeignPtrToPtr fptr)
unsafePrimFromIO (GHCF.touchForeignPtr fptr)
pure r
{-# INLINE withFinalPtr #-}
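-- A usage sketch (illustrative only):
--
-- > peekFirstByte :: FinalPtr Word8 -> IO Word8
-- > peekFirstByte fp = withFinalPtr fp peek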
touchFinalPtr :: PrimMonad prim => FinalPtr p -> prim ()
touchFinalPtr (FinalPtr ptr) = primTouch ptr
touchFinalPtr (FinalForeign fptr) = unsafePrimFromIO (GHCF.touchForeignPtr fptr)
-- | Unsafe version of 'withFinalPtr'
withUnsafeFinalPtr :: PrimMonad prim => FinalPtr p -> (Ptr p -> prim a) -> a
withUnsafeFinalPtr fptr f = unsafePerformIO (unsafePrimToIO (withFinalPtr fptr f))
{-# NOINLINE withUnsafeFinalPtr #-}
equal :: PrimMonad prim => FinalPtr a -> FinalPtr a -> prim Bool
equal f1 f2 =
withFinalPtr f1 $ \ptr1 ->
withFinalPtr f2 $ \ptr2 ->
pure $ ptr1 == ptr2
{-# INLINE equal #-}
compare_ :: PrimMonad prim => FinalPtr a -> FinalPtr a -> prim Ordering
compare_ f1 f2 =
withFinalPtr f1 $ \ptr1 ->
withFinalPtr f2 $ \ptr2 ->
pure $ ptr1 `compare` ptr2
{-# INLINE compare_ #-}
| vincenthz/hs-foundation | basement/Basement/FinalPtr.hs | bsd-3-clause | 3,919 | 0 | 11 | 785 | 1,063 | 543 | 520 | 74 | 1 |
import Control.Monad (unless)
import Data.List (isInfixOf)
import StackTest
main :: IO ()
main = do
stack ["setup"]
stackCheckStderr ["test", "--coverage"] $ \out -> do
unless ("The coverage report for foo's test-suite \"foo-test\" is available at" `isInfixOf` out) $
fail "Coverage report didn't build"
| juhp/stack | test/integration/tests/3997-coverage-with-cabal-3/Main.hs | bsd-3-clause | 333 | 1 | 14 | 72 | 94 | 48 | 46 | 9 | 1 |
module Main where
import GUI.Main (runGUI)
import System.Environment
import System.Exit
import System.Console.GetOpt
import Data.Version (showVersion)
import Paths_threadscope (version)
-------------------------------------------------------------------------------
main :: IO ()
main = do
args <- getArgs
(flags, args') <- parseArgs args
handleArgs flags args'
handleArgs :: Flags -> [String] -> IO ()
handleArgs flags args
| flagHelp flags = printHelp
| flagVersion flags = printVersion
| otherwise = do
initialTrace <- case (args, flagTest flags) of
([filename], Nothing) -> return (Just (Left filename))
([], Just tracename) -> return (Just (Right tracename))
([], Nothing) -> return Nothing
_ -> printUsage >> exitFailure
runGUI initialTrace
where
printVersion = putStrLn ("ThreadScope version " ++ showVersion version)
printUsage = putStrLn usageHeader
usageHeader = "Usage: threadscope [eventlog]\n" ++
" or: threadscope [FLAGS]"
helpHeader = usageHeader ++ "\n\nFlags: "
printHelp = putStrLn (usageInfo helpHeader flagDescrs
++ "\nFor more details see http://www.haskell.org/haskellwiki/ThreadScope_Tour\n")
-------------------------------------------------------------------------------
data Flags = Flags {
flagTest :: Maybe FilePath,
flagVersion :: Bool,
flagHelp :: Bool
}
defaultFlags :: Flags
defaultFlags = Flags Nothing False False
flagDescrs :: [OptDescr (Flags -> Flags)]
flagDescrs =
[ Option ['h'] ["help"]
(NoArg (\flags -> flags { flagHelp = True }))
"Show this help text"
, Option ['v'] ["version"]
(NoArg (\flags -> flags { flagVersion = True }))
"Program version"
, Option ['t'] ["test"]
(ReqArg (\name flags -> flags { flagTest = Just name }) "NAME")
"Load a named internal test (see Events/TestEvents.hs)"
]
parseArgs :: [String] -> IO (Flags, [String])
parseArgs args
| flagHelp flags = return (flags, args')
| not (null errs) = printErrors errs
| otherwise = return (flags, args')
where
(flags0, args', errs) = getOpt Permute flagDescrs args
flags = foldr (flip (.)) id flags0 defaultFlags
printErrors errs = do
putStrLn $ concat errs ++ "Try --help."
exitFailure
| ml9951/ThreadScope | Main.hs | bsd-3-clause | 2,387 | 0 | 16 | 580 | 695 | 366 | 329 | 57 | 4 |
{-# LANGUAGE Haskell98 #-}
{-# LINE 1 "Network/Wai/Handler/Warp/IO.hs" #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE CPP #-}
module Network.Wai.Handler.Warp.IO where
import Data.ByteString (ByteString)
import Data.ByteString.Builder (Builder)
import Data.ByteString.Builder.Extra (runBuilder, Next(Done, More, Chunk))
import Network.Wai.Handler.Warp.Buffer
import Network.Wai.Handler.Warp.Types
toBufIOWith :: Buffer -> BufSize -> (ByteString -> IO ()) -> Builder -> IO ()
toBufIOWith buf !size io builder = loop firstWriter
where
firstWriter = runBuilder builder
runIO len = bufferIO buf len io
loop writer = do
(len, signal) <- writer buf size
case signal of
Done -> runIO len
More minSize next
| size < minSize -> error "toBufIOWith: BufferFull: minSize"
| otherwise -> do
runIO len
loop next
Chunk bs next -> do
runIO len
io bs
loop next
| phischu/fragnix | tests/packages/scotty/Network.Wai.Handler.Warp.IO.hs | bsd-3-clause | 1,157 | 0 | 15 | 433 | 270 | 141 | 129 | 27 | 3 |
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.GLU.Errors
-- Copyright : (c) Sven Panne 2002-2013
-- License : BSD3
--
-- Maintainer : Sven Panne <[email protected]>
-- Stability : stable
-- Portability : portable
--
-- This module corresponds to section 2.5 (GL Errors) of the OpenGL 2.1 specs
-- and chapter 8 (Errors) of the GLU specs, offering a generalized view of
-- errors in GL and GLU.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.GLU.Errors (
Error(..), ErrorCategory(..), errors
) where
import Graphics.Rendering.OpenGL.GL.StateVar
import Graphics.Rendering.OpenGL.GLU.ErrorsInternal
--------------------------------------------------------------------------------
-- | When an error occurs, it is recorded in this state variable and no further
-- errors are recorded. Reading 'errors' returns the currently recorded errors
-- (there may be more than one due to a possibly distributed implementation) and
-- resets the state variable to @[]@, re-enabling the recording of future
-- errors. The value @[]@ means that there has been no detectable error since
-- the last time 'errors' was read, or since the GL was initialized.
errors :: GettableStateVar [Error]
errors = makeGettableStateVar getErrors
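-- A usage sketch (illustrative; 'get' comes from the StateVar API and 'Error'
-- is assumed to have a 'Show' instance):
--
-- > reportErrors :: IO ()
-- > reportErrors = get errors >>= mapM_ print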
| hesiod/OpenGL | src/Graphics/Rendering/OpenGL/GLU/Errors.hs | bsd-3-clause | 1,392 | 0 | 6 | 200 | 85 | 64 | 21 | 6 | 1 |
module Main where
-------------------------------------------------------------------------------
import Data.Conduit
import Data.Conduit.Binary
import Data.Conduit.List
import System.IO
-------------------------------------------------------------------------------
import Hadron
-------------------------------------------------------------------------------
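-- | Reads records from stdin, decodes them with the line-based protocol
-- ('linesProtocol') and immediately re-encodes them to stdout, i.e. a
-- normalizing pass-through.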
main :: IO ()
main = runResourceT $
sourceHandle stdin $=
(protoDec linesProtocol) $=
(protoEnc linesProtocol) $$
sinkHandle stdout
| fpinsight/hadron | src/Hadron/OutputFixer.hs | bsd-3-clause | 554 | 0 | 9 | 96 | 81 | 46 | 35 | 12 | 1 |
{-# LANGUAGE FlexibleInstances, FlexibleContexts, GADTs, OverloadedStrings #-}
-- | Hash manipulation and callbacks.
module Haste.Hash (
onHashChange, onHashChange', setHash, getHash, setHash', getHash'
) where
import Haste.Foreign
import Control.Monad.IO.Class
import Haste.Prim
-- | Register a callback to be run whenever the URL hash changes.
-- The two arguments of the callback are the old and new hash respectively.
onHashChange :: MonadIO m
=> (String -> String -> IO ())
-> m ()
onHashChange f = do
firsthash <- getHash'
liftIO $ jsOnHashChange firsthash cb
where
cb = \old new -> f (fromJSStr old) (fromJSStr new)
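-- A minimal usage sketch (illustrative; 'render' is a hypothetical action):
--
-- > main :: IO ()
-- > main = onHashChange $ \old new ->
-- >   if old /= new then render new else return ()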
-- | JSString version of @onHashChange@.
onHashChange' :: MonadIO m
=> (JSString -> JSString -> IO ())
-> m ()
onHashChange' f = do
firsthash <- getHash'
liftIO $ jsOnHashChange firsthash f
jsOnHashChange :: JSString -> (JSString -> JSString -> IO ()) -> IO ()
jsOnHashChange =
ffi "(function(firsthash,cb){\
\window.__old_hash = firsthash;\
\window.onhashchange = function(e){\
\var oldhash = window.__old_hash;\
\var newhash = window.location.hash.split('#')[1] || '';\
\window.__old_hash = newhash;\
\cb(oldhash,newhash);\
\};\
\})"
-- | Set the hash part of the current URL.
setHash :: MonadIO m => String -> m ()
setHash = liftIO . jsSetHash . toJSStr
-- | Set the hash part of the current URL - JSString version.
setHash' :: MonadIO m => JSString -> m ()
setHash' = liftIO . jsSetHash
jsSetHash :: JSString -> IO ()
jsSetHash = ffi "(function(h) {location.hash = '#'+h;})"
-- | Read the hash part of the current URL.
getHash :: MonadIO m => m String
getHash = liftIO $ fromJSStr `fmap` jsGetHash
-- | Read the hash part of the current URL - JSString version.
getHash' :: MonadIO m => m JSString
getHash' = liftIO jsGetHash
jsGetHash :: IO JSString
jsGetHash = ffi "(function() {return location.hash.substring(1);})"
| beni55/haste-compiler | libraries/haste-lib/src/Haste/Hash.hs | bsd-3-clause | 2,025 | 0 | 11 | 469 | 408 | 212 | 196 | 34 | 1 |
maxSubarray :: (Num a, Ord a) => [a] -> a
maxSubarray [x] = x
maxSubarray (x:xs) = snd $ foldl msReducer (x, x) xs
msReducer :: (Num a, Ord a) => (a, a) -> a -> (a, a)
msReducer (maxEndingHere, maxSoFar) x = (meh, maxSoFar `max` meh)
where meh = x `max` (maxEndingHere + x)
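-- This is Kadane's algorithm: the accumulator pairs the best sum of a subarray
-- ending at the current element with the best sum seen so far, e.g.
-- maxSubarray [-2, 1, -3, 4, -1, 2, 1, -5, 4] == 6.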
testMaxSubarray :: Bool
testMaxSubarray = and $ map (\ (xs, e) -> e == (maxSubarray xs)) testCases
where testCases = [([-2, 1, -3, 4, -1, 2, 1, -5, 4], 6)] ++
[([2, 3, 7, -5, -1, 4, -10], 12)]
| warreee/Algorithm-Implementations | Maximum_Subarray/Haskell/jcla1/kadane.hs | mit | 499 | 0 | 11 | 126 | 295 | 171 | 124 | 10 | 1 |
{-# OPTIONS_GHC -fno-warn-redundant-constraints #-}
{-# LANGUAGE ImplicitParams, RankNTypes #-}
-- This program failed to typecheck in an early version of
-- GHC with impredicative polymorphism, but it was fixed by
-- doing pre-subsumption in the subsumption check.
-- Trac bug #821
module ShouldCompile where
type PPDoc = (?env :: Int) => Char
f :: Char -> PPDoc
f = succ
| urbanslug/ghc | testsuite/tests/typecheck/should_compile/tc208.hs | bsd-3-clause | 379 | 0 | 6 | 66 | 39 | 27 | 12 | 6 | 1 |
-- | Convenience methods and types for interacting with "Network.Curl".
module LinkChecker.Curl
( CurlResult
, statusCode
, redirectTarget
, getFinalRedirectTarget
, processUrl
) where
import Data.Version (showVersion)
import Foreign.C.Types (CChar, CInt)
import Foreign (Ptr)
import Network.Curl hiding (getResponseCode)
import Paths_link_checker (version)
-- * Constants
-- | The HTTP User-Agent string that we'll use as we fetch URLs.
userAgent :: String
userAgent = "link-checker/" ++ ver ++ " (+" ++ url ++ ")"
where ver = showVersion version
url = "https://github.com/bdesham/link-checker"
-- * Types
-- | The result of checking a URL's status code.
data CurlResult
= CurlResult { -- | The HTTP status code (or 0 if there was a Curl error)
statusCode :: Integer,
-- | The ultimate target of HTTP redirects, if there were any
redirectTarget :: Maybe String
} deriving (Show)
-- * Functions for interacting with Curl
-- | Function that will accept Curl's output, do nothing with it, and report
-- that it has consumed the entire thing.
dummyWriteFunction :: Ptr CChar -> CInt -> CInt -> Ptr () -> IO CInt
dummyWriteFunction _ width num _ = return $ width * num
-- | Given a Curl object, extracts the HTTP response code.
getResponseCode :: Curl -> IO Integer
getResponseCode curl = do
code <- getInfo curl ResponseCode
let longValue = case code of ILong l -> l
_ -> error "Curl ResponseCode is not an integer"
return $ fromIntegral longValue
-- | Given a Curl object, extracts the URL to which we were redirected.
getEffectiveUrl :: Curl -> IO String
getEffectiveUrl curl = do
url <- getInfo curl EffectiveUrl
let stringValue = case url of IString s -> s
_ -> error "Curl EffectiveUrl is not a string"
return stringValue
-- | Accesses the given URL, following any number of HTTP redirects that are
-- issued. Returns the URL at which we finally ended up.
getFinalRedirectTarget :: String -> IO String
getFinalRedirectTarget url = do
curl <- initialize
setopts curl [ CurlURL url
, CurlUserAgent userAgent
, CurlWriteFunction dummyWriteFunction
, CurlFollowLocation True
]
perform curl
getEffectiveUrl curl
-- | Accesses the given URL. If the resulting HTTP status code is 301, 302, or
-- 303, the URL is re-requested and the HTTP redirects are followed as far as
-- they go. The returned 'CurlResult' includes the HTTP status code from the
-- original request and the final target of redirects, if applicable.
--
-- If Curl encounters some kind of problem and gives a status code of 0 then
-- this function will retry that URL once. (This can happen if the server thinks
-- we're a malicious bot and unexpectedly terminates the connection.)
processUrl :: String -> IO CurlResult
processUrl url_ = go url_ True
where go url retry = do
curl <- initialize
setopts curl [ CurlURL url
, CurlUserAgent userAgent
, CurlWriteFunction dummyWriteFunction
]
perform curl
code <- getResponseCode curl
if code == 0 && retry
then go url False
else if code `elem` [301, 302, 303]
then getFinalRedirectTarget url >>= \target ->
-- If the server just gave us a redirect to the same URL
-- (which they sometimes do as a bot-protection measure),
-- report that the URL worked fine.
if target == url
then return $ CurlResult 200 Nothing
else return $ CurlResult code (Just target)
else return $ CurlResult code Nothing
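-- A usage sketch (illustrative):
--
-- > main :: IO ()
-- > main = do
-- >   result <- processUrl "https://example.com/"
-- >   print (statusCode result, redirectTarget result)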
| bdesham/link-checker | src/LinkChecker/Curl.hs | isc | 3,931 | 0 | 17 | 1,184 | 628 | 328 | 300 | 60 | 4 |
{-# LANGUAGE PatternGuards #-}
module Draw
( drawState
, drawWorld)
where
import State
import World
import Geometry.Segment
import Graphics.Gloss
import Graphics.Gloss.Geometry.Line
import qualified Data.Vector.Unboxed as V
import Data.Maybe
drawState :: State -> Picture
drawState state
| ModeDisplayWorld <- stateModeDisplay state
= drawWorldWithViewPos
(stateModeOverlay state)
(stateViewPos state)
(stateTargetPos state)
(stateWorld state)
| ModeDisplayNormalised <- stateModeDisplay state
= drawWorldWithViewPos
(stateModeOverlay state)
(0, 0)
Nothing
$ normaliseWorld (stateViewPos state)
$ stateWorld state
| otherwise
= Blank
drawWorldWithViewPos :: ModeOverlay -> Point -> Maybe Point -> World -> Picture
drawWorldWithViewPos
modeOverlay
pView@(vx, vy)
mTarget
world
= let
-- the world
picWorld = Color white
$ drawWorld world
-- view position indicator
picView = Color red
$ Translate vx vy
$ ThickCircle 2 4
-- target position indicator
picTargets
| Just pTarget@(px, py) <- mTarget
= let picTarget = Translate px py $ ThickCircle 2 4
-- line between view and target pos
picLine = Line [pView, pTarget]
picSegsHit = Pictures
$ [ Line [p1, p2]
| (_, p1, p2) <- V.toList $ worldSegments world
, isJust $ intersectSegSeg p1 p2 pView pTarget ]
in Color red $ Pictures [picTarget, picLine, picSegsHit]
| otherwise
= blank
-- overlay
picOverlay
| ModeOverlayVisApprox <- modeOverlay
= drawVisGrid 10 pView world
| otherwise
= blank
in Pictures [picOverlay, picWorld, picView, picTargets]
-- | Draw a grid of points showing what is visible from a view position
drawVisGrid :: Float -> Point -> World -> Picture
drawVisGrid cellSize pView world
= let
visible pTarget = not $ any isJust
$ map (\(_, p1, p2) -> intersectSegSeg pView pTarget p1 p2)
$ V.toList
$ worldSegments world
picGrid = Pictures
$ [ if visible (x, y)
then Color (dim green) $ Translate x y $ rectangleSolid cellSize cellSize
else Color (greyN 0.2) $ Translate x y $ rectangleSolid cellSize cellSize
| x <- [-400, -400 + cellSize .. 400]
, y <- [-400, -400 + cellSize .. 400] ]
in picGrid
-- | Draw the segments in the world.
drawWorld :: World -> Picture
drawWorld world
= drawSegments
$ worldSegments world
-- | Draw an array of segments.
drawSegments :: V.Vector Segment -> Picture
drawSegments segments
= Pictures
$ map drawSegment
$ V.toList
$ segments
-- | Draw a single segment.
drawSegment :: Segment -> Picture
drawSegment (_, (x1, y1), (x2, y2))
= Line [(f x1, f y1), (f x2, f y2)]
where f = fromRational . toRational
| gscalzo/HaskellTheHardWay | gloss-try/gloss-master/gloss-examples/picture/Visibility/Draw.hs | mit | 2,703 | 97 | 15 | 601 | 936 | 494 | 442 | 85 | 2 |
-- | Configuration for Hablog
{-# LANGUAGE OverloadedStrings #-}
module Web.Hablog.Config where
import Data.Text.Lazy (Text)
-- | Data type to set the theme for your Hablog blog
data Theme = Theme
{ bgTheme :: FilePath -- ^ General theme for Hablog; a file path to a CSS file
, codeTheme :: FilePath -- ^ Theme for code; a file path to a highlight.js CSS file
}
deriving (Show, Read)
-- | Configuration for Hablog
data Config = Config
{ blogTitle :: Text
, blogTheme :: Theme
, blogDomain :: Text
, blogPort :: Int
}
deriving (Show, Read)
-- | A default configuration
defaultConfig :: Config
defaultConfig = Config
{ blogTitle = defaultTitle
, blogTheme = snd defaultTheme
, blogDomain = defaultDomain
, blogPort = defaultPort
}
-- | "Hablog"
defaultTitle :: Text
defaultTitle = "Hablog"
defaultDomain :: Text
defaultDomain = "localhost"
-- | The default HTTP port is 80
defaultPort :: Int
defaultPort = 80
-- | The default is the dark theme
defaultTheme :: (String, Theme)
defaultTheme = ("dark", darkTheme)
darkTheme :: Theme
darkTheme = Theme "/static/css/dark.css" "/static/highlight/styles/hybrid.css"
lightTheme :: Theme
lightTheme = Theme "/static/css/light.css" "/static/highlight/styles/docco.css"
themes :: [(String, Theme)]
themes =
[ ("dark", darkTheme)
, ("light", lightTheme)
]
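-- An example configuration (a sketch):
--
-- > myConfig :: Config
-- > myConfig = defaultConfig
-- >   { blogTitle = "My Blog"
-- >   , blogTheme = lightTheme
-- >   , blogPort = 8000
-- >   }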
| soupi/hablog | src/Web/Hablog/Config.hs | mit | 1,349 | 0 | 8 | 256 | 262 | 164 | 98 | 35 | 1 |
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
module Model
where
import Database.Persist()
import Database.Persist.TH
share [mkPersist sqlSettings, mkMigrate "migrateAll"] [persistLowerCase|
Person
name String
age Int Maybe
deriving Show
BlogPost
title String
authorId PersonId
deriving Show
|]
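-- A usage sketch (illustrative; 'runSqlite' comes from persistent-sqlite):
--
-- > main :: IO ()
-- > main = runSqlite ":memory:" $ do
-- >   runMigration migrateAll
-- >   johnId <- insert $ Person "John Doe" (Just 35)
-- >   insert_ $ BlogPost "My first post" johnId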
| parsonsmatt/scotty-persistent-example | Model.hs | mit | 697 | 0 | 7 | 218 | 51 | 34 | 17 | 13 | 0 |
{-# LANGUAGE DeriveDataTypeable, ScopedTypeVariables #-}
module Contas.Couch where
import Control.Monad
import Data.Data (Data, Typeable)
import Database.CouchDB
import Text.JSON
import Text.JSON.Generic (toJSON, fromJSON, decodeJSON)
data Item = Item { blob, ref :: String }
deriving (Eq, Show, Data, Typeable)
contas = db "contas"
-- Transform a JSON string into an Item
decodeItem :: String -> Item
decodeItem = decodeJSON
-- Get the whole document data
getRaw :: String -> IO (Maybe String)
getRaw ref = do
raw <- runCouchDB' $ getDocRaw contas $ doc ref
return raw
-- Get the whole document data as an Item
getItem :: String -> IO (Maybe Item)
getItem ref = do
raw <- getRaw ref
return $ liftM decodeItem $ raw
-- Get the ref field of the current doc
getCurrentRef :: IO (Maybe String)
getCurrentRef = do
item <- getItem "current"
return $ liftM ref $ item
-- Change the "current" doc ref to newRef
updateCurrent :: String -> IO ()
updateCurrent newRef = do
theDoc <- runCouchDB' $ getDoc contas $ doc "current"
let Just (doc1, rev1, json1 :: JSValue) = theDoc
let (Ok (item1 :: Item)) = fromJSON json1
let json2 = toJSON item1 { ref = newRef }
Just (doc2, rev2) <- runCouchDB' $ updateDoc contas (doc1, rev1) json2
return ()
-- Add a new doc with the given blob.
-- The new doc points to the previous current doc, and "current" now points
-- to the new doc.
addNewBlob :: String -> IO ()
addNewBlob newBlob = do
Just current <- getCurrentRef
let item = Item newBlob current
(doc1, rev1) <- runCouchDB' $ newDoc contas $ toJSON item
updateCurrent $ show doc1
return ()
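-- A usage sketch (illustrative; assumes the "contas" database and a "current"
-- document already exist):
--
-- > main :: IO ()
-- > main = do
-- >   addNewBlob "first blob"
-- >   item <- getItem "current"
-- >   print item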
| lessandro/contas | src/etc/Couch.hs | mit | 1,649 | 3 | 15 | 356 | 494 | 253 | 241 | 39 | 1 |
module Example where
import MicroKanren.Plain
import MicroKanren.Cons
emptyS ∷ SC α
emptyS = ([], 0)
aANDb ∷ Goal Int
aANDb = conj
(callFresh (\a -> a === LVal 7))
(callFresh (\b -> disj (b === LVal 5) (b === LVal 6)))
ex1, ex2 ∷ [SC Int]
ex1 = callFresh (\q -> q === LVal 5) emptyS
ex2 = aANDb emptyS
fives, sixes ∷ LVar Int -> Goal Int
fives x = disj (x === LVal 5) (fives x)
sixes x = disj (x === LVal 6) (sixes x)
runFives, run5and6 ∷ [SC Int]
runFives = callFresh fives emptyS
run5and6 = callFresh (\x -> disj (fives x) (sixes x)) emptyS
runCons ∷ [SC (LCons Int)]
runCons = callFresh
(\x -> disj (x === LVal (fromList [2])) (x === LVal empty))
emptyS
| Oregu/featherweight | Example.hs | mit | 705 | 0 | 14 | 165 | 361 | 190 | 171 | -1 | -1 |
{-# htermination (==) :: Char -> Char -> Bool #-}
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/full_haskell/Prelude_EQEQ_3.hs | mit | 50 | 0 | 2 | 10 | 3 | 2 | 1 | 1 | 0 |
{- arch-tag: CSV tests main file
Copyright (C) 2005-2011 John Goerzen <[email protected]>
All rights reserved.
For license and copyright information, see the file LICENSE
-}
module Str.CSVtest(tests) where
import Test.HUnit
import Data.CSV
import Text.ParserCombinators.Parsec
test_csv =
let f inp exp = TestLabel inp $ TestCase $
exp @=? case parse csvFile "" inp of
Right x -> Right x
Left y -> Left (show y)
in [
f "" (Right []),
f "\n" (Right [[""]]),
f "1,2,3\n" (Right [["1", "2", "3"]]),
f "This is a,Test,Really\n" (Right [["This is a", "Test", "Really"]]),
f "l1\nl2\n" (Right [["l1"], ["l2"]]),
f "NQ,\"Quoted\"\n" (Right [["NQ", "Quoted"]]),
f "1Q,\"\"\"\"\n" (Right [["1Q", "\""]]),
f ",\"\"\n" (Right [["", ""]]),
f "\"Embedded\"\"Quote\"\n" (Right [["Embedded\"Quote"]])
]
tests = TestList [TestLabel "csv" (TestList test_csv)]
| haskellbr/missingh | missingh-all/testsrc/Str/CSVtest.hs | mit | 1,034 | 0 | 15 | 311 | 330 | 178 | 152 | 20 | 2 |
{-# htermination minusFM :: (Ord a, Ord k) => FiniteMap (a,k) b1 -> FiniteMap (a,k) b2 -> FiniteMap (a,k) b1 #-}
import FiniteMap
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/full_haskell/FiniteMap_minusFM_12.hs | mit | 130 | 0 | 3 | 23 | 5 | 3 | 2 | 1 | 0 |
module Chorale.Test.Common (
tests) where
import Chorale.Common
import Chorale.Test
import Data.List
import Safe
import Test.Framework hiding (testGroup)
import Test.Framework.Providers.HUnit
import Test.Framework.Providers.QuickCheck2
import Test.HUnit hiding (Test)
import Test.QuickCheck.Property
-- --== Tests
tests :: [Test]
tests = [testGroupBool, testGroupCommon]
-- --== FBM.Common.Bool
testGroupBool :: Test
testGroupBool = testGroup "FBM.Common.Bool" [
testCase "xor 1" caseXor1,
testCase "xor 2" caseXor2,
testCase "xor 3" caseXor3,
testCase "xor 4" caseXor4,
testCase "xnor 1" caseXnor1,
testCase "xnor 2" caseXnor2,
testCase "xnor 3" caseXnor3,
testCase "xnor 4" caseXnor4
]
caseXor1 :: Assertion
caseXor1 = assertEqualBool "" False (xor True True)
caseXor2 :: Assertion
caseXor2 = assertEqualBool "" False (xor False False)
caseXor3 :: Assertion
caseXor3 = assertEqualBool "" True (xor True False)
caseXor4 :: Assertion
caseXor4 = assertEqualBool "" True (xor False True)
caseXnor1 :: Assertion
caseXnor1 = assertEqualBool "" True (xnor True True)
caseXnor2 :: Assertion
caseXnor2 = assertEqualBool "" True (xnor False False)
caseXnor3 :: Assertion
caseXnor3 = assertEqualBool "" False (xnor True False)
caseXnor4 :: Assertion
caseXnor4 = assertEqualBool "" False (xnor False True)
-- --== FBM.Common.Common
testGroupCommon :: Test
testGroupCommon = testGroup "FBM.Common.Common" [
testProperty "lookupBy" propLookupBy,
testProperty "takeWhileList" propTakeWhileList,
testProperty "takeToFirst" propTakeToFirst,
testProperty "splitOnFirst" propSplitOnFirst,
testProperty "nubOrd" propNubOrd,
testProperty "nubOrdBy" propNubOrdBy,
testProperty "sublistByIndex" propSublistByIndex,
testProperty "replaceInList 1" propReplaceInList1,
testProperty "replaceInList 2" propReplaceInList2,
testProperty "replaceElementInList" propReplaceElementInList,
testProperty "applyToList 1" propApplyToList1,
testProperty "applyToList 2" propApplyToList2,
testProperty "applyToList 3" propApplyToList3,
testProperty "applyToList 4" propApplyToList4
]
-- | @lookupBy f b xs@ finds the element @x@ which satisfies @f x = b@
propLookupBy :: Int -> Int -> [Int] -> Bool
propLookupBy fn b xs = let
l = lookupBy f b xs
f = (* fn)
in case l of
Just l' -> f l' == b
Nothing -> b `notElem` map f xs
-- | @takeWhile p == takeWhileList (p . last)@
propTakeWhileList :: Int -> Int -> [Int] -> Property
propTakeWhileList i j xs = (0 /= i && j < i) ==> takeWhile p xs == takeWhileList (p . last) xs where
p x = x `mod` i == j
-- | @takeWhile (not . p) = init . takeToFirst $ p@ (apart from special cases)
propTakeToFirst :: Int -> Int -> [Int] -> Property
propTakeToFirst i j xs = (0 /= i && j < i && (not . null) xs) ==> takeWhile (not . p) xs == (l . takeToFirst p) xs where
p x = x `mod` i == j
l
| any p xs = init
| otherwise = id
-- | @((\(as, bs) -> as ++ maybe [] ([x] ++) bs) . splitOnFirst x) xs == xs@
propSplitOnFirst :: Int -> [Int] -> Bool
propSplitOnFirst x xs = ((\(as, bs) -> as ++ maybe [] ([x] ++) bs) . splitOnFirst x) xs == xs
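-- For example (a sketch): @splitOnFirst 3 [1,2,3,4,3]@ is expected to give
-- @([1,2], Just [4,3])@ and @splitOnFirst 9 [1,2]@ to give @([1,2], Nothing)@,
-- so gluing the pieces back together recovers the original list.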
-- | @nubOrd == nub@
propNubOrd :: [Int] -> Bool
propNubOrd = uncurry (==) . map21 (nubOrd, nub)
-- | @nubOrdBy f = nubBy (equaling f)@
propNubOrdBy :: Int -> [(Int, Int)] -> Bool
propNubOrdBy i = uncurry (==) . map21 (nubOrdBy fst, nubBy $ equaling fst)
-- | @map21 (head, last) . sublistByIndex (i, j) $ [0..] == (i, j)@
propSublistByIndex :: Int -> Int -> Property
propSublistByIndex i j = i >= 0 && j > i ==> (map21 (head, last) . sublistByIndex (i, j)) [0..] == (i, j)
-- | @replaceInList i [xs!!i] xs == xs@
propReplaceInList1 :: [Int] -> Int -> Property
propReplaceInList1 xs j = length xs > 0 ==> replaceInList i [xs!!i] xs == xs where
i = j `mod` length xs
-- | @replaceInList i []@ just removes the i-th element
propReplaceInList2 :: [Int] -> Int -> Property
propReplaceInList2 xs j = (not . null) xs ==> replaceInList i [] xs == (uncurry (++) . mapSnd tail . splitAt i) xs where
i = abs j `mod` length xs
-- | ensure that @replaceElementInList (xs!!i)@ and @replaceInList i@ are the same
propReplaceElementInList :: [Int] -> Int -> [Int] -> Property
propReplaceElementInList xs j as = (not . null) xs && nub xs == xs ==> replaceElementInList (xs!!i) as xs == replaceInList i as xs where
i = abs j `mod` length xs
-- | ensure that the length of the list stays the same
propApplyToList1 :: Int -> Int -> [Int] -> Property
propApplyToList1 n m xs = (0 <= n && n < length xs) ==> length (applyToList n (* m) xs) == length xs
-- | ensure that the identity does not change the list
propApplyToList2 :: Int -> [Int] -> Property
propApplyToList2 n xs = (0 <= n && n < length xs) ==> applyToList n id xs == xs
-- | ensure that the function is applied to the list
propApplyToList3 :: Int -> Int -> [Int] -> Property
propApplyToList3 n m xs = (0 <= n && n < length xs) ==> ((`at` n) . applyToList n (* m)) xs == ((* m) . (`at` n)) xs
-- | ensure that the rest of the list is not changed
propApplyToList4 :: Int -> Int -> [Int] -> Property
propApplyToList4 n m xs = (0 <= n && n < length xs) ==> (replaceInList n [] . applyToList n (* m)) xs == replaceInList n [] xs
| mocnik-science/chorale | tests/Chorale/Test/Common.hs | mit | 5,380 | 0 | 14 | 1,155 | 1,696 | 901 | 795 | -1 | -1 |
{-# OPTIONS_HADDOCK show-extensions #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE Safe #-}
{-|
Module : MPD.Commands.Types
Copyright : (c) Joachim Fasting, 2014
License : MIT
Maintainer : [email protected]
Stability : unstable
Portability : unportable
Encodings for MPD protocol objects.
-}
module MPD.Commands.Types (
-- * Scalars
-- $scalar
Decibel
, Metadata(..)
, PlaybackState(..)
, Seconds
, SongId
, SongPos
, SubsystemName(..)
, Volume
, Path(..)
, Range
-- * Compound objects
-- $object
, LsEntry(..)
, LsEntryInfo(..)
, SongInfo(..)
, StatusInfo(..)
, StatsInfo(..)
, viewTag
) where
import MPD.Core.CommandArg
import Control.DeepSeq (NFData(..))
import Data.Maybe (fromJust)
import Data.Monoid (Sum(..), (<>))
import Data.String (IsString(..))
import Data.Data (Data, Typeable)
import Data.Time (UTCTime)
import Data.Text (Text)
import Data.ByteString.Char8 (ByteString)
import qualified Data.HashMap.Strict as M
import qualified Data.List as L
------------------------------------------------------------------------
-- $scalar
type Decibel = Double
type Seconds = Int
type SongId = Int
type SongPos = Int
type Volume = Int
newtype Path = Path { unPath :: Text } deriving (Show, Data, Typeable)
instance Monoid Path where
mempty = Path ""
p1 `mappend` p2 = Path (unPath p1 `mappend` unPath p2)
instance NFData Path where
rnf (Path x) = rnf x
instance IsString Path where
fromString = Path . fromString
instance CommandArg Path where
fromArg (Path x) = "\"" <> x <> "\""
newtype Range = Range (Int, Maybe Int) deriving (Show, Data, Typeable)
instance Monoid Range where
mempty = Range (0, Nothing)
Range (s1, e1) `mappend` Range (s2, e2) = Range (s1 + s2, e3)
where e3 = fmap getSum (fmap Sum e1 `mappend` fmap Sum e2)
instance NFData Range where
rnf (Range x) = rnf x
instance CommandArg Range where
fromArg (Range (a, b)) = fromArg a <> ":" <> fromArg b
data Metadata
= Artist
| ArtistSort
| Album
| AlbumArtist
| AlbumArtistSort
| Title
| Track
| Name
| Genre
| Date
| Composer
| Performer
| Disc
| MUSICBRAINZ_ARTISTID
| MUSICBRAINZ_ALBUMID
| MUSICBRAINZ_ALBUMARTISTID
| MUSICBRAINZ_TRACKID
deriving (Eq, Show, Read, Enum, Data, Typeable)
instance CommandArg Metadata where
fromArg = fromString . show
data PlaybackState
= PlaybackPlaying
| PlaybackStopped
| PlaybackPaused
deriving (Eq, Show, Read, Enum, Data, Typeable)
instance NFData PlaybackState where
rnf x = x `seq` ()
instance CommandArg PlaybackState where
fromArg x = case x of
PlaybackPlaying -> "play"
PlaybackStopped -> "stop"
PlaybackPaused -> "pause"
data SubsystemName
= Database
| Player
| Mixer
deriving (Eq, Show, Read, Enum, Data, Typeable)
instance CommandArg SubsystemName where
fromArg = fromJust . (`L.lookup` tbl)
where
tbl = [ (Database, "database")
, (Player, "player")
, (Mixer, "mixer")
]
------------------------------------------------------------------------
-- $object
data LsEntry
= LsFile !Path
| LsDir !Path
| LsPlaylist !Path
deriving (Show, Data, Typeable)
instance NFData LsEntry where
rnf (LsFile x) = rnf x
rnf (LsDir x) = rnf x
rnf (LsPlaylist x) = rnf x
data LsEntryInfo
= LsSongInfo !SongInfo
| LsDirInfo !Path !UTCTime
| LsPListInfo !Path !UTCTime
deriving (Show, Data, Typeable)
instance NFData LsEntryInfo where
rnf (LsSongInfo x) = rnf x
rnf (LsDirInfo x y) = rnf x `seq` rnf y
rnf (LsPListInfo x y) = rnf x `seq` rnf y
data StatsInfo = StatsInfo
{ statsArtists :: !Int
, statsAlbums :: !Int
, statsSongs :: !Int
, statsUptime :: !Int
, statsDbPlaytime :: !Int
, statsDbUpdate :: !Int
, statsPlaytime :: !Int
} deriving (Show, Data, Typeable)
instance NFData StatsInfo where
rnf x = rnf (statsArtists x) `seq`
rnf (statsAlbums x) `seq`
rnf (statsSongs x) `seq`
rnf (statsUptime x) `seq`
rnf (statsDbPlaytime x) `seq`
rnf (statsDbUpdate x) `seq`
rnf (statsPlaytime x)
data StatusInfo = StatusInfo
{ statusVolume :: !(Maybe Volume)
-- ^ 'Just' the current volume (0-100), or 'Nothing' if there is no mixer.
, statusRepeatEnabled :: !Bool
, statusRandomEnabled :: !Bool
, statusSingleEnabled :: !Bool
, statusConsumeEnabled :: !Bool
, statusPlaylistVersion :: !Int
, statusPlaylistLength :: !Int
, statusMixrampDb :: !Decibel
, statusPlaybackState :: !PlaybackState
, statusSongPos :: !(Maybe SongPos)
, statusSongId :: !(Maybe SongId)
, statusTime :: !(Maybe (Seconds, Seconds))
, statusElapsedTime :: !(Maybe Double)
, statusBitrate :: !(Maybe Int)
, statusAudio :: !(Maybe (Int, Int, Int))
, statusNextSongPos :: !(Maybe SongPos)
, statusNextSongId :: !(Maybe SongId)
} deriving (Show, Data, Typeable)
instance NFData StatusInfo where
rnf x = rnf (statusVolume x) `seq`
rnf (statusRepeatEnabled x) `seq`
rnf (statusRandomEnabled x) `seq`
rnf (statusSingleEnabled x) `seq`
rnf (statusConsumeEnabled x) `seq`
rnf (statusPlaylistVersion x) `seq`
rnf (statusPlaylistLength x) `seq`
rnf (statusMixrampDb x) `seq`
rnf (statusPlaybackState x) `seq`
rnf (statusSongPos x) `seq`
rnf (statusSongId x) `seq`
rnf (statusTime x) `seq`
rnf (statusElapsedTime x) `seq`
rnf (statusBitrate x) `seq`
rnf (statusAudio x) `seq`
rnf (statusNextSongPos x) `seq`
rnf (statusNextSongId x)
data SongInfo = SongInfo
{ songFile :: !Path
, songLastModified :: !UTCTime
, songTime :: !Seconds
, songTags :: !(M.HashMap ByteString Text)
, songPos :: !(Maybe SongPos)
, songId :: !(Maybe SongId)
} deriving (Show, Data, Typeable)
instance NFData SongInfo where
rnf x = rnf (songFile x) `seq`
rnf (songLastModified x) `seq`
rnf (songTime x) `seq`
rnf (songTags x) `seq`
rnf (songPos x) `seq`
rnf (songId x)
viewTag :: SongInfo -> ByteString -> Maybe Text
viewTag si l = M.lookup l (songTags si)
| joachifm/nanompd | src/MPD/Commands/Types.hs | mit | 6,245 | 0 | 24 | 1,464 | 2,016 | 1,118 | 898 | 264 | 1 |
module SpecHelper (module Helper) where
import Test.Hspec as Helper
import Parser.Lexer as Helper
import Parser.Types as Helper
import Parser.Syntax as Helper hiding (context)
| Jiggins/Final-Year-Project | test/SpecHelper.hs | mit | 183 | 0 | 5 | 30 | 44 | 31 | 13 | 5 | 0 |
{- ifl-haskell: "Implementing Functional Languages: a tutorial" in Haskell.
Copyright 2014 Nikita Karetnikov <[email protected]>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
-}
module Core.Parser.Tests (tests) where
import Test.HUnit (Test(..), (~=?))
import Core.Parser
import Core.Language
tests = TestList $
[ TestLabel "Exercise 1.9: Ignore comments" $
[(1,"foo"), (2,"bar")] ~=? (clex "foo||a comment\nbar" 1)
, TestLabel "Exercise 1.10: Recognize two-character operators" $
[ (1,"=="), (1,"~="), (1,"~"), (1,"~"), (1,">=")
, (1,"<="), (1,"->"), (1,"<"), (1,"-")
] ~=? (clex "== ~= ~~ >= <= -> <-" 1)
, TestLabel "Exercise 1.11: Attach a line number to each token" $
[(1,"foo"), (3,"bar")] ~=? (clex "foo \n\n bar" 1)
, TestLabel "'pLit': empty input" $
(Left . ParseError 1 "empty input" $ show "foo") ~=? (pLit "foo" [])
, TestLabel "'pLit': matching string" $
Right ("foo", [(1,"bar")]) ~=? (pLit "foo" $ [(1,"foo"), (1,"bar")])
, TestLabel "'pLit': failure to parse" $
(Left . ParseError 1 (show "hello") $ show "goodbye") ~=?
(pLit "goodbye" [(1,"hello"), (1,"rest")])
, TestLabel "'pVar': variable" $
Right ("foo", [(2,"bar")]) ~=? (pVar [(1,"foo"), (2,"bar")])
, TestLabel "'pVar': not a variable" $
(Left $ ParseError 1 (show "1") "a variable") ~=?
(pVar [(1,"1"), (1,"foo")])
, TestLabel "Exercise 1.17: 'pVar': ignore keywords" $
(Left $ ParseError 1 (show "let") "a variable") ~=?
(pVar [(1,"let"), (1,"!")])
, TestLabel "Exercise 1.18: 'pNum': integer" $
Right (4242, [(1,"!")]) ~=? (pNum [(1,"4242"), (1,"!")])
, TestLabel "Exercise 1.18: 'pNum': integer" $
(Left $ ParseError 1 "empty input" "an integer") ~=? (pNum [])
, TestLabel "Exercise 1.18: 'pNum': parse failure" $
(Left $ ParseError 1 (show "42.42") "an integer") ~=?
(pNum [(1,"42.42"), (1,"!")])
, TestLabel "'pAlt': first match" $
Right ("hello", [(1,"rest")]) ~=?
(pHelloOrGoodbye [(1,"hello"), (1,"rest")])
, TestLabel "'pAlt': second match" $
Right ("goodbye", [(1,"rest")]) ~=?
(pHelloOrGoodbye [(1,"goodbye"), (1,"rest")])
, TestLabel "'pAlt': parse failure" $
(Left . ParseError 1 (show "foo") $
(show "hello") ++ " or " ++ (show "goodbye")) ~=?
(pHelloOrGoodbye [(1,"foo"), (1,"rest")])
, TestLabel "'pThen': both parsers succeed" $
Right (("hello","James"), [(1,"!")]) ~=?
(pGreeting [(1,"hello"), (1,"James"), (1,"!")])
, TestLabel "'pThen': first parser fails" $
(Left . ParseError 1 (show "hi") $
show "hello" ++ " or " ++ show "goodbye") ~=?
(pGreeting [(1,"hi"), (1,"James"), (1,"!")])
, TestLabel "'pThen': second parser fails" $
(Left $ ParseError 1 (show "42") "a variable") ~=?
(pGreeting [(1,"hello"), (1,"42"), (1,"!")])
, TestLabel "'pThen': both parsers fail" $
(Left . ParseError 1 (show "hi") $
show "hello" ++ " or " ++ show "goodbye") ~=?
(pGreeting [(1,"hi"), (1,"42"), (1,"!")])
, TestLabel "Exercise 1.12: 'pThen3': all succeed" $
Right (("hello","James"), []) ~=?
(pGreeting3 [(1,"hello"), (1,"James"), (1,"!")])
, TestLabel "'pThen3': first parser fails" $
(Left . ParseError 1 (show "hi") $
show "hello" ++ " or " ++ show "goodbye") ~=?
(pGreeting3 [(1,"hi"), (1,"James"), (1,"!")])
, TestLabel "'pThen3': second parser fails" $
(Left $ ParseError 1 (show "42") "a variable") ~=?
(pGreeting3 [(1,"hello"), (1,"42"), (1,"!")])
, TestLabel "'pThen3': third parser fails" $
(Left $ ParseError 1 (show "?") (show "!")) ~=?
(pGreeting3 [(1,"hello"), (1,"James"), (1,"?")])
, TestLabel "Exercise 1.12: 'pThen4': all succeed" $
Right (("foobarbazqux"),[(1, "quux")]) ~=?
(let combine a b c d = a ++ b ++ c ++ d
in pThen4 combine (pLit "foo") (pLit "bar") (pLit "baz") (pLit "qux")
[(1, "foo"), (1, "bar"), (1, "baz"), (1, "qux"), (1, "quux")])
, TestLabel "'pThen4': first parser fails" $
(Left $ ParseError 1 (show "notfoo") (show "foo")) ~=?
(let combine a b c d = a ++ b ++ c ++ d
in pThen4 combine (pLit "foo") (pLit "bar") (pLit "baz") (pLit "qux")
[(1, "notfoo"), (1, "bar"), (1, "baz"), (1, "qux"), (1, "quux")])
, TestLabel "'pThen4': second parser fails" $
(Left $ ParseError 1 (show "notbar") (show "bar")) ~=?
(let combine a b c d = a ++ b ++ c ++ d
in pThen4 combine (pLit "foo") (pLit "bar") (pLit "baz") (pLit "qux")
[(1, "foo"), (1, "notbar"), (1, "baz"), (1, "qux"), (1, "quux")])
, TestLabel "'pThen4': third parser fails" $
(Left $ ParseError 1 (show "notbaz") (show "baz")) ~=?
(let combine a b c d = a ++ b ++ c ++ d
in pThen4 combine (pLit "foo") (pLit "bar") (pLit "baz") (pLit "qux")
[(1, "foo"), (1, "bar"), (1, "notbaz"), (1, "qux"), (1, "quux")])
, TestLabel "'pThen4': fourth parser fails" $
(Left $ ParseError 1 (show "notqux") (show "qux")) ~=?
(let combine a b c d = a ++ b ++ c ++ d
in pThen4 combine (pLit "foo") (pLit "bar") (pLit "baz") (pLit "qux")
[(1, "foo"), (1, "bar"), (1, "baz"), (1, "notqux"), (1, "quux")])
, TestLabel "Exercise 1.13: 'pEmpty'" $
Right ("foo",[(1,"rest")]) ~=?
(pEmpty "foo" [(1,"rest")])
, TestLabel "Exercise 1.13: 'pZeroOrMore': zero" $
Right ([],[(1,"foo")]) ~=? (pGreetings [(1,"foo")])
, TestLabel "Exercise 1.13: 'pZeroOrMore': one" $
Right ([("hello","James")], [(1,"!")]) ~=?
(pGreetings [(1,"hello"), (1, "James"), (1,"!")])
, TestLabel "Exercise 1.13: 'pZeroOrMore': more" $
Right ( [("hello","James"), ("goodbye","James"), ("hello","James")]
, [(1,"!")]
) ~=?
(pGreetings [ (1,"hello"), (1, "James"), (1, "goodbye"), (1, "James")
, (1, "hello"), (1, "James"), (1, "!")
])
, TestLabel "Exercise 1.13: 'pOneOrMore': one" $
Right ([("hello", "James")], [(1,"!")]) ~=?
(pGreetings1 [(1,"hello"), (1, "James"), (1,"!")])
, TestLabel "Exercise 1.13: 'pOneOrMore': more" $
Right ( [("hello","James"), ("hello","James"), ("goodbye","James")]
, [(1,"!")]
) ~=?
(pGreetings1 [ (1,"hello"), (1, "James"), (1, "hello"), (1, "James")
, (1, "goodbye"), (1, "James"), (1, "!")
])
, TestLabel "Exercise 1.13: 'pOneOrMore': zero" $
(Left . ParseError 1 (show "!") $
show "hello" ++ " or " ++ show "goodbye") ~=?
(pGreetings1 [(1,"!")])
, TestLabel "Exercise 1.14: 'pApply': match" $
Right (3, [(1,"!")]) ~=?
(pGreetingsN [ (1,"hello"), (1,"James"), (1,"hello"), (1,"James")
, (1,"goodbye"), (1,"James"), (1,"!")
])
, TestLabel "Exercise 1.14: 'pApply': failure" $
(Left . ParseError 1 (show "!") $
show "hello" ++ " or " ++ show "goodbye") ~=?
(pGreetingsN [(1,"!")])
, TestLabel "Exercise 1.15: 'pOneOrMoreWithSep': one match" $
Right (["a"], [(1,"!")]) ~=?
pOneOrMoreWithSep (pLit "a") (pLit "b") [(1,"a"), (1,"!")]
, TestLabel "Exercise 1.15: 'pOneOrMoreWithSep': separator" $
Right (["a", "a"], [(1,"!")]) ~=?
pOneOrMoreWithSep (pLit "a") (pLit "b")
[(1,"a"), (1,"b"), (1,"a"), (1,"!")]
, TestLabel "Exercise 1.15: 'pOneOrMoreWithSep': no match" $
(Left $ ParseError 1 (show "!") (show "a")) ~=?
pOneOrMoreWithSep (pLit "a") (pLit "b") [(1,"!")]
, TestLabel "Exercise 1.15: 'pOneOrMoreWithSep': no separator" $
Right (["a"], [(1,"a"),(1,"!")]) ~=?
pOneOrMoreWithSep (pLit "a") (pLit "b") [(1, "a"), (1,"a"), (1,"!")]
, TestLabel "Exercise 1.21: 'parse'" $
[ ("f", [], ENum 3)
, ("g", ["x","y"], ELet False [("z", EVar "x")] (EVar "z"))
, ("h", ["x"], ECase (ELet False [("y", EVar "x")] (EVar "y"))
[ (1, [], ENum 2)
, (2, [], ENum 5)
])
] ~=?
(parse $ "f = 3 ;\n"
++ "g x y = let z = x in z ;\n"
++ "h x = case (let y = x in y) of\n"
++ " <1> -> 2 ;\n"
++ " <2> -> 5")
, TestLabel "Exercise 1.22: dangling else" $
[("f", ["x","y"], ECase (EVar "x")
[(1, [], ECase (EVar "y")
[ (1, [], ENum 1)
, (2, [], ENum 2)
])])] ~=?
(parse $ "f x y = case x of\n"
++ " <1> -> case y of\n"
++ " <1> -> 1;\n"
++ " <2> -> 2")
, TestLabel "Exercise 1.23: application" $
[ ("foo", ["f","x","y","z"], EAp (EAp (EAp (EVar "f")
(EVar "x"))
(EVar "y"))
(EVar "z"))
, ("bar", ["f","g","x","y","z"], EAp (EAp (EVar "f")
(EAp (EAp (EVar "g")
(EVar "x"))
(EVar "y")))
(EVar "z"))
, ("baz", ["f","g","x","y","z"], EAp (EAp (EVar "f")
(EVar "x"))
(EAp (EAp (EVar "g")
(EVar "y"))
(EVar "z")))
] ~=?
(parse $ "foo f x y z = f x y z;\n"
++ "bar f g x y z = f (g x y) z;\n"
++ "baz f g x y z = f x (g y z)")
, TestLabel "Exercise 1.24: infix operators" $
[("foo", ["x","y","p","xs"], EAp (EAp (EVar ">")
(EAp (EAp (EVar "+")
(EVar "x"))
(EVar "y")))
(EAp (EAp (EVar "*")
(EVar "p"))
(EAp (EVar "length")
(EVar "xs"))))
] ~=?
(parse "foo x y p xs = x + y > p * length xs")
]
pHelloOrGoodbye :: Parser String
pHelloOrGoodbye = (pLit "hello") `pAlt` (pLit "goodbye")
pGreeting :: Parser (String, String)
pGreeting = pThen (,) pHelloOrGoodbye pVar
pGreeting3 :: Parser (String, String)
pGreeting3 = pThen3 combine pHelloOrGoodbye pVar (pLit "!")
where
combine hg name exclamation = (hg, name)
pGreetings :: Parser [(String, String)]
pGreetings = pZeroOrMore pGreeting
pGreetings1 :: Parser [(String, String)]
pGreetings1 = pOneOrMore pGreeting
pGreetingsN :: Parser Int
pGreetingsN = pGreetings1 `pApply` length | binesiyu/ifl | src/Core/Parser/Tests.hs | mit | 11,615 | 0 | 19 | 3,771 | 4,241 | 2,396 | 1,845 | 210 | 1 |
----------------------------------------------
-- CIS 194, Homework 5
-- Author: Glenn R. Fisher
-- Date: April 9, 2016
----------------------------------------------
{-# LANGUAGE TypeSynonymInstances, FlexibleInstances #-}
import ExprT
import Parser
import StackVM
import qualified Data.Map as M
-- Evaluate an expression.
--
-- > eval (Mul (Add (Lit 2) (Lit 3)) (Lit 4)) == 20
eval :: ExprT -> Integer
eval (ExprT.Lit x) = x
eval (ExprT.Add x y) = eval x + eval y
eval (ExprT.Mul x y) = eval x * eval y
-- Parse and evaluate a string expression.
--
-- > evalStr "(2+3)*4" == Just 20
-- > evalStr "(2+3)*" == Nothing
evalStr :: String -> Maybe Integer
evalStr str =
case parseExp ExprT.Lit ExprT.Add ExprT.Mul str of
Just exp -> Just (eval exp)
Nothing -> Nothing
-- Abstract the calculator operations
-- to operate on various types of data.
class Expr a where
lit :: Integer -> a
add :: a -> a -> a
mul :: a -> a -> a
-- Support calculator operations with ExprT type.
instance Expr ExprT where
lit x = ExprT.Lit x
add x y = ExprT.Add x y
mul x y = ExprT.Mul x y
-- Support calculator operations with Integer type.
instance Expr Integer where
lit x = x
add x y = x + y
mul x y = x * y
-- Support calculator operations with Bool type.
-- * Literals less than 0 are interpreted as False.
-- * All positive Integers are interpreted as True.
-- * Addition is logical or.
-- * Multiplication is logical and.
instance Expr Bool where
lit x = x > 0
add x y = x || y
mul x y = x && y
-- Support calculator operations with MinMax type.
-- * Addition is taken to be the max function.
-- * Multiplication is taken to be the min function.
newtype MinMax = MinMax Integer deriving (Eq, Show)
instance Expr MinMax where
lit x = MinMax x
add (MinMax x) (MinMax y) = MinMax (max x y)
mul (MinMax x) (MinMax y) = MinMax (min x y)
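-- For example (illustrative):
--
-- > add (lit 2) (lit 5) == MinMax 5
-- > mul (lit 2) (lit 5) == MinMax 2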
-- Support calculator operations with Mod7 type.
-- * All values are in the range 0...6.
-- * All arithmetic is done modulo 7 (e.g. 5 + 3 = 1).
newtype Mod7 = Mod7 Integer deriving (Eq, Show)
instance Expr Mod7 where
lit x = Mod7 (x `mod` 7)
add (Mod7 x) (Mod7 y) = Mod7 ((x + y) `mod` 7)
mul (Mod7 x) (Mod7 y) = Mod7 ((x * y) `mod` 7)
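-- For example (illustrative):
--
-- > add (lit 5) (lit 3) == Mod7 1
-- > mul (lit 4) (lit 3) == Mod7 5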
-- Test and demonstrate the calculator operations.
testExp :: Expr a => Maybe a
testExp = parseExp lit add mul "(3 * -4) + 5"
testInteger = testExp :: Maybe Integer
testBool = testExp :: Maybe Bool
testMM = testExp :: Maybe MinMax
testSat = testExp :: Maybe Mod7
-- Support calculator operations with StackVM.Program type.
instance Expr StackVM.Program where
lit x = [StackVM.PushI x]
add x y = x ++ y ++ [StackVM.Add]
mul x y = x ++ y ++ [StackVM.Mul]
-- Compile a program by converting a string with arithmetic
-- expressions to instructions for the custom stack-based CPU.
--
-- > compile "2*3+1" == Just [PushI 2,PushI 3,PushI 1,Add,Mul]
compile :: String -> Maybe Program
compile = parseExp lit add mul
-- Types that are instances of `HasVars`
-- have some notion of named variables.
class HasVars a where
var :: String -> a
-- VarExprT is similar to ExprT, but with an
-- additional constructor to support variables.
data VarExprT = VLit Integer
| VVar String
| VAdd VarExprT VarExprT
| VMul VarExprT VarExprT
deriving (Show, Eq)
-- Support calculator operations with VarExprT type.
instance Expr VarExprT where
lit x = VLit x
add x y = VAdd x y
mul x y = VMul x y
-- Support named variable operations with VarExprT type.
instance HasVars VarExprT where
var x = VVar x
-- Support named variable operations with mappings.
-- In other words, variables can be interpreted as functions
-- from a mapping of variables to Integer values to (possibly)
-- Integer values.
instance HasVars (M.Map String Integer -> Maybe Integer) where
var = M.lookup
-- Support calculator operations with mappings.
-- In other words, functions from a mapping of variables to
-- Integer values to (possibly) Integer values can be
-- interpreted as expressions (by passing along the
-- mapping to subexpressions and combining results
-- appropriately).
instance Expr (M.Map String Integer -> Maybe Integer) where
lit x = \_ -> Just x
add f g = \map -> (+) <$> (f map) <*> (g map)
mul f g = \map -> (*) <$> (f map) <*> (g map)
-- Evaluate an expression using a mapping from variable
-- names to their integer values.
--
-- > withVars [("x", 6)] $ add (lit 3) (var "x") == Just 9
-- > withVars [("x", 6)] $ add (lit 3) (var "y") == Nothing
withVars :: [(String, Integer)]
-> (M.Map String Integer -> Maybe Integer)
-> (Maybe Integer)
withVars vs exp = exp $ M.fromList vs
| glennrfisher/cis194-haskell | 05 Type Classes/Calc.hs | mit | 4,775 | 31 | 10 | 1,119 | 1,177 | 620 | 557 | 75 | 2 |
module GHCJS.DOM.SVGFEOffsetElement (
) where
| manyoo/ghcjs-dom | ghcjs-dom-webkit/src/GHCJS/DOM/SVGFEOffsetElement.hs | mit | 48 | 0 | 3 | 7 | 10 | 7 | 3 | 1 | 0 |
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeSynonymInstances #-}
module PostgREST.Error (
apiRequestError
, pgError
, simpleError
, singularityError
, binaryFieldError
, connectionLostError
, encodeError
) where
import Protolude
import Data.Aeson ((.=))
import qualified Data.Aeson as JSON
import Data.Text (unwords)
import qualified Hasql.Pool as P
import qualified Hasql.Session as H
import qualified Network.HTTP.Types.Status as HT
import Network.Wai (Response, responseLBS)
import PostgREST.Types
apiRequestError :: ApiRequestError -> Response
apiRequestError err = errorResponse status err
where
status =
case err of
ActionInappropriate -> HT.status405
UnsupportedVerb -> HT.status405
InvalidBody _ -> HT.status400
ParseRequestError _ _ -> HT.status400
NoRelationBetween _ _ -> HT.status400
InvalidRange -> HT.status416
UnknownRelation -> HT.status404
simpleError :: HT.Status -> Text -> Response
simpleError status message =
errorResponse status $ JSON.object ["message" .= message]
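-- For instance (illustrative): @simpleError HT.status404 "Not found"@ yields a
-- 404 response with an application/json content type and the body
-- {"message":"Not found"}.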
errorResponse :: JSON.ToJSON a => HT.Status -> a -> Response
errorResponse status e =
responseLBS status [toHeader CTApplicationJSON] $ encodeError e
pgError :: Bool -> P.UsageError -> Response
pgError authed e =
let status = httpStatus authed e
jsonType = toHeader CTApplicationJSON
wwwAuth = ("WWW-Authenticate", "Bearer")
hdrs = if status == HT.status401
then [jsonType, wwwAuth]
else [jsonType] in
responseLBS status hdrs (encodeError e)
singularityError :: Integer -> Response
singularityError numRows =
responseLBS HT.status406
[toHeader CTSingularJSON]
$ toS . formatGeneralError
"JSON object requested, multiple (or no) rows returned"
$ unwords
[ "Results contain", show numRows, "rows,"
, toS (toMime CTSingularJSON), "requires 1 row"
]
where
formatGeneralError :: Text -> Text -> Text
formatGeneralError message details = toS . JSON.encode $
JSON.object ["message" .= message, "details" .= details]
binaryFieldError :: Response
binaryFieldError =
simpleError HT.status406 (toS (toMime CTOctetStream) <>
" requested but a single column was not selected")
connectionLostError :: Response
connectionLostError =
simpleError HT.status503 "Database connection lost, retrying the connection."
encodeError :: JSON.ToJSON a => a -> LByteString
encodeError = JSON.encode
instance JSON.ToJSON ApiRequestError where
toJSON (ParseRequestError message details) = JSON.object [
"message" .= message, "details" .= details]
toJSON ActionInappropriate = JSON.object [
"message" .= ("Bad Request" :: Text)]
toJSON (InvalidBody errorMessage) = JSON.object [
"message" .= (toS errorMessage :: Text)]
toJSON InvalidRange = JSON.object [
"message" .= ("HTTP Range error" :: Text)]
toJSON UnknownRelation = JSON.object [
"message" .= ("Unknown relation" :: Text)]
toJSON (NoRelationBetween parent child) = JSON.object [
"message" .= ("Could not find foreign keys between these entities, No relation found between " <> parent <> " and " <> child :: Text)]
toJSON UnsupportedVerb = JSON.object [
"message" .= ("Unsupported HTTP verb" :: Text)]
instance JSON.ToJSON P.UsageError where
toJSON (P.ConnectionError e) = JSON.object [
"code" .= ("" :: Text),
"message" .= ("Database connection error" :: Text),
"details" .= (toS $ fromMaybe "" e :: Text)]
toJSON (P.SessionError e) = JSON.toJSON e -- H.Error
instance JSON.ToJSON H.Error where
toJSON (H.ResultError (H.ServerError c m d h)) = JSON.object [
"code" .= (toS c::Text),
"message" .= (toS m::Text),
"details" .= (fmap toS d::Maybe Text),
"hint" .= (fmap toS h::Maybe Text)]
toJSON (H.ResultError (H.UnexpectedResult m)) = JSON.object [
"message" .= (m::Text)]
toJSON (H.ResultError (H.RowError i H.EndOfInput)) = JSON.object [
"message" .= ("Row error: end of input"::Text),
"details" .=
("Attempt to parse more columns than there are in the result"::Text),
"details" .= (("Row number " <> show i)::Text)]
toJSON (H.ResultError (H.RowError i H.UnexpectedNull)) = JSON.object [
"message" .= ("Row error: unexpected null"::Text),
"details" .= ("Attempt to parse a NULL as some value."::Text),
"details" .= (("Row number " <> show i)::Text)]
toJSON (H.ResultError (H.RowError i (H.ValueError d))) = JSON.object [
"message" .= ("Row error: Wrong value parser used"::Text),
"details" .= d,
"details" .= (("Row number " <> show i)::Text)]
toJSON (H.ResultError (H.UnexpectedAmountOfRows i)) = JSON.object [
"message" .= ("Unexpected amount of rows"::Text),
"details" .= i]
toJSON (H.ClientError d) = JSON.object [
"message" .= ("Database client error"::Text),
"details" .= (fmap toS d::Maybe Text)]
httpStatus :: Bool -> P.UsageError -> HT.Status
httpStatus _ (P.ConnectionError _) = HT.status503
httpStatus authed (P.SessionError (H.ResultError (H.ServerError c _ _ _))) =
case toS c of
'0':'8':_ -> HT.status503 -- pg connection err
'0':'9':_ -> HT.status500 -- triggered action exception
'0':'L':_ -> HT.status403 -- invalid grantor
'0':'P':_ -> HT.status403 -- invalid role specification
"23503" -> HT.status409 -- foreign_key_violation
"23505" -> HT.status409 -- unique_violation
'2':'5':_ -> HT.status500 -- invalid tx state
'2':'8':_ -> HT.status403 -- invalid auth specification
'2':'D':_ -> HT.status500 -- invalid tx termination
'3':'8':_ -> HT.status500 -- external routine exception
'3':'9':_ -> HT.status500 -- external routine invocation
'3':'B':_ -> HT.status500 -- savepoint exception
'4':'0':_ -> HT.status500 -- tx rollback
'5':'3':_ -> HT.status503 -- insufficient resources
'5':'4':_ -> HT.status413 -- too complex
'5':'5':_ -> HT.status500 -- obj not on prereq state
'5':'7':_ -> HT.status500 -- operator intervention
'5':'8':_ -> HT.status500 -- system error
'F':'0':_ -> HT.status500 -- conf file error
'H':'V':_ -> HT.status500 -- foreign data wrapper error
"P0001" -> HT.status400 -- default code for "raise"
'P':'0':_ -> HT.status500 -- PL/pgSQL Error
'X':'X':_ -> HT.status500 -- internal Error
"42883" -> HT.status404 -- undefined function
"42P01" -> HT.status404 -- undefined table
"42501" -> if authed then HT.status403 else HT.status401 -- insufficient privilege
_ -> HT.status400
httpStatus _ (P.SessionError (H.ResultError _)) = HT.status500
httpStatus _ (P.SessionError (H.ClientError _)) = HT.status503
| Skyfold/postgrest | src/PostgREST/Error.hs | mit | 6,837 | 0 | 13 | 1,464 | 2,010 | 1,071 | 939 | 148 | 28 |
{-# LANGUAGE NoImplicitPrelude #-}
module Main ( main ) where
import Graphics.Caramia.Color
import Graphics.Caramia.Prelude
import Test.Framework
import Test.Framework.Providers.QuickCheck2
main :: IO ()
main = defaultMain tests
tests :: [Test]
tests = [
testProperty "floatToWord8 . word8ToFloat = id" floatwordid
]
floatwordid :: Word8 -> Bool
floatwordid x = floatToWord8 (word8ToFloat x) == x
| Noeda/caramia | tests/color/Main.hs | mit | 411 | 0 | 8 | 67 | 103 | 59 | 44 | 13 | 1 |
module Main where
import Control.Monad (unless, void)
import Data.IORef
import Graphics.GLUtil
import Graphics.UI.GLUT hiding (exit, shaderType)
import Foreign.Marshal.Array (withArray)
import Foreign.Storable (sizeOf)
import System.Exit (exitFailure)
import Hogldev.Pipeline (Pipeline(..), getTrans)
import Hogldev.Utils (PersProj(..))
windowWidth, windowHeight :: GLsizei
windowWidth = 1024
windowHeight = 768
persProjection :: PersProj
persProjection = PersProj
{ persFOV = 30
, persWidth = fromIntegral windowWidth
, persHeigh = fromIntegral windowHeight
, persZNear = 1
, persZFar = 1000
}
main :: IO ()
main = do
void getArgsAndInitialize
initialDisplayMode $= [DoubleBuffered, RGBAMode]
initialWindowSize $= Size windowWidth windowHeight
initialWindowPosition $= Position 100 100
void $ createWindow "Tutorial 12"
vbo <- createVertexBuffer
ibo <- createIndexBuffer
gWorldLocation <- compileShaders
gScale <- newIORef 0.0
initializeGlutCallbacks vbo ibo gWorldLocation gScale
clearColor $= Color4 0 0 0 0
mainLoop
initializeGlutCallbacks :: BufferObject
-> BufferObject
-> UniformLocation
-> IORef GLfloat
-> IO ()
initializeGlutCallbacks vbo ibo gWorldLocation gScale = do
displayCallback $= renderSceneCB vbo ibo gWorldLocation gScale
idleCallback $= Just (idleCB gScale)
idleCB :: IORef GLfloat -> IdleCallback
idleCB gScale = do
gScale $~! (+ 0.1)
postRedisplay Nothing
createVertexBuffer :: IO BufferObject
createVertexBuffer = do
vbo <- genObjectName
bindBuffer ArrayBuffer $= Just vbo
withArray vertices $ \ptr ->
bufferData ArrayBuffer $= (size, ptr, StaticDraw)
return vbo
where
vertices :: [Vertex3 GLfloat]
vertices = [ Vertex3 (-1) (-1) 0
, Vertex3 0 (-1) 1
, Vertex3 1 (-1) 0
, Vertex3 0 1 0 ]
numVertices = length vertices
vertexSize = sizeOf (head vertices)
size = fromIntegral (numVertices * vertexSize)
createIndexBuffer :: IO BufferObject
createIndexBuffer = do
ibo <- genObjectName
bindBuffer ElementArrayBuffer $= Just ibo
withArray indices $ \ptr ->
bufferData ElementArrayBuffer $= (size, ptr, StaticDraw)
return ibo
where
indices :: [GLuint]
indices = [ 0, 3, 1
, 1, 3, 2
, 2, 3, 0
, 0, 2, 1 ]
numIndices = length indices
indexSize = sizeOf (head indices)
size = fromIntegral (numIndices * indexSize)
compileShaders :: IO UniformLocation
compileShaders = do
shaderProgram <- createProgram
addShader shaderProgram "tutorial12/shader.vs" VertexShader
addShader shaderProgram "tutorial12/shader.fs" FragmentShader
linkProgram shaderProgram
linkStatus shaderProgram >>= \ status -> unless status $ do
errorLog <- programInfoLog shaderProgram
putStrLn $ "Error linking shader program: '" ++ errorLog ++ "'"
exitFailure
validateProgram shaderProgram
validateStatus shaderProgram >>= \ status -> unless status $ do
errorLog <- programInfoLog shaderProgram
putStrLn $ "Invalid shader program: '" ++ errorLog ++ "'"
exitFailure
currentProgram $= Just shaderProgram
uniformLocation shaderProgram "gWorld"
addShader :: Program -> FilePath -> ShaderType -> IO ()
addShader shaderProgram shaderFile shaderType = do
shaderText <- readFile shaderFile
shaderObj <- createShader shaderType
shaderSourceBS shaderObj $= packUtf8 shaderText
compileShader shaderObj
compileStatus shaderObj >>= \ status -> unless status $ do
errorLog <- shaderInfoLog shaderObj
putStrLn ("Error compiling shader type " ++ show shaderType
++ ": '" ++ errorLog ++ "'")
exitFailure
attachShader shaderProgram shaderObj
renderSceneCB :: BufferObject
-> BufferObject
-> UniformLocation
-> IORef GLfloat
-> DisplayCallback
renderSceneCB vbo ibo gWorldLocation gScale = do
clear [ColorBuffer]
gScaleVal <- readIORef gScale
uniformMat gWorldLocation $= getTrans
WPPipeline {
worldInfo = Vector3 0 0 5,
scaleInfo = Vector3 1 1 1,
rotateInfo = Vector3 0 gScaleVal 0,
persProj = persProjection
}
vertexAttribArray vPosition $= Enabled
bindBuffer ArrayBuffer $= Just vbo
vertexAttribPointer vPosition $=
(ToFloat, VertexArrayDescriptor 3 Float 0 offset0)
bindBuffer ElementArrayBuffer $= Just ibo
drawIndexedTris 4
vertexAttribArray vPosition $= Disabled
swapBuffers
where
vPosition = AttribLocation 0
| triplepointfive/hogldev | tutorial12/Tutorial12.hs | mit | 4,995 | 0 | 18 | 1,457 | 1,275 | 627 | 648 | 128 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE OverloadedStrings #-}
-- | Lastfm authentication procedure helpers
--
-- Basically, lastfm provides 3 ways to authenticate user:
--
-- - web application - <http://www.last.fm/api/webauth>
--
-- - desktop application - <http://www.last.fm/api/desktopauth>
--
-- - mobile application - <http://www.last.fm/api/mobileauth>
--
-- Note that you can use any of them in your
-- application despite their names
--
-- How to get session key for yourself for debug with GHCi:
--
-- >>> import Lastfm
-- >>> import Lastfm.Authentication
-- >>> :set -XOverloadedStrings
-- >>> con <- newConnection
-- >>> lastfm con $ getToken <*> apiKey "__API_KEY__" <* json
-- Right (Object (fromList [("token",String "__TOKEN__")]))
-- >>> putStrLn . link $ apiKey "__API_KEY__" <* token "__TOKEN__"
-- http://www.last.fm/api/auth/?api_key=__API_KEY__&token=__TOKEN__
-- >>> -- Click that link ^^^
-- >>> lastfm con $ sign "__SECRET__" $ getSession <*> token "__TOKEN__" <*> apiKey "__API_KEY__" <* json
-- Right (Object (fromList [("session",Object (fromList [("subscriber",String "0"),("key",String "__SESSION_KEY__"),("name",String "__USER__")]))]))
module Lastfm.Authentication
( -- * Helpers
getToken, getSession, getMobileSession
, link
) where
#if __GLASGOW_HASKELL__ < 710
import Control.Applicative
import Data.Monoid
#endif
import Lastfm.Internal
import Lastfm.Request
-- | Get authorization token
getToken :: Request f (APIKey -> Ready)
getToken = api "auth.getToken"
-- | Get session key
getMobileSession :: Request f (Username -> Password -> APIKey -> Sign)
getMobileSession = api "auth.getMobileSession" <* post
-- | Get session key
getSession :: Request f (Token -> APIKey -> Sign)
getSession = api "auth.getSession"
-- | Construct the link the user should follow to approve the application
link :: Request f a -> String
link q = render . unwrap q $ R
{ _host = "http://www.last.fm/api/auth/"
, _method = mempty
, _query = mempty
}
| supki/liblastfm | src/Lastfm/Authentication.hs | mit | 2,003 | 0 | 9 | 314 | 220 | 137 | 83 | 22 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : TurtleGraphics
-- Copyright : Joe Jevnik 27.9.2013
-- License : GPL v2
--
-- Maintainer : Joe Jevnik
-- Stability : experimental
-- Portability : requires ImageUtils and devIL.
--
-- An example application of my ImageUtils library.
--
-----------------------------------------------------------------------------
import Text.Read (readMaybe)
import Control.Applicative ((<$>))
import System.Exit (exitSuccess,exitFailure)
import ImageUtils
-- |Data type for the turtle's state.
data Turtle = Turtle { loc :: (Int,Int) -- Turtle's location on the image.
                     , color :: Color -- The color of the tail.
                     , comln :: Int -- The current command line number (for error reporting).
, image :: Image -- The image (default is 500,500).
}
-- |The default turtle state.
nEW_TURTLE :: IO Turtle
nEW_TURTLE = return
$ Turtle { loc = (0,0)
, color = bLACK
, image = listArray ((0,0,0),(499,499,3)) (repeat 255)
, comln = 1
}
-- |Initializes devIL and starts recursing with a new turtle.
main :: IO ()
main = ilInit >> recurs nEW_TURTLE >> return ()
-- |The main loop that extracts the commands and parses them into actions.
recurs :: IO Turtle -> IO Turtle
recurs iot = do
t <- iot
recurs $ (words <$> getLine) >>= parse_ln t
-- |Parse a line of user input or file input.
parse_ln :: Turtle -> [String] -> IO Turtle
parse_ln t cs
| null cs = return $ t { comln = comln t + 1 } -- Empty lines.
| head cs == "move"
&& null (tail cs) = error ("Parse error on line: " ++ show (comln t)
++ ": direction needed: up down left right")
| head cs == "move" = (move t (cs!!1) (read $ cs!!2))
>>= (\t' -> return $ t' { comln = comln t + 1 })
| head cs == "color" = color_change t (cs!!1)
>>= (\t' -> return $ t' { comln = comln t + 1 })
| head cs == "write" = write t (cs!!1)
| head cs == "new" && (null (tail cs) || null (tail (tail cs)))
= error ("Parse error on line: " ++ show (comln t)
++ ": missing paramaters to new: x y")
>> exitFailure >> nEW_TURTLE
| head cs == "new" = new t (cs!!1) (cs!!2)
| head cs == "--" = return $ t { comln = comln t + 1 } -- Comments.
| otherwise = error ("Parse error on line: " ++ show (comln t)
++ ": command not recognized: '" ++ head cs ++ "'")
>> exitFailure >> nEW_TURTLE
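-- An example command script accepted by 'parse_ln' (illustrative; one command
-- per line, read from standard input):
--
-- > color red
-- > move right 100
-- > move up 50
-- > write image.png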
-- |Moves the turtle leaving a trail behind it.
move :: Turtle -> String -> Int -> IO Turtle
move t dir n
| dir == "up" = let (x,y) = loc t
in return $ t { loc = (x,y+n)
, image = draw_seg (x,y) (x,y+n) (color t)
(image t)
}
| dir == "down" = let (x,y) = loc t
in return $ t { loc = (x,y-n)
, image = draw_seg (x,y) (x,y-n) (color t)
(image t)
}
| dir == "left" = let (x,y) = loc t
in return $ t { loc = (x-n,y)
, image = draw_seg (x,y) (x-n,y) (color t)
(image t)
}
| dir == "right" = let (x,y) = loc t
in return $ t { loc = (x+n,y)
, image = draw_seg (x,y) (x+n,y) (color t)
(image t)
}
| otherwise = error ("Parse error on line " ++ show (comln t)
++ ": invalid direction: '" ++ dir ++ "'")
>> exitFailure >> nEW_TURTLE
-- |Changes the color of the turtle's tail.
color_change :: Turtle -> String -> IO Turtle
color_change t "red" = return t { color = rED }
color_change t "green" = return t { color = gREEN }
color_change t "blue" = return t { color = bLUE }
color_change t "black" = return t { color = bLACK }
color_change t "white" = return t { color = wHITE }
color_change t str = case readMaybe str :: Maybe (Word8,Word8,Word8) of
Nothing -> error ("Parse error on line "
++ show (comln t)
++ ": invalid color: '" ++ str
++ "': use '(r,g,b)'")
>> exitFailure >> nEW_TURTLE
Just c -> return t { color = c }
-- |Creates a new turtle on a white image of size x y.
new :: Turtle -> String -> String -> IO Turtle
new t xstr ystr = case ( readMaybe xstr :: Maybe Int
, readMaybe ystr :: Maybe Int
)
of
(Nothing,Nothing) -> error ("Parse error on line "
++ show (comln t)
++ ": bad new size "
++ "arguments: '" ++ xstr
++ "' '" ++ ystr ++ "'")
>> exitFailure >> nEW_TURTLE
(Nothing,_) -> error ("Parse error on line "
++ show (comln t)
++ ": bad new x size "
++ "argument: '" ++ xstr ++ "'")
>> exitFailure >> nEW_TURTLE
(_,Nothing) -> error ("Parse error on line "
++ show (comln t)
++ ": bad new y argument: '"
++ ystr ++ "'")
>> exitFailure >> nEW_TURTLE
(Just x,Just y) -> return
$ Turtle { loc = (0,0)
, color = bLACK
, image = listArray ((0,0,0)
,(y,x,3))
(repeat 255)
, comln = comln t + 1
}
-- |Writes the turtle's image to a file named fl.
write :: Turtle -> FilePath -> IO Turtle
write t fl
| dropWhile (/='.') fl `elem` [ ".bmp"
, ".dds"
, ".exr"
, ".h"
, ".jpg"
, ".jp2"
, ".pal"
, ".pcx"
, ".png"
, ".pbm"
, ".pgm"
, ".pgm"
, ".pnm"
, ".psd"
, ".raw"
, ".sgi"
, ".bw"
, ".rgb"
, ".rgba"
, ".tga"
, ".tif"
, ".vtf"
]
= writeImage fl (image t)
>> exitSuccess >> nEW_TURTLE
| otherwise = error ("Parse error on line: " ++ show (comln t)
++ ": invalid file format: '" ++ dropWhile (/='.') fl
++ "'")
>> exitFailure >> nEW_TURTLE
| llllllllll/imageutils | src/turtle.hs | gpl-2.0 | 8,353 | 4 | 21 | 4,430 | 1,991 | 1,052 | 939 | 130 | 4 |
{-# LANGUAGE ImpredicativeTypes #-}
module FrontEnd.Tc.Monad(
CoerceTerm(..),
Tc(),
TcInfo(..),
TypeEnv(),
TcEnv(..),
tcRecursiveCalls_u,
Output(..),
addCoerce,
addPreds,
composeCoerce,
addRule,
addToCollectedEnv,
boxyInstantiate,
boxySpec,
deconstructorInstantiate,
freeMetaVarsEnv,
freshInstance,
freshSigma,
getClassHierarchy,
getCollectedEnv,
getCollectedCoerce,
getDeName,
getKindEnv,
getSigEnv,
evalFullType,
inst,
listenCheckedRules,
listenPreds,
listenCPreds,
localEnv,
lookupName,
newBox,
newMetaVar,
newVar,
quantify,
quantify_n,
runTc,
skolomize,
tcInfoEmpty,
toSigma,
unBox,
evalType,
unificationError,
varBind,
zonkKind,
withContext,
withMetaVars
) where
import Control.Monad.Error
import Control.Monad.Reader
import Control.Monad.Writer.Strict
import Data.IORef
import Data.Monoid
import List
import System
import Text.PrettyPrint.HughesPJ(Doc)
import qualified Data.Foldable as T
import qualified Data.Map as Map
import qualified Data.Sequence as Seq
import qualified Data.Set as Set
import qualified Data.Traversable as T
import Doc.DocLike
import Doc.PPrint
import FrontEnd.Class
import FrontEnd.Diagnostic
import FrontEnd.KindInfer
import FrontEnd.Rename(DeNameable(..))
import FrontEnd.SrcLoc(bogusASrcLoc,MonadSrcLoc(..))
import FrontEnd.Tc.Kind
import FrontEnd.Tc.Type
import FrontEnd.Warning
import GenUtil
import Name.Name
import Name.Names
import Options
import Support.CanType
import Support.FreeVars
import Support.Tickle
import qualified FlagDump as FD
import {-# SOURCE #-} FrontEnd.Tc.Class(simplify)
data BindingType = RecursiveInfered | Supplied
type TypeEnv = Map.Map Name Sigma
-- read only environment, set up before type checking.
data TcEnv = TcEnv {
tcInfo :: TcInfo,
tcDiagnostics :: [Diagnostic], -- list of information that might help diagnosis
tcVarnum :: {-# UNPACK #-} !(IORef Int),
tcCollectedEnv :: {-# UNPACK #-} !(IORef (Map.Map Name Sigma)),
tcCollectedCoerce :: {-# UNPACK #-} !(IORef (Map.Map Name CoerceTerm)),
tcConcreteEnv :: Map.Map Name Sigma,
tcMutableEnv :: Map.Map Name Sigma,
tcCurrentScope :: Set.Set MetaVar,
tcRecursiveCalls :: Set.Set Name,
tcInstanceEnv :: InstanceEnv,
tcOptions :: Opt -- module specific options
}
{-! derive: update !-}
data Output = Output {
collectedPreds :: !Preds,
existentialPreds :: !Preds,
constraints :: !(Seq.Seq Constraint),
checkedRules :: !(Seq.Seq Rule),
existentialVars :: [Tyvar],
tcWarnings :: !(Seq.Seq Warning),
outKnots :: [(Name,Name)]
}
{-! derive: update, Monoid !-}
newtype Tc a = Tc (ReaderT TcEnv (WriterT Output IO) a)
deriving(MonadFix,MonadIO,MonadReader TcEnv,MonadWriter Output,Functor)
-- | information that is passed into the type checker.
data TcInfo = TcInfo {
tcInfoEnv :: TypeEnv, -- initial typeenv, data constructors, and previously infered types
tcInfoSigEnv :: TypeEnv, -- type signatures used for binding analysis
tcInfoModName :: Module,
tcInfoKindInfo :: KindEnv,
tcInfoClassHierarchy :: ClassHierarchy
}
getDeName :: DeNameable n => Tc (n -> n)
getDeName = do
mn <- asks (tcInfoModName . tcInfo)
return (\n -> deName mn n)
-- | run a computation with a local environment
localEnv :: TypeEnv -> Tc a -> Tc a
localEnv te act = do
te' <- T.mapM flattenType te
let (cenv,menv) = Map.partition (Set.null . freeMetaVars) te'
--if any isBoxy (Map.elems te') then
-- fail $ "localEnv error!\n" ++ show te
local (tcConcreteEnv_u (cenv `Map.union`) . tcMutableEnv_u ((menv `Map.union`) .
Map.filterWithKey (\k _ -> k `Map.notMember` cenv))) act
-- | add to the collected environment which will be used to annotate uses of
-- variables with their instantiated types. should contain @-aliases for each
-- use of a polymorphic variable or pattern match.
addToCollectedEnv :: TypeEnv -> Tc ()
addToCollectedEnv te = do
v <- asks tcCollectedEnv
liftIO $ modifyIORef v (te `Map.union`)
addCoerce :: Name -> CoerceTerm -> Tc ()
addCoerce n te = do
v <- asks tcCollectedCoerce
liftIO $ modifyIORef v (Map.insert n te)
getCollectedEnv :: Tc TypeEnv
getCollectedEnv = do
v <- asks tcCollectedEnv
r <- liftIO $ readIORef v
r <- T.mapM flattenType r
return r
getCollectedCoerce :: Tc (Map.Map Name CoerceTerm)
getCollectedCoerce = do
v <- asks tcCollectedCoerce
r <- liftIO $ readIORef v
r <- T.mapM flattenType r
return r
runTc :: (MonadIO m,OptionMonad m) => TcInfo -> Tc a -> m a
runTc tcInfo (Tc tim) = do
opt <- getOptions
liftIO $ do
vn <- newIORef 0
ce <- newIORef mempty
cc <- newIORef mempty
(a,out) <- runWriterT $ runReaderT tim TcEnv {
tcCollectedEnv = ce,
tcCollectedCoerce = cc,
tcConcreteEnv = tcInfoEnv tcInfo `mappend` tcInfoSigEnv tcInfo,
tcMutableEnv = mempty,
tcVarnum = vn,
tcDiagnostics = [Msg Nothing $
"Compilation of module: " ++ show (tcInfoModName tcInfo)],
tcInfo = tcInfo,
tcRecursiveCalls = mempty,
tcInstanceEnv = makeInstanceEnv (tcInfoClassHierarchy tcInfo),
tcCurrentScope = mempty,
tcOptions = opt
}
liftIO $ processErrors (T.toList $ tcWarnings out)
return a
instance OptionMonad Tc where
getOptions = asks tcOptions
-- | given a diagnostic and a computation to take place inside the Tc monad,
-- run the computation with the diagnostic pushed onto the top of the
-- diagnostic stack for its duration
withContext :: Diagnostic -> Tc a -> Tc a
withContext diagnostic comp = do
local (tcDiagnostics_u (diagnostic:)) comp
addRule :: Rule -> Tc ()
addRule r = tell mempty { checkedRules = Seq.singleton r }
getErrorContext :: Tc [Diagnostic]
getErrorContext = asks tcDiagnostics
getClassHierarchy :: Tc ClassHierarchy
getClassHierarchy = asks (tcInfoClassHierarchy . tcInfo)
getKindEnv :: Tc KindEnv
getKindEnv = asks (tcInfoKindInfo . tcInfo)
getSigEnv :: Tc TypeEnv
getSigEnv = asks (tcInfoSigEnv . tcInfo)
askCurrentEnv = do
env1 <- asks tcConcreteEnv
env2 <- asks tcMutableEnv
return (env2 `Map.union` env1)
dConScheme :: Name -> Tc Sigma
dConScheme conName = do
env <- askCurrentEnv
case Map.lookup conName env of
Just s -> return s
Nothing -> error $ "dConScheme: constructor not found: " ++ show conName ++
"\nin this environment:\n" ++ show env
-- | returns a new boxy metavariable of the given kind.
newBox :: Kind -> Tc Type
newBox k = newMetaVar Sigma k
unificationError t1 t2 = do
t1 <- evalFullType t1
t2 <- evalFullType t2
diagnosis <- getErrorContext
let Left msg = typeError (Unification $ "attempted to unify " ++
prettyPrintType t1 ++ " with " ++ prettyPrintType t2) diagnosis
liftIO $ processIOErrors
liftIO $ putErrLn msg
liftIO $ exitFailure
lookupName :: Name -> Tc Sigma
lookupName n = do
env <- askCurrentEnv
case Map.lookup n env of
Just x -> freshSigma x
Nothing | Just 0 <- fromUnboxedNameTuple n -> do
return (tTTuple' [])
Nothing | Just num <- fromUnboxedNameTuple n -> do
nvs <- mapM newVar (replicate num kindArg)
let nvs' = map TVar nvs
return (TForAll nvs $ [] :=> foldr TArrow (tTTuple' nvs') nvs')
Nothing -> fail $ "Could not find var in tcEnv:" ++ show (nameType n,n)
newMetaVar :: MetaVarType -> Kind -> Tc Type
newMetaVar t k = do
te <- ask
n <- newUniq
r <- liftIO $ newIORef Nothing
return $ TMetaVar MetaVar { metaUniq = n, metaKind = k, metaRef = r, metaType = t }
class Instantiate a where
inst:: Map.Map Int Type -> Map.Map Name Type -> a -> a
instance Instantiate Type where
inst mm ts (TAp l r) = tAp (inst mm ts l) (inst mm ts r)
inst mm ts (TArrow l r) = TArrow (inst mm ts l) (inst mm ts r)
inst mm _ t@TCon {} = t
inst mm ts (TVar tv ) = case Map.lookup (tyvarName tv) ts of
Just t' -> t'
Nothing -> (TVar tv)
inst mm ts (TForAll as qt) = TForAll as (inst mm (foldr Map.delete ts (map tyvarName as)) qt)
inst mm ts (TExists as qt) = TExists as (inst mm (foldr Map.delete ts (map tyvarName as)) qt)
inst mm ts (TMetaVar mv) | Just t <- Map.lookup (metaUniq mv) mm = t
inst mm ts (TMetaVar mv) = TMetaVar mv
inst mm ts (TAssoc tc as bs) = TAssoc tc (map (inst mm ts) as) (map (inst mm ts) bs)
inst mm _ t = error $ "inst: " ++ show t
instance Instantiate a => Instantiate [a] where
inst mm ts = map (inst mm ts)
instance Instantiate t => Instantiate (Qual t) where
inst mm ts (ps :=> t) = inst mm ts ps :=> inst mm ts t
instance Instantiate Pred where
inst mm ts is = tickle (inst mm ts :: Type -> Type) is
freshInstance :: MetaVarType -> Sigma -> Tc ([Type],Rho)
freshInstance typ (TForAll as qt) = do
ts <- mapM (newMetaVar typ) (map tyvarKind as)
let (ps :=> t) = (applyTyvarMapQT (zip as ts) qt)
addPreds ps
return (ts,t)
freshInstance _ x = return ([],x)
addPreds :: Preds -> Tc ()
addPreds ps = do
sl <- getSrcLoc
Tc $ tell mempty { collectedPreds = [ p | p@IsIn {} <- ps ],
constraints = Seq.fromList [ Equality { constraintSrcLoc = sl,
constraintType1 = a, constraintType2 = b } | IsEq a b <- ps ] }
addConstraints :: [Constraint] -> Tc ()
addConstraints ps = Tc $ tell mempty { constraints = Seq.fromList ps }
listenPreds :: Tc a -> Tc (a,Preds)
listenPreds action = censor (\x -> x { collectedPreds = mempty }) $
listens collectedPreds action
listenCPreds :: Tc a -> Tc (a,(Preds,[Constraint]))
listenCPreds action = censor (\x -> x { constraints = mempty, collectedPreds = mempty }) $
listens (\x -> (collectedPreds x,T.toList $ constraints x)) action
listenCheckedRules :: Tc a -> Tc (a,[Rule])
listenCheckedRules action = do
(a,r) <- censor (\x -> x { checkedRules = mempty }) $ listens checkedRules action
return (a,T.toList r)
newVar :: Kind -> Tc Tyvar
newVar k = do
te <- ask
n <- newUniq
let ident = toName TypeVal (tcInfoModName $ tcInfo te,'v':show n)
v = tyvar ident k
return v
-- rename the bound variables of a sigma, just in case.
freshSigma :: Sigma -> Tc Sigma
freshSigma (TForAll [] ([] :=> t)) = return t
freshSigma (TForAll vs qt) = do
nvs <- mapM (newVar . tyvarKind) vs
return (TForAll nvs $ applyTyvarMapQT (zip vs (map TVar nvs)) qt)
freshSigma x = return x
toSigma :: Sigma -> Sigma
toSigma t@TForAll {} = t
toSigma t = TForAll [] ([] :=> t)
-- | replace bound variables with arbitrary new ones and drop the binding
-- TODO predicates?
skolomize :: Sigma -> Tc ([Tyvar],[Pred],Type)
skolomize s = freshSigma s >>= return . fromType
boxyInstantiate :: Sigma -> Tc ([Type],Rho')
boxyInstantiate = freshInstance Sigma
deconstructorInstantiate :: Sigma -> Tc Rho'
deconstructorInstantiate tfa@TForAll {} = do
TForAll vs qt@(_ :=> t) <- freshSigma tfa
let f (_ `TArrow` b) = f b
f b = b
eqvs = vs List.\\ freeVars (f t)
tell mempty { existentialVars = eqvs }
(_,t) <- freshInstance Sigma (TForAll (vs List.\\ eqvs) qt)
return t
deconstructorInstantiate x = return x
boxySpec :: Sigma -> Tc ([(BoundTV,[Sigma'])],Rho')
boxySpec (TForAll as qt@(ps :=> t)) = do
let f (TVar t) vs | t `elem` vs = do
b <- lift (newBox $ tyvarKind t)
tell [(t,b)]
return b
f e@TCon {} _ = return e
f (TAp a b) vs = liftM2 tAp (f a vs) (f b vs)
f (TArrow a b) vs = liftM2 TArrow (f a vs) (f b vs)
f (TForAll as (ps :=> t)) vs = do
t' <- f t (vs List.\\ as)
return (TForAll as (ps :=> t'))
f t _ = return t
-- f t _ = error $ "boxySpec: " ++ show t
(t',vs) <- runWriterT (f t as)
addPreds $ inst mempty (Map.fromList [ (tyvarName bt,s) | (bt,s) <- vs ]) ps
return (sortGroupUnderFG fst snd vs,t')
freeMetaVarsEnv :: Tc (Set.Set MetaVar)
freeMetaVarsEnv = do
env <- asks tcMutableEnv
xs <- flip mapM (Map.elems env) $ \ x -> do
x <- flattenType x
return $ freeMetaVars x
return (Set.unions xs)
quantify_n :: [MetaVar] -> [Pred] -> [Rho] -> Tc [Sigma]
quantify_n vs ps rs | not $ any isBoxyMetaVar vs = do
-- we bind the quantified variables to fresh tvars
vs <- mapM groundKind vs
nvs <- mapM (newVar . fixKind . metaKind) vs
sequence_ [ varBind mv (TVar v) | v <- nvs | mv <- vs ]
ps <- flattenType ps
rs <- flattenType rs
ch <- getClassHierarchy
return $ [TForAll nvs (FrontEnd.Tc.Class.simplify ch ps :=> r) | r <- rs ]
quantify :: [MetaVar] -> [Pred] -> Rho -> Tc Sigma
quantify vs ps r = do [s] <- quantify_n vs ps [r]; return s
-- turn all ?? into * types, as we can't abstract over unboxed types
fixKind :: Kind -> Kind
fixKind (KBase KQuestQuest) = KBase Star
fixKind (KBase KQuest) = KBase Star
fixKind (a `Kfun` b) = fixKind a `Kfun` fixKind b
fixKind x = x
groundKind mv = zonkKind (fixKind $ metaKind mv) mv
-- this removes all boxes, replacing them with tau vars
unBox :: Type -> Tc Type
unBox tv = ft' tv where
ft t@(TMetaVar mv)
| isBoxyMetaVar mv = do
tmv <- newMetaVar Tau (getType mv)
varBind mv tmv
return tmv
| otherwise = return t
ft t = tickleM ft' t
ft' t = evalType t >>= ft
evalType t = findType t >>= evalTAssoc >>= evalArrowApp
evalFullType t = f' t where
f t = tickleM f' t
f' t = evalType t >>= f
evalTAssoc ta@TAssoc { typeCon = Tycon { tyconName = n1 }, typeClassArgs = ~[carg], typeExtraArgs = eas } = do
carg' <- evalType carg
case fromTAp carg' of
(TCon Tycon { tyconName = n2 }, as) -> do
InstanceEnv ie <- asks tcInstanceEnv
case Map.lookup (n1,n2) ie of
Just (aa,bb,tt) -> evalType (applyTyvarMap (zip aa as ++ zip bb eas) tt)
_ -> fail "no instance for associated type"
_ -> return ta { typeClassArgs = [carg'] }
evalTAssoc t = return t
evalArrowApp (TAp (TAp (TCon tcon) ta) tb)
| tyconName tcon == tc_Arrow = return (TArrow ta tb)
evalArrowApp t = return t
-- Bind mv to type, first filling in any boxes in type with tau vars
varBind :: MetaVar -> Type -> Tc ()
varBind u t
-- | getType u /= getType t = error $ "varBind: kinds do not match:" ++ show (u,t)
| otherwise = do
kindCombine (getType u) (getType t)
tt <- unBox t
--(t,be,_) <- unbox t
--when be $ error $ "binding boxy: " ++ tupled [pprint u,prettyPrintType t]
tt <- evalFullType tt
when (dump FD.BoxySteps) $ liftIO $ putStrLn $ "varBind: " ++ pprint u <+>
text ":=" <+> prettyPrintType tt
when (u `Set.member` freeMetaVars tt) $ do
unificationError (TMetaVar u) tt -- occurs check
let r = metaRef u
x <- liftIO $ readIORef r
case x of
Just r -> fail $ "varBind: binding unfree: " ++
tupled [pprint u,prettyPrintType tt,prettyPrintType r]
Nothing -> liftIO $ do
--when (dump FD.BoxySteps) $ putStrLn $ "varBind: " ++ pprint u <+> text ":=" <+> prettyPrintType t
writeIORef r (Just tt)
zonkKind :: Kind -> MetaVar -> Tc MetaVar
zonkKind nk mv = do
fk <- kindCombine nk (metaKind mv)
if fk == metaKind mv then return mv else do
nref <- liftIO $ newIORef Nothing
let nmv = mv { metaKind = fk, metaRef = nref }
liftIO $ modifyIORef (metaRef mv) (\Nothing -> Just $ TMetaVar nmv)
return nmv
zonkBox :: MetaVar -> Tc Type
zonkBox mv | isBoxyMetaVar mv = findType (TMetaVar mv)
zonkBox mv = fail $ "zonkBox: nonboxy" ++ show mv
readFilledBox :: MetaVar -> Tc Type
readFilledBox mv | isBoxyMetaVar mv = zonkBox mv >>= \v -> case v of
TMetaVar mv' | mv == mv' -> fail $ "readFilledBox: " ++ show mv
t -> return t
readFilledBox mv = error $ "readFilledBox: nonboxy" ++ show mv
elimBox :: MetaVar -> Tc Type
elimBox mv | isBoxyMetaVar mv = do
t <- readMetaVar mv
case t of
Just t -> return t
Nothing -> newMetaVar Tau (getType mv)
elimBox mv = error $ "elimBox: nonboxy" ++ show mv
----------------------------------------
-- Declaration of instances, boilerplate
----------------------------------------
pretty :: PPrint Doc a => a -> String
pretty x = show (pprint x :: Doc)
instance Monad Tc where
return a = Tc $ return a
Tc comp >>= fun = Tc $ do x <- comp; case fun x of Tc m -> m
Tc a >> Tc b = Tc $ a >> b
fail s = Tc $ do
st <- ask
liftIO $ processIOErrors
Left x <- typeError (Failure s) (tcDiagnostics st)
liftIO $ fail x
instance MonadWarn Tc where
addWarning w = tell mempty { tcWarnings = Seq.singleton w }
instance MonadSrcLoc Tc where
getSrcLoc = do
xs <- asks tcDiagnostics
case xs of
(Msg (Just sl) _:_) -> return sl
_ -> return bogusASrcLoc
instance UniqueProducer Tc where
newUniq = do
v <- asks tcVarnum
n <- liftIO $ do
n <- readIORef v
writeIORef v $! n + 1
return n
return n
tcInfoEmpty = TcInfo {
tcInfoEnv = mempty,
tcInfoModName = toModule "(unknown)",
tcInfoKindInfo = mempty,
tcInfoClassHierarchy = mempty,
tcInfoSigEnv = mempty
}
withMetaVars :: MetaVar -> [Kind] -> ([Sigma] -> Sigma) -> ([Sigma'] -> Tc a) -> Tc a
withMetaVars mv ks sfunc bsfunc | isBoxyMetaVar mv = do
boxes <- mapM newBox ks
res <- bsfunc boxes
tys <- mapM readFilledBox [ mv | ~(TMetaVar mv) <- boxes]
varBind mv (sfunc tys)
return res
withMetaVars mv ks sfunc bsfunc = do
taus <- mapM (newMetaVar Tau) ks
varBind mv (sfunc taus)
bsfunc taus
| dec9ue/jhc_copygc | src/FrontEnd/Tc/Monad.hs | gpl-2.0 | 18,181 | 1 | 20 | 4,855 | 6,421 | 3,236 | 3,185 | -1 | -1 |
--
-- Copyright (c) 2015 Assured Information Security, Inc. <[email protected]>
-- Copyright (c) 2014 Citrix Systems, Inc.
--
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, write to the Free Software
-- Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
--
{-# LANGUAGE PatternGuards #-}
-- description: set ndvm to proper kernel path
-- date: 06/14/2016
module Migrations.M_27 (migration) where
import UpgradeEngine
import Data.List (foldl')
migration = Migration {
sourceVersion = 27
, targetVersion = 28
, actions = act
}
act :: IO ()
act = updateNdvm
updateNdvm = xformVmJSON xform where
xform tree = case jsGet "/type" tree of
Just s | jsUnboxString s == "ndvm" -> modify tree
_ -> tree
where
modify = jsSet "/config/kernel-extract" (jsBoxString "/boot/bzImage")
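-- Net effect (sketch): any VM record whose "/type" is "ndvm" gets
-- "/config/kernel-extract" set to "/boot/bzImage"; all other VM records pass
-- through unchanged.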
| OpenXT/manager | upgrade-db/Migrations/M_27.hs | gpl-2.0 | 1,439 | 0 | 14 | 308 | 161 | 95 | 66 | 15 | 2 |
module HLinear.NormalForm.FoldUnfold.PLE.DivisionRing
where
import HLinear.Utility.Prelude
import qualified Data.Vector as V
import HLinear.Hook.PLEHook ( PLEHook(..) )
import HLinear.Matrix.Definition ( Matrix(..) )
import HLinear.NormalForm.FoldUnfold.Pivot ( splitOffPivotNonZero )
import HLinear.Utility.RPermute ( RPermute(..) )
import qualified HLinear.Hook.PLEHook.Basic as Hook
import qualified HLinear.Hook.EchelonForm as EF
import qualified HLinear.Hook.LeftTransformation as LT
import qualified HLinear.Utility.RPermute as RP
type HasPLE a = ( DivisionRing a, DecidableZero a, DecidableUnit a )
-- to allow the rules for particular a to fire
{-# NOINLINE[2] ple #-}
ple :: HasPLE a => Matrix a -> PLEHook a a
ple m@(Matrix nrs ncs _) =
case splitOffHook m of
Nothing -> Hook.one nrs ncs
Just (h,m') -> V.foldl (*) h $ V.unfoldr splitOffHook m'
{-# INLINABLE splitOffHook #-}
splitOffHook
:: HasPLE a
=> Matrix a -> Maybe (PLEHook a a, Matrix a)
splitOffHook m@(Matrix nrs ncs rs)
| nrs == 0 || ncs == 0 = Nothing
| otherwise = Just $ case splitOffPivotNonZero m of
Nothing ->
( Hook.one nrs ncs
, Matrix nrs (ncs-1) $ fmap V.tail rs
)
Just (p, ((NonZero pivot, pivotBottom), (pivotTail, bottomRight))) ->
( PLEHook p lt ef
, Matrix (nrs-1) (ncs-1) bottomRight'
)
where
pivotRecip = recip $ Unit pivot
pivotTail' = fmap (fromUnit pivotRecip *) pivotTail
lt = LT.singleton pivotRecip $ fmap negate pivotBottom
ef = EF.singletonLeadingOne nrs pivotTail'
bottomRight' =
(\f -> V.zipWith f pivotBottom bottomRight) $ \h t ->
V.zipWith (\pv te -> te - h * pv) pivotTail' t
| martinra/hlinear | src/HLinear/NormalForm/FoldUnfold/PLE/DivisionRing.hs | gpl-3.0 | 1,760 | 0 | 18 | 420 | 574 | 318 | 256 | 37 | 2 |
{-# LANGUAGE
FlexibleContexts
, FlexibleInstances
, UndecidableInstances
#-}
module HFlint.NMod.Tasty.QuickCheck
where
import Data.Maybe ( mapMaybe )
import Math.Structure ( Unit(..), DecidableUnit(..) )
import Test.QuickCheck.Arbitrary
import Test.QuickCheck.Gen
import HFlint.NMod.Algebra ()
import HFlint.NMod.Arithmetic ()
import HFlint.NMod.Context
import HFlint.NMod.FFI
instance ReifiesNModContext ctx => Arbitrary (NMod ctx) where
arbitrary = fmap fromIntegral (arbitrary :: Gen Int)
shrink = const []
instance ReifiesNModContext ctx => Arbitrary (Unit (NMod ctx)) where
arbitrary = toUnit <$> arbitrary `suchThat` isUnit
shrink = mapMaybe toUnitSafe . shrink . fromUnit
| martinra/hflint | src/HFlint/NMod/Tasty/QuickCheck.hs | gpl-3.0 | 703 | 0 | 9 | 104 | 190 | 110 | 80 | 19 | 0 |
module TypeBug9 where
infixl 6 <*>
infixl 7 <$>
type Parser symbol result = [symbol] -> [(result,[symbol])]
(<*>) :: Parser s (a -> b) -> Parser s a -> Parser s b
(<$>) :: (a -> b) -> Parser s a -> Parser s b
(<*>) = undefined
(<$>) = undefined
many :: Parser s a -> Parser s [a]
many = undefined
chainr :: Parser s a -> Parser s (a -> a -> a) -> Parser s a
chainr pe po = h <$> many (j <$> pe <*> po) <*> pe
where j x op = (x `op`)
h fs x = foldr ($) fs x
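-- Note: this module is a type-error regression example (test/typeerrors/Examples);
-- the helper 'h' in 'chainr' is intentionally ill-typed ('foldr ($) fs x' swaps
-- the accumulator and the list), so this file is not expected to compile.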
| roberth/uu-helium | test/typeerrors/Examples/TypeBug9.hs | gpl-3.0 | 482 | 0 | 10 | 134 | 259 | 143 | 116 | 14 | 1 |
{-
   Haskell program to validate the check digits of a CPF (Brazilian taxpayer ID)
   More info at: http://pt.wikipedia.org/wiki/Cadastro_de_Pessoas_F%C3%ADsicas
-}
import Data.Char
isCpfOk :: [Int] -> Bool
isCpfOk cpf =
  let -- compute the first check digit
digitos1 = take 9 cpf
expr1 = (sum $ zipWith (*) digitos1 [10,9..2]) `mod` 11
dv1 = if expr1 < 2 then 0 else 11-expr1
      -- compute the second check digit
digitos2 = digitos1 ++ [dv1]
expr2 = (sum $ zipWith (*) digitos2 [11,10..2]) `mod` 11
dv2 = if expr2 < 2 then 0 else 11-expr2
in dv1 == cpf !! 9 && dv2 == cpf !! 10
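-- Quick check with the test CPF used in 'main' (illustrative):
--
-- > isCpfOk (map digitToInt "12345678909") == True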
main = do
let cpf = "12345678909"
digitos = (map digitToInt cpf)
result = isCpfOk digitos
putStrLn (show result)
| AndreaInfUFSM/elc117-2016a | slides/validaCPF.hs | gpl-3.0 | 718 | 4 | 13 | 189 | 241 | 131 | 110 | 16 | 3 |
{-# LANGUAGE OverloadedStrings #-}
{-|
Module : ReferenceExtractor
Description : Extract links from a, link etc. tag from HTML
Copyright : (c) Frédéric BISSON, 2016
License : GPL-3
Maintainer : [email protected]
Stability : experimental
Portability : POSIX
A link extractor
-}
module Data.ReferenceExtractor (findReferences) where
import qualified Data.Text as T
import Text.HTML.TagSoup (Tag(TagOpen), parseTags)
import Data.Maybe (catMaybes, fromMaybe)
tagToReference :: Tag T.Text -> Maybe T.Text
tagToReference (TagOpen tagName attrs)
| tagName == "a" = lookup "href" attrs
| tagName == "link" = lookup "href" attrs
| tagName == "script" = lookup "src" attrs
| otherwise = Nothing
tagToReference _ = Nothing
-- | Find base tag if it exists
findBase :: [Tag T.Text] -> T.Text
findBase (TagOpen "base" attrs:_) = fromMaybe "" (lookup "href" attrs)
findBase (_:tags) = findBase tags
findBase [] = ""
-- | Given an HTML string, find all references to resources (links, scripts,
-- etc.)
findReferences :: T.Text -> (T.Text, [T.Text])
findReferences document =
( findBase tags
, catMaybes (tagToReference <$> tags)
)
where tags = parseTags document
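-- A small illustrative run (hypothetical input; assumes OverloadedStrings):
--
-- > findReferences "<head><base href=\"http://example.org/\"><link href=\"a.css\"></head><a href=\"b.html\">b</a>"
-- ("http://example.org/",["a.css","b.html"])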
| Zigazou/deadlink | src/Data/ReferenceExtractor.hs | gpl-3.0 | 1,239 | 0 | 8 | 263 | 299 | 158 | 141 | 21 | 1 |
{-# LANGUAGE NamedFieldPuns #-}
{- |
This module provides some basic logic to deal with rate limited IO actions
that should be retried until successful. It provides an exponential backoff
time, and it provides the ability to coordinate the rate limit over multiple
threads of execution.
-}
module Control.Concurrent.RateLimitedIO (
newRateManager,
perform,
performWith,
Result(..),
BackoffPolicy(..),
RateManager,
) where
import Control.Concurrent (threadDelay)
import Control.Concurrent.STM (atomically, retry)
import Control.Concurrent.STM.TVar (TVar, newTVar, readTVar, writeTVar,
modifyTVar)
import Control.Exception (finally)
import Control.Monad (join)
import Data.Default.Class (Default, def)
import Data.List (delete)
{- |
Jobs to be executed must return a Result, indicating either successful
completion or an operation that hit the rate limit.
-}
data Result a b = Ok a | HitLimit b
data BackoffPolicy = BackoffPolicy
{ initialDelayMilliseconds :: Int
, maxExponent :: Int
}
instance Default BackoffPolicy where
def = BackoffPolicy
{ initialDelayMilliseconds = 10
, maxExponent = 11
}
-- | Calculate a new delay in microseconds. This is not a true exponential
-- backoff as the delays are not random.
-- https://en.wikipedia.org/wiki/Exponential_backoff
time ::
Int
-- ^ the initial delay in milliseconds
-> Int
-- ^ the number of collisions that have occurred
-> Int
time initialDelayMs collisions =
initialDelayMs * ((2 ^ collisions) - 1) * numMicrosecondsInMillisecond
where
numMicrosecondsInMillisecond :: Int
numMicrosecondsInMillisecond = 1000
{- |
We default the maximum backoff exponent to 11 and initial delay to 10ms, which
translates to a 20.47 second delay. Specifying a higher maximum is useful for
platforms that enforce a long waiting-period when a rate-limit is exceeded.
backoff time
------- -------
0 0.0 (seconds)
1 0.01
2 0.03
3 0.07
4 0.15
5 0.31
6 0.63
7 1.27
8 2.55
9 5.11
10 10.23
11 20.47
12 40.95
13 1.36 (minutes)
14 2.73
15 5.46
16 10.92
17 21.84
18 43.69
19 1.45 (hours)
-}
{- |
A coordinating manager for rate limiting.
-}
data RateManager =
R {
countT :: TVar Int,
throttledT :: TVar [Int]
}
{- |
Create a new RateManager.
-}
newRateManager :: IO RateManager
newRateManager = do
countT <- atomically (newTVar minBound)
throttledT <- atomically (newTVar [])
return R {countT, throttledT}
{- |
Perform a job in the context of the `RateManager`. The job blocks
until the rate limit logic says it can go. If the job gets throttled,
then it re-tries until it is successful (where "successful" means
anything except `HitLimit`. Throwing an exception counts as "success"
in this case).
The job that's executed in each retry is created from the
client-provided `mkNextJob` function, which accepts the result of the previous
HitLimit as an input.
The idea is that the oldest throttled job must complete before any other jobs
(throttled or not) are allowed to start. Because of concurrency, "oldest" in
this case means when we discovered the job was throttled, not when it was
started.
If there are no jobs that have been throttled, then it is a
free-for-all. All jobs are executed immediately.
The 'mkNextJob' argument is responsible for returning a tuple that
both controls how the timeout is computed, and also specifies a new
job to try. If the first element of the tuple is 'Nothing', then the
job will be re-tried using an internally computed backoff
timeout. If the first element of the tuple is 'Just Int', the 'Int'
specifies explicitly the number of milliseconds to timeout before trying
the new job.
-}
performWith ::
BackoffPolicy
-> RateManager
-> (b -> (Maybe Int, IO (Result a b)))
-> IO (Result a b)
-> IO a
performWith policy R{countT, throttledT} mkNextJob job = do
jobId <- freshJobId countT
performJob policy throttledT jobId mkNextJob Nothing job
freshJobId :: TVar Int -> IO Int
freshJobId countT = atomically $ do
c <- readTVar countT
writeTVar countT (c + 1)
return c
{- |
The same as `performWith`, but the original job is retried each time.
-}
perform ::
BackoffPolicy
-> RateManager
-> IO (Result a ())
-> IO a
perform policy r job = performWith policy r (const (Nothing, job)) job
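-- A minimal usage sketch (hypothetical job that never hits the rate limit;
-- 'def' is the default 'BackoffPolicy' from Data.Default.Class):
--
-- > r <- newRateManager
-- > n <- perform def r (return (Ok (42 :: Int)))
-- > print n -- prints 42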
performJob ::
BackoffPolicy
-> TVar [Int]
-> Int
-> (b -> (Maybe Int, IO (Result a b)))
-> Maybe Int
-> IO (Result a b)
-> IO a
performJob policy@BackoffPolicy{initialDelayMilliseconds, maxExponent}
throttledT jobId mkNextJob timeout job =
join . atomically $ do
throttled <- readTVar throttledT
case throttled of
[] -> return tryJob -- full speed ahead.
first:_ | first == jobId ->
-- we are first in line
return (untilSuccess 0 timeout job `finally` pop)
_ ->
-- we must wait
retry
where
tryJob = do
result <- job
case result of
Ok val -> return val
HitLimit limitResponse -> do
atomically $ modifyTVar throttledT (++ [jobId])
uncurry (performJob policy throttledT jobId mkNextJob) $
mkNextJob limitResponse
untilSuccess collisions timeout' job' = do
case timeout' of
Nothing -> threadDelay (time initialDelayMilliseconds collisions)
Just milliseconds -> threadDelay (1000 * milliseconds)
result <- job'
case result of
Ok val -> return val
HitLimit limitResponse ->
uncurry (untilSuccess $ newExponent collisions)
(mkNextJob limitResponse)
newExponent collisions
| collisions >= maxExponent = collisions
-- don't go crazy with the backoff exponent.
| otherwise = collisions + 1
pop = atomically $ modifyTVar throttledT (delete jobId)
| SumAll/haskell-rate-limited-io | src/Control/Concurrent/RateLimitedIO.hs | apache-2.0 | 6,052 | 0 | 17 | 1,531 | 1,030 | 537 | 493 | 101 | 6 |
module FizzBuzz where
divisibleBy :: Int -> Int -> Bool
divisibleBy x y = x `mod` y == 0
fizz :: Int -> Bool
fizz x = x `divisibleBy` 3
buzz :: Int -> Bool
buzz x = x `divisibleBy` 5
fizzBuzzer :: Int -> String
fizzBuzzer x
  | fizz x && buzz x = "FizzBuzz"
  | fizz x           = "Fizz"
  | buzz x           = "Buzz"
  | otherwise        = show x
| arthurms/tw-coding-dojo | src/FizzBuzz.hs | apache-2.0 | 357 | 0 | 8 | 101 | 143 | 78 | 65 | 16 | 4 |
module Staircase.A282573Spec (main, spec) where
import Test.Hspec
import Staircase.A282573 (a282573)
main :: IO ()
main = hspec spec
spec :: Spec
spec = describe "A282573" $
it "correctly computes the first 20 elements" $
take 20 (map a282573 [1..]) `shouldBe` expectedValue where
expectedValue = [1,3,4,7,10,12,13,19,20,23,26,32,33,39,40,41,46,53,57,56]
| peterokagey/haskellOEIS | test/Staircase/A282573Spec.hs | apache-2.0 | 369 | 0 | 10 | 59 | 160 | 95 | 65 | 10 | 1 |
-- Copyright 2018 Google LLC
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- https://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
{-# LANGUAGE LambdaCase #-}
-- | Deals with google3 build and deployment issues.
module Google.Runtime.BuildHelper (
findRunfilesDir,
findRunfilesDirOrDie,
findHostDependency,
findHostDependencyOrDie,
) where
import Control.Exception (throwIO, ErrorCall(..))
import Control.Monad.IO.Class (liftIO)
import Control.Monad (guard, msum, mzero)
import Control.Monad.Trans.Maybe (MaybeT(..))
import Data.List (elemIndices)
import System.Directory (doesDirectoryExist, getCurrentDirectory)
import System.Environment.FindBin(getProgPath)
import System.FilePath
( (</>)
, dropTrailingPathSeparator
, joinPath
, splitDirectories
, takeDirectory
, takeFileName
)
import Google.Runtime.FindFiles (findSrcDir, findParentRunfilesDir)
-- | Get the runfiles directory for the current binary. Checks common places
-- where host dependencies live in different environments.
findRunfilesDir :: IO (Maybe FilePath)
findRunfilesDir = runMaybeT $ do
binary <- liftIO getProgPath
let candidates :: [MaybeT IO FilePath]
candidates = [
-- Nominal case of prebuilt binary with runfiles directory.
return $ binary ++ ".runfiles" </> "google3",
-- Rarer case of prebuilt binary which itself lives in a runfiles
-- directory.
MaybeT $ return $ (</> "google3") <$> findParentRunfilesDir binary,
-- Development fallback for ghci.
(</> "bazel-bin/") <$> MaybeT findSrcDir,
-- Walk up to google3/ starting from current binary location.
-- Use splitDirectories to remove the trailing slash from each
-- component; we don't care about eliding multiple slashes in a row.
do parts <- splitDirectories . (</> binary)
<$> liftIO getCurrentDirectory
let matches = elemIndices "google3" parts
case matches of
[] -> mzero
_ -> return $ joinPath (take (last matches + 1) parts)
]
result <- dropTrailingPathSeparator <$> msum (map checkExists candidates)
-- We drop the final "google3" in order to be consistent with other
-- languages.
-- But first, sanity check that it's actually there.
guard (takeFileName result == "google3")
return $ takeDirectory result
checkExists :: MaybeT IO FilePath -> MaybeT IO FilePath
checkExists action = do
path <- action
liftIO (doesDirectoryExist path) >>= guard
return path
-- | A version of findRunfilesDirOrDie that throws an error if the directory
-- cannot be found.
findRunfilesDirOrDie :: IO FilePath
findRunfilesDirOrDie = findRunfilesDir >>= \case
-- Use throwIO to raise an error more eagerly.
Nothing -> throwIO (ErrorCall "Unable to locate runfiles dir")
Just f -> return f
-- | Resolves the given host dependency stem (starting from google3) to a full
-- path. Checks common places where host dependencies live in different
-- environments.
--
-- The result is equivalent to @runfiles </> "google3" </> f@, where @runfiles@
-- is the result of calling 'findRunfilesDir' and 'f' is the input to this
-- function.
findHostDependency :: FilePath -> IO (Maybe FilePath)
findHostDependency f = fmap (fmap (</> "google3" </> f)) findRunfilesDir
-- | A version of 'findHostDependency' that throws an error if the file is not
-- found.
findHostDependencyOrDie :: FilePath -> IO FilePath
findHostDependencyOrDie f = fmap (</> "google3" </> f) findRunfilesDirOrDie
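-- A hedged usage sketch; the data-file path below is a hypothetical example:
--
-- > printDataFile :: IO ()
-- > printDataFile = do
-- >   path <- findHostDependencyOrDie "some/package/data/config.txt"
-- >   readFile path >>= putStr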
| google/cabal2bazel | src/Google/Runtime/BuildHelper.hs | apache-2.0 | 4,077 | 0 | 23 | 871 | 616 | 346 | 270 | 51 | 2 |
{-# Language OverloadedStrings, LambdaCase, RecordWildCards #-}
module Main where
import Control.Concurrent
import Control.Exception.Safe
import Control.Monad
import Control.Monad.IO.Class
import Data.ByteArray (ScrubbedBytes)
import Data.Time.Clock
import Data.Time.Clock.POSIX
import Data.UUID (UUID)
import System.Console.Haskeline hiding (bracket)
import System.CredentialStore
import System.Directory
import System.Exit
import System.FileLock
import System.FilePath
import System.FilePath.Glob
import System.Log.Logger
import System.Random (randomIO)
import qualified Data.ByteString as BS
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import PrivateCloud.Aws.Provider
import PrivateCloud.Cloud.Action
import PrivateCloud.Cloud.EventLog
import PrivateCloud.Cloud.Monad as Cloud
import PrivateCloud.Provider.Class
import Options
appLoggerName :: String
appLoggerName = "PrivateCloud.App"
main :: IO ()
main = do
options <- getOptions
print options
run options
encodeUtf :: String -> BS.ByteString
encodeUtf = T.encodeUtf8 . T.pack
genRandomId :: IO T.Text
genRandomId = fmap (T.pack . show) (randomIO :: IO UUID)
whileNothing :: Monad m => m (Maybe b) -> m b
whileNothing prompt = do
resp <- prompt
case resp of
Just v -> pure v
Nothing -> whileNothing prompt
run :: Options -> IO ()
run Create{cloudId = cloudid, ..} = do
dbExists <- doesPathExist (root </> dbName)
when dbExists $ do
putStrLn $ "Local database already exists at " ++ root
++ ", can't create new cloud instance"
exitFailure
instanceId <- if null cloudid then genRandomId else pure (T.pack cloudid)
userId <- genRandomId
putStrLn $ "Creating cloud instance " ++ T.unpack instanceId ++ " with user " ++ T.unpack userId
(rootKeyId, rootSecretKey) <-
runInputT (defaultSettings { autoAddHistory = False }) $ do
keyid <- if null adminKeyId
then whileNothing $ getInputLine "Admin AccessKeyId: "
else pure adminKeyId
secret <- if null adminSecretKey
then whileNothing $ getPassword (Just '*') "Admin SecretKey: "
else pure adminSecretKey
pure (encodeUtf keyid, encodeUtf secret)
createDirectoryIfMissing True root
(uniqueId, credentials) <- setupAwsPrivateCloud root instanceId userId rootKeyId rootSecretKey
withCredentialStore $ \store ->
let credName = "privatecloud-" ++ T.unpack uniqueId
in putCredential store credName (credentials :: ScrubbedBytes)
run Connect{cloudId = cloudid, ..} = do
dbExists <- doesPathExist (root </> dbName)
when dbExists $ do
putStrLn $ "Local database already exists at " ++ root
++ ", can't create new cloud instance"
exitFailure
(instanceId, rootKeyId, rootSecretKey) <-
runInputT (defaultSettings { autoAddHistory = False }) $ do
instanceid <- if null cloudid
then whileNothing $ getInputLine "Cloud instance: "
else pure cloudid
keyid <- if null adminKeyId
then whileNothing $ getInputLine "Admin AccessKeyId: "
else pure adminKeyId
secret <- if null adminSecretKey
then whileNothing $ getPassword (Just '*') "Admin SecretKey: "
else pure adminSecretKey
pure (instanceid, encodeUtf keyid, encodeUtf secret)
userId <- genRandomId
putStrLn $ "Connecting to cloud instance " ++ instanceId ++ " as user " ++ T.unpack userId
createDirectoryIfMissing True root
(uniqueId, credentials) <- connectAwsPrivateCloud root (T.pack instanceId) userId rootKeyId rootSecretKey
withCredentialStore $ \store ->
let credName = "privatecloud-" ++ T.unpack uniqueId
in putCredential store credName (credentials :: ScrubbedBytes)
run Run{..} = do
let fullSyncDelay = fromIntegral (fullSyncInterval * 60)
let cleanupDelay = fromIntegral (cleanupInterval * 60)
let lockName = ".privatecloud.lock"
-- GUI can register custom handler for eventLoggerName and display notifications
-- Here we just write everything to stderr
updateGlobalLogger eventLoggerName (setLevel NOTICE)
-- Also print debugging loggers at level requested
updateGlobalLogger "PrivateCloud" (setLevel loglevel)
let conflictPattern = "*" ++ conflictSuffix
patterns <- forM (conflictPattern : lockName : dbName : exclPatterns) $ \pat -> do
case simplify <$> tryCompileWith compPosix pat of
Left errmsg -> do
logEventError $ "INVALID_PATTERN #pattern " ++ show pat
++ " #msg " ++ errmsg
exitFailure
Right pattern -> do
infoM appLoggerName $ "#EXCLUSION #pattern " ++ show pat
pure pattern
let getCred uniqueId =
let credName = "privatecloud-" ++ T.unpack uniqueId
in withCredentialStore $ \store ->
getCredential store credName :: IO (Maybe ScrubbedBytes)
let lockOrDie = tryLockFile (root </> lockName) Exclusive >>= \case
Nothing -> do
putStrLn $ "Service already running in " ++ root
exitFailure
Just lock -> pure lock
bracket lockOrDie unlockFile $ const $ runAwsPrivateCloud root patterns getCred $ do
instanceId <- Cloud.cloudId
logEventNotice $ "START #root " ++ root ++ " #instance " ++ T.unpack instanceId
let loop lastFullSyncTime lastCleanupTime = do
(lfst, lct) <- catchAny (step lastFullSyncTime lastCleanupTime) $
\e -> do
logEventError $ "#EXCEPTION #msg " ++ show e
-- Keep full sync time, but delay database cleanup.
-- This way after cloud outage all clients will not
-- try to do database cleanup at once.
currentTime <- liftIO getCurrentTime
pure (lastFullSyncTime, currentTime)
liftIO $ threadDelay (1000000 * fromIntegral syncInterval)
loop lfst lct
step lastFullSyncTime lastCleanupTime = do
currentTime <- liftIO getCurrentTime
let sinceLastFullSync = diffUTCTime currentTime lastFullSyncTime
lfst <- if sinceLastFullSync > fullSyncDelay
then do
syncAllChanges
pure currentTime
else do
liftIO $ noticeM appLoggerName $ "#TIMER #tillNextFullSync " ++ show (fullSyncDelay - sinceLastFullSync)
syncRecentChanges
pure lastFullSyncTime
let sinceLastCleanup = diffUTCTime currentTime lastCleanupTime
lct <- if sinceLastCleanup > cleanupDelay
then do
runAction cleanupCloud
pure currentTime
else do
liftIO $ noticeM appLoggerName $ "#TIMER #tillNextCleanup " ++ show (cleanupDelay - sinceLastCleanup)
pure lastCleanupTime
pure (lfst, lct)
-- Force first sync to be full, but delay cleanup.
startTime <- liftIO getCurrentTime
loop (posixSecondsToUTCTime 0) startTime
| rblaze/private-cloud | app/Main.hs | apache-2.0 | 7,470 | 16 | 20 | 2,236 | 1,725 | 854 | 871 | 152 | 11 |
{-# OPTIONS -fno-warn-type-defaults #-}
{-| Constants contains the Haskell constants
The constants in this module are used in Haskell and are also
converted to Python.
Do not write any definitions in this file other than constants. Do
not even write helper functions. The definitions in this module are
automatically stripped to build the Makefile.am target
'ListConstants.hs'. If there are helper functions in this module,
they will also be dragged in, and that will cause compilation to fail.
Therefore, all helper functions should go to a separate module and be
imported from there.
-}
{-
Copyright (C) 2013, 2014 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.Constants where
import Control.Arrow ((***),(&&&))
import Data.List ((\\))
import Data.Map (Map)
import qualified Data.Map as Map (empty, fromList, keys, insert)
import qualified AutoConf
import Ganeti.ConstantUtils (PythonChar(..), FrozenSet, Protocol(..),
buildVersion)
import qualified Ganeti.ConstantUtils as ConstantUtils
import qualified Ganeti.HTools.Types as Types
import Ganeti.HTools.Types (AutoRepairResult(..), AutoRepairType(..))
import Ganeti.Logging (SyslogUsage(..))
import qualified Ganeti.Logging as Logging (syslogUsageToRaw)
import qualified Ganeti.Runtime as Runtime
import Ganeti.Runtime (GanetiDaemon(..), MiscGroup(..), GanetiGroup(..),
ExtraLogReason(..))
import Ganeti.THH (PyValueEx(..))
import Ganeti.Types
import qualified Ganeti.Types as Types
import Ganeti.Confd.Types (ConfdRequestType(..), ConfdReqField(..),
ConfdReplyStatus(..), ConfdNodeRole(..),
ConfdErrorType(..))
import qualified Ganeti.Confd.Types as Types
{-# ANN module "HLint: ignore Use camelCase" #-}
-- * 'autoconf' constants for Python only ('autotools/build-bash-completion')
htoolsProgs :: [String]
htoolsProgs = AutoConf.htoolsProgs
-- * 'autoconf' constants for Python only ('lib/constants.py')
drbdBarriers :: String
drbdBarriers = AutoConf.drbdBarriers
drbdNoMetaFlush :: Bool
drbdNoMetaFlush = AutoConf.drbdNoMetaFlush
lvmStripecount :: Int
lvmStripecount = AutoConf.lvmStripecount
hasGnuLn :: Bool
hasGnuLn = AutoConf.hasGnuLn
-- * 'autoconf' constants for Python only ('lib/pathutils.py')
-- ** Build-time constants
exportDir :: String
exportDir = AutoConf.exportDir
backupDir :: String
backupDir = AutoConf.backupDir
osSearchPath :: [String]
osSearchPath = AutoConf.osSearchPath
esSearchPath :: [String]
esSearchPath = AutoConf.esSearchPath
sshConfigDir :: String
sshConfigDir = AutoConf.sshConfigDir
xenConfigDir :: String
xenConfigDir = AutoConf.xenConfigDir
sysconfdir :: String
sysconfdir = AutoConf.sysconfdir
toolsdir :: String
toolsdir = AutoConf.toolsdir
localstatedir :: String
localstatedir = AutoConf.localstatedir
-- ** Paths which don't change for a virtual cluster
pkglibdir :: String
pkglibdir = AutoConf.pkglibdir
sharedir :: String
sharedir = AutoConf.sharedir
-- * 'autoconf' constants for Python only ('lib/build/sphinx_ext.py')
manPages :: Map String Int
manPages = Map.fromList AutoConf.manPages
-- * 'autoconf' constants for QA cluster only ('qa/qa_cluster.py')
versionedsharedir :: String
versionedsharedir = AutoConf.versionedsharedir
-- * 'autoconf' constants for Python only ('tests/py/docs_unittest.py')
gntScripts :: [String]
gntScripts = AutoConf.gntScripts
-- * Various versions
releaseVersion :: String
releaseVersion = AutoConf.packageVersion
versionMajor :: Int
versionMajor = AutoConf.versionMajor
versionMinor :: Int
versionMinor = AutoConf.versionMinor
versionRevision :: Int
versionRevision = AutoConf.versionRevision
dirVersion :: String
dirVersion = AutoConf.dirVersion
osApiV10 :: Int
osApiV10 = 10
osApiV15 :: Int
osApiV15 = 15
osApiV20 :: Int
osApiV20 = 20
osApiVersions :: FrozenSet Int
osApiVersions = ConstantUtils.mkSet [osApiV10, osApiV15, osApiV20]
-- | The version of the backup/export instance description file format we are
-- producing when exporting and accepting when importing. The two are currently
-- tightly intertwined.
exportVersion :: Int
exportVersion = 0
rapiVersion :: Int
rapiVersion = 2
configMajor :: Int
configMajor = AutoConf.versionMajor
configMinor :: Int
configMinor = AutoConf.versionMinor
-- | The configuration is supposed to remain stable across
-- revisions. Therefore, the revision number is cleared to '0'.
configRevision :: Int
configRevision = 0
configVersion :: Int
configVersion = buildVersion configMajor configMinor configRevision
-- | Similarly to the configuration (see 'configRevision'), the
-- protocols are supposed to remain stable across revisions.
protocolVersion :: Int
protocolVersion = buildVersion configMajor configMinor configRevision
-- * User separation
daemonsGroup :: String
daemonsGroup = Runtime.daemonGroup (ExtraGroup DaemonsGroup)
adminGroup :: String
adminGroup = Runtime.daemonGroup (ExtraGroup AdminGroup)
masterdUser :: String
masterdUser = Runtime.daemonUser GanetiMasterd
masterdGroup :: String
masterdGroup = Runtime.daemonGroup (DaemonGroup GanetiMasterd)
metadUser :: String
metadUser = Runtime.daemonUser GanetiMetad
metadGroup :: String
metadGroup = Runtime.daemonGroup (DaemonGroup GanetiMetad)
rapiUser :: String
rapiUser = Runtime.daemonUser GanetiRapi
rapiGroup :: String
rapiGroup = Runtime.daemonGroup (DaemonGroup GanetiRapi)
confdUser :: String
confdUser = Runtime.daemonUser GanetiConfd
confdGroup :: String
confdGroup = Runtime.daemonGroup (DaemonGroup GanetiConfd)
wconfdUser :: String
wconfdUser = Runtime.daemonUser GanetiWConfd
wconfdGroup :: String
wconfdGroup = Runtime.daemonGroup (DaemonGroup GanetiWConfd)
kvmdUser :: String
kvmdUser = Runtime.daemonUser GanetiKvmd
kvmdGroup :: String
kvmdGroup = Runtime.daemonGroup (DaemonGroup GanetiKvmd)
luxidUser :: String
luxidUser = Runtime.daemonUser GanetiLuxid
luxidGroup :: String
luxidGroup = Runtime.daemonGroup (DaemonGroup GanetiLuxid)
nodedUser :: String
nodedUser = Runtime.daemonUser GanetiNoded
nodedGroup :: String
nodedGroup = Runtime.daemonGroup (DaemonGroup GanetiNoded)
mondUser :: String
mondUser = Runtime.daemonUser GanetiMond
mondGroup :: String
mondGroup = Runtime.daemonGroup (DaemonGroup GanetiMond)
sshLoginUser :: String
sshLoginUser = AutoConf.sshLoginUser
sshConsoleUser :: String
sshConsoleUser = AutoConf.sshConsoleUser
-- * Cpu pinning separators and constants
cpuPinningSep :: String
cpuPinningSep = ":"
cpuPinningAll :: String
cpuPinningAll = "all"
-- | Internal representation of "all"
cpuPinningAllVal :: Int
cpuPinningAllVal = -1
-- | One "all" entry in a CPU list means CPU pinning is off
cpuPinningOff :: [Int]
cpuPinningOff = [cpuPinningAllVal]
-- | A Xen-specific implementation detail is that there is no way to
-- actually say "use any cpu for pinning" in a Xen configuration file,
-- as opposed to the command line, where you can say
-- @
-- xm vcpu-pin <domain> <vcpu> all
-- @
--
-- The workaround used in Xen is "0-63" (see source code function
-- "xm_vcpu_pin" in @<xen-source>/tools/python/xen/xm/main.py@).
--
-- To support future changes, the following constant is treated as a
-- blackbox string that simply means "use any cpu for pinning under
-- xen".
cpuPinningAllXen :: String
cpuPinningAllXen = "0-63"
-- * Image and wipe
ddCmd :: String
ddCmd = "dd"
-- | 1 MiB
-- The default block size for the 'dd' command
ddBlockSize :: Int
ddBlockSize = 1024^2
-- | 1GB
maxWipeChunk :: Int
maxWipeChunk = 1024
minWipeChunkPercent :: Int
minWipeChunkPercent = 10
-- * Directories
runDirsMode :: Int
runDirsMode = 0o775
secureDirMode :: Int
secureDirMode = 0o700
secureFileMode :: Int
secureFileMode = 0o600
adoptableBlockdevRoot :: String
adoptableBlockdevRoot = "/dev/disk/"
-- * 'autoconf' enable/disable
enableConfd :: Bool
enableConfd = AutoConf.enableConfd
enableMond :: Bool
enableMond = AutoConf.enableMond
enableMetad :: Bool
enableMetad = AutoConf.enableMetad
enableRestrictedCommands :: Bool
enableRestrictedCommands = AutoConf.enableRestrictedCommands
-- * SSH constants
ssh :: String
ssh = "ssh"
scp :: String
scp = "scp"
-- * Daemons
confd :: String
confd = Runtime.daemonName GanetiConfd
masterd :: String
masterd = Runtime.daemonName GanetiMasterd
metad :: String
metad = Runtime.daemonName GanetiMetad
mond :: String
mond = Runtime.daemonName GanetiMond
noded :: String
noded = Runtime.daemonName GanetiNoded
wconfd :: String
wconfd = Runtime.daemonName GanetiWConfd
luxid :: String
luxid = Runtime.daemonName GanetiLuxid
rapi :: String
rapi = Runtime.daemonName GanetiRapi
kvmd :: String
kvmd = Runtime.daemonName GanetiKvmd
daemons :: FrozenSet String
daemons =
ConstantUtils.mkSet [confd,
luxid,
masterd,
mond,
noded,
rapi]
defaultConfdPort :: Int
defaultConfdPort = 1814
defaultMondPort :: Int
defaultMondPort = 1815
defaultMetadPort :: Int
defaultMetadPort = 80
defaultNodedPort :: Int
defaultNodedPort = 1811
defaultRapiPort :: Int
defaultRapiPort = 5080
daemonsPorts :: Map String (Protocol, Int)
daemonsPorts =
Map.fromList
[ (confd, (Udp, defaultConfdPort))
, (metad, (Tcp, defaultMetadPort))
, (mond, (Tcp, defaultMondPort))
, (noded, (Tcp, defaultNodedPort))
, (rapi, (Tcp, defaultRapiPort))
, (ssh, (Tcp, 22))
]
firstDrbdPort :: Int
firstDrbdPort = 11000
lastDrbdPort :: Int
lastDrbdPort = 14999
daemonsLogbase :: Map String String
daemonsLogbase =
Map.fromList
[ (Runtime.daemonName d, Runtime.daemonLogBase d) | d <- [minBound..] ]
daemonsExtraLogbase :: Map String (Map String String)
daemonsExtraLogbase =
Map.fromList $
map (Runtime.daemonName *** id)
[ (GanetiMond, Map.fromList
[ ("access", Runtime.daemonsExtraLogbase GanetiMond AccessLog)
, ("error", Runtime.daemonsExtraLogbase GanetiMond ErrorLog)
])
]
extraLogreasonAccess :: String
extraLogreasonAccess = Runtime.daemonsExtraLogbase GanetiMond AccessLog
extraLogreasonError :: String
extraLogreasonError = Runtime.daemonsExtraLogbase GanetiMond ErrorLog
devConsole :: String
devConsole = ConstantUtils.devConsole
procMounts :: String
procMounts = "/proc/mounts"
-- * Luxi (Local UniX Interface) related constants
luxiEom :: PythonChar
luxiEom = PythonChar '\x03'
-- | Environment variable for the luxi override socket
luxiOverride :: String
luxiOverride = "FORCE_LUXI_SOCKET"
luxiOverrideMaster :: String
luxiOverrideMaster = "master"
luxiOverrideQuery :: String
luxiOverrideQuery = "query"
luxiVersion :: Int
luxiVersion = configVersion
-- * Syslog
syslogUsage :: String
syslogUsage = AutoConf.syslogUsage
syslogNo :: String
syslogNo = Logging.syslogUsageToRaw SyslogNo
syslogYes :: String
syslogYes = Logging.syslogUsageToRaw SyslogYes
syslogOnly :: String
syslogOnly = Logging.syslogUsageToRaw SyslogOnly
syslogSocket :: String
syslogSocket = "/dev/log"
exportConfFile :: String
exportConfFile = "config.ini"
-- * Xen
xenBootloader :: String
xenBootloader = AutoConf.xenBootloader
xenCmdXl :: String
xenCmdXl = "xl"
xenCmdXm :: String
xenCmdXm = "xm"
xenInitrd :: String
xenInitrd = AutoConf.xenInitrd
xenKernel :: String
xenKernel = AutoConf.xenKernel
-- FIXME: perhaps rename to 'validXenCommands' for consistency with
-- other constants
knownXenCommands :: FrozenSet String
knownXenCommands = ConstantUtils.mkSet [xenCmdXl, xenCmdXm]
-- * KVM and socat
kvmPath :: String
kvmPath = AutoConf.kvmPath
kvmKernel :: String
kvmKernel = AutoConf.kvmKernel
socatEscapeCode :: String
socatEscapeCode = "0x1d"
socatPath :: String
socatPath = AutoConf.socatPath
socatUseCompress :: Bool
socatUseCompress = AutoConf.socatUseCompress
socatUseEscape :: Bool
socatUseEscape = AutoConf.socatUseEscape
-- * LXC
-- If you are trying to change the value of these default constants, you also
-- need to edit the default value declaration in man/gnt-instance.rst.
lxcDevicesDefault :: String
lxcDevicesDefault =
"c 1:3 rw" -- /dev/null
++ ",c 1:5 rw" -- /dev/zero
++ ",c 1:7 rw" -- /dev/full
++ ",c 1:8 rw" -- /dev/random
++ ",c 1:9 rw" -- /dev/urandom
++ ",c 1:10 rw" -- /dev/aio
++ ",c 5:0 rw" -- /dev/tty
++ ",c 5:1 rw" -- /dev/console
++ ",c 5:2 rw" -- /dev/ptmx
++ ",c 136:* rw" -- first block of Unix98 PTY slaves
lxcDropCapabilitiesDefault :: String
lxcDropCapabilitiesDefault =
"mac_override" -- Allow MAC configuration or state changes
++ ",sys_boot" -- Use reboot(2) and kexec_load(2)
++ ",sys_module" -- Load and unload kernel modules
++ ",sys_time" -- Set system clock, set real-time (hardware) clock
lxcStateRunning :: String
lxcStateRunning = "RUNNING"
-- * Console types
-- | Display a message for console access
consMessage :: String
consMessage = "msg"
-- | Console as SPICE server
consSpice :: String
consSpice = "spice"
-- | Console as SSH command
consSsh :: String
consSsh = "ssh"
-- | Console as VNC server
consVnc :: String
consVnc = "vnc"
consAll :: FrozenSet String
consAll = ConstantUtils.mkSet [consMessage, consSpice, consSsh, consVnc]
-- | RSA key bit length
--
-- For RSA keys more bits are better, but they also make operations
-- more expensive. NIST SP 800-131 recommends a minimum of 2048 bits
-- from the year 2010 on.
rsaKeyBits :: Int
rsaKeyBits = 2048
-- | Ciphers allowed for SSL connections.
--
-- For the format, see ciphers(1). A better way to disable ciphers
-- would be to use the exclamation mark (!), but socat versions below
-- 1.5 can't parse exclamation marks in options properly. When
-- modifying the ciphers, ensure not to accidentally add something
-- after it's been removed. Use the "openssl" utility to check the
-- allowed ciphers, e.g. "openssl ciphers -v HIGH:-DES".
opensslCiphers :: String
opensslCiphers = "HIGH:-DES:-3DES:-EXPORT:-ADH"
-- * X509
-- | commonName (CN) used in certificates
x509CertCn :: String
x509CertCn = "ganeti.example.com"
-- | Default validity of certificates in days
x509CertDefaultValidity :: Int
x509CertDefaultValidity = 365 * 5
x509CertSignatureHeader :: String
x509CertSignatureHeader = "X-Ganeti-Signature"
-- | Digest used to sign certificates ("openssl x509" uses SHA1 by default)
x509CertSignDigest :: String
x509CertSignDigest = "SHA1"
-- * Import/export daemon mode
iemExport :: String
iemExport = "export"
iemImport :: String
iemImport = "import"
-- * Import/export transport compression
iecGzip :: String
iecGzip = "gzip"
iecGzipFast :: String
iecGzipFast = "gzip-fast"
iecGzipSlow :: String
iecGzipSlow = "gzip-slow"
iecLzop :: String
iecLzop = "lzop"
iecNone :: String
iecNone = "none"
iecAll :: [String]
iecAll = [iecGzip, iecGzipFast, iecGzipSlow, iecLzop, iecNone]
iecDefaultTools :: [String]
iecDefaultTools = [iecGzip, iecGzipFast, iecGzipSlow]
iecCompressionUtilities :: Map String String
iecCompressionUtilities =
Map.fromList
[ (iecGzipFast, iecGzip)
, (iecGzipSlow, iecGzip)
]
ieCustomSize :: String
ieCustomSize = "fd"
-- * Import/export I/O
-- | Direct file I/O, equivalent to a shell's I/O redirection using
-- '<' or '>'
ieioFile :: String
ieioFile = "file"
-- | Raw block device I/O using "dd"
ieioRawDisk :: String
ieioRawDisk = "raw"
-- | OS definition import/export script
ieioScript :: String
ieioScript = "script"
-- * Values
valueDefault :: String
valueDefault = "default"
valueAuto :: String
valueAuto = "auto"
valueGenerate :: String
valueGenerate = "generate"
valueNone :: String
valueNone = "none"
valueTrue :: String
valueTrue = "true"
valueFalse :: String
valueFalse = "false"
-- * Hooks
hooksNameCfgupdate :: String
hooksNameCfgupdate = "config-update"
hooksNameWatcher :: String
hooksNameWatcher = "watcher"
hooksPath :: String
hooksPath = "/sbin:/bin:/usr/sbin:/usr/bin"
hooksPhasePost :: String
hooksPhasePost = "post"
hooksPhasePre :: String
hooksPhasePre = "pre"
hooksVersion :: Int
hooksVersion = 2
-- * Hooks subject type (what object type does the LU deal with)
htypeCluster :: String
htypeCluster = "CLUSTER"
htypeGroup :: String
htypeGroup = "GROUP"
htypeInstance :: String
htypeInstance = "INSTANCE"
htypeNetwork :: String
htypeNetwork = "NETWORK"
htypeNode :: String
htypeNode = "NODE"
-- * Hkr
hkrSkip :: Int
hkrSkip = 0
hkrFail :: Int
hkrFail = 1
hkrSuccess :: Int
hkrSuccess = 2
-- * Storage types
stBlock :: String
stBlock = Types.storageTypeToRaw StorageBlock
stDiskless :: String
stDiskless = Types.storageTypeToRaw StorageDiskless
stExt :: String
stExt = Types.storageTypeToRaw StorageExt
stFile :: String
stFile = Types.storageTypeToRaw StorageFile
stSharedFile :: String
stSharedFile = Types.storageTypeToRaw StorageSharedFile
stGluster :: String
stGluster = Types.storageTypeToRaw StorageGluster
stLvmPv :: String
stLvmPv = Types.storageTypeToRaw StorageLvmPv
stLvmVg :: String
stLvmVg = Types.storageTypeToRaw StorageLvmVg
stRados :: String
stRados = Types.storageTypeToRaw StorageRados
storageTypes :: FrozenSet String
storageTypes = ConstantUtils.mkSet $ map Types.storageTypeToRaw [minBound..]
-- | The set of storage types for which full storage reporting is available
stsReport :: FrozenSet String
stsReport = ConstantUtils.mkSet [stFile, stLvmPv, stLvmVg]
-- | The set of storage types for which node storage reporting is available
-- (as used by LUQueryNodeStorage)
stsReportNodeStorage :: FrozenSet String
stsReportNodeStorage = ConstantUtils.union stsReport $
ConstantUtils.mkSet [ stSharedFile
, stGluster
]
-- * Storage fields
-- ** First two are valid in LU context only, not passed to backend
sfNode :: String
sfNode = "node"
sfType :: String
sfType = "type"
-- ** and the rest are valid in backend
sfAllocatable :: String
sfAllocatable = Types.storageFieldToRaw SFAllocatable
sfFree :: String
sfFree = Types.storageFieldToRaw SFFree
sfName :: String
sfName = Types.storageFieldToRaw SFName
sfSize :: String
sfSize = Types.storageFieldToRaw SFSize
sfUsed :: String
sfUsed = Types.storageFieldToRaw SFUsed
validStorageFields :: FrozenSet String
validStorageFields =
ConstantUtils.mkSet $ map Types.storageFieldToRaw [minBound..] ++
[sfNode, sfType]
modifiableStorageFields :: Map String (FrozenSet String)
modifiableStorageFields =
Map.fromList [(Types.storageTypeToRaw StorageLvmPv,
ConstantUtils.mkSet [sfAllocatable])]
-- * Storage operations
soFixConsistency :: String
soFixConsistency = "fix-consistency"
validStorageOperations :: Map String (FrozenSet String)
validStorageOperations =
Map.fromList [(Types.storageTypeToRaw StorageLvmVg,
ConstantUtils.mkSet [soFixConsistency])]
-- * Volume fields
vfDev :: String
vfDev = "dev"
vfInstance :: String
vfInstance = "instance"
vfName :: String
vfName = "name"
vfNode :: String
vfNode = "node"
vfPhys :: String
vfPhys = "phys"
vfSize :: String
vfSize = "size"
vfVg :: String
vfVg = "vg"
-- * Local disk status
ldsFaulty :: Int
ldsFaulty = Types.localDiskStatusToRaw DiskStatusFaulty
ldsOkay :: Int
ldsOkay = Types.localDiskStatusToRaw DiskStatusOk
ldsUnknown :: Int
ldsUnknown = Types.localDiskStatusToRaw DiskStatusUnknown
ldsNames :: Map Int String
ldsNames =
Map.fromList [ (Types.localDiskStatusToRaw ds,
localDiskStatusName ds) | ds <- [minBound..] ]
-- * Disk template types
dtDiskless :: String
dtDiskless = Types.diskTemplateToRaw DTDiskless
dtFile :: String
dtFile = Types.diskTemplateToRaw DTFile
dtSharedFile :: String
dtSharedFile = Types.diskTemplateToRaw DTSharedFile
dtPlain :: String
dtPlain = Types.diskTemplateToRaw DTPlain
dtBlock :: String
dtBlock = Types.diskTemplateToRaw DTBlock
dtDrbd8 :: String
dtDrbd8 = Types.diskTemplateToRaw DTDrbd8
dtRbd :: String
dtRbd = Types.diskTemplateToRaw DTRbd
dtExt :: String
dtExt = Types.diskTemplateToRaw DTExt
dtGluster :: String
dtGluster = Types.diskTemplateToRaw DTGluster
-- | This ordering is used to determine the default disk template when
-- the list of enabled disk templates is inferred from the current
-- state of the cluster. This only happens on an upgrade from a
-- version of Ganeti that did not yet support
-- 'enabled_disk_templates'.
diskTemplatePreference :: [String]
diskTemplatePreference =
map Types.diskTemplateToRaw
[DTBlock, DTDiskless, DTDrbd8, DTExt, DTFile,
DTPlain, DTRbd, DTSharedFile, DTGluster]
diskTemplates :: FrozenSet String
diskTemplates = ConstantUtils.mkSet $ map Types.diskTemplateToRaw [minBound..]
-- | Disk templates that are enabled by default
defaultEnabledDiskTemplates :: [String]
defaultEnabledDiskTemplates = map Types.diskTemplateToRaw [DTDrbd8, DTPlain]
-- | Mapping of disk templates to storage types
mapDiskTemplateStorageType :: Map String String
mapDiskTemplateStorageType =
Map.fromList $
map ( Types.diskTemplateToRaw
&&& Types.storageTypeToRaw . diskTemplateToStorageType)
[minBound..maxBound]
-- | The set of network-mirrored disk templates
dtsIntMirror :: FrozenSet String
dtsIntMirror = ConstantUtils.mkSet [dtDrbd8]
-- | 'DTDiskless' is 'trivially' externally mirrored
dtsExtMirror :: FrozenSet String
dtsExtMirror =
ConstantUtils.mkSet $
map Types.diskTemplateToRaw
[DTDiskless, DTBlock, DTExt, DTSharedFile, DTRbd, DTGluster]
-- | The set of non-lvm-based disk templates
dtsNotLvm :: FrozenSet String
dtsNotLvm =
ConstantUtils.mkSet $
map Types.diskTemplateToRaw
[DTSharedFile, DTDiskless, DTBlock, DTExt, DTFile, DTRbd, DTGluster]
-- | The set of disk templates which can be grown
dtsGrowable :: FrozenSet String
dtsGrowable =
ConstantUtils.mkSet $
map Types.diskTemplateToRaw
[DTSharedFile, DTDrbd8, DTPlain, DTExt, DTFile, DTRbd, DTGluster]
-- | The set of disk templates that allow adoption
dtsMayAdopt :: FrozenSet String
dtsMayAdopt =
ConstantUtils.mkSet $ map Types.diskTemplateToRaw [DTBlock, DTPlain]
-- | The set of disk templates that *must* use adoption
dtsMustAdopt :: FrozenSet String
dtsMustAdopt = ConstantUtils.mkSet [Types.diskTemplateToRaw DTBlock]
-- | The set of disk templates that allow migrations
dtsMirrored :: FrozenSet String
dtsMirrored = dtsIntMirror `ConstantUtils.union` dtsExtMirror
-- | The set of file based disk templates
dtsFilebased :: FrozenSet String
dtsFilebased =
ConstantUtils.mkSet $ map Types.diskTemplateToRaw
[DTSharedFile, DTFile, DTGluster]
-- | The set of disk templates that can be moved by copying
--
-- Note: a requirement is that they're not accessed externally or
-- shared between nodes; in particular, sharedfile is not suitable.
dtsCopyable :: FrozenSet String
dtsCopyable =
ConstantUtils.mkSet $ map Types.diskTemplateToRaw [DTPlain, DTFile]
-- | The set of disk templates that are supported by exclusive_storage
dtsExclStorage :: FrozenSet String
dtsExclStorage = ConstantUtils.mkSet $ map Types.diskTemplateToRaw [DTPlain]
-- | Templates for which we don't perform checks on free space
dtsNoFreeSpaceCheck :: FrozenSet String
dtsNoFreeSpaceCheck =
ConstantUtils.mkSet $
map Types.diskTemplateToRaw [DTExt, DTSharedFile, DTFile, DTRbd, DTGluster]
dtsBlock :: FrozenSet String
dtsBlock =
ConstantUtils.mkSet $
map Types.diskTemplateToRaw [DTPlain, DTDrbd8, DTBlock, DTRbd, DTExt]
-- | The set of lvm-based disk templates
dtsLvm :: FrozenSet String
dtsLvm = diskTemplates `ConstantUtils.difference` dtsNotLvm
-- | The set of lvm-based disk templates
dtsHaveAccess :: FrozenSet String
dtsHaveAccess = ConstantUtils.mkSet $
map Types.diskTemplateToRaw [DTRbd, DTGluster, DTExt]
-- | The set of disk templates that cannot convert from
dtsNotConvertibleFrom :: FrozenSet String
dtsNotConvertibleFrom =
ConstantUtils.mkSet $
map Types.diskTemplateToRaw [DTDiskless]
-- | The set of disk templates that cannot convert to
dtsNotConvertibleTo :: FrozenSet String
dtsNotConvertibleTo =
ConstantUtils.mkSet $
map Types.diskTemplateToRaw [DTDiskless, DTBlock]
-- * Drbd
drbdHmacAlg :: String
drbdHmacAlg = "md5"
drbdDefaultNetProtocol :: String
drbdDefaultNetProtocol = "C"
drbdMigrationNetProtocol :: String
drbdMigrationNetProtocol = "C"
drbdStatusFile :: String
drbdStatusFile = "/proc/drbd"
-- | The length of generated DRBD secrets (see also TempRes module).
drbdSecretLength :: Int
drbdSecretLength = 20
-- | Size of DRBD meta block device
drbdMetaSize :: Int
drbdMetaSize = 128
-- * Drbd barrier types
drbdBDiskBarriers :: String
drbdBDiskBarriers = "b"
drbdBDiskDrain :: String
drbdBDiskDrain = "d"
drbdBDiskFlush :: String
drbdBDiskFlush = "f"
drbdBNone :: String
drbdBNone = "n"
-- | Valid barrier combinations: "n" or any non-null subset of "bfd"
drbdValidBarrierOpt :: FrozenSet (FrozenSet String)
drbdValidBarrierOpt =
ConstantUtils.mkSet
[ ConstantUtils.mkSet [drbdBNone]
, ConstantUtils.mkSet [drbdBDiskBarriers]
, ConstantUtils.mkSet [drbdBDiskDrain]
, ConstantUtils.mkSet [drbdBDiskFlush]
, ConstantUtils.mkSet [drbdBDiskDrain, drbdBDiskFlush]
, ConstantUtils.mkSet [drbdBDiskBarriers, drbdBDiskDrain]
, ConstantUtils.mkSet [drbdBDiskBarriers, drbdBDiskFlush]
, ConstantUtils.mkSet [drbdBDiskBarriers, drbdBDiskFlush, drbdBDiskDrain]
]
-- | Rbd tool command
rbdCmd :: String
rbdCmd = "rbd"
-- * File backend driver
fdBlktap :: String
fdBlktap = Types.fileDriverToRaw FileBlktap
fdBlktap2 :: String
fdBlktap2 = Types.fileDriverToRaw FileBlktap2
fdLoop :: String
fdLoop = Types.fileDriverToRaw FileLoop
fdDefault :: String
fdDefault = fdLoop
fileDriver :: FrozenSet String
fileDriver =
ConstantUtils.mkSet $
map Types.fileDriverToRaw [minBound..]
-- | The set of drbd-like disk types
dtsDrbd :: FrozenSet String
dtsDrbd = ConstantUtils.mkSet [Types.diskTemplateToRaw DTDrbd8]
-- * Disk access mode
diskRdonly :: String
diskRdonly = Types.diskModeToRaw DiskRdOnly
diskRdwr :: String
diskRdwr = Types.diskModeToRaw DiskRdWr
diskAccessSet :: FrozenSet String
diskAccessSet = ConstantUtils.mkSet $ map Types.diskModeToRaw [minBound..]
-- * Disk replacement mode
replaceDiskAuto :: String
replaceDiskAuto = Types.replaceDisksModeToRaw ReplaceAuto
replaceDiskChg :: String
replaceDiskChg = Types.replaceDisksModeToRaw ReplaceNewSecondary
replaceDiskPri :: String
replaceDiskPri = Types.replaceDisksModeToRaw ReplaceOnPrimary
replaceDiskSec :: String
replaceDiskSec = Types.replaceDisksModeToRaw ReplaceOnSecondary
replaceModes :: FrozenSet String
replaceModes =
ConstantUtils.mkSet $ map Types.replaceDisksModeToRaw [minBound..]
-- * Instance export mode
exportModeLocal :: String
exportModeLocal = Types.exportModeToRaw ExportModeLocal
exportModeRemote :: String
exportModeRemote = Types.exportModeToRaw ExportModeRemote
exportModes :: FrozenSet String
exportModes = ConstantUtils.mkSet $ map Types.exportModeToRaw [minBound..]
-- * Instance creation modes
instanceCreate :: String
instanceCreate = Types.instCreateModeToRaw InstCreate
instanceImport :: String
instanceImport = Types.instCreateModeToRaw InstImport
instanceRemoteImport :: String
instanceRemoteImport = Types.instCreateModeToRaw InstRemoteImport
instanceCreateModes :: FrozenSet String
instanceCreateModes =
ConstantUtils.mkSet $ map Types.instCreateModeToRaw [minBound..]
-- * Remote import/export handshake message and version
rieHandshake :: String
rieHandshake = "Hi, I'm Ganeti"
rieVersion :: Int
rieVersion = 0
-- | Remote import/export certificate validity (seconds)
rieCertValidity :: Int
rieCertValidity = 24 * 60 * 60
-- | Export only: how long to wait per connection attempt (seconds)
rieConnectAttemptTimeout :: Int
rieConnectAttemptTimeout = 20
-- | Export only: number of attempts to connect
rieConnectRetries :: Int
rieConnectRetries = 10
-- | Overall timeout for establishing connection
rieConnectTimeout :: Int
rieConnectTimeout = 180
-- | Give child process up to 5 seconds to exit after sending a signal
childLingerTimeout :: Double
childLingerTimeout = 5.0
-- * Import/export config options
inisectBep :: String
inisectBep = "backend"
inisectExp :: String
inisectExp = "export"
inisectHyp :: String
inisectHyp = "hypervisor"
inisectIns :: String
inisectIns = "instance"
inisectOsp :: String
inisectOsp = "os"
inisectOspPrivate :: String
inisectOspPrivate = "os_private"
-- * Dynamic device modification
ddmAdd :: String
ddmAdd = Types.ddmFullToRaw DdmFullAdd
ddmModify :: String
ddmModify = Types.ddmFullToRaw DdmFullModify
ddmRemove :: String
ddmRemove = Types.ddmFullToRaw DdmFullRemove
ddmsValues :: FrozenSet String
ddmsValues = ConstantUtils.mkSet [ddmAdd, ddmRemove]
ddmsValuesWithModify :: FrozenSet String
ddmsValuesWithModify = ConstantUtils.mkSet $ map Types.ddmFullToRaw [minBound..]
-- * Common exit codes
exitSuccess :: Int
exitSuccess = 0
exitFailure :: Int
exitFailure = ConstantUtils.exitFailure
exitNotcluster :: Int
exitNotcluster = 5
exitNotmaster :: Int
exitNotmaster = 11
exitNodesetupError :: Int
exitNodesetupError = 12
-- | Need user confirmation
exitConfirmation :: Int
exitConfirmation = 13
-- | Exit code for query operations with unknown fields
exitUnknownField :: Int
exitUnknownField = 14
-- * Tags
tagCluster :: String
tagCluster = Types.tagKindToRaw TagKindCluster
tagInstance :: String
tagInstance = Types.tagKindToRaw TagKindInstance
tagNetwork :: String
tagNetwork = Types.tagKindToRaw TagKindNetwork
tagNode :: String
tagNode = Types.tagKindToRaw TagKindNode
tagNodegroup :: String
tagNodegroup = Types.tagKindToRaw TagKindGroup
validTagTypes :: FrozenSet String
validTagTypes = ConstantUtils.mkSet $ map Types.tagKindToRaw [minBound..]
maxTagLen :: Int
maxTagLen = 128
maxTagsPerObj :: Int
maxTagsPerObj = 4096
-- * Others
defaultBridge :: String
defaultBridge = AutoConf.defaultBridge
defaultOvs :: String
defaultOvs = "switch1"
-- | 60 MiB/s, expressed in KiB/s
classicDrbdSyncSpeed :: Int
classicDrbdSyncSpeed = 60 * 1024
ip4AddressAny :: String
ip4AddressAny = "0.0.0.0"
ip4AddressLocalhost :: String
ip4AddressLocalhost = "127.0.0.1"
ip6AddressAny :: String
ip6AddressAny = "::"
ip6AddressLocalhost :: String
ip6AddressLocalhost = "::1"
ip4Version :: Int
ip4Version = 4
ip6Version :: Int
ip6Version = 6
validIpVersions :: FrozenSet Int
validIpVersions = ConstantUtils.mkSet [ip4Version, ip6Version]
tcpPingTimeout :: Int
tcpPingTimeout = 10
defaultVg :: String
defaultVg = AutoConf.defaultVg
defaultDrbdHelper :: String
defaultDrbdHelper = "/bin/true"
minVgSize :: Int
minVgSize = 20480
defaultMacPrefix :: String
defaultMacPrefix = "aa:00:00"
-- | Default maximum instance wait time (seconds)
defaultShutdownTimeout :: Int
defaultShutdownTimeout = 120
-- | Node clock skew (seconds)
nodeMaxClockSkew :: Int
nodeMaxClockSkew = 150
-- | Time for an intra-cluster disk transfer to wait for a connection
diskTransferConnectTimeout :: Int
diskTransferConnectTimeout = 60
-- | Disk index separator
diskSeparator :: String
diskSeparator = AutoConf.diskSeparator
ipCommandPath :: String
ipCommandPath = AutoConf.ipPath
-- | Key for job IDs in opcode result
jobIdsKey :: String
jobIdsKey = "jobs"
-- * Runparts results
runpartsErr :: Int
runpartsErr = 2
runpartsRun :: Int
runpartsRun = 1
runpartsSkip :: Int
runpartsSkip = 0
runpartsStatus :: [Int]
runpartsStatus = [runpartsErr, runpartsRun, runpartsSkip]
-- * RPC
rpcEncodingNone :: Int
rpcEncodingNone = 0
rpcEncodingZlibBase64 :: Int
rpcEncodingZlibBase64 = 1
-- * Timeout table
--
-- Various time constants for the timeout table
rpcTmoUrgent :: Int
rpcTmoUrgent = Types.rpcTimeoutToRaw Urgent
rpcTmoFast :: Int
rpcTmoFast = Types.rpcTimeoutToRaw Fast
rpcTmoNormal :: Int
rpcTmoNormal = Types.rpcTimeoutToRaw Normal
rpcTmoSlow :: Int
rpcTmoSlow = Types.rpcTimeoutToRaw Slow
-- | 'rpcTmo_4hrs' contains an underscore to circumvent a limitation
-- in the 'Ganeti.THH.deCamelCase' function and generate the correct
-- Python name.
rpcTmo_4hrs :: Int
rpcTmo_4hrs = Types.rpcTimeoutToRaw FourHours
-- | 'rpcTmo_1day' contains an underscore to circumvent a limitation
-- in the 'Ganeti.THH.deCamelCase' function and generate the correct
-- Python name.
rpcTmo_1day :: Int
rpcTmo_1day = Types.rpcTimeoutToRaw OneDay
-- | Timeout for connecting to nodes (seconds)
rpcConnectTimeout :: Int
rpcConnectTimeout = 5
-- OS
osScriptCreate :: String
osScriptCreate = "create"
osScriptCreateUntrusted :: String
osScriptCreateUntrusted = "create_untrusted"
osScriptExport :: String
osScriptExport = "export"
osScriptImport :: String
osScriptImport = "import"
osScriptRename :: String
osScriptRename = "rename"
osScriptVerify :: String
osScriptVerify = "verify"
osScripts :: [String]
osScripts = [osScriptCreate, osScriptCreateUntrusted, osScriptExport,
osScriptImport, osScriptRename, osScriptVerify]
osApiFile :: String
osApiFile = "ganeti_api_version"
osVariantsFile :: String
osVariantsFile = "variants.list"
osParametersFile :: String
osParametersFile = "parameters.list"
osValidateParameters :: String
osValidateParameters = "parameters"
osValidateCalls :: FrozenSet String
osValidateCalls = ConstantUtils.mkSet [osValidateParameters]
-- | External Storage (ES) related constants
esActionAttach :: String
esActionAttach = "attach"
esActionCreate :: String
esActionCreate = "create"
esActionDetach :: String
esActionDetach = "detach"
esActionGrow :: String
esActionGrow = "grow"
esActionRemove :: String
esActionRemove = "remove"
esActionSetinfo :: String
esActionSetinfo = "setinfo"
esActionVerify :: String
esActionVerify = "verify"
esActionSnapshot :: String
esActionSnapshot = "snapshot"
esScriptCreate :: String
esScriptCreate = esActionCreate
esScriptRemove :: String
esScriptRemove = esActionRemove
esScriptGrow :: String
esScriptGrow = esActionGrow
esScriptAttach :: String
esScriptAttach = esActionAttach
esScriptDetach :: String
esScriptDetach = esActionDetach
esScriptSetinfo :: String
esScriptSetinfo = esActionSetinfo
esScriptVerify :: String
esScriptVerify = esActionVerify
esScriptSnapshot :: String
esScriptSnapshot = esActionSnapshot
esScripts :: FrozenSet String
esScripts =
ConstantUtils.mkSet [esScriptAttach,
esScriptCreate,
esScriptDetach,
esScriptGrow,
esScriptRemove,
esScriptSetinfo,
esScriptVerify,
esScriptSnapshot]
esParametersFile :: String
esParametersFile = "parameters.list"
-- * Reboot types
instanceRebootSoft :: String
instanceRebootSoft = Types.rebootTypeToRaw RebootSoft
instanceRebootHard :: String
instanceRebootHard = Types.rebootTypeToRaw RebootHard
instanceRebootFull :: String
instanceRebootFull = Types.rebootTypeToRaw RebootFull
rebootTypes :: FrozenSet String
rebootTypes = ConstantUtils.mkSet $ map Types.rebootTypeToRaw [minBound..]
-- * Instance reboot behaviors
instanceRebootAllowed :: String
instanceRebootAllowed = "reboot"
instanceRebootExit :: String
instanceRebootExit = "exit"
rebootBehaviors :: [String]
rebootBehaviors = [instanceRebootAllowed, instanceRebootExit]
-- * VTypes
vtypeBool :: VType
vtypeBool = VTypeBool
vtypeInt :: VType
vtypeInt = VTypeInt
vtypeFloat :: VType
vtypeFloat = VTypeFloat
vtypeMaybeString :: VType
vtypeMaybeString = VTypeMaybeString
-- | Size in MiBs
vtypeSize :: VType
vtypeSize = VTypeSize
vtypeString :: VType
vtypeString = VTypeString
enforceableTypes :: FrozenSet VType
enforceableTypes = ConstantUtils.mkSet [minBound..]
-- | Constant representing that the user does not specify any IP version
ifaceNoIpVersionSpecified :: Int
ifaceNoIpVersionSpecified = 0
validSerialSpeeds :: [Int]
validSerialSpeeds =
[75,
110,
300,
600,
1200,
1800,
2400,
4800,
9600,
14400,
19200,
28800,
38400,
57600,
115200,
230400,
345600,
460800]
-- * HV parameter names (global namespace)
hvAcpi :: String
hvAcpi = "acpi"
hvBlockdevPrefix :: String
hvBlockdevPrefix = "blockdev_prefix"
hvBootloaderArgs :: String
hvBootloaderArgs = "bootloader_args"
hvBootloaderPath :: String
hvBootloaderPath = "bootloader_path"
hvBootOrder :: String
hvBootOrder = "boot_order"
hvCdromImagePath :: String
hvCdromImagePath = "cdrom_image_path"
hvCpuCap :: String
hvCpuCap = "cpu_cap"
hvCpuCores :: String
hvCpuCores = "cpu_cores"
hvCpuMask :: String
hvCpuMask = "cpu_mask"
hvCpuSockets :: String
hvCpuSockets = "cpu_sockets"
hvCpuThreads :: String
hvCpuThreads = "cpu_threads"
hvCpuType :: String
hvCpuType = "cpu_type"
hvCpuWeight :: String
hvCpuWeight = "cpu_weight"
hvDeviceModel :: String
hvDeviceModel = "device_model"
hvDiskCache :: String
hvDiskCache = "disk_cache"
hvDiskType :: String
hvDiskType = "disk_type"
hvInitrdPath :: String
hvInitrdPath = "initrd_path"
hvInitScript :: String
hvInitScript = "init_script"
hvKernelArgs :: String
hvKernelArgs = "kernel_args"
hvKernelPath :: String
hvKernelPath = "kernel_path"
hvKeymap :: String
hvKeymap = "keymap"
hvKvmCdrom2ImagePath :: String
hvKvmCdrom2ImagePath = "cdrom2_image_path"
hvKvmCdromDiskType :: String
hvKvmCdromDiskType = "cdrom_disk_type"
hvKvmExtra :: String
hvKvmExtra = "kvm_extra"
hvKvmFlag :: String
hvKvmFlag = "kvm_flag"
hvKvmFloppyImagePath :: String
hvKvmFloppyImagePath = "floppy_image_path"
hvKvmMachineVersion :: String
hvKvmMachineVersion = "machine_version"
hvKvmMigrationCaps :: String
hvKvmMigrationCaps = "migration_caps"
hvKvmPath :: String
hvKvmPath = "kvm_path"
hvKvmDiskAio :: String
hvKvmDiskAio = "disk_aio"
hvKvmSpiceAudioCompr :: String
hvKvmSpiceAudioCompr = "spice_playback_compression"
hvKvmSpiceBind :: String
hvKvmSpiceBind = "spice_bind"
hvKvmSpiceIpVersion :: String
hvKvmSpiceIpVersion = "spice_ip_version"
hvKvmSpiceJpegImgCompr :: String
hvKvmSpiceJpegImgCompr = "spice_jpeg_wan_compression"
hvKvmSpiceLosslessImgCompr :: String
hvKvmSpiceLosslessImgCompr = "spice_image_compression"
hvKvmSpicePasswordFile :: String
hvKvmSpicePasswordFile = "spice_password_file"
hvKvmSpiceStreamingVideoDetection :: String
hvKvmSpiceStreamingVideoDetection = "spice_streaming_video"
hvKvmSpiceTlsCiphers :: String
hvKvmSpiceTlsCiphers = "spice_tls_ciphers"
hvKvmSpiceUseTls :: String
hvKvmSpiceUseTls = "spice_use_tls"
hvKvmSpiceUseVdagent :: String
hvKvmSpiceUseVdagent = "spice_use_vdagent"
hvKvmSpiceZlibGlzImgCompr :: String
hvKvmSpiceZlibGlzImgCompr = "spice_zlib_glz_wan_compression"
hvKvmUseChroot :: String
hvKvmUseChroot = "use_chroot"
hvKvmUserShutdown :: String
hvKvmUserShutdown = "user_shutdown"
hvLxcStartupWait :: String
hvLxcStartupWait = "lxc_startup_wait"
hvLxcCgroupUse :: String
hvLxcCgroupUse = "lxc_cgroup_use"
hvLxcDevices :: String
hvLxcDevices = "lxc_devices"
hvLxcDropCapabilities :: String
hvLxcDropCapabilities = "lxc_drop_capabilities"
hvLxcExtraConfig :: String
hvLxcExtraConfig = "lxc_extra_config"
hvLxcTty :: String
hvLxcTty = "lxc_tty"
hvMemPath :: String
hvMemPath = "mem_path"
hvMigrationBandwidth :: String
hvMigrationBandwidth = "migration_bandwidth"
hvMigrationDowntime :: String
hvMigrationDowntime = "migration_downtime"
hvMigrationMode :: String
hvMigrationMode = "migration_mode"
hvMigrationPort :: String
hvMigrationPort = "migration_port"
hvNicType :: String
hvNicType = "nic_type"
hvPae :: String
hvPae = "pae"
hvPassthrough :: String
hvPassthrough = "pci_pass"
hvRebootBehavior :: String
hvRebootBehavior = "reboot_behavior"
hvRootPath :: String
hvRootPath = "root_path"
hvSecurityDomain :: String
hvSecurityDomain = "security_domain"
hvSecurityModel :: String
hvSecurityModel = "security_model"
hvSerialConsole :: String
hvSerialConsole = "serial_console"
hvSerialSpeed :: String
hvSerialSpeed = "serial_speed"
hvSoundhw :: String
hvSoundhw = "soundhw"
hvUsbDevices :: String
hvUsbDevices = "usb_devices"
hvUsbMouse :: String
hvUsbMouse = "usb_mouse"
hvUseBootloader :: String
hvUseBootloader = "use_bootloader"
hvUseLocaltime :: String
hvUseLocaltime = "use_localtime"
hvVga :: String
hvVga = "vga"
hvVhostNet :: String
hvVhostNet = "vhost_net"
hvVirtioNetQueues :: String
hvVirtioNetQueues = "virtio_net_queues"
hvVifScript :: String
hvVifScript = "vif_script"
hvVifType :: String
hvVifType = "vif_type"
hvViridian :: String
hvViridian = "viridian"
hvVncBindAddress :: String
hvVncBindAddress = "vnc_bind_address"
hvVncPasswordFile :: String
hvVncPasswordFile = "vnc_password_file"
hvVncTls :: String
hvVncTls = "vnc_tls"
hvVncX509 :: String
hvVncX509 = "vnc_x509_path"
hvVncX509Verify :: String
hvVncX509Verify = "vnc_x509_verify"
hvVnetHdr :: String
hvVnetHdr = "vnet_hdr"
hvXenCmd :: String
hvXenCmd = "xen_cmd"
hvXenCpuid :: String
hvXenCpuid = "cpuid"
hvsParameterTitles :: Map String String
hvsParameterTitles =
Map.fromList
[(hvAcpi, "ACPI"),
(hvBootOrder, "Boot_order"),
(hvCdromImagePath, "CDROM_image_path"),
(hvCpuType, "cpu_type"),
(hvDiskType, "Disk_type"),
(hvInitrdPath, "Initrd_path"),
(hvKernelPath, "Kernel_path"),
(hvNicType, "NIC_type"),
(hvPae, "PAE"),
(hvPassthrough, "pci_pass"),
(hvVncBindAddress, "VNC_bind_address")]
hvsParameters :: FrozenSet String
hvsParameters = ConstantUtils.mkSet $ Map.keys hvsParameterTypes
hvsParameterTypes :: Map String VType
hvsParameterTypes = Map.fromList
[ (hvAcpi, VTypeBool)
, (hvBlockdevPrefix, VTypeString)
, (hvBootloaderArgs, VTypeString)
, (hvBootloaderPath, VTypeString)
, (hvBootOrder, VTypeString)
, (hvCdromImagePath, VTypeString)
, (hvCpuCap, VTypeInt)
, (hvCpuCores, VTypeInt)
, (hvCpuMask, VTypeString)
, (hvCpuSockets, VTypeInt)
, (hvCpuThreads, VTypeInt)
, (hvCpuType, VTypeString)
, (hvCpuWeight, VTypeInt)
, (hvDeviceModel, VTypeString)
, (hvDiskCache, VTypeString)
, (hvDiskType, VTypeString)
, (hvInitrdPath, VTypeString)
, (hvInitScript, VTypeString)
, (hvKernelArgs, VTypeString)
, (hvKernelPath, VTypeString)
, (hvKeymap, VTypeString)
, (hvKvmCdrom2ImagePath, VTypeString)
, (hvKvmCdromDiskType, VTypeString)
, (hvKvmExtra, VTypeString)
, (hvKvmFlag, VTypeString)
, (hvKvmFloppyImagePath, VTypeString)
, (hvKvmMachineVersion, VTypeString)
, (hvKvmMigrationCaps, VTypeString)
, (hvKvmPath, VTypeString)
, (hvKvmDiskAio, VTypeString)
, (hvKvmSpiceAudioCompr, VTypeBool)
, (hvKvmSpiceBind, VTypeString)
, (hvKvmSpiceIpVersion, VTypeInt)
, (hvKvmSpiceJpegImgCompr, VTypeString)
, (hvKvmSpiceLosslessImgCompr, VTypeString)
, (hvKvmSpicePasswordFile, VTypeString)
, (hvKvmSpiceStreamingVideoDetection, VTypeString)
, (hvKvmSpiceTlsCiphers, VTypeString)
, (hvKvmSpiceUseTls, VTypeBool)
, (hvKvmSpiceUseVdagent, VTypeBool)
, (hvKvmSpiceZlibGlzImgCompr, VTypeString)
, (hvKvmUseChroot, VTypeBool)
, (hvKvmUserShutdown, VTypeBool)
, (hvLxcCgroupUse, VTypeString)
, (hvLxcDevices, VTypeString)
, (hvLxcDropCapabilities, VTypeString)
, (hvLxcExtraConfig, VTypeString)
, (hvLxcTty, VTypeInt)
, (hvLxcStartupWait, VTypeInt)
, (hvMemPath, VTypeString)
, (hvMigrationBandwidth, VTypeInt)
, (hvMigrationDowntime, VTypeInt)
, (hvMigrationMode, VTypeString)
, (hvMigrationPort, VTypeInt)
, (hvNicType, VTypeString)
, (hvPae, VTypeBool)
, (hvPassthrough, VTypeString)
, (hvRebootBehavior, VTypeString)
, (hvRootPath, VTypeMaybeString)
, (hvSecurityDomain, VTypeString)
, (hvSecurityModel, VTypeString)
, (hvSerialConsole, VTypeBool)
, (hvSerialSpeed, VTypeInt)
, (hvSoundhw, VTypeString)
, (hvUsbDevices, VTypeString)
, (hvUsbMouse, VTypeString)
, (hvUseBootloader, VTypeBool)
, (hvUseLocaltime, VTypeBool)
, (hvVga, VTypeString)
, (hvVhostNet, VTypeBool)
, (hvVirtioNetQueues, VTypeInt)
, (hvVifScript, VTypeString)
, (hvVifType, VTypeString)
, (hvViridian, VTypeBool)
, (hvVncBindAddress, VTypeString)
, (hvVncPasswordFile, VTypeString)
, (hvVncTls, VTypeBool)
, (hvVncX509, VTypeString)
, (hvVncX509Verify, VTypeBool)
, (hvVnetHdr, VTypeBool)
, (hvXenCmd, VTypeString)
, (hvXenCpuid, VTypeString)
]
-- * Migration statuses
hvMigrationActive :: String
hvMigrationActive = "active"
hvMigrationCancelled :: String
hvMigrationCancelled = "cancelled"
hvMigrationCompleted :: String
hvMigrationCompleted = "completed"
hvMigrationFailed :: String
hvMigrationFailed = "failed"
hvMigrationValidStatuses :: FrozenSet String
hvMigrationValidStatuses =
ConstantUtils.mkSet [hvMigrationActive,
hvMigrationCancelled,
hvMigrationCompleted,
hvMigrationFailed]
hvMigrationFailedStatuses :: FrozenSet String
hvMigrationFailedStatuses =
ConstantUtils.mkSet [hvMigrationFailed, hvMigrationCancelled]
-- | KVM-specific statuses
--
-- FIXME: this constant seems unnecessary
hvKvmMigrationValidStatuses :: FrozenSet String
hvKvmMigrationValidStatuses = hvMigrationValidStatuses
-- | Node info keys
hvNodeinfoKeyVersion :: String
hvNodeinfoKeyVersion = "hv_version"
-- * Hypervisor state
hvstCpuNode :: String
hvstCpuNode = "cpu_node"
hvstCpuTotal :: String
hvstCpuTotal = "cpu_total"
hvstMemoryHv :: String
hvstMemoryHv = "mem_hv"
hvstMemoryNode :: String
hvstMemoryNode = "mem_node"
hvstMemoryTotal :: String
hvstMemoryTotal = "mem_total"
hvstsParameters :: FrozenSet String
hvstsParameters =
ConstantUtils.mkSet [hvstCpuNode,
hvstCpuTotal,
hvstMemoryHv,
hvstMemoryNode,
hvstMemoryTotal]
hvstDefaults :: Map String Int
hvstDefaults =
Map.fromList
[(hvstCpuNode, 1),
(hvstCpuTotal, 1),
(hvstMemoryHv, 0),
(hvstMemoryTotal, 0),
(hvstMemoryNode, 0)]
hvstsParameterTypes :: Map String VType
hvstsParameterTypes =
Map.fromList [(hvstMemoryTotal, VTypeInt),
(hvstMemoryNode, VTypeInt),
(hvstMemoryHv, VTypeInt),
(hvstCpuTotal, VTypeInt),
(hvstCpuNode, VTypeInt)]
-- * Disk state
dsDiskOverhead :: String
dsDiskOverhead = "disk_overhead"
dsDiskReserved :: String
dsDiskReserved = "disk_reserved"
dsDiskTotal :: String
dsDiskTotal = "disk_total"
dsDefaults :: Map String Int
dsDefaults =
Map.fromList
[(dsDiskTotal, 0),
(dsDiskReserved, 0),
(dsDiskOverhead, 0)]
dssParameterTypes :: Map String VType
dssParameterTypes =
Map.fromList [(dsDiskTotal, VTypeInt),
(dsDiskReserved, VTypeInt),
(dsDiskOverhead, VTypeInt)]
dssParameters :: FrozenSet String
dssParameters =
ConstantUtils.mkSet [dsDiskTotal, dsDiskReserved, dsDiskOverhead]
dsValidTypes :: FrozenSet String
dsValidTypes = ConstantUtils.mkSet [Types.diskTemplateToRaw DTPlain]
-- * Backend parameter names
beAlwaysFailover :: String
beAlwaysFailover = "always_failover"
beAutoBalance :: String
beAutoBalance = "auto_balance"
beMaxmem :: String
beMaxmem = "maxmem"
-- | Deprecated and replaced by max and min mem
beMemory :: String
beMemory = "memory"
beMinmem :: String
beMinmem = "minmem"
beSpindleUse :: String
beSpindleUse = "spindle_use"
beVcpus :: String
beVcpus = "vcpus"
besParameterTypes :: Map String VType
besParameterTypes =
Map.fromList [(beAlwaysFailover, VTypeBool),
(beAutoBalance, VTypeBool),
(beMaxmem, VTypeSize),
(beMinmem, VTypeSize),
(beSpindleUse, VTypeInt),
(beVcpus, VTypeInt)]
besParameterTitles :: Map String String
besParameterTitles =
Map.fromList [(beAutoBalance, "Auto_balance"),
(beMinmem, "ConfigMinMem"),
(beVcpus, "ConfigVCPUs"),
(beMaxmem, "ConfigMaxMem")]
besParameterCompat :: Map String VType
besParameterCompat = Map.insert beMemory VTypeSize besParameterTypes
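-- A minimal usage sketch (illustration only, not part of the upstream
-- API): 'besParameterCompat' is simply 'besParameterTypes' with the
-- deprecated 'beMemory' key added, so a GHCi check along these lines
-- should hold:
--
-- > Map.member beMemory besParameterCompat   -- True
-- > Map.member beMemory besParameterTypes    -- False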
besParameters :: FrozenSet String
besParameters =
ConstantUtils.mkSet [beAlwaysFailover,
beAutoBalance,
beMaxmem,
beMinmem,
beSpindleUse,
beVcpus]
-- | Instance specs
--
-- FIXME: these should be associated with 'Ganeti.HTools.Types.ISpec'
ispecMemSize :: String
ispecMemSize = ConstantUtils.ispecMemSize
ispecCpuCount :: String
ispecCpuCount = ConstantUtils.ispecCpuCount
ispecDiskCount :: String
ispecDiskCount = ConstantUtils.ispecDiskCount
ispecDiskSize :: String
ispecDiskSize = ConstantUtils.ispecDiskSize
ispecNicCount :: String
ispecNicCount = ConstantUtils.ispecNicCount
ispecSpindleUse :: String
ispecSpindleUse = ConstantUtils.ispecSpindleUse
ispecsParameterTypes :: Map String VType
ispecsParameterTypes =
Map.fromList
[(ConstantUtils.ispecDiskSize, VTypeInt),
(ConstantUtils.ispecCpuCount, VTypeInt),
(ConstantUtils.ispecSpindleUse, VTypeInt),
(ConstantUtils.ispecMemSize, VTypeInt),
(ConstantUtils.ispecNicCount, VTypeInt),
(ConstantUtils.ispecDiskCount, VTypeInt)]
ispecsParameters :: FrozenSet String
ispecsParameters =
ConstantUtils.mkSet [ConstantUtils.ispecCpuCount,
ConstantUtils.ispecDiskCount,
ConstantUtils.ispecDiskSize,
ConstantUtils.ispecMemSize,
ConstantUtils.ispecNicCount,
ConstantUtils.ispecSpindleUse]
ispecsMinmax :: String
ispecsMinmax = ConstantUtils.ispecsMinmax
ispecsMax :: String
ispecsMax = "max"
ispecsMin :: String
ispecsMin = "min"
ispecsStd :: String
ispecsStd = ConstantUtils.ispecsStd
ipolicyDts :: String
ipolicyDts = ConstantUtils.ipolicyDts
ipolicyVcpuRatio :: String
ipolicyVcpuRatio = ConstantUtils.ipolicyVcpuRatio
ipolicySpindleRatio :: String
ipolicySpindleRatio = ConstantUtils.ipolicySpindleRatio
ispecsMinmaxKeys :: FrozenSet String
ispecsMinmaxKeys = ConstantUtils.mkSet [ispecsMax, ispecsMin]
ipolicyParameters :: FrozenSet String
ipolicyParameters =
ConstantUtils.mkSet [ConstantUtils.ipolicyVcpuRatio,
ConstantUtils.ipolicySpindleRatio]
ipolicyAllKeys :: FrozenSet String
ipolicyAllKeys =
ConstantUtils.union ipolicyParameters $
ConstantUtils.mkSet [ConstantUtils.ipolicyDts,
ConstantUtils.ispecsMinmax,
ispecsStd]
-- | Node parameter names
ndExclusiveStorage :: String
ndExclusiveStorage = "exclusive_storage"
ndOobProgram :: String
ndOobProgram = "oob_program"
ndSpindleCount :: String
ndSpindleCount = "spindle_count"
ndOvs :: String
ndOvs = "ovs"
ndOvsLink :: String
ndOvsLink = "ovs_link"
ndOvsName :: String
ndOvsName = "ovs_name"
ndSshPort :: String
ndSshPort = "ssh_port"
ndCpuSpeed :: String
ndCpuSpeed = "cpu_speed"
ndsParameterTypes :: Map String VType
ndsParameterTypes =
Map.fromList
[(ndExclusiveStorage, VTypeBool),
(ndOobProgram, VTypeString),
(ndOvs, VTypeBool),
(ndOvsLink, VTypeMaybeString),
(ndOvsName, VTypeMaybeString),
(ndSpindleCount, VTypeInt),
(ndSshPort, VTypeInt),
(ndCpuSpeed, VTypeFloat)]
ndsParameters :: FrozenSet String
ndsParameters = ConstantUtils.mkSet (Map.keys ndsParameterTypes)
ndsParameterTitles :: Map String String
ndsParameterTitles =
Map.fromList
[(ndExclusiveStorage, "ExclusiveStorage"),
(ndOobProgram, "OutOfBandProgram"),
(ndOvs, "OpenvSwitch"),
(ndOvsLink, "OpenvSwitchLink"),
(ndOvsName, "OpenvSwitchName"),
(ndSpindleCount, "SpindleCount")]
-- * Logical disk parameters
ldpAccess :: String
ldpAccess = "access"
ldpBarriers :: String
ldpBarriers = "disabled-barriers"
ldpDefaultMetavg :: String
ldpDefaultMetavg = "default-metavg"
ldpDelayTarget :: String
ldpDelayTarget = "c-delay-target"
ldpDiskCustom :: String
ldpDiskCustom = "disk-custom"
ldpDynamicResync :: String
ldpDynamicResync = "dynamic-resync"
ldpFillTarget :: String
ldpFillTarget = "c-fill-target"
ldpMaxRate :: String
ldpMaxRate = "c-max-rate"
ldpMinRate :: String
ldpMinRate = "c-min-rate"
ldpNetCustom :: String
ldpNetCustom = "net-custom"
ldpNoMetaFlush :: String
ldpNoMetaFlush = "disable-meta-flush"
ldpPlanAhead :: String
ldpPlanAhead = "c-plan-ahead"
ldpPool :: String
ldpPool = "pool"
ldpProtocol :: String
ldpProtocol = "protocol"
ldpResyncRate :: String
ldpResyncRate = "resync-rate"
ldpStripes :: String
ldpStripes = "stripes"
diskLdTypes :: Map String VType
diskLdTypes =
Map.fromList
[(ldpAccess, VTypeString),
(ldpResyncRate, VTypeInt),
(ldpStripes, VTypeInt),
(ldpBarriers, VTypeString),
(ldpNoMetaFlush, VTypeBool),
(ldpDefaultMetavg, VTypeString),
(ldpDiskCustom, VTypeString),
(ldpNetCustom, VTypeString),
(ldpProtocol, VTypeString),
(ldpDynamicResync, VTypeBool),
(ldpPlanAhead, VTypeInt),
(ldpFillTarget, VTypeInt),
(ldpDelayTarget, VTypeInt),
(ldpMaxRate, VTypeInt),
(ldpMinRate, VTypeInt),
(ldpPool, VTypeString)]
diskLdParameters :: FrozenSet String
diskLdParameters = ConstantUtils.mkSet (Map.keys diskLdTypes)
-- * Disk template parameters
--
-- Disk template parameters can be set/changed by the user via
-- gnt-cluster and gnt-group.
drbdResyncRate :: String
drbdResyncRate = "resync-rate"
drbdDataStripes :: String
drbdDataStripes = "data-stripes"
drbdMetaStripes :: String
drbdMetaStripes = "meta-stripes"
drbdDiskBarriers :: String
drbdDiskBarriers = "disk-barriers"
drbdMetaBarriers :: String
drbdMetaBarriers = "meta-barriers"
drbdDefaultMetavg :: String
drbdDefaultMetavg = "metavg"
drbdDiskCustom :: String
drbdDiskCustom = "disk-custom"
drbdNetCustom :: String
drbdNetCustom = "net-custom"
drbdProtocol :: String
drbdProtocol = "protocol"
drbdDynamicResync :: String
drbdDynamicResync = "dynamic-resync"
drbdPlanAhead :: String
drbdPlanAhead = "c-plan-ahead"
drbdFillTarget :: String
drbdFillTarget = "c-fill-target"
drbdDelayTarget :: String
drbdDelayTarget = "c-delay-target"
drbdMaxRate :: String
drbdMaxRate = "c-max-rate"
drbdMinRate :: String
drbdMinRate = "c-min-rate"
lvStripes :: String
lvStripes = "stripes"
rbdAccess :: String
rbdAccess = "access"
rbdPool :: String
rbdPool = "pool"
diskDtTypes :: Map String VType
diskDtTypes =
Map.fromList [(drbdResyncRate, VTypeInt),
(drbdDataStripes, VTypeInt),
(drbdMetaStripes, VTypeInt),
(drbdDiskBarriers, VTypeString),
(drbdMetaBarriers, VTypeBool),
(drbdDefaultMetavg, VTypeString),
(drbdDiskCustom, VTypeString),
(drbdNetCustom, VTypeString),
(drbdProtocol, VTypeString),
(drbdDynamicResync, VTypeBool),
(drbdPlanAhead, VTypeInt),
(drbdFillTarget, VTypeInt),
(drbdDelayTarget, VTypeInt),
(drbdMaxRate, VTypeInt),
(drbdMinRate, VTypeInt),
(lvStripes, VTypeInt),
(rbdAccess, VTypeString),
(rbdPool, VTypeString),
(glusterHost, VTypeString),
(glusterVolume, VTypeString),
(glusterPort, VTypeInt)
]
diskDtParameters :: FrozenSet String
diskDtParameters = ConstantUtils.mkSet (Map.keys diskDtTypes)
-- * Dynamic disk parameters
ddpLocalIp :: String
ddpLocalIp = "local-ip"
ddpRemoteIp :: String
ddpRemoteIp = "remote-ip"
ddpPort :: String
ddpPort = "port"
ddpLocalMinor :: String
ddpLocalMinor = "local-minor"
ddpRemoteMinor :: String
ddpRemoteMinor = "remote-minor"
-- * OOB supported commands
oobPowerOn :: String
oobPowerOn = Types.oobCommandToRaw OobPowerOn
oobPowerOff :: String
oobPowerOff = Types.oobCommandToRaw OobPowerOff
oobPowerCycle :: String
oobPowerCycle = Types.oobCommandToRaw OobPowerCycle
oobPowerStatus :: String
oobPowerStatus = Types.oobCommandToRaw OobPowerStatus
oobHealth :: String
oobHealth = Types.oobCommandToRaw OobHealth
oobCommands :: FrozenSet String
oobCommands = ConstantUtils.mkSet $ map Types.oobCommandToRaw [minBound..]
oobPowerStatusPowered :: String
oobPowerStatusPowered = "powered"
-- | 60 seconds
oobTimeout :: Int
oobTimeout = 60
-- | 2 seconds
oobPowerDelay :: Double
oobPowerDelay = 2.0
oobStatusCritical :: String
oobStatusCritical = Types.oobStatusToRaw OobStatusCritical
oobStatusOk :: String
oobStatusOk = Types.oobStatusToRaw OobStatusOk
oobStatusUnknown :: String
oobStatusUnknown = Types.oobStatusToRaw OobStatusUnknown
oobStatusWarning :: String
oobStatusWarning = Types.oobStatusToRaw OobStatusWarning
oobStatuses :: FrozenSet String
oobStatuses = ConstantUtils.mkSet $ map Types.oobStatusToRaw [minBound..]
-- | Instance Parameters Profile
ppDefault :: String
ppDefault = "default"
-- * nic* constants are used inside the ganeti config
nicLink :: String
nicLink = "link"
nicMode :: String
nicMode = "mode"
nicVlan :: String
nicVlan = "vlan"
nicsParameterTypes :: Map String VType
nicsParameterTypes =
Map.fromList [(nicMode, vtypeString),
(nicLink, vtypeString),
(nicVlan, vtypeString)]
nicsParameters :: FrozenSet String
nicsParameters = ConstantUtils.mkSet (Map.keys nicsParameterTypes)
nicModeBridged :: String
nicModeBridged = Types.nICModeToRaw NMBridged
nicModeRouted :: String
nicModeRouted = Types.nICModeToRaw NMRouted
nicModeOvs :: String
nicModeOvs = Types.nICModeToRaw NMOvs
nicIpPool :: String
nicIpPool = Types.nICModeToRaw NMPool
nicValidModes :: FrozenSet String
nicValidModes = ConstantUtils.mkSet $ map Types.nICModeToRaw [minBound..]
releaseAction :: String
releaseAction = "release"
reserveAction :: String
reserveAction = "reserve"
-- * idisk* constants are used in opcodes, to create/change disks
idiskAdopt :: String
idiskAdopt = "adopt"
idiskMetavg :: String
idiskMetavg = "metavg"
idiskMode :: String
idiskMode = "mode"
idiskName :: String
idiskName = "name"
idiskSize :: String
idiskSize = "size"
idiskSpindles :: String
idiskSpindles = "spindles"
idiskVg :: String
idiskVg = "vg"
idiskProvider :: String
idiskProvider = "provider"
idiskAccess :: String
idiskAccess = "access"
idiskParamsTypes :: Map String VType
idiskParamsTypes =
Map.fromList [(idiskSize, VTypeSize),
(idiskSpindles, VTypeInt),
(idiskMode, VTypeString),
(idiskAdopt, VTypeString),
(idiskVg, VTypeString),
(idiskMetavg, VTypeString),
(idiskProvider, VTypeString),
(idiskAccess, VTypeString),
(idiskName, VTypeMaybeString)]
idiskParams :: FrozenSet String
idiskParams = ConstantUtils.mkSet (Map.keys idiskParamsTypes)
modifiableIdiskParamsTypes :: Map String VType
modifiableIdiskParamsTypes =
Map.fromList [(idiskMode, VTypeString),
(idiskName, VTypeString)]
modifiableIdiskParams :: FrozenSet String
modifiableIdiskParams =
ConstantUtils.mkSet (Map.keys modifiableIdiskParamsTypes)
-- * inic* constants are used in opcodes, to create/change nics
inicBridge :: String
inicBridge = "bridge"
inicIp :: String
inicIp = "ip"
inicLink :: String
inicLink = "link"
inicMac :: String
inicMac = "mac"
inicMode :: String
inicMode = "mode"
inicName :: String
inicName = "name"
inicNetwork :: String
inicNetwork = "network"
inicVlan :: String
inicVlan = "vlan"
inicParamsTypes :: Map String VType
inicParamsTypes =
Map.fromList [(inicBridge, VTypeMaybeString),
(inicIp, VTypeMaybeString),
(inicLink, VTypeString),
(inicMac, VTypeString),
(inicMode, VTypeString),
(inicName, VTypeMaybeString),
(inicNetwork, VTypeMaybeString),
(inicVlan, VTypeMaybeString)]
inicParams :: FrozenSet String
inicParams = ConstantUtils.mkSet (Map.keys inicParamsTypes)
-- * Hypervisor constants
htXenPvm :: String
htXenPvm = Types.hypervisorToRaw XenPvm
htFake :: String
htFake = Types.hypervisorToRaw Fake
htXenHvm :: String
htXenHvm = Types.hypervisorToRaw XenHvm
htKvm :: String
htKvm = Types.hypervisorToRaw Kvm
htChroot :: String
htChroot = Types.hypervisorToRaw Chroot
htLxc :: String
htLxc = Types.hypervisorToRaw Lxc
hyperTypes :: FrozenSet String
hyperTypes = ConstantUtils.mkSet $ map Types.hypervisorToRaw [minBound..]
htsReqPort :: FrozenSet String
htsReqPort = ConstantUtils.mkSet [htXenHvm, htKvm]
vncBasePort :: Int
vncBasePort = 5900
vncDefaultBindAddress :: String
vncDefaultBindAddress = ip4AddressAny
-- * NIC types
htNicE1000 :: String
htNicE1000 = "e1000"
htNicI82551 :: String
htNicI82551 = "i82551"
htNicI8259er :: String
htNicI8259er = "i82559er"
htNicI85557b :: String
htNicI85557b = "i82557b"
htNicNe2kIsa :: String
htNicNe2kIsa = "ne2k_isa"
htNicNe2kPci :: String
htNicNe2kPci = "ne2k_pci"
htNicParavirtual :: String
htNicParavirtual = "paravirtual"
htNicPcnet :: String
htNicPcnet = "pcnet"
htNicRtl8139 :: String
htNicRtl8139 = "rtl8139"
htHvmValidNicTypes :: FrozenSet String
htHvmValidNicTypes =
ConstantUtils.mkSet [htNicE1000,
htNicNe2kIsa,
htNicNe2kPci,
htNicParavirtual,
htNicRtl8139]
htKvmValidNicTypes :: FrozenSet String
htKvmValidNicTypes =
ConstantUtils.mkSet [htNicE1000,
htNicI82551,
htNicI8259er,
htNicI85557b,
htNicNe2kIsa,
htNicNe2kPci,
htNicParavirtual,
htNicPcnet,
htNicRtl8139]
-- * Vif types
-- | Default vif type in xen-hvm
htHvmVifIoemu :: String
htHvmVifIoemu = "ioemu"
htHvmVifVif :: String
htHvmVifVif = "vif"
htHvmValidVifTypes :: FrozenSet String
htHvmValidVifTypes = ConstantUtils.mkSet [htHvmVifIoemu, htHvmVifVif]
-- * Disk types
htDiskIde :: String
htDiskIde = "ide"
htDiskIoemu :: String
htDiskIoemu = "ioemu"
htDiskMtd :: String
htDiskMtd = "mtd"
htDiskParavirtual :: String
htDiskParavirtual = "paravirtual"
htDiskPflash :: String
htDiskPflash = "pflash"
htDiskScsi :: String
htDiskScsi = "scsi"
htDiskSd :: String
htDiskSd = "sd"
htHvmValidDiskTypes :: FrozenSet String
htHvmValidDiskTypes = ConstantUtils.mkSet [htDiskIoemu, htDiskParavirtual]
htKvmValidDiskTypes :: FrozenSet String
htKvmValidDiskTypes =
ConstantUtils.mkSet [htDiskIde,
htDiskMtd,
htDiskParavirtual,
htDiskPflash,
htDiskScsi,
htDiskSd]
htCacheDefault :: String
htCacheDefault = "default"
htCacheNone :: String
htCacheNone = "none"
htCacheWback :: String
htCacheWback = "writeback"
htCacheWthrough :: String
htCacheWthrough = "writethrough"
htValidCacheTypes :: FrozenSet String
htValidCacheTypes =
ConstantUtils.mkSet [htCacheDefault,
htCacheNone,
htCacheWback,
htCacheWthrough]
htKvmAioThreads :: String
htKvmAioThreads = "threads"
htKvmAioNative :: String
htKvmAioNative = "native"
htKvmValidAioTypes :: FrozenSet String
htKvmValidAioTypes =
ConstantUtils.mkSet [htKvmAioThreads,
htKvmAioNative]
-- * Mouse types
htMouseMouse :: String
htMouseMouse = "mouse"
htMouseTablet :: String
htMouseTablet = "tablet"
htKvmValidMouseTypes :: FrozenSet String
htKvmValidMouseTypes = ConstantUtils.mkSet [htMouseMouse, htMouseTablet]
-- * Boot order
htBoCdrom :: String
htBoCdrom = "cdrom"
htBoDisk :: String
htBoDisk = "disk"
htBoFloppy :: String
htBoFloppy = "floppy"
htBoNetwork :: String
htBoNetwork = "network"
htKvmValidBoTypes :: FrozenSet String
htKvmValidBoTypes =
ConstantUtils.mkSet [htBoCdrom, htBoDisk, htBoFloppy, htBoNetwork]
-- * SPICE lossless image compression options
htKvmSpiceLosslessImgComprAutoGlz :: String
htKvmSpiceLosslessImgComprAutoGlz = "auto_glz"
htKvmSpiceLosslessImgComprAutoLz :: String
htKvmSpiceLosslessImgComprAutoLz = "auto_lz"
htKvmSpiceLosslessImgComprGlz :: String
htKvmSpiceLosslessImgComprGlz = "glz"
htKvmSpiceLosslessImgComprLz :: String
htKvmSpiceLosslessImgComprLz = "lz"
htKvmSpiceLosslessImgComprOff :: String
htKvmSpiceLosslessImgComprOff = "off"
htKvmSpiceLosslessImgComprQuic :: String
htKvmSpiceLosslessImgComprQuic = "quic"
htKvmSpiceValidLosslessImgComprOptions :: FrozenSet String
htKvmSpiceValidLosslessImgComprOptions =
ConstantUtils.mkSet [htKvmSpiceLosslessImgComprAutoGlz,
htKvmSpiceLosslessImgComprAutoLz,
htKvmSpiceLosslessImgComprGlz,
htKvmSpiceLosslessImgComprLz,
htKvmSpiceLosslessImgComprOff,
htKvmSpiceLosslessImgComprQuic]
htKvmSpiceLossyImgComprAlways :: String
htKvmSpiceLossyImgComprAlways = "always"
htKvmSpiceLossyImgComprAuto :: String
htKvmSpiceLossyImgComprAuto = "auto"
htKvmSpiceLossyImgComprNever :: String
htKvmSpiceLossyImgComprNever = "never"
htKvmSpiceValidLossyImgComprOptions :: FrozenSet String
htKvmSpiceValidLossyImgComprOptions =
ConstantUtils.mkSet [htKvmSpiceLossyImgComprAlways,
htKvmSpiceLossyImgComprAuto,
htKvmSpiceLossyImgComprNever]
-- * SPICE video stream detection
htKvmSpiceVideoStreamDetectionAll :: String
htKvmSpiceVideoStreamDetectionAll = "all"
htKvmSpiceVideoStreamDetectionFilter :: String
htKvmSpiceVideoStreamDetectionFilter = "filter"
htKvmSpiceVideoStreamDetectionOff :: String
htKvmSpiceVideoStreamDetectionOff = "off"
htKvmSpiceValidVideoStreamDetectionOptions :: FrozenSet String
htKvmSpiceValidVideoStreamDetectionOptions =
ConstantUtils.mkSet [htKvmSpiceVideoStreamDetectionAll,
htKvmSpiceVideoStreamDetectionFilter,
htKvmSpiceVideoStreamDetectionOff]
-- * Security models
htSmNone :: String
htSmNone = "none"
htSmPool :: String
htSmPool = "pool"
htSmUser :: String
htSmUser = "user"
htKvmValidSmTypes :: FrozenSet String
htKvmValidSmTypes = ConstantUtils.mkSet [htSmNone, htSmPool, htSmUser]
-- * Kvm flag values
htKvmDisabled :: String
htKvmDisabled = "disabled"
htKvmEnabled :: String
htKvmEnabled = "enabled"
htKvmFlagValues :: FrozenSet String
htKvmFlagValues = ConstantUtils.mkSet [htKvmDisabled, htKvmEnabled]
-- * Migration type
htMigrationLive :: String
htMigrationLive = Types.migrationModeToRaw MigrationLive
htMigrationNonlive :: String
htMigrationNonlive = Types.migrationModeToRaw MigrationNonLive
htMigrationModes :: FrozenSet String
htMigrationModes =
ConstantUtils.mkSet $ map Types.migrationModeToRaw [minBound..]
-- * Cluster verify steps
verifyNplusoneMem :: String
verifyNplusoneMem = Types.verifyOptionalChecksToRaw VerifyNPlusOneMem
verifyOptionalChecks :: FrozenSet String
verifyOptionalChecks =
ConstantUtils.mkSet $ map Types.verifyOptionalChecksToRaw [minBound..]
-- * Cluster Verify error classes
cvTcluster :: String
cvTcluster = "cluster"
cvTgroup :: String
cvTgroup = "group"
cvTnode :: String
cvTnode = "node"
cvTinstance :: String
cvTinstance = "instance"
-- * Cluster Verify error levels
cvWarning :: String
cvWarning = "WARNING"
cvError :: String
cvError = "ERROR"
-- * Cluster Verify error codes and documentation
cvEclustercert :: (String, String, String)
cvEclustercert =
("cluster",
Types.cVErrorCodeToRaw CvECLUSTERCERT,
"Cluster certificate files verification failure")
cvEclusterclientcert :: (String, String, String)
cvEclusterclientcert =
("cluster",
Types.cVErrorCodeToRaw CvECLUSTERCLIENTCERT,
"Cluster client certificate files verification failure")
cvEclustercfg :: (String, String, String)
cvEclustercfg =
("cluster",
Types.cVErrorCodeToRaw CvECLUSTERCFG,
"Cluster configuration verification failure")
cvEclusterdanglinginst :: (String, String, String)
cvEclusterdanglinginst =
("node",
Types.cVErrorCodeToRaw CvECLUSTERDANGLINGINST,
"Some instances have a non-existing primary node")
cvEclusterdanglingnodes :: (String, String, String)
cvEclusterdanglingnodes =
("node",
Types.cVErrorCodeToRaw CvECLUSTERDANGLINGNODES,
"Some nodes belong to non-existing groups")
cvEclusterfilecheck :: (String, String, String)
cvEclusterfilecheck =
("cluster",
Types.cVErrorCodeToRaw CvECLUSTERFILECHECK,
"Cluster configuration verification failure")
cvEgroupdifferentpvsize :: (String, String, String)
cvEgroupdifferentpvsize =
("group",
Types.cVErrorCodeToRaw CvEGROUPDIFFERENTPVSIZE,
"PVs in the group have different sizes")
cvEinstancebadnode :: (String, String, String)
cvEinstancebadnode =
("instance",
Types.cVErrorCodeToRaw CvEINSTANCEBADNODE,
"Instance marked as running lives on an offline node")
cvEinstancedown :: (String, String, String)
cvEinstancedown =
("instance",
Types.cVErrorCodeToRaw CvEINSTANCEDOWN,
"Instance not running on its primary node")
cvEinstancefaultydisk :: (String, String, String)
cvEinstancefaultydisk =
("instance",
Types.cVErrorCodeToRaw CvEINSTANCEFAULTYDISK,
"Impossible to retrieve status for a disk")
cvEinstancelayout :: (String, String, String)
cvEinstancelayout =
("instance",
Types.cVErrorCodeToRaw CvEINSTANCELAYOUT,
"Instance has multiple secondary nodes")
cvEinstancemissingcfgparameter :: (String, String, String)
cvEinstancemissingcfgparameter =
("instance",
Types.cVErrorCodeToRaw CvEINSTANCEMISSINGCFGPARAMETER,
"A configuration parameter for an instance is missing")
cvEinstancemissingdisk :: (String, String, String)
cvEinstancemissingdisk =
("instance",
Types.cVErrorCodeToRaw CvEINSTANCEMISSINGDISK,
"Missing volume on an instance")
cvEinstancepolicy :: (String, String, String)
cvEinstancepolicy =
("instance",
Types.cVErrorCodeToRaw CvEINSTANCEPOLICY,
"Instance does not meet policy")
cvEinstancesplitgroups :: (String, String, String)
cvEinstancesplitgroups =
("instance",
Types.cVErrorCodeToRaw CvEINSTANCESPLITGROUPS,
"Instance with primary and secondary nodes in different groups")
cvEinstanceunsuitablenode :: (String, String, String)
cvEinstanceunsuitablenode =
("instance",
Types.cVErrorCodeToRaw CvEINSTANCEUNSUITABLENODE,
"Instance running on nodes that are not suitable for it")
cvEinstancewrongnode :: (String, String, String)
cvEinstancewrongnode =
("instance",
Types.cVErrorCodeToRaw CvEINSTANCEWRONGNODE,
"Instance running on the wrong node")
cvEnodedrbd :: (String, String, String)
cvEnodedrbd =
("node",
Types.cVErrorCodeToRaw CvENODEDRBD,
"Error parsing the DRBD status file")
cvEnodedrbdhelper :: (String, String, String)
cvEnodedrbdhelper =
("node",
Types.cVErrorCodeToRaw CvENODEDRBDHELPER,
"Error caused by the DRBD helper")
cvEnodedrbdversion :: (String, String, String)
cvEnodedrbdversion =
("node",
Types.cVErrorCodeToRaw CvENODEDRBDVERSION,
"DRBD version mismatch within a node group")
cvEnodefilecheck :: (String, String, String)
cvEnodefilecheck =
("node",
Types.cVErrorCodeToRaw CvENODEFILECHECK,
"Error retrieving the checksum of the node files")
cvEnodefilestoragepaths :: (String, String, String)
cvEnodefilestoragepaths =
("node",
Types.cVErrorCodeToRaw CvENODEFILESTORAGEPATHS,
"Detected bad file storage paths")
cvEnodefilestoragepathunusable :: (String, String, String)
cvEnodefilestoragepathunusable =
("node",
Types.cVErrorCodeToRaw CvENODEFILESTORAGEPATHUNUSABLE,
"File storage path unusable")
cvEnodehooks :: (String, String, String)
cvEnodehooks =
("node",
Types.cVErrorCodeToRaw CvENODEHOOKS,
"Communication failure in hooks execution")
cvEnodehv :: (String, String, String)
cvEnodehv =
("node",
Types.cVErrorCodeToRaw CvENODEHV,
"Hypervisor parameters verification failure")
cvEnodelvm :: (String, String, String)
cvEnodelvm =
("node",
Types.cVErrorCodeToRaw CvENODELVM,
"LVM-related node error")
cvEnoden1 :: (String, String, String)
cvEnoden1 =
("node",
Types.cVErrorCodeToRaw CvENODEN1,
"Not enough memory to accommodate instance failovers")
cvEnodenet :: (String, String, String)
cvEnodenet =
("node",
Types.cVErrorCodeToRaw CvENODENET,
"Network-related node error")
cvEnodeoobpath :: (String, String, String)
cvEnodeoobpath =
("node",
Types.cVErrorCodeToRaw CvENODEOOBPATH,
"Invalid Out Of Band path")
cvEnodeorphaninstance :: (String, String, String)
cvEnodeorphaninstance =
("node",
   Types.cVErrorCodeToRaw CvENODEORPHANINSTANCE,
   "Unknown instance running on a node")
cvEnodeorphanlv :: (String, String, String)
cvEnodeorphanlv =
("node",
Types.cVErrorCodeToRaw CvENODEORPHANLV,
"Unknown LVM logical volume")
cvEnodeos :: (String, String, String)
cvEnodeos =
("node",
Types.cVErrorCodeToRaw CvENODEOS,
"OS-related node error")
cvEnoderpc :: (String, String, String)
cvEnoderpc =
("node",
Types.cVErrorCodeToRaw CvENODERPC,
"Error during connection to the primary node of an instance")
cvEnodesetup :: (String, String, String)
cvEnodesetup =
("node",
Types.cVErrorCodeToRaw CvENODESETUP,
"Node setup error")
cvEnodesharedfilestoragepathunusable :: (String, String, String)
cvEnodesharedfilestoragepathunusable =
("node",
Types.cVErrorCodeToRaw CvENODESHAREDFILESTORAGEPATHUNUSABLE,
"Shared file storage path unusable")
cvEnodeglusterstoragepathunusable :: (String, String, String)
cvEnodeglusterstoragepathunusable =
("node",
Types.cVErrorCodeToRaw CvENODEGLUSTERSTORAGEPATHUNUSABLE,
"Gluster storage path unusable")
cvEnodessh :: (String, String, String)
cvEnodessh =
("node",
Types.cVErrorCodeToRaw CvENODESSH,
"SSH-related node error")
cvEnodetime :: (String, String, String)
cvEnodetime =
("node",
Types.cVErrorCodeToRaw CvENODETIME,
"Node returned invalid time")
cvEnodeuserscripts :: (String, String, String)
cvEnodeuserscripts =
("node",
Types.cVErrorCodeToRaw CvENODEUSERSCRIPTS,
"User scripts not present or not executable")
cvEnodeversion :: (String, String, String)
cvEnodeversion =
("node",
Types.cVErrorCodeToRaw CvENODEVERSION,
"Protocol version mismatch or Ganeti version mismatch")
cvAllEcodes :: FrozenSet (String, String, String)
cvAllEcodes =
ConstantUtils.mkSet
[cvEclustercert,
cvEclustercfg,
cvEclusterdanglinginst,
cvEclusterdanglingnodes,
cvEclusterfilecheck,
cvEgroupdifferentpvsize,
cvEinstancebadnode,
cvEinstancedown,
cvEinstancefaultydisk,
cvEinstancelayout,
cvEinstancemissingcfgparameter,
cvEinstancemissingdisk,
cvEinstancepolicy,
cvEinstancesplitgroups,
cvEinstanceunsuitablenode,
cvEinstancewrongnode,
cvEnodedrbd,
cvEnodedrbdhelper,
cvEnodedrbdversion,
cvEnodefilecheck,
cvEnodefilestoragepaths,
cvEnodefilestoragepathunusable,
cvEnodehooks,
cvEnodehv,
cvEnodelvm,
cvEnoden1,
cvEnodenet,
cvEnodeoobpath,
cvEnodeorphaninstance,
cvEnodeorphanlv,
cvEnodeos,
cvEnoderpc,
cvEnodesetup,
cvEnodesharedfilestoragepathunusable,
cvEnodeglusterstoragepathunusable,
cvEnodessh,
cvEnodetime,
cvEnodeuserscripts,
cvEnodeversion]
cvAllEcodesStrings :: FrozenSet String
cvAllEcodesStrings =
ConstantUtils.mkSet $ map Types.cVErrorCodeToRaw [minBound..]
-- * Node verify constants
nvBridges :: String
nvBridges = "bridges"
nvClientCert :: String
nvClientCert = "client-cert"
nvDrbdhelper :: String
nvDrbdhelper = "drbd-helper"
nvDrbdversion :: String
nvDrbdversion = "drbd-version"
nvDrbdlist :: String
nvDrbdlist = "drbd-list"
nvExclusivepvs :: String
nvExclusivepvs = "exclusive-pvs"
nvFilelist :: String
nvFilelist = "filelist"
nvAcceptedStoragePaths :: String
nvAcceptedStoragePaths = "allowed-file-storage-paths"
nvFileStoragePath :: String
nvFileStoragePath = "file-storage-path"
nvSharedFileStoragePath :: String
nvSharedFileStoragePath = "shared-file-storage-path"
nvGlusterStoragePath :: String
nvGlusterStoragePath = "gluster-storage-path"
nvHvinfo :: String
nvHvinfo = "hvinfo"
nvHvparams :: String
nvHvparams = "hvparms"
nvHypervisor :: String
nvHypervisor = "hypervisor"
nvInstancelist :: String
nvInstancelist = "instancelist"
nvLvlist :: String
nvLvlist = "lvlist"
nvMasterip :: String
nvMasterip = "master-ip"
nvNodelist :: String
nvNodelist = "nodelist"
nvNodenettest :: String
nvNodenettest = "node-net-test"
nvNodesetup :: String
nvNodesetup = "nodesetup"
nvOobPaths :: String
nvOobPaths = "oob-paths"
nvOslist :: String
nvOslist = "oslist"
nvPvlist :: String
nvPvlist = "pvlist"
nvTime :: String
nvTime = "time"
nvUserscripts :: String
nvUserscripts = "user-scripts"
nvVersion :: String
nvVersion = "version"
nvVglist :: String
nvVglist = "vglist"
nvNonvmnodes :: String
nvNonvmnodes = "nonvmnodes"
nvSshSetup :: String
nvSshSetup = "ssh-setup"
-- * Instance status
inststAdmindown :: String
inststAdmindown = Types.instanceStatusToRaw StatusDown
inststAdminoffline :: String
inststAdminoffline = Types.instanceStatusToRaw StatusOffline
inststErrordown :: String
inststErrordown = Types.instanceStatusToRaw ErrorDown
inststErrorup :: String
inststErrorup = Types.instanceStatusToRaw ErrorUp
inststNodedown :: String
inststNodedown = Types.instanceStatusToRaw NodeDown
inststNodeoffline :: String
inststNodeoffline = Types.instanceStatusToRaw NodeOffline
inststRunning :: String
inststRunning = Types.instanceStatusToRaw Running
inststUserdown :: String
inststUserdown = Types.instanceStatusToRaw UserDown
inststWrongnode :: String
inststWrongnode = Types.instanceStatusToRaw WrongNode
inststAll :: FrozenSet String
inststAll = ConstantUtils.mkSet $ map Types.instanceStatusToRaw [minBound..]
-- * Admin states
adminstDown :: String
adminstDown = Types.adminStateToRaw AdminDown
adminstOffline :: String
adminstOffline = Types.adminStateToRaw AdminOffline
adminstUp :: String
adminstUp = Types.adminStateToRaw AdminUp
adminstAll :: FrozenSet String
adminstAll = ConstantUtils.mkSet $ map Types.adminStateToRaw [minBound..]
-- * Admin state sources
adminSource :: AdminStateSource
adminSource = AdminSource
userSource :: AdminStateSource
userSource = UserSource
adminStateSources :: FrozenSet AdminStateSource
adminStateSources = ConstantUtils.mkSet [minBound..]
-- * Node roles
nrDrained :: String
nrDrained = Types.nodeRoleToRaw NRDrained
nrMaster :: String
nrMaster = Types.nodeRoleToRaw NRMaster
nrMcandidate :: String
nrMcandidate = Types.nodeRoleToRaw NRCandidate
nrOffline :: String
nrOffline = Types.nodeRoleToRaw NROffline
nrRegular :: String
nrRegular = Types.nodeRoleToRaw NRRegular
nrAll :: FrozenSet String
nrAll = ConstantUtils.mkSet $ map Types.nodeRoleToRaw [minBound..]
-- * SSL certificate check constants (in days)
sslCertExpirationError :: Int
sslCertExpirationError = 7
sslCertExpirationWarn :: Int
sslCertExpirationWarn = 30
-- * Allocator framework constants
iallocatorVersion :: Int
iallocatorVersion = 2
iallocatorDirIn :: String
iallocatorDirIn = Types.iAllocatorTestDirToRaw IAllocatorDirIn
iallocatorDirOut :: String
iallocatorDirOut = Types.iAllocatorTestDirToRaw IAllocatorDirOut
validIallocatorDirections :: FrozenSet String
validIallocatorDirections =
ConstantUtils.mkSet $ map Types.iAllocatorTestDirToRaw [minBound..]
iallocatorModeAlloc :: String
iallocatorModeAlloc = Types.iAllocatorModeToRaw IAllocatorAlloc
iallocatorModeChgGroup :: String
iallocatorModeChgGroup = Types.iAllocatorModeToRaw IAllocatorChangeGroup
iallocatorModeMultiAlloc :: String
iallocatorModeMultiAlloc = Types.iAllocatorModeToRaw IAllocatorMultiAlloc
iallocatorModeNodeEvac :: String
iallocatorModeNodeEvac = Types.iAllocatorModeToRaw IAllocatorNodeEvac
iallocatorModeReloc :: String
iallocatorModeReloc = Types.iAllocatorModeToRaw IAllocatorReloc
validIallocatorModes :: FrozenSet String
validIallocatorModes =
ConstantUtils.mkSet $ map Types.iAllocatorModeToRaw [minBound..]
iallocatorSearchPath :: [String]
iallocatorSearchPath = AutoConf.iallocatorSearchPath
defaultIallocatorShortcut :: String
defaultIallocatorShortcut = "."
-- * Opportunistic allocator usage
-- | Time delay in seconds between repeated opportunistic instance creations.
-- Rather than failing with an informative error message if the opportunistic
-- creation cannot grab enough nodes, for some uses it is better to retry the
-- creation with an interval between attempts. This is a reasonable default.
defaultOpportunisticRetryInterval :: Int
defaultOpportunisticRetryInterval = 30
-- * Node evacuation
nodeEvacPri :: String
nodeEvacPri = Types.evacModeToRaw ChangePrimary
nodeEvacSec :: String
nodeEvacSec = Types.evacModeToRaw ChangeSecondary
nodeEvacAll :: String
nodeEvacAll = Types.evacModeToRaw ChangeAll
nodeEvacModes :: FrozenSet String
nodeEvacModes = ConstantUtils.mkSet $ map Types.evacModeToRaw [minBound..]
-- * Job queue
jobQueueVersion :: Int
jobQueueVersion = 1
jobQueueSizeHardLimit :: Int
jobQueueSizeHardLimit = 5000
jobQueueFilesPerms :: Int
jobQueueFilesPerms = 0o640
-- * Unchanged job return
jobNotchanged :: String
jobNotchanged = "nochange"
-- * Job status
jobStatusQueued :: String
jobStatusQueued = Types.jobStatusToRaw JOB_STATUS_QUEUED
jobStatusWaiting :: String
jobStatusWaiting = Types.jobStatusToRaw JOB_STATUS_WAITING
jobStatusCanceling :: String
jobStatusCanceling = Types.jobStatusToRaw JOB_STATUS_CANCELING
jobStatusRunning :: String
jobStatusRunning = Types.jobStatusToRaw JOB_STATUS_RUNNING
jobStatusCanceled :: String
jobStatusCanceled = Types.jobStatusToRaw JOB_STATUS_CANCELED
jobStatusSuccess :: String
jobStatusSuccess = Types.jobStatusToRaw JOB_STATUS_SUCCESS
jobStatusError :: String
jobStatusError = Types.jobStatusToRaw JOB_STATUS_ERROR
jobsPending :: FrozenSet String
jobsPending =
ConstantUtils.mkSet [jobStatusQueued, jobStatusWaiting, jobStatusCanceling]
jobsFinalized :: FrozenSet String
jobsFinalized =
ConstantUtils.mkSet $ map Types.finalizedJobStatusToRaw [minBound..]
jobStatusAll :: FrozenSet String
jobStatusAll = ConstantUtils.mkSet $ map Types.jobStatusToRaw [minBound..]
-- * OpCode status
-- ** Not yet finalized opcodes
opStatusCanceling :: String
opStatusCanceling = "canceling"
opStatusQueued :: String
opStatusQueued = "queued"
opStatusRunning :: String
opStatusRunning = "running"
opStatusWaiting :: String
opStatusWaiting = "waiting"
-- ** Finalized opcodes
opStatusCanceled :: String
opStatusCanceled = "canceled"
opStatusError :: String
opStatusError = "error"
opStatusSuccess :: String
opStatusSuccess = "success"
opsFinalized :: FrozenSet String
opsFinalized =
ConstantUtils.mkSet [opStatusCanceled, opStatusError, opStatusSuccess]
-- * OpCode priority
opPrioLowest :: Int
opPrioLowest = 19
opPrioHighest :: Int
opPrioHighest = -20
opPrioLow :: Int
opPrioLow = Types.opSubmitPriorityToRaw OpPrioLow
opPrioNormal :: Int
opPrioNormal = Types.opSubmitPriorityToRaw OpPrioNormal
opPrioHigh :: Int
opPrioHigh = Types.opSubmitPriorityToRaw OpPrioHigh
opPrioSubmitValid :: FrozenSet Int
opPrioSubmitValid = ConstantUtils.mkSet [opPrioLow, opPrioNormal, opPrioHigh]
opPrioDefault :: Int
opPrioDefault = opPrioNormal
-- * Lock recalculate mode
locksAppend :: String
locksAppend = "append"
locksReplace :: String
locksReplace = "replace"
-- * Lock timeout
--
-- The total lock timeout before we transition into a blocking acquire
-- (this can still be reset by a priority change). Computed as the maximum
-- time (10 hours) before we should actually go into blocking acquire,
-- given that we start from the default priority level.
lockAttemptsMaxwait :: Double
lockAttemptsMaxwait = 75.0
lockAttemptsMinwait :: Double
lockAttemptsMinwait = 5.0
lockAttemptsTimeout :: Int
lockAttemptsTimeout = (10 * 3600) `div` (opPrioDefault - opPrioHighest)
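-- A worked example of the formula above (a sketch only: the raw value of
-- 'opPrioNormal', and hence 'opPrioDefault', comes from 'Types' and is
-- assumed here to be 0, with 'opPrioHighest' being the -20 defined above):
--
-- > lockAttemptsTimeout = (10 * 3600) `div` (0 - (-20))
-- >                     = 36000 `div` 20
-- >                     = 1800    -- seconds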
-- * Execution log types
elogMessage :: String
elogMessage = Types.eLogTypeToRaw ELogMessage
elogRemoteImport :: String
elogRemoteImport = Types.eLogTypeToRaw ELogRemoteImport
elogJqueueTest :: String
elogJqueueTest = Types.eLogTypeToRaw ELogJqueueTest
elogDelayTest :: String
elogDelayTest = Types.eLogTypeToRaw ELogDelayTest
-- * /etc/hosts modification
etcHostsAdd :: String
etcHostsAdd = "add"
etcHostsRemove :: String
etcHostsRemove = "remove"
-- * Job queue test
jqtMsgprefix :: String
jqtMsgprefix = "TESTMSG="
jqtExec :: String
jqtExec = "exec"
jqtExpandnames :: String
jqtExpandnames = "expandnames"
jqtLogmsg :: String
jqtLogmsg = "logmsg"
jqtStartmsg :: String
jqtStartmsg = "startmsg"
jqtAll :: FrozenSet String
jqtAll = ConstantUtils.mkSet [jqtExec, jqtExpandnames, jqtLogmsg, jqtStartmsg]
-- * Query resources
qrCluster :: String
qrCluster = "cluster"
qrExport :: String
qrExport = "export"
qrExtstorage :: String
qrExtstorage = "extstorage"
qrGroup :: String
qrGroup = "group"
qrInstance :: String
qrInstance = "instance"
qrJob :: String
qrJob = "job"
qrLock :: String
qrLock = "lock"
qrNetwork :: String
qrNetwork = "network"
qrFilter :: String
qrFilter = "filter"
qrNode :: String
qrNode = "node"
qrOs :: String
qrOs = "os"
-- | List of resources which can be queried using 'Ganeti.OpCodes.OpQuery'
qrViaOp :: FrozenSet String
qrViaOp =
ConstantUtils.mkSet [qrCluster,
qrOs,
qrExtstorage]
-- | List of resources which can be queried using Local UniX Interface
qrViaLuxi :: FrozenSet String
qrViaLuxi = ConstantUtils.mkSet [qrGroup,
qrExport,
qrInstance,
qrJob,
qrLock,
qrNetwork,
qrNode,
qrFilter]
-- | List of resources which can be queried using RAPI
qrViaRapi :: FrozenSet String
qrViaRapi = qrViaLuxi
-- | List of resources which can be queried via RAPI including PUT requests
qrViaRapiPut :: FrozenSet String
qrViaRapiPut = ConstantUtils.mkSet [qrLock, qrJob, qrFilter]
-- * Query field types
qftBool :: String
qftBool = "bool"
qftNumber :: String
qftNumber = "number"
qftNumberFloat :: String
qftNumberFloat = "float"
qftOther :: String
qftOther = "other"
qftText :: String
qftText = "text"
qftTimestamp :: String
qftTimestamp = "timestamp"
qftUnit :: String
qftUnit = "unit"
qftUnknown :: String
qftUnknown = "unknown"
qftAll :: FrozenSet String
qftAll =
ConstantUtils.mkSet [qftBool,
qftNumber,
qftNumberFloat,
qftOther,
qftText,
qftTimestamp,
qftUnit,
qftUnknown]
-- * Query result field status
--
-- Don't change or reuse values as they're used by clients.
--
-- FIXME: link with 'Ganeti.Query.Language.ResultStatus'
-- | No data (e.g. RPC error), can be used instead of 'rsOffline'
rsNodata :: Int
rsNodata = 2
rsNormal :: Int
rsNormal = 0
-- | Resource marked offline
rsOffline :: Int
rsOffline = 4
-- | Value unavailable/unsupported for item; if this field is
-- supported but we cannot get the data for the moment, 'rsNodata' or
-- 'rsOffline' should be used
rsUnavail :: Int
rsUnavail = 3
rsUnknown :: Int
rsUnknown = 1
rsAll :: FrozenSet Int
rsAll =
ConstantUtils.mkSet [rsNodata,
rsNormal,
rsOffline,
rsUnavail,
rsUnknown]
-- | Special field cases and their verbose/terse formatting
rssDescription :: Map Int (String, String)
rssDescription =
Map.fromList [(rsUnknown, ("(unknown)", "??")),
(rsNodata, ("(nodata)", "?")),
(rsOffline, ("(offline)", "*")),
(rsUnavail, ("(unavail)", "-"))]
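-- A small usage sketch (illustration only): the verbose and terse forms
-- can be looked up directly from the map above, e.g.
--
-- > Map.lookup rsOffline rssDescription  ==  Just ("(offline)", "*")
-- > Map.lookup rsNormal  rssDescription  ==  Nothing  -- normal values need no marker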
-- * Max dynamic devices
maxDisks :: Int
maxDisks = Types.maxDisks
maxNics :: Int
maxNics = Types.maxNics
-- | SSCONF file prefix
ssconfFileprefix :: String
ssconfFileprefix = "ssconf_"
-- * SSCONF keys
ssClusterName :: String
ssClusterName = "cluster_name"
ssClusterTags :: String
ssClusterTags = "cluster_tags"
ssFileStorageDir :: String
ssFileStorageDir = "file_storage_dir"
ssSharedFileStorageDir :: String
ssSharedFileStorageDir = "shared_file_storage_dir"
ssGlusterStorageDir :: String
ssGlusterStorageDir = "gluster_storage_dir"
ssMasterCandidates :: String
ssMasterCandidates = "master_candidates"
ssMasterCandidatesIps :: String
ssMasterCandidatesIps = "master_candidates_ips"
ssMasterCandidatesCerts :: String
ssMasterCandidatesCerts = "master_candidates_certs"
ssMasterIp :: String
ssMasterIp = "master_ip"
ssMasterNetdev :: String
ssMasterNetdev = "master_netdev"
ssMasterNetmask :: String
ssMasterNetmask = "master_netmask"
ssMasterNode :: String
ssMasterNode = "master_node"
ssNodeList :: String
ssNodeList = "node_list"
ssNodePrimaryIps :: String
ssNodePrimaryIps = "node_primary_ips"
ssNodeSecondaryIps :: String
ssNodeSecondaryIps = "node_secondary_ips"
ssNodeVmCapable :: String
ssNodeVmCapable = "node_vm_capable"
ssOfflineNodes :: String
ssOfflineNodes = "offline_nodes"
ssOnlineNodes :: String
ssOnlineNodes = "online_nodes"
ssPrimaryIpFamily :: String
ssPrimaryIpFamily = "primary_ip_family"
ssInstanceList :: String
ssInstanceList = "instance_list"
ssReleaseVersion :: String
ssReleaseVersion = "release_version"
ssHypervisorList :: String
ssHypervisorList = "hypervisor_list"
ssMaintainNodeHealth :: String
ssMaintainNodeHealth = "maintain_node_health"
ssUidPool :: String
ssUidPool = "uid_pool"
ssNodegroups :: String
ssNodegroups = "nodegroups"
ssNetworks :: String
ssNetworks = "networks"
-- | This is not a complete SSCONF key, but the prefix for the
-- hypervisor keys
ssHvparamsPref :: String
ssHvparamsPref = "hvparams_"
-- * Hvparams keys
ssHvparamsXenChroot :: String
ssHvparamsXenChroot = ssHvparamsPref ++ htChroot
ssHvparamsXenFake :: String
ssHvparamsXenFake = ssHvparamsPref ++ htFake
ssHvparamsXenHvm :: String
ssHvparamsXenHvm = ssHvparamsPref ++ htXenHvm
ssHvparamsXenKvm :: String
ssHvparamsXenKvm = ssHvparamsPref ++ htKvm
ssHvparamsXenLxc :: String
ssHvparamsXenLxc = ssHvparamsPref ++ htLxc
ssHvparamsXenPvm :: String
ssHvparamsXenPvm = ssHvparamsPref ++ htXenPvm
validSsHvparamsKeys :: FrozenSet String
validSsHvparamsKeys =
ConstantUtils.mkSet [ssHvparamsXenChroot,
ssHvparamsXenLxc,
ssHvparamsXenFake,
ssHvparamsXenHvm,
ssHvparamsXenKvm,
ssHvparamsXenPvm]
ssFilePerms :: Int
ssFilePerms = 0o444
ssEnabledUserShutdown :: String
ssEnabledUserShutdown = "enabled_user_shutdown"
-- | Cluster wide default parameters
defaultEnabledHypervisor :: String
defaultEnabledHypervisor = htXenPvm
hvcDefaults :: Map Hypervisor (Map String PyValueEx)
hvcDefaults =
Map.fromList
[ (XenPvm, Map.fromList
[ (hvUseBootloader, PyValueEx False)
, (hvBootloaderPath, PyValueEx xenBootloader)
, (hvBootloaderArgs, PyValueEx "")
, (hvKernelPath, PyValueEx xenKernel)
, (hvInitrdPath, PyValueEx "")
, (hvRootPath, PyValueEx "/dev/xvda1")
, (hvKernelArgs, PyValueEx "ro")
, (hvMigrationPort, PyValueEx (8002 :: Int))
, (hvMigrationMode, PyValueEx htMigrationLive)
, (hvBlockdevPrefix, PyValueEx "sd")
, (hvRebootBehavior, PyValueEx instanceRebootAllowed)
, (hvCpuMask, PyValueEx cpuPinningAll)
, (hvCpuCap, PyValueEx (0 :: Int))
, (hvCpuWeight, PyValueEx (256 :: Int))
, (hvVifScript, PyValueEx "")
, (hvXenCmd, PyValueEx xenCmdXm)
, (hvXenCpuid, PyValueEx "")
, (hvSoundhw, PyValueEx "")
])
, (XenHvm, Map.fromList
[ (hvBootOrder, PyValueEx "cd")
, (hvCdromImagePath, PyValueEx "")
, (hvNicType, PyValueEx htNicRtl8139)
, (hvDiskType, PyValueEx htDiskParavirtual)
, (hvVncBindAddress, PyValueEx ip4AddressAny)
, (hvAcpi, PyValueEx True)
, (hvPae, PyValueEx True)
, (hvKernelPath, PyValueEx "/usr/lib/xen/boot/hvmloader")
, (hvDeviceModel, PyValueEx "/usr/lib/xen/bin/qemu-dm")
, (hvMigrationPort, PyValueEx (8002 :: Int))
, (hvMigrationMode, PyValueEx htMigrationNonlive)
, (hvUseLocaltime, PyValueEx False)
, (hvBlockdevPrefix, PyValueEx "hd")
, (hvPassthrough, PyValueEx "")
, (hvRebootBehavior, PyValueEx instanceRebootAllowed)
, (hvCpuMask, PyValueEx cpuPinningAll)
, (hvCpuCap, PyValueEx (0 :: Int))
, (hvCpuWeight, PyValueEx (256 :: Int))
, (hvVifType, PyValueEx htHvmVifIoemu)
, (hvVifScript, PyValueEx "")
, (hvViridian, PyValueEx False)
, (hvXenCmd, PyValueEx xenCmdXm)
, (hvXenCpuid, PyValueEx "")
, (hvSoundhw, PyValueEx "")
])
, (Kvm, Map.fromList
[ (hvKvmPath, PyValueEx kvmPath)
, (hvKernelPath, PyValueEx kvmKernel)
, (hvInitrdPath, PyValueEx "")
, (hvKernelArgs, PyValueEx "ro")
, (hvRootPath, PyValueEx "/dev/vda1")
, (hvAcpi, PyValueEx True)
, (hvSerialConsole, PyValueEx True)
, (hvSerialSpeed, PyValueEx (38400 :: Int))
, (hvVncBindAddress, PyValueEx "")
, (hvVncTls, PyValueEx False)
, (hvVncX509, PyValueEx "")
, (hvVncX509Verify, PyValueEx False)
, (hvVncPasswordFile, PyValueEx "")
, (hvKvmSpiceBind, PyValueEx "")
, (hvKvmSpiceIpVersion, PyValueEx ifaceNoIpVersionSpecified)
, (hvKvmSpicePasswordFile, PyValueEx "")
, (hvKvmSpiceLosslessImgCompr, PyValueEx "")
, (hvKvmSpiceJpegImgCompr, PyValueEx "")
, (hvKvmSpiceZlibGlzImgCompr, PyValueEx "")
, (hvKvmSpiceStreamingVideoDetection, PyValueEx "")
, (hvKvmSpiceAudioCompr, PyValueEx True)
, (hvKvmSpiceUseTls, PyValueEx False)
, (hvKvmSpiceTlsCiphers, PyValueEx opensslCiphers)
, (hvKvmSpiceUseVdagent, PyValueEx True)
, (hvKvmFloppyImagePath, PyValueEx "")
, (hvCdromImagePath, PyValueEx "")
, (hvKvmCdrom2ImagePath, PyValueEx "")
, (hvBootOrder, PyValueEx htBoDisk)
, (hvNicType, PyValueEx htNicParavirtual)
, (hvDiskType, PyValueEx htDiskParavirtual)
, (hvKvmCdromDiskType, PyValueEx "")
, (hvKvmDiskAio, PyValueEx htKvmAioThreads)
, (hvUsbMouse, PyValueEx "")
, (hvKeymap, PyValueEx "")
, (hvMigrationPort, PyValueEx (8102 :: Int))
, (hvMigrationBandwidth, PyValueEx (32 :: Int))
, (hvMigrationDowntime, PyValueEx (30 :: Int))
, (hvMigrationMode, PyValueEx htMigrationLive)
, (hvUseLocaltime, PyValueEx False)
, (hvDiskCache, PyValueEx htCacheDefault)
, (hvSecurityModel, PyValueEx htSmNone)
, (hvSecurityDomain, PyValueEx "")
, (hvKvmFlag, PyValueEx "")
, (hvVhostNet, PyValueEx False)
, (hvVirtioNetQueues, PyValueEx (1 :: Int))
, (hvKvmUseChroot, PyValueEx False)
, (hvKvmUserShutdown, PyValueEx False)
, (hvMemPath, PyValueEx "")
, (hvRebootBehavior, PyValueEx instanceRebootAllowed)
, (hvCpuMask, PyValueEx cpuPinningAll)
, (hvCpuType, PyValueEx "")
, (hvCpuCores, PyValueEx (0 :: Int))
, (hvCpuThreads, PyValueEx (0 :: Int))
, (hvCpuSockets, PyValueEx (0 :: Int))
, (hvSoundhw, PyValueEx "")
, (hvUsbDevices, PyValueEx "")
, (hvVga, PyValueEx "")
, (hvKvmExtra, PyValueEx "")
, (hvKvmMachineVersion, PyValueEx "")
, (hvKvmMigrationCaps, PyValueEx "")
, (hvVnetHdr, PyValueEx True)])
, (Fake, Map.fromList [(hvMigrationMode, PyValueEx htMigrationLive)])
, (Chroot, Map.fromList [(hvInitScript, PyValueEx "/ganeti-chroot")])
, (Lxc, Map.fromList
[ (hvCpuMask, PyValueEx "")
, (hvLxcCgroupUse, PyValueEx "")
, (hvLxcDevices, PyValueEx lxcDevicesDefault)
, (hvLxcDropCapabilities, PyValueEx lxcDropCapabilitiesDefault)
, (hvLxcExtraConfig, PyValueEx "")
, (hvLxcTty, PyValueEx (6 :: Int))
, (hvLxcStartupWait, PyValueEx (30 :: Int))
])
]
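-- A minimal lookup sketch (illustration only): the defaults above form a
-- plain nested map, so a per-hypervisor value can be fetched with the usual
-- 'Map' combinators, e.g. the KVM default for 'hvMigrationPort':
--
-- > Map.lookup Kvm hvcDefaults >>= Map.lookup hvMigrationPort
--
-- which, per the table above, holds @PyValueEx (8102 :: Int)@.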
hvcGlobals :: FrozenSet String
hvcGlobals =
ConstantUtils.mkSet [hvMigrationBandwidth,
hvMigrationMode,
hvMigrationPort,
hvXenCmd]
becDefaults :: Map String PyValueEx
becDefaults =
Map.fromList
[ (beMinmem, PyValueEx (128 :: Int))
, (beMaxmem, PyValueEx (128 :: Int))
, (beVcpus, PyValueEx (1 :: Int))
, (beAutoBalance, PyValueEx True)
, (beAlwaysFailover, PyValueEx False)
, (beSpindleUse, PyValueEx (1 :: Int))
]
ndcDefaults :: Map String PyValueEx
ndcDefaults =
Map.fromList
[ (ndOobProgram, PyValueEx "")
, (ndSpindleCount, PyValueEx (1 :: Int))
, (ndExclusiveStorage, PyValueEx False)
, (ndOvs, PyValueEx False)
, (ndOvsName, PyValueEx defaultOvs)
, (ndOvsLink, PyValueEx "")
, (ndSshPort, PyValueEx (22 :: Int))
, (ndCpuSpeed, PyValueEx (1 :: Double))
]
ndcGlobals :: FrozenSet String
ndcGlobals = ConstantUtils.mkSet [ndExclusiveStorage]
-- | Default delay target measured in deciseconds
defaultDelayTarget :: Int
defaultDelayTarget = 1
defaultDiskCustom :: String
defaultDiskCustom = ""
defaultDiskResync :: Bool
defaultDiskResync = False
-- | Default fill target measured in sectors
defaultFillTarget :: Int
defaultFillTarget = 0
-- | Default minimum rate measured in KiB/s
defaultMinRate :: Int
defaultMinRate = 4 * 1024
defaultNetCustom :: String
defaultNetCustom = ""
-- | Default plan ahead measured in deciseconds
--
-- The default values for the DRBD dynamic resync speed algorithm are
-- taken from the drbdsetup 8.3.11 man page, except for c-plan-ahead
-- (which we don't need to set to 0, because we have a separate option
-- to enable it) and for c-max-rate, which we cap to the default value
-- for the static resync rate.
defaultPlanAhead :: Int
defaultPlanAhead = 20
defaultRbdPool :: String
defaultRbdPool = "rbd"
diskLdDefaults :: Map DiskTemplate (Map String PyValueEx)
diskLdDefaults =
Map.fromList
[ (DTBlock, Map.empty)
, (DTDrbd8, Map.fromList
[ (ldpBarriers, PyValueEx drbdBarriers)
, (ldpDefaultMetavg, PyValueEx defaultVg)
, (ldpDelayTarget, PyValueEx defaultDelayTarget)
, (ldpDiskCustom, PyValueEx defaultDiskCustom)
, (ldpDynamicResync, PyValueEx defaultDiskResync)
, (ldpFillTarget, PyValueEx defaultFillTarget)
, (ldpMaxRate, PyValueEx classicDrbdSyncSpeed)
, (ldpMinRate, PyValueEx defaultMinRate)
, (ldpNetCustom, PyValueEx defaultNetCustom)
, (ldpNoMetaFlush, PyValueEx drbdNoMetaFlush)
, (ldpPlanAhead, PyValueEx defaultPlanAhead)
, (ldpProtocol, PyValueEx drbdDefaultNetProtocol)
, (ldpResyncRate, PyValueEx classicDrbdSyncSpeed)
])
, (DTExt, Map.fromList
[ (ldpAccess, PyValueEx diskKernelspace)
])
, (DTFile, Map.empty)
, (DTPlain, Map.fromList [(ldpStripes, PyValueEx lvmStripecount)])
, (DTRbd, Map.fromList
[ (ldpPool, PyValueEx defaultRbdPool)
, (ldpAccess, PyValueEx diskKernelspace)
])
, (DTSharedFile, Map.empty)
, (DTGluster, Map.fromList
[ (rbdAccess, PyValueEx diskKernelspace)
, (glusterHost, PyValueEx glusterHostDefault)
, (glusterVolume, PyValueEx glusterVolumeDefault)
, (glusterPort, PyValueEx glusterPortDefault)
])
]
diskDtDefaults :: Map DiskTemplate (Map String PyValueEx)
diskDtDefaults =
Map.fromList
[ (DTBlock, Map.empty)
, (DTDiskless, Map.empty)
, (DTDrbd8, Map.fromList
[ (drbdDataStripes, PyValueEx lvmStripecount)
, (drbdDefaultMetavg, PyValueEx defaultVg)
, (drbdDelayTarget, PyValueEx defaultDelayTarget)
, (drbdDiskBarriers, PyValueEx drbdBarriers)
, (drbdDiskCustom, PyValueEx defaultDiskCustom)
, (drbdDynamicResync, PyValueEx defaultDiskResync)
, (drbdFillTarget, PyValueEx defaultFillTarget)
, (drbdMaxRate, PyValueEx classicDrbdSyncSpeed)
, (drbdMetaBarriers, PyValueEx drbdNoMetaFlush)
, (drbdMetaStripes, PyValueEx lvmStripecount)
, (drbdMinRate, PyValueEx defaultMinRate)
, (drbdNetCustom, PyValueEx defaultNetCustom)
, (drbdPlanAhead, PyValueEx defaultPlanAhead)
, (drbdProtocol, PyValueEx drbdDefaultNetProtocol)
, (drbdResyncRate, PyValueEx classicDrbdSyncSpeed)
])
, (DTExt, Map.fromList
[ (rbdAccess, PyValueEx diskKernelspace)
])
, (DTFile, Map.empty)
, (DTPlain, Map.fromList [(lvStripes, PyValueEx lvmStripecount)])
, (DTRbd, Map.fromList
[ (rbdPool, PyValueEx defaultRbdPool)
, (rbdAccess, PyValueEx diskKernelspace)
])
, (DTSharedFile, Map.empty)
, (DTGluster, Map.fromList
[ (rbdAccess, PyValueEx diskKernelspace)
, (glusterHost, PyValueEx glusterHostDefault)
, (glusterVolume, PyValueEx glusterVolumeDefault)
, (glusterPort, PyValueEx glusterPortDefault)
])
]
niccDefaults :: Map String PyValueEx
niccDefaults =
Map.fromList
[ (nicMode, PyValueEx nicModeBridged)
, (nicLink, PyValueEx defaultBridge)
, (nicVlan, PyValueEx "")
]
-- | All of the following values are quite arbitrary - there are no
-- "good" defaults; these must be customised per site
ispecsMinmaxDefaults :: Map String (Map String Int)
ispecsMinmaxDefaults =
Map.fromList
[(ispecsMin,
Map.fromList
[(ConstantUtils.ispecMemSize, Types.iSpecMemorySize Types.defMinISpec),
(ConstantUtils.ispecCpuCount, Types.iSpecCpuCount Types.defMinISpec),
(ConstantUtils.ispecDiskCount, Types.iSpecDiskCount Types.defMinISpec),
(ConstantUtils.ispecDiskSize, Types.iSpecDiskSize Types.defMinISpec),
(ConstantUtils.ispecNicCount, Types.iSpecNicCount Types.defMinISpec),
(ConstantUtils.ispecSpindleUse, Types.iSpecSpindleUse Types.defMinISpec)]),
(ispecsMax,
Map.fromList
[(ConstantUtils.ispecMemSize, Types.iSpecMemorySize Types.defMaxISpec),
(ConstantUtils.ispecCpuCount, Types.iSpecCpuCount Types.defMaxISpec),
(ConstantUtils.ispecDiskCount, Types.iSpecDiskCount Types.defMaxISpec),
(ConstantUtils.ispecDiskSize, Types.iSpecDiskSize Types.defMaxISpec),
(ConstantUtils.ispecNicCount, Types.iSpecNicCount Types.defMaxISpec),
(ConstantUtils.ispecSpindleUse, Types.iSpecSpindleUse Types.defMaxISpec)])]
ipolicyDefaults :: Map String PyValueEx
ipolicyDefaults =
Map.fromList
[ (ispecsMinmax, PyValueEx [ispecsMinmaxDefaults])
, (ispecsStd, PyValueEx (Map.fromList
[ (ispecMemSize, 128)
, (ispecCpuCount, 1)
, (ispecDiskCount, 1)
, (ispecDiskSize, 1024)
, (ispecNicCount, 1)
, (ispecSpindleUse, 1)
] :: Map String Int))
, (ipolicyDts, PyValueEx (ConstantUtils.toList diskTemplates))
, (ipolicyVcpuRatio, PyValueEx (4.0 :: Double))
, (ipolicySpindleRatio, PyValueEx (32.0 :: Double))
]
masterPoolSizeDefault :: Int
masterPoolSizeDefault = 10
-- * Exclusive storage
-- | Error margin used to compare physical disks
partMargin :: Double
partMargin = 0.01
-- | Space reserved when creating instance disks
partReserved :: Double
partReserved = 0.02
-- * Luxid job scheduling
-- | Time interval in seconds for polling updates on the job queue. This
-- interval is only relevant if the number of running jobs reaches the maximal
-- allowed number, as otherwise new jobs will be started immediately anyway.
-- Also, as jobs are watched via inotify, scheduling usually works independently
-- of polling. Therefore we choose a sufficiently large interval, in the order
-- of 5 minutes. As with the interval for reloading the configuration, we
-- choose a prime number to avoid accidental 'same wakeup' with other processes.
luxidJobqueuePollInterval :: Int
luxidJobqueuePollInterval = 307
-- | The default value for the maximal number of jobs to be running at the same
-- time. Once the maximal number is reached, new jobs will just be queued and
-- only started once some of the other jobs have finished.
luxidMaximalRunningJobsDefault :: Int
luxidMaximalRunningJobsDefault = 20
-- | The default value for the maximal number of jobs that luxid tracks via
-- inotify. If the number of running jobs exceeds this limit (which only happens
-- if the user increases the default value of maximal running jobs), newly forked
-- jobs are no longer tracked by inotify; progress will still be noticed on the
-- regular polls.
luxidMaximalTrackedJobsDefault :: Int
luxidMaximalTrackedJobsDefault = 25
-- | The number of retries when trying to @fork@ a new job.
-- Due to a bug in GHC, this can fail even though we synchronize all forks
-- and refrain from other @IO@ operations in the thread.
luxidRetryForkCount :: Int
luxidRetryForkCount = 5
-- | The average time period (in /us/) to wait between two @fork@ attempts.
-- The forking thread waits a random time period between @0@ and twice this
-- value, doubling the step with each attempt.
-- See 'luxidRetryForkCount'.
luxidRetryForkStepUS :: Int
luxidRetryForkStepUS = 500000
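-- The retry policy described above, as an illustrative sketch only (the
-- actual Luxid code lives elsewhere; 'randomRIO' and 'threadDelay' are the
-- standard System.Random and Control.Concurrent functions, and 'attempt'
-- is a hypothetical action returning 'Nothing' on a failed fork):
--
-- > forkWithRetries :: IO (Maybe a) -> IO (Maybe a)
-- > forkWithRetries attempt = go luxidRetryForkCount luxidRetryForkStepUS
-- >   where
-- >     go 0 _    = return Nothing                 -- all retries exhausted
-- >     go n step = do
-- >       r <- attempt
-- >       case r of
-- >         Just x  -> return (Just x)
-- >         Nothing -> do
-- >           pause <- randomRIO (0, 2 * step)     -- average wait is 'step' /us/
-- >           threadDelay pause
-- >           go (n - 1) (2 * step)                -- double the step each time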
-- * Luxid job death testing
-- | The number of attempts to prove that a job is dead after sending it a
-- KILL signal.
luxidJobDeathDetectionRetries :: Int
luxidJobDeathDetectionRetries = 3
-- | Time to delay (in /us/) after unsuccessfully verifying the death of a
-- job we believe to be dead. This is best chosen to be roughly the average
-- time it takes for a SIGKILL to take effect.
luxidJobDeathDelay :: Int
luxidJobDeathDelay = 100000
-- * WConfD
-- | Time interval in seconds between checks that all lock owners are still
-- alive, and cleaning up the resources for the dead ones. As jobs dying without
-- releasing resources is the exception, not the rule, we don't want this task
-- to take up too many cycles itself. Hence we choose a sufficiently large
-- interval, in the order of 5 minutes. To avoid accidental 'same wakeup'
-- with other tasks, we choose the next unused prime number.
wconfdDeathdetectionIntervall :: Int
wconfdDeathdetectionIntervall = 311
wconfdDefCtmo :: Int
wconfdDefCtmo = 10
wconfdDefRwto :: Int
wconfdDefRwto = 60
-- * Confd
confdProtocolVersion :: Int
confdProtocolVersion = ConstantUtils.confdProtocolVersion
-- * Confd request type
confdReqPing :: Int
confdReqPing = Types.confdRequestTypeToRaw ReqPing
confdReqNodeRoleByname :: Int
confdReqNodeRoleByname = Types.confdRequestTypeToRaw ReqNodeRoleByName
confdReqNodePipByInstanceIp :: Int
confdReqNodePipByInstanceIp = Types.confdRequestTypeToRaw ReqNodePipByInstPip
confdReqClusterMaster :: Int
confdReqClusterMaster = Types.confdRequestTypeToRaw ReqClusterMaster
confdReqNodePipList :: Int
confdReqNodePipList = Types.confdRequestTypeToRaw ReqNodePipList
confdReqMcPipList :: Int
confdReqMcPipList = Types.confdRequestTypeToRaw ReqMcPipList
confdReqInstancesIpsList :: Int
confdReqInstancesIpsList = Types.confdRequestTypeToRaw ReqInstIpsList
confdReqNodeDrbd :: Int
confdReqNodeDrbd = Types.confdRequestTypeToRaw ReqNodeDrbd
confdReqNodeInstances :: Int
confdReqNodeInstances = Types.confdRequestTypeToRaw ReqNodeInstances
confdReqInstanceDisks :: Int
confdReqInstanceDisks = Types.confdRequestTypeToRaw ReqInstanceDisks
confdReqConfigQuery :: Int
confdReqConfigQuery = Types.confdRequestTypeToRaw ReqConfigQuery
confdReqDataCollectors :: Int
confdReqDataCollectors = Types.confdRequestTypeToRaw ReqDataCollectors
confdReqs :: FrozenSet Int
confdReqs =
ConstantUtils.mkSet .
map Types.confdRequestTypeToRaw $
[minBound..] \\ [ReqNodeInstances]
-- * Confd request fields
confdReqfieldName :: Int
confdReqfieldName = Types.confdReqFieldToRaw ReqFieldName
confdReqfieldIp :: Int
confdReqfieldIp = Types.confdReqFieldToRaw ReqFieldIp
confdReqfieldMnodePip :: Int
confdReqfieldMnodePip = Types.confdReqFieldToRaw ReqFieldMNodePip
-- * Confd repl status
confdReplStatusOk :: Int
confdReplStatusOk = Types.confdReplyStatusToRaw ReplyStatusOk
confdReplStatusError :: Int
confdReplStatusError = Types.confdReplyStatusToRaw ReplyStatusError
confdReplStatusNotimplemented :: Int
confdReplStatusNotimplemented = Types.confdReplyStatusToRaw ReplyStatusNotImpl
confdReplStatuses :: FrozenSet Int
confdReplStatuses =
ConstantUtils.mkSet $ map Types.confdReplyStatusToRaw [minBound..]
-- * Confd node role
confdNodeRoleMaster :: Int
confdNodeRoleMaster = Types.confdNodeRoleToRaw NodeRoleMaster
confdNodeRoleCandidate :: Int
confdNodeRoleCandidate = Types.confdNodeRoleToRaw NodeRoleCandidate
confdNodeRoleOffline :: Int
confdNodeRoleOffline = Types.confdNodeRoleToRaw NodeRoleOffline
confdNodeRoleDrained :: Int
confdNodeRoleDrained = Types.confdNodeRoleToRaw NodeRoleDrained
confdNodeRoleRegular :: Int
confdNodeRoleRegular = Types.confdNodeRoleToRaw NodeRoleRegular
-- * A few common errors for confd
confdErrorUnknownEntry :: Int
confdErrorUnknownEntry = Types.confdErrorTypeToRaw ConfdErrorUnknownEntry
confdErrorInternal :: Int
confdErrorInternal = Types.confdErrorTypeToRaw ConfdErrorInternal
confdErrorArgument :: Int
confdErrorArgument = Types.confdErrorTypeToRaw ConfdErrorArgument
-- * Confd request query fields
confdReqqLink :: String
confdReqqLink = ConstantUtils.confdReqqLink
confdReqqIp :: String
confdReqqIp = ConstantUtils.confdReqqIp
confdReqqIplist :: String
confdReqqIplist = ConstantUtils.confdReqqIplist
confdReqqFields :: String
confdReqqFields = ConstantUtils.confdReqqFields
-- | Each request is "salted" by the current timestamp.
--
-- This constant decides how many seconds of skew to accept.
--
-- TODO: make this a default and allow the value to be more
-- configurable
confdMaxClockSkew :: Int
confdMaxClockSkew = 2 * nodeMaxClockSkew
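-- A hypothetical helper (not part of Ganeti) illustrating how such a skew
-- check could be expressed, with both timestamps given in POSIX seconds:
--
-- > timestampAcceptable :: Int -> Int -> Bool
-- > timestampAcceptable now requestTs = abs (now - requestTs) <= confdMaxClockSkew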
-- | When we haven't reloaded the config for more than this amount of
-- seconds, we force a test to see if inotify is betraying us. Using a
-- prime number to ensure we get less chance of 'same wakeup' with
-- other processes.
confdConfigReloadTimeout :: Int
confdConfigReloadTimeout = 17
-- | If we receive more than one update in this amount of
-- microseconds, we move to polling every RATELIMIT seconds, rather
-- than relying on inotify, to be able to serve more requests.
confdConfigReloadRatelimit :: Int
confdConfigReloadRatelimit = 250000
-- | Magic number prepended to all confd queries.
--
-- This allows us to distinguish different types of confd protocols
-- and handle them. For example by changing this we can move the whole
-- payload to be compressed, or move away from json.
confdMagicFourcc :: String
confdMagicFourcc = "plj0"
-- | By default a confd request is sent to the minimum of this number
-- and the number of master candidates (MCs). 6 was chosen because even
-- in the case of a disastrous 50% response rate, we should have enough
-- answers to be able to compare more than one.
confdDefaultReqCoverage :: Int
confdDefaultReqCoverage = 6
-- | Timeout in seconds to expire pending query request in the confd
-- client library. We don't actually expect any answer more than 10
-- seconds after we sent a request.
confdClientExpireTimeout :: Int
confdClientExpireTimeout = 10
-- | Maximum UDP datagram size.
--
-- On IPv4: 64K - 20 (ip header size) - 8 (udp header size) = 65507
-- On IPv6: 64K - 40 (ip6 header size) - 8 (udp header size) = 65487
-- (assuming we can't use jumbo frames)
-- We just set this to 60K, which should be enough
maxUdpDataSize :: Int
maxUdpDataSize = 61440
-- * User-id pool minimum/maximum acceptable user-ids
uidpoolUidMin :: Int
uidpoolUidMin = 0
-- | Assuming 32 bit user-ids
uidpoolUidMax :: Integer
uidpoolUidMax = 2 ^ 32 - 1
-- | Name or path of the pgrep command
pgrep :: String
pgrep = "pgrep"
-- | Name of the node group that gets created at cluster init or
-- upgrade
initialNodeGroupName :: String
initialNodeGroupName = "default"
-- * Possible values for NodeGroup.alloc_policy
allocPolicyLastResort :: String
allocPolicyLastResort = Types.allocPolicyToRaw AllocLastResort
allocPolicyPreferred :: String
allocPolicyPreferred = Types.allocPolicyToRaw AllocPreferred
allocPolicyUnallocable :: String
allocPolicyUnallocable = Types.allocPolicyToRaw AllocUnallocable
validAllocPolicies :: [String]
validAllocPolicies = map Types.allocPolicyToRaw [minBound..]
-- | Temporary external/shared storage parameters
blockdevDriverManual :: String
blockdevDriverManual = Types.blockDriverToRaw BlockDrvManual
-- | 'qemu-img' path, required for 'ovfconverter'
qemuimgPath :: String
qemuimgPath = AutoConf.qemuimgPath
-- | The hail iallocator
iallocHail :: String
iallocHail = "hail"
-- * Fake opcodes for functions that have hooks attached to them via
-- backend.RunLocalHooks
fakeOpMasterTurndown :: String
fakeOpMasterTurndown = "OP_CLUSTER_IP_TURNDOWN"
fakeOpMasterTurnup :: String
fakeOpMasterTurnup = "OP_CLUSTER_IP_TURNUP"
-- * Crypto Types
-- Types of cryptographic tokens used in node communication
cryptoTypeSslDigest :: String
cryptoTypeSslDigest = "ssl"
cryptoTypeSsh :: String
cryptoTypeSsh = "ssh"
-- So far only ssl keys are used in the context of this constant
cryptoTypes :: FrozenSet String
cryptoTypes = ConstantUtils.mkSet [cryptoTypeSslDigest]
-- * Crypto Actions
-- Actions that can be performed on crypto tokens
cryptoActionGet :: String
cryptoActionGet = "get"
-- This is 'create and get'
cryptoActionCreate :: String
cryptoActionCreate = "create"
cryptoActions :: FrozenSet String
cryptoActions = ConstantUtils.mkSet [cryptoActionGet, cryptoActionCreate]
-- * Options for CryptoActions
-- Filename of the certificate
cryptoOptionCertFile :: String
cryptoOptionCertFile = "cert_file"
-- Serial number of the certificate
cryptoOptionSerialNo :: String
cryptoOptionSerialNo = "serial_no"
-- * SSH key types
sshkDsa :: String
sshkDsa = "dsa"
sshkRsa :: String
sshkRsa = "rsa"
sshkAll :: FrozenSet String
sshkAll = ConstantUtils.mkSet [sshkRsa, sshkDsa]
-- * SSH authorized key types
sshakDss :: String
sshakDss = "ssh-dss"
sshakRsa :: String
sshakRsa = "ssh-rsa"
sshakAll :: FrozenSet String
sshakAll = ConstantUtils.mkSet [sshakDss, sshakRsa]
-- * SSH setup
sshsClusterName :: String
sshsClusterName = "cluster_name"
sshsSshHostKey :: String
sshsSshHostKey = "ssh_host_key"
sshsSshRootKey :: String
sshsSshRootKey = "ssh_root_key"
sshsSshAuthorizedKeys :: String
sshsSshAuthorizedKeys = "authorized_keys"
sshsSshPublicKeys :: String
sshsSshPublicKeys = "public_keys"
sshsNodeDaemonCertificate :: String
sshsNodeDaemonCertificate = "node_daemon_certificate"
sshsAdd :: String
sshsAdd = "add"
sshsReplaceOrAdd :: String
sshsReplaceOrAdd = "replace_or_add"
sshsRemove :: String
sshsRemove = "remove"
sshsOverride :: String
sshsOverride = "override"
sshsClear :: String
sshsClear = "clear"
sshsGenerate :: String
sshsGenerate = "generate"
sshsActions :: FrozenSet String
sshsActions = ConstantUtils.mkSet [ sshsAdd
, sshsRemove
, sshsOverride
, sshsClear
, sshsReplaceOrAdd]
-- * Key files for SSH daemon
sshHostDsaPriv :: String
sshHostDsaPriv = sshConfigDir ++ "/ssh_host_dsa_key"
sshHostDsaPub :: String
sshHostDsaPub = sshHostDsaPriv ++ ".pub"
sshHostRsaPriv :: String
sshHostRsaPriv = sshConfigDir ++ "/ssh_host_rsa_key"
sshHostRsaPub :: String
sshHostRsaPub = sshHostRsaPriv ++ ".pub"
sshDaemonKeyfiles :: Map String (String, String)
sshDaemonKeyfiles =
Map.fromList [ (sshkRsa, (sshHostRsaPriv, sshHostRsaPub))
, (sshkDsa, (sshHostDsaPriv, sshHostDsaPub))
]
-- * Node daemon setup
ndsClusterName :: String
ndsClusterName = "cluster_name"
ndsNodeDaemonCertificate :: String
ndsNodeDaemonCertificate = "node_daemon_certificate"
ndsSsconf :: String
ndsSsconf = "ssconf"
ndsStartNodeDaemon :: String
ndsStartNodeDaemon = "start_node_daemon"
-- * VCluster related constants
vClusterEtcHosts :: String
vClusterEtcHosts = "/etc/hosts"
vClusterVirtPathPrefix :: String
vClusterVirtPathPrefix = "/###-VIRTUAL-PATH-###,"
vClusterRootdirEnvname :: String
vClusterRootdirEnvname = "GANETI_ROOTDIR"
vClusterHostnameEnvname :: String
vClusterHostnameEnvname = "GANETI_HOSTNAME"
vClusterVpathWhitelist :: FrozenSet String
vClusterVpathWhitelist = ConstantUtils.mkSet [ vClusterEtcHosts ]
-- * The source reasons for the execution of an OpCode
opcodeReasonSrcClient :: String
opcodeReasonSrcClient = "gnt:client"
_opcodeReasonSrcDaemon :: String
_opcodeReasonSrcDaemon = "gnt:daemon"
_opcodeReasonSrcMasterd :: String
_opcodeReasonSrcMasterd = _opcodeReasonSrcDaemon ++ ":masterd"
opcodeReasonSrcNoded :: String
opcodeReasonSrcNoded = _opcodeReasonSrcDaemon ++ ":noded"
opcodeReasonSrcOpcode :: String
opcodeReasonSrcOpcode = "gnt:opcode"
opcodeReasonSrcPickup :: String
opcodeReasonSrcPickup = _opcodeReasonSrcMasterd ++ ":pickup"
opcodeReasonSrcWatcher :: String
opcodeReasonSrcWatcher = "gnt:watcher"
opcodeReasonSrcRlib2 :: String
opcodeReasonSrcRlib2 = "gnt:library:rlib2"
opcodeReasonSrcUser :: String
opcodeReasonSrcUser = "gnt:user"
opcodeReasonSources :: FrozenSet String
opcodeReasonSources =
ConstantUtils.mkSet [opcodeReasonSrcClient,
opcodeReasonSrcNoded,
opcodeReasonSrcOpcode,
opcodeReasonSrcPickup,
opcodeReasonSrcWatcher,
opcodeReasonSrcRlib2,
opcodeReasonSrcUser]
-- | Path of the file generating random UUIDs
randomUuidFile :: String
randomUuidFile = ConstantUtils.randomUuidFile
-- * Auto-repair levels
autoRepairFailover :: String
autoRepairFailover = Types.autoRepairTypeToRaw ArFailover
autoRepairFixStorage :: String
autoRepairFixStorage = Types.autoRepairTypeToRaw ArFixStorage
autoRepairMigrate :: String
autoRepairMigrate = Types.autoRepairTypeToRaw ArMigrate
autoRepairReinstall :: String
autoRepairReinstall = Types.autoRepairTypeToRaw ArReinstall
autoRepairAllTypes :: FrozenSet String
autoRepairAllTypes =
ConstantUtils.mkSet [autoRepairFailover,
autoRepairFixStorage,
autoRepairMigrate,
autoRepairReinstall]
-- * Auto-repair results
autoRepairEnoperm :: String
autoRepairEnoperm = Types.autoRepairResultToRaw ArEnoperm
autoRepairFailure :: String
autoRepairFailure = Types.autoRepairResultToRaw ArFailure
autoRepairSuccess :: String
autoRepairSuccess = Types.autoRepairResultToRaw ArSuccess
autoRepairAllResults :: FrozenSet String
autoRepairAllResults =
ConstantUtils.mkSet [autoRepairEnoperm, autoRepairFailure, autoRepairSuccess]
-- | The version identifier for builtin data collectors
builtinDataCollectorVersion :: String
builtinDataCollectorVersion = "B"
-- | The reason trail opcode parameter name
opcodeReason :: String
opcodeReason = "reason"
-- | The sequential opcode parameter name
opcodeSequential :: String
opcodeSequential = "sequential"
diskstatsFile :: String
diskstatsFile = "/proc/diskstats"
-- * CPU load collector
statFile :: String
statFile = "/proc/stat"
cpuavgloadBufferSize :: Int
cpuavgloadBufferSize = 150
-- | Window size for averaging in seconds.
cpuavgloadWindowSize :: Int
cpuavgloadWindowSize = 600
-- * Monitoring daemon
-- | Mond's variable for periodical data collection
mondTimeInterval :: Int
mondTimeInterval = 5
-- | Mond's waiting time for requesting the current configuration.
mondConfigTimeInterval :: Int
mondConfigTimeInterval = 15
-- | Mond's latest API version
mondLatestApiVersion :: Int
mondLatestApiVersion = 1
mondDefaultCategory :: String
mondDefaultCategory = "default"
-- * Disk access modes
diskUserspace :: String
diskUserspace = Types.diskAccessModeToRaw DiskUserspace
diskKernelspace :: String
diskKernelspace = Types.diskAccessModeToRaw DiskKernelspace
diskValidAccessModes :: FrozenSet String
diskValidAccessModes =
ConstantUtils.mkSet $ map Types.diskAccessModeToRaw [minBound..]
-- | Timeout for queue draining in upgrades
upgradeQueueDrainTimeout :: Int
upgradeQueueDrainTimeout = 36 * 60 * 60 -- 1.5 days
-- | Interval at which the queue is polled during upgrades
upgradeQueuePollInterval :: Int
upgradeQueuePollInterval = 10
-- * Hotplug Actions
hotplugActionAdd :: String
hotplugActionAdd = Types.hotplugActionToRaw HAAdd
hotplugActionRemove :: String
hotplugActionRemove = Types.hotplugActionToRaw HARemove
hotplugActionModify :: String
hotplugActionModify = Types.hotplugActionToRaw HAMod
hotplugAllActions :: FrozenSet String
hotplugAllActions =
ConstantUtils.mkSet $ map Types.hotplugActionToRaw [minBound..]
-- * Hotplug Device Targets
hotplugTargetNic :: String
hotplugTargetNic = Types.hotplugTargetToRaw HTNic
hotplugTargetDisk :: String
hotplugTargetDisk = Types.hotplugTargetToRaw HTDisk
hotplugAllTargets :: FrozenSet String
hotplugAllTargets =
ConstantUtils.mkSet $ map Types.hotplugTargetToRaw [minBound..]
-- | Timeout for disk removal (seconds)
diskRemoveRetryTimeout :: Int
diskRemoveRetryTimeout = 30
-- | Interval between disk removal retries (seconds)
diskRemoveRetryInterval :: Int
diskRemoveRetryInterval = 3
-- * UUID regex
uuidRegex :: String
uuidRegex = "^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$"
-- * Luxi constants
luxiSocketPerms :: Int
luxiSocketPerms = 0o660
luxiKeyMethod :: String
luxiKeyMethod = "method"
luxiKeyArgs :: String
luxiKeyArgs = "args"
luxiKeySuccess :: String
luxiKeySuccess = "success"
luxiKeyResult :: String
luxiKeyResult = "result"
luxiKeyVersion :: String
luxiKeyVersion = "version"
luxiReqSubmitJob :: String
luxiReqSubmitJob = "SubmitJob"
luxiReqSubmitJobToDrainedQueue :: String
luxiReqSubmitJobToDrainedQueue = "SubmitJobToDrainedQueue"
luxiReqSubmitManyJobs :: String
luxiReqSubmitManyJobs = "SubmitManyJobs"
luxiReqWaitForJobChange :: String
luxiReqWaitForJobChange = "WaitForJobChange"
luxiReqPickupJob :: String
luxiReqPickupJob = "PickupJob"
luxiReqCancelJob :: String
luxiReqCancelJob = "CancelJob"
luxiReqArchiveJob :: String
luxiReqArchiveJob = "ArchiveJob"
luxiReqChangeJobPriority :: String
luxiReqChangeJobPriority = "ChangeJobPriority"
luxiReqAutoArchiveJobs :: String
luxiReqAutoArchiveJobs = "AutoArchiveJobs"
luxiReqQuery :: String
luxiReqQuery = "Query"
luxiReqQueryFields :: String
luxiReqQueryFields = "QueryFields"
luxiReqQueryJobs :: String
luxiReqQueryJobs = "QueryJobs"
luxiReqQueryFilters :: String
luxiReqQueryFilters = "QueryFilters"
luxiReqReplaceFilter :: String
luxiReqReplaceFilter = "ReplaceFilter"
luxiReqDeleteFilter :: String
luxiReqDeleteFilter = "DeleteFilter"
luxiReqQueryInstances :: String
luxiReqQueryInstances = "QueryInstances"
luxiReqQueryNodes :: String
luxiReqQueryNodes = "QueryNodes"
luxiReqQueryGroups :: String
luxiReqQueryGroups = "QueryGroups"
luxiReqQueryNetworks :: String
luxiReqQueryNetworks = "QueryNetworks"
luxiReqQueryExports :: String
luxiReqQueryExports = "QueryExports"
luxiReqQueryConfigValues :: String
luxiReqQueryConfigValues = "QueryConfigValues"
luxiReqQueryClusterInfo :: String
luxiReqQueryClusterInfo = "QueryClusterInfo"
luxiReqQueryTags :: String
luxiReqQueryTags = "QueryTags"
luxiReqSetDrainFlag :: String
luxiReqSetDrainFlag = "SetDrainFlag"
luxiReqSetWatcherPause :: String
luxiReqSetWatcherPause = "SetWatcherPause"
luxiReqAll :: FrozenSet String
luxiReqAll =
ConstantUtils.mkSet
[ luxiReqArchiveJob
, luxiReqAutoArchiveJobs
, luxiReqCancelJob
, luxiReqChangeJobPriority
, luxiReqQuery
, luxiReqQueryClusterInfo
, luxiReqQueryConfigValues
, luxiReqQueryExports
, luxiReqQueryFields
, luxiReqQueryGroups
, luxiReqQueryInstances
, luxiReqQueryJobs
, luxiReqQueryNodes
, luxiReqQueryNetworks
, luxiReqQueryTags
, luxiReqSetDrainFlag
, luxiReqSetWatcherPause
, luxiReqSubmitJob
, luxiReqSubmitJobToDrainedQueue
, luxiReqSubmitManyJobs
, luxiReqWaitForJobChange
, luxiReqPickupJob
, luxiReqQueryFilters
, luxiReqReplaceFilter
, luxiReqDeleteFilter
]
luxiDefCtmo :: Int
luxiDefCtmo = 10
luxiDefRwto :: Int
luxiDefRwto = 60
-- | 'WaitForJobChange' timeout
luxiWfjcTimeout :: Int
luxiWfjcTimeout = (luxiDefRwto - 1) `div` 2
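-- With the default 'luxiDefRwto' of 60 seconds this evaluates to 29 seconds,
-- i.e. slightly less than half of the read/write timeout.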
-- | The prefix of the LUXI livelock file name
luxiLivelockPrefix :: String
luxiLivelockPrefix = "luxi-daemon"
-- | The number of seconds the LUXI daemon waits to ensure that a canceled
-- job has terminated before giving up.
luxiCancelJobTimeout :: Int
luxiCancelJobTimeout = (luxiDefRwto - 1) `div` 4
-- * Master voting constants
-- | Number of retries to carry out if nodes do not answer
masterVotingRetries :: Int
masterVotingRetries = 6
-- | Retry interval (in seconds) in master voting, if not enough answers
-- could be gathered.
masterVotingRetryIntervall :: Int
masterVotingRetryIntervall = 10
-- * Query language constants
-- ** Logic operators with one or more operands, each of which is a
-- filter on its own
qlangOpAnd :: String
qlangOpAnd = "&"
qlangOpOr :: String
qlangOpOr = "|"
-- ** Unary operators with exactly one operand
qlangOpNot :: String
qlangOpNot = "!"
qlangOpTrue :: String
qlangOpTrue = "?"
-- ** Binary operators with exactly two operands, the field name and
-- an operator-specific value
qlangOpContains :: String
qlangOpContains = "=[]"
qlangOpEqual :: String
qlangOpEqual = "=="
qlangOpEqualLegacy :: String
qlangOpEqualLegacy = "="
qlangOpGe :: String
qlangOpGe = ">="
qlangOpGt :: String
qlangOpGt = ">"
qlangOpLe :: String
qlangOpLe = "<="
qlangOpLt :: String
qlangOpLt = "<"
qlangOpNotEqual :: String
qlangOpNotEqual = "!="
qlangOpRegexp :: String
qlangOpRegexp = "=~"
-- | Characters used for detecting user-written filters (see
-- L{_CheckFilter})
qlangFilterDetectionChars :: FrozenSet String
qlangFilterDetectionChars =
ConstantUtils.mkSet ["!", " ", "\"", "\'",
")", "(", "\x0b", "\n",
"\r", "\x0c", "/", "<",
"\t", ">", "=", "\\", "~"]
-- | Characters used to detect globbing filters
qlangGlobDetectionChars :: FrozenSet String
qlangGlobDetectionChars = ConstantUtils.mkSet ["*", "?"]
-- * Error related constants
--
-- 'OpPrereqError' failure types
-- | Environment error (e.g. node disk error)
errorsEcodeEnviron :: String
errorsEcodeEnviron = "environment_error"
-- | Entity already exists
errorsEcodeExists :: String
errorsEcodeExists = "already_exists"
-- | Internal cluster error
errorsEcodeFault :: String
errorsEcodeFault = "internal_error"
-- | Wrong arguments (at syntax level)
errorsEcodeInval :: String
errorsEcodeInval = "wrong_input"
-- | Entity not found
errorsEcodeNoent :: String
errorsEcodeNoent = "unknown_entity"
-- | Not enough resources (iallocator failure, disk space, memory, etc)
errorsEcodeNores :: String
errorsEcodeNores = "insufficient_resources"
-- | Resource not unique (e.g. MAC or IP duplication)
errorsEcodeNotunique :: String
errorsEcodeNotunique = "resource_not_unique"
-- | Resolver errors
errorsEcodeResolver :: String
errorsEcodeResolver = "resolver_error"
-- | Wrong entity state
errorsEcodeState :: String
errorsEcodeState = "wrong_state"
-- | Temporarily out of resources; operation can be tried again
errorsEcodeTempNores :: String
errorsEcodeTempNores = "temp_insufficient_resources"
errorsEcodeAll :: FrozenSet String
errorsEcodeAll =
ConstantUtils.mkSet [ errorsEcodeNores
, errorsEcodeExists
, errorsEcodeState
, errorsEcodeNotunique
, errorsEcodeTempNores
, errorsEcodeNoent
, errorsEcodeFault
, errorsEcodeResolver
, errorsEcodeInval
, errorsEcodeEnviron
]
-- * Jstore related constants
jstoreJobsPerArchiveDirectory :: Int
jstoreJobsPerArchiveDirectory = 10000
-- * Gluster settings
-- | Name of the Gluster host setting
glusterHost :: String
glusterHost = "host"
-- | Default value of the Gluster host setting
glusterHostDefault :: String
glusterHostDefault = "127.0.0.1"
-- | Name of the Gluster volume setting
glusterVolume :: String
glusterVolume = "volume"
-- | Default value of the Gluster volume setting
glusterVolumeDefault :: String
glusterVolumeDefault = "gv0"
-- | Name of the Gluster port setting
glusterPort :: String
glusterPort = "port"
-- | Default value of the Gluster port setting
glusterPortDefault :: Int
glusterPortDefault = 24007
-- * Instance communication
--
-- The instance communication attaches an additional NIC, named
-- @instanceCommunicationNicPrefix@:@instanceName@ with MAC address
-- prefixed by @instanceCommunicationMacPrefix@, to the instances that
-- have instance communication enabled. This NIC is part of the
-- instance communication network which is supplied by the user via
--
-- gnt-cluster modify --instance-communication=mynetwork
--
-- This network is defined as @instanceCommunicationNetwork4@ for IPv4
-- and @instanceCommunicationNetwork6@ for IPv6.
instanceCommunicationDoc :: String
instanceCommunicationDoc =
"Enable or disable the communication mechanism for an instance"
instanceCommunicationMacPrefix :: String
instanceCommunicationMacPrefix = "52:54:00"
-- | The instance communication network is a link-local IPv4/IPv6
-- network because the communication is meant to be exclusive between
-- the host and the guest and not routed outside the node.
instanceCommunicationNetwork4 :: String
instanceCommunicationNetwork4 = "169.254.0.0/16"
-- | See 'instanceCommunicationNetwork4'.
instanceCommunicationNetwork6 :: String
instanceCommunicationNetwork6 = "fe80::/10"
instanceCommunicationNetworkLink :: String
instanceCommunicationNetworkLink = "communication_rt"
instanceCommunicationNetworkMode :: String
instanceCommunicationNetworkMode = nicModeRouted
instanceCommunicationNicPrefix :: String
instanceCommunicationNicPrefix = "ganeti:communication:"
-- | Parameters that should be protected
--
-- Python does not have a type system and can't automatically infer what should
-- be the resulting type of a JSON request. As a result, it must rely on this
-- list of parameter names to protect values correctly.
--
-- Names ending in _cluster will be treated as dicts of dicts of private values.
-- Otherwise they are considered dicts of private values.
privateParametersBlacklist :: [String]
privateParametersBlacklist = [ "osparams_private"
, "osparams_secret"
, "osparams_private_cluster"
]
-- | Warn the user that the logging level is too low for production use.
debugModeConfidentialityWarning :: String
debugModeConfidentialityWarning =
"ALERT: %s started in debug mode.\n\
\ Private and secret parameters WILL be logged!\n"
-- * Stat dictionary entries
--
-- The get_file_info RPC returns a number of values as a dictionary, and the
-- following constants are both descriptions and means of accessing them.
-- | The size of the file
statSize :: String
statSize = "size"
-- * Helper VM-related timeouts
-- | The default fixed timeout needed to startup the helper VM.
helperVmStartup :: Int
helperVmStartup = 5 * 60
-- | The default fixed timeout needed until the helper VM is finally
-- shutdown, for example, after installing the OS.
helperVmShutdown :: Int
helperVmShutdown = 2 * 60 * 60
-- | The zeroing timeout per MiB of disks to zero
--
-- Determined by estimating that a disk writes at a relatively slow
-- speed of 1/5 of the max speed of current drives.
zeroingTimeoutPerMib :: Double
zeroingTimeoutPerMib = 1.0 / (100.0 / 5.0)
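-- The formula above takes 100 MiB/s as the reference maximum speed, so the
-- assumed sustained rate is 100 / 5 = 20 MiB/s, i.e. 0.05 seconds of timeout
-- per MiB to be zeroed.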
-- * Networking
-- The minimum size of a network.
ipv4NetworkMinSize :: Int
ipv4NetworkMinSize = 30
-- The maximum size of a network.
--
-- FIXME: This limit is for performance reasons. Remove it once the
-- refactoring for performance tuning has been successful.
ipv4NetworkMaxSize :: Int
ipv4NetworkMaxSize = 30
-- * Data Collectors
dataCollectorCPULoad :: String
dataCollectorCPULoad = "cpu-avg-load"
dataCollectorDiskStats :: String
dataCollectorDiskStats = "diskstats"
dataCollectorDrbd :: String
dataCollectorDrbd = "drbd"
dataCollectorLv :: String
dataCollectorLv = "lv"
dataCollectorInstStatus :: String
dataCollectorInstStatus = "inst-status-xen"
dataCollectorParameterInterval :: String
dataCollectorParameterInterval = "interval"
dataCollectorNames :: FrozenSet String
dataCollectorNames =
ConstantUtils.mkSet [ dataCollectorCPULoad
, dataCollectorDiskStats
, dataCollectorDrbd
, dataCollectorLv
, dataCollectorInstStatus
]
dataCollectorStateActive :: String
dataCollectorStateActive = "active"
dataCollectorsEnabledName :: String
dataCollectorsEnabledName = "enabled_data_collectors"
dataCollectorsIntervalName :: String
dataCollectorsIntervalName = "data_collector_interval"
| ganeti-github-testing/ganeti-test-1 | src/Ganeti/Constants.hs | bsd-2-clause | 137,931 | 0 | 13 | 27,097 | 23,627 | 13,994 | 9,633 | -1 | -1 |
module Move where
import Control.Lens
import Data.Complex
import EngineState
import GPS
import Posture
import Unit
import Speed
orderMove :: Int -> [GPS] -> EngineState ()
orderMove uid gs = do
u <- getOwnUnit uid
replaceOwnUnit $ u
& unitWaypoints .~ gs
-- & unitPosture .~ Moving
-- moveUnitQuant :: Unit -> EngineState Distance
-- moveUnitQuant u = do
-- let s <- speedDefault u ^. unitType
-- moveUnit :: Unit -> EngineState ()
-- moveUnit u = do
-- let wps = u ^. unitWaypoints
-- case wps of
-- [] -> return ()
-- _ -> do
-- let s = (speedDefault u ^. unitType) / 4
-- s' = speedApplyCommandControl s (u ^. unitCommandControlStatus)
-- s'' =
-- moveUnit :: GPS -> Speed -> GPS
-- moveUnit g s = (realPart newcg, imagPart newcg)
-- where cg = (gpsX g) :+ (gpsY g)
-- newcg = cg + s
movePulse :: EngineState ()
movePulse
| nbrk/ld | library/Move.hs | bsd-2-clause | 899 | 0 | 9 | 230 | 115 | 69 | 46 | -1 | -1 |
--
-- Scene: test scene data
--
module UnitTest.Aya.Scene where
import Aya.Algebra
xReso = 256 :: Int
yReso = 256 :: Int
xRegion = (-1.0, 1.0)
yRegion = (-1.0, 1.0)
eyepos = Vector3 1 1 1
etarget = Vector3 0 2 4
upper = Vector3 0 1 0
focus = 1.5
| eiji-a/aya | src/UnitTest/Aya/Scene.hs | bsd-3-clause | 259 | 0 | 6 | 64 | 98 | 58 | 40 | 10 | 1 |
{-# OPTIONS_GHC -Wall #-}
{-# LANGUAGE Safe #-}
{- |
Module : Physics.Learn.Current
Copyright : (c) Scott N. Walck 2012-2019
License : BSD3 (see LICENSE)
Maintainer : Scott N. Walck <[email protected]>
Stability : experimental
This module contains functions for working with current, magnetic field,
and magnetic flux.
-}
module Physics.Learn.Current
(
-- * Current
Current
, CurrentDistribution(..)
-- * Magnetic Field
, bField
, bFieldFromLineCurrent
, bFieldFromSurfaceCurrent
, bFieldFromVolumeCurrent
-- * Magnetic Flux
, magneticFlux
)
where
import Physics.Learn.CarrotVec
( magnitude
, (*^)
, (^/)
, (><)
)
import Physics.Learn.Position
( VectorField
, displacement
, addFields
)
import Physics.Learn.Curve
( Curve(..)
, crossedLineIntegral
)
import Physics.Learn.Surface
( Surface(..)
, surfaceIntegral
, dottedSurfaceIntegral
)
import Physics.Learn.Volume
( Volume(..)
, volumeIntegral
)
-- | Electric current, in units of Amperes (A)
type Current = Double
-- | A current distribution is a line current (current through a wire), a surface current,
-- a volume current, or a combination of these.
-- The 'VectorField' describes a surface current density
-- or a volume current density.
data CurrentDistribution = LineCurrent Current Curve -- ^ current through a wire
| SurfaceCurrent VectorField Surface -- ^ 'VectorField' is surface current density (A/m)
| VolumeCurrent VectorField Volume -- ^ 'VectorField' is volume current density (A/m^2)
| MultipleCurrents [CurrentDistribution] -- ^ combination of current distributions
-- | Magnetic field produced by a line current (current through a wire).
-- The function 'bField' calls this function
-- to evaluate the magnetic field produced by a line current.
bFieldFromLineCurrent
:: Current -- ^ current (in Amps)
-> Curve -- ^ geometry of the line current
-> VectorField -- ^ magnetic field (in Tesla)
bFieldFromLineCurrent i c r
= k *^ crossedLineIntegral 1000 integrand c
where
k = 1e-7 -- mu0 / (4 * pi)
integrand r' = (-i) *^ d ^/ magnitude d ** 3
where
d = displacement r' r
-- | Magnetic field produced by a surface current.
-- The function 'bField' calls this function
-- to evaluate the magnetic field produced by a surface current.
-- This function assumes that surface current density
-- will be specified parallel to the surface, and does
-- not check if that is true.
bFieldFromSurfaceCurrent
:: VectorField -- ^ surface current density
-> Surface -- ^ geometry of the surface current
-> VectorField -- ^ magnetic field (in T)
bFieldFromSurfaceCurrent kCurrent c r
= k *^ surfaceIntegral 100 100 integrand c
where
k = 1e-7 -- mu0 / (4 * pi)
integrand r' = (kCurrent r' >< d) ^/ magnitude d ** 3
where
d = displacement r' r
-- | Magnetic field produced by a volume current.
-- The function 'bField' calls this function
-- to evaluate the magnetic field produced by a volume current.
bFieldFromVolumeCurrent
:: VectorField -- ^ volume current density
-> Volume -- ^ geometry of the volume current
-> VectorField -- ^ magnetic field (in T)
bFieldFromVolumeCurrent j c r
= k *^ volumeIntegral 50 50 50 integrand c
where
k = 1e-7 -- mu0 / (4 * pi)
integrand r' = (j r' >< d) ^/ magnitude d ** 3
where
d = displacement r' r
-- | The magnetic field produced by a current distribution.
-- This is the simplest way to find the magnetic field, because it
-- works for any current distribution (line, surface, volume, or combination).
bField :: CurrentDistribution -> VectorField
bField (LineCurrent i c) = bFieldFromLineCurrent i c
bField (SurfaceCurrent kC s) = bFieldFromSurfaceCurrent kC s
bField (VolumeCurrent j v) = bFieldFromVolumeCurrent j v
bField (MultipleCurrents cds) = addFields $ map bField cds
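-- A minimal usage sketch (the names @wire@ and @r@ are hypothetical, not
-- defined in this module): given a wire geometry @wire :: Curve@ carrying
-- 10 A and a field point @r@ from "Physics.Learn.Position", the field could
-- be evaluated as
--
-- > bField (LineCurrent 10 wire) r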
-------------------
-- Magnetic Flux --
-------------------
-- | The magnetic flux through a surface produced by a current distribution.
magneticFlux :: Surface -> CurrentDistribution -> Double
magneticFlux surf dist = dottedSurfaceIntegral 100 100 (bField dist) surf
| walck/learn-physics | src/Physics/Learn/Current.hs | bsd-3-clause | 4,449 | 0 | 11 | 1,145 | 609 | 354 | 255 | 69 | 1 |
{-# LANGUAGE TemplateHaskell #-}
module Demo where
import Prelude hiding (map, zipWith, zipWith3)
import Data.Vector.Storable (Vector)
import Nikola
f :: Exp (Vector Float) -> Exp (Vector Float)
f = map inc
--inc :: Exp Float -> Exp Float
inc = vapply $ \x -> x + 1
| mainland/nikola | tests/Demo.hs | bsd-3-clause | 271 | 0 | 8 | 52 | 88 | 51 | 37 | 8 | 1 |
{-# LANGUAGE OverloadedStrings #-}
-- | Documentation page controller.
module HL.Controller.Documentation where
import HL.Controller
import HL.View
import HL.View.Documentation
-- | Documentation controller.
getDocumentationR :: C (Html ())
getDocumentationR = do
tutorialMap <- fmap appTutorials getYesod
lucid (documentationV tutorialMap)
| haskell-lang/haskell-lang | src/HL/Controller/Documentation.hs | bsd-3-clause | 349 | 0 | 9 | 46 | 71 | 39 | 32 | 9 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
module Spec.ExecuteM64 where
import Spec.Decode
import Spec.Machine
import Utility.Utility
import Control.Monad
execute :: forall p t. (RiscvMachine p t) => InstructionM64 -> p ()
-- begin ast
execute (Mulw rd rs1 rs2) = do
x <- getRegister rs1
y <- getRegister rs2
setRegister rd (s32 (x * y))
execute (Divw rd rs1 rs2) = do
x <- getRegister rs1
y <- getRegister rs2
let q | x == minSigned && y == -1 = x
| y == 0 = -1
| otherwise = quot x y
in setRegister rd (s32 q)
execute (Divuw rd rs1 rs2) = do
x <- getRegister rs1
y <- getRegister rs2
let q | y == 0 = maxUnsigned
| otherwise = divu x y
in setRegister rd (s32 q)
execute (Remw rd rs1 rs2) = do
x <- getRegister rs1
y <- getRegister rs2
let r | x == minSigned && y == -1 = 0
| y == 0 = x
| otherwise = rem x y
in setRegister rd (s32 r)
execute (Remuw rd rs1 rs2) = do
x <- getRegister rs1
y <- getRegister rs2
let r | y == 0 = x
| otherwise = remu x y
in setRegister rd (s32 r)
-- end ast
execute inst = error $ "dispatch bug: " ++ show inst
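-- Note: the guards above follow the non-trapping division semantics of the
-- RISC-V M extension: dividing by zero yields all ones for Divw/Divuw and
-- leaves the dividend unchanged for Remw/Remuw, while the minSigned-by-(-1)
-- case yields the dividend for Divw and zero for Remw.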
| mit-plv/riscv-semantics | src/Spec/ExecuteM64.hs | bsd-3-clause | 1,131 | 0 | 16 | 317 | 534 | 247 | 287 | 38 | 1 |
{-# OPTIONS_GHC -freduction-depth=0 #-}
-- | Devices management
--
-- This module allows the creation of a 'DeviceManager' which:
--
-- * maintains an up-to-date tree of connected devices
-- * maintains device index by subsystem type
-- * signals when tree changes through STM channels
-- * allows query of devices
-- * allows device property querying/setting
--
-- Internally, it relies on Linux's sysfs and on a socket to receive netlink
-- kernel object events.
--
module Haskus.System.Devices
( Device (..)
-- * Device manager
, DeviceManager (..)
, initDeviceManager
, deviceAdd
, deviceMove
, deviceRemove
, deviceLookup
-- * Device tree
, DeviceTree (..)
, DevicePath
, SubsystemIndex (..)
, deviceTreeCreate
, deviceTreeInsert
, deviceTreeRemove
, deviceTreeLookup
, deviceTreeMove
-- * Various
, getDeviceHandle
, getDeviceHandleByName
, releaseDeviceHandle
, openDeviceDir
, listDevicesWithClass
, listDeviceClasses
, listDevices
)
where
import Prelude hiding (lookup)
import qualified Haskus.Format.Binary.BitSet as BitSet
import Haskus.Format.Binary.Word
import Haskus.Format.Text (Text)
import qualified Haskus.Format.Text as Text
import Haskus.System.Linux.Error
import Haskus.System.Linux.Devices
import Haskus.System.Linux.Handle
import Haskus.System.Linux.FileSystem
import Haskus.System.Linux.FileSystem.Directory
import Haskus.System.Linux.KernelEvent
import Haskus.System.Sys
import Haskus.System.FileSystem
import Haskus.System.Process
import Haskus.System.Network
import Haskus.Utils.Flow
import Haskus.Utils.Maybe
import Haskus.Utils.STM
import Haskus.Format.Text (textFormat,string,shown,(%))
import Control.Arrow (second)
import qualified Data.Map as Map
import Data.Map (Map)
import qualified Data.Set as Set
import Data.Set (Set)
-- Note [sysfs]
-- ~~~~~~~~~~~~
--
-- Linux uses "sysfs" virtual file system to export kernel objects, their
-- attributes and their relationships to user-space. The mapping is as follow:
--
-- | Kernel        | User-space     |
-- |---------------|----------------|
-- | Objects       | Directories    |
-- | Attributes    | Files          |
-- | Relationships | Symbolic links |
--
-- Initially attributes were ASCII files at most one page-size large. Now there
-- are "binary attributes" (non-ASCII files) that can be larger than a page.
--
-- The sysfs tree is mutable: devices can be (un)plugged, renamed, etc.
-- * Object changes in sysfs are notified to userspace via Netlink's kernel events.
-- * Attribute changes are not. But we should be able to watch some of them with
-- inotify
--
-- User-space can set some attributes by writing into the attribute files.
--
-- sysfs documentation is very sparse and rather bad. I had to read sources
-- (udev, systemd's sd-device, linux), articles, kernel docs, MLs, etc. See the
-- thread below for an attempt by Rob Landley in 2007 to document the whole
-- thing, and the bad reactions from the sysfs/udev devs:
-- "Documentation for sysfs, hotplug, and firmware loading." thread on LKML
-- http://lkml.iu.edu/hypermail/linux/kernel/0707.2/index.html#1085
--
-- Most of his critics are still valid:
-- * current documentation is bad (says what not to do, but not what to do)
-- * there is no unified /sys/subsystem directory
-- * we still have to check the subsystem to see if devices are block or char
-- * contradictions between anti-guidelines in sysfs-rules.txt and available
-- approaches
--
-- According to Documentation/sysfs-rules.txt in the kernel tree:
-- * there is a single tree containing all the devices: in /devices
-- * devices have the following properties:
-- * a devpath (e.g., /devices/pci0000:00/0000:00:1d.1/usb2/2-2/2-2:1.0)
-- used as a unique key to identify the device at this point in time
-- * a kernel name (basename of the devpath)
-- * a subsystem (optional): basename of the "subsystem" link
-- * a driver (optional): basename of the "driver" link
-- * attributes (files)
--
-- Devices are defined with a "struct device" (cf include/linux/device.h in the
-- kernel tree).
--
-- Devices can be found by their subsystem: until it gets unified in a
-- /subsystem directory, we can find devices by subsystems by looking into
-- /class/SUB and /bus/SUB/devices.
--
-- If the subsystem is "block", device special files have to be of type "block",
-- otherwise they have to be of type "character".
--
-- "device" link shouldn't be used at all to find the parent device. The device
-- hierarchy in /devices can be used instead.
--
-- "subsystem" link shouldn't be used at all (except for getting the subsystem
-- name I guess).
--
-- We mustn't assume a specific device hierarchy as it can change between kernel
-- versions.
--
--
-- Kernel Object and Subsystems
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- Kernel object (or kobject) is a kernel structure used as a top-class for
-- several kinds of objects. It provides/supports:
-- * reference counting
-- * an object name
-- * a hierarchy of kobject's
-- * via a "parent" field (pointer to another kobject)
-- * via "ksets" (subsystems)
-- * sysfs mapping and notifications
--
-- A subsystem (or a "kset") is basically a kobject which references a
-- linked-list of kobjects of the same type. Each kobject can only be in a
-- single subsystem (via its "kset" field).
--
--
-- HotPlug and ColdPlug
-- ~~~~~~~~~~~~~~~~~~~~
--
-- HotPlug devices are signaled through a Netlink socket.
--
-- ColdPlug devices are already in the sysfs tree before we have a chance to
-- listen to the Netlink socket. We may:
-- 1) write "add" in their "uevent" attribute to get them resent through the
-- Netlink socket with Add action (remove, change, move, etc. commands seem
-- to work too with the uevent attribute).
-- 2) read their "uevent" attribute and fake an "Add" event
-- 3) just parse their attributes if necessary
--
--
-- SUMMARY
-- ~~~~~~~
--
-- The kernel wants to export a mutable tree to user-space:
-- * non-leaf nodes can be added, removed, moved (renamed)
-- * leaf nodes can be added, removed or have their value changed
-- * some leaf nodes can be written by user-space
--
-- sysfs offers a *non-atomic* interface on the current state of the tree because
-- of the nature of the VFS:
-- * nodes can be added/removed/moved between directory listing and actual
-- exploration of the listing
-- * an opened file may not be readable/writable anymore
--
-- netlink socket signals some of the changes:
-- * non-leaf node addition/removal/renaming
-- * generic "change" action for attributes
--
-- Specific attributes can be watched with inotify, especially if they don't
-- trigger "change" netlink notification when their value changes.
--
-- REFERENCES
-- * "The sysfs Filesystem", Patrick Mochel, 2005
-- https://www.kernel.org/pub/linux/kernel/people/mochel/doc/papers/ols-2005/mochel.pdf
-- * Documentation/sysfs-rules in the kernel tree (what not to do)
-- * lib/kobject.c in the kernel tree (e.g., function kobject_rename)
--
-------------------------------------------------------------------------------
-- Device manager
-------------------------------------------------------------------------------
-- | Device manager
data DeviceManager = DeviceManager
{ dmEvents :: TChan KernelEvent -- ^ Netlink kobject events
, dmSysFS :: Handle -- ^ Handle to sysfs
, dmDevFS :: Handle -- ^ root of the tmpfs used to create device nodes
, dmDevNum :: TVar Word64 -- ^ counter used to create device node
, dmDevices :: TVar DeviceTree -- ^ Device hierarchy
, dmSubsystems :: TVar (Map Text SubsystemIndex) -- ^ Per-subsystem index
, dmOnSubsystemAdd :: TChan Text -- ^ When a new subsystem appears
}
-- | Init a device manager
initDeviceManager :: Handle -> Handle -> Sys DeviceManager
initDeviceManager sysfs devfs = do
-- open Netlink socket and then duplicate the kernel event channel so that
-- events start accumulating until we launch the handling thread
bch <- newKernelEventReader
ch <- atomically $ dupTChan bch
-- create empty device manager
root <- deviceTreeCreate Nothing Nothing Map.empty
devNum <- newTVarIO 0 -- device node counter
subIndex' <- newTVarIO Map.empty
tree' <- newTVarIO root
sadd <- newBroadcastTChanIO
let dm = DeviceManager
{ dmDevices = tree'
, dmSubsystems = subIndex'
, dmEvents = bch
, dmSysFS = sysfs
, dmDevFS = devfs
, dmDevNum = devNum
, dmOnSubsystemAdd = sadd
}
-- we enumerate devices from sysfs. Directory listing is non-atomic so
-- directories may appear or be removed while we do the traversal. Hence we
-- shouldn't fail on error, just skip the erroneous directories.
--
-- After the traversal, kernel events potentially received during the
-- traversal are used to create/remove nodes. We have to be liberal in their
-- interpretation: e.g., a remove event could be received for a directory we
-- haven't been able to read, etc.
let
withDevDir hdl path f = withOpenAt hdl path flags BitSet.empty f
flags = BitSet.fromList [ HandleDirectory
, HandleNonBlocking
, HandleDontFollowSymLinks
]
      -- Read a sysfs device directory and try to create a DeviceTree
      -- recursively from it. The current directory is already opened and its
      -- handle is passed alongside the device path.
      -- Errors are not fatal: unreadable sub-directories are simply skipped.
readSysfsDir :: Text -> Handle -> FlowT '[] Sys ()
readSysfsDir path hdl = do
unless (Text.null path) $
lift (deviceAdd dm path Nothing)
-- list directories (sub-devices) that are *not* symlinks
dirs <- (listDirectory hdl
-- filter to keep only directories (sysfs fills the type field)
||> filter (\entry -> entryType entry == TypeDirectory)
-- only keep the directory name
||> fmap entryName
-- return an empty directory list on error
) `catchAllE` (\_ -> success [])
-- recursively try to create a tree for each sub-dir
forM_ dirs $ \dir -> do
let path' = Text.concat [path, Text.pack "/", Text.pack dir]
withDevDir hdl dir (readSysfsDir path')
`catchAllE` (\_ -> success ())
return ()
-- list devices in /devices
withDevDir sysfs "devices" (readSysfsDir Text.empty)
|> evalCatchFlowT (\err -> sysError (textFormat ("Cannot read /devices in sysfs: " % shown) err))
|> void
-- launch handling thread
sysFork "Kernel sysfs event handler" $ eventThread ch dm
return dm
-- | Thread handling incoming kernel events
eventThread :: TChan KernelEvent -> DeviceManager -> Sys ()
eventThread ch dm = do
forever $ do
-- read kernel event
ev <- atomically (readTChan ch)
case Text.unpack (fst (bkPath (kernelEventDevPath ev))) of
-- TODO: handle module ADD/REMOVE (/module/* path)
"module" -> sysWarningShow "sysfs event in /module ignored"
(kernelEventDevPath ev)
-- event in the device tree: update the device tree and trigger rules
"devices" -> do
let
-- remove "/devices" from the path
path = Text.drop 8 (kernelEventDevPath ev)
signalEvent f = do
notFound <- atomically $ do
tree <- readTVar (dmDevices dm)
case deviceTreeLookup path tree of
Just node -> do
writeTChan (f node) ev
return False
Nothing -> return True
when notFound $
sysWarning (textFormat ("Event received for non existing device: " % shown) path)
case kernelEventAction ev of
ActionAdd -> do
sysLogInfoShow "Added device" path
deviceAdd dm path (Just ev)
ActionRemove -> do
sysLogInfoShow "Removed device" path
deviceRemove dm path ev
ActionMove -> do
sysLogInfoShow "Moved device" path
deviceMove dm path ev
ActionChange -> do
sysLogInfoShow "Changed device" path
signalEvent deviceNodeOnChange
ActionOnline -> do
sysLogInfoShow "Device goes online" path
signalEvent deviceNodeOnOnline
ActionOffline -> do
sysLogInfoShow "Device goes offline" path
signalEvent deviceNodeOnOffline
ActionOther _ -> do
sysLogInfoShow "Unknown device event" path
signalEvent deviceNodeOnOther
-- warn on unrecognized event
str -> sysWarningShow (textFormat ("sysfs event in /" % string % " ignored") str) (kernelEventDevPath ev)
-- | Lookup a device by name
deviceLookup :: DeviceManager -> DevicePath -> Sys (Maybe DeviceTree)
deviceLookup dm path = deviceTreeLookup path <$> readTVarIO (dmDevices dm)
-- | Add a device
deviceAdd :: DeviceManager -> DevicePath -> Maybe KernelEvent -> Sys ()
deviceAdd dm path mev = do
let rpath = "devices" ++ Text.unpack path -- relative path in sysfs
(msubsystem,mdev) <- case mev of
Nothing -> sysfsReadDev (dmSysFS dm) rpath
-- device id may be read from the event properties
Just ev -> do
let
detail k = Map.lookup (Text.pack k) (kernelEventDetails ev)
detailNum k = fmap (read . Text.unpack) (detail k)
case (detailNum "MAJOR", detailNum "MINOR") of
(Just ma, Just mi) -> do
sub <- sysfsReadSubsystem (dmSysFS dm) rpath
return (sub, (`sysfsMakeDev` DeviceID ma mi) <$> sub)
_ -> sysfsReadDev (dmSysFS dm) rpath
node <- deviceTreeCreate msubsystem mdev Map.empty
atomically $ do
-- update the tree
tree <- readTVar (dmDevices dm)
tree' <- deviceTreeInsert path node tree
writeTVar (dmDevices dm) tree'
case msubsystem of
Nothing -> return ()
Just subsystem -> do
-- Add device into subsystem index
subs <- readTVar (dmSubsystems dm)
subs' <- case Map.lookup subsystem subs of
Nothing -> do
-- create new index
index <- SubsystemIndex (Set.singleton path)
<$> newBroadcastTChan
<*> newBroadcastTChan
-- signal the new subsystem
writeTChan (dmOnSubsystemAdd dm) subsystem
-- return the new index
return (Map.insert subsystem index subs)
Just index -> do
let
devs = subsystemDevices index
devs' = Set.insert path devs
index' = index { subsystemDevices = devs' }
-- signal the addition
writeTChan (subsystemOnAdd index) path
-- return the new index
return (Map.insert subsystem index' subs)
writeTVar (dmSubsystems dm) subs'
-- | Remove a device
deviceRemove :: DeviceManager -> DevicePath -> KernelEvent -> Sys ()
deviceRemove dm path ev = do
notFound <- atomically $ do
tree <- readTVar (dmDevices dm)
case deviceTreeLookup path tree of
Just node -> do
-- remove from tree and signal
writeTVar (dmDevices dm) (deviceTreeRemove path tree)
writeTChan (deviceNodeOnRemove node) ev
case deviceNodeSubsystem node of
Nothing -> return ()
Just s -> do
-- Remove from index
subs <- readTVar (dmSubsystems dm)
let
index = subs Map.! s
devs = subsystemDevices index
index' = index { subsystemDevices = Set.delete path devs}
writeTVar (dmSubsystems dm) (Map.insert s index' subs)
-- signal for index
writeTChan (subsystemOnRemove index) path
return False
Nothing -> return True
when notFound $ do
sysWarning $ textFormat ("Remove event received for non existing device: " % shown) path
-- | Move a device
--
-- A device can be moved/renamed in the device tree (see kobject_rename
-- in lib/kobject.c in the kernel sources)
deviceMove :: DeviceManager -> DevicePath -> KernelEvent -> Sys ()
deviceMove dm path ev = do
-- get old device path
let oldPath' = Map.lookup (Text.pack "DEVPATH_OLD") (kernelEventDetails ev)
oldPath <- case oldPath' of
Nothing -> sysError "Cannot find DEVPATH_OLD entry for device move kernel event"
Just x -> return (Text.drop 8 x) -- remove "/devices"
notFound <- atomically $ do
-- move the device in the tree
tree <- readTVar (dmDevices dm)
case deviceTreeLookup oldPath tree of
Just node -> do
-- move the node in the tree
tree' <- deviceTreeMove oldPath path tree
writeTVar (dmDevices dm) tree'
-- signal the event
writeTChan (deviceNodeOnMove node) ev
return False
Nothing -> return True
when notFound $ do
sysWarning $ textFormat ("Move event received for non existing device: " % shown % ". We try to add it") path
deviceAdd dm path (Just ev)
-------------------------------------------------------------------------------
-- Device tree & subsystem index
-------------------------------------------------------------------------------
-- | Device tree
--
-- It is expected that the device tree will not change much after the
-- initialization phase (except when a device is (dis)connected, etc.), hence it
-- is an immutable data structure. It is much easier to perform tree traversal
-- with a single global lock thereafter.
data DeviceTree = DeviceTree
{ deviceNodeSubsystem :: Maybe Text -- ^ Subsystem
, deviceDevice :: Maybe Device -- ^ Device identifier
, deviceNodeChildren :: Map Text DeviceTree -- ^ Children devices
, deviceNodeOnRemove :: TChan KernelEvent -- ^ On "remove" event
, deviceNodeOnChange :: TChan KernelEvent -- ^ On "change" event
, deviceNodeOnMove :: TChan KernelEvent -- ^ On "move" event
, deviceNodeOnOnline :: TChan KernelEvent -- ^ On "online" event
, deviceNodeOnOffline :: TChan KernelEvent -- ^ On "offline" event
, deviceNodeOnOther :: TChan KernelEvent -- ^ On other events
}
-- | Per-subsystem events
data SubsystemIndex = SubsystemIndex
{ subsystemDevices :: Set Text -- ^ Devices in the index
, subsystemOnAdd :: TChan Text -- ^ Signal device addition
, subsystemOnRemove :: TChan Text -- ^ Signal device removal
}
type DevicePath = Text
-- | Break a device tree path into (first component, remaining)
bkPath :: DevicePath -> (Text,Text)
bkPath p = second f (Text.breakOn (Text.pack "/") p')
where
-- handle paths starting with "/"
p' = if not (Text.null p) && Text.head p == '/'
then Text.tail p
else p
f xs
| Text.null xs = xs
| otherwise = Text.tail xs
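-- For example (illustrative values, not doctests from the original source):
--
-- > bkPath (Text.pack "/devices/pci0000:00/0000:00:1d.1")
-- >   == (Text.pack "devices", Text.pack "pci0000:00/0000:00:1d.1")
-- > bkPath (Text.pack "module") == (Text.pack "module", Text.pack "")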
-- | Create a device tree
deviceTreeCreate :: MonadIO m => Maybe Text -> Maybe Device -> Map Text DeviceTree -> m DeviceTree
deviceTreeCreate subsystem dev children = atomically (deviceTreeCreate' subsystem dev children)
-- | Create a device tree
deviceTreeCreate' :: Maybe Text -> Maybe Device -> Map Text DeviceTree -> STM DeviceTree
deviceTreeCreate' subsystem dev children = DeviceTree subsystem dev children
<$> newBroadcastTChan
<*> newBroadcastTChan
<*> newBroadcastTChan
<*> newBroadcastTChan
<*> newBroadcastTChan
<*> newBroadcastTChan
-- | Move a node in the tree
deviceTreeMove :: Text -> Text -> DeviceTree -> STM DeviceTree
deviceTreeMove src tgt root = case (bkPath src, bkPath tgt) of
((x,xs),(y,ys))
-- we only modify the subtree concerned by the move
| x == y -> do
case Map.lookup x (deviceNodeChildren root) of
            Nothing -> error "deviceTreeMove: source node doesn't exist"
            Just p -> deviceTreeMove xs ys p
| otherwise -> do
case deviceTreeLookup src root of
            Nothing -> error "deviceTreeMove: source node doesn't exist"
            Just n -> deviceTreeInsert tgt n (deviceTreeRemove src root)
-- | Look up a node in the tree
deviceTreeLookup :: Text -> DeviceTree -> Maybe DeviceTree
deviceTreeLookup path root = case bkPath path of
(x,xs)
| Text.null x
&& Text.null xs -> error "deviceTreeLookup': empty path"
| otherwise -> do
n <- Map.lookup x (deviceNodeChildren root)
if Text.null xs
then Just n
else deviceTreeLookup xs n
-- | Remove a node from the tree
deviceTreeRemove :: Text -> DeviceTree -> DeviceTree
deviceTreeRemove path root = root { deviceNodeChildren = cs' }
where
cs = deviceNodeChildren root
cs' = case bkPath path of
(x,xs)
| Text.null x
&& Text.null xs -> error "deviceTreeRemove: empty path"
| Text.null xs -> Map.delete x cs
| otherwise -> Map.update (Just . deviceTreeRemove xs) x cs
-- | Insert a node in the tree
deviceTreeInsert :: Text -> DeviceTree -> DeviceTree -> STM DeviceTree
deviceTreeInsert path node root = do
let cs = deviceNodeChildren root
cs' <- case bkPath path of
(x,xs)
| Text.null x && Text.null xs -> error "deviceTreeInsert: empty path"
| Text.null xs -> return (Map.insert x node cs)
| otherwise ->
case Map.lookup x cs of
Just p -> do
node' <- deviceTreeInsert xs node p
return (Map.insert x node' cs)
-- the parent doesn't exist yet. Add it. As it should not be a
-- real device, we don't look for subsystem and device id (i.e.,
-- we don't call deviceAdd)
Nothing -> do
p <- deviceTreeCreate' Nothing Nothing Map.empty
node' <- deviceTreeInsert xs node p
return (Map.insert x node' cs)
return (root { deviceNodeChildren = cs' })
-------------------------------------------------------------------------------
-- Various
-------------------------------------------------------------------------------
-- | Create a new thread reading kernel events and putting them in a TChan
newKernelEventReader :: Sys (TChan KernelEvent)
newKernelEventReader = do
h <- createKernelEventSocket
ch <- newBroadcastTChanIO
let
go = forever $ do
threadWaitRead h
ev <- receiveKernelEvent h
atomically $ writeTChan ch ev
sysFork "Kernel sysfs event reader" go
return ch
-- | Get device handle by name (i.e., sysfs path)
getDeviceHandleByName :: DeviceManager -> String -> FlowT (ErrorCode ': OpenErrors) Sys Handle
getDeviceHandleByName dm path = do
dev <- lift <| deviceLookup dm (Text.pack path)
case dev >>= deviceDevice of
Just d -> getDeviceHandle dm d
Nothing -> throwE DeviceNotFound
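-- A minimal usage sketch (the sysfs path below is purely hypothetical):
--
-- > getDeviceHandleByName dm "/virtual/misc/fuse"
--
-- which either yields a handle on the device special file or fails with
-- 'DeviceNotFound' (or one of the usual open errors).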
-- | Get a handle on a device
--
-- Linux doesn't provide an API to open a device directly from its major and
-- minor numbers. Instead we must create a special device file with mknod in
-- the VFS and open it. This is what this function does. Additionally, we
-- remove the file once it is opened.
getDeviceHandle :: DeviceManager -> Device -> FlowT (ErrorCode ': OpenErrors) Sys Handle
getDeviceHandle dm dev = do
-- get a fresh device number
num <- atomically $ do
n <- readTVar (dmDevNum dm)
writeTVar (dmDevNum dm) (n+1)
return n
let
devname = "./dev" ++ show num
devfd = dmDevFS dm
logS = textFormat ("Opening device " % string % " into " % string)
(showDevice dev) devname
sysLogSequenceL logS <| do
-- create special file in device fs
liftFlowT <| createDeviceFile (Just devfd) devname dev BitSet.empty
-- on success, try to open it
let flgs = BitSet.fromList [HandleReadWrite,HandleNonBlocking]
hdl <- liftFlowT <| open (Just devfd) devname flgs BitSet.empty
-- then remove it
liftFlowT <| sysUnlinkAt devfd devname False
`onFlowError` sysWarningShow "Unlinking special device file failed"
return hdl
-- | Release a device handle
releaseDeviceHandle :: Handle -> Sys ()
releaseDeviceHandle fd = close fd
|> evalCatchFlowT (\err -> do
let msg = textFormat ("close (failed with " % shown % ")") err
sysLog LogWarning msg)
-- | Open the device directory (under the device tmpfs) for the given device,
-- identified by its type and (major, minor) numbers
openDeviceDir :: DeviceManager -> Device -> FlowT OpenErrors Sys Handle
openDeviceDir dm dev = open (Just (dmDevFS dm)) path (BitSet.fromList [HandleDirectory]) BitSet.empty
where
path = "./dev/" ++ typ' ++ "/" ++ ids
typ' = case deviceType dev of
CharDevice -> "char"
BlockDevice -> "block"
ids = show (deviceMajor (deviceID dev)) ++ ":" ++ show (deviceMinor (deviceID dev))
-- | List devices
listDevices :: DeviceManager -> Sys [Text]
listDevices dm = atomically (listDevices' dm)
-- | List devices
listDevices' :: DeviceManager -> STM [Text]
listDevices' dm = go Text.empty <$> readTVar (dmDevices dm)
where
go parent n = parent : (cs >>= f)
where
cs = Map.assocs (deviceNodeChildren n)
f (p,n') = go (Text.concat [parent, Text.pack "/", p]) n'
-- | List device classes
listDeviceClasses :: DeviceManager -> Sys [Text]
listDeviceClasses dm = atomically (Map.keys <$> readTVar (dmSubsystems dm))
-- | List devices with the given class
--
-- TODO: support dynamic asynchronous device adding/removal
listDevicesWithClass :: DeviceManager -> String -> Sys [(DevicePath,DeviceTree)]
listDevicesWithClass dm cls = atomically $ do
subs <- readTVar (dmSubsystems dm)
devs <- readTVar (dmDevices dm)
ds <- listDevices' dm
let paths = Map.lookup (Text.pack cls) subs
||> Set.elems . subsystemDevices
|> fromMaybe []
getNode x = case deviceTreeLookup x devs of
Just n -> n
Nothing -> error ("Mismatch between device tree and device subsystem index! Report this as a Haskus bug. (" ++ show x ++ ") " ++ show ds)
nodes = fmap getNode paths
return (paths `zip` nodes)
| hsyl20/ViperVM | haskus-system/src/lib/Haskus/System/Devices.hs | bsd-3-clause | 27,473 | 0 | 31 | 7,863 | 4,875 | 2,512 | 2,363 | -1 | -1 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE FlexibleInstances, MultiParamTypeClasses #-}
module Distribution.Nixpkgs.Haskell.Derivation
( Derivation, pkgid, revision, src, isLibrary, isExecutable
, extraFunctionArgs, libraryDepends, executableDepends, testDepends, configureFlags
, cabalFlags, runHaddock, jailbreak, doCheck, testTarget, hyperlinkSource, enableSplitObjs
, enableLibraryProfiling, enableExecutableProfiling, phaseOverrides, editedCabalFile, metaSection
, dependencies
)
where
import Control.DeepSeq.Generics
import Data.List
import Data.Set ( Set )
import qualified Data.Set as Set
import Data.Set.Lens
import Distribution.Nixpkgs.Fetch
import Distribution.Nixpkgs.Haskell.OrphanInstances ( )
import Distribution.Nixpkgs.Meta
import Distribution.Nixpkgs.Haskell.BuildInfo
import Distribution.Package
import Distribution.PackageDescription ( FlagAssignment, FlagName(..) )
import GHC.Generics ( Generic )
import Internal.Lens
import Internal.PrettyPrinting
import Language.Nix
-- | A representation of Nix expressions for building Haskell packages.
-- The data type corresponds closely to the definition of
-- 'PackageDescription' from Cabal.
data Derivation = MkDerivation
{ _pkgid :: PackageIdentifier
, _revision :: Int
, _src :: DerivationSource
, _isLibrary :: Bool
, _isExecutable :: Bool
, _extraFunctionArgs :: Set Identifier
, _libraryDepends :: BuildInfo
, _executableDepends :: BuildInfo
, _testDepends :: BuildInfo
, _configureFlags :: Set String
, _cabalFlags :: FlagAssignment
, _runHaddock :: Bool
, _jailbreak :: Bool
, _doCheck :: Bool
, _testTarget :: String
, _hyperlinkSource :: Bool
, _enableLibraryProfiling :: Bool
, _enableExecutableProfiling :: Bool
, _enableSplitObjs :: Bool
, _phaseOverrides :: String
, _editedCabalFile :: String
, _metaSection :: Meta
}
deriving (Show, Eq, Generic)
instance Default Derivation where
def = MkDerivation
{ _pkgid = error "undefined Derivation.pkgid"
, _revision = error "undefined Derivation.revision"
, _src = error "undefined Derivation.src"
, _isLibrary = error "undefined Derivation.isLibrary"
, _isExecutable = error "undefined Derivation.isExecutable"
, _extraFunctionArgs = error "undefined Derivation.extraFunctionArgs"
, _libraryDepends = error "undefined Derivation.libraryDepends"
, _executableDepends = error "undefined Derivation.executableDepends"
, _testDepends = error "undefined Derivation.testDepends"
, _configureFlags = error "undefined Derivation.configureFlags"
, _cabalFlags = error "undefined Derivation.cabalFlags"
, _runHaddock = error "undefined Derivation.runHaddock"
, _jailbreak = error "undefined Derivation.jailbreak"
, _doCheck = error "undefined Derivation.doCheck"
, _testTarget = error "undefined Derivation.testTarget"
, _hyperlinkSource = error "undefined Derivation.hyperlinkSource"
, _enableLibraryProfiling = error "undefined Derivation.enableLibraryProfiling"
, _enableExecutableProfiling = error "undefined Derivation.enableExecutableProfiling"
, _enableSplitObjs = error "undefined Derivation.enableSplitObjs"
, _phaseOverrides = error "undefined Derivation.phaseOverrides"
, _editedCabalFile = error "undefined Derivation.editedCabalFile"
, _metaSection = error "undefined Derivation.metaSection"
}
makeLenses ''Derivation
makeLensesFor [("_libraryDepends", "dependencies"), ("_executableDepends", "dependencies"), ("_testDepends", "dependencies")] ''Derivation
instance Package Derivation where
packageId = view pkgid
instance NFData Derivation where rnf = genericRnf
instance Pretty Derivation where
pPrint drv@(MkDerivation {..}) = funargs (map text ("mkDerivation" : toAscList inputs)) $$ vcat
[ text "mkDerivation" <+> lbrace
, nest 2 $ vcat
[ attr "pname" $ doubleQuotes $ disp (packageName _pkgid)
, attr "version" $ doubleQuotes $ disp (packageVersion _pkgid)
, sourceAttr _src
, onlyIf (_revision > 0) $ attr "revision" $ doubleQuotes $ int _revision
, onlyIf (not (null _editedCabalFile)) $ attr "editedCabalFile" $ string _editedCabalFile
, listattr "configureFlags" empty (map (show . show) renderedFlags)
, boolattr "isLibrary" (not _isLibrary || _isExecutable) _isLibrary
, boolattr "isExecutable" (not _isLibrary || _isExecutable) _isExecutable
, onlyIf (_libraryDepends /= mempty) $ pPrintBuildInfo "library" _libraryDepends
, onlyIf (_executableDepends /= mempty) $ pPrintBuildInfo "executable" _executableDepends
, onlyIf (_testDepends /= mempty) $ pPrintBuildInfo "test" _testDepends
, boolattr "enableLibraryProfiling" _enableLibraryProfiling _enableLibraryProfiling
, boolattr "enableExecutableProfiling" _enableExecutableProfiling _enableExecutableProfiling
, boolattr "enableSplitObjs" (not _enableSplitObjs) _enableSplitObjs
, boolattr "doHaddock" (not _runHaddock) _runHaddock
, boolattr "jailbreak" _jailbreak _jailbreak
, boolattr "doCheck" (not _doCheck) _doCheck
, onlyIf (not (null _testTarget)) $ attr "testTarget" $ string _testTarget
, boolattr "hyperlinkSource" (not _hyperlinkSource) _hyperlinkSource
, onlyIf (not (null _phaseOverrides)) $ vcat ((map text . lines) _phaseOverrides)
, pPrint _metaSection
]
, rbrace
]
where
inputs :: Set String
inputs = Set.unions [ Set.map (view ident) _extraFunctionArgs
, setOf (dependencies . each . folded . localName . ident) drv
, Set.fromList ["fetch" ++ derivKind _src | derivKind _src /= "" && not isHackagePackage]
]
renderedFlags = [ text "-f" <> (if enable then empty else char '-') <> text f | (FlagName f, enable) <- _cabalFlags ]
++ map text (toAscList _configureFlags)
isHackagePackage = "mirror://hackage/" `isPrefixOf` derivUrl _src
sourceAttr (DerivationSource{..})
| isHackagePackage = attr "sha256" $ string derivHash
| derivKind /= "" = vcat
[ text "src" <+> equals <+> text ("fetch" ++ derivKind) <+> lbrace
, nest 2 $ vcat
[ attr "url" $ string derivUrl
, attr "sha256" $ string derivHash
, if derivRevision /= "" then attr "rev" (string derivRevision) else empty
]
, rbrace <> semi
]
| otherwise = attr "src" $ text derivUrl
| psibi/cabal2nix | src/Distribution/Nixpkgs/Haskell/Derivation.hs | bsd-3-clause | 6,880 | 0 | 18 | 1,597 | 1,499 | 814 | 685 | 121 | 0 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
-- | Various proxies that help with handling the IRC business
module Pipes.Network.IRC.Core where
import Control.Monad (forever)
import Data.ByteString (ByteString)
import qualified Data.ByteString as BS
import qualified Data.Set as S
import Pipes
import Pipes.Network.TCP
import qualified Pipes.Prelude as P
import Pipes.Network.IRC.Internal.Parser
import Pipes.Network.IRC.Types
-- | Proxy that parses IRC messages
parseMsg :: (Monad m)
=> Pipe ByteString (Maybe Message) m r
parseMsg = forever $
await >>= yield . parseComplete messageParser
-- | For debugging purposes
logP :: Pipe ByteString ByteString IO r
logP = forever $ do
s <- await
lift . putStrLn $ "Sending response: " ++ Prelude.show s
yield s
-- | Proxy that filters messages according to 'IRCSettings'
filterMsg :: (MonadIO m, Monad m)
=> IRCSettings
-> Pipe Message Message m r
filterMsg (IRCSettings{..}) = loop
where loop = do
msg <- await
case msgCommand msg of
PrivMsgCmd targets txt -> do
let me = S.insert nick channels
if (trigger `BS.isPrefixOf` txt)
&& (not . S.null $ me `S.intersection` targets)
then yield msg >> loop
else loop
_ -> loop
-- | Proxy that responds to PING queries
handlePing :: (MonadIO m, Monad m)
=> Pipe Message Command m r
handlePing = do
msg <- await
case msgCommand msg of
PingCmd a b -> yield (PongCmd a b) >> liftIO (putStrLn "Ping handled") >> handlePing
_ -> handlePing
-- | Proxy that sends command to the socket
writeMsg :: Socket -> Consumer Command IO r
writeMsg sock = P.map showCommand >-> logP >-> toSocket sock
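-- A hedged end-to-end sketch (not part of the original module) of how the
-- pieces above could be composed. It assumes an already connected 'Socket'
-- named @sock@ and an 'IRCSettings' value named @settings@, neither of which
-- is defined here, and uses 'P.concat' to drop messages that failed to parse.
--
-- > runBot :: Socket -> IRCSettings -> IO ()
-- > runBot sock settings = runEffect $
-- >       fromSocket sock 4096
-- >   >-> parseMsg
-- >   >-> P.concat               -- Maybe Message -> Message, skipping Nothing
-- >   >-> filterMsg settings
-- >   >-> handlePing
-- >   >-> writeMsg sock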
| co-dan/pipes-irc | src/Pipes/Network/IRC/Core.hs | bsd-3-clause | 1,947 | 0 | 20 | 616 | 486 | 259 | 227 | 44 | 3 |
{-# LANGUAGE TypeFamilies, EmptyDataDecls, TypeOperators #-}
{-# LANGUAGE FlexibleInstances #-}
-- | Embedding a higher-order domain-specific language (simply-typed
-- lambda-calculus with constants) with a selectable evaluation order:
-- Call-by-value, call-by-name, call-by-need in the same Final Tagless framework
--
-- <http://okmij.org/ftp/tagless-final/tagless-typed.html#call-by-any>
--
module Language.CB where
import Data.IORef
import Control.Monad
import Control.Monad.Trans
-- | Our EDSL is typed. EDSL types are built from the following two
-- type constructors:
data IntT
data a :-> b
infixr 5 :->
-- | We could have used Haskell's type Int and the arrow -> constructor.
-- We would like to emphasize however that EDSL types need not be identical
-- to the host language types. To give the type system to EDSL, we merely
-- need `labels' -- which is what IntT and :-> are
--
-- The (higher-order abstract) syntax of our DSL
class EDSL exp where
lam :: (exp a -> exp b) -> exp (a :-> b)
app :: exp (a :-> b) -> exp a -> exp b
int :: Int -> exp IntT -- Integer literal
add :: exp IntT -> exp IntT -> exp IntT
sub :: exp IntT -> exp IntT -> exp IntT
-- | A convenient abbreviation
let_ :: EDSL exp => exp a -> (exp a -> exp b) -> exp b
let_ x y = (lam y) `app` x
-- | A sample EDSL term
t :: EDSL exp => exp IntT
t = (lam $ \x -> let_ (x `add` x)
$ \y -> y `add` y) `app` int 10
-- | Interpretation of EDSL types as host language types
-- The type interpretation function Sem is parameterized by 'm',
-- which is assumed to be a Monad.
type family Sem (m :: * -> *) a :: *
type instance Sem m IntT = Int
type instance Sem m (a :-> b) = m (Sem m a) -> m (Sem m b)
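-- A worked instance of the two equations above (an illustration, not part of
-- the original file): the EDSL type of 't', namely IntT, and the type of a
-- one-argument EDSL function unfold as
--
-- > Sem m IntT            ~  Int
-- > Sem m (IntT :-> IntT) ~  m Int -> m Int
--
-- so an EDSL function receives its argument as a *computation* in m, which is
-- exactly what lets the interpreters below choose when (and how often) to run it.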
-- | Interpretation of EDSL expressions as values of the host language (Haskell)
-- An EDSL expression of the type a is interpreted as a Haskell value
-- of the type S l m a, where m is a Monad (the parameter of the interpretation)
-- and l is the label for the evaluation order (one of Name, Value, or Lazy).
-- (S l m) is not quite a monad -- only up to the Sem interpretation
newtype S l m a = S { unS :: m (Sem m a) }
-- | Call-by-name
--
data Name
instance MonadIO m => EDSL (S Name m) where
int = S . return
add x y = S $ do a <- unS x
b <- unS y
liftIO $ putStrLn "Adding"
return (a + b)
sub x y = S $ do a <- unS x
b <- unS y
liftIO $ putStrLn "Subtracting"
return (a - b)
lam f = S . return $ (unS . f . S)
app x y = S $ unS x >>= ($ (unS y))
-- Tests
runName :: S Name m a -> m (Sem m a)
runName x = unS x
-- | Evaluating:
--
-- > t = (lam $ \x -> let_ (x `add` x)
-- > $ \y -> y `add` y) `app` int 10
--
-- The addition (x `add` x) is performed twice because y is bound
-- to a computation, and y is evaluated twice
t0SN = runName t >>= print
{-
Adding
Adding
Adding
40
-}
-- A more elaborate example
t1 :: EDSL exp => exp IntT
t1 = (lam $ \x -> let_ (x `add` x)
$ \y -> lam $ \z ->
z `add` (z `add` (y `add` y))) `app` (int 10 `sub` int 5)
`app` (int 20 `sub` int 10)
t1SN = runName t1 >>= print
{-
*CB> t1SN
Subtracting
Subtracting
Subtracting
Subtracting
Adding
Subtracting
Subtracting
Adding
Adding
Adding
Adding
40
-}
-- | A better example
t2 :: EDSL exp => exp IntT
t2 = (lam $ \z -> lam $ \x -> let_ (x `add` x)
$ \y -> y `add` y)
`app` (int 100 `sub` int 10)
`app` (int 5 `add` int 5)
-- | The result of subtraction was not needed, and so it was not performed.
-- OTOH, (int 5 `add` int 5) was computed four times
t2SN = runName t2 >>= print
{-
*CB> t2SN
Adding
Adding
Adding
Adding
Adding
Adding
Adding
40
-}
-- Call-by-value
data Value
-- | We reuse most of EDSL (S Name) except for lam
vn :: S Value m x -> S Name m x
vn = S . unS
nv :: S Name m x -> S Value m x
nv = S . unS
instance MonadIO m => EDSL (S Value m) where
int = nv . int
add x y = nv $ add (vn x) (vn y)
sub x y = nv $ sub (vn x) (vn y)
app x y = nv $ app (vn x) (vn y)
-- This is the only difference between CBN and CBV:
-- lam first evaluates its argument, no matter what
-- This is the definition of CBV after all
lam f = S . return $ (\x -> x >>= unS . f . S . return)
runValue :: S Value m a -> m (Sem m a)
runValue x = unS x
-- We now evaluate the previously written tests t, t1, t2
-- under the new interpretation
t0SV = runValue t >>= print
{-
*CB> t0SV
Adding
Adding
40
-}
t1SV = runValue t1 >>= print
{-
*CB> t1SV
Subtracting
Adding
Subtracting
Adding
Adding
Adding
40
-}
-- Although the result of subtraction was not needed, it was still performed.
-- OTOH, (int 5 `add` int 5) was computed only once
t2SV = runValue t2 >>= print
{-
*CB> t2SV
Subtracting
Adding
Adding
Adding
40
-}
-- Call-by-need
share :: MonadIO m => m a -> m (m a)
share m = do
r <- liftIO $ newIORef (False,m)
let ac = do
(f,m) <- liftIO $ readIORef r
if f then m
else do
v <- m
liftIO $ writeIORef r (True,return v)
return v
return ac
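-- A small standalone illustration of 'share' (hypothetical, not part of the
-- original file): the traced action runs once even though the shared
-- computation is used twice.
--
-- > demoShare :: IO ()
-- > demoShare = do
-- >   ac <- share (putStrLn "computing" >> return (21 :: Int))
-- >   a  <- ac
-- >   b  <- ac
-- >   print (a + b)    -- prints "computing" once, then 42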
data Lazy
-- | We reuse most of EDSL (S Name) except for lam
ln :: S Lazy m x -> S Name m x
ln = S . unS
nl :: S Name m x -> S Lazy m x
nl = S . unS
instance MonadIO m => EDSL (S Lazy m) where
int = nl . int
add x y = nl $ add (ln x) (ln y)
sub x y = nl $ sub (ln x) (ln y)
app x y = nl $ app (ln x) (ln y)
-- This is the only difference between CBN and CBNeed
-- lam shares its argument, no matter what
-- This is the definition of CBNeed after all
lam f = S . return $ (\x -> share x >>= unS . f . S)
runLazy :: S Lazy m a -> m (Sem m a)
runLazy x = unS x
-- We now evaluate the previously written tests t, t1, t2
-- under the new interpretation
-- | Here, Lazy is just as efficient as CBV
t0SL = runLazy t >>= print
{-
*CB> t0SL
Adding
Adding
40
-}
-- | Ditto
t1SL = runLazy t1 >>= print
{-
*CB> t1SL
Subtracting
Subtracting
Adding
Adding
Adding
Adding
40
-}
-- | Now, Lazy is better than both CBN and CBV: subtraction was not needed,
-- and it was not performed.
-- All other expressions were needed, and evaluated once.
t2SL = runLazy t2 >>= print
{-
*CB> t2SL
Adding
Adding
Adding
40
-}
| suhailshergill/liboleg | Language/CB.hs | bsd-3-clause | 6,365 | 2 | 18 | 1,785 | 1,733 | 920 | 813 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE ImplicitParams #-}
module Tests.Travel where
import Data.Metrology.Poly
import Data.Metrology.SI.Poly
import Data.Metrology.Imperial.Types (Imperial)
import Data.Metrology.Imperial.Units
import Data.Metrology.Show ()
import qualified Data.Metrology.SI.Dims as D
import Test.Tasty
import Test.Tasty.HUnit
import Test.HUnit.Approx
type PerArea lcsu n = MkQu_DLN (D.Area :^ MOne) lcsu n
fromGLtoED :: MkQu_DLN D.Length Imperial Float
fromGLtoED = 46.5 % Mile
fuelEfficiency :: PerArea Imperial Float
fuelEfficiency = 40 % (Mile :/ Gallon)
gasolineDensity :: MkQu_DLN D.Density Imperial Float
gasolineDensity = 7.29 % (Pound :/ Gallon)
gasolineWeight :: (Fractional f)
=> MkQu_DLN D.Length su f -> PerArea su f -> MkQu_DLN D.Density su f -> MkQu_DLN D.Mass su f
gasolineWeight len0 ef0 den0 = len0 |/| ef0 |*| den0
tests :: TestTree
tests =
let ?epsilon = 0.00001 in
testGroup "Travel"
[ testCase "fromGLtoED" (fromGLtoED # Mile @?~ 46.5)
, testCase "fuelEfficiency" (fuelEfficiency # (Mile :/ Gallon) @?~ 39.999996)
, testCase "gasolineDensity" (gasolineDensity # (Pound :/ Gallon) @?~ 7.29)
, testCase "gasolineWeight" (gasolineWeight fromGLtoED fuelEfficiency gasolineDensity # Pound @?~ 8.474626)
, testCase "fromGLtoED2" (fromGLtoED # kilo Meter @?~ 74.834496)
, testCase "fuelEfficiency2" (fuelEfficiency # (kilo Meter :/ Liter) @?~ 14.160248)
, testCase "gasolineDensity2" (gasolineDensity # (kilo Gram :/ Liter) @?~ 0.7273698)
, testCase "gasolineWeight2" ((gasolineWeight (convert fromGLtoED) (convert fuelEfficiency) (convert gasolineDensity) :: MkQu_DLN D.Mass SI Float) # kilo Gram @?~ 3.8440251) ]
main :: IO ()
main = do
putStrLn $ fromGLtoED `showIn` Mile
putStrLn $ fuelEfficiency `showIn` Mile :/ Gallon
putStrLn $ gasolineDensity `showIn` Pound :/ Gallon
putStrLn $ show $ gasolineWeight fromGLtoED fuelEfficiency gasolineDensity
putStrLn ""
putStrLn $ fromGLtoED `showIn` (kilo Meter)
putStrLn $ fuelEfficiency `showIn` kilo Meter :/ Liter
putStrLn $ gasolineDensity `showIn` kilo Gram :/ Liter
putStrLn $ show $ (gasolineWeight
(convert fromGLtoED) (convert fuelEfficiency) (convert gasolineDensity) :: MkQu_DLN D.Mass SI Float)
{---- Execution result ---
46.5 mi
39.999996 mi/gal
7.29 lb/gal
8.474626 lb
74.834496 km
14.160248 km/l
0.7273698 kg/l
3.8440251 kg
-}
| hesiod/units | Tests/Travel.hs | bsd-3-clause | 2,447 | 0 | 16 | 382 | 721 | 384 | 337 | 48 | 1 |
import qualified Data.Vector.Generic.Extra as V
import Data.Either
import Data.Maybe
import Data.Vector
import Test.Tasty
import qualified Test.Tasty.SmallCheck as SC
main :: IO ()
main = defaultMain analogies
analogousTo :: Eq b => (Vector a -> Vector b) -> ([a] -> [b]) -> [a] -> Bool
analogousTo vf lf = vf `analogousTo'` lf `via` (fromList, toList)
analogousTo' :: a -> b -> (a, b)
analogousTo' vf lf = (vf, lf)
via :: Eq d => (a -> b, c -> d) -> (c -> a, b -> d) -> c -> Bool
via (vf, lf) (f, g) c = (g . vf . f) c == lf c
analogies :: TestTree
analogies = testGroup "Analogies to functions on []" [maybeFuns, eitherFuns]
maybeFuns :: TestTree
maybeFuns = testGroup "Maybe utilities"
[ SC.testProperty "mapMaybe" $ SC.changeDepth (min 2) $
\f -> V.mapMaybe f `analogousTo` mapMaybe (f :: Int -> Maybe Char)
, SC.testProperty "catMaybes" $
V.catMaybes `analogousTo` (catMaybes :: [Maybe Bool] -> [Bool])
, SC.testProperty "vectorToMaybe" $
V.vectorToMaybe `analogousTo'` (listToMaybe :: [Int] -> Maybe Int) `via`
(fromList, id)
, SC.testProperty "maybeToVector" $
V.maybeToVector `analogousTo'` (maybeToList :: Maybe () -> [()]) `via`
(id, toList)
]
eitherFuns :: TestTree
eitherFuns = testGroup "Either utilities"
[ SC.testProperty "lefts" $
V.lefts `analogousTo` (lefts :: [Either String Int] -> [String])
, SC.testProperty "rights" $
V.rights `analogousTo` (rights :: [Either String Int] -> [Int])
, SC.testProperty "partitionEithers" $
V.partitionEithers `analogousTo'`
(partitionEithers :: [Either Int Int] -> ([Int], [Int])) `via`
(fromList, \(va, vb) -> (toList va, toList vb))
]
| sjakobi/vector-extra | test/ListAnalogies.hs | bsd-3-clause | 1,691 | 0 | 13 | 348 | 687 | 387 | 300 | 38 | 1 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree. An additional grant
-- of patent rights can be found in the PATENTS file in the same directory.
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Temperature.KO.Corpus
( corpus ) where
import Prelude
import Data.String
import Duckling.Lang
import Duckling.Resolve
import Duckling.Temperature.Types
import Duckling.Testing.Types
corpus :: Corpus
corpus = (testContext {lang = KO}, allExamples)
allExamples :: [Example]
allExamples = concat
[ examples (TemperatureValue Celsius 37)
[ "37°C"
, "섭씨37°"
, "섭씨37도"
]
, examples (TemperatureValue Fahrenheit 70)
[ "70°F"
, "화씨70°"
, "화씨70도"
]
, examples (TemperatureValue Degree 45)
[ "45°"
, "45도"
]
]
| rfranek/duckling | Duckling/Temperature/KO/Corpus.hs | bsd-3-clause | 1,039 | 0 | 9 | 286 | 162 | 99 | 63 | 24 | 1 |
import System.Environment (getArgs)
cardval :: Char -> Int
cardval '2' = 2
cardval '3' = 3
cardval '4' = 4
cardval '5' = 5
cardval '6' = 6
cardval '7' = 7
cardval '8' = 8
cardval '9' = 9
cardval '1' = 10
cardval 'J' = 11
cardval 'Q' = 12
cardval 'K' = 13
cardval 'A' = 14
cardval _ = -1
trump :: [String] -> String
trump [xs, ys, _, [z]] | xc == z && yc /= z = xs
| xc /= z && yc == z = ys
| cardval x > cardval y = xs
| cardval x < cardval y = ys
| otherwise = xs ++ (' ' : ys)
where xc = last xs
yc = last ys
x = head xs
y = head ys
main :: IO ()
main = do
[inpFile] <- getArgs
input <- readFile inpFile
putStr . unlines . map (trump . words) $ lines input
| nikai3d/ce-challenges | easy/simple_or_trump.hs | bsd-3-clause | 914 | 0 | 11 | 416 | 363 | 180 | 183 | 31 | 1 |
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE ViewPatterns #-}
-- | Lookup the documentation of a name in a module (and in a specific
-- package in the case of ambiguity).
module Haskell.Docs
(module Haskell.Docs
,Identifier(..)
,PackageName(..))
where
import Haskell.Docs.Cabal
import Haskell.Docs.Formatting
import Haskell.Docs.Ghc
import Haskell.Docs.Haddock
import Haskell.Docs.Index
import Haskell.Docs.Types
import PackageConfig hiding (PackageName)
import Control.Exception
import Control.Monad
import qualified Data.HashMap.Strict as M
import Data.List
import Data.Ord
import qualified Data.Text as T
import qualified Data.Text.IO as T
import GHC hiding (verbosity)
import MonadUtils
-- | Print the documentation of a name in the given module.
searchAndPrintDoc
:: [String] -- ^ GHC Options
-> Bool -- ^ Print modules only.
-> Bool -- ^ S-expression format.
-> Maybe PackageName -- ^ Package.
-> Maybe ModuleName -- ^ Module name.
-> Identifier -- ^ Identifier.
-> Ghc ()
searchAndPrintDoc flags _ms ss _pname _mname ident =
do result <- liftIO (lookupIdent flags
(T.pack (unIdentifier ident)))
case result of
Nothing -> throw NoFindModule
Just pkgModuleMap ->
do pkgs <- getAllPackages flags
docs <- fmap concat
(forM (M.toList pkgModuleMap)
(searchResult pkgs))
if ss
then printSexp docs
else mapM_ (\(_,doc') ->
printIdentDoc False True True doc')
(zip [0 :: Int ..]
(nub docs))
where searchResult pkgs (pkgName,modName) =
case find (matchingPkg pkgName) pkgs of
Nothing -> return []
Just pkg -> searchPkg pkg modName
where matchingPkg pkgNm = (== pkgNm) . T.pack . showPackageName .
getIdentifier
searchPkg pkg modName =
do result <- searchWithPackage
pkg
(Just (head (fmap (makeModuleName . T.unpack) modName)))
ident
case result of
Left err -> throw err
Right (sortBy (comparing identDocPackageName) -> docs) -> return docs
-- | Search only for identifiers and print out all modules associated.
searchAndPrintModules :: [String] -> Identifier -> IO ()
searchAndPrintModules flags ident =
do result <- lookupIdent flags (T.pack (unIdentifier ident))
case result of
Nothing ->
throw NoFindModule
Just packages ->
forM_ (nub (concat (map snd (M.toList packages))))
T.putStrLn
| chrisbarrett/haskell-docs | src/Haskell/Docs.hs | bsd-3-clause | 2,896 | 0 | 19 | 1,023 | 671 | 357 | 314 | 69 | 5 |
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE MultiParamTypeClasses #-}
import Text.XML.HXT.Core
import Text.XML.HXT.HTTP
import Data.Ratio
import Control.Monad.State.Lazy
import Music -- This is to import the Music type
import MXml
import Data.Default
main = do
d <- runX $ xunpickleDocument pmusic
[withValidate no
-- ,withTrace 2
,withRemoveWS yes
,withPreserveComment no] "demo-score.xml"
-- >>> arrIO (\x -> do {print x; return x})
let d2 = postProcessing d
mapM print d2
return ()
where postProcessing = id.head -- fixPositions
----------------------------------------------------------------------------------------------------
-- MusicXml Pickler functions
----------------------------------------------------------------------------------------------------
pmusic :: PU Music
pmusic
= startPt $
xpWrap (forward,backward) $ -- Convert [MXMeasElm] to Music
xpWrap (concat,undefined) $ xpList $ xpElem "measure" $ -- Convert MXMeasElm to [MXMeasElm]
xpList $ (xpickle :: PU MXMeasElm) -- Pickle
where startPt a = xpElem "score-partwise" $ -- Select
keepElem "part" $ xpElem "part" $ -- Fitler, select
keepElem "measure" $ a -- Filter
instance XmlPickler MXMeasElm where
xpickle = xpAlt tag ps
where tag (MXNoteElm _) = 0
tag (MXAttrElm _) = 1
tag (MXMeasNum _) = 2
ps = [ xpWrap (MXNoteElm, \(MXNoteElm n) -> n) pNote
, xpWrap (MXAttrElm, \(MXAttrElm a) -> a) pAttr
, xpWrap (MXMeasNum, \(MXMeasNum n) -> n) pMeasNum
]
pMeasNum :: PU Int
pMeasNum = xpWrap (read, show) $ xpAttr "number" xpText
-- Measure Attributes
pAttr
= xpElem "attributes" $ wAttr
pdivs -- Divisions
pkey -- Key
ptime -- Time
pclef -- Clef
where pdivs = maySelNodeAndPickle "divisions" -- Divisions per beat
pkey = xpOption $ xpElem "key" (wKey pfif pmode) :: PU (Maybe MXKey)
pfif = selNodeAndPickle "fifths" -- Key Fifths
pmode = selNodeAndPickle "mode" -- Key Mode
ptime = xpOption $
xpElem "time" (wTime pbeats pbtyp pattr) :: PU (Maybe MXTime)
pbeats = selNodeAndPickle "beats" -- Time Beats per measure
pbtyp = selNodeAndPickle "beat-type" -- Time Beat Division
pattr = xpOption $ xpWrap (read, show) $ -- Time Annotation
xpAttr "symbol" xpText
pclef = xpOption $
xpElem "clef" (wClef pcsign pcline pcalt) :: PU (Maybe MXClef)
pcsign = selNodeAndPickle "sign" -- Clef Sign
pcline = xpDefault def $ selNodeAndPickle "line" -- Clef Line -- BUG: Should use xpWrap so (NoneClef,0) to MXML doesn't include line # 0
pcalt = maySelNodeAndPickle "clef-octave-change" -- Clef octave change
-- Wrap functions. Converts tuple to type and vice versa
wAttr a b c d = xpWrap (uncurry4 MXAttr, \(MXAttr divs key time clef) -> (divs,key,time,clef)) $ xp4Tuple a b c d
wKey a b = xpWrap (uncurry MXKey, \ (MXKey fifths mode) -> (fifths, mode)) $ xpPair a b
wTime a b c = xpWrap (uncurry3 MXTime, \(MXTime beats beatType anno) -> (beats, beatType, anno)) $ xpTriple a b c
wClef a b c = xpWrap (uncurry3 MXClef, \(MXClef sign line octalt) -> (sign, line, octalt)) $ xpTriple a b c
maySelNodeAndPickle a = xpOption $ selNodeAndPickle a
-- Measure Notes
pNote
= xpElem "note" $ wNote
ppitch -- Pitch
pdur -- Duration
pvoice -- Voice
ptype -- Type
where ppitch = xpElem "pitch" (wPitch pstep poct palter) :: PU MXPitch
pstep = selNodeAndPickle "step" -- Pitch Step
poct = selNodeAndPickle "octave" -- Pitch Octave
palter = xpOption $ selNodeAndPickle "alter" -- Ptich Maybe Alter
pdur = selNodeAndPickle "duration" -- Duration
pvoice = selNodeAndPickle "voice" -- Voice
ptype = selNodeAndPickle "type" -- Note Type (eg quarter)
-- Wrap functions
wNote a b c d = xpWrap (uncurry4 MXNote, \(MXNote pitch dur voice notetype) -> (pitch,dur,voice,notetype)) $ xp4Tuple a b c d
wPitch a b c = xpWrap (uncurry3 MXPitch, \(MXPitch step oct alt) -> (step, oct, alt)) $ xpTriple a b c
----------------------------------------------------------------------------------------------------
-- Helper functions
----------------------------------------------------------------------------------------------------
keepElems :: [String] -> PU a -> PU a
keepElems ls = let msum' = foldr (<+>) zeroArrow -- (hasName "a") <+> (hasName "b") ...
in xpFilterCont $ msum' (map hasName ls)
keepElem :: String -> PU a -> PU a
keepElem x = xpFilterCont (hasName x)
instance (Default a, Eq a) => Default (PU a) where
def = xpLift (def::a)
-- Selects one XML Node and pickles with xpPrim
selNodeAndPickle s = xpElem s xpPrim
| nickgeoca/music-browserside | Main.hs | bsd-3-clause | 5,528 | 0 | 13 | 1,738 | 1,305 | 693 | 612 | 89 | 1 |
-- | This module provides the @TypeMap@ data structure, as well as several
-- data structures for the type signatures of the various elements stored in a
-- @TypeMap@.
module Language.Java.Paragon.TypeChecker.TypeMap
(
TypeMap
) where
import Data.Map (Map)
import qualified Data.Map as Map
import Language.Java.Paragon.Syntax
import Language.Java.Paragon.TypeChecker.Types
-- | The @TypeMap@ data structure.
-- For methods and constructors, which might share the same name, we store an
-- additional mapping, from the number and type of arguments to the actual
-- signature.
-- A package is mapped to a @TypeMap@ for that package (??)
data TypeMap = TypeMap
{ tmFields :: Map String FieldSignature
, tmMethods :: Map String MethodMap
, tmConstructors :: ConstructorMap
, tmLocks :: Map String ()
, tmPolicies :: Map String PrgPolicy -- TODO: WHAT IS THIS?
, tmActors :: Map String ()
, tmTypeMethods :: Map String ()
, tmTypes :: Map String TypeSignature
, tmPackages :: Map String TypeMap
} deriving (Show)
data FieldSignature = FieldSignature
{ fsType :: TcType -- ^ The type checked type of this field.
, fsPolicy :: ActorPolicy -- TODO: WHAT IS THIS?
, fsIsParam :: Bool -- TODO: WHAT IS THIS?
, fsIsStatic :: Bool -- ^ Whether this field is declared @static@.
, fsIsFinal :: Bool -- ^ Whether this field is declared @final@.
, fsNotNull :: Bool -- ^ Whether this field is declared @notnull@.
} deriving (Show)
-- | For method disambiguation, this mapping provides the method signature for
-- each method, keyed by its type parameters, the types of its real arguments
-- and whether it has varargs.
type MethodMap = Map ([TypeParam], [TcType], Bool) MethodSig
data MethodSig = MethodSig
{ msReturnType :: TcType -- ^ Return type.
, msModifiers :: [Modifier] -- ^ Method modifiers.
, msReadEffect :: ActorPolicy -- ^ Read effect policy.
, msWriteEffect :: ActorPolicy -- ^ Write effect policy.
, msExpectedLocks :: [TcLock] -- ^ Locks expected to be open.
, msParameters :: [String] -- ^ Method's parameters.
, msParameterBounds :: [ActorPolicy] -- ^ Policy read-effect bounds on parameters. TODO: why not paired with params?
, msLockDelta :: TcLockDelta -- ^ Which locks are opened and closed.
, msExceptions :: [(TcType, ExceptionSignature)] -- ^ Signature per exception thrown.
, msNNPars :: [String] -- TODO: WHAT IS THIS? List of parameters that are not null?
, msIsNative :: Bool -- ^ Whether this method is native (to Paragon)
}
data ExceptionSignature = ExceptionSignature
{ exsReadEffect :: ActorPolicy -- ^ Read effect policy.
, exsWriteEffect :: ActorPolicy -- ^ Write effect policy.
, exsLockDelta :: TcLockDelta -- ^ Locks opened and closed when exception is thrown.
}
-- | For constructor disambiguation, this mapping provides the constructor
-- signature for each constructor by number of type parameters, real arguments
-- and ??? (TODO).
type ConstructorMap = Map ([TypeParam], [TcType], Bool) ConstructorSignature
data ConstructorSignature = ConstructorSignature
{ csModifiers :: [Modifier] -- ^ Constructor modifiers.
, csWriteEffect :: ActorPolicy -- ^ Write effect policy.
, csExpectedLocks :: [TcLock] -- ^ Locks expected to be open.
, csParameters :: [String] -- ^ Constructor's parameters.
, csParameterBounds :: [ActorPolicy] -- ^ Policy read-effect bounds on parameters.
, csLockDelta :: TcLockDelta -- ^ Which locks are opened and closed.
, csExceptions :: [(TcType, ExceptionSignature)] -- ^ Signature per exception thrown.
, csNNPars :: [String] -- ^ TODO ??
, csIsNative :: Bool -- ^ Whether this constructor is native (to Paragon).
}
data TypeSignature = TypeSignature
{ tsType :: TcRefType -- ^ This reference type.
, tsIsClass :: Bool -- ^ Whether this is a class (or an interface?) TODO
, tsIsFinal :: Bool -- ^ Whether this type is declared final.
, tsSuperClasses :: [TcClassType] -- ^ Super class (possibly multiple for interfaces).
, tsInterfaces :: [TcClassType] -- ^ Interfaces implemented by this type.
, tsMembers :: TypeMap -- ^ Members of this type.
}
| bvdelft/paragon | src/Language/Java/Paragon/TypeChecker/TypeMap.hs | bsd-3-clause | 4,446 | 0 | 10 | 1,101 | 581 | 384 | 197 | 61 | 0 |
{-# LANGUAGE OverloadedStrings, QuasiQuotes, ImplicitParams #-}
module Matlab where
import Data.String.Interpolation
import Data.String
import Data.Monoid
import Data.Default
import System.FilePath
import System.Directory
import System.Process
import System.Exit
import System.IO
data MatlabImp = Matlab | Octave deriving (Eq, Ord, Read, Show)
data MatlabOpts = MatlabOpts { dbnSizes :: [Int], numEpochs :: Int, implementation :: MatlabImp } deriving (Eq, Ord, Read, Show)
instance Default MatlabOpts where
def = MatlabOpts { dbnSizes = [100], numEpochs = 5, implementation = Octave }
prepAndRun :: MatlabOpts -> FilePath -> FilePath -> IO ExitCode
prepAndRun matlabOpts outputDirectory' inputDataFile' = do
let ?dbnSizes = dbnSizes matlabOpts
?numEpochs = numEpochs matlabOpts
outputDirectory <- canonicalizePath outputDirectory'
inputDataFile <- canonicalizePath inputDataFile'
createDirectoryIfMissing True outputDirectory
writeFile (outputDirectory </> "nnsave_to_file_full.m") nnsave_to_file_full
writeFile (outputDirectory </> "nnsave_to_file.m") nnsave_to_file
writeFile (outputDirectory </> "run_main.m") (run_main inputDataFile)
writeFile (outputDirectory </> "run_trainer.m") (run_trainer (outputDirectory </> "dbn.txt"))
writeFile (outputDirectory </> "run_trainer_ll.m") (run_trainer_ll (outputDirectory </> "dbn-ll.txt"))
let logFile = outputDirectory </> "run_log.txt"
tailHandle <- runProcess "tail" (words "--retry --follow=name" ++ [logFile]) (Just outputDirectory) Nothing Nothing Nothing Nothing
exitCode <- withFile logFile WriteMode (\ log'han -> do
procHandle <- case implementation matlabOpts of
Octave -> runProcess "octave" (words "run_main.m") (Just outputDirectory) Nothing Nothing (Just log'han) Nothing
Matlab -> runProcess "matlab" (words "-nosplash -nodisplay -r run_main") (Just outputDirectory) Nothing Nothing (Just log'han) Nothing
waitForProcess procHandle)
print =<< readFile logFile
terminateProcess tailHandle
return exitCode
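-- A hedged usage sketch (the output directory and input data file below are
-- hypothetical):
--
-- > trainExample :: IO ExitCode
-- > trainExample = prepAndRun def { dbnSizes = [200, 100], numEpochs = 10 }
-- >                           "output/run1" "data/games.csv"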
-- saves all layers, including the last one
nnsave_to_file_full :: (IsString a, Monoid a) => a
nnsave_to_file_full = [str|
function nnsave_to_file_full(net, filename)
% open file
file = fopen(filename, 'w');
% write sizes in single row
for i = 2 : net.n
fprintf(file, '%d ', net.size(i));
end
fprintf(file, '\n');
fclose(file);
% write biases in 'layer count' rows
biases = net.b;
for i = 1 : net.n-1;
dlmwrite(filename, biases{i}', 'delimiter', ' ', '-append'); % '
end
% write weights in 'neuron count' rows
weights = net.W;
for i = 1 : net.n-1;
dlmwrite(filename, weights{i}, 'delimiter', ' ', '-append');
end
end
|]
-- saves all layers except the last one
nnsave_to_file :: (IsString a, Monoid a) => a
nnsave_to_file = [str| function nnsave_to_file(net, filename)
% open file
file = fopen(filename, 'w');
% write sizes in single row
for i = 2 : net.n-1
fprintf(file, '%d ', net.size(i));
end
fprintf(file, '\n');
fclose(file);
% write biases in 'layer count' rows
biases = net.b;
for i = 1 : net.n-2;
dlmwrite(filename, biases{i}', 'delimiter', ' ', '-append');
end
% write weights in 'neuron count' rows
weights = net.W;
for i = 1 : net.n-2;
dlmwrite(filename, weights{i}, 'delimiter', ' ', '-append');
end
end
|]
-- the main script that runs all the others
run_main :: (IsString a, Monoid a) => String -> a
run_main inputdata = [str|
% clear all; close all; clc;
addpath(genpath('../../DeepLearnToolbox'));
games = dlmread('$fromString inputdata$');
games_cnt = floor(size(games,1)/100)*100;
games = games(1:games_cnt,:);
run_trainer(games);
% fixme: use that code
%games_y = games_y(1:games_cnt,:);
%train_y = games_y;
%run_trainer_ll(train_x, train_y);
exit;
|]
run_trainer_ll :: (IsString a, Monoid a, ?dbnSizes :: [Int], ?numEpochs :: Int) => String -> a
run_trainer_ll outputFilepath =
let dbnsizes = ?dbnSizes
numepochs = ?numEpochs :: Int
in [str|
function run_trainer_ll(train_x, train_y)
dbn.sizes = $:dbnsizes$;
opts.numepochs = [$:numepochs$];
opts.batchsize = 100;
opts.momentum = 0;
opts.alpha = 1;
dbn = dbnsetup(dbn, train_x, opts);
dbn = dbntrain(dbn, train_x, opts);
nn_ll = dbnunfoldtonn(dbn, 1);
opts.numepochs = 1;
opts.batchsize = 100;
nn_ll = nntrain(nn_ll, train_x, train_y, opts);
nnsave_to_file_full(nn_ll,'$fromString outputFilepath$');
end; |]
run_trainer :: (IsString a, Monoid a, ?numEpochs :: Int, ?dbnSizes :: [Int]) => String -> a
run_trainer outputFilepath = let dbnSizes' = ?dbnSizes
numepochs = ?numEpochs :: Int
in [str|
function dbn = run_trainer(train_x)
dbn.sizes = $:dbnSizes'$;
opts.numepochs = [$:numepochs$];
opts.batchsize = 100;
opts.momentum = 0;
opts.alpha = 1;
dbn = dbnsetup(dbn, train_x, opts);
dbn = dbntrain(dbn, train_x, opts);
nn = dbnunfoldtonn(dbn, 10);
nnsave_to_file(nn, '$fromString outputFilepath$');
end
|] | Tener/deeplearning-thesis | lib/Matlab.hs | bsd-3-clause | 5,324 | 0 | 19 | 1,170 | 785 | 420 | 365 | 52 | 2 |
module Data.Metagraph.Internal.Types(
MetaGraphId(..)
, MetaGraph(..)
, Directed(..)
, isDirected
, isUndirected
, EdgeId(..)
, MetaEdge(..)
, NodeId(..)
, MetaNode(..)
) where
import Data.IntMap
import GHC.Generics
import Data.Bifunctor
-- | Id of edge in metagraph
newtype EdgeId = EdgeId { unEdgeId :: Int }
deriving (Generic, Eq, Ord, Show)
-- | Id of node in metagraph
newtype NodeId = NodeId { unNodeId :: Int }
deriving (Generic, Eq, Ord, Show)
-- | Id of metagraph
newtype MetaGraphId = MetaGraphId { unMetaGraphId :: Int }
deriving (Generic, Eq, Ord, Show)
-- | Graph which edges and nodes can be a subgraphs.
--
-- [@edge@] Payload of edge
--
-- [@node@] Payload of node
data MetaGraph edge node = MetaGraph {
_metagraphId :: MetaGraphId
-- | Holds all top-level edges of graph
, _metagraphEdges :: IntMap (MetaEdge edge node)
-- | Holds all top-level nodes. This is nesseccary as there can be nodes that
-- are not connected with any other node.
, _metagraphNodes :: IntMap (MetaNode edge node)
} deriving (Generic)
-- | Direction marker for edge
data Directed = Directed | Undirected
deriving (Eq, Show, Read, Ord, Enum, Bounded)
-- | Helper to check if the flag is directed
isDirected :: Directed -> Bool
isDirected Directed = True
isDirected _ = False
-- | Helper to check if the flag is undirected
isUndirected :: Directed -> Bool
isUndirected Undirected = True
isUndirected _ = False
-- | Edge of metagraph that can hold subgraph inside itself.
--
-- [@edge@] Payload of edge
--
-- [@node@] Payload of node
data MetaEdge edge node = MetaEdge {
-- | Unique id of edge
_edgeId :: EdgeId
-- | Holds direction feature of the edge
, _edgeDirected :: Directed
-- | Begin of the edge
, _edgeFrom :: MetaNode edge node
-- | End of the edge
, _edgeTo :: MetaNode edge node
-- | Payload of edge
, _edgePayload :: edge
-- | Edge can hold subgraph
, _edgeGraph :: Maybe (MetaGraph edge node)
} deriving (Generic)
-- | Node of metagraph that can hold subgraph inside itself.
data MetaNode edge node = MetaNode {
-- | Unique id of node
_nodeId :: NodeId
-- | Payload of node
, _nodePayload :: node
  -- | Node can hold a subgraph
, _nodeGraph :: Maybe (MetaGraph edge node)
} deriving (Generic)
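-- A minimal construction sketch (illustrative only; the payload types are
-- picked arbitrarily): a leaf node with no nested subgraph.
--
-- > leafNode :: MetaNode edge String
-- > leafNode = MetaNode (NodeId 0) "leaf" Nothing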
| Teaspot-Studio/metagraph | src/Data/Metagraph/Internal/Types.hs | bsd-3-clause | 2,280 | 0 | 11 | 481 | 472 | 292 | 180 | 45 | 1 |
module Parser where
import Control.Applicative hiding ((<|>), many)
import Text.Parsec as P
import Text.Parsec.Token
import Text.Parsec.Language
import Text.Parsec.String
import Syntax
tok :: TokenParser a
tok = haskell
ident :: Parser String
ident = identifier tok
sym :: String -> Parser String
sym = symbol tok
keyw :: String -> Parser ()
keyw = reserved tok
int :: Parser Integer
int = natural tok
expr :: Parser Expr
expr = foldl1 App <$> many1 term
term :: Parser Expr
term =
Var <$> ident
-- <|> Num <$> int
<|> lam <$ (sym "\\" <?> "lambda") <*> many1 ident <* sym "->" <*> expr
<|> letfun <$ keyw "let" <*> ident <*> many ident <* sym "=" <*> expr
<* keyw "in" <*> expr
<|> parens tok expr
parseExpr :: String -> Either ParseError Expr
parseExpr xs = parse (whiteSpace tok *> expr <* eof) "interactive" xs
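-- A hedged example (the exact 'Expr' value depends on the smart constructors
-- exported by Syntax):
--
-- > parseExpr "\\x -> x"
--
-- succeeds with @Right (lam ["x"] (Var "x"))@, i.e. whatever expression the
-- 'lam' smart constructor builds from one binder and its body.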
decl :: Parser (Name, Expr)
decl = (\ x xs e -> (x, lam xs e)) <$ keyw "let" <*> ident <*> many ident <* sym "=" <*> expr
toplevel :: Parser (Name, Expr)
toplevel = ((,) "it") <$> try expr <|> decl
parseToplevel :: String -> Either ParseError (Name, Expr)
parseToplevel = parse (whiteSpace tok *> toplevel <* eof) "interactive"
| kosmikus/type-inference-regensburg | src/Parser.hs | bsd-3-clause | 1,184 | 0 | 20 | 248 | 462 | 241 | 221 | 34 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{- |
Module : ./QBF/AS_BASIC_QBF.der.hs
Description : Abstract syntax for propositional logic extended with QBFs
Copyright : (c) Jonathan von Schroeder, DFKI GmbH 2010
License : GPLv2 or higher, see LICENSE.txt
Maintainer : <[email protected]>
Stability : experimental
Portability : portable
Definition of abstract syntax for propositional logic extended with QBFs
Ref.
<http://en.wikipedia.org/wiki/Propositional_logic>
<http://www.voronkov.com/lics.cgi>
-}
module QBF.AS_BASIC_QBF
( FORMULA (..) -- datatype for Propositional Formulas
, BASICITEMS (..) -- Items of a Basic Spec
, BASICSPEC (..) -- Basic Spec
, SYMBITEMS (..) -- List of symbols
, SYMB (..) -- Symbols
, SYMBMAPITEMS (..) -- Symbol map
, SYMBORMAP (..) -- Symbol or symbol map
, PREDITEM (..) -- Predicates
, isPrimForm
, ID (..)
) where
import Common.Id as Id
import Common.Doc
import Common.DocUtils
import Common.Keywords
import Common.AS_Annotation as AS_Anno
import Data.Data
import Data.Maybe (isJust)
import qualified Data.List as List
-- DrIFT command
{-! global: GetRange !-}
-- | predicates = propositions
data PREDITEM = PredItem [Id.Token] Id.Range
deriving (Show, Typeable, Data)
newtype BASICSPEC = BasicSpec [AS_Anno.Annoted BASICITEMS]
deriving (Show, Typeable, Data)
data BASICITEMS =
PredDecl PREDITEM
| AxiomItems [AS_Anno.Annoted FORMULA]
-- pos: dots
deriving (Show, Typeable, Data)
-- | Datatype for QBF formulas
data FORMULA =
FalseAtom Id.Range
-- pos: "False
| TrueAtom Id.Range
-- pos: "True"
| Predication Id.Token
-- pos: Propositional Identifiers
| Negation FORMULA Id.Range
-- pos: not
| Conjunction [FORMULA] Id.Range
-- pos: "/\"s
| Disjunction [FORMULA] Id.Range
-- pos: "\/"s
| Implication FORMULA FORMULA Id.Range
-- pos: "=>"
| Equivalence FORMULA FORMULA Id.Range
-- pos: "<=>"
| ForAll [Id.Token] FORMULA Id.Range
| Exists [Id.Token] FORMULA Id.Range
deriving (Show, Ord, Typeable, Data)
data ID = ID Id.Token (Maybe Id.Token) deriving (Typeable, Data)
instance Eq ID where
ID t1 (Just t2) == ID t3 (Just t4) =
((t1 == t3) && (t2 == t4))
|| ((t2 == t3) && (t1 == t4))
ID t1 Nothing == ID t2 t3 = (t1 == t2) || (Just t1 == t3)
ID _ (Just _) == ID _ Nothing = False
{- two QBFs are equivalent if bound variables
can be renamed such that the QBFs are equal -}
qbfMakeEqual :: Maybe [ID] -> FORMULA -> [Id.Token]
-> FORMULA -> [Id.Token] -> Maybe [ID]
qbfMakeEqual (Just ids) f ts f1 ts1 = if length ts /= length ts1 then
Nothing
else case (f, f1) of
(Predication t, Predication t1)
| t == t1 -> Just ids
| t `elem` ts && t1 `elem` ts1 -> let tt1 = ID t (Just t1) in
if tt1 `elem` ids then
Just ids
else
if ID t Nothing `notElem` ids && ID t1 Nothing `notElem` ids then
Just (tt1 : ids)
else
Nothing
| otherwise -> Nothing
(Negation f_ _, Negation f1_ _) -> qbfMakeEqual (Just ids) f_ ts f1_ ts1
(Conjunction (f_ : fs) _, Conjunction (f1_ : fs1) _) ->
if length fs /= length fs1 then Nothing else
case r of
Nothing -> Nothing
_ -> qbfMakeEqual r
(Conjunction fs nullRange) ts
(Conjunction fs1 nullRange) ts1
where
r = qbfMakeEqual (Just ids) f_ ts f1_ ts1
(Disjunction fs r, Disjunction fs1 r1) -> qbfMakeEqual (Just ids)
(Conjunction fs r) ts (Conjunction fs1 r1) ts1
(Implication f_ f1_ _, Implication f2 f3 _) -> case r of
Nothing -> Nothing
_ -> qbfMakeEqual r f1_ ts f3 ts1
where
r = qbfMakeEqual (Just ids) f_ ts f2 ts1
(Equivalence f_ f1_ r1, Equivalence f2 f3 _) -> qbfMakeEqual (Just ids)
(Implication f_ f1_ r1) ts
(Implication f2 f3 r1) ts1
(ForAll ts_ f_ _, ForAll ts1_ f1_ _) -> case r of
Nothing -> Nothing
(Just ids_) -> Just (ids ++ filter (\ (ID x my) ->
let Just y = my in
(x `elem` ts_ && y `notElem` ts1_) ||
(x `elem` ts1_ && y `notElem` ts_)) d)
where
d = ids_ List.\\ ids
where
r = qbfMakeEqual (Just ids) f_ (ts ++ ts_) f1_ (ts1 ++ ts1_)
(Exists ts_ f_ r, Exists ts1_ f1_ r1) -> qbfMakeEqual (Just ids)
(Exists ts_ f_ r) ts
(Exists ts1_ f1_ r1) ts1
(_1, _2) -> Nothing
qbfMakeEqual Nothing _ _ _ _ = Nothing
-- ranges are always equal (see Common/Id.hs) - thus they can be ignored
instance Eq FORMULA where
FalseAtom _ == FalseAtom _ = True
TrueAtom _ == TrueAtom _ = True
Predication t == Predication t1 = t == t1
Negation f _ == Negation f1 _ = f == f1
Conjunction xs _ == Conjunction xs1 _ = xs == xs1
Disjunction xs _ == Disjunction xs1 _ = xs == xs1
Implication f f1 _ == Implication f2 f3 _ = (f == f2) && (f1 == f3)
Equivalence f f1 _ == Equivalence f2 f3 _ = (f == f2) && (f1 == f3)
ForAll ts f _ == ForAll ts1 f1 _ = isJust (qbfMakeEqual (Just []) f ts f1 ts1)
Exists ts f _ == Exists ts1 f1 _ = isJust (qbfMakeEqual (Just []) f ts f1 ts1)
_ == _ = False
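-- An illustration of the alpha-equivalence implemented above (a sketch; it
-- assumes 'mkSimpleId' from Common.Id for building tokens):
--
-- > ForAll [mkSimpleId "p"] (Predication (mkSimpleId "p")) nullRange
-- >   == ForAll [mkSimpleId "q"] (Predication (mkSimpleId "q")) nullRange
--
-- evaluates to True, because the bound variable p can be renamed to q.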
data SYMBITEMS = SymbItems [SYMB] Id.Range
-- pos: SYMB_KIND, commas
deriving (Show, Eq, Ord, Typeable, Data)
newtype SYMB = SymbId Id.Token
-- pos: colon
deriving (Show, Eq, Ord, Typeable, Data)
data SYMBMAPITEMS = SymbMapItems [SYMBORMAP] Id.Range
-- pos: SYMB_KIND, commas
deriving (Show, Eq, Ord, Typeable, Data)
data SYMBORMAP = Symb SYMB
| SymbMap SYMB SYMB Id.Range
-- pos: "|->"
deriving (Show, Eq, Ord, Typeable, Data)
-- All about pretty printing we chose the easy way here :)
instance Pretty FORMULA where
pretty = printFormula
instance Pretty BASICSPEC where
pretty = printBasicSpec
instance Pretty SYMB where
pretty = printSymbol
instance Pretty SYMBITEMS where
pretty = printSymbItems
instance Pretty SYMBMAPITEMS where
pretty = printSymbMapItems
instance Pretty BASICITEMS where
pretty = printBasicItems
instance Pretty SYMBORMAP where
pretty = printSymbOrMap
instance Pretty PREDITEM where
pretty = printPredItem
isPrimForm :: FORMULA -> Bool
isPrimForm f = case f of
TrueAtom _ -> True
FalseAtom _ -> True
Predication _ -> True
Negation _ _ -> True
_ -> False
-- Pretty printing for formulas
printFormula :: FORMULA -> Doc
printFormula frm =
let ppf p f = (if p f then id else parens) $ printFormula f
isJunctForm f = case f of
Implication {} -> False
Equivalence {} -> False
ForAll {} -> False
Exists {} -> False
_ -> True
in case frm of
FalseAtom _ -> text falseS
TrueAtom _ -> text trueS
Predication x -> pretty x
Negation f _ -> notDoc <+> ppf isPrimForm f
Conjunction xs _ -> sepByArbitrary andDoc $ map (ppf isPrimForm) xs
Disjunction xs _ -> sepByArbitrary orDoc $ map (ppf isPrimForm) xs
Implication x y _ -> ppf isJunctForm x <+> implies <+> ppf isJunctForm y
Equivalence x y _ -> ppf isJunctForm x <+> equiv <+> ppf isJunctForm y
ForAll xs y _ -> forallDoc <+> sepByArbitrary comma (map pretty xs)
<+> space
<+> ppf isJunctForm y
Exists xs y _ -> exists <+> sepByArbitrary comma (map pretty xs)
<+> space
<+> ppf isJunctForm y
sepByArbitrary :: Doc -> [Doc] -> Doc
sepByArbitrary d = fsep . prepPunctuate (d <> space)
printPredItem :: PREDITEM -> Doc
printPredItem (PredItem xs _) = fsep $ map pretty xs
printBasicSpec :: BASICSPEC -> Doc
printBasicSpec (BasicSpec xs) = vcat $ map pretty xs
printBasicItems :: BASICITEMS -> Doc
printBasicItems (AxiomItems xs) = vcat $ map pretty xs
printBasicItems (PredDecl x) = pretty x
printSymbol :: SYMB -> Doc
printSymbol (SymbId sym) = pretty sym
printSymbItems :: SYMBITEMS -> Doc
printSymbItems (SymbItems xs _) = fsep $ map pretty xs
printSymbOrMap :: SYMBORMAP -> Doc
printSymbOrMap (Symb sym) = pretty sym
printSymbOrMap (SymbMap source dest _) =
pretty source <+> mapsto <+> pretty dest
printSymbMapItems :: SYMBMAPITEMS -> Doc
printSymbMapItems (SymbMapItems xs _) = fsep $ map pretty xs
| spechub/Hets | QBF/AS_BASIC_QBF.der.hs | gpl-2.0 | 8,517 | 6 | 22 | 2,411 | 2,478 | 1,354 | 1,124 | 179 | 16 |
module Util.Map (inverse, lookupMaybe, partitionEithers) where
import Control.Arrow
import Data.Foldable
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
-- | Given a list of key-value pairs, create a map from each value to its keys. /O(n*log(n))/
inverse :: Ord v => [(k, v)] -> Map v [k]
inverse = foldl' addInverse Map.empty
where addInverse m (k, v) = Map.insertWith (++) v [k] m
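-- A worked example (doctest-style, illustrative):
--
-- >>> inverse [(1, 'a'), (2, 'b'), (3, 'a')]
-- fromList [('a',[3,1]),('b',[2])]
--
-- Keys sharing a value end up with later keys first, because each new key is
-- prepended by 'Map.insertWith'.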
-- | @lookupMaybe x m@ is shorthand for @x >>= (`lookup` m)@
lookupMaybe :: Ord k => Maybe k -> Map k v -> Maybe v
lookupMaybe x m = x >>= (`Map.lookup` m)
-- | Partition a map of 'Either's into a map containing only left-values and
-- another containing right-values.
partitionEithers :: Ord k => Map k (Either a b) -> (Map k a, Map k b)
partitionEithers = (Map.fromList *** Map.fromList) . foldr insert ([], []) . Map.toList
where
insert (k, Left x) (lefts, rights) = ((k,x):lefts, rights)
insert (k, Right y) (lefts, rights) = (lefts, (k,y):rights)
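-- A worked example (doctest-style, illustrative):
--
-- >>> partitionEithers (Map.fromList [(1, Left "x"), (2, Right True)])
-- (fromList [(1,"x")],fromList [(2,True)])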
| rodrigo-machado/verigraph | src/library/Util/Map.hs | gpl-3.0 | 1,004 | 0 | 9 | 211 | 353 | 198 | 155 | 14 | 2 |
{-# LANGUAGE RecordWildCards #-}
module System where
import Data.Maybe
import BasicFunctions
import HardwareTypes
import Sprockell
-- ===================================================================================
shMem :: (SharedMem, RequestFifo)
-> IndRequests
-> ((SharedMem, RequestFifo), (SprID,Reply))
shMem (sharedMem,requestFifo) chRequests = ((sharedMem',requestFifo'), (i,reply))
where
(i,req) | not $ null requestFifo = head requestFifo
| otherwise = (0, NoRequest)
(reply, sharedMem') = case req of
NoRequest -> ( Nothing , sharedMem )
ReadReq a -> ( Just (sharedMem!a) , sharedMem )
WriteReq v a -> ( Nothing , sharedMem <~ (a,v))
TestReq a | sharedMem!a == 0 -> ( Just 1 , sharedMem <~ (a,1))
| otherwise -> ( Just 0 , sharedMem )
requestFifo' = drop 1 requestFifo ++ filter ((/=NoRequest).snd) chRequests
-- ===================================================================================
transfer :: (RequestChannels, ReplyChannels)
-> (ParRequests, (SprID, Reply))
-> ((RequestChannels, ReplyChannels), (ParReplies, IndRequests))
transfer (requestChnls,replyChnls) (sprRequests,(i,shMemReply)) = ( (requestChnls',replyChnls'), (outReplies,outRequests) )
where
-- ->->->->
        outRequests   = zip [0..] $ map head requestChnls -- <<== TODO: abstract away from software/hardware
requestChnls' = zipWith (<<+) requestChnls sprRequests
-- <-<-<-<-
n = length replyChnls -- <<== TODO: abstraction difficult:
inReplies = replicate n Nothing <~ (i,shMemReply) -- no parameter n in CLaSH
outReplies = map head replyChnls
replyChnls' = zipWith (<<+) replyChnls inReplies
-- ===================================================================================
system :: Int -> [InstructionMem] -> SystemState -> t -> SystemState
system nrOfSprs instrss systemState _ = systemState'
where
SystemState{..} = systemState
-- Sprockells
(sprStates',sprRequests) = unzip $ sprockell $> instrss |$| sprStates |$| chReplies
-- Communication
((requestChnls',replyChnls'), (chReplies,chRequests)) = transfer (requestChnls,replyChnls) (sprRequests,(i,shMemReply))
-- Shared Memory
((sharedMem',requestFifo'), (i,shMemReply)) = shMem (sharedMem,requestFifo) chRequests
systemState' = SystemState
{ sprStates = sprStates'
, requestChnls = requestChnls'
, replyChnls = replyChnls'
, requestFifo = requestFifo'
, sharedMem = sharedMem'
}
| wouwouwou/2017_module_8 | src/haskell/PP-project-2016/lib/sprockell/System.hs | apache-2.0 | 3,206 | 0 | 14 | 1,149 | 696 | 402 | 294 | 41 | 4 |
import Control.Monad
import Data.Word
import qualified Data.Vector.Unboxed as U
import Test.Tasty
import Test.Tasty.QuickCheck
import Test.Tasty.HUnit
import Test.QuickCheck.Monadic
import System.Random.MWC
----------------------------------------------------------------
--
----------------------------------------------------------------
main :: IO ()
main = do
g0 <- createSystemRandom
defaultMain $ testGroup "mwc"
[ testProperty "save/restore" $ prop_SeedSaveRestore g0
, testCase "user save/restore" $ saveRestoreUserSeed
, testCase "empty seed data" $ emptySeed
, testCase "output correct" $ do
g <- create
xs <- replicateM 513 (uniform g)
assertEqual "[Word32]" xs golden
]
updateGenState :: GenIO -> IO ()
updateGenState g = replicateM_ 256 (uniform g :: IO Word32)
prop_SeedSaveRestore :: GenIO -> Property
prop_SeedSaveRestore g = monadicIO $ do
run $ updateGenState g
seed <- run $ save g
seed' <- run $ save =<< restore seed
return $ seed == seed'
saveRestoreUserSeed :: IO ()
saveRestoreUserSeed = do
let seed = toSeed $ U.replicate 258 0
seed' <- save =<< restore seed
assertEqual "Seeds must be equal" seed' seed
emptySeed :: IO ()
emptySeed = do
let seed = toSeed U.empty
seed' <- save =<< create
assertEqual "Seeds must be equal" seed' seed
-- First 513 values generated from seed made using create
golden :: [Word32]
golden =
[ 2254043345, 562229898, 1034503294, 2470032534, 2831944869, 3042560015, 838672965, 715056843
, 3122641307, 2300516242, 4079538318, 3722020688, 98524204, 1450170923, 2669500465, 2890402829
, 114212910, 1914313000, 2389251496, 116282477, 1771812561, 1606473512, 1086420617, 3652430775
, 1165083752, 3599954795, 3006722175, 341614641, 3000394300, 1378097585, 1551512487, 81211762
, 604209599, 3949866361, 77745071, 3170410267, 752447516, 1213023833, 1624321744, 3251868348
, 1584957570, 2296897736, 3305840056, 1158966242, 2458014362, 1919777052, 3203159823, 3230279656
, 755741068, 3005087942, 2478156967, 410224731, 1196248614, 3302310440, 3295868805, 108051054
, 1010042411, 2725695484, 2201528637, 667561409, 79601486, 50029770, 566202616, 3217300833
, 2162817014, 925506837, 1527015413, 3079491438, 927252446, 118306579, 499811870, 2973454232
, 2979271640, 4078978924, 1864075883, 197741457, 296365782, 1784247291, 236572186, 464208268
, 1769568958, 827682258, 4247376295, 2959098022, 1183860331, 2475064236, 3952901213, 1953014945
, 393081236, 1616500498, 2201176136, 1663813362, 2167124739, 630903810, 113470040, 924745892
, 1081531735, 4039388931, 4118728223, 107819176, 2212875141, 1941653033, 3660517172, 192973521
, 3653156164, 1878601439, 3028195526, 2545631291, 3882334975, 456082861, 2775938704, 3813508885
, 1758481462, 3332769695, 3595846251, 3745876981, 152488869, 2555728588, 3058747945, 39382408
, 520595021, 2185388418, 3502636573, 2650173199, 1077668433, 3548643646, 71562049, 2726649517
, 494210825, 1208915815, 620990806, 2877290965, 3253243521, 804166732, 2481889113, 623399529
, 44880343, 183645859, 3283683418, 2214754452, 419328482, 4224066437, 1102669380, 1997964721
, 2437245376, 985749802, 858381069, 116806511, 1771295365, 97352549, 341972923, 2971905841
, 110707773, 950500868, 1237119233, 691764764, 896381812, 1528998276, 1269357470, 2567094423
, 52141189, 2722993417, 80628658, 3919817965, 3615946076, 899371181, 46940285, 4010779728
, 318101834, 30736609, 3577200709, 971882724, 1478800972, 3769640027, 3706909300, 3300631811
, 4057825972, 4285058790, 2329759553, 2967563409, 4080096760, 2762613004, 2518395275, 295718526
, 598435593, 2385852565, 2608425408, 604857293, 2246982455, 919156819, 1721573814, 2502545603
, 643962859, 587823425, 3508582012, 1777595823, 4119929334, 2833342174, 414044876, 2469473258
, 289159600, 3715175415, 966867024, 788102818, 3197534326, 3571396978, 3508903890, 570753009
, 4273926277, 3301521986, 1411959102, 2766249515, 4071012597, 959442028, 1962463990, 1098904190
, 714719899, 562204808, 1658783410, 1471669042, 2565780129, 1616648894, 4236521717, 1788863789
, 3068674883, 191936470, 253084644, 1915647866, 276372665, 2117183118, 3704675319, 218791054
, 3680045802, 406662689, 3844864229, 91140313, 3834015630, 25116147, 904830493, 3152559113
, 820358622, 1301896358, 296152699, 2202014455, 4256659428, 1175171414, 3287520873, 2028006499
, 327448717, 2095642873, 3798661296, 58567008, 3907537112, 3691259011, 1730142328, 2373011713
, 3387040741, 3189417655, 2949233059, 1238379614, 1813238023, 1064726446, 1339055235, 1744523609
, 279811576, 2934103599, 283542302, 994488448, 418691747, 1062780152, 102211875, 4071713296
, 1790834038, 1035092527, 2374272359, 3558280982, 1927663822, 3645417844, 3481790745, 3566282546
, 2000290859, 505518126, 363501589, 4075468679, 3247300709, 3705242654, 2731103609, 2836871038
, 589640144, 2546495106, 84767518, 1376911639, 2400770705, 527489676, 3804134352, 150084021
, 240070593, 3807594859, 3518576690, 659503830, 2239678479, 1273668921, 4271050554, 3090482972
, 401956859, 1772128561, 4438455, 1989666158, 2521484677, 3960178700, 4220196277, 1033999035
, 2214785840, 3428469341, 428564336, 2517446784, 3935757188, 3294001677, 1037971963, 3590324170
, 1220969729, 1719719817, 807688972, 77076422, 4251553858, 3963852375, 326128795, 3277818295
, 3671513069, 549617771, 1683950556, 3352913781, 409318429, 2456264774, 4036950639, 1162718475
, 83888874, 5578966, 172866494, 1542278848, 455546979, 1296511553, 4263636440, 2450589064
, 372411483, 211216338, 2632256495, 2393754408, 1336054289, 4087203071, 3159642437, 1933346856
, 2914152714, 3805541979, 2769740793, 1161287028, 2289749561, 4124509890, 2128452935, 210531695
, 4250709834, 390950534, 1421430300, 3030519715, 3228987297, 3086837053, 2866915453, 2335948692
, 1684378991, 2575634059, 4153427304, 2426048796, 4197556954, 2605152326, 2909410733, 2424889219
, 654577921, 811955499, 118126602, 504071559, 1278756230, 3896458168, 4105558075, 750276169
, 1120805572, 1762689330, 993728154, 1104363215, 774344996, 4077568952, 2183487324, 994724370
, 3323036885, 3880704963, 746305447, 961608310, 2030117337, 453935768, 800490463, 1034636
, 2323633564, 602565693, 806061242, 1899269713, 162686347, 467541008, 1529175313, 282891502
, 2529616339, 2930657178, 464272784, 2878535316, 807165854, 3209080518, 4080120278, 347748171
, 3972126063, 284174728, 2498328933, 1723872460, 143845955, 4223866687, 1761495357, 1544646770
, 4206103283, 3771574626, 642165282, 1119501013, 3514063332, 1443320304, 4056369796, 3602131475
, 1422908288, 804093687, 431176780, 40108717, 2998264213, 3705835674, 169805085, 454593842
, 2781536994, 2385225212, 4137367775, 2631435125, 2347082354, 629238010, 3283635219, 3815791831
, 1340400558, 4061846985, 3803921868, 3196119096, 718610843, 3694290834, 2169960411, 2407155570
, 2557480499, 16164105, 480957288, 2155919829, 2490067282, 2356287132, 511737296, 1602800634
, 1802275249, 3316832299, 50286484, 2106622541, 2352302834, 2538374315, 344766394, 2777260569
, 1215135803, 2229011963, 114632277, 1645499402, 1111617833, 3833259754, 928611385, 686744723
, 1898396834, 2461932251, 2665457318, 3797019621, 868313114, 2366635205, 481934875, 1170532970
, 642610859, 3150733309, 3508548582, 666714469, 711663449, 2436617656, 2681476315, 1637296693
, 2487349478, 4174144946, 2793869557, 559398604, 1898140528, 991962870, 864792875, 3861665129
, 4024051364, 3383200293, 773730975, 33517291, 2660126073, 689133464, 2248134097, 3874737781
, 3358012678]
| Shimuuar/mwc-random | tests/props.hs | bsd-2-clause | 7,612 | 0 | 16 | 1,010 | 1,939 | 1,225 | 714 | 104 | 1 |
{-# LANGUAGE TemplateHaskell, FlexibleInstances, UndecidableInstances,
             DeriveDataTypeable, MultiParamTypeClasses, CPP,
             ScopedTypeVariables #-}
-- | Functions to allow you to use XSLT to transform your output. To use this, you would generally design your happstack application to output XML. The xslt filter will then run an external tool which performs the transforms. The transformed result will then be sent to the http client as the Response.
--
-- NOTE: This module is currently looking for a maintainer. If you want to improve XSLT support in Happstack, please volunteer!
module Happstack.Server.XSLT
(xsltFile, xsltString, {- xsltElem, -} xsltFPS, xsltFPSIO, XSLPath,
xslt, doXslt, xsltproc,saxon,procFPSIO,procLBSIO,XSLTCommand,XSLTCmd
) where
import System.Log.Logger
import Control.Concurrent (forkIO)
import Control.Concurrent.MVar (newEmptyMVar, putMVar, takeMVar)
import Control.Monad
import Control.Monad.Trans
import qualified Data.ByteString.Char8 as B
import Happstack.Server.SimpleHTTP
-- import Happstack.Server.MinHaXML
import Control.Exception.Extensible(bracket,try,SomeException)
import qualified Data.ByteString.Char8 as P
import qualified Data.ByteString.Lazy.Char8 as L
import System.Directory(removeFile)
import System.Environment(getEnv)
import System.Exit (ExitCode(..))
import System.IO
import System.IO.Unsafe(unsafePerformIO)
import System.Process (runInteractiveProcess, waitForProcess)
-- import Text.XML.HaXml.Verbatim(verbatim)
import Happstack.Data hiding (Element)
logMX :: Priority -> String -> IO ()
logMX = logM "Happstack.Server.XSLT"
type XSLPath = FilePath
$(deriveAll [''Show,''Read,''Default, ''Eq, ''Ord]
[d|
data XSLTCmd = XSLTProc | Saxon
|]
)
xsltCmd :: XSLTCmd
-> XSLPath
-> FilePath
-> FilePath
-> (FilePath, [String])
xsltCmd XSLTProc = xsltproc'
xsltCmd Saxon = saxon'
{-
-- | Uses 'xsltString' to transform the given XML 'Element' into a
-- a 'String'.
xsltElem :: XSLPath -> Element -> String
xsltElem xsl = xsltString xsl . verbatim
-}
procLBSIO :: XSLTCmd -> XSLPath -> L.ByteString -> IO L.ByteString
procLBSIO xsltp' xsl inp =
withTempFile "happs-src.xml" $ \sfp sh -> do
withTempFile "happs-dst.xml" $ \dfp dh -> do
let xsltp = xsltCmd xsltp'
L.hPut sh inp
hClose sh
hClose dh
xsltFileEx xsltp xsl sfp dfp
s <- L.readFile dfp
logMX DEBUG (">>> XSLT: result: "++ show s)
return s
procFPSIO :: XSLTCommand
-> XSLPath
-> [P.ByteString]
-> IO [P.ByteString]
procFPSIO xsltp xsl inp =
withTempFile "happs-src.xml" $ \sfp sh -> do
withTempFile "happs-dst.xml" $ \dfp dh -> do
mapM_ (P.hPut sh) inp
hClose sh
hClose dh
xsltFileEx xsltp xsl sfp dfp
s <- P.readFile dfp
logMX DEBUG (">>> XSLT: result: "++ show s)
return [s]
-- | Performs an XSL transformation with lists of ByteStrings instead of
-- a String.
xsltFPS :: XSLPath -> [P.ByteString] -> [P.ByteString]
xsltFPS xsl = unsafePerformIO . xsltFPSIO xsl
-- | Equivalent to 'xsltFPS' but does not hide the inherent IO of the low-level
-- ByteString operations.
xsltFPSIO :: XSLPath -> [P.ByteString] -> IO [P.ByteString]
xsltFPSIO xsl inp =
withTempFile "happs-src.xml" $ \sfp sh -> do
withTempFile "happs-dst.xml" $ \dfp dh -> do
mapM_ (P.hPut sh) inp
hClose sh
hClose dh
xsltFile xsl sfp dfp
s <- P.readFile dfp
logMX DEBUG (">>> XSLT: result: "++ show s)
return [s]
-- | Uses the provided xsl file to transform the given string.
-- This function creates temporary files during its execution, but
-- guarantees their cleanup.
xsltString :: XSLPath -> String -> String
xsltString xsl inp = unsafePerformIO $
withTempFile "happs-src.xml" $ \sfp sh -> do
withTempFile "happs-dst.xml" $ \dfp dh -> do
hPutStr sh inp
hClose sh
hClose dh
xsltFile xsl sfp dfp
s <- readFileStrict dfp
logMX DEBUG (">>> XSLT: result: "++ show s)
return s
-- | Note that the xsl file must have .xsl suffix.
xsltFile :: XSLPath -> FilePath -> FilePath -> IO ()
xsltFile = xsltFileEx xsltproc'
-- | Use @xsltproc@ to transform XML.
xsltproc :: XSLTCmd
xsltproc = XSLTProc
xsltproc' :: XSLTCommand
xsltproc' dst xsl src = ("xsltproc",["-o",dst,xsl,src])
-- | Use @saxon@ to transform XML.
saxon :: XSLTCmd
saxon = Saxon
saxon' :: XSLTCommand
saxon' dst xsl src = ("java -classpath /usr/share/java/saxon.jar",
["com.icl.saxon.StyleSheet"
,"-o",dst,src,xsl])
type XSLTCommand = XSLPath -> FilePath -> FilePath -> (FilePath,[String])
xsltFileEx :: XSLTCommand -> XSLPath -> FilePath -> FilePath -> IO ()
xsltFileEx xsltp xsl src dst = do
let msg = (">>> XSLT: Starting xsltproc " ++ unwords ["-o",dst,xsl,src])
logMX DEBUG msg
uncurry runCommand $ xsltp dst xsl src
logMX DEBUG (">>> XSLT: xsltproc done")
-- Utilities
withTempFile :: String -> (FilePath -> Handle -> IO a) -> IO a
withTempFile str hand = bracket (openTempFile tempDir str) (removeFile . fst) (uncurry hand)
readFileStrict :: FilePath -> IO String
readFileStrict fp = do
let fseqM [] = return []
fseqM xs = last xs `seq` return xs
fseqM =<< readFile fp
{-# NOINLINE tempDir #-}
tempDir :: FilePath
tempDir = unsafePerformIO $ tryAny [getEnv "TEMP",getEnv "TMP"] err
where err = return "/tmp"
tryAny :: [IO a] -> IO a -> IO a
tryAny [] c = c
tryAny (x:xs) c = either (\(_::SomeException) -> tryAny xs c) return =<< try x
-- | Use @cmd@ to transform XML against @xslPath@. This function only
-- acts if the content-type is @application\/xml@.
xslt :: (MonadIO m, MonadPlus m, ToMessage r) =>
XSLTCmd -- ^ XSLT preprocessor. Usually 'xsltproc' or 'saxon'.
-> XSLPath -- ^ Path to xslt stylesheet.
-> m r -- ^ Affected 'ServerPart's.
-> m Response
xslt cmd xslPath parts = do
res <- parts
if toContentType res == B.pack "application/xml"
then doXslt cmd xslPath (toResponse res)
else return (toResponse res)
doXslt :: (MonadIO m) =>
XSLTCmd -> XSLPath -> Response -> m Response
doXslt cmd xslPath res =
do new <- liftIO $ procLBSIO cmd xslPath $ rsBody res
return $ setHeader "Content-Type" "text/html" $
setHeader "Content-Length" (show $ L.length new) $
res { rsBody = new }
-- | Run an external command. Upon failure print status
-- to stderr.
runCommand :: String -> [String] -> IO ()
runCommand cmd args = do
(_, outP, errP, pid) <- runInteractiveProcess cmd args Nothing Nothing
let pGetContents h = do mv <- newEmptyMVar
let put [] = putMVar mv []
put xs = last xs `seq` putMVar mv xs
forkIO (hGetContents h >>= put)
takeMVar mv
os <- pGetContents outP
es <- pGetContents errP
ec <- waitForProcess pid
case ec of
ExitSuccess -> return ()
ExitFailure e ->
do hPutStrLn stderr ("Running process "++unwords (cmd:args)++" FAILED ("++show e++")")
hPutStrLn stderr os
hPutStrLn stderr es
hPutStrLn stderr "Raising error..."
fail "Running external command failed"
| arybczak/happstack-server | src/Happstack/Server/XSLT.hs | bsd-3-clause | 7,418 | 0 | 20 | 1,802 | 1,976 | 1,008 | 968 | -1 | -1 |
-- |
-- Module : Crypto.Random.Entropy.RDRand
-- License : BSD-style
-- Maintainer : Vincent Hanquez <[email protected]>
-- Stability : experimental
-- Portability : Good
--
{-# LANGUAGE ForeignFunctionInterface #-}
module Crypto.Random.Entropy.RDRand
( RDRand
) where
import Foreign.Ptr
import Foreign.C.Types
import Data.Word (Word8)
import Crypto.Random.Entropy.Source
foreign import ccall unsafe "cryptonite_cpu_has_rdrand"
c_cpu_has_rdrand :: IO CInt
foreign import ccall unsafe "cryptonite_get_rand_bytes"
c_get_rand_bytes :: Ptr Word8 -> CInt -> IO CInt
-- | fake handle to Intel RDRand entropy cpu instruction
data RDRand = RDRand
instance EntropySource RDRand where
entropyOpen = rdrandGrab
entropyGather _ = rdrandGetBytes
entropyClose _ = return ()
rdrandGrab :: IO (Maybe RDRand)
rdrandGrab = supported `fmap` c_cpu_has_rdrand
where supported 0 = Nothing
supported _ = Just RDRand
rdrandGetBytes :: Ptr Word8 -> Int -> IO Int
rdrandGetBytes ptr sz = fromIntegral `fmap` c_get_rand_bytes ptr (fromIntegral sz)
| nomeata/cryptonite | Crypto/Random/Entropy/RDRand.hs | bsd-3-clause | 1,080 | 0 | 8 | 195 | 228 | 127 | 101 | 22 | 2 |
{-# LANGUAGE RankNTypes, NamedFieldPuns, RecordWildCards, DoRec,
BangPatterns, OverloadedStrings, CPP, TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Distribution.Server.Features.Users (
initUserFeature,
UserFeature(..),
UserResource(..),
GroupResource(..),
) where
import Distribution.Server.Framework
import Distribution.Server.Framework.BackupDump
import qualified Distribution.Server.Framework.Auth as Auth
import Distribution.Server.Users.Types
import Distribution.Server.Users.State
import Distribution.Server.Users.Backup
import qualified Distribution.Server.Users.Users as Users
import qualified Distribution.Server.Users.Group as Group
import Distribution.Server.Users.Group (UserGroup(..), GroupDescription(..), UserList, nullDescription)
import Data.IntMap (IntMap)
import qualified Data.IntMap as IntMap
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Maybe (fromMaybe)
import Data.Function (fix)
import Control.Applicative (optional)
import Data.Aeson (toJSON)
import Data.Aeson.TH
import qualified Data.Text as T
import Distribution.Text (display, simpleParse)
-- | A feature to allow manipulation of the database of users.
--
-- TODO: clean up mismatched and duplicate functionality (some noted below).
data UserFeature = UserFeature {
-- | The users `HackageFeature`.
userFeatureInterface :: HackageFeature,
-- | User resources.
userResource :: UserResource,
-- | Notification that a user has been added. Currently unused.
userAdded :: Hook () (), --TODO: delete, other status changes?
-- | The admin user group, including its description, members, and
-- modification thereof.
adminGroup :: UserGroup,
-- Authorisation
-- | Require any of a set of privileges.
guardAuthorised_ :: [PrivilegeCondition] -> ServerPartE (),
-- | Require any of a set of privileges, giving the id of the current user.
guardAuthorised :: [PrivilegeCondition] -> ServerPartE UserId,
-- | Require being logged in, giving the id of the current user.
guardAuthenticated :: ServerPartE UserId,
-- | A hook to override the default authentication error in particular
-- circumstances.
authFailHook :: Hook Auth.AuthError (Maybe ErrorResponse),
-- | Retrieves the entire user base.
queryGetUserDb :: forall m. MonadIO m => m Users.Users,
-- | Creates a Hackage 2 user credential.
newUserAuth :: UserName -> PasswdPlain -> UserAuth,
-- | Adds a user with a fresh name.
updateAddUser :: forall m. MonadIO m => UserName -> UserAuth -> m (Either Users.ErrUserNameClash UserId),
-- | Sets the account-enabled status of an existing user to True or False.
updateSetUserEnabledStatus :: MonadIO m => UserId -> Bool
-> m (Maybe (Either Users.ErrNoSuchUserId Users.ErrDeletedUser)),
-- | Sets the credentials of an existing user.
updateSetUserAuth :: MonadIO m => UserId -> UserAuth
-> m (Maybe (Either Users.ErrNoSuchUserId Users.ErrDeletedUser)),
-- | Adds a user to a group based on a "user" path component.
--
-- Use the UserGroup or GroupResource directly instead, as this is a hack.
groupAddUser :: UserGroup -> DynamicPath -> ServerPartE (),
-- | Likewise, deletes a user, will go away soon.
groupDeleteUser :: UserGroup -> DynamicPath -> ServerPartE (),
-- | Get a username from a path.
userNameInPath :: forall m. MonadPlus m => DynamicPath -> m UserName,
-- | Lookup a `UserId` from a name, if the name exists.
lookupUserName :: UserName -> ServerPartE UserId,
-- | Lookup full `UserInfo` from a name, if the name exists.
lookupUserNameFull :: UserName -> ServerPartE (UserId, UserInfo),
-- | Lookup full `UserInfo` from an id, if the id exists.
lookupUserInfo :: UserId -> ServerPartE UserInfo,
-- | An action to change a password directly, using "password" and
-- "repeat-password" form fields. Only admins and the user themselves
    -- can do this. This is messy, as it was one of the first things written
-- for the users feature.
--
-- TODO: update and make more usable.
changePassword :: UserName -> ServerPartE (),
-- | Determine if the first user can change the second user's password,
-- replicating auth functionality. Avoid using.
canChangePassword :: forall m. MonadIO m => UserId -> UserId -> m Bool,
-- | Action to create a new user with the given credentials. This takes the
-- desired name, a password, and a repeated password, validating all.
newUserWithAuth :: String -> PasswdPlain -> PasswdPlain -> ServerPartE UserName,
-- | Action for an admin to create a user with "username", "password", and
-- "repeat-password" username fields.
adminAddUser :: ServerPartE Response,
-- Create a group resource for the given resource path.
groupResourceAt :: String -> UserGroup -> IO (UserGroup, GroupResource),
    -- | Create a parametrized group resource for the given resource path.
    -- The parameter `a` can be thought of as a group key, and there is
-- potentially a set of initial values.
--
-- This takes functions to create a user group on the fly for the given
-- key, go from a key to a DynamicPath (for URI generation), as well as
-- go from a DynamicPath to a key with some possibility of failure. This
-- should check key membership, as well.
--
    -- When these parametrized `UserGroup`s need to be modified, the returned
-- `a -> UserGroup` function should be used, as it wraps the given
-- `a -> UserGroup` function to keep user-to-group mappings up-to-date.
groupResourcesAt :: forall a. String -> (a -> UserGroup)
-> (a -> DynamicPath)
-> (DynamicPath -> ServerPartE a)
-> [a]
-> IO (a -> UserGroup, GroupResource),
-- | Look up whether the current user has (add, remove) capabilities for
-- the given group, erroring out if neither are present.
lookupGroupEditAuth :: UserGroup -> ServerPartE (Bool, Bool),
-- | For a given user, return all of the URIs for groups they are in.
getGroupIndex :: forall m. (Functor m, MonadIO m) => UserId -> m [String],
-- | For a given URI, get a GroupDescription for it, if one can be found.
getIndexDesc :: forall m. MonadIO m => String -> m GroupDescription
}
instance IsHackageFeature UserFeature where
getFeatureInterface = userFeatureInterface
data UserResource = UserResource {
-- | The list of all users.
userList :: Resource,
-- | The main page for a given user.
userPage :: Resource,
-- | A user's password.
passwordResource :: Resource,
-- | A user's enabled status.
enabledResource :: Resource,
-- | The admin group.
adminResource :: GroupResource,
-- | URI for `userList` given a format.
userListUri :: String -> String,
-- | URI for `userPage` given a format and name.
userPageUri :: String -> UserName -> String,
-- | URI for `passwordResource` given a format and name.
userPasswordUri :: String -> UserName -> String,
-- | URI for `enabledResource` given a format and name.
userEnabledUri :: String -> UserName -> String,
-- | URI for `adminResource` given a format.
adminPageUri :: String -> String
}
instance FromReqURI UserName where
fromReqURI = simpleParse
data GroupResource = GroupResource {
    -- | A group, potentially parametrized over some collection.
groupResource :: Resource,
-- | A user's presence in a group.
groupUserResource :: Resource,
-- | A `UserGroup` for a group, with a `DynamicPath` for any parameterization.
getGroup :: DynamicPath -> ServerPartE UserGroup
}
-- This is a mapping of UserId -> group URI and group URI -> description.
-- Like many reverse mappings, it is probably rather volatile. Still, it is
-- a secondary concern, as user groups should be defined by each feature
-- and not globally, to be perfectly modular.
data GroupIndex = GroupIndex {
usersToGroupUri :: !(IntMap (Set String)),
groupUrisToDesc :: !(Map String GroupDescription)
}
emptyGroupIndex :: GroupIndex
emptyGroupIndex = GroupIndex IntMap.empty Map.empty
instance MemSize GroupIndex where
memSize (GroupIndex a b) = memSize2 a b
-- TODO: add renaming
initUserFeature :: ServerEnv -> IO (IO UserFeature)
initUserFeature ServerEnv{serverStateDir} = do
-- Canonical state
usersState <- usersStateComponent serverStateDir
adminsState <- adminsStateComponent serverStateDir
-- Ephemeral state
groupIndex <- newMemStateWHNF emptyGroupIndex
-- Extension hooks
userAdded <- newHook
authFailHook <- newHook
return $ do
-- Slightly tricky: we have an almost recursive knot between the group
-- resource management functions, and creating the admin group
-- resource that is part of the user feature.
--
-- Instead of trying to pull it apart, we just use a 'do rec'
--
rec let (feature@UserFeature{groupResourceAt}, adminGroupDesc)
= userFeature usersState
adminsState
groupIndex
userAdded authFailHook
adminG adminR
(adminG, adminR) <- groupResourceAt "/users/admins/" adminGroupDesc
return feature
usersStateComponent :: FilePath -> IO (StateComponent AcidState Users.Users)
usersStateComponent stateDir = do
st <- openLocalStateFrom (stateDir </> "db" </> "Users") initialUsers
return StateComponent {
stateDesc = "List of users"
, stateHandle = st
, getState = query st GetUserDb
, putState = update st . ReplaceUserDb
, backupState = \backuptype users ->
[csvToBackup ["users.csv"] (usersToCSV backuptype users)]
, restoreState = userBackup
, resetState = usersStateComponent
}
adminsStateComponent :: FilePath -> IO (StateComponent AcidState HackageAdmins)
adminsStateComponent stateDir = do
st <- openLocalStateFrom (stateDir </> "db" </> "HackageAdmins") initialHackageAdmins
return StateComponent {
stateDesc = "Admins"
, stateHandle = st
, getState = query st GetHackageAdmins
, putState = update st . ReplaceHackageAdmins . adminList
, backupState = \_ (HackageAdmins admins) -> [csvToBackup ["admins.csv"] (groupToCSV admins)]
, restoreState = HackageAdmins <$> groupBackup ["admins.csv"]
, resetState = adminsStateComponent
}
userFeature :: StateComponent AcidState Users.Users
-> StateComponent AcidState HackageAdmins
-> MemState GroupIndex
-> Hook () ()
-> Hook Auth.AuthError (Maybe ErrorResponse)
-> UserGroup
-> GroupResource
-> (UserFeature, UserGroup)
userFeature usersState adminsState
groupIndex userAdded authFailHook
adminGroup adminResource
= (UserFeature {..}, adminGroupDesc)
where
userFeatureInterface = (emptyHackageFeature "users") {
featureDesc = "Manipulate the user database."
, featureResources =
map ($ userResource)
[ userList
, userPage
, passwordResource
, enabledResource
]
++ [
groupResource adminResource
, groupUserResource adminResource
]
, featureState = [
abstractAcidStateComponent usersState
, abstractAcidStateComponent adminsState
]
, featureCaches = [
CacheComponent {
cacheDesc = "user group index",
getCacheMemSize = memSize <$> readMemState groupIndex
}
]
}
userResource = fix $ \r -> UserResource {
userList = (resourceAt "/users/.:format") {
resourceDesc = [ (GET, "list of users") ]
, resourceGet = [ ("json", serveUsersGet) ]
}
, userPage = (resourceAt "/user/:username.:format") {
resourceDesc = [ (GET, "user id info")
, (PUT, "create user")
, (DELETE, "delete user")
]
, resourceGet = [ ("json", serveUserGet) ]
, resourcePut = [ ("", serveUserPut) ]
, resourceDelete = [ ("", serveUserDelete) ]
}
, passwordResource = resourceAt "/user/:username/password.:format"
--TODO: PUT
, enabledResource = (resourceAt "/user/:username/enabled.:format") {
resourceDesc = [ (GET, "return if the user is enabled")
, (PUT, "set if the user is enabled")
]
, resourceGet = [("json", serveUserEnabledGet)]
, resourcePut = [("json", serveUserEnabledPut)]
}
, adminResource = adminResource
, userListUri = \format ->
renderResource (userList r) [format]
, userPageUri = \format uname ->
renderResource (userPage r) [display uname, format]
, userPasswordUri = \format uname ->
renderResource (passwordResource r) [display uname, format]
, userEnabledUri = \format uname ->
renderResource (enabledResource r) [display uname, format]
, adminPageUri = \format ->
renderResource (groupResource adminResource) [format]
}
-- Queries and updates
--
queryGetUserDb :: MonadIO m => m Users.Users
queryGetUserDb = queryState usersState GetUserDb
updateAddUser :: MonadIO m => UserName -> UserAuth -> m (Either Users.ErrUserNameClash UserId)
updateAddUser uname auth = updateState usersState (AddUserEnabled uname auth)
updateSetUserEnabledStatus :: MonadIO m => UserId -> Bool
-> m (Maybe (Either Users.ErrNoSuchUserId Users.ErrDeletedUser))
updateSetUserEnabledStatus uid isenabled = updateState usersState (SetUserEnabledStatus uid isenabled)
updateSetUserAuth :: MonadIO m => UserId -> UserAuth
-> m (Maybe (Either Users.ErrNoSuchUserId Users.ErrDeletedUser))
updateSetUserAuth uid auth = updateState usersState (SetUserAuth uid auth)
--
-- Authorisation: authentication checks and privilege checks
--
-- High level, all in one check that the client is authenticated as a
-- particular user and has an appropriate privilege, but then ignore the
-- identity of the user.
guardAuthorised_ :: [PrivilegeCondition] -> ServerPartE ()
guardAuthorised_ = void . guardAuthorised
-- As above but also return the identity of the client
guardAuthorised :: [PrivilegeCondition] -> ServerPartE UserId
guardAuthorised privconds = do
users <- queryGetUserDb
uid <- guardAuthenticatedWithErrHook users
Auth.guardPriviledged users uid privconds
return uid
-- Simply check if the user is authenticated as some user, without any
    -- check that they have any particular privileges. Only useful as a
-- building block.
guardAuthenticated :: ServerPartE UserId
guardAuthenticated = do
users <- queryGetUserDb
guardAuthenticatedWithErrHook users
-- As above but using the given userdb snapshot
guardAuthenticatedWithErrHook :: Users.Users -> ServerPartE UserId
guardAuthenticatedWithErrHook users = do
(uid,_) <- Auth.checkAuthenticated realm users
>>= either handleAuthError return
return uid
where
realm = Auth.hackageRealm --TODO: should be configurable
handleAuthError :: Auth.AuthError -> ServerPartE a
handleAuthError err = do
defaultResponse <- Auth.authErrorResponse realm err
overrideResponse <- msum <$> runHook authFailHook err
throwError (fromMaybe defaultResponse overrideResponse)
-- | Resources representing the collection of known users.
--
-- Features:
--
-- * listing the collection of users
-- * adding and deleting users
-- * enabling and disabling accounts
-- * changing user's name and password
--
serveUsersGet :: DynamicPath -> ServerPartE Response
serveUsersGet _ = do
userlist <- Users.enumerateActiveUsers <$> queryGetUserDb
let users = [ UserNameIdResource {
ui_username = userName uinfo,
ui_userid = uid
}
| (uid, uinfo) <- userlist ]
return . toResponse $ toJSON users
serveUserGet :: DynamicPath -> ServerPartE Response
serveUserGet dpath = do
(uid, uinfo) <- lookupUserNameFull =<< userNameInPath dpath
groups <- getGroupIndex uid
return . toResponse $
toJSON UserInfoResource {
ui1_username = userName uinfo,
ui1_userid = uid,
ui1_groups = map T.pack groups
}
serveUserPut :: DynamicPath -> ServerPartE Response
serveUserPut dpath = do
guardAuthorised_ [InGroup adminGroup]
username <- userNameInPath dpath
muid <- updateState usersState $ AddUserDisabled username
case muid of
Left Users.ErrUserNameClash ->
errBadRequest "Username already exists"
[MText "Cannot create a new user account with that username because already exists"]
Right uid -> return . toResponse $
toJSON UserNameIdResource {
ui_username = username,
ui_userid = uid
}
serveUserDelete :: DynamicPath -> ServerPartE Response
serveUserDelete dpath = do
guardAuthorised_ [InGroup adminGroup]
uid <- lookupUserName =<< userNameInPath dpath
merr <- updateState usersState $ DeleteUser uid
case merr of
Nothing -> noContent $ toResponse ()
--TODO: need to be able to delete user by name to fix this race condition
Just Users.ErrNoSuchUserId -> errInternalError [MText "uid does not exist"]
serveUserEnabledGet :: DynamicPath -> ServerPartE Response
serveUserEnabledGet dpath = do
guardAuthorised_ [InGroup adminGroup]
(_uid, uinfo) <- lookupUserNameFull =<< userNameInPath dpath
let enabled = case userStatus uinfo of
AccountEnabled _ -> True
_ -> False
return . toResponse $ toJSON EnabledResource { ui_enabled = enabled }
serveUserEnabledPut :: DynamicPath -> ServerPartE Response
serveUserEnabledPut dpath = do
guardAuthorised_ [InGroup adminGroup]
uid <- lookupUserName =<< userNameInPath dpath
EnabledResource enabled <- expectAesonContent
merr <- updateState usersState (SetUserEnabledStatus uid enabled)
case merr of
Nothing -> noContent $ toResponse ()
Just (Left Users.ErrNoSuchUserId) ->
errInternalError [MText "uid does not exist"]
Just (Right Users.ErrDeletedUser) ->
errBadRequest "User deleted"
[MText "Cannot disable account, it has already been deleted"]
--
-- Exported utils for looking up user names in URLs\/paths
--
userNameInPath :: forall m. MonadPlus m => DynamicPath -> m UserName
userNameInPath dpath = maybe mzero return (simpleParse =<< lookup "username" dpath)
lookupUserName :: UserName -> ServerPartE UserId
lookupUserName = fmap fst . lookupUserNameFull
lookupUserNameFull :: UserName -> ServerPartE (UserId, UserInfo)
lookupUserNameFull uname = do
users <- queryState usersState GetUserDb
case Users.lookupUserName uname users of
Just u -> return u
Nothing -> userLost "Could not find user: not presently registered"
where userLost = errNotFound "User not found" . return . MText
--FIXME: 404 is only the right error for operating on User resources
-- not when users are being looked up for other reasons, like setting
    -- ownership of packages. In that case it needs errBadRequest
lookupUserInfo :: UserId -> ServerPartE UserInfo
lookupUserInfo uid = do
users <- queryState usersState GetUserDb
case Users.lookupUserId uid users of
Just uinfo -> return uinfo
Nothing -> errInternalError [MText "user id does not exist"]
adminAddUser :: ServerPartE Response
adminAddUser = do
      -- if the following line is commented out, self-registration is allowed
guardAuthorised_ [InGroup adminGroup]
reqData <- getDataFn lookUserNamePasswords
case reqData of
(Left errs) -> errBadRequest "Error registering user"
((MText "Username, password, or repeated password invalid.") : map MText errs)
(Right (ustr, pwd1, pwd2)) -> do
uname <- newUserWithAuth ustr (PasswdPlain pwd1) (PasswdPlain pwd2)
seeOther ("/user/" ++ display uname) (toResponse ())
where lookUserNamePasswords = do
(,,) <$> look "username"
<*> look "password"
<*> look "repeat-password"
newUserWithAuth :: String -> PasswdPlain -> PasswdPlain -> ServerPartE UserName
newUserWithAuth _ pwd1 pwd2 | pwd1 /= pwd2 = errBadRequest "Error registering user" [MText "Entered passwords do not match"]
newUserWithAuth userNameStr password _ =
case simpleParse userNameStr of
Nothing -> errBadRequest "Error registering user" [MText "Not a valid user name!"]
Just uname -> do
let auth = newUserAuth uname password
muid <- updateState usersState $ AddUserEnabled uname auth
case muid of
Left Users.ErrUserNameClash -> errForbidden "Error registering user" [MText "A user account with that user name already exists."]
Right _ -> return uname
-- Arguments: the auth'd user id, the user path id (derived from the :username)
canChangePassword :: MonadIO m => UserId -> UserId -> m Bool
canChangePassword uid userPathId = do
admins <- queryState adminsState GetAdminList
return $ uid == userPathId || (uid `Group.member` admins)
--FIXME: this thing is a total mess!
-- Do admins need to change user's passwords? Why not just reset passwords & (de)activate accounts.
changePassword :: UserName -> ServerPartE ()
changePassword username = do
uid <- lookupUserName username
guardAuthorised [IsUserId uid, InGroup adminGroup]
passwd1 <- look "password" --TODO: fail rather than mzero if missing
passwd2 <- look "repeat-password"
when (passwd1 /= passwd2) $
forbidChange "Copies of new password do not match or is an invalid password (ex: blank)"
let passwd = PasswdPlain passwd1
auth = newUserAuth username passwd
res <- updateState usersState (SetUserAuth uid auth)
case res of
Nothing -> return ()
Just (Left Users.ErrNoSuchUserId) -> errInternalError [MText "user id lookup failure"]
Just (Right Users.ErrDeletedUser) -> forbidChange "Cannot set passwords for deleted users"
where
forbidChange = errForbidden "Error changing password" . return . MText
newUserAuth :: UserName -> PasswdPlain -> UserAuth
newUserAuth name pwd = UserAuth (Auth.newPasswdHash Auth.hackageRealm name pwd)
------ User group management
adminGroupDesc :: UserGroup
adminGroupDesc = UserGroup {
groupDesc = nullDescription { groupTitle = "Hackage admins" },
queryUserList = queryState adminsState GetAdminList,
addUserList = updateState adminsState . AddHackageAdmin,
removeUserList = updateState adminsState . RemoveHackageAdmin,
canAddGroup = [adminGroupDesc],
canRemoveGroup = [adminGroupDesc]
}
groupAddUser :: UserGroup -> DynamicPath -> ServerPartE ()
groupAddUser group _ = do
guardAuthorised_ (map InGroup (canAddGroup group))
users <- queryState usersState GetUserDb
muser <- optional $ look "user"
case muser of
Nothing -> addError "Bad request (could not find 'user' argument)"
Just ustr -> case simpleParse ustr >>= \uname -> Users.lookupUserName uname users of
Nothing -> addError $ "No user with name " ++ show ustr ++ " found"
Just (uid,_) -> liftIO $ addUserList group uid
where addError = errBadRequest "Failed to add user" . return . MText
groupDeleteUser :: UserGroup -> DynamicPath -> ServerPartE ()
groupDeleteUser group dpath = do
guardAuthorised_ (map InGroup (canRemoveGroup group))
uid <- lookupUserName =<< userNameInPath dpath
liftIO $ removeUserList group uid
lookupGroupEditAuth :: UserGroup -> ServerPartE (Bool, Bool)
lookupGroupEditAuth group = do
addList <- liftIO . Group.queryGroups $ canAddGroup group
removeList <- liftIO . Group.queryGroups $ canRemoveGroup group
uid <- guardAuthenticated
let (canAdd, canDelete) = (uid `Group.member` addList, uid `Group.member` removeList)
if not (canAdd || canDelete)
then errForbidden "Forbidden" [MText "Can't edit permissions for user group"]
else return (canAdd, canDelete)
------------ Encapsulation of resources related to editing a user group.
-- | Registers a user group for external display. It takes the index group
-- mapping (groupIndex from UserFeature), the base uri of the group, and a
-- UserGroup object with all the necessary hooks. The base uri shouldn't
-- contain any dynamic or varying components. It returns the GroupResource
-- object, and also an adapted UserGroup that updates the cache. You should
-- use this in order to keep the index updated.
groupResourceAt :: String -> UserGroup -> IO (UserGroup, GroupResource)
groupResourceAt uri group = do
let mainr = resourceAt uri
descr = groupDesc group
groupUri = renderResource mainr []
group' = group
{ addUserList = \uid -> do
addGroupIndex uid groupUri descr
addUserList group uid
, removeUserList = \uid -> do
removeGroupIndex uid groupUri
removeUserList group uid
}
ulist <- queryUserList group
initGroupIndex ulist groupUri descr
let groupr = GroupResource {
groupResource = (extendResourcePath "/.:format" mainr) {
resourceDesc = [ (GET, "Description of the group and a list of its members (defined in 'users' feature)") ]
, resourceGet = [ ("json", serveUserGroupGet groupr) ]
}
, groupUserResource = (extendResourcePath "/user/:username.:format" mainr) {
resourceDesc = [ (PUT, "Add a user to the group (defined in 'users' feature)")
, (DELETE, "Remove a user from the group (defined in 'users' feature)")
]
, resourcePut = [ ("", serveUserGroupUserPut groupr) ]
, resourceDelete = [ ("", serveUserGroupUserDelete groupr) ]
}
, getGroup = \_ -> return group'
}
return (group', groupr)
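    -- Illustrative sketch (not from the original source; @trusteesDesc@ is a
    -- hypothetical 'UserGroup'): a feature registers its group once and then
    -- only uses the returned, cache-updating group for later modifications:
    --
    -- > (trusteesGroup, trusteesResource) <-
    -- >     groupResourceAt "/packages/trustees/" trusteesDesc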
-- | Registers a collection of user groups for external display. These groups
-- are usually backing a separate collection. Like groupResourceAt, it takes the
    -- index group mapping and a base uri. The base uri can contain varying path
-- components, so there should be a group-generating function that, given a
-- DynamicPath, yields the proper UserGroup. The final argument is the initial
-- list of DynamicPaths to build the initial group index. Like groupResourceAt,
-- this function returns an adaptor function that keeps the index updated.
groupResourcesAt :: String
-> (a -> UserGroup)
-> (a -> DynamicPath)
-> (DynamicPath -> ServerPartE a)
-> [a]
-> IO (a -> UserGroup, GroupResource)
groupResourcesAt uri mkGroup mkPath getGroupData initialGroupData = do
let mainr = resourceAt uri
sequence_
[ do let group = mkGroup x
dpath = mkPath x
ulist <- queryUserList group
initGroupIndex ulist (renderResource' mainr dpath) (groupDesc group)
| x <- initialGroupData ]
let mkGroup' x =
let group = mkGroup x
dpath = mkPath x
in group {
addUserList = \uid -> do
addGroupIndex uid (renderResource' mainr dpath) (groupDesc group)
addUserList group uid
, removeUserList = \uid -> do
removeGroupIndex uid (renderResource' mainr dpath)
removeUserList group uid
}
groupr = GroupResource {
groupResource = (extendResourcePath "/.:format" mainr) {
resourceDesc = [ (GET, "Description of the group and a list of the members (defined in 'users' feature)") ]
, resourceGet = [ ("json", serveUserGroupGet groupr) ]
}
, groupUserResource = (extendResourcePath "/user/:username.:format" mainr) {
resourceDesc = [ (PUT, "Add a user to the group (defined in 'users' feature)")
, (DELETE, "Delete a user from the group (defined in 'users' feature)")
]
, resourcePut = [ ("", serveUserGroupUserPut groupr) ]
, resourceDelete = [ ("", serveUserGroupUserDelete groupr) ]
}
, getGroup = \dpath -> mkGroup' <$> getGroupData dpath
}
return (mkGroup', groupr)
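    -- Illustrative sketch (hypothetical names): for per-package maintainer
    -- groups the caller supplies a group constructor keyed by package name
    -- plus both directions of the path translation and the initial keys:
    --
    -- > (mkMaintainers, maintainersResource) <-
    -- >     groupResourcesAt "/package/:package/maintainers/"
    -- >                      maintainerGroupFor pkgToPath pathToPkg initialPkgs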
serveUserGroupGet groupr dpath = do
group <- getGroup groupr dpath
userDb <- queryGetUserDb
userlist <- liftIO $ queryUserList group
return . toResponse $ toJSON
UserGroupResource {
ui_title = T.pack $ groupTitle (groupDesc group),
ui_description = T.pack $ groupPrologue (groupDesc group),
ui_members = [ UserNameIdResource {
ui_username = Users.userIdToName userDb uid,
ui_userid = uid
}
| uid <- Group.enumerate userlist ]
}
--TODO: add serveUserGroupUserPost for the sake of the html frontend
-- and then remove groupAddUser & groupDeleteUser
serveUserGroupUserPut groupr dpath = do
group <- getGroup groupr dpath
guardAuthorised_ (map InGroup (canAddGroup group))
uid <- lookupUserName =<< userNameInPath dpath
liftIO $ addUserList group uid
goToList groupr dpath
serveUserGroupUserDelete groupr dpath = do
group <- getGroup groupr dpath
guardAuthorised_ (map InGroup (canRemoveGroup group))
uid <- lookupUserName =<< userNameInPath dpath
liftIO $ removeUserList group uid
goToList groupr dpath
goToList group dpath = seeOther (renderResource' (groupResource group) dpath)
(toResponse ())
---------------------------------------------------------------
addGroupIndex :: MonadIO m => UserId -> String -> GroupDescription -> m ()
addGroupIndex (UserId uid) uri desc =
modifyMemState groupIndex $
adjustGroupIndex
(IntMap.insertWith Set.union uid (Set.singleton uri))
(Map.insert uri desc)
removeGroupIndex :: MonadIO m => UserId -> String -> m ()
removeGroupIndex (UserId uid) uri =
modifyMemState groupIndex $
adjustGroupIndex
(IntMap.update (keepSet . Set.delete uri) uid)
id
where
keepSet m = if Set.null m then Nothing else Just m
initGroupIndex :: MonadIO m => UserList -> String -> GroupDescription -> m ()
initGroupIndex ulist uri desc =
modifyMemState groupIndex $
adjustGroupIndex
(IntMap.unionWith Set.union (IntMap.fromList . map mkEntry $ Group.enumerate ulist))
(Map.insert uri desc)
where
mkEntry (UserId uid) = (uid, Set.singleton uri)
getGroupIndex :: (Functor m, MonadIO m) => UserId -> m [String]
getGroupIndex (UserId uid) =
liftM (maybe [] Set.toList . IntMap.lookup uid . usersToGroupUri) $ readMemState groupIndex
getIndexDesc :: MonadIO m => String -> m GroupDescription
getIndexDesc uri =
liftM (Map.findWithDefault nullDescription uri . groupUrisToDesc) $ readMemState groupIndex
-- partitioning index modifications, a cheap combinator
adjustGroupIndex :: (IntMap (Set String) -> IntMap (Set String))
-> (Map String GroupDescription -> Map String GroupDescription)
-> GroupIndex -> GroupIndex
adjustGroupIndex f g (GroupIndex a b) = GroupIndex (f a) (g b)
{------------------------------------------------------------------------------
Some types for JSON resources
------------------------------------------------------------------------------}
data UserNameIdResource = UserNameIdResource { ui_username :: UserName,
ui_userid :: UserId }
data UserInfoResource = UserInfoResource { ui1_username :: UserName,
ui1_userid :: UserId,
ui1_groups :: [T.Text] }
data EnabledResource = EnabledResource { ui_enabled :: Bool }
data UserGroupResource = UserGroupResource { ui_title :: T.Text,
ui_description :: T.Text,
ui_members :: [UserNameIdResource] }
#if MIN_VERSION_aeson(0,6,2)
$(deriveJSON defaultOptions{fieldLabelModifier = drop 3} ''UserNameIdResource)
$(deriveJSON defaultOptions{fieldLabelModifier = drop 4} ''UserInfoResource)
$(deriveJSON defaultOptions{fieldLabelModifier = drop 3} ''EnabledResource)
$(deriveJSON defaultOptions{fieldLabelModifier = drop 3} ''UserGroupResource)
#else
$(deriveJSON (drop 3) ''UserNameIdResource)
$(deriveJSON (drop 4) ''UserInfoResource)
$(deriveJSON (drop 3) ''EnabledResource)
$(deriveJSON (drop 3) ''UserGroupResource)
#endif
| haskell-infra/hackage-server | Distribution/Server/Features/Users.hs | bsd-3-clause | 35,069 | 31 | 22 | 10,092 | 6,632 | 3,490 | 3,142 | 510 | 18 |
module InnerEar.Types.Handle where
type Handle = String
isValidHandle :: Handle -> Bool
isValidHandle x = a && b && c
where
a = x /= [] -- not empty
b = take 1 x /= " " -- first character is not a space
c = take 1 (reverse x) /= " " -- last character is not a space
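-- Illustrative expected behaviour (added for clarity, not in the original):
--
-- > isValidHandle "alice" == True
-- > isValidHandle "" == False -- empty
-- > isValidHandle " alice" == False -- leading space
-- > isValidHandle "alice " == False -- trailing space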
| d0kt0r0/InnerEar | src/InnerEar/Types/Handle.hs | gpl-3.0 | 278 | 0 | 10 | 72 | 88 | 49 | 39 | 7 | 1 |
module Main where
data Hello = HelloWorld deriving ( Show )
data World = Helloworld deriving ( Show )
main = do
print HelloWorld
print Helloworld
| rahulmutt/ghcvm | tests/suite/similar-names/fail/FailDataConstructor2.hs | bsd-3-clause | 154 | 0 | 7 | 34 | 49 | 26 | 23 | 6 | 1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="si-LK">
<title>Groovy Support</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | thc202/zap-extensions | addOns/groovy/src/main/javahelp/org/zaproxy/zap/extension/groovy/resources/help_si_LK/helpset_si_LK.hs | apache-2.0 | 959 | 77 | 66 | 156 | 407 | 206 | 201 | -1 | -1 |
{-
rrdgraph-haskell – Haskell DSL for rendering RRD graphs using RRDtool
Copyright © 2011 Johan Kiviniemi <[email protected]>
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-}
{-# LANGUAGE TemplateHaskell #-}
module Data.RRDGraph.Tests.State (tests_State)
where
import Data.RRDGraph.Command
import Data.RRDGraph.Internal
import Data.RRDGraph.State
import Data.RRDGraph.Tests.Command
import Control.Monad
import Data.Function
import Data.Record.Label
import qualified Data.Set as S
import Test.Framework (Test)
import Test.Framework.Providers.QuickCheck2 (testProperty)
import Test.Framework.TH (testGroupGenerator)
import Test.QuickCheck
tests_State :: Test
tests_State = $(testGroupGenerator)
prop_runGraph :: Command -> Bool
prop_runGraph cmd =
runGraph (addCommand cmd) == [formatCommand cmd]
prop_runGraphRaw :: Command -> Bool
prop_runGraphRaw cmd =
runGraphRaw (addCommand cmd) == [cmd]
prop_newName_valid :: NonNegative Int -> Property
prop_newName_valid (NonNegative n) =
let n' = min n 100
names = evalGraphState (replicateM n' newName)
in printNames names $ all nameIsValid names
prop_newName_unique :: NonNegative Int -> Property
prop_newName_unique (NonNegative n) =
let n' = min n 100
names = evalGraphState (replicateM n' newName)
in printNames names $ numUniques names == n'
prop_addCommand :: [Command] -> Bool
prop_addCommand cmds =
let cmds' = take 5 $ cmds
in runGraphRaw (mapM_ addCommand cmds') == cmds'
prop_addCommandDef_duplicates :: [Command] -> Property
prop_addCommandDef_duplicates cmds =
let cmds' = take 5 . filter applies_addCommandDef $ cmds
cmds'' = cmds' ++ cmds'
names = evalGraphState (mapM addCommandDef cmds'')
in printNames names $
numUniques names == (numUniques . map commandNullDefines) cmds''
prop_addCommandDef_commands :: [Command] -> Property
prop_addCommandDef_commands cmds =
let cmds' = take 5 . filter applies_addCommandDef $ cmds
cmds'' = cmds' ++ cmds'
cmdsRes = runGraphRaw (mapM_ addCommandDef cmds'')
in printGot "cmdsRes" cmdsRes $
((==) `on` S.fromList . map commandNullDefines) cmds'' cmdsRes
applies_addCommandDef :: Command -> Bool
applies_addCommandDef (DataCommand {}) = True
applies_addCommandDef (CDefCommand {}) = True
applies_addCommandDef (VDefCommand {}) = True
applies_addCommandDef (GraphCommand {}) = False
-- Helpers.
commandNullDefines :: Command -> Command
commandNullDefines = setL cmdDefines (Name "")
printGot :: (Show a, Testable prop) => String -> a -> prop -> Property
printGot name value = printTestCase ("Got " ++ name ++ ": " ++ show value)
printNames :: Testable prop => [Name] -> prop -> Property
printNames = printGot "names" . map fromName
| ion1/rrdgraph-haskell | Data/RRDGraph/Tests/State.hs | isc | 3,405 | 1 | 12 | 565 | 778 | 406 | 372 | -1 | -1 |
module Server where
import Config
import Control.Lens
import Control.Monad.IO.Class (liftIO)
import CSRUtils
import Data.Acid
import Data.Acid.Advanced (query', update')
import Data.Monoid (mconcat)
import Data.Text (Text, pack)
import qualified Data.Text.Lazy as LT
import Data.Time
import Data.UUID.V4
import Network.HTTP.Types.Status
import Network.Wai (remoteHost)
import Network.Wai.Logger (showSockAddr)
import Notify
import OpenSSL.EVP.PKey
import Storage
import Types.Certificate
import Types.CSR
import Web.Scotty
type Notifier = CSR -> ActionM ()
server :: Config -> SomeKeyPair -> AppState -> IO ()
server config key state = scotty scottyPort $ do
let notify = notifyForCSR config
clientRoutes notify state
adminRoutes state key
where
scottyPort = config ^. port
-- | Routes accessible by all clients under /
clientRoutes :: Notifier -> AppState -> ScottyM ()
clientRoutes notify state = do
post "/client/csr" $ handlePostCSR notify state
get "/client/csr/:csrid" $ handlePollCSRState state
get "/client/cert/:fcertid" $ handleGetCertificate state
-- | Routes for administrators accessible under /admin
-- These routes should be protected with client-certificate checks. Refer to
-- the provided nginx configuration for an example.
adminRoutes :: AppState -> SomeKeyPair -> ScottyM ()
adminRoutes state key = do
get "/admin/csr/all" $ handleListRequests state
get "/admin/csr/pending" $ handleListByStatus state Pending
get "/admin/csr/rejected" $ handleListByStatus state Rejected
get "/admin/csr/reject/:csrid" $ handleRejectCSR state
get "/admin/csr/sign/:csrid" $ handleSignCertificate state key
-- Error helper
serveError :: Status -> Text -> ActionM ()
serveError s t = status s >> json t
-- Posting CSRs
handlePostCSR :: Notifier -> AppState -> ActionM ()
handlePostCSR notify state = do
now <- liftIO getCurrentTime
csrId <- liftIO (fmap CSRID nextRandom)
csrBody <- param "csr"
maybeCsrBuilder <- liftIO (parseCSR csrBody)
clientIP <- fmap (showSockAddr . remoteHost) request
case maybeCsrBuilder of
Nothing -> serveError badRequest400 "Could not parse CSR"
(Just builder) -> do
let csr = builder csrId (RequestingHost $ pack clientIP) now Pending
storeCSR state csr
status created201
notify csr
json csr
storeCSR :: AppState -> CSR -> ActionM CSR
storeCSR state csr = update' state $ InsertCSR csr
-- Polling CSR status
handlePollCSRState :: AppState -> ActionM ()
handlePollCSRState state = do
csrId <- param "csrid"
maybeCsr <- query' state $ RetrieveCSR csrId
case maybeCsr of
Nothing -> serveError notFound404 "Could not find CSR"
(Just csr) -> json $ csr ^. requestStatus
-- Retrieving certificates
handleGetCertificate :: AppState -> ActionM ()
handleGetCertificate state = do
certId <- param "certid"
maybeCert <- query' state $ RetrieveCert certId
case maybeCert of
Nothing -> status notFound404
(Just crt) -> json crt
-- * Administration functions
handleListRequests :: AppState -> ActionM ()
handleListRequests state = do
csrList <- query' state ListCSR
json csrList
handleListByStatus :: AppState -> CSRStatus -> ActionM ()
handleListByStatus state status = do
csrList <- query' state $ ListCSRByStatus status
json csrList
handleRejectCSR :: AppState -> ActionM ()
handleRejectCSR state = do
csrId <- param "csrid"
updatedCsr <- update' state $ RejectCSR csrId
case updatedCsr of
Nothing -> serveError notFound404 "Could not find CSR"
(Just csr) -> json csr
-- Returns a reason if signing is not possible, and Nothing otherwise.
checkSignable :: CSR -> Maybe Text
checkSignable csr =
case csr ^. requestStatus of
    Rejected -> Just "This CSR is rejected and cannot be signed"
    Signed _ -> Just "This CSR is already signed"
Pending -> Nothing
signAndRespond :: AppState -> CSR -> SomeKeyPair -> ActionM ()
signAndRespond state csr key = do
ca <- update' state GetNextSerialNumber
cert <- liftIO $ signCSR csr ca key
update' state $ SetSignedCSR (csr ^. requestId) $ cert ^. certId
update' state $ InsertCertificate cert
json cert
handleSignCertificate :: AppState -> SomeKeyPair -> ActionM ()
handleSignCertificate state key = do
csrId <- param "csrid"
maybeCsr <- query' state $ RetrieveCSR csrId
case maybeCsr of
Nothing -> serveError notFound404 "Could not find CSR"
(Just csr) ->
maybe (signAndRespond state csr key) (serveError conflict409)
(checkSignable csr)
| tazjin/herbert | src/Server.hs | mit | 4,895 | 0 | 18 | 1,207 | 1,299 | 622 | 677 | 111 | 3 |
import TestUtils
import VectorsHMAC
import qualified Data.ByteString as B
import Crypto.Nettle.HMAC
import Crypto.Nettle.Hash
assertHMAC :: HashAlgorithm a => (B.ByteString, B.ByteString, String) -> Tagged a Assertion
assertHMAC (key, msg, h) = do
h' <- hmac key msg
return $ assertEqualHex "" (hs h) $ B.take (B.length $ hs h) h'
testHMAC :: HashAlgorithm a => Tagged a Test
testHMAC = do
name <- hashName
vectors <- findHmacTestVectors ("HMAC-" ++ name)
results <- mapM assertHMAC vectors
return $ testCases ("testing HMAC-" ++ name) results
-- return $ debugTestCases ("testing HMAC-" ++ name) results
main = defaultMain
[ testHMAC `witness` (undefined :: MD5)
, testHMAC `witness` (undefined :: RIPEMD160)
, testHMAC `witness` (undefined :: SHA1)
, testHMAC `witness` (undefined :: SHA224)
, testHMAC `witness` (undefined :: SHA256)
, testHMAC `witness` (undefined :: SHA384)
, testHMAC `witness` (undefined :: SHA512)
]
| stbuehler/haskell-nettle | src/Tests/HMAC.hs | mit | 946 | 4 | 11 | 158 | 326 | 183 | 143 | 23 | 1 |
bubbleSort :: (Ord a) => [a] -> [a]
bubbleSort x = (!!length x) $ iterate bubble x
where bubble (x:y:r)
| x <= y = x : bubble (y:r)
| otherwise = y : bubble (x:r)
bubble x = x
| Gathros/algorithm-archive | contents/bubble_sort/code/haskell/bubbleSort.hs | mit | 209 | 0 | 11 | 73 | 125 | 63 | 62 | 6 | 2 |
module Memento.Logger where
import Data.Text (Text, unpack)
import System.Console.ANSI
--------------------------------------------------------------------------------
reset :: IO ()
reset = setSGR [Reset]
--------------------------------------------------------------------------------
colorised :: Color -> Text -> IO ()
colorised col msg = do
setSGR [SetColor Foreground Vivid col]
putStrLn . unpack $ msg
reset
--------------------------------------------------------------------------------
green :: Text -> IO ()
green = colorised Green
--------------------------------------------------------------------------------
red :: Text -> IO ()
red = colorised Red
--------------------------------------------------------------------------------
yellow :: Text -> IO ()
yellow = colorised Yellow
--------------------------------------------------------------------------------
cyan :: Text -> IO ()
cyan = colorised Cyan
| adinapoli/memento | src/Memento/Logger.hs | mit | 935 | 0 | 9 | 101 | 198 | 104 | 94 | 18 | 1 |
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
-- | Definition for a default Entity to use with a SQL event store.
module Eventful.Store.Sql.DefaultEntity
( SqlEvent (..)
, SqlEventId
, migrateSqlEvent
, defaultSqlEventStoreConfig
) where
import Database.Persist.TH
import Eventful.Store.Class
import Eventful.UUID
import Eventful.Store.Sql.Operations
import Eventful.Store.Sql.JSONString
import Eventful.Store.Sql.Orphans ()
share [mkPersist sqlSettings, mkMigrate "migrateSqlEvent"] [persistLowerCase|
SqlEvent sql=events
Id SequenceNumber sql=sequence_number
uuid UUID
version EventVersion
event JSONString
UniqueUuidVersion uuid version
deriving Show
|]
defaultSqlEventStoreConfig :: SqlEventStoreConfig SqlEvent JSONString
defaultSqlEventStoreConfig =
SqlEventStoreConfig
SqlEvent
SqlEventKey
(\(SqlEventKey seqNum) -> seqNum)
sqlEventUuid
sqlEventVersion
sqlEventEvent
SqlEventId
SqlEventUuid
SqlEventVersion
SqlEventEvent
| jdreaver/eventful | eventful-sql-common/src/Eventful/Store/Sql/DefaultEntity.hs | mit | 1,124 | 0 | 9 | 167 | 147 | 91 | 56 | 30 | 1 |
module GUBS.MaxPolynomial where
import Data.List (nub)
import qualified Text.PrettyPrint.ANSI.Leijen as PP
import Data.Foldable (toList)
import GUBS.Utils
import GUBS.Algebra
import qualified GUBS.Term as T
import GUBS.Constraint
import qualified GUBS.Polynomial as P
import qualified GUBS.Solver.Class as S
data MaxPoly v c =
Var v
| Const c
| Plus (MaxPoly v c) (MaxPoly v c)
| Mult (MaxPoly v c) (MaxPoly v c)
| Max (MaxPoly v c) (MaxPoly v c)
deriving (Show, Eq, Functor, Foldable, Traversable)
constant :: c -> MaxPoly v c
constant = Const
variable :: v -> MaxPoly v c
variable = Var
variablesDL :: MaxPoly v c -> [v] -> [v]
variablesDL (Var v) = (:) v
variablesDL (Const _) = id
variablesDL (Plus p q) = variablesDL p . variablesDL q
variablesDL (Mult p q) = variablesDL p . variablesDL q
variablesDL (Max p q) = variablesDL p . variablesDL q
variables :: MaxPoly v c -> [v]
variables p = variablesDL p []
coefficients :: MaxPoly v c -> [c]
coefficients = toList
instance IsNat c => IsNat (MaxPoly v c) where
fromNatural_ = Const . fromNatural
instance (Eq c, Additive c) => Additive (MaxPoly v c) where
zero = Const zero
Const i .+ t2 | zero == i = t2
t1 .+ Const j | zero == j = t1
Const i .+ Const j = Const (i .+ j)
t1 .+ t2 = Plus t1 t2
instance (Eq c, Additive c) => Max (MaxPoly v c) where
Const i `maxA` t2 | zero == i = t2
t1 `maxA` Const j | zero == j = t1
t1 `maxA` t2 = Max t1 t2
instance (Eq c, Additive c, Multiplicative c) => Multiplicative (MaxPoly v c) where
one = Const one
Const i .* t2 | zero == i = zero
| one == i = t2
t1 .* Const j | zero == j = zero
| one == j = t1
Const i .* Const j = Const (i .* j)
t1 .* t2 = Mult t1 t2
-- operations
fromMaxPoly :: (Max a, SemiRing a) => (v -> a) -> (c -> a) -> MaxPoly v c -> a
fromMaxPoly var _ (Var v) = var v
fromMaxPoly _ con (Const c) = con c
fromMaxPoly var con (Plus p q) = fromMaxPoly var con p .+ fromMaxPoly var con q
fromMaxPoly var con (Mult p q) = fromMaxPoly var con p .* fromMaxPoly var con q
fromMaxPoly var con (Max p q) = fromMaxPoly var con p `maxA` fromMaxPoly var con q
substitute :: (Eq c, SemiRing c) => (v -> MaxPoly v' c) -> MaxPoly v c -> MaxPoly v' c
substitute s (Var v) = s v
substitute _ (Const c) = Const c
substitute s (Plus p q) = substitute s p .+ substitute s q
substitute s (Mult p q) = substitute s p .* substitute s q
substitute s (Max p q) = substitute s p `maxA` substitute s q
-- * max elimination
splitMax :: (Ord v, IsNat c, SemiRing c) => MaxPoly v c -> [P.Polynomial v c]
splitMax (Var v) = [P.variable v]
splitMax (Const c) = [P.coefficient c]
splitMax (Plus p q) = (.+) <$> splitMax p <*> splitMax q
splitMax (Mult p q) = (.*) <$> splitMax p <*> splitMax q
splitMax (Max p q) = splitMax p ++ splitMax q
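-- For instance (illustrative, written with the raw constructors):
-- splitMax (Plus (Max (Var "x") (Const 1)) (Var "y")) yields the two
-- polynomials x + y and 1 + y, one for each branch of the inner max.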
-- maxElim :: (Ord v, Eq c, IsNat c, SemiRing c) => Constraint (MaxPoly v c) -> [ConditionalConstraint (P.Polynomial v c)]
-- maxElim ieq = walk [([],ieq)] where
-- walk [] = []
-- walk ((ps,ieq):ceqs) =
-- case splitIEQ ieq of
-- Nothing -> CC { premises = ps, constraint = fmap toPoly ieq } : walk ceqs
-- Just (p,q,ctx) -> walk (cp:cq:ceqs)
-- where
-- p' = toPoly p
-- q' = toPoly q
-- cp = ((p' :>=: q') : ps,ctx p)
-- cq = ((q' :>=: (p' .+ one)) : ps,ctx q)
-- toPoly (Var v) = P.variable v
-- toPoly (Const c) = P.coefficient c
-- toPoly (Plus p q) = toPoly p .+ toPoly q
-- toPoly (Mult p q) = toPoly p .* toPoly q
-- toPoly (Max {}) = error "maxElim: polynomial still contains max"
-- splitIEQ (p1 :>=: p2) =
-- case (splitMaxPoly p1, splitMaxPoly p2) of
-- (Right (p,q,ctx), _ ) -> Just (p,q, \pi -> ctx pi :>=: p2)
-- (_ , Right (p,q,ctx)) -> Just (p,q, \pi -> p1 :>=: ctx pi)
-- (_ , _ ) -> Nothing
-- splitBinOp c p1 p2 =
-- case (splitMaxPoly p1, splitMaxPoly p2) of
-- (Right (p1',p2',ctx), _ ) -> Right (p1',p2', \ pi' -> c (ctx pi') p2)
-- (_ , Right (p1',p2',ctx)) -> Right (p1',p2', \ pi' -> c p1 (ctx pi'))
-- (_ , _ ) -> Left (c p1 p2)
-- splitMaxPoly p@(Var _) = Left p
-- splitMaxPoly p@(Const _) = Left p
-- splitMaxPoly (Max p1 p2) =
-- case splitBinOp Max p1 p2 of {Left _ -> Right (p1, p2, id); s -> s}
-- splitMaxPoly (Plus p1 p2) = splitBinOp Plus p1 p2
-- splitMaxPoly (Mult p1 p2) = splitBinOp Mult p1 p2
simp :: (Ord c, Ord v, IsNat c, SemiRing c) => MaxPoly v c -> MaxPoly v c
simp = fromPolyList . filterSubsumed . nub . splitMax where --
fromPolyList [] = zero
fromPolyList ps = maximumA (map fromPoly ps)
fromPoly = P.fromPolynomial variable constant
-- TODO max(x0 + x1 + x2,1 + x0)
filterSubsumed ps = foldr (\ p -> filter (not . subsumes p)) ps ps
p1 `subsumes` p2 =
and [ c1 >= c2 | (c1 :>=: c2) <- P.absolutePositive (p1 `P.minus` p2)]
&& p1 /= p2
degree :: MaxPoly v c -> Int
degree (Var _) = 1
degree (Const _) = 0
degree (Plus p q) = degree p `max` degree q
degree (Max p q) = degree p `max` degree q
degree (Mult p q) = degree p + degree q
-- pretty printing
instance (Eq c, Ord v, IsNat c, SemiRing c, PP.Pretty v, PP.Pretty c) => PP.Pretty (MaxPoly v c) where
-- pretty (Var v) = PP.pretty v
-- pretty (Const c) = PP.pretty c
-- pretty (Plus p1 p2) = PP.parens (PP.pretty p1 PP.<+> PP.text "+" PP.<+> PP.pretty p2)
-- pretty (Mult p1 p2) = PP.parens (PP.pretty p1 PP.<+> PP.text "*" PP.<+> PP.pretty p2)
-- pretty (Max p1 p2) = PP.parens (PP.pretty p1 PP.<+> PP.text "max" PP.<+> PP.pretty p2)
pretty = pp . splitMax where
pp [] = PP.text "0"
pp [t] = PP.pretty t
pp ts = PP.text "max" PP.<> PP.tupled (PP.pretty `map` ts)
| mzini/gubs | src/GUBS/MaxPolynomial.hs | mit | 6,048 | 0 | 15 | 1,768 | 1,919 | 982 | 937 | 89 | 2 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE RecordWildCards #-}
module SWTOR.UIProfile.Placement
( Placement (..), Bounds (..)
, place
) where
import Control.Monad.Writer
import Data.Foldable
import Data.List
import SWTOR.UIProfile.Layout
data Placement = Placement{ placAlign :: Alignment
, placPos :: (Rational, Rational)
, placBounds :: Bounds
, placElement :: Element
}
deriving (Eq, Ord, Show, Read)
data Bounds = Bounds{ boundsL :: Rational
, boundsT :: Rational
, boundsR :: Rational
, boundsB :: Rational
}
deriving (Eq, Ord, Show, Read)
place :: Layout -> (Globals, [Placement])
place Screen{..} = (scrGlobals, execWriter (traverse_ placeScreen scrPrims))
placeScreen :: MonadWriter [Placement] m => LayoutPrim -> m ()
placeScreen ly
| Just screenAlign <- layoutPrimParentAlignment ly =
placePrim screenAlign (Bounds 0 0 0 0) ly
| otherwise = pure () -- No elements inside.
placePrim :: MonadWriter [Placement] m => Alignment -> Bounds -> LayoutPrim -> m ()
placePrim screenAlign parentBounds prim =
case prim of
Anchor{..} -> do
let thisBounds = connect (lyParentAlign, parentBounds) (lyThisAlign, lyElement)
plac = Placement { placAlign = screenAlign
, placPos = boundsAnchor screenAlign thisBounds
, placBounds = thisBounds
, placElement = lyElement
}
tell [plac]
traverse_ (placePrim screenAlign thisBounds) lyChildren
Box{..} -> do
(_, placements) <- listen $ traverse_ (placePrim screenAlign parentBounds) lyInside
let bounds = case placements of
[] -> parentBounds -- If nothing inside, use the parent bounds.
_ -> (foldl1' unionBounds . map placBounds) placements
traverse_ (placePrim screenAlign bounds) lyChildren
unionBounds :: Bounds -> Bounds -> Bounds
unionBounds (Bounds la ta ra ba) (Bounds lb tb rb bb) =
Bounds (min la lb) (min ta tb) (max ra rb) (max ba bb)
connect :: (Alignment, Bounds) -> (Alignment, Element) -> Bounds
connect (parentAlign, parent) (childAlign, child) =
Bounds x y (x + w) (y + h)
where
(x, y) = (xParent + xOff + xAlign, yParent + yOff + yAlign)
(w, h) = elementSize child
(xParent, yParent) = boundsAnchor parentAlign parent
(xOff, yOff) = elemOffset child
(xAlign, yAlign) =
case childAlign of
TL -> (0, 0)
BL -> (0, -h)
L -> (0, -0.5 * h)
TR -> (-w, 0)
BR -> (-w, -h)
R -> (-w, -0.5 * h)
T -> (-0.5 * w, 0)
B -> (-0.5 * w, -h)
C -> (-0.5 * w, -0.5 * h)
boundsAnchor :: Alignment -> Bounds -> (Rational, Rational)
boundsAnchor al Bounds{..} =
case al of
TL -> (boundsL, boundsT)
BL -> (boundsL, boundsB)
L -> (boundsL, 0.5 * (boundsT + boundsB))
TR -> (boundsR, boundsT)
BR -> (boundsR, boundsB)
R -> (boundsR, 0.5 * (boundsT + boundsB))
T -> (0.5 * (boundsL + boundsR), boundsT)
B -> (0.5 * (boundsL + boundsR), boundsB)
C -> (0.5 * (boundsL + boundsR), 0.5 * (boundsT + boundsB))
| ion1/swtor-ui | src/SWTOR/UIProfile/Placement.hs | mit | 3,322 | 0 | 19 | 1,033 | 1,170 | 638 | 532 | 76 | 9 |
main = putStrLn . show $ sum $ takeWhile (< 2000000) primes
primes = sieve [2..]
where sieve (p:xs) = p : sieve [x | x <- xs, mod x p > 0]
| benji6/project-euler-solutions | haskell/10.hs | mit | 141 | 0 | 12 | 35 | 86 | 44 | 42 | 3 | 1 |
module Graphics.Ninja.GL.Extensions
( openGLExtensions
, requireExtensions
) where
import Control.Exception
import Control.Monad
import Data.List ((\\))
import Foreign.C.String
import Foreign.Ptr
import Graphics.GL.Core33
import Graphics.Ninja.GL.Exception
import Graphics.Ninja.Util
-- * OpenGL Extensions
-- | Checks whether the given extensions are available and fails with an exception otherwise.
requireExtensions :: [String] -> IO ()
requireExtensions required = do
available <- openGLExtensions
let missing = required \\ available
when (not $ null missing) $ do
throwIO (ExtensionsUnavailable missing)
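-- Illustrative call (the extension name is only an example); this must run
-- after an OpenGL context has been made current:
--
-- > requireExtensions ["GL_ARB_texture_storage"]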
-- | Returns a list of all available OpenGL extensions.
openGLExtensions :: IO [String]
openGLExtensions = do
num <- withPtrOut $ glGetIntegerv GL_NUM_EXTENSIONS
mapM (glGetStringi GL_EXTENSIONS . fromIntegral >=> peekCString . castPtr) [0..num-1]
| fatho/ninja | src/Graphics/Ninja/GL/Extensions.hs | mit | 952 | 0 | 12 | 201 | 213 | 116 | 97 | -1 | -1 |
module Morph (
toSnake,
toSnakeCaps,
toDashed,
toHuman,
toTitle,
toCamel,
toUpperCamel
) where
import qualified Data.Char as C
import qualified Re as Re
toSnake :: String -> String
toSnake s = map C.toLower s3
  where
    s1 = Re.replace "([A-Z\\d])([A-Z][a-z\\d])" s "\\1_\\2"
    s2 = Re.replace "([a-z])([2-Z])" s1 "\\1_\\2"
    s3 = Re.replace "[-. ]" s2 "_"
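-- e.g. toSnake "FooBar" == "foo_bar" (assuming Re.replace performs the usual
-- backreference substitution).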
toSnakeCaps :: String -> String
toSnakeCaps s = map C.toUpper $ toSnake s
toDashed :: String -> String
toDashed s = map C.toLower s3
  where
    s1 = Re.replace "([A-Z\\d])([A-Z][a-z\\d])" s "\\1-\\2"
    s2 = Re.replace "([a-z])([2-Z])" s1 "\\1-\\2"
    s3 = Re.replace "[_. ]" s2 "-"
toHuman :: String -> String
toHuman s = toUpperFirstChar $ map C.toLower s3
  where
    s1 = Re.replace "([A-Z\\d])([A-Z][a-z\\d])" s "\\1 \\2"
    s2 = Re.replace "([a-z])([2-Z])" s1 "\\1 \\2"
    s3 = Re.replace "[-_.]" s2 " "
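-- e.g. toHuman "employee_salary" == "Employee salary" (again assuming the
-- usual Re.replace semantics).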
toTitle :: String -> String
toTitle s = Re.replaceMap " ([a-z])" (toHuman s) f
where
f ps = " " ++ (map C.toUpper $ ps!!1)
toCamel :: String -> String
toCamel s = Re.replaceMap "_([a-z])" (toSnake s) f
where
f ps = map C.toUpper $ ps!!1
toUpperCamel :: String -> String
toUpperCamel s = toUpperFirstChar $ toCamel s
toUpperFirstChar :: String -> String
toUpperFirstChar s = (map C.toUpper $ take 1 s) ++ (drop 1 s)
| SKAhack/hs-morph | src/Morph.hs | mit | 1,387 | 6 | 19 | 326 | 493 | 243 | 250 | 40 | 1 |
import Database.Kayvee.Kayvee
import Database.Kayvee.GC
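-- Small demo: open the store, write a few key/value pairs, look two of them
-- up again, print the results and run the garbage collector before
-- disconnecting.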
main :: IO ()
main = do
connect
put "this is the key to the first element" "this is the contents of the first insertion"
put "apple" "banana"
put "apple" "fruit"
x <- get "this is the key to the first element"
y <- get "apple"
print x
print y
runGc
disconnect
| deweyvm/kayvee | src/Main.hs | mit | 357 | 0 | 8 | 97 | 93 | 40 | 53 | 14 | 1 |
{-|
Module : Language.GoLite.Parser
Description : High level parsers and re-exports
Copyright : (c) Jacob Errington and Frederic Lafrance, 2016
License : MIT
Maintainer : [email protected]
Stability : experimental
This module contains the top-most parser 'package' as well as parsers that
combine both declarations and statements.
-}
module Language.GoLite.Parser
( packageP
-- * Declarations
, topLevelDeclP
, funDecl
, module Language.GoLite.Parser.Decl
-- * Statements
, module Language.GoLite.Parser.Stmt
) where
import Language.Common.Misc ( distTuple )
import Language.GoLite.Parser.Core
import Language.GoLite.Parser.Decl
import Language.GoLite.Parser.Stmt
-- | Parses a package: the package header (`package` keyword followed by an
-- identifier), followed by a list of top-level declarations.
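-- For instance, a source file beginning with the header `package main`
-- followed by its top-level declarations fits this parser.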
packageP :: Parser SrcAnnPackage
packageP = do
name <- (kwPackage >>= noSemiP) >> (identifier >>= requireSemiP)
decls <- many topLevelDeclP
pure $ Package name (concat decls)
-- | Parses a top-level declaration: either a regular declaration (type/var) or
-- a function declaration. Since declarations can be distributed, this returns
-- a list.
topLevelDeclP :: Parser [SrcAnnTopLevelDecl]
topLevelDeclP = typ <|> var <|> fun
where
typ = fmap (map fromDeclStmt) typeDeclP
var = fmap (map fromDeclStmt) varDeclP
fun = fmap (:[]) (fmap TopLevelFun funDecl)
-- | Converts a declaration statement into a top-level declaration. This is
-- so we can just use the same parser twice.
fromDeclStmt :: SrcAnnStatement -> SrcAnnTopLevelDecl
fromDeclStmt (Fix (Ann _ (DeclStmt d))) = TopLevelDecl d
fromDeclStmt _ = error "Not a DeclStmt"
-- | Parses a function declaration: The func keyword, followed by a function
-- name, a parenthesized potentially empty list of parameter fields
-- (comma-separated list of identifiers then a type) separated by commas, an
-- optional return type and a block.
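-- e.g. a declaration such as `func add(x, y int) int { ... }` has this shape.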
funDecl :: Parser SrcAnnFunDecl
funDecl = do
name <- (kwFunc >>= noSemiP) >> (identifier >>= noSemiP)
params <- (parens $ (field >>= noSemiP) `sepBy` comma) >>= noSemiP
ret <- optional (type_ >>= noSemiP)
b <- blockP >>= requireSemiP
pure $ FunDecl name (concatMap (uncurry distTuple) params) ret b
| djeik/goto | libgoto/Language/GoLite/Parser.hs | mit | 2,281 | 0 | 13 | 406 | 406 | 224 | 182 | 30 | 1 |
module Web.Markury.Model.Input where
import Data.Text ( Text )
data BookmarkInput = BookmarkInput
{ bookmarkInputTitle :: Text
, bookmarkInputDescription :: Text
, bookmarkInputUrl :: Text
, bookmarkInputTags :: Text
}
data UserInput = UserInput
{ userInputEmail :: Text
, userInputPassword :: Text
}
data TagInput = TagInput
{ tagInputTitle :: Text
}
data LoginInput = LoginInput
{ loginInputEmail :: Text
, loginInputPassword :: Text
}
| y-taka-23/markury | src/Web/Markury/Model/Input.hs | mit | 495 | 0 | 8 | 121 | 108 | 68 | 40 | 15 | 0 |