column              dtype    values
code                string   lengths 5 to 1.03M
repo_name           string   lengths 5 to 90
path                string   lengths 4 to 158
license             string   15 distinct values
size                int64    5 to 1.03M
n_ast_errors        int64    0 to 53.9k
ast_max_depth       int64    2 to 4.17k
n_whitespaces       int64    0 to 365k
n_ast_nodes         int64    3 to 317k
n_ast_terminals     int64    1 to 171k
n_ast_nonterminals  int64    1 to 146k
loc                 int64    -1 to 37.3k
cycloplexity        int64    -1 to 1.31k
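To make the schema concrete before the rows below, here is a minimal Haskell sketch (not part of the dataset itself) of one row as a record. The field names simply mirror the column names above; the concrete types (Text for the string columns, Int for the int64 columns) and the module name are assumptions for illustration.

module RowSchema where

import Data.Text (Text)

-- One dataset row; field names mirror the columns in the schema above.
-- Some rows carry -1 in loc/cycloplexity, presumably when the metric is unavailable.
data CodeSample = CodeSample
  { code             :: Text
  , repoName         :: Text
  , path              :: Text
  , license          :: Text
  , size             :: Int
  , nAstErrors       :: Int
  , astMaxDepth      :: Int
  , nWhitespaces     :: Int
  , nAstNodes        :: Int
  , nAstTerminals    :: Int
  , nAstNonterminals :: Int
  , loc              :: Int
  , cycloplexity     :: Int
  } deriving (Show)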
import Data.Ratio
import Data.List (sort, nub, maximumBy)
import Data.Function (on)
import Common.Utils (if')

ratio2Integer :: (Ratio Integer) -> Integer
ratio2Integer r = if' (a `mod` b == 0) (a `div` b) (-1)
  where a = numerator r
        b = denominator r

solve :: [Integer] -> Integer
solve xs = getConsecutive $ concat [ dfs (kill n rs) (Just (rs !! n)) | n <- [0 .. 3] ]
  where
    kill n xs = (take n xs) ++ (drop (n + 1) xs)
    rs = map (\x -> x % 1) xs

    dfs :: [Ratio Integer] -> Maybe (Ratio Integer) -> [Maybe (Ratio Integer)]
    dfs _ Nothing = []
    dfs [] value = [value]
    dfs can value = concat ret
      where ret = do
              useIndex <- [0 .. (pred . length) can]
              let newCan = kill useIndex can
              op <- [1 .. 6]
              let newValue = value >>= (apply op (can !! useIndex))
              return $ dfs newCan newValue

    apply 1 x y = Just $ x + y
    apply 2 x y = Just $ x - y
    apply 3 x y = Just $ y - x
    apply 4 x y = Just $ x * y
    apply 5 x 0 = Nothing
    apply 5 x y = Just $ x / y
    apply 6 0 y = Nothing
    apply 6 x y = Just $ y / x

    getConsecutive :: [Maybe (Ratio Integer)] -> Integer
    getConsecutive rs = pred $ snd $ head $ dropWhile (\(x, y) -> x == y) $ zip xs [1 .. ]
      where xs :: [Integer]
            xs = dropWhile (<= 0) $ nub . sort $ map (\(Just x) -> ratio2Integer x) $ filter (\x -> x /= Nothing) rs

main = print $ helper 0 $ snd $ maximumBy (compare `on` fst) [ (solve x, x) | x <- comb4 ]
  where
    comb4 = do
      a <- [1 .. 9]
      b <- [a + 1 .. 9]
      c <- [b + 1 .. 9]
      d <- [c + 1 .. 9]
      return [a, b, c, d]
    helper ret [] = ret
    helper ret (x:xs) = helper (ret * 10 + x) xs
foreverbell/project-euler-solutions
src/93.hs
bsd-3-clause
1,736
0
18
589
907
475
432
43
10
{-# LANGUAGE RankNTypes #-}

-----------------------------------------------------------------------------
-- |
-- Copyright   : (C) 2015 Dimitri Sabadie
-- License     : BSD3
--
-- Maintainer  : Dimitri Sabadie <[email protected]>
-- Stability   : experimental
-- Portability : portable
--
----------------------------------------------------------------------------

module Quaazar.System.Resource (
    -- * Resource map & manager
    ResourceMap(..)
  , getResourceMap
  , getSimpleManager
  , mkResourceManager
    -- * Re-exported
  , module X
  ) where

import Control.Lens
import Control.Monad.Error.Class as X ( MonadError )
import Control.Monad.Trans as X ( MonadIO(..) )
import Data.IORef
import Data.Map as M ( empty )
import Quaazar.System.Loader as X
import Quaazar.Utils.Log as X ( Log, MonadLogger )
import Quaazar.Utils.Scoped as X ( MonadScoped )

-- IO resource map with reference semantic. Can be shared between computations.
data ResourceMap a = ResourceMap {
    insertRes :: forall m. (MonadIO m) => String -> a -> m ()
  , lookupRes :: forall m. (MonadIO m) => String -> m (Maybe a)
  }

getResourceMap :: (MonadIO m) => m (ResourceMap a)
getResourceMap = do
  ref <- liftIO $ newIORef empty
  pure $ ResourceMap (inject_ ref) (retrieve_ ref)
  where
    inject_ ref name r = liftIO . modifyIORef ref $ at name .~ Just r
    retrieve_ ref name = liftIO . fmap (view $ at name) $ readIORef ref

mkResourceManager :: (MonadIO m,MonadScoped IO m,MonadLogger m,MonadError Log m)
                  => ((String -> a -> m ()) -> (String -> m (Maybe a)) -> m b)
                  -> m b
mkResourceManager builder = do
  resMap <- getResourceMap
  builder (insertRes resMap) (lookupRes resMap)

-- Simple manager with default implementation for simple managed objects.
getSimpleManager :: (MonadIO m,MonadScoped IO m,MonadLogger m,MonadError Log m,Load () a)
                 => m (String -> m a)
getSimpleManager = mkResourceManager $ \insert lkp -> pure $ \name ->
  lkp name >>= \case
    Just r -> pure r
    Nothing -> do
      r <- load_ name
      insert name r
      pure r
phaazon/quaazar
src/Quaazar/System/Resource.hs
bsd-3-clause
2,122
0
16
463
604
329
275
-1
-1
module Lib
    ( someFunc
    ) where

import Ranked.Type
import Ranked.Infer
import State (Infer)

uni :: Infer T
uni = do
  a <- newVar 0
  b <- newVar 0
  c <- newVar 0
  unify (TArrow [TConst "int", TConst "bool"] $ TConst "int") (TArrow [a, b] c)
  return $ TArrow [a, b] c

someFunc :: IO ()
someFunc = do
  t <- uni
  print t
zjhmale/HMF
src/Lib.hs
bsd-3-clause
346
0
12
101
163
81
82
16
1
{-# LANGUAGE ViewPatterns #-}

-- | Description: Run /Retcon/ as a one-shot command.
--
module Retcon.Program.Once (
    -- * One-shot `retcon` on documents
    Request(..)
  , retconOnce
    -- * Run store commands
  , runPSQL
  ) where

import Control.Exception
import Control.Monad.IO.Class
import Data.Aeson
import Retcon.Configuration
import Retcon.DataSource
import Retcon.Document
import Retcon.Identifier
import Retcon.Monad
import Retcon.Store
import Retcon.Store.PostgreSQL

--------------------------------------------------------------------------------

-- * Operations on documents

data Request
    = Create { commandKey :: ForeignKey }
    | Read   { commandKey :: ForeignKey }
    | Update { commandKey :: ForeignKey }
    | Delete { commandKey :: ForeignKey }
  deriving (Eq, Show)

-- | Run a single command on documents.
--
retconOnce :: Request -> Configuration -> RetconMonad ()
retconOnce req cfg = do
    let rk = commandKey req
    ds <- either error return $ getDataSource cfg (fkEntity rk) (fkSource rk)
    case req of
        Create fk -> inputDocument fk >>= exec . createDocument ds
        Read   fk -> exec $ readDocument ds fk
        Update fk -> inputDocument rk >>= exec . updateDocument ds fk
        Delete fk -> exec $ deleteDocument ds fk
  where
    exec :: (MonadIO m, Show a) => DSMonad m a -> m ()
    exec a = do
        res <- runDSMonad a
        case res of
            Left  e -> error $ show e
            Right v -> liftIO $ print v

-- | Read JSON from standard input and produce a 'Document'.
inputDocument :: MonadIO m => ForeignKey -> m Document
inputDocument fk =
    let e = fkEntity fk
        s = fkSource fk
    in  return $ Document e s Null

--------------------------------------------------------------------------------

-- * Low-level operations on a persistent store.

runPSQL :: (PGStore -> IO a) -> Configuration -> IO a
runPSQL act (configServer -> (_,_,pg_conn)) =
    bracket (initBackend (PGOpts pg_conn)) closeBackend act
anchor/retcon
lib/Retcon/Program/Once.hs
bsd-3-clause
2,144
0
13
596
536
279
257
53
5
{-# OPTIONS_GHC -Wall #-} {-# LANGUAGE OverloadedStrings #-} module Reporting.Error ( Error(..) , toString , toStderr ) where import qualified Text.PrettyPrint.ANSI.Leijen as P import qualified Elm.Compiler as Compiler import qualified Elm.Compiler.Module as Module import qualified Elm.Package as Pkg import qualified Elm.Utils as Utils import qualified Reporting.Error.Assets as Asset import qualified Reporting.Error.Bump as Bump import qualified Reporting.Error.Compile as Compile import qualified Reporting.Error.Crawl as Crawl import qualified Reporting.Error.Deps as Deps import qualified Reporting.Error.Diff as Diff import qualified Reporting.Error.Help as Help import qualified Reporting.Error.Http as Http import qualified Reporting.Error.Publish as Publish -- ALL POSSIBLE ERRORS data Error = NoElmJson | Assets Asset.Error | Bump Bump.Error | Compile Compile.Error [Compile.Error] | Crawl Crawl.Error | Cycle [Module.Raw] -- TODO write docs to help with this scenario | Deps Deps.Error | Diff Diff.Error | Publish Publish.Error | BadHttp String Http.Error -- install | NoSolution [Pkg.Name] | CannotMakeNothing -- RENDERERS toString :: Error -> String toString err = Help.toString (Help.reportToDoc (toReport err)) toStderr :: Error -> IO () toStderr err = Help.toStderr (Help.reportToDoc (toReport err)) toReport :: Error -> Help.Report toReport err = case err of NoElmJson -> Help.report "WELCOME" Nothing "It looks like you are trying to start a new Elm project. Very exciting! :D" [ P.fillSep ["I","very","highly","recommend","working","through" ,P.green "<https://guide.elm-lang.org>","which","will","teach","you","the" ,"basics","of","Elm,","including","how","to","start","new","projects." ] , P.fillSep ["For","folks","who","have","already","built","stuff","with","Elm,","the" ,"problem","is","just","that","there","is","no",P.dullyellow "elm.json","yet." ,"If","you","want","to","work","from","an","example,","check","out","the" ,"one","at","<https://github.com/evancz/elm-todomvc/blob/master/elm.json>" ] , Help.reflow "Whatever your scenario, I hope you have a lovely time using Elm!" ] Assets assetError -> Asset.toReport assetError Bump bumpError -> Bump.toReport bumpError Compile e es -> Help.compilerReport $ Compile.toDoc e es Crawl crawlError -> Crawl.toReport crawlError Cycle names -> Help.report "IMPORT CYCLE" Nothing "Your module imports form a cycle:" [ P.indent 4 (Utils.drawCycle names) , Help.reflow $ "Learn more about why this is disallowed and how to break cycles here:" ++ Help.hintLink "import-cycles" ] Deps depsError -> Deps.toReport depsError Diff commandsError -> Diff.toReport commandsError Publish publishError -> Publish.toReport publishError BadHttp url httpError -> Http.toReport url httpError NoSolution badPackages -> case badPackages of [] -> Help.report "UNSOLVABLE DEPENDENCIES" (Just "elm.json") "This usually happens if you try to modify dependency constraints by\ \ hand. I recommend deleting any dependency you added recently (or all\ \ of them if things are bad) and then adding them again with:" [ P.indent 4 $ P.green "elm install" , Help.reflow $ "And do not be afaid to ask for help on Slack if you get stuck!" ] _:_ -> Help.report "OLD DEPENDENCIES" (Just "elm.json") ( "You are using Elm " ++ Pkg.versionToString Compiler.version ++ ", but the following packages have not been updated for this version yet:" ) [ P.vcat $ map (P.red . P.text . Pkg.toString) badPackages , Help.note "Please be kind to the relevant package authors! 
Having friendly interactions\ \ with users is great motivation, and conversely, getting berated by strangers\ \ on the internet sucks your soul dry. Furthermore, package authors are humans\ \ with families, friends, jobs, vacations, responsibilities, goals, etc. They\ \ face obstacles outside of their technical work you will never know about,\ \ so please assume the best and try to be patient and supportive!" ] CannotMakeNothing -> Help.report "NO INPUT" Nothing "What should I make though? I need more information, like:" [ P.vcat [ P.indent 4 $ P.green "elm make MyThing.elm" , P.indent 4 $ P.green "elm make This.elm That.elm" ] , Help.reflow "However many files you give, I will create one JS file out of them." ]
evancz/builder
src/Reporting/Error.hs
bsd-3-clause
5,003
0
18
1,339
956
540
416
102
13
-- | -- Module : $Header$ -- Copyright : (c) 2013-2015 Galois, Inc. -- License : BSD3 -- Maintainer : [email protected] -- Stability : provisional -- Portability : portable {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE OverloadedStrings #-} module Notebook where import Cryptol.REPL.Monad (REPL(..), runREPL) import qualified Cryptol.REPL.Monad as REPL import qualified Cryptol.Parser as P import qualified Cryptol.Parser.AST as P import Cryptol.Parser.Names (allNamesD, tnamesNT) import Cryptol.Parser.Position (Located(..), emptyRange) import Cryptol.Utils.PP (PP(..), pp, hang, text) import qualified Control.Exception as X import Control.Monad (ap) import Control.Monad.IO.Class (MonadIO(..)) import Data.IORef (IORef, newIORef, readIORef, modifyIORef) import qualified Data.Set as Set import Data.Typeable (Typeable) #if __GLASGOW_HASKELL__ < 710 import Control.Applicative #endif -- Notebook Environment -------------------------------------------------------- -- | All of the top-level declarations along with all of the names -- that they define. We need to associate the names in order to remove -- declarations from the module context when they're overwritten. type NamedDecls = [([P.QName], P.TopDecl)] data RW = RW { eNamedDecls :: NamedDecls } -- | The default environment is simple now but might get more -- complicated, so it's made in IO. defaultRW :: IO RW defaultRW = return RW { eNamedDecls = [] } -- Notebook Monad -------------------------------------------------------------- -- | The Notebook is just a REPL augmented with an incrementally-built module. newtype NB a = NB { unNB :: IORef RW -> REPL a } instance Functor NB where {-# INLINE fmap #-} fmap f m = NB (\ref -> fmap f (unNB m ref)) instance Applicative NB where {-# INLINE pure #-} pure = return {-# INLINE (<*>) #-} (<*>) = ap instance Monad NB where {-# INLINE return #-} return x = NB (\_ -> return x) {-# INLINE (>>=) #-} m >>= f = NB $ \ref -> do x <- unNB m ref unNB (f x) ref -- | Run a NB action with a fresh environment. runNB :: NB a -> IO a runNB m = do ref <- newIORef =<< defaultRW let initialize = liftREPL $ do -- `let` is confusing in notebook context (see #163) REPL.disableLet -- turn of warning noise (#163) REPL.setUser "warnDefaulting" "no" REPL.setUser "warnShadowing" "no" runREPL True $ unNB (initialize >> m) ref -- | Lift a REPL action into the NB monad. liftREPL :: REPL a -> NB a liftREPL m = NB (\_ -> m) instance MonadIO NB where liftIO = io -- Primitives ------------------------------------------------------------------ io :: IO a -> NB a io m = liftREPL (REPL.io m) getRW :: NB RW getRW = NB (\ref -> REPL.io (readIORef ref)) modifyRW_ :: (RW -> RW) -> NB () modifyRW_ f = NB (\ref -> REPL.io (modifyIORef ref f)) getTopDecls :: NB NamedDecls getTopDecls = eNamedDecls `fmap` getRW setTopDecls :: NamedDecls -> NB () setTopDecls nds = modifyRW_ (\rw -> rw { eNamedDecls = nds }) modifyTopDecls :: (NamedDecls -> NamedDecls) -> NB NamedDecls modifyTopDecls f = do nds <- f `fmap` getTopDecls setTopDecls nds return nds -- Exceptions ------------------------------------------------------------------ -- | Notebook exceptions. 
data NBException = REPLException REPL.REPLException | AutoParseError P.ParseError deriving (Show, Typeable) instance X.Exception NBException instance PP NBException where ppPrec _ nbe = case nbe of REPLException exn -> pp exn AutoParseError exn -> hang (text "[error] Failed to parse cell as a module or as interactive input") 4 (pp exn) -- | Raise an exception raise :: NBException -> NB a raise exn = io (X.throwIO exn) -- | Catch an exception catch :: NB a -> (NBException -> NB a) -> NB a catch m k = NB (\ref -> REPL (\replRef -> unREPL (unNB m ref) replRef `X.catches` -- catch a REPLException or a NBException [ X.Handler $ \e -> unREPL (unNB (k (REPLException e)) ref) replRef , X.Handler $ \e -> unREPL (unNB (k e) ref) replRef ])) -- | Try running a possibly-excepting computation try :: NB a -> NB (Either NBException a) try m = catch (Right `fmap` m) (return . Left) -- | Try running the given action, printing any exceptions that arise. runExns :: NB () -> NB () runExns m = m `catch` \x -> io $ print $ pp x -- Module Manipulation --------------------------------------------------------- nbName :: P.Located P.ModName nbName = Located { srcRange = emptyRange , thing = P.ModName ["Notebook"] } -- | Distill a module into a list of decls along with the names -- defined by those decls. modNamedDecls :: P.Module -> NamedDecls modNamedDecls m = [(tdNames td, td) | td <- P.mDecls m] -- | Build a module of the given name using the given list of -- declarations. moduleFromDecls :: P.Located P.ModName -> NamedDecls -> P.Module moduleFromDecls name nds = P.Module { P.mName = name , P.mImports = [] , P.mDecls = map snd nds } -- | In @updateNamedDecls old new = result@, @result@ is a -- right-biased combination of @old@ and @new@ with the following -- semantics: -- -- If a name @x@ is defined in @old@ and not @new@, or in @new@ and -- not @old@, all declarations of @x@ are in @result@. -- -- If a name @x@ is defined in both @old@ and @new@, /none/ of the -- declarations of @x@ from @old@ are in @result@, and all -- declarations of @x@ from @new@ are in @result@. updateNamedDecls :: NamedDecls -> NamedDecls -> NamedDecls updateNamedDecls old new = filteredOld ++ new where newNames = Set.fromList $ concat $ map fst new containsNewName = any (\x -> Set.member x newNames) filteredOld = filter (\(xs,_) -> not (containsNewName xs)) old -- | The names defined by a top level declaration tdNames :: P.TopDecl -> [P.QName] tdNames (P.Decl d) = map P.thing $ allNamesD $ P.tlValue d tdNames (P.TDNewtype d) = map P.thing $ fst $ tnamesNT $ P.tlValue d tdNames (P.Include _) = [] removeIncludes :: P.Module -> P.Module removeIncludes m = m { P.mDecls = decls' } where decls' = filter (not . isInclude) $ P.mDecls m isInclude (P.Include _) = True isInclude _ = False removeImports :: P.Module -> P.Module removeImports m = m { P.mImports = [] }
ZenDevelopmentSystems/ICryptol
src/Notebook.hs
bsd-3-clause
6,501
0
21
1,494
1,693
922
771
114
2
{-# LINE 1 "Data.Ratio.hs" #-} {-# LANGUAGE Safe #-} ----------------------------------------------------------------------------- -- | -- Module : Data.Ratio -- Copyright : (c) The University of Glasgow 2001 -- License : BSD-style (see the file libraries/base/LICENSE) -- -- Maintainer : [email protected] -- Stability : stable -- Portability : portable -- -- Standard functions on rational numbers -- ----------------------------------------------------------------------------- module Data.Ratio ( Ratio , Rational , (%) , numerator , denominator , approxRational ) where import GHC.Real -- The basic defns for Ratio -- ----------------------------------------------------------------------------- -- approxRational -- | 'approxRational', applied to two real fractional numbers @x@ and @epsilon@, -- returns the simplest rational number within @epsilon@ of @x@. -- A rational number @y@ is said to be /simpler/ than another @y'@ if -- -- * @'abs' ('numerator' y) <= 'abs' ('numerator' y')@, and -- -- * @'denominator' y <= 'denominator' y'@. -- -- Any real interval contains a unique simplest rational; -- in particular, note that @0\/1@ is the simplest rational of all. -- Implementation details: Here, for simplicity, we assume a closed rational -- interval. If such an interval includes at least one whole number, then -- the simplest rational is the absolutely least whole number. Otherwise, -- the bounds are of the form q%1 + r%d and q%1 + r'%d', where abs r < d -- and abs r' < d', and the simplest rational is q%1 + the reciprocal of -- the simplest rational between d'%r' and d%r. approxRational :: (RealFrac a) => a -> a -> Rational approxRational rat eps = simplest (rat-eps) (rat+eps) where simplest x y | y < x = simplest y x | x == y = xr | x > 0 = simplest' n d n' d' | y < 0 = - simplest' (-n') d' (-n) d | otherwise = 0 :% 1 where xr = toRational x n = numerator xr d = denominator xr nd' = toRational y n' = numerator nd' d' = denominator nd' simplest' n d n' d' -- assumes 0 < n%d < n'%d' | r == 0 = q :% 1 | q /= q' = (q+1) :% 1 | otherwise = (q*n''+d'') :% n'' where (q,r) = quotRem n d (q',r') = quotRem n' d' nd'' = simplest' d' r' d r n'' = numerator nd'' d'' = denominator nd''
phischu/fragnix
builtins/base/Data.Ratio.hs
bsd-3-clause
3,051
0
11
1,226
422
232
190
31
1
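The Data.Ratio sample above documents approxRational in prose (the "simplest rational within epsilon" rule). A short usage sketch, not part of the dataset row and with values chosen purely for illustration, makes that rule concrete:

import Data.Ratio (approxRational)

main :: IO ()
main = do
  -- Simplest rational within 0.1 of 0.5: the interval [0.4, 0.6] contains 1/2.
  print (approxRational (0.5 :: Double) 0.1)     -- 1 % 2
  -- A tight tolerance around pi yields a small-denominator convergent.
  print (approxRational (pi :: Double) 1.0e-4)   -- 333 % 106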
-- for @ {-# LANGUAGE TypeApplications #-} -- for forall r c capturing with Proxy {-# LANGUAGE ScopedTypeVariables #-} -- lift 2, 3, etc to type level {-# LANGUAGE DataKinds #-} -- for *, + in type sigs {-# LANGUAGE TypeOperators #-} -- for *, + in type sigs {-# LANGUAGE FlexibleContexts #-} -- for (*) which is not injective {-# LANGUAGE UndecidableInstances #-} import Test.Tasty import Test.Tasty.QuickCheck as QC import Test.Tasty.HUnit as HU import Data.List import Data.Ord import Data.Ratio import Symengine as Sym import Symengine.DenseMatrix import Symengine.VecBasic import Symengine.BasicSym import Foreign.C.Types import Prelude hiding (pi) -- TODO: move arbitrary instance _inside_ the library import GHC.TypeLits import Data.Proxy import qualified Data.Vector.Sized as V main = defaultMain tests tests :: TestTree tests = testGroup "Tests" [genBasic, symbolIntRing, denseMatrixRing] -- These are used to check invariants that can be tested by creating -- random members of the type and then checking invariants on them -- properties :: TestTree -- properties = testGroup "Properties" [qcProps] genSafeChar :: Gen Char genSafeChar = elements ['a'..'z'] genSafeString :: Gen String genSafeString = listOf1 genSafeChar instance Arbitrary(BasicSym) where arbitrary = do --intval <- QC.choose (1, 5000) :: Gen (Ratio Integer) let pow2 = 512 intval <- choose (-(2^pow2), 2 ^ pow2 - 1) :: Gen Int strval <- genSafeString :: Gen String choice <- arbitrary :: Gen Bool if choice then return (fromIntegral intval) else return (symbol_new (take 10 strval)) instance forall r c. (KnownNat r, KnownNat c, KnownNat (r * c)) => Arbitrary(DenseMatrix r c) where arbitrary = do let (rows, cols) = (natVal (Proxy @ r), natVal (Proxy @ c)) syms <- V.replicateM arbitrary return (densematrix_new_vec syms) genBasic = testGroup "create and destroy BasicSym" [QC.testProperty "create and die immediately " ((const True) :: BasicSym -> Bool) ] basicTests = testGroup "Basic tests" [ HU.testCase "ascii art" $ do ascii_art <- ascii_art_str HU.assertBool "ASCII art from ascii_art_str is empty" (not . 
null $ ascii_art) , HU.testCase "Basic Constructors" $ do "0" @?= (show zero) "1" @?= (show one) "-1" @?= (show minus_one) , HU.testCase "Basic Trignometric Functions" $ do let pi_over_3 = pi / 3 :: BasicSym let pi_over_2 = pi / 2 :: BasicSym sin zero @?= zero cos zero @?= one sin (pi / 6) @?= 1 / 2 sin (pi / 3) @?= (3 ** (1/2)) / 2 cos (pi / 6) @?= (3 ** (1/2)) / 2 cos (pi / 3) @?= 1 / 2 sin pi_over_2 @?= one cos pi_over_2 @?= zero , HU.testCase "New Symbols, differentiation" $ do let x = symbol_new "x" let y = symbol_new "y" x - x @?= zero x + y @?= y + x diff (x ** 2 + y) x @?= 2 * x diff (x * y) x @?= y diff (sin x) x @?= cos x diff (cos x) x @?= -(sin x) ] -- tests for vectors vectorTests = testGroup "Vector" [ HU.testCase "Vector - create, push_back, get out value" $ do v <- vecbasic_new vecbasic_push_back v (11 :: BasicSym) vecbasic_push_back v (12 :: BasicSym) vecbasic_get v 0 @?= Right (11 :: BasicSym) vecbasic_get v 1 @?= Right (12 :: BasicSym) vecbasic_get v 101 @?= Left RuntimeError ] -- tests for symbol(ints) symbolIntRing = let plus_commutativity :: BasicSym -> BasicSym -> Bool plus_commutativity b1 b2 = b1 + b2 == b2 + b1 plus_assoc :: BasicSym -> BasicSym -> BasicSym -> Bool plus_assoc b1 b2 b3 = (b1 + b2) + b3 == b1 + (b2 + b3) plus_identity :: BasicSym -> Bool plus_identity b = (b + 0) == b && (0 + b) == b plus_inverse :: BasicSym -> Bool plus_inverse b = (b + (-b)) == 0 && ((-b) + b) == 0 mult_identity :: BasicSym -> Bool mult_identity b = (b * 1) == (1 * b) && (b * 1) == b mult_assoc :: BasicSym -> BasicSym -> BasicSym -> Bool mult_assoc a b c = (a * b) * c == a * (b * c) mult_inverse :: BasicSym -> Bool mult_inverse b = if b == 0 then True else b * (1.0 / b) == 1 && (1.0 / b) * b == 1 mult_commutativity :: BasicSym -> BasicSym -> Bool mult_commutativity b1 b2 = b1 * b2 == b2 * b1 -- symengine (==) is structural equality, not "legit" equality. 
-- see: https://github.com/symengine/symengine/issues/207 mult_distributivity :: BasicSym -> BasicSym -> BasicSym -> Bool mult_distributivity b1 b2 b3 = expand(b1 * (b2 + b3) - (b1 * b2 + b1 * b3)) == (0 :: BasicSym) in testGroup "Symbols of numbers - Ring" [ QC.testProperty "(+) identity" plus_identity, QC.testProperty "(+) associativity" plus_assoc, QC.testProperty "(+) inverse" plus_inverse, QC.testProperty "(+) commutativity" plus_commutativity, QC.testProperty "(*) identity" mult_identity, QC.testProperty "(*) associativity" mult_assoc, QC.testProperty "(*) inverse" mult_inverse, QC.testProperty "(*) distributivity" mult_distributivity ] denseMatrixRing = let eye :: DenseMatrix 10 10 eye = densematrix_new_eye @ 0 @ 10 @ 10 zero :: DenseMatrix 10 10 zero = densematrix_new_zeros @ 10 @ 10 plus_identity :: DenseMatrix 10 10 -> Bool plus_identity d = densematrix_add d zero == d && densematrix_add zero d == d plus_invert :: DenseMatrix 10 10 -> Bool plus_invert d = d - d == densematrix_new_zeros plus_commutativity :: DenseMatrix 10 10 -> DenseMatrix 10 10 -> Bool plus_commutativity d1 d2 = densematrix_add d1 d2 == densematrix_add d2 d1 plus_assoc :: DenseMatrix 10 10 -> DenseMatrix 10 10 -> DenseMatrix 10 10 -> Bool plus_assoc d1 d2 d3 = densematrix_add (densematrix_add d1 d2) d3 == densematrix_add d1 (densematrix_add d2 d3) mult_identity :: DenseMatrix 10 10 -> Bool mult_identity d = d <> eye == d && eye <> d == d mult_assoc :: DenseMatrix 2 2 -> DenseMatrix 2 2 -> DenseMatrix 2 2 -> Bool mult_assoc d1 d2 d3 = (((d1 <> d2) <> d3) - (d1 <> (d2 <> d3))) == densematrix_new_zeros mult_nonsingular_invertible :: DenseMatrix 10 10 -> Bool mult_nonsingular_invertible d = if expand(det d) /= 0 then d <> (inv d) == eye else True in testGroup "DenseMatrix - Ring" [ QC.testProperty "(+) identity" plus_identity, QC.testProperty "(+) associativity" plus_assoc, QC.testProperty "(+) commutativity" plus_commutativity, QC.testProperty "(*) identity" mult_identity, -- this fails because I need symbol reduction -- QC.testProperty "(*) associativity " mult_assoc -- no idea why this fails -- QC.testProperty "(*) non-singluar invertible" mult_nonsingular_invertible ]
bollu/symengine.hs-1
test/Spec.hs
mit
6,846
0
16
1,713
2,087
1,069
1,018
-1
-1
{-# LANGUAGE TypeOperators #-}
module Main where

import Data.Array.Repa
import DeepLearning.ConvNet
import DeepLearning.Util

-- |Main
main :: IO ()
main = do
  (pvol, acts) <- withActivations (testNet testShape 2) (testInput testShape)
  print (computeS pvol :: Vol DIM1)
  print acts
silky/deeplearning-hs
Main.hs
mit
317
0
10
77
93
49
44
10
1
{-# LANGUAGE BangPatterns #-} {-# LANGUAGE CPP #-} {-# LANGUAGE EmptyDataDecls #-} {-# LANGUAGE ExplicitForAll #-} -- | This module provides wrappers in 'IO' around the functions from -- "Data.HashTable.Class". -- -- This module exports three concrete hash table types, one for each hash table -- implementation in this package: -- -- > type BasicHashTable k v = IOHashTable (B.HashTable) k v -- > type CuckooHashTable k v = IOHashTable (Cu.HashTable) k v -- > type LinearHashTable k v = IOHashTable (L.HashTable) k v -- -- The 'IOHashTable' type can be thought of as a wrapper around a concrete -- hashtable type, which sets the 'ST' monad state type to 'PrimState' 'IO', -- a.k.a. 'RealWorld': -- -- > type IOHashTable tabletype k v = tabletype (PrimState IO) k v -- -- This module provides 'stToIO' wrappers around the hashtable functions (which -- are in 'ST') to make it convenient to use them in 'IO'. It is intended to be -- imported qualified and used with a user-defined type alias, i.e.: -- -- > import qualified Data.HashTable.IO as H -- > -- > type HashTable k v = H.CuckooHashTable k v -- > -- > foo :: IO (HashTable Int Int) -- > foo = do -- > ht <- H.new -- > H.insert ht 1 1 -- > return ht -- -- Essentially, anywhere you see @'IOHashTable' h k v@ in the type signatures -- below, you can plug in any of @'BasicHashTable' k v@, @'CuckooHashTable' k -- v@, or @'LinearHashTable' k v@. -- module Data.HashTable.Weak.IO ( BasicHashTable , CuckooHashTable -- , LinearHashTable , IOHashTable , new , newSized, newSizedWithMkWeak , insert, insertWeak, insertWithMkWeak , delete , lookup , fromList , fromListWithSizeHint , toList , mapM_, mapWeakM_ , foldM, foldWeakM, foldStopM , computeOverhead, finalize ) where ------------------------------------------------------------------------------ import Control.Monad.Primitive (PrimState) import Control.Monad.ST (stToIO) import Data.Hashable (Hashable) import qualified Data.HashTable.Weak.Class as C import Prelude hiding (lookup, mapM_) import qualified System.Mem.Weak.Exts as Weak import System.Mem.Weak.Exts (MkWeak(..),Weak(..)) ------------------------------------------------------------------------------ import Data.HashTable.Weak.Internal.Utils (unsafeIOToST) import qualified Data.HashTable.Weak.ST.Basic as B import qualified Data.HashTable.Weak.ST.Cuckoo as Cu --import qualified Data.HashTable.ST.Linear as L ------------------------------------------------------------------------------ -- | A type alias for a basic open addressing hash table using linear -- probing. See "Data.HashTable.ST.Basic". type BasicHashTable k v = IOHashTable (B.HashTable) k v -- | A type alias for the cuckoo hash table. See "Data.HashTable.ST.Cuckoo". type CuckooHashTable k v = IOHashTable (Cu.HashTable) k v -- | A type alias for the linear hash table. See "Data.HashTable.ST.Linear". --type LinearHashTable k v = IOHashTable (L.HashTable) k v ------------------------------------------------------------------------------ -- | A type alias for our hash tables, which run in 'ST', to set the state -- token type to 'PrimState' 'IO' (aka 'RealWorld') so that we can use them in -- 'IO'. type IOHashTable tabletype k v = tabletype (PrimState IO) k v ------------------------------------------------------------------------------ -- | See the documentation for this function in "Data.HashTable.Class#v:new". 
new :: (Eq k,Hashable k,C.HashTable h) => IO (IOHashTable h k v) new = stToIO C.new {-# INLINE new #-} {-# SPECIALIZE INLINE new :: (Eq k,Hashable k) => IO (BasicHashTable k v) #-} --{-# SPECIALIZE INLINE new :: IO (LinearHashTable k v) #-} {-# SPECIALIZE INLINE new :: (Eq k,Hashable k) => IO (CuckooHashTable k v) #-} ------------------------------------------------------------------------------ -- | See the documentation for this function in -- "Data.HashTable.Class#v:newSized". newSized :: (Eq k,Hashable k,C.HashTable h) => Int -> IO (IOHashTable h k v) newSized = stToIO . C.newSized {-# INLINE newSized #-} {-# SPECIALIZE INLINE newSized :: (Eq k,Hashable k) => Int -> IO (BasicHashTable k v) #-} --{-# SPECIALIZE INLINE newSized :: Int -> IO (LinearHashTable k v) #-} {-# SPECIALIZE INLINE newSized :: (Eq k,Hashable k) => Int -> IO (CuckooHashTable k v) #-} newSizedWithMkWeak :: (Eq k,Hashable k,C.HashTable h) => Int -> MkWeak -> IO (IOHashTable h k v) newSizedWithMkWeak mkWeak = stToIO . C.newSizedWithMkWeak mkWeak {-# INLINE newSizedWithMkWeak #-} {-# SPECIALIZE INLINE newSizedWithMkWeak :: (Eq k,Hashable k) => Int -> MkWeak -> IO (BasicHashTable k v) #-} {-# SPECIALIZE INLINE newSizedWithMkWeak :: (Eq k,Hashable k) => Int -> MkWeak -> IO (CuckooHashTable k v) #-} ------------------------------------------------------------------------------ -- | See the documentation for this function in "Data.HashTable.Class#v:insert". insert :: (C.HashTable h, Eq k, Hashable k) => IOHashTable h k v -> k -> v -> IO () insert h k v = stToIO $ C.insert h k v {-# INLINE insert #-} {-# SPECIALIZE INLINE insert :: (Eq k, Hashable k) => BasicHashTable k v -> k -> v -> IO () #-} --{-# SPECIALIZE INLINE insert :: (Eq k, Hashable k) => -- LinearHashTable k v -> k -> v -> IO () #-} {-# SPECIALIZE INLINE insert :: (Eq k, Hashable k) => CuckooHashTable k v -> k -> v -> IO () #-} insertWeak :: (C.HashTable h, Eq k, Hashable k) => IOHashTable h k v -> k -> Weak v -> IO () insertWeak h k w = stToIO $ C.insertWeak h k w {-# INLINE insertWeak #-} {-# SPECIALIZE INLINE insertWeak :: (Eq k, Hashable k) => BasicHashTable k v -> k -> Weak v -> IO () #-} {-# SPECIALIZE INLINE insertWeak :: (Eq k, Hashable k) => CuckooHashTable k v -> k -> Weak v -> IO () #-} insertWithMkWeak :: (C.HashTable h, Eq k, Hashable k) => IOHashTable h k v -> k -> v -> MkWeak -> IO () insertWithMkWeak h k v mkWeak = stToIO $ C.insertWithMkWeak h k v mkWeak {-# INLINE insertWithMkWeak #-} {-# SPECIALIZE INLINE insertWithMkWeak :: (Eq k, Hashable k) => BasicHashTable k v -> k -> v -> MkWeak -> IO () #-} {-# SPECIALIZE INLINE insertWithMkWeak :: (Eq k, Hashable k) => CuckooHashTable k v -> k -> v -> MkWeak -> IO () #-} ------------------------------------------------------------------------------ -- | See the documentation for this function in "Data.HashTable.Class#v:delete". delete :: (C.HashTable h, Eq k, Hashable k) => IOHashTable h k v -> k -> IO () delete h k = stToIO $ C.delete h k {-# INLINE delete #-} {-# SPECIALIZE INLINE delete :: (Eq k, Hashable k) => BasicHashTable k v -> k -> IO () #-} --{-# SPECIALIZE INLINE delete :: (Eq k, Hashable k) => LinearHashTable k v -> k -> IO () #-} {-# SPECIALIZE INLINE delete :: (Eq k, Hashable k) => CuckooHashTable k v -> k -> IO () #-} ------------------------------------------------------------------------------ -- | See the documentation for this function in "Data.HashTable.Class#v:lookup". 
lookup :: (C.HashTable h, Eq k, Hashable k) => IOHashTable h k v -> k -> IO (Maybe v) lookup h k = stToIO $ C.lookup h k {-# INLINE lookup #-} {-# SPECIALIZE INLINE lookup :: (Eq k, Hashable k) => BasicHashTable k v -> k -> IO (Maybe v) #-} --{-# SPECIALIZE INLINE lookup :: (Eq k, Hashable k) => LinearHashTable k v -> k -> IO (Maybe v) #-} {-# SPECIALIZE INLINE lookup :: (Eq k, Hashable k) => CuckooHashTable k v -> k -> IO (Maybe v) #-} ------------------------------------------------------------------------------ -- | See the documentation for this function in -- "Data.HashTable.Class#v:fromList". fromList :: (C.HashTable h, Eq k, Hashable k) => [(k,v)] -> IO (IOHashTable h k v) fromList = stToIO . C.fromList {-# INLINE fromList #-} {-# SPECIALIZE INLINE fromList :: (Eq k, Hashable k) => [(k,v)] -> IO (BasicHashTable k v) #-} --{-# SPECIALIZE INLINE fromList :: (Eq k, Hashable k) => [(k,v)] -> IO (LinearHashTable k v) #-} {-# SPECIALIZE INLINE fromList :: (Eq k, Hashable k) => [(k,v)] -> IO (CuckooHashTable k v) #-} ------------------------------------------------------------------------------ -- | See the documentation for this function in -- "Data.HashTable.Class#v:fromListWithSizeHint". fromListWithSizeHint :: (C.HashTable h, Eq k, Hashable k) => Int -> [(k,v)] -> IO (IOHashTable h k v) fromListWithSizeHint n = stToIO . C.fromListWithSizeHint n {-# INLINE fromListWithSizeHint #-} {-# SPECIALIZE INLINE fromListWithSizeHint :: (Eq k, Hashable k) => Int -> [(k,v)] -> IO (BasicHashTable k v) #-} --{-# SPECIALIZE INLINE fromListWithSizeHint :: (Eq k, Hashable k) => Int -> [(k,v)] -> IO (LinearHashTable k v) #-} {-# SPECIALIZE INLINE fromListWithSizeHint :: (Eq k, Hashable k) => Int -> [(k,v)] -> IO (CuckooHashTable k v) #-} ------------------------------------------------------------------------------ -- | See the documentation for this function in "Data.HashTable.Class#v:toList". toList :: (C.HashTable h, Eq k, Hashable k) => IOHashTable h k v -> IO [(k,v)] toList = stToIO . C.toList {-# INLINE toList #-} {-# SPECIALIZE INLINE toList :: (Eq k, Hashable k) => BasicHashTable k v -> IO [(k,v)] #-} --{-# SPECIALIZE INLINE toList :: (Eq k, Hashable k) => LinearHashTable k v -> IO [(k,v)] #-} {-# SPECIALIZE INLINE toList :: (Eq k, Hashable k) => CuckooHashTable k v -> IO [(k,v)] #-} ------------------------------------------------------------------------------ -- | See the documentation for this function in "Data.HashTable.Class#v:foldM". 
foldM :: (C.HashTable h) => (a -> (k,v) -> IO a) -> a -> IOHashTable h k v -> IO a foldM f seed ht = stToIO $ C.foldM f' seed ht where f' !i !t = unsafeIOToST $ f i t {-# INLINE foldM #-} {-# SPECIALIZE INLINE foldM :: (a -> (k,v) -> IO a) -> a -> BasicHashTable k v -> IO a #-} --{-# SPECIALIZE INLINE foldM :: (a -> (k,v) -> IO a) -> a -> LinearHashTable k v -> IO a #-} {-# SPECIALIZE INLINE foldM :: (a -> (k,v) -> IO a) -> a -> CuckooHashTable k v -> IO a #-} foldWeakM :: (C.HashTable h) => (a -> (k,Weak v) -> IO a) -> a -> IOHashTable h k v -> IO a foldWeakM f seed ht = stToIO $ C.foldWeakM f' seed ht where f' !i !t = unsafeIOToST $ f i t {-# INLINE foldWeakM #-} {-# SPECIALIZE INLINE foldWeakM :: (a -> (k,Weak v) -> IO a) -> a -> BasicHashTable k v -> IO a #-} {-# SPECIALIZE INLINE foldWeakM :: (a -> (k,Weak v) -> IO a) -> a -> CuckooHashTable k v -> IO a #-} foldStopM :: (C.HashTable h) => (a -> (k,v) -> IO (Either a a)) -> a -> IOHashTable h k v -> IO a foldStopM f seed ht = stToIO $ C.foldStopM f' seed ht where f' !i !t = unsafeIOToST $ f i t {-# INLINE foldStopM #-} {-# SPECIALIZE INLINE foldStopM :: (a -> (k,v) -> IO (Either a a)) -> a -> BasicHashTable k v -> IO a #-} --{-# SPECIALIZE INLINE foldM :: (a -> (k,v) -> IO a) -> a -> LinearHashTable k v -> IO a #-} {-# SPECIALIZE INLINE foldStopM :: (a -> (k,v) -> IO (Either a a)) -> a -> CuckooHashTable k v -> IO a #-} ------------------------------------------------------------------------------ -- | See the documentation for this function in "Data.HashTable.Class#v:mapM_". mapM_ :: (C.HashTable h) => ((k,v) -> IO a) -> IOHashTable h k v -> IO () mapM_ f ht = stToIO $ C.mapM_ f' ht where f' = unsafeIOToST . f {-# INLINE mapM_ #-} {-# SPECIALIZE INLINE mapM_ :: ((k,v) -> IO a) -> BasicHashTable k v -> IO () #-} --{-# SPECIALIZE INLINE mapM_ :: ((k,v) -> IO a) -> LinearHashTable k v -> IO () #-} {-# SPECIALIZE INLINE mapM_ :: ((k,v) -> IO a) -> CuckooHashTable k v -> IO () #-} mapWeakM_ :: (C.HashTable h) => ((k,Weak v) -> IO a) -> IOHashTable h k v -> IO () mapWeakM_ f ht = stToIO $ C.mapWeakM_ f' ht where f' = unsafeIOToST . f {-# INLINE mapWeakM_ #-} {-# SPECIALIZE INLINE mapWeakM_ :: ((k,Weak v) -> IO a) -> BasicHashTable k v -> IO () #-} {-# SPECIALIZE INLINE mapWeakM_ :: ((k,Weak v) -> IO a) -> CuckooHashTable k v -> IO () #-} ------------------------------------------------------------------------------ -- | See the documentation for this function in -- "Data.HashTable.Class#v:computeOverhead" computeOverhead :: (C.HashTable h) => IOHashTable h k v -> IO Double computeOverhead = stToIO . C.computeOverhead {-# INLINE computeOverhead #-} finalize :: (Eq k,Hashable k,C.HashTable h) => IOHashTable h k v -> IO () finalize = stToIO . C.finalize {-# INLINE finalize #-} {-# SPECIALIZE INLINE finalize :: (Eq k,Hashable k) => BasicHashTable k v -> IO () #-} {-# SPECIALIZE INLINE finalize :: (Eq k,Hashable k) => CuckooHashTable k v -> IO () #-}
cornell-pl/HsAdapton
weak-hashtables/src/Data/HashTable/Weak/IO.hs
bsd-3-clause
12,707
0
12
2,485
1,754
996
758
142
1
import DPH.Testsuite import DPH.Arbitrary.Segd import Data.Array.Parallel.Unlifted as U import Prelude as P $(testcases [ "" <@ [t| ( Bool, Int ) |] , "acc" <@ [t| ( Int ) |] , "num" <@ [t| ( Int ) |] , "ord" <@ [t| ( Bool, Int ) |] , "enum" <@ [t| ( Bool, Int ) |] ] [d| prop_lengthsToSegd :: Array Int -> Bool prop_lengthsToSegd arr = let lens = U.map (`mod` 100) arr segd = lengthsToSegd lens in checkSegd segd lens prop_mkSegd :: Array Int -> Bool prop_mkSegd arr = let lens = U.map (`mod` 100) arr ids = U.scan (+) 0 lens n = U.sum lens segd = mkSegd lens ids n in checkSegd segd lens prop_lengthSegd :: Segd -> Bool prop_lengthSegd segd = lengthSegd segd == U.length (lengthsSegd segd) -- skip: lengthsSegd (redundant) -- skip: indicesSegd (redundant) -- skip: elementsSegd (redundant) -- Adds two segment descriptors segment-wise -- TODO: decide whether we would ever be called with Segds -- with different number of segments in each. prop_plusSegd :: Segd -> Segd -> Property prop_plusSegd segd1 segd2 = let segd = segd1 `plusSegd` segd2 lens1 = lengthsSegd segd1 lens2 = lengthsSegd segd2 lens = U.zipWith (+) lens1 lens2 in U.length lens1 == U.length lens2 ==> checkSegd segd lens |])
mainland/dph
dph-test/test/Unlifted/Segd.hs
bsd-3-clause
1,488
0
9
504
97
67
30
-1
-1
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TemplateHaskell #-}
module Ros.Std_msgs.ByteMultiArray where

import qualified Prelude as P
import Prelude ((.), (+), (*))
import qualified Data.Typeable as T
import Control.Applicative
import Ros.Internal.RosBinary
import Ros.Internal.Msg.MsgInfo
import qualified GHC.Generics as G
import qualified Data.Default.Generics as D
import qualified Data.Vector.Storable as V
import qualified Data.Word as Word
import qualified Ros.Std_msgs.MultiArrayLayout as MultiArrayLayout
import Lens.Family.TH (makeLenses)
import Lens.Family (view, set)

data ByteMultiArray = ByteMultiArray { _layout :: MultiArrayLayout.MultiArrayLayout
                                     , __data :: V.Vector Word.Word8
                                     } deriving (P.Show, P.Eq, P.Ord, T.Typeable, G.Generic)

$(makeLenses ''ByteMultiArray)

instance RosBinary ByteMultiArray where
  put obj' = put (_layout obj') *> put (__data obj')
  get = ByteMultiArray <$> get <*> get

instance MsgInfo ByteMultiArray where
  sourceMD5 _ = "70ea476cbcfd65ac2f68f3cda1e891fe"
  msgTypeName _ = "std_msgs/ByteMultiArray"

instance D.Default ByteMultiArray
acowley/roshask
msgs/Std_msgs/Ros/Std_msgs/ByteMultiArray.hs
bsd-3-clause
1,234
1
10
218
289
175
114
29
0
{-# LANGUAGE FlexibleContexts , FlexibleInstances , MultiParamTypeClasses , RankNTypes , ScopedTypeVariables , TypeOperators , UndecidableInstances #-} {- | Module : StackTransCFJava Description : Translation of FCore to Java with Stack-opt Copyright : (c) 2014—2015 The F2J Project Developers (given in AUTHORS.txt) License : BSD3 Maintainer : Jeremy <[email protected]>, Tomas <[email protected]> Stability : stable Portability : non-portable (MPTC) This module augments the basic translation of FCore to Java to support tail-call elimination. -} module StackTransCFJava where -- TODO: isolate all hardcoded strings to StringPrefixes (e.g. Fun) import qualified Language.Java.Syntax as J import Data.Maybe (fromJust) import BaseTransCFJava import ClosureF import Inheritance import JavaEDSL import MonadLib import StringPrefixes data TranslateStack m = TS { toTS :: Translate m -- supertype is a subtype of Translate (later on at least) } instance {-(r :< Translate m) =>-} (:<) (TranslateStack m) (Translate m) where up = up . toTS instance (:<) (TranslateStack m) (TranslateStack m) where -- reflexivity up = id nextClass ::(Monad m) => Translate m -> m String nextClass this = liftM2 (++) (getPrefix this) (return "Next") whileApplyLoop :: (Monad m) => Translate m -> Bool -> String -> String -> J.Type -> J.Type -> m [J.BlockStmt] whileApplyLoop this flag ctemp tempOut outType ctempCastTyp = do closureClass <- liftM2 (++) (getPrefix this) (return "Closure") let closureType' = classTy closureClass nextName <- nextClass (up this) let doWhileStmts = [localVar closureType' (varDeclNoInit ctemp), localVar outType (varDecl tempOut (case outType of J.PrimType J.IntT -> J.Lit (J.Int 0) -- TODO: better choice? _ -> J.Lit J.Null)), bStmt (J.Do (J.StmtBlock (block [bsAssign (name [ctemp]) (J.ExpName $ name [nextName, "next"]) ,bsAssign (name [nextName, "next"]) (J.Lit J.Null) ,bStmt (methodCall [ctemp, "apply"] [])])) (J.BinOp (J.ExpName $ name [nextName, "next"]) J.NotEq (J.Lit J.Null))), bsAssign (name [tempOut]) (cast outType (J.FieldAccess (fieldAccExp (cast ctempCastTyp (left $ var ctemp)) closureOutput)))] if flag -- False means stack with apply then return doWhileStmts else return (let (l1,l2) = splitAt 2 doWhileStmts in head l1:l2) whileApplyLoopMain :: (Monad m) => Translate m -> String -> String -> J.Type -> J.Type -> m [J.BlockStmt] whileApplyLoopMain this ctemp tempOut outType ctempCastTyp = do closureClass <- liftM2 (++) (getPrefix this) (return "Closure") let closureType' = classTy closureClass nextName <- nextClass (up this) let nextNEqNull = J.BinOp (J.ExpName $ name [nextName,"next"]) J.NotEq (J.Lit J.Null) let loop = [bStmt (J.Do (J.StmtBlock (block [bsAssign (name [ctemp]) (J.ExpName $ name [nextName,"next"]) ,bsAssign (name [nextName,"next"]) (J.Lit J.Null) ,bStmt (methodCall [ctemp,"apply"] [])])) nextNEqNull) ,bsAssign (name [tempOut]) (cast outType (J.FieldAccess (fieldAccExp (cast ctempCastTyp (left $ var ctemp)) closureOutput)))] return [localVar closureType' (varDeclNoInit ctemp) ,localVar outType (varDecl tempOut (J.MethodInv (J.MethodCall (name ["apply"]) []))) ,bStmt (J.IfThen nextNEqNull (J.StmtBlock (block loop)))] containsNext :: [J.BlockStmt] -> Bool containsNext = foldr ((||) . 
(\x -> case x of (J.BlockStmt (J.ExpStmt (J.Assign (J.NameLhs (J.Name [J.Ident _nextClass,J.Ident "next"])) J.EqualA _))) -> True _ -> False)) False -- ad-hoc fix for final-returned expressions in Stack translation empyClosure :: Monad m => Translate m -> J.Exp -> String -> m J.BlockStmt empyClosure this outExp box = do closureClass <- liftM (++ box) $ liftM2 (++) (getPrefix this) (return "Closure") nextName <- nextClass (up this) return (bsAssign (name [nextName,"next"]) (J.InstanceCreation [] (classTyp closureClass) [] (Just (classBody [memberDecl (methodDecl [annotation "Override",J.Public] (Just (classTy closureClass)) "clone" [] returnNull) ,memberDecl (methodDecl [annotation "Override",J.Public] Nothing "apply" [] (Just (block [bsAssign (name [closureOutput]) outExp])))])))) whileApply :: (Monad m) => Translate m -> Bool -> J.Exp -> String -> String -> J.Type -> J.Type -> m [J.BlockStmt] whileApply this flag cl ctemp tempOut outType ctempCastTyp = do loop <- whileApplyLoop this flag ctemp tempOut outType ctempCastTyp nextName <- nextClass (up this) return (bsAssign (name [nextName,"next"]) cl : loop) --e.g. Next.next = x8; nextApply :: (Monad m) => Translate m -> J.Exp -> String -> J.Type -> m [J.BlockStmt] nextApply this cl tempOut outType = do nextName <- nextClass this return [bsAssign (name [nextName,"next"]) cl ,localVar outType (varDecl tempOut (case outType of J.PrimType J.IntT -> J.Lit (J.Int 0) J.PrimType J.CharT -> J.Lit (J.Char 'a') --TODO: better default value? _ -> J.Lit J.Null))] transS :: forall m selfType. (MonadState Int m, MonadReader Bool m, selfType :< TranslateStack m, selfType :< Translate m) => Mixin selfType (Translate m) (TranslateStack m) transS this super = TS {toTS = super { translateM = \e -> case e of -- count abstraction as in tail position Lam _ _ -> local (True ||) $ translateM super e Fix{} -> local (True ||) $ translateM super e -- type application just inherits existing flag TApp _ _ -> translateM super e -- if e1 e2 e3: e1 can't be in tail position, e2 and e3 inherits flag If e1 e2 e3 -> translateIf (up this) (local (False &&) $ translateM (up this) e1) (translateM (up this) e2) (translateM (up this) e3) App e1 e2 -> translateApply (up this) False (local (False &&) $ translateM (up this) e1) (local (False &&) $ translateM (up this) e2) -- let e1 e2: e1 can't be in tail position, e2 inherits flag Let _ expr body -> do (s1,j1,t1) <- local (False &&) $ translateM (up this) expr translateLet (up this) (s1,j1,t1) body -- case scrutinee can't be in tail position, alts inherit flag Case scrut alts -> do (scrutStmts, scrutExpr, _) <- local (False &&) $ translateM (up this) scrut (altsStmts, varName, typ) <- transAlts (up this) scrutExpr alts return (scrutStmts ++ altsStmts, varName, typ) -- the last expression in the sequence inherit flag SeqExprs es -> do let allButLast = init es let lastExpr = last es emost <- mapM (local (False &&) . 
translateM (up this)) allButLast (eLastStmt, eLastExpr, eLastType) <- translateM (up this) lastExpr let statements = concatMap (\(x,_,_) -> x) emost return (statements ++ eLastStmt, eLastExpr, eLastType) -- count other expressions as not in tail position _ -> local (False &&) $ translateM super e ,genApply = \f _ x jType ctempCastTyp -> do (tailPosition :: Bool) <- ask (n :: Int) <- get put (n + 1) flag <- withApply (up this) -- False means Stack with Apply if tailPosition then nextApply (up this) f x jType else whileApply (up this) flag f ("c" ++ show n) x jType ctempCastTyp ,genRes = \_ _ -> return [] ,stackMainBody = \t -> do closureClass <- liftM2 (++) (getPrefix (up this)) (return "Closure") retType <- applyRetType (up this) t loop <- whileApplyLoopMain (up this) "c" "result" (fromJust retType) (classTy closureClass) return (loop ++ [bStmt (classMethodCall (left $ var "System.out") "println" [left $ var "result"])]) ,applyRetType = \t -> (case t of JClass "java.lang.Integer" -> return $ Just $ classTy "java.lang.Integer" JClass "java.lang.Boolean" -> return $ Just $ classTy "java.lang.Boolean" CFInt -> return $ Just $ classTy "java.lang.Integer" _ -> return $ Just objClassTy) ,createWrap = \nam expr -> do (bs,e,t) <- translateM (up this) expr returnType <- applyRetType (up this) t let returnStmt = [bStmt $ J.Return $ Just (unwrap e)] -- box <- getBox (up this) t -- empyClosure' <- empyClosure (up this) (unwrap e) box mainbody <- stackMainBody (up this) t let stackDecl = wrapperClass False nam (bs ++ -- (if containsNext bs then [] else [empyClosure']) ++ returnStmt) returnType (Just $ J.Block mainbody) return (createCUB Nothing [stackDecl] ,t)}} -- Alternative version of transS that interacts with the Apply translation transSA :: (MonadState Int m, MonadReader Bool m, selfType :< TranslateStack m, selfType :< Translate m) => Mixin selfType (Translate m) (TranslateStack m) transSA this super = TS {toTS = (up (transS this super)) { withApply = return False }} -- Alternative version of transS that interacts with the Unbox translation -- transSU :: (MonadState Int m, MonadReader (Int, Bool) m, selfType :< TranslateStack m, selfType :< Translate m) => Mixin selfType (Translate m) (TranslateStack m) -- transSU this super = -- TS {toTS = (up (transS this super)) { -- getBox = \t -> case t of -- CFInt -> return "BoxInt" -- _ -> return "BoxBox", -- applyRetType = \t -> (case t of -- JClass "java.lang.Integer" -> return $ Just $ J.PrimType J.IntT -- JClass "java.lang.Boolean" -> return $ Just $ J.PrimType J.BooleanT -- CFInt -> return $ Just $ J.PrimType J.IntT -- _ -> return $ Just objClassTy), -- stackMainBody = \t -> do -- closureClass <- liftM2 (++) (getPrefix (up this)) (return "Closure") -- let closureType' = classTy closureClass -- nextName <- nextClass (up this) -- let finalType = case t of -- CFInt -> "Int" -- _ -> "Box" -- let resultType = case t of -- CFInt -> J.PrimType J.IntT -- CFChar -> J.PrimType J.CharT -- JClass "java.lang.Integer" -> classTy "java.lang.Integer" -- _ -> objClassTy -- let nextNEqNull = J.BinOp (J.ExpName $ name [nextName, "next"]) -- J.NotEq -- (J.Lit J.Null) -- let loop = [bStmt (J.Do (J.StmtBlock (block [assign (name ["c"]) (J.ExpName $ name [nextName, "next"]) -- ,assign (name [nextName, "next"]) (J.Lit J.Null) -- ,bStmt (methodCall ["c", "apply"] [])])) -- (J.BinOp (J.ExpName $ name [nextName, "next"]) -- J.NotEq -- (J.Lit J.Null))), -- bStmt (J.IfThenElse -- (J.InstanceOf (left $ var "c") (J.ClassRefType $ classTyp (closureClass ++ "Int" ++ finalType))) -- 
(assignE -- (name ["result"]) -- (cast resultType -- (J.FieldAccess (fieldAccExp -- (cast (classTy (closureClass ++ "Int" ++ finalType)) (left $ var "c")) -- closureOutput)))) -- (assignE -- (name ["result"]) -- (cast resultType -- (J.FieldAccess (fieldAccExp -- (cast (classTy (closureClass ++ "Box" ++ finalType)) (left $ var "c")) -- closureOutput)))))] -- return (localVar closureType' (varDeclNoInit "c") : -- localVar resultType (varDecl "result" (J.MethodInv (J.MethodCall (name ["apply"]) []))) : -- bStmt (J.IfThen nextNEqNull (J.StmtBlock (block loop))) : -- [bStmt (classMethodCall (left $ var "System.out") "println" [left $ var "result"])]) -- }} -- Alternative version of transS that interacts with the Unbox and Apply translation -- transSAU :: (MonadState Int m, MonadReader (Int, Bool) m, selfType :< TranslateStack m, selfType :< Translate m) => Mixin selfType (Translate m) (TranslateStack m) -- transSAU this super = TS {toTS = (up (transSU this super)) { -- -- genRes = \t s -> if (last t) then return [] else genRes super t s -- genApply = \f _ x jType ctempCastTyp -> -- do (_ :: Int, tailPosition :: Bool) <- ask -- (n :: Int) <- get -- put (n+1) -- if tailPosition -- then nextApply (up this) f x jType -- else whileApply (up this) False f ("c" ++ show n) x jType ctempCastTyp -- }}
bixuanzju/fcore
lib/StackTransCFJava.hs
bsd-2-clause
15,623
1
29
6,285
3,242
1,680
1,562
182
13
{-# LANGUAGE RecordWildCards, TupleSections, ViewPatterns, RankNTypes, TypeOperators, TypeFamilies, ExistentialQuantification #-} {-# LANGUAGE GeneralizedNewtypeDeriving, FlexibleInstances, FlexibleContexts, ScopedTypeVariables #-} module General.Database( Pred, (%==), (%==%), (%>), (%<), (%/=), (%&&), nullP, likeP, orderDesc, orderAsc, distinct, limit, Upd(..), TypeField(..), Table, table, Column, column, rowid, norowid, sqlInsert, sqlUpdate, sqlSelect, sqlDelete, sqlEnsureTable, sqlUnsafe ) where import Data.List.Extra import Data.String import Data.Maybe import Data.Time.Clock import Data.Tuple.Extra import Database.SQLite.Simple hiding ((:=)) import Database.SQLite.Simple.FromField import Database.SQLite.Simple.ToField type family Uncolumns cs type instance Uncolumns () = () type instance Uncolumns (Column a) = Only a type instance Uncolumns (Only (Column a)) = Only a type instance Uncolumns (Column a, Column b) = (a, b) type instance Uncolumns (Column a, Column b, Column c) = (a, b, c) type instance Uncolumns (Column a, Column b, Column c, Column d) = (a, b, c, d) type instance Uncolumns (Column a, Column b, Column c, Column d, Column e) = (a, b, c, d, e) type instance Uncolumns (Column a, Column b, Column c, Column d, Column e, Column f) = (a, b, c, d, e, f) type instance Uncolumns (Column a, Column b, Column c, Column d, Column e, Column f, Column g) = (a, b, c, d, e, f, g) type instance Uncolumns (Column a, Column b, Column c, Column d, Column e, Column f, Column g, Column h) = (a, b, c, d, e, f, g, h) type instance Uncolumns (Column a, Column b, Column c, Column d, Column e, Column f, Column g, Column h, Column i) = (a, b, c, d, e, f, g, h, i) data Table rowid cs = Table {tblName :: String, tblKeys :: [Column_], tblCols :: [Column_]} data Column c = Column {colTable :: String, colName :: String, colSqlType :: String} deriving (Eq,Show) type Column_ = Column () column_ :: Column c -> Column_ column_ Column{..} = Column{..} class TypeField field where typeField :: field -> String instance TypeField String where typeField _ = "TEXT NOT NULL" instance TypeField Int where typeField _ = "INTEGER NOT NULL" instance TypeField Double where typeField _ = "REAL NOT NULL" instance TypeField UTCTime where typeField _ = "TEXT NOT NULL" instance TypeField Bool where typeField _ = "INTEGER NOT NULL" instance TypeField a => TypeField (Maybe a) where typeField x | Just s <- stripSuffix " NOT NULL" s = s | otherwise = error "Can't remove the NULL constraint" where s = typeField $ fromJust x class Columns cs where columns :: cs -> [Column_] instance Columns () where columns () = [] instance Columns (Column c1) where columns c1 = [column_ c1] instance Columns (Only (Column c1)) where columns (Only c1) = [column_ c1] instance Columns (Column c1, Column c2) where columns (c1, c2) = [column_ c1, column_ c2] instance Columns (Column c1, Column c2, Column c3) where columns (c1, c2, c3) = [column_ c1, column_ c2, column_ c3] instance Columns (Column c1, Column c2, Column c3, Column c4) where columns (c1, c2, c3, c4) = [column_ c1, column_ c2, column_ c3, column_ c4] instance Columns (Column c1, Column c2, Column c3, Column c4, Column c5) where columns (c1, c2, c3, c4, c5) = [column_ c1, column_ c2, column_ c3, column_ c4, column_ c5] instance Columns (Column c1, Column c2, Column c3, Column c4, Column c5, Column c6) where columns (c1, c2, c3, c4, c5, c6) = [column_ c1, column_ c2, column_ c3, column_ c4, column_ c5, column_ c6] instance Columns (Column c1, Column c2, Column c3, Column c4, Column c5, Column 
c6, Column c7) where columns (c1, c2, c3, c4, c5, c6, c7) = [column_ c1, column_ c2, column_ c3, column_ c4, column_ c5, column_ c6, column_ c7] instance Columns (Column c1, Column c2, Column c3, Column c4, Column c5, Column c6, Column c7, Column c8) where columns (c1, c2, c3, c4, c5, c6, c7, c8) = [column_ c1, column_ c2, column_ c3, column_ c4, column_ c5, column_ c6, column_ c7, column_ c8] instance Columns (Column c1, Column c2, Column c3, Column c4, Column c5, Column c6, Column c7, Column c8, Column c9) where columns (c1, c2, c3, c4, c5, c6, c7, c8, c9) = [column_ c1, column_ c2, column_ c3, column_ c4, column_ c5, column_ c6, column_ c7, column_ c8, column_ c9] table :: (Columns keys, Columns cols) => String -> Column rowid -> keys -> cols -> Table rowid (Uncolumns cols) -- important to produce name before looking at columns table name rowid (columns -> keys) (columns -> cols) = Table name (check keys) (check cols) where check x | nubOrd (map colTable $ keys ++ cols) /= [name] = error "Column with the wrong table" | not $ null $ map colName keys \\ map colName cols = error "Key column which is not one of the normal columns" | colName rowid `notElem` ["","rowid"] = error "Rowid column must have name rowid" | otherwise = x column :: forall c rowid cs . TypeField c => Table rowid cs -> String -> Column c column tbl row = Column (tblName tbl) row (typeField (undefined :: c)) rowid :: Table rowid cs -> Column rowid rowid tbl = Column (tblName tbl) "rowid" "" norowid :: Column () norowid = Column "" "" "" sqlInsert :: (ToRow cs, FromField rowid) => Connection -> Table rowid cs -> cs -> IO rowid sqlInsert conn tbl val = do let vs = toRow val -- FIXME: Should combine the last_insert_rowid with the INSERT INTO let str = "INSERT INTO " ++ tblName tbl ++ " VALUES (" ++ intercalate "," (replicate (length vs) "?") ++ ")" execute conn (fromString str) vs [Only row] <- query_ conn (fromString "SELECT last_insert_rowid()") return row sqlUpdate :: Connection -> [Upd] -> [Pred] -> IO () sqlUpdate conn upd pred = do let (updCs, updVs) = unzip $ map unupdate upd let (prdStr, _, prdCs, prdVs) = unpred pred let tbl = nubOrd $ map colTable $ updCs ++ prdCs case tbl of _ | null upd -> fail "Must update at least one column" [t] -> do let str = "UPDATE " ++ t ++ " SET " ++ intercalate ", " (map ((++ "=?") . colName) updCs) ++ " WHERE " ++ prdStr execute conn (fromString str) (updVs ++ prdVs) _ -> fail "Must update all in the same column" sqlDelete :: Connection -> Table rowid cs -> [Pred] -> IO () sqlDelete conn tbl pred = do let (prdStr, _, prdCs, prdVs) = unpred pred case nubOrd $ tblName tbl : map colTable prdCs of [t] -> do let str = "DELETE FROM " ++ t ++ " WHERE " ++ prdStr execute conn (fromString str) prdVs ts -> fail $ "sqlDelete, can only delete from one table but you are touching: " ++ unwords ts sqlSelect :: (FromRow (Uncolumns cs), Columns cs) => Connection -> cs -> [Pred] -> IO [Uncolumns cs] sqlSelect conn cols pred = do let outCs = columns cols let (prdStr, prdDs, prdCs, prdVs) = unpred pred let str = "SELECT " ++ intercalate ", " [(if c `elem` prdDs then "DISTINCT " else "") ++ colTable ++ "." 
++ colName | c@Column{..} <- outCs] ++ " " ++ "FROM " ++ intercalate ", " (nubOrd $ map colTable $ outCs ++ prdCs) ++ " WHERE " ++ prdStr query conn (fromString str) prdVs sqlEnsureTable :: Connection -> Table rowid cs -> IO () sqlEnsureTable conn Table{..} = do let fields = intercalate ", " $ [colName ++ " " ++ colSqlType | Column{..} <- tblCols] ++ ["PRIMARY KEY (" ++ intercalate ", " (map colName tblKeys) ++ ")" | not $ null tblKeys] let str = "CREATE TABLE " ++ tblName ++ " (" ++ fields ++ ")" existing <- query conn (fromString "SELECT sql FROM sqlite_master WHERE type = ? AND name = ?") ("table", tblName) case existing of [Only s] | str == s -> return () [] -> execute_ conn $ fromString str _ -> error $ "Trying to ensure table " ++ tblName ++ " but mismatch" ++ "\nCreating:\n" ++ str ++ "\nGot:\n" ++ unlines (map fromOnly existing) sqlUnsafe :: (ToRow q, FromRow r) => Connection -> String -> q -> IO [r] sqlUnsafe conn str q = query conn (fromString str) q data Upd = forall a . ToField a => Column a := a unupdate :: Upd -> (Column_, SQLData) unupdate (c := v) = (column_ c, toField v) data Pred = PNull Column_ | PNotNull Column_ | PEq Column_ SQLData | PNEq Column_ SQLData | PGt Column_ SQLData | PLt Column_ SQLData | PEqP Column_ Column_ | PLike Column_ SQLData | PAnd [Pred] | PDistinct Column_ | POrder Column_ String | PLimit Int distinct :: Column c -> Pred distinct c = PDistinct (column_ c) limit :: Int -> Pred limit = PLimit orderDesc :: Column UTCTime -> Pred orderDesc c = POrder (column_ c) $ colTable c ++ "." ++ colName c ++ " DESC" orderAsc :: Column UTCTime -> Pred orderAsc c = POrder (column_ c) $ colTable c ++ "." ++ colName c ++ " ASC" nullP :: Column (Maybe c) -> Pred nullP c = PNull (column_ c) likeP :: ToField c => Column c -> c -> Pred likeP (column_ -> c) (toField -> v) = PLike c v (%&&) :: Pred -> Pred -> Pred (%&&) a b = PAnd [a,b] (%==) :: ToField c => Column c -> c -> Pred (%==) (column_ -> c) (toField -> v) | v == SQLNull = PNull c | otherwise = PEq c v (%>) :: ToField c => Column c -> c -> Pred (%>) (column_ -> c) (toField -> v) | v == SQLNull = error $ "Can't %> on a NULL" | otherwise = PGt c v (%<) :: ToField c => Column c -> c -> Pred (%<) (column_ -> c) (toField -> v) | v == SQLNull = error $ "Can't %> on a NULL" | otherwise = PLt c v (%/=) :: ToField c => Column c -> c -> Pred (%/=) (column_ -> c) (toField -> v) | v == SQLNull = PNotNull c | otherwise = PNEq c v (%==%) :: ToField c => Column c -> Column c -> Pred (%==%) c1 c2 | isNull c1 || isNull c2 = error $ show ("Column must be NOT NULL to do %==%", show c1, show c2) | otherwise = PEqP (column_ c1) (column_ c2) where isNull c = not $ colSqlType c == "" || " NOT NULL" `isSuffixOf` colSqlType c unpred :: [Pred] -> (String, [Column_], [Column_], [SQLData]) unpred ps = let (a,b,c) = f $ PAnd pred in (a ++ (if null order then "" else " ORDER BY " ++ unwords [x | POrder _ x <- order]) ++ (if null limit then "" else " LIMIT " ++ head [show x | PLimit x <- limit]) , [x | PDistinct x <- dist], b ++ [x | POrder x _ <- order], c) where isDistinct PDistinct{} = True; isDistinct _ = False isOrder POrder{} = True; isOrder _ = False isLimit PLimit{} = True; isLimit _ = False (dist, (order, (limit, pred))) = second (second (partition isLimit) . partition isOrder) $ partition isDistinct ps g Column{..} = colTable ++ "." 
++ colName f (PNull c) = (g c ++ " IS NULL", [c], []) f (PNotNull c) = (g c ++ " IS NOT NULL", [c], []) f (PEq c v) = (g c ++ " == ?", [c], [v]) -- IS always works, but is a LOT slower f (PNEq c v) = (g c ++ " != ?", [c], [v]) -- IS always works, but is a LOT slower f (PGt c v) = (g c ++ " > ?", [c], [v]) -- IS always works, but is a LOT slower f (PLt c v) = (g c ++ " < ?", [c], [v]) -- IS always works, but is a LOT slower f (PEqP c1 c2) = (g c1 ++ " = " ++ g c2, [c1,c2], []) f (PLike c v) = (g c ++ " LIKE ?", [c], [v]) f (PAnd []) = ("NULL IS NULL", [], []) f (PAnd [x]) = f x f (PAnd xs) = (intercalate " AND " ["(" ++ s ++ ")" | s <- ss], concat cs, concat vs) where (ss,cs,vs) = unzip3 $ map f xs f _ = error "Unrecognised Pred" instance FromField () where fromField _ = return ()
Pitometsu/bake
src/General/Database.hs
bsd-3-clause
11,552
0
23
2,783
5,015
2,624
2,391
193
17
{-#LANGUAGE ParallelListComp, RecordWildCards, DeriveGeneric#-}
module Main where

import CV.Image
import CV.Tracking
import CV.Bindings.Types
import CV.Drawing
import CV.ImageOp
import Utils.Rectangle
import Text.Printf
import CV.ImageMathOp hiding ((#>))
import CV.Filters
import CV.ColourUtils
import Utils.Rectangle
import Data.List
import Data.Function
import Graphics.Tools.DefaultGUI

data Parameters = P {x     :: IntRange Zero Hundred
                    ,y     :: IntRange Zero Hundred
                    ,scale :: IntRange One Fifty
                    } deriving (Generic)

instance Default  Parameters
instance Persist  Parameters
instance Tangible Parameters

main = defaultGUI "Treetop_params" "dsm.tif" f

f P{..} img = resultI
 where
   prec :: Image GrayScale D32
   prec = gaussian (11,11) img
   (w,h) = getSize img
   resultPts = [ (val,res)
               | x <- [0,15..w-10], y <- [0,15..h-10]
               , let (val,res) = hillClimber (indfun prec) (-1,(x,y)) ]
   groups = group2 (\a b -> abs (a-b) < value scale) (snd.snd) (fst.snd) $ resultPts
   resultI = img-- <## [ lineOp 0.8 1 s (res) #>
                --       circleOp 2 (res) 1 Filled
                --     | (s,res) <- resultPts
                --     ]
                 <## [circleOp 0 m 10 (Stroked 1)
                     | rs <- groups, let (v,m) = maximumBy (compare`on`fst) rs]
   -- (val,res) =
   --     meanShift prec s1 (EPS 1)

group2 equal a b = concatMap (f a) . f b
  where f a = groupBy (equal `on` a) . sortBy (compare `on` a)

indfun :: Image GrayScale D32 -> ((Int,Int) -> D32)
indfun img c@(x,y)
  | x<0       = 0
  | y<0       = 0
  | x>=w      = 0
  | y>=h      = 0
  | otherwise = getPixel c img
 where (w,h) = getSize img

hillClimber :: ((Int,Int) -> D32) -> (D32, (Int,Int)) -> (D32, (Int,Int))
hillClimber f (o,init)
  | (o+0.001) >= v = (o,init)
  | otherwise      = hillClimber f (v,next)
 where
   (v,next) = maximumBy (compare`on` fst) $ [(f n, n) | n <- neighbours init ]
   neighbours (x,y) = [(x+i,y+j) | i<-[-5,-4..5], j <- [-5,-4..5], (i,j) /= (0,0)]
BeautifulDestinations/CV
examples/TreeTop.hs
bsd-3-clause
2,158
0
15
664
891
489
402
47
1
-- A simple program to demonstrate Gtk2Hs.
module Main (Main.main) where

import Graphics.UI.Gtk

main :: IO ()
main = do
  initGUI
  -- Create a new window
  window <- windowNew
  -- Here we connect the "destroy" event to a signal handler.
  -- This event occurs when we call widgetDestroy on the window
  -- or if the user closes the window.
  on window objectDestroy mainQuit
  -- Sets the border width and title of the window. Note that border width
  -- attribute is in 'Container' from which 'Window' is derived.
  set window [ containerBorderWidth := 10, windowTitle := "Hello World" ]
  -- Creates a new button with the label "Hello World".
  button <- buttonNew
  set button [ buttonLabel := "Hello World" ]
  -- When the button receives the "clicked" signal, it will call the
  -- function given as the second argument.
  on button buttonActivated (putStrLn "Hello World")
  -- Gtk+ allows several callbacks for the same event.
  -- This one will cause the window to be destroyed by calling
  -- widgetDestroy. The callbacks are called in the sequence they were added.
  on button buttonActivated $ do
    putStrLn "A \"clicked\"-handler to say \"destroy\""
    widgetDestroy window
  -- Insert the hello-world button into the window.
  set window [ containerChild := button ]
  -- The final step is to display this newly created widget. Note that this
  -- also allocates the right amount of space to the windows and the button.
  widgetShowAll window
  -- All Gtk+ applications must have a main loop. Control ends here
  -- and waits for an event to occur (like a key press or mouse event).
  -- This function returns if the program should finish.
  mainGUI
k0001/gtk2hs
gtk/demo/hello/World.hs
gpl-3.0
1,669
0
10
340
179
93
86
17
1
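An illustrative follow-up sketch to the Gtk2Hs demo above (the countClicks name and the IORef counter are assumptions, not part of the original file): it keeps per-button click state and rewrites the button label on every press, using only the gtk2hs calls already shown plus Data.IORef from base.

import Data.IORef (newIORef, modifyIORef', readIORef)

-- Hypothetical variant: count clicks and show the running total on the button.
countClicks :: IO ()
countClicks = do
  initGUI
  window  <- windowNew
  button  <- buttonNew
  counter <- newIORef (0 :: Int)   -- illustrative state, not in the original demo
  set button [ buttonLabel := "Clicked 0 times" ]
  on button buttonActivated $ do
    modifyIORef' counter (+1)
    n <- readIORef counter
    set button [ buttonLabel := ("Clicked " ++ show n ++ " times") ]
  on window objectDestroy mainQuit
  set window [ containerChild := button ]
  widgetShowAll window
  mainGUI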
module Eval where

import qualified Data.Set as S

{-@ measure keys @-}
keys :: (Ord k) => [(k, v)] -> S.Set k
keys []       = S.empty
keys (kv:kvs) = (S.singleton (myfst kv)) `S.union` (keys kvs)

{-@ measure myfst @-}
myfst :: (a, b) -> a
myfst (x, _) = x

-- this is fine
{-@ measure okeys :: [(a, b)] -> (S.Set a)
    okeys ([])     = (Set_empty 0)
    okeys (kv:kvs) = (Set_cup (Set_sng (fst kv)) (okeys kvs))
  @-}
abakst/liquidhaskell
tests/pos/Keys.hs
bsd-3-clause
423
0
9
104
132
77
55
7
1
main = case 1 > 10 of
  True -> do putStrLn "hello"
             putStrLn "there"
  False -> do putStrLn "blah"
              putStrLn "blah"
mpickering/ghc-exactprint
tests/examples/ghc710/FooExpected.hs
bsd-3-clause
151
0
10
61
50
21
29
6
2
import System.FilePath.Glob
import Test.DocTest

main :: IO ()
main = glob "src/**/*.hs" >>= doctest
angerman/data-bitcode-edsl
test/Doctest.hs
bsd-3-clause
101
0
6
15
34
18
16
4
1
module T9681 where

foo = 1 + "\n"
ghc-android/ghc
libraries/base/tests/T9681.hs
bsd-3-clause
35
0
5
9
13
8
5
2
1
-- From the blog post Fun With XPolyKinds : Polykinded Folds -- http://www.typesandotherdistractions.com/2012/02/fun-with-xpolykinds-polykinded-folds.html {- In the following, I will write a polykinded version of the combinators fold and unfold, along with three examples: folds for regular datatypes (specialized to kind *), folds for nested datatypes (specialized to kind * -> *), and folds for mutually recursive data types (specialized to the product kind (*,*)). The approach should generalise easily enough to things such as types indexed by another kind (e.g. by specializing to kind Nat -> *, using the XDataKinds extension), or higher order nested datatypes (e.g. by specializing to kind (* -> *) -> (* -> *)). The following will compile in the new GHC 7.4.1 release. We require the following GHC extensions: -} {-# LANGUAGE GADTs #-} {-# LANGUAGE PolyKinds #-} {-# LANGUAGE KindSignatures #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE RankNTypes #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE StandaloneDeriving #-} module Main where {- The basic fold and unfold combinators can be written as follows: fold phi = phi . fmap (fold phi) . out unfold psi = in . fmap (unfold psi) . psi The idea now is to generalize these combinators by working over different categories. We can capture the basic operations in a category with a typeclass: -} class Category hom where ident :: hom a a compose :: hom a b -> hom b c -> hom a c {- A category has two operations: an identity morphism for every object, and for every two compatible morphisms, the composition of those morphisms. In earlier versions of GHC, the type hom would have been specialized to kind * -> * -> *, but with the new PolyKinds extension, hom is polykinded, and the Category typeclass can be instantiated to k -> k -> * for any kind k. This means that in addition to all of the Category instances that we could have written before, we can now write instances of Category for type constructors, type constructor constructors, etc. Here is the instance for the category Hask of Haskell types. Objects are Haskell types and morphisms are functions between types. The identity is the regular polymorphic identity function id, and composition is given by the (flipped) composition operator (.) -} instance Category (->) where ident = id compose = flip (.) {- Another example is the category of type constructors and natural transformations. A natural transformation is defined as follows: -} newtype Nat f g = Nat { nu :: (forall a. f a -> g a) } {- Here is the Category instance for natural transformations. This time the type hom is inferred to have kind (* -> *) -> (* -> *) -> *. Identity and composition are both defined pointwise. -} instance Category (Nat :: (* -> *) -> (* -> *) -> *) where ident = Nat id compose f g = Nat (nu g . nu f) {- Let's define a type class which will capture the idea of a fixed point in a category. This generalizes the idea of recursive types in Hask: -} class Rec hom f t where _in :: hom (f t) t out :: hom t (f t) {- The class Rec defines two morphisms: _in, which is the constructor of the fixed point type t, and out, its destructor. The final piece is the definition of a higher order functor, which generalizes the typeclass Functor: -} class HFunctor hom f where hmap :: hom a b -> hom (f a) (f b) {- Note the similarity with the type signature of the function fmap :: (Functor f) => (a -> b) -> f a -> f b. 
Indeed, specializing hom to (->) in the definition of HFunctor gives back the type signature of fmap. Finally, we can define folds and unfolds in a category. The definitions are as before, but with explicit composition, constructors and destructors replaced with the equivalent type class methods, and hmap in place of fmap: -} fold :: (Category hom, HFunctor hom f, Rec hom f rec) => hom (f t) t -> hom rec t fold phi = compose out (compose (hmap (fold phi)) phi) unfold :: (Category hom, HFunctor hom f, Rec hom f rec) => hom t (f t) -> hom t rec unfold phi = compose phi (compose (hmap (unfold phi)) _in) -- Now for some examples. -- The first example is a regular recursive datatype of binary leaf -- trees. The functor FTree is the base functor of this recursive type: data FTree a b = FLeaf a | FBranch b b data Tree a = Leaf a | Branch (Tree a) (Tree a) -- An instance of Rec shows the relationship between the defining functor -- and the recursive type itself: instance Rec (->) (FTree a) (Tree a) where _in (FLeaf a) = Leaf a _in (FBranch a b) = Branch a b out (Leaf a) = FLeaf a out (Branch a b) = FBranch a b -- FTree is indeed a functor, so it is also a HFunctor: instance HFunctor (->) (FTree a) where hmap f (FLeaf a) = FLeaf a hmap f (FBranch a b) = FBranch (f a) (f b) -- These instances are enough to define folds and unfolds for this -- type. The following fold calculates the depth of a tree: depth :: Tree a -> Int depth = (fold :: (FTree a Int -> Int) -> Tree a -> Int) phi where phi :: FTree a Int -> Int phi (FLeaf a) = 1 phi (FBranch a b) = 1 + max a b -- The second example is a fold for the nested (or non-regular) -- datatype of complete binary leaf trees. The higher order functor -- FCTree defines the type constructor CTree as its fixed point: data FCTree f a = FCLeaf a | FCBranch (f (a, a)) -- FCTree :: (* -> *) -> * -> * data CTree a = CLeaf a | CBranch (CTree (a, a)) -- Again, we define type class instances for HFunctor and Rec: instance HFunctor Nat FCTree where hmap (f :: Nat (f :: * -> *) (g :: * -> *)) = Nat ff where ff :: forall a. FCTree f a -> FCTree g a ff (FCLeaf a) = FCLeaf a ff (FCBranch a) = FCBranch (nu f a) instance Rec Nat FCTree CTree where _in = Nat inComplete where inComplete (FCLeaf a) = CLeaf a inComplete (FCBranch a) = CBranch a out = Nat outComplete where outComplete(CLeaf a) = FCLeaf a outComplete(CBranch a) = FCBranch a -- Morphisms between type constructors are natural transformations, so we -- need a type constructor to act as the target of the fold. For our -- purposes, a constant functor will do: data K a b = K a -- K :: forall k. * -> k -> * -- And finally, the following fold calculates the depth of a complete binary leaf tree: cdepth :: CTree a -> Int cdepth c = let (K d) = nu (fold (Nat phi)) c in d where phi :: FCTree (K Int) a -> K Int a phi (FCLeaf a) = K 1 phi (FCBranch (K n)) = K (n + 1) {- The final example is a fold for the pair of mutually recursive datatype of lists of even and odd lengths. The fold will take a list of even length and produce a list of pairs. We cannot express type constructors in Haskell whose return kind is anything other than *, so we cheat a little and emulate the product kind using an arrow kind Choice -> *, where Choice is a two point kind, lifted using the XDataKinds extension: -} data Choice = Fst | Snd -- A morphism of pairs of types is just a pair of morphisms. 
For -- technical reasons, we represent this using a Church-style encoding, -- along with helper methods, as follows: newtype PHom h1 h2 p1 p2 = PHom { runPHom :: forall r. (h1 (p1 Fst) (p2 Fst) -> h2 (p1 Snd) (p2 Snd) -> r) -> r } mkPHom f g = PHom (\h -> h f g) fstPHom p = runPHom p (\f -> \g -> f) sndPHom p = runPHom p (\f -> \g -> g) -- Now, PHom allows us to take two categories and form the product category: instance (Category h1, Category h2) => Category (PHom h1 h2) where ident = mkPHom ident ident compose p1 p2 = mkPHom (compose (fstPHom p1) (fstPHom p2)) (compose (sndPHom p1) (sndPHom p2)) -- We can define the types of lists of even and odd length as -- follows. Note that the kind annotation indicates the appearance of the -- kind Choice -> *: data FAlt :: * -> (Choice -> *) -> Choice -> * where FZero :: FAlt a p Fst FSucc1 :: a -> (p Snd) -> FAlt a p Fst FSucc2 :: a -> (p Fst) -> FAlt a p Snd data Alt :: * -> Choice -> * where Zero :: Alt a Fst Succ1 :: a -> Alt a Snd -> Alt a Fst Succ2 :: a -> Alt a Fst -> Alt a Snd deriving instance Show a => Show (Alt a b) -- Again, we need to define instances of Rec and HFunctor: instance Rec (PHom (->) (->)) (FAlt a) (Alt a) where _in = mkPHom f g where f,g :: FAlt a (Alt a) s -> Alt a s f FZero = Zero f (FSucc1 a b) = Succ1 a b g (FSucc2 a b) = Succ2 a b out = mkPHom f g where f,g :: Alt a s -> FAlt a (Alt a) s f Zero = FZero f (Succ1 a b) = FSucc1 a b g (Succ2 a b) = FSucc2 a b instance HFunctor (PHom (->) (->)) (FAlt a) where hmap p = mkPHom hf hg where hf FZero = FZero hf (FSucc1 a x) = FSucc1 a (sndPHom p x) hg (FSucc2 a x) = FSucc2 a (fstPHom p x) -- As before, we create a target type for our fold, and this time a type synonym as well: data K2 :: * -> * -> Choice -> * where K21 :: a -> K2 a b Fst K22 :: b -> K2 a b Snd type PairUpResult a = K2 [(a, a)] (a, [(a, a)]) -- At last, here is the fold pairUp, taking even length lists to lists of pairs: pairUp :: Alt a Fst -> [(a, a)] pairUp xs = let (K21 xss) = (fstPHom (fold (mkPHom phi psi))) xs in xss where phi :: FAlt y (K2 v (r,[(y,r)])) s -> K2 [(y,r)] (y,z) s phi FZero = K21 [] phi (FSucc1 x1 (K22 (x2, xss))) = K21 ((x1, x2):xss) psi :: FAlt y (K2 z w) s -> K2 [x] (y,z) s psi (FSucc2 x (K21 xss)) = K22 (x, xss) main = print (Succ1 (0::Int) $ Succ2 1 $ Succ1 2 $ Succ2 3 $ Succ1 4 $ Succ2 5 Zero)
ghc-android/ghc
testsuite/tests/polykinds/Freeman.hs
bsd-3-clause
9,490
1
15
2,120
2,407
1,256
1,151
109
2
{-|
Module      : BreadU.Pages.CSS.Names
Description : Names for our own CSS-classes.
Stability   : experimental
Portability : POSIX

Names for our own CSS-classes. We don't want to work with raw string literals
explicitly, so we just define a type with nullary constructors corresponding to
names of classes. These constructors can be represented as a 'Text', so we'll
use it in our own CSS as well as in HTML markup. The only classes we use as a
raw string literals are third-party classes, for example from Bootstrap library.
-}

module BreadU.Pages.CSS.Names
    ( ClassName(..)
    ) where

import Text.Blaze ( ToValue(..) )
import TextShow   ( TextShow(..), fromText )
import Data.Text  ( pack )

-- | Type for all our own CSS-classes.
data ClassName
    = LanguageSwitcher | LanguageSwitcherDelimiter | AboutInfo | CurrentLanguage
    | FormBlock | FoodFormRowsSeparator | FoodFormFirstItem | FoodFormItem
    | FoodFormItemInputs | FoodItemsSeparator | FoodNameInfo | FoodAmountInfo
    | FoodInputClass | CarbsInputClass | BUInputClass | GramsInputClass
    | Or | TotalBUQuantity | AddFood | AddFoodButton | Calculate | CalculateButton
    | MainButtonIcon | MainButtonIconSeparator | InfoIconFoodForm | RemoveIconFoodForm
    | AuthorInfo | AuthorInfoMailToSeparator | MailToIcon
    | SocialButtons | SocialButtonsSeparator | BlueCircleArea | BlueCircle
    | Block404 | Block404Mark | Block404Mark0 | Block404Description
    deriving (Show)

-- | We want to use constructors as attributes' values in HTML markup.
instance ToValue ClassName where
    toValue = toValue . show

{-|
  We want 'Text'-representation of all constructors, but it's impossible to
  derive from 'TextShow' class explicitly. So we use standard 'show' to convert
  'String'-representations of constructors into 'Text'-representations.
-}
instance TextShow ClassName where
    showb = fromText . pack . show
denisshevchenko/breadu.info
src/lib/BreadU/Pages/CSS/Names.hs
mit
2,052
0
7
494
220
141
79
48
0
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE TemplateHaskell #-}

module PrefixCompression(runTests) where

import Test.QuickCheck
import Text.Printf

main :: IO ()
main = interact run

run :: String -> String
run input = output
  where
    output = unlines . map print' $ [p, a', b']
    print' :: String -> String
    print' string = printf "%d %s" (length string) string
    a:b:_ = lines input
    p  = map fst . takeWhile (uncurry (==)) $ zip a b
    a' = drop (length p) a
    b' = drop (length p) b

prop_run = run "\
    \abcdefpr\n\
    \abcpqr" == "\
    \3 abc\n\
    \5 defpr\n\
    \3 pqr\n"

prop_run1 = run "\
    \kitkat\n\
    \kit" == "\
    \3 kit\n\
    \3 kat\n\
    \0 \n"

prop_run2 = run "\
    \puppy\n\
    \puppy" == "\
    \5 puppy\n\
    \0 \n\
    \0 \n"

return []
runTests = $quickCheckAll
alexander-matsievsky/HackerRank
All_Domains/Functional_Programming/Recursion/src/PrefixCompression.hs
mit
778
0
11
191
244
126
118
24
1
{-# LANGUAGE JavaScriptFFI #-}

-- | FIXME: doc
module GHCJS.Electron.PowerMonitor where
taktoa/ghcjs-electron
src/GHCJS/Electron/PowerMonitor.hs
mit
89
0
3
12
9
7
2
2
0
{-# LANGUAGE LambdaCase #-}
module TC.Util where

import Control.Monad.Gen
import Control.Monad.Except
import Data.Foldable (foldMap)
import qualified Data.Set as S

import Source

data TypeError = NoSuchName Name
               | TySynCycle Name
               | OccursFail Name
               | CannotMerge
               | CannotUnify
               deriving Show

type TCM = GenT Name (Except TypeError)

patVars :: Pat a -> S.Set Name
patVars = \case
  WildP -> S.empty
  VarP _ n -> S.singleton n
  ConP _ _ pats -> foldMap patVars pats
  ListP _ pats -> foldMap patVars pats
  TupleP _ l r -> patVars l `S.union` patVars r
  _ -> S.empty

nBoundVars :: NestedDecl a -> S.Set Name
nBoundVars = \case
  NSig{} -> S.empty
  NFun (Fun n _) -> S.singleton n
  NTop (Top p _ _) -> patVars p

boundVars :: Decl a -> S.Set Name
boundVars = \case
  DFun (Fun n _) -> S.singleton n
  DTop (Top p _ _) -> patVars p
  DClass _ _ _ nds -> foldMap nBoundVars nds
  _ -> S.empty

kindOf :: Type -> Kind
kindOf = \case
  TVar (Just k) _ -> k
  TCon (Just k) _ -> k
  TApp f _ -> case kindOf f of
    KFun _ t -> t
  TFun -> KFun Star (KFun Star Star)
  TTuple -> KFun Star (KFun Star Star)
  TList -> KFun Star Star
  TInt -> Star
  TDouble -> Star
  TUnit -> Star
  TBool -> Star
  TIO -> KFun Star Star
  TChar -> Star
jozefg/hi
src/TC/Util.hs
mit
1,316
0
11
370
547
272
275
47
12
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TypeOperators #-}

module Routes where

-- Libs
import Servant
import Servant.Generic

-- Source
import Models

-- Abstraction over Content Types
data Routes path = Routes
    { api :: path :- ApiRouter
    } deriving Generic

type Router = ToServant (Routes AsApi)

type Version = "v1"

data ApiRoutes path = ApiRoutes
    { accounts :: path :- Version :> "accounts" :> Get '[JSON] [Account]
    , gigs     :: path :- Version :> "shows"    :> Get '[JSON] [Gig]
    } deriving Generic

type ApiRouter = ToServant (ApiRoutes AsApi)
erlandsona/caldwell-api
library/Routes.hs
mit
600
0
14
125
155
92
63
17
0
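A sketch of how a server for these generic routes might be assembled with servant-generic's toServant. The handler bodies, the serve call, and the Application import are illustrative assumptions and are not taken from the repository; the real handlers would presumably query a database.

import Network.Wai (Application)   -- assumed import for the WAI Application type

-- Hypothetical handlers for the generic routes above.
apiHandlers :: ApiRoutes AsServer
apiHandlers = ApiRoutes { accounts = pure [], gigs = pure [] }

server :: ToServant (Routes AsServer)
server = toServant Routes { api = toServant apiHandlers }

app :: Application
app = serve (Proxy :: Proxy Router) server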
{-# LANGUAGE OverloadedStrings, Rank2Types #-} module Web.Authenticate.SQRL where import Web.Authenticate.SQRL.Types --import Data.Char (isDigit, isAlpha) import Crypto.Random import Crypto.Cipher.AES import qualified Crypto.Ed25519.Exceptions as ED25519 import Control.Applicative import Control.Concurrent.MVar import Data.Byteable import Data.Binary import Data.Bits import Data.Time.Clock.POSIX --import Data.QRCode import Data.Text (Text) import qualified Data.Text as T import qualified Data.Text.Encoding as TE import Data.ByteString (ByteString) import qualified Data.ByteString.Lazy as BSL import qualified Data.ByteString as BS import System.IO (hPutStrLn, stderr) import System.IO.Error import System.IO.Unsafe (unsafePerformIO) import Data.Maybe (fromJust, fromMaybe) -- | A type type SQRL t = SQRLServer sqrl => sqrl -> Either String t -- | Trololololo - any container which contains a nut, or two. class NutSack f where tickleNuts :: (SQRLNutEx a -> SQRLNutEx a) -> f a -> f a crackNuts :: Binary a => f a -> f a crackNuts = tickleNuts (\x -> case decryptSQRLNut x of { Left e -> error ("crackNuts: " ++ e) ; Right r -> r }) wrapNuts :: (Binary a, SQRLServer sqrl) => sqrl -> Nounce -> f a -> f a wrapNuts sqrl nounce = tickleNuts (encryptSQRLNut sqrl nounce) instance NutSack SQRLNutEx where tickleNuts f = f instance NutSack SQRLServerData where tickleNuts f x = x { serverNut = f (serverNut x) } {-# NOINLINE sqrlCounter #-} sqrlCounter :: MVar (Counter, SystemRandom) sqrlCounter = unsafePerformIO ((newGenIO :: IO SystemRandom) >>= newMVar . (,) 0) {-# NOINLINE sqrlKey' #-} sqrlKey' :: ByteString sqrlKey' = unsafePerformIO $ catchIOError (pad16_8 <$> BS.readFile "sqrl-nut-key.dat") $ \e -> do hPutStrLn stderr $ if isDoesNotExistError e then "sqrl-nut-key.dat not found. Generating a temporary key." else if isPermissionError e then "sqrl-nut-key.dat is not accessible due to permissions. Generating a temporary key." else "sqrl-nut-key.dat can not be read because of some unknown error (" ++ show e ++ "). Generating a temporary key." modifyMVar sqrlCounter $ \(i, g) -> case (\(x, g') -> ((i, g'), x)) <$> genBytes 16 g of Left err -> fail $ "sqrlIV': default key could not be created: " ++ show err Right r' -> return r' data SQRLAuthenticated = CurrentAuth IdentityKey | PreviousAuth IdentityKey | BothAuth IdentityKey IdentityKey deriving (Show, Eq) readClientPost :: Binary a => BSL.ByteString -> SQRL (SQRLClientPost a, SQRLAuthenticated) readClientPost b = \sqrl -> case f "server" of Nothing -> Left "readClientPost: no server data." Just sd -> case f "client" of Nothing -> Left "readClientPost: no client data." Just cd -> case readSQRLClientData cd of -- Left err -> Left $ "readClientPost: Client decoding failed: " ++ err -- Right (Left err) -> Left $ "readClientPost: " ++ err -- Right (Right cl) -> case u <$> f "sign" of Left err -> Left $ "readClientPost: " ++ err Right cl -> case f "ids" of Nothing -> Left "readClientPost: No signatures." Just sg -> case readSQRLSignatures sg of Left errm -> Left $ "readClientPost: " ++ errm Right sig -> let signdata = BS.append sd cd cid = clientIdentity cl maybeError err = fromMaybe (error err) cauth0 = ED25519.valid signdata (maybeError "public key size failure for cID" $ ED25519.importPublic $ publicKey cid) (ED25519.Sig $ signature $ signIdentity sig) cauth1 = case (maybeError "public key size failure for pID" . ED25519.importPublic . publicKey) <$> clientPreviousID cl of Nothing -> False Just key -> case (ED25519.Sig . 
signature) <$> signPreviousID sig of Nothing -> False Just sign -> ED25519.valid signdata key sign cauth = if cauth1 then BothAuth cid $ fromJust $ clientPreviousID cl else CurrentAuth cid in if not cauth0 then Left "readClientPost: Signature verification failed for current identity" else case u sd >>= fsdata sqrl of Left err -> Left $ "readClientPost: Server decoding failed: " ++ err Right sv -> Right (SQRLClientPost { sqrlServerData = sv , sqrlClientData = cl , sqrlSignatures = sig , sqrlPostAll = bs }, cauth) where bs = filter (\(x, y) -> not (BS.null x || BS.null y)) $ map (\z -> let (x, y) = BSL.break (eq==) z in (BSL.toStrict x, BSL.toStrict $ BSL.tail y)) $ BSL.split amp b amp = (fromIntegral $ fromEnum '&') :: Word8 eq = (fromIntegral $ fromEnum '=') :: Word8 f = flip lookup bs u = dec64unpad fsdata :: (SQRLServer sqrl, Binary a) => sqrl -> ByteString -> Either String (Either SQRLUrl (SQRLServerData a)) fsdata sqrl x = if BS.take 6 x `elem` ["sqrl:/", "qrl://"] then onleft (\e -> Left ("sqrl-link decoding error: " ++ show e)) (TE.decodeUtf8' x) >>= readSQRLUrl >>= \r -> Right (Left r) else runSQRL sqrl (serverReadSQRLServerData x) >>= \r -> Right (Right r) onleft g (Left x) = g x onleft _ (Right x) = Right x -- | Create a nut for use in SQRL. newSQRLNut :: Binary a => IPBytes -> IO (SQRLNutEx a) newSQRLNut ip = newSQRLNut' ip Nothing -- | Create a nut for use in SQRL. Extra data may be encrypted together with the nut to allow session related data to be sent. newSQRLNut' :: Binary a => IPBytes -> Maybe a -> IO (SQRLNutEx a) newSQRLNut' ip ex = do (i, r) <- modifyMVar sqrlCounter $ \x -> return $ case incrementSQRL undefined x of { Left err -> (x, error $ "newSQRLNut': " ++ show err) ; Right y -> y } t <- truncate <$> getPOSIXTime return SQRLNut { nutIP = ip, nutTime = t, nutCounter = i, nutRandom = r, nutQR = False, nutExtra = ex } where incrementSQRL :: (Integral i, Binary r, FiniteBits r, CryptoRandomGen g) => r -> (i, g) -> Either GenError ((i, g), (i, r)) incrementSQRL r (i, g) = (\(x, g') -> ((i+1, g'), (i, decode $ BSL.fromStrict x))) <$> genBytes (fromIntegral $ finiteBitSize r `div` 8) g {-# NOINLINE sqrlIV' #-} sqrlIV' :: ByteString sqrlIV' = unsafePerformIO $ catchIOError (pad16_8 <$> BS.readFile "sqrl-nut-iv.dat") $ \e -> do hPutStrLn stderr $ if isDoesNotExistError e then "sqrl-nut-iv.dat not found. Generating a temporary IV." else if isPermissionError e then "sqrl-nut-iv.dat is not accessible due to permissions. Generating a temporary IV." else "sqrl-nut-iv.dat can not be read because of some unknown error (" ++ show e ++ "). Generating a temporary IV." modifyMVar sqrlCounter $ \(i, g) -> case (\(x, g') -> ((i, g'), x)) <$> genBytes 16 g of Left err -> fail $ "sqrlIV': default IV could not be created: " ++ show err Right r' -> return r' sqrlGenNounce :: IO (Maybe Nounce) sqrlGenNounce = sqrlRandBytes 12 sqrlRandBytes :: Int -> IO (Maybe ByteString) sqrlRandBytes l = modifyMVar sqrlCounter $ \(i, g) -> case (\(x, g') -> ((i, g'), x)) <$> genBytes l g of Left err -> hPutStrLn stderr ("sqrlRandBytes: random bytes could not be generated: " ++ show err) >> return ((i, g), Nothing) Right (a,b) -> return (a, Just b) pad16_8 :: ByteString -> ByteString pad16_8 x = let l = BS.length x l_ = l `mod` 8 l' | l < 16 = 16 | l_ == 0 = l | otherwise = l + 8 - l_ in if l' == l then x else BS.append x $ BS.replicate (l' - l) 27 -- | An instance of a SQRL server. class SQRLServer sqrl where -- | The IV used for encryption of 'SQRLNut's. 
sqrlIV :: sqrl -> ByteString sqrlIV = const sqrlIV' -- | The key used for encryptions of 'SQRLNut's. sqrlKey :: sqrl -> ByteString sqrlKey = const sqrlKey' -- | The versions supported by this server (default is only 1). sqrlVersion :: sqrl -> SQRLVersion sqrlVersion = const sqrlVersion1 -- | If the SQRL server is runnung HTTPS. sqrlTLS :: sqrl -> Bool -- | The domain (and optional port) the SQRL server is running at. sqrlDomain :: sqrl -> Text -- | The path the SQRL server is listening to. sqrlPath :: sqrl -> Text data SQRLServerLocal = SQRLServerLocal instance SQRLServer SQRLServerLocal where sqrlTLS _ = False sqrlDomain _ = "localhost" sqrlPath _ = "/sqrl" -- | A future compatible way to run a SQRL server. runSQRL :: SQRLServer sqrl => sqrl -> SQRL t -> Either String t runSQRL sqrl sqrlf = sqrlf sqrl -- | Server specialices version of 'readSQRLServerData'. serverReadSQRLServerData :: Binary a => ByteString -> SQRL (SQRLServerData a) serverReadSQRLServerData t sqrl = readSQRLServerData (sqrlKey sqrl) (sqrlIV sqrl) t -- | Encrypts a nut. If the nut is already encrypted this is the identity. encryptSQRLNut :: (SQRLServer sqrl, Binary a) => sqrl -> Nounce -> SQRLNutEx a -> SQRLNutEx a encryptSQRLNut _ _ n@(SQRLNutEncrypted {}) = n encryptSQRLNut sqrl nounce n = SQRLNutEncrypted { encNutIV = iv , encNutKey = key , encNutVer = toBytes tag , encNutData = base , encNutExtra = extra , encNutNounce = nounce } where (base, extra) = BS.splitAt 16 crypt key = sqrlKey sqrl (crypt, tag) = encryptGCM (initAES key) iv "HS-SQRL" $ BSL.toStrict $ encode n bsxor :: ByteString -> ByteString -> ByteString bsxor x y = BS.pack $ BS.zipWith xor x y iv = let (p1, p2) = BS.splitAt (BS.length nounce) (sqrlIV sqrl) in BS.append (p1 `bsxor` nounce) p2 -- | Decrypts a nut. If the nut is already decrypted this is the identity. decryptSQRLNut :: Binary a => SQRLNutEx a -> Either String (SQRLNutEx a) decryptSQRLNut n@(SQRLNut {}) = Right n decryptSQRLNut (SQRLNutEncrypted { encNutIV = iv, encNutKey = key, encNutVer = tag, encNutData = base, encNutExtra = extra }) = r where (nutd, tag') = decryptGCM (initAES key) iv "HS-SQRL" (BS.append base extra) r = decode (BSL.fromStrict nutd) >>= \nut -> if tag == toBytes tag' then Right nut else Left "TAG MISMATCH" -- | Creates an URL for use with SQRL (used to create a QR Code or browser links). sqrlURL :: Binary a => SQRLNutEx a -> Nounce -> SQRL Text sqrlURL nut nounce sqrl = Right $ T.append (T.append (T.append (if sqrlTLS sqrl then "sqrl://" else "qrl://") (sqrlDomain sqrl)) path') $ decodeASCII "sqrlURL" cryptUrl' where path = sqrlPath sqrl path' = if T.null path then "/" else T.append path $ case T.findIndex ('?' ==) path of { Nothing -> "?nut=" ; _ -> "&nut=" } (SQRLNutEncrypted { encNutVer = tag, encNutData = base, encNutExtra = extra }) = encryptSQRLNut sqrl nounce nut cryptUrl' = (if BS.null extra then id else flip BS.append (BS.append "&x-nut-extra=" $ enc64unpad extra)) $ BS.append (BS.append (BS.take 22 $ enc64unpad base) $ BS.append "&x-nut-nounce=" $ enc64unpad nounce) $ BS.append "&x-nut-tag=" $ BS.take 22 $ enc64unpad tag -- * SQRL generator {- -- | Generate a SQRL login button which is, of course, a QR Code. htmlSQRLLogin :: Binary a => Text -> Text -> SQRLNutEx a -> IO Html htmlSQRLLogin tls domain path nut = do nounce0 <- modifyMVar sqrlCounter $ \(i, g) -> (\(x, g') -> ((i, g'), x)) <$> genBytes 12 g qrdata <- qr $ cryptUrl nounce0 nut { nutQR = True } nounce1 <- modifyMVar sqrlCounter $ \(i, g) -> (\(x, g') -> ((i, g'), x)) <$> genBytes 12 g return $ a ! 
class_ "sqrl" ! href (toValue $ cryptUrl nounce1 nut { nutQR = False }) $ img ! src (toValue $ T.append "data:image/png;base64," qrdata) where black = rgb 0x00 0x00 0x00 white = rgb 0xFF 0xFF 0xFF qrplt = [white, black] cryptUrl = sqrlURL tls domain path bwcol p l w = foldr (\n p' -> (if testBit w n then black else white) : p') p [(0 .. (l-1))] qr :: Text -> AttributeValue qr t = let qrc = encodeByteString (TE.encodeUtf8 t) Nothing QR_ECLEVEL_M QR_MODE_EIGHT qrl = getQRCodeWidth qrc scanline r = let (pt0, pt1) = splitAt (qrl `div` 8) in yield $ foldr (\bits colors -> bwcol colors 8 bits) (if null pt1 then [] else bwcol [] (qrl `mod` 8) (head pt1)) pt0 scanlines = toProducer $ mapM_ scanline $ toMatrix qrc in decodeASCII $ BS.concat $ runIdentity $ pngSource (mkPNGFromPalette qrl qrl qrplt scanlines) $= encodeBase64 $$ CL.consume -}
TimLuq/sqrl-auth-hs
src/Web/Authenticate/SQRL.hs
mit
12,772
15
33
3,322
3,136
1,691
1,445
171
13
module LiftIO.Prelude where

import Control.Monad.IO.Class (MonadIO,liftIO)

putCharM :: MonadIO m => Char -> m ()
putCharM = liftIO . putChar

putStrM :: MonadIO m => String -> m ()
putStrM = liftIO . putStr

putStrLnM :: MonadIO m => String -> m ()
putStrLnM = liftIO . putStrLn

printM :: (Show a, MonadIO m) => a -> m ()
printM = liftIO . print

getCharM :: MonadIO m => m Char
getCharM = liftIO getChar

getLineM :: MonadIO m => m String
getLineM = liftIO getLine

getContentsM :: MonadIO m => m String
getContentsM = liftIO getContents

interactM :: MonadIO m => (String -> String) -> m ()
interactM = liftIO . interact

readFileM :: MonadIO m => FilePath -> m String
readFileM = liftIO . readFile

writeFileM :: MonadIO m => FilePath -> String -> m ()
writeFileM path = liftIO . writeFile path

appendFileM :: MonadIO m => FilePath -> String -> m ()
appendFileM path = liftIO . appendFile path

readM :: (MonadIO m, Read a) => String -> m a
readM = liftIO . readIO

readLnM :: (MonadIO m, Read a) => m a
readLnM = liftIO readLn

mError :: MonadIO m => IOError -> m a
mError = liftIO . ioError
trbecker/liftio
src/LiftIO/Prelude.hs
mit
1,100
0
9
221
458
232
226
30
1
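A small usage sketch of these lifted helpers inside a transformer stack; the StateT example and the countLines/runCount names are illustrative, relying only on the MonadIO instance for StateT from transformers.

import Control.Monad.Trans.State (StateT, get, modify, execStateT)

-- Count non-empty lines from stdin until a blank line, logging as we go.
countLines :: StateT Int IO ()
countLines = do
  line <- getLineM
  if null line
     then putStrLnM "done"
     else do modify (+1)
             n <- get
             putStrLnM ("lines so far: " ++ show n)
             countLines

-- Run with an initial count of zero and return the final tally.
runCount :: IO Int
runCount = execStateT countLines 0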
module Settings.Builders.Tar (tarBuilderArgs) where

import Settings.Builders.Common

tarBuilderArgs :: Args
tarBuilderArgs = builder Tar ? mconcat
    [ arg "-xf"
    , input "*.gz"  ? arg "--gzip"
    , input "*.bz2" ? arg "--bzip2"
    , arg =<< getInput
    , arg "-C", arg =<< getOutput ]
izgzhen/hadrian
src/Settings/Builders/Tar.hs
mit
430
0
9
200
90
48
42
8
1
{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} module Shell ( State , mkState , nodeGetHttpConfig , nodeSetHttpConfig , nodeLoadProgram , nodeListSelectedProgram , nodeListPrograms , nodeListPatterns , nodeRunNamedPattern , nodeRunRandomPattern , nodeListGlobalCounter , nodeListSelectedCounter , nodeListSelectedStatus , storeProgramResource ) where import Control.Monad.IO.Class (MonadIO, liftIO) import Data.IORef (IORef, modifyIORef', readIORef) import GhostLang.API ( PatternInfo (..) , ProgramPath (..) , Resource (..) , Service (..) , NamedPattern (..) , ExecParams (..) , PatternCounter , PatternStatus , getHttpConfig , setHttpConfig , loadProgram , listSelectedProgram , listPrograms , listPatterns , runNamedPattern , runRandomPattern , listGlobalCounter , listSelectedCounter , listSelectedStatus ) import Network.HTTP.Client (Manager, newManager, defaultManagerSettings) import qualified Data.Text as T data State = State { nodeAddress :: !String , progResource :: !(Maybe Resource) , manager :: !Manager } mkState :: MonadIO m => String -> m State mkState nodeAddress' = State nodeAddress' Nothing <$> liftIO (newManager defaultManagerSettings) nodeGetHttpConfig :: MonadIO m => IORef State -> m (Either String Service) nodeGetHttpConfig state = do (mgr, baseUrl) <- nodeParams <$> liftIO (readIORef state) liftIO $ getHttpConfig mgr baseUrl nodeSetHttpConfig :: MonadIO m => IORef State -> String -> Int -> m (Either String ()) nodeSetHttpConfig state server port = do (mgr, baseUrl) <- nodeParams <$> liftIO (readIORef state) liftIO $ setHttpConfig mgr baseUrl $ Service { serviceAddress = server , servicePort = port } nodeLoadProgram :: MonadIO m => IORef State -> FilePath -> m (Either String Resource) nodeLoadProgram state filePath = do (mgr, baseUrl) <- nodeParams <$> liftIO (readIORef state) liftIO $ loadProgram mgr baseUrl ProgramPath { programPath = T.pack filePath } nodeListSelectedProgram :: MonadIO m => IORef State -> m (Either String [PatternInfo]) nodeListSelectedProgram state = do maybeProg <- progResource <$> liftIO (readIORef state) maybe (return $ Left "No saved program") nodeListProgram' maybeProg where nodeListProgram' :: MonadIO m => Resource -> m (Either String [PatternInfo]) nodeListProgram' prog = do (mgr, baseUrl) <- nodeParams <$> liftIO (readIORef state) liftIO $ listSelectedProgram mgr baseUrl prog nodeListPrograms :: MonadIO m => IORef State -> m (Either String [Resource]) nodeListPrograms state = do (mgr, baseUrl) <- nodeParams <$> liftIO (readIORef state) liftIO $ listPrograms mgr baseUrl nodeListPatterns :: MonadIO m => IORef State -> m (Either String [Resource]) nodeListPatterns state = do (mgr, baseUrl) <- nodeParams <$> liftIO (readIORef state) liftIO $ listPatterns mgr baseUrl nodeRunNamedPattern :: MonadIO m => IORef State -> String -> Bool -> Maybe String -> m (Either String Resource) nodeRunNamedPattern state name trace src = do maybeProg <- progResource <$> liftIO (readIORef state) maybe (return $ Left "No saved program") nodeRunNamedPattern' maybeProg where nodeRunNamedPattern' :: MonadIO m => Resource -> m (Either String Resource) nodeRunNamedPattern' prog = do let namedPattern = NamedPattern { execPattern = T.pack name , execParams = ExecParams { shallTrace = trace , srcIp = src } } (mgr, baseUrl) <- nodeParams <$> liftIO (readIORef state) liftIO $ runNamedPattern mgr baseUrl prog namedPattern nodeRunRandomPattern :: MonadIO m => IORef State -> Bool -> Maybe String -> m (Either String Resource) nodeRunRandomPattern state trace src = do 
maybeProg <- progResource <$> liftIO (readIORef state) maybe (return $ Left "No saved program") nodeRunRandomPattern' maybeProg where nodeRunRandomPattern' :: MonadIO m => Resource -> m (Either String Resource) nodeRunRandomPattern' prog = do let params = ExecParams { shallTrace = trace , srcIp = src } (mgr, baseUrl) <- nodeParams <$> liftIO (readIORef state) liftIO $ runRandomPattern mgr baseUrl prog params nodeListGlobalCounter :: MonadIO m => IORef State -> m (Either String PatternCounter) nodeListGlobalCounter state = do (mgr, baseUrl) <- nodeParams <$> liftIO (readIORef state) liftIO $ listGlobalCounter mgr baseUrl nodeListSelectedCounter :: MonadIO m => IORef State -> String -> m (Either String PatternCounter) nodeListSelectedCounter state res = do (mgr, baseUrl) <- nodeParams <$> liftIO (readIORef state) liftIO $ listSelectedCounter mgr baseUrl $ Resource { resourceUrl = T.pack res } nodeListSelectedStatus :: MonadIO m => IORef State -> String -> m (Either String PatternStatus) nodeListSelectedStatus state res = do (mgr, baseUrl) <- nodeParams <$> liftIO (readIORef state) liftIO $ listSelectedStatus mgr baseUrl $ Resource { resourceUrl = T.pack res } storeProgramResource :: MonadIO m => IORef State -> Resource -> m () storeProgramResource state res = liftIO $ modifyIORef' state $ \s -> s { progResource = Just res } nodeParams :: State -> (Manager, String) nodeParams State {..} = (manager, nodeAddress)
kosmoskatten/ghost-lang
ghost-shell/src/Shell.hs
mit
6,199
0
15
1,924
1,664
848
816
129
1
{-# LANGUAGE RecordWildCards #-} -- | Library for spawning and working with Ghci sessions. module Language.Haskell.Ghcid( Ghci, GhciError(..), Stream(..), Load(..), Severity(..), startGhci, stopGhci, interrupt, process, execStream, showModules, reload, exec, quit ) where import System.IO import System.IO.Error import System.Process import System.Time.Extra import Control.Concurrent.Extra import Control.Exception.Extra import Control.Monad.Extra import Data.Function import Data.List.Extra import Data.Maybe import Data.IORef import Control.Applicative import Data.Unique import System.Console.CmdArgs.Verbosity import Language.Haskell.Ghcid.Parser import Language.Haskell.Ghcid.Types as T import Language.Haskell.Ghcid.Util import Prelude -- | A GHCi session. Created with 'startGhci', closed with 'stopGhci'. -- -- The interactions with a 'Ghci' session must all occur single-threaded, -- or an error will be raised. The only exception is 'interrupt', which aborts -- a running computation, or does nothing if no computation is running. data Ghci = Ghci {ghciProcess :: ProcessHandle ,ghciInterrupt :: IO () ,ghciExec :: String -> (Stream -> String -> IO ()) -> IO () ,ghciUnique :: Unique } instance Eq Ghci where a == b = ghciUnique a == ghciUnique b -- | Start GHCi, returning a function to perform further operation, as well as the result of the initial loading. -- If you do not call 'stopGhci' then the underlying process may be leaked. -- The callback will be given the messages produced while loading, useful if invoking something like "cabal repl" -- which might compile dependent packages before really loading. startGhci :: String -> Maybe FilePath -> (Stream -> String -> IO ()) -> IO (Ghci, [Load]) startGhci cmd directory echo0 = do (Just inp, Just out, Just err, ghciProcess) <- createProcess (shell cmd){std_in=CreatePipe, std_out=CreatePipe, std_err=CreatePipe, cwd=directory, create_group=True} hSetBuffering out LineBuffering hSetBuffering err LineBuffering hSetBuffering inp LineBuffering let writeInp x = do whenLoud $ outStrLn $ "%STDIN: " ++ x hPutStrLn inp x -- Some programs (e.g. stack) might use stdin before starting ghci (see #57) -- Send them an empty line hPutStrLn inp "" -- I'd like the GHCi prompt to go away, but that's not possible, so I set it to a special -- string and filter that out. let ghcid_prefix = "#~GHCID-START~#" let removePrefix = dropPrefixRepeatedly ghcid_prefix -- At various points I need to ensure everything the user is waiting for has completed -- So I send messages on stdout/stderr and wait for them to arrive syncCount <- newVar 0 let syncReplay = do i <- readVar syncCount let msg = "#~GHCID-FINISH-" ++ show i ++ "~#" writeInp $ "INTERNAL_GHCID.putStrLn " ++ show msg ++ "\n" ++ "INTERNAL_GHCID.hPutStrLn INTERNAL_GHCID.stderr " ++ show msg return $ isInfixOf msg let syncFresh = do modifyVar_ syncCount $ return . 
succ syncReplay -- Consume from a stream until EOF (return Nothing) or some predicate returns Just let consume :: Stream -> (String -> IO (Maybe a)) -> IO (Maybe a) consume name finish = do let h = if name == Stdout then out else err fix $ \rec -> do el <- tryBool isEOFError $ hGetLine h case el of Left _ -> return Nothing Right l -> do whenLoud $ outStrLn $ "%" ++ upper (show name) ++ ": " ++ l res <- finish $ removePrefix l case res of Nothing -> rec Just a -> return $ Just a let consume2 :: String -> (Stream -> String -> IO (Maybe a)) -> IO (a,a) consume2 msg finish = do res1 <- onceFork $ consume Stdout (finish Stdout) res2 <- consume Stderr (finish Stderr) res1 <- res1 case liftM2 (,) res1 res2 of Nothing -> throwIO $ UnexpectedExit cmd msg Just v -> return v -- held while interrupting, and briefly held when starting an exec -- ensures exec values queue up behind an ongoing interrupt and no two interrupts run at once isInterrupting <- newLock -- is anyone running running an exec statement, ensure only one person talks to ghci at a time isRunning <- newLock let ghciExec command echo = do withLock isInterrupting $ return () res <- withLockTry isRunning $ do writeInp command stop <- syncFresh void $ consume2 command $ \strm s -> if stop s then return $ Just () else do echo strm s; return Nothing when (isNothing res) $ fail "Ghcid.exec, computation is already running, must be used single-threaded" let ghciInterrupt = withLock isInterrupting $ do whenM (fmap isNothing $ withLockTry isRunning $ return ()) $ do whenLoud $ outStrLn "%INTERRUPT" interruptProcessGroupOf ghciProcess -- let the person running ghciExec finish, since their sync messages -- may have been the ones that got interrupted syncReplay -- now wait for the person doing ghciExec to have actually left the lock withLock isRunning $ return () -- there may have been two syncs sent, so now do a fresh sync to clear everything stop <- syncFresh void $ consume2 "Interrupt" $ \_ s -> return $ if stop s then Just () else Nothing ghciUnique <- newUnique let ghci = Ghci{..} -- Now wait for 'GHCi, version' to appear before sending anything real, required for #57 stdout <- newIORef [] stderr <- newIORef [] sync <- newIORef $ const False consume2 "" $ \strm s -> do stop <- readIORef sync if stop s then return $ Just () else do -- there may be some initial prompts on stdout before I set the prompt properly s <- return $ maybe s (removePrefix . snd) $ stripInfix ghcid_prefix s whenLoud $ outStrLn $ "%STDOUT2: " ++ s modifyIORef (if strm == Stdout then stdout else stderr) (s:) when ("GHCi, version " `isPrefixOf` s) $ do -- the thing before me may have done its own Haskell compiling writeIORef stdout [] writeIORef stderr [] writeInp "import qualified System.IO as INTERNAL_GHCID" writeInp $ ":set prompt " ++ ghcid_prefix writeInp ":set -fno-break-on-exception -fno-break-on-error" -- see #43 writeIORef sync =<< syncFresh echo0 strm s return Nothing r <- parseLoad . reverse <$> ((++) <$> readIORef stderr <*> readIORef stdout) execStream ghci "" echo0 return (ghci, r) -- | Execute a command, calling a callback on each response. -- The callback will be called single threaded. execStream :: Ghci -> String -> (Stream -> String -> IO ()) -> IO () execStream = ghciExec -- | Interrupt Ghci, stopping the current computation (if any), -- but leaving the process open to new input. interrupt :: Ghci -> IO () interrupt = ghciInterrupt -- | Obtain the progress handle behind a GHCi instance. 
process :: Ghci -> ProcessHandle process = ghciProcess --------------------------------------------------------------------- -- SUGAR HELPERS -- | Execute a command, calling a callback on each response. -- The callback will be called single threaded. execBuffer :: Ghci -> String -> (Stream -> String -> IO ()) -> IO [String] execBuffer ghci cmd echo = do stdout <- newIORef [] stderr <- newIORef [] execStream ghci cmd $ \strm s -> do modifyIORef (if strm == Stdout then stdout else stderr) (s:) echo strm s reverse <$> ((++) <$> readIORef stderr <*> readIORef stdout) -- | Send a command, get lines of result. Must be called single-threaded. exec :: Ghci -> String -> IO [String] exec ghci cmd = execBuffer ghci cmd $ \_ _ -> return () -- | List the modules currently loaded, with module name and source file. showModules :: Ghci -> IO [(String,FilePath)] showModules ghci = parseShowModules <$> exec ghci ":show modules" -- | Perform a reload, list the messages that reload generated. reload :: Ghci -> IO [Load] reload ghci = parseLoad <$> exec ghci ":reload" -- | Send @:quit@ and wait for the process to quit. quit :: Ghci -> IO () quit ghci = do interrupt ghci handle (\UnexpectedExit{} -> return ()) $ void $ exec ghci ":quit" -- Be aware that waitForProcess has a race condition, see https://github.com/haskell/process/issues/46. -- Therefore just ignore the exception anyway, its probably already terminated. ignore $ void $ waitForProcess $ process ghci -- | Stop GHCi. Attempts to interrupt and execute @:quit:@, but if that doesn't complete -- within 5 seconds it just terminates the process. stopGhci :: Ghci -> IO () stopGhci ghci = do forkIO $ do -- if nicely doesn't work, kill ghci as the process level sleep 5 terminateProcess $ process ghci quit ghci
wereHamster/nauva
pkg/hs/nauvad/src/Language/Haskell/Ghcid.hs
mit
9,447
0
26
2,651
2,082
1,037
1,045
151
9
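A minimal driver sketch for the session API above, using only the functions whose signatures appear in the module (startGhci, showModules, exec, stopGhci). The "ghci" command string, the working directory, and the demo name are assumptions for illustration.

demo :: IO ()
demo = do
  -- Start a plain ghci in the current directory, ignoring load-time echo output.
  (ghci, loadMsgs) <- startGhci "ghci" (Just ".") (\_stream _line -> return ())
  putStrLn ("load messages: " ++ show (length loadMsgs))
  -- List the modules the session has loaded: (module name, source file).
  mods <- showModules ghci
  mapM_ print mods
  -- Run a single expression and print its output lines.
  answer <- exec ghci "map (*2) [1..5]"
  mapM_ putStrLn answer
  stopGhci ghci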
module Phi.Window
  ( Window, mkWindow
  , length
  , push
  , phi
  ) where

import           Data.Vector.Unboxed            (Vector)
import qualified Data.Vector.Unboxed            as V
import           Prelude                        hiding (length)
import           Statistics.Distribution        (complCumulative)
import           Statistics.Distribution.Normal (normalFromSample)

data Window = Window
  { sample :: !(Vector Double)
  , size   :: !Int
  }

mkWindow :: Int -> Window
mkWindow = Window V.empty

push :: Double -> Window -> Window
push d window = window {sample = sample'}
  where sample' = V.take (size window) (V.cons d (sample window))

phi :: Window -> Double -> Double
phi w = negate . logBase 10 . pLater w
  where
    pLater :: Window -> Double -> Double
    pLater = complCumulative . normalFromSample . sample

length :: Window -> Int
length = V.length . sample
reinh/hs-phi
src/Phi/Window.hs
mit
919
0
11
278
271
152
119
31
1
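A usage sketch of the accrual window above; the sample values and the suspicion name are made up. Recent heartbeat intervals are fed in with push, then phi is queried with the elapsed time since the last heartbeat to get a suspicion level.

import qualified Phi.Window as W

suspicion :: Double
suspicion =
  let w = foldr W.push (W.mkWindow 1000) [98, 102, 100, 99, 101, 103, 97, 100]
  in  W.phi w 250   -- phi of a 250ms gap against the observed ~100ms intervals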
{-# LANGUAGE OverloadedStrings #-}

module Y2020.M11.D09.Solution where

{--
The previous exercise had you parse the European Union's member states then
add that to the AllianceMap of the world. That was easy, because those data
were available as wikidata.org JSON.

Not so for the United Nations (which we will do today) and the Organization of
American States (tomorrow). These alliances are in wikitext, and, what's even
more FUNZORX is that these wikitext documents don't have an uniform format.
YAY! FUNZORX!

Today's #haskell problem. Let's parse in the member nations of the United
Nations and add that ... 'alliance' (?) to our AllianceMap. I'll provide the
seed alliance map. You provide the UN and add it to that map.

hint: we may (?) have parsed in nation flags (?) before (?) from wikitext?

post-hint: oh. It's not {{flagicon|<country>}}, now it's {{flag|<country>}}

The Funzorx continuezorx. :/
--}

import Y2020.M10.D12.Solution   -- ALL the way back when for Country.
import Y2020.M10.D30.Solution (Alliance(Alliance), AllianceMap, dear, moderns)
import qualified Y2020.M10.D30.Solution as A   -- look for flaggie stuff here
import Y2020.M11.D05.Solution (todoPrep)       -- for the updated alliance-parse
import Y2020.M11.D06.Solution (addEU, euDir, eu, adder)

import Data.List (stripPrefix)
import qualified Data.Map as Map
import Data.Maybe (mapMaybe)
import qualified Data.Set as Set
import qualified Data.Text as T

seedAllianceMap :: FilePath -> FilePath -> IO AllianceMap
seedAllianceMap allieses eus = todoPrep allieses >>= flip addEU eus

{--
>>> seedAllianceMap (dear ++ moderns) (euDir ++ eu)
...
>>> let am = it
>>> Map.size am
43
--}

unDir :: FilePath
unDir = "Y2020/M11/D09/"

un :: FilePath
un = "un.wtxt"

unFlag :: String -> Maybe Country
unFlag line = T.pack . fst . break (== '}')
          <$> A.prolog [stripPrefix "|{{flag|", stripPrefix "|{{Flag|"] line

-- unFlag is the same flagicon, but for "{{flag|", ... so it's different. :/

exUnFlagPass, exUnFlagFail :: String
exUnFlagPass = "|{{flag|China}}"
exUnFlagFail = "|1971, replaced the [[Republic of China]]"

{--
>>> unFlag exUnFlagPass
Just "China"

>>> unFlag exUnFlagFail
Nothing
--}

-- and with that --^ we can do this --v

unitedNationsParser :: FilePath -> IO Alliance
unitedNationsParser file =
   Alliance "United Nations" Set.empty . Set.fromList . mapMaybe unFlag . lines
      <$> readFile file

{--
>>> unitedNationsParser (unDir ++ un)
Alliance {name = "United Nations", aliases = fromList [],
          countries = fromList ["Belgium","China","Dominican Republic","Estonia",
                                "France","Germany","Indonesia","Niger","Russia",
                                "Saint Vincent and the Grenadines","South Africa",
                                "Tunisia","United Kingdom","United States","Vietnam"]}
--}

addUN :: AllianceMap -> FilePath -> IO AllianceMap
addUN = adder unitedNationsParser

{--
>>> addUN am (unDir ++ un)
...
>>> let unmap = it
>>> Map.size unmap
44
--}
geophf/1HaskellADay
exercises/HAD/Y2020/M11/D09/Solution.hs
mit
3,005
0
10
567
348
206
142
30
1
module GHCJS.DOM.CustomEvent (
  ) where
manyoo/ghcjs-dom
ghcjs-dom-webkit/src/GHCJS/DOM/CustomEvent.hs
mit
41
0
3
7
10
7
3
1
0
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}

-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptrule-s3action.html

module Stratosphere.ResourceProperties.SESReceiptRuleS3Action where

import Stratosphere.ResourceImports

-- | Full data type definition for SESReceiptRuleS3Action. See
-- 'sesReceiptRuleS3Action' for a more convenient constructor.
data SESReceiptRuleS3Action =
  SESReceiptRuleS3Action
  { _sESReceiptRuleS3ActionBucketName :: Val Text
  , _sESReceiptRuleS3ActionKmsKeyArn :: Maybe (Val Text)
  , _sESReceiptRuleS3ActionObjectKeyPrefix :: Maybe (Val Text)
  , _sESReceiptRuleS3ActionTopicArn :: Maybe (Val Text)
  } deriving (Show, Eq)

instance ToJSON SESReceiptRuleS3Action where
  toJSON SESReceiptRuleS3Action{..} =
    object $
    catMaybes
    [ (Just . ("BucketName",) . toJSON) _sESReceiptRuleS3ActionBucketName
    , fmap (("KmsKeyArn",) . toJSON) _sESReceiptRuleS3ActionKmsKeyArn
    , fmap (("ObjectKeyPrefix",) . toJSON) _sESReceiptRuleS3ActionObjectKeyPrefix
    , fmap (("TopicArn",) . toJSON) _sESReceiptRuleS3ActionTopicArn
    ]

-- | Constructor for 'SESReceiptRuleS3Action' containing required fields as
-- arguments.
sesReceiptRuleS3Action
  :: Val Text -- ^ 'sesrrsaBucketName'
  -> SESReceiptRuleS3Action
sesReceiptRuleS3Action bucketNamearg =
  SESReceiptRuleS3Action
  { _sESReceiptRuleS3ActionBucketName = bucketNamearg
  , _sESReceiptRuleS3ActionKmsKeyArn = Nothing
  , _sESReceiptRuleS3ActionObjectKeyPrefix = Nothing
  , _sESReceiptRuleS3ActionTopicArn = Nothing
  }

-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptrule-s3action.html#cfn-ses-receiptrule-s3action-bucketname
sesrrsaBucketName :: Lens' SESReceiptRuleS3Action (Val Text)
sesrrsaBucketName = lens _sESReceiptRuleS3ActionBucketName (\s a -> s { _sESReceiptRuleS3ActionBucketName = a })

-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptrule-s3action.html#cfn-ses-receiptrule-s3action-kmskeyarn
sesrrsaKmsKeyArn :: Lens' SESReceiptRuleS3Action (Maybe (Val Text))
sesrrsaKmsKeyArn = lens _sESReceiptRuleS3ActionKmsKeyArn (\s a -> s { _sESReceiptRuleS3ActionKmsKeyArn = a })

-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptrule-s3action.html#cfn-ses-receiptrule-s3action-objectkeyprefix
sesrrsaObjectKeyPrefix :: Lens' SESReceiptRuleS3Action (Maybe (Val Text))
sesrrsaObjectKeyPrefix = lens _sESReceiptRuleS3ActionObjectKeyPrefix (\s a -> s { _sESReceiptRuleS3ActionObjectKeyPrefix = a })

-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-receiptrule-s3action.html#cfn-ses-receiptrule-s3action-topicarn
sesrrsaTopicArn :: Lens' SESReceiptRuleS3Action (Maybe (Val Text))
sesrrsaTopicArn = lens _sESReceiptRuleS3ActionTopicArn (\s a -> s { _sESReceiptRuleS3ActionTopicArn = a })
frontrowed/stratosphere
library-gen/Stratosphere/ResourceProperties/SESReceiptRuleS3Action.hs
mit
2,981
0
13
308
447
253
194
38
1
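A sketch of building this resource property from the generated constructor, filling the optional fields by record update. The bucket name and ARN are placeholders, and the use of stratosphere's Literal constructor for Val Text (with OverloadedStrings, as in the module above) is an assumption about the surrounding library.

myS3Action :: SESReceiptRuleS3Action
myS3Action =
  (sesReceiptRuleS3Action (Literal "incoming-mail-bucket"))
    { _sESReceiptRuleS3ActionObjectKeyPrefix = Just (Literal "inbox/")
    , _sESReceiptRuleS3ActionTopicArn =
        Just (Literal "arn:aws:sns:us-east-1:123456789012:mail")  -- placeholder ARN
    }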
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE OverloadedLists #-}

module BoxProperties (
  testBox
  ) where

import Crypto.Saltine.Core.Box

import Data.Monoid

import Test.Framework.Providers.QuickCheck2
import Test.Framework
import Test.QuickCheck.Property
import Test.QuickCheck.Monadic

import Util

-- | Ciphertext can be decrypted
rightInverseProp :: Keypair -> Keypair -> Nonce -> Message -> Bool
rightInverseProp (Keypair sk1 pk1) (Keypair sk2 pk2) n (Message bs) =
  Just bs == boxOpen pk1 sk2 n (box pk2 sk1 n bs)

-- | Cannot decrypt without the correct secret key
rightInverseFailureProp1 :: Keypair -> Keypair -> Nonce -> Message -> Perturb -> Bool
rightInverseFailureProp1 (Keypair sk1 pk1) (Keypair sk2 pk2) n (Message bs) p =
  Nothing == boxOpen pk1 (perturb sk2 ([0] <> p)) n (box pk2 sk1 n bs)

-- | Cannot decrypt when not sent to you
rightInverseFailureProp2 :: Keypair -> Keypair -> Nonce -> Message -> Perturb -> Bool
rightInverseFailureProp2 (Keypair sk1 pk1) (Keypair sk2 pk2) n (Message bs) p =
  Nothing == boxOpen pk1 sk2 n (box (perturb pk2 p) sk1 n bs)

-- | Ciphertext cannot be decrypted (verification failure) if the
-- ciphertext is perturbed
rightInverseFailureProp3 :: Keypair -> Keypair -> Nonce -> Message -> Perturb -> Bool
rightInverseFailureProp3 (Keypair sk1 pk1) (Keypair sk2 pk2) n (Message bs) p =
  Nothing == boxOpen pk1 sk2 n (perturb (box pk2 sk1 n bs) p)

-- | Ciphertext cannot be decrypted with a different nonce
cannotDecryptNonceProp :: Keypair -> Keypair -> Nonce -> Nonce -> Message -> Bool
cannotDecryptNonceProp (Keypair sk1 pk1) (Keypair sk2 pk2) n1 n2 (Message bs) =
  Nothing == boxOpen pk1 sk2 n2 (box pk2 sk1 n1 bs)

-- | BeforeNM creates identical secret keys when called in an
-- anti-symmetric fashion.
beforeNMCreateSecretKeyProp :: Test.QuickCheck.Property.Property
beforeNMCreateSecretKeyProp = monadicIO . (assert =<<) . run $ do
  Keypair sk1 pk1 <- newKeypair
  Keypair sk2 pk2 <- newKeypair
  let ck_1for2 = beforeNM sk1 pk2
      ck_2for1 = beforeNM sk2 pk1
  return (ck_1for2 == ck_2for1)

-- | Ciphertext can be decrypted using combined keys
rightInverseAfterNMProp :: CombinedKey -> CombinedKey -> Nonce -> Message -> Bool
rightInverseAfterNMProp ck_1for2 ck_2for1 n (Message bs) =
  Just bs == boxOpenAfterNM ck_2for1 n (boxAfterNM ck_1for2 n bs)

-- | Perturbed ciphertext cannot be decrypted using combined keys
rightInverseFailureAfterNMProp1 :: CombinedKey -> CombinedKey -> Nonce -> Message -> Perturb -> Bool
rightInverseFailureAfterNMProp1 ck_1for2 ck_2for1 n (Message bs) p =
  Nothing == boxOpenAfterNM ck_2for1 n (perturb (boxAfterNM ck_1for2 n bs) p)

testBox :: Test
testBox = buildTest $ do
  kp1@(Keypair sk1 pk1) <- newKeypair
  kp2@(Keypair sk2 pk2) <- newKeypair
  let ck_1for2 = beforeNM sk1 pk2
      ck_2for1 = beforeNM sk2 pk1
  n1 <- newNonce
  n2 <- newNonce

  return $ testGroup "...Internal.Box" [

    testGroup "Can decrypt ciphertext using..." [
      testProperty "... public key/secret key"
        $ rightInverseProp kp1 kp2 n1,
      testProperty "... combined key"
        $ rightInverseAfterNMProp ck_1for2 ck_2for1 n1
      ],

    testGroup "Fail to verify ciphertext when..." [
      testProperty "... not using proper secret key"
        $ rightInverseFailureProp1 kp1 kp2 n1,
      testProperty "... not actually sent to you"
        $ rightInverseFailureProp2 kp1 kp2 n1,
      testProperty "... ciphertext has been perturbed"
        $ rightInverseFailureProp3 kp1 kp2 n1,
      testProperty "... using the wrong nonce"
        $ cannotDecryptNonceProp kp1 kp2 n1 n2,
      testProperty "... using the wrong combined key"
        $ rightInverseFailureAfterNMProp1 ck_1for2 ck_2for1 n1
      ],

    testGroup "(properties)" [
      testProperty "beforeNM is anti-symmetric" beforeNMCreateSecretKeyProp
      ]
    ]
tel/saltine
tests/BoxProperties.hs
mit
3,846
0
14
748
1,016
516
500
69
1
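
The `Message` and `Perturb` wrappers and the `perturb` helper above come from the test suite's Util module, which is not included in this record. A minimal sketch of what such QuickCheck wrappers could look like; the names, the list-of-bytes representation, and the XOR-based perturbation are assumptions for illustration, not saltine's actual Util code (which works over saltine's encoded key and ciphertext types rather than plain ByteStrings):

import qualified Data.ByteString as BS
import Data.Bits (xor)
import Data.Word (Word8)
import Test.QuickCheck (Arbitrary(..), listOf)

-- Hypothetical stand-ins for the repo's Util module.
newtype Message = Message BS.ByteString deriving (Show, Eq)
newtype Perturb = Perturb [Word8]       deriving (Show, Eq)

instance Arbitrary Message where
  arbitrary = Message . BS.pack <$> listOf arbitrary

instance Arbitrary Perturb where
  -- Force at least one non-zero byte so a perturbation always changes something.
  arbitrary = Perturb . (1 :) <$> listOf arbitrary

-- XOR the perturbation bytes into the front of a ByteString (simplified to
-- ByteString; the real helper is polymorphic over saltine's encodings).
perturbBS :: BS.ByteString -> Perturb -> BS.ByteString
perturbBS bs (Perturb ws) = BS.pack (zipWith xor (ws ++ repeat 0) (BS.unpack bs))
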
-- Copyright 2015 Mitchell Kember. Subject to the MIT License.
-- Project Euler: Problem 21
-- Amicable numbers

module Problem21 where

import Common (properDivisors)
import Data.Tuple (swap)

amicablePairs :: Int -> [(Int, Int)]
amicablePairs limit = filter amicable pairs
  where
    pairs = map withSum [1..limit]
    withSum n = (n, sum (properDivisors n))
    amicable p = uncurry (<) p && swap p `elem` pairs

solve :: Int
solve = sum . map add . amicablePairs $ 9999
  where add = uncurry (+)
mk12/euler
haskell/Problem21.hs
mit
500
0
10
100
161
90
71
10
1
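
`properDivisors` is imported from the repo's Common module, which is not shown in this record. A straightforward definition compatible with the use above (an assumption, not the repo's actual code):

-- Divisors of n strictly less than n, e.g. properDivisors 28 == [1,2,4,7,14].
properDivisors :: Int -> [Int]
properDivisors n = [d | d <- [1 .. n `div` 2], n `mod` d == 0]

With a definition like this, `solve` evaluates to 31626, the known answer to Problem 21.
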
-- Informatics 1 - Functional Programming
-- Tutorial 7
--
-- Week 9 - Due: 19/20 Nov.

import LSystem
import Test.QuickCheck

-- Exercise 1

-- 1a. split
split :: Command -> [Command]
split = undefined

-- 1b. join
join :: [Command] -> Command
join = undefined

-- 1c equivalent
equivalent = undefined

-- 1d. testing join and split
prop_split_join = undefined

prop_split = undefined


-- Exercise 2
-- 2a. copy
copy :: Int -> Command -> Command
copy = undefined

-- 2b. pentagon
pentagon :: Distance -> Command
pentagon = undefined

-- 2c. polygon
polygon :: Distance -> Int -> Command
polygon = undefined


-- Exercise 3
-- spiral
spiral :: Distance -> Int -> Distance -> Angle -> Command
spiral = undefined


-- Exercise 4
-- optimise
optimise :: Command -> Command
optimise = undefined


-- L-Systems

-- 5. arrowhead
arrowhead :: Int -> Command
arrowhead = undefined

-- 6. snowflake
snowflake :: Int -> Command
snowflake = undefined

-- 7. hilbert
hilbert :: Int -> Command
hilbert = undefined

main :: IO ()
main = display (Go 30 :#: Turn 120 :#: Go 30 :#: Turn 120 :#: Go 30)
PavelClaudiuStefan/FMI
An_3_Semestru_1/ProgramareDeclarativa/Extra/Laborator/Laborator 7/lab7.hs
cc0-1.0
1,091
2
11
218
334
164
170
27
1
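
The exercises above are deliberately left as `undefined` stubs. Purely as an illustration, here is one way the Exercise 2 functions could be filled in, assuming the usual `Command` type exported by the course's LSystem module (`Go`, `Turn`, and the `:#:` sequencing constructor, as used in `main`); this is not the tutorial's model solution:

-- copy n c sequences n copies of c (assumes n >= 1, since foldr1 needs a non-empty list)
copy :: Int -> Command -> Command
copy n c = foldr1 (:#:) (replicate n c)

-- a pentagon is five equal sides with 72-degree turns between them
pentagon :: Distance -> Command
pentagon d = copy 5 (Go d :#: Turn 72)

-- generalises pentagon to an arbitrary number of sides
polygon :: Distance -> Int -> Command
polygon d n = copy n (Go d :#: Turn (360 / fromIntegral n))
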
{-# LANGUAGE ExistentialQuantification #-} -- | -- Module : Main -- Copyright : (c) Joachim Fasting 2010 -- License : GPL-2 (see COPYING) -- -- Maintainer : [email protected] -- Stability : unstable -- Portability : not portable -- -- A FUSE filesystem for the Music Player Daemon (MPD). -- See README for more information. module Main (main) where import Control.Monad (liftM) import Control.Monad.Error (catchError) import Control.Monad.Trans (liftIO) import Control.Concurrent (forkIO) import qualified Control.Concurrent.Chan as C import qualified Control.Concurrent.MVar as C import qualified Data.ByteString.Char8 as B import Data.ByteString.Char8 (ByteString) import System.FilePath ((</>), takeBaseName, takeDirectory, splitDirectories) import qualified System.Fuse as F import System.Posix hiding (createDirectory, rename) import Prelude hiding (readFile, writeFile) import Network.MPD hiding (rename) import Network.MPD.Core (close) import qualified Network.MPD as M data Request = forall a. ReqSync (MPD a) (C.MVar (Response a)) | ReqAsync (MPD ()) | ReqDone main :: IO () main = do chan <- C.newChan mDone <- C.newEmptyMVar threadId <- forkIO (withMPD (mpdloop chan mDone) >> return ()) F.fuseMain (operations chan) F.defaultExceptionHandler C.takeMVar mDone return () mpdloop :: C.Chan Request -> C.MVar () -> MPD () mpdloop chan mDone = loop where loop = liftIO (C.readChan chan) >>= go go (ReqSync action result) = do res <- catchError (Right `fmap` action) (return . Left) liftIO (C.putMVar result res) >> loop go (ReqAsync action) = action >> loop go (ReqDone) = close >> liftIO (C.putMVar mDone ()) -- -- FUSE operations. -- operations :: C.Chan Request -> F.FuseOperations fh operations chan = F.defaultFuseOps { F.fuseGetFileStat = stat chan , F.fuseOpen = openFile chan , F.fuseRead = readFile chan , F.fuseWrite = writeFile chan , F.fuseReadDirectory = readDir chan , F.fuseOpenDirectory = openDirectory chan , F.fuseCreateDirectory = createDirectory chan , F.fuseRename = rename chan -- Dummies to make FUSE happy. , F.fuseSetFileSize = \_ _ -> return F.eOK , F.fuseSetFileTimes = \_ _ _ -> return F.eOK , F.fuseSetFileMode = \_ _ -> return F.eOK , F.fuseSetOwnerAndGroup = \_ _ _ -> return F.eOK } openDirectory :: C.Chan Request -> FilePath -> IO F.Errno openDirectory chan p = do putStrLn $ "OPEN DIRECTORY: " ++ p st <- stat chan p case st of Right st' -> case F.statEntryType st' of F.Directory -> return F.eOK _ -> return F.eNOTDIR _ -> return F.eNOENT createDirectory :: C.Chan Request -> FilePath -> FileMode -> IO F.Errno createDirectory chan p _ = do putStrLn $ "CREATE DIRECTORY: " ++ p case splitDirectories ("/" </> p) of ("/":"Playlists":plName:[]) -> either (const F.eNOENT) (const F.eOK) `fmap` fuseMPD chan (playlistAdd_ plName "") rename :: C.Chan Request -> FilePath -> FilePath -> IO F.Errno rename chan p newName = do putStrLn $ "RENAME DIRECTORY: " ++ p ++ " " ++ newName return F.eOK -- Implements the readdir(3) call. readDir :: C.Chan Request -> FilePath -> IO (Either F.Errno [(FilePath, F.FileStat)]) readDir chan p = do putStrLn $ "READ DIRECTORY: " ++ p Right `liftM` getDirectoryContents chan p -- Implements the open(3) call. openFile :: C.Chan Request -> FilePath -> OpenMode -> OpenFileFlags -> IO (Either F.Errno fh) openFile chan p _ _ = do putStrLn $ "OPEN FILE: " ++ p st <- stat chan p case st of Right st' -> case F.statEntryType st' of F.RegularFile -> return $ Right undefined _ -> return $ Left F.eNOENT _ -> return $ Left F.eNOENT -- Implements the read(3) call. 
-- XXX: needs cleanup readFile :: C.Chan Request -> FilePath -> fh -> ByteCount -> FileOffset ->IO (Either F.Errno ByteString) readFile chan p _ _ _ = do putStrLn $ "READ FILE " ++ p case splitDirectories ("/" </> p) of ("/":"Outputs":_:[]) -> readDeviceFile chan p ("/":"Status":_:[]) -> readStatusFile chan p ("/":"Stats":_:[]) -> readStatsFile chan p _ -> return $ Left F.eNOENT -- this should be -- handled by -- openFile? readStatusFile chan p = fuseMPD chan $ case lookup (takeBaseName p) fs of Just f -> (flip B.snoc '\n' . f) `liftM` status _ -> undefined where fs = [("state", packShow . stState) ,("volume", packShow . stVolume) ,("repeat_mode", packBool . stRepeat) ,("random_mode", packBool . stRandom) ,("playlist_version", packShow . stPlaylistVersion) ,("playlist_length", packShow . stPlaylistLength) ,("song_pos", packMaybe . stSongPos) ,("song_id", packMaybe . stSongID) ,("next_song_pos", packMaybe . stNextSongPos) ,("next_song_id", packMaybe . stNextSongID) ,("time", packShow . stTime) ,("bitrate", packShow . stBitrate) ,("crossfade", packShow . stXFadeWidth) ,("audio", packShow . stAudio) ,("updating_db", packShow . stUpdatingDb) ,("single_mode", packBool . stSingle) ,("consume_mode", packBool . stConsume) ,("error", B.pack . stError) ] readDeviceFile chan p = fuseMPD chan $ do xs <- outputs case filter ((==) (takeDeviceID p) . dOutputID) xs of [d] -> return . flip B.snoc '\n' . packBool $ dOutputEnabled d _ -> undefined -- assume openFile makes sure this will never happen readStatsFile chan p = fuseMPD chan $ case lookup (takeBaseName p) selectors of Just f -> (flip B.snoc '\n' . packShow . f) `liftM` stats _ -> undefined -- let's pretend openFile will prevent us from -- going here where selectors = [("artists", stsArtists) ,("albums", stsAlbums) ,("playtime", stsPlaytime) ,("songs", stsSongs) ,("uptime", stsUptime) ,("db_playtime", stsDbPlaytime) ,("db_update", stsDbUpdate)] -- Implements the pwrite(2) call. -- XXX: needs cleanup writeFile :: C.Chan Request -> FilePath -> fh -> ByteString -> FileOffset -> IO (Either F.Errno ByteCount) writeFile chan p _ s _ = do putStrLn $ "WRITE FILE " ++ p r <- case splitDirectories ("/" </> p) of ("/":"Outputs":_:[]) -> writeDeviceFile chan p s ("/":"Status":"state":[]) -> fuseMPD chan $ do liftIO (putStr "INPUT: " >> print s) case B.unpack s of "stop\n" -> stop "play\n" -> play Nothing "pause\n" -> toggle _ -> toggle ("/":"Status":"volume":[]) -> fuseMPD chan $ do case B.readInt (rtrim s) of Just (x, _) -> setVolume x Nothing -> return () ("/":"Status":"repeat_mode":[]) -> fuseMPD chan $ do case B.readInt (rtrim s) of Just (x, _) -> M.repeat (x /= 0) Nothing -> return () -- XXX: should toggle by default ("/":"Status":"random_mode":[]) -> fuseMPD chan $ do case B.readInt (rtrim s) of Just (x, _) -> random (x /= 0) Nothing -> return () -- XXX: should toggle by -- default ("/":"Status":"single_mode":[]) -> fuseMPD chan $ do case B.readInt (rtrim s) of Just (x, _) -> single (x /= 0) Nothing -> return () ("/":"Status":"consume_mode":[]) -> fuseMPD chan $ do case B.readInt (rtrim s) of Just (x, _) -> consume (x /= 0) Nothing -> return () _ -> return $ Left F.eNOENT return $ either Left (const $ Right . fromIntegral $ B.length s) r writeDeviceFile chan p s = fuseMPD chan $ do let setState = case B.readInt s of Just (0, _) -> disableOutput _ -> enableOutput setState (takeDeviceID p) -- Implements the stat(3) call. 
stat :: C.Chan Request -> FilePath -> IO (Either F.Errno F.FileStat) stat _ "/" = return $ Right directory stat chan p = do putStrLn $ "STAT DIRECTORY: " ++ p cs <- getDirectoryContents chan (takeDirectory p) case lookup (takeBaseName p) cs of Just s -> return $ Right s Nothing -> return $ Left F.eNOENT -- -- File system description. -- getDirectoryContents :: C.Chan Request -> FilePath -> IO [(FilePath, F.FileStat)] getDirectoryContents chan p = ioMPD chan $ -- NOTE: we make sure that paths begin with a slash for convenience. case splitDirectories ("/" </> p) of ("/":[]) -> return $ dots ++ [("Music", directory) ,("Outputs", directory) ,("Playlists", directory) ,("Status", directory) ,("Stats", directory)] ("/":"Music":[]) -> return dots ("/":"Outputs":[]) -> do devs <- outputs return $ dots ++ map (\x -> (deviceFileName x, mkFileStat (B.pack "0"))) devs ("/":"Playlists":[]) -> do pls <- lsPlaylists return $ dots ++ map (\x -> (x, directory)) pls ("/":"Playlists":plName:[]) -> do pls <- lsPlaylists if plName `elem` pls then do songs <- listPlaylist plName return $ dots ++ map (flip (,) regularFile) songs else fail "" ("/":"Status":[]) -> do st <- status return $ dots ++ [("state", mkFileStat (packShow $ stState st)) ,("volume", mkFileStat (packShow $ stVolume st)) ,("repeat_mode", mkFileStat (packShow $ stVolume st)) ,("random_mode", mkFileStat (packShow $ stRandom st)) ,("playlist_version", mkFileStat (packShow $ stPlaylistVersion st)) ,("playlist_length", mkFileStat (packShow $ stPlaylistLength st)) ,("song_pos", mkFileStat (packShow $ stSongPos st)) ,("song_id", mkFileStat (packShow $ stSongID st)) ,("next_song_pos", mkFileStat (packShow $ stNextSongPos st)) ,("next_song_id", mkFileStat (packShow $ stNextSongID st)) ,("time", mkFileStat (packShow $ stTime st)) ,("bitrate", mkFileStat (packShow $ stBitrate st)) ,("crossfade", mkFileStat (packShow $ stXFadeWidth st)) ,("audio", mkFileStat (packShow $ stAudio st)) ,("updating_db", mkFileStat (packShow $ stUpdatingDb st)) ,("single_mode", mkFileStat (packShow $ stSingle st)) ,("consume_mode", mkFileStat (packShow $ stConsume st)) ,("error", mkFileStat (B.pack $ stError st))] ("/":"Stats":[]) -> do sts <- stats return $ dots ++ [("artists", mkFileStat (packShow $ stsArtists sts)) ,("albums", mkFileStat (packShow $ stsAlbums sts)) ,("songs", mkFileStat (packShow $ stsSongs sts)) ,("uptime", mkFileStat (packShow $ stsUptime sts)) ,("playtime", mkFileStat (packShow $ stsPlaytime sts)) ,("db_playtime", mkFileStat (packShow $ stsDbPlaytime sts)) ,("db_update", mkFileStat (packShow $ stsDbUpdate sts))] _ -> fail "No such directory" where dots = [(".", directory), ("..", directory)] -- Given a file content as a string, produce a file stat with appropriate size -- and block information. 
mkFileStat s = regularFile { F.statFileSize = fromIntegral len , F.statBlocks = fromIntegral blk } where len = B.length s + 1 -- remember space for trailing newline blk = (len `div` 4096) + 1 songFileStat :: Song -> F.FileStat songFileStat sg = regularFile { F.statFileSize = fromIntegral $ sgLength sg } directory, regularFile, emptyStat :: F.FileStat directory = emptyStat { F.statEntryType = F.Directory , F.statFileSize = 4096 , F.statBlocks = 1 , F.statFileMode = foldr1 unionFileModes [ ownerReadMode, ownerExecuteMode ] } regularFile = emptyStat { F.statEntryType = F.RegularFile , F.statFileMode = foldr1 unionFileModes [ ownerReadMode, ownerWriteMode ] } emptyStat = F.FileStat { F.statEntryType = F.Unknown , F.statFileMode = ownerModes , F.statLinkCount = 0 , F.statFileOwner = 0 , F.statFileGroup = 0 , F.statSpecialDeviceID = 0 , F.statFileSize = 0 , F.statBlocks = 0 , F.statAccessTime = 0 , F.statModificationTime = 0 , F.statStatusChangeTime = 0 } -- -- Mapping MPD data structures to file system objects. -- takeDeviceID :: FilePath -> Int takeDeviceID = read . take 1 . takeBaseName deviceFileName :: Device -> FilePath deviceFileName (Device i n _) = show i ++ ":" ++ replace ' ' '_' n songFileName :: Song -> FilePath songFileName = undefined -- -- Utilities. -- -- Run an action in the MPD monad and lift the result into the FUSE -- context. fuseMPD :: C.Chan Request -> MPD a -> IO (Either F.Errno a) fuseMPD chan x = do mResult <- C.newEmptyMVar C.writeChan chan $ ReqSync x mResult either (const $ Left F.eNOENT) Right `fmap` C.takeMVar mResult -- Run an action in the MPD monad and lift the result into I/O. ioMPD :: C.Chan Request -> MPD a -> IO a ioMPD chan m = fuseMPD chan m >>= either (\(F.Errno x) -> fail (show x)) return replace :: Eq a => a -> a -> [a] -> [a] replace from to = map (\x -> if x == from then to else x) packMaybe :: Show a => Maybe a -> ByteString packMaybe (Just x) = packShow x packMaybe Nothing = B.empty packShow :: Show a => a -> ByteString packShow = B.pack . show packBool :: Bool -> ByteString packBool b = B.pack (if b then "1" else "0") rtrim :: ByteString -> ByteString rtrim = B.reverse . B.dropWhile (`elem` "\n\r\t") . B.reverse
joachifm/mpdfs
Main.hs
gpl-2.0
14,818
0
19
4,751
4,685
2,467
2,218
286
16
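
The core of the module above is the `Request` channel: every MPD action is serialised through a single worker thread (`mpdloop`), and `ReqSync` hands results back through an `MVar` (see `fuseMPD`). A stripped-down, self-contained sketch of the same request/worker pattern using plain `IO` actions instead of the `MPD` monad; names are invented for the example:

import Control.Concurrent (forkIO)
import Control.Concurrent.Chan
import Control.Concurrent.MVar

data Req = Sync (IO String) (MVar String)  -- run an action, hand back its result
         | Done                            -- shut the worker down

-- The single worker owning the (here imaginary) connection.
worker :: Chan Req -> IO ()
worker chan = do
  req <- readChan chan
  case req of
    Sync act result -> act >>= putMVar result >> worker chan
    Done            -> return ()

-- Callers block until the worker has run their action, mirroring fuseMPD.
request :: Chan Req -> IO String -> IO String
request chan act = do
  result <- newEmptyMVar
  writeChan chan (Sync act result)
  takeMVar result

main :: IO ()
main = do
  chan <- newChan
  _ <- forkIO (worker chan)
  request chan (return "pong") >>= putStrLn
  writeChan chan Done
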
{-# LANGUAGE PatternGuards #-}

module Latex ( latexEasy, latexOptimizedExpression ) where

import Expression ( Expression(..), Exprn(..), Type,
                    substitute, latex, k, k_var,
                    IsTemp( CannotBeFreed ),
                    cleanvars, mapExprn, mkExprn )
import Statement ( Statement(..), freeVectors, reuseVar )
import Optimize ( optimize, findNamedSubexpression )

latexEasy :: (Type a) => Expression a -> String
latexEasy e0 = unlines $
  ["\\documentclass{article}",
   "\\usepackage{amsmath}",
   "\\usepackage{color}",
   "\\usepackage{breqn}",
   "\\begin{document}"]
  ++ map latexme (niceExprns e0) ++
  ["\\end{document}"]

latexOptimizedExpression :: (Type a) => Expression a -> String
latexOptimizedExpression e = unlines $
  ["\\documentclass{article}",
   "\\usepackage{environ}", -- consider mdframed when it is in debian
   "\\usepackage{amsmath}",
   "\\usepackage{color}",
   "\\usepackage{breqn}",
   "\\begin{document}",
   "\\newbox{\\savetextbox}",
   "\\NewEnviron{bracetext}[1][\\textwidth]{%",
   " \\begin{lrbox}{\\savetextbox}%",
   " \\begin{minipage}{#1} \\BODY \\end{minipage}",
   " \\end{lrbox}%",
   " \\smallskip%",
   " \\noindent\\makebox[0pt][r]{$\\left\\{\\rule{0pt}{\\ht\\savetextbox}\\right.$}%",
   " \\usebox{\\savetextbox}\\par",
   " \\smallskip%",
   "}",
   ""]
  ++ map latexS (sts) ++
  [mapExprn latexe e',
   "\\end{document}"]
  where (sts0,[e']) = optimize [mkExprn e]
        sts = reuseVar $ freeVectors sts0

eqn :: String -> String
-- The following is a safety valve to avoid applying dmath to
-- equations that are so complicated that it simply can't handle them.
-- The number of characters is determined by pure experimentation, and
-- may be quite inaccurate.
eqn e | length e > 10000 = unlines ["\\begin{equation}",
                                    " " ++ e,
                                    "\\end{equation}"]
eqn e = unlines ["\\begin{dmath}",
                 " " ++ e,
                 "\\end{dmath}"]

latexme :: Exprn -> String
latexme (ES e) = latexe e
latexme (EK e) = latexe e
latexme (ER e) = latexe e

latexe :: Type a => Expression a -> String
latexe (Var _ _ _ t (Just e')) = eqn (t ++ " = " ++ latex (simpk e'))
latexe (Var _ _ _ _ Nothing) = ""
latexe e = error ("oops in latexe: " ++ show e)

simpk :: Type a => Expression a -> Expression a
simpk = substitute (k**2) (k_var "k" **2)

niceExprns :: (Type a) => Expression a -> [Exprn]
niceExprns e0@(Var _ _ _ _ _) =
  case findNamedSubexpression e0 of
    Just (ES v@(Var _ b c t (Just _))) ->
      ES v : niceExprns (substitute v (Var CannotBeFreed b c t Nothing) e0)
    Just (ER v@(Var _ b c t (Just _))) ->
      ER v : niceExprns (substitute v (Var CannotBeFreed b c t Nothing) e0)
    Just (EK v@(Var _ b c t (Just _))) ->
      EK v : niceExprns (substitute v (Var CannotBeFreed b c t Nothing) e0)
    _ -> []
niceExprns _ = error "need named input in niceExprns"

latexS :: Statement -> String
latexS (Assign (ER x@(Var a b c t _)) (ER y))
  | tds@(_:_:_) <- niceExprns (Var a b c t (Just y)) =
      unlines $ ["\\begin{bracetext}"] ++ map latexme tds ++ ["\\end{bracetext}"]
  | otherwise = eqn $ latex x ++ " = " ++ latex (simpk $ cleanvars y)
latexS (Assign (EK x@(Var a b c t _)) (EK y))
  | tds@(_:_:_) <- niceExprns (Var a b c t (Just y)) =
      unlines $ ["\\begin{bracetext}"] ++ map latexme tds ++ ["\\end{bracetext}"]
  | otherwise = eqn $ latex x ++ " = " ++ latex (simpk $ cleanvars y)
latexS (Assign (ES x@(Var a b c t _)) (ES y))
  | tds@(_:_:_) <- niceExprns (Var a b c t (Just y)) =
      unlines $ ["\\begin{bracetext}"] ++ map latexme tds ++ ["\\end{bracetext}"]
  | otherwise = eqn $ latex x ++ " = " ++ latex (simpk $ cleanvars y)
latexS (Initialize (ER e)) = eqn $ "\\text{initialize real space } " ++ latex e
latexS (Initialize (EK e)) = eqn $ "\\text{initialize reciprocal space } " ++ latex e
latexS (Initialize (ES _)) = ""
latexS (Free e) = eqn $ "\\text{free } " ++ latex e
latexS st = error ("bad statement in latexS: "++ show st)
droundy/deft
src/haskell/Latex.hs
gpl-2.0
4,641
0
15
1,537
1,492
769
723
86
4
module Zepto.Primitives.RegexPrimitives where import Control.Monad.Except (throwError) import Data.ByteString.Char8 (unpack, pack) import Text.Regex.PCRE.Heavy import qualified Text.Regex.PCRE.Light.Base as R import Zepto.Types makeRegexDoc :: String makeRegexDoc = "creates a new regex from a string <par>s</par>.\n\ \n\ params:\n\ - s: the string to convert\n\ complexity: O(n)\n\ returns: a new regex" makeRegex :: LispVal -> ThrowsError LispVal makeRegex (SimpleVal (String s)) = case compileM (pack s) [] of Left msg -> throwError $ Default msg Right r -> return $ fromSimple $ Regex $ r makeRegex x = throwError $ TypeMismatch "string" x regexPatternDoc :: String regexPatternDoc = "gets the regex pattern as a string.\n\ \n\ params:\n\ - r: the regex to analyze\n\ complexity: O(n)\n\ returns: a string representing the match" regexPattern :: LispVal -> ThrowsError LispVal regexPattern (SimpleVal (Regex (R.Regex _ r))) = return $ fromSimple $ String $ unpack r regexPattern x = throwError $ TypeMismatch "regex" x regexMatchesDoc :: String regexMatchesDoc = "matches a regex against a a string.\n\ \n\ params:\n\ - r: the regex against which we match\n\ - check: the string to match\n\ complexity: heavily dependent on the input regex\n\ returns: a boolean" regexMatches :: [LispVal] -> ThrowsError LispVal regexMatches [SimpleVal (Regex r), SimpleVal (String pattern)] = return $ fromSimple $ Bool $ pattern =~ r regexMatches [SimpleVal (Regex _), x] = throwError $ TypeMismatch "string" x regexMatches [x, _] = throwError $ TypeMismatch "regex" x regexMatches x = throwError $ NumArgs 2 x regexScanDoc :: String regexScanDoc = "scans a string <par>check</par> for occurences of the regex <par>r</par>.\n\ \n\ params:\n\ - r: the regex against which we match\n\ - check: the string to scan\n\ complexity: heavily dependent on the input regex\n\ returns: a list of lists of the form <zepto>[match, [groups]]</zepto>" regexScan :: [LispVal] -> ThrowsError LispVal regexScan [SimpleVal (Regex r), SimpleVal (String pattern)] = return $ List $ map convert (scan r pattern) where convert (str, l) = List [fromSimple $ String str, List $ map (\x -> fromSimple $ String x) l] regexScan [SimpleVal (Regex _), x] = throwError $ TypeMismatch "string" x regexScan [x, _] = throwError $ TypeMismatch "regex" x regexScan x = throwError $ NumArgs 2 x regexScanODoc :: String regexScanODoc = "scans a string <par>check</par> for occurences of the regex <par>r</par>\n\ and returns a list of lists of the start and end indices.\n\ \n\ params:\n\ - r: the regex against which we match\n\ - check: the string to scan\n\ complexity: heavily dependent on the input regex\n\ returns: a list of lists of the form <zepto>[start, end]</zepto>" regexScanO :: [LispVal] -> ThrowsError LispVal regexScanO [SimpleVal (Regex r), SimpleVal (String pattern)] = return $ List $ map convert (scanRanges r pattern) where convert (range, ranges) = List [build range, List $ map build ranges] build (start, end) = List [fromSimple $ Number $ NumS start, fromSimple $ Number $ NumS end] regexScanO [SimpleVal (Regex _), x] = throwError $ TypeMismatch "string" x regexScanO [x, _] = throwError $ TypeMismatch "regex" x regexScanO x = throwError $ NumArgs 2 x regexSubDoc :: String regexSubDoc = "replaces the first occurence of the regex <par>r</par>\n\ in the string <par>input</par> with <par>pattern</par>.\n\ \n\ params:\n\ - r: the regex against which we match\n\ - pattern: the string that should be inserted\n\ - input: the string to change\n\ complexity: heavily dependent 
on the input regex\n\ returns: a new string" regexSub :: [LispVal] -> ThrowsError LispVal regexSub [SimpleVal (Regex r), SimpleVal (String s), SimpleVal (String pattern)] = return $ fromSimple $ String $ sub r s pattern regexSub [SimpleVal (Regex _), x, SimpleVal (String _)] = throwError $ TypeMismatch "string" x regexSub [x, _, SimpleVal (String _)] = throwError $ TypeMismatch "regex" x regexSub [_, _, x] = throwError $ TypeMismatch "string" x regexSub x = throwError $ NumArgs 3 x regexGSubDoc :: String regexGSubDoc = "replaces all occurences of the regex <par>r</par>\n\ in the string <par>input</par> with <par>pattern</par>.\n\ \n\ params:\n\ - r: the regex against which we match\n\ - pattern: the string that should be inserted\n\ - input: the string to change\n\ complexity: heavily dependent on the input regex\n\ returns: a new string" regexGSub :: [LispVal] -> ThrowsError LispVal regexGSub [SimpleVal (Regex r), SimpleVal (String s), SimpleVal (String pattern)] = return $ fromSimple $ String $ gsub r s pattern regexGSub [SimpleVal (Regex _), x, SimpleVal (String _)] = throwError $ TypeMismatch "string" x regexGSub [x, _, SimpleVal (String _)] = throwError $ TypeMismatch "regex" x regexGSub [_, _, x] = throwError $ TypeMismatch "string" x regexGSub x = throwError $ NumArgs 3 x regexSplitDoc :: String regexSplitDoc = "splits the string input on matches of the regex <par>r</par>.\n\ \n\ params:\n\ - r: the regex against which we match\n\ - input: the string to splite\n\ complexity: heavily dependent on the input regex\n\ returns: a new string" regexSplit :: [LispVal] -> ThrowsError LispVal regexSplit [SimpleVal (Regex r), SimpleVal (String s)] = return $ List $ map (fromSimple . String) $ split r s regexSplit [SimpleVal (Regex _), x] = throwError $ TypeMismatch "string" x regexSplit [x, _] = throwError $ TypeMismatch "regex" x regexSplit x = throwError $ NumArgs 2 x
zepto-lang/zepto
src/Zepto/Primitives/RegexPrimitives.hs
gpl-2.0
5,734
158
14
1,121
2,271
1,163
1,108
-1
-1
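
For context, the primitives above all work in `ThrowsError` over `LispVal` values. A small illustration of how they compose on the Haskell side, assuming (as the pattern matches above suggest) that `fromSimple` wraps a simple value as `SimpleVal` and that `ThrowsError` is a monad; the regex literal is made up for the example:

-- Build a regex from a Zepto string value and match it against another string;
-- either step can fail with a TypeMismatch or a compilation error.
matchesFoo :: String -> ThrowsError LispVal
matchesFoo s = do
  r <- makeRegex (fromSimple (String "foo+"))   -- "foo+" is a hypothetical pattern
  regexMatches [r, fromSimple (String s)]
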
{-# LANGUAGE RankNTypes, OverloadedStrings #-} module Game.Boggle.Bot ( boggleBot ,sanitize ) where import Control.Applicative import Control.Lens import Control.Monad import Control.Monad.Except import Control.Monad.State import qualified Data.ByteString.Lazy as BSL import qualified Data.ByteString as BS import Data.Foldable hiding (forM_, mapM_) import Data.Function import Data.Maybe import Data.Monoid import Data.List as L import Data.String import Data.FixFile import qualified Data.FixFile.Trie.Light as T import System.Random import Game.Boggle import Network.IRC import Network.IRC.Bot chunkWords :: [BSL.ByteString] -> [[BSL.ByteString]] chunkWords = ($ []) . chunk' 0 id where chunk' 0 _ [] = id chunk' _ f [] = (f []:) chunk' c f l@(x:xs) | c > 100 = (f []:) . chunk' 0 id l | otherwise = chunk' (c + BSL.length x) (f . (x:)) xs cap :: BS.ByteString -> BS.ByteString cap = BS.map capW8 where capW8 c | c >= 97 && c <= 122 = c - 32 | otherwise = c capPM :: Message -> Message capPM (PrivMsg usr ch msg) = PrivMsg usr ch (cap msg) capPM x = x sanitize :: BSL.ByteString -> Maybe BSL.ByteString sanitize bs = sanitized where sanitized = do guard (BSL.length bs >= 3) BSL.pack <$> traverse san (BSL.unpack bs) san x | x >= 65 && x <= 90 = return x | x >= 97 && x <= 122 = return (x - 32) | otherwise = Nothing boggleBot :: Channel -> StdGen -> IRCBot (Ref Trie) () boggleBot ch g = do handleChannel ch $ flip evalStateT (20 :: Int, (g, Nothing)) $ forever $ do msg <- lift $ fmap capPM <$> readIn' r <- zoom _2 $ gameRunning warned <- zoom _2 $ hasWarned case (msg, r, warned) of (IRCMessage (PrivMsg _ _ "BOGGLE TIME"), False, _) -> do _1 .= 20 void $ zoom _2 $ (lift (readQuery getFull) >>= newGame) lift $ channelTimeout ch 120000000 Just (b, ws, _, _) <- use (_2._2) let ms = getSum $ foldMap (Sum . wordValue) ws lift $ do writeOut "It's Boggle Time!" writeOut ("Maximum Score " <> fromString (show ms)) forM_ (boardLines b) $ \l -> do writeOut (BSL.toStrict l) (IRCMessage (PrivMsg _ _ "HELP"), False, _) -> lift $ do writeOut "BoggleBot Commands:" writeOut "\"boggle time\" - starts a game of boggle" writeOut "\"!board\" - displays the current board" writeOut "\"help\" - displays this help message." writeOut "\"lookup $WORD\" - lookup a word in the dictionary." writeOut "\"insert $WORD\" - insert a word into the dictionary." writeOut "\"delete $WORD\" - delete a word from the dictionary." (IRCMessage (PrivMsg _ _ cmd), False, _) -> lift $ do let cmds = BSL.split 32 $ BSL.fromStrict cmd case catMaybes $ fmap sanitize cmds of "LOOKUP":ws -> forM_ ws $ \w -> do unless (BSL.null w) $ do lu <- readQuery (T.lookupTrieT (BSL.reverse w)) if isJust lu then writeOut . BSL.toStrict $ ("\"" <> w <> "\" is a word") else writeOut . BSL.toStrict $ ("\"" <> w <> "\" is not a word") "INSERT":ws -> forM_ ws $ \w -> do unless (BSL.null w) $ do ins <- writeExceptQuery $ do let w' = BSL.reverse w lu <- lift $ T.lookupTrieT w' if isJust lu then throwError () else lift $ T.insertTrieT w' () case ins of Left _ -> writeOut . BSL.toStrict $ ("\"" <> w <> "\" was already present") Right _ -> writeOut . BSL.toStrict $ ("\"" <> w <> "\" has been added") "DELETE":ws -> forM_ ws $ \w -> do unless (BSL.null w) $ do del <- writeExceptQuery $ do let w' = BSL.reverse w lu <- lift $ T.lookupTrieT w' if isJust lu then lift $ T.deleteTrieT w' else throwError () case del of Left _ -> writeOut . BSL.toStrict $ ("\"" <> w <> "\" was not present") Right _ -> writeOut . 
BSL.toStrict $ ("\"" <> w <> "\" has been deleted") _ -> return () (_, False, _) -> return () (Timeout _, _, True) -> do Just (scores, missed) <- zoom _2 $ endGame lift $ writeOut "Time's up!" let scores' = L.reverse $ sortBy (compare `on` (^._3)) scores lift $ forM_ scores' $ \(p, ws, s) -> do writeOut . BSL.toStrict $ (p <> " : " <> fromString (show s)) forM_ (chunkWords ws) $ \ws' -> writeOut . BSL.toStrict $ (p <> " : " <> BSL.intercalate ", " ws') lift $ forM_ (chunkWords missed) $ \miss -> do writeOut . BSL.toStrict $ ("Missed Words: " <> BSL.intercalate ", " miss) (Timeout _, _, False) -> do zoom _2 $ warn lift $ writeOut "One minute remaining!" lift $ channelTimeout ch 60000000 (IRCMessage (PrivMsg (User usr _) _ str), _, _) -> do _1 -= 1 lc <- use _1 when (lc == 0) $ do _1 .= 20 Just (b, _, _, _) <- use (_2._2) lift $ forM_ (boardLines b) $ \l -> do writeOut (BSL.toStrict l) let bs = catMaybes . fmap sanitize . BSL.split 32 $ str' str' = BSL.fromStrict str usr' = BSL.fromStrict usr zoom _2 $ mapM_ (scoreWord usr') bs when (str == "!BOARD") $ do Just (b, _, _, _) <- use (_2._2) lift $ forM_ (boardLines b) $ \l -> do writeOut (BSL.toStrict l) _ -> return ()
revnull/bogglebot
src/Game/Boggle/Bot.hs
gpl-3.0
6,897
0
34
3,106
2,162
1,070
1,092
145
16
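
`sanitize` is the only export besides `boggleBot`; it upper-cases candidate words and rejects anything shorter than three characters or containing a non-letter. A few examples derived directly from its definition (OverloadedStrings, as enabled in the module, makes the lazy ByteString literals work):

sanitizeExamples :: Bool
sanitizeExamples = and
  [ sanitize "boggle" == Just "BOGGLE"
  , sanitize "ab"     == Nothing       -- shorter than three characters
  , sanitize "qu!z"   == Nothing       -- '!' is not a letter
  ]
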
{- |
Module      : Constant
Description : Constant values for the SonyGPSAssist program.
Copyright   : (c) Frédéric BISSON, 2015
License     : GPL-3
Maintainer  : [email protected]
Stability   : experimental
Portability : POSIX
-}
module Constant where

import Network.Curl (URLString)

{- | The URL pointing to the Sony GPS data file. -}
gpsDataUrl :: URLString
gpsDataUrl = "http://control.d-imaging.sony.co.jp/GPS/assistme.dat"

{- | The URL pointing to the MD5 corresponding to the Sony GPS data file. -}
gpsMD5Url :: URLString
gpsMD5Url = "http://control.d-imaging.sony.co.jp/GPS/assistme.md5"

{- | Base directory used to identify a Sony storage device. -}
sonyBaseDir :: FilePath
sonyBaseDir = "PRIVATE/SONY"

{- | GPS directory where GPS data should be stored. -}
sonyGPSDir :: FilePath
sonyGPSDir = "PRIVATE/SONY/GPS"

{- | GPS data file name. -}
sonyGPSName :: FilePath
sonyGPSName = "assistme.dat"
Zigazou/SonyGPSAssistHS
src/Constant.hs
gpl-3.0
907
0
5
144
70
45
25
12
1
module Cashflow.Parser (
     P.ParseError
    ,D.Entries
    ,parseFile
    ,parse
) where

import qualified Text.Parsec as P
import qualified Text.Parsec.String as PS
import Control.Applicative
import Data.Monoid

import qualified Cashflow.Entry as D

ws :: PS.Parser String
ws = many (P.oneOf " \t,")

newLine :: PS.Parser String
newLine = P.many1 (P.oneOf "\r\n")

lexeme :: PS.Parser a -> PS.Parser a
lexeme p = p <* ws

emptyLines :: PS.Parser [String]
emptyLines = many ((P.many1 $ P.oneOf " \r\n") <|> comment)

parseLine :: PS.Parser a -> PS.Parser a
parseLine p = lexeme p <* emptyLines

int :: PS.Parser Int
int = read <$> P.many1 P.digit

month :: PS.Parser D.Month
month = fmap read . foldr1 (<|>) $ map (P.try . P.string . show) [D.Jan ..]

tentative :: PS.Parser Bool
tentative = P.option False $ P.char '~' *> pure True

tentativeMonth :: PS.Parser (Bool, D.Month)
tentativeMonth = P.option (True, D.Dec) $ (,) <$> tentative <*> month

string :: PS.Parser String
string = many $ P.noneOf "\n\r\t:#[]()"

description :: PS.Parser String
description = ws *> string <* P.char ':'

entry :: PS.Parser D.Entry
entry = D.Entry <$> lexeme description <*> lexeme int

collapse :: (Functor f, Monoid a) => f [a] -> f a
collapse = fmap mconcat

parseWithHeading :: PS.Parser a -> (a -> PS.Parser b) -> PS.Parser [b]
parseWithHeading heading values = do
    h <- (parseLine heading)
    many $ parseLine $ values h

sectionHeading :: String -> PS.Parser String
sectionHeading name = P.try $ P.between (P.char '[') (P.char ']') (P.string name)

parseSection :: (Monoid a) => String -> PS.Parser a -> PS.Parser a
parseSection name = collapse . parseWithHeading (sectionHeading name) . const

groupHeading :: PS.Parser String
groupHeading = P.between (P.char '(') (P.char ')') string

parseGroup :: (Monoid a) => (String -> PS.Parser a) -> PS.Parser a
parseGroup = collapse . (parseWithHeading groupHeading)

monthlyExpense :: PS.Parser D.Entries
monthlyExpense = D.fromMonthlyExpense . D.MonthlyExpense <$> entry

income :: PS.Parser D.Entries
income = D.fromIncome . D.Income <$> entry

asset :: PS.Parser D.Entries
asset = D.fromAsset . D.Asset <$> entry

projection :: PS.Parser D.Entries
projection = D.fromProjection <$> (D.Projection <$> entry <*> lexeme month)

expense :: PS.Parser D.Entries
expense = D.fromExpense <$> (exp <$> entry <*> tentativeMonth)
    where exp = \e (t, m) -> D.Expense e m t

debt :: String -> PS.Parser D.Entries
debt creditor = D.fromDebt <$> (D.Debt <$> entry <*> pure creditor
                                       <*> lexeme month <*> lexeme int)

monthlyExpenses :: PS.Parser D.Entries
monthlyExpenses = parseSection "monthly expenses" monthlyExpense

incomes :: PS.Parser D.Entries
incomes = parseSection "income" income

assets :: PS.Parser D.Entries
assets = parseSection "assets" asset

projections :: PS.Parser D.Entries
projections = parseSection "projections" projection

expenses :: PS.Parser D.Entries
expenses = parseSection "expenses" expense

debtGroup :: PS.Parser D.Entries
debtGroup = parseGroup debt

debts :: PS.Parser D.Entries
debts = parseSection "debt" debtGroup

comment :: PS.Parser String
comment = P.char '#' *> many (P.noneOf "\n\r") <* newLine

file :: PS.Parser D.Entries
file = emptyLines *> (collapse $ many section) <* P.eof
    where section = expenses <|> monthlyExpenses <|> incomes
                <|> debts <|> assets <|> projections

parseFile :: String -> IO (Either P.ParseError D.Entries)
parseFile = PS.parseFromFile file

parse :: String -> Either P.ParseError D.Entries
parse s = P.parse file "" s
StarvingMarvin/cashflow
src/Cashflow/Parser.hs
gpl-3.0
3,668
0
11
730
1,374
702
672
89
1
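
The combinators above imply an input format of bracketed `[section]` headings, optional `(creditor)` groups inside the debt section, `#` comments, and `description: amount` entries with an optional month and `~` tentative marker. A small input reconstructed from the grammar (the concrete field meanings, such as the trailing instalment on a debt line, are assumptions read off the combinators, not the project's documentation):

sampleInput :: String
sampleInput = unlines
  [ "# a comment"
  , "[monthly expenses]"
  , "rent: 900"
  , "[expenses]"
  , "holiday: 1200 ~Aug"   -- tentative expense expected in August
  , "[income]"
  , "salary: 3000"
  , "[debt]"
  , "(bank)"
  , "loan: 5000 Dec 250"   -- creditor group, month, instalment
  ]

-- parse sampleInput :: Either P.ParseError D.Entries
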
{-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE GADTs #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE LambdaCase #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE StandaloneDeriving #-} {-# LANGUAGE TupleSections #-} {-# LANGUAGE UnicodeSyntax #-} {-# OPTIONS_HADDOCK show-extensions #-} -- | -- Module : Yi.Mode.Agda -- License : GPL-3 -- Copyright : © Mateusz Kowalczyk, 2014 -- Maintainer : [email protected] -- Stability : experimental -- -- Agda mode for Yi module Yi.Mode.Agda where import Control.Applicative import Control.Concurrent import Control.Lens hiding (act) import Control.Monad.Base import Control.Monad.State import Data.Attoparsec.Text import Data.Binary import Data.Bits import Data.Default import Data.Either import Data.IORef import Data.Monoid import qualified Data.Text as Tx import qualified Data.Text.Encoding as TxE import qualified Data.Text.IO as TxI import Data.Typeable import Prelude hiding (takeWhile, drop) import System.Directory import System.Exit (ExitCode(..)) import System.IO import System.Process import Yi hiding (char) import Yi.Config.Simple.Types import Yi.Keymap.Emacs.KillRing import Yi.Lexer.Alex import Yi.Modes import qualified Yi.Rope as R import Yi.String import Yi.Types (YiVariable, YiConfigVariable) data AgdaStyle = AgdaStyle { _agdaKeyword ∷ StyleName , _agdaSymbol ∷ StyleName , _agdaPrimitiveType ∷ StyleName , _agdaModule ∷ StyleName , _agdaFunction ∷ StyleName , _agdaDatatype ∷ StyleName , _agdaInductiveConstructor ∷ StyleName , _agdaBound ∷ StyleName , _agdaUnsolvedMeta ∷ StyleName , _agdaPostulate ∷ StyleName , _agdaTerminationProblem ∷ StyleName , _agdaError ∷ StyleName , _agdaString ∷ StyleName } deriving (Typeable) -- | Convenience function rgb :: Word32 -> Color rgb x = RGB (fi (x `shiftR` 16)) (fi (x `shiftR` 8)) (fi x) where fi = fromIntegral fgColour ∷ Word32 → b → Style fgColour x = const $ withFg (rgb x) bgColour ∷ Word32 → b → Style bgColour x = const $ withBg (rgb x) -- | Agda defaults are used except in places where there's already a -- matching attribute in 'UIStyle'. instance Default AgdaStyle where def = AgdaStyle { _agdaKeyword = keywordStyle , _agdaSymbol = fgColour 0x404040 -- gray75 , _agdaPrimitiveType = builtinStyle , _agdaModule = fgColour 0xa020f0 -- purple , _agdaFunction = fgColour 0x0000cd -- medium blue , _agdaDatatype = typeStyle , _agdaInductiveConstructor = fgColour 0x008b00 -- green4 , _agdaBound = defaultStyle , _agdaUnsolvedMeta = -- yellow bg, black fg bgColour 0xffff00 <> fgColour 0x000000 , _agdaPostulate = fgColour 0x0000cd -- medium blue , _agdaTerminationProblem = -- light salmon bg, black fg bgColour 0xffa07a <> fgColour 0x000000 , _agdaError = fgColour 0xff0000 -- red , _agdaString = fgColour 0xb22222 -- firebrick } instance YiConfigVariable AgdaStyle data GoalInfo = GoalInfo { _goalIndex ∷ !Int , _goalType ∷ Tx.Text } deriving (Show, Eq, Typeable) getText ∷ Get Tx.Text getText = TxE.decodeUtf8 <$> Data.Binary.get putText ∷ Tx.Text → Put putText = Data.Binary.put . 
TxE.encodeUtf8 instance Binary GoalInfo where put (GoalInfo i gt) = Data.Binary.put i *> putText gt get = GoalInfo <$> Data.Binary.get <*> getText data AgdaState = AgdaState { _agdaGoals ∷ [GoalInfo] } deriving (Show, Eq, Typeable) instance Default AgdaState where def = AgdaState { _agdaGoals = mempty } instance Binary AgdaState where put (AgdaState gs) = Data.Binary.put gs get = AgdaState <$> Data.Binary.get instance YiVariable AgdaState agdaStyle ∷ Field AgdaStyle agdaStyle = customVariable styleIdentifier ∷ AgdaStyle → IdentifierInfo → StyleName styleIdentifier as Keyword = _agdaKeyword as styleIdentifier as Symbol = _agdaSymbol as styleIdentifier as PrimitiveType = _agdaPrimitiveType as styleIdentifier as (Module _ _) = _agdaModule as styleIdentifier as (Function _ _) = _agdaFunction as styleIdentifier as (Datatype _ _) = _agdaDatatype as styleIdentifier as (InductiveConstructor _) = _agdaInductiveConstructor as styleIdentifier as (Bound _ _) = _agdaBound as styleIdentifier as (Postulate _ _) = _agdaPostulate as styleIdentifier as (TerminationProblem _ _) = _agdaTerminationProblem as styleIdentifier as UnsolvedMeta = _agdaUnsolvedMeta as styleIdentifier as (ErrorI _ _ _ _) = _agdaError as styleIdentifier as StringI = _agdaString as styleIdentifier _ (IdentifierOther _) = defaultStyle sl ∷ AgdaStyle → StyleLexerASI () IdentifierInfo sl st = StyleLexer { _tokenToStyle = styleIdentifier st , _styleLexer = commonLexer (const Nothing) () } agdaMode ∷ TokenBasedMode IdentifierInfo agdaMode = mkAgdaMode $ sl def -- | Re-make the Agda mode: this allows us to cheat and when we want -- to slide in a new lexer, we remake the whole thing. mkAgdaMode ∷ Show (l s) ⇒ StyleLexer l s t i -> TokenBasedMode t mkAgdaMode x = styleMode x & modeNameA .~ "agda" & modeAppliesA .~ anyExtension [ "lagda", "agda" ] & modeToggleCommentSelectionA .~ Just (toggleCommentB "--") loadCurrentBuffer ∷ YiM () loadCurrentBuffer = withCurrentBuffer (gets file) >>= \case Nothing → printMsg "Current buffer is not associated with a file." Just fp → do b ← withCurrentBuffer $ gets id d ← getAgda >> getEditorDyn fwriteBufferE (bkey b) -- save before loading in the file sendAgda' (runCommands b) . loadCmd fp $ _agdaIncludeDirs d splitCase ∷ YiM () splitCase = withCurrentBuffer (gets file) >>= \case Nothing → printMsg "Current buffer is not associated with a file." Just fp → do (Point p, c, l, cr) ← withCurrentBuffer $ (,,,) <$> pointB <*> curCol <*> curLn <*> readB b ← withCurrentBuffer $ gets id sendAgda' (runCommands b) $ caseCmd fp 0 p l c (p + 1) l (c + 1) [cr] -- | Given commands, actually execute them in the editor runCommands ∷ FBuffer → [Command] → YiM () runCommands _ [] = return () runCommands b (Info _ m _:cs) = printMsg m >> runCommands b cs runCommands b (Status m:cs) | Tx.null m = runCommands b cs | otherwise = printMsg m >> runCommands b cs runCommands _ (ErrorGoto _ fp pnt:_) = -- TODO: It seems we should delay this until we processed all other -- commands to: don't want to miss out on highlighting because a -- goto came first. openingNewFile fp $ moveTo (Point pnt) -- Insert replacements at point runCommands b (MakeCase ls:cs) = do withGivenBuffer (bkey b) . 
savingPointB $ do pointB >>= solPointB >>= moveTo killRestOfLine insertN (R.fromText $ Tx.unlines ls) runCommands b cs runCommands b (HighlightClear:cs) = do withGivenBuffer (bkey b) $ delOverlayLayerB UserLayer runCommands b cs runCommands b a@(Identifiers _:_) = do let allIds = concat [ i | Identifiers i ← a ] rest = filter (\case { Identifiers _ → False; _ → True }) a overlays ← withEditor $ mapM makeOverlay allIds withGivenBuffer (bkey b) $ mapM_ addOverlayB overlays runCommands b rest runCommands b (ParseFailure t:cs) = printMsg t >> runCommands b cs runCommands b (HighlightLoadAndDelete fp:cs) = printMsg ("Somehow didn't read in " <> Tx.pack fp) >> runCommands b cs runCommands b (GoalAction _:cs) = runCommands b cs -- | Turns identifiers Agda tells us about to colour overlays. -- -- Uses current 'agdaStyle'. makeOverlay ∷ Identifier → EditorM Overlay makeOverlay (Identifier r i _) = agdaStyle `views` \st → mkOverlay UserLayer r (styleIdentifier st i) -- | Buffer used for process communication between Yi and Agda. To me -- it seems like the interface isn't flexible enough. newtype AgdaBuffer = AgdaBuffer { _agdaBuffer ∷ Maybe BufferRef } deriving (Show, Eq, Typeable) instance Default AgdaBuffer where def = AgdaBuffer Nothing -- | Path to the Agda binary. newtype AgdaPath = AgdaPath { _agdaPath ∷ FilePath } deriving (Show, Eq, Typeable) instance Default AgdaPath where def = AgdaPath "agda" -- | Directories to pass to Agda commands, pointing at libraries needed. newtype AgdaIncludeDirs = AgdaIncludeDirs { _agdaIncludeDirs ∷ [FilePath] } deriving (Show, Eq, Typeable) -- | By default, we point to the the "." directory. instance Default AgdaIncludeDirs where def = AgdaIncludeDirs ["."] -- | Extra flags to pass to Agda when it starts. "--interaction" is -- always used regardless and passed as the last element. newtype AgdaExtraFlags = AgdaExtraFlags { _agdaExtraFlags ∷ [String] } deriving (Show, Eq, Typeable) -- | By default no extra flags are passed in instance Default AgdaExtraFlags where def = AgdaExtraFlags [] deriving instance Binary AgdaBuffer deriving instance Binary AgdaPath deriving instance Binary AgdaIncludeDirs deriving instance Binary AgdaExtraFlags instance YiVariable AgdaBuffer instance YiVariable AgdaPath instance YiVariable AgdaIncludeDirs instance YiVariable AgdaExtraFlags startAgda ∷ YiM BufferRef startAgda = getEditorDyn >>= \case AgdaBuffer (Just b) → printMsg "Agda already started" >> return b AgdaBuffer Nothing → do AgdaPath binaryPath ← getEditorDyn AgdaExtraFlags extraFlags ← getEditorDyn cb ← withCurrentBuffer $ gets bkey b ← startSubprocess binaryPath (extraFlags <> ["--interaction"]) handleExit -- Switch back to the buffer we were in, startSubprocess really -- shouldn't switch for us. withEditor $ switchToBufferE cb putEditorDyn (AgdaBuffer $ Just b) >> return b where handleExit (Right ExitSuccess) = printMsg "Agda closed gracefully" handleExit (Right (ExitFailure x)) = printMsg $ "Agda quit with: " <> showT x handleExit (Left e) = printMsg $ "Exception in the Agda process: " <> showT e getAgda ∷ YiM BufferRef getAgda = getEditorDyn >>= \case AgdaBuffer (Just b) → return b AgdaBuffer Nothing → startAgda sendAgda ∷ IOTCM → YiM () sendAgda = sendAgda' (const $ return ()) sendAgda' ∷ ([Command] → YiM a) → IOTCM → YiM () sendAgda' f = sendAgdaRaw f . serialise -- | This monster sends a command to Agda, waits until a new prompt is -- spotted, parses everything in between and runs user-supplied -- function on the results. 
sendAgdaRaw ∷ ([Command] → YiM a) → String → YiM () sendAgdaRaw f s = getEditorDyn >>= \case AgdaBuffer Nothing → printMsg "Agda is not running." AgdaBuffer (Just b) → do let endIsPrompt ∷ BufferM (Maybe Int) endIsPrompt = do Point i ← sizeB end ← betweenB (Point $ i - Tx.length promptTxt) (Point i) return $ if R.toText end == promptTxt then Just i else Nothing -- Try set number of times before announcing Agda is not ready -- with a small wait between them, currently 10ms. ready ∷ Int → YiM (Either Tx.Text Int) ready n = withGivenBuffer b endIsPrompt >>= \case Nothing | n > 0 → liftBase (threadDelay 20000) >> ready (n - 1) | otherwise → return $ Left "Agda is not ready, not sending command" Just i → return $ Right i liftBase . putStrLn $ "Sending: " <> s t ← ready 10 case t of Left m → printMsg m Right bfs → do ior ← liftBase $ newIORef True sendToProcess b (s <> "\n") let terminate = do c ← readIORef ior if c then threadDelay 10000 >> return True else return False wb ∷ MonadEditor m ⇒ BufferM a → m a wb = withGivenBuffer b loop ∷ YiM () loop = wb endIsPrompt >>= \case Nothing → return () Just nbfs → when (nbfs /= bfs) $ do liftBase $ writeIORef ior False s' ← R.lines <$> wb (betweenB (Point bfs) (Point nbfs)) liftBase $ putStrLn (R.toString $ R.unlines s') cs ← fmap rights . liftBase $ mapM (parseCommand . R.toText) s' void $ f cs void $ forkAction terminate MustRefresh loop testFile ∷ FilePath testFile = "/tmp/DTPiA.agda" agdaPath ∷ FilePath agdaPath = "/run/current-system/sw/bin/agda" data Agda = Agda { _stdIn ∷ Handle , _stdOut ∷ Handle , _stdErr ∷ Handle , _procHandle ∷ ProcessHandle , _threads ∷ [ThreadId] } type Message = Tx.Text type Line = Int type Col = Int data InfoType = TypeChecking | Error | Goals | CurrentGoal | Other Tx.Text deriving (Show, Eq) data IdentifierInfo = Keyword | Symbol | PrimitiveType | Module FilePath Int | Function FilePath Int | Datatype FilePath Int | InductiveConstructor (Maybe (FilePath, Int)) | Bound FilePath Int | IdentifierOther Tx.Text | Postulate FilePath Int | TerminationProblem FilePath Int | UnsolvedMeta | ErrorI FilePath Line Col Tx.Text | StringI deriving (Show, Eq) data Identifier = Identifier Region IdentifierInfo (Maybe FilePath) deriving (Show) data Command = Info InfoType Message Bool | HighlightClear | HighlightLoadAndDelete FilePath | Identifiers [Identifier] | Status Tx.Text | ErrorGoto Line FilePath Col | MakeCase [Tx.Text] | ParseFailure Tx.Text | GoalAction [Int] deriving (Show) identifiers ∷ Parser [Identifier] identifiers = parens $ identifier `sepBy` char ' ' err ∷ Parser a err = takeText >>= error . Tx.unpack err' ∷ a → Parser b err' = const err identifier ∷ Parser Identifier identifier = parens $ do (s, (m, fp)) ← (,) <$> spn <~> idt return $ Identifier s m fp where mfp = Nothing <$ "nil" <|> Just . Tx.unpack <$> str withLoc c s = do (_,fp,(fp',d)) ← (,,) <$> parens s <~> mfp <~> pair (Tx.unpack <$> str) decimal return (c fp' d,fp) errP = do s ← parens "error" *> char ' ' *> str let p ∷ Parser (FilePath, Int, Int, Tx.Text) p = do fp ← takeWhile (/= ':') <* char ':' (ln, cl) ← (,) <$> (decimal <* char ',') <*> decimal -- Stuff is escaped (notably newline) because ELisp. 
i ← char '-' *> (decimal ∷ Parser Int) *> "\\n" *> takeText return (Tx.unpack fp, ln, cl, i) case parseOnly p s of Left s' → fail s' Right (fp, ln, cl, i) → return $ ErrorI fp ln cl i inductive = do (_,fp,p) ← (,,) <$> parens "inductiveconstructor" <~> mfp <*> optional (char ' ' *> pair (Tx.unpack <$> str) decimal) return $ (InductiveConstructor p, fp) idt = (,) <$> parens (Keyword <$ "keyword") <~> mfp <|> (,) <$> parens (Symbol <$ "symbol") <~> mfp <|> (,) <$> parens (PrimitiveType <$ "primitivetype") <~> mfp <|> (,) <$> parens (UnsolvedMeta <$ "unsolvedmeta") <~> mfp <|> (,) <$> parens (StringI <$ "string") <~> mfp <|> withLoc Module "module" <|> withLoc Function "function" <|> withLoc Datatype "datatype" <|> inductive <|> withLoc Bound "bound" <|> withLoc Postulate "postulate" <|> withLoc TerminationProblem "terminationproblem function" <|> (,) <$> errP <*> return Nothing <|> (,) <$> parens (IdentifierOther <$> takeWhile (/= ')')) <~> mfp -- Emacs counts columns with different indexing so we need to -- compensate here spn = mkRegion <$> (Point . pred <$> decimal) <~> (Point . pred <$> decimal) skippingPrompt ∷ Parser () skippingPrompt = void $ optional prompt promptTxt ∷ Tx.Text promptTxt = "Agda2> " prompt ∷ Parser Tx.Text prompt = string promptTxt <* skipSpace between ∷ Parser a → Parser b → Parser b between p = delim p p delim ∷ Parser a → Parser b → Parser c → Parser c delim p p' p'' = p *> p'' <* p' -- | Parses out stuff between double quotes. Accounts for quotes -- escaped with a backslash. str ∷ Parser Tx.Text str = do let p = "\\\"" <|> Tx.pack . return <$> notChar '"' char '"' *> (mconcat <$> many p) <* char '"' bool ∷ Parser Bool bool = False <$ "nil" <|> True <$ "t" parens ∷ Parser a → Parser a parens = delim (char '(') (char ')') quoted ∷ Parser a → Parser a quoted p = char '\'' *> p -- | Parser for an ELisp pair (naive). pair ∷ Parser a → Parser b → Parser (a, b) pair p p' = parens $ (,) <$> (p <* " . ") <*> p' skipSexpr ∷ Parser () skipSexpr = void . parens $ takeWhile (/= ')') infixl 4 <~> -- | Like '(<*>)' but with a single spaces between the parsers, useful -- for parsing out ‘words’. (<~>) ∷ Parser (a → b) → Parser a → Parser b p <~> p' = p <* char ' ' <*> p' commands ∷ Parser [Command] commands = command `sepBy` endOfLine failRest ∷ Parser a failRest = takeText >>= fail . Tx.unpack command ∷ Parser Command command = skippingPrompt *> cmds <|> parseFailure where cmds = parens (info <|> hlClear <|> hlLoadAndDelete <|> status) <|> mkCase <|> errorGoto <|> mkGoalAct parseFailure = ParseFailure <$> takeText info = Info <$> ("agda2-info-action " *> infoBfr) <~> str <~> bool infoBfr = between (char '"') $ TypeChecking <$ "*Type-checking*" <|> Error <$ "*Error*" <|> Goals <$ "*All Goals*" <|> CurrentGoal <$ "*Current Goal*" <|> Other <$> takeWhile (/= '"') hlClear = HighlightClear <$ "agda2-highlight-clear" hlLoadAndDelete = HighlightLoadAndDelete . Tx.unpack <$> ("agda2-highlight-load-and-delete-action " *> str) status = Status <$> ("agda2-status-action " *> str) mkGoalAct = let p = parens $ "agda2-goals-action " *> quoted (parens $ decimal `sepBy` char ' ') in GoalAction . snd <$> pair skipSexpr p mkCase = let p = parens $ "agda2-make-case-action " *> quoted (parens $ str `sepBy` char ' ') in MakeCase . 
snd <$> pair skipSexpr p errorGoto = do let ln = snd <$> pair (takeWhile (/= ' ')) decimal gt = parens $ "agda2-goto " *> quoted (pair str decimal) (l, (f, pnt)) ← pair ln gt return $ ErrorGoto l (Tx.unpack f) pnt instance Show Agda where show (Agda i o e _ ts) = "Agda " ++ unwords [show i, show o, show e, show ts] killAgda ∷ Agda → IO ExitCode killAgda (Agda _ _ _ ph ts) = mapM_ killThread ts >> terminateProcess ph >> waitForProcess ph runAgda ∷ IO Agda runAgda = runInteractiveProcess agdaPath ["--interaction"] Nothing Nothing >>= \case (sin', sout, serr, ph) → do hSetBuffering sin' NoBuffering return $ Agda sin' sout serr ph mempty parseCommand ∷ Tx.Text → IO (Either String Command) parseCommand = return . parseOnly (command <|> failRest) >=> \case Right (HighlightLoadAndDelete fp) → do fc ← TxI.readFile fp let r = parseOnly (Identifiers <$> identifiers) fc case r of Left _ → putStrLn $ Tx.unpack fc _ → return () removeFile fp >> return r x → return x threaded ∷ (Agda → IO ()) → Agda → IO Agda threaded f a = do forkIO (f a) >>= \t → return $ a { _threads = t : _threads a } parser ∷ Agda → IO () parser ag = forever $ do l ← hGetContents (_stdOut ag) mapM_ (parseCommand . Tx.pack >=> print) (lines l) test ∷ IO () test = do ag ← runAgda >>= threaded parser send ag $ loadCmd testFile [] send ag $ goalTypeCmd testFile 0 send ag $ caseCmd testFile 0 109 10 2 110 10 3 "x" threadDelay 500000 void $ killAgda ag data Activity = Interactive | NonInteractive deriving (Show, Eq) data Way = Indirect | Direct deriving (Show, Eq) data Complexity = Simplified deriving (Show, Eq) data IOTCM = IOTCM FilePath Activity Way Cmd deriving (Show, Eq) send ∷ Agda → IOTCM → IO () send a = sendRaw a . serialise sendRaw ∷ Agda → String → IO () sendRaw a = hPutStrLn (_stdIn a) data Cmd where Cmd_load ∷ FilePath → [FilePath] → Cmd Cmd_goal_type ∷ Complexity → Int → Cmd Cmd_make_case ∷ FilePath → Int → Int → Int → Int → Int → Int → Int → String → Cmd deriving (Show, Eq) serialise ∷ IOTCM → String serialise fc@(IOTCM fp' act w c) = case c of Cmd_load _ _ → show fc Cmd_goal_type cmp i → unwords ["IOTCM", show fp', show act, show w , "(Cmd_goal_type", show cmp, show i , "noRange", "\"\")" ] Cmd_make_case fp gi sch sr sc ech er ec cnt → unwords [ "IOTCM", show fp, show act, show w, "(Cmd_make_case", show gi , "(Range [Interval (Pn (Just (mkAbsolute", show fp <> "))" , show sch, show sr, show sc <> ")(Pn (Just (mkAbsolute" , show fp <> "))", show ech, show er, show ec <> ")])" , show cnt <> ")" ] caseCmd ∷ FilePath → Int -- ^ Index of the goal → Int -- ^ Start char index → Int -- ^ Start Row → Int -- ^ Start column → Int -- ^ End char index → Int -- ^ End row → Int -- ^ End column → String -- ^ Terms you want to split → IOTCM caseCmd fp gi sch sr sc ech er ec cnt = IOTCM fp NonInteractive Indirect (Cmd_make_case fp gi sch sr sc ech er ec cnt) loadCmd ∷ FilePath → [FilePath] → IOTCM loadCmd fp fps = IOTCM fp NonInteractive Indirect (Cmd_load fp fps) goalTypeCmd ∷ FilePath → Int -- ^ Goal index → IOTCM goalTypeCmd fp i = IOTCM fp NonInteractive Indirect (Cmd_goal_type Simplified i)
Fuuzetsu/yi-agda
src/Yi/Mode/Agda.hs
gpl-3.0
22,611
0
36
6,155
6,704
3,447
3,257
-1
-1
{-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeOperators #-} {-# OPTIONS_GHC -fno-warn-duplicate-exports #-} {-# OPTIONS_GHC -fno-warn-unused-binds #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- | -- Module : Network.Google.Resource.Cloudbuild.Projects.Locations.Triggers.List -- Copyright : (c) 2015-2016 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- -- Lists existing \`BuildTrigger\`s. This API is experimental. -- -- /See:/ <https://cloud.google.com/cloud-build/docs/ Cloud Build API Reference> for @cloudbuild.projects.locations.triggers.list@. module Network.Google.Resource.Cloudbuild.Projects.Locations.Triggers.List ( -- * REST Resource ProjectsLocationsTriggersListResource -- * Creating a Request , projectsLocationsTriggersList , ProjectsLocationsTriggersList -- * Request Lenses , pltlParent , pltlXgafv , pltlUploadProtocol , pltlAccessToken , pltlUploadType , pltlPageToken , pltlProjectId , pltlPageSize , pltlCallback ) where import Network.Google.ContainerBuilder.Types import Network.Google.Prelude -- | A resource alias for @cloudbuild.projects.locations.triggers.list@ method which the -- 'ProjectsLocationsTriggersList' request conforms to. type ProjectsLocationsTriggersListResource = "v1" :> Capture "parent" Text :> "triggers" :> QueryParam "$.xgafv" Xgafv :> QueryParam "upload_protocol" Text :> QueryParam "access_token" Text :> QueryParam "uploadType" Text :> QueryParam "pageToken" Text :> QueryParam "projectId" Text :> QueryParam "pageSize" (Textual Int32) :> QueryParam "callback" Text :> QueryParam "alt" AltJSON :> Get '[JSON] ListBuildTriggersResponse -- | Lists existing \`BuildTrigger\`s. This API is experimental. -- -- /See:/ 'projectsLocationsTriggersList' smart constructor. data ProjectsLocationsTriggersList = ProjectsLocationsTriggersList' { _pltlParent :: !Text , _pltlXgafv :: !(Maybe Xgafv) , _pltlUploadProtocol :: !(Maybe Text) , _pltlAccessToken :: !(Maybe Text) , _pltlUploadType :: !(Maybe Text) , _pltlPageToken :: !(Maybe Text) , _pltlProjectId :: !(Maybe Text) , _pltlPageSize :: !(Maybe (Textual Int32)) , _pltlCallback :: !(Maybe Text) } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'ProjectsLocationsTriggersList' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'pltlParent' -- -- * 'pltlXgafv' -- -- * 'pltlUploadProtocol' -- -- * 'pltlAccessToken' -- -- * 'pltlUploadType' -- -- * 'pltlPageToken' -- -- * 'pltlProjectId' -- -- * 'pltlPageSize' -- -- * 'pltlCallback' projectsLocationsTriggersList :: Text -- ^ 'pltlParent' -> ProjectsLocationsTriggersList projectsLocationsTriggersList pPltlParent_ = ProjectsLocationsTriggersList' { _pltlParent = pPltlParent_ , _pltlXgafv = Nothing , _pltlUploadProtocol = Nothing , _pltlAccessToken = Nothing , _pltlUploadType = Nothing , _pltlPageToken = Nothing , _pltlProjectId = Nothing , _pltlPageSize = Nothing , _pltlCallback = Nothing } -- | The parent of the collection of \`Triggers\`. 
Format: -- \`projects\/{project}\/locations\/{location}\` pltlParent :: Lens' ProjectsLocationsTriggersList Text pltlParent = lens _pltlParent (\ s a -> s{_pltlParent = a}) -- | V1 error format. pltlXgafv :: Lens' ProjectsLocationsTriggersList (Maybe Xgafv) pltlXgafv = lens _pltlXgafv (\ s a -> s{_pltlXgafv = a}) -- | Upload protocol for media (e.g. \"raw\", \"multipart\"). pltlUploadProtocol :: Lens' ProjectsLocationsTriggersList (Maybe Text) pltlUploadProtocol = lens _pltlUploadProtocol (\ s a -> s{_pltlUploadProtocol = a}) -- | OAuth access token. pltlAccessToken :: Lens' ProjectsLocationsTriggersList (Maybe Text) pltlAccessToken = lens _pltlAccessToken (\ s a -> s{_pltlAccessToken = a}) -- | Legacy upload protocol for media (e.g. \"media\", \"multipart\"). pltlUploadType :: Lens' ProjectsLocationsTriggersList (Maybe Text) pltlUploadType = lens _pltlUploadType (\ s a -> s{_pltlUploadType = a}) -- | Token to provide to skip to a particular spot in the list. pltlPageToken :: Lens' ProjectsLocationsTriggersList (Maybe Text) pltlPageToken = lens _pltlPageToken (\ s a -> s{_pltlPageToken = a}) -- | Required. ID of the project for which to list BuildTriggers. pltlProjectId :: Lens' ProjectsLocationsTriggersList (Maybe Text) pltlProjectId = lens _pltlProjectId (\ s a -> s{_pltlProjectId = a}) -- | Number of results to return in the list. pltlPageSize :: Lens' ProjectsLocationsTriggersList (Maybe Int32) pltlPageSize = lens _pltlPageSize (\ s a -> s{_pltlPageSize = a}) . mapping _Coerce -- | JSONP pltlCallback :: Lens' ProjectsLocationsTriggersList (Maybe Text) pltlCallback = lens _pltlCallback (\ s a -> s{_pltlCallback = a}) instance GoogleRequest ProjectsLocationsTriggersList where type Rs ProjectsLocationsTriggersList = ListBuildTriggersResponse type Scopes ProjectsLocationsTriggersList = '["https://www.googleapis.com/auth/cloud-platform"] requestClient ProjectsLocationsTriggersList'{..} = go _pltlParent _pltlXgafv _pltlUploadProtocol _pltlAccessToken _pltlUploadType _pltlPageToken _pltlProjectId _pltlPageSize _pltlCallback (Just AltJSON) containerBuilderService where go = buildClient (Proxy :: Proxy ProjectsLocationsTriggersListResource) mempty
brendanhay/gogol
gogol-containerbuilder/gen/Network/Google/Resource/Cloudbuild/Projects/Locations/Triggers/List.hs
mpl-2.0
6,354
0
19
1,465
960
554
406
139
1
{-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeOperators #-} {-# OPTIONS_GHC -fno-warn-duplicate-exports #-} {-# OPTIONS_GHC -fno-warn-unused-binds #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- | -- Module : Network.Google.Resource.Admin.Channels.Stop -- Copyright : (c) 2015-2016 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- -- Stops watching resources through this channel. -- -- /See:/ <https://developers.google.com/admin-sdk/ Admin SDK API Reference> for @admin.channels.stop@. module Network.Google.Resource.Admin.Channels.Stop ( -- * REST Resource ChannelsStopResource -- * Creating a Request , channelsStop , ChannelsStop -- * Request Lenses , csXgafv , csUploadProtocol , csAccessToken , csUploadType , csPayload , csCallback ) where import Network.Google.Directory.Types import Network.Google.Prelude -- | A resource alias for @admin.channels.stop@ method which the -- 'ChannelsStop' request conforms to. type ChannelsStopResource = "admin" :> "directory_v1" :> "channels" :> "stop" :> QueryParam "$.xgafv" Xgafv :> QueryParam "upload_protocol" Text :> QueryParam "access_token" Text :> QueryParam "uploadType" Text :> QueryParam "callback" Text :> QueryParam "alt" AltJSON :> ReqBody '[JSON] Channel :> Post '[JSON] () -- | Stops watching resources through this channel. -- -- /See:/ 'channelsStop' smart constructor. data ChannelsStop = ChannelsStop' { _csXgafv :: !(Maybe Xgafv) , _csUploadProtocol :: !(Maybe Text) , _csAccessToken :: !(Maybe Text) , _csUploadType :: !(Maybe Text) , _csPayload :: !Channel , _csCallback :: !(Maybe Text) } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'ChannelsStop' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'csXgafv' -- -- * 'csUploadProtocol' -- -- * 'csAccessToken' -- -- * 'csUploadType' -- -- * 'csPayload' -- -- * 'csCallback' channelsStop :: Channel -- ^ 'csPayload' -> ChannelsStop channelsStop pCsPayload_ = ChannelsStop' { _csXgafv = Nothing , _csUploadProtocol = Nothing , _csAccessToken = Nothing , _csUploadType = Nothing , _csPayload = pCsPayload_ , _csCallback = Nothing } -- | V1 error format. csXgafv :: Lens' ChannelsStop (Maybe Xgafv) csXgafv = lens _csXgafv (\ s a -> s{_csXgafv = a}) -- | Upload protocol for media (e.g. \"raw\", \"multipart\"). csUploadProtocol :: Lens' ChannelsStop (Maybe Text) csUploadProtocol = lens _csUploadProtocol (\ s a -> s{_csUploadProtocol = a}) -- | OAuth access token. csAccessToken :: Lens' ChannelsStop (Maybe Text) csAccessToken = lens _csAccessToken (\ s a -> s{_csAccessToken = a}) -- | Legacy upload protocol for media (e.g. \"media\", \"multipart\"). csUploadType :: Lens' ChannelsStop (Maybe Text) csUploadType = lens _csUploadType (\ s a -> s{_csUploadType = a}) -- | Multipart request metadata. 
csPayload :: Lens' ChannelsStop Channel csPayload = lens _csPayload (\ s a -> s{_csPayload = a}) -- | JSONP csCallback :: Lens' ChannelsStop (Maybe Text) csCallback = lens _csCallback (\ s a -> s{_csCallback = a}) instance GoogleRequest ChannelsStop where type Rs ChannelsStop = () type Scopes ChannelsStop = '["https://www.googleapis.com/auth/admin.directory.user", "https://www.googleapis.com/auth/admin.directory.user.alias", "https://www.googleapis.com/auth/admin.directory.user.alias.readonly", "https://www.googleapis.com/auth/admin.directory.user.readonly", "https://www.googleapis.com/auth/cloud-platform"] requestClient ChannelsStop'{..} = go _csXgafv _csUploadProtocol _csAccessToken _csUploadType _csCallback (Just AltJSON) _csPayload directoryService where go = buildClient (Proxy :: Proxy ChannelsStopResource) mempty
brendanhay/gogol
gogol-admin-directory/gen/Network/Google/Resource/Admin/Channels/Stop.hs
mpl-2.0
4,648
0
18
1,134
727
424
303
106
1
{-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- | -- Module : Network.Google.AcceleratedMobilePageURL.Types -- Copyright : (c) 2015-2016 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- module Network.Google.AcceleratedMobilePageURL.Types ( -- * Service Configuration acceleratedMobilePageUrlService -- * AmpURLError , AmpURLError , ampURLError , aueOriginalURL , aueErrorCode , aueErrorMessage -- * AmpURLErrorErrorCode , AmpURLErrorErrorCode (..) -- * BatchGetAmpURLsResponse , BatchGetAmpURLsResponse , batchGetAmpURLsResponse , bgaurAmpURLs , bgaurURLErrors -- * BatchGetAmpURLsRequest , BatchGetAmpURLsRequest , batchGetAmpURLsRequest , bgaurURLs , bgaurLookupStrategy -- * AmpURL , AmpURL , ampURL , auOriginalURL , auAmpURL , auCdnAmpURL -- * Xgafv , Xgafv (..) -- * BatchGetAmpURLsRequestLookupStrategy , BatchGetAmpURLsRequestLookupStrategy (..) ) where import Network.Google.AcceleratedMobilePageURL.Types.Product import Network.Google.AcceleratedMobilePageURL.Types.Sum import Network.Google.Prelude -- | Default request referring to version 'v1' of the Accelerated Mobile Pages (AMP) URL API. This contains the host and root path used as a starting point for constructing service requests. acceleratedMobilePageUrlService :: ServiceConfig acceleratedMobilePageUrlService = defaultService (ServiceId "acceleratedmobilepageurl:v1") "acceleratedmobilepageurl.googleapis.com"
brendanhay/gogol
gogol-acceleratedmobilepageurl/gen/Network/Google/AcceleratedMobilePageURL/Types.hs
mpl-2.0
1,875
0
7
370
154
110
44
38
1
{-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeOperators #-} {-# OPTIONS_GHC -fno-warn-duplicate-exports #-} {-# OPTIONS_GHC -fno-warn-unused-binds #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- | -- Module : Network.Google.Resource.Vault.Operations.Get -- Copyright : (c) 2015-2016 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- -- Gets the latest state of a long-running operation. Clients can use this -- method to poll the operation result at intervals as recommended by the -- API service. -- -- /See:/ <https://developers.google.com/vault G Suite Vault API Reference> for @vault.operations.get@. module Network.Google.Resource.Vault.Operations.Get ( -- * REST Resource OperationsGetResource -- * Creating a Request , operationsGet , OperationsGet -- * Request Lenses , ogXgafv , ogUploadProtocol , ogAccessToken , ogUploadType , ogName , ogCallback ) where import Network.Google.Prelude import Network.Google.Vault.Types -- | A resource alias for @vault.operations.get@ method which the -- 'OperationsGet' request conforms to. type OperationsGetResource = "v1" :> Capture "name" Text :> QueryParam "$.xgafv" Xgafv :> QueryParam "upload_protocol" Text :> QueryParam "access_token" Text :> QueryParam "uploadType" Text :> QueryParam "callback" Text :> QueryParam "alt" AltJSON :> Get '[JSON] Operation -- | Gets the latest state of a long-running operation. Clients can use this -- method to poll the operation result at intervals as recommended by the -- API service. -- -- /See:/ 'operationsGet' smart constructor. data OperationsGet = OperationsGet' { _ogXgafv :: !(Maybe Xgafv) , _ogUploadProtocol :: !(Maybe Text) , _ogAccessToken :: !(Maybe Text) , _ogUploadType :: !(Maybe Text) , _ogName :: !Text , _ogCallback :: !(Maybe Text) } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'OperationsGet' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'ogXgafv' -- -- * 'ogUploadProtocol' -- -- * 'ogAccessToken' -- -- * 'ogUploadType' -- -- * 'ogName' -- -- * 'ogCallback' operationsGet :: Text -- ^ 'ogName' -> OperationsGet operationsGet pOgName_ = OperationsGet' { _ogXgafv = Nothing , _ogUploadProtocol = Nothing , _ogAccessToken = Nothing , _ogUploadType = Nothing , _ogName = pOgName_ , _ogCallback = Nothing } -- | V1 error format. ogXgafv :: Lens' OperationsGet (Maybe Xgafv) ogXgafv = lens _ogXgafv (\ s a -> s{_ogXgafv = a}) -- | Upload protocol for media (e.g. \"raw\", \"multipart\"). ogUploadProtocol :: Lens' OperationsGet (Maybe Text) ogUploadProtocol = lens _ogUploadProtocol (\ s a -> s{_ogUploadProtocol = a}) -- | OAuth access token. ogAccessToken :: Lens' OperationsGet (Maybe Text) ogAccessToken = lens _ogAccessToken (\ s a -> s{_ogAccessToken = a}) -- | Legacy upload protocol for media (e.g. \"media\", \"multipart\"). ogUploadType :: Lens' OperationsGet (Maybe Text) ogUploadType = lens _ogUploadType (\ s a -> s{_ogUploadType = a}) -- | The name of the operation resource. 
ogName :: Lens' OperationsGet Text ogName = lens _ogName (\ s a -> s{_ogName = a}) -- | JSONP ogCallback :: Lens' OperationsGet (Maybe Text) ogCallback = lens _ogCallback (\ s a -> s{_ogCallback = a}) instance GoogleRequest OperationsGet where type Rs OperationsGet = Operation type Scopes OperationsGet = '["https://www.googleapis.com/auth/ediscovery", "https://www.googleapis.com/auth/ediscovery.readonly"] requestClient OperationsGet'{..} = go _ogName _ogXgafv _ogUploadProtocol _ogAccessToken _ogUploadType _ogCallback (Just AltJSON) vaultService where go = buildClient (Proxy :: Proxy OperationsGetResource) mempty
brendanhay/gogol
gogol-vault/gen/Network/Google/Resource/Vault/Operations/Get.hs
mpl-2.0
4,505
0
15
1,057
701
411
290
99
1
{-# OPTIONS_GHC -F -pgmF dist/build/htfpp/htfpp #-} module MaxPrevTime (maxPrevTimeMain) where import Test.Framework import Control.Concurrent test_slow :: IO () test_slow = threadDelay 20000 test_fast :: IO () test_fast = return () maxPrevTimeMain args = htfMainWithArgs args htf_thisModulesTests
ekarayel/HTF
tests/real-bbt/MaxPrevTime.hs
lgpl-2.1
302
0
6
40
71
38
33
9
1
{-# OPTIONS_GHC -Wall #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE TypeOperators #-} {-# LANGUAGE TypeApplications #-} {-# LANGUAGE PatternSynonyms #-} module Dyno.DirectCollocation.Quadratures ( QuadratureRoots(..) , mkTaus , interpolate , timesFromTaus , collocationTimes ) where import GHC.Generics ( Generic ) import GHC.TypeLits ( KnownNat, natVal ) import Casadi.Matrix ( CMatrix ) import Data.Aeson ( FromJSON, ToJSON ) import Data.Proxy ( Proxy(..) ) import Data.Singletons.TypeLits (withKnownNat, pattern SNat) import Data.Singletons.Prelude.Num ((%+)) import qualified Data.Vector as V import qualified Data.Foldable as F import Data.Binary ( Binary ) import JacobiRoots ( shiftedLegendreRoots, shiftedRadauRoots ) import Dyno.View.View ( View, J ) import Dyno.TypeVecs ( Vec ) import Dyno.Vectorize ( devectorize ) import qualified Dyno.TypeVecs as TV import Dyno.LagrangePolynomials ( lagrangeXis ) data QuadratureRoots = Legendre | Radau deriving (Show, Eq, Ord, Enum, Generic) instance Binary QuadratureRoots instance ToJSON QuadratureRoots instance FromJSON QuadratureRoots mkTaus :: forall deg a . (KnownNat deg, Fractional a) => QuadratureRoots -> Vec deg a mkTaus quadratureRoots = case taus of Just taus' -> devectorize $ V.map (fromRational . toRational) taus' Nothing -> error "makeTaus: too high degree" where deg = fromIntegral (natVal (Proxy :: Proxy deg)) taus :: Maybe (V.Vector Double) taus = case quadratureRoots of Legendre -> shiftedLegendreRoots deg Radau -> fmap (`V.snoc` 1.0) (shiftedRadauRoots (deg-1)) -- todo: code duplication dot :: forall x deg a b. (View x, CMatrix a, Real b, KnownNat deg) => Vec deg b -> Vec deg (J x a) -> J x a dot cks xs = F.sum $ TV.unVec elemwise where elemwise :: Vec deg (J x a) elemwise = TV.tvzipWith smul cks xs smul :: b -> J x a -> J x a smul x y = realToFrac x * y -- todo: code duplication interpolate :: forall deg b x a . (KnownNat deg, Real b, Fractional b, View x, CMatrix a) => Vec deg b -> J x a -> Vec deg (J x a) -> J x a interpolate taus x0 xs = withKnownNat (SNat @deg %+ SNat @1) $ dot (TV.mkVec' xis) (x0 TV.<| xs) where xis = map (lagrangeXis (0 : F.toList taus) 1) [0..deg] deg = TV.tvlength taus timesFromTaus :: forall n deg a . (Num a, KnownNat n) => a -> Vec deg a -> a -> Vec n (a, Vec deg a) timesFromTaus t0 taus dt = times where n = fromIntegral (natVal (Proxy :: Proxy n)) -- initial time at each collocation stage t0s :: Vec n a t0s = TV.mkVec' $ take n [t0 + (dt * fromIntegral k) | k <- [(0::Int)..]] -- times at each collocation point times :: Vec n (a, Vec deg a) times = fmap (\t0' -> (t0', fmap (\tau -> t0' + tau * dt) taus)) t0s collocationTimes :: (KnownNat n, KnownNat deg, Fractional a) => a -> QuadratureRoots -> a -> Vec n (a, Vec deg a) collocationTimes t0 qr dt = timesFromTaus t0 (mkTaus qr) dt
ghorn/dynobud
dynobud/src/Dyno/DirectCollocation/Quadratures.hs
lgpl-3.0
3,097
0
15
683
1,106
601
505
73
3
data Point = Point Float Float deriving (Show) data Shape = Circle Point Float | Rectangle Point Point deriving (Show) surface :: Shape -> Float surface (Circle _ r) = pi * r ^ 2 surface (Rectangle (Point x1 y1) (Point x2 y2)) = (abs $ y2 - y1) * (abs $ x2 - x1) -- a function that moves the shape from initial coordinates nudge :: Shape -> Float -> Float -> Shape nudge (Circle (Point x y) r) a b = Circle (Point (x+a) (y+b)) r nudge (Rectangle (Point x1 y1) (Point x2 y2)) a b = Rectangle (Point (x1+a) (y1+b)) (Point (x2+a) (y2+b)) -- add functions that create shapes of some size at the origin and nudge them appropriately baseCircle :: Float -> Shape baseCircle r = Circle (Point 0 0) r baseRect :: Float -> Float -> Shape baseRect width height = Rectangle (Point 0 0) (Point width height)
iUwej/learnyehaskell
dataclasses.hs
unlicense
796
3
13
153
395
195
200
12
1
{-# LANGUAGE CPP #-} {-# LANGUAGE DeriveFunctor #-} {-# LANGUAGE DeriveFoldable #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE DeriveTraversable #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE UndecidableInstances #-} {-# LANGUAGE MultiParamTypeClasses #-} -------------------------------------------------------------------- -- | -- Copyright : (c) Edward Kmett and Dan Doel 2012-2013 -- License : BSD2 -- Maintainer: Edward Kmett <[email protected]> -- Stability : experimental -- Portability: non-portable -- -- These combinators can be used to retain sharing information. -------------------------------------------------------------------- module Ermine.Unification.Sharing ( runSharing , withSharing , sharing , SharingT(..) , Shared(..) , uncaring ) where import Control.Applicative import Control.Monad (void) import Control.Monad.Writer.Class import Control.Monad.Reader.Class import Control.Monad.State.Class import Control.Monad.Trans.Class import Control.Monad.IO.Class import Control.Comonad import Data.Foldable import Data.Monoid import Data.Traversable import Data.Data data Shared a = Shared !Bool a deriving (Eq,Ord,Show,Read,Functor,Foldable,Traversable,Typeable,Data) instance Comonad Shared where extract (Shared _ a) = a extend f s@(Shared b _) = Shared b (f s) -- An efficient strict-in-the-monoid version of WriterT Any@ newtype SharingT m a = SharingT { unsharingT :: m (Shared a) } #if defined(__GLASGOW_HASKELL__) && __GLASGOW_HASKELL__ >= 708 deriving Typeable #endif instance Monad m => Functor (SharingT m) where fmap f (SharingT m) = SharingT $ do Shared p a <- m return $! Shared p (f a) {-# INLINE fmap #-} instance Monad m => Applicative (SharingT m) where pure a = SharingT (return (Shared False a)) {-# INLINE pure #-} SharingT mf <*> SharingT ma = SharingT $ do Shared p f <- mf Shared q a <- ma return $! Shared (p || q) (f a) {-# INLINE (<*>) #-} instance Monad m => Monad (SharingT m) where return a = SharingT (return (Shared False a)) {-# INLINE return #-} SharingT m >>= f = SharingT $ do Shared p a <- m Shared q b <- unsharingT (f a) return $! Shared (p || q) b {-# INLINE (>>=) #-} instance Monad m => MonadWriter Any (SharingT m) where tell (Any p) = SharingT $ return $ Shared p () {-# INLINE tell #-} listen (SharingT ma) = SharingT $ do Shared p a <- ma return $! Shared p (a, Any p) {-# INLINE listen #-} pass (SharingT mapp) = SharingT $ do Shared p (a, pp) <- mapp return $! Shared (getAny (pp (Any p))) a {-# INLINE pass #-} instance MonadTrans SharingT where lift ma = SharingT $ do a <- ma return $! Shared False a {-# INLINE lift #-} instance MonadIO m => MonadIO (SharingT m) where liftIO = lift . liftIO {-# INLINE liftIO #-} instance MonadState s m => MonadState s (SharingT m) where get = lift get {-# INLINE get #-} put = lift . put {-# INLINE put #-} instance MonadReader e m => MonadReader e (SharingT m) where ask = lift ask {-# INLINE ask #-} local f = SharingT . local f . unsharingT {-# INLINE local #-} -- | Run an action, if it returns @'Any' 'True'@ then use its new value, otherwise use the passed in value. -- -- This can be used to recover sharing during unification when no interesting unification takes place. -- -- This version discards the 'SharingT' wrapper. runSharing :: Monad m => a -> SharingT m a -> m a runSharing a m = do Shared modified b <- unsharingT m return $! 
if modified then b else a {-# INLINE runSharing #-} withSharing :: Monad m => (a -> SharingT m a) -> a -> m a withSharing k a = runSharing a (k a) {-# INLINE withSharing #-} uncaring :: Functor m => SharingT m a -> m () uncaring = void . unsharingT {-# INLINE uncaring #-} -- | Run an action, if it returns @'Any' 'True'@ then use its new value, otherwise use the passed in value. -- -- This can be used to recover sharing during unification when no interesting unification takes place. -- -- This version retains the current monad wrapper. sharing :: MonadWriter Any m => a -> m a -> m a sharing a m = do (b, Any modified) <- listen m return $! if modified then b else a {-# INLINE sharing #-}
PipocaQuemada/ermine
src/Ermine/Unification/Sharing.hs
bsd-2-clause
4,254
0
16
855
1,161
605
556
102
2
{-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE DeriveAnyClass #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RankNTypes #-} {-# LANGUAGE BangPatterns #-} module Juno.Messaging.ZMQ ( runMsgServer ) where import Control.Concurrent (forkIO, threadDelay, yield, newMVar, takeMVar, putMVar, yield, newEmptyMVar) import qualified Control.Concurrent.Async as Async import Control.Concurrent.Chan.Unagi import qualified Control.Concurrent.Chan.Unagi.NoBlocking as NoBlock import Control.Monad.State.Strict import Data.ByteString (ByteString) import qualified Data.Map.Strict as Map import qualified Data.Set as Set import System.ZMQ4.Monadic import Data.Thyme.Clock import Data.Serialize import Data.Thyme.Calendar (showGregorian) import Data.Thyme.LocalTime import System.IO (hFlush, stderr, stdout) import Juno.Messaging.Types import Juno.Types (ReceivedAt(..),Digest(..),MsgType(..),SignedRPC(..)) import Juno.Util.Combinator (foreverRetry) sendProcess :: OutChan (OutBoundMsg String ByteString) -> Rolodex String (Socket z Push) -> ZMQ z () sendProcess outboxRead !r = do liftIO $ moreLogging "Entered sendProcess" rMvar <- liftIO $ newMVar r forever $ do (OutBoundMsg !addrs !msg) <- liftIO $! readChan outboxRead liftIO $ moreLogging $ "Sending message to " ++ (show addrs) ++ " ## MSG ## " ++ show msg r' <- liftIO $ takeMVar rMvar !newRol <- updateRolodex r' addrs !toPoll <- recipList newRol addrs mapM_ (\s -> send s [] msg) toPoll liftIO $ putMVar rMvar newRol liftIO $ moreLogging "Sent Msg" updateRolodex :: Rolodex String (Socket z Push) -> Recipients String -> ZMQ z (Rolodex String (Socket z Push)) updateRolodex r@(Rolodex !_rol) RAll = return $! r updateRolodex r@(Rolodex !rol) (RSome !addrs) = if Set.isSubsetOf addrs $! Map.keysSet rol then return $! r else do !a <- addNewAddrs r $! Set.toList addrs return $! a updateRolodex r@(Rolodex !rol) (ROne !addr) = if Set.member addr $! Map.keysSet rol then return $! r else do !a <- addNewAddrs r [addr] return $! a addNewAddrs :: Rolodex String (Socket z Push) -> [Addr String] -> ZMQ z (Rolodex String (Socket z Push)) addNewAddrs !r [] = return r addNewAddrs (Rolodex !r) (x:xs) = do !r' <- if Map.member x r then return $! Rolodex r else do s <- socket Push _ <- connect s $ _unAddr x return $! Rolodex $! Map.insert x (ListenOn s) r r' `seq` addNewAddrs r' xs recipList :: Rolodex String (Socket z Push) -> Recipients String -> ZMQ z [Socket z Push] recipList (Rolodex r) RAll = return $! _unListenOn <$> Map.elems r recipList (Rolodex r) (RSome addrs) = return $! _unListenOn . (r Map.!) <$> Set.toList addrs recipList (Rolodex r) (ROne addr) = return $! _unListenOn <$> [r Map.! 
addr] moreLogging :: String -> IO () moreLogging msg = do (ZonedTime (LocalTime d' t') _) <- getZonedTime putStrLn $ (showGregorian d') ++ "T" ++ (take 15 $ show t') ++ " [ZMQ]: " ++ msg hFlush stdout >> hFlush stderr runMsgServer :: NoBlock.InChan (ReceivedAt, SignedRPC) -> NoBlock.InChan (ReceivedAt, SignedRPC) -> NoBlock.InChan (ReceivedAt, SignedRPC) -> InChan (ReceivedAt, SignedRPC) -> OutChan (OutBoundMsg String ByteString) -> Addr String -> [Addr String] -> IO () runMsgServer inboxWrite cmdInboxWrite aerInboxWrite rvAndRvrWrite outboxRead me addrList = void $ forkIO $ forever $ do zmqThread <- Async.async $ runZMQ $ do liftIO $ moreLogging "Launching ZMQ_THREAD" zmqReceiver <- async $ do liftIO $ moreLogging "Launching ZMQ_RECEIVER" sock <- socket Pull _ <- bind sock $ _unAddr me forever $ do newMsg <- receive sock ts <- liftIO getCurrentTime case decode newMsg of Left err -> do liftIO $ moreLogging $ "Failed to deserialize to SignedRPC [Msg]: " ++ show newMsg liftIO $ moreLogging $ "Failed to deserialize to SignedRPC [Error]: " ++ err liftIO yield Right s@(SignedRPC dig _) | _digType dig == RV || _digType dig == RVR -> liftIO $ writeChan rvAndRvrWrite (ReceivedAt ts, s) >> moreLogging ("got " ++ show dig) >> yield | _digType dig == CMD || _digType dig == CMDB -> liftIO $ NoBlock.writeChan cmdInboxWrite (ReceivedAt ts, s) >> moreLogging ("got " ++ show dig) >> yield | _digType dig == AER -> liftIO $ NoBlock.writeChan aerInboxWrite (ReceivedAt ts, s) >> moreLogging ("got " ++ show dig) >> yield | otherwise -> liftIO $ NoBlock.writeChan inboxWrite (ReceivedAt ts, s) >> moreLogging ("got " ++ show dig) >> yield liftIO $ threadDelay 100000 -- to be sure that the receive side is up first liftIO $ moreLogging "Launching ZMQ_SENDER" zmqSender <- async $ do rolodex <- addNewAddrs (Rolodex Map.empty) addrList void $ sendProcess outboxRead rolodex liftIO $ moreLogging "Exiting ZMQ_SENDER" liftIO $ (Async.waitEitherCancel zmqReceiver zmqSender) >>= \res' -> case res' of Left () -> liftIO $ moreLogging "ZMQ_RECEIVER returned with ()" Right v -> liftIO $ moreLogging $ "ZMQ_SENDER returned with " ++ show v liftIO $ moreLogging "Exiting ZMQ_THREAD" res <- Async.waitCatch zmqThread Async.cancel zmqThread >> case res of Right () -> moreLogging "ZMQ_MSG_SERVER died returning () with no details" Left err -> moreLogging $ "ZMQ_MSG_SERVER exception " ++ show err
buckie/juno
src/Juno/Messaging/ZMQ.hs
bsd-3-clause
5,599
0
28
1,323
1,883
922
961
118
4
{-# LANGUAGE BangPatterns, OverloadedStrings #-} module Network.HPACK.HeaderBlock.Integer ( encode , decode , parseInteger ) where import Data.Array (Array, listArray, (!)) import Data.Bits ((.&.), shiftR) import Data.ByteString (ByteString) import qualified Data.ByteString as BS import Data.Word (Word8) -- $setup -- >>> import qualified Data.ByteString as BS ---------------------------------------------------------------- powerArray :: Array Int Int powerArray = listArray (1,8) [1,3,7,15,31,63,127,255] ---------------------------------------------------------------- {- if I < 2^N - 1, encode I on N bits else encode (2^N - 1) on N bits I = I - (2^N - 1) while I >= 128 encode (I % 128 + 128) on 8 bits I = I / 128 encode I on 8 bits -} -- | Integer encoding. The first argument is N of prefix. -- -- >>> encode 5 10 -- [10] -- >>> encode 5 1337 -- [31,154,10] -- >>> encode 8 42 -- [42] encode :: Int -> Int -> [Word8] encode n i | i < p = fromIntegral i : [] | otherwise = fromIntegral p : encode' (i - p) where p = powerArray ! n encode' :: Int -> [Word8] encode' i | i < 128 = fromIntegral i : [] | otherwise = fromIntegral (r + 128) : encode' q where -- (q,r) = i `divMod` 128 q = i `shiftR` 7 r = i .&. 0x7f ---------------------------------------------------------------- {- decode I from the next N bits if I < 2^N - 1, return I else M = 0 repeat B = next octet I = I + (B & 127) * 2^M M = M + 7 while B & 128 == 128 return I -} -- | Integer decoding. The first argument is N of prefix. -- -- >>> decode 5 10 $ BS.empty -- 10 -- >>> decode 5 31 $ BS.pack [154,10] -- 1337 -- >>> decode 8 42 $ BS.empty -- 42 decode :: Int -> Word8 -> ByteString -> Int decode n w bs | i < p = i | BS.null bs = error $ "decode: n = " ++ show n ++ ", w = " ++ show w ++ ", bs = empty" | otherwise = decode' bs 0 i where p = powerArray ! n i = fromIntegral w decode' :: ByteString -> Int -> Int -> Int decode' "" _ i = i decode' bs m i = decode' bs' m' i' where !b = fromIntegral $ BS.head bs !bs' = BS.tail bs !i' = i + (b .&. 127) * 2 ^ m !m' = m + 7 ---------------------------------------------------------------- -- | -- -- >>> parseInteger 7 127 $ BS.pack [210,211,212,87,88,89,90] -- (183839313,"XYZ") parseInteger :: Int -> Word8 -> ByteString -> (Int, ByteString) parseInteger n w bs | i < p = (i, bs) | otherwise = (len, rest) where p = powerArray ! n i = fromIntegral w Just idx = BS.findIndex (< 128) bs (bs', rest) = BS.splitAt (idx + 1) bs len = decode n w bs'
bergmark/http2
Network/HPACK/HeaderBlock/Integer.hs
bsd-3-clause
2,725
0
11
746
701
381
320
46
1
-- Copyright 2013 Kevin Backhouse. module TestDelay ( instanceTest ) where import Control.Monad.ST2 import Control.Monad.MultiPass import Control.Monad.MultiPass.Instrument.Delay import Control.Monad.MultiPass.Utils.InstanceTest -- This test checks that all the necessary instances have been -- defined. Its only purpose is to check that there are no compile -- errors, so it does not need to be executed. instanceTest :: ST2 r w () instanceTest = run instanceTestBody instanceTestBody :: TestInstrument2 Delay r w instanceTestBody = testInstrument2
kevinbackhouse/Control-Monad-MultiPass
tests/TestDelay.hs
bsd-3-clause
554
0
6
76
80
50
30
9
1
{-# OPTIONS_GHC -Wall #-} module SourceSyntax.Helpers where import qualified Data.Char as Char splitDots :: String -> [String] splitDots = go [] where go vars str = case break (=='.') str of (x,_:rest) | isOp x -> vars ++ [x ++ '.' : rest] | otherwise -> go (vars ++ [x]) rest (x,[]) -> vars ++ [x] brkt :: String -> String brkt s = "{ " ++ s ++ " }" isTuple :: String -> Bool isTuple name = take 6 name == "_Tuple" && all Char.isDigit (drop 6 name) isOp :: String -> Bool isOp = all isSymbol isSymbol :: Char -> Bool isSymbol c = Char.isSymbol c || elem c "+-/*=.$<>:&|^?%#@~!"
deadfoxygrandpa/Elm
compiler/SourceSyntax/Helpers.hs
bsd-3-clause
648
0
14
182
265
139
126
20
2
{-# LANGUAGE DefaultSignatures #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE GADTs #-} {-# LANGUAGE InstanceSigs #-} {-# LANGUAGE KindSignatures #-} {-# LANGUAGE RankNTypes #-} {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeInType #-} {-# LANGUAGE TypeOperators #-} {-# LANGUAGE UndecidableInstances #-} {-# LANGUAGE PolyKinds #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE TypeApplications #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE ConstraintKinds #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE UndecidableSuperClasses #-} {-# LANGUAGE NoMonomorphismRestriction #-} {-# LANGUAGE FunctionalDependencies #-} {-# LANGUAGE TypeFamilyDependencies #-} {-# LANGUAGE StrictData #-} {-# LANGUAGE AllowAmbiguousTypes #-} {-# LANGUAGE IncoherentInstances #-} {-# LANGUAGE UndecidableInstances #-} {-# LANGUAGE PartialTypeSignatures #-} {-# OPTIONS_GHC -funbox-strict-fields #-} {-# OPTIONS_GHC -Wno-unticked-promoted-constructors -Wno-missing-signatures -Wno-redundant-constraints #-} {-# OPTIONS_GHC -Wno-unused-imports #-} {-# OPTIONS_GHC -Ddump-splices #-} module Data.Iota.Unified.Indexed5 -- ( Eff ) where import Data.Promotion.Prelude -- import Data.Promotion.TH import Data.Singletons import Data.Singletons.Decide import Data.Singletons.Prelude import Data.Singletons.TH import Data.Singletons.TypeLits import Data.Type.Equality import Data.Kind (Constraint, type (*)) import Data.Proxy (Proxy) import GHC.TypeLits hiding (type (*)) import GHC.Prim (Proxy#, proxy#) import Unsafe.Coerce (unsafeCoerce) import Control.Monad.Fix (fix) -- import Data.Type.Set (Set (..), Union) -- import Data.Type.BiMap (BiMapping (..), BiMap (..)) -- import qualified Data.Type.BiMap as BiMap import Data.Typeable import qualified Debug.Trace as Debug intX :: Int intX = 5 foobar :: String foobar = "foobar" data Reader (e :: *) (v :: *) where Reader :: Reader e e deriving (Typeable) data Writer (e :: *) (v :: *) where Writer :: o -> Writer o () data State (s :: *) (v :: *) where Get :: State s s Put :: s -> State s () newtype Exc (e :: *) (v :: *) = Exc e data Trace v where Trace :: String -> Trace () data Halt = Halt -- seal :: MonadEff '[] 'Pure a -> MonadEff '[] 'Pure a -- seal = id -- instance Member t (t ': r) -- instance (Member t r) => Member t (u ': r) -- type family Member' t r :: Constraint where -- Member' x (x ': r) = () -- Member' x (y ': r) = Member' x r -- type family AllMemberOf ts r :: Constraint where -- AllMemberOf '[] _ = () -- AllMemberOf (x ': ts) r = (Member' x r, AllMemberOf ts r) type Effect = (* -> *) $(promote [d| data Tree a = Pure | Ctor a | Tree a :>>= Nat | Focused (Tree a) | Unfocused (Tree a) deriving (Eq) data Universe v = Universe { treeMap :: [(Nat, Tree v)], effects :: [v], currentFocus :: Maybe v } emptyU = Universe [] [] class Treelike a where makeCtor :: a -> Tree a |]) $(promoteOnly [d| lookupIndex :: Nat -> Universe v -> Tree v lookupIndex k u = lookupIndex' k (treeMap u) lookupIndex' :: (Eq a) => a -> [(a, b)] -> b lookupIndex' key ((x,y):xys) = if key == x then y else lookupIndex' key xys lookupValue :: Eq v => Tree v -> Universe v -> Nat lookupValue k u = lookupValue' k (treeMap u) lookupValue' :: (Eq b) => b -> [(a,b)] -> a lookupValue' value ((x,y):xys) = if value == y then x else lookupValue' value xys |]) data MonadEff (u :: Universe Effect) j (a :: *) where Val :: a -> MonadEff u 'Pure a Eff :: (ctor a) -> (a -> b) -> MonadEff u (Ctor ctor) b Unf :: (ctor a) -> (a -> b) -> MonadEff u (Unfocused (Ctor 
ctor)) b Bind :: MonadEff u j a -> (a -> MonadEff u k b) -> MonadEff u (j :>>= n) b class Run u j where run :: MonadEff u j a -> a instance Run u 'Pure where run (Val x) = x instance (Run u (LookupIndex k u)) => Run u ('Pure ':>>= k) where run :: MonadEff u j a -> a run (Val x `Bind` k) = case x of (x :: a') -> run $ (unsafeCoerce k :: a' -> MonadEff u (LookupIndex k u) a) x -- This is the control flow graph, with nodes indexed by type-level natural numbers. -- This graph forms a cycle with one node. type TraceMap = '[ '(1, Ctor Trace :>>= 1) ] type TraceUniverse r = 'Universe TraceMap r Nothing -- TraceMap is a type of kind [(GHC.Types.Nat, Tree (* -> *))] -- Increment over a "Trace" effect, emitting it, and iterating. {-# INLINE stepTrace #-} stepTrace :: MonadEff (TraceUniverse r) (Ctor Trace :>>= 1) a -> MonadEff (TraceUniverse r) (LookupIndex' 1 TraceMap) a stepTrace ((Eff u q) `Bind` k) = case u of Trace t -> Debug.trace t $ unsafeCoerce $ k (q ()) -- Uses a Trace effect and a recursive let binding for an infinite loop. infiniteTraceLoop :: MonadEff (TraceUniverse r) (LookupIndex' 1 TraceMap) Int infiniteTraceLoop = let t (n :: Int) = Eff (Trace $ show n) (const $ n + 1) `Bind` t in t 0 {-# INLINE loopTrace #-} loopTrace :: MonadEff (TraceUniverse r) ('Ctor Trace ':>>= 1) a -> IO Void loopTrace m = do case stepTrace m of m' -> loopTrace (stepTrace m') type StateUniverse r = 'Universe StateMap r Nothing type StateMap = '[ '(1, Ctor (State Int) :>>= 2), '(2, Ctor (State Int) :>>= 1) ] -- Uses a Trace effect and a recursive let binding for an infinite loop. infiniteStateLoop :: MonadEff (StateUniverse r) (LookupIndex' 1 StateMap) a infiniteStateLoop = let t () = (Debug.trace "Get" (Eff Get id) `Bind` (\s -> (Eff (Put $ s + 1) id) `Bind` t)) in t () $(promote [d| mapOverEffect f (n, e) = (n, f e) mapOverGraph f (Universe g r c) = Universe (map f g) r c applyEffect f u = mapOverGraph (mapOverEffect f) u -- simpleEffect' e Pure = Pure -- simpleEffect' e (Ctor e') = if e == e' then Pure else Ctor e' -- simpleEffect' e (m :>>= n) = simpleEffect' e m :>>= n -- simpleEffect e = simpleEffect' e -- decomp' e (Ctor e') = if e == e' then Ctor e else Ctor e' -- decomp' e j = j |]) type family SimpleResult u j e where --Not correct, but works for infiniteStateLoop SimpleResult u (Ctor e :>>= n) e = LookupIndex n u SimpleResult u (Ctor e) e = Pure SimpleResult u j _ = j type family FocusT (n :: Tree (* -> *)) (e :: * -> *) = r | r -> n where -- FocusT (Pure ) e = Focused (Pure) FocusT (Ctor e ) e = Ctor e -- FocusT (Ctor e' ) e = Unfocused (Ctor e') -- FocusT (m :>>= n) e = FocusT m e :>>= n type family FocusG (g :: [(Nat, Tree (* -> *))]) (e :: * -> *) = r | r -> g e where FocusG ( '(n, tree) ': r) e = '(n, FocusT tree e) ': FocusG r e type family FocusU u e = r | r -> u e where FocusU ('Universe g r Nothing) e = 'Universe (FocusG g e) r (Just e) type family Focus m e = r | r -> m e where Focus (MonadEff u j w) e = MonadEff (FocusU u e) (FocusT j e) w focus :: forall e u j w. MonadEff u j w -> (Focus (MonadEff u j w) e, Focus (MonadEff u j w) e -> MonadEff u j w) focus m = (unsafeCoerce m, unsafeCoerce) class Decomp (e :: * -> *) u j j' w | j' -> j where decomp :: MonadEff u j w -> Either (MonadEff u j w) (MonadEff u j' w) instance {-# INCOHERENT #-} (Focus (MonadEff u (Ctor e) w) e ~ t) => Decomp e u (Ctor e) (Ctor e) w where decomp = Right . unsafeCoerce instance Decomp e u j j' w where decomp = Left -- decomp :: forall e u j w. 
MonadEff u j w -> Either (MonadEff u j w) (MonadEff u (Ctor e) w) -- decomp m = case focus @e m of -- (m', _) -> case m' of -- (Eff u q) -> Right (unsafeCoerce $ m) -- (_) -> Left m testDecomp :: MonadEff u j w -> IO String testDecomp m = do return $ case decomp @(Writer [Char]) m of Left (Val _) -> "Left Pure" Left (Eff u q) -> "Left Eff" Left (Bind _ _) -> "Left Bind" Left (Unf _ _) -> "Left Unf" Left (_) -> "Left Other" -- Right (Val x) -> "Right Pure" Right (Eff u q) -> "Right Eff" -- Right (Bind _ _) -> "Right Bind" -- Right (Unf _ _) -> "Right Unf" Right (_) -> "Right Other" -- (Val _ ) -> Right (Val x) -- decomp :: (Focusable e (MonadEff u j w)) -- => MonadEff u j w -- -> Either (MonadEff u j w) (MonadEff u (Ctor e) w) -- decomp m = case focus -- class Decomp e u j w where -- decomp :: m -> Either (MonadEff u j w) (MonadEff u (Ctor e) w) class Decomp' u j j' (e :: * -> *) where decomp1 :: MonadEff u j a -> Either (MonadEff u j a) (MonadEff u j' a) decompProofLeft :: Decision (SimpleResult u j e :~: j) decompProofLeft = Disproved undefined decompProofRight :: Decision (SimpleResult u j e :~: Pure) decompProofRight = Disproved undefined -- unfocus :: forall e u j w. MonadEff u j w -- -> Focus (MonadEff u j w) e -- unfocus = unsafeCoerce -- Focus (MonadEff u (Ctor e) w) e = MonadEff u (Focused e) w -- Focus (MonadEff u j w) e = MonadEff u j w instance Decomp' u j j' e where decomp1 = Debug.trace "Decomp Left" . Left decompProofLeft = Proved (unsafeCoerce Refl) instance Decomp' u (Ctor e) (Ctor e') e where decomp1 = Debug.trace "Decomp Right" . Right . unsafeCoerce decompProofRight = Proved (unsafeCoerce Refl) -- appBind :: MonadEff u (Ctor t :>>= n) w -- -> v -- -> (v -> MonadEff u k w) -- -> MonadEff u (LookupIndex n u) w -- appBind (Eff u q `Bind` _) extract = unsafeCoerce $ k (q (extract u)) appBind' :: (forall a1. (a1 -> MonadEff u k a, ctor1 a3, a3 -> a1)) -> MonadEff u (LookupIndex n u) w appBind' (k, u, q) = undefined -- instance {-# OVERLAPPABLE #-} Decomp e j k where -- decomp = Left -- instance {-# OVERLAPS #-} (Decomp' e j ~ True) => Decomp e j True where -- decomp m = unsafeCoerce (Right m) -- instance {-# OVERLAPPABLE #-} (Decomp' e j ~ True) => Decomp e j where -- decomp m = unsafeCoerce $ Right m -- applyBind :: MonadEff u (Ctor (State Int) :>>= n) w -- -> (forall a. State Int a -> a) -- -> forall v. (v, v -> MonadEff u (LookupIndex n u) w) -- applyBind (Eff u q `Bind` k) extract = (_ q (extract u), undefined) -- applyBind :: MonadEff u ('Ctor (State Int) ':>>= n) a -- -> (forall v1. State Int v1 -> v1) -- -> MonadEff u (LookupIndex n u) a -- applyBind (Eff u q `Bind` k) extract = unsafeCoerce $ k (q (extract u)) tr n = Debug.trace (show n) runState :: forall u j a i r. 
(u ~ (StateUniverse r)) => MonadEff u j a -> Int -> MonadEff u (SimpleResult u j (State Int)) (a, Int) runState m s = tr 0 $ case decomp1 @u @j @(Ctor (State Int)) @(State Int) m of Right m -> tr 1 $ case decompProofRight @u @j @(Ctor (State Int)) @(State Int) of Proved Refl -> tr 2 $ case m of (Eff Get q) -> tr 3 $ Val (q s, s ) (Eff (Put s') q) -> tr 4 $ Val (q (), s') Left (Val a) -> tr 5 $ Val (a, s) Left (Eff u q) -> tr 6 $ case decompProofLeft @u @j @(Ctor (State Int)) @(State Int) of Proved Refl -> tr 7 $ Eff u (undefined q) Left m@(e@(Eff u q :: MonadEff u j' _) `Bind` k) -> tr 8 $ case decomp1 @u @j' @(Ctor (State Int)) @(State Int) e of Left _ -> tr 12 $ undefined Right e -> tr 9 $ case decompProofRight @u @j' @(Ctor (State Int)) @(State Int) of Proved Refl -> tr 10 $ case e of (Eff Get q) -> tr 11 $ undefined -- $ unsafeCoerce k (q s) :: MonadEff u (LookupIndex n u) a -- case decompProofLeft @u @(Ctor t :>>= n) @(Ctor (State Int)) @(State Int) of -- Proved Refl -> case decomp1 @u @j' @(Ctor (State Int)) @(State Int) m' of -- Right (Eff Get q) -> case decompProofRight @u @j' @(Ctor (State Int)) @(State Int) of -- Proved Refl -> undefined $ appBind m extract where extract :: State Int v -> v extract Get = s extract (Put _)= () -- Left (Eff u q) -> case proofDecomp @j @(Ctor (State Int)) @(State Int) of -- Proved refl -> Eff _ _ -- Left (m `Bind` k) -> case decomp @(State Int) m of -- _ -> undefined -- Right m@(Eff u q) -> case m of -- (Eff Get q :: MonadEff u (Ctor (State Int)) a) -> Val (q s, s) -- Right m@(Eff u q) -> undefined -- runState m@(Val a) s = Val (a, s) -- runState m@(Eff u _ `Bind` k) s = -- case u of -- Get -> runState (applyBind m extract) s -- Put _ -> undefined -- -- Put s' -> runState (applyBind m extract) s' -- (Get, (q, k) -> runState (unsafeCoerce $ k $ q s) s -- -- (Put s', q, k) -> runState (unsafeCoerce $ k $ q ()) s' -- loop m@(Eff u q `Bind` k) s = qApp m -- case u of -- Get -> Debug.trace showGet $ unsafeCoerce $ stepState (qApp ) -- where showGet = "Get " ++ show s -- Put s' -> Debug.trace showPut $ unsafeCoerce $ stepState (k (q ())) s -- where showPut = "Put " ++ show s' ++ " over " ++ show s -- {-# INLINE loopState #-} -- loopState :: MonadEff ('Universe StateMap r) (LookupIndex' 1 StateMap) a -> IO () -- loopState m = do -- case stepState m 0 of -- m' -> case stepState m' 0 of -- m'' -> loopState m'' -- -- Increment over a "Trace" effect, emitting it, and iterating. -- {-# INLINE stepState #-} -- stepState :: MonadEff ('Universe StateMap r) (Ctor (State Int) :>>= n) a -- -> Int -- -> MonadEff ('Universe StateMap r) (LookupIndex' n StateMap) a -- stepState ((Eff u q) `Bind` k) s = case u of -- Get -> Debug.trace showGet $ unsafeCoerce $ k (q s) -- where showGet = "Get " ++ show s -- Put s' -> Debug.trace showPut $ unsafeCoerce $ k (q ()) -- where showPut = "Put " ++ show s' ++ " over " ++ show s -- {-# INLINE loopState #-} -- loopState :: MonadEff ('Universe StateMap r) (LookupIndex' 1 StateMap) a -> IO () -- loopState m = do -- case stepState m 0 of -- m' -> case stepState m' 0 of -- m'' -> loopState m'' -- branch (b :: Bool) = case toSing b of -- SomeSing (sb :: Sing Bool) -> _ sb -- stepIf :: MonadEff ('Universe u r) ('Ctor (Branch n1 n2)) a -- -> MonadEff ('Universe u r) (LookupIndex k u) a -- stepIf (Eff u q) = case u of -- (Branch True) -> -- Runs forever, loops over "Int" domain [0, 1, ..., maxBound, minBound, ... 
-1] -- finiteTest = do -- let stepOnce m = do -- case stepTrace m of -- (Val x `Bind` k) -> do -- print $ (unsafeCoerce x' :: Int) -- return $ runTest' (k x) -- return foobar -- runTest'' = let x = runTrace' . runTest' . x in x infiniteTraceTest -- send :: Member ctor r => ctor v -> MonadEff r ('Ctor ctor 'Pure) v -- send t = Eff t Val -- ask :: forall e r. Member (Reader e) r => MonadEff r ('Ctor (Reader e) 'Pure) e -- ask = send (Reader) -- tell :: forall o r. Member (Writer o) r => o -> MonadEff r ('Ctor (Writer o) 'Pure) () -- tell o = send (Writer o) -- trace = send . Trace -- test :: MonadEff ('Ctor (Reader String) 'Pure) String -- test = ask @Int -- test2 :: MonadEff ('Ctor (Reader String) ('Ctor (Writer String) 'Pure)) () -- test2 = ask @String `Bind` tell -- t2rr = (((), [foobar]) ==) $ run $ runWriter (runReader test2 foobar) -- class RunReader (j :: Tree (* -> *)) e where -- runReader :: MonadEff (Reader e ': r) j w -> e -> MonadEff r (RunSimple j (Reader e)) w -- instance RunReader ('Ctor (Reader e) k) e where -- runReader (Eff u q) e = case u of -- Reader -> (q e) -- class RunWriter j o where -- runWriter :: MonadEff (Writer o ': r) j w -> MonadEff r (RunSimple j (Writer o)) (w, [o]) -- instance RunWriter ('Ctor (Writer String) k) o where -- runWriter (Eff u q) = case u of -- Writer o -> case q () of -- Val a -> Val (a, [o]) -- (:<*>) :: MonadEff j (a -> b) -> MonadEff k a -> MonadEff ('ApNode j k) b -- Bind :: {-# UNPACK #-} !(MonadEff r j a) -- -> {-# UNPACK #-} !(a -> MonadEff r k b) -- -> MonadEff r (j :>>= k) b -- imap :: (a -> b) -> MonadEff r j a -> MonadEff r j b -- imap f (Val a) = Val (f a) -- imap f (Eff u q) = Eff u (imap f . q) -- imap f (m `Bind` k) = m `Bind` (imap f . k) -- data WrappedMonadEff w = forall j. SingI j => WrappedMonadEff (MonadEff j w)
AaronFriel/eff-experiments
src/Data/Iota/Unified/Indexed5.hs
bsd-3-clause
16,377
2
22
4,411
3,149
1,751
1,398
-1
-1
-- Copyright (c) 2016-present, Facebook, Inc. -- All rights reserved. -- -- This source code is licensed under the BSD-style license found in the -- LICENSE file in the root directory of this source tree. An additional grant -- of patent rights can be found in the PATENTS file in the same directory. ----------------------------------------------------------------- -- Auto-generated by regenClassifiers -- -- DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING -- @generated ----------------------------------------------------------------- {-# LANGUAGE OverloadedStrings #-} module Duckling.Ranking.Classifiers.KO_XX (classifiers) where import Data.String import Prelude import qualified Data.HashMap.Strict as HashMap import Duckling.Ranking.Types classifiers :: Classifiers classifiers = HashMap.fromList [("<time> timezone", Classifier{okData = ClassData{prior = 0.0, unseen = -2.3978952727983707, likelihoods = HashMap.fromList [("<time-of-day> am|pm", -1.6094379124341003), ("hh:mm", -1.2039728043259361), ("hour", -1.6094379124341003), ("minute", -1.2039728043259361)], n = 3}, koData = ClassData{prior = -infinity, unseen = -1.6094379124341003, likelihoods = HashMap.fromList [], n = 0}}), ("Thursday", Classifier{okData = ClassData{prior = 0.0, unseen = -2.0794415416798357, likelihoods = HashMap.fromList [("", 0.0)], n = 6}, koData = ClassData{prior = -infinity, unseen = -0.6931471805599453, likelihoods = HashMap.fromList [], n = 0}}), ("integer (numeric)", Classifier{okData = ClassData{prior = -0.40546510810816444, unseen = -4.867534450455582, likelihoods = HashMap.fromList [("", 0.0)], n = 128}, koData = ClassData{prior = -1.0986122886681098, unseen = -4.189654742026425, likelihoods = HashMap.fromList [("", 0.0)], n = 64}}), ("lunch", Classifier{okData = ClassData{prior = 0.0, unseen = -1.3862943611198906, likelihoods = HashMap.fromList [("", 0.0)], n = 2}, koData = ClassData{prior = -infinity, unseen = -0.6931471805599453, likelihoods = HashMap.fromList [], n = 0}}), ("intersect 2 numbers", Classifier{okData = ClassData{prior = -0.6931471805599453, unseen = -1.3862943611198906, likelihoods = HashMap.fromList [("compose by multiplicationinteger (21..99) - TYPE 2", -0.40546510810816444)], n = 1}, koData = ClassData{prior = -0.6931471805599453, unseen = -1.3862943611198906, likelihoods = HashMap.fromList [("integer - TYPE 1: powers of teninteger (21..99) - TYPE 2", -0.40546510810816444)], n = 1}}), ("<time> <part-of-day>", Classifier{okData = ClassData{prior = -0.8472978603872037, unseen = -4.143134726391533, likelihoods = HashMap.fromList [("dayhour", -1.1826954058786512), ("yesterdayevening|night", -3.4339872044851463), ("mm/ddafternoon", -3.4339872044851463), ("todayafternoon", -3.4339872044851463), ("dayevening|night", -3.4339872044851463), ("todayevening|night", -2.740840023925201), ("daymorning", -3.4339872044851463), ("intersectevening|night", -3.4339872044851463), ("next <cycle>evening|night", -3.028522096376982), ("tomorrowlunch", -3.4339872044851463), ("intersectmorning", -3.4339872044851463), ("Mondaymorning", -3.4339872044851463), ("the day before yesterday - \50634\44536\51228morning", -3.4339872044851463), ("tomorrowevening|night", -3.028522096376982), ("next <cycle>lunch", -3.4339872044851463)], n = 18}, koData = ClassData{prior = -0.5596157879354228, unseen = -4.31748811353631, likelihoods = HashMap.fromList [("year (latent)lunch", -3.6109179126442243), ("dayhour", -1.8191584434161694), ("yearhour", -1.906169820405799), ("time-of-day (latent)evening|night", 
-3.6109179126442243), ("Fridayafternoon", -3.6109179126442243), ("year (latent)evening|night", -2.512305623976115), ("hourhour", -2.917770732084279), ("time-of-day (latent)lunch", -3.6109179126442243), ("intersectafternoon", -2.10684051586795), ("year (latent)afternoon", -3.20545280453606), ("next <cycle>evening|night", -3.6109179126442243), ("time-of-day (latent)afternoon", -3.6109179126442243), ("year (latent)morning", -3.20545280453606), ("tomorrowevening|night", -3.6109179126442243)], n = 24}}), ("<time> nth <time> - 3\50900 \52395\51704 \54868\50836\51068", Classifier{okData = ClassData{prior = -0.6190392084062235, unseen = -2.995732273553991, likelihoods = HashMap.fromList [("monthday", -0.8649974374866046), ("intersectordinals (\52395\48264\51704)Tuesday", -1.845826690498331), ("monthordinals (\52395\48264\51704)Tuesday", -2.2512917986064953), ("intersectordinals (\52395\48264\51704)Wednesday", -1.3350010667323402)], n = 7}, koData = ClassData{prior = -0.7731898882334817, unseen = -2.890371757896165, likelihoods = HashMap.fromList [("monthday", -0.8873031950009028), ("monthordinals (\52395\48264\51704)Wednesday", -1.2237754316221157), ("monthordinals (\52395\48264\51704)Tuesday", -1.7346010553881064)], n = 6}}), ("the day before yesterday - \50634\44536\51228", Classifier{okData = ClassData{prior = 0.0, unseen = -1.791759469228055, likelihoods = HashMap.fromList [("", 0.0)], n = 4}, koData = ClassData{prior = -infinity, unseen = -0.6931471805599453, likelihoods = HashMap.fromList [], n = 0}}), ("today", Classifier{okData = ClassData{prior = 0.0, unseen = -1.9459101490553135, likelihoods = HashMap.fromList [("", 0.0)], n = 5}, koData = ClassData{prior = -infinity, unseen = -0.6931471805599453, likelihoods = HashMap.fromList [], n = 0}}), ("mm/dd", Classifier{okData = ClassData{prior = 0.0, unseen = -1.3862943611198906, likelihoods = HashMap.fromList [("", 0.0)], n = 2}, koData = ClassData{prior = -infinity, unseen = -0.6931471805599453, likelihoods = HashMap.fromList [], n = 0}}), ("half - \48152", Classifier{okData = ClassData{prior = -infinity, unseen = -0.6931471805599453, likelihoods = HashMap.fromList [], n = 0}, koData = ClassData{prior = 0.0, unseen = -2.1972245773362196, likelihoods = HashMap.fromList [("", 0.0)], n = 7}}), ("month (grain)", Classifier{okData = ClassData{prior = 0.0, unseen = -2.0794415416798357, likelihoods = HashMap.fromList [("", 0.0)], n = 6}, koData = ClassData{prior = -infinity, unseen = -0.6931471805599453, likelihoods = HashMap.fromList [], n = 0}}), ("<time> \47560\51648\47561 <day-of-week>", Classifier{okData = ClassData{prior = 0.0, unseen = -2.3978952727983707, likelihoods = HashMap.fromList [("monthSunday", -1.6094379124341003), ("monthday", -0.916290731874155), ("monthMonday", -1.6094379124341003), ("intersectSunday", -1.6094379124341003)], n = 3}, koData = ClassData{prior = -infinity, unseen = -1.6094379124341003, likelihoods = HashMap.fromList [], n = 0}}), ("integer - TYPE 1", Classifier{okData = ClassData{prior = -1.8588987720656835, unseen = -3.258096538021482, likelihoods = HashMap.fromList [("", 0.0)], n = 24}, koData = ClassData{prior = -0.1694181519580468, unseen = -4.882801922586371, likelihoods = HashMap.fromList [("", 0.0)], n = 130}}), ("Wednesday", Classifier{okData = ClassData{prior = 0.0, unseen = -2.0794415416798357, likelihoods = HashMap.fromList [("", 0.0)], n = 6}, koData = ClassData{prior = -infinity, unseen = -0.6931471805599453, likelihoods = HashMap.fromList [], n = 0}}), ("hour (grain)", Classifier{okData = 
ClassData{prior = -1.241713132308783, unseen = -2.70805020110221, likelihoods = HashMap.fromList [("", 0.0)], n = 13}, koData = ClassData{prior = -0.3409265869705932, unseen = -3.5263605246161616, likelihoods = HashMap.fromList [("", 0.0)], n = 32}}), ("intersect", Classifier{okData = ClassData{prior = -0.4279379639602229, unseen = -5.991464547107982, likelihoods = HashMap.fromList [("intersect<hour-of-day> <integer> (as relative minutes)", -5.295814236329918), ("intersectFriday", -4.602667055769973), ("year<time> \47560\51648\47561 <cycle>", -4.890349128221754), ("next <cycle>Friday", -5.295814236329918), ("dayhour", -2.0571357841655376), ("last <cycle>Sunday", -4.890349128221754), ("monthday", -2.462600892273702), ("yearhour", -3.591066144091493), ("<time> <part-of-day>time-of-day", -4.379523504455763), ("this <cycle>Tuesday", -5.295814236329918), ("dayTuesday", -5.295814236329918), ("intersectam|pm <time-of-day>", -3.0985896589936988), ("next <cycle><time-of-day> am|pm", -5.295814236329918), ("next <cycle>am|pm <time-of-day>", -4.890349128221754), ("yearintersect", -2.9932291433358724), ("monthday with korean number - \51068\51068..\44396\51068", -5.295814236329918), ("Saturdaytime-of-day", -5.295814236329918), ("intersectMonday", -4.890349128221754), ("monthhour", -3.591066144091493), ("Thursday<time> timezone", -4.890349128221754), ("mm/ddam|pm <time-of-day>", -4.602667055769973), ("todayam|pm <time-of-day>", -4.602667055769973), ("the day before yesterday - \50634\44536\51228am|pm <time-of-day>", -4.379523504455763), ("dayday", -3.686376323895818), ("hourhour", -3.791736839553644), ("year<time> <ordinal> <cycle>", -5.295814236329918), ("month<datetime> - <datetime> (interval)", -4.890349128221754), ("dayam|pm <time-of-day>", -5.295814236329918), ("last <cycle>Tuesday", -5.295814236329918), ("time-of-dayafter <time-of-day>", -5.295814236329918), ("monthminute", -4.890349128221754), ("hourminute", -4.602667055769973), ("this <cycle>Wednesday", -5.295814236329918), ("Thursdayhh:mm", -4.890349128221754), ("intersectday", -4.197201947661808), ("am|pm <time-of-day>after <time-of-day>", -5.295814236329918), ("dayMonday", -4.890349128221754), ("Thursday<datetime> - <datetime> (interval)", -4.379523504455763), ("next <cycle>Wednesday", -5.295814236329918), ("month<time> <part-of-day>", -4.890349128221754), ("Thursday<time-of-day> - <time-of-day> (interval)", -4.04305126783455), ("yearmonth", -3.1557480728336476), ("dayminute", -3.424012059428327), ("today<date>\50640", -5.295814236329918), ("mm/dd<date>\50640", -5.295814236329918), ("<hour-of-day> <integer> (as relative minutes)seconds", -5.295814236329918), ("<time> <part-of-day><date>\50640", -4.890349128221754), ("dayFriday", -5.295814236329918), ("monthintersect", -3.686376323895818), ("year<time> \47560\51648\47561 <day-of-week>", -5.295814236329918), ("intersectintersect", -3.5040547671018634), ("weekday", -3.686376323895818), ("intersectday with korean number - \51068\51068..\44396\51068", -4.890349128221754), ("yearday", -3.5040547671018634), ("Thursdayam|pm <time-of-day>", -5.295814236329918), ("yearweek", -4.890349128221754), ("minutesecond", -5.295814236329918), ("<time> <part-of-day><hour-of-day> <integer> (as relative minutes)", -4.890349128221754), ("Fridayam|pm <time-of-day>", -4.890349128221754), ("next <cycle>Tuesday", -4.890349128221754), ("tomorrow<time-of-day> am|pm", -5.295814236329918), ("tomorrowam|pm <time-of-day>", -4.890349128221754), ("dayintersect", -4.890349128221754)], n = 176}, koData = ClassData{prior = 
-1.0551271767283708, unseen = -5.594711379601839, likelihoods = HashMap.fromList [("year<time> \47560\51648\47561 <cycle>", -4.897839799950911), ("dayhour", -2.2587824703356527), ("monthday", -3.1930917077124863), ("yearhour", -3.1930917077124863), ("<time> <part-of-day>time-of-day", -3.7992275112828016), ("houryear", -4.204692619390966), ("after <time-of-day>by <time> - \44620\51648", -4.897839799950911), ("intersectam|pm <time-of-day>", -4.492374691842747), ("hournograin", -4.897839799950911), ("intersect<time> <part-of-day>", -3.9815490680767565), ("yearintersect", -3.02603762304932), ("time-of-day<duration> ago", -4.897839799950911), ("intersecttime-of-day", -4.492374691842747), ("monthhour", -3.02603762304932), ("intersectlast <time>", -4.897839799950911), ("dayday", -3.9815490680767565), ("hourhour", -2.951929650895598), ("month<datetime> - <datetime> (interval)", -3.9815490680767565), ("dayam|pm <time-of-day>", -4.492374691842747), ("hourminute", -4.204692619390966), ("Thursdayhh:mm", -4.492374691842747), ("<time-of-day> - <time-of-day> (interval)last <time>", -3.9815490680767565), ("intersectday", -4.492374691842747), ("Thursdayintersect", -4.897839799950911), ("this <cycle>Monday", -4.492374691842747), ("<datetime> - <datetime> (interval)day", -3.9815490680767565), ("month<time> <part-of-day>", -4.492374691842747), ("Thursday<time-of-day> - <time-of-day> (interval)", -4.492374691842747), ("yearmonth", -4.492374691842747), ("Fridayafter <time-of-day>", -4.897839799950911), ("dayminute", -4.492374691842747), ("<time> <part-of-day><date>\50640", -4.897839799950911), ("last <time>time-of-day", -4.897839799950911), ("monthintersect", -3.3937624031746374), ("year<time> <part-of-day>", -4.492374691842747), ("intersectintersect", -3.6450768314555435), ("weekday", -4.492374691842747), ("intersectday with korean number - \51068\51068..\44396\51068", -4.492374691842747), ("day<time> <part-of-day>", -4.897839799950911), ("yearday", -3.7992275112828016), ("Thursdayam|pm <time-of-day>", -4.897839799950911), ("<time-of-day> - <time-of-day> (interval)time-of-day", -4.492374691842747), ("intersectafter <time-of-day>", -3.3937624031746374), ("<time> <part-of-day><hour-of-day> <integer> (as relative minutes)", -4.492374691842747), ("dayintersect", -4.897839799950911), ("mm/ddafter <time-of-day>", -4.897839799950911), ("todayafter <time-of-day>", -4.897839799950911)], n = 94}}), ("year (grain)", Classifier{okData = ClassData{prior = -1.0608719606852628, unseen = -2.3978952727983707, likelihoods = HashMap.fromList [("", 0.0)], n = 9}, koData = ClassData{prior = -0.42488319396526597, unseen = -2.9444389791664407, likelihoods = HashMap.fromList [("", 0.0)], n = 17}}), ("Saturday", Classifier{okData = ClassData{prior = 0.0, unseen = -1.3862943611198906, likelihoods = HashMap.fromList [("", 0.0)], n = 2}, koData = ClassData{prior = -infinity, unseen = -0.6931471805599453, likelihoods = HashMap.fromList [], n = 0}}), ("next <cycle>", Classifier{okData = ClassData{prior = -0.12516314295400605, unseen = -3.7612001156935624, likelihoods = HashMap.fromList [("week", -1.791759469228055), ("month (grain)", -3.044522437723423), ("year (grain)", -3.044522437723423), ("week (grain)", -1.791759469228055), ("day", -1.791759469228055), ("quarter", -3.044522437723423), ("year", -3.044522437723423), ("month", -3.044522437723423), ("quarter (grain)", -3.044522437723423), ("day (grain)", -1.791759469228055)], n = 15}, koData = ClassData{prior = -2.1400661634962708, unseen = -2.833213344056216, likelihoods = 
HashMap.fromList [("day", -2.0794415416798357), ("minute (grain)", -2.0794415416798357), ("minute", -2.0794415416798357), ("day (grain)", -2.0794415416798357)], n = 2}}), ("this <day-of-week>", Classifier{okData = ClassData{prior = 0.0, unseen = -2.5649493574615367, likelihoods = HashMap.fromList [("Wednesday", -1.791759469228055), ("Monday", -1.3862943611198906), ("day", -0.8754687373538999), ("Tuesday", -1.791759469228055)], n = 4}, koData = ClassData{prior = -infinity, unseen = -1.6094379124341003, likelihoods = HashMap.fromList [], n = 0}}), ("yyyy-mm-dd", Classifier{okData = ClassData{prior = 0.0, unseen = -1.3862943611198906, likelihoods = HashMap.fromList [("", 0.0)], n = 2}, koData = ClassData{prior = -infinity, unseen = -0.6931471805599453, likelihoods = HashMap.fromList [], n = 0}}), ("year (latent)", Classifier{okData = ClassData{prior = -infinity, unseen = -1.6094379124341003, likelihoods = HashMap.fromList [], n = 0}, koData = ClassData{prior = 0.0, unseen = -3.5263605246161616, likelihoods = HashMap.fromList [("integer (numeric)", -0.9315582040049435), ("integer - TYPE 1", -0.8574502318512216), ("integer (21..99) - TYPE 2", -2.803360380906535), ("integer (1..4) - for ordinals", -2.1102132003465894)], n = 29}}), ("mm/dd/yyyy", Classifier{okData = ClassData{prior = 0.0, unseen = -1.9459101490553135, likelihoods = HashMap.fromList [("", 0.0)], n = 5}, koData = ClassData{prior = -infinity, unseen = -0.6931471805599453, likelihoods = HashMap.fromList [], n = 0}}), ("evening|night", Classifier{okData = ClassData{prior = -0.13353139262452263, unseen = -2.1972245773362196, likelihoods = HashMap.fromList [("", 0.0)], n = 7}, koData = ClassData{prior = -2.0794415416798357, unseen = -1.0986122886681098, likelihoods = HashMap.fromList [("", 0.0)], n = 1}}), ("Memorial Day", Classifier{okData = ClassData{prior = 0.0, unseen = -1.0986122886681098, likelihoods = HashMap.fromList [("", 0.0)], n = 1}, koData = ClassData{prior = -infinity, unseen = -0.6931471805599453, likelihoods = HashMap.fromList [], n = 0}}), ("Monday", Classifier{okData = ClassData{prior = 0.0, unseen = -2.1972245773362196, likelihoods = HashMap.fromList [("", 0.0)], n = 7}, koData = ClassData{prior = -infinity, unseen = -0.6931471805599453, likelihoods = HashMap.fromList [], n = 0}}), ("yesterday", Classifier{okData = ClassData{prior = 0.0, unseen = -1.3862943611198906, likelihoods = HashMap.fromList [("", 0.0)], n = 2}, koData = ClassData{prior = -infinity, unseen = -0.6931471805599453, likelihoods = HashMap.fromList [], n = 0}}), ("integer (20..90) - TYPE 2 and ordinals", Classifier{okData = ClassData{prior = 0.0, unseen = -1.0986122886681098, likelihoods = HashMap.fromList [("", 0.0)], n = 1}, koData = ClassData{prior = -infinity, unseen = -0.6931471805599453, likelihoods = HashMap.fromList [], n = 0}}), ("hh:mm:ss", Classifier{okData = ClassData{prior = 0.0, unseen = -1.0986122886681098, likelihoods = HashMap.fromList [("", 0.0)], n = 1}, koData = ClassData{prior = -infinity, unseen = -0.6931471805599453, likelihoods = HashMap.fromList [], n = 0}}), ("by <time> - \44620\51648", Classifier{okData = ClassData{prior = 0.0, unseen = -2.0794415416798357, likelihoods = HashMap.fromList [("am|pm <time-of-day>", -1.252762968495368), ("time-of-day", -1.252762968495368), ("hour", -0.8472978603872037)], n = 2}, koData = ClassData{prior = -infinity, unseen = -1.3862943611198906, likelihoods = HashMap.fromList [], n = 0}}), ("<named-month>\50640", Classifier{okData = ClassData{prior = 0.0, unseen = -1.0986122886681098, 
likelihoods = HashMap.fromList [("month", 0.0)], n = 1}, koData = ClassData{prior = -infinity, unseen = -0.6931471805599453, likelihoods = HashMap.fromList [], n = 0}}), ("integer (21..99) - TYPE 2", Classifier{okData = ClassData{prior = -0.4700036292457356, unseen = -2.3025850929940455, likelihoods = HashMap.fromList [("integer (20..90) - TYPE 2 and ordinalsinteger (1..4) - for ordinals", -1.5040773967762742), ("integer - TYPE 1: powers of teninteger - TYPE 1", -1.0986122886681098), ("compose by multiplicationinteger - TYPE 1", -1.0986122886681098)], n = 5}, koData = ClassData{prior = -0.9808292530117262, unseen = -2.0794415416798357, likelihoods = HashMap.fromList [("integer - TYPE 1: powers of teninteger - TYPE 1", -0.8472978603872037), ("integer (numeric)integer - TYPE 1", -1.252762968495368)], n = 3}}), ("Independence Movement Day", Classifier{okData = ClassData{prior = 0.0, unseen = -1.0986122886681098, likelihoods = HashMap.fromList [("", 0.0)], n = 1}, koData = ClassData{prior = -infinity, unseen = -0.6931471805599453, likelihoods = HashMap.fromList [], n = 0}}), ("week (grain)", Classifier{okData = ClassData{prior = -0.17589066646366416, unseen = -3.332204510175204, likelihoods = HashMap.fromList [("", 0.0)], n = 26}, koData = ClassData{prior = -1.824549292051046, unseen = -1.9459101490553135, likelihoods = HashMap.fromList [("", 0.0)], n = 5}}), ("<year> <1..4>quarter", Classifier{okData = ClassData{prior = 0.0, unseen = -1.6094379124341003, likelihoods = HashMap.fromList [("yearinteger (numeric)quarter (grain)", -0.6931471805599453), ("yearquarter", -0.6931471805599453)], n = 1}, koData = ClassData{prior = -infinity, unseen = -1.0986122886681098, likelihoods = HashMap.fromList [], n = 0}}), ("now", Classifier{okData = ClassData{prior = -1.0986122886681098, unseen = -1.3862943611198906, likelihoods = HashMap.fromList [("", 0.0)], n = 2}, koData = ClassData{prior = -0.40546510810816444, unseen = -1.791759469228055, likelihoods = HashMap.fromList [("", 0.0)], n = 4}}), ("Christmas Eve", Classifier{okData = ClassData{prior = 0.0, unseen = -1.0986122886681098, likelihoods = HashMap.fromList [("", 0.0)], n = 1}, koData = ClassData{prior = -infinity, unseen = -0.6931471805599453, likelihoods = HashMap.fromList [], n = 0}}), ("Liberation Day", Classifier{okData = ClassData{prior = 0.0, unseen = -1.0986122886681098, likelihoods = HashMap.fromList [("", 0.0)], n = 1}, koData = ClassData{prior = -infinity, unseen = -0.6931471805599453, likelihoods = HashMap.fromList [], n = 0}}), ("numbers prefix with -, \47560\51060\45320\49828, or \47560\51060\45208\49828", Classifier{okData = ClassData{prior = -infinity, unseen = -0.6931471805599453, likelihoods = HashMap.fromList [], n = 0}, koData = ClassData{prior = 0.0, unseen = -2.1972245773362196, likelihoods = HashMap.fromList [("integer (numeric)", 0.0)], n = 7}}), ("Friday", Classifier{okData = ClassData{prior = 0.0, unseen = -1.6094379124341003, likelihoods = HashMap.fromList [("", 0.0)], n = 3}, koData = ClassData{prior = -infinity, unseen = -0.6931471805599453, likelihoods = HashMap.fromList [], n = 0}}), ("in|during the <part-of-day>", Classifier{okData = ClassData{prior = 0.0, unseen = -1.6094379124341003, likelihoods = HashMap.fromList [("afternoon", -0.6931471805599453), ("hour", -0.6931471805599453)], n = 1}, koData = ClassData{prior = -infinity, unseen = -1.0986122886681098, likelihoods = HashMap.fromList [], n = 0}}), ("tomorrow", Classifier{okData = ClassData{prior = -0.15415067982725836, unseen = -2.0794415416798357, likelihoods = 
HashMap.fromList [("", 0.0)], n = 6}, koData = ClassData{prior = -1.9459101490553135, unseen = -1.0986122886681098, likelihoods = HashMap.fromList [("", 0.0)], n = 1}}), ("National Foundation Day", Classifier{okData = ClassData{prior = 0.0, unseen = -1.0986122886681098, likelihoods = HashMap.fromList [("", 0.0)], n = 1}, koData = ClassData{prior = -infinity, unseen = -0.6931471805599453, likelihoods = HashMap.fromList [], n = 0}}), ("ordinals (\52395\48264\51704)", Classifier{okData = ClassData{prior = 0.0, unseen = -2.3978952727983707, likelihoods = HashMap.fromList [("integer (1..4) - for ordinals", -0.35667494393873245), ("integer (1..10) - TYPE 2", -1.2039728043259361)], n = 8}, koData = ClassData{prior = -infinity, unseen = -1.0986122886681098, likelihoods = HashMap.fromList [], n = 0}}), ("day", Classifier{okData = ClassData{prior = -0.2657031657330056, unseen = -3.258096538021482, likelihoods = HashMap.fromList [("integer (numeric)", -8.338160893905101e-2), ("integer - TYPE 1", -2.5257286443082556)], n = 23}, koData = ClassData{prior = -1.455287232606842, unseen = -2.3025850929940455, likelihoods = HashMap.fromList [("integer (numeric)", -0.25131442828090605), ("integer - TYPE 1", -1.5040773967762742)], n = 7}}), ("half an hour", Classifier{okData = ClassData{prior = -infinity, unseen = -1.0986122886681098, likelihoods = HashMap.fromList [], n = 0}, koData = ClassData{prior = 0.0, unseen = -2.833213344056216, likelihoods = HashMap.fromList [("hour (grain)", -0.6931471805599453), ("hour", -0.6931471805599453)], n = 7}}), ("fractional number", Classifier{okData = ClassData{prior = -infinity, unseen = -0.6931471805599453, likelihoods = HashMap.fromList [], n = 0}, koData = ClassData{prior = 0.0, unseen = -1.9459101490553135, likelihoods = HashMap.fromList [("", 0.0)], n = 5}}), ("Sunday", Classifier{okData = ClassData{prior = 0.0, unseen = -2.0794415416798357, likelihoods = HashMap.fromList [("", 0.0)], n = 6}, koData = ClassData{prior = -infinity, unseen = -0.6931471805599453, likelihoods = HashMap.fromList [], n = 0}}), ("afternoon", Classifier{okData = ClassData{prior = -0.2876820724517809, unseen = -1.6094379124341003, likelihoods = HashMap.fromList [("", 0.0)], n = 3}, koData = ClassData{prior = -1.3862943611198906, unseen = -1.0986122886681098, likelihoods = HashMap.fromList [("", 0.0)], n = 1}}), ("integer (1..4) - for ordinals", Classifier{okData = ClassData{prior = -3.7740327982847086e-2, unseen = -3.332204510175204, likelihoods = HashMap.fromList [("", 0.0)], n = 26}, koData = ClassData{prior = -3.295836866004329, unseen = -1.0986122886681098, likelihoods = HashMap.fromList [("", 0.0)], n = 1}}), ("<time> <ordinal> <cycle>", Classifier{okData = ClassData{prior = 0.0, unseen = -2.0794415416798357, likelihoods = HashMap.fromList [("monthordinals (\52395\48264\51704)week (grain)", -1.252762968495368), ("monthweek", -0.8472978603872037), ("intersectordinals (\52395\48264\51704)week (grain)", -1.252762968495368)], n = 2}, koData = ClassData{prior = -infinity, unseen = -1.3862943611198906, likelihoods = HashMap.fromList [], n = 0}}), ("this <cycle>", Classifier{okData = ClassData{prior = -0.5108256237659907, unseen = -3.044522437723423, likelihoods = HashMap.fromList [("week", -1.3862943611198906), ("year (grain)", -2.3025850929940455), ("week (grain)", -1.3862943611198906), ("quarter", -2.3025850929940455), ("year", -2.3025850929940455), ("quarter (grain)", -2.3025850929940455)], n = 6}, koData = ClassData{prior = -0.916290731874155, unseen = -2.833213344056216, likelihoods = 
HashMap.fromList [("week", -1.3862943611198906), ("week (grain)", -1.3862943611198906), ("minute (grain)", -2.0794415416798357), ("minute", -2.0794415416798357)], n = 4}}), ("minute (grain)", Classifier{okData = ClassData{prior = -0.6359887667199967, unseen = -2.3978952727983707, likelihoods = HashMap.fromList [("", 0.0)], n = 9}, koData = ClassData{prior = -0.7537718023763802, unseen = -2.3025850929940455, likelihoods = HashMap.fromList [("", 0.0)], n = 8}}), ("about <time-of-day>", Classifier{okData = ClassData{prior = -infinity, unseen = -1.3862943611198906, likelihoods = HashMap.fromList [], n = 0}, koData = ClassData{prior = 0.0, unseen = -2.0794415416798357, likelihoods = HashMap.fromList [("time-of-day (latent)", -1.252762968495368), ("time-of-day", -1.252762968495368), ("hour", -0.8472978603872037)], n = 2}}), ("time-of-day (latent)", Classifier{okData = ClassData{prior = -1.0986122886681098, unseen = -3.4011973816621555, likelihoods = HashMap.fromList [("integer (numeric)", -0.4769240720903093), ("integer - TYPE 1", -2.6741486494265287), ("integer (1..4) - for ordinals", -1.7578579175523736), ("integer (1..10) - TYPE 2", -2.6741486494265287)], n = 23}, koData = ClassData{prior = -0.40546510810816444, unseen = -3.970291913552122, likelihoods = HashMap.fromList [("integer (numeric)", -1.0608719606852628), ("integer - TYPE 1", -0.8157495026522777), ("integer (20..90) - TYPE 2 and ordinals", -3.258096538021482), ("integer (21..99) - TYPE 2", -3.258096538021482), ("integer (1..4) - for ordinals", -2.159484249353372)], n = 46}}), ("year", Classifier{okData = ClassData{prior = -0.9873866535578852, unseen = -3.258096538021482, likelihoods = HashMap.fromList [("integer (numeric)", -0.3285040669720361), ("intersect 2 numbers", -2.5257286443082556), ("integer (21..99) - TYPE 2", -2.5257286443082556)], n = 19}, koData = ClassData{prior = -0.46608972992459924, unseen = -3.6635616461296463, likelihoods = HashMap.fromList [("integer (numeric)", -0.5020919437972361), ("intersect 2 numbers", -2.2512917986064953), ("integer - TYPE 1", -2.0281482472922856), ("integer - TYPE 1: powers of ten", -2.538973871058276), ("compose by multiplication", -2.9444389791664407)], n = 32}}), ("<integer> <unit-of-duration>", Classifier{okData = ClassData{prior = -1.5955488002734333, unseen = -4.5217885770490405, likelihoods = HashMap.fromList [("week", -2.7191000372887952), ("integer - TYPE 1year (grain)", -3.41224721784874), ("integer (numeric)day (grain)", -2.5649493574615367), ("second", -3.817712325956905), ("integer - TYPE 1minute (grain)", -2.9014215940827497), ("integer (1..4) - for ordinalshour (grain)", -3.41224721784874), ("integer (numeric)second (grain)", -3.817712325956905), ("integer (numeric)year (grain)", -3.41224721784874), ("day", -2.5649493574615367), ("year", -2.9014215940827497), ("integer (numeric)week (grain)", -2.7191000372887952), ("hour", -2.3136349291806306), ("integer (numeric)minute (grain)", -3.817712325956905), ("few \47751hour (grain)", -2.9014215940827497), ("minute", -2.7191000372887952), ("integer (numeric)hour (grain)", -3.41224721784874)], n = 29}, koData = ClassData{prior = -0.22664618186541183, unseen = -5.568344503761097, likelihoods = HashMap.fromList [("week", -3.955082494888593), ("integer - TYPE 1quarter (grain)", -4.871373226762748), ("integer - TYPE 1day (grain)", -4.178226046202803), ("integer - TYPE 1year (grain)", -4.465908118654584), ("integer (numeric)day (grain)", -2.4290261913935436), ("intersect 2 numbersyear (grain)", -4.465908118654584), ("integer 
(numeric)quarter (grain)", -4.871373226762748), ("compose by multiplicationminute (grain)", -4.871373226762748), ("second", -3.7727609380946383), ("integer - TYPE 1minute (grain)", -3.7727609380946383), ("integer (1..4) - for ordinalsmonth (grain)", -4.465908118654584), ("integer (1..4) - for ordinalshour (grain)", -2.7313070632664775), ("integer (numeric)second (grain)", -4.871373226762748), ("integer (numeric)year (grain)", -2.6741486494265287), ("integer (21..99) - TYPE 2hour (grain)", -4.871373226762748), ("integer - TYPE 1week (grain)", -4.465908118654584), ("day", -2.3064238693012116), ("quarter", -4.465908118654584), ("year", -2.4290261913935436), ("integer (21..99) - TYPE 2minute (grain)", -4.178226046202803), ("integer (numeric)week (grain)", -4.465908118654584), ("integer (1..10) - TYPE 2hour (grain)", -4.871373226762748), ("hour", -1.900958761193047), ("month", -3.955082494888593), ("integer - TYPE 1second (grain)", -4.465908118654584), ("integer (numeric)minute (grain)", -3.955082494888593), ("integer (numeric)month (grain)", -4.465908118654584), ("integer (21..99) - TYPE 2year (grain)", -4.871373226762748), ("minute", -2.856470206220483), ("integer - TYPE 1: powers of tenminute (grain)", -4.871373226762748), ("integer (numeric)hour (grain)", -2.5199979695992702), ("integer (21..99) - TYPE 2second (grain)", -4.465908118654584)], n = 114}}), ("<time-of-day> am|pm", Classifier{okData = ClassData{prior = -0.3746934494414107, unseen = -3.295836866004329, likelihoods = HashMap.fromList [("time-of-day (latent)", -1.6486586255873816), ("hh:mm", -1.1786549963416462), ("hour", -1.6486586255873816), ("minute", -1.1786549963416462)], n = 11}, koData = ClassData{prior = -1.1631508098056809, unseen = -2.70805020110221, likelihoods = HashMap.fromList [("time-of-day (latent)", -0.8472978603872037), ("hour", -0.8472978603872037)], n = 5}}), ("a day - \54616\47336", Classifier{okData = ClassData{prior = 0.0, unseen = -1.3862943611198906, likelihoods = HashMap.fromList [("", 0.0)], n = 2}, koData = ClassData{prior = -infinity, unseen = -0.6931471805599453, likelihoods = HashMap.fromList [], n = 0}}), ("am|pm <time-of-day>", Classifier{okData = ClassData{prior = -0.23262229526875347, unseen = -4.543294782270004, likelihoods = HashMap.fromList [("time-of-day (latent)", -1.824549292051046), ("<date>\50640", -3.146305132033365), ("time-of-day", -1.536867219599265), ("hour", -0.8437200390393196), ("<time-of-day>\51060\51204", -3.4339872044851463), ("<hour-of-day> half (as relative minutes)", -3.8394523125933104), ("minute", -3.146305132033365), ("after <time-of-day>", -3.8394523125933104), ("<hour-of-day> <integer> (as relative minutes)", -3.4339872044851463)], n = 42}, koData = ClassData{prior = -1.5723966407537513, unseen = -3.4657359027997265, likelihoods = HashMap.fromList [("time-of-day (latent)", -1.2367626271489267), ("time-of-day", -2.0476928433652555), ("hour", -0.9490805546971459)], n = 11}}), ("hh:mm", Classifier{okData = ClassData{prior = -4.255961441879589e-2, unseen = -3.2188758248682006, likelihoods = HashMap.fromList [("", 0.0)], n = 23}, koData = ClassData{prior = -3.1780538303479458, unseen = -1.0986122886681098, likelihoods = HashMap.fromList [("", 0.0)], n = 1}}), ("integer - TYPE 1: powers of ten", Classifier{okData = ClassData{prior = -0.15415067982725836, unseen = -2.0794415416798357, likelihoods = HashMap.fromList [("", 0.0)], n = 6}, koData = ClassData{prior = -1.9459101490553135, unseen = -1.0986122886681098, likelihoods = HashMap.fromList [("", 0.0)], n = 1}}), ("day with 
korean number - \51068\51068..\44396\51068", Classifier{okData = ClassData{prior = -0.6931471805599453, unseen = -1.0986122886681098, likelihoods = HashMap.fromList [("", 0.0)], n = 1}, koData = ClassData{prior = -0.6931471805599453, unseen = -1.0986122886681098, likelihoods = HashMap.fromList [("", 0.0)], n = 1}}), ("second (grain)", Classifier{okData = ClassData{prior = -0.2876820724517809, unseen = -1.6094379124341003, likelihoods = HashMap.fromList [("", 0.0)], n = 3}, koData = ClassData{prior = -1.3862943611198906, unseen = -1.0986122886681098, likelihoods = HashMap.fromList [("", 0.0)], n = 1}}), ("Hangul Day", Classifier{okData = ClassData{prior = 0.0, unseen = -1.0986122886681098, likelihoods = HashMap.fromList [("", 0.0)], n = 1}, koData = ClassData{prior = -infinity, unseen = -0.6931471805599453, likelihoods = HashMap.fromList [], n = 0}}), ("<integer> (hour-of-day) relative minutes \51204", Classifier{okData = ClassData{prior = 0.0, unseen = -1.6094379124341003, likelihoods = HashMap.fromList [("hour", -0.6931471805599453), ("time-of-dayinteger (21..99) - TYPE 2", -0.6931471805599453)], n = 1}, koData = ClassData{prior = -infinity, unseen = -1.0986122886681098, likelihoods = HashMap.fromList [], n = 0}}), ("<duration> ago", Classifier{okData = ClassData{prior = -0.45198512374305727, unseen = -3.044522437723423, likelihoods = HashMap.fromList [("week", -1.8971199848858813), ("day", -1.6094379124341003), ("year", -1.8971199848858813), ("<integer> <unit-of-duration>", -0.916290731874155)], n = 7}, koData = ClassData{prior = -1.0116009116784799, unseen = -2.70805020110221, likelihoods = HashMap.fromList [("<integer> <unit-of-duration>", -1.0296194171811581), ("hour", -1.540445040947149), ("minute", -1.540445040947149)], n = 4}}), ("Constitution Day", Classifier{okData = ClassData{prior = 0.0, unseen = -1.0986122886681098, likelihoods = HashMap.fromList [("", 0.0)], n = 1}, koData = ClassData{prior = -infinity, unseen = -0.6931471805599453, likelihoods = HashMap.fromList [], n = 0}}), ("last <time>", Classifier{okData = ClassData{prior = -2.833213344056216, unseen = -3.1780538303479458, likelihoods = HashMap.fromList [("day", -2.03688192726104), ("Sunday", -2.03688192726104), ("hour", -2.03688192726104), ("week-end", -2.03688192726104)], n = 4}, koData = ClassData{prior = -6.0624621816434854e-2, unseen = -4.969813299576001, likelihoods = HashMap.fromList [("intersect", -3.8642323415917974), ("year (latent)", -1.967112356705916), ("second", -3.8642323415917974), ("time-of-day (latent)", -1.9183221925364844), ("year", -1.967112356705916), ("<duration> ago", -3.8642323415917974), ("time-of-day", -2.477937980471907), ("hour", -1.1786549963416462), ("seconds", -3.8642323415917974), ("<datetime> - <datetime> (interval)", -3.8642323415917974), ("<time-of-day>\51060\51204", -3.8642323415917974), ("<time-of-day> - <time-of-day> (interval)", -3.353406717825807)], n = 64}}), ("<date>\50640", Classifier{okData = ClassData{prior = -7.410797215372185e-2, unseen = -3.4657359027997265, likelihoods = HashMap.fromList [("intersect", -1.824549292051046), ("day", -2.3353749158170367), ("am|pm <time-of-day>", -2.0476928433652555), ("<duration> ago", -2.740840023925201), ("time-of-day", -2.0476928433652555), ("hour", -1.0360919316867756), ("month", -2.740840023925201)], n = 13}, koData = ClassData{prior = -2.639057329615259, unseen = -2.3025850929940455, likelihoods = HashMap.fromList [("intersect", -1.5040773967762742), ("hour", -1.5040773967762742)], n = 1}}), ("integer (1..10) - TYPE 2", 
Classifier{okData = ClassData{prior = 0.0, unseen = -1.6094379124341003, likelihoods = HashMap.fromList [("", 0.0)], n = 3}, koData = ClassData{prior = -infinity, unseen = -0.6931471805599453, likelihoods = HashMap.fromList [], n = 0}}), ("time-of-day", Classifier{okData = ClassData{prior = -0.5328045304847658, unseen = -3.4965075614664802, likelihoods = HashMap.fromList [("integer (numeric)", -0.5753641449035618), ("integer (1..4) - for ordinals", -1.1631508098056809), ("integer (1..10) - TYPE 2", -2.772588722239781)], n = 27}, koData = ClassData{prior = -0.8842024173226546, unseen = -3.2188758248682006, likelihoods = HashMap.fromList [("integer (numeric)", -1.3862943611198906), ("integer (21..99) - TYPE 2", -2.4849066497880004), ("integer (1..4) - for ordinals", -0.8754687373538999), ("few \47751", -1.5686159179138452)], n = 19}}), ("noon", Classifier{okData = ClassData{prior = 0.0, unseen = -1.0986122886681098, likelihoods = HashMap.fromList [("", 0.0)], n = 1}, koData = ClassData{prior = -infinity, unseen = -0.6931471805599453, likelihoods = HashMap.fromList [], n = 0}}), ("Christmas", Classifier{okData = ClassData{prior = -0.6931471805599453, unseen = -1.0986122886681098, likelihoods = HashMap.fromList [("", 0.0)], n = 1}, koData = ClassData{prior = -0.6931471805599453, unseen = -1.0986122886681098, likelihoods = HashMap.fromList [("", 0.0)], n = 1}}), ("<integer> and an half hours", Classifier{okData = ClassData{prior = 0.0, unseen = -1.9459101490553135, likelihoods = HashMap.fromList [("integer (1..4) - for ordinals", 0.0)], n = 5}, koData = ClassData{prior = -infinity, unseen = -0.6931471805599453, likelihoods = HashMap.fromList [], n = 0}}), ("after <duration>", Classifier{okData = ClassData{prior = -0.2876820724517809, unseen = -2.3025850929940455, likelihoods = HashMap.fromList [("day", -1.5040773967762742), ("<integer> <unit-of-duration>", -0.8109302162163288), ("hour", -1.0986122886681098)], n = 3}, koData = ClassData{prior = -1.3862943611198906, unseen = -1.791759469228055, likelihoods = HashMap.fromList [("<integer> <unit-of-duration>", -0.916290731874155), ("hour", -0.916290731874155)], n = 1}}), ("month", Classifier{okData = ClassData{prior = -5.5569851154810765e-2, unseen = -3.6375861597263857, likelihoods = HashMap.fromList [("integer (numeric)", -5.5569851154810765e-2), ("integer - TYPE 1", -2.917770732084279)], n = 35}, koData = ClassData{prior = -2.917770732084279, unseen = -1.6094379124341003, likelihoods = HashMap.fromList [("integer - TYPE 1", -0.2876820724517809)], n = 2}}), ("midnight|EOD|end of day", Classifier{okData = ClassData{prior = 0.0, unseen = -1.0986122886681098, likelihoods = HashMap.fromList [("", 0.0)], n = 1}, koData = ClassData{prior = -infinity, unseen = -0.6931471805599453, likelihoods = HashMap.fromList [], n = 0}}), ("next <time>", Classifier{okData = ClassData{prior = -0.40546510810816444, unseen = -2.1972245773362196, likelihoods = HashMap.fromList [("day", -1.3862943611198906), ("month", -1.3862943611198906), ("Tuesday", -1.3862943611198906)], n = 2}, koData = ClassData{prior = -1.0986122886681098, unseen = -2.0794415416798357, likelihoods = HashMap.fromList [("time-of-day", -1.252762968495368), ("hour", -1.252762968495368)], n = 1}}), ("last <cycle>", Classifier{okData = ClassData{prior = -0.45198512374305727, unseen = -3.1354942159291497, likelihoods = HashMap.fromList [("week", -1.2992829841302609), ("month (grain)", -2.3978952727983707), ("year (grain)", -2.3978952727983707), ("week (grain)", -1.2992829841302609), ("year", 
-2.3978952727983707), ("month", -2.3978952727983707)], n = 7}, koData = ClassData{prior = -1.0116009116784799, unseen = -2.833213344056216, likelihoods = HashMap.fromList [("week", -1.6739764335716716), ("week (grain)", -1.6739764335716716), ("day", -1.6739764335716716), ("day (grain)", -1.6739764335716716)], n = 4}}), ("next n <cycle>", Classifier{okData = ClassData{prior = 0.0, unseen = -3.4965075614664802, likelihoods = HashMap.fromList [("week", -2.367123614131617), ("integer - TYPE 1year (grain)", -2.772588722239781), ("integer - TYPE 1minute (grain)", -2.772588722239781), ("integer (1..4) - for ordinalsmonth (grain)", -2.772588722239781), ("integer (numeric)year (grain)", -2.772588722239781), ("integer - TYPE 1week (grain)", -2.772588722239781), ("year", -2.367123614131617), ("integer (numeric)week (grain)", -2.772588722239781), ("hour", -2.772588722239781), ("month", -2.367123614131617), ("integer (numeric)minute (grain)", -2.772588722239781), ("integer (numeric)month (grain)", -2.772588722239781), ("minute", -2.367123614131617), ("integer (numeric)hour (grain)", -2.772588722239781)], n = 9}, koData = ClassData{prior = -infinity, unseen = -2.70805020110221, likelihoods = HashMap.fromList [], n = 0}}), ("seconds", Classifier{okData = ClassData{prior = -1.0986122886681098, unseen = -1.791759469228055, likelihoods = HashMap.fromList [("integer (numeric)", -0.916290731874155), ("integer (21..99) - TYPE 2", -0.916290731874155)], n = 2}, koData = ClassData{prior = -0.40546510810816444, unseen = -2.0794415416798357, likelihoods = HashMap.fromList [("integer (numeric)", -1.252762968495368), ("integer - TYPE 1", -0.8472978603872037), ("integer (21..99) - TYPE 2", -1.252762968495368)], n = 4}}), ("<time> \47560\51648\47561 <cycle>", Classifier{okData = ClassData{prior = -0.40546510810816444, unseen = -2.70805020110221, likelihoods = HashMap.fromList [("monthday", -1.540445040947149), ("monthday (grain)", -1.9459101490553135), ("monthweek", -1.540445040947149), ("intersectweek (grain)", -1.9459101490553135), ("intersectday (grain)", -1.9459101490553135), ("monthweek (grain)", -1.9459101490553135)], n = 4}, koData = ClassData{prior = -1.0986122886681098, unseen = -2.3978952727983707, likelihoods = HashMap.fromList [("monthday", -1.2039728043259361), ("monthday (grain)", -1.6094379124341003), ("intersectday (grain)", -1.6094379124341003)], n = 2}}), ("in <duration>", Classifier{okData = ClassData{prior = -0.17589066646366416, unseen = -4.1588830833596715, likelihoods = HashMap.fromList [("week", -3.044522437723423), ("second", -3.4499875458315876), ("day", -2.5336968139574325), ("year", -3.044522437723423), ("<integer> <unit-of-duration>", -1.1986957472250923), ("a day - \54616\47336", -3.044522437723423), ("<integer> and an half hours", -2.3513752571634776), ("hour", -2.1972245773362196), ("minute", -1.6582280766035324), ("about <duration>", -3.4499875458315876)], n = 26}, koData = ClassData{prior = -1.824549292051046, unseen = -3.0910424533583156, likelihoods = HashMap.fromList [("half an hour", -1.252762968495368), ("minute", -1.252762968495368)], n = 5}}), ("<datetime> - <datetime> (interval)", Classifier{okData = ClassData{prior = -0.46262352194811296, unseen = -3.8501476017100584, likelihoods = HashMap.fromList [("intersecthh:mm", -2.7300291078209855), ("intersectam|pm <time-of-day>", -2.7300291078209855), ("minuteminute", -1.749199854809259), ("hh:mmhh:mm", -2.03688192726104), ("dayday", -1.8827312474337816), ("hourhour", -2.2192034840549946), ("intersectday", -2.7300291078209855), 
("am|pm <time-of-day>am|pm <time-of-day>", -2.7300291078209855), ("intersectintersect", -2.7300291078209855)], n = 17}, koData = ClassData{prior = -0.9932517730102834, unseen = -3.4965075614664802, likelihoods = HashMap.fromList [("daymonth", -1.8562979903656263), ("time-of-dayam|pm <time-of-day>", -2.367123614131617), ("intersectmonth", -2.367123614131617), ("dayday", -2.367123614131617), ("hourhour", -1.8562979903656263), ("last <time>am|pm <time-of-day>", -2.367123614131617), ("dayintersect", -2.367123614131617)], n = 10}}), ("<time-of-day>\51060\51204", Classifier{okData = ClassData{prior = -0.40546510810816444, unseen = -2.70805020110221, likelihoods = HashMap.fromList [("am|pm <time-of-day>", -1.540445040947149), ("time-of-day", -1.540445040947149), ("hour", -1.0296194171811581)], n = 4}, koData = ClassData{prior = -1.0986122886681098, unseen = -2.3978952727983707, likelihoods = HashMap.fromList [("time-of-day (latent)", -1.6094379124341003), ("hour", -1.6094379124341003), ("minute", -1.6094379124341003), ("<hour-of-day> <integer> (as relative minutes)", -1.6094379124341003)], n = 2}}), ("Tuesday", Classifier{okData = ClassData{prior = 0.0, unseen = -2.4849066497880004, likelihoods = HashMap.fromList [("", 0.0)], n = 10}, koData = ClassData{prior = -infinity, unseen = -0.6931471805599453, likelihoods = HashMap.fromList [], n = 0}}), ("New Year's Day", Classifier{okData = ClassData{prior = 0.0, unseen = -1.3862943611198906, likelihoods = HashMap.fromList [("", 0.0)], n = 2}, koData = ClassData{prior = -infinity, unseen = -0.6931471805599453, likelihoods = HashMap.fromList [], n = 0}}), ("<1..4> quarter", Classifier{okData = ClassData{prior = -0.6931471805599453, unseen = -1.791759469228055, likelihoods = HashMap.fromList [("integer - TYPE 1quarter (grain)", -0.916290731874155), ("quarter", -0.916290731874155)], n = 1}, koData = ClassData{prior = -0.6931471805599453, unseen = -1.791759469228055, likelihoods = HashMap.fromList [("integer (numeric)quarter (grain)", -0.916290731874155), ("quarter", -0.916290731874155)], n = 1}}), ("Children's Day", Classifier{okData = ClassData{prior = 0.0, unseen = -1.0986122886681098, likelihoods = HashMap.fromList [("", 0.0)], n = 1}, koData = ClassData{prior = -infinity, unseen = -0.6931471805599453, likelihoods = HashMap.fromList [], n = 0}}), ("Winter", Classifier{okData = ClassData{prior = 0.0, unseen = -1.0986122886681098, likelihoods = HashMap.fromList [("", 0.0)], n = 1}, koData = ClassData{prior = -infinity, unseen = -0.6931471805599453, likelihoods = HashMap.fromList [], n = 0}}), ("<time-of-day> - <time-of-day> (interval)", Classifier{okData = ClassData{prior = -0.49247648509779407, unseen = -3.5263605246161616, likelihoods = HashMap.fromList [("hh:mmtime-of-day (latent)", -1.8870696490323797), ("minuteminute", -1.7047480922384253), ("hh:mmhh:mm", -1.7047480922384253), ("hourhour", -2.3978952727983707), ("minutehour", -1.8870696490323797), ("am|pm <time-of-day>am|pm <time-of-day>", -2.3978952727983707)], n = 11}, koData = ClassData{prior = -0.9444616088408514, unseen = -3.258096538021482, likelihoods = HashMap.fromList [("time-of-dayam|pm <time-of-day>", -2.120263536200091), ("time-of-daytime-of-day (latent)", -2.5257286443082556), ("hh:mmtime-of-day (latent)", -2.5257286443082556), ("am|pm <time-of-day>time-of-day (latent)", -2.5257286443082556), ("am|pm <time-of-day><time-of-day>\51060\51204", -2.5257286443082556), ("hourhour", -1.2729656758128873), ("time-of-day<time-of-day>\51060\51204", -2.5257286443082556), ("minutehour", 
-2.5257286443082556)], n = 7}}), ("last n <cycle>", Classifier{okData = ClassData{prior = 0.0, unseen = -3.713572066704308, likelihoods = HashMap.fromList [("week", -2.5902671654458267), ("second", -2.5902671654458267), ("integer - TYPE 1minute (grain)", -2.995732273553991), ("integer (1..4) - for ordinalsmonth (grain)", -2.995732273553991), ("integer (1..4) - for ordinalshour (grain)", -2.995732273553991), ("integer (numeric)second (grain)", -2.995732273553991), ("integer (21..99) - TYPE 2hour (grain)", -2.995732273553991), ("integer - TYPE 1week (grain)", -2.995732273553991), ("integer (numeric)week (grain)", -2.995732273553991), ("hour", -2.0794415416798357), ("month", -2.5902671654458267), ("integer - TYPE 1second (grain)", -2.995732273553991), ("integer (numeric)minute (grain)", -2.995732273553991), ("integer (numeric)month (grain)", -2.995732273553991), ("minute", -2.5902671654458267), ("integer (numeric)hour (grain)", -2.5902671654458267)], n = 12}, koData = ClassData{prior = -infinity, unseen = -2.833213344056216, likelihoods = HashMap.fromList [], n = 0}}), ("<hour-of-day> half (as relative minutes)", Classifier{okData = ClassData{prior = 0.0, unseen = -1.9459101490553135, likelihoods = HashMap.fromList [("time-of-day", -0.6931471805599453), ("hour", -0.6931471805599453)], n = 2}, koData = ClassData{prior = -infinity, unseen = -1.0986122886681098, likelihoods = HashMap.fromList [], n = 0}}), ("quarter (grain)", Classifier{okData = ClassData{prior = 0.0, unseen = -1.791759469228055, likelihoods = HashMap.fromList [("", 0.0)], n = 4}, koData = ClassData{prior = -infinity, unseen = -0.6931471805599453, likelihoods = HashMap.fromList [], n = 0}}), ("morning", Classifier{okData = ClassData{prior = 0.0, unseen = -1.791759469228055, likelihoods = HashMap.fromList [("", 0.0)], n = 4}, koData = ClassData{prior = -infinity, unseen = -0.6931471805599453, likelihoods = HashMap.fromList [], n = 0}}), ("week-end", Classifier{okData = ClassData{prior = 0.0, unseen = -1.6094379124341003, likelihoods = HashMap.fromList [("", 0.0)], n = 3}, koData = ClassData{prior = -infinity, unseen = -0.6931471805599453, likelihoods = HashMap.fromList [], n = 0}}), ("after <time-of-day>", Classifier{okData = ClassData{prior = -1.5040773967762742, unseen = -2.639057329615259, likelihoods = HashMap.fromList [("lunch", -1.8718021769015913), ("time-of-day (latent)", -1.8718021769015913), ("am|pm <time-of-day>", -1.8718021769015913), ("time-of-day", -1.8718021769015913), ("hour", -0.9555114450274363)], n = 4}, koData = ClassData{prior = -0.25131442828090605, unseen = -3.5263605246161616, likelihoods = HashMap.fromList [("time-of-day (latent)", -0.7884573603642702), ("hour", -0.7884573603642702)], n = 14}}), ("day (grain)", Classifier{okData = ClassData{prior = -1.1727202608218315, unseen = -2.70805020110221, likelihoods = HashMap.fromList [("", 0.0)], n = 13}, koData = ClassData{prior = -0.37037378829689427, unseen = -3.4339872044851463, likelihoods = HashMap.fromList [("", 0.0)], n = 29}}), ("the day after tomorrow - \45236\51068\47784\47112", Classifier{okData = ClassData{prior = 0.0, unseen = -1.3862943611198906, likelihoods = HashMap.fromList [("", 0.0)], n = 2}, koData = ClassData{prior = -infinity, unseen = -0.6931471805599453, likelihoods = HashMap.fromList [], n = 0}}), ("Summer", Classifier{okData = ClassData{prior = 0.0, unseen = -1.0986122886681098, likelihoods = HashMap.fromList [("", 0.0)], n = 1}, koData = ClassData{prior = -infinity, unseen = -0.6931471805599453, likelihoods = HashMap.fromList [], n = 
0}}), ("compose by multiplication", Classifier{okData = ClassData{prior = 0.0, unseen = -1.791759469228055, likelihoods = HashMap.fromList [("integer - TYPE 1integer - TYPE 1: powers of ten", 0.0)], n = 4}, koData = ClassData{prior = -infinity, unseen = -0.6931471805599453, likelihoods = HashMap.fromList [], n = 0}}), ("about <duration>", Classifier{okData = ClassData{prior = -0.6931471805599453, unseen = -1.9459101490553135, likelihoods = HashMap.fromList [("<integer> and an half hours", -1.0986122886681098), ("minute", -1.0986122886681098)], n = 1}, koData = ClassData{prior = -0.6931471805599453, unseen = -1.9459101490553135, likelihoods = HashMap.fromList [("<integer> <unit-of-duration>", -1.0986122886681098), ("hour", -1.0986122886681098)], n = 1}}), ("<hour-of-day> <integer> (as relative minutes)", Classifier{okData = ClassData{prior = -0.2876820724517809, unseen = -2.3978952727983707, likelihoods = HashMap.fromList [("time-of-daycompose by multiplication", -1.6094379124341003), ("time-of-dayinteger (numeric)", -1.6094379124341003), ("hour", -0.916290731874155), ("time-of-dayinteger (21..99) - TYPE 2", -1.6094379124341003)], n = 3}, koData = ClassData{prior = -1.3862943611198906, unseen = -1.9459101490553135, likelihoods = HashMap.fromList [("hour", -1.0986122886681098), ("time-of-dayinteger (21..99) - TYPE 2", -1.0986122886681098)], n = 1}}), ("few \47751", Classifier{okData = ClassData{prior = 0.0, unseen = -1.791759469228055, likelihoods = HashMap.fromList [("", 0.0)], n = 4}, koData = ClassData{prior = -infinity, unseen = -0.6931471805599453, likelihoods = HashMap.fromList [], n = 0}}), ("this <time>", Classifier{okData = ClassData{prior = 0.0, unseen = -2.70805020110221, likelihoods = HashMap.fromList [("day", -1.540445040947149), ("hour", -1.540445040947149), ("Winter", -1.9459101490553135), ("morning", -1.9459101490553135), ("week-end", -1.9459101490553135), ("Summer", -1.9459101490553135)], n = 4}, koData = ClassData{prior = -infinity, unseen = -1.9459101490553135, likelihoods = HashMap.fromList [], n = 0}}), ("within <duration>", Classifier{okData = ClassData{prior = 0.0, unseen = -1.6094379124341003, likelihoods = HashMap.fromList [("week", -0.6931471805599453), ("<integer> <unit-of-duration>", -0.6931471805599453)], n = 1}, koData = ClassData{prior = -infinity, unseen = -1.0986122886681098, likelihoods = HashMap.fromList [], n = 0}})]
facebookincubator/duckling
Duckling/Ranking/Classifiers/KO_XX.hs
bsd-3-clause
92,149
0
15
45,318
16,136
10,059
6,077
1,493
1
module Network.HaskellNet.IMAP
    ( connectIMAP, connectIMAPPort, connectStream
      -- * IMAP commands
      -- ** any state commands
    , noop, capability, logout
      -- ** not authenticated state commands
    , login, authenticate
      -- ** authenticated state commands
    , select, examine, create, delete, rename
    , subscribe, unsubscribe
    , list, lsub, status, append
      -- ** selected state commands
    , check, close, expunge
    , search, store, copy
      -- * fetch commands
    , fetch, fetchHeader, fetchSize, fetchHeaderFields, fetchHeaderFieldsNot
    , fetchFlags, fetchR, fetchByString, fetchByStringR
      -- * other types
    , Flag(..), Attribute(..), MailboxStatus(..)
    , SearchQuery(..), FlagsQuery(..)
    )
where

import Network
import Network.HaskellNet.BSStream
import Network.HaskellNet.IMAP.Connection
import Network.HaskellNet.IMAP.Types
import Network.HaskellNet.IMAP.Parsers
import qualified Network.HaskellNet.Auth as A

import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as BS

import Control.Applicative ((<$>))
import Control.Monad

import System.IO
import System.Time

import Data.Maybe
import Data.List hiding (delete)
import Data.Char

import Text.Packrat.Parse (Result)

-- suffixed by `s'
data SearchQuery = ALLs
                 | FLAG Flag
                 | UNFLAG Flag
                 | BCCs String
                 | BEFOREs CalendarTime
                 | BODYs String
                 | CCs String
                 | FROMs String
                 | HEADERs String String
                 | LARGERs Integer
                 | NEWs
                 | NOTs SearchQuery
                 | OLDs
                 | ONs CalendarTime
                 | ORs SearchQuery SearchQuery
                 | SENTBEFOREs CalendarTime
                 | SENTONs CalendarTime
                 | SENTSINCEs CalendarTime
                 | SINCEs CalendarTime
                 | SMALLERs Integer
                 | SUBJECTs String
                 | TEXTs String
                 | TOs String
                 | UIDs [UID]

instance Show SearchQuery where
    showsPrec d q = showParen (d>app_prec) $ showString $ showQuery q
      where
        app_prec = 10
        showQuery ALLs            = "ALL"
        showQuery (FLAG f)        = showFlag f
        showQuery (UNFLAG f)      = "UN" ++ showFlag f
        showQuery (BCCs addr)     = "BCC " ++ addr
        showQuery (BEFOREs t)     = "BEFORE " ++ dateToStringIMAP t
        showQuery (BODYs s)       = "BODY " ++ s
        showQuery (CCs addr)      = "CC " ++ addr
        showQuery (FROMs addr)    = "FROM " ++ addr
        showQuery (HEADERs f v)   = "HEADER " ++ f ++ " " ++ v
        showQuery (LARGERs siz)   = "LARGER {" ++ show siz ++ "}"
        showQuery NEWs            = "NEW"
        showQuery (NOTs qry)      = "NOT " ++ show qry
        showQuery OLDs            = "OLD"
        showQuery (ONs t)         = "ON " ++ dateToStringIMAP t
        showQuery (ORs q1 q2)     = "OR " ++ show q1 ++ " " ++ show q2
        showQuery (SENTBEFOREs t) = "SENTBEFORE " ++ dateToStringIMAP t
        showQuery (SENTONs t)     = "SENTON " ++ dateToStringIMAP t
        showQuery (SENTSINCEs t)  = "SENTSINCE " ++ dateToStringIMAP t
        showQuery (SINCEs t)      = "SINCE " ++ dateToStringIMAP t
        showQuery (SMALLERs siz)  = "SMALLER {" ++ show siz ++ "}"
        showQuery (SUBJECTs s)    = "SUBJECT " ++ s
        showQuery (TEXTs s)       = "TEXT " ++ s
        showQuery (TOs addr)      = "TO " ++ addr
        showQuery (UIDs uids)     = concat $ intersperse "," $ map show uids
        showFlag Seen        = "SEEN"
        showFlag Answered    = "ANSWERED"
        showFlag Flagged     = "FLAGGED"
        showFlag Deleted     = "DELETED"
        showFlag Draft       = "DRAFT"
        showFlag Recent      = "RECENT"
        showFlag (Keyword s) = "KEYWORD " ++ s

data FlagsQuery = ReplaceFlags [Flag]
                | PlusFlags [Flag]
                | MinusFlags [Flag]

----------------------------------------------------------------------
-- establish connection

connectIMAPPort :: String -> PortNumber -> IO (IMAPConnection Handle)
connectIMAPPort hostname port =
    connectTo hostname (PortNumber port) >>= connectStream

connectIMAP :: String -> IO (IMAPConnection Handle)
connectIMAP hostname = connectIMAPPort hostname 143

connectStream :: BSStream s => s -> IO (IMAPConnection s)
connectStream s = do
    msg <- bsGetLine s
    unless (and $ BS.zipWith (==) msg (BS.pack "* OK")) $
        fail "cannot connect to the server"
    newConnection s

----------------------------------------------------------------------
-- normal send commands

sendCommand' :: (BSStream s) => IMAPConnection s -> String -> IO (ByteString, Int)
sendCommand' c cmdstr = do
    (_, num) <- withNextCommandNum c $ \num ->
        bsPutCrLf c $ BS.pack $ show6 num ++ " " ++ cmdstr
    resp <- getResponse c
    return (resp, num)

show6 :: (Ord a, Num a, Show a) => a -> String
show6 n | n > 100000 = show n
        | n > 10000  = '0' : show n
        | n > 1000   = "00" ++ show n
        | n > 100    = "000" ++ show n
        | n > 10     = "0000" ++ show n
        | otherwise  = "00000" ++ show n

sendCommand :: BSStream s
            => IMAPConnection s -> String
            -> (RespDerivs -> Result RespDerivs (ServerResponse, MboxUpdate, v))
            -> IO v
sendCommand imapc cmdstr pFunc = do
    (buf, num) <- sendCommand' imapc cmdstr
    let (resp, mboxUp, value) = eval pFunc (show6 num) buf
    case resp of
      OK _ _ -> do
        mboxUpdate imapc mboxUp
        return value
      NO _ msg      -> fail ("NO: " ++ msg)
      BAD _ msg     -> fail ("BAD: " ++ msg)
      PREAUTH _ msg -> fail ("preauth: " ++ msg)

getResponse :: BSStream s => s -> IO ByteString
getResponse s = unlinesCRLF <$> getLs
  where
    unlinesCRLF = BS.concat . concatMap (:[crlfStr])
    getLs = do
        l <- strip <$> bsGetLine s
        case () of
          _ | isLiteral l ->
                do l' <- getLiteral l (getLitLen l)
                   ls <- getLs
                   return (l' : ls)
            | isTagged l -> (l:) <$> getLs
            | otherwise -> return [l]
    getLiteral l len = do
        lit <- bsGet s len
        l2 <- strip <$> bsGetLine s
        let l' = BS.concat [l, crlfStr, lit, l2]
        if isLiteral l2
          then getLiteral l' (getLitLen l2)
          else return l'
    crlfStr = BS.pack "\r\n"
    isLiteral l = BS.last l == '}' &&
                  BS.last (fst (BS.spanEnd isDigit (BS.init l))) == '{'
    getLitLen = read . BS.unpack . snd . BS.spanEnd isDigit . BS.init
    isTagged l = BS.head l == '*' && BS.head (BS.tail l) == ' '

mboxUpdate :: (BSStream s) => IMAPConnection s -> MboxUpdate -> IO ()
mboxUpdate conn (MboxUpdate exists' recent') = do
    when (isJust exists') $
        modifyMailboxInfo conn $ \mbox -> mbox { _exists = fromJust exists' }
    when (isJust recent') $
        modifyMailboxInfo conn $ \mbox -> mbox { _recent = fromJust recent' }

----------------------------------------------------------------------
-- IMAP commands
--

noop :: BSStream s => IMAPConnection s -> IO ()
noop conn = sendCommand conn "NOOP" pNone

capability :: BSStream s => IMAPConnection s -> IO [String]
capability conn = sendCommand conn "CAPABILITY" pCapability

logout :: (BSStream s) => IMAPConnection s -> IO ()
logout c = do
    bsPutCrLf c $ BS.pack "a0001 LOGOUT"
    bsClose c

login :: BSStream s => IMAPConnection s -> A.UserName -> A.Password -> IO ()
login conn username password =
    sendCommand conn ("LOGIN " ++ username ++ " " ++ password) pNone

authenticate :: (BSStream s) => IMAPConnection s
             -> A.AuthType -> A.UserName -> A.Password -> IO ()
authenticate conn A.LOGIN username password = do
    (_, num) <- sendCommand' conn "AUTHENTICATE LOGIN"
    bsPutCrLf conn $ BS.pack userB64
    bsGetLine conn
    bsPutCrLf conn $ BS.pack passB64
    buf <- getResponse conn
    let (resp, mboxUp, value) = eval pNone (show6 num) buf
    case resp of
      OK _ _ -> do
        mboxUpdate conn $ mboxUp
        return value
      NO _ msg      -> fail ("NO: " ++ msg)
      BAD _ msg     -> fail ("BAD: " ++ msg)
      PREAUTH _ msg -> fail ("preauth: " ++ msg)
  where
    (userB64, passB64) = A.login username password
authenticate conn at username password = do
    (c, num) <- sendCommand' conn $ "AUTHENTICATE " ++ show at
    let challenge =
          if BS.take 2 c == BS.pack "+ "
            then A.b64Decode $ BS.unpack $ head $
                 dropWhile (isSpace . BS.last) $ BS.inits $ BS.drop 2 c
            else ""
    bsPutCrLf conn $ BS.pack $ A.auth at challenge username password
    buf <- getResponse conn
    let (resp, mboxUp, value) = eval pNone (show6 num) buf
    case resp of
      OK _ _ -> do
        mboxUpdate conn $ mboxUp
        return value
      NO _ msg      -> fail ("NO: " ++ msg)
      BAD _ msg     -> fail ("BAD: " ++ msg)
      PREAUTH _ msg -> fail ("preauth: " ++ msg)

_select :: (BSStream s) => String -> IMAPConnection s -> String -> IO ()
_select cmd conn mboxName = do
    mbox' <- sendCommand conn (cmd ++ mboxName) pSelect
    setMailboxInfo conn $ mbox' { _mailbox = mboxName }

select :: BSStream s => IMAPConnection s -> MailboxName -> IO ()
select = _select "SELECT "

examine :: BSStream s => IMAPConnection s -> MailboxName -> IO ()
examine = _select "EXAMINE "

create :: BSStream s => IMAPConnection s -> MailboxName -> IO ()
create conn mboxname = sendCommand conn ("CREATE " ++ mboxname) pNone

delete :: BSStream s => IMAPConnection s -> MailboxName -> IO ()
delete conn mboxname = sendCommand conn ("DELETE " ++ mboxname) pNone

rename :: BSStream s => IMAPConnection s -> MailboxName -> MailboxName -> IO ()
rename conn mboxorg mboxnew =
    sendCommand conn ("RENAME " ++ mboxorg ++ " " ++ mboxnew) pNone

subscribe :: BSStream s => IMAPConnection s -> MailboxName -> IO ()
subscribe conn mboxname = sendCommand conn ("SUBSCRIBE " ++ mboxname) pNone

unsubscribe :: BSStream s => IMAPConnection s -> MailboxName -> IO ()
unsubscribe conn mboxname = sendCommand conn ("UNSUBSCRIBE " ++ mboxname) pNone

list :: BSStream s => IMAPConnection s -> IO [([Attribute], MailboxName)]
list conn = (map (\(a, _, m) -> (a, m))) <$> listFull conn "\"\"" "*"

lsub :: BSStream s => IMAPConnection s -> IO [([Attribute], MailboxName)]
lsub conn = (map (\(a, _, m) -> (a, m))) <$> lsubFull conn "\"\"" "*"

listFull :: BSStream s => IMAPConnection s -> String -> String
         -> IO [([Attribute], String, MailboxName)]
listFull conn ref pat = sendCommand conn (unwords ["LIST", ref, pat]) pList

lsubFull :: BSStream s => IMAPConnection s -> String -> String
         -> IO [([Attribute], String, MailboxName)]
lsubFull conn ref pat = sendCommand conn (unwords ["LSUB", ref, pat]) pLsub

status :: BSStream s => IMAPConnection s -> MailboxName -> [MailboxStatus]
       -> IO [(MailboxStatus, Integer)]
status conn mbox stats =
    let cmd = "STATUS " ++ mbox ++ " (" ++ (unwords $ map show stats) ++ ")"
    in sendCommand conn cmd pStatus

append :: BSStream s => IMAPConnection s -> MailboxName -> ByteString -> IO ()
append conn mbox mailData = appendFull conn mbox mailData [] Nothing

appendFull :: BSStream s => IMAPConnection s -> MailboxName -> ByteString
           -> [Flag] -> Maybe CalendarTime -> IO ()
appendFull conn mbox mailData flags' time = do
    (buf, num) <- sendCommand' conn
        (unwords ["APPEND", mbox, fstr, tstr, "{" ++ show len ++ "}"])
    unless (BS.null buf || (BS.head buf /= '+')) $
        fail "illegal server response"
    mapM_ (bsPutCrLf conn) mailLines
    buf2 <- getResponse conn
    let (resp, mboxUp, ()) = eval pNone (show6 num) buf2
    case resp of
      OK _ _        -> mboxUpdate conn mboxUp
      NO _ msg      -> fail ("NO: "++msg)
      BAD _ msg     -> fail ("BAD: "++msg)
      PREAUTH _ msg -> fail ("PREAUTH: "++msg)
  where
    mailLines = BS.lines mailData
    len       = sum $ map ((2+) . BS.length) mailLines
    tstr      = maybe "" show time
    fstr      = unwords $ map show flags'

check :: BSStream s => IMAPConnection s -> IO ()
check conn = sendCommand conn "CHECK" pNone

close :: BSStream s => IMAPConnection s -> IO ()
close conn = do
    sendCommand conn "CLOSE" pNone
    setMailboxInfo conn emptyMboxInfo

expunge :: BSStream s => IMAPConnection s -> IO [Integer]
expunge conn = sendCommand conn "EXPUNGE" pExpunge

search :: BSStream s => IMAPConnection s -> [SearchQuery] -> IO [UID]
search conn queries = searchCharset conn "" queries

searchCharset :: BSStream s => IMAPConnection s -> Charset -> [SearchQuery]
              -> IO [UID]
searchCharset conn charset queries =
    sendCommand conn ("UID SEARCH "
                      ++ (if not . null $ charset
                          then charset ++ " "
                          else "")
                      ++ unwords (map show queries)) pSearch

fetch :: BSStream s => IMAPConnection s -> UID -> IO ByteString
fetch conn uid = do
    lst <- fetchByString conn uid "BODY[]"
    return $ maybe BS.empty BS.pack $ lookup "BODY[]" lst

fetchHeader :: BSStream s => IMAPConnection s -> UID -> IO ByteString
fetchHeader conn uid = do
    lst <- fetchByString conn uid "BODY[HEADER]"
    return $ maybe BS.empty BS.pack $ lookup "BODY[HEADER]" lst

fetchSize :: BSStream s => IMAPConnection s -> UID -> IO Int
fetchSize conn uid = do
    lst <- fetchByString conn uid "RFC822.SIZE"
    return $ maybe 0 read $ lookup "RFC822.SIZE" lst

fetchHeaderFields :: BSStream s
                  => IMAPConnection s -> UID -> [String] -> IO ByteString
fetchHeaderFields conn uid hs = do
    lst <- fetchByString conn uid ("BODY[HEADER.FIELDS "++unwords hs++"]")
    return $ maybe BS.empty BS.pack $
        lookup ("BODY[HEADER.FIELDS "++unwords hs++"]") lst

fetchHeaderFieldsNot :: BSStream s
                     => IMAPConnection s -> UID -> [String] -> IO ByteString
fetchHeaderFieldsNot conn uid hs = do
    let fetchCmd = "BODY[HEADER.FIELDS.NOT "++unwords hs++"]"
    lst <- fetchByString conn uid fetchCmd
    return $ maybe BS.empty BS.pack $ lookup fetchCmd lst

fetchFlags :: BSStream s => IMAPConnection s -> UID -> IO [Flag]
fetchFlags conn uid = do
    lst <- fetchByString conn uid "FLAGS"
    return $ getFlags $ lookup "FLAGS" lst
  where
    getFlags Nothing  = []
    getFlags (Just s) = eval' dvFlags "" s

fetchR :: BSStream s => IMAPConnection s -> (UID, UID) -> IO [(UID, ByteString)]
fetchR conn r = do
    lst <- fetchByStringR conn r "BODY[]"
    return $ map (\(uid, vs) ->
                      (uid, maybe BS.empty BS.pack $ lookup "BODY[]" vs)) lst

fetchByString :: BSStream s => IMAPConnection s -> UID -> String
              -> IO [(String, String)]
fetchByString conn uid command = do
    lst <- fetchCommand conn ("UID FETCH "++show uid++" "++command) id
    return $ snd $ head lst

fetchByStringR :: BSStream s => IMAPConnection s -> (UID, UID) -> String
               -> IO [(UID, [(String, String)])]
fetchByStringR conn (s, e) command =
    fetchCommand conn ("UID FETCH "++show s++":"++show e++" "++command) proc
  where
    proc (n, ps) = (maybe (toEnum (fromIntegral n)) read (lookup "UID" ps), ps)

fetchCommand :: (BSStream s) => IMAPConnection s -> String
             -> ((Integer, [(String, String)]) -> b) -> IO [b]
fetchCommand conn command proc =
    (map proc) <$> sendCommand conn command pFetch

storeFull :: BSStream s => IMAPConnection s -> String -> FlagsQuery -> Bool
          -> IO [(UID, [Flag])]
storeFull conn uidstr query isSilent =
    fetchCommand conn ("UID STORE " ++ uidstr ++ flgs query) procStore
  where
    fstrs fs = "(" ++ (concat $ intersperse " " $ map show fs) ++ ")"
    toFStr s fstrs' = s ++ (if isSilent then ".SILENT" else "") ++ " " ++ fstrs'
    flgs (ReplaceFlags fs) = toFStr "FLAGS" $ fstrs fs
    flgs (PlusFlags fs)    = toFStr "+FLAGS" $ fstrs fs
    flgs (MinusFlags fs)   = toFStr "-FLAGS" $ fstrs fs
    procStore (n, ps) = (maybe (toEnum (fromIntegral n)) read (lookup "UID" ps)
                        ,maybe [] (eval' dvFlags "") (lookup "FLAG" ps))

store :: BSStream s => IMAPConnection s -> UID -> FlagsQuery -> IO ()
store conn i q = storeFull conn (show i) q True >> return ()

copyFull :: (BSStream s) => IMAPConnection s -> String -> String -> IO ()
copyFull conn uidStr mbox =
    sendCommand conn ("UID COPY " ++ uidStr ++ " " ++ mbox) pNone

copy :: BSStream s => IMAPConnection s -> UID -> MailboxName -> IO ()
copy conn uid mbox = copyFull conn (show uid) mbox

----------------------------------------------------------------------
-- auxiliary functions

dateToStringIMAP :: CalendarTime -> String
dateToStringIMAP date =
    concat $ intersperse "-" [ show2 $ ctDay date
                             , showMonth $ ctMonth date
                             , show $ ctYear date ]
  where
    show2 n | n < 10    = '0' : show n
            | otherwise = show n
    showMonth January   = "Jan"
    showMonth February  = "Feb"
    showMonth March     = "Mar"
    showMonth April     = "Apr"
    showMonth May       = "May"
    showMonth June      = "Jun"
    showMonth July      = "Jul"
    showMonth August    = "Aug"
    showMonth September = "Sep"
    showMonth October   = "Oct"
    showMonth November  = "Nov"
    showMonth December  = "Dec"

strip :: ByteString -> ByteString
strip = fst . BS.spanEnd isSpace . BS.dropWhile isSpace
danchoi/HaskellNet
src/Network/HaskellNet/IMAP.hs
bsd-3-clause
18,363
0
18
5,836
6,190
3,086
3,104
362
12
----------------------------------------------------------------------
module Sudoku.Cover (
   pureCoverSolver, fastPureCoverSolver, dlxSolver,
   boardConstraints, givenConstraints
) where

import Data.List (sort)
import Data.Maybe (catMaybes)
import Cover (makeColumn, Column, fastPureSolve, pureSolve, dlxSolve)
import Sudoku.Internal

-- Use 10s to make the rows easy to read. To solve bigger puzzles,
-- this factor would need to be larger.
makeRow :: Int -> Int -> Int -> Int
makeRow row col piece = 100*row + 10*col + piece

unmakeRow :: Int -> (Int, Int, Int)
unmakeRow coded =
   let (row, c2) = coded `divMod` 100 in
   let (col, piece) = c2 `divMod` 10 in
   (row, col, piece)

-- An unconstrained sudoku board of size nsqrt^2 wide and nsqrt^2 high
-- has the following constraints. There is a single row for each
-- possible piece in each possible square.
boardConstraints :: Int -> [Column Int]
boardConstraints nsqrt =
   let n = nsqrt * nsqrt in

   -- Each cell can only have a single number.
   [ makeColumn ("u" ++ show row ++ show col)
       [ makeRow row col piece | piece <- [1..n] ]
   | row <- [1..n], col <- [1..n] ] ++

   -- Each number only once per row.
   [ makeColumn ("r" ++ show row ++ "-p" ++ show piece)
       [ makeRow row col piece | col <- [1..n] ]
   | row <- [1..n], piece <- [1..n] ] ++

   -- Each number only once per column.
   [ makeColumn ("c" ++ show col ++ "-p" ++ show piece)
       [ makeRow row col piece | row <- [1..n] ]
   | col <- [1..n], piece <- [1..n] ] ++

   -- Each number must only occur once in each group.
   [ makeColumn ("g" ++ show (gx+1) ++ show (gy+1) ++ "-p" ++ show piece)
       [ makeRow ((gy*nsqrt) + dy + 1) ((gx*nsqrt) + dx + 1) piece
       | dy <- [0 .. nsqrt-1], dx <- [0 .. nsqrt-1] ]
   | gy <- [0 .. nsqrt-1], gx <- [0 .. nsqrt-1], piece <- [1..n] ]

-- A given problem statement can be represented as additional columns
-- with just a single row listed.
givenConstraints :: String -> [Column Int]
givenConstraints text =
   let n = floor (sqrt $ fromIntegral (length text) :: Double) in
   let cells = [ (row, col) | row <- [1..n], col <- [1..n] ] in
   catMaybes $ zipWith collect text cells
   where
      collect ch (row, col)
         | ch >= '1' && ch <= '9' =
            let piece = fromEnum ch - fromEnum '0' in
            Just $ makeColumn ("g" ++ show row ++ show col ++ "-p" ++ show piece)
                   [ makeRow row col piece ]
         | otherwise = Nothing

-- Convert an answer (as a list of Rows) back into a problem
-- statement.
answerToBoard :: [Int] -> String
answerToBoard = concatMap decodeRow . sort
   where
      decodeRow row =
         let (_, _, piece) = unmakeRow row in
         show piece

pureCoverSolver :: String -> [String]
pureCoverSolver = solver pureSolve

fastPureCoverSolver :: String -> [String]
fastPureCoverSolver = solver fastPureSolve

dlxSolver :: String -> [String]
dlxSolver = solver dlxSolve

solver :: ([Column Int] -> [[Int]]) -> String -> [String]
solver kind board =
   map answerToBoard $ kind $ boardConstraints blockSize ++ givenConstraints board
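
-- A small usage sketch of the exported API.  A puzzle is a string read row
-- by row (81 characters for the usual 9x9 board), with the digits '1'..'9'
-- as givens and any other character as a blank.  The grid below is just a
-- sample; each solver returns every completed board in the same encoding.
-- Note that the row encoding round-trips: makeRow 3 5 7 == 357 and
-- unmakeRow 357 == (3, 5, 7).
_samplePuzzle :: String
_samplePuzzle = concat
   [ "53..7...."
   , "6..195..."
   , ".98....6."
   , "8...6...3"
   , "4..8.3..1"
   , "7...2...6"
   , ".6....28."
   , "...419..5"
   , "....8..79" ]

_sampleSolve :: IO ()
_sampleSolve = mapM_ putStrLn (dlxSolver _samplePuzzle)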
d3zd3z/sudoku
Sudoku/Cover.hs
bsd-3-clause
3,083
0
17
718
1,084
576
508
55
1
-- Advent of Code
---- Day 5: Doesn't he have intern-elves for this?

module AOC2015.Day05 where

import Data.List

adjacents :: String -> [(Char,Char)]
adjacents str = zip str $ tail str

extractEqPairs :: Eq a => [(a,a)] -> [(a,a)]
extractEqPairs adj = [(x,y) | (x,y) <- adj, x == y]

nice :: String -> Bool
nice str = hasAllProps
  where hasAllProps = a && b && c
        a = length (fst $ partition (`elem` "aeiou") str) >= 3
        b = not $ null (extractEqPairs $ adjacents str)
        c = not $ or ["ab" `isInfixOf` str, "cd" `isInfixOf` str
                     ,"pq" `isInfixOf` str, "xy" `isInfixOf` str]

-- check if the first two list elements appear again, wholly and intact,
-- at least once in the tail of the list
pairOccurs :: Eq a => [a] -> Bool
pairOccurs []        = False
pairOccurs (x:y:xys) = [x,y] `isInfixOf` xys
pairOccurs _         = False

-- now check for every pair in a list if it appears more than once
pairsOccur :: Eq a => [a] -> Bool
pairsOccur s = or $ pairOccurs <$> tails s

repeatsBetween :: String -> Int
repeatsBetween [] = 0
repeatsBetween (x:y:xys)
  | null xys      = repeatsBetween xys
  | x == head xys = 1 + repeatsBetween (y:xys)
  | otherwise     = repeatsBetween (y:xys)
repeatsBetween _ = 0

nice' :: String -> Bool
nice' str = hasAllProps
  where hasAllProps = a && b
        a = pairsOccur str
        b = repeatsBetween str >= 1

count :: Eq a => a -> [a] -> Int
count el list = length $ filter (==el) list

answers :: IO ()
answers = do
  i <- readFile "inputs/is2015/day05-input.txt"
  let ils = lines i
  putStrLn "-- Advent of Code 2015, Day 05 --"
  let checks = fmap nice ils
  let countPassed = count True checks
  putStrLn $ "Part One: " ++ show countPassed
  let checks' = fmap nice' ils
  let countPassed' = count True checks'
  putStrLn $ "Part Two: " ++ show countPassed'
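
-- A quick sanity check on a few sample strings in the spirit of the puzzle's
-- examples (the strings are illustrative only): "aaa" has three vowels and a
-- doubled letter, so it is nice; "haegwjzuvuyypxyu" contains the forbidden
-- "xy", so it is not.  "xxyxx" has a non-overlapping repeated pair and a
-- letter repeating with one letter between, so it satisfies nice'; "aaa" only
-- has overlapping pairs, so it does not.
checkExamples :: IO ()
checkExamples = do
  print (map nice  ["aaa", "haegwjzuvuyypxyu"])   -- expected: [True,False]
  print (map nice' ["xxyxx", "aaa"])              -- expected: [True,False]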
bitrauser/aoc
src/AOC2015/Day05.hs
bsd-3-clause
1,793
0
12
410
691
359
332
44
1
{-# LANGUAGE DeriveGeneric, DeriveDataTypeable, GeneralizedNewtypeDeriving #-} -- | Handling project configuration, types. -- module Distribution.Client.ProjectConfig.Types ( -- * Types for project config ProjectConfig(..), ProjectConfigBuildOnly(..), ProjectConfigShared(..), ProjectConfigProvenance(..), PackageConfig(..), -- * Resolving configuration SolverSettings(..), BuildTimeSettings(..), -- * Extra useful Monoids MapLast(..), MapMappend(..), ) where import Distribution.Client.Types ( RemoteRepo, AllowNewer(..), AllowOlder(..) ) import Distribution.Client.Dependency.Types ( PreSolver ) import Distribution.Client.Targets ( UserConstraint ) import Distribution.Client.BuildReports.Types ( ReportLevel(..) ) import Distribution.Client.IndexUtils.Timestamp ( IndexState ) import Distribution.Solver.Types.Settings import Distribution.Solver.Types.ConstraintSource import Distribution.Package ( PackageName, PackageId, UnitId ) import Distribution.Types.Dependency import Distribution.Version ( Version ) import Distribution.System ( Platform ) import Distribution.PackageDescription ( FlagAssignment, SourceRepo(..) ) import Distribution.Simple.Compiler ( Compiler, CompilerFlavor , OptimisationLevel(..), ProfDetailLevel, DebugInfoLevel(..) ) import Distribution.Simple.Setup ( Flag, HaddockTarget(..) ) import Distribution.Simple.InstallDirs ( PathTemplate ) import Distribution.Utils.NubList ( NubList ) import Distribution.Verbosity ( Verbosity ) import Data.Map (Map) import qualified Data.Map as Map import Data.Set (Set) import Distribution.Compat.Binary (Binary) import Distribution.Compat.Semigroup import GHC.Generics (Generic) import Data.Typeable ------------------------------- -- Project config types -- -- | This type corresponds directly to what can be written in the -- @cabal.project@ file. Other sources of configuration can also be injected -- into this type, such as the user-wide @~/.cabal/config@ file and the -- command line of @cabal configure@ or @cabal build@. -- -- Since it corresponds to the external project file it is an instance of -- 'Monoid' and all the fields can be empty. This also means there has to -- be a step where we resolve configuration. At a minimum resolving means -- applying defaults but it can also mean merging information from multiple -- sources. For example for package-specific configuration the project file -- can specify configuration that applies to all local packages, and then -- additional configuration for a specific package. -- -- Future directions: multiple profiles, conditionals. If we add these -- features then the gap between configuration as written in the config file -- and resolved settings we actually use will become even bigger. -- data ProjectConfig = ProjectConfig { -- | Packages in this project, including local dirs, local .cabal files -- local and remote tarballs. When these are file globs, they must -- match at least one package. projectPackages :: [String], -- | Like 'projectConfigPackageGlobs' but /optional/ in the sense that -- file globs are allowed to match nothing. The primary use case for -- this is to be able to say @optional-packages: */@ to automagically -- pick up deps that we unpack locally without erroring when -- there aren't any. projectPackagesOptional :: [String], -- | Packages in this project from remote source repositories. projectPackagesRepo :: [SourceRepo], -- | Packages in this project from hackage repositories. 
projectPackagesNamed :: [Dependency], -- See respective types for an explanation of what these -- values are about: projectConfigBuildOnly :: ProjectConfigBuildOnly, projectConfigShared :: ProjectConfigShared, projectConfigProvenance :: Set ProjectConfigProvenance, -- | Configuration to be applied to *local* packages; i.e., -- any packages which are explicitly named in `cabal.project`. projectConfigLocalPackages :: PackageConfig, projectConfigSpecificPackage :: MapMappend PackageName PackageConfig } deriving (Eq, Show, Generic, Typeable) -- | That part of the project configuration that only affects /how/ we build -- and not the /value/ of the things we build. This means this information -- does not need to be tracked for changes since it does not affect the -- outcome. -- data ProjectConfigBuildOnly = ProjectConfigBuildOnly { projectConfigVerbosity :: Flag Verbosity, projectConfigDryRun :: Flag Bool, projectConfigOnlyDeps :: Flag Bool, projectConfigSummaryFile :: NubList PathTemplate, projectConfigLogFile :: Flag PathTemplate, projectConfigBuildReports :: Flag ReportLevel, projectConfigReportPlanningFailure :: Flag Bool, projectConfigSymlinkBinDir :: Flag FilePath, projectConfigOneShot :: Flag Bool, projectConfigNumJobs :: Flag (Maybe Int), projectConfigKeepGoing :: Flag Bool, projectConfigOfflineMode :: Flag Bool, projectConfigKeepTempFiles :: Flag Bool, projectConfigHttpTransport :: Flag String, projectConfigIgnoreExpiry :: Flag Bool, projectConfigCacheDir :: Flag FilePath, projectConfigLogsDir :: Flag FilePath } deriving (Eq, Show, Generic) -- | Project configuration that is shared between all packages in the project. -- In particular this includes configuration that affects the solver. -- data ProjectConfigShared = ProjectConfigShared { projectConfigDistDir :: Flag FilePath, projectConfigConfigFile :: Flag FilePath, projectConfigProjectFile :: Flag FilePath, projectConfigHcFlavor :: Flag CompilerFlavor, projectConfigHcPath :: Flag FilePath, projectConfigHcPkg :: Flag FilePath, projectConfigHaddockIndex :: Flag PathTemplate, -- Things that only make sense for manual mode, not --local mode -- too much control! --projectConfigUserInstall :: Flag Bool, --projectConfigInstallDirs :: InstallDirs (Flag PathTemplate), --TODO: [required eventually] decide what to do with InstallDirs -- currently we don't allow it to be specified in the config file --projectConfigPackageDBs :: [Maybe PackageDB], -- configuration used both by the solver and other phases projectConfigRemoteRepos :: NubList RemoteRepo, -- ^ Available Hackage servers. projectConfigLocalRepos :: NubList FilePath, projectConfigIndexState :: Flag IndexState, -- solver configuration projectConfigConstraints :: [(UserConstraint, ConstraintSource)], projectConfigPreferences :: [Dependency], projectConfigCabalVersion :: Flag Version, --TODO: [required eventually] unused projectConfigSolver :: Flag PreSolver, projectConfigAllowOlder :: Maybe AllowOlder, projectConfigAllowNewer :: Maybe AllowNewer, projectConfigMaxBackjumps :: Flag Int, projectConfigReorderGoals :: Flag ReorderGoals, projectConfigCountConflicts :: Flag CountConflicts, projectConfigStrongFlags :: Flag StrongFlags, projectConfigAllowBootLibInstalls :: Flag AllowBootLibInstalls, projectConfigPerComponent :: Flag Bool, projectConfigIndependentGoals :: Flag IndependentGoals -- More things that only make sense for manual mode, not --local mode -- too much control! 
--projectConfigShadowPkgs :: Flag Bool, --projectConfigReinstall :: Flag Bool, --projectConfigAvoidReinstalls :: Flag Bool, --projectConfigOverrideReinstall :: Flag Bool, --projectConfigUpgradeDeps :: Flag Bool } deriving (Eq, Show, Generic) -- | Specifies the provenance of project configuration, whether defaults were -- used or if the configuration was read from an explicit file path. data ProjectConfigProvenance -- | The configuration is implicit due to no explicit configuration -- being found. See 'Distribution.Client.ProjectConfig.readProjectConfig' -- for how implicit configuration is determined. = Implicit -- | The path the project configuration was explicitly read from. -- | The configuration was explicitly read from the specified 'FilePath'. | Explicit FilePath deriving (Eq, Ord, Show, Generic) -- | Project configuration that is specific to each package, that is where we -- can in principle have different values for different packages in the same -- project. -- data PackageConfig = PackageConfig { packageConfigProgramPaths :: MapLast String FilePath, packageConfigProgramArgs :: MapMappend String [String], packageConfigProgramPathExtra :: NubList FilePath, packageConfigFlagAssignment :: FlagAssignment, packageConfigVanillaLib :: Flag Bool, packageConfigSharedLib :: Flag Bool, packageConfigStaticLib :: Flag Bool, packageConfigDynExe :: Flag Bool, packageConfigProf :: Flag Bool, --TODO: [code cleanup] sort out packageConfigProfLib :: Flag Bool, -- this duplication packageConfigProfExe :: Flag Bool, -- and consistency packageConfigProfDetail :: Flag ProfDetailLevel, packageConfigProfLibDetail :: Flag ProfDetailLevel, packageConfigConfigureArgs :: [String], packageConfigOptimization :: Flag OptimisationLevel, packageConfigProgPrefix :: Flag PathTemplate, packageConfigProgSuffix :: Flag PathTemplate, packageConfigExtraLibDirs :: [FilePath], packageConfigExtraFrameworkDirs :: [FilePath], packageConfigExtraIncludeDirs :: [FilePath], packageConfigGHCiLib :: Flag Bool, packageConfigSplitObjs :: Flag Bool, packageConfigStripExes :: Flag Bool, packageConfigStripLibs :: Flag Bool, packageConfigTests :: Flag Bool, packageConfigBenchmarks :: Flag Bool, packageConfigCoverage :: Flag Bool, packageConfigRelocatable :: Flag Bool, packageConfigDebugInfo :: Flag DebugInfoLevel, packageConfigRunTests :: Flag Bool, --TODO: [required eventually] use this packageConfigDocumentation :: Flag Bool, --TODO: [required eventually] use this packageConfigHaddockHoogle :: Flag Bool, --TODO: [required eventually] use this packageConfigHaddockHtml :: Flag Bool, --TODO: [required eventually] use this packageConfigHaddockHtmlLocation :: Flag String, --TODO: [required eventually] use this packageConfigHaddockForeignLibs :: Flag Bool, --TODO: [required eventually] use this packageConfigHaddockExecutables :: Flag Bool, --TODO: [required eventually] use this packageConfigHaddockTestSuites :: Flag Bool, --TODO: [required eventually] use this packageConfigHaddockBenchmarks :: Flag Bool, --TODO: [required eventually] use this packageConfigHaddockInternal :: Flag Bool, --TODO: [required eventually] use this packageConfigHaddockCss :: Flag FilePath, --TODO: [required eventually] use this packageConfigHaddockHscolour :: Flag Bool, --TODO: [required eventually] use this packageConfigHaddockHscolourCss :: Flag FilePath, --TODO: [required eventually] use this packageConfigHaddockContents :: Flag PathTemplate, --TODO: [required eventually] use this packageConfigHaddockForHackage :: Flag HaddockTarget } deriving (Eq, Show, Generic) 
instance Binary ProjectConfig instance Binary ProjectConfigBuildOnly instance Binary ProjectConfigShared instance Binary ProjectConfigProvenance instance Binary PackageConfig -- | Newtype wrapper for 'Map' that provides a 'Monoid' instance that takes -- the last value rather than the first value for overlapping keys. newtype MapLast k v = MapLast { getMapLast :: Map k v } deriving (Eq, Show, Functor, Generic, Binary, Typeable) instance Ord k => Monoid (MapLast k v) where mempty = MapLast Map.empty mappend = (<>) instance Ord k => Semigroup (MapLast k v) where MapLast a <> MapLast b = MapLast $ Map.union b a -- rather than Map.union which is the normal Map monoid instance -- | Newtype wrapper for 'Map' that provides a 'Monoid' instance that -- 'mappend's values of overlapping keys rather than taking the first. newtype MapMappend k v = MapMappend { getMapMappend :: Map k v } deriving (Eq, Show, Functor, Generic, Binary, Typeable) instance (Semigroup v, Ord k) => Monoid (MapMappend k v) where mempty = MapMappend Map.empty mappend = (<>) instance (Semigroup v, Ord k) => Semigroup (MapMappend k v) where MapMappend a <> MapMappend b = MapMappend (Map.unionWith (<>) a b) -- rather than Map.union which is the normal Map monoid instance instance Monoid ProjectConfig where mempty = gmempty mappend = (<>) instance Semigroup ProjectConfig where (<>) = gmappend instance Monoid ProjectConfigBuildOnly where mempty = gmempty mappend = (<>) instance Semigroup ProjectConfigBuildOnly where (<>) = gmappend instance Monoid ProjectConfigShared where mempty = gmempty mappend = (<>) instance Semigroup ProjectConfigShared where (<>) = gmappend instance Monoid PackageConfig where mempty = gmempty mappend = (<>) instance Semigroup PackageConfig where (<>) = gmappend ---------------------------------------- -- Resolving configuration to settings -- -- | Resolved configuration for the solver. The idea is that this is easier to -- use than the raw configuration because in the raw configuration everything -- is optional (monoidial). In the 'BuildTimeSettings' every field is filled -- in, if only with the defaults. -- -- Use 'resolveSolverSettings' to make one from the project config (by -- applying defaults etc). -- data SolverSettings = SolverSettings { solverSettingRemoteRepos :: [RemoteRepo], -- ^ Available Hackage servers. solverSettingLocalRepos :: [FilePath], solverSettingConstraints :: [(UserConstraint, ConstraintSource)], solverSettingPreferences :: [Dependency], solverSettingFlagAssignment :: FlagAssignment, -- ^ For all local packages solverSettingFlagAssignments :: Map PackageName FlagAssignment, solverSettingCabalVersion :: Maybe Version, --TODO: [required eventually] unused solverSettingSolver :: PreSolver, solverSettingAllowOlder :: AllowOlder, solverSettingAllowNewer :: AllowNewer, solverSettingMaxBackjumps :: Maybe Int, solverSettingReorderGoals :: ReorderGoals, solverSettingCountConflicts :: CountConflicts, solverSettingStrongFlags :: StrongFlags, solverSettingAllowBootLibInstalls :: AllowBootLibInstalls, solverSettingIndexState :: Maybe IndexState, solverSettingIndependentGoals :: IndependentGoals -- Things that only make sense for manual mode, not --local mode -- too much control! 
--solverSettingShadowPkgs :: Bool, --solverSettingReinstall :: Bool, --solverSettingAvoidReinstalls :: Bool, --solverSettingOverrideReinstall :: Bool, --solverSettingUpgradeDeps :: Bool } deriving (Eq, Show, Generic, Typeable) instance Binary SolverSettings -- | Resolved configuration for things that affect how we build and not the -- value of the things we build. The idea is that this is easier to use than -- the raw configuration because in the raw configuration everything is -- optional (monoidial). In the 'BuildTimeSettings' every field is filled in, -- if only with the defaults. -- -- Use 'resolveBuildTimeSettings' to make one from the project config (by -- applying defaults etc). -- data BuildTimeSettings = BuildTimeSettings { buildSettingDryRun :: Bool, buildSettingOnlyDeps :: Bool, buildSettingSummaryFile :: [PathTemplate], buildSettingLogFile :: Maybe (Compiler -> Platform -> PackageId -> UnitId -> FilePath), buildSettingLogVerbosity :: Verbosity, buildSettingBuildReports :: ReportLevel, buildSettingReportPlanningFailure :: Bool, buildSettingSymlinkBinDir :: [FilePath], buildSettingOneShot :: Bool, buildSettingNumJobs :: Int, buildSettingKeepGoing :: Bool, buildSettingOfflineMode :: Bool, buildSettingKeepTempFiles :: Bool, buildSettingRemoteRepos :: [RemoteRepo], buildSettingLocalRepos :: [FilePath], buildSettingCacheDir :: FilePath, buildSettingHttpTransport :: Maybe String, buildSettingIgnoreExpiry :: Bool }
themoritz/cabal
cabal-install/Distribution/Client/ProjectConfig/Types.hs
bsd-3-clause
17,779
0
14
4,507
2,267
1,381
886
241
0
import System.Environment(getArgs,getProgName)
import System.Console.GetOpt(getOpt,ArgOrder(..),OptDescr(..),ArgDescr(..),usageInfo)
import System.FilePath.Posix((</>),(<.>),isAbsolute,takeFileName,dropTrailingPathSeparator)
import Data.Time(getCurrentTime,diffUTCTime,addUTCTime)
import Data.List(intercalate)
import Freedesktop.Trash(TrashFile(..),trashGetOrphans,getTrashPaths,trashGetFiles,trashRestore,expungeTrash,moveToTrash)
import Control.Monad(when)
import System.Exit(exitSuccess)
import Paths_fdo_trash(version)
import Data.Version(showVersion)
import System.Directory(createDirectoryIfMissing,canonicalizePath)
import Text.Parsec(parse,many1,(<|>),char,oneOf,eof,option,digit,ParseError)

minSecs   = 60
hourSecs  = 60 *minSecs
daySecs   = 24 *hourSecs
monthSecs = 30 *daySecs
yearSecs  = 365*daySecs

timeOffsetString = do
    sign <- (char '-' >> return (-1)) <|> (option 1 (char '+' >> return 1))
    num  <- option 1 (fmap read $ many1 digit)
    mult <- option 'd' (oneOf "SMHdmy")
    let multNum = case mult of
            'S' -> 1
            'M' -> minSecs
            'H' -> hourSecs
            'd' -> daySecs
            'm' -> monthSecs
            'y' -> yearSecs
            _   -> undefined
    eof
    return $ sign*num*multNum

printVersion = fmap (++ '-' : showVersion version) getProgName >>= putStrLn >> exitSuccess

castFloat :: (Real a, Fractional b) => a -> b
castFloat = fromRational.toRational

actions =
    [ ("purge", fdoPurge)
    , ("rm",    fdoRm)
    , ("unrm",  fdoUnRm)
    ]

--compilerOpts :: [String] -> IO (Options, [String])
parseOpts defaultOptions options exe argv =
    case getOpt Permute options argv of
        (o,n,[]  ) -> return (foldl (flip id) defaultOptions o, n)
        (_,_,errs) -> ioError (userError (concat errs ++ usageInfo header options))
    where header = "Usage: " ++ exe ++ " [OPTION...] parameters..."

--Rm
data RmOptions = RmOptions
    { rmTimeOffset :: Either ParseError Double
    , rmVersion    :: Bool
    , rmHelp       :: Bool
    , rmTrash      :: Maybe String
    } deriving(Show)

rmDefaults = RmOptions
    { rmTimeOffset = Right 0
    , rmVersion    = False
    , rmHelp       = False
    , rmTrash      = Nothing
    }

rmOptions =
    [ Option ['V'] ["version"] (NoArg (\opts -> opts{rmVersion=True})) "Show version number"
    , Option ['h'] ["help"] (NoArg (\opts -> opts{rmHelp=True})) "Print help"
    , Option ['t'] ["time-offset"]
        (ReqArg (\offset opts -> opts{rmTimeOffset=parse timeOffsetString "" offset}) "offset")
        "Specify time offset suffixes ymdHMS supported, default: 0d"
    , Option ['T'] ["trash-path"]
        (ReqArg (\path opts -> opts{rmTrash=Just path}) "path")
        "Override Trash path autodetection."
    ]

doRm time iPath fPath fileName = do
    absFile <- canonicalizePath fileName
    moveToTrash $ TrashFile
        (iPath </> takeFileName fileName <.> "trashinfo")
        (fPath </> takeFileName fileName)
        absFile time 0

fdoRm args = do
    (myOpts, realArgs) <- parseOpts rmDefaults rmOptions "fdo-rm" args
    when (rmVersion myOpts) printVersion
    when (rmHelp myOpts) $
        putStrLn (usageInfo "Usage: fdo-rm [OPTION...] parameters..." rmOptions) >> exitSuccess
    (iPath,fPath) <- maybe getTrashPaths (\p -> return (p </> "info", p </> "files")) (rmTrash myOpts)
    createDirectoryIfMissing True iPath
    createDirectoryIfMissing True fPath
    timeOffset <- either (\x -> ioError (userError $ "Invalid time format" ++ show x))
                         (\x -> return x) (rmTimeOffset myOpts)
    time <- fmap (addUTCTime $ castFloat timeOffset) getCurrentTime
    mapM_ (doRm time iPath fPath) (map dropTrailingPathSeparator realArgs)

--Purge
data PurgeOptions = PurgeOptions
    { purgeThreshold :: Double
    , purgeAgePow    :: Double
    , purgeSizePow   :: Double
    , purgeVersion   :: Bool
    , purgeHelp      :: Bool
    , purgeTrash     :: Maybe String
    } deriving(Show)

purgeDefaults = PurgeOptions
    { purgeThreshold = 10**6
    , purgeAgePow    = 1
    , purgeSizePow   = 0.1
    , purgeHelp      = False
    , purgeVersion   = False
    , purgeTrash     = Nothing
    }

purgeOptions =
    [ Option ['V'] ["version"] (NoArg (\opts -> opts{purgeVersion=True})) "Show version number"
    , Option ['h'] ["help"] (NoArg (\opts -> opts{purgeHelp=True})) "Print help"
    , Option ['a'] ["age"]
        (ReqArg (\secs opts -> opts{purgeThreshold=read secs}) "secs")
        ("Specify maximum file age default: " ++ (show $ purgeThreshold purgeDefaults))
    , Option ['A'] ["age-power"]
        (ReqArg (\pow opts -> opts{purgeAgePow=read pow}) "pow")
        ("Specify age power for threshold formula size^sizepow*age^agepow, default: " ++ (show $ purgeAgePow purgeDefaults))
    , Option ['S'] ["size-power"]
        (ReqArg (\pow opts -> opts{purgeSizePow=read pow}) "pow")
        ("Specify size power for threshold formula size^sizepow*age^agepow, default: " ++ (show $ purgeSizePow purgeDefaults))
    , Option ['T'] ["trash-path"]
        (ReqArg (\path opts -> opts{purgeTrash=Just path}) "path")
        "Override Trash path autodetection."
    ]

fdoPurge args = do
    (myOpts, _) <- parseOpts purgeDefaults purgeOptions "fdo-purge" args
    when (purgeVersion myOpts) printVersion
    when (purgeHelp myOpts) $
        putStrLn (usageInfo "Usage: fdo-purge [OPTION...] parameters..." purgeOptions) >> exitSuccess
    (iPath,fPath) <- maybe getTrashPaths (\p -> return (p </> "info", p </> "files")) (purgeTrash myOpts)
    createDirectoryIfMissing True iPath
    createDirectoryIfMissing True fPath
    now <- getCurrentTime
    (iExtra,dExtra) <- trashGetOrphans iPath fPath
    ayx <- fmap (filter (\x ->
                   (max 0 $ castFloat $ diffUTCTime now $ deleteTime x)**(purgeAgePow myOpts)
                 * (max 1 $ fromIntegral $ totalSize x)**(purgeSizePow myOpts)
                 > purgeThreshold myOpts)) $
           trashGetFiles iPath fPath
    when (not$null iExtra) $ putStrLn "Orphan files detected:\n" >> print iExtra
    when (not$null dExtra) $ putStrLn "Orphan files detected:\n" >> print dExtra
    mapM_ expungeTrash ayx

--Unrm
data UnRmOptions = UnRmOptions
    { unRmOrigDir :: Bool
    , unRmVersion :: Bool
    , unRmHelp    :: Bool
    , unRmOutFile :: Maybe String
    , unRmSelect  :: Maybe Int
    , unRmTrash   :: Maybe String
    } deriving(Show)

unRmDefaults = UnRmOptions
    { unRmOrigDir = False
    , unRmHelp    = False
    , unRmVersion = False
    , unRmOutFile = Nothing
    , unRmSelect  = Nothing
    , unRmTrash   = Nothing
    }

unRmOptions =
    [ Option ['V'] ["version"] (NoArg (\opts -> opts{unRmVersion=True})) "Show version number"
    , Option ['h'] ["help"] (NoArg (\opts -> opts{unRmHelp=True})) "Print help"
    , Option ['O'] ["original-name"]
        (NoArg (\opts -> opts{unRmOrigDir=True}))
        "output file to original path, default: ., conflicts with -o"
    , Option ['o'] ["output-file"]
        (ReqArg (\out opts -> opts{unRmOutFile=Just out}) "filepath")
        "Specify output file, conflicts with -O"
    , Option ['s'] ["select"]
        (ReqArg (\index opts -> opts{unRmSelect=Just $ read index}) "index")
        "Select file with index if multiple files match"
    , Option ['T'] ["trash-path"]
        (ReqArg (\path opts -> opts{unRmTrash=Just path}) "path")
        "Override Trash path autodetection."
    ]

doRestore file opts saveFile =
    maybe
        (if (unRmOrigDir opts) then trashRestore file Nothing else trashRestore file (Just saveFile))
        (\out -> trashRestore file (Just out))
        (unRmOutFile opts)

doUnRm files opts saveFile = do
    case (length files') of
        0 -> putStrLn $ "No such file: " ++ saveFile
        1 -> doRestore (head files') opts saveFile
        _ -> maybe
                 (putStrLn $ "Multiple matches:\n" ++ unlines
                     (zipWith (++) (map (\x -> show x ++ ": ") [(0::Int)..]) (map origPath files')))
                 (\index -> if (index < length files' && index >= 0)
                     then doRestore (files' !! index) opts saveFile
                     else putStrLn $ "Index " ++ show index ++ " out of bounds!")
                 (unRmSelect opts)
    where files' = if (isAbsolute saveFile)
                       then filter (\x -> origPath x == saveFile) files
                       else filter (\x -> takeFileName (origPath x) == takeFileName saveFile) files

fdoUnRm args = do
    (myOpts, realArgs) <- parseOpts unRmDefaults unRmOptions "fdo-unrm" args
    when (unRmVersion myOpts) printVersion
    when (unRmHelp myOpts) $
        putStrLn (usageInfo "Usage: fdo-unrm [OPTION...] parameters..." unRmOptions) >> exitSuccess
    (iPath,fPath) <- maybe getTrashPaths (\p -> return (p </> "info", p </> "files")) (unRmTrash myOpts)
    createDirectoryIfMissing True iPath
    createDirectoryIfMissing True fPath
    files <- trashGetFiles iPath fPath
    mapM_ (doUnRm files myOpts) realArgs

--Main
main :: IO ()
main = do
    args <- getArgs
    exe  <- getProgName
    let actionsStr = intercalate "|" $ map fst actions
        thisAction = maybe
            ( if (null args)
                  then Nothing
                  else maybe (Nothing) (\x -> Just (tail args, x)) (lookup (args !! 0) actions)
            )
            (\x -> Just (args,x))
            (lookup (drop 4 exe) actions)
    maybe
        (putStrLn $ "No action specified\nUsage: " ++ exe ++ " <" ++ actionsStr ++ "> params")
        (\(a,f) -> f a)
        thisAction
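-- Worked example (illustrative sketch; it follows from timeOffsetString
-- above, with the Double result type forced by rmTimeOffset):
-- an optional sign, an optional count and a unit suffix are parsed, so
--
--   parse timeOffsetString "" "-2d"
--
-- yields Right (-172800), i.e. two days expressed in seconds, negated.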
jkarlson/fdo-trash
fdo-trash.hs
bsd-3-clause
9,500
46
22
2,370
3,181
1,657
1,524
215
7
{-# LANGUAGE PatternSynonyms #-}
--------------------------------------------------------------------------------
-- |
-- Module      :  Graphics.GL.ARB.FramebufferObjectCore
-- Copyright   :  (c) Sven Panne 2019
-- License     :  BSD3
--
-- Maintainer  :  Sven Panne <[email protected]>
-- Stability   :  stable
-- Portability :  portable
--
--------------------------------------------------------------------------------

module Graphics.GL.ARB.FramebufferObjectCore (
  -- * Extension Support
  glGetARBFramebufferObject,
  gl_ARB_framebuffer_object,
  -- * Enums
  pattern GL_COLOR_ATTACHMENT0,
  pattern GL_COLOR_ATTACHMENT1,
  pattern GL_COLOR_ATTACHMENT10,
  pattern GL_COLOR_ATTACHMENT11,
  pattern GL_COLOR_ATTACHMENT12,
  pattern GL_COLOR_ATTACHMENT13,
  pattern GL_COLOR_ATTACHMENT14,
  pattern GL_COLOR_ATTACHMENT15,
  pattern GL_COLOR_ATTACHMENT2,
  pattern GL_COLOR_ATTACHMENT3,
  pattern GL_COLOR_ATTACHMENT4,
  pattern GL_COLOR_ATTACHMENT5,
  pattern GL_COLOR_ATTACHMENT6,
  pattern GL_COLOR_ATTACHMENT7,
  pattern GL_COLOR_ATTACHMENT8,
  pattern GL_COLOR_ATTACHMENT9,
  pattern GL_DEPTH24_STENCIL8,
  pattern GL_DEPTH_ATTACHMENT,
  pattern GL_DEPTH_STENCIL,
  pattern GL_DEPTH_STENCIL_ATTACHMENT,
  pattern GL_DRAW_FRAMEBUFFER,
  pattern GL_DRAW_FRAMEBUFFER_BINDING,
  pattern GL_FRAMEBUFFER,
  pattern GL_FRAMEBUFFER_ATTACHMENT_ALPHA_SIZE,
  pattern GL_FRAMEBUFFER_ATTACHMENT_BLUE_SIZE,
  pattern GL_FRAMEBUFFER_ATTACHMENT_COLOR_ENCODING,
  pattern GL_FRAMEBUFFER_ATTACHMENT_COMPONENT_TYPE,
  pattern GL_FRAMEBUFFER_ATTACHMENT_DEPTH_SIZE,
  pattern GL_FRAMEBUFFER_ATTACHMENT_GREEN_SIZE,
  pattern GL_FRAMEBUFFER_ATTACHMENT_OBJECT_NAME,
  pattern GL_FRAMEBUFFER_ATTACHMENT_OBJECT_TYPE,
  pattern GL_FRAMEBUFFER_ATTACHMENT_RED_SIZE,
  pattern GL_FRAMEBUFFER_ATTACHMENT_STENCIL_SIZE,
  pattern GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_CUBE_MAP_FACE,
  pattern GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LAYER,
  pattern GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LEVEL,
  pattern GL_FRAMEBUFFER_BINDING,
  pattern GL_FRAMEBUFFER_COMPLETE,
  pattern GL_FRAMEBUFFER_DEFAULT,
  pattern GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT,
  pattern GL_FRAMEBUFFER_INCOMPLETE_DRAW_BUFFER,
  pattern GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT,
  pattern GL_FRAMEBUFFER_INCOMPLETE_MULTISAMPLE,
  pattern GL_FRAMEBUFFER_INCOMPLETE_READ_BUFFER,
  pattern GL_FRAMEBUFFER_UNDEFINED,
  pattern GL_FRAMEBUFFER_UNSUPPORTED,
  pattern GL_INVALID_FRAMEBUFFER_OPERATION,
  pattern GL_MAX_COLOR_ATTACHMENTS,
  pattern GL_MAX_RENDERBUFFER_SIZE,
  pattern GL_MAX_SAMPLES,
  pattern GL_READ_FRAMEBUFFER,
  pattern GL_READ_FRAMEBUFFER_BINDING,
  pattern GL_RENDERBUFFER,
  pattern GL_RENDERBUFFER_ALPHA_SIZE,
  pattern GL_RENDERBUFFER_BINDING,
  pattern GL_RENDERBUFFER_BLUE_SIZE,
  pattern GL_RENDERBUFFER_DEPTH_SIZE,
  pattern GL_RENDERBUFFER_GREEN_SIZE,
  pattern GL_RENDERBUFFER_HEIGHT,
  pattern GL_RENDERBUFFER_INTERNAL_FORMAT,
  pattern GL_RENDERBUFFER_RED_SIZE,
  pattern GL_RENDERBUFFER_SAMPLES,
  pattern GL_RENDERBUFFER_STENCIL_SIZE,
  pattern GL_RENDERBUFFER_WIDTH,
  pattern GL_STENCIL_ATTACHMENT,
  pattern GL_STENCIL_INDEX1,
  pattern GL_STENCIL_INDEX16,
  pattern GL_STENCIL_INDEX4,
  pattern GL_STENCIL_INDEX8,
  pattern GL_TEXTURE_STENCIL_SIZE,
  pattern GL_UNSIGNED_INT_24_8,
  pattern GL_UNSIGNED_NORMALIZED,
  -- * Functions
  glBindFramebuffer,
  glBindRenderbuffer,
  glBlitFramebuffer,
  glCheckFramebufferStatus,
  glDeleteFramebuffers,
  glDeleteRenderbuffers,
  glFramebufferRenderbuffer,
  glFramebufferTexture1D,
  glFramebufferTexture2D,
  glFramebufferTexture3D,
  glFramebufferTextureLayer,
  glGenFramebuffers,
  glGenRenderbuffers,
  glGenerateMipmap,
  glGetFramebufferAttachmentParameteriv,
  glGetRenderbufferParameteriv,
  glIsFramebuffer,
  glIsRenderbuffer,
  glRenderbufferStorage,
  glRenderbufferStorageMultisample
) where

import Graphics.GL.ExtensionPredicates
import Graphics.GL.Tokens
import Graphics.GL.Functions
haskell-opengl/OpenGLRaw
src/Graphics/GL/ARB/FramebufferObjectCore.hs
bsd-3-clause
3,920
0
5
442
469
294
175
99
0
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE TemplateHaskell #-}

module Data.Geodetic.HasDoubles(
  HasDoubles(..)
) where

import Papa

class HasDoubles a where
  doubles :: Traversal' a Double

instance HasDoubles Double where
  doubles = id
NICTA/coordinate
src/Data/Geodetic/HasDoubles.hs
bsd-3-clause
256
0
7
49
53
31
22
11
0
import Data.Functor

main = do
  xs <- map read . words <$> getLine :: IO [Int]
  print $ maximum xs
ksoda/atCoder
a/2.hs
bsd-3-clause
101
1
10
25
51
23
28
4
1
module Experts where

import Data.List(mapAccumL, foldl', tails)

import Bit
import Histogram

class Expert a where
    predict     :: a -> Double
    updateState :: Bool -> a -> a
    updateModel :: Bool -> a -> a

    update :: Bool -> a -> a
    update newB = updateState newB . updateModel newB

data ConstantExpert = CE Double

instance Expert ConstantExpert where
    predict (CE p) = p
    updateState _ e = e
    updateModel _ e = e

simExpert :: (Expert a) => a -> [Bool] -> (a,[(Bool, Double)])
simExpert = mapAccumL f
  where
    f :: (Expert a) => a -> Bool -> (a, (Bool, Double))
    f expert b = (update b expert, (b, predict expert))

simTwoExpert :: (Expert a) => (a,a) -> [Bool] -> ((a,a),[(Bool, Double)])
simTwoExpert = mapAccumL f
  where
    f :: (Expert a) => (a,a) -> Bool -> ((a,a), (Bool, Double))
    f (e1, e2) b = ((update b e1, update b e2), (b, (predict e1 + predict e2) / 2))

getLoss :: Expert a => (Bool -> Double -> Double) -> a -> [Bool] -> Double
getLoss l e = computeLoss l . snd . simExpert e

computeLoss :: (Bool -> Double -> Double) -> [(Bool, Double)] -> Double
computeLoss f = sum . map (uncurry f)

----------------------------------

mapNth :: Int -> (a -> a) -> [a] -> [a]
mapNth _ _ [] = []
mapNth 0 f (x:xs) = f x : xs
mapNth n f (x:xs) = x : mapNth (n-1) f xs

simSideExpert :: (Expert a) => Int -> a -> [(Int, Bool)] -> ([a],[(Bool,Double)])
simSideExpert n expert = mapAccumL f (replicate n expert)
  where
    f :: (Expert a) => [a] -> (Int,Bool) -> ([a], (Bool, Double))
    f experts (side, b) =
        (map (updateState b) . mapNth side (updateModel b) $ experts,
         (b, predict (experts !! side)))

sideInfo :: Int -> [Bool] -> [Bool] -> [(Int, Bool)]
sideInfo n xs ys = zip (map (bitsToInt . take (n + 1)) (tails xs)) (drop n ys)

getSideLoss :: Expert a => (Bool -> Double -> Double) -> Int -> a -> [Bool] -> [Bool] -> Double
getSideLoss l n e side = computeLoss l . snd . simSideExpert (2 ^ (n + 1)) e . sideInfo n side

----------------------------------

logLoss :: Bool -> Double -> Double
logLoss True  = log
logLoss False = log . (1 -)

sqLoss :: Bool -> Double -> Double
sqLoss True  x = (x - 1) * (1 - x)
sqLoss False x = (-1) * x * x

----------------------------------

subseqs :: Int -> [a] -> [[a]]
subseqs n xs = snd $ mapAccumL f ys zs
  where
    (ys, zs) = splitAt n xs
    f [] x = ([], [x])
    f (y:ys) x = let zs = ys ++ [x] in (zs, y:zs)

markov :: Int -> [Bool] -> [Int]
markov n = treeHistogram . map bitsToInt . subseqs n
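-- Worked example (illustrative sketch; uses only the definitions above):
-- a constant expert that always predicts 0.5 incurs the same loss on
-- every bit, so
--
--   getLoss sqLoss (CE 0.5) [True, False, True]
--
-- evaluates to 3 * (-0.25) = -0.75, since sqLoss True 0.5 = -0.25 and
-- sqLoss False 0.5 = -0.25.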
cullina/Extractor
src/Experts.hs
bsd-3-clause
2,537
0
12
621
1,315
715
600
55
2
{-# LANGUAGE BangPatterns #-} {-# LANGUAGE DeriveFunctor #-} {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE ViewPatterns #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE LambdaCase #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE TupleSections #-} module Stack.Setup ( setupEnv , ensureGHC , SetupOpts (..) , defaultStackSetupYaml ) where import Control.Applicative import Control.Exception.Enclosed (catchIO, tryAny) import Control.Monad (liftM, when, join, void, unless) import Control.Monad.Catch import Control.Monad.IO.Class (MonadIO, liftIO) import Control.Monad.Logger import Control.Monad.Reader (MonadReader, ReaderT (..), asks) import Control.Monad.State (get, put, modify) import Control.Monad.Trans.Control import Crypto.Hash (SHA1(SHA1)) import Data.Aeson.Extended import Data.ByteString (ByteString) import qualified Data.ByteString as S import qualified Data.ByteString.Char8 as S8 import Data.Conduit (Conduit, ($$), (=$), await, yield, awaitForever) import Data.Conduit.Lift (evalStateC) import qualified Data.Conduit.List as CL import Data.Either import Data.Foldable hiding (concatMap, or) import Data.IORef import Data.IORef.RunOnce (runOnce) import Data.List hiding (concat, elem, maximumBy) import Data.Map (Map) import qualified Data.Map as Map import Data.Maybe import Data.Monoid import Data.Ord (comparing) import Data.Set (Set) import qualified Data.Set as Set import Data.Text (Text) import qualified Data.Text as T import qualified Data.Text.Encoding as T import qualified Data.Text.Encoding.Error as T import Data.Time.Clock (NominalDiffTime, diffUTCTime, getCurrentTime) import Data.Typeable (Typeable) import qualified Data.Yaml as Yaml import Distribution.System (OS, Arch (..), Platform (..)) import qualified Distribution.System as Cabal import Distribution.Text (simpleParse) import Network.HTTP.Client.Conduit import Network.HTTP.Download.Verified import Path import Path.IO import Prelude hiding (concat, elem) -- Fix AMP warning import Safe (headMay, readMay) import Stack.Types.Build import Stack.Config (resolvePackageEntry) import Stack.Constants (distRelativeDir) import Stack.Fetch import Stack.GhcPkg (createDatabase, getCabalPkgVer, getGlobalDB) import Stack.Solver (getCompilerVersion) import Stack.Types import Stack.Types.StackT import qualified System.Directory as D import System.Environment (getExecutablePath) import System.Exit (ExitCode (ExitSuccess)) import System.FilePath (searchPathSeparator) import qualified System.FilePath as FP import System.IO.Temp (withSystemTempDirectory) import System.Process (rawSystem) import System.Process.Read import System.Process.Run (runIn) import Text.Printf (printf) -- | Default location of the stack-setup.yaml file defaultStackSetupYaml :: String defaultStackSetupYaml = "https://raw.githubusercontent.com/fpco/stackage-content/master/stack/stack-setup-2.yaml" data SetupOpts = SetupOpts { soptsInstallIfMissing :: !Bool , soptsUseSystem :: !Bool , soptsWantedCompiler :: !CompilerVersion , soptsCompilerCheck :: !VersionCheck , soptsStackYaml :: !(Maybe (Path Abs File)) -- ^ If we got the desired GHC version from that file , soptsForceReinstall :: !Bool , soptsSanityCheck :: !Bool -- ^ Run a sanity check on the selected GHC , soptsSkipGhcCheck :: !Bool -- ^ Don't check for a compatible GHC version/architecture , soptsSkipMsys :: !Bool -- ^ Do not use a custom msys installation on Windows , soptsUpgradeCabal :: !Bool -- ^ Upgrade the 
global Cabal library in the database to the newest -- version. Only works reliably with a stack-managed installation. , soptsResolveMissingGHC :: !(Maybe Text) -- ^ Message shown to user for how to resolve the missing GHC , soptsStackSetupYaml :: !String } deriving Show data SetupException = UnsupportedSetupCombo OS Arch | MissingDependencies [String] | UnknownCompilerVersion Text CompilerVersion (Set Version) | UnknownOSKey Text | GHCSanityCheckCompileFailed ReadProcessException (Path Abs File) deriving Typeable instance Exception SetupException instance Show SetupException where show (UnsupportedSetupCombo os arch) = concat [ "I don't know how to install GHC for " , show (os, arch) , ", please install manually" ] show (MissingDependencies tools) = "The following executables are missing and must be installed: " ++ intercalate ", " tools show (UnknownCompilerVersion oskey wanted known) = concat [ "No information found for " , T.unpack (compilerVersionName wanted) , ".\nSupported versions for OS key '" ++ T.unpack oskey ++ "': " , intercalate ", " (map show $ Set.toList known) ] show (UnknownOSKey oskey) = "Unable to find installation URLs for OS key: " ++ T.unpack oskey show (GHCSanityCheckCompileFailed e ghc) = concat [ "The GHC located at " , toFilePath ghc , " failed to compile a sanity check. Please see:\n\n" , " https://github.com/commercialhaskell/stack/wiki/Downloads\n\n" , "for more information. Exception was:\n" , show e ] -- | Modify the environment variables (like PATH) appropriately, possibly doing installation too setupEnv :: (MonadIO m, MonadMask m, MonadLogger m, MonadReader env m, HasBuildConfig env, HasHttpManager env, MonadBaseControl IO m) => Maybe Text -- ^ Message to give user when necessary GHC is not available -> m EnvConfig setupEnv mResolveMissingGHC = do bconfig <- asks getBuildConfig let platform = getPlatform bconfig wc = whichCompiler (bcWantedCompiler bconfig) sopts = SetupOpts { soptsInstallIfMissing = configInstallGHC $ bcConfig bconfig , soptsUseSystem = configSystemGHC $ bcConfig bconfig , soptsWantedCompiler = bcWantedCompiler bconfig , soptsCompilerCheck = configCompilerCheck $ bcConfig bconfig , soptsStackYaml = Just $ bcStackYaml bconfig , soptsForceReinstall = False , soptsSanityCheck = False , soptsSkipGhcCheck = configSkipGHCCheck $ bcConfig bconfig , soptsSkipMsys = configSkipMsys $ bcConfig bconfig , soptsUpgradeCabal = False , soptsResolveMissingGHC = mResolveMissingGHC , soptsStackSetupYaml = defaultStackSetupYaml } mghcBin <- ensureGHC sopts -- Modify the initial environment to include the GHC path, if a local GHC -- is being used menv0 <- getMinimalEnvOverride let env = removeHaskellEnvVars $ augmentPathMap (maybe [] edBins mghcBin) $ unEnvOverride menv0 menv <- mkEnvOverride platform env compilerVer <- getCompilerVersion menv wc cabalVer <- getCabalPkgVer menv wc packages <- mapM (resolvePackageEntry menv (bcRoot bconfig)) (bcPackageEntries bconfig) let envConfig0 = EnvConfig { envConfigBuildConfig = bconfig , envConfigCabalVersion = cabalVer , envConfigCompilerVersion = compilerVer , envConfigPackages = Map.fromList $ concat packages } -- extra installation bin directories mkDirs <- runReaderT extraBinDirs envConfig0 let mpath = Map.lookup "PATH" env mkDirs' = map toFilePath . 
mkDirs depsPath = augmentPath (mkDirs' False) mpath localsPath = augmentPath (mkDirs' True) mpath deps <- runReaderT packageDatabaseDeps envConfig0 createDatabase menv wc deps localdb <- runReaderT packageDatabaseLocal envConfig0 createDatabase menv wc localdb globalDB <- getGlobalDB menv wc let mkGPP locals = T.pack $ intercalate [searchPathSeparator] $ concat [ [toFilePathNoTrailingSlash localdb | locals] , [toFilePathNoTrailingSlash deps] , [toFilePathNoTrailingSlash globalDB] ] distDir <- runReaderT distRelativeDir envConfig0 executablePath <- liftIO getExecutablePath utf8EnvVars <- getUtf8LocaleVars menv envRef <- liftIO $ newIORef Map.empty let getEnvOverride' es = do m <- readIORef envRef case Map.lookup es m of Just eo -> return eo Nothing -> do eo <- mkEnvOverride platform $ Map.insert "PATH" (if esIncludeLocals es then localsPath else depsPath) $ (if esIncludeGhcPackagePath es then Map.insert "GHC_PACKAGE_PATH" (mkGPP (esIncludeLocals es)) else id) $ (if esStackExe es then Map.insert "STACK_EXE" (T.pack executablePath) else id) $ (if esLocaleUtf8 es then Map.union utf8EnvVars else id) -- For reasoning and duplication, see: https://github.com/fpco/stack/issues/70 $ Map.insert "HASKELL_PACKAGE_SANDBOX" (T.pack $ toFilePathNoTrailingSlash deps) $ Map.insert "HASKELL_PACKAGE_SANDBOXES" (T.pack $ if esIncludeLocals es then intercalate [searchPathSeparator] [ toFilePathNoTrailingSlash localdb , toFilePathNoTrailingSlash deps , "" ] else intercalate [searchPathSeparator] [ toFilePathNoTrailingSlash deps , "" ]) $ Map.insert "HASKELL_DIST_DIR" (T.pack $ toFilePathNoTrailingSlash distDir) $ env !() <- atomicModifyIORef envRef $ \m' -> (Map.insert es eo m', ()) return eo return EnvConfig { envConfigBuildConfig = bconfig { bcConfig = maybe id addIncludeLib mghcBin (bcConfig bconfig) { configEnvOverride = getEnvOverride' } } , envConfigCabalVersion = cabalVer , envConfigCompilerVersion = compilerVer , envConfigPackages = envConfigPackages envConfig0 } -- | Add the include and lib paths to the given Config addIncludeLib :: ExtraDirs -> Config -> Config addIncludeLib (ExtraDirs _bins includes libs) config = config { configExtraIncludeDirs = Set.union (configExtraIncludeDirs config) (Set.fromList $ map T.pack includes) , configExtraLibDirs = Set.union (configExtraLibDirs config) (Set.fromList $ map T.pack libs) } data ExtraDirs = ExtraDirs { edBins :: ![FilePath] , edInclude :: ![FilePath] , edLib :: ![FilePath] } instance Monoid ExtraDirs where mempty = ExtraDirs [] [] [] mappend (ExtraDirs a b c) (ExtraDirs x y z) = ExtraDirs (a ++ x) (b ++ y) (c ++ z) -- | Ensure GHC is installed and provide the PATHs to add if necessary ensureGHC :: (MonadIO m, MonadMask m, MonadLogger m, MonadReader env m, HasConfig env, HasHttpManager env, MonadBaseControl IO m) => SetupOpts -> m (Maybe ExtraDirs) ensureGHC sopts = do let wc = whichCompiler (soptsWantedCompiler sopts) ghcVersion = case soptsWantedCompiler sopts of GhcVersion v -> v GhcjsVersion _ v -> v when (ghcVersion < $(mkVersion "7.8")) $ do $logWarn "stack will almost certainly fail with GHC below version 7.8" $logWarn "Valiantly attempting to run anyway, but I know this is doomed" $logWarn "For more information, see: https://github.com/commercialhaskell/stack/issues/648" $logWarn "" -- Check the available GHCs menv0 <- getMinimalEnvOverride msystem <- if soptsUseSystem sopts then getSystemCompiler menv0 wc else return Nothing Platform expectedArch _ <- asks getPlatform let needLocal = case msystem of Nothing -> True Just _ | soptsSkipGhcCheck sopts 
-> False Just (system, arch) -> not (isWanted system) || arch /= expectedArch isWanted = isWantedCompiler (soptsCompilerCheck sopts) (soptsWantedCompiler sopts) -- If we need to install a GHC, try to do so mpaths <- if needLocal then do getSetupInfo' <- runOnce (getSetupInfo sopts =<< asks getHttpManager) config <- asks getConfig installed <- runReaderT listInstalled config -- Install GHC ghcIdent <- case getInstalledTool installed $(mkPackageName "ghc") (isWanted . GhcVersion) of Just ident -> return ident Nothing | soptsInstallIfMissing sopts -> do si <- getSetupInfo' downloadAndInstallGHC menv0 si (soptsWantedCompiler sopts) (soptsCompilerCheck sopts) | otherwise -> do Platform arch _ <- asks getPlatform throwM $ CompilerVersionMismatch msystem (soptsWantedCompiler sopts, arch) (soptsCompilerCheck sopts) (soptsStackYaml sopts) (fromMaybe "Try running stack setup to locally install the correct GHC" $ soptsResolveMissingGHC sopts) -- Install msys2 on windows, if necessary mmsys2Ident <- case configPlatform config of Platform _ os | isWindows os && not (soptsSkipMsys sopts) -> case getInstalledTool installed $(mkPackageName "msys2") (const True) of Just ident -> return (Just ident) Nothing | soptsInstallIfMissing sopts -> do si <- getSetupInfo' osKey <- getOSKey menv0 VersionedDownloadInfo version info <- case Map.lookup osKey $ siMsys2 si of Just x -> return x Nothing -> error $ "MSYS2 not found for " ++ T.unpack osKey Just <$> downloadAndInstallTool si info $(mkPackageName "msys2") version (installMsys2Windows osKey) | otherwise -> do $logWarn "Continuing despite missing tool: msys2" return Nothing _ -> return Nothing let idents = catMaybes [Just ghcIdent, mmsys2Ident] paths <- runReaderT (mapM extraDirs idents) config return $ Just $ mconcat paths else return Nothing menv <- case mpaths of Nothing -> return menv0 Just ed -> do config <- asks getConfig let m0 = unEnvOverride menv0 path0 = Map.lookup "PATH" m0 path = augmentPath (edBins ed) path0 m = Map.insert "PATH" path m0 mkEnvOverride (configPlatform config) (removeHaskellEnvVars m) when (soptsUpgradeCabal sopts) $ do unless needLocal $ do $logWarn "Trying to upgrade Cabal library on a GHC not installed by stack." $logWarn "This may fail, caveat emptor!" upgradeCabal menv wc when (soptsSanityCheck sopts) $ sanityCheck menv return mpaths -- | Install the newest version of Cabal globally upgradeCabal :: (MonadIO m, MonadLogger m, MonadReader env m, HasHttpManager env, HasConfig env, MonadBaseControl IO m, MonadMask m) => EnvOverride -> WhichCompiler -> m () upgradeCabal menv wc = do let name = $(mkPackageName "Cabal") rmap <- resolvePackages menv Set.empty (Set.singleton name) newest <- case Map.keys rmap of [] -> error "No Cabal library found in index, cannot upgrade" [PackageIdentifier name' version] | name == name' -> return version x -> error $ "Unexpected results for resolvePackages: " ++ show x installed <- getCabalPkgVer menv wc if installed >= newest then $logInfo $ T.concat [ "Currently installed Cabal is " , T.pack $ versionString installed , ", newest is " , T.pack $ versionString newest , ". I'm not upgrading Cabal." 
] else withSystemTempDirectory "stack-cabal-upgrade" $ \tmpdir -> do $logInfo $ T.concat [ "Installing Cabal-" , T.pack $ versionString newest , " to replace " , T.pack $ versionString installed ] tmpdir' <- parseAbsDir tmpdir let ident = PackageIdentifier name newest m <- unpackPackageIdents menv tmpdir' Nothing (Set.singleton ident) compilerPath <- join $ findExecutable menv (compilerExeName wc) newestDir <- parseRelDir $ versionString newest let installRoot = toFilePath $ parent (parent compilerPath) </> $(mkRelDir "new-cabal") </> newestDir dir <- case Map.lookup ident m of Nothing -> error $ "upgradeCabal: Invariant violated, dir missing" Just dir -> return dir runIn dir (compilerExeName wc) menv ["Setup.hs"] Nothing let setupExe = toFilePath $ dir </> $(mkRelFile "Setup") dirArgument name' = concat [ "--" , name' , "dir=" , installRoot FP.</> name' ] runIn dir setupExe menv ( "configure" : map dirArgument (words "lib bin data doc") ) Nothing runIn dir setupExe menv ["build"] Nothing runIn dir setupExe menv ["install"] Nothing $logInfo "New Cabal library installed" -- | Get the version of the system compiler, if available getSystemCompiler :: (MonadIO m, MonadLogger m, MonadBaseControl IO m, MonadCatch m) => EnvOverride -> WhichCompiler -> m (Maybe (CompilerVersion, Arch)) getSystemCompiler menv wc = do let exeName = case wc of Ghc -> "ghc" Ghcjs -> "ghcjs" exists <- doesExecutableExist menv exeName if exists then do eres <- tryProcessStdout Nothing menv exeName ["--info"] let minfo = do Right bs <- Just eres pairs <- readMay $ S8.unpack bs :: Maybe [(String, String)] version <- lookup "Project version" pairs >>= parseVersionFromString arch <- lookup "Target platform" pairs >>= simpleParse . takeWhile (/= '-') return (version, arch) case (wc, minfo) of (Ghc, Just (version, arch)) -> return (Just (GhcVersion version, arch)) (Ghcjs, Just (_, arch)) -> do eversion <- tryAny $ getCompilerVersion menv Ghcjs case eversion of Left _ -> return Nothing Right version -> return (Just (version, arch)) (_, Nothing) -> return Nothing else return Nothing data DownloadInfo = DownloadInfo { downloadInfoUrl :: Text , downloadInfoContentLength :: Int , downloadInfoSha1 :: Maybe ByteString } deriving Show data VersionedDownloadInfo = VersionedDownloadInfo { vdiVersion :: Version , vdiDownloadInfo :: DownloadInfo } deriving Show parseDownloadInfoFromObject :: Yaml.Object -> Yaml.Parser DownloadInfo parseDownloadInfoFromObject o = do url <- o .: "url" contentLength <- o .: "content-length" sha1TextMay <- o .:? 
"sha1" return DownloadInfo { downloadInfoUrl = url , downloadInfoContentLength = contentLength , downloadInfoSha1 = fmap T.encodeUtf8 sha1TextMay } instance FromJSON DownloadInfo where parseJSON = withObject "DownloadInfo" parseDownloadInfoFromObject instance FromJSON VersionedDownloadInfo where parseJSON = withObject "VersionedDownloadInfo" $ \o -> do version <- o .: "version" downloadInfo <- parseDownloadInfoFromObject o return VersionedDownloadInfo { vdiVersion = version , vdiDownloadInfo = downloadInfo } data SetupInfo = SetupInfo { siSevenzExe :: DownloadInfo , siSevenzDll :: DownloadInfo , siMsys2 :: Map Text VersionedDownloadInfo , siGHCs :: Map Text (Map Version DownloadInfo) } deriving Show instance FromJSON SetupInfo where parseJSON = withObject "SetupInfo" $ \o -> SetupInfo <$> o .: "sevenzexe-info" <*> o .: "sevenzdll-info" <*> o .: "msys2" <*> o .: "ghc" -- | Download the most recent SetupInfo getSetupInfo :: (MonadIO m, MonadThrow m) => SetupOpts -> Manager -> m SetupInfo getSetupInfo sopts manager = do bs <- case parseUrl $ soptsStackSetupYaml sopts of Just req -> do bss <- liftIO $ flip runReaderT manager $ withResponse req $ \res -> responseBody res $$ CL.consume return $ S8.concat bss Nothing -> liftIO $ S.readFile $ soptsStackSetupYaml sopts either throwM return $ Yaml.decodeEither' bs markInstalled :: (MonadIO m, MonadReader env m, HasConfig env, MonadThrow m) => PackageIdentifier -- ^ e.g., ghc-7.8.4, msys2-20150512 -> m () markInstalled ident = do dir <- asks $ configLocalPrograms . getConfig fpRel <- parseRelFile $ packageIdentifierString ident ++ ".installed" liftIO $ writeFile (toFilePath $ dir </> fpRel) "installed" unmarkInstalled :: (MonadIO m, MonadReader env m, HasConfig env, MonadThrow m) => PackageIdentifier -> m () unmarkInstalled ident = do dir <- asks $ configLocalPrograms . getConfig fpRel <- parseRelFile $ packageIdentifierString ident ++ ".installed" removeFileIfExists $ dir </> fpRel listInstalled :: (MonadIO m, MonadReader env m, HasConfig env, MonadThrow m) => m [PackageIdentifier] listInstalled = do dir <- asks $ configLocalPrograms . 
getConfig createTree dir (_, files) <- listDirectory dir return $ mapMaybe toIdent files where toIdent fp = do x <- T.stripSuffix ".installed" $ T.pack $ toFilePath $ filename fp parsePackageIdentifierFromString $ T.unpack x installDir :: (MonadReader env m, HasConfig env, MonadThrow m, MonadLogger m) => PackageIdentifier -> m (Path Abs Dir) installDir ident = do config <- asks getConfig reldir <- parseRelDir $ packageIdentifierString ident return $ configLocalPrograms config </> reldir -- | Binary directories for the given installed package extraDirs :: (MonadReader env m, HasConfig env, MonadThrow m, MonadLogger m) => PackageIdentifier -> m ExtraDirs extraDirs ident = do config <- asks getConfig dir <- installDir ident case (configPlatform config, packageNameString $ packageIdentifierName ident) of (Platform _ (isWindows -> True), "ghc") -> return mempty { edBins = goList [ dir </> $(mkRelDir "bin") , dir </> $(mkRelDir "mingw") </> $(mkRelDir "bin") ] } (Platform _ (isWindows -> True), "msys2") -> return mempty { edBins = goList [ dir </> $(mkRelDir "usr") </> $(mkRelDir "bin") ] , edInclude = goList [ dir </> $(mkRelDir "mingw64") </> $(mkRelDir "include") , dir </> $(mkRelDir "mingw32") </> $(mkRelDir "include") ] , edLib = goList [ dir </> $(mkRelDir "mingw64") </> $(mkRelDir "lib") , dir </> $(mkRelDir "mingw32") </> $(mkRelDir "lib") ] } (_, "ghc") -> return mempty { edBins = goList [ dir </> $(mkRelDir "bin") ] } (Platform _ x, tool) -> do $logWarn $ "binDirs: unexpected OS/tool combo: " <> T.pack (show (x, tool)) return mempty where goList = map toFilePathNoTrailingSlash getInstalledTool :: [PackageIdentifier] -- ^ already installed -> PackageName -- ^ package to find -> (Version -> Bool) -- ^ which versions are acceptable -> Maybe PackageIdentifier getInstalledTool installed name goodVersion = if null available then Nothing else Just $ maximumBy (comparing packageIdentifierVersion) available where available = filter goodPackage installed goodPackage pi' = packageIdentifierName pi' == name && goodVersion (packageIdentifierVersion pi') downloadAndInstallTool :: (MonadIO m, MonadMask m, MonadLogger m, MonadReader env m, HasConfig env, HasHttpManager env, MonadBaseControl IO m) => SetupInfo -> DownloadInfo -> PackageName -> Version -> (SetupInfo -> Path Abs File -> ArchiveType -> Path Abs Dir -> PackageIdentifier -> m ()) -> m PackageIdentifier downloadAndInstallTool si downloadInfo name version installer = do let ident = PackageIdentifier name version (file, at) <- downloadFromInfo downloadInfo ident dir <- installDir ident unmarkInstalled ident installer si file at dir ident markInstalled ident return ident downloadAndInstallGHC :: (MonadIO m, MonadMask m, MonadLogger m, MonadReader env m, HasConfig env, HasHttpManager env, MonadBaseControl IO m) => EnvOverride -> SetupInfo -> CompilerVersion -> VersionCheck -> m PackageIdentifier downloadAndInstallGHC menv si wanted versionCheck = do osKey <- getOSKey menv pairs <- case Map.lookup osKey $ siGHCs si of Nothing -> throwM $ UnknownOSKey osKey Just pairs -> return pairs let mpair = listToMaybe $ sortBy (flip (comparing fst)) $ filter (\(v, _) -> isWantedCompiler versionCheck wanted (GhcVersion v)) (Map.toList pairs) (selectedVersion, downloadInfo) <- case mpair of Just pair -> return pair Nothing -> throwM $ UnknownCompilerVersion osKey wanted (Map.keysSet pairs) platform <- asks $ configPlatform . 
getConfig let installer = case platform of Platform _ os | isWindows os -> installGHCWindows _ -> installGHCPosix $logInfo "Preparing to install GHC to an isolated location." $logInfo "This will not interfere with any system-level installation." downloadAndInstallTool si downloadInfo $(mkPackageName "ghc") selectedVersion installer getOSKey :: (MonadReader env m, MonadThrow m, HasConfig env, MonadLogger m, MonadIO m, MonadCatch m, MonadBaseControl IO m) => EnvOverride -> m Text getOSKey menv = do platform <- asks $ configPlatform . getConfig case platform of Platform I386 Cabal.Linux -> ("linux32" <>) <$> getLinuxSuffix Platform X86_64 Cabal.Linux -> ("linux64" <>) <$> getLinuxSuffix Platform I386 Cabal.OSX -> return "macosx" Platform X86_64 Cabal.OSX -> return "macosx" Platform I386 Cabal.FreeBSD -> return "freebsd32" Platform X86_64 Cabal.FreeBSD -> return "freebsd64" Platform I386 Cabal.OpenBSD -> return "openbsd32" Platform X86_64 Cabal.OpenBSD -> return "openbsd64" Platform I386 Cabal.Windows -> return "windows32" Platform X86_64 Cabal.Windows -> return "windows64" Platform I386 (Cabal.OtherOS "windowsintegersimple") -> return "windowsintegersimple32" Platform X86_64 (Cabal.OtherOS "windowsintegersimple") -> return "windowsintegersimple64" Platform arch os -> throwM $ UnsupportedSetupCombo os arch where getLinuxSuffix = do executablePath <- liftIO getExecutablePath elddOut <- tryProcessStdout Nothing menv "ldd" [executablePath] return $ case elddOut of Left _ -> "" Right lddOut -> if hasLineWithFirstWord "libgmp.so.3" lddOut then "-gmp4" else "" hasLineWithFirstWord w = elem (Just w) . map (headMay . T.words) . T.lines . T.decodeUtf8With T.lenientDecode downloadFromInfo :: (MonadIO m, MonadMask m, MonadLogger m, MonadReader env m, HasConfig env, HasHttpManager env, MonadBaseControl IO m) => DownloadInfo -> PackageIdentifier -> m (Path Abs File, ArchiveType) downloadFromInfo downloadInfo ident = do config <- asks getConfig at <- case extension of ".tar.xz" -> return TarXz ".tar.bz2" -> return TarBz2 ".7z.exe" -> return SevenZ _ -> error $ "Unknown extension: " ++ extension relfile <- parseRelFile $ packageIdentifierString ident ++ extension let path = configLocalPrograms config </> relfile chattyDownload (packageIdentifierText ident) downloadInfo path return (path, at) where url = downloadInfoUrl downloadInfo extension = loop $ T.unpack url where loop fp | ext `elem` [".tar", ".bz2", ".xz", ".exe", ".7z"] = loop fp' ++ ext | otherwise = "" where (fp', ext) = FP.splitExtension fp data ArchiveType = TarBz2 | TarXz | SevenZ installGHCPosix :: (MonadIO m, MonadMask m, MonadLogger m, MonadReader env m, HasConfig env, HasHttpManager env, MonadBaseControl IO m) => SetupInfo -> Path Abs File -> ArchiveType -> Path Abs Dir -> PackageIdentifier -> m () installGHCPosix _ archiveFile archiveType destDir ident = do platform <- asks getPlatform menv0 <- getMinimalEnvOverride menv <- mkEnvOverride platform (removeHaskellEnvVars (unEnvOverride menv0)) $logDebug $ "menv = " <> T.pack (show (unEnvOverride menv)) zipTool' <- case archiveType of TarXz -> return "xz" TarBz2 -> return "bzip2" SevenZ -> error "Don't know how to deal with .7z files on non-Windows" (zipTool, makeTool, tarTool) <- checkDependencies $ (,,) <$> checkDependency zipTool' <*> (checkDependency "gmake" <|> checkDependency "make") <*> checkDependency "tar" $logDebug $ "ziptool: " <> T.pack zipTool $logDebug $ "make: " <> T.pack makeTool $logDebug $ "tar: " <> T.pack tarTool withSystemTempDirectory "stack-setup" $ \root' -> do root <- 
parseAbsDir root' dir <- liftM (root Path.</>) $ parseRelDir $ packageIdentifierString ident $logSticky $ T.concat ["Unpacking GHC into ", (T.pack . toFilePath $ root), " ..."] $logDebug $ "Unpacking " <> T.pack (toFilePath archiveFile) readInNull root tarTool menv ["xf", toFilePath archiveFile] Nothing $logSticky "Configuring GHC ..." readInNull dir (toFilePath $ dir Path.</> $(mkRelFile "configure")) menv ["--prefix=" ++ toFilePath destDir] Nothing $logSticky "Installing GHC ..." readInNull dir makeTool menv ["install"] Nothing $logStickyDone $ "Installed GHC." $logDebug $ "GHC installed to " <> T.pack (toFilePath destDir) where -- | Check if given processes appear to be present, throwing an exception if -- missing. checkDependencies :: (MonadIO m, MonadThrow m, MonadReader env m, HasConfig env) => CheckDependency a -> m a checkDependencies (CheckDependency f) = do menv <- getMinimalEnvOverride liftIO (f menv) >>= either (throwM . MissingDependencies) return checkDependency :: String -> CheckDependency String checkDependency tool = CheckDependency $ \menv -> do exists <- doesExecutableExist menv tool return $ if exists then Right tool else Left [tool] newtype CheckDependency a = CheckDependency (EnvOverride -> IO (Either [String] a)) deriving Functor instance Applicative CheckDependency where pure x = CheckDependency $ \_ -> return (Right x) CheckDependency f <*> CheckDependency x = CheckDependency $ \menv -> do f' <- f menv x' <- x menv return $ case (f', x') of (Left e1, Left e2) -> Left $ e1 ++ e2 (Left e, Right _) -> Left e (Right _, Left e) -> Left e (Right f'', Right x'') -> Right $ f'' x'' instance Alternative CheckDependency where empty = CheckDependency $ \_ -> return $ Left [] CheckDependency x <|> CheckDependency y = CheckDependency $ \menv -> do res1 <- x menv case res1 of Left _ -> y menv Right x' -> return $ Right x' installGHCWindows :: (MonadIO m, MonadMask m, MonadLogger m, MonadReader env m, HasConfig env, HasHttpManager env, MonadBaseControl IO m) => SetupInfo -> Path Abs File -> ArchiveType -> Path Abs Dir -> PackageIdentifier -> m () installGHCWindows si archiveFile archiveType destDir _ = do suffix <- case archiveType of TarXz -> return ".xz" TarBz2 -> return ".bz2" _ -> error $ "GHC on Windows must be a tarball file" tarFile <- case T.stripSuffix suffix $ T.pack $ toFilePath archiveFile of Nothing -> error $ "Invalid GHC filename: " ++ show archiveFile Just x -> parseAbsFile $ T.unpack x config <- asks getConfig run7z <- setup7z si config run7z (parent archiveFile) archiveFile run7z (parent archiveFile) tarFile removeFile tarFile `catchIO` \e -> $logWarn (T.concat [ "Exception when removing " , T.pack $ toFilePath tarFile , ": " , T.pack $ show e ]) $logInfo $ "GHC installed to " <> T.pack (toFilePath destDir) installMsys2Windows :: (MonadIO m, MonadMask m, MonadLogger m, MonadReader env m, HasConfig env, HasHttpManager env, MonadBaseControl IO m) => Text -- ^ OS Key -> SetupInfo -> Path Abs File -> ArchiveType -> Path Abs Dir -> PackageIdentifier -> m () installMsys2Windows osKey si archiveFile archiveType destDir _ = do suffix <- case archiveType of TarXz -> return ".xz" TarBz2 -> return ".bz2" _ -> error $ "MSYS2 must be a .tar.xz archive" tarFile <- case T.stripSuffix suffix $ T.pack $ toFilePath archiveFile of Nothing -> error $ "Invalid MSYS2 filename: " ++ show archiveFile Just x -> parseAbsFile $ T.unpack x config <- asks getConfig run7z <- setup7z si config exists <- liftIO $ D.doesDirectoryExist $ toFilePath destDir when exists $ liftIO 
(D.removeDirectoryRecursive $ toFilePath destDir) `catchIO` \e -> do $logError $ T.pack $ "Could not delete existing msys directory: " ++ toFilePath destDir throwM e run7z (parent archiveFile) archiveFile run7z (parent archiveFile) tarFile removeFile tarFile `catchIO` \e -> $logWarn (T.concat [ "Exception when removing " , T.pack $ toFilePath tarFile , ": " , T.pack $ show e ]) msys <- parseRelDir $ "msys" ++ T.unpack (fromMaybe "32" $ T.stripPrefix "windows" osKey) liftIO $ D.renameDirectory (toFilePath $ parent archiveFile </> msys) (toFilePath destDir) platform <- asks getPlatform menv0 <- getMinimalEnvOverride let oldEnv = unEnvOverride menv0 newEnv = augmentPathMap [toFilePath $ destDir </> $(mkRelDir "usr") </> $(mkRelDir "bin")] oldEnv menv <- mkEnvOverride platform newEnv -- I couldn't find this officially documented anywhere, but you need to run -- the shell once in order to initialize some pacman stuff. Once that run -- happens, you can just run commands as usual. runIn destDir "sh" menv ["--login", "-c", "true"] Nothing -- Install git. We could install other useful things in the future too. runIn destDir "pacman" menv ["-Sy", "--noconfirm", "git"] Nothing -- | Download 7z as necessary, and get a function for unpacking things. -- -- Returned function takes an unpack directory and archive. setup7z :: (MonadReader env m, HasHttpManager env, MonadThrow m, MonadIO m, MonadIO n, MonadLogger m, MonadBaseControl IO m) => SetupInfo -> Config -> m (Path Abs Dir -> Path Abs File -> n ()) setup7z si config = do chattyDownload "7z.dll" (siSevenzDll si) dll chattyDownload "7z.exe" (siSevenzExe si) exe return $ \outdir archive -> liftIO $ do ec <- rawSystem (toFilePath exe) [ "x" , "-o" ++ toFilePath outdir , "-y" , toFilePath archive ] when (ec /= ExitSuccess) $ error $ "Problem while decompressing " ++ toFilePath archive where dir = configLocalPrograms config </> $(mkRelDir "7z") exe = dir </> $(mkRelFile "7z.exe") dll = dir </> $(mkRelFile "7z.dll") chattyDownload :: (MonadReader env m, HasHttpManager env, MonadIO m, MonadLogger m, MonadThrow m, MonadBaseControl IO m) => Text -- ^ label -> DownloadInfo -- ^ URL, content-length, and sha1 -> Path Abs File -- ^ destination -> m () chattyDownload label downloadInfo path = do let url = downloadInfoUrl downloadInfo req <- parseUrl $ T.unpack url $logSticky $ T.concat [ "Preparing to download " , label , " ..." ] $logDebug $ T.concat [ "Downloading from " , url , " to " , T.pack $ toFilePath path , " ..." ] hashChecks <- case downloadInfoSha1 downloadInfo of Just sha1ByteString -> do let sha1 = CheckHexDigestByteString sha1ByteString $logDebug $ T.concat [ "Will check against sha1 hash: " , T.decodeUtf8With T.lenientDecode sha1ByteString ] return [HashCheck SHA1 sha1] Nothing -> do $logWarn $ T.concat [ "No sha1 found in metadata," , " download hash won't be checked." ] return [] let dReq = DownloadRequest { drRequest = req , drHashChecks = hashChecks , drLengthCheck = Just totalSize , drRetryPolicy = drRetryPolicyDefault } runInBase <- liftBaseWith $ \run -> return (void . run) x <- verifiedDownload dReq path (chattyDownloadProgress runInBase) if x then $logStickyDone ("Downloaded " <> label <> ".") else $logStickyDone "Already downloaded." where totalSize = downloadInfoContentLength downloadInfo chattyDownloadProgress runInBase _ = do _ <- liftIO $ runInBase $ $logSticky $ label <> ": download has begun" CL.map (Sum . 
S.length) =$ chunksOverTime 1 =$ go where go = evalStateC 0 $ awaitForever $ \(Sum size) -> do modify (+ size) totalSoFar <- get liftIO $ runInBase $ $logSticky $ T.pack $ chattyProgressWithTotal totalSoFar totalSize -- Note(DanBurton): Total size is now always known in this file. -- However, printing in the case where it isn't known may still be -- useful in other parts of the codebase. -- So I'm just commenting out the code rather than deleting it. -- case mcontentLength of -- Nothing -> chattyProgressNoTotal totalSoFar -- Just 0 -> chattyProgressNoTotal totalSoFar -- Just total -> chattyProgressWithTotal totalSoFar total ---- Example: ghc: 42.13 KiB downloaded... --chattyProgressNoTotal totalSoFar = -- printf ("%s: " <> bytesfmt "%7.2f" totalSoFar <> " downloaded...") -- (T.unpack label) -- Example: ghc: 50.00 MiB / 100.00 MiB (50.00%) downloaded... chattyProgressWithTotal totalSoFar total = printf ("%s: " <> bytesfmt "%7.2f" totalSoFar <> " / " <> bytesfmt "%.2f" total <> " (%6.2f%%) downloaded...") (T.unpack label) percentage where percentage :: Double percentage = (fromIntegral totalSoFar / fromIntegral total * 100) -- | Given a printf format string for the decimal part and a number of -- bytes, formats the bytes using an appropiate unit and returns the -- formatted string. -- -- >>> bytesfmt "%.2" 512368 -- "500.359375 KiB" bytesfmt :: Integral a => String -> a -> String bytesfmt formatter bs = printf (formatter <> " %s") (fromIntegral (signum bs) * dec :: Double) (bytesSuffixes !! i) where (dec,i) = getSuffix (abs bs) getSuffix n = until p (\(x,y) -> (x / 1024, y+1)) (fromIntegral n,0) where p (n',numDivs) = n' < 1024 || numDivs == (length bytesSuffixes - 1) bytesSuffixes :: [String] bytesSuffixes = ["B","KiB","MiB","GiB","TiB","PiB","EiB","ZiB","YiB"] -- Await eagerly (collect with monoidal append), -- but space out yields by at least the given amount of time. -- The final yield may come sooner, and may be a superfluous mempty. 
-- Note that Integer and Float literals can be turned into NominalDiffTime -- (these literals are interpreted as "seconds") chunksOverTime :: (Monoid a, MonadIO m) => NominalDiffTime -> Conduit a m a chunksOverTime diff = do currentTime <- liftIO getCurrentTime evalStateC (currentTime, mempty) go where -- State is a tuple of: -- * the last time a yield happened (or the beginning of the sink) -- * the accumulated awaits since the last yield go = await >>= \case Nothing -> do (_, acc) <- get yield acc Just a -> do (lastTime, acc) <- get let acc' = acc <> a currentTime <- liftIO getCurrentTime if diff < diffUTCTime currentTime lastTime then put (currentTime, mempty) >> yield acc' else put (lastTime, acc') go -- | Perform a basic sanity check of GHC sanityCheck :: (MonadIO m, MonadMask m, MonadLogger m, MonadBaseControl IO m) => EnvOverride -> m () sanityCheck menv = withSystemTempDirectory "stack-sanity-check" $ \dir -> do dir' <- parseAbsDir dir let fp = toFilePath $ dir' </> $(mkRelFile "Main.hs") liftIO $ writeFile fp $ unlines [ "import Distribution.Simple" -- ensure Cabal library is present , "main = putStrLn \"Hello World\"" ] ghc <- join $ findExecutable menv "ghc" $logDebug $ "Performing a sanity check on: " <> T.pack (toFilePath ghc) eres <- tryProcessStdout (Just dir') menv "ghc" [ fp , "-no-user-package-db" ] case eres of Left e -> throwM $ GHCSanityCheckCompileFailed e ghc Right _ -> return () -- TODO check that the output of running the command is correct toFilePathNoTrailingSlash :: Path loc Dir -> FilePath toFilePathNoTrailingSlash = FP.dropTrailingPathSeparator . toFilePath -- Remove potentially confusing environment variables removeHaskellEnvVars :: Map Text Text -> Map Text Text removeHaskellEnvVars = Map.delete "GHC_PACKAGE_PATH" . Map.delete "HASKELL_PACKAGE_SANDBOX" . Map.delete "HASKELL_PACKAGE_SANDBOXES" . Map.delete "HASKELL_DIST_DIR" -- | Get map of environment variables to set to change the locale's encoding to UTF-8 getUtf8LocaleVars :: forall m env. (MonadReader env m, HasPlatform env, MonadLogger m, MonadCatch m, MonadBaseControl IO m, MonadIO m) => EnvOverride -> m (Map Text Text) getUtf8LocaleVars menv = do Platform _ os <- asks getPlatform if isWindows os then -- On Windows, locale is controlled by the code page, so we don't set any environment -- variables. return Map.empty else do let checkedVars = map checkVar (Map.toList $ eoTextMap menv) -- List of environment variables that will need to be updated to set UTF-8 (because -- they currently do not specify UTF-8). needChangeVars = concatMap fst checkedVars -- Set of locale-related environment variables that have already have a value. existingVarNames = Set.unions (map snd checkedVars) -- True if a locale is already specified by one of the "global" locale variables. hasAnyExisting = or $ map (`Set.member` existingVarNames) ["LANG", "LANGUAGE", "LC_ALL"] if null needChangeVars && hasAnyExisting then -- If no variables need changes and at least one "global" variable is set, no -- changes to environment need to be made. return Map.empty else do -- Get a list of known locales by running @locale -a@. elocales <- tryProcessStdout Nothing menv "locale" ["-a"] let -- Filter the list to only include locales with UTF-8 encoding. 
utf8Locales = case elocales of Left _ -> [] Right locales -> filter isUtf8Locale (T.lines $ T.decodeUtf8With T.lenientDecode locales) mfallback = getFallbackLocale utf8Locales when (isNothing mfallback) ($logWarn "Warning: unable to set locale to UTF-8 encoding; GHC may fail with 'invalid character'") let -- Get the new values of variables to adjust. changes = Map.unions $ map (adjustedVarValue utf8Locales mfallback) needChangeVars -- Get the values of variables to add. adds | hasAnyExisting = -- If we already have a "global" variable, then nothing needs -- to be added. Map.empty | otherwise = -- If we don't already have a "global" variable, then set LANG to the -- fallback. case mfallback of Nothing -> Map.empty Just fallback -> Map.singleton "LANG" fallback return (Map.union changes adds) where -- Determines whether an environment variable is locale-related and, if so, whether it needs to -- be adjusted. checkVar :: (Text, Text) -> ([Text], Set Text) checkVar (k,v) = if k `elem` ["LANG", "LANGUAGE"] || "LC_" `T.isPrefixOf` k then if isUtf8Locale v then ([], Set.singleton k) else ([k], Set.singleton k) else ([], Set.empty) -- Adjusted value of an existing locale variable. Looks for valid UTF-8 encodings with -- same language /and/ territory, then with same language, and finally the first UTF-8 locale -- returned by @locale -a@. adjustedVarValue :: [Text] -> Maybe Text -> Text -> Map Text Text adjustedVarValue utf8Locales mfallback k = case Map.lookup k (eoTextMap menv) of Nothing -> Map.empty Just v -> case concatMap (matchingLocales utf8Locales) [ T.takeWhile (/= '.') v <> "." , T.takeWhile (/= '_') v <> "_"] of (v':_) -> Map.singleton k v' [] -> case mfallback of Just fallback -> Map.singleton k fallback Nothing -> Map.empty -- Determine the fallback locale, by looking for any UTF-8 locale prefixed with the list in -- @fallbackPrefixes@, and if not found, picking the first UTF-8 encoding returned by @locale -- -a@. getFallbackLocale :: [Text] -> Maybe Text getFallbackLocale utf8Locales = do case concatMap (matchingLocales utf8Locales) fallbackPrefixes of (v:_) -> Just v [] -> case utf8Locales of [] -> Nothing (v:_) -> Just v -- Filter the list of locales for any with the given prefixes (case-insitive). matchingLocales :: [Text] -> Text -> [Text] matchingLocales utf8Locales prefix = filter (\v -> (T.toLower prefix) `T.isPrefixOf` T.toLower v) utf8Locales -- Does the locale have one of the encodings in @utf8Suffixes@ (case-insensitive)? isUtf8Locale locale = or $ map (\v -> T.toLower v `T.isSuffixOf` T.toLower locale) utf8Suffixes -- Prefixes of fallback locales (case-insensitive) fallbackPrefixes = ["C.", "en_US.", "en_"] -- Suffixes of UTF-8 locales (case-insensitive) utf8Suffixes = [".UTF-8", ".utf8"]
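-- Hypothetical sanity checks for 'bytesfmt' above, added purely as an
-- illustration and not part of the original module.  Each element exercises
-- the suffix selection for a different order of magnitude.
_bytesfmtExamples :: [String]
_bytesfmtExamples =
  [ bytesfmt "%.2f" (512     :: Int)  -- "512.00 B"
  , bytesfmt "%.2f" (512368  :: Int)  -- "500.36 KiB"
  , bytesfmt "%.2f" (1048576 :: Int)  -- "1.00 MiB"
  ]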
wolftune/stack
src/Stack/Setup.hs
bsd-3-clause
51,725
0
31
17,450
11,885
5,938
5,947
1,015
15
{-| The pretty printing for torus types. -}
module Torus.Core.Pretty
       ( Pretty(..)
       ) where

import Data.Text ( Text, unpack )
import Text.PrettyPrint

-- | This class captures stuff that can be pretty printed.
class Pretty a where
  pretty :: a -> Doc

instance Pretty Text where
  pretty = text . unpack
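-- A minimal usage sketch, not part of the original module: a second 'Pretty'
-- instance (for 'Int', via 'Text.PrettyPrint.int') and a hypothetical
-- 'prettyShow' helper that renders any 'Pretty' value to a 'String'.
instance Pretty Int where
  pretty = int

prettyShow :: Pretty a => a -> String
prettyShow = render . pretty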
piyush-kurur/torus
Torus/Core/Pretty.hs
bsd-3-clause
328
0
7
82
71
42
29
8
0
-- | -- Module : Crypto.MAC.KMAC -- License : BSD-style -- Maintainer : Olivier Chéron <[email protected]> -- Stability : experimental -- Portability : unknown -- -- Provide the KMAC (Keccak Message Authentication Code) algorithm, derived from -- the SHA-3 base algorithm Keccak and defined in NIST SP800-185. -- {-# LANGUAGE BangPatterns #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE ScopedTypeVariables #-} module Crypto.MAC.KMAC ( HashSHAKE , kmac , KMAC(..) -- * Incremental , Context , initialize , update , updates , finalize ) where import qualified Crypto.Hash as H import Crypto.Hash.SHAKE (HashSHAKE(..)) import Crypto.Hash.Types (HashAlgorithm(..), Digest(..)) import qualified Crypto.Hash.Types as H import Crypto.Internal.Builder import Crypto.Internal.Imports import Foreign.Ptr (Ptr) import Data.Bits (shiftR) import Data.ByteArray (ByteArrayAccess) import qualified Data.ByteArray as B -- cSHAKE cshakeInit :: forall a name string prefix . (HashSHAKE a, ByteArrayAccess name, ByteArrayAccess string, ByteArrayAccess prefix) => name -> string -> prefix -> H.Context a cshakeInit n s p = H.Context $ B.allocAndFreeze c $ \(ptr :: Ptr (H.Context a)) -> do hashInternalInit ptr B.withByteArray b $ \d -> hashInternalUpdate ptr d (fromIntegral $ B.length b) B.withByteArray p $ \d -> hashInternalUpdate ptr d (fromIntegral $ B.length p) where c = hashInternalContextSize (undefined :: a) w = hashBlockSize (undefined :: a) x = encodeString n <> encodeString s b = buildAndFreeze (bytepad x w) :: B.Bytes cshakeUpdate :: (HashSHAKE a, ByteArrayAccess ba) => H.Context a -> ba -> H.Context a cshakeUpdate = H.hashUpdate cshakeUpdates :: (HashSHAKE a, ByteArrayAccess ba) => H.Context a -> [ba] -> H.Context a cshakeUpdates = H.hashUpdates cshakeFinalize :: forall a suffix . (HashSHAKE a, ByteArrayAccess suffix) => H.Context a -> suffix -> Digest a cshakeFinalize !c s = Digest $ B.allocAndFreeze (hashDigestSize (undefined :: a)) $ \dig -> do ((!_) :: B.Bytes) <- B.copy c $ \(ctx :: Ptr (H.Context a)) -> do B.withByteArray s $ \d -> hashInternalUpdate ctx d (fromIntegral $ B.length s) cshakeInternalFinalize ctx dig return () -- KMAC -- | Represent a KMAC that is a phantom type with the hash used to produce the -- mac. -- -- The Eq instance is constant time. No Show instance is provided, to avoid -- printing by mistake. newtype KMAC a = KMAC { kmacGetDigest :: Digest a } deriving (ByteArrayAccess,NFData) instance Eq (KMAC a) where (KMAC b1) == (KMAC b2) = B.constEq b1 b2 -- | Compute a KMAC using the supplied customization string and key. kmac :: (HashSHAKE a, ByteArrayAccess string, ByteArrayAccess key, ByteArrayAccess ba) => string -> key -> ba -> KMAC a kmac str key msg = finalize $ updates (initialize str key) [msg] -- | Represent an ongoing KMAC state, that can be appended with 'update' and -- finalized to a 'KMAC' with 'finalize'. newtype Context a = Context (H.Context a) -- | Initialize a new incremental KMAC context with the supplied customization -- string and key. initialize :: forall a string key . (HashSHAKE a, ByteArrayAccess string, ByteArrayAccess key) => string -> key -> Context a initialize str key = Context $ cshakeInit n str p where n = B.pack [75,77,65,67] :: B.Bytes -- "KMAC" w = hashBlockSize (undefined :: a) p = buildAndFreeze (bytepad (encodeString key) w) :: B.ScrubbedBytes -- | Incrementally update a KMAC context. update :: (HashSHAKE a, ByteArrayAccess ba) => Context a -> ba -> Context a update (Context ctx) = Context . 
cshakeUpdate ctx -- | Incrementally update a KMAC context with multiple inputs. updates :: (HashSHAKE a, ByteArrayAccess ba) => Context a -> [ba] -> Context a updates (Context ctx) = Context . cshakeUpdates ctx -- | Finalize a KMAC context and return the KMAC. finalize :: forall a . HashSHAKE a => Context a -> KMAC a finalize (Context ctx) = KMAC $ cshakeFinalize ctx suffix where l = cshakeOutputLength (undefined :: a) suffix = buildAndFreeze (rightEncode l) :: B.Bytes -- Utilities bytepad :: Builder -> Int -> Builder bytepad x w = prefix <> x <> zero padLen where prefix = leftEncode w padLen = (w - builderLength prefix - builderLength x) `mod` w encodeString :: ByteArrayAccess bin => bin -> Builder encodeString s = leftEncode (8 * B.length s) <> bytes s leftEncode :: Int -> Builder leftEncode x = byte len <> digits where digits = i2osp x len = fromIntegral (builderLength digits) rightEncode :: Int -> Builder rightEncode x = digits <> byte len where digits = i2osp x len = fromIntegral (builderLength digits) i2osp :: Int -> Builder i2osp i | i >= 256 = i2osp (shiftR i 8) <> byte (fromIntegral i) | otherwise = byte (fromIntegral i)
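-- Hypothetical usage sketch, not part of the original module.  A caller would
-- normally fix a concrete SHAKE variant for the result (for example
-- @KMAC (SHAKE128 256)@, which needs DataKinds at the use site); the result is
-- left polymorphic here so the sketch stays self-contained.  The key, message
-- and (empty) customization string are placeholder bytes only.
_kmacExample :: HashSHAKE a => KMAC a
_kmacExample = kmac customization key msg
  where
    customization = B.pack []           :: B.Bytes  -- "S" in SP 800-185
    key           = B.pack [0 .. 31]    :: B.Bytes  -- toy 32-byte key
    msg           = B.pack [1, 2, 3, 4] :: B.Bytes  -- message to authenticate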
vincenthz/cryptonite
Crypto/MAC/KMAC.hs
bsd-3-clause
5,042
0
20
1,137
1,487
792
695
86
1
module Main where

bar = do
  foo :: String <- baz
mpickering/ghc-exactprint
tests/examples/ghc710/DoPatBind.hs
bsd-3-clause
51
0
8
14
20
11
9
-1
-1
-- | -- Module : Crypto.Hash.Skein512 -- License : BSD-style -- Maintainer : Vincent Hanquez <[email protected]> -- Stability : experimental -- Portability : unknown -- -- Module containing the binding functions to work with the -- Skein512 cryptographic hash. -- {-# LANGUAGE ForeignFunctionInterface #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE TypeFamilies #-} module Crypto.Hash.Skein512 ( Skein512_224 (..), Skein512_256 (..), Skein512_384 (..), Skein512_512 (..) ) where import Crypto.Hash.Types import Foreign.Ptr (Ptr) import Data.Data import Data.Word (Word8, Word32) -- | Skein512 (224 bits) cryptographic hash algorithm data Skein512_224 = Skein512_224 deriving (Show,Data) instance HashAlgorithm Skein512_224 where type HashBlockSize Skein512_224 = 64 type HashDigestSize Skein512_224 = 28 type HashInternalContextSize Skein512_224 = 160 hashBlockSize _ = 64 hashDigestSize _ = 28 hashInternalContextSize _ = 160 hashInternalInit p = c_skein512_init p 224 hashInternalUpdate = c_skein512_update hashInternalFinalize p = c_skein512_finalize p 224 -- | Skein512 (256 bits) cryptographic hash algorithm data Skein512_256 = Skein512_256 deriving (Show,Data) instance HashAlgorithm Skein512_256 where type HashBlockSize Skein512_256 = 64 type HashDigestSize Skein512_256 = 32 type HashInternalContextSize Skein512_256 = 160 hashBlockSize _ = 64 hashDigestSize _ = 32 hashInternalContextSize _ = 160 hashInternalInit p = c_skein512_init p 256 hashInternalUpdate = c_skein512_update hashInternalFinalize p = c_skein512_finalize p 256 -- | Skein512 (384 bits) cryptographic hash algorithm data Skein512_384 = Skein512_384 deriving (Show,Data) instance HashAlgorithm Skein512_384 where type HashBlockSize Skein512_384 = 64 type HashDigestSize Skein512_384 = 48 type HashInternalContextSize Skein512_384 = 160 hashBlockSize _ = 64 hashDigestSize _ = 48 hashInternalContextSize _ = 160 hashInternalInit p = c_skein512_init p 384 hashInternalUpdate = c_skein512_update hashInternalFinalize p = c_skein512_finalize p 384 -- | Skein512 (512 bits) cryptographic hash algorithm data Skein512_512 = Skein512_512 deriving (Show,Data) instance HashAlgorithm Skein512_512 where type HashBlockSize Skein512_512 = 64 type HashDigestSize Skein512_512 = 64 type HashInternalContextSize Skein512_512 = 160 hashBlockSize _ = 64 hashDigestSize _ = 64 hashInternalContextSize _ = 160 hashInternalInit p = c_skein512_init p 512 hashInternalUpdate = c_skein512_update hashInternalFinalize p = c_skein512_finalize p 512 foreign import ccall unsafe "cryptonite_skein512_init" c_skein512_init :: Ptr (Context a) -> Word32 -> IO () foreign import ccall "cryptonite_skein512_update" c_skein512_update :: Ptr (Context a) -> Ptr Word8 -> Word32 -> IO () foreign import ccall unsafe "cryptonite_skein512_finalize" c_skein512_finalize :: Ptr (Context a) -> Word32 -> Ptr (Digest a) -> IO ()
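-- Hypothetical usage note, not part of the original module: these bindings are
-- normally consumed through the high-level "Crypto.Hash" API rather than
-- called directly.  A sketch of that use (assuming 'hashWith' from
-- "Crypto.Hash" and a strict 'ByteString' input):
--
-- > import Crypto.Hash (hashWith)
-- > import Data.ByteString.Char8 (pack)
-- >
-- > skeinDigest = hashWith Skein512_256 (pack "hello")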
vincenthz/cryptonite
Crypto/Hash/Skein512.hs
bsd-3-clause
3,366
0
11
838
647
351
296
64
0
{- Copyright 2012-2015 Vidar Holen This file is part of ShellCheck. http://www.vidarholen.net/contents/shellcheck ShellCheck is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. ShellCheck is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. -} {-# LANGUAGE FlexibleContexts #-} -- Basically Text.Regex based on regex-tdfa instead of the buggy regex-posix. module ShellCheck.Regex where import Data.List import Data.Maybe import Control.Monad import Text.Regex.TDFA -- Precompile the regex mkRegex :: String -> Regex mkRegex str = let make :: RegexMaker Regex CompOption ExecOption String => String -> Regex make = makeRegex in make str -- Does the regex match? matches :: String -> Regex -> Bool matches = flip match -- Get all subgroups of the first match matchRegex :: Regex -> String -> Maybe [String] matchRegex re str = do (_, _, _, groups) <- matchM re str :: Maybe (String,String,String,[String]) return groups -- Get all full matches matchAllStrings :: Regex -> String -> [String] matchAllStrings re = unfoldr f where f :: String -> Maybe (String, String) f str = do (_, match, rest, _) <- matchM re str :: Maybe (String, String, String, [String]) return (match, rest) -- Get all subgroups from all matches matchAllSubgroups :: Regex -> String -> [[String]] matchAllSubgroups re = unfoldr f where f :: String -> Maybe ([String], String) f str = do (_, _, rest, groups) <- matchM re str :: Maybe (String, String, String, [String]) return (groups, rest) -- Replace regex in input with string subRegex :: Regex -> String -> String -> String subRegex re input replacement = f input where f str = fromMaybe str $ do (before, match, after) <- matchM re str :: Maybe (String, String, String) when (null match) $ error ("Internal error: substituted empty in " ++ str) return $ before ++ replacement ++ f after
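-- Hypothetical examples, not part of the original module: exercising the
-- wrappers above on one precompiled regex.  Expected results are noted inline.
_digitRe :: Regex
_digitRe = mkRegex "([0-9]+)"

_regexExamples :: (Bool, Maybe [String], [String], String)
_regexExamples =
  ( "abc123" `matches` _digitRe               -- True
  , matchRegex _digitRe "abc123def456"        -- Just ["123"]
  , matchAllStrings _digitRe "abc123def456"   -- ["123", "456"]
  , subRegex _digitRe "abc123def456" "#"      -- "abc#def#"
  )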
icyfork/shellcheck
ShellCheck/Regex.hs
gpl-3.0
2,467
0
13
554
549
296
253
35
1
{- (c) The GRASP/AQUA Project, Glasgow University, 1993-1998 \section[WwLib]{A library for the ``worker\/wrapper'' back-end to the strictness analyser} -} {-# LANGUAGE CPP #-} module WwLib ( mkWwBodies, mkWWstr, mkWorkerArgs , deepSplitProductType_maybe, findTypeShape , isWorkerSmallEnough ) where #include "HsVersions.h" import GhcPrelude import CoreSyn import CoreUtils ( exprType, mkCast ) import Id import IdInfo ( JoinArity, vanillaIdInfo ) import DataCon import Demand import MkCore ( mkAbsentErrorApp, mkCoreUbxTup , mkCoreApp, mkCoreLet ) import MkId ( voidArgId, voidPrimId ) import TysWiredIn ( tupleDataCon ) import TysPrim ( voidPrimTy ) import Literal ( absentLiteralOf ) import VarEnv ( mkInScopeSet ) import VarSet ( VarSet ) import Type import RepType ( isVoidTy ) import Coercion import FamInstEnv import BasicTypes ( Boxity(..) ) import TyCon import UniqSupply import Unique import Maybes import Util import Outputable import DynFlags import FastString import ListSetOps {- ************************************************************************ * * \subsection[mkWrapperAndWorker]{@mkWrapperAndWorker@} * * ************************************************************************ Here's an example. The original function is: \begin{verbatim} g :: forall a . Int -> [a] -> a g = \/\ a -> \ x ys -> case x of 0 -> head ys _ -> head (tail ys) \end{verbatim} From this, we want to produce: \begin{verbatim} -- wrapper (an unfolding) g :: forall a . Int -> [a] -> a g = \/\ a -> \ x ys -> case x of I# x# -> $wg a x# ys -- call the worker; don't forget the type args! -- worker $wg :: forall a . Int# -> [a] -> a $wg = \/\ a -> \ x# ys -> let x = I# x# in case x of -- note: body of g moved intact 0 -> head ys _ -> head (tail ys) \end{verbatim} Something we have to be careful about: Here's an example: \begin{verbatim} -- "f" strictness: U(P)U(P) f (I# a) (I# b) = a +# b g = f -- "g" strictness same as "f" \end{verbatim} \tr{f} will get a worker all nice and friendly-like; that's good. {\em But we don't want a worker for \tr{g}}, even though it has the same strictness as \tr{f}. Doing so could break laziness, at best. Consequently, we insist that the number of strictness-info items is exactly the same as the number of lambda-bound arguments. (This is probably slightly paranoid, but OK in practice.) If it isn't the same, we ``revise'' the strictness info, so that we won't propagate the unusable strictness-info into the interfaces. ************************************************************************ * * \subsection{The worker wrapper core} * * ************************************************************************ @mkWwBodies@ is called when doing the worker\/wrapper split inside a module. 
-} type WwResult = ([Demand], -- Demands for worker (value) args JoinArity, -- Number of worker (type OR value) args Id -> CoreExpr, -- Wrapper body, lacking only the worker Id CoreExpr -> CoreExpr) -- Worker body, lacking the original function rhs mkWwBodies :: DynFlags -> FamInstEnvs -> VarSet -- Free vars of RHS -- See Note [Freshen WW arguments] -> Maybe JoinArity -- Just ar <=> is join point with join arity ar -> Type -- Type of original function -> [Demand] -- Strictness of original function -> DmdResult -- Info about function result -> UniqSM (Maybe WwResult) -- wrap_fn_args E = \x y -> E -- work_fn_args E = E x y -- wrap_fn_str E = case x of { (a,b) -> -- case a of { (a1,a2) -> -- E a1 a2 b y }} -- work_fn_str E = \a2 a2 b y -> -- let a = (a1,a2) in -- let x = (a,b) in -- E mkWwBodies dflags fam_envs rhs_fvs mb_join_arity fun_ty demands res_info = do { let empty_subst = mkEmptyTCvSubst (mkInScopeSet rhs_fvs) -- See Note [Freshen WW arguments] ; (wrap_args, wrap_fn_args, work_fn_args, res_ty) <- mkWWargs empty_subst fun_ty demands ; (useful1, work_args, wrap_fn_str, work_fn_str) <- mkWWstr dflags fam_envs wrap_args -- Do CPR w/w. See Note [Always do CPR w/w] ; (useful2, wrap_fn_cpr, work_fn_cpr, cpr_res_ty) <- mkWWcpr (gopt Opt_CprAnal dflags) fam_envs res_ty res_info ; let (work_lam_args, work_call_args) = mkWorkerArgs dflags work_args cpr_res_ty worker_args_dmds = [idDemandInfo v | v <- work_call_args, isId v] wrapper_body = wrap_fn_args . wrap_fn_cpr . wrap_fn_str . applyToVars work_call_args . Var worker_body = mkLams work_lam_args. work_fn_str . work_fn_cpr . work_fn_args ; if isWorkerSmallEnough dflags work_args && not (too_many_args_for_join_point wrap_args) && (useful1 && not only_one_void_argument || useful2) then return (Just (worker_args_dmds, length work_call_args, wrapper_body, worker_body)) else return Nothing } -- We use an INLINE unconditionally, even if the wrapper turns out to be -- something trivial like -- fw = ... -- f = __inline__ (coerce T fw) -- The point is to propagate the coerce to f's call sites, so even though -- f's RHS is now trivial (size 1) we still want the __inline__ to prevent -- fw from being inlined into f's RHS where -- Note [Do not split void functions] only_one_void_argument | [d] <- demands , Just (arg_ty1, _) <- splitFunTy_maybe fun_ty , isAbsDmd d && isVoidTy arg_ty1 = True | otherwise = False -- Note [Join points returning functions] too_many_args_for_join_point wrap_args | Just join_arity <- mb_join_arity , wrap_args `lengthExceeds` join_arity = WARN(True, text "Unable to worker/wrapper join point with arity " <+> int join_arity <+> text "but" <+> int (length wrap_args) <+> text "args") True | otherwise = False -- See Note [Limit w/w arity] isWorkerSmallEnough :: DynFlags -> [Var] -> Bool isWorkerSmallEnough dflags vars = count isId vars <= maxWorkerArgs dflags -- We count only Free variables (isId) to skip Type, Kind -- variables which have no runtime representation. {- Note [Always do CPR w/w] ~~~~~~~~~~~~~~~~~~~~~~~~ At one time we refrained from doing CPR w/w for thunks, on the grounds that we might duplicate work. But that is already handled by the demand analyser, which doesn't give the CPR proprety if w/w might waste work: see Note [CPR for thunks] in DmdAnal. And if something *has* been given the CPR property and we don't w/w, it's a disaster, because then the enclosing function might say it has the CPR property, but now doesn't and there a cascade of disaster. A good example is Trac #5920. 
Note [Limit w/w arity] ~~~~~~~~~~~~~~~~~~~~~~~~ Guard against high worker arity as it generates a lot of stack traffic. A simplified example is Trac #11565#comment:6 Current strategy is very simple: don't perform w/w transformation at all if the result produces a wrapper with arity higher than -fmax-worker-args=. It is a bit all or nothing, consider f (x,y) (a,b,c,d,e ... , z) = rhs Currently we will remove all w/w ness entirely. But actually we could w/w on the (x,y) pair... it's the huge product that is the problem. Could we instead refrain from w/w on an arg-by-arg basis? Yes, that'd solve f. But we can get a lot of args from deeply-nested products: g (a, (b, (c, (d, ...)))) = rhs This is harder to spot on an arg-by-arg basis. Previously mkWwStr was given some "fuel" saying how many arguments it could add; when we ran out of fuel it would stop w/wing. Still not very clever because it had a left-right bias. ************************************************************************ * * \subsection{Making wrapper args} * * ************************************************************************ During worker-wrapper stuff we may end up with an unlifted thing which we want to let-bind without losing laziness. So we add a void argument. E.g. f = /\a -> \x y z -> E::Int# -- E does not mention x,y,z ==> fw = /\ a -> \void -> E f = /\ a -> \x y z -> fw realworld We use the state-token type which generates no code. -} mkWorkerArgs :: DynFlags -> [Var] -> Type -- Type of body -> ([Var], -- Lambda bound args [Var]) -- Args at call site mkWorkerArgs dflags args res_ty | any isId args || not needsAValueLambda = (args, args) | otherwise = (args ++ [voidArgId], args ++ [voidPrimId]) where needsAValueLambda = isUnliftedType res_ty || not (gopt Opt_FunToThunk dflags) -- see Note [Protecting the last value argument] {- Note [Protecting the last value argument] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ If the user writes (\_ -> E), they might be intentionally disallowing the sharing of E. Since absence analysis and worker-wrapper are keen to remove such unused arguments, we add in a void argument to prevent the function from becoming a thunk. The user can avoid adding the void argument with the -ffun-to-thunk flag. However, this can create sharing, which may be bad in two ways. 1) It can create a space leak. 2) It can prevent inlining *under a lambda*. If w/w removes the last argument from a function f, then f now looks like a thunk, and so f can't be inlined *under a lambda*. Note [Join points and beta-redexes] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Originally, the worker would invoke the original function by calling it with arguments, thus producing a beta-redex for the simplifier to munch away: \x y z -> e => (\x y z -> e) wx wy wz Now that we have special rules about join points, however, this is Not Good if the original function is itself a join point, as then it may contain invocations of other join points: join j1 x = ... join j2 y = if y == 0 then 0 else j1 y => join j1 x = ... join $wj2 y# = let wy = I# y# in (\y -> if y == 0 then 0 else jump j1 y) wy join j2 y = case y of I# y# -> jump $wj2 y# There can't be an intervening lambda between a join point's declaration and its occurrences, so $wj2 here is wrong. But of course, this is easy enough to fix: ... let join $wj2 y# = let wy = I# y# in let y = wy in if y == 0 then 0 else j1 y ... Hence we simply do the beta-reduction here. (This would be harder if we had to worry about hygiene, but luckily wy is freshly generated.) 
Note [Join points returning functions] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ It is crucial that the arity of a join point depends on its *callers,* not its own syntax. What this means is that a join point can have "extra lambdas": f :: Int -> Int -> (Int, Int) -> Int f x y = join j (z, w) = \(u, v) -> ... in jump j (x, y) Typically this happens with functions that are seen as computing functions, rather than being curried. (The real-life example was GraphOps.addConflicts.) When we create the wrapper, it *must* be in "eta-contracted" form so that the jump has the right number of arguments: f x y = join $wj z' w' = \u' v' -> let {z = z'; w = w'; u = u'; v = v'} in ... j (z, w) = jump $wj z w (See Note [Join points and beta-redexes] for where the lets come from.) If j were a function, we would instead say f x y = let $wj = \z' w' u' v' -> let {z = z'; w = w'; u = u'; v = v'} in ... j (z, w) (u, v) = $wj z w u v Notice that the worker ends up with the same lambdas; it's only the wrapper we have to be concerned about. FIXME Currently the functionality to produce "eta-contracted" wrappers is unimplemented; we simply give up. ************************************************************************ * * \subsection{Coercion stuff} * * ************************************************************************ We really want to "look through" coerces. Reason: I've seen this situation: let f = coerce T (\s -> E) in \x -> case x of p -> coerce T' f q -> \s -> E2 r -> coerce T' f If only we w/w'd f, we'd get let f = coerce T (\s -> fw s) fw = \s -> E in ... Now we'll inline f to get let fw = \s -> E in \x -> case x of p -> fw q -> \s -> E2 r -> fw Now we'll see that fw has arity 1, and will arity expand the \x to get what we want. -} -- mkWWargs just does eta expansion -- is driven off the function type and arity. -- It chomps bites off foralls, arrows, newtypes -- and keeps repeating that until it's satisfied the supplied arity mkWWargs :: TCvSubst -- Freshening substitution to apply to the type -- See Note [Freshen WW arguments] -> Type -- The type of the function -> [Demand] -- Demands and one-shot info for value arguments -> UniqSM ([Var], -- Wrapper args CoreExpr -> CoreExpr, -- Wrapper fn CoreExpr -> CoreExpr, -- Worker fn Type) -- Type of wrapper body mkWWargs subst fun_ty demands | null demands = return ([], id, id, substTy subst fun_ty) | (dmd:demands') <- demands , Just (arg_ty, fun_ty') <- splitFunTy_maybe fun_ty = do { uniq <- getUniqueM ; let arg_ty' = substTy subst arg_ty id = mk_wrap_arg uniq arg_ty' dmd ; (wrap_args, wrap_fn_args, work_fn_args, res_ty) <- mkWWargs subst fun_ty' demands' ; return (id : wrap_args, Lam id . wrap_fn_args, apply_or_bind_then work_fn_args (varToCoreExpr id), res_ty) } | Just (tv, fun_ty') <- splitForAllTy_maybe fun_ty = do { uniq <- getUniqueM ; let (subst', tv') = cloneTyVarBndr subst tv uniq -- See Note [Freshen WW arguments] ; (wrap_args, wrap_fn_args, work_fn_args, res_ty) <- mkWWargs subst' fun_ty' demands ; return (tv' : wrap_args, Lam tv' . wrap_fn_args, apply_or_bind_then work_fn_args (mkTyArg (mkTyVarTy tv')), res_ty) } | Just (co, rep_ty) <- topNormaliseNewType_maybe fun_ty -- The newtype case is for when the function has -- a newtype after the arrow (rare) -- -- It's also important when we have a function returning (say) a pair -- wrapped in a newtype, at least if CPR analysis can look -- through such newtypes, which it probably can since they are -- simply coerces. 
= do { (wrap_args, wrap_fn_args, work_fn_args, res_ty) <- mkWWargs subst rep_ty demands ; let co' = substCo subst co ; return (wrap_args, \e -> Cast (wrap_fn_args e) (mkSymCo co'), \e -> work_fn_args (Cast e co'), res_ty) } | otherwise = WARN( True, ppr fun_ty ) -- Should not happen: if there is a demand return ([], id, id, substTy subst fun_ty) -- then there should be a function arrow where -- See Note [Join points and beta-redexes] apply_or_bind_then k arg (Lam bndr body) = mkCoreLet (NonRec bndr arg) (k body) -- Important that arg is fresh! apply_or_bind_then k arg fun = k $ mkCoreApp (text "mkWWargs") fun arg applyToVars :: [Var] -> CoreExpr -> CoreExpr applyToVars vars fn = mkVarApps fn vars mk_wrap_arg :: Unique -> Type -> Demand -> Id mk_wrap_arg uniq ty dmd = mkSysLocalOrCoVar (fsLit "w") uniq ty `setIdDemandInfo` dmd {- Note [Freshen WW arguments] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ When we do a worker/wrapper split, we must not use in-scope names as the arguments of the worker, else we'll get name capture. E.g. -- y1 is in scope from further out f x = ..y1.. If we accidentally choose y1 as a worker argument disaster results: fww y1 y2 = let x = (y1,y2) in ...y1... To avoid this: * We use a fresh unique for both type-variable and term-variable binders Originally we lacked this freshness for type variables, and that led to the very obscure Trac #12562. (A type variable in the worker shadowed an outer term-variable binding.) * Because of this cloning we have to substitute in the type/kind of the new binders. That's why we carry the TCvSubst through mkWWargs. So we need a decent in-scope set, just in case that type/kind itself has foralls. We get this from the free vars of the RHS of the function since those are the only variables that might be captured. It's a lazy thunk, which will only be poked if the type/kind has a forall. Another tricky case was when f :: forall a. a -> forall a. a->a (i.e. with shadowing), and then the worker used the same 'a' twice. ************************************************************************ * * \subsection{Strictness stuff} * * ************************************************************************ -} mkWWstr :: DynFlags -> FamInstEnvs -> [Var] -- Wrapper args; have their demand info on them -- *Includes type variables* -> UniqSM (Bool, -- Is this useful [Var], -- Worker args CoreExpr -> CoreExpr, -- Wrapper body, lacking the worker call -- and without its lambdas -- This fn adds the unboxing CoreExpr -> CoreExpr) -- Worker body, lacking the original body of the function, -- and lacking its lambdas. -- This fn does the reboxing mkWWstr _ _ [] = return (False, [], nop_fn, nop_fn) mkWWstr dflags fam_envs (arg : args) = do (useful1, args1, wrap_fn1, work_fn1) <- mkWWstr_one dflags fam_envs arg (useful2, args2, wrap_fn2, work_fn2) <- mkWWstr dflags fam_envs args return (useful1 || useful2, args1 ++ args2, wrap_fn1 . wrap_fn2, work_fn1 . work_fn2) {- Note [Unpacking arguments with product and polymorphic demands] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ The argument is unpacked in a case if it has a product type and has a strict *and* used demand put on it. I.e., arguments, with demands such as the following ones: <S,U(U, L)> <S(L,S),U> will be unpacked, but <S,U> or <B,U> will not, because the pieces aren't used. This is quite important otherwise we end up unpacking massive tuples passed to the bottoming function. 
Example: f :: ((Int,Int) -> String) -> (Int,Int) -> a f g pr = error (g pr) main = print (f fst (1, error "no")) Does 'main' print "error 1" or "error no"? We don't really want 'f' to unbox its second argument. This actually happened in GHC's onwn source code, in Packages.applyPackageFlag, which ended up un-boxing the enormous DynFlags tuple, and being strict in the as-yet-un-filled-in pkgState files. -} ---------------------- -- mkWWstr_one wrap_arg = (useful, work_args, wrap_fn, work_fn) -- * wrap_fn assumes wrap_arg is in scope, -- brings into scope work_args (via cases) -- * work_fn assumes work_args are in scope, a -- brings into scope wrap_arg (via lets) mkWWstr_one :: DynFlags -> FamInstEnvs -> Var -> UniqSM (Bool, [Var], CoreExpr -> CoreExpr, CoreExpr -> CoreExpr) mkWWstr_one dflags fam_envs arg | isTyVar arg = return (False, [arg], nop_fn, nop_fn) -- See Note [Worker-wrapper for bottoming functions] | isAbsDmd dmd , Just work_fn <- mk_absent_let dflags arg -- Absent case. We can't always handle absence for arbitrary -- unlifted types, so we need to choose just the cases we can --- (that's what mk_absent_let does) = return (True, [], nop_fn, work_fn) -- See Note [Worthy functions for Worker-Wrapper split] | isSeqDmd dmd -- `seq` demand; evaluate in wrapper in the hope -- of dropping seqs in the worker = let arg_w_unf = arg `setIdUnfolding` evaldUnfolding -- Tell the worker arg that it's sure to be evaluated -- so that internal seqs can be dropped in return (True, [arg_w_unf], mk_seq_case arg, nop_fn) -- Pass the arg, anyway, even if it is in theory discarded -- Consider -- f x y = x `seq` y -- x gets a (Eval (Poly Abs)) demand, but if we fail to pass it to the worker -- we ABSOLUTELY MUST record that x is evaluated in the wrapper. -- Something like: -- f x y = x `seq` fw y -- fw y = let x{Evald} = error "oops" in (x `seq` y) -- If we don't pin on the "Evald" flag, the seq doesn't disappear, and -- we end up evaluating the absent thunk. -- But the Evald flag is pretty weird, and I worry that it might disappear -- during simplification, so for now I've just nuked this whole case | isStrictDmd dmd , Just cs <- splitProdDmd_maybe dmd -- See Note [Unpacking arguments with product and polymorphic demands] , Just (data_con, inst_tys, inst_con_arg_tys, co) <- deepSplitProductType_maybe fam_envs (idType arg) , cs `equalLength` inst_con_arg_tys -- See Note [mkWWstr and unsafeCoerce] = do { (uniq1:uniqs) <- getUniquesM ; let unpk_args = zipWith3 mk_ww_arg uniqs inst_con_arg_tys cs unbox_fn = mkUnpackCase (Var arg) co uniq1 data_con unpk_args arg_no_unf = zapStableUnfolding arg -- See Note [Zap unfolding when beta-reducing] -- in Simplify.hs; and see Trac #13890 rebox_fn = Let (NonRec arg_no_unf con_app) con_app = mkConApp2 data_con inst_tys unpk_args `mkCast` mkSymCo co ; (_, worker_args, wrap_fn, work_fn) <- mkWWstr dflags fam_envs unpk_args ; return (True, worker_args, unbox_fn . wrap_fn, work_fn . rebox_fn) } -- Don't pass the arg, rebox instead | otherwise -- Other cases = return (False, [arg], nop_fn, nop_fn) where dmd = idDemandInfo arg mk_ww_arg uniq ty sub_dmd = setIdDemandInfo (mk_ww_local uniq ty) sub_dmd ---------------------- nop_fn :: CoreExpr -> CoreExpr nop_fn body = body {- Note [mkWWstr and unsafeCoerce] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ By using unsafeCoerce, it is possible to make the number of demands fail to match the number of constructor arguments; this happened in Trac #8037. If so, the worker/wrapper split doesn't work right and we get a Core Lint bug. 
The fix here is simply to decline to do w/w if that happens. Note [Record evaluated-ness in worker/wrapper] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Suppose we have data T = MkT !Int Int f :: T -> T f x = e and f's is strict, and has the CPR property. The we are going to generate this w/w split f x = case x of MkT x1 x2 -> case $wf x1 x2 of (# r1, r2 #) -> MkT r1 r2 $wfw x1 x2 = let x = MkT x1 x2 in case e of MkT r1 r2 -> (# r1, r2 #) Note that * In the worker $wf, inside 'e' we can be sure that x1 will be evaluated (it came from unpacking the argument MkT. But that's no immediately apparent in $wf * In the wrapper 'f', which we'll inline at call sites, we can be sure that 'r1' has been evaluated (because it came from unpacking the result MkT. But that is not immediately apparent from the wrapper code. Missing these facts isn't unsound, but it loses possible future opportunities for optimisation. Solution: use setCaseBndrEvald when creating (A) The arg binders x1,x2 in mkWstr_one See Trac #13077, test T13077 (B) The result binders r1,r2 in mkWWcpr_help See Trace #13077, test T13077a And Trac #13027 comment:20, item (4) to record that the relevant binder is evaluated. ************************************************************************ * * Type scrutiny that is specific to demand analysis * * ************************************************************************ Note [Do not unpack class dictionaries] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ If we have f :: Ord a => [a] -> Int -> a {-# INLINABLE f #-} and we worker/wrapper f, we'll get a worker with an INLINABLE pragma (see Note [Worker-wrapper for INLINABLE functions] in WorkWrap), which can still be specialised by the type-class specialiser, something like fw :: Ord a => [a] -> Int# -> a BUT if f is strict in the Ord dictionary, we might unpack it, to get fw :: (a->a->Bool) -> [a] -> Int# -> a and the type-class specialiser can't specialise that. An example is Trac #6056. Moreover, dictionaries can have a lot of fields, so unpacking them can increase closure sizes. Conclusion: don't unpack dictionaries. -} deepSplitProductType_maybe :: FamInstEnvs -> Type -> Maybe (DataCon, [Type], [(Type, StrictnessMark)], Coercion) -- If deepSplitProductType_maybe ty = Just (dc, tys, arg_tys, co) -- then dc @ tys (args::arg_tys) :: rep_ty -- co :: ty ~ rep_ty -- Why do we return the strictness of the data-con arguments? -- Answer: see Note [Record evaluated-ness in worker/wrapper] deepSplitProductType_maybe fam_envs ty | let (co, ty1) = topNormaliseType_maybe fam_envs ty `orElse` (mkRepReflCo ty, ty) , Just (tc, tc_args) <- splitTyConApp_maybe ty1 , Just con <- isDataProductTyCon_maybe tc , not (isClassTyCon tc) -- See Note [Do not unpack class dictionaries] , let arg_tys = dataConInstArgTys con tc_args strict_marks = dataConRepStrictness con = Just (con, tc_args, zipEqual "dspt" arg_tys strict_marks, co) deepSplitProductType_maybe _ _ = Nothing deepSplitCprType_maybe :: FamInstEnvs -> ConTag -> Type -> Maybe (DataCon, [Type], [(Type, StrictnessMark)], Coercion) -- If deepSplitCprType_maybe n ty = Just (dc, tys, arg_tys, co) -- then dc @ tys (args::arg_tys) :: rep_ty -- co :: ty ~ rep_ty -- Why do we return the strictness of the data-con arguments? 
-- Answer: see Note [Record evaluated-ness in worker/wrapper] deepSplitCprType_maybe fam_envs con_tag ty | let (co, ty1) = topNormaliseType_maybe fam_envs ty `orElse` (mkRepReflCo ty, ty) , Just (tc, tc_args) <- splitTyConApp_maybe ty1 , isDataTyCon tc , let cons = tyConDataCons tc , cons `lengthAtLeast` con_tag -- This might not be true if we import the -- type constructor via a .hs-bool file (#8743) , let con = cons `getNth` (con_tag - fIRST_TAG) arg_tys = dataConInstArgTys con tc_args strict_marks = dataConRepStrictness con = Just (con, tc_args, zipEqual "dsct" arg_tys strict_marks, co) deepSplitCprType_maybe _ _ _ = Nothing findTypeShape :: FamInstEnvs -> Type -> TypeShape -- Uncover the arrow and product shape of a type -- The data type TypeShape is defined in Demand -- See Note [Trimming a demand to a type] in Demand findTypeShape fam_envs ty | Just (tc, tc_args) <- splitTyConApp_maybe ty , Just con <- isDataProductTyCon_maybe tc = TsProd (map (findTypeShape fam_envs) $ dataConInstArgTys con tc_args) | Just (_, res) <- splitFunTy_maybe ty = TsFun (findTypeShape fam_envs res) | Just (_, ty') <- splitForAllTy_maybe ty = findTypeShape fam_envs ty' | Just (_, ty') <- topNormaliseType_maybe fam_envs ty = findTypeShape fam_envs ty' | otherwise = TsUnk {- ************************************************************************ * * \subsection{CPR stuff} * * ************************************************************************ @mkWWcpr@ takes the worker/wrapper pair produced from the strictness info and adds in the CPR transformation. The worker returns an unboxed tuple containing non-CPR components. The wrapper takes this tuple and re-produces the correct structured output. The non-CPR results appear ordered in the unboxed tuple as if by a left-to-right traversal of the result structure. -} mkWWcpr :: Bool -> FamInstEnvs -> Type -- function body type -> DmdResult -- CPR analysis results -> UniqSM (Bool, -- Is w/w'ing useful? CoreExpr -> CoreExpr, -- New wrapper CoreExpr -> CoreExpr, -- New worker Type) -- Type of worker's body mkWWcpr opt_CprAnal fam_envs body_ty res -- CPR explicitly turned off (or in -O0) | not opt_CprAnal = return (False, id, id, body_ty) -- CPR is turned on by default for -O and O2 | otherwise = case returnsCPR_maybe res of Nothing -> return (False, id, id, body_ty) -- No CPR info Just con_tag | Just stuff <- deepSplitCprType_maybe fam_envs con_tag body_ty -> mkWWcpr_help stuff | otherwise -- See Note [non-algebraic or open body type warning] -> WARN( True, text "mkWWcpr: non-algebraic or open body type" <+> ppr body_ty ) return (False, id, id, body_ty) mkWWcpr_help :: (DataCon, [Type], [(Type,StrictnessMark)], Coercion) -> UniqSM (Bool, CoreExpr -> CoreExpr, CoreExpr -> CoreExpr, Type) mkWWcpr_help (data_con, inst_tys, arg_tys, co) | [arg1@(arg_ty1, _)] <- arg_tys , isUnliftedType arg_ty1 -- Special case when there is a single result of unlifted type -- -- Wrapper: case (..call worker..) of x -> C x -- Worker: case ( ..body.. ) of C x -> x = do { (work_uniq : arg_uniq : _) <- getUniquesM ; let arg = mk_ww_local arg_uniq arg1 con_app = mkConApp2 data_con inst_tys [arg] `mkCast` mkSymCo co ; return ( True , \ wkr_call -> Case wkr_call arg (exprType con_app) [(DEFAULT, [], con_app)] , \ body -> mkUnpackCase body co work_uniq data_con [arg] (varToCoreExpr arg) -- varToCoreExpr important here: arg can be a coercion -- Lacking this caused Trac #10658 , arg_ty1 ) } | otherwise -- The general case -- Wrapper: case (..call worker..) 
of (# a, b #) -> C a b -- Worker: case ( ...body... ) of C a b -> (# a, b #) = do { (work_uniq : wild_uniq : uniqs) <- getUniquesM ; let wrap_wild = mk_ww_local wild_uniq (ubx_tup_ty,MarkedStrict) args = zipWith mk_ww_local uniqs arg_tys ubx_tup_ty = exprType ubx_tup_app ubx_tup_app = mkCoreUbxTup (map fst arg_tys) (map varToCoreExpr args) con_app = mkConApp2 data_con inst_tys args `mkCast` mkSymCo co ; return (True , \ wkr_call -> Case wkr_call wrap_wild (exprType con_app) [(DataAlt (tupleDataCon Unboxed (length arg_tys)), args, con_app)] , \ body -> mkUnpackCase body co work_uniq data_con args ubx_tup_app , ubx_tup_ty ) } mkUnpackCase :: CoreExpr -> Coercion -> Unique -> DataCon -> [Id] -> CoreExpr -> CoreExpr -- (mkUnpackCase e co uniq Con args body) -- returns -- case e |> co of bndr { Con args -> body } mkUnpackCase (Tick tickish e) co uniq con args body -- See Note [Profiling and unpacking] = Tick tickish (mkUnpackCase e co uniq con args body) mkUnpackCase scrut co uniq boxing_con unpk_args body = Case casted_scrut bndr (exprType body) [(DataAlt boxing_con, unpk_args, body)] where casted_scrut = scrut `mkCast` co bndr = mk_ww_local uniq (exprType casted_scrut, MarkedStrict) {- Note [non-algebraic or open body type warning] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ There are a few cases where the W/W transformation is told that something returns a constructor, but the type at hand doesn't really match this. One real-world example involves unsafeCoerce: foo = IO a foo = unsafeCoerce c_exit foreign import ccall "c_exit" c_exit :: IO () Here CPR will tell you that `foo` returns a () constructor for sure, but trying to create a worker/wrapper for type `a` obviously fails. (This was a real example until ee8e792 in libraries/base.) It does not seem feasible to avoid all such cases already in the analyser (and after all, the analysis is not really wrong), so we simply do nothing here in mkWWcpr. But we still want to emit warning with -DDEBUG, to hopefully catch other cases where something went avoidably wrong. Note [Profiling and unpacking] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ If the original function looked like f = \ x -> {-# SCC "foo" #-} E then we want the CPR'd worker to look like \ x -> {-# SCC "foo" #-} (case E of I# x -> x) and definitely not \ x -> case ({-# SCC "foo" #-} E) of I# x -> x) This transform doesn't move work or allocation from one cost centre to another. Later [SDM]: presumably this is because we want the simplifier to eliminate the case, and the scc would get in the way? I'm ok with including the case itself in the cost centre, since it is morally part of the function (post transformation) anyway. ************************************************************************ * * \subsection{Utilities} * * ************************************************************************ Note [Absent errors] ~~~~~~~~~~~~~~~~~~~~ We make a new binding for Ids that are marked absent, thus let x = absentError "x :: Int" The idea is that this binding will never be used; but if it buggily is used we'll get a runtime error message. Coping with absence for *unlifted* types is important; see, for example, Trac #4306. For these we find a suitable literal, using Literal.absentLiteralOf. We don't have literals for every primitive type, so the function is partial. Note: I did try the experiment of using an error thunk for unlifted things too, relying on the simplifier to drop it as dead code. 
But this is fragile - It fails when profiling is on, which disables various optimisations - It fails when reboxing happens. E.g. data T = MkT Int Int# f p@(MkT a _) = ...g p.... where g is /lazy/ in 'p', but only uses the first component. Then 'f' is /strict/ in 'p', and only uses the first component. So we only pass that component to the worker for 'f', which reconstructs 'p' to pass it to 'g'. Alas we can't say ...f (MkT a (absentError Int# "blah"))... because `MkT` is strict in its Int# argument, so we get an absentError exception when we shouldn't. Very annoying! So absentError is only used for lifted types. -} mk_absent_let :: DynFlags -> Id -> Maybe (CoreExpr -> CoreExpr) mk_absent_let dflags arg | not (isUnliftedType arg_ty) = Just (Let (NonRec lifted_arg abs_rhs)) | Just tc <- tyConAppTyCon_maybe arg_ty , Just lit <- absentLiteralOf tc = Just (Let (NonRec arg (Lit lit))) | arg_ty `eqType` voidPrimTy = Just (Let (NonRec arg (Var voidPrimId))) | otherwise = WARN( True, text "No absent value for" <+> ppr arg_ty ) Nothing where lifted_arg = arg `setIdStrictness` exnSig -- Note in strictness signature that this is bottoming -- (for the sake of the "empty case scrutinee not known to -- diverge for sure lint" warning) arg_ty = idType arg abs_rhs = mkAbsentErrorApp arg_ty msg msg = showSDoc (gopt_set dflags Opt_SuppressUniques) (ppr arg <+> ppr (idType arg)) -- We need to suppress uniques here because otherwise they'd -- end up in the generated code as strings. This is bad for -- determinism, because with different uniques the strings -- will have different lengths and hence different costs for -- the inliner leading to different inlining. -- See also Note [Unique Determinism] in Unique mk_seq_case :: Id -> CoreExpr -> CoreExpr mk_seq_case arg body = Case (Var arg) (sanitiseCaseBndr arg) (exprType body) [(DEFAULT, [], body)] sanitiseCaseBndr :: Id -> Id -- The argument we are scrutinising has the right type to be -- a case binder, so it's convenient to re-use it for that purpose. -- But we *must* throw away all its IdInfo. In particular, the argument -- will have demand info on it, and that demand info may be incorrect for -- the case binder. e.g. case ww_arg of ww_arg { I# x -> ... } -- Quite likely ww_arg isn't used in '...'. The case may get discarded -- if the case binder says "I'm demanded". This happened in a situation -- like (x+y) `seq` .... sanitiseCaseBndr id = id `setIdInfo` vanillaIdInfo mk_ww_local :: Unique -> (Type, StrictnessMark) -> Id -- The StrictnessMark comes from the data constructor and says -- whether this field is strict -- See Note [Record evaluated-ness in worker/wrapper] mk_ww_local uniq (ty,str) = setCaseBndrEvald str $ mkSysLocalOrCoVar (fsLit "ww") uniq ty
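{- Note [Worked example of the combined split]  (editorial illustration)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This note is not part of the original file; it is a hedged sketch of what the
pieces above combine to produce for a function that is strict in a boxed pair
and has the CPR property:

    f :: (Int, Int) -> Int        -- strict in the pair, result built with I#

    -- wrapper (kept small, inlined at call sites)
    f p = case p of (a, b) ->
          case $wf a b of r# -> I# r#

    -- worker
    $wf a b = let p = (a, b) in   -- reboxing; usually dead code
              case <original rhs of f> of I# r# -> r#

mkWWstr supplies the argument unboxing/reboxing, mkWWcpr the unboxed result
(here the single-unlifted-result special case), and mkWorkerArgs would add a
void argument only if every value argument had been dropped.
-}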
ezyang/ghc
compiler/stranal/WwLib.hs
bsd-3-clause
38,959
0
18
11,323
4,196
2,275
1,921
302
2
{-# LANGUAGE DeriveGeneric #-} module Distribution.Simple.Program.GHC ( GhcOptions(..), GhcMode(..), GhcOptimisation(..), GhcDynLinkMode(..), GhcProfAuto(..), ghcInvocation, renderGhcOptions, runGHC, ) where import Distribution.Compat.Semigroup as Semi import Distribution.Simple.GHC.ImplInfo import Distribution.Package import Distribution.PackageDescription hiding (Flag) import Distribution.ModuleName import Distribution.Simple.Compiler hiding (Flag) import Distribution.Simple.Setup import Distribution.Simple.Program.Types import Distribution.Simple.Program.Run import Distribution.System import Distribution.Text import Distribution.Verbosity import Distribution.Utils.NubList import Language.Haskell.Extension import GHC.Generics (Generic) import qualified Data.Map as M -- | A structured set of GHC options/flags -- data GhcOptions = GhcOptions { -- | The major mode for the ghc invocation. ghcOptMode :: Flag GhcMode, -- | Any extra options to pass directly to ghc. These go at the end and hence -- override other stuff. ghcOptExtra :: NubListR String, -- | Extra default flags to pass directly to ghc. These go at the beginning -- and so can be overridden by other stuff. ghcOptExtraDefault :: NubListR String, ----------------------- -- Inputs and outputs -- | The main input files; could be .hs, .hi, .c, .o, depending on mode. ghcOptInputFiles :: NubListR FilePath, -- | The names of input Haskell modules, mainly for @--make@ mode. ghcOptInputModules :: NubListR ModuleName, -- | Location for output file; the @ghc -o@ flag. ghcOptOutputFile :: Flag FilePath, -- | Location for dynamic output file in 'GhcStaticAndDynamic' mode; -- the @ghc -dyno@ flag. ghcOptOutputDynFile :: Flag FilePath, -- | Start with an empty search path for Haskell source files; -- the @ghc -i@ flag (@-i@ on it's own with no path argument). ghcOptSourcePathClear :: Flag Bool, -- | Search path for Haskell source files; the @ghc -i@ flag. ghcOptSourcePath :: NubListR FilePath, ------------- -- Packages -- | The unit ID the modules will belong to; the @ghc -this-unit-id@ -- flag (or @-this-package-key@ or @-package-name@ on older -- versions of GHC). This is a 'String' because we assume you've -- already figured out what the correct format for this string is -- (we need to handle backwards compatibility.) ghcOptThisUnitId :: Flag String, -- | GHC package databases to use, the @ghc -package-conf@ flag. ghcOptPackageDBs :: PackageDBStack, -- | The GHC packages to use. For compatability with old and new ghc, this -- requires both the short and long form of the package id; -- the @ghc -package@ or @ghc -package-id@ flags. ghcOptPackages :: NubListR (UnitId, PackageId, ModuleRenaming), -- | Start with a clean package set; the @ghc -hide-all-packages@ flag ghcOptHideAllPackages :: Flag Bool, -- | Don't automatically link in Haskell98 etc; the @ghc -- -no-auto-link-packages@ flag. ghcOptNoAutoLinkPackages :: Flag Bool, ----------------- -- Linker stuff -- | Names of libraries to link in; the @ghc -l@ flag. ghcOptLinkLibs :: NubListR FilePath, -- | Search path for libraries to link in; the @ghc -L@ flag. ghcOptLinkLibPath :: NubListR FilePath, -- | Options to pass through to the linker; the @ghc -optl@ flag. ghcOptLinkOptions :: NubListR String, -- | OSX only: frameworks to link in; the @ghc -framework@ flag. ghcOptLinkFrameworks :: NubListR String, -- | OSX only: Search path for frameworks to link in; the -- @ghc -framework-path@ flag. 
ghcOptLinkFrameworkDirs :: NubListR String, -- | Don't do the link step, useful in make mode; the @ghc -no-link@ flag. ghcOptNoLink :: Flag Bool, -- | Don't link in the normal RTS @main@ entry point; the @ghc -no-hs-main@ -- flag. ghcOptLinkNoHsMain :: Flag Bool, -------------------- -- C and CPP stuff -- | Options to pass through to the C compiler; the @ghc -optc@ flag. ghcOptCcOptions :: NubListR String, -- | Options to pass through to CPP; the @ghc -optP@ flag. ghcOptCppOptions :: NubListR String, -- | Search path for CPP includes like header files; the @ghc -I@ flag. ghcOptCppIncludePath :: NubListR FilePath, -- | Extra header files to include at CPP stage; the @ghc -optP-include@ flag. ghcOptCppIncludes :: NubListR FilePath, -- | Extra header files to include for old-style FFI; the @ghc -#include@ flag. ghcOptFfiIncludes :: NubListR FilePath, ---------------------------- -- Language and extensions -- | The base language; the @ghc -XHaskell98@ or @-XHaskell2010@ flag. ghcOptLanguage :: Flag Language, -- | The language extensions; the @ghc -X@ flag. ghcOptExtensions :: NubListR Extension, -- | A GHC version-dependent mapping of extensions to flags. This must be -- set to be able to make use of the 'ghcOptExtensions'. ghcOptExtensionMap :: M.Map Extension String, ---------------- -- Compilation -- | What optimisation level to use; the @ghc -O@ flag. ghcOptOptimisation :: Flag GhcOptimisation, -- | Emit debug info; the @ghc -g@ flag. ghcOptDebugInfo :: Flag Bool, -- | Compile in profiling mode; the @ghc -prof@ flag. ghcOptProfilingMode :: Flag Bool, -- | Automatically add profiling cost centers; the @ghc -fprof-auto*@ flags. ghcOptProfilingAuto :: Flag GhcProfAuto, -- | Use the \"split object files\" feature; the @ghc -split-objs@ flag. ghcOptSplitObjs :: Flag Bool, -- | Run N jobs simultaneously (if possible). ghcOptNumJobs :: Flag (Maybe Int), -- | Enable coverage analysis; the @ghc -fhpc -hpcdir@ flags. ghcOptHPCDir :: Flag FilePath, ---------------- -- GHCi -- | Extra GHCi startup scripts; the @-ghci-script@ flag ghcOptGHCiScripts :: NubListR FilePath, ------------------------ -- Redirecting outputs ghcOptHiSuffix :: Flag String, ghcOptObjSuffix :: Flag String, ghcOptDynHiSuffix :: Flag String, -- ^ only in 'GhcStaticAndDynamic' mode ghcOptDynObjSuffix :: Flag String, -- ^ only in 'GhcStaticAndDynamic' mode ghcOptHiDir :: Flag FilePath, ghcOptObjDir :: Flag FilePath, ghcOptOutputDir :: Flag FilePath, ghcOptStubDir :: Flag FilePath, -------------------- -- Dynamic linking ghcOptDynLinkMode :: Flag GhcDynLinkMode, ghcOptShared :: Flag Bool, ghcOptFPic :: Flag Bool, ghcOptDylibName :: Flag String, ghcOptRPaths :: NubListR FilePath, --------------- -- Misc flags -- | Get GHC to be quiet or verbose with what it's doing; the @ghc -v@ flag. ghcOptVerbosity :: Flag Verbosity, -- | Let GHC know that it is Cabal that's calling it. -- Modifies some of the GHC error messages. ghcOptCabal :: Flag Bool } deriving (Show, Generic) data GhcMode = GhcModeCompile -- ^ @ghc -c@ | GhcModeLink -- ^ @ghc@ | GhcModeMake -- ^ @ghc --make@ | GhcModeInteractive -- ^ @ghci@ \/ @ghc --interactive@ | GhcModeAbiHash -- ^ @ghc --abi-hash@ -- | GhcModeDepAnalysis -- ^ @ghc -M@ -- | GhcModeEvaluate -- ^ @ghc -e@ deriving (Show, Eq) data GhcOptimisation = GhcNoOptimisation -- ^ @-O0@ | GhcNormalOptimisation -- ^ @-O@ | GhcMaximumOptimisation -- ^ @-O2@ | GhcSpecialOptimisation String -- ^ e.g. 
@-Odph@ deriving (Show, Eq) data GhcDynLinkMode = GhcStaticOnly -- ^ @-static@ | GhcDynamicOnly -- ^ @-dynamic@ | GhcStaticAndDynamic -- ^ @-static -dynamic-too@ deriving (Show, Eq) data GhcProfAuto = GhcProfAutoAll -- ^ @-fprof-auto@ | GhcProfAutoToplevel -- ^ @-fprof-auto-top@ | GhcProfAutoExported -- ^ @-fprof-auto-exported@ deriving (Show, Eq) runGHC :: Verbosity -> ConfiguredProgram -> Compiler -> Platform -> GhcOptions -> IO () runGHC verbosity ghcProg comp platform opts = do runProgramInvocation verbosity (ghcInvocation ghcProg comp platform opts) ghcInvocation :: ConfiguredProgram -> Compiler -> Platform -> GhcOptions -> ProgramInvocation ghcInvocation prog comp platform opts = programInvocation prog (renderGhcOptions comp platform opts) renderGhcOptions :: Compiler -> Platform -> GhcOptions -> [String] renderGhcOptions comp _platform@(Platform _arch os) opts | compilerFlavor comp `notElem` [GHC, GHCJS] = error $ "Distribution.Simple.Program.GHC.renderGhcOptions: " ++ "compiler flavor must be 'GHC' or 'GHCJS'!" | otherwise = concat [ case flagToMaybe (ghcOptMode opts) of Nothing -> [] Just GhcModeCompile -> ["-c"] Just GhcModeLink -> [] Just GhcModeMake -> ["--make"] Just GhcModeInteractive -> ["--interactive"] Just GhcModeAbiHash -> ["--abi-hash"] -- Just GhcModeDepAnalysis -> ["-M"] -- Just GhcModeEvaluate -> ["-e", expr] , flags ghcOptExtraDefault , [ "-no-link" | flagBool ghcOptNoLink ] --------------- -- Misc flags , maybe [] verbosityOpts (flagToMaybe (ghcOptVerbosity opts)) , [ "-fbuilding-cabal-package" | flagBool ghcOptCabal , flagBuildingCabalPkg implInfo ] ---------------- -- Compilation , case flagToMaybe (ghcOptOptimisation opts) of Nothing -> [] Just GhcNoOptimisation -> ["-O0"] Just GhcNormalOptimisation -> ["-O"] Just GhcMaximumOptimisation -> ["-O2"] Just (GhcSpecialOptimisation s) -> ["-O" ++ s] -- eg -Odph , [ "-g" | flagDebugInfo implInfo && flagBool ghcOptDebugInfo ] , [ "-prof" | flagBool ghcOptProfilingMode ] , case flagToMaybe (ghcOptProfilingAuto opts) of _ | not (flagBool ghcOptProfilingMode) -> [] Nothing -> [] Just GhcProfAutoAll | flagProfAuto implInfo -> ["-fprof-auto"] | otherwise -> ["-auto-all"] -- not the same, but close Just GhcProfAutoToplevel | flagProfAuto implInfo -> ["-fprof-auto-top"] | otherwise -> ["-auto-all"] Just GhcProfAutoExported | flagProfAuto implInfo -> ["-fprof-auto-exported"] | otherwise -> ["-auto"] , [ "-split-objs" | flagBool ghcOptSplitObjs ] , case flagToMaybe (ghcOptHPCDir opts) of Nothing -> [] Just hpcdir -> ["-fhpc", "-hpcdir", hpcdir] , if parmakeSupported comp then case ghcOptNumJobs opts of NoFlag -> [] Flag n -> ["-j" ++ maybe "" show n] else [] -------------------- -- Dynamic linking , [ "-shared" | flagBool ghcOptShared ] , case flagToMaybe (ghcOptDynLinkMode opts) of Nothing -> [] Just GhcStaticOnly -> ["-static"] Just GhcDynamicOnly -> ["-dynamic"] Just GhcStaticAndDynamic -> ["-static", "-dynamic-too"] , [ "-fPIC" | flagBool ghcOptFPic ] , concat [ ["-dylib-install-name", libname] | libname <- flag ghcOptDylibName ] ------------------------ -- Redirecting outputs , concat [ ["-osuf", suf] | suf <- flag ghcOptObjSuffix ] , concat [ ["-hisuf", suf] | suf <- flag ghcOptHiSuffix ] , concat [ ["-dynosuf", suf] | suf <- flag ghcOptDynObjSuffix ] , concat [ ["-dynhisuf",suf] | suf <- flag ghcOptDynHiSuffix ] , concat [ ["-outputdir", dir] | dir <- flag ghcOptOutputDir , flagOutputDir implInfo ] , concat [ ["-odir", dir] | dir <- flag ghcOptObjDir ] , concat [ ["-hidir", dir] | dir <- flag ghcOptHiDir ] , concat [ 
["-stubdir", dir] | dir <- flag ghcOptStubDir , flagStubdir implInfo ] ----------------------- -- Source search path , [ "-i" | flagBool ghcOptSourcePathClear ] , [ "-i" ++ dir | dir <- flags ghcOptSourcePath ] -------------------- -- C and CPP stuff , [ "-I" ++ dir | dir <- flags ghcOptCppIncludePath ] , [ "-optP" ++ opt | opt <- flags ghcOptCppOptions ] , concat [ [ "-optP-include", "-optP" ++ inc] | inc <- flags ghcOptCppIncludes ] , [ "-#include \"" ++ inc ++ "\"" | inc <- flags ghcOptFfiIncludes, flagFfiIncludes implInfo ] , [ "-optc" ++ opt | opt <- flags ghcOptCcOptions ] ----------------- -- Linker stuff , [ "-optl" ++ opt | opt <- flags ghcOptLinkOptions ] , ["-l" ++ lib | lib <- flags ghcOptLinkLibs ] , ["-L" ++ dir | dir <- flags ghcOptLinkLibPath ] , if isOSX then concat [ ["-framework", fmwk] | fmwk <- flags ghcOptLinkFrameworks ] else [] , if isOSX then concat [ ["-framework-path", path] | path <- flags ghcOptLinkFrameworkDirs ] else [] , [ "-no-hs-main" | flagBool ghcOptLinkNoHsMain ] , [ "-dynload deploy" | not (null (flags ghcOptRPaths)) ] , concat [ [ "-optl-Wl,-rpath," ++ dir] | dir <- flags ghcOptRPaths ] ------------- -- Packages , concat [ [ case () of _ | unitIdSupported comp -> "-this-unit-id" | packageKeySupported comp -> "-this-package-key" | otherwise -> "-package-name" , this_arg ] | this_arg <- flag ghcOptThisUnitId ] , [ "-hide-all-packages" | flagBool ghcOptHideAllPackages ] , [ "-no-auto-link-packages" | flagBool ghcOptNoAutoLinkPackages ] , packageDbArgs implInfo (ghcOptPackageDBs opts) , concat $ if flagPackageId implInfo then let space "" = "" space xs = ' ' : xs in [ ["-package-id", display ipkgid ++ space (display rns)] | (ipkgid,_,rns) <- flags ghcOptPackages ] else [ ["-package", display pkgid] | (_,pkgid,_) <- flags ghcOptPackages ] ---------------------------- -- Language and extensions , if supportsHaskell2010 implInfo then [ "-X" ++ display lang | lang <- flag ghcOptLanguage ] else [] , [ case M.lookup ext (ghcOptExtensionMap opts) of Just arg -> arg Nothing -> error $ "Distribution.Simple.Program.GHC.renderGhcOptions: " ++ display ext ++ " not present in ghcOptExtensionMap." | ext <- flags ghcOptExtensions ] ---------------- -- GHCi , concat [ [ "-ghci-script", script ] | script <- flags ghcOptGHCiScripts , flagGhciScript implInfo ] --------------- -- Inputs , [ display modu | modu <- flags ghcOptInputModules ] , flags ghcOptInputFiles , concat [ [ "-o", out] | out <- flag ghcOptOutputFile ] , concat [ [ "-dyno", out] | out <- flag ghcOptOutputDynFile ] --------------- -- Extra , flags ghcOptExtra ] where implInfo = getImplInfo comp isOSX = os == OSX flag flg = flagToList (flg opts) flags flg = fromNubListR . flg $ opts flagBool flg = fromFlagOrDefault False (flg opts) verbosityOpts :: Verbosity -> [String] verbosityOpts verbosity | verbosity >= deafening = ["-v"] | verbosity >= normal = [] | otherwise = ["-w", "-v0"] -- | GHC <7.6 uses '-package-conf' instead of '-package-db'. packageDbArgsConf :: PackageDBStack -> [String] packageDbArgsConf dbstack = case dbstack of (GlobalPackageDB:UserPackageDB:dbs) -> concatMap specific dbs (GlobalPackageDB:dbs) -> ("-no-user-package-conf") : concatMap specific dbs _ -> ierror where specific (SpecificPackageDB db) = [ "-package-conf", db ] specific _ = ierror ierror = error $ "internal error: unexpected package db stack: " ++ show dbstack -- | GHC >= 7.6 uses the '-package-db' flag. See -- https://ghc.haskell.org/trac/ghc/ticket/5977. 
packageDbArgsDb :: PackageDBStack -> [String] -- special cases to make arguments prettier in common scenarios packageDbArgsDb dbstack = case dbstack of (GlobalPackageDB:UserPackageDB:dbs) | all isSpecific dbs -> concatMap single dbs (GlobalPackageDB:dbs) | all isSpecific dbs -> "-no-user-package-db" : concatMap single dbs dbs -> "-clear-package-db" : concatMap single dbs where single (SpecificPackageDB db) = [ "-package-db", db ] single GlobalPackageDB = [ "-global-package-db" ] single UserPackageDB = [ "-user-package-db" ] isSpecific (SpecificPackageDB _) = True isSpecific _ = False packageDbArgs :: GhcImplInfo -> PackageDBStack -> [String] packageDbArgs implInfo | flagPackageConf implInfo = packageDbArgsConf | otherwise = packageDbArgsDb -- ----------------------------------------------------------------------------- -- Boilerplate Monoid instance for GhcOptions instance Monoid GhcOptions where mempty = gmempty mappend = (Semi.<>) instance Semigroup GhcOptions where (<>) = gmappend
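-- Editor's sketch (not part of the original module): 'GhcOptions' is designed
-- to be built by record-updating 'mempty' and merged with '(<>)' via the
-- Monoid instance above.  This assumes 'toFlag' (from Distribution.Simple.Setup)
-- and 'toNubListR' (from Distribution.Utils.NubList) are in scope, which the
-- imports above provide; the file names below are invented for illustration.
exampleGhcOptions :: GhcOptions
exampleGhcOptions = mempty
  { ghcOptMode         = toFlag GhcModeMake
  , ghcOptInputFiles   = toNubListR ["src/Main.hs"]
  , ghcOptOutputFile   = toFlag "dist/build/example"
  , ghcOptOptimisation = toFlag GhcNormalOptimisation
  , ghcOptExtra        = toNubListR ["-Wall"]
  }
-- Given a configured 'Compiler' and 'Platform', the actual command line would
-- then be produced by 'renderGhcOptions comp platform exampleGhcOptions'.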
tolysz/prepare-ghcjs
spec-lts8/cabal/Cabal/Distribution/Simple/Program/GHC.hs
bsd-3-clause
17,265
0
17
4,529
3,275
1,796
1,479
273
26
{-# LANGUAGE TypeFamilies #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE TypeOperators #-} {-# LANGUAGE GADTs #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE ConstraintKinds #-} {-# LANGUAGE UndecidableInstances #-} {-# LANGUAGE PolyKinds #-} module IHaskell.Display.Widgets.Interactive (interactive, uncurryHList, Rec(..), Argument(..)) where import Data.Text import Data.Proxy import Data.Vinyl.Core import Data.Vinyl.Functor (Identity(..), Const(..)) import Data.Vinyl.Derived (HList) import Data.Vinyl.Lens (type (∈)) import Data.Vinyl.TypeLevel (RecAll) import IHaskell.Display import IHaskell.Display.Widgets.Types import IHaskell.Display.Widgets.Common import qualified IHaskell.Display.Widgets.Singletons as S (SField, Field(..)) import IHaskell.Display.Widgets.Box.FlexBox import IHaskell.Display.Widgets.Bool.CheckBox import IHaskell.Display.Widgets.String.Text import IHaskell.Display.Widgets.Int.BoundedInt.IntSlider import IHaskell.Display.Widgets.Float.BoundedFloat.FloatSlider import IHaskell.Display.Widgets.Output data WidgetConf a where WidgetConf :: (RecAll Attr (WidgetFields (SuitableWidget a)) ToPairs, FromWidget a) => WrappedWidget (SuitableWidget a) (SuitableHandler a) (SuitableField a) a -> WidgetConf a type family WithTypes (ts :: [*]) (r :: *) :: * where WithTypes '[] r = r WithTypes (x ': xs) r = (x -> WithTypes xs r) uncurryHList :: WithTypes ts r -> HList ts -> r uncurryHList f RNil = f uncurryHList f (Identity x :& xs) = uncurryHList (f x) xs -- Consistent type variables are required to make things play nicely with vinyl data Constructor a where Constructor :: RecAll Attr (WidgetFields (SuitableWidget a)) ToPairs => IO (IPythonWidget (SuitableWidget a)) -> Constructor a newtype Getter a = Getter (IPythonWidget (SuitableWidget a) -> IO a) newtype EventSetter a = EventSetter (IPythonWidget (SuitableWidget a) -> IO () -> IO ()) newtype Initializer a = Initializer (IPythonWidget (SuitableWidget a) -> Argument a -> IO ()) data RequiredWidget a where RequiredWidget :: RecAll Attr (WidgetFields (SuitableWidget a)) ToPairs => IPythonWidget (SuitableWidget a) -> RequiredWidget a -- Zipping vinyl records in various ways applyGetters :: Rec Getter ts -> Rec RequiredWidget ts -> IO (HList ts) applyGetters RNil RNil = return RNil applyGetters (Getter getter :& gs) (RequiredWidget widget :& ws) = do val <- getter widget rest <- applyGetters gs ws return $ Identity val :& rest applyEventSetters :: Rec EventSetter ts -> Rec RequiredWidget ts -> IO () -> IO () applyEventSetters RNil RNil _ = return () applyEventSetters (EventSetter setter :& xs) (RequiredWidget widget :& ws) handler = do setter widget handler applyEventSetters xs ws handler setInitialValues :: Rec Initializer ts -> Rec RequiredWidget ts -> Rec Argument ts -> IO () setInitialValues RNil RNil RNil = return () setInitialValues (Initializer initializer :& fs) (RequiredWidget widget :& ws) (argument :& vs) = do initializer widget argument setInitialValues fs ws vs extractConstructor :: WidgetConf x -> Constructor x extractConstructor (WidgetConf wr) = Constructor $ construct wr extractGetter :: WidgetConf x -> Getter x extractGetter (WidgetConf wr) = Getter $ getValue wr extractEventSetter :: WidgetConf x -> EventSetter x extractEventSetter (WidgetConf wr) = EventSetter $ setEvent wr extractInitializer :: WidgetConf x -> Initializer x extractInitializer WidgetConf{} = Initializer initializer createWidget :: Constructor a -> IO (RequiredWidget a) createWidget (Constructor con) = fmap RequiredWidget con mkChildren :: Rec 
RequiredWidget a -> [ChildWidget] mkChildren widgets = let childRecord = rmap (\(RequiredWidget w) -> Const (ChildWidget w)) widgets in recordToList childRecord class MakeConfs (ts :: [*]) where mkConfs :: proxy ts -> Rec WidgetConf ts instance MakeConfs '[] where mkConfs _ = RNil instance (FromWidget t, MakeConfs ts) => MakeConfs (t ': ts) where mkConfs _ = WidgetConf wrapped :& mkConfs (Proxy :: Proxy ts) interactive :: (IHaskellDisplay r, MakeConfs ts) => (HList ts -> r) -> Rec Argument ts -> IO FlexBox interactive func = let confs = mkConfs Proxy in liftToWidgets func confs -- | Transform a function (HList ts -> r) to one which: 1) Uses widgets to accept the arguments 2) -- Accepts initial values for the arguments 3) Creates a compound FlexBox widget with an embedded -- OutputWidget for display liftToWidgets :: IHaskellDisplay r => (HList ts -> r) -> Rec WidgetConf ts -> Rec Argument ts -> IO FlexBox liftToWidgets func rc initvals = do let constructors = rmap extractConstructor rc getters = rmap extractGetter rc eventSetters = rmap extractEventSetter rc initializers = rmap extractInitializer rc bx <- mkFlexBox out <- mkOutputWidget -- Create a list of widgets widgets <- rtraverse createWidget constructors let handler = do vals <- applyGetters getters widgets replaceOutput out $ func vals -- Apply handler to all widgets applyEventSetters eventSetters widgets handler -- Set initial values for all widgets setInitialValues initializers widgets initvals -- applyValueSetters valueSetters widgets $ getList defvals setField out Width 500 setField bx Orientation VerticalOrientation -- Set children for the FlexBox let children = mkChildren widgets setField bx Children $ children ++ [ChildWidget out] return bx data WrappedWidget w h f a where WrappedWidget :: (FieldType h ~ IO (), FieldType f ~ a, h ∈ WidgetFields w, f ∈ WidgetFields w, ToPairs (Attr h), IHaskellWidget (IPythonWidget w), ToPairs (Attr f)) => IO (IPythonWidget w) -> S.SField h -> S.SField f -> WrappedWidget w h f a construct :: WrappedWidget w h f a -> IO (IPythonWidget w) construct (WrappedWidget cons _ _) = cons getValue :: WrappedWidget w h f a -> IPythonWidget w -> IO a getValue (WrappedWidget _ _ field) widget = getField widget field setEvent :: WrappedWidget w h f a -> IPythonWidget w -> IO () -> IO () setEvent (WrappedWidget _ h _) widget = setField widget h class RecAll Attr (WidgetFields (SuitableWidget a)) ToPairs => FromWidget a where type SuitableWidget a :: WidgetType type SuitableHandler a :: S.Field type SuitableField a :: S.Field data Argument a initializer :: IPythonWidget (SuitableWidget a) -> Argument a -> IO () wrapped :: WrappedWidget (SuitableWidget a) (SuitableHandler a) (SuitableField a) a instance FromWidget Bool where type SuitableWidget Bool = CheckBoxType type SuitableHandler Bool = S.ChangeHandler type SuitableField Bool = S.BoolValue data Argument Bool = BoolVal Bool initializer w (BoolVal b) = setField w BoolValue b wrapped = WrappedWidget mkCheckBox ChangeHandler BoolValue instance FromWidget Text where type SuitableWidget Text = TextType type SuitableHandler Text = S.SubmitHandler type SuitableField Text = S.StringValue data Argument Text = TextVal Text initializer w (TextVal txt) = setField w StringValue txt wrapped = WrappedWidget mkTextWidget SubmitHandler StringValue instance FromWidget Integer where type SuitableWidget Integer = IntSliderType type SuitableHandler Integer = S.ChangeHandler type SuitableField Integer = S.IntValue data Argument Integer = IntVal Integer | IntRange (Integer, 
Integer, Integer) wrapped = WrappedWidget mkIntSlider ChangeHandler IntValue initializer w (IntVal int) = setField w IntValue int initializer w (IntRange (v, l, u)) = do setField w IntValue v setField w MinInt l setField w MaxInt u instance FromWidget Double where type SuitableWidget Double = FloatSliderType type SuitableHandler Double = S.ChangeHandler type SuitableField Double = S.FloatValue data Argument Double = FloatVal Double | FloatRange (Double, Double, Double) wrapped = WrappedWidget mkFloatSlider ChangeHandler FloatValue initializer w (FloatVal d) = setField w FloatValue d initializer w (FloatRange (v, l, u)) = do setField w FloatValue v setField w MinFloat l setField w MaxFloat u
artuuge/IHaskell
ihaskell-display/ihaskell-widgets/src/IHaskell/Display/Widgets/Interactive.hs
mit
8,503
0
14
1,821
2,542
1,297
1,245
-1
-1
{-# LANGUAGE BangPatterns, TemplateHaskell #-} import Control.Monad import Control.Applicative import Control.Monad.IO.Class (liftIO) import qualified Data.ByteString.Lazy as BSL import Data.Binary (encode, decode) import Remote pingServer :: ProcessM () pingServer = forever $ do them <- expect send them () pingClient :: Int -> ProcessId -> ProcessM () pingClient n them = do us <- getSelfPid replicateM_ n $ send them us >> (expect :: ProcessM ()) liftIO . putStrLn $ "Did " ++ show n ++ " pings" initialProcess :: String -> ProcessM () initialProcess "SERVER" = do us <- getSelfPid liftIO $ BSL.writeFile "pingServer.pid" (encode us) pingServer initialProcess "CLIENT" = do n <- liftIO $ getLine them <- liftIO $ decode <$> BSL.readFile "pingServer.pid" pingClient (read n) them main :: IO () main = remoteInit (Just "config") [] initialProcess
tweag/distributed-process
benchmarks/remote/Latency.hs
bsd-3-clause
876
0
10
156
307
153
154
27
1
{-# LANGUAGE Arrows #-} {-# LANGUAGE FlexibleContexts #-} module Main where import qualified QuickCheck import Opaleye.SQLite (Column, Nullable, Query, QueryArr, (.==), (.>)) import qualified Opaleye.SQLite as O import qualified Database.SQLite.Simple as PGS import qualified Data.Profunctor.Product.Default as D import qualified Data.Profunctor.Product as PP import qualified Data.Profunctor as P import qualified Data.Ord as Ord import qualified Data.List as L import Data.Monoid ((<>)) import qualified Data.String as String import qualified System.Exit as Exit import qualified System.Environment as Environment import qualified Control.Applicative as A import qualified Control.Arrow as Arr import Control.Arrow ((&&&), (***), (<<<), (>>>)) import GHC.Int (Int64) {- Status ====== The tests here are very superficial and pretty much the bare minimum that needs to be tested. Future ====== The overall approach to testing should probably go as follows. 1. Test all individual units of functionality by running them on a table and checking that they produce the expected result. This type of testing is amenable to the QuickCheck approach if we reimplement the individual units of functionality in Haskell. 2. Test that "the denotation is an arrow morphism" is correct. I think in combination with 1. this is all that will be required to demonstrate that the library is correct. "The denotation is an arrow morphism" means that for each arrow operation, the denotation preserves the operation. If we have f :: QueryArr wiresa wiresb then [f] should be something like [f] :: a -> IO [b] f as = runQuery (toValues as >>> f) For example, take the operation >>>. We need to check that [f >>> g] = [f] >>> [g] for all f and g, where [] means the denotation. We would also want to check that [id] = id and [first f] = first [f] I think checking these operations is sufficient because all the other QueryArr operations are implemented in terms of them. (Here I'm taking a slight liberty as `a -> IO [b]` is not directly an arrow, but it could be made one straightforwardly. (For the laws to be satisfied, perhaps we have to assume that the IO actions commute.)) I don't think this type of testing is amenable to QuickCheck. It seems we have to check the properties for arbitrary arrows indexed by arbitrary types. I don't think QuickCheck supports this sort of randomised testing. Note ---- This seems to be equivalent to just reimplementing Opaleye in Haskell-side terms and comparing the results of queries run in both ways. -} twoIntTable :: String -> O.Table (Column O.PGInt4, Column O.PGInt4) (Column O.PGInt4, Column O.PGInt4) twoIntTable n = O.Table n (PP.p2 (O.required "column1", O.required "column2")) table1 :: O.Table (Column O.PGInt4, Column O.PGInt4) (Column O.PGInt4, Column O.PGInt4) table1 = twoIntTable "table1" table1F :: O.Table (Column O.PGInt4, Column O.PGInt4) (Column O.PGInt4, Column O.PGInt4) table1F = fmap (\(col1, col2) -> (col1 + col2, col1 - col2)) table1 -- This is implicitly testing our ability to handle upper case letters in table names. 
table2 :: O.Table (Column O.PGInt4, Column O.PGInt4) (Column O.PGInt4, Column O.PGInt4) table2 = twoIntTable "TABLE2" table3 :: O.Table (Column O.PGInt4, Column O.PGInt4) (Column O.PGInt4, Column O.PGInt4) table3 = twoIntTable "table3" table4 :: O.Table (Column O.PGInt4, Column O.PGInt4) (Column O.PGInt4, Column O.PGInt4) table4 = twoIntTable "table4" table5 :: O.Table (Maybe (Column O.PGInt4), Maybe (Column O.PGInt4)) (Column O.PGInt4, Column O.PGInt4) table5 = O.Table "table5" (PP.p2 (O.optional "column1", O.optional "column2")) table6 :: O.Table (Column O.PGText, Column O.PGText) (Column O.PGText, Column O.PGText) table6 = O.Table "table6" (PP.p2 (O.required "column1", O.required "column2")) tableKeywordColNames :: O.Table (Column O.PGInt4, Column O.PGInt4) (Column O.PGInt4, Column O.PGInt4) tableKeywordColNames = O.Table "keywordtable" (PP.p2 (O.required "column", O.required "where")) table1Q :: Query (Column O.PGInt4, Column O.PGInt4) table1Q = O.queryTable table1 table2Q :: Query (Column O.PGInt4, Column O.PGInt4) table2Q = O.queryTable table2 table3Q :: Query (Column O.PGInt4, Column O.PGInt4) table3Q = O.queryTable table3 table6Q :: Query (Column O.PGText, Column O.PGText) table6Q = O.queryTable table6 table1dataG :: Num a => [(a, a)] table1dataG = [ (1, 100) , (1, 100) , (1, 200) , (2, 300) ] table1data :: [(Int, Int)] table1data = table1dataG table1columndata :: [(Column O.PGInt4, Column O.PGInt4)] table1columndata = table1dataG table2dataG :: Num a => [(a, a)] table2dataG = [ (1, 100) , (3, 400) ] table2data :: [(Int, Int)] table2data = table2dataG table2columndata :: [(Column O.PGInt4, Column O.PGInt4)] table2columndata = table2dataG table3dataG :: Num a => [(a, a)] table3dataG = [ (1, 50) ] table3data :: [(Int, Int)] table3data = table3dataG table3columndata :: [(Column O.PGInt4, Column O.PGInt4)] table3columndata = table3dataG table4dataG :: Num a => [(a, a)] table4dataG = [ (1, 10) , (2, 20) ] table4data :: [(Int, Int)] table4data = table4dataG table4columndata :: [(Column O.PGInt4, Column O.PGInt4)] table4columndata = table4dataG table6data :: [(String, String)] table6data = [("xy", "a"), ("z", "a"), ("more text", "a")] table6columndata :: [(Column O.PGText, Column O.PGText)] table6columndata = map (\(column1, column2) -> (O.pgString column1, O.pgString column2)) table6data -- We have to quote the table names here because upper case letters in -- table names are treated as lower case unless the name is quoted! -- -- We have to issue multiple statements because sqlite-simple's -- execute_ only executes the first in a ;-separated list, unlike -- postgresql-simple -- -- http://hackage.haskell.org/package/sqlite-simple-0.4.9.0/docs/Database-SQLite-Simple.html#v:execute dropAndCreateTable :: String -> (String, [String]) -> [PGS.Query] dropAndCreateTable columnType (t, cols) = map String.fromString drop_ where drop_ = [ "DROP TABLE IF EXISTS \"" ++ t ++ "\"" , "CREATE TABLE \"" ++ t ++ "\"" ++ " (" ++ commas cols ++ ")" ] integer c = ("\"" ++ c ++ "\"" ++ " " ++ columnType) commas = L.intercalate "," . map integer dropAndCreateTableInt :: (String, [String]) -> [PGS.Query] dropAndCreateTableInt = dropAndCreateTable "integer" dropAndCreateTableText :: (String, [String]) -> [PGS.Query] dropAndCreateTableText = dropAndCreateTable "text" -- We have to quote the table names here because upper case letters in -- table names are treated as lower case unless the name is quoted! 
-- -- We have to issue multiple statements because sqlite-simple's -- execute_ only executes the first in a ;-separated list, unlike -- postgresql-simple -- -- http://hackage.haskell.org/package/sqlite-simple-0.4.9.0/docs/Database-SQLite-Simple.html#v:execute dropAndCreateTableSerial :: (String, [String]) -> [PGS.Query] dropAndCreateTableSerial (t, cols) = map String.fromString drop_ where drop_ = [ "DROP TABLE IF EXISTS " ++ t , "CREATE TABLE " ++ t ++ " (" ++ commas cols ++ ")" ] integer c = ("\"" ++ c ++ "\"" ++ " SERIAL") commas = L.intercalate "," . map integer type Table_ = (String, [String]) -- This should ideally be derived from the table definition above columns2 :: String -> Table_ columns2 t = (t, ["column1", "column2"]) -- This should ideally be derived from the table definition above tables :: [Table_] tables = map columns2 ["table1", "TABLE2", "table3", "table4"] ++ [("keywordtable", ["column", "where"])] serialTables :: [Table_] serialTables = map columns2 ["table5"] dropAndCreateDB :: PGS.Connection -> IO () dropAndCreateDB conn = do mapM_ execute tables executeTextTable mapM_ executeSerial serialTables where execute = mapM_ (PGS.execute_ conn) . dropAndCreateTableInt executeTextTable = (mapM_ (PGS.execute_ conn) . dropAndCreateTableText . columns2) "table6" executeSerial = mapM_ (PGS.execute_ conn) . dropAndCreateTableSerial type Test = PGS.Connection -> IO Bool testG :: D.Default O.QueryRunner wires haskells => Query wires -> ([haskells] -> b) -> PGS.Connection -> IO b testG q p conn = do result <- O.runQuery conn q return (p result) testSelect :: Test testSelect = testG table1Q (\r -> L.sort table1data == L.sort r) testProduct :: Test testProduct = testG query (\r -> L.sort (A.liftA2 (,) table1data table2data) == L.sort r) where query = table1Q &&& table2Q testRestrict :: Test testRestrict = testG query (\r -> filter ((== 1) . fst) (L.sort table1data) == L.sort r) where query = proc () -> do t <- table1Q -< () O.restrict -< fst t .== 1 Arr.returnA -< t testNum :: Test testNum = testG query expected where query :: Query (Column O.PGInt4) query = proc () -> do t <- table1Q -< () Arr.returnA -< op t expected = \r -> L.sort (map op table1data) == L.sort r op :: Num a => (a, a) -> a op (x, y) = abs (x - 5) * signum (x - 4) * (y * y + 1) testDiv :: Test testDiv = testG query expected where query :: Query (Column O.PGFloat8) query = proc () -> do t <- Arr.arr (O.doubleOfInt *** O.doubleOfInt) <<< table1Q -< () Arr.returnA -< op t expected r = L.sort (map (op . toDoubles) table1data) == L.sort r op :: Fractional a => (a, a) -> a -- Choosing 0.5 here as it should be exactly representable in -- floating point op (x, y) = y / x * 0.5 toDoubles :: (Int, Int) -> (Double, Double) toDoubles = fromIntegral *** fromIntegral -- TODO: need to implement and test case_ returning tuples testCase :: Test testCase = testG q (== expected) where q :: Query (Column O.PGInt4) q = table1Q >>> proc (i, j) -> do Arr.returnA -< O.case_ [(j .== 100, 12), (i .== 1, 21)] 33 expected :: [Int] expected = [12, 12, 21, 33] testDistinct :: Test testDistinct = testG (O.distinct table1Q) (\r -> L.sort (L.nub table1data) == L.sort r) -- FIXME: the unsafeCoerceColumn is currently needed because the type -- changes required for aggregation are not currently dealt with by -- Opaleye. 
aggregateCoerceFIXME :: QueryArr (Column O.PGInt4) (Column O.PGInt8) aggregateCoerceFIXME = Arr.arr aggregateCoerceFIXME' aggregateCoerceFIXME' :: Column a -> Column O.PGInt8 aggregateCoerceFIXME' = O.unsafeCoerceColumn testAggregate :: Test testAggregate = testG (Arr.second aggregateCoerceFIXME <<< O.aggregate (PP.p2 (O.groupBy, O.sum)) table1Q) (\r -> [(1, 400) :: (Int, Int64), (2, 300)] == L.sort r) testAggregateProfunctor :: Test testAggregateProfunctor = testG q expected where q = O.aggregate (PP.p2 (O.groupBy, countsum)) table1Q expected r = [(1, 1200) :: (Int, Int64), (2, 300)] == L.sort r countsum = P.dimap (\x -> (x,x)) (\(x, y) -> aggregateCoerceFIXME' x * y) (PP.p2 (O.sum, O.count)) {- testStringArrayAggregate :: Test testStringArrayAggregate = testG q expected where q = O.aggregate (PP.p2 (O.arrayAgg, O.min)) table6Q expected r = [(map fst table6data, minimum (map snd table6data))] == r -} testStringAggregate :: Test testStringAggregate = testG q expected where q = O.aggregate (PP.p2 ((O.stringAgg . O.pgString) "_", O.groupBy)) table6Q expected r = [( (foldl1 (\x y -> x ++ "_" ++ y) . map fst) table6data , head (map snd table6data))] == r testOrderByG :: O.Order (Column O.PGInt4, Column O.PGInt4) -> ((Int, Int) -> (Int, Int) -> Ordering) -> Test testOrderByG orderQ order = testG (O.orderBy orderQ table1Q) (L.sortBy order table1data ==) testOrderBy :: Test testOrderBy = testOrderByG (O.desc snd) (flip (Ord.comparing snd)) testOrderBy2 :: Test testOrderBy2 = testOrderByG (O.desc fst <> O.asc snd) (flip (Ord.comparing fst) <> Ord.comparing snd) testOrderBySame :: Test testOrderBySame = testOrderByG (O.desc fst <> O.asc fst) (flip (Ord.comparing fst) <> Ord.comparing fst) testLOG :: (Query (Column O.PGInt4, Column O.PGInt4) -> Query (Column O.PGInt4, Column O.PGInt4)) -> ([(Int, Int)] -> [(Int, Int)]) -> Test testLOG olQ ol = testG (olQ (orderQ table1Q)) (ol (order table1data) ==) where orderQ = O.orderBy (O.desc snd) order = L.sortBy (flip (Ord.comparing snd)) testLimit :: Test testLimit = testLOG (O.limit 2) (take 2) testOffset :: Test testOffset = testLOG (O.offset 2) (drop 2) testLimitOffset :: Test testLimitOffset = testLOG (O.limit 2 . O.offset 2) (take 2 . drop 2) testOffsetLimit :: Test testOffsetLimit = testLOG (O.offset 2 . O.limit 2) (drop 2 . 
take 2) testDistinctAndAggregate :: Test testDistinctAndAggregate = testG q expected where q = O.distinct table1Q &&& (Arr.second aggregateCoerceFIXME <<< O.aggregate (PP.p2 (O.groupBy, O.sum)) table1Q) expected r = L.sort r == L.sort expectedResult expectedResult = A.liftA2 (,) (L.nub table1data) [(1 :: Int, 400 :: Int64), (2, 300)] one :: Query (Column O.PGInt4) one = Arr.arr (const (1 :: Column O.PGInt4)) -- The point of the "double" tests is to ensure that we do not -- introduce name clashes in the operations which create new column names testDoubleG :: (Eq haskells, D.Default O.QueryRunner columns haskells) => (QueryArr () (Column O.PGInt4) -> QueryArr () columns) -> [haskells] -> Test testDoubleG q expected1 = testG (q one &&& q one) (== expected2) where expected2 = A.liftA2 (,) expected1 expected1 testDoubleDistinct :: Test testDoubleDistinct = testDoubleG O.distinct [1 :: Int] testDoubleAggregate :: Test testDoubleAggregate = testDoubleG (O.aggregate O.count) [1 :: Int64] testDoubleLeftJoin :: Test testDoubleLeftJoin = testDoubleG lj [(1 :: Int, Just (1 :: Int))] where lj :: Query (Column O.PGInt4) -> Query (Column O.PGInt4, Column (Nullable O.PGInt4)) lj q = O.leftJoin q q (uncurry (.==)) testDoubleValues :: Test testDoubleValues = testDoubleG v [1 :: Int] where v :: Query (Column O.PGInt4) -> Query (Column O.PGInt4) v _ = O.values [1] testDoubleUnionAll :: Test testDoubleUnionAll = testDoubleG u [1 :: Int, 1] where u q = q `O.unionAll` q aLeftJoin :: Query ((Column O.PGInt4, Column O.PGInt4), (Column (Nullable O.PGInt4), Column (Nullable O.PGInt4))) aLeftJoin = O.leftJoin table1Q table3Q (\(l, r) -> fst l .== fst r) testLeftJoin :: Test testLeftJoin = testG aLeftJoin (== expected) where expected :: [((Int, Int), (Maybe Int, Maybe Int))] expected = [ ((1, 100), (Just 1, Just 50)) , ((1, 100), (Just 1, Just 50)) , ((1, 200), (Just 1, Just 50)) , ((2, 300), (Nothing, Nothing)) ] testLeftJoinNullable :: Test testLeftJoinNullable = testG q (== expected) where q :: Query ((Column O.PGInt4, Column O.PGInt4), ((Column (Nullable O.PGInt4), Column (Nullable O.PGInt4)), (Column (Nullable O.PGInt4), Column (Nullable O.PGInt4)))) q = O.leftJoin table3Q aLeftJoin cond cond (x, y) = fst x .== fst (fst y) expected :: [((Int, Int), ((Maybe Int, Maybe Int), (Maybe Int, Maybe Int)))] expected = [ ((1, 50), ((Just 1, Just 100), (Just 1, Just 50))) , ((1, 50), ((Just 1, Just 100), (Just 1, Just 50))) , ((1, 50), ((Just 1, Just 200), (Just 1, Just 50))) ] testThreeWayProduct :: Test testThreeWayProduct = testG q (== expected) where q = A.liftA3 (,,) table1Q table2Q table3Q expected = A.liftA3 (,,) table1data table2data table3data testValues :: Test testValues = testG (O.values values) (values' ==) where values :: [(Column O.PGInt4, Column O.PGInt4)] values = [ (1, 10) , (2, 100) ] values' :: [(Int, Int)] values' = [ (1, 10) , (2, 100) ] {- FIXME: does not yet work testValuesDouble :: Test testValuesDouble = testG (O.values values) (values' ==) where values :: [(Column O.PGInt4, Column O.PGFloat8)] values = [ (1, 10.0) , (2, 100.0) ] values' :: [(Int, Double)] values' = [ (1, 10.0) , (2, 100.0) ] -} testValuesEmpty :: Test testValuesEmpty = testG (O.values values) (values' ==) where values :: [Column O.PGInt4] values = [] values' :: [Int] values' = [] testUnionAll :: Test testUnionAll = testG (table1Q `O.unionAll` table2Q) (\r -> L.sort (table1data ++ table2data) == L.sort r) testTableFunctor :: Test testTableFunctor = testG (O.queryTable table1F) (result ==) where result = fmap (\(col1, col2) -> (col1 
+ col2, col1 - col2)) table1data -- TODO: This is getting too complicated testUpdate :: Test testUpdate conn = do _ <- O.runUpdate conn table4 update cond result <- runQueryTable4 if result /= expected then return False else do _ <- O.runDelete conn table4 condD resultD <- runQueryTable4 if resultD /= expectedD then return False else return True {- else do returned <- O.runInsertReturning conn table4 insertT returning _ <- O.runInsertMany conn table4 insertTMany resultI <- runQueryTable4 return ((resultI == expectedI) && (returned == expectedR)) -} where update (x, y) = (x + y, x - y) cond (_, y) = y .> 15 condD (x, _) = x .> 20 expected :: [(Int, Int)] expected = [ (1, 10) , (22, -18)] expectedD :: [(Int, Int)] expectedD = [(1, 10)] runQueryTable4 = O.runQuery conn (O.queryTable table4) insertT :: (Column O.PGInt4, Column O.PGInt4) insertT = (1, 2) insertTMany :: [(Column O.PGInt4, Column O.PGInt4)] insertTMany = [(20, 30), (40, 50)] expectedI :: [(Int, Int)] expectedI = [(1, 10), (1, 2), (20, 30), (40, 50)] returning (x, y) = x - y expectedR :: [Int] expectedR = [-1] testKeywordColNames :: Test testKeywordColNames conn = do let q :: IO [(Int, Int)] q = O.runQuery conn (O.queryTable tableKeywordColNames) _ <- q return True testInsertSerial :: Test testInsertSerial conn = do _ <- O.runInsert conn table5 (Just 10, Just 20) _ <- O.runInsert conn table5 (Just 30, Nothing) _ <- O.runInsert conn table5 (Nothing, Nothing) _ <- O.runInsert conn table5 (Nothing, Just 40) resultI <- O.runQuery conn (O.queryTable table5) return (resultI == expected) where expected :: [(Int, Int)] expected = [ (10, 20) , (30, 1) , (1, 2) , (2, 40) ] allTests :: [Test] allTests = [testSelect, testProduct, testRestrict, testNum, testDiv, testCase, testDistinct, testAggregate, testAggregateProfunctor, {-testStringAggregate,-} testOrderBy, testOrderBy2, testOrderBySame, testLimit{- , testOffset, testLimitOffset, testOffsetLimit -}, testDistinctAndAggregate, testDoubleDistinct, testDoubleAggregate, testDoubleLeftJoin{-, testDoubleValues -} , testDoubleUnionAll, testLeftJoin, testLeftJoinNullable, testThreeWayProduct{-, testValues, testValuesEmpty-}, testUnionAll, testTableFunctor, testUpdate, testKeywordColNames{- , testInsertSerial-} ] main :: IO () main = do conn <- PGS.open ":memory:" dropAndCreateDB conn let insert (writeable, columndata) = mapM_ (O.runInsert conn writeable) columndata mapM_ insert [ (table1, table1columndata) , (table2, table2columndata) , (table3, table3columndata) , (table4, table4columndata) ] insert (table6, table6columndata) -- Need to run quickcheck after table data has been inserted QuickCheck.run conn results <- mapM ($ conn) allTests print results let passed = and results putStrLn (if passed then "All passed" else "Failure") Exit.exitWith (if passed then Exit.ExitSuccess else Exit.ExitFailure 1)
bergmark/haskell-opaleye
opaleye-sqlite/Test/Test.hs
bsd-3-clause
20,924
4
17
5,204
6,421
3,511
2,910
375
3
module Stackage.Init (stackageInit) where import Data.List (isInfixOf, isPrefixOf) import Stackage.Util import System.FilePath ((</>)) stackageInit :: IO () stackageInit = do c <- getCabalRoot let config = c </> "config" orig <- readFile config -- bypass laziness _ <- return $! length orig writeFile config $ unlines $ go $ lines orig where go = addStackage . map commentHackage . filter (\s -> not $ "stackage" `isInfixOf` s) addStackage [] = stackageLines [] addStackage (l:ls) | "remote-repo-cache:" `isPrefixOf` l = stackageLines $ l : ls | otherwise = l : addStackage ls stackageLines x = "remote-repo: stackage:http://hackage.haskell.org/packages/archive" : "remote-repo: stackage-extra:http://hackage.haskell.org/packages/archive" : x commentHackage s | s == "remote-repo: hackage.haskell.org:http://hackage.haskell.org/packages/archive" = "--" ++ s | otherwise = s
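-- Editor's note (illustration only, not part of the original module): given a
-- ~/.cabal/config containing, for example,
--
--   remote-repo: hackage.haskell.org:http://hackage.haskell.org/packages/archive
--   remote-repo-cache: /home/user/.cabal/packages
--
-- 'stackageInit' rewrites it to
--
--   --remote-repo: hackage.haskell.org:http://hackage.haskell.org/packages/archive
--   remote-repo: stackage:http://hackage.haskell.org/packages/archive
--   remote-repo: stackage-extra:http://hackage.haskell.org/packages/archive
--   remote-repo-cache: /home/user/.cabal/packages
--
-- that is: lines already mentioning "stackage" are dropped, the hackage
-- remote-repo line is commented out, and the two stackage repos are inserted
-- just before the first remote-repo-cache line (or appended if none exists).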
sinelaw/stackage
Stackage/Init.hs
mit
1,027
0
12
269
274
138
136
25
2
{-# LANGUAGE LambdaCase #-} module NoBlockArgumentsFail3 where import Control.Monad foo :: IO () foo = forM [1 .. 10] \case Just 3 -> print x
shlevy/ghc
testsuite/tests/parser/should_fail/NoBlockArgumentsFail3.hs
bsd-3-clause
146
0
9
29
50
27
23
-1
-1
{- | Module : XMonad.Util.Paste Copyright : (C) 2008 Jérémy Bobbio, gwern License : BSD3 Maintainer : none Stability : unstable Portability : unportable A module for sending key presses to windows. This modules provides generalized and specialized functions for this task. -} module XMonad.Util.Paste ( -- * Usage -- $usage pasteSelection, pasteString, pasteChar, sendKey, sendKeyWindow, noModMask ) where import XMonad (io, theRoot, withDisplay, X ()) import Graphics.X11 import Graphics.X11.Xlib.Extras (none, setEventType, setKeyEvent) import Control.Monad.Reader (asks) import XMonad.Operations (withFocused) import Data.Char (isUpper) import Data.Maybe (listToMaybe) import XMonad.Util.XSelection (getSelection) import XMonad.Util.EZConfig (parseKey) import Text.ParserCombinators.ReadP (readP_to_S) {- $usage Import this module into your xmonad.hs as usual: > import XMonad.Util.Paste And use the functions. They all return 'X' (), and so are appropriate for use as keybindings. Example: > , ((m, xK_d), pasteString "foo bar") ] Don't expect too much of the functions; they probably don't work on complex texts. -} -- | Paste the current X mouse selection. Note that this uses 'getSelection' from -- "XMonad.Util.XSelection" and so is heir to its flaws. pasteSelection :: X () pasteSelection = getSelection >>= pasteString -- | Send a string to the window which is currently focused. This function correctly -- handles capitalization. Warning: in dealing with capitalized characters, this assumes a QWERTY layout. pasteString :: String -> X () pasteString = mapM_ (\x -> if isUpper x || x `elem` "~!@#$%^&*()_+{}|:\"<>?" then pasteChar shiftMask x else pasteChar noModMask x) {- | Send a character to the current window. This is more low-level. Remember that you must handle the case of capitalization appropriately. That is, from the window's perspective: > pasteChar mod2Mask 'F' ~> "f" You would want to do something like: > pasteChar shiftMask 'F' Note that this function makes use of 'stringToKeysym', and so will probably have trouble with any 'Char' outside ASCII. -} pasteChar :: KeyMask -> Char -> X () pasteChar m c = sendKey m $ maybe (stringToKeysym [c]) fst $ listToMaybe $ readP_to_S parseKey [c] sendKey :: KeyMask -> KeySym -> X () sendKey = (withFocused .) . sendKeyWindow -- | The primitive. Allows you to send any combination of 'KeyMask' and 'KeySym' to any 'Window' you specify. sendKeyWindow :: KeyMask -> KeySym -> Window -> X () sendKeyWindow mods key w = withDisplay $ \d -> do rootw <- asks theRoot keycode <- io $ keysymToKeycode d key io $ allocaXEvent $ \ev -> do setEventType ev keyPress setKeyEvent ev w rootw none mods keycode True sendEvent d w True keyPressMask ev setEventType ev keyRelease sendEvent d w True keyReleaseMask ev
pjones/xmonad-test
vendor/xmonad-contrib/XMonad/Util/Paste.hs
bsd-2-clause
3,244
0
13
896
477
260
217
36
2
{-# LANGUAGE DeriveDataTypeable #-} ----------------------------------------------------------------------------- -- | -- Module : XMonad.Hooks.DynamicHooks -- Copyright : (c) Braden Shepherdson 2008 -- License : BSD-style (as xmonad) -- -- Maintainer : [email protected] -- Stability : unstable -- Portability : unportable -- -- One-shot and permanent ManageHooks that can be updated at runtime. -- ----------------------------------------------------------------------------- module XMonad.Hooks.DynamicHooks ( -- * Usage -- $usage dynamicMasterHook ,addDynamicHook ,updateDynamicHook ,oneShotHook ) where import XMonad import qualified XMonad.Util.ExtensibleState as XS import Data.List import Data.Maybe (listToMaybe) import Data.Monoid -- $usage -- Provides two new kinds of 'ManageHooks' that can be defined at runtime. -- -- * One-shot 'ManageHooks' that are deleted after they execute. -- -- * Permanent 'ManageHooks' (unless you want to destroy them) -- -- Note that you will lose all dynamically defined 'ManageHook's when you @mod+q@! -- If you want them to last, you should create them as normal in your @xmonad.hs@. -- -- To use this module, add 'dynamicMasterHook' to your 'manageHook': -- -- > xmonad { manageHook = myManageHook <+> dynamicMasterHook } -- -- You can then use the supplied functions in your keybindings: -- -- > ((modMask,xK_a), oneShotHook (className =? "example") doFloat) -- data DynamicHooks = DynamicHooks { transients :: [(Query Bool, ManageHook)] , permanent :: ManageHook } deriving Typeable instance ExtensionClass DynamicHooks where initialValue = DynamicHooks [] idHook -- this hook is always executed, and the contents of the stored hooks checked. -- note that transient hooks are run second, therefore taking precedence -- over permanent ones on matters such as which workspace to shift to. -- doFloat and doIgnore are idempotent. -- | Master 'ManageHook' that must be in your @xmonad.hs@ 'ManageHook'. dynamicMasterHook :: ManageHook dynamicMasterHook = (ask >>= \w -> liftX (do dh <- XS.get (Endo f) <- runQuery (permanent dh) w ts <- mapM (\(q,a) -> runQuery q w >>= \x -> return (x,(q, a))) (transients dh) let (ts',nts) = partition fst ts gs <- mapM (flip runQuery w . snd . snd) ts' let (Endo g) = maybe (Endo id) id $ listToMaybe gs XS.put $ dh { transients = map snd nts } return $ Endo $ f . g )) -- | Appends the given 'ManageHook' to the permanent dynamic 'ManageHook'. addDynamicHook :: ManageHook -> X () addDynamicHook m = updateDynamicHook (<+> m) -- | Modifies the permanent 'ManageHook' with an arbitrary function. updateDynamicHook :: (ManageHook -> ManageHook) -> X () updateDynamicHook f = XS.modify $ \dh -> dh { permanent = f (permanent dh) } -- | Creates a one-shot 'ManageHook'. Note that you have to specify the two -- parts of the 'ManageHook' separately. Where you would usually write: -- -- > className =? "example" --> doFloat -- -- you must call 'oneShotHook' as -- -- > oneShotHook (className =? "example") doFloat -- oneShotHook :: Query Bool -> ManageHook -> X () oneShotHook q a = XS.modify $ \dh -> dh { transients = (q,a):(transients dh) }
pjones/xmonad-test
vendor/xmonad-contrib/XMonad/Hooks/DynamicHooks.hs
bsd-2-clause
3,274
0
20
616
553
317
236
33
1
{-# LANGUAGE CPP #-} -- | Our extended FCode monad. -- We add a mapping from names to CmmExpr, to support local variable names in -- the concrete C-- code. The unique supply of the underlying FCode monad -- is used to grab a new unique for each local variable. -- In C--, a local variable can be declared anywhere within a proc, -- and it scopes from the beginning of the proc to the end. Hence, we have -- to collect declarations as we parse the proc, and feed the environment -- back in circularly (to avoid a two-pass algorithm). module StgCmmExtCode ( CmmParse, unEC, Named(..), Env, loopDecls, getEnv, withName, getName, newLocal, newLabel, newBlockId, newFunctionName, newImport, lookupLabel, lookupName, code, emit, emitLabel, emitAssign, emitStore, getCode, getCodeR, getCodeScoped, emitOutOfLine, withUpdFrameOff, getUpdFrameOff ) where import qualified StgCmmMonad as F import StgCmmMonad (FCode, newUnique) import Cmm import CLabel import MkGraph -- import BasicTypes import BlockId import DynFlags import FastString import Module import UniqFM import Unique import Control.Monad (liftM, ap) #if __GLASGOW_HASKELL__ < 709 import Control.Applicative (Applicative(..)) #endif -- | The environment contains variable definitions or blockids. data Named = VarN CmmExpr -- ^ Holds CmmLit(CmmLabel ..) which gives the label type, -- eg, RtsLabel, ForeignLabel, CmmLabel etc. | FunN PackageKey -- ^ A function name from this package | LabelN BlockId -- ^ A blockid of some code or data. -- | An environment of named things. type Env = UniqFM Named -- | Local declarations that are in scope during code generation. type Decls = [(FastString,Named)] -- | Does a computation in the FCode monad, with a current environment -- and a list of local declarations. Returns the resulting list of declarations. newtype CmmParse a = EC { unEC :: String -> Env -> Decls -> FCode (Decls, a) } type ExtCode = CmmParse () returnExtFC :: a -> CmmParse a returnExtFC a = EC $ \_ _ s -> return (s, a) thenExtFC :: CmmParse a -> (a -> CmmParse b) -> CmmParse b thenExtFC (EC m) k = EC $ \c e s -> do (s',r) <- m c e s; unEC (k r) c e s' instance Functor CmmParse where fmap = liftM instance Applicative CmmParse where pure = return (<*>) = ap instance Monad CmmParse where (>>=) = thenExtFC return = returnExtFC instance HasDynFlags CmmParse where getDynFlags = EC (\_ _ d -> do dflags <- getDynFlags return (d, dflags)) -- | Takes the variable declarations and imports from the monad -- and makes an environment, which is looped back into the computation. -- In this way, we can have embedded declarations that scope over the whole -- procedure, and imports that scope over the entire module. -- Discards the local declaration contained within decl' -- loopDecls :: CmmParse a -> CmmParse a loopDecls (EC fcode) = EC $ \c e globalDecls -> do (_, a) <- F.fixC $ \ ~(decls, _) -> fcode c (addListToUFM e decls) globalDecls return (globalDecls, a) -- | Get the current environment from the monad. getEnv :: CmmParse Env getEnv = EC $ \_ e s -> return (s, e) -- | Get the current context name from the monad getName :: CmmParse String getName = EC $ \c _ s -> return (s, c) -- | Set context name for a sub-parse withName :: String -> CmmParse a -> CmmParse a withName c' (EC fcode) = EC $ \_ e s -> fcode c' e s addDecl :: FastString -> Named -> ExtCode addDecl name named = EC $ \_ _ s -> return ((name, named) : s, ()) -- | Add a new variable to the list of local declarations. -- The CmmExpr says where the value is stored. 
addVarDecl :: FastString -> CmmExpr -> ExtCode addVarDecl var expr = addDecl var (VarN expr) -- | Add a new label to the list of local declarations. addLabel :: FastString -> BlockId -> ExtCode addLabel name block_id = addDecl name (LabelN block_id) -- | Create a fresh local variable of a given type. newLocal :: CmmType -- ^ data type -> FastString -- ^ name of variable -> CmmParse LocalReg -- ^ register holding the value newLocal ty name = do u <- code newUnique let reg = LocalReg u ty addVarDecl name (CmmReg (CmmLocal reg)) return reg -- | Allocate a fresh label. newLabel :: FastString -> CmmParse BlockId newLabel name = do u <- code newUnique addLabel name (mkBlockId u) return (mkBlockId u) newBlockId :: CmmParse BlockId newBlockId = code F.newLabelC -- | Add add a local function to the environment. newFunctionName :: FastString -- ^ name of the function -> PackageKey -- ^ package of the current module -> ExtCode newFunctionName name pkg = addDecl name (FunN pkg) -- | Add an imported foreign label to the list of local declarations. -- If this is done at the start of the module the declaration will scope -- over the whole module. newImport :: (FastString, CLabel) -> CmmParse () newImport (name, cmmLabel) = addVarDecl name (CmmLit (CmmLabel cmmLabel)) -- | Lookup the BlockId bound to the label with this name. -- If one hasn't been bound yet, create a fresh one based on the -- Unique of the name. lookupLabel :: FastString -> CmmParse BlockId lookupLabel name = do env <- getEnv return $ case lookupUFM env name of Just (LabelN l) -> l _other -> mkBlockId (newTagUnique (getUnique name) 'L') -- | Lookup the location of a named variable. -- Unknown names are treated as if they had been 'import'ed from the runtime system. -- This saves us a lot of bother in the RTS sources, at the expense of -- deferring some errors to link time. lookupName :: FastString -> CmmParse CmmExpr lookupName name = do env <- getEnv return $ case lookupUFM env name of Just (VarN e) -> e Just (FunN pkg) -> CmmLit (CmmLabel (mkCmmCodeLabel pkg name)) _other -> CmmLit (CmmLabel (mkCmmCodeLabel rtsPackageKey name)) -- | Lift an FCode computation into the CmmParse monad code :: FCode a -> CmmParse a code fc = EC $ \_ _ s -> do r <- fc return (s, r) emit :: CmmAGraph -> CmmParse () emit = code . F.emit emitLabel :: BlockId -> CmmParse () emitLabel = code . F.emitLabel emitAssign :: CmmReg -> CmmExpr -> CmmParse () emitAssign l r = code (F.emitAssign l r) emitStore :: CmmExpr -> CmmExpr -> CmmParse () emitStore l r = code (F.emitStore l r) getCode :: CmmParse a -> CmmParse CmmAGraph getCode (EC ec) = EC $ \c e s -> do ((s',_), gr) <- F.getCodeR (ec c e s) return (s', gr) getCodeR :: CmmParse a -> CmmParse (a, CmmAGraph) getCodeR (EC ec) = EC $ \c e s -> do ((s', r), gr) <- F.getCodeR (ec c e s) return (s', (r,gr)) getCodeScoped :: CmmParse a -> CmmParse (a, CmmAGraphScoped) getCodeScoped (EC ec) = EC $ \c e s -> do ((s', r), gr) <- F.getCodeScoped (ec c e s) return (s', (r,gr)) emitOutOfLine :: BlockId -> CmmAGraphScoped -> CmmParse () emitOutOfLine l g = code (F.emitOutOfLine l g) withUpdFrameOff :: UpdFrameOffset -> CmmParse () -> CmmParse () withUpdFrameOff size inner = EC $ \c e s -> F.withUpdFrameOff size $ (unEC inner) c e s getUpdFrameOff :: CmmParse UpdFrameOffset getUpdFrameOff = code $ F.getUpdFrameOff
urbanslug/ghc
compiler/codeGen/StgCmmExtCode.hs
bsd-3-clause
7,581
0
15
1,959
1,921
1,027
894
147
3
{-# LANGUAGE MagicHash #-} module ShouldCompile where import GHC.Exts data STRef s a = STRef (MutVar# s a) -- ghc 4.08 had a problem with returning a MutVar#. from :: STRef s a -> MutVar# s a from (STRef x) = x to :: MutVar# s a -> STRef s a to x = STRef x
ryantm/ghc
testsuite/tests/codeGen/should_compile/cg001.hs
bsd-3-clause
263
0
8
62
93
49
44
8
1
module System.Warp.Types where import Data.Map(Map) type Facts = Map String String type GenId = String data ServiceAction = ServiceStop | ServiceStart | ServiceRestart | ServiceReload | ServiceStatus deriving (Show, Read) data Command = PingCommand | SleepCommand Integer | ShCommand String String [Int] | ServiceCommand ServiceAction String deriving (Show, Read) data Script = Script String [Command] data Matcher = MatchAll | MatchNone | MatchHost String | MatchFact String String | MatchNot Matcher | MatchOr [Matcher] | MatchAnd [Matcher] deriving (Show, Read) data CommandOutput = CommandSuccess Int String String | CommandFailure Int String String | CommandFinished deriving (Show, Read) data Request = Request { rq_id :: GenId , rq_match :: Matcher , rq_timeout :: Integer , rq_scriptname :: String , rq_script :: [Command] } deriving (Show, Read) data Response = Response { res_id :: GenId , res_host :: String , res_output :: CommandOutput } deriving (Show, Read) data AckStatus = AckStart | AckRefused deriving (Show, Read) data Ack = Ack { ack_id :: GenId , ack_host :: String , ack_status :: AckStatus } deriving (Show, Read) data WarpConfig = WarpConfig { cacert :: String , privkey :: String , redis_host :: String , redis_port :: Integer } deriving (Show, Read) data Verbosity = Normal | Verbose deriving (Show, Read) data WarpArguments = WarpArguments { config :: FilePath , verbosity :: Verbosity , logfile :: Maybe FilePath } deriving (Show, Read)
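-- Editor's sketch (not part of the original module): the Show/Read derivations
-- above suggest values are serialised with 'show' and parsed back with 'read'.
-- All field values below are invented for illustration.
exampleRequest :: Request
exampleRequest = Request
  { rq_id         = "req-0001"
  , rq_match      = MatchAnd [MatchFact "role" "web", MatchNot (MatchHost "db1")]
  , rq_timeout    = 30
  , rq_scriptname = "restart-web"
  , rq_script     = [ShCommand "uptime" "/" [0], ServiceCommand ServiceRestart "nginx"]
  }

-- The wire form is just the derived Show output; 'read exampleWire :: Request'
-- recovers the value (comparing the two would additionally need an Eq instance).
exampleWire :: String
exampleWire = show exampleRequest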
pyr/warp-agent
System/Warp/Types.hs
isc
2,267
0
9
1,029
466
277
189
50
0
-- Char stands for character (i.e. 'a'). A String is a list of characters -- So [Char] is equivalent to String (i.e. ['A','n','i'] == "Ani"). removeNonUpperCase :: String -> String removeNonUpperCase st = [ c | c <- st, c `elem` ['A'..'Z']] -- Int stands for integer. It’s used for whole numbers. 7 can be an Int but 7.2 -- cannot. Int is bounded, which means that it has a minimum and a maximum -- value. Usually on 32-bit machines the maximum possible Int is 2147483647 -- and the minimum is -2147483648. On 64-bit it's 9223372036854775807 to -- -9223372036854775808 addThree :: Int -> Int -> Int -> Int addThree x y z = x + y + z -- Integer -- stands for, er... also integer. The main difference is that it’s not -- bounded so it can be used to represent really really big numbers. I mean like -- really big. Int, however, is more efficient. factorial :: Integer -> Integer factorial n = product [1..n] -- Float is a real floating point with single precision. circumference :: Float -> Float circumference r = 2 * pi * r -- Double is a real floating point with double the precision circumference' :: Double -> Double circumference' r = 2 * pi * r -- Bool is a boolean type. It can have only two values: True and False. -- Char represents a character. It’s denoted by single quotes. A list of characters is a string. -- Tuples are types but they are dependent on their length as well as the -- types of their components, so there is theoretically an infinite number of tuple -- types, which is too many to cover in this tutorial. Note that the empty tuple -- () is also a type which can only have a single value: ()
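-- An added example in the same spirit as the ones above (not part of the
-- original file): a pair is a single type whose length and component types are
-- fixed, so a function over (Int, Int) cannot be applied to a triple or to
-- (Int, Double).
addPair :: (Int, Int) -> Int
addPair (x, y) = x + y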
dotnetCarpenter/haskell1
ch03/types.hs
isc
1,626
0
8
311
175
101
74
10
1
{-# htermination keysFM_LE :: FiniteMap Bool b -> Bool -> [Bool] #-}

import FiniteMap
ComputationWithBoundedResources/ara-inference
doc/tpdb_trs/Haskell/full_haskell/FiniteMap_keysFM_LE_8.hs
mit
86
0
3
14
5
3
2
1
0
module Handler.ReviewNew where

import Import
import Cache
import Form
import SessionState
import Database.Persist.Sql
import qualified Data.List as L

getReviewNewR :: Handler Html
getReviewNewR = do
    defaultLayout $ do
        -- "Запрос техсправки" is roughly "Technical report request"
        setTitle "Запрос техсправки"
        $(widgetFile "tech-request")
swamp-agr/carbuyer-advisor
Handler/ReviewNew.hs
mit
320
0
12
49
71
40
31
13
1
module Sandbox.Number.Extra where

-- | Factorial
fact :: (Eq a, Num a) => a -> a
fact = fact' 1
  where fact' s 0 = s
        fact' s a = fact' (s * a) (a - 1)
4e6/sandbox
haskell/Sandbox/Number/Extra.hs
mit
161
0
9
48
81
44
37
5
2
import Control.Concurrent.STM.TMVar
import Control.Concurrent.STM.TQueue
import Control.Monad
import Control.Monad.STM
import Control.Concurrent

import Network.Socket.Internal

import qualified Data.ByteString.Lazy as B

import System.Console.CmdArgs.Implicit

import Nntp.Client
import Nntp.Types
import Nzb.Parser

import Config

data CmdLine = CmdLine
    { configFile :: String
    , nzbFile :: String
    } deriving (Show, Data, Typeable)

data NNTPThread = NNTPThread
    { nntpInputQueue :: NzbQueue
    , nntpOutpuQueue :: WriterQueue
    , nntpThreadId :: ThreadId
    }

instance Show NNTPThread where
    show = show . nntpThreadId

pipeQueue :: [a] -> TQueue a -> IO ()
pipeQueue vals queue = atomically $ mapM_ (writeTQueue queue) vals

concatMapM :: (Monad m) => (a -> m [b]) -> [a] -> m [b]
concatMapM f xs = (liftM concat) $ mapM f xs

startThreadDispather :: Config -> IO (TMVar Bool)
startThreadDispather config = do
    outQ <- atomically newTQueue :: IO WriterQueue
    -- Going to need a state or something to hold the queue and threadIDs
    concatMapM (run outQ) (configServers config) >>= print
    atomically newEmptyTMVar

-- Start a thread for each connection
run :: WriterQueue -> ServerConfig -> IO [NNTPThread]
run outQ serverConfig = do
    -- A queue which Nzb's can be sent down
    inQ <- atomically newTQueue :: IO NzbQueue
    threads <- replicateM (serverConections serverConfig) (startThread inQ)
    return threads
  where
    startThread :: NzbQueue -> IO NNTPThread
    startThread inQ = do
        threadId <- forkIO $ nntpMain inQ outQ serverConfig
        return $ NNTPThread inQ outQ threadId

hewsnet :: Config -> IO ()
hewsnet config = do
    var <- startThreadDispather config
    atomically $ takeTMVar var
    return ()

main :: IO ()
main = withSocketsDo $ do
    args <- cmdArgs CmdLine
        { configFile = def
        , nzbFile = def
        }
    file <- B.readFile (configFile args)

    case (openConfig file) of
        Just conf -> hewsnet conf >>= print
        Nothing -> print "Unable to openConfig"

    return ()
Zariel/hewsnet
src/main.hs
mit
1,976
12
13
360
643
329
314
55
2
{-
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ExtendedDefaultRules #-}
module Main where

import Lucid.Base
import Lucid.Html5
--import Control.Monoid
-}

class L t where len :: t -> Int

instance L Char where len _ = 1
instance L Bool where len _ = 33
instance L [a] where len xs = length xs
--instance L (a -> b) where len _ = 5
instance L (a,b) where len _ = 2
instance L (Maybe a) where len _ = 3

instance (L r) => L (a -> r) where len f = 8

class C t where
    f' :: [String] -> t

instance C Char where
    f' strings = head $ head strings

instance C Int where
    f' strings = length strings

instance (Show a, C r) => C (a -> r) where
    f' strings = \x -> f' (strings ++ [show x])

g :: [String] -> Int
g = f'

--h :: (Show a, C r) => [String] -> (a -> r)
--h strings = f' strings

instance C (IO a) where
    f' strings = do
        putStrLn "-----"
        --putStrLn $ strings !! 0
        print strings
        --mapM_ putStrLn strings
        --putStrLn $ "len = " ++ show (length strings)
        putStrLn "-----"
        return undefined

{-
f :: (C t) => t
f = f' []
-}

main :: IO ()
main = f' [] True 99 "Mary" 'k' "had"
dominicprior/ghcjs-demos
lu/src/Main.hs
mit
1,141
0
11
277
372
190
182
25
1
module Main where

import Memento.CLI
import Memento.Logger
import Memento.Types
import Memento.GoogleCalendar
import Memento.HipChat
import Memento.Commands
import Data.Default
import Text.ICalendar.Parser
import Options.Applicative
import Control.Monad
import System.Exit
import System.IO
import qualified Data.Text as T

--------------------------------------------------------------------------------
main :: IO ()
main = execParser opts >>= memento
  where
    opts = info (helper <*> cli)
      ( fullDesc
     <> progDesc "Remember to do my things."
     <> header "He's Odersky. Don't trust his lies." )

--------------------------------------------------------------------------------
repl :: IO ()
repl = do
  cyan "This is the memento REPL."
  cyan "Write :h to access the list of commands you can type."
  hSetBuffering stdout NoBuffering
  startMemento $ forever go
  where
    go :: Memento ()
    go = do
      liftIO $ hPutStr stdout "> "
      userAction <- liftIO getLine
      let uCommand = parseCommand userAction
      case uCommand of
        Nothing -> do
          liftIO $ yellow "eh?"
          go
        Just cmd -> processCommand cmd >> go

--------------------------------------------------------------------------------
memento :: CLI -> IO ()
memento Interactive = repl
memento Hangout = startMemento $ do
  link <- newHangoutLink "Morning Standup"
  case link of
    Left e -> liftIO $ red (T.pack . show $ e)
    Right hLink -> do
      liftIO (putStrLn "Hangout link: " >> green hLink)
      let msg = "@all The Standup Hangout link: " <> hLink <> " (via Memento)"
      notifyRoom DevChat msg
memento (Mem fp) = do
  parseCal <- parseICalendarFile def fp
  case parseCal of
    Left e -> fail e
    Right _ -> cyan "Let the fun begin"
adinapoli/memento
main/Main.hs
mit
1,810
0
17
418
462
224
238
51
3
-- Copyright (c) Microsoft. All rights reserved. -- Licensed under the MIT license. See LICENSE file in the project root for full license information. {-# LANGUAGE QuasiQuotes, OverloadedStrings, RecordWildCards #-} {-# OPTIONS_GHC -Wwarn #-} module Bond.Template.Cpp.Util ( openNamespace , closeNamespace , structName , structParams , template , modifierTag , defaultValue , attributeInit , schemaMetadata , ifndef , defaultedFunctions , rvalueReferences , enumDefinition ) where import Data.Monoid import Prelude import Data.Text.Lazy (Text) import Text.Shakespeare.Text import Bond.Schema.Types import Bond.Schema.Util import Bond.Util import Bond.Template.Util import Bond.Template.TypeMapping -- open namespaces openNamespace :: MappingContext -> Text openNamespace cpp = newlineSep 0 open $ getNamespace cpp where open n = [lt|namespace #{n} {|] -- close namespaces in reverse order closeNamespace :: MappingContext -> Text closeNamespace cpp = newlineSep 0 close (reverse $ getNamespace cpp) where close n = [lt|} // namespace #{n}|] structName :: Declaration -> String structName s@Struct {..} = declName <> structParams s structName _ = error "structName: impossible happened." structParams :: Declaration -> String structParams Struct {..} = angles $ sepBy ", " paramName declParams structParams _ = error "structName: impossible happened." template :: Declaration -> Text template d = if null $ declParams d then mempty else [lt|template <typename #{params}> |] where params = sepBy ", typename " paramName $ declParams d -- attribute initializer attributeInit :: [Attribute] -> Text attributeInit [] = "bond::reflection::Attributes()" attributeInit xs = [lt|boost::assign::map_list_of<std::string, std::string>#{newlineBeginSep 5 attrNameValue xs}|] where attrNameValue Attribute {..} = [lt|("#{getIdlQualifiedName attrName}", "#{attrValue}")|] -- modifier tag type for a field modifierTag :: Field -> Text modifierTag Field {..} = [lt|bond::reflection::#{modifier fieldType fieldModifier}_field_modifier|] where modifier BT_MetaName _ = [lt|required_optional|] modifier BT_MetaFullName _ = [lt|required_optional|] modifier _ RequiredOptional = [lt|required_optional|] modifier _ Required = [lt|required|] modifier _ _ = [lt|optional|] defaultValue :: MappingContext -> Type -> Default -> Text defaultValue _ BT_WString (DefaultString x) = [lt|L"#{x}"|] defaultValue _ BT_String (DefaultString x) = [lt|"#{x}"|] defaultValue _ BT_Float (DefaultFloat x) = [lt|#{x}f|] defaultValue _ BT_Int64 (DefaultInteger (-9223372036854775808)) = [lt|-9223372036854775807LL-1|] defaultValue _ BT_Int64 (DefaultInteger x) = [lt|#{x}LL|] defaultValue _ BT_UInt64 (DefaultInteger x) = [lt|#{x}ULL|] defaultValue _ BT_Int32 (DefaultInteger (-2147483648)) = [lt|-2147483647-1|] defaultValue m t (DefaultEnum x) = enumValue m t x defaultValue _ _ (DefaultBool True) = "true" defaultValue _ _ (DefaultBool False) = "false" defaultValue _ _ (DefaultInteger x) = [lt|#{x}|] defaultValue _ _ (DefaultFloat x) = [lt|#{x}|] defaultValue _ _ (DefaultNothing) = mempty defaultValue m (BT_UserDefined a@Alias {..} args) d = defaultValue m (resolveAlias a args) d defaultValue _ _ _ = error "defaultValue: impossible happened." enumValue :: ToText a => MappingContext -> Type -> a -> Text enumValue cpp (BT_UserDefined e@Enum {..} _) x = [lt|#{getGlobalQualifiedName cppTypeMapping $ getDeclNamespace cpp e}::_bond_enumerators::#{declName}::#{x}|] enumValue _ _ _ = error "enumValue: impossible happened." 
-- schema metadata static member definitions schemaMetadata :: MappingContext -> Declaration -> Text schemaMetadata cpp s@Struct {..} = [lt| #{template s}const bond::Metadata #{structName s}::Schema::metadata = #{structName s}::Schema::GetMetadata();#{newlineBeginSep 1 staticDef structFields}|] where -- static member definition for field metadata staticDef f@Field {..} | fieldModifier == Optional && null fieldAttributes = [lt| #{template s}const bond::Metadata #{structName s}::Schema::s_#{fieldName}_metadata = bond::reflection::MetadataInit(#{defaultInit f}"#{fieldName}");|] | otherwise = [lt| #{template s}const bond::Metadata #{structName s}::Schema::s_#{fieldName}_metadata = bond::reflection::MetadataInit(#{defaultInit f}"#{fieldName}", #{modifierTag f}::value, #{attributeInit fieldAttributes});|] where defaultInit Field {fieldDefault = (Just def)} = [lt|#{explicitDefault def}, |] defaultInit _ = mempty explicitDefault (DefaultNothing) = "bond::nothing" explicitDefault d@(DefaultInteger _) = staticCast d explicitDefault d@(DefaultFloat _) = staticCast d explicitDefault d = defaultValue cpp fieldType d staticCast d = [lt|static_cast<#{getTypeName cpp fieldType}>(#{defaultValue cpp fieldType d})|] schemaMetadata _ _ = error "schemaMetadata: impossible happened." defaultedFunctions, rvalueReferences :: Text defaultedFunctions = [lt|BOND_NO_CXX11_DEFAULTED_FUNCTIONS|] rvalueReferences = [lt|BOND_NO_CXX11_RVALUE_REFERENCES|] ifndef :: ToText a => a -> Text -> Text ifndef m = between [lt| #ifndef #{m}|] [lt| #endif|] enumDefinition :: Declaration -> Text enumDefinition Enum {..} = [lt|enum #{declName} { #{commaLineSep 3 constant enumConstants} };|] where constant Constant {..} = [lt|#{constantName}#{optional value constantValue}|] value x = [lt| = #{x}|] enumDefinition _ = error "enumDefinition: impossible happened."
innovimax/bond
compiler/Bond/Template/Cpp/Util.hs
mit
5,636
0
14
942
1,286
734
552
-1
-1
module MiniCore.Transforms.StronglyConnectedComponents
    ( simplifyProgram
    , simplifyExpr
    ) where

import MiniCore.Types
import MiniCore.Transforms.Utils
import MiniCore.Format

import Control.Monad.State
import Control.Applicative
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Data.List as List
import Data.Maybe
import Debug.Trace
import Data.Foldable (foldrM)

-- Map vertex to the list of neighbor vertices
type Edges a = Map.Map a [a]

-- Find vertices with an edge from vertex
expand :: Ord a => a -> Edges a -> [a]
expand a edges = maybe [] id (Map.lookup a edges)

-- Use edge map to do depth-first-search from each vertex updating
-- state (visited, sequence). The output sequence should be sorted in
-- topological order
innerDFS :: Ord a => Edges a -> (Set.Set a, [a]) -> [a] -> (Set.Set a, [a])
innerDFS edges = List.foldl' search
  where
    search (visited, sequence) vertex
        | vertex `Set.member` visited = (visited, sequence)
        | otherwise                   = (visited', vertex:sequence')
      where
        (visited', sequence') =
            innerDFS edges (Set.insert vertex visited, sequence) (expand vertex edges)

-- Public interface to depth-first-search
dfs :: Ord a => Edges a -> [a] -> [a]
dfs edges = snd . innerDFS edges (Set.empty, [])

-- Do depth first search from each vertex producing a list of sets
-- of vertices visited.
spanningSearch :: Ord a => Edges a -> [a] -> [Set.Set a]
spanningSearch edges = snd . List.foldl' search (Set.empty, [])
  where
    search (visited, setSequence) vertex
        | vertex `Set.member` visited = (visited, setSequence)
        | otherwise                   = (visited', Set.fromList (vertex:sequence):setSequence)
      where
        (visited', sequence) =
            innerDFS edges (Set.insert vertex visited, []) (expand vertex edges)

-- Construct a topologically sorted sequence of the vertices in the graph
-- and then construct the reverse of the topologically sorted sequence
-- of strongly connected components
scc :: Ord a => Edges a -> Edges a -> [a] -> [Set.Set a]
scc ins outs = spanningSearch ins . dfs outs

-- Annotate program with free-variables and then
-- break up into the smallest possible lets and letrecs
simplifyProgram :: Program -> Stage Program
simplifyProgram program = depends =<< freeVars program

-- Annotate single expression with free-variables and
-- then break up into the smallest possible let or letrec
simplifyExpr :: Expr -> Stage Expr
simplifyExpr expr = dependsExpr =<< freeVarsExpr Set.empty expr

-- Run dependency analysis on the body of each combinator
depends :: FVProgram -> Stage Program
depends program = mapM depends' program
  where
    depends' (name, args, body) = Combinator name args <$> dependsExpr body

-- Lets are the only interesting case
dependsExpr :: FVExpr -> Stage Expr
dependsExpr (free, ANum n)          = return (Num n)
dependsExpr (free, ACons tag arity) = return (Cons tag arity)
dependsExpr (free, AVar v)          = return (Var v)
dependsExpr (free, AApp e1 e2)      = App <$> dependsExpr e1 <*> dependsExpr e2
dependsExpr (free, ACase body alts) =
    let dependsAlt (tag, args, e) = (,,) tag args <$> dependsExpr e
    in Case <$> dependsExpr body <*> mapM dependsAlt alts
dependsExpr (free, ALambda args body) = Lambda args <$> dependsExpr body
dependsExpr (free, ALet recursive defs body) = do
    let binders = bindersOf defs

        binderSet | recursive = Set.fromList binders
                  | otherwise = Set.empty

        -- Make an edge from each name to its free variables that are bound
        -- in this letrec
        edges = [ (name, freeSet)
                | (name, (freeVars, _)) <- defs
                , freeSet <- Set.toList (freeVars `Set.intersection` binderSet)
                ]

        -- If ins w = [u, ...] then w depends on each u
        -- If out u = [w, ...] then each w depends on u
        ins = Map.fromList [(w, [u | (u, w') <- edges, w == w']) | (_, w) <- edges]
        out = Map.fromList [(u, [w | (u', w) <- edges, u == u']) | (u, _) <- edges]

        -- Strongly connected components in sorted topologically
        components = map Set.toList (scc ins out binders)

        -- Break defs into strongly connected components
        defs' = [ [(name, fromJust (lookup name defs)) | name <- names]
                | names <- components
                ]

    -- Build new nested let(rec)
    body'  <- dependsExpr body
    defs'' <- foldrM mkLet body' defs'
    return defs''

-- Take a list of definitions and build a new Let out of them
-- Make it recursive if any name is found in the set of all
-- free variables
mkLet :: [(Name, FVExpr)] -> Expr -> Stage Expr
mkLet defs body = do
    let names = map fst defs
        exprs = map snd defs
    exprs' <- mapM dependsExpr exprs
    let defs'     = zip names exprs'
        vars      = foldr Set.union Set.empty (map fst exprs)
        recursive = any (`Set.member` vars) names
    return (Let recursive defs' body)
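-- A rough illustration (not from the original module) of what the pass does:
-- for one source letrec binding a, b and c, where a and b are mutually
-- recursive and c merely uses b, the binding group is split into nested lets,
-- roughly
--
--   Let True [a, b] (Let False [c] body)
--
-- i.e. the strongly connected component {a, b} stays a letrec while c becomes
-- a non-recursive let nested inside it.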
cdparks/mini-core
src/MiniCore/Transforms/StronglyConnectedComponents.hs
mit
4,871
0
16
1,074
1,465
777
688
84
1
-- from ch05 of Real World Haskell
-- PrettyJSON.hs

module PrettyJSON
    ( renderJSONValue
    ) where

import Data.Bits (shiftR, (.&.))
import Data.Char (ord)
import Numeric (showHex)

import Prettify (Doc, char, double, fsep, hcat, punctuate, text, (<>))
import SimpleJSON (JSONValue (..))

renderJSONValue :: JSONValue -> Doc
renderJSONValue (JSONBool True)    = text "true"
renderJSONValue (JSONBool False)   = text "false"
renderJSONValue JSONNull           = text "null"
renderJSONValue (JSONNumber num)   = double num
renderJSONValue (JSONString str)   = string str
renderJSONValue (JSONArray arr)    = series '[' ']' renderJSONValue arr
renderJSONValue (JSONObject alist) = series '{' '}' field alist
    where field (name,val) = string name <> text ": " <> renderJSONValue val

string :: String -> Doc
string = enclose '"' '"' . hcat . map oneChar

-- helpers for pretty printing a string
enclose :: Char -> Char -> Doc -> Doc
enclose left right x = char left <> x <> char right

-- lookup seems to be O(n)...gotta be a better way...maybe later?
oneChar :: Char -> Doc
oneChar c = case lookup c simpleEscapes of
              Just r -> text r
              Nothing | mustEscape c -> hexEscape c
                      | otherwise    -> char c
    -- is this really good Haskell? Exposing the underlying
    -- representation of char...
    -- also, `c == '\x7f'`: Error:(31, 43) <qcon> or <qvar> expected, got '\'?
    where mustEscape c = c < ' ' || c == '\x7f' || c > '\xff'

hexEscape :: Char -> Doc
hexEscape c | d < 0x10000 = smallHex d
            | otherwise   = astral (d - 0x10000)
    where d = ord c

-- helpers for proper escaping
-- an association list /'alist'/
simpleEscapes :: [(Char, String)]
simpleEscapes = zipWith ch "\b\n\f\r\t\\\"/" "bnfrt\\\"/"
    where ch a b = (a, ['\\',b])

smallHex :: Int -> Doc
smallHex x = text "\\u"
          <> text (replicate (4 - length h) '0')
          <> text h
    where h = showHex x ""

-- wrinkle: the above only works for unicode up to 0xffff
-- time for some bit manipulation
astral :: Int -> Doc
astral n = smallHex (a + 0xd800) <> smallHex (b + 0xdc00)
    where a = (n `shiftR` 10) .&. 0x3ff
          b = n .&. 0x3ff

series :: Char -> Char -> (a -> Doc) -> [a] -> Doc
series open close item = enclose open close
                       . fsep . punctuate (char ',') . map item
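-- A quick usage sketch (not part of the original file), using only the
-- constructors imported from SimpleJSON above:
--
--   doc :: Doc
--   doc = renderJSONValue (JSONObject [ ("query",    JSONString "awkward squad")
--                                     , ("estimate", JSONNumber 3254) ])
--
-- Turning the resulting Doc into a String is left to whatever renderer the
-- accompanying Prettify module provides (in the book, `compact` and `pretty`).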
0culus/SimpleJSON
src/PrettyJSON.hs
mit
2,545
0
12
777
749
390
359
49
2
module ParseProblem where {- Paradox/Equinox -- Copyright (c) 2003-2007, Koen Claessen, Niklas Sorensson Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -} import Data.Char ( isSpace , isAlphaNum , isUpper ) import Data.List ( intersperse , nub ) import System.Environment (getEnv) import System.Exit ( exitWith , ExitCode(..) ) import System.IO ( hFlush , stdout ) import System.IO.Error as IO ( try ) import Data.Set( Set ) import qualified Data.Set as S import Form import Name import Output import Parsek as P ------------------------------------------------------------------------- -- reading readProblemWithRoots :: [FilePath] -> FilePath -> IO Problem readProblemWithRoots roots name = do putStr ("Reading '" ++ name ++ "' ... ") hFlush stdout mtptp <- IO.try (getEnv "TPTP") mes <- findFile [ rt ++ nm | rt <- roots ++ [ case reverse tptp of '/':_ -> tptp _ -> tptp ++ "/" | Right tptp <- [mtptp] ] , nm <- nub [ name, name_p ] ++ [ "Problems/" ++ name_p , "Problems/" ++ take 3 name ++ "/" ++ name_p ] ] case mes of Nothing -> do putStrLn "COULD NOT OPEN" putFailure "INPUT FILE ERROR" Just s -> case parseP s of Left err -> do putStrLn "PARSE ERROR:" sequence [ putWarning s | s <- err ] exitWith (ExitFailure 1) Right (includes,clauses) -> do putStrLn "OK" hFlush stdout sets <- sequence [ readProblemWithRoots roots incl | incl <- includes ] return (concat sets ++ clauses) where name_p | '.' `elem` name = name | otherwise = name ++ ".p" findFile [] = do return Nothing findFile (name:names) = do ees <- IO.try (readFile name) case ees of Left _ -> findFile names Right s -> return (Just s) readProblem :: FilePath -> IO [Input Form] readProblem name = readProblemWithRoots [""] name ------------------------------------------------------------------------- -- parsing type P = Parser Char -- white space white :: P () white = do munch isSpace option () $ do char '%' <?> "" many (satisfy (/= '\n')) char '\n' white <|> do char '/' <?> "" char '*' s <- P.look let body ('*':'/':s) = do anyChar anyChar return () body (_:s) = do anyChar body s body [] = do return () body s white token :: String -> P String token s = do white string s <?> show s avname :: String -> P String avname s = do white string s <?> show s pname :: (Char -> Bool) -> P String pname p = do white stdName where stdName = do c <- satisfy (\c -> p c && isIdfChar c) s <- munch isIdfChar return (c:s) <|> do if not (p '\'') then fail "name" else return () string "\'" s <- munch (/= '\'') string "\'" return ("\'" ++ s ++ "\'") fname :: P Name fname = do s <- pname (not . 
isUpper) if s == "equal" then fail "equal" else return () let n = name s n `seq` return n <?> "lower-case name" vname :: P String vname = pname isUpper <?> "variable name" isVarName :: Name -> Bool isVarName n = not (null s) && isUpper (head s) where s = show n isIdfChar :: Char -> Bool isIdfChar c = isValid c isValid :: Char -> Bool isValid n = isAlphaNum n || n == '_' parens :: P a -> P a parens = between (token "(") (token ")") bracks :: P a -> P a bracks = between (token "[") (token "]") -- terms type Bnd = Maybe (Set String) term :: Bnd -> P Term term bnd = do s <- fname xs <- args bnd return (Fun (s ::: ([ top | x <- xs ] :-> top)) xs) <|> do s <- case bnd of Just vs -> do choice [ avname s <?> "bound variable" | s <- S.toList vs ] Nothing -> do vname return (Var (name s ::: V top)) <|> do parens (term bnd) <?> "term" args :: Bnd -> P [Term] args bnd = do return [] <|> do parens (term bnd `sepBy` token ",") <?> "arguments" -- atoms atom :: Bnd -> P Form atom bnd = do token "$false" return false <|> do token "$true" return true <|> do s <- fname xs <- args bnd return (Atom (prd (s ::: ([ top | x <- xs ] :-> bool)) xs)) <|> do t1 <- term bnd op <- token "=" <|> token "!=" t2 <- term bnd let a = Atom (t1 :=: t2) return (if op == "=" then a else nt a) <|> do avname "equal" token "(" t1 <- term bnd token "," t2 <- term bnd token ")" return (Atom (t1 :=: t2)) <?> "atom" -- forms form :: Bnd -> P Form form bnd = do foper bnd ops <?> "formula" where ops = [ ("<=>", Equiv) , ("<~>", \x y -> nt (x `Equiv` y)) , ("=>", \x y -> nt x \/ y) , ("<=", \x y -> x \/ nt y) , ("|", (\/)) , ("~|", \x y -> nt (x \/ y)) , ("&", (/\)) , ("~&", \x y -> nt (x /\ y)) ] foper :: Bnd -> [(String, Form->Form->Form)] -> P Form foper bnd [] = funit bnd foper bnd ops@((sym,fun):ops') = do a <- foper bnd ops' option a $ do token sym b <- foper bnd ops return (a `fun` b) funit :: Bnd -> P Form funit bnd = do parens (form bnd) <|> do atom bnd <|> do token "~" f <- funit bnd return (nt f) <|> do q <- (do token "!"; return forAll) <|> (do token "?"; return exists) vs <- bracks (vname `sepBy` token ",") token ":" f <- funit ((`S.union` S.fromList vs) `fmap` bnd) return (foldr q f (map (\v -> name v ::: V top) vs)) <?> "formula unit" lit :: P Form lit = do atom Nothing <|> do token "~" a <- atom Nothing return (nt a) <?> "literal" claus :: P Form claus = do ls <- lit `sepBy` token "|" let c = orl ls return (foldr forAll c (S.toList (free c))) <|> do parens claus where orl [a] = a orl as = Or (S.fromList as) -- formulas and clauses formula :: P (Input Form) formula = do lang <- token "fof" <|> token "cnf" x <- parens $ do white s <- pname (const True) <|> (token (show "") >> return "") token "," white t <- ptype token "," f <- if lang == "fof" then form (Just S.empty) else claus return (Input t s f) token "." return x where ptype = choice [ do token s return t | (s,t) <- typeList ] typeList = [ ("axiom", Fact) -- .. , ("theorem", Fact) -- I see no reason to distinguish these , ("lemma", Fact) -- .. , ("hypothesis", Fact) -- .. , ("conjecture", Conjecture) , ("negated_conjecture", NegatedConjecture) ] -- includes include :: P FilePath include = do token "include" s <- parens (white >> filePath) token "." 
return s filePath :: P FilePath filePath = do q <- char '\'' <|> char '\"' s <- munch (\c -> c /= q && c /= '\n') char q return s <?> "file path" prob :: P ([FilePath],[Input Form]) prob = do incls <- many include ins <- many formula white return (incls,ins) parseP :: String -> Either [String] ([FilePath],[Input Form]) parseP s = case parse prob completeResultsWithLine s of Left (n, exp, unexp) -> Left $ [ "On line: " ++ show n ] ++ [ "Unexpected: " ++ commas "and" unexp | not (null unexp) ] ++ [ "Expected: " ++ commas "or" exp | not (null exp) ] Right [x] -> Right x Right _ -> Left $ [ "Internal error: Ambiguous parse!" , "Please report this as a bug in the parser." ] where commas op = concat . intersperse (", " ++ op ++ " ") ------------------------------------------------------------------------- -- the end.
msakai/folkung
Haskell/ParseProblem.hs
mit
9,308
0
22
3,205
3,243
1,581
1,662
290
6
module CTG1371Bench (benchmarks) where

import CTG1371
import Criterion

benchmarks :: [Benchmark]
benchmarks = [ bench "main" (nfIO main) ]
danplubell/CTG1371
benchmark/CTG1371Bench.hs
mit
150
0
8
30
42
25
17
6
1
{-| Cluster rebalancer. -} {- Copyright (C) 2009, 2010, 2011, 2012, 2013 Google Inc. This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. -} module Ganeti.HTools.Program.Hbal ( main , options , arguments , iterateDepth ) where import Control.Exception (bracket) import Control.Monad import Data.List import Data.Maybe (isJust, isNothing, fromJust) import Data.IORef import System.Exit import System.IO import System.Posix.Process import System.Posix.Signals import Text.Printf (printf) import qualified Ganeti.HTools.Container as Container import qualified Ganeti.HTools.Cluster as Cluster import qualified Ganeti.HTools.Group as Group import qualified Ganeti.HTools.Node as Node import qualified Ganeti.HTools.Instance as Instance import Ganeti.BasicTypes import Ganeti.Common import Ganeti.HTools.CLI import Ganeti.HTools.ExtLoader import Ganeti.HTools.Types import Ganeti.HTools.Loader import Ganeti.OpCodes (wrapOpCode, setOpComment, setOpPriority, OpCode, MetaOpCode) import Ganeti.Jobs as Jobs import Ganeti.Types import Ganeti.Utils import qualified Ganeti.Luxi as L import Ganeti.Version (version) -- | Options list and functions. options :: IO [OptType] options = do luxi <- oLuxiSocket return [ oPrintNodes , oPrintInsts , oPrintCommands , oDataFile , oEvacMode , oRapiMaster , luxi , oIAllocSrc , oExecJobs , oGroup , oMaxSolLength , oVerbose , oQuiet , oOfflineNode , oMinScore , oMaxCpu , oMinDisk , oMinGain , oMinGainLim , oDiskMoves , oSelInst , oInstMoves , oDynuFile , oExTags , oExInst , oSaveCluster , oPriority ] -- | The list of arguments supported by the program. arguments :: [ArgCompletion] arguments = [] -- | A simple type alias for clearer signature. type Annotator = OpCode -> MetaOpCode -- | Wraps an 'OpCode' in a 'MetaOpCode' while also adding a comment -- about what generated the opcode. annotateOpCode :: Annotator annotateOpCode = setOpComment ("rebalancing via hbal " ++ version) . wrapOpCode {- | Start computing the solution at the given depth and recurse until we find a valid solution or we exceed the maximum depth. 
-} iterateDepth :: Bool -- ^ Whether to print moves -> Cluster.Table -- ^ The starting table -> Int -- ^ Remaining length -> Bool -- ^ Allow disk moves -> Bool -- ^ Allow instance moves -> Int -- ^ Max node name len -> Int -- ^ Max instance name len -> [MoveJob] -- ^ Current command list -> Score -- ^ Score at which to stop -> Score -- ^ Min gain limit -> Score -- ^ Min score gain -> Bool -- ^ Enable evacuation mode -> IO (Cluster.Table, [MoveJob]) -- ^ The resulting table -- and commands iterateDepth printmove ini_tbl max_rounds disk_moves inst_moves nmlen imlen cmd_strs min_score mg_limit min_gain evac_mode = let Cluster.Table ini_nl ini_il _ _ = ini_tbl allowed_next = Cluster.doNextBalance ini_tbl max_rounds min_score m_fin_tbl = if allowed_next then Cluster.tryBalance ini_tbl disk_moves inst_moves evac_mode mg_limit min_gain else Nothing in case m_fin_tbl of Just fin_tbl -> do let (Cluster.Table _ _ _ fin_plc) = fin_tbl cur_plc@(idx, _, _, move, _) <- exitIfEmpty "Empty placement list returned for solution?!" fin_plc let fin_plc_len = length fin_plc (sol_line, cmds) = Cluster.printSolutionLine ini_nl ini_il nmlen imlen cur_plc fin_plc_len afn = Cluster.involvedNodes ini_il cur_plc upd_cmd_strs = (afn, idx, move, cmds):cmd_strs when printmove $ do putStrLn sol_line hFlush stdout iterateDepth printmove fin_tbl max_rounds disk_moves inst_moves nmlen imlen upd_cmd_strs min_score mg_limit min_gain evac_mode Nothing -> return (ini_tbl, cmd_strs) -- | Displays the cluster stats. printStats :: Node.List -> Node.List -> IO () printStats ini_nl fin_nl = do let ini_cs = Cluster.totalResources ini_nl fin_cs = Cluster.totalResources fin_nl printf "Original: mem=%d disk=%d\n" (Cluster.csFmem ini_cs) (Cluster.csFdsk ini_cs) :: IO () printf "Final: mem=%d disk=%d\n" (Cluster.csFmem fin_cs) (Cluster.csFdsk fin_cs) -- | Saves the rebalance commands to a text file. saveBalanceCommands :: Options -> String -> IO () saveBalanceCommands opts cmd_data = do let out_path = fromJust $ optShowCmds opts putStrLn "" if out_path == "-" then printf "Commands to run to reach the above solution:\n%s" (unlines . map (" " ++) . filter (/= " check") . lines $ cmd_data) else do writeFile out_path (shTemplate ++ cmd_data) printf "The commands have been written to file '%s'\n" out_path -- | Wrapper over execJobSet checking for early termination via an IORef. execCancelWrapper :: Annotator -> String -> Node.List -> Instance.List -> IORef Int -> [JobSet] -> IO (Result ()) execCancelWrapper _ _ _ _ _ [] = return $ Ok () execCancelWrapper anno master nl il cref alljss = do cancel <- readIORef cref if cancel > 0 then return . Bad $ "Exiting early due to user request, " ++ show (length alljss) ++ " jobset(s) remaining." else execJobSet anno master nl il cref alljss -- | Execute an entire jobset. execJobSet :: Annotator -> String -> Node.List -> Instance.List -> IORef Int -> [JobSet] -> IO (Result ()) execJobSet _ _ _ _ _ [] = return $ Ok () execJobSet anno master nl il cref (js:jss) = do -- map from jobset (htools list of positions) to [[opcodes]] let jobs = map (\(_, idx, move, _) -> map anno $ Cluster.iMoveToJob nl il idx move) js descr = map (\(_, idx, _, _) -> Container.nameOf il idx) js logfn = putStrLn . ("Got job IDs" ++) . commaJoin . map (show . fromJobId) putStrLn $ "Executing jobset for instances " ++ commaJoin descr jrs <- bracket (L.getClient master) L.closeClient $ Jobs.execJobsWait jobs logfn case jrs of Bad x -> return $ Bad x Ok x -> if null failures then execCancelWrapper anno master nl il cref jss else return . 
Bad . unlines $ [ "Not all jobs completed successfully: " ++ show failures, "Aborting."] where failures = filter ((/= JOB_STATUS_SUCCESS) . snd) x -- | Executes the jobs, if possible and desired. maybeExecJobs :: Options -> [a] -> Node.List -> Instance.List -> [JobSet] -> IO (Result ()) maybeExecJobs opts ord_plc fin_nl il cmd_jobs = if optExecJobs opts && not (null ord_plc) then (case optLuxi opts of Nothing -> return $ Bad "Execution of commands possible only on LUXI" Just master -> let annotator = maybe id setOpPriority (optPriority opts) . annotateOpCode in execWithCancel annotator master fin_nl il cmd_jobs) else return $ Ok () -- | Signal handler for graceful termination. handleSigInt :: IORef Int -> IO () handleSigInt cref = do writeIORef cref 1 putStrLn ("Cancel request registered, will exit at" ++ " the end of the current job set...") -- | Signal handler for immediate termination. handleSigTerm :: IORef Int -> IO () handleSigTerm cref = do -- update the cref to 2, just for consistency writeIORef cref 2 putStrLn "Double cancel request, exiting now..." exitImmediately $ ExitFailure 2 -- | Prepares to run a set of jobsets with handling of signals and early -- termination. execWithCancel :: Annotator -> String -> Node.List -> Instance.List -> [JobSet] -> IO (Result ()) execWithCancel anno master fin_nl il cmd_jobs = do cref <- newIORef 0 mapM_ (\(hnd, sig) -> installHandler sig (Catch (hnd cref)) Nothing) [(handleSigTerm, softwareTermination), (handleSigInt, keyboardSignal)] execCancelWrapper anno master fin_nl il cref cmd_jobs -- | Select the target node group. selectGroup :: Options -> Group.List -> Node.List -> Instance.List -> IO (String, (Node.List, Instance.List)) selectGroup opts gl nlf ilf = do let ngroups = Cluster.splitCluster nlf ilf when (length ngroups > 1 && isNothing (optGroup opts)) $ do hPutStrLn stderr "Found multiple node groups:" mapM_ (hPutStrLn stderr . (" " ++) . Group.name . flip Container.find gl . fst) ngroups exitErr "Aborting." case optGroup opts of Nothing -> do (gidx, cdata) <- exitIfEmpty "No groups found by splitCluster?!" ngroups let grp = Container.find gidx gl return (Group.name grp, cdata) Just g -> case Container.findByName gl g of Nothing -> do hPutStrLn stderr $ "Node group " ++ g ++ " not found. Node group list is:" mapM_ (hPutStrLn stderr . (" " ++) . Group.name ) (Container.elems gl) exitErr "Aborting." Just grp -> case lookup (Group.idx grp) ngroups of Nothing -> -- This will only happen if there are no nodes assigned -- to this group return (Group.name grp, (Container.empty, Container.empty)) Just cdata -> return (Group.name grp, cdata) -- | Do a few checks on the cluster data. checkCluster :: Int -> Node.List -> Instance.List -> IO () checkCluster verbose nl il = do -- nothing to do on an empty cluster when (Container.null il) $ do printf "Cluster is empty, exiting.\n"::IO () exitSuccess -- hbal doesn't currently handle split clusters let split_insts = Cluster.findSplitInstances nl il unless (null split_insts || verbose <= 1) $ do hPutStrLn stderr "Found instances belonging to multiple node groups:" mapM_ (\i -> hPutStrLn stderr $ " " ++ Instance.name i) split_insts hPutStrLn stderr "These instances will not be moved." printf "Loaded %d nodes, %d instances\n" (Container.size nl) (Container.size il)::IO () let csf = commonSuffix nl il when (not (null csf) && verbose > 1) $ printf "Note: Stripping common suffix of '%s' from names\n" csf -- | Do a few checks on the selected group data. 
checkGroup :: Int -> String -> Node.List -> Instance.List -> IO () checkGroup verbose gname nl il = do printf "Group size %d nodes, %d instances\n" (Container.size nl) (Container.size il)::IO () putStrLn $ "Selected node group: " ++ gname let (bad_nodes, bad_instances) = Cluster.computeBadItems nl il unless (verbose == 0) $ printf "Initial check done: %d bad nodes, %d bad instances.\n" (length bad_nodes) (length bad_instances) unless (null bad_nodes) $ putStrLn "Cluster is not N+1 happy, continuing but no guarantee \ \that the cluster will end N+1 happy." -- | Check that we actually need to rebalance. checkNeedRebalance :: Options -> Score -> IO () checkNeedRebalance opts ini_cv = do let min_cv = optMinScore opts when (ini_cv < min_cv) $ do printf "Cluster is already well balanced (initial score %.6g,\n\ \minimum score %.6g).\nNothing to do, exiting\n" ini_cv min_cv:: IO () exitSuccess -- | Main function. main :: Options -> [String] -> IO () main opts args = do unless (null args) $ exitErr "This program doesn't take any arguments." let verbose = optVerbose opts shownodes = optShowNodes opts showinsts = optShowInsts opts ini_cdata@(ClusterData gl fixed_nl ilf ctags ipol) <- loadExternalData opts when (verbose > 1) $ do putStrLn $ "Loaded cluster tags: " ++ intercalate "," ctags putStrLn $ "Loaded cluster ipolicy: " ++ show ipol nlf <- setNodeStatus opts fixed_nl checkCluster verbose nlf ilf maybeSaveData (optSaveCluster opts) "original" "before balancing" ini_cdata (gname, (nl, il)) <- selectGroup opts gl nlf ilf checkGroup verbose gname nl il maybePrintInsts showinsts "Initial" (Cluster.printInsts nl il) maybePrintNodes shownodes "Initial cluster" (Cluster.printNodes nl) let ini_cv = Cluster.compCV nl ini_tbl = Cluster.Table nl il ini_cv [] min_cv = optMinScore opts checkNeedRebalance opts ini_cv if verbose > 2 then printf "Initial coefficients: overall %.8f\n%s" ini_cv (Cluster.printStats " " nl)::IO () else printf "Initial score: %.8f\n" ini_cv putStrLn "Trying to minimize the CV..." let imlen = maximum . map (length . Instance.alias) $ Container.elems il nmlen = maximum . map (length . Node.alias) $ Container.elems nl (fin_tbl, cmd_strs) <- iterateDepth True ini_tbl (optMaxLength opts) (optDiskMoves opts) (optInstMoves opts) nmlen imlen [] min_cv (optMinGainLim opts) (optMinGain opts) (optEvacMode opts) let (Cluster.Table fin_nl fin_il fin_cv fin_plc) = fin_tbl ord_plc = reverse fin_plc sol_msg = case () of _ | null fin_plc -> printf "No solution found\n" | verbose > 2 -> printf "Final coefficients: overall %.8f\n%s" fin_cv (Cluster.printStats " " fin_nl) | otherwise -> printf "Cluster score improved from %.8f to %.8f\n" ini_cv fin_cv ::String putStr sol_msg unless (verbose == 0) $ printf "Solution length=%d\n" (length ord_plc) let cmd_jobs = Cluster.splitJobs cmd_strs when (isJust $ optShowCmds opts) . saveBalanceCommands opts $ Cluster.formatCmds cmd_jobs maybeSaveData (optSaveCluster opts) "balanced" "after balancing" ini_cdata { cdNodes = fin_nl, cdInstances = fin_il } maybePrintInsts showinsts "Final" (Cluster.printInsts fin_nl fin_il) maybePrintNodes shownodes "Final cluster" (Cluster.printNodes fin_nl) when (verbose > 3) $ printStats nl fin_nl exitIfBad "hbal" =<< maybeExecJobs opts ord_plc fin_nl il cmd_jobs
damoxc/ganeti
src/Ganeti/HTools/Program/Hbal.hs
gpl-2.0
15,311
0
19
4,310
3,684
1,854
1,830
306
4
{-# LANGUAGE RelaxedPolyRec, FlexibleContexts, DeriveDataTypeable, TemplateHaskell, CPP, PatternGuards #-} -- Copyright (c) 2004-5 Don Stewart - http://www.cse.unsw.edu.au/~dons -- Copyright (c) 2008 Nicolas Pouillard -- | Vim keymap for Yi. Emulates vim :set nocompatible module Yi.Keymap.Vim (keymapSet, viWrite, defKeymap, leaveInsRep, leave, ModeMap(..), VimOpts(..), VimExCmd(..), nilCmd, exCmd, exCmds, exSimpleComplete, exInfixComplete', exInfixComplete, mkExHistComplete, exHistComplete', exHistComplete, exHistInfixComplete', exHistInfixComplete, savingInsertB, savingInsertCharB, savingInsertStringB, savingDeleteB, savingDeleteCharB, savingDeleteWordB, savingCommandY, savingCommandE, mkKeymap, beginIns, beginInsE, beginInsB, listTagStack, pushTagStack, popTagStack, peekTagStack ) where import Prelude (maybe, length, filter, map, drop, break, uncurry, reads) import Data.Char import Data.List (nub, take, words, dropWhile, takeWhile, intersperse, reverse) import Data.Maybe (fromMaybe, isJust) import Data.Either (either) import Data.Prototype import Data.Accessor.Template import Numeric (showHex, showOct) import Shim.Utils (splitBy, uncurry3) import System.IO (readFile) #ifdef mingw32_HOST_OS import System.PosixCompat.Files (fileExist) #else import System.Posix (fileExist) #endif import System.FilePath (FilePath) import System.Directory (getCurrentDirectory, setCurrentDirectory) import Control.Monad.State hiding (mapM_, mapM, sequence) import Control.Arrow hiding (left, right) import {-# source #-} Yi.Boot import Yi.Command (cabalRun) import Yi.Core import Yi.Dired import Yi.Eval (execEditorAction, getAllNamesInScope) import Yi.File import Yi.History import Yi.Misc (matchingFileNames,adjBlock,adjIndent) import Yi.String (dropSpace,lines') import Yi.MiniBuffer import Yi.Regex (seInput, regexEscapeString) import Yi.Search import Yi.Style import Yi.TextCompletion import Yi.Completion (containsMatch', mkIsPrefixOf) import Yi.Tag import Yi.Window (bufkey) import Yi.Hoogle (hoogle, hoogleSearch) import qualified Codec.Binary.UTF8.String as UTF8 -- -- What's missing? -- gq,gw,fillText should leave the last \n as is. -- fancier :s// ==> missing /c, ... -- '.': started look at "TODO repeat" for missing things -- @: -- 8g8 -- provide and improve "g@{motion}" -- integrate unit transposing (transposeB) (gSaw: swap a word) -- free keys g[bBcClLnNOSWxXyYzZ(){}[/=|\:">.] z[BIJKpPqQSTVyYZ~`!@#$%&*()[]{}/?_:;,<>"'|\] -- could be reused g[dDhHiQ] -- free insert keys C-!, C-#, C-$, C-%, C-&, C-*, C-(, C-), C-/, C-+, C-=, C--, C-:, C-;, C-| -- could be reused insert keys C-_, C-^ -- go -- gm, g$, g<End>, gp, gP, ]p, ]P, [P, [p, gr, gR, gs, gv, gV -- &, :&&, g& -- goto file [{visual}][count]g{f,F} -- invent a variant of o and O that add spaces to be at the same col -- invent a "indent as previous" in insert mode -- :sh[ell] -- :!! -- movement parameterised \> \< -- motion operators [motion.txt]: ! -- C-v: visual block mode: almost works, block yanking/pasting is still to do. 
-- Support for marks -- C-o and C-i: jump list -- C-a or C-@: insert prev text -- C-u: delete all entered chars of the current line -- C-f: reindent the line -- C-o: execute one command, return to Insert mode (see also C-\ C-O) -- C-k <C-K><S-Space>: insert a char using it's specification -- C-r <reg>: insert the content of a register -- --------------------------------------------------------------------- type VimMode = Keymap data ViMove = Move TextUnit Direction | MaybeMove TextUnit Direction | GenMove TextUnit (Direction, BoundarySide) Direction | CharMove Direction | PercentageFile Int | ArbMove (BufferM ()) | Replicate ViMove Int | SeqMove ViMove ViMove | NoMove deriving (Typeable) data ViCmd = ArbCmd !(Int -> YiM ()) !Int | NoOp deriving (Typeable) instance Initializable ViCmd where initial = NoOp data ViInsertion = ViIns { viActFirst :: Maybe (EditorM ()) -- ^ The action performed first , viActBefore :: BufferM () -- ^ The action performed before insertion , viBeginPos :: Point -- ^ The position _before_ insertion , viEndPos :: Point -- ^ The position _after_ insertion , viActAfter :: BufferM () -- ^ The action performed after insertion } deriving (Typeable) $(nameDeriveAccessors ''ViInsertion $ Just.(++ "A")) data VimOpts = VimOpts { tildeop :: Bool , completeCaseSensitive :: Bool , enableTagStack :: Bool } data VimExCmd = VimExCmd { cmdNames :: [String] , cmdFn :: String -> YiM () , completeFn :: Maybe (String -> YiM ()) } type VimExCmdMap = [VimExCmd] -- very simple implementation yet newtype VimTagStack = VimTagStack { tagsStack :: [(FilePath, Point)] } deriving (Typeable) instance Initializable VimTagStack where initial = VimTagStack [] getTagStack :: EditorM VimTagStack getTagStack = getDynamic setTagStack :: VimTagStack -> EditorM () setTagStack = setDynamic listTagStack :: EditorM [(FilePath, Point)] listTagStack = return . tagsStack =<< getTagStack pushTagStack :: FilePath -> Point -> EditorM () pushTagStack fp p = do VimTagStack ts <- getTagStack setTagStack $ VimTagStack $ (fp, p):ts peekTagStack :: EditorM (Maybe (FilePath, Point)) peekTagStack = do VimTagStack ts <- getTagStack case ts of [] -> return Nothing (p:_) -> return $ Just p -- pop 'count' element from the tag stack. popTagStack :: Int -> EditorM (Maybe (FilePath, Point)) popTagStack count = do VimTagStack ts <- getTagStack case drop (count - 1) ts of [] -> return Nothing (p:ps) -> do setTagStack $ VimTagStack ps return $ Just p $(nameDeriveAccessors ''VimOpts $ Just.(++ "A")) -- | The Vim keymap is divided into several parts, roughly corresponding -- to the different modes of vi. Each mode is in turn broken up into -- separate VimProcs for each phase of key input in that mode. 
data ModeMap = ModeMap { -- | Top level mode v_top_level :: VimMode -- | vim insert mode , v_ins_char :: VimMode , v_opts :: VimOpts , v_ex_cmds :: VimExCmdMap } $(nameDeriveAccessors ''ModeMap $ Just.(++ "A")) lastViCommandA :: Accessor Editor ViCmd lastViCommandA = dynA currentViInsertionA :: Accessor FBuffer (Maybe ViInsertion) currentViInsertionA = bufferDynamicValueA applyViCmd :: Maybe Int -> ViCmd -> YiM () applyViCmd _ NoOp = return () applyViCmd mi (ArbCmd f i') = f $ fromMaybe i' mi regionOfViMove :: ViMove -> RegionStyle -> BufferM Region regionOfViMove move regionStyle = join $ mkRegionOfStyleB <$> pointB <*> savingPointB (viMove move >> pointB) <*> pure regionStyle applyOperator :: (RegionStyle -> Region -> EditorM ()) -> Int -> (RegionStyle, ViMove) -> EditorM () applyOperator onRegion i (regionStyle, move) = savingCommandE f i where f j = onRegion regionStyle =<< withBuffer0' (regionOfViMove (Replicate move j) regionStyle) emptyViIns :: Point -> ViInsertion emptyViIns p = ViIns Nothing (return ()) p p (return ()) getViIns :: BufferM ViInsertion getViIns = maybe def return =<< getA currentViInsertionA where def = do ins <- emptyViIns <$> pointB putA currentViInsertionA $ Just ins return ins viInsText :: ViInsertion -> BufferM String viInsText ins = readRegionB $ mkRegion (viBeginPos ins) (viEndPos ins) -- | The given buffer action should be an insertion action. savingInsertB :: BufferM () -> BufferM () savingInsertB action = do ins0 <- getViIns oldP <- pointB action newP <- pointB let endP = viEndPos ins0 beginP = viBeginPos ins0 ins1 | endP == oldP = ins0 { viEndPos = newP } | oldP >= beginP && oldP < endP = ins0 { viEndPos = endP +~ (newP ~- oldP) } | otherwise = emptyViIns newP putA currentViInsertionA $ Just ins1 savingInsertCharB :: Char -> BufferM () savingInsertCharB = savingInsertB . insertB savingInsertStringB :: String -> BufferM () savingInsertStringB = savingInsertB . insertN -- | The given action should be a deletion action. -- The only well tested buffer actions are deleting one character, -- or one word, forward or backward. savingDeleteB :: BufferM () -> BufferM () savingDeleteB action = do ins0 <- getViIns oldP <- pointB s1 <- sizeB action s2 <- sizeB newP <- pointB let diff = s2 ~- s1 endP = viEndPos ins0 beginP = viBeginPos ins0 shrinkEndPos = viEndPosA ^: (-~ diff) ins1 = if oldP >= beginP && oldP <= endP then if newP > endP then viActAfterA ^: (>> action) $ ins0 { viEndPos = newP } else if newP < beginP then viActBeforeA ^: (>> action) $ shrinkEndPos $ ins0 { viBeginPos = newP } else shrinkEndPos ins0 else if newP > oldP then viActAfterA ^: (>> action) $ emptyViIns newP else viActBeforeA ^: (>> action) $ emptyViIns newP putA currentViInsertionA $ Just ins1 savingDeleteCharB :: Direction -> BufferM () savingDeleteCharB dir = savingDeleteB (adjBlock (-1) >> deleteB Character dir) savingDeleteWordB :: Direction -> BufferM () savingDeleteWordB dir = savingDeleteB $ deleteRegionB =<< regionOfPartNonEmptyB unitViWordOnLine dir viCommandOfViInsertion :: ViInsertion -> BufferM ViCmd viCommandOfViInsertion ins@(ViIns mayFirstAct before _ _ after) = do text <- viInsText ins return . flip ArbCmd 1 . 
fmap withEditor $ case mayFirstAct of Just firstAct -> \n-> replicateM_ n firstAct >> withBuffer0' (before >> insertN text >> after) Nothing -> flip replicateM_ $ withBuffer0' $ before >> insertN text >> after commitLastInsertionE :: EditorM () commitLastInsertionE = do mins <- withBuffer0 $ getA currentViInsertionA withBuffer0 $ putA currentViInsertionA Nothing putA lastViCommandA =<< maybe (return NoOp) (withBuffer0 . viCommandOfViInsertion) mins savingCommandY :: (Int -> YiM ()) -> Int -> YiM () savingCommandY f i = putA lastViCommandA (ArbCmd f i) >> f i savingCommandE :: (Int -> EditorM ()) -> Int -> EditorM () savingCommandE f i = putA lastViCommandA (ArbCmd (withEditor . f) i) >> f i savingCommandE'Y :: (Int -> EditorM ()) -> Int -> YiM () savingCommandE'Y f = withEditor' . savingCommandE f savingCommandEY :: (Int -> EditorM ()) -> Int -> YiM () savingCommandEY f = withEditor . savingCommandE f savingCommandB :: (Int -> BufferM ()) -> Int -> EditorM () savingCommandB f = savingCommandE (withBuffer0 . f) savingCommandB' :: (Int -> BufferM ()) -> Int -> EditorM () savingCommandB' f = savingCommandE (withBuffer0' . f) savingCommandB'Y :: (Int -> BufferM ()) -> Int -> YiM () savingCommandB'Y f = withEditor . savingCommandB' f viMove :: ViMove -> BufferM () viMove NoMove = return () viMove (GenMove unit boundary dir) = genMoveB unit boundary dir viMove (MaybeMove unit dir) = maybeMoveB unit dir viMove (Move unit dir) = moveB unit dir viMove (CharMove Forward) = moveXorEol 1 viMove (CharMove Backward) = moveXorSol 1 viMove (PercentageFile i) = movePercentageFile i viMove (ArbMove move) = move viMove (SeqMove move1 move2) = viMove move1 >> viMove move2 viMove (Replicate move i) = viReplicateMove move i viReplicateMove :: ViMove -> Int -> BufferM () viReplicateMove (Move VLine Forward) i = lineMoveRel i >> return () viReplicateMove (Move VLine Backward) i = lineMoveRel (-i) >> return () viReplicateMove (CharMove Forward) i = moveXorEol i viReplicateMove (CharMove Backward) i = moveXorSol i viReplicateMove (Replicate move j) i = viReplicateMove move (i * j) viReplicateMove move i = replicateM_ i $ viMove move movePercentageFile :: Int -> BufferM () movePercentageFile i = do let f :: Double f = case fromIntegral i / 100.0 of x | x > 1.0 -> 1.0 | x < 0.0 -> 0.0 -- Impossible? | otherwise -> x Point max_p <- sizeB setMarkHere '\'' moveTo $ Point $ floor (fromIntegral max_p * f) firstNonSpaceB mkKeymap :: Proto ModeMap -> KeymapSet mkKeymap p = KeymapSet { -- if the keymap "crashed" we restart here -- so we clear the status line to indicate whatever mode we were in -- has been left startTopKeymap = do write clrStatus write $ setInserting False write $ setVisibleSelection False , startInsertKeymap = do write clrStatus write $ setInserting True write $ setVisibleSelection False write $ setStatus (["-- INSERT --"], defaultStyle) , topKeymap = v_top_level v , insertKeymap = v_ins_char v } where v = extractValue p keymapSet :: KeymapSet keymapSet = mkKeymap defKeymap nilCmd :: VimExCmd nilCmd = VimExCmd { cmdNames = [] , cmdFn = (return . const ()) , completeFn = Nothing} exCmd :: String -> (String -> YiM ()) -> Maybe (String -> YiM ()) -> VimExCmd exCmd names fn cfn = VimExCmd { cmdNames = splitBy isSpace names , cmdFn = fn , completeFn = cfn } exCmds :: [(String, String->YiM (), Maybe (String -> YiM ()))] -> VimExCmdMap exCmds = map $ uncurry3 exCmd ignoreExCmd :: String -> String ignoreExCmd = dropWhile (isSpace) . dropWhile (not . 
isSpace) exSimpleComplete :: (String -> YiM [String]) -> String -> YiM () exSimpleComplete compl s' = simpleComplete compl s >>= withBuffer . insertN . drop (length s) where s = dropWhile isSpace s' exInfixComplete' :: Bool -> (String -> YiM [String]) -> String -> YiM () exInfixComplete' caseSensitive compl s' = do cs <- infixComplete' caseSensitive compl s when (not $ null cs) (withBuffer $ do leftN (length s) deleteToEol insertN cs) where s = dropWhile isSpace s' exInfixComplete :: (String -> YiM [String]) -> String -> YiM () exInfixComplete = exInfixComplete' True mkExHistComplete :: (String -> String -> Bool) -> (String -> YiM [String]) -> String -> YiM () mkExHistComplete matchFn compl s = mkWordComplete (return s) compl (withEditor . printMsgs . tail) matchFn >>= (withBuffer . (testDeleteB >> ) . insertN) where testDeleteB = if null s then return () else deleteWordB deleteWordB = deleteUnitB unitSep Backward deleteUnitB unit dir = deleteRegionB =<< regionOfPartNonEmptyB unit dir exHistComplete' :: Bool -> (String -> YiM [String]) -> String -> YiM () exHistComplete' caseSensitive = mkExHistComplete (mkIsPrefixOf caseSensitive) exHistComplete :: (String -> YiM [String]) -> String -> YiM () exHistComplete = exHistComplete' True exHistInfixComplete' :: Bool -> (String -> YiM [String]) -> String -> YiM () exHistInfixComplete' caseSensitive = mkExHistComplete match where match x y = isJust $ containsMatch' caseSensitive x y exHistInfixComplete :: (String -> YiM [String]) -> String -> YiM () exHistInfixComplete = exHistInfixComplete' True defKeymap :: Proto ModeMap defKeymap = Proto template where template self = ModeMap { v_top_level = def_top_level , v_ins_char = def_ins_char , v_opts = def_opts , v_ex_cmds = [] } where def_opts = VimOpts { tildeop = False , completeCaseSensitive = True , enableTagStack = True } -- | Top level consists of simple commands that take a count arg, -- the replace cmd, which consumes one char of input, and commands -- that switch modes. def_top_level = choice [cmd_eval,cmd_move,cmd2other,cmd_op] -- | Replace mode is like insert, except it performs writes, not inserts -- TODO repeat rep_mode :: VimMode rep_mode = write (setStatus (["-- REPLACE --"], defaultStyle)) >> many rep_char >> leaveInsRep >> write (moveXorSol 1) -- | Reset the selection style to a character-wise mode 'Inclusive'. resetSelectStyle :: BufferM () resetSelectStyle = putA regionStyleA Inclusive -- | Visual mode, similar to command mode vis_move :: VimMode vis_move = (moveKeymap >>= write . viMove . snd) <|> do cnt <- count let i = fromMaybe 1 cnt choice ([events evs >>! action i | (evs,action) <- visOrCmdFM ] ++ [events evs >>! action cnt | (evs, action) <- scrollCmdFM ]) vis_mode :: RegionStyle -> VimMode vis_mode selStyle = do write $ do putA rectangleSelectionA $ Block == selStyle setVisibleSelection True pointB >>= setSelectionMarkPointB core_vis_mode selStyle write (clrStatus >> withBuffer0' (setVisibleSelection False >> resetSelectStyle)) core_vis_mode :: RegionStyle -> VimMode core_vis_mode selStyle = do write $ do withBuffer0' $ putA regionStyleA selStyle setStatus ([msg selStyle], defaultStyle) many (vis_move <|> select_any_unit (withBuffer0' . 
            (\r -> resetSelectStyle >> extendSelectRegionB r >> leftB)))
        visual2other selStyle
      where
        msg LineWise = "-- VISUAL LINE --"
        msg Block = "-- VISUAL BLOCK --"
        msg _ = "-- VISUAL --"

    -- | Change visual mode
    change_vis_mode :: RegionStyle -> RegionStyle -> VimMode
    change_vis_mode src dst
        | src == dst = return ()
        | otherwise = core_vis_mode dst

    -- | A KeymapM to accumulate digits.
    -- typically what is needed for integer repetition arguments to commands
    count :: KeymapM (Maybe Int)
    count = (deprioritize >> pure Nothing) <|> do
        c <- charOf id '1' '9'
        cs <- many $ charOf id '0' '9'
        return $ Just $ read (c:cs)

    viMoveToNthEol :: Int -> BufferM ()
    viMoveToNthEol n = replicateM_ n $ maybeMoveB Line Forward

    viMoveToEol :: ViMove
    viMoveToEol = MaybeMove Line Forward

    viMoveToSol :: ViMove
    viMoveToSol = MaybeMove Line Backward

    -- ---------------------------------------------------------------------
    -- | KeymapM for movement commands
    --
    -- They may be invoked directly, or sometimes as arguments to other
    -- /operator/ commands (like d).
    --
    cmd_move :: VimMode
    cmd_move = moveKeymap >>= write . withBuffer0' . viMove . snd

    -- the returned RegionStyle is used when the movement is combined with a 'cut' or 'yank'.
    moveKeymap :: KeymapM (RegionStyle, ViMove)
    moveKeymap = choice
        [ char '0' ?>> return (Exclusive, viMoveToSol)
        , char '%' ?>> return percentMove
        , do
            cnt <- count
            let x = fromMaybe 1 cnt
            choice ([c ?>> return (Inclusive, a x) | (c,a) <- moveCmdFM_inclusive ] ++
                    [pString s >> return (Inclusive, a x) | (s,a) <- moveCmdS_inclusive ] ++
                    [c ?>> return (Exclusive, a x) | (c,a) <- moveCmdFM_exclusive ] ++
                    [events evs >> return (Exclusive, a x) | (evs,a) <- moveCmdS_exclusive ] ++
                    [c ?>> return (LineWise, a x) | (c,a) <- moveUpDownCmdFM] ++
                    [do event c; c' <- textChar; return (r, a c' x) | (c,r,a) <- move2CmdFM] ++
                    [char 'G' ?>> return (LineWise, ArbMove $ setMarkHere '\'' >>
                                            maybe (botB >> firstNonSpaceB) gotoFNS cnt)
                    ,pString "gg" >> return (LineWise, ArbMove $ setMarkHere '\'' >> gotoFNS (fromMaybe 0 cnt))
                    ,char '\'' ?>> do
                        c <- validMarkIdentifier
                        return (LineWise, ArbMove $ jumpToMark c >> firstNonSpaceB)
                    ,char '`' ?>> do
                        c <- validMarkIdentifier
                        return (Exclusive, ArbMove $ jumpToMark c)
                    -- The count value, in this case, is interpreted as a percentage
                    -- instead of a repeat count.
                    ,char '%' ?>> return (LineWise, PercentageFile x)])]
      where
        gotoFNS :: Int -> BufferM ()
        gotoFNS n = gotoLn n >> firstNonSpaceB

    -- | movement commands (with exclusive cut/yank semantics)
    moveCmdFM_exclusive :: [(Event, (Int -> ViMove))]
    moveCmdFM_exclusive =
        -- left/right
        [(char 'h', left)
        ,(ctrlCh 'h', left)
        ,(spec KBS, left)
        ,(spec KLeft, left)
        ,(spec KRight, right)
        ,(char 'l', right)
        ,(char ' ', right)
        -- eol / sol / special column
        ,(spec KHome, sol)
        ,(char '^', const $ ArbMove firstNonSpaceB)
        ,(char '|', ArbMove . moveToColB . pred)
        ,(char '$', eol)
        ,(spec KEnd, eol)
        -- words
        ,(char 'w', jumpF unitViWord)
        ,(char 'W', jumpF unitViWORD)
        ,(char 'b', jumpB unitViWord)
        ,(char 'B', jumpB unitViWORD)
        ,(ctrl $ spec KLeft, jumpB unitViWORD)
        ,(ctrl $ spec KRight, jumpF unitViWORD)
        -- text
        ,(char '{', Replicate $ Move unitEmacsParagraph Backward)
        ,(char '}', Replicate $ Move unitEmacsParagraph Forward)
        ,(char '(', Replicate $ Move unitSentence Backward)
        ,(char ')', Replicate $ Move unitSentence Forward)
        ]
      where
        left = Replicate $ CharMove Backward
        right = Replicate $ CharMove Forward
        sol = Replicate viMoveToSol
        eol = ArbMove . viMoveToNthEol
        jumpF = \unit -> Replicate $ GenMove unit (Backward,InsideBound) Forward
        jumpB = \unit -> Replicate $ Move unit Backward

    -- | movement *multi-chars* commands (with exclusive cut/yank semantics)
    moveCmdS_exclusive :: [([Event], (Int -> ViMove))]
    moveCmdS_exclusive =
        [(map char "[(", Replicate $ ArbMove (goUnmatchedB Backward '(' ')'))
        ,(map char "[{", Replicate $ ArbMove (goUnmatchedB Backward '{' '}'))
        ,(map char "])", Replicate $ ArbMove (goUnmatchedB Forward '(' ')'))
        ,(map char "]}", Replicate $ ArbMove (goUnmatchedB Forward '{' '}'))
        ,(map char "gk", up)
        ,([char 'g', spec KUp], up)
        ,(map char "gj", down)
        ,([char 'g', spec KDown], down)
        ]
      where
        up = Replicate (Move VLine Backward)
        down = Replicate (Move VLine Forward)

    -- | movement commands (with inclusive cut/yank semantics)
    moveCmdFM_inclusive :: [(Event, (Int -> ViMove))]
    moveCmdFM_inclusive =
        [(char 'e', Replicate $ GenMove unitViWord (Forward, InsideBound) Forward)
        ,(char 'E', Replicate $ GenMove unitViWORD (Forward, InsideBound) Forward)]

    -- | movement *multi-chars* commands (with inclusive cut/yank semantics)
    moveCmdS_inclusive :: [(String, (Int -> ViMove))]
    moveCmdS_inclusive =
        [("ge", Replicate $ GenMove unitViWord (Forward, InsideBound) Backward)
        ,("gE", Replicate $ GenMove unitViWORD (Forward, InsideBound) Backward)
        ,("g_", const $ ArbMove lastNonSpaceB)]

    -- | up/down movement commands. these ones are separated from moveCmdFM_{inclusive,exclusive}
    -- because they behave differently when yanking/cutting (line mode).
    moveUpDownCmdFM :: [(Event, Int -> ViMove)]
    moveUpDownCmdFM =
        [(char 'k', up)
        ,(spec KUp, up)
        ,(ctrlCh 'p', up)
        ,(char 'j', down)
        ,(spec KDown, down)
        ,(ctrlCh 'j', down)
        ,(ctrlCh 'n', down)
        ,(spec KEnter, down)
        ,(char '-', fns up)
        ,(char '+', fns down)
        ,(ctrlCh 'm', fns down)
        ,(char '_', fns down . pred)
        -- misc
        ,(char 'H', ArbMove . downFromTosB . pred)
        ,(char 'M', const $ ArbMove middleB)
        ,(char 'L', ArbMove . upFromBosB . pred)
        ]
      where
        up = Replicate (Move VLine Backward)
        down = Replicate (Move VLine Forward)
        fns m = (`SeqMove` ArbMove firstNonSpaceB) . m

    -- | more movement commands. these ones are parameterised by a character
    -- to find in the buffer.
    move2CmdFM :: [(Event, RegionStyle, Char -> Int -> ViMove)]
    move2CmdFM =
        -- these Inc/Exc in {next,prev}C{Inc,Exc} are not quite the same
        -- as Exclusive/Inclusive; look at the vim manual for more details.
        [(char 'f', Inclusive, Replicate . ArbMove . nextCInc)
        ,(char 'F', Exclusive, Replicate . ArbMove . prevCInc)
        ,(char 't', Inclusive, Replicate . ArbMove . nextCExc)
        ,(char 'T', Exclusive, Replicate . ArbMove . prevCExc)
        ]

    -- | Other command mode functions
    cmd_eval :: VimMode
    cmd_eval = do
        cnt <- count
        let i = fromMaybe 1 cnt
        choice $
            [events evs >>! action i | (evs, action) <- cmdFM ] ++
            [events evs >>! action i | (evs, action) <- visOrCmdFM ] ++
            [events evs >>! action cnt | (evs, action) <- scrollCmdFM ] ++
            [char 'r' ?>> do
                c <- textChar
                write $ savingCommandB (savingPointB . writeN . flip replicate c) i
            ,char 'm' ?>> setMark
            ,char '.' ?>>!
                applyViCmd cnt =<< withEditor (getA lastViCommandA)]

    searchCurrentWord :: Direction -> EditorM ()
    searchCurrentWord dir = do
        w <- withBuffer0' $ readRegionB =<< regionOfNonEmptyB unitViWord
        viSearch (boundedPattern w) [] dir
      where
        boundedPattern x = "\\<" ++ (regexEscapeString x) ++ "\\>"

    gotoTag :: Tag -> YiM ()
    gotoTag tag =
        visitTagTable $ \tagTable ->
            case lookupTag tag tagTable of
                Nothing -> fail $ "No tags containing " ++ tag
                Just (filename, line) -> do
                    when (enableTagStack $ v_opts self) viTagStackPushPos
                    viFnewE filename
                    withBuffer' $ gotoLn line
                    return ()

    viTagStackPushPos :: YiM ()
    viTagStackPushPos = withEditor $ do
        bn <- withBuffer0 $ gets identString
        p <- withBuffer0 pointB
        pushTagStack bn p

    gotoPrevTagMark :: Int -> YiM ()
    gotoPrevTagMark count = do
        lastP <- withEditor $ popTagStack count
        case lastP of
            Nothing -> withEditor $ fail "bottom of tag stack"
            Just (fp, p) -> do
                viFnewE fp
                withBuffer' $ moveTo p

    -- | Call continuation @act@ with the TagTable. Uses the global table
    -- and prompts the user if it doesn't exist
    visitTagTable :: (TagTable -> YiM ()) -> YiM ()
    visitTagTable act = do
        posTagTable <- withEditor getTags
        -- does the tagtable exist?
        case posTagTable of
            Just tagTable -> act tagTable
            Nothing -> do
                fps <- withEditor getTagsFileList -- withBuffer0' $ tagsFileList <$> getDynamicB
                efps <- io $ filterM fileExist fps
                when (null efps) $ fail ("No existing tags file among: " ++ show fps)
                tagTable <- io $ importTagTable (head efps)
                withEditor $ setTags tagTable
                act tagTable

    gotoTagCurrentWord :: YiM ()
    gotoTagCurrentWord = gotoTag =<< withEditor (withBuffer0' (readRegionB =<< regionOfNonEmptyB unitViWord))

    -- | Parse any character that can be inserted in the text.
    textChar :: KeymapM Char
    textChar = do
        Event (KASCII c) [] <- anyEvent
        return c

    continueSearching :: (Direction -> Direction) -> EditorM ()
    continueSearching fdir = do
        m <- getRegexE
        dir <- fdir <$> getA searchDirectionA
        printMsg $ directionElim dir '?' '/' : maybe "" seInput m
        viSearch "" [] dir

    skippingFirst :: ([a] -> [a]) -> [a] -> [a]
    skippingFirst f = list [] (\x -> (x :) . f)

    skippingLast :: ([a] -> [a]) -> [a] -> [a]
    skippingLast f xs = f (init xs) ++ [last xs]

    skippingNull :: ([a] -> [b]) -> [a] -> [b]
    skippingNull _ [] = []
    skippingNull f xs = f xs

    joinLinesB :: Region -> BufferM ()
    joinLinesB = savingPointB .
        (modifyRegionClever $ skippingLast $
            concat . (skippingFirst $ map $ skippingNull ((' ':) . dropWhile isSpace)) . lines')

    concatLinesB :: Region -> BufferM ()
    concatLinesB = savingPointB .
(modifyRegionClever $ skippingLast $ filter (/='\n')) onCurrentWord :: (String -> String) -> BufferM () onCurrentWord f = savingPointB $ modifyRegionClever f =<< regionOfNonEmptyB unitViWord onNumberInString :: (Read a, Show a, Num a) => (a -> a) -> String -> String onNumberInString f s = case reads s2 of [] -> s (n, rest):_ -> s1 ++ show (f n) ++ rest where (s1,s2) = break isDigit s -- as cmdFM but these commands are also valid in visual mode visOrCmdFM :: [([Event], Int -> YiM ())] visOrCmdFM = [([ctrlCh 'l'], const userForceRefresh) ,([ctrlCh 'z'], const suspendEditor) ,([ctrlCh 't'], gotoPrevTagMark) ,([ctrlCh ']'], const gotoTagCurrentWord) -- TODO add support for 'count' ] ++ (fmap.second.fmap) withEditor [([ctrlW, char 'c'], const tryCloseE) ,([ctrlW, char 'o'], const closeOtherE) ,([ctrlW, char 's'], const splitE) ,([ctrlW, char 'w'], nextWinE') ,([ctrlW, ctrlW], nextWinE') ,([ctrlW, char 'W'], prevWinE') ,([ctrlW, char 'p'], prevWinE') -- these 4 commands should go to moveKeymap -- however moveKeymap is currently confined to BufferM ,([char 'n'], const $ continueSearching id) ,([char 'N'], const $ continueSearching reverseDir) ,([char '*'], const $ searchCurrentWord Forward) ,([char '#'], const $ searchCurrentWord Backward) -- since we don't have vertical splitting, -- these moving can be done using next/prev. ,([ctrlW,spec KDown], nextWinE') ,([ctrlW,spec KUp], prevWinE') ,([ctrlW,spec KRight], nextWinE') ,([ctrlW,spec KLeft], prevWinE') ,([ctrlW,char 'k'], prevWinE') ,([ctrlW,char 'j'], nextWinE') -- Same as the above pair, when you're a bit slow to release ctl. ,([ctrlW, ctrlCh 'k'], prevWinE') ,([ctrlW, ctrlCh 'j'], nextWinE') ,(map char "ga", const viCharInfo) ,(map char "g8", const viChar8Info) ,(map char "gt", nextTabE') ,(map char "gT", prevTabE') ] where nextWinE' = flip replicateM_ nextWinE prevWinE' = flip replicateM_ prevWinE nextTabE' = flip replicateM_ nextTabE prevTabE' = flip replicateM_ previousTabE -- | cmd mode commands -- An event specified paired with an action that may take an integer argument. -- Usually the integer argument is the number of times an action should be repeated. cmdFM :: [([Event], Int -> YiM ())] cmdFM = [([ctrlCh 'g'], const $ withEditor viFileInfo) ,([ctrlCh '^'], withEditor . alternateBufferE . (+ (-1)) ) -- undo/redo ,([char 'u'], withBuffer' . flip replicateM_ undoB) ,([char 'U'], withBuffer' . flip replicateM_ undoB) -- NB not correct ,([ctrlCh 'r'], withBuffer' . flip replicateM_ redoB) ,([ctrlCh 'a'], savingCommandB'Y $ onCurrentWord . onNumberInString . (+)) ,([ctrlCh 'x'], savingCommandB'Y $ onCurrentWord . onNumberInString . flip (-)) ,([char 'D'], savingCommandE'Y $ cut Exclusive . ArbMove . viMoveToNthEol) ,([char 'J'], savingCommandB'Y $ (joinLinesB =<<) . countLinesRegion . max 2) ,(map char "gJ", savingCommandB'Y $ (concatLinesB =<<) . countLinesRegion . max 2) ,([char 'Y'], withEditor . yank LineWise . (Replicate $ Move Line Forward)) ,([char 'X'], savingCommandE'Y $ cut Exclusive . (Replicate $ CharMove Backward)) ,([char 'x'], savingCommandE'Y $ cut Exclusive . (Replicate $ CharMove Forward)) ,([spec KDel], savingCommandE'Y $ cut Exclusive . (Replicate $ CharMove Forward)) -- pasting ,([char 'p'], savingCommandEY $ flip replicateM_ pasteAfter) ,([char 'P'], savingCommandEY $ flip replicateM_ pasteBefore) ,(map char "ZZ", const $ viWriteModified >> closeWindow) ,(map char "ZQ", const closeWindow) ] ++ [ ([char '~'], savingCommandB'Y $ (flip mapRegionB switchCaseChar =<<) . flip regionOfViMove Exclusive . 
Replicate (CharMove Forward)) | not $ tildeop $ v_opts self ] ctrlW :: Event ctrlW = ctrlCh 'w' scrollCmdFM :: [([Event], Maybe Int -> BufferM ())] scrollCmdFM = [([ctrlCh 'b'], upScreensB . fromMaybe 1) -- vim does (firstNonSpaceB;moveXorSol) ,([ctrlCh 'f'], downScreensB . fromMaybe 1) ,([ctrlCh 'u'], vimScrollByB (negate . (`div` 2)) . fromMaybe 1) ,([ctrlCh 'd'], vimScrollByB (`div` 2) . fromMaybe 1) ,([ctrlCh 'y'], vimScrollB . negate . fromMaybe 1) ,([ctrlCh 'e'], vimScrollB . fromMaybe 1) ,([spec KPageUp], upScreensB . fromMaybe 1) ,([spec KPageDown], downScreensB . fromMaybe 1) ,([char 'z', spec KEnter], mmGoFNS scrollCursorToTopB) ,(map char "zt", mmGoSC scrollCursorToTopB) ,(map char "z.", mmGoFNS scrollToCursorB) ,(map char "zz", mmGoSC scrollToCursorB) ,(map char "z-", mmGoFNS scrollCursorToBottomB) ,(map char "zb", mmGoSC scrollCursorToBottomB)] where mayMove :: BufferM () -> Maybe Int -> BufferM () mayMove scroll cnt = do case cnt of Just n -> gotoLn n >> return () Nothing -> return () scroll mmGoFNS scroll = mayMove (scroll >> firstNonSpaceB) mmGoSC scroll = movingToPrefCol . mayMove scroll -- | So-called 'operators', which take movement actions as arguments. -- -- How do we achive this? We parse a known operator char then parse -- one of the known movement commands. We then apply the returned -- action and then the operator. For example, we 'd' command stores -- the current point, does a movement, then deletes from the old to -- the new point. cmd_op :: VimMode cmd_op = do cnt <- count let i = fromMaybe 1 cnt choice [let s1 = prefix [c] ss = nub [[c], s1] onRegion = onRegion' 1 in pString s1 >> choice ([ forceRegStyle >>= \ frs -> moveKeymap >>= write . applyOperator onRegion i . first frs -- TODO: text units (eg. dViB) , select_any_unit (onRegion Exclusive) ] ++ -- TODO repeat [ pString s >>! applyOperator onRegion (i-1) (LineWise, Move VLine Forward) | s <- ss ] ) | (prefix,_,c,onRegion') <- operators, c /= 'J' ] where -- | Forces RegionStyle; see motion.txt, line 116 and below (Vim 7.2) forceRegStyle = do style <- many $ choice [ char 'V' ?>> return (const LineWise) , char 'v' ?>> return swpRsOrIncl , ctrlCh 'v' ?>> return (const Block) ] return $ last (id:style) where swpRsOrIncl Exclusive = Inclusive swpRsOrIncl _ = Exclusive -- | operator (i.e. movement-parameterised) actions operators :: [((String->String), (String->String), Char, (Int -> RegionStyle -> Region -> EditorM ()))] operators = [ (id, id, 'd', const $ \s r -> cutRegion s r >> withBuffer0 leftOnEol) , (id, id, 'y', const $ nonBlockRegion "y" yankRegion) , (id, id, '=', const $ mapRegions_ indentRegion) , (id, id, '>', mapRegions_ . shiftIndentOfRegion) , (id, id, '<', mapRegions_ . shiftIndentOfRegion . negate) , (id, id, 'J', const $ nonBlockRegion "J" (const $ withBuffer0' . joinLinesB)) , (g_, g_, 'J', const $ nonBlockRegion "gJ" (const $ withBuffer0' . concatLinesB)) , (ti, id, '~', const $ viMapRegion switchCaseChar) , (g_, id, 'u', const $ viMapRegion toLower) , (g_, id, 'U', const $ viMapRegion toUpper) , (g_, g_, '?', const $ viMapRegion rot13Char) , (g_, g_, 'q', const $ nonBlockRegion "gq" (const $ withBuffer0' . fillRegion)) , (g_, g_, 'w', const $ nonBlockRegion "gw" (const $ withBuffer0' . savingPointB . 
fillRegion)) ] where g_ = ('g':) ti = if tildeop $ v_opts self then id else g_ nonBlockRegion n _ Block _ = fail (show n ++ " does not works yet for block selections") nonBlockRegion _ op s r = op s r mapRegions_ f Block r = withBuffer0' $ mapM_ f =<< blockifyRegion r mapRegions_ f _ r = withBuffer0' $ f r toOuter outer _ True = leftBoundaryUnit outer toOuter _ inner False = inner char2unit :: [(Char, Bool -> TextUnit)] char2unit = [('w', toOuter unitViWord unitViWordAnyBnd) ,('W', toOuter unitViWORD unitViWORDAnyBnd) ,('p', toOuter unitEmacsParagraph unitEmacsParagraph) -- TODO inner could be inproved ,('s', toOuter unitSentence unitSentence) -- TODO inner could be inproved ,('"', unitDelimited '"' '"') ,('`', unitDelimited '`' '`') ,('\'', unitDelimited '\'' '\'') ,('(', unitDelimited '(' ')') ,(')', unitDelimited '(' ')') ,('b', unitDelimited '(' ')') ,('{', unitDelimited '{' '}') ,('}', unitDelimited '{' '}') ,('B', unitDelimited '{' '}') ,('<', unitDelimited '<' '>') ,('>', unitDelimited '<' '>') ] select_any_unit :: (MonadInteract m Action Event) => (Region -> EditorM ()) -> m () select_any_unit f = do outer <- (char 'a' ?>> pure True) <|> (char 'i' ?>> pure False) choice [ char c ?>> write (f =<< withBuffer0' (regionOfNonEmptyB $ unit outer)) | (c, unit) <- char2unit] regionOfSelection :: BufferM (RegionStyle, Region) regionOfSelection = do setMarkHere '>' regionStyle <- getA regionStyleA region <- join $ mkRegionOfStyleB <$> getSelectionMarkPointB <*> pointB <*> pure regionStyle return (regionStyle, region) indentRegion :: Region -> BufferM () indentRegion region = do len <- length . filter (=='\n') <$> readRegionB region savingPointB $ do moveTo $ regionStart region replicateM_ len $ adjIndent IncreaseCycle >> lineDown firstNonSpaceB yankRegion :: RegionStyle -> Region -> EditorM () yankRegion regionStyle region | regionIsEmpty region = return () | otherwise = do when (regionStyle == Block) $ fail "yankRegion does not work on block regions" txt <- withBuffer0' $ readRegionB region setRegE $ if regionStyle == LineWise then '\n':txt else txt let rowsYanked = length (filter (== '\n') txt) when (rowsYanked > 2) $ printMsg $ show rowsYanked ++ " lines yanked" yank :: RegionStyle -> ViMove -> EditorM () yank regionStyle move = yankRegion regionStyle =<< (withBuffer0' $ regionOfViMove move regionStyle) cutRegion :: RegionStyle -> Region -> EditorM () cutRegion Block region = do withBuffer0' $ mapM_ deleteRegionB =<< reverse <$> blockifyRegion region printMsg "This block region is not cut just deleted" cutRegion regionStyle region | regionIsEmpty region = return () | otherwise = do (txt, rowsCut) <- withBuffer0 $ do txt <- readRegionB region let rowsCut = length $ filter (=='\n') txt when (rowsCut==0) $ replicateM_ (length txt) (adjBlock (-1)) deleteRegionB region return (txt, rowsCut) setRegE $ if regionStyle == LineWise then '\n':txt else txt when (rowsCut > 2) $ printMsg $ show rowsCut ++ " fewer lines" cut :: RegionStyle -> ViMove -> EditorM () cut regionStyle move = do region <- withBuffer0 $ regionOfViMove move regionStyle cutRegion regionStyle region cutSelection :: EditorM () cutSelection = uncurry cutRegion =<< withBuffer0' regionOfSelection pasteOverSelection :: EditorM () pasteOverSelection = do txt <- getRegE withBuffer0' $ do regStyle <- getA regionStyleA start <- getSelectionMarkPointB stop <- pointB region <- mkRegionOfStyleB start stop regStyle moveTo $ regionStart region deleteRegionB region insertN txt pasteAfter :: EditorM () pasteAfter = do txt' <- getRegE withBuffer0' 
$ do when ('\n' `notElem` txt') $ adjBlock $ length txt' case txt' of '\n':txt -> moveToEol >> rightB >> insertN txt >> leftN (length txt) _ -> moveXorEol 1 >> insertN txt' >> leftB pasteBefore :: EditorM () pasteBefore = do txt' <- getRegE withBuffer0' $ do when ('\n' `notElem` txt') $ adjBlock $ length txt' case txt' of '\n':txt -> moveToSol >> insertN txt >> leftN (length txt) _ -> insertN txt' >> leftB switchCaseChar :: Char -> Char switchCaseChar c = if isUpper c then toLower c else toUpper c onCharLetterCode :: (Int -> Int) -> Char -> Char onCharLetterCode f c | isUpper c || isLower c = chr (f (ord c - a) `mod` 26 + a) | otherwise = c where a | isUpper c = ord 'A' | isLower c = ord 'a' | otherwise = undefined rot13Char :: Char -> Char rot13Char = onCharLetterCode (+13) viMapRegion :: (Char -> Char) -> RegionStyle -> Region -> EditorM () viMapRegion f Block region = withBuffer0' $ mapM_ (`mapRegionB` f) =<< blockifyRegion region viMapRegion f _ region = withBuffer0' $ mapRegionB region f countLinesRegion :: Int -> BufferM Region countLinesRegion n = regionOfViMove (Replicate (Move VLine Forward) (n - 1)) LineWise -- | Switching to another mode from visual mode. -- -- All visual commands are meta actions, as they transfer control to another -- KeymapM. In this way vis_single is analogous to cmd2other -- visual2other :: RegionStyle -> VimMode visual2other selStyle = do cnt <- count let i = fromMaybe 1 cnt choice $ [spec KEsc ?>> return () ,char 'V' ?>> change_vis_mode selStyle LineWise ,char 'v' ?>> change_vis_mode selStyle Inclusive ,ctrlCh 'v'?>> change_vis_mode selStyle Block ,char ':' ?>>! ex_mode ":'<,'>" ,char 'p' ?>>! pasteOverSelection -- TODO repeat ,char 'x' ?>>! (cutSelection >> withBuffer0 leftOnEol) -- TODO repeat ,char 's' ?>> beginIns self (cutSelection >> withBuffer0 (setVisibleSelection False)) -- TODO repeat ,char 'c' ?>> beginIns self (cutSelection >> withBuffer0 (setVisibleSelection False)) -- TODO repeat ,char 'r' ?>> do x <- textChar -- TODO repeat let convert '\n' = '\n' convert _ = x write $ uncurry (viMapRegion convert) =<< withBuffer0 regionOfSelection ] ++ [pString (prefix [c]) >>! (uncurry (action i) =<< withBuffer0' regionOfSelection) -- TODO repeat | (_, prefix, c, action) <- operators ] -- | Switch to another vim mode from command mode. -- -- These commands are meta actions, as they transfer control to another -- KeymapM. Some of these commands also perform an action before switching. -- cmd2other :: VimMode cmd2other = choice [char ':' ?>>! ex_mode ":", char 'v' ?>> vis_mode Inclusive, char 'V' ?>> vis_mode LineWise, ctrlCh 'v' ?>> vis_mode Block, -- one use VLine for block mode char 'R' ?>> rep_mode, char 'i' ?>> ins_mode self, char 'I' ?>> beginInsB self firstNonSpaceB, pString "gi" >> beginInsB self (jumpToMark '^'), pString "gI" >> beginInsB self moveToSol, char 'a' ?>> beginInsB self $ moveXorEol 1, char 'A' ?>> beginInsB self moveToEol, char 'o' ?>> beginInsB self $ moveToEol >> insertB '\n', char 'O' ?>> beginInsB self $ moveToSol >> insertB '\n' >> lineUp, char 'c' ?>> changeCmds, -- FIXME: those two should take int argument char 'C' ?>> change NoMove Exclusive viMoveToEol, -- alias of "c$" char 'S' ?>> change viMoveToSol LineWise viMoveToEol, -- alias of "cc" TODO update char 's' ?>> change NoMove Exclusive (CharMove Forward), -- non-linewise alias of "cl" char '/' ?>>! ex_mode "/", char '?' ?>>! 
ex_mode "?", leave, spec KIns ?>> ins_mode self] -- TODO cw,cW,cc,c[ai]<unit> don't support counting changeCmds :: I Event Action () changeCmds = adjustPriority (-1) >> ((char 'w' ?>> change NoMove Exclusive (GenMove unitViWord (Forward, OutsideBound) Forward)) <|> (char 'W' ?>> change NoMove Exclusive (GenMove unitViWORD (Forward, OutsideBound) Forward))) <|> (char 'c' ?>> change NoMove LineWise NoMove) <|> (uncurry (change NoMove) =<< moveKeymap) <|> (select_any_unit (cutRegion Exclusive) >> ins_mode self) -- this correct while the RegionStyle is not LineWise change :: ViMove -> RegionStyle -> ViMove -> I Event Action () change preMove regionStyle move = beginInsE self $ do withBuffer0' $ viMove preMove cut regionStyle move when (regionStyle == LineWise) $ withBuffer0' $ insertB '\n' >> leftB -- TODO repeat (savingInsertCharB?) -- The Vim semantics is a little different here, When receiving CTRL-D -- instead of looking at the last typed character, one look at the previous -- character in buffer and if it's '0' then one delete the indentation. -- This means that one are sensible to lines already containing a '0'. -- I consider this to be very minor issue. dedentOrDeleteIndent :: BufferM () dedentOrDeleteIndent = do c <- savingPointB (moveXorSol 1 >> readB) r <- regionOfB Line if c == '0' then deleteB Character Backward >> deleteIndentOfRegion r else shiftIndentOfRegion (-1) r upTo :: Alternative f => f a -> Int -> f [a] _ `upTo` 0 = empty p `upTo` n = (:) <$> p <*> (p `upTo` pred n <|> pure []) insertSpecialChar :: (Char -> BufferM ()) -> VimMode insertSpecialChar insrepB = insertNumber insrepB <|> (ctrlCh '@' ?>>! insrepB '\000') <|| (write . withBuffer0' . insrepB . eventToChar =<< anyEvent) insertNumber :: (Char -> BufferM ()) -> VimMode insertNumber insrepB = choice [g [charOf id '0' '1',dec,dec] "" ,g [charOf id '2' '2',charOf id '0' '5',dec] "" ,g [charOf id '2' '2',charOf id '6' '9'] "" ,g [charOf id '3' '9',dec] "" ,oneOf (map char "oO") >> g [charOf id '0' '3',oct,oct] "0o" ,oneOf (map char "oO") >> g [charOf id '4' '7',oct] "0o" ,oneOf (map char "xX") >> g [hex,hex] "0x" -- NP: I don't get why this does not work (ex typing "i<CTRL-Q>u3b1.") -- ,char 'u' ?>> f (hex `upTo` 4) "0x" ,char 'u' ?>> f (sequence $ replicate 4 hex) "0x" ,char 'U' ?>> f (sequence $ replicate 8 hex) "0x"] where dec = charOf id '0' '9' oct = charOf id '0' '7' hex = charOf id '0' '9' <|> charOf id 'a' 'f' <|> charOf id 'A' 'F' f digits prefix = do xs <- digits write $ withBuffer0' $ insrepB $ chr $ read $ prefix ++ xs g = f . sequence ins_rep_char :: (Char -> BufferM ()) -> VimMode ins_rep_char insrepB = choice [spec KPageUp ?>>! upScreenB ,spec KPageDown ?>>! downScreenB ,spec KUp ?>>! lineUp ,spec KDown ?>>! lineDown ,spec KLeft ?>>! moveXorSol 1 ,spec KRight ?>>! moveXorEol 1 ,spec KEnd ?>>! moveToEol ,spec KHome ?>>! moveToSol ,spec KDel ?>>! savingDeleteCharB Forward ,spec KEnter ?>>! savingInsertCharB '\n' ,(ctrl $ spec KLeft) ?>>! moveB unitViWORD Backward ,(ctrl $ spec KRight) ?>>! genMoveB unitViWORD (Backward,InsideBound) Forward ,ctrlCh 'j' ?>>! savingInsertCharB '\n' ,ctrlCh 'm' ?>>! savingInsertCharB '\r' ,spec KTab ?>>! mapM_ insrepB =<< tabB ,ctrlCh 'i' ?>>! mapM_ insrepB =<< tabB ,ctrlCh 'e' ?>>! insrepB =<< savingPointB (lineDown >> readB) ,ctrlCh 'y' ?>>! insrepB =<< savingPointB (lineUp >> readB) ,ctrlCh 't' ?>>! savingCommandB (const $ savingPointB $ shiftIndentOfRegion 1 =<< regionOfB Line) 1 --TODO should not move the cursor ,ctrlCh 'd' ?>>! 
savingCommandE (const $ withBuffer0' $ savingPointB dedentOrDeleteIndent) 1 -- IDEM ,ctrlCh 'v' ?>> insertSpecialChar insrepB ,ctrlCh 'q' ?>> insertSpecialChar insrepB ] -- -- Some ideas for a better insert mode are contained in: -- -- Poller and Garter , "A comparative study of moded and modeless -- text editing by experienced editor users", 1983 -- -- which suggest that movement commands be added to insert mode, along -- with delete. -- -- Which is fine in Vim (and so in Yi too) since there is a bunch of -- handy bindings to edit while composing (backspace, C-W, C-T, C-D, C-E, C-Y...) -- def_ins_char = choice [spec KBS ?>>! savingDeleteCharB Backward ,ctrlCh 'h' ?>>! savingDeleteCharB Backward ,ctrlCh 'w' ?>>! savingDeleteWordB Backward ] <|> ins_rep_char savingInsertCharB <|| (textChar >>= write . (adjBlock 1 >>) . savingInsertCharB) -- --------------------------------------------------------------------- -- | vim replace mode -- -- To quote vim: -- In Replace mode, one character in the line is deleted for every character -- you type. If there is no character to delete (at the end of the line), the -- typed character is appended (as in Insert mode). Thus the number of -- characters in a line stays the same until you get to the end of the line. -- If a <NL> is typed, a line break is inserted and no character is deleted. rep_char :: VimMode rep_char = choice [spec KBS ?>>! leftB ,ctrlCh 'h' ?>>! leftB ,ctrlCh 'w' ?>>! genMoveB unitViWord (Backward,InsideBound) Backward ] -- should undo unless pointer has been moved <|> ins_rep_char replaceB <|| do c <- textChar; write $ replaceB c where replaceB c = do e <- atEol; if e then insertB c else writeB c -- savingInsertCharB ? -- --------------------------------------------------------------------- -- Ex mode. We also process regex searching mode here. -- findUserCmd :: String -> Maybe VimExCmd findUserCmd cmdLine = find ((name `elem`) . cmdNames) $ v_ex_cmds self where name = takeWhile (not . isSpace) $ dropWhile isSpace cmdLine ex_mode :: String -> EditorM () ex_mode prompt = do -- The above ensures that the action is performed on the buffer that originated the minibuffer. let ex_buffer_finish = do withEditor historyFinish lineString <- withBuffer' elemsB withEditor closeBufferAndWindowE ex_eval (head prompt : lineString) ex_process :: VimMode ex_process = (some (spec KTab ?>>! completeMinibuffer) >> deprioritize >>! resetComplete) <|| choice [spec KEnter ?>>! ex_buffer_finish ,spec KEsc ?>>! closeBufferAndWindowE ,ctrlCh 'h' ?>>! actionAndHistoryPrefix $ deleteB Character Backward ,spec KBS ?>>! deleteBkdOrClose ,spec KDel ?>>! actionAndHistoryPrefix $ deleteB Character Forward ,ctrlCh 'p' ?>>! historyUp ,spec KUp ?>>! historyUp ,ctrlCh 'n' ?>>! historyDown ,spec KDown ?>>! historyDown ,spec KLeft ?>>! moveXorSol 1 ,spec KRight ?>>! moveXorEol 1 ,ctrlCh 'w' ?>>! actionAndHistoryPrefix $ deleteB unitWord Backward ,ctrlCh 'u' ?>>! moveToSol >> deleteToEol] <|| (insertChar >>! setHistoryPrefix) actionAndHistoryPrefix act = do withBuffer0 $ act setHistoryPrefix setHistoryPrefix = do ls <- withEditor . withBuffer0 $ elemsB historyPrefixSet ls insertChar = textChar >>= write . 
insertB deleteBkdOrClose = do ls <- withBuffer0 elemsB if null ls then closeBufferAndWindowE else actionAndHistoryPrefix $ deleteB Character Backward findUserComplFn s | Just ex_cmd <- findUserCmd s = completeFn ex_cmd | otherwise = Nothing completeMinibuffer = do s <- withBuffer elemsB case findUserComplFn s of Just cmplFn -> cmplFn $ ignoreExCmd s Nothing -> ex_complete s f_complete = exSimpleComplete (matchingFileNames Nothing) b_complete = exSimpleComplete matchingBufferNames ex_complete ('c':'d':' ':f) = f_complete f ex_complete ('e':' ':f) = f_complete f ex_complete ('e':'d':'i':'t':' ':f) = f_complete f ex_complete ('w':' ':f) = f_complete f ex_complete ('w':'r':'i':'t':'e':' ':f) = f_complete f ex_complete ('r':' ':f) = f_complete f ex_complete ('r':'e':'a':'d':' ':f) = f_complete f ex_complete ('t':'a':'b':'e':' ':f) = f_complete f ex_complete ('s':'a':'v':'e':'a':'s':' ':f) = f_complete f ex_complete ('s':'a':'v':'e':'a':'s':'!':' ':f) = f_complete f ex_complete ('b':' ':f) = b_complete f ex_complete ('b':'u':'f':'f':'e':'r':' ':f) = b_complete f ex_complete ('b':'d':' ':f) = b_complete f ex_complete ('b':'d':'!':' ':f) = b_complete f ex_complete ('b':'d':'e':'l':'e':'t':'e':' ':f) = b_complete f ex_complete ('b':'d':'e':'l':'e':'t':'e':'!':' ':f) = b_complete f ex_complete ('c':'a':'b':'a':'l':' ':s) = cabalComplete s ex_complete ('s':'e':'t':' ':'f':'t':'=':f) = completeModes f ex_complete ('y':'i':' ':s) = exSimpleComplete (const getAllNamesInScope) s ex_complete s = catchAllComplete s userExCmds = concatMap (map (++ " ") . cmdNames) $ v_ex_cmds self catchAllComplete = exSimpleComplete $ const $ return $ (userExCmds ++) $ ("hoogle-word" :) $ ("hoogle-search" : )$ ("set ft=" :) $ ("set tags=" :) $ map (++ " ") $ words $ "e edit r read saveas saveas! tabe tabnew tabm b buffer bd bd! bdelete bdelete! " ++ "yi cabal nohlsearch cd pwd suspend stop undo redo redraw reload tag .! quit quitall " ++ "qall quit! quitall! qall! write wq wqall ascii xit exit next prev" ++ "$ split new ball h help" cabalComplete = exSimpleComplete $ const $ return cabalCmds cabalCmds = words "configure install list update upgrade fetch upload check sdist" ++ words "report build copy haddock clean hscolour register test help" completeModes = exSimpleComplete $ const getAllModeNames historyStart historyPrefixSet "" spawnMinibufferE prompt $ const ex_process return () -- | eval an ex command to an YiM (), also appends to the ex history ex_eval :: String -> YiM () ex_eval cmd = case cmd of -- regex searching ('/':pat) -> withEditor $ viSearch pat [] Forward ('?':pat) -> withEditor $ viSearch pat [] Backward -- TODO: Remapping could be done using the <|| operator somehow. -- The remapped stuff could be saved in a keymap-local state, (using StateT monad transformer). -- add mapping to command mode (_:'m':'a':'p':' ':_cs) -> error "Not yet implemented." -- add mapping to insert mode (_:'m':'a':'p':'!':' ':_cs) -> error "Not yet implemented." -- unmap a binding from command mode (_:'u':'n':'m':'a':'p':' ':_cs) -> error "Not yet implemented." -- unmap a binding from insert mode (_:'u':'n':'m':'a':'p':'!':' ':_cs) -> error "Not yet implemented." -- just a normal ex command (_:src) -> evalCmd $ dropSpace src -- can't happen, but deal with it [] -> return () where {- safeQuitWindow implements the commands in vim equivalent to :q. - Closes the current window unless the current window is the last window on a - modified buffer that is not considered "worthless". 
-} safeQuitWindow = do nw <- withBuffer' needsAWindowB ws <- withEditor $ getA currentWindowA >>= windowsOnBufferE . bufkey if 1 == length ws && nw then errorEditor "No write since last change (add ! to override)" else closeWindow needsAWindowB = do isWorthless <- gets (either (const True) (const False) . (^. identA)) canClose <- gets isUnchangedBuffer if isWorthless || canClose then return False else return True {- quitWindow implements the commands in vim equivalent to :q! - Closes the current window regardless of whether the window is on a modified - buffer or not. - TODO: Does not quit the editor if there are modified hidden buffers. - - Corey - Vim appears to abandon any changes to the current buffer if the window being - closed is the last window on the buffer. The, now unmodified, buffer is still around - and can be switched to using :b. I think this is odd and prefer the modified buffer - sticking around. -} quitWindow = closeWindow {- safeQuitAllWindows implements the commands in vim equivalent to :qa! - Exits the editor unless there is a modified buffer that is not worthless. -} safeQuitAllWindows = do bs <- mapM (\b -> (,) b <$> withEditor (withGivenBuffer0 b needsAWindowB)) =<< readEditor bufferStack -- Vim only shows the first modified buffer in the error. case find snd bs of Nothing -> quitEditor Just (b, _) -> do bufferName <- withEditor $ withGivenBuffer0 b $ gets file errorEditor $ "No write since last change for buffer " ++ show bufferName ++ " (add ! to override)" whenUnchanged mu f = do u <- mu if u then f else errorEditor "No write since last change (add ! to override)" wquitall = forAllBuffers fwriteBufferE >> quitEditor bdelete = whenUnchanged (withBuffer' $ gets isUnchangedBuffer) . withEditor . closeBufferE . dropSpace bdeleteNoW = withEditor . closeBufferE . dropSpace -- the help feature currently try to show available key bindings help = withEditor (printMsg . show =<< acceptedInputs) evalCmd cmdLine = case findUserCmd cmdLine of Just ex_cmd -> cmdFn ex_cmd $ ignoreExCmd cmdLine Nothing -> fn cmdLine -- fn maps from the text entered on the command line to a YiM () implementing the -- command. fn "" = withEditor clrStatus fn s | all isDigit s = withBuffer' (setMarkHere '\'' >> gotoLn (read s) >> firstNonSpaceB) fn "w" = viWrite fn ('w':' ':f) = viSafeWriteTo $ dropSpace f fn ('w':'r':'i':'t':'e':' ':f) = viSafeWriteTo $ dropSpace f fn ('w':'!':' ':f) = viWriteTo $ dropSpace f fn ('w':'r':'i':'t':'e':'!':' ':f) = viWriteTo $ dropSpace f fn "qa" = safeQuitAllWindows fn "qal" = safeQuitAllWindows fn "qall" = safeQuitAllWindows fn "quita" = safeQuitAllWindows fn "quital" = safeQuitAllWindows fn "quitall" = safeQuitAllWindows fn "q" = safeQuitWindow fn "qu" = safeQuitWindow fn "qui" = safeQuitWindow fn "quit" = safeQuitWindow fn "q!" = quitWindow fn "qu!" = quitWindow fn "qui!" = quitWindow fn "quit!" = quitWindow fn "qa!" = quitEditor fn "qal!" = quitEditor fn "qall!" = quitEditor fn "quita!" = quitEditor fn "quital!" = quitEditor fn "quitall!" 
= quitEditor fn "wq" = viWrite >> closeWindow fn "wqa" = wquitall fn "wqal" = wquitall fn "wqall" = wquitall fn "as" = withEditor viCharInfo fn "ascii" = withEditor viCharInfo fn "x" = viWriteModified >> closeWindow fn "xi" = viWriteModified >> closeWindow fn "xit" = viWriteModified >> closeWindow fn "exi" = viWriteModified >> closeWindow fn "exit" = viWriteModified >> closeWindow fn "n" = withEditor nextBufW fn "next" = withEditor nextBufW fn "$" = withBuffer' botB fn "p" = withEditor prevBufW fn "prev" = withEditor prevBufW fn ('s':'p':_) = withEditor splitE fn "e" = revertE fn "edit" = revertE fn ('e':' ':f) = viFnewE f fn ('e':'d':'i':'t':' ':f) = viFnewE f fn ('s':'a':'v':'e':'a':'s':' ':f) = let f' = dropSpace f in viSafeWriteTo f' >> fnewE f' fn ('s':'a':'v':'e':'a':'s':'!':' ':f) = let f' = dropSpace f in viWriteTo f' >> fnewE f' fn ('r':' ':f) = withBuffer' . insertN =<< io (readFile $ dropSpace f) fn ('r':'e':'a':'d':' ':f) = withBuffer' . insertN =<< io (readFile $ dropSpace f) fn ('s':'e':'t':' ':'f':'t':'=':ft) = do (AnyMode m) <- anyModeByName (dropSpace ft) ; withBuffer $ setMode m fn ('s':'e':'t':' ':'t':'a':'g':'s':'=':fps) = withEditor $ setTagsFileList fps fn ('n':'e':'w':' ':f) = withEditor splitE >> viFnewE f fn ('s':'/':cs) = withEditor $ viSub cs Line fn ('%':'s':'/':cs) = withEditor $ viSub cs Document fn ('b':' ':"m") = withEditor $ switchToBufferWithNameE "*messages*" fn ('b':' ':f) = withEditor $ switchToBufferWithNameE $ dropSpace f fn "bd" = bdelete "" fn "bdelete" = bdelete "" fn ('b':'d':' ':f) = bdelete f fn ('b':'d':'e':'l':'e':'t':'e':' ':f) = bdelete f fn "bd!" = bdeleteNoW "" fn "bdelete!" = bdeleteNoW "" fn ('b':'d':'!':' ':f) = bdeleteNoW f fn ('b':'d':'e':'l':'e':'t':'e':'!':' ':f) = bdeleteNoW f -- TODO: bd[!] [N] fn ('t':'a':'g':' ':t) = gotoTag t -- send just this line through external command /fn/ fn ('.':'!':f) = do ln <- withBuffer' readLnB ln' <- runProcessWithInput f ln withBuffer' $ do moveToSol deleteToEol insertN ln' moveToSol -- Needs to occur in another buffer -- fn ('!':f) = runProcessWithInput f [] fn "reload" = reload >> return () -- not in vim fn "redr" = userForceRefresh fn "redraw" = userForceRefresh fn "u" = withBuffer' undoB fn "undo" = withBuffer' undoB fn "red" = withBuffer' redoB fn "redo" = withBuffer' redoB fn ('c':'d':' ':f) = io . setCurrentDirectory . dropSpace $ f fn "pwd" = (io $ getCurrentDirectory) >>= withEditor . printMsg fn "sus" = suspendEditor fn "suspend" = suspendEditor fn "st" = suspendEditor fn "stop" = suspendEditor fn ('c':'a':'b':'a':'l':' ':s) = cabalRun s1 (const $ return ()) (CommandArguments $ words $ drop 1 s2) where (s1, s2) = break (==' ') s fn ('y':'i':' ':s) = execEditorAction $ dropSpace s fn "hoogle-word" = hoogle >> return () fn "hoogle-search" = hoogleSearch fn "h" = help fn "help" = help fn "tabm" = withEditor (moveTab Nothing) fn ('t':'a':'b':'m':' ':n) = withEditor (moveTab $ Just (read n)) fn "tabnew" = withEditor $ do newTabE newTempBufferE return () fn ('t':'a':'b':'e':' ':f) = withEditor newTabE >> viFnewE f fn "ball" = withEditor openAllBuffersE fn "noh" = withEditor resetRegexE fn "nohlsearch" = withEditor resetRegexE fn s = errorEditor $ "The "++show s++ " command is unknown." 
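-- Illustrative examples (added for clarity, not part of the original module)
-- of how 'ex_eval' dispatches a command line: the leading character decides
-- between regex search and the 'fn' table above. The command strings are the
-- usual ex-mode inputs; the behaviours listed are read off the clauses above.
--
--   ex_eval "/foo"         -- forward regex search for "foo"
--   ex_eval ":%s/a/b/g"    -- substitute over the whole Document
--   ex_eval ":w file.txt"  -- write the buffer to file.txt (viSafeWriteTo)
--   ex_eval ":bd!"         -- delete the buffer without checking for changes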
------------------------------------------------------------------------

--not_implemented :: Char -> YiM ()
--not_implemented c = errorEditor $ "Not implemented: " ++ show c

-- ---------------------------------------------------------------------
-- Misc functions

forAllBuffers :: (BufferRef -> YiM ()) -> YiM ()
forAllBuffers f = mapM_ f =<< readEditor bufferStack

viCharInfo :: EditorM ()
viCharInfo = do
    c <- withBuffer0' readB
    printMsg $ showCharInfo c ""
  where
    showCharInfo :: Char -> ShowS
    showCharInfo c = shows c . showChar ' ' . shows d . showString ", Hex " .
                     showHex d . showString ", Octal " . showOct d
      where d = ord c

viChar8Info :: EditorM ()
viChar8Info = do
    c <- withBuffer0' readB
    let w8 = UTF8.encode [c]
    printMsg $ shows c . showChar ' ' . showSeq shows w8 . showString ", Hex " .
               showSeq showHex w8 . showString ", Octal " . showSeq showOct w8 $ ""
  where
    showSeq showX xs s = foldr ($) s $ intersperse (showChar ' ') $ map showX xs

viFileInfo :: EditorM ()
viFileInfo = do
    bufInfo <- withBuffer0' bufInfoB
    printMsg $ showBufInfo bufInfo
  where
    showBufInfo :: BufferFileInfo -> String
    showBufInfo bufInfo = concat [ show $ bufInfoFileName bufInfo
                                 , " Line "
                                 , show $ bufInfoLineNo bufInfo
                                 , " ["
                                 , bufInfoPercent bufInfo
                                 , "]" ]

-- | write the current buffer, but only if modified (cf. :help :x)
viWriteModified :: YiM ()
viWriteModified = do
    unchanged <- withBuffer' $ gets isUnchangedBuffer
    unless unchanged viWrite

viFnewE :: String -> YiM ()
viFnewE = fnewE . dropSpace

-- | viSearch is a doSearch wrapper that prints the search outcome.
-- TODO: consider merging with doSearch
viSearch :: String -> [SearchOption] -> Direction -> EditorM ()
viSearch x y z = do
    r <- doSearch (if null x then Nothing else Just x) y z
    case r of
        PatternFound -> return ()
        PatternNotFound -> printMsg "Pattern not found"
        SearchWrapped -> printMsg "Search wrapped"

-- | Try to do a substitution
viSub :: String -> TextUnit -> EditorM ()
viSub cs unit = do
    let (pat,rep') = break (== '/') cs
        (rep,opts) = case rep' of
                         [] -> ([],[])
                         (_:ds) -> case break (== '/') ds of
                                       (rep'', []) -> (rep'', [])
                                       (rep'', (_:fs)) -> (rep'',fs)
    case opts of
        [] -> do_single pat rep False
        ['g'] -> do_single pat rep True
        _ -> fail ("Trailing characters " ++ show (take 10 opts)) -- TODO more options
  where
    do_single p r g = do
        s <- searchAndRepUnit p r g unit
        if not s then fail ("Pattern not found: "++p) else clrStatus

-- | Leave a mode. This always has priority over catch-all actions inside the mode.
leave :: VimMode
leave = oneOf [spec KEsc, ctrlCh 'c'] >> adjustPriority (-1) >> write clrStatus

leaveInsRep :: VimMode
leaveInsRep = do
    oneOf [spec KEsc, ctrlCh '[', ctrlCh 'c']
    adjustPriority (-1)
    write $ commitLastInsertionE >> withBuffer0 (setMarkHere '^')
    startTopKeymap keymapSet

-- | Insert mode is either insertion actions, or the meta (\ESC) action
-- TODO repeat
ins_mode :: ModeMap -> VimMode
ins_mode self = do
    startInsertKeymap keymapSet
    many (v_ins_char self <|> kwd_mode (v_opts self))
    leaveInsRep
    write $ moveXorSol 1

-- TODO refactor with beginInsB and beginInsE
beginIns :: (Show x, YiAction a x) => ModeMap -> a -> I Event Action ()
beginIns self a = write a >> ins_mode self

beginInsB :: ModeMap -> BufferM () -> I Event Action ()
beginInsB self = beginInsE self .
    withBuffer0

beginInsE :: ModeMap -> EditorM () -> I Event Action ()
beginInsE self a = do
    write $ do
        a
        withBuffer0 $ do
            p <- pointB
            putA currentViInsertionA $ Just $ viActFirstA ^= Just a $ emptyViIns p
    ins_mode self

withBuffer0' :: BufferM a -> EditorM a
withBuffer0' f = withBuffer0 (f <* leftOnEol)

withBuffer' :: BufferM a -> YiM a
withBuffer' = withEditor . withBuffer0'

withEditor' :: EditorM a -> YiM a
withEditor' f = withEditor (f <* withBuffer0 leftOnEol)

-- Find the item after or under the cursor and jump to its match
percentMove :: (RegionStyle, ViMove)
percentMove = (Inclusive, ArbMove tryGoingToMatch)
  where
    tryGoingToMatch = do
        p <- pointB
        getViMarkB '\'' >>= flip setMarkPointB p
        foundMatch <- goToMatch
        unless foundMatch $ moveTo p
    go dir a b = goUnmatchedB dir a b >> return True
    goToMatch = do
        c <- readB
        case c of
            '(' -> go Forward '(' ')'
            ')' -> go Backward '(' ')'
            '{' -> go Forward '{' '}'
            '}' -> go Backward '{' '}'
            '[' -> go Forward '[' ']'
            ']' -> go Backward '[' ']'
            _ -> otherChar
    otherChar = do
        eof <- atEof
        eol <- atEol
        if eof || eol
            then return False
            else rightB >> goToMatch -- search for matchable character after the cursor

jumpToMark :: Char -> BufferM ()
jumpToMark c = do
    mm <- mayGetViMarkB c
    case mm of
        Nothing -> fail "Mark not set"
        Just m -> do
            p_next <- getMarkPointB m
            -- Retain the current point in the mark "'" automatically.
            p <- pointB
            getViMarkB '\'' >>= flip setMarkPointB p
            -- now jump to p_next.
            moveTo p_next

setMark :: VimMode
setMark = do
    c <- validMarkIdentifier
    write $ do
        p <- pointB
        -- Retain the current point in the mark "'" automatically.
        getViMarkB '\'' >>= flip setMarkPointB p
        getViMarkB c >>= flip setMarkPointB p

setMarkHere :: Char -> BufferM ()
setMarkHere c = do
    p <- pointB
    getViMarkB c >>= flip setMarkPointB p

getViMarkB :: Char -> BufferM Mark
getViMarkB '<' = selMark <$> askMarks
getViMarkB c = getMarkB $ Just [c]

mayGetViMarkB :: Char -> BufferM (Maybe Mark)
mayGetViMarkB '<' = Just . selMark <$> askMarks
mayGetViMarkB c = mayGetMarkB [c]

validMarkIdentifier :: (MonadInteract m w Event) => m Char
validMarkIdentifier = fmap f $ oneOfchar "<>^'`" <|> charOf id 'a' 'z' <|> fail "Not a valid mark identifier."
  where
    oneOfchar = choice . map (\c -> event (char c) >> return c)
    f '`' = '\''
    f c = c

-- --------------------
-- | Keyword

kwd_mode :: VimOpts -> VimMode
kwd_mode opts = some (ctrlCh 'n' ?>> write . viWordComplete $ completeCaseSensitive opts) >> deprioritize >> write resetComplete
    -- 'adjustPriority' is there to lift the ambiguity between "continuing" completion
    -- and resetting it (restarting at the 1st completion).
  where
    viWordComplete caseSensitive =
        withEditor . withBuffer0 .
            (savingDeleteWordB Backward >>) . savingInsertStringB =<< wordCompleteString' caseSensitive
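-- A minimal usage sketch (assumption, not from this file): 'defKeymap' is a
-- 'Proto ModeMap', so a user configuration would normally derive its own
-- 'ModeMap' from it instead of writing one from scratch. The 'override'
-- combinator is assumed to come from Data.Prototype; only 'defKeymap',
-- 'ModeMap', 'v_opts' and 'tildeop' are defined in this module.
--
--   myModeMap :: Proto ModeMap
--   myModeMap = defKeymap `override` \super _self ->
--       super { v_opts = (v_opts super) { tildeop = True } }
--
-- With 'tildeop' enabled, '~' behaves as an operator (see the 'operators'
-- table above) rather than as the single-character case toggle.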
codemac/yi-editor
src/Yi/Keymap/Vim.hs
gpl-2.0
78,123
3
26
26,898
20,808
10,645
10,163
-1
-1