code | repo_name | path | license | size | n_ast_errors | ast_max_depth | n_whitespaces | n_ast_nodes | n_ast_terminals | n_ast_nonterminals | loc | cycloplexity
---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# Language OverloadedStrings, ExtendedDefaultRules #-}
import Shelly
-- This test runs, but causes this error to show up:
-- Exception: cannot access an inherited pipe
main = shelly $
runHandles "bash" ["examples/test.sh"] handles doNothing
where handles = [InHandle Inherit, OutHandle Inherit, ErrorHandle Inherit]
doNothing _ _ _ = return ""
|
adinapoli/Shelly.hs
|
test/examples/run-handles.hs
|
bsd-3-clause
| 358 | 0 | 8 | 62 | 68 | 36 | 32 | 6 | 1 |
{-# LANGUAGE CPP, NondecreasingIndentation, TupleSections #-}
{-# OPTIONS -fno-warn-incomplete-patterns -optc-DNON_POSIX_SOURCE #-}
-----------------------------------------------------------------------------
--
-- GHC Driver program
--
-- (c) The University of Glasgow 2005
--
-----------------------------------------------------------------------------
module Main (main) where
-- The official GHC API
import qualified GHC
import GHC ( -- DynFlags(..), HscTarget(..),
-- GhcMode(..), GhcLink(..),
Ghc, GhcMonad(..),
LoadHowMuch(..) )
import CmdLineParser
-- Implementations of the various modes (--show-iface, mkdependHS, etc.)
import LoadIface ( showIface )
import HscMain ( newHscEnv )
import DriverPipeline ( oneShot, compileFile )
import DriverMkDepend ( doMkDependHS )
#ifdef GHCI
import InteractiveUI ( interactiveUI, ghciWelcomeMsg, defaultGhciSettings )
#endif
-- Various other random stuff that we need
import Config
import Constants
import HscTypes
import Packages ( pprPackages, pprPackagesSimple, pprModuleMap )
import DriverPhases
import BasicTypes ( failed )
import StaticFlags
import DynFlags
import ErrUtils
import FastString
import Outputable
import SrcLoc
import Util
import Panic
import UniqSupply
import MonadUtils ( liftIO )
-- Imports for --abi-hash
import LoadIface ( loadUserInterface )
import Module ( mkModuleName )
import Finder ( findImportedModule, cannotFindInterface )
import TcRnMonad ( initIfaceCheck )
import Binary ( openBinMem, put_, fingerprintBinMem )
-- Standard Haskell libraries
import System.IO
import System.Environment
import System.Exit
import System.FilePath
import Control.Monad
import Data.Char
import Data.List
import Data.Maybe
-----------------------------------------------------------------------------
-- ToDo:
-- time commands when run with -v
-- user ways
-- Win32 support: proper signal handling
-- reading the package configuration file is too slow
-- -K<size>
-----------------------------------------------------------------------------
-- GHC's command-line interface
main :: IO ()
main = do
initGCStatistics -- See Note [-Bsymbolic and hooks]
hSetBuffering stdout LineBuffering
hSetBuffering stderr LineBuffering
-- Handle GHC-specific character encoding flags, allowing us to control how
-- GHC produces output regardless of OS.
env <- getEnvironment
case lookup "GHC_CHARENC" env of
Just "UTF-8" -> do
hSetEncoding stdout utf8
hSetEncoding stderr utf8
_ -> do
-- Avoid GHC erroring out when trying to display unhandled characters
hSetTranslit stdout
hSetTranslit stderr
GHC.defaultErrorHandler defaultFatalMessager defaultFlushOut $ do
-- 1. extract the -B flag from the args
argv0 <- getArgs
let (minusB_args, argv1) = partition ("-B" `isPrefixOf`) argv0
mbMinusB | null minusB_args = Nothing
| otherwise = Just (drop 2 (last minusB_args))
let argv1' = map (mkGeneralLocated "on the commandline") argv1
(argv2, staticFlagWarnings) <- parseStaticFlags argv1'
-- 2. Parse the "mode" flags (--make, --interactive etc.)
(mode, argv3, modeFlagWarnings) <- parseModeFlags argv2
let flagWarnings = staticFlagWarnings ++ modeFlagWarnings
-- If all we want to do is something like showing the version number
-- then do it now, before we start a GHC session etc. This makes
-- getting basic information much more resilient.
-- In particular, if we wait until later before giving the version
-- number then bootstrapping gets confused, as it tries to find out
-- what version of GHC it's using before package.conf exists, so
-- starting the session fails.
case mode of
Left preStartupMode ->
do case preStartupMode of
ShowSupportedExtensions -> showSupportedExtensions
ShowVersion -> showVersion
ShowNumVersion -> putStrLn cProjectVersion
ShowOptions isInteractive -> showOptions isInteractive
Right postStartupMode ->
-- start our GHC session
GHC.runGhc mbMinusB $ do
dflags <- GHC.getSessionDynFlags
case postStartupMode of
Left preLoadMode ->
liftIO $ do
case preLoadMode of
ShowInfo -> showInfo dflags
ShowGhcUsage -> showGhcUsage dflags
ShowGhciUsage -> showGhciUsage dflags
PrintWithDynFlags f -> putStrLn (f dflags)
Right postLoadMode ->
main' postLoadMode dflags argv3 flagWarnings
main' :: PostLoadMode -> DynFlags -> [Located String] -> [Located String]
-> Ghc ()
main' postLoadMode dflags0 args flagWarnings = do
-- set the default GhcMode, HscTarget and GhcLink. The HscTarget
-- can be further adjusted on a module by module basis, using only
-- the -fvia-C and -fasm flags. If the default HscTarget is not
-- HscC or HscAsm, -fvia-C and -fasm have no effect.
let dflt_target = hscTarget dflags0
(mode, lang, link)
= case postLoadMode of
DoInteractive -> (CompManager, HscInterpreted, LinkInMemory)
DoEval _ -> (CompManager, HscInterpreted, LinkInMemory)
DoMake -> (CompManager, dflt_target, LinkBinary)
DoMkDependHS -> (MkDepend, dflt_target, LinkBinary)
DoAbiHash -> (OneShot, dflt_target, LinkBinary)
_ -> (OneShot, dflt_target, LinkBinary)
let dflags1 = case lang of
HscInterpreted ->
let platform = targetPlatform dflags0
dflags0a = updateWays $ dflags0 { ways = interpWays }
dflags0b = foldl gopt_set dflags0a
$ concatMap (wayGeneralFlags platform)
interpWays
dflags0c = foldl gopt_unset dflags0b
$ concatMap (wayUnsetGeneralFlags platform)
interpWays
in dflags0c
_ ->
dflags0
dflags2 = dflags1{ ghcMode = mode,
hscTarget = lang,
ghcLink = link,
verbosity = case postLoadMode of
DoEval _ -> 0
_other -> 1
}
-- turn on -fimplicit-import-qualified for GHCi now, so that it
-- can be overridden from the command-line
-- XXX: this should really be in the interactive DynFlags, but
-- we don't set that until later in interactiveUI
dflags3 | DoInteractive <- postLoadMode = imp_qual_enabled
| DoEval _ <- postLoadMode = imp_qual_enabled
| otherwise = dflags2
where imp_qual_enabled = dflags2 `gopt_set` Opt_ImplicitImportQualified
-- The rest of the arguments are "dynamic"
-- Leftover ones are presumably files
(dflags4, fileish_args, dynamicFlagWarnings) <- GHC.parseDynamicFlags dflags3 args
GHC.prettyPrintGhcErrors dflags4 $ do
let flagWarnings' = flagWarnings ++ dynamicFlagWarnings
handleSourceError (\e -> do
GHC.printException e
liftIO $ exitWith (ExitFailure 1)) $ do
liftIO $ handleFlagWarnings dflags4 flagWarnings'
-- make sure we clean up after ourselves
GHC.defaultCleanupHandler dflags4 $ do
liftIO $ showBanner postLoadMode dflags4
let
-- To simplify the handling of filepaths, we normalise all filepaths right
-- away - e.g., for win32 platforms, backslashes are converted
-- into forward slashes.
normal_fileish_paths = map (normalise . unLoc) fileish_args
(srcs, objs) = partition_args normal_fileish_paths [] []
dflags5 = dflags4 { ldInputs = map (FileOption "") objs
++ ldInputs dflags4 }
-- we've finished manipulating the DynFlags, update the session
_ <- GHC.setSessionDynFlags dflags5
dflags6 <- GHC.getSessionDynFlags
hsc_env <- GHC.getSession
---------------- Display configuration -----------
case verbosity dflags6 of
v | v == 4 -> liftIO $ dumpPackagesSimple dflags6
| v >= 5 -> liftIO $ dumpPackages dflags6
| otherwise -> return ()
when (verbosity dflags6 >= 3) $ do
liftIO $ hPutStrLn stderr ("Hsc static flags: " ++ unwords staticFlags)
when (dopt Opt_D_dump_mod_map dflags6) . liftIO $
printInfoForUser (dflags6 { pprCols = 200 })
(pkgQual dflags6) (pprModuleMap dflags6)
liftIO $ initUniqSupply (initialUnique dflags6) (uniqueIncrement dflags6)
---------------- Final sanity checking -----------
liftIO $ checkOptions postLoadMode dflags6 srcs objs
---------------- Do the business -----------
handleSourceError (\e -> do
GHC.printException e
liftIO $ exitWith (ExitFailure 1)) $ do
case postLoadMode of
ShowInterface f -> liftIO $ doShowIface dflags6 f
DoMake -> doMake srcs
DoMkDependHS -> doMkDependHS (map fst srcs)
StopBefore p -> liftIO (oneShot hsc_env p srcs)
DoInteractive -> ghciUI srcs Nothing
DoEval exprs -> ghciUI srcs $ Just $ reverse exprs
DoAbiHash -> abiHash (map fst srcs)
ShowPackages -> liftIO $ showPackages dflags6
liftIO $ dumpFinalStats dflags6
ghciUI :: [(FilePath, Maybe Phase)] -> Maybe [String] -> Ghc ()
#ifndef GHCI
ghciUI _ _ = throwGhcException (CmdLineError "not built for interactive use")
#else
ghciUI = interactiveUI defaultGhciSettings
#endif
-- -----------------------------------------------------------------------------
-- Splitting arguments into source files and object files. This is where we
-- interpret the -x <suffix> option, and attach a (Maybe Phase) to each source
-- file indicating the phase specified by the -x option in force, if any.
partition_args :: [String] -> [(String, Maybe Phase)] -> [String]
-> ([(String, Maybe Phase)], [String])
partition_args [] srcs objs = (reverse srcs, reverse objs)
partition_args ("-x":suff:args) srcs objs
| "none" <- suff = partition_args args srcs objs
| StopLn <- phase = partition_args args srcs (slurp ++ objs)
| otherwise = partition_args rest (these_srcs ++ srcs) objs
where phase = startPhase suff
(slurp,rest) = break (== "-x") args
these_srcs = zip slurp (repeat (Just phase))
partition_args (arg:args) srcs objs
| looks_like_an_input arg = partition_args args ((arg,Nothing):srcs) objs
| otherwise = partition_args args srcs (arg:objs)
{-
We split out the object files (.o, .dll) and add them
to ldInputs for use by the linker.
The following things should be considered compilation manager inputs:
- haskell source files (strings ending in .hs, .lhs or other
haskellish extension),
- module names (not forgetting hierarchical module names),
- things beginning with '-' are flags that were not recognised by
the flag parser, and we want them to generate errors later in
checkOptions, so we class them as source files (#5921)
- and finally we consider everything not containing a '.' to be
a comp manager input, as shorthand for a .hs or .lhs filename.
Everything else is considered to be a linker object, and passed
straight through to the linker.
-}
looks_like_an_input :: String -> Bool
looks_like_an_input m = isSourceFilename m
|| looksLikeModuleName m
|| "-" `isPrefixOf` m
|| '.' `notElem` m
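-- A worked illustration of the classification above (the inputs here are
-- hypothetical, and it is assumed that startPhase maps the "c" suffix to the
-- C-compilation phase Cc):
--
--   partition_args ["Foo.hs", "-x", "c", "bar.c", "-x", "none", "baz.o"] [] []
--
-- treats "Foo.hs" as a source file with no forced phase, "bar.c" as a source
-- file whose pipeline starts at the C phase (because of "-x c"), and "baz.o"
-- as a linker object, yielding
--
--   ([("Foo.hs", Nothing), ("bar.c", Just Cc)], ["baz.o"])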
-- -----------------------------------------------------------------------------
-- Option sanity checks
-- | Ensure sanity of options.
--
-- Throws 'UsageError' or 'CmdLineError' if not.
checkOptions :: PostLoadMode -> DynFlags -> [(String,Maybe Phase)] -> [String] -> IO ()
-- Final sanity checking before kicking off a compilation (pipeline).
checkOptions mode dflags srcs objs = do
-- Complain about any unknown flags
let unknown_opts = [ f | (f@('-':_), _) <- srcs ]
when (notNull unknown_opts) (unknownFlagsErr unknown_opts)
when (notNull (filter wayRTSOnly (ways dflags))
&& isInterpretiveMode mode) $
hPutStrLn stderr ("Warning: -debug, -threaded and -ticky are ignored by GHCi")
-- -prof and --interactive are not a good combination
when ((filter (not . wayRTSOnly) (ways dflags) /= interpWays)
&& isInterpretiveMode mode) $
do throwGhcException (UsageError
"--interactive can't be used with -prof or -static.")
-- -ohi sanity check
if (isJust (outputHi dflags) &&
(isCompManagerMode mode || srcs `lengthExceeds` 1))
then throwGhcException (UsageError "-ohi can only be used when compiling a single source file")
else do
-- -o sanity checking
if (srcs `lengthExceeds` 1 && isJust (outputFile dflags)
&& not (isLinkMode mode))
then throwGhcException (UsageError "can't apply -o to multiple source files")
else do
let not_linking = not (isLinkMode mode) || isNoLink (ghcLink dflags)
when (not_linking && not (null objs)) $
hPutStrLn stderr ("Warning: the following files would be used as linker inputs, but linking is not being done: " ++ unwords objs)
-- Check that there are some input files
-- (except in the interactive case)
if null srcs && (null objs || not_linking) && needsInputsMode mode
then throwGhcException (UsageError "no input files")
else do
case mode of
StopBefore HCc | hscTarget dflags /= HscC
-> throwGhcException $ UsageError $
"the option -C is only available with an unregisterised GHC"
_ -> return ()
-- Verify that output files point somewhere sensible.
verifyOutputFiles dflags
-- Compiler output options
-- Called to verify that the output files point somewhere valid.
--
-- The assumption is that the directory portion of these output
-- options will have to exist by the time 'verifyOutputFiles'
-- is invoked.
--
-- We create the directories for -odir, -hidir, -outputdir etc. ourselves if
-- they don't exist, so don't check for those here (#2278).
verifyOutputFiles :: DynFlags -> IO ()
verifyOutputFiles dflags = do
let ofile = outputFile dflags
when (isJust ofile) $ do
let fn = fromJust ofile
flg <- doesDirNameExist fn
when (not flg) (nonExistentDir "-o" fn)
let ohi = outputHi dflags
when (isJust ohi) $ do
let hi = fromJust ohi
flg <- doesDirNameExist hi
when (not flg) (nonExistentDir "-ohi" hi)
where
nonExistentDir flg dir =
throwGhcException (CmdLineError ("error: directory portion of " ++
show dir ++ " does not exist (used with " ++
show flg ++ " option.)"))
-----------------------------------------------------------------------------
-- GHC modes of operation
type Mode = Either PreStartupMode PostStartupMode
type PostStartupMode = Either PreLoadMode PostLoadMode
data PreStartupMode
= ShowVersion -- ghc -V/--version
| ShowNumVersion -- ghc --numeric-version
| ShowSupportedExtensions -- ghc --supported-extensions
| ShowOptions Bool {- isInteractive -} -- ghc --show-options
showVersionMode, showNumVersionMode, showSupportedExtensionsMode, showOptionsMode :: Mode
showVersionMode = mkPreStartupMode ShowVersion
showNumVersionMode = mkPreStartupMode ShowNumVersion
showSupportedExtensionsMode = mkPreStartupMode ShowSupportedExtensions
showOptionsMode = mkPreStartupMode (ShowOptions False)
mkPreStartupMode :: PreStartupMode -> Mode
mkPreStartupMode = Left
isShowVersionMode :: Mode -> Bool
isShowVersionMode (Left ShowVersion) = True
isShowVersionMode _ = False
isShowNumVersionMode :: Mode -> Bool
isShowNumVersionMode (Left ShowNumVersion) = True
isShowNumVersionMode _ = False
data PreLoadMode
= ShowGhcUsage -- ghc -?
| ShowGhciUsage -- ghci -?
| ShowInfo -- ghc --info
| PrintWithDynFlags (DynFlags -> String) -- ghc --print-foo
showGhcUsageMode, showGhciUsageMode, showInfoMode :: Mode
showGhcUsageMode = mkPreLoadMode ShowGhcUsage
showGhciUsageMode = mkPreLoadMode ShowGhciUsage
showInfoMode = mkPreLoadMode ShowInfo
printSetting :: String -> Mode
printSetting k = mkPreLoadMode (PrintWithDynFlags f)
where f dflags = fromMaybe (panic ("Setting not found: " ++ show k))
$ lookup k (compilerInfo dflags)
mkPreLoadMode :: PreLoadMode -> Mode
mkPreLoadMode = Right . Left
isShowGhcUsageMode :: Mode -> Bool
isShowGhcUsageMode (Right (Left ShowGhcUsage)) = True
isShowGhcUsageMode _ = False
isShowGhciUsageMode :: Mode -> Bool
isShowGhciUsageMode (Right (Left ShowGhciUsage)) = True
isShowGhciUsageMode _ = False
data PostLoadMode
= ShowInterface FilePath -- ghc --show-iface
| DoMkDependHS -- ghc -M
| StopBefore Phase -- ghc -E | -C | -S
-- StopBefore StopLn is the default
| DoMake -- ghc --make
| DoInteractive -- ghc --interactive
| DoEval [String] -- ghc -e foo -e bar => DoEval ["bar", "foo"]
| DoAbiHash -- ghc --abi-hash
| ShowPackages -- ghc --show-packages
doMkDependHSMode, doMakeMode, doInteractiveMode,
doAbiHashMode, showPackagesMode :: Mode
doMkDependHSMode = mkPostLoadMode DoMkDependHS
doMakeMode = mkPostLoadMode DoMake
doInteractiveMode = mkPostLoadMode DoInteractive
doAbiHashMode = mkPostLoadMode DoAbiHash
showPackagesMode = mkPostLoadMode ShowPackages
showInterfaceMode :: FilePath -> Mode
showInterfaceMode fp = mkPostLoadMode (ShowInterface fp)
stopBeforeMode :: Phase -> Mode
stopBeforeMode phase = mkPostLoadMode (StopBefore phase)
doEvalMode :: String -> Mode
doEvalMode str = mkPostLoadMode (DoEval [str])
mkPostLoadMode :: PostLoadMode -> Mode
mkPostLoadMode = Right . Right
isDoInteractiveMode :: Mode -> Bool
isDoInteractiveMode (Right (Right DoInteractive)) = True
isDoInteractiveMode _ = False
isStopLnMode :: Mode -> Bool
isStopLnMode (Right (Right (StopBefore StopLn))) = True
isStopLnMode _ = False
isDoMakeMode :: Mode -> Bool
isDoMakeMode (Right (Right DoMake)) = True
isDoMakeMode _ = False
isDoEvalMode :: Mode -> Bool
isDoEvalMode (Right (Right (DoEval _))) = True
isDoEvalMode _ = False
#ifdef GHCI
isInteractiveMode :: PostLoadMode -> Bool
isInteractiveMode DoInteractive = True
isInteractiveMode _ = False
#endif
-- isInterpretiveMode: byte-code compiler involved
isInterpretiveMode :: PostLoadMode -> Bool
isInterpretiveMode DoInteractive = True
isInterpretiveMode (DoEval _) = True
isInterpretiveMode _ = False
needsInputsMode :: PostLoadMode -> Bool
needsInputsMode DoMkDependHS = True
needsInputsMode (StopBefore _) = True
needsInputsMode DoMake = True
needsInputsMode _ = False
-- True if we are going to attempt to link in this mode.
-- (we might not actually link, depending on the GhcLink flag)
isLinkMode :: PostLoadMode -> Bool
isLinkMode (StopBefore StopLn) = True
isLinkMode DoMake = True
isLinkMode DoInteractive = True
isLinkMode (DoEval _) = True
isLinkMode _ = False
isCompManagerMode :: PostLoadMode -> Bool
isCompManagerMode DoMake = True
isCompManagerMode DoInteractive = True
isCompManagerMode (DoEval _) = True
isCompManagerMode _ = False
-- -----------------------------------------------------------------------------
-- Parsing the mode flag
parseModeFlags :: [Located String]
-> IO (Mode,
[Located String],
[Located String])
parseModeFlags args = do
let ((leftover, errs1, warns), (mModeFlag, errs2, flags')) =
runCmdLine (processArgs mode_flags args)
(Nothing, [], [])
mode = case mModeFlag of
Nothing -> doMakeMode
Just (m, _) -> m
-- See Note [Handling errors when parsing commandline flags]
unless (null errs1 && null errs2) $ throwGhcException $ errorsToGhcException $
map (("on the commandline", )) $ map unLoc errs1 ++ errs2
return (mode, flags' ++ leftover, warns)
type ModeM = CmdLineP (Maybe (Mode, String), [String], [Located String])
-- mode flags sometimes give rise to new DynFlags (e.g. -C, see below)
-- so we collect the new ones and return them.
mode_flags :: [Flag ModeM]
mode_flags =
[ ------- help / version ----------------------------------------------
defFlag "?" (PassFlag (setMode showGhcUsageMode))
, defFlag "-help" (PassFlag (setMode showGhcUsageMode))
, defFlag "V" (PassFlag (setMode showVersionMode))
, defFlag "-version" (PassFlag (setMode showVersionMode))
, defFlag "-numeric-version" (PassFlag (setMode showNumVersionMode))
, defFlag "-info" (PassFlag (setMode showInfoMode))
, defFlag "-show-options" (PassFlag (setMode showOptionsMode))
, defFlag "-supported-languages" (PassFlag (setMode showSupportedExtensionsMode))
, defFlag "-supported-extensions" (PassFlag (setMode showSupportedExtensionsMode))
, defFlag "-show-packages" (PassFlag (setMode showPackagesMode))
] ++
[ defFlag k' (PassFlag (setMode (printSetting k)))
| k <- ["Project version",
"Project Git commit id",
"Booter version",
"Stage",
"Build platform",
"Host platform",
"Target platform",
"Have interpreter",
"Object splitting supported",
"Have native code generator",
"Support SMP",
"Unregisterised",
"Tables next to code",
"RTS ways",
"Leading underscore",
"Debug on",
"LibDir",
"Global Package DB",
"C compiler flags",
"Gcc Linker flags",
"Ld Linker flags"],
let k' = "-print-" ++ map (replaceSpace . toLower) k
replaceSpace ' ' = '-'
replaceSpace c = c
] ++
------- interfaces ----------------------------------------------------
[ defFlag "-show-iface" (HasArg (\f -> setMode (showInterfaceMode f)
"--show-iface"))
------- primary modes ------------------------------------------------
, defFlag "c" (PassFlag (\f -> do setMode (stopBeforeMode StopLn) f
addFlag "-no-link" f))
, defFlag "M" (PassFlag (setMode doMkDependHSMode))
, defFlag "E" (PassFlag (setMode (stopBeforeMode anyHsc)))
, defFlag "C" (PassFlag (setMode (stopBeforeMode HCc)))
, defFlag "S" (PassFlag (setMode (stopBeforeMode (As False))))
, defFlag "-make" (PassFlag (setMode doMakeMode))
, defFlag "-interactive" (PassFlag (setMode doInteractiveMode))
, defFlag "-abi-hash" (PassFlag (setMode doAbiHashMode))
, defFlag "e" (SepArg (\s -> setMode (doEvalMode s) "-e"))
]
setMode :: Mode -> String -> EwM ModeM ()
setMode newMode newFlag = liftEwM $ do
(mModeFlag, errs, flags') <- getCmdLineState
let (modeFlag', errs') =
case mModeFlag of
Nothing -> ((newMode, newFlag), errs)
Just (oldMode, oldFlag) ->
case (oldMode, newMode) of
-- -c/--make are allowed together, and mean --make -no-link
_ | isStopLnMode oldMode && isDoMakeMode newMode
|| isStopLnMode newMode && isDoMakeMode oldMode ->
((doMakeMode, "--make"), [])
-- If we have both --help and --interactive then we
-- want showGhciUsage
_ | isShowGhcUsageMode oldMode &&
isDoInteractiveMode newMode ->
((showGhciUsageMode, oldFlag), [])
| isShowGhcUsageMode newMode &&
isDoInteractiveMode oldMode ->
((showGhciUsageMode, newFlag), [])
-- If we have both -e and --interactive then -e always wins
_ | isDoEvalMode oldMode &&
isDoInteractiveMode newMode ->
((oldMode, oldFlag), [])
| isDoEvalMode newMode &&
isDoInteractiveMode oldMode ->
((newMode, newFlag), [])
-- Otherwise, --help/--version/--numeric-version always win
| isDominantFlag oldMode -> ((oldMode, oldFlag), [])
| isDominantFlag newMode -> ((newMode, newFlag), [])
-- We need to accumulate eval flags like "-e foo -e bar"
(Right (Right (DoEval esOld)),
Right (Right (DoEval [eNew]))) ->
((Right (Right (DoEval (eNew : esOld))), oldFlag),
errs)
-- Saying e.g. --interactive --interactive is OK
_ | oldFlag == newFlag -> ((oldMode, oldFlag), errs)
-- --interactive and --show-options are used together
(Right (Right DoInteractive), Left (ShowOptions _)) ->
((Left (ShowOptions True),
"--interactive --show-options"), errs)
(Left (ShowOptions _), (Right (Right DoInteractive))) ->
((Left (ShowOptions True),
"--show-options --interactive"), errs)
-- Otherwise, complain
_ -> let err = flagMismatchErr oldFlag newFlag
in ((oldMode, oldFlag), err : errs)
putCmdLineState (Just modeFlag', errs', flags')
where isDominantFlag f = isShowGhcUsageMode f ||
isShowGhciUsageMode f ||
isShowVersionMode f ||
isShowNumVersionMode f
flagMismatchErr :: String -> String -> String
flagMismatchErr oldFlag newFlag
= "cannot use `" ++ oldFlag ++ "' with `" ++ newFlag ++ "'"
addFlag :: String -> String -> EwM ModeM ()
addFlag s flag = liftEwM $ do
(m, e, flags') <- getCmdLineState
putCmdLineState (m, e, mkGeneralLocated loc s : flags')
where loc = "addFlag by " ++ flag ++ " on the commandline"
-- ----------------------------------------------------------------------------
-- Run --make mode
doMake :: [(String,Maybe Phase)] -> Ghc ()
doMake srcs = do
let (hs_srcs, non_hs_srcs) = partition haskellish srcs
haskellish (f,Nothing) =
looksLikeModuleName f || isHaskellSrcFilename f || '.' `notElem` f
haskellish (_,Just phase) =
phase `notElem` [ As True, As False, Cc, Cobjc, Cobjcxx, CmmCpp, Cmm
, StopLn]
hsc_env <- GHC.getSession
-- if we have no haskell sources from which to do a dependency
-- analysis, then just do one-shot compilation and/or linking.
-- This means that "ghc Foo.o Bar.o -o baz" links the program as
-- we expect.
if (null hs_srcs)
then liftIO (oneShot hsc_env StopLn srcs)
else do
o_files <- mapM (\x -> liftIO $ compileFile hsc_env StopLn x)
non_hs_srcs
dflags <- GHC.getSessionDynFlags
let dflags' = dflags { ldInputs = map (FileOption "") o_files
++ ldInputs dflags }
_ <- GHC.setSessionDynFlags dflags'
targets <- mapM (uncurry GHC.guessTarget) hs_srcs
GHC.setTargets targets
ok_flag <- GHC.load LoadAllTargets
when (failed ok_flag) (liftIO $ exitWith (ExitFailure 1))
return ()
-- ---------------------------------------------------------------------------
-- --show-iface mode
doShowIface :: DynFlags -> FilePath -> IO ()
doShowIface dflags file = do
hsc_env <- newHscEnv dflags
showIface hsc_env file
-- ---------------------------------------------------------------------------
-- Various banners and verbosity output.
showBanner :: PostLoadMode -> DynFlags -> IO ()
showBanner _postLoadMode dflags = do
let verb = verbosity dflags
#ifdef GHCI
-- Show the GHCi banner
when (isInteractiveMode _postLoadMode && verb >= 1) $ putStrLn ghciWelcomeMsg
#endif
-- Display details of the configuration in verbose mode
when (verb >= 2) $
do hPutStr stderr "Glasgow Haskell Compiler, Version "
hPutStr stderr cProjectVersion
hPutStr stderr ", stage "
hPutStr stderr cStage
hPutStr stderr " booted by GHC version "
hPutStrLn stderr cBooterVersion
-- We print out a Read-friendly string, but a prettier one than the
-- Show instance gives us
showInfo :: DynFlags -> IO ()
showInfo dflags = do
let sq x = " [" ++ x ++ "\n ]"
putStrLn $ sq $ intercalate "\n ," $ map show $ compilerInfo dflags
showSupportedExtensions :: IO ()
showSupportedExtensions = mapM_ putStrLn supportedLanguagesAndExtensions
showVersion :: IO ()
showVersion = putStrLn (cProjectName ++ ", version " ++ cProjectVersion)
showOptions :: Bool -> IO ()
showOptions isInteractive = putStr (unlines availableOptions)
where
availableOptions = concat [
flagsForCompletion isInteractive,
map ('-':) (concat [
getFlagNames mode_flags
, (filterUnwantedStatic . getFlagNames $ flagsStatic)
, flagsStaticNames
])
]
getFlagNames opts = map flagName opts
-- this is a hack to get rid of two unwanted entries that get listed
-- as static flags. Hopefully this hack will disappear one day together
-- with static flags
filterUnwantedStatic = filter (`notElem`["f", "fno-"])
showGhcUsage :: DynFlags -> IO ()
showGhcUsage = showUsage False
showGhciUsage :: DynFlags -> IO ()
showGhciUsage = showUsage True
showUsage :: Bool -> DynFlags -> IO ()
showUsage ghci dflags = do
let usage_path = if ghci then ghciUsagePath dflags
else ghcUsagePath dflags
usage <- readFile usage_path
dump usage
where
dump "" = return ()
dump ('$':'$':s) = putStr progName >> dump s
dump (c:s) = putChar c >> dump s
dumpFinalStats :: DynFlags -> IO ()
dumpFinalStats dflags =
when (gopt Opt_D_faststring_stats dflags) $ dumpFastStringStats dflags
dumpFastStringStats :: DynFlags -> IO ()
dumpFastStringStats dflags = do
buckets <- getFastStringTable
let (entries, longest, has_z) = countFS 0 0 0 buckets
msg = text "FastString stats:" $$
nest 4 (vcat [text "size: " <+> int (length buckets),
text "entries: " <+> int entries,
text "longest chain: " <+> int longest,
text "has z-encoding: " <+> (has_z `pcntOf` entries)
])
-- we usually get more "has z-encoding" than "z-encoded", because
-- when we z-encode a string it might hash to the exact same string,
-- which is not counted as "z-encoded". Only strings whose
-- Z-encoding is different from the original string are counted in
-- the "z-encoded" total.
putMsg dflags msg
where
x `pcntOf` y = int ((x * 100) `quot` y) <> char '%'
countFS :: Int -> Int -> Int -> [[FastString]] -> (Int, Int, Int)
countFS entries longest has_z [] = (entries, longest, has_z)
countFS entries longest has_z (b:bs) =
let
len = length b
longest' = max len longest
entries' = entries + len
has_zs = length (filter hasZEncoding b)
in
countFS entries' longest' (has_z + has_zs) bs
showPackages, dumpPackages, dumpPackagesSimple :: DynFlags -> IO ()
showPackages dflags = putStrLn (showSDoc dflags (pprPackages dflags))
dumpPackages dflags = putMsg dflags (pprPackages dflags)
dumpPackagesSimple dflags = putMsg dflags (pprPackagesSimple dflags)
-- -----------------------------------------------------------------------------
-- ABI hash support
{-
ghc --abi-hash Data.Foo System.Bar
Generates a combined hash of the ABI for modules Data.Foo and
System.Bar. The modules must already be compiled, and appropriate -i
options may be necessary in order to find the .hi files.
This is used by Cabal for generating the ComponentId for a
package. The ComponentId must change when the visible ABI of
the package changes, so during registration Cabal calls ghc --abi-hash
to get a hash of the package's ABI.
-}
-- | Print ABI hash of input modules.
--
-- The resulting hash is the MD5 of the GHC version used (Trac #5328,
-- see 'hiVersion') and of the existing ABI hash from each module (see
-- 'mi_mod_hash').
abiHash :: [String] -- ^ List of module names
-> Ghc ()
abiHash strs = do
hsc_env <- getSession
let dflags = hsc_dflags hsc_env
liftIO $ do
let find_it str = do
let modname = mkModuleName str
r <- findImportedModule hsc_env modname Nothing
case r of
Found _ m -> return m
_error -> throwGhcException $ CmdLineError $ showSDoc dflags $
cannotFindInterface dflags modname r
mods <- mapM find_it strs
let get_iface modl = loadUserInterface False (text "abiHash") modl
ifaces <- initIfaceCheck hsc_env $ mapM get_iface mods
bh <- openBinMem (3*1024) -- just less than a block
put_ bh hiVersion
-- package hashes change when the compiler version changes (for now)
-- see #5328
mapM_ (put_ bh . mi_mod_hash) ifaces
f <- fingerprintBinMem bh
putStrLn (showPpr dflags f)
-- -----------------------------------------------------------------------------
-- Util
unknownFlagsErr :: [String] -> a
unknownFlagsErr fs = throwGhcException $ UsageError $ concatMap oneError fs
where
oneError f =
"unrecognised flag: " ++ f ++ "\n" ++
(case fuzzyMatch f (nub allFlags) of
[] -> ""
suggs -> "did you mean one of:\n" ++ unlines (map (" " ++) suggs))
{- Note [-Bsymbolic and hooks]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-Bsymbolic is a flag that prevents the binding of references to global
symbols to symbols outside the shared library being compiled (see `man
ld`). When dynamically linking, we don't use -Bsymbolic on the RTS
package: that is because we want hooks to be overridden by the user,
we don't want to constrain them to the RTS package.
Unfortunately this seems to have broken somehow on OS X: as a result,
defaultHooks (in hschooks.c) is not called, so the GC stats are never
initialized, which breaks things like `:set +s` in GHCi (#8754). As a
hacky workaround, we instead call 'defaultHooks' directly to initialize
the flags in the RTS.
A byproduct of this, I believe, is that hooks are likely broken on OS X
when dynamically linking. This probably doesn't affect most people,
though, since GHC itself is linked dynamically while most other programs
link statically.
-}
foreign import ccall safe "initGCStatistics"
initGCStatistics :: IO ()
|
elieux/ghc
|
ghc/Main.hs
|
bsd-3-clause
| 35,956 | 0 | 27 | 10,081 | 7,356 | 3,790 | 3,566 | 566 | 15 |
module Data.Store.Db
(Db, withDb, lookup, transaction, store)
where
import Data.ByteString (ByteString)
import Data.Foldable (traverse_)
import Data.Store.Guid (Guid)
import Data.Store.Transaction (Store(..))
import Prelude hiding (lookup)
import qualified Control.Exception as Exc
import qualified Data.Store.Guid as Guid
import qualified Database.KeyValueHash as HashDB
type Db = HashDB.Database
-- TODO: this should be automatic in HashDB
hashSize :: HashDB.Size
hashSize = HashDB.mkSize $ 2 ^ (17::Int)
open :: FilePath -> IO Db
open fileName =
HashDB.openDatabase fileName HashDB.stdHash hashSize
`Exc.catch`
(\(Exc.SomeException _) ->
HashDB.createDatabase fileName HashDB.stdHash hashSize)
close :: Db -> IO ()
close _ = return ()
withDb :: FilePath -> (Db -> IO a) -> IO a
withDb filePath = Exc.bracket (open filePath) close
lookup :: Db -> Guid -> IO (Maybe ByteString)
lookup db = HashDB.readKey db . Guid.bs
transaction :: Db -> [(Guid, Maybe ByteString)] -> IO ()
transaction db changes = do
traverse_ applyChange changes
HashDB.msync db
where
applyChange (key, Nothing) = HashDB.deleteKey db (Guid.bs key)
applyChange (key, Just value) = HashDB.writeKey db (Guid.bs key) value
-- You get a Store tagged however you like...
store :: Db -> Store IO
store db = Store {
storeNewKey = Guid.new,
storeLookup = lookup db,
storeAtomicWrite = transaction db
}
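-- A minimal usage sketch (illustrative only; the path and the 'someBytes'
-- payload are made up and not part of this module):
--
--   withDb "/tmp/example.db" $ \db -> do
--     key <- Guid.new
--     transaction db [(key, Just someBytes)]
--     lookup db key
--
-- which writes one key and reads it back before the database is closed.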
|
sinelaw/lamdu
|
bottlelib/Data/Store/Db.hs
|
gpl-3.0
| 1,409 | 0 | 10 | 243 | 492 | 269 | 223 | 36 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.Redshift.DisableSnapshotCopy
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Disables the automatic copying of snapshots from one region to another region
-- for a specified cluster.
--
-- <http://docs.aws.amazon.com/redshift/latest/APIReference/API_DisableSnapshotCopy.html>
module Network.AWS.Redshift.DisableSnapshotCopy
(
-- * Request
DisableSnapshotCopy
-- ** Request constructor
, disableSnapshotCopy
-- ** Request lenses
, dscClusterIdentifier
-- * Response
, DisableSnapshotCopyResponse
-- ** Response constructor
, disableSnapshotCopyResponse
-- ** Response lenses
, dscrCluster
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.Redshift.Types
import qualified GHC.Exts
newtype DisableSnapshotCopy = DisableSnapshotCopy
{ _dscClusterIdentifier :: Text
} deriving (Eq, Ord, Read, Show, Monoid, IsString)
-- | 'DisableSnapshotCopy' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'dscClusterIdentifier' @::@ 'Text'
--
disableSnapshotCopy :: Text -- ^ 'dscClusterIdentifier'
-> DisableSnapshotCopy
disableSnapshotCopy p1 = DisableSnapshotCopy
{ _dscClusterIdentifier = p1
}
-- | The unique identifier of the source cluster for which you want to disable
-- copying of snapshots to a destination region.
--
-- Constraints: Must be the valid name of an existing cluster that has
-- cross-region snapshot copy enabled.
dscClusterIdentifier :: Lens' DisableSnapshotCopy Text
dscClusterIdentifier =
lens _dscClusterIdentifier (\s a -> s { _dscClusterIdentifier = a })
newtype DisableSnapshotCopyResponse = DisableSnapshotCopyResponse
{ _dscrCluster :: Maybe Cluster
} deriving (Eq, Read, Show)
-- | 'DisableSnapshotCopyResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'dscrCluster' @::@ 'Maybe' 'Cluster'
--
disableSnapshotCopyResponse :: DisableSnapshotCopyResponse
disableSnapshotCopyResponse = DisableSnapshotCopyResponse
{ _dscrCluster = Nothing
}
dscrCluster :: Lens' DisableSnapshotCopyResponse (Maybe Cluster)
dscrCluster = lens _dscrCluster (\s a -> s { _dscrCluster = a })
instance ToPath DisableSnapshotCopy where
toPath = const "/"
instance ToQuery DisableSnapshotCopy where
toQuery DisableSnapshotCopy{..} = mconcat
[ "ClusterIdentifier" =? _dscClusterIdentifier
]
instance ToHeaders DisableSnapshotCopy
instance AWSRequest DisableSnapshotCopy where
type Sv DisableSnapshotCopy = Redshift
type Rs DisableSnapshotCopy = DisableSnapshotCopyResponse
request = post "DisableSnapshotCopy"
response = xmlResponse
instance FromXML DisableSnapshotCopyResponse where
parseXML = withElement "DisableSnapshotCopyResult" $ \x -> DisableSnapshotCopyResponse
<$> x .@? "Cluster"
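-- A hypothetical construction example (the cluster name is made up and this
-- note is not part of the generated module):
--
--   disableSnapshotCopy "examplecluster"
--
-- builds a request whose "ClusterIdentifier" query parameter is
-- "examplecluster"; the identifier can be read or replaced afterwards through
-- the 'dscClusterIdentifier' lens.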
|
romanb/amazonka
|
amazonka-redshift/gen/Network/AWS/Redshift/DisableSnapshotCopy.hs
|
mpl-2.0
| 3,838 | 0 | 9 | 774 | 426 | 262 | 164 | 54 | 1 |
{-# LANGUAGE EmptyDataDecls, TypeFamilies, UndecidableInstances,
ScopedTypeVariables, TypeOperators,
FlexibleInstances, NoMonomorphismRestriction,
MultiParamTypeClasses, FlexibleContexts #-}
module IndTypesPerfMerge where
data a :* b = a :* b
infixr 6 :*
data TRUE
data FALSE
data Zero
data Succ a
type family Equals m n
type instance Equals Zero Zero = TRUE
type instance Equals (Succ a) Zero = FALSE
type instance Equals Zero (Succ a) = FALSE
type instance Equals (Succ a) (Succ b) = Equals a b
type family LessThan m n
type instance LessThan Zero Zero = FALSE
type instance LessThan (Succ n) Zero = FALSE
type instance LessThan Zero (Succ n) = TRUE
type instance LessThan (Succ m) (Succ n) = LessThan m n
newtype Tagged n a = Tagged a deriving (Show,Eq)
type family Cond p a b
type instance Cond TRUE a b = a
type instance Cond FALSE a b = b
class Merger a where
type Merged a
type UnmergedLeft a
type UnmergedRight a
mkMerge :: a -> UnmergedLeft a -> UnmergedRight a -> Merged a
class Mergeable a b where
type MergerType a b
merger :: a -> b -> MergerType a b
{-
merge ::
forall a b.
(Merger (MergerType a b), Mergeable a b,
UnmergedLeft (MergerType a b) ~ a,
UnmergedRight (MergerType a b) ~ b) =>
a -> b -> Merged (MergerType a b)
-}
merge x y = mkMerge (merger x y) x y
{- ------------- NASTY TYPE FOR merge -----------------
-- See #11408
x:tx, y:ty
mkMerge @ gamma
merger @ alpha beta
merge :: tx -> ty -> tr
Constraints generated:
gamma ~ MergerType alpha beta
UnmergedLeft gamma ~ tx
UnmergedRight gamma ~ ty
alpha ~ tx
beta ~ ty
tr ~ Merged gamma
Mergeable tx ty
Merger gamma
One solve path:
gamma := t
tx := alpha := UnmergedLeft t
ty := beta := UnmergedRight t
Mergeable (UnmergedLeft t) (UnmergedRight t)
Merger t
t ~ MergerType (UnmergedLeft t) (UnmergedRight t)
LEADS TO AMBIGUOUS TYPE
Another solve path:
tx := alpha
ty := beta
gamma := MergerType alpha beta
UnmergedLeft (MergerType alpha beta) ~ alpha
UnmergedRight (MergerType alpha beta) ~ beta
Merger (MergerType alpha beta)
Mergeable alpha beta
LEADS TO NON-AMBIGUOUS TYPE
--------------- -}
data TakeRight a
data TakeLeft a
data DiscardRightHead a b c d
data LeftHeadFirst a b c d
data RightHeadFirst a b c d
data EndMerge
instance Mergeable () () where
type MergerType () () = EndMerge
merger = undefined
instance Mergeable () (a :* b) where
type MergerType () (a :* b) = TakeRight (a :* b)
merger = undefined
instance Mergeable (a :* b) () where
type MergerType (a :* b) () = TakeLeft (a :* b)
merger = undefined
instance Mergeable (Tagged m a :* t1) (Tagged n b :* t2) where
type MergerType (Tagged m a :* t1) (Tagged n b :* t2) =
Cond (Equals m n) (DiscardRightHead (Tagged m a) t1 (Tagged n b) t2)
(Cond (LessThan m n) (LeftHeadFirst (Tagged m a) t1 (Tagged n b) t2)
(RightHeadFirst (Tagged m a ) t1 (Tagged n b) t2))
merger = undefined
instance Merger EndMerge where
type Merged EndMerge = ()
type UnmergedLeft EndMerge = ()
type UnmergedRight EndMerge = ()
mkMerge _ () () = ()
instance Merger (TakeRight a) where
type Merged (TakeRight a) = a
type UnmergedLeft (TakeRight a) = ()
type UnmergedRight (TakeRight a) = a
mkMerge _ () a = a
instance Merger (TakeLeft a) where
type Merged (TakeLeft a) = a
type UnmergedLeft (TakeLeft a) = a
type UnmergedRight (TakeLeft a) = ()
mkMerge _ a () = a
instance
(Mergeable t1 t2,
Merger (MergerType t1 t2),
t1 ~ UnmergedLeft (MergerType t1 t2),
t2 ~ UnmergedRight (MergerType t1 t2)) =>
Merger (DiscardRightHead h1 t1 h2 t2) where
type Merged (DiscardRightHead h1 t1 h2 t2) = h1 :* Merged (MergerType t1 t2)
type UnmergedLeft (DiscardRightHead h1 t1 h2 t2) = h1 :* t1
type UnmergedRight (DiscardRightHead h1 t1 h2 t2) = h2 :* t2
mkMerge _ (h1 :* t1) (h2 :* t2) = h1 :* mkMerge (merger t1 t2) t1 t2
instance
(Mergeable t1 (h2 :* t2),
Merger (MergerType t1 (h2 :* t2)),
t1 ~ UnmergedLeft (MergerType t1 (h2 :* t2)),
(h2 :* t2) ~ UnmergedRight (MergerType t1 (h2 :* t2))) =>
Merger (LeftHeadFirst h1 t1 h2 t2) where
type Merged (LeftHeadFirst h1 t1 h2 t2) = h1 :* Merged (MergerType t1 (h2 :* t2))
type UnmergedLeft (LeftHeadFirst h1 t1 h2 t2) = h1 :* t1
type UnmergedRight (LeftHeadFirst h1 t1 h2 t2) = h2 :* t2
mkMerge _ (h1 :* t1) (h2 :* t2) = h1 :* mkMerge (merger t1 (h2 :* t2)) t1 (h2 :* t2)
instance
(Mergeable (h1 :* t1) t2,
Merger (MergerType (h1 :* t1) t2),
(h1 :* t1) ~ UnmergedLeft (MergerType (h1 :* t1) t2),
t2 ~ UnmergedRight (MergerType (h1 :* t1) t2)) =>
Merger (RightHeadFirst h1 t1 h2 t2) where
type Merged (RightHeadFirst h1 t1 h2 t2) = h2 :* Merged (MergerType (h1 :* t1) t2)
type UnmergedLeft (RightHeadFirst h1 t1 h2 t2) = h1 :* t1
type UnmergedRight (RightHeadFirst h1 t1 h2 t2) = h2 :* t2
mkMerge _ (h1 :* t1) (h2 :* t2) = h2 :* mkMerge (merger (h1 :* t1) t2) (h1 :* t1) t2
|
sdiehl/ghc
|
testsuite/tests/indexed-types/should_compile/IndTypesPerfMerge.hs
|
bsd-3-clause
| 5,135 | 0 | 12 | 1,286 | 1,697 | 905 | 792 | -1 | -1 |
module HsFreeNames where
-------------------------------------------------------------------------------
-- This module implements free name analysis for Haskell.
-- Under construction!!!
-- We are not yet faithfully dealing with qualified vs. unqualified names.
-- Same holds for module level analysis.
-------------------------------------------------------------------------------
import Language.Haskell.Syntax
import SyntaxTermInstances
import StrategyLib
import Data.List
import Control.Monad
import Data.Monoid
-------------------------------------------------------------------------------
-- The main function of analysis.
-- We use the stop_td scheme.
-- We define ad hoc cases for several syntactical domains.
-- Ad hoc cases are meant to restart recursion.
hsFreeAndDeclared :: (Term t, MonadPlus m) => t -> m ([HsQName],[HsQName])
hsFreeAndDeclared = applyTU (stop_tdTU worker)
where
worker = failTU `adhocTU` exp
`adhocTU` pat
`adhocTU` match
`adhocTU` alt
`adhocTU` decls
`adhocTU` stmts
exp (HsVar qn) = return ([qn],[])
exp (HsCon qn) = return ([qn],[])
exp (HsLambda _ pats body)
= do (pf,pd) <- hsFreeAndDeclared pats
(bf,bd) <- hsFreeAndDeclared body
return ((bf `union` pf) \\ pd,[])
exp (HsLet decls exp)
= do (df,dd) <- hsFreeAndDeclared decls
(ef,ed) <- hsFreeAndDeclared exp
return (df `union` (ef \\ dd),[])
exp (HsListComp exp stmts)
= hsFreeAndDeclared (stmts ++ [HsQualifier exp])
exp (HsRecConstr qn e) = addFree qn (hsFreeAndDeclared e)
exp _ = mzero
pat (HsPVar n) = return ([],[UnQual n])
pat (HsPInfixApp p1 qn p2) = addFree qn (hsFreeAndDeclared [p1,p2])
pat (HsPApp qn pats) = addFree qn (hsFreeAndDeclared pats)
pat (HsPRec qn fields) = addFree qn (hsFreeAndDeclared fields)
pat _ = mzero
match (HsMatch _ fun pats rhs {-where-} decls)
= do (pf,pd) <- hsFreeAndDeclared pats
(rf,rd) <- hsFreeAndDeclared rhs
(df,dd) <- hsFreeAndDeclared decls
let qfun = UnQual fun
return ( pf `union` (((rf \\ (dd `union` [qfun]) `union` df) \\ pd)),
[qfun] )
alt (HsAlt _ pat exp decls)
= do (pf,pd) <- hsFreeAndDeclared pat
(ef,ed) <- hsFreeAndDeclared exp
(df,dd) <- hsFreeAndDeclared decls
return (pf `union` (((ef \\ dd) `union` df) \\ pd),[])
decls (ds::[HsDecl])
= do (f,d) <- hsFreeAndDeclaredList ds
return (f \\ d, d)
stmts (HsGenerator _ pat exp:stmts)
= do (pf,pd) <- hsFreeAndDeclared pat
(ef,ed) <- hsFreeAndDeclared exp
(sf,sd) <- hsFreeAndDeclared stmts
return (pf `union` ef `union` (sf \\ pd),[])
stmts (HsLetStmt decls:stmts)
= do (df,dd) <- hsFreeAndDeclared decls
(sf,sd) <- hsFreeAndDeclared stmts
return (df `union` (sf \\ dd),[])
stmts _ = mzero
addFree free mfd = do (f,d) <- mfd
return ([free] `union` f,d)
hsFreeAndDeclaredList :: (Term t, MonadPlus m) => [t] -> m ([HsQName],[HsQName])
hsFreeAndDeclaredList l
= do fds <- mapM hsFreeAndDeclared l
return ( foldr union [] (map fst fds),
foldr union [] (map snd fds) )
-------------------------------------------------------------------------------
-- This should go somewhere else.
-- instance (Monoid a, Monoid b) => Monoid (a,b) where
-- mappend (a,b) (a',b') = (mappend a a', mappend b b')
-- mempty = (mempty,mempty)
-------------------------------------------------------------------------------
|
forste/haReFork
|
StrategyLib-4.0-beta/examples/haskell/HsFreeNames.hs
|
bsd-3-clause
| 3,824 | 0 | 18 | 1,067 | 1,184 | 637 | 547 | -1 | -1 |
-------------------------------------------------------------------------------
-- |
-- Module : System.Hardware.Haskino.SamplePrograms.Strong.ScheduledLCD
-- Copyright : (c) University of Kansas
-- License : BSD3
-- Stability : experimental
--
-- Creates a scheduled task on the Arduino which alternates writing 'Rock',
-- 'Chalk' and 'Jayhawk' to the LCD screen every second and a half.
-- Note: This example requires a Mega2560 board, as the Uno boards do not have
-- enough RAM.
-------------------------------------------------------------------------------
module System.Hardware.Haskino.SamplePrograms.Strong.ScheduledLCD where
import Control.Monad.Trans (liftIO)
import System.Hardware.Haskino
import System.Hardware.Haskino.Parts.LCD
hitachi :: LCDController
hitachi = Hitachi44780 { lcdRS = 8
, lcdEN = 9
, lcdD4 = 4
, lcdD5 = 5
, lcdD6 = 6
, lcdD7 = 7
, lcdBL = Just 10
, lcdRows = 2
, lcdCols = 16
, dotMode5x10 = False
}
-- Task which will execute on the Arduino: write 'Rock' to the display, delay a
-- second and a half, write 'Chalk', delay a second and a half, write 'Jayhawk',
-- delay a second and a half, and repeat.
myTask :: LCD -> Arduino ()
myTask lcd = do
lcdHome lcd
lcdWrite lcd "Rock "
delayMillis 1500
lcdHome lcd
lcdWrite lcd "Chalk "
delayMillis 1500
lcdHome lcd
lcdWrite lcd "Jayhawk"
delayMillis 1500
scheduledLCD :: IO ()
scheduledLCD = withArduino True "/dev/cu.usbmodem1421" $ do
lcd <- lcdRegister hitachi
lcdBacklightOn lcd
-- Create the task which writes to the LCD
createTask 1 (myTask lcd)
-- Schedule the task to start in 1 second
scheduleTask 1 1000
-- Query to confirm task creation
task <- queryTask 1
liftIO $ print task
|
ku-fpg/kansas-amber
|
legacy/Shallow/ScheduledLCD.hs
|
bsd-3-clause
| 2,008 | 0 | 10 | 607 | 292 | 158 | 134 | 34 | 1 |
{-# OPTIONS_HADDOCK hide #-}
-- #hide
#if __GLASGOW_HASKELL__ >= 701
{-# LANGUAGE Safe #-}
#endif
module Text.XHtml.Strict.Elements where
import Text.XHtml.Internals
-- * Elements in XHTML Strict
abbr :: Html -> Html
acronym :: Html -> Html
address :: Html -> Html
anchor :: Html -> Html
area :: Html
bdo :: Html -> Html
big :: Html -> Html
blockquote :: Html -> Html
body :: Html -> Html
bold :: Html -> Html
br :: Html
button :: Html -> Html
caption :: Html -> Html
cite :: Html -> Html
col :: Html -> Html
colgroup :: Html -> Html
del :: Html -> Html
ddef :: Html -> Html
define :: Html -> Html
dlist :: Html -> Html
dterm :: Html -> Html
emphasize :: Html -> Html
fieldset :: Html -> Html
form :: Html -> Html
h1 :: Html -> Html
h2 :: Html -> Html
h3 :: Html -> Html
h4 :: Html -> Html
h5 :: Html -> Html
h6 :: Html -> Html
header :: Html -> Html
hr :: Html
image :: Html
input :: Html
ins :: Html -> Html
italics :: Html -> Html
keyboard :: Html -> Html
label :: Html -> Html
legend :: Html -> Html
li :: Html -> Html
meta :: Html
noscript :: Html -> Html
object :: Html -> Html
olist :: Html -> Html
optgroup :: Html -> Html
option :: Html -> Html
paragraph :: Html -> Html
param :: Html
pre :: Html -> Html
quote :: Html -> Html
sample :: Html -> Html
script :: Html -> Html
select :: Html -> Html
small :: Html -> Html
strong :: Html -> Html
style :: Html -> Html
sub :: Html -> Html
sup :: Html -> Html
table :: Html -> Html
tbody :: Html -> Html
td :: Html -> Html
textarea :: Html -> Html
tfoot :: Html -> Html
th :: Html -> Html
thead :: Html -> Html
thebase :: Html
thecode :: Html -> Html
thediv :: Html -> Html
thehtml :: Html -> Html
thelink :: Html -> Html
themap :: Html -> Html
thespan :: Html -> Html
thetitle :: Html -> Html
tr :: Html -> Html
tt :: Html -> Html
ulist :: Html -> Html
variable :: Html -> Html
abbr = tag "abbr"
acronym = tag "acronym"
address = tag "address"
anchor = tag "a"
area = itag "area"
bdo = tag "bdo"
big = tag "big"
blockquote = tag "blockquote"
body = tag "body"
bold = tag "b"
button = tag "button"
br = itag "br"
caption = tag "caption"
cite = tag "cite"
col = tag "col"
colgroup = tag "colgroup"
ddef = tag "dd"
define = tag "dfn"
del = tag "del"
dlist = tag "dl"
dterm = tag "dt"
emphasize = tag "em"
fieldset = tag "fieldset"
form = tag "form"
h1 = tag "h1"
h2 = tag "h2"
h3 = tag "h3"
h4 = tag "h4"
h5 = tag "h5"
h6 = tag "h6"
header = tag "head"
hr = itag "hr"
image = itag "img"
input = itag "input"
ins = tag "ins"
italics = tag "i"
keyboard = tag "kbd"
label = tag "label"
legend = tag "legend"
li = tag "li"
meta = itag "meta"
noscript = tag "noscript"
object = tag "object"
olist = tag "ol"
optgroup = tag "optgroup"
option = tag "option"
paragraph = tag "p"
param = itag "param"
pre = tag "pre"
quote = tag "q"
sample = tag "samp"
script = tag "script"
select = tag "select"
small = tag "small"
strong = tag "strong"
style = tag "style"
sub = tag "sub"
sup = tag "sup"
table = tag "table"
tbody = tag "tbody"
td = tag "td"
textarea = tag "textarea"
tfoot = tag "tfoot"
th = tag "th"
thead = tag "thead"
thebase = itag "base"
thecode = tag "code"
thediv = tag "div"
thehtml = tag "html"
thelink = tag "link"
themap = tag "map"
thespan = tag "span"
thetitle = tag "title"
tr = tag "tr"
tt = tag "tt"
ulist = tag "ul"
variable = tag "var"
|
DavidAlphaFox/ghc
|
libraries/xhtml/Text/XHtml/Strict/Elements.hs
|
bsd-3-clause
| 5,583 | 0 | 5 | 2,987 | 1,297 | 693 | 604 | 157 | 1 |
<?xml version='1.0' encoding='UTF-8' ?>
<!--
<!DOCTYPE helpset
PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN"
"http://java.sun.com/products/javahelp/helpset_2_0.dtd"> -->
<helpset version="2.0">
<!-- title -->
<title>Logisim - Help</title>
<!-- maps -->
<maps>
<homeID>top</homeID>
<mapref location="map_{lang}.jhm" />
</maps>
<!-- views -->
<view xml:lang="{lang}" mergetype="javax.help.UniteAppendMerge">
<name>TOC</name>
<label>Table Of Contents</label>
<type>javax.help.TOCView</type>
<data>{lang}/contents.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">search_lookup_{lang}</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
<!-- presentation windows -->
<!-- This window is the default one for the helpset.
* It is a tri-paned window because displayviews, when not
* defined, defaults to true, and because a toolbar is defined.
* The toolbar has a back arrow, a forward arrow, and
* a home button that has a user-defined image.
-->
<presentation default="true">
<name>main window</name>
<size width="900" height="700" />
<location x="200" y="10" />
<toolbar>
<helpaction>javax.help.BackAction</helpaction>
<helpaction>javax.help.ForwardAction</helpaction>
<helpaction image="homeicon">javax.help.HomeAction</helpaction>
<helpaction>javax.help.SeparatorAction</helpaction>
<helpaction>javax.help.FavoritesAction</helpaction>
</toolbar>
</presentation>
<!-- implementation section -->
<impl>
<helpsetregistry helpbrokerclass="javax.help.DefaultHelpBroker" />
<viewerregistry viewertype="text/html"
viewerclass="com.sun.java.help.impl.CustomKit" />
<viewerregistry viewertype="text/xml"
viewerclass="com.sun.java.help.impl.CustomXMLKit" />
</impl>
</helpset>
|
aaorellana/Logisim-SeniorProject
|
src/main/resources/doc/support/base-doc.hs
|
gpl-3.0
| 2,150 | 143 | 87 | 443 | 751 | 378 | 373 | -1 | -1 |
module Sort where
--insert :: Ord a => a -> [a] -> [a]
insert x [] = [x]
insert x (y:ys) = if x<=y
then x:y:ys
else y:insert x ys
--sort :: Ord a => [a] -> [a]
sort [] = []
sort (x:xs) = insert x (sort xs)
isort xs = foldr insert [] xs
|
forste/haReFork
|
tools/hs2alfa/tests/Sort.hs
|
bsd-3-clause
| 247 | 4 | 7 | 67 | 126 | 66 | 60 | 8 | 2 |
{-# LANGUAGE RankNTypes #-}
module T14488 where
type Lens' s a = forall f. Functor f => (a -> f a) -> s -> f s
data T a = MkT { _tfield :: Eq a => a }
tfield :: Eq a => Lens' (T a) a
tfield f t = MkT <$> f (_tfield t)
|
shlevy/ghc
|
testsuite/tests/typecheck/should_compile/T14488.hs
|
bsd-3-clause
| 222 | 0 | 10 | 61 | 115 | 61 | 54 | 6 | 1 |
-- {-# LANGUAGE DeriveGeneric, OverloadedStrings #-}
module State
( State(..)
, verboseEnabled
, debugEnabled
, sandboxEnabled
) where
import Config (Settings(..))
import Identity (Identity)
import Args (Options(..))
data State = State { settings :: Settings
, identity :: Identity
, options :: Options
}
deriving (Show)
verboseEnabled :: State -> Bool
verboseEnabled st = (optVerbose (options st) == Just True) || verbose (settings st)
debugEnabled :: State -> Bool
debugEnabled st = (optDebug (options st) == Just True) || debug (settings st)
sandboxEnabled :: State -> Bool
sandboxEnabled st = (optSandbox (options st) == Just True) || sandbox (settings st)
|
dxtr/dnsimple
|
src/State.hs
|
isc
| 745 | 0 | 10 | 184 | 236 | 130 | 106 | 18 | 1 |
{-------------------------------------------------------------------------------
Consider the infinite polynomial series AF(x) = xF₁ + x²F₂ + x³F₃ + ..., where
F_k is the kth term in the Fibonacci sequence: 1, 1, 2, 3, 5, 8, ...; that is,
F_k = F_(k−1) + F_(k−2), with F₁ = 1 and F₂ = 1.
For this problem we shall be interested in values of x for which AF(x) is a
positive integer.
Surprisingly AF(1/2) = (1/2).1 + (1/2)².1 + (1/2)³.2 + (1/2)⁴.3 + (1/2)⁵.5 + ...
= 1/2 + 1/4 + 2/8 + 3/16 + 5/32 + ...
= 2
The corresponding values of x for the first five natural numbers are shown
below.
x AF(x)
√2−1 1
1/2 2
(√13−2)/3 3
(√89−5)/8 4
(√34−3)/5 5
We shall call AF(x) a golden nugget if x is rational, because they become
increasingly rarer; for example, the 10th golden nugget is 74049690.
Find the 15th golden nugget.
--------------------------------------------------------------------------------
AF(x) is the generating function of the Fibonacci sequence. Using the Fibonacci
recurrence relation it is trivial to show that AF(x) = x/(1-x-x²).
Setting AF(x) = n and solving for x gives
x = [√(5n²+2n+1) - (n+1)]/2n
The other root is negative, so it is discarded.
x is rational iff 5n²+2n+1 is a perfect square l². It follows that:
l² = 5n²+2n+1
5l² = 25n²+10n+5 = (5n+1)² + 4
=> (5n+1)² - 5l² = -4
This is a generalised Pell's equation in (5n+1) and l
-------------------------------------------------------------------------------}
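{- A quick sanity check of the derivation above: for n = 2, 5n²+2n+1 = 25 = 5²
   is a perfect square, and x = (√25 − 3)/4 = 1/2, matching AF(1/2) = 2. -}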
import qualified Math.NumberTheory.Pell as Pell
import Tools
euler = tail -- the first solution is n=0, which is invalid
$ map (\fnpo -> quot (fnpo - 1) 5) -- fnpo = five n plus one
$ filter (\fnpo -> mod fnpo 5 == 1)
$ map fst $ Pell.solve 5 (-4)
line n = concat [show n, "th golden nugget: ", show $ euler !! (n-1)]
-- The first solution of the Pell equation corresponds to n=0, which is not
-- acceptable; 'euler' drops it with 'tail', so the nth golden nugget is
-- 'euler !! (n-1)'.
main = do
putStrLn $ line 10
putStrLn $ line 15
|
dpieroux/euler
|
0137c.hs
|
mit
| 2,195 | 0 | 14 | 555 | 160 | 85 | 75 | 10 | 1 |
module System.Log.GratteLogger (
configureLogger
, LoggerSettings(..)
, logMsgIO
, LogLevel(..)
) where
import System.Log.Logger
import System.Log.Handler.Simple
import System.Log.Handler (setFormatter)
import System.Log.Formatter
import System.IO
import System.Environment
import qualified Filesystem as FS
import qualified Filesystem.Path.CurrentOS as FS
data LogLevel = Debug
| Info
| Notice
| Warning
| Error
| Critical
| Alert
| Emergency
deriving (Show, Eq)
logLevelToPriority :: LogLevel -> Priority
logLevelToPriority lvl =
case lvl of
Debug -> DEBUG
Info -> INFO
Notice -> NOTICE
Warning -> WARNING
Error -> ERROR
Critical -> CRITICAL
Alert -> ALERT
Emergency -> EMERGENCY
data LoggerSettings = LoggerSettings {
loggerPath :: FS.FilePath
, loggerLevel :: LogLevel
}
logMsgIO :: LogLevel -> String -> IO ()
logMsgIO lvl msg = do
logger <- getRootLogger
logL logger (logLevelToPriority lvl) msg
configureLogger :: LoggerSettings -> IO ()
configureLogger (LoggerSettings path level) = do
prg <- getProgName
-- Everything is logged to the file
createLogDir path
logFileHandler <- fileHandler (FS.encodeString path) DEBUG
-- Console logging depends on the verbosity in the options
consoleHandler <- streamHandler stderr $ logLevelToPriority level
updateGlobalLogger rootLoggerName
(setLevel DEBUG
. setHandlers (
map (`setFormatter` getFormatter prg)
[logFileHandler, consoleHandler]))
getFormatter :: String -> LogFormatter (GenericHandler Handle)
getFormatter prg = simpleLogFormatter $ "[$utcTime] [$prio] [" ++ prg ++ "] $msg"
createLogDir :: FS.FilePath -> IO ()
createLogDir path = FS.createTree $ FS.directory path
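-- A minimal usage sketch (illustrative only; the file name and the messages
-- are made up, and this binding is not part of the original module):
exampleUsage :: IO ()
exampleUsage = do
  configureLogger (LoggerSettings (FS.decodeString "gratte.log") Info)
  logMsgIO Info "logger configured"
  -- Debug messages reach the log file (DEBUG handler) but not the console,
  -- which was configured at Info level above.
  logMsgIO Debug "written to the log file only"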
|
ostapneko/gratte-papier
|
src/System/Log/GratteLogger.hs
|
mit
| 1,952 | 0 | 14 | 544 | 465 | 252 | 213 | 55 | 8 |
import Hamu8080.Types (mkComputer)
import Hamu8080.Compute (runComputer, loadProgram)
-- Emulate an 8080 Intel processor for the following usage:
-- 1. Write program as a list of bytes (yes, pure machine code
-- programming!)
-- 2. Create a computer (CPU & Memory)
-- 3. Load program into memory
-- 4. Run computer emulation until a HLT instruction is reached
-- 5. Inspect final state of computer
main :: IO ()
main = do
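  -- The example program bytes decode (as 8080 opcodes) to:
  --   0x3E 0x9B = MVI A, 0x9B;  0x27 = DAA;  0x76 = HLT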
let program = [0x3E, 0x9B, 0x27, 0x76]
computer = loadProgram 0x0000 program $ mkComputer 0x01FF
result = runComputer computer
print result
|
cbaatz/hamu8080
|
src/Main.hs
|
mit
| 583 | 0 | 11 | 113 | 98 | 55 | 43 | 8 | 1 |
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE InstanceSigs #-}
{-# LANGUAGE DeriveFunctor #-}
--
-- Play around with adjunctions and monads
--
-- Functor composition is a functor
newtype FComp f g x = FComp { unFComp :: f (g x) }
instance (Functor f, Functor g) => Functor (FComp f g) where
fmap f = FComp . (fmap . fmap) f . unFComp
-- We define adjunction as follows
class (Functor l, Functor r) => Adjunction l r where
ladj :: forall a b. (l a -> b) -> (a -> r b)
radj :: forall a b. (a -> r b) -> (l a -> b)
unit :: forall a. a -> r (l a)
counit :: forall a. l (r a) -> a
unit = ladj id
counit = radj id
ladj f = fmap f . unit
radj f = counit . fmap f
-- An adjunction gives rise to an applicative
instance (Adjunction l r) => Applicative (FComp r l) where
pure = FComp . unit
-- (<*>) :: r (l (a -> b)) -> r (l a) -> r (l b)
rlab <*> rla = FComp rlb
where rlb = fmap counit $ unFComp $ fmap (\f -> unFComp $ fmap f rla) rlab
-- An adjunction gives rise to a comonad
instance (Adjunction l r) => Monad (FComp r l) where
-- (>>=) :: rl a -> (a -> rl b) -> rl b
rla >>= arlb = FComp $ fmap counit $ unFComp $ fmap (unFComp . arlb) rla
-- An adjunction gives rise to a comonad
class Comonad w where
extract :: w a -> a
duplicate :: w a -> w (w a)
instance (Adjunction l r) => Comonad (FComp l r) where
extract = counit . unFComp
duplicate lra = fmap FComp $ FComp $ fmap unit $ unFComp lra
-- Typical example of an adjunction
--
-- (s, -) -| (s -> -)
--
instance Adjunction ((,) s) ((->) s) where
ladj :: ((s, a) -> b) -> (a -> s -> b)
ladj = flip . curry
radj :: (a -> s -> b) -> ((s, a) -> b)
radj = uncurry . flip
-- The adjunction gives rise to State Monad and Store Comonad
data State s a = State { runState :: s -> (s, a) }
deriving Functor
data Store s a = Store s (s -> a)
deriving Functor
-- few conversion functions
stateAdj :: State s a -> FComp ((->) s) ((,) s) a
stateAdj (State ssa) = FComp ssa
adjState :: FComp ((->) s) ((,) s) a -> State s a
adjState (FComp ssa) = State ssa
storeAdj :: Store s a -> FComp ((,) s) ((->) s) a
storeAdj (Store s sa) = FComp (s, sa)
adjStore :: FComp ((,) s) ((->) s) a -> Store s a
adjStore (FComp (s, sa)) = Store s sa
-- then we get the monad and comonad for free
instance Applicative (State s) where
pure = adjState . pure
f <*> a = adjState (stateAdj f <*> stateAdj a)
instance Monad (State s) where
a >>= f = adjState (stateAdj a >>= (stateAdj . f))
instance Comonad (Store s) where
extract = extract . storeAdj
duplicate = adjStore . fmap adjStore . duplicate . storeAdj
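-- A small usage sketch of the derived State monad (illustrative only):
tick :: State Int Int
tick = State (\s -> (s + 1, s))
-- runState (tick >> tick >> tick) 0 == (3, 2)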
main = putStrLn "type checks!"
|
shouya/thinking-dumps
|
cat-code/Adjunction.hs
|
mit
| 2,688 | 0 | 13 | 660 | 1,105 | 582 | 523 | 54 | 1 |
module Day13Spec (spec) where
import Day13
import Test.Hspec
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "stepsToReach" $ do
it "takes 11 steps to reach (7, 4) with seed 10" $ do
stepsToReach 10 (7, 4) `shouldBe` 11
describe "day13" $ do
it "takes 86 steps to reach (31, 39) with seed 1364" $ do
day13 1364 `shouldBe` 86
describe "numLocationsReached" $ do
it "can reach 5 locations in 2 steps with seed 10" $ do
numLocationsReached 10 2 `shouldBe` 5
describe "day13'" $ do
it "can reach 127 locations in 50 steps with seed 1364" $ do
day13' 1364 `shouldBe` 127
|
brianshourd/adventOfCode2016
|
test/Day13Spec.hs
|
mit
| 681 | 0 | 15 | 208 | 183 | 88 | 95 | 19 | 1 |
module ChatCore.Util.Parsec where
import Control.Applicative ((<$>))
import qualified Data.Text as T
import Text.Parsec
import Text.Parsec.ByteString
-- | Reads characters into a Text until the given character is read.
charsUntil :: Char -> Parser T.Text
charsUntil stop = T.pack <$> charsUntilStr stop
-- | Reads characters into a String until the given character is read.
charsUntilStr :: Char -> Parser String
charsUntilStr stop = segChars
where
segChars = do
-- Read a character.
c <- anyChar
if c == stop
-- If it's our ending char, we're done.
then return []
-- Otherwise, parse the rest of the segment.
else (c:) <$> (segChars <|> (eof >> return []))
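-- Example (illustrative): running 'parse (charsUntil ':') ""' on the input
-- "nick:rest" yields Right "nick", consuming the delimiter as well.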
|
Forkk/ChatCore
|
ChatCore/Util/Parsec.hs
|
mit
| 738 | 0 | 16 | 187 | 155 | 88 | 67 | 14 | 2 |
-- | Data.TSTP.BinOp module
{-# LANGUAGE UnicodeSyntax #-}
module Data.TSTP.BinOp where
-- | Binary formula connectives.
data BinOp = (:<=>:) -- ^ ↔ /Equivalence/
| (:=>:) -- ^ → /Implication/
| (:<=:) -- ^ ← /Reverse Implication/
| (:&:) -- ^ ∧ /AND/
| (:|:) -- ^ ∨ /OR/
| (:~&:) -- ^ ⊼ /NAND/
| (:~|:) -- ^ ⊽ /NOR/
| (:<~>:) -- ^ ⊕ /XOR/
deriving (Eq, Ord, Read)
instance Show BinOp where
show (:<=>:) = "↔"
show (:=>:) = "→"
show (:<=:) = "←"
show (:&:) = "∧"
show (:|:) = "∨"
show (:~&:) = "⊼"
show (:~|:) = "⊽"
show (:<~>:) = "⊕"
|
agomezl/tstp2agda
|
src/Data/TSTP/BinOp.hs
|
mit
| 709 | 16 | 6 | 248 | 170 | 110 | 60 | 20 | 0 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ViewPatterns #-}
module Data.FingerTree.Internal
( module Data.Foldable
, module Data.Monoid
, Elem (..)
, Prio (..)
, Key (..)
, Measured (..)
, Deque (..)
, foldMapDeque
, Unfoldable (..)
, MinView (..)
, MaxView (..)
) where
import Prelude hiding (any)
import Control.Arrow
import Data.Foldable
import Data.Maybe
import Data.Monoid
newtype Elem a = Elem { getElem :: a }
deriving (Eq, Ord)
data Prio a = MInf | Prio a
deriving (Eq, Ord)
instance Ord a => Monoid (Prio a) where
mempty = MInf
MInf `mappend` p = p
p `mappend` MInf = p
Prio m `mappend` Prio n = Prio (max m n)
data Key a = NoKey | Key a
deriving (Eq, Ord)
instance Monoid (Key a) where
mempty = NoKey
k `mappend` NoKey = k
_ `mappend` k = k
class Monoid v => Measured a v where
measure :: a -> v
instance (Measured (Elem a) v1, Measured (Elem a) v2) => Measured (Elem a) (v1, v2) where
measure = measure &&& measure
instance Ord a => Measured (Elem a) (Prio a) where
measure (Elem x) = Prio x
instance Measured (Elem a) (Key a) where
measure (Elem x) = Key x
class Foldable t => Deque t a where
infixr 5 <|
(<|) :: a -> t a -> t a
infixl 5 |>
(|>) :: t a -> a -> t a
viewL :: Monad m => t a -> m (a, t a)
viewR :: Monad m => t a -> m (t a, a)
headL :: t a -> a
tailL :: t a -> t a
headR :: t a -> a
tailR :: t a -> t a
viewL (null -> True) = fail ""
viewL xs = return (headL xs, tailL xs)
viewR (null -> True) = fail ""
viewR xs = return (tailR xs, headR xs)
headL = fst . fromJust . viewL
tailL = snd . fromJust . viewL
headR = snd . fromJust . viewR
tailR = fst . fromJust . viewR
{-# MINIMAL (<|), (|>), (viewL | headL, tailL), (viewR | headR, tailR) #-}
foldMapDeque :: (Deque t a, Monoid m) => (a -> m) -> t a -> m
foldMapDeque f (null -> True) = mempty
foldMapDeque f xs = f (headL xs) <> foldMapDeque f (tailL xs)
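-- For example (sketch): 'foldMapDeque Sum' collapses a deque of numbers into
-- their total, visiting elements front to back via 'headL' and 'tailL'.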
class Unfoldable t a where
insert :: a -> t a -> t a
class Ord a => MinView t a where
minView :: Monad m => t a -> m (a, t a)
minElem :: t a -> a
deleteMin :: t a -> t a
minElem = fst . fromJust . minView
deleteMin = snd . fromJust . minView
{-# MINIMAL minView #-}
class Ord a => MaxView t a where
maxView :: Monad m => t a -> m (t a, a)
maxElem :: t a -> a
deleteMax :: t a -> t a
maxElem = snd . fromJust . maxView
deleteMax = fst . fromJust . maxView
{-# MINIMAL maxView #-}
|
meimisaki/FingerTree
|
Data/FingerTree/Internal.hs
|
mit
| 2,555 | 0 | 11 | 713 | 1,134 | 596 | 538 | 82 | 1 |
{-# LANGUAGE RecordWildCards #-}
module Hogldev.Texture (
Texture(..)
, textureBind
, textureLoad
) where
import Graphics.Rendering.OpenGL hiding (Texture)
import Graphics.GLUtil (readTexture)
data Texture = Texture
{ textureObject :: !TextureObject
, textureTarget :: !TextureTarget2D
} deriving Show
textureBind :: Texture -> TextureUnit -> IO ()
textureBind Texture{..} textureUnit = do
activeTexture $= textureUnit
textureBinding textureTarget $= Just textureObject
textureLoad :: FilePath -> TextureTarget2D -> IO (Maybe Texture)
textureLoad fileName target = readTexture fileName >>= either fail bind
where
fail :: String -> IO (Maybe Texture)
fail msg = do
putStrLn ("Error loading texture '" ++ fileName ++ "': " ++ msg)
return Nothing
bind :: TextureObject -> IO (Maybe Texture)
bind object = do
textureBinding target $= Just object
textureFilter target $= ((Linear', Nothing), Linear')
return (Just (Texture object target))
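-- Typical usage (an illustrative sketch; file name and unit are placeholders):
--
-- > mTex <- textureLoad "assets/bricks.png" Texture2D
-- > case mTex of
-- >   Just tex -> textureBind tex (TextureUnit 0)
-- >   Nothing  -> putStrLn "texture could not be loaded"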
|
triplepointfive/hogldev
|
common/Hogldev/Texture.hs
|
mit
| 1,078 | 0 | 13 | 273 | 304 | 155 | 149 | 30 | 1 |
module Main where
import Control.Monad (void)
import Options.Applicative ((<>))
import qualified Options.Applicative as O
import Language.Janus.AST
import Language.Janus.Interp
import Language.Janus.Parser
import Language.Janus.Stdlib
data Arguments = Arguments String
deriving (Show)
main :: IO ()
main = O.execParser opts >>= runApp
where
opts = O.info (O.helper <*> argdefs) O.fullDesc
argdefs = Arguments
<$> O.strArgument (O.metavar "FILE" <> O.help "source file")
runApp :: Arguments -> IO ()
runApp (Arguments filename) = do
source <- readFile filename
case parseProgram filename source of
Left err -> print err
Right ast -> run' ast >>= either print (\_ -> return ())
where run' ast = runInterpM $ do { importStdlib; eval ast }
|
mkaput/janus
|
runner/Main.hs
|
mit
| 854 | 0 | 15 | 225 | 276 | 145 | 131 | 22 | 2 |
{-# OPTIONS_GHC -F -pgmF htfpp #-}
module InternalTests.GraphTests where
import GraphDB.Util.Prelude
import Test.Framework hiding (frequency, oneof, listOf, elements, choose)
import Test.QuickCheck.Monadic
import Test.QuickCheck.Instances ()
import QuickCheck.GenT
import GraphDB.Graph
import qualified GraphDB.Util.DIOVector as V
import qualified CerealPlus.Serialize as CS
import qualified CerealPlus.Deserialize as CD
-- * Model
-------------------------
type Catalogue = ()
type Name = Text
type UID = Int
instance Setup Catalogue where
type Algorithm Catalogue = Basic
data Index Catalogue =
Catalogue_Artist_UID UID |
Catalogue_Artist_Name Name |
Catalogue_Genre_Name Name |
Catalogue_Song_Name Name |
Genre_Song |
Song_Artist
deriving (Eq, Generic)
data Value Catalogue =
Catalogue Catalogue |
Artist UID Name |
Genre Name |
Song Name
deriving (Eq, Generic)
indexes to from = case (to, from) of
(Artist uid n, Catalogue _) -> [Catalogue_Artist_Name n, Catalogue_Artist_UID uid]
(Artist uid n, Song _) -> [Song_Artist]
(Genre n, Catalogue _) -> [Catalogue_Genre_Name n]
(Song n, Catalogue _) -> [Catalogue_Song_Name n]
(Song n, Genre _) -> [Genre_Song]
_ -> []
instance Serializable m (Index Catalogue)
instance Serializable m (Value Catalogue)
instance Hashable (Index Catalogue)
instance Hashable (Value Catalogue)
-- * QuickCheck
-------------------------
newtype Update = Update (ReaderT (Node Catalogue) IO ())
instance Show Update where
show _ = "<Update>"
instance Arbitrary Update where
arbitrary = fmap Update $ runGenT $ frequency
[
(50, addSomeEdge),
(30, removeSomeEdge),
(40, insertArtist),
(20, insertGenre),
(200, insertSong)
]
where
removeSomeEdge = do
root <- lift $ ask
void $ runMaybeT $ do
source <- MaybeT $ oneof [selectSomeNode, return $ Just root]
target <- MaybeT $ selectSomeNode
liftIO $ removeTarget source target
addSomeEdge = do
void $ runMaybeT $ do
source <- MaybeT selectSomeNode
target <- MaybeT selectSomeNode
liftIO $ addTarget source target
selectSomeNode = do
root <- lift $ ask
targets <- liftIO $ V.new
liftIO $ traverseTargets root $ void . V.append targets
size <- liftIO $ V.size targets
if size > 0
then do
index <- liftGen $ choose (0, size - 1)
fmap Just $ liftIO $ V.unsafeLookup targets index
else
return Nothing
insertArtist = addValueToCatalogue =<< do liftGen $ Artist <$> arbitrary <*> arbitrary
insertGenre = addValueToCatalogue =<< do liftGen $ Genre <$> arbitrary
insertSong = addValueToCatalogue =<< do liftGen $ Song <$> arbitrary
addValueToCatalogue value = do
source <- lift $ ask
target <- liftIO . new $ value
liftIO $ addTarget source target
-- * Tests
-------------------------
test_remove = do
catalogue <- new $ Catalogue ()
michael <- new $ Artist 1 "Michael Jackson"
billieJean <- new $ Song "Billie Jean"
whoIsIt <- new $ Song "Who is it?"
addTarget catalogue michael
addTarget catalogue billieJean
addTarget catalogue whoIsIt
addTarget billieJean michael
addTarget whoIsIt michael
remove michael
assertEqual (3, 2, 2) =<< getStats catalogue
assertEqual 0 . length =<< getSources michael
test_stats = do
catalogue <- new $ Catalogue ()
michael <- new $ Artist 1 "Michael Jackson"
billieJean <- new $ Song "Billie Jean"
whoIsIt <- new $ Song "Who is it?"
addTarget catalogue michael
addTarget catalogue billieJean
addTarget catalogue whoIsIt
addTarget billieJean michael
addTarget whoIsIt michael
assertEqual (4, 5, 6) =<< getStats catalogue
test_addingANodeAffectsTheStats = do
root <- new $ Catalogue ()
addTarget root =<< do new $ Artist 1 "Michael Jackson"
assertEqual (2, 1, 2) =<< getStats root
test_removingANodeAffectsTheStats = do
root <- new $ Catalogue ()
artist <- new $ Artist 1 "Michael Jackson"
addTarget root artist
removeTarget root artist
assertEqual (1, 0, 0) =<< getStats root
test_addingATargetTwiceMakesNoDifference = do
root <- new $ Catalogue ()
artist <- new $ Artist 1 "Michael Jackson"
addTarget root artist
addTarget root artist
assertEqual (2, 1, 2) =<< getStats root
test_traverseTargetsDoesNotRepeat = do
root <- new $ Catalogue ()
addTarget root =<< do new $ Artist 1 "Michael Jackson"
counter <- newIORef 0
traverseTargets root $ const $ modifyIORef counter succ
assertEqual 1 =<< readIORef counter
test_traverseSourcesDoesNotRepeat = do
root <- new $ Catalogue ()
artist <- new $ Artist 1 "Michael Jackson"
addTarget root artist
counter <- newIORef 0
traverseSources artist $ const $ modifyIORef counter succ
assertEqual 1 =<< readIORef counter
prop_serializeDeserializePreservesStats = monadicIO $ do
node <- do
updates :: [Update] <- pick $ do
amount <- choose (0, 100)
replicateM amount arbitrary
run $ do
root <- new $ Catalogue ()
forM_ updates $ \(Update u) -> runReaderT u root
return root
stats <- run $ getStats node
run $ traceIO $ "Stats: " <> show stats
bs <- run $ CS.exec $ serialize $ node
CD.Done (node' :: Node Catalogue) _ <- run $ CD.runPartial deserialize bs
bs' <- run $ CS.exec $ serialize $ node'
stats' <- run $ getStats node'
assert $ stats == stats'
|
nikita-volkov/graph-db
|
executables/InternalTests/GraphTests.hs
|
mit
| 5,562 | 0 | 18 | 1,340 | 1,816 | 878 | 938 | -1 | -1 |
{- |
Boards are what you see when you play the game. A board can do anything a
vector can do since it's just a collection of them. In the game, you can
shift the board in four different directions. To achieve that, rotate the
board first, then shift it.
-}
module Threase.Board
( Board (..)
, canMove
, canShift
, isOver
, move
, render
, rotate
, rotateTo
, rotations
, score
, shift
, shiftWith
) where
import Data.List (transpose)
import Threase.Direction (Direction (..))
import qualified Threase.Tile as T
import qualified Threase.Vector as V
{- $setup
>>> :{
let board = Board
[ V.Vector [Nothing, Just (T.Tile 3)]
, V.Vector [Just (T.Tile 1), Just (T.Tile 2)]
]
:}
-}
{- |
An entire game board. This is just a list of vectors. It's implied, but not
enforced, that the board has the same number of rows and columns. In other
words, it should be square.
>>> board -- Used in examples but annoying to type.
Board {vectors = [Vector {tiles = [Nothing,Just (Tile {number = 3})]},Vector {tiles = [Just (Tile {number = 1}),Just (Tile {number = 2})]}]}
-}
data Board = Board
{ vectors :: [V.Vector] -- ^ The board's vectors.
} deriving (Eq, Show)
{- |
Determines if a board can be moved in a direction.
>>> board `canMove` East
True
-}
canMove :: Board -> Direction -> Bool
canMove b d = canShift (b `rotateTo` d)
{- |
Determines if a board can be shifted.
>>> canShift board
True
-}
canShift :: Board -> Bool
canShift = any V.canShift . vectors
{- |
Determines if the board can be shifted in any direction.
>>> isOver board
False
-}
isOver :: Board -> Bool
isOver = not . any canShift . rotations
{- |
Moves a board in a direction. This is how the user interacts with the
board. For instance, swiping to the left is the same as moving to the west.
>>> render (move board East)
"-\t3\n-\t3\n"
-}
move :: Board -> Direction -> Board
move b d = rotateTo (shift (rotateTo b d)) d'
where
d' = toEnum (fromEnum (maxBound :: Direction) + 1 - fromEnum d)
{- |
Renders a board.
>>> render board
"-\t3\n1\t2\n"
-}
render :: Board -> String
render = unlines . fmap V.render . vectors
{- |
Rotates a board 90 degrees clockwise.
>>> render (rotate board)
"1\t-\n2\t3\n"
-}
rotate :: Board -> Board
rotate = fromLists . fmap reverse . transpose . toLists
where
toLists = fmap V.tiles . vectors
fromLists = Board . fmap V.Vector
{- |
Rotates a board to a direction. After rotating, left will correspond to the
given direction. (By default, left is West.)
>>> render (board `rotateTo` East)
"2\t1\n3\t-\n"
-}
rotateTo :: Board -> Direction -> Board
rotateTo b d = rotations b !! fromEnum d
{- |
    Generates rotated boards from a board. This is done by iterating 'rotate',
so the boards are rotated clockwise.
>>> map render (rotations board)
["-\t3\n1\t2\n","1\t-\n2\t3\n","2\t1\n3\t-\n","3\t2\n-\t1\n"]
-}
rotations :: Board -> [Board]
rotations = take n . iterate rotate
where
n = 1 + fromEnum (maxBound :: Direction)
{- |
Calculates the score of a board, which is the sum of the scores of its
vectors.
>>> score board
3
-}
score :: Board -> Int
score = sum . fmap V.score . vectors
{- |
Shifts all the vectors in a board.
>>> render (shift board)
"3\t-\n3\t-\n"
-}
shift :: Board -> Board
shift = Board . fmap V.shift . vectors
{- |
Shifts all the vectors in a board and inserts a new tile on the trailing
edge at the given position.
>>> render (shiftWith board (T.Tile 1) 0)
"3\t1\n3\t-\n"
-}
shiftWith :: Board -> T.Tile -> Int -> Board
shiftWith b t n = Board vs'
where
vs' = fmap f vs
f (i, v) = if i == n
then V.shiftWith v t
else V.shift v
vs = zip [0 ..] (vectors b)
|
tfausak/threase
|
library/Threase/Board.hs
|
mit
| 3,973 | 0 | 12 | 1,075 | 616 | 341 | 275 | 51 | 2 |
{-
- Types.hs
- By Steven Smith
-}
-- | This module contains the AutoStorable types. They are essentially
-- special-purpose product and sum types. If you can make your data type into
-- some combination of these types and each component is an instance of
-- 'Storable', then you can use the AutoStorable functions to generate a
-- 'Storable' instance for your data type.
module Foreign.AutoStorable.Types (
T1 (..),
T2 (..),
T3 (..),
T4 (..),
T5 (..),
T6 (..),
T7 (..),
T8 (..),
S2 (..),
S3 (..),
S4 (..),
S5 (..),
S6 (..),
S7 (..),
S8 (..)
) where
import Control.Applicative
import Control.Arrow ((>>>))
import Foreign.Storable
import Foreign.AutoStorable.Utils
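-- As a rough illustration (the types below are hypothetical, not part of this
-- package): a record such as
--
-- > data RGB = RGB Word8 Word8 Word8
--
-- maps onto @T3 Word8 Word8 Word8@, while a sum such as
--
-- > data Shape = Circle Float | Rect Float Float
--
-- maps onto @S2 Float (T2 Float Float)@.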
-- | A 1-element tuple. I don't know of an actual use for this, but it is
-- included for completeness.
data T1 a = T1 a
deriving (Show, Eq)
-- | A 2-element tuple
data T2 a b = T2 a b
deriving (Show, Eq)
-- | A 3-element tuple
data T3 a b c = T3 a b c
deriving (Show, Eq)
-- | A 4-element tuple
data T4 a b c d = T4 a b c d
deriving (Show, Eq)
-- | A 5-element tuple
data T5 a b c d e = T5 a b c d e
deriving (Show, Eq)
-- | A 6-element tuple
data T6 a b c d e f = T6 a b c d e f
deriving (Show, Eq)
-- | A 7-element tuple
data T7 a b c d e f g = T7 a b c d e f g
deriving (Show, Eq)
-- | An 8-element tuple
data T8 a b c d e f g h = T8 a b c d e f g h
deriving (Show, Eq)
-- No S1 because that would just be T1
-- | A 2-element sum type
data S2 a b = S2a a | S2b b
deriving (Show, Eq)
-- | A 3-element sum type
data S3 a b c = S3a a | S3b b | S3c c
deriving (Show, Eq)
-- | A 4-element sum type
data S4 a b c d = S4a a | S4b b | S4c c | S4d d
deriving (Show, Eq)
-- | A 5-element sum type
data S5 a b c d e = S5a a | S5b b | S5c c | S5d d | S5e e
deriving (Show, Eq)
-- | A 6-element sum type
data S6 a b c d e f = S6a a | S6b b | S6c c | S6d d | S6e e | S6f f
deriving (Show, Eq)
-- | A 7-element sum type
data S7 a b c d e f g = S7a a | S7b b | S7c c | S7d d | S7e e | S7f f | S7g g
deriving (Show, Eq)
-- | An 8-element sum type
data S8 a b c d e f g h =
S8a a | S8b b | S8c c | S8d d | S8e e | S8f f | S8g g | S8h h
deriving (Show, Eq)
split2 :: S2 a b -> T2 a b
split2 = undefined
split3 :: S3 a b c -> T3 a b c
split3 = undefined
split4 :: S4 a b c d -> T4 a b c d
split4 = undefined
split5 :: S5 a b c d e -> T5 a b c d e
split5 = undefined
split6 :: S6 a b c d e f -> T6 a b c d e f
split6 = undefined
split7 :: S7 a b c d e f g -> T7 a b c d e f g
split7 = undefined
split8 :: S8 a b c d e f g h -> T8 a b c d e f g h
split8 = undefined
instance Storable a => Storable (T1 a) where
sizeOf ~(T1 a) = sizeOf a
alignment ~(T1 a) = alignment a
peek ptr = runPeek ptr $ T1
<$> peekShift a
where
~(T1 a) = unwrap ptr
poke ptr (T1 a) = runPoke ptr $ do
pokeShift a
instance (Storable a, Storable b) => Storable (T2 a b) where
sizeOf ~t@(T2 a b) = fixPadding t . sapp2 sizePadded (>>>) a b $ 0
alignment ~(T2 a b) = sapp2 alignment max a b
peek ptr = runPeek ptr $ T2
<$> peekShift a
<*> peekShift b
where
~(T2 a b) = unwrap ptr
poke ptr (T2 a b) = runPoke ptr $ do
pokeShift a
pokeShift b
instance (Storable a, Storable b, Storable c) => Storable (T3 a b c) where
sizeOf ~t@(T3 a b c) = fixPadding t . sapp3 sizePadded (>>>) a b c $ 0
alignment ~(T3 a b c) = sapp3 alignment max a b c
peek ptr = runPeek ptr $ T3
<$> peekShift a
<*> peekShift b
<*> peekShift c
where
~(T3 a b c) = unwrap ptr
poke ptr (T3 a b c) = runPoke ptr $ do
pokeShift a
pokeShift b
pokeShift c
instance (Storable a, Storable b, Storable c, Storable d)
=> Storable (T4 a b c d) where
sizeOf ~t@(T4 a b c d) = fixPadding t . sapp4 sizePadded (>>>) a b c d $ 0
alignment ~(T4 a b c d) = sapp4 alignment max a b c d
peek ptr = runPeek ptr $ T4
<$> peekShift a
<*> peekShift b
<*> peekShift c
<*> peekShift d
where
~(T4 a b c d) = unwrap ptr
poke ptr (T4 a b c d) = runPoke ptr $ do
pokeShift a
pokeShift b
pokeShift c
pokeShift d
instance (Storable a, Storable b, Storable c, Storable d, Storable e)
=> Storable (T5 a b c d e) where
sizeOf ~t@(T5 a b c d e) =
fixPadding t . sapp5 sizePadded (>>>) a b c d e $ 0
alignment ~(T5 a b c d e) = sapp5 alignment max a b c d e
peek ptr = runPeek ptr $ T5
<$> peekShift a
<*> peekShift b
<*> peekShift c
<*> peekShift d
<*> peekShift e
where
~(T5 a b c d e) = unwrap ptr
poke ptr (T5 a b c d e) = runPoke ptr $ do
pokeShift a
pokeShift b
pokeShift c
pokeShift d
pokeShift e
instance
(Storable a, Storable b, Storable c, Storable d, Storable e, Storable f)
=> Storable (T6 a b c d e f) where
sizeOf ~t@(T6 a b c d e f) =
fixPadding t . sapp6 sizePadded (>>>) a b c d e f $ 0
alignment ~(T6 a b c d e f) = sapp6 alignment max a b c d e f
peek ptr = runPeek ptr $ T6
<$> peekShift a
<*> peekShift b
<*> peekShift c
<*> peekShift d
<*> peekShift e
<*> peekShift f
where
~(T6 a b c d e f) = unwrap ptr
poke ptr (T6 a b c d e f) = runPoke ptr $ do
pokeShift a
pokeShift b
pokeShift c
pokeShift d
pokeShift e
pokeShift f
instance
(Storable a, Storable b, Storable c, Storable d, Storable e, Storable f,
Storable g)
=> Storable (T7 a b c d e f g) where
sizeOf ~t@(T7 a b c d e f g) =
fixPadding t . sapp7 sizePadded (>>>) a b c d e f g $ 0
alignment ~(T7 a b c d e f g) = sapp7 alignment max a b c d e f g
peek ptr = runPeek ptr $ T7
<$> peekShift a
<*> peekShift b
<*> peekShift c
<*> peekShift d
<*> peekShift e
<*> peekShift f
<*> peekShift g
where
~(T7 a b c d e f g) = unwrap ptr
poke ptr (T7 a b c d e f g) = runPoke ptr $ do
pokeShift a
pokeShift b
pokeShift c
pokeShift d
pokeShift e
pokeShift f
pokeShift g
instance
(Storable a, Storable b, Storable c, Storable d, Storable e, Storable f,
Storable g, Storable h)
=> Storable (T8 a b c d e f g h) where
sizeOf ~t@(T8 a b c d e f g h) =
fixPadding t . sapp8 sizePadded (>>>) a b c d e f g h $ 0
alignment ~(T8 a b c d e f g h) = sapp8 alignment max a b c d e f g h
peek ptr = runPeek ptr $ T8
<$> peekShift a
<*> peekShift b
<*> peekShift c
<*> peekShift d
<*> peekShift e
<*> peekShift f
<*> peekShift g
<*> peekShift h
where
~(T8 a b c d e f g h) = unwrap ptr
poke ptr (T8 a b c d e f g h) = runPoke ptr $ do
pokeShift a
pokeShift b
pokeShift c
pokeShift d
pokeShift e
pokeShift f
pokeShift g
pokeShift h
instance (Storable a, Storable b) => Storable (S2 a b) where
sizeOf s = fixPadding s $ sapp2 sizeTagged max a b
where
~(T2 a b) = split2 s
alignment s = sapp2 alignment max a b
where
~(T2 a b) = split2 s
peek ptr = runPeek ptr $ do
tag <- peekTag tagOffset
case tag of
0 -> S2a <$> peekShift a
_ -> S2b <$> peekShift b
where
~(T2 a b) = split2 (unwrap ptr)
tagOffset = sapp2 sizeOf max a b
poke ptr s = case s of
S2a a -> runPokeTagged ptr 0 tagOffset a
S2b b -> runPokeTagged ptr 1 tagOffset b
where
~(T2 a' b') = split2 (unwrap ptr)
tagOffset = sapp2 sizeOf max a' b'
instance (Storable a, Storable b, Storable c) => Storable (S3 a b c) where
sizeOf s = fixPadding s $ sapp3 sizeTagged max a b c
where
~(T3 a b c) = split3 s
alignment s = sapp3 alignment max a b c
where
~(T3 a b c) = split3 s
peek ptr = runPeek ptr $ do
tag <- peekTag tagOffset
case tag of
0 -> S3a <$> peekShift a
1 -> S3b <$> peekShift b
_ -> S3c <$> peekShift c
where
~(T3 a b c) = split3 (unwrap ptr)
tagOffset = sapp3 sizeOf max a b c
poke ptr s = case s of
S3a a -> runPokeTagged ptr 0 tagOffset a
S3b b -> runPokeTagged ptr 1 tagOffset b
S3c c -> runPokeTagged ptr 2 tagOffset c
where
~(T3 a' b' c') = split3 (unwrap ptr)
tagOffset = sapp3 sizeOf max a' b' c'
instance (Storable a, Storable b, Storable c, Storable d)
=> Storable (S4 a b c d) where
sizeOf s = fixPadding s $ sapp4 sizeTagged max a b c d
where
~(T4 a b c d) = split4 s
alignment s = sapp4 alignment max a b c d
where
~(T4 a b c d) = split4 s
peek ptr = runPeek ptr $ do
tag <- peekTag tagOffset
case tag of
0 -> S4a <$> peekShift a
1 -> S4b <$> peekShift b
2 -> S4c <$> peekShift c
_ -> S4d <$> peekShift d
where
~(T4 a b c d) = split4 (unwrap ptr)
tagOffset = sapp4 sizeOf max a b c d
poke ptr s = case s of
S4a a -> runPokeTagged ptr 0 tagOffset a
S4b b -> runPokeTagged ptr 1 tagOffset b
S4c c -> runPokeTagged ptr 2 tagOffset c
S4d d -> runPokeTagged ptr 3 tagOffset d
where
~(T4 a' b' c' d') = split4 (unwrap ptr)
tagOffset = sapp4 sizeOf max a' b' c' d'
instance (Storable a, Storable b, Storable c, Storable d, Storable e)
=> Storable (S5 a b c d e) where
sizeOf s = fixPadding s $ sapp5 sizeTagged max a b c d e
where
~(T5 a b c d e) = split5 s
alignment s = sapp5 alignment max a b c d e
where
~(T5 a b c d e) = split5 s
peek ptr = runPeek ptr $ do
tag <- peekTag tagOffset
case tag of
0 -> S5a <$> peekShift a
1 -> S5b <$> peekShift b
2 -> S5c <$> peekShift c
3 -> S5d <$> peekShift d
_ -> S5e <$> peekShift e
where
~(T5 a b c d e) = split5 (unwrap ptr)
tagOffset = sapp5 sizeOf max a b c d e
poke ptr s = case s of
S5a a -> runPokeTagged ptr 0 tagOffset a
S5b b -> runPokeTagged ptr 1 tagOffset b
S5c c -> runPokeTagged ptr 2 tagOffset c
S5d d -> runPokeTagged ptr 3 tagOffset d
S5e e -> runPokeTagged ptr 4 tagOffset e
where
~(T5 a' b' c' d' e') = split5 (unwrap ptr)
tagOffset = sapp5 sizeOf max a' b' c' d' e'
instance
(Storable a, Storable b, Storable c, Storable d, Storable e, Storable f)
=> Storable (S6 a b c d e f) where
sizeOf s = fixPadding s $ sapp6 sizeTagged max a b c d e f
where
~(T6 a b c d e f) = split6 s
alignment s = sapp6 alignment max a b c d e f
where
~(T6 a b c d e f) = split6 s
peek ptr = runPeek ptr $ do
tag <- peekTag tagOffset
case tag of
0 -> S6a <$> peekShift a
1 -> S6b <$> peekShift b
2 -> S6c <$> peekShift c
3 -> S6d <$> peekShift d
4 -> S6e <$> peekShift e
_ -> S6f <$> peekShift f
where
~(T6 a b c d e f) = split6 (unwrap ptr)
tagOffset = sapp6 sizeOf max a b c d e f
poke ptr s = case s of
S6a a -> runPokeTagged ptr 0 tagOffset a
S6b b -> runPokeTagged ptr 1 tagOffset b
S6c c -> runPokeTagged ptr 2 tagOffset c
S6d d -> runPokeTagged ptr 3 tagOffset d
S6e e -> runPokeTagged ptr 4 tagOffset e
S6f f -> runPokeTagged ptr 5 tagOffset f
where
~(T6 a' b' c' d' e' f') = split6 (unwrap ptr)
tagOffset = sapp6 sizeOf max a' b' c' d' e' f'
instance
(Storable a, Storable b, Storable c, Storable d, Storable e, Storable f,
Storable g)
=> Storable (S7 a b c d e f g) where
sizeOf s = fixPadding s $ sapp7 sizeTagged max a b c d e f g
where
~(T7 a b c d e f g) = split7 s
alignment s = sapp7 alignment max a b c d e f g
where
~(T7 a b c d e f g) = split7 s
peek ptr = runPeek ptr $ do
tag <- peekTag tagOffset
case tag of
0 -> S7a <$> peekShift a
1 -> S7b <$> peekShift b
2 -> S7c <$> peekShift c
3 -> S7d <$> peekShift d
4 -> S7e <$> peekShift e
5 -> S7f <$> peekShift f
_ -> S7g <$> peekShift g
where
~(T7 a b c d e f g) = split7 (unwrap ptr)
tagOffset = sapp7 sizeOf max a b c d e f g
poke ptr s = case s of
S7a a -> runPokeTagged ptr 0 tagOffset a
S7b b -> runPokeTagged ptr 1 tagOffset b
S7c c -> runPokeTagged ptr 2 tagOffset c
S7d d -> runPokeTagged ptr 3 tagOffset d
S7e e -> runPokeTagged ptr 4 tagOffset e
S7f f -> runPokeTagged ptr 5 tagOffset f
S7g g -> runPokeTagged ptr 6 tagOffset g
where
~(T7 a' b' c' d' e' f' g') = split7 (unwrap ptr)
tagOffset = sapp7 sizeOf max a' b' c' d' e' f' g'
instance
(Storable a, Storable b, Storable c, Storable d, Storable e, Storable f,
Storable g, Storable h)
=> Storable (S8 a b c d e f g h) where
sizeOf s = fixPadding s $ sapp8 sizeTagged max a b c d e f g h
where
~(T8 a b c d e f g h) = split8 s
alignment s = sapp8 alignment max a b c d e f g h
where
~(T8 a b c d e f g h) = split8 s
peek ptr = runPeek ptr $ do
tag <- peekTag tagOffset
case tag of
0 -> S8a <$> peekShift a
1 -> S8b <$> peekShift b
2 -> S8c <$> peekShift c
3 -> S8d <$> peekShift d
4 -> S8e <$> peekShift e
5 -> S8f <$> peekShift f
6 -> S8g <$> peekShift g
_ -> S8h <$> peekShift h
where
~(T8 a b c d e f g h) = split8 (unwrap ptr)
tagOffset = sapp8 sizeOf max a b c d e f g h
poke ptr s = case s of
S8a a -> runPokeTagged ptr 0 tagOffset a
S8b b -> runPokeTagged ptr 1 tagOffset b
S8c c -> runPokeTagged ptr 2 tagOffset c
S8d d -> runPokeTagged ptr 3 tagOffset d
S8e e -> runPokeTagged ptr 4 tagOffset e
S8f f -> runPokeTagged ptr 5 tagOffset f
S8g g -> runPokeTagged ptr 6 tagOffset g
S8h h -> runPokeTagged ptr 7 tagOffset h
where
~(T8 a' b' c' d' e' f' g' h') = split8 (unwrap ptr)
tagOffset = sapp8 sizeOf max a' b' c' d' e' f' g' h'
|
stevely/AutoStorable
|
src/Foreign/AutoStorable/Types.hs
|
mit
| 14,650 | 0 | 15 | 5,329 | 6,353 | 3,103 | 3,250 | 370 | 1 |
module Main where
import Parser
import Data.Maybe (fromMaybe)
inputParser :: Parser String
inputParser =
concat <$> (parseMany $
parseEither textParser repeaterParser)
inputParser' :: Parser Integer
inputParser' =
sum <$> (parseMany $
parseEither textParser' repeaterParser')
textParser :: Parser String
textParser = do
txt <- parseAlphas
if null txt
then failParse
else return txt
textParser' :: Parser Integer
textParser' = do
txt <- parseAlphas
if null txt
then failParse
else return $ fromIntegral (length txt)
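-- A marker of the form "(NxT)" means: take the next N characters of the input
-- and repeat them T times; the marker itself contributes nothing to the output.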
repeaterParser :: Parser String
repeaterParser = do
parseChar (== '(')
n <- parseNumber
parseChar (== 'x')
t <- parseNumber
parseChar (== ')')
txt <- parseN parseAny n
return $ (concat $ replicate t txt)
repeaterParser' :: Parser Integer
repeaterParser' = do
parseChar (== '(')
n <- parseNumber
parseChar (== 'x')
t <- parseNumber
parseChar (== ')')
inner <- parseN parseAny n
let m = fromMaybe 0 $ eval inputParser' inner
return $ fromIntegral t * m
input :: IO String
input = readFile "input.txt"
main :: IO ()
main = do
text <- input
putStrLn $ "part 1: " ++ (show . length . concat $ eval inputParser text)
putStrLn $ "part 2: " ++ (show $ eval inputParser' text)
putStrLn "all done"
test :: String
test = "X(8x2)(3x3)ABCY"
|
CarstenKoenig/AdventOfCode2016
|
Day9/Main.hs
|
mit
| 1,338 | 0 | 11 | 297 | 459 | 225 | 234 | 52 | 2 |
{-# LANGUAGE MultiParamTypeClasses #-}
module Diffie_Hellman.Quiz
( make
, Param (..)
, Config (..)
)
where
import Diffie_Hellman.Param
import Diffie_Hellman.Config
import Diffie_Hellman.Break hiding ( make )
import Prime.Check
import Autolib.Util.Zufall
import Autolib.Util.Seed
import Inter.Types
import Inter.Quiz hiding ( make )
roll par = do
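    -- Pick a prime p (and, presumably, a generator g) of the requested size,
    -- then two secret exponents a and b. The public part exposes g, g^a and
    -- g^b; the private part keeps a, b and the shared secret g^(a*b).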
start <- randomRIO ( 10 ^ digits par , 10 ^ ( 1 + digits par ) )
let (pp, gg) = Prime.Check.next start
aa <- randomRIO ( 1, pp - 1 )
bb <- randomRIO ( 1, pp - 1 )
return $ Config
{ public = Public { p = pp
, g = gg
, g_a = powMod gg aa pp
, g_b = powMod gg bb pp
}
, private = Private { a = aa
, b = bb
, g_ab = powMod gg ( aa * bb ) pp
}
}
instance Generator Diffie_Hellman_Code_Break Param Config where
generator _ p key = roll p
instance Project Diffie_Hellman_Code_Break Config Config where
project _ = id
make :: Make
make = quiz Diffie_Hellman_Code_Break Diffie_Hellman.Param.example
|
Erdwolf/autotool-bonn
|
src/Diffie_Hellman/Quiz.hs
|
gpl-2.0
| 1,223 | 0 | 14 | 458 | 347 | 193 | 154 | 32 | 1 |
module Main where
import Game
import Loop
import Init
-- | Location of default game
exampleFile :: String
exampleFile = "games/example.exp"
-- | Opens a game file and either starts the game or reports an error.
main :: IO ()
main = do
putStrLn "What game file would you like to play?"
putStrLn "Games in the current directory:"
games <- listGames "."
mapM_ putStrLn games
putStrLn "Or just press enter to play the game at games/example.exp"
file <- getLine
f <- if file == "" then readFile exampleFile else readFile file
-- TODO: what if the file isn't there?
let game = initGame f
either print startGame game
|
emhoracek/explora
|
executable/Main.hs
|
gpl-2.0
| 654 | 0 | 10 | 152 | 131 | 63 | 68 | 17 | 2 |
{-# LANGUAGE TemplateHaskell #-}
module Tic.GameState where
import Control.Lens (makeClassy)
import Tic.Level
import Tic.Location
import Tic.Score
import Tic.UnitType
import Wrench.Time
data GameState = GameState {
_gsCurrentLevel :: Level
, _gsTimerInited :: TimeTicks
, _gsLocationSequence :: [Location UnitType]
, _gsLevelScore :: Score
, _gsTotalScore :: Score
}
$(makeClassy ''GameState)
|
pmiddend/tic
|
lib/Tic/GameState.hs
|
gpl-3.0
| 491 | 0 | 10 | 146 | 99 | 59 | 40 | 15 | 0 |
import Development.Hake
import Development.Hake.FunSetIO
main = hake [
file [ "program" ] [ "main.o", "iodat.o", "dorun.o", "lo.o", "./usr/fred/lib/crtn.a" ] $
const $ rawSystemE [ "cc", "-o", "program", "main.o", "iodat.o", "dorun.o", "lo.o", "./usr/fred/lib/crtn.a" ]
,
file [ "main.o" ] [ "main.c" ] $ const $ rawSystemE [ "cc", "-c", "main.c" ]
,
file [ "iodat.o" ] [ "iodat.c" ] $ const $ rawSystemE [ "cc", "-c", "iodat.c" ]
,
file [ "dorun.o" ] [ "dorun.c" ] $ const $ rawSystemE [ "cc", "-c", "dorun.c" ]
,
file [ "lo.o" ] [ "lo.s" ] $ const $ rawSystemE [ "cc", "-c", "lo.s" ]
,
task "clean" $ rawSystemE [ "rm", "main.o", "iodat.o", "dorun.o", "lo.o", "program" ]
]
|
YoshikuniJujo/hake_haskell
|
examples/nutshell/chap1/1/hakeMainIO.hs
|
gpl-3.0
| 694 | 0 | 10 | 133 | 248 | 142 | 106 | 10 | 1 |
module Helpers where
import Data.List
import Data.Array
import System.Random
import qualified Data.Vector as V
partialSum :: Array Int Float -> Int -> [Int] -> Int -> Float
partialSum cosList q xs x =
let q' = q `div` 2
getIndex = \ind -> let ind' = ind `mod` q in if ind' > q' then abs (ind' - q) else ind'
getCosVal = \b -> cosList! (b*x `mod` q)
deep = length xs
-- sumList = map getCosVal xs `using` parList rdeepseq
in ( foldl' (\f s -> (+) f $ getCosVal s) 0 xs ) / convertInt deep
-- in ( sumList `seq` foldl' (+) 0 sumList ) / convertInt deep
partialSum' :: Array Int Float -> Int -> V.Vector Int -> Int -> Float
partialSum' cosList q xs x =
let q' = q `div` 2
getIndex = \ind -> let ind' = ind `mod` q in if ind' > q' then abs (ind' - q) else ind'
getCosVal = \b -> cosList! getIndex (b*x)
deep = V.length xs
in ( V.foldl' (\f s -> (+) f $ getCosVal s) 0 xs ) / convertInt deep
getMaxForAllX :: Array Int Float -> Int -> Float-> V.Vector Int -> Float
getMaxForAllX cosList q prevDelta xs =
let partSum = partialSum' cosList q
ring = [1..q `div` 2]
in foldl' (\f s -> if f < prevDelta then max f (abs $ partSum xs s) else 1.0) 0 ring
findBetter :: Array Int Float -> Int -> Float -> [V.Vector Int] -> (V.Vector Int, Float)
findBetter cosList q delta xs = foldl'
(\(prevLst, prevD) lst ->
let currD = getMaxForAllX cosList q prevD lst
in if currD > prevD then (prevLst, prevD) else (lst, currD))
(V.fromList [], delta)
xs
getLowerBound :: Int -> Float -> Int
getLowerBound q _ = round . log . convertInt $ q
getHightBound :: Int -> Float -> Int
getHightBound q delta = min q $ round ((2.0 * (log . convertInt $ (2 * q))) / delta / delta)
convertInt :: Int -> Float
convertInt x = fromInteger . toInteger $ x
comb :: Int -> Int -> Int
comb n m
| (n < 0) || (m < 0) || (n < m) = 0
| n < 2 * m = comb' n (n-m)
| otherwise = comb' n m
comb' :: Int -> Int -> Int
comb' _ 0 = 1
comb' n m = (comb' (n-1) (m-1)) * n `div` m
randomInts :: RandomGen g => Int -> Int -> g -> ([Int], g)
randomInts _ 0 gen = ([], gen)
randomInts q d gen =
let (x, genn) = randomR (0, q) $ gen
(xs, gennn) = randomInts q (d-1) genn
in (x:xs, gennn)
calcCosList :: Int -> Array Int Float
calcCosList q = array (0, q) [ (q', cos (2.0 * pi * (convertInt q') / (convertInt q) )) | q' <- [0..q] ]
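-- Extended Euclidean algorithm: 'gcdExt a b' returns (x, y, g) with
-- a*x + b*y == g and g == gcd a b; it is used below for modular inverses.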
gcdExt :: Int -> Int -> (Int, Int, Int)
gcdExt a 0 = (1, 0, a)
gcdExt a b = let (q, r) = a `quotRem` b
(s, t, g) = gcdExt b r
in (t, s - q * t, g)
modInv :: Int -> Int -> Maybe Int
modInv a m = let (i, _, g) = gcdExt a m
in if g == 1 then Just (mkPos i) else Nothing
where mkPos x = if x < 0 then x + m else x
|
GOGEN/HashingCollizion
|
src/Utils/Helpers.hs
|
gpl-3.0
| 2,782 | 53 | 15 | 776 | 1,436 | 761 | 675 | 62 | 3 |
--------------------------------------------------------------------------------
{- |
Module : Math.Util
Description : Utility functions for the matrix package.
License : GPL-3
Maintainer : [email protected]
Stability : Stable
Portability : Portable
-}
--------------------------------------------------------------------------------
module Math.Util where
{- | Function composition composition. Composes an arity 1 function with an
arity 2 function.
> dotProduct = V.sum .: (*)
Is equivalent to:
> dotProduct u v = V.sum (u * v)
-}
(.:) :: (b -> c) -> (a -> a1 -> b) -> a -> a1 -> c
(.:) = (.) . (.)
-- * Angle conversions
-- | Converts Radians to Degrees
toDegrees :: Floating a => a -> a
toDegrees rad = rad * (180 / pi)
-- | Converts Degrees to Radians
toRadians :: Floating a => a -> a
toRadians deg = deg * (pi / 180)
|
Jiggins/Matrix
|
Math/Util.hs
|
gpl-3.0
| 859 | 0 | 9 | 164 | 139 | 81 | 58 | 7 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.AdSenseHost.Accounts.AdClients.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Get information about one of the ad clients in the specified
-- publisher\'s AdSense account.
--
-- /See:/ <https://developers.google.com/adsense/host/ AdSense Host API Reference> for @adsensehost.accounts.adclients.get@.
module Network.Google.Resource.AdSenseHost.Accounts.AdClients.Get
(
-- * REST Resource
AccountsAdClientsGetResource
-- * Creating a Request
, accountsAdClientsGet
, AccountsAdClientsGet
-- * Request Lenses
, aacgAdClientId
, aacgAccountId
) where
import Network.Google.AdSenseHost.Types
import Network.Google.Prelude
-- | A resource alias for @adsensehost.accounts.adclients.get@ method which the
-- 'AccountsAdClientsGet' request conforms to.
type AccountsAdClientsGetResource =
"adsensehost" :>
"v4.1" :>
"accounts" :>
Capture "accountId" Text :>
"adclients" :>
Capture "adClientId" Text :>
QueryParam "alt" AltJSON :> Get '[JSON] AdClient
-- | Get information about one of the ad clients in the specified
-- publisher\'s AdSense account.
--
-- /See:/ 'accountsAdClientsGet' smart constructor.
data AccountsAdClientsGet =
AccountsAdClientsGet'
{ _aacgAdClientId :: !Text
, _aacgAccountId :: !Text
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'AccountsAdClientsGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'aacgAdClientId'
--
-- * 'aacgAccountId'
accountsAdClientsGet
:: Text -- ^ 'aacgAdClientId'
-> Text -- ^ 'aacgAccountId'
-> AccountsAdClientsGet
accountsAdClientsGet pAacgAdClientId_ pAacgAccountId_ =
AccountsAdClientsGet'
{_aacgAdClientId = pAacgAdClientId_, _aacgAccountId = pAacgAccountId_}
-- | Ad client to get.
aacgAdClientId :: Lens' AccountsAdClientsGet Text
aacgAdClientId
= lens _aacgAdClientId
(\ s a -> s{_aacgAdClientId = a})
-- | Account which contains the ad client.
aacgAccountId :: Lens' AccountsAdClientsGet Text
aacgAccountId
= lens _aacgAccountId
(\ s a -> s{_aacgAccountId = a})
instance GoogleRequest AccountsAdClientsGet where
type Rs AccountsAdClientsGet = AdClient
type Scopes AccountsAdClientsGet =
'["https://www.googleapis.com/auth/adsensehost"]
requestClient AccountsAdClientsGet'{..}
= go _aacgAccountId _aacgAdClientId (Just AltJSON)
adSenseHostService
where go
= buildClient
(Proxy :: Proxy AccountsAdClientsGetResource)
mempty
|
brendanhay/gogol
|
gogol-adsense-host/gen/Network/Google/Resource/AdSenseHost/Accounts/AdClients/Get.hs
|
mpl-2.0
| 3,404 | 0 | 14 | 734 | 384 | 232 | 152 | 64 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.AdExchangeBuyer2.Bidders.Accounts.FilterSets.FilteredBids.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- List all reasons for which bids were filtered, with the number of bids
-- filtered for each reason.
--
-- /See:/ <https://developers.google.com/authorized-buyers/apis/reference/rest/ Ad Exchange Buyer API II Reference> for @adexchangebuyer2.bidders.accounts.filterSets.filteredBids.list@.
module Network.Google.Resource.AdExchangeBuyer2.Bidders.Accounts.FilterSets.FilteredBids.List
(
-- * REST Resource
BiddersAccountsFilterSetsFilteredBidsListResource
-- * Creating a Request
, biddersAccountsFilterSetsFilteredBidsList
, BiddersAccountsFilterSetsFilteredBidsList
-- * Request Lenses
, bafsfblXgafv
, bafsfblUploadProtocol
, bafsfblFilterSetName
, bafsfblAccessToken
, bafsfblUploadType
, bafsfblPageToken
, bafsfblPageSize
, bafsfblCallback
) where
import Network.Google.AdExchangeBuyer2.Types
import Network.Google.Prelude
-- | A resource alias for @adexchangebuyer2.bidders.accounts.filterSets.filteredBids.list@ method which the
-- 'BiddersAccountsFilterSetsFilteredBidsList' request conforms to.
type BiddersAccountsFilterSetsFilteredBidsListResource
=
"v2beta1" :>
Capture "filterSetName" Text :>
"filteredBids" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "pageToken" Text :>
QueryParam "pageSize" (Textual Int32) :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] ListFilteredBidsResponse
-- | List all reasons for which bids were filtered, with the number of bids
-- filtered for each reason.
--
-- /See:/ 'biddersAccountsFilterSetsFilteredBidsList' smart constructor.
data BiddersAccountsFilterSetsFilteredBidsList =
BiddersAccountsFilterSetsFilteredBidsList'
{ _bafsfblXgafv :: !(Maybe Xgafv)
, _bafsfblUploadProtocol :: !(Maybe Text)
, _bafsfblFilterSetName :: !Text
, _bafsfblAccessToken :: !(Maybe Text)
, _bafsfblUploadType :: !(Maybe Text)
, _bafsfblPageToken :: !(Maybe Text)
, _bafsfblPageSize :: !(Maybe (Textual Int32))
, _bafsfblCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'BiddersAccountsFilterSetsFilteredBidsList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'bafsfblXgafv'
--
-- * 'bafsfblUploadProtocol'
--
-- * 'bafsfblFilterSetName'
--
-- * 'bafsfblAccessToken'
--
-- * 'bafsfblUploadType'
--
-- * 'bafsfblPageToken'
--
-- * 'bafsfblPageSize'
--
-- * 'bafsfblCallback'
biddersAccountsFilterSetsFilteredBidsList
:: Text -- ^ 'bafsfblFilterSetName'
-> BiddersAccountsFilterSetsFilteredBidsList
biddersAccountsFilterSetsFilteredBidsList pBafsfblFilterSetName_ =
BiddersAccountsFilterSetsFilteredBidsList'
{ _bafsfblXgafv = Nothing
, _bafsfblUploadProtocol = Nothing
, _bafsfblFilterSetName = pBafsfblFilterSetName_
, _bafsfblAccessToken = Nothing
, _bafsfblUploadType = Nothing
, _bafsfblPageToken = Nothing
, _bafsfblPageSize = Nothing
, _bafsfblCallback = Nothing
}
-- | V1 error format.
bafsfblXgafv :: Lens' BiddersAccountsFilterSetsFilteredBidsList (Maybe Xgafv)
bafsfblXgafv
= lens _bafsfblXgafv (\ s a -> s{_bafsfblXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
bafsfblUploadProtocol :: Lens' BiddersAccountsFilterSetsFilteredBidsList (Maybe Text)
bafsfblUploadProtocol
= lens _bafsfblUploadProtocol
(\ s a -> s{_bafsfblUploadProtocol = a})
-- | Name of the filter set that should be applied to the requested metrics.
-- For example: - For a bidder-level filter set for bidder 123:
-- \`bidders\/123\/filterSets\/abc\` - For an account-level filter set for
-- the buyer account representing bidder 123:
-- \`bidders\/123\/accounts\/123\/filterSets\/abc\` - For an account-level
-- filter set for the child seat buyer account 456 whose bidder is 123:
-- \`bidders\/123\/accounts\/456\/filterSets\/abc\`
bafsfblFilterSetName :: Lens' BiddersAccountsFilterSetsFilteredBidsList Text
bafsfblFilterSetName
= lens _bafsfblFilterSetName
(\ s a -> s{_bafsfblFilterSetName = a})
-- | OAuth access token.
bafsfblAccessToken :: Lens' BiddersAccountsFilterSetsFilteredBidsList (Maybe Text)
bafsfblAccessToken
= lens _bafsfblAccessToken
(\ s a -> s{_bafsfblAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
bafsfblUploadType :: Lens' BiddersAccountsFilterSetsFilteredBidsList (Maybe Text)
bafsfblUploadType
= lens _bafsfblUploadType
(\ s a -> s{_bafsfblUploadType = a})
-- | A token identifying a page of results the server should return.
-- Typically, this is the value of ListFilteredBidsResponse.nextPageToken
-- returned from the previous call to the filteredBids.list method.
bafsfblPageToken :: Lens' BiddersAccountsFilterSetsFilteredBidsList (Maybe Text)
bafsfblPageToken
= lens _bafsfblPageToken
(\ s a -> s{_bafsfblPageToken = a})
-- | Requested page size. The server may return fewer results than requested.
-- If unspecified, the server will pick an appropriate default.
bafsfblPageSize :: Lens' BiddersAccountsFilterSetsFilteredBidsList (Maybe Int32)
bafsfblPageSize
= lens _bafsfblPageSize
(\ s a -> s{_bafsfblPageSize = a})
. mapping _Coerce
-- | JSONP
bafsfblCallback :: Lens' BiddersAccountsFilterSetsFilteredBidsList (Maybe Text)
bafsfblCallback
= lens _bafsfblCallback
(\ s a -> s{_bafsfblCallback = a})
instance GoogleRequest
BiddersAccountsFilterSetsFilteredBidsList
where
type Rs BiddersAccountsFilterSetsFilteredBidsList =
ListFilteredBidsResponse
type Scopes BiddersAccountsFilterSetsFilteredBidsList
=
'["https://www.googleapis.com/auth/adexchange.buyer"]
requestClient
BiddersAccountsFilterSetsFilteredBidsList'{..}
= go _bafsfblFilterSetName _bafsfblXgafv
_bafsfblUploadProtocol
_bafsfblAccessToken
_bafsfblUploadType
_bafsfblPageToken
_bafsfblPageSize
_bafsfblCallback
(Just AltJSON)
adExchangeBuyer2Service
where go
= buildClient
(Proxy ::
Proxy
BiddersAccountsFilterSetsFilteredBidsListResource)
mempty
|
brendanhay/gogol
|
gogol-adexchangebuyer2/gen/Network/Google/Resource/AdExchangeBuyer2/Bidders/Accounts/FilterSets/FilteredBids/List.hs
|
mpl-2.0
| 7,463 | 0 | 18 | 1,557 | 891 | 520 | 371 | 135 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.AndroidEnterprise.Users.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieves a user\'s details.
--
-- /See:/ <https://developers.google.com/android/work/play/emm-api Google Play EMM API Reference> for @androidenterprise.users.get@.
module Network.Google.Resource.AndroidEnterprise.Users.Get
(
-- * REST Resource
UsersGetResource
-- * Creating a Request
, usersGet
, UsersGet
-- * Request Lenses
, ugEnterpriseId
, ugUserId
) where
import Network.Google.AndroidEnterprise.Types
import Network.Google.Prelude
-- | A resource alias for @androidenterprise.users.get@ method which the
-- 'UsersGet' request conforms to.
type UsersGetResource =
"androidenterprise" :>
"v1" :>
"enterprises" :>
Capture "enterpriseId" Text :>
"users" :>
Capture "userId" Text :>
QueryParam "alt" AltJSON :> Get '[JSON] User
-- | Retrieves a user\'s details.
--
-- /See:/ 'usersGet' smart constructor.
data UsersGet = UsersGet'
{ _ugEnterpriseId :: !Text
, _ugUserId :: !Text
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'UsersGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ugEnterpriseId'
--
-- * 'ugUserId'
usersGet
:: Text -- ^ 'ugEnterpriseId'
-> Text -- ^ 'ugUserId'
-> UsersGet
usersGet pUgEnterpriseId_ pUgUserId_ =
UsersGet'
{ _ugEnterpriseId = pUgEnterpriseId_
, _ugUserId = pUgUserId_
}
-- | The ID of the enterprise.
ugEnterpriseId :: Lens' UsersGet Text
ugEnterpriseId
= lens _ugEnterpriseId
(\ s a -> s{_ugEnterpriseId = a})
-- | The ID of the user.
ugUserId :: Lens' UsersGet Text
ugUserId = lens _ugUserId (\ s a -> s{_ugUserId = a})
instance GoogleRequest UsersGet where
type Rs UsersGet = User
type Scopes UsersGet =
'["https://www.googleapis.com/auth/androidenterprise"]
requestClient UsersGet'{..}
= go _ugEnterpriseId _ugUserId (Just AltJSON)
androidEnterpriseService
where go
= buildClient (Proxy :: Proxy UsersGetResource)
mempty
|
rueshyna/gogol
|
gogol-android-enterprise/gen/Network/Google/Resource/AndroidEnterprise/Users/Get.hs
|
mpl-2.0
| 2,962 | 0 | 14 | 712 | 381 | 229 | 152 | 61 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Run.Projects.Locations.Routes.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Get information about a route.
--
-- /See:/ <https://cloud.google.com/run/ Cloud Run Admin API Reference> for @run.projects.locations.routes.get@.
module Network.Google.Resource.Run.Projects.Locations.Routes.Get
(
-- * REST Resource
ProjectsLocationsRoutesGetResource
-- * Creating a Request
, projectsLocationsRoutesGet
, ProjectsLocationsRoutesGet
-- * Request Lenses
, plrgXgafv
, plrgUploadProtocol
, plrgAccessToken
, plrgUploadType
, plrgName
, plrgCallback
) where
import Network.Google.Prelude
import Network.Google.Run.Types
-- | A resource alias for @run.projects.locations.routes.get@ method which the
-- 'ProjectsLocationsRoutesGet' request conforms to.
type ProjectsLocationsRoutesGetResource =
"v1" :>
Capture "name" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :> Get '[JSON] Route
-- | Get information about a route.
--
-- /See:/ 'projectsLocationsRoutesGet' smart constructor.
data ProjectsLocationsRoutesGet =
ProjectsLocationsRoutesGet'
{ _plrgXgafv :: !(Maybe Xgafv)
, _plrgUploadProtocol :: !(Maybe Text)
, _plrgAccessToken :: !(Maybe Text)
, _plrgUploadType :: !(Maybe Text)
, _plrgName :: !Text
, _plrgCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsLocationsRoutesGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'plrgXgafv'
--
-- * 'plrgUploadProtocol'
--
-- * 'plrgAccessToken'
--
-- * 'plrgUploadType'
--
-- * 'plrgName'
--
-- * 'plrgCallback'
projectsLocationsRoutesGet
:: Text -- ^ 'plrgName'
-> ProjectsLocationsRoutesGet
projectsLocationsRoutesGet pPlrgName_ =
ProjectsLocationsRoutesGet'
{ _plrgXgafv = Nothing
, _plrgUploadProtocol = Nothing
, _plrgAccessToken = Nothing
, _plrgUploadType = Nothing
, _plrgName = pPlrgName_
, _plrgCallback = Nothing
}
-- | V1 error format.
plrgXgafv :: Lens' ProjectsLocationsRoutesGet (Maybe Xgafv)
plrgXgafv
= lens _plrgXgafv (\ s a -> s{_plrgXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
plrgUploadProtocol :: Lens' ProjectsLocationsRoutesGet (Maybe Text)
plrgUploadProtocol
= lens _plrgUploadProtocol
(\ s a -> s{_plrgUploadProtocol = a})
-- | OAuth access token.
plrgAccessToken :: Lens' ProjectsLocationsRoutesGet (Maybe Text)
plrgAccessToken
= lens _plrgAccessToken
(\ s a -> s{_plrgAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
plrgUploadType :: Lens' ProjectsLocationsRoutesGet (Maybe Text)
plrgUploadType
= lens _plrgUploadType
(\ s a -> s{_plrgUploadType = a})
-- | The name of the route to retrieve. For Cloud Run (fully managed),
-- replace {namespace_id} with the project ID or number.
plrgName :: Lens' ProjectsLocationsRoutesGet Text
plrgName = lens _plrgName (\ s a -> s{_plrgName = a})
-- | JSONP
plrgCallback :: Lens' ProjectsLocationsRoutesGet (Maybe Text)
plrgCallback
= lens _plrgCallback (\ s a -> s{_plrgCallback = a})
instance GoogleRequest ProjectsLocationsRoutesGet
where
type Rs ProjectsLocationsRoutesGet = Route
type Scopes ProjectsLocationsRoutesGet =
'["https://www.googleapis.com/auth/cloud-platform"]
requestClient ProjectsLocationsRoutesGet'{..}
= go _plrgName _plrgXgafv _plrgUploadProtocol
_plrgAccessToken
_plrgUploadType
_plrgCallback
(Just AltJSON)
runService
where go
= buildClient
(Proxy :: Proxy ProjectsLocationsRoutesGetResource)
mempty
|
brendanhay/gogol
|
gogol-run/gen/Network/Google/Resource/Run/Projects/Locations/Routes/Get.hs
|
mpl-2.0
| 4,765 | 0 | 15 | 1,049 | 697 | 408 | 289 | 101 | 1 |
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE AllowAmbiguousTypes #-}
{-# LANGUAGE ConstraintKinds #-}
-- The following is needed to define MonadPlus instance. It is decidable
-- (there is no recursion!), but GHC cannot see that.
{-# LANGUAGE UndecidableInstances #-}
-- Demo of Extensible effects
-- This is the real implementation, but stripped of INLINE pragmas and other
-- performance-inducing stuff
-- There are several implementations in Haskell (see Hackage, including
-- packaging of the implementation to be presented). There are at least
-- two implementations in Scala. Extensible effects in PureScript is
-- the defining feature of the language.
-- There is even an implementation of extensible effects in Coq!
module Tutorial3_Orig where
import Control.Monad
import Control.Applicative
import OpenUnion52
import FTCQueue1
-- Example of the Drunken coin by Sam Lindley
-- Non-determinism effect
data Choice a where
Choice :: Choice Bool
-- See the inferred type
coinflip = E (inj Choice) (tsingleton Val)
data Failure a where
Failure :: Failure a
-- Abbreviating the E inj ... pattern
failure = send Failure
-- Explicit signature. Try to forget Failure, and see what happens
choose :: (Member Failure r, Member Choice r) => [a] -> Eff r a
choose = foldr (\x l -> do { h <- coinflip; if h then return x else l}) failure
-- Drunken coin example
drunkencoin = do
f <- coinflip
if f then choose [True,False]
else failure
drunkencoins n = sequence $ replicate n drunkencoin
-- See the inferred types
-- Interpreters
-- The type of run ensures that all effects must be handled:
-- only pure computations may be run.
run :: Eff '[] w -> w
run (Val x) = x
-- the other case is unreachable since Union [] a cannot be
-- constructed.
-- Therefore, run is a total function if its argument terminates.
-- First, obtain all solutions
{-
-- Inferred type
-- runChooseAll :: Eff (Choice ': r) t -> Eff r [t]
runChooseAll (Val x) = return [x]
-- Exercise: use alternative
-- Using the handle-relay abbreviation
-- runFailure = undefined handle_relay
-- Closed handler
runFailureClosed :: Eff '[Failure] a -> Maybe a
runFailureClosed (Val x) = Just x
runFailureClosed (E u q) = case decomp u of
Right Failure -> Nothing
-- Nothing more happens
-- See the inferred type and the results of the following expressions
-- Hrm, Haskelll...
-- _ = runChooseAll $ drunkencoins 3
_ = runFailure . runChooseAll $ drunkencoins 3
_ = run . runFailure . runChooseAll $ drunkencoins 3
_ = runFailureClosed . runChooseAll $ drunkencoins 3
_ = run . runChooseAll . runFailure $ drunkencoins 3
-- Handle both effects at the same time
runChooseAllF (Val x) = return [x]
runChooseAllF (E u q) = case decomp u of
Right Choice -> (++) <$>
runChooseAllF (qApp q True) <*>
runChooseAllF (qApp q False)
Left u -> case decomp u of
Right Failure -> return []
Left u -> E u (qComps q runChooseAllF)
_ = run . runChooseAllF $ drunkencoins 3
-- Use external source of non-determinism
runChooseSome :: Member Choice r => Eff (Choice ': r) a -> Eff r a
runChooseSome = handle_relay return (\Choice k -> k =<< coinflip)
-- What is the difference from the above? See the signature!
runChooseSome1 = interpose return (\Choice k -> k =<< coinflip)
-- The statement of the equational law
eq e = runChooseSome1 e `asTypeOf` e
runChooseSome11 = interpose return (\Choice k -> k =<< fmap not coinflip)
-- Now we clearly see the result of the superposition
_ = run . runChooseAllF . runChooseSome11 $ drunkencoins 3
-- ------------------------------------------------------------------------
-- Committed Choice
-- Soft-cut: non-deterministic if-then-else, aka Prolog's *->
-- Declaratively,
-- ifte t th el = (t >>= th) `mplus` ((not t) >> el)
-- However, t is evaluated only once. In other words, ifte t th el
-- is equivalent to t >>= th if t has at least one solution.
-- If t fails, ifte t th el is the same as el.
-- We actually implement LogicT, the non-determinism reflection,
-- of which soft-cut is one instance.
-- See the LogicT paper for an explanation
msplit :: (Member Choice r, Member Failure r) =>
Eff r a -> Eff r (Maybe (a, Eff r a))
msplit = loop []
where
-- single result
loop [] (Val x) = return (Just (x,mzero))
-- definite result and perhaps some others
loop jq (Val x) = return (Just (x, msum jq))
-- not yet definite answer
loop jq (E u q) | Just Failure <- prj u =
case jq of
               -- no further choices
[] -> return Nothing
-- other choices remain, try them
(j:jq) -> loop jq j
loop jq (E u q) = case prj u of
Just Choice -> loop ((qApp q False):jq) (qApp q True)
_ -> E u (qComps q (loop jq))
type NonDet r = (Member Choice r, Member Failure r)
-- Other committed choice primitives can be implemented in terms of msplit
-- The following implementations are directly from the LogicT paper
ifte :: NonDet r => Eff r a -> (a -> Eff r b) -> Eff r b -> Eff r b
ifte t th el = msplit t >>= check
where check Nothing = el
check (Just (sg1,sg2)) = (th sg1) `mplus` (sg2 >>= th)
once :: NonDet r => Eff r a -> Eff r a
once m = msplit m >>= check
where check Nothing = mzero
check (Just (sg1,_)) = return sg1
instance NonDet r => Alternative (Eff r) where
empty = failure
m1 <|> m2 = do {x <- coinflip; if x then m1 else m2}
instance NonDet r => MonadPlus (Eff r) where
mzero = empty
mplus = (<|>)
-- primes (very inefficiently -- but a good example of ifte)
test_ifte = do
n <- gen
ifte (do
d <- gen
guard $ d < n && n `mod` d == 0
-- _ <- trace ("d: " ++ show d) (return ())
)
(\_->mzero)
(return n)
where gen = msum . fmap return $ [2..30]
test_ifte_run :: [Int]
test_ifte_run = run . runChooseAllF $ test_ifte
-- [2,3,5,7,11,13,17,19,23,29]
-}
-- -----------------------------------------------------------------------
-- Open Union interface
{-
type Union (r :: [* -> *]) a
class Member (t :: k) (r :: [k])
inj :: Member t r => t v -> Union r v
prj :: Member t r => Union r v -> Maybe (t v)
decomp :: Union (t:r) v -> Either (Union r v) (t v)
The type of inj/prj really shows the union as a (multi)set.
decomp imposes the ordering. Dissatisfaction. What we really need
is something like local instances with closure semantics.
And Haskell almost has what we need! (Implicit parameters).
-}
-- ------------------------------------------------------------------------
-- A monadic library for communication between a handler and
-- its client, the administered computation
data Eff r a where -- Existential, not full GADT
Val :: a -> Eff r a
E :: Union r x -> Arrs r x a -> Eff r a
-- Effectful arrow type: a function from a to b that also does effects
-- denoted by r
type Arr r a b = a -> Eff r b
-- An effectful function from 'a' to 'b' that is a composition
-- of several effectful functions. The parameter r describes the overall
-- effect.
-- The composition members are accumulated in a type-aligned queue
type Arrs r a b = FTCQueue (Eff r) a b
-- Application to the `generalized effectful function' Arrs r b w
-- A bit more understandable version
qApp :: Arrs r b w -> b -> Eff r w
qApp q x = case tviewl q of
TOne k -> k x
k :| t -> bind' (k x) t
where
bind' :: Eff r a -> Arrs r a b -> Eff r b
bind' (Val y) k = qApp k y
bind' (E u q) k = E u (q >< k)
-- Eff is still a monad and a functor (and Applicative)
-- (despite the lack of the Functor constraint)
instance Functor (Eff r) where
fmap f (Val x) = Val (f x)
fmap f (E u q) = E u (q |> (Val . f)) -- does no mapping yet!
instance Applicative (Eff r) where
pure = Val
Val f <*> Val x = Val $ f x
Val f <*> E u q = E u (q |> (Val . f))
E u q <*> Val x = E u (q |> (Val . ($ x)))
E u q <*> m = E u (q |> (`fmap` m))
instance Monad (Eff r) where
return x = Val x
Val x >>= k = k x
E u q >>= k = E u (q |> k) -- just accumulates continuations
-- send a request and wait for a reply
send :: Member t r => t v -> Eff r v
send t = E (inj t) (tsingleton Val)
-- This seems to be a very beneficial rule! On micro-benchmarks, cuts
-- the needed memory in half and speeds up almost twice.
{-# RULES
"send/bind" [~3] forall t k. send t >>= k = E (inj t) (tsingleton k)
#-}
-- A convenient pattern: given a request (open union), either
-- handle it or relay it.
handle_relay :: (a -> Eff r w) ->
(forall v. t v -> Arr r v w -> Eff r w) ->
Eff (t ': r) a -> Eff r w
handle_relay ret h m = loop m
where
loop (Val x) = ret x
loop (E u q) = case decomp u of
Right x -> h x k
Left u -> E u (tsingleton k)
where k = qComp q loop
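-- A possible concrete instance of handle_relay (an editorial sketch, not part
-- of the original tutorial): interpreting the Failure effect above into Maybe,
-- succeeding with Just for pure values and answering any Failure request with
-- Nothing (the captured continuation is discarded).
runFailureMaybe :: Eff (Failure ': r) a -> Eff r (Maybe a)
runFailureMaybe = handle_relay (return . Just) (\Failure _k -> return Nothing)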
-- Add something like Control.Exception.catches? It could be useful
-- for control with cut.
-- Intercept the request and possibly reply to it, but leave it unhandled
-- (that's why we use the same r all throughout)
{-# INLINE interpose #-}
interpose :: Member t r =>
(a -> Eff r w) -> (forall v. t v -> Arr r v w -> Eff r w) ->
Eff r a -> Eff r w
interpose ret h m = loop m
where
loop (Val x) = ret x
loop (E u q) = case prj u of
Just x -> h x k
_ -> E u (tsingleton k)
where k = qComp q loop
-- Compose effectful arrows (and possibly change the effect!)
qComp :: Arrs r a b -> (Eff r b -> Eff r' c) -> Arr r' a c
-- qComp g h = (h . (g `qApp`))
qComp g h = \a -> h $ qApp g a
qComps :: Arrs r a b -> (Eff r b -> Eff r' c) -> Arrs r' a c
qComps g h = tsingleton $ \a -> h $ qApp g a
|
haroldcarr/learn-haskell-coq-ml-etc
|
haskell/conference/2017-09-cufp-effects/src/Tutorial3_Orig.hs
|
unlicense
| 9,898 | 0 | 12 | 2,404 | 1,408 | 732 | 676 | 83 | 3 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
module Lib
( scottyMain
)
where
import Data.Aeson (FromJSON, ToJSON)
import Data.Monoid (mconcat)
import GHC.Generics
import Web.Scotty
data User = User { userId :: Int, userName :: String } deriving (Eq, Generic, Show)
bob = User 1 "bob"
jenny = User 2 "jenny"
allUsers = [bob, jenny]
instance ToJSON User
instance FromJSON User
scottyMain :: IO ()
scottyMain = scotty 3000 $ do
get "/users" $ do
json allUsers
get "/users/:id" $ do
id <- param "id"
json $ filter ((==id) . userId) allUsers
post "/reg" $ do
e <- param "email" `rescue` (const next)
html $ mconcat [ "ok ", e ]
get "/:word" $ do
beam <- param "word"
html $ mconcat ["<h1>Scotty, ", beam, " me up!</h1>"]
{-
curl http://127.0.0.1:3000/users
curl http://127.0.0.1:3000/users/1
curl -X POST http://127.0.0.1:3000/reg?email=foo
curl http://127.0.0.1:3000/JUNK
-}
|
haroldcarr/learn-haskell-coq-ml-etc
|
haskell/topic/web/scotty/2015-08-20-utah-haskell/src/Lib.hs
|
unlicense
| 1,031 | 0 | 15 | 279 | 294 | 151 | 143 | 27 | 1 |
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
-- {-# LANGUAGE KindSignatures #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE UndecidableSuperClasses #-}
module HC2 where
import GHC.Exts (Constraint)
import qualified Test.HUnit.Util as U (t, e)
-- p9 1.3.1 Vectors
data Nat = Zero | Suc Nat
-- KindSignatures DataKinds GADT
-- v v v v
data Vec (a :: *) (n :: Nat) where
-- DataKinds
-- v
VNil :: Vec a 'Zero
VCons :: a -> Vec a n -> Vec a ('Suc n)
infixr 5 `VCons`
-- StandaloneDeriving
deriving instance Eq a => Eq (Vec a n)
deriving instance Show a => Show (Vec a n)
type Two = 'Suc ('Suc 'Zero)
type Three = 'Suc Two
vbc :: Vec Char Two
vbc = 'b' `VCons` 'c' `VCons` VNil
vabc :: Vec Char Three
vabc = 'a' `VCons` vbc
vtail :: Vec a ('Suc n) -> Vec a n
vtail (VCons _ xs) = xs
vmap :: (a -> b) -> Vec a n -> Vec b n
vmap _ VNil = VNil
vmap f (VCons x xs) = f x `VCons` vmap f xs
-- no explicit [] case, so will get runtime error
xsum :: [Int] -> Int
xsum xs = head xs + xsum (tail xs)
txsum = U.e "txsum" (xsum [1,2,3]) "Prelude.head: empty list"
vhead :: Vec a ('Suc n) -> a
vhead (VCons x _) = x
-- vsum :: Vec Int ('Suc n) -> Int
-- vsum xs = vhead xs + vsum (vtail xs)
------------------------------------------------------------------------------
-- p11 1.4 Singleton types
data SNat (n :: Nat) where
SZero :: SNat 'Zero
SSuc :: SNatI n => SNat ('Suc n)
class SNatI (n :: Nat) where
sNat :: SNat n
instance SNatI 'Zero where
sNat = SZero
instance SNatI n => SNatI ('Suc n) where
sNat = SSuc
-- RankNTypes (or ExistentialQuantification, ...)
-- v
vreplicate :: forall a n . SNatI n => a -> Vec a n
-- ScopedTypeVariables
-- v
vreplicate x = case sNat :: SNat n of -- choice of sNat to run at runtime is made via type at compile time
SZero -> VNil
SSuc -> x `VCons` vreplicate x
vr3 :: Vec Char Three
vr3 = vreplicate 'x'
tvr3 = U.t "tvr3" vr3 (VCons 'x' (VCons 'x' (VCons 'x' VNil)))
-- p12 1.4.2 Applicative vectors
vapply :: Vec (a -> b) n -> Vec a n -> Vec b n
vapply VNil VNil = VNil
vapply (f `VCons` fs) (x `VCons` xs) = f x `VCons` vapply fs xs
va :: Vec Integer ('Suc ('Suc ('Suc 'Zero)))
va = ((2*) `VCons` (5*) `VCons` (9*) `VCons` VNil) `vapply`
( 1 `VCons` 4 `VCons` 7 `VCons` VNil)
tva = U.t "tva" va
( 2 `VCons` 20 `VCons` 63 `VCons` VNil)
------------------------------------------------------------------------------
-- p13 1.5 Heterogeneous lists
-- TODO : when/where are values, length and types handled?
data HList (xs :: [*]) where
HNil :: HList '[]
HCons :: x -> HList xs -> HList (x ': xs)
-- ^
-- TypeOperators
infixr 5 `HCons`
hhead :: HList (x ': xs) -> x
hhead (x `HCons` _) = x
htail :: HList (x ': xs) -> HList xs
htail (_ `HCons` xs) = xs
-- sig can be inferred
group :: HList '[Char, Bool, Int]
group = 'x' `HCons` False `HCons` 3 `HCons` HNil
-- p14 1.5.2 NP: n-ary products (aka "Environments")
-- PolyKinds
-- v
data NP (f :: k -> *) (xs :: [k]) where
Nil :: NP f '[]
(:*) :: f x -> NP f xs -> NP f (x ': xs)
infixr 5 :*
-- identity function on types
newtype I a = I { unI :: a } deriving (Eq, Read, Show)
-- p15
fromHList :: HList xs -> NP I xs
fromHList HNil = Nil
fromHList (x `HCons` xs) = I x :* fromHList xs
toHList :: NP I xs -> HList xs
toHList Nil = HNil
toHList (I x :* xs) = x `HCons` toHList xs
groupNPI :: NP I '[Char, Bool, Int]
groupNPI = fromHList group
gnpi = U.t "gnpi" groupNPI (I {unI = 'x'} :* I {unI = False} :* I {unI = 3} :* Nil)
groupHL :: HList '[Char, Bool, Int]
groupHL = toHList groupNPI
ghl = U.t "ghl" groupHL ('x' `HCons` False `HCons` 3 `HCons` HNil)
groupNPM :: NP Maybe '[Char, Bool, Int]
groupNPM = Just 'x' :* Just False :* Just (3::Int) :* Nil
j3 :: Num x => NP Maybe '[x]
j3 = Just 3 :* Nil
-- constant function on types
-- for any types a b, K a b is isomorphic to a
newtype K a b = K {unK :: a} deriving (Eq, Read, Show)
-- useful: NP of K into normal list
hcollapse :: NP (K a) xs -> [a]
hcollapse Nil = []
hcollapse (K x :* xs) = x : hcollapse xs
groupNPK :: NP (K Char) '[Char, Bool, Int]
groupNPK = K 'x' :* K 'y' :* K 'z' :* Nil
gchar = U.t "gchar" (hcollapse groupNPK) "xyz"
k2 :: K Integer b
k2 = K 2
groupK2 = k2 :* k2 :* k2 :* Nil
g2 = U.t "g2" (hcollapse groupK2) [2,2,2]
xx :: NP ((->) a) '[K a b]
xx = K :* Nil
xxx :: NP ((->) a) '[K a b1, K a b2]
xxx = K :* K :* Nil
------------------------------------------------------------------------------
-- p15 1.6 Higher-rank types
-- The f arg to `hmap` must be polymorphic
-- Rank2Types
-- v
hmap :: (forall x . f x -> g x) -> NP f xs -> NP g xs
hmap _ Nil = Nil
hmap m (x :* xs) = m x :* hmap m xs
groupI :: NP I '[Char, Bool, Integer]
groupI = I 'x' :* I False :* I 3 :* Nil
-- p17
example :: NP Maybe '[Char, Bool, Integer]
example = hmap (Just . unI) groupI
exj = U.t "exj" example (Just 'x' :* Just False :* Just 3 :* Nil)
-- p17 1.6.2 Applicative n-ary products
data SList (xs :: [k]) where
SNil :: SList '[]
SCons :: SListI xs => SList (x ': xs)
class SListI (xs :: [k]) where
  -- | Get the explicit singleton --- which one can then pattern match on
sList :: SList xs
instance SListI '[] where
sList = SNil
instance SListI xs => SListI (x ': xs) where
sList = SCons
hpure :: forall f xs . SListI xs => (forall a . f a) -> NP f xs
hpure x = case sList :: SList xs of
SNil -> Nil
SCons -> x :* hpure x
-- p18
hpn = U.t "hpn" (hpure Nothing :: NP Maybe '[Char, Bool, Int])
(Nothing :* Nothing :* Nothing :* Nil)
hpk = U.t "hpk" (hpure (K 0) :: NP (K Int) '[Char, Bool, Int])
(K {unK = 0} :* K {unK = 0} :* K {unK = 0} :* Nil)
-- p18 1.6.3 Lifted functions
newtype (f -.-> g) a = Fn { apFn :: f a -> g a }
infix 1 -.->
hap :: NP (f -.-> g) xs -> NP f xs -> NP g xs
hap Nil Nil = Nil
hap (f :* fs) (x :* xs) = apFn f x :* hap fs xs
lists :: NP [] '[String, Int]
lists = ["foo", "bar", "baz"] :* [1 .. ] :* Nil
numbers :: NP (K Int) '[String, Int]
numbers = K 2 :* K 5 :* Nil
{-# ANN fn_2 "HLint: ignore Avoid lambda" #-}
fn_2 :: (f a -> f' a -> f'' a)
-> (f -.-> (f' -.->f'')) a
fn_2 f = Fn (\x -> Fn (\y -> f x y))
take' :: (K Int -.-> ([] -.->[])) a
take' = fn_2 (\(K n) xs -> take n xs)
-- the Ints from numbers are used as the `(K n)` in take'
-- the first number, 2, takes 2 from the list of String
-- the second number, 5, takes 5 from the infinite list on Int
hpt = U.t "hpt" (hpure take' `hap` numbers `hap` lists)
(["foo","bar"] :* [1,2,3,4,5] :* Nil)
-- p19 1.6.4 Another look at `hmap`
hmap' :: SListI xs => (forall a . f a -> g a) -> NP f xs -> NP g xs
hmap' f xs = hpure (Fn f) `hap` xs
hmi = U.t "hmi'" (hmap (Just . unI) groupI)
(hmap' (Just . unI) groupI)
------------------------------------------------------------------------------
-- p 19 1.7 Abstracting from class and type functions
-- p19 1.7.1 The kind `Constraint`
-- GHC.Exts (Constraint)
-- ConstraintKinds
-- v
type NoConstraint = (() :: Constraint)
type SomeConstraints a = (Eq a, Show a)
type MoreConstraints f a = (Monad f, SomeConstraints a)
-- p20 1.7.2 Type functions
-- TypeFamilies
-- v
type family All (c :: k -> Constraint) (xs :: [k]) :: Constraint where
All c '[] = ()
All c (x ': xs) = (c x, All c xs)
hToString :: All Show xs => HList xs -> String
hToString HNil = ""
hToString (HCons x xs) = show x ++ hToString xs
htos = U.t "htos" (hToString group) "'x'False3"
-- p21 1.7.3 Composing constraints
-- UndecidableSuperClasses
-- UndecidableInstances
-- MultiParamTypeClasses
-- v v
class (f (g x)) => (f `Compose` g) x
-- FlexibleInstances
-- v
instance (f (g x)) => (f `Compose` g) x
infixr 9 `Compose`
hToString' :: All (Show `Compose` f) xs => NP f xs -> String
hToString' Nil = ""
hToString' (x :* xs) = show x ++ hToString' xs
htos' = U.t "htos'" (hToString' groupNPM) "Just 'x'Just FalseJust 3"
sgnpm = U.t "sgnpm" (show groupNPM) "Just 'x' :* Just False :* Just 3 :* Nil"
-- auto derived version would print : "Just 'x' :* (Just False :* (Just 3 :* Nil))"
-- p21 1.7.4 Proxies
-- works for args of any kind (e.g., * -> Constraint)
data Proxy (a :: k) = Proxy
hcpure :: forall c f xs . (All c xs, SListI xs)
=> Proxy c -> (forall a . c a => f a) -> NP f xs
hcpure p x = case sList :: SList xs of
SNil -> Nil
SCons -> x :* hcpure p x
hcp1 = U.t "hcp0" (hcpure (Proxy :: Proxy Bounded) (I minBound) :: NP I '[Char, Bool])
(I {unI = '\NUL'} :* (I {unI = False} :* Nil))
hcp2 :: NP (K String) '[Char, Bool, Integer] -- inferred
hcp2 = hcpure (Proxy :: Proxy Show) (Fn (K . show . unI)) `hap` groupI
hcp2t = U.t "hcp2t" hcp2 (K {unK = "'x'"} :* K {unK = "False"} :* K {unK = "3"} :* Nil)
hcmap :: (SListI xs, All c xs)
=> Proxy c -> (forall a . c a => f a -> g a) -> NP f xs -> NP g xs
hcmap p f xs = hcpure p (Fn f) `hap` xs
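-- An editorial usage sketch (not in the original notes): hcmap with the Show
-- constraint over groupI should coincide with hcp2 above.
hcm2 :: NP (K String) '[Char, Bool, Integer]
hcm2 = hcmap (Proxy :: Proxy Show) (K . show . unI) groupI
hcm2t = U.t "hcm2t" hcm2 hcp2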
------------------------------------------------------------------------------
deriving instance Eq (SList (xs :: [k]))
deriving instance Ord (SList (xs :: [k]))
deriving instance Show (SList (xs :: [k]))
-- manual, because built-in deriving does not use associativity info
instance All (Show `Compose` f) xs => Show (NP f xs) where
showsPrec _ Nil = showString "Nil"
showsPrec d (f :* fs) = showParen (d > 5)
$ showsPrec (5 + 1) f
. showString " :* "
. showsPrec 5 fs
deriving instance All (Eq `Compose` f) xs => Eq (NP f xs)
instance All Show xs => Show (HList xs) where
showsPrec _ HNil = showString "HNil"
showsPrec d (x `HCons` xs) = showParen (d > 5)
$ showsPrec (5 + 1) x
. showString " `HCons` "
. showsPrec 5 xs
deriving instance All Eq xs => Eq (HList xs)
|
haroldcarr/learn-haskell-coq-ml-etc
|
haskell/topic/type-level/2018-02-andres-loh-type-level-and-generic/src/HC2.hs
|
unlicense
| 10,850 | 7 | 12 | 3,165 | 4,134 | 2,247 | 1,887 | -1 | -1 |
{-# LANGUAGE RecordWildCards #-}
module Sprockell where
import Data.Bits
import Data.Maybe
import Debug.Trace
import BasicFunctions
import HardwareTypes
{-------------------------------------------------------------
| SPROCKELL: Simple PROCessor in hasKELL :-)
|
| Initial definition: October 2012, Jan Kuper ([email protected])
| Extensions: June 2015, Martijn Bastiaan, Arjan Boeijink, Jan Kuper, Leon Schoorl
| Simplification: January 2016, Jan Kuper
-------------------------------------------------------------}
-- =====================================================================================
-- sprockell: combines the separate components defined below
-- instrs : list of instructions to be executed
-- sprState: state of the sprockell, containing pc, sp, registers, local memory
-- reply : input from shared memory (Maybe type)
-- request : output to shared memory
-- =====================================================================================
sprockell :: InstructionMem -> SprockellState -> Reply -> (SprockellState, Request)
sprockell instrs sprState reply = (sprState', request)
where
SprState{..} = sprState
MachCode{..} = decode (instrs!pc)
(x,y) = (regbank!regX , regbank!regY)
aluOutput = alu aluCode x y
pc' = nextPC branch tgtCode (x,reply) (pc,immValue,y)
sp' = nextSP spCode sp
address = agu aguCode (addrImm,x,sp)
loadValue = load ldCode (immValue, aluOutput, localMem!address, pc, reply)
regbank' = regbank <~! (loadReg, loadValue)
localMem' = store localMem stCode (address,y)
sprState' = SprState {pc=pc', sp=sp', regbank=regbank', localMem=localMem'}
request = sendOut ioCode address y
-- =====================================================================================
-- decode function + default machine code (nullcode): to generate machine code from an instruction
-- =====================================================================================
nullcode :: MachCode
nullcode = MachCode
{ ldCode = LdImm
, stCode = StNone
, aguCode = AguDir
, branch = False
, tgtCode = NoJump
, spCode = Flat
, aluCode = Or
, ioCode = IONone
, immValue = 0
, regX = 0
, regY = 0
, loadReg = 0
, addrImm = 0
}
-- ============================
decode :: Instruction -> MachCode
decode instr = case instr of
Compute c rx ry toReg -> nullcode {ldCode=LdAlu, aluCode=c, regX=rx, regY=ry, loadReg=toReg}
Jump target -> case target of
Abs n -> nullcode {tgtCode=TAbs, immValue=n}
Rel n -> nullcode {tgtCode=TRel, immValue=n}
Ind r -> nullcode {tgtCode=TInd, regY=r}
Branch cReg target -> case target of
Abs n -> nullcode {branch=True, tgtCode=TAbs, regX=cReg, immValue=n}
Rel n -> nullcode {branch=True, tgtCode=TRel, regX=cReg, immValue=n}
Ind r -> nullcode {branch=True, tgtCode=TInd, regX=cReg, regY=r}
Load memAddr toReg -> case memAddr of
ImmValue n -> nullcode {loadReg=toReg, ldCode=LdImm, immValue=n}
DirAddr a -> nullcode {loadReg=toReg, ldCode=LdMem, aguCode=AguDir, addrImm=a}
IndAddr p -> nullcode {loadReg=toReg, ldCode=LdMem, aguCode=AguInd, regX=p}
Store fromReg memAddr -> case memAddr of
ImmValue n -> nullcode -- Undefined. Should not occur.
DirAddr a -> nullcode {stCode=StMem, regY=fromReg, ldCode=LdMem, aguCode=AguDir, addrImm=a}
IndAddr p -> nullcode {stCode=StMem, regY=fromReg, ldCode=LdMem, aguCode=AguInd, regX=p}
Push fromReg -> nullcode {stCode=StMem, regY=fromReg, aguCode=AguPush, spCode=Down}
Pop toReg -> nullcode {ldCode=LdMem, loadReg=toReg, aguCode=AguPop, spCode=Up}
Receive toReg -> nullcode {ldCode=LdInp, tgtCode=Waiting, loadReg=toReg}
ReadInstr memAddr -> case memAddr of
ImmValue n -> nullcode -- undefined
DirAddr a -> nullcode {ioCode=IORead, ldCode=LdMem, aguCode=AguDir, addrImm=a}
IndAddr p -> nullcode {ioCode=IORead, ldCode=LdMem, aguCode=AguInd, regX=p}
WriteInstr fromReg memAddr -> case memAddr of
ImmValue n -> nullcode -- undefined
DirAddr a -> nullcode {ioCode=IOWrite, regY=fromReg, ldCode=LdMem, aguCode=AguDir, addrImm=a}
IndAddr p -> nullcode {ioCode=IOWrite, regY=fromReg, ldCode=LdMem, aguCode=AguInd, regX=p}
TestAndSet memAddr -> case memAddr of
ImmValue n -> nullcode -- undefined
DirAddr a -> nullcode {ioCode=IOTest, ldCode=LdMem, aguCode=AguDir, addrImm=a}
IndAddr p -> nullcode {ioCode=IOTest, ldCode=LdMem, aguCode=AguInd, regX=p}
EndProg -> nullcode {tgtCode=TRel, immValue=0}
Nop -> nullcode
Debug _ -> nullcode -- only for development purposes
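-- An editorial worked example (not in the original source): decoding a single
-- Compute instruction only overrides the ALU-related fields of nullcode, e.g.
--
--   decode (Compute Add 2 3 4)
--     == nullcode {ldCode=LdAlu, aluCode=Add, regX=2, regY=3, loadReg=4}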
{- ===============================================================
Meaning registers regX and regY (containing x and y, respectively)
==================================================================
| regX - x | regY - y |
-----------------------------------------------------------------
Compute c rx ry toReg | 1st arg rx | 2nd arg ry |
LdConst n toReg | - | - |
Load (DirAddr a) toReg | - | - |
Load (Defer p) toReg | deref-reg p | - |
| | |
Jump (Abs n) | - | - |
- (Rel n) | - | - |
- (Ind r) | - | ind reg r |
Branch cReg (Abs n) | cond-reg cReg | - |
- - (Rel n) | cond-reg cReg | - |
- - (Ind r) | cond-reg cReg | ind reg r |
| | |
Store fromReg (DirAddr a) | - | fromReg |
- - (IndAddr p) | deref-reg p | fromReg |
| | |
Push fromReg | - | fromReg |
Pop toReg | - | - |
| | |
Receive toReg | - | - |
| | |
Read (DirAddr a) | - | - |
- (IndAddr p) | deref-reg p | - |
| | |
TestAndSet (DirAddr a) | - | - |
- (IndAddr p) | deref-reg p | - |
| | |
Write fromReg (DirAddr a) | - | fromReg |
- - (IndAddr p) | deref-addr p | fromReg |
=============================================================== -}
-- =====================================================================================
-- alu (Arithmetic-Logic Unit): defines the computational functionality
-- =====================================================================================
alu :: Operator -> Value -> Value -> Value
alu op x y = case op of
Incr -> x + 1
Decr -> x - 1
Add -> x + y
Sub -> x - y
Mul -> x * y
Equal -> intBool (x == y)
NEq -> intBool (x /= y)
Gt -> intBool (x > y)
GtE -> intBool (x >= y)
Lt -> intBool (x < y)
LtE -> intBool (x <= y)
And -> x .&. y
Or -> x .|. y
LShift -> shiftL x (fromIntegral y)
RShift -> shiftR x (fromIntegral y)
Xor -> x `xor` y
-- Div -> x `div` y -- usable in Haskell, but expensive on hardware
-- Mod -> x `mod` y -- Ibid
-- =====================================================================================
-- agu (Address Generation Unit): calculates the address for local memory
-- =====================================================================================
agu :: AguCode -> (MemAddr,MemAddr,MemAddr) -> MemAddr
agu aguCode (addrImm,x,sp) = case aguCode of
AguDir -> addrImm
AguInd -> x
AguPush -> sp-1
AguPop -> sp
-- =====================================================================================
-- load: calculates the value that has to be put in a register
-- =====================================================================================
load :: LdCode -> (Value, Value, Value, Value, Reply) -> Value
load ldCode (immval,aluOutput,memval,pc,reply) = case (ldCode, reply) of
(LdImm, Nothing) -> immval
(LdAlu, Nothing) -> aluOutput
(LdMem, Nothing) -> memval
(LdPC , Nothing) -> pc
(LdInp, Just rx) -> rx
(LdInp, Nothing) -> 0
(_ , Just rx) -> error ("Sprockell ignored a system response of value: " ++ show rx)
-- =====================================================================================
-- store: to store data in local memory
-- =====================================================================================
store :: LocalMem -> StCode -> (MemAddr, Value) -> LocalMem
store mem stCode (address,value) = case stCode of
StNone -> mem
StMem -> mem <~! (address, value)
-- =====================================================================================
-- nextPC: to calculate next program counter
-- =====================================================================================
nextPC :: Bool -> TargetCode -> (Value,Reply) -> (Value,Value,Value) -> Value
nextPC branch tgtCode (x,reply) (pc,n,y) =
case (branch, tgtCode, x/=0, reply ) of
( True , TAbs , True, _ ) -> n
( True , TRel , True, _ ) -> pc + n
( True , TInd , True, _ ) -> y
( False, TAbs , _ , _ ) -> n
( False, TRel , _ , _ ) -> pc + n
( False, TInd , _ , _ ) -> y
( False, Waiting, _ , Nothing ) -> pc
( _ , _ , _ , _ ) -> pc + 1
-- =====================================================================================
-- nextSP: to calculate next stack pointer
-- =====================================================================================
nextSP :: SPCode -> MemAddr -> MemAddr
nextSP spCode sp = case spCode of
Down -> sp-1
Flat -> sp
Up -> sp+1
-- =====================================================================================
-- sendOut: to calculate output request to shared memory
-- =====================================================================================
sendOut :: IOCode -> MemAddr -> Value -> Request
sendOut ioCode address value = case ioCode of
IONone -> NoRequest
IORead -> ReadReq address
IOWrite -> WriteReq value address
IOTest -> TestReq address
|
wouwouwou/2017_module_8
|
src/haskell/PP-project-2016/lib/sprockell/Sprockell.hs
|
apache-2.0
| 12,154 | 0 | 12 | 4,420 | 2,262 | 1,297 | 965 | 132 | 28 |
{-
Copyright 2019 The CodeWorld Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
program = simulationOf(step, picture)
step((x, y), dt) = (x - y * dt, y + x * dt)
picture(x, y) = translated(rectangle(1, 1), x, y)
|
pranjaltale16/codeworld
|
codeworld-compiler/test/testcase/test_simulation/source.hs
|
apache-2.0
| 754 | 0 | 8 | 152 | 93 | 53 | 40 | 3 | 1 |
{-| Tags
This module holds all the tag interpretation done by htools.
-}
{-
Copyright (C) 2014 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.HTools.Tags
( exTagsPrefix
, standbyAuto
, hasStandbyTag
, autoRepairTagPrefix
, autoRepairTagEnabled
, autoRepairTagPending
, autoRepairTagResult
, autoRepairTagSuspended
, getMigRestrictions
, getRecvMigRestrictions
, getLocations
) where
import Control.Monad (guard, (>=>))
import Data.List (isPrefixOf, isInfixOf, stripPrefix)
import Data.Maybe (mapMaybe)
import qualified Data.Set as S
import qualified Ganeti.HTools.Node as Node
-- * Constants
-- | The exclusion tag prefix. Instance tags starting with this prefix
-- describe a service provided by the instance. Instances providing the
-- same service are not placed on the same node.
exTagsPrefix :: String
exTagsPrefix = "htools:iextags:"
-- | The tag-prefix indicating that hsqueeze should consider a node
-- as being standby.
standbyPrefix :: String
standbyPrefix = "htools:standby:"
-- | The prefix for migration tags
migrationPrefix :: String
migrationPrefix = "htools:migration:"
-- | Prefix of tags allowing migration
allowMigrationPrefix :: String
allowMigrationPrefix = "htools:allowmigration:"
-- | The prefix for location tags.
locationPrefix :: String
locationPrefix = "htools:nlocation:"
-- | The tag to be added to nodes that were shutdown by hsqueeze.
standbyAuto :: String
standbyAuto = "htools:standby:auto"
-- | Auto-repair tag prefix
autoRepairTagPrefix :: String
autoRepairTagPrefix = "ganeti:watcher:autorepair:"
autoRepairTagEnabled :: String
autoRepairTagEnabled = autoRepairTagPrefix
autoRepairTagPending :: String
autoRepairTagPending = autoRepairTagPrefix ++ "pending:"
autoRepairTagResult :: String
autoRepairTagResult = autoRepairTagPrefix ++ "result:"
autoRepairTagSuspended :: String
autoRepairTagSuspended = autoRepairTagPrefix ++ "suspend:"
-- * Predicates
-- | Predicate of having a standby tag.
hasStandbyTag :: Node.Node -> Bool
hasStandbyTag = any (standbyPrefix `isPrefixOf`) . Node.nTags
-- * Utility functions
-- | Htools standard tag extraction. Given a set of cluster tags,
-- take those starting with a specific prefix, strip the prefix
-- and append a colon, and then take those node tags starting with
-- one of those strings.
getTags :: String -> [String] -> [String] -> S.Set String
getTags prefix ctags ntags = S.fromList
(mapMaybe (stripPrefix prefix) ctags >>= \ p ->
filter ((p ++ ":") `isPrefixOf`) ntags)
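-- An editorial example (not in the original source) of how getTags combines
-- cluster and node tags, assuming the migration prefix below:
--
--   getTags "htools:migration:"
--           ["htools:migration:speed", "unrelated"]   -- cluster tags
--           ["speed:fast", "color:red"]               -- node tags
--     == S.fromList ["speed:fast"]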
-- * Migration restriction tags
-- | Given the cluster tags extract the migration restrictions
-- from a node tag.
getMigRestrictions :: [String] -> [String] -> S.Set String
getMigRestrictions = getTags migrationPrefix
-- | Maybe split a string on the first single occurrence of "::" and return
-- the parts before and after.
splitAtColons :: String -> Maybe (String, String)
splitAtColons (':':':':xs) = do
guard $ not ("::" `isInfixOf` xs)
return ("", xs)
splitAtColons (x:xs) = do
(as, bs) <- splitAtColons xs
return (x:as, bs)
splitAtColons _ = Nothing
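-- Editorial examples (not in the original source) of the intended behaviour:
--
--   splitAtColons "a::b"    == Just ("a", "b")
--   splitAtColons "a:b"     == Nothing
--   splitAtColons "a::b::c" == Nothing   -- more than one "::" occurrence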
-- | Get the pairs of allowed migrations from a set of cluster tags.
migrations :: [String] -> [(String, String)]
migrations = mapMaybe $ stripPrefix allowMigrationPrefix >=> splitAtColons
-- | Given the cluster tags, extract the set of migration restrictions
-- a node is able to receive from its node tags.
getRecvMigRestrictions :: [String] -> [String] -> S.Set String
getRecvMigRestrictions ctags ntags =
let migs = migrations ctags
closure tag = (:) tag . map fst $ filter ((==) tag . snd) migs
in S.fromList $ S.elems (getMigRestrictions ctags ntags) >>= closure
-- * Location tags
-- | Given the cluster tags, extract the node location tags
-- from the node tags.
getLocations :: [String] -> [String] -> S.Set String
getLocations = getTags locationPrefix
|
dimara/ganeti
|
src/Ganeti/HTools/Tags.hs
|
bsd-2-clause
| 5,092 | 0 | 13 | 815 | 706 | 409 | 297 | 64 | 1 |
{-# LANGUAGE TemplateHaskell, QuasiQuotes #-}
module Settings.StaticFiles where
import Yesod.Static (staticFiles, StaticRoute (StaticRoute))
-- | This generates easy references to files in the static directory at compile time.
-- The upside to this is that you have compile-time verification that referenced files
-- exist. However, any files added to your static directory during run-time can't be
-- accessed this way. You'll have to use their FilePath or URL to access them.
$(staticFiles "static")
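-- An editorial illustration (not part of the original module): for a file such
-- as static/css/bootstrap.css, the Template Haskell splice above is expected to
-- generate an identifier along the lines of
--
-- > css_bootstrap_css :: StaticRoute
--
-- which can then be used in widgets instead of a hand-written path. The exact
-- name is an assumption based on yesod-static's convention of replacing path
-- separators and dots with underscores.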
|
cutsea110/tut
|
Settings/StaticFiles.hs
|
bsd-2-clause
| 510 | 0 | 7 | 81 | 38 | 24 | 14 | 4 | 0 |
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-| Unittests for ganeti-htools.
-}
{-
Copyright (C) 2009, 2010, 2011, 2012, 2013 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Test.Ganeti.OpCodes
( testOpCodes
, OpCodes.OpCode(..)
) where
import Test.HUnit as HUnit
import Test.QuickCheck as QuickCheck
import Control.Applicative
import Control.Monad
import Data.Char
import Data.List
import qualified Data.Map as Map
import qualified Text.JSON as J
import Text.Printf (printf)
import Test.Ganeti.Objects ()
import Test.Ganeti.Query.Language ()
import Test.Ganeti.TestHelper
import Test.Ganeti.TestCommon
import Test.Ganeti.Types (genReasonTrail)
import Ganeti.BasicTypes
import qualified Ganeti.Constants as C
import qualified Ganeti.ConstantUtils as CU
import qualified Ganeti.OpCodes as OpCodes
import Ganeti.Types
import Ganeti.OpParams
import Ganeti.JSON
{-# ANN module "HLint: ignore Use camelCase" #-}
-- * Arbitrary instances
arbitraryOpTagsGet :: Gen OpCodes.OpCode
arbitraryOpTagsGet = do
kind <- arbitrary
OpCodes.OpTagsSet kind <$> genTags <*> genOpCodesTagName kind
arbitraryOpTagsSet :: Gen OpCodes.OpCode
arbitraryOpTagsSet = do
kind <- arbitrary
OpCodes.OpTagsSet kind <$> genTags <*> genOpCodesTagName kind
arbitraryOpTagsDel :: Gen OpCodes.OpCode
arbitraryOpTagsDel = do
kind <- arbitrary
OpCodes.OpTagsDel kind <$> genTags <*> genOpCodesTagName kind
$(genArbitrary ''OpCodes.ReplaceDisksMode)
$(genArbitrary ''DiskAccess)
instance Arbitrary OpCodes.DiskIndex where
arbitrary = choose (0, C.maxDisks - 1) >>= OpCodes.mkDiskIndex
instance Arbitrary INicParams where
arbitrary = INicParams <$> genMaybe genNameNE <*> genMaybe genName <*>
genMaybe genNameNE <*> genMaybe genNameNE <*>
genMaybe genNameNE <*> genMaybe genName <*>
genMaybe genNameNE <*> genMaybe genNameNE
instance Arbitrary IDiskParams where
arbitrary = IDiskParams <$> arbitrary <*> arbitrary <*>
genMaybe genNameNE <*> genMaybe genNameNE <*>
genMaybe genNameNE <*> genMaybe genNameNE <*>
genMaybe genNameNE <*> arbitrary <*>
genMaybe genNameNE <*> genAndRestArguments
instance Arbitrary RecreateDisksInfo where
arbitrary = oneof [ pure RecreateDisksAll
, RecreateDisksIndices <$> arbitrary
, RecreateDisksParams <$> arbitrary
]
instance Arbitrary DdmOldChanges where
arbitrary = oneof [ DdmOldIndex <$> arbitrary
, DdmOldMod <$> arbitrary
]
instance (Arbitrary a) => Arbitrary (SetParamsMods a) where
arbitrary = oneof [ pure SetParamsEmpty
, SetParamsDeprecated <$> arbitrary
, SetParamsNew <$> arbitrary
]
instance Arbitrary ExportTarget where
arbitrary = oneof [ ExportTargetLocal <$> genNodeNameNE
, ExportTargetRemote <$> pure []
]
arbitraryDataCollector :: Gen (Container Bool)
arbitraryDataCollector = do
els <- listOf . elements $ CU.toList C.dataCollectorNames
activation <- vector $ length els
return . GenericContainer . Map.fromList $ zip els activation
arbitraryDataCollectorInterval :: Gen (Maybe (Container Int))
arbitraryDataCollectorInterval = do
els <- listOf . elements $ CU.toList C.dataCollectorNames
intervals <- vector $ length els
genMaybe . return . containerFromList $ zip els intervals
instance Arbitrary OpCodes.OpCode where
arbitrary = do
op_id <- elements OpCodes.allOpIDs
case op_id of
"OP_TEST_DELAY" ->
OpCodes.OpTestDelay <$> arbitrary <*> arbitrary <*>
genNodeNamesNE <*> return Nothing <*> arbitrary <*> arbitrary <*>
arbitrary
"OP_INSTANCE_REPLACE_DISKS" ->
OpCodes.OpInstanceReplaceDisks <$> genFQDN <*> return Nothing <*>
arbitrary <*> arbitrary <*> arbitrary <*> genDiskIndices <*>
genMaybe genNodeNameNE <*> return Nothing <*> genMaybe genNameNE
"OP_INSTANCE_FAILOVER" ->
OpCodes.OpInstanceFailover <$> genFQDN <*> return Nothing <*>
arbitrary <*> arbitrary <*> genMaybe genNodeNameNE <*>
return Nothing <*> arbitrary <*> arbitrary <*> genMaybe genNameNE
"OP_INSTANCE_MIGRATE" ->
OpCodes.OpInstanceMigrate <$> genFQDN <*> return Nothing <*>
arbitrary <*> arbitrary <*> genMaybe genNodeNameNE <*>
return Nothing <*> arbitrary <*> arbitrary <*> arbitrary <*>
genMaybe genNameNE <*> arbitrary <*> arbitrary
"OP_TAGS_GET" ->
arbitraryOpTagsGet
"OP_TAGS_SEARCH" ->
OpCodes.OpTagsSearch <$> genNameNE
"OP_TAGS_SET" ->
arbitraryOpTagsSet
"OP_TAGS_DEL" ->
arbitraryOpTagsDel
"OP_CLUSTER_POST_INIT" -> pure OpCodes.OpClusterPostInit
"OP_CLUSTER_RENEW_CRYPTO" -> OpCodes.OpClusterRenewCrypto <$>
arbitrary <*> arbitrary <*> arbitrary <*> arbitrary
"OP_CLUSTER_DESTROY" -> pure OpCodes.OpClusterDestroy
"OP_CLUSTER_QUERY" -> pure OpCodes.OpClusterQuery
"OP_CLUSTER_VERIFY" ->
OpCodes.OpClusterVerify <$> arbitrary <*> arbitrary <*>
genListSet Nothing <*> genListSet Nothing <*> arbitrary <*>
genMaybe genNameNE <*> arbitrary
"OP_CLUSTER_VERIFY_CONFIG" ->
OpCodes.OpClusterVerifyConfig <$> arbitrary <*> arbitrary <*>
genListSet Nothing <*> arbitrary
"OP_CLUSTER_VERIFY_GROUP" ->
OpCodes.OpClusterVerifyGroup <$> genNameNE <*> arbitrary <*>
arbitrary <*> genListSet Nothing <*> genListSet Nothing <*>
arbitrary <*> arbitrary
"OP_CLUSTER_VERIFY_DISKS" ->
OpCodes.OpClusterVerifyDisks <$> genMaybe genNameNE
"OP_GROUP_VERIFY_DISKS" ->
OpCodes.OpGroupVerifyDisks <$> genNameNE
"OP_CLUSTER_REPAIR_DISK_SIZES" ->
OpCodes.OpClusterRepairDiskSizes <$> genNodeNamesNE
"OP_CLUSTER_CONFIG_QUERY" ->
OpCodes.OpClusterConfigQuery <$> genFieldsNE
"OP_CLUSTER_RENAME" ->
OpCodes.OpClusterRename <$> genNameNE
"OP_CLUSTER_SET_PARAMS" ->
OpCodes.OpClusterSetParams
<$> arbitrary -- force
<*> emptyMUD -- hv_state
<*> emptyMUD -- disk_state
<*> genMaybe genName -- vg_name
<*> genMaybe arbitrary -- enabled_hypervisors
<*> genMaybe genEmptyContainer -- hvparams
<*> emptyMUD -- beparams
<*> genMaybe genEmptyContainer -- os_hvp
<*> genMaybe genEmptyContainer -- osparams
<*> genMaybe genEmptyContainer -- osparams_private_cluster
<*> genMaybe genEmptyContainer -- diskparams
<*> genMaybe arbitrary -- candidate_pool_size
<*> genMaybe arbitrary -- max_running_jobs
<*> genMaybe arbitrary -- max_tracked_jobs
<*> arbitrary -- uid_pool
<*> arbitrary -- add_uids
<*> arbitrary -- remove_uids
<*> arbitrary -- maintain_node_health
<*> arbitrary -- prealloc_wipe_disks
<*> arbitrary -- nicparams
<*> emptyMUD -- ndparams
<*> emptyMUD -- ipolicy
<*> genMaybe genPrintableAsciiString
-- drbd_helper
<*> genMaybe genPrintableAsciiString
-- default_iallocator
<*> emptyMUD -- default_iallocator_params
<*> genMaybe genMacPrefix -- mac_prefix
<*> genMaybe genPrintableAsciiString
-- master_netdev
<*> arbitrary -- master_netmask
<*> genMaybe (listOf genPrintableAsciiStringNE)
-- reserved_lvs
<*> genMaybe (listOf ((,) <$> arbitrary
<*> genPrintableAsciiStringNE))
-- hidden_os
<*> genMaybe (listOf ((,) <$> arbitrary
<*> genPrintableAsciiStringNE))
-- blacklisted_os
<*> arbitrary -- use_external_mip_script
<*> arbitrary -- enabled_disk_templates
<*> arbitrary -- modify_etc_hosts
<*> genMaybe genName -- file_storage_dir
<*> genMaybe genName -- shared_file_storage_dir
<*> genMaybe genName -- gluster_file_storage_dir
<*> genMaybe genPrintableAsciiString
-- install_image
<*> genMaybe genPrintableAsciiString
-- instance_communication_network
<*> genMaybe genPrintableAsciiString
-- zeroing_image
<*> genMaybe (listOf genPrintableAsciiStringNE)
-- compression_tools
<*> arbitrary -- enabled_user_shutdown
<*> genMaybe arbitraryDataCollector -- enabled_data_collectors
<*> arbitraryDataCollectorInterval -- data_collector_interval
<*> genMaybe (fromPositive <$> arbitrary) -- maintd round interval
"OP_CLUSTER_REDIST_CONF" -> pure OpCodes.OpClusterRedistConf
"OP_CLUSTER_ACTIVATE_MASTER_IP" ->
pure OpCodes.OpClusterActivateMasterIp
"OP_CLUSTER_DEACTIVATE_MASTER_IP" ->
pure OpCodes.OpClusterDeactivateMasterIp
"OP_QUERY" ->
OpCodes.OpQuery <$> arbitrary <*> arbitrary <*> genNamesNE <*>
pure Nothing
"OP_QUERY_FIELDS" ->
OpCodes.OpQueryFields <$> arbitrary <*> genMaybe genNamesNE
"OP_OOB_COMMAND" ->
OpCodes.OpOobCommand <$> genNodeNamesNE <*> return Nothing <*>
arbitrary <*> arbitrary <*> arbitrary <*>
(arbitrary `suchThat` (>0))
"OP_NODE_REMOVE" ->
OpCodes.OpNodeRemove <$> genNodeNameNE <*> return Nothing
"OP_NODE_ADD" ->
OpCodes.OpNodeAdd <$> genNodeNameNE <*> emptyMUD <*> emptyMUD <*>
genMaybe genNameNE <*> genMaybe genNameNE <*> arbitrary <*>
genMaybe genNameNE <*> arbitrary <*> arbitrary <*> emptyMUD <*>
arbitrary
"OP_NODE_QUERYVOLS" ->
OpCodes.OpNodeQueryvols <$> genNamesNE <*> genNodeNamesNE
"OP_NODE_QUERY_STORAGE" ->
OpCodes.OpNodeQueryStorage <$> genNamesNE <*> arbitrary <*>
genNodeNamesNE <*> genMaybe genNameNE
"OP_NODE_MODIFY_STORAGE" ->
OpCodes.OpNodeModifyStorage <$> genNodeNameNE <*> return Nothing <*>
arbitrary <*> genMaybe genNameNE <*> pure emptyJSObject
"OP_REPAIR_NODE_STORAGE" ->
OpCodes.OpRepairNodeStorage <$> genNodeNameNE <*> return Nothing <*>
arbitrary <*> genMaybe genNameNE <*> arbitrary
"OP_NODE_SET_PARAMS" ->
OpCodes.OpNodeSetParams <$> genNodeNameNE <*> return Nothing <*>
arbitrary <*> emptyMUD <*> emptyMUD <*> arbitrary <*> arbitrary <*>
arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*>
genMaybe genNameNE <*> emptyMUD <*> arbitrary
"OP_NODE_POWERCYCLE" ->
OpCodes.OpNodePowercycle <$> genNodeNameNE <*> return Nothing <*>
arbitrary
"OP_NODE_MIGRATE" ->
OpCodes.OpNodeMigrate <$> genNodeNameNE <*> return Nothing <*>
arbitrary <*> arbitrary <*> genMaybe genNodeNameNE <*>
return Nothing <*> arbitrary <*> arbitrary <*> genMaybe genNameNE
"OP_NODE_EVACUATE" ->
OpCodes.OpNodeEvacuate <$> arbitrary <*> genNodeNameNE <*>
return Nothing <*> genMaybe genNodeNameNE <*> return Nothing <*>
genMaybe genNameNE <*> arbitrary <*> arbitrary
"OP_INSTANCE_CREATE" ->
OpCodes.OpInstanceCreate
<$> genFQDN -- instance_name
<*> arbitrary -- force_variant
<*> arbitrary -- wait_for_sync
<*> arbitrary -- name_check
<*> arbitrary -- ignore_ipolicy
<*> arbitrary -- opportunistic_locking
<*> pure emptyJSObject -- beparams
<*> arbitrary -- disks
<*> arbitrary -- disk_template
<*> genMaybe genNameNE -- group_name
<*> arbitrary -- file_driver
<*> genMaybe genNameNE -- file_storage_dir
<*> pure emptyJSObject -- hvparams
<*> arbitrary -- hypervisor
<*> genMaybe genNameNE -- iallocator
<*> arbitrary -- identify_defaults
<*> arbitrary -- ip_check
<*> arbitrary -- conflicts_check
<*> arbitrary -- mode
<*> arbitrary -- nics
<*> arbitrary -- no_install
<*> pure emptyJSObject -- osparams
<*> genMaybe arbitraryPrivateJSObj -- osparams_private
<*> genMaybe arbitrarySecretJSObj -- osparams_secret
<*> genMaybe genNameNE -- os_type
<*> genMaybe genNodeNameNE -- pnode
<*> return Nothing -- pnode_uuid
<*> genMaybe genNodeNameNE -- snode
<*> return Nothing -- snode_uuid
<*> genMaybe (pure []) -- source_handshake
<*> genMaybe genNodeNameNE -- source_instance_name
<*> arbitrary -- source_shutdown_timeout
<*> genMaybe genNodeNameNE -- source_x509_ca
<*> return Nothing -- src_node
<*> genMaybe genNodeNameNE -- src_node_uuid
<*> genMaybe genNameNE -- src_path
<*> genPrintableAsciiString -- compress
<*> arbitrary -- start
<*> arbitrary -- forthcoming
<*> arbitrary -- commit
<*> (genTags >>= mapM mkNonEmpty) -- tags
<*> arbitrary -- instance_communication
<*> arbitrary -- helper_startup_timeout
<*> arbitrary -- helper_shutdown_timeout
"OP_INSTANCE_MULTI_ALLOC" ->
OpCodes.OpInstanceMultiAlloc <$> arbitrary <*> genMaybe genNameNE <*>
pure []
"OP_INSTANCE_REINSTALL" ->
OpCodes.OpInstanceReinstall <$> genFQDN <*> return Nothing <*>
arbitrary <*> genMaybe genNameNE <*> genMaybe (pure emptyJSObject)
<*> genMaybe arbitraryPrivateJSObj <*> genMaybe arbitrarySecretJSObj
"OP_INSTANCE_REMOVE" ->
OpCodes.OpInstanceRemove <$> genFQDN <*> return Nothing <*>
arbitrary <*> arbitrary
"OP_INSTANCE_RENAME" ->
OpCodes.OpInstanceRename <$> genFQDN <*> return Nothing <*>
genNodeNameNE <*> arbitrary <*> arbitrary
"OP_INSTANCE_STARTUP" ->
OpCodes.OpInstanceStartup <$>
genFQDN <*> -- instance_name
return Nothing <*> -- instance_uuid
arbitrary <*> -- force
arbitrary <*> -- ignore_offline_nodes
pure emptyJSObject <*> -- hvparams
pure emptyJSObject <*> -- beparams
arbitrary <*> -- no_remember
arbitrary <*> -- startup_paused
arbitrary -- shutdown_timeout
"OP_INSTANCE_SHUTDOWN" ->
OpCodes.OpInstanceShutdown <$> genFQDN <*> return Nothing <*>
arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary
"OP_INSTANCE_REBOOT" ->
OpCodes.OpInstanceReboot <$> genFQDN <*> return Nothing <*>
arbitrary <*> arbitrary <*> arbitrary
"OP_INSTANCE_MOVE" ->
OpCodes.OpInstanceMove <$> genFQDN <*> return Nothing <*>
arbitrary <*> arbitrary <*> genNodeNameNE <*> return Nothing <*>
genPrintableAsciiString <*> arbitrary
"OP_INSTANCE_CONSOLE" -> OpCodes.OpInstanceConsole <$> genFQDN <*>
return Nothing
"OP_INSTANCE_ACTIVATE_DISKS" ->
OpCodes.OpInstanceActivateDisks <$> genFQDN <*> return Nothing <*>
arbitrary <*> arbitrary
"OP_INSTANCE_DEACTIVATE_DISKS" ->
OpCodes.OpInstanceDeactivateDisks <$> genFQDN <*> return Nothing <*>
arbitrary
"OP_INSTANCE_RECREATE_DISKS" ->
OpCodes.OpInstanceRecreateDisks <$> genFQDN <*> return Nothing <*>
arbitrary <*> genNodeNamesNE <*> return Nothing <*>
genMaybe genNameNE
"OP_INSTANCE_QUERY_DATA" ->
OpCodes.OpInstanceQueryData <$> arbitrary <*>
genNodeNamesNE <*> arbitrary
"OP_INSTANCE_SET_PARAMS" ->
OpCodes.OpInstanceSetParams
<$> genFQDN -- instance_name
<*> return Nothing -- instance_uuid
<*> arbitrary -- force
<*> arbitrary -- force_variant
<*> arbitrary -- ignore_ipolicy
<*> arbitrary -- nics
<*> arbitrary -- disks
<*> pure emptyJSObject -- beparams
<*> arbitrary -- runtime_mem
<*> pure emptyJSObject -- hvparams
<*> arbitrary -- disk_template
<*> pure emptyJSObject -- ext_params
<*> arbitrary -- file_driver
<*> genMaybe genNameNE -- file_storage_dir
<*> genMaybe genNodeNameNE -- pnode
<*> return Nothing -- pnode_uuid
<*> genMaybe genNodeNameNE -- remote_node
<*> return Nothing -- remote_node_uuid
<*> genMaybe genNameNE -- iallocator
<*> genMaybe genNameNE -- os_name
<*> pure emptyJSObject -- osparams
<*> genMaybe arbitraryPrivateJSObj -- osparams_private
<*> arbitrary -- wait_for_sync
<*> arbitrary -- offline
<*> arbitrary -- conflicts_check
<*> arbitrary -- hotplug
<*> arbitrary -- hotplug_if_possible
<*> arbitrary -- instance_communication
"OP_INSTANCE_GROW_DISK" ->
OpCodes.OpInstanceGrowDisk <$> genFQDN <*> return Nothing <*>
arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary
"OP_INSTANCE_CHANGE_GROUP" ->
OpCodes.OpInstanceChangeGroup <$> genFQDN <*> return Nothing <*>
arbitrary <*> genMaybe genNameNE <*>
genMaybe (resize maxNodes (listOf genNameNE))
"OP_GROUP_ADD" ->
OpCodes.OpGroupAdd <$> genNameNE <*> arbitrary <*>
emptyMUD <*> genMaybe genEmptyContainer <*>
emptyMUD <*> emptyMUD <*> emptyMUD
"OP_GROUP_ASSIGN_NODES" ->
OpCodes.OpGroupAssignNodes <$> genNameNE <*> arbitrary <*>
genNodeNamesNE <*> return Nothing
"OP_GROUP_SET_PARAMS" ->
OpCodes.OpGroupSetParams <$> genNameNE <*> arbitrary <*>
emptyMUD <*> genMaybe genEmptyContainer <*>
emptyMUD <*> emptyMUD <*> emptyMUD
"OP_GROUP_REMOVE" ->
OpCodes.OpGroupRemove <$> genNameNE
"OP_GROUP_RENAME" ->
OpCodes.OpGroupRename <$> genNameNE <*> genNameNE
"OP_GROUP_EVACUATE" ->
OpCodes.OpGroupEvacuate <$> genNameNE <*> arbitrary <*>
genMaybe genNameNE <*> genMaybe genNamesNE <*> arbitrary <*> arbitrary
"OP_OS_DIAGNOSE" ->
OpCodes.OpOsDiagnose <$> genFieldsNE <*> genNamesNE
"OP_EXT_STORAGE_DIAGNOSE" ->
OpCodes.OpOsDiagnose <$> genFieldsNE <*> genNamesNE
"OP_BACKUP_PREPARE" ->
OpCodes.OpBackupPrepare <$> genFQDN <*> return Nothing <*> arbitrary
"OP_BACKUP_EXPORT" ->
OpCodes.OpBackupExport
<$> genFQDN -- instance_name
<*> return Nothing -- instance_uuid
<*> genPrintableAsciiString -- compress
<*> arbitrary -- shutdown_timeout
<*> arbitrary -- target_node
<*> return Nothing -- target_node_uuid
<*> arbitrary -- shutdown
<*> arbitrary -- remove_instance
<*> arbitrary -- ignore_remove_failures
<*> arbitrary -- mode
<*> genMaybe (pure []) -- x509_key_name
<*> genMaybe genNameNE -- destination_x509_ca
<*> arbitrary -- zero_free_space
<*> arbitrary -- zeroing_timeout_fixed
<*> arbitrary -- zeroing_timeout_per_mib
<*> arbitrary -- long_sleep
"OP_BACKUP_REMOVE" ->
OpCodes.OpBackupRemove <$> genFQDN <*> return Nothing
"OP_TEST_ALLOCATOR" ->
OpCodes.OpTestAllocator <$> arbitrary <*> arbitrary <*>
genNameNE <*> genMaybe (pure []) <*> genMaybe (pure []) <*>
arbitrary <*> genMaybe genNameNE <*>
(genTags >>= mapM mkNonEmpty) <*>
arbitrary <*> arbitrary <*> genMaybe genNameNE <*>
arbitrary <*> genMaybe genNodeNamesNE <*> arbitrary <*>
genMaybe genNamesNE <*> arbitrary <*> arbitrary <*>
genMaybe genNameNE
"OP_TEST_JQUEUE" ->
OpCodes.OpTestJqueue <$> arbitrary <*> arbitrary <*>
resize 20 (listOf genFQDN) <*> arbitrary
"OP_TEST_OS_PARAMS" ->
OpCodes.OpTestOsParams <$> genMaybe arbitrarySecretJSObj
"OP_TEST_DUMMY" ->
OpCodes.OpTestDummy <$> pure J.JSNull <*> pure J.JSNull <*>
pure J.JSNull <*> pure J.JSNull
"OP_NETWORK_ADD" ->
OpCodes.OpNetworkAdd <$> genNameNE <*> genIPv4Network <*>
genMaybe genIPv4Address <*> pure Nothing <*> pure Nothing <*>
genMaybe genMacPrefix <*> genMaybe (listOf genIPv4Address) <*>
arbitrary <*> (genTags >>= mapM mkNonEmpty)
"OP_NETWORK_REMOVE" ->
OpCodes.OpNetworkRemove <$> genNameNE <*> arbitrary
"OP_NETWORK_SET_PARAMS" ->
OpCodes.OpNetworkSetParams <$> genNameNE <*>
genMaybe genIPv4Address <*> pure Nothing <*> pure Nothing <*>
genMaybe genMacPrefix <*> genMaybe (listOf genIPv4Address) <*>
genMaybe (listOf genIPv4Address)
"OP_NETWORK_CONNECT" ->
OpCodes.OpNetworkConnect <$> genNameNE <*> genNameNE <*>
arbitrary <*> genNameNE <*> genPrintableAsciiString <*> arbitrary
"OP_NETWORK_DISCONNECT" ->
OpCodes.OpNetworkDisconnect <$> genNameNE <*> genNameNE
"OP_RESTRICTED_COMMAND" ->
OpCodes.OpRestrictedCommand <$> arbitrary <*> genNodeNamesNE <*>
return Nothing <*> genNameNE
_ -> fail $ "Undefined arbitrary for opcode " ++ op_id
instance Arbitrary OpCodes.CommonOpParams where
arbitrary = OpCodes.CommonOpParams <$> arbitrary <*> arbitrary <*>
arbitrary <*> resize 5 arbitrary <*> genMaybe genName <*>
genReasonTrail
-- * Helper functions
-- | Empty JSObject.
emptyJSObject :: J.JSObject J.JSValue
emptyJSObject = J.toJSObject []
-- | Empty maybe unchecked dictionary.
emptyMUD :: Gen (Maybe (J.JSObject J.JSValue))
emptyMUD = genMaybe $ pure emptyJSObject
-- | Generates an empty container.
genEmptyContainer :: (Ord a) => Gen (GenericContainer a b)
genEmptyContainer = pure . GenericContainer $ Map.fromList []
-- | Generates list of disk indices.
genDiskIndices :: Gen [DiskIndex]
genDiskIndices = do
cnt <- choose (0, C.maxDisks)
genUniquesList cnt arbitrary
-- | Generates a list of node names.
genNodeNames :: Gen [String]
genNodeNames = resize maxNodes (listOf genFQDN)
-- | Generates a list of node names in non-empty string type.
genNodeNamesNE :: Gen [NonEmptyString]
genNodeNamesNE = genNodeNames >>= mapM mkNonEmpty
-- | Gets a node name in non-empty type.
genNodeNameNE :: Gen NonEmptyString
genNodeNameNE = genFQDN >>= mkNonEmpty
-- | Gets a name (non-fqdn) in non-empty type.
genNameNE :: Gen NonEmptyString
genNameNE = genName >>= mkNonEmpty
-- | Gets a list of names (non-fqdn) in non-empty type.
genNamesNE :: Gen [NonEmptyString]
genNamesNE = resize maxNodes (listOf genNameNE)
-- | Returns a list of non-empty fields.
genFieldsNE :: Gen [NonEmptyString]
genFieldsNE = genFields >>= mapM mkNonEmpty
-- | Generate a 3-byte MAC prefix.
genMacPrefix :: Gen NonEmptyString
genMacPrefix = do
octets <- vectorOf 3 $ choose (0::Int, 255)
mkNonEmpty . intercalate ":" $ map (printf "%02x") octets
-- | JSObject of arbitrary data.
--
-- Since JSValue does not implement Arbitrary, I'll simply generate
-- (String, String) objects.
arbitraryPrivateJSObj :: Gen (J.JSObject (Private J.JSValue))
arbitraryPrivateJSObj =
constructor <$> (fromNonEmpty <$> genNameNE)
<*> (fromNonEmpty <$> genNameNE)
where constructor k v = showPrivateJSObject [(k, v)]
-- | JSObject of arbitrary secret data.
arbitrarySecretJSObj :: Gen (J.JSObject (Secret J.JSValue))
arbitrarySecretJSObj =
constructor <$> (fromNonEmpty <$> genNameNE)
<*> (fromNonEmpty <$> genNameNE)
where constructor k v = showSecretJSObject [(k, v)]
-- | Arbitrary instance for MetaOpCode, defined here due to TH ordering.
$(genArbitrary ''OpCodes.MetaOpCode)
-- | Small helper to check for a failed JSON deserialisation
isJsonError :: J.Result a -> Bool
isJsonError (J.Error _) = True
isJsonError _ = False
-- * Test cases
-- | Check that opcode serialization is idempotent.
prop_serialization :: OpCodes.OpCode -> Property
prop_serialization = testSerialisation
-- | Check that Python and Haskell defined the same opcode list.
case_AllDefined :: HUnit.Assertion
case_AllDefined = do
py_stdout <-
runPython "from ganeti import opcodes\n\
\from ganeti import serializer\n\
\import sys\n\
\print serializer.Dump([opid for opid in opcodes.OP_MAPPING])\n"
""
>>= checkPythonResult
py_ops <- case J.decode py_stdout::J.Result [String] of
J.Ok ops -> return ops
J.Error msg ->
HUnit.assertFailure ("Unable to decode opcode names: " ++ msg)
               -- this already raised an exception, but we need it
-- for proper types
>> fail "Unable to decode opcode names"
let hs_ops = sort OpCodes.allOpIDs
extra_py = py_ops \\ hs_ops
extra_hs = hs_ops \\ py_ops
HUnit.assertBool ("Missing OpCodes from the Haskell code:\n" ++
unlines extra_py) (null extra_py)
HUnit.assertBool ("Extra OpCodes in the Haskell code code:\n" ++
unlines extra_hs) (null extra_hs)
-- | Custom HUnit test case that forks a Python process and checks
-- correspondence between Haskell-generated OpCodes and their Python
-- decoded, validated and re-encoded version.
--
-- Note that we have a strange beast here: since launching Python is
-- expensive, we don't do this via a usual QuickCheck property, since that's
-- slow (I've tested it, and it's indeed quite slow). Rather, we use a
-- single HUnit assertion, and in it we manually use QuickCheck to
-- generate a batch of opcodes (100 per defined opcode, see 'num_opcodes'
-- below), which we then pass in bulk to Python. The drawbacks of this
-- method are twofold: we cannot control the number of generated opcodes,
-- since HUnit assertions don't get access to the test options, and for the
-- same reason we can't run with a repeatable seed. We should probably find
-- a better way to do this, for example by having a separately-launched
-- Python process (and skipping the tests if it is not running).
case_py_compat_types :: HUnit.Assertion
case_py_compat_types = do
let num_opcodes = length OpCodes.allOpIDs * 100
opcodes <- genSample (vectorOf num_opcodes
(arbitrary::Gen OpCodes.MetaOpCode))
let with_sum = map (\o -> (OpCodes.opSummary $
OpCodes.metaOpCode o, o)) opcodes
serialized = J.encode opcodes
-- check for non-ASCII fields, usually due to 'arbitrary :: String'
mapM_ (\op -> when (any (not . isAscii) (J.encode op)) .
HUnit.assertFailure $
"OpCode has non-ASCII fields: " ++ show op
) opcodes
py_stdout <-
runPython "from ganeti import opcodes\n\
\from ganeti import serializer\n\
\import sys\n\
\op_data = serializer.Load(sys.stdin.read())\n\
\decoded = [opcodes.OpCode.LoadOpCode(o) for o in op_data]\n\
\for op in decoded:\n\
\ op.Validate(True)\n\
\encoded = [(op.Summary(), op.__getstate__())\n\
\ for op in decoded]\n\
\print serializer.Dump(\
\ encoded,\
\ private_encoder=serializer.EncodeWithPrivateFields)"
serialized
>>= checkPythonResult
let deserialised =
J.decode py_stdout::J.Result [(String, OpCodes.MetaOpCode)]
decoded <- case deserialised of
J.Ok ops -> return ops
J.Error msg ->
HUnit.assertFailure ("Unable to decode opcodes: " ++ msg)
                 -- this already raised an exception, but we need it
-- for proper types
>> fail "Unable to decode opcodes"
HUnit.assertEqual "Mismatch in number of returned opcodes"
(length decoded) (length with_sum)
mapM_ (uncurry (HUnit.assertEqual "Different result after encoding/decoding")
) $ zip with_sum decoded
-- | Custom HUnit test case that forks a Python process and checks
-- correspondence between Haskell OpCodes fields and their Python
-- equivalent.
case_py_compat_fields :: HUnit.Assertion
case_py_compat_fields = do
let hs_fields = sort $ map (\op_id -> (op_id, OpCodes.allOpFields op_id))
OpCodes.allOpIDs
py_stdout <-
runPython "from ganeti import opcodes\n\
\import sys\n\
\from ganeti import serializer\n\
\fields = [(k, sorted([p[0] for p in v.OP_PARAMS]))\n\
\ for k, v in opcodes.OP_MAPPING.items()]\n\
\print serializer.Dump(fields)" ""
>>= checkPythonResult
let deserialised = J.decode py_stdout::J.Result [(String, [String])]
py_fields <- case deserialised of
J.Ok v -> return $ sort v
J.Error msg ->
HUnit.assertFailure ("Unable to decode op fields: " ++ msg)
                   -- this already raised an exception, but we need it
-- for proper types
>> fail "Unable to decode op fields"
HUnit.assertEqual "Mismatch in number of returned opcodes"
(length hs_fields) (length py_fields)
HUnit.assertEqual "Mismatch in defined OP_IDs"
(map fst hs_fields) (map fst py_fields)
mapM_ (\((py_id, py_flds), (hs_id, hs_flds)) -> do
HUnit.assertEqual "Mismatch in OP_ID" py_id hs_id
HUnit.assertEqual ("Mismatch in fields for " ++ hs_id)
py_flds hs_flds
) $ zip hs_fields py_fields
-- | Checks that setOpComment works correctly.
prop_setOpComment :: OpCodes.MetaOpCode -> String -> Property
prop_setOpComment op comment =
let (OpCodes.MetaOpCode common _) = OpCodes.setOpComment comment op
in OpCodes.opComment common ==? Just comment
-- | Tests wrong (negative) disk index.
prop_mkDiskIndex_fail :: QuickCheck.Positive Int -> Property
prop_mkDiskIndex_fail (Positive i) =
case mkDiskIndex (negate i) of
Bad msg -> counterexample "error message " $
"Invalid value" `isPrefixOf` msg
Ok v -> failTest $ "Succeeded to build disk index '" ++ show v ++
"' from negative value " ++ show (negate i)
-- | Tests a few invalid 'readRecreateDisks' cases.
case_readRecreateDisks_fail :: Assertion
case_readRecreateDisks_fail = do
assertBool "null" $
isJsonError (J.readJSON J.JSNull::J.Result RecreateDisksInfo)
assertBool "string" $
isJsonError (J.readJSON (J.showJSON "abc")::J.Result RecreateDisksInfo)
-- | Tests a few invalid 'readDdmOldChanges' cases.
case_readDdmOldChanges_fail :: Assertion
case_readDdmOldChanges_fail = do
assertBool "null" $
isJsonError (J.readJSON J.JSNull::J.Result DdmOldChanges)
assertBool "string" $
isJsonError (J.readJSON (J.showJSON "abc")::J.Result DdmOldChanges)
-- | Tests a few invalid 'readExportTarget' cases.
case_readExportTarget_fail :: Assertion
case_readExportTarget_fail = do
assertBool "null" $
isJsonError (J.readJSON J.JSNull::J.Result ExportTarget)
assertBool "int" $
isJsonError (J.readJSON (J.showJSON (5::Int))::J.Result ExportTarget)
testSuite "OpCodes"
[ 'prop_serialization
, 'case_AllDefined
, 'case_py_compat_types
, 'case_py_compat_fields
, 'prop_setOpComment
, 'prop_mkDiskIndex_fail
, 'case_readRecreateDisks_fail
, 'case_readDdmOldChanges_fail
, 'case_readExportTarget_fail
]
|
grnet/snf-ganeti
|
test/hs/Test/Ganeti/OpCodes.hs
|
bsd-2-clause
| 35,183 | 0 | 56 | 10,745 | 5,974 | 3,016 | 2,958 | 597 | 2 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE MultiWayIf #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
module SmplUnif where
import Control.Applicative
import Control.Monad
import Data.Char (isUpper)
import Data.Foldable (foldlM)
import Data.List hiding (insert, map, null)
import Data.Map.Strict hiding (foldl, foldr, insert, map,
mapMaybe, null)
import qualified Data.Map.Strict as M
import Data.Maybe
import SmplSyntax
import Unbound.LocallyNameless
-- import Debug.Trace
trace = flip const
-- Sig is a quantifier prefix of the form
--   c1...cn, exists X1...Xn, forall b1...bn
-- where c1...cn are global constants and b1...bn are bound variables.
-- The Sig mapping only stores c1...cn and X1...Xn; bound variables are
-- not recorded, so a variable that is not found in the Sig is treated
-- as a bound variable. Global constants are given at the start and do
-- not change. Flexible logic variables X1...Xn are also given at the
-- start, but more can be added during unification as we introduce
-- fresh logic variables.
type Sig = Map Nm VarType
data VarType = Cnst | Flex deriving (Eq,Ord,Show,Read)
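-- An illustrative signature (not part of the original code) with one
-- global constant @c@ and one flexible logic variable @X@ could be
-- written as:
--
-- > sigExample :: Sig
-- > sigExample = M.fromList [(s2n "c", Cnst), (s2n "X", Flex)]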
emptyMap = M.empty
expand :: Fresh m => Map Nm Tm -> Tm -> m Tm
expand s v@(V x) = case M.lookup x s of { Nothing -> pure v; Just u -> expand s u }
expand s (App t1 t2) = App <$> expand s t1 <*> expand s t2
expand s (Lam b) = do { (x,t) <- unbind b; lam x <$> expand s t }
expand' :: Fresh m => Map Nm Tm -> Tm -> m Tm
expand' s v@(V x) = case M.lookup x s of { Nothing -> pure v; Just u -> expand' s u }
expand' s (App t1 t2) = do { t1' <- expand' s t1; t2' <- expand' s t2; redapps t1' [t2'] }
expand' s (Lam b) = do { (x,t) <- unbind b; lam x <$> expand' s t }
infixr .+
(.+) :: Fresh m => (Nm,Tm) -> m (sig, [(Tm,Tm)],Map Nm Tm) -> m (sig, [(Tm,Tm)],Map Nm Tm)
(x,t) .+ mess = do (sig,es,s) <- mess
t' <- expand' s t
let s' = M.insert x t' (subst x t' <$> s)
let es' | M.member x s = (s!x,t'):es
| otherwise = es
return (sig,es', s')
u :: Fresh m => (Sig, [(Tm, Tm)], Map Nm Tm) -> m (Map Nm Tm)
u (_, [], s) = return s
u ess = u =<< ustep' ess
ustep' ess@(_, [], _) = return ess
ustep' (sig, (t1,t2):es, s) = do t1' <- devar s t1
t2' <- devar s t2
ustep (sig, (t1',t2'):es, s)
ustep :: Fresh m => (Sig, [(Tm,Tm)], Map Nm Tm) -> m (Sig, [(Tm,Tm)], Map Nm Tm)
ustep p@(_, [], _) = return p
-- on the fly eta-expansion
ustep (sig, (Lam b1, Lam b2):es, s) = do Just(x,t1,_,t2) <- unbind2 b1 b2
pure (sig, (t1,t2):es, s)
ustep (sig, (t1, Lam b):es, s) = do (x,t) <- unbind b
pure (sig, (App t1 (V x), t):es, s)
ustep (sig, (Lam b, t2):es, s) = do (x,t) <- unbind b
pure (sig, (t, App t2 (V x)):es, s)
-- the real unification work
ustep (sig, (t1, t2):es, s) =
case (tF, tG) of
(V xF, V xG)
-- flexflex
      | flex sig xF && flex sig xG -> if -- multi-way if
-- flexflex1
| xF == xG -> if len1/=len2 then cantUnify "their arguments differ"
else do h <- fresh (s2n "H")
let sig' = M.insert h Flex sig
(xF, hnf bs1 (V h) xs) .+ pure(sig', es, s)
-- flexflex2
| subset bs1 bs2 -> (xG, hnf bs2 tF ts1) .+ pure(sig, es, s)
| subset bs2 bs1 -> (xF, hnf bs1 tG ts2) .+ pure(sig, es, s)
| otherwise ->
do h <- fresh (s2n "H")
let sig' = M.insert h Flex sig
(xF, hnf bs1 (V h) zs) .+ (xG, hnf bs2 (V h) zs) .+ pure(sig',es,s)
-- flexrigid
| flex sig xF -> trace ("flexrigid "++show((t1,t2):es)) $
do xF't2 <- occ s xF t2
when xF't2 . cantUnify $ show xF++" occurs in "++show t2
(sig',es',s') <- (xF, lamMany bs1 t2) .+ pure(sig, es, s)
proj' bs1 (sig',es',s') t2
-- rigidflex
| flex sig xG -> trace ("rigidflex "++show((t1,t2):es)) $
do xG't1 <- occ s xG t1
when xG't1 . cantUnify $ show xG++" occurs in "++show t1
(sig',es',s') <- (xG, lamMany bs2 t1) .+ pure(sig,es, s)
proj' bs2 (sig',es',s') t1
-- rigidrigid
| xF==xG && len1==len2 -> pure (sig, zip ts1 ts2 ++ es, s)
| xF/=xG -> cantUnify $ show xF++" /= "++show xG
| otherwise -> cantUnify "their arguments differ"
where
tF : ts1 = unfoldApp t1; bs1 = unB<$>ts1; len1 = length ts1
tG : ts2 = unfoldApp t2; bs2 = unB<$>ts2; len2 = length ts2
xs = [x1 | (x1,x2)<-zip ts1 ts2, x1==x2]
zs = [x1 | x1 <- ts1, x2 <- ts2, x1==x2]
cantUnify whymsg = fail $ "cannot unify " ++ show (t1,t2)
++ " because " ++ whymsg
    unB (V x) = x -- x must not be in sig, but we do not actually check that here yet
occ s x t = occurs x <$> expand s t
subset xs ys = all (`elem` ys) xs
flex sig x = Just Flex == M.lookup x sig
cnst sig x = Just Cnst == M.lookup x sig
proj :: Fresh m => [Nm] -> (Sig,[(Tm,Tm)],Map Nm Tm) ->
Tm -> m (Sig,[(Tm,Tm)],Map Nm Tm)
proj vs ess@(sig,es,s) t =
trace ("\nproj ("++show vs++") ("++show ess++") ("++show t++")\n***\n") $
case unfoldApp t of
[Lam b] -> do { (x,tb) <- unbind b; proj' (x:vs) ess tb }
V x : ts
| cnst sig x -> foldlM (proj' vs) ess ts -- global const
| flex sig x -> let ys = unB <$> ts -- logic var
zs = [V y | y<-ys, y `elem` vs]
in if subset ys vs then pure ess
else do h <- fresh (s2n "H")
let sig' = M.insert h Flex sig
(x, hnf ys (V h) zs) .+ pure(sig',es,s)
| x `elem` vs -> foldlM (proj' vs) ess ts -- bound var
| otherwise -> fail $ "unbound rigid variable "++ show x
_ -> error $ "non-reachable pattern: t = "++show t
++" ; unfoldApp t = "++show(unfoldApp t)
-- helper function wrapping proj with devar
proj' vs ess@(sig,es,s) t = proj vs ess =<< devar s t
hnf vs h zs = lamMany vs $ appMany h zs
appMany t ts = foldl1 App (t:ts)
lamMany = foldr ((.) . lam) id
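-- For intuition (illustrative, with x, y, h :: Nm):
--
-- > appMany (V h) [V x, V y]  ==  App (App (V h) (V x)) (V y)
-- > hnf [x, y] (V h) [V x]    ==  lam x (lam y (App (V h) (V x)))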
devar :: Fresh m => Map Nm Tm -> Tm -> m Tm
devar s t = case t1 of
V x -> case M.lookup x s of
Just t' -> devar s =<< redapps t' ts
Nothing -> pure t
_ -> pure t
where t1 : ts = unfoldApp t
redapps (Lam b) (t:ts) = do (x,tb) <- unbind b
redapps (subst x t tb) ts
redapps t ts = return $ appMany t ts
unfoldApp = reverse . unstackApp
unstackApp (App t1 t2) = t2 : unstackApp t1
unstackApp t = [t]
|
kyagrd/hs-nipkow-lics93
|
src/SmplUnif.hs
|
bsd-2-clause
| 7,230 | 0 | 19 | 2,593 | 3,111 | 1,608 | 1,503 | 125 | 5 |
module Main where
import ParserTest
import Test.DocTest
import Data.List
import System.Directory
import System.FilePath
import Control.Monad
main :: IO ()
main = getHaskellSourceFiles >>= doctest
getHaskellSourceFiles :: IO [FilePath]
getHaskellSourceFiles = filter (isSuffixOf ".hs") <$> go "src"
where go dir =
do (dirs, files) <- getFilesAndDirectories dir
(files ++) . concat <$> mapM go dirs
getFilesAndDirectories :: FilePath -> IO ([FilePath], [FilePath])
getFilesAndDirectories dir = do
c <- map (dir </>) . filter (`notElem` ["..", "."]) <$> getDirectoryContents dir
liftM2 (,) (filterM doesDirectoryExist c) (filterM doesFileExist c)
|
AlphaMarc/WYAH
|
test/Spec.hs
|
bsd-3-clause
| 811 | 0 | 12 | 253 | 230 | 123 | 107 | 18 | 1 |
module Yhc.Core.Firstify.Super(super) where
import Yhc.Core hiding (uniqueBoundVarsCore, uniqueBoundVars)
import Yhc.Core.FreeVar3
import Yhc.Core.UniqueId
import Yhc.Core.Util
import Yhc.Core.Firstify.Mitchell.Template
import Yhc.Core.Firstify.Mitchell.Terminate
import qualified Yhc.Core.Firstify.Mitchell.BiMap as BiMap
import Control.Exception
import Control.Monad
import Control.Monad.State
import qualified Data.Map as Map
import qualified Data.Set as Set
import Data.List
import Data.Maybe
import Debug.Trace
import Safe
type M a = State S a
data S = S {done :: Set.Set CoreFuncName -- those functions which have been done
,pending :: Set.Set CoreFuncName -- those which are being done
,core :: CoreFuncMap -- the entire program
,special :: BiMap.BiMap CoreFuncName CoreExpr -- which special variants do we have
,terminate :: () -- termination check
,varId :: Int -- what is the next variable id to use
,funcId :: Int -- what is the next function id to use
}
instance UniqueId S where
getId = varId
putId x s = s{varId = x}
super :: Core -> Core
super c = coreReachable ["main"] $ fromCoreFuncMap c $ core $
flip execState undefined $ do
c <- return $ ensureInvariants [NoRecursiveLet,NoCorePos] c
let s0 = S Set.empty Set.empty undefined BiMap.empty () 0 (uniqueFuncsNext c)
put (s0 :: S)
c <- uniqueBoundVarsCore c
modify $ \s -> s{core = toCoreFuncMap c}
foFunc "main"
foFunc :: CoreFuncName -> M Int
foFunc x = do
s <- get
func <- return $ coreFuncMap (core s) x
when (isCoreFunc func && x `Set.notMember` done s && x `Set.notMember` pending s) $ do
modify $ \s -> s{pending = Set.insert x (pending s)}
(args,body) <- liftM fromCoreLam $ foBody (coreFuncBody func)
modify $ \s -> s{core = Map.insert x (CoreFunc x (coreFuncArgs func ++ args) body) (core s)
,pending = Set.delete x (pending s)
,done = Set.insert x (done s)
}
return $ coreFuncArity $ coreFuncMap (core s) x
foBody = transformM fo . funInsideApp
-- invariant: all CoreFun's must be inside a CoreApp
funInsideApp = transform f
where
f (CoreFun x) = CoreApp (CoreFun x) []
f (CoreApp (CoreApp x y) z) = CoreApp x (y++z)
f x = x
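-- For example (illustrative): a bare @CoreFun "f"@ becomes
-- @CoreApp (CoreFun "f") []@, and nested applications such as
-- @CoreApp (CoreApp g [a]) [b]@ are flattened to @CoreApp g [a,b]@.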
fo :: CoreExpr -> M CoreExpr
fo (CoreApp (CoreLam vs x) xs) = do
let ap x f n = if null n then return x else fo $ f n x
x <- ap x CoreLet (zip vs1 xs1)
x <- ap x CoreLam vs2
x <- ap x (flip CoreApp) xs2
return x
where
n = min (length vs) (length xs)
(vs1,vs2) = splitAt n vs
(xs1,xs2) = splitAt n xs
fo (CoreApp (CoreFun x) xs) = do
arity <- foFunc x
vs <- getVars $ max 0 (arity - length xs)
xs <- return $ xs ++ map CoreVar vs
o <- return $ CoreApp (CoreFun x) xs
s <- get
let t = templateCreate (isCorePrim . coreFuncMap (core s)) (const False) o
res <- if t == templateNone then return o else do
let tfull = templateExpand (`BiMap.lookup` special s) t
holes = templateHoles o t
case BiMap.lookupRev t (special s) of
-- OPTION 1: Not previously done, and a homeomorphic embedding
--Nothing | not $ askSpec within tfull (terminate s) -> return x
-- OPTION 2: Previously done
Just name ->
return $ coreApp (CoreFun name) holes
-- OPTION 3: New todo
done -> do
let name = uniqueJoin (templateName t) (funcId s)
fun <- templateGenerate (coreFuncMap (core s)) name t
modify $ \s -> s
{ {-terminate = addSpec name tfull $
cloneSpec within name $ terminate s
, -} funcId = funcId s + 1
,special = BiMap.insert name t (special s)
,core = Map.insert name fun (core s)
}
fo $ coreApp (CoreFun name) holes
return $ coreLam vs res
fo (CoreLet bind x) = if any (not . isCoreVar . snd) rep
then transformM fo x2 else return x2
where
x2 = coreLet keep $ replaceFreeVars rep x
(rep,keep) = partition (\(v,x) -> isCoreVar x || isHo x) bind
fo x = return x
isHo = any isCoreLam . universe
{-
-- In each step first inline all top-level function bindings
-- and let's that appear to be bound to an unsaturated
--
-- Then specialise each value
step :: CoreFuncMap -> SS CoreFuncMap
step = f acts
where
(*) = (,)
acts = ["lambdas" * lambdas, "simplify" * simplify, "inline" * inline, "specialise" * specialise]
f [] x = return x
f ((name,act):ys) x = do
x2 <- trace name $ act x
if x == x2 then f ys x else f acts x2
-- make sure every function is given enough arguments, by introducing lambdas
lambdas :: CoreFuncMap -> SS CoreFuncMap
lambdas c | checkFreeVarCoreMap c = do
s <- get
let funcs = c `Map.union` suspend s
alive = coreReachableMap ["main"] funcs
put $ s{suspend = Map.filterWithKey (\key _ -> key `Map.notMember` alive) funcs}
applyBodyCoreMapM (f alive) alive
where
f alive o@(CoreApp (CoreFun x) xs) = do
xs <- mapM (f alive) xs
let arity = coreFuncArity $ alive Map.! x
extra = arity - length xs
if extra <= 0 then return $ coreApp (CoreFun x) xs else do
vs <- getVars arity
return $ coreApp (coreLam vs (coreApp (CoreFun x) (map CoreVar vs))) xs
f alive (CoreFun x) = f alive $ CoreApp (CoreFun x) []
f alive x = descendM (f alive) x
-- perform basic simplification to remove lambda's
-- basic idea is to lift lambda's outwards to the top
simplify :: CoreFuncMap -> SS CoreFuncMap
simplify c = return . applyFuncCoreMap g =<< transformExprM f c
where
g (CoreFunc name args (CoreLam vars body)) = CoreFunc name (args++vars) body
g x = x
f (CoreApp (CoreLam vs x) ys) = do
x2 <- transformExprM f x2
return $ coreApp (coreLam vs2 x2) ys2
where
i = min (length vs) (length ys)
(vs1,vs2) = splitAt i vs
(ys1,ys2) = splitAt i ys
(rep,bind) = partition (\(a,b) -> isCoreVar b || countFreeVar a x <= 1) (zip vs1 ys1)
x2 = coreLet bind $ replaceFreeVars rep x
f (CoreCase on alts) | not $ null ar = do
vs <- getVars $ maximum ar
transformExprM f $ CoreLam vs $ CoreCase on
[(a, CoreApp b (map CoreVar vs)) | (a,b) <- alts]
where
ar = [length vs | (_, CoreLam vs x) <- alts]
f (CoreLet bind x) | not $ null bad = do
x <- transformM g x
x <- transformM f x
return $ coreLet good x
where
(bad,good) = partition (any isCoreLam . universe . snd) bind
g (CoreVar x) = case lookup x bad of
Nothing -> return $ CoreVar x
Just y -> duplicateExpr y
g x = return x
f (CoreCase on@(CoreApp (CoreCon x) xs) alts) | any isCoreLam $ universe on =
transformM f $ head $ concatMap g alts
where
g (PatDefault, y) = [y]
g (PatCon c vs, y) = [coreLet (zip vs xs) y | c == x]
g _ = []
f (CoreCase (CoreCase on alts1) alts2) | any isCoreLam $ concatMap (universe . snd) alts1 =
transformM f =<< liftM (CoreCase on) (mapM g alts1)
where
g (lhs,rhs) = do
CoreCase _ alts22 <- duplicateExpr $ CoreCase (CoreLit $ CoreInt 0) alts2
return (lhs, CoreCase rhs alts22)
f (CoreLam vs1 (CoreLam vs2 x)) = return $ CoreLam (vs1++vs2) x
f (CoreLet bind (CoreLam vs x)) = return $ CoreLam vs (CoreLet bind x)
f (CoreApp (CoreApp x y) z) = return $ CoreApp x (y++z)
f x = return x
-- BEFORE: box = [even]
-- AFTER: all uses of box are inlined
inline :: CoreFuncMap -> SS CoreFuncMap
inline c = do
s <- get
let todo = Map.fromList [(name,coreLam args body) | CoreFunc name args body <- Map.elems c
,shouldInline body]
if Map.null todo
then return c
else applyFuncBodyCoreMapM (\name -> transformM (f (terminate s) todo name)) c
where
-- note: deliberately use term from BEFORE this state
-- so you keep inlining many times per call
f term mp name (CoreFun x)
| x `Map.member` mp && askInline name x term
= do modify $ \s -> s{terminate = addInline name x (terminate s)}
y <- duplicateExpr $ mp Map.! x
-- try and inline in the context of the person you are grabbing from
transformM (f term (Map.delete x mp) x) y
f term mp name x = return x
-- should inline if there is a lambda before you get to a function
shouldInline = any isCoreLam . universe . transform g
g (CoreApp (CoreFun x) _) = CoreFun x
g x = x
-- BEFORE: map even x
-- AFTER: map_even x
specialise :: CoreFuncMap -> SS CoreFuncMap
specialise c = do
s <- get
(c,(new,s)) <- return $ flip runState (Map.empty,s) $
applyFuncBodyCoreMapM (\name -> transformM (f name)) c
put s
return $ c `Map.union` new
where
isPrim x = maybe False isCorePrim $ Map.lookup x c
f within x | t /= templateNone = do
(new,s) <- get
let tfull = templateExpand (`BiMap.lookup` special s) t
holes = templateHoles x t
case BiMap.lookupRev t (special s) of
-- OPTION 1: Not previously done, and a homeomorphic embedding
Nothing | not $ askSpec within tfull (terminate s) -> return x
-- OPTION 2: Previously done
Just name ->
return $ coreApp (CoreFun name) holes
-- OPTION 3: New todo
done -> do
let name = uniqueJoin (templateName t) (funcId s)
findCoreFunc name = Map.findWithDefault (new Map.! name) name c
fun <- templateGenerate findCoreFunc name t
modify $ \(new,s) -> (Map.insert name fun new,
s{terminate = addSpec name tfull $
cloneSpec within name $ terminate s
,funcId = funcId s + 1
,special = BiMap.insert name t (special s)
})
return $ coreApp (CoreFun name) holes
where t = templateCreate isPrim x
f name x = return x
-}
|
ndmitchell/firstify
|
Yhc/Core/Firstify/Super.hs
|
bsd-3-clause
| 11,180 | 0 | 22 | 4,073 | 1,501 | 771 | 730 | 91 | 5 |
{-# LANGUAGE DeriveDataTypeable, OverloadedStrings #-}
-- | This module re-exports the @Github.Data.Definitions@ module, adding
-- instances of @FromJSON@ to it. If you wish to use the data without the
-- instances, use the @Github.Data.Definitions@ module instead.
module Github.Data (module Github.Data.Definitions) where
import Data.Time
import Control.Applicative
import Control.Monad
import qualified Data.Text as T
import Data.Aeson.Types
import System.Locale (defaultTimeLocale)
import qualified Data.Vector as V
import qualified Data.HashMap.Lazy as Map
import Data.Hashable (Hashable)
import Github.Data.Definitions
instance FromJSON GithubDate where
parseJSON (String t) =
case parseTime defaultTimeLocale "%FT%T%Z" (T.unpack t) of
Just d -> pure $ GithubDate d
_ -> fail "could not parse Github datetime"
parseJSON _ = fail "Given something besides a String"
instance FromJSON Commit where
parseJSON (Object o) =
Commit <$> o .: "sha"
<*> o .: "parents"
<*> o .: "url"
<*> o .: "commit"
<*> o .:? "committer"
<*> o .:? "author"
<*> o .:< "files"
<*> o .:? "stats"
parseJSON _ = fail "Could not build a Commit"
instance FromJSON Tree where
parseJSON (Object o) =
Tree <$> o .: "sha"
<*> o .: "url"
<*> o .:< "tree"
parseJSON _ = fail "Could not build a Tree"
instance FromJSON GitTree where
parseJSON (Object o) =
GitTree <$> o .: "type"
<*> o .: "sha"
<*> o .:? "url"
<*> o .:? "size"
<*> o .: "path"
<*> o .: "mode"
parseJSON _ = fail "Could not build a GitTree"
instance FromJSON GitCommit where
parseJSON (Object o) =
GitCommit <$> o .: "message"
<*> o .: "url"
<*> o .: "committer"
<*> o .: "author"
<*> o .: "tree"
<*> o .:? "sha"
<*> o .:< "parents"
parseJSON _ = fail "Could not build a GitCommit"
instance FromJSON GithubOwner where
parseJSON (Object o)
| o `at` "gravatar_id" == Nothing =
GithubOrganization <$> o .: "avatar_url"
<*> o .: "login"
<*> o .: "url"
<*> o .: "id"
| otherwise =
GithubUser <$> o .: "avatar_url"
<*> o .: "login"
<*> o .: "url"
<*> o .: "id"
<*> o .: "gravatar_id"
parseJSON v = fail $ "Could not build a GithubOwner out of " ++ (show v)
instance FromJSON GitUser where
parseJSON (Object o) =
GitUser <$> o .: "name"
<*> o .: "email"
<*> o .: "date"
parseJSON _ = fail "Could not build a GitUser"
instance FromJSON File where
parseJSON (Object o) =
File <$> o .: "blob_url"
<*> o .: "status"
<*> o .: "raw_url"
<*> o .: "additions"
<*> o .: "sha"
<*> o .: "changes"
<*> o .: "patch"
<*> o .: "filename"
<*> o .: "deletions"
parseJSON _ = fail "Could not build a File"
instance FromJSON Stats where
parseJSON (Object o) =
Stats <$> o .: "additions"
<*> o .: "total"
<*> o .: "deletions"
parseJSON _ = fail "Could not build a Stats"
instance FromJSON Comment where
parseJSON (Object o) =
Comment <$> o .:? "position"
<*> o .:? "line"
<*> o .: "body"
<*> o .:? "commit_id"
<*> o .: "updated_at"
<*> o .:? "html_url"
<*> o .: "url"
<*> o .: "created_at"
<*> o .:? "path"
<*> o .: "user"
<*> o .: "id"
parseJSON _ = fail "Could not build a Comment"
instance ToJSON NewComment where
toJSON (NewComment b) = object [ "body" .= b ]
instance ToJSON EditComment where
toJSON (EditComment b) = object [ "body" .= b ]
instance FromJSON Diff where
parseJSON (Object o) =
Diff <$> o .: "status"
<*> o .: "behind_by"
<*> o .: "patch_url"
<*> o .: "url"
<*> o .: "base_commit"
<*> o .:< "commits"
<*> o .: "total_commits"
<*> o .: "html_url"
<*> o .:< "files"
<*> o .: "ahead_by"
<*> o .: "diff_url"
<*> o .: "permalink_url"
parseJSON _ = fail "Could not build a Diff"
instance FromJSON Gist where
parseJSON (Object o) =
Gist <$> o .: "user"
<*> o .: "git_push_url"
<*> o .: "url"
<*> o .:? "description"
<*> o .: "created_at"
<*> o .: "public"
<*> o .: "comments"
<*> o .: "updated_at"
<*> o .: "html_url"
<*> o .: "id"
<*> o `values` "files"
<*> o .: "git_push_url"
parseJSON _ = fail "Could not build a Gist"
instance FromJSON GistFile where
parseJSON (Object o) =
GistFile <$> o .: "type"
<*> o .: "raw_url"
<*> o .: "size"
<*> o .:? "language"
<*> o .: "filename"
<*> o .:? "content"
parseJSON _ = fail "Could not build a GistFile"
instance FromJSON GistComment where
parseJSON (Object o) =
GistComment <$> o .: "user"
<*> o .: "url"
<*> o .: "created_at"
<*> o .: "body"
<*> o .: "updated_at"
<*> o .: "id"
parseJSON _ = fail "Could not build a GistComment"
instance FromJSON Blob where
parseJSON (Object o) =
Blob <$> o .: "url"
<*> o .: "encoding"
<*> o .: "content"
<*> o .: "sha"
<*> o .: "size"
parseJSON _ = fail "Could not build a Blob"
instance ToJSON NewGitReference where
toJSON (NewGitReference r s) = object [ "ref" .= r, "sha" .= s ]
instance FromJSON GitReference where
parseJSON (Object o) =
GitReference <$> o .: "object"
<*> o .: "url"
<*> o .: "ref"
parseJSON _ = fail "Could not build a GitReference"
instance FromJSON GitObject where
parseJSON (Object o) =
GitObject <$> o .: "type"
<*> o .: "sha"
<*> o .: "url"
parseJSON _ = fail "Could not build a GitObject"
instance FromJSON Issue where
parseJSON (Object o) =
Issue <$> o .:? "closed_at"
<*> o .: "updated_at"
<*> o .: "events_url"
<*> o .: "html_url"
<*> o .:? "closed_by"
<*> o .: "labels"
<*> o .: "number"
<*> o .:? "assignee"
<*> o .: "user"
<*> o .: "title"
<*> o .:? "pull_request"
<*> o .: "url"
<*> o .: "created_at"
<*> o .: "body"
<*> o .: "state"
<*> o .: "id"
<*> o .: "comments"
<*> o .:? "milestone"
parseJSON _ = fail "Could not build an Issue"
instance ToJSON NewIssue where
toJSON (NewIssue t b a m ls) =
object
[ "title" .= t
, "body" .= b
, "assignee" .= a
, "milestone" .= m
, "labels" .= ls ]
instance ToJSON EditIssue where
toJSON (EditIssue t b a s m ls) =
object $ filter notNull $ [ "title" .= t
, "body" .= b
, "assignee" .= a
, "state" .= s
, "milestone" .= m
, "labels" .= ls ]
where notNull (_, Null) = False
notNull (_, _) = True
instance FromJSON Milestone where
parseJSON (Object o) =
Milestone <$> o .: "creator"
<*> o .: "due_on"
<*> o .: "open_issues"
<*> o .: "number"
<*> o .: "closed_issues"
<*> o .: "description"
<*> o .: "title"
<*> o .: "url"
<*> o .: "created_at"
<*> o .: "state"
parseJSON _ = fail "Could not build a Milestone"
instance FromJSON IssueLabel where
parseJSON (Object o) =
IssueLabel <$> o .: "color"
<*> o .: "url"
<*> o .: "name"
parseJSON _ = fail "Could not build a Milestone"
instance FromJSON PullRequestReference where
parseJSON (Object o) =
PullRequestReference <$> o .:? "html_url"
<*> o .:? "patch_url"
<*> o .:? "diff_url"
parseJSON _ = fail "Could not build a PullRequest"
instance FromJSON IssueComment where
parseJSON (Object o) =
IssueComment <$> o .: "updated_at"
<*> o .: "user"
<*> o .: "url"
<*> o .: "created_at"
<*> o .: "body"
<*> o .: "id"
parseJSON _ = fail "Could not build an IssueComment"
instance FromJSON Event where
parseJSON (Object o) =
Event <$> o .: "actor"
<*> o .: "event"
<*> o .:? "commit_id"
<*> o .: "url"
<*> o .: "created_at"
<*> o .: "id"
<*> o .:? "issue"
parseJSON _ = fail "Could not build an Event"
instance FromJSON EventType where
parseJSON (String "closed") = pure Closed
parseJSON (String "reopened") = pure Reopened
parseJSON (String "subscribed") = pure Subscribed
parseJSON (String "merged") = pure Merged
parseJSON (String "referenced") = pure Referenced
parseJSON (String "mentioned") = pure Mentioned
parseJSON (String "assigned") = pure Assigned
parseJSON (String "unsubscribed") = pure Unsubscribed
parseJSON (String "unassigned") = pure ActorUnassigned
parseJSON (String "labeled") = pure Labeled
parseJSON (String "unlabeled") = pure Unlabeled
parseJSON (String "milestoned") = pure Milestoned
parseJSON (String "demilestoned") = pure Demilestoned
parseJSON (String "renamed") = pure Renamed
parseJSON (String "locked") = pure Locked
parseJSON (String "unlocked") = pure Unlocked
parseJSON (String "head_ref_deleted") = pure HeadRefDeleted
parseJSON (String "head_ref_restored") = pure HeadRefRestored
parseJSON _ = fail "Could not build an EventType"
instance FromJSON SimpleOrganization where
parseJSON (Object o) =
SimpleOrganization <$> o .: "url"
<*> o .: "avatar_url"
<*> o .: "id"
<*> o .: "login"
parseJSON _ = fail "Could not build a SimpleOrganization"
instance FromJSON Organization where
parseJSON (Object o) =
Organization <$> o .: "type"
<*> o .:? "blog"
<*> o .:? "location"
<*> o .: "login"
<*> o .: "followers"
<*> o .:? "company"
<*> o .: "avatar_url"
<*> o .: "public_gists"
<*> o .: "html_url"
<*> o .:? "email"
<*> o .: "following"
<*> o .: "public_repos"
<*> o .: "url"
<*> o .: "created_at"
<*> o .:? "name"
<*> o .: "id"
parseJSON _ = fail "Could not build an Organization"
instance FromJSON PullRequest where
parseJSON (Object o) =
PullRequest
<$> o .:? "closed_at"
<*> o .: "created_at"
<*> o .: "user"
<*> o .: "patch_url"
<*> o .: "state"
<*> o .: "number"
<*> o .: "html_url"
<*> o .: "updated_at"
<*> o .: "body"
<*> o .: "issue_url"
<*> o .: "diff_url"
<*> o .: "url"
<*> o .: "_links"
<*> o .:? "merged_at"
<*> o .: "title"
<*> o .: "id"
parseJSON _ = fail "Could not build a PullRequest"
instance ToJSON EditPullRequestState where
toJSON (EditPullRequestStateOpen) = String "open"
toJSON (EditPullRequestStateClosed) = String "closed"
instance ToJSON EditPullRequest where
toJSON (EditPullRequest t b s) =
object $ filter notNull [ "title" .= t, "body" .= b, "state" .= s ]
where notNull (_, Null) = False
notNull (_, _) = True
instance FromJSON DetailedPullRequest where
parseJSON (Object o) =
DetailedPullRequest
<$> o .:? "closed_at"
<*> o .: "created_at"
<*> o .: "user"
<*> o .: "patch_url"
<*> o .: "state"
<*> o .: "number"
<*> o .: "html_url"
<*> o .: "updated_at"
<*> o .: "body"
<*> o .: "issue_url"
<*> o .: "diff_url"
<*> o .: "url"
<*> o .: "_links"
<*> o .:? "merged_at"
<*> o .: "title"
<*> o .: "id"
<*> o .:? "merged_by"
<*> o .: "changed_files"
<*> o .: "head"
<*> o .: "comments"
<*> o .: "deletions"
<*> o .: "additions"
<*> o .: "review_comments"
<*> o .: "base"
<*> o .: "commits"
<*> o .: "merged"
<*> o .:? "mergeable"
parseJSON _ = fail "Could not build a DetailedPullRequest"
instance FromJSON PullRequestLinks where
parseJSON (Object o) =
PullRequestLinks <$> o <.:> ["review_comments", "href"]
<*> o <.:> ["comments", "href"]
<*> o <.:> ["html", "href"]
<*> o <.:> ["self", "href"]
parseJSON _ = fail "Could not build a PullRequestLinks"
instance FromJSON PullRequestCommit where
parseJSON (Object o) =
PullRequestCommit <$> o .: "label"
<*> o .: "ref"
<*> o .: "sha"
<*> o .: "user"
<*> o .: "repo"
parseJSON _ = fail "Could not build a PullRequestCommit"
instance FromJSON PullRequestEvent where
parseJSON (Object o) =
PullRequestEvent <$> o .: "action"
<*> o .: "number"
<*> o .: "pull_request"
<*> o .: "repository"
<*> o .: "sender"
parseJSON _ = fail "Could not build a PullRequestEvent"
instance FromJSON PullRequestEventType where
parseJSON (String "opened") = pure PullRequestOpened
parseJSON (String "closed") = pure PullRequestClosed
parseJSON (String "synchronize") = pure PullRequestSynchronized
parseJSON (String "reopened") = pure PullRequestReopened
parseJSON (String "assigned") = pure PullRequestAssigned
parseJSON (String "unassigned") = pure PullRequestUnassigned
parseJSON (String "labeled") = pure PullRequestLabeled
parseJSON (String "unlabeled") = pure PullRequestUnlabeled
parseJSON _ = fail "Could not build a PullRequestEventType"
instance FromJSON RepoWebhookEvent where
parseJSON (String "*") = pure WebhookWildcardEvent
parseJSON (String "commit_comment") = pure WebhookCommitCommentEvent
parseJSON (String "create") = pure WebhookCreateEvent
parseJSON (String "delete") = pure WebhookDeleteEvent
parseJSON (String "deployment") = pure WebhookDeploymentEvent
parseJSON (String "deployment_status") = pure WebhookDeploymentStatusEvent
parseJSON (String "fork") = pure WebhookForkEvent
parseJSON (String "gollum") = pure WebhookGollumEvent
parseJSON (String "issue_comment") = pure WebhookIssueCommentEvent
parseJSON (String "issues") = pure WebhookIssuesEvent
parseJSON (String "member") = pure WebhookMemberEvent
parseJSON (String "page_build") = pure WebhookPageBuildEvent
parseJSON (String "public") = pure WebhookPublicEvent
parseJSON (String "pull_request_review_comment") = pure WebhookPullRequestReviewCommentEvent
parseJSON (String "pull_request") = pure WebhookPullRequestEvent
parseJSON (String "push") = pure WebhookPushEvent
parseJSON (String "release") = pure WebhookReleaseEvent
parseJSON (String "status") = pure WebhookStatusEvent
parseJSON (String "team_add") = pure WebhookTeamAddEvent
parseJSON (String "watch") = pure WebhookWatchEvent
parseJSON _ = fail "Could not build a Webhook event"
instance ToJSON RepoWebhookEvent where
toJSON (WebhookWildcardEvent) = String "*"
toJSON (WebhookCommitCommentEvent) = String "commit_comment"
toJSON (WebhookCreateEvent) = String "create"
toJSON (WebhookDeleteEvent) = String "delete"
toJSON (WebhookDeploymentEvent) = String "deployment"
toJSON (WebhookDeploymentStatusEvent) = String "deployment_status"
toJSON (WebhookForkEvent) = String "fork"
toJSON (WebhookGollumEvent) = String "gollum"
toJSON (WebhookIssueCommentEvent) = String "issue_comment"
toJSON (WebhookIssuesEvent) = String "issues"
toJSON (WebhookMemberEvent) = String "member"
toJSON (WebhookPageBuildEvent) = String "page_build"
toJSON (WebhookPublicEvent) = String "public"
toJSON (WebhookPullRequestReviewCommentEvent) = String "pull_request_review_comment"
toJSON (WebhookPullRequestEvent) = String "pull_request"
toJSON (WebhookPushEvent) = String "push"
toJSON (WebhookReleaseEvent) = String "release"
toJSON (WebhookStatusEvent) = String "status"
toJSON (WebhookTeamAddEvent) = String "team_add"
toJSON (WebhookWatchEvent) = String "watch"
instance FromJSON PingEvent where
parseJSON (Object o) =
PingEvent <$> o .: "zen"
<*> o .: "hook"
<*> o .: "hook_id"
parseJSON _ = fail "Could not build a PingEvent"
instance FromJSON SearchReposResult where
parseJSON (Object o) =
SearchReposResult <$> o .: "total_count"
<*> o .:< "items"
parseJSON _ = fail "Could not build a SearchReposResult"
instance FromJSON Repo where
parseJSON (Object o) =
Repo <$> o .:? "ssh_url"
<*> o .: "description"
<*> o .:? "created_at"
<*> o .: "html_url"
<*> o .:? "svn_url"
<*> o .:? "forks"
<*> o .:? "homepage"
<*> o .: "fork"
<*> o .:? "git_url"
<*> o .: "private"
<*> o .:? "clone_url"
<*> o .:? "size"
<*> o .:? "updated_at"
<*> o .:? "watchers"
<*> o .: "owner"
<*> o .: "name"
<*> o .:? "language"
<*> o .:? "master_branch"
<*> o .:? "pushed_at"
<*> o .: "id"
<*> o .: "url"
<*> o .:? "open_issues"
<*> o .:? "has_wiki"
<*> o .:? "has_issues"
<*> o .:? "has_downloads"
<*> o .:? "parent"
<*> o .:? "source"
<*> o .: "hooks_url"
parseJSON _ = fail "Could not build a Repo"
instance FromJSON SearchCodeResult where
parseJSON (Object o) =
SearchCodeResult <$> o .: "total_count"
<*> o .:< "items"
parseJSON _ = fail "Could not build a SearchCodeResult"
instance FromJSON Code where
parseJSON (Object o ) =
Code <$> o .: "name"
<*> o .: "path"
<*> o .: "sha"
<*> o .: "url"
<*> o .: "git_url"
<*> o .: "html_url"
<*> o .: "repository"
parseJSON _ = fail "Could not build a Code"
instance FromJSON RepoRef where
parseJSON (Object o) =
RepoRef <$> o .: "owner"
<*> o .: "name"
parseJSON _ = fail "Could not build a RepoRef"
instance FromJSON Contributor where
parseJSON (Object o)
| o `at` "type" == (Just "Anonymous") =
AnonymousContributor <$> o .: "contributions"
<*> o .: "name"
| otherwise =
KnownContributor <$> o .: "contributions"
<*> o .: "avatar_url"
<*> o .: "login"
<*> o .: "url"
<*> o .: "id"
<*> o .: "gravatar_id"
parseJSON _ = fail "Could not build a Contributor"
instance FromJSON Languages where
parseJSON (Object o) =
Languages <$>
mapM (\name -> Language (T.unpack name) <$> o .: name)
(Map.keys o)
parseJSON _ = fail "Could not build Languages"
instance FromJSON Tag where
parseJSON (Object o) =
Tag <$> o .: "name"
<*> o .: "zipball_url"
<*> o .: "tarball_url"
<*> o .: "commit"
parseJSON _ = fail "Could not build a Tag"
instance FromJSON Branch where
parseJSON (Object o) = Branch <$> o .: "name" <*> o .: "commit"
parseJSON _ = fail "Could not build a Branch"
instance FromJSON BranchCommit where
parseJSON (Object o) = BranchCommit <$> o .: "sha" <*> o .: "url"
parseJSON _ = fail "Could not build a BranchCommit"
instance FromJSON DetailedOwner where
parseJSON (Object o)
| o `at` "gravatar_id" == Nothing =
DetailedOrganization <$> o .: "created_at"
<*> o .: "type"
<*> o .: "public_gists"
<*> o .: "avatar_url"
<*> o .: "followers"
<*> o .: "following"
<*> o .:? "blog"
<*> o .:? "bio"
<*> o .: "public_repos"
<*> o .:? "name"
<*> o .:? "location"
<*> o .:? "company"
<*> o .: "url"
<*> o .: "id"
<*> o .: "html_url"
<*> o .: "login"
| otherwise =
DetailedUser <$> o .: "created_at"
<*> o .: "type"
<*> o .: "public_gists"
<*> o .: "avatar_url"
<*> o .: "followers"
<*> o .: "following"
<*> o .:? "hireable"
<*> o .: "gravatar_id"
<*> o .:? "blog"
<*> o .:? "bio"
<*> o .: "public_repos"
<*> o .:? "name"
<*> o .:? "location"
<*> o .:? "company"
<*> o .:? "email"
<*> o .: "url"
<*> o .: "id"
<*> o .: "html_url"
<*> o .: "login"
parseJSON _ = fail "Could not build a DetailedOwner"
instance FromJSON RepoWebhook where
parseJSON (Object o) =
RepoWebhook <$> o .: "url"
<*> o .: "test_url"
<*> o .: "id"
<*> o .: "name"
<*> o .: "active"
<*> o .: "events"
<*> o .: "config"
<*> o .: "last_response"
<*> o .: "updated_at"
<*> o .: "created_at"
parseJSON _ = fail "Could not build a RepoWebhook"
instance FromJSON RepoWebhookResponse where
parseJSON (Object o) =
RepoWebhookResponse <$> o .: "code"
<*> o .: "status"
<*> o .: "message"
parseJSON _ = fail "Could not build a RepoWebhookResponse"
instance FromJSON Content where
parseJSON o@(Object _) = ContentFile <$> parseJSON o
parseJSON (Array os) = ContentDirectory <$> (mapM parseJSON $ V.toList os)
parseJSON _ = fail "Could not build a Content"
instance FromJSON ContentData where
parseJSON (Object o) =
ContentData <$> o .: "type"
<*> o .: "encoding"
<*> o .: "size"
<*> o .: "name"
<*> o .: "path"
<*> o .: "content"
<*> o .: "sha"
<*> o .: "url"
<*> o .: "git_url"
<*> o .: "html_url"
parseJSON _ = fail "Could not build a ContentData"
-- | A slightly more generic version of Aeson's @(.:?)@, using `mzero' instead
-- of `Nothing'.
(.:<) :: (FromJSON a) => Object -> T.Text -> Parser [a]
obj .:< key = case Map.lookup key obj of
Nothing -> pure mzero
Just v -> parseJSON v
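-- For example (illustrative): @o .:< "files"@ parses the @files@ array if
-- present and yields @[]@ (via 'mzero' for lists) when the key is missing,
-- which is how optional list fields such as @commits@ and @files@ are
-- handled in the instances above.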
-- | Produce all values for the given key.
values :: (Eq k, Hashable k, FromJSON v) => Map.HashMap k Value -> k -> Parser v
obj `values` key =
let (Object children) = findWithDefault (Object Map.empty) key obj in
parseJSON $ Array $ V.fromList $ Map.elems children
-- | Produce the value for the last key by traversing.
(<.:>) :: (FromJSON v) => Object -> [T.Text] -> Parser v
obj <.:> [key] = obj .: key
obj <.:> (key:keys) =
let (Object nextObj) = findWithDefault (Object Map.empty) key obj in
nextObj <.:> keys
_ <.:> [] = fail "must have a pair"
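-- For example (illustrative), given a JSON object of the shape
-- @{"self": {"href": "..."}}@, the nested href can be extracted with
--
-- > o <.:> ["self", "href"]
--
-- as done in the 'PullRequestLinks' instance above.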
-- | Produce the value for the given key, maybe.
at :: Object -> T.Text -> Maybe Value
obj `at` key = Map.lookup key obj
-- Taken from Data.Map:
findWithDefault :: (Eq k, Hashable k) => v -> k -> Map.HashMap k v -> v
findWithDefault def k m =
case Map.lookup k m of
Nothing -> def
Just x -> x
|
thoughtbot/github
|
Github/Data.hs
|
bsd-3-clause
| 24,255 | 10 | 59 | 8,315 | 6,603 | 3,288 | 3,315 | 635 | 2 |
{-# LANGUAGE StandaloneDeriving #-}
module HPath.Path
( Path(..)
, parse
, url
) where
import Data.List
import qualified Text.ParserCombinators.Parsec (parse)
import Text.ParserCombinators.Parsec hiding (parse)
import Text.ParserCombinators.Parsec.Char
import HPath.Parser.Lower
data Path = Path [String] String String
deriving instance Eq Path
deriving instance Ord Path
deriving instance Show Path
parse :: String -> Either ParseError Path
parse s = Text.ParserCombinators.Parsec.parse (qualified []) s s
qualified [] = modules []
qualified (mod:mods) = do
choice
[ try (modules (mod:mods))
, do
name <- choice [varid, varsym, conid, consym]
return (Path (reverse mods) mod name)
]
modules mods = do
mod <- modid
char '.'
qualified (mod:mods)
url :: Path -> String
url (Path h m d) = "hpath://" ++ intercalate "." (h ++ [m, d])
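-- Illustrative round trip (assuming 'modid' accepts ordinary
-- module-name components):
--
-- > parse "HPath.Path.url" == Right (Path ["HPath"] "Path" "url")
-- > url (Path ["HPath"] "Path" "url") == "hpath://HPath.Path.url"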
|
solidsnack/hpath
|
HPath/Path.hs
|
bsd-3-clause
| 1,017 | 0 | 15 | 341 | 329 | 176 | 153 | -1 | -1 |
{-# OPTIONS_GHC -fno-warn-name-shadowing #-}
{-# LANGUAGE FlexibleContexts, FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ImplicitParams #-}
{-# LANGUAGE OverloadedStrings, DisambiguateRecordFields #-}
import qualified Text.XML.Hexml as Hexml
import Control.Monad
import Data.Char
import Data.Foldable
import Data.List (sort)
import Data.Monoid
import qualified Data.ByteString.Char8 as BS
import qualified Data.Vector.Storable as V
import System.Process (callCommand)
import System.FilePath
import Text.Printf
import Config
import Text.Xml.Tiny
import Text.Xml.Tiny.Internal(Node(..), Attribute(..), ParseDetails(ParseDetails), AttributeParseDetails(..), Slice)
import qualified Text.Xml.Tiny.Internal as Slice
examples :: [(Bool, BS.ByteString)]
examples =
[(True,"<hello>world</hello>")
,(True,"<hello/>")
,(True, "<test id='bob'>here<extra/>there</test>")
,(True, "<test /><close />")
,(True, "<test /><!-- comment > --><close />")
,(True, "<test id=\"bob value\" another-attr=\"test with <\">here </test> more text at the end<close />")
,(False, "<test></more>")
,(False, "<test")
,(True, "<?xml version=\"1.1\"?>\n<greeting>Hello, world!</greeting>")
]
xmlFiles = [ "mail", "benchmark" ]
main = do
forM_ examples $ \(parses,src) -> do
case parse src of
Left err ->
when parses $ fail ("Unexpected failure on " ++ BS.unpack src ++ ": " ++ show err)
Right doc -> do
unless parses $ fail ( "Unexpected success on " ++ BS.unpack src)
print src
print doc
let Right doc = parse "<test id=\"1\" extra=\"2\" />\n<test id=\"2\" /><b><test id=\"3\" /></b><test id=\"4\" /><test />"
map name (children doc) === ["test","test","b","test","test"]
location (children doc !! 2) === (2,16)
length (childrenBy doc "test") === 4
length (childrenBy doc "b") === 1
length (childrenBy doc "extra") === 0
attributes (head $ children doc) === [Attribute "id" "1", Attribute "extra" "2"]
map (`attributeBy` "id") (childrenBy doc "test") === map (fmap (Attribute "id")) [Just "1", Just "2", Just "4", Nothing]
Right _ <- return $ parse $ "<test " <> BS.unwords [BS.pack $ "x" ++ show i ++ "='value'" | i <- [1..10000]] <> " />"
Right _ <- return $ parse $ BS.unlines $ replicate 10000 "<test x='value' />"
let attrs = ["usd:jpy","test","extra","more","stuff","jpy:usd","xxx","xxxx"]
Right doc <- return $ parse $ "<test " <> BS.unwords [x <> "='" <> x <> "'" | x <- attrs] <> ">middle</test>"
[c] <- return $ childrenBy doc "test"
forM_ attrs $ \a -> attributeBy c a === Just (Attribute a a)
forM_ ["missing","gone","nothing"] $ \a -> attributeBy c a === Nothing
forM_ xmlFiles $ \name -> do
putStrLn ""
let path = "xml" </> name <.> "xml"
let pathGz = path <.> ".bz2"
callCommand $ "bunzip2 -f -k " ++ pathGz
xml <- BS.readFile path
let us = either (error $ "failed to parse: " ++ path) id $ parse xml
checkStructure us
let hexml = either (error $ "Hexml failed to parse: " ++ path ) id $ Hexml.parse xml
testEq us hexml
putStrLn "\nSuccess"
checkFind :: Node -> IO ()
checkFind n = do
forM_ (attributes n) $ \a -> attributeBy n (attributeName a) === Just a
attributeBy n "xxx" === (Nothing :: Maybe Attribute)
let cs = children n
forM_ ("xxx":map name cs) $ \c ->
map outer (filter ((==) c . name) cs) === map outer (childrenBy n c)
mapM_ checkFind $ children n
pairs f (a:b:rest) = f a b && pairs f (b:rest)
pairs f _ = True
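-- 'pairs' checks a predicate on every adjacent pair, e.g. (illustrative):
--
-- > pairs (<=) [1, 2, 2, 3]  == True
-- > pairs (<=) [1, 3, 2]     == False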
checkStructure :: Config => Node -> IO ()
checkStructure n = checkNode [] n where
checkNode path n@Node{attributesV, slices=ParseDetails{attributes}} = do
let nn = children n
unless (sorted nn) $ fail "not sorted"
unless (pairs (nonOverlapping path) nn) $ fail "overlapping children nodes"
unless (pairs nonOverlappingA (Slice.vector attributes attributesV)) $ fail "overlapping attributes"
putChar '.'
forM_ nn $ \n' -> checkNode (name n : path) n'
nonOverlapping :: Config => [BS.ByteString] -> Node -> Node -> Bool
nonOverlapping path n1@Node{slices=ParseDetails{outer=o1}} n2@Node{slices=ParseDetails{outer=o2}} =
nonOverlappingS o1 o2
|| error (printf "%s Overlapping nodes: %s(%s) %s(%s)" (show path) (show$ outer n1) (show $ location n1) (show$ outer n2) (show$ location n2))
nonOverlappingA :: Config => AttributeParseDetails -> AttributeParseDetails -> Bool
nonOverlappingA a1@(AttributeParseDetails n v) a2@(AttributeParseDetails n' v') =
let slices = [n,v,n',v']
in and [ s >= s' || nonOverlappingS s s'
| s <- slices, s' <- slices]
|| error (printf "overlapping attributes" (show a1) (show a2))
nonOverlappingS :: Config => Slice -> Slice -> Bool
nonOverlappingS s1 s2 = Slice.end s1 <= Slice.start s2
|| Slice.end s2 <= Slice.start s1
-- || error (printf "Overlapping slices: %s, %s" (show s1) (show s2))
sorted nn =
let outers = map (Slice.start.Slice.outer.slices) nn
in sort outers == outers
|| error ("Internal error - nodes not sorted: " ++
show [ (name n, Slice.start(Slice.outer(slices n))) | n <- nn])
class (Show a, Show b) => TestEq a b where testEq :: a -> b -> IO ()
(===) :: Config => TestEq a a => a -> a -> IO ()
(===) = testEq
instance (Show a, Eq a) => TestEq a a where
a `testEq` b = if a == b then putChar '.' else error $ "mismatch, " ++ show a ++ " /= " ++ show b
instance TestEq Node Hexml.Node where
testEq n n' = do
name n `testEq` Hexml.name n'
test "attributes" (attributes n) (Hexml.attributes n')
test "contents" (contents n) (Hexml.contents n')
where
test (msg :: String) aa bb
| length aa == length bb = zipWithM_ testEq aa bb
| otherwise = error$ printf "Length of %s does not match (%d /= %d):\n%s\n---------------\n%s" msg (length aa) (length bb) (show aa) (show bb)
instance TestEq Attribute Hexml.Attribute where
Attribute n v `testEq` Hexml.Attribute n' v' = do
n `testEq` n'
v `testEq` v'
instance (Show a, Show b, TestEq a a', TestEq b b') => TestEq (Either a b) (Either a' b') where
Left e `testEq` Left e' = e `testEq` e'
Right x `testEq` Right x' = x `testEq` x'
testEq a b = error $ printf "mismatch in children: %s /= %s" (show a) (show b)
debugShow :: Node -> String
debugShow n =
unlines $
"Nodes buffer: "
: [ " " ++ show n | n <- V.toList $ nodesV n]
++ showNodeContents (Right n)
where
showNodeContents :: Either BS.ByteString Node -> [String]
showNodeContents (Right n) =
[ "Node contents:"
, " name: " ++ show (name n)
, " slices: " ++ show (slices n)
, " attributes: " ++ (show $ attributes n)
, " contents: "
] ++
[ " " ++ l | n' <- contents n, l <- showNodeContents n']
showNodeContents (Left txt) =
[ "Text content: " ++ BS.unpack txt ]
|
pepeiborra/bytestring-xml
|
Test.hs
|
bsd-3-clause
| 7,220 | 1 | 21 | 1,738 | 2,661 | 1,345 | 1,316 | 145 | 2 |
{-# LANGUAGE TemplateHaskell #-}
module Server.ClientFrameT where
import Control.Lens (makeLenses)
import qualified Data.Vector.Storable as SV
import Data.Word (Word8)
import qualified Constants
import Game.PlayerStateT
import Types
makeLenses ''ClientFrameT
newClientFrameT :: ClientFrameT
newClientFrameT = ClientFrameT
{ _cfAreaBytes = 0
, _cfAreaBits = SV.replicate (Constants.maxMapAreas `div` 8) 0
, _cfPlayerState = newPlayerStateT
, _cfNumEntities = 0
, _cfFirstEntity = 0
, _cfSentTime = 0
}
|
ksaveljev/hake-2
|
src/Server/ClientFrameT.hs
|
bsd-3-clause
| 599 | 0 | 10 | 160 | 122 | 76 | 46 | 17 | 1 |
{-|
Module: Data.Astro.Planet
Description: Planet calculations
Copyright: Alexander Ignatyev, 2016
Planet calculations.
= Example
=== /Initialisation/
@
import Data.Astro.Time.JulianDate
import Data.Astro.Coordinate
import Data.Astro.Types
import Data.Astro.Effects
import Data.Astro.CelestialObject.RiseSet
import Data.Astro.Planet
ro :: GeographicCoordinates
ro = GeoC (fromDMS 51 28 40) (-(fromDMS 0 0 5))
dt :: LocalCivilTime
dt = lctFromYMDHMS (DH 1) 2017 6 25 10 29 0
today :: LocalCivilDate
today = lcdFromYMD (DH 1) 2017 6 25
jupiterDetails :: PlanetDetails
jupiterDetails = j2010PlanetDetails Jupiter
earthDetails :: PlanetDetails
earthDetails = j2010PlanetDetails Earth
jupiterPosition :: JulianDate -> EquatorialCoordinates1
jupiterPosition = planetPosition planetTrueAnomaly1 jupiterDetails earthDetails
@
=== /Calculate Coordinates/
@
jupiterEC1 :: EquatorialCoordinates1
jupiterEC1 = jupiterPosition (lctUniversalTime dt)
-- EC1 {e1Declination = DD (-4.104626810672402), e1RightAscension = DH 12.863365504382228}
jupiterHC :: HorizonCoordinates
jupiterHC = ec1ToHC ro (lctUniversalTime dt) jupiterEC1
-- HC {hAltitude = DD (-30.67914598469227), hAzimuth = DD 52.29376845044007}
@
=== /Calculate Distance/
@
jupiterDistance :: AstronomicalUnits
jupiterDistance = planetDistance1 jupiterDetails earthDetails (lctUniversalTime dt)
-- AU 5.193435872521039
@
=== /Calculate Angular Size/
@
jupiterAngularSize :: DecimalDegrees
jupiterAngularSize = planetAngularDiameter jupiterDetails jupiterDistance
-- DD 1.052289877865987e-2
toDMS jupiterAngularSize
-- (0,0,37.88243560317554)
@
=== /Calculate Rise and Set/
@
verticalShift :: DecimalDegrees
verticalShift = refract (DD 0) 12 1012
-- DD 0.5660098245614035
jupiterRiseSet :: RiseSetMB
jupiterRiseSet = riseAndSet2 0.000001 jupiterPosition ro verticalShift today
-- RiseSet
-- (Just (2017-06-25 13:53:27.3109 +1.0,DD 95.88943953535569))
-- (Just (2017-06-25 01:21:23.5835 +1.0,DD 264.1289033612776))
@
-}
module Data.Astro.Planet
(
Details.Planet(..)
, Details.PlanetDetails(..)
, Details.j2010PlanetDetails
, Mechanics.planetTrueAnomaly1
, Mechanics.planetTrueAnomaly2
, Mechanics.planetPosition
, Mechanics.planetPosition1
, Mechanics.planetDistance
, Mechanics.planetDistance1
, Mechanics.planetAngularDiameter
, Mechanics.planetPhase1
, Mechanics.planetBrightLimbPositionAngle
)
where
import qualified Data.Astro.Planet.PlanetDetails as Details
import qualified Data.Astro.Planet.PlanetMechanics as Mechanics
|
Alexander-Ignatyev/astro
|
src/Data/Astro/Planet.hs
|
bsd-3-clause
| 2,526 | 0 | 5 | 312 | 98 | 65 | 33 | 16 | 0 |
import Codec.Picture
import Codec.Picture.Gif
import ImageOutput
import System.IO
main :: IO()
main = createGifColor =<< sequence [readImage ("./out/frame" ++ show i ++ ".png") >>= decode | i <- [1..10]]
decode :: Either String DynamicImage -> IO (Image PixelRGB8)
decode (Right (ImageRGB8 image)) = return image
decode (Right _)                 = error "decode: expected an 8-bit RGB image"
decode (Left err)                = error ("decode: " ++ err)
|
veniversum/fractal-haskell
|
src/imagesToGif.hs
|
bsd-3-clause
| 352 | 0 | 13 | 84 | 125 | 64 | 61 | 8 | 1 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree. An additional grant
-- of patent rights can be found in the PATENTS file in the same directory.
{-# LANGUAGE GADTs #-}
{-# LANGUAGE OverloadedStrings #-}
module Duckling.PhoneNumber.Rules
( rules ) where
import qualified Data.Text as Text
import Prelude
import Data.String
import Duckling.Dimensions.Types
import Duckling.Numeral.Helpers (parseInt)
import Duckling.PhoneNumber.Types (PhoneNumberData(..))
import qualified Duckling.PhoneNumber.Types as TPhoneNumber
import Duckling.Regex.Types
import Duckling.Types
rulePhoneNumber :: Rule
rulePhoneNumber = Rule
{ name = "phone number"
, pattern =
      -- We somewhat arbitrarily use 20 here to limit the length of matches;
      -- otherwise, due to backtracking, the regexp will take a very long time
      -- or run out of stack for some inputs.
[ regex $
"(?:\\(?\\+(\\d{1,2})\\)?[\\s-\\.]*)?" ++ -- area code
"((?=[-\\d()\\s\\.]{6,16}(?:\\s*e?xt?\\.?\\s*(?:\\d{1,20}))?(?:[^\\d]+|$))(?:[\\d(]{1,20}(?:[-)\\s\\.]*\\d{1,20}){0,20}){1,20})" ++ -- nums
"(?:\\s*e?xt?\\.?\\s*(\\d{1,20}))?" -- extension
]
, prod = \xs -> case xs of
(Token RegexMatch (GroupMatch (code:nums:ext:_)):_) ->
let parseNum x = toInteger <$> parseInt x
mcode = parseNum code
mext = parseNum ext
cleanup = Text.filter (not . isWhitespace)
isWhitespace x = elem x ['.', ' ', '-', '\t', '(', ')']
in Just . Token PhoneNumber $ PhoneNumberData
{ TPhoneNumber.prefix = mcode
, TPhoneNumber.number = cleanup nums
, TPhoneNumber.extension = mext
}
_ -> Nothing
}
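-- Illustrative inputs this rule is intended to match (not taken from the
-- official corpus): a number such as "+1 (650) 123-4567 ext 897" would
-- yield prefix 1, number "6501234567" (after 'cleanup') and extension 897,
-- while "650.123.4567" would yield just the number.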
rules :: [Rule]
rules = [ rulePhoneNumber ]
|
rfranek/duckling
|
Duckling/PhoneNumber/Rules.hs
|
bsd-3-clause
| 1,886 | 0 | 19 | 414 | 334 | 199 | 135 | 35 | 2 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE Trustworthy #-}
-- |
-- Module : Crypto.Hash.SHA256
-- License : BSD-3
-- Maintainer : Herbert Valerio Riedel <[email protected]>
-- Stability : stable
--
-- A module containing <https://en.wikipedia.org/wiki/SHA-2 SHA-256> bindings
--
module Crypto.Hash.SHA256
(
-- * Incremental API
--
-- | This API is based on 4 different functions, similar to the
-- lowlevel operations of a typical hash:
--
-- - 'init': create a new hash context
-- - 'update': update non-destructively a new hash context with a strict bytestring
-- - 'updates': same as update, except that it takes a list of strict bytestrings
-- - 'finalize': finalize the context and returns a digest bytestring.
--
-- all those operations are completely pure, and instead of
-- changing the context as usual in others language, it
-- re-allocates a new context each time.
--
-- Example:
--
-- > import qualified Data.ByteString
-- > import qualified Crypto.Hash.SHA256 as SHA256
-- >
-- > main = print digest
-- > where
-- > digest = SHA256.finalize ctx
-- > ctx = foldl SHA256.update ctx0 (map Data.ByteString.pack [ [1,2,3], [4,5,6] ])
-- > ctx0 = SHA256.init
Ctx(..)
, init -- :: Ctx
, update -- :: Ctx -> ByteString -> Ctx
, updates -- :: Ctx -> [ByteString] -> Ctx
, finalize -- :: Ctx -> ByteString
, finalizeAndLength -- :: Ctx -> (ByteString,Word64)
    , start              -- :: ByteString -> Ctx
, startlazy -- :: L.ByteString -> Ctx
-- * Single Pass API
--
-- | This API use the incremental API under the hood to provide
-- the common all-in-one operations to create digests out of a
-- 'ByteString' and lazy 'L.ByteString'.
--
-- - 'hash': create a digest ('init' + 'update' + 'finalize') from a strict 'ByteString'
-- - 'hashlazy': create a digest ('init' + 'update' + 'finalize') from a lazy 'L.ByteString'
-- - 'hashlazyAndLength': create a digest ('init' + 'update' + 'finalizeAndLength') from a lazy 'L.ByteString'
--
-- Example:
--
-- > import qualified Data.ByteString
-- > import qualified Crypto.Hash.SHA256 as SHA256
-- >
-- > main = print $ SHA256.hash (Data.ByteString.pack [0..255])
--
-- __NOTE__: The returned digest is a binary 'ByteString'. For
-- converting to a base16/hex encoded digest the
-- <https://hackage.haskell.org/package/base16-bytestring base16-bytestring>
-- package is recommended.
, hash -- :: ByteString -> ByteString
, hashlazy -- :: L.ByteString -> ByteString
, hashlazyAndLength -- :: L.ByteString -> (ByteString,Int64)
-- ** HMAC-SHA-256
--
-- | <https://tools.ietf.org/html/rfc2104 RFC2104>-compatible
-- <https://en.wikipedia.org/wiki/HMAC HMAC>-SHA-256 digests
, hmac -- :: ByteString -> ByteString -> ByteString
, hmaclazy -- :: ByteString -> L.ByteString -> ByteString
, hmaclazyAndLength -- :: ByteString -> L.ByteString -> (ByteString,Word64)
-- ** HKDF-SHA-256
--
-- | <https://tools.ietf.org/html/rfc5869 RFC5869>-compatible
-- <https://en.wikipedia.org/wiki/HKDF HKDF>-SHA-256 key derivation function
, hkdf
) where
import Data.Bits (xor)
import Data.ByteString (ByteString)
import qualified Data.ByteString as B
import Data.ByteString.Internal (ByteString (PS), create,
createAndTrim, mallocByteString,
memcpy, toForeignPtr)
import qualified Data.ByteString.Lazy as L
import Data.ByteString.Unsafe (unsafeUseAsCStringLen)
import Data.Word
import Foreign.C.Types
import Foreign.ForeignPtr (withForeignPtr)
import Foreign.Marshal.Alloc
import Foreign.Ptr
import Prelude hiding (init)
import System.IO.Unsafe (unsafeDupablePerformIO)
import Crypto.Hash.SHA256.FFI
-- | perform IO for hashes that do allocation and ffi.
-- unsafeDupablePerformIO is used when possible as the
-- computation is pure and the output is directly linked
-- to the input. we also do not modify anything after it has
-- been returned to the user.
unsafeDoIO :: IO a -> a
unsafeDoIO = unsafeDupablePerformIO
-- keep this synchronised with cbits/sha256.h
{-# INLINE digestSize #-}
digestSize :: Int
digestSize = 32
{-# INLINE sizeCtx #-}
sizeCtx :: Int
sizeCtx = 104
{-# INLINE withByteStringPtr #-}
withByteStringPtr :: ByteString -> (Ptr Word8 -> IO a) -> IO a
withByteStringPtr b f =
withForeignPtr fptr $ \ptr -> f (ptr `plusPtr` off)
where (fptr, off, _) = toForeignPtr b
{-# INLINE create' #-}
-- | Variant of 'create' which additionally returns a value produced by the filler action
create' :: Int -> (Ptr Word8 -> IO a) -> IO (ByteString,a)
create' l f = do
fp <- mallocByteString l
x <- withForeignPtr fp $ \p -> f p
let bs = PS fp 0 l
return $! x `seq` bs `seq` (bs,x)
copyCtx :: Ptr Ctx -> Ptr Ctx -> IO ()
copyCtx dst src = memcpy (castPtr dst) (castPtr src) (fromIntegral sizeCtx)
withCtxCopy :: Ctx -> (Ptr Ctx -> IO ()) -> IO Ctx
withCtxCopy (Ctx ctxB) f = Ctx `fmap` createCtx
where
createCtx = create sizeCtx $ \dstPtr ->
withByteStringPtr ctxB $ \srcPtr -> do
copyCtx (castPtr dstPtr) (castPtr srcPtr)
f (castPtr dstPtr)
withCtxThrow :: Ctx -> (Ptr Ctx -> IO a) -> IO a
withCtxThrow (Ctx ctxB) f =
allocaBytes sizeCtx $ \dstPtr ->
withByteStringPtr ctxB $ \srcPtr -> do
copyCtx (castPtr dstPtr) (castPtr srcPtr)
f (castPtr dstPtr)
withCtxNew :: (Ptr Ctx -> IO ()) -> IO Ctx
withCtxNew f = Ctx `fmap` create sizeCtx (f . castPtr)
withCtxNewThrow :: (Ptr Ctx -> IO a) -> IO a
withCtxNewThrow f = allocaBytes sizeCtx (f . castPtr)
-- 'safe' call overhead is negligible for 4KiB and more
c_sha256_update :: Ptr Ctx -> Ptr Word8 -> CSize -> IO ()
c_sha256_update pctx pbuf sz
| sz < 4096 = c_sha256_update_unsafe pctx pbuf sz
| otherwise = c_sha256_update_safe pctx pbuf sz
-- 'safe' call overhead is negligible for 4KiB and more
c_sha256_hash :: Ptr Word8 -> CSize -> Ptr Word8 -> IO ()
c_sha256_hash pbuf sz pout
| sz < 4096 = c_sha256_hash_unsafe pbuf sz pout
| otherwise = c_sha256_hash_safe pbuf sz pout
updateInternalIO :: Ptr Ctx -> ByteString -> IO ()
updateInternalIO ptr d =
unsafeUseAsCStringLen d (\(cs, len) -> c_sha256_update ptr (castPtr cs) (fromIntegral len))
finalizeInternalIO :: Ptr Ctx -> IO ByteString
finalizeInternalIO ptr = create digestSize (c_sha256_finalize ptr)
finalizeInternalIO' :: Ptr Ctx -> IO (ByteString,Word64)
finalizeInternalIO' ptr = create' digestSize (c_sha256_finalize_len ptr)
{-# NOINLINE init #-}
-- | create a new hash context
init :: Ctx
init = unsafeDoIO $ withCtxNew c_sha256_init
validCtx :: Ctx -> Bool
validCtx (Ctx b) = B.length b == sizeCtx
{-# NOINLINE update #-}
-- | update a context with a bytestring
update :: Ctx -> ByteString -> Ctx
update ctx d
| validCtx ctx = unsafeDoIO $ withCtxCopy ctx $ \ptr -> updateInternalIO ptr d
| otherwise = error "SHA256.update: invalid Ctx"
{-# NOINLINE updates #-}
-- | updates a context with multiple bytestrings
updates :: Ctx -> [ByteString] -> Ctx
updates ctx d
| validCtx ctx = unsafeDoIO $ withCtxCopy ctx $ \ptr -> mapM_ (updateInternalIO ptr) d
| otherwise = error "SHA256.updates: invalid Ctx"
{-# NOINLINE finalize #-}
-- | finalize the context into a digest bytestring (32 bytes)
finalize :: Ctx -> ByteString
finalize ctx
| validCtx ctx = unsafeDoIO $ withCtxThrow ctx finalizeInternalIO
| otherwise = error "SHA256.finalize: invalid Ctx"
{-# NOINLINE finalizeAndLength #-}
-- | Variant of 'finalize' also returning length of hashed content
--
-- @since 0.11.101.0
finalizeAndLength :: Ctx -> (ByteString,Word64)
finalizeAndLength ctx
| validCtx ctx = unsafeDoIO $ withCtxThrow ctx finalizeInternalIO'
    | otherwise    = error "SHA256.finalizeAndLength: invalid Ctx"
{-# NOINLINE hash #-}
-- | hash a strict bytestring into a digest bytestring (32 bytes)
hash :: ByteString -> ByteString
-- hash d = unsafeDoIO $ withCtxNewThrow $ \ptr -> c_sha256_init ptr >> updateInternalIO ptr d >> finalizeInternalIO ptr
hash d = unsafeDoIO $ unsafeUseAsCStringLen d $ \(cs, len) -> create digestSize (c_sha256_hash (castPtr cs) (fromIntegral len))
{-# NOINLINE start #-}
-- | hash a strict bytestring into a Ctx
start :: ByteString -> Ctx
start d = unsafeDoIO $ withCtxNew $ \ptr -> c_sha256_init ptr >> updateInternalIO ptr d
{-# NOINLINE hashlazy #-}
-- | hash a lazy bytestring into a digest bytestring (32 bytes)
hashlazy :: L.ByteString -> ByteString
hashlazy l = unsafeDoIO $ withCtxNewThrow $ \ptr ->
c_sha256_init ptr >> mapM_ (updateInternalIO ptr) (L.toChunks l) >> finalizeInternalIO ptr
{-# NOINLINE startlazy #-}
-- | hash a lazy bytestring into a Ctx
startlazy :: L.ByteString -> Ctx
startlazy l = unsafeDoIO $ withCtxNew $ \ptr ->
c_sha256_init ptr >> mapM_ (updateInternalIO ptr) (L.toChunks l)
{-# NOINLINE hashlazyAndLength #-}
-- | Variant of 'hashlazy' which simultaneously computes the hash and length of a lazy bytestring.
--
-- @since 0.11.101.0
hashlazyAndLength :: L.ByteString -> (ByteString,Word64)
hashlazyAndLength l = unsafeDoIO $ withCtxNewThrow $ \ptr ->
c_sha256_init ptr >> mapM_ (updateInternalIO ptr) (L.toChunks l) >> finalizeInternalIO' ptr
-- | Compute 32-byte <https://tools.ietf.org/html/rfc2104 RFC2104>-compatible
-- HMAC-SHA-256 digest for a strict bytestring message
--
-- @since 0.11.100.0
hmac :: ByteString -- ^ secret
-> ByteString -- ^ message
-> ByteString -- ^ digest (32 bytes)
hmac secret msg = hash $ B.append opad (hashlazy $ L.fromChunks [ipad,msg])
where
opad = B.map (xor 0x5c) k'
ipad = B.map (xor 0x36) k'
k' = B.append kt pad
kt = if B.length secret > 64 then hash secret else secret
pad = B.replicate (64 - B.length kt) 0
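-- A minimal usage sketch (key and message values below are illustrative only;
-- hex-encode the binary digest, e.g. with base16-bytestring, for display):
--
-- > import qualified Data.ByteString.Char8 as C8
-- > import qualified Crypto.Hash.SHA256 as SHA256
-- >
-- > tag = SHA256.hmac (C8.pack "secret-key") (C8.pack "some message")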
-- | Compute 32-byte <https://tools.ietf.org/html/rfc2104 RFC2104>-compatible
-- HMAC-SHA-256 digest for a lazy bytestring message
--
-- @since 0.11.100.0
hmaclazy :: ByteString -- ^ secret
-> L.ByteString -- ^ message
-> ByteString -- ^ digest (32 bytes)
hmaclazy secret msg = hash $ B.append opad (hashlazy $ L.append ipad msg)
where
opad = B.map (xor 0x5c) k'
ipad = L.fromChunks [B.map (xor 0x36) k']
k' = B.append kt pad
kt = if B.length secret > 64 then hash secret else secret
pad = B.replicate (64 - B.length kt) 0
-- | Variant of 'hmaclazy' which also returns length of message
--
-- @since 0.11.101.0
hmaclazyAndLength :: ByteString -- ^ secret
-> L.ByteString -- ^ message
-> (ByteString,Word64) -- ^ digest (32 bytes) and length of message
hmaclazyAndLength secret msg =
(hash (B.append opad htmp), sz' - fromIntegral ipadLen)
where
(htmp, sz') = hashlazyAndLength (L.append ipad msg)
opad = B.map (xor 0x5c) k'
ipad = L.fromChunks [B.map (xor 0x36) k']
ipadLen = B.length k'
k' = B.append kt pad
kt = if B.length secret > 64 then hash secret else secret
pad = B.replicate (64 - B.length kt) 0
{-# NOINLINE hkdf #-}
-- | <https://tools.ietf.org/html/rfc6234 RFC6234>-compatible
-- HKDF-SHA-256 key derivation function.
--
-- @since 0.11.101.0
hkdf :: ByteString -- ^ /IKM/ Input keying material
-> ByteString -- ^ /salt/ Optional salt value, a non-secret random value (can be @""@)
-> ByteString -- ^ /info/ Optional context and application specific information (can be @""@)
-> Int -- ^ /L/ length of output keying material in octets (at most 255*32 bytes)
-> ByteString -- ^ /OKM/ Output keying material (/L/ bytes)
hkdf ikm salt info l
| l == 0 = B.empty
| 0 > l || l > 255*32 = error "hkdf: invalid L parameter"
| otherwise = unsafeDoIO $ createAndTrim (32*fromIntegral cnt) (go 0 B.empty)
where
prk = hmac salt ikm
cnt = fromIntegral ((l+31) `div` 32) :: Word8
go :: Word8 -> ByteString -> Ptr Word8 -> IO Int
go !i t !p | i == cnt = return l
| otherwise = do
let t' = hmaclazy prk (L.fromChunks [t,info,B.singleton (i+1)])
withByteStringPtr t' $ \tptr' -> memcpy p tptr' 32
go (i+1) t' (p `plusPtr` 32)
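-- A minimal usage sketch for 'hkdf' (all parameter values below are illustrative
-- only; the /salt/ and /info/ arguments may each be empty):
--
-- > import qualified Data.ByteString.Char8 as C8
-- > import qualified Crypto.Hash.SHA256 as SHA256
-- >
-- > -- derive 42 bytes of output keying material
-- > okm = SHA256.hkdf (C8.pack "input-keying-material") (C8.pack "salt") (C8.pack "info") 42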
|
hvr/cryptohash-sha256
|
src/Crypto/Hash/SHA256.hs
|
bsd-3-clause
| 12,486 | 0 | 19 | 2,934 | 2,637 | 1,408 | 1,229 | 179 | 2 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE TemplateHaskell #-}
module TypedWorkPushing where
import Control.Monad
import Control.Distributed.Process
import Control.Distributed.Process.Closure
import PrimeFactors
slave :: SendPort Integer -> ReceivePort Integer -> Process ()
slave results todo = forever $ do
n <- receiveChan todo
sendChan results (numPrimeFactors n)
sdictInteger :: SerializableDict Integer
sdictInteger = SerializableDict
remotable ['slave, 'sdictInteger]
-- | Wait for n integers and sum them all up
sumIntegers :: ReceivePort Integer -> Int -> Process Integer
sumIntegers rport = go 0
where
go :: Integer -> Int -> Process Integer
go !acc 0 = return acc
go !acc n = do
m <- receiveChan rport
go (acc + m) (n - 1)
master :: Integer -> [NodeId] -> Process Integer
master n slaves = do
(sport, rport) <- newChan
-- Start slave processes
slaveProcesses <- forM slaves $ \nid ->
spawnChannel $(mkStatic 'sdictInteger) nid ($(mkClosure 'slave) sport)
-- Distribute 1 .. n amongst the slave processes
spawnLocal $ forM_ (zip [1 .. n] (cycle slaveProcesses)) $
\(m, them) -> sendChan them m
-- Wait for the result
sumIntegers rport (fromIntegral n)
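-- A hedged sketch of driving 'master'; discovering 'slaveNodes' and building a
-- 'LocalNode' is backend-specific (e.g. distributed-process-simplelocalnet) and
-- is not part of this module:
--
-- > -- runProcess node $ master 1000 slaveNodes >>= liftIO . print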
|
haskell-distributed/distributed-process-demos
|
src/TypedWorkPushing/TypedWorkPushing.hs
|
bsd-3-clause
| 1,160 | 0 | 15 | 232 | 377 | 190 | 187 | -1 | -1 |
{-# OPTIONS_HADDOCK hide #-}
{-# LANGUAGE ExistentialQuantification #-}
-- |
-- Module : Network.TLS.Cipher
-- License : BSD-style
-- Maintainer : Vincent Hanquez <[email protected]>
-- Stability : experimental
-- Portability : unknown
--
module Network.TLS.Cipher
( CipherKeyExchangeType(..)
, Bulk(..)
, BulkFunctions(..)
, BulkDirection(..)
, BulkState(..)
, BulkStream(..)
, BulkBlock
, BulkAEAD
, bulkInit
, Hash(..)
, Cipher(..)
, CipherID
, cipherKeyBlockSize
, BulkKey
, BulkIV
, BulkNonce
, BulkAdditionalData
, cipherAllowedForVersion
, cipherExchangeNeedMoreData
, hasMAC
, hasRecordIV
) where
import Crypto.Cipher.Types (AuthTag)
import Network.TLS.Types (CipherID)
import Network.TLS.Struct (Version(..))
import Network.TLS.Crypto (Hash(..), hashDigestSize)
import qualified Data.ByteString as B
-- FIXME convert to newtype
type BulkKey = B.ByteString
type BulkIV = B.ByteString
type BulkNonce = B.ByteString
type BulkAdditionalData = B.ByteString
data BulkState =
BulkStateStream BulkStream
| BulkStateBlock BulkBlock
| BulkStateAEAD BulkAEAD
| BulkStateUninitialized
instance Show BulkState where
show (BulkStateStream _) = "BulkStateStream"
show (BulkStateBlock _) = "BulkStateBlock"
show (BulkStateAEAD _) = "BulkStateAEAD"
show (BulkStateUninitialized) = "BulkStateUninitialized"
newtype BulkStream = BulkStream (B.ByteString -> (B.ByteString, BulkStream))
type BulkBlock = BulkIV -> B.ByteString -> (B.ByteString, BulkIV)
type BulkAEAD = BulkNonce -> B.ByteString -> BulkAdditionalData -> (B.ByteString, AuthTag)
data BulkDirection = BulkEncrypt | BulkDecrypt
deriving (Show,Eq)
bulkInit :: Bulk -> BulkDirection -> BulkKey -> BulkState
bulkInit bulk direction key =
case bulkF bulk of
BulkBlockF ini -> BulkStateBlock (ini direction key)
BulkStreamF ini -> BulkStateStream (ini direction key)
BulkAeadF ini -> BulkStateAEAD (ini direction key)
data BulkFunctions =
BulkBlockF (BulkDirection -> BulkKey -> BulkBlock)
| BulkStreamF (BulkDirection -> BulkKey -> BulkStream)
| BulkAeadF (BulkDirection -> BulkKey -> BulkAEAD)
hasMAC,hasRecordIV :: BulkFunctions -> Bool
hasMAC (BulkBlockF _ ) = True
hasMAC (BulkStreamF _) = True
hasMAC (BulkAeadF _ ) = False
hasRecordIV = hasMAC
data CipherKeyExchangeType =
CipherKeyExchange_RSA
| CipherKeyExchange_DH_Anon
| CipherKeyExchange_DHE_RSA
| CipherKeyExchange_ECDHE_RSA
| CipherKeyExchange_DHE_DSS
| CipherKeyExchange_DH_DSS
| CipherKeyExchange_DH_RSA
| CipherKeyExchange_ECDH_ECDSA
| CipherKeyExchange_ECDH_RSA
| CipherKeyExchange_ECDHE_ECDSA
deriving (Show,Eq)
data Bulk = Bulk
{ bulkName :: String
, bulkKeySize :: Int
, bulkIVSize :: Int
, bulkBlockSize :: Int
, bulkF :: BulkFunctions
}
instance Show Bulk where
show bulk = bulkName bulk
instance Eq Bulk where
b1 == b2 = and [ bulkName b1 == bulkName b2
, bulkKeySize b1 == bulkKeySize b2
, bulkIVSize b1 == bulkIVSize b2
, bulkBlockSize b1 == bulkBlockSize b2
]
-- | Cipher algorithm
data Cipher = Cipher
{ cipherID :: CipherID
, cipherName :: String
, cipherHash :: Hash
, cipherBulk :: Bulk
, cipherKeyExchange :: CipherKeyExchangeType
, cipherMinVer :: Maybe Version
}
cipherKeyBlockSize :: Cipher -> Int
cipherKeyBlockSize cipher = 2 * (hashDigestSize (cipherHash cipher) + bulkIVSize bulk + bulkKeySize bulk)
where bulk = cipherBulk cipher
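-- As a worked example, for a hypothetical cipher pairing SHA-1 (20-byte digest)
-- with an AES-128-CBC bulk (16-byte IV, 16-byte key), this comes to
-- 2 * (20 + 16 + 16) = 104 bytes of key block, covering the MAC key, encryption
-- key and IV for each direction.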
-- | Check if a specific 'Cipher' is allowed to be used
-- with the version specified
cipherAllowedForVersion :: Version -> Cipher -> Bool
cipherAllowedForVersion ver cipher =
case cipherMinVer cipher of
Nothing -> True
Just cVer -> cVer <= ver
instance Show Cipher where
show c = cipherName c
instance Eq Cipher where
(==) c1 c2 = cipherID c1 == cipherID c2
cipherExchangeNeedMoreData :: CipherKeyExchangeType -> Bool
cipherExchangeNeedMoreData CipherKeyExchange_RSA = False
cipherExchangeNeedMoreData CipherKeyExchange_DH_Anon = True
cipherExchangeNeedMoreData CipherKeyExchange_DHE_RSA = True
cipherExchangeNeedMoreData CipherKeyExchange_ECDHE_RSA = True
cipherExchangeNeedMoreData CipherKeyExchange_DHE_DSS = True
cipherExchangeNeedMoreData CipherKeyExchange_DH_DSS = False
cipherExchangeNeedMoreData CipherKeyExchange_DH_RSA = False
cipherExchangeNeedMoreData CipherKeyExchange_ECDH_ECDSA = True
cipherExchangeNeedMoreData CipherKeyExchange_ECDH_RSA = True
cipherExchangeNeedMoreData CipherKeyExchange_ECDHE_ECDSA = True
|
AaronFriel/hs-tls
|
core/Network/TLS/Cipher.hs
|
bsd-3-clause
| 4,850 | 0 | 11 | 1,093 | 1,068 | 600 | 468 | 118 | 3 |
module Example where
import MAlonzo.Code.Example
rev :: [a] -> [a]
rev = rev' () ()
safeHead :: [a] -> Maybe a
safeHead = safeHead' () ()
|
notogawa/agda-haskell-example
|
src/Example.hs
|
bsd-3-clause
| 141 | 0 | 6 | 29 | 68 | 38 | 30 | 6 | 1 |
module EqUni where
import Uni (Uni(Uni))
instance Eq Uni where
Uni == Uni = True
|
phischu/fragnix
|
tests/quick/ImplicitInstances/EqUni.hs
|
bsd-3-clause
| 86 | 0 | 6 | 20 | 35 | 20 | 15 | 4 | 0 |
{-# LINE 1 "System.Environment.hs" #-}
{-# LANGUAGE Safe #-}
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
-- |
-- Module : System.Environment
-- Copyright : (c) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
--
-- Miscellaneous information about the system environment.
--
-----------------------------------------------------------------------------
module System.Environment
(
getArgs,
getProgName,
getExecutablePath,
getEnv,
lookupEnv,
setEnv,
unsetEnv,
withArgs,
withProgName,
getEnvironment,
) where
import Foreign
import Foreign.C
import System.IO.Error (mkIOError)
import Control.Exception.Base (bracket_, throwIO)
-- import GHC.IO
import GHC.IO.Exception
import GHC.IO.Encoding (getFileSystemEncoding)
import qualified GHC.Foreign as GHC
import Control.Monad
import System.Posix.Internals (withFilePath)
import System.Environment.ExecutablePath
-- ---------------------------------------------------------------------------
-- getArgs, getProgName, getEnv
-- | Computation 'getArgs' returns a list of the program's command
-- line arguments (not including the program name).
getArgs :: IO [String]
getArgs =
alloca $ \ p_argc ->
alloca $ \ p_argv -> do
getProgArgv p_argc p_argv
p <- fromIntegral `liftM` peek p_argc
argv <- peek p_argv
enc <- getFileSystemEncoding
peekArray (p - 1) (advancePtr argv 1) >>= mapM (GHC.peekCString enc)
foreign import ccall unsafe "getProgArgv"
getProgArgv :: Ptr CInt -> Ptr (Ptr CString) -> IO ()
{-|
Computation 'getProgName' returns the name of the program as it was
invoked.
However, this is hard-to-impossible to implement on some non-Unix
OSes, so instead, for maximum portability, we just return the leafname
of the program as invoked. Even then there are some differences
between platforms: on Windows, for example, a program invoked as foo
is probably really @FOO.EXE@, and that is what 'getProgName' will return.
-}
getProgName :: IO String
getProgName =
alloca $ \ p_argc ->
alloca $ \ p_argv -> do
getProgArgv p_argc p_argv
argv <- peek p_argv
unpackProgName argv
unpackProgName :: Ptr (Ptr CChar) -> IO String -- argv[0]
unpackProgName argv = do
enc <- getFileSystemEncoding
s <- peekElemOff argv 0 >>= GHC.peekCString enc
return (basename s)
basename :: FilePath -> FilePath
basename f = go f f
where
go acc [] = acc
go acc (x:xs)
| isPathSeparator x = go xs xs
| otherwise = go acc xs
isPathSeparator :: Char -> Bool
isPathSeparator '/' = True
isPathSeparator _ = False
-- | Computation 'getEnv' @var@ returns the value
-- of the environment variable @var@. For the inverse, POSIX users
-- can use 'System.Posix.Env.putEnv'.
--
-- This computation may fail with:
--
-- * 'System.IO.Error.isDoesNotExistError' if the environment variable
-- does not exist.
getEnv :: String -> IO String
getEnv name = lookupEnv name >>= maybe handleError return
where
handleError = ioe_missingEnvVar name
-- | Return the value of the environment variable @var@, or @Nothing@ if
-- there is no such value.
--
-- For POSIX users, this is equivalent to 'System.Posix.Env.getEnv'.
--
-- @since 4.6.0.0
lookupEnv :: String -> IO (Maybe String)
lookupEnv name =
withCString name $ \s -> do
litstring <- c_getenv s
if litstring /= nullPtr
then do enc <- getFileSystemEncoding
result <- GHC.peekCString enc litstring
return $ Just result
else return Nothing
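-- A small usage sketch (the variable name below is only an example):
--
-- > main = lookupEnv "EDITOR" >>= print
-- > -- prints @Just "..."@ when EDITOR is set, @Nothing@ otherwise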
foreign import ccall unsafe "getenv"
c_getenv :: CString -> IO (Ptr CChar)
ioe_missingEnvVar :: String -> IO a
ioe_missingEnvVar name = ioException (IOError Nothing NoSuchThing "getEnv"
"no environment variable" Nothing (Just name))
-- | @setEnv name value@ sets the specified environment variable to @value@.
--
-- On Windows setting an environment variable to the /empty string/ removes
-- that environment variable from the environment. For the sake of
-- compatibility we adopt that behavior. In particular
--
-- @
-- setEnv name \"\"
-- @
--
-- has the same effect as
--
-- @
-- `unsetEnv` name
-- @
--
-- If you don't care about Windows support and want to set an environment
-- variable to the empty string use @System.Posix.Env.setEnv@ from the @unix@
-- package instead.
--
-- Throws `Control.Exception.IOException` if @name@ is the empty string or
-- contains an equals sign.
--
-- @since 4.7.0.0
setEnv :: String -> String -> IO ()
setEnv key_ value_
| null key = throwIO (mkIOError InvalidArgument "setEnv" Nothing Nothing)
| '=' `elem` key = throwIO (mkIOError InvalidArgument "setEnv" Nothing Nothing)
| null value = unsetEnv key
| otherwise = setEnv_ key value
where
key = takeWhile (/= '\NUL') key_
value = takeWhile (/= '\NUL') value_
setEnv_ :: String -> String -> IO ()
-- NOTE: The 'setenv()' function is not available on all systems, hence we use
-- 'putenv()'. This leaks memory, but so do common implementations of
-- 'setenv()' (AFAIK).
setEnv_ k v = putEnv (k ++ "=" ++ v)
putEnv :: String -> IO ()
putEnv keyvalue = do
s <- getFileSystemEncoding >>= (`GHC.newCString` keyvalue)
-- IMPORTANT: Do not free `s` after calling putenv!
--
-- According to SUSv2, the string passed to putenv becomes part of the
-- environment.
throwErrnoIf_ (/= 0) "putenv" (c_putenv s)
foreign import ccall unsafe "putenv" c_putenv :: CString -> IO CInt
-- | @unsetEnv name@ removes the specified environment variable from the
-- environment of the current process.
--
-- Throws `Control.Exception.IOException` if @name@ is the empty string or
-- contains an equals sign.
--
-- @since 4.7.0.0
unsetEnv :: String -> IO ()
unsetEnv key = withFilePath key (throwErrnoIf_ (/= 0) "unsetEnv" . c_unsetenv)
foreign import ccall unsafe "__hsbase_unsetenv" c_unsetenv :: CString -> IO CInt
{-|
'withArgs' @args act@ - while executing action @act@, have 'getArgs'
return @args@.
-}
withArgs :: [String] -> IO a -> IO a
withArgs xs act = do
p <- System.Environment.getProgName
withArgv (p:xs) act
{-|
'withProgName' @name act@ - while executing action @act@,
have 'getProgName' return @name@.
-}
withProgName :: String -> IO a -> IO a
withProgName nm act = do
xs <- System.Environment.getArgs
withArgv (nm:xs) act
-- Worker routine which marshals and replaces an argv vector for
-- the duration of an action.
withArgv :: [String] -> IO a -> IO a
withArgv = withProgArgv
withProgArgv :: [String] -> IO a -> IO a
withProgArgv new_args act = do
pName <- System.Environment.getProgName
existing_args <- System.Environment.getArgs
bracket_ (setProgArgv new_args)
(setProgArgv (pName:existing_args))
act
setProgArgv :: [String] -> IO ()
setProgArgv argv = do
enc <- getFileSystemEncoding
GHC.withCStringsLen enc argv $ \len css ->
c_setProgArgv (fromIntegral len) css
-- setProgArgv copies the arguments
foreign import ccall unsafe "setProgArgv"
c_setProgArgv :: CInt -> Ptr CString -> IO ()
-- |'getEnvironment' retrieves the entire environment as a
-- list of @(key,value)@ pairs.
--
-- If an environment entry does not contain an @\'=\'@ character,
-- the @key@ is the whole entry and the @value@ is the empty string.
getEnvironment :: IO [(String, String)]
getEnvironment = do
pBlock <- getEnvBlock
if pBlock == nullPtr then return []
else do
enc <- getFileSystemEncoding
stuff <- peekArray0 nullPtr pBlock >>= mapM (GHC.peekCString enc)
return (map divvy stuff)
foreign import ccall unsafe "__hscore_environ"
getEnvBlock :: IO (Ptr CString)
divvy :: String -> (String, String)
divvy str =
case break (=='=') str of
(xs,[]) -> (xs,[]) -- don't barf (like Posix.getEnvironment)
(name,_:value) -> (name,value)
|
phischu/fragnix
|
builtins/base/System.Environment.hs
|
bsd-3-clause
| 8,505 | 0 | 15 | 2,074 | 1,640 | 871 | 769 | 130 | 3 |
{-- snippet main --}
import GHC.Conc (numCapabilities)
import System.Environment (getArgs)
main = do
args <- getArgs
putStrLn $ "command line arguments: " ++ show args
putStrLn $ "number of cores: " ++ show numCapabilities
{-- /snippet main --}
|
binesiyu/ifl
|
examples/ch24/NumCapabilities.hs
|
mit
| 252 | 0 | 8 | 45 | 63 | 32 | 31 | 6 | 1 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DerivingStrategies #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Database.Persist.Sql.Orphan.PersistStore
( withRawQuery
, BackendKey(..)
, toSqlKey
, fromSqlKey
, getFieldName
, getTableName
, tableDBName
, fieldDBName
) where
import Control.Exception (throwIO)
import Control.Monad.IO.Class
import Control.Monad.Trans.Reader (ReaderT, ask)
import Data.Acquire (with)
import qualified Data.Aeson as A
import Data.ByteString.Char8 (readInteger)
import Data.Conduit (ConduitM, runConduit, (.|))
import qualified Data.Conduit.List as CL
import qualified Data.Foldable as Foldable
import Data.Function (on)
import Data.Int (Int64)
import Data.List (find, nubBy)
import qualified Data.Map as Map
import Data.Maybe (isJust)
import Data.Text (Text, unpack)
import qualified Data.Text as T
import Data.Void (Void)
import GHC.Generics (Generic)
import Web.HttpApiData (FromHttpApiData, ToHttpApiData)
import Web.PathPieces (PathPiece)
import Database.Persist
import Database.Persist.Class ()
import Database.Persist.Sql.Class (PersistFieldSql)
import Database.Persist.Sql.Raw
import Database.Persist.Sql.Types
import Database.Persist.Sql.Types.Internal
import Database.Persist.Sql.Util
( commaSeparated
, dbIdColumns
, keyAndEntityColumnNames
, mkInsertValues
, mkUpdateText
, parseEntityValues
, updatePersistValue
)
withRawQuery :: MonadIO m
=> Text
-> [PersistValue]
-> ConduitM [PersistValue] Void IO a
-> ReaderT SqlBackend m a
withRawQuery sql vals sink = do
srcRes <- rawQueryRes sql vals
liftIO $ with srcRes (\src -> runConduit $ src .| sink)
toSqlKey :: ToBackendKey SqlBackend record => Int64 -> Key record
toSqlKey = fromBackendKey . SqlBackendKey
fromSqlKey :: ToBackendKey SqlBackend record => Key record -> Int64
fromSqlKey = unSqlBackendKey . toBackendKey
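-- A hedged usage sketch ('User' and 'userKey' are hypothetical, not defined here):
--
-- > -- toSqlKey 42        :: Key User   -- build a key from a raw Int64 column value
-- > -- fromSqlKey userKey :: Int64      -- recover the raw Int64 for hand-written SQL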
whereStmtForKey :: PersistEntity record => SqlBackend -> Key record -> Text
whereStmtForKey conn k =
T.intercalate " AND "
$ Foldable.toList
$ fmap (<> "=? ")
$ dbIdColumns conn entDef
where
entDef = entityDef $ dummyFromKey k
whereStmtForKeys :: PersistEntity record => SqlBackend -> [Key record] -> Text
whereStmtForKeys conn ks = T.intercalate " OR " $ whereStmtForKey conn `fmap` ks
-- | get the SQL string for the table that a PersistEntity represents
-- Useful for raw SQL queries
--
-- Your backend may provide a more convenient tableName function
-- which does not operate in a Monad
getTableName :: forall record m backend.
( PersistEntity record
, BackendCompatible SqlBackend backend
, Monad m
) => record -> ReaderT backend m Text
getTableName rec = withCompatibleBackend $ do
conn <- ask
return $ connEscapeTableName conn (entityDef $ Just rec)
-- | useful for a backend to implement tableName by adding escaping
tableDBName :: (PersistEntity record) => record -> EntityNameDB
tableDBName rec = getEntityDBName $ entityDef (Just rec)
-- | get the SQL string for the field that an EntityField represents
-- Useful for raw SQL queries
--
-- Your backend may provide a more convenient fieldName function
-- which does not operate in a Monad
getFieldName :: forall record typ m backend.
( PersistEntity record
, PersistEntityBackend record ~ SqlBackend
, BackendCompatible SqlBackend backend
, Monad m
)
=> EntityField record typ -> ReaderT backend m Text
getFieldName rec = withCompatibleBackend $ do
conn <- ask
return $ connEscapeFieldName conn (fieldDB $ persistFieldDef rec)
-- | useful for a backend to implement fieldName by adding escaping
fieldDBName :: forall record typ. (PersistEntity record) => EntityField record typ -> FieldNameDB
fieldDBName = fieldDB . persistFieldDef
instance PersistCore SqlBackend where
newtype BackendKey SqlBackend = SqlBackendKey { unSqlBackendKey :: Int64 }
deriving stock (Show, Read, Eq, Ord, Generic)
deriving newtype (Num, Integral, PersistField, PersistFieldSql, PathPiece, ToHttpApiData, FromHttpApiData, Real, Enum, Bounded, A.ToJSON, A.FromJSON)
instance PersistCore SqlReadBackend where
newtype BackendKey SqlReadBackend = SqlReadBackendKey { unSqlReadBackendKey :: Int64 }
deriving stock (Show, Read, Eq, Ord, Generic)
deriving newtype (Num, Integral, PersistField, PersistFieldSql, PathPiece, ToHttpApiData, FromHttpApiData, Real, Enum, Bounded, A.ToJSON, A.FromJSON)
instance PersistCore SqlWriteBackend where
newtype BackendKey SqlWriteBackend = SqlWriteBackendKey { unSqlWriteBackendKey :: Int64 }
deriving stock (Show, Read, Eq, Ord, Generic)
deriving newtype (Num, Integral, PersistField, PersistFieldSql, PathPiece, ToHttpApiData, FromHttpApiData, Real, Enum, Bounded, A.ToJSON, A.FromJSON)
instance BackendCompatible SqlBackend SqlBackend where
projectBackend = id
instance BackendCompatible SqlBackend SqlReadBackend where
projectBackend = unSqlReadBackend
instance BackendCompatible SqlBackend SqlWriteBackend where
projectBackend = unSqlWriteBackend
instance PersistStoreWrite SqlBackend where
update _ [] = return ()
update k upds = do
conn <- ask
let wher = whereStmtForKey conn k
let sql = T.concat
[ "UPDATE "
, connEscapeTableName conn (entityDef $ Just $ recordTypeFromKey k)
, " SET "
, T.intercalate "," $ map (mkUpdateText conn) upds
, " WHERE "
, wher
]
rawExecute sql $
map updatePersistValue upds `mappend` keyToValues k
insert val = do
conn <- ask
let esql = connInsertSql conn t vals
key <-
case esql of
ISRSingle sql -> withRawQuery sql vals $ do
x <- CL.head
case x of
Just [PersistInt64 i] -> case keyFromValues [PersistInt64 i] of
Left err -> error $ "SQL insert: keyFromValues: PersistInt64 " `mappend` show i `mappend` " " `mappend` unpack err
Right k -> return k
Nothing -> error $ "SQL insert did not return a result giving the generated ID"
Just vals' -> case keyFromValues vals' of
Left e -> error $ "Invalid result from a SQL insert, got: " ++ show vals' ++ ". Error was: " ++ unpack e
Right k -> return k
ISRInsertGet sql1 sql2 -> do
rawExecute sql1 vals
withRawQuery sql2 [] $ do
mm <- CL.head
let m = maybe
(Left $ "No results from ISRInsertGet: " `mappend` tshow (sql1, sql2))
Right mm
-- TODO: figure out something better for MySQL
let convert x =
case x of
[PersistByteString i] -> case readInteger i of -- mssql
Just (ret,"") -> [PersistInt64 $ fromIntegral ret]
_ -> x
_ -> x
-- Yes, it's just <|>. Older bases don't have the
-- instance for Either.
onLeft Left{} x = x
onLeft x _ = x
case m >>= (\x -> keyFromValues x `onLeft` keyFromValues (convert x)) of
Right k -> return k
Left err -> throw $ "ISRInsertGet: keyFromValues failed: " `mappend` err
ISRManyKeys sql fs -> do
rawExecute sql vals
case entityPrimary t of
Nothing ->
error $ "ISRManyKeys is used when Primary is defined " ++ show sql
Just pdef ->
let pks = Foldable.toList $ fmap fieldHaskell $ compositeFields pdef
                        keyvals = map snd $ filter (\(a, _) -> isJust (find (== a) pks)) $ zip (map fieldHaskell $ getEntityFields t) fs
in case keyFromValues keyvals of
Right k -> return k
Left e -> error $ "ISRManyKeys: unexpected keyvals result: " `mappend` unpack e
return key
where
tshow :: Show a => a -> Text
tshow = T.pack . show
throw = liftIO . throwIO . userError . T.unpack
t = entityDef $ Just val
vals = mkInsertValues val
insertMany [] = return []
insertMany vals = do
conn <- ask
case connInsertManySql conn of
Nothing -> mapM insert vals
Just insertManyFn ->
case insertManyFn ent valss of
ISRSingle sql -> rawSql sql (concat valss)
_ -> error "ISRSingle is expected from the connInsertManySql function"
where
ent = entityDef vals
valss = map mkInsertValues vals
insertMany_ vals0 = runChunked (length $ getEntityFields t) insertMany_' vals0
where
t = entityDef vals0
insertMany_' vals = do
conn <- ask
let valss = map mkInsertValues vals
let sql = T.concat
[ "INSERT INTO "
, connEscapeTableName conn t
, "("
, T.intercalate "," $ map (connEscapeFieldName conn . fieldDB) $ getEntityFields t
, ") VALUES ("
, T.intercalate "),(" $ replicate (length valss) $ T.intercalate "," $ map (const "?") (getEntityFields t)
, ")"
]
rawExecute sql (concat valss)
replace k val = do
conn <- ask
let t = entityDef $ Just val
let wher = whereStmtForKey conn k
let sql = T.concat
[ "UPDATE "
, connEscapeTableName conn t
, " SET "
, T.intercalate "," (map (go conn . fieldDB) $ getEntityFields t)
, " WHERE "
, wher
]
vals = mkInsertValues val `mappend` keyToValues k
rawExecute sql vals
where
go conn x = connEscapeFieldName conn x `T.append` "=?"
insertKey k v = insrepHelper "INSERT" [Entity k v]
insertEntityMany es' = do
conn <- ask
let entDef = entityDef $ map entityVal es'
let columnNames = keyAndEntityColumnNames entDef conn
runChunked (length columnNames) go es'
where
go = insrepHelper "INSERT"
repsert key value = do
mExisting <- get key
case mExisting of
Nothing -> insertKey key value
Just _ -> replace key value
repsertMany [] = return ()
repsertMany krsDups = do
conn <- ask
let krs = nubBy ((==) `on` fst) (reverse krsDups)
let rs = snd `fmap` krs
let ent = entityDef rs
let nr = length krs
let toVals (k,r)
= case entityPrimary ent of
Nothing -> keyToValues k <> (mkInsertValues r)
Just _ -> mkInsertValues r
case connRepsertManySql conn of
(Just mkSql) -> rawExecute (mkSql ent nr) (concatMap toVals krs)
Nothing -> mapM_ (uncurry repsert) krs
delete k = do
conn <- ask
rawExecute (sql conn) (keyToValues k)
where
wher conn = whereStmtForKey conn k
sql conn = T.concat
[ "DELETE FROM "
, connEscapeTableName conn (entityDef $ Just $ recordTypeFromKey k)
, " WHERE "
, wher conn
]
instance PersistStoreWrite SqlWriteBackend where
insert v = withBaseBackend $ insert v
insertMany vs = withBaseBackend $ insertMany vs
insertMany_ vs = withBaseBackend $ insertMany_ vs
insertEntityMany vs = withBaseBackend $ insertEntityMany vs
insertKey k v = withBaseBackend $ insertKey k v
repsert k v = withBaseBackend $ repsert k v
replace k v = withBaseBackend $ replace k v
delete k = withBaseBackend $ delete k
update k upds = withBaseBackend $ update k upds
repsertMany krs = withBaseBackend $ repsertMany krs
instance PersistStoreRead SqlBackend where
get k = do
mEs <- getMany [k]
return $ Map.lookup k mEs
-- inspired by Database.Persist.Sql.Orphan.PersistQuery.selectSourceRes
getMany [] = return Map.empty
getMany ks@(k:_)= do
conn <- ask
let t = entityDef . dummyFromKey $ k
let cols = commaSeparated . Foldable.toList . keyAndEntityColumnNames t
let wher = whereStmtForKeys conn ks
let sql = T.concat
[ "SELECT "
, cols conn
, " FROM "
, connEscapeTableName conn t
, " WHERE "
, wher
]
let parse vals
= case parseEntityValues t vals of
Left s -> liftIO $ throwIO $
PersistMarshalError ("getBy: " <> s)
Right row -> return row
withRawQuery sql (Foldable.foldMap keyToValues ks) $ do
es <- CL.mapM parse .| CL.consume
return $ Map.fromList $ fmap (\e -> (entityKey e, entityVal e)) es
instance PersistStoreRead SqlReadBackend where
get k = withBaseBackend $ get k
getMany ks = withBaseBackend $ getMany ks
instance PersistStoreRead SqlWriteBackend where
get k = withBaseBackend $ get k
getMany ks = withBaseBackend $ getMany ks
dummyFromKey :: Key record -> Maybe record
dummyFromKey = Just . recordTypeFromKey
recordTypeFromKey :: Key record -> record
recordTypeFromKey _ = error "dummyFromKey"
insrepHelper :: (MonadIO m, PersistEntity val)
=> Text
-> [Entity val]
-> ReaderT SqlBackend m ()
insrepHelper _ [] = return ()
insrepHelper command es = do
conn <- ask
let columnNames = Foldable.toList $ keyAndEntityColumnNames entDef conn
rawExecute (sql conn columnNames) vals
where
entDef = entityDef $ map entityVal es
sql conn columnNames = T.concat
[ command
, " INTO "
, connEscapeTableName conn entDef
, "("
, T.intercalate "," columnNames
, ") VALUES ("
, T.intercalate "),(" $ replicate (length es) $ T.intercalate "," $ fmap (const "?") columnNames
, ")"
]
vals = Foldable.foldMap entityValues es
runChunked
:: (Monad m)
=> Int
-> ([a] -> ReaderT SqlBackend m ())
-> [a]
-> ReaderT SqlBackend m ()
runChunked _ _ [] = return ()
runChunked width m xs = do
conn <- ask
case connMaxParams conn of
Nothing -> m xs
Just maxParams -> let chunkSize = maxParams `div` width in
mapM_ m (chunksOf chunkSize xs)
-- Implement this here to avoid depending on the split package
chunksOf :: Int -> [a] -> [[a]]
chunksOf _ [] = []
chunksOf size xs = let (chunk, rest) = splitAt size xs in chunk : chunksOf size rest
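-- For example, @chunksOf 2 [1,2,3,4,5]@ evaluates to @[[1,2],[3,4],[5]]@; this is
-- how 'runChunked' keeps each statement under the backend's 'connMaxParams' limit.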
|
yesodweb/persistent
|
persistent/Database/Persist/Sql/Orphan/PersistStore.hs
|
mit
| 15,614 | 0 | 33 | 5,243 | 4,180 | 2,102 | 2,078 | -1 | -1 |
-- C->Haskell Compiler: Marshalling library
--
-- Copyright (c) [1999...2005] Manuel M T Chakravarty
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are met:
--
-- 1. Redistributions of source code must retain the above copyright notice,
-- this list of conditions and the following disclaimer.
-- 2. Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
-- 3. The name of the author may not be used to endorse or promote products
-- derived from this software without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
-- IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
-- OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN
-- NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
-- TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
-- PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
-- LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
-- NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
-- SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
--
--- Description ---------------------------------------------------------------
--
-- Language: Haskell 98
--
-- This module provides the marshaling routines for Haskell files produced by
-- C->Haskell for binding to C library interfaces. It exports all of the
-- low-level FFI (language-independent plus the C-specific parts) together
-- with the C->HS-specific higher-level marshalling routines.
--
module C2HS (
-- * Re-export the language-independent component of the FFI
module Foreign,
-- * Re-export the C language component of the FFI
module Foreign.C,
-- * Composite marshalling functions
withCStringLenIntConv, peekCStringLenIntConv, withIntConv, withFloatConv,
peekIntConv, peekFloatConv, withBool, peekBool, withEnum, peekEnum,
-- * Conditional results using 'Maybe'
nothingIf, nothingIfNull,
-- * Bit masks
combineBitMasks, containsBitMask, extractBitMasks,
-- * Conversion between C and Haskell types
cIntConv, cFloatConv, cToBool, cFromBool, cToEnum, cFromEnum
) where
import Foreign
import Foreign.C
import Control.Monad (liftM)
-- Composite marshalling functions
-- -------------------------------
-- Strings with explicit length
--
withCStringLenIntConv :: Num n => String -> ((CString, n) -> IO a) -> IO a
withCStringLenIntConv s f = withCStringLen s $ \(p, n) -> f (p, fromIntegral n)
peekCStringLenIntConv :: Integral n => (CString, n) -> IO String
peekCStringLenIntConv (s, n) = peekCStringLen (s, fromIntegral n)
-- Marshalling of numerals
--
withIntConv :: (Storable b, Integral a, Integral b)
=> a -> (Ptr b -> IO c) -> IO c
withIntConv = with . fromIntegral
withFloatConv :: (Storable b, RealFloat a, RealFloat b)
=> a -> (Ptr b -> IO c) -> IO c
withFloatConv = with . realToFrac
peekIntConv :: (Storable a, Integral a, Integral b)
=> Ptr a -> IO b
peekIntConv = liftM fromIntegral . peek
peekFloatConv :: (Storable a, RealFloat a, RealFloat b)
=> Ptr a -> IO b
peekFloatConv = liftM realToFrac . peek
-- Everything else below is deprecated.
-- These functions are not used by code generated by c2hs.
{-# DEPRECATED withBool "The C2HS module will soon stop providing unnecessary\nutility functions. Please use standard FFI library functions instead." #-}
{-# DEPRECATED peekBool "The C2HS module will soon stop providing unnecessary\nutility functions. Please use standard FFI library functions instead." #-}
{-# DEPRECATED withEnum "The C2HS module will soon stop providing unnecessary\nutility functions. Please use standard FFI library functions instead." #-}
{-# DEPRECATED peekEnum "The C2HS module will soon stop providing unnecessary\nutility functions. Please use standard FFI library functions instead." #-}
{-# DEPRECATED nothingIf "The C2HS module will soon stop providing unnecessary\nutility functions. Please use standard FFI library functions instead." #-}
{-# DEPRECATED nothingIfNull "The C2HS module will soon stop providing unnecessary\nutility functions. Please use standard FFI library functions instead." #-}
{-# DEPRECATED combineBitMasks "The C2HS module will soon stop providing unnecessary\nutility functions. Please use standard FFI library functions instead." #-}
{-# DEPRECATED containsBitMask "The C2HS module will soon stop providing unnecessary\nutility functions. Please use standard FFI library functions instead." #-}
{-# DEPRECATED extractBitMasks "The C2HS module will soon stop providing unnecessary\nutility functions. Please use standard FFI library functions instead." #-}
{-# DEPRECATED cIntConv "The C2HS module will soon stop providing unnecessary\nutility functions. Please use standard FFI library functions instead." #-}
{-# DEPRECATED cFloatConv "The C2HS module will soon stop providing unnecessary\nutility functions. Please use standard FFI library functions instead." #-}
{-# DEPRECATED cFromBool "The C2HS module will soon stop providing unnecessary\nutility functions. Please use standard FFI library functions instead." #-}
{-# DEPRECATED cToBool "The C2HS module will soon stop providing unnecessary\nutility functions. Please use standard FFI library functions instead." #-}
{-# DEPRECATED cToEnum "The C2HS module will soon stop providing unnecessary\nutility functions. Please use standard FFI library functions instead." #-}
{-# DEPRECATED cFromEnum "The C2HS module will soon stop providing unnecessary\nutility functions. Please use standard FFI library functions instead." #-}
-- Passing Booleans by reference
--
withBool :: (Integral a, Storable a) => Bool -> (Ptr a -> IO b) -> IO b
withBool = with . fromBool
peekBool :: (Integral a, Storable a) => Ptr a -> IO Bool
peekBool = liftM toBool . peek
-- Passing enums by reference
--
withEnum :: (Enum a, Integral b, Storable b) => a -> (Ptr b -> IO c) -> IO c
withEnum = with . cFromEnum
peekEnum :: (Enum a, Integral b, Storable b) => Ptr b -> IO a
peekEnum = liftM cToEnum . peek
-- Storing of 'Maybe' values
-- -------------------------
--TODO: kill off this orphan instance!
instance Storable a => Storable (Maybe a) where
sizeOf _ = sizeOf (undefined :: Ptr ())
alignment _ = alignment (undefined :: Ptr ())
peek p = do
ptr <- peek (castPtr p)
if ptr == nullPtr
then return Nothing
else liftM Just $ peek ptr
poke p v = do
ptr <- case v of
Nothing -> return nullPtr
Just v' -> new v'
poke (castPtr p) ptr
-- Conditional results using 'Maybe'
-- ---------------------------------
-- Wrap the result into a 'Maybe' type.
--
-- * the predicate determines when the result is considered to be non-existing,
--   i.e., when it is represented by `Nothing'
--
-- * the second argument allows mapping a result wrapped in `Just' into some
--   other domain
--
nothingIf :: (a -> Bool) -> (a -> b) -> a -> Maybe b
nothingIf p f x = if p x then Nothing else Just $ f x
-- |Instance for special casing null pointers.
--
nothingIfNull :: (Ptr a -> b) -> Ptr a -> Maybe b
nothingIfNull = nothingIf (== nullPtr)
-- Support for bit masks
-- ---------------------
-- Given a list of enumeration values that represent bit masks, combine these
-- masks using bitwise disjunction.
--
combineBitMasks :: (Enum a, Bits b) => [a] -> b
combineBitMasks = foldl (.|.) 0 . map (fromIntegral . fromEnum)
-- Tests whether the given bit mask is contained in the given bit pattern
-- (i.e., all bits set in the mask are also set in the pattern).
--
containsBitMask :: (Bits a, Enum b) => a -> b -> Bool
bits `containsBitMask` bm = let bm' = fromIntegral . fromEnum $ bm
in
bm' .&. bits == bm'
-- |Given a bit pattern, yield all bit masks that it contains.
--
-- * This does *not* attempt to compute a minimal set of bit masks that, when
--   combined, yields the bit pattern; instead, all contained bit masks are
--   produced.
--
extractBitMasks :: (Bits a, Enum b, Bounded b) => a -> [b]
extractBitMasks bits =
[bm | bm <- [minBound..maxBound], bits `containsBitMask` bm]
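-- As a small illustration: for a hypothetical enumeration whose 'fromEnum' values
-- are the mask values 1, 2 and 4, 'combineBitMasks' over the first and third
-- constructors yields 5, and 'containsBitMask' applied to 5 is True for exactly
-- those two constructors.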
-- Conversion routines
-- -------------------
-- |Integral conversion
--
cIntConv :: (Integral a, Integral b) => a -> b
cIntConv = fromIntegral
-- |Floating conversion
--
cFloatConv :: (RealFloat a, RealFloat b) => a -> b
cFloatConv = realToFrac
-- |Obtain C value from Haskell 'Bool'.
--
cFromBool :: Num a => Bool -> a
cFromBool = fromBool
-- |Obtain Haskell 'Bool' from C value.
--
cToBool :: Num a => a -> Bool
cToBool = toBool
-- |Convert a C enumeration to Haskell.
--
cToEnum :: (Integral i, Enum e) => i -> e
cToEnum = toEnum . fromIntegral
-- |Convert a Haskell enumeration to C.
--
cFromEnum :: (Enum e, Integral i) => e -> i
cFromEnum = fromIntegral . fromEnum
|
jrockway/c2hs
|
C2HS.hs
|
gpl-2.0
| 9,434 | 0 | 12 | 1,932 | 1,384 | 778 | 606 | 87 | 2 |
{-#LANGUAGE ScopedTypeVariables, InstanceSigs, ExplicitForAll, TypeSynonymInstances, UndecidableInstances, FlexibleInstances, MultiParamTypeClasses, GADTs, DataKinds, PolyKinds, TypeOperators, ViewPatterns, PatternSynonyms, RankNTypes, FlexibleContexts, AutoDeriveTypeable #-}
module Carnap.Core.Examples.ACUI (
V, Set, VLang, Var, acuiParser,
pattern VEmpty, pattern VUnion, pattern VSomeSet,
VLangLabel(..),
parseTerm, evalTerm,
pattern ACUISV
) where
import Carnap.Core.Data.Types
import Carnap.Core.Data.Classes
import Carnap.Core.Data.Optics
import Carnap.Core.Unification.Unification
import Carnap.Core.Unification.ACUI
import Carnap.Core.Unification.FirstOrder
--import Carnap.Core.Unification.Combination
import Carnap.Core.Util
import qualified Data.Set as S
import Data.Type.Equality
import Data.Typeable
import Text.Parsec hiding (Empty)
import Text.Parsec.Expr
import Control.Monad.State
--I really liked this example so I'm using it for testing
newtype VFix f = VFix (f (VFix f))
deriving(Typeable)
type V = VFix S.Set
ev :: V
ev = VFix S.empty
sv :: V -> V
sv x = VFix (S.singleton x)
uv :: V -> V -> V
uv (VFix x) (VFix y) = VFix $ S.union x y
instance Show V where
show (VFix x) = show x
instance Eq V where
(VFix x) == (VFix y) = x == y
instance Ord V where
(VFix x) <= (VFix y) = x <= y
--I don't want to handle constants just yet
--So this language has no singletons, but that comes next
data Set a where
Empty :: Set (Term V)
Singleton :: Set (Term V -> Term V)
Union :: Set (Term V -> Term V -> Term V)
instance UniformlyEq Set where
Empty =* Empty = True
Singleton =* Singleton = True
Union =* Union = True
_ =* _ = False
instance FirstOrderLex Set
instance Schematizable Set where
schematize Singleton (x:_) = "{" ++ x ++ "}"
schematize Empty _ = "{}"
schematize Union (x:y:_) = x ++ " ∪ " ++ y
instance Evaluable Set where
eval Empty = Term ev
eval Union = \(Term t) (Term t') -> Term (uv t t')
eval Singleton = \(Term t) -> Term (sv t)
data Var lang a where
SomeSet :: String -> Var lang (Term V)
instance UniformlyEq (Var lang) where
(SomeSet x) =* (SomeSet y) = x == y
instance FirstOrderLex (Var lang) where
isVarLex (SomeSet _) = True
instance Schematizable (Var lang) where
schematize (SomeSet s) _ = s
instance Evaluable (Var lang) where
eval _ = error "you are not allowed to do that silly"
data Extra a where
ConstUnFunc :: String -> Extra (Term V -> Term V)
ConstBinFunc :: String -> Extra (Term V -> Term V -> Term V)
instance UniformlyEq Extra where
(ConstUnFunc s) =* (ConstUnFunc s') = s == s'
(ConstBinFunc s) =* (ConstBinFunc s') = s == s'
_ =* _ = False
instance Schematizable Extra where
schematize (ConstUnFunc s) (x:_) = s ++ "(" ++ x ++ ")"
schematize (ConstBinFunc s) (x:y:_) = s ++ "(" ++ x ++ "," ++ y ++ ")"
instance Evaluable Extra where
eval _ = error "don't do this, I too lazy to implement this"
instance FirstOrderLex Extra
type VLex = (Function Set :|: Var :|: SubstitutionalVariable :|: Function Extra :|: EndLang)
type VLang = FixLang VLex
type VTerm = VLang (Term V)
pattern VEmpty = Fx1 (Function Empty AZero)
pattern VSomeSet s = Fx2 (SomeSet s)
pattern VSingelton x = Fx1 (Function Singleton AOne) :!$: x
pattern VUnion x y = Fx1 (Function Union ATwo) :!$: x :!$: y
pattern SV n = Fx3 (SubVar n)
pattern ACUISV n = SV n
pattern VUnFunc s x = Fx4 (Function (ConstUnFunc s) AOne) :!$: x
pattern VBinFunc s x y = Fx4 (Function (ConstBinFunc s) ATwo) :!$: x :!$: y
instance PrismSubstitutionalVariable VLex
instance BoundVars VLex where
subBoundVar = undefined
instance CopulaSchema VLang where
appSchema x y e = schematize x (show y : e)
lamSchema = error "how did you even do this?"
liftSchema = error "should not print a lifted value"
-- instance Monoid (VLang (Term V)) where
-- mempty = VEmpty
-- mappend = VUnion
instance Eq (VLang a) where
(==) = (=*)
instance ACUI VLang where
unfoldTerm (VUnion x y) = unfoldTerm x ++ unfoldTerm y
unfoldTerm VEmpty = []
unfoldTerm leaf = [leaf]
isId VEmpty = True
isId _ = False
isACUI (VUnFunc _ _) = False
isACUI (VBinFunc _ _ _) = False
isACUI _ = True
getId (Proxy :: Proxy a) = case eqT :: Maybe (a :~: (Term V)) of
Just Refl -> VEmpty
_ -> error "you have to use the right type"
acuiOp a b@VEmpty = a
acuiOp a@VEmpty b = b
acuiOp a@(VUnion _ _) b = VUnion a b
acuiOp a@(VSomeSet _) b = VUnion a b
acuiOp a@(VSingelton _) b = VUnion a b
acuiOp a@(VUnFunc _ _) b = VUnion a b
acuiOp a@(VBinFunc _ _ _) b = VUnion a b
acuiOp b a@(VUnion _ _) = VUnion b a
acuiOp b a@(VSomeSet _) = VUnion b a
acuiOp b a@(VSingelton _) = VUnion b a
acuiOp b a@(VUnFunc _ _) = VUnion b a
acuiOp b a@(VBinFunc _ _ _) = VUnion b a
acuiOp ((SV n) :: VLang a) b = case eqT :: Maybe (a :~: (Term V)) of
Just Refl -> VUnion (SV n) b
_ -> error "you have to use the right type"
--This is just a placeholder until we define things compositionally
data VLangLabel = VExtra
| VSet
deriving(Eq, Ord, Show)
-- instance Combineable VLang VLangLabel where
-- getLabel VEmpty = VSet
-- getLabel (VSingelton _) = VSet
-- getLabel (VUnion _ _) = VSet
-- getLabel _ = VExtra
-- getAlgo VSet = acuiUnifySys
-- getAlgo VExtra = foUnifySys
-- replaceChild (VSingelton _) pig _ = VSingelton $ unEveryPig pig
-- replaceChild (VUnion _ x) pig 0 = VUnion (unEveryPig pig) x
-- replaceChild (VUnion x _) pig 1 = VUnion x (unEveryPig pig)
-- replaceChild (VUnFunc s _) pig _ = VUnFunc s (unEveryPig pig)
-- replaceChild (VBinFunc s _ x) pig 0 = VBinFunc s (unEveryPig pig) x
-- replaceChild (VBinFunc s x _) pig 1 = VBinFunc s x (unEveryPig pig)
parseUnion :: (Monad m) => ParsecT String u m (VTerm -> VTerm -> VTerm)
parseUnion = do spaces
string "u"
spaces
return VUnion
emptyParser :: (Monad m) => ParsecT String u m (VTerm)
emptyParser = do string "{}"
return VEmpty
singletonParser recur = do char '{'
inner <- recur
char '}'
return $ VSingelton inner
unfuncParser :: (Monad m) => ParsecT String u m (VTerm)
unfuncParser = do c <- oneOf "fgjkl"
char '('
arg <- acuiParser
char ')'
return $ VUnFunc [c] arg
binfuncParser :: (Monad m) => ParsecT String u m (VTerm)
binfuncParser = do c <- oneOf "rhtqs"
char '('
arg1 <- acuiParser
char ','
arg2 <- acuiParser
char ')'
return $ VBinFunc [c] arg1 arg2
somesetParser :: (Monad m) => ParsecT String u m (VTerm)
somesetParser = do c <- oneOf "abcdexyz"
return $ VSomeSet [c]
subvarParser :: (Monad m) => ParsecT String u m (VTerm)
subvarParser = do n <- read <$> many1 digit
return $ SV n
acuiParser :: (Monad m) => ParsecT String u m (VTerm)
acuiParser = buildExpressionParser [[Infix (try parseUnion) AssocLeft]] subParser
where subParser = try emptyParser <|>
try unfuncParser <|>
try binfuncParser <|>
try (singletonParser acuiParser) <|>
subvarParser <|>
somesetParser
parseTerm s = let (Right term) = parse acuiParser "" s in term
evalTerm m = evalState m (0 :: Int)
instance Schematizable f => Show (AnyPig f) where
show (AnyPig t) = schematize t []
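-- A hedged example of the concrete syntax accepted by 'acuiParser' (the expected
-- result shape is read off the patterns above, not checked against a test suite):
--
-- > parseTerm "{a} u b u {}"
-- > -- a left-nested union of the singleton of set-variable a, set-variable b,
-- > -- and the empty set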
|
opentower/carnap
|
Carnap/src/Carnap/Core/Examples/ACUI.hs
|
gpl-3.0
| 8,137 | 0 | 12 | 2,491 | 2,592 | 1,314 | 1,278 | 170 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.SWF.PollForDecisionTask
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Used by deciders to get a DecisionTask from the specified decision
-- 'taskList'. A decision task may be returned for any open workflow
-- execution that is using the specified task list. The task includes a
-- paginated view of the history of the workflow execution. The decider
-- should use the workflow type and the history to determine how to
-- properly handle the task.
--
-- This action initiates a long poll, where the service holds the HTTP
-- connection open and responds as soon a task becomes available. If no
-- decision task is available in the specified task list before the timeout
-- of 60 seconds expires, an empty result is returned. An empty result, in
-- this context, means that a DecisionTask is returned, but that the value
-- of 'taskToken' is an empty string.
--
-- Deciders should set their client-side socket timeout to at least 70
-- seconds (10 seconds higher than the timeout).
--
-- Because the number of workflow history events for a single workflow
-- execution might be very large, the result returned might be split up
-- across a number of pages. To retrieve subsequent pages, make additional
-- calls to 'PollForDecisionTask' using the 'nextPageToken' returned by the
-- initial call. Note that you do __not__ call
-- 'GetWorkflowExecutionHistory' with this 'nextPageToken'. Instead, call
-- 'PollForDecisionTask' again.
--
-- __Access Control__
--
-- You can use IAM policies to control this action\'s access to Amazon SWF
-- resources as follows:
--
-- - Use a 'Resource' element with the domain name to limit the action to
-- only specified domains.
-- - Use an 'Action' element to allow or deny permission to call this
-- action.
-- - Constrain the 'taskList.name' parameter by using a __Condition__
-- element with the 'swf:taskList.name' key to allow the action to
-- access only certain task lists.
--
-- If the caller does not have sufficient permissions to invoke the action,
-- or the parameter values fall outside the specified constraints, the
-- action fails. The associated event attribute\'s __cause__ parameter will
-- be set to OPERATION_NOT_PERMITTED. For details and example IAM policies,
-- see
-- <http://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html Using IAM to Manage Access to Amazon SWF Workflows>.
--
-- /See:/ <http://docs.aws.amazon.com/amazonswf/latest/apireference/API_PollForDecisionTask.html AWS API Reference> for PollForDecisionTask.
--
-- This operation returns paginated results.
module Network.AWS.SWF.PollForDecisionTask
(
-- * Creating a Request
pollForDecisionTask
, PollForDecisionTask
-- * Request Lenses
, pfdtNextPageToken
, pfdtReverseOrder
, pfdtMaximumPageSize
, pfdtIdentity
, pfdtDomain
, pfdtTaskList
-- * Destructuring the Response
, pollForDecisionTaskResponse
, PollForDecisionTaskResponse
-- * Response Lenses
, pfdtrsNextPageToken
, pfdtrsPreviousStartedEventId
, pfdtrsResponseStatus
, pfdtrsTaskToken
, pfdtrsStartedEventId
, pfdtrsWorkflowExecution
, pfdtrsWorkflowType
, pfdtrsEvents
) where
import Network.AWS.Pager
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
import Network.AWS.SWF.Types
import Network.AWS.SWF.Types.Product
-- | /See:/ 'pollForDecisionTask' smart constructor.
data PollForDecisionTask = PollForDecisionTask'
{ _pfdtNextPageToken :: !(Maybe Text)
, _pfdtReverseOrder :: !(Maybe Bool)
, _pfdtMaximumPageSize :: !(Maybe Nat)
, _pfdtIdentity :: !(Maybe Text)
, _pfdtDomain :: !Text
, _pfdtTaskList :: !TaskList
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'PollForDecisionTask' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pfdtNextPageToken'
--
-- * 'pfdtReverseOrder'
--
-- * 'pfdtMaximumPageSize'
--
-- * 'pfdtIdentity'
--
-- * 'pfdtDomain'
--
-- * 'pfdtTaskList'
pollForDecisionTask
:: Text -- ^ 'pfdtDomain'
-> TaskList -- ^ 'pfdtTaskList'
-> PollForDecisionTask
pollForDecisionTask pDomain_ pTaskList_ =
PollForDecisionTask'
{ _pfdtNextPageToken = Nothing
, _pfdtReverseOrder = Nothing
, _pfdtMaximumPageSize = Nothing
, _pfdtIdentity = Nothing
, _pfdtDomain = pDomain_
, _pfdtTaskList = pTaskList_
}
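-- A hedged usage sketch (the domain string and 'myTaskList' are placeholders;
-- actually sending the request additionally needs an amazonka environment, and
-- the lens operators are assumed to be in scope via "Network.AWS.Prelude"):
--
-- > pollForDecisionTask "my-domain" myTaskList
-- >   & pfdtIdentity ?~ "decider-1"
-- >   & pfdtMaximumPageSize ?~ 100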
-- | If a 'NextPageToken' was returned by a previous call, there are more
-- results available. To retrieve the next page of results, make the call
-- again using the returned token in 'nextPageToken'. Keep all other
-- arguments unchanged.
--
-- The configured 'maximumPageSize' determines how many results can be
-- returned in a single call.
--
-- The 'nextPageToken' returned by this action cannot be used with
-- GetWorkflowExecutionHistory to get the next page. You must call
-- PollForDecisionTask again (with the 'nextPageToken') to retrieve the
-- next page of history records. Calling PollForDecisionTask with a
-- 'nextPageToken' will not return a new decision task.
--
-- .
pfdtNextPageToken :: Lens' PollForDecisionTask (Maybe Text)
pfdtNextPageToken = lens _pfdtNextPageToken (\ s a -> s{_pfdtNextPageToken = a});
-- | When set to 'true', returns the events in reverse order. By default the
-- results are returned in ascending order of the 'eventTimestamp' of the
-- events.
pfdtReverseOrder :: Lens' PollForDecisionTask (Maybe Bool)
pfdtReverseOrder = lens _pfdtReverseOrder (\ s a -> s{_pfdtReverseOrder = a});
-- | The maximum number of results that will be returned per call.
-- 'nextPageToken' can be used to obtain further pages of results. The
-- default is 1000, which is the maximum allowed page size. You can,
-- however, specify a page size /smaller/ than the maximum.
--
-- This is an upper limit only; the actual number of results returned per
-- call may be fewer than the specified maximum.
pfdtMaximumPageSize :: Lens' PollForDecisionTask (Maybe Natural)
pfdtMaximumPageSize = lens _pfdtMaximumPageSize (\ s a -> s{_pfdtMaximumPageSize = a}) . mapping _Nat;
-- | Identity of the decider making the request, which is recorded in the
-- DecisionTaskStarted event in the workflow history. This enables
-- diagnostic tracing when problems arise. The form of this identity is
-- user defined.
pfdtIdentity :: Lens' PollForDecisionTask (Maybe Text)
pfdtIdentity = lens _pfdtIdentity (\ s a -> s{_pfdtIdentity = a});
-- | The name of the domain containing the task lists to poll.
pfdtDomain :: Lens' PollForDecisionTask Text
pfdtDomain = lens _pfdtDomain (\ s a -> s{_pfdtDomain = a});
-- | Specifies the task list to poll for decision tasks.
--
-- The specified string must not start or end with whitespace. It must not
-- contain a ':' (colon), '\/' (slash), '|' (vertical bar), or any control
-- characters (\\u0000-\\u001f | \\u007f - \\u009f). Also, it must not
-- contain the literal string 'arn'.
pfdtTaskList :: Lens' PollForDecisionTask TaskList
pfdtTaskList = lens _pfdtTaskList (\ s a -> s{_pfdtTaskList = a});
instance AWSPager PollForDecisionTask where
page rq rs
| stop (rs ^. pfdtrsNextPageToken) = Nothing
| stop (rs ^. pfdtrsEvents) = Nothing
| otherwise =
Just $ rq &
pfdtNextPageToken .~ rs ^. pfdtrsNextPageToken
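-- | An added sketch mirroring the 'AWSPager' instance above (not part of the
-- generated module): thread the 'nextPageToken' from a response back into the
-- next request, keeping all other arguments unchanged.
withNextPage :: PollForDecisionTaskResponse -> PollForDecisionTask -> PollForDecisionTask
withNextPage rs rq = rq & pfdtNextPageToken .~ (rs ^. pfdtrsNextPageToken)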
instance AWSRequest PollForDecisionTask where
type Rs PollForDecisionTask =
PollForDecisionTaskResponse
request = postJSON sWF
response
= receiveJSON
(\ s h x ->
PollForDecisionTaskResponse' <$>
(x .?> "nextPageToken") <*>
(x .?> "previousStartedEventId")
<*> (pure (fromEnum s))
<*> (x .:> "taskToken")
<*> (x .:> "startedEventId")
<*> (x .:> "workflowExecution")
<*> (x .:> "workflowType")
<*> (x .?> "events" .!@ mempty))
instance ToHeaders PollForDecisionTask where
toHeaders
= const
(mconcat
["X-Amz-Target" =#
("SimpleWorkflowService.PollForDecisionTask" ::
ByteString),
"Content-Type" =#
("application/x-amz-json-1.0" :: ByteString)])
instance ToJSON PollForDecisionTask where
toJSON PollForDecisionTask'{..}
= object
(catMaybes
[("nextPageToken" .=) <$> _pfdtNextPageToken,
("reverseOrder" .=) <$> _pfdtReverseOrder,
("maximumPageSize" .=) <$> _pfdtMaximumPageSize,
("identity" .=) <$> _pfdtIdentity,
Just ("domain" .= _pfdtDomain),
Just ("taskList" .= _pfdtTaskList)])
instance ToPath PollForDecisionTask where
toPath = const "/"
instance ToQuery PollForDecisionTask where
toQuery = const mempty
-- | A structure that represents a decision task. Decision tasks are sent to
-- deciders in order for them to make decisions.
--
-- /See:/ 'pollForDecisionTaskResponse' smart constructor.
data PollForDecisionTaskResponse = PollForDecisionTaskResponse'
{ _pfdtrsNextPageToken :: !(Maybe Text)
, _pfdtrsPreviousStartedEventId :: !(Maybe Integer)
, _pfdtrsResponseStatus :: !Int
, _pfdtrsTaskToken :: !Text
, _pfdtrsStartedEventId :: !Integer
, _pfdtrsWorkflowExecution :: !WorkflowExecution
, _pfdtrsWorkflowType :: !WorkflowType
, _pfdtrsEvents :: ![HistoryEvent]
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'PollForDecisionTaskResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pfdtrsNextPageToken'
--
-- * 'pfdtrsPreviousStartedEventId'
--
-- * 'pfdtrsResponseStatus'
--
-- * 'pfdtrsTaskToken'
--
-- * 'pfdtrsStartedEventId'
--
-- * 'pfdtrsWorkflowExecution'
--
-- * 'pfdtrsWorkflowType'
--
-- * 'pfdtrsEvents'
pollForDecisionTaskResponse
:: Int -- ^ 'pfdtrsResponseStatus'
-> Text -- ^ 'pfdtrsTaskToken'
-> Integer -- ^ 'pfdtrsStartedEventId'
-> WorkflowExecution -- ^ 'pfdtrsWorkflowExecution'
-> WorkflowType -- ^ 'pfdtrsWorkflowType'
-> PollForDecisionTaskResponse
pollForDecisionTaskResponse pResponseStatus_ pTaskToken_ pStartedEventId_ pWorkflowExecution_ pWorkflowType_ =
PollForDecisionTaskResponse'
{ _pfdtrsNextPageToken = Nothing
, _pfdtrsPreviousStartedEventId = Nothing
, _pfdtrsResponseStatus = pResponseStatus_
, _pfdtrsTaskToken = pTaskToken_
, _pfdtrsStartedEventId = pStartedEventId_
, _pfdtrsWorkflowExecution = pWorkflowExecution_
, _pfdtrsWorkflowType = pWorkflowType_
, _pfdtrsEvents = mempty
}
-- | If a 'NextPageToken' was returned by a previous call, there are more
-- results available. To retrieve the next page of results, make the call
-- again using the returned token in 'nextPageToken'. Keep all other
-- arguments unchanged.
--
-- The configured 'maximumPageSize' determines how many results can be
-- returned in a single call.
pfdtrsNextPageToken :: Lens' PollForDecisionTaskResponse (Maybe Text)
pfdtrsNextPageToken = lens _pfdtrsNextPageToken (\ s a -> s{_pfdtrsNextPageToken = a});
-- | The ID of the DecisionTaskStarted event of the previous decision task of
-- this workflow execution that was processed by the decider. This can be
-- used to determine which events in the history are new since the last decision
-- task received by the decider.
pfdtrsPreviousStartedEventId :: Lens' PollForDecisionTaskResponse (Maybe Integer)
pfdtrsPreviousStartedEventId = lens _pfdtrsPreviousStartedEventId (\ s a -> s{_pfdtrsPreviousStartedEventId = a});
-- | The response status code.
pfdtrsResponseStatus :: Lens' PollForDecisionTaskResponse Int
pfdtrsResponseStatus = lens _pfdtrsResponseStatus (\ s a -> s{_pfdtrsResponseStatus = a});
-- | The opaque string used as a handle on the task. This token is used by
-- workers to communicate progress and response information back to the
-- system about the task.
pfdtrsTaskToken :: Lens' PollForDecisionTaskResponse Text
pfdtrsTaskToken = lens _pfdtrsTaskToken (\ s a -> s{_pfdtrsTaskToken = a});
-- | The ID of the 'DecisionTaskStarted' event recorded in the history.
pfdtrsStartedEventId :: Lens' PollForDecisionTaskResponse Integer
pfdtrsStartedEventId = lens _pfdtrsStartedEventId (\ s a -> s{_pfdtrsStartedEventId = a});
-- | The workflow execution for which this decision task was created.
pfdtrsWorkflowExecution :: Lens' PollForDecisionTaskResponse WorkflowExecution
pfdtrsWorkflowExecution = lens _pfdtrsWorkflowExecution (\ s a -> s{_pfdtrsWorkflowExecution = a});
-- | The type of the workflow execution for which this decision task was
-- created.
pfdtrsWorkflowType :: Lens' PollForDecisionTaskResponse WorkflowType
pfdtrsWorkflowType = lens _pfdtrsWorkflowType (\ s a -> s{_pfdtrsWorkflowType = a});
-- | A paginated list of history events of the workflow execution. The
-- decider uses this during the processing of the decision task.
pfdtrsEvents :: Lens' PollForDecisionTaskResponse [HistoryEvent]
pfdtrsEvents = lens _pfdtrsEvents (\ s a -> s{_pfdtrsEvents = a}) . _Coerce;
|
fmapfmapfmap/amazonka
|
amazonka-swf/gen/Network/AWS/SWF/PollForDecisionTask.hs
|
mpl-2.0
| 14,096 | 0 | 18 | 2,899 | 1,633 | 993 | 640 | 183 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.Glacier.ListMultipartUploads
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- This operation lists in-progress multipart uploads for the specified
-- vault. An in-progress multipart upload is a multipart upload that has
-- been initiated by an InitiateMultipartUpload request, but has not yet
-- been completed or aborted. The list returned in the List Multipart
-- Upload response has no guaranteed order.
--
-- The List Multipart Uploads operation supports pagination. By default,
-- this operation returns up to 1,000 multipart uploads in the response.
-- You should always check the response for a 'marker' at which to continue
-- the list; if there are no more items the 'marker' is 'null'. To return a
-- list of multipart uploads that begins at a specific upload, set the
-- 'marker' request parameter to the value you obtained from a previous
-- List Multipart Upload request. You can also limit the number of uploads
-- returned in the response by specifying the 'limit' parameter in the
-- request.
--
-- Note the difference between this operation and listing parts
-- (ListParts). The List Multipart Uploads operation lists all multipart
-- uploads for a vault and does not require a multipart upload ID. The List
-- Parts operation requires a multipart upload ID since parts are
-- associated with a single upload.
--
-- An AWS account has full permission to perform all operations (actions).
-- However, AWS Identity and Access Management (IAM) users don\'t have any
-- permissions by default. You must grant them explicit permission to
-- perform specific actions. For more information, see
-- <http://docs.aws.amazon.com/amazonglacier/latest/dev/using-iam-with-amazon-glacier.html Access Control Using AWS Identity and Access Management (IAM)>.
--
-- For conceptual information and the underlying REST API, go to
-- <http://docs.aws.amazon.com/amazonglacier/latest/dev/working-with-archives.html Working with Archives in Amazon Glacier>
-- and
-- <http://docs.aws.amazon.com/amazonglacier/latest/dev/api-multipart-list-uploads.html List Multipart Uploads>
-- in the /Amazon Glacier Developer Guide/.
--
-- /See:/ <http://docs.aws.amazon.com/amazonglacier/latest/dev/api-ListMultipartUploads.html AWS API Reference> for ListMultipartUploads.
module Network.AWS.Glacier.ListMultipartUploads
(
-- * Creating a Request
listMultipartUploads
, ListMultipartUploads
-- * Request Lenses
, lmuMarker
, lmuLimit
, lmuAccountId
, lmuVaultName
-- * Destructuring the Response
, listMultipartUploadsResponse
, ListMultipartUploadsResponse
-- * Response Lenses
, lmursUploadsList
, lmursMarker
, lmursResponseStatus
) where
import Network.AWS.Glacier.Types
import Network.AWS.Glacier.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | Provides options for retrieving list of in-progress multipart uploads
-- for an Amazon Glacier vault.
--
-- /See:/ 'listMultipartUploads' smart constructor.
data ListMultipartUploads = ListMultipartUploads'
{ _lmuMarker :: !(Maybe Text)
, _lmuLimit :: !(Maybe Text)
, _lmuAccountId :: !Text
, _lmuVaultName :: !Text
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'ListMultipartUploads' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lmuMarker'
--
-- * 'lmuLimit'
--
-- * 'lmuAccountId'
--
-- * 'lmuVaultName'
listMultipartUploads
:: Text -- ^ 'lmuAccountId'
-> Text -- ^ 'lmuVaultName'
-> ListMultipartUploads
listMultipartUploads pAccountId_ pVaultName_ =
ListMultipartUploads'
{ _lmuMarker = Nothing
, _lmuLimit = Nothing
, _lmuAccountId = pAccountId_
, _lmuVaultName = pVaultName_
}
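-- | An added usage sketch (not part of the generated module): continuing a
-- paginated listing. The vault name is a placeholder, '-' selects the
-- caller\'s own account, and the marker is the value returned by a previous
-- List Multipart Uploads response; the lens operators are assumed to be the
-- ones re-exported by "Network.AWS.Prelude".
exampleNextUploadsPage :: Text -> ListMultipartUploads
exampleNextUploadsPage marker =
    listMultipartUploads "-" "my-vault"
      & lmuLimit .~ Just "100"      -- at most 100 uploads per response
      & lmuMarker .~ Just marker    -- resume where the previous call stopped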
-- | An opaque string used for pagination. This value specifies the upload at
-- which the listing of uploads should begin. Get the marker value from a
-- previous List Uploads response. You need only include the marker if you
-- are continuing the pagination of results started in a previous List
-- Uploads request.
lmuMarker :: Lens' ListMultipartUploads (Maybe Text)
lmuMarker = lens _lmuMarker (\ s a -> s{_lmuMarker = a});
-- | Specifies the maximum number of uploads returned in the response body.
-- If this value is not specified, the List Uploads operation returns up to
-- 1,000 uploads.
lmuLimit :: Lens' ListMultipartUploads (Maybe Text)
lmuLimit = lens _lmuLimit (\ s a -> s{_lmuLimit = a});
-- | The 'AccountId' value is the AWS account ID of the account that owns the
-- vault. You can either specify an AWS account ID or optionally a single
-- '-' (hyphen), in which case Amazon Glacier uses the AWS account
-- ID associated with the credentials used to sign the request. If you use
-- an account ID, do not include any hyphens ('-') in the ID.
lmuAccountId :: Lens' ListMultipartUploads Text
lmuAccountId = lens _lmuAccountId (\ s a -> s{_lmuAccountId = a});
-- | The name of the vault.
lmuVaultName :: Lens' ListMultipartUploads Text
lmuVaultName = lens _lmuVaultName (\ s a -> s{_lmuVaultName = a});
instance AWSRequest ListMultipartUploads where
type Rs ListMultipartUploads =
ListMultipartUploadsResponse
request = get glacier
response
= receiveJSON
(\ s h x ->
ListMultipartUploadsResponse' <$>
(x .?> "UploadsList" .!@ mempty) <*> (x .?> "Marker")
<*> (pure (fromEnum s)))
instance ToHeaders ListMultipartUploads where
toHeaders = const mempty
instance ToPath ListMultipartUploads where
toPath ListMultipartUploads'{..}
= mconcat
["/", toBS _lmuAccountId, "/vaults/",
toBS _lmuVaultName, "/multipart-uploads"]
instance ToQuery ListMultipartUploads where
toQuery ListMultipartUploads'{..}
= mconcat
["marker" =: _lmuMarker, "limit" =: _lmuLimit]
-- | Contains the Amazon Glacier response to your request.
--
-- /See:/ 'listMultipartUploadsResponse' smart constructor.
data ListMultipartUploadsResponse = ListMultipartUploadsResponse'
{ _lmursUploadsList :: !(Maybe [UploadListElement])
, _lmursMarker :: !(Maybe Text)
, _lmursResponseStatus :: !Int
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'ListMultipartUploadsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'lmursUploadsList'
--
-- * 'lmursMarker'
--
-- * 'lmursResponseStatus'
listMultipartUploadsResponse
:: Int -- ^ 'lmursResponseStatus'
-> ListMultipartUploadsResponse
listMultipartUploadsResponse pResponseStatus_ =
ListMultipartUploadsResponse'
{ _lmursUploadsList = Nothing
, _lmursMarker = Nothing
, _lmursResponseStatus = pResponseStatus_
}
-- | A list of in-progress multipart uploads.
lmursUploadsList :: Lens' ListMultipartUploadsResponse [UploadListElement]
lmursUploadsList = lens _lmursUploadsList (\ s a -> s{_lmursUploadsList = a}) . _Default . _Coerce;
-- | An opaque string that represents where to continue pagination of the
-- results. You use the marker in a new List Multipart Uploads request to
-- obtain more uploads in the list. If there are no more uploads, this
-- value is 'null'.
lmursMarker :: Lens' ListMultipartUploadsResponse (Maybe Text)
lmursMarker = lens _lmursMarker (\ s a -> s{_lmursMarker = a});
-- | The response status code.
lmursResponseStatus :: Lens' ListMultipartUploadsResponse Int
lmursResponseStatus = lens _lmursResponseStatus (\ s a -> s{_lmursResponseStatus = a});
|
fmapfmapfmap/amazonka
|
amazonka-glacier/gen/Network/AWS/Glacier/ListMultipartUploads.hs
|
mpl-2.0
| 8,381 | 0 | 14 | 1,582 | 906 | 556 | 350 | 104 | 1 |
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Carbon.Aggregator.Processor (
BuffersManager
, newBuffersManager
, newBuffersManagerIO
, processAggregate
, processAggregateManyIO
, collectAggregatedIO
) where
import Data.Maybe (mapMaybe, catMaybes)
import Control.Concurrent.STM (STM, atomically)
import qualified STMContainers.Map as STMap
import ListT (toList)
import Focus (StrategyM, Decision(..))
import Carbon
import Carbon.Aggregator
import Carbon.Aggregator.Rules
import Carbon.Aggregator.Buffer
processAggregateManyIO :: [Rule] -> BuffersManager -> [MetricTuple] -> IO [MetricTuple]
processAggregateManyIO rules bm mtuples = do
let (actionss, moutms) = unzip $ map (processAggregate rules bm) mtuples
sequence_ $ concat actionss
return $ catMaybes moutms
processAggregate :: [Rule] -> BuffersManager -> MetricTuple -> ([IO ()], (Maybe MetricTuple))
processAggregate rules bm mtuple@(MetricTuple metric dp) = do
-- TODO: rewrite rules PRE
let matchingRules = mapMaybe (metricRule metric) rules :: [(AggregatedMetricName, Rule)]
let actions = map applyAggregationRule matchingRules
-- TODO: rewrite rules POST
if metric `elem` (fst <$> matchingRules)
then (actions, Nothing)
else (actions, Just mtuple)
where
metricRule :: MetricPath -> Rule -> Maybe (AggregatedMetricName, Rule)
metricRule rpath rule = ruleAggregatedMetricName rule rpath >>= \p -> return (p, rule)
applyAggregationRule (metricName, rule) = processAggregateRule rule bm metricName dp
processAggregateRule :: Rule -> BuffersManager -> AggregatedMetricName -> DataPoint -> IO ()
processAggregateRule rule bm metric dp = do
buf <- getBufferRef bm (metric, rule)
appendDataPoint buf dp
collectAggregatedIO :: Int -> Timestamp -> BuffersManager -> IO [MetricTuple]
collectAggregatedIO maxBuckets now bm = do
pairs <- atomically . toList $ STMap.stream bm
concat <$> mapM process pairs
where
process :: (AggregatedMetricName, MetricBuffers) -> IO [MetricTuple]
process (mpath, mbufs) = do
dps <- computeAggregatedIO maxBuckets now mbufs
return [MetricTuple mpath p | p <- dps]
getBufferRef :: BuffersManager -> (AggregatedMetricName, Rule) -> IO MetricBuffers
getBufferRef bm (mpath, rule) = do
buf <- createBuffer
atomically $ STMap.focus (insertIfNotExistsM buf) mpath bm
where
createBuffer = bufferFor mpath ruleFrequency ruleMethod
ruleFrequency = ruleAggregationFrequency rule
ruleMethod = ruleAggregationMethod rule
insertIfNotExistsM :: (Monad m) => a -> StrategyM m a a
insertIfNotExistsM a = maybe (return (a, Replace a)) (\r -> return (r, Keep))
{-# SPECIALIZE insertIfNotExistsM :: MetricBuffers -> StrategyM STM MetricBuffers MetricBuffers #-}
|
ratsam/hs-carbon-aggregator
|
src/library/Carbon/Aggregator/Processor.hs
|
apache-2.0
| 3,117 | 0 | 13 | 794 | 805 | 429 | 376 | 55 | 2 |
module Util.String (padTo) where
padTo :: String -> Int -> String
padTo s n | length s < n = take n $ s ++ repeat ' '
| otherwise = s
|
tmhedberg/hsenv
|
src/Util/String.hs
|
bsd-3-clause
| 148 | 0 | 9 | 46 | 70 | 34 | 36 | 4 | 1 |
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
module Vimus.Key (
keyNames
, ExpandKeyError (..)
, expandKeys
, unExpandKeys
, keyEsc
, keyTab
, ctrlA
, ctrlB
, ctrlC
, ctrlD
, ctrlE
, ctrlF
, ctrlG
, ctrlH
, ctrlI
, ctrlJ
, ctrlK
, ctrlL
, ctrlM
, ctrlN
, ctrlO
, ctrlP
, ctrlQ
, ctrlR
, ctrlS
, ctrlT
, ctrlU
, ctrlV
, ctrlW
, ctrlX
, ctrlY
, ctrlZ
) where
import Data.Tuple (swap)
import Data.Char (toLower)
import Data.Maybe (fromMaybe)
import Data.Map (Map)
import qualified Data.Map as Map
import UI.Curses.Key
keyEsc = '\ESC'
keyTab = '\t'
ctrlA = '\SOH'
ctrlB = '\STX'
ctrlC = '\ETX'
ctrlD = '\EOT'
ctrlE = '\ENQ'
ctrlF = '\ACK'
ctrlG = '\BEL'
ctrlH = '\BS'
ctrlI = '\HT'
ctrlJ = '\LF'
ctrlK = '\VT'
ctrlL = '\FF'
ctrlM = '\CR'
ctrlN = '\SO'
ctrlO = '\SI'
ctrlP = '\DLE'
ctrlQ = '\DC1'
ctrlR = '\DC2'
ctrlS = '\DC3'
ctrlT = '\DC4'
ctrlU = '\NAK'
ctrlV = '\SYN'
ctrlW = '\ETB'
ctrlX = '\CAN'
ctrlY = '\EM'
ctrlZ = '\SUB'
-- | Associate each key with Vim's key-notation.
keys :: [(Char, String)]
keys = [
m keyEsc "Esc"
, m keyTab "Tab"
, m ctrlA "C-A"
, m ctrlB "C-B"
, m ctrlC "C-C"
, m ctrlD "C-D"
, m ctrlE "C-E"
, m ctrlF "C-F"
, m ctrlG "C-G"
, m ctrlH "C-H"
, m ctrlI "C-I"
, m ctrlJ "C-J"
, m ctrlK "C-K"
, m ctrlL "C-L"
, m ctrlM "C-M"
, m ctrlN "C-N"
, m ctrlO "C-O"
, m ctrlP "C-P"
, m ctrlQ "C-Q"
, m ctrlR "C-R"
, m ctrlS "C-S"
, m ctrlT "C-T"
, m ctrlU "C-U"
, m ctrlV "C-V"
, m ctrlW "C-W"
, m ctrlX "C-X"
, m ctrlY "C-Y"
, m ctrlZ "C-Z"
-- not defined here
, m '\n' "CR"
, m ' ' "Space"
, m keyUp "Up"
, m keyDown "Down"
, m keyLeft "Left"
, m keyRight "Right"
, m keyPpage "PageUp"
, m keyNpage "PageDown"
]
where
m = (,)
keyNames :: [String]
keyNames = map snd keys
-- | A mapping from special keys to Vim's key-notation.
--
-- The brackets are included.
keyMap :: Map Char String
keyMap = Map.fromList (map (fmap (\s -> "<" ++ s ++ ">")) keys)
-- | A mapping from Vim's key-notation to their corresponding keys.
--
-- The brackets are not included, and only lower-case is used for key-notation.
keyNotationMap :: Map String Char
keyNotationMap = Map.fromList (map (swap . fmap (map toLower)) keys)
-- | Replace all special keys with their corresponding key reference.
--
-- Vim's key-notation is used for key references.
unExpandKeys :: String -> String
unExpandKeys = foldr f ("" :: String)
where
f c
-- escape opening brackets..
| c == '<' = ("\\<" ++)
-- escape backslashes
| c == '\\' = ("\\\\" ++)
| otherwise = (keyNotation c ++)
-- | Convert given character to Vim's key-notation.
keyNotation c = fromMaybe (return c) (Map.lookup c keyMap)
data ExpandKeyError =
EmptyKeyReference
| UnterminatedKeyReference String
| UnknownKeyReference String
deriving Eq
instance Show ExpandKeyError where
show e = case e of
EmptyKeyReference -> "empty key reference"
UnterminatedKeyReference k -> "unterminated key reference " ++ show k
UnknownKeyReference k -> "unknown key reference " ++ show k
-- | Expand all key references to their corresponding keys.
--
-- Vim's key-notation is used for key references.
expandKeys :: String -> Either ExpandKeyError String
expandKeys = go
where
go s = case s of
"" -> return ""
-- keep escaped characters
'\\':x:xs -> x `cons` go xs
-- expand key references
'<' : xs -> expand xs
-- keep any other characters
x:xs -> x `cons` go xs
-- | Prepend given element to a list in the either monad.
cons :: b -> Either a [b] -> Either a [b]
cons = fmap . (:)
-- Assume that `xs` starts with a key reference, terminated with a closing
-- bracket. Replace that key reference with it's corresponding key.
expand xs = do
(k, ys) <- takeKeyReference xs
case Map.lookup k keyNotationMap of
Just x -> (x :) `fmap` go ys
Nothing -> Left (UnknownKeyReference k)
-- Assume that `s` starts with a key reference, terminated with a closing
-- bracket. Return the key reference (converted to lower-case) and the
-- suffix, drop the closing bracket.
takeKeyReference :: String -> Either ExpandKeyError (String, String)
takeKeyReference s = case break (== '>') s of
(xs, "") -> Left (UnterminatedKeyReference xs)
("", _ ) -> Left EmptyKeyReference
(xs, '>':ys) -> return (map toLower xs, ys)
_ -> error "Key.takeKeyReference: this should never happen"
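-- | Added examples (not part of the original module) showing the round trip
-- between raw keys and Vim-style key-notation; the expected results follow
-- from the key table above.
exampleExpandKeys :: Either ExpandKeyError String
exampleExpandKeys = expandKeys "<C-a>x<CR>"   -- Right ['\SOH', 'x', '\n']

exampleUnExpandKeys :: String
exampleUnExpandKeys = unExpandKeys "\SOHx\n"  -- "<C-A>x<CR>"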
|
vimus/vimus
|
src/Vimus/Key.hs
|
mit
| 4,747 | 0 | 14 | 1,338 | 1,275 | 702 | 573 | 150 | 8 |
f arr = sum([x | x <- arr, odd x])
main = do
inputdata <- getContents
putStrLn $ show $ f $ map (read :: String -> Int) $ lines inputdata
|
EdisonAlgorithms/HackerRank
|
practice/fp/intro/fp-sum-of-odd-elements/fp-sum-of-odd-elements.hs
|
mit
| 145 | 0 | 11 | 38 | 80 | 39 | 41 | 4 | 1 |
-- |
-- Module : $Header$
-- Copyright : (c) 2013-2015 Galois, Inc.
-- License : BSD3
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
{-# LANGUAGE MultiWayIf #-}
{-# LANGUAGE PatternGuards #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module REPL.Haskeline where
import Cryptol.REPL.Command
import Cryptol.REPL.Monad
import Cryptol.REPL.Trie
import Cryptol.Utils.PP
import qualified Control.Exception as X
import Control.Monad (guard, when)
import qualified Control.Monad.Trans.Class as MTL
import Data.Char (isAlphaNum, isSpace)
import Data.Function (on)
import Data.List (isPrefixOf,nub,sortBy)
import System.Console.ANSI (setTitle)
import System.Console.Haskeline
import System.Directory ( doesFileExist
, getHomeDirectory
, getCurrentDirectory)
import System.FilePath ((</>))
-- | Haskeline-specific repl implementation.
repl :: Cryptolrc -> Maybe FilePath -> REPL () -> IO ()
repl cryrc mbBatch begin =
do settings <- setHistoryFile (replSettings isBatch)
runREPL isBatch (runInputTBehavior behavior settings body)
where
body = withInterrupt $ do
MTL.lift evalCryptolrc
MTL.lift begin
loop
(isBatch,behavior) = case mbBatch of
Nothing -> (False,defaultBehavior)
Just path -> (True,useFile path)
loop = do
prompt <- MTL.lift getPrompt
mb <- handleInterrupt (return (Just "")) (getInputLines prompt [])
case mb of
Just line
| Just cmd <- parseCommand findCommandExact line -> do
continue <- MTL.lift $ do
handleInterrupt handleCtrlC (runCommand cmd)
shouldContinue
when continue loop
| otherwise -> loop
Nothing -> return ()
getInputLines prompt ls =
do mb <- getInputLine prompt
let newPrompt = map (\_ -> ' ') prompt
case mb of
Nothing -> return Nothing
Just l | not (null l) && last l == '\\' ->
getInputLines newPrompt (init l : ls)
| otherwise -> return $ Just $ unlines $ reverse $ l : ls
evalCryptolrc =
case cryrc of
CryrcDefault -> do
here <- io $ getCurrentDirectory
home <- io $ getHomeDirectory
let dcHere = here </> ".cryptolrc"
dcHome = home </> ".cryptolrc"
isHere <- io $ doesFileExist dcHere
isHome <- io $ doesFileExist dcHome
if | isHere -> slurp dcHere
| isHome -> slurp dcHome
| otherwise -> whenDebug $ io $ putStrLn "no .cryptolrc found"
CryrcFiles paths -> mapM_ slurp paths
CryrcDisabled -> return ()
-- | Actually read the contents of a file, but don't save the
-- history
--
-- XXX: friendlier error message would be nice if the file can't be
-- found, but since these will be specified on the command line it
-- should be obvious what's going wrong
slurp path = do
let settings' = defaultSettings { autoAddHistory = False }
runInputTBehavior (useFile path) settings' (withInterrupt loop)
-- | Try to set the history file.
setHistoryFile :: Settings REPL -> IO (Settings REPL)
setHistoryFile ss =
do dir <- getHomeDirectory
return ss { historyFile = Just (dir </> ".cryptol_history") }
`X.catch` \(SomeException {}) -> return ss
-- | Haskeline settings for the REPL.
replSettings :: Bool -> Settings REPL
replSettings isBatch = Settings
{ complete = cryptolCommand
, historyFile = Nothing
, autoAddHistory = not isBatch
}
-- .cryptolrc ------------------------------------------------------------------
-- | Configuration of @.cryptolrc@ file behavior. The default option
-- searches the following locations in order, and evaluates the first
-- file that exists in batch mode on interpreter startup:
--
-- 1. $PWD/.cryptolrc
-- 2. $HOME/.cryptolrc
--
-- If files are specified, they will all be evaluated, but none of the
-- default files will be (unless they are explicitly specified).
--
-- The disabled option inhibits any reading of any .cryptolrc files.
data Cryptolrc =
CryrcDefault
| CryrcDisabled
| CryrcFiles [FilePath]
deriving (Show)
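-- | An added illustration (hypothetical file names): evaluate two specific
-- scripts on startup instead of searching the default locations described
-- above.
exampleCryrc :: Cryptolrc
exampleCryrc = CryrcFiles ["project.cryptolrc", "extra.cryptolrc"]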
-- Utilities -------------------------------------------------------------------
instance MonadException REPL where
controlIO branchIO = REPL $ \ ref -> do
runBody <- branchIO $ RunIO $ \ m -> do
a <- unREPL m ref
return (return a)
unREPL runBody ref
-- Titles ----------------------------------------------------------------------
mkTitle :: Maybe LoadedModule -> String
mkTitle lm = maybe "" (\ m -> pretty m ++ " - ") (lName =<< lm)
++ "cryptol"
setREPLTitle :: REPL ()
setREPLTitle = do
lm <- getLoadedMod
io (setTitle (mkTitle lm))
-- Completion ------------------------------------------------------------------
-- | Completion for cryptol commands.
cryptolCommand :: CompletionFunc REPL
cryptolCommand cursor@(l,r)
| ":" `isPrefixOf` l'
, Just (cmd,rest) <- splitCommand l' = case nub (findCommand cmd) of
[c] | null rest && not (any isSpace l') -> do
return (l, cmdComp cmd c)
| otherwise -> do
(rest',cs) <- cmdArgument (cBody c) (reverse (sanitize rest),r)
return (unwords [rest', reverse cmd],cs)
cmds ->
return (l, concat [ cmdComp l' c | c <- cmds ])
-- Complete all : commands when the line is just a :
| ":" == l' = return (l, concat [ cmdComp l' c | c <- nub (findCommand ":") ])
| otherwise = completeExpr cursor
where
l' = sanitize (reverse l)
-- | Generate completions from a REPL command definition.
cmdComp :: String -> CommandDescr -> [Completion]
cmdComp prefix c = do
cName <- cNames c
guard (prefix `isPrefixOf` cName)
return $ Completion
{ replacement = drop (length prefix) cName
, display = cName
, isFinished = True
}
-- | Dispatch to a completion function based on the kind of completion the
-- command is expecting.
cmdArgument :: CommandBody -> CompletionFunc REPL
cmdArgument ct cursor@(l,_) = case ct of
ExprArg _ -> completeExpr cursor
DeclsArg _ -> (completeExpr +++ completeType) cursor
ExprTypeArg _ -> (completeExpr +++ completeType) cursor
FilenameArg _ -> completeFilename cursor
ShellArg _ -> completeFilename cursor
OptionArg _ -> completeOption cursor
NoArg _ -> return (l,[])
-- | Complete a name from the expression environment.
completeExpr :: CompletionFunc REPL
completeExpr (l,_) = do
ns <- getExprNames
let n = reverse (takeWhile isIdentChar l)
vars = filter (n `isPrefixOf`) ns
return (l,map (nameComp n) vars)
-- | Complete a name from the type synonym environment.
completeType :: CompletionFunc REPL
completeType (l,_) = do
ns <- getTypeNames
let n = reverse (takeWhile isIdentChar l)
vars = filter (n `isPrefixOf`) ns
return (l,map (nameComp n) vars)
-- | Generate a completion from a prefix and a name.
nameComp :: String -> String -> Completion
nameComp prefix c = Completion
{ replacement = drop (length prefix) c
, display = c
, isFinished = True
}
isIdentChar :: Char -> Bool
isIdentChar c = isAlphaNum c || c `elem` "_\'"
-- | Join two completion functions together, merging and sorting their results.
(+++) :: CompletionFunc REPL -> CompletionFunc REPL -> CompletionFunc REPL
(as +++ bs) cursor = do
(_,acs) <- as cursor
(_,bcs) <- bs cursor
return (fst cursor, sortBy (compare `on` replacement) (acs ++ bcs))
-- | Complete an option from the options environment.
--
-- XXX this can do better, as it has access to the expected form of the value
completeOption :: CompletionFunc REPL
completeOption cursor@(l,_) = return (fst cursor, map comp opts)
where
n = reverse l
opts = lookupTrie n userOptions
comp opt = Completion
{ replacement = drop (length n) (optName opt)
, display = optName opt
, isFinished = False
}
|
iblumenfeld/cryptol
|
cryptol/REPL/Haskeline.hs
|
bsd-3-clause
| 8,090 | 0 | 21 | 2,022 | 2,162 | 1,110 | 1,052 | 159 | 8 |
{-# LANGUAGE RankNTypes #-}
module Mini where
data Type t = Int | TVar t | Fun (Type t) (Type t) | Forall (t -> Type t)
newtype CType = CType { unCType :: forall t. Type t }
konstTy = Forall (\a -> Forall (\b -> Fun (TVar a) (Fun (TVar b) (TVar a))))
t1 = let Forall f = konstTy in Forall (\a -> subst' a Int (f a))
t2 = let Forall f = konstTy in join (f Int)
dedeBruijnType :: Int -> Int -> [t] -> Type Int -> Type t
dedeBruijnType s _ as (TVar i) = TVar (reverse as !! (i-s))
dedeBruijnType s _ _ Int = Int
dedeBruijnType s i as (Fun t1 t2) = Fun (dedeBruijnType s i as t1) (dedeBruijnType s i as t2)
dedeBruijnType s i as (Forall f) = Forall (\a -> dedeBruijnType s (i+1) (a:as) (f i))
subst' :: Int -> Type Int -> Type Int -> Type Int
subst' x r Int = Int
subst' x r (TVar a)
| a == x = r
| otherwise = TVar a
subst' x r (Fun t1 t2) = Fun (subst' x r t1) (subst' x r t2)
subst' x r (Forall f) = Forall (\a -> subst' x r (f a))
-- subst :: Type Int -> Type Int -> Type t
-- subst t1 t2 = dedeBruijnType subst' 0 t1 t2
pretty :: Int -> Type Int -> String
pretty _ Int = "Int"
pretty _ (TVar a) = "a" ++ show a
pretty i (Fun t1 t2) = "(" ++ pretty i t1 ++ " -> " ++ pretty i t2 ++ ")"
pretty i (Forall f) = "forall a" ++ show i ++ ". " ++ pretty (i+1) (f i)
join :: Type (Type t) -> Type t
join Int = Int
join (TVar a) = a
join (Fun t1 t2) = Fun (join t1) (join t2)
join (Forall f) = Forall (\a -> join (f (TVar a)))
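-- An added sanity check (not in the original module): instantiating 'konstTy'
-- at Int and flattening with 'join' gives the expected rendering.
exampleT2 :: String
exampleT2 = pretty 0 t2   -- "forall a0. (Int -> (a0 -> Int))"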
|
bixuanzju/fcore
|
lib/Mini.hs
|
bsd-2-clause
| 1,498 | 0 | 15 | 413 | 824 | 412 | 412 | 29 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE DeriveGeneric #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Complex
-- Copyright : (c) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
--
-- Complex numbers.
--
-----------------------------------------------------------------------------
module Data.Complex
(
-- * Rectangular form
Complex((:+))
, realPart
, imagPart
-- * Polar form
, mkPolar
, cis
, polar
, magnitude
, phase
-- * Conjugate
, conjugate
) where
import GHC.Generics (Generic)
import Data.Data (Data)
import Foreign (Storable, castPtr, peek, poke, pokeElemOff, peekElemOff, sizeOf,
alignment)
infix 6 :+
-- -----------------------------------------------------------------------------
-- The Complex type
-- | Complex numbers are an algebraic type.
--
-- For a complex number @z@, @'abs' z@ is a number with the magnitude of @z@,
-- but oriented in the positive real direction, whereas @'signum' z@
-- has the phase of @z@, but unit magnitude.
data Complex a
= !a :+ !a -- ^ forms a complex number from its real and imaginary
-- rectangular components.
deriving (Eq, Show, Read, Data, Generic)
-- -----------------------------------------------------------------------------
-- Functions over Complex
-- | Extracts the real part of a complex number.
realPart :: Complex a -> a
realPart (x :+ _) = x
-- | Extracts the imaginary part of a complex number.
imagPart :: Complex a -> a
imagPart (_ :+ y) = y
-- | The conjugate of a complex number.
{-# SPECIALISE conjugate :: Complex Double -> Complex Double #-}
conjugate :: Num a => Complex a -> Complex a
conjugate (x:+y) = x :+ (-y)
-- | Form a complex number from polar components of magnitude and phase.
{-# SPECIALISE mkPolar :: Double -> Double -> Complex Double #-}
mkPolar :: Floating a => a -> a -> Complex a
mkPolar r theta = r * cos theta :+ r * sin theta
-- | @'cis' t@ is a complex value with magnitude @1@
-- and phase @t@ (modulo @2*'pi'@).
{-# SPECIALISE cis :: Double -> Complex Double #-}
cis :: Floating a => a -> Complex a
cis theta = cos theta :+ sin theta
-- | The function 'polar' takes a complex number and
-- returns a (magnitude, phase) pair in canonical form:
-- the magnitude is nonnegative, and the phase in the range @(-'pi', 'pi']@;
-- if the magnitude is zero, then so is the phase.
{-# SPECIALISE polar :: Complex Double -> (Double,Double) #-}
polar :: (RealFloat a) => Complex a -> (a,a)
polar z = (magnitude z, phase z)
-- | The nonnegative magnitude of a complex number.
{-# SPECIALISE magnitude :: Complex Double -> Double #-}
magnitude :: (RealFloat a) => Complex a -> a
magnitude (x:+y) = scaleFloat k
(sqrt (sqr (scaleFloat mk x) + sqr (scaleFloat mk y)))
where k = max (exponent x) (exponent y)
mk = - k
sqr z = z * z
-- | The phase of a complex number, in the range @(-'pi', 'pi']@.
-- If the magnitude is zero, then so is the phase.
{-# SPECIALISE phase :: Complex Double -> Double #-}
phase :: (RealFloat a) => Complex a -> a
phase (0 :+ 0) = 0 -- SLPJ July 97 from John Peterson
phase (x:+y) = atan2 y x
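-- | An added example (not part of the original module): 'polar' recovers the
-- magnitude and phase passed to 'mkPolar', up to floating-point rounding.
examplePolar :: (Double, Double)
examplePolar = polar (mkPolar 2 (pi/4))   -- approximately (2.0, pi/4)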
-- -----------------------------------------------------------------------------
-- Instances of Complex
instance (RealFloat a) => Num (Complex a) where
{-# SPECIALISE instance Num (Complex Float) #-}
{-# SPECIALISE instance Num (Complex Double) #-}
(x:+y) + (x':+y') = (x+x') :+ (y+y')
(x:+y) - (x':+y') = (x-x') :+ (y-y')
(x:+y) * (x':+y') = (x*x'-y*y') :+ (x*y'+y*x')
negate (x:+y) = negate x :+ negate y
abs z = magnitude z :+ 0
signum (0:+0) = 0
signum z@(x:+y) = x/r :+ y/r where r = magnitude z
fromInteger n = fromInteger n :+ 0
instance (RealFloat a) => Fractional (Complex a) where
{-# SPECIALISE instance Fractional (Complex Float) #-}
{-# SPECIALISE instance Fractional (Complex Double) #-}
(x:+y) / (x':+y') = (x*x''+y*y'') / d :+ (y*x''-x*y'') / d
where x'' = scaleFloat k x'
y'' = scaleFloat k y'
k = - max (exponent x') (exponent y')
d = x'*x'' + y'*y''
fromRational a = fromRational a :+ 0
instance (RealFloat a) => Floating (Complex a) where
{-# SPECIALISE instance Floating (Complex Float) #-}
{-# SPECIALISE instance Floating (Complex Double) #-}
pi = pi :+ 0
exp (x:+y) = expx * cos y :+ expx * sin y
where expx = exp x
log z = log (magnitude z) :+ phase z
x ** y = case (x,y) of
(_ , (0:+0)) -> 1 :+ 0
((0:+0), (exp_re:+_)) -> case compare exp_re 0 of
GT -> 0 :+ 0
LT -> inf :+ 0
EQ -> nan :+ nan
((re:+im), (exp_re:+_))
| (isInfinite re || isInfinite im) -> case compare exp_re 0 of
GT -> inf :+ 0
LT -> 0 :+ 0
EQ -> nan :+ nan
| otherwise -> exp (log x * y)
where
inf = 1/0
nan = 0/0
sqrt (0:+0) = 0
sqrt z@(x:+y) = u :+ (if y < 0 then -v else v)
where (u,v) = if x < 0 then (v',u') else (u',v')
v' = abs y / (u'*2)
u' = sqrt ((magnitude z + abs x) / 2)
sin (x:+y) = sin x * cosh y :+ cos x * sinh y
cos (x:+y) = cos x * cosh y :+ (- sin x * sinh y)
tan (x:+y) = (sinx*coshy:+cosx*sinhy)/(cosx*coshy:+(-sinx*sinhy))
where sinx = sin x
cosx = cos x
sinhy = sinh y
coshy = cosh y
sinh (x:+y) = cos y * sinh x :+ sin y * cosh x
cosh (x:+y) = cos y * cosh x :+ sin y * sinh x
tanh (x:+y) = (cosy*sinhx:+siny*coshx)/(cosy*coshx:+siny*sinhx)
where siny = sin y
cosy = cos y
sinhx = sinh x
coshx = cosh x
asin z@(x:+y) = y':+(-x')
where (x':+y') = log (((-y):+x) + sqrt (1 - z*z))
acos z = y'':+(-x'')
where (x'':+y'') = log (z + ((-y'):+x'))
(x':+y') = sqrt (1 - z*z)
atan z@(x:+y) = y':+(-x')
where (x':+y') = log (((1-y):+x) / sqrt (1+z*z))
asinh z = log (z + sqrt (1+z*z))
acosh z = log (z + (z+1) * sqrt ((z-1)/(z+1)))
atanh z = 0.5 * log ((1.0+z) / (1.0-z))
instance Storable a => Storable (Complex a) where
sizeOf a = 2 * sizeOf (realPart a)
alignment a = alignment (realPart a)
peek p = do
q <- return $ castPtr p
r <- peek q
i <- peekElemOff q 1
return (r :+ i)
poke p (r :+ i) = do
q <-return $ (castPtr p)
poke q r
pokeElemOff q 1 i
|
urbanslug/ghc
|
libraries/base/Data/Complex.hs
|
bsd-3-clause
| 7,633 | 2 | 14 | 2,714 | 2,379 | 1,255 | 1,124 | 139 | 1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="bs-BA">
<title>MacOS WebDrivers</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
thc202/zap-extensions
|
addOns/webdrivers/webdrivermacos/src/main/javahelp/org/zaproxy/zap/extension/webdrivermacos/resources/help_bs_BA/helpset_bs_BA.hs
|
apache-2.0
| 961 | 77 | 66 | 156 | 407 | 206 | 201 | -1 | -1 |
{-|
Module : Idris.Docstrings
Description : Wrapper around Markdown library.
Copyright :
License : BSD3
Maintainer : The Idris Community.
-}
{-# LANGUAGE DeriveFunctor, DeriveGeneric, ScopedTypeVariables #-}
{-# OPTIONS_GHC -fwarn-incomplete-patterns #-}
module Idris.Docstrings (
Docstring(..), Block(..), Inline(..), parseDocstring, renderDocstring
, emptyDocstring, nullDocstring, noDocs, overview, containsText
, renderHtml, annotCode, DocTerm(..), renderDocTerm, checkDocstring
) where
import Idris.Core.TT (Err, Name, OutputAnnotation(..), Term, TextFormatting(..))
import Util.Pretty
import Prelude hiding ((<$>))
import qualified Cheapskate as C
import Cheapskate.Html (renderDoc)
import qualified Cheapskate.Types as CT
import Data.Foldable (Foldable)
import qualified Data.Foldable as F
import qualified Data.Sequence as S
import qualified Data.Text as T
import Data.Traversable (Traversable)
import GHC.Generics (Generic)
import Text.Blaze.Html (Html)
-- | The various kinds of code samples that can be embedded in docs
data DocTerm = Unchecked
| Checked Term
| Example Term
| Failing Err
deriving (Show, Generic)
-- | Render a term in the documentation
renderDocTerm :: (Term -> Doc OutputAnnotation) -> (Term -> Term) -> DocTerm -> String -> Doc OutputAnnotation
renderDocTerm pp norm Unchecked src = text src
renderDocTerm pp norm (Checked tm) src = pp tm
renderDocTerm pp norm (Example tm) src = align $
text ">" <+> align (pp tm) <$>
pp (norm tm)
renderDocTerm pp norm (Failing err) src = annotate (AnnErr err) $ text src
-- | Representation of Idris's inline documentation. The type parameter
-- represents the type of terms that are associated with code blocks.
data Docstring a = DocString CT.Options (Blocks a)
deriving (Show, Functor, Foldable, Traversable, Generic)
type Blocks a = S.Seq (Block a)
-- | Block-level elements.
data Block a = Para (Inlines a)
| Header Int (Inlines a)
| Blockquote (Blocks a)
| List Bool CT.ListType [Blocks a]
| CodeBlock CT.CodeAttr T.Text a
| HtmlBlock T.Text
| HRule
deriving (Show, Functor, Foldable, Traversable, Generic)
data Inline a = Str T.Text
| Space
| SoftBreak
| LineBreak
| Emph (Inlines a)
| Strong (Inlines a)
| Code T.Text a
| Link (Inlines a) T.Text T.Text
| Image (Inlines a) T.Text T.Text
| Entity T.Text
| RawHtml T.Text
deriving (Show, Functor, Foldable, Traversable, Generic)
type Inlines a = S.Seq (Inline a)
-- | Run some kind of processing step over code in a Docstring. The code
-- processor gets the language and annotations as parameters, along with the
-- source and the original annotation.
checkDocstring :: forall a b. (String -> [String] -> String -> a -> b) -> Docstring a -> Docstring b
checkDocstring f (DocString opts blocks) = DocString opts (fmap (checkBlock f) blocks)
where checkBlock :: (String -> [String] -> String -> a -> b) -> Block a -> Block b
checkBlock f (Para inlines) = Para (fmap (checkInline f) inlines)
checkBlock f (Header i inlines) = Header i (fmap (checkInline f) inlines)
checkBlock f (Blockquote bs) = Blockquote (fmap (checkBlock f) bs)
checkBlock f (List b t blocks) = List b t (fmap (fmap (checkBlock f)) blocks)
checkBlock f (CodeBlock attrs src tm) = CodeBlock attrs src
(f (T.unpack $ CT.codeLang attrs)
(words . T.unpack $ CT.codeInfo attrs)
(T.unpack src)
tm)
checkBlock f (HtmlBlock src) = HtmlBlock src
checkBlock f HRule = HRule
checkInline :: (String -> [String] -> String -> a -> b) -> Inline a -> Inline b
checkInline f (Str txt) = Str txt
checkInline f Space = Space
checkInline f SoftBreak = SoftBreak
checkInline f LineBreak = LineBreak
checkInline f (Emph is) = Emph (fmap (checkInline f) is)
checkInline f (Strong is) = Strong (fmap (checkInline f) is)
checkInline f (Code src x) = Code src (f "" [] (T.unpack src) x)
checkInline f (Link is url title) = Link (fmap (checkInline f) is) url title
checkInline f (Image is url title) = Image (fmap (checkInline f) is) url title
checkInline f (Entity txt) = Entity txt
checkInline f (RawHtml src) = RawHtml src
-- | Construct a docstring from a Text that contains Markdown-formatted docs
parseDocstring :: T.Text -> Docstring ()
parseDocstring = toDocstring . C.markdown options
where toDocstring :: CT.Doc -> Docstring ()
toDocstring (CT.Doc opts blocks) = DocString opts (fmap toBlock blocks)
toBlock :: CT.Block -> Block ()
toBlock (CT.Para inlines) = Para (fmap toInline inlines)
toBlock (CT.Header i inlines) = Header i (fmap toInline inlines)
toBlock (CT.Blockquote blocks) = Blockquote (fmap toBlock blocks)
toBlock (CT.List b t blocks) = List b t (fmap (fmap toBlock) blocks)
toBlock (CT.CodeBlock attrs text) = CodeBlock attrs text ()
toBlock (CT.HtmlBlock src) = HtmlBlock src
toBlock CT.HRule = HRule
toInline :: CT.Inline -> Inline ()
toInline (CT.Str t) = Str t
toInline CT.Space = Space
toInline CT.SoftBreak = SoftBreak
toInline CT.LineBreak = LineBreak
toInline (CT.Emph is) = Emph (fmap toInline is)
toInline (CT.Strong is) = Strong (fmap toInline is)
toInline (CT.Code src) = Code src ()
toInline (CT.Link is url title) = Link (fmap toInline is) url title
toInline (CT.Image is url title) = Image (fmap toInline is) url title
toInline (CT.Entity txt) = Entity txt
toInline (CT.RawHtml src) = RawHtml src
options = CT.Options { CT.sanitize = True
, CT.allowRawHtml = False
, CT.preserveHardBreaks = True
, CT.debug = False
}
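-- | An added sketch (not part of the original module): parsing a short
-- Markdown docstring; code spans carry @()@ until they are checked.
exampleDocstring :: Docstring ()
exampleDocstring = parseDocstring (T.pack "A *quick* example with `map f xs`.")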
-- | Convert a docstring to be shown by the pretty-printer
renderDocstring :: (a -> String -> Doc OutputAnnotation) -> Docstring a -> Doc OutputAnnotation
renderDocstring pp (DocString _ blocks) = renderBlocks pp blocks
-- | Construct a docstring consisting of the first block-level element of the
-- argument docstring, for use in summaries.
overview :: Docstring a -> Docstring a
overview (DocString opts blocks) = DocString opts (S.take 1 blocks)
renderBlocks :: (a -> String -> Doc OutputAnnotation)
-> Blocks a -> Doc OutputAnnotation
renderBlocks pp blocks | S.length blocks > 1 = F.foldr1 (\b1 b2 -> b1 <> line <> line <> b2) $
fmap (renderBlock pp) blocks
| S.length blocks == 1 = renderBlock pp (S.index blocks 0)
| otherwise = empty
renderBlock :: (a -> String -> Doc OutputAnnotation)
-> Block a -> Doc OutputAnnotation
renderBlock pp (Para inlines) = renderInlines pp inlines
renderBlock pp (Header lvl inlines) = renderInlines pp inlines <+> parens (text (show lvl))
renderBlock pp (Blockquote blocks) = indent 8 $ renderBlocks pp blocks
renderBlock pp (List b ty blockss) = renderList pp b ty blockss
renderBlock pp (CodeBlock attr src tm) = indent 4 $ pp tm (T.unpack src)
renderBlock pp (HtmlBlock txt) = text "<html block>" -- TODO
renderBlock pp HRule = text "----------------------"
renderList :: (a -> String -> Doc OutputAnnotation)
-> Bool -> CT.ListType -> [Blocks a] -> Doc OutputAnnotation
renderList pp b (CT.Bullet c) blockss = vsep $ map (hang 4 . (char c <+>) . renderBlocks pp) blockss
renderList pp b (CT.Numbered nw i) blockss =
vsep $
zipWith3 (\n p txt -> hang 4 $ text (show n) <> p <+> txt)
[i..] (repeat punc) (map (renderBlocks pp) blockss)
where punc = case nw of
CT.PeriodFollowing -> char '.'
CT.ParenFollowing -> char '('
renderInlines :: (a -> String -> Doc OutputAnnotation) -> Inlines a -> Doc OutputAnnotation
renderInlines pp = F.foldr (<>) empty . fmap (renderInline pp)
renderInline :: (a -> String -> Doc OutputAnnotation) -> Inline a -> Doc OutputAnnotation
renderInline pp (Str s) = text $ T.unpack s
renderInline pp Space = softline
renderInline pp SoftBreak = softline
renderInline pp LineBreak = line
renderInline pp (Emph txt) = annotate (AnnTextFmt ItalicText) $ renderInlines pp txt
renderInline pp (Strong txt) = annotate (AnnTextFmt BoldText) $ renderInlines pp txt
renderInline pp (Code txt tm) = pp tm $ T.unpack txt
renderInline pp (Link body url title) = annotate (AnnLink (T.unpack url)) (renderInlines pp body)
renderInline pp (Image body url title) = text "<image>" -- TODO
renderInline pp (Entity a) = text $ "<entity " ++ T.unpack a ++ ">" -- TODO
renderInline pp (RawHtml txt) = text "<html content>" --TODO
-- | The empty docstring
emptyDocstring :: Docstring a
emptyDocstring = DocString options S.empty
-- | Check whether a docstring is empty
nullDocstring :: Docstring a -> Bool
nullDocstring (DocString _ blocks) = S.null blocks
-- | Empty documentation for a definition
noDocs :: (Docstring a, [(Name, Docstring a)])
noDocs = (emptyDocstring, [])
-- | Does a string occur in the docstring?
containsText :: T.Text -> Docstring a -> Bool
containsText str (DocString _ blocks) = F.any (blockContains (T.toLower str)) blocks
-- blockContains and inlineContains should always be called with a lower-case search string
where blockContains :: T.Text -> Block a -> Bool
blockContains str (Para inlines) = F.any (inlineContains str) inlines
blockContains str (Header lvl inlines) = F.any (inlineContains str) inlines
blockContains str (Blockquote blocks) = F.any (blockContains str) blocks
blockContains str (List b ty blockss) = F.any (F.any (blockContains str)) blockss
blockContains str (CodeBlock attr src _) = T.isInfixOf str (T.toLower src)
blockContains str (HtmlBlock txt) = False -- TODO
blockContains str HRule = False
inlineContains :: T.Text -> Inline a -> Bool
inlineContains str (Str s) = T.isInfixOf str (T.toLower s)
inlineContains str Space = False
inlineContains str SoftBreak = False
inlineContains str LineBreak = False
inlineContains str (Emph txt) = F.any (inlineContains str) txt
inlineContains str (Strong txt) = F.any (inlineContains str) txt
inlineContains str (Code txt _) = T.isInfixOf str (T.toLower txt)
inlineContains str (Link body url title) = F.any (inlineContains str) body
inlineContains str (Image body url title) = False
inlineContains str (Entity a) = False
inlineContains str (RawHtml txt) = T.isInfixOf str (T.toLower txt)
renderHtml :: Docstring DocTerm -> Html
renderHtml = renderDoc . fromDocstring
where
fromDocstring :: Docstring DocTerm -> CT.Doc
fromDocstring (DocString opts blocks) = CT.Doc opts (fmap fromBlock blocks)
fromBlock :: Block DocTerm -> CT.Block
fromBlock (Para inlines) = CT.Para (fmap fromInline inlines)
fromBlock (Header i inlines) = CT.Header i (fmap fromInline inlines)
fromBlock (Blockquote blocks) = CT.Blockquote (fmap fromBlock blocks)
fromBlock (List b t blocks) = CT.List b t (fmap (fmap fromBlock) blocks)
fromBlock (CodeBlock attrs text _) = CT.CodeBlock attrs text
fromBlock (HtmlBlock src) = CT.HtmlBlock src
fromBlock HRule = CT.HRule
fromInline :: Inline DocTerm -> CT.Inline
fromInline (Str t) = CT.Str t
fromInline Space = CT.Space
fromInline SoftBreak = CT.SoftBreak
fromInline LineBreak = CT.LineBreak
fromInline (Emph is) = CT.Emph (fmap fromInline is)
fromInline (Strong is) = CT.Strong (fmap fromInline is)
fromInline (Code src _) = CT.Code src
fromInline (Link is url title) = CT.Link (fmap fromInline is) url title
fromInline (Image is url title) = CT.Image (fmap fromInline is) url title
fromInline (Entity txt) = CT.Entity txt
fromInline (RawHtml src) = CT.RawHtml src
-- | Annotate the code samples in a docstring
annotCode :: forall a b. (String -> b) -- ^ How to annotate code samples
-> Docstring a
-> Docstring b
annotCode annot (DocString opts blocks)
= DocString opts $ fmap annotCodeBlock blocks
where
annotCodeBlock :: Block a -> Block b
annotCodeBlock (Para inlines) = Para (fmap annotCodeInline inlines)
annotCodeBlock (Header i inlines) = Header i (fmap annotCodeInline inlines)
annotCodeBlock (Blockquote blocks) = Blockquote (fmap annotCodeBlock blocks)
annotCodeBlock (List b t blocks) = List b t (fmap (fmap annotCodeBlock) blocks)
annotCodeBlock (CodeBlock attrs src _) = CodeBlock attrs src (annot (T.unpack src))
annotCodeBlock (HtmlBlock src) = HtmlBlock src
annotCodeBlock HRule = HRule
annotCodeInline :: Inline a -> Inline b
annotCodeInline (Str t) = Str t
annotCodeInline Space = Space
annotCodeInline SoftBreak = SoftBreak
annotCodeInline LineBreak = LineBreak
annotCodeInline (Emph is) = Emph (fmap annotCodeInline is)
annotCodeInline (Strong is) = Strong (fmap annotCodeInline is)
annotCodeInline (Code src _) = Code src (annot (T.unpack src))
annotCodeInline (Link is url title) = Link (fmap annotCodeInline is) url title
annotCodeInline (Image is url title) = Image (fmap annotCodeInline is) url title
annotCodeInline (Entity txt) = Entity txt
annotCodeInline (RawHtml src) = RawHtml src
|
ben-schulz/Idris-dev
|
src/Idris/Docstrings.hs
|
bsd-3-clause
| 14,535 | 0 | 14 | 4,157 | 4,728 | 2,383 | 2,345 | 229 | 17 |
module PropSyntaxStruct (module PropSyntaxStruct,module P) where
import BaseSyntax as P
import HsPropStruct as P
import HsPropMaps as P
import HsPropPretty
import HasBaseStruct
import HasPropStruct
import NameMaps
import PrettyPrint
import HsDeclPretty(ppContext)
import SrcLoc
import MUtils
import MapDeclM
import Recursive(struct)
data Q c t = c:=>t deriving (Eq,Show)
--type PropEI i e p ds t c = EI i e p ds t c
type PropDI i e p ds t c tp pa pp = Prop (DI i e p ds t c tp) (PD i pa pp)
data Prop b p = Base b | Prop p deriving (Eq,Show)
prop bf pf (Base b) = bf b
prop bf pf (Prop p) = pf p
recprop f g = prop f g . struct
mapProp bf pf = prop (Base . bf) (Prop . pf)
mapMProp bf pf = prop (fmap Base . bf) (fmap Prop . pf)
mapBase f = mapProp f id
isBase p = prop p (const False)
maybeBase f = prop f (const Nothing)
maybeProp = prop (const Nothing)
listBase f = prop f (const [])
seqProp (Base b) = Base # b
seqProp (Prop p) = Prop # p
instance HasBaseStruct (Prop b p) b where base = Base
instance GetBaseStruct (Prop b p) b where basestruct = maybeBase Just
instance HasPropStruct (Prop b p) p where proprec = Prop
instance GetPropStruct (Prop b p) p where propstruct = maybeProp Just
--------------------------------------------------------------------------------
instance (AccNames i b,AccNames i p) => AccNames i (Prop b p) where
accNames f = prop (accNames f) (accNames f)
instance (AccNames i c,AccNames i t) => AccNames i (Q c t) where
accNames f (c:=>t) = accNames f c . accNames f t
--------------------------------------------------------------------------------
instance (HasSrcLoc b,HasSrcLoc p) => HasSrcLoc (Prop b p) where
srcLoc = prop srcLoc srcLoc
instance (MapDeclM b ds,MapDeclM p ds) => MapDeclM (Prop b p) ds where
mapDeclM f = mapMProp (mapDeclM f) (mapDeclM f)
instance (Printable b,Printable p) => Printable (Prop b p) where
ppi = prop ppi ppi
wrap = prop wrap wrap
instance (Printable c,Printable t) => Printable (Q c t) where
ppi (c:=>t) = ppContext (ppis c)<+>t
|
forste/haReFork
|
tools/property/syntax/PropSyntaxStruct.hs
|
bsd-3-clause
| 2,034 | 0 | 9 | 390 | 859 | 453 | 406 | -1 | -1 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE ExplicitNamespaces #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Type.Equality
-- License : BSD-style (see the LICENSE file in the distribution)
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : not portable
--
-- Definition of propositional equality @(:~:)@. Pattern-matching on a variable
-- of type @(a :~: b)@ produces a proof that @a ~ b@.
--
-- /Since: 4.7.0.0/
-----------------------------------------------------------------------------
module Data.Type.Equality (
-- * The equality type
(:~:)(..),
-- * Working with equality
sym, trans, castWith, gcastWith, apply, inner, outer,
-- * Inferring equality from other types
TestEquality(..),
-- * Boolean type-level equality
type (==)
) where
import Data.Maybe
import GHC.Enum
import GHC.Show
import GHC.Read
import GHC.Base
import Data.Type.Bool
infix 4 :~:
-- | Propositional equality. If @a :~: b@ is inhabited by some terminating
-- value, then the type @a@ is the same as the type @b@. To use this equality
-- in practice, pattern-match on the @a :~: b@ to get out the @Refl@ constructor;
-- in the body of the pattern-match, the compiler knows that @a ~ b@.
--
-- /Since: 4.7.0.0/
data a :~: b where
Refl :: a :~: a
-- with credit to Conal Elliott for 'ty', Erik Hesselink & Martijn van
-- Steenbergen for 'type-equality', Edward Kmett for 'eq', and Gabor Greif
-- for 'type-eq'
-- | Symmetry of equality
sym :: (a :~: b) -> (b :~: a)
sym Refl = Refl
-- | Transitivity of equality
trans :: (a :~: b) -> (b :~: c) -> (a :~: c)
trans Refl Refl = Refl
-- | Type-safe cast, using propositional equality
castWith :: (a :~: b) -> a -> b
castWith Refl x = x
-- | Generalized form of type-safe cast using propositional equality
gcastWith :: (a :~: b) -> ((a ~ b) => r) -> r
gcastWith Refl x = x
-- | Apply one equality to another, respectively
apply :: (f :~: g) -> (a :~: b) -> (f a :~: g b)
apply Refl Refl = Refl
-- | Extract equality of the arguments from an equality of applied types
inner :: (f a :~: g b) -> (a :~: b)
inner Refl = Refl
-- | Extract equality of type constructors from an equality of applied types
outer :: (f a :~: g b) -> (f :~: g)
outer Refl = Refl
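-- | A small added example: composing 'sym' with 'castWith' casts in the
-- direction opposite to the supplied proof.
castBack :: (a :~: b) -> b -> a
castBack eq = castWith (sym eq)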
deriving instance Eq (a :~: b)
deriving instance Show (a :~: b)
deriving instance Ord (a :~: b)
instance a ~ b => Read (a :~: b) where
readsPrec d = readParen (d > 10) (\r -> [(Refl, s) | ("Refl",s) <- lex r ])
instance a ~ b => Enum (a :~: b) where
toEnum 0 = Refl
toEnum _ = error "Data.Type.Equality.toEnum: bad argument"
fromEnum Refl = 0
instance a ~ b => Bounded (a :~: b) where
minBound = Refl
maxBound = Refl
-- | This class contains types where you can learn the equality of two types
-- from information contained in /terms/. Typically, only singleton types should
-- inhabit this class.
class TestEquality f where
-- | Conditionally prove the equality of @a@ and @b@.
testEquality :: f a -> f b -> Maybe (a :~: b)
instance TestEquality ((:~:) a) where
testEquality Refl Refl = Just Refl
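-- Editor's sketch (hypothetical, not part of the original module): a tiny
-- singleton type and a 'TestEquality' instance for it; comparing the
-- term-level constructors recovers a proof about the type-level indices.
data SBool (b :: Bool) where
  STrue  :: SBool 'True
  SFalse :: SBool 'False
instance TestEquality SBool where
  testEquality STrue  STrue  = Just Refl
  testEquality SFalse SFalse = Just Refl
  testEquality _      _      = Nothing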
-- | A type family to compute Boolean equality. Instances are provided
-- only for /open/ kinds, such as @*@ and function kinds. Instances are
-- also provided for datatypes exported from base. A poly-kinded instance
-- is /not/ provided, as a recursive definition for algebraic kinds is
-- generally more useful.
type family (a :: k) == (b :: k) :: Bool
infix 4 ==
{-
This comment explains more about why a poly-kinded instance for (==) is
not provided. To be concrete, here would be the poly-kinded instance:
type family EqPoly (a :: k) (b :: k) where
EqPoly a a = True
EqPoly a b = False
type instance (a :: k) == (b :: k) = EqPoly a b
Note that this overlaps with every other instance -- if this were defined,
it would be the only instance for (==).
Now, consider
data Nat = Zero | Succ Nat
Suppose I want
foo :: (Succ n == Succ m) ~ True => ((n == m) :~: True)
foo = Refl
This would not type-check with the poly-kinded instance. `Succ n == Succ m`
quickly becomes `EqPoly (Succ n) (Succ m)` but then is stuck. We don't know
enough about `n` and `m` to reduce further.
On the other hand, consider this:
type family EqNat (a :: Nat) (b :: Nat) where
EqNat Zero Zero = True
EqNat (Succ n) (Succ m) = EqNat n m
EqNat n m = False
type instance (a :: Nat) == (b :: Nat) = EqNat a b
With this instance, `foo` type-checks fine. `Succ n == Succ m` becomes `EqNat
(Succ n) (Succ m)` which becomes `EqNat n m`. Thus, we can conclude `(n == m)
~ True` as desired.
So, the Nat-specific instance allows strictly more reductions, and is thus
preferable to the poly-kinded instance. But, if we introduce the poly-kinded
instance, we are barred from writing the Nat-specific instance, due to
overlap.
Even better than the current instance for * would be one that does this sort
of recursion for all datatypes, something like this:
type family EqStar (a :: *) (b :: *) where
EqStar Bool Bool = True
EqStar (a,b) (c,d) = a == c && b == d
EqStar (Maybe a) (Maybe b) = a == b
...
EqStar a b = False
The problem is the (...) is extensible -- we would want to add new cases for
all datatypes in scope. This is not currently possible for closed type
families.
-}
-- all of the following closed type families are local to this module
type family EqStar (a :: *) (b :: *) where
EqStar a a = True
EqStar a b = False
-- This looks dangerous, but it isn't. This allows == to be defined
-- over arbitrary type constructors.
type family EqArrow (a :: k1 -> k2) (b :: k1 -> k2) where
EqArrow a a = True
EqArrow a b = False
type family EqBool a b where
EqBool True True = True
EqBool False False = True
EqBool a b = False
type family EqOrdering a b where
EqOrdering LT LT = True
EqOrdering EQ EQ = True
EqOrdering GT GT = True
EqOrdering a b = False
type EqUnit (a :: ()) (b :: ()) = True
type family EqList a b where
EqList '[] '[] = True
EqList (h1 ': t1) (h2 ': t2) = (h1 == h2) && (t1 == t2)
EqList a b = False
type family EqMaybe a b where
EqMaybe Nothing Nothing = True
EqMaybe (Just x) (Just y) = x == y
EqMaybe a b = False
type family Eq2 a b where
Eq2 '(a1, b1) '(a2, b2) = a1 == a2 && b1 == b2
type family Eq3 a b where
Eq3 '(a1, b1, c1) '(a2, b2, c2) = a1 == a2 && b1 == b2 && c1 == c2
type family Eq4 a b where
Eq4 '(a1, b1, c1, d1) '(a2, b2, c2, d2) = a1 == a2 && b1 == b2 && c1 == c2 && d1 == d2
type family Eq5 a b where
Eq5 '(a1, b1, c1, d1, e1) '(a2, b2, c2, d2, e2) = a1 == a2 && b1 == b2 && c1 == c2 && d1 == d2 && e1 == e2
type family Eq6 a b where
Eq6 '(a1, b1, c1, d1, e1, f1) '(a2, b2, c2, d2, e2, f2) = a1 == a2 && b1 == b2 && c1 == c2 && d1 == d2 && e1 == e2 && f1 == f2
type family Eq7 a b where
Eq7 '(a1, b1, c1, d1, e1, f1, g1) '(a2, b2, c2, d2, e2, f2, g2) = a1 == a2 && b1 == b2 && c1 == c2 && d1 == d2 && e1 == e2 && f1 == f2 && g1 == g2
type family Eq8 a b where
Eq8 '(a1, b1, c1, d1, e1, f1, g1, h1) '(a2, b2, c2, d2, e2, f2, g2, h2) = a1 == a2 && b1 == b2 && c1 == c2 && d1 == d2 && e1 == e2 && f1 == f2 && g1 == g2 && h1 == h2
type family Eq9 a b where
Eq9 '(a1, b1, c1, d1, e1, f1, g1, h1, i1) '(a2, b2, c2, d2, e2, f2, g2, h2, i2) = a1 == a2 && b1 == b2 && c1 == c2 && d1 == d2 && e1 == e2 && f1 == f2 && g1 == g2 && h1 == h2 && i1 == i2
type family Eq10 a b where
Eq10 '(a1, b1, c1, d1, e1, f1, g1, h1, i1, j1) '(a2, b2, c2, d2, e2, f2, g2, h2, i2, j2) = a1 == a2 && b1 == b2 && c1 == c2 && d1 == d2 && e1 == e2 && f1 == f2 && g1 == g2 && h1 == h2 && i1 == i2 && j1 == j2
type family Eq11 a b where
Eq11 '(a1, b1, c1, d1, e1, f1, g1, h1, i1, j1, k1) '(a2, b2, c2, d2, e2, f2, g2, h2, i2, j2, k2) = a1 == a2 && b1 == b2 && c1 == c2 && d1 == d2 && e1 == e2 && f1 == f2 && g1 == g2 && h1 == h2 && i1 == i2 && j1 == j2 && k1 == k2
type family Eq12 a b where
Eq12 '(a1, b1, c1, d1, e1, f1, g1, h1, i1, j1, k1, l1) '(a2, b2, c2, d2, e2, f2, g2, h2, i2, j2, k2, l2) = a1 == a2 && b1 == b2 && c1 == c2 && d1 == d2 && e1 == e2 && f1 == f2 && g1 == g2 && h1 == h2 && i1 == i2 && j1 == j2 && k1 == k2 && l1 == l2
type family Eq13 a b where
Eq13 '(a1, b1, c1, d1, e1, f1, g1, h1, i1, j1, k1, l1, m1) '(a2, b2, c2, d2, e2, f2, g2, h2, i2, j2, k2, l2, m2) = a1 == a2 && b1 == b2 && c1 == c2 && d1 == d2 && e1 == e2 && f1 == f2 && g1 == g2 && h1 == h2 && i1 == i2 && j1 == j2 && k1 == k2 && l1 == l2 && m1 == m2
type family Eq14 a b where
Eq14 '(a1, b1, c1, d1, e1, f1, g1, h1, i1, j1, k1, l1, m1, n1) '(a2, b2, c2, d2, e2, f2, g2, h2, i2, j2, k2, l2, m2, n2) = a1 == a2 && b1 == b2 && c1 == c2 && d1 == d2 && e1 == e2 && f1 == f2 && g1 == g2 && h1 == h2 && i1 == i2 && j1 == j2 && k1 == k2 && l1 == l2 && m1 == m2 && n1 == n2
type family Eq15 a b where
Eq15 '(a1, b1, c1, d1, e1, f1, g1, h1, i1, j1, k1, l1, m1, n1, o1) '(a2, b2, c2, d2, e2, f2, g2, h2, i2, j2, k2, l2, m2, n2, o2) = a1 == a2 && b1 == b2 && c1 == c2 && d1 == d2 && e1 == e2 && f1 == f2 && g1 == g2 && h1 == h2 && i1 == i2 && j1 == j2 && k1 == k2 && l1 == l2 && m1 == m2 && n1 == n2 && o1 == o2
-- these all look to be overlapping, but they are differentiated by their kinds
type instance a == b = EqStar a b
type instance a == b = EqArrow a b
type instance a == b = EqBool a b
type instance a == b = EqOrdering a b
type instance a == b = EqUnit a b
type instance a == b = EqList a b
type instance a == b = EqMaybe a b
type instance a == b = Eq2 a b
type instance a == b = Eq3 a b
type instance a == b = Eq4 a b
type instance a == b = Eq5 a b
type instance a == b = Eq6 a b
type instance a == b = Eq7 a b
type instance a == b = Eq8 a b
type instance a == b = Eq9 a b
type instance a == b = Eq10 a b
type instance a == b = Eq11 a b
type instance a == b = Eq12 a b
type instance a == b = Eq13 a b
type instance a == b = Eq14 a b
type instance a == b = Eq15 a b
|
frantisekfarka/ghc-dsi
|
libraries/base/Data/Type/Equality.hs
|
bsd-3-clause
| 10,351 | 21 | 34 | 2,565 | 3,370 | 1,939 | 1,431 | 131 | 1 |
{-# LANGUAGE DeriveAnyClass #-}
module T12144_2 where
class C1 a
instance C1 a => C1 (Foo a)
class C1 a => C2 a where
c2 :: a -> String
c2 _ = "C2 default"
newtype Foo a = Foo a deriving C2
foo :: C1 a => Foo a -> String
foo = c2
|
shlevy/ghc
|
testsuite/tests/deriving/should_compile/T12144_2.hs
|
bsd-3-clause
| 239 | 0 | 7 | 63 | 101 | 52 | 49 | -1 | -1 |
module T11208 where
import qualified Prelude as P
f n = n P.+ 1
g h (P.Just x) = P.Just (h x)
g _ P.Nothing = P.Nothing
|
ezyang/ghc
|
testsuite/tests/ghci/scripts/T11208.hs
|
bsd-3-clause
| 124 | 0 | 8 | 31 | 67 | 36 | 31 | 5 | 1 |
import System.IO (hFlush, stdout)
import System.Environment (getArgs)
import Control.Monad (mapM)
import Control.Monad.Error (runErrorT)
import Control.Monad.Trans (liftIO)
import qualified Data.Map as Map
import qualified Data.Traversable as DT
import Readline (readline, load_history)
import Types
import Reader (read_str)
import Printer (_pr_str)
import Env (Env, env_new, env_bind, env_get, env_set)
import Core as Core
-- read
mal_read :: String -> IOThrows MalVal
mal_read str = read_str str
-- eval
is_pair (MalList x _:xs) = True
is_pair (MalVector x _:xs) = True
is_pair _ = False
quasiquote :: MalVal -> MalVal
quasiquote ast =
case ast of
(MalList (MalSymbol "unquote" : a1 : []) _) -> a1
(MalList (MalList (MalSymbol "splice-unquote" : a01 : []) _ : rest) _) ->
MalList [(MalSymbol "concat"), a01, quasiquote (MalList rest Nil)] Nil
(MalVector (MalList (MalSymbol "splice-unquote" : a01 : []) _ : rest) _) ->
MalList [(MalSymbol "concat"), a01, quasiquote (MalVector rest Nil)] Nil
(MalList (a0 : rest) _) -> MalList [(MalSymbol "cons"),
quasiquote a0,
quasiquote (MalList rest Nil)] Nil
(MalVector (a0 : rest) _) -> MalList [(MalSymbol "cons"),
quasiquote a0,
quasiquote (MalVector rest Nil)] Nil
_ -> MalList [(MalSymbol "quote"), ast] Nil
eval_ast :: MalVal -> Env -> IOThrows MalVal
eval_ast sym@(MalSymbol _) env = env_get env sym
eval_ast ast@(MalList lst m) env = do
new_lst <- mapM (\x -> (eval x env)) lst
return $ MalList new_lst m
eval_ast ast@(MalVector lst m) env = do
new_lst <- mapM (\x -> (eval x env)) lst
return $ MalVector new_lst m
eval_ast ast@(MalHashMap lst m) env = do
new_hm <- DT.mapM (\x -> (eval x env)) lst
return $ MalHashMap new_hm m
eval_ast ast env = return ast
let_bind :: Env -> [MalVal] -> IOThrows Env
let_bind env [] = return env
let_bind env (b:e:xs) = do
evaled <- eval e env
x <- liftIO $ env_set env b evaled
let_bind env xs
apply_ast :: MalVal -> Env -> IOThrows MalVal
apply_ast ast@(MalList (MalSymbol "def!" : args) _) env = do
case args of
(a1@(MalSymbol _): a2 : []) -> do
evaled <- eval a2 env
liftIO $ env_set env a1 evaled
_ -> throwStr "invalid def!"
apply_ast ast@(MalList (MalSymbol "let*" : args) _) env = do
case args of
(a1 : a2 : []) -> do
params <- (_to_list a1)
let_env <- liftIO $ env_new $ Just env
let_bind let_env params
eval a2 let_env
_ -> throwStr "invalid let*"
apply_ast ast@(MalList (MalSymbol "quote" : args) _) env = do
case args of
a1 : [] -> return a1
_ -> throwStr "invalid quote"
apply_ast ast@(MalList (MalSymbol "quasiquote" : args) _) env = do
case args of
a1 : [] -> eval (quasiquote a1) env
_ -> throwStr "invalid quasiquote"
apply_ast ast@(MalList (MalSymbol "do" : args) _) env = do
case args of
([]) -> return Nil
_ -> do
el <- eval_ast (MalList args Nil) env
case el of
(MalList lst _) -> return $ last lst
apply_ast ast@(MalList (MalSymbol "if" : args) _) env = do
case args of
(a1 : a2 : a3 : []) -> do
cond <- eval a1 env
if cond == MalFalse || cond == Nil
then eval a3 env
else eval a2 env
(a1 : a2 : []) -> do
cond <- eval a1 env
if cond == MalFalse || cond == Nil
then return Nil
else eval a2 env
_ -> throwStr "invalid if"
apply_ast ast@(MalList (MalSymbol "fn*" : args) _) env = do
case args of
(a1 : a2 : []) -> do
params <- (_to_list a1)
return $ (_malfunc a2 env (MalList params Nil)
(\args -> do
fn_env1 <- liftIO $ env_new $ Just env
fn_env2 <- liftIO $ env_bind fn_env1 params args
eval a2 fn_env2))
_ -> throwStr "invalid fn*"
apply_ast ast@(MalList _ _) env = do
el <- eval_ast ast env
case el of
(MalList ((Func (Fn f) _) : rest) _) ->
f $ rest
(MalList ((MalFunc {ast=ast, env=fn_env, params=(MalList params Nil)}) : rest) _) -> do
fn_env1 <- liftIO $ env_new $ Just fn_env
fn_env2 <- liftIO $ env_bind fn_env1 params rest
eval ast fn_env2
el ->
throwStr $ "invalid apply: " ++ (show el)
eval :: MalVal -> Env -> IOThrows MalVal
eval ast env = do
case ast of
(MalList _ _) -> apply_ast ast env
_ -> eval_ast ast env
-- print
mal_print :: MalVal -> String
mal_print exp = show exp
-- repl
rep :: Env -> String -> IOThrows String
rep env line = do
ast <- mal_read line
exp <- eval ast env
return $ mal_print exp
repl_loop :: Env -> IO ()
repl_loop env = do
line <- readline "user> "
case line of
Nothing -> return ()
Just "" -> repl_loop env
Just str -> do
res <- runErrorT $ rep env str
out <- case res of
Left (StringError str) -> return $ "Error: " ++ str
Left (MalValError mv) -> return $ "Error: " ++ (show mv)
Right val -> return val
putStrLn out
hFlush stdout
repl_loop env
main = do
args <- getArgs
load_history
repl_env <- env_new Nothing
-- core.hs: defined using Haskell
(mapM (\(k,v) -> (env_set repl_env (MalSymbol k) v)) Core.ns)
env_set repl_env (MalSymbol "eval") (_func (\[ast] -> eval ast repl_env))
env_set repl_env (MalSymbol "*ARGV*") (MalList [] Nil)
-- core.mal: defined using the language itself
runErrorT $ rep repl_env "(def! not (fn* (a) (if a false true)))"
runErrorT $ rep repl_env "(def! load-file (fn* (f) (eval (read-string (str \"(do \" (slurp f) \")\")))))"
if length args > 0 then do
env_set repl_env (MalSymbol "*ARGV*") (MalList (map MalString (drop 1 args)) Nil)
runErrorT $ rep repl_env $ "(load-file \"" ++ (args !! 0) ++ "\")"
return ()
else
repl_loop repl_env
|
alphaKAI/mal
|
haskell/step7_quote.hs
|
mpl-2.0
| 6,419 | 0 | 21 | 2,141 | 2,439 | 1,204 | 1,235 | 156 | 13 |
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_HADDOCK show-extensions #-}
------------------------------------------------------------------------
-- |
-- Module : Yeast.Render
-- Copyright : (c) 2015-2016 Stevan Andjelkovic
-- License : ISC (see the file LICENSE)
-- Maintainer : Stevan Andjelkovic
-- Stability : experimental
-- Portability : non-portable
--
-- This module contains functions for rendering news feeds.
--
------------------------------------------------------------------------
module Yeast.Render (
-- * Rendering
-- $rendering
renderFile
, renderLBS
, renderText
-- * Reexport
, RenderSettings(..)
-- | Reexport 'RenderSettings' from "Text.XML".
, def
-- | Reexport the default 'RenderSettings' from "Text.XML".
)
where
import Data.ByteString.Lazy (ByteString)
import qualified Data.ByteString.Lazy as LBS
import qualified Data.Map as M
import Data.Maybe (isJust)
import Data.Text (Text)
import qualified Data.Text.Lazy as L
import Data.Text.Lazy.Encoding (encodeUtf8)
import Text.XML (Document(Document),
Element(Element),
Name(Name), Node(NodeElement,
NodeContent), Prologue(Prologue),
RenderSettings, def)
import qualified Text.XML as XML
import Text.XML.Lens ((^.), (.~), (&), root, nodes,
attrs, _Element)
import Yeast.Feed
------------------------------------------------------------------------
-- $rendering
-- Feeds can be rendered in different ways.
-- | Render feed into a file.
renderFile :: RenderSettings -> FilePath -> Feed -> IO ()
renderFile rs fp = LBS.writeFile fp . renderLBS rs
-- | Render feed into a (lazy) byte string.
renderLBS :: RenderSettings -> Feed -> ByteString
renderLBS rs = encodeUtf8 . renderText rs
-- | Render a feed into (lazy) text.
renderText :: RenderSettings -> Feed -> L.Text
renderText rs = XML.renderText rs . toXML
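-- A minimal usage sketch (editor's addition; 'someFeed' stands for a
-- hypothetical 'Feed' value obtained elsewhere, e.g. from a parser):
--
-- > renderFile def "feed.xml" someFeed
-- > renderLBS def someFeed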
------------------------------------------------------------------------
-- Helpers
emptyDocument :: FeedKind -> Document
emptyDocument k = Document (Prologue [] Nothing []) rootEl []
where
rootEl :: Element
rootEl = case k of
RSS1Kind -> Element "{http://www.w3.org/1999/02/22-rdf-syntax-ns#}RDF"
M.empty []
RSS2Kind -> Element "rss" (M.fromList [("version", "2.0")]) []
AtomKind -> Element "{http://www.w3.org/2005/Atom}feed" M.empty []
node :: Text -> [Node] -> Node
node t ns = NodeElement $ Element
(Name t Nothing Nothing) M.empty ns
leaf :: Text -> Maybe Text -> [Node]
leaf _ Nothing = []
leaf t (Just t') = [node t [NodeContent t']]
attrLeaf :: Text -> [(Text, Maybe Text)] -> [Node]
attrLeaf t0 as = case as' of
[] -> []
_ -> [node t0 [] & _Element.attrs .~ M.fromList as']
where
as' :: [(Name, Text)]
as' = map (\(t, Just t') -> (Name t Nothing Nothing, t'))
$ filter (isJust . snd) as
toXML :: Feed -> Document
toXML f = emptyDocument (f^.kind) & root.nodes .~ case f^.kind of
RSS1Kind ->
node "channel"
( leaf "title" (f^.title)
++ leaf "link" (f^.feedHtml)
++ leaf "description" (f^.description)
++ leaf "date" (f^.date))
: flip map (f^.items) (\i ->
node "item"
$ leaf "title" (i^.title)
++ leaf "link" (i^.link)
++ leaf "date" (i^.date)
++ leaf "dc:creator" (i^.author)
++ leaf "description" (i^.description)
)
RSS2Kind -> [
node "channel"
( leaf "title" (f^.title)
++ leaf "link" (f^.feedHtml)
++ leaf "description" (f^.description)
++ flip map (f^.items) (\i ->
node "item"
$ leaf "title" (i^.title)
++ leaf "link" (i^.link)
++ leaf "pubDate" (i^.date)
++ leaf "author" (i^.author)
++ leaf "description" (i^.description))
)
]
AtomKind ->
leaf "title" (f^.title)
++ leaf "subtitle" (f^.description)
++ attrLeaf "link"
[ ("type", Just "text/html")
, ("rel", Just "alternative")
, ("href", f^.feedHtml)
]
++ attrLeaf "link"
[ ("type", Just "application/atom+xml")
, ("rel", Just "self")
, ("href", f^.feedHome)
]
++ leaf "updated" (f^.date)
++ flip map (f^.items) (\i ->
node "entry"
$ leaf "title" (i^.title)
++ attrLeaf "link"
[ ("type", Just "text/html")
, ("rel", Just "alternative")
, ("href", i^.link)
]
++ leaf "updated" (i^.date)
++ [node "author" (leaf "name" (i^.author))]
++ leaf "content" (i^.description)
)
|
stevana/yeast
|
src/Yeast/Render.hs
|
isc
| 5,174 | 0 | 23 | 1,720 | 1,400 | 769 | 631 | -1 | -1 |
module RandomGolf where
import Control.Arrow (first)
import System.Random (Random, randoms, getStdRandom, split)
randomsIO_golfed :: Random a => IO [a]
randomsIO_golfed = getStdRandom (first randoms . split)
|
pauldoo/scratch
|
RealWorldHaskell/ch15/RandomGolf.hs
|
isc
| 211 | 0 | 8 | 29 | 69 | 39 | 30 | 5 | 1 |
module Main where
import Loans
import Sim
main ::IO ()
main = putStrLn $ show $ defaultPaydown sampleMortgage
|
ohbadiah/loan-repayment
|
Main.hs
|
mit
| 112 | 0 | 6 | 20 | 36 | 20 | 16 | 5 | 1 |
{-# LANGUAGE TemplateHaskell, OverloadedStrings, BangPatterns #-}
-- | A 'HashString' represents a string, annotated with its hash. This allows
-- us to avoid frequent rehashing.
--
-- The recommended way of creating a 'HashString' is either statically, using
-- quasiquotes:
--
-- > [hashed|hello, world!|] -- Generates the HashString "hello, world!"
--
-- or dynamically, with 'fromString':
--
-- > name <- loadNameFromFile "some-text-file"
-- > fromString name -- Generates the hashString equivalent to whatever was
-- > -- in 'name'.
--
-- Use the quasiquoter as much as possible, as it makes the hashing run at
-- compile time, saving us from having to do _any_ hashing at runtime.
module Data.HashString ( -- * Normal Haskell Interface
HashString(..)
, toHashString
, fromHashString
-- * Template Haskell Helpers
, hString
, hashed
) where
import Prelewd
import Impure
import Control.DeepSeq
import Data.Bits (xor)
import Data.Hashable
import Data.String
import qualified Data.Text as T
import Language.Haskell.TH
import Language.Haskell.TH.Quote
import Language.Haskell.TH.Syntax
import Text.Show
-- | The HashString constructor is exposed so it can be used with the 'hashed'
-- quasiquoter. Please don't call it manually. If you want to construct a
-- HashString, please use 'fromString'.
--
-- The best part about HashStrings? If you compare two HashStrings with
-- unique hashes, the text will never even have to be evaluated - it will
-- remain a thunk, saving needless computation and memory waste.
data HashString = HashString !Int
T.Text
-- | The usual use case for HashString is in a HashMap. We'd frequently like
-- to force the entire tree to prevent deleted elements from being leaked.
--
-- Unfortunately, when we do this, we don't want the hashstring's text to
-- be needlessly forced. Therefore, we only keep the hash strict, and leave
-- the text itself lazy. This lets us deepseq the hashmap without walking
-- through text every time.
instance NFData HashString where
rnf (HashString _ _) = ()
instance IsString HashString where
fromString = toHashString
instance Ord HashString where
compare (HashString ha a) (HashString hb b) = case compare ha hb of
EQ -> compare a b
x -> x
instance Eq HashString where
(HashString ha sa) == (HashString hb sb) = ha == hb
&& sa == sb
instance Show HashString where
show (HashString _ s) = show s
-- | Converts a string to a 'HashString'.
toHashString :: String -> HashString
toHashString s = HashString (hash asText) asText
where
asText = T.pack s
-- | Converts a 'HashString' to text.
fromHashString :: HashString -> T.Text
fromHashString (HashString _ x) = x
{-# INLINE fromHashString #-}
instance Hashable HashString where
hashWithSalt s (HashString h _) = xor s h
-- | The template haskell splice which lets us generate 'HashString's at
-- compile-time. Use this like so:
--
-- > $(hString "hello, world")
--
-- Which would be the same as you saying:
--
-- > "hello, world" :: HashString
--
-- except it's evaluated at compile time. Please use this function wherever
-- possible to save a lot of wasted cycles.
hString :: String -> Q Exp
hString s = [| HashString $(lift . hash $ T.pack s') (fromString $(liftString s')) |]
where
s' = filter (/= '"') s
-- | The 'hashed' quasiquoter lets us automatically create a 'HashString' at
-- compile time. This saves us from having to ever hash this string at
-- runtime. Use this whenever you want to construct a 'HashString' from a
-- compile-time string.
--
-- Usage:
--
-- > [hashed|what is this witchcraft|] -- == (fromString "what is this witchcraft") :: HashString
hashed :: QuasiQuoter
hashed = QuasiQuoter { quoteExp = hString
, quotePat = undefined
, quoteType = undefined
, quoteDec = undefined
}
|
bfops/Chess
|
src/Data/HashString.hs
|
mit
| 4,281 | 0 | 9 | 1,185 | 506 | 301 | 205 | 51 | 1 |
import qualified Data.Set as S
import Helpers (stringToIntList)
import Data.Numbers.Primes (primeFactors)
fn :: [(Integer, Integer)] -> (Integer, Integer) -> [(Integer, Integer)]
fn acc (n, d) = let a = S.fromList $ stringToIntList $ show n
b = S.fromList $ stringToIntList $ show d
i = S.intersection a b
a' = S.difference a i
b' = S.difference b i
extract = S.elemAt 0
isModTen = n `mod` 10 == 0 && d `mod` 10 == 0
intDiv x y = fromInteger x / fromInteger y
in if n > d || isModTen || S.null i || S.size a' /= 1 || S.size b' /= 1
then acc
else let n' = extract a'
d' = extract b'
in if n' > 0 && d' >0 && intDiv n d == intDiv n' d'
then (n', d'):acc else acc
l = [10..99]
nonTrivial = foldl fn [] [ (n, d) | n <- l, d <- l]
nonTrivialProduct = foldl (\(x1, y1) (x2, y2) -> (x1*x2, y1*y2) ) (1, 1) nonTrivial
-- Found the following on the forum written by AntoineCellerier, an absolute delight
-- product [a Data.Ratio.% b | a <- [1..9], b <- [1..9], c <-[1..9], 10*a+c < 10*c+b, (10*a+c)*b == (10*c+b)*a ]
|
samidarko/euler
|
problem033.hs
|
mit
| 1,313 | 0 | 15 | 508 | 444 | 240 | 204 | 21 | 3 |
module ProjectEuler.Problem52
( problem
) where
import Data.List
import ProjectEuler.Types
problem :: Problem
problem = pureProblem 52 Solved result
allEqual :: (Eq e) => [e] -> Bool
allEqual (x:xs) = all (== x) xs
allEqual [] = True
isPermNum :: Int -> Bool
isPermNum n = allEqual $ map (sort . show . (* n') ) [1..6]
where
n' = oneInFront n
-- the number has to start with `1`
oneInFront :: Int -> Int
oneInFront n = read $ '1' : show n
result :: Int
result = oneInFront $ head $ filter isPermNum [1..]
|
Javran/Project-Euler
|
src/ProjectEuler/Problem52.hs
|
mit
| 527 | 0 | 9 | 117 | 204 | 111 | 93 | 16 | 1 |
module Salesman.OptionTypes
( Command(..)
, Common(..)
, Options(..)
) where
data Command
= Install [String]
| Check
| Upgrade
| Remove [String]
| List
deriving (Show)
data Common = Common
{ optProperties :: FilePath
} deriving (Show)
data Options = Options
{ optCommon :: Common
, optCommand :: Command
} deriving (Show)
|
thomasdziedzic/salesman
|
src/Salesman/OptionTypes.hs
|
mit
| 385 | 0 | 8 | 115 | 115 | 72 | 43 | 18 | 0 |
module Rocketfuel.Commands (
runCommand,
updateContext
) where
import Control.Monad.Random
import Rocketfuel.Types
import Rocketfuel.Grid
updateContext :: Maybe Command -> (GameContext, StdGen) -> (GameContext, StdGen)
updateContext command (context, generator) =
let contextAfterCommand = runCommand command context
baseGrid = grid contextAfterCommand
moves = getFallingTiles baseGrid
finalContext = context { grid = baseGrid,
currentMoves = moves }
in (finalContext, generator)
runCommand :: Maybe Command -> GameContext -> GameContext
runCommand com context = maybe context (process' context) com
where
process' gc (DragAndDrop (Just p1) (Just p2)) = swap p1 p2 gc
process' gc _ = gc
swap :: Position -> Position -> GameContext -> GameContext
swap p1 p2 gc@(GameContext g _ _) = if p1 /= p2 && orthoClose p1 p2
then gc { grid = applySwap (Swap p1 p2) g }
else gc
orthoClose :: Position -> Position -> Bool
orthoClose (x,y) (x2,y2) = abs(x2-x) + abs(y2-y) == 1
|
Raveline/Rocketfuel
|
src/Rocketfuel/Commands.hs
|
mit
| 1,105 | 0 | 11 | 287 | 366 | 196 | 170 | 24 | 2 |
module Classifier.LU
( lu,luSolve
) where
import Data.List
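-- | Doolittle-style LU decomposition without pivoting (editor's descriptive
-- comment): factor a square matrix A into a unit-lower-triangular L and an
-- upper-triangular U with A = L*U, built row by row via 'reclu'/'sublu'.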
lu :: [[Double]] -> ([[Double]], [[Double]])
lu a = reclu a ([], map (\x->[]) [1..(length a)])
reclu :: [[Double]] -> ([[Double]],[[Double]]) -> ([[Double]], [[Double]])
reclu [] (l, ut) = (l, transpose ut)
reclu (ai:arst) (l, ut) = reclu arst (l++[nl], zipWith (\x -> \y -> x++[y]) ut nu)
where (nl, nu) = sublu (length l) ai ut ([],[])
sublu :: Int -> [Double] ->[[Double]]-> ([Double], [Double]) -> ([Double], [Double])
sublu i [] _ (l, u) = (l,u)
sublu i (aij:airst) (utj:uttl) (li, uj) = sublu i airst uttl (li++[nl], uj++[nu])
where j = length li
sm = aij - (foldl (+) 0 (zipWith (*) li utj))
(nl, nu) | i==j = (1,sm)
| i<j = (0,sm)
| i>j = (sm/(utj!!j),0)
forwardSub ::[[Double]] -> [Double] -> [Double]-> [Double]
forwardSub [] _ c = c
forwardSub (lh:lt) (bh:bt) c = forwardSub lt bt (c++[h])
where h = bh - (foldl (+) 0 (zipWith (*) lh c))
backwardSub ::[[Double]] -> [Double]-> [Double]
backwardSub [] _ = []
backwardSub (lh:lt) (bh:bt) = (h / (lh!!k) ):c
where c = backwardSub lt bt
k = length lh - length c - 1
h = bh - (foldl (+) 0 (zipWith (*) (reverse lh) (reverse c)))
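-- | Solve A x = b (editor's descriptive comment): LU-decompose A, then do a
-- forward substitution with L followed by a backward substitution with U.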
luSolve :: [[Double]] -> [Double] -> [Double]
luSolve a b = (backwardSub u (forwardSub l b []))
where (l,u) = lu a
|
Yukits/classyou
|
src/Classifier/LU.hs
|
mit
| 1,712 | 0 | 13 | 683 | 906 | 509 | 397 | 30 | 1 |
module Cook.Catalog.Debian.Apt
( aptGet
, addAptRepository
, updatePackages
, upgradePackages
, clearPackagesCache
, installPackages
, provider
) where
import Data.List (last)
import Data.List.NonEmpty hiding (last)
import Data.Semigroup
import qualified Data.Text.Lazy as T
import Cook.Recipe
import Cook.Provider.PkgManager (Provider)
import qualified Cook.Provider.PkgManager as P
aptGet :: NonEmpty String -> Recipe f ()
aptGet args = withEnv [("DEBIAN_FRONTEND", "noninteractive")] $
runProc "apt-get" $ toList $ ["--quiet", "--yes"] <> args
addAptRepository :: String -> Recipe f ()
addAptRepository repo = withRecipeName "Debian.Apt.AddAptRepository" $
runProc "add-apt-repository" [repo]
installPackages :: NonEmpty String -> Recipe f ()
installPackages pkgs = withRecipeName "Debian.Apt.InstallPackages" $
aptGet $ "install" <| pkgs
updatePackages :: Recipe f ()
updatePackages = withRecipeName "Debian.Apt.UpdatePackages" $
aptGet ["update"]
upgradePackages :: Recipe f ()
upgradePackages = withRecipeName "Debian.Apt.UpgradePackages" $ do
updatePackages
aptGet ["upgrade"]
isPackageInstalled :: String -> Recipe f Bool
isPackageInstalled "" = error "Debian.isPackageInstalled: empty package name"
isPackageInstalled pkg = withRecipeName "IsPackageInstalled" $ do
res <- withoutError $ runOut $ proc "dpkg" ["-l", pkg]
case res of
Left _ -> return False
Right (out, _) -> return . T.isPrefixOf "ii" . last $ T.lines out
clearPackagesCache :: Recipe f ()
clearPackagesCache = withRecipeName "Debian.Apt.ClearPackagesCache" $ do
aptGet ["autoremove", "--purge"]
aptGet ["clean"]
provider :: Provider f
provider = prov
where prov = P.Provider
{ P._updatePackages = updatePackages
, P._upgradePackages = upgradePackages
, P._clearPackagesCache = clearPackagesCache
, P._requirePackages = P.requirePackagesGeneric prov
, P._isPackageInstalled = isPackageInstalled
, P._installPackages = installPackages
}
|
jimenezrick/cook.hs
|
src/Cook/Catalog/Debian/Apt.hs
|
mit
| 2,102 | 0 | 15 | 425 | 547 | 291 | 256 | 51 | 2 |
module Colors where
import Types
red :: RGB
red = RGB 255 0 0
green :: RGB
green = RGB 0 255 0
blue :: RGB
blue = RGB 0 0 255
white :: RGB
white = RGB 255 255 255
black :: RGB
black = RGB 0 0 0
basicColors :: [RGB]
basicColors = [red, green, blue, white, black]
|
Axmill/riverrun
|
src/RGB/Colors.hs
|
mit
| 268 | 0 | 5 | 66 | 120 | 68 | 52 | 14 | 1 |
{-# OPTIONS_GHC -Wall #-}
module LogAnalysis where
import Log
toInt :: String -> Int
toInt = read
parseMessage :: String -> LogMessage
-- Do we need to handle ill-formed messages? e.g. "E 20" is missing a timestamp
parseMessage s = f $ words s
where
f ("I":t:zs) = LogMessage Info (toInt t) (unwords zs)
f ("W":t:zs) = LogMessage Warning (toInt t) (unwords zs)
f ("E":e:t:zs) = LogMessage (Error (toInt e)) (toInt t) (unwords zs)
f _ = Unknown s
parse :: String -> [LogMessage]
parse = map parseMessage . lines
messageTime :: LogMessage -> Int
messageTime (LogMessage _ t _) = t
messageTime (Unknown _) = 0
insert :: LogMessage -> MessageTree -> MessageTree
insert (Unknown _) inTree = inTree
insert inMesg Leaf = Node Leaf inMesg Leaf
insert inMesg (Node treeL m treeR)
| (messageTime inMesg < messageTime m) = Node (insert inMesg treeL) m treeR
| otherwise = Node treeL m (insert inMesg treeR)
build :: [LogMessage] -> MessageTree
build = foldr insert Leaf
inOrder :: MessageTree -> [LogMessage]
inOrder Leaf = []
inOrder (Node treeL m treeR) = (inOrder treeL) ++ [m] ++ (inOrder treeR)
isBadErrMsg :: Int -> LogMessage -> Bool
isBadErrMsg d (LogMessage (Error severity) _ _) = severity > d
isBadErrMsg _ _= False
getMsg :: LogMessage -> String
getMsg (LogMessage _ _ s) = s
getMsg (Unknown s) = s
whatWentWrong :: [LogMessage] -> [String]
whatWentWrong = map getMsg .filter (isBadErrMsg 50) . inOrder . build
|
bachase/cis194
|
hw2/LogAnalysis.hs
|
mit
| 1,512 | 1 | 11 | 351 | 601 | 307 | 294 | 35 | 4 |
{-# LANGUAGE OverloadedStrings #-}
import Control.Monad.State
import Data.Maybe
import Network.URL
import Pipes
import Network.Discord.Types
import Network.Discord.Gateway
data PutStrClient = PsClient
instance Client PutStrClient where
getAuth _ = Bot "TOKEN"
main :: IO ()
main = runWebsocket (fromJust $ importURL "wss://gateway.discord.gg") PsClient $ do
st <- get
for (eventCore (getWebSocket st))
(\pl -> lift . liftIO $ print (pl:: Event))
|
jano017/Discord.hs
|
examples/putstr.hs
|
mit
| 461 | 0 | 13 | 74 | 145 | 77 | 68 | 15 | 1 |
{-# LANGUAGE OverloadedStrings, DeriveDataTypeable #-}
module NiceChildProcess (ChildProcess(..), spawn, NiceChildProcessException) where
import Control.Applicative
import Control.Concurrent.Async
import Control.Concurrent.Chan
import Control.Concurrent.MVar
import Control.Exception
import Control.Monad
import Debug.Trace
import Data.Monoid
import Data.Text (Text)
import qualified Data.Text as T
import Data.Traversable
import Data.Typeable (Typeable)
import GHCJS.Types
import GHCJS.Foreign
import qualified Node.ChildProcess as CP
data NiceChildProcessException
= NoSuchFile Text
deriving (Eq, Show, Typeable)
instance Exception NiceChildProcessException
type Command = Text
type Arg = Text
type Directory = Text
type Line = Text
type LineQueue = Chan Line
data ChildProcess = ChildProcess {
readLine :: IO Text,
writeLine :: Text -> IO ()
}
data ChildProcessData = ChildProcessData CP.ChildProcess (MVar NiceChildProcessException) LineQueue
createException :: Text -> NiceChildProcessException
createException = NoSuchFile
spawn :: Command -> [Arg] -> Directory -> IO ChildProcess
spawn command args cwd = do
childProcess <- CP.spawn (toJSString command) (map toJSString args) (toJSString cwd)
errored <- newEmptyMVar
CP.onError childProcess $ \err ->
putMVar errored (createException $ fromJSString (CP.errorMessage err))
outStream <- CP.stdout childProcess
lineQueue <- newChan
CP.onData outStream $ \buffer -> do
text <- fromJSString <$> CP.toString buffer
let lines = T.lines text
void $ traverse (writeChan lineQueue) lines
let childProcessData = ChildProcessData childProcess errored lineQueue
return $ ChildProcess (makeReadLine childProcessData) (makeWriteLine childProcessData)
raceTo :: MVar NiceChildProcessException -> IO a -> IO a
raceTo errored other = do
winner <- race (readMVar errored) other
case winner of
Left exception -> throw exception
Right value -> return value
makeReadLine :: ChildProcessData -> IO Text
makeReadLine (ChildProcessData childProcess errored lineQueue) = do
line <- raceTo errored (readChan lineQueue)
traceShow line (return line)
makeWriteLine :: ChildProcessData -> Text -> IO ()
makeWriteLine (ChildProcessData childProcess errored _) text = do
traceShow text (return ())
hadErrored <- tryReadMVar errored
case hadErrored of
Nothing -> return ()
Just exception -> throw exception
inStream <- CP.stdin childProcess
CP.write inStream (toJSString $ T.snoc text '\n')
|
CRogers/stack-ide-atom
|
haskell/src/NiceChildProcess.hs
|
mit
| 2,502 | 0 | 15 | 391 | 761 | 384 | 377 | 65 | 2 |
-- Copyright 2016 Peter Beard
-- Distributed under the GNU GPL v2. For full terms, see the LICENSE file.
--
-- Problem #6
--
-- The sum of the squares of the first ten natural numbers is,
-- 1^2 + 2^2 + ... + 10^2 = 385
--
-- The square of the sum of the first ten natural numbers is,
-- (1 + 2 + ... + 10)^2 = 55^2 = 3025
--
-- Hence the difference between the sum of the squares of the first ten natural
-- numbers and the square of the sum is 3025 − 385 = 2640.
--
-- Find the difference between the sum of the squares of the first one hundred
-- natural numbers and the square of the sum.
solution = abs $ (sum $ map (^2) [1..100]) - (sum [1..100]) ^ 2
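-- A closed-form cross-check (editor's addition, not from the original file):
-- sum of the first n squares is n(n+1)(2n+1)/6 and the square of the sum is
-- (n(n+1)/2)^2; for n = 100 that is 25502500 - 338350 = 25164150, which
-- agrees with 'solution' above.
solutionClosedForm = (100 * 101 `div` 2) ^ 2 - (100 * 101 * 201 `div` 6)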
main = putStrLn $ "The solution is " ++ (show solution)
|
PeterBeard/project-euler
|
haskell/src/problem-006.hs
|
gpl-2.0
| 714 | 0 | 11 | 157 | 85 | 53 | 32 | 2 | 1 |
{-# LANGUAGE TemplateHaskell, DeriveDataTypeable, MultiParamTypeClasses #-}
module NFA.Compress.Instance where
import NFA.Compress.Data
import Autolib.ToDoc
import Autolib.Reader
import Autolib.Reporter
import qualified Challenger as C
import Data.List ( nub )
import Data.Typeable
data Instance = Instance
{ max_size :: Int
, original :: [ [ Int ] ]
}
deriving ( Typeable )
$(derives [makeReader, makeToDoc] [''Instance])
instance C.Verify DFA_Compress Instance where
verify p i = do
let lengths = map length $ original i
case nub lengths of
          [] -> reject $ text "Table is empty"
          [l] -> return ()
          ls -> reject $ text "Table contains rows of different lengths"
        when ( max_size i < 0 ) $ reject
            $ text "max_size is negative."
example :: Instance
example = Instance
{ max_size = 8
, original = [ [ 1,2,3,4], [2,2,3,4], [1,3,2,4], [2,1,3,4] ]
}
-- local variables:
-- mode: haskell
-- end:
|
Erdwolf/autotool-bonn
|
src/NFA/Compress/Instance.hs
|
gpl-2.0
| 967 | 9 | 14 | 219 | 320 | 182 | 138 | 27 | 1 |
module Ssh.KeyExchangeAlgorithm (
KeyExchangeAlgorithm (..)
, createKeyData
, makeWord8
) where
import qualified Data.ByteString.Lazy as B
import Data.ByteString.Lazy.Char8 () -- IsString instance for the above
import Data.Word
import Data.Digest.Pure.SHA
import Ssh.Packet
import Ssh.Transport
import Ssh.ConnectionData
import Ssh.String
import Ssh.HostKeyAlgorithm
data KeyExchangeAlgorithm = KeyExchangeAlgorithm {
kexName :: SshString
, handleKex :: HostKeyAlgorithm -> SshString -> SshString -> SshConnection ConnectionData
}
instance Show KeyExchangeAlgorithm where
show = show . kexName
makeWord8 x = map (toEnum . fromEnum) $ B.unpack x
createKeyData :: SshString -> SshString -> Word8 -> SshString -> [Word8]
createKeyData sharedSecret exchangeHash typeChar sId =
makeWord8 $ createKeyData' {-sha1-} (B.concat [sharedSecret, exchangeHash]) (B.concat [B.pack [typeChar], sId])
createKeyData' :: SshString -> SshString -> SshString -- make sha1 configurable ### TODO
createKeyData' init append = B.concat [hashed, createKeyData' init hashed]
where hashed = bytestringDigest $ sha1 $ B.concat [init, append]
|
bcoppens/HaskellSshClient
|
src/Ssh/KeyExchangeAlgorithm.hs
|
gpl-3.0
| 1,156 | 0 | 12 | 180 | 308 | 174 | 134 | 25 | 1 |
import qualified Hbot.MsgParser.Test as MsgParser
import Test.Tasty
import Test.Tasty.QuickCheck as QC
import Test.Tasty.HUnit
main = defaultMain tests
tests = testGroup "All Tests"
[ QC.testProperty "trivial property" $
\x -> (x :: Int) == x
, MsgParser.tests
]
|
ljsc/hbot
|
tests/Test.hs
|
gpl-3.0
| 284 | 0 | 10 | 58 | 79 | 47 | 32 | 9 | 1 |
--------------------------------------------------------------------------------
-- This file is part of diplomarbeit ("Diplomarbeit Johannes Weiß"). --
-- --
-- diplomarbeit is free software: you can redistribute it and/or modify --
-- it under the terms of the GNU General Public License as published by --
-- the Free Software Foundation, either version 3 of the License, or --
-- (at your option) any later version. --
-- --
-- diplomarbeit is distributed in the hope that it will be useful, --
-- but WITHOUT ANY WARRANTY; without even the implied warranty of --
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the --
-- GNU General Public License for more details. --
-- --
-- You should have received a copy of the GNU General Public License --
-- along with diplomarbeit. If not, see <http://www.gnu.org/licenses/>. --
-- --
-- Copyright 2012, Johannes Weiß --
--------------------------------------------------------------------------------
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -Wwarn #-}
module Main where
import Crypto.Random (SystemRandom, newGenIO)
import System.Environment
import qualified Data.Map as M
import Data.ExpressionTypes
import Data.LinearExpression
import Data.FieldTypes (Field(..))
import Data.RAE.Encoder
import Data.RAE.Types
import qualified Math.Polynomials as P
import Math.FiniteFields.F2Pow256
type Element = F2Pow256
--import Math.Algebra.Field.Base (F97, Fp)
--import Math.Common.IntegerAsType (IntegerAsType)
--type Element = F97
--instance IntegerAsType n => Field (Fp n) where
-- invert n =
-- case n of
-- 0 -> error "0 is not invertible"
-- n' -> 1 / n'
--
--instance IntegerAsType n => Read (Fp n) where
-- readsPrec _ value = [(fromInteger $ (read value :: Integer), "")]
main :: IO ()
main =
do putStrLn "RAE Fun: START"
--test
args <- getArgs
let l = (read . head) args
putStrLn "EXERCISE 2"
g <- newGenIO :: IO SystemRandom
let (_, drac) = exprToDRAC g (testExpr1 l)
--let (rac, _) = singularizeDRAC drac
--let (_, rac, _) = exprToRAC g (testExpr1 l)
print $ length $ show $ drac
return ()
testExpr1 :: Integer -> Expr Element
testExpr1 l = P.horner _X_ (map (Literal . fromInteger) [1..l])
_X_ :: Field e => Expr e
_X_ = Var "x"
_Y_ :: Field e => Expr e
_Y_ = Var "y"
_C_1_ :: PrimaryExpression Element
_C_1_ = Constant 1
_C_23_ :: PrimaryExpression Element
_C_23_ = Constant 23
_C_42_ :: PrimaryExpression Element
_C_42_ = Constant 42
_V_x_ :: PrimaryExpression Element
_V_x_ = Variable "x"
_V_y_ :: PrimaryExpression Element
_V_y_ = Variable "y"
_V_z_ :: PrimaryExpression Element
_V_z_ = Variable "z"
_TestVarMap_ :: VarMapping Element
_TestVarMap_ = M.fromList [ ("x", 17), ("y", 23) ]
|
weissi/diplomarbeit
|
programs/RAEFun.hs
|
gpl-3.0
| 3,285 | 0 | 12 | 1,029 | 461 | 259 | 202 | 44 | 1 |
module Main where
import Data.Monoid
import Test.Framework
import Test.Framework.Providers.HUnit
import Test.HUnit
import Text.Interpol
main :: IO ()
main = defaultMainWithOpts
[ testCase "toString" testToString
] mempty
testToString :: Assertion
testToString = do
("I " ^-^ "have " ^-^ "apples") @?= "I have apples"
("Triple " ^-^ 'X') @?= "Triple 'X'"
("Am " ^-^ (21 :: Int) ^-^ " years old") @?= "Am 21 years old"
((21 :: Int) ^-^ " is my age") @?= "21 is my age"
("Umlaut: " ^-^ 'ü') @?= "Umlaut: 'ü'"
|
scvalex/interpol
|
Test/Unit.hs
|
gpl-3.0
| 547 | 0 | 11 | 124 | 159 | 87 | 72 | 17 | 1 |