code | repo_name | path | license | size | n_ast_errors | ast_max_depth | n_whitespaces | n_ast_nodes | n_ast_terminals | n_ast_nonterminals | loc | cycloplexity
---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE TemplateHaskell #-}
module Data.TrieMap.Representation.TH.Utils where
import Language.Haskell.TH
import Language.Haskell.TH.ExpandSyns
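-- Illustrative note (added; not from the original source): 'decompose' splits a
-- type application into its head and its argument list, and 'compose' rebuilds
-- one from a constructor name and type-variable names, e.g.
--
-- > decompose (ConT ''Either `AppT` VarT a `AppT` VarT b)  ==  (ConT ''Either, [VarT a, VarT b])
-- > compose ''Either [a, b]  ==  ConT ''Either `AppT` VarT a `AppT` VarT b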
decompose :: Type -> (Type, [Type])
decompose (tyfun `AppT` ty) = case decompose tyfun of
(tyfun, tys) -> (tyfun, tys ++ [ty])
decompose ty = (ty, [])
decompose' :: Type -> Maybe (Name, [Name])
decompose' (tyfun `AppT` VarT ty) = do
(tyfun, tys) <- decompose' tyfun
return (tyfun, tys ++ [ty])
decompose' (ConT ty) = return (ty, [])
decompose' _ = Nothing
compose :: Name -> [Name] -> Type
compose tyCon tyArgs = foldl AppT (ConT tyCon) (map VarT tyArgs)
tyVarBndrVar :: TyVarBndr -> Name
tyVarBndrVar (PlainTV tyvar) = tyvar
tyVarBndrVar (KindedTV tyvar _) = tyvar
tyVarBndrType :: TyVarBndr -> Type
tyVarBndrType = VarT . tyVarBndrVar
tyProd, tySum :: Type -> Type -> Type
tyProd t1 t2 = TupleT 2 `AppT` t1 `AppT` t2
tySum t1 t2 = ConT ''Either `AppT` t1 `AppT` t2
fstExp, sndExp :: Exp -> Exp
fstExp (TupE [e, _]) = e
fstExp e = VarE 'fst `AppE` e
sndExp (TupE [_, e]) = e
sndExp e = VarE 'snd `AppE` e
leftN, rightN :: Name
leftN = 'Left
rightN = 'Right
leftExp, rightExp :: Exp -> Exp
leftExp = AppE (ConE leftN)
rightExp = AppE (ConE rightN)
fstTy, sndTy :: Type -> Type
fstTy (TupleT 2 `AppT` t1 `AppT` _) = t1
fstTy _ = error "Error: not a pair type"
sndTy (TupleT 2 `AppT` _ `AppT` t2) = t2
sndTy _ = error "Error: not a pair type"
isEnumTy :: Type -> Bool
isEnumTy (ConT eith `AppT` t1 `AppT` t2)
= eith == ''Either && isEnumTy t1 && isEnumTy t2
isEnumTy (TupleT 0)
= True
isEnumTy _ = False
type AlgCon = (Name, [Type])
algCon :: Con -> AlgCon
algCon (NormalC name args) = (name, map snd args)
algCon (RecC name args) = (name, [argTy | (_, _, argTy) <- args])
algCon (InfixC (_, ty1) name (_, ty2)) = (name, [ty1, ty2])
algCon _ = error "Error: universally quantified constructors are not algebraic"
substInAlgCon :: (Name, Type) -> AlgCon -> AlgCon
substInAlgCon sub (conName, args) = (conName, map (substInType sub) args)
substInPred :: (Name, Type) -> Pred -> Pred
substInPred sub (ClassP cName tys) = ClassP cName (map (substInType sub) tys)
substInPred sub (EqualP ty1 ty2) = EqualP (substInType sub ty1) (substInType sub ty2)
mergeWith :: (a -> a -> a) -> [a] -> a
mergeWith _ [a] = a
mergeWith _ [] = error "Error: mergeWith called with empty list"
mergeWith f xs = mergeWith f (combine xs) where
combine (x1:x2:xs) = f x1 x2:combine xs
combine xs = xs | lowasser/TrieMap | Data/TrieMap/Representation/TH/Utils.hs | bsd-3-clause | 2,445 | 6 | 10 | 454 | 1,110 | 607 | 503 | 63 | 2 |
{-# OPTIONS_GHC -fno-warn-unused-binds -fno-warn-unused-matches -fno-warn-name-shadowing -fno-warn-missing-signatures #-}
{-# LANGUAGE FlexibleInstances, ConstraintKinds, ExistentialQuantification, GADTs, RankNTypes, MultiParamTypeClasses, UndecidableInstances, FlexibleContexts, TypeSynonymInstances #-}
---------------------------------------------------------------------------------------------------
---------------------------------------------------------------------------------------------------
-- |
-- | Module : First attempt at approximate counting
-- | Creator: Xiao Ling
-- | Created: 12/08/2015
-- | TODO : test standard deviation of alpha, beta, and final version
-- |
---------------------------------------------------------------------------------------------------
---------------------------------------------------------------------------------------------------
module Morris (
morris
, morris'
, morris1
, morris1'
) where
import Control.Monad.Random
import Control.Monad.Random.Class
import Data.Conduit
import Data.List.Split
import qualified Data.Conduit.List as Cl
import Core
import Statistics
{-----------------------------------------------------------------------------
I. Morris Algorithm list of counter
------------------------------------------------------------------------------}
---- * Count the number of items in `as` to within `eps` of actual
---- * with confidence `delta`
morris' :: EpsDelta -> Batch a IO Counter
morris' ed = morris ed `using` evalRandIO
morris :: MonadRandom m => EpsDelta -> Streaming a m Counter
morris (ED e d) = medianOfMeans $ go cs
where cs = replicate (t*m) 0
t = round $ 1/(e^2*d) :: Int
m = round . log $ 1/d
go cs = (\c -> 2^(round c) - 1) `ffmap` Cl.foldM (\cs _ -> traverse incr cs) cs
ffmap = fmap . fmap
medianOfMeans = fmap median' . (fmap . fmap) mean' . fmap (chunksOf t)
{-----------------------------------------------------------------------------
II. Morris Algorithm list of list of counter
------------------------------------------------------------------------------}
---- * Count the number of items in `as` to within `eps` of actual
---- * with confidence `delta`
morris1' :: EpsDelta -> Batch a IO Counter
morris1' ed = morris1 ed `using` evalRandIO
-- * Run on stream inputs `as` for t independent trials for `t = 1/eps^2 * d`,
-- * and `m` times in parallel, for `m = log(1/d)` and take the median
morris1 :: MonadRandom m => EpsDelta -> Streaming a m Counter
morris1 (ED e d) = medianOfMeans $ go ccs
where
medianOfMeans = fmap median' . (fmap . fmap) mean'
ccs = replicate m $ replicate t 0
t = round $ 1/(e^2*d)
m = round . log $ 1/d
go ccs = (\x -> 2^(round x) - 1) `fffmap` Cl.foldM (\xs _ -> incrs' xs) ccs
fffmap = fmap . fmap . fmap
-- * Given a list of lists of counters, toss a coin for each counter and
-- * increment it accordingly; this could be flattened
incrs' :: MonadRandom m => [[Counter]] -> m [[Counter]]
incrs' = sequence . fmap (sequence . fmap incr)
{-----------------------------------------------------------------------------
III. Utils
------------------------------------------------------------------------------}
-- * Increment a counter `x` with probability 1/2^x
incr :: MonadRandom m => Counter -> m Counter
incr x = do
h <- toss . coin $ 0.5^(round x)
return $ if isHead h then (seq () succ x) else seq () x
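-- Clarifying note (added): a counter holding x stands for roughly 2^x - 1
-- observed items, which is why the folds in 'morris' and 'morris1' map each
-- counter c to 2^(round c) - 1 before averaging and taking the median.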
mean', median' :: (Floating a, Ord a, RealFrac a) => [a] -> Float
mean' = fromIntegral . round . mean
median' = fromIntegral . round . median
| lingxiao/CIS700 | src/Morris.hs | bsd-3-clause | 3,758 | 0 | 13 | 765 | 758 | 412 | 346 | 43 | 2 |
{-# LANGUAGE RecordWildCards #-}
module Main where
import Control.Monad (forM_)
import Data.Monoid (mconcat)
import Development.Shake
import Development.Shake.FilePath
import System.Console.GetOpt
import qualified System.Info (os, arch)
import Config
import Dirs
import GhcDist
import HaddockMaster
import OS
import Package
import Paths
import PlatformDB
import Releases
import SourceTarball
import Types
import Target
import Website
data Flags = Info | Prefix String
deriving Eq
flags :: [OptDescr (Either a Flags)]
flags = [ Option ['i'] ["info"] (NoArg $ Right Info)
"Show info on what gets included in this HP release"
, Option [] ["prefix"] (ReqArg (Right . Prefix) "DIR")
"Set installation prefix (only for Posix builds)"
]
main :: IO ()
main = shakeArgsWith opts flags main'
where
main' flgs args =
if Info `elem` flgs
then info
else case args of
(tarfile:what) -> return $ Just $ do
allRules tarfile flgs
want $ if null what then ["build-all"] else what
[] -> usage
info = do
putStrLn $ "This hptool is built to construct " ++ hpFullName ++ "\n\
\ for the " ++
System.Info.os ++ " " ++ System.Info.arch ++ " platform.\n\
\The HP constructed will contain the following:\n\n"
++ unlines (whatIsIncluded hpRelease)
return Nothing
usage = do
putStrLn "usage: hptool --info\n\
\ hptool [opts] <ghc-bindist.tar.bz> [target...]\n\
\ where target is one of:\n\
\ build-all -- build everything (default)\n\
\ build-source -- build the source tar ball\n\
\ build-target -- build the target tree\n\
\ build-package-<pkg> -- build the package (name or name-ver)\n\
\ build-local -- build the local GHC environment\n\
\ build-website -- build the website\n"
return Nothing
allRules tarfile flgs = do
buildConfig <- addConfigOracle hpRelease tarfile (prefixSetting flgs)
ghcDistRules
packageRules
targetRules buildConfig
haddockMasterRules buildConfig
sourceTarballRules srcTarFile
buildRules hpRelease srcTarFile buildConfig
websiteRules "website"
prefixSetting = mconcat . reverse . map ps
where
ps (Prefix p) = Just p
ps _ = Nothing
opts = shakeOptions
hpRelease = hp_7_10_2
hpFullName = show $ relVersion hpRelease
srcTarFile = productDir </> hpFullName <.> "tar.gz"
whatIsIncluded :: Release -> [String]
whatIsIncluded = map concat . map includeToString . relIncludes where
includeToString (IncGHC, p) = "GHC: " : [show p]
includeToString (IncGHCLib, p) = "GHCLib: " : [show p]
includeToString (IncLib, p) = "LIB: " : [show p]
includeToString (IncTool, p) = "TOOL: " : [show p]
includeToString (IncGHCTool, p) = "TOOL: " : [show p]
includeToString (IncIfWindows it, p) =
"IfWindows: " : includeToString (it,p)
includeToString (IncIfNotWindows it, p) =
"IfNotWindows: " : includeToString (it,p)
buildRules :: Release -> FilePath -> BuildConfig -> Rules()
buildRules hpRelease srcTarFile bc = do
"build-source" ~> need [srcTarFile]
"build-target" ~> need [targetDir]
"build-product" ~> need [osProduct]
"build-local" ~> need [dir ghcLocalDir]
"build-website" ~> need [dir websiteDir]
forM_ (platformPackages hpRelease) $ \pkg -> do
let full = "build-package-" ++ show pkg
let short = "build-package-" ++ pkgName pkg
short ~> need [full]
full ~> need [dir $ packageBuildDir pkg]
"build-all" ~> do -- separate need call so built in order
need ["build-source"]
need ["build-product"]
osRules hpRelease bc
where
OS{..} = osFromConfig bc
| bgamari/haskell-platform | hptool/src/Main.hs | bsd-3-clause | 4,166 | 5 | 16 | 1,328 | 963 | 502 | 461 | 91 | 7 |
{-|
Module : System.Process.Utils
Description : Convenience functions for calling commands
Copyright : (c) Michael Klein, 2016
License : BSD3
Maintainer : lambdamichael(at)gmail.com
-}
module System.Process.Utils where
import System.Process ( readProcessWithExitCode
)
import System.Exit ( ExitCode(..)
)
-- | A `String` containing errors
type ErrString = String
-- -- | Do `readProcessWithExitCode`, returning @`Left` errorDetails@ on failure
-- simpleRun :: String -> [String] -> String -> IO (Either ErrString String)
-- simpleRun cmd args input = do
-- (exitCode, stdOut, stdErr) <- readProcessWithExitCode cmd args input
-- if (exitCode /= ExitSuccess) || (stdErr /= "")
-- then do
-- return . Left $ unlines [ "exitCode:"
-- , show exitCode
-- , "stderr:"
-- , stdErr
-- ]
-- else do
-- return . Right $ stdOut
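-- A minimal working sketch of the helper described above (added for
-- illustration; it mirrors the commented-out version and is not part of the
-- original module):
simpleRun :: String -> [String] -> String -> IO (Either ErrString String)
simpleRun cmd args input = do
  (exitCode, stdOut, stdErr) <- readProcessWithExitCode cmd args input
  if (exitCode /= ExitSuccess) || (stdErr /= "")
    then return . Left $ unlines [ "exitCode:"
                                 , show exitCode
                                 , "stderr:"
                                 , stdErr
                                 ]
    else return . Right $ stdOut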
| michaeljklein/git-details | src/System/Process/Utils.hs | bsd-3-clause | 996 | 0 | 6 | 306 | 52 | 39 | 13 | 4 | 0 |
{- |
Provide a class that renders multiple Haskell values in a text form
that is accessible by gnuplot.
Maybe we add a method for the binary interface to gnuplot later.
-}
module Graphics.Gnuplot.Value.Tuple (
C(text, columnCount),
ColumnCount(ColumnCount),
) where
import System.Locale (defaultTimeLocale, )
import qualified Data.Time as Time
import Data.Word (Word8, Word16, Word32, Word64, )
import Data.Int (Int8, Int16, Int32, Int64, )
import Data.Ratio (Ratio, )
class C a where
{- |
For values that are also in Atom class,
'text' must generate a singleton list.
-}
text :: a -> [ShowS]
{- |
It must hold @ColumnCount (length (text x)) == columnCount@.
-}
columnCount :: ColumnCount a
columnCount = ColumnCount 1
{- |
Count numbers of gnuplot data columns for the respective type.
Somehow a writer monad with respect to Sum monoid
without material monadic result.
Cf. ColumnSet module.
-}
newtype ColumnCount a = ColumnCount Int
deriving (Eq, Ord, Show)
{-
Functor and Applicative instances would be useful
for combining column sets,
but they are dangerous, because they can bring
type and column columnCount out of sync.
-}
pure :: a -> ColumnCount a
pure _ = ColumnCount 0
(<*>) :: ColumnCount (a -> b) -> ColumnCount a -> ColumnCount b
ColumnCount n <*> ColumnCount m = ColumnCount (n+m)
singleton :: a -> [a]
singleton = (:[])
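-- Illustrative example (added; not part of the original module): for a pair,
-- 'text' yields one ShowS per gnuplot column and 'columnCount' agrees with it:
--
-- > map ($ "") (text ((1.5, 2) :: (Double, Int)))  ==  ["1.5", "2"]
-- > case columnCount :: ColumnCount (Double, Int) of ColumnCount n -> n == 2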
instance C Float where text = singleton . shows
instance C Double where text = singleton . shows
instance C Int where text = singleton . shows
instance C Integer where text = singleton . shows
instance (Integral a) => C (Ratio a) where
text = singleton . shows . (id :: Double->Double) . realToFrac
instance C Int8 where text = singleton . shows
instance C Int16 where text = singleton . shows
instance C Int32 where text = singleton . shows
instance C Int64 where text = singleton . shows
instance C Word8 where text = singleton . shows
instance C Word16 where text = singleton . shows
instance C Word32 where text = singleton . shows
instance C Word64 where text = singleton . shows
instance C Time.Day where
text d = text $ Time.UTCTime d 0
instance C Time.UTCTime where
text = singleton . showString . Time.formatTime defaultTimeLocale "%F %T%Q %z"
instance (C a, C b) => C (a,b) where
text (a,b) = text a ++ text b
columnCount =
pure (,)
<*> columnCount
<*> columnCount
instance (C a, C b, C c) => C (a,b,c) where
text (a,b,c) = text a ++ text b ++ text c
columnCount =
pure (,,)
<*> columnCount
<*> columnCount
<*> columnCount
instance (C a, C b, C c, C d) => C (a,b,c,d) where
text (a,b,c,d) = text a ++ text b ++ text c ++ text d
columnCount =
pure (,,,)
<*> columnCount
<*> columnCount
<*> columnCount
<*> columnCount
| kubkon/gnuplot | src/Graphics/Gnuplot/Value/Tuple.hs | bsd-3-clause | 2,863 | 0 | 11 | 668 | 868 | 474 | 394 | 63 | 1 |
{-# LANGUAGE EmptyDataDecls, DeriveDataTypeable,
FlexibleInstances, MultiParamTypeClasses #-}
-- | Reactive Demand Programming (RDP) design is for open, scalable,
-- distributed systems. Sirea is much more humble: just one Haskell
-- process. But it is still useful to model concurrent behaviors in
-- Sirea - for task concurrency, and proof of concept for modeling
-- spatial orchestration.
--
-- This module provides behaviors for signals to cross Partitions.
-- Each partition has one Haskell thread. Trivial partitions merely
-- process RDP updates, but many represent resources and continuous
-- or periodic tasks (persisting state, maintaining a GLUT window,
-- watching the filesystem, etc.). A typeclass allows clients to
-- create partitions for specific tasks.
--
-- Sirea makes partitions very convenient - just name them by type,
-- or infer them at `bcross`. This is very declarative. Partition
-- threads are only created if the partition is used. Partitions
-- can be abstracted by typeclass, possibly by existentials. They
-- communicate via inboxes processed on runStepper operations. For
-- weakly periodic tasks (GC, persistence, polling) a pulse message
-- is regularly broadcast across all partitions that need it.
--
-- Use bdelay with bcross to model the communication overheads, and
-- computation costs within each partition. There is no delay by
-- default.
--
-- NOTE: Partition threads must use non-blocking IO if they interact
-- with legacy libraries or the operating system. Sirea waits when a
-- thread falls behind (for predictable time and space properties).
-- Blocking IO can cause the app to freeze. (Fork another thread if
-- necessary; just don't block the `runStepper` operation.)
--
module Sirea.Partition
( Partition(..)
, BCross(..)
, Pt, P0, W
, Stepper(..)
, Stopper(..)
, PSched, Sched(..) -- re-exported
, getPSched
) where
import Sirea.Behavior
import Sirea.PCX
import Sirea.Internal.CC
import Sirea.Internal.PTypes
import Sirea.Internal.Thread
import Sirea.Internal.PulseSensor (getPulseScheduler)
import Data.Typeable
import Data.IORef
import Control.Concurrent (forkIO)
import GHC.Conc (labelThread)
-- | Cross between partitions. Note that this behavior requires the
-- `b` class itself to encapsulate knowledge of how the partitions
-- are accessed. In the normal use case, partitions are created when
-- you cross into them by type, i.e. bcross into a GLUT partition in
-- order to create and control a GLUT window. The illusion is that
-- the partitions have always existed, they're just passive unless
-- you control them - i.e. discovery, not creation.
--
-- Cross from a partition to itself may optimize to identity.
class BCross b where
bcross :: (Partition p, Partition p') => b (S p x) (S p' x)
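-- A hypothetical usage sketch (added; not part of this module): a worker
-- partition can be named purely by type with the 'Pt' wrapper defined further
-- below, and a signal is moved into its thread with 'bcross':
--
-- > data Worker deriving (Typeable)    -- just a type-level name
-- >
-- > toWorker :: (BCross b) => b (S P0 x) (S (Pt Worker) x)
-- > toWorker = bcross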
-- | Partition p - indicates a toplevel partition type, and also
-- can override the default partition thread constructor. The
-- partition must return its own stopper operation, which will be
-- run from within the same partition when it is time to halt the
-- application.
--
-- Note: Partitions are Resources (see PCX) and should not have any
-- significant side-effects until some effects are demanded.
--
class (Typeable p) => Partition p where
-- | create a new partition thread, with access to partition
-- resources via PCX. (Recommend use of GHC.Conc.labelThread.)
newPartitionThread :: PCX p -> Stepper -> IO Stopper
-- We need a child PCX for each partition.
instance (Partition p) => Resource W (PCX p) where
locateResource rp _ = newPCX rp
-- | The W type represents the toplevel PCX. Each thread partition
-- operates directly under the world or process level partition, W.
data W
-- | PSched is a partition scheduler, operating on partition threads
-- in the IO monad.
type PSched = Sched IO
-- | Given the PCX for a partition, we can obtain the scheduler,
-- though doing so is optional. See Sirea.PSched for more info.
getPSched :: (Partition p) => PCX p -> IO PSched
getPSched cp =
findInPCX cp >>= \ tc ->
getPulseScheduler cp >>= \ onPulse ->
return $! Sched
{ stepTime = getTCTime tc
, onNextStep = addTCRecv tc
, onUpdPhase = addTCWork tc
, onStepEnd = addTCSend tc
, eventually = onPulse
}
-- | Pt is a type for trivial partitions. These partitions have few
-- responsibilities, other than to process available RDP updates as
-- fast as possible and perform specified step or pulse actions.
--
-- While partitioning can be a basis for parallelism, it weakens the
-- consistency properties of Sirea applications. (Within a partition
-- you have determinism up to input. Across partitions, you only get
-- snapshot consistency and eventual consistency. Straggling updates
-- are possible if a thread falls behind.) Consider whether `bspark`
-- or `bstrat` is sufficient for parallelism.
--
-- Partitions are better justified when they represent resources and
-- various IO responsibilities.
--
data Pt x deriving(Typeable)
instance (Typeable x) => Partition (Pt x) where
newPartitionThread cp stepper =
newIORef emptyStopData >>= \ rfStop ->
forkIO (simplePartitionLoop rfStop stepper) >>= \ tid ->
labelThread tid (getLabel cp) >>
return (makeStopper rfStop)
getLabel :: (Typeable x) => PCX x -> String
getLabel = show . typeOf . getPTX
where getPTX :: PCX x -> Pt x
getPTX _ = undefined
-- | P0 is the initial or main partition for a Sirea application. It
-- has a thread, but one controlled by the Sirea client rather than
-- created by Sirea. See Sirea.Build for more information.
data P0 deriving(Typeable)
instance Partition P0 where
newPartitionThread = error "special case: main thread is not constructed"
| dmbarbour/Sirea | src/Sirea/Partition.hs | bsd-3-clause | 5,809 | 0 | 13 | 1,123 | 630 | 379 | 251 | -1 | -1 |
module Gfx.Context
( GfxContext(..)
, createGfxContext
, empty
)
where
import Control.Monad.State.Strict ( execStateT )
import Control.Concurrent.STM ( TVar
, atomically
, readTVarIO
, writeTVar
, modifyTVar'
)
import qualified Gfx.Commands as GC
import Gfx.Engine ( GfxEngine
, GraphicsEngine
, resetGfxEngine
)
import Gfx.PostProcessing ( AnimationStyle )
data GfxContext = GfxContext { drawShape :: String -> Float -> Float -> Float -> IO ()
, rotate :: Float -> Float -> Float -> IO ()
, scale :: Float -> Float -> Float -> IO ()
, move :: Float -> Float -> Float -> IO ()
, colourFill :: Float -> Float -> Float -> Float -> IO ()
, noFill :: IO ()
, textureFill :: String -> Float -> IO ()
, colourStroke :: Float -> Float -> Float -> Float -> IO ()
, noStroke :: IO ()
, setMaterial :: String -> IO ()
, setMaterialVar :: String -> Float -> IO ()
, setBackground :: Float -> Float -> Float -> IO ()
, pushScope :: IO ()
, popScope :: IO ()
, setAnimationStyle :: AnimationStyle -> IO ()
, setDepthChecking :: Bool -> IO ()
, reset :: IO ()
, renderCode :: String -> IO ()
, renderCodeToBuffer :: String -> IO ()
}
createGfxContext :: TVar GfxEngine -> GfxContext
createGfxContext gfx = GfxContext
{ drawShape = wrapFourArg gfx GC.drawShape
, rotate = wrapThreeArg gfx GC.rotate
, scale = wrapThreeArg gfx GC.scale
, move = wrapThreeArg gfx GC.move
, colourFill = wrapFourArg gfx GC.colourFill
, noFill = wrapNoArg gfx GC.noFill
, textureFill = wrapTwoArg gfx GC.textureFill
, colourStroke = wrapFourArg gfx GC.colourStroke
, noStroke = wrapNoArg gfx GC.noStroke
, setMaterial = wrapOneArg gfx GC.setMaterial
, setMaterialVar = wrapTwoArg gfx GC.setMaterialVariable
, setBackground = wrapThreeArg gfx GC.setBackground
, pushScope = wrapNoArg gfx GC.pushScope
, popScope = wrapNoArg gfx GC.popScope
, setAnimationStyle = wrapOneArg gfx GC.setAnimationStyle
, setDepthChecking = wrapOneArg gfx GC.setDepthChecking
, reset = resetGfxCtx gfx
, renderCode = wrapOneArg gfx GC.renderCode
, renderCodeToBuffer = wrapOneArg gfx GC.renderCodeToBuffer
}
empty :: GfxContext
empty = GfxContext { drawShape = \_ _ _ _ -> print "No GFX Context"
, rotate = \_ _ _ -> print "No GFX Context"
, scale = \_ _ _ -> print "No GFX Context"
, move = \_ _ _ -> print "No GFX Context"
, colourFill = \_ _ _ _ -> print "No GFX Context"
, noFill = print "No Gfx Context"
, textureFill = \_ _ -> print "No Gfx Context"
, colourStroke = \_ _ _ _ -> print "No GFX Context"
, noStroke = print "No Gfx Context"
, setMaterial = \_ -> print "No Gfx Context"
, setMaterialVar = \_ _ -> print "No Gfx Context"
, setBackground = \_ _ _ -> print "No Gfx Context"
, pushScope = print "No Gfx Context"
, popScope = print "No Gfx Context"
, setAnimationStyle = \_ -> print "No Gfx Context"
, setDepthChecking = \_ -> print "No Gfx Context"
, reset = print "No Gfx Context"
, renderCode = \_ -> print "No Gfx Context"
, renderCodeToBuffer = \_ -> print "No Gfx Context"
}
resetGfxCtx :: TVar GfxEngine -> IO ()
resetGfxCtx gfx = atomically $ modifyTVar' gfx resetGfxEngine
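-- Note (added for clarity): the wrap* helpers below lift a GraphicsEngine
-- action of the given arity into an IO action over the shared engine state:
-- read the TVar, run the action with execStateT, then write the updated
-- engine back.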
wrapNoArg :: TVar GfxEngine -> GraphicsEngine () -> IO ()
wrapNoArg gfx func = do
ge <- readTVarIO gfx
newGe <- execStateT func ge
atomically $ writeTVar gfx newGe
wrapOneArg :: TVar GfxEngine -> (a -> GraphicsEngine ()) -> a -> IO ()
wrapOneArg gfx func a = do
ge <- readTVarIO gfx
newGe <- execStateT (func a) ge
atomically $ writeTVar gfx newGe
wrapTwoArg :: TVar GfxEngine -> (a -> b -> GraphicsEngine ()) -> a -> b -> IO ()
wrapTwoArg gfx func a b = do
ge <- readTVarIO gfx
newGe <- execStateT (func a b) ge
atomically $ writeTVar gfx newGe
wrapThreeArg
:: TVar GfxEngine
-> (a -> b -> c -> GraphicsEngine ())
-> a
-> b
-> c
-> IO ()
wrapThreeArg gfx func a b c = do
ge <- readTVarIO gfx
newGe <- execStateT (func a b c) ge
atomically $ writeTVar gfx newGe
wrapFourArg
:: TVar GfxEngine
-> (a -> b -> c -> d -> GraphicsEngine ())
-> a
-> b
-> c
-> d
-> IO ()
wrapFourArg gfx func a b c d = do
ge <- readTVarIO gfx
newGe <- execStateT (func a b c d) ge
atomically $ writeTVar gfx newGe
| rumblesan/proviz | src/Gfx/Context.hs | bsd-3-clause | 5,825 | 0 | 14 | 2,542 | 1,528 | 796 | 732 | 115 | 1 |
{-# LANGUAGE Rank2Types,ScopedTypeVariables #-}
module Language.Haskell.Exts.SimpleGenerics where
import Data.Data
import Data.Maybe(fromJust)
-- taken from SYB
everywhere' :: (forall a. Data a => a -> a)
-> (forall a. Data a => a -> a)
everywhere' f = gmapT (everywhere' f) . f
generic :: forall a b. (Data a, Data b) =>
(a -> a) -> b -> b
generic f = everywhere'
(\e -> if (typeOf e == typeOf (undefined :: a))
then fromJust $ cast $ f $ fromJust $ cast e
else e)
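-- Illustrative use (added; not part of the original module): apply a
-- transformation to every subterm of one specific type, outermost first:
--
-- > generic ((+ 1) :: Int -> Int) ([(1, "a"), (2, "b")] :: [(Int, String)])
-- >   ==  [(2, "a"), (3, "b")]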
everywhereM' :: Monad m => (forall a. Data a => a -> m a)
-> (forall a. Data a => a -> m a)
everywhereM' f x = do x' <- f x
gmapM (everywhereM' f) x'
genericM :: forall a b m. (Monad m,Data a, Data b) =>
(a -> m a) -> b -> m b
genericM f = everywhereM'
(\e -> if (typeOf e == typeOf (undefined :: a))
then fromJust $ gcast $ f $ fromJust $ cast e
else return e) | shayan-najd/Haskell-Desugar-Generic | Language/Haskell/Exts/SimpleGenerics.hs | bsd-3-clause | 1,067 | 0 | 12 | 411 | 415 | 218 | 197 | 23 | 2 |
{-# LANGUAGE PatternSynonyms #-}
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.GL.ARB.TextureRG
-- Copyright : (c) Sven Panne 2019
-- License : BSD3
--
-- Maintainer : Sven Panne <[email protected]>
-- Stability : stable
-- Portability : portable
--
--------------------------------------------------------------------------------
module Graphics.GL.ARB.TextureRG (
-- * Extension Support
glGetARBTextureRG,
gl_ARB_texture_rg,
-- * Enums
pattern GL_R16,
pattern GL_R16F,
pattern GL_R16I,
pattern GL_R16UI,
pattern GL_R32F,
pattern GL_R32I,
pattern GL_R32UI,
pattern GL_R8,
pattern GL_R8I,
pattern GL_R8UI,
pattern GL_RG,
pattern GL_RG16,
pattern GL_RG16F,
pattern GL_RG16I,
pattern GL_RG16UI,
pattern GL_RG32F,
pattern GL_RG32I,
pattern GL_RG32UI,
pattern GL_RG8,
pattern GL_RG8I,
pattern GL_RG8UI,
pattern GL_RG_INTEGER
) where
import Graphics.GL.ExtensionPredicates
import Graphics.GL.Tokens
| haskell-opengl/OpenGLRaw | src/Graphics/GL/ARB/TextureRG.hs | bsd-3-clause | 1,031 | 0 | 5 | 175 | 152 | 99 | 53 | 28 | 0 |
module Matterhorn.State.Help
( showHelpScreen
)
where
import Prelude ()
import Matterhorn.Prelude
import Brick.Main ( viewportScroll, vScrollToBeginning )
import Matterhorn.Types
showHelpScreen :: HelpTopic -> MH ()
showHelpScreen topic = do
curMode <- use (csCurrentTeam.tsMode)
case curMode of
ShowHelp {} -> return ()
_ -> do
mh $ vScrollToBeginning (viewportScroll HelpViewport)
setMode $ ShowHelp topic curMode
| matterhorn-chat/matterhorn | src/Matterhorn/State/Help.hs | bsd-3-clause | 513 | 0 | 15 | 151 | 134 | 69 | 65 | 14 | 2 |
module Idris.REPLParser (parseCmd, help, allHelp) where
import System.FilePath ((</>))
import System.Console.ANSI (Color(..))
import Idris.Colours
import Idris.AbsSyntax
import Idris.Core.TT
import Idris.Help
import qualified Idris.Parser as P
import Control.Applicative
import Control.Monad.State.Strict
import Text.Parser.Combinators
import Text.Parser.Char(anyChar,oneOf)
import Text.Trifecta(Result, parseString)
import Text.Trifecta.Delta
import Debug.Trace
import Data.List
import Data.List.Split(splitOn)
import Data.Char(isSpace, toLower)
import qualified Data.ByteString.UTF8 as UTF8
parseCmd :: IState -> String -> String -> Result (Either String Command)
parseCmd i inputname = P.runparser pCmd i inputname . trim
where trim = f . f
where f = reverse . dropWhile isSpace
type CommandTable = [ ( [String], CmdArg, String
, String -> P.IdrisParser (Either String Command) ) ]
help :: [([String], CmdArg, String)]
help = (["<expr>"], NoArg, "Evaluate an expression") :
[ (map (':' :) names, args, text) | (names, args, text, _) <- parserCommandsForHelp ]
allHelp :: [([String], CmdArg, String)]
allHelp = [ (map (':' :) names, args, text)
| (names, args, text, _) <- parserCommandsForHelp ++ parserCommands ]
parserCommandsForHelp :: CommandTable
parserCommandsForHelp =
[ exprArgCmd ["t", "type"] Check "Check the type of an expression"
, exprArgCmd ["core"] Core "View the core language representation of a term"
, nameArgCmd ["miss", "missing"] Missing "Show missing clauses"
, (["doc"], NameArg, "Show internal documentation", cmd_doc)
, (["mkdoc"], NamespaceArg, "Generate IdrisDoc for namespace(s) and dependencies"
, genArg "namespace" (many anyChar) MakeDoc)
, (["apropos"], SeqArgs (OptionalArg PkgArgs) NameArg, " Search names, types, and documentation"
, cmd_apropos)
, (["s", "search"], SeqArgs (OptionalArg PkgArgs) ExprArg
, " Search for values by type", cmd_search)
, nameArgCmd ["wc", "whocalls"] WhoCalls "List the callers of some name"
, nameArgCmd ["cw", "callswho"] CallsWho "List the callees of some name"
, namespaceArgCmd ["browse"] Browse "List the contents of some namespace"
, nameArgCmd ["total"] TotCheck "Check the totality of a name"
, noArgCmd ["r", "reload"] Reload "Reload current file"
, (["l", "load"], FileArg, "Load a new file"
, strArg (\f -> Load f Nothing))
, (["cd"], FileArg, "Change working directory"
, strArg ChangeDirectory)
, (["module"], ModuleArg, "Import an extra module", moduleArg ModImport) -- NOTE: dragons
, noArgCmd ["e", "edit"] Edit "Edit current file using $EDITOR or $VISUAL"
, noArgCmd ["m", "metavars"] Metavars "Show remaining proof obligations (metavariables or holes)"
, (["p", "prove"], MetaVarArg, "Prove a metavariable"
, nameArg (Prove False))
, (["elab"], MetaVarArg, "Build a metavariable using the elaboration shell"
, nameArg (Prove True))
, (["a", "addproof"], NameArg, "Add proof to source file", cmd_addproof)
, (["rmproof"], NameArg, "Remove proof from proof stack"
, nameArg RmProof)
, (["showproof"], NameArg, "Show proof"
, nameArg ShowProof)
, noArgCmd ["proofs"] Proofs "Show available proofs"
, exprArgCmd ["x"] ExecVal "Execute IO actions resulting from an expression using the interpreter"
, (["c", "compile"], FileArg, "Compile to an executable [codegen] <filename>", cmd_compile)
, (["exec", "execute"], OptionalArg ExprArg, "Compile to an executable and run", cmd_execute)
, (["dynamic"], FileArg, "Dynamically load a C library (similar to %dynamic)", cmd_dynamic)
, (["dynamic"], NoArg, "List dynamically loaded C libraries", cmd_dynamic)
, noArgCmd ["?", "h", "help"] Help "Display this help text"
, optArgCmd ["set"] SetOpt "Set an option (errorcontext, showimplicits)"
, optArgCmd ["unset"] UnsetOpt "Unset an option"
, (["color", "colour"], ColourArg
, "Turn REPL colours on or off; set a specific colour"
, cmd_colour)
, (["consolewidth"], ConsoleWidthArg, "Set the width of the console", cmd_consolewidth)
, (["printerdepth"], OptionalArg NumberArg, "Set the maximum pretty-printer depth (no arg for infinite)", cmd_printdepth)
, noArgCmd ["q", "quit"] Quit "Exit the Idris system"
, noArgCmd ["w", "warranty"] Warranty "Displays warranty information"
, (["let"], ManyArgs DeclArg
, "Evaluate a declaration, such as a function definition, instance implementation, or fixity declaration"
, cmd_let)
, (["unlet", "undefine"], ManyArgs NameArg
, "Remove the listed repl definitions, or all repl definitions if no names given"
, cmd_unlet)
, nameArgCmd ["printdef"] PrintDef "Show the definition of a function"
, (["pp", "pprint"], (SeqArgs OptionArg (SeqArgs NumberArg NameArg))
, "Pretty prints an Idris function in either LaTeX or HTML and for a specified width."
, cmd_pprint)
]
parserCommands =
[ noArgCmd ["u", "universes"] Universes "Display universe constraints"
, noArgCmd ["errorhandlers"] ListErrorHandlers "List registered error handlers"
, nameArgCmd ["d", "def"] Defn "Display a name's internal definitions"
, nameArgCmd ["transinfo"] TransformInfo "Show relevant transformation rules for a name"
, nameArgCmd ["di", "dbginfo"] DebugInfo "Show debugging information for a name"
, exprArgCmd ["patt"] Pattelab "(Debugging) Elaborate pattern expression"
, exprArgCmd ["spec"] Spec "?"
, exprArgCmd ["hnf"] HNF "?"
, exprArgCmd ["inline"] TestInline "?"
, proofArgCmd ["cs", "casesplit"] CaseSplitAt
":cs <line> <name> splits the pattern variable on the line"
, proofArgCmd ["apc", "addproofclause"] AddProofClauseFrom
":apc <line> <name> adds a pattern-matching proof clause to name on line"
, proofArgCmd ["ac", "addclause"] AddClauseFrom
":ac <line> <name> adds a clause for the definition of the name on the line"
, proofArgCmd ["am", "addmissing"] AddMissing
":am <line> <name> adds all missing pattern matches for the name on the line"
, proofArgCmd ["mw", "makewith"] MakeWith
":mw <line> <name> adds a with clause for the definition of the name on the line"
, proofArgCmd ["ml", "makelemma"] MakeLemma "?"
, (["log"], NumberArg, "Set logging verbosity level", cmd_log)
, (["lto", "loadto"], SeqArgs NumberArg FileArg
, "Load file up to line number", cmd_loadto)
, (["ps", "proofsearch"], NoArg
, ":ps <line> <name> <names> does proof search for name on line, with names as hints"
, cmd_proofsearch)
, (["ref", "refine"], NoArg
, ":ref <line> <name> <name'> attempts to partially solve name on line, with name' as hint, introducing metavariables for arguments that aren't inferrable"
, cmd_refine)
, (["debugunify"], SeqArgs ExprArg ExprArg
, "(Debugging) Try to unify two expressions", const $ do
l <- P.simpleExpr defaultSyntax
r <- P.simpleExpr defaultSyntax
eof
return (Right (DebugUnify l r))
)
]
noArgCmd names command doc =
(names, NoArg, doc, noArgs command)
nameArgCmd names command doc =
(names, NameArg, doc, fnNameArg command)
namespaceArgCmd names command doc =
(names, NamespaceArg, doc, namespaceArg command)
exprArgCmd names command doc =
(names, ExprArg, doc, exprArg command)
metavarArgCmd names command doc =
(names, MetaVarArg, doc, fnNameArg command)
optArgCmd names command doc =
(names, OptionArg, doc, optArg command)
proofArgCmd names command doc =
(names, NoArg, doc, proofArg command)
pCmd :: P.IdrisParser (Either String Command)
pCmd = choice [ do c <- cmd names; parser c
| (names, _, _, parser) <- parserCommandsForHelp ++ parserCommands ]
<|> unrecognized
<|> nop
<|> eval
where nop = do eof; return (Right NOP)
eval = exprArg Eval ""
unrecognized = do
P.lchar ':'
cmd <- many anyChar
let cmd' = takeWhile (/=' ') cmd
return (Left $ "Unrecognized command: " ++ cmd')
cmd :: [String] -> P.IdrisParser String
cmd xs = try $ do
P.lchar ':'
docmd sorted_xs
where docmd [] = fail "Could not parse command"
docmd (x:xs) = try (P.reserved x >> return x) <|> docmd xs
sorted_xs = sortBy (\x y -> compare (length y) (length x)) xs
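-- Note (added for clarity): the name variants of a command are tried
-- longest-first, so a longer spelling (e.g. "pprint") is matched before a
-- shorter alias ("pp") that happens to be its prefix.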
noArgs :: Command -> String -> P.IdrisParser (Either String Command)
noArgs cmd name = do
let emptyArgs = do
eof
return (Right cmd)
let failure = return (Left $ ":" ++ name ++ " takes no arguments")
emptyArgs <|> failure
exprArg :: (PTerm -> Command) -> String -> P.IdrisParser (Either String Command)
exprArg cmd name = do
let noArg = do
eof
return $ Left ("Usage is :" ++ name ++ " <expression>")
let properArg = do
t <- P.fullExpr defaultSyntax
return $ Right (cmd t)
try noArg <|> properArg
genArg :: String -> P.IdrisParser a -> (a -> Command)
-> String -> P.IdrisParser (Either String Command)
genArg argName argParser cmd name = do
let emptyArgs = do eof; failure
oneArg = do arg <- argParser
eof
return (Right (cmd arg))
try emptyArgs <|> oneArg <|> failure
where
failure = return $ Left ("Usage is :" ++ name ++ " <" ++ argName ++ ">")
nameArg, fnNameArg :: (Name -> Command) -> String -> P.IdrisParser (Either String Command)
nameArg = genArg "name" $ fst <$> P.name
fnNameArg = genArg "functionname" $ fst <$> P.fnName
strArg :: (String -> Command) -> String -> P.IdrisParser (Either String Command)
strArg = genArg "string" (many anyChar)
moduleArg :: (FilePath -> Command) -> String -> P.IdrisParser (Either String Command)
moduleArg = genArg "module" (fmap (toPath . fst) P.identifier)
where
toPath n = foldl1' (</>) $ splitOn "." n
namespaceArg :: ([String] -> Command) -> String -> P.IdrisParser (Either String Command)
namespaceArg = genArg "namespace" (fmap (toNS . fst) P.identifier)
where
toNS = splitOn "."
optArg :: (Opt -> Command) -> String -> P.IdrisParser (Either String Command)
optArg cmd name = do
let emptyArgs = do
eof
return $ Left ("Usage is :" ++ name ++ " <option>")
let oneArg = do
o <- pOption
P.whiteSpace
eof
return (Right (cmd o))
let failure = return $ Left "Unrecognized setting"
try emptyArgs <|> oneArg <|> failure
where
pOption :: P.IdrisParser Opt
pOption = do discard (P.symbol "errorcontext"); return ErrContext
<|> do discard (P.symbol "showimplicits"); return ShowImpl
<|> do discard (P.symbol "originalerrors"); return ShowOrigErr
<|> do discard (P.symbol "autosolve"); return AutoSolve
<|> do discard (P.symbol "nobanner") ; return NoBanner
<|> do discard (P.symbol "warnreach"); return WarnReach
proofArg :: (Bool -> Int -> Name -> Command) -> String -> P.IdrisParser (Either String Command)
proofArg cmd name = do
upd <- option False $ do
P.lchar '!'
return True
l <- fst <$> P.natural
n <- fst <$> P.name;
return (Right (cmd upd (fromInteger l) n))
cmd_doc :: String -> P.IdrisParser (Either String Command)
cmd_doc name = do
let constant = do
c <- fmap fst P.constant
eof
return $ Right (DocStr (Right c) FullDocs)
let pType = do
P.reserved "Type"
eof
return $ Right (DocStr (Left $ P.mkName ("Type", "")) FullDocs)
let fnName = fnNameArg (\n -> DocStr (Left n) FullDocs) name
try constant <|> pType <|> fnName
cmd_consolewidth :: String -> P.IdrisParser (Either String Command)
cmd_consolewidth name = do
w <- pConsoleWidth
return (Right (SetConsoleWidth w))
where
pConsoleWidth :: P.IdrisParser ConsoleWidth
pConsoleWidth = do discard (P.symbol "auto"); return AutomaticWidth
<|> do discard (P.symbol "infinite"); return InfinitelyWide
<|> do n <- fmap (fromInteger . fst) P.natural
return (ColsWide n)
cmd_printdepth :: String -> P.IdrisParser (Either String Command)
cmd_printdepth _ = do d <- optional (fmap (fromInteger . fst) P.natural)
return (Right $ SetPrinterDepth d)
cmd_execute :: String -> P.IdrisParser (Either String Command)
cmd_execute name = do
tm <- option maintm (P.fullExpr defaultSyntax)
return (Right (Execute tm))
where
maintm = PRef (fileFC "(repl)") (sNS (sUN "main") ["Main"])
cmd_dynamic :: String -> P.IdrisParser (Either String Command)
cmd_dynamic name = do
let emptyArgs = noArgs ListDynamic name
let oneArg = do l <- many anyChar
return $ Right (DynamicLink l)
let failure = return $ Left $ "Usage is :" ++ name ++ " [<library>]"
try emptyArgs <|> try oneArg <|> failure
cmd_pprint :: String -> P.IdrisParser (Either String Command)
cmd_pprint name = do
fmt <- ppFormat
P.whiteSpace
n <- fmap (fromInteger . fst) P.natural
P.whiteSpace
t <- P.fullExpr defaultSyntax
return (Right (PPrint fmt n t))
where
ppFormat :: P.IdrisParser OutputFmt
ppFormat = (discard (P.symbol "html") >> return HTMLOutput)
<|> (discard (P.symbol "latex") >> return LaTeXOutput)
cmd_compile :: String -> P.IdrisParser (Either String Command)
cmd_compile name = do
let defaultCodegen = Via "c"
let codegenOption :: P.IdrisParser Codegen
codegenOption = do
let bytecodeCodegen = discard (P.symbol "bytecode") *> return Bytecode
viaCodegen = do x <- fst <$> P.identifier
return (Via (map toLower x))
bytecodeCodegen <|> viaCodegen
let hasOneArg = do
i <- get
f <- fst <$> P.identifier
eof
return $ Right (Compile defaultCodegen f)
let hasTwoArgs = do
i <- get
codegen <- codegenOption
f <- fst <$> P.identifier
eof
return $ Right (Compile codegen f)
let failure = return $ Left $ "Usage is :" ++ name ++ " [<codegen>] <filename>"
try hasTwoArgs <|> try hasOneArg <|> failure
cmd_addproof :: String -> P.IdrisParser (Either String Command)
cmd_addproof name = do
n <- option Nothing $ do
x <- fst <$> P.name
return (Just x)
eof
return (Right (AddProof n))
cmd_log :: String -> P.IdrisParser (Either String Command)
cmd_log name = do
i <- fmap (fromIntegral . fst) P.natural
eof
return (Right (LogLvl i))
cmd_let :: String -> P.IdrisParser (Either String Command)
cmd_let name = do
defn <- concat <$> many (P.decl defaultSyntax)
return (Right (NewDefn defn))
cmd_unlet :: String -> P.IdrisParser (Either String Command)
cmd_unlet name = (Right . Undefine) `fmap` many (fst <$> P.name)
cmd_loadto :: String -> P.IdrisParser (Either String Command)
cmd_loadto name = do
toline <- fmap (fromInteger . fst) P.natural
f <- many anyChar;
return (Right (Load f (Just toline)))
cmd_colour :: String -> P.IdrisParser (Either String Command)
cmd_colour name = fmap Right pSetColourCmd
where
colours :: [(String, Maybe Color)]
colours = [ ("black", Just Black)
, ("red", Just Red)
, ("green", Just Green)
, ("yellow", Just Yellow)
, ("blue", Just Blue)
, ("magenta", Just Magenta)
, ("cyan", Just Cyan)
, ("white", Just White)
, ("default", Nothing)
]
pSetColourCmd :: P.IdrisParser Command
pSetColourCmd = (do c <- pColourType
let defaultColour = IdrisColour Nothing True False False False
opts <- sepBy pColourMod (P.whiteSpace)
let colour = foldr ($) defaultColour $ reverse opts
return $ SetColour c colour)
<|> try (P.symbol "on" >> return ColourOn)
<|> try (P.symbol "off" >> return ColourOff)
pColour :: P.IdrisParser (Maybe Color)
pColour = doColour colours
where doColour [] = fail "Unknown colour"
doColour ((s, c):cs) = (try (P.symbol s) >> return c) <|> doColour cs
pColourMod :: P.IdrisParser (IdrisColour -> IdrisColour)
pColourMod = try (P.symbol "vivid" >> return doVivid)
<|> try (P.symbol "dull" >> return doDull)
<|> try (P.symbol "underline" >> return doUnderline)
<|> try (P.symbol "nounderline" >> return doNoUnderline)
<|> try (P.symbol "bold" >> return doBold)
<|> try (P.symbol "nobold" >> return doNoBold)
<|> try (P.symbol "italic" >> return doItalic)
<|> try (P.symbol "noitalic" >> return doNoItalic)
<|> try (pColour >>= return . doSetColour)
where doVivid i = i { vivid = True }
doDull i = i { vivid = False }
doUnderline i = i { underline = True }
doNoUnderline i = i { underline = False }
doBold i = i { bold = True }
doNoBold i = i { bold = False }
doItalic i = i { italic = True }
doNoItalic i = i { italic = False }
doSetColour c i = i { colour = c }
-- | Generate the colour type names using the default Show instance.
colourTypes :: [(String, ColourType)]
colourTypes = map (\x -> ((map toLower . reverse . drop 6 . reverse . show) x, x)) $
enumFromTo minBound maxBound
pColourType :: P.IdrisParser ColourType
pColourType = doColourType colourTypes
where doColourType [] = fail $ "Unknown colour category. Options: " ++
(concat . intersperse ", " . map fst) colourTypes
doColourType ((s,ct):cts) = (try (P.symbol s) >> return ct) <|> doColourType cts
idChar = oneOf (['a'..'z'] ++ ['A'..'Z'] ++ ['0'..'9'] ++ ['_'])
cmd_apropos :: String -> P.IdrisParser (Either String Command)
cmd_apropos = packageBasedCmd (some idChar) Apropos
packageBasedCmd :: P.IdrisParser a -> ([String] -> a -> Command)
-> String -> P.IdrisParser (Either String Command)
packageBasedCmd valParser cmd name =
try (do P.lchar '('
pkgs <- sepBy (some idChar) (P.lchar ',')
P.lchar ')'
val <- valParser
return (Right (cmd pkgs val)))
<|> do val <- valParser
return (Right (cmd [] val))
cmd_search :: String -> P.IdrisParser (Either String Command)
cmd_search = packageBasedCmd
(P.typeExpr (defaultSyntax { implicitAllowed = True })) Search
cmd_proofsearch :: String -> P.IdrisParser (Either String Command)
cmd_proofsearch name = do
upd <- option False (do P.lchar '!'; return True)
l <- fmap (fromInteger . fst) P.natural; n <- fst <$> P.name
hints <- many (fst <$> P.fnName)
return (Right (DoProofSearch upd True l n hints))
cmd_refine :: String -> P.IdrisParser (Either String Command)
cmd_refine name = do
upd <- option False (do P.lchar '!'; return True)
l <- fmap (fromInteger . fst) P.natural; n <- fst <$> P.name
hint <- fst <$> P.fnName
return (Right (DoProofSearch upd False l n [hint]))
| bkoropoff/Idris-dev | src/Idris/REPLParser.hs | bsd-3-clause | 19,466 | 0 | 21 | 5,029 | 6,192 | 3,185 | 3,007 | -1 | -1 |
module Bindings.K8055.DigitalIn (
DigitalInput(..),
readDigitalChannel,
readAllDigital
) where
import Data.Word
import Foreign.C
data DigitalInput
= DigitalIn1
| DigitalIn2
| DigitalIn3
| DigitalIn4
| DigitalIn5
| DigitalIn6
| DigitalIn7
| DigitalIn8
digitalInputId :: Num a => DigitalInput -> a
digitalInputId input =
case input of
DigitalIn1 -> 1
DigitalIn2 -> 2
DigitalIn3 -> 3
DigitalIn4 -> 4
DigitalIn5 -> 5
DigitalIn6 -> 6
DigitalIn7 -> 7
DigitalIn8 -> 8
foreign import stdcall unsafe "ReadDigitalChannel"
c_ReadDigitalChannel :: CInt -> IO CInt
-- | Reads the status of the input channel
readDigitalChannel :: DigitalInput -> IO Word8
readDigitalChannel input = do
res <- c_ReadDigitalChannel (digitalInputId input)
return $ fromIntegral res
foreign import stdcall unsafe "ReadAllDigital"
c_ReadAllDigital :: IO CInt
-- | Reads the status of all the input channels
readAllDigital :: IO Word8
readAllDigital = do
res <- c_ReadAllDigital
return $ fromIntegral res
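-- A small usage sketch (added; hypothetical, it assumes a connected K8055
-- card and needs Data.Bits for 'testBit'):
--
-- > do mask <- readAllDigital
-- >    print [ testBit mask i | i <- [0 .. 7] ]   -- one Bool per bitmask position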
| jputcu/bindings-K8055 | Bindings/K8055/DigitalIn.hs | bsd-3-clause | 1,049 | 0 | 10 | 220 | 252 | 134 | 118 | 38 | 8 |
module Main where
import AI.DataLoader
import AI.MathTmp
import Graphics.Histogram
import Control.Monad
import Data.List
import Data.Monoid
import Debug.Trace
import System.Directory
import System.IO
import Text.ParserCombinators.Parsec
import qualified Data.Map as Map
import qualified Graphics.Gnuplot.Advanced as Plot
import qualified Graphics.Gnuplot.Terminal.X11 as X11
import qualified Graphics.Gnuplot.Terminal.PNG as PNG
import qualified Graphics.Gnuplot.Terminal.SVG as SVG
import qualified Graphics.Gnuplot.Terminal.PostScript as PS
import qualified Graphics.Gnuplot.MultiPlot as MultiPlot
import qualified Graphics.Gnuplot.Frame as Frame
import qualified Graphics.Gnuplot.Frame.OptionSet as Opts
import qualified Graphics.Gnuplot.Frame.OptionSet.Style as OptsStyle
import qualified Graphics.Gnuplot.Frame.OptionSet.Histogram as Histogram
import qualified Graphics.Gnuplot.Graph as Graph
import qualified Graphics.Gnuplot.Plot.TwoDimensional as Plot2D
import qualified Graphics.Gnuplot.Graph.TwoDimensional as Graph2D
import Graphics.Gnuplot.Plot.TwoDimensional (linearScale, )
import qualified Graphics.Gnuplot.LineSpecification as LineSpec
import qualified Graphics.Gnuplot.ColorSpecification as Color
-- csv2stats :: (Num t) => String -> [[String]] -> [(String, t, [Double])]
csv2stats title xs =
[ last $ matmap mean $ doubledata xs
, last $ matmap stddev $ doubledata xs
]
csv2boostErr :: (Num t) => String -> [[String]] -> [(String, t, [Double])]
csv2boostErr title xs = trace title $
[ ("", 1, matmap (quantile 0.75) $ doubledata xs)
, (title', 2, matmap mean $ doubledata xs)
, ("", 1, matmap (quantile 0.25) $ doubledata xs)
]
where
title' =
swapL '\\' ' ' $
last $
words $
swapL '/' ' ' $
swapL ' ' '\\' title
swapL :: Char -> Char -> String -> String
swapL x y ls = map (\c -> if c==x
then y
else c) ls
-- csv2boostHist :: (Num t) => String -> [[String]] -> [(String, t, [Double])]
csv2boostHist title xs =
map boostingperf $ doubledata xs
boostingperf xs = (head $ tail $ reverse xs) - (head $ tail $ tail $ tail xs)
-- helpers
matmap :: ([a]->b) -> [[a]] -> [b]
matmap f xs = reverse $ matmap' f xs
matmap' :: ([a]->b) -> [[a]] -> [b]
matmap' f xs =
if (length $ head xs)>1
then (f $ map head xs):(matmap' f $ map tail xs)
else (f $ map head xs):[]
doubledata :: [[String]] -> [[Double]]
doubledata xs =
map ( map (read::String->Double)
. tail
. tail
. tail
)
xs
-- types of plots
boostErr :: [(String,Double,[Double])] -> Frame.T (Graph2D.T Int Double)
boostErr xs =
Frame.cons (
Opts.title "" $
Opts.xLabel "Number of boosting iterations" $
Opts.yLabel "Error" $
-- Opts.yRange2d (0,0.35) $
Opts.deflt) $
mconcat $
mkErrLines Color.red [(head $ tail xs)]
-- mkErrLines :: Color.T -> [(String,Double,[a])] -> [Plot2D.T (Graph2D.T Int Double)]
mkErrLines color xs =
map (\(title,width,dat) ->
fmap (Graph2D.lineSpec (
LineSpec.title title $
LineSpec.lineWidth width $
LineSpec.lineColor color $
LineSpec.lineStyle 1 $
LineSpec.deflt
)) $
Plot2D.list Graph2D.listLines dat) $ xs
-- boostErrComp :: [[(Color.T,(String,Double,[Double]))]] -> Frame.T (Graph2D.T Int Double)
boostErrComp xs =
Frame.cons (
Opts.title "" $
Opts.xLabel "Number of boosting iterations" $
Opts.yLabel "Error" $
-- Opts.yRange2d (0,0.3) $
Opts.deflt) $
mconcat $ concat
[ [head $ tail $ mkErrLines color ys] | (color,ys) <- xs ]
-------------------------------------------------------------------------------
-- plotting IO
loadResults :: String -> (String -> [[String]] -> r) -> String -> IO (Either ParseError r)
-- loadResults {-rowfilter-} plottingFunc filename = liftM (liftM $ plottingFunc $ nocsv filename) $ loadCSV filename
loadResults rowfilter plottingFunc filename = do
eitherCSV <- loadCSV filename
return $ do
csv <- eitherCSV
return $ plottingFunc (nocsv $ filename) $ filter (\xs -> xs!!2==rowfilter) csv
nocsv str = trace str $
if (take 4 $ reverse str)=="vsc."
then reverse $ drop 4 $ reverse str
else str
plotFile file = do
putStrLn $ "Plotting results for: "++file
eitherBoostErr <- loadResults "performance" csv2boostErr file
eitherBoostMargin <- loadResults "aveMargin" csv2boostErr file
eitherBoostWeightsMean <- loadResults "weights-mean" csv2boostErr file
eitherBoostWeightsStddev <- loadResults "weights-stddev" csv2boostErr file
eitherBoostHist <- loadResults "performance" csv2boostHist file
sequence_
[ Plot.plot (PS.cons $ file++".boostErr.ps") $ boostErr $ right eitherBoostErr
, Plot.plot (PS.cons $ file++".boostMargin.ps") $ boostErr $ right eitherBoostMargin
, Plot.plot (PS.cons $ file++".boostWeightsMean.ps") $ boostErr $ right eitherBoostWeightsMean
, Plot.plot (PS.cons $ file++".boostWeightsStddev.ps") $ boostErr $ right eitherBoostWeightsStddev
-- , Plot.plot (PS.cons $ file++".boostHist.ps") $ boostHist $ right eitherBoostHist
]
plot (file++".boostHistogram.ps") $ histogram binSturges $ right eitherBoostHist
right (Right xs) = xs
algCompare resdir fs = do
putStrLn $ "Plotting comparison for: "++(show fs)
eitherBoostErr <- sequence [loadResults "performance" csv2boostErr f | f<-fs]
sequence_
[ Plot.plot (PS.cons $ resdir++"/comp."++{-(show fs)++-}".boostErr.ps") $ boostErrComp $
zip [ Color.red, Color.orange, Color.green, Color.blue, Color.purple, Color.black ]
[ right ebe | ebe <- eitherBoostErr ]
]
plotAllFiles tmpdir resdir = do
setCurrentDirectory tmpdir
files <- getDirectoryContents resdir
let resfiles = sort $ map (resdir++) $ filter okfile files
algCompare resdir resfiles
sequence_ $ map plotFile $ resfiles
okfile str = isInfixOf ".csv" str &&
(not $ isInfixOf ".png" str) &&
(not $ isInfixOf ".ps" str) &&
(not $ isInfixOf ".svg" str)
--------------------
statsAllFiles tmpdir resdir = do
setCurrentDirectory tmpdir
hout <- openFile (resdir++"/stats.txt") WriteMode
files <- getDirectoryContents resdir
let resfiles = sort $ map (resdir++) $ filter okfile files
sequence_ $ map (statsFile hout) $ resfiles
hClose hout
statsFile hout file = do
hPutStr hout $ "Statistics for: "++file++" ; "
eitherStats <- loadResults "performance" csv2stats file
let stats=right eitherStats
hPutStr hout $ "mean="++(show $ stats !! 0)++" ; "
hPutStr hout $ "stddev="++(show $ stats !! 1)++" ; "
hPutStrLn hout ""
-- main
main :: IO ()
main = do
plotAllFiles tmpdir resdir
statsAllFiles tmpdir resdir
where
tmpdir = "/home/user/proj/haskell-classification/tmp/"
resdir = "/home/user/proj/haskell-classification/results/"
-- resdir="/home/user/proj/haskell-classification/results/results-good/ringnorm-NB-0.01/" | mikeizbicki/Classification | src/Plots.hs | bsd-3-clause | 7,362 | 0 | 19 | 1,784 | 2,089 | 1,097 | 992 | 151 | 2 |
module Game.Monad(
AppMonad
, AppPeer
, AppNetworkBackend
, MonadApp
) where
import Game.GoreAndAsh
import Game.GoreAndAsh.Logging
import Game.GoreAndAsh.Network
import Game.GoreAndAsh.Network.Backend.TCP
import Game.GoreAndAsh.Sync
-- | Which network implementation to use
type AppNetworkBackend = TCPBackend
-- | Peer connection for application monad
type AppPeer = Peer AppNetworkBackend
-- | Application monad that is used for implementation of game API
type AppMonad a = SyncT Spider AppNetworkBackend (NetworkT Spider AppNetworkBackend (LoggingT Spider GMSpider))
-- | Shortcut for constraints
type MonadApp t m = (MonadGame t m, NetworkServer t AppNetworkBackend m, NetworkClient t AppNetworkBackend m, SyncMonad t AppNetworkBackend m)
| Teaspot-Studio/gore-and-ash-demo | src/server/Game/Monad.hs | bsd-3-clause | 760 | 0 | 9 | 111 | 148 | 89 | 59 | 14 | 0 |
{-# LANGUAGE ScopedTypeVariables #-}
module Codec.String.Base64
( bytes64
, ibytes64
, fillByte64
, encode64
, decode64
) where
import Prelude hiding ((.), id, (++), length)
import Control.Applicative hiding (empty)
import Control.Category
import Data.Array.IArray
import Data.Bits
import qualified Data.Map as M
import Data.Monoid
import Data.String.Class
import Data.Tagged
import Data.Word
bytes64 :: Array Word8 Word8
bytes64 = listArray (0, 0x3F) $
[0x41..0x5A]
++ [0x61..0x7A]
++ [0x30..0x39]
++ [0x2B, 0x2F]
where (++) = mappend
ibytes64 :: M.Map Word8 Word8
ibytes64 = M.fromList $ map (\ ~(a_, b_) -> (b_, a_)) . assocs $ bytes64
fillByte64 :: Word8
fillByte64 = 0x3D
encode64 :: forall s. (StringCells s) => s -> s
encode64 s
| (Just (a, b, c, s')) <- safeUncons3 s =
let a' = toWord8 a
b' = toWord8 b
c' = toWord8 c
a'' = base $ a' `shiftR` 2
b'' = base $ ((a' .&. 0x03) `shiftL` 4) .|. (b' `shiftR` 4)
c'' = base $ ((b' .&. 0x0F) `shiftL` 2) .|. (c' `shiftR` 6)
d'' = base $ c' .&. 0x3F
in cons4 a'' b'' c'' d'' $ encode64 s'
| 2 <- length s =
let ~(Just (a, b, _)) = safeUncons2 s
a' = toWord8 a
b' = toWord8 b
a'' = base $ a' `shiftR` 2
b'' = base $ ((a' .&. 0x03) `shiftL` 4) .|. (b' `shiftR` 4)
c'' = base $ (b' .&. 0x0F) `shiftL` 2
in cons4 a'' b'' c'' fillByte64' $ empty
| 1 <- length s =
let ~(Just (a, _)) = safeUncons s
a' = toWord8 a
a'' = base $ a' `shiftR` 2
b'' = base $ (a' .&. 0x03) `shiftL` 4
in cons4 a'' b'' fillByte64' fillByte64' $ empty
| otherwise =
empty
where base = untag' . toMainChar . (bytes64 !) . toWord8
fillByte64' = untag' . toMainChar $ fillByte64
untag' = untag :: Tagged s a -> a
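-- Illustrative round trip (added; not part of the original module), using the
-- String instance of StringCells:
--
-- > encode64 ("Man" :: String)   ==  "TWFu"
-- > decode64 ("TWFu" :: String)  ==  Just "Man"
-- > decode64 (encode64 s)        ==  Just s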
decode64 :: forall s. (StringCells s) => s -> Maybe s
decode64 s
| (Just (a, b, c, d, s')) <- safeUncons4 s = do
let n x
| (toWord8 x) == fillByte64 = Just 0xFF -- no regular base 64 digit can match with 0xFF; use this so we know whether a byte is a fill byte
| (Just y) <- M.lookup (toWord8 x) ibytes64 = Just y
| otherwise = Nothing
a' <- n a
b' <- n b
c' <- n c
d' <- n d
if c' /= 0xFF
then do
-- c is not a fill byte; check d
if d' /= 0xFF
then do
-- abcd
cons3
(untag' . toMainChar $ (a' `shiftL` 2) .|. (b' `shiftR` 4))
(untag' . toMainChar $ (b' `shiftL` 4) .|. (c' `shiftR` 2))
(untag' . toMainChar $ (c' `shiftL` 6) .|. d')
<$> decode64 s'
else do
-- abc_
Just . cons2
(untag' . toMainChar $ (a' `shiftL` 2) .|. (b' `shiftR` 4))
(untag' . toMainChar $ (b' `shiftL` 4) .|. (c' `shiftR` 2))
$ empty
else do
do
do
-- ab__
Just . cons
(untag' . toMainChar $ (a' `shiftL` 2) .|. (b' `shiftR` 4))
$ empty
| otherwise =
Just empty
where untag' = untag :: Tagged s a -> a
| bairyn/bitmaps | src/Codec/String/Base64.hs | bsd-3-clause | 3,731 | 0 | 22 | 1,656 | 1,256 | 684 | 572 | 92 | 3 |
{-# LANGUAGE CPP #-}
import Control.Shell
import Data.Bits
import System.Info (os)
import Control.Monad
import System.Environment (getArgs)
import System.Exit
-- Packages will end up in ghc-$GHC_MAJOR.$GHC_MINOR. If the directory does
-- not exist, it is created. If the package already exists in that directory,
-- it is overwritten.
main = do
args <- fixAllArg `fmap` getArgs
when (null args) $ do
putStrLn $ "Usage: runghc build-release.hs [no-rebuild] formats\n"
putStrLn $ "Supported formats: deb, tarball, all\n"
putStrLn $ "no-rebuild\n Repackage whatever is already in the " ++
"_build directory\n instead of rebuilding from scratch."
exitFailure
when ("--debghcdeps" `elem` args) $ do
_ <- shell $ do
v <- init `fmap` run "ghc" ["--numeric-version"] ""
let (major, '.':rest) = break (== '.') v
(minor, _) = break (== '.') rest
intminor = read minor :: Int
minStr = major ++ "." ++ minor
maxStr = major ++ "." ++ show (intminor+1)
liftIO . putStr $ "ghc (>= " ++ minStr ++ "), ghc (<< " ++ maxStr ++ ")"
exitSuccess
res <- shell $ do
srcdir <- pwd
isdir <- isDirectory "_build"
when (isdir && not ("no-rebuild" `elem` args)) $ rmdir "_build"
mkdir True "_build"
inDirectory "_build" $ do
unless ("no-rebuild" `elem` args) $ do
run_ "git" ["clone", srcdir] ""
inDirectory "haste-compiler" $ do
(ver, ghcver) <- if ("no-rebuild" `elem` args)
then do
getVersions
else do
vers <- buildPortable
bootPortable
return vers
let (major, '.':rest) = break (== '.') ghcver
(minor, _) = break (== '.') rest
outdir = ".." </> ".." </> ("ghc-" ++ major ++ "." ++ minor)
mkdir True outdir
when ("tarball" `elem` args) $ do
tar <- buildBinaryTarball ver ghcver
mv tar (outdir </> tar)
when ("deb" `elem` args) $ do
deb <- buildDebianPackage srcdir ver ghcver
mv (".." </> deb) (outdir </> deb)
case res of
Left err -> error $ "FAILED: " ++ err
_ -> return ()
where
fixAllArg args | "all" `elem` args = "deb" : "tarball" : args
| otherwise = args
buildPortable = do
-- Build compiler
run_ "cabal" ["configure", "-f", "portable", "-f", "static"] ""
run_ "cabal" ["build"] ""
-- Strip symbols
case os of
"mingw32" -> do
-- windows
run_ "strip" ["-s", "haste-compiler\\bin\\haste-boot.exe"] ""
run_ "strip" ["-s", "haste-compiler\\bin\\haste-pkg.exe"] ""
run_ "strip" ["-s", "haste-compiler\\bin\\haste-inst.exe"] ""
run_ "strip" ["-s", "haste-compiler\\bin\\hastec.exe"] ""
"linux" -> do
-- linux
run_ "strip" ["-s", "haste-compiler/bin/haste-boot"] ""
run_ "strip" ["-s", "haste-compiler/bin/haste-pkg"] ""
run_ "strip" ["-s", "haste-compiler/bin/haste-inst"] ""
run_ "strip" ["-s", "haste-compiler/bin/hastec"] ""
_ -> do
-- darwin
run_ "strip" ["haste-compiler/bin/haste-boot"] ""
run_ "strip" ["haste-compiler/bin/haste-pkg"] ""
run_ "strip" ["haste-compiler/bin/haste-inst"] ""
run_ "strip" ["haste-compiler/bin/hastec"] ""
-- Get versions
getVersions
getVersions = do
ver <- fmap init $ run "haste-compiler/bin/hastec" ["--version"] ""
ghcver <- fmap init $ run "ghc" ["--numeric-version"] ""
return (ver, ghcver)
bootPortable = do
-- Build libs
run_ "haste-compiler/bin/haste-boot" ["--force", "--local"] ""
-- Remove unnecessary binaries
case os of
"mingw32" -> do
-- windows
rm "haste-compiler\\bin\\haste-copy-pkg.exe"
rm "haste-compiler\\bin\\haste-install-his.exe"
rm "haste-compiler\\bin\\haste-boot.exe"
_ -> do
-- linux/darwin
rm "haste-compiler/bin/haste-copy-pkg"
rm "haste-compiler/bin/haste-install-his"
rm "haste-compiler/bin/haste-boot"
forEachFile "haste-compiler" $ \f -> do
when ((f `hasExt` ".o") || (f `hasExt` ".a")) $ rm f
where
f `hasExt` e = takeExtension f == e
buildBinaryTarball ver ghcver = do
-- Get versions and create binary tarball
run_ "tar" ["-cjf", tarball, "haste-compiler"] ""
return tarball
where
tarball =
concat ["haste-compiler-",ver,"-ghc-",ghcver,"-",os,"-",arch,".tar.bz2"]
arch = if bits == 64 then "amd64" else "i686"
where
#if __GLASGOW_HASKELL__ >= 708
bits = finiteBitSize (0 :: Int)
#else
bits = bitSize (0 :: Int)
#endif
-- Debian packaging based on https://wiki.debian.org/IntroDebianPackaging.
-- Requires build-essential, devscripts and debhelper.
buildDebianPackage srcdir ver ghcver = do
run_ "debuild" ["-us", "-uc", "-b"] ""
return $ "haste-compiler_" ++ ver ++ "-1_" ++ arch ++ ".deb"
| joelburget/haste-compiler | build-release.hs | bsd-3-clause | 5,145 | 17 | 18 | 1,513 | 1,326 | 686 | 640 | 104 | 3 |
module Parser where
import TreeSimple
import Text.ParserCombinators.Parsec
symbol :: Parser Char
symbol = oneOf "#!$%&|+-/*:<=>?@^_~"
------------------------------------------------------------------
-----------------------------------------------------------------
doParse :: String -> TreeS
doParse x = case (parse parseFile "tree" x ) of
Left err -> (Leaf 0)
Right xs -> xs
{-
instance Read TreeS where
read = doParse
-}
parseFile :: Parser TreeS
parseFile = do
tree <- parseTree
anyToken
eof
return tree
parseTree :: Parser TreeS
parseTree = parseNode <|> parseLeaf
parseNode :: Parser TreeS
parseNode = do
try (char '(')
l<-parseTree
char ' '
r<-parseTree
char ')'
return (Node l r)
parseLeaf :: Parser TreeS
parseLeaf = do
num <- many1 digit
return (Leaf (read num))
| Benzi-Junior/TreeExample | src/Parser.hs | bsd-3-clause | 809 | 2 | 11 | 144 | 244 | 118 | 126 | 29 | 2 |
module Data.Deciparsec.Internal.Types
( ParserT(ParserT), ParserState (ParserState), Input(I), Added(A), More(Complete, Incomplete)
, Failure, Success, IResult(..), ParseError(ParseError), Message(Message, UnExpect, Expect), SourcePos(SourcePos)
, unI, runParserT_, psPos, psState, errMsgs, spName, spLine, spColumn, addS
) where
import Control.Applicative
import Control.Monad
import Data.Data
import Data.List
import Data.Monoid
import Control.Monad.Trans.Class
import Control.DeepSeq
data Message = SysUnExpect { msgStr :: !String }
| UnExpect { msgStr :: !String }
| Expect { msgStr :: !String }
| Message { msgStr :: !String }
deriving Show
data ParseError = ParseError { errPos :: !SourcePos, errMsgs :: [Message] }
-- |A representation of the current position of the parser.
data SourcePos = SourcePos { spName :: String -- ^The name of the source "file". Field accessor
, spLine :: !Int -- ^The current line. Field accessor
, spColumn :: !Int -- ^The current column. Field accessor
}
deriving (Eq, Ord, Data, Typeable)
-- |The result of a parse.
--
-- For the 'Functor' instance, only the final result in 'Done'
-- will be transformed (whether immediately available, or
-- eventually received from supplying more tokens to a 'Partial'
-- result).
data IResult s u m r = Fail s u ParseError
-- ^The parse failed. The @s@ parameter is the input that had not yet
-- been consumed when the failure occurred, the @u@ parameter is the
-- user state when the failure occurred, and the last is the error
-- describing the failure, including its position.
| Partial (s -> m (IResult s u m r))
-- ^A continuation that can be supplied with more input to continue
-- parsing. If no more input is available, provide an
-- 'Data.Deciparsec.Internal.TokenSeq.empty' sequence.
| Done s u r
-- ^The parse succeeded. The @s@ parameter is the input that had not
-- yet been consumed (if any) when the parse succeeded, the @u@
-- parameter is the final user state, and the @r@ parameter is the
-- result of the parser.
data ParserState u = ParserState { psPos :: !SourcePos, psState :: !u }
newtype Input s = I { unI :: s } deriving Monoid
newtype Added s = A { unA :: s } deriving Monoid
data More = Complete | Incomplete
deriving (Eq, Show)
type Failure s u m r = ParserState u -> Input s -> Added s -> More -> ParseError -> m (IResult s u m r)
type Success s u a m r = ParserState u -> Input s -> Added s -> More -> a -> m (IResult s u m r)
-- |The general parser transformer type.
newtype ParserT s u m a = ParserT { runParserT_ :: forall r. ParserState u -> Input s -> Added s -> More
-> Failure s u m r
-> Success s u a m r
-> m (IResult s u m r) }
fmapP :: (a -> b) -> ParserT s u m a -> ParserT s u m b
fmapP f p = ParserT $ \s0 i0 a0 m0 kf ks -> runParserT_ p s0 i0 a0 m0 kf $ \s1 i1 a1 m1 x -> ks s1 i1 a1 m1 (f x)
addS :: Monoid s =>
Input s -> Added s -> More
-> Input s -> Added s -> More
-> (Input s -> Added s -> More -> m r) -> m r
addS i0 a0 m0 _i0 a1 m1 f = let !i = i0 <> I (unA a1)
a = a0 <> a1
!m = m0 <> m1
in f i a m
{-# INLINE addS #-}
noAddS :: Monoid s =>
Input s -> Added s -> More
-> (Input s -> Added s -> More -> m r) -> m r
noAddS i0 _ m0 f = f i0 mempty m0
{-# INLINE noAddS #-}
plusP :: Monoid s => ParserT s u m r -> ParserT s u m r -> ParserT s u m r
plusP a b = ParserT $ \s0 i0 a0 m0 kf ks ->
let kf' _ i1 a1 m1 _ = addS i0 a0 m0 i1 a1 m1 $ \i2 a2 m2 -> runParserT_ b s0 i2 a2 m2 kf ks
ks' s1 i1 a1 m1 = ks s1 i1 (a0 <> a1) m1
in noAddS i0 a0 m0 $ \i2 a2 m2 -> runParserT_ a s0 i2 a2 m2 kf' ks'
{-# INLINE plusP #-}
returnP :: r -> ParserT s u m r
returnP r = ParserT $ \s i a m _ ks -> ks s i a m r
{-# INLINE returnP #-}
bindP :: ParserT s u m a -> (a -> ParserT s u m b) -> ParserT s u m b
bindP p k =
ParserT $ \s0 i0 a0 m0 kf ks -> runParserT_ p s0 i0 a0 m0 kf $ \s1 i1 a1 m1 x -> runParserT_ (k x) s1 i1 a1 m1 kf ks
{-# INLINE bindP #-}
failP :: String -> ParserT s u m r
failP err = ParserT $ \s i a m kf _ -> kf s i a m $ ParseError (psPos s) [Message err]
{-# INLINE failP #-}
instance NFData Message where
rnf _ = ()
{-# INLINE rnf #-}
instance NFData ParseError where
rnf (ParseError sp msgs) = rnf sp `seq` rnf msgs
{-# INLINE rnf #-}
instance (Show s, Show u, Show r) => Show (IResult s u m r) where
show (Fail s u pe) = "Fail " ++ show s ++ " " ++ show u ++ " " ++ show pe
show (Partial _ ) = "Partial _"
show (Done s u r ) = "Done " ++ show s ++ " " ++ show u ++ " " ++ show r
instance (NFData s, NFData u, NFData r) => NFData (IResult s u m r) where
    rnf (Fail s u pe) = rnf s `seq` rnf u `seq` rnf pe
    rnf (Partial _  ) = ()
    rnf (Done s u r ) = rnf s `seq` rnf u `seq` rnf r
    {-# INLINE rnf #-}
instance Functor m => Functor (IResult s u m) where
fmap f (Done s u r ) = Done s u $ f r
fmap f (Partial k ) = Partial (fmap (fmap f) . k)
fmap _ (Fail s u pe) = Fail s u pe
instance Monoid More where
mappend Complete _ = Complete
mappend _ m = m
{-# INLINE mappend #-}
mempty = Incomplete
{-# INLINE mempty #-}
instance Functor (ParserT s u m) where
fmap = fmapP
{-# INLINE fmap #-}
instance Applicative (ParserT s u m) where
pure = returnP
{-# INLINE pure #-}
(<*>) = ap
{-# INLINE (<*>) #-}
{-
# if MIN_VERSION_base(4,2,0)
(*>) = (>>)
{-# INLINE (*>) #-}
x <* y = x >>+ \a -> y >> return a
{-# INLINE (<*) #-}
# endif
-}
instance Monoid s => Alternative (ParserT s u m) where
empty = failP "empty"
{-# INLINE empty #-}
(<|>) = plusP
{-# INLINE (<|>) #-}
{-
#if MIN_VERSION_base(4,2,0)
many p = many_p
where many_p = some_p <|> pure []
some_p = (:) <$> p <*> many_p
{-# INLINE many #-}
some p = some_p
where many_p = some_p <|> pure []
some_p = (:) <$> p <*> many_p
{-# INLINE some #-}
#endif
-}
instance Monoid s => Monoid (ParserT s u m r) where
mempty = failP "mempty"
{-# INLINE mempty #-}
mappend = plusP
{-# INLINE mappend #-}
instance Monad (ParserT s u m) where
return = returnP
{-# INLINE return #-}
fail = failP
{-# INLINE fail #-}
(>>=) = bindP
{-# INLINE (>>=) #-}
instance Monoid s => MonadPlus (ParserT s u m) where
mzero = failP "mzero"
{-# INLINE mzero #-}
mplus = plusP
{-# INLINE mplus #-}
instance MonadTrans (ParserT s u) where
lift mt = ParserT $ \s i a m _ ks -> mt >>= ks s i a m
instance Eq Message where
m1 == m2 = msgToNum m1 == msgToNum m2
instance Ord Message where
compare m1 m2 = compare (msgToNum m1) (msgToNum m2)
instance Show ParseError where
show err = show (errPos err) ++ ":\n" ++ showErrMsgs (errMsgs err)
-- FIXME: Special case where line = -1 (ie, don't show the line for things like binary files)
instance Show SourcePos where
show (SourcePos name line column)
| null name = showLineColumn
| otherwise = "\"" ++ name ++ "\" " ++ showLineColumn
where showLineColumn = "(line " ++ show line ++ ", column " ++ show column ++ ")"
instance NFData SourcePos where
rnf (SourcePos name line column) = rnf name `seq` rnf line `seq` rnf column -- FIXME: Can probably drop the last two parts as they are strict
msgToNum :: Message -> Int
msgToNum (SysUnExpect _) = 0
msgToNum (UnExpect _) = 1
msgToNum (Expect _) = 2
msgToNum (Message _) = 3
{-# INLINE msgToNum #-}
showErrMsgs :: [Message] -> String
showErrMsgs [] = "unknown parse error"
showErrMsgs msgs0 = unlines $ clean $ [showSys, showUn, showExp, showMsg]
where (sys, msgs1) = span ((SysUnExpect "") ==) $ sort msgs0
(un , msgs2) = span ((UnExpect "") ==) msgs1
(ex , msgs3) = span ((Expect "") ==) msgs2
showMsg = showMany "" msgs3
showExp = showMany "expecting " ex
showUn = showMany "unexpected " un
showSys | not (null un) || null sys = ""
| null $ msgStr $ head sys = "unexpected end of input"
| otherwise = "unexpected " ++ msgStr (head sys)
showMany pre msgs =
case clean (map msgStr msgs) of
[] -> ""
ms -> pre ++ concOr ms
concOr [] = ""
concOr [m] = m
concOr ms = intercalate ", " (init ms) ++ " or " ++ last ms
clean = nub . filter (not . null)
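-- For example (sketch, following the definitions above):
-- showErrMsgs [SysUnExpect "'x'", Expect "digit", Expect "'-'"]
-- renders as the two lines "unexpected 'x'" and "expecting digit or '-'".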
| d3tucker/deciparsec | src/Data/Deciparsec/Internal/Types.hs | bsd-3-clause | 9,120 | 19 | 17 | 3,026 | 2,834 | 1,481 | 1,353 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UnicodeSyntax #-}
{-# LANGUAGE ViewPatterns #-}
module Yesod.Squealer.Routes where
import Data.Text (Text)
import Database.Squealer (Database)
import Yesod.Core (Yesod, renderRoute)
import Yesod.Core.Dispatch (mkYesodSubData, parseRoutes)
import Yesod.Core.Types (HandlerT)
import Database.PostgreSQL.Simple (Connection)
data Squealer
= Squealer
{ database ∷ Database
}
mkYesodSubData "Squealer" [parseRoutes|
/ DatabaseR OPTIONS GET
/#Text TableR OPTIONS GET
/#Text/column/#Text ColumnR OPTIONS GET
/#Text/rows RowsR OPTIONS GET POST
/#Text/version/#Text VersionR OPTIONS GET PUT DELETE
/#Text/version/#Text/predecessor PredecessorR OPTIONS GET
/#Text/version/#Text/successor SuccessorR OPTIONS GET
|]
class Yesod master ⇒ YesodSquealer master where
withConnection
∷ (Connection → HandlerT master IO a)
→ HandlerT master IO a
type SquealerHandler a
= ∀ master. YesodSquealer master
⇒ HandlerT Squealer (HandlerT master IO) a
| mgomezch/yesod-squealer | source/Yesod/Squealer/Routes.hs | bsd-3-clause | 1,372 | 0 | 10 | 394 | 190 | 111 | 79 | 25 | 0 |
{-# LANGUAGE PackageImports, BangPatterns #-}
module Data.Tree23.Tree23 (
Tree,
empty, singleton,
null, size,
insertWith,
delete, -- update,
member, lookup,
mapEntries, mapEntriesValues, mapEntriesKeysMonotonic,
minimum, maximum,
toList,
) where
import Prelude hiding (null, lookup, maximum, minimum)
import Data.Maybe as M
import Data.Ord
import qualified "dlist" Data.DList as D
import qualified Safe
import qualified Data.List as L
import Data.Tree23.Entry as E
data Tree k v = Nil | Branch2 (Tree k v) (Entry k v) (Tree k v)
| Branch3 (Tree k v) (Entry k v) (Tree k v) (Entry k v) (Tree k v) deriving (Eq, Show)
data Result k v = ResTree (Tree k v) | ResBranch4 (Tree k v) (Entry k v) (Tree k v) (Entry k v) (Tree k v) (Entry k v) (Tree k v)
empty :: Tree k v
empty = Nil
singleton :: k -> v -> Tree k v
singleton k v = Branch2 Nil (Entry k v Valid) Nil
null :: Tree k v -> Bool
null Nil = True
null _ = False
size :: Tree k v -> Int
size Nil = 0
size (Branch2 esq _ dreta) = 1 + size esq + size dreta
size (Branch3 esq _ mig _ dreta) = 2 + size esq + size mig + size dreta
-- insertWith: insert or update (strict to avoid O(n) stack pending ops when used with List.foldl').
insertWith :: Ord k => (v -> v -> v) -> (k, v) -> Tree k v -> Tree k v
insertWith f (k, v) Nil = singleton k v
insertWith f (k, v) !arb = case insertToRes f (Entry k v Valid) arb of
ResTree res -> res
ResBranch4 ch1 a ch2 b ch3 c ch4 -> Branch2 (Branch2 ch1 a ch2) b (Branch2 ch3 c ch4)
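-- Usage sketch (illustrative, not part of the original API): build a tree from
-- an association list with a strict left fold, combining values on key collisions.
-- fromListWith :: Ord k => (v -> v -> v) -> [(k, v)] -> Tree k v
-- fromListWith f = L.foldl' (flip (insertWith f)) empty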
-- private: insertToRes entry with collision combine function f
insertToRes :: Ord k => (v -> v -> v) -> Entry k v -> Tree k v -> Result k v
insertToRes f x Nil = ResTree $ Branch2 Nil x Nil
insertToRes f x (Branch2 Nil y Nil)
| x == y = ResTree $ Branch2 Nil (combineEntry f y x) Nil
| x < y = ResTree $ Branch3 Nil x Nil y Nil
| otherwise = ResTree $ Branch3 Nil y Nil x Nil
insertToRes f x (Branch3 Nil y Nil z Nil)
| x == y = ResTree $ Branch3 Nil (combineEntry f y x) Nil z Nil
| x == z = ResTree $ Branch3 Nil y Nil (combineEntry f z x) Nil
| x < y = ResBranch4 Nil x Nil y Nil z Nil
| x < z = ResBranch4 Nil y Nil x Nil z Nil
| otherwise = ResBranch4 Nil y Nil z Nil x Nil
insertToRes f x (Branch2 esq y dreta)
| x == y = ResTree $ Branch2 esq (combineEntry f y x) dreta
| x < y = case insertToRes f x esq of
ResTree arb -> ResTree $ Branch2 arb y dreta
ResBranch4 ch1 a ch2 b ch3 c ch4 -> ResTree $ Branch3 (Branch2 ch1 a ch2) b (Branch2 ch3 c ch4) y dreta
| otherwise = case insertToRes f x dreta of
ResTree arb -> ResTree $ Branch2 esq y arb
ResBranch4 ch1 a ch2 b ch3 c ch4 -> ResTree $ Branch3 esq y (Branch2 ch1 a ch2) b (Branch2 ch3 c ch4)
insertToRes f x (Branch3 esq y mig z dreta)
| x == y = ResTree $ Branch3 esq (combineEntry f y x) mig z dreta
| x == z = ResTree $ Branch3 esq y mig (combineEntry f z x) dreta
| x < y = case insertToRes f x esq of
ResTree arb -> ResTree $ Branch3 arb y mig z dreta
ResBranch4 ch1 a ch2 b ch3 c ch4 -> ResBranch4 (Branch2 ch1 a ch2) b (Branch2 ch3 c ch4) y mig z dreta
| x < z = case insertToRes f x mig of
ResTree arb -> ResTree $ Branch3 esq y arb z dreta
ResBranch4 ch1 a ch2 b ch3 c ch4 -> ResBranch4 esq y (Branch2 ch1 a ch2) b (Branch2 ch3 c ch4) z dreta
| otherwise = case insertToRes f x dreta of
ResTree arb -> ResTree $ Branch3 esq y mig z arb
ResBranch4 ch1 a ch2 b ch3 c ch4 -> ResBranch4 esq y mig z (Branch2 ch1 a ch2) b (Branch2 ch3 c ch4)
{-
update :: Ord k => Entry k v -> Tree k v -> Tree k v
update _ Nil = Nil
update x ar @ (Branch2 Nil y Nil)
| x == y = Branch2 Nil x Nil
| otherwise = ar
update x ar @ (Branch3 Nil y Nil z Nil)
| x == y = Branch3 Nil x Nil z Nil
| x == z = Branch3 Nil y Nil x Nil
| otherwise = ar
update x ar @ (Branch2 esq y dreta)
| x == y = Branch2 esq x dreta
| x < y = Branch2 (update x esq) y dreta
| otherwise = Branch2 esq y (update x dreta)
update x ar @ (Branch3 esq y mig z dreta)
| x == y = Branch3 esq x mig z dreta
| x == z = Branch3 esq y mig x dreta
| x < y = Branch3 (update x esq) y mig z dreta
| x < z = Branch3 esq y (update x mig) z dreta
| otherwise = Branch3 esq y mig z (update x dreta)
-}
lookup :: Ord k => k -> Tree k v -> Maybe (k, v)
lookup k Nil = Nothing
lookup k (Branch2 esq y dreta)
| k == key y = E.toMaybe y
| k < key y = lookup k esq
| otherwise = lookup k dreta
lookup k (Branch3 esq y mig z dreta)
| k == key y = E.toMaybe y
| k == key z = E.toMaybe z
| k < key y = lookup k esq
| k < key z = lookup k mig
| otherwise = lookup k dreta
member :: Ord k => k -> Tree k v -> Bool
member k t = isJust $ lookup k t
delete :: Ord k => k -> Tree k v -> Tree k v
delete _ Nil = Nil
delete k !ar @ (Branch2 esq y dreta)
| k == key y = Branch2 esq (invalidate y) dreta
| k < key y = Branch2 (delete k esq) y dreta
| otherwise = Branch2 esq y (delete k dreta)
delete k !ar @ (Branch3 esq y mig z dreta)
| k == key y = Branch3 esq (invalidate y) mig z dreta
| k == key z = Branch3 esq y mig (invalidate z) dreta
| k < key y = Branch3 (delete k esq) y mig z dreta
| k < key z = Branch3 esq y (delete k mig) z dreta
| otherwise = Branch3 esq y mig z (delete k dreta)
toDList :: Tree k v -> D.DList (k, v)
toDList Nil = D.empty
toDList (Branch2 esq x dreta) = toDList esq `D.append` valToDList x `D.append` toDList dreta
toDList (Branch3 esq x mig y dreta) = toDList esq `D.append` valToDList x `D.append` toDList mig `D.append` valToDList y `D.append` toDList dreta
toList :: Tree k v -> [(k, v)]
toList = D.toList . toDList
mapEntriesValues :: (Entry k v1 -> Entry k v2) -> Tree k v1 -> Tree k v2
mapEntriesValues f Nil = Nil
mapEntriesValues f (Branch2 esq x dreta) = Branch2 (mapEntriesValues f esq) (f x) (mapEntriesValues f dreta)
mapEntriesValues f (Branch3 esq x mig y dreta) = Branch3 (mapEntriesValues f esq) (f x) (mapEntriesValues f mig) (f y) (mapEntriesValues f dreta)
mapEntriesKeysMonotonic :: (Ord k1, Ord k2) => (Entry k1 v -> Entry k2 v) -> Tree k1 v -> Tree k2 v
mapEntriesKeysMonotonic f Nil = Nil
mapEntriesKeysMonotonic f (Branch2 esq x dreta) = Branch2 (mapEntriesKeysMonotonic f esq) (f x) (mapEntriesKeysMonotonic f dreta)
mapEntriesKeysMonotonic f (Branch3 esq x mig y dreta) = Branch3 (mapEntriesKeysMonotonic f esq) (f x) (mapEntriesKeysMonotonic f mig) (f y) (mapEntriesKeysMonotonic f dreta)
mapEntries :: (Entry k v -> Entry k v) -> Tree k v -> Tree k v
mapEntries f Nil = Nil
mapEntries f (Branch2 esq x dreta) = Branch2 (mapEntries f esq) (f x) (mapEntries f dreta)
mapEntries f (Branch3 esq x mig y dreta) = Branch3 (mapEntries f esq) (f x) (mapEntries f mig) (f y) (mapEntries f dreta)
minimum, maximum :: Ord k => Tree k v -> Maybe (k, v)
minimum Nil = Nothing
minimum (Branch2 esq x dreta) = firstOfMaybes [(minimum esq), E.toMaybe x, (minimum dreta)]
minimum (Branch3 esq x mig y dreta) = firstOfMaybes [(minimum esq), E.toMaybe x, (minimum mig), E.toMaybe y, (minimum dreta)]
maximum Nil = Nothing
maximum (Branch2 esq x dreta) = firstOfMaybes [(maximum dreta), E.toMaybe x, (maximum esq)]
maximum (Branch3 esq x mig y dreta) = firstOfMaybes [(maximum dreta), E.toMaybe y, (maximum mig), E.toMaybe x, (maximum esq)]
-- private
firstOfMaybes :: [Maybe a] -> Maybe a
firstOfMaybes xs = Safe.headDef Nothing (L.dropWhile isNothing xs)
------------------------------------------------------------------------------------
{-
foldMapKey :: Monoid m => (k -> m) -> Tree k v -> m
foldMapKey f Nil = mempty
-- foldMap elements from the right
foldMapKey f (Branch2 l e1 r) = foldMapKey f r <> foldEntryKey f e1 <> foldMapKey f l
foldMapKey f (Branch3 l e1 mid e2 r) = foldMapKey f r <> foldEntryKey f e2 <> foldMapKey f mid <> foldEntryKey f e1 <> foldMapKey f l
foldMapVal :: Monoid m => (v -> m) -> Tree k v -> m
foldMapVal f Nil = mempty
-- foldMap elements from the right
foldMapVal f (Branch2 l e1 r) = foldMapVal f r <> foldEntryVal f e1 <> foldMapVal f l
foldMapVal f (Branch3 l e1 mid e2 r) = foldMapVal f r <> foldEntryVal f e2 <> foldMapVal f mid <> foldEntryVal f e1 <> foldMapVal f l
-} | griba2001/tree23-map-set | src/Data/Tree23/Tree23.hs | bsd-3-clause | 8,635 | 7 | 13 | 2,398 | 3,317 | 1,641 | 1,676 | 122 | 6 |
{-# LANGUAGE PatternGuards #-}
-- | A Ninja style environment, equivalent to a non-empty list of mutable hash tables.
module Development.Ninja.Env(
Env, newEnv, scopeEnv, addEnv, askEnv, fromEnv
) where
import qualified Data.HashMap.Strict as Map
import Data.Hashable
import Data.IORef
data Env k v = Env (IORef (Map.HashMap k v)) (Maybe (Env k v))
instance Show (Env k v) where show _ = "Env"
newEnv :: IO (Env k v)
newEnv = do ref <- newIORef Map.empty; pure $ Env ref Nothing
scopeEnv :: Env k v -> IO (Env k v)
scopeEnv e = do ref <- newIORef Map.empty; pure $ Env ref $ Just e
addEnv :: (Eq k, Hashable k) => Env k v -> k -> v -> IO ()
addEnv (Env ref _) k v = modifyIORef ref $ Map.insert k v
askEnv :: (Eq k, Hashable k) => Env k v -> k -> IO (Maybe v)
askEnv (Env ref e) k = do
mp <- readIORef ref
case Map.lookup k mp of
Just v -> pure $ Just v
Nothing | Just e <- e -> askEnv e k
_ -> pure Nothing
fromEnv :: Env k v -> IO (Map.HashMap k v)
fromEnv (Env ref _) = readIORef ref
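-- Usage sketch (illustrative): a scoped child environment sees its parent's
-- bindings, while additions to the child never leak back into the parent.
-- demo :: IO (Maybe Int, Maybe Int)
-- demo = do
--     parent <- newEnv
--     addEnv parent "x" (1 :: Int)
--     child <- scopeEnv parent
--     addEnv child "y" 2
--     (,) <$> askEnv child "x" <*> askEnv parent "y"  -- expected: (Just 1, Nothing)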
| ndmitchell/shake | src/Development/Ninja/Env.hs | bsd-3-clause | 1,041 | 0 | 14 | 259 | 479 | 239 | 240 | 23 | 3 |
-- Problem 44: Pentagon Numbers
--
-- https://projecteuler.net/problem=44
--
-- Pentagonal numbers are generated by the formula, Pn=n(3n−1)/2. The first ten pentagonal numbers are:
--
-- 1, 5, 12, 22, 35, 51, 70, 92, 117, 145, ...
--
-- It can be seen that P4 + P7 = 22 + 70 = 92 = P8. However, their difference, 70 − 22 = 48, is not pentagonal.
--
-- Find the pair of pentagonal numbers, Pj and Pk, for which their sum and difference are pentagonal and D = |Pk − Pj| is minimised;
-- what is the value of D?
import Data.List
pentagon :: Integer -> Integer
pentagon n = n * (3 * n - 1) `div` 2
natural :: Double -> Bool
natural n = floor n == ceiling n
pentagonal :: Integer -> Bool
pentagonal n = natural $ (sqrt (24 * (fromIntegral n) + 1) + 1) / 6
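-- Illustrative checks of the formulas above (doctest-style sketch):
-- >>> map pentagon [1..5]
-- [1,5,12,22,35]
-- >>> pentagonal 92 -- P4 + P7 = 22 + 70 = 92 = P8
-- True
-- >>> pentagonal 48 -- but the difference 70 - 22 = 48 is not pentagonal
-- False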
minDiffs :: [Integer] -> [Integer]
minDiffs xs = sort [b - a | a <- xs, b <- xs, a < b, pentagonal (b + a), pentagonal (b - a)]
main = putStrLn $ show $ minDiffs [pentagon n | n <- [1..10000]]
| moddy3d/euler | p44/p44.hs | mit | 957 | 1 | 14 | 203 | 261 | 139 | 122 | 10 | 1 |
--
-- Chapter 9.
--
module C'9 where
import Test.QuickCheck
import E'9'15
import E'9'14
import E'9'13
import E'9'12
import E'9'11
import E'9'10
import E'9''9
import E'9''8
import E'9''7
import E'9''6
import E'9''5
import E'9''4
import E'9''3
import E'9''2
import E'9''1 | pascal-knodel/haskell-craft | _/links/C'9.hs | mit | 276 | 0 | 4 | 46 | 57 | 39 | 18 | 17 | 0 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveAnyClass #-}
-- |
-- Copyright : (c) 2011, 2012 Benedikt Schmidt & Simon Meier
-- License : GPL v3 (see LICENSE)
--
-- Maintainer : Simon Meier <[email protected]>
-- Portability : GHC only
--
-- Facts used to formulate and reason about protocol execution.
module Theory.Model.Fact (
-- * Fact
Fact(..)
, Multiplicity(..)
, FactTag(..)
, FactAnnotation(..)
, matchFact
, normFact
-- ** Queries
, isLinearFact
, isPersistentFact
, isProtoFact
, factTagName
, showFactTag
, showFactTagArity
, factTagArity
, factTagMultiplicity
, factArity
, factMultiplicity
, getLeftFact
, getRightFact
, getFactVariables
, getFactTerms
, getFactAnnotations
, isTrivialFact
, isSolveFirstFact
, isSolveLastFact
, isNoSourcesFact
, DirTag(..)
, kuFact
, kuFactAnn
, kdFact
, kdFactAnn
, termFact
, kFactView
, dedFactView
, isKFact
, isKUFact
, isKDFact
, isKDXorFact
, convertKUtoKD
, convertKDtoKU
-- ** Construction
, freshFact
, outFact
, inFact
, inFactAnn
, kLogFact
, dedLogFact
, protoFact
, protoFactAnn
, annotateFact
-- * NFact
, NFact
-- * LFact
, LFact
, LNFact
, unifyLNFactEqs
, unifiableLNFacts
-- * Pretty-Printing
, prettyFact
, prettyNFact
, prettyLNFact
) where
-- import Control.Basics
import Control.DeepSeq
import Control.Monad.Reader
import GHC.Generics (Generic)
import Data.Binary
-- import Data.Foldable (Foldable(..))
import Data.Data
import Data.Maybe (isJust)
-- import Data.Monoid
-- import Data.Traversable (Traversable(..))
import Data.List (isPrefixOf)
import qualified Data.Set as S
import Term.Unification
import Term.Rewriting.Norm
import Text.PrettyPrint.Class
------------------------------------------------------------------------------
-- Fact
------------------------------------------------------------------------------
data Multiplicity = Persistent | Linear
deriving( Eq, Ord, Show, Typeable, Data, Generic, NFData, Binary )
-- | Fact tags/symbols
data FactTag = ProtoFact Multiplicity String Int
-- ^ A protocol fact together with its arity and multiplicity.
| FreshFact -- ^ Freshly generated value.
| OutFact -- ^ Sent by the protocol
| InFact -- ^ Officially known by the intruder/network.
             | KUFact     -- ^ Up-knowledge fact in message deduction.
| KDFact -- ^ Down-knowledge fact in message deduction.
| DedFact -- ^ Log-fact denoting that the intruder deduced
-- a message using a construction rule.
| TermFact -- ^ internal fact, only used to convert terms to facts
-- to simplify computations. should never occur in a graph.
deriving( Eq, Ord, Show, Typeable, Data, Generic, NFData, Binary )
-- | Annotations are properties that might be used elsewhere (e.g. in
-- dot rendering, or for sorting by heuristics) but do not affect
-- the semantics of the fact
data FactAnnotation = SolveFirst | SolveLast | NoSources
deriving( Eq, Ord, Show, Typeable, Data, Generic, NFData, Binary )
-- | Facts.
data Fact t = Fact
{ factTag :: FactTag
, factAnnotations :: S.Set FactAnnotation
, factTerms :: [t]
}
deriving( Show, Typeable, Data, Generic, NFData, Binary )
-- Instances
------------
-- Ignore annotations in equality and ord testing
instance Eq t => Eq (Fact t) where
(==) (Fact tag _ ts) (Fact tag' _ ts') = (tag == tag') && (ts == ts')
instance Ord t => Ord (Fact t) where
compare (Fact tag _ ts) (Fact tag' _ ts') = compare tag tag' <> compare ts ts'
instance Functor Fact where
fmap f (Fact tag an ts) = Fact tag an (fmap f ts)
instance Foldable Fact where
foldMap f (Fact _ _ ts) = foldMap f ts
instance Traversable Fact where
sequenceA (Fact tag an ts) = Fact tag an <$> sequenceA ts
traverse f (Fact tag an ts) = Fact tag an <$> traverse f ts
instance Sized t => Sized (Fact t) where
size (Fact _ _ args) = size args
instance HasFrees t => HasFrees (Fact t) where
foldFrees f = foldMap (foldFrees f)
foldFreesOcc f c fa = foldFreesOcc f ((show $ factTag fa):c) (factTerms fa)
mapFrees f = traverse (mapFrees f)
instance Apply t => Apply (Fact t) where
apply subst = fmap (apply subst)
-- KU and KD facts
------------------
-- | A direction tag
data DirTag = UpK | DnK
deriving( Eq, Ord, Show )
kdFact, kuFact, termFact :: t -> Fact t
kdFact = Fact KDFact S.empty . return
kuFact = Fact KUFact S.empty . return
termFact = Fact TermFact S.empty . return
-- | Make annotated KU/KD facts
kdFactAnn, kuFactAnn :: S.Set FactAnnotation -> t -> Fact t
kdFactAnn ann = Fact KDFact ann . return
kuFactAnn ann = Fact KUFact ann . return
-- | View a message-deduction fact.
kFactView :: LNFact -> Maybe (DirTag, LNTerm)
kFactView fa = case fa of
Fact KUFact _ [m] -> Just (UpK, m)
Fact KUFact _ _ -> errMalformed "kFactView" fa
Fact KDFact _ [m] -> Just (DnK, m)
Fact KDFact _ _ -> errMalformed "kFactView" fa
_ -> Nothing
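-- For instance (sketch): kFactView (kuFact m) == Just (UpK, m),
-- kFactView (kdFact m) == Just (DnK, m), and any non-K fact gives Nothing.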
-- | View a deduction logging fact.
dedFactView :: LNFact -> Maybe LNTerm
dedFactView fa = case fa of
Fact DedFact _ [m] -> Just m
Fact DedFact _ _ -> errMalformed "dedFactView" fa
_ -> Nothing
-- | True if the fact is a message-deduction fact.
isKFact :: LNFact -> Bool
isKFact = isJust . kFactView
-- | True if the fact is a KU-fact.
isKUFact :: LNFact -> Bool
isKUFact (Fact KUFact _ _) = True
isKUFact _ = False
-- | True if the fact is a KD-fact.
isKDFact :: LNFact -> Bool
isKDFact (Fact KDFact _ _) = True
isKDFact _ = False
-- | True if the fact is a KD-fact concerning an Xor Term.
isKDXorFact :: LNFact -> Bool
isKDXorFact (Fact KDFact _ [m]) = isXor m
isKDXorFact _ = False
-- | converts a KU-Fact into a KD-Fact with the same terms
convertKUtoKD :: LNFact -> LNFact
convertKUtoKD (Fact KUFact a m) = (Fact KDFact a m)
convertKUtoKD f = f
-- | converts a KD-Fact into a KU-Fact with the same terms
convertKDtoKU :: LNFact -> LNFact
convertKDtoKU (Fact KDFact a m) = (Fact KUFact a m)
convertKDtoKU f = f
-- | Mark a fact as malformed.
errMalformed :: String -> LNFact -> a
errMalformed caller fa =
    error $ caller ++ ": malformed fact: " ++ show fa
-- Constructing facts
---------------------
-- | A fact denoting a message sent by the protocol to the intruder.
outFact :: t -> Fact t
outFact = Fact OutFact S.empty . return
-- | A fresh fact denotes a fresh unguessable name.
freshFact :: t -> Fact t
freshFact = Fact FreshFact S.empty . return
-- | A fact denoting that the intruder sent a message to the protocol.
inFact :: t -> Fact t
inFact = Fact InFact S.empty . return
-- | An annotated fact denoting that the intruder sent a message to the protocol.
inFactAnn :: S.Set FactAnnotation -> t -> Fact t
inFactAnn an = Fact InFact an . return
-- | A fact logging that the intruder knows a message.
kLogFact :: t -> Fact t
kLogFact = protoFact Linear "K" . return
-- | A fact logging that the intruder deduced a message using a construction
-- rule. We use this to formulate invariants over normal dependency graphs.
dedLogFact :: t -> Fact t
dedLogFact = Fact DedFact S.empty . return
-- | A protocol fact denotes a fact generated by a protocol rule.
protoFact :: Multiplicity -> String -> [t] -> Fact t
protoFact multi name ts = Fact (ProtoFact multi name (length ts)) S.empty ts
-- | An annotated fact denoting a fact generated by a protocol rule.
protoFactAnn :: Multiplicity -> String -> S.Set FactAnnotation -> [t] -> Fact t
protoFactAnn multi name an ts = Fact (ProtoFact multi name (length ts)) an ts
-- | Add annotations to an existing fact
annotateFact :: S.Set FactAnnotation -> Fact t -> Fact t
annotateFact ann' (Fact tag ann ts) = Fact tag (S.union ann' ann) ts
-- Queries on facts
-------------------
-- | True iff the fact is a non-special protocol fact.
isProtoFact :: Fact t -> Bool
isProtoFact (Fact (ProtoFact _ _ _) _ _) = True
isProtoFact _ = False
-- | True if the fact is a linear fact.
isLinearFact :: Fact t -> Bool
isLinearFact = (Linear ==) . factMultiplicity
-- | True if the fact is a persistent fact.
isPersistentFact :: Fact t -> Bool
isPersistentFact = (Persistent ==) . factMultiplicity
-- | The multiplicity of a 'FactTag'.
factTagMultiplicity :: FactTag -> Multiplicity
factTagMultiplicity tag = case tag of
ProtoFact multi _ _ -> multi
KUFact -> Persistent
KDFact -> Persistent
_ -> Linear
-- | The arity of a 'FactTag'.
factTagArity :: FactTag -> Int
factTagArity tag = case tag of
ProtoFact _ _ k -> k
KUFact -> 1
KDFact -> 1
DedFact -> 1
FreshFact -> 1
InFact -> 1
OutFact -> 1
TermFact -> 1
-- | The arity of a 'Fact'.
factArity :: Fact t -> Int
factArity (Fact tag _ ts)
| length ts == k = k
| otherwise = error $ "factArity: tag of arity " ++ show k ++
" applied to " ++ show (length ts) ++ " terms"
where
k = factTagArity tag
-- | The multiplicity of a 'Fact'.
factMultiplicity :: Fact t -> Multiplicity
factMultiplicity = factTagMultiplicity . factTag
-- | The terms of a 'Fact'.
getFactTerms :: Fact t -> [t]
getFactTerms (Fact _ _ ts) = ts
-- | Get the set of fact annotations
getFactAnnotations :: Fact t -> S.Set FactAnnotation
getFactAnnotations (Fact _ ann _) = ann
-- | Whether the fact has been marked as 'solve first' for the heuristic
isSolveFirstFact :: Fact t -> Bool
isSolveFirstFact (Fact tag ann _) = SolveFirst `S.member` ann || (isPrefixOf "F_" $ factTagName tag)
-- | Whether the fact has been marked as 'solve last' for the heuristic
isSolveLastFact :: Fact t -> Bool
isSolveLastFact (Fact tag ann _) = SolveLast `S.member` ann || (isPrefixOf "L_" $ factTagName tag)
-- | Whether the fact should not have its source solved
isNoSourcesFact :: Fact t -> Bool
isNoSourcesFact (Fact _ ann _) = NoSources `S.member` ann
------------------------------------------------------------------------------
-- NFact
------------------------------------------------------------------------------
-- | Facts with literals containing names and arbitrary variables.
type NFact v = Fact (NTerm v)
------------------------------------------------------------------------------
-- LFact
------------------------------------------------------------------------------
-- | Facts with literals arbitrary constants and logical variables.
type LFact c = Fact (LTerm c)
-- | Facts used for proving; i.e. variables fixed to logical variables
-- and constant fixed to names.
type LNFact = Fact LNTerm
-- | Unify a list of @LFact@ equalities.
unifyLNFactEqs :: [Equal LNFact] -> WithMaude [LNSubstVFresh]
unifyLNFactEqs eqs
| all (evalEqual . fmap factTag) eqs =
unifyLNTerm (map (fmap (fAppList . factTerms)) eqs)
| otherwise = return []
-- | 'True' iff the two facts are unifiable.
unifiableLNFacts :: LNFact -> LNFact -> WithMaude Bool
unifiableLNFacts fa1 fa2 = (not . null) <$> unifyLNFactEqs [Equal fa1 fa2]
-- | Normalize all terms in the fact
normFact :: LNFact -> WithMaude LNFact
normFact (Fact h an ts) = reader $ \hnd -> (Fact h an (map (\term -> runReader (norm' term) hnd) ts))
-- | @matchLFact t p@ is a complete set of AC matchers for the term fact @t@
-- and the pattern fact @p@.
matchFact :: Fact t -- ^ Term
-> Fact t -- ^ Pattern
-> Match t
matchFact t p =
matchOnlyIf (factTag t == factTag p &&
length (factTerms t) == length (factTerms p))
<> mconcat (zipWith matchWith (factTerms t) (factTerms p))
-- | Get "left" variant of a diff fact
getLeftFact :: LNFact -> LNFact
getLeftFact (Fact tag an ts) =
(Fact tag an (map getLeftTerm ts))
-- | Get "right" variant of a diff fact
getRightFact :: LNFact -> LNFact
getRightFact (Fact tag an ts) =
(Fact tag an (map getRightTerm ts))
-- | Get all variables inside a fact
getFactVariables :: LNFact -> [LVar]
getFactVariables (Fact _ _ ts) =
map fst $ varOccurences ts
-- | If all the fact terms are simple and different msg variables (i.e., not fresh or public), returns the list of all these variables. Otherwise returns Nothing. [This could be relaxed to work for all variables (including fresh and public) if Facts were typed, so that an argument would always have to be fresh or public or general.]
isTrivialFact :: LNFact -> Maybe [LVar]
isTrivialFact (Fact _ _ ts) = case ts of
[] -> Just []
x:xs -> Prelude.foldl combine (getMsgVar x) (map getMsgVar xs)
where
combine :: Maybe [LVar] -> Maybe [LVar] -> Maybe [LVar]
combine Nothing _ = Nothing
combine (Just _ ) Nothing = Nothing
combine (Just l1) (Just l2) = if noDuplicates l1 l2 then (Just (l1++l2)) else Nothing
noDuplicates l1 l2 = S.null (S.intersection (S.fromList l1) (S.fromList l2))
------------------------------------------------------------------------------
-- Pretty Printing
------------------------------------------------------------------------------
-- | The name of a fact tag, e.g., @factTagName KUFact = "KU"@.
factTagName :: FactTag -> String
factTagName tag = case tag of
KUFact -> "KU"
KDFact -> "KD"
DedFact -> "Ded"
InFact -> "In"
OutFact -> "Out"
FreshFact -> "Fr"
(ProtoFact _ n _) -> n
TermFact -> "Term"
-- | Show a fact tag as a 'String'. This is the 'factTagName' prefixed with
-- the multiplicity.
showFactTag :: FactTag -> String
showFactTag tag =
(++ factTagName tag) $ case factTagMultiplicity tag of
Linear -> ""
Persistent -> "!"
-- | Show a fact tag together with its arity.
showFactTagArity :: FactTag -> String
showFactTagArity tag = showFactTag tag ++ "/" ++ show (factTagArity tag)
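-- For example (sketch, following the definitions above):
-- showFactTag KUFact == "!KU" (persistent), showFactTag FreshFact == "Fr" (linear),
-- and showFactTagArity (ProtoFact Linear "Msg" 2) == "Msg/2".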
-- | Show fact annotation
showFactAnnotation :: FactAnnotation -> String
showFactAnnotation an = case an of
SolveFirst -> "+"
SolveLast -> "-"
NoSources -> "no_precomp"
-- | Pretty print a fact.
prettyFact :: Document d => (t -> d) -> Fact t -> d
prettyFact ppTerm (Fact tag an ts)
| factTagArity tag /= length ts = ppFact ("MALFORMED-" ++ show tag) ts <> ppAnn an
| otherwise = ppFact (showFactTag tag) ts <> ppAnn an
where
ppFact n t = nestShort' (n ++ "(") ")" . fsep . punctuate comma $ map ppTerm t
ppAnn ann = if S.null ann then text "" else
brackets . fsep . punctuate comma $ map (text . showFactAnnotation) $ S.toList ann
-- | Pretty print a 'NFact'.
prettyNFact :: Document d => LNFact -> d
prettyNFact = prettyFact prettyNTerm
-- | Pretty print a 'LFact'.
prettyLNFact :: Document d => LNFact -> d
prettyLNFact fa = prettyFact prettyNTerm fa
| kmilner/tamarin-prover | lib/theory/src/Theory/Model/Fact.hs | gpl-3.0 | 15,498 | 0 | 15 | 3,886 | 3,782 | 1,997 | 1,785 | 281 | 8 |
--- * -*- outline-regexp:"--- \\*"; -*-
--- ** doc
-- In Emacs, use TAB on lines beginning with "-- *" to collapse/expand sections.
{-|
A reader for CSV data, using an extra rules file to help interpret the data.
-}
-- Lots of haddocks in this file are for non-exported types.
-- Here's a command that will render them:
-- stack haddock hledger-lib --fast --no-haddock-deps --haddock-arguments='--ignore-all-exports' --open
--- ** language
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiWayIf #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ViewPatterns #-}
--- ** exports
module Hledger.Read.CsvReader (
-- * Reader
reader,
-- * Misc.
CSV, CsvRecord, CsvValue,
csvFileFor,
rulesFileFor,
parseRulesFile,
printCSV,
-- * Tests
tests_CsvReader,
)
where
--- ** imports
import Control.Applicative (liftA2)
import Control.Exception (IOException, handle, throw)
import Control.Monad (unless, when)
import Control.Monad.Except (ExceptT, throwError)
import qualified Control.Monad.Fail as Fail
import Control.Monad.IO.Class (MonadIO, liftIO)
import Control.Monad.State.Strict (StateT, get, modify', evalStateT)
import Control.Monad.Trans.Class (lift)
import Data.Char (toLower, isDigit, isSpace, isAlphaNum, ord)
import Data.Bifunctor (first)
import Data.List (elemIndex, foldl', intersperse, mapAccumL, nub, sortBy)
import Data.Maybe (catMaybes, fromMaybe, isJust)
import Data.MemoUgly (memo)
import Data.Ord (comparing)
import qualified Data.Set as S
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Data.Text.IO as T
import qualified Data.Text.Lazy as TL
import qualified Data.Text.Lazy.Builder as TB
import Data.Time.Calendar (Day)
import Data.Time.Format (parseTimeM, defaultTimeLocale)
import Safe (atMay, headMay, lastMay, readDef, readMay)
import System.Directory (doesFileExist)
import System.FilePath ((</>), takeDirectory, takeExtension, takeFileName)
import qualified Data.Csv as Cassava
import qualified Data.Csv.Parser.Megaparsec as CassavaMP
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as BL
import Data.Foldable (asum, toList)
import Text.Megaparsec hiding (match, parse)
import Text.Megaparsec.Char (char, newline, string)
import Text.Megaparsec.Custom (customErrorBundlePretty, parseErrorAt)
import Text.Printf (printf)
import Hledger.Data
import Hledger.Utils
import Hledger.Read.Common (aliasesFromOpts, Reader(..), InputOpts(..), amountp, statusp, journalFinalise )
--- ** doctest setup
-- $setup
-- >>> :set -XOverloadedStrings
--- ** some types
type CSV = [CsvRecord]
type CsvRecord = [CsvValue]
type CsvValue = Text
--- ** reader
reader :: MonadIO m => Reader m
reader = Reader
{rFormat = "csv"
,rExtensions = ["csv","tsv","ssv"]
,rReadFn = parse
,rParser = error' "sorry, CSV files can't be included yet" -- PARTIAL:
}
-- | Parse and post-process a "Journal" from CSV data, or give an error.
-- Does not check balance assertions.
-- XXX currently ignores the provided data, reads it from the file path instead.
parse :: InputOpts -> FilePath -> Text -> ExceptT String IO Journal
parse iopts f t = do
let rulesfile = mrules_file_ iopts
r <- liftIO $ readJournalFromCsv rulesfile f t
case r of Left e -> throwError e
Right pj ->
-- journalFinalise assumes the journal's items are
-- reversed, as produced by JournalReader's parser.
-- But here they are already properly ordered. So we'd
-- better preemptively reverse them once more. XXX inefficient
let pj' = journalReverse pj
-- apply any command line account aliases. Can fail with a bad replacement pattern.
in case journalApplyAliases (aliasesFromOpts iopts) pj' of
Left e -> throwError e
Right pj'' -> journalFinalise iopts{balancingopts_=(balancingopts_ iopts){ignore_assertions_=True}} f t pj''
--- ** reading rules files
--- *** rules utilities
-- Not used by hledger; just for lib users.
-- | A pure-exception-throwing IO action that parses this file's content
-- as CSV conversion rules, interpolating any included files first,
-- and runs some extra validation checks.
parseRulesFile :: FilePath -> ExceptT String IO CsvRules
parseRulesFile f =
liftIO (readFilePortably f >>= expandIncludes (takeDirectory f))
>>= either throwError return . parseAndValidateCsvRules f
-- | Given a CSV file path, what would normally be the corresponding rules file ?
rulesFileFor :: FilePath -> FilePath
rulesFileFor = (++ ".rules")
-- | Given a CSV rules file path, what would normally be the corresponding CSV file ?
csvFileFor :: FilePath -> FilePath
csvFileFor = reverse . drop 6 . reverse
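-- For example (doctest-style sketch):
-- >>> rulesFileFor "bank.csv"
-- "bank.csv.rules"
-- >>> csvFileFor "bank.csv.rules"
-- "bank.csv"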
defaultRulesText :: FilePath -> Text
defaultRulesText csvfile = T.pack $ unlines
["# hledger csv conversion rules for " ++ csvFileFor (takeFileName csvfile)
,"# cf http://hledger.org/manual#csv-files"
,""
,"account1 assets:bank:checking"
,""
,"fields date, description, amount1"
,""
,"#skip 1"
,"#newest-first"
,""
,"#date-format %-d/%-m/%Y"
,"#date-format %-m/%-d/%Y"
,"#date-format %Y-%h-%d"
,""
,"#currency $"
,""
,"if ITUNES"
," account2 expenses:entertainment"
,""
,"if (TO|FROM) SAVINGS"
," account2 assets:bank:savings\n"
]
addDirective :: (DirectiveName, Text) -> CsvRulesParsed -> CsvRulesParsed
addDirective d r = r{rdirectives=d:rdirectives r}
addAssignment :: (HledgerFieldName, FieldTemplate) -> CsvRulesParsed -> CsvRulesParsed
addAssignment a r = r{rassignments=a:rassignments r}
setIndexesAndAssignmentsFromList :: [CsvFieldName] -> CsvRulesParsed -> CsvRulesParsed
setIndexesAndAssignmentsFromList fs = addAssignmentsFromList fs . setCsvFieldIndexesFromList fs
setCsvFieldIndexesFromList :: [CsvFieldName] -> CsvRulesParsed -> CsvRulesParsed
setCsvFieldIndexesFromList fs r = r{rcsvfieldindexes=zip fs [1..]}
addAssignmentsFromList :: [CsvFieldName] -> CsvRulesParsed -> CsvRulesParsed
addAssignmentsFromList fs r = foldl' maybeAddAssignment r journalfieldnames
where
maybeAddAssignment rules f = (maybe id addAssignmentFromIndex $ elemIndex f fs) rules
where
addAssignmentFromIndex i = addAssignment (f, T.pack $ '%':show (i+1))
addConditionalBlock :: ConditionalBlock -> CsvRulesParsed -> CsvRulesParsed
addConditionalBlock b r = r{rconditionalblocks=b:rconditionalblocks r}
addConditionalBlocks :: [ConditionalBlock] -> CsvRulesParsed -> CsvRulesParsed
addConditionalBlocks bs r = r{rconditionalblocks=bs++rconditionalblocks r}
getDirective :: DirectiveName -> CsvRules -> Maybe FieldTemplate
getDirective directivename = lookup directivename . rdirectives
instance ShowErrorComponent String where
showErrorComponent = id
-- | Inline all files referenced by include directives in this hledger CSV rules text, recursively.
-- Included file paths may be relative to the directory of the provided file path.
-- This is done as a pre-parse step to simplify the CSV rules parser.
expandIncludes :: FilePath -> Text -> IO Text
expandIncludes dir content = mapM (expandLine dir) (T.lines content) >>= return . T.unlines
where
expandLine dir line =
case line of
(T.stripPrefix "include " -> Just f) -> expandIncludes dir' =<< T.readFile f'
where
f' = dir </> T.unpack (T.dropWhile isSpace f)
dir' = takeDirectory f'
_ -> return line
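-- For example (sketch): a line "include common.rules" inside dir/my.rules is
-- replaced by the (recursively expanded) contents of dir/common.rules, while
-- every other line is passed through unchanged.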
-- | An error-throwing IO action that parses this text as CSV conversion rules
-- and runs some extra validation checks. The file path is used in error messages.
parseAndValidateCsvRules :: FilePath -> T.Text -> Either String CsvRules
parseAndValidateCsvRules rulesfile s =
case parseCsvRules rulesfile s of
Left err -> Left $ customErrorBundlePretty err
Right rules -> first makeFancyParseError $ validateRules rules
where
makeFancyParseError :: String -> String
makeFancyParseError errorString =
parseErrorPretty (FancyError 0 (S.singleton $ ErrorFail errorString) :: ParseError Text String)
-- | Parse this text as CSV conversion rules. The file path is for error messages.
parseCsvRules :: FilePath -> T.Text -> Either (ParseErrorBundle T.Text CustomErr) CsvRules
-- parseCsvRules rulesfile s = runParser csvrulesfile nullrules{baseAccount=takeBaseName rulesfile} rulesfile s
parseCsvRules = runParser (evalStateT rulesp defrules)
-- | Return the validated rules, or an error.
validateRules :: CsvRules -> Either String CsvRules
validateRules rules = do
unless (isAssigned "date") $ Left "Please specify (at top level) the date field. Eg: date %1\n"
Right rules
where
isAssigned f = isJust $ getEffectiveAssignment rules [] f
--- *** rules types
-- | A set of data definitions and account-matching patterns sufficient to
-- convert a particular CSV data file into meaningful journal transactions.
data CsvRules' a = CsvRules' {
rdirectives :: [(DirectiveName,Text)],
-- ^ top-level rules, as (keyword, value) pairs
rcsvfieldindexes :: [(CsvFieldName, CsvFieldIndex)],
-- ^ csv field names and their column number, if declared by a fields list
rassignments :: [(HledgerFieldName, FieldTemplate)],
-- ^ top-level assignments to hledger fields, as (field name, value template) pairs
rconditionalblocks :: [ConditionalBlock],
  -- ^ conditional blocks, containing additional assignments/rules to apply to matched csv records
rblocksassigning :: a -- (String -> [ConditionalBlock])
-- ^ all conditional blocks which can potentially assign field with a given name (memoized)
}
-- | Type used by parsers. Directives, assignments and conditional blocks
-- are in the reverse order compared to what is in the file, and rblocksassigning is non-functional,
-- so it cannot yet be used for processing CSV records
type CsvRulesParsed = CsvRules' ()
-- | Type used after parsing is done. Directives, assignments and conditional blocks
-- are in the same order as they were in the input file and rblocksassigning is functional.
-- Ready to be used for CSV record processing
type CsvRules = CsvRules' (Text -> [ConditionalBlock])
instance Eq CsvRules where
r1 == r2 = (rdirectives r1, rcsvfieldindexes r1, rassignments r1) ==
(rdirectives r2, rcsvfieldindexes r2, rassignments r2)
-- Custom Show instance used for debug output: omit the rblocksassigning field, which isn't showable.
instance Show CsvRules where
show r = "CsvRules { rdirectives = " ++ show (rdirectives r) ++
", rcsvfieldindexes = " ++ show (rcsvfieldindexes r) ++
", rassignments = " ++ show (rassignments r) ++
", rconditionalblocks = " ++ show (rconditionalblocks r) ++
" }"
type CsvRulesParser a = StateT CsvRulesParsed SimpleTextParser a
-- | The keyword of a CSV rule - "fields", "skip", "if", etc.
type DirectiveName = Text
-- | CSV field name.
type CsvFieldName = Text
-- | 1-based CSV column number.
type CsvFieldIndex = Int
-- | Percent symbol followed by a CSV field name or column number. Eg: %date, %1.
type CsvFieldReference = Text
-- | One of the standard hledger fields or pseudo-fields that can be assigned to.
-- Eg date, account1, amount, amount1-in, date-format.
type HledgerFieldName = Text
-- | A text value to be assigned to a hledger field, possibly
-- containing csv field references to be interpolated.
type FieldTemplate = Text
-- | A strptime date parsing pattern, as supported by Data.Time.Format.
type DateFormat = Text
-- | A prefix for a matcher test, either & or none (implicit or).
data MatcherPrefix = And | None
deriving (Show, Eq)
-- | A single test for matching a CSV record, in one way or another.
data Matcher =
RecordMatcher MatcherPrefix Regexp -- ^ match if this regexp matches the overall CSV record
| FieldMatcher MatcherPrefix CsvFieldReference Regexp -- ^ match if this regexp matches the referenced CSV field's value
deriving (Show, Eq)
-- | A conditional block: a set of CSV record matchers, and a sequence
-- of rules which will be enabled only if one or more of the matchers
-- succeeds.
--
-- Three types of rule are allowed inside conditional blocks: field
-- assignments, skip, end. (A skip or end rule is stored as if it was
-- a field assignment, and executed in validateCsv. XXX)
data ConditionalBlock = CB {
cbMatchers :: [Matcher]
,cbAssignments :: [(HledgerFieldName, FieldTemplate)]
} deriving (Show, Eq)
defrules :: CsvRulesParsed
defrules = CsvRules' {
rdirectives=[],
rcsvfieldindexes=[],
rassignments=[],
rconditionalblocks=[],
rblocksassigning = ()
}
-- | Create CsvRules from the content parsed out of the rules file
mkrules :: CsvRulesParsed -> CsvRules
mkrules rules =
let conditionalblocks = reverse $ rconditionalblocks rules
maybeMemo = if length conditionalblocks >= 15 then memo else id
in
CsvRules' {
rdirectives=reverse $ rdirectives rules,
rcsvfieldindexes=rcsvfieldindexes rules,
rassignments=reverse $ rassignments rules,
rconditionalblocks=conditionalblocks,
rblocksassigning = maybeMemo (\f -> filter (any ((==f).fst) . cbAssignments) conditionalblocks)
}
matcherPrefix :: Matcher -> MatcherPrefix
matcherPrefix (RecordMatcher prefix _) = prefix
matcherPrefix (FieldMatcher prefix _ _) = prefix
-- | Group matchers into associative pairs based on prefix, e.g.:
-- A
-- & B
-- C
-- D
-- & E
-- => [[A, B], [C], [D, E]]
groupedMatchers :: [Matcher] -> [[Matcher]]
groupedMatchers [] = []
groupedMatchers (x:xs) = (x:ys) : groupedMatchers zs
where (ys, zs) = span (\y -> matcherPrefix y == And) xs
--- *** rules parsers
{-
Grammar for the CSV conversion rules, more or less:
RULES: RULE*
RULE: ( FIELD-LIST | FIELD-ASSIGNMENT | CONDITIONAL-BLOCK | SKIP | NEWEST-FIRST | DATE-FORMAT | DECIMAL-MARK | COMMENT | BLANK ) NEWLINE
FIELD-LIST: fields SPACE FIELD-NAME ( SPACE? , SPACE? FIELD-NAME )*
FIELD-NAME: QUOTED-FIELD-NAME | BARE-FIELD-NAME
QUOTED-FIELD-NAME: " (any CHAR except double-quote)+ "
BARE-FIELD-NAME: any CHAR except space, tab, #, ;
FIELD-ASSIGNMENT: JOURNAL-FIELD ASSIGNMENT-SEPARATOR FIELD-VALUE
JOURNAL-FIELD: date | date2 | status | code | description | comment | account1 | account2 | amount | JOURNAL-PSEUDO-FIELD
JOURNAL-PSEUDO-FIELD: amount-in | amount-out | currency
ASSIGNMENT-SEPARATOR: SPACE | ( : SPACE? )
FIELD-VALUE: VALUE (possibly containing CSV-FIELD-REFERENCEs)
CSV-FIELD-REFERENCE: % CSV-FIELD
CSV-FIELD: ( FIELD-NAME | FIELD-NUMBER ) (corresponding to a CSV field)
FIELD-NUMBER: DIGIT+
CONDITIONAL-BLOCK: if ( FIELD-MATCHER NEWLINE )+ INDENTED-BLOCK
FIELD-MATCHER: ( CSV-FIELD-NAME SPACE? )? ( MATCHOP SPACE? )? PATTERNS
MATCHOP: ~
PATTERNS: ( NEWLINE REGEXP )* REGEXP
INDENTED-BLOCK: ( SPACE ( FIELD-ASSIGNMENT | COMMENT ) NEWLINE )+
REGEXP: ( NONSPACE CHAR* ) SPACE?
VALUE: SPACE? ( CHAR* ) SPACE?
COMMENT: SPACE? COMMENT-CHAR VALUE
COMMENT-CHAR: # | ;
NONSPACE: any CHAR not a SPACE-CHAR
BLANK: SPACE?
SPACE: SPACE-CHAR+
SPACE-CHAR: space | tab
CHAR: any character except newline
DIGIT: 0-9
-}
rulesp :: CsvRulesParser CsvRules
rulesp = do
_ <- many $ choice
[blankorcommentlinep <?> "blank or comment line"
,(directivep >>= modify' . addDirective) <?> "directive"
,(fieldnamelistp >>= modify' . setIndexesAndAssignmentsFromList) <?> "field name list"
,(fieldassignmentp >>= modify' . addAssignment) <?> "field assignment"
-- conditionalblockp backtracks because it shares "if" prefix with conditionaltablep.
,try (conditionalblockp >>= modify' . addConditionalBlock) <?> "conditional block"
-- 'reverse' is there to ensure that conditions are added in the order they listed in the file
,(conditionaltablep >>= modify' . addConditionalBlocks . reverse) <?> "conditional table"
]
eof
mkrules <$> get
blankorcommentlinep :: CsvRulesParser ()
blankorcommentlinep = lift (dbgparse 8 "trying blankorcommentlinep") >> choiceInState [blanklinep, commentlinep]
blanklinep :: CsvRulesParser ()
blanklinep = lift skipNonNewlineSpaces >> newline >> return () <?> "blank line"
commentlinep :: CsvRulesParser ()
commentlinep = lift skipNonNewlineSpaces >> commentcharp >> lift restofline >> return () <?> "comment line"
commentcharp :: CsvRulesParser Char
commentcharp = oneOf (";#*" :: [Char])
directivep :: CsvRulesParser (DirectiveName, Text)
directivep = (do
lift $ dbgparse 8 "trying directive"
d <- choiceInState $ map (lift . string) directives
v <- (((char ':' >> lift (many spacenonewline)) <|> lift (some spacenonewline)) >> directivevalp)
<|> (optional (char ':') >> lift skipNonNewlineSpaces >> lift eolof >> return "")
return (d, v)
) <?> "directive"
directives :: [Text]
directives =
["date-format"
,"decimal-mark"
,"separator"
-- ,"default-account"
-- ,"default-currency"
,"skip"
,"newest-first"
, "balance-type"
]
directivevalp :: CsvRulesParser Text
directivevalp = T.pack <$> anySingle `manyTill` lift eolof
fieldnamelistp :: CsvRulesParser [CsvFieldName]
fieldnamelistp = (do
lift $ dbgparse 8 "trying fieldnamelist"
string "fields"
optional $ char ':'
lift skipNonNewlineSpaces1
let separator = lift skipNonNewlineSpaces >> char ',' >> lift skipNonNewlineSpaces
f <- fromMaybe "" <$> optional fieldnamep
fs <- some $ (separator >> fromMaybe "" <$> optional fieldnamep)
lift restofline
return . map T.toLower $ f:fs
) <?> "field name list"
fieldnamep :: CsvRulesParser Text
fieldnamep = quotedfieldnamep <|> barefieldnamep
quotedfieldnamep :: CsvRulesParser Text
quotedfieldnamep =
char '"' *> takeWhile1P Nothing (`notElem` ("\"\n:;#~" :: [Char])) <* char '"'
barefieldnamep :: CsvRulesParser Text
barefieldnamep = takeWhile1P Nothing (`notElem` (" \t\n,;#~" :: [Char]))
fieldassignmentp :: CsvRulesParser (HledgerFieldName, FieldTemplate)
fieldassignmentp = do
lift $ dbgparse 8 "trying fieldassignmentp"
f <- journalfieldnamep
v <- choiceInState [ assignmentseparatorp >> fieldvalp
, lift eolof >> return ""
]
return (f,v)
<?> "field assignment"
journalfieldnamep :: CsvRulesParser Text
journalfieldnamep = do
lift (dbgparse 8 "trying journalfieldnamep")
choiceInState $ map (lift . string) journalfieldnames
maxpostings = 99
-- Transaction fields and pseudo fields for CSV conversion.
-- Names must precede any other name they contain, for the parser
-- (amount-in before amount; date2 before date). TODO: fix
journalfieldnames =
concat [[ "account" <> i
,"amount" <> i <> "-in"
,"amount" <> i <> "-out"
,"amount" <> i
,"balance" <> i
,"comment" <> i
,"currency" <> i
] | x <- [maxpostings, (maxpostings-1)..1], let i = T.pack $ show x]
++
["amount-in"
,"amount-out"
,"amount"
,"balance"
,"code"
,"comment"
,"currency"
,"date2"
,"date"
,"description"
,"status"
  ,"skip" -- skip and end are not really fields, but we list them here to allow conditional rules that skip records
,"end"
]
assignmentseparatorp :: CsvRulesParser ()
assignmentseparatorp = do
lift $ dbgparse 8 "trying assignmentseparatorp"
_ <- choiceInState [ lift skipNonNewlineSpaces >> char ':' >> lift skipNonNewlineSpaces
, lift skipNonNewlineSpaces1
]
return ()
fieldvalp :: CsvRulesParser Text
fieldvalp = do
lift $ dbgparse 8 "trying fieldvalp"
T.pack <$> anySingle `manyTill` lift eolof
-- A conditional block: one or more matchers, one per line, followed by one or more indented rules.
conditionalblockp :: CsvRulesParser ConditionalBlock
conditionalblockp = do
lift $ dbgparse 8 "trying conditionalblockp"
-- "if\nMATCHER" or "if \nMATCHER" or "if MATCHER"
start <- getOffset
string "if" >> ( (newline >> return Nothing)
<|> (lift skipNonNewlineSpaces1 >> optional newline))
ms <- some matcherp
as <- catMaybes <$>
many (lift skipNonNewlineSpaces1 >>
choice [ lift eolof >> return Nothing
, fmap Just fieldassignmentp
])
when (null as) $
customFailure $ parseErrorAt start $ "start of conditional block found, but no assignment rules afterward\n(assignment rules in a conditional block should be indented)\n"
return $ CB{cbMatchers=ms, cbAssignments=as}
<?> "conditional block"
-- A conditional table: "if" followed by separator, followed by some field names,
-- followed by many lines, each of which has:
-- one matcher, followed by field assignments (as many as there were fields)
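-- For example (illustrative sketch), with ',' as the separator chosen after "if":
--
-- if,account2,comment2
-- groceries,expenses:groceries,
-- SAVINGS,assets:savings,monthly transfer
--
-- Each body line contributes one ConditionalBlock with a single matcher and one
-- assignment per listed field.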
conditionaltablep :: CsvRulesParser [ConditionalBlock]
conditionaltablep = do
lift $ dbgparse 8 "trying conditionaltablep"
start <- getOffset
string "if"
sep <- lift $ satisfy (\c -> not (isAlphaNum c || isSpace c))
fields <- journalfieldnamep `sepBy1` (char sep)
newline
body <- flip manyTill (lift eolof) $ do
off <- getOffset
m <- matcherp' (char sep >> return ())
vs <- T.split (==sep) . T.pack <$> lift restofline
if (length vs /= length fields)
then customFailure $ parseErrorAt off $ ((printf "line of conditional table should have %d values, but this one has only %d\n" (length fields) (length vs)) :: String)
else return (m,vs)
when (null body) $
customFailure $ parseErrorAt start $ "start of conditional table found, but no assignment rules afterward\n"
return $ flip map body $ \(m,vs) ->
CB{cbMatchers=[m], cbAssignments=zip fields vs}
<?> "conditional table"
-- A single matcher, on one line.
matcherp' :: CsvRulesParser () -> CsvRulesParser Matcher
matcherp' end = try (fieldmatcherp end) <|> recordmatcherp end
matcherp :: CsvRulesParser Matcher
matcherp = matcherp' (lift eolof)
-- A single whole-record matcher.
-- A pattern on the whole line, not beginning with a csv field reference.
recordmatcherp :: CsvRulesParser () -> CsvRulesParser Matcher
recordmatcherp end = do
lift $ dbgparse 8 "trying recordmatcherp"
-- pos <- currentPos
-- _ <- optional (matchoperatorp >> lift skipNonNewlineSpaces >> optional newline)
p <- matcherprefixp
r <- regexp end
return $ RecordMatcher p r
-- when (null ps) $
-- Fail.fail "start of record matcher found, but no patterns afterward\n(patterns should not be indented)\n"
<?> "record matcher"
-- | A single matcher for a specific field. A csv field reference
-- (like %date or %1), and a pattern on the rest of the line,
-- optionally space-separated. Eg:
-- %description chez jacques
fieldmatcherp :: CsvRulesParser () -> CsvRulesParser Matcher
fieldmatcherp end = do
lift $ dbgparse 8 "trying fieldmatcher"
-- An optional fieldname (default: "all")
-- f <- fromMaybe "all" `fmap` (optional $ do
-- f' <- fieldnamep
-- lift skipNonNewlineSpaces
-- return f')
p <- matcherprefixp
f <- csvfieldreferencep <* lift skipNonNewlineSpaces
-- optional operator.. just ~ (case insensitive infix regex) for now
-- _op <- fromMaybe "~" <$> optional matchoperatorp
lift skipNonNewlineSpaces
r <- regexp end
return $ FieldMatcher p f r
<?> "field matcher"
matcherprefixp :: CsvRulesParser MatcherPrefix
matcherprefixp = do
lift $ dbgparse 8 "trying matcherprefixp"
(char '&' >> lift skipNonNewlineSpaces >> return And) <|> return None
csvfieldreferencep :: CsvRulesParser CsvFieldReference
csvfieldreferencep = do
lift $ dbgparse 8 "trying csvfieldreferencep"
char '%'
T.cons '%' . textQuoteIfNeeded <$> fieldnamep
-- A single regular expression
regexp :: CsvRulesParser () -> CsvRulesParser Regexp
regexp end = do
lift $ dbgparse 8 "trying regexp"
-- notFollowedBy matchoperatorp
c <- lift nonspace
cs <- anySingle `manyTill` end
case toRegexCI . T.strip . T.pack $ c:cs of
Left x -> Fail.fail $ "CSV parser: " ++ x
Right x -> return x
-- -- A match operator, indicating the type of match to perform.
-- -- Currently just ~ meaning case insensitive infix regex match.
-- matchoperatorp :: CsvRulesParser String
-- matchoperatorp = fmap T.unpack $ choiceInState $ map string
-- ["~"
-- -- ,"!~"
-- -- ,"="
-- -- ,"!="
-- ]
--- ** reading csv files
-- | Read a Journal from the given CSV data (and filename, used for error
-- messages), or return an error. Proceed as follows:
--
-- 1. parse CSV conversion rules from the specified rules file, or from
-- the default rules file for the specified CSV file, if it exists,
-- or throw a parse error; if it doesn't exist, use built-in default rules
--
-- 2. parse the CSV data, or throw a parse error
--
-- 3. convert the CSV records to transactions using the rules
--
-- 4. if the rules file didn't exist, create it with the default rules and filename
--
-- 5. return the transactions as a Journal
--
readJournalFromCsv :: Maybe FilePath -> FilePath -> Text -> IO (Either String Journal)
readJournalFromCsv Nothing "-" _ = return $ Left "please use --rules-file when reading CSV from stdin"
readJournalFromCsv mrulesfile csvfile csvdata =
handle (\(e::IOException) -> return $ Left $ show e) $ do
-- make and throw an IO exception.. which we catch and convert to an Either above ?
let throwerr = throw . userError
-- parse the csv rules
let rulesfile = fromMaybe (rulesFileFor csvfile) mrulesfile
rulesfileexists <- doesFileExist rulesfile
rulestext <-
if rulesfileexists
then do
dbg6IO "using conversion rules file" rulesfile
readFilePortably rulesfile >>= expandIncludes (takeDirectory rulesfile)
else
return $ defaultRulesText rulesfile
rules <- either throwerr return $ parseAndValidateCsvRules rulesfile rulestext
dbg6IO "csv rules" rules
-- parse the skip directive's value, if any
let skiplines = case getDirective "skip" rules of
Nothing -> 0
Just "" -> 1
Just s -> readDef (throwerr $ "could not parse skip value: " ++ show s) $ T.unpack s
-- parse csv
let
-- parsec seems to fail if you pass it "-" here TODO: try again with megaparsec
parsecfilename = if csvfile == "-" then "(stdin)" else csvfile
separator =
case getDirective "separator" rules >>= parseSeparator of
Just c -> c
_ | ext == "ssv" -> ';'
_ | ext == "tsv" -> '\t'
_ -> ','
where
ext = map toLower $ drop 1 $ takeExtension csvfile
dbg6IO "using separator" separator
records <- (either throwerr id .
dbg7 "validateCsv" . validateCsv rules skiplines .
dbg7 "parseCsv")
`fmap` parseCsv separator parsecfilename csvdata
dbg6IO "first 3 csv records" $ take 3 records
-- identify header lines
-- let (headerlines, datalines) = identifyHeaderLines records
-- mfieldnames = lastMay headerlines
let
-- convert CSV records to transactions, saving the CSV line numbers for error positions
txns = dbg7 "csv txns" $ snd $ mapAccumL
(\pos r ->
let
SourcePos name line col = pos
line' = (mkPos . (+1) . unPos) line
pos' = SourcePos name line' col
in
(pos', transactionFromCsvRecord pos rules r)
)
(initialPos parsecfilename) records
-- Ensure transactions are ordered chronologically.
-- First, if the CSV records seem to be most-recent-first (because
-- there's an explicit "newest-first" directive, or there's more
-- than one date and the first date is more recent than the last):
-- reverse them to get same-date transactions ordered chronologically.
txns' =
(if newestfirst || mdataseemsnewestfirst == Just True
then dbg7 "reversed csv txns" . reverse else id)
txns
where
newestfirst = dbg6 "newestfirst" $ isJust $ getDirective "newest-first" rules
mdataseemsnewestfirst = dbg6 "mdataseemsnewestfirst" $
case nub $ map tdate txns of
ds | length ds > 1 -> Just $ head ds > last ds
_ -> Nothing
-- Second, sort by date.
txns'' = dbg7 "date-sorted csv txns" $ sortBy (comparing tdate) txns'
when (not rulesfileexists) $ do
dbg1IO "creating conversion rules file" rulesfile
T.writeFile rulesfile rulestext
return $ Right nulljournal{jtxns=txns''}
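-- A minimal usage sketch (illustrative only, not part of the original module).
-- "bank.csv" is a hypothetical input file; this assumes the qualified
-- Data.Text.IO import (T) already used above (e.g. by T.writeFile).
_readJournalFromCsvExample :: IO ()
_readJournalFromCsvExample = do
  csvdata <- T.readFile "bank.csv"                     -- the raw CSV text
  ej <- readJournalFromCsv Nothing "bank.csv" csvdata  -- Nothing = use bank.csv.rules, or built-in defaults
  case ej of
    Left err -> putStrLn err                           -- a rules or CSV parse error
    Right j  -> print $ length $ jtxns j               -- count of converted transactions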
-- | Parse special separator names TAB and SPACE, or return the first
-- character. Returns Nothing for an empty string.
parseSeparator :: Text -> Maybe Char
parseSeparator = specials . T.toLower
where specials "space" = Just ' '
specials "tab" = Just '\t'
specials xs = fst <$> T.uncons xs
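-- Illustrative examples (behaviour as described above):
--
-- >>> parseSeparator "TAB"
-- Just '\t'
-- >>> parseSeparator "space"
-- Just ' '
-- >>> parseSeparator ";"
-- Just ';'
-- >>> parseSeparator ""
-- Nothing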
parseCsv :: Char -> FilePath -> Text -> IO (Either String CSV)
parseCsv separator filePath csvdata =
case filePath of
"-" -> parseCassava separator "(stdin)" <$> T.getContents
_ -> return $ if T.null csvdata then Right mempty else parseCassava separator filePath csvdata
parseCassava :: Char -> FilePath -> Text -> Either String CSV
parseCassava separator path content =
either (Left . errorBundlePretty) (Right . parseResultToCsv) <$>
CassavaMP.decodeWith (decodeOptions separator) Cassava.NoHeader path $
BL.fromStrict $ T.encodeUtf8 content
decodeOptions :: Char -> Cassava.DecodeOptions
decodeOptions separator = Cassava.defaultDecodeOptions {
Cassava.decDelimiter = fromIntegral (ord separator)
}
parseResultToCsv :: (Foldable t, Functor t) => t (t B.ByteString) -> CSV
parseResultToCsv = toListList . unpackFields
where
toListList = toList . fmap toList
unpackFields = (fmap . fmap) T.decodeUtf8
printCSV :: CSV -> TL.Text
printCSV = TB.toLazyText . unlinesB . map printRecord
where printRecord = foldMap TB.fromText . intersperse "," . map printField
printField = wrap "\"" "\"" . T.replace "\"" "\"\""
-- | Return the cleaned up and validated CSV data (can be empty), or an error.
validateCsv :: CsvRules -> Int -> Either String CSV -> Either String [CsvRecord]
validateCsv _ _ (Left err) = Left err
validateCsv rules numhdrlines (Right rs) = validate $ applyConditionalSkips $ drop numhdrlines $ filternulls rs
where
filternulls = filter (/=[""])
skipCount r =
case (getEffectiveAssignment rules r "end", getEffectiveAssignment rules r "skip") of
(Nothing, Nothing) -> Nothing
(Just _, _) -> Just maxBound
(Nothing, Just "") -> Just 1
(Nothing, Just x) -> Just (read $ T.unpack x)
applyConditionalSkips [] = []
applyConditionalSkips (r:rest) =
case skipCount r of
Nothing -> r:(applyConditionalSkips rest)
Just cnt -> applyConditionalSkips (drop (cnt-1) rest)
validate [] = Right []
validate rs@(_first:_) = case lessthan2 of
Just r -> Left $ printf "CSV record %s has less than two fields" (show r)
Nothing -> Right rs
where
lessthan2 = headMay $ filter ((<2).length) rs
-- -- | The highest (0-based) field index referenced in the field
-- -- definitions, or -1 if no fields are defined.
-- maxFieldIndex :: CsvRules -> Int
-- maxFieldIndex r = maximumDef (-1) $ catMaybes [
-- dateField r
-- ,statusField r
-- ,codeField r
-- ,amountField r
-- ,amountInField r
-- ,amountOutField r
-- ,currencyField r
-- ,accountField r
-- ,account2Field r
-- ,date2Field r
-- ]
--- ** converting csv records to transactions
showRules rules record =
T.unlines $ catMaybes [ (("the "<>fld<>" rule is: ")<>) <$> getEffectiveAssignment rules record fld | fld <- journalfieldnames]
-- | Look up the value (template) of a csv rule by rule keyword.
csvRule :: CsvRules -> DirectiveName -> Maybe FieldTemplate
csvRule rules = (`getDirective` rules)
-- | Look up the value template assigned to a hledger field by field
-- list/field assignment rules, taking into account the current record and
-- conditional rules.
hledgerField :: CsvRules -> CsvRecord -> HledgerFieldName -> Maybe FieldTemplate
hledgerField = getEffectiveAssignment
-- | Look up the final value assigned to a hledger field, with csv field
-- references interpolated.
hledgerFieldValue :: CsvRules -> CsvRecord -> HledgerFieldName -> Maybe Text
hledgerFieldValue rules record = fmap (renderTemplate rules record) . hledgerField rules record
transactionFromCsvRecord :: SourcePos -> CsvRules -> CsvRecord -> Transaction
transactionFromCsvRecord sourcepos rules record = t
where
----------------------------------------------------------------------
-- 1. Define some helpers:
rule = csvRule rules :: DirectiveName -> Maybe FieldTemplate
-- ruleval = csvRuleValue rules record :: DirectiveName -> Maybe String
field = hledgerField rules record :: HledgerFieldName -> Maybe FieldTemplate
fieldval = hledgerFieldValue rules record :: HledgerFieldName -> Maybe Text
parsedate = parseDateWithCustomOrDefaultFormats (rule "date-format")
mkdateerror datefield datevalue mdateformat = T.unpack $ T.unlines
["error: could not parse \""<>datevalue<>"\" as a date using date format "
<>maybe "\"YYYY/M/D\", \"YYYY-M-D\" or \"YYYY.M.D\"" (T.pack . show) mdateformat
,showRecord record
,"the "<>datefield<>" rule is: "<>(fromMaybe "required, but missing" $ field datefield)
,"the date-format is: "<>fromMaybe "unspecified" mdateformat
,"you may need to "
<>"change your "<>datefield<>" rule, "
<>maybe "add a" (const "change your") mdateformat<>" date-format rule, "
<>"or "<>maybe "add a" (const "change your") mskip<>" skip rule"
,"for m/d/y or d/m/y dates, use date-format %-m/%-d/%Y or date-format %-d/%-m/%Y"
]
where
mskip = rule "skip"
----------------------------------------------------------------------
-- 2. Gather values needed for the transaction itself, by evaluating the
-- field assignment rules using the CSV record's data, and parsing a bit
-- more where needed (dates, status).
mdateformat = rule "date-format"
date = fromMaybe "" $ fieldval "date"
-- PARTIAL:
date' = fromMaybe (error' $ mkdateerror "date" date mdateformat) $ parsedate date
mdate2 = fieldval "date2"
mdate2' = maybe Nothing (maybe (error' $ mkdateerror "date2" (fromMaybe "" mdate2) mdateformat) Just . parsedate) mdate2
status =
case fieldval "status" of
Nothing -> Unmarked
Just s -> either statuserror id $ runParser (statusp <* eof) "" s
where
statuserror err = error' . T.unpack $ T.unlines
["error: could not parse \""<>s<>"\" as a cleared status (should be *, ! or empty)"
,"the parse error is: "<>T.pack (customErrorBundlePretty err)
]
code = maybe "" singleline $ fieldval "code"
description = maybe "" singleline $ fieldval "description"
comment = maybe "" unescapeNewlines $ fieldval "comment"
precomment = maybe "" unescapeNewlines $ fieldval "precomment"
singleline = T.unwords . filter (not . T.null) . map T.strip . T.lines
unescapeNewlines = T.intercalate "\n" . T.splitOn "\\n"
----------------------------------------------------------------------
-- 3. Generate the postings for which an account has been assigned
-- (possibly indirectly due to an amount or balance assignment)
p1IsVirtual = (accountNamePostingType <$> fieldval "account1") == Just VirtualPosting
ps = [p | n <- [1..maxpostings]
,let comment = maybe "" unescapeNewlines $ fieldval ("comment"<> T.pack (show n))
,let currency = fromMaybe "" (fieldval ("currency"<> T.pack (show n)) <|> fieldval "currency")
,let mamount = getAmount rules record currency p1IsVirtual n
,let mbalance = getBalance rules record currency n
,Just (acct,isfinal) <- [getAccount rules record mamount mbalance n] -- skips Nothings
,let acct' | not isfinal && acct==unknownExpenseAccount &&
fromMaybe False (mamount >>= isNegativeMixedAmount) = unknownIncomeAccount
| otherwise = acct
,let p = nullposting{paccount = accountNameWithoutPostingType acct'
,pamount = fromMaybe missingmixedamt mamount
,ptransaction = Just t
,pbalanceassertion = mkBalanceAssertion rules record <$> mbalance
,pcomment = comment
,ptype = accountNamePostingType acct
}
]
----------------------------------------------------------------------
-- 4. Build the transaction (and name it, so the postings can reference it).
t = nulltransaction{
tsourcepos = (sourcepos, sourcepos) -- the CSV line number
,tdate = date'
,tdate2 = mdate2'
,tstatus = status
,tcode = code
,tdescription = description
,tcomment = comment
,tprecedingcomment = precomment
,tpostings = ps
}
-- | Figure out the amount specified for posting N, if any.
-- A currency symbol to prepend to the amount, if any, is provided,
-- and whether posting 1 requires balancing or not.
-- This looks for a non-empty amount value assigned to "amountN", "amountN-in", or "amountN-out".
-- For postings 1 or 2 it also looks at "amount", "amount-in", "amount-out".
-- If more than one of these has a value, it looks for one that is non-zero.
-- If there are multiple non-zero amounts it throws an error; if all the
-- amounts are zero, the first one is used.
getAmount :: CsvRules -> CsvRecord -> Text -> Bool -> Int -> Maybe MixedAmount
getAmount rules record currency p1IsVirtual n =
-- Warning! Many tricky corner cases here.
-- Keep synced with:
-- hledger_csv.m4.md -> CSV FORMAT -> "amount", "Setting amounts",
-- hledger/test/csv.test -> 13, 31-34
let
unnumberedfieldnames = ["amount","amount-in","amount-out"]
-- amount field names which can affect this posting
fieldnames = map (("amount"<> T.pack(show n))<>) ["","-in","-out"]
-- For posting 1, also recognise the old amount/amount-in/amount-out names.
-- For posting 2, the same but only if posting 1 needs balancing.
++ if n==1 || n==2 && not p1IsVirtual then unnumberedfieldnames else []
-- assignments to any of these field names with non-empty values
assignments = [(f,a') | f <- fieldnames
, Just v <- [T.strip . renderTemplate rules record <$> hledgerField rules record f]
, not $ T.null v
-- XXX maybe ignore rule-generated values like "", "-", "$", "-$", "$-" ? cf CSV FORMAT -> "amount", "Setting amounts",
, let a = parseAmount rules record currency v
-- With amount/amount-in/amount-out, in posting 2,
-- flip the sign and convert to cost, as they did before 1.17
, let a' = if f `elem` unnumberedfieldnames && n==2 then mixedAmountCost (maNegate a) else a
]
-- if any of the numbered field names are present, discard all the unnumbered ones
discardUnnumbered xs = if null numbered then xs else numbered
where
numbered = filter (T.any isDigit . fst) xs
-- discard all zero amounts, unless all amounts are zero, in which case discard all but the first
discardExcessZeros xs = if null nonzeros then take 1 xs else nonzeros
where
nonzeros = filter (not . mixedAmountLooksZero . snd) xs
-- for -out fields, flip the sign XXX unless it's already negative ? back compat issues / too confusing ?
negateIfOut f = if "-out" `T.isSuffixOf` f then maNegate else id
in case discardExcessZeros $ discardUnnumbered assignments of
[] -> Nothing
[(f,a)] -> Just $ negateIfOut f a
fs -> error' . T.unpack . T.unlines $ -- PARTIAL:
["multiple non-zero amounts assigned,"
,"please ensure just one. (https://hledger.org/csv.html#amount)"
," " <> showRecord record
," for posting: " <> T.pack (show n)
] ++
[" assignment: " <> f <> " " <>
fromMaybe "" (hledgerField rules record f) <>
"\t=> value: " <> wbToText (showMixedAmountB noColour a) -- XXX not sure this is showing all the right info
| (f,a) <- fs]
-- | Figure out the expected balance (assertion or assignment) specified for posting N,
-- if any (and its parse position).
getBalance :: CsvRules -> CsvRecord -> Text -> Int -> Maybe (Amount, SourcePos)
getBalance rules record currency n = do
v <- (fieldval ("balance"<> T.pack (show n))
-- for posting 1, also recognise the old field name
<|> if n==1 then fieldval "balance" else Nothing)
case v of
"" -> Nothing
s -> Just (
parseBalanceAmount rules record currency n s
,initialPos "" -- parse position to show when assertion fails,
) -- XXX the csv record's line number would be good
where
fieldval = fmap T.strip . hledgerFieldValue rules record :: HledgerFieldName -> Maybe Text
-- | Given a non-empty amount string (from CSV) to parse, along with a
-- possibly non-empty currency symbol to prepend,
-- parse as a hledger MixedAmount (as in journal format), or raise an error.
-- The whole CSV record is provided for the error message.
parseAmount :: CsvRules -> CsvRecord -> Text -> Text -> MixedAmount
parseAmount rules record currency s =
either mkerror mixedAmount $ -- PARTIAL:
runParser (evalStateT (amountp <* eof) journalparsestate) "" $
currency <> simplifySign s
where
journalparsestate = nulljournal{jparsedecimalmark=parseDecimalMark rules}
mkerror e = error' . T.unpack $ T.unlines
["error: could not parse \"" <> s <> "\" as an amount"
,showRecord record
,showRules rules record
-- ,"the default-currency is: "++fromMaybe "unspecified" (getDirective "default-currency" rules)
,"the parse error is: " <> T.pack (customErrorBundlePretty e)
,"you may need to \
\change your amount*, balance*, or currency* rules, \
\or add or change your skip rule"
]
-- XXX unify these ^v
-- | Almost but not quite the same as parseAmount.
-- Given a non-empty amount string (from CSV) to parse, along with a
-- possibly non-empty currency symbol to prepend,
-- parse as a hledger Amount (as in journal format), or raise an error.
-- The CSV record and the field's numeric suffix are provided for the error message.
parseBalanceAmount :: CsvRules -> CsvRecord -> Text -> Int -> Text -> Amount
parseBalanceAmount rules record currency n s =
either (mkerror n s) id $
runParser (evalStateT (amountp <* eof) journalparsestate) "" $
currency <> simplifySign s
-- the csv record's line number would be good
where
journalparsestate = nulljournal{jparsedecimalmark=parseDecimalMark rules}
mkerror n s e = error' . T.unpack $ T.unlines
["error: could not parse \"" <> s <> "\" as balance"<> T.pack (show n) <> " amount"
,showRecord record
,showRules rules record
-- ,"the default-currency is: "++fromMaybe "unspecified" mdefaultcurrency
,"the parse error is: "<> T.pack (customErrorBundlePretty e)
]
-- Read a valid decimal mark from the decimal-mark rule, if any.
-- If the rule is present with an invalid argument, raise an error.
parseDecimalMark :: CsvRules -> Maybe DecimalMark
parseDecimalMark rules = do
s <- rules `csvRule` "decimal-mark"
case T.uncons s of
Just (c, rest) | T.null rest && isDecimalMark c -> return c
_ -> error' . T.unpack $ "decimal-mark's argument should be \".\" or \",\" (not \""<>s<>"\")"
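-- Illustrative examples, using the 'defrules' and 'mkrules' helpers that the
-- tests at the end of this module also use:
--
-- >>> parseDecimalMark (mkrules defrules)
-- Nothing
-- >>> parseDecimalMark (mkrules defrules{rdirectives=[("decimal-mark",",")]})
-- Just ','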
-- | Make a balance assertion for the given amount, with the given parse
-- position (to be shown in assertion failures), with the assertion type
-- possibly set by a balance-type rule.
-- The CSV rules and current record are also provided, to be shown in case
-- balance-type's argument is bad (XXX refactor).
mkBalanceAssertion :: CsvRules -> CsvRecord -> (Amount, SourcePos) -> BalanceAssertion
mkBalanceAssertion rules record (amt, pos) = assrt{baamount=amt, baposition=pos}
where
assrt =
case getDirective "balance-type" rules of
Nothing -> nullassertion
Just "=" -> nullassertion
Just "==" -> nullassertion{batotal=True}
Just "=*" -> nullassertion{bainclusive=True}
Just "==*" -> nullassertion{batotal=True, bainclusive=True}
Just x -> error' . T.unpack $ T.unlines -- PARTIAL:
[ "balance-type \"" <> x <>"\" is invalid. Use =, ==, =* or ==*."
, showRecord record
, showRules rules record
]
-- | Figure out the account name specified for posting N, if any.
-- And whether it is the default unknown account (which may be
-- improved later) or an explicitly set account (which may not).
getAccount :: CsvRules -> CsvRecord -> Maybe MixedAmount -> Maybe (Amount, SourcePos) -> Int -> Maybe (AccountName, Bool)
getAccount rules record mamount mbalance n =
let
fieldval = hledgerFieldValue rules record :: HledgerFieldName -> Maybe Text
maccount = fieldval ("account"<> T.pack (show n))
in case maccount of
-- accountN is set to the empty string - no posting will be generated
Just "" -> Nothing
-- accountN is set (possibly to "expenses:unknown"! #1192) - mark it final
Just a -> Just (a, True)
-- accountN is unset
Nothing ->
case (mamount, mbalance) of
-- amountN is set, or implied by balanceN - set accountN to
-- the default unknown account ("expenses:unknown") and
-- allow it to be improved later
(Just _, _) -> Just (unknownExpenseAccount, False)
(_, Just _) -> Just (unknownExpenseAccount, False)
-- amountN is also unset - no posting will be generated
(Nothing, Nothing) -> Nothing
-- | Default account names to use when needed.
unknownExpenseAccount = "expenses:unknown"
unknownIncomeAccount = "income:unknown"
type CsvAmountString = Text
-- | Canonicalise the sign in a CSV amount string.
-- Such strings can have a minus sign, parentheses (equivalent to minus),
-- or any two of these (which cancel out),
-- or a plus sign (which is removed),
-- or any sign by itself with no following number (which is removed).
-- See hledger > CSV FORMAT > Tips > Setting amounts.
--
-- These are supported (note, not every possible combination):
--
-- >>> simplifySign "1"
-- "1"
-- >>> simplifySign "+1"
-- "1"
-- >>> simplifySign "-1"
-- "-1"
-- >>> simplifySign "(1)"
-- "-1"
-- >>> simplifySign "--1"
-- "1"
-- >>> simplifySign "-(1)"
-- "1"
-- >>> simplifySign "-+1"
-- "-1"
-- >>> simplifySign "(-1)"
-- "1"
-- >>> simplifySign "((1))"
-- "1"
-- >>> simplifySign "-"
-- ""
-- >>> simplifySign "()"
-- ""
-- >>> simplifySign "+"
-- ""
simplifySign :: CsvAmountString -> CsvAmountString
simplifySign amtstr
| Just (' ',t) <- T.uncons amtstr = simplifySign t
| Just (t,' ') <- T.unsnoc amtstr = simplifySign t
| Just ('(',t) <- T.uncons amtstr, Just (amt,')') <- T.unsnoc t = simplifySign $ negateStr amt
| Just ('-',b) <- T.uncons amtstr, Just ('(',t) <- T.uncons b, Just (amt,')') <- T.unsnoc t = simplifySign amt
| Just ('-',m) <- T.uncons amtstr, Just ('-',amt) <- T.uncons m = amt
| Just ('-',m) <- T.uncons amtstr, Just ('+',amt) <- T.uncons m = negateStr amt
| amtstr `elem` ["-","+","()"] = ""
| Just ('+',amt) <- T.uncons amtstr = simplifySign amt
| otherwise = amtstr
negateStr :: Text -> Text
negateStr amtstr = case T.uncons amtstr of
Just ('-',s) -> s
_ -> T.cons '-' amtstr
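-- Illustrative examples:
--
-- >>> negateStr "1"
-- "-1"
-- >>> negateStr "-1"
-- "1"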
-- | Show an (approximate) recreation of the original CSV record.
showRecord :: CsvRecord -> Text
showRecord r = "record values: "<>T.intercalate "," (map (wrap "\"" "\"") r)
-- | Given the conversion rules, a CSV record and a hledger field name, find
-- the value template ultimately assigned to this field, if any, by a field
-- assignment at top level or in a conditional block matching this record.
--
-- Note conditional blocks' patterns are matched against an approximation of the
-- CSV record: all the field values, without enclosing quotes, comma-separated.
--
getEffectiveAssignment :: CsvRules -> CsvRecord -> HledgerFieldName -> Maybe FieldTemplate
getEffectiveAssignment rules record f = lastMay $ map snd $ assignments
where
-- all active assignments to field f, in order
assignments = dbg9 "csv assignments" $ filter ((==f).fst) $ toplevelassignments ++ conditionalassignments
where
-- all top level field assignments
toplevelassignments = rassignments rules
-- all field assignments in conditional blocks assigning to field f and active for the current csv record
conditionalassignments = concatMap cbAssignments $ filter isBlockActive $ (rblocksassigning rules) f
where
-- does this conditional block match the current csv record ?
isBlockActive :: ConditionalBlock -> Bool
isBlockActive CB{..} = any (all matcherMatches) $ groupedMatchers cbMatchers
where
-- does this individual matcher match the current csv record ?
matcherMatches :: Matcher -> Bool
matcherMatches (RecordMatcher _ pat) = regexMatchText pat' wholecsvline
where
pat' = dbg7 "regex" pat
-- A synthetic whole CSV record to match against. Note, this can be
-- different from the original CSV data:
-- - any whitespace surrounding field values is preserved
-- - any quotes enclosing field values are removed
-- - and the field separator is always comma
-- which means that a field containing a comma will look like two fields.
wholecsvline = dbg7 "wholecsvline" $ T.intercalate "," record
matcherMatches (FieldMatcher _ csvfieldref pat) = regexMatchText pat csvfieldvalue
where
-- the value of the referenced CSV field to match against.
csvfieldvalue = dbg7 "csvfieldvalue" $ replaceCsvFieldReference rules record csvfieldref
-- | Render a field assignment's template, possibly interpolating referenced
-- CSV field values. Outer whitespace is removed from interpolated values.
renderTemplate :: CsvRules -> CsvRecord -> FieldTemplate -> Text
renderTemplate rules record t = maybe t mconcat $ parseMaybe
(many $ takeWhile1P Nothing (/='%')
<|> replaceCsvFieldReference rules record <$> referencep)
t
where
referencep = liftA2 T.cons (char '%') (takeWhile1P (Just "reference") isFieldNameChar) :: Parsec CustomErr Text Text
isFieldNameChar c = isAlphaNum c || c == '_' || c == '-'
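-- An illustrative example, using the 'defrules' and 'mkrules' helpers that the
-- tests at the end of this module also use:
--
-- >>> renderTemplate (mkrules defrules{rcsvfieldindexes=[("date",1)]}) ["2021-01-01","5"] "%date: %2"
-- "2021-01-01: 5"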
-- | Replace something that looks like a reference to a csv field ("%date" or "%1")
-- with that field's value. If it doesn't look like a field reference, or if we
-- can't find such a field, leave it unchanged.
replaceCsvFieldReference :: CsvRules -> CsvRecord -> CsvFieldReference -> Text
replaceCsvFieldReference rules record s = case T.uncons s of
Just ('%', fieldname) -> fromMaybe s $ csvFieldValue rules record fieldname
_ -> s
-- | Get the (whitespace-stripped) value of a CSV field, identified by its name or
-- column number, ("date" or "1"), from the given CSV record, if such a field exists.
csvFieldValue :: CsvRules -> CsvRecord -> CsvFieldName -> Maybe Text
csvFieldValue rules record fieldname = do
fieldindex <- if | T.all isDigit fieldname -> readMay $ T.unpack fieldname
| otherwise -> lookup (T.toLower fieldname) $ rcsvfieldindexes rules
T.strip <$> atMay record (fieldindex-1)
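-- Illustrative examples, using the 'defrules' and 'mkrules' helpers that the
-- tests at the end of this module also use:
--
-- >>> csvFieldValue (mkrules defrules) ["a"," b "] "2"
-- Just "b"
-- >>> csvFieldValue (mkrules defrules{rcsvfieldindexes=[("date",1)]}) ["2021-01-01","x"] "date"
-- Just "2021-01-01"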
-- | Parse the date string using the specified date-format, or if unspecified
-- the "simple date" formats (YYYY/MM/DD, YYYY-MM-DD, YYYY.MM.DD, leading
-- zeroes optional).
parseDateWithCustomOrDefaultFormats :: Maybe DateFormat -> Text -> Maybe Day
parseDateWithCustomOrDefaultFormats mformat s = asum $ map parsewith formats
where
parsewith = flip (parseTimeM True defaultTimeLocale) (T.unpack s)
formats = map T.unpack $ maybe
["%Y/%-m/%-d"
,"%Y-%-m-%-d"
,"%Y.%-m.%-d"
-- ,"%-m/%-d/%Y"
-- ,parseTime defaultTimeLocale "%Y/%m/%e" (take 5 s ++ "0" ++ drop 5 s)
-- ,parseTime defaultTimeLocale "%Y-%m-%e" (take 5 s ++ "0" ++ drop 5 s)
-- ,parseTime defaultTimeLocale "%m/%e/%Y" ('0':s)
-- ,parseTime defaultTimeLocale "%m-%e-%Y" ('0':s)
]
(:[])
mformat
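-- Illustrative examples:
--
-- >>> parseDateWithCustomOrDefaultFormats Nothing "2021/1/5"
-- Just 2021-01-05
-- >>> parseDateWithCustomOrDefaultFormats (Just "%d.%m.%Y") "05.01.2021"
-- Just 2021-01-05
-- >>> parseDateWithCustomOrDefaultFormats Nothing "1/5/2021"
-- Nothing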
--- ** tests
tests_CsvReader = testGroup "CsvReader" [
testGroup "parseCsvRules" [
testCase "empty file" $
parseCsvRules "unknown" "" @?= Right (mkrules defrules)
]
,testGroup "rulesp" [
testCase "trailing comments" $
parseWithState' defrules rulesp "skip\n# \n#\n" @?= Right (mkrules $ defrules{rdirectives = [("skip","")]})
,testCase "trailing blank lines" $
parseWithState' defrules rulesp "skip\n\n \n" @?= (Right (mkrules $ defrules{rdirectives = [("skip","")]}))
,testCase "no final newline" $
parseWithState' defrules rulesp "skip" @?= (Right (mkrules $ defrules{rdirectives=[("skip","")]}))
,testCase "assignment with empty value" $
parseWithState' defrules rulesp "account1 \nif foo\n account2 foo\n" @?=
(Right (mkrules $ defrules{rassignments = [("account1","")], rconditionalblocks = [CB{cbMatchers=[RecordMatcher None (toRegex' "foo")],cbAssignments=[("account2","foo")]}]}))
]
,testGroup "conditionalblockp" [
testCase "space after conditional" $ -- #1120
parseWithState' defrules conditionalblockp "if a\n account2 b\n \n" @?=
(Right $ CB{cbMatchers=[RecordMatcher None $ toRegexCI' "a"],cbAssignments=[("account2","b")]})
,testGroup "csvfieldreferencep" [
testCase "number" $ parseWithState' defrules csvfieldreferencep "%1" @?= (Right "%1")
,testCase "name" $ parseWithState' defrules csvfieldreferencep "%date" @?= (Right "%date")
,testCase "quoted name" $ parseWithState' defrules csvfieldreferencep "%\"csv date\"" @?= (Right "%\"csv date\"")
]
,testGroup "matcherp" [
testCase "recordmatcherp" $
parseWithState' defrules matcherp "A A\n" @?= (Right $ RecordMatcher None $ toRegexCI' "A A")
,testCase "recordmatcherp.starts-with-&" $
parseWithState' defrules matcherp "& A A\n" @?= (Right $ RecordMatcher And $ toRegexCI' "A A")
,testCase "fieldmatcherp.starts-with-%" $
parseWithState' defrules matcherp "description A A\n" @?= (Right $ RecordMatcher None $ toRegexCI' "description A A")
,testCase "fieldmatcherp" $
parseWithState' defrules matcherp "%description A A\n" @?= (Right $ FieldMatcher None "%description" $ toRegexCI' "A A")
,testCase "fieldmatcherp.starts-with-&" $
parseWithState' defrules matcherp "& %description A A\n" @?= (Right $ FieldMatcher And "%description" $ toRegexCI' "A A")
-- ,testCase "fieldmatcherp with operator" $
-- parseWithState' defrules matcherp "%description ~ A A\n" @?= (Right $ FieldMatcher "%description" "A A")
]
,testGroup "getEffectiveAssignment" [
let rules = mkrules $ defrules {rcsvfieldindexes=[("csvdate",1)],rassignments=[("date","%csvdate")]}
in testCase "toplevel" $ getEffectiveAssignment rules ["a","b"] "date" @?= (Just "%csvdate")
,let rules = mkrules $ defrules{rcsvfieldindexes=[("csvdate",1)], rconditionalblocks=[CB [FieldMatcher None "%csvdate" $ toRegex' "a"] [("date","%csvdate")]]}
in testCase "conditional" $ getEffectiveAssignment rules ["a","b"] "date" @?= (Just "%csvdate")
,let rules = mkrules $ defrules{rcsvfieldindexes=[("csvdate",1),("description",2)], rconditionalblocks=[CB [FieldMatcher None "%csvdate" $ toRegex' "a", FieldMatcher None "%description" $ toRegex' "b"] [("date","%csvdate")]]}
in testCase "conditional-with-or-a" $ getEffectiveAssignment rules ["a"] "date" @?= (Just "%csvdate")
,let rules = mkrules $ defrules{rcsvfieldindexes=[("csvdate",1),("description",2)], rconditionalblocks=[CB [FieldMatcher None "%csvdate" $ toRegex' "a", FieldMatcher None "%description" $ toRegex' "b"] [("date","%csvdate")]]}
in testCase "conditional-with-or-b" $ getEffectiveAssignment rules ["_", "b"] "date" @?= (Just "%csvdate")
,let rules = mkrules $ defrules{rcsvfieldindexes=[("csvdate",1),("description",2)], rconditionalblocks=[CB [FieldMatcher None "%csvdate" $ toRegex' "a", FieldMatcher And "%description" $ toRegex' "b"] [("date","%csvdate")]]}
in testCase "conditional.with-and" $ getEffectiveAssignment rules ["a", "b"] "date" @?= (Just "%csvdate")
,let rules = mkrules $ defrules{rcsvfieldindexes=[("csvdate",1),("description",2)], rconditionalblocks=[CB [FieldMatcher None "%csvdate" $ toRegex' "a", FieldMatcher And "%description" $ toRegex' "b", FieldMatcher None "%description" $ toRegex' "c"] [("date","%csvdate")]]}
in testCase "conditional.with-and-or" $ getEffectiveAssignment rules ["_", "c"] "date" @?= (Just "%csvdate")
]
]
]
| simonmichael/hledger | hledger-lib/Hledger/Read/CsvReader.hs | gpl-3.0 | 58,282 | 0 | 23 | 13,216 | 11,858 | 6,260 | 5,598 | 752 | 10 |
module LayoutBad6 where
x = id (case x of 3 -> 4) | roberth/uu-helium | test/parser/LayoutBad6.hs | gpl-3.0 | 57 | 0 | 9 | 19 | 25 | 14 | 11 | 2 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.SWF.RegisterActivityType
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Registers a new /activity type/ along with its configuration settings in
-- the specified domain.
--
-- A 'TypeAlreadyExists' fault is returned if the type already exists in
-- the domain. You cannot change any configuration settings of the type
-- after its registration, and it must be registered as a new version.
--
-- __Access Control__
--
-- You can use IAM policies to control this action\'s access to Amazon SWF
-- resources as follows:
--
-- - Use a 'Resource' element with the domain name to limit the action to
-- only specified domains.
-- - Use an 'Action' element to allow or deny permission to call this
-- action.
-- - Constrain the following parameters by using a 'Condition' element
-- with the appropriate keys.
-- - 'defaultTaskList.name': String constraint. The key is
-- 'swf:defaultTaskList.name'.
-- - 'name': String constraint. The key is 'swf:name'.
-- - 'version': String constraint. The key is 'swf:version'.
--
-- If the caller does not have sufficient permissions to invoke the action,
-- or the parameter values fall outside the specified constraints, the
-- action fails. The associated event attribute\'s __cause__ parameter will
-- be set to OPERATION_NOT_PERMITTED. For details and example IAM policies,
-- see
-- <http://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html Using IAM to Manage Access to Amazon SWF Workflows>.
--
-- /See:/ <http://docs.aws.amazon.com/amazonswf/latest/apireference/API_RegisterActivityType.html AWS API Reference> for RegisterActivityType.
module Network.AWS.SWF.RegisterActivityType
(
-- * Creating a Request
registerActivityType
, RegisterActivityType
-- * Request Lenses
, ratDefaultTaskScheduleToStartTimeout
, ratDefaultTaskList
, ratDefaultTaskPriority
, ratDefaultTaskHeartbeatTimeout
, ratDefaultTaskScheduleToCloseTimeout
, ratDefaultTaskStartToCloseTimeout
, ratDescription
, ratDomain
, ratName
, ratVersion
-- * Destructuring the Response
, registerActivityTypeResponse
, RegisterActivityTypeResponse
) where
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
import Network.AWS.SWF.Types
import Network.AWS.SWF.Types.Product
-- | /See:/ 'registerActivityType' smart constructor.
data RegisterActivityType = RegisterActivityType'
{ _ratDefaultTaskScheduleToStartTimeout :: !(Maybe Text)
, _ratDefaultTaskList :: !(Maybe TaskList)
, _ratDefaultTaskPriority :: !(Maybe Text)
, _ratDefaultTaskHeartbeatTimeout :: !(Maybe Text)
, _ratDefaultTaskScheduleToCloseTimeout :: !(Maybe Text)
, _ratDefaultTaskStartToCloseTimeout :: !(Maybe Text)
, _ratDescription :: !(Maybe Text)
, _ratDomain :: !Text
, _ratName :: !Text
, _ratVersion :: !Text
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'RegisterActivityType' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ratDefaultTaskScheduleToStartTimeout'
--
-- * 'ratDefaultTaskList'
--
-- * 'ratDefaultTaskPriority'
--
-- * 'ratDefaultTaskHeartbeatTimeout'
--
-- * 'ratDefaultTaskScheduleToCloseTimeout'
--
-- * 'ratDefaultTaskStartToCloseTimeout'
--
-- * 'ratDescription'
--
-- * 'ratDomain'
--
-- * 'ratName'
--
-- * 'ratVersion'
registerActivityType
:: Text -- ^ 'ratDomain'
-> Text -- ^ 'ratName'
-> Text -- ^ 'ratVersion'
-> RegisterActivityType
registerActivityType pDomain_ pName_ pVersion_ =
RegisterActivityType'
{ _ratDefaultTaskScheduleToStartTimeout = Nothing
, _ratDefaultTaskList = Nothing
, _ratDefaultTaskPriority = Nothing
, _ratDefaultTaskHeartbeatTimeout = Nothing
, _ratDefaultTaskScheduleToCloseTimeout = Nothing
, _ratDefaultTaskStartToCloseTimeout = Nothing
, _ratDescription = Nothing
, _ratDomain = pDomain_
, _ratName = pName_
, _ratVersion = pVersion_
}
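-- A hypothetical usage sketch (not part of the generated module). It assumes
-- the lens operators ('&', '?~') from "Control.Lens" (or a re-export of them)
-- are in scope; the domain, name and version values are made up:
--
-- > registerActivityType "my-domain" "process-payment" "1.0"
-- >   & ratDescription ?~ "processes one payment"
-- >   & ratDefaultTaskStartToCloseTimeout ?~ "3600"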
-- | If set, specifies the default maximum duration that a task of this
-- activity type can wait before being assigned to a worker. This default
-- can be overridden when scheduling an activity task using the
-- 'ScheduleActivityTask' decision.
--
-- The duration is specified in seconds; an integer greater than or equal
-- to 0. The value \"NONE\" can be used to specify unlimited duration.
ratDefaultTaskScheduleToStartTimeout :: Lens' RegisterActivityType (Maybe Text)
ratDefaultTaskScheduleToStartTimeout = lens _ratDefaultTaskScheduleToStartTimeout (\ s a -> s{_ratDefaultTaskScheduleToStartTimeout = a});
-- | If set, specifies the default task list to use for scheduling tasks of
-- this activity type. This default task list is used if a task list is not
-- provided when a task is scheduled through the 'ScheduleActivityTask'
-- decision.
ratDefaultTaskList :: Lens' RegisterActivityType (Maybe TaskList)
ratDefaultTaskList = lens _ratDefaultTaskList (\ s a -> s{_ratDefaultTaskList = a});
-- | The default task priority to assign to the activity type. If not
-- assigned, then \"0\" will be used. Valid values are integers that range
-- from Java\'s 'Integer.MIN_VALUE' (-2147483648) to 'Integer.MAX_VALUE'
-- (2147483647). Higher numbers indicate higher priority.
--
-- For more information about setting task priority, see
-- <http://docs.aws.amazon.com/amazonswf/latest/developerguide/programming-priority.html Setting Task Priority>
-- in the /Amazon Simple Workflow Developer Guide/.
ratDefaultTaskPriority :: Lens' RegisterActivityType (Maybe Text)
ratDefaultTaskPriority = lens _ratDefaultTaskPriority (\ s a -> s{_ratDefaultTaskPriority = a});
-- | If set, specifies the default maximum time before which a worker
-- processing a task of this type must report progress by calling
-- RecordActivityTaskHeartbeat. If the timeout is exceeded, the activity
-- task is automatically timed out. This default can be overridden when
-- scheduling an activity task using the 'ScheduleActivityTask' decision.
-- If the activity worker subsequently attempts to record a heartbeat or
-- returns a result, the activity worker receives an 'UnknownResource'
-- fault. In this case, Amazon SWF no longer considers the activity task to
-- be valid; the activity worker should clean up the activity task.
--
-- The duration is specified in seconds; an integer greater than or equal
-- to 0. The value \"NONE\" can be used to specify unlimited duration.
ratDefaultTaskHeartbeatTimeout :: Lens' RegisterActivityType (Maybe Text)
ratDefaultTaskHeartbeatTimeout = lens _ratDefaultTaskHeartbeatTimeout (\ s a -> s{_ratDefaultTaskHeartbeatTimeout = a});
-- | If set, specifies the default maximum duration for a task of this
-- activity type. This default can be overridden when scheduling an
-- activity task using the 'ScheduleActivityTask' decision.
--
-- The duration is specified in seconds; an integer greater than or equal
-- to 0. The value \"NONE\" can be used to specify unlimited duration.
ratDefaultTaskScheduleToCloseTimeout :: Lens' RegisterActivityType (Maybe Text)
ratDefaultTaskScheduleToCloseTimeout = lens _ratDefaultTaskScheduleToCloseTimeout (\ s a -> s{_ratDefaultTaskScheduleToCloseTimeout = a});
-- | If set, specifies the default maximum duration that a worker can take to
-- process tasks of this activity type. This default can be overridden when
-- scheduling an activity task using the 'ScheduleActivityTask' decision.
--
-- The duration is specified in seconds; an integer greater than or equal
-- to 0. The value \"NONE\" can be used to specify unlimited duration.
ratDefaultTaskStartToCloseTimeout :: Lens' RegisterActivityType (Maybe Text)
ratDefaultTaskStartToCloseTimeout = lens _ratDefaultTaskStartToCloseTimeout (\ s a -> s{_ratDefaultTaskStartToCloseTimeout = a});
-- | A textual description of the activity type.
ratDescription :: Lens' RegisterActivityType (Maybe Text)
ratDescription = lens _ratDescription (\ s a -> s{_ratDescription = a});
-- | The name of the domain in which this activity is to be registered.
ratDomain :: Lens' RegisterActivityType Text
ratDomain = lens _ratDomain (\ s a -> s{_ratDomain = a});
-- | The name of the activity type within the domain.
--
-- The specified string must not start or end with whitespace. It must not
-- contain a ':' (colon), '\/' (slash), '|' (vertical bar), or any control
-- characters (\\u0000-\\u001f | \\u007f - \\u009f). Also, it must not
-- contain the literal string \"arn\".
ratName :: Lens' RegisterActivityType Text
ratName = lens _ratName (\ s a -> s{_ratName = a});
-- | The version of the activity type.
--
-- The activity type consists of the name and version, the combination of
-- which must be unique within the domain.
--
-- The specified string must not start or end with whitespace. It must not
-- contain a ':' (colon), '\/' (slash), '|' (vertical bar), or any control
-- characters (\\u0000-\\u001f | \\u007f - \\u009f). Also, it must not
-- contain the literal string \"arn\".
ratVersion :: Lens' RegisterActivityType Text
ratVersion = lens _ratVersion (\ s a -> s{_ratVersion = a});
instance AWSRequest RegisterActivityType where
type Rs RegisterActivityType =
RegisterActivityTypeResponse
request = postJSON sWF
response = receiveNull RegisterActivityTypeResponse'
instance ToHeaders RegisterActivityType where
toHeaders
= const
(mconcat
["X-Amz-Target" =#
("SimpleWorkflowService.RegisterActivityType" ::
ByteString),
"Content-Type" =#
("application/x-amz-json-1.0" :: ByteString)])
instance ToJSON RegisterActivityType where
toJSON RegisterActivityType'{..}
= object
(catMaybes
[("defaultTaskScheduleToStartTimeout" .=) <$>
_ratDefaultTaskScheduleToStartTimeout,
("defaultTaskList" .=) <$> _ratDefaultTaskList,
("defaultTaskPriority" .=) <$>
_ratDefaultTaskPriority,
("defaultTaskHeartbeatTimeout" .=) <$>
_ratDefaultTaskHeartbeatTimeout,
("defaultTaskScheduleToCloseTimeout" .=) <$>
_ratDefaultTaskScheduleToCloseTimeout,
("defaultTaskStartToCloseTimeout" .=) <$>
_ratDefaultTaskStartToCloseTimeout,
("description" .=) <$> _ratDescription,
Just ("domain" .= _ratDomain),
Just ("name" .= _ratName),
Just ("version" .= _ratVersion)])
instance ToPath RegisterActivityType where
toPath = const "/"
instance ToQuery RegisterActivityType where
toQuery = const mempty
-- | /See:/ 'registerActivityTypeResponse' smart constructor.
data RegisterActivityTypeResponse =
RegisterActivityTypeResponse'
deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'RegisterActivityTypeResponse' with the minimum fields required to make a request.
--
registerActivityTypeResponse
:: RegisterActivityTypeResponse
registerActivityTypeResponse = RegisterActivityTypeResponse'
| fmapfmapfmap/amazonka | amazonka-swf/gen/Network/AWS/SWF/RegisterActivityType.hs | mpl-2.0 | 12,078 | 0 | 12 | 2,427 | 1,201 | 745 | 456 | 141 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.RDS.RemoveSourceIdentifierFromSubscription
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Removes a source identifier from an existing RDS event notification
-- subscription.
--
-- /See:/ <http://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_RemoveSourceIdentifierFromSubscription.html AWS API Reference> for RemoveSourceIdentifierFromSubscription.
module Network.AWS.RDS.RemoveSourceIdentifierFromSubscription
(
-- * Creating a Request
removeSourceIdentifierFromSubscription
, RemoveSourceIdentifierFromSubscription
-- * Request Lenses
, rsifsSubscriptionName
, rsifsSourceIdentifier
-- * Destructuring the Response
, removeSourceIdentifierFromSubscriptionResponse
, RemoveSourceIdentifierFromSubscriptionResponse
-- * Response Lenses
, rsifsrsEventSubscription
, rsifsrsResponseStatus
) where
import Network.AWS.Prelude
import Network.AWS.RDS.Types
import Network.AWS.RDS.Types.Product
import Network.AWS.Request
import Network.AWS.Response
-- |
--
-- /See:/ 'removeSourceIdentifierFromSubscription' smart constructor.
data RemoveSourceIdentifierFromSubscription = RemoveSourceIdentifierFromSubscription'
{ _rsifsSubscriptionName :: !Text
, _rsifsSourceIdentifier :: !Text
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'RemoveSourceIdentifierFromSubscription' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rsifsSubscriptionName'
--
-- * 'rsifsSourceIdentifier'
removeSourceIdentifierFromSubscription
:: Text -- ^ 'rsifsSubscriptionName'
-> Text -- ^ 'rsifsSourceIdentifier'
-> RemoveSourceIdentifierFromSubscription
removeSourceIdentifierFromSubscription pSubscriptionName_ pSourceIdentifier_ =
RemoveSourceIdentifierFromSubscription'
{ _rsifsSubscriptionName = pSubscriptionName_
, _rsifsSourceIdentifier = pSourceIdentifier_
}
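-- A hypothetical usage sketch (not part of the generated module); the
-- subscription and source identifier values are made up:
--
-- > removeSourceIdentifierFromSubscription "my-subscription" "mydbinstance1"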
-- | The name of the RDS event notification subscription you want to remove a
-- source identifier from.
rsifsSubscriptionName :: Lens' RemoveSourceIdentifierFromSubscription Text
rsifsSubscriptionName = lens _rsifsSubscriptionName (\ s a -> s{_rsifsSubscriptionName = a});
-- | The source identifier to be removed from the subscription, such as the
-- __DB instance identifier__ for a DB instance or the name of a security
-- group.
rsifsSourceIdentifier :: Lens' RemoveSourceIdentifierFromSubscription Text
rsifsSourceIdentifier = lens _rsifsSourceIdentifier (\ s a -> s{_rsifsSourceIdentifier = a});
instance AWSRequest
RemoveSourceIdentifierFromSubscription where
type Rs RemoveSourceIdentifierFromSubscription =
RemoveSourceIdentifierFromSubscriptionResponse
request = postQuery rDS
response
= receiveXMLWrapper
"RemoveSourceIdentifierFromSubscriptionResult"
(\ s h x ->
RemoveSourceIdentifierFromSubscriptionResponse' <$>
(x .@? "EventSubscription") <*> (pure (fromEnum s)))
instance ToHeaders
RemoveSourceIdentifierFromSubscription where
toHeaders = const mempty
instance ToPath
RemoveSourceIdentifierFromSubscription where
toPath = const "/"
instance ToQuery
RemoveSourceIdentifierFromSubscription where
toQuery RemoveSourceIdentifierFromSubscription'{..}
= mconcat
["Action" =:
("RemoveSourceIdentifierFromSubscription" ::
ByteString),
"Version" =: ("2014-10-31" :: ByteString),
"SubscriptionName" =: _rsifsSubscriptionName,
"SourceIdentifier" =: _rsifsSourceIdentifier]
-- | /See:/ 'removeSourceIdentifierFromSubscriptionResponse' smart constructor.
data RemoveSourceIdentifierFromSubscriptionResponse = RemoveSourceIdentifierFromSubscriptionResponse'
{ _rsifsrsEventSubscription :: !(Maybe EventSubscription)
, _rsifsrsResponseStatus :: !Int
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'RemoveSourceIdentifierFromSubscriptionResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rsifsrsEventSubscription'
--
-- * 'rsifsrsResponseStatus'
removeSourceIdentifierFromSubscriptionResponse
:: Int -- ^ 'rsifsrsResponseStatus'
-> RemoveSourceIdentifierFromSubscriptionResponse
removeSourceIdentifierFromSubscriptionResponse pResponseStatus_ =
RemoveSourceIdentifierFromSubscriptionResponse'
{ _rsifsrsEventSubscription = Nothing
, _rsifsrsResponseStatus = pResponseStatus_
}
-- | Undocumented member.
rsifsrsEventSubscription :: Lens' RemoveSourceIdentifierFromSubscriptionResponse (Maybe EventSubscription)
rsifsrsEventSubscription = lens _rsifsrsEventSubscription (\ s a -> s{_rsifsrsEventSubscription = a});
-- | The response status code.
rsifsrsResponseStatus :: Lens' RemoveSourceIdentifierFromSubscriptionResponse Int
rsifsrsResponseStatus = lens _rsifsrsResponseStatus (\ s a -> s{_rsifsrsResponseStatus = a});
| fmapfmapfmap/amazonka | amazonka-rds/gen/Network/AWS/RDS/RemoveSourceIdentifierFromSubscription.hs | mpl-2.0 | 5,777 | 0 | 13 | 1,026 | 627 | 377 | 250 | 89 | 1 |
{-# OPTIONS_GHC -cpp -fglasgow-exts #-}
-------------------------------------------------------------------------------------------
-- |
-- Module : Control.Functor.HigherOrder.Composition
-- Copyright : 2008 Edward Kmett
-- License : BSD
--
-- Maintainer : Edward Kmett <[email protected]>
-- Stability : experimental
-- Portability : non-portable (kind annotations, rank-2 types)
--
-- Composition of higher order functors
-------------------------------------------------------------------------------------------
module Control.Functor.HigherOrder.Composition
( CompH(..)
, HComposition(..)
, hassociateComposition
, hcoassociateComposition
) where
import Control.Functor.HigherOrder
class HComposition
(o :: ((* -> *) -> * -> *) ->
((* -> *) -> * -> *) ->
((* -> *) -> * -> *)) where
hcompose :: f (g h) a -> (f `o` g) h a
hdecompose :: (f `o` g) h a -> f (g h) a
newtype CompH
(f :: ((* -> *) -> * -> *))
(g :: ((* -> *) -> * -> *))
(a :: (* -> *)) (b :: *) = CompH { runCompH :: f (g a) b }
instance HComposition CompH where
hcompose = CompH
hdecompose = runCompH
instance (HFunctor f, HFunctor g) => HFunctor (CompH f g) where
hfmap f = hcompose . hfmap (hfmap f) . hdecompose
ffmap f = hcompose . hfmap liftH . ffmap f . hfmap LowerH . hdecompose
instance (HFunctor f, HFunctor g, Functor h) => Functor (CompH f g h) where
fmap = ffmap
hassociateComposition :: (HFunctor f, HComposition o) => ((f `o` g) `o` h) a b -> (f `o` (g `o` h)) a b
hassociateComposition = hcompose . hfmap hcompose . hdecompose . hdecompose
hcoassociateComposition :: (HFunctor f, HComposition o) => (f `o` (g `o` h)) a b -> ((f `o` g) `o` h) a b
hcoassociateComposition = hcompose . hcompose . hfmap hdecompose . hdecompose
| urska19/MFP---Samodejno-racunanje-dvosmernih-preslikav | Control/Functor/HigherOrder/Composition.hs | apache-2.0 | 1,754 | 26 | 13 | 326 | 610 | 342 | 268 | -1 | -1 |
-- | Web server.
module Main where
import HL.Dispatch ()
import HL.Foundation
import Control.Concurrent.Chan
import Yesod
import Yesod.Static
-- | Main entry point.
main :: IO ()
main =
do s <- static "static"
c <- newChan
warp 1990 (App s c)
| chrisdone/hl | src/Main.hs | bsd-3-clause | 259 | 0 | 9 | 58 | 81 | 44 | 37 | 11 | 1 |
default ()
undef = undef
forceNum :: Num a => a -> a
forceNum x = x
mono x = forceNum x
mono' = mono
genericLength :: Num a => [b] -> a
genericLength = undef
f xs = let len = genericLength xs
in (len,len)
| themattchan/tandoori | input/mono-restrict2.hs | bsd-3-clause | 242 | 3 | 9 | 83 | 110 | 53 | 57 | 10 | 1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="pl-PL">
<title>Linux WebDrivers</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | thc202/zap-extensions | addOns/webdrivers/webdriverlinux/src/main/javahelp/org/zaproxy/zap/extension/webdriverlinux/resources/help_pl_PL/helpset_pl_PL.hs | apache-2.0 | 961 | 89 | 29 | 156 | 389 | 209 | 180 | -1 | -1 |
{-# LANGUAGE CPP #-}
#if !defined(TESTING) && __GLASGOW_HASKELL__ >= 703
{-# LANGUAGE Trustworthy #-}
#endif
-----------------------------------------------------------------------------
-- |
-- Module : Data.IntMap
-- Copyright : (c) Daan Leijen 2002
-- (c) Andriy Palamarchuk 2008
-- License : BSD-style
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
--
-- An efficient implementation of maps from integer keys to values
-- (dictionaries).
--
-- This module re-exports the value lazy 'Data.IntMap.Lazy' API, plus
-- several value strict functions from 'Data.IntMap.Strict'.
--
-- These modules are intended to be imported qualified, to avoid name
-- clashes with Prelude functions, e.g.
--
-- > import Data.IntMap (IntMap)
-- > import qualified Data.IntMap as IntMap
--
-- The implementation is based on /big-endian patricia trees/. This data
-- structure performs especially well on binary operations like 'union'
-- and 'intersection'. However, my benchmarks show that it is also
-- (much) faster on insertions and deletions when compared to a generic
-- size-balanced map implementation (see "Data.Map").
--
-- * Chris Okasaki and Andy Gill, \"/Fast Mergeable Integer Maps/\",
-- Workshop on ML, September 1998, pages 77-86,
-- <http://citeseer.ist.psu.edu/okasaki98fast.html>
--
-- * D.R. Morrison, \"/PATRICIA -- Practical Algorithm To Retrieve
-- Information Coded In Alphanumeric/\", Journal of the ACM, 15(4),
-- October 1968, pages 514-534.
--
-- Operation comments contain the operation time complexity in
-- the Big-O notation <http://en.wikipedia.org/wiki/Big_O_notation>.
-- Many operations have a worst-case complexity of /O(min(n,W))/.
-- This means that the operation can become linear in the number of
-- elements with a maximum of /W/ -- the number of bits in an 'Int'
-- (32 or 64).
-----------------------------------------------------------------------------
module Data.IntMap
( module Data.IntMap.Lazy
, insertWith'
, insertWithKey'
, fold
, foldWithKey
) where
import Prelude hiding (lookup,map,filter,foldr,foldl,null)
import Data.IntMap.Lazy
import qualified Data.IntMap.Strict as S
-- | /Deprecated./ As of version 0.5, replaced by 'S.insertWith'.
--
-- /O(log n)/. Same as 'insertWith', but the combining function is
-- applied strictly. This function is deprecated, use 'insertWith' in
-- "Data.IntMap.Strict" instead.
insertWith' :: (a -> a -> a) -> Key -> a -> IntMap a -> IntMap a
insertWith' = S.insertWith
{-# INLINE insertWith' #-}
-- | /Deprecated./ As of version 0.5, replaced by 'S.insertWithKey'.
--
-- /O(log n)/. Same as 'insertWithKey', but the combining function is
-- applied strictly. This function is deprecated, use 'insertWithKey'
-- in "Data.IntMap.Strict" instead.
insertWithKey' :: (Key -> a -> a -> a) -> Key -> a -> IntMap a -> IntMap a
insertWithKey' = S.insertWithKey
{-# INLINE insertWithKey' #-}
-- | /Deprecated./ As of version 0.5, replaced by 'foldr'.
--
-- /O(n)/. Fold the values in the map using the given
-- right-associative binary operator. This function is an equivalent
-- of 'foldr' and is present for compatibility only.
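--
-- An illustrative example (not from the original documentation):
--
-- > fold (+) 0 (fromList [(1,10), (2,20)]) == 30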
fold :: (a -> b -> b) -> b -> IntMap a -> b
fold = foldr
{-# INLINE fold #-}
-- | /Deprecated./ As of version 0.5, replaced by 'foldrWithKey'.
--
-- /O(n)/. Fold the keys and values in the map using the given
-- right-associative binary operator. This function is an equivalent
-- of 'foldrWithKey' and is present for compatibility only.
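--
-- An illustrative example (not from the original documentation):
--
-- > foldWithKey (\k v acc -> k + v + acc) 0 (fromList [(1,10), (2,20)]) == 33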
foldWithKey :: (Int -> a -> b -> b) -> b -> IntMap a -> b
foldWithKey = foldrWithKey
{-# INLINE foldWithKey #-}
| technogeeky/d-A | include/containers-0.5.0.0/Data/IntMap.hs | gpl-3.0 | 3,685 | 0 | 9 | 640 | 307 | 204 | 103 | 22 | 1 |
module Test where
import Lib
import Distribution.TestSuite
tests :: IO [Test]
tests = return [Test bar]
where
bar = TestInstance
{ run = return $ Finished run
, name = "test"
, tags = []
, options = []
, setOption = \_ _ -> Right bar
}
run = if foo then Pass else Fail "should pass"
| themoritz/cabal | cabal-testsuite/PackageTests/Regression/T4270/Test.hs | bsd-3-clause | 344 | 0 | 10 | 119 | 113 | 65 | 48 | 12 | 2 |
{-# LANGUAGE ForeignFunctionInterface, GHCForeignImportPrim, CPP,
MagicHash, UnboxedTuples, UnliftedFFITypes, BangPatterns #-}
{-# OPTIONS_GHC -XNoImplicitPrelude #-}
{-# OPTIONS_HADDOCK hide #-}
module GHC.Integer.GMP.Prim (
cmpInteger#,
cmpIntegerInt#,
plusInteger#,
minusInteger#,
timesInteger#,
quotRemInteger#,
quotInteger#,
remInteger#,
divModInteger#,
divInteger#,
modInteger#,
decodeDouble#,
int2Integer#,
integer2Int#,
word2Integer#,
integer2Word#,
andInteger#,
orInteger#,
xorInteger#,
complementInteger#,
shiftLInteger#,
shiftRInteger#,
int64ToInteger#, integerToInt64#,
word64ToInteger#, integerToWord64#,
toFloat#, toDouble#,
negateInteger#,
integerToJSString#,
fromRat#
) where
import GHC.Prim
-- Double isn't available yet, and we shouldn't be using defaults anyway:
default ()
-- | Returns -1,0,1 according as first argument is less than, equal to, or greater than second argument.
--
foreign import prim "I_compare" cmpInteger#
:: ByteArray# -> ByteArray# -> Int#
-- | Returns -1,0,1 according as first argument is less than, equal to, or greater than second argument, which
-- is an ordinary Int\#.
foreign import prim "I_compareInt" cmpIntegerInt#
:: ByteArray# -> Int# -> Int#
-- |
--
foreign import prim "I_add" plusInteger#
:: ByteArray# -> ByteArray# -> ByteArray#
-- |
--
foreign import prim "I_sub" minusInteger#
:: ByteArray# -> ByteArray# -> ByteArray#
-- |
--
foreign import prim "I_mul" timesInteger#
:: ByteArray# -> ByteArray# -> ByteArray#
-- | Compute quot and rem simultaneously, where quot rounds towards zero
-- and @(q,r) = quotRemInteger# x y@ implies
-- @plusInteger# (timesInteger# q y) r = x@.
--
foreign import prim "I_quotRem" quotRemInteger#
:: ByteArray# -> ByteArray# -> (# ByteArray#, ByteArray# #)
-- | Rounds towards zero.
--
foreign import prim "I_quot" quotInteger#
:: ByteArray# -> ByteArray# -> ByteArray#
-- | Satisfies @plusInteger# (timesInteger# (quotInteger# x y) y) (remInteger# x y) == x@.
--
foreign import prim "I_rem" remInteger#
:: ByteArray# -> ByteArray# -> ByteArray#
-- | Compute div and mod simultaneously, where div rounds towards negative infinity
-- and @(q,r) = divModInteger# x y@ implies @plusInteger# (timesInteger# q y) r = x@.
--
foreign import prim "I_divMod" divModInteger#
:: ByteArray# -> ByteArray# -> (# ByteArray#, ByteArray# #)
foreign import prim "I_div" divInteger#
:: ByteArray# -> ByteArray# -> ByteArray#
foreign import prim "I_mod" modInteger#
:: ByteArray# -> ByteArray# -> ByteArray#
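-- A small worked illustration (not part of the original module), stated in
-- terms of the boxed operations these primitives implement:
--
-- >  (-7) `quot` 2 == -3   and   (-7) `rem` 2 == -1   -- rounds towards zero
-- >  (-7) `div`  2 == -4   and   (-7) `mod` 2 ==  1   -- rounds towards negative infinity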
-------
-- | Convert to an arbitrary-precision integer.
--    The Int\# in the result is the exponent; the ByteArray\# holds the
--    mantissa of the Integer\#.
--
foreign import prim "I_decodeDouble" decodeDouble#
:: Double# -> (# Int#, ByteArray# #)
-- |
--
foreign import prim "I_fromInt" int2Integer#
:: Int# -> ByteArray#
-- |
--
foreign import prim "I_fromInt" word2Integer#
:: Word# -> ByteArray#
-- |
--
foreign import prim "I_and" andInteger#
:: ByteArray# -> ByteArray# -> ByteArray#
-- |
--
foreign import prim "I_or" orInteger#
:: ByteArray# -> ByteArray# -> ByteArray#
-- |
--
foreign import prim "I_xor" xorInteger#
:: ByteArray# -> ByteArray# -> ByteArray#
-- |
--
foreign import prim "I_complement" complementInteger#
:: ByteArray# -> ByteArray#
foreign import prim "I_fromInt64" int64ToInteger#
:: Int64# -> ByteArray#
foreign import prim "I_fromWord64" word64ToInteger#
:: Word64# -> ByteArray#
foreign import prim "I_toInt64"
integerToInt64# :: ByteArray# -> Int64#
foreign import prim "I_toWord64"
integerToWord64# :: ByteArray# -> Word64#
foreign import prim "I_toInt"
integer2Int# :: ByteArray# -> Int#
foreign import prim "I_negate"
negateInteger# :: ByteArray# -> ByteArray#
foreign import prim "I_abs"
absInteger# :: ByteArray# -> ByteArray#
foreign import prim "I_toNumber"
toDouble# :: ByteArray# -> Double#
foreign import prim "I_toNumber"
toFloat# :: ByteArray# -> Float#
foreign import prim "I_shiftLeft"
shiftLInteger# :: ByteArray# -> Int# -> ByteArray#
foreign import prim "I_shiftRight"
shiftRInteger# :: ByteArray# -> Int# -> ByteArray#
foreign import prim "I_toString"
integerToJSString# :: ByteArray# -> ByteArray#
foreign import prim "I_fromRat"
fromRat# :: ByteArray# -> ByteArray# -> Double#
integer2Word# :: ByteArray# -> Word#
integer2Word# n = int2Word# (integer2Int# n)
| joelburget/haste-compiler | libraries/integer-gmp/GHC/Integer/GMP/Prim.hs | bsd-3-clause | 4,635 | 0 | 8 | 847 | 759 | 452 | 307 | -1 | -1 |
{-# LANGUAGE ScopedTypeVariables #-}
module Main where
import Control.Monad.ST
import Data.Primitive
main :: IO ()
main = do
let xs :: [Float] = runST $ do
barr <- mutableByteArrayFromList [1..fromIntegral n::Float]
peekByteArray n barr
print xs
where
n = 13
mutableByteArrayFromList :: forall s a . (Prim a)
=> [a]
-> ST s (MutableByteArray s)
mutableByteArrayFromList xs = do
arr <- newByteArray (length xs*sizeOf (undefined :: a))
loop arr 0 xs
return arr
where
loop :: (Prim a) => MutableByteArray s -> Int -> [a] -> ST s ()
loop _ _ [] = return ()
loop arr i (x : xs) = do
writeByteArray arr i x
loop arr (i+1) xs
peekByteArray :: (Prim a)
=> Int
-> MutableByteArray s
-> ST s [a]
peekByteArray n arr =
loop 0 arr
where
loop :: (Prim a)
=> Int
-> MutableByteArray s
-> ST s [a]
loop i _ | i >= n = return []
loop i arr = do
x <- readByteArray arr i
xs <- loop (i+1) arr
return (x : xs)
| urbanslug/ghc | testsuite/tests/deriving/should_compile/T8138.hs | bsd-3-clause | 1,132 | 0 | 16 | 416 | 453 | 223 | 230 | -1 | -1 |
{-# OPTIONS_GHC -fno-warn-redundant-constraints #-}
{-# LANGUAGE MultiParamTypeClasses, FunctionalDependencies,
FlexibleInstances #-}
-- !!! Functional dependencies
-- This broke an early impl of functional dependencies
-- (complaining about ambiguity)
module ShouldCompile where
class Foo r a | r -> a where
foo :: r -> a
instance Foo [m a] (m a)
bad:: Monad m => m a
bad = foo bar
bar:: Monad m => [m a]
bar = []
| urbanslug/ghc | testsuite/tests/typecheck/should_compile/tc115.hs | bsd-3-clause | 437 | 0 | 7 | 91 | 104 | 56 | 48 | 11 | 1 |
-- !!! Nullary rec-pats for constructors that haven't got any labelled
-- !!! fields are legal Haskell, and require extra care in the desugarer.
module ShouldCompile where
data X = X Int [Int]
f :: X -> Int
f (X _ []) = 0
f X{} = 1
| urbanslug/ghc | testsuite/tests/deSugar/should_compile/ds047.hs | bsd-3-clause | 238 | 0 | 8 | 55 | 59 | 33 | 26 | 5 | 1 |
module ResetCommand
where
import Candidate
import GitCommand
import GitCommandType
import GitStatus
import Alfred
import CommandOption
data ResetCommand = ResetCommand
instance Candidate ResetCommand where
    name c = name $ commandType c
    description c = description $ commandType c
    isAvailable c = isAvailable $ commandType c
instance GitCommand ResetCommand where
commandType cmdd = AddCommandType
commandOptions cmd = [ (["-q"], "", False)
, (["-p", "--patch"], "", False)
, (["--sort, --mixed", "--hard", "--merge", "--keep"], "", True)
]
processCommand cmd args = do
Right ss <- pathStatuses
        let paths = map (\(_,_,p,_) -> PathCandidate p) $ filter canReset ss
putFeedbacks (makeFeedbacks cmd args [] paths)
where canReset (i,_,_,_) = (elem i "MARC")
data GitResetMode = GitIndexReset
| GitInteractiveIndexReset
| GitCommitReset
data TreeIsh = HEAD
makeResetFeedbacks :: (GitCommand a) => a -> [String] -> [String] -> [PathCandidate] -> [Feedback]
makeResetFeedbacks cmd (a:as) accArgs paths =
case readArg cmd a accArgs paths of
OptionArg (CommandOptionCandidate "--sort" _) -> makeCommitResetFeedbacks cmd as accArgs paths
OptionArg (CommandOptionCandidate "--mixed" _) -> makeCommitResetFeedbacks cmd as accArgs paths
OptionArg (CommandOptionCandidate "--hard" _) -> makeCommitResetFeedbacks cmd as accArgs paths
OptionArg (CommandOptionCandidate "--merge" _) -> makeCommitResetFeedbacks cmd as accArgs paths
OptionArg (CommandOptionCandidate "--keep" _) -> makeCommitResetFeedbacks cmd as accArgs paths
PartialOptionArg arg -> map (optionCandidateFeedback cmd accArgs) arg
PathArg path -> makeFeedbacks cmd as accArgs paths
PartialPathArg path -> map (pathCandidateFeedback cmd accArgs) path
makeCommitResetFeedbacks :: (GitCommand a) => a -> [String] -> [String] -> [PathCandidate] -> [Feedback]
makeCommitResetFeedbacks cmd (a:as) accArgs commits =
case readArg cmd a accArgs commits of
PartialOptionArg arg -> map (optionCandidateFeedback cmd accArgs) arg
PartialPathArg cs -> map (pathCandidateFeedback cmd accArgs) cs
PathArg commit -> (commandFeedback cmd ((name commit):accArgs)):makeFeedbacks cmd [] ((name commit):accArgs) commits
| yamamotoj/alfred-git-workflow | src/ResetCommand.hs | mit | 2,438 | 0 | 15 | 565 | 741 | 381 | 360 | 43 | 8 |
-- |
-- Module : Graphics.Michelangelo.Shaders
-- Description : OpenGL shader utilities
-- Copyright : (c) Jonatan H Sundqvist, 2015
-- License : MIT
-- Maintainer : Jonatan H Sundqvist
-- Stability : experimental|stable
-- Portability : POSIX (not sure)
--
-- Created July 27 2015
-- TODO | -
-- -
-- SPEC | -
-- -
--------------------------------------------------------------------------------------------------------------------------------------------
-- Pragmas
--------------------------------------------------------------------------------------------------------------------------------------------
{-# LANGUAGE TypeSynonymInstances #-} --
{-# LANGUAGE FlexibleInstances #-} --
--------------------------------------------------------------------------------------------------------------------------------------------
-- API
--------------------------------------------------------------------------------------------------------------------------------------------
module Graphics.Michelangelo.Shaders where
--------------------------------------------------------------------------------------------------------------------------------------------
-- We'll need these
--------------------------------------------------------------------------------------------------------------------------------------------
import qualified Graphics.Rendering.OpenGL as GL
import Graphics.Rendering.OpenGL (($=))
-- import Graphics.Rendering.OpenGL.GL.Shaders.ShaderObjects
import Graphics.Rendering.OpenGL.GL.Shaders
import Graphics.GLUtil hiding (loadShaderProgram)
import qualified Graphics.Rendering.OpenGL.Raw as GLRaw
import Linear.Matrix
import Linear.Projection
import Linear.Quaternion
import Linear.V3
import Linear.V4
import Foreign.Storable (Storable)
import Foreign.Ptr (castPtr, Ptr())
import qualified Foreign.Marshal.Utils as Marshal (with)
import qualified Data.Map as Map
import Control.Lens
import Control.Exception
import Control.Monad (forM)
import Text.Printf
import Graphics.Michelangelo.Types
--------------------------------------------------------------------------------------------------------------------------------------------
-- Functions
--------------------------------------------------------------------------------------------------------------------------------------------
-- |
-- TODO: Improve control flow
-- TODO: Improve error checking (eg. which logs belong to which part, check errors at each stage?)
-- TODO: Catch exceptions
-- TODO: Program crashes when the source strings are empty
-- TODO: Optional logging layer (?)
-- TODO: Use Monad transformer to make 'bailing-out' easier (?)
createShaderProgram :: String -> String -> IO (Either [String] GL.Program)
createShaderProgram vsource psource = do
putStrLn "Creating shader program"
program <- GL.createProgram
vshader <- GL.createShader VertexShader
pshader <- GL.createShader FragmentShader
case (vsource, psource) of
("", _) -> return $ Left ["Empty vertex shader source"]
(_, "") -> return $ Left ["Empty pixel shader source"]
_ -> do
putStrLn "Setting vertex shader source"
shaderSourceBS vshader $= packUtf8 vsource
putStrLn "Compiling vertex shader"
compileShader vshader
putStrLn "Setting fragment shader source"
shaderSourceBS pshader $= packUtf8 psource
compileShader pshader
-- putStrLn "Compiling shaders..."
vstatus <- GL.get $ compileStatus vshader
printf "Vertex shader %s compiled successfully.\n" (if vstatus then "was" else "was not")
pstatus <- GL.get $ compileStatus pshader
printf "Vertex pixel %s compiled successfully.\n" (if pstatus then "was" else "was not")
if vstatus && pstatus
then do
putStrLn "Successfully compiled shaders. Linking program..."
mapM (GL.attachShader program) [vshader, pshader]
GL.linkProgram program
linked <- GL.get $ GL.linkStatus program
if linked
then return $ Right program
else mapM GL.get [GL.shaderInfoLog vshader, GL.shaderInfoLog pshader, GL.programInfoLog program] >>= return . Left
else mapM (GL.get . GL.shaderInfoLog) [vshader, pshader] >>= return . Left
-- |
-- TODO: Rename (?)
-- TODO: Pass in uniforms by name or by location (?)
setShaderUniforms :: GL.Program -> [(GL.UniformLocation, UniformValue)] -> IO ()
setShaderUniforms theprogram theuniforms = do
-- Set uniforms
-- mapM ((>> printErrorMsg "Setting uniform") . uncurry uniform) theuniforms
-- TODO: Refactor
forM theuniforms $ \(loc, value) -> case value of
UMatrix44 mat -> uniform loc $= mat
UFloat f -> uniform loc $= f
UInt i -> uniform loc $= i
-- UVec vec -> uniform loc $= vec
return ()
-- |
loadShaderProgram :: String -> String -> IO (Either [String] GL.Program)
loadShaderProgram vpath ppath = do
[vsource, psource] <- mapM readFile [vpath, ppath]
catch
(createShaderProgram vsource psource) --
caught -- TODO: More elaborate exception message (?)
where
caught :: IOException -> IO (Either [String] GL.Program)
caught _ = return $ Left ["Unable to open file."]
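-- A minimal usage sketch (not part of the original module; the shader file
-- names below are assumptions):
--
-- > demo :: IO ()
-- > demo = do
-- >   result <- loadShaderProgram "shaders/basic.vert" "shaders/basic.frag"
-- >   case result of
-- >     Left errs     -> mapM_ putStrLn errs
-- >     Right program -> GL.currentProgram $= Just program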
--------------------------------------------------------------------------------------------------------------------------------------------
-- Uniforms
--------------------------------------------------------------------------------------------------------------------------------------------
-- |
-- uniform :: GL.UniformLocation -> UniformValue -> IO ()
-- uniform (GL.UniformLocation loc) (UMatrix44 mat) = Marshal.with mat $ \ptr -> GLRaw.glUniformMatrix4fv loc 1 0 (castPtr (ptr :: Ptr (M44 Float)))
-- uniform (GL.UniformLocation loc) (UVec3 vec) = Marshal.with vec $ \ptr -> GLRaw.glUniform3fv loc 1 (castPtr (ptr :: Ptr (V3 Float)))
-- uniform (GL.UniformLocation loc) (UFloat f) = Marshal.with f $ \ptr -> GLRaw.glUniform1fv loc 1 (castPtr (ptr :: Ptr (Float)))
-- uniform (GL.UniformLocation loc) (UInt i) = Marshal.with i $ \ptr -> GLRaw.glUniform1iv loc 1 (castPtr (ptr :: Ptr (Int)))
-- TODO: Move instances to Instances or Uniform module (?)
-- TODO: Transpose via OpenGL or Linear (?)
-- |
-- TODO: Better names (?)
-- class UniformValue u where
-- setUniform :: (Storable u) => GL.GLint -> GL.GLsizei -> Ptr GL.GLfloat -> IO ()
-- Scalars
-- Vectors
-- instance UniformValue (M44 Float) where
-- setUniform = GLRaw.glUniformMatrix3fv
-- Matrices
-- instance UniformValue (M22 Float) where
-- setUniform = GLRaw.glUniformMatrix2fv
-- instance UniformValue (M33 Float) where
-- setUniform = GLRaw.glUniformMatrix3fv
-- instance UniformValue (M44 Float) where
-- setUniform = GLRaw.glUniformMatrix4fv
-- glUniform1f :: GLint -> GLfloat -> IO ()
-- glUniform2f :: GLint -> GLfloat -> GLfloat -> IO ()
-- glUniform3f :: GLint -> GLfloat -> GLfloat -> GLfloat -> IO ()
-- glUniform4f :: GLint -> GLfloat -> GLfloat -> GLfloat -> GLfloat -> IO ()
-- glUniform1i :: GLint -> GLint -> IO ()
-- glUniform2i :: GLint -> GLint -> GLint -> IO ()
-- glUniform3i :: GLint -> GLint -> GLint -> GLint -> IO ()
-- glUniform4i :: GLint -> GLint -> GLint -> GLint -> GLint -> IO ()
-- glUniform1fv :: GLint -> GLsizei -> Ptr GLfloat -> IO () --
-- glUniform2fv :: GLint -> GLsizei -> Ptr GLfloat -> IO () --
-- glUniform3fv :: GLint -> GLsizei -> Ptr GLfloat -> IO () --
-- glUniform4fv :: GLint -> GLsizei -> Ptr GLfloat -> IO () --
-- glUniform1iv :: GLint -> GLsizei -> Ptr GLint -> IO () --
-- glUniform2iv :: GLint -> GLsizei -> Ptr GLint -> IO () --
-- glUniform3iv :: GLint -> GLsizei -> Ptr GLint -> IO () --
-- glUniform4iv :: GLint -> GLsizei -> Ptr GLint -> IO () --
-- glUniformMatrix2fv :: GLint -> GLsizei -> GLboolean -> Ptr GLfloat -> IO () -- (✓)
-- glUniformMatrix3fv :: GLint -> GLsizei -> GLboolean -> Ptr GLfloat -> IO () -- (✓)
-- glUniformMatrix4fv :: GLint -> GLsizei -> GLboolean -> Ptr GLfloat -> IO () -- (✓)
| SwiftsNamesake/Michelangelo | src/Graphics/Michelangelo/Shaders.hs | mit | 8,106 | 0 | 21 | 1,441 | 925 | 524 | 401 | 68 | 7 |
{-|
Module : Web.Facebook.Messenger.Types.Callbacks.PreCheckout
Copyright : (c) Felix Paulusma, 2016
License : MIT
Maintainer : [email protected]
Stability : semi-experimental
This callback will occur when a user clicks on Pay in the payment dialog, but before the user's card is charged.
This allows you to do any processing on your end before charging user's card.
You could check inventory levels or for price changes before accepting the payment.
Subscribe to this callback by selecting the @"messaging_pre_checkouts"@ option when setting up your webhook.
If your app does not subscribe to this event, after the user clicks on Pay we will process the payment directly.
The event is only called for payments triggered via `BuyButton`, and not for those triggered via webview.
https://developers.facebook.com/docs/messenger-platform/reference/webhook-events/messaging_pre_checkouts
-}
module Web.Facebook.Messenger.Types.Callbacks.PreCheckout (
-- * Pre-Checkout Callback
PreCheckout (..)
)
where
import Data.Aeson
import Data.Text (Text)
import Web.Facebook.Messenger.Types.Callbacks.Payment (RequestedUserInfo, Amount)
-- | This callback is sent just before charging the user
--
-- You must respond to the callback with an HTTP status of @200@
-- and the body of the response must contain a success field
-- to indicate whether the pre checkout processing went through.
-- If success returned is false, we will not charge the user and
-- fail the payment flow. Otherwise we will let the payment go through.
data PreCheckout = PreCheckout
{ cpPayload :: Text -- ^ Metadata defined in the `BuyButton`.
, cpRequestedUserInfo :: RequestedUserInfo -- ^ Information that was requested from the user by the Buy Button.
, cpAmount :: Amount -- ^ Total amount of transaction.
} deriving (Eq, Show, Read, Ord)
-- --------------------------- --
-- CHECKOUT UPDATE INSTANCES --
-- --------------------------- --
instance ToJSON PreCheckout where
toJSON (PreCheckout payload rui amount) =
object [ "payload" .= payload
, "requested_user_info" .= rui
, "amount" .= amount
]
instance FromJSON PreCheckout where
parseJSON = withObject "PreCheckout" $ \o ->
PreCheckout <$> o .: "payload"
<*> o .: "requested_user_info"
<*> o .: "amount"
| Vlix/facebookmessenger | src/Web/Facebook/Messenger/Types/Callbacks/PreCheckout.hs | mit | 2,373 | 0 | 13 | 462 | 211 | 128 | 83 | 20 | 0 |
{-# LANGUAGE TemplateHaskell #-}
module Tests.Sodium.Internal where
import Tests.Sodium.Common ()
import Crypto.Sodium.Internal
import qualified Crypto.Sodium.SecureMem as SM
import Data.ByteString (ByteString)
import qualified Data.ByteString as B
import Data.Maybe
import Test.Tasty
import Test.Tasty.QuickCheck
import Test.Tasty.TH
prop_constantTimeEq :: ByteString -> ByteString -> Bool
prop_constantTimeEq bs1 bs2 = constantTimeEq bs1 bs2 == (bs1 == bs2)
prop_mkHelper_correct_length :: Positive Int -> Property
prop_mkHelper_correct_length (Positive i) = forAll (vector i) $ \v ->
isJust $ mkHelper i id (B.pack v)
prop_mkHelper :: Positive Int -> ByteString -> Bool
prop_mkHelper (Positive i) bs =
isJust (mkHelper i id bs) == (i == B.length bs)
prop_mkSecureHelper_correct_length :: Positive Int -> Property
prop_mkSecureHelper_correct_length (Positive i) =
forAll (SM.fromByteString . B.pack <$> vector i) $ \sm ->
isJust $ mkSecureHelper i id sm
prop_mkSecureHelper :: Positive Int -> ByteString -> Bool
prop_mkSecureHelper (Positive i) bs =
isJust (mkSecureHelper i id (SM.fromByteString bs)) == (i == B.length bs)
tests :: TestTree
tests = $(testGroupGenerator)
| dnaq/crypto-sodium | test/Tests/Sodium/Internal.hs | mit | 1,283 | 0 | 11 | 262 | 385 | 205 | 180 | 28 | 1 |
module Main where
import Paths_hs_nombre_generator (version)
import Control.Monad (replicateM)
import Data.Char (toLower)
import Data.Version (showVersion)
import System.Environment
import System.Exit
import Web.NombreGenerator.Generator.Candidaturas
import Web.NombreGenerator.RandomUtil
import Web.NombreGenerator.Scrapper.BsAs
type Sex = String
parseArgs :: [String] -> IO ([Cargo], Sex, Int)
parseArgs ["-h"] = printUsage >> exit
parseArgs ["-v"] = printVersion >> exit
parseArgs ["-f", cargs, listas] = return $ eval (cargs, "f", listas)
parseArgs ["-m", cargs, listas] = return $ eval (cargs, "m", listas)
parseArgs [cargs, listas] = return $ eval (cargs, "a", listas)
parseArgs _ = printUsage >> die
eval (c, s, l) = (read c :: [Cargo]) `seq` (read l :: Int) `seq` (read c :: [Cargo], s, read l :: Int)
printUsage = putStrLn "Usage: hs-nombre-generator [-h] [-m/-f] '[('CARGO', Int, False|True), etc]'"
printVersion = putStrLn $ "NombreGenerator " ++ showVersion version
exit = exitSuccess
die = exitWith (ExitFailure 1)
format :: [(String, String)] -> (String, String)
format names_sex = (c ++ ", " ++ a ++ " " ++ b, custom_sex)
where [a, b, c] = map fst names_sex
sex = snd . head $ names_sex
custom_sex = if sex == "A" then rand_sex else sex
rand_sex = if (head a > 'K') then "M" else "F"
randTriples :: Int -> [(String, String)] -> IO [(String, String)]
randTriples n list = replicateM n (fmap format $ takeRandom 3 list)
generate :: IO [Name] -> Sex -> [Cargo] -> Int -> IO [FullName]
generate names sex cargos listas = (fmap (filter isSex) . randTriples count) =<< names
where isSex (_, s) = sex == "a" || (lower s) == sex
lower = map toLower
count = (*) listas $ sum . map (\(_, n, supl) -> if supl then n * 2 else n) $ cargos
main = getArgs >>=
parseArgs >>= \(cargos, sex, listas) -> do
names <- generate scrap sex cargos listas
mapM_ putStrLn $ candidaturas2 cargos listas names
| alvare/hs-nombre-generator | Web/NombreGenerator.hs | mit | 1,993 | 0 | 13 | 407 | 782 | 436 | 346 | 40 | 3 |
{-# LANGUAGE TypeSynonymInstances #-}
module Math.Matrix
( Matrix3
, Matrix4
, identity
, (*.)
) where
import Math.Vector
import Data.Vect.Double
type Matrix3 = Mat3
type Matrix4 = Mat4
class Identity a where
identity :: a
instance Identity Matrix3 where
identity = Mat3 (Vec3 1 0 0) (Vec3 0 1 0) (Vec3 0 0 1)
instance Identity Matrix4 where
identity = Mat4 (Vec4 1 0 0 0) (Vec4 0 1 0 0) (Vec4 0 0 1 0) (Vec4 0 0 0 1)
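-- A minimal usage sketch (not part of the original module): the overloaded
-- 'identity' is resolved by the matrix type expected at its use site.
_identityExample :: Matrix3
_identityExample = identity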
| burz/Rayzer | Math/Matrix.hs | mit | 438 | 0 | 8 | 101 | 180 | 98 | 82 | 16 | 0 |
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Control.Concurrent (threadDelay)
import Control.Monad (forM_)
import qualified Data.ByteString as BS
import Data.Default (Default(..))
import Foreign.C.Types
import GameCom (step)
import Memory (MachineState(..), Color)
import PPU (getPixel)
import ROM (parseROM)
import SDL.Vect
import qualified SDL
import System.Directory (getCurrentDirectory)
width = 256
height = 240
scale = 4
-- Both delays are passed to 'threadDelay', so they are in microseconds
-- despite the "Ms" suffix in the names.
frameDelayMs = 0
endDelayMs = 5000000
romName = "full_palette"
draw :: MachineState -> SDL.Window -> IO ()
draw state window =
SDL.getWindowSurface window >>=
(\ surface ->
forM_ [0..(width - 1)] (\ x ->
forM_ [0..(height - 1)] (\ y ->
do
let (r, g, b) = getPixel (x, y) state
let color = V4 r g b maxBound
let area = SDL.Rectangle
(P (V2 (x * scale) (y * scale)))
(V2 scale scale)
SDL.surfaceFillRect surface (Just area) color)))
loop :: MachineState -> SDL.Window -> Bool -> Int -> IO ()
loop _ _ _ 0 = return ()
loop state window newFrame n = do
if newFrame
then draw state window
else return ()
if n `mod` 60 == 0
then putStrLn $ "Frames remaining: " ++ show n
else return ()
SDL.updateWindowSurface window
threadDelay frameDelayMs
let (newFrame', state') = step state
loop state' window newFrame' (n - 1)
fromRight (Right x) = x
fromRight (Left x) = error $ "ROM load failure: " ++ x
main :: IO ()
main = do
pwd <- getCurrentDirectory
putStrLn $ "Working Directory: " ++ pwd
romBytes <- BS.readFile $ "roms/" ++ romName ++ ".nes"
let state = def { rom = fromRight $ parseROM romBytes }
SDL.initialize [SDL.InitVideo]
window <- SDL.createWindow
"GameCom"
SDL.defaultWindow
{ SDL.windowInitialSize = V2 (256 * scale) (240 * scale) }
SDL.showWindow window
loop state window True 10000
putStrLn "Clock stopped"
threadDelay endDelayMs
SDL.destroyWindow window
SDL.quit
| rkoeninger/GameCom | src/main/Main.hs | mit | 2,186 | 0 | 25 | 666 | 744 | 384 | 360 | 65 | 3 |
{-# LANGUAGE EmptyDataDecls #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveDataTypeable #-}
module BlueWire.Database.Schema where
import Database.Persist
import Database.Persist.TH
import Database.Persist.Sql
import Data.Aeson.TH
import Data.Time
import Data.Typeable
import Data.Generics
import Control.Monad.Trans.Resource
import BlueWire.Types
import BlueWire.Database.OrphanInstances()
share [mkPersist sqlSettings { mpsGenerateLenses = True, mpsPrefixFields = False }
, mkMigrate "migrateAll"] [persistLowerCase|
Recovery
rate NominalDiffTime
hurdle NominalDiffTime
maxTime NominalDiffTime
deriving Show Data Typeable
Kick
duration NominalDiffTime -- ^ The duration of the kick, in seconds.
countdown NominalDiffTime -- ^ The total time before, an actual mutable value.
repeatCountdown NominalDiffTime Maybe -- ^ Should this kick repeat when the kick time is over and if so, use this value (null if no repeat).
recoveryProfile Recovery Maybe
deriving Show Data Typeable
AppStats
name String -- ^ The name of the application
UniqueAppStatsName name -- no two appstats can have the same app name.
activeKicks [Kick]
lastHeartbeat UTCTime -- ^ The last time a heartbeat was recieved
kickEnds UTCTime Maybe -- ^ The time when the active kick will end, Nothing if there's no active kick.
canNextSetKicks UTCTime -- ^ The next time the kicks can be set.
deriving Show Data Typeable
|]
deriveJSON (defaultOptions {fieldLabelModifier = dropWhile (== '_') }) ''Recovery
deriveJSON (defaultOptions {fieldLabelModifier = dropWhile (== '_') }) ''Kick
deriveJSON (defaultOptions {fieldLabelModifier = dropWhile (== '_') }) ''AppStats
{-|
Better named alias.
-}
type AppProfile = AppStats
| quantifiedtran/blue-wire-backend | src/BlueWire/Database/Schema.hs | mit | 2,229 | 0 | 10 | 499 | 207 | 127 | 80 | 29 | 0 |
module MPCH.Config where
import qualified Network.MPD as MPD
data Config = Config {
host :: Maybe String,
port :: Maybe String,
password :: Maybe MPD.Password
}
deriving Show
defaultConfig :: Config
defaultConfig = Config {
host = Nothing,
port = Nothing,
password = Nothing
}
configure :: Config -> [Config -> Config] -> Config
configure = foldl (\cfg x -> x cfg)
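-- A minimal usage sketch (not part of the original module): 'configure'
-- threads a list of record updates over an initial configuration.
--
-- > configure defaultConfig
-- >   [ \c -> c { host = Just "localhost" }
-- >   , \c -> c { port = Just "6600" }
-- >   ]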
| mineo/mpch | MPCH/Config.hs | mit | 397 | 0 | 10 | 94 | 126 | 74 | 52 | 14 | 1 |
-- | Data.TSTP.Source module
module Data.TSTP.Source where
import Data.TSTP.GData (GTerm (..))
import Data.TSTP.IntroType (IntroType (..))
import Data.TSTP.Parent (Parent (..))
import Data.TSTP.Rule (Rule (..))
import Data.TSTP.Status (Status (..))
import Data.TSTP.Theory (Theory (..))
-- | The main purpose of 'Source' is to provide all the information regarding
-- the deductive process that led to a given formula. Information
-- about the rules applied along with parent formulas and
-- <http://www.cs.miami.edu/~tptp/TPTP/TPTPTParty/2007/PositionStatements/GeoffSutcliffe_SZS.html SZS>
-- status are among the information you might expect from this field.
data Source = Creator String [Info]
| File String (Maybe String)
| Inference Rule [Info] [Parent]
| Introduced IntroType [Info]
| Name String
| NoSource
| Source String
| Theory Theory [Info]
deriving (Eq, Ord, Show, Read)
data Info = AssumptionR [String]
| Description String
| Function String [GTerm]
| InferenceInfo Rule String [GTerm]
| IQuote String
| Refutation Source
| Status Status
deriving (Eq, Ord, Show, Read)
| agomezl/tstp2agda | src/Data/TSTP/Source.hs | mit | 1,346 | 0 | 8 | 414 | 270 | 167 | 103 | 24 | 0 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE TemplateHaskell #-}
{-
NOTE: Don't modify this file unless you know what you are doing. If you are
new to snap, start with Site.hs and Application.hs. This file contains
boilerplate needed for dynamic reloading and is not meant for general
consumption.
Occasionally if we modify the way the dynamic reloader works and you want to
upgrade, you might have to swap out this file for a newer version. But in
most cases you'll never need to modify this code.
-}
module Main where
------------------------------------------------------------------------------
import Control.Exception (SomeException, try)
import qualified Data.Text as T
import Snap.Http.Server
import Snap.Snaplet
import Snap.Snaplet.Config
import Snap.Core
import System.IO
import Site
#ifdef DEVELOPMENT
import Snap.Loader.Dynamic
#else
import Snap.Loader.Static
#endif
main :: IO ()
main = do
-- Depending on the version of loadSnapTH in scope, this either enables
-- dynamic reloading, or compiles it without. The last argument to
-- loadSnapTH is a list of additional directories to watch for changes to
-- trigger reloads in development mode. It doesn't need to include source
-- directories, those are picked up automatically by the splice.
(conf, site, cleanup) <- $(loadSnapTH [| getConf |]
'getActions
["."])
_ <- try $ httpServe conf site :: IO (Either SomeException ())
cleanup
------------------------------------------------------------------------------
-- | This action loads the config used by this application. The loaded config
-- is returned as the first element of the tuple produced by the loadSnapTH
-- Splice. The type is not solidly fixed, though it must be an IO action that
-- produces the same type as 'getActions' takes. It also must be an instance of
-- Typeable. If the type of this is changed, a full recompile will be needed to
-- pick up the change, even in development mode.
--
-- This action is only run once, regardless of whether development or
-- production mode is in use.
getConf :: IO (Config Snap AppConfig)
getConf = commandLineAppConfig defaultConfig
------------------------------------------------------------------------------
-- | This function generates the site handler and cleanup action from the
-- configuration. In production mode, this action is only run once. In
-- development mode, this action is run whenever the application is reloaded.
--
-- Development mode also makes sure that the cleanup actions are run
-- appropriately before shutdown. The cleanup action returned from loadSnapTH
-- should still be used after the server has stopped handling requests, as the
-- cleanup actions are only automatically run when a reload is triggered.
--
-- This sample doesn't actually use the config passed in, but more
-- sophisticated code might.
getActions :: Config Snap AppConfig -> IO (Snap (), IO ())
getActions conf = do
(msgs, site, cleanup) <- runSnaplet
(appEnvironment =<< getOther conf) app
hPutStrLn stderr $ T.unpack msgs
return (site, cleanup)
| epsilonhalbe/rendezvous | src/Main.hs | mit | 3,264 | 0 | 11 | 712 | 302 | 178 | 124 | 27 | 1 |
{-# LANGUAGE OverloadedStrings, ScopedTypeVariables #-}
module Models.Channel
(
parseChannelsJson,
Channel(..)
) where
import Data.Text
import Data.Text.Encoding
import Data.Aeson.Types
import Data.Aeson
import Data.ByteString.Lazy
import Control.Monad as M
import Data.Vector as V
data Channel = Channel {
_cname :: String,
_cid :: String
} deriving (Show, Ord, Eq)
parseChannelsJson :: Text -> Maybe [Channel]
parseChannelsJson text = decode (fromStrict $ encodeUtf8 text) >>= parseMaybe parser
where
parser :: Object -> Parser [Channel]
parser toplevel = do
entries :: Array <- toplevel .: "entries"
channelsVector :: Vector Channel <- M.forM entries $
(\(Object entry) -> do
key <- entry .: "key"
title <- entry .: "val"
return $ Channel title key
)
return $ V.toList channelsVector
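-- A minimal usage sketch (not part of the original module; the JSON below
-- is a hypothetical payload in the expected shape):
--
-- > parseChannelsJson "{\"entries\":[{\"key\":\"ch1\",\"val\":\"News\"}]}"
-- >   == Just [Channel "News" "ch1"]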
| simonvandel/tvheadend-frontend | src/Models/Channel.hs | mit | 884 | 0 | 16 | 210 | 263 | 141 | 122 | 27 | 1 |
module Carbon.DataStructures.Trees.BinaryHeap.Benchmarks (benchmarks, setup) where
import qualified Carbon.DataStructures.Trees.BinaryHeap as Tree
import Carbon.DataStructures.Trees.BinaryHeap.Scaffolding
import Carbon.Benchmarking
import Control.DeepSeq
instance NFData (Tree.Tree a)
setup = force [get_tree max_size]
benchmarks = []
++ (benchmark_function_with_values "insert" insert_tree (map (\ x -> 2 ^ x) [4..16]))
++ (benchmark_function_with_values "remove" remove_tree (map (\ x -> 2 ^ x) [4..16])) | Raekye/Carbon | haskell/testsuite/benchmarks/Carbon/DataStructures/Trees/BinaryHeap/Benchmarks.hs | mit | 513 | 0 | 13 | 58 | 159 | 92 | 67 | 10 | 1 |
-- vim: expandtab shiftwidth=4 tabstop=4 :
-- Based on <https://gist.github.com/yeban/311016>
-- Some potential inspiration for the future:
-- <https://github.com/dmxt/Solarized-xmonad-xmobar/blob/master/xmonad.hs>
-- <https://www.haskell.org/haskellwiki/Xmonad/General_xmonad.hs_config_tips>
import XMonad hiding (config)
import XMonad.Hooks.DynamicLog
import XMonad.Hooks.EwmhDesktops
import XMonad.Hooks.ManageDocks
import XMonad.Layout.Grid
import XMonad.Layout.Maximize
import XMonad.Layout.NoBorders
import XMonad.Layout.Tabbed
import qualified Data.Map as M
import qualified XMonad.StackSet as W
-- Basic key bindings
-- @modMask@ is the modifier key used for all the shortcuts
keyMap conf@(XConfig { XMonad.modMask = modMask }) = M.fromList $
-- Layout and windows -----------------------------------------------------------
-- Cycle through available layout algorithms
[ ((modMask, xK_Tab), sendMessage NextLayout)
-- Resize windows to correct size -- ???
--, ((modMask, xK_n), refresh)
-- Focus next window
, ((modMask, xK_j), windows W.focusDown)
-- Focus previous window
, ((modMask, xK_k), windows W.focusUp)
-- Focus first master window
, ((modMask, xK_m), windows W.focusMaster)
-- Swap focused and next window
, ((modMask .|. shiftMask, xK_j), windows W.swapDown)
-- Swap focused and previous window
, ((modMask .|. shiftMask, xK_k), windows W.swapUp)
-- Swap focused and master window
, ((modMask .|. shiftMask, xK_m), windows W.swapMaster)
-- Maximize focused window temporarily
, ((modMask, xK_n), withFocused $ sendMessage . maximizeRestore)
-- Shrink master area
, ((modMask, xK_greater),sendMessage Shrink)
-- Expand master area
, ((modMask, xK_less), sendMessage Expand)
-- Push window back into tiling
, ((modMask, xK_t), withFocused $ windows . W.sink)
-- Increment number of windows in master area
, ((modMask, xK_plus), sendMessage (IncMasterN 1))
-- Decrement number of windows in master area
, ((modMask, xK_minus), sendMessage (IncMasterN (-1)))
-- Applications -----------------------------------------------------------------
-- Kill current application
, ((modMask, xK_q), kill)
-- Launch the terminal emulator
, ((modMask, xK_Return), spawn $ XMonad.terminal conf)
-- Start web browser -- Firefox Nightly
, ((modMask, xK_b), spawn "/usr/bin/firefox-nightly")
-- Start file manager -- PCManFM
, ((modMask, xK_f), spawn "/usr/bin/pcmanfm")
-- Open LXDE's "Run command" modal
, ((modMask, xK_r), spawn "/usr/bin/lxpanelctl run")
-- Show LXDE's system menu
, ((modMask, xK_Escape), spawn "/usr/bin/lxpanelctl menu")
-- Open LXDE logout modal
, ((modMask, xK_End), spawn "/usr/bin/lxsession-logout")
-- Recompile and restart Xmonad
, ((modMask .|. shiftMask, xK_End), spawn "/usr/bin/xmonad --recompile && xmonad --restart")
-- Take a screenshot of the whole screen
, ((modMask, xK_s), spawn "/usr/bin/gnome-screenshot")
-- Take a screenshot of selected area
, ((modMask .|. shiftMask, xK_s), spawn "/usr/bin/gnome-screenshot -a")
] ++
--
-- mod-N . . . Switch to workspace N
-- mod-shift-N . . . Move window to workspace N
--
[((modMask .|. modifier, key), windows $ f workspace)
| (workspace, key) <- zip (XMonad.workspaces conf) [xK_1..]
, (f, modifier) <- [(W.greedyView, 0), (W.shift, shiftMask)]]
mouseMap (XConfig { XMonad.modMask = modMask }) = M.fromList $
[ ((modMask, button1), (\w -> focus w >> mouseMoveWindow w))
, ((modMask, button2), (\w -> focus w >> windows W.swapMaster))
, ((modMask, button3), (\w -> focus w >> mouseResizeWindow w))
]
layouts = smartBorders (Full ||| maximize tiled ||| GridRatio (4/3))
where
-- Partition the screen into two panes
tiled = Tall nmaster delta ratio
-- The default number of windows in the master pane
nmaster = 1
-- Default proportion of screen occupied by master pane
ratio = 1/2
-- Percent of screen to increment by when resizing panes
delta = 3/100
-- Check using xprop
-- XMonad.ManageHook
myManageHook = composeAll
[ title =? "About Firefox Nightly" --> doFloat
, resource =? "Dialog" --> doFloat
, resource =? "Prompt" --> doFloat
, className =? "Coqide" <&&> title =? "Quit" --> doFloat
, className =? "Lxpanel" --> doFloat
--, className =? "Lxsession-logout" --> doFloat
, className =? "Pinentry" --> doFloat
, className =? "qjackctl" --> doFloat
, className =? "Xmessage" --> doFloat
, windowRole =? "alert" --> doFloat
, windowRole =? "GtkFileChooserDialog" --> doFloat
--, windowType =? "_NET_WM_WINDOW_TYPE_DIALOG" --> doFloat
]
where
windowRole = stringProperty "WM_WINDOW_ROLE"
--windowType = stringProperty "_NET_WM_WINDOW_TYPE"
toggleStrutsKey XConfig { XMonad.modMask = modMask } = (modMask .|. shiftMask, xK_t)
--xmobarBin = "/home/mgrabovsky/.xmonad/xmobar-graceful"
xmobarBin = "/home/mgrabovsky/builds/xmobar-git/src/xmobar-git/.stack-work/install/x86_64-linux-tinfo6/lts-13.0/8.6.3/bin/xmobar"
-- defined in XMonad.Hooks.DynamicLog
myPP = xmobarPP { ppCurrent = xmobarColor "#444" ""
, ppHidden = xmobarColor "#aaa" ""
, ppHiddenNoWindows = xmobarColor "#ddd" ""
, ppUrgent = xmobarColor "#aaa" ""
, ppTitle = shorten 90
, ppSep = pad (xmobarColor "#ddd" "" "·")
, ppLayout = (\l -> case l of
"Full" -> "[F]"
"Maximize Tall" -> "M|="
_ -> "[-]")
}
config = def
{ manageHook = manageDocks <+> myManageHook <+> manageHook def
, logHook = myLogHook
, layoutHook = avoidStruts layouts
, handleEventHook = ewmhDesktopsEventHook
, startupHook = ewmhDesktopsStartup
, modMask = mod4Mask -- Super/Windows key
, terminal = "termite"
, keys = keyMap
, mouseBindings = mouseMap
, workspaces = show <$> [1..5]
, borderWidth = 1
, normalBorderColor = "#bbbbbb"
, focusedBorderColor = "#ffcc00"
}
where
myLogHook = ewmhDesktopsLogHook -- Either dynamicLogXinerama or ewmhDesktopsLogHook
main = let runXmobar = statusBar xmobarBin myPP toggleStrutsKey
in xmonad =<< (runXmobar . ewmh) config
| mgrabovsky/dotfiles | .xmonad/xmonad.hs | cc0-1.0 | 7,249 | 0 | 13 | 2,249 | 1,359 | 809 | 550 | 88 | 3 |
{-# LANGUAGE RecordWildCards #-}
-- | Type checking mode of jb.
-- It runs type inference to decide if a term is typably hierarchical.
module Type(typeInference, runTypeInference) where
import Frontend
import Language.PiCalc
import Language.PiCalc.Analysis.Hierarchical
import Control.Monad
import qualified Data.Set as Set
import qualified Data.Map as Map
import qualified Data.MultiSet as MSet
import Data.List(sort, intercalate, intersperse)
import Language.PiCalc.Pretty(nest)
-- import Text.PrettyPrint(nest, braces, sep, punctuate, comma)
runTypeInference = runJB typeInference ()
typeInference:: JB () ()
typeInference = do
progs <- readInputProgs
when (length progs > 1) $ setOpt shouldShowFn
skipu <- getOpt skipUnsupported
infer <- selectAlgorithm
sequence_ $
intersperse sepLines $
[apply infer source prog | (source, prog, _) <- progs, (not skipu) || isSupported prog ]
disclaimer
disclaimer = do
shoutLn " .----------------- << Warning >> -------------------."
shoutLn " | The current version of the type system is a variant |"
shoutLn " | of the one presented in the paper. |"
shoutLn " | It does not fully support global free names and is |"
shoutLn " | restricted in the kind of types it can infer for |"
shoutLn " | better performance. |"
shoutLn " '-----------------------------------------------------'\n"
sepLines = info "\n" >> sepLine >> info "\n"
shouldShowFn o@TypeInf{showFileNames = Nothing} = o{showFileNames = Just True}
shouldShowFn o@TypeInf{showFileNames = _} = o
showTVar (NameType n) = show $ pretty n
showTVar (ArgType n i) = (show $ pretty n) ++ "[" ++ show i ++ "]"
-- namelst p = map pretty $ Set.toList $ allNames p
showArity Nothing = "Any"
showArity (Just i) = show i ++ "-ary"
declareUnsupported = colorWrap "33" $ outputLn "UNSUPPORTED"
declareSimplyTyped True = positiveLn "SIMPLY TYPED"
declareSimplyTyped False = negativeLn "NOT SIMPLY TYPED"
declareTypablyHierarchical True = positiveLn "TYPABLY HIERARCHICAL"
declareTypablyHierarchical False = negativeLn "NOT TYPABLY HIERARCHICAL"
positiveLn x = colorWrap "32" $ outputLn x
negativeLn x = colorWrap "31" $ outputLn x
apply infer source prog = do
let p = start prog
printFn source
if isSupported prog then
infer p
else do
declareUnsupported
declareInput p
shoutLn " Currently definitions and process calls are not supported."
shoutLn " Please reformulate the term using replication: *(term)."
declareInput term = do
useColor <- getOpt colored
let prettyPrint | useColor = prettyTerm (colorHoareStyle defTermOpts)
| otherwise = pretty
info "\n"
infoLn $ show $ nest 4 $ prettyPrint term
unlessAlgSimple $ do
shoutLn "\nConstraints:\n"
shoutBaseConstr term
info "\n"
selectAlgorithm = do
alg <- getOpt algorithm
unless (alg == AlgComplete) $ warnLn $ "Warning: Applying " ++ show alg ++ " algorithm"
case alg of
AlgComplete -> return (hierarchical inferTypes)
AlgAltCompl -> return (hierarchical inferTypesSlow)
AlgIncomplete -> warnIncomplete >> return (hierarchical inferTypesIncomplete)
AlgSimple -> return simpletype
where warnIncomplete = warnLn "Warning: The incomplete type system may reject terms which can be proved hierarchical with the complete algorithm (but has better performance).\n"
unlessAlgSimple m = do
alg <- getOpt algorithm
unless (alg == AlgSimple) m
printFn fn = do
flag <- getOpt showFileNames
case flag of
Just True -> header (output $ "\n# " ++ fn) >> output "\n" >> info "\n"
_ -> return ()
simpletype p =
case unifyTypes p of
Left err -> do
declareSimplyTyped False
declareInput p
reportArityMismatch err
Right eq -> do
declareSimplyTyped True
declareInput p
let (typing, types) = buildTypingEnv eq
printTypingEnv (allRestr p) typing types
hierarchical infer p = do
case infer p of
ArityMismatch mismatch -> do
declareSimplyTyped False
declareInput p
reportArityMismatch mismatch
Inconsistent {..} -> do
declareTypablyHierarchical False
declareInput p
printTypingEnv (allRestr p) typing types
forM_ cycles $ \ys -> do
info " Cycle: "
forM_ ys $ \y ->
info $ (show $ pretty y) ++ " "
infoLn ""
NotTShaped {..} -> do
declareTypablyHierarchical False
declareInput p
printTypingEnv (allRestr p) typing types
info " These names have the same type but are always tied to each other: "
forM_ conflicts $ \y ->
info $ (show $ pretty y) ++ " "
infoLn ""
Inferred {..} -> do
declareTypablyHierarchical True
declareInput p
when (not $ null $ baseTypes) $ do
infoLn "Typable with base types"
info " "
infoLn $ intercalate " ⤙ " $ map (("t"++).show) baseTypes
-- infoLn "and types"
printTypingEnv (allRestr p) typing types
infoLn $ "Bound on depth: " ++ (show $ length baseTypes)
reportArityMismatch (n1, n2, xs) = do
info $ " Wrong arity for names "
forM_ xs $ \x ->
info $ (show $ pretty x) ++ " "
infoLn $ "\n Expected arity: " ++ show n1
infoLn $ " Actual arity: " ++ show n2
printTypingEnv names typing types = do
infoLn "with types"
forM_ (Map.toList $ typing) $ \(x, b) ->
when (isGlobal x || x `Set.member` names) $ do
info " "
info $ show $ pretty x
info ":τ"
info $ show b
infoLn "\nwhere"
forM_ (Map.toList $ types) $ \(b, args) -> do
info " τ"
info $ show b
info " = t"
info $ show b
infoLn $ showTypeArg args
infoLn ""
showTypeArg (Just xs) = "[" ++ intercalate ", " (map (\x-> "τ"++show x) xs) ++ "]"
showTypeArg Nothing = ""
shoutBaseConstr p = shout $ unlines $ map prcs $ constrBaseTypes p
where
prcs (BLt a b) = " " ++ pp a ++ " < " ++ pp b
prcs (BLtOr as bs c) = " OR ⎡ " ++ pps as ++ " < " ++ pp c ++
"\n ⎣ " ++ pps bs ++ " < " ++ pp c
pp = show . pretty
pps xs = show $ map pretty xs
isSupported :: PiProg -> Bool
isSupported prog = (null $ defsList $ defs prog) && (not $ hasPCall $ start prog)
where
hasPCall (Parall ps) = any hasPCall $ MSet.distinctElems ps
hasPCall (Alt as) = any (hasPCall.snd) $ MSet.distinctElems as
hasPCall (New _ p) = hasPCall p
hasPCall (Bang p@(Alt _)) = hasPCall p
hasPCall (Bang p) = True
hasPCall (PiCall _ _) = True
-- hasPCall _ = False | bordaigorl/jamesbound | src/Type.hs | gpl-2.0 | 7,062 | 0 | 22 | 2,135 | 2,048 | 957 | 1,091 | 162 | 6 |
{-# LANGUAGE BangPatterns #-}
module ProxyClient where
import SSHClient
import Control.Concurrent.HEP
import Data.HProxy.Rules
import Data.ByteString.Char8 as C
import Control.Monad
import Control.Monad.Trans
import Text.ParserCombinators.Parsec
import Prelude as P
data ProxyMessage = ProxyRead ByteString
| ProxyReadError ByteString
| ProxyExit
proxyLogin:: String -- login
-> Destination -- server
-> Destination -- destination
-> Int
-> HEP (Either String (PortT, Pid))
proxyLogin login
server
dest@(DestinationAddrPort (IPAddress ip) port)
connectionsCount = do
inbox <- liftIO $! newMBox
ssh <- startSSHClient server login
("hproxyserver -d " ++ ip ++ ":" ++ show port ++ " -c " ++
show connectionsCount )
(\x-> liftIO $! sendMBox inbox $! ProxyRead x)
(\x-> return () ) -- liftIO $! sendMBox inbox $! ProxyReadError x)
(liftIO $! do
P.putStrLn $! "ssh client exited"
sendMBox inbox $! ProxyExit)
mmsg <- liftIO $! receiveMBoxAfter 60000 inbox
stopSSHClient ssh
case mmsg of
Nothing -> return $! Left "ERROR: ssh client timeouted"
Just msg -> case msg of
ProxyReadError !some -> do
return $! Left $! unpack some
ProxyExit -> do
return $! Left "ERROR: ssh exited unexpectedly"
ProxyRead !some -> do
case parse parseOkPort "hproxyserver answer" (unpack some) of
Right port -> return $! Right (port,ssh)
Left _ -> return $! Left $! unpack some
parseOkPort:: GenParser Char st PortT
parseOkPort = do
string "OK"
spaces
port <- many1 digit
return $! read port
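-- A minimal sketch (not part of the original module): the success banner
-- printed by hproxyserver is assumed to look like "OK 4567", so
--
-- > parse parseOkPort "banner" "OK 4567" == Right 4567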
| dambaev/hproxy | src/ProxyClient.hs | gpl-2.0 | 1,856 | 0 | 20 | 616 | 474 | 237 | 237 | 50 | 5 |
{-# OPTIONS -fno-warn-orphans #-}
{-# LANGUAGE TemplateHaskell, DeriveDataTypeable, RankNTypes, NoMonomorphismRestriction #-}
module Lamdu.Data.Expression.Utils
( makeApply
, makePi, makeLambda, makeLam
, pureApply
, pureHole
, pureSet
, pureRecord
, pureLam
, pureGetField
, pureLiteralInteger
, pureIntegerType
, pureExpression
, randomizeExpr
, randomizeParamIds
, randomizeParamIdsG
, randomizeExprAndParams
, NameGen(..), onNgMakeName
, randomNameGen, debugNameGen
, matchBody, matchExpression, matchExpressionG
, subExpressions, subExpressionsWithout
, isDependentPi, exprHasGetVar
, curriedFuncArguments
, ApplyFormAnnotation(..), applyForms
, recordValForm, structureForType
, alphaEq, couldEq
, subst, substGetPar
, showBodyExpr, showsPrecBodyExpr
, isTypeConstructorType
, addExpressionContexts
, addBodyContexts
, PiWrappers(..), piWrappersDepParams, piWrappersMIndepParam, piWrappersResultType
, getPiWrappers
) where
import Prelude hiding (pi)
import Lamdu.Data.Expression
import Control.Applicative (Applicative(..), liftA2, (<$>), (<$))
import Control.Arrow ((***))
import Control.Lens (Context(..))
import Control.Lens.Operators
import Control.Lens.Utils (addListContexts, addTuple2Contexts)
import Control.Monad (guard)
import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.Reader (ReaderT, runReaderT)
import Control.Monad.Trans.State (evalState, state)
import Data.Map (Map)
import Data.Maybe (isJust, fromMaybe)
import Data.Monoid (Any)
import Data.Store.Guid (Guid)
import Data.Traversable (Traversable(..), sequenceA)
import System.Random (Random, RandomGen, random)
import qualified Control.Lens as Lens
import qualified Control.Monad.Trans.Reader as Reader
import qualified Data.Foldable as Foldable
import qualified Data.List as List
import qualified Data.List.Utils as ListUtils
import qualified Data.Map as Map
import qualified Data.Store.Guid as Guid
import qualified Lamdu.Data.Expression.Lens as ExprLens
import qualified System.Random as Random
data PiWrappers def a = PiWrappers
{ _piWrappersDepParams :: [(Guid, Expression def a)]
, _piWrappersMIndepParam :: Maybe (Guid, Expression def a)
, _piWrappersResultType :: Expression def a
}
Lens.makeLenses ''PiWrappers
data NameGen pl = NameGen
{ ngSplit :: (NameGen pl, NameGen pl)
, ngMakeName :: Guid -> pl -> (Guid, NameGen pl)
}
onNgMakeName ::
(NameGen b ->
(Guid -> a -> (Guid, NameGen b)) ->
Guid -> b -> (Guid, NameGen b)) ->
NameGen a -> NameGen b
onNgMakeName onMakeName =
go
where
go nameGen =
result
where
result =
nameGen
{ ngMakeName =
ngMakeName nameGen
& Lens.mapped . Lens.mapped . Lens._2 %~ go
& onMakeName result
, ngSplit =
ngSplit nameGen
& Lens.both %~ go
}
getPiWrappers :: Expression def a -> PiWrappers def a
getPiWrappers expr =
case expr ^? ExprLens.exprLam of
Just (Lam KType param paramType resultType)
| isDependentPi expr ->
getPiWrappers resultType & piWrappersDepParams %~ (p :)
| otherwise ->
PiWrappers
{ _piWrappersDepParams = []
, _piWrappersMIndepParam = Just p
, _piWrappersResultType = resultType
}
where
p = (param, paramType)
_ -> PiWrappers [] Nothing expr
couldEq :: Eq def => Expression def a -> Expression def a -> Bool
couldEq x y =
isJust $ matchExpression (const . Just) onMismatch x y
where
onMismatch (Expression (BodyLeaf Hole) _) e = Just e
onMismatch e (Expression (BodyLeaf Hole) _) = Just e
onMismatch _ _ = Nothing
alphaEq :: Eq def => Expression def a -> Expression def a -> Bool
alphaEq x y =
isJust $ matchExpression
((const . const . Just) ())
((const . const) Nothing)
x y
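-- A minimal sketch (not part of the original module): two lambdas that differ
-- only in the name of their bound parameter are alpha-equivalent.
_alphaEqExample :: Bool
_alphaEqExample =
  alphaEq
  (pureLam KVal (Guid.fromString "x") pureHole pureHole)
  (pureLam KVal (Guid.fromString "y") pureHole pureHole :: Expression () ())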
-- Useful functions:
substGetPar ::
Guid ->
Expression def a ->
Expression def a ->
Expression def a
substGetPar from =
subst (ExprLens.exprParameterRef . Lens.filtered (== from))
subst ::
Lens.Getting Any (Expression def a) b ->
Expression def a ->
Expression def a ->
Expression def a
subst lens to expr
| Lens.has lens expr = to
| otherwise = expr & eBody . traverse %~ subst lens to
data ApplyFormAnnotation =
Untouched | DependentParamAdded | IndependentParamAdded
deriving Eq
-- Transform expression to expression applied with holes,
-- with all different sensible levels of currying.
applyForms :: Expression def () -> Expression def () -> [Expression def ApplyFormAnnotation]
applyForms exprType rawExpr
| Lens.has (ExprLens.exprLam . lamKind . _KVal) expr = [expr]
| otherwise = reverse withAllAppliesAdded
where
expr = Untouched <$ rawExpr
withDepAppliesAdded =
foldl (addApply DependentParamAdded) expr depParamTypes
withAllAppliesAdded =
scanl (addApply IndependentParamAdded) withDepAppliesAdded $
indepParamTypes ++ assumeHoleIsPi
depParamTypes = snd <$> depParams
indepParamTypes = mNonDepParam ^.. Lens._Just . Lens._2
assumeHoleIsPi
| Lens.has ExprLens.exprHole resultType = [pureHole]
| otherwise = []
PiWrappers
{ _piWrappersDepParams = depParams
, _piWrappersMIndepParam = mNonDepParam
, _piWrappersResultType = resultType
} = getPiWrappers exprType
addApply ann func paramType =
Expression (makeApply func arg) ann
where
arg = ann <$ fromMaybe pureHole (recordValForm paramType)
recordValForm :: Expression def () -> Maybe (Expression def ())
recordValForm paramType =
replaceFieldTypesWithHoles <$>
(paramType ^? ExprLens.exprKindedRecordFields KType)
where
replaceFieldTypesWithHoles fields =
ExprLens.pureExpr . _BodyRecord .
ExprLens.kindedRecordFields KVal #
(fields & Lens.traversed . Lens._2 .~ pureHole)
structureForType ::
Expression def () ->
Expression def ()
structureForType =
(eBody %~) $
const (ExprLens.bodyHole # ())
& Lens.outside (ExprLens.bodyKindedRecordFields KType) .~
(ExprLens.bodyKindedRecordFields KVal # ) . (traverse . Lens._2 %~ structureForType)
& Lens.outside (ExprLens.bodyKindedLam KType) .~
(ExprLens.bodyKindedLam KVal # ) . (Lens._3 %~ structureForType)
randomizeExprAndParams :: (RandomGen gen, Random r) => gen -> Expression def (r -> a) -> Expression def a
randomizeExprAndParams gen = randomizeParamIds paramGen . randomizeExpr exprGen
where
(exprGen, paramGen) = Random.split gen
randomizeExpr :: (RandomGen gen, Random r) => gen -> Expression def (r -> a) -> Expression def a
randomizeExpr gen (Expression body pl) =
(`evalState` gen) $ do
r <- state random
newBody <- body & traverse %%~ randomizeSubexpr
return . Expression newBody $ pl r
where
randomizeSubexpr subExpr = do
localGen <- state Random.split
return $ randomizeExpr localGen subExpr
randomNameGen :: RandomGen g => g -> NameGen dummy
randomNameGen g = NameGen
{ ngSplit = Random.split g & Lens.both %~ randomNameGen
, ngMakeName = const . const $ random g & Lens._2 %~ randomNameGen
}
debugNameGen :: NameGen dummy
debugNameGen = ng names ""
where
names = (:[]) <$> ['a'..'z']
ng [] _ = error "TODO: Infinite list of names"
ng st@(l:ls) suffix =
NameGen
{ ngSplit = (ng st "_0", ng st "_1")
, ngMakeName = const . const $ (Guid.fromString (l++suffix), ng ls suffix)
}
randomizeParamIds :: RandomGen g => g -> Expression def a -> Expression def a
randomizeParamIds gen = randomizeParamIdsG id (randomNameGen gen) Map.empty $ \_ _ a -> a
randomizeParamIdsG ::
(a -> n) ->
NameGen n -> Map Guid Guid ->
(NameGen n -> Map Guid Guid -> a -> b) ->
Expression def a -> Expression def b
randomizeParamIdsG preNG gen initMap convertPL =
(`evalState` gen) . (`runReaderT` initMap) . go
where
go (Expression v s) = do
guidMap <- Reader.ask
newGen <- lift $ state ngSplit
(`Expression` convertPL newGen guidMap s) <$>
case v of
BodyLam (Lam k oldParamId paramType body) -> do
newParamId <- lift . state $ makeName oldParamId s
fmap BodyLam $ liftA2 (Lam k newParamId) (go paramType) .
Reader.local (Map.insert oldParamId newParamId) $ go body
BodyLeaf (GetVariable (ParameterRef guid)) ->
pure $ ExprLens.bodyParameterRef #
fromMaybe guid (Map.lookup guid guidMap)
x@BodyLeaf {} -> traverse go x
x@BodyApply {} -> traverse go x
x@BodyGetField {} -> traverse go x
x@BodyRecord {} -> traverse go x
makeName oldParamId s nameGen =
ngMakeName nameGen oldParamId $ preNG s
-- Left-biased on parameter guids
{-# INLINE matchBody #-}
matchBody ::
Eq def =>
(Guid -> Guid -> a -> b -> c) -> -- ^ Lam/Pi result match
(a -> b -> c) -> -- ^ Ordinary structural match (Apply components, param type)
(Guid -> Guid -> Bool) -> -- ^ Match ParameterRef's
Body def a -> Body def b -> Maybe (Body def c)
matchBody matchLamResult matchOther matchGetPar body0 body1 =
case body0 of
BodyLam (Lam k0 p0 pt0 r0) -> do
Lam k1 p1 pt1 r1 <- body1 ^? _BodyLam
guard $ k0 == k1
return . BodyLam $
Lam k0 p0 (matchOther pt0 pt1) $
matchLamResult p0 p1 r0 r1
BodyApply (Apply f0 a0) -> do
Apply f1 a1 <- body1 ^? _BodyApply
return . BodyApply $ Apply (matchOther f0 f1) (matchOther a0 a1)
BodyRecord (Record k0 fs0) -> do
Record k1 fs1 <- body1 ^? _BodyRecord
guard $ k0 == k1
BodyRecord . Record k0 <$> ListUtils.match matchPair fs0 fs1
BodyGetField (GetField r0 f0) -> do
GetField r1 f1 <- body1 ^? _BodyGetField
return . BodyGetField $ GetField (matchOther r0 r1) (matchOther f0 f1)
BodyLeaf (GetVariable (ParameterRef p0)) -> do
p1 <- body1 ^? ExprLens.bodyParameterRef
guard $ matchGetPar p0 p1
return $ ExprLens.bodyParameterRef # p0
BodyLeaf x -> do
y <- body1 ^? _BodyLeaf
guard $ x == y
return $ BodyLeaf x
where
matchPair (k0, v0) (k1, v1) =
(matchOther k0 k1, matchOther v0 v1)
-- The returned expression gets the same guids as the left
-- expression
{-# INLINE matchExpression #-}
matchExpression ::
(Eq def, Applicative f) =>
(a -> b -> f c) ->
(Expression def a -> Expression def b -> f (Expression def c)) ->
Expression def a -> Expression def b -> f (Expression def c)
matchExpression = matchExpressionG . const . const $ pure ()
{-# INLINE matchExpressionG #-}
matchExpressionG ::
(Eq def, Applicative f) =>
(Guid -> Guid -> f ()) -> -- ^ Left expr guid overrides right expr guid
(a -> b -> f c) ->
(Expression def a -> Expression def b -> f (Expression def c)) ->
Expression def a -> Expression def b -> f (Expression def c)
matchExpressionG overrideGuids onMatch onMismatch =
go Map.empty
where
go scope e0@(Expression body0 pl0) e1@(Expression body1 pl1) =
case matchBody matchLamResult matchOther matchGetPar body0 body1 of
Nothing ->
onMismatch e0 $
(ExprLens.exprLeaves . ExprLens.parameterRef %~ lookupGuid) e1
Just bodyMatched -> Expression <$> sequenceA bodyMatched <*> onMatch pl0 pl1
where
matchGetPar p0 p1 = p0 == lookupGuid p1
matchLamResult p0 p1 r0 r1 = overrideGuids p0 p1 *> go (Map.insert p1 p0 scope) r0 r1
matchOther = go scope
lookupGuid guid = fromMaybe guid $ Map.lookup guid scope
subExpressions :: Expression def a -> [Expression def a]
subExpressions x =
x : Foldable.concatMap subExpressions (x ^. eBody)
subExpressionsWithout ::
Lens.Traversal' (Expression def (Bool, a)) (Expression def (Bool, a)) ->
Expression def a -> [Expression def a]
subExpressionsWithout group =
map (fmap snd) .
filter (fst . (^. ePayload)) .
subExpressions .
(group . ePayload . Lens._1 .~ False) .
fmap ((,) True)
isDependentPi :: Expression def a -> Bool
isDependentPi =
Lens.has (ExprLens.exprKindedLam KType . Lens.filtered f)
where
f (g, _, resultType) = exprHasGetVar g resultType
parameterRefs :: Lens.Fold (Expression def a) Guid
parameterRefs = Lens.folding subExpressions . ExprLens.exprParameterRef
exprHasGetVar :: Guid -> Expression def a -> Bool
exprHasGetVar g = Lens.anyOf parameterRefs (== g)
curriedFuncArguments :: Expression def a -> [Expression def a]
curriedFuncArguments =
(^.. ExprLens.exprLam . ExprLens.kindedLam KVal . Lens.folding f)
where
f (_, paramType, body) = paramType : curriedFuncArguments body
pureIntegerType :: Expression def ()
pureIntegerType = ExprLens.pureExpr . ExprLens.bodyIntegerType # ()
pureLiteralInteger :: Integer -> Expression def ()
pureLiteralInteger = (ExprLens.pureExpr . ExprLens.bodyLiteralInteger # )
pureApply :: Expression def () -> Expression def () -> Expression def ()
pureApply f x = ExprLens.pureExpr . _BodyApply # Apply f x
pureHole :: Expression def ()
pureHole = ExprLens.pureExpr . ExprLens.bodyHole # ()
pureSet :: Expression def ()
pureSet = ExprLens.pureExpr . ExprLens.bodyType # ()
pureRecord :: Kind -> [(Expression def (), Expression def ())] -> Expression def ()
pureRecord k fields = ExprLens.pureExpr . ExprLens.bodyKindedRecordFields k # fields
pureLam :: Kind -> Guid -> Expression def () -> Expression def () -> Expression def ()
pureLam k paramGuid paramType result =
ExprLens.pureExpr . ExprLens.bodyKindedLam k # (paramGuid, paramType, result)
pureGetField :: Expression def () -> Expression def () -> Expression def ()
pureGetField record field =
ExprLens.pureExpr . _BodyGetField # GetField record field
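-- A small, hypothetical illustration of composing the pure constructors
-- above (the guid string "x" is an arbitrary choice for illustration,
-- not something this module defines):
--
-- > let x = Guid.fromString "x"
-- > in pureApply
-- >      (pureLam KVal x pureIntegerType
-- >        (ExprLens.pureExpr . ExprLens.bodyParameterRef # x))
-- >      (pureLiteralInteger 1)
--
-- builds the expression applying the identity lambda over Integer to the
-- literal 1, with every payload fixed to ().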
-- TODO: Deprecate below here:
pureExpression :: Body def (Expression def ()) -> Expression def ()
pureExpression = (ExprLens.pureExpr # )
makeApply :: expr -> expr -> Body def expr
makeApply func arg = BodyApply $ Apply func arg
makeLam :: Kind -> Guid -> expr -> expr -> Body def expr
makeLam k argId argType resultType =
BodyLam $ Lam k argId argType resultType
-- TODO: Remove the kind-passing wrappers
makePi :: Guid -> expr -> expr -> Body def expr
makePi = makeLam KType
makeLambda :: Guid -> expr -> expr -> Body def expr
makeLambda = makeLam KVal
isTypeConstructorType :: Expression def a -> Bool
isTypeConstructorType expr =
case expr ^. eBody of
BodyLeaf Type -> True
BodyLam (Lam KType _ _ res) -> isTypeConstructorType res
_ -> False
-- Show instances:
showsPrecBody ::
(Show def, Show expr) => (Guid -> expr -> Bool) ->
Int -> Body def expr -> ShowS
showsPrecBody mayDepend prec body =
case body of
BodyLam (Lam KVal paramId paramType result) ->
paren 0 $
showChar '\\' . shows paramId . showChar ':' .
showsPrec 11 paramType . showString "==>" .
shows result
BodyLam (Lam KType paramId paramType resultType) ->
paren 0 $
paramStr . showString "->" . shows resultType
where
paramStr
| dependent =
showString "(" . shows paramId . showString ":" . showsPrec 11 paramType . showString ")"
| otherwise = showsPrec 1 paramType
dependent = mayDepend paramId resultType
BodyApply (Apply func arg) ->
paren 10 $
showsPrec 10 func . showChar ' ' . showsPrec 11 arg
BodyRecord (Record k fields) ->
paren 11 $ showString recStr
where
recStr =
concat ["Rec", recType k, "{", List.intercalate ", " (map showField fields), "}"]
showField (field, typ) =
unwords [show field, sep k, show typ]
sep KVal = "="
sep KType = ":"
recType KVal = "V"
recType KType = "T"
BodyGetField (GetField r tag) ->
paren 8 $ showsPrec 8 r . showChar '.' . showsPrec 9 tag
BodyLeaf leaf -> showsPrec prec leaf
where
paren innerPrec = showParen (prec > innerPrec)
showsPrecBodyExpr :: (Show def, Show a) => Int -> BodyExpr def a -> ShowS
showsPrecBodyExpr = showsPrecBody exprHasGetVar
showBodyExpr :: BodyExpr String String -> String
showBodyExpr = flip (showsPrecBodyExpr 0) ""
instance (Show def, Show expr) => Show (Body def expr) where
showsPrec = showsPrecBody mayDepend
where
-- We are polymorphic on any expr, so we cannot tell...
mayDepend _ _ = True
instance (Show def, Show a) => Show (Expression def a) where
showsPrec prec (Expression body payload) =
showsPrecBodyExpr bodyPrec body .
showString showPayload
where
(bodyPrec, showPayload) =
case show payload of
"" -> (prec, "")
"()" -> (prec, "")
str -> (11, "{" ++ str ++ "}")
addBodyContexts ::
(a -> b) -> Context (Body def a) (Body def b) container ->
Body def (Context a b container)
addBodyContexts tob (Context intoContainer body) =
afterSetter %~ intoContainer $
case body of
BodyLam (Lam k paramId func arg) ->
Lam k paramId
(Context (flip (Lam k paramId) (tob arg)) func)
(Context (Lam k paramId (tob func)) arg)
& BodyLam
& afterSetter %~ BodyLam
BodyApply (Apply func arg) ->
Apply
(Context (`Apply` tob arg) func)
(Context (tob func `Apply`) arg)
& BodyApply
& afterSetter %~ BodyApply
BodyRecord (Record k fields) ->
(Record k .
map (addTuple2Contexts tob) .
addListContexts (tob *** tob))
(Context (Record k) fields)
& BodyRecord
& afterSetter %~ BodyRecord
BodyGetField (GetField record tag) ->
GetField
(Context (`GetField` tob tag) record)
(Context (tob record `GetField`) tag)
& BodyGetField
& afterSetter %~ BodyGetField
BodyLeaf leaf -> BodyLeaf leaf
where
afterSetter = Lens.mapped . Lens.mapped
addExpressionContexts ::
(a -> b) ->
Context (Expression def a) (Expression def b) container ->
Expression def (Context a (Expression def b) container)
addExpressionContexts atob (Context intoContainer (Expression body a)) =
Expression newBody (Context intoContainer a)
where
newBody =
addExpressionContexts atob <$>
addBodyContexts (fmap atob) bodyPtr
bodyPtr =
Context (intoContainer . (`Expression` atob a)) body
| MatiasNAmendola/lamdu | Lamdu/Data/Expression/Utils.hs | gpl-3.0 | 17,954 | 0 | 20 | 3,964 | 6,116 | 3,127 | 2,989 | -1 | -1 |
{-# LANGUAGE FlexibleContexts #-}
module Slidecoding.Browser
( browse
) where
import Slidecoding.Types
import Prelude hiding (until)
import qualified Language.Haskell.GhcMod as GM (GhcModT, browse)
import qualified Language.Haskell.GhcMod.Monad as GMM (runGmOutT, runGhcModT', withGhcModEnv)
import Language.Haskell.GhcMod.Logging (gmSetLogLevel, gmAppendLogQuiet)
import Language.Haskell.GhcMod.Types (GhcModLog, GhcModError, IOish, Options(..), OutputOpts(..), BrowseOpts(..), defaultOptions, defaultBrowseOpts, defaultGhcModState)
import Codec.Binary.Base64.String as B64 (encode)
import Control.Arrow (first)
import Control.Monad.IO.Class (liftIO)
import Data.Char (isSpace, isUpper)
import Data.List (find, intercalate, isPrefixOf)
import Data.Maybe (fromMaybe)
import System.Directory (canonicalizePath)
import System.FilePath ((</>), (<.>))
browse :: Module -> IO Description
browse m = withSymbol <$> browseSignatures m >>= loadDescription m
type Interface = (Symbol, Signature)
withSymbol :: [Signature] -> [Interface]
withSymbol = map decorate
where decorate (Signature sig) = (symbol sig, Signature sig)
symbol = Symbol . unwords . takeWhile (/= "::") . words
loadDescription :: Module -> [Interface] -> IO Description
loadDescription m interface = Description m <$> loadItems m interface
loadItems :: Module -> [Interface] -> IO [Item]
loadItems m interface = loadAll <$> readModule m
where loadAll content = map (loadItem content) interface
loadItem :: [String] -> Interface -> Item
loadItem content (s, sig) = Item s sig . Source . base64 . intercalate "\n" $ scopeTo s content
base64 :: String -> String
base64 = filter (not . isSpace) . B64.encode
browseSignatures :: Module -> IO [Signature]
browseSignatures (Module wd m) = map Signature . lines <$> runGhcMod wd cmd
where cmd = GM.browse opts m
opts = defaultBrowseOpts { optBrowseOperators = True, optBrowseDetailed = True }
scopeTo :: Symbol -> [String] -> [String]
scopeTo (Symbol s) ls | isType s = firstNonEmpty [limitToPrefix ("data " ++ s) ls, limitToPrefix ("class " ++ s) ls]
| otherwise = limitToPrefix s ls
firstNonEmpty :: [[String]] -> [String]
firstNonEmpty = fromMaybe [] . find (not.null)
isType :: String -> Bool
isType = isUpper . head
limitToPrefix :: String -> [String] -> [String]
limitToPrefix prefix = until (anyOf [empty, otherPrefix]) . from (isPrefixOf prefix')
where prefix' = prefix ++ " "
otherPrefix = allOf [not . isSpace . head, not . isPrefixOf prefix']
from criteria = dropWhile (not.criteria)
until criteria = takeWhile (not.criteria)
anyOf ps x = any ($x) ps
allOf ps x = all ($x) ps
empty :: String -> Bool
empty = not . any (not.isSpace)
readModule :: Module -> IO [String]
readModule = load . modulePath
load :: FilePath -> IO [String]
load p = lines <$> readFile p
modulePath :: Module -> FilePath
modulePath (Module wd m) = wd </> "src" </> map dotToPath m <.> "hs"
where dotToPath '.' = '/'
dotToPath c = c
runGhcMod :: FilePath -> GM.GhcModT IO String -> IO String
runGhcMod wd = fmap extract . runGhcModT wd defaultOptions
where extract = either (const "") id . fst
runGhcModT :: IOish m => FilePath -> Options -> GM.GhcModT m a -> m (Either GhcModError a, GhcModLog)
runGhcModT wd opt action = liftIO (canonicalizePath wd) >>= \dir' ->
GMM.runGmOutT opt $
GMM.withGhcModEnv dir' opt $ \(env,lg) ->
first (fst <$>) <$> GMM.runGhcModT' env defaultGhcModState (do
gmSetLogLevel (ooptLogLevel $ optOutput opt)
gmAppendLogQuiet lg
action)
| ptitfred/slidecoding | src/Slidecoding/Browser.hs | gpl-3.0 | 3,706 | 0 | 17 | 763 | 1,295 | 695 | 600 | 73 | 2 |
{-# LANGUAGE TupleSections, OverloadedStrings #-}
module Handler.Construct where
import Import
-- This is a handler function for the GET request method on the ConstructR
-- resource pattern. All of your resource patterns are defined in
-- config/routes
--
-- The majority of the code you will write in Yesod lives in these handler
-- functions. You can spread them across multiple files if you are so
-- inclined, or create a single monolithic file.
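-- For reference, a hypothetical routes entry that would wire this handler
-- up (the URL path below is an assumption; only the resource name and
-- method come from this module):
--
-- > /construct ConstructR GET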
getConstructR :: Handler RepHtml
getConstructR = do
u <- requireAuth
{-
(formWidget, formEnctype) <- generateFormPost sampleForm
let submission = Nothing :: Maybe (FileInfo, Text)
handlerName = "getHomeR" :: Text
-}
defaultLayout $ do
aDomId <- lift newIdent
setTitle "Welcome To Yesod!"
let rows = 15 :: Int
cols = 15 :: Int
$(widgetFile "grid")
{-
postGridR :: Handler RepHtml
postGridR = do
((result, formWidget), formEnctype) <- runFormPost sampleForm
let handlerName = "postHomeR" :: Text
submission = case result of
FormSuccess res -> Just res
_ -> Nothing
defaultLayout $ do
aDomId <- lift newIdent
setTitle "Welcome To Yesod!"
$(widgetFile "homepage")
-}
| ajdunlap/cruzo | Handler/Construct.hs | gpl-3.0 | 1,252 | 0 | 12 | 319 | 92 | 50 | 42 | -1 | -1 |
-- A simple game
-- The game is played on a board consisting of a row of places. An empty
-- place is indicated by a 0, a place with a piece by a positive integer. The
-- allowable moves for a piece of value n are exactly n steps in either direction.
-- Operations on the ADT: lift a piece from one place; drop it in another; compute the score.
-- The definitions for these operations are split up into two groups: the first group checks the
-- validity of the arguments, the second implements the operations.
module RowGameLib(Position(), posStyle, initPos, checkLift, checkDrop, liftIt, dropIt, score) where {
import Styles; -- Needed for the styles (e.g. "list box4 fmt5") used in this module
import Char; -- Needed for the "intToDigit" function used in computing the score
data Position = Position {state :: Maybe (Int, Int), view :: [Int]}; -- state records the (index, value) of a picked-up piece, if any
initPos = Position Nothing [3, 0, 0, 2, 1, 0, 0, 3, 1, 0];
-- These two functions check and report on the validity of the pre-conditions for an operation
checkLift :: Int -> Position -> Either String String;
checkDrop :: Int -> Position -> Either String String;
checkLift i pos =
case pos of {
Position Nothing xs ->
if (i < 0) || (i >= length xs) then
Left "Index is out of bounds!"
else if (xs !! i) == 0 then
Left "Cannot pick up a piece of value 0!"
else
Right "OK";
Position (Just _) xs ->
Left "A piece has already been picked up!"
};
checkDrop i pos =
case pos of {
Position Nothing xs ->
Left "No piece has been picked up!";
Position (Just (j,n)) xs ->
if (i < 0) || (i >= length xs) then
Left "Index is out of bounds!"
else if (xs !! i) /= 0 then
Left "Cannot drop a piece in a non-empty place!"
else if (i /= (j-n)) && (i /= j) && (i /= (j+n)) then
Left "Not allowed to drop it here!"
else
Right "OK"
};
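-- A few hypothetical GHCi probes against the initial position
-- (indices refer to the board [3, 0, 0, 2, 1, 0, 0, 3, 1, 0] above):
--
-- > checkLift 0 initPos -- Right "OK" (a piece of value 3)
-- > checkLift 1 initPos -- Left "Cannot pick up a piece of value 0!"
-- > checkDrop 1 (liftIt 3 initPos) -- Right "OK" (the 2 may move two steps left)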
-- These two functions actually carry out the operations (assumed to be valid)
liftIt :: Int -> Position -> Position;
dropIt :: Int -> Position -> Position;
liftIt i pos =
case pos of {
Position Nothing xs ->
let { n = xs !! i;
state' = Just (i,n);
xs' = subst i 0 xs
} in Position state' xs';
Position (Just _) xs ->
error "Game.liftIt"
};
dropIt i pos =
case pos of {
Position Nothing xs ->
error "Game.dropIt";
Position (Just (j,n)) xs ->
let { state' = Nothing;
xs' = subst i n xs
} in Position state' xs'
};
-- The score of the game is computed by treating the position as a decimal numeral
score :: Position -> String;
score pos =
case pos of {
Position Nothing xs -> let digits = map intToDigit (dropWhile (== 0) xs)
in "The score is: " ++ digits;
Position (Just _) _ -> "Cannot compute the score when you have a piece not on the board!"
};
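-- For example (a hypothetical session; the move lifts the 2 at index 3
-- and drops it two places to the left):
--
-- > score initPos -- "The score is: 3002100310"
-- > score (dropIt 1 (liftIt 3 initPos)) -- "The score is: 3200100310"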
-- SUBSIDIARY FUNCTIONS -------------------------------------------------------------
-- Replace the i-th element of a list (0-based indexing) with the given element
subst :: Int -> a -> [a] -> [a];
subst i x xs = if i == 0 then
x : tail xs
else
head xs : subst (i-1) x (tail xs);
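-- For example (illustration only):
-- subst 2 9 [1,2,3,4] == [1,2,9,4]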
-- Define an instance of Show for Position (useful for debugging)
instance Show Position where {
shows pos s =
case pos of {
Position Nothing xs -> "Position Nothing " ++ shows xs s;
Position (Just (i,n)) xs -> "Position (Just (" ++ shows i ("," ++ shows n (")) " ++ shows xs s))
};
show pos = shows pos ""
};
-- Definition of a style for Position
posStyle = Data [vStyle];
vStyle = VJuxta cStyle [fStyle1, fStyle2] True NoFormat;
cStyle = CText cBox cFmt;
cBox = Box 480 25 lightGray green;
cFmt = Format SansSerif 16 red True False;
fStyle1 = FHide;
fStyle2 = FLink NoBox fLink (list box4 fmt5);
fLink = Link (-380, 5) True trans cBox
}
| ckaestne/CIDE | CIDE_Language_Haskell/test/fromviral/RowGameLib.hs | gpl-3.0 | 4,246 | 0 | 16 | 1,430 | 1,063 | 585 | 478 | 74 | 5 |
module ATP.Util.Print.Print
( Print(..)
, GenPrint(..)
, prettyShow
, paren
, commas
, list
, listVert
, listHoriz
, tuple
, tupleVert
, tupleHoriz
, set
, setVert
, setHoriz
, putStrLn
, dot
)
where
import Prelude hiding (putStrLn)
import qualified System.IO.UTF8 as S
import qualified Data.Map as Map
import Data.Map (Map)
import qualified Data.Set as Set
import Data.Set (Set)
import qualified Text.PrettyPrint.HughesPJ as PP
import Text.PrettyPrint.HughesPJ (Doc, (<+>))
import qualified Data.Ratio as Ratio
dot :: PP.Doc
dot = PP.text "."
paren :: Bool -> PP.Doc -> PP.Doc
paren False = id
paren True = PP.parens
commas :: [PP.Doc] -> [PP.Doc]
commas = PP.punctuate (PP.text ", ")
list :: [PP.Doc] -> PP.Doc
list = PP.brackets . PP.fcat . commas
listVert :: [PP.Doc] -> PP.Doc
listVert = PP.brackets . PP.vcat . commas
listHoriz :: [PP.Doc] -> PP.Doc
listHoriz = PP.brackets . PP.hcat . commas
tuple :: [PP.Doc] -> PP.Doc
tuple = PP.parens . PP.fcat . commas
tupleVert :: [PP.Doc] -> PP.Doc
tupleVert = PP.parens . PP.vcat . commas
tupleHoriz :: [PP.Doc] -> PP.Doc
tupleHoriz = PP.parens . PP.hcat . commas
set :: [PP.Doc] -> PP.Doc
set = PP.braces . PP.fcat . commas
setVert :: [PP.Doc] -> PP.Doc
setVert = PP.braces . PP.vcat . commas
setHoriz :: [PP.Doc] -> PP.Doc
setHoriz = PP.braces . PP.hcat . commas
class Print a where
pPrintPrec :: PrettyLevel -> Rational -> a -> Doc
pPrint :: a -> Doc
pPrintList :: PrettyLevel -> [a] -> Doc
pPrintPrec _ _ = pPrint
pPrint = pPrintPrec prettyNormal 0
pPrintList l = PP.brackets . PP.fsep . PP.punctuate PP.comma
. map (pPrintPrec l 0)
newtype PrettyLevel = PrettyLevel Int
deriving (Eq, Ord, Show)
prettyNormal :: PrettyLevel
prettyNormal = PrettyLevel 0
prettyShow :: Print a => a -> String
prettyShow = PP.render . pPrint
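-- A couple of hypothetical examples of what the default instances below
-- render to (tuple and list instances are defined further down):
--
-- > prettyShow (1 :: Int, True) == "(1, True)"
-- > prettyShow [1, 2, 3 :: Int] == "[1, 2, 3]"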
pPrint0 :: Print a => PrettyLevel -> a -> Doc
pPrint0 l = pPrintPrec l 0
appPrec :: Rational
appPrec = 10
putStrLn :: PP.Doc -> IO ()
putStrLn = S.putStrLn . PP.render
instance Print Int where
pPrint = PP.int
instance Print Integer where
pPrint = PP.integer
instance Print Float where
pPrint = PP.float
instance Print Double where
pPrint = PP.double
instance Print (a -> b) where
pPrint _ = PP.text "<fun>"
instance Print () where
pPrint _ = PP.text "()"
instance Print Bool where
pPrint = PP.text . show
instance Print Ordering where
pPrint = PP.text . show
instance Print Char where
pPrint = PP.char
pPrintList _ = PP.text . show
instance Print Rational where
pPrint n = if Ratio.denominator n == 1 then pPrint $ Ratio.numerator n
else PP.text $ show n
instance (Print a) => Print (Maybe a) where
pPrintPrec _ _ Nothing = PP.text "Nothing"
pPrintPrec l p (Just x) = paren (p > appPrec) $ PP.text "Just" <+> pPrintPrec l (appPrec+1) x
instance (Print a, Print b) => Print (Either a b) where
pPrintPrec l p (Left x) = paren (p > appPrec) $ PP.text "Left" <+> pPrintPrec l (appPrec+1) x
pPrintPrec l p (Right x) = paren (p > appPrec) $ PP.text "Right" <+> pPrintPrec l (appPrec+1) x
instance (Print a) => Print [a] where
pPrintPrec l _ = pPrintList l
instance (Print a, Print b) => Print (a, b) where
pPrintPrec l _ (a, b) =
PP.parens $ PP.fsep $ PP.punctuate PP.comma [pPrint0 l a, pPrint0 l b]
instance (Print a, Print b, Print c) => Print (a, b, c) where
pPrintPrec l _ (a, b, c) =
PP.parens $ PP.fsep $ PP.punctuate PP.comma [pPrint0 l a, pPrint0 l b, pPrint0 l c]
instance (Print a, Print b, Print c, Print d) => Print (a, b, c, d) where
pPrintPrec l _ (a, b, c, d) =
PP.parens $ PP.fsep $ PP.punctuate PP.comma [pPrint0 l a, pPrint0 l b, pPrint0 l c, pPrint0 l d]
instance (Print a, Print b, Print c, Print d, Print e) => Print (a, b, c, d, e) where
pPrintPrec l _ (a, b, c, d, e) =
PP.parens $ PP.fsep $ PP.punctuate PP.comma [pPrint0 l a, pPrint0 l b, pPrint0 l c, pPrint0 l d, pPrint0 l e]
instance (Print a, Print b, Print c, Print d, Print e, Print f) => Print (a, b, c, d, e, f) where
pPrintPrec l _ (a, b, c, d, e, f) =
PP.parens $ PP.fsep $ PP.punctuate PP.comma [pPrint0 l a, pPrint0 l b, pPrint0 l c, pPrint0 l d, pPrint0 l e, pPrint0 l f]
instance (Print a, Print b, Print c, Print d, Print e, Print f, Print g) =>
Print (a, b, c, d, e, f, g) where
pPrintPrec l _ (a, b, c, d, e, f, g) =
PP.parens $ PP.fsep $ PP.punctuate PP.comma [pPrint0 l a, pPrint0 l b, pPrint0 l c, pPrint0 l d, pPrint0 l e, pPrint0 l f, pPrint0 l g]
instance (Print a, Print b, Print c, Print d, Print e, Print f, Print g, Print h) =>
Print (a, b, c, d, e, f, g, h) where
pPrintPrec l _ (a, b, c, d, e, f, g, h) =
PP.parens $ PP.fsep $ PP.punctuate PP.comma [pPrint0 l a, pPrint0 l b, pPrint0 l c, pPrint0 l d, pPrint0 l e, pPrint0 l f, pPrint0 l g, pPrint0 l h]
instance Print a => Print (Set a) where
pPrint = setHoriz . map pPrint . Set.toList
instance (Print a, Print b) => Print (Map a b) where
pPrint = setHoriz . map pPrint . Map.toList
-- This is needed for proper unicode printing.
instance Print [String] where
pPrint = listHoriz . map PP.text
class GenPrint s a where
pPrintEnv :: s -> a -> Doc
instance Print a => GenPrint () a where
pPrintEnv _ = pPrint
| andre-artus/handbook-of-practical-logic-and-automated-reasoning-haskell | src/ATP/Util/Print/Print.hs | gpl-3.0 | 5,332 | 0 | 10 | 1,242 | 2,458 | 1,316 | 1,142 | -1 | -1 |
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE TemplateHaskell #-}
module Selector
( Selector
-- * Constructor
, makeSelector
-- * Lenses
, selected
, nonSelected
-- * Query
, hasSelected
-- * Operations
, select
, selectAll
, unselectAll
)
where
import Data.Foldable (for_)
import Lens.Micro
import Lens.Micro.Extras (view)
import Lens.Micro.TH
data Selector a = Selector { _selected :: [a]
, _nonSelected :: [a]
}
deriving (Foldable, Functor, Show, Traversable)
makeLenses ''Selector
makeSelector :: [a] -> Selector a
makeSelector elements = Selector { _selected = [], _nonSelected = elements }
hasSelected :: Selector a -> Bool
hasSelected = not . null . _selected
unselectAll :: Selector a -> Selector a
unselectAll s =
s & nonSelected %~ (view selected s ++)
& selected .~ []
selectAll :: Selector a -> Selector a
selectAll s =
s & selected %~ (view nonSelected s ++)
& nonSelected .~ []
-- | Selects/unselects part for given index.
select :: Int -> Selector a -> Selector a
select i s
| i < 0 = s
| i < n =
moveElement i selected nonSelected s
| otherwise =
moveElement (i-n) nonSelected selected s
where
n = length (view selected s)
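-- A hypothetical example: indices [0 .. length selected - 1] address the
-- selected part, anything from 'length selected' upwards addresses the
-- non-selected part.
--
-- > select 0 (makeSelector "abc")
-- > -- Selector {_selected = "a", _nonSelected = "bc"}
-- > select 0 (select 0 (makeSelector "abc"))
-- > -- Selector {_selected = "", _nonSelected = "abc"}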
-- Rank2Types is needed because Lens' is itself a universally quantified
-- (rank-2) type, so taking lenses as function arguments requires the extension.
moveElement :: Int -> Lens' (Selector a) [a] -> Lens' (Selector a) [a] -> Selector a -> Selector a
moveElement i from to s =
s & from .~ rest
& to %~ (p ++)
where
(p, rest) = pick i (view from s)
--------------------------------------------------------------------------------
pick :: Int -> [a] -> ([a],[a])
pick i xs = (x, before ++ after)
where
(before,r) = splitAt i xs
(x, after) = splitAt 1 r
| holmisen/glbrix | src/Selector.hs | gpl-3.0 | 1,817 | 0 | 10 | 455 | 596 | 322 | 274 | 50 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.MachineLearning.Types.Product
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.MachineLearning.Types.Product where
import Network.Google.MachineLearning.Types.Sum
import Network.Google.Prelude
-- | Request message for the SetDefaultVersion request.
--
-- /See:/ 'googleCloudMlV1beta1__SetDefaultVersionRequest' smart constructor.
data GoogleCloudMlV1beta1__SetDefaultVersionRequest =
GoogleCloudMlV1beta1__SetDefaultVersionRequest'
deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'GoogleCloudMlV1beta1__SetDefaultVersionRequest' with the minimum fields required to make a request.
--
googleCloudMlV1beta1__SetDefaultVersionRequest
:: GoogleCloudMlV1beta1__SetDefaultVersionRequest
googleCloudMlV1beta1__SetDefaultVersionRequest =
GoogleCloudMlV1beta1__SetDefaultVersionRequest'
instance FromJSON
GoogleCloudMlV1beta1__SetDefaultVersionRequest where
parseJSON
= withObject
"GoogleCloudMlV1beta1SetDefaultVersionRequest"
(\ o ->
pure GoogleCloudMlV1beta1__SetDefaultVersionRequest')
instance ToJSON
GoogleCloudMlV1beta1__SetDefaultVersionRequest where
toJSON = const emptyObject
-- | Represents a version of the model. Each version is a trained model
-- deployed in the cloud, ready to handle prediction requests. A model can
-- have multiple versions. You can get information about all of the
-- versions of a given model by calling
-- [projects.models.versions.list](\/ml\/reference\/rest\/v1beta1\/projects.models.versions\/list).
--
-- /See:/ 'googleCloudMlV1beta1__Version' smart constructor.
data GoogleCloudMlV1beta1__Version = GoogleCloudMlV1beta1__Version'
{ _gcmvvRuntimeVersion :: !(Maybe Text)
, _gcmvvLastUseTime :: !(Maybe DateTime')
, _gcmvvName :: !(Maybe Text)
, _gcmvvDeploymentURI :: !(Maybe Text)
, _gcmvvDescription :: !(Maybe Text)
, _gcmvvCreateTime :: !(Maybe DateTime')
, _gcmvvOnlinePredictionLogging :: !(Maybe Bool)
, _gcmvvIsDefault :: !(Maybe Bool)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'GoogleCloudMlV1beta1__Version' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gcmvvRuntimeVersion'
--
-- * 'gcmvvLastUseTime'
--
-- * 'gcmvvName'
--
-- * 'gcmvvDeploymentURI'
--
-- * 'gcmvvDescription'
--
-- * 'gcmvvCreateTime'
--
-- * 'gcmvvOnlinePredictionLogging'
--
-- * 'gcmvvIsDefault'
googleCloudMlV1beta1__Version
:: GoogleCloudMlV1beta1__Version
googleCloudMlV1beta1__Version =
GoogleCloudMlV1beta1__Version'
{ _gcmvvRuntimeVersion = Nothing
, _gcmvvLastUseTime = Nothing
, _gcmvvName = Nothing
, _gcmvvDeploymentURI = Nothing
, _gcmvvDescription = Nothing
, _gcmvvCreateTime = Nothing
, _gcmvvOnlinePredictionLogging = Nothing
, _gcmvvIsDefault = Nothing
}
-- | Optional. The Google Cloud ML runtime version to use for this
-- deployment. If not set, Google Cloud ML will choose a version.
gcmvvRuntimeVersion :: Lens' GoogleCloudMlV1beta1__Version (Maybe Text)
gcmvvRuntimeVersion
= lens _gcmvvRuntimeVersion
(\ s a -> s{_gcmvvRuntimeVersion = a})
-- | Output only. The time the version was last used for prediction.
gcmvvLastUseTime :: Lens' GoogleCloudMlV1beta1__Version (Maybe UTCTime)
gcmvvLastUseTime
= lens _gcmvvLastUseTime
(\ s a -> s{_gcmvvLastUseTime = a})
. mapping _DateTime
-- | Required.The name specified for the version when it was created. The
-- version name must be unique within the model it is created in.
gcmvvName :: Lens' GoogleCloudMlV1beta1__Version (Maybe Text)
gcmvvName
= lens _gcmvvName (\ s a -> s{_gcmvvName = a})
-- | Required. The Google Cloud Storage location of the trained model used to
-- create the version. See the [overview of model
-- deployment](\/ml\/docs\/concepts\/deployment-overview) for more
-- information. When passing Version to
-- [projects.models.versions.create](\/ml\/reference\/rest\/v1beta1\/projects.models.versions\/create)
-- the model service uses the specified location as the source of the
-- model. Once deployed, the model version is hosted by the prediction
-- service, so this location is useful only as a historical record.
gcmvvDeploymentURI :: Lens' GoogleCloudMlV1beta1__Version (Maybe Text)
gcmvvDeploymentURI
= lens _gcmvvDeploymentURI
(\ s a -> s{_gcmvvDeploymentURI = a})
-- | Optional. The description specified for the version when it was created.
gcmvvDescription :: Lens' GoogleCloudMlV1beta1__Version (Maybe Text)
gcmvvDescription
= lens _gcmvvDescription
(\ s a -> s{_gcmvvDescription = a})
-- | Output only. The time the version was created.
gcmvvCreateTime :: Lens' GoogleCloudMlV1beta1__Version (Maybe UTCTime)
gcmvvCreateTime
= lens _gcmvvCreateTime
(\ s a -> s{_gcmvvCreateTime = a})
. mapping _DateTime
-- | Optional. If true, enables StackDriver Logging for online prediction.
-- Default is false.
gcmvvOnlinePredictionLogging :: Lens' GoogleCloudMlV1beta1__Version (Maybe Bool)
gcmvvOnlinePredictionLogging
= lens _gcmvvOnlinePredictionLogging
(\ s a -> s{_gcmvvOnlinePredictionLogging = a})
-- | Output only. If true, this version will be used to handle prediction
-- requests that do not specify a version. You can change the default
-- version by calling
-- [projects.methods.versions.setDefault](\/ml\/reference\/rest\/v1beta1\/projects.models.versions\/setDefault).
gcmvvIsDefault :: Lens' GoogleCloudMlV1beta1__Version (Maybe Bool)
gcmvvIsDefault
= lens _gcmvvIsDefault
(\ s a -> s{_gcmvvIsDefault = a})
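-- A hypothetical way to fill in a 'GoogleCloudMlV1beta1__Version' with the
-- lenses above (the name and bucket path are made-up values, and this
-- assumes the (&) and (?~) lens operators re-exported by
-- 'Network.Google.Prelude'):
--
-- > googleCloudMlV1beta1__Version
-- >   & gcmvvName ?~ "v1"
-- >   & gcmvvDeploymentURI ?~ "gs://example-bucket/model/"
-- >   & gcmvvDescription ?~ "First deployed version"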
instance FromJSON GoogleCloudMlV1beta1__Version where
parseJSON
= withObject "GoogleCloudMlV1beta1Version"
(\ o ->
GoogleCloudMlV1beta1__Version' <$>
(o .:? "runtimeVersion") <*> (o .:? "lastUseTime")
<*> (o .:? "name")
<*> (o .:? "deploymentUri")
<*> (o .:? "description")
<*> (o .:? "createTime")
<*> (o .:? "onlinePredictionLogging")
<*> (o .:? "isDefault"))
instance ToJSON GoogleCloudMlV1beta1__Version where
toJSON GoogleCloudMlV1beta1__Version'{..}
= object
(catMaybes
[("runtimeVersion" .=) <$> _gcmvvRuntimeVersion,
("lastUseTime" .=) <$> _gcmvvLastUseTime,
("name" .=) <$> _gcmvvName,
("deploymentUri" .=) <$> _gcmvvDeploymentURI,
("description" .=) <$> _gcmvvDescription,
("createTime" .=) <$> _gcmvvCreateTime,
("onlinePredictionLogging" .=) <$>
_gcmvvOnlinePredictionLogging,
("isDefault" .=) <$> _gcmvvIsDefault])
-- | Represents a set of hyperparameters to optimize.
--
-- /See:/ 'googleCloudMlV1beta1__HyperparameterSpec' smart constructor.
data GoogleCloudMlV1beta1__HyperparameterSpec = GoogleCloudMlV1beta1__HyperparameterSpec'
{ _gcmvhsParams :: !(Maybe [GoogleCloudMlV1beta1__ParameterSpec])
, _gcmvhsGoal :: !(Maybe GoogleCloudMlV1beta1__HyperparameterSpecGoal)
, _gcmvhsMaxTrials :: !(Maybe (Textual Int32))
, _gcmvhsMaxParallelTrials :: !(Maybe (Textual Int32))
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'GoogleCloudMlV1beta1__HyperparameterSpec' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gcmvhsParams'
--
-- * 'gcmvhsGoal'
--
-- * 'gcmvhsMaxTrials'
--
-- * 'gcmvhsMaxParallelTrials'
googleCloudMlV1beta1__HyperparameterSpec
:: GoogleCloudMlV1beta1__HyperparameterSpec
googleCloudMlV1beta1__HyperparameterSpec =
GoogleCloudMlV1beta1__HyperparameterSpec'
{ _gcmvhsParams = Nothing
, _gcmvhsGoal = Nothing
, _gcmvhsMaxTrials = Nothing
, _gcmvhsMaxParallelTrials = Nothing
}
-- | Required. The set of parameters to tune.
gcmvhsParams :: Lens' GoogleCloudMlV1beta1__HyperparameterSpec [GoogleCloudMlV1beta1__ParameterSpec]
gcmvhsParams
= lens _gcmvhsParams (\ s a -> s{_gcmvhsParams = a})
. _Default
. _Coerce
-- | Required. The type of goal to use for tuning. Available types are
-- \`MAXIMIZE\` and \`MINIMIZE\`. Defaults to \`MAXIMIZE\`.
gcmvhsGoal :: Lens' GoogleCloudMlV1beta1__HyperparameterSpec (Maybe GoogleCloudMlV1beta1__HyperparameterSpecGoal)
gcmvhsGoal
= lens _gcmvhsGoal (\ s a -> s{_gcmvhsGoal = a})
-- | Optional. How many training trials should be attempted to optimize the
-- specified hyperparameters. Defaults to one.
gcmvhsMaxTrials :: Lens' GoogleCloudMlV1beta1__HyperparameterSpec (Maybe Int32)
gcmvhsMaxTrials
= lens _gcmvhsMaxTrials
(\ s a -> s{_gcmvhsMaxTrials = a})
. mapping _Coerce
-- | Optional. The number of training trials to run concurrently. You can
-- reduce the time it takes to perform hyperparameter tuning by adding
-- trials in parallel. However, each trial only benefits from the
-- information gained in completed trials. That means that a trial does not
-- get access to the results of trials running at the same time, which
-- could reduce the quality of the overall optimization. Each trial will
-- use the same scale tier and machine types. Defaults to one.
gcmvhsMaxParallelTrials :: Lens' GoogleCloudMlV1beta1__HyperparameterSpec (Maybe Int32)
gcmvhsMaxParallelTrials
= lens _gcmvhsMaxParallelTrials
(\ s a -> s{_gcmvhsMaxParallelTrials = a})
. mapping _Coerce
instance FromJSON
GoogleCloudMlV1beta1__HyperparameterSpec where
parseJSON
= withObject "GoogleCloudMlV1beta1HyperparameterSpec"
(\ o ->
GoogleCloudMlV1beta1__HyperparameterSpec' <$>
(o .:? "params" .!= mempty) <*> (o .:? "goal") <*>
(o .:? "maxTrials")
<*> (o .:? "maxParallelTrials"))
instance ToJSON
GoogleCloudMlV1beta1__HyperparameterSpec where
toJSON GoogleCloudMlV1beta1__HyperparameterSpec'{..}
= object
(catMaybes
[("params" .=) <$> _gcmvhsParams,
("goal" .=) <$> _gcmvhsGoal,
("maxTrials" .=) <$> _gcmvhsMaxTrials,
("maxParallelTrials" .=) <$>
_gcmvhsMaxParallelTrials])
-- | Returns service account information associated with a project.
--
-- /See:/ 'googleCloudMlV1beta1__GetConfigResponse' smart constructor.
data GoogleCloudMlV1beta1__GetConfigResponse = GoogleCloudMlV1beta1__GetConfigResponse'
{ _gcmvgcrServiceAccount :: !(Maybe Text)
, _gcmvgcrServiceAccountProject :: !(Maybe (Textual Int64))
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'GoogleCloudMlV1beta1__GetConfigResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gcmvgcrServiceAccount'
--
-- * 'gcmvgcrServiceAccountProject'
googleCloudMlV1beta1__GetConfigResponse
:: GoogleCloudMlV1beta1__GetConfigResponse
googleCloudMlV1beta1__GetConfigResponse =
GoogleCloudMlV1beta1__GetConfigResponse'
{ _gcmvgcrServiceAccount = Nothing
, _gcmvgcrServiceAccountProject = Nothing
}
-- | The service account Cloud ML uses to access resources in the project.
gcmvgcrServiceAccount :: Lens' GoogleCloudMlV1beta1__GetConfigResponse (Maybe Text)
gcmvgcrServiceAccount
= lens _gcmvgcrServiceAccount
(\ s a -> s{_gcmvgcrServiceAccount = a})
-- | The project number for \`service_account\`.
gcmvgcrServiceAccountProject :: Lens' GoogleCloudMlV1beta1__GetConfigResponse (Maybe Int64)
gcmvgcrServiceAccountProject
= lens _gcmvgcrServiceAccountProject
(\ s a -> s{_gcmvgcrServiceAccountProject = a})
. mapping _Coerce
instance FromJSON
GoogleCloudMlV1beta1__GetConfigResponse where
parseJSON
= withObject "GoogleCloudMlV1beta1GetConfigResponse"
(\ o ->
GoogleCloudMlV1beta1__GetConfigResponse' <$>
(o .:? "serviceAccount") <*>
(o .:? "serviceAccountProject"))
instance ToJSON
GoogleCloudMlV1beta1__GetConfigResponse where
toJSON GoogleCloudMlV1beta1__GetConfigResponse'{..}
= object
(catMaybes
[("serviceAccount" .=) <$> _gcmvgcrServiceAccount,
("serviceAccountProject" .=) <$>
_gcmvgcrServiceAccountProject])
-- | Represents a training or prediction job.
--
-- /See:/ 'googleCloudMlV1beta1__Job' smart constructor.
data GoogleCloudMlV1beta1__Job = GoogleCloudMlV1beta1__Job'
{ _gcmvjState :: !(Maybe GoogleCloudMlV1beta1__JobState)
, _gcmvjTrainingOutput :: !(Maybe GoogleCloudMlV1beta1__TrainingOutput)
, _gcmvjJobId :: !(Maybe Text)
, _gcmvjStartTime :: !(Maybe DateTime')
, _gcmvjPredictionInput :: !(Maybe GoogleCloudMlV1beta1__PredictionInput)
, _gcmvjEndTime :: !(Maybe DateTime')
, _gcmvjPredictionOutput :: !(Maybe GoogleCloudMlV1beta1__PredictionOutput)
, _gcmvjErrorMessage :: !(Maybe Text)
, _gcmvjTrainingInput :: !(Maybe GoogleCloudMlV1beta1__TrainingInput)
, _gcmvjCreateTime :: !(Maybe DateTime')
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'GoogleCloudMlV1beta1__Job' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gcmvjState'
--
-- * 'gcmvjTrainingOutput'
--
-- * 'gcmvjJobId'
--
-- * 'gcmvjStartTime'
--
-- * 'gcmvjPredictionInput'
--
-- * 'gcmvjEndTime'
--
-- * 'gcmvjPredictionOutput'
--
-- * 'gcmvjErrorMessage'
--
-- * 'gcmvjTrainingInput'
--
-- * 'gcmvjCreateTime'
googleCloudMlV1beta1__Job
:: GoogleCloudMlV1beta1__Job
googleCloudMlV1beta1__Job =
GoogleCloudMlV1beta1__Job'
{ _gcmvjState = Nothing
, _gcmvjTrainingOutput = Nothing
, _gcmvjJobId = Nothing
, _gcmvjStartTime = Nothing
, _gcmvjPredictionInput = Nothing
, _gcmvjEndTime = Nothing
, _gcmvjPredictionOutput = Nothing
, _gcmvjErrorMessage = Nothing
, _gcmvjTrainingInput = Nothing
, _gcmvjCreateTime = Nothing
}
-- | Output only. The detailed state of a job.
gcmvjState :: Lens' GoogleCloudMlV1beta1__Job (Maybe GoogleCloudMlV1beta1__JobState)
gcmvjState
= lens _gcmvjState (\ s a -> s{_gcmvjState = a})
-- | The current training job result.
gcmvjTrainingOutput :: Lens' GoogleCloudMlV1beta1__Job (Maybe GoogleCloudMlV1beta1__TrainingOutput)
gcmvjTrainingOutput
= lens _gcmvjTrainingOutput
(\ s a -> s{_gcmvjTrainingOutput = a})
-- | Required. The user-specified id of the job.
gcmvjJobId :: Lens' GoogleCloudMlV1beta1__Job (Maybe Text)
gcmvjJobId
= lens _gcmvjJobId (\ s a -> s{_gcmvjJobId = a})
-- | Output only. When the job processing was started.
gcmvjStartTime :: Lens' GoogleCloudMlV1beta1__Job (Maybe UTCTime)
gcmvjStartTime
= lens _gcmvjStartTime
(\ s a -> s{_gcmvjStartTime = a})
. mapping _DateTime
-- | Input parameters to create a prediction job.
gcmvjPredictionInput :: Lens' GoogleCloudMlV1beta1__Job (Maybe GoogleCloudMlV1beta1__PredictionInput)
gcmvjPredictionInput
= lens _gcmvjPredictionInput
(\ s a -> s{_gcmvjPredictionInput = a})
-- | Output only. When the job processing was completed.
gcmvjEndTime :: Lens' GoogleCloudMlV1beta1__Job (Maybe UTCTime)
gcmvjEndTime
= lens _gcmvjEndTime (\ s a -> s{_gcmvjEndTime = a})
. mapping _DateTime
-- | The current prediction job result.
gcmvjPredictionOutput :: Lens' GoogleCloudMlV1beta1__Job (Maybe GoogleCloudMlV1beta1__PredictionOutput)
gcmvjPredictionOutput
= lens _gcmvjPredictionOutput
(\ s a -> s{_gcmvjPredictionOutput = a})
-- | Output only. The details of a failure or a cancellation.
gcmvjErrorMessage :: Lens' GoogleCloudMlV1beta1__Job (Maybe Text)
gcmvjErrorMessage
= lens _gcmvjErrorMessage
(\ s a -> s{_gcmvjErrorMessage = a})
-- | Input parameters to create a training job.
gcmvjTrainingInput :: Lens' GoogleCloudMlV1beta1__Job (Maybe GoogleCloudMlV1beta1__TrainingInput)
gcmvjTrainingInput
= lens _gcmvjTrainingInput
(\ s a -> s{_gcmvjTrainingInput = a})
-- | Output only. When the job was created.
gcmvjCreateTime :: Lens' GoogleCloudMlV1beta1__Job (Maybe UTCTime)
gcmvjCreateTime
= lens _gcmvjCreateTime
(\ s a -> s{_gcmvjCreateTime = a})
. mapping _DateTime
instance FromJSON GoogleCloudMlV1beta1__Job where
parseJSON
= withObject "GoogleCloudMlV1beta1Job"
(\ o ->
GoogleCloudMlV1beta1__Job' <$>
(o .:? "state") <*> (o .:? "trainingOutput") <*>
(o .:? "jobId")
<*> (o .:? "startTime")
<*> (o .:? "predictionInput")
<*> (o .:? "endTime")
<*> (o .:? "predictionOutput")
<*> (o .:? "errorMessage")
<*> (o .:? "trainingInput")
<*> (o .:? "createTime"))
instance ToJSON GoogleCloudMlV1beta1__Job where
toJSON GoogleCloudMlV1beta1__Job'{..}
= object
(catMaybes
[("state" .=) <$> _gcmvjState,
("trainingOutput" .=) <$> _gcmvjTrainingOutput,
("jobId" .=) <$> _gcmvjJobId,
("startTime" .=) <$> _gcmvjStartTime,
("predictionInput" .=) <$> _gcmvjPredictionInput,
("endTime" .=) <$> _gcmvjEndTime,
("predictionOutput" .=) <$> _gcmvjPredictionOutput,
("errorMessage" .=) <$> _gcmvjErrorMessage,
("trainingInput" .=) <$> _gcmvjTrainingInput,
("createTime" .=) <$> _gcmvjCreateTime])
-- | Represents results of a training job. Output only.
--
-- /See:/ 'googleCloudMlV1beta1__TrainingOutput' smart constructor.
data GoogleCloudMlV1beta1__TrainingOutput = GoogleCloudMlV1beta1__TrainingOutput'
{ _gcmvtoIsHyperparameterTuningJob :: !(Maybe Bool)
, _gcmvtoCompletedTrialCount :: !(Maybe (Textual Int64))
, _gcmvtoConsumedMLUnits :: !(Maybe (Textual Double))
, _gcmvtoTrials :: !(Maybe [GoogleCloudMlV1beta1__HyperparameterOutput])
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'GoogleCloudMlV1beta1__TrainingOutput' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gcmvtoIsHyperparameterTuningJob'
--
-- * 'gcmvtoCompletedTrialCount'
--
-- * 'gcmvtoConsumedMLUnits'
--
-- * 'gcmvtoTrials'
googleCloudMlV1beta1__TrainingOutput
:: GoogleCloudMlV1beta1__TrainingOutput
googleCloudMlV1beta1__TrainingOutput =
GoogleCloudMlV1beta1__TrainingOutput'
{ _gcmvtoIsHyperparameterTuningJob = Nothing
, _gcmvtoCompletedTrialCount = Nothing
, _gcmvtoConsumedMLUnits = Nothing
, _gcmvtoTrials = Nothing
}
-- | Whether this job is a hyperparameter tuning job.
gcmvtoIsHyperparameterTuningJob :: Lens' GoogleCloudMlV1beta1__TrainingOutput (Maybe Bool)
gcmvtoIsHyperparameterTuningJob
= lens _gcmvtoIsHyperparameterTuningJob
(\ s a -> s{_gcmvtoIsHyperparameterTuningJob = a})
-- | The number of hyperparameter tuning trials that completed successfully.
-- Only set for hyperparameter tuning jobs.
gcmvtoCompletedTrialCount :: Lens' GoogleCloudMlV1beta1__TrainingOutput (Maybe Int64)
gcmvtoCompletedTrialCount
= lens _gcmvtoCompletedTrialCount
(\ s a -> s{_gcmvtoCompletedTrialCount = a})
. mapping _Coerce
-- | The amount of ML units consumed by the job.
gcmvtoConsumedMLUnits :: Lens' GoogleCloudMlV1beta1__TrainingOutput (Maybe Double)
gcmvtoConsumedMLUnits
= lens _gcmvtoConsumedMLUnits
(\ s a -> s{_gcmvtoConsumedMLUnits = a})
. mapping _Coerce
-- | Results for individual Hyperparameter trials. Only set for
-- hyperparameter tuning jobs.
gcmvtoTrials :: Lens' GoogleCloudMlV1beta1__TrainingOutput [GoogleCloudMlV1beta1__HyperparameterOutput]
gcmvtoTrials
= lens _gcmvtoTrials (\ s a -> s{_gcmvtoTrials = a})
. _Default
. _Coerce
instance FromJSON
GoogleCloudMlV1beta1__TrainingOutput where
parseJSON
= withObject "GoogleCloudMlV1beta1TrainingOutput"
(\ o ->
GoogleCloudMlV1beta1__TrainingOutput' <$>
(o .:? "isHyperparameterTuningJob") <*>
(o .:? "completedTrialCount")
<*> (o .:? "consumedMLUnits")
<*> (o .:? "trials" .!= mempty))
instance ToJSON GoogleCloudMlV1beta1__TrainingOutput
where
toJSON GoogleCloudMlV1beta1__TrainingOutput'{..}
= object
(catMaybes
[("isHyperparameterTuningJob" .=) <$>
_gcmvtoIsHyperparameterTuningJob,
("completedTrialCount" .=) <$>
_gcmvtoCompletedTrialCount,
("consumedMLUnits" .=) <$> _gcmvtoConsumedMLUnits,
("trials" .=) <$> _gcmvtoTrials])
-- | Represents a machine learning solution. A model can have multiple
-- versions, each of which is a deployed, trained model ready to receive
-- prediction requests. The model itself is just a container.
--
-- /See:/ 'googleCloudMlV1beta1__Model' smart constructor.
data GoogleCloudMlV1beta1__Model = GoogleCloudMlV1beta1__Model'
{ _gcmvmRegions :: !(Maybe [Text])
, _gcmvmDefaultVersion :: !(Maybe GoogleCloudMlV1beta1__Version)
, _gcmvmName :: !(Maybe Text)
, _gcmvmDescription :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'GoogleCloudMlV1beta1__Model' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gcmvmRegions'
--
-- * 'gcmvmDefaultVersion'
--
-- * 'gcmvmName'
--
-- * 'gcmvmDescription'
googleCloudMlV1beta1__Model
:: GoogleCloudMlV1beta1__Model
googleCloudMlV1beta1__Model =
GoogleCloudMlV1beta1__Model'
{ _gcmvmRegions = Nothing
, _gcmvmDefaultVersion = Nothing
, _gcmvmName = Nothing
, _gcmvmDescription = Nothing
}
-- | Optional. The list of regions where the model is going to be deployed.
-- Currently only one region per model is supported. Defaults to
-- \'us-central1\' if nothing is set.
gcmvmRegions :: Lens' GoogleCloudMlV1beta1__Model [Text]
gcmvmRegions
= lens _gcmvmRegions (\ s a -> s{_gcmvmRegions = a})
. _Default
. _Coerce
-- | Output only. The default version of the model. This version will be used
-- to handle prediction requests that do not specify a version. You can
-- change the default version by calling
-- [projects.methods.versions.setDefault](\/ml\/reference\/rest\/v1beta1\/projects.models.versions\/setDefault).
gcmvmDefaultVersion :: Lens' GoogleCloudMlV1beta1__Model (Maybe GoogleCloudMlV1beta1__Version)
gcmvmDefaultVersion
= lens _gcmvmDefaultVersion
(\ s a -> s{_gcmvmDefaultVersion = a})
-- | Required. The name specified for the model when it was created. The
-- model name must be unique within the project it is created in.
gcmvmName :: Lens' GoogleCloudMlV1beta1__Model (Maybe Text)
gcmvmName
= lens _gcmvmName (\ s a -> s{_gcmvmName = a})
-- | Optional. The description specified for the model when it was created.
gcmvmDescription :: Lens' GoogleCloudMlV1beta1__Model (Maybe Text)
gcmvmDescription
= lens _gcmvmDescription
(\ s a -> s{_gcmvmDescription = a})
instance FromJSON GoogleCloudMlV1beta1__Model where
parseJSON
= withObject "GoogleCloudMlV1beta1Model"
(\ o ->
GoogleCloudMlV1beta1__Model' <$>
(o .:? "regions" .!= mempty) <*>
(o .:? "defaultVersion")
<*> (o .:? "name")
<*> (o .:? "description"))
instance ToJSON GoogleCloudMlV1beta1__Model where
toJSON GoogleCloudMlV1beta1__Model'{..}
= object
(catMaybes
[("regions" .=) <$> _gcmvmRegions,
("defaultVersion" .=) <$> _gcmvmDefaultVersion,
("name" .=) <$> _gcmvmName,
("description" .=) <$> _gcmvmDescription])
-- | The normal response of the operation in case of success. If the original
-- method returns no data on success, such as \`Delete\`, the response is
-- \`google.protobuf.Empty\`. If the original method is standard
-- \`Get\`\/\`Create\`\/\`Update\`, the response should be the resource.
-- For other methods, the response should have the type \`XxxResponse\`,
-- where \`Xxx\` is the original method name. For example, if the original
-- method name is \`TakeSnapshot()\`, the inferred response type is
-- \`TakeSnapshotResponse\`.
--
-- /See:/ 'googleLongrunning__OperationResponse' smart constructor.
newtype GoogleLongrunning__OperationResponse = GoogleLongrunning__OperationResponse'
{ _glorAddtional :: HashMap Text JSONValue
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'GoogleLongrunning__OperationResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'glorAddtional'
googleLongrunning__OperationResponse
:: HashMap Text JSONValue -- ^ 'glorAddtional'
-> GoogleLongrunning__OperationResponse
googleLongrunning__OperationResponse pGlorAddtional_ =
GoogleLongrunning__OperationResponse'
{ _glorAddtional = _Coerce # pGlorAddtional_
}
-- | Properties of the object. Contains field \'type with type URL.
glorAddtional :: Lens' GoogleLongrunning__OperationResponse (HashMap Text JSONValue)
glorAddtional
= lens _glorAddtional
(\ s a -> s{_glorAddtional = a})
. _Coerce
instance FromJSON
GoogleLongrunning__OperationResponse where
parseJSON
= withObject "GoogleLongrunningOperationResponse"
(\ o ->
GoogleLongrunning__OperationResponse' <$>
(parseJSONObject o))
instance ToJSON GoogleLongrunning__OperationResponse
where
toJSON = toJSON . _glorAddtional
-- | Represents the metadata of the long-running operation.
--
-- /See:/ 'googleCloudMlV1beta1__OperationMetadata' smart constructor.
data GoogleCloudMlV1beta1__OperationMetadata = GoogleCloudMlV1beta1__OperationMetadata'
{ _gcmvomStartTime :: !(Maybe DateTime')
, _gcmvomModelName :: !(Maybe Text)
, _gcmvomVersion :: !(Maybe GoogleCloudMlV1beta1__Version)
, _gcmvomEndTime :: !(Maybe DateTime')
, _gcmvomIsCancellationRequested :: !(Maybe Bool)
, _gcmvomOperationType :: !(Maybe GoogleCloudMlV1beta1__OperationMetadataOperationType)
, _gcmvomCreateTime :: !(Maybe DateTime')
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'GoogleCloudMlV1beta1__OperationMetadata' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gcmvomStartTime'
--
-- * 'gcmvomModelName'
--
-- * 'gcmvomVersion'
--
-- * 'gcmvomEndTime'
--
-- * 'gcmvomIsCancellationRequested'
--
-- * 'gcmvomOperationType'
--
-- * 'gcmvomCreateTime'
googleCloudMlV1beta1__OperationMetadata
:: GoogleCloudMlV1beta1__OperationMetadata
googleCloudMlV1beta1__OperationMetadata =
GoogleCloudMlV1beta1__OperationMetadata'
{ _gcmvomStartTime = Nothing
, _gcmvomModelName = Nothing
, _gcmvomVersion = Nothing
, _gcmvomEndTime = Nothing
, _gcmvomIsCancellationRequested = Nothing
, _gcmvomOperationType = Nothing
, _gcmvomCreateTime = Nothing
}
-- | The time operation processing started.
gcmvomStartTime :: Lens' GoogleCloudMlV1beta1__OperationMetadata (Maybe UTCTime)
gcmvomStartTime
= lens _gcmvomStartTime
(\ s a -> s{_gcmvomStartTime = a})
. mapping _DateTime
-- | Contains the name of the model associated with the operation.
gcmvomModelName :: Lens' GoogleCloudMlV1beta1__OperationMetadata (Maybe Text)
gcmvomModelName
= lens _gcmvomModelName
(\ s a -> s{_gcmvomModelName = a})
-- | Contains the version associated with the operation.
gcmvomVersion :: Lens' GoogleCloudMlV1beta1__OperationMetadata (Maybe GoogleCloudMlV1beta1__Version)
gcmvomVersion
= lens _gcmvomVersion
(\ s a -> s{_gcmvomVersion = a})
-- | The time operation processing completed.
gcmvomEndTime :: Lens' GoogleCloudMlV1beta1__OperationMetadata (Maybe UTCTime)
gcmvomEndTime
= lens _gcmvomEndTime
(\ s a -> s{_gcmvomEndTime = a})
. mapping _DateTime
-- | Indicates whether a request to cancel this operation has been made.
gcmvomIsCancellationRequested :: Lens' GoogleCloudMlV1beta1__OperationMetadata (Maybe Bool)
gcmvomIsCancellationRequested
= lens _gcmvomIsCancellationRequested
(\ s a -> s{_gcmvomIsCancellationRequested = a})
-- | The operation type.
gcmvomOperationType :: Lens' GoogleCloudMlV1beta1__OperationMetadata (Maybe GoogleCloudMlV1beta1__OperationMetadataOperationType)
gcmvomOperationType
= lens _gcmvomOperationType
(\ s a -> s{_gcmvomOperationType = a})
-- | The time the operation was submitted.
gcmvomCreateTime :: Lens' GoogleCloudMlV1beta1__OperationMetadata (Maybe UTCTime)
gcmvomCreateTime
= lens _gcmvomCreateTime
(\ s a -> s{_gcmvomCreateTime = a})
. mapping _DateTime
instance FromJSON
GoogleCloudMlV1beta1__OperationMetadata where
parseJSON
= withObject "GoogleCloudMlV1beta1OperationMetadata"
(\ o ->
GoogleCloudMlV1beta1__OperationMetadata' <$>
(o .:? "startTime") <*> (o .:? "modelName") <*>
(o .:? "version")
<*> (o .:? "endTime")
<*> (o .:? "isCancellationRequested")
<*> (o .:? "operationType")
<*> (o .:? "createTime"))
instance ToJSON
GoogleCloudMlV1beta1__OperationMetadata where
toJSON GoogleCloudMlV1beta1__OperationMetadata'{..}
= object
(catMaybes
[("startTime" .=) <$> _gcmvomStartTime,
("modelName" .=) <$> _gcmvomModelName,
("version" .=) <$> _gcmvomVersion,
("endTime" .=) <$> _gcmvomEndTime,
("isCancellationRequested" .=) <$>
_gcmvomIsCancellationRequested,
("operationType" .=) <$> _gcmvomOperationType,
("createTime" .=) <$> _gcmvomCreateTime])
-- | Represents the result of a single hyperparameter tuning trial from a
-- training job. The TrainingOutput object that is returned on successful
-- completion of a training job with hyperparameter tuning includes a list
-- of HyperparameterOutput objects, one for each successful trial.
--
-- /See:/ 'googleCloudMlV1beta1__HyperparameterOutput' smart constructor.
data GoogleCloudMlV1beta1__HyperparameterOutput = GoogleCloudMlV1beta1__HyperparameterOutput'
{ _gcmvhoAllMetrics :: !(Maybe [GoogleCloudMlV1beta1_HyperparameterOutput_HyperparameterMetric])
, _gcmvhoHyperparameters :: !(Maybe GoogleCloudMlV1beta1__HyperparameterOutputHyperparameters)
, _gcmvhoTrialId :: !(Maybe Text)
, _gcmvhoFinalMetric :: !(Maybe GoogleCloudMlV1beta1_HyperparameterOutput_HyperparameterMetric)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'GoogleCloudMlV1beta1__HyperparameterOutput' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gcmvhoAllMetrics'
--
-- * 'gcmvhoHyperparameters'
--
-- * 'gcmvhoTrialId'
--
-- * 'gcmvhoFinalMetric'
googleCloudMlV1beta1__HyperparameterOutput
:: GoogleCloudMlV1beta1__HyperparameterOutput
googleCloudMlV1beta1__HyperparameterOutput =
GoogleCloudMlV1beta1__HyperparameterOutput'
{ _gcmvhoAllMetrics = Nothing
, _gcmvhoHyperparameters = Nothing
, _gcmvhoTrialId = Nothing
, _gcmvhoFinalMetric = Nothing
}
-- | All recorded objective metrics for this trial.
gcmvhoAllMetrics :: Lens' GoogleCloudMlV1beta1__HyperparameterOutput [GoogleCloudMlV1beta1_HyperparameterOutput_HyperparameterMetric]
gcmvhoAllMetrics
= lens _gcmvhoAllMetrics
(\ s a -> s{_gcmvhoAllMetrics = a})
. _Default
. _Coerce
-- | The hyperparameters given to this trial.
gcmvhoHyperparameters :: Lens' GoogleCloudMlV1beta1__HyperparameterOutput (Maybe GoogleCloudMlV1beta1__HyperparameterOutputHyperparameters)
gcmvhoHyperparameters
= lens _gcmvhoHyperparameters
(\ s a -> s{_gcmvhoHyperparameters = a})
-- | The trial id for these results.
gcmvhoTrialId :: Lens' GoogleCloudMlV1beta1__HyperparameterOutput (Maybe Text)
gcmvhoTrialId
= lens _gcmvhoTrialId
(\ s a -> s{_gcmvhoTrialId = a})
-- | The final objective metric seen for this trial.
gcmvhoFinalMetric :: Lens' GoogleCloudMlV1beta1__HyperparameterOutput (Maybe GoogleCloudMlV1beta1_HyperparameterOutput_HyperparameterMetric)
gcmvhoFinalMetric
= lens _gcmvhoFinalMetric
(\ s a -> s{_gcmvhoFinalMetric = a})
instance FromJSON
GoogleCloudMlV1beta1__HyperparameterOutput where
parseJSON
= withObject
"GoogleCloudMlV1beta1HyperparameterOutput"
(\ o ->
GoogleCloudMlV1beta1__HyperparameterOutput' <$>
(o .:? "allMetrics" .!= mempty) <*>
(o .:? "hyperparameters")
<*> (o .:? "trialId")
<*> (o .:? "finalMetric"))
instance ToJSON
GoogleCloudMlV1beta1__HyperparameterOutput where
toJSON
GoogleCloudMlV1beta1__HyperparameterOutput'{..}
= object
(catMaybes
[("allMetrics" .=) <$> _gcmvhoAllMetrics,
("hyperparameters" .=) <$> _gcmvhoHyperparameters,
("trialId" .=) <$> _gcmvhoTrialId,
("finalMetric" .=) <$> _gcmvhoFinalMetric])
-- | Response message for the ListVersions method.
--
-- /See:/ 'googleCloudMlV1beta1__ListVersionsResponse' smart constructor.
data GoogleCloudMlV1beta1__ListVersionsResponse = GoogleCloudMlV1beta1__ListVersionsResponse'
{ _gcmvlvrNextPageToken :: !(Maybe Text)
, _gcmvlvrVersions :: !(Maybe [GoogleCloudMlV1beta1__Version])
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'GoogleCloudMlV1beta1__ListVersionsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gcmvlvrNextPageToken'
--
-- * 'gcmvlvrVersions'
googleCloudMlV1beta1__ListVersionsResponse
:: GoogleCloudMlV1beta1__ListVersionsResponse
googleCloudMlV1beta1__ListVersionsResponse =
GoogleCloudMlV1beta1__ListVersionsResponse'
{ _gcmvlvrNextPageToken = Nothing
, _gcmvlvrVersions = Nothing
}
-- | Optional. Pass this token as the \`page_token\` field of the request for
-- a subsequent call.
gcmvlvrNextPageToken :: Lens' GoogleCloudMlV1beta1__ListVersionsResponse (Maybe Text)
gcmvlvrNextPageToken
= lens _gcmvlvrNextPageToken
(\ s a -> s{_gcmvlvrNextPageToken = a})
-- | The list of versions.
gcmvlvrVersions :: Lens' GoogleCloudMlV1beta1__ListVersionsResponse [GoogleCloudMlV1beta1__Version]
gcmvlvrVersions
= lens _gcmvlvrVersions
(\ s a -> s{_gcmvlvrVersions = a})
. _Default
. _Coerce
instance FromJSON
GoogleCloudMlV1beta1__ListVersionsResponse where
parseJSON
= withObject
"GoogleCloudMlV1beta1ListVersionsResponse"
(\ o ->
GoogleCloudMlV1beta1__ListVersionsResponse' <$>
(o .:? "nextPageToken") <*>
(o .:? "versions" .!= mempty))
instance ToJSON
GoogleCloudMlV1beta1__ListVersionsResponse where
toJSON
GoogleCloudMlV1beta1__ListVersionsResponse'{..}
= object
(catMaybes
[("nextPageToken" .=) <$> _gcmvlvrNextPageToken,
("versions" .=) <$> _gcmvlvrVersions])
-- | The response message for Operations.ListOperations.
--
-- /See:/ 'googleLongrunning__ListOperationsResponse' smart constructor.
data GoogleLongrunning__ListOperationsResponse = GoogleLongrunning__ListOperationsResponse'
{ _gllorNextPageToken :: !(Maybe Text)
, _gllorOperations :: !(Maybe [GoogleLongrunning__Operation])
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'GoogleLongrunning__ListOperationsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gllorNextPageToken'
--
-- * 'gllorOperations'
googleLongrunning__ListOperationsResponse
:: GoogleLongrunning__ListOperationsResponse
googleLongrunning__ListOperationsResponse =
GoogleLongrunning__ListOperationsResponse'
{ _gllorNextPageToken = Nothing
, _gllorOperations = Nothing
}
-- | The standard List next-page token.
gllorNextPageToken :: Lens' GoogleLongrunning__ListOperationsResponse (Maybe Text)
gllorNextPageToken
= lens _gllorNextPageToken
(\ s a -> s{_gllorNextPageToken = a})
-- | A list of operations that matches the specified filter in the request.
gllorOperations :: Lens' GoogleLongrunning__ListOperationsResponse [GoogleLongrunning__Operation]
gllorOperations
= lens _gllorOperations
(\ s a -> s{_gllorOperations = a})
. _Default
. _Coerce
instance FromJSON
GoogleLongrunning__ListOperationsResponse where
parseJSON
= withObject
"GoogleLongrunningListOperationsResponse"
(\ o ->
GoogleLongrunning__ListOperationsResponse' <$>
(o .:? "nextPageToken") <*>
(o .:? "operations" .!= mempty))
instance ToJSON
GoogleLongrunning__ListOperationsResponse where
toJSON GoogleLongrunning__ListOperationsResponse'{..}
= object
(catMaybes
[("nextPageToken" .=) <$> _gllorNextPageToken,
("operations" .=) <$> _gllorOperations])
-- | Represents input parameters for a prediction job.
--
-- /See:/ 'googleCloudMlV1beta1__PredictionInput' smart constructor.
data GoogleCloudMlV1beta1__PredictionInput = GoogleCloudMlV1beta1__PredictionInput'
{ _gcmvpiVersionName :: !(Maybe Text)
, _gcmvpiModelName :: !(Maybe Text)
, _gcmvpiDataFormat :: !(Maybe GoogleCloudMlV1beta1__PredictionInputDataFormat)
, _gcmvpiRuntimeVersion :: !(Maybe Text)
, _gcmvpiMaxWorkerCount :: !(Maybe (Textual Int64))
, _gcmvpiOutputPath :: !(Maybe Text)
, _gcmvpiRegion :: !(Maybe Text)
, _gcmvpiInputPaths :: !(Maybe [Text])
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'GoogleCloudMlV1beta1__PredictionInput' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gcmvpiVersionName'
--
-- * 'gcmvpiModelName'
--
-- * 'gcmvpiDataFormat'
--
-- * 'gcmvpiRuntimeVersion'
--
-- * 'gcmvpiMaxWorkerCount'
--
-- * 'gcmvpiOutputPath'
--
-- * 'gcmvpiRegion'
--
-- * 'gcmvpiInputPaths'
googleCloudMlV1beta1__PredictionInput
:: GoogleCloudMlV1beta1__PredictionInput
googleCloudMlV1beta1__PredictionInput =
GoogleCloudMlV1beta1__PredictionInput'
{ _gcmvpiVersionName = Nothing
, _gcmvpiModelName = Nothing
, _gcmvpiDataFormat = Nothing
, _gcmvpiRuntimeVersion = Nothing
, _gcmvpiMaxWorkerCount = Nothing
, _gcmvpiOutputPath = Nothing
, _gcmvpiRegion = Nothing
, _gcmvpiInputPaths = Nothing
}
-- | Use this field if you want to specify a version of the model to use. The
-- string is formatted the same way as \`model_version\`, with the addition
-- of the version information:
-- \`\"projects\/[YOUR_PROJECT]\/models\/YOUR_MODEL\/versions\/[YOUR_VERSION]\"\`
gcmvpiVersionName :: Lens' GoogleCloudMlV1beta1__PredictionInput (Maybe Text)
gcmvpiVersionName
= lens _gcmvpiVersionName
(\ s a -> s{_gcmvpiVersionName = a})
-- | Use this field if you want to use the default version for the specified
-- model. The string must use the following format:
-- \`\"projects\/[YOUR_PROJECT]\/models\/[YOUR_MODEL]\"\`
gcmvpiModelName :: Lens' GoogleCloudMlV1beta1__PredictionInput (Maybe Text)
gcmvpiModelName
= lens _gcmvpiModelName
(\ s a -> s{_gcmvpiModelName = a})
-- | Required. The format of the input data files.
gcmvpiDataFormat :: Lens' GoogleCloudMlV1beta1__PredictionInput (Maybe GoogleCloudMlV1beta1__PredictionInputDataFormat)
gcmvpiDataFormat
= lens _gcmvpiDataFormat
(\ s a -> s{_gcmvpiDataFormat = a})
-- | Optional. The Google Cloud ML runtime version to use for this batch
-- prediction. If not set, Google Cloud ML will choose a version.
gcmvpiRuntimeVersion :: Lens' GoogleCloudMlV1beta1__PredictionInput (Maybe Text)
gcmvpiRuntimeVersion
= lens _gcmvpiRuntimeVersion
(\ s a -> s{_gcmvpiRuntimeVersion = a})
-- | Optional. The maximum number of workers to be used for parallel
-- processing. Defaults to 10 if not specified.
gcmvpiMaxWorkerCount :: Lens' GoogleCloudMlV1beta1__PredictionInput (Maybe Int64)
gcmvpiMaxWorkerCount
= lens _gcmvpiMaxWorkerCount
(\ s a -> s{_gcmvpiMaxWorkerCount = a})
. mapping _Coerce
-- | Required. The output Google Cloud Storage location.
gcmvpiOutputPath :: Lens' GoogleCloudMlV1beta1__PredictionInput (Maybe Text)
gcmvpiOutputPath
= lens _gcmvpiOutputPath
(\ s a -> s{_gcmvpiOutputPath = a})
-- | Required. The Google Compute Engine region to run the prediction job in.
gcmvpiRegion :: Lens' GoogleCloudMlV1beta1__PredictionInput (Maybe Text)
gcmvpiRegion
= lens _gcmvpiRegion (\ s a -> s{_gcmvpiRegion = a})
-- | Required. The Google Cloud Storage location of the input data files. May
-- contain wildcards.
gcmvpiInputPaths :: Lens' GoogleCloudMlV1beta1__PredictionInput [Text]
gcmvpiInputPaths
= lens _gcmvpiInputPaths
(\ s a -> s{_gcmvpiInputPaths = a})
. _Default
. _Coerce
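-- Illustrative sketch (not part of the generated bindings): with the JSON
-- instances below, a minimally populated batch prediction input corresponds to
-- an object along the lines of
--   {"modelName": "projects/[YOUR_PROJECT]/models/[YOUR_MODEL]",
--    "dataFormat": ..., "inputPaths": [...], "outputPath": ..., "region": ...}
-- with unset optional fields omitted via 'catMaybes'.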
instance FromJSON
GoogleCloudMlV1beta1__PredictionInput where
parseJSON
= withObject "GoogleCloudMlV1beta1PredictionInput"
(\ o ->
GoogleCloudMlV1beta1__PredictionInput' <$>
(o .:? "versionName") <*> (o .:? "modelName") <*>
(o .:? "dataFormat")
<*> (o .:? "runtimeVersion")
<*> (o .:? "maxWorkerCount")
<*> (o .:? "outputPath")
<*> (o .:? "region")
<*> (o .:? "inputPaths" .!= mempty))
instance ToJSON GoogleCloudMlV1beta1__PredictionInput
where
toJSON GoogleCloudMlV1beta1__PredictionInput'{..}
= object
(catMaybes
[("versionName" .=) <$> _gcmvpiVersionName,
("modelName" .=) <$> _gcmvpiModelName,
("dataFormat" .=) <$> _gcmvpiDataFormat,
("runtimeVersion" .=) <$> _gcmvpiRuntimeVersion,
("maxWorkerCount" .=) <$> _gcmvpiMaxWorkerCount,
("outputPath" .=) <$> _gcmvpiOutputPath,
("region" .=) <$> _gcmvpiRegion,
("inputPaths" .=) <$> _gcmvpiInputPaths])
-- | This resource represents a long-running operation that is the result of
-- a network API call.
--
-- /See:/ 'googleLongrunning__Operation' smart constructor.
data GoogleLongrunning__Operation = GoogleLongrunning__Operation'
{ _gloDone :: !(Maybe Bool)
, _gloError :: !(Maybe GoogleRpc__Status)
, _gloResponse :: !(Maybe GoogleLongrunning__OperationResponse)
, _gloName :: !(Maybe Text)
, _gloMetadata :: !(Maybe GoogleLongrunning__OperationMetadata)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'GoogleLongrunning__Operation' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gloDone'
--
-- * 'gloError'
--
-- * 'gloResponse'
--
-- * 'gloName'
--
-- * 'gloMetadata'
googleLongrunning__Operation
:: GoogleLongrunning__Operation
googleLongrunning__Operation =
GoogleLongrunning__Operation'
{ _gloDone = Nothing
, _gloError = Nothing
, _gloResponse = Nothing
, _gloName = Nothing
, _gloMetadata = Nothing
}
-- | If the value is \`false\`, it means the operation is still in progress.
-- If true, the operation is completed, and either \`error\` or
-- \`response\` is available.
gloDone :: Lens' GoogleLongrunning__Operation (Maybe Bool)
gloDone = lens _gloDone (\ s a -> s{_gloDone = a})
-- | The error result of the operation in case of failure or cancellation.
gloError :: Lens' GoogleLongrunning__Operation (Maybe GoogleRpc__Status)
gloError = lens _gloError (\ s a -> s{_gloError = a})
-- | The normal response of the operation in case of success. If the original
-- method returns no data on success, such as \`Delete\`, the response is
-- \`google.protobuf.Empty\`. If the original method is standard
-- \`Get\`\/\`Create\`\/\`Update\`, the response should be the resource.
-- For other methods, the response should have the type \`XxxResponse\`,
-- where \`Xxx\` is the original method name. For example, if the original
-- method name is \`TakeSnapshot()\`, the inferred response type is
-- \`TakeSnapshotResponse\`.
gloResponse :: Lens' GoogleLongrunning__Operation (Maybe GoogleLongrunning__OperationResponse)
gloResponse
= lens _gloResponse (\ s a -> s{_gloResponse = a})
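-- Illustrative note (not part of the generated bindings): per the field
-- documentation here, an operation is polled until 'gloDone' is True, at which
-- point exactly one of 'gloError' or 'gloResponse' should be populated.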
-- | The server-assigned name, which is only unique within the same service
-- that originally returns it. If you use the default HTTP mapping, the
-- \`name\` should have the format of \`operations\/some\/unique\/name\`.
gloName :: Lens' GoogleLongrunning__Operation (Maybe Text)
gloName = lens _gloName (\ s a -> s{_gloName = a})
-- | Service-specific metadata associated with the operation. It typically
-- contains progress information and common metadata such as create time.
-- Some services might not provide such metadata. Any method that returns a
-- long-running operation should document the metadata type, if any.
gloMetadata :: Lens' GoogleLongrunning__Operation (Maybe GoogleLongrunning__OperationMetadata)
gloMetadata
= lens _gloMetadata (\ s a -> s{_gloMetadata = a})
instance FromJSON GoogleLongrunning__Operation where
parseJSON
= withObject "GoogleLongrunningOperation"
(\ o ->
GoogleLongrunning__Operation' <$>
(o .:? "done") <*> (o .:? "error") <*>
(o .:? "response")
<*> (o .:? "name")
<*> (o .:? "metadata"))
instance ToJSON GoogleLongrunning__Operation where
toJSON GoogleLongrunning__Operation'{..}
= object
(catMaybes
[("done" .=) <$> _gloDone,
("error" .=) <$> _gloError,
("response" .=) <$> _gloResponse,
("name" .=) <$> _gloName,
("metadata" .=) <$> _gloMetadata])
-- | The hyperparameters given to this trial.
--
-- /See:/ 'googleCloudMlV1beta1__HyperparameterOutputHyperparameters' smart constructor.
newtype GoogleCloudMlV1beta1__HyperparameterOutputHyperparameters = GoogleCloudMlV1beta1__HyperparameterOutputHyperparameters'
{ _gcmvhohAddtional :: HashMap Text Text
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'GoogleCloudMlV1beta1__HyperparameterOutputHyperparameters' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gcmvhohAddtional'
googleCloudMlV1beta1__HyperparameterOutputHyperparameters
:: HashMap Text Text -- ^ 'gcmvhohAddtional'
-> GoogleCloudMlV1beta1__HyperparameterOutputHyperparameters
googleCloudMlV1beta1__HyperparameterOutputHyperparameters pGcmvhohAddtional_ =
GoogleCloudMlV1beta1__HyperparameterOutputHyperparameters'
{ _gcmvhohAddtional = _Coerce # pGcmvhohAddtional_
}
gcmvhohAddtional :: Lens' GoogleCloudMlV1beta1__HyperparameterOutputHyperparameters (HashMap Text Text)
gcmvhohAddtional
= lens _gcmvhohAddtional
(\ s a -> s{_gcmvhohAddtional = a})
. _Coerce
instance FromJSON
GoogleCloudMlV1beta1__HyperparameterOutputHyperparameters
where
parseJSON
= withObject
"GoogleCloudMlV1beta1HyperparameterOutputHyperparameters"
(\ o ->
GoogleCloudMlV1beta1__HyperparameterOutputHyperparameters'
<$> (parseJSONObject o))
instance ToJSON
GoogleCloudMlV1beta1__HyperparameterOutputHyperparameters
where
toJSON = toJSON . _gcmvhohAddtional
-- | The \`Status\` type defines a logical error model that is suitable for
-- different programming environments, including REST APIs and RPC APIs. It
-- is used by [gRPC](https:\/\/github.com\/grpc). The error model is
-- designed to be: - Simple to use and understand for most users - Flexible
-- enough to meet unexpected needs # Overview The \`Status\` message
-- contains three pieces of data: error code, error message, and error
-- details. The error code should be an enum value of google.rpc.Code, but
-- it may accept additional error codes if needed. The error message should
-- be a developer-facing English message that helps developers *understand*
-- and *resolve* the error. If a localized user-facing error message is
-- needed, put the localized message in the error details or localize it in
-- the client. The optional error details may contain arbitrary information
-- about the error. There is a predefined set of error detail types in the
-- package \`google.rpc\` which can be used for common error conditions. #
-- Language mapping The \`Status\` message is the logical representation of
-- the error model, but it is not necessarily the actual wire format. When
-- the \`Status\` message is exposed in different client libraries and
-- different wire protocols, it can be mapped differently. For example, it
-- will likely be mapped to some exceptions in Java, but more likely mapped
-- to some error codes in C. # Other uses The error model and the
-- \`Status\` message can be used in a variety of environments, either with
-- or without APIs, to provide a consistent developer experience across
-- different environments. Example uses of this error model include: -
-- Partial errors. If a service needs to return partial errors to the
-- client, it may embed the \`Status\` in the normal response to indicate
-- the partial errors. - Workflow errors. A typical workflow has multiple
-- steps. Each step may have a \`Status\` message for error reporting
-- purpose. - Batch operations. If a client uses batch request and batch
-- response, the \`Status\` message should be used directly inside batch
-- response, one for each error sub-response. - Asynchronous operations. If
-- an API call embeds asynchronous operation results in its response, the
-- status of those operations should be represented directly using the
-- \`Status\` message. - Logging. If some API errors are stored in logs,
-- the message \`Status\` could be used directly after any stripping needed
-- for security\/privacy reasons.
--
-- /See:/ 'googleRpc__Status' smart constructor.
data GoogleRpc__Status = GoogleRpc__Status'
{ _grsDetails :: !(Maybe [GoogleRpc__StatusDetailsItem])
, _grsCode :: !(Maybe (Textual Int32))
, _grsMessage :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'GoogleRpc__Status' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'grsDetails'
--
-- * 'grsCode'
--
-- * 'grsMessage'
googleRpc__Status
:: GoogleRpc__Status
googleRpc__Status =
GoogleRpc__Status'
{ _grsDetails = Nothing
, _grsCode = Nothing
, _grsMessage = Nothing
}
-- | A list of messages that carry the error details. There will be a common
-- set of message types for APIs to use.
grsDetails :: Lens' GoogleRpc__Status [GoogleRpc__StatusDetailsItem]
grsDetails
= lens _grsDetails (\ s a -> s{_grsDetails = a}) .
_Default
. _Coerce
-- | The status code, which should be an enum value of google.rpc.Code.
grsCode :: Lens' GoogleRpc__Status (Maybe Int32)
grsCode
= lens _grsCode (\ s a -> s{_grsCode = a}) .
mapping _Coerce
-- | A developer-facing error message, which should be in English. Any
-- user-facing error message should be localized and sent in the
-- google.rpc.Status.details field, or localized by the client.
grsMessage :: Lens' GoogleRpc__Status (Maybe Text)
grsMessage
= lens _grsMessage (\ s a -> s{_grsMessage = a})
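-- Illustrative sketch (not part of the generated bindings): with the JSON
-- instances below, a populated 'GoogleRpc__Status' corresponds to an object
-- such as {"code": 5, "message": "...", "details": [...]}, with absent fields
-- omitted via 'catMaybes'.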
instance FromJSON GoogleRpc__Status where
parseJSON
= withObject "GoogleRpcStatus"
(\ o ->
GoogleRpc__Status' <$>
(o .:? "details" .!= mempty) <*> (o .:? "code") <*>
(o .:? "message"))
instance ToJSON GoogleRpc__Status where
toJSON GoogleRpc__Status'{..}
= object
(catMaybes
[("details" .=) <$> _grsDetails,
("code" .=) <$> _grsCode,
("message" .=) <$> _grsMessage])
-- | Service-specific metadata associated with the operation. It typically
-- contains progress information and common metadata such as create time.
-- Some services might not provide such metadata. Any method that returns a
-- long-running operation should document the metadata type, if any.
--
-- /See:/ 'googleLongrunning__OperationMetadata' smart constructor.
newtype GoogleLongrunning__OperationMetadata = GoogleLongrunning__OperationMetadata'
{ _glomAddtional :: HashMap Text JSONValue
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'GoogleLongrunning__OperationMetadata' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'glomAddtional'
googleLongrunning__OperationMetadata
:: HashMap Text JSONValue -- ^ 'glomAddtional'
-> GoogleLongrunning__OperationMetadata
googleLongrunning__OperationMetadata pGlomAddtional_ =
GoogleLongrunning__OperationMetadata'
{ _glomAddtional = _Coerce # pGlomAddtional_
}
-- | Properties of the object. Contains field \'type with type URL.
glomAddtional :: Lens' GoogleLongrunning__OperationMetadata (HashMap Text JSONValue)
glomAddtional
= lens _glomAddtional
(\ s a -> s{_glomAddtional = a})
. _Coerce
instance FromJSON
GoogleLongrunning__OperationMetadata where
parseJSON
= withObject "GoogleLongrunningOperationMetadata"
(\ o ->
GoogleLongrunning__OperationMetadata' <$>
(parseJSONObject o))
instance ToJSON GoogleLongrunning__OperationMetadata
where
toJSON = toJSON . _glomAddtional
-- | Represents results of a prediction job.
--
-- /See:/ 'googleCloudMlV1beta1__PredictionOutput' smart constructor.
data GoogleCloudMlV1beta1__PredictionOutput = GoogleCloudMlV1beta1__PredictionOutput'
{ _gcmvpoNodeHours :: !(Maybe (Textual Double))
, _gcmvpoErrorCount :: !(Maybe (Textual Int64))
, _gcmvpoPredictionCount :: !(Maybe (Textual Int64))
, _gcmvpoOutputPath :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'GoogleCloudMlV1beta1__PredictionOutput' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gcmvpoNodeHours'
--
-- * 'gcmvpoErrorCount'
--
-- * 'gcmvpoPredictionCount'
--
-- * 'gcmvpoOutputPath'
googleCloudMlV1beta1__PredictionOutput
:: GoogleCloudMlV1beta1__PredictionOutput
googleCloudMlV1beta1__PredictionOutput =
GoogleCloudMlV1beta1__PredictionOutput'
{ _gcmvpoNodeHours = Nothing
, _gcmvpoErrorCount = Nothing
, _gcmvpoPredictionCount = Nothing
, _gcmvpoOutputPath = Nothing
}
-- | Node hours used by the batch prediction job.
gcmvpoNodeHours :: Lens' GoogleCloudMlV1beta1__PredictionOutput (Maybe Double)
gcmvpoNodeHours
= lens _gcmvpoNodeHours
(\ s a -> s{_gcmvpoNodeHours = a})
. mapping _Coerce
-- | The number of data instances which resulted in errors.
gcmvpoErrorCount :: Lens' GoogleCloudMlV1beta1__PredictionOutput (Maybe Int64)
gcmvpoErrorCount
= lens _gcmvpoErrorCount
(\ s a -> s{_gcmvpoErrorCount = a})
. mapping _Coerce
-- | The number of generated predictions.
gcmvpoPredictionCount :: Lens' GoogleCloudMlV1beta1__PredictionOutput (Maybe Int64)
gcmvpoPredictionCount
= lens _gcmvpoPredictionCount
(\ s a -> s{_gcmvpoPredictionCount = a})
. mapping _Coerce
-- | The output Google Cloud Storage location provided at the job creation
-- time.
gcmvpoOutputPath :: Lens' GoogleCloudMlV1beta1__PredictionOutput (Maybe Text)
gcmvpoOutputPath
= lens _gcmvpoOutputPath
(\ s a -> s{_gcmvpoOutputPath = a})
instance FromJSON
GoogleCloudMlV1beta1__PredictionOutput where
parseJSON
= withObject "GoogleCloudMlV1beta1PredictionOutput"
(\ o ->
GoogleCloudMlV1beta1__PredictionOutput' <$>
(o .:? "nodeHours") <*> (o .:? "errorCount") <*>
(o .:? "predictionCount")
<*> (o .:? "outputPath"))
instance ToJSON
GoogleCloudMlV1beta1__PredictionOutput where
toJSON GoogleCloudMlV1beta1__PredictionOutput'{..}
= object
(catMaybes
[("nodeHours" .=) <$> _gcmvpoNodeHours,
("errorCount" .=) <$> _gcmvpoErrorCount,
("predictionCount" .=) <$> _gcmvpoPredictionCount,
("outputPath" .=) <$> _gcmvpoOutputPath])
-- | Request message for the CancelJob method.
--
-- /See:/ 'googleCloudMlV1beta1__CancelJobRequest' smart constructor.
data GoogleCloudMlV1beta1__CancelJobRequest =
GoogleCloudMlV1beta1__CancelJobRequest'
deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'GoogleCloudMlV1beta1__CancelJobRequest' with the minimum fields required to make a request.
--
googleCloudMlV1beta1__CancelJobRequest
:: GoogleCloudMlV1beta1__CancelJobRequest
googleCloudMlV1beta1__CancelJobRequest =
GoogleCloudMlV1beta1__CancelJobRequest'
instance FromJSON
GoogleCloudMlV1beta1__CancelJobRequest where
parseJSON
= withObject "GoogleCloudMlV1beta1CancelJobRequest"
(\ o -> pure GoogleCloudMlV1beta1__CancelJobRequest')
instance ToJSON
GoogleCloudMlV1beta1__CancelJobRequest where
toJSON = const emptyObject
-- | Response message for the ListModels method.
--
-- /See:/ 'googleCloudMlV1beta1__ListModelsResponse' smart constructor.
data GoogleCloudMlV1beta1__ListModelsResponse = GoogleCloudMlV1beta1__ListModelsResponse'
{ _gcmvlmrNextPageToken :: !(Maybe Text)
, _gcmvlmrModels :: !(Maybe [GoogleCloudMlV1beta1__Model])
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'GoogleCloudMlV1beta1__ListModelsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gcmvlmrNextPageToken'
--
-- * 'gcmvlmrModels'
googleCloudMlV1beta1__ListModelsResponse
:: GoogleCloudMlV1beta1__ListModelsResponse
googleCloudMlV1beta1__ListModelsResponse =
GoogleCloudMlV1beta1__ListModelsResponse'
{ _gcmvlmrNextPageToken = Nothing
, _gcmvlmrModels = Nothing
}
-- | Optional. Pass this token as the \`page_token\` field of the request for
-- a subsequent call.
gcmvlmrNextPageToken :: Lens' GoogleCloudMlV1beta1__ListModelsResponse (Maybe Text)
gcmvlmrNextPageToken
= lens _gcmvlmrNextPageToken
(\ s a -> s{_gcmvlmrNextPageToken = a})
-- | The list of models.
gcmvlmrModels :: Lens' GoogleCloudMlV1beta1__ListModelsResponse [GoogleCloudMlV1beta1__Model]
gcmvlmrModels
= lens _gcmvlmrModels
(\ s a -> s{_gcmvlmrModels = a})
. _Default
. _Coerce
instance FromJSON
GoogleCloudMlV1beta1__ListModelsResponse where
parseJSON
= withObject "GoogleCloudMlV1beta1ListModelsResponse"
(\ o ->
GoogleCloudMlV1beta1__ListModelsResponse' <$>
(o .:? "nextPageToken") <*>
(o .:? "models" .!= mempty))
instance ToJSON
GoogleCloudMlV1beta1__ListModelsResponse where
toJSON GoogleCloudMlV1beta1__ListModelsResponse'{..}
= object
(catMaybes
[("nextPageToken" .=) <$> _gcmvlmrNextPageToken,
("models" .=) <$> _gcmvlmrModels])
-- | Response message for the ListJobs method.
--
-- /See:/ 'googleCloudMlV1beta1__ListJobsResponse' smart constructor.
data GoogleCloudMlV1beta1__ListJobsResponse = GoogleCloudMlV1beta1__ListJobsResponse'
{ _gcmvljrNextPageToken :: !(Maybe Text)
, _gcmvljrJobs :: !(Maybe [GoogleCloudMlV1beta1__Job])
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'GoogleCloudMlV1beta1__ListJobsResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gcmvljrNextPageToken'
--
-- * 'gcmvljrJobs'
googleCloudMlV1beta1__ListJobsResponse
:: GoogleCloudMlV1beta1__ListJobsResponse
googleCloudMlV1beta1__ListJobsResponse =
GoogleCloudMlV1beta1__ListJobsResponse'
{ _gcmvljrNextPageToken = Nothing
, _gcmvljrJobs = Nothing
}
-- | Optional. Pass this token as the \`page_token\` field of the request for
-- a subsequent call.
gcmvljrNextPageToken :: Lens' GoogleCloudMlV1beta1__ListJobsResponse (Maybe Text)
gcmvljrNextPageToken
= lens _gcmvljrNextPageToken
(\ s a -> s{_gcmvljrNextPageToken = a})
-- | The list of jobs.
gcmvljrJobs :: Lens' GoogleCloudMlV1beta1__ListJobsResponse [GoogleCloudMlV1beta1__Job]
gcmvljrJobs
= lens _gcmvljrJobs (\ s a -> s{_gcmvljrJobs = a}) .
_Default
. _Coerce
instance FromJSON
GoogleCloudMlV1beta1__ListJobsResponse where
parseJSON
= withObject "GoogleCloudMlV1beta1ListJobsResponse"
(\ o ->
GoogleCloudMlV1beta1__ListJobsResponse' <$>
(o .:? "nextPageToken") <*>
(o .:? "jobs" .!= mempty))
instance ToJSON
GoogleCloudMlV1beta1__ListJobsResponse where
toJSON GoogleCloudMlV1beta1__ListJobsResponse'{..}
= object
(catMaybes
[("nextPageToken" .=) <$> _gcmvljrNextPageToken,
("jobs" .=) <$> _gcmvljrJobs])
-- | Represents input parameters for a training job.
--
-- /See:/ 'googleCloudMlV1beta1__TrainingInput' smart constructor.
data GoogleCloudMlV1beta1__TrainingInput = GoogleCloudMlV1beta1__TrainingInput'
{ _gcmvtiMasterType :: !(Maybe Text)
, _gcmvtiParameterServerCount :: !(Maybe (Textual Int64))
, _gcmvtiArgs :: !(Maybe [Text])
, _gcmvtiWorkerCount :: !(Maybe (Textual Int64))
, _gcmvtiRuntimeVersion :: !(Maybe Text)
, _gcmvtiWorkerType :: !(Maybe Text)
, _gcmvtiPythonModule :: !(Maybe Text)
, _gcmvtiParameterServerType :: !(Maybe Text)
, _gcmvtiHyperparameters :: !(Maybe GoogleCloudMlV1beta1__HyperparameterSpec)
, _gcmvtiPackageURIs :: !(Maybe [Text])
, _gcmvtiScaleTier :: !(Maybe GoogleCloudMlV1beta1__TrainingInputScaleTier)
, _gcmvtiRegion :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'GoogleCloudMlV1beta1__TrainingInput' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gcmvtiMasterType'
--
-- * 'gcmvtiParameterServerCount'
--
-- * 'gcmvtiArgs'
--
-- * 'gcmvtiWorkerCount'
--
-- * 'gcmvtiRuntimeVersion'
--
-- * 'gcmvtiWorkerType'
--
-- * 'gcmvtiPythonModule'
--
-- * 'gcmvtiParameterServerType'
--
-- * 'gcmvtiHyperparameters'
--
-- * 'gcmvtiPackageURIs'
--
-- * 'gcmvtiScaleTier'
--
-- * 'gcmvtiRegion'
googleCloudMlV1beta1__TrainingInput
:: GoogleCloudMlV1beta1__TrainingInput
googleCloudMlV1beta1__TrainingInput =
GoogleCloudMlV1beta1__TrainingInput'
{ _gcmvtiMasterType = Nothing
, _gcmvtiParameterServerCount = Nothing
, _gcmvtiArgs = Nothing
, _gcmvtiWorkerCount = Nothing
, _gcmvtiRuntimeVersion = Nothing
, _gcmvtiWorkerType = Nothing
, _gcmvtiPythonModule = Nothing
, _gcmvtiParameterServerType = Nothing
, _gcmvtiHyperparameters = Nothing
, _gcmvtiPackageURIs = Nothing
, _gcmvtiScaleTier = Nothing
, _gcmvtiRegion = Nothing
}
-- | Optional. Specifies the type of virtual machine to use for your training
-- job\'s master worker. The following types are supported:
--
-- [standard]
-- A basic machine configuration suitable for training simple models
-- with small to moderate datasets.
-- [large_model]
-- A machine with a lot of memory, specially suited for parameter
-- servers when your model is large (having many hidden layers or
-- layers with very large numbers of nodes).
-- [complex_model_s]
-- A machine suitable for the master and workers of the cluster when
-- your model requires more computation than the standard machine can
-- handle satisfactorily.
-- [complex_model_m]
-- A machine with roughly twice the number of cores and roughly double
-- the memory of 'complex_model_s'.
-- [complex_model_l]
-- A machine with roughly twice the number of cores and roughly double
-- the memory of 'complex_model_m'.
--
-- You must set this value when \`scaleTier\` is set to \`CUSTOM\`.
gcmvtiMasterType :: Lens' GoogleCloudMlV1beta1__TrainingInput (Maybe Text)
gcmvtiMasterType
= lens _gcmvtiMasterType
(\ s a -> s{_gcmvtiMasterType = a})
-- | Optional. The number of parameter server replicas to use for the
-- training job. Each replica in the cluster will be of the type specified
-- in \`parameter_server_type\`. This value can only be used when
-- \`scale_tier\` is set to \`CUSTOM\`. If you set this value, you must also
-- set \`parameter_server_type\`.
gcmvtiParameterServerCount :: Lens' GoogleCloudMlV1beta1__TrainingInput (Maybe Int64)
gcmvtiParameterServerCount
= lens _gcmvtiParameterServerCount
(\ s a -> s{_gcmvtiParameterServerCount = a})
. mapping _Coerce
-- | Optional. Command line arguments to pass to the program.
gcmvtiArgs :: Lens' GoogleCloudMlV1beta1__TrainingInput [Text]
gcmvtiArgs
= lens _gcmvtiArgs (\ s a -> s{_gcmvtiArgs = a}) .
_Default
. _Coerce
-- | Optional. The number of worker replicas to use for the training job.
-- Each replica in the cluster will be of the type specified in
-- \`worker_type\`. This value can only be used when \`scale_tier\` is set
-- to \`CUSTOM\`. If you set this value, you must also set \`worker_type\`.
gcmvtiWorkerCount :: Lens' GoogleCloudMlV1beta1__TrainingInput (Maybe Int64)
gcmvtiWorkerCount
= lens _gcmvtiWorkerCount
(\ s a -> s{_gcmvtiWorkerCount = a})
. mapping _Coerce
-- | Optional. The Google Cloud ML runtime version to use for training. If
-- not set, Google Cloud ML will choose the latest stable version.
gcmvtiRuntimeVersion :: Lens' GoogleCloudMlV1beta1__TrainingInput (Maybe Text)
gcmvtiRuntimeVersion
= lens _gcmvtiRuntimeVersion
(\ s a -> s{_gcmvtiRuntimeVersion = a})
-- | Optional. Specifies the type of virtual machine to use for your training
-- job\'s worker nodes. The supported values are the same as those
-- described in the entry for \`masterType\`. This value must be present
-- when \`scaleTier\` is set to \`CUSTOM\` and \`workerCount\` is greater
-- than zero.
gcmvtiWorkerType :: Lens' GoogleCloudMlV1beta1__TrainingInput (Maybe Text)
gcmvtiWorkerType
= lens _gcmvtiWorkerType
(\ s a -> s{_gcmvtiWorkerType = a})
-- | Required. The Python module name to run after installing the packages.
gcmvtiPythonModule :: Lens' GoogleCloudMlV1beta1__TrainingInput (Maybe Text)
gcmvtiPythonModule
= lens _gcmvtiPythonModule
(\ s a -> s{_gcmvtiPythonModule = a})
-- | Optional. Specifies the type of virtual machine to use for your training
-- job\'s parameter server. The supported values are the same as those
-- described in the entry for \`master_type\`. This value must be present
-- when \`scaleTier\` is set to \`CUSTOM\` and \`parameter_server_count\`
-- is greater than zero.
gcmvtiParameterServerType :: Lens' GoogleCloudMlV1beta1__TrainingInput (Maybe Text)
gcmvtiParameterServerType
= lens _gcmvtiParameterServerType
(\ s a -> s{_gcmvtiParameterServerType = a})
-- | Optional. The set of Hyperparameters to tune.
gcmvtiHyperparameters :: Lens' GoogleCloudMlV1beta1__TrainingInput (Maybe GoogleCloudMlV1beta1__HyperparameterSpec)
gcmvtiHyperparameters
= lens _gcmvtiHyperparameters
(\ s a -> s{_gcmvtiHyperparameters = a})
-- | Required. The Google Cloud Storage location of the packages with the
-- training program and any additional dependencies.
gcmvtiPackageURIs :: Lens' GoogleCloudMlV1beta1__TrainingInput [Text]
gcmvtiPackageURIs
= lens _gcmvtiPackageURIs
(\ s a -> s{_gcmvtiPackageURIs = a})
. _Default
. _Coerce
-- | Required. Specifies the machine types, the number of replicas for
-- workers and parameter servers.
gcmvtiScaleTier :: Lens' GoogleCloudMlV1beta1__TrainingInput (Maybe GoogleCloudMlV1beta1__TrainingInputScaleTier)
gcmvtiScaleTier
= lens _gcmvtiScaleTier
(\ s a -> s{_gcmvtiScaleTier = a})
-- | Required. The Google Compute Engine region to run the training job in.
gcmvtiRegion :: Lens' GoogleCloudMlV1beta1__TrainingInput (Maybe Text)
gcmvtiRegion
= lens _gcmvtiRegion (\ s a -> s{_gcmvtiRegion = a})
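-- Illustrative note (not part of the generated bindings): per the field
-- documentation above, a job that sets 'gcmvtiScaleTier' to CUSTOM must also
-- set 'gcmvtiMasterType', and must set 'gcmvtiWorkerType' and
-- 'gcmvtiParameterServerType' whenever the corresponding counts are non-zero.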
instance FromJSON GoogleCloudMlV1beta1__TrainingInput
where
parseJSON
= withObject "GoogleCloudMlV1beta1TrainingInput"
(\ o ->
GoogleCloudMlV1beta1__TrainingInput' <$>
(o .:? "masterType") <*>
(o .:? "parameterServerCount")
<*> (o .:? "args" .!= mempty)
<*> (o .:? "workerCount")
<*> (o .:? "runtimeVersion")
<*> (o .:? "workerType")
<*> (o .:? "pythonModule")
<*> (o .:? "parameterServerType")
<*> (o .:? "hyperparameters")
<*> (o .:? "packageUris" .!= mempty)
<*> (o .:? "scaleTier")
<*> (o .:? "region"))
instance ToJSON GoogleCloudMlV1beta1__TrainingInput
where
toJSON GoogleCloudMlV1beta1__TrainingInput'{..}
= object
(catMaybes
[("masterType" .=) <$> _gcmvtiMasterType,
("parameterServerCount" .=) <$>
_gcmvtiParameterServerCount,
("args" .=) <$> _gcmvtiArgs,
("workerCount" .=) <$> _gcmvtiWorkerCount,
("runtimeVersion" .=) <$> _gcmvtiRuntimeVersion,
("workerType" .=) <$> _gcmvtiWorkerType,
("pythonModule" .=) <$> _gcmvtiPythonModule,
("parameterServerType" .=) <$>
_gcmvtiParameterServerType,
("hyperparameters" .=) <$> _gcmvtiHyperparameters,
("packageUris" .=) <$> _gcmvtiPackageURIs,
("scaleTier" .=) <$> _gcmvtiScaleTier,
("region" .=) <$> _gcmvtiRegion])
--
-- /See:/ 'googleRpc__StatusDetailsItem' smart constructor.
newtype GoogleRpc__StatusDetailsItem = GoogleRpc__StatusDetailsItem'
{ _grsdiAddtional :: HashMap Text JSONValue
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'GoogleRpc__StatusDetailsItem' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'grsdiAddtional'
googleRpc__StatusDetailsItem
:: HashMap Text JSONValue -- ^ 'grsdiAddtional'
-> GoogleRpc__StatusDetailsItem
googleRpc__StatusDetailsItem pGrsdiAddtional_ =
GoogleRpc__StatusDetailsItem'
{ _grsdiAddtional = _Coerce # pGrsdiAddtional_
}
-- | Properties of the object. Contains field \'type with type URL.
grsdiAddtional :: Lens' GoogleRpc__StatusDetailsItem (HashMap Text JSONValue)
grsdiAddtional
= lens _grsdiAddtional
(\ s a -> s{_grsdiAddtional = a})
. _Coerce
instance FromJSON GoogleRpc__StatusDetailsItem where
parseJSON
= withObject "GoogleRpcStatusDetailsItem"
(\ o ->
GoogleRpc__StatusDetailsItem' <$>
(parseJSONObject o))
instance ToJSON GoogleRpc__StatusDetailsItem where
toJSON = toJSON . _grsdiAddtional
-- | A generic empty message that you can re-use to avoid defining duplicated
-- empty messages in your APIs. A typical example is to use it as the
-- request or the response type of an API method. For instance: service Foo
-- { rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); } The
-- JSON representation for \`Empty\` is an empty JSON object \`{}\`.
--
-- /See:/ 'googleProtobuf__Empty' smart constructor.
data GoogleProtobuf__Empty =
GoogleProtobuf__Empty'
deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'GoogleProtobuf__Empty' with the minimum fields required to make a request.
--
googleProtobuf__Empty
:: GoogleProtobuf__Empty
googleProtobuf__Empty = GoogleProtobuf__Empty'
instance FromJSON GoogleProtobuf__Empty where
parseJSON
= withObject "GoogleProtobufEmpty"
(\ o -> pure GoogleProtobuf__Empty')
instance ToJSON GoogleProtobuf__Empty where
toJSON = const emptyObject
-- | An observed value of a metric.
--
-- /See:/ 'googleCloudMlV1beta1_HyperparameterOutput_HyperparameterMetric' smart constructor.
data GoogleCloudMlV1beta1_HyperparameterOutput_HyperparameterMetric = GoogleCloudMlV1beta1_HyperparameterOutput_HyperparameterMetric'
{ _gcmvhohmTrainingStep :: !(Maybe (Textual Int64))
, _gcmvhohmObjectiveValue :: !(Maybe (Textual Double))
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'GoogleCloudMlV1beta1_HyperparameterOutput_HyperparameterMetric' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gcmvhohmTrainingStep'
--
-- * 'gcmvhohmObjectiveValue'
googleCloudMlV1beta1_HyperparameterOutput_HyperparameterMetric
:: GoogleCloudMlV1beta1_HyperparameterOutput_HyperparameterMetric
googleCloudMlV1beta1_HyperparameterOutput_HyperparameterMetric =
GoogleCloudMlV1beta1_HyperparameterOutput_HyperparameterMetric'
{ _gcmvhohmTrainingStep = Nothing
, _gcmvhohmObjectiveValue = Nothing
}
-- | The global training step for this metric.
gcmvhohmTrainingStep :: Lens' GoogleCloudMlV1beta1_HyperparameterOutput_HyperparameterMetric (Maybe Int64)
gcmvhohmTrainingStep
= lens _gcmvhohmTrainingStep
(\ s a -> s{_gcmvhohmTrainingStep = a})
. mapping _Coerce
-- | The objective value at this training step.
gcmvhohmObjectiveValue :: Lens' GoogleCloudMlV1beta1_HyperparameterOutput_HyperparameterMetric (Maybe Double)
gcmvhohmObjectiveValue
= lens _gcmvhohmObjectiveValue
(\ s a -> s{_gcmvhohmObjectiveValue = a})
. mapping _Coerce
instance FromJSON
GoogleCloudMlV1beta1_HyperparameterOutput_HyperparameterMetric
where
parseJSON
= withObject
"GoogleCloudMlV1beta1HyperparameterOutputHyperparameterMetric"
(\ o ->
GoogleCloudMlV1beta1_HyperparameterOutput_HyperparameterMetric'
<$>
(o .:? "trainingStep") <*> (o .:? "objectiveValue"))
instance ToJSON
GoogleCloudMlV1beta1_HyperparameterOutput_HyperparameterMetric
where
toJSON
GoogleCloudMlV1beta1_HyperparameterOutput_HyperparameterMetric'{..}
= object
(catMaybes
[("trainingStep" .=) <$> _gcmvhohmTrainingStep,
("objectiveValue" .=) <$> _gcmvhohmObjectiveValue])
-- | Message that represents an arbitrary HTTP body. It should only be used
-- for payload formats that can\'t be represented as JSON, such as raw
-- binary or an HTML page. This message can be used both in streaming and
-- non-streaming API methods in the request as well as the response. It can
-- be used as a top-level request field, which is convenient if one wants
-- to extract parameters from either the URL or HTTP template into the
-- request fields and also want access to the raw HTTP body. Example:
-- message GetResourceRequest { \/\/ A unique request id. string request_id
-- = 1; \/\/ The raw HTTP body is bound to this field. google.api.HttpBody
-- http_body = 2; } service ResourceService { rpc
-- GetResource(GetResourceRequest) returns (google.api.HttpBody); rpc
-- UpdateResource(google.api.HttpBody) returns (google.protobuf.Empty); }
-- Example with streaming methods: service CaldavService { rpc
-- GetCalendar(stream google.api.HttpBody) returns (stream
-- google.api.HttpBody); rpc UpdateCalendar(stream google.api.HttpBody)
-- returns (stream google.api.HttpBody); } Use of this type only changes
-- how the request and response bodies are handled, all other features will
-- continue to work unchanged.
--
-- /See:/ 'googleAPI__HTTPBody' smart constructor.
data GoogleAPI__HTTPBody = GoogleAPI__HTTPBody'
{ _gahttpbData :: !(Maybe Bytes)
, _gahttpbContentType :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'GoogleAPI__HTTPBody' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gahttpbData'
--
-- * 'gahttpbContentType'
googleAPI__HTTPBody
:: GoogleAPI__HTTPBody
googleAPI__HTTPBody =
GoogleAPI__HTTPBody'
{ _gahttpbData = Nothing
, _gahttpbContentType = Nothing
}
-- | HTTP body binary data.
gahttpbData :: Lens' GoogleAPI__HTTPBody (Maybe ByteString)
gahttpbData
= lens _gahttpbData (\ s a -> s{_gahttpbData = a}) .
mapping _Bytes
-- | The HTTP Content-Type string representing the content type of the body.
gahttpbContentType :: Lens' GoogleAPI__HTTPBody (Maybe Text)
gahttpbContentType
= lens _gahttpbContentType
(\ s a -> s{_gahttpbContentType = a})
instance FromJSON GoogleAPI__HTTPBody where
parseJSON
= withObject "GoogleAPIHTTPBody"
(\ o ->
GoogleAPI__HTTPBody' <$>
(o .:? "data") <*> (o .:? "contentType"))
instance ToJSON GoogleAPI__HTTPBody where
toJSON GoogleAPI__HTTPBody'{..}
= object
(catMaybes
[("data" .=) <$> _gahttpbData,
("contentType" .=) <$> _gahttpbContentType])
-- | Request for predictions to be issued against a trained model. The body
-- of the request is a single JSON object with a single top-level field:
--
-- [instances]
-- A JSON array containing values representing the instances to use for
-- prediction.
--
-- The structure of each element of the instances list is determined by
-- your model\'s input definition. Instances can include named inputs or
-- can contain only unlabeled values. Most data does not include named
-- inputs. Some instances will be simple JSON values (boolean, number, or
-- string). However, instances are often lists of simple values, or complex
-- nested lists. Here are some examples of request bodies: CSV data with
-- each row encoded as a string value:
--
-- > {"instances": ["1.0,true,\\"x\\"", "-2.0,false,\\"y\\""]}
--
-- Plain text:
--
-- > {"instances": ["the quick brown fox", "la bruja le dio"]}
--
-- Sentences encoded as lists of words (vectors of strings):
--
-- > {"instances": [["the","quick","brown"], ["la","bruja","le"]]}
--
-- Floating point scalar values:
--
-- > {"instances": [0.0, 1.1, 2.2]}
--
-- Vectors of integers:
--
-- > {"instances": [[0, 1, 2], [3, 4, 5],...]}
--
-- Tensors (in this case, two-dimensional tensors):
--
-- > {"instances": [[[0, 1, 2], [3, 4, 5]], ...]}
--
-- Images represented as a three-dimensional list. In this encoding scheme
-- the first two dimensions represent the rows and columns of the image,
-- and the third contains the R, G, and B values for each pixel.
--
-- > {"instances": [[[[138, 30, 66], [130, 20, 56], ...]]]}
--
-- Data must be encoded as UTF-8. If your data uses another character
-- encoding, you must base64 encode the data and mark it as binary. To mark
-- a JSON string as binary, replace it with an object with a single
-- attribute named \`b\`:
--
-- > {"b": "..."}
--
-- For example: Two Serialized tf.Examples (fake data, for illustrative
-- purposes only):
--
-- > {"instances": [{"b64": "X5ad6u"}, {"b64": "IA9j4nx"}]}
--
-- Two JPEG image byte strings (fake data, for illustrative purposes only):
--
-- > {"instances": [{"b64": "ASa8asdf"}, {"b64": "JLK7ljk3"}]}
--
-- If your data includes named references, format each instance as a JSON
-- object with the named references as the keys: JSON input data to be
-- preprocessed:
--
-- > {"instances": [{"a": 1.0, "b": true, "c": "x"},
-- > {"a": -2.0, "b": false, "c": "y"}]}
--
-- Some models have an underlying TensorFlow graph that accepts multiple
-- input tensors. In this case, you should use the names of JSON
-- name\/value pairs to identify the input tensors, as shown in the
-- following examples: For a graph with input tensor aliases \"tag\"
-- (string) and \"image\" (base64-encoded string):
--
-- > {"instances": [{"tag": "beach", "image": {"b64": "ASa8asdf"}},
-- > {"tag": "car", "image": {"b64": "JLK7ljk3"}}]}
--
-- For a graph with input tensor aliases \"tag\" (string) and \"image\"
-- (3-dimensional array of 8-bit ints):
--
-- > {"instances": [{"tag": "beach", "image": [[[263, 1, 10], [262, 2, 11], ...]]},
-- > {"tag": "car", "image": [[[10, 11, 24], [23, 10, 15], ...]]}]}
--
-- If the call is successful, the response body will contain one prediction
-- entry per instance in the request body. If prediction fails for any
-- instance, the response body will contain no predictions and will contain
-- a single error entry instead.
--
-- /See:/ 'googleCloudMlV1beta1__PredictRequest' smart constructor.
newtype GoogleCloudMlV1beta1__PredictRequest = GoogleCloudMlV1beta1__PredictRequest'
{ _gcmvprHTTPBody :: Maybe GoogleAPI__HTTPBody
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'GoogleCloudMlV1beta1__PredictRequest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gcmvprHTTPBody'
googleCloudMlV1beta1__PredictRequest
:: GoogleCloudMlV1beta1__PredictRequest
googleCloudMlV1beta1__PredictRequest =
GoogleCloudMlV1beta1__PredictRequest'
{ _gcmvprHTTPBody = Nothing
}
-- | Required. The prediction request body.
gcmvprHTTPBody :: Lens' GoogleCloudMlV1beta1__PredictRequest (Maybe GoogleAPI__HTTPBody)
gcmvprHTTPBody
= lens _gcmvprHTTPBody
(\ s a -> s{_gcmvprHTTPBody = a})
instance FromJSON
GoogleCloudMlV1beta1__PredictRequest where
parseJSON
= withObject "GoogleCloudMlV1beta1PredictRequest"
(\ o ->
GoogleCloudMlV1beta1__PredictRequest' <$>
(o .:? "httpBody"))
instance ToJSON GoogleCloudMlV1beta1__PredictRequest
where
toJSON GoogleCloudMlV1beta1__PredictRequest'{..}
= object
(catMaybes [("httpBody" .=) <$> _gcmvprHTTPBody])
-- | Represents a single hyperparameter to optimize.
--
-- /See:/ 'googleCloudMlV1beta1__ParameterSpec' smart constructor.
data GoogleCloudMlV1beta1__ParameterSpec = GoogleCloudMlV1beta1__ParameterSpec'
{ _gcmvpsMaxValue :: !(Maybe (Textual Double))
, _gcmvpsScaleType :: !(Maybe GoogleCloudMlV1beta1__ParameterSpecScaleType)
, _gcmvpsType :: !(Maybe GoogleCloudMlV1beta1__ParameterSpecType)
, _gcmvpsDiscreteValues :: !(Maybe [Textual Double])
, _gcmvpsParameterName :: !(Maybe Text)
, _gcmvpsCategoricalValues :: !(Maybe [Text])
, _gcmvpsMinValue :: !(Maybe (Textual Double))
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'GoogleCloudMlV1beta1__ParameterSpec' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gcmvpsMaxValue'
--
-- * 'gcmvpsScaleType'
--
-- * 'gcmvpsType'
--
-- * 'gcmvpsDiscreteValues'
--
-- * 'gcmvpsParameterName'
--
-- * 'gcmvpsCategoricalValues'
--
-- * 'gcmvpsMinValue'
googleCloudMlV1beta1__ParameterSpec
:: GoogleCloudMlV1beta1__ParameterSpec
googleCloudMlV1beta1__ParameterSpec =
GoogleCloudMlV1beta1__ParameterSpec'
{ _gcmvpsMaxValue = Nothing
, _gcmvpsScaleType = Nothing
, _gcmvpsType = Nothing
, _gcmvpsDiscreteValues = Nothing
, _gcmvpsParameterName = Nothing
, _gcmvpsCategoricalValues = Nothing
, _gcmvpsMinValue = Nothing
}
-- | Required if type is \`DOUBLE\` or \`INTEGER\`. This field should be unset
-- if type is \`CATEGORICAL\`. This value should be integers if type is
-- \`INTEGER\`.
gcmvpsMaxValue :: Lens' GoogleCloudMlV1beta1__ParameterSpec (Maybe Double)
gcmvpsMaxValue
= lens _gcmvpsMaxValue
(\ s a -> s{_gcmvpsMaxValue = a})
. mapping _Coerce
-- | Optional. How the parameter should be scaled to the hypercube. Leave
-- unset for categorical parameters. Some kind of scaling is strongly
-- recommended for real or integral parameters (e.g.,
-- \`UNIT_LINEAR_SCALE\`).
gcmvpsScaleType :: Lens' GoogleCloudMlV1beta1__ParameterSpec (Maybe GoogleCloudMlV1beta1__ParameterSpecScaleType)
gcmvpsScaleType
= lens _gcmvpsScaleType
(\ s a -> s{_gcmvpsScaleType = a})
-- | Required. The type of the parameter.
gcmvpsType :: Lens' GoogleCloudMlV1beta1__ParameterSpec (Maybe GoogleCloudMlV1beta1__ParameterSpecType)
gcmvpsType
= lens _gcmvpsType (\ s a -> s{_gcmvpsType = a})
-- | Required if type is \`DISCRETE\`. A list of feasible points. The list
-- should be in strictly increasing order. For instance, this parameter
-- might have possible settings of 1.5, 2.5, and 4.0. This list should not
-- contain more than 1,000 values.
gcmvpsDiscreteValues :: Lens' GoogleCloudMlV1beta1__ParameterSpec [Double]
gcmvpsDiscreteValues
= lens _gcmvpsDiscreteValues
(\ s a -> s{_gcmvpsDiscreteValues = a})
. _Default
. _Coerce
-- | Required. The parameter name must be unique amongst all ParameterConfigs
-- in a HyperparameterSpec message. E.g., \"learning_rate\".
gcmvpsParameterName :: Lens' GoogleCloudMlV1beta1__ParameterSpec (Maybe Text)
gcmvpsParameterName
= lens _gcmvpsParameterName
(\ s a -> s{_gcmvpsParameterName = a})
-- | Required if type is \`CATEGORICAL\`. The list of possible categories.
gcmvpsCategoricalValues :: Lens' GoogleCloudMlV1beta1__ParameterSpec [Text]
gcmvpsCategoricalValues
= lens _gcmvpsCategoricalValues
(\ s a -> s{_gcmvpsCategoricalValues = a})
. _Default
. _Coerce
-- | Required if type is \`DOUBLE\` or \`INTEGER\`. This field should be
-- unset if type is \`CATEGORICAL\`. This value should be integers if type
-- is INTEGER.
gcmvpsMinValue :: Lens' GoogleCloudMlV1beta1__ParameterSpec (Maybe Double)
gcmvpsMinValue
= lens _gcmvpsMinValue
(\ s a -> s{_gcmvpsMinValue = a})
. mapping _Coerce
instance FromJSON GoogleCloudMlV1beta1__ParameterSpec
where
parseJSON
= withObject "GoogleCloudMlV1beta1ParameterSpec"
(\ o ->
GoogleCloudMlV1beta1__ParameterSpec' <$>
(o .:? "maxValue") <*> (o .:? "scaleType") <*>
(o .:? "type")
<*> (o .:? "discreteValues" .!= mempty)
<*> (o .:? "parameterName")
<*> (o .:? "categoricalValues" .!= mempty)
<*> (o .:? "minValue"))
instance ToJSON GoogleCloudMlV1beta1__ParameterSpec
where
toJSON GoogleCloudMlV1beta1__ParameterSpec'{..}
= object
(catMaybes
[("maxValue" .=) <$> _gcmvpsMaxValue,
("scaleType" .=) <$> _gcmvpsScaleType,
("type" .=) <$> _gcmvpsType,
("discreteValues" .=) <$> _gcmvpsDiscreteValues,
("parameterName" .=) <$> _gcmvpsParameterName,
("categoricalValues" .=) <$>
_gcmvpsCategoricalValues,
("minValue" .=) <$> _gcmvpsMinValue])
| rueshyna/gogol | gogol-ml/gen/Network/Google/MachineLearning/Types/Product.hs | mpl-2.0 | 89,847 | 0 | 22 | 19,063 | 12,368 | 7,175 | 5,193 | 1,465 | 1 |
-- symbolic calculator
-- from Bartosz Milewski's Basics of Haskell tutorial
-- https://www.schoolofhaskell.com/school/starting-with-haskell/basics-of-haskell
import Control.Monad (mapM_)
import Control.Monad.State
import Data.Char
import Data.List (dropWhileEnd)
import qualified Data.Map as Map
import System.IO (hFlush, stdout)
type SymTab = Map.Map String Double
data Token = TokOp Operator
| TokIdent String
| TokNum Double
| TokAssign
| TokLParen
| TokRParen
| TokEnd
deriving (Show, Eq)
data Expression
data Operator = Plus | Minus | Times | Div deriving (Eq)
instance Show Operator where
show Plus = " + "
show Minus = " - "
show Times = " * "
show Div = " / "
data Tree = SumNode Operator Tree Tree
| ProdNode Operator Tree Tree
| AssignNode String Tree
| UnaryNode Operator Tree
| NumNode Double
| VarNode String
instance Show Tree where
show (VarNode str) = str
show (NumNode x) = show x
show (UnaryNode op t) = "(" ++ (show op) ++ show t ++ ")"
show (AssignNode str t) = "(" ++ str ++ " = " ++ show t ++ ")"
show (ProdNode op t s) = "(" ++ show t ++ (show op) ++ show s ++ ")"
show (SumNode op t s) = "(" ++ show t ++ (show op) ++ show s ++ ")"
operator :: Char -> Operator
operator c | c == '+' = Plus
| c == '-' = Minus
| c == '*' = Times
           | c == '/' = Div
           | otherwise = error $ "Cannot make an operator from " ++ [c]
trim :: String -> String
trim = dropWhileEnd isSpace . dropWhile isSpace
prompt :: String -> IO String
prompt text = do
putStr text
hFlush stdout
getLine
format :: (String, Double) -> String
format (k, v) = k ++ " = " ++ (show v)
-- Tokenizer ---------------------------------------------------------
tokenize :: String -> [Token]
tokenize [] = []
tokenize (c : cs)
| elem c "+-*/" = TokOp (operator c) : tokenize cs
| isDigit c = number c cs
| isAlpha c = identifier c cs
| c == '=' = TokAssign : tokenize cs
| c == '(' = TokLParen : tokenize cs
| c == ')' = TokRParen : tokenize cs
| isSpace c = tokenize cs
| otherwise = error $ "Cannot tokenize " ++ [c]
identifier :: Char -> String -> [Token]
identifier c cs = let (str, cs') = span isAlphaNum cs in
TokIdent (c:str) : tokenize cs'
number :: Char -> String -> [Token]
number c cs =
let (digs, cs') = span isDigit cs in
TokNum (read (c : digs)) : tokenize cs'
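-- Illustrative only (not part of the original tutorial): 'number' above reads
-- plain integral literals as Doubles; a decimal point is not tokenized. For
-- example:
exampleTokens :: [Token]
exampleTokens = tokenize "x1 = 2 + foo"
-- == [TokIdent "x1", TokAssign, TokNum 2.0, TokOp Plus, TokIdent "foo"]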
-- Parser ------------------------------------------------------------
-- Grammar for parsing expressions
-- Expression <- Term [+-] Expression
-- | Identifier '=' Expression
-- | Term
-- Term <- Factor [*/] Term
-- | Factor
-- Factor <- Number
-- | Identifier
-- | [+-] Factor
-- | '(' Expression ')'
parse :: [Token] -> Tree
parse toks = let (tree, toks') = expression toks
in
if null toks'
then tree
else error $ "Leftover tokens: " ++ show toks'
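-- Illustrative only (not part of the original tutorial): parsing a small
-- expression according to the grammar above.
exampleTree :: Tree
exampleTree = parse (tokenize "x = 2 * (3 + 4)")
-- show exampleTree == "(x = (2.0 * (3.0 + 4.0)))"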
expression :: [Token] -> (Tree, [Token])
expression toks =
let (termTree, toks') = term toks
in
case lookAhead toks' of
-- Term [+-] Expression
(TokOp op) | elem op [Plus, Minus] ->
let (exTree, toks'') = expression (accept toks')
in (SumNode op termTree exTree, toks'')
-- Identifier '=' Expression
TokAssign ->
case termTree of
VarNode str ->
let (exTree, toks'') = expression (accept toks')
in (AssignNode str exTree, toks'')
_ -> error "Only variables can be assigned to"
-- Term
_ -> (termTree, toks')
term :: [Token] -> (Tree, [Token])
term toks =
let (facTree, toks') = factor toks
in
case lookAhead toks' of
(TokOp op) | elem op [Times, Div] ->
let (termTree, toks'') = term (accept toks')
in (ProdNode op facTree termTree, toks'')
_ -> (facTree, toks')
factor :: [Token] -> (Tree, [Token])
factor toks =
case lookAhead toks of
(TokNum x) -> (NumNode x, accept toks)
(TokIdent str) -> (VarNode str, accept toks)
(TokOp op) | elem op [Plus, Minus] ->
let (facTree, toks') = factor (accept toks)
in (UnaryNode op facTree, toks')
TokLParen ->
let (expTree, toks') = expression (accept toks)
in
if lookAhead toks' /= TokRParen
then error "Missing right parenthesis"
else (expTree, accept toks')
_ -> error $ "Parse error on token: " ++ show toks
lookAhead :: [Token] -> Token
lookAhead [] = TokEnd
lookAhead (c:cs) = c
accept :: [Token] -> [Token]
accept [] = error "Nothing to accept"
accept (t:ts) = ts
-- Evaluator ---------------------------------------------------------
type Evaluator a = State SymTab a
evaluate :: Tree -> Evaluator Double
evaluate (SumNode op larg rarg) = do
l <- evaluate larg
r <- evaluate rarg
case op of
Plus -> return (l + r)
Minus -> return (l - r)
evaluate (ProdNode op larg rarg) = do
l <- evaluate larg
r <- evaluate rarg
case op of
Times -> return (l * r)
Div -> return (l / r)
evaluate (UnaryNode op arg) = do
x <- evaluate arg
case op of
Plus -> return x
Minus -> return (-x)
evaluate (NumNode x) = return x
evaluate (AssignNode str tree) = do
val <- evaluate tree
assign str val
evaluate (VarNode str) = lookupSymbol str
lookupSymbol :: String -> Evaluator Double
lookupSymbol str = do
symbols <- get
case Map.lookup str symbols of
Just val -> return val
Nothing -> error $ "Undefined variable " ++ str
assign :: String -> Double -> Evaluator Double
assign str val = do
symbols <- get
put $ Map.insert str val symbols
return val
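-- Illustrative only (not part of the original tutorial): running the evaluator
-- against an empty symbol table returns the computed value together with the
-- updated table.
exampleEval :: (Double, SymTab)
exampleEval = runState (evaluate (parse (tokenize "x = 2 * (3 + 4)"))) Map.empty
-- exampleEval == (14.0, Map.fromList [("x", 14.0)])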
main :: IO ()
main = do
putStrLn "Calc - Enter an expression to be evaluated:"
loop (Map.fromList [("pi", pi), ("e", exp 1)])
loop :: SymTab -> IO ()
loop symbols = do
    rawLine <- prompt "> "
    let line = trim rawLine
    case line of
      "" -> do
        loop symbols
      ":s" -> do
        mapM_ (putStrLn . format) (Map.toList symbols)
        loop symbols
      ":q" -> do
        putStrLn "Bye!"
      _ -> do
        let tree = (parse . tokenize) line
            (val, symbols') = runState (evaluate tree) symbols
        print val
        loop symbols'
| cbare/Etudes | haskell/calc.hs | apache-2.0 | 6,611 | 0 | 19 | 2,150 | 2,301 | 1,149 | 1,152 | -1 | -1 |
module Lichen.Config where
import Control.Monad.Reader
import Control.Monad.Except
import Lichen.Error
type Configured c = ReaderT c Erring
runConfigured :: Configured c () -> c -> IO ()
runConfigured m c = do
result <- runExceptT $ runReaderT m c
case result of Left e -> printError e; Right _ -> return ()
| Submitty/AnalysisTools | src/Lichen/Config.hs | bsd-3-clause | 328 | 0 | 11 | 71 | 120 | 61 | 59 | 9 | 2 |
{-# LANGUAGE FlexibleContexts #-}
module QModels where
import Test.Tasty (TestTree, testGroup)
import Test.Tasty.HUnit
import PCA.QModels
import PCA.Distribution
import Numeric.LinearAlgebra.Data
(fromRows, toRows, size, (!), konst, Matrix, Vector, fromList)
import qualified Numeric.LinearAlgebra.HMatrix as H
tests :: TestTree
tests = testGroup "QModels module tests" [unitTests]
unitTests :: TestTree
unitTests =
testCaseSteps "Tests for Q model calculation" $
\step ->
do step "Preparing..."
let n = 10
d = 3
trainT <- H.rand n d
let tau = initQtau
mu = initQmu d
alpha = initQalpha d
w = initQW d
step "Test calculation of Q"
let sigmaX = calcSigmaX d tau w
assertEqual "Size of Sigma_X must dxd" (size sigmaX) (d, d)
let mX = calcMX trainT tau sigmaX w mu
assertEqual
"Fool impl and matrix impl of calcMX must give the same result"
mX
(foolCalcMX trainT tau sigmaX w mu)
assertEqual "Size of mean X is Nxd" (size mX) (n, d)
let x = MatrixMNormal mX sigmaX
let sigmaMu = calcSigmaMu n tau mu
assertEqual "Size of Sigma_Mu is dxd" (size sigmaMu) (d, d)
let mMu = calcMMu trainT tau sigmaMu w x
assertEqual "Size of m_Mu is 1xd" (size mMu) d
assertEqual
"Fool impl and matrix impl of calcMMu must give the same result"
mMu
(foolCalcMMu trainT tau sigmaMu w x)
let sigmaW = calcSigmaW tau alpha x
assertEqual "Size of sigma_W is dxd" (size sigmaW) (d, d)
assertBool
"Fool impl and matrix impl of calcSigmaW must give the same result"
(H.maxElement (foolCalcSigmaW tau alpha x - sigmaW) < 0.001)
let mW = calcMW tau sigmaW x trainT mu
assertEqual "Size of m_W is dxd" (size mW) (d, d)
assertBool
"Fool impl and matrix impl of calcMW must give the same result"
(H.maxElement (foolCalcMW tau sigmaW x trainT mu - mW) < 0.001)
let bAlpha = calcBalpha (konst 1.0e-3 d) w
assertEqual "Size of b_alpha is d" (size bAlpha) d
assertEqual
"Fool impl and matrix impl of calcBalpha must give the same result"
(foolCalcBalpha (konst 1.0e-3 d) w)
bAlpha
let bTau = calcBtau 1.0e-3 trainT mu w x
assertBool
"Fool impl and matrix impl of calcBtau must give the same result"
             (abs (foolCalcBtau 1.0e-3 trainT mu w x - bTau) < 0.001)
foolCalcMX trainT tau sigmaX w mu =
fromRows
(map
(\r ->
coef H.#> (r - mean mu))
(toRows trainT))
where
coef = mean tau `H.scale` sigmaX H.<> H.tr (mean w)
foolCalcMMu trainT tau sigmaMu w x =
(mean tau `H.scale` sigmaMu) H.#>
foldr
(\r resV ->
resV + ((trainT ! r) - mW H.#> (mX ! r)))
(konst 0 d)
[0 .. (n - 1)]
where
mW = mean w
mX = mean x
(n,d) = size trainT
foolCalcSigmaW tau alpha x =
H.inv
(H.diag (mean alpha) +
mean tau `H.scale`
(foldr
(\r resM ->
resM + variance x + r `H.outer` r)
(H.diagl [0 | _ <- [1 .. d]])
(toRows mX)))
where
mX = mean x
(_,d) = size mX
foolCalcMW
:: Distr Double Double
-> Matrix Double
-> Distr (Matrix Double) (Matrix Double)
-> Matrix Double
-> Distr (Vector Double) (Matrix Double)
-> Matrix Double
foolCalcMW tau sigmaW x trainT mu =
fromRows
(map
(\k ->
(mean tau `H.scale` sigmaW) H.#>
(foldr
(\i resV ->
resV +
((trainT ! i ! k) - (mMu ! k)) `H.scale` (mX ! i))
(konst 0 d)
[0 .. (n - 1)]))
[0 .. (d - 1)])
where
(n,d) = size trainT
mMu = mean mu
mX = mean x
foolCalcBalpha :: Vector Double
-> Distr (Matrix Double) (Matrix Double)
-> Vector Double
foolCalcBalpha b w =
fromList
(map
(\i ->
b ! i + (mW ! i) `H.dot` (mW ! i) / 2)
[0 .. (d - 1)])
where
mW = mean w
(d,_) = size mW
foolCalcBtau
:: Double
-> Matrix Double
-> Distr (Vector Double) (Matrix Double)
-> Distr (Matrix Double) (Matrix Double)
-> Distr (Matrix Double) (Matrix Double)
-> Double
foolCalcBtau b trainT mu w x =
b +
0.5 *
sum
(map
(\i ->
normSq (trainT ! i) + normSq mMu +
2 * (mMu `H.dot` (mW H.#> (mX ! i))) -
2 * (trainT ! i `H.dot` (mW H.#> (mX ! i))) -
2 * (trainT ! i `H.dot` mMu))
[0 .. (n - 1)])
where
normSq v = v `H.dot` v
mMu = mean mu
mX = mean x
mW = mean w
(n,_) = size trainT
| DbIHbKA/vbpca | test/QModels.hs | bsd-3-clause | 5,225 | 0 | 21 | 2,123 | 1,751 | 898 | 853 | 145 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE JavaScriptFFI, GHCForeignImportPrim #-}
-----------------------------------------------------------------------------
-- |
-- Module : Main
-- Copyright : Copyright (C) 2015 Artem M. Chirkin <[email protected]>
-- License : BSD3
--
-- Maintainer : Artem M. Chirkin <[email protected]>
-- Stability : Experimental
-- Portability :
--
--
-----------------------------------------------------------------------------
module Main
( main
) where
import Foreign.Marshal
import Foreign.Storable
import Foreign.Ptr
--import JsHs.Types
--import GHCJS.Prim
--import GHCJS.Marshal
--import JavaScript.Array
--import Unsafe.Coerce
--import Data.Geometry.Prim.JSNum
#if defined(ghcjs_HOST_OS)
import JsHs.TypedArray
import JsHs.TypedArray.IO
import qualified Control.Monad.ST as ST
import qualified JsHs.TypedArray.ST as ST
import Data.Int
import qualified GHC.Exts as Exts
import Unsafe.Coerce
--import JsHs.JSString
import JsHs.Types
#else
#endif
import Data.Geometry
main :: IO ()
main = do
print a
print b
print $ a .*. b
print $ dot a b
putStrLn "hello world!"
-- printRef $ coerce m
print m
print $ trace m
print $ det m
print $ transpose m
print $ toDiag b
print $ fromDiag m
print l
print $ a * 5
print $ abs c
print $ negate a
print $ c * a
print $ inverse m
print $ inverse m `prod` m
print $ m `prod` inverse m
print $ inverse l
print (inverse eye :: Matrix 3 Float)
print $ m `prod` diag 3
print $ a > b
print $ compare a b
print $ compare c c
print $ compare c d
print $ 2 >= a
print $ 2 > a
print $ a < 3
print $ 2 > a / 1.4
marr <- mallocArray 5 :: IO (Ptr (Matrix 4 Float))
poke marr 3
pokeElemOff marr 1 m
pokeElemOff marr 4 eye
pokeElemOff marr 2 pi
pokeByteOff marr (3*16*4) l
peek marr >>= print
peekElemOff marr 1 >>= print
peekElemOff marr 2 >>= print
peekElemOff marr 3 >>= print
peekElemOff marr 4 >>= print
#if defined(ghcjs_HOST_OS)
let barr = typedArray 5 :: TypedArray (Vector 6 Float)
arr = fromList [a,b,c,d]
sarr = fillNewTypedArray 3 d
e = vector4 1 (-2.0124) 9.72 0.23
iv1 = vector3 113 63 (-135) :: Vector3 Int
iv2 = vector3 7 (-36) 15 :: Vector3 Int
printAny barr
printAnyVal sarr
printAnyVal arr
print (arr ! 2 == c)
print (elemSize barr)
print (elemSize arr)
print (elemSize sarr)
printAnyVal (fromArray arr :: TypedArray (Vector 2 Double))
printAnyVal (fromArray (fromList [1,2,3 :: Float]) :: TypedArray Double)
print $ (arrayView $ arrayBuffer barr) ! 2 - d
print $ (arrayView . arrayBuffer $ fillNewTypedArray 8 (4::Float)) ! 1 * d
barr2 <- thaw barr
setIndex 2 1 barr2
setList 1 [0.3,23,12.3262] barr2
let barr3 = ST.runST $ do
barr35 <- ST.unsafeThaw barr
ST.setIndex 0 2.34 barr35
ST.freeze barr35
print barr3
print barr
print barr2
putStrLn "Decomposing matrices"
print m
print $ colsOfM4 m
print $ rowsOfM4 m
putStrLn "Enum tests"
print ([vector2 0 3, 3.1 .. 9] :: [Vector 2 Double])
putStrLn "Resizing"
print (resizeMatrix m :: Matrix 2 Float)
print (resizeMatrix m :: Matrix 7 Float)
print (resizeMatrix m :: Matrix 4 Float)
print (resizeVector e :: Vector 2 Double)
print (resizeVector e :: Vector 7 Double)
print (resizeVector e :: Vector 5 Double)
putStrLn "Real tests"
print (realToFrac e :: Vector 3 Float)
print (realToFrac e :: Vector 4 Float)
print (realToFrac e :: Vector 5 Double)
print (realToFrac d :: Vector 6 Double)
print (realToFrac e :: Vector 2 Double)
putStrLn "Integral tests"
print (fromIntegral iv1 * e)
print (fromIntegral iv2 * vector2 1 (2::Double))
print (iv1 `div` iv2)
putStrLn "lcm+gcd"
print (lcm iv1 iv2)
print (gcd iv1 iv2)
putStrLn "integral power"
print (iv2 ^ vector3 0 1 (2::Int))
putStrLn "RealFrac tests"
print e
print (round e :: Vector 2 Int)
print (floor e :: Vector 3 Int16)
print (truncate e :: Vector 6 Int)
print (ceiling e :: Vector 5 Int)
print (4.3 :: QFloat)
print (7.6 :: QDouble)
#else
#endif
where a = vector4 2 0 0 2 :: Vector 4 Float
b = vector4 0 1 0 0 :: Vector 4 Float
c = vector4 0 0 4 0 :: Vector 4 Float
d = vector4 0 2 0 1 :: Vector 4 Float
m = matrix4x4 a b c d
l = diag 6 :: Matrix 3 Float
#if defined(ghcjs_HOST_OS)
-- | Printing anything without conversion of types
printAny :: a -> IO ()
printAny = printAny' . unsafeCoerce
foreign import javascript safe "console.log($1)"
printAny' :: Exts.Any -> IO ()
-- | Printing anything without conversion of types, attempting to get value from the heap object
printAnyVal :: a -> IO ()
printAnyVal = printVal' . unsafeCoerce
foreign import javascript safe "console.log($1)"
printVal' :: JSVal -> IO ()
#else
#endif
| achirkin/fastvec | src/Main.hs | bsd-3-clause | 5,125 | 9 | 16 | 1,342 | 1,792 | 854 | 938 | 59 | 1 |
module Biolab.Analysis.Utils (
absoluteToRelativeTime,
trim,
grEstimationParameters,
exponentialApproximation,
exponentialDerivative,
exponentialFit,
)
where
import Data.Time (UTCTime, NominalDiffTime, diffUTCTime)
import Statistics.Types (Sample(..))
import Statistics.LinearRegression (nonRandomRobustFit, defaultEstimationParameters, EstimationParameters(..))
import qualified Data.Vector as V
import qualified Data.Vector.Unboxed as U
absoluteToRelativeTime :: V.Vector (UTCTime,a) -> (UTCTime, V.Vector (NominalDiffTime,a))
absoluteToRelativeTime v = (start, V.map (\(x,y) -> (x `diffUTCTime` start,y)) v)
where
start = fst . V.head $ v
trim :: (Ord a) => V.Vector (NominalDiffTime,a) -> V.Vector (NominalDiffTime,a)
trim v | V.null v = v
| otherwise = V.reverse . trim_prefix . V.reverse . trim_suffix . remove_initial_spike $ v
where
trim_suffix v = V.takeWhile ((< (V.maximum . V.map snd $ v)) . snd) v
trim_prefix v = V.takeWhile ((> (V.minimum . V.map snd $ v)) . snd) v
remove_initial_spike v = if (V.maximum . V.map snd $ v) == (V.maximum . V.map snd . V.takeWhile ((< 4800) . realToFrac . fst) $ v)
then V.tail . V.dropWhile ((< (V.maximum . V.map snd $ v)) . snd) $ v
else v
grEstimationParameters = defaultEstimationParameters {outlierFraction = 0.4}
exponentialApproximation :: Sample -> Sample -> Sample
exponentialApproximation xs ys = U.map (\x -> exp (beta*x+alpha)) xs
where
(alpha,beta) = exponentialFit xs ys
exponentialFit :: Sample -> Sample -> (Double,Double)
exponentialFit xs ys = nonRandomRobustFit grEstimationParameters xs . U.map log $ ys
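-- exponentialFit yields the (intercept, slope) of a robust linear fit of
-- log ys against xs, so exponentialApproximation is exp (slope*x + intercept)
-- and the derivative below is simply slope times that approximation.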
exponentialDerivative :: Sample -> Sample -> Sample
exponentialDerivative xs ys = U.map (beta*) . exponentialApproximation xs $ ys
where
(alpha,beta) = exponentialFit xs ys
| uriba/biolab-analysis | Biolab/Analysis/Utils.hs | bsd-3-clause | 1,949 | 0 | 18 | 440 | 695 | 381 | 314 | 32 | 2 |
import Control.Concurrent (myThreadId)
import System.IO (stdout, hSetBuffering, BufferMode(LineBuffering))
import System.Random (randomIO)
import Streamly
import Streamly.Prelude (drain, nil, yieldM)
main :: IO ()
main = drain $ do
yieldM $ hSetBuffering stdout LineBuffering
x <- loop "A " 2
y <- loop "B " 2
yieldM $ myThreadId >>= putStr . show
>> putStr " "
>> print (x, y)
where
-- we can just use
-- parallely $ mconcat $ replicate n $ yieldM (...)
loop :: String -> Int -> SerialT IO String
loop name n = do
rnd <- yieldM (randomIO :: IO Int)
let result = name <> show rnd
repeatIt = if n > 1 then loop name (n - 1) else nil
in return result `wAsync` repeatIt
| harendra-kumar/asyncly | test/nested-loops.hs | bsd-3-clause | 769 | 0 | 16 | 224 | 260 | 135 | 125 | 19 | 2 |
import qualified Prelude
import Feldspar
import Language.Embedded.Imperative
import Feldspar.Compiler.ToMutable
prog :: Program (RefCMD Data :+: ControlCMD Data) (Data Int32)
prog = do
ir <- initRef (0 :: Data Int32)
ar <- initRef (1 :: Data Int32)
xr <- initRef (6 :: Data Int32)
setRef xr 67
x1 <- getRef xr
while
(do i <- getRef ir; setRef ir (i+1); return (i < 10))
(do a <- getRef ar; setRef ar (a*2))
setRef xr 102
x2 <- getRef xr
a <- getRef ar
return (x1+x2+a)
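-- Expected result: x1 = 67, x2 = 102 and a = 2^10 = 1024 (the while loop
-- doubles 'a' ten times), so the program evaluates to 1193 as checked in main.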
main = do
drawAST $ toMutable prog
x <- eval $ toMutable prog
if x Prelude.== 1193
then return ()
else fail ("wrong result " Prelude.++ show x)
| emwap/feldspar-compiler-shim | test/ToMutable.hs | bsd-3-clause | 702 | 0 | 13 | 201 | 320 | 152 | 168 | -1 | -1 |
module MAC where
import CLaSH.Prelude
ma acc (x, y) = acc + x * y
macT acc (x,y) = (acc', o)
where
acc' = ma acc(x,y)
o = acc
--macT acc inp = (ma acc inp, acc)
mac = mealy macT 0
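-- 'mac' is the transfer function turned into a Mealy machine with an initial
-- accumulator of 0; it outputs the previous accumulator each cycle, so the
-- testInput below yields 0, 1, 5, 14 (see expectedOutput).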
topEntity :: Signal (Signed 9, Signed 9) -> Signal (Signed 9)
topEntity = mac
testInput :: Signal (Signed 9,Signed 9)
testInput = stimuliGenerator $(listToVecTH [(1,1) :: (Signed 9,Signed 9),(2,2),(3,3),(4,4)])
expectedOutput :: Signal (Signed 9) -> Signal Bool
expectedOutput = outputVerifier $(listToVecTH [0 :: Signed 9,1,5,14])
| trxeste/wrk | haskell/cl3sh/mac.hs | bsd-3-clause | 531 | 0 | 11 | 106 | 267 | 147 | 120 | -1 | -1 |
{-# LANGUAGE DeriveDataTypeable #-}
{- | This module implements KOI8-R encoding which covers the russian and bulgarian alphabet.
See <http://en.wikipedia.org/wiki/KOI8-R> for more information.
-}
module Data.Encoding.KOI8R
(KOI8R(..)) where
import Control.OldException (throwDyn)
import Data.Array.Unboxed
import Data.Char (ord,chr)
import qualified Data.ByteString.Lazy as Lazy
import Data.Map hiding (map,(!))
import Data.Word
import Prelude hiding (lookup)
import Data.Typeable
import Data.Encoding.Base
data KOI8R = KOI8R deriving (Eq,Show,Typeable)
koi8rArr :: UArray Word8 Char
koi8rArr = listArray (128,255) koi8rList
koi8rMap :: Map Char Word8
koi8rMap = fromList (zip koi8rList [128..])
koi8rList :: [Char]
koi8rList =
['\x2500','\x2502','\x250c','\x2510','\x2514','\x2518','\x251c','\x2524'
,'\x252c','\x2534','\x253c','\x2580','\x2584','\x2588','\x258c','\x2590'
,'\x2591','\x2592','\x2593','\x2320','\x25a0','\x2219','\x221a','\x2248'
,'\x2264','\x2265','\x00a0','\x2321','\x00b0','\x00b2','\x00b7','\x00f7'
,'\x2550','\x2551','\x2552','\x0451','\x2553','\x2554','\x2555','\x2556'
,'\x2557','\x2558','\x2559','\x255a','\x255b','\x255c','\x255d','\x255e'
,'\x255f','\x2560','\x2561','\x0401','\x2562','\x2563','\x2564','\x2565'
,'\x2566','\x2567','\x2568','\x2569','\x256a','\x256b','\x256c','\x00a9'
,'\x044e','\x0430','\x0431','\x0446','\x0434','\x0435','\x0444','\x0433'
,'\x0445','\x0438','\x0439','\x043a','\x043b','\x043c','\x043d','\x043e'
,'\x043f','\x044f','\x0440','\x0441','\x0442','\x0443','\x0436','\x0432'
,'\x044c','\x044b','\x0437','\x0448','\x044d','\x0449','\x0447','\x044a'
,'\x042e','\x0410','\x0411','\x0426','\x0414','\x0415','\x0424','\x0413'
,'\x0425','\x0418','\x0419','\x041a','\x041b','\x041c','\x041d','\x041e'
,'\x041f','\x042f','\x0420','\x0421','\x0422','\x0423','\x0416','\x0412'
,'\x042c','\x042b','\x0417','\x0428','\x042d','\x0429','\x0427','\x042a'
]
koi8rDecode :: Word8 -> Char
koi8rDecode ch
| ch < 128 = chr $ fromIntegral ch
| otherwise = koi8rArr!ch
koi8rEncode :: Char -> Word8
koi8rEncode ch
| ch < '\128' = fromIntegral $ ord ch
| otherwise = case lookup ch koi8rMap of
Just w -> w
Nothing -> throwDyn (HasNoRepresentation ch)
instance Encoding KOI8R where
encode _ = encodeSinglebyte koi8rEncode
encodeLazy _ = encodeSinglebyteLazy koi8rEncode
encodable _ c = (c < '\128') || (member c koi8rMap)
decode _ = decodeSinglebyte koi8rDecode
decodeLazy _ str = concatMap (decodeSinglebyte koi8rDecode) (Lazy.toChunks str)
decodable _ = const True
| abuiles/turbinado-blog | tmp/dependencies/encoding-0.4.1/Data/Encoding/KOI8R.hs | bsd-3-clause | 2,545 | 6 | 11 | 250 | 429 | 229 | 200 | 52 | 2 |
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE DataKinds #-}
module CANOpen.Tower.Types where
import Ivory.Language
import Ivory.Serialize
import Ivory.Tower
newtype DictError =
DictError { unDictError :: Uint8
} deriving (IvoryType, IvoryVar, IvoryExpr, IvoryEq, IvoryStore, IvoryInit, IvoryZeroVal)
noError :: DictError
noError = DictError 0
notFound :: DictError
notFound = DictError 1
readOnly :: DictError
readOnly = DictError 2
writeOnly :: DictError
writeOnly = DictError 3
sizeMismatch :: DictError
sizeMismatch = DictError 4
sizeMismatchParamHigh :: DictError
sizeMismatchParamHigh = DictError 5
sizeMismatchParamLow :: DictError
sizeMismatchParamLow = DictError 6
subindexNotFound :: DictError
subindexNotFound = DictError 7
unhandled :: DictError
unhandled = DictError 128
[ivory|
struct mux
{ addr :: Stored Uint16
; sub :: Stored Uint8
}
string struct PackBuffer 128
struct muxpack
{ mp_mux :: Struct mux
; mp_buf :: PackBuffer
}
struct setresult
{ setres_ok :: Stored IBool
; setres_error :: Stored DictError
; setres_mux :: Struct mux
}
struct getresult
{ getres_ok :: Stored IBool
; getres_error :: Stored DictError
; getres_mux :: Struct mux
; getres_buf :: PackBuffer
}
|]
data ObjDict =
ObjDict
{ objdict_init :: ChanInput ('Stored IBool)
, objdict_get_in :: ChanInput ('Struct "mux")
, objdict_get_out :: ChanOutput ('Struct "getresult")
, objdict_set_in :: ChanInput ('Struct "muxpack")
, objdict_set_out :: ChanOutput ('Struct "setresult")
}
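-- ObjDict groups the channels a dictionary implementation exposes: an init
-- trigger plus get/set request/response channels, with entries addressed by
-- the (index, subindex) pair carried in the "mux" struct.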
dictTypes :: Module
dictTypes = package "dict_types" $ do
defStringType (Proxy :: Proxy PackBuffer)
defStruct (Proxy :: Proxy "mux")
defStruct (Proxy :: Proxy "muxpack")
defStruct (Proxy :: Proxy "getresult")
defStruct (Proxy :: Proxy "setresult")
depend serializeModule
wrappedPackMod muxWrapper
dictTowerDeps :: Tower e ()
dictTowerDeps = do
towerDepends dictTypes
towerModule dictTypes
muxWrapper :: WrappedPackRep ('Struct "mux")
muxWrapper = wrapPackRep "mux" $
packStruct
[ packLabel addr
, packLabel sub
]
instance Packable ('Struct "mux") where
packRep = wrappedPackRep muxWrapper
packsize :: (Packable a) => ConstRef s a -> Sint32
packsize = packsize' packRep
packsize' :: PackRep a -> ConstRef s a -> Sint32
packsize' rep _ = fromIntegral $ packSize rep
| distrap/ivory-tower-canopen | ivory-tower-canopen-core/src/CANOpen/Tower/Types.hs | bsd-3-clause | 2,468 | 0 | 12 | 460 | 558 | 290 | 268 | 62 | 1 |
module VisualBrowseTree where
import Control.Concurrent
import Control.Concurrent.STM
import Control.Monad
import Data.Maybe
import Data.Tree
import Data.Tree.Zipper
import qualified Data.Foldable as Foldable
import qualified Data.Traversable as T
import Graphics.UI.Gtk
import Graphics.UI.Gtk.WebKit.Download
import Graphics.UI.Gtk.WebKit.NetworkRequest
import Graphics.UI.Gtk.WebKit.WebFrame
import Graphics.UI.Gtk.WebKit.WebNavigationAction
import Graphics.UI.Gtk.WebKit.WebSettings
import Graphics.UI.Gtk.WebKit.WebView
import Graphics.UI.Gtk.WebKit.WebWindowFeatures
import System.Exit
import System.Process
import Text.Printf
import Utils
import NotebookSimple
import Datatypes
import Commands
browseTreeToSVG :: [Tree Page] -> IO String
browseTreeToSVG btree = do
let ellipsis t n | length t < n = t
| otherwise = take n t ++ "..."
nodeID <- newTVarIO (0::Int)
btree' <- mapM (T.mapM (\p -> do
i <- atomically $ do
iden <- readTVar nodeID
writeTVar nodeID (iden+1)
return iden
t <- getPageTitle p
return (i,(t,mkStablePageLink p)))) btree
let prelude = unlines ["digraph \"Browse tree\" {",
"graph [",
"fontname = \"Helvetica-Oblique\",",
"page = 10",
"size = 30",
" ];"]
-- labels = unlines [ printf "d%d [label=\"%s\"];" i t | (i,t) <- concatMap flatten btree' ]
footer = "}"
btreeZip = concatMap flattenToZipper' btree' -- ellipsis t 15
labels = unlines [ printf "d%d [URL=\"%s\", shape=polygon, fixedsize=true, fontsize=8, width=1.25, height=0.25, tooltip=\"%s\", label=\"%s\"];"
i link t (ellipsis t 18) | (i,(t,link)) <- map label btreeZip ]
edges = unlines [ printf "d%d -> d%d;" (fst . label . fromJust . parent $ z ) (fst . label $ z)
| z <- btreeZip,
parent z /= Nothing]
edges2 = [ ((fst . label . fromJust . parent $ z ),(fst . label $ z))
| z <- btreeZip,
parent z /= Nothing]
everything = prelude ++ edges ++ labels ++ footer
print everything
print edges2
tot@(code,svg,dotErr) <- readProcessWithExitCode "dot" ["-Tsvg"] everything
-- _ <- readProcessWithExitCode "dot" ["-Tsvg","-ograph.svg"] everything
-- _ <- readProcessWithExitCode "dot" ["-ograph.dot"] everything
print tot
case code of
ExitSuccess -> return svg
ExitFailure c -> return $ printf "<text>Error running 'dot' command. Exit code: %s\n%s</text>" (show c) dotErr
-- mkStablePageLink page = let (GObject foreignPtr) = toGObject (pgWidget page) in PageLink foreignPtr
lookupStablePageLink :: [Tree Page] -> PageLink -> Maybe Page
lookupStablePageLink pages link = listToMaybe $ catMaybes (map (Foldable.find (\p -> mkStablePageLink p == link)) pages)
mkStablePageLink page = "page://" ++ (show $ pgIdent page)
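-- Pages are referenced from the generated SVG by URLs of the form
-- "page://<ident>"; lookupStablePageLink resolves such a link back to the
-- corresponding Page in the browse tree (used by the navigation handler).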
mkLinks :: [Page] -> [PageLink]
mkLinks = map mkStablePageLink
-- visualBrowseTreeWidget :: t -> IO Widget
visualBrowseTreeWidget :: (Page -> IO ()) -> TVar [Tree Page] -> IO Widget
visualBrowseTreeWidget viewPage btreeVar = do
-- webkit widget
web <- webViewNew
webViewSetTransparent web True
webViewSetFullContentZoom web True
-- scrolled window to enclose the webkit
scrollWeb <- scrolledWindowNew Nothing Nothing
containerAdd scrollWeb web
settings <- webViewGetWebSettings web
set settings [webSettingsEnablePlugins := False]
let refreshSVG = do
svg <- browseTreeToSVG =<< readTVarIO btreeVar
webViewLoadString web svg (Just "image/svg+xml") Nothing ""
on web navigationPolicyDecisionRequested $ \ webframe networkReq webNavAct webPolDec -> do
print "[navigationPolicyDecisionRequested]"
muri <- networkRequestGetUri networkReq
case muri of
Nothing -> return ()
Just uri -> do
print ("visualBrowseTreeWidget",uri)
t <- readTVarIO btreeVar
case lookupStablePageLink t uri of
Nothing -> print ("Page no longer exist: " ++ show uri)
-- Just p -> viewPage p
Just p -> sendCommand (ViewPageCommand (pgIdent p)) -- viewPage p
return True
-- watch btreeVar for changes, update
let watchdog page = do
page' <- waitTVarChangeFrom page btreeVar
postGUIAsync refreshSVG
watchdog page'
forkIO (watchdog =<< readTVarIO btreeVar)
forkIO (forever $ do
threadDelay (10^6)
postGUIAsync refreshSVG)
refreshSVG
return (toWidget scrollWeb)
visualBrowseTreeWindow viewPage btreeVar = do
window <- windowNew
visualBT <- visualBrowseTreeWidget viewPage btreeVar
set window [ containerBorderWidth := 10,
windowTitle := "Spike browser - visual browse tree",
containerChild := visualBT,
windowAllowGrow := True ]
widgetShowAll window
return window | Tener/spike | src/VisualBrowseTree.hs | bsd-3-clause | 5,083 | 0 | 23 | 1,347 | 1,274 | 640 | 634 | 108 | 3 |
{-# LANGUAGE OverloadedStrings #-}
module Lib
( someFunc
) where
import Data.JSString ()
import GHCJS.Types
foreign import javascript unsafe "window.alert($1)" js_alert :: JSString -> IO ()
someFunc :: IO ()
someFunc = js_alert "Hello from GHCJS!" | shulhi/newvdom | src/Lib.hs | bsd-3-clause | 258 | 4 | 6 | 46 | 65 | 36 | 29 | 8 | 1 |
module PrimFunction (primitiveFunctions) where
import Control.Monad.Except
import Data.Array.IArray ((!), bounds)
import Data.Char (toLower)
import Data.Foldable (foldrM)
import Definition
import qualified LispVector as V
import Variable (runEvaled)
import Unpacker
------- Primitive Function Mapping Tuples -------
primitiveFunctions :: [(String, LFunction)]
primitiveFunctions =
[
-- Numeric Operations
("+" , numericBinop (+) )
, ("-" , numericBinop (-) )
, ("*" , numericBinop (*) )
, ("/" , numericBinop (/) )
, ("div" , numericBinop div )
, ("mod" , numericBinop mod )
, ("quotient" , numericBinop quot )
, ("remainder" , numericBinop rem )
-- Operations Resulting in Boolean Equalities
, ("=" , numBoolBinop (==) )
, ("<" , numBoolBinop (<) )
, (">" , numBoolBinop (>) )
, ("/=" , numBoolBinop (/=) )
, (">=" , numBoolBinop (>=) )
, ("<=" , numBoolBinop (<=) )
, ("&&" , boolBoolBinop (&&) )
, ("||" , boolBoolBinop (||) )
, ("string=?" , strBoolBinop id (==) )
, ("string<=?" , strBoolBinop id (<=) )
, ("string>=?" , strBoolBinop id (>=) )
, ("string<?" , strBoolBinop id (<) )
, ("string>?" , strBoolBinop id (>) )
, ("string-ci=?" , strBoolBinop (map toLower) (==) )
, ("string-ci<=?" , strBoolBinop (map toLower) (<=) )
, ("string-ci>=?" , strBoolBinop (map toLower) (>=) )
, ("string-ci<?" , strBoolBinop (map toLower) (<) )
, ("string-ci>?" , strBoolBinop (map toLower) (>) )
, ("equal?" , equal )
-- Lists/Pairs
, ("car" , car )
, ("cdr" , cdr )
, ("cons" , cons )
-- Type Testing
, ("string?" , isLString )
, ("number?" , isLNumber )
, ("symbol?" , isLAtom )
, ("boolean?" , isLBoolean )
, ("char?" , isLChar )
, ("port?" , isLPort )
, ("pair?" , isPair )
, ("vector?" , isLVector )
, ("procedure?" , isProcedure )
, ("list?" , isLList )
-- Symbol Handling
, ("string->symbol" , stringToAtom )
-- String Functions
, ("make-string" , makeString )
, ("substring" , substring )
, ("string-append" , stringAppend )
, ("list->string" , listToString )
, ("string->list" , stringToList )
, ("string-length" , stringLength )
, ("string-ref" , stringRef )
-- Vector Functions,
, ("vector" , vector )
, ("make-vector" , makeVector )
, ("list->vector" , listToVector )
, ("vector->list" , vectorToList )
, ("vector-length" , vectorLength )
, ("vector-ref" , vectorRef )
]
------- Polymorphic Binary Operations -------
numericBinop :: (SchemeNumber -> SchemeNumber -> SchemeNumber)
-> LFunction
numericBinop op params = LNumber . foldl1 op <$> mapM unpackNum params
boolBinop :: (LispVal -> Evaled a)
-> (a -> a -> Bool)
-> LFunction
boolBinop unpacker op args = if length args /= 2
then throwError $ NumArgs 2 args
else do left <- unpacker $ head args
right <- unpacker $ args !! 1
return . LBool $ left `op` right
numBoolBinop :: (SchemeNumber -> SchemeNumber -> Bool) -> LFunction
numBoolBinop = boolBinop unpackNum
strBoolBinop :: (String -> String) -> (String -> String -> Bool) -> LFunction
strBoolBinop = boolBinop . unpackStr
boolBoolBinop :: (Bool -> Bool -> Bool) -> LFunction
boolBoolBinop = boolBinop unpackBool
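-- For example (sketch): @numBoolBinop (==) [LNumber (SInt 1), LNumber (SInt 1)]@
-- evaluates to @LBool True@; any argument count other than two raises 'NumArgs'.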
------- List/Pair Operations -------
car :: LFunction
car [LList (x:_)] = return x
car [LDottedList (x:_) _] = return x
car [badArg] = throwError $ TypeMismatch "pair" badArg
car badArgList = throwError $ NumArgs 1 badArgList
cdr :: LFunction
cdr [LList (_:xs)] = return $ LList xs
cdr [LDottedList (_:xs) x] = return $ LDottedList xs x
cdr [badArg] = throwError $ TypeMismatch "pair" badArg
cdr badArgList = throwError $ NumArgs 1 badArgList
cons :: LFunction
cons [x, LList []] = return $ LList [x]
cons [x, LList xs] = return . LList $ x:xs
cons [x, LDottedList xs xlast] = return $ LDottedList (x:xs) xlast
cons [x1, x2] = return $ LDottedList [x1] x2
cons badArgList = throwError $ NumArgs 2 badArgList
------- Equality Checks -------
-- TODO: Currently 'equal' coerces values before checking equality
equal :: LFunction
equal = go
where
go :: LFunction
go [LList xs, LList ys] = return . LBool $ listEqual xs ys
go [LDottedList xs xlast, LDottedList ys ylast] = do
(LBool lastEquals) <- equal [xlast, ylast]
if lastEquals
then return . LBool $ listEqual xs ys
else return $ LBool False
go [LVector xs, LVector ys] = return . LBool $ vectorEqual xs ys
go [arg1, arg2] = do
primitiveEquals <- or <$> mapM (unpackEquals arg1 arg2)
[ AnyUnpacker unpackNum
, AnyUnpacker (unpackStr id)
, AnyUnpacker unpackBool
]
return . LBool $ primitiveEquals || (arg1 == arg2)
go badArgList = throwError $ NumArgs 2 badArgList
listEqual :: [LispVal] -> [LispVal] -> Bool
listEqual xs ys
| length xs /= length ys = False
| otherwise = all equalPair $ zip xs ys
equalPair :: (LispVal, LispVal) -> Bool
equalPair (x, y) = runEvaled (const False) unpackBoolCoerce (equal [x, y])
vectorEqual :: SVector LispVal -> SVector LispVal -> Bool
vectorEqual xs ys =
  let (_, xHigh) = bounds xs
      (_, yHigh) = bounds ys
      allEqual i = i > xHigh ||
                   (equalPair (xs ! i, ys ! i) && allEqual (i + 1))
  in xHigh == yHigh && allEqual 0
------- Type Testing -------
isLString :: LFunction
isLString [LString _] = return $ LBool True
isLString [_] = return $ LBool False
isLString vals = throwError $ NumArgs 1 vals
isLNumber :: LFunction
isLNumber [LNumber _] = return $ LBool True
isLNumber [_] = return $ LBool False
isLNumber vals = throwError $ NumArgs 1 vals
isLBoolean :: LFunction
isLBoolean [LBool _] = return $ LBool True
isLBoolean [_] = return $ LBool False
isLBoolean vals = throwError $ NumArgs 1 vals
isLChar :: LFunction
isLChar [LChar _] = return $ LBool True
isLChar [_] = return $ LBool False
isLChar vals = throwError $ NumArgs 1 vals
isLPort :: LFunction
isLPort [LPort _] = return $ LBool True
isLPort [_] = return $ LBool False
isLPort vals = throwError $ NumArgs 1 vals
isLAtom :: LFunction
isLAtom [LAtom _] = return $ LBool True
isLAtom [_] = return $ LBool False
isLAtom vals = throwError $ NumArgs 1 vals
isLList :: LFunction
isLList [LList _] = return $ LBool True
isLList [_] = return $ LBool False
isLList vals = throwError $ NumArgs 1 vals
isLDottedList :: LFunction
isLDottedList [LDottedList _ _] = return $ LBool True
isLDottedList [_] = return $ LBool False
isLDottedList vals = throwError $ NumArgs 1 vals
isPair :: LFunction
isPair [LDottedList _ _] = return $ LBool True
isPair [LList _] = return $ LBool True
isPair [_] = return $ LBool False
isPair vals = throwError $ NumArgs 1 vals
isLVector :: LFunction
isLVector [LVector _] = return $ LBool True
isLVector [_] = return $ LBool False
isLVector vals = throwError $ NumArgs 1 vals
isProcedure :: LFunction
isProcedure [LPrimitiveFunc _ _] = return $ LBool True
isProcedure [LIOFunc _ _] = return $ LBool True
isProcedure [LEnvFunc _ _] = return $ LBool True
isProcedure [LLambdaFunc {} ] = return $ LBool True
isProcedure [_] = return $ LBool False
isProcedure vals = throwError $ NumArgs 1 vals
------- Symbol Handling -------
stringToAtom :: LFunction
stringToAtom [LString s] = return $ LAtom s
stringToAtom vals = throwError $ NumArgs 1 vals
------- String Functions -------
makeString :: LFunction
makeString args = case args of
[LNumber n] -> return $ mkStr (fromIntegral n, ' ')
[LNumber n, LChar c] -> return $ mkStr (fromIntegral n, c)
_ -> throwError $ NumArgs 1 args
where mkStr :: (Int, Char) -> LispVal
mkStr = LString . uncurry replicate
substring :: LFunction
substring [LString str, LNumber start, LNumber end] =
let startI = fromIntegral $ toInteger start
sublen = fromIntegral (toInteger end) - startI
in return . LString . take sublen . drop startI $ str
substring args = throwError $ NumArgs 3 args
stringAppend :: LFunction
stringAppend = go
where go :: LFunction
go [] = return $ LString ""
go (LString s:strs) = (\(LString s') -> LString $ s ++ s') <$> go strs
go args = throwError $ InvalidArgs "Expected string list" args
listToString :: LFunction
listToString [LList vals] = LString <$> toString vals
where toString :: [LispVal] -> Evaled String
toString [] = return ""
toString (LChar c:lvs) = (c:) <$> toString lvs
toString args = throwError $ InvalidArgs "Expected a char list" args
listToString args = throwError $ InvalidArgs "Expected a char list" args
stringToList :: LFunction
stringToList [LString s] = return . LList $ map LChar s
stringToList args = throwError $ InvalidArgs "Expected string" args
stringLength :: LFunction
stringLength [LString s] = return . LNumber . SInt . toInteger $ length s
stringLength args = throwError $ NumArgs 1 args
stringRef :: LFunction
stringRef [LString s, LNumber n] =
let index = fromIntegral $ toInteger n
in if length s > index
then return . LChar $ s !! index
else throwError $ InvalidArgs "Index is longer than string" [LString s, LNumber n]
stringRef args = throwError $ NumArgs 2 args
------- Vector Functions -------
vector :: LFunction
vector args = return . LVector $ V.vector args
makeVector :: LFunction
makeVector [LNumber n] = return . LVector $ V.makeVector (fromIntegral n) (LBool False)
makeVector [LNumber n, val] = return . LVector $ V.makeVector (fromIntegral n) val
makeVector args =
throwError $ InvalidArgs "Expected vector length and optional fill value" args
vectorToList :: LFunction
vectorToList [LVector v] = return . LList $ V.vectorToList v
vectorToList args = throwError $ NumArgs 1 args
listToVector :: LFunction
listToVector [LList vals] = vector vals
listToVector args = throwError $ NumArgs 1 args
vectorLength :: LFunction
vectorLength [LVector v] = return . LNumber . SInt . toInteger $ V.vectorLength v
vectorLength args = throwError $ NumArgs 1 args
vectorRef :: LFunction
vectorRef [LVector v, LNumber n] = return . V.vectorRef v $ fromIntegral n
vectorRef args = throwError $ NumArgs 2 args
------- Utility Functions -------
allM :: (Foldable t, Monad m) => (a -> m Bool) -> t a -> m Bool
allM f = foldrM (\a b -> (b &&) <$> f a) True
| comraq/scheme-interpreter | src/PrimFunction.hs | bsd-3-clause | 11,434 | 0 | 15 | 3,360 | 3,743 | 1,980 | 1,763 | 240 | 6 |
{-# LANGUAGE DeriveDataTypeable, CPP #-}
{-# OPTIONS_GHC -w -fno-cse #-}
module CabalBounds.Args
( Args(..)
, get
, defaultDrop
, defaultUpdate
, defaultDump
, defaultLibs
, defaultFormat
) where
import System.Console.CmdArgs hiding (ignore)
import qualified System.Console.CmdArgs as CmdArgs
import CabalBounds.VersionComp (VersionComp(..))
#ifdef CABAL
import Data.Version (showVersion)
import Paths_cabal_bounds (version)
#endif
data Args = Drop { upper :: Bool
, library :: Bool
, executable :: [String]
, testSuite :: [String]
, benchmark :: [String]
, only :: [String]
, ignore :: [String]
, output :: Maybe FilePath
, cabalFile :: Maybe FilePath
}
| Update { lower :: Bool
, upper :: Bool
, lowerComp :: Maybe VersionComp
, upperComp :: Maybe VersionComp
, library :: Bool
, executable :: [String]
, testSuite :: [String]
, benchmark :: [String]
, only :: [String]
, ignore :: [String]
, missing :: Bool
, output :: Maybe FilePath
, fromFile :: FilePath
, haskellPlatform :: String
, cabalFile :: Maybe FilePath
, setupConfigFile :: Maybe FilePath
, planFile :: Maybe FilePath
}
| Dump { only :: [String]
, ignore :: [String]
, output :: Maybe FilePath
, cabalFiles :: [FilePath]
}
| Libs { only :: [String]
, ignore :: [String]
, output :: Maybe FilePath
, fromFile :: FilePath
, haskellPlatform :: String
, cabalFile :: Maybe FilePath
, setupConfigFile :: Maybe FilePath
, planFile :: Maybe FilePath
}
| Format { output :: Maybe FilePath
, cabalFile :: Maybe FilePath
}
deriving (Data, Typeable, Show, Eq)
get :: IO Args
get = cmdArgsRun . cmdArgsMode $ modes [dropArgs, updateArgs, dumpArgs, libsArgs, formatArgs]
&= program "cabal-bounds"
&= summary summaryInfo
&= help "A command line program for managing the bounds/versions of the dependencies in a cabal file."
&= helpArg [explicit, name "help", name "h"]
&= versionArg [explicit, name "version", name "v", summary versionInfo]
where
summaryInfo = ""
defaultDrop :: Args
defaultDrop = Drop
{ upper = def
, library = def
, executable = def
, testSuite = def
, benchmark = def
, only = def
, ignore = def
, output = def
, cabalFile = def
}
defaultUpdate :: Args
defaultUpdate = Update
{ lower = def
, upper = def
, lowerComp = def
, upperComp = def
, library = def
, executable = def
, testSuite = def
, benchmark = def
, only = def
, ignore = def
, missing = def
, output = def
, fromFile = def
, haskellPlatform = def
, cabalFile = def
, setupConfigFile = def
, planFile = def
}
defaultDump :: Args
defaultDump = Dump
{ only = def
, ignore = def
, output = def
, cabalFiles = def
}
defaultLibs :: Args
defaultLibs = Libs
{ only = def
, ignore = def
, output = def
, fromFile = def
, haskellPlatform = def
, cabalFile = def
, setupConfigFile = def
, planFile = def
}
defaultFormat :: Args
defaultFormat = Format
{ output = def
, cabalFile = def
}
dropArgs :: Args
dropArgs = Drop
{ upper = def &= explicit &= name "upper" &= name "U"
&= help "Only the upper bound is dropped, otherwise both - the lower and upper - bounds are dropped."
, library = def &= explicit &= name "library" &= name "l" &= help "Only the bounds of the library are modified."
, executable = def &= typ "NAME" &= help "Only the bounds of the executable are modified."
, testSuite = def &= typ "NAME" &= help "Only the bounds of the test suite are modified."
, benchmark = def &= typ "NAME" &= help "Only the bounds of the benchmark are modified."
, only = def &= explicit &= typ "DEPENDENCY" &= name "only" &= name "O"
&= help "Only the bounds of the dependency are modified."
, ignore = def &= explicit &= typ "DEPENDENCY" &= name "ignore" &= name "I"
&= help "This dependency is ignored, not modified in any way."
, output = def &= explicit &= typ "FILE" &= name "output" &= name "o"
&= help "Save modified cabal file to file, if empty, the cabal file is modified inplace."
, cabalFile = def &= CmdArgs.ignore
}
updateArgs :: Args
updateArgs = Update
{ lower = def &= explicit &= name "lower" &= name "L"
&= help "Only the lower bound is updated. The same as using '--lowercomp=minor'."
, upper = def &= explicit &= name "upper" &= name "U"
&= help "Only the upper bound is updated. The same as using '--uppercomp=major2'."
, lowerComp = def &= explicit &= name "lowercomp"
&= help "Only the lower bound is updated with the specified version component. (major1, major2 or minor)"
, upperComp = def &= explicit &= name "uppercomp"
&= help "Only the upper bound is updated with the specified version component. (major1, major2 or minor)"
, missing = def &= help "Only the dependencies having missing bounds are updated."
, fromFile = def &= typ "FILE" &= help "Update bounds by the library versions specified in the given file."
, haskellPlatform = def &= explicit &= typ "VERSION" &= name "haskell-platform"
&= help "Update bounds by the library versions of the specified haskell platform version"
, setupConfigFile = def &= CmdArgs.ignore
, planFile = def &= CmdArgs.ignore
}
dumpArgs :: Args
dumpArgs = Dump
{ output = def &= explicit &= typ "FILE" &= name "output" &= name "o"
&= help "Save libraries with lower bounds to file, if empty, then it's written to stdout."
, cabalFiles = def &= args &= typ "CABAL-FILE"
}
libsArgs :: Args
libsArgs = Libs
{ output = def &= explicit &= typ "FILE" &= name "output" &= name "o"
&= help "Save the libraries cabal-bounds would use for its update run to file, if empty, then they're written to stdout."
, fromFile = def &= typ "FILE" &= help "Use the library versions specified in the given file."
, haskellPlatform = def &= explicit &= typ "VERSION" &= name "haskell-platform"
&= help "Use the library versions of the specified haskell platform version"
}
formatArgs :: Args
formatArgs = Format
{ output = def &= explicit &= typ "FILE" &= name "output" &= name "o"
&= help "Save the formated cabal file to file, if empty, the cabal file is modified inplace."
, cabalFile = def &= CmdArgs.ignore
}
versionInfo :: String
versionInfo =
#ifdef CABAL
"cabal-bounds version " ++ showVersion version
#else
"cabal-bounds version unknown (not built with cabal)"
#endif
| dan-t/cabal-bounds | lib/CabalBounds/Args.hs | bsd-3-clause | 7,994 | 0 | 12 | 3,034 | 1,605 | 911 | 694 | 163 | 1 |
module Main where
import VisPar
import Control.DeepSeq
fib :: Int -> Par Int
fib n | n < 2 = return 1
| otherwise = do
leftVar <- spawnNamed ("fib " ++ show (n - 1)) $ fib (n - 1)
rightVar <- spawnNamed ("fib " ++ show (n - 2)) $ fib (n - 2)
left <- get leftVar
right <- get rightVar
return $ left + right
fib' :: Int -> Par Int
fib' n | n < 2 = return 1
| otherwise = do
leftVar <- spawnNamed ("fib " ++ show (n - 1)) $ fib' (n - 1)
right <- withLocalName ("fib " ++ show (n - 2)) $ fib' (n - 2)
left <- get leftVar
return $ left + right
main :: IO ()
main = do
print $ runPar (fib 5)
g <- visPar Complete "fib 5" (fib 5)
g' <- visPar Compact "fib 5" (fib 5)
saveGraphPdf Vertical "fib.graph.pdf" g
saveGraphPdf Vertical "fib.compact.graph.pdf" g'
| MaximilianAlgehed/VisPar | examples/Fib.hs | bsd-3-clause | 897 | 0 | 15 | 311 | 403 | 189 | 214 | 25 | 1 |
module Examples.Test.Benchmark(main) where
import Development.Shake
import Examples.Util
import Data.List
import Development.Shake.FilePath
-- | Given a breadth and depth come up with a set of build files
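-- For example, arguments like @breadth=3 depth=2@ request files "0.1".."0.3",
-- each of which needs "1.1".."1.3", and so on down to the given depth.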
main = shaken (\_ _ -> return ()) $ \args obj -> do
let get ty = head $ [read $ drop (length ty + 1) a | a <- args, (ty ++ "=") `isPrefixOf` a] ++
error ("Could not find argument, expected " ++ ty ++ "=Number")
depth = get "depth"
breadth = get "breadth"
want [obj $ "0." ++ show i | i <- [1..breadth]]
obj "*" *> \out -> do
let d = read $ takeBaseName out
need [obj $ show (d + 1) ++ "." ++ show i | d < depth, i <- [1..breadth]]
writeFile' out ""
| nh2/shake | Examples/Test/Benchmark.hs | bsd-3-clause | 735 | 0 | 20 | 209 | 292 | 149 | 143 | 15 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
-- |
-- Module: $HEADER$
-- Description: TODO
-- Copyright: (c) 2016 Peter Trško
-- License: BSD3
--
-- Stability: experimental
-- Portability: GHC specific language extensions.
--
-- TODO
module Data.DHT.DKS.Type.Message.NewSuccessorAck
( NewSuccessorAck(..)
)
where
import Data.Eq (Eq)
import Data.Typeable (Typeable)
import GHC.Generics (Generic)
import Text.Show (Show)
import Data.Default.Class (Default(def))
import Data.OverloadedRecords.TH (overloadedRecord)
import Data.DHT.DKS.Type.Hash (DksHash)
data NewSuccessorAck = NewSuccessorAck
{ _requester :: !DksHash
, _oldSuccessor :: !DksHash
, _successor :: !DksHash
}
deriving (Eq, Generic, Show, Typeable)
overloadedRecord def ''NewSuccessorAck
| FPBrno/dht-dks | src/Data/DHT/DKS/Type/Message/NewSuccessorAck.hs | bsd-3-clause | 1,024 | 0 | 9 | 167 | 175 | 113 | 62 | 29 | 0 |
{-# LANGUAGE OverloadedStrings, ScopedTypeVariables #-}
module Data.Yahoo where
import Network.Wreq
import qualified Network.HTTP.Client as HC
import Control.Lens
import Data.Text
import qualified Data.Text as T
import qualified Data.ByteString.Lazy as BL
import qualified Data.ByteString.Char8 as C8
import Data.Time.Calendar
import Data.Time
import Data.Csv
import qualified Data.Vector as V
import Control.Monad (mzero)
import qualified Control.Exception as E
import Data.Typeable
import Data.Aeson.Lens (_String, key)
import Control.Monad.Except
import Control.Error
data YahooQuote = YahooQuote { yDate :: !Day,
yOpen :: !Double,
yHigh :: !Double,
yLow :: !Double,
yClose :: !Double,
yVolume :: !Integer,
yAdjClose :: !Double } deriving (Show, Eq)
type Symbol = String
data Interval = Daily | Weekly | Monthly deriving (Eq, Ord)
instance Show Interval where
show Daily = "d"
show Weekly = "w"
show Monthly = "m"
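-- Yahoo's CSV endpoint expects the interval as "d", "w" or "m" in the "g"
-- query parameter (see getCSV below), hence this Show instance.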
instance FromRecord YahooQuote where
parseRecord v
| V.length v == 7 = YahooQuote <$> v .! 0
<*> v .! 1
<*> v .! 2
<*> v .! 3
<*> v .! 4
<*> v .! 5
<*> v .! 6
| otherwise = mzero
instance FromField Day where
parseField d = pure $ parseTimeOrError True defaultTimeLocale "%Y-%m-%d" (C8.unpack d)
data YahooApiError = NetworkError HC.HttpException
| ParseError String deriving (Show)
type ErrorM = ExceptT YahooApiError IO
-- | Request historical quote data from the Yahoo server and return it as a lazy ByteString (CSV formatted).
--
getCSV :: Symbol
-- ^ The stock ticker symbol
-> Interval
-- ^ The quote interval i.e. Daily, Weekly or Monthly
-> Day
-- ^ The last day in the range of data to be requested
-> Integer
-- ^ How many days of data to request
-> ErrorM BL.ByteString --ExceptT YahooApiError IO BL.ByteString
getCSV sym intvl endday numdays = do
(r :: Either HC.HttpException (Response BL.ByteString)) <- liftIO (E.try (getWith opts baseUrl))
case r of
Right r' -> return (r' ^. responseBody)
Left e -> throwError (NetworkError e)
where
baseUrl = "http://real-chart.finance.yahoo.com/table.csv"
stday = addDays (-(numdays-1)) endday
(f,d,e) = toGregorian endday
(c,a,b) = toGregorian stday
opts = defaults &
param "a" .~ [T.pack . show $ a-1] &
param "b" .~ [T.pack $ show b] &
param "c" .~ [T.pack $ show c] &
param "d" .~ [T.pack . show $ d-1] &
param "e" .~ [T.pack $ show e] &
param "f" .~ [T.pack $ show f] &
param "ignore" .~ [".csv"] &
param "s" .~ [T.pack sym] &
param "g" .~ [T.pack $ show intvl]
-- | Convert a csv ByteString to V.Vector YahooQuote.
toQuotes :: BL.ByteString -> ErrorM (V.Vector YahooQuote)
toQuotes bs = do
let qs = decode HasHeader bs
case qs of
Right v -> return v
Left s -> throwError (ParseError s)
-- | Request historical quote data from the Yahoo server and return the parsed quotes.
-- Network failures are reported as 'NetworkError' and parse failures as 'ParseError'
-- within the 'ErrorM' monad.
getQuotes :: Symbol
-- ^ The stock ticker symbol
-> Interval
-- ^ The quote interval i.e. Daily, Weekly or Monthly
-> Day
-- ^ The last day in the range of data to be requested
-> Integer
-- ^ How many days of data to request
-> ErrorM (V.Vector YahooQuote)
getQuotes sym intvl endday numdays = do
bs <- getCSV sym intvl endday numdays
toQuotes bs
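-- A usage sketch (not part of the original module); it assumes the caller runs
-- the 'ErrorM' computation with 'runExceptT':
--
-- > main = do
-- >   today <- utctDay <$> getCurrentTime
-- >   r <- runExceptT (getQuotes "YHOO" Daily today 30)
-- >   either print (V.mapM_ print) r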
{--
-- | Request historical quote data from Yahoo server and return as a bytestring (csv formatted).
-- Throws an HttpException if the request fails.
getCSV :: Symbol
-- ^ The stock ticker symbol
-> Interval
-- ^ The quote interval i.e. Daily, Weekly or Monthly
-> Day
-- ^ The last day in the range of data to be requested
-> Integer
-- ^ How many days of data to request
-> IO BL.ByteString
getCSV sym intvl endday numdays = do
r <- getWith opts baseUrl -- :: IO (Either HC.HttpException (Response ByteString))
return $ r ^. responseBody
where
baseUrl = "http://real-chart.finance.yahoo.com/table.csv"
stday = addDays (-(numdays-1)) endday
(f,d,e) = toGregorian endday
(c,a,b) = toGregorian stday
opts = defaults &
param "a" .~ [T.pack . show $ a-1] &
param "b" .~ [T.pack $ show b] &
param "c" .~ [T.pack $ show c] &
param "d" .~ [T.pack . show $ d-1] &
param "e" .~ [T.pack $ show e] &
param "f" .~ [T.pack $ show f] &
param "ignore" .~ [".csv"] &
param "s" .~ [T.pack sym] &
param "g" .~ [T.pack $ show intvl]
toQuotes :: BL.ByteString -> Either String (V.Vector YahooQuote)
toQuotes = decode HasHeader
getQuotes :: Symbol
-- ^ The stock ticker symbol
-> Interval
-- ^ The quote interval i.e. Daily, Weekly or Monthly
-> Day
-- ^ The last day in the range of data to be requested
-> Integer
-- ^ How many days of data to request
-> IO (Either String (V.Vector YahooQuote)) --(Symbol, Either String (V.Vector YahooQuote))
getQuotes sym intvl endday numdays = do
bs <- getCSV sym intvl endday numdays
return $ decode HasHeader bs
getQuotes :: Symbol
-- ^ The stock ticker symbol
-> Interval
-- ^ The quote interval i.e. Daily, Weekly or Monthly
-> Day
-- ^ The last day in the range of data to be requested
-> Integer
-- ^ How many days of data to request
-> IO (Either String (V.Vector YahooQuote)) --(Symbol, Either String (V.Vector YahooQuote))
getQuotes sym intvl endday numdays = do
r <- runExceptT $ do
bs <- syncIO (getCSV sym intvl endday numdays)
let qs = decode HasHeader r
case qs of
Right r -> return r
Left e -> return $ Left ("Network Error: " ++ show e)
return r
-- | Request historical quote data from the Yahoo server and return a Right (Vector YahooQuote) if the parse
-- is successful, otherwsie returns Left String. Will throw an "HttpException" if the server request fails.
getQuotes' :: Symbol
-- ^ The stock ticker symbol
-> Interval
-- ^ The quote interval i.e. Daily, Weekly or Monthly
-> Day
-- ^ The last day in the range of data to be requested
-> Integer
-- ^ How many days of data to request
-> ErrorM (V.Vector YahooQuote)
getQuotes' sym intvl endday numdays = do
bs <- getCSV' sym intvl endday numdays
let qs = decode HasHeader bs
case qs of
Right v -> return v
Left s -> throwError (ParseError s)
--}
| tjroth/stock-quotes | src/Data/Yahoo.hs | bsd-3-clause | 7,132 | 0 | 28 | 2,223 | 997 | 529 | 468 | 99 | 2 |
--------------------------------------------------------------------------------
module Hakyll.Web.Template.Context
( Context (..)
, mapContext
, field
, constField
, functionField
, defaultContext
, bodyField
, metadataField
, urlField
, pathField
, titleField
, dateField
, dateFieldWith
, getItemUTC
, modificationTimeField
, modificationTimeFieldWith
, missingField
) where
--------------------------------------------------------------------------------
import Control.Applicative (Alternative (..), (<$>))
import Control.Monad (msum)
import Data.List (intercalate)
import qualified Data.Map as M
import Data.Monoid (Monoid (..))
import Data.Time.Clock (UTCTime (..))
import Data.Time.Format (formatTime, parseTime)
import System.FilePath (takeBaseName, takeFileName)
import System.Locale (TimeLocale, defaultTimeLocale)
--------------------------------------------------------------------------------
import Hakyll.Core.Compiler
import Hakyll.Core.Compiler.Internal
import Hakyll.Core.Identifier
import Hakyll.Core.Item
import Hakyll.Core.Metadata
import Hakyll.Core.Provider
import Hakyll.Core.Util.String (splitAll)
import Hakyll.Web.Html
--------------------------------------------------------------------------------
newtype Context a = Context
{ unContext :: String -> Item a -> Compiler String
}
--------------------------------------------------------------------------------
instance Monoid (Context a) where
mempty = missingField
mappend (Context f) (Context g) = Context $ \k i -> f k i <|> g k i
--------------------------------------------------------------------------------
mapContext :: (String -> String) -> Context a -> Context a
mapContext f (Context g) = Context $ \k i -> f <$> g k i
--------------------------------------------------------------------------------
field :: String -> (Item a -> Compiler String) -> Context a
field key value = Context $ \k i -> if k == key then value i else empty
--------------------------------------------------------------------------------
constField :: String -> String -> Context a
constField key = field key . const . return
--------------------------------------------------------------------------------
functionField :: String -> ([String] -> Item a -> Compiler String) -> Context a
functionField name value = Context $ \k i -> case words k of
[] -> empty
(n : args)
| n == name -> value args i
| otherwise -> empty
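-- A sketch of a user-defined function field (the name "echo" is hypothetical):
--
-- > functionField "echo" $ \args _ -> return (unwords args)
--
-- matches template calls whose first word is @echo@ and passes the remaining
-- words as arguments.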
--------------------------------------------------------------------------------
defaultContext :: Context String
defaultContext =
bodyField "body" `mappend`
metadataField `mappend`
urlField "url" `mappend`
pathField "path" `mappend`
titleField "title" `mappend`
missingField
--------------------------------------------------------------------------------
bodyField :: String -> Context String
bodyField key = field key $ return . itemBody
--------------------------------------------------------------------------------
-- | Map any field to its metadata value, if present
metadataField :: Context String
metadataField = Context $ \k i -> do
value <- getMetadataField (itemIdentifier i) k
maybe empty return value
--------------------------------------------------------------------------------
-- | Absolute url to the resulting item
urlField :: String -> Context a
urlField key = field key $
fmap (maybe empty toUrl) . getRoute . itemIdentifier
--------------------------------------------------------------------------------
-- | Filepath of the underlying file of the item
pathField :: String -> Context a
pathField key = field key $ return . toFilePath . itemIdentifier
--------------------------------------------------------------------------------
-- | This title field takes the basename of the underlying file by default
titleField :: String -> Context a
titleField key = mapContext takeBaseName $ pathField key
--------------------------------------------------------------------------------
-- | When the metadata has a field called @published@ in one of the
-- following formats then this function can render the date.
--
-- * @Sun, 01 Feb 2000 13:00:00 UT@ (RSS date format)
--
-- * @2000-02-01T13:00:00Z@ (Atom date format)
--
-- * @February 1, 2000 1:00 PM@ (PM is usually uppercase)
--
-- * @February 1, 2000@ (assumes 12:00 AM for the time)
--
-- Alternatively, when the metadata has a field called @path@ in a
-- @folder/yyyy-mm-dd-title.extension@ format (the convention for pages)
-- and no @published@ metadata field set, this function can render
-- the date.
--
-- > dateField "date" "%B %e, %Y"
--
-- Will render something like @February 1, 2000@.
--
dateField :: String -- ^ Key in which the rendered date should be placed
-> String -- ^ Format to use on the date
-> Context a -- ^ Resulting context
dateField = dateFieldWith defaultTimeLocale
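-- For example, @dateField "date" "%B %e, %Y"@ makes @$date$@ available in
-- templates, rendered like @February 1, 2000@.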
--------------------------------------------------------------------------------
-- | This is an extended version of 'dateField' that allows you to
-- specify a time locale that is used for outputting the date. For more
-- details, see 'dateField'.
dateFieldWith :: TimeLocale -- ^ Output time locale
-> String -- ^ Destination key
-> String -- ^ Format to use on the date
-> Context a -- ^ Resulting context
dateFieldWith locale key format = field key $ \i -> do
time <- getItemUTC locale $ itemIdentifier i
return $ formatTime locale format time
--------------------------------------------------------------------------------
-- | Parser to try to extract and parse the time from the @published@
-- field or from the filename. See 'dateField' for more information.
-- Exported for user convenience.
getItemUTC :: MonadMetadata m
=> TimeLocale -- ^ Output time locale
-> Identifier -- ^ Input page
-> m UTCTime -- ^ Parsed UTCTime
getItemUTC locale id' = do
metadata <- getMetadata id'
let tryField k fmt = M.lookup k metadata >>= parseTime' fmt
fn = takeFileName $ toFilePath id'
maybe empty' return $ msum $
[tryField "published" fmt | fmt <- formats] ++
[tryField "date" fmt | fmt <- formats] ++
[parseTime' "%Y-%m-%d" $ intercalate "-" $ take 3 $ splitAll "-" fn]
where
empty' = fail $ "Hakyll.Web.Template.Context.getItemUTC: " ++
"could not parse time for " ++ show id'
parseTime' = parseTime locale
formats =
[ "%a, %d %b %Y %H:%M:%S UT"
, "%Y-%m-%dT%H:%M:%SZ"
, "%Y-%m-%d %H:%M:%S"
, "%Y-%m-%d"
, "%B %e, %Y %l:%M %p"
, "%B %e, %Y"
]
--------------------------------------------------------------------------------
modificationTimeField :: String -- ^ Key
-> String -- ^ Format
-> Context a -- ^ Resuting context
modificationTimeField = modificationTimeFieldWith defaultTimeLocale
--------------------------------------------------------------------------------
modificationTimeFieldWith :: TimeLocale -- ^ Time output locale
-> String -- ^ Key
-> String -- ^ Format
-> Context a -- ^ Resulting context
modificationTimeFieldWith locale key fmt = field key $ \i -> do
provider <- compilerProvider <$> compilerAsk
let mtime = resourceModificationTime provider $ itemIdentifier i
return $ formatTime locale fmt mtime
--------------------------------------------------------------------------------
missingField :: Context a
missingField = Context $ \k i -> compilerThrow $
"Missing field $" ++ k ++ "$ in context for item " ++
show (itemIdentifier i)
| bergmark/hakyll | src/Hakyll/Web/Template/Context.hs | bsd-3-clause | 8,287 | 0 | 13 | 1,979 | 1,399 | 764 | 635 | 122 | 2 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TupleSections #-}
-- |
module Main where
import Control.Concurrent (threadDelay)
import Control.Monad (forM_, when)
import Control.Monad.Loops (untilM_)
import Data.Char (isUpper, toLower)
import Data.Default
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Lazy.IO as TL
import Formatting
import Options.Applicative
import System.IO (hFlush, stdout)
import Network.Libtorrent
data Config = Config {
_source :: !Text
}
deriving Show
instance Default Config where
def = Config {
_source = ""
}
main :: IO ()
main = do
Config{..} <- execParser opts
ses <- newSession
listenOn ses (6881, 6891) Nothing
infoSrc <- TorrentInfoSrc <$> newTorrentInfo _source
atp <- newAddTorrentParams infoSrc
setTorrentSavePath atp "."
th <- addTorrent ses atp
ts' <- torrentStatus th Nothing
name <- getName ts'
TL.putStrLn $ format ("starting " % stext) name
flip untilM_ (torrentStatus th Nothing >>= getIsSeeding) $ do
ts <- torrentStatus th Nothing
dr::Double <- fromIntegral <$> getDownloadRate ts
ur::Double <- fromIntegral <$> getUploadRate ts
np <- getNumPeers ts
st <- getState ts
pg <- getProgress ts
let out = format ((fixed 2) % "% complete (down: "
% (fixed 1) % "kB/s up: "
% (fixed 1) % "kB/s peers: "
% int % ") " % string)
(pg * 100) (dr / 1000) (ur / 1000) np (camelToSpaces $ show st)
TL.putStrLn out
alerts <- popAlerts ses >>= toList
forM_ alerts $ \a -> do
(BitFlags acs) <- alertCategory a
when (ErrorNotification `elem` acs) $
print a
hFlush stdout
threadDelay 1000000
putStrLn "complete"
opts :: ParserInfo Config
opts = info (helper <*> optParser)
( fullDesc
<> progDesc "Simple BitTorrent client"
<> header "simple-client - simple bittorrent client")
optParser :: Parser Config
optParser = Config
<$> (T.pack <$> (argument str
(metavar "TORRENT-INFO-FILE"
<> help "torrent file name")))
camelToSpaces :: String -> String
camelToSpaces "" = ""
camelToSpaces (c:rst) = toLower c:go rst
where
go [] = []
go (x:rest)
| isUpper x = ' ':toLower x:go rest
| otherwise = x:go rest
| eryx67/haskell-libtorrent | examples/SimpleClient.hs | bsd-3-clause | 2,714 | 0 | 24 | 855 | 798 | 399 | 399 | 77 | 2 |
module Main
( main
) where
-------------------------------------------------------------------------------
import qualified Data.ByteString as BS
import Test.QuickCheck.Instances ()
import Test.Tasty
import Test.Tasty.QuickCheck
-------------------------------------------------------------------------------
import Codec.Compression.LZF
-------------------------------------------------------------------------------
main :: IO ()
main = defaultMain tests
-------------------------------------------------------------------------------
tests :: TestTree
tests = testGroup "lzf-bytestring"
[
testProperty "compress/decompress cycle with growing buffer" $ \bs (Positive bufSize) ->
let res = decompress (GrowBufferBytes bufSize) (compress bs)
in res === Right bs
, testProperty "compress/decompress cycle with correct known buffer size" $ \bs ->
let exactBufSize = BS.length bs
res = decompress (KnownUncompressedSize exactBufSize) (compress bs)
in res === Right bs
]
| MichaelXavier/lzf-bytestring | test/Main.hs | bsd-3-clause | 1,078 | 0 | 15 | 205 | 198 | 107 | 91 | 19 | 1 |
{-# LANGUAGE TypeFamilies, TemplateHaskell #-}
module Control.Monad.Voids where
import Control.Monad (void)
import Language.Haskell.TH
mkFmap :: Int -> ExpQ
mkFmap 0 = varE 'id
mkFmap 1 = varE 'fmap
mkFmap n = uInfixE (varE 'fmap) (varE '(.)) (mkFmap $ n - 1)
mkVoid :: Int -> ExpQ
mkVoid i = mkFmap i `appE` (appE (varE 'const) (tupE []))
void0 :: Functor m => m b -> m ()
void0 x = fmap (const ()) x
{-# INLINE void0 #-}
void1 :: Functor m => (a1 -> m b) -> a1 -> m ()
void1 x = \a1 -> fmap (const ()) (x a1)
{-# INLINE void1 #-}
void2 :: Functor m => (a1 -> a2 -> m b) -> a1 -> a2 -> m ()
void2 x = \a1 a2 -> fmap (const ()) (x a1 a2)
{-# INLINE void2 #-}
void3 :: Functor m => (a1 -> a2 -> a3 -> m b) -> a1 -> a2 -> a3 -> m ()
void3 x = \a1 a2 a3 -> fmap (const ()) (x a1 a2 a3)
{-# INLINE void3 #-}
void4 :: Functor m => (a1 -> a2 -> a3 -> a4 -> m b) -> a1 -> a2 -> a3 -> a4 -> m ()
void4 x = \a1 a2 a3 a4 -> fmap (const ()) (x a1 a2 a3 a4)
{-# INLINE void4 #-}
void5 :: Functor m => (a1 -> a2 -> a3 -> a4 -> a5 -> m b) -> a1 -> a2 -> a3 -> a4 -> a5 -> m ()
void5 x = \a1 a2 a3 a4 a5 -> fmap (const ()) (x a1 a2 a3 a4 a5)
{-# INLINE void5 #-}
void6 :: Functor m => (a1 -> a2 -> a3 -> a4 -> a5 -> a6 -> m b) -> a1 -> a2 -> a3 -> a4 -> a5 -> a6 -> m ()
void6 x = \a1 a2 a3 a4 a5 a6 -> fmap (const ()) (x a1 a2 a3 a4 a5 a6)
{-# INLINE void6 #-}
void7 :: Functor m => (a1 -> a2 -> a3 -> a4 -> a5 -> a6 -> a7 -> m b) -> a1 -> a2 -> a3 -> a4 -> a5 -> a6 -> a7 -> m ()
void7 x = \a1 a2 a3 a4 a5 a6 a7 -> fmap (const ()) (x a1 a2 a3 a4 a5 a6 a7)
{-# INLINE void7 #-}
void8 :: Functor m => (a1 -> a2 -> a3 -> a4 -> a5 -> a6 -> a7 -> a8 -> m b) -> a1 -> a2 -> a3 -> a4 -> a5 -> a6 -> a7 -> a8 -> m ()
void8 x = \a1 a2 a3 a4 a5 a6 a7 a8 -> fmap (const ()) (x a1 a2 a3 a4 a5 a6 a7 a8)
{-# INLINE void8 #-}
void9 :: Functor m => (a1 -> a2 -> a3 -> a4 -> a5 -> a6 -> a7 -> a8 -> a9 -> m b) -> a1 -> a2 -> a3 -> a4 -> a5 -> a6 -> a7 -> a8 -> a9 -> m ()
void9 x = \a1 a2 a3 a4 a5 a6 a7 a8 a9 -> fmap (const ()) (x a1 a2 a3 a4 a5 a6 a7 a8 a9)
{-# INLINE void9 #-}
void10 :: Functor m => (a1 -> a2 -> a3 -> a4 -> a5 -> a6 -> a7 -> a8 -> a9 -> a10 -> m b) -> a1 -> a2 -> a3 -> a4 -> a5 -> a6 -> a7 -> a8 -> a9 -> a10 -> m ()
void10 x = \a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 -> fmap (const ()) (x a1 a2 a3 a4 a5 a6 a7 a8 a9 a10)
{-# INLINE void10 #-} | seagull-kamome/Voids | Control/Monad/Voids.hs | bsd-3-clause | 2,343 | 0 | 18 | 641 | 1,414 | 714 | 700 | 43 | 1 |
#!/usr/bin/runhaskell
module Main where
import Data.List
import Distribution.Simple
import Distribution.Simple.LocalBuildInfo
import Distribution.Simple.PackageIndex
import Distribution.Simple.Setup
import Distribution.Simple.Utils (rawSystemExit)
import qualified Distribution.InstalledPackageInfo as I
import Distribution.PackageDescription
import System.Directory
import System.FilePath
import Control.Monad
import Data.Maybe
import qualified Distribution.ModuleName as ModuleName
import Distribution.Simple.BuildPaths
import Distribution.Simple.Program
import Distribution.Simple.Program.Ld
import Distribution.Simple.Register
import Distribution.Simple.Utils
import Distribution.Text
import Distribution.Version
import Distribution.Verbosity
import Text.Regex.Posix
import System.FilePath
main :: IO ()
main = defaultMainWithHooks simpleUserHooks {
confHook = confWithQt, buildHook = buildWithQt,
copyHook = copyWithQt, instHook = instWithQt,
regHook = regWithQt}
confWithQt :: (GenericPackageDescription, HookedBuildInfo) -> ConfigFlags ->
IO LocalBuildInfo
confWithQt tuple flags = do
lbi <- confHook simpleUserHooks tuple flags
(_,_,db') <- requireProgramVersion (fromFlag $ configVerbosity flags)
mocProgram qtVersionRange (withPrograms lbi)
return $ lbi {withPrograms = db'}
buildWithQt ::
PackageDescription -> LocalBuildInfo -> UserHooks -> BuildFlags -> IO ()
buildWithQt pkgDesc lbi hooks flags = do
let verb = fromFlag $ buildVerbosity flags
libs' <- maybeMapM (\lib -> fmap (\lib' ->
lib {libBuildInfo = lib'}) $ fixQtBuild verb lbi $ libBuildInfo lib) $
library pkgDesc
exes' <- mapM (\exe -> fmap (\exe' ->
exe {buildInfo = exe'}) $ fixQtBuild verb lbi $ buildInfo exe) $
executables pkgDesc
let pkgDesc' = pkgDesc {library = libs', executables = exes'}
lbi' = if (needsGHCiFix pkgDesc lbi)
then lbi {withGHCiLib = False, splitObjs = False} else lbi
buildHook simpleUserHooks pkgDesc' lbi' hooks flags
case libs' of
Just lib -> when (needsGHCiFix pkgDesc lbi) $
buildGHCiFix verb pkgDesc lbi lib
Nothing -> return ()
-- Run Qt's moc over every header listed in the custom "x-moc-headers" field
-- and add the generated C++ files to the component's C sources.
fixQtBuild :: Verbosity -> LocalBuildInfo -> BuildInfo -> IO BuildInfo
fixQtBuild verb lbi build = do
let moc = fromJust $ lookupProgram mocProgram $ withPrograms lbi
incs = words $ fromMaybe "" $ lookup "x-moc-headers" $
customFieldsBI build
bDir = buildDir lbi
cpps = map (\inc ->
bDir </> ("moc_" ++ (takeBaseName inc) ++ ".cpp")) incs
createDirectoryIfMissingVerbose verb True bDir
mapM_ (\(i,o) -> runProgram verb moc [i,"-o",o]) $ zip incs cpps
return build {cSources = cpps ++ cSources build}
-- The GHCi workaround is only needed when GHCi library objects are requested
-- and the library sets the custom "x-separate-cbits" field to a true value.
needsGHCiFix :: PackageDescription -> LocalBuildInfo -> Bool
needsGHCiFix pkgDesc lbi =
(withGHCiLib lbi &&) $ fromMaybe False $ do
lib <- library pkgDesc
str <- lookup "x-separate-cbits" $ customFieldsBI $ libBuildInfo lib
simpleParse str
mkGHCiFixLibName :: PackageDescription -> String
mkGHCiFixLibName pkgDesc =
("libcbits-" ++ display (packageId pkgDesc)) <.> dllExtension
buildGHCiFix ::
Verbosity -> PackageDescription -> LocalBuildInfo -> Library -> IO ()
buildGHCiFix verb pkgDesc lbi lib = do
let bDir = buildDir lbi
ms = map ModuleName.toFilePath $ libModules lib
hsObjs = map ((bDir </>) . (<.> "o")) ms
stubObjs <- fmap catMaybes $
mapM (findFileWithExtension ["o"] [bDir]) $ map (++ "_stub") ms
(ld,_) <- requireProgram verb ldProgram (withPrograms lbi)
combineObjectFiles verb ld
(bDir </> (("HS" ++) $ display $ packageId pkgDesc) <.> ".o")
(stubObjs ++ hsObjs)
(ghc,_) <- requireProgram verb ghcProgram (withPrograms lbi)
let bi = libBuildInfo lib
runProgram verb ghc (
["-shared","-fPIC","-o",bDir </> (mkGHCiFixLibName pkgDesc)] ++
(map ("-optc" ++) $ ccOptions bi) ++
(map ("-optc" ++) $ cppOptions bi) ++
(map ("-I" ++) $ includeDirs bi) ++
(ldOptions bi) ++ (map ("-l" ++) $ extraLibs bi) ++
(map ("-L"++) $ extraLibDirs bi) ++
(cSources bi))
return ()
mocProgram :: Program
mocProgram = Program {
programName = "moc",
programFindLocation = flip findProgramLocation "moc",
programFindVersion = \verbosity path -> do
(_,line,_) <- rawSystemStdInOut verbosity path ["-v"] Nothing False
return $ case line =~ "\\(Qt ([0-9.]+)\\)" of
(_:ver:_):_ -> simpleParse ver
_ -> Nothing,
programPostConf = \_ _ -> return []
}
qtVersionRange :: VersionRange
qtVersionRange = orLaterVersion $ Version [4,7] []
copyWithQt ::
PackageDescription -> LocalBuildInfo -> UserHooks -> CopyFlags -> IO ()
copyWithQt pkgDesc lbi hooks flags = do
copyHook simpleUserHooks pkgDesc lbi hooks flags
let verb = fromFlag $ copyVerbosity flags
dest = fromFlag $ copyDest flags
bDir = buildDir lbi
libDir = libdir $ absoluteInstallDirs pkgDesc lbi dest
file = mkGHCiFixLibName pkgDesc
when (needsGHCiFix pkgDesc lbi) $
installOrdinaryFile verb (bDir </> file) (libDir </> file)
regWithQt ::
PackageDescription -> LocalBuildInfo -> UserHooks -> RegisterFlags -> IO ()
regWithQt pkg@PackageDescription { library = Just lib }
lbi@LocalBuildInfo { libraryConfig = Just clbi } hooks flags = do
let verb = fromFlag $ regVerbosity flags
inplace = fromFlag $ regInPlace flags
dist = fromFlag $ regDistPref flags
pkgDb = withPackageDB lbi
instPkgInfo <- generateRegistrationInfo
verb pkg lib lbi clbi inplace dist
let instPkgInfo' = if (needsGHCiFix pkg lbi)
then instPkgInfo {I.extraGHCiLibraries =
(drop 3 $ takeBaseName $ mkGHCiFixLibName pkg) :
I.extraGHCiLibraries instPkgInfo}
else instPkgInfo
  case flagToMaybe $ regGenPkgConf flags of
    Just mRegFile -> do
      -- The flag carries a Maybe FilePath; fall back to "<package-id>.conf".
      let regFile = fromMaybe (display (packageId pkg) <.> "conf") mRegFile
      writeUTF8File regFile $ I.showInstalledPackageInfo instPkgInfo'
_ | fromFlag (regGenScript flags) ->
die "Registration scripts are not implemented."
| otherwise ->
registerPackage verb instPkgInfo' pkg lbi inplace pkgDb
regWithQt pkgDesc _ _ flags =
setupMessage (fromFlag $ regVerbosity flags)
"Package contains no library to register:" (packageId pkgDesc)
instWithQt ::
PackageDescription -> LocalBuildInfo -> UserHooks -> InstallFlags -> IO ()
instWithQt pkgDesc lbi hooks flags = do
let copyFlags = defaultCopyFlags {
copyDistPref = installDistPref flags,
copyVerbosity = installVerbosity flags
}
regFlags = defaultRegisterFlags {
regDistPref = installDistPref flags,
regInPlace = installInPlace flags,
regPackageDB = installPackageDB flags,
regVerbosity = installVerbosity flags
}
copyWithQt pkgDesc lbi hooks copyFlags
when (hasLibs pkgDesc) $ regWithQt pkgDesc lbi hooks regFlags
maybeMapM :: (Monad m) => (a -> m b) -> (Maybe a) -> m (Maybe b)
maybeMapM f = maybe (return Nothing) $ liftM Just . f
| travitch/hsqml | Setup.hs | bsd-3-clause | 7,046 | 0 | 20 | 1,458 | 2,231 | 1,141 | 1,090 | 162 | 3 |
module Uni where
import {-# SOURCE #-} EqUni ()
import {-# SOURCE #-} OrdUni ()
data Uni = Uni | phischu/fragnix | tests/quick/ImplicitInstances/Uni.hs | bsd-3-clause | 96 | 0 | 5 | 20 | 25 | 17 | 8 | 4 | 0 |
-- | Exports simple compilers to just copy files
--
{-# LANGUAGE GeneralizedNewtypeDeriving, DeriveDataTypeable #-}
module Hakyll.Core.Writable.CopyFile
( CopyFile (..)
, copyFileCompiler
) where
import Control.Arrow ((>>^))
import System.Directory (copyFile)
import Data.Typeable (Typeable)
import Data.Binary (Binary)
import Hakyll.Core.Resource
import Hakyll.Core.Writable
import Hakyll.Core.Compiler
import Hakyll.Core.Identifier
-- | Newtype construct around 'FilePath' which will copy the file directly
--
newtype CopyFile = CopyFile {unCopyFile :: FilePath}
deriving (Show, Eq, Ord, Binary, Typeable)
instance Writable CopyFile where
write dst (CopyFile src) = copyFile src dst
copyFileCompiler :: Compiler Resource CopyFile
copyFileCompiler = getIdentifier >>^ CopyFile . toFilePath
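-- A usage sketch: the rules-DSL names ('hakyll', 'match', 'route', 'idRoute')
-- are assumed from Hakyll's site API, are not defined in this module, and may
-- differ slightly between Hakyll versions. The intent is to copy static files
-- into the generated site verbatim.
--
-- > main :: IO ()
-- > main = hakyll $ match "images/*" $ do
-- >     route   idRoute
-- >     compile copyFileCompiler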
| sol/hakyll | src/Hakyll/Core/Writable/CopyFile.hs | bsd-3-clause | 830 | 0 | 8 | 133 | 179 | 108 | 71 | -1 | -1 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TupleSections #-}
module Network.Wai.MakeAssets (
serveAssets,
Options(..),
-- * re-exports
Default(..),
) where
import Control.Concurrent
import Control.Exception
import Control.Monad
import Data.Default
import Data.List (intercalate)
import Data.String.Conversions
import Development.Shake (cmd, Exit(..), Stderr(..), CmdOption(..))
import Network.HTTP.Types.Status
import Network.Wai
import Network.Wai.Application.Static
import System.Directory
import System.Exit
import System.FilePath
data Options
= Options {
clientDir :: FilePath
}
instance Default Options where
def = Options {
clientDir = "client"
}
-- | 'serveAssets' will create a wai 'Application' that serves files from the
-- "assets" directory.
--
-- The workflow that 'serveAssets' allows is similar to working on files (for
-- web-sites) that don't need compilation or generation, e.g. html, css, php or
-- javascript. You edit the file in an editor, save it, switch to a browser and
-- hit reload. 'serveAssets' makes sure your browser will be sent up-to-date
-- files.
--
-- To accomplish this, 'serveAssets' assumes that there's a "Makefile" in the
-- directory pointed to by 'clientDir' (default: "client"). This "Makefile" is
-- supposed to put compilation results into the "assets" directory. On __every__
-- request, 'serveAssets' will execute that "Makefile" and only start serving
-- files once the "Makefile" is done. ('serveAssets' makes sure not to run your
-- "Makefile" concurrently.)
serveAssets :: Options -> IO Application
serveAssets options = do
startupChecks options
let fileApp = staticApp $ defaultFileServerSettings "assets/"
mvar <- newMVar ()
return $ \ request respond -> do
(Exit exitCode, Stderr errs) <- synchronize mvar $
cmd (Cwd (clientDir options)) "make"
case exitCode of
ExitSuccess -> fileApp request respond
ExitFailure _ -> respond $ responseLBS internalServerError500 [] $
cs "make error:\n" <> errs
synchronize :: MVar () -> IO a -> IO a
synchronize mvar action = modifyMVar mvar $ \ () -> ((), ) <$> action
startupChecks :: Options -> IO ()
startupChecks options = do
checkExists Dir (clientDir options) $
"You should put sources for assets in there."
checkExists File (clientDir options </> "Makefile") $ unwords $
"Which will be invoked to build the assets." :
"It should put compiled assets into 'assets/'." :
[]
checkExists Dir "assets" $
"All files in 'assets/' will be served."
data FileType
= File
| Dir
checkExists :: FileType -> FilePath -> String -> IO ()
checkExists typ path hint = do
exists <- (isFile doesFileExist doesDirectoryExist) path
when (not exists) $ do
throwIO $ ErrorCall $ intercalate "\n" $
("missing " ++ isFile "file" "directory" ++ ": '" ++ showPath path ++ "'") :
("Please create '" ++ showPath path ++ "'.") :
("(" ++ hint ++ ")") :
[]
where
isFile :: a -> a -> a
isFile a b = case typ of
File -> a
Dir -> b
showPath :: FilePath -> String
showPath = case typ of
File -> id
Dir -> (++ "/")
| soenkehahn/wai-make-assets | src/Network/Wai/MakeAssets.hs | bsd-3-clause | 3,287 | 0 | 19 | 778 | 725 | 385 | 340 | 69 | 3 |
{-# OPTIONS_GHC -w #-}
module VSim.VIR.AST
( parseFiles
, parseFile
) where
import Control.Applicative
import Control.Monad
import Control.Monad.Reader
import Control.Monad.Error
import Data.IORef
import Data.IORefEx
import Data.List
import Data.Ord
import Data.Maybe
import qualified Control.Exception as E
import qualified Data.ByteString.Char8 as B
import System.IO
import System.IO.Error
import System.IO.Unsafe
import VSim.VIR.Monad
import VSim.VIR.Lexer as L
import VSim.VIR.Types as T
import VSim.Data.Line
import VSim.Data.Loc
import VSim.Data.NamePath
import VSim.Data.Int128
import VSim.Data.TInt
-- parser produced by Happy Version 1.18.9
data HappyAbsSyn
= HappyTerminal (L.Token)
| HappyErrorToken Int
| HappyAbsSyn4 ([IRTop])
| HappyAbsSyn5 (IRTop)
| HappyAbsSyn6 (IRGen)
| HappyAbsSyn7 (IRType)
| HappyAbsSyn8 (EnumElement)
| HappyAbsSyn9 (IRTypeDescr)
| HappyAbsSyn10 (UnitDecl)
| HappyAbsSyn11 ((Loc, Ident, IRTypeDescr))
| HappyAbsSyn12 (Bool)
| HappyAbsSyn13 (IRRangeDescr)
| HappyAbsSyn14 (IRArrayRangeDescr)
| HappyAbsSyn15 (Constrained IRArrayRangeDescr)
| HappyAbsSyn16 (IRConstant)
| HappyAbsSyn17 (IRVariable)
| HappyAbsSyn18 (IRSignal)
| HappyAbsSyn19 (IROptExpr)
| HappyAbsSyn20 (IRAlias)
| HappyAbsSyn21 (IRPort)
| HappyAbsSyn23 (IRFunction)
| HappyAbsSyn24 (IRProcedure)
| HappyAbsSyn25 ([IRArg])
| HappyAbsSyn26 (IRArg)
| HappyAbsSyn27 (ArgMode)
| HappyAbsSyn28 (NamedItemKind)
| HappyAbsSyn29 (IRNameG)
| HappyAbsSyn30 (IRProcess)
| HappyAbsSyn31 (IRStat)
| HappyAbsSyn33 (LoopLabel)
| HappyAbsSyn35 (IRAfter)
| HappyAbsSyn36 ([IRAfter])
| HappyAbsSyn37 ([IRNameG])
| HappyAbsSyn38 (Maybe IRExpr)
| HappyAbsSyn40 (IRExpr)
| HappyAbsSyn42 (IRCaseElement)
| HappyAbsSyn43 (IRLetDecl)
| HappyAbsSyn48 (IRElementAssociation)
| HappyAbsSyn51 (IRName)
| HappyAbsSyn56 (IREGList)
| HappyAbsSyn58 ([B.ByteString])
| HappyAbsSyn59 (Ident)
| HappyAbsSyn60 ((Ident, [Ident]))
| HappyAbsSyn62 (B.ByteString)
| HappyAbsSyn63 (TInt)
| HappyAbsSyn64 (Int128)
| HappyAbsSyn65 ()
| HappyAbsSyn69 (Loc)
| HappyAbsSyn70 (Int)
| HappyAbsSyn76 (T.MemoryMapRange)
| HappyAbsSyn77 (([Ident], [Ident]))
| HappyAbsSyn78 ([EnumElement])
| HappyAbsSyn81 ([UnitDecl])
| HappyAbsSyn84 ([(Loc, Ident, IRTypeDescr)])
| HappyAbsSyn87 ([IRArrayRangeDescr])
| HappyAbsSyn90 ([Constrained IRArrayRangeDescr])
| HappyAbsSyn99 ([IRLetDecl])
| HappyAbsSyn102 ([IRCaseElement])
| HappyAbsSyn105 ([IRElementAssociation])
| HappyAbsSyn111 (WithLoc Int128)
| HappyAbsSyn112 (WithLoc Ident)
| HappyAbsSyn114 (WithLoc (Ident, [Ident]))
{- to allow type-synonyms as our monads (likely
- with explicitly-specified bind and return)
- in Haskell98, it seems that with
- /type M a = .../, then /(HappyReduction M)/
- is not allowed. But Happy is a
- code-generator that can just substitute it.
type HappyReduction m =
Int
-> (L.Token)
-> HappyState (L.Token) (HappyStk HappyAbsSyn -> m HappyAbsSyn)
-> [HappyState (L.Token) (HappyStk HappyAbsSyn -> m HappyAbsSyn)]
-> HappyStk HappyAbsSyn
-> m HappyAbsSyn
-}
action_0,
action_1,
action_2,
action_3,
action_4,
action_5,
action_6,
action_7,
action_8,
action_9,
action_10,
action_11,
action_12,
action_13,
action_14,
action_15,
action_16,
action_17,
action_18,
action_19,
action_20,
action_21,
action_22,
action_23,
action_24,
action_25,
action_26,
action_27,
action_28,
action_29,
action_30,
action_31,
action_32,
action_33,
action_34,
action_35,
action_36,
action_37,
action_38,
action_39,
action_40,
action_41,
action_42,
action_43,
action_44,
action_45,
action_46,
action_47,
action_48,
action_49,
action_50,
action_51,
action_52,
action_53,
action_54,
action_55,
action_56,
action_57,
action_58,
action_59,
action_60,
action_61,
action_62,
action_63,
action_64,
action_65,
action_66,
action_67,
action_68,
action_69,
action_70,
action_71,
action_72,
action_73,
action_74,
action_75,
action_76,
action_77,
action_78,
action_79,
action_80,
action_81,
action_82,
action_83,
action_84,
action_85,
action_86,
action_87,
action_88,
action_89,
action_90,
action_91,
action_92,
action_93,
action_94,
action_95,
action_96,
action_97,
action_98,
action_99,
action_100,
action_101,
action_102,
action_103,
action_104,
action_105,
action_106,
action_107,
action_108,
action_109,
action_110,
action_111,
action_112,
action_113,
action_114,
action_115,
action_116,
action_117,
action_118,
action_119,
action_120,
action_121,
action_122,
action_123,
action_124,
action_125,
action_126,
action_127,
action_128,
action_129,
action_130,
action_131,
action_132,
action_133,
action_134,
action_135,
action_136,
action_137,
action_138,
action_139,
action_140,
action_141,
action_142,
action_143,
action_144,
action_145,
action_146,
action_147,
action_148,
action_149,
action_150,
action_151,
action_152,
action_153,
action_154,
action_155,
action_156,
action_157,
action_158,
action_159,
action_160,
action_161,
action_162,
action_163,
action_164,
action_165,
action_166,
action_167,
action_168,
action_169,
action_170,
action_171,
action_172,
action_173,
action_174,
action_175,
action_176,
action_177,
action_178,
action_179,
action_180,
action_181,
action_182,
action_183,
action_184,
action_185,
action_186,
action_187,
action_188,
action_189,
action_190,
action_191,
action_192,
action_193,
action_194,
action_195,
action_196,
action_197,
action_198,
action_199,
action_200,
action_201,
action_202,
action_203,
action_204,
action_205,
action_206,
action_207,
action_208,
action_209,
action_210,
action_211,
action_212,
action_213,
action_214,
action_215,
action_216,
action_217,
action_218,
action_219,
action_220,
action_221,
action_222,
action_223,
action_224,
action_225,
action_226,
action_227,
action_228,
action_229,
action_230,
action_231,
action_232,
action_233,
action_234,
action_235,
action_236,
action_237,
action_238,
action_239,
action_240,
action_241,
action_242,
action_243,
action_244,
action_245,
action_246,
action_247,
action_248,
action_249,
action_250,
action_251,
action_252,
action_253,
action_254,
action_255,
action_256,
action_257,
action_258,
action_259,
action_260,
action_261,
action_262,
action_263,
action_264,
action_265,
action_266,
action_267,
action_268,
action_269,
action_270,
action_271,
action_272,
action_273,
action_274,
action_275,
action_276,
action_277,
action_278,
action_279,
action_280,
action_281,
action_282,
action_283,
action_284,
action_285,
action_286,
action_287,
action_288,
action_289,
action_290,
action_291,
action_292,
action_293,
action_294,
action_295,
action_296,
action_297,
action_298,
action_299,
action_300,
action_301,
action_302,
action_303,
action_304,
action_305,
action_306,
action_307,
action_308,
action_309,
action_310,
action_311,
action_312,
action_313,
action_314,
action_315,
action_316,
action_317,
action_318,
action_319,
action_320,
action_321,
action_322,
action_323,
action_324,
action_325,
action_326,
action_327,
action_328,
action_329,
action_330,
action_331,
action_332,
action_333,
action_334,
action_335,
action_336,
action_337,
action_338,
action_339,
action_340,
action_341,
action_342,
action_343,
action_344,
action_345,
action_346,
action_347,
action_348,
action_349,
action_350,
action_351,
action_352,
action_353,
action_354,
action_355,
action_356,
action_357,
action_358,
action_359,
action_360,
action_361,
action_362,
action_363,
action_364,
action_365,
action_366,
action_367,
action_368,
action_369,
action_370,
action_371,
action_372,
action_373,
action_374,
action_375,
action_376,
action_377,
action_378,
action_379,
action_380,
action_381,
action_382,
action_383,
action_384,
action_385,
action_386,
action_387,
action_388,
action_389,
action_390,
action_391,
action_392,
action_393,
action_394,
action_395,
action_396,
action_397,
action_398,
action_399,
action_400,
action_401,
action_402,
action_403,
action_404,
action_405,
action_406,
action_407,
action_408,
action_409,
action_410,
action_411,
action_412,
action_413,
action_414,
action_415,
action_416,
action_417,
action_418,
action_419,
action_420,
action_421,
action_422,
action_423,
action_424,
action_425,
action_426,
action_427,
action_428,
action_429,
action_430,
action_431,
action_432,
action_433,
action_434,
action_435,
action_436,
action_437,
action_438,
action_439,
action_440,
action_441,
action_442,
action_443,
action_444,
action_445,
action_446,
action_447,
action_448,
action_449,
action_450,
action_451,
action_452,
action_453,
action_454,
action_455,
action_456,
action_457,
action_458,
action_459,
action_460,
action_461,
action_462,
action_463,
action_464,
action_465,
action_466,
action_467,
action_468,
action_469,
action_470,
action_471,
action_472,
action_473,
action_474,
action_475,
action_476,
action_477,
action_478,
action_479,
action_480,
action_481,
action_482,
action_483,
action_484,
action_485,
action_486,
action_487,
action_488,
action_489,
action_490,
action_491,
action_492,
action_493,
action_494,
action_495,
action_496,
action_497,
action_498,
action_499,
action_500,
action_501,
action_502,
action_503,
action_504,
action_505,
action_506,
action_507,
action_508,
action_509,
action_510,
action_511,
action_512,
action_513,
action_514,
action_515,
action_516,
action_517,
action_518,
action_519,
action_520,
action_521,
action_522,
action_523,
action_524,
action_525,
action_526,
action_527,
action_528,
action_529,
action_530,
action_531,
action_532,
action_533,
action_534,
action_535,
action_536,
action_537,
action_538,
action_539,
action_540,
action_541,
action_542,
action_543,
action_544,
action_545,
action_546,
action_547,
action_548,
action_549,
action_550,
action_551,
action_552,
action_553,
action_554,
action_555,
action_556,
action_557,
action_558,
action_559,
action_560,
action_561,
action_562,
action_563,
action_564,
action_565,
action_566,
action_567,
action_568,
action_569,
action_570,
action_571,
action_572,
action_573,
action_574,
action_575,
action_576,
action_577,
action_578,
action_579,
action_580,
action_581,
action_582,
action_583,
action_584,
action_585,
action_586,
action_587,
action_588,
action_589,
action_590,
action_591,
action_592,
action_593,
action_594,
action_595,
action_596,
action_597,
action_598,
action_599,
action_600,
action_601,
action_602,
action_603,
action_604,
action_605,
action_606,
action_607,
action_608,
action_609,
action_610,
action_611,
action_612,
action_613,
action_614,
action_615,
action_616,
action_617,
action_618,
action_619,
action_620,
action_621,
action_622,
action_623,
action_624,
action_625,
action_626,
action_627,
action_628,
action_629,
action_630,
action_631,
action_632,
action_633,
action_634,
action_635,
action_636,
action_637,
action_638,
action_639,
action_640,
action_641,
action_642,
action_643,
action_644,
action_645,
action_646,
action_647,
action_648,
action_649,
action_650,
action_651,
action_652,
action_653,
action_654,
action_655,
action_656,
action_657,
action_658,
action_659,
action_660,
action_661,
action_662,
action_663,
action_664,
action_665,
action_666,
action_667,
action_668,
action_669,
action_670,
action_671,
action_672,
action_673,
action_674,
action_675,
action_676,
action_677,
action_678,
action_679,
action_680,
action_681,
action_682,
action_683,
action_684,
action_685,
action_686,
action_687,
action_688,
action_689,
action_690,
action_691,
action_692,
action_693,
action_694,
action_695,
action_696,
action_697,
action_698,
action_699,
action_700,
action_701,
action_702,
action_703,
action_704,
action_705,
action_706,
action_707,
action_708,
action_709,
action_710,
action_711,
action_712,
action_713,
action_714,
action_715,
action_716,
action_717,
action_718,
action_719,
action_720,
action_721,
action_722,
action_723,
action_724,
action_725,
action_726,
action_727,
action_728,
action_729,
action_730,
action_731,
action_732,
action_733,
action_734,
action_735,
action_736,
action_737,
action_738,
action_739,
action_740,
action_741,
action_742,
action_743,
action_744,
action_745,
action_746,
action_747,
action_748,
action_749,
action_750,
action_751,
action_752,
action_753,
action_754,
action_755,
action_756,
action_757,
action_758,
action_759 :: () => Int -> ({-HappyReduction (Parser) = -}
Int
-> (L.Token)
-> HappyState (L.Token) (HappyStk HappyAbsSyn -> (Parser) HappyAbsSyn)
-> [HappyState (L.Token) (HappyStk HappyAbsSyn -> (Parser) HappyAbsSyn)]
-> HappyStk HappyAbsSyn
-> (Parser) HappyAbsSyn)
happyReduce_1,
happyReduce_2,
happyReduce_3,
happyReduce_4,
happyReduce_5,
happyReduce_6,
happyReduce_7,
happyReduce_8,
happyReduce_9,
happyReduce_10,
happyReduce_11,
happyReduce_12,
happyReduce_13,
happyReduce_14,
happyReduce_15,
happyReduce_16,
happyReduce_17,
happyReduce_18,
happyReduce_19,
happyReduce_20,
happyReduce_21,
happyReduce_22,
happyReduce_23,
happyReduce_24,
happyReduce_25,
happyReduce_26,
happyReduce_27,
happyReduce_28,
happyReduce_29,
happyReduce_30,
happyReduce_31,
happyReduce_32,
happyReduce_33,
happyReduce_34,
happyReduce_35,
happyReduce_36,
happyReduce_37,
happyReduce_38,
happyReduce_39,
happyReduce_40,
happyReduce_41,
happyReduce_42,
happyReduce_43,
happyReduce_44,
happyReduce_45,
happyReduce_46,
happyReduce_47,
happyReduce_48,
happyReduce_49,
happyReduce_50,
happyReduce_51,
happyReduce_52,
happyReduce_53,
happyReduce_54,
happyReduce_55,
happyReduce_56,
happyReduce_57,
happyReduce_58,
happyReduce_59,
happyReduce_60,
happyReduce_61,
happyReduce_62,
happyReduce_63,
happyReduce_64,
happyReduce_65,
happyReduce_66,
happyReduce_67,
happyReduce_68,
happyReduce_69,
happyReduce_70,
happyReduce_71,
happyReduce_72,
happyReduce_73,
happyReduce_74,
happyReduce_75,
happyReduce_76,
happyReduce_77,
happyReduce_78,
happyReduce_79,
happyReduce_80,
happyReduce_81,
happyReduce_82,
happyReduce_83,
happyReduce_84,
happyReduce_85,
happyReduce_86,
happyReduce_87,
happyReduce_88,
happyReduce_89,
happyReduce_90,
happyReduce_91,
happyReduce_92,
happyReduce_93,
happyReduce_94,
happyReduce_95,
happyReduce_96,
happyReduce_97,
happyReduce_98,
happyReduce_99,
happyReduce_100,
happyReduce_101,
happyReduce_102,
happyReduce_103,
happyReduce_104,
happyReduce_105,
happyReduce_106,
happyReduce_107,
happyReduce_108,
happyReduce_109,
happyReduce_110,
happyReduce_111,
happyReduce_112,
happyReduce_113,
happyReduce_114,
happyReduce_115,
happyReduce_116,
happyReduce_117,
happyReduce_118,
happyReduce_119,
happyReduce_120,
happyReduce_121,
happyReduce_122,
happyReduce_123,
happyReduce_124,
happyReduce_125,
happyReduce_126,
happyReduce_127,
happyReduce_128,
happyReduce_129,
happyReduce_130,
happyReduce_131,
happyReduce_132,
happyReduce_133,
happyReduce_134,
happyReduce_135,
happyReduce_136,
happyReduce_137,
happyReduce_138,
happyReduce_139,
happyReduce_140,
happyReduce_141,
happyReduce_142,
happyReduce_143,
happyReduce_144,
happyReduce_145,
happyReduce_146,
happyReduce_147,
happyReduce_148,
happyReduce_149,
happyReduce_150,
happyReduce_151,
happyReduce_152,
happyReduce_153,
happyReduce_154,
happyReduce_155,
happyReduce_156,
happyReduce_157,
happyReduce_158,
happyReduce_159,
happyReduce_160,
happyReduce_161,
happyReduce_162,
happyReduce_163,
happyReduce_164,
happyReduce_165,
happyReduce_166,
happyReduce_167,
happyReduce_168,
happyReduce_169,
happyReduce_170,
happyReduce_171,
happyReduce_172,
happyReduce_173,
happyReduce_174,
happyReduce_175,
happyReduce_176,
happyReduce_177,
happyReduce_178,
happyReduce_179,
happyReduce_180,
happyReduce_181,
happyReduce_182,
happyReduce_183,
happyReduce_184,
happyReduce_185,
happyReduce_186,
happyReduce_187,
happyReduce_188,
happyReduce_189,
happyReduce_190,
happyReduce_191,
happyReduce_192,
happyReduce_193,
happyReduce_194,
happyReduce_195,
happyReduce_196,
happyReduce_197,
happyReduce_198,
happyReduce_199,
happyReduce_200,
happyReduce_201,
happyReduce_202,
happyReduce_203,
happyReduce_204,
happyReduce_205,
happyReduce_206,
happyReduce_207,
happyReduce_208,
happyReduce_209,
happyReduce_210,
happyReduce_211,
happyReduce_212,
happyReduce_213,
happyReduce_214,
happyReduce_215,
happyReduce_216,
happyReduce_217,
happyReduce_218,
happyReduce_219,
happyReduce_220,
happyReduce_221,
happyReduce_222,
happyReduce_223,
happyReduce_224,
happyReduce_225,
happyReduce_226,
happyReduce_227,
happyReduce_228,
happyReduce_229,
happyReduce_230,
happyReduce_231,
happyReduce_232,
happyReduce_233,
happyReduce_234,
happyReduce_235,
happyReduce_236,
happyReduce_237,
happyReduce_238,
happyReduce_239,
happyReduce_240,
happyReduce_241,
happyReduce_242,
happyReduce_243,
happyReduce_244,
happyReduce_245,
happyReduce_246,
happyReduce_247,
happyReduce_248,
happyReduce_249,
happyReduce_250,
happyReduce_251,
happyReduce_252,
happyReduce_253,
happyReduce_254,
happyReduce_255,
happyReduce_256,
happyReduce_257,
happyReduce_258,
happyReduce_259,
happyReduce_260,
happyReduce_261,
happyReduce_262,
happyReduce_263,
happyReduce_264,
happyReduce_265,
happyReduce_266,
happyReduce_267,
happyReduce_268,
happyReduce_269,
happyReduce_270,
happyReduce_271,
happyReduce_272,
happyReduce_273,
happyReduce_274,
happyReduce_275,
happyReduce_276,
happyReduce_277,
happyReduce_278,
happyReduce_279,
happyReduce_280,
happyReduce_281,
happyReduce_282,
happyReduce_283,
happyReduce_284,
happyReduce_285,
happyReduce_286,
happyReduce_287,
happyReduce_288,
happyReduce_289 :: () => ({-HappyReduction (Parser) = -}
Int
-> (L.Token)
-> HappyState (L.Token) (HappyStk HappyAbsSyn -> (Parser) HappyAbsSyn)
-> [HappyState (L.Token) (HappyStk HappyAbsSyn -> (Parser) HappyAbsSyn)]
-> HappyStk HappyAbsSyn
-> (Parser) HappyAbsSyn)
action_0 (121) = happyShift action_15
action_0 (4) = happyGoto action_16
action_0 (5) = happyGoto action_2
action_0 (6) = happyGoto action_3
action_0 (7) = happyGoto action_4
action_0 (16) = happyGoto action_5
action_0 (18) = happyGoto action_6
action_0 (20) = happyGoto action_7
action_0 (21) = happyGoto action_8
action_0 (23) = happyGoto action_9
action_0 (24) = happyGoto action_10
action_0 (30) = happyGoto action_11
action_0 (65) = happyGoto action_17
action_0 (76) = happyGoto action_13
action_0 (77) = happyGoto action_14
action_0 _ = happyReduce_3
action_1 (121) = happyShift action_15
action_1 (5) = happyGoto action_2
action_1 (6) = happyGoto action_3
action_1 (7) = happyGoto action_4
action_1 (16) = happyGoto action_5
action_1 (18) = happyGoto action_6
action_1 (20) = happyGoto action_7
action_1 (21) = happyGoto action_8
action_1 (23) = happyGoto action_9
action_1 (24) = happyGoto action_10
action_1 (30) = happyGoto action_11
action_1 (65) = happyGoto action_12
action_1 (76) = happyGoto action_13
action_1 (77) = happyGoto action_14
action_1 _ = happyFail
action_2 (121) = happyShift action_15
action_2 (4) = happyGoto action_37
action_2 (5) = happyGoto action_2
action_2 (6) = happyGoto action_3
action_2 (7) = happyGoto action_4
action_2 (16) = happyGoto action_5
action_2 (18) = happyGoto action_6
action_2 (20) = happyGoto action_7
action_2 (21) = happyGoto action_8
action_2 (23) = happyGoto action_9
action_2 (24) = happyGoto action_10
action_2 (30) = happyGoto action_11
action_2 (65) = happyGoto action_17
action_2 (76) = happyGoto action_13
action_2 (77) = happyGoto action_14
action_2 _ = happyReduce_3
action_3 _ = happyReduce_11
action_4 _ = happyReduce_4
action_5 _ = happyReduce_5
action_6 _ = happyReduce_6
action_7 _ = happyReduce_7
action_8 _ = happyReduce_8
action_9 _ = happyReduce_9
action_10 _ = happyReduce_10
action_11 _ = happyReduce_12
action_12 (129) = happyShift action_21
action_12 (159) = happyShift action_22
action_12 (170) = happyShift action_23
action_12 (171) = happyShift action_24
action_12 (175) = happyShift action_25
action_12 (176) = happyShift action_26
action_12 (178) = happyShift action_27
action_12 (188) = happyShift action_28
action_12 (189) = happyShift action_29
action_12 (198) = happyShift action_30
action_12 (199) = happyShift action_31
action_12 (239) = happyShift action_32
action_12 (240) = happyShift action_33
action_12 (241) = happyShift action_34
action_12 (242) = happyShift action_35
action_12 (71) = happyGoto action_36
action_12 (73) = happyGoto action_19
action_12 (74) = happyGoto action_20
action_12 _ = happyFail
action_13 _ = happyReduce_13
action_14 _ = happyReduce_14
action_15 _ = happyReduce_215
action_16 (243) = happyAccept
action_16 _ = happyFail
action_17 (129) = happyShift action_21
action_17 (159) = happyShift action_22
action_17 (170) = happyShift action_23
action_17 (171) = happyShift action_24
action_17 (175) = happyShift action_25
action_17 (176) = happyShift action_26
action_17 (178) = happyShift action_27
action_17 (188) = happyShift action_28
action_17 (189) = happyShift action_29
action_17 (198) = happyShift action_30
action_17 (199) = happyShift action_31
action_17 (239) = happyShift action_32
action_17 (240) = happyShift action_33
action_17 (241) = happyShift action_34
action_17 (242) = happyShift action_35
action_17 (71) = happyGoto action_18
action_17 (73) = happyGoto action_19
action_17 (74) = happyGoto action_20
action_17 _ = happyFail
action_18 (121) = happyShift action_15
action_18 (4) = happyGoto action_65
action_18 (5) = happyGoto action_66
action_18 (6) = happyGoto action_3
action_18 (7) = happyGoto action_4
action_18 (16) = happyGoto action_5
action_18 (18) = happyGoto action_6
action_18 (20) = happyGoto action_7
action_18 (21) = happyGoto action_8
action_18 (23) = happyGoto action_9
action_18 (24) = happyGoto action_10
action_18 (30) = happyGoto action_11
action_18 (65) = happyGoto action_17
action_18 (76) = happyGoto action_13
action_18 (77) = happyGoto action_14
action_18 _ = happyReduce_3
action_19 (69) = happyGoto action_44
action_19 (114) = happyGoto action_64
action_19 _ = happyReduce_219
action_20 (69) = happyGoto action_44
action_20 (114) = happyGoto action_63
action_20 _ = happyReduce_219
action_21 (117) = happyShift action_62
action_21 _ = happyFail
action_22 (69) = happyGoto action_44
action_22 (114) = happyGoto action_61
action_22 _ = happyReduce_219
action_23 (69) = happyGoto action_44
action_23 (114) = happyGoto action_60
action_23 _ = happyReduce_219
action_24 (69) = happyGoto action_44
action_24 (114) = happyGoto action_59
action_24 _ = happyReduce_219
action_25 (117) = happyShift action_58
action_25 _ = happyReduce_224
action_26 (117) = happyShift action_57
action_26 _ = happyReduce_226
action_27 (69) = happyGoto action_44
action_27 (114) = happyGoto action_56
action_27 _ = happyReduce_219
action_28 (121) = happyShift action_15
action_28 (123) = happyShift action_54
action_28 (125) = happyShift action_55
action_28 (49) = happyGoto action_48
action_28 (55) = happyGoto action_49
action_28 (65) = happyGoto action_50
action_28 (67) = happyGoto action_51
action_28 (69) = happyGoto action_52
action_28 (114) = happyGoto action_53
action_28 _ = happyReduce_219
action_29 (69) = happyGoto action_44
action_29 (114) = happyGoto action_47
action_29 _ = happyReduce_219
action_30 (69) = happyGoto action_44
action_30 (114) = happyGoto action_46
action_30 _ = happyReduce_219
action_31 (69) = happyGoto action_44
action_31 (114) = happyGoto action_45
action_31 _ = happyReduce_219
action_32 (176) = happyShift action_26
action_32 (74) = happyGoto action_43
action_32 _ = happyFail
action_33 (176) = happyShift action_26
action_33 (74) = happyGoto action_42
action_33 _ = happyFail
action_34 (119) = happyShift action_40
action_34 (58) = happyGoto action_41
action_34 _ = happyFail
action_35 (119) = happyShift action_40
action_35 (58) = happyGoto action_39
action_35 _ = happyFail
action_36 (121) = happyShift action_15
action_36 (5) = happyGoto action_38
action_36 (6) = happyGoto action_3
action_36 (7) = happyGoto action_4
action_36 (16) = happyGoto action_5
action_36 (18) = happyGoto action_6
action_36 (20) = happyGoto action_7
action_36 (21) = happyGoto action_8
action_36 (23) = happyGoto action_9
action_36 (24) = happyGoto action_10
action_36 (30) = happyGoto action_11
action_36 (65) = happyGoto action_12
action_36 (76) = happyGoto action_13
action_36 (77) = happyGoto action_14
action_36 _ = happyFail
action_37 _ = happyReduce_1
action_38 (72) = happyGoto action_67
action_38 _ = happyReduce_222
action_39 (119) = happyShift action_40
action_39 (127) = happyShift action_126
action_39 (58) = happyGoto action_127
action_39 _ = happyFail
action_40 _ = happyReduce_203
action_41 (119) = happyShift action_40
action_41 (127) = happyShift action_126
action_41 (58) = happyGoto action_125
action_41 _ = happyFail
action_42 (69) = happyGoto action_44
action_42 (114) = happyGoto action_124
action_42 _ = happyReduce_219
action_43 (69) = happyGoto action_44
action_43 (114) = happyGoto action_123
action_43 _ = happyReduce_219
action_44 (119) = happyShift action_40
action_44 (58) = happyGoto action_82
action_44 (60) = happyGoto action_83
action_44 _ = happyFail
action_45 (121) = happyShift action_15
action_45 (65) = happyGoto action_122
action_45 _ = happyFail
action_46 (121) = happyShift action_15
action_46 (9) = happyGoto action_121
action_46 (13) = happyGoto action_70
action_46 (65) = happyGoto action_71
action_46 (69) = happyGoto action_72
action_46 (113) = happyGoto action_73
action_46 _ = happyReduce_219
action_47 (69) = happyGoto action_44
action_47 (114) = happyGoto action_120
action_47 _ = happyReduce_219
action_48 (69) = happyGoto action_119
action_48 _ = happyReduce_219
action_49 (179) = happyShift action_118
action_49 _ = happyFail
action_50 (123) = happyShift action_54
action_50 (129) = happyShift action_21
action_50 (202) = happyShift action_94
action_50 (205) = happyShift action_95
action_50 (206) = happyShift action_96
action_50 (207) = happyShift action_97
action_50 (209) = happyShift action_98
action_50 (210) = happyShift action_99
action_50 (213) = happyShift action_100
action_50 (216) = happyShift action_101
action_50 (217) = happyShift action_102
action_50 (218) = happyShift action_103
action_50 (219) = happyShift action_104
action_50 (220) = happyShift action_105
action_50 (223) = happyShift action_106
action_50 (224) = happyShift action_107
action_50 (225) = happyShift action_108
action_50 (226) = happyShift action_109
action_50 (229) = happyShift action_110
action_50 (230) = happyShift action_111
action_50 (231) = happyShift action_112
action_50 (232) = happyShift action_113
action_50 (235) = happyShift action_114
action_50 (236) = happyShift action_115
action_50 (237) = happyShift action_116
action_50 (238) = happyShift action_117
action_50 (67) = happyGoto action_90
action_50 (69) = happyGoto action_91
action_50 (71) = happyGoto action_92
action_50 (113) = happyGoto action_93
action_50 _ = happyReduce_219
action_51 (69) = happyGoto action_89
action_51 _ = happyReduce_219
action_52 (116) = happyShift action_86
action_52 (117) = happyShift action_87
action_52 (118) = happyShift action_88
action_52 (119) = happyShift action_40
action_52 (58) = happyGoto action_82
action_52 (60) = happyGoto action_83
action_52 (63) = happyGoto action_84
action_52 (69) = happyGoto action_85
action_52 _ = happyReduce_219
action_53 _ = happyReduce_130
action_54 _ = happyReduce_217
action_55 (69) = happyGoto action_80
action_55 (111) = happyGoto action_81
action_55 _ = happyReduce_219
action_56 (121) = happyShift action_15
action_56 (9) = happyGoto action_79
action_56 (13) = happyGoto action_70
action_56 (65) = happyGoto action_71
action_56 (69) = happyGoto action_72
action_56 (113) = happyGoto action_73
action_56 _ = happyReduce_219
action_57 _ = happyReduce_225
action_58 _ = happyReduce_223
action_59 (121) = happyShift action_15
action_59 (65) = happyGoto action_78
action_59 _ = happyFail
action_60 (121) = happyShift action_15
action_60 (65) = happyGoto action_77
action_60 _ = happyFail
action_61 (121) = happyShift action_15
action_61 (9) = happyGoto action_76
action_61 (13) = happyGoto action_70
action_61 (65) = happyGoto action_71
action_61 (69) = happyGoto action_72
action_61 (113) = happyGoto action_73
action_61 _ = happyReduce_219
action_62 (115) = happyShift action_75
action_62 _ = happyFail
action_63 (121) = happyShift action_15
action_63 (9) = happyGoto action_74
action_63 (13) = happyGoto action_70
action_63 (65) = happyGoto action_71
action_63 (69) = happyGoto action_72
action_63 (113) = happyGoto action_73
action_63 _ = happyReduce_219
action_64 (121) = happyShift action_15
action_64 (9) = happyGoto action_69
action_64 (13) = happyGoto action_70
action_64 (65) = happyGoto action_71
action_64 (69) = happyGoto action_72
action_64 (113) = happyGoto action_73
action_64 _ = happyReduce_219
action_65 (72) = happyGoto action_68
action_65 _ = happyReduce_222
action_66 (121) = happyShift action_15
action_66 (122) = happyReduce_222
action_66 (4) = happyGoto action_37
action_66 (5) = happyGoto action_2
action_66 (6) = happyGoto action_3
action_66 (7) = happyGoto action_4
action_66 (16) = happyGoto action_5
action_66 (18) = happyGoto action_6
action_66 (20) = happyGoto action_7
action_66 (21) = happyGoto action_8
action_66 (23) = happyGoto action_9
action_66 (24) = happyGoto action_10
action_66 (30) = happyGoto action_11
action_66 (65) = happyGoto action_17
action_66 (72) = happyGoto action_67
action_66 (76) = happyGoto action_13
action_66 (77) = happyGoto action_14
action_66 _ = happyReduce_222
action_67 (122) = happyShift action_129
action_67 (66) = happyGoto action_244
action_67 _ = happyFail
action_68 (122) = happyShift action_129
action_68 (66) = happyGoto action_243
action_68 _ = happyFail
action_69 (19) = happyGoto action_241
action_69 (69) = happyGoto action_242
action_69 _ = happyReduce_219
action_70 _ = happyReduce_22
action_71 (160) = happyShift action_233
action_71 (161) = happyShift action_234
action_71 (163) = happyShift action_235
action_71 (164) = happyShift action_236
action_71 (165) = happyShift action_237
action_71 (166) = happyShift action_238
action_71 (233) = happyShift action_239
action_71 (234) = happyShift action_240
action_71 (69) = happyGoto action_231
action_71 (113) = happyGoto action_232
action_71 _ = happyReduce_219
action_72 (119) = happyShift action_40
action_72 (58) = happyGoto action_178
action_72 (59) = happyGoto action_179
action_72 _ = happyFail
action_73 _ = happyReduce_21
action_74 (121) = happyShift action_15
action_74 (22) = happyGoto action_228
action_74 (65) = happyGoto action_229
action_74 (69) = happyGoto action_230
action_74 _ = happyReduce_219
action_75 (115) = happyShift action_227
action_75 _ = happyFail
action_76 (122) = happyShift action_129
action_76 (66) = happyGoto action_226
action_76 _ = happyFail
action_77 (121) = happyShift action_15
action_77 (25) = happyGoto action_225
action_77 (26) = happyGoto action_220
action_77 (65) = happyGoto action_221
action_77 (93) = happyGoto action_222
action_77 (94) = happyGoto action_223
action_77 (95) = happyGoto action_224
action_77 _ = happyReduce_260
action_78 (121) = happyShift action_15
action_78 (25) = happyGoto action_219
action_78 (26) = happyGoto action_220
action_78 (65) = happyGoto action_221
action_78 (93) = happyGoto action_222
action_78 (94) = happyGoto action_223
action_78 (95) = happyGoto action_224
action_78 _ = happyReduce_260
action_79 (69) = happyGoto action_218
action_79 _ = happyReduce_219
action_80 (64) = happyGoto action_217
action_80 (69) = happyGoto action_136
action_80 _ = happyReduce_219
action_81 (69) = happyGoto action_215
action_81 (112) = happyGoto action_216
action_81 _ = happyReduce_219
action_82 (127) = happyShift action_126
action_82 _ = happyReduce_208
action_83 (70) = happyGoto action_214
action_83 _ = happyReduce_220
action_84 _ = happyReduce_168
action_85 (115) = happyShift action_213
action_85 _ = happyFail
action_86 _ = happyReduce_169
action_87 _ = happyReduce_142
action_88 _ = happyReduce_167
action_89 (121) = happyShift action_15
action_89 (123) = happyShift action_54
action_89 (124) = happyReduce_280
action_89 (125) = happyShift action_55
action_89 (48) = happyGoto action_207
action_89 (49) = happyGoto action_48
action_89 (55) = happyGoto action_208
action_89 (65) = happyGoto action_209
action_89 (67) = happyGoto action_51
action_89 (69) = happyGoto action_52
action_89 (105) = happyGoto action_210
action_89 (106) = happyGoto action_211
action_89 (107) = happyGoto action_212
action_89 (114) = happyGoto action_53
action_89 _ = happyReduce_219
action_90 (69) = happyGoto action_206
action_90 _ = happyReduce_219
action_91 (119) = happyShift action_40
action_91 (134) = happyShift action_180
action_91 (135) = happyShift action_181
action_91 (136) = happyShift action_182
action_91 (137) = happyShift action_183
action_91 (138) = happyShift action_184
action_91 (139) = happyShift action_185
action_91 (140) = happyShift action_186
action_91 (141) = happyShift action_187
action_91 (142) = happyShift action_188
action_91 (143) = happyShift action_189
action_91 (144) = happyShift action_190
action_91 (145) = happyShift action_191
action_91 (146) = happyShift action_192
action_91 (147) = happyShift action_193
action_91 (148) = happyShift action_194
action_91 (149) = happyShift action_195
action_91 (150) = happyShift action_196
action_91 (151) = happyShift action_197
action_91 (152) = happyShift action_198
action_91 (153) = happyShift action_199
action_91 (154) = happyShift action_200
action_91 (155) = happyShift action_201
action_91 (156) = happyShift action_202
action_91 (157) = happyShift action_203
action_91 (201) = happyShift action_204
action_91 (203) = happyShift action_205
action_91 (58) = happyGoto action_178
action_91 (59) = happyGoto action_179
action_91 _ = happyFail
action_92 (121) = happyShift action_15
action_92 (123) = happyShift action_54
action_92 (125) = happyShift action_55
action_92 (49) = happyGoto action_48
action_92 (55) = happyGoto action_177
action_92 (65) = happyGoto action_50
action_92 (67) = happyGoto action_51
action_92 (69) = happyGoto action_52
action_92 (114) = happyGoto action_53
action_92 _ = happyReduce_219
action_93 (122) = happyReduce_200
action_93 (56) = happyGoto action_174
action_93 (57) = happyGoto action_175
action_93 (69) = happyGoto action_176
action_93 _ = happyReduce_219
action_94 (121) = happyShift action_15
action_94 (49) = happyGoto action_140
action_94 (50) = happyGoto action_172
action_94 (65) = happyGoto action_173
action_94 (69) = happyGoto action_44
action_94 (114) = happyGoto action_53
action_94 _ = happyReduce_219
action_95 (69) = happyGoto action_171
action_95 _ = happyReduce_219
action_96 (69) = happyGoto action_170
action_96 _ = happyReduce_219
action_97 (69) = happyGoto action_169
action_97 _ = happyReduce_219
action_98 (69) = happyGoto action_168
action_98 _ = happyReduce_219
action_99 (69) = happyGoto action_167
action_99 _ = happyReduce_219
action_100 (69) = happyGoto action_166
action_100 _ = happyReduce_219
action_101 (69) = happyGoto action_165
action_101 _ = happyReduce_219
action_102 (69) = happyGoto action_164
action_102 _ = happyReduce_219
action_103 (69) = happyGoto action_163
action_103 _ = happyReduce_219
action_104 (69) = happyGoto action_162
action_104 _ = happyReduce_219
action_105 (69) = happyGoto action_161
action_105 _ = happyReduce_219
action_106 (69) = happyGoto action_160
action_106 _ = happyReduce_219
action_107 (69) = happyGoto action_159
action_107 _ = happyReduce_219
action_108 (69) = happyGoto action_158
action_108 _ = happyReduce_219
action_109 (69) = happyGoto action_157
action_109 _ = happyReduce_219
action_110 (69) = happyGoto action_156
action_110 _ = happyReduce_219
action_111 (69) = happyGoto action_155
action_111 _ = happyReduce_219
action_112 (69) = happyGoto action_154
action_112 _ = happyReduce_219
action_113 (69) = happyGoto action_153
action_113 _ = happyReduce_219
action_114 (69) = happyGoto action_152
action_114 _ = happyReduce_219
action_115 (69) = happyGoto action_151
action_115 _ = happyReduce_219
action_116 (69) = happyGoto action_150
action_116 _ = happyReduce_219
action_117 (69) = happyGoto action_149
action_117 _ = happyReduce_219
action_118 (121) = happyShift action_15
action_118 (4) = happyGoto action_148
action_118 (5) = happyGoto action_2
action_118 (6) = happyGoto action_3
action_118 (7) = happyGoto action_4
action_118 (16) = happyGoto action_5
action_118 (18) = happyGoto action_6
action_118 (20) = happyGoto action_7
action_118 (21) = happyGoto action_8
action_118 (23) = happyGoto action_9
action_118 (24) = happyGoto action_10
action_118 (30) = happyGoto action_11
action_118 (65) = happyGoto action_17
action_118 (76) = happyGoto action_13
action_118 (77) = happyGoto action_14
action_118 _ = happyReduce_3
action_119 _ = happyReduce_141
action_120 (121) = happyShift action_15
action_120 (65) = happyGoto action_147
action_120 _ = happyFail
action_121 (69) = happyGoto action_146
action_121 _ = happyReduce_219
action_122 (121) = happyShift action_15
action_122 (122) = happyReduce_265
action_122 (29) = happyGoto action_139
action_122 (49) = happyGoto action_140
action_122 (50) = happyGoto action_141
action_122 (65) = happyGoto action_142
action_122 (69) = happyGoto action_44
action_122 (96) = happyGoto action_143
action_122 (97) = happyGoto action_144
action_122 (98) = happyGoto action_145
action_122 (114) = happyGoto action_53
action_122 _ = happyReduce_219
action_123 (121) = happyShift action_15
action_123 (9) = happyGoto action_138
action_123 (13) = happyGoto action_70
action_123 (65) = happyGoto action_71
action_123 (69) = happyGoto action_72
action_123 (113) = happyGoto action_73
action_123 _ = happyReduce_219
action_124 (121) = happyShift action_15
action_124 (9) = happyGoto action_137
action_124 (13) = happyGoto action_70
action_124 (65) = happyGoto action_71
action_124 (69) = happyGoto action_72
action_124 (113) = happyGoto action_73
action_124 _ = happyReduce_219
action_125 (127) = happyShift action_126
action_125 (64) = happyGoto action_135
action_125 (69) = happyGoto action_136
action_125 _ = happyReduce_219
action_126 (115) = happyShift action_131
action_126 (117) = happyShift action_132
action_126 (119) = happyShift action_133
action_126 (166) = happyShift action_134
action_126 (62) = happyGoto action_130
action_126 _ = happyFail
action_127 (122) = happyShift action_129
action_127 (127) = happyShift action_126
action_127 (66) = happyGoto action_128
action_127 _ = happyFail
action_128 _ = happyReduce_230
action_129 _ = happyReduce_216
action_130 _ = happyReduce_204
action_131 _ = happyReduce_205
action_132 _ = happyReduce_206
action_133 _ = happyReduce_211
action_134 _ = happyReduce_212
action_135 (64) = happyGoto action_361
action_135 (69) = happyGoto action_136
action_135 _ = happyReduce_219
action_136 (115) = happyShift action_360
action_136 _ = happyFail
action_137 (69) = happyGoto action_359
action_137 _ = happyReduce_219
action_138 (69) = happyGoto action_358
action_138 _ = happyReduce_219
action_139 _ = happyReduce_261
action_140 _ = happyReduce_135
action_141 _ = happyReduce_68
action_142 (123) = happyShift action_54
action_142 (129) = happyShift action_21
action_142 (202) = happyShift action_94
action_142 (67) = happyGoto action_90
action_142 (69) = happyGoto action_357
action_142 (71) = happyGoto action_325
action_142 _ = happyReduce_219
action_143 (121) = happyShift action_15
action_143 (122) = happyReduce_263
action_143 (29) = happyGoto action_356
action_143 (49) = happyGoto action_140
action_143 (50) = happyGoto action_141
action_143 (65) = happyGoto action_142
action_143 (69) = happyGoto action_44
action_143 (114) = happyGoto action_53
action_143 _ = happyReduce_219
action_144 _ = happyReduce_264
action_145 (122) = happyShift action_129
action_145 (66) = happyGoto action_355
action_145 _ = happyFail
action_146 (121) = happyShift action_15
action_146 (49) = happyGoto action_140
action_146 (50) = happyGoto action_353
action_146 (51) = happyGoto action_354
action_146 (65) = happyGoto action_142
action_146 (69) = happyGoto action_44
action_146 (114) = happyGoto action_53
action_146 _ = happyReduce_219
action_147 (167) = happyShift action_352
action_147 _ = happyFail
action_148 (122) = happyShift action_129
action_148 (66) = happyGoto action_351
action_148 _ = happyFail
action_149 (121) = happyShift action_15
action_149 (9) = happyGoto action_350
action_149 (13) = happyGoto action_70
action_149 (65) = happyGoto action_71
action_149 (69) = happyGoto action_72
action_149 (113) = happyGoto action_73
action_149 _ = happyReduce_219
action_150 (121) = happyShift action_15
action_150 (9) = happyGoto action_349
action_150 (13) = happyGoto action_70
action_150 (65) = happyGoto action_71
action_150 (69) = happyGoto action_72
action_150 (113) = happyGoto action_73
action_150 _ = happyReduce_219
action_151 (121) = happyShift action_15
action_151 (123) = happyShift action_54
action_151 (125) = happyShift action_55
action_151 (49) = happyGoto action_48
action_151 (55) = happyGoto action_348
action_151 (65) = happyGoto action_50
action_151 (67) = happyGoto action_51
action_151 (69) = happyGoto action_52
action_151 (114) = happyGoto action_53
action_151 _ = happyReduce_219
action_152 (121) = happyShift action_15
action_152 (123) = happyShift action_54
action_152 (125) = happyShift action_55
action_152 (49) = happyGoto action_48
action_152 (55) = happyGoto action_347
action_152 (65) = happyGoto action_50
action_152 (67) = happyGoto action_51
action_152 (69) = happyGoto action_52
action_152 (114) = happyGoto action_53
action_152 _ = happyReduce_219
action_153 (121) = happyShift action_15
action_153 (123) = happyShift action_54
action_153 (125) = happyShift action_55
action_153 (49) = happyGoto action_48
action_153 (55) = happyGoto action_346
action_153 (65) = happyGoto action_50
action_153 (67) = happyGoto action_51
action_153 (69) = happyGoto action_52
action_153 (114) = happyGoto action_53
action_153 _ = happyReduce_219
action_154 (121) = happyShift action_15
action_154 (123) = happyShift action_54
action_154 (125) = happyShift action_55
action_154 (49) = happyGoto action_48
action_154 (55) = happyGoto action_345
action_154 (65) = happyGoto action_50
action_154 (67) = happyGoto action_51
action_154 (69) = happyGoto action_52
action_154 (114) = happyGoto action_53
action_154 _ = happyReduce_219
action_155 (121) = happyShift action_15
action_155 (123) = happyShift action_54
action_155 (125) = happyShift action_55
action_155 (49) = happyGoto action_48
action_155 (55) = happyGoto action_344
action_155 (65) = happyGoto action_50
action_155 (67) = happyGoto action_51
action_155 (69) = happyGoto action_52
action_155 (114) = happyGoto action_53
action_155 _ = happyReduce_219
action_156 (121) = happyShift action_15
action_156 (123) = happyShift action_54
action_156 (125) = happyShift action_55
action_156 (49) = happyGoto action_48
action_156 (55) = happyGoto action_343
action_156 (65) = happyGoto action_50
action_156 (67) = happyGoto action_51
action_156 (69) = happyGoto action_52
action_156 (114) = happyGoto action_53
action_156 _ = happyReduce_219
action_157 (121) = happyShift action_15
action_157 (9) = happyGoto action_342
action_157 (13) = happyGoto action_70
action_157 (65) = happyGoto action_71
action_157 (69) = happyGoto action_72
action_157 (113) = happyGoto action_73
action_157 _ = happyReduce_219
action_158 (121) = happyShift action_15
action_158 (9) = happyGoto action_341
action_158 (13) = happyGoto action_70
action_158 (65) = happyGoto action_71
action_158 (69) = happyGoto action_72
action_158 (113) = happyGoto action_73
action_158 _ = happyReduce_219
action_159 (121) = happyShift action_15
action_159 (9) = happyGoto action_340
action_159 (13) = happyGoto action_70
action_159 (65) = happyGoto action_71
action_159 (69) = happyGoto action_72
action_159 (113) = happyGoto action_73
action_159 _ = happyReduce_219
action_160 (121) = happyShift action_15
action_160 (9) = happyGoto action_339
action_160 (13) = happyGoto action_70
action_160 (65) = happyGoto action_71
action_160 (69) = happyGoto action_72
action_160 (113) = happyGoto action_73
action_160 _ = happyReduce_219
action_161 (121) = happyShift action_15
action_161 (9) = happyGoto action_338
action_161 (13) = happyGoto action_70
action_161 (65) = happyGoto action_71
action_161 (69) = happyGoto action_72
action_161 (113) = happyGoto action_73
action_161 _ = happyReduce_219
action_162 (121) = happyShift action_15
action_162 (9) = happyGoto action_337
action_162 (13) = happyGoto action_70
action_162 (65) = happyGoto action_71
action_162 (69) = happyGoto action_72
action_162 (113) = happyGoto action_73
action_162 _ = happyReduce_219
action_163 (121) = happyShift action_15
action_163 (9) = happyGoto action_336
action_163 (13) = happyGoto action_70
action_163 (65) = happyGoto action_71
action_163 (69) = happyGoto action_72
action_163 (113) = happyGoto action_73
action_163 _ = happyReduce_219
action_164 (121) = happyShift action_15
action_164 (9) = happyGoto action_335
action_164 (13) = happyGoto action_70
action_164 (65) = happyGoto action_71
action_164 (69) = happyGoto action_72
action_164 (113) = happyGoto action_73
action_164 _ = happyReduce_219
action_165 (121) = happyShift action_15
action_165 (9) = happyGoto action_334
action_165 (13) = happyGoto action_70
action_165 (65) = happyGoto action_71
action_165 (69) = happyGoto action_72
action_165 (113) = happyGoto action_73
action_165 _ = happyReduce_219
action_166 (121) = happyShift action_15
action_166 (49) = happyGoto action_140
action_166 (50) = happyGoto action_327
action_166 (53) = happyGoto action_333
action_166 (65) = happyGoto action_142
action_166 (69) = happyGoto action_44
action_166 (114) = happyGoto action_53
action_166 _ = happyReduce_219
action_167 (121) = happyShift action_15
action_167 (49) = happyGoto action_140
action_167 (50) = happyGoto action_327
action_167 (53) = happyGoto action_332
action_167 (65) = happyGoto action_142
action_167 (69) = happyGoto action_44
action_167 (114) = happyGoto action_53
action_167 _ = happyReduce_219
action_168 (121) = happyShift action_15
action_168 (49) = happyGoto action_140
action_168 (50) = happyGoto action_327
action_168 (53) = happyGoto action_331
action_168 (65) = happyGoto action_142
action_168 (69) = happyGoto action_44
action_168 (114) = happyGoto action_53
action_168 _ = happyReduce_219
action_169 (121) = happyShift action_15
action_169 (49) = happyGoto action_140
action_169 (50) = happyGoto action_327
action_169 (53) = happyGoto action_330
action_169 (65) = happyGoto action_142
action_169 (69) = happyGoto action_44
action_169 (114) = happyGoto action_53
action_169 _ = happyReduce_219
action_170 (121) = happyShift action_15
action_170 (49) = happyGoto action_140
action_170 (50) = happyGoto action_327
action_170 (53) = happyGoto action_329
action_170 (65) = happyGoto action_142
action_170 (69) = happyGoto action_44
action_170 (114) = happyGoto action_53
action_170 _ = happyReduce_219
action_171 (121) = happyShift action_15
action_171 (49) = happyGoto action_140
action_171 (50) = happyGoto action_327
action_171 (53) = happyGoto action_328
action_171 (65) = happyGoto action_142
action_171 (69) = happyGoto action_44
action_171 (114) = happyGoto action_53
action_171 _ = happyReduce_219
action_172 (69) = happyGoto action_326
action_172 _ = happyReduce_219
action_173 (121) = happyShift action_15
action_173 (123) = happyShift action_54
action_173 (129) = happyShift action_21
action_173 (202) = happyShift action_94
action_173 (49) = happyGoto action_140
action_173 (50) = happyGoto action_323
action_173 (65) = happyGoto action_142
action_173 (67) = happyGoto action_90
action_173 (69) = happyGoto action_324
action_173 (71) = happyGoto action_325
action_173 (114) = happyGoto action_53
action_173 _ = happyReduce_219
action_174 (69) = happyGoto action_322
action_174 _ = happyReduce_219
action_175 (122) = happyReduce_199
action_175 (69) = happyGoto action_321
action_175 _ = happyReduce_219
action_176 (121) = happyShift action_15
action_176 (123) = happyShift action_54
action_176 (125) = happyShift action_55
action_176 (49) = happyGoto action_48
action_176 (55) = happyGoto action_320
action_176 (65) = happyGoto action_50
action_176 (67) = happyGoto action_51
action_176 (69) = happyGoto action_52
action_176 (114) = happyGoto action_53
action_176 _ = happyReduce_219
action_177 (72) = happyGoto action_319
action_177 _ = happyReduce_222
action_178 (127) = happyShift action_126
action_178 _ = happyReduce_207
action_179 (70) = happyGoto action_318
action_179 _ = happyReduce_220
action_180 (121) = happyShift action_15
action_180 (123) = happyShift action_54
action_180 (125) = happyShift action_55
action_180 (49) = happyGoto action_48
action_180 (55) = happyGoto action_317
action_180 (65) = happyGoto action_50
action_180 (67) = happyGoto action_51
action_180 (69) = happyGoto action_52
action_180 (114) = happyGoto action_53
action_180 _ = happyReduce_219
action_181 (121) = happyShift action_15
action_181 (123) = happyShift action_54
action_181 (125) = happyShift action_55
action_181 (49) = happyGoto action_48
action_181 (55) = happyGoto action_316
action_181 (65) = happyGoto action_50
action_181 (67) = happyGoto action_51
action_181 (69) = happyGoto action_52
action_181 (114) = happyGoto action_53
action_181 _ = happyReduce_219
action_182 (121) = happyShift action_15
action_182 (123) = happyShift action_54
action_182 (125) = happyShift action_55
action_182 (49) = happyGoto action_48
action_182 (55) = happyGoto action_315
action_182 (65) = happyGoto action_50
action_182 (67) = happyGoto action_51
action_182 (69) = happyGoto action_52
action_182 (114) = happyGoto action_53
action_182 _ = happyReduce_219
action_183 (121) = happyShift action_15
action_183 (123) = happyShift action_54
action_183 (125) = happyShift action_55
action_183 (49) = happyGoto action_48
action_183 (55) = happyGoto action_314
action_183 (65) = happyGoto action_50
action_183 (67) = happyGoto action_51
action_183 (69) = happyGoto action_52
action_183 (114) = happyGoto action_53
action_183 _ = happyReduce_219
action_184 (121) = happyShift action_15
action_184 (123) = happyShift action_54
action_184 (125) = happyShift action_55
action_184 (49) = happyGoto action_48
action_184 (55) = happyGoto action_313
action_184 (65) = happyGoto action_50
action_184 (67) = happyGoto action_51
action_184 (69) = happyGoto action_52
action_184 (114) = happyGoto action_53
action_184 _ = happyReduce_219
action_185 (121) = happyShift action_15
action_185 (123) = happyShift action_54
action_185 (125) = happyShift action_55
action_185 (49) = happyGoto action_48
action_185 (55) = happyGoto action_312
action_185 (65) = happyGoto action_50
action_185 (67) = happyGoto action_51
action_185 (69) = happyGoto action_52
action_185 (114) = happyGoto action_53
action_185 _ = happyReduce_219
action_186 (121) = happyShift action_15
action_186 (123) = happyShift action_54
action_186 (125) = happyShift action_55
action_186 (49) = happyGoto action_48
action_186 (55) = happyGoto action_311
action_186 (65) = happyGoto action_50
action_186 (67) = happyGoto action_51
action_186 (69) = happyGoto action_52
action_186 (114) = happyGoto action_53
action_186 _ = happyReduce_219
action_187 (121) = happyShift action_15
action_187 (9) = happyGoto action_310
action_187 (13) = happyGoto action_70
action_187 (65) = happyGoto action_71
action_187 (69) = happyGoto action_72
action_187 (113) = happyGoto action_73
action_187 _ = happyReduce_219
action_188 (121) = happyShift action_15
action_188 (9) = happyGoto action_309
action_188 (13) = happyGoto action_70
action_188 (65) = happyGoto action_71
action_188 (69) = happyGoto action_72
action_188 (113) = happyGoto action_73
action_188 _ = happyReduce_219
action_189 (121) = happyShift action_15
action_189 (9) = happyGoto action_308
action_189 (13) = happyGoto action_70
action_189 (65) = happyGoto action_71
action_189 (69) = happyGoto action_72
action_189 (113) = happyGoto action_73
action_189 _ = happyReduce_219
action_190 (121) = happyShift action_15
action_190 (9) = happyGoto action_307
action_190 (13) = happyGoto action_70
action_190 (65) = happyGoto action_71
action_190 (69) = happyGoto action_72
action_190 (113) = happyGoto action_73
action_190 _ = happyReduce_219
action_191 (121) = happyShift action_15
action_191 (9) = happyGoto action_306
action_191 (13) = happyGoto action_70
action_191 (65) = happyGoto action_71
action_191 (69) = happyGoto action_72
action_191 (113) = happyGoto action_73
action_191 _ = happyReduce_219
action_192 (121) = happyShift action_15
action_192 (9) = happyGoto action_305
action_192 (13) = happyGoto action_70
action_192 (65) = happyGoto action_71
action_192 (69) = happyGoto action_72
action_192 (113) = happyGoto action_73
action_192 _ = happyReduce_219
action_193 (121) = happyShift action_15
action_193 (123) = happyShift action_54
action_193 (125) = happyShift action_55
action_193 (49) = happyGoto action_48
action_193 (55) = happyGoto action_304
action_193 (65) = happyGoto action_50
action_193 (67) = happyGoto action_51
action_193 (69) = happyGoto action_52
action_193 (114) = happyGoto action_53
action_193 _ = happyReduce_219
action_194 (121) = happyShift action_15
action_194 (123) = happyShift action_54
action_194 (125) = happyShift action_55
action_194 (49) = happyGoto action_48
action_194 (55) = happyGoto action_303
action_194 (65) = happyGoto action_50
action_194 (67) = happyGoto action_51
action_194 (69) = happyGoto action_52
action_194 (114) = happyGoto action_53
action_194 _ = happyReduce_219
action_195 (121) = happyShift action_15
action_195 (123) = happyShift action_54
action_195 (125) = happyShift action_55
action_195 (49) = happyGoto action_48
action_195 (55) = happyGoto action_302
action_195 (65) = happyGoto action_50
action_195 (67) = happyGoto action_51
action_195 (69) = happyGoto action_52
action_195 (114) = happyGoto action_53
action_195 _ = happyReduce_219
action_196 (121) = happyShift action_15
action_196 (123) = happyShift action_54
action_196 (125) = happyShift action_55
action_196 (49) = happyGoto action_48
action_196 (55) = happyGoto action_301
action_196 (65) = happyGoto action_50
action_196 (67) = happyGoto action_51
action_196 (69) = happyGoto action_52
action_196 (114) = happyGoto action_53
action_196 _ = happyReduce_219
action_197 (121) = happyShift action_15
action_197 (123) = happyShift action_54
action_197 (125) = happyShift action_55
action_197 (49) = happyGoto action_48
action_197 (55) = happyGoto action_300
action_197 (65) = happyGoto action_50
action_197 (67) = happyGoto action_51
action_197 (69) = happyGoto action_52
action_197 (114) = happyGoto action_53
action_197 _ = happyReduce_219
action_198 (121) = happyShift action_15
action_198 (123) = happyShift action_54
action_198 (125) = happyShift action_55
action_198 (49) = happyGoto action_48
action_198 (55) = happyGoto action_299
action_198 (65) = happyGoto action_50
action_198 (67) = happyGoto action_51
action_198 (69) = happyGoto action_52
action_198 (114) = happyGoto action_53
action_198 _ = happyReduce_219
action_199 (121) = happyShift action_15
action_199 (9) = happyGoto action_298
action_199 (13) = happyGoto action_70
action_199 (65) = happyGoto action_71
action_199 (69) = happyGoto action_72
action_199 (113) = happyGoto action_73
action_199 _ = happyReduce_219
action_200 (121) = happyShift action_15
action_200 (9) = happyGoto action_297
action_200 (13) = happyGoto action_70
action_200 (65) = happyGoto action_71
action_200 (69) = happyGoto action_72
action_200 (113) = happyGoto action_73
action_200 _ = happyReduce_219
action_201 (121) = happyShift action_15
action_201 (123) = happyShift action_54
action_201 (125) = happyShift action_55
action_201 (49) = happyGoto action_48
action_201 (55) = happyGoto action_296
action_201 (65) = happyGoto action_50
action_201 (67) = happyGoto action_51
action_201 (69) = happyGoto action_52
action_201 (114) = happyGoto action_53
action_201 _ = happyReduce_219
action_202 (121) = happyShift action_15
action_202 (123) = happyShift action_54
action_202 (125) = happyShift action_55
action_202 (49) = happyGoto action_48
action_202 (55) = happyGoto action_295
action_202 (65) = happyGoto action_50
action_202 (67) = happyGoto action_51
action_202 (69) = happyGoto action_52
action_202 (114) = happyGoto action_53
action_202 _ = happyReduce_219
action_203 (121) = happyShift action_15
action_203 (123) = happyShift action_54
action_203 (125) = happyShift action_55
action_203 (49) = happyGoto action_48
action_203 (55) = happyGoto action_294
action_203 (65) = happyGoto action_50
action_203 (67) = happyGoto action_51
action_203 (69) = happyGoto action_52
action_203 (114) = happyGoto action_53
action_203 _ = happyReduce_219
action_204 (121) = happyShift action_15
action_204 (49) = happyGoto action_140
action_204 (50) = happyGoto action_293
action_204 (65) = happyGoto action_142
action_204 (69) = happyGoto action_44
action_204 (114) = happyGoto action_53
action_204 _ = happyReduce_219
action_205 (121) = happyShift action_15
action_205 (49) = happyGoto action_140
action_205 (50) = happyGoto action_292
action_205 (65) = happyGoto action_142
action_205 (69) = happyGoto action_44
action_205 (114) = happyGoto action_53
action_205 _ = happyReduce_219
action_206 (121) = happyShift action_15
action_206 (123) = happyShift action_54
action_206 (124) = happyReduce_280
action_206 (125) = happyShift action_55
action_206 (48) = happyGoto action_207
action_206 (49) = happyGoto action_48
action_206 (55) = happyGoto action_208
action_206 (65) = happyGoto action_209
action_206 (67) = happyGoto action_51
action_206 (69) = happyGoto action_52
action_206 (105) = happyGoto action_210
action_206 (106) = happyGoto action_211
action_206 (107) = happyGoto action_291
action_206 (114) = happyGoto action_53
action_206 _ = happyReduce_219
action_207 _ = happyReduce_276
action_208 (69) = happyGoto action_290
action_208 _ = happyReduce_219
action_209 (123) = happyShift action_54
action_209 (129) = happyShift action_21
action_209 (202) = happyShift action_94
action_209 (205) = happyShift action_95
action_209 (206) = happyShift action_96
action_209 (207) = happyShift action_97
action_209 (209) = happyShift action_98
action_209 (210) = happyShift action_99
action_209 (213) = happyShift action_100
action_209 (216) = happyShift action_101
action_209 (217) = happyShift action_102
action_209 (218) = happyShift action_103
action_209 (219) = happyShift action_104
action_209 (220) = happyShift action_105
action_209 (223) = happyShift action_106
action_209 (224) = happyShift action_107
action_209 (225) = happyShift action_108
action_209 (226) = happyShift action_109
action_209 (229) = happyShift action_110
action_209 (230) = happyShift action_111
action_209 (231) = happyShift action_112
action_209 (232) = happyShift action_113
action_209 (235) = happyShift action_114
action_209 (236) = happyShift action_115
action_209 (237) = happyShift action_116
action_209 (238) = happyShift action_117
action_209 (67) = happyGoto action_90
action_209 (69) = happyGoto action_289
action_209 (71) = happyGoto action_92
action_209 (113) = happyGoto action_93
action_209 _ = happyReduce_219
action_210 (121) = happyShift action_15
action_210 (123) = happyShift action_54
action_210 (124) = happyReduce_278
action_210 (125) = happyShift action_55
action_210 (48) = happyGoto action_288
action_210 (49) = happyGoto action_48
action_210 (55) = happyGoto action_208
action_210 (65) = happyGoto action_209
action_210 (67) = happyGoto action_51
action_210 (69) = happyGoto action_52
action_210 (114) = happyGoto action_53
action_210 _ = happyReduce_219
action_211 _ = happyReduce_279
action_212 (124) = happyShift action_287
action_212 (68) = happyGoto action_286
action_212 _ = happyFail
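-- Note (same assumed Happy conventions as above): a state whose fall-through
-- case is `happyFail`, like action_212, has no default reduction; any lookahead
-- token without an explicit entry there is reported as a parse error, whereas
-- states ending in `happyReduce_r` reduce by production r on every
-- otherwise-unhandled token.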
action_213 _ = happyReduce_213
action_214 _ = happyReduce_289
action_215 (119) = happyShift action_265
action_215 (166) = happyShift action_266
action_215 (61) = happyGoto action_285
action_215 _ = happyFail
action_216 (126) = happyShift action_284
action_216 _ = happyFail
action_217 (70) = happyGoto action_283
action_217 _ = happyReduce_220
action_218 (121) = happyShift action_15
action_218 (123) = happyShift action_54
action_218 (125) = happyShift action_55
action_218 (49) = happyGoto action_48
action_218 (55) = happyGoto action_282
action_218 (65) = happyGoto action_50
action_218 (67) = happyGoto action_51
action_218 (69) = happyGoto action_52
action_218 (114) = happyGoto action_53
action_218 _ = happyReduce_219
action_219 (122) = happyShift action_129
action_219 (66) = happyGoto action_281
action_219 _ = happyFail
action_220 _ = happyReduce_256
action_221 (129) = happyShift action_21
action_221 (69) = happyGoto action_215
action_221 (71) = happyGoto action_279
action_221 (112) = happyGoto action_280
action_221 _ = happyReduce_219
action_222 (121) = happyShift action_15
action_222 (26) = happyGoto action_278
action_222 (65) = happyGoto action_221
action_222 _ = happyReduce_258
action_223 _ = happyReduce_259
action_224 _ = happyReduce_56
action_225 (122) = happyShift action_129
action_225 (66) = happyGoto action_277
action_225 _ = happyFail
action_226 _ = happyReduce_18
action_227 _ = happyReduce_221
action_228 (122) = happyShift action_129
action_228 (66) = happyGoto action_276
action_228 _ = happyFail
action_229 (119) = happyShift action_40
action_229 (129) = happyShift action_21
action_229 (58) = happyGoto action_178
action_229 (59) = happyGoto action_274
action_229 (71) = happyGoto action_275
action_229 _ = happyFail
action_230 _ = happyReduce_51
action_231 (119) = happyShift action_40
action_231 (167) = happyShift action_273
action_231 (58) = happyGoto action_178
action_231 (59) = happyGoto action_179
action_231 _ = happyFail
action_232 (121) = happyShift action_15
action_232 (13) = happyGoto action_267
action_232 (14) = happyGoto action_268
action_232 (65) = happyGoto action_269
action_232 (69) = happyGoto action_72
action_232 (87) = happyGoto action_270
action_232 (88) = happyGoto action_271
action_232 (113) = happyGoto action_272
action_232 _ = happyReduce_219
action_233 (118) = happyShift action_264
action_233 (119) = happyShift action_265
action_233 (166) = happyShift action_266
action_233 (8) = happyGoto action_259
action_233 (61) = happyGoto action_260
action_233 (78) = happyGoto action_261
action_233 (79) = happyGoto action_262
action_233 (80) = happyGoto action_263
action_233 _ = happyReduce_235
action_234 (121) = happyShift action_15
action_234 (65) = happyGoto action_258
action_234 _ = happyFail
action_235 (121) = happyShift action_15
action_235 (11) = happyGoto action_253
action_235 (65) = happyGoto action_254
action_235 (84) = happyGoto action_255
action_235 (85) = happyGoto action_256
action_235 (86) = happyGoto action_257
action_235 _ = happyReduce_245
action_236 (121) = happyShift action_15
action_236 (13) = happyGoto action_251
action_236 (65) = happyGoto action_252
action_236 _ = happyFail
action_237 (121) = happyShift action_15
action_237 (9) = happyGoto action_250
action_237 (13) = happyGoto action_70
action_237 (65) = happyGoto action_71
action_237 (69) = happyGoto action_72
action_237 (113) = happyGoto action_73
action_237 _ = happyReduce_219
action_238 (69) = happyGoto action_249
action_238 _ = happyReduce_219
action_239 (69) = happyGoto action_248
action_239 _ = happyReduce_219
action_240 (69) = happyGoto action_247
action_240 _ = happyReduce_219
action_241 (122) = happyShift action_129
action_241 (66) = happyGoto action_246
action_241 _ = happyFail
action_242 (121) = happyShift action_15
action_242 (122) = happyReduce_46
action_242 (123) = happyShift action_54
action_242 (125) = happyShift action_55
action_242 (49) = happyGoto action_48
action_242 (55) = happyGoto action_245
action_242 (65) = happyGoto action_50
action_242 (67) = happyGoto action_51
action_242 (69) = happyGoto action_52
action_242 (114) = happyGoto action_53
action_242 _ = happyReduce_219
action_243 _ = happyReduce_2
action_244 _ = happyReduce_15
action_245 _ = happyReduce_45
action_246 _ = happyReduce_44
action_247 (121) = happyShift action_15
action_247 (123) = happyShift action_54
action_247 (125) = happyShift action_55
action_247 (49) = happyGoto action_48
action_247 (55) = happyGoto action_466
action_247 (65) = happyGoto action_50
action_247 (67) = happyGoto action_51
action_247 (69) = happyGoto action_52
action_247 (114) = happyGoto action_53
action_247 _ = happyReduce_219
action_248 (121) = happyShift action_15
action_248 (123) = happyShift action_54
action_248 (125) = happyShift action_55
action_248 (49) = happyGoto action_48
action_248 (55) = happyGoto action_465
action_248 (65) = happyGoto action_50
action_248 (67) = happyGoto action_51
action_248 (69) = happyGoto action_52
action_248 (114) = happyGoto action_53
action_248 _ = happyReduce_219
action_249 (119) = happyShift action_40
action_249 (58) = happyGoto action_178
action_249 (59) = happyGoto action_464
action_249 _ = happyFail
action_250 (122) = happyShift action_129
action_250 (66) = happyGoto action_463
action_250 _ = happyFail
action_251 (119) = happyShift action_265
action_251 (166) = happyShift action_266
action_251 (61) = happyGoto action_462
action_251 _ = happyFail
action_252 (233) = happyShift action_239
action_252 (234) = happyShift action_240
action_252 (69) = happyGoto action_461
action_252 _ = happyReduce_219
action_253 _ = happyReduce_241
action_254 (69) = happyGoto action_460
action_254 _ = happyReduce_219
action_255 (121) = happyShift action_15
action_255 (11) = happyGoto action_459
action_255 (65) = happyGoto action_254
action_255 _ = happyReduce_243
action_256 _ = happyReduce_244
action_257 (122) = happyShift action_129
action_257 (66) = happyGoto action_458
action_257 _ = happyFail
action_258 (121) = happyShift action_15
action_258 (123) = happyShift action_54
action_258 (13) = happyGoto action_267
action_258 (14) = happyGoto action_453
action_258 (15) = happyGoto action_454
action_258 (65) = happyGoto action_269
action_258 (67) = happyGoto action_455
action_258 (69) = happyGoto action_72
action_258 (90) = happyGoto action_456
action_258 (91) = happyGoto action_457
action_258 (113) = happyGoto action_272
action_258 _ = happyReduce_219
action_259 _ = happyReduce_231
action_260 _ = happyReduce_20
action_261 (118) = happyShift action_264
action_261 (119) = happyShift action_265
action_261 (166) = happyShift action_266
action_261 (8) = happyGoto action_452
action_261 (61) = happyGoto action_260
action_261 _ = happyReduce_233
action_262 _ = happyReduce_234
action_263 (122) = happyShift action_129
action_263 (66) = happyGoto action_451
action_263 _ = happyFail
action_264 _ = happyReduce_19
action_265 _ = happyReduce_209
action_266 _ = happyReduce_210
action_267 _ = happyReduce_37
action_268 _ = happyReduce_246
action_269 (233) = happyShift action_239
action_269 (234) = happyShift action_240
action_269 (69) = happyGoto action_231
action_269 (113) = happyGoto action_450
action_269 _ = happyReduce_219
action_270 (121) = happyShift action_15
action_270 (122) = happyReduce_248
action_270 (13) = happyGoto action_267
action_270 (14) = happyGoto action_449
action_270 (65) = happyGoto action_269
action_270 (69) = happyGoto action_72
action_270 (113) = happyGoto action_272
action_270 _ = happyReduce_219
action_271 (122) = happyShift action_129
action_271 (66) = happyGoto action_448
action_271 _ = happyFail
action_272 _ = happyReduce_38
action_273 (121) = happyShift action_15
action_273 (123) = happyShift action_54
action_273 (125) = happyShift action_55
action_273 (49) = happyGoto action_48
action_273 (55) = happyGoto action_447
action_273 (65) = happyGoto action_50
action_273 (67) = happyGoto action_51
action_273 (69) = happyGoto action_52
action_273 (114) = happyGoto action_53
action_273 _ = happyReduce_219
action_274 (69) = happyGoto action_446
action_274 _ = happyReduce_219
action_275 (121) = happyShift action_15
action_275 (22) = happyGoto action_445
action_275 (65) = happyGoto action_229
action_275 (69) = happyGoto action_230
action_275 _ = happyReduce_219
action_276 _ = happyReduce_48
action_277 (121) = happyShift action_15
action_277 (9) = happyGoto action_444
action_277 (13) = happyGoto action_70
action_277 (65) = happyGoto action_71
action_277 (69) = happyGoto action_72
action_277 (113) = happyGoto action_73
action_277 _ = happyReduce_219
action_278 _ = happyReduce_257
action_279 (121) = happyShift action_15
action_279 (26) = happyGoto action_443
action_279 (65) = happyGoto action_221
action_279 _ = happyFail
action_280 (128) = happyShift action_442
action_280 _ = happyFail
action_281 (121) = happyShift action_15
action_281 (31) = happyGoto action_437
action_281 (32) = happyGoto action_438
action_281 (44) = happyGoto action_439
action_281 (45) = happyGoto action_440
action_281 (65) = happyGoto action_441
action_281 _ = happyReduce_119
action_282 (122) = happyShift action_129
action_282 (66) = happyGoto action_436
action_282 _ = happyFail
action_283 _ = happyReduce_286
action_284 _ = happyReduce_170
action_285 (70) = happyGoto action_435
action_285 _ = happyReduce_220
action_286 _ = happyReduce_143
action_287 _ = happyReduce_218
action_288 _ = happyReduce_277
action_289 (119) = happyShift action_40
action_289 (130) = happyShift action_431
action_289 (131) = happyShift action_432
action_289 (132) = happyShift action_433
action_289 (133) = happyShift action_434
action_289 (134) = happyShift action_180
action_289 (135) = happyShift action_181
action_289 (136) = happyShift action_182
action_289 (137) = happyShift action_183
action_289 (138) = happyShift action_184
action_289 (139) = happyShift action_185
action_289 (140) = happyShift action_186
action_289 (141) = happyShift action_187
action_289 (142) = happyShift action_188
action_289 (143) = happyShift action_189
action_289 (144) = happyShift action_190
action_289 (145) = happyShift action_191
action_289 (146) = happyShift action_192
action_289 (147) = happyShift action_193
action_289 (148) = happyShift action_194
action_289 (149) = happyShift action_195
action_289 (150) = happyShift action_196
action_289 (151) = happyShift action_197
action_289 (152) = happyShift action_198
action_289 (153) = happyShift action_199
action_289 (154) = happyShift action_200
action_289 (155) = happyShift action_201
action_289 (156) = happyShift action_202
action_289 (157) = happyShift action_203
action_289 (201) = happyShift action_204
action_289 (203) = happyShift action_205
action_289 (58) = happyGoto action_178
action_289 (59) = happyGoto action_179
action_289 _ = happyFail
action_290 _ = happyReduce_124
action_291 (124) = happyShift action_287
action_291 (68) = happyGoto action_430
action_291 _ = happyFail
action_292 (121) = happyShift action_15
action_292 (13) = happyGoto action_267
action_292 (14) = happyGoto action_429
action_292 (65) = happyGoto action_269
action_292 (69) = happyGoto action_72
action_292 (113) = happyGoto action_272
action_292 _ = happyReduce_219
action_293 (122) = happyReduce_200
action_293 (56) = happyGoto action_428
action_293 (57) = happyGoto action_175
action_293 (69) = happyGoto action_176
action_293 _ = happyReduce_219
action_294 (121) = happyShift action_15
action_294 (123) = happyShift action_54
action_294 (125) = happyShift action_55
action_294 (49) = happyGoto action_48
action_294 (55) = happyGoto action_427
action_294 (65) = happyGoto action_50
action_294 (67) = happyGoto action_51
action_294 (69) = happyGoto action_52
action_294 (114) = happyGoto action_53
action_294 _ = happyReduce_219
action_295 (121) = happyShift action_15
action_295 (123) = happyShift action_54
action_295 (125) = happyShift action_55
action_295 (49) = happyGoto action_48
action_295 (55) = happyGoto action_426
action_295 (65) = happyGoto action_50
action_295 (67) = happyGoto action_51
action_295 (69) = happyGoto action_52
action_295 (114) = happyGoto action_53
action_295 _ = happyReduce_219
action_296 (121) = happyShift action_15
action_296 (123) = happyShift action_54
action_296 (125) = happyShift action_55
action_296 (49) = happyGoto action_48
action_296 (55) = happyGoto action_425
action_296 (65) = happyGoto action_50
action_296 (67) = happyGoto action_51
action_296 (69) = happyGoto action_52
action_296 (114) = happyGoto action_53
action_296 _ = happyReduce_219
action_297 (121) = happyShift action_15
action_297 (9) = happyGoto action_424
action_297 (13) = happyGoto action_70
action_297 (65) = happyGoto action_71
action_297 (69) = happyGoto action_72
action_297 (113) = happyGoto action_73
action_297 _ = happyReduce_219
action_298 (121) = happyShift action_15
action_298 (9) = happyGoto action_423
action_298 (13) = happyGoto action_70
action_298 (65) = happyGoto action_71
action_298 (69) = happyGoto action_72
action_298 (113) = happyGoto action_73
action_298 _ = happyReduce_219
action_299 (121) = happyShift action_15
action_299 (123) = happyShift action_54
action_299 (125) = happyShift action_55
action_299 (49) = happyGoto action_48
action_299 (55) = happyGoto action_422
action_299 (65) = happyGoto action_50
action_299 (67) = happyGoto action_51
action_299 (69) = happyGoto action_52
action_299 (114) = happyGoto action_53
action_299 _ = happyReduce_219
action_300 (121) = happyShift action_15
action_300 (123) = happyShift action_54
action_300 (125) = happyShift action_55
action_300 (49) = happyGoto action_48
action_300 (55) = happyGoto action_421
action_300 (65) = happyGoto action_50
action_300 (67) = happyGoto action_51
action_300 (69) = happyGoto action_52
action_300 (114) = happyGoto action_53
action_300 _ = happyReduce_219
action_301 (121) = happyShift action_15
action_301 (123) = happyShift action_54
action_301 (125) = happyShift action_55
action_301 (49) = happyGoto action_48
action_301 (55) = happyGoto action_420
action_301 (65) = happyGoto action_50
action_301 (67) = happyGoto action_51
action_301 (69) = happyGoto action_52
action_301 (114) = happyGoto action_53
action_301 _ = happyReduce_219
action_302 (122) = happyShift action_129
action_302 (66) = happyGoto action_419
action_302 _ = happyFail
action_303 (121) = happyShift action_15
action_303 (122) = happyShift action_129
action_303 (123) = happyShift action_54
action_303 (125) = happyShift action_55
action_303 (49) = happyGoto action_48
action_303 (55) = happyGoto action_417
action_303 (65) = happyGoto action_50
action_303 (66) = happyGoto action_418
action_303 (67) = happyGoto action_51
action_303 (69) = happyGoto action_52
action_303 (114) = happyGoto action_53
action_303 _ = happyReduce_219
action_304 (121) = happyShift action_15
action_304 (122) = happyShift action_129
action_304 (123) = happyShift action_54
action_304 (125) = happyShift action_55
action_304 (49) = happyGoto action_48
action_304 (55) = happyGoto action_415
action_304 (65) = happyGoto action_50
action_304 (66) = happyGoto action_416
action_304 (67) = happyGoto action_51
action_304 (69) = happyGoto action_52
action_304 (114) = happyGoto action_53
action_304 _ = happyReduce_219
action_305 (121) = happyShift action_15
action_305 (123) = happyShift action_54
action_305 (125) = happyShift action_55
action_305 (49) = happyGoto action_48
action_305 (55) = happyGoto action_414
action_305 (65) = happyGoto action_50
action_305 (67) = happyGoto action_51
action_305 (69) = happyGoto action_52
action_305 (114) = happyGoto action_53
action_305 _ = happyReduce_219
action_306 (121) = happyShift action_15
action_306 (123) = happyShift action_54
action_306 (125) = happyShift action_55
action_306 (49) = happyGoto action_48
action_306 (55) = happyGoto action_413
action_306 (65) = happyGoto action_50
action_306 (67) = happyGoto action_51
action_306 (69) = happyGoto action_52
action_306 (114) = happyGoto action_53
action_306 _ = happyReduce_219
action_307 (121) = happyShift action_15
action_307 (123) = happyShift action_54
action_307 (125) = happyShift action_55
action_307 (49) = happyGoto action_48
action_307 (55) = happyGoto action_412
action_307 (65) = happyGoto action_50
action_307 (67) = happyGoto action_51
action_307 (69) = happyGoto action_52
action_307 (114) = happyGoto action_53
action_307 _ = happyReduce_219
action_308 (121) = happyShift action_15
action_308 (123) = happyShift action_54
action_308 (125) = happyShift action_55
action_308 (49) = happyGoto action_48
action_308 (55) = happyGoto action_411
action_308 (65) = happyGoto action_50
action_308 (67) = happyGoto action_51
action_308 (69) = happyGoto action_52
action_308 (114) = happyGoto action_53
action_308 _ = happyReduce_219
action_309 (121) = happyShift action_15
action_309 (123) = happyShift action_54
action_309 (125) = happyShift action_55
action_309 (49) = happyGoto action_48
action_309 (55) = happyGoto action_410
action_309 (65) = happyGoto action_50
action_309 (67) = happyGoto action_51
action_309 (69) = happyGoto action_52
action_309 (114) = happyGoto action_53
action_309 _ = happyReduce_219
action_310 (121) = happyShift action_15
action_310 (123) = happyShift action_54
action_310 (125) = happyShift action_55
action_310 (49) = happyGoto action_48
action_310 (55) = happyGoto action_409
action_310 (65) = happyGoto action_50
action_310 (67) = happyGoto action_51
action_310 (69) = happyGoto action_52
action_310 (114) = happyGoto action_53
action_310 _ = happyReduce_219
action_311 (122) = happyShift action_129
action_311 (66) = happyGoto action_408
action_311 _ = happyFail
action_312 (121) = happyShift action_15
action_312 (123) = happyShift action_54
action_312 (125) = happyShift action_55
action_312 (49) = happyGoto action_48
action_312 (55) = happyGoto action_407
action_312 (65) = happyGoto action_50
action_312 (67) = happyGoto action_51
action_312 (69) = happyGoto action_52
action_312 (114) = happyGoto action_53
action_312 _ = happyReduce_219
action_313 (121) = happyShift action_15
action_313 (123) = happyShift action_54
action_313 (125) = happyShift action_55
action_313 (49) = happyGoto action_48
action_313 (55) = happyGoto action_406
action_313 (65) = happyGoto action_50
action_313 (67) = happyGoto action_51
action_313 (69) = happyGoto action_52
action_313 (114) = happyGoto action_53
action_313 _ = happyReduce_219
action_314 (121) = happyShift action_15
action_314 (123) = happyShift action_54
action_314 (125) = happyShift action_55
action_314 (49) = happyGoto action_48
action_314 (55) = happyGoto action_405
action_314 (65) = happyGoto action_50
action_314 (67) = happyGoto action_51
action_314 (69) = happyGoto action_52
action_314 (114) = happyGoto action_53
action_314 _ = happyReduce_219
action_315 (121) = happyShift action_15
action_315 (123) = happyShift action_54
action_315 (125) = happyShift action_55
action_315 (49) = happyGoto action_48
action_315 (55) = happyGoto action_404
action_315 (65) = happyGoto action_50
action_315 (67) = happyGoto action_51
action_315 (69) = happyGoto action_52
action_315 (114) = happyGoto action_53
action_315 _ = happyReduce_219
action_316 (121) = happyShift action_15
action_316 (123) = happyShift action_54
action_316 (125) = happyShift action_55
action_316 (49) = happyGoto action_48
action_316 (55) = happyGoto action_403
action_316 (65) = happyGoto action_50
action_316 (67) = happyGoto action_51
action_316 (69) = happyGoto action_52
action_316 (114) = happyGoto action_53
action_316 _ = happyReduce_219
action_317 (121) = happyShift action_15
action_317 (123) = happyShift action_54
action_317 (125) = happyShift action_55
action_317 (49) = happyGoto action_48
action_317 (55) = happyGoto action_402
action_317 (65) = happyGoto action_50
action_317 (67) = happyGoto action_51
action_317 (69) = happyGoto action_52
action_317 (114) = happyGoto action_53
action_317 _ = happyReduce_219
action_318 _ = happyReduce_288
action_319 (122) = happyShift action_129
action_319 (66) = happyGoto action_401
action_319 _ = happyFail
action_320 _ = happyReduce_201
action_321 (121) = happyShift action_15
action_321 (123) = happyShift action_54
action_321 (125) = happyShift action_55
action_321 (49) = happyGoto action_48
action_321 (55) = happyGoto action_400
action_321 (65) = happyGoto action_50
action_321 (67) = happyGoto action_51
action_321 (69) = happyGoto action_52
action_321 (114) = happyGoto action_53
action_321 _ = happyReduce_219
action_322 (122) = happyShift action_129
action_322 (66) = happyGoto action_399
action_322 _ = happyFail
action_323 (122) = happyShift action_129
action_323 (66) = happyGoto action_398
action_323 _ = happyFail
action_324 (119) = happyShift action_40
action_324 (201) = happyShift action_204
action_324 (203) = happyShift action_205
action_324 (58) = happyGoto action_82
action_324 (60) = happyGoto action_83
action_324 _ = happyFail
action_325 (121) = happyShift action_15
action_325 (49) = happyGoto action_140
action_325 (50) = happyGoto action_397
action_325 (65) = happyGoto action_142
action_325 (69) = happyGoto action_44
action_325 (114) = happyGoto action_53
action_325 _ = happyReduce_219
action_326 (119) = happyShift action_265
action_326 (166) = happyShift action_266
action_326 (61) = happyGoto action_396
action_326 _ = happyFail
action_327 _ = happyReduce_139
action_328 (121) = happyShift action_15
action_328 (123) = happyShift action_54
action_328 (125) = happyShift action_55
action_328 (49) = happyGoto action_48
action_328 (55) = happyGoto action_391
action_328 (65) = happyGoto action_50
action_328 (67) = happyGoto action_51
action_328 (69) = happyGoto action_392
action_328 (75) = happyGoto action_395
action_328 (114) = happyGoto action_53
action_328 _ = happyReduce_219
action_329 (121) = happyShift action_15
action_329 (123) = happyShift action_54
action_329 (125) = happyShift action_55
action_329 (49) = happyGoto action_48
action_329 (55) = happyGoto action_391
action_329 (65) = happyGoto action_50
action_329 (67) = happyGoto action_51
action_329 (69) = happyGoto action_392
action_329 (75) = happyGoto action_394
action_329 (114) = happyGoto action_53
action_329 _ = happyReduce_219
action_330 (121) = happyShift action_15
action_330 (123) = happyShift action_54
action_330 (125) = happyShift action_55
action_330 (49) = happyGoto action_48
action_330 (55) = happyGoto action_391
action_330 (65) = happyGoto action_50
action_330 (67) = happyGoto action_51
action_330 (69) = happyGoto action_392
action_330 (75) = happyGoto action_393
action_330 (114) = happyGoto action_53
action_330 _ = happyReduce_219
action_331 (122) = happyShift action_129
action_331 (66) = happyGoto action_390
action_331 _ = happyFail
action_332 (122) = happyShift action_129
action_332 (66) = happyGoto action_389
action_332 _ = happyFail
action_333 (122) = happyShift action_129
action_333 (66) = happyGoto action_388
action_333 _ = happyFail
action_334 (122) = happyShift action_129
action_334 (66) = happyGoto action_387
action_334 _ = happyFail
action_335 (122) = happyShift action_129
action_335 (66) = happyGoto action_386
action_335 _ = happyFail
action_336 (122) = happyShift action_129
action_336 (66) = happyGoto action_385
action_336 _ = happyFail
action_337 (122) = happyShift action_129
action_337 (66) = happyGoto action_384
action_337 _ = happyFail
action_338 (122) = happyShift action_129
action_338 (66) = happyGoto action_383
action_338 _ = happyFail
action_339 (121) = happyShift action_15
action_339 (123) = happyShift action_54
action_339 (125) = happyShift action_55
action_339 (49) = happyGoto action_48
action_339 (55) = happyGoto action_382
action_339 (65) = happyGoto action_50
action_339 (67) = happyGoto action_51
action_339 (69) = happyGoto action_52
action_339 (114) = happyGoto action_53
action_339 _ = happyReduce_219
action_340 (121) = happyShift action_15
action_340 (123) = happyShift action_54
action_340 (125) = happyShift action_55
action_340 (49) = happyGoto action_48
action_340 (55) = happyGoto action_381
action_340 (65) = happyGoto action_50
action_340 (67) = happyGoto action_51
action_340 (69) = happyGoto action_52
action_340 (114) = happyGoto action_53
action_340 _ = happyReduce_219
action_341 (121) = happyShift action_15
action_341 (123) = happyShift action_54
action_341 (125) = happyShift action_55
action_341 (49) = happyGoto action_48
action_341 (55) = happyGoto action_380
action_341 (65) = happyGoto action_50
action_341 (67) = happyGoto action_51
action_341 (69) = happyGoto action_52
action_341 (114) = happyGoto action_53
action_341 _ = happyReduce_219
action_342 (121) = happyShift action_15
action_342 (123) = happyShift action_54
action_342 (125) = happyShift action_55
action_342 (49) = happyGoto action_48
action_342 (55) = happyGoto action_379
action_342 (65) = happyGoto action_50
action_342 (67) = happyGoto action_51
action_342 (69) = happyGoto action_52
action_342 (114) = happyGoto action_53
action_342 _ = happyReduce_219
action_343 (121) = happyShift action_15
action_343 (49) = happyGoto action_140
action_343 (50) = happyGoto action_353
action_343 (51) = happyGoto action_378
action_343 (65) = happyGoto action_142
action_343 (69) = happyGoto action_44
action_343 (114) = happyGoto action_53
action_343 _ = happyReduce_219
action_344 (121) = happyShift action_15
action_344 (49) = happyGoto action_140
action_344 (50) = happyGoto action_353
action_344 (51) = happyGoto action_377
action_344 (65) = happyGoto action_142
action_344 (69) = happyGoto action_44
action_344 (114) = happyGoto action_53
action_344 _ = happyReduce_219
action_345 (121) = happyShift action_15
action_345 (49) = happyGoto action_140
action_345 (50) = happyGoto action_353
action_345 (51) = happyGoto action_376
action_345 (65) = happyGoto action_142
action_345 (69) = happyGoto action_44
action_345 (114) = happyGoto action_53
action_345 _ = happyReduce_219
action_346 (121) = happyShift action_15
action_346 (49) = happyGoto action_140
action_346 (50) = happyGoto action_353
action_346 (51) = happyGoto action_375
action_346 (65) = happyGoto action_142
action_346 (69) = happyGoto action_44
action_346 (114) = happyGoto action_53
action_346 _ = happyReduce_219
action_347 (121) = happyShift action_15
action_347 (49) = happyGoto action_140
action_347 (50) = happyGoto action_353
action_347 (51) = happyGoto action_374
action_347 (65) = happyGoto action_142
action_347 (69) = happyGoto action_44
action_347 (114) = happyGoto action_53
action_347 _ = happyReduce_219
action_348 (121) = happyShift action_15
action_348 (49) = happyGoto action_140
action_348 (50) = happyGoto action_353
action_348 (51) = happyGoto action_373
action_348 (65) = happyGoto action_142
action_348 (69) = happyGoto action_44
action_348 (114) = happyGoto action_53
action_348 _ = happyReduce_219
action_349 (121) = happyShift action_15
action_349 (123) = happyShift action_54
action_349 (125) = happyShift action_55
action_349 (49) = happyGoto action_48
action_349 (55) = happyGoto action_372
action_349 (65) = happyGoto action_50
action_349 (67) = happyGoto action_51
action_349 (69) = happyGoto action_52
action_349 (114) = happyGoto action_53
action_349 _ = happyReduce_219
action_350 (121) = happyShift action_15
action_350 (9) = happyGoto action_371
action_350 (13) = happyGoto action_70
action_350 (65) = happyGoto action_71
action_350 (69) = happyGoto action_72
action_350 (113) = happyGoto action_73
action_350 _ = happyReduce_219
action_351 _ = happyReduce_16
action_352 (115) = happyShift action_370
action_352 _ = happyFail
action_353 _ = happyReduce_137
action_354 (122) = happyShift action_129
action_354 (66) = happyGoto action_369
action_354 _ = happyFail
action_355 (121) = happyShift action_15
action_355 (32) = happyGoto action_365
action_355 (46) = happyGoto action_366
action_355 (47) = happyGoto action_367
action_355 (65) = happyGoto action_368
action_355 _ = happyReduce_123
action_356 _ = happyReduce_262
action_357 (201) = happyShift action_204
action_357 (203) = happyShift action_205
action_357 _ = happyFail
action_358 (121) = happyShift action_15
action_358 (22) = happyGoto action_364
action_358 (65) = happyGoto action_229
action_358 (69) = happyGoto action_230
action_358 _ = happyReduce_219
action_359 (121) = happyShift action_15
action_359 (22) = happyGoto action_363
action_359 (65) = happyGoto action_229
action_359 (69) = happyGoto action_230
action_359 _ = happyReduce_219
action_360 _ = happyReduce_214
action_361 (122) = happyShift action_129
action_361 (66) = happyGoto action_362
action_361 _ = happyFail
action_362 _ = happyReduce_229
action_363 (122) = happyShift action_129
action_363 (66) = happyGoto action_546
action_363 _ = happyFail
action_364 (122) = happyShift action_129
action_364 (66) = happyGoto action_545
action_364 _ = happyFail
action_365 _ = happyReduce_120
action_366 (121) = happyShift action_15
action_366 (31) = happyGoto action_544
action_366 (32) = happyGoto action_438
action_366 (65) = happyGoto action_441
action_366 _ = happyReduce_122
action_367 (122) = happyShift action_129
action_367 (66) = happyGoto action_543
action_367 _ = happyFail
action_368 (129) = happyShift action_21
action_368 (197) = happyShift action_542
action_368 (69) = happyGoto action_488
action_368 (71) = happyGoto action_489
action_368 (113) = happyGoto action_490
action_368 _ = happyReduce_219
action_369 _ = happyReduce_47
action_370 (168) = happyShift action_541
action_370 _ = happyFail
action_371 (121) = happyShift action_15
action_371 (123) = happyShift action_54
action_371 (125) = happyShift action_55
action_371 (49) = happyGoto action_48
action_371 (55) = happyGoto action_540
action_371 (65) = happyGoto action_50
action_371 (67) = happyGoto action_51
action_371 (69) = happyGoto action_52
action_371 (114) = happyGoto action_53
action_371 _ = happyReduce_219
action_372 (122) = happyShift action_129
action_372 (66) = happyGoto action_539
action_372 _ = happyFail
action_373 (122) = happyShift action_129
action_373 (66) = happyGoto action_538
action_373 _ = happyFail
action_374 (122) = happyShift action_129
action_374 (66) = happyGoto action_537
action_374 _ = happyFail
action_375 (122) = happyShift action_129
action_375 (66) = happyGoto action_536
action_375 _ = happyFail
action_376 (122) = happyShift action_129
action_376 (66) = happyGoto action_535
action_376 _ = happyFail
action_377 (122) = happyShift action_129
action_377 (66) = happyGoto action_534
action_377 _ = happyFail
action_378 (122) = happyShift action_129
action_378 (66) = happyGoto action_533
action_378 _ = happyFail
action_379 (122) = happyShift action_129
action_379 (66) = happyGoto action_532
action_379 _ = happyFail
action_380 (122) = happyShift action_129
action_380 (66) = happyGoto action_531
action_380 _ = happyFail
action_381 (122) = happyShift action_129
action_381 (66) = happyGoto action_530
action_381 _ = happyFail
action_382 (122) = happyShift action_129
action_382 (66) = happyGoto action_529
action_382 _ = happyFail
action_383 _ = happyReduce_150
action_384 _ = happyReduce_149
action_385 _ = happyReduce_148
action_386 _ = happyReduce_147
action_387 _ = happyReduce_146
action_388 _ = happyReduce_163
action_389 _ = happyReduce_162
action_390 _ = happyReduce_161
action_391 _ = happyReduce_227
action_392 (116) = happyShift action_86
action_392 (117) = happyShift action_87
action_392 (118) = happyShift action_88
action_392 (119) = happyShift action_40
action_392 (122) = happyReduce_228
action_392 (58) = happyGoto action_82
action_392 (60) = happyGoto action_83
action_392 (63) = happyGoto action_84
action_392 (69) = happyGoto action_85
action_392 _ = happyReduce_219
action_393 (122) = happyShift action_129
action_393 (66) = happyGoto action_528
action_393 _ = happyFail
action_394 (122) = happyShift action_129
action_394 (66) = happyGoto action_527
action_394 _ = happyFail
action_395 (122) = happyShift action_129
action_395 (66) = happyGoto action_526
action_395 _ = happyFail
action_396 (122) = happyShift action_129
action_396 (66) = happyGoto action_525
action_396 _ = happyFail
action_397 (72) = happyGoto action_524
action_397 _ = happyReduce_222
action_398 (69) = happyGoto action_523
action_398 _ = happyReduce_219
action_399 _ = happyReduce_171
action_400 _ = happyReduce_202
action_401 _ = happyReduce_198
action_402 (122) = happyShift action_129
action_402 (66) = happyGoto action_522
action_402 _ = happyFail
action_403 (122) = happyShift action_129
action_403 (66) = happyGoto action_521
action_403 _ = happyFail
action_404 (122) = happyShift action_129
action_404 (66) = happyGoto action_520
action_404 _ = happyFail
action_405 (122) = happyShift action_129
action_405 (66) = happyGoto action_519
action_405 _ = happyFail
action_406 (122) = happyShift action_129
action_406 (66) = happyGoto action_518
action_406 _ = happyFail
action_407 (122) = happyShift action_129
action_407 (66) = happyGoto action_517
action_407 _ = happyFail
action_408 _ = happyReduce_197
action_409 (121) = happyShift action_15
action_409 (123) = happyShift action_54
action_409 (125) = happyShift action_55
action_409 (49) = happyGoto action_48
action_409 (55) = happyGoto action_516
action_409 (65) = happyGoto action_50
action_409 (67) = happyGoto action_51
action_409 (69) = happyGoto action_52
action_409 (114) = happyGoto action_53
action_409 _ = happyReduce_219
action_410 (121) = happyShift action_15
action_410 (123) = happyShift action_54
action_410 (125) = happyShift action_55
action_410 (49) = happyGoto action_48
action_410 (55) = happyGoto action_515
action_410 (65) = happyGoto action_50
action_410 (67) = happyGoto action_51
action_410 (69) = happyGoto action_52
action_410 (114) = happyGoto action_53
action_410 _ = happyReduce_219
action_411 (121) = happyShift action_15
action_411 (123) = happyShift action_54
action_411 (125) = happyShift action_55
action_411 (49) = happyGoto action_48
action_411 (55) = happyGoto action_514
action_411 (65) = happyGoto action_50
action_411 (67) = happyGoto action_51
action_411 (69) = happyGoto action_52
action_411 (114) = happyGoto action_53
action_411 _ = happyReduce_219
action_412 (121) = happyShift action_15
action_412 (123) = happyShift action_54
action_412 (125) = happyShift action_55
action_412 (49) = happyGoto action_48
action_412 (55) = happyGoto action_513
action_412 (65) = happyGoto action_50
action_412 (67) = happyGoto action_51
action_412 (69) = happyGoto action_52
action_412 (114) = happyGoto action_53
action_412 _ = happyReduce_219
action_413 (121) = happyShift action_15
action_413 (123) = happyShift action_54
action_413 (125) = happyShift action_55
action_413 (49) = happyGoto action_48
action_413 (55) = happyGoto action_512
action_413 (65) = happyGoto action_50
action_413 (67) = happyGoto action_51
action_413 (69) = happyGoto action_52
action_413 (114) = happyGoto action_53
action_413 _ = happyReduce_219
action_414 (121) = happyShift action_15
action_414 (123) = happyShift action_54
action_414 (125) = happyShift action_55
action_414 (49) = happyGoto action_48
action_414 (55) = happyGoto action_511
action_414 (65) = happyGoto action_50
action_414 (67) = happyGoto action_51
action_414 (69) = happyGoto action_52
action_414 (114) = happyGoto action_53
action_414 _ = happyReduce_219
action_415 (122) = happyShift action_129
action_415 (66) = happyGoto action_510
action_415 _ = happyFail
action_416 _ = happyReduce_194
action_417 (122) = happyShift action_129
action_417 (66) = happyGoto action_509
action_417 _ = happyFail
action_418 _ = happyReduce_195
action_419 _ = happyReduce_196
action_420 (122) = happyShift action_129
action_420 (66) = happyGoto action_508
action_420 _ = happyFail
action_421 (122) = happyShift action_129
action_421 (66) = happyGoto action_507
action_421 _ = happyFail
action_422 (122) = happyShift action_129
action_422 (66) = happyGoto action_506
action_422 _ = happyFail
action_423 (121) = happyShift action_15
action_423 (123) = happyShift action_54
action_423 (125) = happyShift action_55
action_423 (49) = happyGoto action_48
action_423 (55) = happyGoto action_505
action_423 (65) = happyGoto action_50
action_423 (67) = happyGoto action_51
action_423 (69) = happyGoto action_52
action_423 (114) = happyGoto action_53
action_423 _ = happyReduce_219
action_424 (121) = happyShift action_15
action_424 (123) = happyShift action_54
action_424 (125) = happyShift action_55
action_424 (49) = happyGoto action_48
action_424 (55) = happyGoto action_504
action_424 (65) = happyGoto action_50
action_424 (67) = happyGoto action_51
action_424 (69) = happyGoto action_52
action_424 (114) = happyGoto action_53
action_424 _ = happyReduce_219
action_425 (122) = happyShift action_129
action_425 (66) = happyGoto action_503
action_425 _ = happyFail
action_426 (122) = happyShift action_129
action_426 (66) = happyGoto action_502
action_426 _ = happyFail
action_427 (122) = happyShift action_129
action_427 (66) = happyGoto action_501
action_427 _ = happyFail
action_428 (122) = happyShift action_129
action_428 (66) = happyGoto action_500
action_428 _ = happyFail
action_429 (122) = happyShift action_129
action_429 (66) = happyGoto action_499
action_429 _ = happyFail
action_430 (128) = happyShift action_498
action_430 _ = happyFail
action_431 (121) = happyShift action_15
action_431 (123) = happyShift action_54
action_431 (125) = happyShift action_55
action_431 (49) = happyGoto action_48
action_431 (55) = happyGoto action_497
action_431 (65) = happyGoto action_50
action_431 (67) = happyGoto action_51
action_431 (69) = happyGoto action_52
action_431 (114) = happyGoto action_53
action_431 _ = happyReduce_219
action_432 (121) = happyShift action_15
action_432 (13) = happyGoto action_267
action_432 (14) = happyGoto action_496
action_432 (65) = happyGoto action_269
action_432 (69) = happyGoto action_72
action_432 (113) = happyGoto action_272
action_432 _ = happyReduce_219
action_433 (119) = happyShift action_265
action_433 (166) = happyShift action_266
action_433 (61) = happyGoto action_495
action_433 _ = happyFail
action_434 (121) = happyShift action_15
action_434 (123) = happyShift action_54
action_434 (125) = happyShift action_55
action_434 (49) = happyGoto action_48
action_434 (55) = happyGoto action_494
action_434 (65) = happyGoto action_50
action_434 (67) = happyGoto action_51
action_434 (69) = happyGoto action_52
action_434 (114) = happyGoto action_53
action_434 _ = happyReduce_219
action_435 _ = happyReduce_287
action_436 _ = happyReduce_42
action_437 _ = happyReduce_116
action_438 _ = happyReduce_71
action_439 (121) = happyShift action_15
action_439 (31) = happyGoto action_493
action_439 (32) = happyGoto action_438
action_439 (65) = happyGoto action_441
action_439 _ = happyReduce_118
action_440 (122) = happyShift action_129
action_440 (66) = happyGoto action_492
action_440 _ = happyFail
action_441 (129) = happyShift action_21
action_441 (197) = happyShift action_491
action_441 (69) = happyGoto action_488
action_441 (71) = happyGoto action_489
action_441 (113) = happyGoto action_490
action_441 _ = happyReduce_219
action_442 (121) = happyShift action_15
action_442 (9) = happyGoto action_487
action_442 (13) = happyGoto action_70
action_442 (65) = happyGoto action_71
action_442 (69) = happyGoto action_72
action_442 (113) = happyGoto action_73
action_442 _ = happyReduce_219
action_443 (72) = happyGoto action_486
action_443 _ = happyReduce_222
action_444 (121) = happyShift action_15
action_444 (31) = happyGoto action_437
action_444 (32) = happyGoto action_438
action_444 (44) = happyGoto action_439
action_444 (45) = happyGoto action_485
action_444 (65) = happyGoto action_441
action_444 _ = happyReduce_119
action_445 (72) = happyGoto action_484
action_445 _ = happyReduce_222
action_446 (121) = happyShift action_15
action_446 (123) = happyShift action_54
action_446 (125) = happyShift action_55
action_446 (49) = happyGoto action_48
action_446 (55) = happyGoto action_483
action_446 (65) = happyGoto action_50
action_446 (67) = happyGoto action_51
action_446 (69) = happyGoto action_52
action_446 (114) = happyGoto action_53
action_446 _ = happyReduce_219
action_447 (168) = happyShift action_481
action_447 (169) = happyShift action_482
action_447 (12) = happyGoto action_480
action_447 _ = happyFail
action_448 _ = happyReduce_29
action_449 _ = happyReduce_247
action_450 (121) = happyShift action_15
action_450 (13) = happyGoto action_479
action_450 (65) = happyGoto action_252
action_450 _ = happyFail
action_451 _ = happyReduce_23
action_452 _ = happyReduce_232
action_453 _ = happyReduce_40
action_454 _ = happyReduce_251
action_455 (121) = happyShift action_15
action_455 (13) = happyGoto action_267
action_455 (14) = happyGoto action_478
action_455 (65) = happyGoto action_269
action_455 (69) = happyGoto action_72
action_455 (113) = happyGoto action_272
action_455 _ = happyReduce_219
action_456 (121) = happyShift action_15
action_456 (122) = happyReduce_253
action_456 (123) = happyShift action_54
action_456 (13) = happyGoto action_267
action_456 (14) = happyGoto action_453
action_456 (15) = happyGoto action_477
action_456 (65) = happyGoto action_269
action_456 (67) = happyGoto action_455
action_456 (69) = happyGoto action_72
action_456 (113) = happyGoto action_272
action_456 _ = happyReduce_219
action_457 (122) = happyShift action_129
action_457 (66) = happyGoto action_476
action_457 _ = happyFail
action_458 _ = happyReduce_26
action_459 _ = happyReduce_242
action_460 (119) = happyShift action_265
action_460 (166) = happyShift action_266
action_460 (61) = happyGoto action_475
action_460 _ = happyFail
action_461 (167) = happyShift action_273
action_461 _ = happyFail
action_462 (121) = happyShift action_15
action_462 (10) = happyGoto action_470
action_462 (65) = happyGoto action_471
action_462 (81) = happyGoto action_472
action_462 (82) = happyGoto action_473
action_462 (83) = happyGoto action_474
action_462 _ = happyReduce_240
action_463 _ = happyReduce_27
action_464 (121) = happyShift action_15
action_464 (9) = happyGoto action_469
action_464 (13) = happyGoto action_70
action_464 (65) = happyGoto action_71
action_464 (69) = happyGoto action_72
action_464 (113) = happyGoto action_73
action_464 _ = happyReduce_219
action_465 (121) = happyShift action_15
action_465 (49) = happyGoto action_140
action_465 (50) = happyGoto action_353
action_465 (51) = happyGoto action_468
action_465 (65) = happyGoto action_142
action_465 (69) = happyGoto action_44
action_465 (114) = happyGoto action_53
action_465 _ = happyReduce_219
action_466 (121) = happyShift action_15
action_466 (49) = happyGoto action_140
action_466 (50) = happyGoto action_353
action_466 (51) = happyGoto action_467
action_466 (65) = happyGoto action_142
action_466 (69) = happyGoto action_44
action_466 (114) = happyGoto action_53
action_466 _ = happyReduce_219
action_467 (122) = happyShift action_129
action_467 (66) = happyGoto action_600
action_467 _ = happyFail
action_468 (122) = happyShift action_129
action_468 (66) = happyGoto action_599
action_468 _ = happyFail
action_469 (122) = happyShift action_129
action_469 (66) = happyGoto action_598
action_469 _ = happyFail
action_470 _ = happyReduce_236
action_471 (69) = happyGoto action_597
action_471 _ = happyReduce_219
action_472 (121) = happyShift action_15
action_472 (10) = happyGoto action_596
action_472 (65) = happyGoto action_471
action_472 _ = happyReduce_238
action_473 _ = happyReduce_239
action_474 (122) = happyShift action_129
action_474 (66) = happyGoto action_595
action_474 _ = happyFail
action_475 (121) = happyShift action_15
action_475 (9) = happyGoto action_594
action_475 (13) = happyGoto action_70
action_475 (65) = happyGoto action_71
action_475 (69) = happyGoto action_72
action_475 (113) = happyGoto action_73
action_475 _ = happyReduce_219
action_476 (162) = happyShift action_593
action_476 _ = happyFail
action_477 _ = happyReduce_252
action_478 (121) = happyShift action_15
action_478 (65) = happyGoto action_592
action_478 _ = happyFail
action_479 (122) = happyShift action_129
action_479 (66) = happyGoto action_591
action_479 _ = happyFail
action_480 (121) = happyShift action_15
action_480 (123) = happyShift action_54
action_480 (125) = happyShift action_55
action_480 (49) = happyGoto action_48
action_480 (55) = happyGoto action_590
action_480 (65) = happyGoto action_50
action_480 (67) = happyGoto action_51
action_480 (69) = happyGoto action_52
action_480 (114) = happyGoto action_53
action_480 _ = happyReduce_219
action_481 _ = happyReduce_32
action_482 _ = happyReduce_33
action_483 (122) = happyShift action_129
action_483 (66) = happyGoto action_589
action_483 _ = happyFail
action_484 (122) = happyShift action_129
action_484 (66) = happyGoto action_588
action_484 _ = happyFail
action_485 (122) = happyShift action_129
action_485 (66) = happyGoto action_587
action_485 _ = happyFail
action_486 (122) = happyShift action_129
action_486 (66) = happyGoto action_586
action_486 _ = happyFail
action_487 (175) = happyShift action_25
action_487 (177) = happyShift action_583
action_487 (178) = happyShift action_584
action_487 (180) = happyShift action_585
action_487 (28) = happyGoto action_581
action_487 (73) = happyGoto action_582
action_487 _ = happyReduce_67
action_488 (119) = happyShift action_40
action_488 (181) = happyShift action_568
action_488 (182) = happyShift action_569
action_488 (183) = happyShift action_570
action_488 (185) = happyShift action_571
action_488 (186) = happyShift action_572
action_488 (187) = happyShift action_573
action_488 (188) = happyShift action_574
action_488 (189) = happyShift action_575
action_488 (193) = happyShift action_576
action_488 (194) = happyShift action_577
action_488 (195) = happyShift action_578
action_488 (196) = happyShift action_579
action_488 (200) = happyShift action_580
action_488 (58) = happyGoto action_178
action_488 (59) = happyGoto action_179
action_488 _ = happyFail
action_489 (121) = happyShift action_15
action_489 (31) = happyGoto action_567
action_489 (32) = happyGoto action_438
action_489 (65) = happyGoto action_441
action_489 _ = happyFail
action_490 (122) = happyReduce_200
action_490 (56) = happyGoto action_566
action_490 (57) = happyGoto action_175
action_490 (69) = happyGoto action_176
action_490 _ = happyReduce_219
action_491 (121) = happyShift action_15
action_491 (65) = happyGoto action_565
action_491 _ = happyFail
action_492 _ = happyReduce_55
action_493 _ = happyReduce_117
action_494 (121) = happyShift action_15
action_494 (123) = happyShift action_54
action_494 (125) = happyShift action_55
action_494 (49) = happyGoto action_48
action_494 (55) = happyGoto action_564
action_494 (65) = happyGoto action_50
action_494 (67) = happyGoto action_51
action_494 (69) = happyGoto action_52
action_494 (114) = happyGoto action_53
action_494 _ = happyReduce_219
action_495 (121) = happyShift action_15
action_495 (123) = happyShift action_54
action_495 (125) = happyShift action_55
action_495 (49) = happyGoto action_48
action_495 (55) = happyGoto action_563
action_495 (65) = happyGoto action_50
action_495 (67) = happyGoto action_51
action_495 (69) = happyGoto action_52
action_495 (114) = happyGoto action_53
action_495 _ = happyReduce_219
action_496 (121) = happyShift action_15
action_496 (123) = happyShift action_54
action_496 (125) = happyShift action_55
action_496 (49) = happyGoto action_48
action_496 (55) = happyGoto action_562
action_496 (65) = happyGoto action_50
action_496 (67) = happyGoto action_51
action_496 (69) = happyGoto action_52
action_496 (114) = happyGoto action_53
action_496 _ = happyReduce_219
action_497 (122) = happyShift action_129
action_497 (66) = happyGoto action_561
action_497 _ = happyFail
action_498 (121) = happyShift action_15
action_498 (9) = happyGoto action_560
action_498 (13) = happyGoto action_70
action_498 (65) = happyGoto action_71
action_498 (69) = happyGoto action_72
action_498 (113) = happyGoto action_73
action_498 _ = happyReduce_219
action_499 _ = happyReduce_134
action_500 _ = happyReduce_133
action_501 _ = happyReduce_184
action_502 _ = happyReduce_179
action_503 _ = happyReduce_178
action_504 (121) = happyShift action_15
action_504 (123) = happyShift action_54
action_504 (125) = happyShift action_55
action_504 (49) = happyGoto action_48
action_504 (55) = happyGoto action_559
action_504 (65) = happyGoto action_50
action_504 (67) = happyGoto action_51
action_504 (69) = happyGoto action_52
action_504 (114) = happyGoto action_53
action_504 _ = happyReduce_219
action_505 (121) = happyShift action_15
action_505 (123) = happyShift action_54
action_505 (125) = happyShift action_55
action_505 (49) = happyGoto action_48
action_505 (55) = happyGoto action_558
action_505 (65) = happyGoto action_50
action_505 (67) = happyGoto action_51
action_505 (69) = happyGoto action_52
action_505 (114) = happyGoto action_53
action_505 _ = happyReduce_219
action_506 _ = happyReduce_180
action_507 _ = happyReduce_182
action_508 _ = happyReduce_193
action_509 _ = happyReduce_183
action_510 _ = happyReduce_181
action_511 (122) = happyShift action_129
action_511 (66) = happyGoto action_557
action_511 _ = happyFail
action_512 (122) = happyShift action_129
action_512 (66) = happyGoto action_556
action_512 _ = happyFail
action_513 (122) = happyShift action_129
action_513 (66) = happyGoto action_555
action_513 _ = happyFail
action_514 (122) = happyShift action_129
action_514 (66) = happyGoto action_554
action_514 _ = happyFail
action_515 (122) = happyShift action_129
action_515 (66) = happyGoto action_553
action_515 _ = happyFail
action_516 (122) = happyShift action_129
action_516 (66) = happyGoto action_552
action_516 _ = happyFail
action_517 _ = happyReduce_192
action_518 _ = happyReduce_191
action_519 _ = happyReduce_190
action_520 _ = happyReduce_188
action_521 _ = happyReduce_189
action_522 _ = happyReduce_187
action_523 (119) = happyShift action_265
action_523 (166) = happyShift action_266
action_523 (61) = happyGoto action_551
action_523 _ = happyFail
action_524 (122) = happyShift action_129
action_524 (66) = happyGoto action_550
action_524 _ = happyFail
action_525 _ = happyReduce_131
action_526 _ = happyReduce_165
action_527 _ = happyReduce_164
action_528 _ = happyReduce_166
action_529 _ = happyReduce_154
action_530 _ = happyReduce_153
action_531 _ = happyReduce_151
action_532 _ = happyReduce_152
action_533 _ = happyReduce_155
action_534 _ = happyReduce_156
action_535 _ = happyReduce_157
action_536 _ = happyReduce_158
action_537 _ = happyReduce_160
action_538 _ = happyReduce_159
action_539 _ = happyReduce_144
action_540 (122) = happyShift action_129
action_540 (66) = happyGoto action_549
action_540 _ = happyFail
action_541 (115) = happyShift action_548
action_541 _ = happyFail
action_542 (121) = happyShift action_15
action_542 (65) = happyGoto action_547
action_542 _ = happyFail
action_543 _ = happyReduce_70
action_544 _ = happyReduce_121
action_545 _ = happyReduce_50
action_546 _ = happyReduce_49
action_547 (121) = happyShift action_15
action_547 (16) = happyGoto action_630
action_547 (17) = happyGoto action_631
action_547 (20) = happyGoto action_632
action_547 (23) = happyGoto action_633
action_547 (24) = happyGoto action_634
action_547 (43) = happyGoto action_635
action_547 (65) = happyGoto action_636
action_547 (99) = happyGoto action_637
action_547 (100) = happyGoto action_638
action_547 (101) = happyGoto action_648
action_547 _ = happyReduce_270
action_548 (122) = happyShift action_129
action_548 (66) = happyGoto action_647
action_548 _ = happyFail
action_549 _ = happyReduce_145
action_550 _ = happyReduce_136
action_551 (122) = happyShift action_129
action_551 (66) = happyGoto action_646
action_551 _ = happyFail
action_552 _ = happyReduce_172
action_553 _ = happyReduce_173
action_554 _ = happyReduce_174
action_555 _ = happyReduce_175
action_556 _ = happyReduce_176
action_557 _ = happyReduce_177
action_558 (122) = happyShift action_129
action_558 (66) = happyGoto action_645
action_558 _ = happyFail
action_559 (122) = happyShift action_129
action_559 (66) = happyGoto action_644
action_559 _ = happyFail
action_560 (122) = happyShift action_129
action_560 (66) = happyGoto action_643
action_560 _ = happyFail
action_561 _ = happyReduce_125
action_562 (122) = happyShift action_129
action_562 (66) = happyGoto action_642
action_562 _ = happyFail
action_563 (122) = happyShift action_129
action_563 (66) = happyGoto action_641
action_563 _ = happyFail
action_564 (122) = happyShift action_129
action_564 (66) = happyGoto action_640
action_564 _ = happyFail
action_565 (121) = happyShift action_15
action_565 (16) = happyGoto action_630
action_565 (17) = happyGoto action_631
action_565 (20) = happyGoto action_632
action_565 (23) = happyGoto action_633
action_565 (24) = happyGoto action_634
action_565 (43) = happyGoto action_635
action_565 (65) = happyGoto action_636
action_565 (99) = happyGoto action_637
action_565 (100) = happyGoto action_638
action_565 (101) = happyGoto action_639
action_565 _ = happyReduce_270
action_566 (69) = happyGoto action_629
action_566 _ = happyReduce_219
action_567 (72) = happyGoto action_628
action_567 _ = happyReduce_222
action_568 (190) = happyShift action_627
action_568 (37) = happyGoto action_626
action_568 _ = happyReduce_98
action_569 (121) = happyShift action_15
action_569 (122) = happyShift action_129
action_569 (123) = happyShift action_54
action_569 (125) = happyShift action_55
action_569 (49) = happyGoto action_48
action_569 (55) = happyGoto action_624
action_569 (65) = happyGoto action_50
action_569 (66) = happyGoto action_625
action_569 (67) = happyGoto action_51
action_569 (69) = happyGoto action_52
action_569 (114) = happyGoto action_53
action_569 _ = happyReduce_219
action_570 (121) = happyShift action_15
action_570 (123) = happyShift action_54
action_570 (125) = happyShift action_55
action_570 (49) = happyGoto action_48
action_570 (55) = happyGoto action_623
action_570 (65) = happyGoto action_50
action_570 (67) = happyGoto action_51
action_570 (69) = happyGoto action_52
action_570 (114) = happyGoto action_53
action_570 _ = happyReduce_219
action_571 (121) = happyShift action_15
action_571 (123) = happyShift action_54
action_571 (125) = happyShift action_55
action_571 (49) = happyGoto action_48
action_571 (55) = happyGoto action_622
action_571 (65) = happyGoto action_50
action_571 (67) = happyGoto action_51
action_571 (69) = happyGoto action_52
action_571 (114) = happyGoto action_53
action_571 _ = happyReduce_219
action_572 (121) = happyShift action_15
action_572 (49) = happyGoto action_140
action_572 (50) = happyGoto action_620
action_572 (52) = happyGoto action_621
action_572 (65) = happyGoto action_142
action_572 (69) = happyGoto action_44
action_572 (114) = happyGoto action_53
action_572 _ = happyReduce_219
action_573 (121) = happyShift action_15
action_573 (49) = happyGoto action_140
action_573 (50) = happyGoto action_327
action_573 (53) = happyGoto action_619
action_573 (65) = happyGoto action_142
action_573 (69) = happyGoto action_44
action_573 (114) = happyGoto action_53
action_573 _ = happyReduce_219
action_574 (121) = happyShift action_15
action_574 (123) = happyShift action_54
action_574 (125) = happyShift action_55
action_574 (49) = happyGoto action_48
action_574 (55) = happyGoto action_618
action_574 (65) = happyGoto action_50
action_574 (67) = happyGoto action_51
action_574 (69) = happyGoto action_52
action_574 (114) = happyGoto action_53
action_574 _ = happyReduce_219
action_575 (120) = happyShift action_616
action_575 (33) = happyGoto action_617
action_575 _ = happyReduce_91
action_576 (120) = happyShift action_616
action_576 (33) = happyGoto action_615
action_576 _ = happyReduce_91
action_577 (119) = happyShift action_265
action_577 (166) = happyShift action_266
action_577 (34) = happyGoto action_614
action_577 (61) = happyGoto action_613
action_577 _ = happyReduce_93
action_578 (119) = happyShift action_265
action_578 (166) = happyShift action_266
action_578 (34) = happyGoto action_612
action_578 (61) = happyGoto action_613
action_578 _ = happyReduce_93
action_579 (121) = happyShift action_15
action_579 (65) = happyGoto action_611
action_579 _ = happyFail
action_580 (122) = happyShift action_129
action_580 (66) = happyGoto action_610
action_580 _ = happyFail
action_581 (172) = happyShift action_607
action_581 (173) = happyShift action_608
action_581 (174) = happyShift action_609
action_581 (27) = happyGoto action_606
action_581 _ = happyReduce_62
action_582 _ = happyReduce_65
action_583 _ = happyReduce_64
action_584 _ = happyReduce_63
action_585 _ = happyReduce_66
action_586 _ = happyReduce_58
action_587 _ = happyReduce_54
action_588 _ = happyReduce_53
action_589 _ = happyReduce_52
action_590 (122) = happyShift action_129
action_590 (66) = happyGoto action_605
action_590 _ = happyFail
action_591 _ = happyReduce_39
action_592 (167) = happyShift action_604
action_592 _ = happyFail
action_593 (121) = happyShift action_15
action_593 (9) = happyGoto action_603
action_593 (13) = happyGoto action_70
action_593 (65) = happyGoto action_71
action_593 (69) = happyGoto action_72
action_593 (113) = happyGoto action_73
action_593 _ = happyReduce_219
action_594 (122) = happyShift action_129
action_594 (66) = happyGoto action_602
action_594 _ = happyFail
action_595 _ = happyReduce_25
action_596 _ = happyReduce_237
action_597 (119) = happyShift action_265
action_597 (166) = happyShift action_266
action_597 (61) = happyGoto action_601
action_597 _ = happyFail
action_598 _ = happyReduce_28
action_599 _ = happyReduce_35
action_600 _ = happyReduce_36
action_601 (125) = happyShift action_678
action_601 _ = happyFail
action_602 _ = happyReduce_31
action_603 (122) = happyShift action_129
action_603 (66) = happyGoto action_677
action_603 _ = happyFail
action_604 (158) = happyShift action_676
action_604 _ = happyFail
action_605 _ = happyReduce_34
action_606 (122) = happyShift action_129
action_606 (66) = happyGoto action_675
action_606 _ = happyFail
action_607 _ = happyReduce_59
action_608 _ = happyReduce_60
action_609 _ = happyReduce_61
action_610 _ = happyReduce_84
action_611 (121) = happyShift action_15
action_611 (123) = happyShift action_54
action_611 (125) = happyShift action_55
action_611 (49) = happyGoto action_48
action_611 (55) = happyGoto action_674
action_611 (65) = happyGoto action_50
action_611 (67) = happyGoto action_51
action_611 (69) = happyGoto action_52
action_611 (114) = happyGoto action_53
action_611 _ = happyReduce_219
action_612 (122) = happyShift action_129
action_612 (66) = happyGoto action_673
action_612 _ = happyFail
action_613 _ = happyReduce_94
action_614 (122) = happyShift action_129
action_614 (66) = happyGoto action_672
action_614 _ = happyFail
action_615 (121) = happyShift action_15
action_615 (123) = happyShift action_54
action_615 (125) = happyShift action_55
action_615 (49) = happyGoto action_48
action_615 (55) = happyGoto action_671
action_615 (65) = happyGoto action_50
action_615 (67) = happyGoto action_51
action_615 (69) = happyGoto action_52
action_615 (114) = happyGoto action_53
action_615 _ = happyReduce_219
action_616 _ = happyReduce_92
action_617 (69) = happyGoto action_215
action_617 (112) = happyGoto action_670
action_617 _ = happyReduce_219
action_618 (121) = happyShift action_15
action_618 (65) = happyGoto action_669
action_618 _ = happyFail
action_619 (69) = happyGoto action_668
action_619 _ = happyReduce_219
action_620 _ = happyReduce_138
action_621 (69) = happyGoto action_667
action_621 _ = happyReduce_219
action_622 (183) = happyShift action_666
action_622 (40) = happyGoto action_664
action_622 (69) = happyGoto action_665
action_622 _ = happyReduce_219
action_623 (121) = happyShift action_15
action_623 (122) = happyShift action_129
action_623 (123) = happyShift action_54
action_623 (125) = happyShift action_55
action_623 (49) = happyGoto action_48
action_623 (55) = happyGoto action_662
action_623 (65) = happyGoto action_50
action_623 (66) = happyGoto action_663
action_623 (67) = happyGoto action_51
action_623 (69) = happyGoto action_52
action_623 (114) = happyGoto action_53
action_623 _ = happyReduce_219
action_624 (122) = happyShift action_129
action_624 (66) = happyGoto action_661
action_624 _ = happyFail
action_625 _ = happyReduce_73
action_626 (191) = happyShift action_660
action_626 (38) = happyGoto action_659
action_626 _ = happyReduce_100
action_627 (121) = happyShift action_15
action_627 (65) = happyGoto action_658
action_627 _ = happyFail
action_628 (122) = happyShift action_129
action_628 (66) = happyGoto action_657
action_628 _ = happyFail
action_629 (122) = happyShift action_129
action_629 (66) = happyGoto action_656
action_629 _ = happyFail
action_630 _ = happyReduce_109
action_631 _ = happyReduce_110
action_632 _ = happyReduce_111
action_633 _ = happyReduce_112
action_634 _ = happyReduce_113
action_635 _ = happyReduce_266
action_636 (129) = happyShift action_21
action_636 (159) = happyShift action_654
action_636 (170) = happyShift action_23
action_636 (171) = happyShift action_24
action_636 (177) = happyShift action_655
action_636 (178) = happyShift action_27
action_636 (198) = happyShift action_30
action_636 (71) = happyGoto action_653
action_636 _ = happyFail
action_637 (121) = happyShift action_15
action_637 (16) = happyGoto action_630
action_637 (17) = happyGoto action_631
action_637 (20) = happyGoto action_632
action_637 (23) = happyGoto action_633
action_637 (24) = happyGoto action_634
action_637 (43) = happyGoto action_652
action_637 (65) = happyGoto action_636
action_637 _ = happyReduce_268
action_638 _ = happyReduce_269
action_639 (122) = happyShift action_129
action_639 (66) = happyGoto action_651
action_639 _ = happyFail
action_640 _ = happyReduce_128
action_641 _ = happyReduce_127
action_642 _ = happyReduce_126
action_643 _ = happyReduce_129
action_644 _ = happyReduce_185
action_645 _ = happyReduce_186
action_646 _ = happyReduce_132
action_647 (179) = happyShift action_650
action_647 _ = happyFail
action_648 (122) = happyShift action_129
action_648 (66) = happyGoto action_649
action_648 _ = happyFail
action_649 (121) = happyShift action_15
action_649 (31) = happyGoto action_437
action_649 (32) = happyGoto action_438
action_649 (44) = happyGoto action_439
action_649 (45) = happyGoto action_705
action_649 (65) = happyGoto action_441
action_649 _ = happyReduce_119
action_650 (121) = happyShift action_15
action_650 (4) = happyGoto action_704
action_650 (5) = happyGoto action_2
action_650 (6) = happyGoto action_3
action_650 (7) = happyGoto action_4
action_650 (16) = happyGoto action_5
action_650 (18) = happyGoto action_6
action_650 (20) = happyGoto action_7
action_650 (21) = happyGoto action_8
action_650 (23) = happyGoto action_9
action_650 (24) = happyGoto action_10
action_650 (30) = happyGoto action_11
action_650 (65) = happyGoto action_17
action_650 (76) = happyGoto action_13
action_650 (77) = happyGoto action_14
action_650 _ = happyReduce_3
action_651 (121) = happyShift action_15
action_651 (31) = happyGoto action_437
action_651 (32) = happyGoto action_438
action_651 (44) = happyGoto action_439
action_651 (45) = happyGoto action_703
action_651 (65) = happyGoto action_441
action_651 _ = happyReduce_119
action_652 _ = happyReduce_267
action_653 (121) = happyShift action_15
action_653 (16) = happyGoto action_630
action_653 (17) = happyGoto action_631
action_653 (20) = happyGoto action_632
action_653 (23) = happyGoto action_633
action_653 (24) = happyGoto action_634
action_653 (43) = happyGoto action_702
action_653 (65) = happyGoto action_636
action_653 _ = happyFail
action_654 (69) = happyGoto action_44
action_654 (114) = happyGoto action_701
action_654 _ = happyReduce_219
action_655 (69) = happyGoto action_44
action_655 (114) = happyGoto action_700
action_655 _ = happyReduce_219
action_656 _ = happyReduce_75
action_657 _ = happyReduce_90
action_658 (121) = happyShift action_15
action_658 (122) = happyReduce_265
action_658 (29) = happyGoto action_139
action_658 (49) = happyGoto action_140
action_658 (50) = happyGoto action_141
action_658 (65) = happyGoto action_142
action_658 (69) = happyGoto action_44
action_658 (96) = happyGoto action_143
action_658 (97) = happyGoto action_144
action_658 (98) = happyGoto action_699
action_658 (114) = happyGoto action_53
action_658 _ = happyReduce_219
action_659 (189) = happyShift action_698
action_659 (39) = happyGoto action_697
action_659 _ = happyReduce_102
action_660 (121) = happyShift action_15
action_660 (123) = happyShift action_54
action_660 (125) = happyShift action_55
action_660 (49) = happyGoto action_48
action_660 (55) = happyGoto action_696
action_660 (65) = happyGoto action_50
action_660 (67) = happyGoto action_51
action_660 (69) = happyGoto action_52
action_660 (114) = happyGoto action_53
action_660 _ = happyReduce_219
action_661 _ = happyReduce_74
action_662 (122) = happyShift action_129
action_662 (66) = happyGoto action_695
action_662 _ = happyFail
action_663 _ = happyReduce_81
action_664 (184) = happyShift action_694
action_664 (41) = happyGoto action_692
action_664 (69) = happyGoto action_693
action_664 _ = happyReduce_219
action_665 _ = happyReduce_104
action_666 (121) = happyShift action_15
action_666 (123) = happyShift action_54
action_666 (125) = happyShift action_55
action_666 (49) = happyGoto action_48
action_666 (55) = happyGoto action_691
action_666 (65) = happyGoto action_50
action_666 (67) = happyGoto action_51
action_666 (69) = happyGoto action_52
action_666 (114) = happyGoto action_53
action_666 _ = happyReduce_219
action_667 (121) = happyShift action_15
action_667 (123) = happyShift action_54
action_667 (125) = happyShift action_55
action_667 (49) = happyGoto action_48
action_667 (55) = happyGoto action_690
action_667 (65) = happyGoto action_50
action_667 (67) = happyGoto action_51
action_667 (69) = happyGoto action_52
action_667 (114) = happyGoto action_53
action_667 _ = happyReduce_219
action_668 (121) = happyShift action_15
action_668 (123) = happyShift action_54
action_668 (125) = happyShift action_55
action_668 (35) = happyGoto action_685
action_668 (36) = happyGoto action_686
action_668 (49) = happyGoto action_48
action_668 (55) = happyGoto action_687
action_668 (65) = happyGoto action_50
action_668 (67) = happyGoto action_51
action_668 (69) = happyGoto action_52
action_668 (108) = happyGoto action_688
action_668 (109) = happyGoto action_689
action_668 (114) = happyGoto action_53
action_668 _ = happyReduce_219
action_669 (121) = happyShift action_15
action_669 (31) = happyGoto action_437
action_669 (32) = happyGoto action_438
action_669 (44) = happyGoto action_439
action_669 (45) = happyGoto action_684
action_669 (65) = happyGoto action_441
action_669 _ = happyReduce_119
action_670 (121) = happyShift action_15
action_670 (9) = happyGoto action_683
action_670 (13) = happyGoto action_70
action_670 (65) = happyGoto action_71
action_670 (69) = happyGoto action_72
action_670 (113) = happyGoto action_73
action_670 _ = happyReduce_219
action_671 (121) = happyShift action_15
action_671 (31) = happyGoto action_437
action_671 (32) = happyGoto action_438
action_671 (44) = happyGoto action_439
action_671 (45) = happyGoto action_682
action_671 (65) = happyGoto action_441
action_671 _ = happyReduce_119
action_672 _ = happyReduce_89
action_673 _ = happyReduce_88
action_674 (128) = happyShift action_681
action_674 _ = happyFail
action_675 _ = happyReduce_57
action_676 (122) = happyShift action_129
action_676 (66) = happyGoto action_680
action_676 _ = happyFail
action_677 _ = happyReduce_24
action_678 (64) = happyGoto action_679
action_678 (69) = happyGoto action_136
action_678 _ = happyReduce_219
action_679 (119) = happyShift action_265
action_679 (166) = happyShift action_266
action_679 (61) = happyGoto action_728
action_679 _ = happyFail
action_680 (124) = happyShift action_287
action_680 (68) = happyGoto action_727
action_680 _ = happyFail
action_681 (121) = happyShift action_15
action_681 (9) = happyGoto action_726
action_681 (13) = happyGoto action_70
action_681 (65) = happyGoto action_71
action_681 (69) = happyGoto action_72
action_681 (113) = happyGoto action_73
action_681 _ = happyReduce_219
action_682 (122) = happyShift action_129
action_682 (66) = happyGoto action_725
action_682 _ = happyFail
action_683 (121) = happyShift action_15
action_683 (31) = happyGoto action_437
action_683 (32) = happyGoto action_438
action_683 (44) = happyGoto action_439
action_683 (45) = happyGoto action_724
action_683 (65) = happyGoto action_441
action_683 _ = happyReduce_119
action_684 (122) = happyShift action_129
action_684 (66) = happyGoto action_723
action_684 _ = happyFail
action_685 _ = happyReduce_281
action_686 (122) = happyShift action_129
action_686 (66) = happyGoto action_722
action_686 _ = happyFail
action_687 (122) = happyShift action_129
action_687 (192) = happyShift action_721
action_687 (66) = happyGoto action_720
action_687 _ = happyFail
action_688 (121) = happyShift action_15
action_688 (122) = happyReduce_283
action_688 (123) = happyShift action_54
action_688 (125) = happyShift action_55
action_688 (35) = happyGoto action_718
action_688 (49) = happyGoto action_48
action_688 (55) = happyGoto action_719
action_688 (65) = happyGoto action_50
action_688 (67) = happyGoto action_51
action_688 (69) = happyGoto action_52
action_688 (114) = happyGoto action_53
action_688 _ = happyReduce_219
action_689 _ = happyReduce_96
action_690 (122) = happyShift action_129
action_690 (66) = happyGoto action_717
action_690 _ = happyFail
action_691 _ = happyReduce_103
action_692 (122) = happyShift action_129
action_692 (66) = happyGoto action_716
action_692 _ = happyFail
action_693 _ = happyReduce_106
action_694 (121) = happyShift action_15
action_694 (123) = happyShift action_54
action_694 (125) = happyShift action_55
action_694 (49) = happyGoto action_48
action_694 (55) = happyGoto action_715
action_694 (65) = happyGoto action_50
action_694 (67) = happyGoto action_51
action_694 (69) = happyGoto action_52
action_694 (114) = happyGoto action_53
action_694 _ = happyReduce_219
action_695 _ = happyReduce_82
action_696 _ = happyReduce_99
action_697 (122) = happyShift action_129
action_697 (66) = happyGoto action_714
action_697 _ = happyFail
action_698 (121) = happyShift action_15
action_698 (123) = happyShift action_54
action_698 (125) = happyShift action_55
action_698 (49) = happyGoto action_48
action_698 (55) = happyGoto action_713
action_698 (65) = happyGoto action_50
action_698 (67) = happyGoto action_51
action_698 (69) = happyGoto action_52
action_698 (114) = happyGoto action_53
action_698 _ = happyReduce_219
action_699 (122) = happyShift action_129
action_699 (66) = happyGoto action_712
action_699 _ = happyFail
action_700 (121) = happyShift action_15
action_700 (9) = happyGoto action_711
action_700 (13) = happyGoto action_70
action_700 (65) = happyGoto action_71
action_700 (69) = happyGoto action_72
action_700 (113) = happyGoto action_73
action_700 _ = happyReduce_219
action_701 (121) = happyShift action_15
action_701 (9) = happyGoto action_710
action_701 (13) = happyGoto action_70
action_701 (65) = happyGoto action_71
action_701 (69) = happyGoto action_72
action_701 (113) = happyGoto action_73
action_701 _ = happyReduce_219
action_702 (72) = happyGoto action_709
action_702 _ = happyReduce_222
action_703 (122) = happyShift action_129
action_703 (66) = happyGoto action_708
action_703 _ = happyFail
action_704 (122) = happyShift action_129
action_704 (66) = happyGoto action_707
action_704 _ = happyFail
action_705 (122) = happyShift action_129
action_705 (66) = happyGoto action_706
action_705 _ = happyFail
action_706 (122) = happyShift action_129
action_706 (66) = happyGoto action_737
action_706 _ = happyFail
action_707 _ = happyReduce_17
action_708 _ = happyReduce_72
action_709 (122) = happyShift action_129
action_709 (66) = happyGoto action_736
action_709 _ = happyFail
action_710 (122) = happyShift action_129
action_710 (66) = happyGoto action_735
action_710 _ = happyFail
action_711 (19) = happyGoto action_734
action_711 (69) = happyGoto action_242
action_711 _ = happyReduce_219
action_712 _ = happyReduce_97
action_713 _ = happyReduce_101
action_714 _ = happyReduce_83
action_715 _ = happyReduce_105
action_716 _ = happyReduce_80
action_717 _ = happyReduce_77
action_718 _ = happyReduce_282
action_719 (192) = happyShift action_721
action_719 _ = happyFail
action_720 _ = happyReduce_78
action_721 (121) = happyShift action_15
action_721 (123) = happyShift action_54
action_721 (125) = happyShift action_55
action_721 (49) = happyGoto action_48
action_721 (55) = happyGoto action_733
action_721 (65) = happyGoto action_50
action_721 (67) = happyGoto action_51
action_721 (69) = happyGoto action_52
action_721 (114) = happyGoto action_53
action_721 _ = happyReduce_219
action_722 _ = happyReduce_79
action_723 (121) = happyShift action_15
action_723 (65) = happyGoto action_732
action_723 _ = happyFail
action_724 (122) = happyShift action_129
action_724 (66) = happyGoto action_731
action_724 _ = happyFail
action_725 _ = happyReduce_87
action_726 (122) = happyShift action_129
action_726 (66) = happyGoto action_730
action_726 _ = happyFail
action_727 _ = happyReduce_41
action_728 (126) = happyShift action_729
action_728 _ = happyFail
action_729 (122) = happyShift action_129
action_729 (66) = happyGoto action_745
action_729 _ = happyFail
action_730 (121) = happyShift action_15
action_730 (42) = happyGoto action_740
action_730 (65) = happyGoto action_741
action_730 (102) = happyGoto action_742
action_730 (103) = happyGoto action_743
action_730 (104) = happyGoto action_744
action_730 _ = happyReduce_275
action_731 _ = happyReduce_86
action_732 (121) = happyShift action_15
action_732 (31) = happyGoto action_437
action_732 (32) = happyGoto action_438
action_732 (44) = happyGoto action_439
action_732 (45) = happyGoto action_739
action_732 (65) = happyGoto action_441
action_732 _ = happyReduce_119
action_733 _ = happyReduce_95
action_734 (122) = happyShift action_129
action_734 (66) = happyGoto action_738
action_734 _ = happyFail
action_735 _ = happyReduce_114
action_736 _ = happyReduce_115
action_737 _ = happyReduce_69
action_738 _ = happyReduce_43
action_739 (122) = happyShift action_129
action_739 (66) = happyGoto action_749
action_739 _ = happyFail
action_740 _ = happyReduce_271
action_741 (121) = happyShift action_15
action_741 (65) = happyGoto action_748
action_741 _ = happyFail
action_742 (121) = happyShift action_15
action_742 (42) = happyGoto action_747
action_742 (65) = happyGoto action_741
action_742 _ = happyReduce_273
action_743 _ = happyReduce_274
action_744 (122) = happyShift action_129
action_744 (66) = happyGoto action_746
action_744 _ = happyFail
action_745 _ = happyReduce_30
action_746 _ = happyReduce_85
action_747 _ = happyReduce_272
action_748 (69) = happyGoto action_751
action_748 _ = happyReduce_219
action_749 (122) = happyShift action_129
action_749 (66) = happyGoto action_750
action_749 _ = happyFail
action_750 _ = happyReduce_76
action_751 (122) = happyReduce_200
action_751 (130) = happyShift action_753
action_751 (56) = happyGoto action_752
action_751 (57) = happyGoto action_175
action_751 (69) = happyGoto action_176
action_751 _ = happyReduce_219
action_752 (122) = happyShift action_129
action_752 (66) = happyGoto action_755
action_752 _ = happyFail
action_753 (122) = happyShift action_129
action_753 (66) = happyGoto action_754
action_753 _ = happyFail
action_754 (121) = happyShift action_15
action_754 (31) = happyGoto action_437
action_754 (32) = happyGoto action_438
action_754 (44) = happyGoto action_439
action_754 (45) = happyGoto action_757
action_754 (65) = happyGoto action_441
action_754 _ = happyReduce_119
action_755 (121) = happyShift action_15
action_755 (31) = happyGoto action_437
action_755 (32) = happyGoto action_438
action_755 (44) = happyGoto action_439
action_755 (45) = happyGoto action_756
action_755 (65) = happyGoto action_441
action_755 _ = happyReduce_119
action_756 (122) = happyShift action_129
action_756 (66) = happyGoto action_759
action_756 _ = happyFail
action_757 (122) = happyShift action_129
action_757 (66) = happyGoto action_758
action_757 _ = happyFail
action_758 _ = happyReduce_107
action_759 _ = happyReduce_108
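-- End of the shift/goto action tables (Happy-generated). The happyReduce_N /
-- happyReduction_N pairs below are the grammar's semantic actions: each one pops
-- the matched items off the parse stack (HappyStk) and builds the corresponding
-- IR node (IRType, IRConstant, IRSignal, IRProcess, ...), wrapped in the
-- HappyAbsSynN constructor for its nonterminal.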
happyReduce_1 = happySpecReduce_2 4 happyReduction_1
happyReduction_1 (HappyAbsSyn4 happy_var_2)
(HappyAbsSyn5 happy_var_1)
= HappyAbsSyn4
(happy_var_1 : happy_var_2
)
happyReduction_1 _ _ = notHappyAtAll
happyReduce_2 = happyReduce 5 4 happyReduction_2
happyReduction_2 (_ `HappyStk`
_ `HappyStk`
(HappyAbsSyn4 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn4
(happy_var_3
) `HappyStk` happyRest
happyReduce_3 = happySpecReduce_0 4 happyReduction_3
happyReduction_3 = HappyAbsSyn4
([]
)
happyReduce_4 = happySpecReduce_1 5 happyReduction_4
happyReduction_4 (HappyAbsSyn7 happy_var_1)
= HappyAbsSyn5
(IRTType happy_var_1
)
happyReduction_4 _ = notHappyAtAll
happyReduce_5 = happySpecReduce_1 5 happyReduction_5
happyReduction_5 (HappyAbsSyn16 happy_var_1)
= HappyAbsSyn5
(IRTConstant happy_var_1
)
happyReduction_5 _ = notHappyAtAll
happyReduce_6 = happySpecReduce_1 5 happyReduction_6
happyReduction_6 (HappyAbsSyn18 happy_var_1)
= HappyAbsSyn5
(IRTSignal happy_var_1
)
happyReduction_6 _ = notHappyAtAll
happyReduce_7 = happySpecReduce_1 5 happyReduction_7
happyReduction_7 (HappyAbsSyn20 happy_var_1)
= HappyAbsSyn5
(IRTAlias happy_var_1
)
happyReduction_7 _ = notHappyAtAll
happyReduce_8 = happySpecReduce_1 5 happyReduction_8
happyReduction_8 (HappyAbsSyn21 happy_var_1)
= HappyAbsSyn5
(IRTPort happy_var_1
)
happyReduction_8 _ = notHappyAtAll
happyReduce_9 = happySpecReduce_1 5 happyReduction_9
happyReduction_9 (HappyAbsSyn23 happy_var_1)
= HappyAbsSyn5
(IRTFunction happy_var_1
)
happyReduction_9 _ = notHappyAtAll
happyReduce_10 = happySpecReduce_1 5 happyReduction_10
happyReduction_10 (HappyAbsSyn24 happy_var_1)
= HappyAbsSyn5
(IRTProcedure happy_var_1
)
happyReduction_10 _ = notHappyAtAll
happyReduce_11 = happySpecReduce_1 5 happyReduction_11
happyReduction_11 (HappyAbsSyn6 happy_var_1)
= HappyAbsSyn5
(IRTGenerate happy_var_1
)
happyReduction_11 _ = notHappyAtAll
happyReduce_12 = happySpecReduce_1 5 happyReduction_12
happyReduction_12 (HappyAbsSyn30 happy_var_1)
= HappyAbsSyn5
(IRTProcess happy_var_1
)
happyReduction_12 _ = notHappyAtAll
happyReduce_13 = happySpecReduce_1 5 happyReduction_13
happyReduction_13 (HappyAbsSyn76 happy_var_1)
= HappyAbsSyn5
(IRTMM happy_var_1
)
happyReduction_13 _ = notHappyAtAll
happyReduce_14 = happySpecReduce_1 5 happyReduction_14
happyReduction_14 _
= HappyAbsSyn5
(IRTCorresp ([],[])
)
happyReduce_15 = happyReduce 5 5 happyReduction_15
happyReduction_15 (_ `HappyStk`
_ `HappyStk`
(HappyAbsSyn5 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn5
(happy_var_3
) `HappyStk` happyRest
happyReduce_16 = happyReduce 6 6 happyReduction_16
happyReduction_16 (_ `HappyStk`
(HappyAbsSyn4 happy_var_5) `HappyStk`
_ `HappyStk`
(HappyAbsSyn40 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn6
(IRGenIf happy_var_3 happy_var_5
) `HappyStk` happyRest
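-- The generate-for rule below spans 13 tokens; its integer loop bounds are taken
-- directly from the L.Integer lexer tokens at stack positions 7 and 9 and fed to
-- IRGenFor together with the loop variable and body.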
happyReduce_17 = happyReduce 13 6 happyReduction_17
happyReduction_17 (_ `HappyStk`
(HappyAbsSyn4 happy_var_12) `HappyStk`
_ `HappyStk`
_ `HappyStk`
(HappyTerminal happy_var_9) `HappyStk`
_ `HappyStk`
(HappyTerminal happy_var_7) `HappyStk`
_ `HappyStk`
_ `HappyStk`
(HappyAbsSyn114 happy_var_4) `HappyStk`
(HappyAbsSyn114 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn6
(let f (L.Integer v7 _) (L.Integer v9 _) = IRGenFor (show happy_var_3) happy_var_4 v7 v9 happy_var_12
in f happy_var_7 happy_var_9
) `HappyStk` happyRest
happyReduce_18 = happyReduce 5 7 happyReduction_18
happyReduction_18 (_ `HappyStk`
(HappyAbsSyn9 happy_var_4) `HappyStk`
(HappyAbsSyn114 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn7
(IRType happy_var_3 happy_var_4
) `HappyStk` happyRest
happyReduce_19 = happySpecReduce_1 8 happyReduction_19
happyReduction_19 (HappyTerminal (L.EnumIdent happy_var_1))
= HappyAbsSyn8
(happy_var_1
)
happyReduction_19 _ = notHappyAtAll
happyReduce_20 = happySpecReduce_1 8 happyReduction_20
happyReduction_20 (HappyAbsSyn59 happy_var_1)
= HappyAbsSyn8
(EnumId happy_var_1
)
happyReduction_20 _ = notHappyAtAll
happyReduce_21 = happySpecReduce_1 9 happyReduction_21
happyReduction_21 (HappyAbsSyn112 happy_var_1)
= HappyAbsSyn9
(ITDName happy_var_1
)
happyReduction_21 _ = notHappyAtAll
happyReduce_22 = happySpecReduce_1 9 happyReduction_22
happyReduction_22 (HappyAbsSyn13 happy_var_1)
= HappyAbsSyn9
(ITDRangeDescr happy_var_1
)
happyReduction_22 _ = notHappyAtAll
happyReduce_23 = happyReduce 4 9 happyReduction_23
happyReduction_23 (_ `HappyStk`
(HappyAbsSyn78 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn9
(ITDEnum happy_var_3
) `HappyStk` happyRest
happyReduce_24 = happyReduce 8 9 happyReduction_24
happyReduction_24 (_ `HappyStk`
(HappyAbsSyn9 happy_var_7) `HappyStk`
_ `HappyStk`
_ `HappyStk`
(HappyAbsSyn90 happy_var_4) `HappyStk`
_ `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn9
(ITDArray happy_var_4 happy_var_7
) `HappyStk` happyRest
happyReduce_25 = happyReduce 6 9 happyReduction_25
happyReduction_25 (_ `HappyStk`
(HappyAbsSyn81 happy_var_5) `HappyStk`
(HappyAbsSyn59 happy_var_4) `HappyStk`
(HappyAbsSyn13 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn9
(ITDPhysical happy_var_3 happy_var_4 happy_var_5
) `HappyStk` happyRest
happyReduce_26 = happyReduce 4 9 happyReduction_26
happyReduction_26 (_ `HappyStk`
(HappyAbsSyn84 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn9
(ITDRecord happy_var_3
) `HappyStk` happyRest
happyReduce_27 = happyReduce 4 9 happyReduction_27
happyReduction_27 (_ `HappyStk`
(HappyAbsSyn9 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn9
(ITDAccess happy_var_3
) `HappyStk` happyRest
happyReduce_28 = happyReduce 6 9 happyReduction_28
happyReduction_28 (_ `HappyStk`
(HappyAbsSyn9 happy_var_5) `HappyStk`
(HappyAbsSyn59 happy_var_4) `HappyStk`
(HappyAbsSyn69 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn9
(ITDResolved happy_var_3 happy_var_4 happy_var_5
) `HappyStk` happyRest
happyReduce_29 = happyReduce 4 9 happyReduction_29
happyReduction_29 (_ `HappyStk`
(HappyAbsSyn87 happy_var_3) `HappyStk`
(HappyAbsSyn112 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn9
(ITDConstraint (withLocLoc happy_var_2) (ITDName happy_var_2) happy_var_3
) `HappyStk` happyRest
happyReduce_30 = happyReduce 8 10 happyReduction_30
happyReduction_30 (_ `HappyStk`
_ `HappyStk`
(HappyAbsSyn59 happy_var_6) `HappyStk`
(HappyAbsSyn64 happy_var_5) `HappyStk`
_ `HappyStk`
(HappyAbsSyn59 happy_var_3) `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn10
(UnitDecl happy_var_2 happy_var_3 happy_var_5 happy_var_6
) `HappyStk` happyRest
happyReduce_31 = happyReduce 5 11 happyReduction_31
happyReduction_31 (_ `HappyStk`
(HappyAbsSyn9 happy_var_4) `HappyStk`
(HappyAbsSyn59 happy_var_3) `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn11
((happy_var_2, happy_var_3, happy_var_4)
) `HappyStk` happyRest
happyReduce_32 = happySpecReduce_1 12 happyReduction_32
happyReduction_32 _
= HappyAbsSyn12
(True
)
happyReduce_33 = happySpecReduce_1 12 happyReduction_33
happyReduction_33 _
= HappyAbsSyn12
(False
)
happyReduce_34 = happyReduce 7 13 happyReduction_34
happyReduction_34 (_ `HappyStk`
(HappyAbsSyn40 happy_var_6) `HappyStk`
(HappyAbsSyn12 happy_var_5) `HappyStk`
(HappyAbsSyn40 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn13
(IRDRange happy_var_2 happy_var_4 happy_var_5 happy_var_6
) `HappyStk` happyRest
happyReduce_35 = happyReduce 6 13 happyReduction_35
happyReduction_35 (_ `HappyStk`
(HappyAbsSyn51 happy_var_5) `HappyStk`
(HappyAbsSyn40 happy_var_4) `HappyStk`
(HappyAbsSyn69 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn13
(IRDARange happy_var_3 happy_var_4 happy_var_5
) `HappyStk` happyRest
happyReduce_36 = happyReduce 6 13 happyReduction_36
happyReduction_36 (_ `HappyStk`
(HappyAbsSyn51 happy_var_5) `HappyStk`
(HappyAbsSyn40 happy_var_4) `HappyStk`
(HappyAbsSyn69 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn13
(IRDAReverseRange happy_var_3 happy_var_4 happy_var_5
) `HappyStk` happyRest
happyReduce_37 = happySpecReduce_1 14 happyReduction_37
happyReduction_37 (HappyAbsSyn13 happy_var_1)
= HappyAbsSyn14
(IRARDRange happy_var_1
)
happyReduction_37 _ = notHappyAtAll
happyReduce_38 = happySpecReduce_1 14 happyReduction_38
happyReduction_38 (HappyAbsSyn112 happy_var_1)
= HappyAbsSyn14
(IRARDTypeMark (withLocLoc happy_var_1) (ITDName happy_var_1)
)
happyReduction_38 _ = notHappyAtAll
happyReduce_39 = happyReduce 4 14 happyReduction_39
happyReduction_39 (_ `HappyStk`
(HappyAbsSyn13 happy_var_3) `HappyStk`
(HappyAbsSyn112 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn14
(IRARDConstrained (withLocLoc happy_var_2) (ITDName happy_var_2) happy_var_3
) `HappyStk` happyRest
happyReduce_40 = happySpecReduce_1 15 happyReduction_40
happyReduction_40 (HappyAbsSyn14 happy_var_1)
= HappyAbsSyn15
(Constrained False happy_var_1
)
happyReduction_40 _ = notHappyAtAll
happyReduce_41 = happyReduce 7 15 happyReduction_41
happyReduction_41 (_ `HappyStk`
_ `HappyStk`
_ `HappyStk`
_ `HappyStk`
_ `HappyStk`
(HappyAbsSyn14 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn15
(Unconstrained happy_var_2
) `HappyStk` happyRest
happyReduce_42 = happyReduce 7 16 happyReduction_42
happyReduction_42 (_ `HappyStk`
(HappyAbsSyn40 happy_var_6) `HappyStk`
(HappyAbsSyn69 happy_var_5) `HappyStk`
(HappyAbsSyn9 happy_var_4) `HappyStk`
(HappyAbsSyn114 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn16
(IRConstant happy_var_3 happy_var_4 happy_var_5 happy_var_6
) `HappyStk` happyRest
happyReduce_43 = happyReduce 6 17 happyReduction_43
happyReduction_43 (_ `HappyStk`
(HappyAbsSyn19 happy_var_5) `HappyStk`
(HappyAbsSyn9 happy_var_4) `HappyStk`
(HappyAbsSyn114 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn17
(IRVariable happy_var_3 happy_var_4 happy_var_5
) `HappyStk` happyRest
happyReduce_44 = happyReduce 6 18 happyReduction_44
happyReduction_44 (_ `HappyStk`
(HappyAbsSyn19 happy_var_5) `HappyStk`
(HappyAbsSyn9 happy_var_4) `HappyStk`
(HappyAbsSyn114 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn18
(IRSignal happy_var_3 happy_var_4 happy_var_5
) `HappyStk` happyRest
happyReduce_45 = happySpecReduce_2 19 happyReduction_45
happyReduction_45 (HappyAbsSyn40 happy_var_2)
(HappyAbsSyn69 happy_var_1)
= HappyAbsSyn19
(IOEJustExpr happy_var_1 happy_var_2
)
happyReduction_45 _ _ = notHappyAtAll
happyReduce_46 = happySpecReduce_1 19 happyReduction_46
happyReduction_46 (HappyAbsSyn69 happy_var_1)
= HappyAbsSyn19
(IOENothing happy_var_1
)
happyReduction_46 _ = notHappyAtAll
happyReduce_47 = happyReduce 7 20 happyReduction_47
happyReduction_47 (_ `HappyStk`
(HappyAbsSyn51 happy_var_6) `HappyStk`
(HappyAbsSyn69 happy_var_5) `HappyStk`
(HappyAbsSyn9 happy_var_4) `HappyStk`
(HappyAbsSyn114 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn20
(IRAlias happy_var_3 happy_var_4 happy_var_5 happy_var_6
) `HappyStk` happyRest
happyReduce_48 = happyReduce 6 21 happyReduction_48
happyReduction_48 (_ `HappyStk`
(HappyAbsSyn19 happy_var_5) `HappyStk`
(HappyAbsSyn9 happy_var_4) `HappyStk`
(HappyAbsSyn114 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn21
(IRPort happy_var_3 happy_var_4 happy_var_5
) `HappyStk` happyRest
happyReduce_49 = happyReduce 8 21 happyReduction_49
happyReduction_49 (_ `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_6) `HappyStk`
(HappyAbsSyn9 happy_var_5) `HappyStk`
(HappyAbsSyn114 happy_var_4) `HappyStk`
_ `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn21
(IRPort happy_var_4 happy_var_5 (IOENothing happy_var_6)
) `HappyStk` happyRest
happyReduce_50 = happyReduce 8 21 happyReduction_50
happyReduction_50 (_ `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_6) `HappyStk`
(HappyAbsSyn9 happy_var_5) `HappyStk`
(HappyAbsSyn114 happy_var_4) `HappyStk`
_ `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn21
(IRPort happy_var_4 happy_var_5 (IOENothing happy_var_6)
) `HappyStk` happyRest
happyReduce_51 = happySpecReduce_1 22 happyReduction_51
happyReduction_51 (HappyAbsSyn69 happy_var_1)
= HappyAbsSyn19
(IOENothing happy_var_1
)
happyReduction_51 _ = notHappyAtAll
happyReduce_52 = happyReduce 5 22 happyReduction_52
happyReduction_52 (_ `HappyStk`
(HappyAbsSyn40 happy_var_4) `HappyStk`
(HappyAbsSyn69 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn19
(IOEJustExpr happy_var_3 happy_var_4
) `HappyStk` happyRest
happyReduce_53 = happyReduce 5 22 happyReduction_53
happyReduction_53 (_ `HappyStk`
_ `HappyStk`
(HappyAbsSyn19 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn19
(happy_var_3
) `HappyStk` happyRest
happyReduce_54 = happyReduce 9 23 happyReduction_54
happyReduction_54 (_ `HappyStk`
(HappyAbsSyn31 happy_var_8) `HappyStk`
(HappyAbsSyn9 happy_var_7) `HappyStk`
_ `HappyStk`
(HappyAbsSyn25 happy_var_5) `HappyStk`
_ `HappyStk`
(HappyAbsSyn114 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn23
(IRFunction happy_var_3 happy_var_5 happy_var_7 happy_var_8
) `HappyStk` happyRest
happyReduce_55 = happyReduce 8 24 happyReduction_55
happyReduction_55 (_ `HappyStk`
(HappyAbsSyn31 happy_var_7) `HappyStk`
_ `HappyStk`
(HappyAbsSyn25 happy_var_5) `HappyStk`
_ `HappyStk`
(HappyAbsSyn114 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn24
(IRProcedure happy_var_3 happy_var_5 happy_var_7
) `HappyStk` happyRest
happyReduce_56 = happySpecReduce_1 25 happyReduction_56
happyReduction_56 (HappyAbsSyn25 happy_var_1)
= HappyAbsSyn25
(happy_var_1
)
happyReduction_56 _ = notHappyAtAll
happyReduce_57 = happyReduce 7 26 happyReduction_57
happyReduction_57 (_ `HappyStk`
(HappyAbsSyn27 happy_var_6) `HappyStk`
(HappyAbsSyn28 happy_var_5) `HappyStk`
(HappyAbsSyn9 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn112 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn26
(IRArg happy_var_2 happy_var_4 happy_var_5 happy_var_6
) `HappyStk` happyRest
happyReduce_58 = happyReduce 5 26 happyReduction_58
happyReduction_58 (_ `HappyStk`
_ `HappyStk`
(HappyAbsSyn26 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn26
(happy_var_3
) `HappyStk` happyRest
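-- Argument modes and interface object kinds. The empty reductions below supply
-- the defaults when the keyword is omitted: mode defaults to AMIn, kind to
-- NIKVariable.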
happyReduce_59 = happySpecReduce_1 27 happyReduction_59
happyReduction_59 _
= HappyAbsSyn27
(AMIn
)
happyReduce_60 = happySpecReduce_1 27 happyReduction_60
happyReduction_60 _
= HappyAbsSyn27
(AMOut
)
happyReduce_61 = happySpecReduce_1 27 happyReduction_61
happyReduction_61 _
= HappyAbsSyn27
(AMInout
)
happyReduce_62 = happySpecReduce_0 27 happyReduction_62
happyReduction_62 = HappyAbsSyn27
(AMIn
)
happyReduce_63 = happySpecReduce_1 28 happyReduction_63
happyReduction_63 _
= HappyAbsSyn28
(NIKConstant
)
happyReduce_64 = happySpecReduce_1 28 happyReduction_64
happyReduction_64 _
= HappyAbsSyn28
(NIKVariable
)
happyReduce_65 = happySpecReduce_1 28 happyReduction_65
happyReduction_65 _
= HappyAbsSyn28
(NIKSignal
)
happyReduce_66 = happySpecReduce_1 28 happyReduction_66
happyReduction_66 _
= HappyAbsSyn28
(NIKFile
)
happyReduce_67 = happySpecReduce_0 28 happyReduction_67
happyReduction_67 = HappyAbsSyn28
(NIKVariable
)
happyReduce_68 = happySpecReduce_1 29 happyReduction_68
happyReduction_68 (HappyAbsSyn29 happy_var_1)
= HappyAbsSyn29
(happy_var_1
)
happyReduction_68 _ = notHappyAtAll
happyReduce_69 = happyReduce 14 30 happyReduction_69
happyReduction_69 (_ `HappyStk`
_ `HappyStk`
(HappyAbsSyn31 happy_var_12) `HappyStk`
_ `HappyStk`
(HappyAbsSyn99 happy_var_10) `HappyStk`
_ `HappyStk`
_ `HappyStk`
_ `HappyStk`
_ `HappyStk`
(HappyAbsSyn37 happy_var_5) `HappyStk`
_ `HappyStk`
(HappyAbsSyn114 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn30
(IRProcess happy_var_3 happy_var_5 happy_var_10 happy_var_12
) `HappyStk` happyRest
happyReduce_70 = happyReduce 8 30 happyReduction_70
happyReduction_70 (_ `HappyStk`
(HappyAbsSyn31 happy_var_7) `HappyStk`
_ `HappyStk`
(HappyAbsSyn37 happy_var_5) `HappyStk`
_ `HappyStk`
(HappyAbsSyn114 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn30
(IRProcess happy_var_3 happy_var_5 [] happy_var_7
) `HappyStk` happyRest
happyReduce_71 = happySpecReduce_1 31 happyReduction_71
happyReduction_71 (HappyAbsSyn31 happy_var_1)
= HappyAbsSyn31
(happy_var_1
)
happyReduction_71 _ = notHappyAtAll
happyReduce_72 = happyReduce 7 31 happyReduction_72
happyReduction_72 (_ `HappyStk`
(HappyAbsSyn31 happy_var_6) `HappyStk`
_ `HappyStk`
(HappyAbsSyn99 happy_var_4) `HappyStk`
_ `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn31
(ISLet happy_var_4 happy_var_6
) `HappyStk` happyRest
happyReduce_73 = happyReduce 4 32 happyReduction_73
happyReduction_73 (_ `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn31
(ISReturn happy_var_2
) `HappyStk` happyRest
happyReduce_74 = happyReduce 5 32 happyReduction_74
happyReduction_74 (_ `HappyStk`
(HappyAbsSyn40 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn31
(ISReturnExpr happy_var_2 happy_var_4
) `HappyStk` happyRest
happyReduce_75 = happyReduce 5 32 happyReduction_75
happyReduction_75 (_ `HappyStk`
(HappyAbsSyn69 happy_var_4) `HappyStk`
(HappyAbsSyn56 happy_var_3) `HappyStk`
(HappyAbsSyn112 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn31
(ISProcCall happy_var_2 happy_var_3 happy_var_4
) `HappyStk` happyRest
happyReduce_76 = happyReduce 11 32 happyReduction_76
happyReduction_76 (_ `HappyStk`
_ `HappyStk`
(HappyAbsSyn31 happy_var_9) `HappyStk`
_ `HappyStk`
_ `HappyStk`
(HappyAbsSyn31 happy_var_6) `HappyStk`
_ `HappyStk`
(HappyAbsSyn40 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn31
(ISIf happy_var_2 happy_var_4 happy_var_6 happy_var_9
) `HappyStk` happyRest
happyReduce_77 = happyReduce 7 32 happyReduction_77
happyReduction_77 (_ `HappyStk`
(HappyAbsSyn40 happy_var_6) `HappyStk`
(HappyAbsSyn69 happy_var_5) `HappyStk`
(HappyAbsSyn51 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn31
(ISAssign happy_var_2 happy_var_4 happy_var_5 happy_var_6
) `HappyStk` happyRest
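-- A signal assignment written without an explicit delay is desugared in the next
-- reduction into a single "after 0 sec" waveform element (zero-delay delivery).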
happyReduce_78 = happyReduce 7 32 happyReduction_78
happyReduction_78 (_ `HappyStk`
(HappyAbsSyn40 happy_var_6) `HappyStk`
(HappyAbsSyn69 happy_var_5) `HappyStk`
(HappyAbsSyn51 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn31
(ISSignalAssign happy_var_2 happy_var_4 happy_var_5
[IRAfter happy_var_6 (IEPhysical (WithLoc happy_var_5 0) (WithLoc happy_var_5 (fsLit "sec")))]
) `HappyStk` happyRest
happyReduce_79 = happyReduce 7 32 happyReduction_79
happyReduction_79 (_ `HappyStk`
(HappyAbsSyn36 happy_var_6) `HappyStk`
(HappyAbsSyn69 happy_var_5) `HappyStk`
(HappyAbsSyn51 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn31
(ISSignalAssign happy_var_2 happy_var_4 happy_var_5 happy_var_6
) `HappyStk` happyRest
happyReduce_80 = happyReduce 7 32 happyReduction_80
happyReduction_80 (_ `HappyStk`
(HappyAbsSyn40 happy_var_6) `HappyStk`
(HappyAbsSyn40 happy_var_5) `HappyStk`
(HappyAbsSyn40 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn31
(ISAssert happy_var_2 happy_var_4 happy_var_5 happy_var_6
) `HappyStk` happyRest
happyReduce_81 = happyReduce 5 32 happyReduction_81
happyReduction_81 (_ `HappyStk`
(HappyAbsSyn40 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn31
(ISReport happy_var_2 happy_var_4 $ IEEnumIdent happy_var_2 (EnumId $ fsLit $ "NOTE")
) `HappyStk` happyRest
happyReduce_82 = happyReduce 6 32 happyReduction_82
happyReduction_82 (_ `HappyStk`
(HappyAbsSyn40 happy_var_5) `HappyStk`
(HappyAbsSyn40 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn31
(ISReport happy_var_2 happy_var_4 happy_var_5
) `HappyStk` happyRest
happyReduce_83 = happyReduce 7 32 happyReduction_83
happyReduction_83 (_ `HappyStk`
(HappyAbsSyn38 happy_var_6) `HappyStk`
(HappyAbsSyn38 happy_var_5) `HappyStk`
(HappyAbsSyn37 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn31
(ISWait happy_var_2 happy_var_4 happy_var_5 happy_var_6
) `HappyStk` happyRest
happyReduce_84 = happyReduce 4 32 happyReduction_84
happyReduction_84 (_ `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn31
(ISNop happy_var_2
) `HappyStk` happyRest
happyReduce_85 = happyReduce 10 32 happyReduction_85
happyReduction_85 (_ `HappyStk`
(HappyAbsSyn102 happy_var_9) `HappyStk`
_ `HappyStk`
(HappyAbsSyn9 happy_var_7) `HappyStk`
_ `HappyStk`
(HappyAbsSyn40 happy_var_5) `HappyStk`
_ `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn31
(ISCase happy_var_2 happy_var_5 happy_var_7 happy_var_9
) `HappyStk` happyRest
happyReduce_86 = happyReduce 8 32 happyReduction_86
happyReduction_86 (_ `HappyStk`
(HappyAbsSyn31 happy_var_7) `HappyStk`
(HappyAbsSyn9 happy_var_6) `HappyStk`
(HappyAbsSyn112 happy_var_5) `HappyStk`
(HappyAbsSyn33 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn31
(ISFor happy_var_4 happy_var_2 happy_var_5 happy_var_6 happy_var_7
) `HappyStk` happyRest
happyReduce_87 = happyReduce 7 32 happyReduction_87
happyReduction_87 (_ `HappyStk`
(HappyAbsSyn31 happy_var_6) `HappyStk`
(HappyAbsSyn40 happy_var_5) `HappyStk`
(HappyAbsSyn33 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn31
(ISWhile happy_var_4 happy_var_2 happy_var_5 happy_var_6
) `HappyStk` happyRest
happyReduce_88 = happyReduce 5 32 happyReduction_88
happyReduction_88 (_ `HappyStk`
(HappyAbsSyn33 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn31
(ISExit happy_var_2 happy_var_4
) `HappyStk` happyRest
happyReduce_89 = happyReduce 5 32 happyReduction_89
happyReduction_89 (_ `HappyStk`
(HappyAbsSyn33 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn31
(ISNext happy_var_2 happy_var_4
) `HappyStk` happyRest
happyReduce_90 = happyReduce 5 32 happyReduction_90
happyReduction_90 (_ `HappyStk`
_ `HappyStk`
(HappyAbsSyn31 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn31
(happy_var_3
) `HappyStk` happyRest
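-- Optional statement labels: an absent label is represented by the empty
-- FastString (fsLit "").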
happyReduce_91 = happySpecReduce_0 33 happyReduction_91
happyReduction_91 = HappyAbsSyn33
(fsLit ""
)
happyReduce_92 = happySpecReduce_1 33 happyReduction_92
happyReduction_92 (HappyTerminal (L.Label happy_var_1))
= HappyAbsSyn33
(bsToFs happy_var_1
)
happyReduction_92 _ = notHappyAtAll
happyReduce_93 = happySpecReduce_0 34 happyReduction_93
happyReduction_93 = HappyAbsSyn33
(fsLit ""
)
happyReduce_94 = happySpecReduce_1 34 happyReduction_94
happyReduction_94 (HappyAbsSyn59 happy_var_1)
= HappyAbsSyn33
(happy_var_1
)
happyReduction_94 _ = notHappyAtAll
happyReduce_95 = happySpecReduce_3 35 happyReduction_95
happyReduction_95 (HappyAbsSyn40 happy_var_3)
_
(HappyAbsSyn40 happy_var_1)
= HappyAbsSyn35
(IRAfter happy_var_1 happy_var_3
)
happyReduction_95 _ _ _ = notHappyAtAll
happyReduce_96 = happySpecReduce_1 36 happyReduction_96
happyReduction_96 (HappyAbsSyn36 happy_var_1)
= HappyAbsSyn36
(happy_var_1
)
happyReduction_96 _ = notHappyAtAll
happyReduce_97 = happyReduce 4 37 happyReduction_97
happyReduction_97 (_ `HappyStk`
(HappyAbsSyn37 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn37
(happy_var_3
) `HappyStk` happyRest
happyReduce_98 = happySpecReduce_0 37 happyReduction_98
happyReduction_98 = HappyAbsSyn37
([]
)
happyReduce_99 = happySpecReduce_2 38 happyReduction_99
happyReduction_99 (HappyAbsSyn40 happy_var_2)
_
= HappyAbsSyn38
(Just happy_var_2
)
happyReduction_99 _ _ = notHappyAtAll
happyReduce_100 = happySpecReduce_0 38 happyReduction_100
happyReduction_100 = HappyAbsSyn38
(Nothing
)
happyReduce_101 = happySpecReduce_2 39 happyReduction_101
happyReduction_101 (HappyAbsSyn40 happy_var_2)
_
= HappyAbsSyn38
(Just happy_var_2
)
happyReduction_101 _ _ = notHappyAtAll
happyReduce_102 = happySpecReduce_0 39 happyReduction_102
happyReduction_102 = HappyAbsSyn38
(Nothing
)
happyReduce_103 = happySpecReduce_2 40 happyReduction_103
happyReduction_103 (HappyAbsSyn40 happy_var_2)
_
= HappyAbsSyn40
(happy_var_2
)
happyReduction_103 _ _ = notHappyAtAll
happyReduce_104 = happySpecReduce_1 40 happyReduction_104
happyReduction_104 (HappyAbsSyn69 happy_var_1)
= HappyAbsSyn40
(IEString happy_var_1 (B.pack "Assertion violation")
)
happyReduction_104 _ = notHappyAtAll
happyReduce_105 = happySpecReduce_2 41 happyReduction_105
happyReduction_105 (HappyAbsSyn40 happy_var_2)
_
= HappyAbsSyn40
(happy_var_2
)
happyReduction_105 _ _ = notHappyAtAll
happyReduce_106 = happySpecReduce_1 41 happyReduction_106
happyReduction_106 (HappyAbsSyn69 happy_var_1)
= HappyAbsSyn40
(IEEnumIdent happy_var_1 (EnumId $ fsLit $ "ERROR")
)
happyReduction_106 _ = notHappyAtAll
happyReduce_107 = happyReduce 7 42 happyReduction_107
happyReduction_107 (_ `HappyStk`
(HappyAbsSyn31 happy_var_6) `HappyStk`
_ `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn42
(ICEOthers happy_var_3 happy_var_6
) `HappyStk` happyRest
happyReduce_108 = happyReduce 7 42 happyReduction_108
happyReduction_108 (_ `HappyStk`
(HappyAbsSyn31 happy_var_6) `HappyStk`
_ `HappyStk`
(HappyAbsSyn56 happy_var_4) `HappyStk`
(HappyAbsSyn69 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn42
(ICEExpr happy_var_3 happy_var_4 happy_var_6
) `HappyStk` happyRest
happyReduce_109 = happySpecReduce_1 43 happyReduction_109
happyReduction_109 (HappyAbsSyn16 happy_var_1)
= HappyAbsSyn43
(ILDConstant happy_var_1
)
happyReduction_109 _ = notHappyAtAll
happyReduce_110 = happySpecReduce_1 43 happyReduction_110
happyReduction_110 (HappyAbsSyn17 happy_var_1)
= HappyAbsSyn43
(ILDVariable happy_var_1
)
happyReduction_110 _ = notHappyAtAll
happyReduce_111 = happySpecReduce_1 43 happyReduction_111
happyReduction_111 (HappyAbsSyn20 happy_var_1)
= HappyAbsSyn43
(ILDAlias happy_var_1
)
happyReduction_111 _ = notHappyAtAll
happyReduce_112 = happySpecReduce_1 43 happyReduction_112
happyReduction_112 (HappyAbsSyn23 happy_var_1)
= HappyAbsSyn43
(ILDFunction happy_var_1
)
happyReduction_112 _ = notHappyAtAll
happyReduce_113 = happySpecReduce_1 43 happyReduction_113
happyReduction_113 (HappyAbsSyn24 happy_var_1)
= HappyAbsSyn43
(ILDProcedure happy_var_1
)
happyReduction_113 _ = notHappyAtAll
happyReduce_114 = happyReduce 5 43 happyReduction_114
happyReduction_114 (_ `HappyStk`
(HappyAbsSyn9 happy_var_4) `HappyStk`
(HappyAbsSyn114 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn43
(ILDType happy_var_3 happy_var_4
) `HappyStk` happyRest
happyReduce_115 = happyReduce 5 43 happyReduction_115
happyReduction_115 (_ `HappyStk`
_ `HappyStk`
(HappyAbsSyn43 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn43
(happy_var_3
) `HappyStk` happyRest
happyReduce_116 = happySpecReduce_1 44 happyReduction_116
happyReduction_116 (HappyAbsSyn31 happy_var_1)
= HappyAbsSyn31
(happy_var_1
)
happyReduction_116 _ = notHappyAtAll
happyReduce_117 = happySpecReduce_2 44 happyReduction_117
happyReduction_117 (HappyAbsSyn31 happy_var_2)
(HappyAbsSyn31 happy_var_1)
= HappyAbsSyn31
(ISSeq happy_var_1 happy_var_2
)
happyReduction_117 _ _ = notHappyAtAll
happyReduce_118 = happySpecReduce_1 45 happyReduction_118
happyReduction_118 (HappyAbsSyn31 happy_var_1)
= HappyAbsSyn31
(happy_var_1
)
happyReduction_118 _ = notHappyAtAll
happyReduce_119 = happySpecReduce_0 45 happyReduction_119
happyReduction_119 = HappyAbsSyn31
(ISNil
)
happyReduce_120 = happySpecReduce_1 46 happyReduction_120
happyReduction_120 (HappyAbsSyn31 happy_var_1)
= HappyAbsSyn31
(happy_var_1
)
happyReduction_120 _ = notHappyAtAll
happyReduce_121 = happySpecReduce_2 46 happyReduction_121
happyReduction_121 (HappyAbsSyn31 happy_var_2)
(HappyAbsSyn31 happy_var_1)
= HappyAbsSyn31
(ISSeq happy_var_1 happy_var_2
)
happyReduction_121 _ _ = notHappyAtAll
happyReduce_122 = happySpecReduce_1 47 happyReduction_122
happyReduction_122 (HappyAbsSyn31 happy_var_1)
= HappyAbsSyn31
(happy_var_1
)
happyReduction_122 _ = notHappyAtAll
happyReduce_123 = happySpecReduce_0 47 happyReduction_123
happyReduction_123 = HappyAbsSyn31
(ISNil
)
happyReduce_124 = happySpecReduce_2 48 happyReduction_124
happyReduction_124 (HappyAbsSyn69 happy_var_2)
(HappyAbsSyn40 happy_var_1)
= HappyAbsSyn48
(IEAExpr happy_var_2 happy_var_1
)
happyReduction_124 _ _ = notHappyAtAll
happyReduce_125 = happyReduce 5 48 happyReduction_125
happyReduction_125 (_ `HappyStk`
(HappyAbsSyn40 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn48
(IEAOthers happy_var_2 happy_var_4
) `HappyStk` happyRest
happyReduce_126 = happyReduce 6 48 happyReduction_126
happyReduction_126 (_ `HappyStk`
(HappyAbsSyn40 happy_var_5) `HappyStk`
(HappyAbsSyn14 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn48
(IEAType happy_var_2 happy_var_4 happy_var_5
) `HappyStk` happyRest
happyReduce_127 = happyReduce 6 48 happyReduction_127
happyReduction_127 (_ `HappyStk`
(HappyAbsSyn40 happy_var_5) `HappyStk`
(HappyAbsSyn59 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn48
(IEAField happy_var_2 happy_var_4 happy_var_5
) `HappyStk` happyRest
happyReduce_128 = happyReduce 6 48 happyReduction_128
happyReduction_128 (_ `HappyStk`
(HappyAbsSyn40 happy_var_5) `HappyStk`
(HappyAbsSyn40 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn48
(IEAExprIndex happy_var_2 happy_var_4 happy_var_5
) `HappyStk` happyRest
happyReduce_129 = happyReduce 8 49 happyReduction_129
happyReduction_129 (_ `HappyStk`
(HappyAbsSyn9 happy_var_7) `HappyStk`
_ `HappyStk`
_ `HappyStk`
(HappyAbsSyn105 happy_var_4) `HappyStk`
(HappyAbsSyn69 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn29
(INAggregate happy_var_3 happy_var_4 happy_var_7
) `HappyStk` happyRest
happyReduce_130 = happySpecReduce_1 49 happyReduction_130
happyReduction_130 (HappyAbsSyn114 happy_var_1)
= HappyAbsSyn29
(INIdent happy_var_1
)
happyReduction_130 _ = notHappyAtAll
happyReduce_131 = happyReduce 6 49 happyReduction_131
happyReduction_131 (_ `HappyStk`
(HappyAbsSyn59 happy_var_5) `HappyStk`
(HappyAbsSyn69 happy_var_4) `HappyStk`
(HappyAbsSyn29 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn29
(INField happy_var_3 happy_var_4 happy_var_5
) `HappyStk` happyRest
happyReduce_132 = happyReduce 8 49 happyReduction_132
happyReduction_132 (_ `HappyStk`
(HappyAbsSyn59 happy_var_7) `HappyStk`
(HappyAbsSyn69 happy_var_6) `HappyStk`
_ `HappyStk`
(HappyAbsSyn29 happy_var_4) `HappyStk`
_ `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn29
(INField happy_var_4 happy_var_6 happy_var_7
) `HappyStk` happyRest
happyReduce_133 = happyReduce 6 49 happyReduction_133
happyReduction_133 (_ `HappyStk`
(HappyAbsSyn56 happy_var_5) `HappyStk`
(HappyAbsSyn29 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn29
(INIndex NEKDynamic happy_var_4 happy_var_2 happy_var_5
) `HappyStk` happyRest
happyReduce_134 = happyReduce 6 49 happyReduction_134
happyReduction_134 (_ `HappyStk`
(HappyAbsSyn14 happy_var_5) `HappyStk`
(HappyAbsSyn29 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn29
(INSlice NEKDynamic happy_var_4 happy_var_2 happy_var_5
) `HappyStk` happyRest
happyReduce_135 = happySpecReduce_1 50 happyReduction_135
happyReduction_135 (HappyAbsSyn29 happy_var_1)
= HappyAbsSyn29
(happy_var_1
)
happyReduction_135 _ = notHappyAtAll
happyReduce_136 = happyReduce 5 50 happyReduction_136
happyReduction_136 (_ `HappyStk`
_ `HappyStk`
(HappyAbsSyn29 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn29
(happy_var_3
) `HappyStk` happyRest
happyReduce_137 = happySpecReduce_1 51 happyReduction_137
happyReduction_137 (HappyAbsSyn29 happy_var_1)
= HappyAbsSyn51
(IRName happy_var_1 ExprCheck
)
happyReduction_137 _ = notHappyAtAll
happyReduce_138 = happySpecReduce_1 52 happyReduction_138
happyReduction_138 (HappyAbsSyn29 happy_var_1)
= HappyAbsSyn51
(IRName happy_var_1 AssignCheck
)
happyReduction_138 _ = notHappyAtAll
happyReduce_139 = happySpecReduce_1 53 happyReduction_139
happyReduction_139 (HappyAbsSyn29 happy_var_1)
= HappyAbsSyn51
(IRName happy_var_1 SignalCheck
)
happyReduction_139 _ = notHappyAtAll
happyReduce_140 = happySpecReduce_1 54 happyReduction_140
happyReduction_140 (HappyAbsSyn29 happy_var_1)
= HappyAbsSyn51
(IRName happy_var_1 TypeCheck
)
happyReduction_140 _ = notHappyAtAll
happyReduce_141 = happySpecReduce_2 55 happyReduction_141
happyReduction_141 (HappyAbsSyn69 happy_var_2)
(HappyAbsSyn29 happy_var_1)
= HappyAbsSyn40
(IEName happy_var_2 (IRName happy_var_1 ExprCheck)
	                          -- loc at the end, because otherwise shift/reduce conflicts appear
)
happyReduction_141 _ _ = notHappyAtAll
happyReduce_142 = happySpecReduce_2 55 happyReduction_142
happyReduction_142 (HappyTerminal happy_var_2)
(HappyAbsSyn69 happy_var_1)
= HappyAbsSyn40
(IEString happy_var_1 (L.decodedString happy_var_2)
)
happyReduction_142 _ _ = notHappyAtAll
happyReduce_143 = happyReduce 4 55 happyReduction_143
happyReduction_143 (_ `HappyStk`
(HappyAbsSyn105 happy_var_3) `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IEAggregate happy_var_2 happy_var_3
) `HappyStk` happyRest
happyReduce_144 = happyReduce 6 55 happyReduction_144
happyReduction_144 (_ `HappyStk`
(HappyAbsSyn40 happy_var_5) `HappyStk`
(HappyAbsSyn9 happy_var_4) `HappyStk`
(HappyAbsSyn69 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IEQualifyType happy_var_3 happy_var_4 happy_var_5
) `HappyStk` happyRest
happyReduce_145 = happyReduce 7 55 happyReduction_145
happyReduction_145 (_ `HappyStk`
(HappyAbsSyn40 happy_var_6) `HappyStk`
(HappyAbsSyn9 happy_var_5) `HappyStk`
(HappyAbsSyn9 happy_var_4) `HappyStk`
(HappyAbsSyn69 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IEVQualifyType happy_var_3 happy_var_4 happy_var_5 happy_var_6
) `HappyStk` happyRest
happyReduce_146 = happyReduce 5 55 happyReduction_146
happyReduction_146 (_ `HappyStk`
(HappyAbsSyn9 happy_var_4) `HappyStk`
(HappyAbsSyn69 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IETypeAttr happy_var_3 T_left happy_var_4
) `HappyStk` happyRest
happyReduce_147 = happyReduce 5 55 happyReduction_147
happyReduction_147 (_ `HappyStk`
(HappyAbsSyn9 happy_var_4) `HappyStk`
(HappyAbsSyn69 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IETypeAttr happy_var_3 T_right happy_var_4
) `HappyStk` happyRest
happyReduce_148 = happyReduce 5 55 happyReduction_148
happyReduction_148 (_ `HappyStk`
(HappyAbsSyn9 happy_var_4) `HappyStk`
(HappyAbsSyn69 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IETypeAttr happy_var_3 T_high happy_var_4
) `HappyStk` happyRest
happyReduce_149 = happyReduce 5 55 happyReduction_149
happyReduction_149 (_ `HappyStk`
(HappyAbsSyn9 happy_var_4) `HappyStk`
(HappyAbsSyn69 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IETypeAttr happy_var_3 T_low happy_var_4
) `HappyStk` happyRest
happyReduce_150 = happyReduce 5 55 happyReduction_150
happyReduction_150 (_ `HappyStk`
(HappyAbsSyn9 happy_var_4) `HappyStk`
(HappyAbsSyn69 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IETypeAttr happy_var_3 T_ascending happy_var_4
) `HappyStk` happyRest
happyReduce_151 = happyReduce 6 55 happyReduction_151
happyReduction_151 (_ `HappyStk`
(HappyAbsSyn40 happy_var_5) `HappyStk`
(HappyAbsSyn9 happy_var_4) `HappyStk`
(HappyAbsSyn69 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IETypeValueAttr happy_var_3 T_succ happy_var_5 happy_var_4
) `HappyStk` happyRest
happyReduce_152 = happyReduce 6 55 happyReduction_152
happyReduction_152 (_ `HappyStk`
(HappyAbsSyn40 happy_var_5) `HappyStk`
(HappyAbsSyn9 happy_var_4) `HappyStk`
(HappyAbsSyn69 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IETypeValueAttr happy_var_3 T_pred happy_var_5 happy_var_4
) `HappyStk` happyRest
happyReduce_153 = happyReduce 6 55 happyReduction_153
happyReduction_153 (_ `HappyStk`
(HappyAbsSyn40 happy_var_5) `HappyStk`
(HappyAbsSyn9 happy_var_4) `HappyStk`
(HappyAbsSyn69 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IETypeValueAttr happy_var_3 T_val happy_var_5 happy_var_4
) `HappyStk` happyRest
happyReduce_154 = happyReduce 6 55 happyReduction_154
happyReduction_154 (_ `HappyStk`
(HappyAbsSyn40 happy_var_5) `HappyStk`
(HappyAbsSyn9 happy_var_4) `HappyStk`
(HappyAbsSyn69 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IETypeValueAttr happy_var_3 T_pos happy_var_5 happy_var_4
) `HappyStk` happyRest
happyReduce_155 = happyReduce 6 55 happyReduction_155
happyReduction_155 (_ `HappyStk`
(HappyAbsSyn51 happy_var_5) `HappyStk`
(HappyAbsSyn40 happy_var_4) `HappyStk`
(HappyAbsSyn69 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IEArrayAttr happy_var_3 A_left happy_var_4 happy_var_5
) `HappyStk` happyRest
happyReduce_156 = happyReduce 6 55 happyReduction_156
happyReduction_156 (_ `HappyStk`
(HappyAbsSyn51 happy_var_5) `HappyStk`
(HappyAbsSyn40 happy_var_4) `HappyStk`
(HappyAbsSyn69 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IEArrayAttr happy_var_3 A_right happy_var_4 happy_var_5
) `HappyStk` happyRest
happyReduce_157 = happyReduce 6 55 happyReduction_157
happyReduction_157 (_ `HappyStk`
(HappyAbsSyn51 happy_var_5) `HappyStk`
(HappyAbsSyn40 happy_var_4) `HappyStk`
(HappyAbsSyn69 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IEArrayAttr happy_var_3 A_high happy_var_4 happy_var_5
) `HappyStk` happyRest
happyReduce_158 = happyReduce 6 55 happyReduction_158
happyReduction_158 (_ `HappyStk`
(HappyAbsSyn51 happy_var_5) `HappyStk`
(HappyAbsSyn40 happy_var_4) `HappyStk`
(HappyAbsSyn69 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IEArrayAttr happy_var_3 A_low happy_var_4 happy_var_5
) `HappyStk` happyRest
happyReduce_159 = happyReduce 6 55 happyReduction_159
happyReduction_159 (_ `HappyStk`
(HappyAbsSyn51 happy_var_5) `HappyStk`
(HappyAbsSyn40 happy_var_4) `HappyStk`
(HappyAbsSyn69 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IEArrayAttr happy_var_3 A_ascending happy_var_4 happy_var_5
) `HappyStk` happyRest
happyReduce_160 = happyReduce 6 55 happyReduction_160
happyReduction_160 (_ `HappyStk`
(HappyAbsSyn51 happy_var_5) `HappyStk`
(HappyAbsSyn40 happy_var_4) `HappyStk`
(HappyAbsSyn69 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IEArrayAttr happy_var_3 A_length happy_var_4 happy_var_5
) `HappyStk` happyRest
happyReduce_161 = happyReduce 5 55 happyReduction_161
happyReduction_161 (_ `HappyStk`
(HappyAbsSyn51 happy_var_4) `HappyStk`
(HappyAbsSyn69 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IESignalAttr happy_var_3 S_event happy_var_4
) `HappyStk` happyRest
happyReduce_162 = happyReduce 5 55 happyReduction_162
happyReduction_162 (_ `HappyStk`
(HappyAbsSyn51 happy_var_4) `HappyStk`
(HappyAbsSyn69 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IESignalAttr happy_var_3 S_active happy_var_4
) `HappyStk` happyRest
happyReduce_163 = happyReduce 5 55 happyReduction_163
happyReduction_163 (_ `HappyStk`
(HappyAbsSyn51 happy_var_4) `HappyStk`
(HappyAbsSyn69 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IESignalAttr happy_var_3 S_last_value happy_var_4
) `HappyStk` happyRest
happyReduce_164 = happyReduce 6 55 happyReduction_164
happyReduction_164 (_ `HappyStk`
(HappyAbsSyn40 happy_var_5) `HappyStk`
(HappyAbsSyn51 happy_var_4) `HappyStk`
(HappyAbsSyn69 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IESignalAttrTimed happy_var_3 S_stable happy_var_4 happy_var_5
) `HappyStk` happyRest
happyReduce_165 = happyReduce 6 55 happyReduction_165
happyReduction_165 (_ `HappyStk`
(HappyAbsSyn40 happy_var_5) `HappyStk`
(HappyAbsSyn51 happy_var_4) `HappyStk`
(HappyAbsSyn69 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IESignalAttrTimed happy_var_3 S_delayed happy_var_4 happy_var_5
) `HappyStk` happyRest
happyReduce_166 = happyReduce 6 55 happyReduction_166
happyReduction_166 (_ `HappyStk`
(HappyAbsSyn40 happy_var_5) `HappyStk`
(HappyAbsSyn51 happy_var_4) `HappyStk`
(HappyAbsSyn69 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IESignalAttrTimed happy_var_3 S_quiet happy_var_4 happy_var_5
) `HappyStk` happyRest
happyReduce_167 = happySpecReduce_2 55 happyReduction_167
happyReduction_167 (HappyTerminal (L.EnumIdent happy_var_2))
(HappyAbsSyn69 happy_var_1)
= HappyAbsSyn40
(IEEnumIdent happy_var_1 happy_var_2
)
happyReduction_167 _ _ = notHappyAtAll
happyReduce_168 = happySpecReduce_2 55 happyReduction_168
happyReduction_168 (HappyAbsSyn63 happy_var_2)
(HappyAbsSyn69 happy_var_1)
= HappyAbsSyn40
(IEInt happy_var_1 happy_var_2
)
happyReduction_168 _ _ = notHappyAtAll
happyReduce_169 = happySpecReduce_2 55 happyReduction_169
happyReduction_169 (HappyTerminal (L.Double happy_var_2))
(HappyAbsSyn69 happy_var_1)
= HappyAbsSyn40
(IEDouble happy_var_1 happy_var_2
)
happyReduction_169 _ _ = notHappyAtAll
happyReduce_170 = happyReduce 4 55 happyReduction_170
happyReduction_170 (_ `HappyStk`
(HappyAbsSyn112 happy_var_3) `HappyStk`
(HappyAbsSyn111 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IEPhysical happy_var_2 happy_var_3
) `HappyStk` happyRest
happyReduce_171 = happyReduce 5 55 happyReduction_171
happyReduction_171 (_ `HappyStk`
(HappyAbsSyn69 happy_var_4) `HappyStk`
(HappyAbsSyn56 happy_var_3) `HappyStk`
(HappyAbsSyn112 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IEFunctionCall happy_var_2 happy_var_3 happy_var_4
) `HappyStk` happyRest
happyReduce_172 = happyReduce 7 55 happyReduction_172
happyReduction_172 (_ `HappyStk`
(HappyAbsSyn40 happy_var_6) `HappyStk`
(HappyAbsSyn40 happy_var_5) `HappyStk`
(HappyAbsSyn9 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IERelOp happy_var_2 IEq happy_var_4 happy_var_5 happy_var_6
) `HappyStk` happyRest
happyReduce_173 = happyReduce 7 55 happyReduction_173
happyReduction_173 (_ `HappyStk`
(HappyAbsSyn40 happy_var_6) `HappyStk`
(HappyAbsSyn40 happy_var_5) `HappyStk`
(HappyAbsSyn9 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IERelOp happy_var_2 INeq happy_var_4 happy_var_5 happy_var_6
) `HappyStk` happyRest
happyReduce_174 = happyReduce 7 55 happyReduction_174
happyReduction_174 (_ `HappyStk`
(HappyAbsSyn40 happy_var_6) `HappyStk`
(HappyAbsSyn40 happy_var_5) `HappyStk`
(HappyAbsSyn9 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IERelOp happy_var_2 ILess happy_var_4 happy_var_5 happy_var_6
) `HappyStk` happyRest
happyReduce_175 = happyReduce 7 55 happyReduction_175
happyReduction_175 (_ `HappyStk`
(HappyAbsSyn40 happy_var_6) `HappyStk`
(HappyAbsSyn40 happy_var_5) `HappyStk`
(HappyAbsSyn9 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IERelOp happy_var_2 ILessEqual happy_var_4 happy_var_5 happy_var_6
) `HappyStk` happyRest
happyReduce_176 = happyReduce 7 55 happyReduction_176
happyReduction_176 (_ `HappyStk`
(HappyAbsSyn40 happy_var_6) `HappyStk`
(HappyAbsSyn40 happy_var_5) `HappyStk`
(HappyAbsSyn9 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IERelOp happy_var_2 IGreater happy_var_4 happy_var_5 happy_var_6
) `HappyStk` happyRest
happyReduce_177 = happyReduce 7 55 happyReduction_177
happyReduction_177 (_ `HappyStk`
(HappyAbsSyn40 happy_var_6) `HappyStk`
(HappyAbsSyn40 happy_var_5) `HappyStk`
(HappyAbsSyn9 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IERelOp happy_var_2 IGreaterEqual happy_var_4 happy_var_5 happy_var_6
) `HappyStk` happyRest
happyReduce_178 = happyReduce 6 55 happyReduction_178
happyReduction_178 (_ `HappyStk`
(HappyAbsSyn40 happy_var_5) `HappyStk`
(HappyAbsSyn40 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IEBinOp happy_var_2 IMod happy_var_4 happy_var_5
) `HappyStk` happyRest
happyReduce_179 = happyReduce 6 55 happyReduction_179
happyReduction_179 (_ `HappyStk`
(HappyAbsSyn40 happy_var_5) `HappyStk`
(HappyAbsSyn40 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IEBinOp happy_var_2 IRem happy_var_4 happy_var_5
) `HappyStk` happyRest
happyReduce_180 = happyReduce 6 55 happyReduction_180
happyReduction_180 (_ `HappyStk`
(HappyAbsSyn40 happy_var_5) `HappyStk`
(HappyAbsSyn40 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IEBinOp happy_var_2 IDiv happy_var_4 happy_var_5
) `HappyStk` happyRest
happyReduce_181 = happyReduce 6 55 happyReduction_181
happyReduction_181 (_ `HappyStk`
(HappyAbsSyn40 happy_var_5) `HappyStk`
(HappyAbsSyn40 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IEBinOp happy_var_2 IPlus happy_var_4 happy_var_5
) `HappyStk` happyRest
happyReduce_182 = happyReduce 6 55 happyReduction_182
happyReduction_182 (_ `HappyStk`
(HappyAbsSyn40 happy_var_5) `HappyStk`
(HappyAbsSyn40 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IEBinOp happy_var_2 IMul happy_var_4 happy_var_5
) `HappyStk` happyRest
happyReduce_183 = happyReduce 6 55 happyReduction_183
happyReduction_183 (_ `HappyStk`
(HappyAbsSyn40 happy_var_5) `HappyStk`
(HappyAbsSyn40 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IEBinOp happy_var_2 IMinus happy_var_4 happy_var_5
) `HappyStk` happyRest
happyReduce_184 = happyReduce 6 55 happyReduction_184
happyReduction_184 (_ `HappyStk`
(HappyAbsSyn40 happy_var_5) `HappyStk`
(HappyAbsSyn40 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IEBinOp happy_var_2 IExp happy_var_4 happy_var_5
) `HappyStk` happyRest
happyReduce_185 = happyReduce 8 55 happyReduction_185
happyReduction_185 (_ `HappyStk`
(HappyAbsSyn40 happy_var_7) `HappyStk`
(HappyAbsSyn40 happy_var_6) `HappyStk`
(HappyAbsSyn9 happy_var_5) `HappyStk`
(HappyAbsSyn9 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IEGenericBinop happy_var_2 (IRGenericDiv) happy_var_4 happy_var_5 happy_var_6 happy_var_7
) `HappyStk` happyRest
happyReduce_186 = happyReduce 8 55 happyReduction_186
happyReduction_186 (_ `HappyStk`
(HappyAbsSyn40 happy_var_7) `HappyStk`
(HappyAbsSyn40 happy_var_6) `HappyStk`
(HappyAbsSyn9 happy_var_5) `HappyStk`
(HappyAbsSyn9 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IEGenericBinop happy_var_2 (IRGenericMul) happy_var_4 happy_var_5 happy_var_6 happy_var_7
) `HappyStk` happyRest
happyReduce_187 = happyReduce 6 55 happyReduction_187
happyReduction_187 (_ `HappyStk`
(HappyAbsSyn40 happy_var_5) `HappyStk`
(HappyAbsSyn40 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IEBinOp happy_var_2 IAnd happy_var_4 happy_var_5
) `HappyStk` happyRest
happyReduce_188 = happyReduce 6 55 happyReduction_188
happyReduction_188 (_ `HappyStk`
(HappyAbsSyn40 happy_var_5) `HappyStk`
(HappyAbsSyn40 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IEBinOp happy_var_2 INand happy_var_4 happy_var_5
) `HappyStk` happyRest
happyReduce_189 = happyReduce 6 55 happyReduction_189
happyReduction_189 (_ `HappyStk`
(HappyAbsSyn40 happy_var_5) `HappyStk`
(HappyAbsSyn40 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IEBinOp happy_var_2 IOr happy_var_4 happy_var_5
) `HappyStk` happyRest
happyReduce_190 = happyReduce 6 55 happyReduction_190
happyReduction_190 (_ `HappyStk`
(HappyAbsSyn40 happy_var_5) `HappyStk`
(HappyAbsSyn40 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IEBinOp happy_var_2 INor happy_var_4 happy_var_5
) `HappyStk` happyRest
happyReduce_191 = happyReduce 6 55 happyReduction_191
happyReduction_191 (_ `HappyStk`
(HappyAbsSyn40 happy_var_5) `HappyStk`
(HappyAbsSyn40 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IEBinOp happy_var_2 IXor happy_var_4 happy_var_5
) `HappyStk` happyRest
happyReduce_192 = happyReduce 6 55 happyReduction_192
happyReduction_192 (_ `HappyStk`
(HappyAbsSyn40 happy_var_5) `HappyStk`
(HappyAbsSyn40 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IEBinOp happy_var_2 IXNor happy_var_4 happy_var_5
) `HappyStk` happyRest
happyReduce_193 = happyReduce 6 55 happyReduction_193
happyReduction_193 (_ `HappyStk`
(HappyAbsSyn40 happy_var_5) `HappyStk`
(HappyAbsSyn40 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IEBinOp happy_var_2 IConcat happy_var_4 happy_var_5
) `HappyStk` happyRest
happyReduce_194 = happyReduce 5 55 happyReduction_194
happyReduction_194 (_ `HappyStk`
(HappyAbsSyn40 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IEUnOp happy_var_2 IUPlus happy_var_4
) `HappyStk` happyRest
happyReduce_195 = happyReduce 5 55 happyReduction_195
happyReduction_195 (_ `HappyStk`
(HappyAbsSyn40 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IEUnOp happy_var_2 IUMinus happy_var_4
) `HappyStk` happyRest
happyReduce_196 = happyReduce 5 55 happyReduction_196
happyReduction_196 (_ `HappyStk`
(HappyAbsSyn40 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IEUnOp happy_var_2 IAbs happy_var_4
) `HappyStk` happyRest
happyReduce_197 = happyReduce 5 55 happyReduction_197
happyReduction_197 (_ `HappyStk`
(HappyAbsSyn40 happy_var_4) `HappyStk`
_ `HappyStk`
(HappyAbsSyn69 happy_var_2) `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(IEUnOp happy_var_2 INot happy_var_4
) `HappyStk` happyRest
happyReduce_198 = happyReduce 5 55 happyReduction_198
happyReduction_198 (_ `HappyStk`
_ `HappyStk`
(HappyAbsSyn40 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn40
(happy_var_3
) `HappyStk` happyRest
happyReduce_199 = happySpecReduce_1 56 happyReduction_199
happyReduction_199 (HappyAbsSyn56 happy_var_1)
= HappyAbsSyn56
(reverse happy_var_1
)
happyReduction_199 _ = notHappyAtAll
happyReduce_200 = happySpecReduce_0 56 happyReduction_200
happyReduction_200 = HappyAbsSyn56
([]
)
happyReduce_201 = happySpecReduce_2 57 happyReduction_201
happyReduction_201 (HappyAbsSyn40 happy_var_2)
(HappyAbsSyn69 happy_var_1)
= HappyAbsSyn56
([(happy_var_1, happy_var_2)]
)
happyReduction_201 _ _ = notHappyAtAll
happyReduce_202 = happySpecReduce_3 57 happyReduction_202
happyReduction_202 (HappyAbsSyn40 happy_var_3)
(HappyAbsSyn69 happy_var_2)
(HappyAbsSyn56 happy_var_1)
= HappyAbsSyn56
((happy_var_2, happy_var_3) : happy_var_1
)
happyReduction_202 _ _ _ = notHappyAtAll
happyReduce_203 = happySpecReduce_1 58 happyReduction_203
happyReduction_203 (HappyTerminal (L.Ident happy_var_1))
= HappyAbsSyn58
([happy_var_1]
)
happyReduction_203 _ = notHappyAtAll
happyReduce_204 = happySpecReduce_3 58 happyReduction_204
happyReduction_204 (HappyAbsSyn62 happy_var_3)
_
(HappyAbsSyn58 happy_var_1)
= HappyAbsSyn58
(happy_var_3 : happy_var_1
)
happyReduction_204 _ _ _ = notHappyAtAll
happyReduce_205 = happySpecReduce_3 58 happyReduction_205
happyReduction_205 (HappyTerminal happy_var_3)
_
(HappyAbsSyn58 happy_var_1)
= HappyAbsSyn58
(L.originalString happy_var_3 : happy_var_1
)
happyReduction_205 _ _ _ = notHappyAtAll
happyReduce_206 = happySpecReduce_3 58 happyReduction_206
happyReduction_206 (HappyTerminal happy_var_3)
_
(HappyAbsSyn58 happy_var_1)
= HappyAbsSyn58
(L.originalString happy_var_3 : happy_var_1
)
happyReduction_206 _ _ _ = notHappyAtAll
happyReduce_207 = happySpecReduce_1 59 happyReduction_207
happyReduction_207 (HappyAbsSyn58 happy_var_1)
= HappyAbsSyn59
(bsToFs $ B.concat $ intersperse (B.pack ".") $ reverse happy_var_1
)
happyReduction_207 _ = notHappyAtAll
happyReduce_208 = happySpecReduce_1 60 happyReduction_208
happyReduction_208 (HappyAbsSyn58 happy_var_1)
= HappyAbsSyn60
((bsToFs $ B.concat $ intersperse (B.pack ".") $ reverse happy_var_1, map bsToFs happy_var_1)
)
happyReduction_208 _ = notHappyAtAll
happyReduce_209 = happySpecReduce_1 61 happyReduction_209
happyReduction_209 (HappyTerminal (L.Ident happy_var_1))
= HappyAbsSyn59
(bsToFs happy_var_1
)
happyReduction_209 _ = notHappyAtAll
happyReduce_210 = happySpecReduce_1 61 happyReduction_210
happyReduction_210 _
= HappyAbsSyn59
(fsLit "resolved"
)
happyReduce_211 = happySpecReduce_1 62 happyReduction_211
happyReduction_211 (HappyTerminal (L.Ident happy_var_1))
= HappyAbsSyn62
(happy_var_1
)
happyReduction_211 _ = notHappyAtAll
happyReduce_212 = happySpecReduce_1 62 happyReduction_212
happyReduction_212 _
= HappyAbsSyn62
(B.pack "resolved"
)
happyReduce_213 = happyMonadReduce 2 63 happyReduction_213
happyReduction_213 ((HappyTerminal happy_var_2) `HappyStk`
(HappyAbsSyn69 happy_var_1) `HappyStk`
happyRest) tk
= happyThen (( let i = L.decodedInteger happy_var_2 in
if i >= fromIntegral (minBound::TInt) &&
i <= fromIntegral (maxBound::TInt) then
return $ fromIntegral i
else failLoc happy_var_1 "Integer literal is too large")
) (\r -> happyReturn (HappyAbsSyn63 r))
happyReduce_214 = happyMonadReduce 2 64 happyReduction_214
happyReduction_214 ((HappyTerminal happy_var_2) `HappyStk`
(HappyAbsSyn69 happy_var_1) `HappyStk`
happyRest) tk
= happyThen (( let i = L.decodedInteger happy_var_2 in
if i >= fromIntegral (minBound::Int128) &&
i <= fromIntegral (maxBound::Int128) then
return $ fromIntegral i
else failLoc happy_var_1 "Integer literal is too large")
) (\r -> happyReturn (HappyAbsSyn64 r))
happyReduce_215 = happySpecReduce_1 65 happyReduction_215
happyReduction_215 _
= HappyAbsSyn65
(
)
happyReduce_216 = happySpecReduce_1 66 happyReduction_216
happyReduction_216 _
= HappyAbsSyn65
(
)
happyReduce_217 = happySpecReduce_1 67 happyReduction_217
happyReduction_217 _
= HappyAbsSyn65
(
)
happyReduce_218 = happySpecReduce_1 68 happyReduction_218
happyReduction_218 _
= HappyAbsSyn65
(
)
happyReduce_219 = happyMonadReduce 0 69 happyReduction_219
happyReduction_219 (happyRest) tk
= happyThen (( getLoc)
) (\r -> happyReturn (HappyAbsSyn69 r))
happyReduce_220 = happyMonadReduce 0 70 happyReduction_220
happyReduction_220 (happyRest) tk
= happyThen (( readRef psPrevTokenEnd)
) (\r -> happyReturn (HappyAbsSyn70 r))
happyReduce_221 = happyMonadReduce 4 71 happyReduction_221
happyReduction_221 ((HappyTerminal happy_var_4) `HappyStk`
(HappyTerminal happy_var_3) `HappyStk`
(HappyTerminal happy_var_2) `HappyStk`
_ `HappyStk`
happyRest) tk
= happyThen (( modifyRef psLocationStack
(Line (B.unpack $ L.decodedString happy_var_2)
(fromIntegral $ L.decodedInteger happy_var_3)
(fromIntegral $ L.decodedInteger happy_var_4) : ))
) (\r -> happyReturn (HappyAbsSyn65 r))
happyReduce_222 = happyMonadReduce 0 72 happyReduction_222
happyReduction_222 (happyRest) tk
= happyThen (( modifyRef psLocationStack tail)
) (\r -> happyReturn (HappyAbsSyn65 r))
happyReduce_223 = happySpecReduce_2 73 happyReduction_223
happyReduction_223 _
_
= HappyAbsSyn65
(
)
happyReduce_224 = happySpecReduce_1 73 happyReduction_224
happyReduction_224 _
= HappyAbsSyn65
(
)
happyReduce_225 = happySpecReduce_2 74 happyReduction_225
happyReduction_225 _
_
= HappyAbsSyn65
(
)
happyReduce_226 = happySpecReduce_1 74 happyReduction_226
happyReduction_226 _
= HappyAbsSyn65
(
)
happyReduce_227 = happySpecReduce_1 75 happyReduction_227
happyReduction_227 (HappyAbsSyn40 happy_var_1)
= HappyAbsSyn40
(happy_var_1
)
happyReduction_227 _ = notHappyAtAll
happyReduce_228 = happySpecReduce_1 75 happyReduction_228
happyReduction_228 (HappyAbsSyn69 happy_var_1)
= HappyAbsSyn40
(IEPhysical (WithLoc happy_var_1 0) (WithLoc happy_var_1 (fsLit "fs"))
)
happyReduction_228 _ = notHappyAtAll
happyReduce_229 = happyReduce 7 76 happyReduction_229
happyReduction_229 (_ `HappyStk`
(HappyAbsSyn64 happy_var_6) `HappyStk`
(HappyAbsSyn64 happy_var_5) `HappyStk`
(HappyAbsSyn58 happy_var_4) `HappyStk`
(HappyAbsSyn58 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn76
(T.MemoryMapRange happy_var_3 happy_var_4 (fromIntegral happy_var_5) (fromIntegral happy_var_6)
) `HappyStk` happyRest
happyReduce_230 = happyReduce 5 77 happyReduction_230
happyReduction_230 (_ `HappyStk`
(HappyAbsSyn58 happy_var_4) `HappyStk`
(HappyAbsSyn58 happy_var_3) `HappyStk`
_ `HappyStk`
_ `HappyStk`
happyRest)
= HappyAbsSyn77
((map bsToFs happy_var_3,map bsToFs happy_var_4)
) `HappyStk` happyRest
happyReduce_231 = happySpecReduce_1 78 happyReduction_231
happyReduction_231 (HappyAbsSyn8 happy_var_1)
= HappyAbsSyn78
([happy_var_1]
)
happyReduction_231 _ = notHappyAtAll
happyReduce_232 = happySpecReduce_2 78 happyReduction_232
happyReduction_232 (HappyAbsSyn8 happy_var_2)
(HappyAbsSyn78 happy_var_1)
= HappyAbsSyn78
(happy_var_2 : happy_var_1
)
happyReduction_232 _ _ = notHappyAtAll
happyReduce_233 = happySpecReduce_1 79 happyReduction_233
happyReduction_233 (HappyAbsSyn78 happy_var_1)
= HappyAbsSyn78
(reverse happy_var_1
)
happyReduction_233 _ = notHappyAtAll
happyReduce_234 = happySpecReduce_1 80 happyReduction_234
happyReduction_234 (HappyAbsSyn78 happy_var_1)
= HappyAbsSyn78
(happy_var_1
)
happyReduction_234 _ = notHappyAtAll
happyReduce_235 = happySpecReduce_0 80 happyReduction_235
happyReduction_235 = HappyAbsSyn78
([]
)
happyReduce_236 = happySpecReduce_1 81 happyReduction_236
happyReduction_236 (HappyAbsSyn10 happy_var_1)
= HappyAbsSyn81
([happy_var_1]
)
happyReduction_236 _ = notHappyAtAll
happyReduce_237 = happySpecReduce_2 81 happyReduction_237
happyReduction_237 (HappyAbsSyn10 happy_var_2)
(HappyAbsSyn81 happy_var_1)
= HappyAbsSyn81
(happy_var_2 : happy_var_1
)
happyReduction_237 _ _ = notHappyAtAll
happyReduce_238 = happySpecReduce_1 82 happyReduction_238
happyReduction_238 (HappyAbsSyn81 happy_var_1)
= HappyAbsSyn81
(reverse happy_var_1
)
happyReduction_238 _ = notHappyAtAll
happyReduce_239 = happySpecReduce_1 83 happyReduction_239
happyReduction_239 (HappyAbsSyn81 happy_var_1)
= HappyAbsSyn81
(happy_var_1
)
happyReduction_239 _ = notHappyAtAll
happyReduce_240 = happySpecReduce_0 83 happyReduction_240
happyReduction_240 = HappyAbsSyn81
([]
)
happyReduce_241 = happySpecReduce_1 84 happyReduction_241
happyReduction_241 (HappyAbsSyn11 happy_var_1)
= HappyAbsSyn84
([happy_var_1]
)
happyReduction_241 _ = notHappyAtAll
happyReduce_242 = happySpecReduce_2 84 happyReduction_242
happyReduction_242 (HappyAbsSyn11 happy_var_2)
(HappyAbsSyn84 happy_var_1)
= HappyAbsSyn84
(happy_var_2 : happy_var_1
)
happyReduction_242 _ _ = notHappyAtAll
happyReduce_243 = happySpecReduce_1 85 happyReduction_243
happyReduction_243 (HappyAbsSyn84 happy_var_1)
= HappyAbsSyn84
(reverse happy_var_1
)
happyReduction_243 _ = notHappyAtAll
happyReduce_244 = happySpecReduce_1 86 happyReduction_244
happyReduction_244 (HappyAbsSyn84 happy_var_1)
= HappyAbsSyn84
(happy_var_1
)
happyReduction_244 _ = notHappyAtAll
happyReduce_245 = happySpecReduce_0 86 happyReduction_245
happyReduction_245 = HappyAbsSyn84
([]
)
happyReduce_246 = happySpecReduce_1 87 happyReduction_246
happyReduction_246 (HappyAbsSyn14 happy_var_1)
= HappyAbsSyn87
([happy_var_1]
)
happyReduction_246 _ = notHappyAtAll
happyReduce_247 = happySpecReduce_2 87 happyReduction_247
happyReduction_247 (HappyAbsSyn14 happy_var_2)
(HappyAbsSyn87 happy_var_1)
= HappyAbsSyn87
(happy_var_2 : happy_var_1
)
happyReduction_247 _ _ = notHappyAtAll
happyReduce_248 = happySpecReduce_1 88 happyReduction_248
happyReduction_248 (HappyAbsSyn87 happy_var_1)
= HappyAbsSyn87
(reverse happy_var_1
)
happyReduction_248 _ = notHappyAtAll
happyReduce_249 = happySpecReduce_1 89 happyReduction_249
happyReduction_249 (HappyAbsSyn87 happy_var_1)
= HappyAbsSyn87
(happy_var_1
)
happyReduction_249 _ = notHappyAtAll
happyReduce_250 = happySpecReduce_0 89 happyReduction_250
happyReduction_250 = HappyAbsSyn87
([]
)
happyReduce_251 = happySpecReduce_1 90 happyReduction_251
happyReduction_251 (HappyAbsSyn15 happy_var_1)
= HappyAbsSyn90
([happy_var_1]
)
happyReduction_251 _ = notHappyAtAll
happyReduce_252 = happySpecReduce_2 90 happyReduction_252
happyReduction_252 (HappyAbsSyn15 happy_var_2)
(HappyAbsSyn90 happy_var_1)
= HappyAbsSyn90
(happy_var_2 : happy_var_1
)
happyReduction_252 _ _ = notHappyAtAll
happyReduce_253 = happySpecReduce_1 91 happyReduction_253
happyReduction_253 (HappyAbsSyn90 happy_var_1)
= HappyAbsSyn90
(reverse happy_var_1
)
happyReduction_253 _ = notHappyAtAll
happyReduce_254 = happySpecReduce_1 92 happyReduction_254
happyReduction_254 (HappyAbsSyn90 happy_var_1)
= HappyAbsSyn90
(happy_var_1
)
happyReduction_254 _ = notHappyAtAll
happyReduce_255 = happySpecReduce_0 92 happyReduction_255
happyReduction_255 = HappyAbsSyn90
([]
)
happyReduce_256 = happySpecReduce_1 93 happyReduction_256
happyReduction_256 (HappyAbsSyn26 happy_var_1)
= HappyAbsSyn25
([happy_var_1]
)
happyReduction_256 _ = notHappyAtAll
happyReduce_257 = happySpecReduce_2 93 happyReduction_257
happyReduction_257 (HappyAbsSyn26 happy_var_2)
(HappyAbsSyn25 happy_var_1)
= HappyAbsSyn25
(happy_var_2 : happy_var_1
)
happyReduction_257 _ _ = notHappyAtAll
happyReduce_258 = happySpecReduce_1 94 happyReduction_258
happyReduction_258 (HappyAbsSyn25 happy_var_1)
= HappyAbsSyn25
(reverse happy_var_1
)
happyReduction_258 _ = notHappyAtAll
happyReduce_259 = happySpecReduce_1 95 happyReduction_259
happyReduction_259 (HappyAbsSyn25 happy_var_1)
= HappyAbsSyn25
(happy_var_1
)
happyReduction_259 _ = notHappyAtAll
happyReduce_260 = happySpecReduce_0 95 happyReduction_260
happyReduction_260 = HappyAbsSyn25
([]
)
happyReduce_261 = happySpecReduce_1 96 happyReduction_261
happyReduction_261 (HappyAbsSyn29 happy_var_1)
= HappyAbsSyn37
([happy_var_1]
)
happyReduction_261 _ = notHappyAtAll
happyReduce_262 = happySpecReduce_2 96 happyReduction_262
happyReduction_262 (HappyAbsSyn29 happy_var_2)
(HappyAbsSyn37 happy_var_1)
= HappyAbsSyn37
(happy_var_2 : happy_var_1
)
happyReduction_262 _ _ = notHappyAtAll
happyReduce_263 = happySpecReduce_1 97 happyReduction_263
happyReduction_263 (HappyAbsSyn37 happy_var_1)
= HappyAbsSyn37
(reverse happy_var_1
)
happyReduction_263 _ = notHappyAtAll
happyReduce_264 = happySpecReduce_1 98 happyReduction_264
happyReduction_264 (HappyAbsSyn37 happy_var_1)
= HappyAbsSyn37
(happy_var_1
)
happyReduction_264 _ = notHappyAtAll
happyReduce_265 = happySpecReduce_0 98 happyReduction_265
happyReduction_265 = HappyAbsSyn37
([]
)
happyReduce_266 = happySpecReduce_1 99 happyReduction_266
happyReduction_266 (HappyAbsSyn43 happy_var_1)
= HappyAbsSyn99
([happy_var_1]
)
happyReduction_266 _ = notHappyAtAll
happyReduce_267 = happySpecReduce_2 99 happyReduction_267
happyReduction_267 (HappyAbsSyn43 happy_var_2)
(HappyAbsSyn99 happy_var_1)
= HappyAbsSyn99
(happy_var_2 : happy_var_1
)
happyReduction_267 _ _ = notHappyAtAll
happyReduce_268 = happySpecReduce_1 100 happyReduction_268
happyReduction_268 (HappyAbsSyn99 happy_var_1)
= HappyAbsSyn99
(reverse happy_var_1
)
happyReduction_268 _ = notHappyAtAll
happyReduce_269 = happySpecReduce_1 101 happyReduction_269
happyReduction_269 (HappyAbsSyn99 happy_var_1)
= HappyAbsSyn99
(happy_var_1
)
happyReduction_269 _ = notHappyAtAll
happyReduce_270 = happySpecReduce_0 101 happyReduction_270
happyReduction_270 = HappyAbsSyn99
([]
)
happyReduce_271 = happySpecReduce_1 102 happyReduction_271
happyReduction_271 (HappyAbsSyn42 happy_var_1)
= HappyAbsSyn102
([happy_var_1]
)
happyReduction_271 _ = notHappyAtAll
happyReduce_272 = happySpecReduce_2 102 happyReduction_272
happyReduction_272 (HappyAbsSyn42 happy_var_2)
(HappyAbsSyn102 happy_var_1)
= HappyAbsSyn102
(happy_var_2 : happy_var_1
)
happyReduction_272 _ _ = notHappyAtAll
happyReduce_273 = happySpecReduce_1 103 happyReduction_273
happyReduction_273 (HappyAbsSyn102 happy_var_1)
= HappyAbsSyn102
(reverse happy_var_1
)
happyReduction_273 _ = notHappyAtAll
happyReduce_274 = happySpecReduce_1 104 happyReduction_274
happyReduction_274 (HappyAbsSyn102 happy_var_1)
= HappyAbsSyn102
(happy_var_1
)
happyReduction_274 _ = notHappyAtAll
happyReduce_275 = happySpecReduce_0 104 happyReduction_275
happyReduction_275 = HappyAbsSyn102
([]
)
happyReduce_276 = happySpecReduce_1 105 happyReduction_276
happyReduction_276 (HappyAbsSyn48 happy_var_1)
= HappyAbsSyn105
([happy_var_1]
)
happyReduction_276 _ = notHappyAtAll
happyReduce_277 = happySpecReduce_2 105 happyReduction_277
happyReduction_277 (HappyAbsSyn48 happy_var_2)
(HappyAbsSyn105 happy_var_1)
= HappyAbsSyn105
(happy_var_2 : happy_var_1
)
happyReduction_277 _ _ = notHappyAtAll
happyReduce_278 = happySpecReduce_1 106 happyReduction_278
happyReduction_278 (HappyAbsSyn105 happy_var_1)
= HappyAbsSyn105
(reverse happy_var_1
)
happyReduction_278 _ = notHappyAtAll
happyReduce_279 = happySpecReduce_1 107 happyReduction_279
happyReduction_279 (HappyAbsSyn105 happy_var_1)
= HappyAbsSyn105
(happy_var_1
)
happyReduction_279 _ = notHappyAtAll
happyReduce_280 = happySpecReduce_0 107 happyReduction_280
happyReduction_280 = HappyAbsSyn105
([]
)
happyReduce_281 = happySpecReduce_1 108 happyReduction_281
happyReduction_281 (HappyAbsSyn35 happy_var_1)
= HappyAbsSyn36
([happy_var_1]
)
happyReduction_281 _ = notHappyAtAll
happyReduce_282 = happySpecReduce_2 108 happyReduction_282
happyReduction_282 (HappyAbsSyn35 happy_var_2)
(HappyAbsSyn36 happy_var_1)
= HappyAbsSyn36
(happy_var_2 : happy_var_1
)
happyReduction_282 _ _ = notHappyAtAll
happyReduce_283 = happySpecReduce_1 109 happyReduction_283
happyReduction_283 (HappyAbsSyn36 happy_var_1)
= HappyAbsSyn36
(reverse happy_var_1
)
happyReduction_283 _ = notHappyAtAll
happyReduce_284 = happySpecReduce_1 110 happyReduction_284
happyReduction_284 (HappyAbsSyn36 happy_var_1)
= HappyAbsSyn36
(happy_var_1
)
happyReduction_284 _ = notHappyAtAll
happyReduce_285 = happySpecReduce_0 110 happyReduction_285
happyReduction_285 = HappyAbsSyn36
([]
)
happyReduce_286 = happySpecReduce_3 111 happyReduction_286
happyReduction_286 (HappyAbsSyn70 happy_var_3)
(HappyAbsSyn64 happy_var_2)
(HappyAbsSyn69 happy_var_1)
= HappyAbsSyn111
(WithLoc (happy_var_1 { locEndChar = happy_var_3 }) happy_var_2
)
happyReduction_286 _ _ _ = notHappyAtAll
happyReduce_287 = happySpecReduce_3 112 happyReduction_287
happyReduction_287 (HappyAbsSyn70 happy_var_3)
(HappyAbsSyn59 happy_var_2)
(HappyAbsSyn69 happy_var_1)
= HappyAbsSyn112
(WithLoc (happy_var_1 { locEndChar = happy_var_3 }) happy_var_2
)
happyReduction_287 _ _ _ = notHappyAtAll
happyReduce_288 = happySpecReduce_3 113 happyReduction_288
happyReduction_288 (HappyAbsSyn70 happy_var_3)
(HappyAbsSyn59 happy_var_2)
(HappyAbsSyn69 happy_var_1)
= HappyAbsSyn112
(WithLoc (happy_var_1 { locEndChar = happy_var_3 }) happy_var_2
)
happyReduction_288 _ _ _ = notHappyAtAll
happyReduce_289 = happySpecReduce_3 114 happyReduction_289
happyReduction_289 (HappyAbsSyn70 happy_var_3)
(HappyAbsSyn60 happy_var_2)
(HappyAbsSyn69 happy_var_1)
= HappyAbsSyn114
(WithLoc (happy_var_1 { locEndChar = happy_var_3 }) happy_var_2
)
happyReduction_289 _ _ _ = notHappyAtAll
happyNewToken action sts stk
= lexer(\tk ->
let cont i = action i i tk (HappyState action) sts stk in
case tk of {
L.Eof -> action 243 243 tk (HappyState action) sts stk;
L.Integer _ _ -> cont 115;
L.Double happy_dollar_dollar -> cont 116;
L.String _ _ -> cont 117;
L.EnumIdent happy_dollar_dollar -> cont 118;
L.Ident happy_dollar_dollar -> cont 119;
L.Label happy_dollar_dollar -> cont 120;
L.LParen -> cont 121;
L.RParen -> cont 122;
L.LBracket -> cont 123;
L.RBracket -> cont 124;
L.LBrace -> cont 125;
L.RBrace -> cont 126;
L.Point -> cont 127;
L.Colon -> cont 128;
L.Hash -> cont 129;
L.ChoiceOthers -> cont 130;
L.ChoiceType -> cont 131;
L.ChoiceField -> cont 132;
L.ChoiceExpr -> cont 133;
L.And -> cont 134;
L.Or -> cont 135;
L.Nand -> cont 136;
L.Nor -> cont 137;
L.Xor -> cont 138;
L.Xnor -> cont 139;
L.Not -> cont 140;
L.EQ -> cont 141;
L.NEQ -> cont 142;
L.LT -> cont 143;
L.LE -> cont 144;
L.GT -> cont 145;
L.GE -> cont 146;
L.Plus -> cont 147;
L.Minus -> cont 148;
L.Abs -> cont 149;
L.Concat -> cont 150;
L.Mul -> cont 151;
L.Div -> cont 152;
L.MulG -> cont 153;
L.DivG -> cont 154;
L.Mod -> cont 155;
L.Rem -> cont 156;
L.Exp -> cont 157;
L.Box -> cont 158;
L.Type -> cont 159;
L.Enum -> cont 160;
L.Array -> cont 161;
L.Of -> cont 162;
L.Record -> cont 163;
L.Physical -> cont 164;
L.Access -> cont 165;
L.Resolved -> cont 166;
L.Range -> cont 167;
L.To -> cont 168;
L.Downto -> cont 169;
L.Function -> cont 170;
L.Procedure -> cont 171;
L.In -> cont 172;
L.Out -> cont 173;
L.Inout -> cont 174;
L.Signal -> cont 175;
L.Port -> cont 176;
L.Variable -> cont 177;
L.Constant -> cont 178;
L.Generate -> cont 179;
L.File -> cont 180;
L.Wait -> cont 181;
L.Return -> cont 182;
L.Report -> cont 183;
L.Severity -> cont 184;
L.Assert -> cont 185;
L.Assign -> cont 186;
L.Send -> cont 187;
L.If -> cont 188;
L.For -> cont 189;
L.On -> cont 190;
L.Until -> cont 191;
L.After -> cont 192;
L.While -> cont 193;
L.Next -> cont 194;
L.Exit -> cont 195;
L.Case -> cont 196;
L.Let -> cont 197;
L.Alias -> cont 198;
L.Process -> cont 199;
L.Nop -> cont 200;
L.Index -> cont 201;
L.Field -> cont 202;
L.Slice -> cont 203;
L.Deref -> cont 204;
L.SDelayed -> cont 205;
L.SStable -> cont 206;
L.SQuiet -> cont 207;
L.STransaction -> cont 208;
L.SEvent -> cont 209;
L.SActive -> cont 210;
L.SLast_event -> cont 211;
L.SLast_active -> cont 212;
L.SLast_value -> cont 213;
L.SDriving -> cont 214;
L.SDriving_value -> cont 215;
L.TLeft -> cont 216;
L.TRight -> cont 217;
L.THigh -> cont 218;
L.TLow -> cont 219;
L.TAscending -> cont 220;
L.TImage -> cont 221;
L.TValue -> cont 222;
L.TPos -> cont 223;
L.TVal -> cont 224;
L.TSucc -> cont 225;
L.TPred -> cont 226;
L.TLeftof -> cont 227;
L.TRightof -> cont 228;
L.ALeft -> cont 229;
L.ARight -> cont 230;
L.AHigh -> cont 231;
L.ALow -> cont 232;
L.ARange -> cont 233;
L.AReverseRange -> cont 234;
L.ALength -> cont 235;
L.AAscending -> cont 236;
L.QualifyType -> cont 237;
L.VQualifyType -> cont 238;
L.VerilogToVhdl -> cont 239;
L.VhdlToVerilog -> cont 240;
L.MemoryMapRange -> cont 241;
L.InstancedBy -> cont 242;
_ -> happyError' tk
})
happyError_ 243 tk = happyError' tk
happyError_ _ tk = happyError' tk
happyThen :: () => Parser a -> (a -> Parser b) -> Parser b
happyThen = (>>=)
happyReturn :: () => a -> Parser a
happyReturn = (return)
happyThen1 = happyThen
happyReturn1 :: () => a -> Parser a
happyReturn1 = happyReturn
happyError' :: () => (L.Token) -> Parser a
happyError' tk = parseError tk
toplevel_decls = happySomeParser where
happySomeParser = happyThen (happyParse action_0) (\x -> case x of {HappyAbsSyn4 z -> happyReturn z; _other -> notHappyAtAll })
happySeq = happyDontSeq
-- | FastString compatibility functions
fsLit :: String -> Ident
fsLit = B.pack
-- | FastString compatibility functions
bsToFs :: B.ByteString -> Ident
bsToFs = id
parseFile :: FilePath -> IO [IRTop]
parseFile f = do
inp <- B.readFile f
ps <- newState f inp
runParser ps toplevel_decls
parseFiles :: [FilePath] -> IO [IRTop]
parseFiles fn = concat <$> forM fn parseFile
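-- A minimal usage sketch (an addition for illustration, not part of the
-- original parser API): count how many top-level declarations 'parseFile'
-- produces for one input file.
countTopLevelDecls :: FilePath -> IO Int
countTopLevelDecls f = length <$> parseFile f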
failLoc :: Loc -> String -> Parser a
failLoc loc err = throwError $ ParserError $ formatErr loc $ "error: " ++ err
{-# LINE 1 "templates\GenericTemplate.hs" #-}
{-# LINE 1 "templates\\GenericTemplate.hs" #-}
{-# LINE 1 "<built-in>" #-}
{-# LINE 1 "<command-line>" #-}
{-# LINE 1 "templates\\GenericTemplate.hs" #-}
-- Id: GenericTemplate.hs,v 1.26 2005/01/14 14:47:22 simonmar Exp
{-# LINE 30 "templates\\GenericTemplate.hs" #-}
{-# LINE 51 "templates\\GenericTemplate.hs" #-}
{-# LINE 61 "templates\\GenericTemplate.hs" #-}
{-# LINE 70 "templates\\GenericTemplate.hs" #-}
infixr 9 `HappyStk`
data HappyStk a = HappyStk a (HappyStk a)
-----------------------------------------------------------------------------
-- starting the parse
happyParse start_state = happyNewToken start_state notHappyAtAll notHappyAtAll
-----------------------------------------------------------------------------
-- Accepting the parse
-- If the current token is (1), it means we've just accepted a partial
-- parse (a %partial parser). We must ignore the saved token on the top of
-- the stack in this case.
happyAccept (1) tk st sts (_ `HappyStk` ans `HappyStk` _) =
happyReturn1 ans
happyAccept j tk st sts (HappyStk ans _) =
(happyReturn1 ans)
-----------------------------------------------------------------------------
-- Arrays only: do the next action
{-# LINE 148 "templates\\GenericTemplate.hs" #-}
-----------------------------------------------------------------------------
-- HappyState data type (not arrays)
newtype HappyState b c = HappyState
(Int -> -- token number
Int -> -- token number (yes, again)
b -> -- token semantic value
HappyState b c -> -- current state
[HappyState b c] -> -- state stack
c)
-----------------------------------------------------------------------------
-- Shifting a token
happyShift new_state (1) tk st sts stk@(x `HappyStk` _) =
let (i) = (case x of { HappyErrorToken (i) -> i }) in
-- trace "shifting the error token" $
new_state i i tk (HappyState (new_state)) ((st):(sts)) (stk)
happyShift new_state i tk st sts stk =
happyNewToken new_state ((st):(sts)) ((HappyTerminal (tk))`HappyStk`stk)
-- happyReduce is specialised for the common cases.
happySpecReduce_0 i fn (1) tk st sts stk
= happyFail (1) tk st sts stk
happySpecReduce_0 nt fn j tk st@((HappyState (action))) sts stk
= action nt j tk st ((st):(sts)) (fn `HappyStk` stk)
happySpecReduce_1 i fn (1) tk st sts stk
= happyFail (1) tk st sts stk
happySpecReduce_1 nt fn j tk _ sts@(((st@(HappyState (action))):(_))) (v1`HappyStk`stk')
= let r = fn v1 in
happySeq r (action nt j tk st sts (r `HappyStk` stk'))
happySpecReduce_2 i fn (1) tk st sts stk
= happyFail (1) tk st sts stk
happySpecReduce_2 nt fn j tk _ ((_):(sts@(((st@(HappyState (action))):(_))))) (v1`HappyStk`v2`HappyStk`stk')
= let r = fn v1 v2 in
happySeq r (action nt j tk st sts (r `HappyStk` stk'))
happySpecReduce_3 i fn (1) tk st sts stk
= happyFail (1) tk st sts stk
happySpecReduce_3 nt fn j tk _ ((_):(((_):(sts@(((st@(HappyState (action))):(_))))))) (v1`HappyStk`v2`HappyStk`v3`HappyStk`stk')
= let r = fn v1 v2 v3 in
happySeq r (action nt j tk st sts (r `HappyStk` stk'))
happyReduce k i fn (1) tk st sts stk
= happyFail (1) tk st sts stk
happyReduce k nt fn j tk st sts stk
= case happyDrop (k - ((1) :: Int)) sts of
sts1@(((st1@(HappyState (action))):(_))) ->
let r = fn stk in -- it doesn't hurt to always seq here...
happyDoSeq r (action nt j tk st1 sts1 r)
happyMonadReduce k nt fn (1) tk st sts stk
= happyFail (1) tk st sts stk
happyMonadReduce k nt fn j tk st sts stk =
happyThen1 (fn stk tk) (\r -> action nt j tk st1 sts1 (r `HappyStk` drop_stk))
where (sts1@(((st1@(HappyState (action))):(_)))) = happyDrop k ((st):(sts))
drop_stk = happyDropStk k stk
happyMonad2Reduce k nt fn (1) tk st sts stk
= happyFail (1) tk st sts stk
happyMonad2Reduce k nt fn j tk st sts stk =
happyThen1 (fn stk tk) (\r -> happyNewToken new_state sts1 (r `HappyStk` drop_stk))
where (sts1@(((st1@(HappyState (action))):(_)))) = happyDrop k ((st):(sts))
drop_stk = happyDropStk k stk
new_state = action
happyDrop (0) l = l
happyDrop n ((_):(t)) = happyDrop (n - ((1) :: Int)) t
happyDropStk (0) l = l
happyDropStk n (x `HappyStk` xs) = happyDropStk (n - ((1)::Int)) xs
-----------------------------------------------------------------------------
-- Moving to a new state after a reduction
{-# LINE 246 "templates\\GenericTemplate.hs" #-}
happyGoto action j tk st = action j j tk (HappyState action)
-----------------------------------------------------------------------------
-- Error recovery ((1) is the error token)
-- parse error if we are in recovery and we fail again
happyFail (1) tk old_st _ stk@(x `HappyStk` _) =
let (i) = (case x of { HappyErrorToken (i) -> i }) in
-- trace "failing" $
happyError_ i tk
{- We don't need state discarding for our restricted implementation of
"error". In fact, it can cause some bogus parses, so I've disabled it
for now --SDM
-- discard a state
happyFail (1) tk old_st (((HappyState (action))):(sts))
(saved_tok `HappyStk` _ `HappyStk` stk) =
-- trace ("discarding state, depth " ++ show (length stk)) $
action (1) (1) tk (HappyState (action)) sts ((saved_tok`HappyStk`stk))
-}
-- Enter error recovery: generate an error token,
-- save the old token and carry on.
happyFail i tk (HappyState (action)) sts stk =
-- trace "entering error recovery" $
action (1) (1) tk (HappyState (action)) sts ( (HappyErrorToken (i)) `HappyStk` stk)
-- Internal happy errors:
notHappyAtAll :: a
notHappyAtAll = error "Internal Happy error\n"
-----------------------------------------------------------------------------
-- Hack to get the typechecker to accept our action functions
-----------------------------------------------------------------------------
-- Seq-ing. If the --strict flag is given, then Happy emits
-- happySeq = happyDoSeq
-- otherwise it emits
-- happySeq = happyDontSeq
happyDoSeq, happyDontSeq :: a -> b -> b
happyDoSeq a b = a `seq` b
happyDontSeq a b = b
-----------------------------------------------------------------------------
-- Don't inline any functions from the template. GHC has a nasty habit
-- of deciding to inline happyGoto everywhere, which increases the size of
-- the generated parser quite a bit.
{-# LINE 312 "templates\\GenericTemplate.hs" #-}
{-# NOINLINE happyShift #-}
{-# NOINLINE happySpecReduce_0 #-}
{-# NOINLINE happySpecReduce_1 #-}
{-# NOINLINE happySpecReduce_2 #-}
{-# NOINLINE happySpecReduce_3 #-}
{-# NOINLINE happyReduce #-}
{-# NOINLINE happyMonadReduce #-}
{-# NOINLINE happyGoto #-}
{-# NOINLINE happyFail #-}
-- end of Happy Template.
| ierton/vsim | src/VSim/VIR/AST.hs | bsd-3-clause | 229,882 | 7,940 | 135 | 40,200 | 67,425 | 37,175 | 30,250 | 6,818 | 130 |
{-# LANGUAGE
OverloadedStrings
#-}
module JUnit.Parser where
import Control.Applicative
import Control.Monad
import Data.Functor
import Data.List
import qualified Data.Text as T
import Data.Time.Clock
import Prelude hiding (readFile)
import Text.XML
import Text.XML.Cursor
import System.FilePath
-- * Basic parser for JUnit test suite results.
data TestSuiteResult = TestSuiteResult
{ suiteName :: T.Text
, suiteErrors :: T.Text
, suiteNumTests :: T.Text
, suiteFailures :: T.Text
, suiteTime :: T.Text -- FIXME Data.Time?
, suiteTimeStamp:: T.Text -- FIXME Data.Time?
} deriving (Eq, Show)
-- | Parse all @testsuite@ results from a JUnit XML report file.
parseFromFile :: FilePath -> IO [TestSuiteResult]
parseFromFile file = do
doc <- readFile def file
let cursor = fromDocument doc
return $ testSuiteResults cursor
where
testSuiteResults cur = cur $// element "testsuite" >=>
\c -> zipWith6 TestSuiteResult
(attribute "name" c)
(attribute "errors" c)
(attribute "tests" c)
(attribute "failures" c)
(attribute "time" c)
(attribute "timestamp" c)
| wayofthepie/junit-parser | src/JUnit/Parser.hs | bsd-3-clause | 1,138 | 0 | 11 | 288 | 284 | 158 | 126 | 34 | 1 |
{-# LANGUAGE PolyKinds, UndecidableInstances #-}
#if __GLASGOW_HASKELL__ < 710
{-# LANGUAGE OverlappingInstances #-}
#endif
#if __GLASGOW_HASKELL__ >= 800
{-# LANGUAGE UndecidableSuperClasses #-}
#endif
{-# OPTIONS_GHC -fno-warn-orphans -fno-warn-deprecations #-}
-- | Constraints for indexed datatypes.
--
-- This module contains code that helps to specify that all
-- elements of an indexed structure must satisfy a particular
-- constraint.
--
module Generics.SOP.Constraint
( module Generics.SOP.Constraint
, Constraint
) where
import GHC.Exts (Constraint)
import Generics.SOP.Sing
-- | Require a constraint for every element of a list.
--
-- If you have a datatype that is indexed over a type-level
-- list, then you can use 'All' to indicate that all elements
-- of that type-level list must satisfy a given constraint.
--
-- /Example:/ The constraint
--
-- > All Eq '[ Int, Bool, Char ]
--
-- is equivalent to the constraint
--
-- > (Eq Int, Eq Bool, Eq Char)
--
-- /Example:/ A type signature such as
--
-- > f :: All Eq xs => NP I xs -> ...
--
-- means that 'f' can assume that all elements of the n-ary
-- product satisfy 'Eq'.
--
class (AllF f xs, SListI xs) => All (f :: k -> Constraint) (xs :: [k])
instance (AllF f xs, SListI xs) => All f xs
-- | Type family used to implement 'All'.
--
type family AllF (c :: k -> Constraint) (xs :: [k]) :: Constraint
type instance AllF c '[] = ()
type instance AllF c (x ': xs) = (c x, All c xs)
-- | Require a singleton for every inner list in a list of lists.
type SListI2 = All SListI
-- | Require a constraint for every element of a list of lists.
--
-- If you have a datatype that is indexed over a type-level
-- list of lists, then you can use 'All2' to indicate that all
-- elements of the inner lists must satisfy a given constraint.
--
-- /Example:/ The constraint
--
-- > All2 Eq '[ '[ Int ], '[ Bool, Char ] ]
--
-- is equivalent to the constraint
--
-- > (Eq Int, Eq Bool, Eq Char)
--
-- /Example:/ A type signature such as
--
-- > f :: All2 Eq xss => SOP I xs -> ...
--
-- means that 'f' can assume that all elements of the sum
-- of products satisfy 'Eq'.
--
class (AllF (All f) xss, SListI xss) => All2 f xss
instance (AllF (All f) xss, SListI xss) => All2 f xss
--
-- NOTE:
--
-- The definition
--
-- type All2 f = All (All f)
--
-- is more direct, but has the unfortunate disadvantage that
-- it triggers GHC's superclass cycle check when used in a
-- class context.
-- | Composition of constraints.
--
-- Note that the result of the composition must be a constraint,
-- and therefore, in @f ':.' g@, the kind of @f@ is @k -> 'Constraint'@.
-- The kind of @g@, however, is @l -> k@ and can thus be a normal
-- type constructor.
--
-- A typical use case is in connection with 'All' on an 'NP' or an
-- 'NS'. For example, in order to denote that all elements on an
-- @'NP' f xs@ satisfy 'Show', we can say @'All' ('Show' :. f) xs@.
--
class (f (g x)) => (f `Compose` g) x
instance (f (g x)) => (f `Compose` g) x
infixr 9 `Compose`
-- | Pairing of constraints.
--
class (f x, g x) => (f `And` g) x
instance (f x, g x) => (f `And` g) x
infixl 7 `And`
-- | A constraint that can always be satisfied.
--
class Top x
instance Top x
-- | A generalization of 'All' and 'All2'.
--
-- The family 'AllN' expands to 'All' or 'All2' depending on whether
-- the argument is indexed by a list or a list of lists.
--
type family AllN (h :: (k -> *) -> (l -> *)) (c :: k -> Constraint) :: l -> Constraint
-- | A generalization of 'SListI'.
--
-- The family 'SListIN' expands to 'SListI' or 'SListI2' depending
-- on whether the argument is indexed by a list or a list of lists.
--
type family SListIN (h :: (k -> *) -> (l -> *)) :: l -> Constraint
instance
#if __GLASGOW_HASKELL__ >= 710
{-# OVERLAPPABLE #-}
#endif
SListI xs => SingI (xs :: [k]) where
sing = sList
instance
#if __GLASGOW_HASKELL__ >= 710
{-# OVERLAPPING #-}
#endif
(All SListI xss, SListI xss) => SingI (xss :: [[k]]) where
sing = sList
| phadej/generics-sop | src/Generics/SOP/Constraint.hs | bsd-3-clause | 3,991 | 1 | 12 | 812 | 683 | 421 | 262 | -1 | -1 |
{-# LANGUAGE ScopedTypeVariables #-}
import Control.Applicative
import Control.Monad
import Control.Concurrent
import System.IO
import System.Environment
import Network
main :: IO ()
main = do
(pn :: Int) : _ <- mapM readIO =<< getArgs
let port = PortNumber $ fromIntegral pn
socket <- listenOn port
forever $ do
(cl, _, _) <- accept socket
_ <- forkIO $ do
ret <- toEmpty cl
mapM_ putStrLn ret
print =<< getChars cl 24
return ()
toEmpty :: Handle -> IO [String]
toEmpty h = do
l <- hGetLine h
if l == "\r" then return [] else (l :) <$> toEmpty h
getChars :: Handle -> Int -> IO String
getChars _ 0 = return ""
getChars h n = (:) <$> hGetChar h <*> getChars h (n - 1)
| YoshikuniJujo/tighttp | examples/checkRequest.hs | bsd-3-clause | 693 | 0 | 15 | 152 | 299 | 146 | 153 | 26 | 2 |
module Abyme.Direction where
import Linear
data Direction = Up | Down | LEft | RIght
deriving (Eq, Ord, Show)
directionOpposite :: Direction -> Direction
directionOpposite Up = Down
directionOpposite Down = Up
directionOpposite LEft = RIght
directionOpposite RIght = LEft
directionToVector :: Num a => Direction -> V2 a
directionToVector Up = V2 0 (-1)
directionToVector Down = V2 0 1
directionToVector LEft = V2 (-1) 0
directionToVector RIght = V2 1 0
| mvr/abyme | haskell-model/src/Abyme/Direction.hs | bsd-3-clause | 464 | 0 | 7 | 84 | 162 | 84 | 78 | 14 | 1 |
{-# LANGUAGE FlexibleContexts #-}
-- | @futhark wasm-multicore@
module Futhark.CLI.MulticoreWASM (main) where
import Futhark.Actions (compileMulticoreToWASMAction)
import Futhark.Compiler.CLI
import Futhark.Passes (multicorePipeline)
-- | Run @futhark wasm-multicore@.
main :: String -> [String] -> IO ()
main = compilerMain
()
[]
"Compile to multicore WASM"
"Generate multicore WASM with the multicore C backend code from optimised Futhark program."
multicorePipeline
$ \fcfg () mode outpath prog ->
actionProcedure (compileMulticoreToWASMAction fcfg mode outpath) prog
| diku-dk/futhark | src/Futhark/CLI/MulticoreWASM.hs | isc | 576 | 0 | 9 | 88 | 116 | 65 | 51 | 14 | 1 |
import Data.Char (digitToInt)
import System.IO (hFlush, stdout)
main :: IO ()
main = do
    numbers <- putStr "How many happy numbers to find? " >> hFlush stdout >> readLn :: IO Int
print $ happy numbers
happy :: Int -> [Integer]
happy = (`take` (filter isHappy [1..]))
where isHappy n | n == 4 = False
| n == 1 = True
| otherwise = isHappy $ reduce n
where reduce = sum . (map $ (^ 2).toInteger.digitToInt) . show
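-- A worked example (illustrative): starting from 7 the digit-square sums are
-- 7 -> 49 -> 97 -> 130 -> 10 -> 1, so 7 is happy; any chain that reaches 4
-- loops forever (4 -> 16 -> 37 -> 58 -> 89 -> 145 -> 42 -> 20 -> 4) and the
-- number is not happy.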
| smac89/UVA_OJ | fun-projects/Numbers/happynumbers.hs | mit | 397 | 1 | 13 | 110 | 171 | 87 | 84 | 11 | 1 |
-- Copyright (c) Microsoft. All rights reserved.
-- Licensed under the MIT license. See LICENSE file in the project root for full license information.
{-# LANGUAGE QuasiQuotes, OverloadedStrings, RecordWildCards #-}
module Language.Bond.Codegen.Cs.Types_cs
( types_cs
, FieldMapping(..)
, StructMapping(..)
, ConstructorOptions(..)
) where
import Data.Monoid
import Prelude
import Data.Text.Lazy (Text, pack)
import Text.Shakespeare.Text
import Language.Bond.Syntax.Types
import Language.Bond.Syntax.Util
import Language.Bond.Syntax.Internal
import Language.Bond.Util
import Language.Bond.Codegen.TypeMapping
import Language.Bond.Codegen.Util
import qualified Language.Bond.Codegen.Cs.Util as CS
-- | C# representation of schema structs
data StructMapping =
Class -- ^ public partial class
deriving Eq
-- | Representation of schema fields in the generated C# types
data FieldMapping =
PublicFields | -- ^ public fields
Properties | -- ^ auto-properties
ReadOnlyProperties -- ^ auto-properties with private setter
deriving Eq
-- | Options for how constructors should be generated.
data ConstructorOptions =
DefaultWithProtectedBase | -- ^ The original bond behavior.
ConstructorParameters -- ^ Generate a constructor that takes all the fields as parameters.
deriving Eq
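-- An illustrative sketch of the generated output (not from the original
-- source): for a schema field @foo@ of type @int32@, 'PublicFields' yields
-- roughly @public int foo;@, 'Properties' yields
-- @public int foo { get; set; }@, and 'ReadOnlyProperties' yields
-- @public int foo { get; private set; }@, matching the @property@ template
-- below.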
-- | Codegen template for generating definitions of C# types representing the schema.
types_cs
:: StructMapping -- ^ Specifies how to represent schema structs
-> FieldMapping -- ^ Specifies how to represent schema fields
-> ConstructorOptions -- ^ Specifies the constructors that should be generated
-> MappingContext -> String -> [Import] -> [Declaration] -> (String, Text)
types_cs structMapping fieldMapping constructorOptions cs _ _ declarations = (fileSuffix, [lt|
#{CS.disableCscWarnings}
#{CS.disableReSharperWarnings}
namespace #{csNamespace}
{
using System.Collections.Generic;
#{doubleLineSep 1 typeDefinition declarations}
} // #{csNamespace}
|])
where
idl = MappingContext idlTypeMapping [] [] []
-- C# type
csType = getTypeName cs
csNamespace = sepBy "." toText $ getNamespace cs
access = case structMapping of
_ -> [lt|public |]
fileSuffix = case structMapping of
_ -> "_types.cs"
struct = case structMapping of
_ -> [lt|public partial class |]
typeAttributes s = case structMapping of
_ -> CS.typeAttributes cs s
propertyAttributes f = case structMapping of
Class -> CS.propertyAttributes cs f
baseClass x = [lt|
: #{csType x}|]
-- C# type definition for schema struct
typeDefinition s@Struct {..} = [lt|#{typeAttributes s}#{struct}#{declName}#{params}#{maybe interface baseClass structBase}#{constraints}
{
#{doubleLineSep 2 property structFields}#{constructors}
}|]
where
interface = case structMapping of
_ -> mempty
-- type parameters
params = angles $ sepBy ", " paramName declParams
-- constraints
constraints = CS.paramConstraints declParams
-- default value
csDefault = CS.defaultValue cs
metaFields = filter (isMetaName . fieldType) structFields
noMetaFields = null metaFields
-- constructor: DefaultWithProtectedBase option
defaultWithProtectedBaseConstructor = if noCtor then mempty else [lt|
public #{declName}()
: this("#{getDeclTypeName idl s}", "#{declName}")
{}
protected #{declName}(string fullName, string name)#{baseCtor}
{
#{newlineSep 3 initializer structFields}
}|]
where
noCtor = not callBaseCtor && (fieldMapping == PublicFields && noMetaFields || null structFields)
callBaseCtor = getAny $ optional (foldMapFields metaField) structBase
baseCtor = if not callBaseCtor
then mempty
else [lt|
: base(fullName, name)|]
-- constructor: ConstructorParameters option
constructorWithParameters = if not noMetaFields
then error $ "bond_meta usage in Struct " ++ (show declName) ++ " Field " ++ (show $ fieldName $ head metaFields) ++ " is incompatible with --preview--constructor-parameters"
else if (null baseFieldList)
then if (null structFields)
then [lt|
#{defaultConstructor}|]
else [lt|
public #{declName}(
#{commaLineSep 3 paramDecl fieldNameList})
{
#{newlineSep 3 paramBasedInitializer fieldNameList}
}
#{defaultConstructor}|]
else [lt|
public #{declName}(
// Base class parameters
#{commaLineSep 3 paramDecl (zip baseFieldList uniqueBaseFieldNames)}#{thisParamBlock}
) : base(
#{commaLineSep 4 pack uniqueBaseFieldNames})
{
#{newlineSep 3 paramBasedInitializer (zip structFields uniqueThisFieldNames)}
}
#{defaultConstructor}|]
thisParamBlock = if null structFields
then mempty
else [lt|,
// This class parameters
#{commaLineSep 3 paramDecl (zip structFields uniqueThisFieldNames)}|]
defaultConstructor = [lt|public #{declName}()
{
#{newlineSep 3 initializer structFields}
}|]
baseFieldList = concat $ baseFields s
uniqueBaseFieldNames = uniqueNames (map fieldName baseFieldList) []
uniqueThisFieldNames = uniqueNames (map fieldName structFields) uniqueBaseFieldNames
paramDecl (f, n) = [lt|#{csType $ fieldType f} #{n}|]
paramBasedInitializer (f, n) = [lt|this.#{fieldName f} = #{n};|]
fieldNameList = map (\f -> (f, fieldName f)) structFields
constructors = case constructorOptions of
DefaultWithProtectedBase -> defaultWithProtectedBaseConstructor
ConstructorParameters -> constructorWithParameters
-- property or field
property f@Field {..} =
[lt|#{propertyAttributes f}#{new}#{access}#{csType fieldType} #{fieldName}#{autoPropertyOrField}|]
where
autoPropertyOrField = case fieldMapping of
PublicFields -> [lt|#{optional fieldInitializer $ csDefault f};|]
Properties -> [lt| { get; set; }|]
ReadOnlyProperties -> [lt| { get; private set; }|]
fieldInitializer x = [lt| = #{x}|]
new = if isBaseField fieldName structBase then "new " else "" :: String
-- initializers in constructor
initializer f@Field {..} = optional fieldInit $ def f
where
fieldInit x = [lt|#{this fieldName} = #{x};|]
this = if fieldName == "name" || fieldName == "fullName" then ("this." ++) else id
def Field {fieldType = BT_MetaName} = Just "name"
def Field {fieldType = BT_MetaFullName} = Just "fullName"
def x = if fieldMapping == PublicFields then Nothing else csDefault x
-- C# enum definition for schema enum
typeDefinition e@Enum {..} = [lt|#{CS.typeAttributes cs e}public enum #{declName}
{
#{newlineSep 2 constant enumConstants}
}|]
where
-- constant
constant Constant {..} = let value x = [lt| = unchecked((int)#{x})|] in
[lt|#{constantName}#{optional value constantValue},|]
typeDefinition _ = mempty
| chwarr/bond | compiler/src/Language/Bond/Codegen/Cs/Types_cs.hs | mit | 7,589 | 0 | 15 | 2,089 | 1,160 | 688 | 472 | 101 | 17 |
{-|
This module exports the underlying Attoparsec row parser. This is helpful if
you want to do some ad-hoc CSV string parsing.
-}
module Data.CSV.Conduit.Parser.ByteString
( parseCSV
, parseRow
, row
, csv
) where
-------------------------------------------------------------------------------
import Control.Applicative
import Control.Monad (mzero)
import Data.Attoparsec.ByteString as P hiding (take)
import qualified Data.Attoparsec.ByteString.Char8 as C8
import Data.ByteString.Char8 (ByteString)
import qualified Data.ByteString.Char8 as B8
import Data.Word (Word8)
-------------------------------------------------------------------------------
import Data.CSV.Conduit.Types
------------------------------------------------------------------------------
-- | Try to parse given string as CSV
parseCSV :: CSVSettings -> ByteString -> Either String [Row ByteString]
parseCSV s = parseOnly $ csv s
------------------------------------------------------------------------------
-- | Try to parse given string as 'Row ByteString'
parseRow :: CSVSettings -> ByteString -> Either String (Maybe (Row ByteString))
parseRow s = parseOnly $ row s
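-- A minimal usage sketch (illustrative, not part of the original module); it
-- assumes the default 'CSVSettings' value (comma separator, double-quote
-- quoting) exported by the rest of the package is in scope as @defCSVSettings@:
--
-- > >>> parseCSV defCSVSettings "a,b\n1,2\n"
-- > Right [["a","b"],["1","2"]]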
------------------------------------------------------------------------------
-- | Parse CSV
csv :: CSVSettings -> Parser [Row ByteString]
csv s = do
r <- row s
end <- atEnd
if end
then case r of
Just x -> return [x]
Nothing -> return []
else do
rest <- csv s
return $ case r of
Just x -> x : rest
Nothing -> rest
------------------------------------------------------------------------------
-- | Parse a CSV row
row :: CSVSettings -> Parser (Maybe (Row ByteString))
row csvs = csvrow csvs <|> badrow
badrow :: Parser (Maybe (Row ByteString))
badrow = P.takeWhile (not . C8.isEndOfLine) *>
(C8.endOfLine <|> C8.endOfInput) *> return Nothing
csvrow :: CSVSettings -> Parser (Maybe (Row ByteString))
csvrow c =
let rowbody = (quotedField' <|> field c) `sepBy` C8.char (csvSep c)
properrow = rowbody <* (C8.endOfLine <|> P.endOfInput)
quotedField' = case csvQuoteChar c of
Nothing -> mzero
Just q' -> try (quotedField q')
in do
res <- properrow
return $ Just res
field :: CSVSettings -> Parser ByteString
field s = P.takeWhile (isFieldChar s)
isFieldChar :: CSVSettings -> Word8 -> Bool
isFieldChar s = notInClass xs'
where xs = csvSep s : "\n\r"
xs' = case csvQuoteChar s of
Nothing -> xs
Just x -> x : xs
quotedField :: Char -> Parser ByteString
quotedField c =
let quoted = string dbl *> return c
dbl = B8.pack [c,c]
in do
_ <- C8.char c
f <- many (C8.notChar c <|> quoted)
_ <- C8.char c
return $ B8.pack f
| mohsen3/csv-conduit | src/Data/CSV/Conduit/Parser/ByteString.hs | bsd-3-clause | 2,882 | 0 | 14 | 691 | 776 | 400 | 376 | 62 | 4 |
-----------------------------------------------------------------------------
-- |
-- Module : Data.Generics.Twins
-- Copyright : (c) The University of Glasgow, CWI 2001--2004
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : non-portable (local universal quantification)
--
-- \"Scrap your boilerplate\" --- Generic programming in Haskell
-- See <http://www.cs.vu.nl/boilerplate/>. The present module
-- provides support for multi-parameter traversal, which is also
-- demonstrated with generic operations like equality.
--
-----------------------------------------------------------------------------
module Data.Generics.Twins (
-- * Generic folds and maps that also accumulate
gfoldlAccum,
gmapAccumT,
gmapAccumM,
gmapAccumQl,
gmapAccumQr,
gmapAccumQ,
-- * Mapping combinators for twin traversal
gzipWithT,
gzipWithM,
gzipWithQ,
-- * Typical twin traversals
geq,
gzip
) where
------------------------------------------------------------------------------
#ifdef __HADDOCK__
import Prelude
#endif
import Data.Generics.Basics
import Data.Generics.Aliases
#ifdef __GLASGOW_HASKELL__
import Prelude hiding ( GT )
#endif
------------------------------------------------------------------------------
------------------------------------------------------------------------------
--
-- Generic folds and maps that also accumulate
--
------------------------------------------------------------------------------
{--------------------------------------------------------------
A list map can be elaborated to perform accumulation.
In the same sense, we can elaborate generic maps over terms.
We recall the type of map:
map :: (a -> b) -> [a] -> [b]
We recall the type of an accumulating map (see Data.List):
mapAccumL :: (a -> b -> (a,c)) -> a -> [b] -> (a,[c])
Applying the same scheme we obtain an accumulating gfoldl.
--------------------------------------------------------------}
-- | gfoldl with accumulation
gfoldlAccum :: Data d
=> (forall d r. Data d => a -> c (d -> r) -> d -> (a, c r))
-> (forall g. a -> g -> (a, c g))
-> a -> d -> (a, c d)
gfoldlAccum k z a d = unA (gfoldl k' z' d) a
where
k' c y = A (\a -> let (a', c') = unA c a in k a' c' y)
z' f = A (\a -> z a f)
-- | A type constructor for accumulation
newtype A a c d = A { unA :: a -> (a, c d) }
-- | gmapT with accumulation
gmapAccumT :: Data d
=> (forall d. Data d => a -> d -> (a,d))
-> a -> d -> (a, d)
gmapAccumT f a d = let (a',d') = gfoldlAccum k z a d
in (a',unID d')
where
k a (ID c) d = let (a',d') = f a d
in (a', ID (c d'))
z a x = (a, ID x)
-- | gmapM with accumulation
gmapAccumM :: (Data d, Monad m)
=> (forall d. Data d => a -> d -> (a, m d))
-> a -> d -> (a, m d)
gmapAccumM f = gfoldlAccum k z
where
k a c d = let (a',d') = f a d
in (a', d' >>= \d'' -> c >>= \c' -> return (c' d''))
z a x = (a, return x)
-- | gmapQl with accumulation
gmapAccumQl :: Data d
=> (r -> r' -> r)
-> r
-> (forall d. Data d => a -> d -> (a,r'))
-> a -> d -> (a, r)
gmapAccumQl o r f a d = let (a',r) = gfoldlAccum k z a d
in (a',unCONST r)
where
k a (CONST c) d = let (a',r') = f a d
in (a', CONST (c `o` r'))
z a _ = (a, CONST r)
-- | gmapQr with accumulation
gmapAccumQr :: Data d
=> (r' -> r -> r)
-> r
-> (forall d. Data d => a -> d -> (a,r'))
-> a -> d -> (a, r)
gmapAccumQr o r f a d = let (a',l) = gfoldlAccum k z a d
in (a',unQr l r)
where
k a (Qr c) d = let (a',r') = f a d
in (a', Qr (\r -> c (r' `o` r)))
z a _ = (a, Qr id)
-- | gmapQ with accumulation
gmapAccumQ :: Data d
=> (forall d. Data d => a -> d -> (a,q))
-> a -> d -> (a, [q])
gmapAccumQ f = gmapAccumQr (:) [] f
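-- An illustrative sketch (not from the original module): numbering the
-- immediate subterms of a term with an accumulating query, e.g.
--
-- > numberKids :: Data d => d -> (Int, [Int])
-- > numberKids = gmapAccumQ (\n _ -> (n + 1, n)) 0
--
-- returns the number of immediate subterms together with their 0-based
-- positions.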
------------------------------------------------------------------------------
--
-- Helper type constructors
--
------------------------------------------------------------------------------
-- | The identity type constructor needed for the definition of gmapAccumT
newtype ID x = ID { unID :: x }
-- | The constant type constructor needed for the definition of gmapAccumQl
newtype CONST c a = CONST { unCONST :: c }
-- | The type constructor needed for the definition of gmapAccumQr
newtype Qr r a = Qr { unQr :: r -> r }
------------------------------------------------------------------------------
--
-- Mapping combinators for twin traversal
--
------------------------------------------------------------------------------
-- | Twin map for transformation
gzipWithT :: GenericQ (GenericT) -> GenericQ (GenericT)
gzipWithT f x y = case gmapAccumT perkid funs y of
([], c) -> c
_ -> error "gzipWithT"
where
perkid a d = (tail a, unGT (head a) d)
funs = gmapQ (\k -> GT (f k)) x
-- | Twin map for monadic transformation
gzipWithM :: Monad m => GenericQ (GenericM m) -> GenericQ (GenericM m)
gzipWithM f x y = case gmapAccumM perkid funs y of
([], c) -> c
_ -> error "gzipWithM"
where
perkid a d = (tail a, unGM (head a) d)
funs = gmapQ (\k -> GM (f k)) x
-- | Twin map for queries
gzipWithQ :: GenericQ (GenericQ r) -> GenericQ (GenericQ [r])
gzipWithQ f x y = case gmapAccumQ perkid funs y of
([], r) -> r
_ -> error "gzipWithQ"
where
perkid a d = (tail a, unGQ (head a) d)
funs = gmapQ (\k -> GQ (f k)) x
------------------------------------------------------------------------------
--
-- Typical twin traversals
--
------------------------------------------------------------------------------
-- | Generic equality: an alternative to \"deriving Eq\"
geq :: Data a => a -> a -> Bool
{-
Testing for equality of two terms goes like this. Firstly, we
establish the equality of the two top-level datatype
constructors. Secondly, we use a twin gmap combinator, namely tgmapQ,
to compare the two lists of immediate subterms.
(Note for the experts: the type of the worker geq' is rather general
but precision is recovered via the restrictive type of the top-level
operation geq. The imprecision of geq' is caused by the type system's
-- inability to express the type equivalence for the corresponding
couples of immediate subterms from the two given input terms.)
-}
geq x y = geq' x y
where
geq' :: GenericQ (GenericQ Bool)
geq' x y = (toConstr x == toConstr y)
&& and (gzipWithQ geq' x y)
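-- An illustrative example (not from the original module): for a user-defined
-- datatype with a derived 'Data' instance, say
--
-- > data T = Leaf Int | Node T T deriving (Typeable, Data)
--
-- 'geq' behaves like structural equality, e.g.
-- @geq (Node (Leaf 1) (Leaf 2)) (Node (Leaf 1) (Leaf 2))@ is 'True' while
-- @geq (Leaf 1) (Leaf 2)@ is 'False'.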
-- | Generic zip controlled by a function with type-specific branches
gzip :: GenericQ (GenericM Maybe) -> GenericQ (GenericM Maybe)
-- See testsuite/.../Generics/gzip.hs for an illustration
gzip f x y =
f x y
`orElse`
if toConstr x == toConstr y
then gzipWithM (gzip f) x y
else Nothing
| alekar/hugs | packages/base/Data/Generics/Twins.hs | bsd-3-clause | 7,105 | 22 | 16 | 1,789 | 1,888 | 1,023 | 865 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.RDS.ModifyEventSubscription
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Modifies an existing RDS event notification subscription. Note that you
-- cannot modify the source identifiers using this call; to change source
-- identifiers for a subscription, use the 'AddSourceIdentifierToSubscription' and 'RemoveSourceIdentifierFromSubscription' calls.
--
-- You can see a list of the event categories for a given SourceType in the <http://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_Events.html Events> topic in the Amazon RDS User Guide or by using the DescribeEventCategories
-- action.
--
-- <http://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_ModifyEventSubscription.html>
module Network.AWS.RDS.ModifyEventSubscription
(
-- * Request
ModifyEventSubscription
-- ** Request constructor
, modifyEventSubscription
-- ** Request lenses
, mesEnabled
, mesEventCategories
, mesSnsTopicArn
, mesSourceType
, mesSubscriptionName
-- * Response
, ModifyEventSubscriptionResponse
-- ** Response constructor
, modifyEventSubscriptionResponse
-- ** Response lenses
, mesrEventSubscription
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.RDS.Types
import qualified GHC.Exts
data ModifyEventSubscription = ModifyEventSubscription
{ _mesEnabled :: Maybe Bool
, _mesEventCategories :: List "member" Text
, _mesSnsTopicArn :: Maybe Text
, _mesSourceType :: Maybe Text
, _mesSubscriptionName :: Text
} deriving (Eq, Ord, Read, Show)
-- | 'ModifyEventSubscription' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'mesEnabled' @::@ 'Maybe' 'Bool'
--
-- * 'mesEventCategories' @::@ ['Text']
--
-- * 'mesSnsTopicArn' @::@ 'Maybe' 'Text'
--
-- * 'mesSourceType' @::@ 'Maybe' 'Text'
--
-- * 'mesSubscriptionName' @::@ 'Text'
--
modifyEventSubscription :: Text -- ^ 'mesSubscriptionName'
-> ModifyEventSubscription
modifyEventSubscription p1 = ModifyEventSubscription
{ _mesSubscriptionName = p1
, _mesSnsTopicArn = Nothing
, _mesSourceType = Nothing
, _mesEventCategories = mempty
, _mesEnabled = Nothing
}
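-- A minimal usage sketch (illustrative, not from the original module); the
-- subscription name and topic ARN are hypothetical, and the lens operators
-- ('&', '?~') are assumed to be in scope from the usual lens re-exports:
--
-- > modifyEventSubscription "my-subscription"
-- >     & mesSnsTopicArn ?~ "arn:aws:sns:us-east-1:123456789012:my-topic"
-- >     & mesEnabled    ?~ True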
-- | A Boolean value; set to true to activate the subscription.
mesEnabled :: Lens' ModifyEventSubscription (Maybe Bool)
mesEnabled = lens _mesEnabled (\s a -> s { _mesEnabled = a })
-- | A list of event categories for a SourceType that you want to subscribe to.
-- You can see a list of the categories for a given SourceType in the <http://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_Events.html Events>
-- topic in the Amazon RDS User Guide or by using the DescribeEventCategories
-- action.
mesEventCategories :: Lens' ModifyEventSubscription [Text]
mesEventCategories =
lens _mesEventCategories (\s a -> s { _mesEventCategories = a })
. _List
-- | The Amazon Resource Name (ARN) of the SNS topic created for event
-- notification. The ARN is created by Amazon SNS when you create a topic and
-- subscribe to it.
mesSnsTopicArn :: Lens' ModifyEventSubscription (Maybe Text)
mesSnsTopicArn = lens _mesSnsTopicArn (\s a -> s { _mesSnsTopicArn = a })
-- | The type of source that will be generating the events. For example, if you
-- want to be notified of events generated by a DB instance, you would set this
-- parameter to db-instance. If this value is not specified, all events are
-- returned.
--
-- Valid values: db-instance | db-parameter-group | db-security-group |
-- db-snapshot
mesSourceType :: Lens' ModifyEventSubscription (Maybe Text)
mesSourceType = lens _mesSourceType (\s a -> s { _mesSourceType = a })
-- | The name of the RDS event notification subscription.
mesSubscriptionName :: Lens' ModifyEventSubscription Text
mesSubscriptionName =
lens _mesSubscriptionName (\s a -> s { _mesSubscriptionName = a })
newtype ModifyEventSubscriptionResponse = ModifyEventSubscriptionResponse
{ _mesrEventSubscription :: Maybe EventSubscription
} deriving (Eq, Read, Show)
-- | 'ModifyEventSubscriptionResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'mesrEventSubscription' @::@ 'Maybe' 'EventSubscription'
--
modifyEventSubscriptionResponse :: ModifyEventSubscriptionResponse
modifyEventSubscriptionResponse = ModifyEventSubscriptionResponse
{ _mesrEventSubscription = Nothing
}
mesrEventSubscription :: Lens' ModifyEventSubscriptionResponse (Maybe EventSubscription)
mesrEventSubscription =
lens _mesrEventSubscription (\s a -> s { _mesrEventSubscription = a })
instance ToPath ModifyEventSubscription where
toPath = const "/"
instance ToQuery ModifyEventSubscription where
toQuery ModifyEventSubscription{..} = mconcat
[ "Enabled" =? _mesEnabled
, "EventCategories" =? _mesEventCategories
, "SnsTopicArn" =? _mesSnsTopicArn
, "SourceType" =? _mesSourceType
, "SubscriptionName" =? _mesSubscriptionName
]
instance ToHeaders ModifyEventSubscription
instance AWSRequest ModifyEventSubscription where
type Sv ModifyEventSubscription = RDS
type Rs ModifyEventSubscription = ModifyEventSubscriptionResponse
request = post "ModifyEventSubscription"
response = xmlResponse
instance FromXML ModifyEventSubscriptionResponse where
parseXML = withElement "ModifyEventSubscriptionResult" $ \x -> ModifyEventSubscriptionResponse
<$> x .@? "EventSubscription"
| romanb/amazonka | amazonka-rds/gen/Network/AWS/RDS/ModifyEventSubscription.hs | mpl-2.0 | 6,504 | 0 | 10 | 1,274 | 722 | 439 | 283 | 81 | 1 |
{-# LANGUAGE LambdaCase #-}
module HN.SplExport (convertToSpl, convertExpr, convertDef) where
import Data.Functor.Foldable
import HN.Intermediate
import SPL.Visualise (showAsSource)
import SPL.Types
import Utils (joinStr)
convertToSpl = (\x -> show x ++ "\n" ++ joinStr "\n" (map showAsSource x)) . map convertDef
convertExpr :: Expression String -> C
convertExpr = cata $ \case
ConstantF (ConstInt i) -> CNum i
ConstantF (ConstString i) -> CStr i
AtomF a -> CVal a
ApplicationF a b -> CL a $ K b
convertDef (Definition _ arguments l)
= (case arguments of
[] -> convertedWithWhere
_ -> CL xvalue (S arguments)) where
xvalue = case whereDefinitions of
[] -> convertExpr value
_ -> convertedWithWhere
whereVars = whereMap (\(Definition name _ _) -> name)
whereValues = whereMap convertDef
whereMap f = map f whereDefinitions
convertedWithWhere = CL (convertExpr value) $ W $ zip whereVars whereValues
value = letValue l
whereDefinitions = letWhere l | kayuri/HNC | HN/SplExport.hs | lgpl-3.0 | 983 | 2 | 11 | 180 | 358 | 182 | 176 | 27 | 4 |
{-# LANGUAGE OverloadedStrings #-}
{-
Copyright 2017 The CodeWorld Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
module Blocks.Types(setBlockTypes, getTypeBlocks)
where
import Blockly.DesignBlock
import Blockly.General
import Blockly.Event
import Data.List(intersperse)
import qualified Data.Text as T
colorPicture = Color 160
colorNumber = Color 210
colorProgram = Color 0
colorColor = Color 290
colorPoly = Color 180
colorBool = Color 100
colorText = Color 45
typePicture = Picture
typeNumber = Number
typeProgram = Program
typeColor = Col
typeBool = Truth
typeText = Str
typeComment = Comment
inlineDef = Inline True
icon :: T.Text -> Field
icon name = FieldImage ("ims/" `T.append` name) 20 20
standardFunction cwName funcName ico types [] color tooltip =
DesignBlock cwName (Function funcName types) [header] inlineDef color (Tooltip tooltip)
where
header = case ico of
Just i -> Dummy [TextE funcName, icon i]
Nothing -> Dummy [TextE funcName]
standardFunction cwName funcName ico types inputNames color tooltip =
DesignBlock cwName (Function funcName types)
(header : (argInputs ++ [Dummy [Text ")"]]))
inlineDef
color
(Tooltip tooltip)
where
header = case ico of
Just i -> Value (head inputNames) [Text "(", TextE funcName, icon i]
Nothing -> Value (head inputNames) [Text "(", TextE funcName]
argInputs = map (\name -> Value name [Text ","]) (tail inputNames)
-- PICTURE ----------------------------------------------
cwBlank = standardFunction "cwBlank" "blank" Nothing [Picture] [] colorPicture "Blank picture"
cwCoordinatePlane = standardFunction "cwCoordinatePlane" "coordinatePlane" Nothing [Picture] [] colorPicture "Picture of coordinate plane"
cwCodeWorldLogo = standardFunction "cwCodeWorldLogo" "codeWorldLogo" Nothing [Picture] [] colorPicture "Picture of CodeWorld logo"
cwText = standardFunction "cwText" "text" Nothing [typeText, Picture]
["TEXT"] colorPicture "Picture of text"
cwDrawingOf = DesignBlock "cwDrawingOf" (Top "drawingOf" [typePicture, typeProgram])
[Value "VALUE" [Text "(", TextE "drawingOf", icon "shape-plus.svg"], Dummy [Text ")"]]
inlineDef colorProgram
(Tooltip "Displays a drawing of a picture")
cwCircle = standardFunction "cwCircle" "circle" Nothing [typeNumber, typePicture] ["RADIUS"] colorPicture "Picture of a circle"
cwThickCircle = standardFunction "cwThickCircle" "thickCircle" Nothing [typeNumber, typeNumber, typePicture] ["RADIUS", "LINEWIDTH"]
colorPicture "Picture of a circle with a border"
cwSolidCircle = standardFunction "cwSolidCircle" "solidCircle" Nothing [typeNumber, typePicture] ["RADIUS"]
colorPicture "Picture of a solid circle"
cwRectangle = standardFunction "cwRectangle" "rectangle" Nothing [typeNumber, typeNumber, typePicture] ["WIDTH", "HEIGHT"]
colorPicture "Picture of a rectangle"
cwThickRectangle = standardFunction "cwThickRectangle" "thickRectangle" Nothing
[typeNumber, typeNumber, typeNumber, typePicture] ["WIDTH", "HEIGHT", "LINEWIDTH"]
colorPicture "Picture of a rectangle with a border"
cwSolidRectangle = standardFunction "cwSolidRectangle" "solidRectangle" Nothing [typeNumber, typeNumber, typePicture] ["WIDTH", "HEIGHT"]
colorPicture "Picture of a solid rectangle"
cwArc = standardFunction "cwArc" "arc" Nothing [typeNumber, typeNumber, typeNumber, typePicture] ["STARTANGLE", "ENDANGLE", "RADIUS"]
colorPicture "A thin arc"
cwSector = standardFunction "cwSector" "sector" Nothing [typeNumber, typeNumber, typeNumber, typePicture] ["STARTANGLE", "ENDANGLE", "RADIUS"]
colorPicture "A solid sector of a circle"
cwThickArc = standardFunction "cwThickArc" "thickArc" Nothing [typeNumber, typeNumber, typeNumber, typeNumber, typePicture]
["STARTANGLE", "ENDANGLE", "RADIUS", "LINEWIDTH"]
colorPicture "An arc with variable line width"
-- Transformations -----------------------------------------------
cwColored = standardFunction "cwColored" "colored" (Just "format-color-fill.svg") [typePicture, typeColor, typePicture]
["PICTURE", "COLOR"] colorPicture "A colored picture"
cwTranslate = standardFunction "cwTranslate" "translated" (Just "cursor-move.svg") [typePicture, typeNumber, typeNumber, typePicture]
["PICTURE", "X", "Y"] colorPicture "A translated picture"
cwScale = standardFunction "cwScale" "scaled" (Just "move-resize-variant.svg") [typePicture, typeNumber, typeNumber, typePicture]
["PICTURE", "HORZ", "VERTZ"] colorPicture "A scaled picture"
cwRotate = standardFunction "cwRotate" "rotated" (Just "rotate-3d.svg") [typePicture, typeNumber, typePicture ]
["PICTURE", "ANGLE"] colorPicture "A rotated picture"
-- NUMBERS ---------------------------------------------
numAdd = DesignBlock "numAdd" (Function "+" [typeNumber, typeNumber, typeNumber])
[ Value "LEFT" []
,Value "RIGHT" [TextE "+"]
]
(Inline True) colorNumber
(Tooltip "Add two numbers")
numSub = DesignBlock "numSub" (Function "-" [typeNumber, typeNumber, typeNumber])
[ Value "LEFT" []
,Value "RIGHT" [TextE "-"]
]
(Inline True) colorNumber
(Tooltip "Subtract two numbers")
numMult = DesignBlock "numMult" (Function "*" [typeNumber, typeNumber, typeNumber])
[ Value "LEFT" []
,Value "RIGHT" [TextE "\xD7"]
]
(Inline True) colorNumber
(Tooltip "Multiply two numbers")
numDiv = DesignBlock "numDiv" (Function "/" [typeNumber, typeNumber, typeNumber])
[ Value "LEFT" []
,Value "RIGHT" [TextE "\xF7"]
]
(Inline True) colorNumber
(Tooltip "Divide two numbers")
numExp = DesignBlock "numExp" (Function "^" [typeNumber, typeNumber, typeNumber])
[ Value "LEFT" []
,Value "RIGHT" [TextE "^"]
]
(Inline True) colorNumber
(Tooltip "Raise a number to a power")
numMax = standardFunction "numMax" "max" (Just "arrow-up.svg") [typeNumber, typeNumber, typeNumber]
["LEFT", "RIGHT"] colorNumber "The maximum of two numbers"
numMin = standardFunction "numMin" "min" (Just "arrow-down.svg") [typeNumber, typeNumber, typeNumber]
["LEFT", "RIGHT"] colorNumber "Take the minimum of two numbers"
numOpposite = standardFunction "numOpposite" "opposite" (Just "minus-box.svg") [typeNumber, typeNumber]
["NUM"] colorNumber "The opposite of a number"
numAbs = standardFunction "numAbs" "abs" Nothing [typeNumber, typeNumber]
["NUM"] colorNumber "The absolute value of a number"
numRound = standardFunction "numRound" "rounded" Nothing [typeNumber, typeNumber]
["NUM"] colorNumber "The rounded value of a number"
numReciprocal = standardFunction "numReciprocal" "reciprocal" Nothing [typeNumber, typeNumber]
["NUM"] colorNumber "The reciprocal of a number"
numQuot = standardFunction "numQuot" "quotient" Nothing [typeNumber, typeNumber, typeNumber]
["LEFT", "RIGHT"] colorNumber "The integer part when dividing two numbers"
numRem = standardFunction "numRem" "remainder" Nothing [typeNumber, typeNumber, typeNumber]
["LEFT", "RIGHT"] colorNumber "The remainder when dividing two numbers"
numPi = DesignBlock "numPi" (Function "pi" [typeNumber])
[Dummy
[TextE "\x3C0"]
]
inlineDef colorNumber
(Tooltip "The number pi, 3.1415..")
numSqrt = DesignBlock "numSqrt" (Function "sqrt" [typeNumber, typeNumber])
[Value "NUM" [TextE "\x221A"], Dummy []]
(Inline True) colorNumber
(Tooltip "Gives the square root of a number")
numGCD = standardFunction "numGCD" "gcd" Nothing [typeNumber, typeNumber, typeNumber]
["LEFT", "RIGHT"] colorNumber "The greatest common demonitator between two numbers"
numLCM = standardFunction "numLCM" "lcm" Nothing [typeNumber, typeNumber, typeNumber]
["LEFT", "RIGHT"] colorNumber "The least common multiple between two numbers"
numSin = standardFunction "numSin" "sin" Nothing [typeNumber, typeNumber]
["VAL"] colorNumber "The sine of an angle"
numCos = standardFunction "numCos" "cos" Nothing [typeNumber, typeNumber]
["VAL"] colorNumber "The cosine of an angle"
-- TEXT ------------------------------------------------
txtPrinted = standardFunction "txtPrinted" "printed" Nothing [typeNumber, typeText]
["TEXT"] colorText "The text value of a number"
txtLowercase = standardFunction "txtLowercase" "lowercase" Nothing [typeText, typeText]
["TEXT"] colorText "The text in lowercase"
txtUppercase = standardFunction "txtUppercase" "uppercase" Nothing [typeText, typeText]
["TEXT"] colorText "The text in uppercase"
-- COLORS ----------------------------------------------
cwBlue = standardFunction "cwBlue" "blue" Nothing [typeColor] [] colorColor "The color blue"
cwRed = standardFunction "cwRed" "red" Nothing [typeColor] [] colorColor "The color red"
cwGreen = standardFunction "cwGreen" "green" Nothing [typeColor] [] colorColor "The color green"
cwOrange = standardFunction "cwOrange" "orange" Nothing [typeColor] [] colorColor "The color orange"
cwBrown = standardFunction "cwBrown" "brown" Nothing [typeColor] [] colorColor "The color brown"
cwBlack = standardFunction "cwBlack" "black" Nothing [typeColor] [] colorColor "The color black"
cwWhite = standardFunction "cwWhite" "white" Nothing [typeColor] [] colorColor "The color white"
cwCyan = standardFunction "cwCyan" "cyan" Nothing [typeColor] [] colorColor "The color cyan"
cwMagenta = standardFunction "cwMagenta" "magenta" Nothing [typeColor] [] colorColor "The color magenta"
cwYellow = standardFunction "cwYellow" "yellow" Nothing [typeColor] [] colorColor "The color yellow"
cwAquamarine = standardFunction "cwAquamarine" "aquamarine" Nothing [typeColor] [] colorColor "The color aquamarine"
cwAzure = standardFunction "cwAzure" "azure" Nothing [typeColor] [] colorColor "The color azure"
cwViolet = standardFunction "cwViolet" "violet" Nothing [typeColor] [] colorColor "The color violet"
cwChartreuse = standardFunction "cwChartreuse" "chartreuse" Nothing [typeColor] [] colorColor "The color chartreuse"
cwRose = standardFunction "cwRose" "rose" Nothing [typeColor] [] colorColor "The color rose"
cwPink = standardFunction "cwPink" "pink" Nothing [typeColor] [] colorColor "The color pink"
cwPurple = standardFunction "cwPurple" "purple" Nothing [typeColor] [] colorColor "The color purple"
cwGray = standardFunction "cwGray" "gray" Nothing [typeNumber, typeColor]
["VALUE"] colorColor "The color gray, varying by an amount. Lower value is closer to black"
cwMixed = standardFunction "cwMixed" "mixed" (Just "pot-mix.svg") [typeColor, typeColor, typeColor]
["COL1", "COL2"] colorColor "Two mix of two colors"
cwLight = standardFunction "cwLight" "light" Nothing [typeColor, typeColor]
["COL"] colorColor "A lighter color"
cwDark = standardFunction "cwDark" "dark" Nothing [typeColor, typeColor]
["COL"] colorColor "A darker color"
cwBright = standardFunction "cwBright" "bright" Nothing [typeColor, typeColor]
["COL"] colorColor "A brighter color"
cwDull = standardFunction "cwDull" "dull" Nothing [typeColor, typeColor]
["COL"] colorColor "A more dull color"
cwTranslucent = standardFunction "cwTranslucent" "translucent" Nothing [typeColor, typeColor]
["COL"] colorColor "A more translucent color"
cwRGBA = standardFunction "cwRGBA" "RGBA" Nothing [typeNumber, typeNumber, typeNumber, typeNumber, typeColor]
["RED", "GREEN", "BLUE", "ALPHA"] colorColor
"Makes a color with the given red, green, blue and alpha portions"
-- LOGIC -------------------------------------------
conIf = DesignBlock "conIf" (Function "if" [typeBool, Poly "a", Poly "a", Poly "a"])
[ Value "IF" [TextE "if"]
,Value "THEN" [Text "then"]
,Value "ELSE" [Text "else"]
]
inlineDef colorPoly
(Tooltip "if condition is true then give a else b")
conAnd = DesignBlock "conAnd" (Function "&&" [typeBool, typeBool, typeBool])
[ Value "LEFT" []
,Value "RIGHT" [TextE "and"]
]
(Inline True) colorBool
(Tooltip "Logical AND operation")
conOr = DesignBlock "conOr" (Function "||" [typeBool, typeBool, typeBool])
[ Value "LEFT" []
,Value "RIGHT" [TextE "or"]
]
(Inline True) colorBool
(Tooltip "Logical OR operation")
conNot = standardFunction "conNot" "not" Nothing [typeBool, typeBool]
["VALUE"] colorBool "Negation of logical value"
conEq = DesignBlock "conEq" (Function "==" [Poly "a", Poly "a", typeBool])
[ Value "LEFT" []
,Value "RIGHT" [TextE "="]
]
(Inline True) colorBool
(Tooltip "Are two items equal")
conNeq = DesignBlock "conNeq" (Function "/=" [Poly "a", Poly "a", typeBool])
[ Value "LEFT" []
,Value "RIGHT" [TextE "\x2260"]
]
(Inline True) colorBool
(Tooltip "Are two items not equal")
conTrue = standardFunction "conTrue" "True" Nothing [typeBool] [] colorBool "True logic value"
conFalse = standardFunction "conFalse" "False" Nothing [typeBool] [] colorBool "False logic value"
conGreater = DesignBlock "conGreater" (Function ">" [typeNumber, typeNumber, typeBool])
[ Value "LEFT" []
,Value "RIGHT" [TextE ">"]
]
(Inline True) colorBool
(Tooltip "Tells whether one number is greater than the other")
conGeq = DesignBlock "conGeq" (Function ">=" [typeNumber, typeNumber, typeBool])
[ Value "LEFT" []
,Value "RIGHT" [TextE "\x2265"]
]
(Inline True) colorBool
(Tooltip "Tells whether one number is greater than or equal to ther other")
conLess = DesignBlock "conLess" (Function "<" [typeNumber, typeNumber, typeBool])
[ Value "LEFT" []
,Value "RIGHT" [TextE "<"]
]
(Inline True) colorBool
(Tooltip "Tells whether one number is less than the other")
conLeq = DesignBlock "conLeq" (Function "<=" [typeNumber, typeNumber, typeBool])
[ Value "LEFT" []
,Value "RIGHT" [TextE "\x2264"]
]
(Inline True) colorBool
(Tooltip "Tells whether one number is less than or equal to ther other")
conEven = standardFunction "conEven" "even" Nothing [typeNumber, typeBool]
["VALUE"] colorBool "True if the number is even"
conOdd = standardFunction "conOdd" "odd" Nothing [typeNumber, typeBool]
["VALUE"] colorBool "True if the number is odd"
conStartWith = standardFunction "conStartWith" "startsWith" Nothing [typeText, typeText, typeBool]
["TEXTMAIN", "TEXTTEST"] colorBool "Test whether the text starts with the characters of the other text"
conEndWith = standardFunction "conEndWith" "endsWith" Nothing [typeText, typeText, typeBool]
["TEXTMAIN", "TEXTTEST"] colorBool "Test whether the text ends with the characters of the other text"
-- LISTS ----------------------------------------------
lstGenNum = DesignBlock "lstGenNum" (Function ".." [typeNumber, typeNumber, typeNumber])
[ Value "LEFT" [Text "["]
,Value "RIGHT" [TextE ".."]
,Dummy [Text "]"]
]
(Inline True) colorBool
(Tooltip "Tells whether one number is greater than the other")
comment = DesignBlock "comment" None
[Dummy [TextInput "" "TEXT", TextE "--"]]
inlineDef (Color 260) (Tooltip "Enter a comment")
getTypeBlocks :: [T.Text]
getTypeBlocks = map (\(DesignBlock name _ _ _ _ _) -> name) blockTypes
blockTypes = [
-- PICTURE
cwBlank
,cwCoordinatePlane
,cwCodeWorldLogo
,cwText
,cwDrawingOf
,cwCircle
,cwThickCircle
,cwSolidCircle
,cwRectangle
,cwThickRectangle
,cwSolidRectangle
,cwArc
,cwSector
,cwThickArc
-- TRANSFORMATIONS
,cwColored
,cwTranslate
,cwRotate
,cwScale
-- NUMBERS
--,numNumber
,numAdd
,numSub
,numMult
,numDiv
,numExp
,numMax
,numMin
,numOpposite
,numAbs
,numRound
,numReciprocal
,numQuot
,numRem
,numPi
,numSqrt
,numGCD
,numLCM
,numSin
,numCos
-- TEXT
,txtPrinted
,txtLowercase
,txtUppercase
-- COLORS
,cwBlue
,cwRed
,cwGreen
,cwBrown
,cwOrange
,cwBlack
,cwWhite
,cwCyan
,cwMagenta
,cwYellow
,cwAquamarine
,cwAzure
,cwViolet
,cwChartreuse
,cwRose
,cwPink
,cwPurple
,cwGray
,cwMixed
,cwLight
,cwDark
,cwBright
,cwDull
,cwTranslucent
,cwRGBA
-- LOGIC
-- ,conIf
,conAnd
,conOr
,conNot
,conEq
,conNeq
,conTrue
,conFalse
,conGreater
,conGeq
,conLess
,conLeq
,conEven
,conOdd
,conStartWith
,conEndWith
,comment
]
-- Assigns CodeGen functions defined here to the Blockly Javascript Code
-- generator
setBlockTypes :: IO ()
setBlockTypes = mapM_ setBlockType blockTypes
| three/codeworld | funblocks-client/src/Blocks/Types.hs | apache-2.0 | 18,939 | 0 | 13 | 4,939 | 4,154 | 2,266 | 1,888 | 326 | 3 |