code
stringlengths 5
1.03M
| repo_name
stringlengths 5
90
| path
stringlengths 4
158
| license
stringclasses 15
values | size
int64 5
1.03M
| n_ast_errors
int64 0
53.9k
| ast_max_depth
int64 2
4.17k
| n_whitespaces
int64 0
365k
| n_ast_nodes
int64 3
317k
| n_ast_terminals
int64 1
171k
| n_ast_nonterminals
int64 1
146k
| loc
int64 -1
37.3k
| cycloplexity
int64 -1
1.31k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE Haskell2010, OverloadedStrings #-}
{-# LINE 1 "Network/Wai/Middleware/AddHeaders.hs" #-}
-- |
--
-- Since 3.0.3
module Network.Wai.Middleware.AddHeaders
( addHeaders
) where
import Network.HTTP.Types (Header)
import Network.Wai (Middleware, modifyResponse, mapResponseHeaders)
import Network.Wai.Internal (Response(..))
import Data.ByteString (ByteString)
import qualified Data.CaseInsensitive as CI
import Control.Arrow (first)
-- | Prepend the given (name, value) pairs to the headers of every
-- response, without any duplicate or validity checks.
--
-- Since 3.0.3
addHeaders :: [(ByteString, ByteString)] -> Middleware
addHeaders pairs = modifyResponse (addHeaders' extra)
  where
    -- Convert the raw names to case-insensitive header names once.
    extra = map (first CI.mk) pairs
-- | Prepend the given headers to a response's header list.
--
-- Uses an operator section instead of an explicit lambda; behaviour is
-- unchanged (the extra headers still come first).
addHeaders' :: [Header] -> Response -> Response
addHeaders' h = mapResponseHeaders (h ++)
| phischu/fragnix | tests/packages/scotty/Network.Wai.Middleware.AddHeaders.hs | bsd-3-clause | 760 | 0 | 11 | 119 | 179 | 109 | 70 | 14 | 1 |
{-# LANGUAGE MultiParamTypeClasses, TypeSynonymInstances, FlexibleInstances #-}
{- |
Module : $Header$
Description : COL instance of class Logic
Copyright : (c) Till Mossakowski, Uni Bremen 2002-2004
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : non-portable (via Logic)
COL instance of class Logic
-}
module COL.Logic_COL where
import COL.AS_COL
import COL.COLSign
import COL.ATC_COL ()
import COL.Parse_AS ()
import COL.StatAna
import COL.Print_AS ()
import CASL.Sign
import CASL.StaticAna
import CASL.MixfixParser
import CASL.Morphism
import CASL.SymbolMapAnalysis
import CASL.AS_Basic_CASL
import CASL.Parse_AS_Basic
import CASL.MapSentence
import CASL.SymbolParser
import CASL.Logic_CASL ()
import Logic.Logic
-- | Proxy tag naming the COL logic; the logic framework dispatches on
-- this value, so it carries no data.
data COL = COL deriving Show

-- | Language metadata for COL (the language name defaults to 'Show').
instance Language COL where
    description _ =
        "COLCASL extends CASL by constructors and observers"
-- | Basic specifications: CASL basic specs instantiated with the
-- COL-specific signature items and no further extensions.
type C_BASIC_SPEC = BASIC_SPEC () COL_SIG_ITEM ()
-- | Signatures: CASL signatures extended with 'COLSign'.
type CSign = Sign () COLSign
-- | Signature morphisms over the COL signature extension.
type COLMor = Morphism () COLSign (DefMorExt COLSign)
-- | Sentences: plain CASL formulas without a formula extension.
type COLFORMULA = FORMULA ()
-- | Sub-signature test for the COL signature extension.
instance SignExtension COLSign where
    isSubSignExtension = isSubCOLSign
-- | Concrete syntax: parsers for basic specs, symbol items and symbol
-- maps, all instantiated with the COL reserved words.
instance Syntax COL C_BASIC_SPEC Symbol SYMB_ITEMS SYMB_MAP_ITEMS where
    parse_basic_spec COL = Just $ basicSpec col_reserved_words
    parse_symb_items COL = Just $ symbItems col_reserved_words
    parse_symb_map_items COL = Just $ symbMapItems col_reserved_words
-- | Sentence-level operations: translating formulas along morphisms and
-- extracting / naming symbols, delegated to the CASL implementations.
instance Sentences COL COLFORMULA CSign COLMor Symbol where
    map_sen COL m = return . mapSen (const id) m
    sym_of COL = symOf
    symmap_of COL = morphismToSymbMap
    sym_name COL = symName
-- | Static analysis for COL: basic analysis, symbol-map analysis and the
-- signature/morphism operations, all inherited from CASL and
-- instantiated with the COL signature extension ('COLSign').
instance StaticAnalysis COL C_BASIC_SPEC COLFORMULA
    SYMB_ITEMS SYMB_MAP_ITEMS
    CSign
    COLMor
    Symbol RawSymbol where
        -- basic analysis with the COL-specific SIG-item analyser
        basic_analysis COL = Just $ basicAnalysis (const return)
            (const return) ana_COL_SIG_ITEM emptyMix
        stat_symb_map_items COL = statSymbMapItems
        stat_symb_items COL = statSymbItems
        symbol_to_raw COL = symbolToRaw
        id_to_raw COL = idToRaw
        matches COL = CASL.Morphism.matches
        empty_signature COL = emptySign emptyCOLSign
        -- signature operations, parameterised by the COL extension union
        signature_union COL sigma1 = return . addSig addCOLSign sigma1
        morphism_union COL = plainMorphismUnion addCOLSign
        final_union COL = finalUnion addCOLSign
        is_subsig COL = isSubSig isSubCOLSign
        subsig_inclusion COL = sigInclusion emptyMorExt
        cogenerated_sign COL = cogeneratedSign emptyMorExt
        generated_sign COL = generatedSign emptyMorExt
        induced_from_morphism COL = inducedFromMorphism emptyMorExt
        induced_from_to_morphism COL =
            inducedFromToMorphism emptyMorExt isSubCOLSign diffCOLSign
-- | Tie everything together as an instance of 'Logic'.  COL has no
-- sublogics (the first @()@ parameter) and a trivial proof tree.
instance Logic COL () C_BASIC_SPEC COLFORMULA SYMB_ITEMS SYMB_MAP_ITEMS
    CSign COLMor Symbol RawSymbol () where
        empty_proof_tree _ = ()
| keithodulaigh/Hets | COL/Logic_COL.hs | gpl-2.0 | 3,110 | 0 | 9 | 791 | 596 | 307 | 289 | 70 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.Redshift.CreateTags
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Adds one or more tags to a specified resource.
--
-- A resource can have up to 10 tags. If you try to create more than 10 tags
-- for a resource, you will receive an error and the attempt will fail.
--
-- If you specify a key that already exists for the resource, the value for
-- that key will be updated with the new value.
--
-- <http://docs.aws.amazon.com/redshift/latest/APIReference/API_CreateTags.html>
module Network.AWS.Redshift.CreateTags
(
-- * Request
CreateTags
-- ** Request constructor
, createTags
-- ** Request lenses
, ctResourceName
, ctTags
-- * Response
, CreateTagsResponse
-- ** Response constructor
, createTagsResponse
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.Redshift.Types
import qualified GHC.Exts
-- | Request payload for the Redshift @CreateTags@ call.
data CreateTags = CreateTags
    { _ctResourceName :: Text -- ^ ARN of the resource being tagged
    , _ctTags         :: List "member" Tag -- ^ tags, serialised as @member@ list elements
    } deriving (Eq, Read, Show)
-- | Build a 'CreateTags' request.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'ctResourceName' @::@ 'Text' (required, supplied here)
--
-- * 'ctTags' @::@ ['Tag'] (starts empty; set via the lens)
--
createTags :: Text -- ^ 'ctResourceName'
           -> CreateTags
createTags arn = CreateTags
    { _ctResourceName = arn
    , _ctTags         = mempty
    }
-- | The Amazon Resource Name (ARN) to which you want to add the tag or tags. For
-- example, 'arn:aws:redshift:us-east-1:123456789:cluster:t1'.
ctResourceName :: Lens' CreateTags Text
ctResourceName = lens _ctResourceName (\rq arn -> rq { _ctResourceName = arn })
-- | One or more name/value pairs to add as tags to the specified resource. Each
-- tag name is passed in with the parameter 'tag-key' and the corresponding value
-- is passed in with the parameter 'tag-value'. The 'tag-key' and 'tag-value'
-- parameters are separated by a colon (:). Separate multiple tags with a space.
-- For example, '--tags "tag-key"="owner":"tag-value"="admin""tag-key"="environment":"tag-value"="test""tag-key"="version":"tag-value"="1.0"'.
ctTags :: Lens' CreateTags [Tag]
ctTags = lens _ctTags (\rq ts -> rq { _ctTags = ts }) . _List
-- | Empty response to a 'CreateTags' request.
data CreateTagsResponse = CreateTagsResponse
    deriving (Eq, Ord, Read, Show, Generic)

-- | 'CreateTagsResponse' constructor.
createTagsResponse :: CreateTagsResponse
createTagsResponse = CreateTagsResponse
-- | All requests go to the service root.
instance ToPath CreateTags where
    toPath = const "/"

-- | Serialise the request fields as query parameters.
instance ToQuery CreateTags where
    toQuery CreateTags{..} = mconcat
        [ "ResourceName" =? _ctResourceName
        , "Tags"         =? _ctTags
        ]
instance ToHeaders CreateTags

-- | POST to the @CreateTags@ action; the response carries no payload.
instance AWSRequest CreateTags where
    type Sv CreateTags = Redshift
    type Rs CreateTags = CreateTagsResponse
    request  = post "CreateTags"
    response = nullResponse CreateTagsResponse
| kim/amazonka | amazonka-redshift/gen/Network/AWS/Redshift/CreateTags.hs | mpl-2.0 | 3,796 | 0 | 10 | 837 | 405 | 251 | 154 | 51 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Control.Applicative ((<$>))
import Control.Monad (forM_)
import qualified Data.Aeson as Json
import qualified Data.ByteString.Lazy as BS
import qualified Data.Text.Lazy as Text
import qualified Data.Text.Lazy.Encoding as Text
import qualified Data.Text.Lazy.IO as Text
import qualified System.Directory as Dir
import qualified System.Environment as Env
import System.Exit (exitFailure)
import System.FilePath ((</>), takeExtension)
import System.IO (hPutStrLn, stderr)
import qualified Elm.Docs as Docs
import Elm.Utils ((|>))
-- | Expect exactly one argument (the path to a package version) and
-- rewrite both its package-wide and per-module documentation files.
main :: IO ()
main = do
    args <- Env.getArgs
    case args of
        [pkg] ->
            fixAllDocs pkg >> fixModuleDocs pkg
        _ -> do
            hPutStrLn stderr "Expecting a path to a particular version of a package"
            exitFailure
-- FIX documentation.json
-- | Decode and re-encode the package-wide @documentation.json@ file.
fixAllDocs :: FilePath -> IO ()
fixAllDocs pkg = do
    let docsFile = pkg </> "documentation.json"
    decoded <- Json.decode <$> BS.readFile docsFile
    writeAsJson docsFile (decoded :: Maybe [Docs.Documentation])
-- FIX docs/*.json
-- | Decode and re-encode every @.json@ file under @<pkg>/docs/@.
fixModuleDocs :: FilePath -> IO ()
fixModuleDocs pkg = do
    entries <- Dir.getDirectoryContents (pkg </> "docs")
    let jsonFiles = filter (\p -> takeExtension p == ".json") entries
    forM_ jsonFiles $ \file -> do
        let path = pkg </> "docs" </> file
        decoded <- Json.decode <$> BS.readFile path
        writeAsJson path (decoded :: Maybe Docs.Documentation)
-- WRITE JSON
-- | Pretty-print a decoded JSON value back to @file@, un-escaping @>@
-- characters; print an error and exit when decoding produced 'Nothing'.
writeAsJson :: (Json.ToJSON a) => FilePath -> Maybe a -> IO ()
writeAsJson file maybeValue =
    case maybeValue of
        Nothing -> do
            hPutStrLn stderr "Problem reading JSON"
            exitFailure
        Just value ->
            Text.writeFile file
                (Text.replace "\\u003e" ">" (Text.decodeUtf8 (Docs.prettyJson value)))
| Dedoig/package.elm-lang.org | upgrade-docs.hs | bsd-3-clause | 1,977 | 0 | 16 | 519 | 535 | 291 | 244 | 53 | 2 |
{-
(c) The University of Glasgow 2006
(c) The AQUA Project, Glasgow University, 1994-1998
Core-syntax unfoldings
Unfoldings (which can travel across module boundaries) are in Core
syntax (namely @CoreExpr@s).
The type @Unfolding@ sits ``above'' simply-Core-expressions
unfoldings, capturing ``higher-level'' things we know about a binding,
usually things that the simplifier found out (e.g., ``it's a
literal''). In the corner of a @CoreUnfolding@ unfolding, you will
find, unsurprisingly, a Core expression.
-}
{-# LANGUAGE CPP #-}
module ETA.Core.CoreUnfold (
Unfolding, UnfoldingGuidance, -- Abstract types
noUnfolding, mkImplicitUnfolding,
mkUnfolding, mkCoreUnfolding,
mkTopUnfolding, mkSimpleUnfolding, mkWorkerUnfolding,
mkInlineUnfolding, mkInlinableUnfolding, mkWwInlineRule,
mkCompulsoryUnfolding, mkDFunUnfolding,
specUnfolding,
ArgSummary(..),
couldBeSmallEnoughToInline, inlineBoringOk,
certainlyWillInline, smallEnoughToInline,
callSiteInline, CallCtxt(..),
-- Reexport from CoreSubst (it only live there so it can be used
-- by the Very Simple Optimiser)
exprIsConApp_maybe, exprIsLiteral_maybe
) where
#include "HsVersions.h"
import ETA.Main.DynFlags
import ETA.Core.CoreSyn
import ETA.Core.PprCore () -- Instances
import ETA.SimplCore.OccurAnal ( occurAnalyseExpr )
import ETA.Core.CoreSubst hiding( substTy )
import ETA.Core.CoreArity ( manifestArity, exprBotStrictness_maybe )
import ETA.Core.CoreUtils
import ETA.BasicTypes.Id
import ETA.BasicTypes.DataCon
import ETA.BasicTypes.Literal
import ETA.Prelude.PrimOp
import ETA.BasicTypes.IdInfo
import ETA.BasicTypes.BasicTypes ( Arity )
import ETA.Types.Type
import ETA.Prelude.PrelNames
import ETA.Prelude.TysPrim ( realWorldStatePrimTy )
import ETA.Utils.Bag
import ETA.Utils.Util
import ETA.Utils.FastTypes
import ETA.Utils.FastString
import ETA.Utils.Outputable
import ETA.Prelude.ForeignCall
import qualified Data.ByteString as BS
import Data.Maybe
{-
************************************************************************
* *
\subsection{Making unfoldings}
* *
************************************************************************
-}
-- | Unfolding for a top-level binding; the 'Bool' records whether the
-- binding is bottoming (see 'mkUnfolding').
mkTopUnfolding :: DynFlags -> Bool -> CoreExpr -> Unfolding
mkTopUnfolding dflags is_bottoming rhs =
    mkUnfolding dflags InlineRhs True is_bottoming rhs

-- | For implicit Ids, do a tiny bit of optimising first.
mkImplicitUnfolding :: DynFlags -> CoreExpr -> Unfolding
mkImplicitUnfolding dflags rhs =
    mkTopUnfolding dflags False (simpleOptExpr rhs)
-- Note [Top-level flag on inline rules]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- Slight hack: note that mk_inline_rules conservatively sets the
-- top-level flag to True. It gets set more accurately by the simplifier
-- Simplify.simplUnfolding.
-- | Ordinary unfolding for a let-bound RHS: guidance is computed from
-- the expression; neither top-level nor bottoming.
mkSimpleUnfolding :: DynFlags -> CoreExpr -> Unfolding
mkSimpleUnfolding dflags rhs = mkUnfolding dflags InlineRhs False False rhs

-- | Unfolding for a dictionary function: record the binders, the data
-- constructor, and the occurrence-analysed method arguments.
-- See Note [Occurrence analysis of unfoldings]
mkDFunUnfolding :: [Var] -> DataCon -> [CoreExpr] -> Unfolding
mkDFunUnfolding bndrs con ops =
    DFunUnfolding { df_bndrs = bndrs
                  , df_con   = con
                  , df_args  = map occurAnalyseExpr ops }
-- | Stable unfolding used for worker/wrapper wrappers: fires only when
-- applied to at least @arity@ value arguments in a non-boring context.
mkWwInlineRule :: CoreExpr -> Arity -> Unfolding
mkWwInlineRule expr arity =
    mkCoreUnfolding InlineStable True
        (simpleOptExpr expr)
        (UnfWhen { ug_arity = arity, ug_unsat_ok = unSaturatedOk
                 , ug_boring_ok = boringCxtNotOk })

-- | Unfolding for things that absolutely must be unfolded; may be used
-- unsaturated and in boring contexts.
mkCompulsoryUnfolding :: CoreExpr -> Unfolding
mkCompulsoryUnfolding expr =
    mkCoreUnfolding InlineCompulsory True
        (simpleOptExpr expr)
        (UnfWhen { ug_arity = 0 -- arity of the unfolding doesn't matter
                 , ug_unsat_ok = unSaturatedOk, ug_boring_ok = boringCxtOk })
-- | Transfer a stable unfolding from a wrapper to its worker: apply
-- @work_fn@ to the stored template and recompute the guidance for the
-- transformed template.
mkWorkerUnfolding :: DynFlags -> (CoreExpr -> CoreExpr) -> Unfolding -> Unfolding
-- See Note [Worker-wrapper for INLINABLE functions] in WorkWrap
mkWorkerUnfolding dflags work_fn
                  (CoreUnfolding { uf_src = src, uf_tmpl = tmpl
                                 , uf_is_top = top_lvl })
  | isStableSource src
  = mkCoreUnfolding src top_lvl new_tmpl guidance
  where
    new_tmpl = simpleOptExpr (work_fn tmpl)
    guidance = calcUnfoldingGuidance dflags new_tmpl
-- Fall-through: non-stable or non-Core unfoldings are dropped.
mkWorkerUnfolding _ _ _ = noUnfolding
-- | Make a stable unfolding for an INLINE pragma.  If an arity is
-- supplied the unfolding fires only when saturated to that arity;
-- otherwise the manifest arity of the optimised RHS is used and
-- unsaturated calls may still inline.
mkInlineUnfolding :: Maybe Arity -> CoreExpr -> Unfolding
mkInlineUnfolding mb_arity expr
  = mkCoreUnfolding InlineStable
                    True         -- Note [Top-level flag on inline rules]
                    expr' guide
  where
    expr' = simpleOptExpr expr
    guide = case mb_arity of
              Nothing -> UnfWhen { ug_arity = manifestArity expr'
                                 , ug_unsat_ok = unSaturatedOk
                                 , ug_boring_ok = boring_ok }
              Just arity -> UnfWhen { ug_arity = arity
                                    , ug_unsat_ok = needSaturated
                                    , ug_boring_ok = boring_ok }
    boring_ok = inlineBoringOk expr'
-- | Stable unfolding for an INLINABLE pragma: guidance is computed from
-- the lightly-optimised RHS rather than being unconditional.
mkInlinableUnfolding :: DynFlags -> CoreExpr -> Unfolding
mkInlinableUnfolding dflags expr =
    mkUnfolding dflags InlineStable True bottoming optimised
  where
    optimised = simpleOptExpr expr
    bottoming = isJust (exprBotStrictness_maybe optimised)
specUnfolding :: DynFlags -> Subst -> [Var] -> [CoreExpr] -> Unfolding -> Unfolding
-- See Note [Specialising unfoldings]
-- Specialise an unfolding by applying a substitution and a prefix of
-- specialised arguments.  DFun unfoldings stay DFun unfoldings; stable
-- CoreUnfoldings with UnfWhen guidance are rebuilt with an adjusted
-- arity; everything else is discarded.
specUnfolding _ subst new_bndrs spec_args
              df@(DFunUnfolding { df_bndrs = bndrs, df_con = con , df_args = args })
  = ASSERT2( length bndrs >= length spec_args, ppr df $$ ppr spec_args $$ ppr new_bndrs )
    mkDFunUnfolding (new_bndrs ++ extra_bndrs) con
                    (map (substExpr spec_doc subst2) args)
  where
    subst1 = extendSubstList subst (bndrs `zip` spec_args)
    (subst2, extra_bndrs) = substBndrs subst1 (dropList spec_args bndrs)
specUnfolding _dflags subst new_bndrs spec_args
              (CoreUnfolding { uf_src = src, uf_tmpl = tmpl
                             , uf_is_top = top_lvl
                             , uf_guidance = old_guidance })
 | isStableSource src  -- See Note [Specialising unfoldings]
 , UnfWhen { ug_arity = old_arity
           , ug_unsat_ok = unsat_ok
           , ug_boring_ok = boring_ok } <- old_guidance
 = let guidance = UnfWhen { ug_arity = old_arity - count isValArg spec_args
                                     + count isId new_bndrs
                          , ug_unsat_ok = unsat_ok
                          , ug_boring_ok = boring_ok }
       new_tmpl = simpleOptExpr $ mkLams new_bndrs $
                  mkApps (substExpr spec_doc subst tmpl) spec_args
                  -- The beta-redexes created here will be simplified
                  -- away by simplOptExpr in mkUnfolding
   in mkCoreUnfolding src top_lvl new_tmpl guidance
specUnfolding _ _ _ _ _ = noUnfolding
-- Shared SDoc used for the substExpr calls above.
spec_doc :: SDoc
spec_doc = ptext (sLit "specUnfolding")
{-
Note [Specialising unfoldings]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we specialise a function for some given type-class arguments, we use
specUnfolding to specialise its unfolding. Some important points:
* If the original function has a DFunUnfolding, the specialised one
must do so too! Otherwise we lose the magic rules that make it
interact with ClassOps
* There is a bit of hack for INLINABLE functions:
f :: Ord a => ....
f = <big-rhs>
{- INLINEABLE f #-}
Now if we specialise f, should the specialised version still have
an INLINEABLE pragma? If it does, we'll capture a specialised copy
of <big-rhs> as its unfolding, and that probably won't inline. But
if we don't, the specialised version of <big-rhs> might be small
enough to inline at a call site. This happens with Control.Monad.liftM3,
and can cause a lot more allocation as a result (nofib n-body shows this).
Moreover, keeping the INLINEABLE thing isn't much help, because
the specialised function (probably) isn't overloaded any more.
Conclusion: drop the INLINEABLE pragma. In practice what this means is:
if a stable unfolding has UnfoldingGuidance of UnfWhen,
we keep it (so the specialised thing too will always inline)
if a stable unfolding has UnfoldingGuidance of UnfIfGoodArgs
(which arises from INLINEABLE), we discard it
-}
mkCoreUnfolding :: UnfoldingSource -> Bool -> CoreExpr
                -> UnfoldingGuidance -> Unfolding
-- Occurrence-analyses the expression before capturing it
-- Unlike 'mkUnfolding', the guidance is supplied by the caller; all the
-- cached predicates are computed from the un-analysed expression.
mkCoreUnfolding src top_lvl expr guidance
  = CoreUnfolding { uf_tmpl = occurAnalyseExpr expr,
                      -- See Note [Occurrence analysis of unfoldings]
                    uf_src = src,
                    uf_is_top = top_lvl,
                    uf_is_value = exprIsHNF expr,
                    uf_is_conlike = exprIsConLike expr,
                    uf_is_work_free = exprIsWorkFree expr,
                    uf_expandable = exprIsExpandable expr,
                    uf_guidance = guidance }
mkUnfolding :: DynFlags -> UnfoldingSource -> Bool -> Bool -> CoreExpr
            -> Unfolding
-- Calculates unfolding guidance
-- Occurrence-analyses the expression before capturing it
-- The two Bools are top-level and is-bottoming respectively; top-level
-- bottoming functions (unless trivial) get no unfolding at all.
mkUnfolding dflags src top_lvl is_bottoming expr
  | top_lvl && is_bottoming
  , not (exprIsTrivial expr)
  = NoUnfolding    -- See Note [Do not inline top-level bottoming functions]
  | otherwise
  = CoreUnfolding { uf_tmpl = occurAnalyseExpr expr,
                      -- See Note [Occurrence analysis of unfoldings]
                    uf_src = src,
                    uf_is_top = top_lvl,
                    uf_is_value = exprIsHNF expr,
                    uf_is_conlike = exprIsConLike expr,
                    uf_expandable = exprIsExpandable expr,
                    uf_is_work_free = exprIsWorkFree expr,
                    uf_guidance = guidance }
  where
    guidance = calcUnfoldingGuidance dflags expr
        -- NB: *not* (calcUnfoldingGuidance (occurAnalyseExpr expr))!
        -- See Note [Calculate unfolding guidance on the non-occ-anal'd expression]
{-
Note [Occurrence analysis of unfoldings]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We do occurrence-analysis of unfoldings once and for all, when the
unfolding is built, rather than each time we inline them.
But given this decision it's vital that we do
*always* do it. Consider this unfolding
\x -> letrec { f = ...g...; g* = f } in body
where g* is (for some strange reason) the loop breaker. If we don't
occ-anal it when reading it in, we won't mark g as a loop breaker, and
we may inline g entirely in body, dropping its binding, and leaving
the occurrence in f out of scope. This happened in Trac #8892, where
the unfolding in question was a DFun unfolding.
But more generally, the simplifier is designed on the
basis that it is looking at occurrence-analysed expressions, so better
ensure that they actually are.
Note [Calculate unfolding guidance on the non-occ-anal'd expression]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Notice that we give the non-occur-analysed expression to
calcUnfoldingGuidance. In some ways it'd be better to occur-analyse
first; for example, sometimes during simplification, there's a large
let-bound thing which has been substituted, and so is now dead; so
'expr' contains two copies of the thing while the occurrence-analysed
expression doesn't.
Nevertheless, we *don't* and *must not* occ-analyse before computing
the size because
a) The size computation bales out after a while, whereas occurrence
analysis does not.
b) Residency increases sharply if you occ-anal first. I'm not
100% sure why, but it's a large effect. Compiling Cabal went
from residency of 534M to over 800M with this one change.
This can occasionally mean that the guidance is very pessimistic;
it gets fixed up next round. And it should be rare, because large
let-bound things that are dead are usually caught by preInlineUnconditionally
************************************************************************
* *
\subsection{The UnfoldingGuidance type}
* *
************************************************************************
-}
inlineBoringOk :: CoreExpr -> Bool
-- See Note [INLINE for small functions]
-- True => the result of inlining the expression is
-- no bigger than the expression itself
-- eg (\x y -> f y x)
-- This is a quick and dirty version. It doesn't attempt
-- to deal with (\x y z -> x (y z))
-- The really important one is (x `cast` c)
inlineBoringOk e
  = go 0 e
  where
    -- Walk down the expression, earning one credit per value lambda and
    -- spending one per trivial argument; a Var at the head with a
    -- non-negative balance counts as boring-OK.
    go :: Int -> CoreExpr -> Bool
    go credit (Lam x e) | isId x = go (credit+1) e
                        | otherwise = go credit e
    go credit (App f (Type {})) = go credit f
    go credit (App f a) | credit > 0
                        , exprIsTrivial a = go (credit-1) f
    go credit (Tick _ e) = go credit e -- dubious
    go credit (Cast e _) = go credit e
    go _ (Var {}) = boringCxtOk
    go _ _ = boringCxtNotOk
-- | Compute inlining guidance for an expression: 'UnfNever' if it is too
-- big, unconditional 'UnfWhen' if it is tiny, otherwise 'UnfIfGoodArgs'
-- with per-argument discounts from the size computation.
calcUnfoldingGuidance
        :: DynFlags
        -> CoreExpr    -- Expression to look at
        -> UnfoldingGuidance
calcUnfoldingGuidance dflags (Tick t expr)
  | not (tickishIsCode t) -- non-code ticks don't matter for unfolding
  = calcUnfoldingGuidance dflags expr
calcUnfoldingGuidance dflags expr
  = case sizeExpr dflags (iUnbox bOMB_OUT_SIZE) val_bndrs body of
      TooBig -> UnfNever
      SizeIs size cased_bndrs scrut_discount
        | uncondInline expr n_val_bndrs (iBox size)
        -> UnfWhen { ug_unsat_ok = unSaturatedOk
                   , ug_boring_ok = boringCxtOk
                   , ug_arity = n_val_bndrs } -- Note [INLINE for small functions]
        | otherwise
        -> UnfIfGoodArgs { ug_args = map (mk_discount cased_bndrs) val_bndrs
                         , ug_size = iBox size
                         , ug_res = iBox scrut_discount }
  where
    (bndrs, body) = collectBinders expr
    bOMB_OUT_SIZE = ufCreationThreshold dflags
           -- Bomb out if size gets bigger than this
    val_bndrs = filter isId bndrs
    n_val_bndrs = length val_bndrs
    -- Discount for a binder: combine (sum, or max for function-typed
    -- binders) the discounts recorded for it during the size pass.
    mk_discount :: Bag (Id,Int) -> Id -> Int
    mk_discount cbs bndr = foldlBag combine 0 cbs
        where
          combine acc (bndr', disc)
            | bndr == bndr' = acc `plus_disc` disc
            | otherwise = acc
          plus_disc :: Int -> Int -> Int
          plus_disc | isFunTy (idType bndr) = max
                    | otherwise = (+)
            -- See Note [Function and non-function discounts]
{-
Note [Computing the size of an expression]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The basic idea of sizeExpr is obvious enough: count nodes. But getting the
heuristics right has taken a long time. Here's the basic strategy:
* Variables, literals: 0
(Exception for string literals, see litSize.)
* Function applications (f e1 .. en): 1 + #value args
* Constructor applications: 1, regardless of #args
* Let(rec): 1 + size of components
* Note, cast: 0
Examples
Size Term
--------------
0 42#
0 x
0 True
2 f x
1 Just x
4 f (g x)
Notice that 'x' counts 0, while (f x) counts 2. That's deliberate: there's
a function call to account for. Notice also that constructor applications
are very cheap, because exposing them to a caller is so valuable.
[25/5/11] All sizes are now multiplied by 10, except for primops
(which have sizes like 1 or 4. This makes primops look fantastically
cheap, and seems to be almost universally beneficial. Done partly as a
result of #4978.
Note [Do not inline top-level bottoming functions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The FloatOut pass has gone to some trouble to float out calls to 'error'
and similar friends. See Note [Bottoming floats] in SetLevels.
Do not re-inline them! But we *do* still inline if they are very small
(the uncondInline stuff).
Note [INLINE for small functions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider {-# INLINE f #-}
f x = Just x
g y = f y
Then f's RHS is no larger than its LHS, so we should inline it into
even the most boring context. In general, if the function is
sufficiently small that its body is as small as the call itself, the
inline unconditionally, regardless of how boring the context is.
Things to note:
(1) We inline *unconditionally* if inlined thing is smaller (using sizeExpr)
than the thing it's replacing. Notice that
(f x) --> (g 3) -- YES, unconditionally
(f x) --> x : [] -- YES, *even though* there are two
-- arguments to the cons
x --> g 3 -- NO
x --> Just v -- NO
It's very important not to unconditionally replace a variable by
a non-atomic term.
(2) We do this even if the thing isn't saturated, else we end up with the
silly situation that
f x y = x
...map (f 3)...
doesn't inline. Even in a boring context, inlining without being
saturated will give a lambda instead of a PAP, and will be more
efficient at runtime.
(3) However, when the function's arity > 0, we do insist that it
has at least one value argument at the call site. (This check is
made in the UnfWhen case of callSiteInline.) Otherwise we find this:
f = /\a \x:a. x
d = /\b. MkD (f b)
If we inline f here we get
d = /\b. MkD (\x:b. x)
and then prepareRhs floats out the argument, abstracting the type
variables, so we end up with the original again!
(4) We must be much more cautious about arity-zero things. Consider
let x = y +# z in ...
In *size* terms primops look very small, because the generate a
single instruction, but we do not want to unconditionally replace
every occurrence of x with (y +# z). So we only do the
unconditional-inline thing for *trivial* expressions.
NB: you might think that PostInlineUnconditionally would do this
but it doesn't fire for top-level things; see SimplUtils
Note [Top level and postInlineUnconditionally]
-}
-- | Decide whether to inline an unfolding unconditionally: true when
-- inlining cannot increase program size.  The call being replaced is
-- charged at 10 * (arity + 1): one unit per argument plus one for the
-- function itself.  See Note [INLINE for small functions].
uncondInline :: CoreExpr -> Arity -> Int -> Bool
uncondInline rhs arity size
  = if arity > 0
    then size <= 10 * (arity + 1) -- See Note [INLINE for small functions] (1)
    else exprIsTrivial rhs        -- See Note [INLINE for small functions] (4)
-- | Compute the size of an expression for inlining purposes, bombing
-- out (returning TooBig) as soon as the running total exceeds
-- bOMB_OUT_SIZE.  The result also records per-argument discounts for
-- arguments in top_args that are scrutinised, and a result discount.
sizeExpr :: DynFlags
         -> FastInt         -- Bomb out if it gets bigger than this
         -> [Id]            -- Arguments; we're interested in which of these
                            -- get case'd
         -> CoreExpr
         -> ExprSize
-- Note [Computing the size of an expression]
sizeExpr dflags bOMB_OUT_SIZE top_args expr
  = size_up expr
  where
    -- Casts, ticks, types and coercions generate no code, so cost nothing
    size_up (Cast e _)   = size_up e
    size_up (Tick _ e)   = size_up e
    size_up (Type _)     = sizeZero           -- Types cost nothing
    size_up (Coercion _) = sizeZero
    size_up (Lit lit)    = sizeN (litSize lit)
    size_up (Var f) | isRealWorldId f = sizeZero
                      -- Make sure we get constructor discounts even
                      -- on nullary constructors
                    | otherwise       = size_up_call f [] 0

    size_up (App fun arg)
      | isTyCoArg arg = size_up fun
      | otherwise     = size_up arg  `addSizeNSD`
                        size_up_app fun [arg] (if isRealWorldExpr arg then 1 else 0)

    size_up (Lam b e)
      | isId b && not (isRealWorldId b) = lamScrutDiscount dflags (size_up e `addSizeN` 10)
      | otherwise = size_up e

    size_up (Let (NonRec binder rhs) body)
      = size_up rhs             `addSizeNSD`
        size_up body            `addSizeN`
        (if isUnLiftedType (idType binder) then 0 else 10)
                -- For the allocation
                -- If the binder has an unlifted type there is no allocation

    size_up (Let (Rec pairs) body)
      = foldr (addSizeNSD . size_up . snd)
              (size_up body `addSizeN` (10 * length pairs))   -- (length pairs) for the allocation
              pairs

    size_up (Case (Var v) _ _ alts)
      | v `elem` top_args       -- We are scrutinising an argument variable
      = alts_size (foldr addAltSize sizeZero alt_sizes)
                  (foldr maxSize    sizeZero alt_sizes)
                -- Good to inline if an arg is scrutinised, because
                -- that may eliminate allocation in the caller
                -- And it eliminates the case itself
      where
        alt_sizes = map size_up_alt alts

                -- alts_size tries to compute a good discount for
                -- the case when we are scrutinising an argument variable
        alts_size (SizeIs tot tot_disc tot_scrut)   -- Size of all alternatives
                  (SizeIs max _ _)                  -- Size of biggest alternative
          = SizeIs tot (unitBag (v, iBox (_ILIT(20) +# tot -# max)) `unionBags` tot_disc) tot_scrut
                -- If the variable is known, we produce a discount that
                -- will take us back to 'max', the size of the largest alternative
                -- The 1+ is a little discount for reduced allocation in the caller
                --
                -- Notice though, that we return tot_disc, the total discount from
                -- all branches.  I think that's right.

        alts_size tot_size _ = tot_size

    size_up (Case e _ _ alts) = size_up e  `addSizeNSD`
                                foldr (addAltSize . size_up_alt) case_size alts
      where
        case_size
          | is_inline_scrut e, not (lengthExceeds alts 1) = sizeN (-10)
          | otherwise = sizeZero
                -- Normally we don't charge for the case itself, but
                -- we charge one per alternative (see size_up_alt,
                -- below) to account for the cost of the info table
                -- and comparisons.
                --
                -- However, in certain cases (see is_inline_scrut
                -- below), no code is generated for the case unless
                -- there are multiple alts.  In these cases we
                -- subtract one, making the first alt free.
                -- e.g. case x# +# y# of _ -> ...   should cost 1
                --      case touch# x# of _ -> ...  should cost 0
                -- (see #4978)
                --
                -- I would like to not have the "not (lengthExceeds alts 1)"
                -- condition above, but without that some programs got worse
                -- (spectral/hartel/event and spectral/para).  I don't fully
                -- understand why. (SDM 24/5/11)

                -- unboxed variables, inline primops and unsafe foreign calls
                -- are all "inline" things:
        is_inline_scrut (Var v) = isUnLiftedType (idType v)
        is_inline_scrut scrut
          | (Var f, _) <- collectArgs scrut
            = case idDetails f of
                FCallId fc  -> not (isSafeForeignCall fc)
                PrimOpId op -> not (primOpOutOfLine op)
                _other      -> False
          | otherwise
            = False

    ------------
    -- size_up_app is used when there's ONE OR MORE value args
    size_up_app (App fun arg) args voids
      | isTyCoArg arg       = size_up_app fun args voids
      | isRealWorldExpr arg = size_up_app fun (arg:args) (voids + 1)
      | otherwise           = size_up arg  `addSizeNSD`
                              size_up_app fun (arg:args) voids
    size_up_app (Var fun)     args voids = size_up_call fun args voids
    size_up_app (Tick _ expr) args voids = size_up_app expr args voids
    size_up_app other         args voids = size_up other `addSizeN` (length args - voids)

    ------------
    size_up_call :: Id -> [CoreExpr] -> Int -> ExprSize
    size_up_call fun val_args voids
       = case idDetails fun of
           FCallId _        -> sizeN (10 * (1 + length val_args))
           DataConWorkId dc -> conSize    dc (length val_args)
           PrimOpId op      -> primOpSize op (length val_args)
           ClassOpId _      -> classOpSize dflags top_args val_args
           _                -> funSize dflags top_args fun (length val_args) voids

    ------------
    size_up_alt (_con, _bndrs, rhs) = size_up rhs `addSizeN` 10
        -- Don't charge for args, so that wrappers look cheap
        -- (See comments about wrappers with Case)
        --
        -- IMPORTANT: *do* charge 1 for the alternative, else we
        -- find that giant case nests are treated as practically free
        -- A good example is Foreign.C.Error.errrnoToIOError

    ------------
    -- These addSize things have to be here because
    -- I don't want to give them bOMB_OUT_SIZE as an argument
    addSizeN TooBig          _ = TooBig
    addSizeN (SizeIs n xs d) m = mkSizeIs bOMB_OUT_SIZE (n +# iUnbox m) xs d

    -- addAltSize is used to add the sizes of case alternatives
    addAltSize TooBig            _      = TooBig
    addAltSize _                 TooBig = TooBig
    addAltSize (SizeIs n1 xs d1) (SizeIs n2 ys d2)
        = mkSizeIs bOMB_OUT_SIZE (n1 +# n2)
                                 (xs `unionBags` ys)
                                 (d1 +# d2)   -- Note [addAltSize result discounts]

    -- This variant ignores the result discount from its LEFT argument
    -- It's used when the second argument isn't part of the result
    addSizeNSD TooBig            _      = TooBig
    addSizeNSD _                 TooBig = TooBig
    addSizeNSD (SizeIs n1 xs _) (SizeIs n2 ys d2)
        = mkSizeIs bOMB_OUT_SIZE (n1 +# n2)
                                 (xs `unionBags` ys)
                                 d2   -- Ignore d1

    isRealWorldId id = idType id `eqType` realWorldStatePrimTy

    -- an expression of type State# RealWorld must be a variable
    isRealWorldExpr (Var id)   = isRealWorldId id
    isRealWorldExpr (Tick _ e) = isRealWorldExpr e
    isRealWorldExpr _          = False
-- | Finds a nominal size of a literal (not just string literals).
litSize :: Literal -> Int
-- Used by CoreUnfold.sizeExpr
litSize (LitInteger {}) = 100   -- Note [Size of literal integers]
litSize (MachStr str)   = 10 + 10 * ((BS.length str + 3) `div` 4)
        -- If size could be 0 then @f "x"@ might be too small
        -- [Sept03: make literal strings a bit bigger to avoid fruitless
        --   duplication of little strings]
litSize _other = 0    -- Must match size of nullary constructors
                      -- Key point: if  x |-> 4, then x must inline unconditionally
                      --     (eg via case binding)
-- | Size of a class-method (dictionary selector) application, with a
-- discount when the dictionary is a lambda-bound argument.
classOpSize :: DynFlags -> [Id] -> [CoreExpr] -> ExprSize
-- See Note [Conlike is interesting]
classOpSize _ _ []
  = sizeZero
classOpSize dflags top_args (arg1 : other_args)
  = SizeIs (iUnbox size) arg_discount (_ILIT(0))
  where
    size = 20 + (10 * length other_args)
    -- If the class op is scrutinising a lambda bound dictionary then
    -- give it a discount, to encourage the inlining of this function
    -- The actual discount is rather arbitrarily chosen
    arg_discount = case arg1 of
                     Var dict | dict `elem` top_args
                              -> unitBag (dict, ufDictDiscount dflags)
                     _other   -> emptyBag
funSize :: DynFlags -> [Id] -> Id -> Int -> Int -> ExprSize
-- Size for functions that are not constructors or primops
-- Note [Function applications]
funSize dflags top_args fun n_val_args voids
  | fun `hasKey` buildIdKey   = buildSize
  | fun `hasKey` augmentIdKey = augmentSize
  | otherwise = SizeIs (iUnbox size) arg_discount (iUnbox res_discount)
  where
    some_val_args = n_val_args > 0

    size | some_val_args = 10 * (1 + n_val_args - voids)
         | otherwise     = 0
        -- The 1+ is for the function itself
        -- Add 1 for each non-trivial arg;
        -- the allocation cost, as in let(rec)

        --                  DISCOUNTS
        --  See Note [Function and non-function discounts]
    arg_discount | some_val_args && fun `elem` top_args
                 = unitBag (fun, ufFunAppDiscount dflags)
                 | otherwise
                 = emptyBag
        -- If the function is an argument and is applied
        -- to some values, give it an arg-discount

    res_discount | idArity fun > n_val_args = ufFunAppDiscount dflags
                 | otherwise                = 0
        -- If the function is partially applied, show a result discount
-- | Size of a saturated-or-partial data constructor application.
conSize :: DataCon -> Int -> ExprSize
conSize dc n_val_args
  | n_val_args == 0 = SizeIs (_ILIT(0)) emptyBag (_ILIT(10))   -- Like variables

  -- See Note [Unboxed tuple size and result discount]
  | isUnboxedTupleCon dc = SizeIs (_ILIT(0)) emptyBag (iUnbox (10 * (1 + n_val_args)))

  -- See Note [Constructor size and result discount]
  | otherwise = SizeIs (_ILIT(10)) emptyBag (iUnbox (10 * (1 + n_val_args)))
{-
Note [Constructor size and result discount]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Treat a constructors application as size 10, regardless of how many
arguments it has; we are keen to expose them (and we charge separately
for their args). We can't treat them as size zero, else we find that
(Just x) has size 0, which is the same as a lone variable; and hence
'v' will always be replaced by (Just x), where v is bound to Just x.
The "result discount" is applied if the result of the call is
scrutinised (say by a case). For a constructor application that will
mean the constructor application will disappear, so we don't need to
charge it to the function. So the discount should at least match the
cost of the constructor application, namely 10. But to give a bit
of extra incentive we give a discount of 10*(1 + n_val_args).
Simon M tried a MUCH bigger discount: (10 * (10 + n_val_args)),
and said it was an "unambiguous win", but its terribly dangerous
because a function with many many case branches, each finishing with
a constructor, can have an arbitrarily large discount. This led to
terrible code bloat: see Trac #6099.
Note [Unboxed tuple size and result discount]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
However, unboxed tuples count as size zero. I found occasions where we had
f x y z = case op# x y z of { s -> (# s, () #) }
and f wasn't getting inlined.
I tried giving unboxed tuples a *result discount* of zero (see the
commented-out line). Why? When returned as a result they do not
allocate, so maybe we don't want to charge so much for them If you
have a non-zero discount here, we find that workers often get inlined
back into wrappers, because it look like
f x = case $wf x of (# a,b #) -> (a,b)
and we are keener because of the case. However while this change
shrank binary sizes by 0.5% it also made spectral/boyer allocate 5%
more. All other changes were very small. So it's not a big deal but I
didn't adopt the idea.
Note [Function and non-function discounts]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We want a discount if the function is applied. A good example is
monadic combinators with continuation arguments, where inlining is
quite important.
But we don't want a big discount when a function is called many times
(see the detailed comments with Trac #6048) because if the function is
big it won't be inlined at its many call sites and no benefit results.
Indeed, we can get exponentially big inlinings this way; that is what
Trac #6048 is about.
On the other hand, for data-valued arguments, if there are lots of
case expressions in the body, each one will get smaller if we apply
the function to a constructor application, so we *want* a big discount
if the argument is scrutinised by many case expressions.
Conclusion:
- For functions, take the max of the discounts
- For data values, take the sum of the discounts
Note [Size of literal integers]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Literal integers *can* be big (mkInteger [...coefficients...]), but
need not be (S# n). We just use an arbitrary big-ish constant here
so that, in particular, we don't inline top-level defns like
n = S# 5
There's no point in doing so -- any optimisations will see the S#
through n's unfolding. Nor will a big size inhibit unfoldings functions
that mention a literal Integer, because the float-out pass will float
all those constants to top level.
-}
-- | Size of a primop application.  An out-of-line primop is charged an
-- extra unit per value argument, since a real call is generated for it.
primOpSize :: PrimOp -> Int -> ExprSize
primOpSize op n_val_args
  | primOpOutOfLine op = sizeN (base_size + n_val_args)
  | otherwise          = sizeN base_size
  where
    base_size = primOpCodeSize op
buildSize :: ExprSize
buildSize = SizeIs (_ILIT(0)) emptyBag (_ILIT(40))
        -- We really want to inline applications of build
        -- build t (\cn -> e) should cost only the cost of e (because build will be inlined later)
        -- Indeed, we should add a result_discount because build is
        -- very like a constructor.  We don't bother to check that the
        -- build is saturated (it usually is).  The zero size keeps the
        -- \c n abstraction free; the "40" result discount is rather arbitrary.
augmentSize :: ExprSize
augmentSize = SizeIs (_ILIT(0)) emptyBag (_ILIT(40))
        -- Ditto (augment t (\cn -> e) ys) should cost only the cost of
        -- e plus ys.  The zero size keeps the \c n abstraction free,
        -- and the "40" result discount is as for buildSize.
-- When we return a lambda, give a discount if it's used (applied)
lamScrutDiscount :: DynFlags -> ExprSize -> ExprSize
lamScrutDiscount dflags (SizeIs n vs _) = SizeIs n vs (iUnbox (ufFunAppDiscount dflags))
lamScrutDiscount _      TooBig          = TooBig
{-
Note [addAltSize result discounts]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When adding the size of alternatives, we *add* the result discounts
too, rather than take the *maximum*. For a multi-branch case, this
gives a discount for each branch that returns a constructor, making us
keener to inline. I did try using 'max' instead, but it makes nofib
'rewrite' and 'puzzle' allocate significantly more, and didn't make
binary sizes shrink significantly either.
Note [Discounts and thresholds]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Constants for discounts and thresholds are defined in main/DynFlags,
all of form ufXxxx. They are:
ufCreationThreshold
At a definition site, if the unfolding is bigger than this, we
may discard it altogether
ufUseThreshold
At a call site, if the unfolding, less discounts, is smaller than
this, then it's small enough inline
ufKeenessFactor
Factor by which the discounts are multiplied before
subtracting from size
ufDictDiscount
The discount for each occurrence of a dictionary argument
as an argument of a class method. Should be pretty small
else big functions may get inlined
ufFunAppDiscount
Discount for a function argument that is applied. Quite
large, because if we inline we avoid the higher-order call.
ufDearOp
The size of a foreign call or not-dupable PrimOp
Note [Function applications]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In a function application (f a b)
- If 'f' is an argument to the function being analysed,
and there's at least one value arg, record a FunAppDiscount for f
- If the application is a PAP (arity > 2 in this example)
record a *result* discount (because inlining
with "extra" args in the call may mean that we now
get a saturated application)
Code for manipulating sizes
-}
-- | Result of 'sizeExpr': either over the size budget, or a concrete
-- size together with per-argument discounts and a result discount.
data ExprSize = TooBig
              | SizeIs FastInt          -- Size found
                       !(Bag (Id,Int))  -- Arguments cased herein, and discount for each such
                       FastInt          -- Size to subtract if result is scrutinised
                                        -- by a case expression
-- Print the size and result discount; discounts bag omitted for brevity
instance Outputable ExprSize where
  ppr TooBig         = ptext (sLit "TooBig")
  ppr (SizeIs a _ c) = brackets (int (iBox a) <+> int (iBox c))
-- subtract the discount before deciding whether to bale out. eg. we
-- want to inline a large constructor application into a selector:
--      tup = (a_1, ..., a_99)
--      x = case tup of ...
--
mkSizeIs :: FastInt -> FastInt -> Bag (Id, Int) -> FastInt -> ExprSize
mkSizeIs max n xs d | (n -# d) ># max = TooBig
                    | otherwise       = SizeIs n xs d
-- | The larger of two sizes (by the raw size field); TooBig dominates.
maxSize :: ExprSize -> ExprSize -> ExprSize
maxSize s1 s2 = case (s1, s2) of
  (TooBig, _) -> TooBig
  (_, TooBig) -> TooBig
  (SizeIs n1 _ _, SizeIs n2 _ _)
    | n1 ># n2  -> s1
    | otherwise -> s2
-- | The zero size: no cost and no discounts.
sizeZero :: ExprSize
sizeZero = SizeIs (_ILIT(0)) emptyBag (_ILIT(0))

-- | A plain size with no argument or result discounts.
sizeN :: Int -> ExprSize
sizeN n = SizeIs (iUnbox n) emptyBag (_ILIT(0))
{-
************************************************************************
* *
\subsection[considerUnfolding]{Given all the info, do (not) do the unfolding}
* *
************************************************************************
We use 'couldBeSmallEnoughToInline' to avoid exporting inlinings that
we ``couldn't possibly use'' on the other side. Can be overridden w/
flaggery. Just the same as smallEnoughToInline, except that it has no
actual arguments.
-}
-- | True if the binder-stripped RHS fits within the given size budget,
-- i.e. the unfolding could conceivably ever be used at a call site.
couldBeSmallEnoughToInline :: DynFlags -> Int -> CoreExpr -> Bool
couldBeSmallEnoughToInline dflags threshold rhs
  = within_budget (sizeExpr dflags (iUnbox threshold) [] body)
  where
    (_, body) = collectBinders rhs

    within_budget TooBig = False
    within_budget _      = True
----------------
-- | True if the unfolding's recorded size is within the use threshold.
smallEnoughToInline :: DynFlags -> Unfolding -> Bool
smallEnoughToInline dflags (CoreUnfolding {uf_guidance = UnfIfGoodArgs {ug_size = size}})
  = size <= ufUseThreshold dflags
smallEnoughToInline _ _
  = False
----------------
certainlyWillInline :: DynFlags -> Unfolding -> Maybe Unfolding
-- Sees if the unfolding is pretty certain to inline
-- If so, return a *stable* unfolding for it, that will always inline
certainlyWillInline dflags unf@(CoreUnfolding { uf_guidance = guidance, uf_tmpl = expr })
  = case guidance of
      UnfNever   -> Nothing
      UnfWhen {} -> Just (unf { uf_src = InlineStable })

      -- The UnfIfGoodArgs case seems important.  If we w/w small functions
      -- binary sizes go up by 10%!  (This is with SplitObjs.)  I'm not totally
      -- sure why.
      UnfIfGoodArgs { ug_size = size, ug_args = args }
        | not (null args)  -- See Note [certainlyWillInline: be careful of thunks]
        , let arity = length args
        , size - (10 * (arity + 1)) <= ufUseThreshold dflags
        -> Just (unf { uf_src      = InlineStable
                     , uf_guidance = UnfWhen { ug_arity     = arity
                                             , ug_unsat_ok  = unSaturatedOk
                                             , ug_boring_ok = inlineBoringOk expr } })
             -- Note the "unsaturatedOk". A function like  f = \ab. a
             -- will certainly inline, even if partially applied (f e), so we'd
             -- better make sure that the transformed inlining has the same property

      _ -> Nothing

certainlyWillInline _ unf@(DFunUnfolding {})
  = Just unf

certainlyWillInline _ _
  = Nothing
{-
Note [certainlyWillInline: be careful of thunks]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Don't claim that thunks will certainly inline, because that risks work
duplication. Even if the work duplication is not great (eg is_cheap
holds), it can make a big difference in an inner loop. In Trac #5623 we
found that the WorkWrap phase thought that
y = case x of F# v -> F# (v +# v)
was certainlyWillInline, so the addition got duplicated.
************************************************************************
* *
\subsection{callSiteInline}
* *
************************************************************************
This is the key function. It decides whether to inline a variable at a call site
callSiteInline is used at call sites, so it is a bit more generous.
It's a very important function that embodies lots of heuristics.
A non-WHNF can be inlined if it doesn't occur inside a lambda,
and occurs exactly once or
occurs once in each branch of a case and is small
If the thing is in WHNF, there's no danger of duplicating work,
so we can inline if it occurs once, or is small
NOTE: we don't want to inline top-level functions that always diverge.
It just makes the code bigger. It turns out that the convenient way to prevent
them inlining is to give them a NOINLINE pragma, which we do in
StrictAnal.addStrictnessInfoToTopId
-}
callSiteInline :: DynFlags
               -> Id                -- The Id
               -> Bool              -- True <=> unfolding is active
               -> Bool              -- True if there are no arguments at all (incl type args)
               -> [ArgSummary]      -- One for each value arg; True if it is interesting
               -> CallCtxt          -- True <=> continuation is interesting
               -> Maybe CoreExpr    -- Unfolding, if any
-- | How interesting an actual argument looks at a call site.
data ArgSummary = TrivArg       -- Nothing interesting
                | NonTrivArg    -- Arg has structure
                | ValueArg      -- Arg is a con-app or PAP
                                -- ..or con-like. Note [Conlike is interesting]
instance Outputable ArgSummary where
  ppr TrivArg    = ptext (sLit "TrivArg")
  ppr NonTrivArg = ptext (sLit "NonTrivArg")
  ppr ValueArg   = ptext (sLit "ValueArg")
-- | True for every argument summary except 'TrivArg'.
nonTriv :: ArgSummary -> Bool
nonTriv arg = case arg of
  TrivArg -> False
  _       -> True
-- | The context in which a call appears; used to judge whether the
-- surroundings make inlining worthwhile.
data CallCtxt
  = BoringCtxt
  | RhsCtxt             -- Rhs of a let-binding; see Note [RHS of lets]
  | DiscArgCtxt         -- Argument of a function with non-zero arg discount
  | RuleArgCtxt         -- We are somewhere in the argument of a function with rules
  | ValAppCtxt          -- We're applied to at least one value arg
                        -- This arises when we have ((f x |> co) y)
                        -- Then the (f x) has argument 'x' but in a ValAppCtxt
  | CaseCtxt            -- We're the scrutinee of a case
                        -- that decomposes its scrutinee
instance Outputable CallCtxt where
  ppr CaseCtxt    = ptext (sLit "CaseCtxt")
  ppr ValAppCtxt  = ptext (sLit "ValAppCtxt")
  ppr BoringCtxt  = ptext (sLit "BoringCtxt")
  ppr RhsCtxt     = ptext (sLit "RhsCtxt")
  ppr DiscArgCtxt = ptext (sLit "DiscArgCtxt")
  ppr RuleArgCtxt = ptext (sLit "RuleArgCtxt")
-- Dispatch on the kind of unfolding attached to the Id; only ordinary
-- CoreUnfoldings with an active unfolding are candidates for inlining.
callSiteInline dflags id active_unfolding lone_variable arg_infos cont_info
  = case idUnfolding id of
      -- idUnfolding checks for loop-breakers, returning NoUnfolding
      -- Things with an INLINE pragma may have an unfolding *and*
      -- be a loop breaker  (maybe the knot is not yet untied)
      CoreUnfolding { uf_tmpl = unf_template, uf_is_top = is_top
                    , uf_is_work_free = is_wf
                    , uf_guidance = guidance, uf_expandable = is_exp }
        | active_unfolding -> tryUnfolding dflags id lone_variable
                                           arg_infos cont_info unf_template is_top
                                           is_wf is_exp guidance
        | otherwise -> traceInline dflags "Inactive unfolding:" (ppr id) Nothing
      NoUnfolding      -> Nothing
      OtherCon {}      -> Nothing
      DFunUnfolding {} -> Nothing     -- Never unfold a DFun
-- | Emit an inlining-decision trace message when both
-- -ddump-inlinings and -dverbose-core2core are enabled;
-- otherwise just return the result unchanged.
traceInline :: DynFlags -> String -> SDoc -> a -> a
traceInline dflags str doc result =
  if dopt Opt_D_dump_inlinings dflags && dopt Opt_D_verbose_core2core dflags
    then pprTrace str doc result
    else result
-- | Given an active CoreUnfolding, decide (per its guidance) whether
-- to inline at this call site, tracing the decision when requested.
tryUnfolding :: DynFlags -> Id -> Bool -> [ArgSummary] -> CallCtxt
             -> CoreExpr -> Bool -> Bool -> Bool -> UnfoldingGuidance
             -> Maybe CoreExpr
tryUnfolding dflags id lone_variable
             arg_infos cont_info unf_template is_top
             is_wf is_exp guidance
 = case guidance of
     UnfNever -> traceInline dflags str (ptext (sLit "UnfNever")) Nothing

     UnfWhen { ug_arity = uf_arity, ug_unsat_ok = unsat_ok, ug_boring_ok = boring_ok }
        | enough_args && (boring_ok || some_benefit)
                -- See Note [INLINE for small functions (3)]
        -> traceInline dflags str (mk_doc some_benefit empty True) (Just unf_template)
        | otherwise
        -> traceInline dflags str (mk_doc some_benefit empty False) Nothing
        where
          some_benefit = calc_some_benefit uf_arity
          enough_args  = (n_val_args >= uf_arity) || (unsat_ok && n_val_args > 0)

     UnfIfGoodArgs { ug_args = arg_discounts, ug_res = res_discount, ug_size = size }
        | is_wf && some_benefit && small_enough
        -> traceInline dflags str (mk_doc some_benefit extra_doc True) (Just unf_template)
        | otherwise
        -> traceInline dflags str (mk_doc some_benefit extra_doc False) Nothing
        where
          some_benefit    = calc_some_benefit (length arg_discounts)
          extra_doc       = text "discounted size =" <+> int discounted_size
          discounted_size = size - discount
          small_enough    = discounted_size <= ufUseThreshold dflags
          discount        = computeDiscount dflags arg_discounts
                                            res_discount arg_infos cont_info
 where
   mk_doc some_benefit extra_doc yes_or_no
     = vcat [ text "arg infos" <+> ppr arg_infos
            , text "interesting continuation" <+> ppr cont_info
            , text "some_benefit" <+> ppr some_benefit
            , text "is exp:" <+> ppr is_exp
            , text "is work-free:" <+> ppr is_wf
            , text "guidance" <+> ppr guidance
            , extra_doc
            , text "ANSWER =" <+> if yes_or_no then text "YES" else text "NO"]

   str        = "Considering inlining: " ++ showSDocDump dflags (ppr id)
   n_val_args = length arg_infos

           -- some_benefit is used when the RHS is small enough
           -- and the call has enough (or too many) value
           -- arguments (ie n_val_args >= arity). But there must
           -- be *something* interesting about some argument, or the
           -- result context, to make it worth inlining
   calc_some_benefit :: Arity -> Bool   -- The Arity is the number of args
                                        -- expected by the unfolding
   calc_some_benefit uf_arity
      | not saturated = interesting_args        -- Under-saturated
                                        -- Note [Unsaturated applications]
      | otherwise = interesting_args    -- Saturated or over-saturated
                 || interesting_call
      where
        saturated      = n_val_args >= uf_arity
        over_saturated = n_val_args > uf_arity
        interesting_args = any nonTriv arg_infos
                -- NB: (any nonTriv arg_infos) looks at the
                -- over-saturated args too which is "wrong";
                -- but if over-saturated we inline anyway.

        interesting_call
          | over_saturated
          = True
          | otherwise
          = case cont_info of
              CaseCtxt    -> not (lone_variable && is_wf)  -- Note [Lone variables]
              ValAppCtxt  -> True                          -- Note [Cast then apply]
              RuleArgCtxt -> uf_arity > 0  -- See Note [Unfold info lazy contexts]
              DiscArgCtxt -> uf_arity > 0  --
              RhsCtxt     -> uf_arity > 0  --
              _           -> not is_top && uf_arity > 0    -- Note [Nested functions]
                                                           -- Note [Inlining in ArgCtxt]
{-
Note [Unfold into lazy contexts], Note [RHS of lets]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When the call is the argument of a function with a RULE, or the RHS of a let,
we are a little bit keener to inline. For example
f y = (y,y,y)
g y = let x = f y in ...(case x of (a,b,c) -> ...) ...
We'd inline 'f' if the call was in a case context, and it kind-of-is,
only we can't see it. Also
x = f v
could be expensive whereas
x = case v of (a,b) -> a
is patently cheap and may allow more eta expansion.
So we treat the RHS of a let as not-totally-boring.
Note [Unsaturated applications]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When a call is not saturated, we *still* inline if one of the
arguments has interesting structure. That's sometimes very important.
A good example is the Ord instance for Bool in Base:
Rec {
$fOrdBool =GHC.Classes.D:Ord
@ Bool
...
$cmin_ajX
$cmin_ajX [Occ=LoopBreaker] :: Bool -> Bool -> Bool
$cmin_ajX = GHC.Classes.$dmmin @ Bool $fOrdBool
}
But the defn of GHC.Classes.$dmmin is:
$dmmin :: forall a. GHC.Classes.Ord a => a -> a -> a
{- Arity: 3, HasNoCafRefs, Strictness: SLL,
Unfolding: (\ @ a $dOrd :: GHC.Classes.Ord a x :: a y :: a ->
case @ a GHC.Classes.<= @ a $dOrd x y of wild {
GHC.Types.False -> y GHC.Types.True -> x }) -}
We *really* want to inline $dmmin, even though it has arity 3, in
order to unravel the recursion.
Note [Things to watch]
~~~~~~~~~~~~~~~~~~~~~~
* { y = I# 3; x = y `cast` co; ...case (x `cast` co) of ... }
Assume x is exported, so not inlined unconditionally.
Then we want x to inline unconditionally; no reason for it
not to, and doing so avoids an indirection.
* { x = I# 3; ....f x.... }
Make sure that x does not inline unconditionally!
Lest we get extra allocation.
Note [Inlining an InlineRule]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
An InlineRules is used for
(a) programmer INLINE pragmas
(b) inlinings from worker/wrapper
For (a) the RHS may be large, and our contract is that we *only* inline
when the function is applied to all the arguments on the LHS of the
source-code defn. (The uf_arity in the rule.)
However for worker/wrapper it may be worth inlining even if the
arity is not satisfied (as we do in the CoreUnfolding case) so we don't
require saturation.
Note [Nested functions]
~~~~~~~~~~~~~~~~~~~~~~~
If a function has a nested defn we also record some-benefit, on the
grounds that we are often able to eliminate the binding, and hence the
allocation, for the function altogether; this is good for join points.
But this only makes sense for *functions*; inlining a constructor
doesn't help allocation unless the result is scrutinised. UNLESS the
constructor occurs just once, albeit possibly in multiple case
branches. Then inlining it doesn't increase allocation, but it does
increase the chance that the constructor won't be allocated at all in
the branches that don't use it.
Note [Cast then apply]
~~~~~~~~~~~~~~~~~~~~~~
Consider
myIndex = __inline_me ( (/\a. <blah>) |> co )
co :: (forall a. a -> a) ~ (forall a. T a)
... /\a.\x. case ((myIndex a) |> sym co) x of { ... } ...
We need to inline myIndex to unravel this; but the actual call (myIndex a) has
no value arguments. The ValAppCtxt gives it enough incentive to inline.
Note [Inlining in ArgCtxt]
~~~~~~~~~~~~~~~~~~~~~~~~~~
The condition (arity > 0) here is very important, because otherwise
we end up inlining top-level stuff into useless places; eg
x = I# 3#
f = \y. g x
This can make a very big difference: it adds 16% to nofib 'integer' allocs,
and 20% to 'power'.
At one stage I replaced this condition by 'True' (leading to the above
slow-down). The motivation was test eyeball/inline1.hs; but that seems
to work ok now.
NOTE: arguably, we should inline in ArgCtxt only if the result of the
call is at least CONLIKE. At least for the cases where we use ArgCtxt
for the RHS of a 'let', we only profit from the inlining if we get a
CONLIKE thing (modulo lets).
Note [Lone variables] See also Note [Interaction of exprIsWorkFree and lone variables]
~~~~~~~~~~~~~~~~~~~~~ which appears below
The "lone-variable" case is important. I spent ages messing about
with unsatisfactory variants, but this is nice. The idea is that if a
variable appears all alone
as an arg of lazy fn, or rhs BoringCtxt
as scrutinee of a case CaseCtxt
as arg of a fn ArgCtxt
AND
it is bound to a cheap expression
then we should not inline it (unless there is some other reason,
e.g. it is the sole occurrence). That is what is happening at
the use of 'lone_variable' in 'interesting_call'.
Why? At least in the case-scrutinee situation, turning
let x = (a,b) in case x of y -> ...
into
let x = (a,b) in case (a,b) of y -> ...
and thence to
let x = (a,b) in let y = (a,b) in ...
is bad if the binding for x will remain.
Another example: I discovered that strings
were getting inlined straight back into applications of 'error'
because the latter is strict.
s = "foo"
f = \x -> ...(error s)...
Fundamentally such contexts should not encourage inlining because the
context can ``see'' the unfolding of the variable (e.g. case or a
RULE) so there's no gain. If the thing is bound to a value.
However, watch out:
* Consider this:
foo = _inline_ (\n. [n])
bar = _inline_ (foo 20)
baz = \n. case bar of { (m:_) -> m + n }
Here we really want to inline 'bar' so that we can inline 'foo'
and the whole thing unravels as it should obviously do. This is
important: in the NDP project, 'bar' generates a closure data
structure rather than a list.
So the non-inlining of lone_variables should only apply if the
unfolding is regarded as cheap; because that is when exprIsConApp_maybe
looks through the unfolding. Hence the "&& is_wf" in the
InlineRule branch.
* Even a type application or coercion isn't a lone variable.
Consider
case $fMonadST @ RealWorld of { :DMonad a b c -> c }
We had better inline that sucker! The case won't see through it.
For now, I'm treating a variable applied to types
in a *lazy* context "lone". The motivating example was
f = /\a. \x. BIG
g = /\a. \y. h (f a)
There's no advantage in inlining f here, and perhaps
a significant disadvantage. Hence some_val_args in the Stop case
Note [Interaction of exprIsWorkFree and lone variables]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The lone-variable test says "don't inline if a case expression
scrutinises a lone variable whose unfolding is cheap. It's very
important that, under these circumstances, exprIsConApp_maybe
can spot a constructor application. So, for example, we don't
consider
let x = e in (x,x)
to be cheap, and that's good because exprIsConApp_maybe doesn't
think that expression is a constructor application.
In the 'not (lone_variable && is_wf)' test, I used to test is_value
rather than is_wf, which was utterly wrong, because the above
expression responds True to exprIsHNF, which is what sets is_value.
This kind of thing can occur if you have
{-# INLINE foo #-}
foo = let x = e in (x,x)
which Roman did.
-}
-- | Compute the total discount to subtract from a candidate inlining's
-- size, based on how informative the supplied arguments and the call
-- context are.  Larger discounts make inlining more likely.
computeDiscount :: DynFlags -> [Int] -> Int -> [ArgSummary] -> CallCtxt
                -> Int
computeDiscount dflags arg_discounts res_discount arg_infos cont_info
        -- We multiply the raw discounts (args_discount and result_discount)
        -- by opt_UnfoldingKeenessFactor because the former have to do with
        --  *size* whereas the discounts imply that there's some extra
        --  *efficiency* to be gained (e.g. beta reductions, case reductions)
        -- by inlining.
  = 10          -- Discount of 10 because the result replaces the call
                -- so we count 10 for the function itself
    + 10 * length actual_arg_discounts
               -- Discount of 10 for each arg supplied,
               -- because the result replaces the call
    + round (ufKeenessFactor dflags *
             fromIntegral (total_arg_discount + res_discount'))
  where
    actual_arg_discounts = zipWith mk_arg_discount arg_discounts arg_infos
    total_arg_discount   = sum actual_arg_discounts

    -- Only a ValueArg earns the per-argument discount recorded in the
    -- unfolding; trivial args earn nothing, non-trivial ones a flat 10.
    mk_arg_discount _        TrivArg    = 0
    mk_arg_discount _        NonTrivArg = 10
    mk_arg_discount discount ValueArg   = discount

    res_discount'
      | LT <- arg_discounts `compareLength` arg_infos
      = res_discount   -- Over-saturated
      | otherwise
      = case cont_info of
           BoringCtxt  -> 0
           CaseCtxt    -> res_discount  -- Presumably a constructor
           ValAppCtxt  -> res_discount  -- Presumably a function
           _           -> 40 `min` res_discount
                -- ToDo: this 40 `min` res_discount doesn't seem right
                --   for DiscArgCtxt it shouldn't matter because the function will
                --    get the arg discount for any non-triv arg
                --   for RuleArgCtxt we do want to be keener to inline; but not only
                --    constructor results
                --   for RhsCtxt I suppose that exposing a data con is good in general
                --   And 40 seems very arbitrary
                --
                -- res_discount can be very large when a function returns
                -- constructors; but we only want to invoke that large discount
                -- when there's a case continuation.
                -- Otherwise we, rather arbitrarily, threshold it.  Yuk.
                -- But we want to avoid inlining large functions that return
                -- constructors into contexts that are simply "interesting"
-- HLint refactoring fixture: the nested lambdas are the point of the
-- test (they should be collapsed to \x y -> ...); leave them as written.
f = foo (\x -> \y -> x x y y)
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="el-GR">
    <title>Image Location and Privacy Scanner | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | kingthorin/zap-extensions | addOns/imagelocationscanner/src/main/javahelp/org/zaproxy/zap/extension/imagelocationscanner/resources/help_el_GR/helpset_el_GR.hs | apache-2.0 | 995 | 104 | 29 | 162 | 429 | 215 | 214 | -1 | -1 |
import Graphics.Rendering.OpenGL
import Graphics.UI.GLUT
-- | Twelve vertices evenly spaced around the unit circle in the z = 0
-- plane, one per "clock position".
myPoints :: [(GLfloat,GLfloat,GLfloat)]
myPoints = [ (sin angle, cos angle, 0.0)
           | k <- [1..12]
           , let angle = 2*pi*k/12 ]
-- Entry point: initialise GLUT, open a window, register the display
-- callback and hand control to the GLUT main loop.
main = do
  (progname, _) <- getArgsAndInitialize
  createWindow "Hello World"
  displayCallback $= display
  mainLoop

-- Display callback: clear the colour buffer, draw 'myPoints' as raw
-- GL points, and flush the pipeline.
display = do
  clear [ColorBuffer]
  renderPrimitive Points $ mapM_ (\(x, y, z)->vertex$Vertex3 x y z) myPoints
  flush
{-# OPTIONS_GHC -fno-warn-redundant-constraints #-}
{-# LANGUAGE TypeFamilies, UndecidableInstances #-}
module T2448 where
import Data.Kind (Type)
-- Demonstrates a bug in propagating type equality constraints

-- | Vector spaces; the associated type 'Scalar' names the scalar field.
class VectorSpace v where
  type Scalar v :: Type

-- | Vector spaces with an inner product (no methods needed for this test).
class VectorSpace v => InnerSpace v

-- Pairs of vector spaces over the same scalar form a vector space.
instance (VectorSpace u,VectorSpace v, Scalar u ~ Scalar v) =>
         VectorSpace (u,v)
  where
    type Scalar (u,v) = Scalar u

-- The equality superclass constraint here is what this regression test
-- exercises (GHC ticket #2448).
instance (InnerSpace u,InnerSpace v, Scalar u ~ Scalar v) => InnerSpace (u,v)
-- Test for trac ticket #1287; ghc 6.6 and 6.6.1 panicked on this
module ShouldCompile where
{-# SPECIALIZE delta' :: Num b => Int -> Int -> b -> b -> b #-}
-- | Selector: yield the second alternative when the two keys coincide,
-- the first otherwise.
delta' :: Eq a => a -> a -> b -> b -> b
delta' x y e f
  | x == y    = f
  | otherwise = e
{-# SPECIALIZE delta :: Num b => Int -> Int -> b #-}
-- | Numeric Kronecker delta: 1 when the arguments are equal, else 0.
delta :: (Eq a, Num b) => a -> a -> b
delta x y = delta' x y 0 1
| spacekitteh/smcghc | testsuite/tests/simplCore/should_compile/simpl016.hs | bsd-3-clause | 360 | 0 | 9 | 97 | 111 | 60 | 51 | 7 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Web.Spock.Safe
import Data.Aeson (object, (.=))
import Data.Monoid ((<>))
import Data.Maybe (fromMaybe)
import Helpers (breakTextByDot)
import System.Environment (lookupEnv)
import qualified FizzBuzz as FB
main :: IO ()
main = do
  -- Honour the PORT environment variable, defaulting to 3000.
  maybePort <- lookupEnv "PORT"
  let port = read $ fromMaybe "3000" maybePort
  runSpock port $ spockT id $ do
    -- The root redirects to a default FizzBuzz specification.
    get "/" $ redirect "3,Fizz;5,Buzz"
    get ("/" <//> var) $ \ftype -> do
      -- A ".json" suffix on the path selects the JSON rendering;
      -- anything else gets plain text.
      case breakTextByDot ftype of
        (t, ".json") -> json $ object ["answer" .= FB.fizzerBuzzer t (1, 100)]
        (t, _) -> text $ FB.fizzerBuzzer t (1, 100)
| ifo/fbaas | src/Main.hs | isc | 651 | 0 | 22 | 136 | 245 | 132 | 113 | 19 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Commands
( respond
)
where
import qualified Data.ByteString.Char8 as B
import Data.Maybe (listToMaybe)
import Data.UUID (fromString)
import Data.UUID.V4 (nextRandom)
import System.IO (Handle)
import Control.Applicative ((<$>))
import Control.Monad.IO.Class
import Control.Monad.State
import Control.Monad.Except
import Control.Monad.Reader
import Messages
import Game
import Actions
-- | Dispatch one parsed client 'Message' against the game state and
-- return the updated state.
respond :: Message -> Handle -> GameState -> IO GameState
respond message handle state = snd <$> runStateT (runReaderT (cmd (command message) handle) message) state

-- | Command table, keyed on the message's command word.
cmd :: B.ByteString -> Handle -> ReaderT Message (StateT GameState IO) ()
cmd "action" = \handle -> do
  message <- ask
  -- "action" needs at least two parts; otherwise reject the message.
  if length (mParts message) < 2
    then invalid handle
    else get >>= liftIO . runAction handle message >>= put
cmd "robot_stopped" = \handle -> do
  message <- ask
  -- "robot_stopped" carries exactly one part: the robot's identifier.
  if length (mParts message) /= 1
    then invalid handle
    else do
      let idPart = head $ mParts message
      modify . changeRobot idPart $ \r -> r { status = Off }
cmd _ = invalid

-- | Stub for sending an error description to the client; not yet
-- implemented, so both arguments are currently unused.
complain :: B.ByteString -> Handle -> ReaderT Message (StateT GameState IO) ()
complain err handle = undefined

-- | Report the current message back to its sender as invalid.
invalid :: Handle -> ReaderT Message (StateT GameState IO) ()
invalid handle = liftIO . (flip sendInvalid handle) =<< ask
| shak-mar/botstrats | server/Commands.hs | mit | 1,322 | 0 | 15 | 248 | 455 | 242 | 213 | 36 | 3 |
{-# LANGUAGE TemplateHaskell #-}
-- このソースコードは、次の記事を信頼して書かれました.
-- http://osecpu.osask.jp/wiki/?page0104
-- http://osecpu.osask.jp/wiki/?page0092
-- 変数のレジスタ割当は,レキシカルスコープのように,ある時点で使用されている変数を管理すれば良い.
-- ラベルの割当は,プログラム中でユニークであるべきなので,コンパイル中全体で管理する.
module ChageComp where
import Data.Word
import Data.List (genericLength, tails)
import Control.Monad.State
import Control.Monad
-- Lensなる便利なライブラリを使うことにした
import Control.Lens
import Control.Applicative
import Inst
import AST
import Type
import Typing
import qualified IR
-- Tracks the assignment of program variables to machine registers.
data Frame = Frame { _intVars :: [(Var, Reg)]   -- integer variables in use
                   , _ptrVars :: [(Var, PReg)]  -- pointer variables in use
                   , _intRest :: [Reg]          -- free integer registers
                   , _ptrRest :: [PReg]         -- free pointer registers
                   }

makeLenses ''Frame

-- | A frame with no variables bound and all registers R00..R27 free.
emptyFrame :: Frame
emptyFrame = Frame [] [] [Reg 0 .. Reg 0x27] [PReg 0 .. PReg 0x27]
-- State carried through compilation; used only internally.
data CompileState = CS { _labels :: Word32, -- number of labels introduced so far
                         _frame :: Frame
                       }

makeLenses ''CompileState

-- Upper bound on the registers usable for variables: [R00 .. R27].
localRegisterCount = 0x27

-- Scratch (temporary) registers.
-- NOTE(review): the original comment said R28-R2B, but the code uses
-- R3A/R3B -- confirm which range is intended.
tmp1 = Reg 0x3A
tmp2 = Reg 0x3B

-- The 32-bit width specifier.
spec32 = BitSpec 32

-- Pointer registers with fixed roles in the generated code.
p3f = PReg 0x3F
p30 = PReg 0x30
p3e = PReg 0x3E
p2f = PReg 0x2F

-- Labels come in two flavours: jump-only targets and read/write data labels.
jumpOnly = LabelOpt 0
readWrite = LabelOpt 1

-- OSECPU API table: name -> (API number, argument registers).
apiList = [("api_drawPoint", (Imm 0x0002, [Reg 0x31 .. Reg 0x34])),
           ("api_drawLine", (Imm 0x0003, [Reg 0x31 .. Reg 0x36])),
           ("api_drawRect", (Imm 0x0004, [Reg 0x31 .. Reg 0x36])),
           ("api_drawOval", (Imm 0x0005, [Reg 0x31 .. Reg 0x36])),
           ("api_exit", (Imm 0x0008, [Reg 0x31 .. Reg 0x31])),
           ("api_sleep", (Imm 0x0009, [Reg 0x31 .. Reg 0x32])),
           ("api_openWin", (Imm 0x0010, [Reg 0x31 .. Reg 0x34]))
          ]
-- Run a compilation action in a nested scope: the register frame is
-- saved before the action and restored afterwards, so bindings made
-- inside do not leak out.
extendScope :: State CompileState a -> State CompileState a
extendScope m = do
  fr <- use frame
  ret <- m
  frame .= fr
  return ret
-- Look up the register assigned to an integer variable in the current
-- frame; aborts compilation with 'error' for unknown variables.
lookupVar :: Var -> State CompileState Reg
lookupVar var@(Var s) = do
  vars <- use (frame.intVars)
  case lookup var vars of
    Nothing -> error $ "undefined variable: " ++ s
    Just rg -> return rg

-- Look up the pointer register assigned to a pointer variable.
lookupPtr :: Var -> State CompileState PReg
lookupPtr ptr@(Var s) = do
  vars <- use (frame.ptrVars)
  case lookup ptr vars of
    Nothing -> error $ "undefined pointer variable: " ++ s
    Just pr -> return pr
-- | True when the integer variable already has a register assigned.
alreadyDefinedVar :: Var -> State CompileState Bool
alreadyDefinedVar v = do
  bindings <- use (frame.intVars)
  return $ any ((v ==) . fst) bindings

-- | True when the pointer variable already has a register assigned.
alreadyDefinedPtr :: Var -> State CompileState Bool
alreadyDefinedPtr v = do
  bindings <- use (frame.ptrVars)
  return $ any ((v ==) . fst) bindings
-- Allocate the next free integer register to a new variable.
-- Defining the same variable twice is a compile-time error.
defineVar var@(Var v) = do
  defd <- alreadyDefinedVar var
  when defd (error $ "already defined var: " ++ v)
  reg <- use (frame.intRest) <&> head
  frame.intVars %= ((var, reg) :)
  frame.intRest %= tail
  return $ reg

-- Allocate the next free pointer register to a new pointer variable.
definePtr ptr@(Var v) = do
  defd <- alreadyDefinedPtr ptr
  when defd (error $ "already defined ptr: " ++ v)
  reg <- use (frame.ptrRest) <&> head
  frame.ptrVars %= ((ptr, reg) :)
  frame.ptrRest %= tail
  return $ reg
-- releaseVar var@(Var v) = do
--     frame.intVars %= filter ((v ==) . fst)

-- Mint a fresh label.  Labels must be unique across the whole program,
-- so the counter lives in 'CompileState' rather than in any frame.
genLabel :: State CompileState Label
genLabel = do
  lb <- use labels
  labels += 1
  return $ Label lb
-- Translate front-end arithmetic operators to their instructions.
arith And = AND
arith Xor = XOR
arith Or  = OR
arith Add = ADD
arith Mul = MUL

-- Translate front-end comparison operators to their instructions.
comp Cmpe  = CMPE
comp Cmpne = CMPNE
comp Cmpl  = CMPL
comp Cmple = CMPLE
comp Cmpg  = CMPG
comp Cmpge = CMPGE
-- | Compile a normalised IR program to an instruction sequence,
-- starting from an empty register frame and label counter 0.
compile :: [IR.IR] -> Program
compile is = Program $ evalState (compile' is) (CS 0 emptyFrame)
    where
      compile' :: [IR.IR] -> State CompileState [Inst]
      -- Each IR node is compiled together with the variables referenced
      -- by the *following* nodes (its "living variables").
      compile' irs = concat <$> zipWithM compileIR irs (map concat $ tails $ map IR.varRefs irs)

      compileIR :: IR.IR -> [Var] -> State CompileState [Inst]
      compileIR ir livingVars =
          case ir of
            -- if: invert the condition, conditionally jump over the
            -- consequent to the alternative branch.
            IR.If var csq alt ->
                do cond <- lookupVar var
                   c1 <- extendScope (compile' csq)
                   c2 <- extendScope (compile' alt)
                   ifLabel <- genLabel
                   endLabel <- genLabel
                   return $ [LIMM spec32 tmp2 (Imm 1),
                             XOR spec32 tmp1 cond tmp2,
                             CND tmp1, PLIMM p3f ifLabel] ++
                            c1 ++ [PLIMM p3f endLabel, LB jumpOnly ifLabel] ++
                            c2 ++ [LB jumpOnly endLabel]
            -- while: re-evaluate the condition at the top of each trip.
            IR.While var cond body ->
                do c1 <- compile' cond
                   cv <- lookupVar var
                   c2 <- extendScope (compile' body)
                   startLabel <- genLabel
                   endLabel <- genLabel
                   return $ [LB jumpOnly startLabel] ++ c1 ++
                            [LIMM spec32 tmp2 (Imm 1), XOR spec32 tmp1 cv tmp2,
                             CND tmp1, PLIMM p3f endLabel] ++
                            c2 ++ [PLIMM p3f startLabel, LB jumpOnly endLabel]
            -- Declarations allocate a register, then copy the initial value.
            IR.Declare var S32Int val ->
                do defineVar var
                   r <- lookupVar var
                   v <- lookupVar val
                   return [OR spec32 r v v]
            IR.Declare var (Pointer _) val ->
                do definePtr var
                   p <- lookupPtr var
                   v <- lookupPtr val
                   return [PCP p v]
            IR.Arith op var e1 e2 ->
                do defineVar var
                   r <- lookupVar var
                   v1 <- lookupVar e1
                   v2 <- lookupVar e2
                   return [arith op spec32 r v1 v2]
            IR.Comp op var e1 e2 ->
                do defineVar var
                   r <- lookupVar var
                   v1 <- lookupVar e1
                   v2 <- lookupVar e2
                   return [comp op spec32 spec32 r v1 v2]
            IR.ConstS32Int var int ->
                do defineVar var
                   r <- lookupVar var
                   return [LIMM spec32 r (Imm (fromInteger (toInteger int)))]
            -- Assignments copy into an already-allocated register
            -- (OR r v v is the move idiom).
            IR.Assign var S32Int val ->
                do r <- lookupVar var
                   v <- lookupVar val
                   return [OR spec32 r v v]
            IR.Assign var (Pointer _) val ->
                do p <- lookupPtr var
                   v <- lookupPtr val
                   return [PCP p v]
            -- Loads and stores go through scratch pointer p3e = ptr + idx.
            IR.Load var ptr idx ->
                do defineVar var
                   v <- lookupVar var
                   p <- lookupPtr ptr
                   i <- lookupVar idx
                   return [PADD spec32 p3e p i, LMEM0 spec32 v p3e]
            IR.Store ptr idx val ->
                do p <- lookupPtr ptr
                   i <- lookupVar idx
                   v <- lookupVar val
                   return [PADD spec32 p3e p i, SMEM0 spec32 v p3e]
            -- API call: move arguments into the fixed registers from
            -- 'apiList', set the API number, and jump via p30/p2f.
            IR.Call name vars ->
                case lookup name apiList of
                  Nothing -> error $ "function not found: " ++ name
                  Just (inst, regs) -> do
                    args <- mapM lookupVar vars
                    let move = zipWith (\d s -> OR spec32 d s s) regs args
                    lb <- genLabel
                    return $ move ++ [LIMM spec32 (Reg 0x30) inst,
                                      PLIMM p30 lb,
                                      PCP p3f p2f,
                                      LB readWrite lb]
            -- Static data: bind a pointer to a fresh read/write label.
            IR.Data name dat ->
                do definePtr name
                   p <- lookupPtr name
                   lb <- genLabel
                   return $ [PLIMM p lb, LB readWrite lb, DATA dat]
            IR.DebugStop -> return [BREAK]
-- <* mapM_ (\v -> unless (v `elem` livingVars) re) IR.varRefs ir
-- Small hand-written programs for exercising the compiler from GHCi.
test1 = AST [
 Declare (Var "x") S32Int (Arith () Add
                           (Arith () Xor (ConstS32Int () 3) (ConstS32Int () 4))
                           (ConstS32Int () 5))]

test2 = AST [
 Data (Var "d") [1, 2, 3],
 Declare (Var "y") S32Int (Load () (GetVar () (Var "d")) (ConstS32Int () 1)),
 Store (GetVar () (Var "d")) (ConstS32Int () 3) (ConstS32Int () 4)
 ]

test3 = AST [
 Data (Var "d") [1, 2, 3],
 Declare (Var "y") (Pointer S32Int) (GetVar () (Var "d"))
 ]

test4 = AST [
 Declare (Var "i") S32Int (ConstS32Int () 0),
 While (GetVar () (Var "i")) (AST [])
 ]

test5 = AST [
 If (Comp () Cmpe (ConstS32Int () 3) (ConstS32Int () 4))
    (AST [])
    (AST [])
 ]

-- Full pipeline: type-check the AST, normalise to IR, then compile.
process = compile . IR.normalize . typing

-- Convenience alias used during development.
hoge = process
module Handler.Euler where
import Import
import Yesod.Form.Bootstrap3
-- Placeholder handlers: every Euler route currently aborts with a
-- "not yet implemented" error.
getEulerR :: Handler Html
getEulerR = error "Not yet implemented: getEulerR"

postEulerR :: Handler Html
postEulerR = error "Not yet implemented: postEulerR"

getEulerSolutionR :: ProblemNumber -> Handler Html
getEulerSolutionR = error "Not yet implemented: getEulerSolutionR"

-- NOTE(review): binds its argument (unused) where the sibling above is
-- point-free; harmless, but worth unifying when implementing.
postEulerSolutionR :: ProblemNumber -> Handler Html
postEulerSolutionR num = error "Not yet implemented: postEulerSolutionR"

-- | Bootstrap-rendered form building an 'EulerSolution' for the given
-- user: problem number and solution are required integers, the
-- write-up comes from a textarea, and the final field is left Nothing.
solutionForm :: UserId -> Form EulerSolution
solutionForm userId = renderBootstrap3 BootstrapBasicForm $ EulerSolution
    <$> pure userId
    <*> (ProblemNumber <$> areq intField "Problem Number" Nothing)
    <*> areq intField "Solution" Nothing
    <*> (unTextarea <$> areq textareaField "Markdown" Nothing)
    <*> pure Nothing
| terrelln/terrelln.me | Handler/Euler.hs | mit | 807 | 0 | 11 | 121 | 178 | 89 | 89 | 18 | 1 |
{-# LANGUAGE CPP #-}
module Test.Mockery.ActionSpec (spec) where
import Test.Hspec
import Control.Monad
import Test.HUnit.Lang
import Test.Mockery.Action
#if MIN_VERSION_HUnit(1,4,0)
-- Predicate matching a HUnitFailure whose message equals the given
-- string; HUnit >= 1.4 wraps the message in a FailureReason.
hUnitFailure :: String -> HUnitFailure -> Bool
hUnitFailure actual (HUnitFailure _ reason) = case reason of
  Reason expected -> actual == expected
  _ -> False
#else
-- Older HUnit versions store the message string directly.
hUnitFailure :: String -> HUnitFailure -> Bool
hUnitFailure actual (HUnitFailure _ expected) = actual == expected
#endif
-- Specs for Test.Mockery.Action.
--
-- NOTE(review): several expected messages are spelled "Unexected ...".
-- These strings must match the messages the library actually throws,
-- so do not "fix" the spelling here; fix it in the library first.
spec :: Spec
spec = do
  describe "dummy" $ do
    it "fails" $ do
      (dummy "test" :: Int -> Int -> IO Int) 23 42 `shouldThrow` hUnitFailure "Unexpected call to dummy action: test"

  describe "stub" $ do
    context "with one parameter" $ do
      context "when receiving specified parameters" $ do
        it "returns specified value" $ do
          stub ("foo", return "r") "foo" `shouldReturn` "r"

      context "when receiving unexpected parameters" $ do
        it "throws an exception" $ do
          stub ("foo", return "r") "bar" `shouldThrow` (hUnitFailure . unlines) [
              "Unexected parameter to stubbed action!"
            , "expected: " ++ show "foo"
            , " but got: " ++ show "bar"
            ]

    context "with two parameters" $ do
      context "when receiving specified parameters" $ do
        it "returns specified value" $ do
          stub ("foo", "bar", return "r") "foo" "bar" `shouldReturn` "r"

      context "when receiving unexpected parameters" $ do
        it "throws an exception" $ do
          stub ("foo", "bar", return "r") "23" "42" `shouldThrow` (hUnitFailure . unlines) [
              "Unexected parameters to stubbed action!"
            , "expected: " ++ show ("foo", "bar")
            , " but got: " ++ show ("23", "42")
            ]

    context "with three parameters" $ do
      context "when receiving specified parameters" $ do
        it "returns specified value" $ do
          stub ("foo", "bar", "baz", return "r") "foo" "bar" "baz" `shouldReturn` "r"

      context "when receiving unexpected parameters" $ do
        it "throws an exception" $ do
          stub ("foo", "bar", "baz", return "r") "23" "42" "65" `shouldThrow` (hUnitFailure . unlines) [
              "Unexected parameters to stubbed action!"
            , "expected: " ++ show ("foo", "bar", "baz")
            , " but got: " ++ show ("23", "42", "65")
            ]

    context "when used with lists" $ do
      context "with two parameters" $ do
        context "when receiving specified parameters" $ do
          it "returns specified value" $ do
            stub [("foo", "bar", return "r"), ("foo", "baz", return "_")] "foo" "bar" `shouldReturn` "r"

        context "when receiving unexpected parameters" $ do
          it "throws an exception" $ do
            stub [(10, 20, return ()), (23, 42, return ())] (23 :: Int) (65 :: Int) `shouldThrow` (hUnitFailure . unlines) [
                "Unexected parameters to stubbed action!"
              , "expected one of: (10,20), (23,42)"
              , " but got: (23,65)"
              ]

  describe "withMock" $ do
    let
      -- Shared examples, parameterised over the stubbed action and a
      -- function that invokes it once.
      withMockSpec stubbedAction call = do
        context "when action is called once" $ do
          it "passes" $ do
            withMock stubbedAction $ \action -> do
              call action
             `shouldReturn` "r"

        context "when action is called multiple times" $ do
          it "fails" $ do
            withMock stubbedAction $ \action -> do
              replicateM_ 10 (call action)
             `shouldThrow` hUnitFailure "Expected to be called once, but it was called 10 times instead!"

        context "when action is not called" $ do
          it "fails" $ do
            withMock stubbedAction $ \_ -> do
              return ()
             `shouldThrow` hUnitFailure "Expected to be called once, but it was called 0 times instead!"

    context "with one parameter" $ do
      let stubbedAction = stub ("foo", return "r")
          call action = action "foo"
      withMockSpec stubbedAction call

    context "with two parameters" $ do
      let stubbedAction = stub ("foo", "bar", return "r")
          call action = action "foo" "bar"
      withMockSpec stubbedAction call

    context "with three parameters" $ do
      let stubbedAction = stub ("foo", "bar", "baz", return "r")
          call action = action "foo" "bar" "baz"
      withMockSpec stubbedAction call

  describe "mockChain" $ do
    let actions = replicate 2 $ stub ("foo", "bar", return "r")
    context "when mock is called the specified number of times" $ do
      it "passes" $ do
        mockChain actions $ \mock -> do
          replicateM_ 2 (mock "foo" "bar")

    context "when mock is called too often" $ do
      it "fails" $ do
        mockChain actions $ \mock -> do
          replicateM_ 3 (mock "foo" "bar")
         `shouldThrow` hUnitFailure "Expected to be called only 2 times, but it received an additional call!"

    context "when mock is not called often enough" $ do
      it "fails" $ do
        mockChain actions $ \mock -> do
          replicateM_ 1 (mock "foo" "bar")
         `shouldThrow` hUnitFailure "Expected to be called 2 times, but it was called 1 time instead!"
| robbinch/tinc | test/Test/Mockery/ActionSpec.hs | mit | 5,251 | 0 | 29 | 1,590 | 1,370 | 657 | 713 | 101 | 1 |
import Data.Char
import Data.List
import System.Environment( getArgs )
import System.Random
import System.Console.GetOpt
-- Print a random scale degree of the key named by the first CLI
-- argument, followed by a random figured-bass suffix.
-- NOTE(review): 'head args' crashes when no argument is supplied --
-- confirm whether a usage message is wanted instead.
main = do
  args <- getArgs
  let key = head args
  randomNote <- randomRIO (0, 6)
  randomSuff <- randomRIO (0, 6)
  -- prints random key in scale
  putStrLn $ (deriveScale key !! randomNote)
  -- prints random suffix
  putStrLn $ (suffix !! randomSuff)
-- sharp chromatic scale, for mapping
sharps = ["C", "C#", "D", "D#", "E", "F", "F#", "G", "G#", "A", "A#", "B"]

-- flat chromatic scale, for mapping
flats = ["C", "Db", "D", "Eb", "E", "F", "Gb", "G", "Ab", "A", "Bb", "B"]

-- figured-bass suffixes (triad and seventh-chord inversions)
suffix = ["", "6", "6\n4", "7", "6\n5", "4\n3", "4\n2"]

-- | True when key @a@ should be spelled with sharps, False for flat keys.
-- Case-insensitive; "F" is the one member of 'sharps' that is a flat key.
isSharps :: String -> Bool
isSharps a
  | a == "F"                    = False
  | map toUpper a `elem` sharps = True
  | otherwise                   = False

-- | Offset of a key from C in semitones, e.g. @deriveOffset "D" == 2@.
-- The flat spellings Db, Eb, Gb, Ab, Bb (any case) are handled via a
-- lookup table because they do not appear in 'sharps'; any other key is
-- located in 'sharps' after upcasing.  Unknown keys raise a descriptive
-- error (the old code died with "Prelude.head: empty list").
deriveOffset :: String -> Int
deriveOffset key =
  case lookup (map toLower key) flatOffsets of
    Just off -> off
    Nothing  -> case elemIndices (map toUpper key) sharps of
                  (i:_) -> i
                  []    -> error ("deriveOffset: unknown key " ++ show key)
  where
    flatOffsets = [("db", 1), ("eb", 3), ("gb", 6), ("ab", 8), ("bb", 10)]

-- | The seven scale degrees of the key, spelled with sharps or flats
-- according to 'isSharps'.
deriveScale :: String -> [String]
deriveScale a
  | isSharps a = map (sharps !!) (deriveModality a)
  | otherwise  = map (flats !!)  (deriveModality a)

-- | Chromatic indices of the scale degrees: major intervals for
-- uppercase key names, natural-minor intervals for lowercase ones.
-- The interval pattern is written once and shifted by the key's offset,
-- instead of spelling out seven @(step + offset) \`mod\` 12@ terms twice.
deriveModality :: String -> [Int]
deriveModality a = map (\step -> (step + offset) `mod` 12) steps
  where
    offset = deriveOffset a
    steps | isUpper (head a) = [0, 2, 4, 5, 7, 9, 11]  -- major
          | otherwise        = [0, 2, 3, 5, 7, 8, 10]  -- minor
| rebennett/figbass | figbass.hs | mit | 2,508 | 5 | 10 | 617 | 816 | 442 | 374 | 31 | 1 |
module Main where
import Protolude
import Lib
main :: IO ()
-- Entry point simply delegates to the library's 'someFunc'.
main = someFunc
| Archimidis/md2html | app/Main.hs | mit | 78 | 0 | 6 | 16 | 25 | 15 | 10 | 5 | 1 |
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE LambdaCase #-}
module StorePackage where
import qualified Codec.Archive.Tar as Tar
import Control.Applicative
import Control.Monad
import Control.Monad.IO.Class
import Control.Monad.Trans.Resource
import Control.Exception.Lifted
import Data.Aeson ((.:), (.=))
import qualified Data.Aeson as JSON
import qualified Data.ByteString.Char8 as S
import qualified Data.ByteString.Lazy.Char8 as L
import Data.Conduit
import qualified Data.Conduit.List as CL
import qualified Data.Conduit.Zlib as ZLib
import Data.Function (fix, on)
import qualified Data.HashMap.Strict as H
import Data.List
import Data.Maybe
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Data.Vector as V
import qualified Distribution.Package as D
import qualified Distribution.PackageDescription as D
import qualified Distribution.PackageDescription.Configuration as D
import qualified Distribution.PackageDescription.Parse as D
import qualified Distribution.Text as D
import Network.HTTP.Client.TLS
import Network.HTTP.Conduit
import System.Environment
import Numeric
import Data.Time
import System.Locale
import System.IO
-- | Conduit from a decompressed tar byte stream to its entries.
-- Note: 'CL.consume' gathers ALL upstream chunks first, so entries are
-- only yielded once the whole input has arrived.
untar :: MonadThrow m => Conduit S.ByteString m Tar.Entry
untar = do
    s <- CL.consume
    loop $ Tar.read (L.fromChunks s)
  where
    loop (Tar.Next e es) = yield e >> loop es
    loop Tar.Done = return ()
    loop (Tar.Fail e) = throwM e
-- | Does the tar entry's path name a .cabal file?
isCabalFile :: Tar.Entry -> Bool
isCabalFile = isSuffixOf ".cabal" . Tar.entryPath
-- | Content of a normal-file tar entry; Nothing for directories,
-- links and other entry kinds.
normalFileContent :: Tar.EntryContent -> Maybe L.ByteString
normalFileContent (Tar.NormalFile c _) = Just c
normalFileContent _ = Nothing

-- | Successful parse result (warnings discarded); Nothing on failure.
resultMaybe :: D.ParseResult a -> Maybe a
resultMaybe (D.ParseOk _ a) = Just a
resultMaybe _ = Nothing
-- | Pipeline from tar entries to package descriptions: keep .cabal
-- files, parse them, flatten conditional sections, keep only the newest
-- version of each package, and pair each with its Hackage upload time.
entryToPackageDescription :: (MonadBaseControl IO m, MonadIO m) => Request -> Manager
                          -> Conduit Tar.Entry m (D.PackageDescription, UTCTime)
entryToPackageDescription req mgr =
    CL.filter isCabalFile =$
    CL.mapMaybe (normalFileContent . Tar.entryContent) =$
    CL.mapMaybe (resultMaybe . D.parsePackageDescription . L.unpack) =$
    CL.map D.flattenPackageDescription =$
    -- groupBy relies on the index listing a package's versions contiguously.
    CL.groupBy ((==) `on` (D.pkgName . D.package)) =$
    CL.map (maximumBy (compare `on` (D.pkgVersion . D.package))) =$
    CL.mapM (\pd -> (pd,) <$> getLastUploaded (D.display . D.pkgName $ D.package pd) req mgr)
-- | Render a package description, its upload time and optional
-- deprecation info as the JSON document stored in Elasticsearch.
-- The "ngram" and "raw" sub-objects repeat fields so the index can
-- apply different analyzers to the same data.
pdToValue :: D.PackageDescription -> UTCTime -> Maybe [T.Text] -> JSON.Value
pdToValue p upl dpr = JSON.object
    [ "name" .= name
    , "version" .= (D.display . D.pkgVersion . D.package) p
    , "license" .= (D.display . D.license) p
    , "copyright" .= D.copyright p
    , "maintainer" .= D.maintainer p
    , "author" .= D.author p
    , "stability" .= D.stability p
    , "homepage" .= D.homepage p
    , "bugReports" .= D.bugReports p
    , "synopsis" .= D.synopsis p
    , "description" .= (unescapeHtml . D.description) p
    , "category" .= (splitCategory . D.category) p
    , "hasLibrary" .= (isJust . D.library) p
    , "hasExecutable" .= (not . null . D.executables) p
    , "executables" .= (map D.exeName . D.executables) p
    , "deprecated" .= isJust dpr
    , "inFavourOf" .= maybe [] id dpr
    , "lastUploaded" .= upl
    , "ngram" .= JSON.object
        [ "description" JSON..= (unescapeHtml . D.description) p
        , "synopsis" JSON..= (unescapeHtml . D.synopsis) p
        , "name" JSON..= name
        ]
    , "raw" .= JSON.object
        [ "name" .= name
        , "license" .= (D.display . D.license) p
        , "category" .= (splitCategory . D.category) p
        ]
    ]
  where
    -- Categories come as one comma-separated string in the .cabal file.
    splitCategory = map T.strip . T.splitOn "," . T.pack
    name = (D.display . D.pkgName . D.package) p
-- | Undo the small set of HTML entity escapes used in Hackage
-- descriptions: the named entities lt/gt/amp/quot plus decimal
-- (&#65;) and hexadecimal (&#x41;) character references.
--
-- Malformed input degrades gracefully, exactly as before: an entity
-- with no terminating ';' drops the remainder of the string, and an
-- unknown named entity is simply removed.
unescapeHtml :: String -> String
unescapeHtml = loop
  where
    loop [] = []
    loop ('&':cs) = unescape cs
    loop (c:cs) = c : loop cs

    -- Four entities only: a plain association list with Prelude's
    -- 'lookup' is simpler than a HashMap at this size and drops the
    -- unordered-containers dependency from this function.
    entities = [("lt", '<'), ("gt", '>'), ("amp", '&'), ("quot", '"')]

    unescape cs = case break (== ';') cs of
      (_, []) -> []   -- unterminated entity: drop the rest
      ('#':x:hex, _:cs') | x `elem` ['x', 'X'] -> case readHex hex of
        [(n, [])] -> toEnum n : loop cs'
        _ -> loop cs'
      ('#':dec, _:cs') -> case reads dec of
        [(n, [])] -> toEnum n : loop cs'
        _ -> loop cs'
      (ent, _:cs') -> case lookup ent entities of
        Just c -> c : loop cs'
        Nothing -> loop cs'
-- | Like 'parseUrl', but additionally supports inline basic-auth
-- credentials of the form scheme://user:pass@host/... (only used when
-- the URL contains a '@').
parseUrl' :: MonadThrow m => String -> m Request
parseUrl' s | '@' `notElem` s = parseUrl s
parseUrl' s0 = do
    let (proto, s1) = T.breakOnEnd "://" (T.pack s0)
        (user, s2) = T.breakOnEnd ":" s1
        (pass, s3) = T.breakOnEnd "@" s2
    -- T.init strips the trailing ':' / '@' that breakOnEnd leaves on
    -- the user and password pieces.
    req <- parseUrl . concat $ map T.unpack [proto, s3]
    return $ applyBasicAuth (T.encodeUtf8 $ T.init user) (T.encodeUtf8 $ T.init pass) req
-- | Fallback timestamp (modified-Julian day 0) used when the upload
-- time cannot be fetched or parsed.
defaultTime :: UTCTime
defaultTime = UTCTime (ModifiedJulianDay 0) 0

-- | Fetch a package's last upload time from Hackage; any exception or
-- parse failure silently degrades to 'defaultTime'.
getLastUploaded :: (MonadIO m, MonadBaseControl IO m) => String -> Request -> Manager -> m UTCTime
getLastUploaded pkg req mgr = fmap (maybe defaultTime id . either (\(_ :: SomeException) -> Nothing) id) . try $
    parseTime defaultTimeLocale "%a %b %e %X %Z %Y" . L.unpack . responseBody <$>
    httpLbs req { path = S.pack $ "/package/" ++ pkg ++ "/upload-time" } mgr
-- | Consume (package description, upload time) pairs and index them
-- into Elasticsearch via the bulk API, in chunks of @cs@ documents.
-- When @progress@ is set, one dot is printed per flushed chunk.
sinkStoreElasticsearch :: (MonadThrow m, MonadIO m)
                       => Int -> Bool -> H.HashMap T.Text [T.Text] -> Request -> Manager
                       -> Consumer (D.PackageDescription, UTCTime) m ()
sinkStoreElasticsearch cs progress dpr req' mgr = do
    let req = req' { method = "POST" }
    fix $ \loop -> do
        chunk <- CL.take cs
        -- Bulk format: an "index" command line followed by the document.
        let cmd i = JSON.object ["index" .= JSON.object ["_id" .= (i :: String)]]
            body = L.unlines . map JSON.encode $ concatMap (\(p,upd) ->
                let pkg = D.display . D.pkgName $ D.package p in
                [ cmd pkg
                , pdToValue p upd (H.lookup (T.pack pkg) dpr)
                ]) chunk
        -- An empty chunk means upstream is exhausted: stop looping.
        unless (null chunk) $ do
            _ <- liftIO $ httpLbs req { requestBody = RequestBodyLBS body } mgr
            when progress . liftIO $ putChar '.' >> hFlush stdout
            loop
-- | Map from deprecated package name to the packages recommended in its
-- favour, as served by Hackage's /packages/deprecated endpoint.
newtype Deprecateds = Deprecateds { unDeprecateds :: H.HashMap T.Text [T.Text] }

instance JSON.FromJSON Deprecateds where
    -- The endpoint returns an array of objects; non-objects are skipped.
    parseJSON (JSON.Array a) = Deprecateds . H.fromList <$> mapM parseElem (mapMaybe fromObject (V.toList a))
      where
        fromObject (JSON.Object o) = Just o
        fromObject _ = Nothing
        parseElem o = (,) <$> o .: "deprecated-package" <*> o .: "in-favour-of"
    parseJSON _ = mzero
-- | Download the deprecated-packages table from Hackage as JSON;
-- fails in the underlying monad if the response does not decode.
getDeprecateds :: (MonadThrow m, MonadIO m) => Manager -> m (H.HashMap T.Text [T.Text])
getDeprecateds mgr = do
    req <- parseUrl "http://hackage.haskell.org/packages/deprecated"
    res <- httpLbs req { requestHeaders = ("Accept", "application/json") : requestHeaders req } mgr
    maybe (fail "getDeprecateds: decode json failed.") return $
        unDeprecateds <$> (JSON.decode $ responseBody res)
-- | End-to-end import: fetch the deprecated list and the package index
-- from Hackage, then stream the newest version of every package into
-- the given Elasticsearch index via the bulk API.
storeElasticsearch :: (MonadBaseControl IO m, MonadThrow m, MonadIO m, MonadResource m)
                   => String -> String -> Manager -> m ()
storeElasticsearch url indexName mgr = do
    dpr <- getDeprecateds mgr
    index <- parseUrl "http://hackage.haskell.org/packages/index.tar.gz"
    src <- http index mgr
    req <- parseUrl' url
    responseBody src $$+-
        ZLib.ungzip =$ untar =$ entryToPackageDescription index mgr =$
        sinkStoreElasticsearch 500 True dpr req { path = S.pack $ '/' : indexName ++ "/package/_bulk" } mgr

-- | CLI entry point: @main INDEX ELASTICSEARCH_URL@.
main :: IO ()
main = withManagerSettings tlsManagerSettings $ \mgr -> liftIO getArgs >>= \case
    [index, url] -> storeElasticsearch url index mgr
    _ -> liftIO $ putStrLn "USAGE: main INDEX ELASTICSEARCH_URL"
| philopon/find-hackage | src/StorePackage.hs | mit | 8,633 | 0 | 26 | 2,552 | 2,776 | 1,457 | 1,319 | 171 | 9 |
-- |
-- Types and instances of anonymous records.
module Record where
import BasePrelude hiding (Proxy)
import Data.Functor.Identity
import GHC.TypeLits
import Foreign.Storable
import Foreign.Ptr (plusPtr)
import Control.Lens.Basic
import qualified Record.TH as TH
-- |
-- A specialised version of "Data.Proxy.Proxy".
-- Defined for compatibility with \"base-4.6\",
-- since @Proxy@ was only defined in \"base-4.7\".
-- It has no constructors and is only ever used at the type level.
data FieldName (t :: Symbol)
-- |
-- Defines a lens to manipulate some value of a type by a type-level name,
-- using the string type literal functionality.
--
-- Instances are provided for all records and for tuples of arity of up to 24.
--
-- Here's how you can use it with tuples:
--
-- >trd :: Field "3" a a' v v' => a -> v
-- >trd = view (fieldLens (undefined :: FieldName "3"))
--
-- The function above will get you the third item of any tuple, which has it.
class Field (n :: Symbol) a a' v v' | n a -> v, n a' -> v', n a v' -> a', n a' v -> a where
  fieldLens :: FieldName n -> Lens a a' v v'

-- The one-tuple is modelled by 'Identity'.
instance Field "1" (Identity v1) (Identity v1') v1 v1' where
  fieldLens = const $ \f -> fmap Identity . f . runIdentity

-- Generate the tuple instances of the Field class:
return $ do
  arity <- [2 .. 24]
  fieldIndex <- [1 .. arity]
  return $ TH.tupleFieldInstanceDec arity fieldIndex
-- |
-- A simplified field constraint,
-- which excludes the possibility of type-changing updates.
type Field' n a v =
  Field n a a v v

-- * Record types and instances
-------------------------

-- Generate the record types and instances
-- (a lazy and a strict variant for every arity up to 24):
return $ do
  arity <- [1 .. 24]
  strict <- [False, True]
  let
    recordType =
      TH.recordTypeDec strict arity
    fieldInstances =
      do
        fieldIndex <- [1 .. arity]
        return $ TH.recordFieldInstanceDec strict arity fieldIndex
    storableInstance =
      TH.recordStorableInstanceDec strict arity
    in recordType : storableInstance : fieldInstances

-- * Record construction functions with field names
-------------------------

-- Generate the function declarations:
return $ concat $ TH.recordConFunDecs <$> [False, True] <*> [1 .. 24]
| nikita-volkov/record | library/Record.hs | mit | 2,122 | 0 | 16 | 430 | 423 | 247 | 176 | -1 | -1 |
module E16 where
-- | Refactoring example: the second parameter is deliberately unused;
-- the commented-out version below is the intended result of removing it.
definition usedArgument unusedArgument
  = usedArgument

{-
definition usedArgument
  = usedArgument
-}
-- HLint test fixture exercising bracket-related hints; keep the
-- expression (including the parenthesised negative literal) as written.
test = if isNothing x then (-1.0) else fromJust x
| alphaHeavy/hlint | tests/brackets.hs | gpl-2.0 | 50 | 1 | 7 | 10 | 28 | 13 | 15 | 1 | 2 |
{-# OPTIONS -funbox-strict-fields -O2 #-}
module Support.MD5(
Hash(), emptyHash, md5,md5file,md5lazy,md5lazyIO,
md5show32,md5Bytes,md5String,md5Handle,hashToBytes) where
import Control.Monad
import Data.Binary
import Data.Char
import Foreign
import Foreign.C
import System.IO
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as LBS
import qualified Data.ByteString.Unsafe as BS
data Hash = Hash !Word32 !Word32 !Word32 !Word32
deriving(Eq,Ord)
md5 :: BS.ByteString -> Hash
md5 bs = unsafePerformIO $ allocaBytes 16 $ \digest -> do
BS.unsafeUseAsCStringLen bs $ \ (x,y) -> md5Data (castPtr x) (fromIntegral y) digest
readDigest digest
md5lazy :: LBS.ByteString -> Hash
md5lazy lbs = unsafePerformIO $ md5lazyIO lbs
md5lazyIO :: LBS.ByteString -> IO Hash
md5lazyIO lbs = do
allocaBytes (fromIntegral $ get_md5_statesize) $ \msp -> do
let ms = MState msp
md5_init ms
forM_ (LBS.toChunks lbs) $ \bs -> do
BS.unsafeUseAsCStringLen bs $ \ (x,y) -> md5_append ms (castPtr x) (fromIntegral y)
allocaBytes 16 $ \digest -> do
md5_finish ms digest
readDigest digest
readDigest digest = do
w1 <- peekWord32 digest 0
w2 <- peekWord32 digest 4
w3 <- peekWord32 digest 8
w4 <- peekWord32 digest 12
return $ Hash w1 w2 w3 w4
peekWord32 ptr off = do
b1 <- peekByteOff ptr off :: IO Word8
b2 <- peekByteOff ptr (off + 1) :: IO Word8
b3 <- peekByteOff ptr (off + 2) :: IO Word8
b4 <- peekByteOff ptr (off + 3) :: IO Word8
let fi = fromIntegral :: Word8 -> Word32
return (fi b1 `shiftL` 24 .|. fi b2 `shiftL` 16 .|. fi b3 `shiftL` 8 .|. fi b4)
instance Binary Hash where
put (Hash a b c d) = put a >> put b >> put c >> put d
get = return Hash `ap` get `ap` get `ap` get `ap` get
md5file :: FilePath -> IO Hash
md5file fp = md5lazy `fmap` LBS.readFile fp
newtype MState = MState (Ptr MState)
foreign import ccall unsafe "md5_data" md5Data :: Ptr Word8 -> CInt -> Ptr Word8 -> IO ()
foreign import ccall unsafe md5_init :: MState -> IO ()
foreign import ccall unsafe md5_append :: MState -> Ptr Word8 -> CInt -> IO ()
foreign import ccall unsafe md5_finish :: MState -> Ptr Word8 -> IO ()
foreign import ccall unsafe get_md5_statesize :: CInt
hashToBytes :: Hash -> [Word8]
hashToBytes (Hash a b c d) = tb a . tb b . tb c . tb d $ [] where
tb :: Word32 -> [Word8] -> [Word8]
tb n = showIt 4 n
showIt :: Int -> Word32 -> [Word8] -> [Word8]
showIt 0 _ r = r
showIt i x r = case quotRem x 256 of
(y, z) -> let c = fromIntegral z
in c `seq` showIt (i-1) y (c:r)
md5show32 :: Hash -> String
md5show32 hash = f [] (hashToBytes hash) where
f cs [] = cs
f cs (o1:o2:o3:o4:o5:rest) = f ns rest where
i1 = o1 `shiftR` 3
i2 = (o1 `shiftL` 2 .|. o2 `shiftR` 6) .&. 0x1f
i3 = o2 `shiftR` 1 .&. 0x1f
i4 = (o2 `shiftL` 4 .|. o3 `shiftR` 4) .&. 0x1f
i5 = (o3 `shiftL` 1 .|. o4 `shiftR` 7) .&. 0x1f
i6 = o4 `shiftR` 2 .&. 0x1f
i7 = (o4 `shiftL` 3 .|. o5 `shiftR` 5) .&. 0x1f
i8 = o5 .&. 0x1f
ns = g i1:g i2:g i3:g i4:g i5:g i6:g i7:g i8:cs
g x | x <= 9 = chr (ord '0' + fromIntegral x)
| otherwise = chr (ord 'a' + fromIntegral x - 10)
f cs ns = reverse (take ((lns * 8 + 4) `div` 5) (f [] (ns ++ replicate (5 - lns) 0))) ++ cs where
lns = length ns
instance Show Hash where
showsPrec _ (Hash a b c d) = showAsHex a . showAsHex b . showAsHex c . showAsHex d
showAsHex :: Word32 -> ShowS
showAsHex n = showIt 8 n
where
showIt :: Int -> Word32 -> String -> String
showIt 0 _ r = r
showIt i x r = case quotRem x 16 of
(y, z) -> let c = intToDigit (fromIntegral z)
in c `seq` showIt (i-1) y (c:r)
emptyHash = Hash 0 0 0 0
md5Bytes :: [Word8] -> Hash
md5Bytes bs = unsafePerformIO $ allocaBytes 16 $ \digest -> do
withArrayLen bs $ \y x -> md5Data (castPtr x) (fromIntegral y) digest
readDigest digest
md5String :: String -> Hash
md5String ss = md5Bytes (toUTF ss) where
-- | Convert Unicode characters to UTF-8.
toUTF :: String -> [Word8]
toUTF [] = []
toUTF (x:xs) | ord x<=0x007F = (fromIntegral $ ord x):toUTF xs
| ord x<=0x07FF = fromIntegral (0xC0 .|. ((ord x `shift` (-6)) .&. 0x1F)):
fromIntegral (0x80 .|. (ord x .&. 0x3F)):
toUTF xs
| otherwise = fromIntegral (0xE0 .|. ((ord x `shift` (-12)) .&. 0x0F)):
fromIntegral (0x80 .|. ((ord x `shift` (-6)) .&. 0x3F)):
fromIntegral (0x80 .|. (ord x .&. 0x3F)):
toUTF xs
-- XXX inefficient, don't use it.
md5Handle :: Handle -> IO Hash
md5Handle h = do
hSeek h AbsoluteSeek 0
len <- fromIntegral `liftM` hFileSize h
allocaBytes len $ \ptr -> do
cnt <- hGetBuf h ptr len
unless (cnt == len) $ fail "md5File - read returned too few bytes"
hSeek h AbsoluteSeek 0
allocaBytes 16 $ \digest -> do
md5Data ptr (fromIntegral len) digest
readDigest digest
| dec9ue/jhc_copygc | src/Support/MD5.hs | gpl-2.0 | 5,314 | 0 | 19 | 1,600 | 2,267 | 1,152 | 1,115 | -1 | -1 |
{-# LANGUAGE CPP #-}
module Hkl.Sixs
( main_sixs )
where
import Prelude hiding (concat, head, print)
#if __GLASGOW_HASKELL__ < 710
import Control.Applicative ((<$>), (<*>))
#endif
import Data.ByteString.Char8 (pack)
import Data.Vector.Storable (concat, head)
import Control.Exception (bracket)
import Control.Monad (forM_)
import Numeric.LinearAlgebra (Matrix)
import Numeric.Units.Dimensional.Prelude (meter, nano, (*~))
import Pipes (Producer, runEffect, (>->), lift, yield)
import Pipes.Prelude (print)
import System.FilePath.Posix ((</>))
import Hkl.Types
import Hkl.H5
{-# ANN module "HLint: ignore Use camelCase" #-}
data DataFrameH5Path = DataFrameH5Path
{ h5pImage :: DataItem
, h5pMu :: DataItem
, h5pOmega :: DataItem
, h5pDelta :: DataItem
, h5pGamma :: DataItem
, h5pUB :: DataItem
, h5pWavelength :: DataItem
, h5pDiffractometerType :: DataItem
} deriving (Show)
data DataFrameH5 = DataFrameH5
{ h5image :: Dataset
, h5mu :: Dataset
, h5omega :: Dataset
, h5delta :: Dataset
, h5gamma :: Dataset
, h5ub :: Dataset
, h5wavelength :: Dataset
, h5dtype :: Dataset
}
data DataFrame = DataFrame
{ df_n :: Int
, df_geometry :: Geometry
, df_ub :: Matrix Double
} deriving (Show)
withDataframeH5 :: File -> DataFrameH5Path -> (DataFrameH5 -> IO r) -> IO r
withDataframeH5 h5file dfp = bracket (hkl_h5_open h5file dfp) hkl_h5_close
hkl_h5_open :: File -> DataFrameH5Path -> IO DataFrameH5
hkl_h5_open h5file dp = DataFrameH5
<$> openDataset' h5file (h5pImage dp)
<*> openDataset' h5file (h5pMu dp)
<*> openDataset' h5file (h5pOmega dp)
<*> openDataset' h5file (h5pDelta dp)
<*> openDataset' h5file (h5pGamma dp)
<*> openDataset' h5file (h5pUB dp)
<*> openDataset' h5file (h5pWavelength dp)
<*> openDataset' h5file (h5pDiffractometerType dp)
where
openDataset' hid (DataItem name _) = openDataset hid (pack name) Nothing
hkl_h5_is_valid :: DataFrameH5 -> IO Bool
hkl_h5_is_valid df = do
True <- check_ndims (h5mu df) 1
True <- check_ndims (h5omega df) 1
True <- check_ndims (h5delta df) 1
True <- check_ndims (h5gamma df) 1
return True
hkl_h5_close :: DataFrameH5 -> IO ()
hkl_h5_close d = do
closeDataset (h5image d)
closeDataset (h5mu d)
closeDataset (h5omega d)
closeDataset (h5delta d)
closeDataset (h5gamma d)
closeDataset (h5ub d)
closeDataset (h5wavelength d)
closeDataset (h5dtype d)
getDataFrame' :: DataFrameH5 -> Int -> IO DataFrame
getDataFrame' d i = do
mu <- get_position (h5mu d) i
omega <- get_position (h5omega d) i
delta <- get_position (h5delta d) i
gamma <- get_position (h5gamma d) i
wavelength <- get_position (h5wavelength d) 0
ub <- get_ub (h5ub d)
let positions = concat [mu, omega, delta, gamma]
let source = Source (head wavelength *~ nano meter)
return DataFrame { df_n = i
, df_geometry = Geometry Uhv source positions Nothing
, df_ub = ub
}
getDataFrame :: DataFrameH5 -> Producer DataFrame IO ()
getDataFrame d = do
(Just n) <- lift $ lenH5Dataspace (h5mu d)
forM_ [0..n-1] (\i -> lift (getDataFrame' d i) >>= yield)
main_sixs :: IO ()
main_sixs = do
let root = "/nfs/ruche-sixs/sixs-soleil/com-sixs/2015/Shutdown4-5/XpadAu111/"
let filename = "align_FLY2_omega_00045.nxs"
let dataframe_h5p = DataFrameH5Path { h5pImage = DataItem "com_113934/scan_data/xpad_image" StrictDims
, h5pMu = DataItem "com_113934/scan_data/UHV_MU" ExtendDims
, h5pOmega = DataItem "com_113934/scan_data/UHV_OMEGA" ExtendDims
, h5pDelta = DataItem "com_113934/scan_data/UHV_DELTA" ExtendDims
, h5pGamma = DataItem "com_113934/scan_data/UHV_GAMMA" ExtendDims
, h5pUB = DataItem "com_113934/SIXS/I14-C-CX2__EX__DIFF-UHV__#1/UB" StrictDims
, h5pWavelength = DataItem "com_113934/SIXS/Monochromator/wavelength" StrictDims
, h5pDiffractometerType = DataItem "com_113934/SIXS/I14-C-CX2__EX__DIFF-UHV__#1/type" StrictDims
}
withH5File (root </> filename) $ \h5file ->
withDataframeH5 h5file dataframe_h5p $ \dataframe_h5 -> do
True <- hkl_h5_is_valid dataframe_h5
runEffect $ getDataFrame dataframe_h5
>-> print
| picca/hkl | contrib/haskell/src/Hkl/Sixs.hs | gpl-3.0 | 5,041 | 0 | 15 | 1,655 | 1,283 | 668 | 615 | 105 | 1 |
module Language.Subleq.Assembly.Prim where
import Data.Maybe
import Data.Set (Set)
import qualified Data.Set as S
import Data.Map (Map)
import qualified Data.Map as M
import Text.Printf
import Data.List
import Data.Bits
type Id = String
type Location = String
type Substitution = Map Id Expr
data Expr = Identifier Id
| Number Integer
| EAdd Expr Expr
| ESub Expr Expr
| EShiftL Expr Expr
deriving (Read, Show, Eq, Ord)
type LocExpr = (Maybe Location, Expr)
-- deriving (Read, Show, Eq, Ord)
data Instruction = Subleq
deriving (Read, Show, Eq, Ord)
instructionArity :: Instruction -> (Int, Int)
instructionArity Subleq = (1, 3)
data Element = ElemInst Instruction [LocExpr]
| SubroutineCall (Maybe Location) Id [Expr]
| ElemLoc Location
deriving (Read, Show, Eq, Ord)
data Object = Subroutine Id [Id] [Element]
| Macro Id [Id] [Element]
deriving (Read, Show, Eq, Ord)
data Module = Module (Map Id Object)
deriving (Read, Show, Eq, Ord)
maybeToSet :: Maybe a -> Set a
maybeToSet = maybe S.empty S.singleton
elemsSet :: (Ord a)=>Map k a -> Set a
elemsSet m = S.fromList (M.elems m)
unionsMap :: (Ord b)=>(a -> Set b) -> Map k a -> Set b
unionsMap f m = fst $ M.mapAccum (\a b-> (S.union a (f b), ())) S.empty m
objectId :: Object -> Id
objectId (Subroutine n _ _) = n
objectId (Macro n _ _) = n
objectArity :: Object -> Int
objectArity (Subroutine _ args _) = length args
objectArity (Macro _ args _) = length args
evaluateNumExpr :: Expr -> Integer
evaluateNumExpr (Identifier x) = error $ "unexpected identifier " ++ x ++ "."
evaluateNumExpr (Number n) = n
evaluateNumExpr (EAdd e1 e2) = evaluateNumExpr e1 + evaluateNumExpr e2
evaluateNumExpr (ESub e1 e2) = evaluateNumExpr e1 - evaluateNumExpr e2
evaluateNumExpr (EShiftL e1 e2) = evaluateNumExpr e1 `shift` fromIntegral (evaluateNumExpr e2)
evaluateNumExprInLocElem :: LocExpr -> LocExpr
evaluateNumExprInLocElem (l, e) = (l, Number $ evaluateNumExpr e)
evaluateNumExprInElem :: Element -> Element
evaluateNumExprInElem (ElemInst i les) = ElemInst i $ map evaluateNumExprInLocElem les
evaluateNumExprInElem e@(SubroutineCall {}) = e
evaluateNumExprInElem e@(ElemLoc {}) = e
substituteExpr :: Substitution -> Expr -> Expr
substituteExpr sub i@(Identifier x) = M.findWithDefault i x sub
substituteExpr sub i@(EAdd e1 e2) = EAdd (substituteExpr sub e1) (substituteExpr sub e2)
substituteExpr sub i@(ESub e1 e2) = ESub (substituteExpr sub e1) (substituteExpr sub e2)
substituteExpr sub i@(EShiftL e1 e2) = EShiftL (substituteExpr sub e1) (substituteExpr sub e2)
substituteExpr _ (Number n) = Number n
substituteLocId :: Substitution -> Id -> Id
substituteLocId sub l | l `M.member` sub = case M.lookup l sub of
Just (Identifier l') -> l'
-- Just x -> error $ printf "Label %s cannot be substituted with %s" l (show x)
Just x -> printf "%s(%s)" l (show x)
-- Just _ -> (Nothing, substituteExpr sub e')
Nothing -> l
substituteLocId _ l = l
substituteLocExpr :: Substitution -> LocExpr -> LocExpr
substituteLocExpr sub (Just l, e') = (Just (substituteLocId sub l), substituteExpr sub e')
substituteLocExpr sub (l, e') = (l, substituteExpr sub e')
substituteElement :: Substitution -> Element -> Element
substituteElement sub (ElemInst i es) = ElemInst i (map (substituteLocExpr sub) es)
substituteElement sub (SubroutineCall l i es) = SubroutineCall (fmap (substituteLocId sub) l) i (map (substituteExpr sub) es)
substituteElement sub (ElemLoc l) = ElemLoc $ substituteLocId sub l
substituteObject :: Substitution -> Object -> Object
substituteObject sub (Subroutine n args elems) = Subroutine n args $ map (substituteElement sub) elems
substituteObject sub (Macro n args elems) = Macro n args $ map (substituteElement sub) elems
locationsElement :: Element -> Set Id
locationsElement (ElemInst _ es) = S.fromList $ mapMaybe fst es
locationsElement (ElemLoc l) = S.singleton l
locationsElement (SubroutineCall l _ _) = maybeToSet l
locationsElements :: [Element] -> Set Id
locationsElements = S.unions . map locationsElement
locationsObject :: Object -> Set Id
locationsObject (Subroutine _ _ elems) = S.unions $ map locationsElement elems
locationsObject (Macro _ _ elems) = S.unions $ map locationsElement elems
freqMap :: (Ord a)=>[a] -> M.Map a Int
freqMap xs = M.fromListWith (+) . zip xs $ repeat 1
locationsOccursionElement :: Element -> Map Id Int
locationsOccursionElement (ElemInst _ es) = freqMap $ mapMaybe fst es
locationsOccursionElement (ElemLoc l) = M.singleton l 1
locationsOccursionElement (SubroutineCall Nothing _ _) = M.empty
locationsOccursionElement (SubroutineCall (Just l) _ _) = M.singleton l 1
locationsOccursionElements :: [Element] -> Map Id Int
locationsOccursionElements = M.unionsWith (+) . map locationsOccursionElement
locationsOccursionObject :: Object -> Map Id Int
locationsOccursionObject (Subroutine _ _ elems) = locationsOccursionElements elems
locationsOccursionObject (Macro _ _ elems) = locationsOccursionElements elems
errorsObject :: Object -> [String]
errorsObject (Subroutine n args elems) = errorsObject' n args elems
errorsObject (Macro n args elems) = errorsObject' n args elems
errorsObject' :: Id -> [Id] -> [Element] -> [String]
errorsObject' n args elems = catMaybes [e1, e2, e3]
where
e1 | not . null $ dupLocs =
Just $
printf "Object %s: locations must be exclusive, but: %s" n (show dupLocs)
| otherwise = Nothing
e2 | not . null $ dupArgs =
Just $
printf "Object %s: arguments must be exclusive, but: %s" n (show dupArgs)
| otherwise = Nothing
e3 | not . null $ dupArgLocs =
Just $
printf "Object %s: locations and arguments must be exclusive, but: %s" n (show dupArgLocs)
| otherwise = Nothing
argFreq = freqMap args
locFreq = locationsOccursionElements elems
dupArgs = M.elems . M.filter (> 1) $ argFreq
dupLocs = M.elems . M.filter (> 1) $ locFreq
dupArgLocs = M.elems . M.filter (> 1) $ M.unionWith (+) argFreq locFreq
boundedVars :: Object -> Set Id
boundedVars o@(Subroutine _ args _) = S.fromList args `S.union` locationsObject o
boundedVars o@(Macro _ args _) = S.fromList args `S.union` locationsObject o
freeVarExpr :: Expr -> Set Id
freeVarExpr (Identifier i) = S.singleton i
freeVarExpr _ = S.empty
freeVarLocExpr :: LocExpr -> Set Id
freeVarLocExpr (_,e) = freeVarExpr e
freeVarElement :: Element -> Set Id
freeVarElement (ElemInst _ es) = S.unions $ map freeVarLocExpr es
freeVarElement (SubroutineCall l x es) = S.unions $ [maybeToSet l, S.singleton x] ++ map freeVarExpr es
freeVarElement (ElemLoc _) = S.empty
freeVarObject :: Object -> Set Id
freeVarObject o@(Subroutine _ args es) = S.unions (map freeVarElement es) S.\\ S.fromList args S.\\ locationsObject o
freeVarObject o@(Macro _ args es) = S.unions (map freeVarElement es) S.\\ S.fromList args S.\\ locationsObject o
freeVarModule :: Module -> Set Id
freeVarModule (Module m) = unionsMap freeVarObject m S.\\ M.keysSet m
applyObject :: LabelPrefix -> Object -> [Expr] -> [Element]
applyObject lp (Macro x as es) = applyObject' lp x as es
applyObject _ (Subroutine x _ _) = error $ printf "%s is a subroutine and not applicable" x
applyObject' :: LabelPrefix -> Id -> [Id] -> [Element] -> [Expr] -> [Element]
applyObject' lp x as es aes | length as == length aes = map (substituteElement sub) $ addLocationPrefix lp targets es -- addLocationPrefix lp $ map (substituteElement sub) es
| otherwise = error $ printf "%s takes %d argument(s), but got: %s" x (length as) (show aes)
where
sub = M.fromList $ zip (map (labelPrefixToString lp ++) as) aes
targets = S.fromList as `S.union` locationsElements es
type DistinctStack a = ([a], Set a)
push :: (Ord a)=>a -> DistinctStack a -> Maybe (DistinctStack a)
push x (xs, st) | x `S.member` st = Nothing
| otherwise = Just (x:xs, S.insert x st)
pop :: (Ord a)=>DistinctStack a -> Maybe (a, DistinctStack a)
pop ([], _) = Nothing
pop (x:xs, st) = Just (x, (xs, S.delete x st))
emptyStack :: DistinctStack a
emptyStack = ([], S.empty)
singletonStack :: (Ord a)=>a -> DistinctStack a
singletonStack x = ([x], S.singleton x)
stackToList :: DistinctStack a -> [a]
stackToList = fst
lookupModule :: Id -> Module -> Maybe Object
lookupModule x (Module m) = M.lookup x m
expandMacroAll :: Module -> Module
expandMacroAll m@(Module m') = Module $ M.map (expandMacro m) m'
expandMacro :: Module -> Object -> Object
expandMacro _ o@(Macro {}) = o
expandMacro m (Subroutine x as es) = Subroutine x as (concatMap (\(i, e)->expandMacro' (singletonStack x) m [i] e) $ zip [0..] es)
expandMacro' :: DistinctStack Id -> Module -> LabelPrefix -> Element -> [Element]
expandMacro' stk m lp (SubroutineCall l x as) = es''
where
stk' = fromMaybe
(error $ printf "%s: Cyclic macro expansion: %s" x (show $ stackToList stk))
(push x stk)
o :: Object
o = fromMaybe
(error $ printf "Object %s is not found in the module: %s" x (show $ stackToList stk))
(lookupModule x m)
es' :: [Element]
es' = map ElemLoc (maybeToList l) ++ applyObject lp o as
es'' = concatMap (\(i, e)-> expandMacro' stk' m (i:lp) e) $ zip [0..] es'
expandMacro' _ _ _ e@(ElemInst _ _) = [e]
expandMacro' _ _ _ e@(ElemLoc _) = [e]
addLocationPrefix :: LabelPrefix -> Set Id -> [Element] -> [Element]
addLocationPrefix lp targets elems = elems'
where
elems' = map (substituteElement sub) elems
sub = M.fromSet (Identifier . (labelPrefixToString lp ++)) targets
type LabelPrefix = [Int]
labelPrefixToString :: LabelPrefix -> String
labelPrefixToString = ('_':) . intercalate "_" . reverse . map show
| Hara-Laboratory/subleq-toolchain | Language/Subleq/Assembly/Prim.hs | gpl-3.0 | 10,113 | 0 | 13 | 2,242 | 3,814 | 1,957 | 1,857 | 187 | 3 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE MultiParamTypeClasses #-}
module Ampersand.Output.ToJSON.Relations
(Relationz)
where
import Ampersand.ADL1
import Ampersand.FSpec.FSpecAux
import Ampersand.Output.ToJSON.JSONutils
import Data.Maybe
import qualified Data.Set as Set
data Relationz = Relationz [RelationJson]deriving (Generic, Show)
data RelationJson = RelationJson
{ relJSONname :: String
, relJSONsignature :: String
, relJSONsrcConceptId :: String
, relJSONtgtConceptId :: String
, relJSONuni :: Bool
, relJSONtot :: Bool
, relJSONinj :: Bool
, relJSONsur :: Bool
, relJSONprop :: Bool
, relJSONaffectedConjuncts :: [String]
, relJSONmysqlTable :: RelTableInfo
} deriving (Generic, Show)
data RelTableInfo = RelTableInfo -- Contains info about where the relation is implemented in SQL
{ rtiJSONname :: String
, rtiJSONtableOf :: Maybe String -- specifies if relation is administrated in table of srcConcept (i.e. "src"), tgtConcept (i.e. "tgt") or its own n-n table (i.e. null).
, rtiJSONsrcCol :: TableCol
, rtiJSONtgtCol :: TableCol
} deriving (Generic, Show)
data TableCol = TableCol
{ tcJSONname :: String
, tcJSONnull :: Bool
, tcJSONunique :: Bool
} deriving (Generic, Show)
instance ToJSON Relationz where
toJSON = amp2Jason
instance ToJSON RelationJson where
toJSON = amp2Jason
instance ToJSON RelTableInfo where
toJSON = amp2Jason
instance ToJSON TableCol where
toJSON = amp2Jason
instance JSON MultiFSpecs Relationz where
fromAmpersand multi _ = Relationz (map (fromAmpersand multi) (Set.elems $ vrels (userFSpec multi)))
instance JSON Relation RelationJson where
fromAmpersand multi dcl = RelationJson
{ relJSONname = name dcl
, relJSONsignature = name dcl ++ (show . sign) dcl
, relJSONsrcConceptId = escapeIdentifier . name . source $ dcl
, relJSONtgtConceptId = escapeIdentifier . name . target $ dcl
, relJSONuni = isUni bindedExp
, relJSONtot = isTot bindedExp
, relJSONinj = isInj bindedExp
, relJSONsur = isSur bindedExp
, relJSONprop = isProp bindedExp
, relJSONaffectedConjuncts = map rc_id $ fromMaybe [] (lookup dcl $ allConjsPerDecl fSpec)
, relJSONmysqlTable = fromAmpersand multi dcl
}
where bindedExp = EDcD dcl
fSpec = userFSpec multi
instance JSON Relation RelTableInfo where
fromAmpersand multi dcl = RelTableInfo
{ rtiJSONname = name plug
, rtiJSONtableOf = srcOrtgt
, rtiJSONsrcCol = fromAmpersand multi . rsSrcAtt $ relstore
, rtiJSONtgtCol = fromAmpersand multi . rsTrgAtt $ relstore
}
where fSpec = userFSpec multi
(plug,relstore) = getRelationTableInfo fSpec dcl
(plugSrc,_) = getConceptTableInfo fSpec (source dcl)
(plugTrg,_) = getConceptTableInfo fSpec (target dcl)
srcOrtgt
| (plug == plugSrc) && (plugSrc == plugTrg) = Just $ if rsStoredFlipped relstore then "tgt" else "src" -- relations where src and tgt concepts are in the same classification tree as well as relations that are UNI or INJ
| plug == plugSrc = Just "src" -- relation in same table as src concept (UNI relations)
| plug == plugTrg = Just "tgt" -- relation in same table as tgt concept (INJ relations that are not UNI)
| otherwise = Nothing -- relations in n-n table (not UNI and not INJ)
instance JSON SqlAttribute TableCol where
fromAmpersand _ att = TableCol
{ tcJSONname = attName att
, tcJSONnull = attDBNull att
, tcJSONunique = attUniq att
}
| AmpersandTarski/ampersand | src/Ampersand/Output/ToJSON/Relations.hs | gpl-3.0 | 3,737 | 0 | 13 | 956 | 831 | 461 | 370 | 79 | 0 |
import Development.Hake
import Development.Hake.FunSetRaw
main = hake [
file [ "program" ] [ "main.o", "iodat.o", "dorun.o", "lo.o", "./usr/fred/lib/crtn.a" ] $
const [ [ "cc", "-o", "program", "main.o", "iodat.o", "dorun.o", "lo.o", "./usr/fred/lib/crtn.a" ] ]
,
file [ "main.o" ] [ "main.c" ] $ const [ [ "cc", "-c", "main.c" ] ]
,
file [ "iodat.o" ] [ "iodat.c" ] $ const [ [ "cc", "-c", "iodat.c" ] ]
,
file [ "dorun.o" ] [ "dorun.c" ] $ const [ [ "cc", "-c", "dorun.c" ] ]
,
file [ "lo.o" ] [ "lo.s" ] $ const [ [ "cc", "-c", "lo.s" ] ]
,
task "clean" [ [ "rm", "main.o", "iodat.o", "dorun.o", "lo.o", "program" ] ]
]
| YoshikuniJujo/hake_haskell | examples/nutshell/chap1/1/hakeMainRaw.hs | gpl-3.0 | 641 | 1 | 9 | 133 | 244 | 142 | 102 | 10 | 1 |
module Fun.Parser.Pragma where
-- Imports de parsec.
import Text.Parsec
import Text.Parsec.Token( colon, symbol, reservedOp )
import Data.Text ( Text, unpack )
-- Imports de fun.
import Fun.Parser.Internal
-- Imports de equ.
import Equ.Syntax ( Operator(..) )
import qualified Equ.Theories.FOL as F ( theoryOperatorsList )
import qualified Equ.Theories.Arith as A ( theoryOperatorsList )
{- Ejemplo
{# AC: ≡ ∧ ∨ #}
-}
kwPragma :: ParserD ()
kwPragma = keyword "AC"
pColon :: ParserD ()
pColon = colon lexer >> return ()
pragmaBetween :: ParserD a -> ParserD a
pragmaBetween = between (symbol lexer "{#") (symbol lexer "#}")
parsePragma :: ParserD [Operator]
parsePragma = try $ pragmaBetween pPragma
pPragma :: ParserD [Operator]
pPragma = kwPragma >> pColon >> parseOperators
operatorsList :: [Operator]
operatorsList = F.theoryOperatorsList ++ A.theoryOperatorsList
parseOperators :: ParserD [Operator]
parseOperators = many1 $ choice $ map makeOp operatorsList
makeOp :: Operator -> ParserD Operator
makeOp op = try (parseOp $ opRepr op) >> return op
where
parseOp :: Text -> ParserD ()
parseOp = reservedOp lexer . unpack
| alexgadea/fun | Fun/Parser/Pragma.hs | gpl-3.0 | 1,169 | 0 | 9 | 205 | 345 | 191 | 154 | 26 | 1 |
<?xml version='1.0' encoding='ISO-8859-1' ?>
<!DOCTYPE helpset
PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN"
"http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0">
<title>Xena Help</title>
<maps>
<homeID>xenahelp</homeID>
<mapref location="map.jhm"/>
</maps>
<view xml:lang="en" mergetype="javax.help.UniteAppendMerge">
<name>TOC</name>
<label>Table Of Contents</label>
<type>javax.help.TOCView</type>
<data>toc.xml</data>
<image>contents</image>
</view>
<presentation default=true>
<name>Xena Help</name>
<size width="800" height="600" />
<location x="200" y="200" />
<title>Xena Help</title>
<toolbar>
<helpaction image="back">javax.help.BackAction</helpaction>
<helpaction image="forward">javax.help.ForwardAction</helpaction>
<helpaction image="home">javax.help.HomeAction</helpaction>
<helpaction image="print">javax.help.PrintAction</helpaction>
<helpaction image="printsetup">javax.help.PrintSetupAction</helpaction>
</toolbar>
<image>titlebaricon</image>
</presentation>
</helpset>
| srnsw/xena | xena/doc/xena-help.hs | gpl-3.0 | 1,151 | 116 | 13 | 173 | 520 | 237 | 283 | -1 | -1 |
module Sound.Tidal.MIDI.Tanzbar where
import Sound.Tidal.Params
import Sound.Tidal.MIDI.Control
(bd1_attack, bd1_attack_p) = pF "bd1_attack" (Just 0)
(bd1_decay, bd1_decay_p) = pF "bd1_decay" (Just 0)
(bd1_tune, bd1_tune_p) = pF "bd1_tune" (Just 0)
(bd1_noise, bd1_noise_p) = pF "bd1_noise" (Just 0)
(bd1_filter, bd1_filter_p) = pF "bd1_filter" (Just 0)
(bd1_dist, bd1_dist_p) = pF "bd1_dist" (Just 0)
(bd1_trigger, bd1_trigger_p) = pF "bd1_trigger" (Just 0)
(bd2_decay, bd2_decay_p) = pF "bd2_decay" (Just 0)
(bd2_tune, bd2_tune_p) = pF "bd2_tune" (Just 0)
(bd2_tone, bd2_tone_p) = pF "bd2_tone" (Just 0)
(sd_tune, sd_tune_p) = pF "sd_tune" (Just 0)
(sd_d_tune, sd_d_tune_p) = pF "sd_d_tune" (Just 0)
(sd_snappy, sd_snappy_p) = pF "sd_snappy" (Just 0)
(sd_sn_decay, sd_sn_decay_p) = pF "sd_sn_decay" (Just 0)
(sd_tone, sd_tone_p) = pF "sd_tone" (Just 0)
(sd_tone_decay, sd_tone_decay_p) = pF "sd_tone_decay" (Just 0)
(sd_pitch, sd_pitch_p) = pF "sd_pitch" (Just 0)
(rs_tune, rs_tune_p) = pF "rs_tune" (Just 0)
(cy_decay, cy_decay_p) = pF "cy_decay" (Just 0)
(cy_tone, cy_tone_p) = pF "cy_tone" (Just 0)
(cy_tune, cy_tune_p) = pF "cy_tune" (Just 0)
(oh_decay, oh_decay_p) = pF "oh_decay" (Just 0)
(hh_tune, hh_tune_p) = pF "hh_tune" (Just 0)
(hh_decay, hh_decay_p) = pF "hh_decay" (Just 0)
(cl_tune, cl_tune_p) = pF "cl_tune" (Just 0)
(cl_decay, cl_decay_p) = pF "cl_decay" (Just 0)
(cp_decay, cp_decay_p) = pF "cp_decay" (Just 0)
(cp_filter, cp_filter_p) = pF "cp_filter" (Just 0)
(cp_attack, cp_attack_p) = pF "cp_attack" (Just 0)
(cp_trigger, cp_trigger_p) = pF "cp_trigger" (Just 0)
(htc_tune, htc_tune_p) = pF "htc_tune" (Just 0)
(htc_decay, htc_decay_p) = pF "htc_decay" (Just 0)
(htc_noise_on_off, htc_noise_on_off_p) = pF "htc_noise_on_off" (Just 0)
(htc_tom_conga, htc_tom_conga_p) = pF "htc_tom_conga" (Just 0)
(mtc_tune, mtc_tune_p) = pF "mtc_tune" (Just 0)
(mtc_decay, mtc_decay_p) = pF "mtc_decay" (Just 0)
(mtc_noise_on_off, mtc_noise_on_off_p) = pF "mtc_noise_on_off" (Just 0)
(mtc_tom_conga, mtc_tom_conga_p) = pF "mtc_tom_conga" (Just 0)
(ltc_tune, ltc_tune_p) = pF "ltc_tune" (Just 0)
(ltc_decay, ltc_decay_p) = pF "ltc_decay" (Just 0)
(ltc_noise_on_off, ltc_noise_on_off_p) = pF "ltc_noise_on_off" (Just 0)
(ltc_tom_conga, ltc_tom_conga_p) = pF "ltc_tom_conga" (Just 0)
(tom_noise, tom_noise_p) = pF "tom_noise" (Just 0)
(cb_tune, cb_tune_p) = pF "cb_tune" (Just 0)
(cb_decay, cb_decay_p) = pF "cb_decay" (Just 0)
(ma_decay, ma_decay_p) = pF "ma_decay" (Just 0)
(set_select, set_select_p) = pF "set_select" (Just 0)
(track_delay_cv1, track_delay_cv1_p) = pF "track_delay_cv1" (Just 0)
(track_delay_cv23, track_delay_cv23_p) = pF "track_delay_cv23" (Just 0)
(track_delay_bd1, track_delay_bd1_p) = pF "track_delay_bd1" (Just 0)
(track_delay_bd2, track_delay_bd2_p) = pF "track_delay_bd2" (Just 0)
(track_delay_sd, track_delay_sd_p) = pF "track_delay_sd" (Just 0)
(track_delay_rs, track_delay_rs_p) = pF "track_delay_rs" (Just 0)
(track_delay_cy, track_delay_cy_p) = pF "track_delay_cy" (Just 0)
(track_delay_oh, track_delay_oh_p) = pF "track_delay_oh" (Just 0)
(track_delay_hh, track_delay_hh_p) = pF "track_delay_hh" (Just 0)
(track_delay_cl, track_delay_cl_p) = pF "track_delay_cl" (Just 0)
(track_delay_cp, track_delay_cp_p) = pF "track_delay_cp" (Just 0)
(track_delay_ltc, track_delay_ltc_p) = pF "track_delay_ltc" (Just 0)
(track_delay_mtc, track_delay_mtc_p) = pF "track_delay_mtc" (Just 0)
(track_delay_htc, track_delay_htc_p) = pF "track_delay_htc" (Just 0)
(track_delay_cb, track_delay_cb_p) = pF "track_delay_cb" (Just 0)
(track_delay_ma, track_delay_ma_p) = pF "track_delay_ma" (Just 0)
(bd1, bd1_p) = pF "bd1" (Just 0)
(bd2, bd2_p) = pF "bd2" (Just 0)
(sd, sd_p) = pF "sd" (Just 0)
(rs, rs_p) = pF "rs" (Just 0)
(cy, cy_p) = pF "cy" (Just 0)
(oh, oh_p) = pF "oh" (Just 0)
(hh, hh_p) = pF "hh" (Just 0)
(cl, cl_p) = pF "cl" (Just 0)
(cp, cp_p) = pF "cp" (Just 0)
(ltc, ltc_p) = pF "ltc" (Just 0)
(mtc, mtc_p) = pF "mtc" (Just 0)
(htc, htc_p) = pF "htc" (Just 0)
(cb, cb_p) = pF "cb" (Just 0)
(ma, ma_p) = pF "ma" (Just 0)
tanzController :: ControllerShape
tanzController = ControllerShape {
controls = [
mCC bd1_attack_p 2,
mCC bd1_decay_p 64,
mCC bd1_tune_p 3,
mCC bd1_noise_p 4,
mCC bd1_filter_p 5,
mCC bd1_dist_p 6,
mCC bd1_trigger_p 66,
mCC bd2_decay_p 8,
mCC bd2_tune_p 9,
mCC bd2_tone_p 10,
mCC sd_tune_p 11,
mCC sd_d_tune_p 12,
mCC sd_snappy_p 13,
mCC sd_sn_decay_p 67,
mCC sd_tone_p 14,
mCC sd_tone_decay_p 68,
mCC sd_pitch_p 69,
mCC rs_tune_p 88,
mCC cy_decay_p 70,
mCC cy_tone_p 15,
mCC cy_tune_p 71,
mCC oh_decay_p 75,
mCC hh_tune_p 73,
mCC hh_decay_p 74,
mCC cl_tune_p 16,
mCC cl_decay_p 17,
mCC cp_decay_p 75,
mCC cp_filter_p 18,
mCC cp_attack_p 76,
mCC cp_trigger_p 77,
mCC htc_tune_p 19,
mCC htc_decay_p 20,
mCC htc_noise_on_off_p 78,
mCC htc_tom_conga_p 79,
mCC mtc_tune_p 21,
mCC mtc_decay_p 22,
mCC mtc_noise_on_off_p 80,
mCC mtc_tom_conga_p 81,
mCC ltc_tune_p 23,
mCC ltc_decay_p 24,
mCC ltc_noise_on_off_p 82,
mCC ltc_tom_conga_p 83,
mCC tom_noise_p 84,
mCC cb_tune_p 85,
mCC cb_decay_p 86,
mCC ma_decay_p 87,
mCC set_select_p 0,
mCC track_delay_cv1_p 89,
mCC track_delay_cv23_p 90,
mCC track_delay_bd1_p 91,
mCC track_delay_bd2_p 92,
mCC track_delay_sd_p 93,
mCC track_delay_rs_p 94,
mCC track_delay_cy_p 95,
mCC track_delay_oh_p 96,
mCC track_delay_hh_p 97,
mCC track_delay_cl_p 98,
mCC track_delay_cp_p 99,
mCC track_delay_ltc_p 100,
mCC track_delay_mtc_p 101,
mCC track_delay_htc_p 102,
mCC track_delay_cb_p 103,
mCC track_delay_ma_p 104,
mCC bd1_p 36,
mCC bd2_p 37,
mCC sd_p 38,
mCC rs_p 39,
mCC cy_p 40,
mCC oh_p 41,
mCC hh_p 42,
mCC cl_p 43,
mCC cp_p 44,
mCC ltc_p 45,
mCC mtc_p 46,
mCC htc_p 47,
mCC cb_p 48,
mCC ma_p 49
],
latency = 0.1
}
tanz = midinote . (tanzN <$>)
tanzN :: String -> Int
tanzN "bd1" = 36
tanzN "bd2" = 37
tanzN "sd" = 38
tanzN "rs" = 39
tanzN "cy" = 40
tanzN "oh" = 41
tanzN "hh" = 42
tanzN "cl" = 43
tanzN "cp" = 44
tanzN "ltc" = 45
tanzN "mtc" = 46
tanzN "htc" = 47
tanzN "cb" = 48
tanzN "ma" = 49
-- general shape for stream
tanzShape = toShape tanzController
| tidalcycles/tidal-midi | Sound/Tidal/MIDI/Tanzbar.hs | gpl-3.0 | 6,380 | 0 | 8 | 1,207 | 2,575 | 1,336 | 1,239 | 178 | 1 |
module Paths_lambda (
version,
getBinDir, getLibDir, getDataDir, getLibexecDir,
getDataFileName, getSysconfDir
) where
import qualified Control.Exception as Exception
import Data.Version (Version(..))
import System.Environment (getEnv)
import Prelude
catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a
catchIO = Exception.catch
version :: Version
version = Version {versionBranch = [0,1,0,0], versionTags = []}
bindir, libdir, datadir, libexecdir, sysconfdir :: FilePath
bindir = "/home/anastasia/Haskell/lambda/.cabal-sandbox/bin"
libdir = "/home/anastasia/Haskell/lambda/.cabal-sandbox/lib/x86_64-linux-ghc-7.8.3/lambda-0.1.0.0"
datadir = "/home/anastasia/Haskell/lambda/.cabal-sandbox/share/x86_64-linux-ghc-7.8.3/lambda-0.1.0.0"
libexecdir = "/home/anastasia/Haskell/lambda/.cabal-sandbox/libexec"
sysconfdir = "/home/anastasia/Haskell/lambda/.cabal-sandbox/etc"
getBinDir, getLibDir, getDataDir, getLibexecDir, getSysconfDir :: IO FilePath
getBinDir = catchIO (getEnv "lambda_bindir") (\_ -> return bindir)
getLibDir = catchIO (getEnv "lambda_libdir") (\_ -> return libdir)
getDataDir = catchIO (getEnv "lambda_datadir") (\_ -> return datadir)
getLibexecDir = catchIO (getEnv "lambda_libexecdir") (\_ -> return libexecdir)
getSysconfDir = catchIO (getEnv "lambda_sysconfdir") (\_ -> return sysconfdir)
getDataFileName :: FilePath -> IO FilePath
getDataFileName name = do
dir <- getDataDir
return (dir ++ "/" ++ name)
| nastya13/lambda | dist/build/autogen/Paths_lambda.hs | gpl-3.0 | 1,466 | 0 | 10 | 182 | 371 | 213 | 158 | 28 | 1 |
module He.Error
( module Control.Monad.Except
, Error()
, err
, err'
, report
, check
, checked
, fatal
, logExceptT
, logExceptT'
, log
) where
import Control.Lens
import Control.Monad.Except
import qualified Data.DList as D
import H.Prelude hiding ((<>), empty, show)
import Prelude (show)
import Text.Parsec.Applicative.Types
import Text.PrettyPrint
import Text.PrettyPrint.HughesPJClass
import System.IO (hPutStr, hPutStrLn, stderr)
-- | An accumulated collection of error messages, each optionally tagged
-- with a source position.  The 'Monoid' instance (concatenation) is
-- inherited from the underlying 'D.DList' via newtype deriving.
newtype Error = Error (D.DList (Maybe SourcePos, Doc))
  deriving (Eq, Monoid, Show)
-- | Stack two documents vertically with a separator document between them.
vSepBy :: Doc -> Doc -> Doc -> Doc
vSepBy separator upper lower = upper $+$ separator $+$ lower
-- | Render every collected error, separated by blank lines.
instance Pretty Error where
  pPrint (Error ds) = case D.toList ds of
    []    -> text "No errors"
    items -> foldl (vSepBy empty) empty (map (uncurry prettyError) items)
-- | Render one error: the message, preceded by its source position if known.
prettyError :: Maybe SourcePos -> Doc -> Doc
prettyError mpos message =
  maybe message (\pos -> prettySourcePos pos $+$ message) mpos
-- | Render a position as @At \<name\> line \<l\> column \<c\>:@
-- (the name part is omitted when the position carries none).
prettySourcePos :: SourcePos -> Doc
prettySourcePos pos = hcat
  [ text "At "
  , maybe empty (text . unpack) (pos ^. spName)
  , text " line "
  , text (show (pos ^. spLine))
  , text " column "
  , text (show (pos ^. spColumn))
  , text ":"
  ]
-- | Build a single-message 'Error', optionally tagged with a position.
err :: Maybe SourcePos -> Text -> Error
err mpos msg = Error (D.singleton (mpos, text (unpack msg)))
-- | A single-message 'Error' with no source position attached.
err' :: Text -> Error
err' msg = err Nothing msg
-- | Record an error in the state without aborting; a later 'check' (or
-- 'fatal') will throw everything accumulated so far.
report :: (MonadState (Maybe e) m, Monoid e) => e -> m ()
report e = modify (\pending -> pending `mappend` Just e)
-- | Throw any errors recorded via 'report', clearing the state first.
-- A no-op when nothing has been reported.
check :: (MonadState (Maybe e) m, MonadError e m) => m ()
check = do
  pending <- get
  case pending of
    Nothing -> return ()
    Just e  -> put Nothing >> throwError e
-- | Run an action, then 'check' for reported errors, keeping the result.
checked :: (MonadState (Maybe e) m, MonadError e m) => m a -> m a
checked action = do
  result <- action
  check
  return result
-- | Report an error and immediately throw everything recorded so far.
-- The 'impossible' branch cannot fire: 'report' just put a 'Just' there.
fatal :: (MonadState (Maybe e) m, Monoid e, MonadError e m) => e -> m a
fatal e = do
  report e
  pending <- get
  case pending of
    Nothing -> impossible "fatal"
    Just e' -> put Nothing >> throwError e'
-- | Run an 'ExceptT' action; pretty-print a failure to stderr, otherwise
-- hand the result to the continuation.
logExceptT :: (MonadIO m, Pretty e) => ExceptT e m a -> (a -> m ()) -> m ()
logExceptT action onSuccess = do
  result <- runExceptT action
  case result of
    Left e  -> liftIO (hPutStr stderr (render (pPrint e)))
    Right a -> onSuccess a
-- | 'logExceptT' for actions whose success needs no further handling.
logExceptT' :: (MonadIO m, Pretty e) => ExceptT e m () -> m ()
logExceptT' action = logExceptT action return
-- | Write a line of text to stderr.
log :: (MonadIO m) => Text -> m ()
log msg = liftIO (hPutStrLn stderr (unpack msg))
| ktvoelker/helium | src/He/Error.hs | gpl-3.0 | 2,139 | 0 | 13 | 457 | 946 | 496 | 450 | -1 | -1 |
import System.IO
import System.FilePath
import Data.List
import Data.List.Split
-- | Read the Goldilocks input, split off her preference line (index 0),
-- and print the indices of every remaining entry that fits it.
-- NB: Windows-style relative path preserved from the original.
main :: IO ()
main = do
    contents <- readFile "..\\goldi_input_1.txt"
    let (goldi : bears) = zip [0..] (conv (getGoldi contents))
    let fitting = filter (checkSeats (snd goldi)) bears
    putStrLn (unwords (map (show . fst) fitting))
-- | Does the preference pair fit an indexed limit pair?  True when the
-- first preference is at most the first limit and the second preference
-- is at least the second limit; the index component is ignored.
checkSeats :: (Int, Int) -> (Int, (Int, Int)) -> Bool
checkSeats (prefA, prefB) (_, (limitA, limitB)) =
    prefA <= limitA && prefB >= limitB
-- | All space-separated tokens of the input, line by line.
-- Uses 'splitOn' rather than 'words', so runs of spaces yield empty
-- tokens -- behaviour preserved from the original.
getGoldi :: String -> [ [Char]]
getGoldi = concatMap (splitOn " ") . lines
-- | Pair up a flat list of numeric tokens: @["1","2","3","4"]@ becomes
-- @[(1,2),(3,4)]@.  An odd-length list now raises a descriptive error
-- instead of the original's bare non-exhaustive-pattern crash.
-- ('read' remains partial for unparsable tokens, as before.)
conv :: [[Char]] -> [ (Int, Int) ]
conv [] = []
conv [_] = error "conv: odd number of tokens (expected key/value pairs)"
conv (k:v:t) = (read k :: Int, read v :: Int) : conv t
| jandersen7/Daily | src/291e/hs/goldi.hs | gpl-3.0 | 639 | 0 | 13 | 142 | 370 | 187 | 183 | 16 | 1 |
{- ============================================================================
| Copyright 2011 Matthew D. Steele <[email protected]> |
| |
| This file is part of Fallback. |
| |
| Fallback is free software: you can redistribute it and/or modify it under |
| the terms of the GNU General Public License as published by the Free |
| Software Foundation, either version 3 of the License, or (at your option) |
| any later version. |
| |
| Fallback is distributed in the hope that it will be useful, but WITHOUT |
| ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or |
| FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for |
| more details. |
| |
| You should have received a copy of the GNU General Public License along |
| with Fallback. If not, see <http://www.gnu.org/licenses/>. |
============================================================================ -}
module Fallback.Draw
(module Fallback.Draw.Base,
module Fallback.Draw.GUI)
where
import Fallback.Draw.Base
import Fallback.Draw.GUI
-------------------------------------------------------------------------------
| mdsteele/fallback | src/Fallback/Draw.hs | gpl-3.0 | 1,660 | 0 | 5 | 696 | 36 | 25 | 11 | 5 | 0 |
-- grid is a game written in Haskell
-- Copyright (C) 2018 [email protected]
--
-- This file is part of grid.
--
-- grid is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- grid is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with grid. If not, see <http://www.gnu.org/licenses/>.
--
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE UnboxedTuples #-}
module Game.Grid.GridWorld.SegmentArray
(
SegmentArray,
makeSegmentArray,
segmentarrayRead,
segmentarrayWrite,
) where
import MyPrelude
import Game
import Game.Grid.GridWorld.Segment
import Game.Grid.GridWorld.Node
import Game.Grid.GridWorld.Turn
import Data.Int
-- making unportable
import GHC.Prim
import GHC.Exts
import GHC.Types
import GHC.Int
import System.IO.Unsafe
--------------------------------------------------------------------------------
-- SegmentArray
-- we need some type of sequential container for the segments of path.
-- also, we want the path to be able to grow modulo its size. so I decided
-- to use a custom array type.
-- update: after working in IO, we could have used IOArray instead, but read note
-- below
-- note: in GHC, the datatypes Int8/Int16/... are implemented by the word size
-- of the architecture (which is 32 bits on ARMv7), so using them does not
-- save memory. but in this implementation of SegmentArray, we use the cor-
-- responding bits and so save memory.
-- | A pinned mutable byte array holding 'Segment's, 16 bytes per slot
-- (see 'makeSegmentArray' for the allocation and 'segmentarrayWrite' for
-- the layout).  Mutation is exposed through a pure-looking interface, so
-- callers must thread the returned array value to keep effects ordered.
data SegmentArray =
    SegmentArray (MutableByteArray# RealWorld)
-- | Run a primitive state transformer as a pure value.  Built on
-- 'unsafeDupablePerformIO', which is cheap but may duplicate the action
-- across threads -- acceptable for the array reads/writes used here.
unsafeState# :: (State# RealWorld -> (# State# RealWorld, a #)) -> a
unsafeState# =
    unsafeDupablePerformIO . IO
-- | Allocate room for the given number of segments: 16 bytes per segment,
-- pinned, 4-byte aligned.  Contents start uninitialised.
makeSegmentArray :: UInt -> SegmentArray
makeSegmentArray (W# n#) = unsafeState# $ \s0# ->
    case newAlignedPinnedByteArray# (word2Int# (n# `timesWord#` 16##)) 4# s0# of
        (# s1#, mba# #) -> (# s1#, SegmentArray mba# #)
-- | Store a segment at the given index (no bounds checking).
--
-- Layout of each 16-byte slot: bytes 0-5 hold the three node coordinates
-- as Int16s, bytes 6-14 the nine Int8 turn components, byte 15 is
-- explicitly zeroed padding.  'writeInt16Array#' indexes in 2-byte
-- elements while 'writeInt8Array#' indexes in single bytes, hence the
-- two index helpers below.
segmentarrayWrite :: SegmentArray -> UInt -> Segment -> SegmentArray
segmentarrayWrite sa@(SegmentArray mba#)
                  (W# ix#)
                  (Segment (Node (I16# x#) (I16# y#) (I16# z#))
                           (Turn (I8# x0#) (I8# x1#) (I8# x2#)
                                 (I8# y0#) (I8# y1#) (I8# y2#)
                                 (I8# z0#) (I8# z1#) (I8# z2#))) = unsafeState# $ \s# ->
    case writeInt16Array# mba# (int16Ix# ix# 0##) x# s# of { s# ->
    case writeInt16Array# mba# (int16Ix# ix# 1##) y# s# of { s# ->
    case writeInt16Array# mba# (int16Ix# ix# 2##) z# s# of { s# ->
    case writeInt8Array# mba# (byteIx# ix# 6##) x0# s# of { s# ->
    case writeInt8Array# mba# (byteIx# ix# 7##) x1# s# of { s# ->
    case writeInt8Array# mba# (byteIx# ix# 8##) x2# s# of { s# ->
    case writeInt8Array# mba# (byteIx# ix# 9##) y0# s# of { s# ->
    case writeInt8Array# mba# (byteIx# ix# 10##) y1# s# of { s# ->
    case writeInt8Array# mba# (byteIx# ix# 11##) y2# s# of { s# ->
    case writeInt8Array# mba# (byteIx# ix# 12##) z0# s# of { s# ->
    case writeInt8Array# mba# (byteIx# ix# 13##) z1# s# of { s# ->
    case writeInt8Array# mba# (byteIx# ix# 14##) z2# s# of { s# ->
    case writeInt8Array# mba# (byteIx# ix# 15##) 0# s# of { s# ->
    (# s#, sa #) }}}}}}}}}}}}}
    where
      -- Int16-element index: 8 two-byte slots per 16-byte segment.
      int16Ix# ix# a# = word2Int# ((8## `timesWord#` ix#) `plusWord#` a#)
      -- Byte index: 16 bytes per segment.
      byteIx# ix# a# = word2Int# ((16## `timesWord#` ix#) `plusWord#` a#)
-- | Load the segment stored at the given index (no bounds checking).
-- Mirrors the layout written by 'segmentarrayWrite'; the padding byte
-- (offset 15) is read and discarded, matching the original code.
segmentarrayRead :: SegmentArray -> UInt -> Segment
segmentarrayRead (SegmentArray mba#) (W# ix#) = unsafeState# $ \s# ->
    case readInt16Array# mba# (int16Ix# ix# 0##) s# of { (# s#, x# #) ->
    case readInt16Array# mba# (int16Ix# ix# 1##) s# of { (# s#, y# #) ->
    case readInt16Array# mba# (int16Ix# ix# 2##) s# of { (# s#, z# #) ->
    case readInt8Array# mba# (byteIx# ix# 6##) s# of { (# s#, x0# #) ->
    case readInt8Array# mba# (byteIx# ix# 7##) s# of { (# s#, x1# #) ->
    case readInt8Array# mba# (byteIx# ix# 8##) s# of { (# s#, x2# #) ->
    case readInt8Array# mba# (byteIx# ix# 9##) s# of { (# s#, y0# #) ->
    case readInt8Array# mba# (byteIx# ix# 10##) s# of { (# s#, y1# #) ->
    case readInt8Array# mba# (byteIx# ix# 11##) s# of { (# s#, y2# #) ->
    case readInt8Array# mba# (byteIx# ix# 12##) s# of { (# s#, z0# #) ->
    case readInt8Array# mba# (byteIx# ix# 13##) s# of { (# s#, z1# #) ->
    case readInt8Array# mba# (byteIx# ix# 14##) s# of { (# s#, z2# #) ->
    case readInt8Array# mba# (byteIx# ix# 15##) s# of { (# s#, _ #) ->
    (# s#, Segment (Node (I16# x#) (I16# y#) (I16# z#))
                   (Turn (I8# x0#) (I8# x1#) (I8# x2#)
                         (I8# y0#) (I8# y1#) (I8# y2#)
                         (I8# z0#) (I8# z1#) (I8# z2#)) #) }}}}}}}}}}}}}
    where
      -- Int16-element index: 8 two-byte slots per 16-byte segment.
      int16Ix# ix# a# = word2Int# ((8## `timesWord#` ix#) `plusWord#` a#)
      -- Byte index: 16 bytes per segment.
      byteIx# ix# a# = word2Int# ((16## `timesWord#` ix#) `plusWord#` a#)
{-
-- | we create a newtype so that we later can just store the direction if we want
-- (strip Segment for structure)
newtype StorableSegment =
StorableSegment Segment
wrapStorableSegment :: Segment -> StorableSegment
wrapStorableSegment seg =
StorableSegment seg
instance Storable StorableSegment where
sizeOf _ = 16
alignment _ = 4 -- ^ 4 byte align on ARM (?)
peek ptr = do
-- node
x <- peekByteOff ptr 0 :: IO Int16
y <- peekByteOff ptr 2 :: IO Int16
z <- peekByteOff ptr 4 :: IO Int16
-- turn
x0 <- peekByteOff ptr 6 :: IO Int8
x1 <- peekByteOff ptr 7 :: IO Int8
x2 <- peekByteOff ptr 8 :: IO Int8
y0 <- peekByteOff ptr 9 :: IO Int8
y1 <- peekByteOff ptr 10 :: IO Int8
y2 <- peekByteOff ptr 11 :: IO Int8
z0 <- peekByteOff ptr 12 :: IO Int8
z1 <- peekByteOff ptr 13 :: IO Int8
z2 <- peekByteOff ptr 14 :: IO Int8
-- 15 is empty!
return $ StorableSegment $ Segment (Node x y z) (Turn x0 x1 x2
y0 y1 y2
z0 z1 z2)
poke ptr (StorableSegment (Segment (Node x y z) (Turn x0 x1 x2
y0 y1 y2
z0 z1 z2))) = do
-- node
pokeByteOff ptr 0 x
pokeByteOff ptr 2 y
pokeByteOff ptr 4 z
-- turn
pokeByteOff ptr 6 x0
pokeByteOff ptr 7 x1
pokeByteOff ptr 8 x2
pokeByteOff ptr 9 y0
pokeByteOff ptr 10 y1
pokeByteOff ptr 11 y2
pokeByteOff ptr 12 z0
pokeByteOff ptr 13 z1
pokeByteOff ptr 14 z2
-- 15: empty!
-}
| karamellpelle/grid | source/Game/Grid/GridWorld/SegmentArray.hs | gpl-3.0 | 7,214 | 0 | 51 | 2,150 | 1,491 | 812 | 679 | 76 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.OpsWorks.DeregisterInstance
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Deregister a registered Amazon EC2 or on-premises instance. This action
-- removes the instance from the stack and returns it to your control. This
-- action can not be used with instances that were created with AWS OpsWorks.
--
-- Required Permissions: To use this action, an IAM user must have a Manage
-- permissions level for the stack or an attached policy that explicitly grants
-- permissions. For more information on user permissions, see <http://docs.aws.amazon.com/opsworks/latest/userguide/opsworks-security-users.html Managing UserPermissions>.
--
-- <http://docs.aws.amazon.com/opsworks/latest/APIReference/API_DeregisterInstance.html>
module Network.AWS.OpsWorks.DeregisterInstance
(
-- * Request
DeregisterInstance
-- ** Request constructor
, deregisterInstance
-- ** Request lenses
, di1InstanceId
-- * Response
, DeregisterInstanceResponse
-- ** Response constructor
, deregisterInstanceResponse
) where
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.OpsWorks.Types
import qualified GHC.Exts
-- | Request payload: the sole field is the EC2\/on-premises instance ID.
-- 'Monoid' and 'IsString' come from the underlying 'Text' via newtype
-- deriving.
newtype DeregisterInstance = DeregisterInstance
    { _di1InstanceId :: Text
    } deriving (Eq, Ord, Read, Show, Monoid, IsString)
-- | 'DeregisterInstance' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'di1InstanceId' @::@ 'Text'
--
deregisterInstance :: Text -- ^ 'di1InstanceId'
                   -> DeregisterInstance
deregisterInstance instanceId =
    DeregisterInstance { _di1InstanceId = instanceId }
-- | The instance ID.  Total lens over '_di1InstanceId'.
di1InstanceId :: Lens' DeregisterInstance Text
di1InstanceId = lens _di1InstanceId (\s a -> s { _di1InstanceId = a })
-- | Empty response: a successful deregistration returns no payload
-- (see the 'nullResponse' in the 'AWSRequest' instance).
data DeregisterInstanceResponse = DeregisterInstanceResponse
    deriving (Eq, Ord, Read, Show, Generic)
-- | 'DeregisterInstanceResponse' constructor.
deregisterInstanceResponse :: DeregisterInstanceResponse
deregisterInstanceResponse = DeregisterInstanceResponse
-- The request is a JSON POST to the service root ("/") with no query
-- string or extra headers; the instance ID travels in the JSON body.
instance ToPath DeregisterInstance where
    toPath _ = "/"

instance ToQuery DeregisterInstance where
    toQuery _ = mempty

instance ToHeaders DeregisterInstance

instance ToJSON DeregisterInstance where
    toJSON req = object
        [ "InstanceId" .= _di1InstanceId req
        ]

-- The action returns an empty body, hence 'nullResponse'.
instance AWSRequest DeregisterInstance where
    type Sv DeregisterInstance = OpsWorks
    type Rs DeregisterInstance = DeregisterInstanceResponse

    request  = post "DeregisterInstance"
    response = nullResponse DeregisterInstanceResponse
| dysinger/amazonka | amazonka-opsworks/gen/Network/AWS/OpsWorks/DeregisterInstance.hs | mpl-2.0 | 3,538 | 0 | 9 | 707 | 351 | 216 | 135 | 47 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.SWF.DeprecateWorkflowType
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Deprecates the specified /workflow type/. After a workflow type has been
-- deprecated, you cannot create new executions of that type. Executions that
-- were started before the type was deprecated will continue to run. A
-- deprecated workflow type may still be used when calling visibility actions.
--
-- This operation is eventually consistent. The results are best effort and may
-- not exactly reflect recent updates and changes. Access Control
--
-- You can use IAM policies to control this action's access to Amazon SWF
-- resources as follows:
--
-- Use a 'Resource' element with the domain name to limit the action to only
-- specified domains. Use an 'Action' element to allow or deny permission to call
-- this action. Constrain the following parameters by using a 'Condition' element
-- with the appropriate keys. 'workflowType.name': String constraint. The key is 'swf:workflowType.name'. 'workflowType.version': String constraint. The key is 'swf:workflowType.version'
-- . If the caller does not have sufficient permissions to invoke the action,
-- or the parameter values fall outside the specified constraints, the action
-- fails. The associated event attribute's cause parameter will be set to
-- OPERATION_NOT_PERMITTED. For details and example IAM policies, see <http://docs.aws.amazon.com/amazonswf/latest/developerguide/swf-dev-iam.html Using IAMto Manage Access to Amazon SWF Workflows>.
--
-- <http://docs.aws.amazon.com/amazonswf/latest/apireference/API_DeprecateWorkflowType.html>
module Network.AWS.SWF.DeprecateWorkflowType
(
-- * Request
DeprecateWorkflowType
-- ** Request constructor
, deprecateWorkflowType
-- ** Request lenses
, dwt1Domain
, dwt1WorkflowType
-- * Response
, DeprecateWorkflowTypeResponse
-- ** Response constructor
, deprecateWorkflowTypeResponse
) where
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.SWF.Types
import qualified GHC.Exts
-- | Request payload: the registered domain plus the workflow type
-- (name + version) to deprecate.
data DeprecateWorkflowType = DeprecateWorkflowType
    { _dwt1Domain       :: Text
    , _dwt1WorkflowType :: WorkflowType
    } deriving (Eq, Read, Show)
-- | 'DeprecateWorkflowType' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'dwt1Domain' @::@ 'Text'
--
-- * 'dwt1WorkflowType' @::@ 'WorkflowType'
--
deprecateWorkflowType :: Text         -- ^ 'dwt1Domain'
                      -> WorkflowType -- ^ 'dwt1WorkflowType'
                      -> DeprecateWorkflowType
deprecateWorkflowType domain wfType = DeprecateWorkflowType
    { _dwt1Domain       = domain
    , _dwt1WorkflowType = wfType
    }
-- | The name of the domain in which the workflow type is registered.
-- Total lens over '_dwt1Domain'.
dwt1Domain :: Lens' DeprecateWorkflowType Text
dwt1Domain = lens _dwt1Domain (\s a -> s { _dwt1Domain = a })
-- | The workflow type to deprecate.  Total lens over '_dwt1WorkflowType'.
dwt1WorkflowType :: Lens' DeprecateWorkflowType WorkflowType
dwt1WorkflowType = lens _dwt1WorkflowType (\s a -> s { _dwt1WorkflowType = a })
-- | Empty response: a successful deprecation returns no payload
-- (see the 'nullResponse' in the 'AWSRequest' instance).
data DeprecateWorkflowTypeResponse = DeprecateWorkflowTypeResponse
    deriving (Eq, Ord, Read, Show, Generic)
-- | 'DeprecateWorkflowTypeResponse' constructor.
deprecateWorkflowTypeResponse :: DeprecateWorkflowTypeResponse
deprecateWorkflowTypeResponse = DeprecateWorkflowTypeResponse
-- The request is a JSON POST to the service root ("/") with no query
-- string or extra headers; domain and workflow type travel in the body.
instance ToPath DeprecateWorkflowType where
    toPath _ = "/"

instance ToQuery DeprecateWorkflowType where
    toQuery _ = mempty

instance ToHeaders DeprecateWorkflowType

instance ToJSON DeprecateWorkflowType where
    toJSON req = object
        [ "domain"       .= _dwt1Domain req
        , "workflowType" .= _dwt1WorkflowType req
        ]

-- The action returns an empty body, hence 'nullResponse'.
instance AWSRequest DeprecateWorkflowType where
    type Sv DeprecateWorkflowType = SWF
    type Rs DeprecateWorkflowType = DeprecateWorkflowTypeResponse

    request  = post "DeprecateWorkflowType"
    response = nullResponse DeprecateWorkflowTypeResponse
| dysinger/amazonka | amazonka-swf/gen/Network/AWS/SWF/DeprecateWorkflowType.hs | mpl-2.0 | 4,915 | 0 | 9 | 962 | 423 | 264 | 159 | 54 | 1 |
{-# LANGUAGE OverloadedStrings #-}
--
-- Copyright (c) 2005-2022 Stefan Wehr - http://www.stefanwehr.de
--
-- This library is free software; you can redistribute it and/or
-- modify it under the terms of the GNU Lesser General Public
-- License as published by the Free Software Foundation; either
-- version 2.1 of the License, or (at your option) any later version.
--
-- This library is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- Lesser General Public License for more details.
--
-- You should have received a copy of the GNU Lesser General Public
-- License along with this library; if not, write to the Free Software
-- Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA
--
{-|
This module defines functions for notifying all test reporters registered about
particular events in the lifecycle of a test run.
Further, it defines the standard test reporters for HTF's various output formats.
-}
module Test.Framework.TestReporter (
IsParallel(..), isParallelFromBool, IsJsonOutput(..), IsXmlOutput(..),
reportAllTests, reportGlobalStart, reportTestStart, reportTestResult,
reportGlobalResults, defaultTestReporters
) where
import Test.Framework.TestTypes
import Test.Framework.Location
import Test.Framework.Colors
import Test.Framework.JsonOutput
import Test.Framework.XmlOutput
import System.IO
import Control.Monad.RWS
import Text.PrettyPrint
import qualified Data.Text.IO as T
import qualified Data.ByteString.Lazy as BSL
-- | Invokes 'tr_reportAllTests' on every registered test reporter.
reportAllTests :: ReportAllTests
reportAllTests tests =
    do reporters <- asks tc_reporters
       forM_ reporters $ \rep -> tr_reportAllTests rep tests
-- | Invokes 'tr_reportGlobalStart' on every registered test reporter.
reportGlobalStart :: ReportGlobalStart
reportGlobalStart tests =
    do reporters <- asks tc_reporters
       forM_ reporters $ \rep -> tr_reportGlobalStart rep tests
-- | Invokes 'tr_reportTestStart' on every registered test reporter.
reportTestStart :: ReportTestStart
reportTestStart t =
    do reporters <- asks tc_reporters
       forM_ reporters $ \rep -> tr_reportTestStart rep t
-- | Invokes 'tr_reportTestResult' on every registered test reporter.
reportTestResult :: ReportTestResult
reportTestResult t =
    do reporters <- asks tc_reporters
       forM_ reporters $ \rep -> tr_reportTestResult rep t
-- | Invokes 'tr_reportGlobalResults' on every registered test reporter.
reportGlobalResults :: ReportGlobalResults
reportGlobalResults arg =
    do reporters <- asks tc_reporters
       forM_ reporters $ \rep -> tr_reportGlobalResults rep arg
-- | Whether tests are executed in parallel.
data IsParallel = Parallel | NonParallel

-- | Convert a 'Bool' flag (@True@ = parallel) into 'IsParallel'.
isParallelFromBool :: Bool -> IsParallel
isParallelFromBool parallel = if parallel then Parallel else NonParallel
-- | Whether results are emitted as machine-readable JSON.
data IsJsonOutput = JsonOutput | NoJsonOutput
-- | Whether an additional XML report is produced.
data IsXmlOutput = XmlOutput | NoXmlOutput
-- | The default test reporters for HTF: one main reporter chosen by
-- execution mode (sequential\/parallel) and output format (human\/JSON),
-- plus an optional XML reporter appended at the end.
defaultTestReporters :: IsParallel
                     -> IsJsonOutput
                     -> IsXmlOutput
                     -> [TestReporter]
defaultTestReporters inParallel forMachine doXml =
    mainReporter : xmlReporters
  where
    mainReporter =
        case (inParallel, forMachine) of
          (NonParallel, NoJsonOutput) ->
              TestReporter
              { tr_id = "rep_seq_human"
              , tr_reportAllTests = reportAllTestsH
              , tr_reportGlobalStart = reportGlobalStartHS
              , tr_reportTestStart = reportTestStartHS
              , tr_reportTestResult = reportTestResultHS
              , tr_reportGlobalResults = reportGlobalResultsH
              }
          (Parallel, NoJsonOutput) ->
              TestReporter
              { tr_id = "rep_par_human"
              , tr_reportAllTests = reportAllTestsH
              , tr_reportGlobalStart = reportGlobalStartHP
              , tr_reportTestStart = reportTestStartHP
              , tr_reportTestResult = reportTestResultHP
              , tr_reportGlobalResults = reportGlobalResultsH
              }
          (NonParallel, JsonOutput) ->
              TestReporter
              { tr_id = "rep_seq_machine"
              , tr_reportAllTests = reportAllTestsM
              , tr_reportGlobalStart = reportGlobalStartMS
              , tr_reportTestStart = reportTestStartMS
              , tr_reportTestResult = reportTestResultMS
              , tr_reportGlobalResults = reportGlobalResultsM
              }
          (Parallel, JsonOutput) ->
              TestReporter
              { tr_id = "rep_par_machine"
              , tr_reportAllTests = reportAllTestsM
              , tr_reportGlobalStart = reportGlobalStartMP
              , tr_reportTestStart = reportTestStartMP
              , tr_reportTestResult = reportTestResultMP
              , tr_reportGlobalResults = reportGlobalResultsM
              }
    xmlReporters =
        case doXml of
          NoXmlOutput -> []
          XmlOutput ->
              [ (emptyTestReporter "rep_xml")
                { tr_reportGlobalResults = reportGlobalResultsXml } ]
--
-- output for humans
--
-- | Flattened test name, plus its source location in parentheses if known.
humanTestName :: GenFlatTest a -> String
humanTestName ft =
    flatName (ft_path ft) ++
    maybe "" (\loc -> " (" ++ showLoc loc ++ ")") (ft_location ft)
-- | Emit the human-readable "[TEST] <name>" start line at the given level.
reportHumanTestStartMessage :: ReportLevel -> GenFlatTest a -> TR ()
reportHumanTestStartMessage level ft =
    reportTR level (colorize testStartColor "[TEST] " +++ noColor (humanTestName ft))
-- sequential
-- | Sequential human mode: nothing is announced before the run.
reportGlobalStartHS :: ReportGlobalStart
reportGlobalStartHS _tests = return ()
-- | Sequential human mode: announce each test at Debug level only.
reportTestStartHS :: ReportTestStart
reportTestStartHS = reportHumanTestStartMessage Debug
-- | Report one finished test in human format (sequential runner).
-- Passing tests print at Debug level only; for the other outcomes, quiet
-- mode additionally re-prints the "[TEST] ..." start line so failures are
-- still attributable.
reportTestResultHS :: ReportTestResult
reportTestResultHS ftr =
    case rr_result payload of
      Pass -> emit Debug okSuffix
      Pending -> startThenEmit pendingSuffix
      Fail -> startThenEmit failureSuffix
      Error -> startThenEmit errorSuffix
    where
      payload = ft_payload ftr
      msg = attachCallStack (rr_message payload) (rr_stack payload)
      -- In quiet mode the start line was suppressed; print it now.
      startThenEmit suffix =
          do tc <- ask
             when (tc_quiet tc) (reportHumanTestStartMessage Info ftr)
             emit Info suffix
      emit level suffix =
          reportTR level (ensureNewlineColorString msg +++ suffix +++ noColor timeStr)
      timeStr = " (" ++ show (rr_wallTimeMs payload) ++ "ms)\n"
      failureSuffix = colorize warningColor "*** Failed!"
      errorSuffix = colorize warningColor "@@@ Error!"
      pendingSuffix = colorize pendingColor "^^^ Pending!"
      okSuffix = colorize testOkColor "+++ OK"
-- parallel
-- | Parallel human mode: nothing is announced before the run.
reportGlobalStartHP :: ReportGlobalStart
reportGlobalStartHP _tests = return ()
-- | Parallel human mode: log which test is starting (Debug level).
reportTestStartHP :: ReportTestStart
reportTestStartHP ft = reportStringTR Debug ("Starting " ++ humanTestName ft)
-- | Parallel human mode: re-print the start line (results may interleave),
-- then report exactly as in the sequential case.
reportTestResultHP :: ReportTestResult
reportTestResultHP ftr =
    reportHumanTestStartMessage Debug ftr >> reportTestResultHS ftr
-- results and all tests
-- | Human-readable listing of all known tests.
reportAllTestsH :: ReportAllTests
reportAllTestsH = reportStringTR Info . render . renderTestNames
-- | Print the human-readable global summary: the count table first, then
-- per-category name lists (timed-out and filtered lists are elided when
-- they contain 10 or more entries), then total wall-clock time.
-- Category headers are colorized only when their count is non-zero.
reportGlobalResultsH :: ReportGlobalResults
reportGlobalResultsH arg =
    do let passed = length (rgra_passed arg)
           pending = length (rgra_pending arg)
           failed = length (rgra_failed arg)
           error = length (rgra_errors arg)
           timedOut = length (rgra_timedOut arg)
           filtered = length (rgra_filtered arg)
           total = passed + failed + error + pending
       let pendings = (if pending > 0 then colorize pendingColor else noColor) "* Pending:"
           failures = (if failed > 0 then colorize warningColor else noColor) "* Failures:"
           errors = (if error > 0 then colorize warningColor else noColor) "* Errors:"
       reportTR Info ("* Tests:     " +++ showC total +++ "\n" +++
                      "* Passed:    " +++ showC passed +++ "\n" +++
                      pendings +++ "   " +++ showC pending +++ "\n" +++
                      failures +++ "  " +++ showC failed +++ "\n" +++
                      errors +++ "    " +++ showC error +++ "\n" +++
                      "* Timed out: " +++ showC timedOut +++ "\n" +++
                      "* Filtered:  " +++ showC filtered)
       when (timedOut > 0) $
          if timedOut < 10
            then
              reportTR Info
                  ("\n" +++ noColor "* Timed out:" +++ "\n" +++ renderTestNames' (reverse (rgra_timedOut arg)))
            else
              reportTR Info
                  ("\n" +++ noColor "* Timed out: (" +++ showC timedOut +++ noColor ", too many to list)")
       when (filtered > 0) $
          if filtered < 10
            then
              reportTR Info
                  ("\n" +++ noColor "* Filtered:" +++ "\n" +++ renderTestNames' (reverse (rgra_filtered arg)))
            else
              reportTR Info
                  ("\n" +++ noColor "* Filtered: (" +++ showC filtered +++ noColor ", too many to list)")
       when (pending > 0) $
          reportTR Info
              ("\n" +++ pendings +++ "\n" +++ renderTestNames' (reverse (rgra_pending arg)))
       when (failed > 0) $
          reportTR Info
              ("\n" +++ failures +++ "\n" +++ renderTestNames' (reverse (rgra_failed arg)))
       when (error > 0) $
          reportTR Info
              ("\n" +++ errors +++ "\n" +++ renderTestNames' (reverse (rgra_errors arg)))
       reportStringTR Info ("\nTotal execution time: " ++ show (rgra_timeMs arg) ++ "ms")
    where
      showC x = noColor (show x)
      renderTestNames' rrs =
          noColor $ render $ nest 2 $ renderTestNames rrs
-- | One bullet line per test, stacked vertically.
renderTestNames :: [GenFlatTest a] -> Doc
renderTestNames tests =
    vcat [ text "*" <+> text (humanTestName ft) | ft <- tests ]
--
-- output for machines
--
-- sequential
-- | Sequential JSON mode: nothing is emitted before the run.
reportGlobalStartMS :: ReportGlobalStart
reportGlobalStartMS _tests = return ()
-- | Emit a JSON test-start event carrying the flattened test name.
reportTestStartMS :: ReportTestStart
reportTestStartMS ft =
    reportJsonTR (mkTestStartEventObj ft (flatName (ft_path ft)))
-- | Emit a JSON test-end event carrying the flattened test name.
reportTestResultMS :: ReportTestResult
reportTestResultMS ftr =
    reportJsonTR (mkTestEndEventObj ftr (flatName (ft_path ftr)))
-- parallel
-- | Parallel JSON mode: nothing is emitted before the run.
reportGlobalStartMP :: ReportGlobalStart
reportGlobalStartMP _tests = return ()
-- | Parallel JSON start notification: identical to the sequential one.
reportTestStartMP :: ReportTestStart
reportTestStartMP = reportTestStartMS
-- | Parallel JSON result notification: identical to the sequential one.
reportTestResultMP :: ReportTestResult
reportTestResultMP = reportTestResultMS
-- results and all tests
-- | Emit the JSON listing of all known tests with their flattened names.
reportAllTestsM :: ReportAllTests
reportAllTestsM l =
    reportJsonTR (mkTestListObj [ (ft, flatName (ft_path ft)) | ft <- l ])
-- | Emit the JSON global results object.
reportGlobalResultsM :: ReportGlobalResults
reportGlobalResultsM = reportJsonTR . mkTestResultsObj
-- | Write the XML results to the configured file, or to stdout when no
-- output file is configured.
reportGlobalResultsXml :: ReportGlobalResults
reportGlobalResultsXml arg =
    do tc <- ask
       let xml = mkGlobalResultsXml arg
       case tc_outputXml tc of
         Just fname -> liftIO (withFile fname WriteMode (\h -> BSL.hPut h xml))
         Nothing -> liftIO (BSL.putStr xml)
--
-- General reporting routines
--
-- | Render a colored message according to the configuration and emit it
-- with a trailing newline.
reportTR :: ReportLevel -> ColorString -> TR ()
reportTR level msg =
    do tc <- ask
       reportGen tc level (\h -> T.hPutStrLn h (renderColorString msg (tc_useColors tc)))
-- | Emit a plain 'String' message with a trailing newline.
reportStringTR :: ReportLevel -> String -> TR ()
reportStringTR level msg =
    ask >>= \tc -> reportGen tc level (\h -> hPutStrLn h msg)
-- | Emit raw lazy bytes as-is (no trailing newline is added).
reportLazyBytesTR :: ReportLevel -> BSL.ByteString -> TR ()
reportLazyBytesTR level bytes =
    ask >>= \tc -> reportGen tc level (\h -> BSL.hPut h bytes)
-- | Serialize an HTF JSON object and emit it at 'Info' level.
reportJsonTR :: HTFJsonObj a => a -> TR ()
reportJsonTR = reportLazyBytesTR Info . decodeObj
-- | Importance of a report message; quiet mode drops everything below
-- 'Info' (see 'reportGen').  The 'Ord' instance gives Debug < Info.
data ReportLevel = Debug | Info
                 deriving (Eq,Ord)
-- | Emit a report through the configured output channel, honouring quiet
-- mode ('Debug' messages are dropped when quiet).  In splitted mode each
-- message goes to its own numbered file; the index counter lives in the
-- runner state.
reportGen :: TestConfig -> ReportLevel -> (Handle -> IO ()) -> TR ()
reportGen tc level fun =
    unless (tc_quiet tc && level < Info) $
      case tc_output tc of
        TestOutputHandle h _ -> liftIO (fun h)
        TestOutputSplitted fp ->
           do -- split mode: one file for each result to avoid locking on windows
              ix <- gets ts_index
              let realFp = fp ++ (show ix) -- just append the index at the end of the file given as output parameter
              modify (\x -> x { ts_index = ts_index x + 1 })
              liftIO $ withFile realFp WriteMode fun
| skogsbaer/HTF | Test/Framework/TestReporter.hs | lgpl-2.1 | 12,583 | 0 | 31 | 3,265 | 2,865 | 1,470 | 1,395 | 247 | 6 |
module ViperVM.Library.FloatMatrixSub (
builtin, function, metaKernel, kernels
) where
import Control.Applicative ( (<$>) )
import ViperVM.VirtualPlatform.FunctionalKernel hiding (metaKernel,proto)
import ViperVM.VirtualPlatform.MetaObject
import ViperVM.VirtualPlatform.Descriptor
import ViperVM.VirtualPlatform.MetaKernel hiding (proto,name,kernels)
import ViperVM.VirtualPlatform.Objects.Matrix
import ViperVM.VirtualPlatform.Object
import ViperVM.Platform.KernelParameter
import ViperVM.Platform.Kernel
import ViperVM.Platform.Peer.KernelPeer
import qualified ViperVM.Library.OpenCL.FloatMatrixSub as CL
----------------------------------------
-- Builtin & Function
---------------------------------------
-- | Expose 'function' as a builtin (IO-wrapped construction).
builtin :: MakeBuiltin
builtin = makeBuiltinIO function
-- | Build the functional kernel: two input matrices, one freshly
-- allocated output matrix (with the same descriptor as the first input),
-- which is also the returned result.
-- NOTE(review): 'makeParams' pattern-matches exactly two arguments;
-- anything else is a caller error and will crash at runtime.
function :: IO FunctionalKernel
function = FunctionalKernel proto makeParams makeResult <$> metaKernel
   where
      proto = Prototype {
            inputs = [MatrixType,MatrixType],
            output = MatrixType
         }
      makeParams args = do
         let [a,b] = args
         c <- allocate (descriptor a)
         return [a,b,c]
      makeResult args = last args
----------------------------------------
-- MetaKernel
---------------------------------------
-- | MetaKernel for the float matrix subtraction: inputs @a@, @b@ are
-- read-only, output @c@ is write-only.
metaKernel :: IO MetaKernel
metaKernel = MetaKernel name proto conf <$> kernels
   where
      name = "FloatMatrixSub"
      proto = [
            Arg ReadOnly "a",
            Arg ReadOnly "b",
            Arg WriteOnly "c"
         ]
      -- Pack each matrix as (buffer, leading dimension, element offset).
      -- Byte quantities are divided by 4 since elements are 32-bit floats.
      conf :: [ObjectPeer] -> [KernelParameter]
      conf objs = params
         where
            [MatrixObject ma, MatrixObject mb, MatrixObject mc] = objs
            params =
               [WordParam (fromIntegral width),
                WordParam (fromIntegral height),
                BufferParam (matrixBuffer ma),
                WordParam (fromIntegral lda),
                WordParam (fromIntegral (matrixOffset ma `div` 4)),
                BufferParam (matrixBuffer mb),
                WordParam (fromIntegral ldb),
                WordParam (fromIntegral (matrixOffset mb `div` 4)),
                BufferParam (matrixBuffer mc),
                WordParam (fromIntegral ldc),
                WordParam (fromIntegral (matrixOffset mc `div` 4))]
            (width, height) = matrixDimensions ma
            -- leading dimensions in elements: (width*4 + padding) bytes / 4
            lda = ((matrixWidth ma) * 4 + (matrixPadding ma)) `div` 4
            ldb = ((matrixWidth mb) * 4 + (matrixPadding mb)) `div` 4
            ldc = ((matrixWidth mc) * 4 + (matrixPadding mc)) `div` 4
----------------------------------------
-- Kernels
---------------------------------------
-- | Available kernel implementations (currently only the OpenCL one).
kernels :: IO [Kernel]
kernels = initKernelsIO [ fmap CLKernel CL.kernel ]
| hsyl20/HViperVM | lib/ViperVM/Library/FloatMatrixSub.hs | lgpl-3.0 | 2,695 | 0 | 15 | 691 | 686 | 386 | 300 | 54 | 1 |
module ProjectEuler.A268398Spec (main, spec) where
import Test.Hspec
import ProjectEuler.A268398 (a268398)
-- | Standalone entry point; simply runs 'spec'.
main :: IO ()
main = hspec spec
-- | The first 20 values of A268398 must match the expected data.
spec :: Spec
spec = describe "A268398" $
  it "correctly computes the first 20 elements" $
    map a268398 [1 .. 20] `shouldBe` expected
  where
    expected = [1,2,3,7,8,9,10,14,17,18,19,23,24,25,26,42,43,46,47,51]
| peterokagey/haskellOEIS | test/ProjectEuler/A268398Spec.hs | apache-2.0 | 373 | 0 | 10 | 59 | 160 | 95 | 65 | 10 | 1 |
import System.Environment (getArgs)
import Control.Arrow (first)
import Graphics.Rendering.Chart.Easy hiding (argument)
import Graphics.Rendering.Chart.Backend.Diagrams
import Data.Time
import System.Process
import Options.Applicative
import Data.Semigroup ((<>))
import Data.Text.Time (parseISODateTime)
import qualified Data.TimeSeries as TS
-- Command line params
-- Command line params
data Params = Params
    { resolution :: String
      -- ^ resampling resolution flag: Y, M, D or H (see 'convertTime')
    , fileName :: String }
      -- ^ path to the CSV file holding the input time series
    deriving (Show)
-- Command line parser: --time <flag> plus a positional FILE argument.
params :: Parser Params
params = Params
      <$> strOption
          ( long "time"
         <> metavar "<Y | M | D | H>"
         <> help "Resampling resolution." )
      <*> argument str (metavar "FILE")
-- Convert string representation of time into TimeResolution.
-- Unknown flags abort with a descriptive error instead of an opaque
-- non-exhaustive-pattern crash.
convertTime :: String -> TS.TimeResolution
convertTime "Y" = TS.years 1
convertTime "M" = TS.months 1
convertTime "D" = TS.days 1
convertTime "H" = TS.seconds 3600
convertTime r = error ("convertTime: unsupported resolution " ++ show r ++ ", expected one of Y, M, D, H")
-- Main function. Parses command line arguments and calls drawResampled for Time Series processing
main :: IO ()
main = do
  ps <- execParser opts
  -- Load the CSV (ISO timestamps in the index column) into a series.
  ts <- TS.loadCSV TS.HasHeader parseISODateTime (fileName ps)
  drawResampled ts (convertTime (resolution ps))
  where
    opts = info (helper <*> params) ( fullDesc <> progDesc "Resampling time series" )
-- Convert Time series to format expected by Chart library
-- (UTC timestamps are rendered as LocalTime in the UTC zone).
signal :: Num a => TS.Series a -> [(LocalTime, a)]
signal ts = map (first (utcToLocalTime utc)) (TS.toList ts)
-- Take time series with time resolution and create a chart with the
-- original and the resampled series, written to dist/resample.svg and
-- opened in a browser.
drawResampled :: TS.Series Double -> TS.TimeResolution -> IO ()
drawResampled ts dt = do
  args <- getArgs
  -- The chart title is taken from the first command line argument.
  -- Fall back to a default instead of crashing (partial 'head') when
  -- there are no arguments.
  let fn = case args of
             (a:_) -> a
             [] -> "resample"
  let startTime = UTCTime (fromGregorian 1965 1 1) 0
  let xs = TS.resample startTime dt ts
  toFile def "dist/resample.svg" $ do
    layout_title .= fn
    setColors [opaque blue, opaque red]
    plot (line "Original series" [signal ts])
    plot (line "Resampled series" [signal xs])
  putStrLn "Plot saved to: dist/resample.svg"
  _ <- createProcess (shell "firefox dist/resample.svg")
  return ()
| klangner/timeseries | examples/Resample.hs | bsd-2-clause | 2,108 | 17 | 14 | 430 | 661 | 312 | 349 | 48 | 1 |
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
-----------------------------------------------------------------------------
-- |
-- Module : Graphics.Hoodle.Render.Util.HitTest
-- Copyright : (c) 2011-2013 Ian-Woo Kim
--
-- License : BSD3
-- Maintainer : Ian-Woo Kim <[email protected]>
-- Stability : experimental
-- Portability : GHC
--
-- Hit-testing routines
--
-----------------------------------------------------------------------------
module Graphics.Hoodle.Render.Util.HitTest where
import Control.Applicative
import Control.Monad.State
import Data.Strict.Tuple
-- from hoodle-platform
import Data.Hoodle.Simple
import Data.Hoodle.BBox
-- from this package
import Graphics.Hoodle.Render.Type.HitTest
import Graphics.Hoodle.Render.Type.Item
--------------------------
-- hit test collections --
--------------------------
-- | Is the given point inside (or on the border of) the bounding box?
-- previously, hitTestBBoxPoint
isPointInBBox :: BBox
              -> (Double,Double) -- ^ point
              -> Bool
isPointInBBox (BBox (ulx,uly) (lrx,lry)) (x,y) = inX && inY
  where
    inX = ulx <= x && x <= lrx
    inY = uly <= y && y <= lry
-- | hit test of whether two lines intersect.
-- Solves for the x coordinate of the crossing point of the two
-- (infinite) lines and checks it lies strictly inside both segments'
-- x ranges.
-- NOTE(review): for parallel/vertical segments the denominator is 0,
-- so xc becomes NaN/Infinity and both comparisons come out False --
-- i.e. "no intersection"; confirm that is the intended behaviour.
do2LinesIntersect :: ((Double,Double),(Double,Double)) -- ^ line1
                  -> ((Double,Double),(Double,Double)) -- ^ line2
                  -> Bool
do2LinesIntersect ((x1,y1),(x2,y2)) ((x3,y3),(x4,y4)) =
    (x2-xc)*(xc-x1)>0 && (x3-xc)*(xc-x4) >0
  where x21 = x2-x1
        x43 = x4-x3
        y21 = y2-y1
        y43 = y4-y3
        -- x coordinate where the two infinite lines cross
        xc = (x21*x43*(y3-y1)+y21*x43*x1-y43*x21*x3)/(y21*x43-y43*x21)
-- | previously, hitTestLineStroke
-- Does the line segment cross any consecutive pair of points of the
-- stroke? Strokes with fewer than two points never hit.
doesLineHitStrk :: ((Double,Double),(Double,Double)) -> Stroke -> Bool
doesLineHitStrk line1 str@(Stroke _t _c _w _d) = test (stroke_data str)
  where test [] = False
        test ((_:!:_):[]) = False
        test ((x0:!:y0):(x:!:y):rest)
          = do2LinesIntersect line1 ((x0,y0),(x,y))
            || test ((x:!:y) : rest)
doesLineHitStrk line1 (VWStroke _t _c d) = test d
  where test [] = False
        test ((_,_,_):[]) = False
        -- variable-width strokes carry a z (width) component that is
        -- ignored for hit testing
        test ((x0,y0,_):(x,y,z):rest)
          = do2LinesIntersect line1 ((x0,y0),(x,y))
            || test ((x,y,z) : rest)
-- | Do two bounding boxes intersect with each other?
-- previously, hitTestBBoxBBox
-- p1/p2 check horizontal/vertical interval overlap respectively;
-- boxes are assumed well-formed (upper-left <= lower-right).
do2BBoxIntersect :: BBox -> BBox -> Bool
do2BBoxIntersect (BBox (ulx1,uly1) (lrx1,lry1)) (BBox (ulx2,uly2) (lrx2,lry2))
  = p1 && p2
  where p1 = ulx1 <= ulx2 && ulx2 <= lrx1
             || ulx1 <= lrx2 && lrx2 <= lrx1
             || ulx2 <= ulx1 && ulx1 <= lrx2
             || ulx2 <= lrx1 && lrx1 <= lrx2
        p2 = uly1 <= uly2 && uly2 <= lry1
             || uly1 <= lry2 && lry2 <= lry1
             || uly2 <= uly1 && uly1 <= lry2
             || uly2 <= lry1 && lry1 <= lry2
-- | Is the second bbox completely contained in the first bbox?
-- previously, hitTestInsideBBox
isBBox2InBBox1 :: BBox -- ^ 1st bbox
               -> BBox -- ^ 2nd bbox
               -> Bool
isBBox2InBBox1 b1 (BBox ul2 lr2) =
    isPointInBBox b1 ul2 && isPointInBBox b1 lr2
--------------------------------------------------------
-- item filtering functions that results in AlterList --
--------------------------------------------------------
-- | Partition a list into alternating runs of non-hit and hit items,
-- driven by the predicate. The Bool state records whether anything
-- was hit at all (it is OR-ed with each hit run found).
hltFilteredBy_StateT :: (a -> Bool) -- ^ hit test condition
                     -> [a] -- ^ strokes to test
                     -> State Bool (AlterList (NotHitted a) (Hitted a))
hltFilteredBy_StateT test itms = do
    let (nhit,rest) = break test itms
        (hit,rest') = break (not.test) rest
    modify (|| (not.null) hit)
    if null rest'
      then return (NotHitted nhit :- Hitted hit :- NotHitted [] :- Empty)
      -- the tail of the alternation is built with the pure variant
      else return (NotHitted nhit :- Hitted hit :- hltFilteredBy test rest')
-- | highlight strokes filtered by a condition.
-- previously mkHitTestAL
hltFilteredBy :: (a -> Bool) -- ^ hit test condition
              -> [a] -- ^ strokes to test
              -> AlterList (NotHitted a) (Hitted a)
hltFilteredBy test is = evalState (hltFilteredBy_StateT test is) False
-- | Items whose bounding box overlaps the given box.
hltHittedByBBox :: (GetBBoxable a) =>
                   BBox -- ^ test bounding box
                -> [a] -- ^ items to test
                -> AlterList (NotHitted a) (Hitted a)
hltHittedByBBox b = hltFilteredBy (do2BBoxIntersect b . getBBox)
-- | Items whose bounding box lies entirely inside the given box.
hltEmbeddedByBBox :: (GetBBoxable a) =>
                     BBox
                  -> [a]
                  -> AlterList (NotHitted a) (Hitted a)
hltEmbeddedByBBox b = hltFilteredBy (isBBox2InBBox1 b . getBBox)
-- | only check if a line and bbox of item overlapped
-- (an item is "hit" when either endpoint of the line lies in its box)
hltHittedByLineRough :: (GetBBoxable a) =>
                        ((Double,Double),(Double,Double)) -- ^ line
                     -> [a] -- ^ items to test
                     -> AlterList (NotHitted a) (Hitted a)
hltHittedByLineRough (p1,p2) = hltFilteredBy boxhittest
  where boxhittest s = isPointInBBox (getBBox s) p1
                       || isPointInBBox (getBBox s) p2
-- | Exact line hit test over render items; only strokes can be hit,
-- every other item kind always tests False.
hltItmsHittedByLine_StateT :: ((Double,Double),(Double,Double))
                           -> [RItem]
                           -> State Bool RItemHitted
-- (AlterList (NotHitted RItem) (Hitted RItem))
hltItmsHittedByLine_StateT line = hltFilteredBy_StateT test
  where test (RItemStroke strk) = (doesLineHitStrk line . bbxed_content) strk
        test _ = False
-- | Refine a previous hit result: re-test only the already-hitted
-- runs against the line, leaving the not-hitted runs untouched.
hltItmsHittedByLineFrmSelected_StateT ::
     ((Double,Double),(Double,Double))
  -> RItemHitted -- AlterList (NotHitted RItem) (Hitted RItem)
  -> State Bool (AlterList (NotHitted RItem) RItemHitted)
hltItmsHittedByLineFrmSelected_StateT _ Empty
  = error "something is wrong, invariant broken"
hltItmsHittedByLineFrmSelected_StateT _ (n:-Empty) = return (n:-Empty)
hltItmsHittedByLineFrmSelected_StateT line (n:-h:-rest) = do
  h' <- hltItmsHittedByLine_StateT line (unHitted h)
  (n:-) . (h':-) <$> hltItmsHittedByLineFrmSelected_StateT line rest
-- | Drop the hitted runs, returning the remaining items and
-- accumulating the total bounding box of everything removed.
elimHitted :: (GetBBoxable a) =>
              AlterList (NotHitted a) (Hitted a) -> State (Maybe BBox) [a]
elimHitted Empty = error "something wrong in elimHitted"
elimHitted (n:-Empty) = return (unNotHitted n)
elimHitted (n:-h:-rest) = do
  bbox <- get
  let bbox2 = getTotalBBox (unHitted h)
  put (merge bbox bbox2)
  return . (unNotHitted n ++) =<< elimHitted rest
-- | Union of two optional bounding boxes; 'Nothing' acts as identity.
merge :: Maybe BBox -> Maybe BBox -> Maybe BBox
merge a Nothing = a
merge Nothing b = b
merge (Just (BBox (x1,y1) (x2,y2))) (Just (BBox (x3,y3) (x4,y4)))
  = Just (BBox (min x1 x3, min y1 y3) (max x2 x4, max y2 y4))
-- | Smallest bounding box covering all items; 'Nothing' for an
-- empty list.
getTotalBBox :: (GetBBoxable a) => [a] -> Maybe BBox
getTotalBBox = foldl f Nothing
  where f acc = merge acc . Just . getBBox
| wavewave/hoodle-render | src/Graphics/Hoodle/Render/Util/HitTest.hs | bsd-2-clause | 6,782 | 0 | 21 | 1,751 | 2,152 | 1,174 | 978 | 118 | 5 |
{-# LANGUAGE DeriveDataTypeable #-}
module Gwist.JSON (
Result (..),
decodeJSON,
readJSON
) where
import Data.Typeable
import qualified Data.Attoparsec as AP
import qualified Data.Aeson as AE
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as LBS
-- | Result of a JSON decode: the decoded value or an error message.
data Result a = Success a | Error String
-- | Decode a lazy ByteString as JSON and convert it to @a@.
-- Parse failures and conversion failures are both reported as 'Error';
-- a partial parse means the input ended before the JSON value did.
decodeJSON :: AE.FromJSON a => LBS.ByteString -> Result a
decodeJSON b =
  case AP.parse (fmap AE.fromJSON AE.json) (lbsToBs b) of
    AP.Done _ (AE.Success g) -> Success g
    AP.Done _ (AE.Error msg) -> Error msg
    AP.Fail _ _ msg -> Error msg
    AP.Partial _ -> Error "JSON is too short"
  where lbsToBs = BS.concat . LBS.toChunks
-- | Decode JSON in IO, failing hard (via 'error') on malformed input.
readJSON :: AE.FromJSON a => LBS.ByteString -> IO a
readJSON b = unwrap (decodeJSON b)
  where
    unwrap (Success g) = return g
    unwrap (Error msg) = error msg
| shugo/gwist | Gwist/JSON.hs | bsd-2-clause | 816 | 0 | 11 | 176 | 292 | 153 | 139 | 24 | 4 |
module Propellor.Property.Dns (
module Propellor.Types.Dns,
primary,
signedPrimary,
secondary,
secondaryFor,
mkSOA,
writeZoneFile,
nextSerialNumber,
adjustSerialNumber,
serialNumberOffset,
WarningMessage,
genZone,
) where
import Propellor.Base
import Propellor.Types.Dns
import Propellor.Types.Info
import Propellor.Property.File
import qualified Propellor.Property.Apt as Apt
import qualified Propellor.Property.Ssh as Ssh
import qualified Propellor.Property.Service as Service
import Propellor.Property.Scheduled
import Propellor.Property.DnsSec
import Utility.Applicative
import qualified Data.Map as M
import qualified Data.Set as S
import Data.List
-- | Primary dns server for a domain, using bind.
--
-- Currently, this only configures bind to serve forward DNS, not reverse DNS.
--
-- Most of the content of the zone file is configured by setting properties
-- of hosts. For example,
--
-- > host "foo.example.com"
-- > 	& ipv4 "192.168.1.1"
-- > 	& alias "mail.example.com"
--
-- Will cause that hostname and its alias to appear in the zone file,
-- with the configured IP address.
--
-- Also, if a host has a ssh public key configured, a SSHFP record will
-- be automatically generated for it.
--
-- The [(BindDomain, Record)] list can be used for additional records
-- that cannot be configured elsewhere. This often includes NS records,
-- TXT records and perhaps CNAMEs pointing at hosts that propellor does
-- not control.
--
-- The primary server is configured to only allow zone transfers to
-- secondary dns servers. These are determined in two ways:
--
-- 1. By looking at the properties of other hosts, to find hosts that
-- are configured as the secondary dns server.
--
-- 2. By looking for NS Records in the passed list of records.
--
-- In either case, the secondary dns server Host should have an ipv4 and/or
-- ipv6 property defined.
primary :: [Host] -> Domain -> SOA -> [(BindDomain, Record)] -> RevertableProperty (HasInfo + DebianLike) DebianLike
primary hosts domain soa rs = setup <!> cleanup
  where
	setup = setupPrimary zonefile id hosts domain soa rs
		`onChange` Service.reloaded "bind9"
	cleanup = cleanupPrimary zonefile domain
		`onChange` Service.reloaded "bind9"
	zonefile = "/etc/bind/propellor/db." ++ domain
-- | Sets up the zone file and named.conf stanza for a primary server.
-- 'mknamedconffile' maps the zone file path to the path named.conf
-- should reference (used by 'signedPrimary' to point at the signed
-- zone file).
setupPrimary :: FilePath -> (FilePath -> FilePath) -> [Host] -> Domain -> SOA -> [(BindDomain, Record)] -> Property (HasInfo + DebianLike)
setupPrimary zonefile mknamedconffile hosts domain soa rs =
	withwarnings baseprop
	`requires` servingZones
  where
	hostmap = hostMap hosts
	-- Known hosts with hostname located in the domain.
	indomain = M.elems $ M.filterWithKey (\hn _ -> inDomain domain $ AbsDomain $ hn) hostmap
	(partialzone, zonewarnings) = genZone indomain hostmap domain soa
	baseprop = primaryprop
		`setInfoProperty` (toInfo (addNamedConf conf))
	primaryprop :: Property DebianLike
	primaryprop = property ("dns primary for " ++ domain) $ do
		sshfps <- concat <$> mapM (genSSHFP domain) (M.elems hostmap)
		let zone = partialzone
			{ zHosts = zHosts partialzone ++ rs ++ sshfps }
		ifM (liftIO $ needupdate zone)
			( makeChange $ writeZoneFile zone zonefile
			, noChange
			)
	-- Warnings are repeated on every satisfy run, not just on change.
	withwarnings p = adjustPropertySatisfy p $ \a -> do
		mapM_ warningMessage $ zonewarnings ++ secondarywarnings
		a
	conf = NamedConf
		{ confDomain = domain
		, confDnsServerType = Master
		, confFile = mknamedconffile zonefile
		, confMasters = []
		, confAllowTransfer = nub $
			concatMap (`hostAddresses` hosts) $
				secondaries ++ nssecondaries
		, confLines = []
		}
	secondaries = otherServers Secondary hosts domain
	-- Fixed typo in the warning message ("seconary").
	secondarywarnings = map (\h -> "No IP address defined for DNS secondary " ++ h) $
		filter (\h -> null (hostAddresses h hosts)) secondaries
	nssecondaries = mapMaybe (domainHostName <=< getNS) rootRecords
	rootRecords = map snd $
		filter (\(d, _r) -> d == RootDomain || d == AbsDomain domain) rs
	-- Only rewrite the zone file when something other than the serial
	-- changed (or when the stored serial went backwards).
	needupdate zone = do
		v <- readZonePropellorFile zonefile
		return $ case v of
			Nothing -> True
			Just oldzone ->
				-- compare everything except serial
				let oldserial = sSerial (zSOA oldzone)
				    z = zone { zSOA = (zSOA zone) { sSerial = oldserial } }
				in z /= oldzone || oldserial < sSerial (zSOA zone)
-- | Removes the zone file and rewrites named.conf without the zone;
-- a no-op when the zone file does not exist.
cleanupPrimary :: FilePath -> Domain -> Property DebianLike
cleanupPrimary zonefile domain = check (doesFileExist zonefile) $
	go `requires` namedConfWritten
  where
	desc = "removed dns primary for " ++ domain
	go :: Property DebianLike
	go = property desc (makeChange $ removeZoneFile zonefile)
-- | Primary dns server for a domain, secured with DNSSEC.
--
-- This is like `primary`, except the resulting zone
-- file is signed.
-- The Zone Signing Key (ZSK) and Key Signing Key (KSK)
-- used in signing it are taken from the PrivData.
--
-- As a side effect of signing the zone, a
-- </var/cache/bind/dsset-domain.>
-- file will be created. This file contains the DS records
-- which need to be communicated to your domain registrar
-- to make DNSSEC be used for your domain. Doing so is outside
-- the scope of propellor (currently). See for example the tutorial
-- <https://www.digitalocean.com/community/tutorials/how-to-setup-dnssec-on-an-authoritative-bind-dns-server--2>
--
-- The 'Recurrance' controls how frequently the signature
-- should be regenerated, using a new random salt, to prevent
-- zone walking attacks. `Weekly Nothing` is a reasonable choice.
--
-- To transition from 'primary' to 'signedPrimary', you can revert
-- the 'primary' property, and add this property.
--
-- Note that DNSSEC zone files use a serial number based on the unix epoch.
-- This is different from the serial number used by 'primary', so if you
-- want to later disable DNSSEC you will need to adjust the serial number
-- passed to mkSOA to ensure it is larger.
signedPrimary :: Recurrance -> [Host] -> Domain -> SOA -> [(BindDomain, Record)] -> RevertableProperty (HasInfo + DebianLike) DebianLike
signedPrimary recurrance hosts domain soa rs = setup <!> cleanup
  where
	setup = combineProperties ("dns primary for " ++ domain ++ " (signed)")
		(props
			& setupPrimary zonefile signedZoneFile hosts domain soa rs'
			& zoneSigned domain zonefile
			& forceZoneSigned domain zonefile `period` recurrance
		)
		`onChange` Service.reloaded "bind9"
	cleanup = cleanupPrimary zonefile domain
		`onChange` revert (zoneSigned domain zonefile)
		`onChange` Service.reloaded "bind9"
	-- Include the public keys into the zone file.
	rs' = include PubKSK : include PubZSK : rs
	include k = (RootDomain, INCLUDE (keyFn domain k))
	-- Put DNSSEC zone files in a different directory than is used for
	-- the regular ones. This allows 'primary' to be reverted and
	-- 'signedPrimary' enabled, without the reverted property stomping
	-- on the new one's settings.
	zonefile = "/etc/bind/propellor/dnssec/db." ++ domain
-- | Secondary dns server for a domain.
--
-- The primary server is determined by looking at the properties of other
-- hosts to find which one is configured as the primary.
--
-- Note that if a host is declared to be a primary and a secondary dns
-- server for the same domain, the primary server config always wins.
secondary :: [Host] -> Domain -> RevertableProperty (HasInfo + DebianLike) DebianLike
secondary hosts domain = secondaryFor (otherServers Master hosts domain) hosts domain
-- | This variant is useful if the primary server does not have its DNS
-- configured via propellor.
secondaryFor :: [HostName] -> [Host] -> Domain -> RevertableProperty (HasInfo + DebianLike) DebianLike
secondaryFor masters hosts domain = setup <!> cleanup
  where
	setup = pureInfoProperty desc (addNamedConf conf)
		`requires` servingZones
	cleanup = namedConfWritten
	desc = "dns secondary for " ++ domain
	conf = NamedConf
		{ confDomain = domain
		, confDnsServerType = Secondary
		, confFile = "db." ++ domain
		, confMasters = concatMap (`hostAddresses` hosts) masters
		, confAllowTransfer = []
		, confLines = []
		}
-- | Names of the hosts that are configured as the given kind of DNS
-- server for the domain.
otherServers :: DnsServerType -> [Host] -> Domain -> [HostName]
otherServers wantedtype hosts domain =
	M.keys $ M.filter wanted $ hostMap hosts
  where
	wanted = maybe False match
		. M.lookup domain . fromNamedConfMap . fromInfo . hostInfo
	match conf = confDnsServerType conf == wantedtype
		&& confDomain conf == domain
-- | Rewrites the whole named.conf.local file to serve the zones
-- configured by `primary` and `secondary`, and ensures that bind9 is
-- running.
servingZones :: Property DebianLike
servingZones = namedConfWritten
	`onChange` Service.reloaded "bind9"
	`requires` Apt.serviceInstalledRunning "bind9"
-- | Writes one stanza per configured zone, collected from this
-- host's Info.
namedConfWritten :: Property DebianLike
namedConfWritten = property' "named.conf configured" $ \w -> do
	zs <- getNamedConf
	ensureProperty w $
		hasContent namedConfFile $
			concatMap confStanza $ M.elems zs
-- | Renders one zone stanza in named.conf syntax.
confStanza :: NamedConf -> [Line]
confStanza c =
	[ "// automatically generated by propellor"
	, "zone \"" ++ confDomain c ++ "\" {"
	, cfgline "type" (if confDnsServerType c == Master then "master" else "slave")
	, cfgline "file" ("\"" ++ confFile c ++ "\"")
	] ++
	mastersblock ++
	allowtransferblock ++
	(map (\l -> "\t" ++ l ++ ";") (confLines c)) ++
	[ "};"
	, ""
	]
  where
	cfgline f v = "\t" ++ f ++ " " ++ v ++ ";"
	ipblock name l =
		[ "\t" ++ name ++ " {" ] ++
		(map (\ip -> "\t\t" ++ val ip ++ ";") l) ++
		[ "\t};" ]
	mastersblock
		| null (confMasters c) = []
		| otherwise = ipblock "masters" (confMasters c)
	-- an empty block prohibits any transfers
	allowtransferblock = ipblock "allow-transfer" (confAllowTransfer c)
-- | Location of bind's local zone configuration on Debian.
namedConfFile :: FilePath
namedConfFile = "/etc/bind/named.conf.local"
-- | Generates a SOA with some fairly sane numbers in it.
--
-- The Domain is the domain to use in the SOA record. Typically
-- something like ns1.example.com. So, not the domain that this is the SOA
-- record for.
--
-- The SerialNumber can be whatever serial number was used by the domain
-- before propellor started managing it. Or 0 if the domain has only ever
-- been managed by propellor.
--
-- You do not need to increment the SerialNumber when making changes!
-- Propellor will automatically add the number of commits in the git
-- repository to the SerialNumber.
mkSOA :: Domain -> SerialNumber -> SOA
mkSOA d sn = SOA
	{ sDomain = AbsDomain d
	, sSerial = sn
	, sRefresh = hours 4
	, sRetry = hours 1
	, sExpire = 2419200 -- 4 weeks
	, sNegativeCacheTTL = hours 8
	}
  where
	hours n = n * 60 * 60
-- | Zone-file representation of a domain name; absolute names get a
-- trailing dot, the root is written as @.
dValue :: BindDomain -> String
dValue bd = case bd of
	RelDomain d -> d
	AbsDomain d -> d ++ "."
	RootDomain -> "@"
-- | Zone-file record type column for a record, if it has one.
-- PTR records are not emitted (no reverse DNS support here).
rField :: Record -> Maybe String
rField r = case r of
	Address (IPv4 _) -> Just "A"
	Address (IPv6 _) -> Just "AAAA"
	CNAME _ -> Just "CNAME"
	MX _ _ -> Just "MX"
	NS _ -> Just "NS"
	TXT _ -> Just "TXT"
	SRV _ _ _ _ -> Just "SRV"
	SSHFP _ _ _ -> Just "SSHFP"
	INCLUDE _ -> Just "$INCLUDE"
	PTR _ -> Nothing
-- | Zone-file value column for a record, if it has one.
rValue :: Record -> Maybe String
rValue (Address (IPv4 addr)) = Just addr
rValue (Address (IPv6 addr)) = Just addr
rValue (CNAME d) = Just $ dValue d
rValue (MX pri d) = Just $ val pri ++ " " ++ dValue d
rValue (NS d) = Just $ dValue d
rValue (SRV priority weight port target) = Just $ unwords
	[ val priority
	, val weight
	, val port
	, dValue target
	]
rValue (SSHFP x y s) = Just $ unwords
	[ val x
	, val y
	, s
	]
rValue (INCLUDE f) = Just f
-- Double quotes inside the TXT value are dropped rather than escaped.
rValue (TXT s) = Just $ [q] ++ filter (/= q) s ++ [q]
  where
	q = '"'
rValue (PTR _) = Nothing
-- | Adjusts the serial number of the zone to always be larger
-- than the serial number in the Zone record,
-- and always be larger than the passed SerialNumber.
nextSerialNumber :: Zone -> SerialNumber -> Zone
nextSerialNumber z serial = adjustSerialNumber z $ \sn -> succ $ max sn serial
-- | Apply a function to the serial number in a zone's SOA, leaving
-- everything else untouched.
adjustSerialNumber :: Zone -> (SerialNumber -> SerialNumber) -> Zone
adjustSerialNumber (Zone d soa l) f =
	Zone d (soa { sSerial = f (sSerial soa) }) l
-- | Count the number of git commits made to the current branch.
serialNumberOffset :: IO SerialNumber
serialNumberOffset = fromIntegral . length . lines
	<$> readProcess "git" ["log", "--pretty=%H"]
-- | Write a Zone out to a to a file.
--
-- The serial number in the Zone automatically has the serialNumberOffset
-- added to it. Also, just in case, the old serial number used in the zone
-- file is checked, and if it is somehow larger, its succ is used.
writeZoneFile :: Zone -> FilePath -> IO ()
writeZoneFile z f = do
	oldserial <- oldZoneFileSerialNumber f
	offset <- serialNumberOffset
	let z' = nextSerialNumber
		(adjustSerialNumber z (+ offset))
		oldserial
	createDirectoryIfMissing True (takeDirectory f)
	writeFile f (genZoneFile z')
	-- keep the serialized companion file in sync for needupdate checks
	writeZonePropellorFile f z'
-- | Remove a zone file along with its serialized .propellor companion.
removeZoneFile :: FilePath -> IO ()
removeZoneFile f = mapM_ nukeFile [f, zonePropellorFile f]
-- | Next to the zone file, is a ".propellor" file, which contains
-- the serialized Zone. This saves the bother of parsing
-- the horrible bind zone file format.
zonePropellorFile :: FilePath -> FilePath
zonePropellorFile f = f ++ ".propellor"
-- | Serial recorded in the companion file; 0 when it is missing.
oldZoneFileSerialNumber :: FilePath -> IO SerialNumber
oldZoneFileSerialNumber = maybe 0 (sSerial . zSOA) <$$> readZonePropellorFile
writeZonePropellorFile :: FilePath -> Zone -> IO ()
writeZonePropellorFile f z = writeFile (zonePropellorFile f) (show z)
-- | Nothing when the file is missing or does not parse back via Read.
readZonePropellorFile :: FilePath -> IO (Maybe Zone)
readZonePropellorFile f = catchDefaultIO Nothing $
	readish <$> readFileStrict (zonePropellorFile f)
-- | Generating a zone file.
genZoneFile :: Zone -> String
genZoneFile (Zone zdomain soa rs) = unlines $
	header : genSOA soa ++ mapMaybe (genRecord zdomain) rs
  where
	header = com $ "BIND zone file for " ++ zdomain ++ ". Generated by propellor, do not edit."
-- | One tab-separated zone file line for a record; Nothing for
-- records that have no field or value representation (eg PTR).
genRecord :: Domain -> (BindDomain, Record) -> Maybe String
genRecord zdomain (domain, record) = case (rField record, rValue record) of
	(Nothing, _) -> Nothing
	(_, Nothing) -> Nothing
	(Just rfield, Just rvalue) -> Just $ intercalate "\t" $ case record of
		-- $INCLUDE lines carry no owner name or class
		INCLUDE _ -> [ rfield, rvalue ]
		_ ->
			[ domainHost zdomain domain
			, "IN"
			, rfield
			, rvalue
			]
-- | The multi-line SOA header of the zone file.
genSOA :: SOA -> [String]
genSOA soa =
	-- "@ IN SOA ns1.example.com. root ("
	[ intercalate "\t"
		[ dValue RootDomain
		, "IN"
		, "SOA"
		, dValue (sDomain soa)
		, "root"
		, "("
		]
	, headerline sSerial "Serial"
	, headerline sRefresh "Refresh"
	, headerline sRetry "Retry"
	, headerline sExpire "Expire"
	, headerline sNegativeCacheTTL "Negative Cache TTL"
	, inheader ")"
	]
  where
	headerline r comment = inheader $ show (r soa) ++ "\t\t" ++ com comment
	inheader l = "\t\t\t" ++ l
-- | Comment line in a zone file.
com :: String -> String
com = ("; " ++)
type WarningMessage = String
-- | Generates a Zone for a particular Domain from the DNS properties of all
-- hosts that propellor knows about that are in that Domain.
--
-- Does not include SSHFP records.
genZone :: [Host] -> M.Map HostName Host -> Domain -> SOA -> (Zone, [WarningMessage])
genZone inzdomain hostmap zdomain soa =
	let (warnings, zhosts) = partitionEithers $ concatMap concat
		[ map hostips inzdomain
		, map hostrecords inzdomain
		, map addcnames (M.elems hostmap)
		]
	in (Zone zdomain soa (simplify zhosts), warnings)
  where
	-- Each host with a hostname located in the zdomain
	-- should have 1 or more IPAddrs in its Info.
	--
	-- If a host lacks any IPAddr, it's probably a misconfiguration,
	-- so warn.
	hostips :: Host -> [Either WarningMessage (BindDomain, Record)]
	hostips h
		| null l = [Left $ "no IP address defined for host " ++ hostName h]
		| otherwise = map Right l
	  where
		info = hostInfo h
		l = zip (repeat $ AbsDomain $ hostName h)
			(map Address $ getAddresses info)
	-- Any host, whether its hostname is in the zdomain or not,
	-- may have cnames which are in the zdomain. The cname may even be
	-- the same as the root of the zdomain, which is a nice way to
	-- specify IP addresses for a SOA record.
	--
	-- Add Records for those.. But not actually, usually, cnames!
	-- Why not? Well, using cnames doesn't allow doing some things,
	-- including MX and round robin DNS, and certainly CNAMES
	-- shouldn't be used in SOA records.
	--
	-- We typically know the host's IPAddrs anyway.
	-- So we can just use the IPAddrs.
	addcnames :: Host -> [Either WarningMessage (BindDomain, Record)]
	addcnames h = concatMap gen $ filter (inDomain zdomain) $
		mapMaybe getCNAME $ S.toList $ fromDnsInfo $ fromInfo info
	  where
		info = hostInfo h
		gen c = case getAddresses info of
			[] -> [ret (CNAME c)]
			l -> map (ret . Address) l
		  where
			ret record = Right (c, record)
	-- Adds any other DNS records for a host located in the zdomain.
	hostrecords :: Host -> [Either WarningMessage (BindDomain, Record)]
	hostrecords h = map Right l
	  where
		info = hostInfo h
		l = zip (repeat $ AbsDomain $ hostName h)
			(S.toList $ S.filter (\r -> isNothing (getIPAddr r) && isNothing (getCNAME r)) (fromDnsInfo $ fromInfo info))
	-- Simplifies the list of hosts. Remove duplicate entries.
	-- Also, filter out any CNAMEs where the same domain has an
	-- IP address, since that's not legal.
	simplify :: [(BindDomain, Record)] -> [(BindDomain, Record)]
	simplify l = nub $ filter (not . dupcname ) l
	  where
		dupcname (d, CNAME _) | any (matchingaddr d) l = True
		dupcname _ = False
		matchingaddr d (d', (Address _)) | d == d' = True
		matchingaddr _ _ = False
-- | Is the second domain equal to, or a subdomain of, the first?
inDomain :: Domain -> BindDomain -> Bool
inDomain domain (AbsDomain d) = domain == d || ('.':domain) `isSuffixOf` d
inDomain _ _ = False -- can't tell, so assume not
-- | Gets the hostname of the second domain, relative to the first domain,
-- suitable for using in a zone file.
domainHost :: Domain -> BindDomain -> String
domainHost _ (RelDomain d) = d
domainHost _ RootDomain = "@"
domainHost base (AbsDomain d)
	| dotbase `isSuffixOf` d = take (length d - length dotbase) d
	| base == d = "@"
	| otherwise = d
  where
	dotbase = '.':base
-- | Wrap a single NamedConf into a NamedConfMap keyed by its domain.
addNamedConf :: NamedConf -> NamedConfMap
addNamedConf conf = NamedConfMap (M.singleton (confDomain conf) conf)
-- | All zones configured for the current host, from its Info.
getNamedConf :: Propellor (M.Map Domain NamedConf)
getNamedConf = asks $ fromNamedConfMap . fromInfo . hostInfo
-- | Generates SSHFP records for hosts in the domain (or with CNAMES
-- in the domain) that have configured ssh public keys.
--
-- This is done using ssh-keygen, so sadly needs IO.
genSSHFP :: Domain -> Host -> Propellor [(BindDomain, Record)]
genSSHFP domain h = concatMap mk . concat <$> (gen =<< get)
  where
	get = fromHost [h] hostname Ssh.getHostPubKey
	gen = liftIO . mapM genSSHFP' . M.elems . fromMaybe M.empty
	-- emit each record for the hostname and every in-domain cname
	mk r = mapMaybe (\d -> if inDomain domain d then Just (d, r) else Nothing)
		(AbsDomain hostname : cnames)
	cnames = mapMaybe getCNAME $ S.toList $ fromDnsInfo $ fromInfo info
	hostname = hostName h
	info = hostInfo h
-- | Runs ssh-keygen -r on a public key (via a temp file) and parses
-- the SSHFP records out of its output; failures yield an empty list.
genSSHFP' :: String -> IO [Record]
genSSHFP' pubkey = withTmpFile "sshfp" $ \tmp tmph -> do
	hPutStrLn tmph pubkey
	hClose tmph
	s <- catchDefaultIO "" $
		readProcess "ssh-keygen" ["-r", "dummy", "-f", tmp]
	return $ mapMaybe (parse . words) $ lines s
  where
	parse ("dummy":"IN":"SSHFP":x:y:s:[]) = do
		x' <- readish x
		y' <- readish y
		return $ SSHFP x' y' s
	parse _ = Nothing
| ArchiveTeam/glowing-computing-machine | src/Propellor/Property/Dns.hs | bsd-2-clause | 19,165 | 748 | 20 | 3,628 | 4,997 | 2,719 | 2,278 | -1 | -1 |
{-# LANGUAGE MultiParamTypeClasses, FlexibleContexts, FlexibleInstances, PatternGuards #-}
-----------------------------------------------------------------------------
-- |
-- Module : XMonad.Layout.ComboP
-- Description : Combine multiple layouts and specify where to put new windows.
-- Copyright : (c) Konstantin Sobolev <[email protected]>
-- License : BSD-style (see LICENSE)
--
-- Maintainer : Konstantin Sobolev <[email protected]>
-- Stability : unstable
-- Portability : unportable
--
-- A layout that combines multiple layouts and allows to specify where to put
-- new windows.
--
-----------------------------------------------------------------------------
module XMonad.Layout.ComboP (
-- * Usage
-- $usage
combineTwoP,
CombineTwoP,
SwapWindow(..),
PartitionWins(..),
Property(..)
) where
import XMonad.Prelude
import XMonad hiding (focus)
import XMonad.StackSet ( Workspace (..), Stack(..) )
import XMonad.Layout.WindowNavigation
import XMonad.Util.WindowProperties
import qualified XMonad.StackSet as W
-- $usage
-- You can use this module with the following in your @~\/.xmonad\/xmonad.hs@:
--
-- > import XMonad.Layout.ComboP
--
-- and add something like
--
-- > combineTwoP (TwoPane 0.03 0.5) (tabbed shrinkText defaultTConf) (tabbed shrinkText defaultTConf) (ClassName "Firefox")
--
-- to your layouts. This way all windows with class = \"Firefox\" will always go
-- to the left pane, all others - to the right.
--
-- For more detailed instructions on editing the layoutHook see:
--
-- "XMonad.Doc.Extending#Editing_the_layout_hook"
--
-- 'combineTwoP' is a simple layout combinator based on 'combineTwo' from Combo, with
-- addition of a 'Property' which tells where to put new windows. Windows matching
-- the property will go into the first part, all others will go into the second
-- part. It supports @Move@ messages as 'combineTwo' does, but it also introduces
-- 'SwapWindow' message which sends focused window to the other part. It is
-- required because @Move@ commands don't work when one of the parts is empty.
-- To use it, import \"XMonad.Layout.WindowNavigation\", and add the following key
-- bindings (or something similar):
--
-- > , ((modm .|. controlMask .|. shiftMask, xK_Right), sendMessage $ Move R)
-- > , ((modm .|. controlMask .|. shiftMask, xK_Left ), sendMessage $ Move L)
-- > , ((modm .|. controlMask .|. shiftMask, xK_Up ), sendMessage $ Move U)
-- > , ((modm .|. controlMask .|. shiftMask, xK_Down ), sendMessage $ Move D)
-- > , ((modm .|. controlMask .|. shiftMask, xK_s ), sendMessage $ SwapWindow)
--
-- For detailed instruction on editing the key binding see
-- "XMonad.Doc.Extending#Editing_key_bindings".
data SwapWindow = SwapWindow -- ^ Swap window between panes
                | SwapWindowN Int -- ^ Swap window between panes in the N-th nested ComboP. @SwapWindowN 0@ equals to SwapWindow
    deriving (Read, Show)
instance Message SwapWindow
data PartitionWins = PartitionWins -- ^ Reset the layout and
                                   -- partition all windows into the
                                   -- correct sub-layout. Useful for
                                   -- when window properties have
                                   -- changed and you want ComboP to
                                   -- update which layout a window
                                   -- belongs to.
    deriving (Read, Show)
instance Message PartitionWins
-- | C2P focusedWins firstPaneWins secondPaneWins superLayout l1 l2 prop
data CombineTwoP l l1 l2 a = C2P [a] [a] [a] l (l1 a) (l2 a) Property
    deriving (Read, Show)
combineTwoP :: (LayoutClass super(), LayoutClass l1 Window, LayoutClass l2 Window) =>
    super () -> l1 Window -> l2 Window -> Property -> CombineTwoP (super ()) l1 l2 Window
combineTwoP = C2P [] [] []
instance (LayoutClass l (), LayoutClass l1 Window, LayoutClass l2 Window) =>
    LayoutClass (CombineTwoP (l ()) l1 l2) Window where
    doLayout (C2P f w1 w2 super l1 l2 prop) rinput s =
        let origws = W.integrate s -- passed in windows
            w1c = origws `intersect` w1 -- current windows in the first pane
            w2c = origws `intersect` w2 -- current windows in the second pane
            new = origws \\ (w1c ++ w2c) -- new windows
            superstack = Just Stack { focus=(), up=[], down=[()] }
            f' = focus s:delete (focus s) f -- list of focused windows, contains 2 elements at most
        in do
            matching <- hasProperty prop `filterM` new -- new windows matching predicate
            let w1' = w1c ++ matching -- updated first pane windows
                w2' = w2c ++ (new \\ matching) -- updated second pane windows
                s1 = differentiate f' w1' -- first pane stack
                s2 = differentiate f' w2' -- second pane stack
            -- let the super layout carve the rectangle into two panes,
            -- then lay out each pane with its own sub-layout
            ([((),r1),((),r2)], msuper') <- runLayout (Workspace "" super superstack) rinput
            (wrs1, ml1') <- runLayout (Workspace "" l1 s1) r1
            (wrs2, ml2') <- runLayout (Workspace "" l2 s2) r2
            return (wrs1++wrs2, Just $ C2P f' w1' w2' (fromMaybe super msuper')
                                (fromMaybe l1 ml1') (fromMaybe l2 ml2') prop)
    handleMessage us@(C2P f ws1 ws2 super l1 l2 prop) m
        | Just PartitionWins <- fromMessage m = return . Just $ C2P [] [] [] super l1 l2 prop
        | Just SwapWindow <- fromMessage m = swap us
        | Just (SwapWindowN 0) <- fromMessage m = swap us
        | Just (SwapWindowN n) <- fromMessage m = forwardToFocused us $ SomeMessage $ SwapWindowN $ n-1
        -- Move messages only work between panes that both own a window
        | Just (MoveWindowToWindow w1 w2) <- fromMessage m,
          w1 `elem` ws1,
          w2 `elem` ws2 = return $ Just $ C2P f (delete w1 ws1) (w1:ws2) super l1 l2 prop
        | Just (MoveWindowToWindow w1 w2) <- fromMessage m,
          w1 `elem` ws2,
          w2 `elem` ws1 = return $ Just $ C2P f (w1:ws1) (delete w1 ws2) super l1 l2 prop
        -- any other message is broadcast to all three layouts
        | otherwise = do ml1' <- handleMessage l1 m
                         ml2' <- handleMessage l2 m
                         msuper' <- handleMessage super m
                         if isJust msuper' || isJust ml1' || isJust ml2'
                            then return $ Just $ C2P f ws1 ws2
                                    (fromMaybe super msuper')
                                    (fromMaybe l1 ml1')
                                    (fromMaybe l2 ml2') prop
                            else return Nothing
    description (C2P _ _ _ super l1 l2 prop) = "combining " ++ description l1 ++ " and "++
        description l2 ++ " with " ++ description super ++ " using "++ show prop
-- | Send the focused window to the other pane. Does nothing (returns
-- 'Nothing') if neither pane owns the focused window, or there is no
-- focused window at all.
swap :: (LayoutClass s a, LayoutClass l1 Window, LayoutClass l2 Window) =>
    CombineTwoP (s a) l1 l2 Window -> X (Maybe (CombineTwoP (s a) l1 l2 Window))
swap (C2P f ws1 ws2 super l1 l2 prop) = do
    mst <- gets (W.stack . W.workspace . W.current . windowset)
    let (ws1', ws2') = case mst of
            Nothing -> (ws1, ws2)
            Just st -> if foc `elem` ws1
                          then (foc `delete` ws1, foc:ws2)
                          else if foc `elem` ws2
                                  then (foc:ws1, foc `delete` ws2)
                                  else (ws1, ws2)
                where foc = W.focus st
    if (ws1,ws2) == (ws1',ws2')
       then return Nothing
       else return $ Just $ C2P f ws1' ws2' super l1 l2 prop
-- | Forward the message to the sub-layout which contains the focused
-- window; if neither sub-layout handles it, offer it to the super layout.
forwardToFocused :: (LayoutClass l1 Window, LayoutClass l2 Window, LayoutClass s a) =>
    CombineTwoP (s a) l1 l2 Window -> SomeMessage -> X (Maybe (CombineTwoP (s a) l1 l2 Window))
forwardToFocused (C2P f ws1 ws2 super l1 l2 prop) m = do
    ml1 <- forwardIfFocused l1 ws1 m
    ml2 <- forwardIfFocused l2 ws2 m
    ms <- if isJust ml1 || isJust ml2
             then return Nothing
             else handleMessage super m
    if isJust ml1 || isJust ml2 || isJust ms
       then return $ Just $ C2P f ws1 ws2 (fromMaybe super ms) (fromMaybe l1 ml1) (fromMaybe l2 ml2) prop
       else return Nothing
-- | Forward message m to layout l only if the focused window is among w.
forwardIfFocused :: (LayoutClass l Window) => l Window -> [Window] -> SomeMessage -> X (Maybe (l Window))
forwardIfFocused l w m = do
    mst <- gets (W.stack . W.workspace . W.current . windowset)
    maybe (return Nothing) send mst where
        send st = if W.focus st `elem` w
                     then handleMessage l m
                     else return Nothing
-- code from CombineTwo
-- | Build a stack from @xs@ focused on the first element of @zs@ that is
-- also present in @xs@. When @zs@ is exhausted (in particular when the two
-- lists do not intersect) it acts as 'W.differentiate'.
differentiate :: Eq q => [q] -> [q] -> Maybe (Stack q)
differentiate [] xs = W.differentiate xs
differentiate (z:zs) xs
    | z `elem` xs = let (before, rest) = span (/= z) xs
                    in Just Stack { focus = z
                                  , up = reverse before
                                  , down = tail rest }
    | otherwise = differentiate zs xs
-- vim:ts=4:shiftwidth=4:softtabstop=4:expandtab:foldlevel=20:
| xmonad/xmonad-contrib | XMonad/Layout/ComboP.hs | bsd-3-clause | 9,693 | 0 | 16 | 3,123 | 2,184 | 1,156 | 1,028 | 106 | 5 |
{-# LANGUAGE Arrows #-}
module Arrow where
import Control.Arrow
import Control.Arrow.Operations
-- | Discrete-time integrator circuit: accumulates its input scaled by the
-- timestep @dt = 1 / rate@, using a one-sample 'delay' so the output lags
-- the update by one step.
integral :: ArrowCircuit a => Int -> a Double Double
integral rate = proc x -> do
    rec let i' = i + x * dt
        i <- delay 0 -< i'
    returnA -< i
  where
    dt = 1 / fromIntegral rate
| svenkeidel/stream-bench | src/Arrow.hs | bsd-3-clause | 285 | 1 | 14 | 71 | 107 | 53 | 54 | 10 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : Graphics.UI.Toy.Gtk.Prelude
-- Copyright : (c) 2012 Michael Sloan
-- License : BSD-style (see the LICENSE file)
--
-- Maintainer : Michael Sloan <[email protected]>
-- Stability : experimental
-- Portability : GHC only
--
-- This module re-exports the "Diagrams.Prelude", along with all of the
-- exported modules in toy-diagrams, toy-gtk, and toy-gtk-diagrams.
--
-- While importing this module should be discouraged in \"real\" code, it's
-- convenient for \"toy\" code.
--
-----------------------------------------------------------------------------
module Graphics.UI.Toy.Gtk.Prelude
( module Diagrams.Prelude
, module Graphics.UI.Toy.Gtk
, module Graphics.UI.Toy.Gtk.Diagrams
, module Graphics.UI.Toy.Gtk.Dict
, module Graphics.UI.Toy.Gtk.Text
, module Graphics.UI.Toy.Button
, module Graphics.UI.Toy.Diagrams
, module Graphics.UI.Toy.Dict
, module Graphics.UI.Toy.Draggable
-- , module Graphics.UI.Toy.Slider
, module Graphics.UI.Toy.Transformed
, Cairo
) where
import Diagrams.Backend.Cairo ( Cairo )
import Diagrams.Prelude
import Graphics.UI.Toy.Gtk
import Graphics.UI.Toy.Gtk.Diagrams
import Graphics.UI.Toy.Gtk.Dict
import Graphics.UI.Toy.Gtk.Text
import Graphics.UI.Toy.Button
import Graphics.UI.Toy.Diagrams
import Graphics.UI.Toy.Dict
import Graphics.UI.Toy.Draggable
-- import Graphics.UI.Toy.Slider
import Graphics.UI.Toy.Transformed
| mgsloan/toy-gtk-diagrams | src/Graphics/UI/Toy/Gtk/Prelude.hs | bsd-3-clause | 1,493 | 0 | 5 | 193 | 196 | 148 | 48 | 23 | 0 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
module Duckling.Duration.SV.Tests
( tests
) where
import Prelude
import Data.String
import Test.Tasty
import Duckling.Dimensions.Types
import Duckling.Duration.SV.Corpus
import Duckling.Testing.Asserts
-- | Corpus-driven test group for Swedish duration parsing.
tests :: TestTree
tests = testGroup "SV Tests" cases
  where
    cases = [makeCorpusTest [Seal Duration] corpus]
| facebookincubator/duckling | tests/Duckling/Duration/SV/Tests.hs | bsd-3-clause | 509 | 0 | 9 | 80 | 79 | 50 | 29 | 11 | 1 |
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
module Language.SimplePi.Types
( Position (..)
, dummyPos
, Statement (..)
, Expr
, Ident (..)
, Binding (..)
, ExprF (..)
, getPosition
, Identity (..)
, Fix (..)
) where
import Data.Functor.Foldable (Fix (..))
import Data.Functor.Identity
import Data.Text (Text)
import qualified Data.Text.Lazy as Lazy
import Text.PrettyPrint.Leijen.Text
import GHC.Generics
-- Statement
-- | One top-level REPL/file statement of the language.
data Statement
  = Load FilePath              -- ^ load another source file
  | Parameter Ident Expr  {- <var> : <expr> -}
  | Definition Ident Expr {- <var> = <expr> -}
  | Check Expr                 -- ^ type-check an expression
  | Eval Expr                  -- ^ evaluate an expression
    deriving (Show, Eq, Generic)
-- Expression
-- | Expressions are the fixed point of the pattern functor 'ExprF'.
type Expr = Fix ExprF
-- | Source-level identifier.
newtype Ident = Ident Text
  deriving (Show, Eq, Ord, Generic)
-- | A binder with an optional/required annotation, depending on @f@
-- ('Maybe' for lambda arguments, 'Identity' for Pi binders).
data Binding f e = Binding Ident (f e)
  deriving (Show, Eq, Foldable, Functor, Generic)
-- | One layer of expression structure; every constructor carries its
-- source 'Position'.
data ExprF e
  = Lambda Position [Binding Maybe e] e
  | Pi Position (Binding Identity e) e
  | Arrow Position e e [e]
  | App Position e e [e]
  | Var Position Ident
  | Universe Position Integer
    deriving (Show, Eq, Functor, Foldable, Generic)
-- | Source position recorded at the root constructor of an expression.
getPosition :: Expr -> Position
getPosition (Fix expr) = posOf expr
  where
    posOf (Lambda p _ _)   = p
    posOf (Pi p _ _)       = p
    posOf (Arrow p _ _ _)  = p
    posOf (App p _ _ _)    = p
    posOf (Var p _)        = p
    posOf (Universe p _)   = p
-- | File position
data Position = Position
  { posFileName :: !Text
  , posLine :: {-# UNPACK #-} !Int
  , posColumn :: {-# UNPACK #-} !Int
  } deriving (Show, Ord, Eq)
-- | Placeholder position for synthesized nodes (line 1, column 0).
dummyPos :: Position
dummyPos = Position "<no location information>" 1 0
-- Render as the conventional @file:line:col@.
instance Pretty Position where
  pretty (Position fn line col) =
    text (Lazy.fromStrict fn) <> colon <> int line <> colon <> int col
| esmolanka/simple-pi | src/Language/SimplePi/Types.hs | bsd-3-clause | 1,871 | 0 | 13 | 470 | 605 | 345 | 260 | 63 | 6 |
-- | Transforms MultiCoreStatus' into diagrams
module Graphics.Diagrams.Transformations.MultiCoreStatus2Diagram
( transformStatus )
where
-- External imports
import Data.Maybe
import Data.History
-- Internal imports
import Config.Preferences
import Config.Config
import Graphics.Diagrams.Simple.Diagram
import Graphics.Diagrams.MultiCoreStatus
import Graphics.Diagrams.Types (Name)
import Model.SystemStatus
-- | Transform a multicore status into a diagram, rendering the present
-- snapshot of the history. Ignored units are dropped (see
-- 'transformProcessingUnit' returning 'Nothing').
transformStatus :: Config -> SystemStatus -> Diagram
transformStatus cfg (SystemStatus mhist s) = Diagram ps' ms'
 where ps' = mapMaybe (transformProcessingUnit cfg s) ps
       ms' = map transformMessage ms
       (MultiCoreStatus ps ms) = historyPresent mhist
-- | Transforms a processing unit into a group box. Ignored units yield
-- 'Nothing'; collapsed units render without their children.
transformProcessingUnit :: Config -> [Name] -> ProcessingUnit -> Maybe Box
transformProcessingUnit _ _ (ProcessingUnit _ _ UnitIgnored) = Nothing
transformProcessingUnit cfg sel (ProcessingUnit n cs e) =
  Just $ GroupBox n cs' col expand
 where cs' = if expand then map (transformRunningElement cfg sel') cs else []
       col = processingUnitColor cfg (sel == [n]) -- highlight if unit itself selected
       -- selection path relative to this unit's children
       sel' = if not (null sel) && head sel == n then tail sel else []
       expand = e == UnitExpanded
-- | Transforms a running element (component, application) into a box,
-- highlighting it when it is the selected element.
transformRunningElement :: Config -> [Name] -> RunningElement -> Box
transformRunningElement cfg ns (Component n k s _ _) =
  Box n k (runningElementColor cfg (ns == [n]) s)
-- | Turn a message between units into an arrow from its sender to its
-- receiver.
transformMessage :: Message -> Arrow
transformMessage msg = Arrow from to
  where
    from = sender msg
    to   = receiver msg
| ivanperez-keera/SoOSiM-ui | src/Graphics/Diagrams/Transformations/MultiCoreStatus2Diagram.hs | bsd-3-clause | 1,641 | 0 | 12 | 292 | 436 | 234 | 202 | 28 | 3 |
module Main where
import Data.AhoCorasick
main :: IO ()
main = do
    -- Classic Aho-Corasick demo: match a dictionary against "ushers".
    let input = "ushers"
        plain = construct ["he", "she", "his", "hers"]
    print (run plain input)
    -- Same dictionary, but each pattern carries an attached value.
    let valued = constructWithValues
                   [("he", 1.2), ("she", 3.4), ("his", 5.6), ("hers", 7.8)]
    print (run valued input)
| yuttie/ac-machine | examples/Simple.hs | bsd-3-clause | 273 | 0 | 12 | 61 | 120 | 66 | 54 | 8 | 1 |
-- {-# LANGUAGE EmptyDataDecls #-}
{-# LANGUAGE ExplicitForAll #-}
--{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
-- {-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE MultiParamTypeClasses #-}
--{-# LANGUAGE PartialTypeSignatures #-}
{-# LANGUAGE RankNTypes #-}
--{-# LANGUAGE RebindableSyntax #-}
--{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE OverloadedLists #-}
--{-# LANGUAGE NamedFieldPuns #-}
module FV.Types
( MCtruth (..)
, Prong (..)
, Chi2 (..)
, VHMeas (..), hFilter
, XMeas (..), vBlowup, xXMeas, yXMeas, zXMeas
, DMeas (..), Pos, distance
, HMeas (..)
, QMeas (..), fromHMeas
, PMeas (..), fromQMeas, invMass
, MMeas (..)
, XFit (..)
, chi2Vertex, zVertex, rVertex, z0Helix
) where
import Prelude.Extended
import qualified Data.Vector.Unboxed as A
( drop, zip, map, foldl )
import Data.Foldable ( fold )
import Data.Cov
import Data.Cov.Jac ( Jac (..) )
-----------------------------------------------
-- Prong
-- a prong results from a vertex fit of N helices
-- | Chi-squared value of a fit.
newtype Chi2 = Chi2 Number
instance Show Chi2 where
  show (Chi2 c2) = show c2
-- Semiring: addition and multiplication both lift componentwise.
instance Semiring Chi2 where
  add (Chi2 c0) (Chi2 c1) = Chi2 (c0+c1)
  zero = Chi2 0.0
  mul (Chi2 c0) (Chi2 c1) = Chi2 (c0*c1)
  one = Chi2 1.0
-- NOTE: deliberately partial Num instance — (*) is an error because the
-- product of two chi-squared values is not itself a chi-squared value.
instance Num Chi2 where
  fromInteger i = Chi2 $ fromInteger i
  negate (Chi2 c) = Chi2 (negate c)
  abs (Chi2 c) = Chi2 (abs c)
  signum (Chi2 c) = Chi2 (signum c)
  (*) = error "cannot multiply Chi2*Chi2 to return a Chi2"
  (+) (Chi2 a) (Chi2 b) = Chi2 (a+b)
-- | Result of a vertex fit over N helices.
data Prong = Prong
  { nProng :: Int              -- ^ number of tracks in the fit
  , fitVertex :: XMeas         -- ^ fitted vertex position
  , fitMomenta :: List QMeas   -- ^ fitted track momenta at the vertex
  , fitChi2s :: List Chi2      -- ^ per-track chi-squared contributions
  , measurements :: VHMeas     -- ^ the input measurements
  }
-- fitMomenta :: Prong -> List QMeas
-- fitMomenta (Prong {fitMomenta= f}) = f
instance Show Prong where
  show _ = "Prong!!!!!!!!"
-----------------------------------------------
-- VHMeas
--
-- | A vertex measurement together with the helix measurements attached to it.
data VHMeas = VHMeas {
    vertex :: XMeas
  , helices :: List HMeas
                     }
-- vertex :: VHMeas -> XMeas
-- vertex (VHMeas {vertex=v}) = v
-- helices :: VHMeas -> List HMeas
-- helices (VHMeas {helices=hs}) = hs
{-- instance Semigroup VHMeas where --}
{-- append (VHMeas v hs) (VHMeas _ hs') = VHMeas v ( hs ++ hs' ) --}
{-- mempty = VHMeas (XMeas (Matrix.zero 3 1) (Matrix.zero 3 3)) [] --}
instance Show VHMeas where
  show VHMeas {vertex=v_, helices=hs} = "VHMeas w/ " <> show (length hs)
                                        <> " tracks. " <> show v_
-- | Scale the diagonal of the vertex covariance by the given factor.
vBlowup :: Number -> VHMeas -> VHMeas
{-- vBlowup scale vm = over vertexLens (blowup scale) vm where --}
vBlowup scale VHMeas {vertex= v_, helices= hs} =
  VHMeas {vertex= blowup scale v_, helices= hs} where
  blowup :: Number -> XMeas -> XMeas -- blow up diag of cov matrix
  blowup s (XMeas v__ cv) = XMeas v__ cv' where
    cv' = scaleDiag s cv
-- | Keep only the helices at the given indices.
hFilter :: List Int -> VHMeas -> VHMeas
hFilter is VHMeas {vertex=v_, helices=hs} = VHMeas {vertex=v_, helices= iflt is hs}
-- | Drop the helix at the given index.
-- NOTE(review): not in the module export list, so currently unused externally.
hRemove :: Int -> VHMeas -> VHMeas
hRemove indx VHMeas {vertex=v_, helices=hs} = VHMeas {vertex=v_, helices=irem indx hs}
-----------------------------------------------
-- MCtruth
--
-- | Monte-Carlo truth information: pile-up vertex z positions.
newtype MCtruth = MCtruth {
    pu_zpositions :: List Number
                          }
instance Show MCtruth where
  show MCtruth {pu_zpositions=puz} = "MCtruth w/" <> show (length puz)
                                     <> " PU z positions."
-----------------------------------------------
-- HMeas
-- 5-vector and covariance matrix for helix measurement
--
data HMeas = HMeas Vec5 Cov5 Number
-- Shows each helix parameter with its error (sqrt of the covariance
-- diagonal); the first parameter gets more digits than the rest.
instance Show HMeas where
  show (HMeas h ch _) = s' where
    sh = A.map sqrt $ diag ch
    hs = toArray h
    s00 = to5fix x <> " +-" <> to5fix dx where
      x = uidx hs 0
      dx = uidx sh 0
    s' = A.foldl f s00 (A.drop 1 $ A.zip hs sh) where
      f s (x, dx) = s <> to3fix x <> " +-" <> to3fix dx
-- | The z0 parameter (index 4) of a helix measurement.
z0Helix :: HMeas -> Number
z0Helix (HMeas h _ _) = z0 where
  v = val h
  z0 = uidx v 4
-----------------------------------------------
-- QMeas
-- 3-vector and covariance matrix for momentum measurement
--
-- | Charged-pion mass in GeV, used as the mass hypothesis throughout.
mπ :: Number
mπ = 0.1395675
data QMeas = QMeas Vec3 Cov3 Number
instance Show QMeas where
  show = showQMeas
-- print QMeas as a 4-momentum vector with errors, use pt and pz
showQMeas :: QMeas -> String
showQMeas (QMeas q cq w2pt) = s' where
  f :: String -> (Number, Number) -> String
  f s (x, dx) = s <> to3fix x <> " +-" <> to3fix dx
  m = mπ
  wp = w2pt
  qs :: Array Number
  qs = toArray q
  w = uidx qs 0
  tl = uidx qs 1
  psi0 = uidx qs 2
  pt = wp / abs w
  pz = pt*tl
  psi = psi0*180.0/pi -- degrees for display
  e = sqrt(pt*pt + pz*pz + m*m)
  -- Jacobian of (pt, pz, psi, e) w.r.t. (w, tl, psi0) for error propagation.
  jj :: Jac34
  jj = Jac { v = [ -wp/w/w, -wp/w/w*tl, 0.0, -(pz*pz + pt*pt)/w/e
                 , 0.0, wp/w, 0.0, pt*pt*tl/e
                 , 0.0, 0.0, 1.0, 0.0], nr = 3 }
  cq' = jj .*. cq
  p' = [pt, pz, psi, e]
  dp = A.map sqrt $ diag cq'
  d1 = uidx dp 0
  d2 = uidx dp 1
  d3 = uidx dp 2
  d4 = uidx dp 3
  dp' = [d1, d2, d3*180.0/pi, d4]
  s' = A.foldl f "" ( A.zip p' dp' ) <> " GeV"
fromHMeas :: HMeas -> QMeas -- just drop the d0, z0 part... fix!!!!
fromHMeas (HMeas h ch w2pt) = QMeas q cq w2pt where
  q = subm 3 h
  cq = subm2 3 ch
-- | Helix measurement straight to a 4-momentum measurement.
h2p :: HMeas -> PMeas
h2p = fromQMeas <<< fromHMeas
-----------------------------------------------
-- PMeas
-- 4-vector and covariance matrix for momentum px,py,pz and energy
--
data PMeas = PMeas Vec4 Cov4
-- Combining two 4-momentum measurements adds both vectors and covariances.
instance Semigroup PMeas where
  (<>) (PMeas p1 cp1) (PMeas p2 cp2) = PMeas (p1+p2) (cp1 + cp2)
instance Monoid PMeas where
  mappend (PMeas p1 cp1) (PMeas p2 cp2) = PMeas (p1+p2) (cp1 + cp2)
  mempty = PMeas (fromArray [0.0,0.0,0.0,0.0]) zero
instance Show PMeas where
  show = showPMeas
-- print PMeas as a 4-momentum vector px,py,pz,E with errors
showPMeas :: PMeas -> String
showPMeas (PMeas p cp) = s' where
  sp = A.map sqrt $ diag cp
  f s (x, dx) = s <> to3fix x <> " +-" <> to3fix dx -- \xc2b1 ±±±±±
  s' = A.foldl f "" (A.zip (toArray p) sp) <> " GeV"
-- | Invariant mass of the sum of a list of 4-momentum measurements.
invMass :: List PMeas -> MMeas
invMass = pmass <<< fold
-- | Invariant mass (and its error by linear error propagation) of a single
-- 4-momentum measurement.
pmass :: PMeas -> MMeas
pmass (PMeas p cp) = mm  where
  ps = toArray p
  px = uidx ps 0
  py = uidx ps 1
  pz = uidx ps 2
  e = uidx ps 3
  cps = toArray cp
  c11 = uidx cps 0
  c12 = uidx cps 1
  c13 = uidx cps 2
  c14 = uidx cps 3
  c22 = uidx cps 5
  c23 = uidx cps 6
  c24 = uidx cps 7
  c33 = uidx cps 10
  c34 = uidx cps 11
  c44 = uidx cps 15
  -- clamp at zero so rounding can never produce a NaN from sqrt
  m = sqrt $ max (e*e-px*px-py*py-pz*pz) 0.0
  sigm0 = px*c11*px + py*c22*py + pz*c33*pz + e*c44*e +
            2.0*(px*(c12*py + c13*pz - c14*e)
               + py*(c23*pz - c24*e)
               - pz*c34*e)
  dm = sqrt ( max sigm0 0.0 ) / m
  mm = MMeas {m=m, dm=dm}
-- | Convert a (w, tl, psi0) momentum measurement to a cartesian 4-momentum
-- (px, py, pz, E) under the pion-mass hypothesis, propagating the 3x3
-- covariance to the full 4x4 covariance.
fromQMeas :: QMeas -> PMeas
fromQMeas (QMeas q0 cq0 w2pt) = PMeas p0 cp0 where
  m = mπ
  q0s = toArray q0
  w = uidx q0s 0
  tl = uidx q0s 1
  psi0 = uidx q0s 2
  sph = sin psi0
  cph = cos psi0
  pt = w2pt / abs w
  px = pt * cph
  py = pt * sph
  pz = pt * tl
  sqr x= x*x
  e = sqrt(px*px + py*py + pz*pz + m*m)
  ps = w2pt / w
  dpdk = ps*ps/w2pt
  cq0s = toArray cq0
  c11 = uidx cq0s 0
  c12 = uidx cq0s 1
  c13 = uidx cq0s 2
  c22 = uidx cq0s 4
  c23 = uidx cq0s 5
  c33 = uidx cq0s 8
  -- covariance terms of the transformed (px, py, pz, E) vector
  xy = 2.0*ps*dpdk*cph*sph*c13
  sxx = sqr (dpdk*cph) * c11 + sqr (ps*sph) * c33 + xy
  sxy = cph*sph*(dpdk*dpdk*c11 - ps*ps*c33) +
          ps*dpdk*(sph*sph-cph*cph)*c13
  syy = sqr (dpdk*sph) * c11 + sqr (ps*cph) * c33 - xy
  sxz = dpdk*dpdk*cph*tl*c11 -
          ps*dpdk*(cph*c12-sph*tl*c13) -
          ps*ps*sph*c23
  syz = dpdk*dpdk*sph*tl*c11 -
          ps*dpdk*(sph*c12 + cph*tl*c13) +
          ps*ps*cph*c23
  szz = sqr (dpdk*tl) * c11 + ps*ps*c22 -
          2.0*ps*dpdk*tl*c12
  sxe = (px*sxx + py*sxy + pz*sxz)/e
  sye = (px*sxy + py*syy + pz*syz)/e
  sze = (px*sxz + py*syz + pz*szz)/e
  see = (px*px*sxx + py*py*syy + pz*pz*szz +
          2.0*(px*(py*sxy + pz*sxz) + py*pz*syz))/e/e
  cp0 = fromArray [ sxx, sxy, sxz, sxe
                  , syy, syz, sye
                  , szz, sze
                  , see]
  p0 = fromArray [px,py,pz,e]
-----------------------------------------------
-- MMeas
-- scalar mass and error calculated from PMeas covariance matrices
--
data MMeas = MMeas
  { m :: Number   -- ^ mass in GeV
  , dm :: Number  -- ^ mass error in GeV
  }
-- Shown in MeV (hence the factor 1000).
instance Show MMeas where
  show MMeas {m=m, dm=dm} = " " <> to1fix (m*1000.0) <> " +-" <> to1fix (dm*1000.0) <> " MeV"
-----------------------------------------------
-- XMeas
-- 3-vector and covariance matrix for position/vertex measurement
--
data DMeas = DMeas Number Number -- distance and error
instance Show DMeas where
  show (DMeas d sd) = to2fix d <> " +-" <> to2fix sd <> " cm"
class Pos a where
  distance :: a -> a -> DMeas -- distance between two positions
-- Euclidean distance between two measured vertices; the error combines
-- both covariances projected onto the unit vector between the points.
instance Pos XMeas where
  distance (XMeas v0 vv0) (XMeas v1 vv1) = DMeas d sd where
    v0s = toArray v0
    x0 = uidx v0s 0
    y0 = uidx v0s 1
    z0 = uidx v0s 2
    v1s = toArray v1
    x1 = uidx v1s 0
    y1 = uidx v1s 1
    z1 = uidx v1s 2
    d = sqrt(sqr(x0-x1) + sqr(y0-y1) + sqr(z0-z1))
    dd :: Vec3
    dd = fromArray [(x0-x1)/d, (y0-y1)/d, (z0-z1)/d]
    tem0 = dd .*. vv0
    tem1 = dd .*. vv1
    sd = sqrt (tem0 + tem1)
-- | Vertex position measurement: a 3-vector with its covariance matrix.
data XMeas = XMeas Vec3 Cov3
instance Semigroup XMeas where
  (<>) (XMeas x1 cx1) (XMeas x2 cx2) = XMeas (x1 + x2) (cx1 + cx2)
instance Monoid XMeas where
  mempty = XMeas zero zero
  -- Fix: 'mappend' was 'undefined', so any use of 'mappend' (or code written
  -- against the old Monoid interface) crashed even though the lawful
  -- implementation exists. Monoid law requires mappend = (<>).
  mappend = (<>)
instance Show XMeas where
  show = showXMeas
-- return a string showing vertex position vector with errors
-- (prefixed with the derived (r, z) cylindrical coordinates)
showXMeas :: XMeas -> String
showXMeas (XMeas v cv) = s' where
  vv = toArray v
  x = uidx vv 0
  y = uidx vv 1
  z = uidx vv 2
  s2v = A.map sqrt $ diag cv
  dx = uidx s2v 0
  dy = uidx s2v 1
  dz = uidx s2v 2
  f :: Number -> Number -> String -> String
  f x dx s = s <> to2fix x <> " +-" <> to2fix dx
  s' = f z dz <<< f y dy <<< f x dx $
        "(r,z) =" <> "(" <> to2fix (sqrt (x*x + y*y))
          <> ", " <> to2fix z <> "), x y z ="
-- | x coordinate of a vertex measurement.
xXMeas :: XMeas -> Number
xXMeas (XMeas v _) = x where
  x = uidx (val v) 0
-- | y coordinate of a vertex measurement.
yXMeas :: XMeas -> Number
yXMeas (XMeas v _) = y where
  y = uidx (val v) 1
-- | z coordinate of a vertex measurement.
zXMeas :: XMeas -> Number
zXMeas (XMeas v _) = z where
  z = uidx (val v) 2
-- | Fitted vertex: position, covariance, and fit chi-squared.
data XFit = XFit Vec3 Cov3 Chi2
instance Show XFit where
  show (XFit x xx c2) = showXMeas (XMeas x xx) <> ", chi2=" <> show c2
chi2Vertex :: XFit -> Chi2
chi2Vertex (XFit _ _ c2) = c2
-- | z coordinate of a fitted vertex.
zVertex :: XFit -> Double
zVertex (XFit v _ _) = z where
  z = uidx (val v) 2
-- | Transverse radius sqrt(x² + y²) of a fitted vertex.
rVertex :: XFit -> Double
rVertex (XFit v _ _) = r where
  r = sqrt $ (sqr (uidx (val v) 0)) + (sqr (uidx (val v) 1))
| LATBauerdick/fv.hs | src/FV/Types.hs | bsd-3-clause | 10,884 | 1 | 20 | 3,200 | 4,393 | 2,343 | 2,050 | 277 | 1 |
-- !!! local aliases
module M where
import qualified Data.OldList as M
import qualified Data.Maybe as M
-- Both qualified imports share the alias M; each name below still resolves
-- uniquely (length from Data.OldList, isJust from Data.Maybe).
x = M.length
b = M.isJust
| jstolarek/ghc | testsuite/tests/module/mod106.hs | bsd-3-clause | 132 | 0 | 5 | 25 | 35 | 24 | 11 | 5 | 1 |
module Main where
main :: IO()
main = do
putStrLn "Give me a list, and I'll give you it's last...Maybe"
ls <- getLine
print $ myLast ls
-- | Safe last element of a list: 'Nothing' on the empty list.
myLast :: [a] -> Maybe a
myLast = foldl (\_ element -> Just element) Nothing
-- | Partial variant of 'myLast': crashes with an error on the empty list.
myLast' :: [a] -> a
myLast' xs =
  case xs of
    []         -> error "Cannot operate on empty lists, exiting..."
    [final]    -> final
    (_ : rest) -> myLast' rest
| Jaso-N7/H99-solutions | 1-10/1.hs | bsd-3-clause | 375 | 0 | 8 | 92 | 153 | 77 | 76 | 14 | 1 |
{-# LANGUAGE CPP, FlexibleInstances, RankNTypes,
TypeSynonymInstances #-}
{-# OPTIONS -Wall #-}
-- | An extension module of building blocks. Contains booleans, comparison operations, branchings.
module Language.Paraiso.OM.Builder.Boolean
(eq, ne, lt, le, gt, ge, select) where
import Data.Dynamic (Typeable, typeOf)
import qualified Language.Paraiso.OM.Arithmetic as A
import Language.Paraiso.OM.Builder.Internal
import Language.Paraiso.OM.DynValue as DVal
import Language.Paraiso.OM.Graph
import Language.Paraiso.OM.Realm as Realm
import Language.Paraiso.OM.Value as Val
infix 4 `eq`, `ne`, `lt`, `le`, `gt`, `ge`
-- | generate a binary operator that returns Bool results.
-- Builds the instruction node for @op@ over both arguments, then re-types
-- the resulting value node as 'Bool'.
mkOp2B :: (TRealm r, Typeable c) =>
          A.Operator -- ^The operation to be performed
       -> (Builder v g a (Value r c)) -- ^The first argument
       -> (Builder v g a (Value r c)) -- ^The second argument
       -> (Builder v g a (Value r Bool)) -- ^The result
mkOp2B op builder1 builder2 = do
  v1 <- builder1
  v2 <- builder2
  let
      r1 = Val.realm v1
  n1 <- valueToNode v1
  n2 <- valueToNode v2
  n0 <- addNodeE [n1, n2] $ NInst (Arith op)
  -- value node carries the operand's realm but a Bool type
  n01 <- addNodeE [n0] $ NValue (toDyn v1){typeRep = typeOf True}
  return $ FromNode r1 True n01
-- | Shared shape of all comparison operators: two same-typed values in,
-- a Bool-valued result out.
type CompareOp = (TRealm r, Typeable c) =>
                 (Builder v g a (Value r c)) -> (Builder v g a (Value r c)) -> (Builder v g a (Value r Bool))
-- | Equal
eq :: CompareOp
eq = mkOp2B A.EQ
-- | Not equal
ne :: CompareOp
ne = mkOp2B A.NE
-- | Less than
lt :: CompareOp
lt = mkOp2B A.LT
-- | Less than or equal to
le :: CompareOp
le = mkOp2B A.LE
-- | Greater than
gt :: CompareOp
gt = mkOp2B A.GT
-- | Greater than or equal to
ge :: CompareOp
ge = mkOp2B A.GE
-- | selects either the second or the third argument based
-- on the Bool condition, building a three-operand Select instruction node.
select ::(TRealm r, Typeable c) =>
         (Builder v g a (Value r Bool)) -- ^The 'Bool' condition
      -> (Builder v g a (Value r c)) -- ^The value chosen when the condition is 'True'
      -> (Builder v g a (Value r c)) -- ^The value chosen when the condition is 'False'
      -> (Builder v g a (Value r c)) -- ^The result
select builderB builder1 builder2 = do
  vb <- builderB
  v1 <- builder1
  v2 <- builder2
  nb <- valueToNode vb
  n1 <- valueToNode v1
  n2 <- valueToNode v2
  n0 <- addNodeE [nb, n1, n2] $ NInst (Arith A.Select)
  n01 <- addNodeE [n0] $ NValue (toDyn v1)
  let
      r1 = Val.realm v1
      c1 = Val.content v1
  return $ FromNode r1 c1 n01
| drmaruyama/Paraiso | Language/Paraiso/OM/Builder/Boolean.hs | bsd-3-clause | 2,474 | 0 | 12 | 589 | 834 | 450 | 384 | 60 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeApplications #-}
import Control.Monad.Classes hiding (exec)
import Control.Monad.Classes.Log
import Control.Monad.Log (LoggingT, runLoggingT)
import Control.Monad.Trans.Reader (ReaderT(..))
import Control.Monad.Trans.State (State, execState)
import Test.Tasty
import Test.Tasty.HUnit
-- | Runs both test cases; each expects the log handlers to have recorded
-- the Bar write before the Foo write (handlers prepend, so order reverses).
main :: IO ()
main = defaultMain $ testGroup "tests" [testCase "should compile" testcase
                                       ,testCase "writer impl" writecase]
  where testcase = assertEqual "correct results" opresults (exec op)
        writecase = assertEqual "correct writes" opresults (exec opw)
        opresults = [RRes "bar", LRes "foo"]
-- Two distinct payload types so the multi-instance constraints are exercised.
data Foo = Foo String
data Bar = Bar String
-- | Result markers recorded by the Foo (LRes) and Bar (RRes) handlers.
data Res = LRes String | RRes String
  deriving (Eq, Ord, Show)
-- | Log the environment value of type @a@, transformed by @f@ (here @f@
-- is only 'id' at a chosen type, used for type application).
logit :: (MonadReader a m, MonadLog a m) => (a -> a) -> m ()
logit f = ask >>= logMessage . f
-- | This is the operation that will not compile under mtl
-- | due to functional dependencies.
op :: (MonadReader Foo m, MonadReader Bar m, MonadLog Foo m, MonadLog Bar m) => m ()
op = logit (id @Foo) >> logit (id @Bar)
-- | handler for Foo
foo :: MonadState [Res] m => Foo -> m ()
foo (Foo s) = modify $ \r -> LRes s : r
-- | Handler for Bar
bar :: MonadState [Res] m => Bar -> m ()
bar (Bar s) = modify $ \r -> RRes s : r
-- | Run an operation with fixed Foo/Bar environments, dispatching Foo logs
-- to 'foo' and Bar logs to 'bar', collecting results in a state list.
exec :: ReaderT Foo (ReaderT Bar (LoggingT Foo (LoggingT Bar (State [Res])))) () -> [Res]
exec m = execState (runLoggingT (runLoggingT (runReaderT (runReaderT m (Foo "foo")) (Bar "bar")) foo) bar) []
-- | Same as 'logit' but via MonadWriter's 'tell'.
writeit :: (MonadReader a m, MonadWriter a m) => (a -> a) -> m ()
writeit f = ask >>= tell . f
-- | Writer-based twin of 'op'.
opw :: (MonadReader Foo m, MonadReader Bar m, MonadWriter Foo m, MonadWriter Bar m) => m ()
opw = writeit (id @Foo) >> writeit (id @Bar)
| edwardgeorge/monad-classes-logging | test/Spec.hs | bsd-3-clause | 1,754 | 0 | 15 | 365 | 713 | 377 | 336 | 33 | 1 |
{-# OPTIONS -Wall #-}
module Main where
import Codec.Picture( PixelRGBA8( .. ), writePng )
import Graphics.Rasterific
import Graphics.Rasterific.Texture
import qualified Data.Vector as V
import qualified Data.Vector.Unboxed as UV
import qualified Data.Vector.Unboxed.Mutable as UM
import Control.Monad.Primitive
import Control.Monad (filterM)
import Data.Word (Word8)
import System.Random
type Width = Int
type Height = Int
type XPos = Int
type YPos = Int
-- | RGB channels, one byte each.
type Color = (Word8, Word8, Word8)
-- | A 2-D canvas coordinate.
type P2 = (XPos, YPos)
-- | Position, colour, and a "has been painted" flag.
type Pixel = (P2, Color, Bool)
-- | Mutable flat (row-major) vector of pixels.
type Canvas m = UM.MVector m Pixel
type Points = V.Vector P2
-- Fixed output dimensions in pixels.
width :: Width
width = 640
height :: Height
height = 360
-- | One white, untouched pixel for every coordinate of a w×h canvas,
-- emitted in row-major order (top row first).
generatePixels :: Width -> Height -> [Pixel]
generatePixels w h =
  [((x, y), white, False) | y <- [0 .. h - 1], x <- [0 .. w - 1]]
  where white = (255, 255, 255)
-- | Render one pixel as a 1x1 filled rectangle drawing instruction.
createPixelInstruction :: Pixel -> Drawing PixelRGBA8 ()
createPixelInstruction ((x,y),(r,g,b),_) =
  withTexture (uniformTexture color) . fill $ rectangle point 1 1
  where color = PixelRGBA8 (r :: Word8) (g :: Word8) (b :: Word8) 255 -- fully opaque
        point = V2 (fromIntegral x) (fromIntegral y)
-- | Build a canvas of white pixels, paint a seed pixel at the centre, then
-- grow colour outward from it via 'draw'.
-- NOTE(review): 'canvasPoints' uses the global 'width'/'height' while the
-- pixel buffer uses the parameters w/h — these must agree or indexing is
-- wrong; confirm callers always pass the globals.
generateCanvas :: PrimMonad m => Width -> Height -> StdGen -> m (Canvas (PrimState m))
generateCanvas w h g = do
  let seed = (quot w 2, quot h 2)
      seedColor = (207,0,15)
      canvasPoints = V.fromList [(j,i) | i <- [0..height-1], j <- [0..width-1]]
  mV <- UV.thaw $ UV.fromList $ generatePixels w h -- create canvas
  _ <- drawPixel mV seedColor seed -- draw center pixel (seed)
  _ <- draw mV canvasPoints seedColor g
  return mV
-- | One growth step: mutate the colour slightly, extend every painted pixel
-- into one random untouched neighbour, and recurse until the whole canvas
-- is painted.
draw :: PrimMonad m => Canvas (PrimState m) -> Points -> Color -> StdGen -> m (Canvas (PrimState m))
draw canvas canvasPoints seedColor g = do
  let (nextColor,g') = newColor seedColor g
  points <- touchedPoints canvas canvasPoints
  V.sequence_ $ V.map (drawBorderPixels canvas nextColor g') points
  if (V.length points < (width*height)) then do (draw canvas canvasPoints nextColor g') else return canvas
-- | Draw three uniform values in [0,1), threading the generator through.
randVals :: StdGen -> ((Double, Double, Double), StdGen)
randVals g0 = ((a, b, c), g3)
  where (a, g1) = randomR (0,1) g0
        (b, g2) = randomR (0,1) g1
        (c, g3) = randomR (0,1) g2
-- | Randomly perturb a colour by one step per channel.
newColor :: Color -> StdGen -> (Color,StdGen)
newColor c g = (calculateColor c prob, g')
  where (prob,g') = randVals g
-- | Apply one random draw per channel to decide its change direction.
calculateColor :: Color -> (Double, Double, Double) -> Color
calculateColor (r,g,b) (x,y,z) = color
  where color = (incrementColor r (inc x), incrementColor g (inc y), incrementColor b (inc z))
-- | Shift one colour channel by 5 in the given direction, clamping at the
-- 0/255 bounds of 'Word8' so the arithmetic never wraps around.
--
-- Fix: previously only the exact boundaries (255 on Add, 0 on Subtract)
-- were guarded, so 251..254 + 5 wrapped to 0..3 and 1..4 - 5 wrapped to
-- 252..255. The historical "bounce" at the exact boundaries (255 -> 244,
-- 0 -> 1) is kept unchanged.
incrementColor :: Word8 -> Inc -> Word8
incrementColor w i
  | i == Add && w == 255 = 244
  | i == Subtract && w == 0 = 1
  | i == Add = if w >= 251 then 255 else w + 5
  | i == Subtract = if w <= 4 then 0 else w - 5
  | otherwise = w
-- | Direction of a channel change.
data Inc = Add | Same | Subtract deriving (Eq)
-- | Map a uniform draw in [0,1) to a direction: roughly one third each
-- for Subtract, Same and Add.
inc :: Double -> Inc
inc d
  | d >= 0.0 && d < 0.33 = Subtract
  | d >= 0.33 && d < 0.66 = Same
  | otherwise = Add
-- | If the given point is already painted, paint one random untouched
-- neighbour of it in the given colour; otherwise leave the canvas alone.
drawBorderPixels :: PrimMonad m => Canvas (PrimState m) -> Color -> StdGen -> P2 -> m (Canvas (PrimState m))
drawBorderPixels canvas color g point = do
  (_,_,touched) <- UM.read canvas (index point)
  if touched then do
    points <- pointsToDraw canvas point g
    sequence_ $ (drawPixel canvas color) <$> points
    return canvas
  else return canvas
-- Generate list of safe pixels to check
-- (one random in-bounds, still-unpainted 8-neighbour, or none)
pointsToDraw :: PrimMonad m => Canvas (PrimState m) -> P2 -> StdGen -> m [P2]
pointsToDraw canvas (x,y) g = do
  let validPoints = filter validPoint points
  availablePoints <- untouchedPoints canvas validPoints
  let shuffledPoints = shuffle availablePoints g
      takenPoints = take 1 shuffledPoints
  return takenPoints
  where n = (x,y-1)
        ne = (x+1,y-1)
        e = (x+1,y)
        se = (x+1,y+1)
        s = (x,y+1)
        sw = (x-1,y+1)
        w = (x-1,y)
        nw = (x-1,y-1)
        points = [n,ne,e,se,s,sw,w,nw]
-- | Random permutation by repeatedly extracting an element at a random
-- index. Quadratic, but only ever used on tiny neighbour lists here.
shuffle :: [P2] -> StdGen -> [P2]
shuffle [] _ = []
shuffle xs g =
  let (i, g') = randomR (0, length xs - 1) g
      (before, picked : after) = splitAt i xs
  in picked : shuffle (before ++ after) g'
-- | True when the point lies inside the canvas bounds.
validPoint :: P2 -> Bool
validPoint (x, y) = inside x width && inside y height
  where inside v limit = v >= 0 && v < limit
-- | All points whose pixel has already been painted.
touchedPoints :: PrimMonad m => Canvas (PrimState m) -> Points -> m Points
touchedPoints canvas points = do
  validPoints <- V.filterM (touchedPoint canvas) points
  return validPoints
--getTouchedPoint :: PrimMonad m => Canvas (PrimState m) -> Points -> m P2
--getTouchedPoint canvas points = do
-- validPoints <- V.filterM (touchedPoint canvas) points
-- return $ V.head $ V.take 1 validPoints
--touchedPoint :: PrimMonad m => Canvas (PrimState m) -> P2 -> m Bool
--touchedPoint canvas point = do
-- (_,_,touched) <- UM.read canvas (index point)
-- if touched then return True else return False
-- | Has the pixel at this point already been painted?
touchedPoint :: PrimMonad m => Canvas (PrimState m) -> P2 -> m Bool
touchedPoint canvas p = do
  (_, _, touched) <- UM.read canvas (index p)
  return touched
-- | All points whose pixel has not been painted yet.
untouchedPoints :: PrimMonad m => Canvas (PrimState m) -> [P2] -> m [P2]
untouchedPoints canvas points = do
  validPoints <- filterM (untouchedPoint canvas) points
  return validPoints
-- | Has the pixel at this point not been painted yet?
untouchedPoint :: PrimMonad m => Canvas (PrimState m) -> P2 -> m Bool
untouchedPoint canvas p = do
  (_, _, touched) <- UM.read canvas (index p)
  return (not touched)
-- | Paint the pixel at the given point: store the colour and mark it touched.
drawPixel :: PrimMonad m => Canvas (PrimState m) -> Color -> P2 -> m (Canvas (PrimState m))
drawPixel canvas color point = do
  let pixel = (point, color, True)
  UM.write canvas (index point) pixel
  return canvas
-- | Convert every pixel of the frozen canvas into a drawing instruction.
vecToInstructions :: UV.Vector Pixel -> [Drawing PixelRGBA8 ()]
vecToInstructions = map createPixelInstruction . UV.toList
-- | Row-major offset of a 2-D point into the flat canvas vector.
index :: P2 -> Int
index (x, y) = y * width + x
-- | Grow a coloured canvas from a central seed and write it to image.png.
main :: IO ()
main = do
  gen <- getStdGen
  mutableCanvas <- generateCanvas width height gen
  frozenCanvas <- UV.freeze mutableCanvas
  let drawInstructions = vecToInstructions frozenCanvas
      white = PixelRGBA8 255 255 255 255
      img = renderDrawing width height white $ sequence_ drawInstructions
  writePng "image.png" img
| ismailmustafa/Paint | src/Main.hs | bsd-3-clause | 6,243 | 0 | 15 | 1,507 | 2,486 | 1,304 | 1,182 | 142 | 2 |
{-# LANGUAGE FlexibleInstances #-}
module Shader where
import Prelude hiding (any, floor, ceiling)
import Control.Monad (when)
import Control.Monad.IO.Class (MonadIO, liftIO)
import Foreign
import Foreign.C.String
import Graphics.GL
import Linear
import Paths_hadoom
import Util
import qualified Data.ByteString as BS
import qualified Data.Text.Encoding as Text
import qualified Data.Text.IO as Text
-- | Newtype wrapper around a raw OpenGL program object id, keeping it
-- distinct from other 'GLuint' handles.
newtype GLProgram =
  GLProgram {unGLProgram :: GLuint}
-- | Compile a vertex and a fragment shader from the given data files,
-- link them into a fresh program, and bind the fixed attribute locations
-- used throughout the engine ('positionAttribute' etc.).  On link failure
-- the program info log is printed to stdout; the (broken) program handle
-- is still returned — callers get no error value.
createShaderProgram :: FilePath -> FilePath -> IO GLProgram
createShaderProgram vertexShaderPath fragmentShaderPath =
  do vertexShader <- glCreateShader GL_VERTEX_SHADER
     compileShader vertexShaderPath vertexShader
     fragmentShader <- glCreateShader GL_FRAGMENT_SHADER
     compileShader fragmentShaderPath fragmentShader
     shaderProg <- glCreateProgram
     glAttachShader shaderProg vertexShader
     glAttachShader shaderProg fragmentShader
     -- Attribute locations must be bound before linking to take effect.
     mapM_ (\(x,y) ->
              withCString x
                          (glBindAttribLocation shaderProg y))
           [("in_Position",positionAttribute)
           ,("in_Normal",normalAttribute)
           ,("in_Tangent",tangentAttribute)
           ,("in_Bitangent",bitangentAttribute)
           ,("in_UV",uvAttribute)]
     glLinkProgram shaderProg
     linked <- overPtr (glGetProgramiv shaderProg GL_LINK_STATUS)
     -- On failure, fetch and print the info log (length-prefixed C string).
     when (linked == GL_FALSE)
          (do maxLength <- overPtr (glGetProgramiv shaderProg GL_INFO_LOG_LENGTH)
              logLines <- allocaArray
                            (fromIntegral maxLength)
                            (\p ->
                               alloca (\lenP ->
                                         do glGetProgramInfoLog shaderProg maxLength lenP p
                                            len <- peek lenP
                                            peekCStringLen (p,fromIntegral len)))
              putStrLn logLines)
     return (GLProgram shaderProg)
  -- Load a shader source file (via Paths_hadoom) and compile it into the
  -- given shader object.  NOTE(review): compile errors are not checked.
  where compileShader path shader =
          do src <- getDataFileName path >>= Text.readFile
             BS.useAsCString
               (Text.encodeUtf8 src)
               (\ptr ->
                  withArray [ptr]
                            (\srcs ->
                               glShaderSource shader 1 srcs nullPtr))
             glCompileShader shader -- GL.get (GL.shaderInfoLog shader) >>= putStrLn
-- | Fixed vertex-attribute slots; these must match the
-- glBindAttribLocation calls in 'createShaderProgram' and the vertex
-- array setup elsewhere in the engine.
positionAttribute, uvAttribute, normalAttribute, tangentAttribute, bitangentAttribute :: GLuint
positionAttribute = 0
normalAttribute = 1
tangentAttribute = 2
bitangentAttribute = 3
uvAttribute = 4
-- | Values that can be written into a shader uniform at a given location.
class Uniform a where
  -- | Upload the value to the uniform at @loc@ of the *currently bound*
  -- program.
  setUniformLoc :: MonadIO m => GLint -> a -> m ()
-- | Look up a uniform by name, bind the program, and upload the value.
-- Note the side effect: the program becomes the current GL program.
setUniform :: (MonadIO m, Uniform a) => GLProgram -> String -> a -> m ()
setUniform (GLProgram shaderProg) name a =
  do loc <- liftIO (withCString name
                               (glGetUniformLocation shaderProg))
     glUseProgram shaderProg
     setUniformLoc loc a
-- | 4x4 matrices are uploaded by pointer; transpose flag is 0 (GL reads
-- them column-major, matching how 'with' lays out the 'M44').
instance Storable a => Uniform (M44 a) where
  setUniformLoc loc m =
    liftIO (with m
                 (glUniformMatrix4fv loc 1 0 .
                  castPtr))
-- | 2-component float vectors.
instance Uniform (V2 GLfloat) where
  setUniformLoc loc (V2 x y) =
    glUniform2f loc x y
-- | 4-component float vectors.
instance Uniform (V4 GLfloat) where
  setUniformLoc loc (V4 x y z w) =
    glUniform4f loc x y z w
-- | Scalar integers.
instance Uniform GLint where
  setUniformLoc = glUniform1i
-- | Scalar floats.
instance Uniform GLfloat where
  setUniformLoc = glUniform1f
-- | Look up a fragment-shader subroutine index by name in the given
-- program.
getSubroutineIndex :: MonadIO m => GLProgram -> String -> m GLuint
getSubroutineIndex (GLProgram p) subroutine =
  liftIO (withCString subroutine
                      (glGetSubroutineIndex p GL_FRAGMENT_SHADER))
-- | Look up a uniform-block index by name in the given program.
getUniformBlockIndex :: MonadIO m => GLProgram -> String -> m GLuint
getUniformBlockIndex (GLProgram p) block =
  liftIO (withCString block
                      (glGetUniformBlockIndex p))
| ocharles/hadoom | hadoom/Shader.hs | bsd-3-clause | 3,772 | 0 | 22 | 1,085 | 957 | 493 | 464 | 92 | 1 |
{-# LANGUAGE RecordWildCards #-}
module TemplateParse (parseChordTemplate) where
import Text.ParserCombinators.Parsec
import Control.Applicative ((<$>), (*>), (<*), (<$), (<*>))
import Data.Maybe (fromMaybe, isJust, catMaybes)
import Note (ABC (..), Octave (..))
import Interval (Interval (..))
import qualified Interval as In
import Pattern
-- <chord> :: <root_note>["("<octave_range>")"](<explicit_intervals>|<chord_spec>)
-- <octave_range> :: \d|(\d,\d,...)|(\d-\d)
-- <explicit_intervals> :: "{" (<interval_spec> ",")* "}"
-- <interval_spec> ::
-- {0=,4,7?,11=?} -- 0 exact, 4 with inversions allowed, optional 7 with inversions, optional 11 exact
-- {0=,4,*}-{10,11} -- 0, 4 and any other, except 10 and 11
-- {0=,4,7?,10|11} -- ..., 10 or 11
-- {}
-- <chord_spec> :: <chord_quality><interval_num><altered_fifth><additional_interval>
-- <chord_quality> :: "m" | "M" | ""
-- <interval_num> :: "(" ("maj" | "min" | "aug" | "dim")<number> ")"
-- | Flipped Parsec '<?>': takes the human-readable label first, so a
-- parser can be written as @"label" <??> parser@.  Non-associative at
-- precedence 0 so it binds after everything else on the line.
infix 0 <??>
(<??>) = flip (<?>)
-- | Try each literal string in order, succeeding with the first one that
-- matches (like 'oneOf' but for whole strings).
oneOfStr :: [String] -> CharParser () String
oneOfStr = choice . map string
-- | Top-level chord template parser: a note pattern, an optional
-- @-octave@ pattern (defaulting to 'Any'), then either an explicit
-- interval set in braces or a shorthand chord spec.
pChord :: CharParser () (ChordPattern NotePattern [IntervalPattern])
pChord = do
  note <- "note pattern" <??> pTemplateValue pNote
  octave <- "octave pattern" <??> fromMaybe Any <$>
    optionMaybe (char '-' *> pTemplateValue pOctave)
  intervalPatterns <- "chord spec" <??>
    pExplicitIntervals <|> (chordSpecToIntervals <$> pChordSpec)
  return $ ChordPattern (NotePattern note octave) intervalPatterns
-- | Parse a note name A–G with an optional @#@/@b@ accidental.  A sharp
-- is spelled @s@ in the 'ABC' constructor names, so '#' is rewritten
-- before handing the string to 'read'.
-- NOTE(review): 'read' is partial — it assumes every reachable spelling
-- (e.g. "Cb") has a matching 'ABC' constructor; confirm against Note.ABC.
pNote :: CharParser () ABC
pNote = do
  n <- oneOf "ABCDEFG"
  m <- maybe [] ((: []) . sharpOrFlat) <$> optionMaybe (oneOf "#b")
  return $ read (n : m)
  where
    sharpOrFlat '#' = 's'
    sharpOrFlat x = x
-- | Parse a single octave digit 0–8 into an 'Octave'.
pOctave :: CharParser () Octave
pOctave = fmap (\c -> Octave (read [c])) (oneOf "012345678")
-- | Parse a named interval.  Each interval has several accepted
-- spellings (enharmonic equivalents); spellings are tried in table order
-- with 'try' so failed prefixes backtrack.
pIntervalName :: CharParser () Interval
pIntervalName = "interval name" <??> choiceInterval
  [(In.perf1, ["P1", "perf1", "dim2"])
  ,(In.min2, ["m2", "min2", "A1", "aug1"])
  ,(In.maj2, ["M2", "maj2", "d3", "dim3"])
  ,(In.min3, ["m3", "min3", "A2", "aug2"])
  ,(In.maj3, ["M3", "maj3", "d4", "dim4"])
  ,(In.perf4, ["P4", "perf4", "A3", "aug3"])
  ,(In.dim5, ["d5", "dim5", "A4", "aug4", "tritone"])
  ,(In.perf5, ["P5", "perf5", "d6", "dim6"])
  ,(In.min6, ["m6", "min6", "A5", "aug5"])
  ,(In.maj6, ["M6", "maj6", "d7", "dim7"])
  ,(In.min7, ["m7", "min7", "A6", "aug6"])
  ,(In.maj7, ["M7", "maj7", "d8", "dim8"])
  ,(In.perf8, ["P8", "perf8", "A7", "aug7"])
  ,(In.Interval 13, ["m9", "min9"]) -- p8 + min2
  ,(In.Interval 14, ["M9", "maj9"]) -- p8 + maj2
  ,(In.Interval 15, ["m10", "min10"]) -- p8 + min3
  ,(In.Interval 16, ["M10", "maj10"]) -- p8 + maj3
  ]
  where
    p_oneName (interval, names) = try $ interval <$ oneOfStr names
    choiceInterval is = choice (map p_oneName is)
-- | Parse an interval given as a plain semitone count, e.g. @7@.
pIntervalInteger :: CharParser () Interval
pIntervalInteger = do
  digits <- many1 digit
  return (Interval (read digits))
-- | An interval is either a semitone count or a symbolic name; the
-- numeric form is tried first.
pInterval :: CharParser () Interval
pInterval = pIntervalInteger <|> pIntervalName
-- | One entry of an explicit interval set: an optional leading @=@
-- (exact — inversions disallowed), the interval itself, and an optional
-- trailing @?@ (the interval may be absent).
pIntervalPattern :: CharParser () IntervalPattern
pIntervalPattern = do
  exact <- isJust <$> optionMaybe (char '=')
  iv <- pInterval
  optional' <- isJust <$> optionMaybe (char '?')
  let value = IntervalPatternValue { interval = iv, inversionsAllowed = not exact }
  return (if optional' then OneOrNone value else ExactlyOne value)
-- | A brace-delimited, comma-separated, non-empty list of interval
-- patterns, e.g. @{0=,4,7?}@.
pExplicitIntervals :: CharParser () [IntervalPattern]
pExplicitIntervals = between (char '{') (char '}') (sepBy1 pIntervalPattern (char ','))
-- | Parse a pattern over values of @a@: @*@ means 'Any'; a bare value or
-- a bracketed comma-separated list yields 'OneOf'; absence of any match
-- also yields 'Any' (the 'optionMaybe' returning 'Nothing').
pTemplateValue :: (Bounded a, Enum a) =>
  CharParser () a -> CharParser () (PatternValue a)
pTemplateValue p =
  Any <$ char '*' <|>
  maybe Any OneOf <$> optionMaybe oneOrManyV
  where
    oneV = (: []) <$> p
    manyV = "pattern" <??> char '[' *> sepBy1 p (char ',') <* char ']'
    oneOrManyV = manyV <|> oneV
-- | Overall chord quality from the leading @m@/@M@ letter.
data ChordQuality = MinorChord | MajorChord
-- | Quality of the parenthesised main interval, e.g. @(maj7)@.
data IntervalQuality = Minor | Major | Diminished | Augmented | Dominant
-- | Shorthand chord description, later expanded to interval patterns by
-- 'chordSpecToIntervals'.  All parts are optional.
data ChordSpec = ChordSpec
  { chordQuality :: Maybe ChordQuality,
    mainInterval :: Maybe (Maybe IntervalQuality, Integer),
    addInterval :: Maybe Integer
  }
-- | Parse the shorthand chord syntax: optional quality letter (@m@/@M@),
-- an optional parenthesised main interval such as @(maj7)@, and an
-- optional @addN@ suffix.
pChordSpec :: CharParser () ChordSpec
pChordSpec =
  createSpec <$> pChordQuality <*> pIntervalSpec <*> pAdditionalInterval
  where
    createSpec q mi ai =
      ChordSpec { chordQuality = q, mainInterval = mi, addInterval = ai }
    pChordQuality = "chord quality" <??>
      optionMaybe
        ((try $ MinorChord <$ char 'm') <|>
         (try $ MajorChord <$ char 'M'))
    pIntervalQuality = (<??>) "main chord interval" $
      -- @TODO seems broken
      -- NOTE(review): "min"/"maj" share a prefix with "m"; relies on
      -- 'try' backtracking — confirm against the grammar comment above.
      optionMaybe $
      choice (map try
        [Minor <$ (string "min")
        ,Major <$ (string "maj")
        ,Diminished <$ (string "dim")
        ,Augmented <$ (string "aug")
        ,Dominant <$ (string "dom")])
    pIntervalNumber = read <$> many1 digit
    pAdditionalInterval = (<??>) "additional chord note" $
      optionMaybe $
      read <$> (string "add" *> many1 digit)
    pIntervalSpec =
      optionMaybe $
      (,) <$> (char '(' *> pIntervalQuality) <*> (pIntervalNumber <* char ')')
-- | Expand a shorthand 'ChordSpec' into explicit interval patterns:
-- an exact root (0), a third fixed by the chord quality, a perfect
-- fifth (inversions allowed), and optionally the main interval.
-- NOTE(review): 'addInterval' is ignored — @added@ is hard-wired to
-- 'Nothing'.
chordSpecToIntervals :: ChordSpec -> [IntervalPattern]
chordSpecToIntervals (ChordSpec {..}) =
  root : (catMaybes $ third : fifth : main : added : [])
  where
    ipat x y = IntervalPatternValue { interval = x, inversionsAllowed = y }
    inversions x = ipat x True
    noInversions x = ipat x False
    root = ExactlyOne $ noInversions (In.Interval 0)
    chordQ = fromMaybe MajorChord chordQuality
    -- @TODO ugly stuff
    -- Semitone count for the main interval = neutral size + quality shift.
    -- NOTE(review): the inner case on @n@ is non-exhaustive — any interval
    -- number other than 6,7,9,10,11 crashes with a pattern-match failure.
    makeInterval (q, n) =
      In.Interval $ neutralI + modI
      where
        neutralI =
          case n of
            7 -> 11
            6 -> 9
            9 -> 14
            10 -> 16
            11 -> 17 -- ??
        modI =
          case fromMaybe Major q of
            Major -> 0
            Minor -> -1
            Augmented -> 1
            Diminished -> -2
            Dominant -> error "ugh"
    third = Just . ExactlyOne . noInversions $
      (case chordQ of
         MajorChord -> In.maj3
         MinorChord -> In.min3)
    fifth = Just . ExactlyOne . inversions $ In.perf5
    main = ExactlyOne . inversions . makeInterval <$> mainInterval
    added = Nothing
-- | Entry point: run the chord-template grammar over a whole input
-- string, reporting failures as a Parsec 'ParseError'.
parseChordTemplate :: String
                   -> Either ParseError (ChordPattern NotePattern [IntervalPattern])
parseChordTemplate = parse pChord "(chord def)"
{-# LANGUAGE ExtendedDefaultRules #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE UndecidableInstances #-}
{-# OPTIONS_GHC -Wall #-}
-- | Algebra
module Tower.Ring (
-- * Ring
Semiring
, Ring
, CRing
) where
import Protolude (Double, Float, Int, Integer,Bool(..))
import Data.Functor.Rep
import Tower.Additive
import Tower.Multiplicative
import Tower.Distribution
-- | A semiring: additive structure plus an associative, unital
-- multiplication that distributes over addition.  The class has no
-- methods; it only bundles its superclass constraints.
class ( Additive a
      , MultiplicativeAssociative a
      , MultiplicativeUnital a
      , Distribution a) =>
      Semiring a
instance Semiring Double
instance Semiring Float
instance Semiring Int
instance Semiring Integer
instance Semiring Bool
-- Pointwise lifting through any representable functor.
instance (Representable r, Semiring a) => Semiring (r a)
-- | A ring: a semiring whose additive structure forms a group
-- (subtraction/negation exist).
class ( AdditiveGroup a
      , MultiplicativeAssociative a
      , MultiplicativeUnital a
      , Distribution a) =>
      Ring a
instance Ring Double
instance Ring Float
instance Ring Int
instance Ring Integer
-- Pointwise lifting through any representable functor.
instance (Representable r, Ring a) => Ring (r a)
-- | CRing is a Commutative Ring. It arises often due to * being defined
-- as only multiplicative commutative, yet fromInteger being an
-- @Integer -> Ring@ (and thus not necessarily commutative).
class ( Multiplicative a, Ring a) => CRing a
instance CRing Double
instance CRing Float
instance CRing Int
instance CRing Integer
-- Pointwise lifting through any representable functor.
instance (Representable r, CRing a) => CRing (r a)
| tonyday567/tower | src/Tower/Ring.hs | bsd-3-clause | 1,384 | 0 | 7 | 268 | 337 | 176 | 161 | -1 | -1 |
module Main where
import GameIO
import System.Console.ANSI
-- | Entry point: hand control straight to the game loop from "GameIO".
main :: IO ()
main = runGame
| MikePors/FizzBuzzWarz | app/Main.hs | bsd-3-clause | 90 | 0 | 6 | 16 | 28 | 17 | 11 | 5 | 1 |
{-# OPTIONS_GHC -Wall #-}
module Canonicalize.Variable where
import qualified Data.Either as Either
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified AST.Helpers as Help
import qualified AST.Module.Name as ModuleName
import qualified AST.Type as Type
import qualified AST.Variable as Var
import qualified Reporting.Annotation as A
import qualified Reporting.Error.Canonicalize as Error
import qualified Reporting.Error.Helpers as Error
import qualified Reporting.Region as R
import qualified Canonicalize.Environment as Env
import qualified Canonicalize.Result as Result
import Elm.Utils ((|>))
-- | Canonicalize a value-level variable reference.  Native-module names
-- short-circuit; otherwise the name is looked up in the environment's
-- value table, preferring local definitions when ambiguous.
-- NOTE(review): the native branch is @error "TODO"@ applied to a second
-- string — it always crashes with "TODO"; the quoted expression is the
-- intended implementation.
variable :: R.Region -> Env.Environment -> String -> Result.ResultErr Var.Canonical
variable region env var =
  case toVarName var of
    Right (name, varName)
        | ModuleName.isNative name ->
            error "TODO" "Result.var (Var.Canonical (Var.Module name) varName)"
    _ ->
        case Set.toList `fmap` Map.lookup var (Env._values env) of
          Just [v] ->
              Result.var v
          Just vs ->
              preferLocals region env "variable" vs var
          Nothing ->
              notFound region "variable" (Map.keys (Env._values env)) var
-- | Canonicalize a type-level name.  Candidates come from both the ADT
-- table (Left) and the alias table (Right: name, type args, body); a
-- unique hit is returned directly, several hits are disambiguated by
-- locality, none reports "type not found" against both key sets.
tvar
    :: R.Region
    -> Env.Environment
    -> String
    -> Result.ResultErr
          (Either
              Var.Canonical
              (Var.Canonical, [String], Type.Canonical)
          )
tvar region env var =
  case adts ++ aliases of
    [] -> notFound region "type" (Map.keys (Env._adts env) ++ Map.keys (Env._aliases env)) var
    [v] -> Result.var' extract v
    vs -> preferLocals' region env extract "type" vs var
  where
    adts =
      map Left (maybe [] Set.toList (Map.lookup var (Env._adts env)))
    aliases =
      map Right (maybe [] Set.toList (Map.lookup var (Env._aliases env)))
    -- Project the canonical name out of either candidate shape.
    extract value =
      case value of
        Left v -> v
        Right (v,_,_) -> v
-- | Canonicalize a pattern constructor and check its arity: the resolved
-- constructor's expected argument count must equal @actualArgs@,
-- otherwise an arg-mismatch error is produced at @region@.
pvar
    :: R.Region
    -> Env.Environment
    -> String
    -> Int
    -> Result.ResultErr Var.Canonical
pvar region env var actualArgs =
  case Set.toList `fmap` Map.lookup var (Env._patterns env) of
    Just [value] ->
        foundArgCheck value
    Just values ->
        preferLocals' region env fst "pattern" values var
          `Result.andThen` foundArgCheck
    Nothing ->
        notFound region "pattern" (Map.keys (Env._patterns env)) var
  where
    foundArgCheck (name, expectedArgs) =
        if actualArgs == expectedArgs
          then Result.var name
          else Result.err (A.A region (Error.argMismatch name expectedArgs actualArgs))
-- FOUND
-- | Disambiguate plain 'Var.Canonical' candidates by locality; this is
-- 'preferLocals'' specialised to the identity projection.
preferLocals
    :: R.Region -> Env.Environment -> String -> [Var.Canonical] -> String
    -> Result.ResultErr Var.Canonical
preferLocals rgn environment =
    preferLocals' rgn environment id
-- | Disambiguate candidates by keeping only those local to the current
-- module (per 'isLocal'/@Env._home@).  A unique local wins; the special
-- two-candidate case lets a local binding shadow a top-level one; zero
-- or several survivors produce an "ambiguous variable" error listing all
-- candidate names.  @extract@ projects a 'Var.Canonical' out of each
-- candidate.
preferLocals'
    :: R.Region
    -> Env.Environment
    -> (a -> Var.Canonical)
    -> String
    -> [a]
    -> String
    -> Result.ResultErr a
preferLocals' region env extract kind possibilities var =
  case filter (isLocal (Env._home env) . extract) possibilities of
    [] ->
        ambiguous possibilities
    [v] ->
        Result.var' extract v
    --Local vars shadow top-level vars
    [v1, v2]
        | isTopLevel (extract v1) ->
            Result.var' extract v2
        | isTopLevel (extract v2) ->
            Result.var' extract v1
    locals ->
        ambiguous locals
  where
    ambiguous possibleVars =
        Result.err (A.A region (Error.variable kind var Error.Ambiguous vars))
      where
        vars = map (Var.toString . extract) possibleVars
-- | Is a canonical variable "local" relative to the module currently
-- being canonicalized?  Locals and top-level bindings always are;
-- built-ins never are; imported names only when they come from the
-- context module itself.
isLocal :: ModuleName.Canonical -> Var.Canonical -> Bool
isLocal _ (Var.Canonical Var.Local _) = True
isLocal _ (Var.Canonical (Var.TopLevel _) _) = True
isLocal _ (Var.Canonical Var.BuiltIn _) = False
isLocal contextName (Var.Canonical (Var.Module name) _) = name == contextName
-- | True exactly when the variable's home is a top-level binding.
isTopLevel :: Var.Canonical -> Bool
isTopLevel (Var.Canonical (Var.TopLevel _) _) = True
isTopLevel _ = False
-- NOT FOUND HELPERS
-- | A possibly-qualified name: @Left name@ for unqualified,
-- @Right (module, name)@ for dotted names.
type VarName =
  Either String (ModuleName.Raw, String)
-- | Split a raw identifier on dots.  A single segment is unqualified;
-- otherwise all but the last segment form the module path.  'init'/'last'
-- are safe here: this branch only runs for lists of length >= 2.
toVarName :: String -> VarName
toVarName var =
  case Help.splitDots var of
    [x] -> Left x
    xs -> Right (init xs, last xs)
-- | Drop any module qualifier, keeping just the bare identifier.
noQualifier :: VarName -> String
noQualifier = either id snd
-- | Render a qualified name back to its dotted source form.
qualifiedToString :: (ModuleName.Raw, String) -> String
qualifiedToString (modul, name) =
  ModuleName.toString modul ++ "." ++ name
-- | Is the unqualified part of the name an operator symbol?
isOp :: VarName -> Bool
isOp = Help.isOp . noQualifier
-- NOT FOUND
-- | Build a "not found" error for @var@, choosing the diagnosis and the
-- nearby-name suggestions based on whether the reference was qualified.
-- Unqualified lookups may suggest both exposed and qualified names;
-- qualified lookups only consider other qualified candidates.
notFound :: R.Region -> String -> [String] -> String -> Result.ResultErr a
notFound region kind possibilities var =
  let name =
          toVarName var
      possibleNames =
          map toVarName possibilities
      (problem, suggestions) =
          case name of
            Left _ ->
                exposedProblem name possibleNames
            Right (modul, varName) ->
                qualifiedProblem modul varName (Either.rights possibleNames)
  in
      Result.err (A.A region (Error.variable kind var problem suggestions))
-- | Diagnosis for an unknown *unqualified* name: keep candidates of the
-- same kind (operator vs identifier), rank them by edit distance on the
-- unqualified part, then split suggestions into exposed names and
-- qualified names rendered back to dotted form.
exposedProblem :: VarName -> [VarName] -> (Error.VarProblem, [String])
exposedProblem name possibleNames =
  let (exposed, qualified) =
          possibleNames
            |> filter (\n -> isOp name == isOp n)
            |> Error.nearbyNames noQualifier name
            |> Either.partitionEithers
  in
      ( Error.ExposedUnknown
      , exposed ++ map qualifiedToString qualified
      )
-- | Diagnosis for an unknown *qualified* name.  If the module itself is
-- known, suggest similarly-named values inside that module; otherwise
-- suggest similarly-named modules.
qualifiedProblem
    :: ModuleName.Raw
    -> String
    -> [(ModuleName.Raw, String)]
    -> (Error.VarProblem, [String])
qualifiedProblem moduleName name allQualified =
    if Set.member moduleName availableModules
      then
        ( Error.QualifiedUnknown moduleNameString name
        , Error.nearbyNames id name
            (map snd (filter ((==) moduleName . fst) allQualified))
        )
      else
        ( Error.UnknownQualifier moduleNameString name
        , Error.nearbyNames id moduleNameString
            (map ModuleName.toString (Set.toList availableModules))
        )
  where
    availableModules =
        Set.fromList (map fst allQualified)
    moduleNameString =
        ModuleName.toString moduleName
| pairyo/elm-compiler | src/Canonicalize/Variable.hs | bsd-3-clause | 6,338 | 0 | 21 | 1,834 | 1,911 | 987 | 924 | 183 | 4 |
module Main where
-- | Placeholder entry point: just prints a greeting.
main :: IO ()
main = putStrLn "sup"
| bts/free-transformers | app/Main.hs | bsd-3-clause | 55 | 0 | 6 | 12 | 22 | 12 | 10 | 3 | 1 |
{-# LANGUAGE OverloadedStrings, CPP #-}
module HPACK.HuffmanSpec where
#if __GLASGOW_HASKELL__ < 709
import Control.Applicative ((<$>))
#endif
import Data.ByteString (ByteString)
import qualified Data.ByteString.Base16 as B16
import qualified Data.ByteString.Char8 as BS
import Data.Char (toLower)
import Network.HPACK
import Network.HPACK.Internal
import Test.Hspec
import Test.Hspec.QuickCheck
-- | (plain text, lowercase hex of its Huffman encoding) pairs.
-- The values appear to match the HPACK (RFC 7541) Appendix C
-- request/response examples — TODO confirm against the RFC.
testData :: [(ByteString, ByteString)]
testData = [
    ("", "")
  , ("www.example.com", "f1e3c2e5f23a6ba0ab90f4ff")
  , ("no-cache", "a8eb10649cbf")
  , ("custom-key", "25a849e95ba97d7f")
  , ("custom-value", "25a849e95bb8e8b4bf")
  , ("private", "aec3771a4b")
  , ("Mon, 21 Oct 2013 20:13:21 GMT", "d07abe941054d444a8200595040b8166e082a62d1bff")
  , ("https://www.example.com", "9d29ad171863c78f0b97c8e9ae82ae43d3")
  , ("Mon, 21 Oct 2013 20:13:22 GMT", "d07abe941054d444a8200595040b8166e084a62d1bff")
  , ("gzip", "9bd9ab")
  , ("foo=ASDJKHQKBZXOQWEOPIUAXQWEOIU; max-age=3600; version=1", "94e7821dd7f2e6c7b335dfdfcd5b3960d5af27087f3672c1ab270fb5291f9587316065c003ed4ee5b1063d5007")
  ]
-- | Assert that Huffman-encoding @inp@ yields the lowercase hex @out@.
shouldBeEncoded :: ByteString -> ByteString -> Expectation
shouldBeEncoded inp out = do
    out' <- BS.map toLower . B16.encode <$> encodeHuffman inp
    out' `shouldBe` out
-- | Assert that Huffman-decoding the hex string @inp@ yields @out@.
shouldBeDecoded :: ByteString -> ByteString -> Expectation
shouldBeDecoded inp out = do
    out' <- decodeHuffman $ B16.decodeLenient inp
    out' `shouldBe` out
-- | Decode a hex-encoded Huffman string, letting decode errors escape as
-- exceptions (used with 'shouldThrow').
tryDecode :: ByteString -> IO ByteString
tryDecode = decodeHuffman . B16.decodeLenient
-- | Property: decode is a left inverse of encode; plus example-based
-- checks for both directions and the over-long-EOS error cases.
spec :: Spec
spec = do
    describe "encode and decode" $ do
        prop "duality" $ \cs -> do
            let bs = BS.pack cs
            es <- encodeHuffman bs
            ds <- decodeHuffman es
            ds `shouldBe` bs
    describe "encode" $ do
        it "encodes" $ do
            mapM_ (\(x,y) -> x `shouldBeEncoded` y) testData
    describe "decode" $ do
        it "decodes" $ do
            -- An EOS-padding run longer than 7 bits must be rejected.
            tryDecode "ff" `shouldThrow` (== TooLongEos)
            tryDecode "ffffeaff" `shouldThrow` (== TooLongEos)
            "ffffea" `shouldBeDecoded` "\9"
            mapM_ (\(x,y) -> y `shouldBeDecoded` x) testData
| kazu-yamamoto/http2 | test/HPACK/HuffmanSpec.hs | bsd-3-clause | 2,149 | 0 | 18 | 426 | 548 | 306 | 242 | 51 | 1 |
module Graphics.Hexif.Utils
where
import Data.Binary
import Data.Binary.Get
import qualified Data.ByteString.Lazy as BL
import Data.Hex
-- | Byte order of the binary data in the EXIF segment:
-- Motorola is big endian, Intel is little endian.
data Encoding = Intel
              | Motorola
     deriving (Show)
-- | Little support function to read 16 bit integers
getWord16 :: Encoding -> Get Word16
getWord16 Motorola = getWord16be
getWord16 Intel = getWord16le
-- | Little support function to read 32 bit integers
getWord32 :: Encoding -> Get Word32
getWord32 Motorola = getWord32be
getWord32 Intel = getWord32le
-- | Convert a lazy ByteString into a normal Haskell String
-- Copied from MissingH library module Data.Bits.Utils
unpackLazyBS :: BL.ByteString -> String
unpackLazyBS = map (toEnum . fromIntegral) . BL.unpack
-- | Hex dump of a value's binary serialisation (for debugging).
-- NOTE(review): this shadows the standard 'Numeric.showHex' name.
showHex :: Binary a => a -> String
showHex = show . hex . encode
-- | Run a Get parser, collapsing 'runGetOrFail' triples into a simple
-- Either of error message or parsed value (remaining input and offset
-- are discarded in both cases).
runGetEither :: Get a -> BL.ByteString -> Either String a
runGetEither g input =
    either (\(_, _, msg) -> Left msg)
           (\(_, _, value) -> Right value)
           (runGetOrFail g input)
-- | Return the first argument unless it is 'Nothing', in which case fall
-- back to the second — an OR for 'Maybe' values (i.e. '<|>').
ifNothing :: Maybe a -> Maybe a -> Maybe a
ifNothing Nothing fallback = fallback
ifNothing primary _ = primary
| hansroland/hexif | src/Graphics/Hexif/Utils.hs | bsd-3-clause | 1,291 | 0 | 9 | 252 | 306 | 165 | 141 | 26 | 2 |
{-# LANGUAGE Arrows #-}
module Karamaan.Plankton.Arrow where
import Prelude hiding (id)
import Control.Arrow ((&&&), Arrow, arr, returnA, second)
import Control.Category ((<<<), id)
import Data.List (foldl')
-- | Arrow that discards its input and yields unit.
voidArr :: Arrow arr => arr a ()
voidArr = arr (\_ -> ())
-- | Run both unit-producing arrows on the same input (for their effects)
-- and yield unit.
andVoid :: Arrow arr => arr a () -> arr a () -> arr a ()
andVoid f g = arr (const ()) <<< (f &&& g)
-- | Combine a whole list of unit arrows with 'andVoid', starting from
-- the no-op 'voidArr'.
all_ :: Arrow arr => [arr a ()] -> arr a ()
all_ = foldl' andVoid voidArr
-- | Ignore the input entirely and run the given unit-fed arrow instead.
replaceWith :: Arrow arr => arr () b -> arr a b
replaceWith f = f <<< arr (const ())
-- | Pass the value through unchanged while also running @r@ on it (for
-- its effect, e.g. a filter/side condition in the target category).
restrictWith :: Arrow arr => arr b () -> arr b b
restrictWith r = arr fst <<< (id &&& r)
-- | Pair the input with a unit value (inverse of dropping a unit).
removeUnit :: Arrow arr => arr a (a, ())
removeUnit = arr (\a -> (a, ()))
-- | Lift a binary function over two arrows sharing the same input
-- (the arrow analogue of 'liftA2', built from '<$->' and '<*->').
liftArr2 :: Arrow arr => (a -> b -> r) -> arr z a -> arr z b -> arr z r
liftArr2 f x y = f <$-> x <*-> y
-- TODO: I guess this should really be made to work with any traversal
-- cf 'instance Traversable []' in Data.Traversable
-- | Turn a list of arrows (all fed the same input) into one arrow that
-- collects all their results, preserving order.
sequenceArr :: Arrow arr => [arr a b] -> arr a [b]
sequenceArr = foldr (liftArr2 (:)) (arr (const []))
-- | Map each element to an arrow and collect all the results
-- (the arrow analogue of 'traverse' for lists).
traverseArr :: Arrow arr => (b -> arr a c) -> [b] -> arr a [c]
traverseArr f inputs = sequenceArr (map f inputs)
-- | Apply an arrow-of-functions to an arrow-of-arguments, both fed the
-- same input (the arrow analogue of '<*>').
(<*->) :: Arrow arr => arr a (b -> c) -> arr a b -> arr a c
f <*-> x = proc a -> do
  f' <- f -< a
  x' <- x -< a
  returnA -< f' x'
-- | Post-compose a pure function onto an arrow (the arrow analogue of
-- '<$>').
(<$->) :: Arrow arr => (b -> c) -> arr a b -> arr a c
(<$->) f ar = arr f <<< ar
-- | Right-fold a list of arrows with a pairwise combining arrow,
-- starting from the given seed arrow; every arrow sees the same input.
foldrArr :: Arrow arr => arr (a, b) b -> arr z b -> [arr z a] -> arr z b
foldrArr f = foldr g
  where g x rest = f <<< (x &&& rest)
-- | Strict left-fold analogue of 'foldrArr' (accumulator on the left of
-- the combining arrow's pair).
foldl'Arr :: Arrow arr => arr (b, a) b -> arr z b -> [arr z a] -> arr z b
foldl'Arr f = foldl' g
  where g rest x = f <<< (rest &&& x)
-- opC stands for "operator curry". It's a sort of partial application:
-- fix the second input of a binary arrow using a unit-fed arrow.
-- If anyone has a better name, please change it :)
opC :: Arrow arr => arr (a, b) c -> arr () b -> arr a c
opC op q = op <<< second q <<< arr (\a -> (a, ()))
-- | Arrow that ignores its input and yields unit.  Defined as a synonym
-- for 'voidArr' instead of duplicating its body, so there is a single
-- definition of the discard-everything arrow in this module.
noOp :: Arrow arr => arr a ()
noOp = voidArr
| karamaan/karamaan-plankton | Karamaan/Plankton/Arrow.hs | bsd-3-clause | 1,878 | 1 | 10 | 459 | 978 | 496 | 482 | 41 | 1 |
module SLM.Model where
import SLM.DataTypes
-- | A fitted model that can score feature vectors.
class TrainedModel a where
  -- | Score a single instance (one list of predictors).
  predictInstance :: a -> [Predictor] -> Double
  -- | Score a batch; default implementation maps 'predictInstance'.
  predict :: a -> [[Predictor]] -> [Double]
  predict model = map (predictInstance model)
| timveitch/Gobble | src/SLM/Model.hs | bsd-3-clause | 211 | 0 | 10 | 37 | 75 | 41 | 34 | 6 | 0 |
-- |
-- Module :
-- License : BSD-Style
-- Maintainer : Nicolas DI PRIMA <[email protected]>
-- Stability : experimental
-- Portability : unknown
--
module Network.SMTP
( module Network.SMTP.Types
, module Network.SMTP.Monad
-- * Client
, module Network.SMTP.Client
-- * Utils
-- ** Lookup MX domains
, MXServer(..)
, lookupMXs
-- ** email address
, emailAddrFromString
-- ** pretty print result
, prettyPrintResult
) where
import Data.Byteable
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as BC
import qualified Data.ByteString.Parse as BP (parse, Result(..))
import Data.List (intercalate, sort)
import Data.Hourglass
import qualified Network.DNS as DNS
import Network.SMTP.Types
import Network.SMTP.Monad
import Network.SMTP.Client
import Network.SMTP.Parser
-- | One MX record: its preference weight (lower is tried first) and the
-- mail server's domain.
data MXServer = MXServer
    { mxWeight :: Int
    , mxAddress :: Domain
    } deriving (Eq, Show)
-- | Order servers by MX preference weight only, so 'sort' produces the
-- delivery-attempt order.
-- NOTE(review): inconsistent with the derived 'Eq' (which also compares
-- 'mxAddress'): @compare x y == EQ@ does not imply @x == y@.  Confirm
-- callers rely on this only for sorting.
instance Ord MXServer where
    compare mx1 mx2 = compare (mxWeight mx1) (mxWeight mx2)
-- | Render an MX server as "(weight)domain" for log lines.
showMXServer :: MXServer -> String
showMXServer (MXServer w d) = concat ["(", show w, ")", BC.unpack (toBytes d)]
-- | Retrieve the MX servers responsible for the given domain via a DNS
-- query, sorted by preference weight (see the 'Ord' instance), and log
-- the collected list as an info report.  Fails in the 'SMTP' monad when
-- the DNS lookup errors.
lookupMXs :: Domain -> SMTP [MXServer]
lookupMXs domain = do
    seed <- smtpLiftIO "get the DNS Resolver information" $ DNS.makeResolvSeed resolvconf
    eres <- smtpLiftIO "send the DNS MXs Query" $ DNS.withResolver seed $ \r -> DNS.lookupMX r (toBytes domain)
    case eres of
        Left err -> fail $ "unable to collect the MX Domains for " ++ BC.unpack (toBytes domain) ++ " error is: " ++ show err
        Right v -> do
            let mxs = sort $ map (\(d, p) -> MXServer p (Domain d)) v
            smtpReportInfo $ ("collecting MX for " ++ BC.unpack (toBytes domain)) ++ ": " ++ (intercalate " " $ map showMXServer mxs)
            return mxs
  where
    resolvconf :: DNS.ResolvConf
    resolvconf = DNS.defaultResolvConf
-- | Parse an email address from a String.  A NUL byte is appended before
-- parsing — presumably as an end-of-input sentinel for the incremental
-- parser (TODO confirm against 'parseEmailAddress').
emailAddrFromString :: String -> Either String EmailAddress
emailAddrFromString str = case BP.parse parseEmailAddress (BC.snoc bs '\0') of
    BP.ParseFail err -> Left $ "failed to parse: " ++ show bs ++ " " ++ err
    BP.ParseMore _ -> Left $ "failed to parse: " ++ show bs ++ " not enough bytes"
    BP.ParseOK _ v -> Right v
  where
    bs :: ByteString
    bs = BC.pack str
-- | Render an SMTP 'Result': a Success/Failure headline at the given
-- indentation, then the accumulated reports one level deeper.
prettyPrintResult :: String
                  -> Result ()
                  -> String
prettyPrintResult indentation rs =
    let (report, header) = case rs of
            SMTPOK r _ -> (r, "Success")
            SMTPKO r -> (r, "Failure")
    in indentation ++ header ++ "\n" ++ (prettyPrintReports (indentation ++ "  ") report)
-- | Render every report on its own line, oldest first (reports are
-- accumulated newest-first, hence the reverse).
prettyPrintReports :: String
                   -> SMTPReports
                   -> String
prettyPrintReports indentation rs =
    intercalate "\n" (map (prettyPrintReport indentation) reportsInOrder)
  where
    reportsInOrder = reverse (smtpReports rs)
-- | Dispatch one report to the right renderer by its constructor,
-- timestamping each line with epoch seconds at microsecond precision.
prettyPrintReport :: String
                  -> SMTPReport
                  -> String
prettyPrintReport indentation r = case r of
    SMTPReportInfo {} -> prettyPrintString indentation time "INF" (smtpReportReport r)
    SMTPReportError {} -> prettyPrintString indentation time "ERR" (smtpReportReport r)
    SMTPReportCommand {} -> prettyPrintCommand indentation time (smtpReportCmd r)
    SMTPReportResponse {} -> prettyPrintResponse indentation time (smtpReportRespCode r)
  where
    time :: String
    time = timePrint "EPOCH.p6" $ smtpReportDate r
-- | Format a plain log line: "<indent>[<time>][<type>] <message>".
prettyPrintString :: String -> String -> String -> String -> String
prettyPrintString indentation time ty msg =
    concat [indentation, "[", time, "][", ty, "] ", msg]
-- | Format an outgoing command line: "<indent>[<time>][CMD] <command>".
prettyPrintCommand :: String -> String -> Command -> String
prettyPrintCommand indentation time cmd =
    concat [indentation, "[", time, "][CMD] ", BC.unpack (showCommand cmd)]
-- | Format a server response: every line of the (possibly multi-line)
-- message gets the same "<indent>[<time>][RSP][<code>] " prefix.
prettyPrintResponse :: String
                    -> String
                    -> Response
                    -> String
prettyPrintResponse indentation time resp =
    intercalate "\n" $ map (((++) prefix) . BC.unpack) $ message resp
  where
    prefix :: String
    prefix = indentation ++ "[" ++ time ++ "][RSP][" ++ show (responseCodeToInt $ code resp) ++ "] "
| NicolasDP/hs-smtp | Network/SMTP.hs | bsd-3-clause | 4,230 | 0 | 21 | 1,057 | 1,184 | 619 | 565 | 83 | 4 |
{-# LANGUAGE RecordWildCards #-}
module Plugin.Jira
( respond
, hear
, help
) where
import Control.Applicative
import Control.Lens
import Data.Aeson
import Data.List (nub)
import Data.Maybe
import Data.Text (Text)
import qualified Data.Text as T
-- import Data.Text.Lazy (toStrict)
-- import Data.Text.Lazy.Encoding
import Network.HTTP.Types
import Network.Wreq
import Text.Megaparsec hiding (label)
import Text.Megaparsec.Text
import Types
import Util
import Web.Slack
-- | A Jira issue as consumed by this plugin.  Only key and summary are
-- required by the 'FromJSON' instance; 'issueUrl' is filled in after
-- decoding (see 'withUrl').
data Issue = Issue { issueKey :: Text
                   , summary :: Text
                   , description :: Maybe Text
                   , priorityName :: Maybe Text
                   , priorityIcon :: Maybe Text
                   , issueStatus :: Maybe Text
                   , assigneeName :: Maybe Text
                   , creatorName :: Maybe Text
                   , issueUrl :: Maybe Text
                   , commentCount :: Maybe Integer
                   } deriving Show
-- | How much detail to render: a one-line summary or a rich attachment.
data IssueResponseType = Short | Full
-- | Decode a Jira REST v2 issue payload.  "key" and "fields.summary" are
-- required; every other field is wrapped in 'optional' so partially
-- populated issues still parse.  'issueUrl' never comes from the payload.
instance FromJSON Issue where
  parseJSON (Object v) = do
    -- parents
    let fields = v .: "fields"
    let priority = fields >>= (.: "priority")
    -- values
    issueKey <- v .: "key"
    summary <- fields >>= (.: "summary")
    description <- optional $ fields >>= (.: "description")
    priorityName <- optional $ priority >>= (.: "name")
    priorityIcon <- optional $ priority >>= (.: "iconUrl")
    issueStatus <- optional $ fields >>= (.: "status") >>= (.: "name")
    assigneeName <- optional $ fields >>= (.: "assignee") >>= (.: "displayName")
    creatorName <- optional $ fields >>= (.: "creator") >>= (.: "displayName")
    commentCount <- optional $ fields >>= (.: "comment") >>= (.: "total")
    let issueUrl = Nothing
    pure Issue{..}
  -- Non-object payloads fail via the parser's 'mempty'.
  parseJSON _ = mempty
-- | React to an explicit bot command: "!jira ..." fetches issues,
-- "!help ..." prints usage; anything else (or no command) is ignored.
respond :: BotResponse
respond (evt, resp) = maybe (pure ()) parseCmd (command evt)
  where
    parseCmd cmd = case trigger cmd of
      "jira" -> fetchIssues (arguments cmd) resp
      "help" -> help (arguments cmd) resp
      _ -> pure ()
-- | Passive listener: scan every word of any message for issue keys and
-- post short summaries for the ones found.
hear :: BotResponse
hear (evt, resp) = fetchShortIssues (T.words (fullMessage evt)) resp
-- | Usage text, shown for "!help" with no arguments or "!help jira".
help :: [Text] -> OutputResponse -> IO ()
help args resp = case args of
  [] -> say
  ["jira"] -> say
  _ -> pure ()
  where say = slackWriter resp $ QuotedSimpleMessage "!jira XYZ-123 <XYZ-234 XYZ-345…> :: Displays Jira entries"
-- | Does the word look like a Jira issue key, i.e. letters, a dash, then
-- digits (e.g. @ABC-123@)?
isIssueKey :: Text -> Bool
isIssueKey = isJust . parseMaybe issueKeyParser
  where
    issueKeyParser :: Parser String
    issueKeyParser = some letterChar >> char '-' >> some numberChar
-- | Keep the words that look like issue keys, uppercase them, and drop
-- duplicates (preserving first-seen order).
getIssues :: [Text] -> [Text]
getIssues ws = nub (map T.toUpper (filter isIssueKey ws))
-- | Fetch and post a short summary for every issue key found in the
-- given words.
fetchShortIssues :: [Text] -> OutputResponse -> IO ()
fetchShortIssues message resp = mapM_ (fetchIssue Short resp) $ getIssues message
-- | Fetch and post a full rich attachment for every issue key in the
-- command arguments.
fetchIssues :: [Text] -> OutputResponse -> IO ()
fetchIssues args resp = mapM_ (fetchIssue Full resp) $ getIssues args
-- | GET one issue from the Jira REST v2 API (domain and credentials from
-- the environment) and post it to Slack.  Transport errors are logged to
-- stdout; undecodable bodies are silently dropped.
fetchIssue :: IssueResponseType -> OutputResponse -> Text -> IO ()
fetchIssue respType resp issueName = do
  domain <- T.pack <$> envDefault "jira.atlassian.net" "JIRA_DOMAIN"
  let uri = T.concat ["https://", domain, "/rest/api/2/issue/", issueName]
  username <- fmap T.pack <$> envMaybe "JIRA_USERNAME"
  password <- fmap T.pack <$> envMaybe "JIRA_PASSWORD"
  response <- safeGetUrl uri username password
  -- TODO thread those guys
  case response of
    Left err -> print $ T.concat ["[JIRA] Error: ", err, " :: issue ", issueName]
    Right success -> do
      print $ T.concat ["[JIRA] Fetched issue ", issueName]
      -- print $ toStrict $ decodeUtf8 $ success ^. responseBody
      let status = success ^. responseStatus
      let body = decode (success ^. responseBody) :: Maybe Issue
      case body of
        Just issue -> slackWriter resp $ handle respType issueName (withUrl domain issue) status
        Nothing -> pure ()
-- | Fill in the browse URL for an issue from the Jira domain.
withUrl :: Text -> Issue -> Issue
withUrl domain issue =
  let url = T.concat ["https://", domain, "/browse/", issueKey issue]
  in issue { issueUrl = Just url }
-- | Choose the Slack message for a fetch outcome: 5xx -> generic error,
-- 4xx -> not found, otherwise render the issue at the requested detail.
handle :: IssueResponseType -> Text -> Issue -> Status -> OutputMessage
handle respType issueName issue status
  | statusIsServerError status = showError issueName
  | statusIsClientError status = showNotFound issueName
  | otherwise = case respType of
      Full -> showIssue issue
      Short -> showShortIssue issue
-- | Build a short labelled attachment field when the value is present.
makeField :: Text -> Maybe Text -> Maybe Field
makeField label = fmap (\v -> Field (Just label) v True)
-- | One-line quoted summary "KEY :: summary :: status".  'sequence' runs
-- in the ((->) Issue) monad, applying each accessor to the same issue;
-- 'nub' then drops duplicate segments (e.g. a missing status rendered "").
showShortIssue :: Issue -> OutputMessage
showShortIssue issue = QuotedSimpleMessage $ T.intercalate " :: " $ nub $ sequence [issueKey, summary, (fromMaybe "") . issueStatus] issue
-- | Full rich Slack attachment for an issue: title links to the browse
-- URL, fields show status/assignee when present, and the sidebar colour
-- reflects the priority name (red for Blocker/Critical, yellow for
-- Major, green for Minor/Trivial).
showIssue :: Issue -> OutputMessage
showIssue issue = RichMessage defaultAttachment
  { attachmentFallback = summary issue
  , attachmentColor = color
  , attachmentAuthorName = Just (issueKey issue)
  , attachmentAuthorIcon = priorityIcon issue
  , attachmentTitle = Just (summary issue)
  , attachmentTitleLink = issueUrl issue
  , attachmentText = description issue
  , attachmentFields = catMaybes [ makeField "Status" (issueStatus issue)
                                 , makeField "Assigned to" (assigneeName issue)
                                 ]
  , attachmentThumbUrl = Just "https://a.slack-edge.com/2fac/plugins/jira/assets/service_36.png"
  , attachmentFooter = Just $ T.concat ["Comments: ", T.pack . show $ (fromMaybe 0 $ commentCount issue)]
  }
  where
    color = case (priorityName issue) of
      Just "Blocker" -> DangerColor
      Just "Critical" -> DangerColor
      Just "Major" -> WarningColor
      Just "Minor" -> GoodColor
      Just "Trivial" -> GoodColor
      _ -> DefaultColor
-- | Message posted when Jira answers with a server error.
showError :: Text -> OutputMessage
showError issueName = SimpleMessage msg
  where
    msg = T.concat ["Error fetching item `", issueName, "` from Jira."]
-- | Message posted when Jira answers with a client error (issue absent).
showNotFound :: Text -> OutputMessage
showNotFound issueName = SimpleMessage msg
  where
    msg = T.concat ["Item `", issueName, "` not found in Jira."]
| wamaral/slaskellbot | src/Plugin/Jira.hs | bsd-3-clause | 6,218 | 0 | 18 | 1,663 | 1,772 | 914 | 858 | 127 | 6 |
module Data.Wright.RGB.Model.AdobeRGB1998 (adobeRGB1998) where
import Data.Wright.Types (Model(..), Primary(..), Gamma(..))
import Data.Wright.CIE.Illuminant.D65 (d65)
-- | Adobe RGB (1998) working space: the D65 model with gamma 2.2 and the
-- Adobe primaries.  Primary arguments are presumably x, y chromaticity
-- plus a luminance weight — TODO confirm against 'Primary''s definition.
adobeRGB1998 :: Model
adobeRGB1998 = d65
  { gamma = Gamma 2.2
  , red = Primary 0.6400 0.3300 0.297361
  , green = Primary 0.2100 0.7100 0.627355
  , blue = Primary 0.1500 0.0600 0.075285
  }
} | fmap-archive/wright | src/Data/Wright/RGB/Model/AdobeRGB1998.hs | mit | 365 | 0 | 7 | 59 | 116 | 72 | 44 | 9 | 1 |
import Data.Array
import Data.Function (on)
import Data.List (intercalate, maximumBy)
import Data.Ratio
-- | Dense matrix indexed by (row, column); bounds carry the dimensions.
type Matrix a = Array (Int, Int) a
-- | Dense vector indexed by row number.
type Vector a = Array Int a
swapRows :: Int -> Int -> Matrix a -> Matrix a
swapRows r1 r2 m
| r1 == r2 = m
| otherwise =
m //
concat [[((r2, c), m ! (r1, c)), ((r1, c), m ! (r2, c))] | c <- [c1 .. cn]]
where
((_, c1), (_, cn)) = bounds m
-- | One elimination step: for every position (i, j) inside the given
-- row/column window, subtract @m!(i,c) * m!(r,j) / m!(r,c)@, which
-- zeroes column c in rows r1..rn using the pivot at (r, c).
subRows ::
     Fractional a
  => (Int, Int) -- pivot location
  -> (Int, Int) -- rows to cover
  -> (Int, Int) -- columns to cover
  -> Matrix a
  -> Matrix a
subRows (r, c) (r1, rn) (c1, cn) m =
  accum
    (-)
    m
    [ ((i, j), m ! (i, c) * m ! (r, j) / m ! (r, c))
    | i <- [r1 .. rn]
    , j <- [c1 .. cn]
    ]
-- | Forward elimination with partial pivoting, producing row echelon
-- form.  At each column the row with the largest absolute entry is
-- swapped up to serve as pivot; an all-zero pivot column is skipped.
-- Recursion stops at column cn, so the last column (the augmented
-- right-hand side) is never used as a pivot column.
gaussianElimination :: (Fractional a, Ord a) => Matrix a -> Matrix a
gaussianElimination mat = go (r1, c1) mat
  where
    ((r1, c1), (rn, cn)) = bounds mat
    go (r, c) m
      | c == cn = m
      | pivot == 0 = go (r, c + 1) m
      | otherwise = go (r + 1, c + 1) $ subRows (r, c) (r + 1, rn) (c, cn) m'
      where
        -- Partial pivoting: pick the row with the largest |entry| in
        -- column c among the not-yet-eliminated rows r..rn.
        (target, pivot) =
          maximumBy (compare `on` abs . snd) [(k, m ! (k, c)) | k <- [r .. rn]]
        m' = swapRows r target m
-- | Reduce an echelon-form matrix to reduced row echelon form
-- (Gauss-Jordan): each pivot row is scaled so the pivot becomes 1
-- (the @accum (/)@ step) and the entries above the pivot are cleared
-- via 'subRows' on rows r1..r-1.
gaussJordan :: (Fractional a, Eq a) => Matrix a -> Matrix a
gaussJordan mat = go (r1, c1) mat
  where
    ((r1, c1), (rn, cn)) = bounds mat
    go (r, c) m
      | c == cn = m
      | m ! (r, c) == 0 = go (r, c + 1) m
      | otherwise = go (r + 1, c + 1) $ subRows (r, c) (r1, r - 1) (c, cn) m'
      where
        -- Normalise row r by dividing columns c..cn by the pivot.
        m' = accum (/) m [((r, j), m ! (r, c)) | j <- [c .. cn]]
-- | Solve an upper-triangular augmented system by back substitution.
-- The solution array @sol@ is defined lazily in terms of itself: entry
-- r reads only entries r+1..rn, which terminates because the input is
-- assumed triangular (as produced by 'gaussianElimination').
backSubstitution :: (Fractional a) => Matrix a -> Vector a
backSubstitution m = sol
  where
    ((r1, _), (rn, cn)) = bounds m
    -- Column cn holds the right-hand side of each equation.
    sol =
      listArray (r1, rn) [(m ! (r, cn) - sum' r) / m ! (r, r) | r <- [r1 .. rn]]
    sum' r = sum [m ! (r, k) * sol ! k | k <- [r + 1 .. rn]]
-- | Render a matrix as tab-separated values, one row per line.
printM :: (Show a) => Matrix a -> String
printM m = unlines [row r | r <- [r1 .. rn]]
  where
    ((r1, c1), (rn, cn)) = bounds m
    row r = intercalate "\t" [show (m ! (r, c)) | c <- [c1 .. cn]]

-- | Render a vector with one element per line.
printV :: (Show a) => Vector a -> String
printV v = unlines [show x | x <- elems v]
-- | Demonstrate the pipeline on a 3x4 augmented system.  Exact
-- 'Ratio' arithmetic keeps the printed results free of rounding noise.
main :: IO ()
main = do
  let coefficients = [2, 3, 4, 6, 1, 2, 3, 4, 3, -4, 0, 10] :: [Ratio Int]
      m = listArray ((1, 1), (3, 4)) coefficients
      echelon = gaussianElimination m
  putStrLn "Original Matrix:"
  putStrLn (printM m)
  putStrLn "Echelon form"
  putStrLn (printM echelon)
  putStrLn "Reduced echelon form"
  putStrLn (printM (gaussJordan echelon))
  putStrLn "Solution from back substitution"
  putStrLn (printV (backSubstitution echelon))
| Gathros/algorithm-archive | contents/gaussian_elimination/code/haskell/gaussianElimination.hs | mit | 2,573 | 10 | 12 | 781 | 1,495 | 798 | 697 | -1 | -1 |
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE FlexibleContexts #-}
---------------------------------------------------------
--
-- | Serve static files from a Yesod app.
--
-- This is great for developing your application, but also for a
-- dead-simple deployment. Caching headers are automatically
-- taken care of.
--
-- If you are running a proxy server (like Apache or Nginx),
-- you may want to have that server do the static serving instead.
--
-- In fact, in an ideal setup you'll serve your static files from
-- a separate domain name to save time on transmitting
-- cookies. In that case, you may wish to use 'urlParamRenderOverride'
-- to redirect requests to this subsite to a separate domain
-- name.
--
-- Note that this module's static subsite ignores all files and
-- directories that are hidden by Unix conventions (i.e. start
-- with a dot, such as @\".ssh\"@) and the directory "tmp" on the
-- root of the directory with static files.
module Yesod.Static
( -- * Subsite
Static (..)
, Route (..)
, StaticRoute
-- * Smart constructor
, static
, staticDevel
-- * Combining CSS/JS
-- $combining
, combineStylesheets'
, combineScripts'
-- ** Settings
, CombineSettings
, csStaticDir
, csCssPostProcess
, csJsPostProcess
, csCssPreProcess
, csJsPreProcess
, csCombinedFolder
-- * Template Haskell helpers
, staticFiles
, staticFilesList
, staticFilesMap
, staticFilesMergeMap
, publicFiles
-- * Hashing
, base64md5
-- * Embed
, embed
#ifdef TEST_EXPORT
, getFileListPieces
#endif
) where
import System.Directory
import qualified System.FilePath as FP
import Control.Monad
import Data.FileEmbed (embedDir)
import Yesod.Core
import Yesod.Core.Types
import Data.List (intercalate, sort)
import Language.Haskell.TH
import Language.Haskell.TH.Syntax as TH
import Crypto.Hash.Conduit (hashFile, sinkHash)
import Crypto.Hash (MD5, Digest)
import Control.Monad.Trans.State
import qualified Data.ByteArray as ByteArray
import qualified Data.ByteString.Base64
import qualified Data.ByteString.Char8 as S8
import qualified Data.ByteString.Lazy as L
import Data.Text (Text, pack)
import qualified Data.Text as T
import qualified Data.Map as M
import Data.IORef (readIORef, newIORef, writeIORef)
import Data.Char (isLower, isDigit)
import Data.List (foldl')
import qualified Data.ByteString as S
import System.PosixCompat.Files (getFileStatus, modificationTime)
import System.Posix.Types (EpochTime)
import Conduit
import System.FilePath ((</>), (<.>), takeDirectory)
import qualified System.FilePath as F
import qualified Data.Text.Lazy as TL
import qualified Data.Text.Lazy.Encoding as TLE
import Data.Default
--import Text.Lucius (luciusRTMinified)
import Network.Wai (pathInfo)
import Network.Wai.Application.Static
( StaticSettings (..)
, staticApp
, webAppSettingsWithLookup
, embeddedSettings
)
import WaiAppStatic.Storage.Filesystem (ETagLookup)
-- | Type used for the subsite with static contents.
-- Wraps the wai-app-static 'StaticSettings' that will serve the files.
newtype Static = Static StaticSettings

-- | Alias for routes that point into a 'Static' subsite.
type StaticRoute = Route Static
-- | Produce a default value of 'Static' for a given file
-- folder.
--
-- Does not have index files or directory listings. The static
-- files' contents /must not/ change, however new files can be
-- added.
static :: FilePath -> IO Static
static dir =
  -- Hash every file once at startup; the lookup table backs the ETags.
  Static . webAppSettingsWithLookup dir <$> cachedETagLookup dir

-- | Like 'static', but does not assume the file contents are frozen:
-- each request re-checks the file's modification time and drops the
-- cached ETag when the file has changed.
staticDevel :: FilePath -> IO Static
staticDevel dir =
  Static . webAppSettingsWithLookup dir <$> cachedETagLookupDevel dir
-- | Produce a 'Static' based on embedding all of the static files' contents in the
-- executable at compile time.
--
-- You should use "Yesod.EmbeddedStatic" instead, it is much more powerful.
--
-- Nota Bene: if you replace the scaffolded 'static' call in Settings/StaticFiles.hs
-- you will need to change the scaffolded addStaticContent. Otherwise, some of your
-- assets will be 404'ed. This is because by default yesod will generate compile those
-- assets to @static/tmp@ which for 'static' is fine since they are served out of the
-- directory itself. With embedded static, that will not work.
-- You can easily change @addStaticContent@ to @\_ _ _ -> return Nothing@ as a workaround.
-- This will cause yesod to embed those assets into the generated HTML file itself.
embed :: FilePath -> Q Exp
-- Template Haskell splice: 'embedDir' bakes the directory contents into
-- the executable at compile time; the spliced expression has type Static.
embed fp = [|Static (embeddedSettings $(embedDir fp))|]
instance RenderRoute Static where
    -- | A route on the static subsite (see also 'staticFiles').
    --
    -- You may use this constructor directly to manually link to a
    -- static file. The first argument is the sub-path to the file
    -- being served whereas the second argument is the key-value
    -- pairs in the query string. For example,
    --
    -- > StaticRoute $ StaticR [\"thumb001.jpg\"] [(\"foo\", \"5\"), (\"bar\", \"choc\")]
    --
    -- would generate a url such as
    -- @http://www.example.com/static/thumb001.jpg?foo=5&bar=choc@
    -- The StaticRoute constructor can be used when the URL cannot be
    -- statically generated at compile-time (e.g. when generating
    -- image galleries).
    data Route Static = StaticRoute [Text] [(Text, Text)]
      deriving (Eq, Show, Read)
    -- Path pieces and query string are stored verbatim in the route.
    renderRoute (StaticRoute x y) = (x, y)

-- Parsing is total: any path/query pair is a valid static route.
instance ParseRoute Static where
    parseRoute (x, y) = Just $ StaticRoute x y

-- Dispatch ignores Yesod routing and hands the raw WAI request path
-- straight to wai-app-static's 'staticApp'.
instance YesodSubDispatch Static master where
    yesodSubDispatch YesodSubRunnerEnv {..} req =
      ysreParentRunner handlert ysreParentEnv (fmap ysreToParentRoute route) req
      where
        route = Just $ StaticRoute (pathInfo req) []
        Static set = ysreGetSub $ yreSite $ ysreParentEnv
        handlert = sendWaiApplication $ staticApp set
-- | Filter applied to directory entries: the directory \"tmp\" and any
-- name starting with a dot (Unix hidden files) are excluded from the
-- static subsite.
notHidden :: FilePath -> Bool
notHidden "tmp" = False
notHidden ('.':_) = False
notHidden _ = True
-- | Recursively list all visible files under a directory, each file as
-- its list of path pieces relative to the root.  Hidden entries (see
-- 'notHidden') are skipped.  A State map is threaded through so that
-- identical path-piece strings share one buffer in memory.
getFileListPieces :: FilePath -> IO [[String]]
getFileListPieces = flip evalStateT M.empty . flip go id
  where
    -- @front@ is a difference-list of the path pieces leading here.
    go :: String
       -> ([String] -> [String])
       -> StateT (M.Map String String) IO [[String]]
    go fp front = do
      allContents <- liftIO $ (sort . filter notHidden) `fmap` getDirectoryContents fp
      let fullPath :: String -> String
          fullPath f = fp ++ '/' : f
      files <- liftIO $ filterM (doesFileExist . fullPath) allContents
      let files' = map (front . return) files
      files'' <- mapM dedupe files'
      dirs <- liftIO $ filterM (doesDirectoryExist . fullPath) allContents
      dirs' <- mapM (\f -> go (fullPath f) (front . (:) f)) dirs
      return $ concat $ files'' : dirs'
    -- Reuse data buffers for identical strings
    dedupe :: [String] -> StateT (M.Map String String) IO [String]
    dedupe = mapM dedupe'
    dedupe' :: String -> StateT (M.Map String String) IO String
    dedupe' s = do
      m <- get
      case M.lookup s m of
        Just s' -> return s'
        Nothing -> do
          put $ M.insert s s m
          return s
-- | Template Haskell function that automatically creates routes
-- for all of your static files.
--
-- For example, if you used
--
-- > staticFiles "static/"
--
-- and you had files @\"static\/style.css\"@ and
-- @\"static\/js\/script.js\"@, then the following top-level
-- definitions would be created:
--
-- > style_css = StaticRoute ["style.css"] []
-- > js_script_js = StaticRoute ["js", "script.js"] []
--
-- Note that dots (@.@), dashes (@-@) and slashes (@\/@) are
-- replaced by underscores (@\_@) to create valid Haskell
-- identifiers.
-- Scans the directory at compile time and generates one identifier per file.
staticFiles :: FilePath -> Q [Dec]
staticFiles dir = mkStaticFiles dir
-- | Same as 'staticFiles', but takes an explicit list of files
-- to create identifiers for. The files path given are relative
-- to the static folder. For example, to create routes for the
-- files @\"static\/js\/jquery.js\"@ and
-- @\"static\/css\/normalize.css\"@, you would use:
--
-- > staticFilesList "static" ["js/jquery.js", "css/normalize.css"]
--
-- This can be useful when you have a very large number of static
-- files, but only need to refer to a few of them from Haskell.
staticFilesList :: FilePath -> [FilePath] -> Q [Dec]
staticFilesList dir fs =
    mkStaticFilesList dir (map split fs) True
  where
    -- Break a relative path on '/' into its pieces.
    split :: FilePath -> [String]
    split [] = []
    split x =
        let (a, b) = break (== '/') x
        in a : split (drop 1 b)
-- | Same as 'staticFiles', but doesn't append an ETag to the
-- query string.
--
-- Using 'publicFiles' will speed up the compilation, since there
-- won't be any need for hashing files during compile-time.
-- However, since the ETag ceases to be part of the URL, the
-- 'Static' subsite won't be able to set the expire date too far
-- on the future. Browsers still will be able to cache the
-- contents, however they'll need send a request to the server to
-- see if their copy is up-to-date.
publicFiles :: FilePath -> Q [Dec]
publicFiles dir = mkStaticFiles' dir False
-- | Similar to 'staticFilesList', but takes a mapping of
-- unmunged names to fingerprinted file names.
--
-- @since 1.5.3
staticFilesMap :: FilePath -> M.Map FilePath FilePath -> Q [Dec]
staticFilesMap fp m = mkStaticFilesList' fp (map splitBoth mapList) True
  where
    splitBoth (k, v) = (split k, split v)
    mapList = M.toList m
    split :: FilePath -> [String]
    split [] = []
    split x =
        let (a, b) = break (== '/') x
        in a : split (drop 1 b)
-- | Similar to 'staticFilesMergeMap', but also generates identifiers
-- for all files in the specified directory that don't have a
-- fingerprinted version.
--
-- @since 1.5.3
staticFilesMergeMap :: FilePath -> M.Map FilePath FilePath -> Q [Dec]
staticFilesMergeMap fp m = do
    fs <- qRunIO $ getFileListPieces fp
    let filesList = map FP.joinPath fs
        mergedMapList = M.toList $ foldl' (checkedInsert invertedMap) m filesList
    mkStaticFilesList' fp (map splitBoth mergedMapList) True
  where
    splitBoth (k, v) = (split k, split v)
    swap (x, y) = (y, x)
    mapList = M.toList m
    -- fingerprinted -> unmunged, used to recognise fingerprinted files.
    invertedMap = M.fromList $ map swap mapList
    split :: FilePath -> [String]
    split [] = []
    split x =
        let (a, b) = break (== '/') x
        in a : split (drop 1 b)
    -- We want to keep mappings for all files that are pre-fingerprinted,
    -- so this function checks against all of the existing fingerprinted files and
    -- only inserts a new mapping if it's not a fingerprinted file.
    checkedInsert
      :: M.Map FilePath FilePath -- inverted dictionary
      -> M.Map FilePath FilePath -- accumulating state
      -> FilePath
      -> M.Map FilePath FilePath
    checkedInsert iDict st p = if M.member p iDict
      then st
      else M.insert p p st
-- | Hash every visible file under the directory, yielding a map from
-- full file path to its base64-md5 digest (used as the ETag value).
mkHashMap :: FilePath -> IO (M.Map FilePath S8.ByteString)
mkHashMap dir = do
    pieces <- getFileListPieces dir
    M.fromList `fmap` mapM hashPair pieces
  where
    hashPair :: [String] -> IO (FilePath, S8.ByteString)
    hashPair p = do
        let file = pathFromRawPieces dir p
        h <- base64md5File file
        return (file, S8.pack h)
-- | Join a root directory and path pieces with forward slashes, e.g.
-- @pathFromRawPieces "static" ["js","app.js"] == "static/js/app.js"@.
pathFromRawPieces :: FilePath -> [String] -> FilePath
pathFromRawPieces root pieces = foldl' join root pieces
  where
    join acc piece = acc ++ '/' : piece
-- | ETag lookup for development mode: file hashes are computed once,
-- but each request compares the file's current modification time with
-- the one recorded at first sight.  A changed mtime yields Nothing so
-- the client re-downloads instead of getting a stale cached copy.
cachedETagLookupDevel :: FilePath -> IO ETagLookup
cachedETagLookupDevel dir = do
  etags <- mkHashMap dir
  mtimeVar <- newIORef (M.empty :: M.Map FilePath EpochTime)
  return $ \f ->
    case M.lookup f etags of
      Nothing -> return Nothing
      Just checksum -> do
        fs <- getFileStatus f
        let newt = modificationTime fs
        mtimes <- readIORef mtimeVar
        -- First sighting: remember the mtime and treat the hash as valid.
        oldt <- case M.lookup f mtimes of
          Nothing -> writeIORef mtimeVar (M.insert f newt mtimes) >> return newt
          Just oldt -> return oldt
        return $ if newt /= oldt then Nothing else Just checksum
-- | Build an ETag lookup that hashes every file once up front and then
-- answers purely from the in-memory table.
cachedETagLookup :: FilePath -> IO ETagLookup
cachedETagLookup dir = do
    etags <- mkHashMap dir
    let lookupETag f = return (M.lookup f etags)
    return lookupETag
-- Scan the directory and generate identifiers with ETag query params.
mkStaticFiles :: FilePath -> Q [Dec]
mkStaticFiles fp = mkStaticFiles' fp True
mkStaticFiles' :: FilePath -- ^ static directory
               -> Bool -- ^ append checksum query parameter
               -> Q [Dec]
mkStaticFiles' fp makeHash = do
    fs <- qRunIO $ getFileListPieces fp
    mkStaticFilesList fp fs makeHash
-- Each file is its own alias: identifier name and served path coincide.
mkStaticFilesList
    :: FilePath -- ^ static directory
    -> [[String]] -- ^ list of files to create identifiers for
    -> Bool -- ^ append checksum query parameter
    -> Q [Dec]
mkStaticFilesList fp fs makeHash = mkStaticFilesList' fp (zip fs fs) makeHash
-- Core generator: for every (alias, file) pair, emit a top-level
-- @name :: StaticRoute@ binding whose name is the alias with
-- non-alphanumeric characters replaced by underscores.
mkStaticFilesList'
    :: FilePath -- ^ static directory
    -> [([String], [String])] -- ^ list of files to create identifiers for, where
                              -- the first argument of the tuple is the identifier
                              -- alias and the second is the actual file name
    -> Bool -- ^ append checksum query parameter
    -> Q [Dec]
mkStaticFilesList' fp fs makeHash = do
    concat `fmap` mapM mkRoute fs
  where
    -- Only [A-Za-z0-9] survive; everything else becomes '_'.
    replace' c
      | 'A' <= c && c <= 'Z' = c
      | 'a' <= c && c <= 'z' = c
      | '0' <= c && c <= '9' = c
      | otherwise = '_'
    mkRoute (alias, f) = do
        let name' = intercalate "_" $ map (map replace') alias
            -- Identifiers must start lower-case, so digit/upper-case
            -- starts get a leading underscore.
            routeName = mkName $
                case () of
                    ()
                        | null name' -> error "null-named file"
                        | isDigit (head name') -> '_' : name'
                        | isLower (head name') -> name'
                        | otherwise -> '_' : name'
        f' <- [|map pack $(TH.lift f)|]
        -- With hashing on, the file is hashed at compile time and the
        -- digest is appended as an "etag" query parameter.
        qs <- if makeHash
                  then do hash <- qRunIO $ base64md5File $ pathFromRawPieces fp f
                          [|[(pack "etag", pack $(TH.lift hash))]|]
                  else return $ ListE []
        return
            [ SigD routeName $ ConT ''StaticRoute
            , FunD routeName
                [ Clause [] (NormalB $ (ConE 'StaticRoute) `AppE` f' `AppE` qs) []
                ]
            ]
-- | MD5-hash a file (streaming) and render the digest via 'base64'.
base64md5File :: FilePath -> IO String
base64md5File = fmap (base64 . encode) . hashFile
  where encode d = ByteArray.convert (d :: Digest MD5)
-- | MD5-hash a lazy ByteString in constant space and render the digest
-- via 'base64'.
base64md5 :: L.ByteString -> String
base64md5 lbs =
    base64 $ encode
    $ runConduitPure
    $ Conduit.sourceLazy lbs .| sinkHash
  where
    encode d = ByteArray.convert (d :: Digest MD5)
-- | First eight characters of the base64 encoding, with the two
-- URL-unsafe characters (@+@, @/@) replaced by @-@ and @_@ so the
-- result is safe inside paths and query strings.
base64 :: S.ByteString -> String
base64 bs = map urlSafe (take 8 (S8.unpack (Data.ByteString.Base64.encode bs)))
  where
    urlSafe '+' = '-'
    urlSafe '/' = '_'
    urlSafe c = c
-- $combining
--
-- A common scenario on a site is the desire to include many external CSS and
-- Javascript files on every page. Doing so via the Widget functionality in
-- Yesod will work, but would also mean that the same content will be
-- downloaded many times. A better approach would be to combine all of these
-- files together into a single static file and serve that as a static resource
-- for every page. That resource can be cached on the client, and bandwidth
-- usage reduced.
--
-- This could be done as a manual process, but that becomes tedious. Instead,
-- you can use some Template Haskell code which will combine these files into a
-- single static file at compile time.
-- Which asset kind is being combined; selects pre/post processors and
-- the output file extension.
data CombineType = JS | CSS
combineStatics' :: CombineType
                -> CombineSettings
                -> [Route Static] -- ^ files to combine
                -> Q Exp
combineStatics' combineType CombineSettings {..} routes = do
    -- Stream all source files, decode as UTF-8, concatenate.
    texts <- qRunIO $ runConduitRes
                    $ yieldMany fps
                   .| awaitForever readUTFFile
                   .| sinkLazy
    ltext <- qRunIO $ preProcess texts
    bs <- qRunIO $ postProcess fps $ TLE.encodeUtf8 ltext
    -- Content-addressed output name: the hash changes iff the content does.
    let hash' = base64md5 bs
        suffix = csCombinedFolder </> hash' <.> extension
        fp = csStaticDir </> suffix
    qRunIO $ do
        createDirectoryIfMissing True $ takeDirectory fp
        L.writeFile fp bs
    let pieces = map T.unpack $ T.splitOn "/" $ T.pack suffix
    [|StaticRoute (map pack pieces) []|]
  where
    fps :: [FilePath]
    fps = map toFP routes
    toFP (StaticRoute pieces _) = csStaticDir </> F.joinPath (map T.unpack pieces)
    readUTFFile fp = sourceFile fp .| decodeUtf8C
    postProcess =
        case combineType of
            JS -> csJsPostProcess
            CSS -> csCssPostProcess
    preProcess =
        case combineType of
            JS -> csJsPreProcess
            CSS -> csCssPreProcess
    extension =
        case combineType of
            JS -> "js"
            CSS -> "css"
-- | Data type for holding all settings for combining files.
--
-- This data type is a settings type. For more information, see:
--
-- <http://www.yesodweb.com/book/settings-types>
--
-- Since 1.2.0
data CombineSettings = CombineSettings
    { csStaticDir :: FilePath
    -- ^ File path containing static files.
    --
    -- Default: static
    --
    -- Since 1.2.0
    , csCssPostProcess :: [FilePath] -> L.ByteString -> IO L.ByteString
    -- ^ Post processing to be performed on CSS files.
    --
    -- Default: Pass-through.
    --
    -- Since 1.2.0
    , csJsPostProcess :: [FilePath] -> L.ByteString -> IO L.ByteString
    -- ^ Post processing to be performed on Javascript files.
    --
    -- Default: Pass-through.
    --
    -- Since 1.2.0
    , csCssPreProcess :: TL.Text -> IO TL.Text
    -- ^ Pre-processing to be performed on CSS files.
    --
    -- Default: convert all occurences of /static/ to ../
    --
    -- Since 1.2.0
    , csJsPreProcess :: TL.Text -> IO TL.Text
    -- ^ Pre-processing to be performed on Javascript files.
    --
    -- Default: Pass-through.
    --
    -- Since 1.2.0
    , csCombinedFolder :: FilePath
    -- ^ Subfolder to put combined files into.
    --
    -- Default: combined
    --
    -- Since 1.2.0
    }
instance Default CombineSettings where
    def = CombineSettings
        { csStaticDir = "static"
        {- Disabled due to: https://github.com/yesodweb/yesod/issues/623
        , csCssPostProcess = \fps ->
            either (error . (errorIntro fps)) (return . TLE.encodeUtf8)
          . flip luciusRTMinified []
          . TLE.decodeUtf8
        -}
        , csCssPostProcess = const return
        , csJsPostProcess = const return
        -- FIXME The following borders on a hack. With combining of files,
        -- the final location of the CSS is no longer fixed, so relative
        -- references will break. Instead, we switched to using /static/
        -- absolute references. However, when served from a separate domain
        -- name, this will break too. The solution is that, during
        -- development, we keep /static/, and in the combining phase, we
        -- replace /static with a relative reference to the parent folder.
        , csCssPreProcess =
              return
            . TL.replace "'/static/" "'../"
            . TL.replace "\"/static/" "\"../"
        , csJsPreProcess = return
        , csCombinedFolder = "combined"
        }
-- Lift a list of static routes into a Template Haskell expression so it
-- can be spliced into generated code (Text has no Lift instance here,
-- hence the pack/unpack round-trip).
liftRoutes :: [Route Static] -> Q Exp
liftRoutes =
    fmap ListE . mapM go
  where
    go :: Route Static -> Q Exp
    go (StaticRoute x y) = [|StaticRoute $(liftTexts x) $(liftPairs y)|]
    liftTexts = fmap ListE . mapM liftT
    liftT t = [|pack $(TH.lift $ T.unpack t)|]
    liftPairs = fmap ListE . mapM liftPair
    liftPair (x, y) = [|($(liftT x), $(liftT y))|]
-- | Combine multiple CSS files together. Common usage would be:
--
-- >>> combineStylesheets' development def 'StaticR [style1_css, style2_css]
--
-- Where @development@ is a variable in your site indicated whether you are in
-- development or production mode.
--
-- Since 1.2.0
combineStylesheets' :: Bool -- ^ development? if so, perform no combining
                    -> CombineSettings
                    -> Name -- ^ Static route constructor name, e.g. \'StaticR
                    -> [Route Static] -- ^ files to combine
                    -> Q Exp
-- In development each stylesheet is added individually (fast rebuilds);
-- in production all files are merged into one content-hashed file.
combineStylesheets' development cs con routes
    | development = [| mapM_ (addStylesheet . $(return $ ConE con)) $(liftRoutes routes) |]
    | otherwise = [| addStylesheet $ $(return $ ConE con) $(combineStatics' CSS cs routes) |]
-- | Combine multiple JS files together. Common usage would be:
--
-- >>> combineScripts' development def 'StaticR [script1_js, script2_js]
--
-- Where @development@ is a variable in your site indicated whether you are in
-- development or production mode.
--
-- Since 1.2.0
combineScripts' :: Bool -- ^ development? if so, perform no combining
                -> CombineSettings
                -> Name -- ^ Static route constructor name, e.g. \'StaticR
                -> [Route Static] -- ^ files to combine
                -> Q Exp
-- Same development/production split as 'combineStylesheets''.
combineScripts' development cs con routes
    | development = [| mapM_ (addScript . $(return $ ConE con)) $(liftRoutes routes) |]
    | otherwise = [| addScript $ $(return $ ConE con) $(combineStatics' JS cs routes) |]
| geraldus/yesod | yesod-static/Yesod/Static.hs | mit | 21,599 | 0 | 22 | 5,540 | 4,118 | 2,244 | 1,874 | 342 | 4 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.Lambda.DeleteEventSourceMapping
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Removes an event source mapping. This means AWS Lambda will no longer
-- invoke the function for events in the associated source.
--
-- This operation requires permission for the
-- 'lambda:DeleteEventSourceMapping' action.
--
-- /See:/ <http://docs.aws.amazon.com/lambda/latest/dg/API_DeleteEventSourceMapping.html AWS API Reference> for DeleteEventSourceMapping.
module Network.AWS.Lambda.DeleteEventSourceMapping
(
-- * Creating a Request
deleteEventSourceMapping
, DeleteEventSourceMapping
-- * Request Lenses
, desmUUId
-- * Destructuring the Response
, eventSourceMappingConfiguration
, EventSourceMappingConfiguration
-- * Response Lenses
, esmcEventSourceARN
, esmcState
, esmcFunctionARN
, esmcUUId
, esmcLastProcessingResult
, esmcBatchSize
, esmcStateTransitionReason
, esmcLastModified
) where
import Network.AWS.Lambda.Types
import Network.AWS.Lambda.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | /See:/ 'deleteEventSourceMapping' smart constructor.
-- Request wrapper carrying only the UUID of the mapping to delete.
newtype DeleteEventSourceMapping = DeleteEventSourceMapping'
    { _desmUUId :: Text
    } deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DeleteEventSourceMapping' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'desmUUId'
-- Generated smart constructor: build a request from the mapping UUID.
deleteEventSourceMapping
    :: Text -- ^ 'desmUUId'
    -> DeleteEventSourceMapping
deleteEventSourceMapping pUUId_ =
    DeleteEventSourceMapping'
    { _desmUUId = pUUId_
    }
-- | The event source mapping ID.
desmUUId :: Lens' DeleteEventSourceMapping Text
desmUUId = lens _desmUUId (\ s a -> s{_desmUUId = a});
-- HTTP DELETE whose JSON response body is the mapping's final
-- configuration.
instance AWSRequest DeleteEventSourceMapping where
        type Rs DeleteEventSourceMapping =
             EventSourceMappingConfiguration
        request = delete lambda
        response = receiveJSON (\ s h x -> eitherParseJSON x)
-- No extra headers beyond the defaults.
instance ToHeaders DeleteEventSourceMapping where
        toHeaders = const mempty
-- REST path embeds the UUID of the mapping being deleted.
instance ToPath DeleteEventSourceMapping where
        toPath DeleteEventSourceMapping'{..}
          = mconcat
              ["/2015-03-31/event-source-mappings/",
               toBS _desmUUId]
-- No query-string parameters.
instance ToQuery DeleteEventSourceMapping where
        toQuery = const mempty
| fmapfmapfmap/amazonka | amazonka-lambda/gen/Network/AWS/Lambda/DeleteEventSourceMapping.hs | mpl-2.0 | 3,080 | 0 | 9 | 621 | 336 | 209 | 127 | 53 | 1 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE OverloadedStrings #-}
module BitcoinCore.Transaction.Transactions where
import General.Util
import BitcoinCore.Transaction.Script
import BitcoinCore.Keys (PublicKeyRep(..), PubKeyFormat(..))
import General.Hash (Hash(..), hashObject, doubleSHA)
import Prelude hiding (concat, reverse, sequence)
import Data.ByteString (ByteString)
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as BL
import Crypto.PubKey.ECC.ECDSA
( signWith
, Signature(..)
, PrivateKey(..)
, PublicKey(..)
)
import Crypto.Hash.Algorithms (SHA256(..))
import Control.Lens (makeLenses, (^.), mapped, set)
import qualified Data.Binary as BIN
import qualified Data.Binary.Put as Put
import Data.Binary.Put (Put)
import qualified Data.Binary.Get as Get
import Data.Binary.Get (Get)
import Data.Binary (Binary(..), Word32)
import Control.Monad (replicateM)
import Data.Bits ((.&.))
import Crypto.Hash (hashWith)
import Data.ByteArray (convert)
import Data.Maybe (fromMaybe)
import Test.QuickCheck.Arbitrary (Arbitrary(..))
import Test.QuickCheck.Gen (choose, suchThat)
-- | A Bitcoin transaction: inputs spending previous outputs, new
-- outputs, a version number, and a locktime.
data Transaction = Transaction
  { _inputs :: [TxInput]
  , _outputs :: [TxOutput]
  , _txVersion :: TxVersion
  , _locktime :: LockTime
  } deriving (Eq, Show)
-- | One input: the output being spent plus the unlocking script.
data TxInput = TxInput
  { _utxo :: UTXO
  , _signatureScript :: Script
  , _sequence :: Sequence
  } deriving (Eq, Show)
-- | One output: an amount and the locking script that guards it.
data TxOutput = TxOutput
  { _value :: Value
  , _outputScript :: Script
  } deriving (Eq, Show)
-- | Reference to a previous transaction output (outpoint): the
-- transaction's hash and the output's index within it.
data UTXO = UTXO
  { _outTxHash :: TxHash
  , _outIndex :: TxIndex
  } deriving (Eq, Show)
-- Amount in satoshis.
newtype Value = Satoshis Int
  deriving (Eq, Show)
newtype TxVersion = TxVersion Int
  deriving (Eq, Show)
type TxHash = Hash Transaction
newtype TxIndex = TxIndex Int
  deriving (Eq, Show)
newtype Sequence = Sequence Word32
  deriving (Eq, Show)
newtype LockTime = LockTime Word32
  deriving (Eq, Show)
makeLenses ''Transaction
makeLenses ''TxInput
makeLenses ''TxOutput
makeLenses ''UTXO
-- | Transaction id: double SHA-256 of the serialised transaction.
hashTransaction :: Transaction -> Hash Transaction
hashTransaction = hashObject doubleSHA
-- | All locking scripts of a transaction's outputs, in order.
outputScripts :: Transaction -> [Script]
outputScripts transaction = map (^.outputScript) (transaction^.outputs)
-------------------- Transaction signing
-- | Build a single-input transaction spending @utxo'@ and sign it.
-- The signature covers the transaction with the *old* output's script
-- in the input slot; the final scriptSig then replaces it via the lens.
signedTransaction :: UTXO -> Script -> (PublicKey, PrivateKey) -> [TxOutput] -> Transaction
signedTransaction utxo' oldInputScript keys outputs' =
  set (inputs.mapped.signatureScript) newInputScript transaction
  where
    newInputScript = scriptSig (signedHash oldInputScript (snd keys) transaction) (fst keys)
    -- NOTE(review): _signatureScript is deliberately left out of this
    -- record; the `set` above always fills it before use, but GHC will
    -- warn about the incomplete record construction.
    transaction = Transaction
      {_inputs = [TxInput { _utxo = utxo'
                          , _sequence = defaultSequence}]
      , _outputs = outputs'
      , _txVersion = TxVersion 1
      , _locktime = defaultLockTime}
-- We need to sign the double SHA the intermediateTransaction.
-- Since `signWith` always performs a SHA, we achieve correct
-- behaviour by performing one SHA in `intermediateHash`
-- and allowing `signWith` to perform the second hash
-- | ECDSA-sign the (single-SHA'd) intermediate transaction; signWith
-- performs the second SHA, completing the double-SHA256 sighash.
--
-- SECURITY NOTE(review): the first argument to 'signWith' is the ECDSA
-- nonce k, and it is hard-coded to 100 here. Reusing a fixed nonce
-- across two signatures allows recovery of the private key — this must
-- use a random or RFC 6979 deterministic nonce before real use.
signedHash :: Script -> PrivateKey -> Transaction -> Signature
signedHash oldInputScript privateKey intermediateTransaction = fromMaybe
  (error "Unable to sign hash")
  (signWith 100 privateKey SHA256 intermediateHash')
  where intermediateHash' = intermediateHash intermediateTransaction oldInputScript
-- | First SHA-256 of (serialised tx with the spent output's script in
-- the input slot, followed by the 4-byte SIGHASH type).
intermediateHash :: Transaction -> Script -> ByteString
intermediateHash intermediateTransaction oldInputScript =
  convert . hashWith SHA256 $ bs
  where bs = BL.toStrict . Put.runPut $ do
          put (set (inputs.mapped.signatureScript) oldInputScript intermediateTransaction)
          Put.putWord32le sighashAll
-- SIGHASH_ALL: the signature commits to all inputs and outputs.
sighashAll :: Word32
sighashAll = 0x00000001
instance Binary Transaction where
  put = putTransaction
  get = getTransaction
-- Wire order: version, varint input count, inputs, varint output
-- count, outputs, locktime.
putTransaction :: Transaction -> Put
putTransaction tx = do
  put (tx^.txVersion)
  put . VarInt . fromIntegral . length $ (tx^.inputs)
  mapM_ put (tx^.inputs)
  put . VarInt . fromIntegral . length $ (tx^.outputs)
  mapM_ put (tx^.outputs)
  put $ tx^.locktime
-- Inverse of 'putTransaction'.
getTransaction :: Get Transaction
getTransaction = do
  v <- get
  VarInt inputCount <- get
  inputArray <- replicateM (fromIntegral inputCount) get
  VarInt outputCount <- get
  outputArray <- replicateM (fromIntegral outputCount) get
  locktime' <- get
  return Transaction
    { _inputs = inputArray
    , _outputs = outputArray
    , _txVersion = v
    , _locktime = locktime'}
instance Binary TxInput where
  put = putInput
  get = getInput
-- Wire order: outpoint, length-prefixed scriptSig, sequence.
putInput :: TxInput -> Put
putInput txInput = do
  put (txInput^.utxo)
  putWithLength
    (putScript (txInput^.signatureScript))
  put $ txInput^.sequence
-- Inverse of 'putInput'; the varint length bounds the script parser.
getInput :: Get TxInput
getInput = do
  outPoint <- get
  VarInt scriptLength <- get
  script <- getScript (fromIntegral scriptLength)
  sequence' <- get
  return TxInput
    { _utxo = outPoint
    , _signatureScript = script
    , _sequence = sequence' }
instance Binary TxOutput where
  put = putOutput
  get = getOutput
-- Wire order: value, length-prefixed locking script.
putOutput :: TxOutput -> Put
putOutput txOutput = do
  put (txOutput^.value)
  putWithLength
    (putScript (txOutput^.outputScript))
-- Inverse of 'putOutput'.
getOutput :: Get TxOutput
getOutput = do
  val <- get
  VarInt scriptLength <- get
  script <- getScript (fromIntegral scriptLength)
  return TxOutput
    { _value = val
    , _outputScript = script }
instance Binary UTXO where
  put = putOutPoint
  get = getOutPoint
-- Outpoint: referenced tx hash followed by the 4-byte little-endian
-- output index.
putOutPoint :: UTXO -> Put
putOutPoint utxo' = do
  put (utxo'^.outTxHash)
  let TxIndex i = utxo'^.outIndex
  Put.putWord32le . fromIntegral $ i
-- Inverse of 'putOutPoint'.
getOutPoint :: Get UTXO
getOutPoint = UTXO
  <$> get
  <*> (TxIndex . fromIntegral <$> Get.getWord32le)
instance Binary Value where
  put = putTxValue
  get = getTxValue
-- Amounts are 8-byte little-endian satoshi counts.
putTxValue :: Value -> Put
putTxValue (Satoshis i) =
  Put.putWord64le . fromIntegral $ i
getTxValue :: Get Value
getTxValue =
  Satoshis . fromIntegral <$> Get.getWord64le
-- | Unlocking script for pay-to-pubkey-hash: DER-encoded signature
-- (plus sighash byte) followed by the compressed public key.
scriptSig :: Signature -> PublicKey -> Script
scriptSig signature pubKey = Script [Txt der, Txt compressedPubkey]
  where der = BL.toStrict . Put.runPut $ putDerSignature signature
        compressedPubkey = BL.toStrict . BIN.encode
          $ PublicKeyRep Compressed pubKey
-- TODO: This scriptSig will only be valid for
-- pay to pub key hash scripts where the compressed pub key is hashed
-- make sure that I'm making addresses using compressed pubkeys also
-- See https://github.com/bitcoin/bips/blob/master/bip-0066.mediawiki
-- for a description of requiered der format
-- | Serialise a signature in DER form: 0x30 (SEQUENCE tag), the
-- length-prefixed r/s contents, then the one-byte sighash type
-- (0x01 = SIGHASH_ALL) that Bitcoin appends after the DER blob.
putDerSignature :: Signature -> Put
putDerSignature signature = do
  Put.putWord8 0x30
  putWithLength (putDERContents signature)
  Put.putWord8 0x01 -- one byte hashcode type
-- | Serialise an unsigned Integer as a DER INTEGER body: big-endian
-- magnitude bytes, with a 0x00 pad byte prepended whenever the leading
-- byte has its high bit set (so the value cannot be read as negative).
--
-- Uses 'BS.uncons' instead of the partial 'BS.head' so an empty byte
-- string (e.g. if @unroll BE 0@ yields no bytes) is written as-is
-- rather than crashing.
putDERInt :: Integer -> Put
putDERInt int =
  case BS.uncons intBS of
    Just (headByte, _)
      | headByte .&. 0x80 == 0x80 ->
          Put.putByteString (0x00 `BS.cons` intBS)
    _ -> Put.putByteString intBS
  where
    -- Big-endian magnitude bytes of the integer (project helper).
    intBS = unroll BE int
-- | Serialize the body of a DER signature: the R and S integers, each
-- tagged with 0x02 (DER INTEGER) and length-prefixed.  S is normalized
-- to its "low" form first (see 'getLowS') per BIP 62.
--
-- (Added the missing top-level type signature — every other top-level
-- binding in this module carries one.)
putDERContents :: Signature -> Put
putDERContents signature = do
  Put.putWord8 0x02
  putWithLength
    (putDERInt . sign_r $ signature)
  Put.putWord8 0x02
  putWithLength
    (putDERInt . getLowS . sign_s $ signature)
-- Multiple s values can yield the same signature
-- to prevent transaction malleability, s values are required to use the "low s" value
-- See: https://github.com/bitcoin/bips/blob/master/bip-0062.mediawiki#low-s-values-in-signatures
-- | Normalize an ECDSA @s@ component to its canonical "low-s" form.
-- Both @s@ and @order - s@ verify the same signature, so BIP 62
-- mandates the numerically smaller one to rule out malleability.
getLowS :: Integer -> Integer
getLowS s
  | s <= halfOrder = s
  | otherwise      = curveOrder - s
  where
    halfOrder  = 0x7FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF5D576E7357A4501DDFE92F46681B20A0
    curveOrder = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141
-- | Deserialize a DER-encoded signature.
--
-- The compound tag byte and overall length are read (to advance the
-- stream) but their values are not checked; likewise the bare
-- 'Get.getWord8' calls skip what is, in well-formed input, the 0x02
-- INTEGER tag preceding each component.  NOTE(review): no validation
-- is performed — malformed input is decoded garbage, not an error.
getDerSignature :: Get Signature
getDerSignature = do
  sequenceCode <- Get.getWord8
  derLength <- fromIntegral <$> Get.getWord8
  Get.getWord8
  xLength <- fromIntegral <$> Get.getWord8
  x <- roll BE <$> Get.getByteString xLength
  Get.getWord8
  yLength <- fromIntegral <$> Get.getWord8
  y <- roll BE <$> Get.getByteString yLength
  return $
    Signature x y
-- | 'Binary' instance delegating to the explicit version codecs below.
instance Binary TxVersion where
  put = putTxVersion
  get = getTxVersion
-- | Serialize the transaction version as a 32-bit little-endian word.
putTxVersion :: TxVersion -> Put
putTxVersion (TxVersion v) =
  Put.putWord32le . fromIntegral $ v
-- | Deserialize a 32-bit little-endian transaction version.
getTxVersion :: Get TxVersion
getTxVersion =
  TxVersion . fromIntegral <$> Get.getWord32le
-- | Default transaction version (1).
defaultVersion :: TxVersion
defaultVersion = TxVersion 1
-- | Default input sequence; 0xffffffff is the conventional "final"
-- value in the Bitcoin protocol.
defaultSequence :: Sequence
defaultSequence = Sequence 0xffffffff
-- | 'Binary' instance delegating to the explicit sequence codecs below.
instance Binary Sequence where
  put = putSequence
  get = getSequence
-- | Serialize the sequence field as a 32-bit little-endian word.
putSequence :: Sequence -> Put
putSequence (Sequence sequence') =
  Put.putWord32le sequence'
-- | Deserialize a 32-bit little-endian sequence field.
getSequence :: Get Sequence
getSequence = Sequence <$> Get.getWord32le
-- | Default lock time (0, i.e. no lock).
defaultLockTime :: LockTime
defaultLockTime = LockTime 0x00000000
-- | 'Binary' instance delegating to the explicit locktime codecs below.
instance Binary LockTime where
  put = putBlockLockTime
  get = getBlockLockTime
-- | Serialize the lock time as a 32-bit little-endian word.
putBlockLockTime :: LockTime -> Put
putBlockLockTime (LockTime locktime') =
  Put.putWord32le locktime'
-- | Deserialize a 32-bit little-endian lock time.
getBlockLockTime :: Get LockTime
getBlockLockTime = LockTime <$> Get.getWord32le
-- | Append the one-byte SIGHASH_ALL flag (0x01) to the stream.
putSighashAll :: Put
putSighashAll =
  Put.putWord8 1
-- | Generate a random transaction for property tests: random inputs,
-- outputs and version, with a locktime drawn from an arbitrary word.
instance Arbitrary Transaction where
  arbitrary = do
    txInputs  <- arbitrary
    txOutputs <- arbitrary
    version   <- arbitrary
    lockTime  <- LockTime <$> arbitrary
    return Transaction
      { _inputs = txInputs
      , _outputs = txOutputs
      , _txVersion = version
      , _locktime = lockTime }
-- | Generate a random output: arbitrary value plus arbitrary script.
instance Arbitrary TxOutput where
  arbitrary = do
    outValue  <- arbitrary
    outScript <- arbitrary
    return TxOutput
      { _value = outValue
      , _outputScript = outScript }
-- | Generate a random input: arbitrary outpoint and script, with the
-- sequence drawn from an arbitrary word.
instance Arbitrary TxInput where
  arbitrary = do
    utxo' <- arbitrary
    script <- arbitrary
    sequence' <- Sequence <$> arbitrary
    return TxInput
      { _utxo = utxo'
      , _signatureScript = script
      , _sequence = sequence'}
-- | Version numbers are restricted to the 32-bit range they occupy
-- on the wire.
instance Arbitrary TxVersion where
  arbitrary = TxVersion <$> choose (0, 0xffffffff)
-- | Outpoint with an arbitrary hash and a 32-bit-bounded index.
instance Arbitrary UTXO where
  arbitrary = do
    hash <- arbitrary
    index <- TxIndex <$> choose (0, 0xffffffff)
    return UTXO
      { _outTxHash = hash
      , _outIndex = index }
-- | Values are restricted to strictly positive satoshi amounts.
instance Arbitrary Value where
  arbitrary = Satoshis <$> arbitrary `suchThat` (> 0)
| clample/lamdabtc | backend/src/BitcoinCore/Transaction/Transactions.hs | bsd-3-clause | 10,065 | 0 | 15 | 1,909 | 2,702 | 1,447 | 1,255 | 286 | 2 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE ViewPatterns #-}
-- | Build the project.
module Stack.Build
(build
,withLoadPackage
,mkBaseConfigOpts
,queryBuildInfo
,splitObjsWarning
,CabalVersionException(..))
where
import Control.Exception (Exception)
import Control.Monad
import Control.Monad.Catch (MonadMask, MonadMask)
import Control.Monad.IO.Class
import Control.Monad.Logger
import Control.Monad.Reader (MonadReader, asks)
import Control.Monad.Trans.Resource
import Control.Monad.Trans.Unlift (MonadBaseUnlift)
import Data.Aeson (Value (Object, Array), (.=), object)
import Data.Function
import qualified Data.HashMap.Strict as HM
import Data.List ((\\))
import Data.List.Extra (groupSort)
import Data.List.NonEmpty (NonEmpty(..))
import qualified Data.List.NonEmpty as NE
import qualified Data.Map as Map
import Data.Map.Strict (Map)
import Data.Monoid
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Text (Text)
import qualified Data.Text as T
import Data.Text.Encoding (decodeUtf8)
import qualified Data.Text.IO as TIO
import Data.Text.Read (decimal)
import Data.Typeable (Typeable)
import qualified Data.Vector as V
import qualified Data.Yaml as Yaml
import Network.HTTP.Client.Conduit (HasHttpManager)
import Path
import Prelude hiding (FilePath, writeFile)
import Stack.Build.ConstructPlan
import Stack.Build.Execute
import Stack.Build.Haddock
import Stack.Build.Installed
import Stack.Build.Source
import Stack.Build.Target
import Stack.Fetch as Fetch
import Stack.GhcPkg
import Stack.Package
import Stack.Types.FlagName
import Stack.Types.PackageIdentifier
import Stack.Types.PackageName
import Stack.Types.Version
import Stack.Types.Config
import Stack.Types.Build
import Stack.Types.Package
import Stack.Types.Internal
#ifdef WINDOWS
import Stack.Types.Compiler
#endif
import System.FileLock (FileLock, unlockFile)
#ifdef WINDOWS
import System.Win32.Console (setConsoleCP, setConsoleOutputCP, getConsoleCP, getConsoleOutputCP)
import qualified Control.Monad.Catch as Catch
#endif
type M env m = (MonadIO m,MonadReader env m,HasHttpManager env,HasBuildConfig env,MonadLoggerIO m,MonadBaseUnlift IO m,MonadMask m,HasLogLevel env,HasEnvConfig env,HasTerminal env)
-- | Build.
--
-- If a buildLock is passed there is an important contract here. That lock must
-- protect the snapshot, and it must be safe to unlock it if there are no further
-- modifications to the snapshot to be performed by this build.
build :: M env m
      => (Set (Path Abs File) -> IO ()) -- ^ callback after discovering all local files
      -> Maybe FileLock
      -> BuildOptsCLI
      -> m ()
build setLocalFiles mbuildLk boptsCli = fixCodePage $ do
    bopts <- asks (configBuild . getConfig)
    let profiling = boptsLibProfile bopts || boptsExeProfile bopts
    menv <- getMinimalEnvOverride
    (targets, mbp, locals, extraToBuild, sourceMap) <- loadSourceMap NeedTargets boptsCli
    -- Set local files, necessary for file watching
    stackYaml <- asks $ bcStackYaml . getBuildConfig
    liftIO $ setLocalFiles
           $ Set.insert stackYaml
           $ Set.unions
           $ map lpFiles locals
    -- Discover what is already installed so the plan only builds what
    -- is actually missing or out of date.
    (installedMap, globalDumpPkgs, snapshotDumpPkgs, localDumpPkgs) <-
        getInstalled menv
                     GetInstalledOpts
                         { getInstalledProfiling = profiling
                         , getInstalledHaddock = shouldHaddockDeps bopts }
                     sourceMap
    baseConfigOpts <- mkBaseConfigOpts boptsCli
    plan <- withLoadPackage menv $ \loadPackage ->
        constructPlan mbp baseConfigOpts locals extraToBuild localDumpPkgs loadPackage sourceMap installedMap
    -- If our work to do is all local, let someone else have a turn with the snapshot.
    -- They won't damage what's already in there.
    case (mbuildLk, allLocal plan) of
        -- NOTE: This policy is too conservative. In the future we should be able to
        -- schedule unlocking as an Action that happens after all non-local actions are
        -- complete.
        (Just lk,True) -> do $logDebug "All installs are local; releasing snapshot lock early."
                             liftIO $ unlockFile lk
        _ -> return ()
    checkCabalVersion
    warnAboutSplitObjs bopts
    warnIfExecutablesWithSameNameCouldBeOverwritten locals plan
    when (boptsPreFetch bopts) $
        preFetch plan
    -- A dry run only prints the plan; otherwise execute it.
    if boptsCLIDryrun boptsCli
        then printPlan plan
        else executePlan menv boptsCli baseConfigOpts locals
                         globalDumpPkgs
                         snapshotDumpPkgs
                         localDumpPkgs
                         installedMap
                         targets
                         plan
-- | True when every task in the plan is local, i.e. nothing in the
-- plan mutates state outside of our local directory.
allLocal :: Plan -> Bool
allLocal plan =
    all (\task -> taskLocation task == Local) (Map.elems (planTasks plan))
-- | Fail when @--allow-newer@ is combined with a Cabal library older
-- than 1.22.  See https://github.com/haskell/cabal/issues/2023
checkCabalVersion :: M env m => m ()
checkCabalVersion = do
    allowNewer <- asks (configAllowNewer . getConfig)
    cabalVer <- asks (envConfigCabalVersion . getEnvConfig)
    let tooOld = cabalVer < $(mkVersion "1.22")
        msg = "Error: --allow-newer requires at least Cabal version 1.22, but version " ++
              versionString cabalVer ++
              " was found."
    when (allowNewer && tooOld) $ throwM (CabalVersionException msg)
-- | Thrown when the available Cabal library is too old for a requested
-- feature; carries the fully rendered error message.
data CabalVersionException = CabalVersionException { unCabalVersionException :: String }
    deriving (Typeable)
-- 'show' is the raw message so it prints cleanly to the user.
instance Show CabalVersionException where show = unCabalVersionException
instance Exception CabalVersionException
-- | Warn when two or more executables in the plan (or other local
-- packages) share a name, since they would overwrite each other on
-- install.  See https://github.com/commercialhaskell/stack/issues/1198.
warnIfExecutablesWithSameNameCouldBeOverwritten
    :: MonadLogger m => [LocalPackage] -> Plan -> m ()
warnIfExecutablesWithSameNameCouldBeOverwritten locals plan = do
    $logDebug "Checking if we are going to build multiple executables with the same name"
    forM_ (Map.toList warnings) $ \(exe,(toBuild,otherLocals)) -> do
        let exe_s
                | length toBuild > 1 = "several executables with the same name:"
                | otherwise = "executable"
            exesText pkgs =
                T.intercalate
                    ", "
                    ["'" <> packageNameText p <> ":" <> exe <> "'" | p <- pkgs]
        ($logWarn . T.unlines . concat)
            [ [ "Building " <> exe_s <> " " <> exesText toBuild <> "." ]
            , [ "Only one of them will be available via 'stack exec' or locally installed."
              | length toBuild > 1
              ]
            , [ "Other executables with the same name might be overwritten: " <>
                exesText otherLocals <> "."
              | not (null otherLocals)
              ]
            ]
  where
    -- Cases of several local packages having executables with the same name.
    -- The Map entries have the following form:
    --
    -- executable name: ( package names for executables that are being built
    --                  , package names for other local packages that have an
    --                    executable with the same name
    --                  )
    warnings :: Map Text ([PackageName],[PackageName])
    warnings =
        Map.mapMaybe
            (\(pkgsToBuild,localPkgs) ->
                case (pkgsToBuild,NE.toList localPkgs \\ NE.toList pkgsToBuild) of
                    (_ :| [],[]) ->
                        -- We want to build the executable of single local package
                        -- and there are no other local packages with an executable of
                        -- the same name. Nothing to warn about, ignore.
                        Nothing
                    (_,otherLocals) ->
                        -- We could be here for two reasons (or their combination):
                        -- 1) We are building two or more executables with the same
                        --    name that will end up overwriting each other.
                        -- 2) In addition to the executable(s) that we want to build
                        --    there are other local packages with an executable of the
                        --    same name that might get overwritten.
                        -- Both cases warrant a warning.
                        Just (NE.toList pkgsToBuild,otherLocals))
            (Map.intersectionWith (,) exesToBuild localExes)
    -- Executables the current plan will actually build, keyed by name.
    exesToBuild :: Map Text (NonEmpty PackageName)
    exesToBuild =
        collect
            [ (exe,pkgName)
            | (pkgName,task) <- Map.toList (planTasks plan)
            , isLocal task
            , exe <- (Set.toList . exeComponents . lpComponents . taskLP) task
            ]
      where
        isLocal Task{taskType = (TTLocal _)} = True
        isLocal _ = False
        taskLP Task{taskType = (TTLocal lp)} = lp
        taskLP _ = error "warnIfExecutablesWithSameNameCouldBeOverwritten/taskLP: task isn't local"
    -- Every executable declared by any local package, keyed by name.
    localExes :: Map Text (NonEmpty PackageName)
    localExes =
        collect
            [ (exe,packageName pkg)
            | pkg <- map lpPackage locals
            , exe <- Set.toList (packageExes pkg)
            ]
    -- Group an association list into a map of non-empty value groups.
    collect :: Ord k => [(k,v)] -> Map k (NonEmpty v)
    collect = Map.map NE.fromList . Map.fromDistinctAscList . groupSort
-- | Emit a warning when the experimental @--split-objs@ flag is enabled.
warnAboutSplitObjs :: MonadLogger m => BuildOpts -> m ()
warnAboutSplitObjs bopts
    | boptsSplitObjs bopts =
        $logWarn $ "Building with --split-objs is enabled. " <> T.pack splitObjsWarning
    | otherwise = return ()
-- | Shared warning text describing the caveats of the experimental
-- @--split-objs@ mode (appended to log messages elsewhere).
splitObjsWarning :: String
splitObjsWarning = unwords
    [ "Note that this feature is EXPERIMENTAL, and its behavior may be changed and improved."
    , "You will need to clean your workdirs before use. If you want to compile all dependencies with split-objs,"
    , "you will need to delete the snapshot (and all snapshots that could reference that snapshot)."
    ]
-- | Get the @BaseConfigOpts@ necessary for constructing configure options:
-- gather the snapshot/local package databases and install roots from the
-- environment and pair them with the build options.
mkBaseConfigOpts :: (MonadIO m, MonadReader env m, HasEnvConfig env, MonadThrow m)
                 => BuildOptsCLI -> m BaseConfigOpts
mkBaseConfigOpts boptsCli = do
    bopts <- asks (configBuild . getConfig)
    snapDBPath <- packageDatabaseDeps
    localDBPath <- packageDatabaseLocal
    snapInstallRoot <- installationRootDeps
    localInstallRoot <- installationRootLocal
    packageExtraDBs <- packageDatabaseExtra
    return BaseConfigOpts
        { bcoSnapDB = snapDBPath
        , bcoLocalDB = localDBPath
        , bcoSnapInstallRoot = snapInstallRoot
        , bcoLocalInstallRoot = localInstallRoot
        , bcoBuildOpts = bopts
        , bcoBuildOptsCLI = boptsCli
        , bcoExtraDBs = packageExtraDBs
        }
-- | Provide a function for loading package information from the package
-- index.  The continuation receives a loader that, given a package
-- name/version plus flags and GHC options, fetches and parses the
-- corresponding cabal file into a 'Package'.
withLoadPackage :: ( MonadIO m
                   , HasHttpManager env
                   , MonadReader env m
                   , MonadBaseUnlift IO m
                   , MonadMask m
                   , MonadLogger m
                   , HasEnvConfig env)
                => EnvOverride
                -> ((PackageName -> Version -> Map FlagName Bool -> [Text] -> IO Package) -> m a)
                -> m a
withLoadPackage menv inner = do
    econfig <- asks getEnvConfig
    withCabalLoader menv $ \cabalLoader ->
        inner $ \name version flags ghcOptions -> do
            bs <- cabalLoader $ PackageIdentifier name version
            -- Intentionally ignore warnings, as it's not really
            -- appropriate to print a bunch of warnings out while
            -- resolving the package index.
            (_warnings,pkg) <- readPackageBS (depPackageConfig econfig flags ghcOptions) bs
            return pkg
  where
    -- | Package config to be used for dependencies: tests and benchmarks
    -- are never built for dependencies.
    depPackageConfig :: EnvConfig -> Map FlagName Bool -> [Text] -> PackageConfig
    depPackageConfig econfig flags ghcOptions = PackageConfig
        { packageConfigEnableTests = False
        , packageConfigEnableBenchmarks = False
        , packageConfigFlags = flags
        , packageConfigGhcOptions = ghcOptions
        , packageConfigCompilerVersion = envConfigCompilerVersion econfig
        , packageConfigPlatform = configPlatform (getConfig econfig)
        }
-- | Set the code page for this process as necessary. Only applies to Windows.
-- See: https://github.com/commercialhaskell/stack/issues/738
#ifdef WINDOWS
fixCodePage :: M env m => m a -> m a
fixCodePage inner = do
    mcp <- asks $ configModifyCodePage . getConfig
    ec <- asks getEnvConfig
    if mcp && getGhcVersion (envConfigCompilerVersion ec) < $(mkVersion "7.10.3")
        then fixCodePage'
            -- GHC >=7.10.3 doesn't need this code page hack.
        else inner
  where
    fixCodePage' = do
        origCPI <- liftIO getConsoleCP
        origCPO <- liftIO getConsoleOutputCP
        let setInput = origCPI /= expected
            setOutput = origCPO /= expected
            fixInput
                | setInput = Catch.bracket_
                    (liftIO $ do
                        setConsoleCP expected)
                    (liftIO $ setConsoleCP origCPI)
                | otherwise = id
            fixOutput
                -- Bug fix: this guard previously tested setInput, which
                -- skipped changing/restoring the *output* code page when
                -- only the output differed, and needlessly toggled it
                -- when only the input differed.
                | setOutput = Catch.bracket_
                    (liftIO $ do
                        setConsoleOutputCP expected)
                    (liftIO $ setConsoleOutputCP origCPO)
                | otherwise = id
        case (setInput, setOutput) of
            (False, False) -> return ()
            (True, True) -> warn ""
            (True, False) -> warn " input"
            (False, True) -> warn " output"
        fixInput $ fixOutput inner
    expected = 65001 -- UTF-8
    warn typ = $logInfo $ T.concat
        [ "Setting"
        , typ
        , " codepage to UTF-8 (65001) to ensure correct output from GHC"
        ]
#else
-- | No-op on non-Windows platforms.
fixCodePage :: a -> a
fixCodePage = id
#endif
-- | Query information about the build and print the result to stdout in YAML format.
-- Each selector drills one level into the JSON value: a key for objects,
-- a decimal index for arrays.
queryBuildInfo :: M env m
               => [Text] -- ^ selectors
               -> m ()
queryBuildInfo selectors0 =
        rawBuildInfo
    >>= select id selectors0
    >>= liftIO . TIO.putStrLn . decodeUtf8 . Yaml.encode
  where
    select _ [] value = return value
    select front (sel:sels) value =
        case value of
            Object o ->
                case HM.lookup sel o of
                    Nothing -> err "Selector not found"
                    Just value' -> cont value'
            Array v ->
                case decimal sel of
                    Right (i, "")
                        | i >= 0 && i < V.length v -> cont $ v V.! i
                        | otherwise -> err "Index out of range"
                    _ -> err "Encountered array and needed numeric selector"
            _ -> err $ "Cannot apply selector to " ++ show value
      where
        cont = select (front . (sel:)) sels
        -- 'front' tracks the path consumed so far for error reporting.
        err msg = error $ msg ++ ": " ++ show (front [sel])
-- | Get the raw build information object: currently a single "locals"
-- object mapping each local package name to its version and path.
rawBuildInfo :: M env m => m Value
rawBuildInfo = do
    (_, _mbp, locals, _extraToBuild, _sourceMap) <- loadSourceMap NeedTargets defaultBuildOptsCLI
    return $ object
        [ "locals" .= Object (HM.fromList $ map localToPair locals)
        ]
  where
    localToPair lp =
        (T.pack $ packageNameString $ packageName p, value)
      where
        p = lpPackage lp
        value = object
            [ "version" .= packageVersion p
            , "path" .= toFilePath (lpDir lp)
            ]
| AndrewRademacher/stack | src/Stack/Build.hs | bsd-3-clause | 16,147 | 0 | 20 | 5,022 | 3,238 | 1,725 | 1,513 | 264 | 6 |
-- | Compatibility shim: re-exports the public contract types module
-- under the historical @Interface@ name.
module BrownPLT.JavaScript.Contracts.Interface
  ( module BrownPLT.JavaScript.Contracts.Types
  ) where
import BrownPLT.JavaScript.Contracts.Types
| brownplt/javascript-contracts | src/BrownPLT/JavaScript/Contracts/Interface.hs | bsd-3-clause | 148 | 0 | 5 | 14 | 25 | 18 | 7 | 3 | 0 |
-- Module : Network.AWS.StorageGateway
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | AWS Storage Gateway is a service that connects an on-premises software
-- appliance with cloud-based storage to provide seamless and secure integration
-- between your on-premises IT environment and AWS's storage infrastructure.
module Network.AWS.StorageGateway
( module Network.AWS.StorageGateway.ActivateGateway
, module Network.AWS.StorageGateway.AddCache
, module Network.AWS.StorageGateway.AddUploadBuffer
, module Network.AWS.StorageGateway.AddWorkingStorage
, module Network.AWS.StorageGateway.CancelArchival
, module Network.AWS.StorageGateway.CancelRetrieval
, module Network.AWS.StorageGateway.CreateCachediSCSIVolume
, module Network.AWS.StorageGateway.CreateSnapshot
, module Network.AWS.StorageGateway.CreateSnapshotFromVolumeRecoveryPoint
, module Network.AWS.StorageGateway.CreateStorediSCSIVolume
, module Network.AWS.StorageGateway.CreateTapes
, module Network.AWS.StorageGateway.DeleteBandwidthRateLimit
, module Network.AWS.StorageGateway.DeleteChapCredentials
, module Network.AWS.StorageGateway.DeleteGateway
, module Network.AWS.StorageGateway.DeleteSnapshotSchedule
, module Network.AWS.StorageGateway.DeleteTape
, module Network.AWS.StorageGateway.DeleteTapeArchive
, module Network.AWS.StorageGateway.DeleteVolume
, module Network.AWS.StorageGateway.DescribeBandwidthRateLimit
, module Network.AWS.StorageGateway.DescribeCache
, module Network.AWS.StorageGateway.DescribeCachediSCSIVolumes
, module Network.AWS.StorageGateway.DescribeChapCredentials
, module Network.AWS.StorageGateway.DescribeGatewayInformation
, module Network.AWS.StorageGateway.DescribeMaintenanceStartTime
, module Network.AWS.StorageGateway.DescribeSnapshotSchedule
, module Network.AWS.StorageGateway.DescribeStorediSCSIVolumes
, module Network.AWS.StorageGateway.DescribeTapeArchives
, module Network.AWS.StorageGateway.DescribeTapeRecoveryPoints
, module Network.AWS.StorageGateway.DescribeTapes
, module Network.AWS.StorageGateway.DescribeUploadBuffer
, module Network.AWS.StorageGateway.DescribeVTLDevices
, module Network.AWS.StorageGateway.DescribeWorkingStorage
, module Network.AWS.StorageGateway.DisableGateway
, module Network.AWS.StorageGateway.ListGateways
, module Network.AWS.StorageGateway.ListLocalDisks
, module Network.AWS.StorageGateway.ListVolumeRecoveryPoints
, module Network.AWS.StorageGateway.ListVolumes
, module Network.AWS.StorageGateway.ResetCache
, module Network.AWS.StorageGateway.RetrieveTapeArchive
, module Network.AWS.StorageGateway.RetrieveTapeRecoveryPoint
, module Network.AWS.StorageGateway.ShutdownGateway
, module Network.AWS.StorageGateway.StartGateway
, module Network.AWS.StorageGateway.Types
, module Network.AWS.StorageGateway.UpdateBandwidthRateLimit
, module Network.AWS.StorageGateway.UpdateChapCredentials
, module Network.AWS.StorageGateway.UpdateGatewayInformation
, module Network.AWS.StorageGateway.UpdateGatewaySoftwareNow
, module Network.AWS.StorageGateway.UpdateMaintenanceStartTime
, module Network.AWS.StorageGateway.UpdateSnapshotSchedule
, module Network.AWS.StorageGateway.UpdateVTLDeviceType
) where
import Network.AWS.StorageGateway.ActivateGateway
import Network.AWS.StorageGateway.AddCache
import Network.AWS.StorageGateway.AddUploadBuffer
import Network.AWS.StorageGateway.AddWorkingStorage
import Network.AWS.StorageGateway.CancelArchival
import Network.AWS.StorageGateway.CancelRetrieval
import Network.AWS.StorageGateway.CreateCachediSCSIVolume
import Network.AWS.StorageGateway.CreateSnapshot
import Network.AWS.StorageGateway.CreateSnapshotFromVolumeRecoveryPoint
import Network.AWS.StorageGateway.CreateStorediSCSIVolume
import Network.AWS.StorageGateway.CreateTapes
import Network.AWS.StorageGateway.DeleteBandwidthRateLimit
import Network.AWS.StorageGateway.DeleteChapCredentials
import Network.AWS.StorageGateway.DeleteGateway
import Network.AWS.StorageGateway.DeleteSnapshotSchedule
import Network.AWS.StorageGateway.DeleteTape
import Network.AWS.StorageGateway.DeleteTapeArchive
import Network.AWS.StorageGateway.DeleteVolume
import Network.AWS.StorageGateway.DescribeBandwidthRateLimit
import Network.AWS.StorageGateway.DescribeCache
import Network.AWS.StorageGateway.DescribeCachediSCSIVolumes
import Network.AWS.StorageGateway.DescribeChapCredentials
import Network.AWS.StorageGateway.DescribeGatewayInformation
import Network.AWS.StorageGateway.DescribeMaintenanceStartTime
import Network.AWS.StorageGateway.DescribeSnapshotSchedule
import Network.AWS.StorageGateway.DescribeStorediSCSIVolumes
import Network.AWS.StorageGateway.DescribeTapeArchives
import Network.AWS.StorageGateway.DescribeTapeRecoveryPoints
import Network.AWS.StorageGateway.DescribeTapes
import Network.AWS.StorageGateway.DescribeUploadBuffer
import Network.AWS.StorageGateway.DescribeVTLDevices
import Network.AWS.StorageGateway.DescribeWorkingStorage
import Network.AWS.StorageGateway.DisableGateway
import Network.AWS.StorageGateway.ListGateways
import Network.AWS.StorageGateway.ListLocalDisks
import Network.AWS.StorageGateway.ListVolumeRecoveryPoints
import Network.AWS.StorageGateway.ListVolumes
import Network.AWS.StorageGateway.ResetCache
import Network.AWS.StorageGateway.RetrieveTapeArchive
import Network.AWS.StorageGateway.RetrieveTapeRecoveryPoint
import Network.AWS.StorageGateway.ShutdownGateway
import Network.AWS.StorageGateway.StartGateway
import Network.AWS.StorageGateway.Types
import Network.AWS.StorageGateway.UpdateBandwidthRateLimit
import Network.AWS.StorageGateway.UpdateChapCredentials
import Network.AWS.StorageGateway.UpdateGatewayInformation
import Network.AWS.StorageGateway.UpdateGatewaySoftwareNow
import Network.AWS.StorageGateway.UpdateMaintenanceStartTime
import Network.AWS.StorageGateway.UpdateSnapshotSchedule
import Network.AWS.StorageGateway.UpdateVTLDeviceType
| romanb/amazonka | amazonka-storagegateway/gen/Network/AWS/StorageGateway.hs | mpl-2.0 | 6,542 | 0 | 5 | 630 | 773 | 570 | 203 | 101 | 0 |
{-# LANGUAGE TemplateHaskell #-}
{-| Lenses for Ganeti config objects
-}
{-
Copyright (C) 2014 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.Objects.Lens where
import qualified Data.Set as Set
import System.Time (ClockTime(..))
import Ganeti.Lens (makeCustomLenses, Lens')
import Ganeti.Objects
-- | Class of objects that have timestamps, exposing a lens on the
-- modification time.
class TimeStampObject a => TimeStampObjectL a where
  mTimeL :: Lens' a ClockTime
-- | Class of objects that have an UUID, exposing a lens on it.
class UuidObject a => UuidObjectL a where
  uuidL :: Lens' a String
-- | Class of objects that have a serial number, exposing a lens on it.
class SerialNoObject a => SerialNoObjectL a where
  serialL :: Lens' a Int
-- | Class of objects that have tags, exposing a lens on the tag set.
class TagsObject a => TagsObjectL a where
  tagsL :: Lens' a (Set.Set String)
-- Generated lenses and lens-class instances for the config object types.
-- Each instance simply points the class lens at the Template-Haskell
-- generated field lens of the corresponding record.
$(makeCustomLenses ''AddressPool)
$(makeCustomLenses ''Network)
-- Network instances.
instance SerialNoObjectL Network where
  serialL = networkSerialL
instance TagsObjectL Network where
  tagsL = networkTagsL
instance UuidObjectL Network where
  uuidL = networkUuidL
instance TimeStampObjectL Network where
  mTimeL = networkMtimeL
$(makeCustomLenses ''PartialNic)
$(makeCustomLenses ''Disk)
$(makeCustomLenses ''Instance)
-- Instance instances.
instance TimeStampObjectL Instance where
  mTimeL = instMtimeL
instance UuidObjectL Instance where
  uuidL = instUuidL
instance SerialNoObjectL Instance where
  serialL = instSerialL
instance TagsObjectL Instance where
  tagsL = instTagsL
$(makeCustomLenses ''MinMaxISpecs)
$(makeCustomLenses ''PartialIPolicy)
$(makeCustomLenses ''FilledIPolicy)
$(makeCustomLenses ''Node)
-- Node instances.
instance TimeStampObjectL Node where
  mTimeL = nodeMtimeL
instance UuidObjectL Node where
  uuidL = nodeUuidL
instance SerialNoObjectL Node where
  serialL = nodeSerialL
instance TagsObjectL Node where
  tagsL = nodeTagsL
$(makeCustomLenses ''NodeGroup)
-- NodeGroup instances.
instance TimeStampObjectL NodeGroup where
  mTimeL = groupMtimeL
instance UuidObjectL NodeGroup where
  uuidL = groupUuidL
instance SerialNoObjectL NodeGroup where
  serialL = groupSerialL
instance TagsObjectL NodeGroup where
  tagsL = groupTagsL
$(makeCustomLenses ''Cluster)
-- Cluster instances.
instance TimeStampObjectL Cluster where
  mTimeL = clusterMtimeL
instance UuidObjectL Cluster where
  uuidL = clusterUuidL
instance SerialNoObjectL Cluster where
  serialL = clusterSerialL
instance TagsObjectL Cluster where
  tagsL = clusterTagsL
$(makeCustomLenses ''ConfigData)
-- ConfigData instances (no UUID or tags on the top-level config).
instance SerialNoObjectL ConfigData where
  serialL = configSerialL
instance TimeStampObjectL ConfigData where
  mTimeL = configMtimeL
| apyrgio/ganeti | src/Ganeti/Objects/Lens.hs | bsd-2-clause | 3,779 | 0 | 10 | 587 | 596 | 304 | 292 | 70 | 0 |
module List2 where
import FFI
import Prelude hiding (take)
-- | Entry point: prints (via 'showList') the first five elements of the
-- self-referential sequence @ns = 1 : map' (foo 123) ns@.
main :: Fay ()
main = putStrLn (showList (take 5 (let ns = 1 : map' (foo 123) ns in ns)))
-- | Half of the product of the two arguments.
foo :: Double -> Double -> Double
foo x y = halve (x * y)
  where halve p = p / 2
-- | Local replacement for 'Prelude.take' (which this module hides).
-- Made total: the original crashed with a pattern-match failure on
-- @take n []@ for n > 0 and misbehaved on negative counts; this version
-- matches Prelude semantics (short lists are returned whole, n <= 0
-- yields []).  The call site in 'main' (infinite list) is unaffected.
take :: Int -> [a] -> [a]
take n _ | n <= 0 = []
take _ [] = []
take n (x:xs) = x : take (n - 1) xs
-- | Local map: apply @f@ to every element of the list.
map' :: (a -> b) -> [a] -> [b]
map' f = go
  where
    go []     = []
    go (y:ys) = f y : go ys
-- | JSON-encode a list of numbers; a Fay FFI binding to
-- @JSON.stringify@ in the JavaScript runtime.
showList :: [Double] -> String
showList = ffi "JSON.stringify(%1)"
| fpco/fay | tests/List2.hs | bsd-3-clause | 460 | 0 | 18 | 138 | 261 | 138 | 123 | 15 | 1 |
{-|
Module : Idris.ProofSearch
Description : Searches current context for proofs'
Copyright :
License : BSD3
Maintainer : The Idris Community.
-}
{-# LANGUAGE PatternGuards #-}
module Idris.ProofSearch(
trivial
, trivialHoles
, proofSearch
, resolveTC
) where
import Idris.Core.Elaborate hiding (Tactic(..))
import Idris.Core.TT
import Idris.Core.Unify
import Idris.Core.Evaluate
import Idris.Core.CaseTree
import Idris.Core.Typecheck
import Idris.AbsSyntax
import Idris.Delaborate
import Idris.Error
import Control.Applicative ((<$>))
import Control.Monad
import Control.Monad.State.Strict
import qualified Data.Set as S
import Data.List
import Debug.Trace
-- Pass in a term elaborator to avoid a cyclic dependency with ElabTerm
-- | Try to solve the goal 'trivially': by reflexivity or by any binding
-- in the local environment ('trivialHoles' with no name/hole filters).
trivial :: (PTerm -> ElabD ()) -> IState -> ElabD ()
trivial = trivialHoles [] []
-- | Try to close the goal by first attempting reflexivity ('eqCon') and,
-- failing that, by trying each binding of the local environment whose
-- type is free of hole references (per 'holesOK').  @psnames@, when
-- non-empty, restricts which environment names may be used.
trivialHoles :: [Name] -> -- user visible names, when working
                          -- in interactive mode
                [(Name, Int)] -> (PTerm -> ElabD ()) -> IState -> ElabD ()
trivialHoles psnames ok elab ist
                = try' (do elab (PApp (fileFC "prf") (PRef (fileFC "prf") [] eqCon) [pimp (sUN "A") Placeholder False, pimp (sUN "x") Placeholder False])
                           return ())
                       (do env <- get_env
                           g <- goal
                           tryAll env
                           return ()) True
      where
        -- Walk the environment, trying each binding in turn.
        tryAll [] = fail "No trivial solution"
        tryAll ((x, b):xs)
           = do -- if type of x has any holes in it, move on
                hs <- get_holes
                let badhs = hs -- filter (flip notElem holesOK) hs
                g <- goal
                -- anywhere but the top is okay for a hole, if holesOK set
                if -- all (\n -> not (n `elem` badhs)) (freeNames (binderTy b))
                   (holesOK hs (binderTy b) && (null psnames || x `elem` psnames))
                   then try' (elab (PRef (fileFC "prf") [] x))
                             (tryAll xs) True
                   else tryAll xs
        -- A type is usable if it mentions no current holes, except at
        -- the argument positions whitelisted in @ok@.
        holesOK hs ap@(App _ _ _)
           | (P _ n _, args) <- unApply ap
                = holeArgsOK hs n 0 args
        holesOK hs (App _ f a) = holesOK hs f && holesOK hs a
        holesOK hs (P _ n _) = not (n `elem` hs)
        holesOK hs (Bind n b sc) = holesOK hs (binderTy b) &&
                                   holesOK hs sc
        holesOK hs _ = True
        holeArgsOK hs n p [] = True
        holeArgsOK hs n p (a : as)
           | (n, p) `elem` ok = holeArgsOK hs n (p + 1) as
           | otherwise = holesOK hs a && holeArgsOK hs n (p + 1) as
-- | Like 'trivialHoles', but an environment binding is only tried when
-- its (normalised) return type is headed by a type class (per 'tcArg').
-- NOTE(review): the hole-checking helpers duplicate those in
-- 'trivialHoles'; keep the two in sync if either changes.
trivialTCs :: [(Name, Int)] -> (PTerm -> ElabD ()) -> IState -> ElabD ()
trivialTCs ok elab ist
                = try' (do elab (PApp (fileFC "prf") (PRef (fileFC "prf") [] eqCon) [pimp (sUN "A") Placeholder False, pimp (sUN "x") Placeholder False])
                           return ())
                       (do env <- get_env
                           g <- goal
                           tryAll env
                           return ()) True
      where
        tryAll [] = fail "No trivial solution"
        tryAll ((x, b):xs)
           = do -- if type of x has any holes in it, move on
                hs <- get_holes
                let badhs = hs -- filter (flip notElem holesOK) hs
                g <- goal
                env <- get_env
                -- anywhere but the top is okay for a hole, if holesOK set
                if -- all (\n -> not (n `elem` badhs)) (freeNames (binderTy b))
                   (holesOK hs (binderTy b) && tcArg env (binderTy b))
                   then try' (elab (PRef (fileFC "prf") [] x))
                             (tryAll xs) True
                   else tryAll xs
        -- True when the normalised return type is an application of a
        -- name registered as a type class.
        tcArg env ty
           | (P _ n _, args) <- unApply (getRetTy (normalise (tt_ctxt ist) env ty))
              = case lookupCtxtExact n (idris_classes ist) of
                     Just _ -> True
                     _ -> False
           | otherwise = False
        holesOK hs ap@(App _ _ _)
           | (P _ n _, args) <- unApply ap
                = holeArgsOK hs n 0 args
        holesOK hs (App _ f a) = holesOK hs f && holesOK hs a
        holesOK hs (P _ n _) = not (n `elem` hs)
        holesOK hs (Bind n b sc) = holesOK hs (binderTy b) &&
                                   holesOK hs sc
        holesOK hs _ = True
        holeArgsOK hs n p [] = True
        holeArgsOK hs n p (a : as)
           | (n, p) `elem` ok = holeArgsOK hs n (p + 1) as
           | otherwise = holesOK hs a && holeArgsOK hs n (p + 1) as
-- | Abort elaboration, reporting the current goal together with the
-- local context (each binder reduced to its type).
cantSolveGoal :: ElabD a
cantSolveGoal = do
    g <- goal
    env <- get_env
    let ctxt = [ (n, binderTy b) | (n, b) <- env ]
    lift . tfail $ CantSolveGoal g ctxt
-- | Search for a proof of the current goal.  This first clause handles
-- the non-recursive case (plain 'refine' with a single hint name):
-- every overloading of the name is tried in turn, and any holes the
-- application leaves open become new metavariables (or an error when
-- invoked from the interactive prover).
proofSearch :: Bool -- ^ recursive search (False for 'refine')
            -> Bool -- ^ invoked from a tactic proof. If so, making new metavariables is meaningless, and there should be an error reported instead.
            -> Bool -- ^ ambiguity ok
            -> Bool -- ^ defer on failure
            -> Int -- ^ maximum depth
            -> (PTerm -> ElabD ())
            -> Maybe Name
            -> Name
            -> [Name]
            -> [Name]
            -> IState
            -> ElabD ()
proofSearch False fromProver ambigok deferonfail depth elab _ nroot psnames [fn] ist
  = do -- get all possible versions of the name, take the first one that
       -- works
       let all_imps = lookupCtxtName fn (idris_implicits ist)
       tryAllFns all_imps
  where
    -- if nothing worked, make a new metavariable
    tryAllFns [] | fromProver = cantSolveGoal
    tryAllFns [] = do attack; defer [] nroot; solve
    tryAllFns (f : fs) = try' (tryFn f) (tryAllFns fs) True
    -- Apply one candidate; each hole the application leaves behind is
    -- deferred as a fresh metavariable (unless in the prover).
    tryFn (f, args) = do let imps = map isImp args
                         ps <- get_probs
                         hs <- get_holes
                         args <- map snd <$> try' (apply (Var f) imps)
                                                  (match_apply (Var f) imps) True
                         ps' <- get_probs
                         -- when (length ps < length ps') $ fail "Can't apply constructor"
                         -- Make metavariables for new holes
                         hs' <- get_holes
                         ptm <- get_term
                         if fromProver then cantSolveGoal
                           else do
                             mapM_ (\ h -> do focus h
                                              attack; defer [] nroot; solve)
                                   (hs' \\ hs)
                             -- (filter (\ (x, y) -> not x) (zip (map fst imps) args))
                             solve
    isImp (PImp p _ _ _ _) = (True, p)
    isImp arg = (True, priority arg) -- try to get all of them by unification
-- Recursive proof search proper: try trivial solutions and interface
-- resolution first, then constructors of the goal's datatype, then local
-- variables, bounded by 'maxDepth'.  Tracks the types already attempted on
-- the current branch to cut cycles.
proofSearch rec fromProver ambigok deferonfail maxDepth elab fn nroot psnames hints ist
  = do compute
       ty <- goal
       hs <- get_holes
       env <- get_env
       tm <- get_term
       argsok <- conArgsOK ty
       if ambigok || argsok then
          case lookupCtxt nroot (idris_tyinfodata ist) of
               [TISolution ts] -> findInferredTy ts
               _ -> if ambigok then psRec rec maxDepth [] S.empty
                       -- postpone if it fails early in elaboration
                       else handleError cantsolve
                               (psRec rec maxDepth [] S.empty)
                               (autoArg (sUN "auto"))
          else autoArg (sUN "auto") -- not enough info in the type yet
  where
    findInferredTy (t : _) = elab (delab ist (toUN t))
    -- Errors that mean "keep searching / postpone" rather than "report".
    cantsolve (InternalMsg _) = True
    cantsolve (CantSolveGoal _ _) = True
    cantsolve (IncompleteTerm _) = True
    cantsolve (At _ e) = cantsolve e
    cantsolve (Elaborating _ _ _ e) = cantsolve e
    cantsolve (ElaboratingArg _ _ _ e) = cantsolve e
    cantsolve err = False
    -- Is the goal's head a datatype (or local/TType) with enough concrete
    -- argument information that constructor search is worthwhile?
    conArgsOK ty
       = let (f, as) = unApply ty in
         case f of
              P _ n _ ->
                 let autohints = case lookupCtxtExact n (idris_autohints ist) of
                                      Nothing -> []
                                      Just hs -> hs in
                     case lookupCtxtExact n (idris_datatypes ist) of
                          Just t -> do rs <- mapM (conReady as)
                                                  (autohints ++ con_names t)
                                       return (and rs)
                          Nothing -> -- local variable, go for it
                                     return True
              TType _ -> return True
              _ -> typeNotSearchable ty
    conReady :: [Term] -> Name -> ElabD Bool
    conReady as n
       = case lookupTyExact n (tt_ctxt ist) of
              Just ty -> do let (_, cs) = unApply (getRetTy ty)
                            -- if any metavariables in 'as' correspond to
                            -- a constructor form in 'cs', then we're not
                            -- ready to run auto yet. Otherwise, go for it
                            hs <- get_holes
                            return $ and (map (notHole hs) (zip as cs))
              Nothing -> fail "Can't happen"
    -- if n is a metavariable, and c is a constructor form, we're not ready
    -- to run yet
    notHole hs (P _ n _, c)
       | (P _ cn _, _) <- unApply c,
         n `elem` hs && isConName cn (tt_ctxt ist) = False
       | Constant _ <- c = not (n `elem` hs)
    -- if fa is a metavariable applied to anything, we're not ready to run yet.
    notHole hs (fa, c)
       | (P _ fn _, args@(_:_)) <- unApply fa = fn `notElem` hs
    notHole _ _ = True
    inHS hs (P _ n _) = n `elem` hs
    isHS _ _ = False
    -- Strip machine-generated name wrappers before delaborating a solution.
    toUN t@(P nt (MN i n) ty)
       | ('_':xs) <- str n = t
       | otherwise = P nt (UN n) ty
    toUN (App s f a) = App s (toUN f) (toUN a)
    toUN t = t
    -- psRec counts depth and the local variable applications we're under
    -- (so we don't try a pointless application of something to itself,
    -- which obviously won't work anyway but might lead us on a wild
    -- goose chase...)
    -- Also keep track of the types we've proved so far in this branch
    -- (if we get back to one we've been to before, we're just in a cycle and
    -- that's no use)
    psRec :: Bool -> Int -> [Name] -> S.Set Type -> ElabD ()
    psRec _ 0 locs tys | fromProver = cantSolveGoal
    psRec rec 0 locs tys = do attack; defer [] nroot; solve --fail "Maximum depth reached"
    psRec False d locs tys = tryCons d locs tys hints
    psRec True d locs tys
      = do compute
           ty <- goal
           when (S.member ty tys) $ fail "Been here before"
           let tys' = S.insert ty tys
           try' (try' (trivialHoles psnames [] elab ist)
                      (resolveTC False False 20 ty nroot elab ist)
                      True)
                (try' (try' (resolveByCon (d - 1) locs tys')
                            (resolveByLocals (d - 1) locs tys')
                            True)
                      -- if all else fails, make a new metavariable
                      (if fromProver
                          then fail "cantSolveGoal"
                          else do attack; defer [] nroot; solve) True) True
    -- get recursive function name. Only user given names make sense.
    getFn d (Just f) | d < maxDepth-1 && usersname f = [f]
                     | otherwise = []
    getFn d _ = []
    usersname (UN _) = True
    usersname (NS n _) = usersname n
    usersname _ = False
    -- Search using the constructors (and auto hints) of the goal's datatype.
    resolveByCon d locs tys
        = do t <- goal
             let (f, _) = unApply t
             case f of
                P _ n _ ->
                   do let autohints = case lookupCtxtExact n (idris_autohints ist) of
                                           Nothing -> []
                                           Just hs -> hs
                      case lookupCtxtExact n (idris_datatypes ist) of
                          Just t -> do
                             let others = hints ++ con_names t ++ autohints
                             tryCons d locs tys (others ++ getFn d fn)
                          Nothing -> typeNotSearchable t
                _ -> typeNotSearchable t
    -- if there are local variables which have a function type, try
    -- applying them too
    resolveByLocals d locs tys
        = do env <- get_env
             tryLocals d locs tys env
    tryLocals d locs tys [] = fail "Locals failed"
    tryLocals d locs tys ((x, t) : xs)
       | x `elem` locs || x `notElem` psnames = tryLocals d locs tys xs
       | otherwise = try' (tryLocal d (x : locs) tys x t)
                          (tryLocals d locs tys xs) True
    tryCons d locs tys [] = fail "Constructors failed"
    tryCons d locs tys (c : cs)
        = try' (tryCon d locs tys c) (tryCons d locs tys cs) True
    -- Apply a local variable, searching recursively for each of its
    -- arguments (arity taken from its delaborated type).
    tryLocal d locs tys n t
        = do let a = getPArity (delab ist (binderTy t))
             tryLocalArg d locs tys n a
    tryLocalArg d locs tys n 0 = elab (PRef (fileFC "prf") [] n)
    tryLocalArg d locs tys n i
        = simple_app False (tryLocalArg d locs tys n (i - 1))
                           (psRec True d locs tys) "proof search local apply"
    -- Like interface resolution, but searching with constructors
    tryCon d locs tys n =
        do ty <- goal
           let imps = case lookupCtxtExact n (idris_implicits ist) of
                           Nothing -> []
                           Just args -> map isImp args
           ps <- get_probs
           hs <- get_holes
           args <- map snd <$> try' (apply (Var n) imps)
                                    (match_apply (Var n) imps) True
           ps' <- get_probs
           hs' <- get_holes
           when (length ps < length ps') $ fail "Can't apply constructor"
           let newhs = filter (\ (x, y) -> not x) (zip (map fst imps) args)
           mapM_ (\ (_, h) -> do focus h
                                 aty <- goal
                                 psRec True d locs tys) newhs
           solve
    isImp (PImp p _ _ _ _) = (True, p)
    isImp arg = (False, priority arg)
    -- Report a goal whose head is not a searchable datatype.
    typeNotSearchable ty =
      lift $ tfail $ FancyMsg $
        [TextPart "Attempted to find an element of type",
         TermPart ty,
         TextPart "using proof search, but proof search only works on datatypes with constructors."] ++
        case ty of
             (Bind _ (Pi _ _ _) _) -> [TextPart "In particular, function types are not supported."]
             _ -> []
-- | Resolve interfaces. This will only pick up 'normal'
-- implementations, never named implementations (which is enforced by
-- 'findInstances').
--
-- This is a thin wrapper: it snapshots the current holes and hands the
-- real work to 'resTC'' with an empty already-resolving stack.
resolveTC :: Bool -- ^ using default Int
          -> Bool -- ^ allow open implementations
          -> Int -- ^ depth
          -> Term -- ^ top level goal, for error messages
          -> Name -- ^ top level function name, to prevent loops
          -> (PTerm -> ElabD ()) -- ^ top level elaborator
          -> IState -> ElabD ()
resolveTC def openOK depth top fn elab ist
  = do hs <- get_holes
       resTC' [] def openOK hs depth top fn elab ist
-- Worker for 'resolveTC'.  @tcs@ accumulates the interface heads already
-- being resolved on this path (to detect loops); @topholes@ are the holes
-- of the top-level goal.  At depth 0, give up; at depth 1, only a trivial
-- solution (or a fresh shallow resolution) is attempted.
resTC' tcs def openOK topholes 0 topg fn elab ist = fail "Can't resolve interface"
resTC' tcs def openOK topholes 1 topg fn elab ist = try' (trivial elab ist) (resolveTC def False 0 topg fn elab ist) True
-- Main interface-resolution equation.  Tries open implementations first
-- (when allowed), then normal resolution: check that the determining
-- arguments are concrete, try a trivial solution, apply numeric defaulting,
-- and finally "blunderbuss" through every candidate implementation,
-- recursing on each implementation's interface constraints.
--
-- Fix: removed a leftover unconditional @trace (show t)@ in 'resolve' —
-- debug output there belongs behind the unify-log flag ('traceWhen'), as
-- every other trace in this function is.
resTC' tcs defaultOn openOK topholes depth topg fn elab ist
  = do compute
       if openOK
          then try' (resolveOpen (idris_openimpls ist))
                    resolveNormal
                    True
          else resolveNormal
  where
    -- try all the Open implementations first
    resolveOpen open = do t <- goal
                          blunderbuss t depth [] open

    resolveNormal = do
       -- Resolution can proceed only if there is something concrete in the
       -- determining argument positions. Keep track of the holes in the
       -- non-determining position, because it's okay for 'trivial' to solve
       -- those holes and no others.
       g <- goal
       let (argsok, okholePos) = case tcArgsOK g topholes of
                                      Nothing -> (False, [])
                                      Just hs -> (True, hs)
       env <- get_env
       probs <- get_probs
       if not argsok -- && not mvok)
         then lift $ tfail $ CantResolve True topg (probErr probs)
         else do
           ptm <- get_term
           ulog <- getUnifyLog
           hs <- get_holes
           env <- get_env
           t <- goal
           let (tc, ttypes) = unApply (getRetTy t)
           let okholes = case tc of
                              P _ n _ -> zip (repeat n) okholePos
                              _ -> []
           traceWhen ulog ("Resolving class " ++ show g ++ "\nin" ++ show env ++ "\n" ++ show okholes) $
             try' (trivialTCs okholes elab ist)
                  (do addDefault t tc ttypes
                      let stk = map fst (filter snd $ elab_stack ist)
                      let insts = idris_openimpls ist ++ findInstances ist t
                      blunderbuss t depth stk (stk ++ insts)) True

    -- returns Just hs if okay, where hs are holes which are okay in the
    -- goal, or Nothing if not okay to proceed
    tcArgsOK ty hs | (P _ nc _, as) <- unApply (getRetTy ty), nc == numclass && defaultOn
       = Just []
    tcArgsOK ty hs -- if any determining arguments are metavariables, postpone
       = let (f, as) = unApply (getRetTy ty) in
             case f of
                  P _ cn _ -> case lookupCtxtExact cn (idris_classes ist) of
                                   Just ci -> tcDetArgsOK 0 (class_determiners ci) hs as
                                   Nothing -> if any (isMeta hs) as
                                                 then Nothing
                                                 else Just []
                  _ -> if any (isMeta hs) as
                          then Nothing
                          else Just []

    -- return the list of argument positions which can safely be a hole
    -- or Nothing if one of the determining arguments is a hole
    tcDetArgsOK i ds hs (x : xs)
        | i `elem` ds = if isMeta hs x
                           then Nothing
                           else tcDetArgsOK (i + 1) ds hs xs
        | otherwise = do rs <- tcDetArgsOK (i + 1) ds hs xs
                         case x of
                              P _ n _ -> Just (i : rs)
                              _ -> Just rs
    tcDetArgsOK _ _ _ [] = Just []

    -- First unification error on the problem stack, if any.
    probErr [] = Msg ""
    probErr ((_,_,_,_,err,_,_) : _) = err

    isMeta :: [Name] -> Term -> Bool
    isMeta ns (P _ n _) = n `elem` ns
    isMeta _ _ = False

    notHole hs (P _ n _, c)
       | (P _ cn _, _) <- unApply (getRetTy c),
         n `elem` hs && isConName cn (tt_ctxt ist) = False
       | Constant _ <- c = not (n `elem` hs)
    notHole _ _ = True

    numclass = sNS (sUN "Num") ["Interfaces","Prelude"]

    -- Numeric defaulting: an unconstrained Num goal defaults to Integer.
    addDefault t num@(P _ nc _) [P Bound a _] | nc == numclass && defaultOn
       = do focus a
            fill (RConstant (AType (ATInt ITBig))) -- default Integer
            solve
    addDefault t f as
          | all boundVar as = return () -- True -- fail $ "Can't resolve " ++ show t
    addDefault t f a = return () -- trace (show t) $ return ()

    boundVar (P Bound _ _) = True
    boundVar _ = False

    -- Try every candidate implementation in turn, skipping the enclosing
    -- function to prevent trivial loops.
    blunderbuss t d stk [] = do ps <- get_probs
                                lift $ tfail $ CantResolve False topg (probErr ps)
    blunderbuss t d stk (n:ns)
        | n /= fn -- && (n `elem` stk)
            = tryCatch (resolve n d)
                (\e -> case e of
                            CantResolve True _ _ -> lift $ tfail e
                            _ -> blunderbuss t d stk ns)
        | otherwise = blunderbuss t d stk ns

    -- Introduce any leading Pi binders, returning how many were opened.
    introImps = do g <- goal
                   case g of
                        (Bind _ (Pi _ _ _) sc) -> do attack; intro Nothing
                                                     num <- introImps
                                                     return (num + 1)
                        _ -> return 0

    solven n = replicateM_ n solve

    -- Apply one implementation and recursively resolve the interface
    -- constraints it introduces.
    resolve n depth
       | depth == 0 = fail "Can't resolve interface"
       | otherwise
           = do lams <- introImps
                t <- goal
                let (tc, ttypes) = unApply (getRetTy t)
                -- if there's a hole in the goal, don't even try
                let imps = case lookupCtxtName n (idris_implicits ist) of
                                [] -> []
                                [args] -> map isImp (snd args) -- won't be overloaded!
                                xs -> error "The impossible happened - overloading is not expected here!"
                ps <- get_probs
                tm <- get_term
                args <- map snd <$> apply (Var n) imps
                solven lams -- close any implicit lambdas we introduced
                ps' <- get_probs
                when (length ps < length ps' || unrecoverable ps') $
                     fail "Can't apply interface"
                -- Recurse into each implicit (constraint) argument; only
                -- decrement the depth when re-resolving an interface we are
                -- already working on (loop guard via 'tcs').
                mapM_ (\ (_,n) -> do focus n
                                     t' <- goal
                                     let (tc', ttype) = unApply (getRetTy t')
                                     let got = fst (unApply (getRetTy t))
                                     let depth' = if tc' `elem` tcs
                                                     then depth - 1 else depth
                                     resTC' (got : tcs) defaultOn openOK topholes depth' topg fn elab ist)
                      (filter (\ (x, y) -> not x) (zip (map fst imps) args))
                -- if there's any arguments left, we've failed to resolve
                hs <- get_holes
                ulog <- getUnifyLog
                solve
                traceWhen ulog ("Got " ++ show n) $ return ()
       where isImp (PImp p _ _ _ _) = (True, p)
             isImp arg = (False, priority arg)
-- | Find the names of implementations that have been designated for
-- searching (i.e. non-named implementations or implementations from Elab scripts)
-- | Names of implementations of the goal's interface that are eligible for
-- search: only those flagged searchable in the interface info, and only if
-- their definitions are neither 'Hidden' nor 'Private'.
findInstances :: IState -> Term -> [Name]
findInstances ist t =
    case fst (unApply (getRetTy t)) of
      P _ n _ ->
        case lookupCtxt n (idris_classes ist) of
          [CI _ _ _ _ _ ins _] ->
            [ iname | (iname, searchable) <- ins
                    , searchable
                    , accessible iname ]
          _ -> []
      _ -> []
  where
    -- An implementation is usable unless its access is Hidden or Private.
    accessible n = case lookupDefAccExact n False (tt_ctxt ist) of
                        Just (_, Hidden) -> False
                        Just (_, Private) -> False
                        _ -> True
| tpsinnem/Idris-dev | src/Idris/ProofSearch.hs | bsd-3-clause | 23,017 | 160 | 22 | 9,737 | 6,142 | 3,222 | 2,920 | 422 | 39 |
-- NOTE(review): this module appears to be a compiler test case (its name
-- suggests it exercises dead recursion through a boxed function argument),
-- so the unusual shapes below are presumably deliberate — left unchanged.
module DeadRecursiveBoxed where

import Prelude

topEntity :: Bool -> Bool
topEntity y = f ((\x -> x) . (\y -> y)) y

-- The recursive call appears only in the second component of the pair,
-- which 'fst' immediately discards, so under lazy evaluation
-- @f x y@ reduces to @x y@ and the recursion is dead.
f x y = fst (x, f x y) y
| christiaanb/clash-compiler | tests/shouldwork/BoxedFunctions/DeadRecursiveBoxed.hs | bsd-2-clause | 143 | 0 | 9 | 35 | 78 | 43 | 35 | 5 | 1 |
{-# LANGUAGE OverloadedStrings, FlexibleInstances #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Documentation.Haddock.ParserSpec (main, spec) where
import Data.String
import qualified Documentation.Haddock.Parser as Parse
import Documentation.Haddock.Types
import Documentation.Haddock.Doc (docAppend)
import Test.Hspec
import Test.QuickCheck
-- Local '<>' for this spec: document concatenation via 'docAppend'.
infixr 6 <>

(<>) :: Doc id -> Doc id -> Doc id
(<>) = docAppend

-- Shorthand for documents without meta information, parameterised only by
-- the identifier type.
type Doc id = DocH () id

-- Orphan instance (warning suppressed above): lets string literals stand
-- for 'DocString' chunks in expected values, via OverloadedStrings.
instance IsString (Doc String) where
  fromString = DocString

-- Lifts string literals into 'Maybe', so optional labels (e.g. for
-- hyperlinks) can be written directly as literals.
instance IsString a => IsString (Maybe a) where
  fromString = Just . fromString
-- | Parse paragraphs and strip Haddock's identifier wrappers, leaving a
-- plain 'String'-identified document for the expectations to match on.
parseParas :: String -> MetaDoc () String
parseParas input = overDoc Parse.toRegular (Parse.parseParas input)
-- | Parse an inline string and strip Haddock's identifier wrappers.
parseString :: String -> Doc String
parseString input = Parse.toRegular (Parse.parseString input)
-- | Build a hyperlink document node from a URL and an optional label.
hyperlink :: String -> Maybe String -> Doc String
hyperlink url label = DocHyperlink (Hyperlink url label)
-- | Run this spec as a standalone executable via Hspec's default runner.
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "parseString" $ do
let infix 1 `shouldParseTo`
shouldParseTo :: String -> Doc String -> Expectation
shouldParseTo input ast = parseString input `shouldBe` ast
it "is total" $ do
property $ \xs ->
(length . show . parseString) xs `shouldSatisfy` (> 0)
context "when parsing text" $ do
it "can handle unicode" $ do
"灼眼のシャナ" `shouldParseTo` "灼眼のシャナ"
it "accepts numeric character references" $ do
"foo bar baz λ" `shouldParseTo` "foo bar baz λ"
it "accepts hexadecimal character references" $ do
"e" `shouldParseTo` "e"
it "allows to backslash-escape characters" $ do
property $ \x -> ['\\', x] `shouldParseTo` DocString [x]
context "when parsing strings contaning numeric character references" $ do
it "will implicitly convert digits to characters" $ do
"AAAA" `shouldParseTo` "AAAA"
"灼眼のシャナ"
`shouldParseTo` "灼眼のシャナ"
it "will implicitly convert hex encoded characters" $ do
"eeee" `shouldParseTo` "eeee"
context "when parsing identifiers" $ do
it "parses identifiers enclosed within single ticks" $ do
"'foo'" `shouldParseTo` DocIdentifier "foo"
it "parses identifiers enclosed within backticks" $ do
"`foo`" `shouldParseTo` DocIdentifier "foo"
it "parses a word with an one of the delimiters in it as DocString" $ do
"don't" `shouldParseTo` "don't"
it "doesn't pass pairs of delimiters with spaces between them" $ do
"hel'lo w'orld" `shouldParseTo` "hel'lo w'orld"
it "don't use apostrophe's in the wrong place's" $ do
" don't use apostrophe's in the wrong place's" `shouldParseTo`
"don't use apostrophe's in the wrong place's"
it "doesn't parse empty identifiers" $ do
"``" `shouldParseTo` "``"
it "can parse infix identifiers" $ do
"``infix``" `shouldParseTo` "`" <> DocIdentifier "infix" <> "`"
context "when parsing URLs" $ do
it "parses a URL" $ do
"<http://example.com/>" `shouldParseTo` hyperlink "http://example.com/" Nothing
it "accepts an optional label" $ do
"<http://example.com/ some link>" `shouldParseTo` hyperlink "http://example.com/" "some link"
it "does not accept newlines in label" $ do
"<foo bar\nbaz>" `shouldParseTo` "<foo bar\nbaz>"
-- new behaviour test, this will be now consistent with other markup
it "allows us to escape > inside the URL" $ do
"<http://examp\\>le.com>" `shouldParseTo`
hyperlink "http://examp>le.com" Nothing
"<http://exa\\>mp\\>le.com>" `shouldParseTo`
hyperlink "http://exa>mp>le.com" Nothing
-- Likewise in label
"<http://example.com f\\>oo>" `shouldParseTo`
hyperlink "http://example.com" "f>oo"
it "parses inline URLs" $ do
"foo <http://example.com/> bar" `shouldParseTo`
"foo " <> hyperlink "http://example.com/" Nothing <> " bar"
it "doesn't allow for multi-line link tags" $ do
"<ba\nz aar>" `shouldParseTo` "<ba\nz aar>"
context "when parsing markdown links" $ do
it "parses a simple link" $ do
"[some label](url)" `shouldParseTo`
hyperlink "url" "some label"
it "allows whitespace between label and URL" $ do
"[some label] \t (url)" `shouldParseTo`
hyperlink "url" "some label"
it "allows newlines in label" $ do
"[some\n\nlabel](url)" `shouldParseTo`
hyperlink "url" "some\n\nlabel"
it "allows escaping in label" $ do
"[some\\] label](url)" `shouldParseTo`
hyperlink "url" "some] label"
it "strips leading and trailing whitespace from label" $ do
"[ some label ](url)" `shouldParseTo`
hyperlink "url" "some label"
it "rejects whitespace in URL" $ do
"[some label]( url)" `shouldParseTo`
"[some label]( url)"
context "when URL is on a separate line" $ do
it "allows URL to be on a separate line" $ do
"[some label]\n(url)" `shouldParseTo`
hyperlink "url" "some label"
it "allows leading whitespace" $ do
"[some label]\n \t (url)" `shouldParseTo`
hyperlink "url" "some label"
it "rejects additional newlines" $ do
"[some label]\n\n(url)" `shouldParseTo`
"[some label]\n\n(url)"
context "when autolinking URLs" $ do
it "autolinks HTTP URLs" $ do
"http://example.com/" `shouldParseTo` hyperlink "http://example.com/" Nothing
it "autolinks HTTPS URLs" $ do
"https://www.example.com/" `shouldParseTo` hyperlink "https://www.example.com/" Nothing
it "autolinks FTP URLs" $ do
"ftp://example.com/" `shouldParseTo` hyperlink "ftp://example.com/" Nothing
it "does not include a trailing comma" $ do
"http://example.com/, Some other sentence." `shouldParseTo`
hyperlink "http://example.com/" Nothing <> ", Some other sentence."
it "does not include a trailing dot" $ do
"http://example.com/. Some other sentence." `shouldParseTo`
hyperlink "http://example.com/" Nothing <> ". Some other sentence."
it "does not include a trailing exclamation mark" $ do
"http://example.com/! Some other sentence." `shouldParseTo`
hyperlink "http://example.com/" Nothing <> "! Some other sentence."
it "does not include a trailing question mark" $ do
"http://example.com/? Some other sentence." `shouldParseTo`
hyperlink "http://example.com/" Nothing <> "? Some other sentence."
it "autolinks URLs occuring mid-sentence with multiple ‘/’s" $ do
"foo https://example.com/example bar" `shouldParseTo`
"foo " <> hyperlink "https://example.com/example" Nothing <> " bar"
context "when parsing images" $ do
let image :: String -> Maybe String -> Doc String
image uri = DocPic . Picture uri
it "accepts markdown syntax for images" $ do
"" `shouldParseTo` image "url" "label"
it "accepts Unicode" $ do
"" `shouldParseTo` image "url" "灼眼のシャナ"
it "supports deprecated picture syntax" $ do
"<<baz>>" `shouldParseTo` image "baz" Nothing
it "supports title for deprecated picture syntax" $ do
"<<b a z>>" `shouldParseTo` image "b" "a z"
context "when parsing anchors" $ do
it "parses a single word anchor" $ do
"#foo#" `shouldParseTo` DocAName "foo"
it "parses a multi word anchor" $ do
"#foo bar#" `shouldParseTo` DocAName "foo bar"
it "parses a unicode anchor" $ do
"#灼眼のシャナ#" `shouldParseTo` DocAName "灼眼のシャナ"
it "does not accept newlines in anchors" $ do
"#foo\nbar#" `shouldParseTo` "#foo\nbar#"
it "accepts anchors mid-paragraph" $ do
"Hello #someAnchor# world!"
`shouldParseTo` "Hello " <> DocAName "someAnchor" <> " world!"
it "does not accept empty anchors" $ do
"##" `shouldParseTo` "##"
context "when parsing emphasised text" $ do
it "emphasises a word on its own" $ do
"/foo/" `shouldParseTo` DocEmphasis "foo"
it "emphasises inline correctly" $ do
"foo /bar/ baz" `shouldParseTo` "foo " <> DocEmphasis "bar" <> " baz"
it "emphasises unicode" $ do
"/灼眼のシャナ/" `shouldParseTo` DocEmphasis "灼眼のシャナ"
it "does not emphasise multi-line strings" $ do
" /foo\nbar/" `shouldParseTo` "/foo\nbar/"
it "does not emphasise the empty string" $ do
"//" `shouldParseTo` "//"
it "parses escaped slashes literally" $ do
"/foo\\/bar/" `shouldParseTo` DocEmphasis "foo/bar"
it "recognizes other markup constructs within emphasised text" $ do
"/foo @bar@ baz/" `shouldParseTo`
DocEmphasis ("foo " <> DocMonospaced "bar" <> " baz")
it "allows other markup inside of emphasis" $ do
"/__inner bold__/" `shouldParseTo` DocEmphasis (DocBold "inner bold")
it "doesn't mangle inner markup unicode" $ do
"/__灼眼のシャナ A__/" `shouldParseTo` DocEmphasis (DocBold "灼眼のシャナ A")
it "properly converts HTML escape sequences" $ do
"/AAAA/" `shouldParseTo` DocEmphasis "AAAA"
it "allows to escape the emphasis delimiter inside of emphasis" $ do
"/empha\\/sis/" `shouldParseTo` DocEmphasis "empha/sis"
context "when parsing monospaced text" $ do
it "parses simple monospaced text" $ do
"@foo@" `shouldParseTo` DocMonospaced "foo"
it "parses inline monospaced text" $ do
"foo @bar@ baz" `shouldParseTo` "foo " <> DocMonospaced "bar" <> " baz"
it "allows to escape @" $ do
"@foo \\@ bar@" `shouldParseTo` DocMonospaced "foo @ bar"
it "accepts unicode" $ do
"@foo 灼眼のシャナ bar@" `shouldParseTo` DocMonospaced "foo 灼眼のシャナ bar"
it "accepts other markup in monospaced text" $ do
"@/foo/@" `shouldParseTo` DocMonospaced (DocEmphasis "foo")
it "requires the closing @" $ do
"@foo /bar/ baz" `shouldParseTo` "@foo " <> DocEmphasis "bar" <> " baz"
context "when parsing bold strings" $ do
it "allows for a bold string on its own" $ do
"__bold string__" `shouldParseTo`
DocBold "bold string"
it "bolds inline correctly" $ do
"hello __everyone__ there" `shouldParseTo`
"hello "
<> DocBold "everyone" <> " there"
it "bolds unicode" $ do
"__灼眼のシャナ__" `shouldParseTo`
DocBold "灼眼のシャナ"
it "does not do __multi-line\\n bold__" $ do
" __multi-line\n bold__" `shouldParseTo` "__multi-line\n bold__"
it "allows other markup inside of bold" $ do
"__/inner emphasis/__" `shouldParseTo`
(DocBold $ DocEmphasis "inner emphasis")
it "doesn't mangle inner markup unicode" $ do
"__/灼眼のシャナ A/__" `shouldParseTo`
(DocBold $ DocEmphasis "灼眼のシャナ A")
it "properly converts HTML escape sequences" $ do
"__AAAA__" `shouldParseTo`
DocBold "AAAA"
it "allows to escape the bold delimiter inside of bold" $ do
"__bo\\__ld__" `shouldParseTo`
DocBold "bo__ld"
it "doesn't allow for empty bold" $ do
"____" `shouldParseTo` "____"
context "when parsing module strings" $ do
it "should parse a module on its own" $ do
"\"Module\"" `shouldParseTo`
DocModule "Module"
it "should parse a module inline" $ do
"This is a \"Module\"." `shouldParseTo`
"This is a " <> DocModule "Module" <> "."
it "can accept a simple module name" $ do
"\"Hello\"" `shouldParseTo` DocModule "Hello"
it "can accept a module name with dots" $ do
"\"Hello.World\"" `shouldParseTo` DocModule "Hello.World"
it "can accept a module name with unicode" $ do
"\"Hello.Worldλ\"" `shouldParseTo` DocModule "Hello.Worldλ"
it "parses a module name with a trailing dot as regular quoted string" $ do
"\"Hello.\"" `shouldParseTo` "\"Hello.\""
it "parses a module name with a space as regular quoted string" $ do
"\"Hello World\"" `shouldParseTo` "\"Hello World\""
it "parses a module name with invalid characters as regular quoted string" $ do
"\"Hello&[{}(=*)+]!\"" `shouldParseTo` "\"Hello&[{}(=*)+]!\""
it "accepts a module name with unicode" $ do
"\"Foo.Barλ\"" `shouldParseTo` DocModule "Foo.Barλ"
it "treats empty module name as regular double quotes" $ do
"\"\"" `shouldParseTo` "\"\""
it "accepts anchor reference syntax as DocModule" $ do
"\"Foo#bar\"" `shouldParseTo` DocModule "Foo#bar"
it "accepts old anchor reference syntax as DocModule" $ do
"\"Foo\\#bar\"" `shouldParseTo` DocModule "Foo\\#bar"
describe "parseParas" $ do
let infix 1 `shouldParseTo`
shouldParseTo :: String -> Doc String -> Expectation
shouldParseTo input ast = _doc (parseParas input) `shouldBe` ast
it "is total" $ do
property $ \xs ->
(length . show . parseParas) xs `shouldSatisfy` (> 0)
context "when parsing @since" $ do
it "adds specified version to the result" $ do
parseParas "@since 0.5.0" `shouldBe`
MetaDoc { _meta = Meta { _version = Just [0,5,0] }
, _doc = DocEmpty }
it "ignores trailing whitespace" $ do
parseParas "@since 0.5.0 \t " `shouldBe`
MetaDoc { _meta = Meta { _version = Just [0,5,0] }
, _doc = DocEmpty }
it "does not allow trailing input" $ do
parseParas "@since 0.5.0 foo" `shouldBe`
MetaDoc { _meta = Meta { _version = Nothing }
, _doc = DocParagraph "@since 0.5.0 foo" }
context "when given multiple times" $ do
it "gives last occurrence precedence" $ do
(parseParas . unlines) [
"@since 0.5.0"
, "@since 0.6.0"
, "@since 0.7.0"
] `shouldBe` MetaDoc { _meta = Meta { _version = Just [0,7,0] }
, _doc = DocEmpty }
context "when parsing text paragraphs" $ do
let filterSpecial = filter (`notElem` (".(=#-[*`\v\f\n\t\r\\\"'_/@<> " :: String))
it "parses an empty paragraph" $ do
"" `shouldParseTo` DocEmpty
it "parses a simple text paragraph" $ do
"foo bar baz" `shouldParseTo` DocParagraph "foo bar baz"
it "accepts markup in text paragraphs" $ do
"foo /bar/ baz" `shouldParseTo` DocParagraph ("foo " <> DocEmphasis "bar" <> " baz")
it "preserve all regular characters" $ do
property $ \xs -> let input = filterSpecial xs in (not . null) input ==>
input `shouldParseTo` DocParagraph (DocString input)
it "separates paragraphs by empty lines" $ do
unlines [
"foo"
, " \t "
, "bar"
] `shouldParseTo` DocParagraph "foo" <> DocParagraph "bar"
context "when a pragraph only contains monospaced text" $ do
it "turns it into a code block" $ do
"@foo@" `shouldParseTo` DocCodeBlock "foo"
context "when a paragraph starts with a markdown link" $ do
it "correctly parses it as a text paragraph (not a definition list)" $ do
"[label](url)" `shouldParseTo`
DocParagraph (hyperlink "url" "label")
it "can be followed by an other paragraph" $ do
"[label](url)\n\nfoobar" `shouldParseTo`
DocParagraph (hyperlink "url" "label") <> DocParagraph "foobar"
context "when paragraph contains additional text" $ do
it "accepts more text after the link" $ do
"[label](url) foo bar baz" `shouldParseTo`
DocParagraph (hyperlink "url" "label" <> " foo bar baz")
it "accepts a newline right after the markdown link" $ do
"[label](url)\nfoo bar baz" `shouldParseTo`
DocParagraph (hyperlink "url" "label" <> " foo bar baz")
it "can be followed by an other paragraph" $ do
"[label](url)foo\n\nbar" `shouldParseTo`
DocParagraph (hyperlink "url" "label" <> "foo") <> DocParagraph "bar"
context "when parsing birdtracks" $ do
it "parses them as a code block" $ do
unlines [
">foo"
, ">bar"
, ">baz"
] `shouldParseTo` DocCodeBlock "foo\nbar\nbaz"
it "ignores leading whitespace" $ do
unlines [
" >foo"
, " \t >bar"
, " >baz"
]
`shouldParseTo` DocCodeBlock "foo\nbar\nbaz"
it "strips one leading space from each line of the block" $ do
unlines [
"> foo"
, "> bar"
, "> baz"
] `shouldParseTo` DocCodeBlock "foo\n bar\nbaz"
it "ignores empty lines when stripping spaces" $ do
unlines [
"> foo"
, ">"
, "> bar"
] `shouldParseTo` DocCodeBlock "foo\n\nbar"
context "when any non-empty line does not start with a space" $ do
it "does not strip any spaces" $ do
unlines [
">foo"
, "> bar"
] `shouldParseTo` DocCodeBlock "foo\n bar"
it "ignores nested markup" $ do
unlines [
">/foo/"
] `shouldParseTo` DocCodeBlock "/foo/"
it "treats them as regular text inside text paragraphs" $ do
unlines [
"foo"
, ">bar"
] `shouldParseTo` DocParagraph "foo\n>bar"
context "when parsing code blocks" $ do
it "accepts a simple code block" $ do
unlines [
"@"
, "foo"
, "bar"
, "baz"
, "@"
] `shouldParseTo` DocCodeBlock "foo\nbar\nbaz\n"
it "ignores trailing whitespace after the opening @" $ do
unlines [
"@ "
, "foo"
, "@"
] `shouldParseTo` DocCodeBlock "foo\n"
it "rejects code blocks that are not closed" $ do
unlines [
"@"
, "foo"
] `shouldParseTo` DocParagraph "@\nfoo"
it "accepts nested markup" $ do
unlines [
"@"
, "/foo/"
, "@"
] `shouldParseTo` DocCodeBlock (DocEmphasis "foo" <> "\n")
it "allows to escape the @" $ do
unlines [
"@"
, "foo"
, "\\@"
, "bar"
, "@"
] `shouldParseTo` DocCodeBlock "foo\n@\nbar\n"
it "accepts horizontal space before the @" $ do
unlines [ " @"
, "foo"
, ""
, "bar"
, "@"
] `shouldParseTo` DocCodeBlock "foo\n\nbar\n"
it "strips a leading space from a @ block if present" $ do
unlines [ " @"
, " hello"
, " world"
, " @"
] `shouldParseTo` DocCodeBlock "hello\nworld\n"
unlines [ " @"
, " hello"
, ""
, " world"
, " @"
] `shouldParseTo` DocCodeBlock "hello\n\nworld\n"
it "only drops whitespace if there's some before closing @" $ do
unlines [ "@"
, " Formatting"
, " matters."
, "@"
]
`shouldParseTo` DocCodeBlock " Formatting\n matters.\n"
it "accepts unicode" $ do
"@foo 灼眼のシャナ bar@" `shouldParseTo` DocCodeBlock "foo 灼眼のシャナ bar"
it "requires the closing @" $ do
"@foo /bar/ baz"
`shouldParseTo` DocParagraph ("@foo " <> DocEmphasis "bar" <> " baz")
context "when parsing examples" $ do
it "parses a simple example" $ do
">>> foo" `shouldParseTo` DocExamples [Example "foo" []]
it "parses an example with result" $ do
unlines [
">>> foo"
, "bar"
, "baz"
] `shouldParseTo` DocExamples [Example "foo" ["bar", "baz"]]
it "parses consecutive examples" $ do
unlines [
">>> fib 5"
, "5"
, ">>> fib 10"
, "55"
] `shouldParseTo` DocExamples [
Example "fib 5" ["5"]
, Example "fib 10" ["55"]
]
it ("requires an example to be separated"
++ " from a previous paragraph by an empty line") $ do
"foobar\n\n>>> fib 10\n55" `shouldParseTo`
DocParagraph "foobar"
<> DocExamples [Example "fib 10" ["55"]]
it "parses bird-tracks inside of paragraphs as plain strings" $ do
let xs = "foo\n>>> bar"
xs `shouldParseTo` DocParagraph (DocString xs)
it "skips empty lines in front of an example" $ do
"\n \n\n>>> foo" `shouldParseTo` DocExamples [Example "foo" []]
it "terminates example on empty line" $ do
unlines [
">>> foo"
, "bar"
, " "
, "baz"
]
`shouldParseTo`
DocExamples [Example "foo" ["bar"]] <> DocParagraph "baz"
it "parses a <BLANKLINE> result as an empty result" $ do
unlines [
">>> foo"
, "bar"
, "<BLANKLINE>"
, "baz"
]
`shouldParseTo` DocExamples [Example "foo" ["bar", "", "baz"]]
it "accepts unicode in examples" $ do
">>> 灼眼\nシャナ" `shouldParseTo` DocExamples [Example "灼眼" ["シャナ"]]
context "when prompt is prefixed by whitespace" $ do
it "strips the exact same amount of whitespace from result lines" $ do
unlines [
" >>> foo"
, " bar"
, " baz"
] `shouldParseTo` DocExamples [Example "foo" ["bar", "baz"]]
it "preserves additional whitespace" $ do
unlines [
" >>> foo"
, " bar"
] `shouldParseTo` DocExamples [Example "foo" [" bar"]]
it "keeps original if stripping is not possible" $ do
unlines [
" >>> foo"
, " bar"
] `shouldParseTo` DocExamples [Example "foo" [" bar"]]
context "when parsing paragraphs nested in lists" $ do
it "can nest the same type of list" $ do
"* foo\n\n * bar" `shouldParseTo`
DocUnorderedList [ DocParagraph "foo"
<> DocUnorderedList [DocParagraph "bar"]]
it "can nest another type of list inside" $ do
"* foo\n\n 1. bar" `shouldParseTo`
DocUnorderedList [ DocParagraph "foo"
<> DocOrderedList [DocParagraph "bar"]]
it "can nest a code block inside" $ do
"* foo\n\n @foo bar baz@" `shouldParseTo`
DocUnorderedList [ DocParagraph "foo"
<> DocCodeBlock "foo bar baz"]
"* foo\n\n @\n foo bar baz\n @" `shouldParseTo`
DocUnorderedList [ DocParagraph "foo"
<> DocCodeBlock "foo bar baz\n"]
it "can nest more than one level" $ do
"* foo\n\n * bar\n\n * baz\n qux" `shouldParseTo`
DocUnorderedList [ DocParagraph "foo"
<> DocUnorderedList [ DocParagraph "bar"
<> DocUnorderedList [DocParagraph "baz\nqux"]
]
]
it "won't fail on not fully indented paragraph" $ do
"* foo\n\n * bar\n\n * qux\nquux" `shouldParseTo`
DocUnorderedList [ DocParagraph "foo"
<> DocUnorderedList [ DocParagraph "bar" ]
, DocParagraph "qux\nquux"]
it "can nest definition lists" $ do
"[a]: foo\n\n [b]: bar\n\n [c]: baz\n qux" `shouldParseTo`
DocDefList [ ("a", "foo"
<> DocDefList [ ("b", "bar"
<> DocDefList [("c", "baz\nqux")])
])
]
it "can come back to top level with a different list" $ do
"* foo\n\n * bar\n\n1. baz" `shouldParseTo`
DocUnorderedList [ DocParagraph "foo"
<> DocUnorderedList [ DocParagraph "bar" ]
]
<> DocOrderedList [ DocParagraph "baz" ]
it "definition lists can come back to top level with a different list" $ do
"[foo]: foov\n\n [bar]: barv\n\n1. baz" `shouldParseTo`
DocDefList [ ("foo", "foov"
<> DocDefList [ ("bar", "barv") ])
]
<> DocOrderedList [ DocParagraph "baz" ]
it "list order is preserved in presence of nesting + extra text" $ do
"1. Foo\n\n > Some code\n\n2. Bar\n\nSome text"
`shouldParseTo`
DocOrderedList [ DocParagraph "Foo" <> DocCodeBlock "Some code"
, DocParagraph "Bar"
]
<> DocParagraph (DocString "Some text")
"1. Foo\n\n2. Bar\n\nSome text"
`shouldParseTo`
DocOrderedList [ DocParagraph "Foo"
, DocParagraph "Bar"
]
<> DocParagraph (DocString "Some text")
context "when parsing properties" $ do
it "can parse a single property" $ do
"prop> 23 == 23" `shouldParseTo` DocProperty "23 == 23"
it "can parse multiple subsequent properties" $ do
unlines [
"prop> 23 == 23"
, "prop> 42 == 42"
]
`shouldParseTo`
DocProperty "23 == 23" <> DocProperty "42 == 42"
it "accepts unicode in properties" $ do
"prop> 灼眼のシャナ ≡ 愛" `shouldParseTo`
DocProperty "灼眼のシャナ ≡ 愛"
it "can deal with whitespace before and after the prop> prompt" $ do
" prop> xs == (reverse $ reverse xs) " `shouldParseTo`
DocProperty "xs == (reverse $ reverse xs)"
context "when parsing unordered lists" $ do
it "parses a simple list" $ do
unlines [
" * one"
, " * two"
, " * three"
]
`shouldParseTo` DocUnorderedList [
DocParagraph "one"
, DocParagraph "two"
, DocParagraph "three"
]
it "ignores empty lines between list items" $ do
unlines [
"* one"
, ""
, "* two"
]
`shouldParseTo` DocUnorderedList [
DocParagraph "one"
, DocParagraph "two"
]
it "accepts an empty list item" $ do
"*" `shouldParseTo` DocUnorderedList [DocParagraph DocEmpty]
it "accepts multi-line list items" $ do
unlines [
"* point one"
, " more one"
, "* point two"
, "more two"
]
`shouldParseTo` DocUnorderedList [
DocParagraph "point one\n more one"
, DocParagraph "point two\nmore two"
]
it "accepts markup in list items" $ do
"* /foo/" `shouldParseTo` DocUnorderedList [DocParagraph (DocEmphasis "foo")]
it "requires empty lines between list and other paragraphs" $ do
unlines [
"foo"
, ""
, "* bar"
, ""
, "baz"
]
`shouldParseTo` DocParagraph "foo" <> DocUnorderedList [DocParagraph "bar"] <> DocParagraph "baz"
context "when parsing ordered lists" $ do
it "parses a simple list" $ do
unlines [
" 1. one"
, " (1) two"
, " 3. three"
]
`shouldParseTo` DocOrderedList [
DocParagraph "one"
, DocParagraph "two"
, DocParagraph "three"
]
it "ignores empty lines between list items" $ do
unlines [
"1. one"
, ""
, "2. two"
]
`shouldParseTo` DocOrderedList [
DocParagraph "one"
, DocParagraph "two"
]
it "accepts an empty list item" $ do
"1." `shouldParseTo` DocOrderedList [DocParagraph DocEmpty]
it "accepts multi-line list items" $ do
unlines [
"1. point one"
, " more one"
, "1. point two"
, "more two"
]
`shouldParseTo` DocOrderedList [
DocParagraph "point one\n more one"
, DocParagraph "point two\nmore two"
]
it "accepts markup in list items" $ do
"1. /foo/" `shouldParseTo` DocOrderedList [DocParagraph (DocEmphasis "foo")]
it "requires empty lines between list and other paragraphs" $ do
unlines [
"foo"
, ""
, "1. bar"
, ""
, "baz"
]
`shouldParseTo` DocParagraph "foo" <> DocOrderedList [DocParagraph "bar"] <> DocParagraph "baz"
context "when parsing definition lists" $ do
it "parses a simple list" $ do
unlines [
" [foo]: one"
, " [bar]: two"
, " [baz]: three"
]
`shouldParseTo` DocDefList [
("foo", "one")
, ("bar", "two")
, ("baz", "three")
]
it "ignores empty lines between list items" $ do
unlines [
"[foo]: one"
, ""
, "[bar]: two"
]
`shouldParseTo` DocDefList [
("foo", "one")
, ("bar", "two")
]
it "accepts an empty list item" $ do
"[foo]:" `shouldParseTo` DocDefList [("foo", DocEmpty)]
it "accepts multi-line list items" $ do
unlines [
"[foo]: point one"
, " more one"
, "[bar]: point two"
, "more two"
]
`shouldParseTo` DocDefList [
("foo", "point one\n more one")
, ("bar", "point two\nmore two")
]
it "accepts markup in list items" $ do
"[foo]: /foo/" `shouldParseTo` DocDefList [("foo", DocEmphasis "foo")]
it "accepts markup for the label" $ do
"[/foo/]: bar" `shouldParseTo` DocDefList [(DocEmphasis "foo", "bar")]
it "requires empty lines between list and other paragraphs" $ do
unlines [
"foo"
, ""
, "[foo]: bar"
, ""
, "baz"
]
`shouldParseTo` DocParagraph "foo" <> DocDefList [("foo", "bar")] <> DocParagraph "baz"
it "dose not require the colon (deprecated - this will be removed in a future release)" $ do
unlines [
" [foo] one"
, " [bar] two"
, " [baz] three"
]
`shouldParseTo` DocDefList [
("foo", "one")
, ("bar", "two")
, ("baz", "three")
]
context "when parsing consecutive paragraphs" $ do
it "will not capture irrelevant consecutive lists" $ do
unlines [ " * bullet"
, ""
, ""
, " - different bullet"
, ""
, ""
, " (1) ordered"
, " "
, " 2. different bullet"
, " "
, " [cat]: kitten"
, " "
, " [pineapple]: fruit"
] `shouldParseTo`
DocUnorderedList [ DocParagraph "bullet"
, DocParagraph "different bullet"]
<> DocOrderedList [ DocParagraph "ordered"
, DocParagraph "different bullet"
]
<> DocDefList [ ("cat", "kitten")
, ("pineapple", "fruit")
]
context "when parsing function documentation headers" $ do
it "can parse a simple header" $ do
"= Header 1\nHello." `shouldParseTo`
(DocHeader (Header 1 "Header 1"))
<> DocParagraph "Hello."
it "allow consecutive headers" $ do
"= Header 1\n== Header 2" `shouldParseTo`
DocHeader (Header 1 "Header 1")
<> DocHeader (Header 2 "Header 2")
it "accepts markup in the header" $ do
"= /Header/ __1__\nFoo" `shouldParseTo`
DocHeader (Header 1 (DocEmphasis "Header" <> " " <> DocBold "1"))
<> DocParagraph "Foo"
| DavidAlphaFox/ghc | utils/haddock/haddock-library/test/Documentation/Haddock/ParserSpec.hs | bsd-3-clause | 33,032 | 0 | 76 | 11,371 | 6,446 | 3,142 | 3,304 | 713 | 1 |
-- Copyright (c) 2014-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is distributed under the terms of a BSD license,
-- found in the LICENSE file. An additional grant of patent rights can
-- be found in the PATENTS file.
{-# LANGUAGE CPP #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE MultiWayIf #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE ScopedTypeVariables #-}
-- | The implementation of the 'Haxl' monad.
module Haxl.Core.Monad (
-- * The monad
GenHaxl (..), runHaxl,
env,
-- * Env
Env(..), caches, initEnvWithData, initEnv, emptyEnv,
-- * Exceptions
throw, catch, catchIf, try, tryToHaxlException,
-- * Data fetching and caching
dataFetch, uncachedRequest,
cacheRequest, cacheResult, cachedComputation,
dumpCacheAsHaskell,
-- * Unsafe operations
unsafeLiftIO, unsafeToHaxlException,
) where
import Haxl.Core.Types
import Haxl.Core.Show1
import Haxl.Core.StateStore
import Haxl.Core.Exception
import Haxl.Core.RequestStore
import Haxl.Core.Util
import Haxl.Core.DataCache as DataCache
import qualified Data.Text as Text
import Control.Exception (Exception(..), SomeException)
#if __GLASGOW_HASKELL__ >= 708
import Control.Exception (SomeAsyncException(..))
#endif
#if __GLASGOW_HASKELL__ >= 710
import Control.Exception (AllocationLimitExceeded(..))
#endif
import Control.Monad
import qualified Control.Exception as Exception
import Control.Applicative hiding (Const)
import Control.DeepSeq
import GHC.Exts (IsString(..))
#if __GLASGOW_HASKELL__ < 706
import Prelude hiding (catch)
#endif
import Data.IORef
import Data.List
import Data.Monoid
import Data.Time
import qualified Data.HashMap.Strict as HashMap
import Text.Printf
import Text.PrettyPrint hiding ((<>))
import Control.Arrow (left)
#ifdef EVENTLOG
import Control.Exception (bracket_)
import Debug.Trace (traceEventIO)
#endif
-- -----------------------------------------------------------------------------
-- The environment
-- | The data we carry around in the Haxl monad.
data Env u = Env
  { cacheRef :: IORef (DataCache ResultVar) -- cached data fetches
  , memoRef :: IORef (DataCache (MemoVar u)) -- memoized computations
  , flags :: Flags -- ^ runtime flags, consulted via ifTrace/ifReport
  , userEnv :: u -- ^ user-supplied environment, passed to data sources' 'fetch'
  , statsRef :: IORef Stats -- ^ per-round statistics, updated by 'performFetches'
  , states :: StateStore
  -- ^ Data sources and other components can store their state in
  -- here. Items in this store must be instances of 'StateKey'.
  }
-- | The pair of caches held by an 'Env': the data-fetch cache and the
-- memoization cache.
type Caches u = (IORef (DataCache ResultVar), IORef (DataCache (MemoVar u)))

-- | Project both cache references out of an 'Env'.
caches :: Env u -> Caches u
caches = (,) <$> cacheRef <*> memoRef
-- | Build an 'Env' from a 'StateStore', a user environment, and a
-- pair of preexisting cache references.
initEnvWithData :: StateStore -> u -> Caches u -> IO (Env u)
initEnvWithData stateStore usrEnv (cref, mref) = do
  statsVar <- newIORef emptyStats
  return Env
    { cacheRef = cref
    , memoRef  = mref
    , flags    = defaultFlags
    , userEnv  = usrEnv
    , states   = stateStore
    , statsRef = statsVar
    }
-- | Build an 'Env' with fresh, empty caches from a 'StateStore' and a
-- user environment.
initEnv :: StateStore -> u -> IO (Env u)
initEnv stateStore usrEnv = do
  refs <- (,) <$> newIORef DataCache.empty <*> newIORef DataCache.empty
  initEnvWithData stateStore usrEnv refs
-- | An environment with no data-source states and empty caches.
emptyEnv :: u -> IO (Env u)
emptyEnv usrEnv = initEnv stateEmpty usrEnv
-- -----------------------------------------------------------------------------
-- | The Haxl monad, which does several things:
--
-- * It is a reader monad for 'Env' and 'IORef' 'RequestStore', The
-- latter is the current batch of unsubmitted data fetch requests.
--
-- * It is a concurrency, or resumption, monad. A computation may run
-- partially and return 'Blocked', in which case the framework should
-- perform the outstanding requests in the 'RequestStore', and then
-- resume the computation.
--
-- * The Applicative combinator '<*>' explores /both/ branches in the
-- event that the left branch is 'Blocked', so that we can collect
-- multiple requests and submit them as a batch.
--
-- * It contains IO, so that we can perform real data fetching.
--
newtype GenHaxl u a = GenHaxl
{ unHaxl :: Env u -> IORef (RequestStore u) -> IO (Result u a) }
-- | The result of a computation is either 'Done' with a value, 'Throw'
-- with an exception, or 'Blocked' on the result of a data fetch with
-- a continuation.
data Result u a
  = Done a -- ^ finished with a value
  | Throw SomeException -- ^ failed with an exception
  | Blocked (GenHaxl u a) -- ^ waiting on data; resume with this continuation
-- | Debug rendering only; 'Blocked' continuations cannot be shown.
instance (Show a) => Show (Result u a) where
  show r = case r of
    Done a    -> "Done(" ++ show a ++ ")"
    Throw e   -> "Throw(" ++ show e ++ ")"
    Blocked{} -> "Blocked"
-- | Sequencing: when the left computation blocks, the rest of the
-- pipeline is captured in the 'Blocked' continuation so it can be
-- resumed after the next fetch round.
instance Monad (GenHaxl u) where
  return a = GenHaxl $ \_env _ref -> return (Done a)
  GenHaxl m >>= k = GenHaxl $ \env ref -> do
    e <- m env ref
    case e of
      Done a -> unHaxl (k a) env ref -- continue immediately
      Throw e -> return (Throw e) -- short-circuit on exception
      Blocked cont -> return (Blocked (cont >>= k)) -- defer the rest
-- | 'fmap' is defined via the Applicative instance, so mapping over a
-- 'Blocked' computation composes the function into its continuation.
instance Functor (GenHaxl u) where
  fmap f m = pure f <*> m
-- | The key instance for batching: when the left side of '<*>' is
-- 'Blocked' we still run the right side, so requests from /both/
-- branches accumulate into the same fetch round.
instance Applicative (GenHaxl u) where
  pure = return
  GenHaxl f <*> GenHaxl a = GenHaxl $ \env ref -> do
    r <- f env ref
    case r of
      Throw e -> return (Throw e)
      Done f' -> do
        ra <- a env ref
        case ra of
          Done a' -> return (Done (f' a'))
          Throw e -> return (Throw e)
          Blocked a' -> return (Blocked (f' <$> a'))
      Blocked f' -> do
        ra <- a env ref -- left is blocked, explore the right
        case ra of
          Done a' -> return (Blocked (f' <*> return a'))
          -- note: a Throw on the right is re-injected with 'throw'
          -- rather than raised now, so the blocked left side keeps
          -- priority if it also fails
          Throw e -> return (Blocked (f' <*> throw e))
          Blocked a' -> return (Blocked (f' <*> a'))
-- | Runs a 'Haxl' computation in an 'Env'.
--
-- Drives the computation to completion: each time it blocks, the
-- requests batched in the 'RequestStore' are performed and the saved
-- continuation is resumed with a fresh, empty store.
runHaxl :: Env u -> GenHaxl u a -> IO a
#ifdef EVENTLOG
runHaxl env h = do
  let go !n env (GenHaxl haxl) = do
        traceEventIO "START computation"
        ref <- newIORef noRequests
        e <- haxl env ref
        traceEventIO "STOP computation"
        case e of
          Done a -> return a
          Throw e -> Exception.throw e
          Blocked cont -> do
            bs <- readIORef ref
            writeIORef ref noRequests -- Note [RoundId]
            traceEventIO "START performFetches"
            n' <- performFetches n env bs
            traceEventIO "STOP performFetches"
            go n' env cont
  traceEventIO "START runHaxl"
  r <- go 0 env h
  traceEventIO "STOP runHaxl"
  return r
#else
runHaxl env (GenHaxl haxl) = do
  -- fresh RequestStore for this round
  ref <- newIORef noRequests
  e <- haxl env ref
  case e of
    Done a -> return a
    Throw e -> Exception.throw e
    Blocked cont -> do
      bs <- readIORef ref
      writeIORef ref noRequests -- Note [RoundId]
      -- fills every ResultVar referenced by the blocked continuation
      void (performFetches 0 env bs)
      runHaxl env cont
#endif
-- | Extracts data from the 'Env' with the given projection function.
env :: (Env u -> a) -> GenHaxl u a
env f = GenHaxl $ \e _ref -> (return . Done . f) e
-- -----------------------------------------------------------------------------
-- Exceptions
-- | Throw an exception in the Haxl monad.
throw :: (Exception e) => e -> GenHaxl u a
throw ex = GenHaxl $ \_env _ref -> raise ex

-- | Wrap an exception as a failed 'Result' in IO.
raise :: (Exception e) => e -> IO (Result u a)
raise e = return (Throw (toException e))
-- | Catch an exception in the Haxl monad.
catch :: Exception e => GenHaxl u a -> (e -> GenHaxl u a) -> GenHaxl u a
catch (GenHaxl m) h = GenHaxl $ \env ref -> do
  r <- m env ref
  case r of
    Done a -> return (Done a)
    Throw e -> case fromException e of
      Just e' -> unHaxl (h e') env ref -- handler matches this exception type
      Nothing -> return (Throw e)      -- not ours; keep propagating
    Blocked k -> return (Blocked (catch k h)) -- reinstall the handler on resume
-- | Catch only those exceptions that satisfy the predicate; all
-- others are rethrown.
catchIf
  :: Exception e => (e -> Bool) -> GenHaxl u a -> (e -> GenHaxl u a)
  -> GenHaxl u a
catchIf cond haxl handler = catch haxl filtered
  where
    filtered e
      | cond e    = handler e
      | otherwise = throw e
-- | Returns @'Left' e@ if the computation throws an exception @e@, or
-- @'Right' a@ if it returns a result @a@.
try :: Exception e => GenHaxl u a -> GenHaxl u (Either e a)
try haxl = catch (fmap Right haxl) (return . Left)
-- -----------------------------------------------------------------------------
-- Unsafe operations
-- | Lift an IO action into Haxl.  Under ordinary circumstances this
-- is unnecessary; users of the Haxl monad should generally /not/
-- perform arbitrary IO.
unsafeLiftIO :: IO a -> GenHaxl u a
unsafeLiftIO m = GenHaxl $ \_env _ref -> fmap Done m
-- | Convert exceptions in the underlying IO monad to exceptions in
-- the Haxl monad.  Morally unsafe: callers could then catch those
-- exceptions in Haxl and observe the underlying execution order.
-- Not to be exposed to user code.
unsafeToHaxlException :: GenHaxl u a -> GenHaxl u a
unsafeToHaxlException (GenHaxl m) = GenHaxl $ \env ref -> do
  r <- m env ref `Exception.catch` (return . Throw)
  return $ case r of
    Blocked c -> Blocked (unsafeToHaxlException c)
    other     -> other
-- | Like 'try', but lifts all exceptions into the 'HaxlException'
-- hierarchy (using 'unsafeToHaxlException' internally).  Typically
-- used at the top level of a Haxl computation so that every exception
-- is caught.
tryToHaxlException :: GenHaxl u a -> GenHaxl u (Either HaxlException a)
tryToHaxlException h =
  fmap (left asHaxlException) (try (unsafeToHaxlException h))
-- -----------------------------------------------------------------------------
-- Data fetching and caching
-- | Possible responses when checking the cache.
data CacheResult a
-- | The request hadn't been seen until now.
= Uncached (ResultVar a)
-- | The request has been seen before, but its result has not yet been
-- fetched.
| CachedNotFetched (ResultVar a)
-- | The request has been seen before, and its result has already been
-- fetched.
| Cached (Either SomeException a)
-- | Checks the data cache for the result of a request.  If the
-- request is new, an empty 'ResultVar' is allocated and recorded in
-- the cache ('Uncached'); otherwise the existing var or its value is
-- returned.
cached :: (Request r a) => Env u -> r a -> IO (CacheResult a)
cached env req = do
  cache <- readIORef (cacheRef env)
  let
    -- first sighting: allocate a slot and remember it
    do_fetch = do
      rvar <- newEmptyResult
      writeIORef (cacheRef env) $! DataCache.insert req rvar cache
      return (Uncached rvar)
  case DataCache.lookup req cache of
    Nothing -> do_fetch
    Just rvar -> do
      mb <- tryReadResult rvar
      case mb of
        -- in flight: the var exists but has not been filled yet
        Nothing -> return (CachedNotFetched rvar)
        -- Use the cached result, even if it was an error.
        Just r -> do
          ifTrace (flags env) 3 $ putStrLn $ case r of
            Left _ -> "Cached error: " ++ show req
            Right _ -> "Cached request: " ++ show req
          return (Cached r)
-- | Performs actual fetching of data for a 'Request' from a
-- 'DataSource'.  Results are cached per 'Env', so an identical
-- request is only fetched once.
dataFetch :: (DataSource u r, Request r a) => r a -> GenHaxl u a
dataFetch req = GenHaxl $ \env ref -> do
  -- First, check the cache
  res <- cached env req
  case res of
    -- Not seen before: add the request to the RequestStore, so it
    -- will be fetched in the next round.
    Uncached rvar -> do
      modifyIORef' ref $ \bs -> addRequest (BlockedFetch req rvar) bs
      return $ Blocked (continueFetch req rvar)
    -- Seen before but not fetched yet. We're blocked, but we don't have
    -- to add the request to the RequestStore.
    CachedNotFetched rvar -> return
      $ Blocked (continueFetch req rvar)
    -- Cached: either a result, or an exception
    Cached (Left ex) -> return (Throw ex)
    Cached (Right a) -> return (Done a)
-- | A data request that is not cached.  This is not what you want for
-- normal read requests: multiple identical uncached requests may
-- return different results, which breaks the assumption that fetches
-- can be reordered and identical requests commoned up.
--
-- 'uncachedRequest' is useful for performing writes, provided those
-- are done in a safe way - that is, not mixed with reads that might
-- conflict in the same Haxl computation.
uncachedRequest :: (DataSource u r, Request r a) => r a -> GenHaxl u a
uncachedRequest req = GenHaxl $ \_env ref -> do
  -- allocate a fresh result slot and queue the fetch, bypassing the cache
  rvar <- newEmptyResult
  modifyIORef' ref (addRequest (BlockedFetch req rvar))
  return (Blocked (continueFetch req rvar))
-- | The continuation installed after queueing a fetch: by the time it
-- runs, the round's fetches must have filled in the 'ResultVar'.
continueFetch
  :: (DataSource u r, Request r a, Show a)
  => r a -> ResultVar a -> GenHaxl u a
continueFetch req rvar = GenHaxl $ \_env _ref -> do
  mres <- tryReadResult rvar
  case mres of
    Just r -> done r
    Nothing -> raise . DataSourceError $
      textShow req <> " did not set contents of result var"
-- | Transparently provides caching. Useful for datasources that can
-- return immediately, but also caches values. Exceptions thrown by
-- the IO operation (except for asynchronous exceptions) are
-- propagated into the Haxl monad and can be caught by 'catch' and
-- 'try'.
cacheResult :: (Request r a) => r a -> IO a -> GenHaxl u a
cacheResult req val = GenHaxl $ \env _ref -> do
  cachedResult <- cached env req
  case cachedResult of
    Uncached rvar -> do
      -- run the action; errors are cached too, so replays are consistent
      result <- Exception.try val
      putResult rvar result
      case result of
        -- async exceptions must terminate the computation, never
        -- become a cached Haxl error (see commentary below)
        Left e -> do rethrowAsyncExceptions e; done result
        _other -> done result
    Cached result -> done result
    CachedNotFetched _ -> corruptCache
  where
  corruptCache = raise . DataSourceError $ Text.concat
    [ textShow req
    , " has a corrupted cache value: these requests are meant to"
    , " return immediately without an intermediate value. Either"
    , " the cache was updated incorrectly, or you're calling"
    , " cacheResult on a query that involves a blocking fetch."
    ]
-- We must be careful about turning IO monad exceptions into Haxl
-- exceptions. An IO monad exception will normally propagate right
-- out of runHaxl and terminate the whole computation, whereas a Haxl
-- exception can get dropped on the floor, if it is on the right of
-- <*> and the left side also throws, for example. So turning an IO
-- monad exception into a Haxl exception is a dangerous thing to do.
-- In particular, we never want to do it for an asynchronous exception
-- (AllocationLimitExceeded, ThreadKilled, etc.), because these are
-- supposed to unconditionally terminate the computation.
--
-- There are three places where we take an arbitrary IO monad exception and
-- turn it into a Haxl exception:
--
-- * wrapFetchInCatch. Here we want to propagate a failure of the
-- data source to the callers of the data source, but if the
-- failure came from elsewhere (an asynchronous exception), then we
-- should just propagate it
--
-- * cacheResult (cache the results of IO operations): again,
-- failures of the IO operation should be visible to the caller as
-- a Haxl exception, but we exclude asynchronous exceptions from
-- this.
-- * unsafeToHaxlException: assume the caller knows what they're
-- doing, and just wrap all exceptions.
--
-- | Rethrow @e@ in the IO monad if it is asynchronous (or
-- 'AllocationLimitExceeded' on GHC >= 7.10, which behaves like one);
-- otherwise do nothing.  This keeps async exceptions from ever being
-- converted into Haxl exceptions (see the commentary above).
rethrowAsyncExceptions :: SomeException -> IO ()
rethrowAsyncExceptions e
#if __GLASGOW_HASKELL__ >= 708
  | Just SomeAsyncException{} <- fromException e = Exception.throw e
#endif
#if __GLASGOW_HASKELL__ >= 710
  | Just AllocationLimitExceeded{} <- fromException e = Exception.throw e
    -- AllocationLimitExceeded is not a child of SomeAsyncException,
    -- but it should be.
#endif
  | otherwise = return ()
-- | Inserts a request/result pair into the cache. Throws an exception
-- if the request has already been issued, either via 'dataFetch' or
-- 'cacheRequest'.
--
-- This can be used to pre-populate the cache when running tests, to
-- avoid going to the actual data source and ensure that results are
-- deterministic.
cacheRequest
  :: (Request req a) => req a -> Either SomeException a -> GenHaxl u ()
cacheRequest request result = GenHaxl $ \env _ref -> do
  lookupResult <- cached env request
  case lookupResult of
    -- not seen before: store the supplied result and we're done
    Uncached rvar -> do
      putResult rvar result
      return (Done ())
    -- Already present (fetched or in flight).  We cannot verify that
    -- the cached value matches without an Eq constraint, so this is
    -- always an error.
    _alreadyCached -> raise $
      DataSourceError "cacheRequest: request is already in the cache"
-- | String literals denote pure Haxl computations (with
-- OverloadedStrings).
instance IsString a => IsString (GenHaxl u a) where
  fromString = return . fromString
-- | Issues a batch of fetches in a 'RequestStore'. After
-- 'performFetches', all the requests in the 'RequestStore' are
-- complete, and all of the 'ResultVar's are full.  Returns the
-- running total of jobs issued so far (used for trace numbering).
performFetches :: forall u. Int -> Env u -> RequestStore u -> IO Int
performFetches n env reqs = do
  let f = flags env
      sref = statsRef env
      jobs = contents reqs
      !n' = n + length jobs
  t0 <- getCurrentTime
  let
    -- (data source name, number of requests) per job, for tracing/stats
    roundstats =
      [ (dataSourceName (getReq reqs), length reqs)
      | BlockedFetches reqs <- jobs ]
      where
      getReq :: [BlockedFetch r] -> r a
      getReq = undefined
  ifTrace f 1 $
    printf "Batch data fetch (%s)\n" $
      intercalate (", "::String) $
      map (\(name,num) -> printf "%d %s" num (Text.unpack name)) roundstats
  ifTrace f 3 $
    forM_ jobs $ \(BlockedFetches reqs) ->
      forM_ reqs $ \(BlockedFetch r _) -> putStrLn (show1 r)
  let
    -- Look up the data source's state and build its PerformFetch,
    -- wrapped so that exceptions and trace events are handled.
    applyFetch (i, BlockedFetches (reqs :: [BlockedFetch r])) =
      case stateGet (states env) of
        Nothing ->
          return (SyncFetch (mapM_ (setError (const e)) reqs))
         where req :: r a; req = undefined
               e = DataSourceError $
                 "data source not initialized: " <> dataSourceName req
        Just state ->
          return $ wrapFetchInTrace i (length reqs)
                    (dataSourceName (undefined :: r a))
                 $ wrapFetchInCatch reqs
                 $ fetch state f (userEnv env) reqs
  fetches <- mapM applyFetch $ zip [n..] jobs
  -- only pay the cost of per-source timing when reporting is enabled
  times <-
    if report f >= 2
    then do
      (refs, timedfetches) <- mapAndUnzipM wrapFetchInTimer fetches
      scheduleFetches timedfetches
      mapM (fmap Just . readIORef) refs
    else do
      scheduleFetches fetches
      return $ repeat Nothing
  let dsroundstats = HashMap.fromList
         [ (name, DataSourceRoundStats { dataSourceFetches = fetches
                                       , dataSourceTime = time
                                       })
         | ((name, fetches), time) <- zip roundstats times]
  t1 <- getCurrentTime
  let roundtime = realToFrac (diffUTCTime t1 t0) :: Double
  ifReport f 1 $
    modifyIORef' sref $ \(Stats rounds) -> roundstats `deepseq`
      Stats (RoundStats (microsecs roundtime) dsroundstats: rounds)
  ifTrace f 1 $
    printf "Batch data fetch done (%.2fs)\n" (realToFrac roundtime :: Double)
  return n'
-- Catch exceptions arising from the data source and stuff them into
-- the appropriate requests. We don't want any exceptions propagating
-- directly from the data sources, because we want the exception to be
-- thrown by dataFetch instead.
--
wrapFetchInCatch :: [BlockedFetch req] -> PerformFetch -> PerformFetch
wrapFetchInCatch reqs fetch =
  case fetch of
    SyncFetch io ->
      SyncFetch (io `Exception.catch` handler)
    AsyncFetch fio ->
      AsyncFetch (\io -> fio io `Exception.catch` handler)
  where
    -- distribute the failure to every request in the batch; async
    -- exceptions are rethrown instead of being recorded as results
    handler :: SomeException -> IO ()
    handler e = do
      rethrowAsyncExceptions e
      mapM_ (forceError e) reqs
    -- Set the exception even if the request already had a result.
    -- Otherwise we could be discarding an exception.
    forceError e (BlockedFetch _ rvar) = do
      void $ tryTakeResult rvar
      putResult rvar (except e)
-- | Wrap a 'PerformFetch' so that its elapsed time is written to the
-- returned 'IORef'.  For an 'AsyncFetch' the time spent running the
-- caller-supplied inner action is measured separately and subtracted,
-- so the IORef records only time attributable to this fetch.
wrapFetchInTimer :: PerformFetch -> IO (IORef Microseconds, PerformFetch)
wrapFetchInTimer f = do
  r <- newIORef 0
  case f of
    SyncFetch io -> return (r, SyncFetch (time io >>= writeIORef r))
    AsyncFetch f -> do
      inner_r <- newIORef 0
      return (r, AsyncFetch $ \inner -> do
        total <- time (f (time inner >>= writeIORef inner_r))
        inner_t <- readIORef inner_r
        writeIORef r (total - inner_t))
-- | When built with EVENTLOG, surround a fetch with START/STOP trace
-- events (and invert them around the inner action of an async fetch);
-- otherwise this is the identity.
wrapFetchInTrace :: Int -> Int -> Text.Text -> PerformFetch -> PerformFetch
#ifdef EVENTLOG
wrapFetchInTrace i n dsName f =
  case f of
    SyncFetch io -> SyncFetch (wrapF "Sync" io)
    AsyncFetch fio -> AsyncFetch (wrapF "Async" . fio . unwrapF "Async")
  where
    d = Text.unpack dsName
    wrapF :: String -> IO a -> IO a
    wrapF ty = bracket_ (traceEventIO $ printf "START %d %s (%d %s)" i d n ty)
                        (traceEventIO $ printf "STOP %d %s (%d %s)" i d n ty)
    unwrapF :: String -> IO a -> IO a
    unwrapF ty = bracket_ (traceEventIO $ printf "STOP %d %s (%d %s)" i d n ty)
                          (traceEventIO $ printf "START %d %s (%d %s)" i d n ty)
#else
wrapFetchInTrace _ _ _ f = f
#endif
-- | Run an IO action and return how long it took, in microseconds.
time :: IO () -> IO Microseconds
time io = do
  before <- io `seq` getCurrentTime
  io
  after <- getCurrentTime
  return (microsecs (realToFrac (after `diffUTCTime` before)))
-- | Convert a duration in seconds to whole microseconds.
microsecs :: Double -> Microseconds
microsecs t = round (t * 1000000)
-- | Start all the async fetches first, then perform the sync fetches before
-- getting the results of the async fetches.
scheduleFetches :: [PerformFetch] -> IO()
scheduleFetches fetches = async_fetches sync_fetches
  where
    -- nest each AsyncFetch around the sync work: every AsyncFetch
    -- receives an "inner" action to run while it is in flight
    async_fetches :: IO () -> IO ()
    async_fetches = compose [f | AsyncFetch f <- fetches]

    -- run every synchronous fetch, in order
    sync_fetches :: IO ()
    sync_fetches = sequence_ [io | SyncFetch io <- fetches]
-- -----------------------------------------------------------------------------
-- Memoization
-- | A variable in the cache representing the state of a memoized computation
newtype MemoVar u a = MemoVar (IORef (MemoStatus u a))
-- | The state of a memoized computation
data MemoStatus u a
= MemoInProgress (RoundId u) (GenHaxl u a)
-- ^ Under evaluation in the given round, here is the latest
-- continuation. The continuation might be a little out of
-- date, but that's fine, the worst that can happen is we do a
-- little extra work.
| MemoDone (Either SomeException a)
-- fully evaluated, here is the result.
type RoundId u = IORef (RequestStore u)
{-
Note [RoundId]
A token representing the round. This needs to be unique per round,
and it needs to support Eq. Fortunately the IORef RequestStore is
exactly what we need: IORef supports Eq, and we make a new one for
each round. There's a danger that storing this in the DataCache could
cause a space leak, so we stub out the contents after each round (see
runHaxl).
-}
-- | 'cachedComputation' memoizes a Haxl computation. The key is a
-- request.
--
-- /Note:/ These cached computations will /not/ be included in the output
-- of 'dumpCacheAsHaskell'.
--
cachedComputation
  :: forall req u a. (Request req a)
  => req a -> GenHaxl u a -> GenHaxl u a
cachedComputation req haxl = GenHaxl $ \env ref -> do
  cache <- readIORef (memoRef env)
  case DataCache.lookup req cache of
    -- first request for this memo: record it as in-progress and run it
    Nothing -> do
      memovar <- newIORef (MemoInProgress ref haxl)
      writeIORef (memoRef env) $! DataCache.insert req (MemoVar memovar) cache
      run memovar haxl env ref
    Just (MemoVar memovar) -> do
      status <- readIORef memovar
      case status of
        MemoDone r -> done r
        MemoInProgress round cont
          | round == ref -> return (Blocked (retryMemo req))
          | otherwise -> run memovar cont env ref
          -- was blocked in a previous round; run the saved continuation to
          -- make more progress.
  where
    -- If we got blocked on this memo in the current round, this is the
    -- continuation: just try to evaluate the memo again. We know it is
    -- already in the cache (because we just checked), so the computation
    -- will never be used.
    retryMemo req =
      cachedComputation req (throw (CriticalError "retryMemo"))
    -- Run the memoized computation and store the result (complete or
    -- partial) back in the MemoVar afterwards.
    --
    -- We don't attempt to catch IO monad exceptions here. That may seem
    -- dangerous, because if an IO exception is raised we'll leave the
    -- MemoInProgress in the MemoVar. But we always want to just
    -- propagate an IO monad exception (it should kill the whole runHaxl,
    -- unless there's a unsafeToHaxlException), so we should never be
    -- looking at the MemoVar again anyway. Furthermore, storing the
    -- exception in the MemoVar is wrong, because that will turn it into
    -- a Haxl exception (see rethrowAsyncExceptions).
    run memovar cont env ref = do
      e <- unHaxl cont env ref
      case e of
        Done a -> complete memovar (Right a)
        Throw e -> complete memovar (Left e)
        Blocked cont -> do
          writeIORef memovar (MemoInProgress ref cont)
          return (Blocked (retryMemo req))
    -- We're finished: store the final result
    complete memovar r = do
      writeIORef memovar (MemoDone r)
      done r
-- | Lifts an 'Either' into either 'Throw' or 'Done'.
done :: Either SomeException a -> IO (Result u a)
done (Left e)  = return (Throw e)
done (Right a) = return (Done a)
-- -----------------------------------------------------------------------------
-- | Dump the contents of the cache as Haskell code that, when
-- compiled and run, will recreate the same cache contents. For
-- example, the generated code looks something like this:
--
-- > loadCache :: GenHaxl u ()
-- > loadCache = do
-- >   cacheRequest (ListWombats 3) (Right ([1,2,3]))
-- >   cacheRequest (CountAardvarks "abcabc") (Right (2))
--
dumpCacheAsHaskell :: GenHaxl u String
dumpCacheAsHaskell = do
  ref <- env cacheRef -- NB. cacheRef, not memoRef. We ignore memoized
                      -- results when dumping the cache.
  entries <- unsafeLiftIO $ readIORef ref >>= showCache
  let
    -- one "cacheRequest ... (...)" line per cached entry
    mk_cr (req, res) =
      text "cacheRequest" <+> parens (text req) <+> parens (result res)
    result (Left e) = text "except" <+> parens (text (show e))
    result (Right s) = text "Right" <+> parens (text s)
  return $ show $
    text "loadCache :: GenHaxl u ()" $$
    text "loadCache = do" $$
      nest 2 (vcat (map mk_cr (concatMap snd entries))) $$
    text "" -- final newline
| iblumenfeld/Haxl | Haxl/Core/Monad.hs | bsd-3-clause | 26,121 | 0 | 24 | 6,164 | 5,906 | 2,996 | 2,910 | 378 | 5 |
{-# LANGUAGE TemplateHaskell #-}
-- Trac #2632
module MkData where
import Language.Haskell.TH
-- | Numeric addition; used as a backquoted operator in the TH quotes below.
op :: Num v => v -> v -> v
op = (+)
-- Regression test for Trac #2632: 'op' used inside declaration quotes,
-- both referring to the top-level 'op' (decl1) and to a locally quoted
-- binding that shadows it (decl2).  Kept byte-identical on purpose.
decl1 = [d| func = 0 `op` 3 |]
decl2 = [d| op x y = x
            func = 0 `op` 3 |]
| forked-upstream-packages-for-ghcjs/ghc | testsuite/tests/th/T2632.hs | bsd-3-clause | 228 | 0 | 7 | 71 | 64 | 40 | 24 | 8 | 1 |
import System.IO hiding (writeFile)
import System.Environment
import Prelude hiding (writeFile, concat)
import Data.ByteString.Lazy (writeFile, concat)
import Data.Binary (encode)
import Control.Monad (when)
import System.Exit (exitFailure)
import System.FilePath ((</>))
import Language.RuScript.Serialize
import Language.RuScript.Codegen
import Language.RuScript.Parser
import Language.RuScript.StaticCheck
import Language.RuScript.Option
import Language.RuScript.Optimize
import Language.RuScript.Import
-- | Compiler driver: @rusc <source> <target>@ (paths relative to the
-- include directory from the options).  Parses, resolves imports,
-- statically checks, optionally optimizes, then serializes bytecode.
main :: IO ()
main = do
  hSetBuffering stdout NoBuffering
  args <- getArgs
  if (length args > 1) then do
      -- everything after the two positional arguments is treated as options
      let opt = parseOpt $ drop 2 args
      let includeDir = _includeOpt opt
      txt <- readFile (includeDir </> head args)
      let target = includeDir </> (args !! 1)
      case parseProgram txt of
          Left err -> exitError $ "Error in parsing: " ++ show err
          Right program -> do
              program' <- resolveDeps includeDir program
              when (_debugOpt opt) $ print program'
              case checkProgram program' of
                  Left err -> exitError $ "Error in checking: " ++ err
                  Right _  -> do
                      let bytecode = if (_optimizeOpt opt)
                                      then optimize $ runCodegen program'
                                      else runCodegen program'
                      when (_debugOpt opt) $ printCode bytecode
                      -- each instruction is serialized and encoded separately,
                      -- then the lazy chunks are concatenated into the target
                      writeFile target (concat $ map (encode . serialize) bytecode)
    else putStrLn "usage: rusc <source> <target>"
  where
    exitError s = hPutStrLn stderr s >> exitFailure
| izgzhen/RuScript | rusc/src/Main.hs | mit | 1,680 | 0 | 25 | 523 | 455 | 231 | 224 | 39 | 5 |
-- To turn a file list like: 01.first 02.second 03.third into, say: 25.first 26.second 27.third
import System.Environment
import Data.List
import Text.Printf
import System.Posix.Files
import System.Directory
-- | Swap the numeric prefix of a file name: keep everything from the
-- first dot onwards and put the new number in front of it.
-- E.g. @prependNewTag "01.first" "25" == "25.first"@.
prependNewTag :: FilePath -> FilePath -> FilePath
prependNewTag orig newNum = newNum ++ extension
  where
    extension = dropWhile (/= '.') orig
-- | Format an index as a zero-padded four-digit tag ("%.4d": minimum
-- four digits, padded with zeros).
myIntToTag :: Int -> FilePath
myIntToTag = printf "%.4d"
-- | Build one rename action per file, numbering them consecutively
-- starting from the given index.
renameFiles :: [FilePath] -> Int -> [IO ()]
renameFiles names startNum = zipWith renameOne names [startNum ..]
  where
    renameOne name num = rename name (prependNewTag name (myIntToTag num))
-- | Renumber every file in the current directory starting at @startNum@.
doFiles :: Int -> IO ()
doFiles startNum = do
  files <- fmap sort $ getDirectoryContents "."
  -- Sorting puts "." and ".." (and other dot entries) first; dropWhile
  -- skips that leading run.  NOTE(review): 'head' is safe here only
  -- because directory entries are never empty strings.
  sequence_ $ renameFiles ( dropWhile ( (=='.') . head ) files ) startNum
-- | Entry point: read the starting number from the first argument.
-- The original crashed with a pattern failure ('head args') when run
-- without arguments; now it prints a usage message instead.
main :: IO ()
main = do
  args <- getArgs
  case args of
    -- 'read' still aborts on a non-numeric argument, matching the old
    -- behaviour for bad input.
    (a : _) -> doFiles (read a)
    []      -> putStrLn "usage: renumber <starting-number>"
| ChristopherMorton/Renumber.hs | renumber.hs | mit | 848 | 0 | 13 | 163 | 295 | 151 | 144 | 20 | 1 |
import XMonad
import System.Taffybar.Support.PagerHints (pagerHints)
import XMonad.Hooks.EwmhDesktops (ewmh)
import XMonad.Hooks.ManageDocks
import XMonad.Util.EZConfig
-- Extra EZConfig keybindings: route the multimedia keys to ALSA's
-- amixer on card 0 (volume up/down/mute, mic mute).
myKeys = [ ("<XF86AudioMute>"       , spawn "amixer -c 0 set Master toggle")
         , ("<XF86AudioLowerVolume>", spawn "amixer -c 0 set Master 5%-")
         , ("<XF86AudioRaiseVolume>", spawn "amixer -c 0 set Master 5%+")
         , ("<XF86AudioMicMute>"    , spawn "amixer -c 0 set Capture toggle")
         ]
-- | XMonad entry point: EWMH + taffybar pager hints + dock handling
-- layered over the default configuration.  The original mixed 'def'
-- with the deprecated alias 'defaultConfig' (they are the same value);
-- unified on 'def' for consistency.
main = xmonad $
  docks $
  ewmh $
  pagerHints $
  def
    { borderWidth = 4
    , modMask     = mod4Mask
    , terminal    = "termite"
    , layoutHook  = avoidStruts $ layoutHook def
    , manageHook  = manageHook def <+> manageDocks
    }
  `additionalKeysP` myKeys
| rossabaker/dotfiles | xmonad/xmonad.hs | mit | 775 | 0 | 10 | 183 | 165 | 98 | 67 | 20 | 1 |
module Data.LTE
(
) where
| mckeankylej/fast-nats | src/Data/LTE.hs | mit | 30 | 0 | 3 | 9 | 9 | 6 | 3 | 2 | 0 |
module Lesson2.BFS.Enumerator where
import Control.Monad.Trans (MonadIO, liftIO)
import Data.Ord (Ord(..), comparing)
import qualified Data.Set as Set
--------------------
import Data.Enumerator (
Stream(..)
, Step(..)
, Iteratee(..)
, Enumerator
, (>>==)
, returnI
, continue
, yield
)
import Data.Hashable (Hashable(..))
--------------------
import Navigation.Enumerator
import Lesson2.Types
import Lesson2.BFS.Types
-------------------------------------------------------------------------------
-- | Enumerate a breadth-first traversal of the graph starting at
-- @source0@.  Delegates to 'enumNavigation' with a constant edge cost
-- of 1 and the graph's neighbour lookup as the expansion function.
enumBFS :: (MonadIO m, Hashable a)
        => Node a
        -> BFSGraph a
        -> Enumerator (NavEvent (Node a)) m b
enumBFS source0 g =
    enumNavigation (\_ _ -> return 1)
                   (return . (`getNodeNeighbours` g))
                   source0
| roman/ai-class-haskell | src/Lesson2/BFS/Enumerator.hs | mit | 788 | 0 | 12 | 176 | 220 | 134 | 86 | 25 | 1 |
{-# LANGUAGE NoImplicitPrelude, FlexibleInstances, OverloadedStrings #-}
module IHaskell.Eval.Hoogle (
search,
document,
render,
OutputFormat(..),
HoogleResult,
) where
import IHaskellPrelude
import qualified Data.Text as T
import qualified Data.Text.Lazy as LT
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as LBS
import qualified Data.ByteString.Char8 as CBS
import Network.HTTP.Client
import Network.HTTP.Client.TLS
import Data.Aeson
import qualified Data.List as List
import Data.Char (isAscii, isAlphaNum)
import IHaskell.IPython
import StringUtils (split, strip, replace)
-- | Types of formats to render output to.
data OutputFormat = Plain -- ^ Render to plain text.
                  | HTML -- ^ Render to HTML.

-- | One decoded entry from Hoogle's JSON response.
data HoogleResponse = HoogleResponse { location :: String, self :: String, docs :: String }
  deriving (Eq, Show)

-- | A search outcome: a hit, a documentation hit, or a failure message.
data HoogleResult = SearchResult HoogleResponse
                  | DocResult HoogleResponse
                  | NoResult String
  deriving Show

-- | Wrapper for the "results" array in Hoogle's JSON payload.
data HoogleResponseList = HoogleResponseList [HoogleResponse]
-- Decode {"results": [...]} into the wrapper type.
instance FromJSON HoogleResponseList where
  parseJSON (Object obj) = do
    results <- obj .: "results"
    HoogleResponseList <$> mapM parseJSON results
  parseJSON _ = fail "Expected object with 'results' field."

-- Decode a single result object with its three string fields.
instance FromJSON HoogleResponse where
  parseJSON (Object obj) =
    HoogleResponse <$> obj .: "location" <*> obj .: "self" <*> obj .: "docs"
  parseJSON _ = fail "Expected object with fields: location, self, docs"
-- | Query Hoogle for the given string. This searches Hoogle using the internet. It returns either
-- an error message or the successful JSON result.
query :: String -> IO (Either String String)
query str = do
  request <- parseUrlThrow $ queryUrl $ urlEncode str
  mgr <- newManager tlsManagerSettings
  -- any HTTP/TLS exception is caught and reported as a Left string
  catch
    (Right . CBS.unpack . LBS.toStrict . responseBody <$> httpLbs request mgr)
    (\e -> return $ Left $ show (e :: SomeException))
  where
    queryUrl :: String -> String
    queryUrl = printf "https://www.haskell.org/hoogle/?hoogle=%s&mode=json"
-- | Percent-encode a string for use in a URL (originally copied from
-- the HTTP package).  ASCII alphanumerics and @-_.~@ pass through;
-- every other character is escaped byte-wise as @%XY@ (non-ASCII code
-- points are first split into base-256 digits).
urlEncode :: String -> String
urlEncode = concatMap encodeChar
  where
    encodeChar ch
      | (isAscii ch && isAlphaNum ch) || ch `elem` ("-_.~" :: String) = [ch]
      | isAscii ch = percent (fromEnum ch)
      | otherwise  = concatMap percent (byteDigits [] (fromEnum ch))

    -- "%XY" for a single byte value
    percent :: Int -> String
    percent b = '%' : hexDigit (b `div` 16) : [hexDigit (b `mod` 16)]

    hexDigit :: Int -> Char
    hexDigit x
      | x <= 9    = toEnum (fromEnum '0' + x)
      | otherwise = toEnum (fromEnum 'A' + (x - 10))

    -- base-256 digits of a code point, most significant first
    -- (NOTE: this is not UTF-8; it mirrors the original's byte split)
    byteDigits :: [Int] -> Int -> [Int]
    byteDigits acc x
      | x <= 255  = x : acc
      | otherwise = byteDigits ((x `mod` 256) : acc) (x `div` 256)
-- | Search for a query on Hoogle. Return all search results.
search :: String -> IO [HoogleResult]
search string = do
  response <- query string
  return $
    case response of
      Left err -> [NoResult err]
      Right json ->
        -- the HTTP layer hands back String; re-pack for aeson
        case eitherDecode $ LBS.fromStrict $ CBS.pack json of
          Left err -> [NoResult err]
          Right results ->
            case map SearchResult $ (\(HoogleResponseList l) -> l) results of
              [] -> [NoResult "no matching identifiers found."]
              res -> res
-- | Look up an identifier on Hoogle. Return documentation for that identifier. If there are many
-- identifiers, include documentation for all of them.
document :: String -> IO [HoogleResult]
document string = do
  matchingResults <- filter matches <$> search string
  let results = map toDocResult matchingResults
  return $
    case results of
      [] -> [NoResult "no matching identifiers found."]
      res -> res
  where
    -- a result matches when its first word equals the queried name
    matches (SearchResult resp) =
      case split " " $ self resp of
        name:_ -> strip string == strip name
        _ -> False
    matches _ = False
    -- safe despite the single equation: 'matches' only lets
    -- SearchResult values through the preceding filter
    toDocResult (SearchResult resp) = DocResult resp
-- | Render a Hoogle search result into an output format.
render :: OutputFormat -> HoogleResult -> String
render fmt = case fmt of
  Plain -> renderPlain
  HTML  -> renderHtml
-- | Render a Hoogle result to plain text: name, URL, then docs
-- (or the failure message for 'NoResult').
renderPlain :: HoogleResult -> String
renderPlain result = case result of
  NoResult res      -> "No response available: " ++ res
  SearchResult resp -> entry resp
  DocResult resp    -> entry resp
  where
    entry resp = printf "%s\nURL: %s\n%s" (self resp) (location resp) (docs resp)
-- | Render a Hoogle result to HTML: the identifier header followed by
-- its documentation (or an error span for 'NoResult').
renderHtml :: HoogleResult -> String
renderHtml result = case result of
  NoResult resp     -> printf "<span class='err-msg'>No result: %s</span>" resp
  DocResult resp    -> headerAndDocs resp
  SearchResult resp -> headerAndDocs resp
  where
    headerAndDocs resp =
      renderSelf (self resp) (location resp) ++ renderDocs (docs resp)
-- | Render the @self@ line of a Hoogle result (plus its location URL)
-- as an HTML header, dispatching on whether it describes a package,
-- module, class, data declaration, or an ordinary typed identifier.
renderSelf :: String -> String -> String
renderSelf string loc
  | "package" `isPrefixOf` string =
    pkg ++ " " ++ span "hoogle-package" (link loc $ extractPackage string)
  | "module" `isPrefixOf` string =
    let package = extractPackageName loc
    in mod ++ " " ++
       span "hoogle-module" (link loc $ extractModule string) ++
       packageSub package
  | "class" `isPrefixOf` string =
    let package = extractPackageName loc
    in cls ++ " " ++
       span "hoogle-class" (link loc $ extractClass string) ++
       packageSub package
  | "data" `isPrefixOf` string =
    let package = extractPackageName loc
    -- NOTE(review): uses the "hoogle-class" CSS class for data
    -- declarations too -- possibly intentional styling, confirm.
    in dat ++ " " ++
       span "hoogle-class" (link loc $ extractData string) ++
       packageSub package
  | otherwise =
    -- NOTE(review): partial pattern -- fails unless the self string
    -- contains exactly one "::"; assumed to hold for Hoogle identifier
    -- results, confirm against live responses.
    let [name, args] = split "::" string
        package = extractPackageName loc
        modname = extractModuleName loc
    in span "hoogle-name"
         (unicodeReplace $
            link loc (strip name) ++
            " :: " ++
            strip args)
       ++ packageAndModuleSub package modname
  where
    extractPackage = strip . replace "package" ""
    extractModule = strip . replace "module" ""
    extractClass = strip . replace "class" ""
    extractData = strip . replace "data" ""
    pkg = span "hoogle-head" "package"
    mod = span "hoogle-head" "module"
    cls = span "hoogle-head" "class"
    dat = span "hoogle-head" "data"
    -- pretty-print common type syntax with Unicode symbols
    unicodeReplace :: String -> String
    unicodeReplace =
      replace "forall" "∀" .
      replace "=>" "⇒" .
      replace "->" "→" .
      replace "::" "∷"
    packageSub Nothing = ""
    packageSub (Just package) =
      span "hoogle-sub" $
        "(" ++ pkg ++ " " ++ span "hoogle-package" package ++ ")"
    packageAndModuleSub Nothing _ = ""
    packageAndModuleSub (Just package) Nothing = packageSub (Just package)
    packageAndModuleSub (Just package) (Just modname) =
      span "hoogle-sub" $
        "(" ++ pkg ++ " " ++ span "hoogle-package" package ++
        ", " ++ mod ++ " " ++ span "hoogle-module" modname ++ ")"
-- | Render a documentation string to HTML: consecutive "> "-prefixed
-- lines are grouped into code blocks, everything else into text blocks.
renderDocs :: String -> String
renderDocs doc = div' "hoogle-doc" (unlines (map renderGroup groups))
  where
    groups = List.groupBy bothCode (lines doc)
    bothCode a b = codeLine a && codeLine b
    codeLine l = ">" `isPrefixOf` strip l
    -- drop blank lines inside a group
    keep = filter (not . null . strip)
    -- groupBy never yields empty groups, so this match cannot fail
    renderGroup grp@(firstLine : _)
      | codeLine firstLine = div' "hoogle-code" (unlines (keep grp))
      | otherwise          = div' "hoogle-text" (unlines (keep grp))
-- | Pull the package name out of a Hackage URL: it is the path
-- component sitting between "archive" and "latest" (they must be
-- exactly two components apart).
extractPackageName :: String -> Maybe String
extractPackageName link =
  case (List.elemIndex "archive" pieces, List.elemIndex "latest" pieces) of
    (Just archiveIdx, Just latestIdx)
      | latestIdx - archiveIdx == 2 -> Just (pieces List.!! (latestIdx - 1))
    _ -> Nothing
  where
    pieces = split "/" link
-- | Derive the module name from a documentation URL: take the last
-- path component, drop the ".html" suffix, and turn dashes back into
-- dots.
extractModuleName :: String -> Maybe String
extractModuleName link =
  case reverse (split "/" link) of
    []         -> Nothing
    (html : _) -> Just (replace "-" "." (takeWhile (/= '.') html))
-- Small HTML helpers: wrap a body in a div/span with the given CSS
-- class, or build a link from href + text.  NOTE(review): 'span' and
-- 'div'' shadow/avoid Prelude names; 'span' relies on the custom
-- prelude import -- confirm Prelude.span is not needed in this module.
div' :: String -> String -> String
div' = printf "<div class='%s'>%s</div>"

span :: String -> String -> String
span = printf "<span class='%s'>%s</span>"

link :: String -> String -> String
link = printf "<a target='_blank' href='%s'>%s</a>"
| sumitsahrawat/IHaskell | src/IHaskell/Eval/Hoogle.hs | mit | 8,550 | 0 | 20 | 2,263 | 2,518 | 1,267 | 1,251 | 199 | 4 |
module Error where
import LispData
import Control.Monad.Except
import Text.ParserCombinators.Parsec (ParseError)
-- | Every kind of error the interpreter can raise.
data LispError = NumArgs Integer [LispVal]        -- wrong arity: expected count + actual args
               | TypeMismatch String LispVal      -- expected type name + offending value
               | Parser ParseError                -- wrapped Parsec error
               | BadSpecialForm String LispVal    -- message + the unrecognized form
               | NotFunction String String        -- message + the name that is not a function
               | UnboundVar String String         -- message + the unbound variable name
               | Default String                   -- catch-all with a plain message
-- | Human-readable rendering of a 'LispError'; backs the 'Show'
-- instance.  The original was non-exhaustive: the 'Default'
-- constructor had no equation and would crash with a pattern-match
-- failure; it now renders its message directly.
showError :: LispError -> String
showError (UnboundVar message varname) = message ++ ": " ++ varname
showError (BadSpecialForm message form) = message ++ ": " ++ show form
showError (NotFunction message func) = message ++ ": " ++ show func
showError (NumArgs expected found) = "Expected " ++ show expected ++ " args; found values " ++ unwordsList found
showError (TypeMismatch expected found) = "Invalid type: expected " ++ expected ++ ", found " ++ show found
showError (Parser parseErr) = "Parse error at " ++ show parseErr
showError (Default message) = message
-- 'Show' delegates to the dedicated renderer above.
instance Show LispError where show = showError

-- | Convenience alias: a computation that may fail with a 'LispError'.
type ThrowsError = Either LispError
-- | Turn any 'Left' error into a 'Right' of its printed form, so the
-- result can always be taken apart with 'extractValue'.  Generalised
-- from @Either LispError String@ to any 'Show'-able error type; all
-- existing call sites still typecheck unchanged.
trapError :: (Show e) => Either e String -> Either e String
trapError action = catchError action (return . show)
-- | Unwrap a value that is guaranteed to be 'Right' (callers run
-- 'trapError' first).  The original had no 'Left' equation and died
-- with an opaque pattern-match failure if that contract was broken;
-- now the failure names the function and shows the error.
extractValue :: ThrowsError a -> a
extractValue (Right val) = val
extractValue (Left err) = error $ "extractValue: unexpected error value: " ++ show err
| evinstk/scheme-by-haskell | Error.hs | mit | 1,222 | 0 | 8 | 284 | 339 | 176 | 163 | 24 | 1 |
-- Game State
module State(State(..), initialGameState, resetGameState) where
import Playfield
import Piece
import System.Random
-- | Complete game state threaded through the update/render loop.
data State = State
    { well :: Well                    -- playfield contents
    , time :: Float                   -- elapsed time, seconds
    , deltaTime :: Float              -- time since last frame, seconds
    , secondsToNextMove :: Float      -- countdown until the piece drops
    , piece :: Piece                  -- the currently falling tetromino
    , piecePos :: (Int, Int)          -- its position in the well
    , randomSeed :: StdGen            -- generator for upcoming pieces
    , score :: Int
    , accelerate :: Bool              -- soft-drop flag
    } deriving (Show)
-- | Fresh game: empty well, zeroed clocks and score, an O piece at the
-- origin.
initialGameState :: State
initialGameState = State
    { well = emptyWell
    , time = 0
    , deltaTime = 0
    , secondsToNextMove = 0
    , piece = tetrominoO
    , piecePos = (0, 0)
    , randomSeed = mkStdGen 0 -- fixed seed; presumably replaced by the caller -- confirm
    , score = 0
    , accelerate = False
    }
-- | Restart the game from scratch, carrying over only the random seed
-- from the previous state.
resetGameState :: State -> State
resetGameState old = initialGameState { randomSeed = randomSeed old }
module Main where
import Git
-- | Print the current branch and remote URL.
-- NOTE(review): the original bound each result to the name 'putStrLn'
-- ("putStrLn <- currentBranch"), shadowing the printer and silently
-- discarding both values; printing them appears to be the intent.
-- Assumes 'currentBranch' and 'remoteUrl' return @IO String@ --
-- confirm against the Git module.
main :: IO ()
main = do
  currentBranch >>= putStrLn
  remoteUrl >>= putStrLn
| roberto/octo-reviews | src/Main.hs | mit | 119 | 0 | 8 | 28 | 44 | 22 | 22 | 7 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module JoScript.Data.Error where
import Protolude hiding (Location)
import Data.Aeson ((.=))
import qualified Data.Aeson.Types as A
import JoScript.Data.Position (Position)
import JoScript.Util.Json (withObject)
import JoScript.Data.Lexer (LpRepr(..), LpReprKind(..))
import qualified JoScript.Data.Block as Bp
import qualified JoScript.Data.Lexer as Lp
-- | An error paired with where it happened.
data Error = Error Repr Location
  deriving (Eq, Show)

-- | Error location; currently always a known source position.
data Location = Known Position
  deriving (Eq, Show)

-- | A breadcrumb attached to parse errors: a position plus a note.
data Label = Label { position :: Position, description :: Text }
  deriving (Show, Eq)

-- | Which pipeline stage produced the error.
data Repr
  = IndentError IndentErrorT
  | LexerError LexerErrorT
  | ParseError ParseErrorT [Label]
  deriving (Eq, Show)

data IndentErrorT
  = ShallowDedent
  deriving (Eq, Show)

data LexerErrorT
  = LUnexpectedEnd
  | LUnexpectedToken Bp.BpRepr
  | LUnexpectedCharacter Char
  | LUnknownTokenStart Char
  | LInvalidIntSuffix Char
  | LDuplicateDecimial
  deriving (Eq, Show)

data ParseErrorT
  = PUnexpectedEnd
  | PImpossible
  | PIncompleteAlt
  | PExpectedToken LpReprKind LpRepr
  | PUnexpectedToken LpRepr
  deriving (Eq, Show)
--------------------------------------------------------------
-- exports --
--------------------------------------------------------------
-- | Build an 'Error' at a known source position.
known :: Repr -> Position -> Error
known repr = Error repr . Known
-- | Short tag naming the pipeline stage an error came from.
reprDescription :: Repr -> Text
reprDescription repr = case repr of
  IndentError _  -> "text:block"
  LexerError _   -> "text:lexer"
  ParseError _ _ -> "text:parse"
-- | Human-readable message for each lexer error variant.
lexerErrMsg :: LexerErrorT -> Text
lexerErrMsg err = case err of
  LUnexpectedEnd         -> "unexpected lexer ended"
  LUnexpectedToken _     -> "unexpected block token"
  LUnexpectedCharacter _ -> "unexpected character"
  LUnknownTokenStart _   -> "unexpected character"
  LInvalidIntSuffix _    -> "integer was suffixed with invalid character"
  LDuplicateDecimial     -> "duplicated decimal place in float"
-- | Human-readable message for each parser error variant.
-- Fixes the user-facing typo "encounted" -> "encountered".
parseErrMsg :: ParseErrorT -> Text
parseErrMsg PUnexpectedEnd = "unexpected parse end during parse"
parseErrMsg PIncompleteAlt = "implementation error"
parseErrMsg PUnexpectedToken{} = "encountered unexpected token"
parseErrMsg PExpectedToken{} = "Expected different token"
parseErrMsg PImpossible = "Impossible error"
{- Determines which error occurs most recently in a file;
   ties go to the first argument. -}
newestError :: Error -> Error -> Error
newestError a@(Error _ (Known posA)) b@(Error _ (Known posB)) =
  case compare posA posB of
    LT -> b
    _  -> a
--------------------------------------------------------------
-- instances --
--------------------------------------------------------------
-- JSON serialisation for diagnostics output.  'withObject' (from
-- JoScript.Util.Json) merges extra key/value pairs into an existing
-- object encoding.
instance A.ToJSON Error where
  toJSON (Error repr loc) = A.object ["location" .= loc, "repr" .= repr]

instance A.ToJSON Location where
  toJSON (Known p) = withObject ["type" .= knownS] (A.toJSON p) where
    knownS = "known" :: Text

instance A.ToJSON Label where
  toJSON Label{..} = A.object ["description" .= description, "position" .= position]

instance A.ToJSON Repr where
  toJSON t = withObject ["level" .= reprDescription t] (withType t) where
    withType (IndentError err) = A.toJSON err
    withType (LexerError err) = A.toJSON err
    -- parse errors additionally carry their breadcrumb path
    withType (ParseError err lbs) = withObject ["path" .= lbs] (A.toJSON err)

instance A.ToJSON IndentErrorT where
  toJSON ShallowDedent = A.object ["message" .= ("dedent depth is too shallow" :: Text)]

instance A.ToJSON LexerErrorT where
  toJSON t = withObject ["message" .= lexerErrMsg t] (withType t) where
    withType (LUnexpectedToken token) = A.object ["token" .= token]
    withType (LUnknownTokenStart c) = A.object ["character" .= c]
    withType (LUnexpectedCharacter c) = A.object ["character" .= c]
    withType (LInvalidIntSuffix c) = A.object ["character" .= c]
    withType _ = A.object []

instance A.ToJSON ParseErrorT where
  toJSON t = withObject ["message" .= parseErrMsg t] (withType t) where
    withType (PUnexpectedToken t) = A.object ["read" .= t]
    withType (PExpectedToken e t) = A.object ["read" .= t, "expected-type" .= e]
    withType ____________________ = A.object []
| AKST/jo | source/lib/JoScript/Data/Error.hs | mit | 4,220 | 0 | 11 | 795 | 1,197 | 638 | 559 | -1 | -1 |
-- NOTE(review): matches the shape of diagrams-lib's ArrowOpts record
-- (head/tail shapes, shaft trail, gaps, per-part styles and lengths).
-- Field semantics are inferred from the names -- confirm against the
-- diagrams documentation.
data ArrowOpts n
  = ArrowOpts
  { _arrowHead :: ArrowHT n
  , _arrowTail :: ArrowHT n
  , _arrowShaft :: Trail V2 n
  , _headGap :: Measure n
  , _tailGap :: Measure n
  , _headStyle :: Style V2 n
  , _headLength :: Measure n
  , _tailStyle :: Style V2 n
  , _tailLength :: Measure n
  , _shaftStyle :: Style V2 n
  }
| jeffreyrosenbluth/NYC-meetup | meetup/ArrowOpts.hs | mit | 351 | 0 | 9 | 116 | 109 | 60 | 49 | 12 | 0 |
import Probs1to10
import Probs11to20
import Probs21to30
import Probs31to40
-- | Exercise driver: run each solved problem on a sample input and
-- print the result.  'print' is 'putStrLn . show', so the output is
-- byte-identical to the original putStrLn/show chains.
main :: IO ()
main = do
  print $ mlast "RandPer"
  print $ lastbutone "RandPer"
  print $ kth "RandPer" 0
  print $ mlen "RandPer"
  print $ rev "RandPer"
  print $ frev "RandPer"
  print $ ispalin "RandPer"
  print $ flatlist ((List [Elem 1, List [Elem 2, List [Elem 3, Elem 4], Elem 5]]) :: NestedList Int)
  print $ flatten ((List [Elem 1, List [Elem 2, List [Elem 3, Elem 4], Elem 5]]) :: NestedList Int)
  print $ remdups "aaaabccaadeeee"
  print $ remdups' "aaaabccaadeeee"
  print $ pack "aaaabccaadeeee"
  print $ encode $ pack "aaaabccaadeeee"
  print $ encodemodified "aaaabccaadeeee"
  print $ decodemodified [Multiple 4 'a',Single 'b',Multiple 2 'c', Multiple 2 'a',Single 'd',Multiple 4 'e']
  print $ insertat "chandra" 'M' 3
| ajjaic/Problems99 | src/Main.hs | mit | 1,011 | 0 | 17 | 224 | 420 | 197 | 223 | 22 | 1 |
module Utils where
import Data.Aeson
import qualified Data.ByteString.Lazy as B
import Control.Applicative
-- | Read a file and decode it as JSON, failing via 'error' (with the
-- same message as before) when decoding returns 'Nothing'.
-- Replaces the original's @readFile a >>= return . decode@ (the
-- redundant @>>= return .@ is just 'fmap') and the wildcard binding.
loadJSON :: (FromJSON a) => FilePath -> IO a
loadJSON path = do
  parsed <- decode <$> B.readFile path
  case parsed of
    Nothing -> error $ "Cannot load from " ++ path
    Just value -> return value
-- | Encode a value as JSON and write it to the given file.
writeJSON :: (ToJSON a) => FilePath -> a -> IO ()
writeJSON path value = B.writeFile path (encode value)
| edwardwas/haskcal | src/Utils.hs | mit | 375 | 0 | 11 | 78 | 149 | 77 | 72 | 12 | 2 |
{-Generation of pink noise roughly based on the algorithm from here:
http://www.firstpr.com.au/dsp/pink-noise/phil_burk_19990905_patest_pink.c
From here:
http://www.firstpr.com.au/dsp/pink-noise/
-}
module PinkVMBurk (genPink,initialPinkNoise) where
import WhiteNoise (whiteRandom)
import Data.Bits
-- | Count the trailing zero bits of an 'Int'; by convention the input
-- 0 yields 0 here (callers only pass non-zero counters).
trailingZeros :: Int -> Int
trailingZeros = go 0
  where
    go acc m
      | m == 0    = acc
      | odd m     = acc
      | otherwise = go (acc + 1) (m `shiftR` 1)
-- Number of independent random "rows" combined into the pink signal.
numOfPinkRows::Int
numOfPinkRows = 30

-- Bit mask used to wrap the row-selection counter (2^30 - 1).
indexMask::Integer
indexMask = (1 `shiftL` numOfPinkRows) - 1
-- | Starting generator state: all rows zeroed, sum/index/rand2 at 0.
initialPinkNoise :: PinkNoise
initialPinkNoise = PinkNoise initRows 0 0 0

initRows::[Double]
initRows = replicate numOfPinkRows 0.0
-- | Incremental state of the Voss-McCartney-style generator.
data PinkNoise = PinkNoise {
    rows::[Double]            -- current value of each random row
  , runningSum::Double        -- cached sum of all rows
  , index::Int                -- wrapped counter selecting which row updates
  , rand2::Double             -- per-sample white component added on output
  } deriving (Show,Eq)

-- | A pair of pre-scaled white-noise samples consumed per step.
data RandomValue = RandomValue { rval1::Double
                               , rval2::Double
                               } deriving (Show,Eq)
-- | One 'RandomValue' per output sample (rate * dur + 1 of them), built
-- from two independent white-noise streams scaled by 0.1.
randomValues :: Int -> Int -> [RandomValue]
randomValues rate dur =
  zipWith3 build [0 .. rate * dur] (whiteRandom 100) (whiteRandom 200)
  where
    build _ v1 v2 = RandomValue (v1 * 0.1) (v2 * 0.1)
-- | The sequence of generator states, one per sample, starting from the
-- given state (scanl includes the initial state itself).
getPink::PinkNoise -> Int-> Int -> [PinkNoise]
getPink pn rate dur = scanl nextpValue pn (randomValues rate dur)
-- | Generate the stream of pink-noise sample values for the given rate
-- and duration: each sample is the row sum plus the white component.
genPink :: PinkNoise -> Int -> Int -> [Double]
genPink pn rate dur =
  [ runningSum st + rand2 st | st <- getPink pn rate dur ]
-- | Replace row @idx@ with a fresh random value, keep the running sum
-- in sync, advance the wrapped counter, and store the new white
-- component (the fourth argument; the original produced it via partial
-- application of the 'PinkNoise' constructor).
--
-- Fixes an off-by-one in the original row update: @init (fst sp) ++
-- [rnd] ++ snd sp@ replaced element @idx - 1@, and when @idx == 0@ it
-- *prepended* the value, growing the list -- both out of sync with the
-- running sum, which subtracts element @idx@.
updatePink :: PinkNoise -> Int -> Double -> Double -> PinkNoise
updatePink pn idx rnd newRand2 = PinkNoise newRows newSum newIndex newRand2
  where
    oldRows = rows pn
    -- remove the outgoing row value from the sum, add the new one
    newSum = runningSum pn + rnd - (oldRows !! idx)
    -- replace exactly the element at idx, keeping the list length fixed
    newRows = take idx oldRows ++ rnd : drop (idx + 1) oldRows
    newIndex = (index pn + 1) .&. fromIntegral indexMask
-- | Advance the generator by one sample: when the wrapped counter is
-- non-zero, update the row selected by its trailing-zero count;
-- otherwise only refresh the counter and the white component.
--
-- Fixes a precedence bug: @(.&.)@ (infixl 7) binds tighter than @(+)@
-- (infixl 6), so the original @index pn + 1 .&. fromIntegral indexMask@
-- parsed as @index pn + (1 .&. mask)@ == @index pn + 1@ and the counter
-- never wrapped.
nextpValue::PinkNoise -> RandomValue -> PinkNoise
nextpValue pn rv
  | nextIdx /= 0 = updatePink pn
                              (trailingZeros nextIdx)
                              (rval1 rv)
                              (rval2 rv)
  | otherwise = pn {index = nextIdx, rand2 = rval2 rv}
  where nextIdx = (index pn + 1) .&. fromIntegral indexMask
| danplubell/color-noise | library/PinkVMBurk.hs | mit | 2,590 | 0 | 16 | 897 | 737 | 387 | 350 | 51 | 2 |
-- Project Euler problem 2: sum of the even Fibonacci numbers below
-- four million (sequence starts 1, 2, 3, 5, ...).
fibs :: [Integer]
fibs = go 1 2
  where go a b = a : go b (a + b)

main :: IO ()
main = print total
  where total = sum . takeWhile (< 4000000) . filter even $ fibs
| chrisfosterelli/euler.hs | solutions/002.hs | mit | 121 | 0 | 9 | 23 | 68 | 36 | 32 | 3 | 1 |
{-# LANGUAGE PatternSynonyms #-}
-- For HasCallStack compatibility
{-# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module JSDOM.Generated.SVGNumber
(setValue, getValue, SVGNumber(..), gTypeSVGNumber) where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..))
import qualified Prelude (error)
import Data.Typeable (Typeable)
import Data.Traversable (mapM)
import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import JSDOM.Types
import Control.Applicative ((<$>))
import Control.Monad (void)
import Control.Lens.Operators ((^.))
import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import JSDOM.Enums
-- Machine-generated jsaddle bindings; both accessors go through the
-- JS property "value".
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGNumber.value Mozilla SVGNumber.value documentation>
setValue :: (MonadDOM m) => SVGNumber -> Float -> m ()
setValue self val = liftDOM (self ^. jss "value" (toJSVal val))

-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGNumber.value Mozilla SVGNumber.value documentation>
getValue :: (MonadDOM m) => SVGNumber -> m Float
getValue self
  = liftDOM (realToFrac <$> ((self ^. js "value") >>= valToNumber))
| ghcjs/jsaddle-dom | src/JSDOM/Generated/SVGNumber.hs | mit | 1,486 | 0 | 12 | 171 | 414 | 260 | 154 | 23 | 1 |
module RotButtons where
import Control.Monad
import Control.Monad.Instances
import Data.IORef
import Graphics.UI.Gtk hiding (on,get)
import qualified Graphics.UI.Gtk as G
import Graphics.UI.Gtk.Glade
import Numeric.LinearAlgebra ((<>),(><),(|>),(@>),(@@>),Vector,Matrix)
import qualified Numeric.LinearAlgebra as V
import Types
import Utils
-- | Rebuild the four rotation-button boxes from the current game state:
-- one labelled button per axis index in [3..n] for each box, plus the
-- two fixed clockwise/counter-clockwise buttons from the Glade XML.
refreshButtons :: IORef GameState -> IO ()
refreshButtons gameStateRef = do
  gameState <- readIORef gameStateRef
  let (up,down,left,right) = getButtonBoxes gameState
      n = getN gameState
      -- NOTE(review): rotStep and drawingArea are bound but unused here
      rotStep = getRotStep gameState
      drawingArea = getDrawingArea gameState
      xml = getXML gameState
  -- four parallel lists of (index, button) pairs, one list per box
  [upB,downB,leftB,rightB] <- mapM (mapM $ (\ k -> fmap ((,) k) $ buttonNewWithLabel $ show k)) $ replicate 4 [3..n]
  -- clear out the previous generation of buttons before re-adding
  containerForeach up (containerRemove up)
  containerForeach down (containerRemove down)
  containerForeach left (containerRemove left)
  containerForeach right (containerRemove right)
  -- side codes: 0 = right, 1 = up, 2 = left, 3 = down (0 is right, going CCW)
  mapM_ (processButton gameStateRef up 1) upB
  mapM_ (processButton gameStateRef down 3) downB
  mapM_ (processButton gameStateRef left 2) leftB
  mapM_ (processButton gameStateRef right 0) rightB
  buttonClickAction xml "rotCW" (buttonAction gameStateRef 4 0)
  buttonClickAction xml "rotCCW" (buttonAction gameStateRef 5 0)
  return ()
-- | Add one rotation button to its container, show it, and wire its
-- click handler to 'buttonAction' with the given side code and index.
processButton :: (ContainerClass b) => IORef GameState -> b -> Int -> (Int,Button) -> IO ()
processButton g box side (k,button) = do -- side: 0 = right, going CCW
  containerAdd box button
  widgetShow button
  G.on button buttonActivated $ buttonAction g side k
  return ()
-- | Apply the rotation selected by a button press: compose the chosen
-- rotation-plane matrix onto the viewpoint and request a redraw.
-- NOTE(review): the case over 'side' only covers 0..5 and would fail
-- at runtime for any other value; all call sites in this module pass
-- 0..5 -- confirm no external callers exist.
buttonAction :: IORef GameState -> Int -> Int -> IO ()
buttonAction gameStateRef side k = do
  gameState <- readIORef gameStateRef
  let rotStep = getRotStep gameState
      n = getN gameState
      d = getDrawingArea gameState
      -- sides 0-3 rotate in the plane (0 or 1, k-1), negated for two of
      -- them; 4/5 are the fixed CW/CCW rotations in plane (0, 1)
      rp = case side of
        0 -> rotPlane n 0 (k-1) . negate
        1 -> rotPlane n 1 (k-1) . negate
        2 -> rotPlane n 0 (k-1)
        3 -> rotPlane n 1 (k-1)
        4 -> rotPlane n 0 1
        5 -> rotPlane n 0 1 . negate
  modifyIORef gameStateRef (\gS -> gS { getViewPoint = rp rotStep <> getViewPoint gS })
  widgetQueueDraw d
| benjaminjkraft/nmines | RotButtons.hs | mit | 2,190 | 0 | 19 | 477 | 795 | 403 | 392 | 52 | 6 |
Subsets and Splits