Dataset columns:

  column              type     values
  ------------------  -------  -------------------
  code                string   lengths 5 to 1.03M
  repo_name           string   lengths 5 to 90
  path                string   lengths 4 to 158
  license             string   15 classes
  size                int64    5 to 1.03M
  n_ast_errors        int64    0 to 53.9k
  ast_max_depth       int64    2 to 4.17k
  n_whitespaces       int64    0 to 365k
  n_ast_nodes         int64    3 to 317k
  n_ast_terminals     int64    1 to 171k
  n_ast_nonterminals  int64    1 to 146k
  loc                 int64    -1 to 37.3k
  cycloplexity        int64    -1 to 1.31k
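The column summary above follows the per-column format used by the Hugging Face datasets viewer (string columns report a length range, "stringclasses" the number of distinct values, int64 columns a min/max). As a minimal sketch of how rows with this schema could be inspected, assuming the data is published as a Hugging Face dataset; the repository id below is a placeholder, not the real dataset name:

    from datasets import load_dataset

    # Placeholder repository id -- substitute the actual dataset name.
    ds = load_dataset("someuser/haskell-code-metrics", split="train")

    # Each row carries the raw Haskell source plus path, license and AST metrics.
    row = ds[0]
    print(row["repo_name"], row["path"], row["license"])
    print(row["size"], row["loc"], row["cycloplexity"])

    # Illustrative filter: keep only samples whose AST parsed without errors.
    clean = ds.filter(lambda r: r["n_ast_errors"] == 0)
    print(len(clean))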
module Main where import Options.Applicative import ConduitWordCount main :: IO () main = do opts <- execParser optionsWithHelp runWordCount opts where optionsWithHelp = info (helper <*> options) ( fullDesc <> header "Example wc clone written using Conduit" ) options :: Parser Options options = Options <$> bytesOption <*> wordsOption <*> linesOption <*> filesOption bytesOption, wordsOption, linesOption :: Parser Bool filesOption :: Parser (Maybe [FilePath]) bytesOption = switch ( long "bytes" <> short 'c' <> help "The number of bytes in each input file" ) wordsOption = switch ( long "words" <> short 'w' <> help "The number of words in each input file" ) linesOption = switch ( long "lines" <> short 'l' <> help "The number of lines in each input file" ) filesOption = optional $ some ( argument str ( metavar "FILES..." <> help "files to count, if blank, read from STDIN" )) -- SYNOPSIS -- wc [-clmw] [file ...] -- -- DESCRIPTION -- The wc utility displays the number of lines, words, and bytes contained in -- each input file, or standard input (if no file is specified) to the stan- -- dard output. A line is defined as a string of characters delimited by a -- <newline> character. Characters beyond the final <newline> character will -- not be included in the line count. -- -- A word is defined as a string of characters delimited by white space char- -- acters. White space characters are the set of characters for which the -- iswspace(3) function returns true. If more than one input file is speci- -- fied, a line of cumulative counts for all the files is displayed on a sep- -- arate line after the output for the last file. -- -- The following options are available: -- -- -c The number of bytes in each input file is written to the standard -- output. This will cancel out any prior usage of the -m option. -- -- -l The number of lines in each input file is written to the standard -- output. -- -- -m The number of characters in each input file is written to the -- standard output. If the current locale does not support multibyte -- characters, this is equivalent to the -c option. This will cancel -- out any prior usage of the -c option. -- -- -w The number of words in each input file is written to the standard -- output. -- -- When an option is specified, wc only reports the information requested by -- that option. The order of output always takes the form of line, word, -- byte, and file name. The default action is equivalent to specifying the -- -c, -l and -w options. -- -- If no files are specified, the standard input is used and no file name is -- displayed. The prompt will accept input until receiving EOF, or [^D] in -- most environments. -- -- ENVIRONMENT -- The LANG, LC_ALL and LC_CTYPE environment variables affect the execution -- of wc as described in environ(7). -- -- EXIT STATUS -- The wc utility exits 0 on success, and >0 if an error occurs. -- -- EXAMPLES -- Count the number of characters, words and lines in each of the files -- report1 and report2 as well as the totals for both: -- -- wc -mlw report1 report2 -- -- COMPATIBILITY -- Historically, the wc utility was documented to define a word as a ``maxi- -- mal string of characters delimited by <space>, <tab> or <newline> charac- -- ters''. The implementation, however, did not handle non-printing charac- -- ters correctly so that `` ^D^E '' counted as 6 spaces, while -- ``foo^D^Ebar'' counted as 8 characters. 4BSD systems after 4.3BSD modi- -- fied the implementation to be consistent with the documentation. 
This -- implementation defines a ``word'' in terms of the iswspace(3) function, as -- required by IEEE Std 1003.2 (``POSIX.2''). -- -- SEE ALSO -- iswspace(3) -- -- STANDARDS -- The wc utility conforms to IEEE Std 1003.1-2001 (``POSIX.1'').
repo_name: cschneid/wc-conduits | path: src/Main.hs | license: bsd-3-clause | size: 4,225
n_ast_errors: 0 | ast_max_depth: 11 | n_whitespaces: 1,132 | n_ast_nodes: 316 | n_ast_terminals: 193 | n_ast_nonterminals: 123 | loc: 34 | cycloplexity: 1
{-# LANGUAGE DeriveDataTypeable #-} module App.Pages.HomePageService (getHomePageText) where import Text.Hastache import Text.Hastache.Context import qualified Data.Text.Lazy.IO as TL import Data.Text.Lazy import App.TeamDetails.Types as Team(TeamDetails(..), Person(..)) import App.Roster.Types (TeamRoster(..), current, next) ---- UI specific stuff. --- home page getHomePageText :: TeamDetails -> TeamRoster -> IO Text getHomePageText team roster = populateHomePage $ getHomePageDtoFromTeam team roster --- Helpers data HomePageDto = HomePageDto { tName :: String ,thisDuty :: [String] ,nxtDuty :: [String] ,teamMembers :: [Person] } emptyDto :: String -> HomePageDto emptyDto tName = HomePageDto tName [] [] [] getHomePageDtoFromTeam :: TeamDetails -> TeamRoster -> HomePageDto getHomePageDtoFromTeam team roster = let thisDuty = pairToList $ current roster nxtDuty = pairToList $ next roster teamMembers = members team in HomePageDto (Team.teamName team) thisDuty nxtDuty teamMembers populateHomePage :: HomePageDto -> IO Text populateHomePage dto = do let context "name" = MuVariable $ tName dto context "thisDuty.p1" = MuVariable $ (thisDuty dto) !! 0 context "thisDuty.p2" = MuVariable $ (thisDuty dto) !! 1 context "nxtDuty.p1" = MuVariable $ (nxtDuty dto) !! 0 context "nxtDuty.p2" = MuVariable $ (nxtDuty dto) !! 1 context "teamId" = MuVariable $ tName dto context "people" = MuList $ Prelude.map (mkStrContext . mkListContext) (teamMembers dto) where mkListContext p = \val -> case val of "pName" -> MuVariable $ name p "pTimes" -> MuVariable $ timesOnDuty p useTemplate "templates/index.html" context useTemplate :: String -> (String -> MuType IO) -> IO Text useTemplate templateName context = hastacheFile defaultConfig templateName (mkStrContext context) -- TODO Remove. The model should be a pair. pairToList :: (a,a) -> [a] pairToList (fst, snd) = [fst, snd]
repo_name: afcastano/cafe-duty | path: src/App/Pages/HomePageService.hs | license: bsd-3-clause | size: 2,400
n_ast_errors: 0 | ast_max_depth: 17 | n_whitespaces: 781 | n_ast_nodes: 590 | n_ast_terminals: 318 | n_ast_nonterminals: 272 | loc: 39 | cycloplexity: 8
--------------------------------------------------------------------------------
{-# LANGUAGE ForeignFunctionInterface #-}
module Firefly.Audio.Sound
    ( Sound
    , soundFromFile
    , soundFilePath
    ) where

--------------------------------------------------------------------------------
import Control.Applicative ((<$>))
import Foreign.C.String
import Foreign.C.Types
import Foreign.ForeignPtr
import Foreign.Ptr
import System.IO.Unsafe (unsafePerformIO)

--------------------------------------------------------------------------------
import Firefly.Audio.Internal

--------------------------------------------------------------------------------
foreign import ccall unsafe "ff_soundFromFile" ff_soundFromFile
    :: CString -> IO (Ptr CSound)
foreign import ccall "&ff_soundFree" ff_soundFree
    :: FunPtr (Ptr CSound -> IO ())
foreign import ccall unsafe "ff_soundFilePath" ff_soundFilePath
    :: Ptr CSound -> IO (Ptr CChar)

--------------------------------------------------------------------------------
-- | Load a sound sample from a file. Supported formats depend on your SDL_mixer
-- version, WAV is probably the safest bet, but OGG should also be fine.
soundFromFile :: FilePath -> IO Sound
soundFromFile filePath = do
    ptr <- withCString filePath ff_soundFromFile
    if ptr /= nullPtr
        then Sound <$> newForeignPtr ff_soundFree ptr
        else error $
            "Firefly.Audio.Sound.soundFromFile: Can't load " ++ show filePath

--------------------------------------------------------------------------------
soundFilePath :: Sound -> FilePath
soundFilePath (Sound fptr) = unsafePerformIO $ withForeignPtr fptr $ \ptr ->
    ff_soundFilePath ptr >>= peekCString
repo_name: jaspervdj/firefly | path: src/Firefly/Audio/Sound.hs | license: bsd-3-clause | size: 1,790
n_ast_errors: 0 | ast_max_depth: 10 | n_whitespaces: 316 | n_ast_nodes: 278 | n_ast_terminals: 153 | n_ast_nonterminals: 125 | loc: 28 | cycloplexity: 2
{-#LANGUAGE MultiParamTypeClasses #-}
{-#LANGUAGE OverloadedStrings #-}

module Twilio.Recordings
  ( -- * Resource
    Recordings(..)
  , Twilio.Recordings.get
  ) where

import Control.Applicative
import Control.Monad.Catch
import Data.Aeson
import Data.Maybe

import Control.Monad.Twilio
import Twilio.Internal.Request
import Twilio.Internal.Resource as Resource
import Twilio.Recording
import Twilio.Types

{- Resource -}

data Recordings = Recordings
  { recordingsPagingInformation :: PagingInformation
  , recordingList :: [Recording]
  } deriving (Show, Eq)

instance List Recordings Recording where
  getListWrapper = wrap (Recordings . fromJust)
  getList = recordingList
  getPlural = Const "recordings"

instance FromJSON Recordings where
  parseJSON = parseJSONToList

instance Get0 Recordings where
  get0 = request parseJSONFromResponse =<< makeTwilioRequest "/Recordings.json"

-- | Get 'Recordings'.
get :: MonadThrow m => TwilioT m Recordings
get = Resource.get
repo_name: seagreen/twilio-haskell | path: src/Twilio/Recordings.hs | license: bsd-3-clause | size: 983
n_ast_errors: 0 | ast_max_depth: 9 | n_whitespaces: 145 | n_ast_nodes: 210 | n_ast_terminals: 123 | n_ast_nonterminals: 87 | loc: 30 | cycloplexity: 1
module CalculatorKata.Day3Spec (spec) where

import Test.Hspec

import CalculatorKata.Day3 (calculate)

spec :: Spec
spec = do
       it "calculates one digit"
          (calculate "5" == 5.0)
       it "calculates many digits"
          (calculate "435" == 435.0)
--       it "calculates addition"
--          (calculate "45+23" == 45.0+23.0)
--       it "calculates subtraction"
--          (calculate "56-45" == 56.0-45.0)
       it "calculates multiplication"
          (calculate "45*2" == 45.0*2.0)
       it "calculates division"
          (calculate "56/12" == 56.0/12.0)
repo_name: Alex-Diez/haskell-tdd-kata | path: old-katas/test/CalculatorKata/Day3Spec.hs | license: bsd-3-clause | size: 620
n_ast_errors: 0 | ast_max_depth: 11 | n_whitespaces: 204 | n_ast_nodes: 120 | n_ast_terminals: 62 | n_ast_nonterminals: 58 | loc: 13 | cycloplexity: 1
module Control.OpenRTB.Auction where

{- whats our query vocab? maybe ands a ords -}

data BQuery = AND [BQuery] | OR [BQuery] | Simple AtomicQuery

data AtomicQuery = HasIPPrefix | DeviceInfo | GeoTimeZone

data Query = DaQuery {has :: BQuery, butExcept:: RejectionRules }

{- -}

{- compile current universe o -}

data EvaluationFunction = LogisticFeature (Vector (Int,Double)) Budget |
repo_name: cartazio/hopenRTB | path: src/Control/OpenRTB/Auction.hs | license: bsd-3-clause | size: 427
n_ast_errors: 1 | ast_max_depth: 9 | n_whitespaces: 103 | n_ast_nodes: 92 | n_ast_terminals: 58 | n_ast_nonterminals: 34 | loc: -1 | cycloplexity: -1
{-Joseph Eremondi UU# 4229924 Utrecht University, APA 2015 Project one: dataflow analysis March 17, 2015 -} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE OverlappingInstances #-} {-# LANGUAGE StandaloneDeriving #-} module Optimize.Types where -- |A central place to put types and definitions -- | to avoid dep cycles import qualified AST.Annotation as Annotate import AST.Expression.General import qualified AST.Pattern as Pattern import AST.Type as CanonicalType import qualified AST.Variable as Var import qualified Data.Map as Map import qualified Elm.Compiler.Module as PublicModule import Text.PrettyPrint as P import AST.PrettyPrint {- type WholeProgOptFun = [PublicModule.Name] -> Map.Map PublicModule.Name (PublicModule.Module, PublicModule.Interface) -> Map.Map PublicModule.Name (PublicModule.Module, PublicModule.Interface) -} -- | Generic type for an optimization transformation type ModuleOptFun = Map.Map PublicModule.Name PublicModule.Interface -> PublicModule.Name -> (PublicModule.Module, PublicModule.Interface) -> (PublicModule.Module, PublicModule.Interface) -- |Export from AST so that things are nice and encapsulated type Region = Annotate.Region type Var = Var.Canonical type Pattern = Pattern.CanonicalPattern -- |Environment types -- |We use maps to store what variables are and aren't in scope at a given level -- |And the label of the expression in which they were declared -- |We never store values for the variables, so we can just use sets -- |These environments will often be used as "context" for tree traversals type Env l = (Map.Map (Var ) l) -- | We label each sub-expression in our AST with a unique integer type Label = Int --newtype Label = Label Int -- deriving (Eq, Ord, Show) -- | Used as the initial value we pass to the fold that labels an AST startLabel :: Label startLabel = 1 -- | Generic type for a Canonical expression generated after Type-checking -- | But with the annotation on expressions left open type AExpr a = Expr a (GenericDef a Var) Var type AExpr' a = Expr' a (GenericDef a Var) Var -- | The main expression type used during optimization -- | In addition to line-number information, we give each sub-expression -- | A unique label, and an environment mapping each name to the point it was defined type LabeledExpr = AExpr (Region, Label, Env Label) type LabeledExpr' = AExpr' (Region, Label, Env Label) -- | Basic getter for labels getLabel :: LabeledExpr -> Label getLabel (Annotate.A (_,a,_) _) = a {-| Generic type for a definition, as in a Let expression. We need this because the form defined in AST.Expression.General is too restrictive on the annotation types allowed for expressions. |-} data GenericDef a v = GenericDef { defPat :: Pattern, defBody :: (Expr a (GenericDef a v) v), defType:: (Maybe CanonicalType) } -- | The main Definition type we use for optimization type LabelDef = GenericDef (Region, Label, Env Label) Var -- | We need this to be able to pretty-print annotated ASTs instance Pretty LabelDef where pretty (GenericDef pattern expr maybeTipe) = P.vcat [ annotation, definition ] where definition = pretty pattern <+> P.equals <+> pretty expr annotation = case maybeTipe of Nothing -> P.empty Just tipe -> pretty pattern <+> P.colon <+> pretty tipe deriving instance Show LabelDef
repo_name: JoeyEremondi/utrecht-apa-p1 | path: src/Optimize/Types.hs | license: bsd-3-clause | size: 3,559
n_ast_errors: 0 | ast_max_depth: 13 | n_whitespaces: 790 | n_ast_nodes: 518 | n_ast_terminals: 310 | n_ast_nonterminals: 208 | loc: 43 | cycloplexity: 1
module Physics.Falling.Shape.CSO
( CSO
, AnnotatedCSO
, mkCSO
, mkCSOWithTransforms
, mkAnnotatedCSO
, mkAnnotatedCSOWithTransforms
)
where

import Physics.Falling.Math.Transform
import Physics.Falling.Shape.ImplicitShape
import Physics.Falling.Shape.TransformedShape
import Physics.Falling.Shape.MinkowskiSum
import Physics.Falling.Shape.ShapeReflection

type CSO          g1 g2 v = MinkowskiSum          g1 (ShapeReflection g2 v) v
type AnnotatedCSO g1 g2 v = AnnotatedMinkowskiSum g1 (ShapeReflection g2 v) v

mkCSO :: (ImplicitShape g1 v, ImplicitShape g2 v) => g1 -> g2 -> CSO g1 g2 v
mkCSO g1 g2 = MinkowskiSum g1 (ShapeReflection g2)

mkCSOWithTransforms :: (ImplicitShape g1 v, ImplicitShape g2 v, Transform m v) =>
                       (g1, m) -> (g2, m) ->
                       CSO (TransformedShape g1 m v) (TransformedShape g2 m v) v
mkCSOWithTransforms (g1, t1) (g2, t2) =
  MinkowskiSum (TransformedShape g1 t1) $ ShapeReflection $ TransformedShape g2 t2

mkAnnotatedCSO :: (ImplicitShape g1 v, ImplicitShape g2 v) => g1 -> g2 -> AnnotatedCSO g1 g2 v
mkAnnotatedCSO g1 g2 = AnnotatedMinkowskiSum g1 (ShapeReflection g2)

mkAnnotatedCSOWithTransforms :: (ImplicitShape g1 v, ImplicitShape g2 v, Transform m v) =>
                                (g1, m) -> (g2, m) ->
                                AnnotatedCSO (TransformedShape g1 m v) (TransformedShape g2 m v) v
mkAnnotatedCSOWithTransforms (g1, t1) (g2, t2) =
  AnnotatedMinkowskiSum (TransformedShape g1 t1) $ ShapeReflection $ TransformedShape g2 t2
repo_name: sebcrozet/falling | path: Physics/Falling/Shape/CSO.hs | license: bsd-3-clause | size: 1,489
n_ast_errors: 0 | ast_max_depth: 10 | n_whitespaces: 291 | n_ast_nodes: 481 | n_ast_terminals: 264 | n_ast_nonterminals: 217 | loc: 27 | cycloplexity: 1
{-# LANGUAGE OverloadedStrings #-} -- | Checking for missing cases in a match expression. Based on -- "Warnings for pattern matching" by Luc Maranget. We only detect -- inexhaustiveness here - ideally, we would also like to check for -- redundant cases. module Language.Futhark.TypeChecker.Match ( unmatched, Match, ) where import qualified Data.Map.Strict as M import Data.Maybe import Futhark.Util (maybeHead, nubOrd) import Futhark.Util.Pretty hiding (bool, group, space) import Language.Futhark hiding (ExpBase (Constr)) data Constr = Constr Name | ConstrTuple | ConstrRecord [Name] | -- | Treated as 0-ary. ConstrLit PatLit deriving (Eq, Ord, Show) -- | A representation of the essentials of a pattern. data Match = MatchWild StructType | MatchConstr Constr [Match] StructType deriving (Eq, Ord, Show) matchType :: Match -> StructType matchType (MatchWild t) = t matchType (MatchConstr _ _ t) = t pprMatch :: Int -> Match -> Doc pprMatch _ MatchWild {} = "_" pprMatch _ (MatchConstr (ConstrLit l) _ _) = ppr l pprMatch p (MatchConstr (Constr c) ps _) = parensIf (not (null ps) && p >= 10) $ "#" <> ppr c <> mconcat (map ((" " <>) . pprMatch 10) ps) pprMatch _ (MatchConstr ConstrTuple ps _) = parens $ commasep $ map (pprMatch (-1)) ps pprMatch _ (MatchConstr (ConstrRecord fs) ps _) = braces $ commasep $ zipWith ppField fs ps where ppField name t = text (nameToString name) <> equals <> pprMatch (-1) t instance Pretty Match where ppr = pprMatch (-1) patternToMatch :: Pat -> Match patternToMatch (Id _ (Info t) _) = MatchWild $ toStruct t patternToMatch (Wildcard (Info t) _) = MatchWild $ toStruct t patternToMatch (PatParens p _) = patternToMatch p patternToMatch (PatAttr _ p _) = patternToMatch p patternToMatch (PatAscription p _ _) = patternToMatch p patternToMatch (PatLit l (Info t) _) = MatchConstr (ConstrLit l) [] $ toStruct t patternToMatch p@(TuplePat ps _) = MatchConstr ConstrTuple (map patternToMatch ps) $ patternStructType p patternToMatch p@(RecordPat fs _) = MatchConstr (ConstrRecord fnames) (map patternToMatch ps) $ patternStructType p where (fnames, ps) = unzip $ sortFields $ M.fromList fs patternToMatch (PatConstr c (Info t) args _) = MatchConstr (Constr c) (map patternToMatch args) $ toStruct t isConstr :: Match -> Maybe Name isConstr (MatchConstr (Constr c) _ _) = Just c isConstr _ = Nothing complete :: [Match] -> Bool complete xs | Just x <- maybeHead xs, Scalar (Sum all_cs) <- matchType x, Just xs_cs <- mapM isConstr xs = all (`elem` xs_cs) (M.keys all_cs) | otherwise = (any (isBool True) xs && any (isBool False) xs) || all isRecord xs || all isTuple xs where isBool b1 (MatchConstr (ConstrLit (PatLitPrim (BoolValue b2))) _ _) = b1 == b2 isBool _ _ = False isRecord (MatchConstr ConstrRecord {} _ _) = True isRecord _ = False isTuple (MatchConstr ConstrTuple _ _) = True isTuple _ = False specialise :: [StructType] -> Match -> [[Match]] -> [[Match]] specialise ats c1 = go where go ((c2 : row) : ps) | Just args <- match c1 c2 = (args ++ row) : go ps | otherwise = go ps go _ = [] match (MatchConstr c1' _ _) (MatchConstr c2' args _) | c1' == c2' = Just args | otherwise = Nothing match _ MatchWild {} = Just $ map MatchWild ats match _ _ = Nothing defaultMat :: [[Match]] -> [[Match]] defaultMat = mapMaybe onRow where onRow (MatchConstr {} : _) = Nothing onRow (MatchWild {} : ps) = Just ps onRow [] = Nothing -- Should not happen. 
findUnmatched :: [[Match]] -> Int -> [[Match]] findUnmatched pmat n | ((p : _) : _) <- pmat, Just heads <- mapM maybeHead pmat = if complete heads then completeCase heads else incompleteCase (matchType p) heads where completeCase cs = do c <- cs let ats = case c of MatchConstr _ args _ -> map matchType args MatchWild _ -> [] a_k = length ats pmat' = specialise ats c pmat u <- findUnmatched pmat' (a_k + n - 1) pure $ case c of MatchConstr c' _ t -> let (r, p) = splitAt a_k u in MatchConstr c' r t : p MatchWild t -> MatchWild t : u incompleteCase pt cs = do u <- findUnmatched (defaultMat pmat) (n - 1) if null cs then return $ MatchWild pt : u else case pt of Scalar (Sum all_cs) -> do -- Figure out which constructors are missing. let sigma = mapMaybe isConstr cs notCovered (k, _) = k `notElem` sigma (cname, ts) <- filter notCovered $ M.toList all_cs pure $ MatchConstr (Constr cname) (map MatchWild ts) pt : u _ -> -- This is where we could have enumerated missing match -- values (e.g. for booleans), rather than just emitting a -- wildcard. pure $ MatchWild pt : u -- If we get here, then the number of columns must be zero. findUnmatched [] _ = [[]] findUnmatched _ _ = [] {-# NOINLINE unmatched #-} -- | Find the unmatched cases. unmatched :: [Pat] -> [Match] unmatched orig_ps = -- The algorithm may find duplicate example, which we filter away -- here. nubOrd $ mapMaybe maybeHead $ findUnmatched (map ((: []) . patternToMatch) orig_ps) 1
repo_name: HIPERFIT/futhark | path: src/Language/Futhark/TypeChecker/Match.hs | license: isc | size: 5,410
n_ast_errors: 0 | ast_max_depth: 19 | n_whitespaces: 1,435 | n_ast_nodes: 1,981 | n_ast_terminals: 1,005 | n_ast_nonterminals: 976 | loc: 134 | cycloplexity: 6
-- | -- Copyright : (c) 2015 Egor Tensin <[email protected]> -- License : MIT -- Maintainer : [email protected] -- Stability : experimental -- Portability : Windows-only module Main (main) where import Control.Monad (void) import Control.Monad.Trans.Except (runExceptT) import Data.Monoid ((<>)) import Data.Version (showVersion) import System.IO.Error (ioError) import Options.Applicative import qualified Paths_windows_env as Meta import qualified WindowsEnv import Utils.Prompt import Utils.PromptMessage data Options = Options { optYes :: Bool , optGlobal :: Bool , optName :: WindowsEnv.Name , optValue :: String } deriving (Eq, Show) optionParser :: Parser Options optionParser = Options <$> optYesDesc <*> optGlobalDesc <*> optNameDesc <*> optValueDesc where optYesDesc = switch $ long "yes" <> short 'y' <> help "Skip confirmation prompt" optGlobalDesc = switch $ long "global" <> short 'g' <> help "Set for all users" optNameDesc = argument str $ metavar "NAME" <> help "Variable name" optValueDesc = argument str $ metavar "VALUE" <> help "Variable value" main :: IO () main = execParser parser >>= setEnv where parser = info (helper <*> versioner <*> optionParser) $ fullDesc <> progDesc "Define environment variables" versioner = infoOption (showVersion Meta.version) $ long "version" <> help "Show version" <> hidden setEnv :: Options -> IO () setEnv options = runExceptT doSetEnv >>= either ioError return where varName = optName options varValue = optValue options forAllUsers = optGlobal options skipPrompt = optYes options profile | forAllUsers = WindowsEnv.AllUsers | otherwise = WindowsEnv.CurrentUser doSetEnv = do expanded <- WindowsEnv.expand varValue let expandable = expanded /= varValue let newValue = WindowsEnv.Value expandable varValue promptAndEngrave newValue promptAndEngrave newValue = do let promptAnd = if skipPrompt then withoutPrompt else withPrompt $ newMessage profile varName newValue let engrave = WindowsEnv.engrave profile varName newValue void $ promptAnd engrave
repo_name: egor-tensin/windows-env | path: app/SetEnv.hs | license: mit | size: 2,353
n_ast_errors: 0 | ast_max_depth: 14 | n_whitespaces: 619 | n_ast_nodes: 564 | n_ast_terminals: 292 | n_ast_nonterminals: 272 | loc: 64 | cycloplexity: 2
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}

module Qi.Test.Config.Identifier where

import Control.Lens
import Control.Monad.Freer
import Control.Monad.Freer.State
import Data.Default (def)
import qualified Data.HashMap.Strict as SHM
import Protolude hiding (runState)
import Qi.Config.AWS (Config (..), s3Config)
import Qi.Config.AWS.S3 (s3IdToBucket)
import qualified Qi.Program.Config.Ipret.State as Config
import Qi.Program.Config.Lang (ConfigEff, s3Bucket)
import Test.Tasty.Hspec

appName :: Text
appName = "testName"

configProgram :: Member ConfigEff effs => Eff effs ()
configProgram = do
  s3Bucket "bucket1" def
  s3Bucket "bucket2" def
  s3Bucket "bucket3" def
  pass

spec :: Spec
spec = parallel $
  describe "Internal Config identifier" $ do
    it "is unique" $ do
      idCount `shouldBe` 3

  where
    idCount = length . SHM.elems $ config ^. s3Config . s3IdToBucket
    config  = snd . run . runState def{ _namePrefix = appName } . Config.run $ configProgram
repo_name: qmuli/qmuli | path: tests/Qi/Test/Config/Identifier.hs | license: mit | size: 1,214
n_ast_errors: 0 | ast_max_depth: 12 | n_whitespaces: 371 | n_ast_nodes: 280 | n_ast_terminals: 162 | n_ast_nonterminals: 118 | loc: -1 | cycloplexity: -1
{-# LANGUAGE FlexibleInstances, OverloadedStrings, FlexibleContexts #-}
{-|
Module      : VizHaskell.RawRepresentation
Description : Special representation of raw JSON data

This module defines the 'RawRepresentation' class, with allows one to use
a data type that already is representable with a valid JSON contract
-}
module VizHaskell.RawRepresentation(RepresentationRaw(..)) where

import VizHaskell.Core
import Data.Aeson
import Data.Aeson.Types(Pair)
import Data.String
import qualified Data.Text
import qualified Data.Vector as V

data RepresentationRaw = RepresentationRaw

instance Representation RepresentationRaw

instance ToJSON a => VizRepresentable (RPair RepresentationRaw a) where
  vizToJSON rpair = toJSON (rPairValue rpair)

{-| The raw representable object is supposed to implement toJSON, so no
    other kind of processing is needed but to call it. -}
repo_name: robarago/vizhaskell | path: hs/CoreRepresentacion/VizHaskell/RawRepresentation.hs | license: gpl-2.0 | size: 874
n_ast_errors: 0 | ast_max_depth: 8 | n_whitespaces: 123 | n_ast_nodes: 112 | n_ast_terminals: 65 | n_ast_nonterminals: 47 | loc: 12 | cycloplexity: 0
{-# LANGUAGE TypeApplications #-} module Test.Pos.Chain.Txp.Gen ( genTxpConfiguration , genPkWitness , genRedeemWitness , genScriptWitness , genTx , genTxAttributes , genTxAux , genTxHash , genTxId , genTxIn , genTxInList , genTxInWitness , genTxOut , genTxOutAux , genTxOutList , genTxpUndo , genTxPayload , genTxProof , genTxSig , genTxSigData , genTxValidationRulesConfig , genTxUndo , genTxWitness , genUnknownWitnessType ) where import Universum import Data.ByteString.Base16 as B16 import Data.Coerce (coerce) import qualified Data.Set as S import qualified Data.Vector as V import Hedgehog import qualified Hedgehog.Gen as Gen import qualified Hedgehog.Range as Range import Pos.Chain.Txp (Tx (..), TxAttributes, TxAux (..), TxId, TxIn (..), TxInWitness (..), TxOut (..), TxOutAux (..), TxPayload, TxProof (..), TxSig, TxSigData (..), TxUndo, TxValidationRulesConfig (..), TxWitness, TxpConfiguration (..), TxpUndo, mkTxPayload) import Pos.Core.Attributes (mkAttributes) import Pos.Crypto (Hash, ProtocolMagic, decodeHash, sign) import Test.Pos.Core.Gen (gen32Bytes, genAddress, genBytes, genCoin, genEpochIndex, genMerkleRoot, genScript, genTextHash, genWord32) import Test.Pos.Crypto.Gen (genAbstractHash, genPublicKey, genRedeemPublicKey, genRedeemSignature, genSecretKey, genSignTag) genTxpConfiguration :: Gen TxpConfiguration genTxpConfiguration = do limit <- Gen.int (Range.constant 0 200) addrs <- Gen.list (Range.linear 0 50) genAddress return (TxpConfiguration limit (S.fromList addrs)) genPkWitness :: ProtocolMagic -> Gen TxInWitness genPkWitness pm = PkWitness <$> genPublicKey <*> genTxSig pm genRedeemWitness :: ProtocolMagic -> Gen TxInWitness genRedeemWitness pm = RedeemWitness <$> genRedeemPublicKey <*> genRedeemSignature pm genTxSigData genTxValidationRulesConfig :: Gen TxValidationRulesConfig genTxValidationRulesConfig = TxValidationRulesConfig <$> genEpochIndex <*> Gen.integral (Range.constant 1 1000) <*> Gen.integral (Range.constant 1 1000) genScriptWitness :: Gen TxInWitness genScriptWitness = ScriptWitness <$> genScript <*> genScript genTx :: Gen Tx genTx = UnsafeTx <$> genTxInList <*> genTxOutList <*> genTxAttributes genTxAttributes :: Gen TxAttributes genTxAttributes = pure $ mkAttributes () genTxAux :: ProtocolMagic -> Gen TxAux genTxAux pm = TxAux <$> genTx <*> (genTxWitness pm) genTxHash :: Gen (Hash Tx) genTxHash = coerce <$> genTextHash genTxId :: Gen TxId genTxId = genBase16Text >>= pure . 
decodeHash >>= either error pure where genBase16Text = decodeUtf8 @Text @ByteString <$> genBase16Bs genBase16Bs :: Gen ByteString genBase16Bs = B16.encode <$> genBytes 32 --genTxId :: Gen TxId --genTxId = coerce <$> genTxHash genTxIn :: Gen TxIn genTxIn = Gen.choice gens where gens = [ TxInUtxo <$> genTxId <*> genWord32 -- 0 is reserved for TxInUtxo tag ----------+ , TxInUnknown <$> Gen.word8 (Range.constant 1 255) <*> gen32Bytes ] genTxInList :: Gen (NonEmpty TxIn) genTxInList = Gen.nonEmpty (Range.linear 1 20) genTxIn genTxOut :: Gen TxOut genTxOut = TxOut <$> genAddress <*> genCoin genTxOutAux :: Gen TxOutAux genTxOutAux = TxOutAux <$> genTxOut genTxOutList :: Gen (NonEmpty TxOut) genTxOutList = Gen.nonEmpty (Range.linear 1 100) genTxOut genTxpUndo :: Gen TxpUndo genTxpUndo = Gen.list (Range.linear 1 50) genTxUndo genTxPayload :: ProtocolMagic -> Gen TxPayload genTxPayload pm = mkTxPayload <$> (Gen.list (Range.linear 0 10) (genTxAux pm)) genTxProof :: ProtocolMagic -> Gen TxProof genTxProof pm = TxProof <$> genWord32 <*> genMerkleRoot genTx <*> genAbstractHash (Gen.list (Range.linear 1 5) (genTxWitness pm)) genTxSig :: ProtocolMagic -> Gen TxSig genTxSig pm = sign pm <$> genSignTag <*> genSecretKey <*> genTxSigData genTxSigData :: Gen TxSigData genTxSigData = TxSigData <$> genTxHash genTxInWitness :: ProtocolMagic -> Gen TxInWitness genTxInWitness pm = Gen.choice gens where gens = [ genPkWitness pm , genRedeemWitness pm , genScriptWitness , genUnknownWitnessType ] genTxUndo :: Gen TxUndo genTxUndo = Gen.nonEmpty (Range.linear 1 10) $ Gen.maybe genTxOutAux genTxWitness :: ProtocolMagic -> Gen TxWitness genTxWitness pm = V.fromList <$> Gen.list (Range.linear 1 10) (genTxInWitness pm) genUnknownWitnessType :: Gen TxInWitness genUnknownWitnessType = UnknownWitnessType <$> Gen.word8 (Range.constant 3 maxBound) <*> gen32Bytes
repo_name: input-output-hk/cardano-sl | path: chain/test/Test/Pos/Chain/Txp/Gen.hs | license: apache-2.0 | size: 5,001
n_ast_errors: 0 | ast_max_depth: 13 | n_whitespaces: 1,248 | n_ast_nodes: 1,275 | n_ast_terminals: 699 | n_ast_nonterminals: 576 | loc: 118 | cycloplexity: 1
-- | Examples of how to use @cryptonite@. module Crypto.Tutorial ( -- * API design -- $api_design -- * Hash algorithms -- $hash_algorithms -- * Symmetric block ciphers -- $symmetric_block_ciphers -- * Combining primitives -- $combining_primitives ) where -- $api_design -- -- APIs in cryptonite are often based on type classes from package -- <https://hackage.haskell.org/package/memory memory>, notably -- 'Data.ByteArray.ByteArrayAccess' and 'Data.ByteArray.ByteArray'. -- Module "Data.ByteArray" provides many primitives that are useful to -- work with cryptonite types. For example function 'Data.ByteArray.convert' -- can transform one 'Data.ByteArray.ByteArrayAccess' concrete type like -- 'Crypto.Hash.Digest' to a 'Data.ByteString.ByteString'. -- -- Algorithms and functions needing random bytes are based on type class -- 'Crypto.Random.Types.MonadRandom'. Implementation 'IO' uses a system source -- of entropy. It is also possible to use a 'Crypto.Random.Types.DRG' with -- 'Crypto.Random.Types.MonadPseudoRandom' -- -- Error conditions are returned with data type 'Crypto.Error.CryptoFailable'. -- Functions in module "Crypto.Error" can convert those values to runtime -- exceptions, 'Maybe' or 'Either' values. -- $hash_algorithms -- -- Hashing a complete message: -- -- > import Crypto.Hash -- > -- > import Data.ByteString (ByteString) -- > -- > exampleHashWith :: ByteString -> IO () -- > exampleHashWith msg = do -- > putStrLn $ " sha1(" ++ show msg ++ ") = " ++ show (hashWith SHA1 msg) -- > putStrLn $ "sha256(" ++ show msg ++ ") = " ++ show (hashWith SHA256 msg) -- -- Hashing incrementally, with intermediate context allocations: -- -- > {-# LANGUAGE OverloadedStrings #-} -- > -- > import Crypto.Hash -- > -- > import Data.ByteString (ByteString) -- > -- > exampleIncrWithAllocs :: IO () -- > exampleIncrWithAllocs = do -- > let ctx0 = hashInitWith SHA3_512 -- > ctx1 = hashUpdate ctx0 ("The " :: ByteString) -- > ctx2 = hashUpdate ctx1 ("quick " :: ByteString) -- > ctx3 = hashUpdate ctx2 ("brown " :: ByteString) -- > ctx4 = hashUpdate ctx3 ("fox " :: ByteString) -- > ctx5 = hashUpdate ctx4 ("jumps " :: ByteString) -- > ctx6 = hashUpdate ctx5 ("over " :: ByteString) -- > ctx7 = hashUpdate ctx6 ("the " :: ByteString) -- > ctx8 = hashUpdate ctx7 ("lazy " :: ByteString) -- > ctx9 = hashUpdate ctx8 ("dog" :: ByteString) -- > print (hashFinalize ctx9) -- -- Hashing incrementally, updating context in place: -- -- > {-# LANGUAGE OverloadedStrings #-} -- > -- > import Crypto.Hash.Algorithms -- > import Crypto.Hash.IO -- > -- > import Data.ByteString (ByteString) -- > -- > exampleIncrInPlace :: IO () -- > exampleIncrInPlace = do -- > ctx <- hashMutableInitWith SHA3_512 -- > hashMutableUpdate ctx ("The " :: ByteString) -- > hashMutableUpdate ctx ("quick " :: ByteString) -- > hashMutableUpdate ctx ("brown " :: ByteString) -- > hashMutableUpdate ctx ("fox " :: ByteString) -- > hashMutableUpdate ctx ("jumps " :: ByteString) -- > hashMutableUpdate ctx ("over " :: ByteString) -- > hashMutableUpdate ctx ("the " :: ByteString) -- > hashMutableUpdate ctx ("lazy " :: ByteString) -- > hashMutableUpdate ctx ("dog" :: ByteString) -- > hashMutableFinalize ctx >>= print -- $symmetric_block_ciphers -- -- > {-# LANGUAGE OverloadedStrings #-} -- > {-# LANGUAGE ScopedTypeVariables #-} -- > {-# LANGUAGE GADTs #-} -- > -- > import Crypto.Cipher.AES (AES256) -- > import Crypto.Cipher.Types (BlockCipher(..), Cipher(..), nullIV, KeySizeSpecifier(..), IV, makeIV) -- > import Crypto.Error (CryptoFailable(..), CryptoError(..)) -- > -- > 
import qualified Crypto.Random.Types as CRT -- > -- > import Data.ByteArray (ByteArray) -- > import Data.ByteString (ByteString) -- > -- > -- | Not required, but most general implementation -- > data Key c a where -- > Key :: (BlockCipher c, ByteArray a) => a -> Key c a -- > -- > -- | Generates a string of bytes (key) of a specific length for a given block cipher -- > genSecretKey :: forall m c a. (CRT.MonadRandom m, BlockCipher c, ByteArray a) => c -> Int -> m (Key c a) -- > genSecretKey _ = fmap Key . CRT.getRandomBytes -- > -- > -- | Generate a random initialization vector for a given block cipher -- > genRandomIV :: forall m c. (CRT.MonadRandom m, BlockCipher c) => c -> m (Maybe (IV c)) -- > genRandomIV _ = do -- > bytes :: ByteString <- CRT.getRandomBytes $ blockSize (undefined :: c) -- > return $ makeIV bytes -- > -- > -- | Initialize a block cipher -- > initCipher :: (BlockCipher c, ByteArray a) => Key c a -> Either CryptoError c -- > initCipher (Key k) = case cipherInit k of -- > CryptoFailed e -> Left e -- > CryptoPassed a -> Right a -- > -- > encrypt :: (BlockCipher c, ByteArray a) => Key c a -> IV c -> a -> Either CryptoError a -- > encrypt secretKey initIV msg = -- > case initCipher secretKey of -- > Left e -> Left e -- > Right c -> Right $ ctrCombine c initIV msg -- > -- > decrypt :: (BlockCipher c, ByteArray a) => Key c a -> IV c -> a -> Either CryptoError a -- > decrypt = encrypt -- > -- > exampleAES256 :: ByteString -> IO () -- > exampleAES256 msg = do -- > -- secret key needs 256 bits (32 * 8) -- > secretKey <- genSecretKey (undefined :: AES256) 32 -- > mInitIV <- genRandomIV (undefined :: AES256) -- > case mInitIV of -- > Nothing -> error "Failed to generate and initialization vector." -- > Just initIV -> do -- > let encryptedMsg = encrypt secretKey initIV msg -- > decryptedMsg = decrypt secretKey initIV =<< encryptedMsg -- > case (,) <$> encryptedMsg <*> decryptedMsg of -- > Left err -> error $ show err -- > Right (eMsg, dMsg) -> do -- > putStrLn $ "Original Message: " ++ show msg -- > putStrLn $ "Message after encryption: " ++ show eMsg -- > putStrLn $ "Message after decryption: " ++ show dMsg -- $combining_primitives -- -- This example shows how to use Curve25519, XSalsa and Poly1305 primitives to -- emulate NaCl's @crypto_box@ construct. -- -- > import qualified Data.ByteArray as BA -- > import Data.ByteString (ByteString) -- > import qualified Data.ByteString as B -- > -- > import qualified Crypto.Cipher.XSalsa as XSalsa -- > import qualified Crypto.MAC.Poly1305 as Poly1305 -- > import qualified Crypto.PubKey.Curve25519 as X25519 -- > -- > -- | Build a @crypto_box@ packet encrypting the specified content with a -- > -- 192-bit nonce, receiver public key and sender private key. -- > crypto_box content nonce pk sk = BA.convert tag `B.append` c -- > where -- > zero = B.replicate 16 0 -- > shared = X25519.dh pk sk -- > (iv0, iv1) = B.splitAt 8 nonce -- > state0 = XSalsa.initialize 20 shared (zero `B.append` iv0) -- > state1 = XSalsa.derive state0 iv1 -- > (rs, state2) = XSalsa.generate state1 32 -- > (c, _) = XSalsa.combine state2 content -- > tag = Poly1305.auth (rs :: ByteString) c -- > -- > -- | Try to open a @crypto_box@ packet and recover the content using the -- > -- 192-bit nonce, sender public key and receiver private key. 
-- > crypto_box_open packet nonce pk sk -- > | B.length packet < 16 = Nothing -- > | BA.constEq tag' tag = Just content -- > | otherwise = Nothing -- > where -- > (tag', c) = B.splitAt 16 packet -- > zero = B.replicate 16 0 -- > shared = X25519.dh pk sk -- > (iv0, iv1) = B.splitAt 8 nonce -- > state0 = XSalsa.initialize 20 shared (zero `B.append` iv0) -- > state1 = XSalsa.derive state0 iv1 -- > (rs, state2) = XSalsa.generate state1 32 -- > (content, _) = XSalsa.combine state2 c -- > tag = Poly1305.auth (rs :: ByteString) c
repo_name: vincenthz/cryptonite | path: Crypto/Tutorial.hs | license: bsd-3-clause | size: 7,954
n_ast_errors: 0 | ast_max_depth: 3 | n_whitespaces: 1,845 | n_ast_nodes: 195 | n_ast_terminals: 192 | n_ast_nonterminals: 3 | loc: 2 | cycloplexity: 0
{-# LANGUAGE OverloadedStrings #-} {-# OPTIONS_HADDOCK hide #-} module Text.Markdown.Inline ( Inline (..) , inlineParser , toInline ) where import Prelude hiding (takeWhile) import Data.Text (Text) import qualified Data.Text as T import Data.Attoparsec.Text import Control.Applicative import Data.Monoid (Monoid, mappend) import qualified Data.Map as Map type RefMap = Map.Map Text Text toInline :: RefMap -> Text -> [Inline] toInline refmap t = case parseOnly (inlineParser refmap) t of Left s -> [InlineText $ T.pack s] Right is -> is (<>) :: Monoid m => m -> m -> m (<>) = mappend data Inline = InlineText Text | InlineItalic [Inline] | InlineBold [Inline] | InlineCode Text | InlineHtml Text | InlineLink Text (Maybe Text) [Inline] -- ^ URL, title, content | InlineImage Text (Maybe Text) Text -- ^ URL, title, content deriving (Show, Eq) inlineParser :: RefMap -> Parser [Inline] inlineParser = fmap combine . many . inlineAny combine :: [Inline] -> [Inline] combine [] = [] combine (InlineText x:InlineText y:rest) = combine (InlineText (x <> y):rest) combine (InlineText x:rest) = InlineText x : combine rest combine (InlineItalic x:InlineItalic y:rest) = combine (InlineItalic (x <> y):rest) combine (InlineItalic x:rest) = InlineItalic (combine x) : combine rest combine (InlineBold x:InlineBold y:rest) = combine (InlineBold (x <> y):rest) combine (InlineBold x:rest) = InlineBold (combine x) : combine rest combine (InlineCode x:InlineCode y:rest) = combine (InlineCode (x <> y):rest) combine (InlineCode x:rest) = InlineCode x : combine rest combine (InlineLink u t c:rest) = InlineLink u t (combine c) : combine rest combine (InlineImage u t c:rest) = InlineImage u t c : combine rest combine (InlineHtml t:rest) = InlineHtml t : combine rest specials :: [Char] specials = "*_`\\[]!<&" inlineAny :: RefMap -> Parser Inline inlineAny refs = inline refs <|> special where special = InlineText . T.singleton <$> satisfy (`elem` specials) inline :: RefMap -> Parser Inline inline refs = text <|> escape <|> paired "**" InlineBold <|> paired "__" InlineBold <|> paired "*" InlineItalic <|> paired "_" InlineItalic <|> doubleCode <|> code <|> link <|> image <|> autoLink <|> html <|> entity where inlinesTill :: Text -> Parser [Inline] inlinesTill end = go id where go front = (string end *> pure (front [])) <|> (do x <- inlineAny refs go $ front . (x:)) text = InlineText <$> takeWhile1 (`notElem` specials) paired t wrap = wrap <$> do _ <- string t is <- inlinesTill t if null is then fail "wrapped around something missing" else return is doubleCode = InlineCode . T.pack <$> (string "`` " *> manyTill anyChar (string " ``")) code = InlineCode <$> (char '`' *> takeWhile1 (/= '`') <* char '`') escape = InlineText . T.singleton <$> (char '\\' *> satisfy (`elem` "\\`*_{}[]()#+-.!>")) takeBalancedBrackets = T.pack <$> go (0 :: Int) where go i = do c <- anyChar case c of '[' -> (c:) <$> go (i + 1) ']' | i == 0 -> return [] | otherwise -> (c:) <$> go (i - 1) _ -> (c:) <$> go i parseUrl = fixUrl . 
T.pack <$> parseUrl' (0 :: Int) parseUrl' level | level > 0 = do c <- anyChar let level' | c == ')' = level - 1 | otherwise = level c' <- if c == '\\' then anyChar else return c cs <- parseUrl' level' return $ c' : cs | otherwise = (do c <- hrefChar if c == '(' then (c:) <$> parseUrl' 1 else (c:) <$> parseUrl' 0) <|> return [] parseUrlTitle defRef = parseUrlTitleInline <|> parseUrlTitleRef defRef parseUrlTitleInside endTitle = do url <- parseUrl mtitle <- (Just <$> title) <|> (skipSpace >> endTitle >> pure Nothing) return (url, mtitle) where title = do space skipSpace _ <- char '"' t <- T.stripEnd . T.pack <$> go return $ if not (T.null t) && T.last t == '"' then T.init t else t where go = (char '\\' *> anyChar >>= \c -> (c:) <$> go) <|> (endTitle *> return []) <|> (anyChar >>= \c -> (c:) <$> go) parseUrlTitleInline = char '(' *> parseUrlTitleInside (char ')') parseUrlTitleRef defRef = do ref' <- (skipSpace *> char '[' *> takeWhile (/= ']') <* char ']') <|> return "" let ref = if T.null ref' then defRef else ref' case Map.lookup (T.unwords $ T.words ref) refs of Nothing -> fail "ref not found" Just t -> either fail return $ parseOnly (parseUrlTitleInside endOfInput) t link = do _ <- char '[' rawContent <- takeBalancedBrackets content <- either fail return $ parseOnly (inlineParser refs) rawContent (url, mtitle) <- parseUrlTitle rawContent return $ InlineLink url mtitle content image = do _ <- string "![" content <- takeBalancedBrackets (url, mtitle) <- parseUrlTitle content return $ InlineImage url mtitle content fixUrl t | T.length t > 2 && T.head t == '<' && T.last t == '>' = T.init $ T.tail t | otherwise = t autoLink = do _ <- char '<' a <- string "http:" <|> string "https:" b <- takeWhile1 (/= '>') _ <- char '>' let url = a `T.append` b return $ InlineLink url Nothing [InlineText url] html = do c <- char '<' t <- takeWhile1 (\x -> ('A' <= x && x <= 'Z') || ('a' <= x && x <= 'z') || x == '/') if T.null t then fail "invalid tag" else do t2 <- takeWhile (/= '>') c2 <- char '>' return $ InlineHtml $ T.concat [ T.singleton c , t , t2 , T.singleton c2 ] entity = rawent "&lt;" <|> rawent "&gt;" <|> rawent "&amp;" <|> rawent "&quot;" <|> rawent "&apos;" <|> decEnt <|> hexEnt rawent t = InlineHtml <$> string t decEnt = do s <- string "&#" t <- takeWhile1 $ \x -> ('0' <= x && x <= '9') c <- char ';' return $ InlineHtml $ T.concat [ s , t , T.singleton c ] hexEnt = do s <- string "&#x" <|> string "&#X" t <- takeWhile1 $ \x -> ('0' <= x && x <= '9') || ('A' <= x && x <= 'F') || ('a' <= x && x <= 'f') c <- char ';' return $ InlineHtml $ T.concat [ s , t , T.singleton c ] hrefChar :: Parser Char hrefChar = (char '\\' *> anyChar) <|> satisfy (notInClass " )")
repo_name: beni55/markdown | path: Text/Markdown/Inline.hs | license: bsd-3-clause | size: 7,262
n_ast_errors: 0 | ast_max_depth: 19 | n_whitespaces: 2,598 | n_ast_nodes: 2,641 | n_ast_terminals: 1,314 | n_ast_nonterminals: 1,327 | loc: 191 | cycloplexity: 10
{-# LANGUAGE TypeOperators #-} module Math.Probably.PlotR where import System.Cmd import System.Directory import Data.List import Data.Unique import TNUtils import Control.Monad.Trans import System.IO import Control.Monad import Data.Maybe data RPlotCmd = RPlCmd { -- plotData :: String, prePlot :: [String], plotArgs :: [PlotLine], cleanUp :: IO () } data PlotLine = TimeSeries String | Histo String | PLPoints [(Double,Double)] | PLLines [(Double,Double)] plotLines :: [PlotLine] -> Maybe String plotLines pls = let tss = [ts | TimeSeries ts <- pls ] pts = [ts | PLPoints ts <- pls ] lns = [ts | PLLines ts <- pls ] hss = [ts | Histo ts <- pls ] in cond [(nonempty tss, Just $ unlines ["ts.plot("++(intercalate "," tss)++")", unlines $ map lnsplot lns, unlines $ map ptplot pts]), (nonempty hss, Just $ "hist("++head hss++")"), (nonempty lns, Just $ unlines $ map (lnsOrPnts "plot" ", type=\"l\", xlab=\"xs\", ylab=\"ys\"") lns), (nonempty pts, Just $ unlines $ map (lnsOrPnts "plot" ", type=\"p\", xlab=\"xs\", ylab=\"ys\"") lns)] Nothing where lnsplot = lnsOrPnts "lines" "" ptplot = lnsOrPnts "points" ", col=\"blue\", pch=16" lnsOrPnts cmd extra xsys = let (xs,ys) = unzip xsys in cmd++"(c("++(intercalate "," $ map show xs)++"), c("++(intercalate "," $ map show ys)++")"++extra++")" class PlotWithR a where getRPlotCmd :: a -> IO RPlotCmd plotWithR :: PlotWithR a => a -> IO () plotWithR pl' = do pl <- getRPlotCmd pl' plotPlotCmd pl plotPlotCmd pl = do r <- (show. hashUnique) `fmap` newUnique --print pl let rfile = "/tmp/bugplot"++r++".r" let plines = plotLines $ plotArgs pl when (isNothing plines) $ fail $ "plotPlotCmd: noghtin to plot!" let rlines = unlines [ "x11(width=10,height=7)", unlines $ prePlot pl, fromJust plines, "z<-locator(1)", "q()"] writeFile rfile $ rlines --putStrLn rlines system $ "R --vanilla --slave < "++rfile removeFile rfile cleanUp pl return () plotCmdToPng pls' = do forM_ (inChunksOf 50 pls') $ \pls -> do r <- (show. hashUnique) `fmap` newUnique let rfile = "/tmp/bugplot"++r++".r" h <- openFile rfile WriteMode forM_ pls $ \(nm,pl) -> do let plines = plotLines $ plotArgs pl when (isNothing plines) $ fail $ "plotCmdToPng: noghtin to plot!" let rlines = unlines [ "png(filename=\""++nm++"\")", unlines $ prePlot pl, fromJust plines] hPutStrLn h rlines --putStrLn rlines hClose h system $ "R --vanilla --slave < "++rfile --removeFile rfile forM_ pls $ \(nm,pl) ->cleanUp pl plot :: (MonadIO m, PlotWithR p) => p -> m () plot = liftIO . plotWithR newtype Points a = Points [a] instance Real a => PlotWithR (Points a) where getRPlotCmd (Points xs) = do r <- hashUnique `fmap` newUnique return $ RPlCmd { prePlot = [], plotArgs = [PLPoints $ zip [0..] $ map realToFrac xs], cleanUp = return () } infixr 2 :+: data a :+: b = a :+: b instance (PlotWithR a, PlotWithR b) => PlotWithR (a :+: b) where getRPlotCmd (xs :+: ys) = do px <- getRPlotCmd xs py <- getRPlotCmd ys return $ RPlCmd { prePlot = prePlot py++prePlot px, plotArgs = plotArgs px++plotArgs py, cleanUp = cleanUp px >> cleanUp px } {-instance (PlotWithR a) => PlotWithR [a] where getRPlotCmd xs = do pxs <- mapM getRPlotCmd xs return $ RPlCmd { prePlot = concatMap prePlot pxs, plotArgs = concatMap plotArgs pxs, cleanUp = mapM_ cleanUp pxs} -} --test = plotWithR (Points [1,2,3] :+: Points [4,5,6]) --plotWithR :: V -> IO () --plotWithR (SigV t1 t2 dt sf) = do plotHisto :: Num a => [a] -> IO RPlotCmd plotHisto pts = do r <- (show . idInt . hashUnique) `fmap` newUnique let fnm = "/tmp/bugplot"++r writeFile fnm . 
unlines $ map (show) pts return $ RPlCmd { prePlot = [concat ["dat", r, " <- scan(\"", fnm, "\")"]], cleanUp = removeFile fnm, plotArgs = [Histo $ "dat"++r] }
repo_name: glutamate/probably | path: Math/Probably/PlotR.hs | license: bsd-3-clause | size: 4,665
n_ast_errors: 0 | ast_max_depth: 21 | n_whitespaces: 1,599 | n_ast_nodes: 1,414 | n_ast_terminals: 726 | n_ast_nonterminals: 688 | loc: 98 | cycloplexity: 1
-- |
-- Module      : Foundation.Random
-- License     : BSD-style
-- Stability   : experimental
-- Portability : Good
--
-- This module deals with the random subsystem abstractions.
--
-- It provide 2 different set of abstractions:
--
-- * The first abstraction that allow a monad to generate random
--   through the 'MonadRandom' class.
--
-- * The second abstraction to make generic random generator 'RandomGen'
--   and a small State monad like wrapper 'MonadRandomState' to
--   abstract a generator.
--
{-# LANGUAGE ForeignFunctionInterface #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Foundation.Random
    ( MonadRandom(..)
    , RandomGen(..)
    , MonadRandomState(..)
    , withRandomGenerator
    , RNG
    , RNGv1
    ) where

import Foundation.Random.Class
import Foundation.Random.DRG
import qualified Foundation.Random.ChaChaDRG as ChaChaDRG

-- | An alias to the default choice of deterministic random number generator
--
-- Unless, you want to have the stability of a specific random number generator,
-- e.g. for tests purpose, it's recommended to use this alias so that you would
-- keep up to date with possible bugfixes, or change of algorithms.
type RNG = RNGv1

type RNGv1 = ChaChaDRG.State
repo_name: vincenthz/hs-foundation | path: foundation/Foundation/Random.hs | license: bsd-3-clause | size: 1,243
n_ast_errors: 0 | ast_max_depth: 5 | n_whitespaces: 251 | n_ast_nodes: 97 | n_ast_terminals: 74 | n_ast_nonterminals: 23 | loc: 14 | cycloplexity: 0
{-# LANGUAGE OverloadedStrings #-}

{- |
Module      : Network.MPD.Applicative.Mount
Copyright   : (c) Joachim Fasting 2014
License     : MIT

Maintainer  : [email protected]
Stability   : stable
Portability : unportable

Mounting remote storage.
-}

module Network.MPD.Applicative.Mount
    ( mount
    , unmount
    , listMounts
    , listNeighbors
    ) where

import Network.MPD.Commands.Arg hiding (Command)
import Network.MPD.Applicative.Internal
import Network.MPD.Util

import Data.ByteString.Char8 (ByteString)
import qualified Data.ByteString.UTF8 as UTF8

mount :: String -- Path
      -> String -- Uri
      -> Command ()
mount p u = Command emptyResponse ["mount" <@> p <++> u]

unmount :: String -- Path
        -> Command ()
unmount p = Command emptyResponse ["unmount" <@> p]

listMounts :: Command [(String, String)] -- (Path, Uri)
listMounts = Command (liftParser p) ["listmounts"]
    where
        p = mapM parseMount . splitGroups ["mount"] . toAssocList

        parseMount :: [(ByteString, ByteString)] -> Either String (String, String)
        parseMount [("mount", mo), ("storage", st)] = Right (UTF8.toString mo, UTF8.toString st)
        parseMount _ = Left "Unexpected result from listMounts"

listNeighbors :: Command [(String, String)] -- (Uri, Name)
listNeighbors = Command (liftParser p) ["listneighbors"]
    where
        p = mapM parseNeighbor . splitGroups ["neighbor"] . toAssocList

        parseNeighbor :: [(ByteString, ByteString)] -> Either String (String, String)
        parseNeighbor [("neighbor", ne), ("name", na)] = Right (UTF8.toString ne, UTF8.toString na)
        parseNeighbor _ = Left "Unexpected result from listNeighbors"
repo_name: bens/libmpd-haskell | path: src/Network/MPD/Applicative/Mount.hs | license: lgpl-2.1 | size: 1,672
n_ast_errors: 0 | ast_max_depth: 10 | n_whitespaces: 332 | n_ast_nodes: 443 | n_ast_terminals: 251 | n_ast_nonterminals: 192 | loc: 30 | cycloplexity: 2
-- FIXME See how much of this module can be deleted. {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE BangPatterns #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TupleSections #-} {-# OPTIONS -fno-warn-unused-do-bind #-} -- | Functions for the GHC package database. module Stack.GhcPkg (findGhcPkgId ,getGlobalDB ,EnvOverride ,envHelper ,createDatabase ,unregisterGhcPkgId ,getCabalPkgVer ,findGhcPkgHaddockHtml ,findGhcPkgDepends ,findTransitiveGhcPkgDepends ,listGhcPkgDbs ,ghcPkgExeName) where import Control.Applicative import Control.Monad import Control.Monad.Catch import Control.Monad.IO.Class import Control.Monad.Logger import Control.Monad.Trans.Control import qualified Data.ByteString.Char8 as S8 import Data.Either import Data.List import qualified Data.Map as Map import Data.Maybe import Data.Set (Set) import qualified Data.Set as Set import Data.Text (Text) import qualified Data.Text as T import qualified Data.Text.Encoding as T import Path (Path, Abs, Dir, toFilePath, parent, parseAbsDir) import Path.IO (dirExists, createTree) import Prelude hiding (FilePath) import Stack.Constants import Stack.Types import System.Directory (canonicalizePath, doesDirectoryExist) import System.Process.Read -- | Get the global package database getGlobalDB :: (MonadIO m, MonadLogger m, MonadBaseControl IO m, MonadCatch m, MonadThrow m) => EnvOverride -> WhichCompiler -> m (Path Abs Dir) getGlobalDB menv wc = do -- This seems like a strange way to get the global package database -- location, but I don't know of a better one bs <- ghcPkg menv wc [] ["list", "--global"] >>= either throwM return let fp = S8.unpack $ stripTrailingColon $ firstLine bs liftIO (canonicalizePath fp) >>= parseAbsDir where stripTrailingColon bs | S8.null bs = bs | S8.last bs == ':' = S8.init bs | otherwise = bs firstLine = S8.takeWhile (\c -> c /= '\r' && c /= '\n') -- | Run the ghc-pkg executable ghcPkg :: (MonadIO m, MonadLogger m, MonadBaseControl IO m, MonadCatch m, MonadThrow m) => EnvOverride -> WhichCompiler -> [Path Abs Dir] -> [String] -> m (Either ReadProcessException S8.ByteString) ghcPkg menv wc pkgDbs args = do eres <- go r <- case eres of Left _ -> do mapM_ (createDatabase menv wc) pkgDbs go Right _ -> return eres return r where go = tryProcessStdout Nothing menv (ghcPkgExeName wc) args' args' = packageDbFlags pkgDbs ++ args -- | Create a package database in the given directory, if it doesn't exist. createDatabase :: (MonadIO m, MonadLogger m, MonadBaseControl IO m, MonadCatch m, MonadThrow m) => EnvOverride -> WhichCompiler -> Path Abs Dir -> m () createDatabase menv wc db = do exists <- dirExists db unless exists $ do -- Creating the parent doesn't seem necessary, as ghc-pkg -- seems to be sufficiently smart. But I don't feel like -- finding out it isn't the hard way createTree (parent db) _ <- tryProcessStdout Nothing menv (ghcPkgExeName wc) ["init", toFilePath db] return () -- | Get the name to use for "ghc-pkg", given the compiler version. ghcPkgExeName :: WhichCompiler -> String ghcPkgExeName Ghc = "ghc-pkg" ghcPkgExeName Ghcjs = "ghcjs-pkg" -- | Get the necessary ghc-pkg flags for setting up the given package database packageDbFlags :: [Path Abs Dir] -> [String] packageDbFlags pkgDbs = "--no-user-package-db" : map (\x -> ("--package-db=" ++ toFilePath x)) pkgDbs -- | Get the value of a field of the package. 
findGhcPkgField :: (MonadIO m, MonadLogger m, MonadBaseControl IO m, MonadCatch m, MonadThrow m) => EnvOverride -> WhichCompiler -> [Path Abs Dir] -- ^ package databases -> String -- ^ package identifier, or GhcPkgId -> Text -> m (Maybe Text) findGhcPkgField menv wc pkgDbs name field = do result <- ghcPkg menv wc pkgDbs ["field", "--simple-output", name, T.unpack field] return $ case result of Left{} -> Nothing Right lbs -> fmap (stripCR . T.decodeUtf8) $ listToMaybe $ S8.lines lbs where stripCR t = fromMaybe t (T.stripSuffix "\r" t) -- | Get the id of the package e.g. @foo-0.0.0-9c293923c0685761dcff6f8c3ad8f8ec@. findGhcPkgId :: (MonadIO m, MonadLogger m, MonadBaseControl IO m, MonadCatch m, MonadThrow m) => EnvOverride -> WhichCompiler -> [Path Abs Dir] -- ^ package databases -> PackageName -> m (Maybe GhcPkgId) findGhcPkgId menv wc pkgDbs name = do mpid <- findGhcPkgField menv wc pkgDbs (packageNameString name) "id" case mpid of Just !pid -> return (parseGhcPkgId (T.encodeUtf8 pid)) _ -> return Nothing -- | Get the version of the package findGhcPkgVersion :: (MonadIO m, MonadLogger m, MonadBaseControl IO m, MonadCatch m, MonadThrow m) => EnvOverride -> WhichCompiler -> [Path Abs Dir] -- ^ package databases -> PackageName -> m (Maybe Version) findGhcPkgVersion menv wc pkgDbs name = do mpid <- findGhcPkgField menv wc pkgDbs (packageNameString name) "version" case mpid of Just !v -> return (parseVersion (T.encodeUtf8 v)) _ -> return Nothing -- | Get the Haddock HTML documentation path of the package. findGhcPkgHaddockHtml :: (MonadIO m, MonadLogger m, MonadBaseControl IO m, MonadCatch m, MonadThrow m) => EnvOverride -> WhichCompiler -> [Path Abs Dir] -- ^ package databases -> String -- ^ PackageIdentifier or GhcPkgId -> m (Maybe (PackageIdentifier, Path Abs Dir)) findGhcPkgHaddockHtml menv wc pkgDbs ghcPkgId = do mpath <- findGhcPkgField menv wc pkgDbs ghcPkgId "haddock-html" mid <- findGhcPkgField menv wc pkgDbs ghcPkgId "id" mversion <- findGhcPkgField menv wc pkgDbs ghcPkgId "version" let mpkgId = PackageIdentifier <$> (mid >>= parsePackageName . T.encodeUtf8) <*> (mversion >>= parseVersion . T.encodeUtf8) case (,) <$> mpath <*> mpkgId of Just (path0, pkgId) -> do let path = T.unpack path0 exists <- liftIO $ doesDirectoryExist path path' <- if exists then liftIO $ canonicalizePath path else return path return $ fmap (pkgId,) (parseAbsDir path') _ -> return Nothing -- | Finds dependencies of package, and all their dependencies, etc. findTransitiveGhcPkgDepends :: (MonadIO m, MonadLogger m, MonadBaseControl IO m, MonadCatch m, MonadThrow m) => EnvOverride -> WhichCompiler -> [Path Abs Dir] -- ^ package databases -> PackageIdentifier -> m (Set PackageIdentifier) findTransitiveGhcPkgDepends menv wc pkgDbs pkgId0 = liftM (Set.fromList . Map.elems) (go (packageIdentifierString pkgId0) Map.empty) where go pkgId res = do deps <- findGhcPkgDepends menv wc pkgDbs pkgId loop deps res loop [] res = return res loop (dep:deps) res = do if Map.member dep res then loop deps res else do let pkgId = ghcPkgIdString dep mname <- findGhcPkgField menv wc pkgDbs pkgId "name" mversion <- findGhcPkgField menv wc pkgDbs pkgId "version" let mident = do name <- mname >>= parsePackageName . T.encodeUtf8 version <- mversion >>= parseVersion . T.encodeUtf8 Just $ PackageIdentifier name version res' = maybe id (Map.insert dep) mident res res'' <- go pkgId res' -- FIXME is the Map.union actually necessary? loop deps (Map.union res res'') -- | Get the dependencies of the package. 
findGhcPkgDepends :: (MonadIO m, MonadLogger m, MonadBaseControl IO m, MonadCatch m, MonadThrow m) => EnvOverride -> WhichCompiler -> [Path Abs Dir] -- ^ package databases -> String -- ^ package identifier or GhcPkgId -> m [GhcPkgId] findGhcPkgDepends menv wc pkgDbs pkgId = do mdeps <- findGhcPkgField menv wc pkgDbs pkgId "depends" case mdeps of Just !deps -> return (mapMaybe (parseGhcPkgId . T.encodeUtf8) (T.words deps)) _ -> return [] unregisterGhcPkgId :: (MonadIO m, MonadLogger m, MonadThrow m, MonadCatch m, MonadBaseControl IO m) => EnvOverride -> WhichCompiler -> CompilerVersion -> Path Abs Dir -- ^ package database -> GhcPkgId -> PackageIdentifier -> m () unregisterGhcPkgId menv wc cv pkgDb gid ident = do eres <- ghcPkg menv wc [pkgDb] args case eres of Left e -> $logWarn $ T.pack $ show e Right _ -> return () where -- TODO ideally we'd tell ghc-pkg a GhcPkgId instead args = "unregister" : "--user" : "--force" : (case cv of GhcVersion v | v < $(mkVersion "7.9") -> [packageIdentifierString ident] _ -> ["--ipid", ghcPkgIdString gid]) -- | Get the version of Cabal from the global package database. getCabalPkgVer :: (MonadThrow m, MonadIO m, MonadLogger m, MonadBaseControl IO m, MonadCatch m) => EnvOverride -> WhichCompiler -> m Version getCabalPkgVer menv wc = findGhcPkgVersion menv wc [] -- global DB cabalPackageName >>= maybe (throwM $ Couldn'tFindPkgId cabalPackageName) return listGhcPkgDbs :: (MonadIO m, MonadLogger m, MonadBaseControl IO m, MonadCatch m, MonadThrow m) => EnvOverride -> WhichCompiler -> [Path Abs Dir] -> m [PackageIdentifier] listGhcPkgDbs menv wc pkgDbs = do result <- ghcPkg menv wc pkgDbs ["list", "--simple-output"] return $ case result of Left{} -> [] Right lbs -> mapMaybe parsePackageIdentifier (S8.words lbs)
akhileshs/stack
src/Stack/GhcPkg.hs
bsd-3-clause
10,591
0
19
3,201
2,723
1,380
1,343
230
3
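The transitive-dependency walk in the sample above is essentially a graph closure computed over repeated `ghc-pkg field ... depends` lookups. The following is a minimal, self-contained sketch of that idea only; it is a hypothetical helper, not Stack's actual code, and the monadic `lookupDeps` argument stands in for `findGhcPkgDepends`.

import qualified Data.Set as Set

-- Generic closure: repeatedly look up direct dependencies and fold them
-- into an accumulator, skipping nodes that were already visited.
transitiveDeps :: (Monad m, Ord k) => (k -> m [k]) -> k -> m (Set.Set k)
transitiveDeps lookupDeps root = go Set.empty [root]
  where
    go seen []       = return seen
    go seen (x:rest) = do
      deps <- lookupDeps x
      let new = filter (`Set.notMember` seen) deps
      go (foldr Set.insert seen new) (new ++ rest)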
module LiftToToplevel.PatBindIn1 where

--A definition can be lifted from a where or let into the surrounding binding group.
--Lifting a definition widens the scope of the definition.

--In this example, lift 'tup' defined in 'foo'
--This example aims to test renaming and the lifting of type signatures.

main :: Int
main = foo 3

foo :: Int -> Int
foo x = h + t + (snd tup)
  where
    h :: Int
    t :: Int
    tup :: (Int,Int)
    tup@(h,t) = head $ zip [1..10] [3..15]

kmate/HaRe
test/testdata/LiftToToplevel/PatBindIn1.hs
bsd-3-clause
486
0
9
119
111
64
47
9
1
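For readers unfamiliar with HaRe's "lift to top level" refactoring, one plausible result of lifting `tup` (together with `h` and `t`) out of `foo` is shown below. This is a hypothetical illustration of the transformation, not the expected-output file from the test suite.

module LiftToToplevel.PatBindIn1 where

main :: Int
main = foo 3

tup :: (Int, Int)
tup = head $ zip [1..10] [3..15]

h :: Int
h = fst tup

t :: Int
t = snd tup

foo :: Int -> Int
foo x = h + t + (snd tup)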
{-|
    A WAI adapter to the HTML5 Server-Sent Events API.
-}
module Network.Wai.EventSource (
    ServerEvent(..),
    eventSourceAppChan,
    eventSourceAppIO
    ) where

import Data.Function (fix)
import Control.Concurrent.Chan (Chan, dupChan, readChan)
import Control.Monad.IO.Class (liftIO)
import Network.HTTP.Types (status200, hContentType)
import Network.Wai (Application, responseStream)

import Network.Wai.EventSource.EventStream

-- | Make a new WAI EventSource application reading events from
-- the given channel.
eventSourceAppChan :: Chan ServerEvent -> Application
eventSourceAppChan chan req sendResponse = do
    chan' <- liftIO $ dupChan chan
    eventSourceAppIO (readChan chan') req sendResponse

-- | Make a new WAI EventSource application reading events from
-- the given IO action.
eventSourceAppIO :: IO ServerEvent -> Application
eventSourceAppIO src _ sendResponse =
    sendResponse $ responseStream
        status200
        [(hContentType, "text/event-stream")]
        $ \sendChunk flush -> fix $ \loop -> do
            se <- src
            case eventToBuilder se of
                Nothing -> return ()
                Just b  -> sendChunk b >> flush >> loop
dylex/wai
wai-extra/Network/Wai/EventSource.hs
mit
1,245
0
16
307
264
146
118
24
2
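A usage sketch for the module above: a tiny server that pushes an incrementing counter to every connected EventSource client once per second. The warp dependency and the blaze-builder `fromString` call are assumptions about the surrounding package setup of this wai-extra version, not something shown in the source.

import Blaze.ByteString.Builder.Char8 (fromString)  -- assumed Builder type of this wai-extra era
import Control.Concurrent (forkIO, threadDelay)
import Control.Concurrent.Chan (newChan, writeChan)
import Control.Monad (forM_)
import Network.Wai.EventSource (ServerEvent (..), eventSourceAppChan)
import Network.Wai.Handler.Warp (run)

main :: IO ()
main = do
  chan <- newChan
  -- one writer thread: emit a counter event every second
  _ <- forkIO $ forM_ [1 :: Int ..] $ \i -> do
    threadDelay 1000000
    writeChan chan (ServerEvent Nothing Nothing [fromString (show i)])
  -- each HTTP client gets its own dupChan copy (see eventSourceAppChan)
  run 8080 (eventSourceAppChan chan)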
{-# LANGUAGE OverloadedStrings, LambdaCase, ScopedTypeVariables, TupleSections #-} {- | Simple persistent cache. Cache file is deleted if the modification time of any of the dependencies has changed. -} module Gen2.Cache (getCached, putCached) where import Control.Applicative import qualified Control.Exception as E import qualified Crypto.Hash.SHA1 as SHA1 import qualified Data.Binary as DB import qualified Data.Binary.Get as DB import qualified Data.Binary.Put as DB import Data.ByteString (ByteString) import qualified Data.ByteString.Base16 as B16 import qualified Data.ByteString.Lazy as BL import qualified Data.List import Data.Monoid import Data.Text (Text) import qualified Data.Text as T import qualified Data.Text.Encoding as T import Data.Time.Clock.POSIX import System.Directory import System.FilePath import System.IO.Error import DynFlags import qualified Compiler.Info as Info getCacheMeta :: [FilePath] -> IO (Maybe BL.ByteString) getCacheMeta files = do m <- mapM (\file -> (file,) <$> getModified file) files return $ if any ((==0).snd) m then Nothing else Just (DB.runPut $ DB.put m) checkCacheMeta :: BL.ByteString -> IO Bool checkCacheMeta meta = (and :: [Bool] -> Bool) <$> mapM (\(file,mod) -> (\m -> m==mod && m/=0) <$> getModified file) (DB.runGet DB.get $ meta) getModified :: FilePath -> IO Integer getModified file = (round . (*1000) . utcTimeToPOSIXSeconds <$> getModificationTime file) `catchIOError` \_ -> return 0 cacheFileName :: DynFlags -> Text -> Text -> IO (Maybe FilePath) cacheFileName _dflags prefix key = do let b = prefix <> "-" <> (T.decodeUtf8 . B16.encode . SHA1.hash . T.encodeUtf8 $ key) Info.getUserCacheDir >>= \case Nothing -> return Nothing Just cdir -> (createDirectoryIfMissing True cdir >> return (Just $ cdir </> T.unpack b <.> "cache")) `catchIOError` \_ -> return Nothing removeCacheFile :: FilePath -> IO () removeCacheFile file = removeFile file `catchIOError` \_ -> return () getCached :: DynFlags -> Text -> Text -> IO (Maybe ByteString) getCached dflags prefix name = let getCacheEntry = cacheFileName dflags prefix name >>= \case Nothing -> return Nothing Just file -> getCacheFile file `E.onException` removeCacheFile file getCacheFile file = do dat <- BL.readFile file let (meta, content) = DB.runGet DB.get dat valid <- checkCacheMeta meta if valid then content `seq` return (Just content) else removeCacheFile file >> return Nothing in getCacheEntry `E.catch` \(_::E.SomeException) -> return Nothing {- put a file in the cache, returns False if the cache file could not be created -} putCached :: DynFlags -> Text -- prefix name, ends up in file name -> Text -- unique name (may be long) -> [FilePath] -- files, invalidate cache item if these are modified -> ByteString -- contents -> IO Bool putCached dflags prefix key deps content = cacheFileName dflags prefix key >>= \case Nothing -> return False Just file -> do getCacheMeta deps >>= \case Nothing -> return False Just meta -> (BL.writeFile file (DB.runPut $ DB.put (meta, content)) >> return True) `catchIOError` \_ -> return False
danse/ghcjs
src/Gen2/Cache.hs
mit
3,590
0
22
981
1,002
540
462
77
3
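The two exported functions combine naturally into a read-through cache. Below is a hypothetical helper built only from the API shown above; the name `cachedOrBuild` is mine, not part of the module.

{-# LANGUAGE LambdaCase #-}
import Data.ByteString (ByteString)
import Data.Text (Text)
import DynFlags (DynFlags)
import Gen2.Cache (getCached, putCached)

-- Return the cached value if the dependency files are unchanged,
-- otherwise run the builder and try to store its result.
cachedOrBuild :: DynFlags -> Text -> Text -> [FilePath] -> IO ByteString -> IO ByteString
cachedOrBuild dflags prefix key deps build =
  getCached dflags prefix key >>= \case
    Just bs -> return bs
    Nothing -> do
      bs <- build
      _ <- putCached dflags prefix key deps bs
      return bs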
import Control.Concurrent
import Control.Exception
import Control.Monad
import System.IO
import System.Environment

-- test for deadlocks

main = do
  hSetBuffering stdout NoBuffering
  [n] <- getArgs
  replicateM_ (read n) $ do
    chan <- newChan
    wid <- forkIO $ forever $ writeChan chan (5::Int)
    rid <- forkIO $ forever $ void $ readChan chan
    threadDelay 100000
    throwTo rid ThreadKilled
    putStr "."
    readChan chan
    throwTo wid ThreadKilled
beni55/ghcjs
test/pkg/base/Concurrent/chan002.hs
mit
507
1
13
141
158
72
86
17
1
{-# LANGUAGE BangPatterns, OverloadedStrings #-} {- The Computer Language Benchmarks Game http://benchmarkgame.alioth.debian.org/ contributed by Bryan O'Sullivan -} import Control.Monad import Data.ByteString.Unsafe import Foreign.Ptr import Foreign.Storable import System.Environment import qualified Data.ByteString.Char8 as B import qualified Data.ByteString.Lazy.Char8 as L main = do n <- getArgs >>= readIO.head writeAlu ">ONE Homo sapiens alu" (L.take (fromIntegral n*2) (L.cycle alu)) make ">TWO IUB ambiguity codes" (n*3) iub 42 >>= void . make ">THREE Homo sapiens frequency" (n*5) homosapiens writeAlu name s0 = B.putStrLn name >> go s0 where go s = L.putStrLn h >> unless (L.null t) (go t) where (h,t) = L.splitAt 60 s make name n0 tbl seed0 = do B.putStrLn name let modulus = 139968 fill ((c,p):cps) j = let !k = min modulus (floor (fromIntegral modulus * (p::Float) + 1)) in B.replicate (k - j) c : fill cps k fill _ _ = [] lookupTable = B.concat $ fill (scanl1 (\(_,p) (c,q) -> (c,p+q)) tbl) 0 line = B.replicate 60 '\0' unsafeUseAsCString line $ \ptr -> do let make' n !i seed | n > (0::Int) = do let newseed = rem (seed * 3877 + 29573) modulus plusPtr ptr i `poke` unsafeIndex lookupTable newseed if i+1 >= 60 then puts line 60 >> make' (n-1) 0 newseed else make' (n-1) (i+1) newseed | otherwise = when (i > 0) (puts line i) >> return seed make' n0 0 seed0 alu = "GGCCGGGCGCGGTGGCTCACGCCTGTAATCCCAGCACTTTGGGAGGCCGAGGCGGGCGGATCACCTGAGG\ \TCAGGAGTTCGAGACCAGCCTGGCCAACATGGTGAAACCCCGTCTCTACTAAAAATACAAAAATTAGCCGGG\ \CGTGGTGGCGCGCGCCTGTAATCCCAGCTACTCGGGAGGCTGAGGCAGGAGAATCGCTTGAACCCGGGAGGC\ \GGAGGTTGCAGTGAGCCGAGATCGCGCCACTGCACTCCAGCCTGGGCGACAGAGCGAGACTCCGTCTCAAAAA" iub = [('a',0.27),('c',0.12),('g',0.12),('t',0.27),('B',0.02) ,('D',0.02),('H',0.02),('K',0.02),('M',0.02),('N',0.02) ,('R',0.02),('S',0.02),('V',0.02),('W',0.02),('Y',0.02)] homosapiens = [('a',0.3029549426680),('c',0.1979883004921) ,('g',0.1975473066391),('t',0.3015094502008)] puts bs n = B.putStrLn (B.take n bs)
beni55/ghcjs
test/nofib/shootout/fasta/Main.hs
mit
2,149
9
25
407
852
455
397
42
3
{-# LANGUAGE TemplateHaskell #-}
module T2685a (th) where

import Language.Haskell.TH

newtype NT = C (() -> ())

th :: Q [Dec]
th = [d| foo = C undefined |]
urbanslug/ghc
testsuite/tests/th/T2685a.hs
bsd-3-clause
158
0
8
32
55
35
20
6
1
{-# LANGUAGE CPP, DeriveDataTypeable, FlexibleContexts,OverloadedStrings, GeneralizedNewtypeDeriving, MultiParamTypeClasses , TemplateHaskell, TypeFamilies, RecordWildCards, DeriveGeneric, DeriveDataTypeable #-} module Tach.Migration.Acidic.Types where import Tach.Impulse.Types.TimeValue -- import Tach.Impulse.Types.TimeValueSeries import Tach.Impulse.Types.Impulse import Data.Typeable (Typeable) import Data.Set import Data.IntMap import Data.ByteString -- import Data.Thyme import Data.Vector import GHC.Generics import qualified DirectedKeys.Types as DK import Tach.Migration.Types --import Data.SafeCopy ( base, deriveSafeCopy ) -- | The Acid State instance of anything is that thing suffixed with Store... 'TimeValue' -> 'TimeValueStore' -- This prevents confusion later -- | This is a simple impulse valued sequence. No Compression, No structural change. -- Note, the terrible type signature occurs because safecopy hates type synonyms! newtype TVSimpleImpulseTypeStore = TVSimpleImpulseTypeStore { unTimeValueStore :: (ImpulseSeries (ImpulseKey (DK.DirectedKeyRaw KeyPid KeySource KeyDestination KeyTime)) (ImpulsePeriod (Vector Double) Int ) (ImpulseStart Int) (ImpulseEnd Int) (ImpulseRep (Set TVNoKey))) } deriving (Typeable,Generic) -- | The ByteString is the filename used to grab the correct TVSimple... Allowing for TS level locks instead of DB level newtype IntKey = IntKey {unIntKey :: ByteString} deriving (Eq,Show,Ord,Typeable,Generic) -- | ImpulseMap maps pid (ints) to the filenames newtype ImpulseMap = ImpulseMap { unImpulseMap :: IntMap IntKey} deriving (Eq,Show,Ord,Typeable,Generic)
smurphy8/tach
core-types/tach-migration-acidic/src/Tach/Migration/Acidic/Types.hs
mit
1,642
0
13
212
252
151
101
19
0
-- @Author: Zeyuan Shang
-- @Date: 2016-06-07 12:57:23
-- @Last Modified by: Zeyuan Shang
-- @Last Modified time: 2016-06-14 13:47:56

paths :: (Eq a) => a -> a -> [(a, a)] -> [[a]]
paths src dst edges
    | src == dst = [[dst]]
    | otherwise = [ src : path | edge <- edges, (fst edge) == src, path <- (paths (snd edge) dst [e | e <- edges, e /= edge])]

cycle' :: (Eq a) => a -> [(a, a)] -> [[a]]
cycle' src edges = [ src : path | edge <- edges, (fst edge) == src, path <- (paths (snd edge) src [e | e <- edges, e /= edge])]

main = do
    let value = cycle' 2 [(1,2),(2,3),(1,3),(3,4),(4,2),(5,6)]
    print value
    let value2 = cycle' 1 [(1,2),(2,3),(1,3),(3,4),(4,2),(5,6)]
    print value2
zeyuanxy/haskell-playground
ninety-nine-haskell-problems/vol9/82.hs
mit
753
2
13
213
437
240
197
15
1
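Tracing the definitions by hand: starting from vertex 2 the only way back to 2 is 2 -> 3 -> 4 -> 2, and no edge leads back into vertex 1, so main should print the following.

-- Expected output of main (derived by hand from the definitions above):
-- [[2,3,4,2]]
-- []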
module Player where

data Player = O | X | None deriving (Show, Enum, Eq)

getOther :: Player -> Player
getOther O = X
getOther X = O
getOther None = None  -- None has no opposing player; returning it keeps the function total

readPlayerString :: String -> Player
readPlayerString "Player O" = O
readPlayerString "Player X" = X
readPlayerString "Player None" = None
readPlayerString "O" = O
readPlayerString "X" = X
readPlayerString "" = None
readPlayerString _ = None  -- any other string is treated as no player

showPlayer :: Player -> String
showPlayer O = "Player O"
showPlayer X = "Player X"
showPlayer None = ""
prydonius/Oxo
Player.hs
mit
499
0
6
126
147
78
69
17
1
{-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE NoMonomorphismRestriction #-} {-# LANGUAGE TypeOperators, DataKinds #-} {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TypeApplications #-} module Control.Eff.Test (testGroups) where import Test.HUnit hiding (State) import Test.QuickCheck import Control.Eff import Control.Eff.Reader.Strict import Control.Eff.State.Strict import Control.Eff.Exception import qualified Control.Exception as Exc import Utils import Test.Framework.TH import Test.Framework.Providers.HUnit import Test.Framework.Providers.QuickCheck2 testGroups = [ $(testGroupGenerator) ] prop_NestedEff :: Property prop_NestedEff = forAll arbitrary (\x -> property (qu x == x)) where qu :: Bool -> Bool qu x = run $ runReader readerId (readerAp x) readerAp :: Bool -> Eff '[Reader (Eff '[Reader Bool] Bool)] Bool readerAp x = do f <- ask return . run $ runReader x f readerId :: Eff '[Reader Bool] Bool readerId = do x <- ask return x -- | Ensure that https://github.com/RobotGymnast/extensible-effects/issues/11 stays resolved. case_Lift_building :: Assertion case_Lift_building = runLift possiblyAmbiguous where possiblyAmbiguous :: (Monad m, Lifted m r) => Eff r () possiblyAmbiguous = lift $ return () case_Lift_tl1r :: Assertion case_Lift_tl1r = do ((), output) <- catchOutput tl1r assertOutput "Test tl1r" [show input] output where input = (5::Int) -- tl1r :: IO () tl1r = runLift (runReader input tl1) where tl1 = ask >>= \(x::Int) -> lift . print $ x case_Lift_tMd' :: Assertion case_Lift_tMd' = do (actualResult, actualOutput) <- catchOutput tMd' let expected = (output, map show input) assertEqual "Test mapMdebug using Lift" expected (actualResult, lines actualOutput) where input = [1..5] val = (10::Int) output = map (+ val) input tMd' = runLift $ runReader val $ mapMdebug' f input where f x = ask `add` return x -- Re-implemenation of mapMdebug using Lifting -- The signature is inferred mapMdebug' :: (Show a, Lifted IO r) => (a -> Eff r b) -> [a] -> Eff r [b] mapMdebug' _f [] = return [] mapMdebug' f (h:t) = do lift $ print h h' <- f h t' <- mapMdebug' f t return (h':t') -- tests from <http://okmij.org/ftp/Haskell/misc.html#catch-MonadIO> data MyException = MyException String deriving (Show) instance Exc.Exception MyException exfn :: Lifted IO r => Bool -> Eff r Bool exfn True = lift . Exc.throw $ (MyException "thrown") exfn False = return True testc m = catchDynE (m >>= return . show) (\ (MyException s) -> return s) test1 m = do runLift (tf m True) >>= print; runLift (tf m False) >>= print tf m x = runReader (x::Bool) . runState ([]::[String]) $ m runErrorStr = runError @String case_catchDynE_test1 :: Assertion case_catchDynE_test1 = do ((), actual) <- catchOutput $ test1 (testc m) let expected = [ "(\"thrown\",[\"begin\"])" , "(\"True\",[\"end\",\"begin\"])"] assertOutput "catchDynE: test1: exception shouldn't drop Writer's state" expected actual where -- In CatchMonadIO, the result of tf True is ("thrown",[]) -- -- that is, an exception will drop the Writer's state, even if that -- exception is caught. Here, the state is preserved! -- So, this is an advantage over MTL! 
m = do modify ("begin":) x <- ask r <- exfn x modify ("end":) return r -- Let us use an Error effect instead case_catchDynE_test1' :: Assertion case_catchDynE_test1' = do ((), actual') <- catchOutput $ test1 (runErrorStr (testc m)) let expected' = [ "(Left \"thrown\",[\"begin\"])" , "(Right \"True\",[\"end\",\"begin\"])"] assertOutput "catchDynE: test1': Error shouldn't drop Writer's state" expected' actual' where -- In CatchMonadIO, the result of tf True is ("thrown",[]) -- -- that is, an exception will drop the Writer's state, even if that -- exception is caught. Here, the state is preserved! -- So, this is an advantage over MTL! m = do modify ("begin":) x <- ask r <- exfn x modify ("end":) return r exfn True = throwError $ ("thrown") exfn False = return True -- Now, the behavior of the dynamic Exception and Error effect is consistent. -- The state is preserved. Before it wasn't. case_catchDynE_test2 :: Assertion case_catchDynE_test2 = do ((), actual) <- catchOutput $ test1 (runErrorStr (testc m)) let expected = [ "(Left \"thrown\",[\"begin\"])" , "(Right \"True\",[\"end\",\"begin\"])"] assertOutput "catchDynE: test2: Error shouldn't drop Writer's state" expected actual where m = do modify ("begin":) x <- ask r <- exfn x `catchDynE` (\ (MyException s) -> throwError s) modify ("end":) return r -- Full recovery case_catchDynE_test2' :: Assertion case_catchDynE_test2' = do ((), actual) <- catchOutput $ test1 (runErrorStr (testc m)) let expected = [ "(Right \"False\",[\"end\",\"begin\"])" , "(Right \"True\",[\"end\",\"begin\"])"] assertOutput "catchDynE: test2': Fully recover from errors" expected actual where m = do modify ("begin":) x <- ask r <- exfn x `catchDynE` (\ (MyException _s) -> return False) modify ("end":) return r -- Throwing within a handler case_catchDynE_test3 :: Assertion case_catchDynE_test3 = do ((), actual) <- catchOutput $ test1 (runErrorStr (testc m)) let expected = [ "(Right \"rethrow:thrown\",[\"begin\"])" , "(Right \"True\",[\"end\",\"begin\"])"] assertOutput "catchDynE: test3: Throwing within a handler" expected actual where m = do modify ("begin":) x <- ask r <- exfn x `catchDynE` (\ (MyException s) -> lift . Exc.throw . MyException $ ("rethrow:" ++ s)) modify ("end":) return r -- Implement the transactional behavior: when the exception is raised, -- the state is rolled back to what it existed at the entrance to -- the catch block. -- This is the ``scoping behavior'' of `Handlers in action' case_catchDynE_tran :: Assertion case_catchDynE_tran = do ((), actual1) <- catchOutput $ test1 m1 ((), actual2) <- catchOutput $ test1 m2 let expected1 = ["(\"thrown\",[\"init\"])" ,"(\"True\",[\"end\",\"begin\",\"init\"])"] let expected2 = ["(\"thrown\",[\"begin\",\"init\"])" ,"(\"True\",[\"end\",\"begin\",\"init\"])"] assertOutput "catchDynE: tran: Transactional behaviour" expected1 actual1 >> assertOutput "catchDynE: tran: usual behaviour" expected2 actual2 where m1 = do modify ("init":) testc (transactionState (TxState :: TxState [String]) m) m2 = do modify ("init":) testc m m = do modify ("begin":) x <- ask r <- exfn x modify ("end":) return r
suhailshergill/extensible-effects
test/Control/Eff/Test.hs
mit
6,989
0
16
1,676
1,873
967
906
-1
-1
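A minimal sketch in the style of the tests above, showing the handler ordering those helpers rely on: `runReader` peels the head of the effect list first, then `runState` pairs the result with the final state. This is a hypothetical snippet written against the same library version as the test file, not part of its test suite.

{-# LANGUAGE DataKinds, TypeOperators #-}
import Control.Eff
import Control.Eff.Reader.Strict
import Control.Eff.State.Strict

-- Read the environment and fold it into the state.
addBase :: Eff '[Reader Int, State Int] ()
addBase = do
  base <- ask
  modify (+ (base :: Int))

-- Handlers are peeled off inside-out: Reader first (head of the list),
-- then State, which pairs the result with the final state.
example :: ((), Int)
example = run . runState (10 :: Int) . runReader (32 :: Int) $ addBase
-- example == ((), 42)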
module Network.Yandex.Translate ( -- types APIKey, Language, LanguagesDescr, Direction(..), YandexApiT, YandexApiConfig(..), TranslateParams(..), -- api funcs directions, detect, translate, -- lens apikey, httpOptions, format, options, _config, _session, -- other functions configureApi, runYandexApiT, runYandexApi, runYandexApiSession ) where import Control.Lens import Data.Aeson.Lens import Data.Text import Network.Wreq hiding (options) import qualified Network.Wreq.Session as S import Data.Maybe (fromMaybe) import Control.Arrow ((&&&)) import Control.Applicative import Control.Monad.Catch (MonadThrow(throwM)) import Control.Monad.IO.Class import Network.Yandex.Translate.Types import Network.Yandex.Translate.Internal baseUrl :: String baseUrl = "https://translate.yandex.net/api/v1.5/tr.json/" getLangsUrl, detectUrl, translateUrl :: String getLangsUrl = baseUrl ++ "getLangs" detectUrl = baseUrl ++ "detect" translateUrl = baseUrl ++ "translate" directions :: (MonadIO m, MonadThrow m) => Maybe Language -> YandexApiT m ([Direction], Maybe LanguagesDescr) directions lang = do opts <- getOpts sess <- view _session r <- liftIO $ asValue =<< S.getWith opts sess getLangsUrl let (dm, l) = (^? key "dirs" ._JSON) &&& (^? key "langs" ._JSON) $ r ^. responseBody d <- maybe (throwM $ JSONError "no dirs key in json") return dm return (d, l) where getOpts = do opts <- baseOptions return $ fromMaybe opts $ (\l -> opts & param "ui" .~ [l]) <$> lang detect :: (MonadIO m, MonadThrow m) => Text -> YandexApiT m Language detect text = do opts <- getOpts sess <- view _session r <- liftIO $ asValue =<< S.postWith opts sess detectUrl ["text" := text] let mlang = r ^? responseBody .key "lang" ._String maybe (throwM $ JSONError "Error no lang key in json") return mlang where getOpts = baseOptions translate :: (MonadIO m, MonadThrow m) => Maybe Language -> Language -> TranslateParams -> [Text] -> YandexApiT m ([Text], Direction, Maybe Text) translate f t params texts = do opts <- getOpts sess <- view _session r <- liftIO $ asValue =<< S.postWith opts sess translateUrl postParams let mres_text = r ^? responseBody .key "text" ._JSON mres_lang = r ^? responseBody .key "lang" ._JSON mdetected = r ^? responseBody .key "detected" .key "lang" ._String res_text <- maybe (throwM $ JSONError "no text key in json") return mres_text res_lang <- maybe (throwM $ JSONError "no lang key in json") return mres_lang return (res_text, res_lang, mdetected) where tdir = formatDirection f t postParams = ("text" :=) <$> texts topts = params ^. options getOpts = do dopts <- baseOptions let bopts = dopts & param "lang" .~ [tdir] & param "format" .~ [params ^.format .to (pack . show)] return $ if topts & isn't _Empty then bopts & param "options" .~ (topts <&> pack . show) else bopts
bacher09/yandex-translate
src/Network/Yandex/Translate.hs
mit
3,087
12
17
720
998
531
467
-1
-1
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-} module GHCJS.DOM.JSFFI.Generated.HTMLImageElement (js_setName, setName, js_getName, getName, js_setAlign, setAlign, js_getAlign, getAlign, js_setAlt, setAlt, js_getAlt, getAlt, js_setBorder, setBorder, js_getBorder, getBorder, js_setCrossOrigin, setCrossOrigin, js_getCrossOrigin, getCrossOrigin, js_setHeight, setHeight, js_getHeight, getHeight, js_setHspace, setHspace, js_getHspace, getHspace, js_setIsMap, setIsMap, js_getIsMap, getIsMap, js_setLongDesc, setLongDesc, js_getLongDesc, getLongDesc, js_setSrc, setSrc, js_getSrc, getSrc, js_setSrcset, setSrcset, js_getSrcset, getSrcset, js_setSizes, setSizes, js_getSizes, getSizes, js_getCurrentSrc, getCurrentSrc, js_setUseMap, setUseMap, js_getUseMap, getUseMap, js_setVspace, setVspace, js_getVspace, getVspace, js_setWidth, setWidth, js_getWidth, getWidth, js_getComplete, getComplete, js_setLowsrc, setLowsrc, js_getLowsrc, getLowsrc, js_getNaturalHeight, getNaturalHeight, js_getNaturalWidth, getNaturalWidth, js_getX, getX, js_getY, getY, HTMLImageElement, castToHTMLImageElement, gTypeHTMLImageElement) where import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord) import Data.Typeable (Typeable) import GHCJS.Types (JSRef(..), JSString, castRef) import GHCJS.Foreign (jsNull) import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..)) import GHCJS.Marshal (ToJSRef(..), FromJSRef(..)) import GHCJS.Marshal.Pure (PToJSRef(..), PFromJSRef(..)) import Control.Monad.IO.Class (MonadIO(..)) import Data.Int (Int64) import Data.Word (Word, Word64) import GHCJS.DOM.Types import Control.Applicative ((<$>)) import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName) import GHCJS.DOM.Enums foreign import javascript unsafe "$1[\"name\"] = $2;" js_setName :: JSRef HTMLImageElement -> JSString -> IO () -- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLImageElement.name Mozilla HTMLImageElement.name documentation> setName :: (MonadIO m, ToJSString val) => HTMLImageElement -> val -> m () setName self val = liftIO (js_setName (unHTMLImageElement self) (toJSString val)) foreign import javascript unsafe "$1[\"name\"]" js_getName :: JSRef HTMLImageElement -> IO JSString -- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLImageElement.name Mozilla HTMLImageElement.name documentation> getName :: (MonadIO m, FromJSString result) => HTMLImageElement -> m result getName self = liftIO (fromJSString <$> (js_getName (unHTMLImageElement self))) foreign import javascript unsafe "$1[\"align\"] = $2;" js_setAlign :: JSRef HTMLImageElement -> JSString -> IO () -- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLImageElement.align Mozilla HTMLImageElement.align documentation> setAlign :: (MonadIO m, ToJSString val) => HTMLImageElement -> val -> m () setAlign self val = liftIO (js_setAlign (unHTMLImageElement self) (toJSString val)) foreign import javascript unsafe "$1[\"align\"]" js_getAlign :: JSRef HTMLImageElement -> IO JSString -- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLImageElement.align Mozilla HTMLImageElement.align documentation> getAlign :: (MonadIO m, FromJSString result) => HTMLImageElement -> m result getAlign self = liftIO (fromJSString <$> (js_getAlign (unHTMLImageElement self))) foreign import javascript unsafe "$1[\"alt\"] = $2;" js_setAlt :: JSRef HTMLImageElement -> JSString -> IO () -- | 
<https://developer.mozilla.org/en-US/docs/Web/API/HTMLImageElement.alt Mozilla HTMLImageElement.alt documentation> setAlt :: (MonadIO m, ToJSString val) => HTMLImageElement -> val -> m () setAlt self val = liftIO (js_setAlt (unHTMLImageElement self) (toJSString val)) foreign import javascript unsafe "$1[\"alt\"]" js_getAlt :: JSRef HTMLImageElement -> IO JSString -- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLImageElement.alt Mozilla HTMLImageElement.alt documentation> getAlt :: (MonadIO m, FromJSString result) => HTMLImageElement -> m result getAlt self = liftIO (fromJSString <$> (js_getAlt (unHTMLImageElement self))) foreign import javascript unsafe "$1[\"border\"] = $2;" js_setBorder :: JSRef HTMLImageElement -> JSString -> IO () -- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLImageElement.border Mozilla HTMLImageElement.border documentation> setBorder :: (MonadIO m, ToJSString val) => HTMLImageElement -> val -> m () setBorder self val = liftIO (js_setBorder (unHTMLImageElement self) (toJSString val)) foreign import javascript unsafe "$1[\"border\"]" js_getBorder :: JSRef HTMLImageElement -> IO JSString -- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLImageElement.border Mozilla HTMLImageElement.border documentation> getBorder :: (MonadIO m, FromJSString result) => HTMLImageElement -> m result getBorder self = liftIO (fromJSString <$> (js_getBorder (unHTMLImageElement self))) foreign import javascript unsafe "$1[\"crossOrigin\"] = $2;" js_setCrossOrigin :: JSRef HTMLImageElement -> JSString -> IO () -- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLImageElement.crossOrigin Mozilla HTMLImageElement.crossOrigin documentation> setCrossOrigin :: (MonadIO m, ToJSString val) => HTMLImageElement -> val -> m () setCrossOrigin self val = liftIO (js_setCrossOrigin (unHTMLImageElement self) (toJSString val)) foreign import javascript unsafe "$1[\"crossOrigin\"]" js_getCrossOrigin :: JSRef HTMLImageElement -> IO JSString -- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLImageElement.crossOrigin Mozilla HTMLImageElement.crossOrigin documentation> getCrossOrigin :: (MonadIO m, FromJSString result) => HTMLImageElement -> m result getCrossOrigin self = liftIO (fromJSString <$> (js_getCrossOrigin (unHTMLImageElement self))) foreign import javascript unsafe "$1[\"height\"] = $2;" js_setHeight :: JSRef HTMLImageElement -> Int -> IO () -- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLImageElement.height Mozilla HTMLImageElement.height documentation> setHeight :: (MonadIO m) => HTMLImageElement -> Int -> m () setHeight self val = liftIO (js_setHeight (unHTMLImageElement self) val) foreign import javascript unsafe "$1[\"height\"]" js_getHeight :: JSRef HTMLImageElement -> IO Int -- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLImageElement.height Mozilla HTMLImageElement.height documentation> getHeight :: (MonadIO m) => HTMLImageElement -> m Int getHeight self = liftIO (js_getHeight (unHTMLImageElement self)) foreign import javascript unsafe "$1[\"hspace\"] = $2;" js_setHspace :: JSRef HTMLImageElement -> Int -> IO () -- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLImageElement.hspace Mozilla HTMLImageElement.hspace documentation> setHspace :: (MonadIO m) => HTMLImageElement -> Int -> m () setHspace self val = liftIO (js_setHspace (unHTMLImageElement self) val) foreign import javascript unsafe "$1[\"hspace\"]" js_getHspace :: JSRef HTMLImageElement -> IO Int -- | 
<https://developer.mozilla.org/en-US/docs/Web/API/HTMLImageElement.hspace Mozilla HTMLImageElement.hspace documentation> getHspace :: (MonadIO m) => HTMLImageElement -> m Int getHspace self = liftIO (js_getHspace (unHTMLImageElement self)) foreign import javascript unsafe "$1[\"isMap\"] = $2;" js_setIsMap :: JSRef HTMLImageElement -> Bool -> IO () -- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLImageElement.isMap Mozilla HTMLImageElement.isMap documentation> setIsMap :: (MonadIO m) => HTMLImageElement -> Bool -> m () setIsMap self val = liftIO (js_setIsMap (unHTMLImageElement self) val) foreign import javascript unsafe "($1[\"isMap\"] ? 1 : 0)" js_getIsMap :: JSRef HTMLImageElement -> IO Bool -- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLImageElement.isMap Mozilla HTMLImageElement.isMap documentation> getIsMap :: (MonadIO m) => HTMLImageElement -> m Bool getIsMap self = liftIO (js_getIsMap (unHTMLImageElement self)) foreign import javascript unsafe "$1[\"longDesc\"] = $2;" js_setLongDesc :: JSRef HTMLImageElement -> JSString -> IO () -- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLImageElement.longDesc Mozilla HTMLImageElement.longDesc documentation> setLongDesc :: (MonadIO m, ToJSString val) => HTMLImageElement -> val -> m () setLongDesc self val = liftIO (js_setLongDesc (unHTMLImageElement self) (toJSString val)) foreign import javascript unsafe "$1[\"longDesc\"]" js_getLongDesc :: JSRef HTMLImageElement -> IO JSString -- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLImageElement.longDesc Mozilla HTMLImageElement.longDesc documentation> getLongDesc :: (MonadIO m, FromJSString result) => HTMLImageElement -> m result getLongDesc self = liftIO (fromJSString <$> (js_getLongDesc (unHTMLImageElement self))) foreign import javascript unsafe "$1[\"src\"] = $2;" js_setSrc :: JSRef HTMLImageElement -> JSString -> IO () -- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLImageElement.src Mozilla HTMLImageElement.src documentation> setSrc :: (MonadIO m, ToJSString val) => HTMLImageElement -> val -> m () setSrc self val = liftIO (js_setSrc (unHTMLImageElement self) (toJSString val)) foreign import javascript unsafe "$1[\"src\"]" js_getSrc :: JSRef HTMLImageElement -> IO JSString -- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLImageElement.src Mozilla HTMLImageElement.src documentation> getSrc :: (MonadIO m, FromJSString result) => HTMLImageElement -> m result getSrc self = liftIO (fromJSString <$> (js_getSrc (unHTMLImageElement self))) foreign import javascript unsafe "$1[\"srcset\"] = $2;" js_setSrcset :: JSRef HTMLImageElement -> JSString -> IO () -- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLImageElement.srcset Mozilla HTMLImageElement.srcset documentation> setSrcset :: (MonadIO m, ToJSString val) => HTMLImageElement -> val -> m () setSrcset self val = liftIO (js_setSrcset (unHTMLImageElement self) (toJSString val)) foreign import javascript unsafe "$1[\"srcset\"]" js_getSrcset :: JSRef HTMLImageElement -> IO JSString -- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLImageElement.srcset Mozilla HTMLImageElement.srcset documentation> getSrcset :: (MonadIO m, FromJSString result) => HTMLImageElement -> m result getSrcset self = liftIO (fromJSString <$> (js_getSrcset (unHTMLImageElement self))) foreign import javascript unsafe "$1[\"sizes\"] = $2;" js_setSizes :: JSRef HTMLImageElement -> JSString -> IO () -- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLImageElement.sizes Mozilla HTMLImageElement.sizes 
documentation> setSizes :: (MonadIO m, ToJSString val) => HTMLImageElement -> val -> m () setSizes self val = liftIO (js_setSizes (unHTMLImageElement self) (toJSString val)) foreign import javascript unsafe "$1[\"sizes\"]" js_getSizes :: JSRef HTMLImageElement -> IO JSString -- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLImageElement.sizes Mozilla HTMLImageElement.sizes documentation> getSizes :: (MonadIO m, FromJSString result) => HTMLImageElement -> m result getSizes self = liftIO (fromJSString <$> (js_getSizes (unHTMLImageElement self))) foreign import javascript unsafe "$1[\"currentSrc\"]" js_getCurrentSrc :: JSRef HTMLImageElement -> IO JSString -- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLImageElement.currentSrc Mozilla HTMLImageElement.currentSrc documentation> getCurrentSrc :: (MonadIO m, FromJSString result) => HTMLImageElement -> m result getCurrentSrc self = liftIO (fromJSString <$> (js_getCurrentSrc (unHTMLImageElement self))) foreign import javascript unsafe "$1[\"useMap\"] = $2;" js_setUseMap :: JSRef HTMLImageElement -> JSString -> IO () -- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLImageElement.useMap Mozilla HTMLImageElement.useMap documentation> setUseMap :: (MonadIO m, ToJSString val) => HTMLImageElement -> val -> m () setUseMap self val = liftIO (js_setUseMap (unHTMLImageElement self) (toJSString val)) foreign import javascript unsafe "$1[\"useMap\"]" js_getUseMap :: JSRef HTMLImageElement -> IO JSString -- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLImageElement.useMap Mozilla HTMLImageElement.useMap documentation> getUseMap :: (MonadIO m, FromJSString result) => HTMLImageElement -> m result getUseMap self = liftIO (fromJSString <$> (js_getUseMap (unHTMLImageElement self))) foreign import javascript unsafe "$1[\"vspace\"] = $2;" js_setVspace :: JSRef HTMLImageElement -> Int -> IO () -- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLImageElement.vspace Mozilla HTMLImageElement.vspace documentation> setVspace :: (MonadIO m) => HTMLImageElement -> Int -> m () setVspace self val = liftIO (js_setVspace (unHTMLImageElement self) val) foreign import javascript unsafe "$1[\"vspace\"]" js_getVspace :: JSRef HTMLImageElement -> IO Int -- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLImageElement.vspace Mozilla HTMLImageElement.vspace documentation> getVspace :: (MonadIO m) => HTMLImageElement -> m Int getVspace self = liftIO (js_getVspace (unHTMLImageElement self)) foreign import javascript unsafe "$1[\"width\"] = $2;" js_setWidth :: JSRef HTMLImageElement -> Int -> IO () -- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLImageElement.width Mozilla HTMLImageElement.width documentation> setWidth :: (MonadIO m) => HTMLImageElement -> Int -> m () setWidth self val = liftIO (js_setWidth (unHTMLImageElement self) val) foreign import javascript unsafe "$1[\"width\"]" js_getWidth :: JSRef HTMLImageElement -> IO Int -- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLImageElement.width Mozilla HTMLImageElement.width documentation> getWidth :: (MonadIO m) => HTMLImageElement -> m Int getWidth self = liftIO (js_getWidth (unHTMLImageElement self)) foreign import javascript unsafe "($1[\"complete\"] ? 
1 : 0)" js_getComplete :: JSRef HTMLImageElement -> IO Bool -- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLImageElement.complete Mozilla HTMLImageElement.complete documentation> getComplete :: (MonadIO m) => HTMLImageElement -> m Bool getComplete self = liftIO (js_getComplete (unHTMLImageElement self)) foreign import javascript unsafe "$1[\"lowsrc\"] = $2;" js_setLowsrc :: JSRef HTMLImageElement -> JSString -> IO () -- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLImageElement.lowsrc Mozilla HTMLImageElement.lowsrc documentation> setLowsrc :: (MonadIO m, ToJSString val) => HTMLImageElement -> val -> m () setLowsrc self val = liftIO (js_setLowsrc (unHTMLImageElement self) (toJSString val)) foreign import javascript unsafe "$1[\"lowsrc\"]" js_getLowsrc :: JSRef HTMLImageElement -> IO JSString -- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLImageElement.lowsrc Mozilla HTMLImageElement.lowsrc documentation> getLowsrc :: (MonadIO m, FromJSString result) => HTMLImageElement -> m result getLowsrc self = liftIO (fromJSString <$> (js_getLowsrc (unHTMLImageElement self))) foreign import javascript unsafe "$1[\"naturalHeight\"]" js_getNaturalHeight :: JSRef HTMLImageElement -> IO Int -- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLImageElement.naturalHeight Mozilla HTMLImageElement.naturalHeight documentation> getNaturalHeight :: (MonadIO m) => HTMLImageElement -> m Int getNaturalHeight self = liftIO (js_getNaturalHeight (unHTMLImageElement self)) foreign import javascript unsafe "$1[\"naturalWidth\"]" js_getNaturalWidth :: JSRef HTMLImageElement -> IO Int -- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLImageElement.naturalWidth Mozilla HTMLImageElement.naturalWidth documentation> getNaturalWidth :: (MonadIO m) => HTMLImageElement -> m Int getNaturalWidth self = liftIO (js_getNaturalWidth (unHTMLImageElement self)) foreign import javascript unsafe "$1[\"x\"]" js_getX :: JSRef HTMLImageElement -> IO Int -- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLImageElement.x Mozilla HTMLImageElement.x documentation> getX :: (MonadIO m) => HTMLImageElement -> m Int getX self = liftIO (js_getX (unHTMLImageElement self)) foreign import javascript unsafe "$1[\"y\"]" js_getY :: JSRef HTMLImageElement -> IO Int -- | <https://developer.mozilla.org/en-US/docs/Web/API/HTMLImageElement.y Mozilla HTMLImageElement.y documentation> getY :: (MonadIO m) => HTMLImageElement -> m Int getY self = liftIO (js_getY (unHTMLImageElement self))
plow-technologies/ghcjs-dom
src/GHCJS/DOM/JSFFI/Generated/HTMLImageElement.hs
mit
17,283
260
11
2,607
3,677
1,949
1,728
248
1
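A small example of composing the generated accessors above; the `describeImage` name is mine, and it assumes the usual `String` instance of `FromJSString` in this ghcjs-dom version.

import Control.Monad.IO.Class (MonadIO)
import GHCJS.DOM.JSFFI.Generated.HTMLImageElement

-- Read back a few of the properties of an <img> element.
describeImage :: MonadIO m => HTMLImageElement -> m String
describeImage img = do
  src <- getSrc img
  w   <- getNaturalWidth img
  h   <- getNaturalHeight img
  return (src ++ " (" ++ show w ++ "x" ++ show h ++ ")")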
module Main where import Options.Applicative import qualified Data.ByteString.Char8 as BS import Jenkins.Client (handleCmd, showError) import Jenkins.Client.Types import Network.HTTP.Client import System.IO (stderr) import System.Exit import Options main :: IO () main = do opts <- execParser handleOpts withManager defaultManagerSettings $ \m -> do let e = Env { envOpts = opts , envManager = m } eError <- runClient handleCmd e case eError of (Left err) -> BS.hPutStrLn stderr (showError err) >> exitFailure (Right _) -> exitSuccess where handleOpts = info (helper <*> parseOptions) ( fullDesc <> progDesc "A cli tool for managing Jenkins' builds" <> header "jenkins-tty - a command line interface for Jenkins" )
afiore/jenkins-tty.hs
src/Main.hs
mit
841
0
17
231
220
118
102
23
2
{-# LANGUAGE CPP #-}
module GHCJS.DOM.WebGLContextAttributes (
#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
  module GHCJS.DOM.JSFFI.Generated.WebGLContextAttributes
#else
#endif
  ) where

#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
import GHCJS.DOM.JSFFI.Generated.WebGLContextAttributes
#else
#endif
plow-technologies/ghcjs-dom
src/GHCJS/DOM/WebGLContextAttributes.hs
mit
379
0
5
33
33
26
7
4
0
module Main (main) where import qualified Data.ByteString as B import qualified Data.ByteString.Char8 as BC import Data.Char (isSpace, toUpper) import Data.List (nub, sort, sortBy) import System.Environment (getArgs) import System.FilePath (takeDirectory) import HsSearch.FileUtil (getParentPath, pathExists) import HsSearch.SearchOptions import HsSearch.Searcher (getSearchFiles, doSearchFiles) import HsSearch.SearchResult import HsSearch.SearchSettings validateSettings :: SearchSettings -> [String] validateSettings settings = concatMap ($settings) validators where validators = [ \s -> ["Startpath not defined" | startPath s == ""] , \s -> ["No search patterns defined" | null (searchPatterns s)] , \s -> ["Invalid lines after" | linesAfter s < 0] , \s -> ["Invalid lines before" | linesBefore s < 0] , \s -> ["Invalid max line length" | maxLineLength s < 0] ] errsOrUsage :: [SearchOption] -> SearchSettings -> Maybe String errsOrUsage searchOptions settings = case usage of "" -> Nothing _ -> Just usage where errs = validateSettings settings errMsg = if not (null errs) then "\nERROR: " ++ head errs ++ "\n\n" else "" usage = case (printUsage settings, not (null errMsg)) of (True, _) -> "\n" ++ getUsage searchOptions (False, True) -> errMsg ++ getUsage searchOptions _ -> "" formatResults :: SearchSettings -> [SearchResult] -> String formatResults settings results = "\nSearch results (" ++ show (length results) ++ "):\n" ++ (if not (null results) then unlines (map (formatSearchResult settings) results) else "") getMatchingDirs :: [SearchResult] -> [FilePath] getMatchingDirs = sort . nub . map getDirectory where getDirectory r = takeDirectory (filePath r) formatMatchingDirs :: [SearchResult] -> String formatMatchingDirs results = "\nDirectories with matches (" ++ show (length matchingDirs) ++ "):\n" ++ unlines matchingDirs where matchingDirs = getMatchingDirs results getMatchingFiles :: [SearchResult] -> [FilePath] getMatchingFiles = sort . nub . map filePath formatMatchingFiles :: [SearchResult] -> String formatMatchingFiles results = "\nFiles with matches (" ++ show (length matchingFiles) ++ "):\n" ++ unlines matchingFiles where matchingFiles = getMatchingFiles results byteStringToUpper :: B.ByteString -> B.ByteString byteStringToUpper = BC.pack . map toUpper . BC.unpack sortCaseInsensitive :: [B.ByteString] -> [B.ByteString] sortCaseInsensitive = sortBy compareCaseInsensitive where compareCaseInsensitive a b = byteStringToUpper a `compare` byteStringToUpper b getMatchingLines :: [SearchResult] -> Bool -> [B.ByteString] getMatchingLines results unique | unique = (sortCaseInsensitive . nub . map trimLine) results | otherwise = (sortCaseInsensitive . map trimLine) results where trimLine = BC.dropWhile isSpace . 
line formatMatchingLines :: [SearchResult] -> Bool -> String formatMatchingLines results unique = "\n" ++ hdrText ++ " (" ++ show (length matchingLines) ++ "):\n" ++ BC.unpack (BC.intercalate (BC.pack "\n") matchingLines) ++ "\n" where matchingLines = getMatchingLines results unique hdrText = if unique then "Unique lines with matches" else "Lines with matches" formatSearchDirs :: [FilePath] -> String formatSearchDirs dirs = "\nDirectories to be searched (" ++ show (length dirs) ++ "):\n" ++ unlines (sort dirs) formatSearchFiles :: [FilePath] -> String formatSearchFiles files = formatSearchDirs (nub (map getParentPath files)) ++ "\nFiles to be searched (" ++ show (length files) ++ "):\n" ++ unlines (sort files) logMsg :: String -> IO () logMsg = putStr main :: IO () main = do args <- getArgs searchOptions <- getSearchOptions case settingsFromArgs searchOptions args of Left errMsg -> logMsg $ "\nERROR: " ++ errMsg ++ "\n" ++ getUsage searchOptions ++ "\n" Right settings -> do logMsg $ if debug settings then "\nsettings: " ++ show settings ++ "\n" else "" case errsOrUsage searchOptions settings of Just usage -> logMsg $ usage ++ "\n" Nothing -> do foundPath <- pathExists (startPath settings) if foundPath then do searchFiles <- getSearchFiles settings logMsg $ if verbose settings then formatSearchFiles searchFiles else "" results <- doSearchFiles settings searchFiles logMsg $ if printResults settings then formatResults settings results else "" logMsg $ if listDirs settings then formatMatchingDirs results else "" logMsg $ if listFiles settings then formatMatchingFiles results else "" logMsg $ if listLines settings then formatMatchingLines results (uniqueLines settings) else "" logMsg "" else logMsg $ "\nERROR: Startpath not found\n\n" ++ getUsage searchOptions ++ "\n"
clarkcb/xsearch
haskell/hssearch/app/Main.hs
mit
5,337
0
24
1,427
1,453
745
708
115
10
-- A palindromic number reads the same both ways. The largest
-- palindrome made from the product of two 2-digit numbers is 9009 =
-- 91 × 99.

-- Find the largest palindrome made from the product of two 3-digit numbers.

module Euler.Problem004
  ( solution
  , palindrome
  ) where

solution :: Integer -> Integer
solution = head . filter palindrome . desortedProductsFrom

palindrome :: Integer -> Bool
palindrome i = s == reverse s
  where s = show i

desortedProductsFrom :: Integer -> [Integer]
desortedProductsFrom ceil = foldr merge' [] [map (* i) (ceil `downto` i) | i <- ceil `downto` 1]

-- Adapted from https://www.haskell.org/pipermail/haskell-cafe/2009-April/060034.html
merge' :: Ord a => [a] -> [a] -> [a]
merge' (x:xs) ys = x : merge xs ys
merge' []     ys = ys

merge :: Ord a => [a] -> [a] -> [a]
merge xs@(x:xt) ys@(y:yt)
  | x > y     = x : merge xt ys
  | otherwise = y : merge xs yt
merge xs ys = xs ++ ys

downto :: Integer -> Integer -> [Integer]
downto ceil = enumFromThenTo (ceil - 1) (ceil - 2)
whittle/euler
src/Euler/Problem004.hs
mit
1,031
0
9
220
362
194
168
20
1
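For reference, the published Project Euler answers are 9009 = 91 × 99 for two 2-digit factors (as the header comment says) and 906609 = 913 × 993 for two 3-digit factors. Assuming the author's "desorted" merge really surfaces the larger palindromic products first, the calls below should return those values.

-- GHCi sanity check:
-- solution 100    -- expected: 9009
-- solution 1000   -- expected: 906609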
-- In ghci, you can just type simple expressions, e.g.
2 + 15
5 / 2

-- Need to put parens around negative numbers:
5 * (-3)

-- The following works if typed into ghci - but gives error when loaded, via :l arithmetic.hs
let x = 5 * 7

div 92 10 -- 9
92 `div` 10 -- backticks make it infix, so purpose is clearer
claremacrae/haskell_snippets
arithmetic.hs
mit
314
8
6
72
62
31
31
-1
-1
applyLog :: (a, String) -> (a -> (b,String)) -> (b, String)
applyLog (x,log) f = let (y, newLog) = f x in (y, log ++ newLog)
rglew/lyah
applylog.hs
mit
127
0
9
28
86
48
38
2
1
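A small usage example for `applyLog` above (the `double` helper is made up for illustration): each application threads the value through the function while appending to the log, which is exactly the behaviour the Writer monad packages up.

double :: Int -> (Int, String)
double x = (x * 2, "Doubled. ")

main :: IO ()
main = print ((3, "Started with 3. ") `applyLog` double `applyLog` double)
-- prints (12,"Started with 3. Doubled. Doubled. ")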
{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE StrictData #-} {-# LANGUAGE TupleSections #-} -- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticache-replicationgroup-nodegroupconfiguration.html module Stratosphere.ResourceProperties.ElastiCacheReplicationGroupNodeGroupConfiguration where import Stratosphere.ResourceImports -- | Full data type definition for -- ElastiCacheReplicationGroupNodeGroupConfiguration. See -- 'elastiCacheReplicationGroupNodeGroupConfiguration' for a more convenient -- constructor. data ElastiCacheReplicationGroupNodeGroupConfiguration = ElastiCacheReplicationGroupNodeGroupConfiguration { _elastiCacheReplicationGroupNodeGroupConfigurationNodeGroupId :: Maybe (Val Text) , _elastiCacheReplicationGroupNodeGroupConfigurationPrimaryAvailabilityZone :: Maybe (Val Text) , _elastiCacheReplicationGroupNodeGroupConfigurationReplicaAvailabilityZones :: Maybe (ValList Text) , _elastiCacheReplicationGroupNodeGroupConfigurationReplicaCount :: Maybe (Val Integer) , _elastiCacheReplicationGroupNodeGroupConfigurationSlots :: Maybe (Val Text) } deriving (Show, Eq) instance ToJSON ElastiCacheReplicationGroupNodeGroupConfiguration where toJSON ElastiCacheReplicationGroupNodeGroupConfiguration{..} = object $ catMaybes [ fmap (("NodeGroupId",) . toJSON) _elastiCacheReplicationGroupNodeGroupConfigurationNodeGroupId , fmap (("PrimaryAvailabilityZone",) . toJSON) _elastiCacheReplicationGroupNodeGroupConfigurationPrimaryAvailabilityZone , fmap (("ReplicaAvailabilityZones",) . toJSON) _elastiCacheReplicationGroupNodeGroupConfigurationReplicaAvailabilityZones , fmap (("ReplicaCount",) . toJSON) _elastiCacheReplicationGroupNodeGroupConfigurationReplicaCount , fmap (("Slots",) . toJSON) _elastiCacheReplicationGroupNodeGroupConfigurationSlots ] -- | Constructor for 'ElastiCacheReplicationGroupNodeGroupConfiguration' -- containing required fields as arguments. 
elastiCacheReplicationGroupNodeGroupConfiguration :: ElastiCacheReplicationGroupNodeGroupConfiguration elastiCacheReplicationGroupNodeGroupConfiguration = ElastiCacheReplicationGroupNodeGroupConfiguration { _elastiCacheReplicationGroupNodeGroupConfigurationNodeGroupId = Nothing , _elastiCacheReplicationGroupNodeGroupConfigurationPrimaryAvailabilityZone = Nothing , _elastiCacheReplicationGroupNodeGroupConfigurationReplicaAvailabilityZones = Nothing , _elastiCacheReplicationGroupNodeGroupConfigurationReplicaCount = Nothing , _elastiCacheReplicationGroupNodeGroupConfigurationSlots = Nothing } -- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticache-replicationgroup-nodegroupconfiguration.html#cfn-elasticache-replicationgroup-nodegroupconfiguration-nodegroupid ecrgngcNodeGroupId :: Lens' ElastiCacheReplicationGroupNodeGroupConfiguration (Maybe (Val Text)) ecrgngcNodeGroupId = lens _elastiCacheReplicationGroupNodeGroupConfigurationNodeGroupId (\s a -> s { _elastiCacheReplicationGroupNodeGroupConfigurationNodeGroupId = a }) -- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticache-replicationgroup-nodegroupconfiguration.html#cfn-elasticache-replicationgroup-nodegroupconfiguration-primaryavailabilityzone ecrgngcPrimaryAvailabilityZone :: Lens' ElastiCacheReplicationGroupNodeGroupConfiguration (Maybe (Val Text)) ecrgngcPrimaryAvailabilityZone = lens _elastiCacheReplicationGroupNodeGroupConfigurationPrimaryAvailabilityZone (\s a -> s { _elastiCacheReplicationGroupNodeGroupConfigurationPrimaryAvailabilityZone = a }) -- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticache-replicationgroup-nodegroupconfiguration.html#cfn-elasticache-replicationgroup-nodegroupconfiguration-replicaavailabilityzones ecrgngcReplicaAvailabilityZones :: Lens' ElastiCacheReplicationGroupNodeGroupConfiguration (Maybe (ValList Text)) ecrgngcReplicaAvailabilityZones = lens _elastiCacheReplicationGroupNodeGroupConfigurationReplicaAvailabilityZones (\s a -> s { _elastiCacheReplicationGroupNodeGroupConfigurationReplicaAvailabilityZones = a }) -- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticache-replicationgroup-nodegroupconfiguration.html#cfn-elasticache-replicationgroup-nodegroupconfiguration-replicacount ecrgngcReplicaCount :: Lens' ElastiCacheReplicationGroupNodeGroupConfiguration (Maybe (Val Integer)) ecrgngcReplicaCount = lens _elastiCacheReplicationGroupNodeGroupConfigurationReplicaCount (\s a -> s { _elastiCacheReplicationGroupNodeGroupConfigurationReplicaCount = a }) -- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-elasticache-replicationgroup-nodegroupconfiguration.html#cfn-elasticache-replicationgroup-nodegroupconfiguration-slots ecrgngcSlots :: Lens' ElastiCacheReplicationGroupNodeGroupConfiguration (Maybe (Val Text)) ecrgngcSlots = lens _elastiCacheReplicationGroupNodeGroupConfigurationSlots (\s a -> s { _elastiCacheReplicationGroupNodeGroupConfigurationSlots = a })
frontrowed/stratosphere
library-gen/Stratosphere/ResourceProperties/ElastiCacheReplicationGroupNodeGroupConfiguration.hs
mit
5,108
0
12
351
539
306
233
42
1
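The generated lenses are meant to be used with the usual lens setters. A hypothetical sketch follows, assuming stratosphere's `Literal` constructor for `Val`, an umbrella `Stratosphere` module that re-exports it, and the `&`/`?~` operators from lens; none of these are shown in the file itself.

{-# LANGUAGE OverloadedStrings #-}
import Control.Lens ((&), (?~))
import Stratosphere  -- assumed umbrella module re-exporting Val(..) and this record type

-- Two replicas pinned to a primary availability zone; every field is optional,
-- so start from the empty constructor and set only what is needed.
myNodeGroup :: ElastiCacheReplicationGroupNodeGroupConfiguration
myNodeGroup = elastiCacheReplicationGroupNodeGroupConfiguration
  & ecrgngcReplicaCount ?~ Literal 2
  & ecrgngcPrimaryAvailabilityZone ?~ Literal "us-east-1a"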
-----------------------------------------------------------
---- |
---- Module: DNA
---- Description: Calculate hamming distance between DNA strands
---- Copyright: (c) 2015 Alex Dzyoba <[email protected]>
---- License: MIT
-------------------------------------------------------------

module DNA where

dnaEquals :: String -> String -> [Bool]
dnaEquals = zipWith (==)

hammingDistance :: String -> String -> Int
hammingDistance dna1 dna2 = length mutatedAcids
  where mutatedAcids = filter (==False) (dnaEquals dna1 dna2)
dzeban/haskell-exercism
point-mutations/haskell/point-mutations/DNA.hs
mit
529
0
9
66
87
51
36
6
1
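Quick check of the definition above: comparing "GGACG" with "GGTCG" differs only at the third base, so the distance is 1. Note that `zipWith` silently truncates to the shorter strand, so strands of unequal length are compared only on their common prefix.

-- hammingDistance "GGACG" "GGTCG"  == 1
-- hammingDistance "AAA"   "AAAT"   == 0   -- the extra base is ignored by zipWith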
{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE StrictData #-} {-# LANGUAGE TupleSections #-} -- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-dms-endpoint-mongodbsettings.html module Stratosphere.ResourceProperties.DMSEndpointMongoDbSettings where import Stratosphere.ResourceImports -- | Full data type definition for DMSEndpointMongoDbSettings. See -- 'dmsEndpointMongoDbSettings' for a more convenient constructor. data DMSEndpointMongoDbSettings = DMSEndpointMongoDbSettings { _dMSEndpointMongoDbSettingsAuthMechanism :: Maybe (Val Text) , _dMSEndpointMongoDbSettingsAuthSource :: Maybe (Val Text) , _dMSEndpointMongoDbSettingsAuthType :: Maybe (Val Text) , _dMSEndpointMongoDbSettingsDatabaseName :: Maybe (Val Text) , _dMSEndpointMongoDbSettingsDocsToInvestigate :: Maybe (Val Text) , _dMSEndpointMongoDbSettingsExtractDocId :: Maybe (Val Text) , _dMSEndpointMongoDbSettingsNestingLevel :: Maybe (Val Text) , _dMSEndpointMongoDbSettingsPassword :: Maybe (Val Text) , _dMSEndpointMongoDbSettingsPort :: Maybe (Val Integer) , _dMSEndpointMongoDbSettingsServerName :: Maybe (Val Text) , _dMSEndpointMongoDbSettingsUsername :: Maybe (Val Text) } deriving (Show, Eq) instance ToJSON DMSEndpointMongoDbSettings where toJSON DMSEndpointMongoDbSettings{..} = object $ catMaybes [ fmap (("AuthMechanism",) . toJSON) _dMSEndpointMongoDbSettingsAuthMechanism , fmap (("AuthSource",) . toJSON) _dMSEndpointMongoDbSettingsAuthSource , fmap (("AuthType",) . toJSON) _dMSEndpointMongoDbSettingsAuthType , fmap (("DatabaseName",) . toJSON) _dMSEndpointMongoDbSettingsDatabaseName , fmap (("DocsToInvestigate",) . toJSON) _dMSEndpointMongoDbSettingsDocsToInvestigate , fmap (("ExtractDocId",) . toJSON) _dMSEndpointMongoDbSettingsExtractDocId , fmap (("NestingLevel",) . toJSON) _dMSEndpointMongoDbSettingsNestingLevel , fmap (("Password",) . toJSON) _dMSEndpointMongoDbSettingsPassword , fmap (("Port",) . toJSON) _dMSEndpointMongoDbSettingsPort , fmap (("ServerName",) . toJSON) _dMSEndpointMongoDbSettingsServerName , fmap (("Username",) . toJSON) _dMSEndpointMongoDbSettingsUsername ] -- | Constructor for 'DMSEndpointMongoDbSettings' containing required fields -- as arguments. 
dmsEndpointMongoDbSettings :: DMSEndpointMongoDbSettings dmsEndpointMongoDbSettings = DMSEndpointMongoDbSettings { _dMSEndpointMongoDbSettingsAuthMechanism = Nothing , _dMSEndpointMongoDbSettingsAuthSource = Nothing , _dMSEndpointMongoDbSettingsAuthType = Nothing , _dMSEndpointMongoDbSettingsDatabaseName = Nothing , _dMSEndpointMongoDbSettingsDocsToInvestigate = Nothing , _dMSEndpointMongoDbSettingsExtractDocId = Nothing , _dMSEndpointMongoDbSettingsNestingLevel = Nothing , _dMSEndpointMongoDbSettingsPassword = Nothing , _dMSEndpointMongoDbSettingsPort = Nothing , _dMSEndpointMongoDbSettingsServerName = Nothing , _dMSEndpointMongoDbSettingsUsername = Nothing } -- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-dms-endpoint-mongodbsettings.html#cfn-dms-endpoint-mongodbsettings-authmechanism dmsemdsAuthMechanism :: Lens' DMSEndpointMongoDbSettings (Maybe (Val Text)) dmsemdsAuthMechanism = lens _dMSEndpointMongoDbSettingsAuthMechanism (\s a -> s { _dMSEndpointMongoDbSettingsAuthMechanism = a }) -- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-dms-endpoint-mongodbsettings.html#cfn-dms-endpoint-mongodbsettings-authsource dmsemdsAuthSource :: Lens' DMSEndpointMongoDbSettings (Maybe (Val Text)) dmsemdsAuthSource = lens _dMSEndpointMongoDbSettingsAuthSource (\s a -> s { _dMSEndpointMongoDbSettingsAuthSource = a }) -- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-dms-endpoint-mongodbsettings.html#cfn-dms-endpoint-mongodbsettings-authtype dmsemdsAuthType :: Lens' DMSEndpointMongoDbSettings (Maybe (Val Text)) dmsemdsAuthType = lens _dMSEndpointMongoDbSettingsAuthType (\s a -> s { _dMSEndpointMongoDbSettingsAuthType = a }) -- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-dms-endpoint-mongodbsettings.html#cfn-dms-endpoint-mongodbsettings-databasename dmsemdsDatabaseName :: Lens' DMSEndpointMongoDbSettings (Maybe (Val Text)) dmsemdsDatabaseName = lens _dMSEndpointMongoDbSettingsDatabaseName (\s a -> s { _dMSEndpointMongoDbSettingsDatabaseName = a }) -- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-dms-endpoint-mongodbsettings.html#cfn-dms-endpoint-mongodbsettings-docstoinvestigate dmsemdsDocsToInvestigate :: Lens' DMSEndpointMongoDbSettings (Maybe (Val Text)) dmsemdsDocsToInvestigate = lens _dMSEndpointMongoDbSettingsDocsToInvestigate (\s a -> s { _dMSEndpointMongoDbSettingsDocsToInvestigate = a }) -- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-dms-endpoint-mongodbsettings.html#cfn-dms-endpoint-mongodbsettings-extractdocid dmsemdsExtractDocId :: Lens' DMSEndpointMongoDbSettings (Maybe (Val Text)) dmsemdsExtractDocId = lens _dMSEndpointMongoDbSettingsExtractDocId (\s a -> s { _dMSEndpointMongoDbSettingsExtractDocId = a }) -- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-dms-endpoint-mongodbsettings.html#cfn-dms-endpoint-mongodbsettings-nestinglevel dmsemdsNestingLevel :: Lens' DMSEndpointMongoDbSettings (Maybe (Val Text)) dmsemdsNestingLevel = lens _dMSEndpointMongoDbSettingsNestingLevel (\s a -> s { _dMSEndpointMongoDbSettingsNestingLevel = a }) -- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-dms-endpoint-mongodbsettings.html#cfn-dms-endpoint-mongodbsettings-password dmsemdsPassword :: Lens' DMSEndpointMongoDbSettings (Maybe (Val Text)) dmsemdsPassword = lens _dMSEndpointMongoDbSettingsPassword (\s a -> s { _dMSEndpointMongoDbSettingsPassword 
= a }) -- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-dms-endpoint-mongodbsettings.html#cfn-dms-endpoint-mongodbsettings-port dmsemdsPort :: Lens' DMSEndpointMongoDbSettings (Maybe (Val Integer)) dmsemdsPort = lens _dMSEndpointMongoDbSettingsPort (\s a -> s { _dMSEndpointMongoDbSettingsPort = a }) -- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-dms-endpoint-mongodbsettings.html#cfn-dms-endpoint-mongodbsettings-servername dmsemdsServerName :: Lens' DMSEndpointMongoDbSettings (Maybe (Val Text)) dmsemdsServerName = lens _dMSEndpointMongoDbSettingsServerName (\s a -> s { _dMSEndpointMongoDbSettingsServerName = a }) -- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-dms-endpoint-mongodbsettings.html#cfn-dms-endpoint-mongodbsettings-username dmsemdsUsername :: Lens' DMSEndpointMongoDbSettings (Maybe (Val Text)) dmsemdsUsername = lens _dMSEndpointMongoDbSettingsUsername (\s a -> s { _dMSEndpointMongoDbSettingsUsername = a })
frontrowed/stratosphere
library-gen/Stratosphere/ResourceProperties/DMSEndpointMongoDbSettings.hs
mit
6,956
0
12
637
1,083
610
473
72
1
{-# LANGUAGE ConstraintKinds #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE ViewPatterns #-} module Control.Biegunka.Biegunka ( -- * Wrap/unwrap biegunka interpreters biegunka -- * Auxiliary , expandHome ) where import Control.Lens import Data.Bool (bool) import qualified Data.List as List import Data.Version (showVersion) import System.Exit (ExitCode(..)) import System.FilePath ((</>)) import qualified System.Posix as Posix import Control.Biegunka.Interpreter (Interpreter(..)) import Control.Biegunka.Language import qualified Control.Biegunka.Logger as Logger import Control.Biegunka.Script (Script, defaultMAnnotations, defaultAnnotations, evalScript) import Control.Biegunka.Settings import qualified System.IO as IO import qualified Git_biegunka as Git import qualified Paths_biegunka as Paths -- | Entry point into the library biegunka :: (Settings -> Settings) -- ^ User defined settings -> Interpreter -- ^ Combined interpreters -> Script 'Sources () -- ^ Script to interpret -> IO ExitCode biegunka (($ defaultSettings) -> c) (I interpret) script = do rr <- views runRoot expandHome c br <- views biegunkaRoot expandHome c Logger.with $ \l -> do Logger.write IO.stdout l (info rr br) let annotatedScript = evalScript defaultMAnnotations (set runRoot rr defaultAnnotations) script c' = c & runRoot .~ rr & biegunkaRoot .~ br & _logger ?~ l interpret c' annotatedScript (return ExitSuccess) where info rr br = List.intercalate "\n" $ [ " ___ _ __ " , " / _ )(_)__ ___ ___ _____ / /_____ _ " , " / _ / / -_) _ `/ // / _ \\/ '_/ _ `/ " , "/____/_/\\__/\\_, /\\_,_/_//_/_/\\_\\\\_,_/ " ++ version , " /___/ " , "" , "* Relative filepaths are deemed relative to " ++ rr , "* Data will be saved in " ++ br ] ++ bool [] ["* Offline mode"] (has (mode._Offline) c) where version = showVersion Paths.version ++ "-" ++ Git.hash -- | Expand \"~\" at the start of the path expandHome :: String -> IO String expandHome ('~' : (splitUser -> (user, '/' : xs))) = getHome user <&> (</> xs) expandHome ('~' : (splitUser -> (user, ""))) = getHome user expandHome x = return x -- | Break the path on the first '/' splitUser :: String -> (String, String) splitUser = break (== '/') -- | Get home directory for user by name. If the name is empty return the value -- of HOME environment variable getHome :: String -> IO FilePath getHome "" = Posix.getEnvDefault "HOME" "" getHome user = fmap Posix.homeDirectory (Posix.getUserEntryForName user)
biegunka/biegunka
src/Control/Biegunka/Biegunka.hs
mit
2,821
0
19
743
654
369
285
61
1
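A minimal usage sketch (my addition) for the exported expandHome helper:

import Control.Biegunka.Biegunka (expandHome)

main :: IO ()
main = do
  home <- expandHome "~"                   -- value of $HOME
  dots <- expandHome "~/projects/dotfiles" -- $HOME </> "projects/dotfiles"
  etc  <- expandHome "/etc/hosts"          -- no leading '~', returned unchanged
  mapM_ putStrLn [home, dots, etc]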
import Data.List -- yeah... just to simplify things div' a b = (0 == rem a b) -- it returns a list of primes less than lim sieve lim = takeWhile (< lim) (2 : sieves [3,5..]) -- the helper for sieve: keep x and drop every multiple of x from the rest sieves (x:xs) = x : sieves (filter (\n -> not (div' n x)) xs) primes' = 2 : filter (null . tail . primeFactors') [3,5..] primeFactors' n = factor n primes' where factor n (p:ps) | p*p > n = [n] | rem n p == 0 = p : factor (quot n p) (p:ps) | otherwise = factor n ps primesUnder' lim = takeWhile (< lim) primes' primes lim = siva ++ [x | x <- [xs+2,xs+4..lim], primeStep x siva] where siva = primesUnder' (20 + (ceiling (sqrt (fromIntegral lim)))) xs = last siva primeStep p (x:xs) | (x*x) > p = True | (x * x) == p = False | div' p x = False | otherwise = primeStep p xs
zeniuseducation/poly-euler
haskell/opt1.hs
epl-1.0
837
3
13
235
437
222
215
19
1
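A small driver (my addition) if appended to the script above; with the corrected sieves, all three generators agree on small bounds:

main :: IO ()
main = do
  print (primesUnder' 30) -- [2,3,5,7,11,13,17,19,23,29]
  print (primes 30)       -- same list, via the seeded trial-division step
  print (sieve 30)        -- same list, via the sieve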
module GL.Bindings ( display, idle, reshape ) where import Control.Applicative ( (<$>) ) import Graphics.Rendering.OpenGL import Graphics.UI.GLUT ( swapBuffers, postRedisplay ) import GL.Aliases ( glfloat, zerof, onef, setColor4 ) import qualified Program.State as P -- |Callback to draw on screen display :: P.State -> IO () display state = do -- Clear Buffer clearColor $= Color4 zerof zerof zerof onef clear [ ColorBuffer, DepthBuffer ] -- Get state variables draws <- get $ P.drawList state width <- glfloat <$> get (P.width state) height <- glfloat <$> get (P.height state) -- Draw contents of state loadIdentity translate $ Vector3 (-onef) onef zerof scale (2/width) (-2/height) onef setColor4 0 0 0 1 preservingMatrix $ do sequence_ draws return () swapBuffers -- |Callback for idle state between renders (display calls) idle :: P.State -> IO () idle _ = postRedisplay Nothing -- |Callback in the event of window size change reshape :: P.State -> Size -> IO () reshape state size@(Size w h) = do P.width state $= w P.height state $= h viewport $= (Position 0 0, size)
szbokhar/genetic-boxes
GL/Bindings.hs
gpl-2.0
1,189
0
12
289
388
198
190
29
1
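A hedged wiring sketch (not from the repo) showing how these callbacks are typically registered with GLUT; mkState is a hypothetical stand-in for however Program.State values are actually constructed.

import Graphics.UI.GLUT
import qualified Program.State as P
import GL.Bindings (display, idle, reshape)

mkState :: IO P.State
mkState = error "hypothetical: build the program state here"

main :: IO ()
main = do
  _ <- getArgsAndInitialize
  initialDisplayMode $= [DoubleBuffered, RGBAMode, WithDepthBuffer]
  _ <- createWindow "genetic-boxes"
  state <- mkState
  displayCallback $= display state
  idleCallback    $= Just (idle state)
  reshapeCallback $= Just (reshape state)
  mainLoop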
{- | Module : $Header$ Description : print the abstract syntax so that it can be re-parsed Copyright : (c) Christian Maeder and Uni Bremen 2003 License : GPLv2 or higher, see LICENSE.txt Maintainer : [email protected] Stability : experimental Portability : portable printing data types of the abstract syntax -} module HasCASL.PrintAs where import HasCASL.As import HasCASL.AsUtils import HasCASL.FoldTerm import HasCASL.Builtin import Common.Id import Common.Keywords import Common.DocUtils import Common.Doc import Common.AS_Annotation import qualified Data.Set as Set import Data.List -- | short cut for: if b then empty else d noPrint :: Bool -> Doc -> Doc noPrint b d = if b then empty else d noNullPrint :: [a] -> Doc -> Doc noNullPrint = noPrint . null semiDs :: Pretty a => [a] -> Doc semiDs = fsep . punctuate semi . map pretty semiAnnoted :: Pretty a => [Annoted a] -> Doc semiAnnoted = vcat . map (printSemiAnno pretty True) instance Pretty Variance where pretty = sidDoc . mkSimpleId . show instance Pretty a => Pretty (AnyKind a) where pretty knd = case knd of ClassKind ci -> pretty ci FunKind v k1 k2 _ -> fsep [ pretty v <> (case k1 of FunKind _ _ _ _ -> parens _ -> id) (pretty k1) , funArrow, pretty k2] varOfTypeArg :: TypeArg -> Id varOfTypeArg (TypeArg i _ _ _ _ _ _) = i instance Pretty TypePattern where pretty tp = case tp of TypePattern name@(Id ts cs _) args _ -> let ds = map (pretty . varOfTypeArg) args in if placeCount name == length args then let (ras, dts) = mapAccumL ( \ l t -> if isPlace t then case l of x : r -> (r, x) _ -> error "Pretty TypePattern" else (l, printTypeToken t)) ds ts in fsep $ dts ++ (if null cs then [] else [brackets $ sepByCommas $ map printTypeId cs]) ++ ras else printTypeId name <+> fsep ds TypePatternToken t -> printTypeToken t MixfixTypePattern ts -> fsep $ map pretty ts BracketTypePattern k l _ -> bracket k $ ppWithCommas l TypePatternArg t _ -> parens $ pretty t -- | put proper brackets around a document bracket :: BracketKind -> Doc -> Doc bracket b = case b of Parens -> parens Squares -> brackets Braces -> specBraces NoBrackets -> id -- | print a 'Kind' plus a preceding colon (or nothing) printKind :: Kind -> Doc printKind k = noPrint (k == universe) $ printVarKind NonVar (VarKind k) -- | print the kind of a variable with its variance and a preceding colon printVarKind :: Variance -> VarKind -> Doc printVarKind e vk = case vk of Downset t -> less <+> pretty t VarKind k -> colon <+> pretty e <> pretty k MissingKind -> empty data TypePrec = Outfix | Prefix | Lazyfix | ProdInfix | FunInfix | Absfix deriving (Eq, Ord) parenPrec :: TypePrec -> (TypePrec, Doc) -> Doc parenPrec p1 (p2, d) = if p2 < p1 then d else parens d printTypeToken :: Token -> Doc printTypeToken t = let l = ("*", cross) : map ( \ (a, d) -> (show a, d) ) [ (FunArr, funArrow) , (PFunArr, pfun) , (ContFunArr, cfun) , (PContFunArr, pcfun) ] in case lookup (tokStr t) l of Just d -> d _ -> pretty t printTypeId :: Id -> Doc printTypeId (Id ts cs _) = let (toks, pls) = splitMixToken ts in fcat $ map printTypeToken toks ++ (if null cs then [] else [brackets $ sepByCommas $ map printTypeId cs]) ++ map printTypeToken pls toMixType :: Type -> (TypePrec, Doc) toMixType typ = case typ of TypeName name _ _ -> (Outfix, printTypeId name) TypeToken tt -> (Outfix, printTypeToken tt) TypeAbs v t _ -> (Absfix, sep [ lambda <+> pretty v, bullet <+> snd (toMixType t)]) ExpandedType t1 _ -> toMixType t1 -- here we print the unexpanded type BracketType k l _ -> (Outfix, bracket k $ sepByCommas $ map (snd . 
toMixType) l) KindedType t kind _ -> (Lazyfix, sep [ parenPrec Lazyfix $ toMixType t , colon <+> printList0 (Set.toList kind)]) MixfixType ts -> (Prefix, fsep $ map (snd . toMixType) ts) TypeAppl t1 t2 -> let (topTy, tyArgs) = getTypeApplAux False typ aArgs = (Prefix, sep [ parenPrec ProdInfix $ toMixType t1 , parenPrec Prefix $ toMixType t2 ]) in case topTy of TypeName name@(Id ts cs _) _k _i -> case map toMixType tyArgs of [dArg] -> case ts of [e] | name == lazyTypeId -> (Lazyfix, pretty e <+> parenPrec Lazyfix dArg) [e1, e2, e3] | not (isPlace e1) && isPlace e2 && not (isPlace e3) && null cs -> (Outfix, fsep [pretty e1, snd dArg, pretty e3]) _ -> aArgs [dArg1, dArg2] -> case ts of [_, e2, _] | isInfix name && null cs -> if tokStr e2 == prodS then (ProdInfix, fsep [ parenPrec ProdInfix dArg1 , cross, parenPrec ProdInfix dArg2]) else -- assume fun type (FunInfix, fsep [ parenPrec FunInfix dArg1 , printTypeToken e2, snd dArg2]) _ -> aArgs dArgs -> if isProductIdWithArgs name $ length tyArgs then (ProdInfix, fsep $ punctuate (space <> cross) $ map (parenPrec ProdInfix) dArgs) else aArgs _ -> aArgs instance Pretty Type where pretty = snd . toMixType printTypeScheme :: PolyId -> TypeScheme -> Doc printTypeScheme (PolyId _ tys _) (TypeScheme vs t _) = let tdoc = pretty t in if null vs || not (null tys) then tdoc else fsep [forallDoc, semiDs vs, bullet <+> tdoc] -- no curried notation for bound variables instance Pretty TypeScheme where pretty = printTypeScheme (PolyId applId [] nullRange) instance Pretty Partiality where pretty p = case p of Partial -> quMarkD Total -> empty instance Pretty Quantifier where pretty q = case q of Universal -> forallDoc Existential -> exists Unique -> unique instance Pretty TypeQual where pretty q = case q of OfType -> colon AsType -> text asS InType -> inDoc Inferred -> colon instance Pretty Term where pretty = printTerm . 
rmSomeTypes isSimpleTerm :: Term -> Bool isSimpleTerm trm = case trm of QualVar _ -> True QualOp _ _ _ _ _ _ -> True ResolvedMixTerm _ _ _ _ -> True ApplTerm _ _ _ -> True TupleTerm _ _ -> True TermToken _ -> True BracketTerm _ _ _ -> True _ -> False -- | used only to produce CASL applications isSimpleArgTerm :: Term -> Bool isSimpleArgTerm trm = case trm of QualVar vd -> not (isPatVarDecl vd) QualOp _ _ _ _ _ _ -> True ResolvedMixTerm n _ l _ -> placeCount n /= 0 || not (null l) TupleTerm _ _ -> True BracketTerm _ _ _ -> True _ -> False hasRightQuant :: Term -> Bool hasRightQuant t = case t of QuantifiedTerm {} -> True LambdaTerm {} -> True CaseTerm {} -> True LetTerm Let _ _ _ -> True ResolvedMixTerm n _ ts _ | endPlace n && placeCount n == length ts -> hasRightQuant (last ts) ApplTerm (ResolvedMixTerm n _ [] _) t2 _ | endPlace n -> case t2 of TupleTerm ts _ | placeCount n == length ts -> hasRightQuant (last ts) _ -> hasRightQuant t2 ApplTerm _ t2 _ -> hasRightQuant t2 _ -> False zipArgs :: Id -> [Term] -> [Doc] -> [Doc] zipArgs n ts ds = case (ts, ds) of (t : r, d : s) -> let p = parenTermDoc t d e = if hasRightQuant t then parens d else p in if null r && null s && endPlace n then [if hasRightQuant t then d else p] else e : zipArgs n r s _ -> [] isPatVarDecl :: VarDecl -> Bool isPatVarDecl (VarDecl v ty _ _) = case ty of TypeName t _ _ -> isSimpleId v && isPrefixOf "_v" (show t) _ -> False parenTermDoc :: Term -> Doc -> Doc parenTermDoc trm = if isSimpleTerm trm then id else parens printTermRec :: FoldRec Doc (Doc, Doc) printTermRec = FoldRec { foldQualVar = \ _ vd@(VarDecl v _ _ _) -> if isPatVarDecl vd then pretty v else parens $ keyword varS <+> pretty vd , foldQualOp = \ _ br n t tys k _ -> (if null tys || k == Infer then id else (<> brackets (ppWithCommas tys))) $ parens $ fsep [pretty br, pretty n, colon, printTypeScheme n $ if isPred br then unPredTypeScheme t else t] , foldResolvedMixTerm = \ rt n@(Id toks cs ps) tys ts _ -> let pn = placeCount n ResolvedMixTerm _ _ os _ = rt ds = zipArgs n os ts in if pn == length ts || null ts then if null tys then idApplDoc n ds else let (ftoks, _) = splitMixToken toks fId = Id ftoks cs ps (fts, rts) = splitAt (placeCount fId) $ if null ts then replicate pn $ pretty placeTok else ds in fsep $ (idApplDoc fId fts <> brackets (ppWithCommas tys)) : rts else idApplDoc applId [idDoc n, parens $ sepByCommas ts] , foldApplTerm = \ ot t1 t2 _ -> case ot of -- comment out the following two guards for CASL applications ApplTerm (ResolvedMixTerm n _ [] _) (TupleTerm ts@(_ : _) _) _ | placeCount n == length ts -> idApplDoc n (zipArgs n ts $ map printTerm ts) ApplTerm (ResolvedMixTerm n _ [] _) o2 _ | placeCount n == 1 -> idApplDoc n $ zipArgs n [o2] [t2] ApplTerm o1 o2 _ -> idApplDoc applId $ zipArgs applId [o1, o2] [t1, t2] _ -> error "printTermRec.foldApplTerm" , foldTupleTerm = \ _ ts _ -> parens $ sepByCommas ts , foldTypedTerm = \ ~(TypedTerm ot _ _ _) t q typ _ -> fsep [(case ot of TypedTerm {} | elem q [Inferred, OfType] -> parens ApplTerm (ResolvedMixTerm n _ [] _) arg _ -> let pn = placeCount n in case arg of TupleTerm ts@(_ : _) _ | pn == length ts -> parens _ | pn == 1 || hasRightQuant ot -> parens _ -> id _ | hasRightQuant ot -> parens _ -> id) t, pretty q, pretty typ] , foldQuantifiedTerm = \ _ q vs t _ -> fsep [pretty q, printGenVarDecls vs, bullet <+> t] , foldLambdaTerm = \ ot ps q t _ -> let LambdaTerm ops _ _ _ = ot in fsep [ lambda , case ops of [p] -> case p of TupleTerm [] _ -> empty QualVar vd@(VarDecl v ty _ _) -> pretty v <+> if 
isPatVarDecl vd then empty else printVarDeclType ty _ -> head ps _ -> if all ( \ p -> case p of QualVar vd -> not $ isPatVarDecl vd _ -> False) ops then printGenVarDecls $ map (\ pt -> let QualVar vd = pt in GenVarDecl vd) ops else fcat $ map parens ps , (case q of Partial -> bullet Total -> bullet <> text exMark) <+> t] , foldCaseTerm = \ _ t es _ -> fsep [text caseS, t, text ofS, cat $ punctuate (space <> bar <> space) $ map (printEq0 funArrow) es] , foldLetTerm = \ _ br es t _ -> let des = sep $ punctuate semi $ map (printEq0 equals) es in case br of Let -> fsep [sep [text letS <+> des, text inS], t] Where -> fsep [sep [t, text whereS], des] Program -> text programS <+> des , foldTermToken = const pretty , foldMixTypeTerm = \ _ q t _ -> pretty q <+> pretty t , foldMixfixTerm = const fsep , foldBracketTerm = \ _ k l _ -> bracket k $ sepByCommas l , foldAsPattern = \ _ (VarDecl v _ _ _) p _ -> fsep [pretty v, text asP, p] , foldProgEq = \ _ p t _ -> (p, t) } printTerm :: Term -> Doc printTerm = foldTerm printTermRec rmTypeRec :: MapRec rmTypeRec = mapRec { foldQualOp = \ t _ (PolyId i _ _) _ tys k ps -> if elem i $ map fst bList then ResolvedMixTerm i (if k == Infer then [] else tys) [] ps else t , foldTypedTerm = \ _ nt q ty ps -> case q of Inferred -> nt _ -> case nt of TypedTerm tt oq oty _ | oty == ty || oq == InType -> if q == AsType then TypedTerm tt q ty ps else nt QualVar (VarDecl _ oty _ _) | oty == ty -> nt _ -> TypedTerm nt q ty ps } rmSomeTypes :: Term -> Term rmSomeTypes = foldTerm rmTypeRec -- | put parenthesis around applications parenTermRec :: MapRec parenTermRec = let addParAppl t = case t of ResolvedMixTerm _ _ [] _ -> t QualVar _ -> t QualOp _ _ _ _ _ _ -> t TermToken _ -> t BracketTerm _ _ _ -> t TupleTerm _ _ -> t _ -> TupleTerm [t] nullRange in mapRec { foldApplTerm = \ _ t1 t2 -> ApplTerm (addParAppl t1) (addParAppl t2) , foldResolvedMixTerm = \ _ n tys -> ResolvedMixTerm n tys . map addParAppl , foldTypedTerm = \ _ -> TypedTerm . addParAppl , foldMixfixTerm = \ _ -> MixfixTerm . map addParAppl , foldAsPattern = \ _ v -> AsPattern v . addParAppl } parenTerm :: Term -> Term parenTerm = foldTerm parenTermRec -- | print an equation with different symbols between pattern and term printEq0 :: Doc -> (Doc, Doc) -> Doc printEq0 s (p, t) = sep [p, hsep [s, t]] printGenVarDecls :: [GenVarDecl] -> Doc printGenVarDecls = fsep . punctuate semi . map ( \ l -> case l of [x] -> pretty x GenVarDecl (VarDecl _ t _ _) : _ -> sep [ ppWithCommas $ map (\ g -> let GenVarDecl (VarDecl v _ _ _) = g in v) l , printVarDeclType t] GenTypeVarDecl (TypeArg _ e c _ _ _ _) : _ -> sep [ ppWithCommas $ map (\ g -> let GenTypeVarDecl v = g in varOfTypeArg v) l , printVarKind e c] _ -> error "printGenVarDecls") . 
groupBy sameType sameType :: GenVarDecl -> GenVarDecl -> Bool sameType g1 g2 = case (g1, g2) of (GenVarDecl (VarDecl _ t1 Comma _), GenVarDecl (VarDecl _ t2 _ _)) | t1 == t2 -> True (GenTypeVarDecl (TypeArg _ e1 c1 _ _ Comma _), GenTypeVarDecl (TypeArg _ e2 c2 _ _ _ _)) | e1 == e2 && c1 == c2 -> True _ -> False printVarDeclType :: Type -> Doc printVarDeclType t = case t of MixfixType [] -> empty _ -> colon <+> pretty t instance Pretty VarDecl where pretty (VarDecl v t _ _) = pretty v <+> printVarDeclType t instance Pretty GenVarDecl where pretty gvd = case gvd of GenVarDecl v -> pretty v GenTypeVarDecl tv -> pretty tv instance Pretty TypeArg where pretty (TypeArg v e c _ _ _ _) = pretty v <+> printVarKind e c -- | don't print an empty list and put parens around longer lists printList0 :: (Pretty a) => [a] -> Doc printList0 l = case l of [] -> empty [x] -> pretty x _ -> parens $ ppWithCommas l instance Pretty BasicSpec where pretty (BasicSpec l) = if null l then specBraces empty else changeGlobalAnnos addBuiltins . vcat $ map pretty l instance Pretty ProgEq where pretty (ProgEq p t ps) = printEq0 equals $ foldEq printTermRec $ ProgEq (rmSomeTypes p) (rmSomeTypes t) ps instance Pretty BasicItem where pretty bi = case bi of SigItems s -> pretty s ProgItems l _ -> noNullPrint l $ sep [keyword programS, semiAnnoted l] ClassItems i l _ -> noNullPrint l $ let b = semiAnnos pretty l p = plClass l in case i of Plain -> topSigKey (classS ++ if p then "es" else "") <+> b Instance -> sep [keyword classS <+> keyword (instanceS ++ if p then sS else ""), b] GenVarItems l _ -> topSigKey (varS ++ pluralS l) <+> printGenVarDecls l FreeDatatype l _ -> sep [ keyword freeS <+> keyword (typeS ++ pluralS l) , semiAnnos pretty l] GenItems l _ -> let gkw = keyword generatedS in (if all (isDatatype . item) l then \ i -> gkw <+> rmTopKey i else \ i -> sep [gkw, specBraces i]) $ vcat $ map (printAnnoted pretty) l AxiomItems vs fs _ -> sep [ if null vs then empty else forallDoc <+> printGenVarDecls vs , case fs of [] -> empty _ -> let pp = addBullet . pretty in vcat $ map (printAnnoted pp) (init fs) ++ [printSemiAnno pp True $ last fs]] Internal l _ -> sep [ keyword internalS , specBraces $ vcat $ map (printAnnoted pretty) l] plClass :: [Annoted ClassItem] -> Bool plClass l = case map item l of _ : _ : _ -> True [ClassItem (ClassDecl (_ : _ : _) _ _) _ _] -> True _ -> False pluralS :: [a] -> String pluralS l = case l of _ : _ : _ -> sS _ -> "" isDatatype :: SigItems -> Bool isDatatype si = case si of TypeItems _ l _ -> all ((\ t -> case t of Datatype _ -> True _ -> False) . item) l _ -> False instance Pretty OpBrand where pretty = keyword . show instance Pretty SigItems where pretty si = case si of TypeItems i l _ -> noNullPrint l $ let b = semiAnnos pretty l in case i of Plain -> topSigKey ((if all (isSimpleTypeItem . 
item) l then typeS else typeS) ++ plTypes l) <+> b Instance -> sep [keyword typeS <+> keyword (instanceS ++ plTypes l), b] OpItems b l _ -> noNullPrint l $ topSigKey (show b ++ plOps l) <+> let po = prettyOpItem $ isPred b in if case item $ last l of OpDecl _ _ a@(_ : _) _ -> case last a of UnitOpAttr {} -> True _ -> False OpDefn {} -> True _ -> False then vcat (map (printSemiAnno po True) l) else semiAnnos po l plTypes :: [Annoted TypeItem] -> String plTypes l = case map item l of _ : _ : _ -> sS [TypeDecl (_ : _ : _) _ _] -> sS [SubtypeDecl (_ : _ : _) _ _] -> sS [IsoDecl (_ : _ : _) _] -> sS _ -> "" plOps :: [Annoted OpItem] -> String plOps l = case map item l of _ : _ : _ -> sS [OpDecl (_ : _ : _) _ _ _] -> sS _ -> "" isSimpleTypeItem :: TypeItem -> Bool isSimpleTypeItem ti = case ti of TypeDecl l k _ -> k == universe && all isSimpleTypePat l SubtypeDecl l (TypeName i _ _) _ -> not (isMixfix i) && all isSimpleTypePat l SubtypeDefn p (Var _) t _ _ -> isSimpleTypePat p && isSimpleType t _ -> False isSimpleTypePat :: TypePattern -> Bool isSimpleTypePat tp = case tp of TypePattern i [] _ -> not $ isMixfix i _ -> False isSimpleType :: Type -> Bool isSimpleType t = case t of TypeName i _ _ -> not $ isMixfix i TypeToken _ -> True MixfixType [TypeToken _, BracketType Squares (_ : _) _] -> True _ -> False instance Pretty ClassItem where pretty (ClassItem d l _) = pretty d $+$ noNullPrint l (specBraces $ vcat $ map (printAnnoted pretty) l) instance Pretty ClassDecl where pretty (ClassDecl l k _) = let cs = ppWithCommas l in if k == universe then cs else fsep [cs, less, pretty k] instance Pretty Vars where pretty vd = case vd of Var v -> pretty v VarTuple vs _ -> parens $ ppWithCommas vs instance Pretty TypeItem where pretty ti = case ti of TypeDecl l k _ -> sep [ppWithCommas l, printKind k] SubtypeDecl l t _ -> fsep [ppWithCommas l, less, pretty t] IsoDecl l _ -> fsep $ punctuate (space <> equals) $ map pretty l SubtypeDefn p v t f _ -> fsep [pretty p, equals, specBraces $ fsep [pretty v, colon <+> pretty t, bullet <+> pretty f]] AliasType p _ (TypeScheme l t _) _ -> fsep $ pretty p : map (pretty . varOfTypeArg) l ++ [text assignS <+> pretty t] Datatype t -> pretty t printItScheme :: [PolyId] -> Bool -> TypeScheme -> Doc printItScheme ps b = (case ps of [p] -> printTypeScheme p _ -> pretty) . (if b then unPredTypeScheme else id) printHead :: [[VarDecl]] -> [Doc] printHead = map ((<> space) . parens . printGenVarDecls . 
map GenVarDecl) prettyOpItem :: Bool -> OpItem -> Doc prettyOpItem b oi = case oi of OpDecl l t a _ -> fsep $ punctuate comma (map pretty l) ++ [colon <+> (if null a then id else (<> comma)) (printItScheme l b t)] ++ punctuate comma (map pretty a) OpDefn n ps s t _ -> fcat $ (if null ps then (<> space) else id) (pretty n) : printHead ps ++ (if b then [] else [colon <+> printItScheme [n] b s <> space]) ++ [(if b then equiv else equals) <> space, pretty t] instance Pretty PolyId where pretty (PolyId i@(Id ts cs ps) tys _) = if null tys then pretty i else let (fts, plcs) = splitMixToken ts in idDoc (Id fts cs ps) <> brackets (ppWithCommas tys) <> hcat (map pretty plcs) instance Pretty BinOpAttr where pretty a = text $ case a of Assoc -> assocS Comm -> commS Idem -> idemS instance Pretty OpAttr where pretty oa = case oa of BinOpAttr a _ -> pretty a UnitOpAttr t _ -> text unitS <+> pretty t instance Pretty DatatypeDecl where pretty (DatatypeDecl p k alts d _) = fsep [ pretty p, printKind k, defn <+> cat (punctuate (space <> bar <> space) $ map pretty alts) , case d of [] -> empty _ -> keyword derivingS <+> ppWithCommas d] instance Pretty Alternative where pretty alt = case alt of Constructor n cs p _ -> pretty n <+> fsep (map ( \ l -> case (l, p) of -- comment out the following line to output real CASL ([NoSelector (TypeToken t)], Total) | isSimpleId n -> pretty t _ -> parens $ semiDs l) cs) <> pretty p Subtype l _ -> text (if all isSimpleType l then typeS else typeS) <+> ppWithCommas l instance Pretty Component where pretty sel = case sel of Selector n _ t _ _ -> sep [pretty n, colon <+> pretty t] NoSelector t -> pretty t instance Pretty Symb where pretty (Symb i mt _) = sep $ pretty i : case mt of Nothing -> [] Just (SymbType t) -> [colon <+> pretty t] instance Pretty SymbItems where pretty (SymbItems k syms _ _) = printSK k syms <+> ppWithCommas syms instance Pretty SymbOrMap where pretty (SymbOrMap s mt _) = sep $ pretty s : case mt of Nothing -> [] Just t -> [mapsto <+> pretty t] instance Pretty SymbMapItems where pretty (SymbMapItems k syms _ _) = printSK k syms <+> ppWithCommas syms -- | print symbol kind printSK :: SymbKind -> [a] -> Doc printSK k l = case k of Implicit -> empty _ -> keyword $ drop 3 (show k) ++ case l of _ : _ : _ -> sS _ -> ""
nevrenato/Hets_Fork
HasCASL/PrintAs.hs
gpl-2.0
23,634
33
27
8,208
8,858
4,431
4,427
553
24
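The precedence-driven parenthesisation is the core trick of this printer. A standalone illustration (my addition) using the standard pretty package's Doc as a stand-in for Hets' Common.Doc:

import Text.PrettyPrint (Doc, parens, render, text)

data TypePrec = Outfix | Prefix | Lazyfix | ProdInfix | FunInfix | Absfix
  deriving (Eq, Ord)

-- Parenthesise a sub-document only when it binds less tightly than its context.
parenPrec :: TypePrec -> (TypePrec, Doc) -> Doc
parenPrec p1 (p2, d) = if p2 < p1 then d else parens d

main :: IO ()
main = do
  putStrLn $ render $ parenPrec ProdInfix (FunInfix, text "Int -> Bool") -- "(Int -> Bool)"
  putStrLn $ render $ parenPrec ProdInfix (Outfix, text "Bool")          -- "Bool"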
-- P16 Drop every N'th element from a list. -- Recursion with a counter f1 :: Int -> [a] -> [a] f1 n = f n where f _ [] = [] f 1 (_:xs) = f n xs f k (x:xs) = x : f (pred k) xs -- Tail recursion f2 :: Int -> [a] -> [a] f2 n = f n [] where f _ acc [] = reverse acc f 1 acc (_:xs) = f n acc xs f k acc (x:xs) = f (pred k) (x : acc) xs -- Recursion with "take" and "drop" f3 :: Int -> [a] -> [a] f3 _ [] = [] f3 n xs = take (pred n) xs ++ (f2 n $ drop n xs) -- Zip with cycle, filter, map f4 :: Int -> [a] -> [a] f4 n = map snd . filter ((< n) . fst) . zip (cycle [1..n]) -- Using list comprehensions f5 :: Int -> [a] -> [a] f5 n xs = [x | (x, k) <- zip xs $ cycle [1..n], k < n]
pavelfatin/ninety-nine
haskell/p16.hs
gpl-3.0
728
4
11
235
451
231
220
17
3
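A quick check (my addition) that the five variants behave identically:

main :: IO ()
main = do
  print (f1 3 "abcdefghik")                              -- "abdeghk"
  print [ g 3 [1 .. 10 :: Int] | g <- [f1, f2, f3, f4, f5] ]
  -- five copies of [1,2,4,5,7,8,10]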
{-# LANGUAGE TupleSections, OverloadedStrings #-} module Handler.Home where import Yesod.Auth import Import import Handler.DB import Data.Time import Handler.Utils getHomeR :: Handler Value getHomeR = do (Entity _ u) <- requireAuth mug <- runDB $ get $ userDefaultUserGroupId u today <- liftIO $ fmap utctDay getCurrentTime return $ object [ "user" .= object [ "name" .= userName u, "firstName" .= userFirstName u, "lastName" .= userLastName u, "email" .= userEmail u, "config" .= userConfig u, "defaultUserGroupOrganization" .= (mug >>= userGroupOrganization), "defaultUserGroupEmail" .= (mug >>= (Just . userGroupEmail)), "validContract" .= isContractValid u today ] ]
tlaitinen/receipts
backend/Handler/Home.hs
gpl-3.0
847
0
17
268
209
108
101
22
1
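For reference, a standalone aeson sketch (my addition, outside Yesod) of the response shape this handler produces; the field values are made up:

{-# LANGUAGE OverloadedStrings #-}

import Data.Aeson (Value, object, (.=))

sampleResponse :: Value
sampleResponse = object
  [ "user" .= object
      [ "name"          .= ("alice" :: String)
      , "email"         .= ("alice@example.com" :: String)
      , "validContract" .= True
      ]
  ]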
module Main (main) where import Control.Arrow import Data.List f :: [Int] -> [[Int]] -> [[Int]] f _ [] = [] f [] rem = [[]] f (x:s) rem = concatMap (concatMap (\ (y,t) -> map (y:) $ f s [t]) . g x) rem where g :: Int -> [Int] -> [(Int,[Int])] g _ [] = [] g n (y:t) = if other `elem` t then (y,other `delete` t) : rec else rec where rec = map (second (y:)) $ g n t other = y + n + 1 upperLimmit :: Int -> Int upperLimmit n = 2*n+1 solveFor :: Int -> [[(Int, Int)]] solveFor n = map (reverse . zipWith (\m x -> (x,x+m+1)) lst) $ f lst [[1..upperLimmit n]] where lst = reverse [1..n] update :: a -> (Int, Int) -> [a] -> [a] update v (a,b) = zipWith (\n c -> if n == a || n == b then v else c) [1..] showFor :: Int -> [(Int, Int)] -> [Char] showFor n = flip id (replicate (upperLimmit n) '-') . foldr (.) id . zipWith update ['A'..] allFor :: Int -> [[Char]] allFor n = map (showFor n) $ solveFor n main :: IO () main = mapM_ putStrLn $ allFor 26
xkollar/handy-haskell
other/pt-2017-05/main.hs
gpl-3.0
964
0
14
229
628
344
284
25
2
module BarTender.Options ( module System.Console.GetOpt , FilePath , Bound (..) , inBound , outOfBound , completeOption , getConfigOpt , getEnvironOpt , handleOpt ) where import Control.Monad import Data.Char import Data.Maybe import System.Console.GetOpt import System.Environment import System.IO import System.Log.Logger import System.Log.Handler.Simple import Text.ParserCombinators.Parsec import BarTender.Util data Bound a = AtLeast a | AtMost a | Between a a | Exactly a | Unbounded deriving Eq -- | Determine if a value is inside of a bound, inclusively. inBound :: Ord a => a -> Bound a -> Bool inBound x (AtLeast y) = y <= x inBound x (AtMost y) = x <= y inBound x (Between y z) = y <= x && x <= z inBound x (Exactly y) = x == y inBound x Unbounded = True -- | Determine if a value is outside of a bound, where inclusion is inclusive. outOfBound :: Ord a => a -> Bound a -> Bool outOfBound = ((.) . (.)) not inBound -- | Complement a flag with the corresponding "--no-" flag. The base flag -- enables a feature, and its inverse disables the feature. completeOption :: OptDescr (Bool -> a) -> [OptDescr a] completeOption (Option shortLs longLs descr help) = case descr of NoArg fn -> [ Option shortLs longLs (NoArg $ fn True) help , Option [] (map ("--no-" ++) longLs) (NoArg $ fn False) "" ] OptArg fn x -> [ Option shortLs longLs (OptArg (flip fn $ True) x) help ] ReqArg fn x -> [ Option shortLs longLs (ReqArg (flip fn $ True) x) help ] -- | Works like @getOpt@ from "System.Console.GetOpt", except it parses a -- config file instead of a list of tokens. Short option names are ignored. getConfigOpt :: [OptDescr a] -> FilePath -> IO ([a], [String], [String]) getConfigOpt descList path = do errorOrPairs <- parseFromFile file path return $ case errorOrPairs of Left error -> ([], [], [show error]) Right pairs -> getOpt RequireOrder descList $ foldr fn [] pairs where fn :: (String, String) -> [String] -> [String] fn (key, value) list = (++ list) $ case getArgDescr key descList of Just (NoArg _) -> case smartReadBool value of Just True -> ["--" ++ key] Just False -> ["--no-" ++ key] Nothing -> ["--" ++ key] Just (OptArg _ _) -> if null value then ["--" ++ key] else ["--" ++ key, value] Just (ReqArg _ _) -> ["--" ++ key, value] Nothing -> [] getArgDescr :: String -> [OptDescr a] -> Maybe (ArgDescr a) getArgDescr key ls = listToMaybe . catMaybes $ do (Option shortLs longLs descr help) <- ls return $ ifJust (key `elem` longLs) descr eol :: Parser () eol = do try (void $ oneOf "\n\r") <|> eof return () <?> "end of line" comment :: Parser () comment = do char '#' manyTill anyChar (try eol) return () <?> "comment" item :: Parser (String, String) item = do key <- manyTill anyChar $ char '=' skipMany space value <- manyTill anyChar $ try eol <|> comment return (rstrip key, rstrip value) where rstrip :: String -> String rstrip = reverse . dropWhile isSpace . reverse line :: Parser (Maybe (String, String)) line = do mPair <- try (comment >> return Nothing) <|> (item >>= return . Just) skipMany space return mPair file :: Parser [(String, String)] file = do skipMany space fmap catMaybes $ many line -- | Works like @getOpt@ from "System.Console.GetOpt", except it searches the -- environment for options instead of parsing a list of tokens. Short option -- names are ignored. A flag @--foo-bar@ will correspond to the environment -- variable @FOO_BAR@. getEnvironOpt :: [OptDescr a] -> IO ([a], [String], [String]) getEnvironOpt descList = do ls <- fmap catMaybes . 
sequence $ map optionToValue descList return $ (ls, [], []) where optionToValue :: OptDescr a -> IO (Maybe a) optionToValue (Option _ longLs descr _) = do mValue <- getEnvironValue longLs return $ case mValue of Nothing -> Nothing Just value -> Just $ case descr of NoArg x -> x OptArg fn _ -> fn $ ifJust (null value) value ReqArg fn _ -> fn value getEnvironValue :: [String] -> IO (Maybe String) getEnvironValue keyLs = do resultLs <- sequence $ map (lookupEnv . toEnvironKey) keyLs return . listToMaybe $ catMaybes resultLs toEnvironKey :: String -> String toEnvironKey = map $ toUpper . (\c -> if c == '-' then '_' else c) -- | Use the results of a get*Opt function to generate either a result option -- value or an error message. handleOpt :: Bound Int -> a -> ([a -> Either String a], [String], [String]) -> Either String (a, [String]) handleOpt bound initOpt (fnLs, nonOptLs, errorLs) = result >>= handleNonOpts where result = foldr (=<<) (Right initOpt) $ map (const . Left) errorLs ++ fnLs handleNonOpts opt = if inBound (length nonOptLs) bound then Right $ (opt, nonOptLs) else Left $ "incorrect number of positional arguments"
chrisbouchard/bartender
src/BarTender/Options.hs
gpl-3.0
5,805
0
20
1,994
1,742
902
840
120
8
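A tiny illustration (my addition) of the Bound helpers defined above; bounds are inclusive on both ends:

checks :: [Bool]
checks =
  [ 3 `inBound` Between 1 5              -- True
  , 5 `inBound` AtMost 5                 -- True, inclusive
  , 7 `outOfBound` AtMost 5              -- True
  , 0 `inBound` (Unbounded :: Bound Int) -- True for any value
  ]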
{-# LANGUAGE TemplateHaskell, TypeApplications, PolyKinds, UndecidableInstances #-} {-# LANGUAGE MultiParamTypeClasses, FlexibleInstances #-} module Lamdu.Sugar.Annotations ( ShowAnnotation(..), showTypeAlways, showInTypeMode, showInEvalMode , MarkAnnotations(..), alwaysShowAnnotations ) where import qualified Control.Lens as Lens import Hyper import Hyper.Class.Morph import qualified Lamdu.Builtins.Anchors as Builtins import qualified Lamdu.Sugar.Lens as SugarLens import qualified Lamdu.Sugar.Props as SugarProps import Lamdu.Sugar.Types import Lamdu.Prelude data ShowAnnotation = ShowAnnotation { -- For holes and fragments we always show types _showTypeAlways :: Bool , _showInTypeMode :: Bool , _showInEvalMode :: Bool } deriving (Eq, Ord, Show, Generic) Lens.makeLenses ''ShowAnnotation showAnnotationWhenVerbose :: ShowAnnotation showAnnotationWhenVerbose = ShowAnnotation { _showTypeAlways = False , _showInTypeMode = True , _showInEvalMode = True } alwaysShowAnnotations :: ShowAnnotation alwaysShowAnnotations = ShowAnnotation True True True neverShowAnnotations :: ShowAnnotation neverShowAnnotations = ShowAnnotation False False False dontShowEval :: ShowAnnotation dontShowEval = ShowAnnotation { _showTypeAlways = False , _showInTypeMode = True , _showInEvalMode = False } dontShowType :: ShowAnnotation dontShowType = ShowAnnotation { _showTypeAlways = False , _showInTypeMode = False , _showInEvalMode = True } class MarkBodyAnnotations v e where markBodyAnnotations :: e v n i o # Annotated a -> (ShowAnnotation, e (ShowAnnotation, v) n i o # Annotated (ShowAnnotation, a)) class MarkAnnotations t0 t1 where markNodeAnnotations :: Annotated a # t0 -> Annotated (ShowAnnotation, a) # t1 instance MarkAnnotations (Const a) (Const a) where markNodeAnnotations (Ann a (Const b)) = Ann (a & Lens._Wrapped %~ (,) neverShowAnnotations) (Const b) instance MarkBodyAnnotations v e => MarkAnnotations (e v n i o) (e (ShowAnnotation, v) n i o) where markNodeAnnotations (Ann (Const pl) x) = Ann (Const (showAnn, pl)) newBody where (showAnn, newBody) = markBodyAnnotations x instance MarkBodyAnnotations v Binder where markBodyAnnotations = bBody markBodyAnnotations instance MarkBodyAnnotations v BinderBody where markBodyAnnotations (BinderTerm body) = markBodyAnnotations body & _2 %~ BinderTerm markBodyAnnotations (BinderLet let_) = markBodyAnnotations let_ & _2 %~ BinderLet instance MarkBodyAnnotations v Let where markBodyAnnotations l = ( neverShowAnnotations , l { _lValue = val & annotation . _1 .~ neverShowAnnotations , _lNames = l ^. lNames & markParamAnnotations & _LhsVar . traverse . _1 .~ val ^. annotation . _1 , _lBody = l ^. lBody & markNodeAnnotations } ) where val = l ^. lValue & markNodeAnnotations instance MarkBodyAnnotations v Assignment where markBodyAnnotations (BodyPlain (AssignPlain a b)) = markBodyAnnotations b & _2 %~ BodyPlain . AssignPlain a markBodyAnnotations (BodyFunction f) = markBodyAnnotations f & _2 %~ BodyFunction instance MarkBodyAnnotations v Else where markBodyAnnotations (SimpleElse body) = markBodyAnnotations body & _2 %~ SimpleElse . markCaseHandler markBodyAnnotations (ElseIf x) = ( neverShowAnnotations , x & eIfElse %~ morphMap (Proxy @MarkAnnotations #?> markNodeAnnotations) & ElseIf ) instance MarkBodyAnnotations v Function where markBodyAnnotations func = ( neverShowAnnotations , func { _fBody = func ^. fBody & markNodeAnnotations , _fParams = func ^. 
fParams & markParamAnnotations } ) markParamAnnotations :: LhsNames n i o v -> LhsNames n i o (ShowAnnotation, v) markParamAnnotations (LhsVar v) = v <&> (,) showAnnotationWhenVerbose & LhsVar markParamAnnotations (LhsRecord r) = r <&> markLhsFieldAnnotations & LhsRecord markLhsFieldAnnotations :: LhsField n v -> LhsField n (ShowAnnotation, v) markLhsFieldAnnotations (LhsField f s) = LhsField (f <&> (,) r) (s <&> traverse . _2 %~ markLhsFieldAnnotations) where r = case s of Nothing -> showAnnotationWhenVerbose Just{} -> neverShowAnnotations instance MarkBodyAnnotations v HoleOpt where markBodyAnnotations (HoleBinder t) = markBodyAnnotations t & _2 %~ HoleBinder markBodyAnnotations (HoleVarsRecord x) = (neverShowAnnotations, HoleVarsRecord x) instance MarkBodyAnnotations v IfElse where markBodyAnnotations (IfElse i t e) = ( showAnnotationWhenVerbose , IfElse (markNodeAnnotations i) (markNodeAnnotations t & hVal %~ markCaseHandler) (markNodeAnnotations e) ) instance MarkBodyAnnotations v Composite where markBodyAnnotations x = ( neverShowAnnotations , morphMap (Proxy @MarkAnnotations #?> markNodeAnnotations) x ) instance MarkBodyAnnotations v PostfixFunc where markBodyAnnotations x = ( neverShowAnnotations , morphMap (Proxy @MarkAnnotations #?> markNodeAnnotations) x ) instance MarkBodyAnnotations v Term where markBodyAnnotations (BodyLeaf (LeafLiteral x@LiteralBytes{})) = (dontShowEval, BodyLeaf (LeafLiteral x)) markBodyAnnotations (BodyLeaf (LeafLiteral x)) = (neverShowAnnotations, BodyLeaf (LeafLiteral x)) markBodyAnnotations (BodyRecord x) = markBodyAnnotations x & _2 %~ BodyRecord markBodyAnnotations (BodyLam x) = lamFunc markBodyAnnotations x & _2 %~ BodyLam markBodyAnnotations (BodyLeaf (LeafGetVar x)) = ( case x ^. vForm of GetLightParam -> showAnnotationWhenVerbose GetDefinition{} -> showAnnotationWhenVerbose _ -> neverShowAnnotations , LeafGetVar x & BodyLeaf ) markBodyAnnotations (BodyPostfixFunc x) = markBodyAnnotations x & _2 %~ BodyPostfixFunc markBodyAnnotations (BodyToNom (Nominal tid binder)) = ( showAnnotationWhenVerbose & showInEvalMode .~ ( tid ^. tidTId == Builtins.textTid || newBinder ^. SugarLens.binderResultExpr . _1 . showInEvalMode ) , newBinder & Lens.filtered (not . SugarProps.isUnfinished . (^. hVal)) . annotation . _1 .~ dontShowEval & Nominal tid & BodyToNom ) where newBinder = markNodeAnnotations binder markBodyAnnotations (BodyLeaf (LeafInject x)) = (dontShowEval, BodyLeaf (LeafInject x)) markBodyAnnotations (BodyNullaryInject x) = ( dontShowEval , x & hmap (Proxy @(Recursively HFunctor) #> hflipped %~ hmap (const (Lens._Wrapped %~ (,) neverShowAnnotations))) & BodyNullaryInject ) markBodyAnnotations (BodySimpleApply x) = ( if Lens.has (appFunc . hVal . _BodyLeaf . _LeafInject) x then dontShowEval else showAnnotationWhenVerbose , morphMap (Proxy @MarkAnnotations #?> markNodeAnnotations) x & appFunc . nonHoleAnn .~ neverShowAnnotations & BodySimpleApply ) markBodyAnnotations (BodyPostfixApply x) = ( case x ^. pFunc . hVal of PfFromNom{} -> dontShowEval _ -> neverShowAnnotations -- No need to see result of case/get-field , morphMap (Proxy @MarkAnnotations #?> markNodeAnnotations) x -- Don't show annotations for argument of pattern match & Lens.filteredBy (pFunc . hVal . _PfCase) . pArg . annotation . 
_1 .~ neverShowAnnotations & BodyPostfixApply ) markBodyAnnotations (BodyLabeledApply x) = ( showAnnotationWhenVerbose , morphMap (Proxy @MarkAnnotations #?> markNodeAnnotations) x & BodyLabeledApply ) markBodyAnnotations (BodyIfElse x) = markBodyAnnotations x & _2 %~ BodyIfElse markBodyAnnotations (BodyLeaf (LeafHole x)) = ( alwaysShowAnnotations , BodyLeaf (LeafHole x) ) markBodyAnnotations (BodyFragment f) = ( alwaysShowAnnotations , f & fExpr %~ (if Lens.has (fTypeMismatch . Lens._Just) f then nonHoleAnn .~ dontShowType else id) . markNodeAnnotations & BodyFragment ) nonHoleAnn :: Lens.Traversal' (Annotated (ShowAnnotation, a) # Term v1 n i o) ShowAnnotation nonHoleAnn = Lens.filtered (Lens.nullOf (hVal . SugarLens.bodyUnfinished)) . annotation . _1 markCaseHandler :: Term v n i o # Annotated (ShowAnnotation, a) -> Term v n i o # Annotated (ShowAnnotation, a) markCaseHandler = _BodyLam . lamFunc . fBody . SugarLens.binderResultExpr . Lens.ifiltered (const . Lens.nullOf SugarLens.bodyUnfinished) . _1 .~ neverShowAnnotations
lamdu/lamdu
src/Lamdu/Sugar/Annotations.hs
gpl-3.0
9,023
0
18
2,262
2,355
1,236
1,119
-1
-1
{-# LANGUAGE DeriveDataTypeable #-} -- | new BLT : a more modular approach module Main where import Options.Applicative import System.FilePath data Input = FileInput FilePath | StdIn fileInput :: Parser Input fileInput = FileInput <$> strOption ( long "file" <> short 'f' <> metavar "FILENAME" <> help "Source .bpl file" ) stdInput :: Parser Input stdInput = flag' StdIn ( long "stdin" <> help "Read from stdin" ) input = fileInput <|> stdInput
emptylambda/BLT
src/NewBLT.hs
gpl-3.0
478
0
11
106
115
60
55
16
1
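A hedged sketch (my addition) of a main that could accompany the parser above, using standard optparse-applicative combinators (execParser, info, helper):

main :: IO ()
main = do
    i <- execParser optsWithHelp
    case i of
      FileInput path -> putStrLn ("would read " ++ path)
      StdIn          -> putStrLn "would read from stdin"
  where
    optsWithHelp =
      info (helper <*> input) (fullDesc <> header "BLT front-end (sketch only)")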
{-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeOperators #-} {-# OPTIONS_GHC -fno-warn-duplicate-exports #-} {-# OPTIONS_GHC -fno-warn-unused-binds #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- | -- Module : Network.Google.Resource.AndroidManagement.Enterprises.List -- Copyright : (c) 2015-2016 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- -- Lists EMM-managed enterprises. Only BASIC fields are returned. -- -- /See:/ <https://developers.google.com/android/management Android Management API Reference> for @androidmanagement.enterprises.list@. module Network.Google.Resource.AndroidManagement.Enterprises.List ( -- * REST Resource EnterprisesListResource -- * Creating a Request , enterprisesList , EnterprisesList -- * Request Lenses , elXgafv , elUploadProtocol , elAccessToken , elUploadType , elView , elPageToken , elProjectId , elPageSize , elCallback ) where import Network.Google.AndroidManagement.Types import Network.Google.Prelude -- | A resource alias for @androidmanagement.enterprises.list@ method which the -- 'EnterprisesList' request conforms to. type EnterprisesListResource = "v1" :> "enterprises" :> QueryParam "$.xgafv" Xgafv :> QueryParam "upload_protocol" Text :> QueryParam "access_token" Text :> QueryParam "uploadType" Text :> QueryParam "view" EnterprisesListView :> QueryParam "pageToken" Text :> QueryParam "projectId" Text :> QueryParam "pageSize" (Textual Int32) :> QueryParam "callback" Text :> QueryParam "alt" AltJSON :> Get '[JSON] ListEnterprisesResponse -- | Lists EMM-managed enterprises. Only BASIC fields are returned. -- -- /See:/ 'enterprisesList' smart constructor. data EnterprisesList = EnterprisesList' { _elXgafv :: !(Maybe Xgafv) , _elUploadProtocol :: !(Maybe Text) , _elAccessToken :: !(Maybe Text) , _elUploadType :: !(Maybe Text) , _elView :: !(Maybe EnterprisesListView) , _elPageToken :: !(Maybe Text) , _elProjectId :: !(Maybe Text) , _elPageSize :: !(Maybe (Textual Int32)) , _elCallback :: !(Maybe Text) } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'EnterprisesList' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'elXgafv' -- -- * 'elUploadProtocol' -- -- * 'elAccessToken' -- -- * 'elUploadType' -- -- * 'elView' -- -- * 'elPageToken' -- -- * 'elProjectId' -- -- * 'elPageSize' -- -- * 'elCallback' enterprisesList :: EnterprisesList enterprisesList = EnterprisesList' { _elXgafv = Nothing , _elUploadProtocol = Nothing , _elAccessToken = Nothing , _elUploadType = Nothing , _elView = Nothing , _elPageToken = Nothing , _elProjectId = Nothing , _elPageSize = Nothing , _elCallback = Nothing } -- | V1 error format. elXgafv :: Lens' EnterprisesList (Maybe Xgafv) elXgafv = lens _elXgafv (\ s a -> s{_elXgafv = a}) -- | Upload protocol for media (e.g. \"raw\", \"multipart\"). elUploadProtocol :: Lens' EnterprisesList (Maybe Text) elUploadProtocol = lens _elUploadProtocol (\ s a -> s{_elUploadProtocol = a}) -- | OAuth access token. elAccessToken :: Lens' EnterprisesList (Maybe Text) elAccessToken = lens _elAccessToken (\ s a -> s{_elAccessToken = a}) -- | Legacy upload protocol for media (e.g. 
\"media\", \"multipart\"). elUploadType :: Lens' EnterprisesList (Maybe Text) elUploadType = lens _elUploadType (\ s a -> s{_elUploadType = a}) -- | Specifies which Enterprise fields to return. This method only supports -- BASIC. elView :: Lens' EnterprisesList (Maybe EnterprisesListView) elView = lens _elView (\ s a -> s{_elView = a}) -- | A token identifying a page of results returned by the server. elPageToken :: Lens' EnterprisesList (Maybe Text) elPageToken = lens _elPageToken (\ s a -> s{_elPageToken = a}) -- | Required. The Cloud project ID of the EMM managing the enterprises. elProjectId :: Lens' EnterprisesList (Maybe Text) elProjectId = lens _elProjectId (\ s a -> s{_elProjectId = a}) -- | The requested page size. The actual page size may be fixed to a min or -- max value. elPageSize :: Lens' EnterprisesList (Maybe Int32) elPageSize = lens _elPageSize (\ s a -> s{_elPageSize = a}) . mapping _Coerce -- | JSONP elCallback :: Lens' EnterprisesList (Maybe Text) elCallback = lens _elCallback (\ s a -> s{_elCallback = a}) instance GoogleRequest EnterprisesList where type Rs EnterprisesList = ListEnterprisesResponse type Scopes EnterprisesList = '["https://www.googleapis.com/auth/androidmanagement"] requestClient EnterprisesList'{..} = go _elXgafv _elUploadProtocol _elAccessToken _elUploadType _elView _elPageToken _elProjectId _elPageSize _elCallback (Just AltJSON) androidManagementService where go = buildClient (Proxy :: Proxy EnterprisesListResource) mempty
brendanhay/gogol
gogol-androidmanagement/gen/Network/Google/Resource/AndroidManagement/Enterprises/List.hs
mpl-2.0
5,759
0
19
1,402
963
555
408
131
1
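A construction sketch (my addition): filling the request with the generated lenses. Actually executing it needs a Google environment (credentials, runGoogle, send), which is omitted here.

{-# LANGUAGE OverloadedStrings #-}

import Control.Lens ((&), (?~))
import Network.Google.Resource.AndroidManagement.Enterprises.List

listReq :: EnterprisesList
listReq = enterprisesList
  & elProjectId ?~ "my-emm-project"
  & elPageSize  ?~ 50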
func = fooooooooooooooooooooooooooooooooo + foooooooooooooooooooooooooooooooo foooooooooooooooooooooooooooooooo foooooooooooooooooooooooooooooooo
lspitzner/brittany
data/Test300.hs
agpl-3.0
192
2
5
53
16
7
9
4
1
module QuantLib.Math ( module QuantLib.Math.InverseNormal , module QuantLib.Math.Copulas ) where import QuantLib.Math.InverseNormal import QuantLib.Math.Copulas
paulrzcz/hquantlib
src/QuantLib/Math.hs
lgpl-3.0
186
0
5
39
34
23
11
5
0
module Network.Haskoin.Node.Checkpoints ( checkpointMap , checkpointList , verifyCheckpoint ) where import Data.Map.Strict (Map) import qualified Data.Map.Strict as M (fromList, lookup) import Data.Word (Word32) import Network.Haskoin.Block import Network.Haskoin.Constants -- | Checkpoints from bitcoind reference implementation /src/checkpoints.cpp -- presented as a Map keyed by block height. checkpointMap :: Map Word32 BlockHash checkpointMap = M.fromList checkpointList -- | Checkpoints from bitcoind reference implementation /src/checkpoints.cpp -- presented as a list. checkpointList :: [(Word32, BlockHash)] checkpointList = checkpoints -- | Verify that a block hash at a given height either matches an existing -- checkpoint or is not a checkpoint. verifyCheckpoint :: Word32 -> BlockHash -> Bool verifyCheckpoint height hash = case M.lookup height checkpointMap of Just value -> hash == value Nothing -> True
plaprade/haskoin
haskoin-node/src/Network/Haskoin/Node/Checkpoints.hs
unlicense
919
0
8
133
167
100
67
17
2
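The verification rule can be restated as a property (my addition); running it under QuickCheck would additionally need an Arbitrary instance for BlockHash:

import qualified Data.Map.Strict as M
import Data.Word (Word32)
import Network.Haskoin.Block (BlockHash)
import Network.Haskoin.Node.Checkpoints

-- Any hash passes at a height without a checkpoint; at a checkpointed
-- height only the recorded hash passes.
prop_verifyCheckpoint :: Word32 -> BlockHash -> Bool
prop_verifyCheckpoint height hash =
  verifyCheckpoint height hash == maybe True (== hash) (M.lookup height checkpointMap)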
-- Copyright (c) 2013-2015 PivotCloud, Inc. -- -- Aws.Kinesis.Commands.PutRecords -- -- Please feel free to contact us at [email protected] with any -- contributions, additions, or other feedback; we would love to hear from -- you. -- -- Licensed under the Apache License, Version 2.0 (the "License"); you may -- not use this file except in compliance with the License. You may obtain a -- copy of the License at http://www.apache.org/licenses/LICENSE-2.0 -- -- Unless required by applicable law or agreed to in writing, software -- distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -- WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -- License for the specific language governing permissions and limitations -- under the License. -- | -- Module: Aws.Kinesis.Commands.PutRecords -- Copyright: Copyright (c) 2013-2015 PivotCloud, Inc. -- license: Apache License, Version 2.0 -- Maintainer: Jon Sterling <[email protected]> -- Stability: experimental -- -- /API Version: 2013-12-02/ -- -- Puts (writes) multiple data records from a producer into an Amazon Kinesis -- stream in a single call (also referred to as a PutRecords request). Use this -- operation to send data from a data producer into the Amazon Kinesis stream -- for real-time ingestion and processing. Each shard can support up to 1000 -- records written per second, up to a maximum total of 1 MB data written per -- second. -- -- You must specify the name of the stream that captures, stores, and -- transports the data; and an array of request Records, with each record in -- the array requiring a partition key and data blob. -- -- The data blob can be any type of data; for example, a segment from a log -- file, geographic/location data, website clickstream data, and so on. -- -- The partition key is used by Amazon Kinesis as input to a hash function that -- maps the partition key and associated data to a specific shard. An MD5 hash -- function is used to map partition keys to 128-bit integer values and to map -- associated data records to shards. As a result of this hashing mechanism, -- all data records with the same partition key map to the same shard within -- the stream. For more information, see Partition Key in the Amazon Kinesis -- Developer Guide. -- -- Each record in the Records array may include an optional parameter, -- ExplicitHashKey, which overrides the partition key to shard mapping. This -- parameter allows a data producer to determine explicitly the shard where the -- record is stored. For more information, see Adding Multiple Records with -- PutRecords in the Amazon Kinesis Developer Guide. -- -- The PutRecords response includes an array of response Records. Each record -- in the response array directly correlates with a record in the request array -- using natural ordering, from the top to the bottom of the request and -- response. The response Records array always includes the same number of -- records as the request array. -- -- The response Records array includes both successfully and unsuccessfully -- processed records. Amazon Kinesis attempts to process all records in each -- PutRecords request. A single record failure does not stop the processing of -- subsequent records. -- -- A successfully-processed record includes ShardId and SequenceNumber values. -- The ShardId parameter identifies the shard in the stream where the record is -- stored. The SequenceNumber parameter is an identifier assigned to the put -- record, unique to all records in the stream. 
-- -- An unsuccessfully-processed record includes ErrorCode and ErrorMessage -- values. ErrorCode reflects the type of error and can be one of the following -- values: ProvisionedThroughputExceededException or InternalFailure. -- ErrorMessage provides more detailed information about the -- ProvisionedThroughputExceededException exception including the account ID, -- stream name, and shard ID of the record that was throttled. -- -- Data records are accessible for only 24 hours from the time that they are -- added to an Amazon Kinesis stream. -- -- <http://docs.aws.amazon.com/kinesis/latest/APIReference/API_PutRecords.html> {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} module Aws.Kinesis.Commands.PutRecords ( PutRecords(..) , PutRecordsRequestEntry(..) , PutRecordsResponse(..) , PutRecordsResponseRecord(..) ) where import Aws.Core import Aws.Kinesis.Core import Aws.Kinesis.Types import Control.DeepSeq import Data.Aeson import qualified Data.ByteString as BS import qualified Data.ByteString.Lazy as BL import qualified Data.ByteString.Base64 as B64 import qualified Data.Text as T import qualified Data.Text.Encoding as T import Data.Typeable import GHC.Generics -- | Represents a single record in a 'PutRecords' request. -- data PutRecordsRequestEntry = PutRecordsRequestEntry { putRecordsRequestEntryData :: !BS.ByteString -- ^ The data blob to be put into the record. The maximum size of the data -- blob is 50 kilobytes. , putRecordsRequestEntryExplicitHashKey :: !(Maybe PartitionHash) -- ^ The hash value used to determine explicitly the shard that the data -- record is assigned to by overriding the partition key hash. , putRecordsRequestEntryPartitionKey :: !PartitionKey -- ^ Determines which shard in the stream the data record is assigned to. -- All data records with the same partition key map to the same shard. } deriving (Show, Read, Eq, Ord, Typeable, Generic) instance NFData PutRecordsRequestEntry -- | The body of the 'PutRecords' request. -- data PutRecords = PutRecords { putRecordsRecords :: ![PutRecordsRequestEntry] -- ^ The records associated with the request. Minimum of 1 item, maximum -- 500. , putRecordsStreamName :: !StreamName -- ^ The stream name associated with the request. } deriving (Show, Read, Eq, Ord, Typeable, Generic) instance NFData PutRecords -- | Represents the result for a single record in a 'PutRecordsResponse'. -- data PutRecordsResponseRecord = PutRecordsResponseRecord { putRecordsResponseRecordErrorCode :: !(Maybe T.Text) -- ^ If the request did not succeed, an error code will be provided. , putRecordsResponseRecordErrorMessage :: !(Maybe T.Text) -- ^ If the request did not succeed, an error message will be provided. , putRecordsResponseRecordSequenceNumber :: !(Maybe SequenceNumber) -- ^ The sequence number assigned to the (sucessfully processed) record. , putRecordsResponseRecordShardId :: !(Maybe ShardId) -- ^ The shard ID assigned to the (successfully processed) record. } deriving (Show, Read, Eq, Ord, Typeable, Generic) instance NFData PutRecordsResponseRecord data PutRecordsResponse = PutRecordsResponse { putRecordsResponseFailedRecordCount :: !Int -- ^ The number of unsuccessfully processed records in a 'PutRecords' -- request. 
, putRecordsResponseRecords :: ![PutRecordsResponseRecord] -- ^ An array of successfully and unsuccessfully processed records, -- correlated with the request by natural ordering. } deriving (Show, Read, Eq, Ord, Typeable, Generic) instance NFData PutRecordsResponse instance ToJSON PutRecordsRequestEntry where toJSON PutRecordsRequestEntry{..} = object $ [ "Data" .= T.decodeUtf8 (B64.encode putRecordsRequestEntryData) , "ExplicitHashKey" .= putRecordsRequestEntryExplicitHashKey , "PartitionKey" .= putRecordsRequestEntryPartitionKey ] instance ToJSON PutRecords where toJSON PutRecords{..} = object [ "Records" .= putRecordsRecords , "StreamName" .= putRecordsStreamName ] instance FromJSON PutRecordsResponseRecord where parseJSON = withObject "PutRecordsResponseRecord" $ \o -> do putRecordsResponseRecordErrorCode <- o .:? "ErrorCode" putRecordsResponseRecordErrorMessage <- o .:? "ErrorMessage" putRecordsResponseRecordSequenceNumber <- o .:? "SequenceNumber" putRecordsResponseRecordShardId <- o .:? "ShardId" return PutRecordsResponseRecord{..} instance FromJSON PutRecordsResponse where parseJSON = withObject "PutRecordsResponse" $ \o -> do putRecordsResponseFailedRecordCount <- o .: "FailedRecordCount" putRecordsResponseRecords <- o .: "Records" return PutRecordsResponse{..} instance Transaction PutRecords PutRecordsResponse where instance ResponseConsumer r PutRecordsResponse where type ResponseMetadata PutRecordsResponse = KinesisMetadata responseConsumer _ = kinesisResponseConsumer instance AsMemoryResponse PutRecordsResponse where type MemoryResponse PutRecordsResponse = PutRecordsResponse loadToMemory = return instance SignQuery PutRecords where type ServiceConfiguration PutRecords = KinesisConfiguration signQuery cmd = kinesisSignQuery KinesisQuery { kinesisQueryAction = KinesisPutRecords , kinesisQueryBody = Just $ BL.toStrict $ encode cmd }
alephcloud/hs-aws-kinesis
src/Aws/Kinesis/Commands/PutRecords.hs
apache-2.0
9,186
0
12
1,584
871
525
346
102
0
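A minimal sketch (my addition): packaging raw payloads into a PutRecords request, given an already-validated StreamName and PartitionKey; their smart constructors live in Aws.Kinesis.Types and are not shown here.

import qualified Data.ByteString as BS

import Aws.Kinesis.Commands.PutRecords
import Aws.Kinesis.Types (PartitionKey, StreamName)

mkPutRecords :: StreamName -> PartitionKey -> [BS.ByteString] -> PutRecords
mkPutRecords stream key payloads = PutRecords
  { putRecordsRecords =
      [ PutRecordsRequestEntry
          { putRecordsRequestEntryData            = payload
          , putRecordsRequestEntryExplicitHashKey = Nothing
          , putRecordsRequestEntryPartitionKey    = key
          }
      | payload <- payloads
      ]
  , putRecordsStreamName = stream
  }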
{-# Language TypeFamilies #-} module Language.Drasil.Chunk.Concept ( ConceptChunk, dcc, dcc', dccWDS, dccWDS', cc, cc', ccs, cic, cw , CommonConcept, ConceptInstance ) where import Language.Drasil.Classes.Core (HasUID(uid)) import Language.Drasil.Classes (Idea, Definition(defn), ConceptDomain(cdom), Concept) import Language.Drasil.Chunk.CommonIdea (commonIdea) import Language.Drasil.Chunk.Concept.Core (ConceptChunk(ConDict), ConceptInstance(ConInst), CommonConcept(ComConDict)) import Language.Drasil.Sentence (Sentence(S)) import Language.Drasil.Chunk.NamedIdea(mkIdea,nw, nc) import Language.Drasil.NounPhrase (NP, pn) import Language.Drasil.ShortName (shortname') import Control.Lens ((^.)) --FIXME: Temporary ConceptDomain tag hacking to not break everything. dcc :: String -> NP -> String -> ConceptChunk -- | Smart constructor for creating concept chunks given an id, -- 'NounPhrase' ('NP') and definition (as String). dcc i ter des = ConDict (mkIdea i ter Nothing) (S des) [] -- ^ Concept domain tagging is not yet implemented in this constructor. -- | Identical to 'dcc', but adds an abbreviation (String) dcc' :: String -> NP -> String -> String -> CommonConcept dcc' i t d a = ComConDict (commonIdea i t a []) (S d) -- | Similar to 'dcc', except the definition is a 'Sentence' dccWDS :: String -> NP -> Sentence -> ConceptChunk dccWDS i t d = ConDict (mkIdea i t Nothing) d [] -- | Similar to 'dcc', except the definition is a 'Sentence' and adds -- an abbreviation (String) dccWDS' :: String -> NP -> Sentence -> String -> CommonConcept dccWDS' i t d a = ComConDict (commonIdea i t a []) d -- | Constructor for 'ConceptChunk'. Does not allow concept domain tagging. cc :: Idea c => c -> String -> ConceptChunk cc n d = ConDict (nw n) (S d) [] -- | Same as cc, except definition is a 'Sentence' cc' :: Idea c => c -> Sentence -> ConceptChunk cc' n d = ConDict (nw n) d [] -- | Constructor for 'ConceptChunk'. Allows explicit tagging. ccs :: (Idea c, Concept d) => c -> Sentence -> [d] -> ConceptChunk --Explicit tagging ccs n d l = ConDict (nw n) d $ map (^. uid) l -- | For projecting out to the ConceptChunk data-type cw :: Concept c => c -> ConceptChunk cw c = ConDict (nw c) (c ^. defn) (cdom c) cic :: Concept c => String -> Sentence -> String -> c -> ConceptInstance cic u d sn dom = ConInst (ccs (nc u $ pn sn) d [dom]) u $ shortname' sn
JacquesCarette/literate-scientific-software
code/drasil-lang/Language/Drasil/Chunk/Concept.hs
bsd-2-clause
2,384
0
11
408
730
407
323
32
1
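A small construction sketch (my addition); pn builds an NP from a String, as cic above already relies on, and the id/phrase/definition strings here are made up:

import Language.Drasil.Chunk.Concept (CommonConcept, ConceptChunk, dcc, dcc')
import Language.Drasil.NounPhrase (pn)

gravity :: ConceptChunk
gravity = dcc "gravity" (pn "gravity")
  "the force that attracts a body toward the centre of the earth"

gravityAbbrev :: CommonConcept
gravityAbbrev = dcc' "gravity" (pn "gravity")
  "the force that attracts a body toward the centre of the earth" "g"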
instance Functor Maybe where fmap f (Just x) = Just (f x) fmap f Nothing = Nothing
sharkspeed/dororis
languages/haskell/LYHGG/8-making-our-own-types-and-typeclasses/8-the-functor-typeclass.hs
bsd-2-clause
91
0
7
25
44
21
23
3
0
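Quick usage examples (my addition); they use the standard Prelude instance, which is exactly the one written out above:

main :: IO ()
main = do
  print (fmap (+3) (Just 2))               -- Just 5
  print (fmap (+3) (Nothing :: Maybe Int)) -- Nothing
  print (fmap (++ "!") (Just "hey"))       -- Just "hey!"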
module YesodDsl.Simplify (simplify) where import YesodDsl.AST import Data.Generics import Data.Either import Data.Generics.Uniplate.Data import qualified Data.List as L import Data.Maybe import qualified Data.Map as Map simplify :: Module -> Module simplify m = everywhere ((mkT sHandler) . (mkT sExpr) . (mkT sStmt) . (mkT mapEntityRef)) m where sExpr (SubQueryExpr sq) = SubQueryExpr $ mapSq sq sExpr (ExistsExpr sq) = ExistsExpr $ mapSq sq sExpr x = x sStmt (Require sq) = Require $ mapSq sq sStmt (IfFilter (pn,js,be,ob,uf)) = IfFilter (pn, map mapJoin js, be, ob, uf) sStmt (Select sq) = Select $ mapSq sq sStmt x = x mapSq sq = let sq' = sq { sqJoins = map mapJoin $ sqJoins sq } in sq' { sqFields = concatMap (expand sq') $ sqFields sq', sqWhere = everywhere (mkT sExpr) $ sqWhere sq' } mapJoin j = j { joinEntity = mapEntityRef $ joinEntity j, joinExpr = joinExpr j >>= Just . (everywhere $ (mkT sExpr)) } lookupEntity en = L.find ((==en) . entityName) $ modEntities m mapEntityRef l@(Left en) = fromMaybe l $ lookupEntity en >>= Just . Right mapEntityRef x = x expand sq (SelectField vr@(Var vn _ _) fn Nothing) = fromMaybe [] $ do (e,_) <- Map.lookup vn $ sqAliases sq Just $ [ SelectField vr fn (Just $ fieldJsonName f) | f <- entityFields e, fieldName f == fn ] expand sq (SelectAllFields (Var vn _ _)) = fromMaybe [] $ do (e,_) <- Map.lookup vn $ sqAliases sq Just $ [ SelectField (Var vn (Left "") False) (fieldName f) (Just $ fieldJsonName f) | f <- entityFields e, fieldInternal f == False ] expand _ x = [x] sHandler :: Handler -> Handler sHandler h = everywhere ((mkT mapVarRef) . (mkT mapStmt) . (mkT mapSq)) h where baseAliases = Map.unions [ sqAliases sq | Select sq <- universeBi h ] mapStmt df@(DeleteFrom er vn _) = everywhere (mkT $ mapSqVarRef $ Map.unions [ baseAliases, Map.fromList $ rights [ er >>= \e -> Right (vn, (e, False)) ] ]) df mapStmt i@(IfFilter (_,js,_,_,_)) = everywhere (mkT $ mapSqVarRef $ Map.unions [ baseAliases, Map.fromList $ rights [ joinEntity j >>= \e -> Right (joinAlias j,(e, isOuterJoin $ joinType j)) | j <- js ] ]) i mapStmt i = i mapSq sq = everywhere (mkT $ mapSqVarRef $ sqAliases sq) sq mapSqVarRef aliases (Var vn (Left "") _) = case Map.lookup vn aliases of Just (e,mf) -> Var vn (Right e) mf _ -> Var vn (lookupEntityRef vn) False mapSqVarRef _ v = v lookupEntityRef vn = case listToMaybe [ er | GetById er _ vn' <- universeBi h, vn' == vn ] of Just er -> er Nothing -> Left "" mapVarRef (Var vn (Left "") _) = Var vn (lookupEntityRef vn) False mapVarRef v = v
tlaitinen/yesod-dsl
YesodDsl/Simplify.hs
bsd-2-clause
3,114
0
21
1,051
1,258
640
618
56
9
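The module above relies on SYB-style generic rewriting (everywhere, mkT) over the YesodDsl AST. Below is a standalone sketch of that pattern on a toy expression type invented for illustration, not the real AST:

{-# LANGUAGE DeriveDataTypeable #-}
module SimplifySketch where

import Data.Generics (Data, Typeable, everywhere, mkT)

data Expr = Lit Int | Add Expr Expr | Neg Expr
  deriving (Show, Data, Typeable)

-- Rewrite every 'Neg (Neg e)' node to 'e', wherever it occurs in the tree.
simplifyNeg :: Expr -> Expr
simplifyNeg = everywhere (mkT go)
  where
    go (Neg (Neg e)) = e
    go e             = e

main :: IO ()
main = print (simplifyNeg (Add (Neg (Neg (Lit 1))) (Lit 2)))
-- prints: Add (Lit 1) (Lit 2)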
{-| Copyright : (C) 2012-2016, University of Twente License : BSD2 (see the file LICENSE) Maintainer : Christiaan Baaij <[email protected]> Create Netlists out of normalized CoreHW Terms -} {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TupleSections #-} module CLaSH.Netlist where import Control.Lens ((.=)) import qualified Control.Lens as Lens import Control.Monad.State.Strict (runStateT) import Control.Monad.Writer.Strict (listen, runWriterT, tell) import Data.Char (ord) import Data.Either (lefts,partitionEithers) import Data.HashMap.Lazy (HashMap) import qualified Data.HashMap.Lazy as HashMap import Data.List (elemIndex) import Data.Maybe (fromMaybe) import Data.Set (toList,fromList) import qualified Data.Text.Lazy as Text import Unbound.Generics.LocallyNameless (Embed (..), name2String, runFreshMT, unbind, unembed, unrebind) import CLaSH.Core.DataCon (DataCon (..)) import CLaSH.Core.FreeVars (typeFreeVars) import CLaSH.Core.Literal (Literal (..)) import CLaSH.Core.Pretty (showDoc) import CLaSH.Core.Term (Pat (..), Term (..), TmName) import qualified CLaSH.Core.Term as Core import CLaSH.Core.Type (Type (..)) import CLaSH.Core.TyCon (TyConName, TyCon) import CLaSH.Core.Util (collectArgs, isVar, termType) import CLaSH.Core.Var (Id, Var (..)) import CLaSH.Netlist.BlackBox import CLaSH.Netlist.BlackBox.Types (BlackBoxTemplate) import CLaSH.Netlist.Id import CLaSH.Netlist.Types as HW import CLaSH.Netlist.Util import CLaSH.Normalize.Util import CLaSH.Primitives.Types as P import CLaSH.Util -- | Generate a hierarchical netlist out of a set of global binders with -- @topEntity@ at the top. genNetlist :: HashMap TmName (Type,Term) -- ^ Global binders -> PrimMap BlackBoxTemplate -- ^ Primitive definitions -> HashMap TyConName TyCon -- ^ TyCon cache -> (HashMap TyConName TyCon -> Type -> Maybe (Either String HWType)) -- ^ Hardcoded Type -> HWType translator -> Maybe Int -- ^ Symbol count -> String -- ^ Name of the module containing the @topEntity@ -> [(String,FilePath)] -- ^ Set of collected data-files -> Int -- ^ Int/Word/Integer bit-width -> (Identifier -> Identifier) -- ^ valid identifiers -> [Identifier] -- ^ Seen components -> TmName -- ^ Name of the @topEntity@ -> IO ([Component],[(String,FilePath)],[Identifier]) genNetlist globals primMap tcm typeTrans mStart modName dfiles iw mkId seen topEntity = do (_,s) <- runNetlistMonad globals primMap tcm typeTrans modName dfiles iw mkId seen $ genComponent topEntity mStart return (HashMap.elems $ _components s, _dataFiles s, _seenComps s) -- | Run a NetlistMonad action in a given environment runNetlistMonad :: HashMap TmName (Type,Term) -- ^ Global binders -> PrimMap BlackBoxTemplate -- ^ Primitive Definitions -> HashMap TyConName TyCon -- ^ TyCon cache -> (HashMap TyConName TyCon -> Type -> Maybe (Either String HWType)) -- ^ Hardcode Type -> HWType translator -> String -- ^ Name of the module containing the @topEntity@ -> [(String,FilePath)] -- ^ Set of collected data-files -> Int -- ^ Int/Word/Integer bit-width -> (Identifier -> Identifier) -- ^ valid identifiers -> [Identifier] -- ^ Seen components -> NetlistMonad a -- ^ Action to run -> IO (a, NetlistState) runNetlistMonad s p tcm typeTrans modName dfiles iw mkId seen = runFreshMT . flip runStateT s' . (fmap fst . runWriterT) . 
runNetlist where s' = NetlistState s HashMap.empty 0 HashMap.empty p typeTrans tcm Text.empty dfiles iw mkId [] seen' names (seen',names) = genNames mkId modName seen HashMap.empty (HashMap.keys s) genNames :: (Identifier -> Identifier) -> String -> [Identifier] -> HashMap TmName Identifier -> [TmName] -> ([Identifier], HashMap TmName Identifier) genNames mkId modName = go where go s m [] = (s,m) go s m (nm:nms) = let nm' = genComponentName s mkId modName nm s' = nm':s m' = HashMap.insert nm nm' m in go s' m' nms -- | Generate a component for a given function (caching) genComponent :: TmName -- ^ Name of the function -> Maybe Int -- ^ Starting value of the unique counter -> NetlistMonad Component genComponent compName mStart = do compExprM <- fmap (HashMap.lookup compName) $ Lens.use bindings case compExprM of Nothing -> error $ $(curLoc) ++ "No normalized expression found for: " ++ show compName Just (_,expr_) -> makeCached compName components $ genComponentT compName expr_ mStart -- | Generate a component for a given function genComponentT :: TmName -- ^ Name of the function -> Term -- ^ Corresponding term -> Maybe Int -- ^ Starting value of the unique counter -> NetlistMonad Component genComponentT compName componentExpr mStart = do varCount .= fromMaybe 0 mStart componentName' <- (HashMap.! compName) <$> Lens.use componentNames curCompNm .= componentName' tcm <- Lens.use tcCache seenIds .= [] (arguments,binders,result) <- do { normalizedM <- splitNormalized tcm componentExpr ; case normalizedM of Right normalized -> mkUniqueNormalized normalized Left err -> error $ $(curLoc) ++ err } let ids = HashMap.fromList $ map (\(Id v (Embed t)) -> (v,t)) $ arguments ++ map fst binders gamma <- (ids `HashMap.union`) . HashMap.map fst <$> Lens.use bindings varEnv .= gamma typeTrans <- Lens.use typeTranslator let resType = unsafeCoreTypeToHWType $(curLoc) typeTrans tcm $ HashMap.lookupDefault (error $ $(curLoc) ++ "resType" ++ show (result,HashMap.keys ids)) result ids argTypes = map (\(Id _ (Embed t)) -> unsafeCoreTypeToHWType $(curLoc) typeTrans tcm t) arguments let netDecls = map (\(id_,_) -> NetDecl (Text.pack . name2String $ varName id_) (unsafeCoreTypeToHWType $(curLoc) typeTrans tcm . unembed $ varType id_) ) $ filter ((/= result) . varName . fst) binders (decls,clks) <- listen $ concat <$> mapM (uncurry mkDeclarations . second unembed) binders let compInps = zip (map (Text.pack . name2String . varName) arguments) argTypes compOutp = (Text.pack $ name2String result, resType) component = Component componentName' (toList clks) compInps [compOutp] (netDecls ++ decls) return component genComponentName :: [Identifier] -> (Identifier -> Identifier) -> String -> TmName -> Identifier genComponentName seen mkId prefix nm = let i = mkId . stripDollarPrefixes . last . Text.splitOn (Text.pack ".") . 
Text.pack $ name2String nm i' = if Text.null i then Text.pack "Component" else i i'' = mkId (Text.pack (prefix ++ "_") `Text.append` i') in if i'' `elem` seen then go 0 i'' else i'' where go :: Integer -> Identifier -> Identifier go n i = let i' = mkId (i `Text.append` Text.pack ('_':show n)) in if i' `elem` seen then go (n+1) i else i' -- | Generate a list of Declarations for a let-binder mkDeclarations :: Id -- ^ LHS of the let-binder -> Term -- ^ RHS of the let-binder -> NetlistMonad [Declaration] mkDeclarations bndr (Var _ v) = mkFunApp bndr v [] mkDeclarations _ e@(Case _ _ []) = error $ $(curLoc) ++ "Not in normal form: Case-decompositions with an empty list of alternatives not supported: " ++ showDoc e mkDeclarations bndr e@(Case scrut _ [alt]) = do (pat,v) <- unbind alt (varTy,varTm) <- case v of (Var t n) -> return (t,n) _ -> error $ $(curLoc) ++ "Not in normal form: RHS of case-projection is not a variable: " ++ showDoc e typeTrans <- Lens.use typeTranslator tcm <- Lens.use tcCache scrutTy <- termType tcm scrut let sHwTy = unsafeCoreTypeToHWType $(curLoc) typeTrans tcm scrutTy vHwTy = unsafeCoreTypeToHWType $(curLoc) typeTrans tcm varTy (selId,decls) <- case scrut of (Var _ scrutNm) -> return (Text.pack $ name2String scrutNm,[]) _ -> do let scrutId = Text.pack . (++ "_case_scrut") . name2String $ varName bndr (newExpr, newDecls) <- mkExpr False (Left scrutId) scrutTy scrut case newExpr of (Identifier newId Nothing) -> return (newId,newDecls) _ -> do scrutId' <- mkUniqueIdentifier scrutId let scrutDecl = NetDecl scrutId' sHwTy scrutAssn = Assignment scrutId' newExpr return (scrutId',newDecls ++ [scrutDecl,scrutAssn]) let dstId = Text.pack . name2String $ varName bndr altVarId = Text.pack $ name2String varTm modifier = case pat of DataPat (Embed dc) ids -> let (exts,tms) = unrebind ids tmsTys = map (unembed . varType) tms tmsFVs = concatMap (Lens.toListOf typeFreeVars) tmsTys extNms = map varName exts tms' = if any (`elem` tmsFVs) extNms then error $ $(curLoc) ++ "Not in normal form: Pattern binds existential variables: " ++ showDoc e else tms in case elemIndex (Id varTm (Embed varTy)) tms' of Nothing -> Nothing Just fI | sHwTy /= vHwTy -> Just (Indexed (sHwTy,dcTag dc - 1,fI)) -- When element and subject have the same HW-type, -- then the projections is just the identity | otherwise -> Just (DC (Void,0)) _ -> error $ $(curLoc) ++ "Not in normal form: Unexpected pattern in case-projection: " ++ showDoc e extractExpr = Identifier (maybe altVarId (const selId) modifier) modifier return (decls ++ [Assignment dstId extractExpr]) mkDeclarations bndr (Case scrut altTy alts) = do alts' <- reorderPats <$> mapM unbind alts tcm <- Lens.use tcCache scrutTy <- termType tcm scrut scrutHTy <- unsafeCoreTypeToHWTypeM $(curLoc) scrutTy altHTy <- unsafeCoreTypeToHWTypeM $(curLoc) altTy let scrutId = Text.pack . (++ "_case_scrut") . name2String $ varName bndr (scrutExpr,scrutDecls) <- first (mkScrutExpr scrutHTy (fst (head alts'))) <$> mkExpr True (Left scrutId) scrutTy scrut (exprs,altsDecls) <- (second concat . unzip) <$> mapM (mkCondExpr scrutHTy) alts' let dstId = Text.pack . name2String $ varName bndr return $! scrutDecls ++ altsDecls ++ [CondAssignment dstId altHTy scrutExpr scrutHTy exprs] where mkCondExpr :: HWType -> (Pat,Term) -> NetlistMonad ((Maybe HW.Literal,Expr),[Declaration]) mkCondExpr scrutHTy (pat,alt) = do let altId = Text.pack . (++ "_case_alt") . 
name2String $ varName bndr (altExpr,altDecls) <- mkExpr False (Left altId) altTy alt (,altDecls) <$> case pat of DefaultPat -> return (Nothing,altExpr) DataPat (Embed dc) _ -> return (Just (dcToLiteral scrutHTy (dcTag dc)),altExpr) LitPat (Embed (IntegerLiteral i)) -> return (Just (NumLit i),altExpr) LitPat (Embed (IntLiteral i)) -> return (Just (NumLit i), altExpr) LitPat (Embed (WordLiteral w)) -> return (Just (NumLit w), altExpr) LitPat (Embed (CharLiteral c)) -> return (Just (NumLit . toInteger $ ord c), altExpr) LitPat (Embed (Int64Literal i)) -> return (Just (NumLit i), altExpr) LitPat (Embed (Word64Literal w)) -> return (Just (NumLit w), altExpr) _ -> error $ $(curLoc) ++ "Not an integer literal in LitPat" mkScrutExpr :: HWType -> Pat -> Expr -> Expr mkScrutExpr scrutHTy pat scrutE = case pat of DataPat (Embed dc) _ -> let modifier = Just (DC (scrutHTy,dcTag dc - 1)) in case scrutE of Identifier scrutId _ -> Identifier scrutId modifier _ -> error $ $(curLoc) ++ "Not in normal form: Not a variable reference or primitive as subject of a case-statement" _ -> scrutE -- GHC puts default patterns in the first position, we want them in the -- last position. reorderPats :: [(Pat,Term)] -> [(Pat,Term)] reorderPats ((DefaultPat,e):alts') = alts' ++ [(DefaultPat,e)] reorderPats alts' = alts' mkDeclarations bndr app = let (appF,(args,tyArgs)) = second partitionEithers $ collectArgs app in case appF of Var _ f | null tyArgs -> mkFunApp bndr f args | otherwise -> error $ $(curLoc) ++ "Not in normal form: Var-application with Type arguments" _ -> do (exprApp,declsApp) <- mkExpr False (Right bndr) (unembed $ varType bndr) app let dstId = Text.pack . name2String $ varName bndr assn = case exprApp of Identifier _ Nothing -> [] _ -> [Assignment dstId exprApp] return (declsApp ++ assn) -- | Generate a list of Declarations for a let-binder where the RHS is a function application mkFunApp :: Id -- ^ LHS of the let-binder -> TmName -- ^ Name of the applied function -> [Term] -- ^ Function arguments -> NetlistMonad [Declaration] mkFunApp dst fun args = do normalized <- Lens.use bindings case HashMap.lookup fun normalized of Just _ -> do (Component compName hidden compInps [compOutp] _) <- preserveVarEnv $ genComponent fun Nothing if length args == length compInps then do tcm <- Lens.use tcCache argTys <- mapM (termType tcm) args let dstId = Text.pack . name2String $ varName dst (argExprs,argDecls) <- fmap (second concat . unzip) $! mapM (\(e,t) -> mkExpr False (Left dstId) t e) (zip args argTys) (argExprs',argDecls') <- (second concat . unzip) <$> mapM (toSimpleVar dst) (zip argExprs argTys) let hiddenAssigns = map (\(i,t) -> (i,In,t,Identifier i Nothing)) hidden inpAssigns = zipWith (\(i,t) e -> (i,In,t,e)) compInps argExprs' outpAssign = (fst compOutp,Out,snd compOutp,Identifier dstId Nothing) instLabel = Text.concat [compName, Text.pack "_", dstId] instDecl = InstDecl compName instLabel (outpAssign:hiddenAssigns ++ inpAssigns) tell (fromList hidden) return (argDecls ++ argDecls' ++ [instDecl]) else error $ $(curLoc) ++ "under-applied normalized function" Nothing -> case args of [] -> do let dstId = Text.pack . name2String $ varName dst return [Assignment dstId (Identifier (Text.pack $ name2String fun) Nothing)] _ -> error $ $(curLoc) ++ "Unknown function: " ++ showDoc fun toSimpleVar :: Id -> (Expr,Type) -> NetlistMonad (Expr,[Declaration]) toSimpleVar _ (e@(Identifier _ _),_) = return (e,[]) toSimpleVar dst (e,ty) = do let argNm = Text.pack . (++ "_app_arg") . 
name2String $ varName dst argNm' <- mkUniqueIdentifier argNm hTy <- unsafeCoreTypeToHWTypeM $(curLoc) ty let argDecl = NetDecl argNm' hTy argAssn = Assignment argNm' e return (Identifier argNm' Nothing,[argDecl,argAssn]) -- | Generate an expression for a term occurring on the RHS of a let-binder mkExpr :: Bool -- ^ Treat BlackBox expression as declaration -> (Either Identifier Id) -- ^ Id to assign the result to -> Type -- ^ Type of the LHS of the let-binder -> Term -- ^ Term to convert to an expression -> NetlistMonad (Expr,[Declaration]) -- ^ Returned expression and a list of generate BlackBox declarations mkExpr _ _ _ (Core.Literal l) = do iw <- Lens.use intWidth case l of IntegerLiteral i -> return (HW.Literal (Just (Signed iw,iw)) $ NumLit i, []) IntLiteral i -> return (HW.Literal (Just (Signed iw,iw)) $ NumLit i, []) WordLiteral w -> return (HW.Literal (Just (Unsigned iw,iw)) $ NumLit w, []) Int64Literal i -> return (HW.Literal (Just (Signed 64,64)) $ NumLit i, []) Word64Literal w -> return (HW.Literal (Just (Unsigned 64,64)) $ NumLit w, []) CharLiteral c -> return (HW.Literal (Just (Unsigned 21,21)) . NumLit . toInteger $ ord c, []) _ -> error $ $(curLoc) ++ "not an integer or char literal" mkExpr bbEasD bndr ty app = do let (appF,args) = collectArgs app tmArgs = lefts args hwTy <- unsafeCoreTypeToHWTypeM $(curLoc) ty case appF of Data dc | all (\e -> isConstant e || isVar e) tmArgs -> mkDcApplication hwTy bndr dc tmArgs | otherwise -> error $ $(curLoc) ++ "Not in normal form: DataCon-application with non-Simple arguments: " ++ showDoc app Prim nm _ -> mkPrimitive False bbEasD bndr nm args ty Var _ f | null tmArgs -> return (Identifier (Text.pack $ name2String f) Nothing,[]) | otherwise -> error $ $(curLoc) ++ "Not in normal form: top-level binder in argument position: " ++ showDoc app _ -> error $ $(curLoc) ++ "Not in normal form: application of a Let/Lam/Case: " ++ showDoc app -- | Generate an expression for a DataCon application occurring on the RHS of a let-binder mkDcApplication :: HWType -- ^ HWType of the LHS of the let-binder -> (Either Identifier Id) -- ^ Id to assign the result to -> DataCon -- ^ Applied DataCon -> [Term] -- ^ DataCon Arguments -> NetlistMonad (Expr,[Declaration]) -- ^ Returned expression and a list of generate BlackBox declarations mkDcApplication dstHType bndr dc args = do tcm <- Lens.use tcCache argTys <- mapM (termType tcm) args let isSP (SP _ _) = True isSP _ = False let argNm = either id (Text.pack . (++ "_app_arg") . name2String . varName) bndr (argExprs,argDecls) <- fmap (second concat . unzip) $! mapM (\(e,t) -> mkExpr (isSP dstHType) (Left argNm) t e) (zip args argTys) argHWTys <- mapM coreTypeToHWTypeM argTys fmap (,argDecls) $! case (argHWTys,argExprs) of -- Is the DC just a newtype wrapper? 
([Just argHwTy],[argExpr]) | argHwTy == dstHType -> return (HW.DataCon dstHType (DC (Void,-1)) [argExpr]) _ -> case dstHType of SP _ dcArgPairs -> do let dcI = dcTag dc - 1 dcArgs = snd $ indexNote ($(curLoc) ++ "No DC with tag: " ++ show dcI) dcArgPairs dcI case compare (length dcArgs) (length argExprs) of EQ -> return (HW.DataCon dstHType (DC (dstHType,dcI)) argExprs) LT -> error $ $(curLoc) ++ "Over-applied constructor" GT -> error $ $(curLoc) ++ "Under-applied constructor" Product _ dcArgs -> case compare (length dcArgs) (length argExprs) of EQ -> return (HW.DataCon dstHType (DC (dstHType,0)) argExprs) LT -> error $ $(curLoc) ++ "Over-applied constructor" GT -> error $ $(curLoc) ++ "Under-applied constructor" Sum _ _ -> return (HW.DataCon dstHType (DC (dstHType,dcTag dc - 1)) []) Bool -> let dc' = case dcTag dc of 1 -> HW.Literal Nothing (BoolLit False) 2 -> HW.Literal Nothing (BoolLit True) tg -> error $ $(curLoc) ++ "unknown bool literal: " ++ showDoc dc ++ "(tag: " ++ show tg ++ ")" in return dc' Vector 0 _ -> return (HW.DataCon dstHType VecAppend []) -- Note [Vector Wrapper] -- The Vector type has two versions of the cons constructor: -- * The 'normal' one, which takes a coercion as its first argument, -- followed by the element and the vector -- * The wrapper one, which just takes the element and vector argument -- -- We need to account for both occurrences, that's why we have the two -- case statements below: Vector 1 _ -> case argExprs of [_,e,_] -> return (HW.DataCon dstHType VecAppend [e]) _ -> return (HW.DataCon dstHType VecAppend [head argExprs]) Vector _ _ -> case argExprs of [_,e1,e2] -> return (HW.DataCon dstHType VecAppend [e1,e2]) _ -> return (HW.DataCon dstHType VecAppend argExprs) _ -> error $ $(curLoc) ++ "mkDcApplication undefined for: " ++ show (dstHType,dc,args,argHWTys)
ggreif/clash-compiler
clash-lib/src/CLaSH/Netlist.hs
bsd-2-clause
22,136
0
33
7,207
6,436
3,300
3,136
346
20
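genComponentName above makes identifiers unique by appending an increasing numeric suffix until the candidate no longer clashes with the list of names seen so far. A simplified standalone sketch of that loop, using plain lazy Text and omitting the mkId validity filter:

module UniquifySketch where

import Data.Text.Lazy (Text)
import qualified Data.Text.Lazy as Text

-- Append "_0", "_1", ... until the candidate is no longer in 'seen'.
uniquify :: [Text] -> Text -> Text
uniquify seen nm
  | nm `elem` seen = go (0 :: Integer)
  | otherwise      = nm
  where
    go n =
      let nm' = nm `Text.append` Text.pack ('_' : show n)
      in  if nm' `elem` seen then go (n + 1) else nm'

-- e.g. uniquify (map Text.pack ["adder", "adder_0"]) (Text.pack "adder")
--      evaluates to Text.pack "adder_1"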
{-# LANGUAGE TypeFamilies, FlexibleInstances, PostfixOperators #-} {-# OPTIONS_HADDOCK hide #-} ----------------------------------------------------------------------------- -- | -- Module : ForSyDe.MoC.SDF -- Copyright : (c) George Ungureanu, KTH/ICT/E 2015; -- SAM Group, KTH/ICT/ECS 2007-2008 -- License : BSD-style (see the file LICENSE) -- -- Maintainer : [email protected] -- Stability : experimental -- Portability : portable -- -- The synchronuous library defines process constructors, processes and a signal conduit -- for the synchronous computational model. A process constructor is a -- higher order function which together with combinational function(s) -- and values as arguments constructs a process. ----------------------------------------------------------------------------- module ForSyDe.Atom.MoC.SDF.Core where import ForSyDe.Atom.MoC import ForSyDe.Atom.MoC.Stream import ForSyDe.Atom.Utility.Tuple -- | Type synonym for production rate type Cons = Int -- | Type synonym for consumption rate type Prod = Int -- | Type synonym for a SY signal, i.e. "a signal of SY events" type Signal a = Stream (SDF a) -- | The SDF event. It identifies a synchronous dataflow signal, and -- wraps only a value. newtype SDF a = SDF { val :: a } -- | Implenents the SDF semantics for the MoC atoms. instance MoC SDF where type Fun SDF a b = (Cons, [a] -> b) type Ret SDF a = (Prod, [a]) --------------------- _ -.- NullS = NullS (c,f) -.- s = (comb c f . map val . fromStream) s where comb c f l = let x' = take c l xs' = drop c l in if length x' == c then SDF (f x') :- comb c f xs' else NullS --------------------- cfs -*- s = (comb2 cfs . map val . fromStream) s where comb2 NullS _ = NullS comb2 (SDF (c,f):-fs) l = let x' = take c l xs' = drop c l in if length x' == c then SDF (f x') :- comb2 fs xs' else NullS --------------------- (-*) NullS = NullS (-*) ((SDF (p,r)):-xs) | length r == p = stream (map SDF r) +-+ (xs -*) | otherwise = error "[MoC.SDF] Wrong production" --------------------- (-<-) = (+-+) --------------------- (-&-) _ a = a --------------------- -- | Allows for mapping of functions on a SDF event. instance Functor SDF where fmap f (SDF a) = SDF (f a) -- | Allows for lifting functions on a pair of SDF events. instance Applicative SDF where pure = SDF (SDF a) <*> (SDF b) = SDF (a b) instance Foldable SDF where foldr f z (SDF x) = f x z foldl f z (SDF x) = f z x instance Traversable SDF where traverse f (SDF x) = SDF <$> f x -- | Shows the value wrapped instance Show a => Show (SDF a) where showsPrec _ (SDF x) = (++) (show x) -- | Reads the value wrapped instance Read a => Read (SDF a) where readsPrec _ s = [(SDF x, r) | (x, r) <- reads s] ----------------------------------------------------------------------------- -- | Transforms a list of values into a SDF signal with only one -- partition, i.e. all events share the same (initial) tag. signal :: [a] -> Signal a signal l = stream (SDF <$> l) signal2 (l1,l2) = (signal l1, signal l2) signal3 (l1,l2,l3) = (signal l1, signal l2, signal l3) signal4 (l1,l2,l3,l4) = (signal l1, signal l2, signal l3, signal l4) -- | Transforms a signal back to a list fromSignal :: Signal a -> [a] fromSignal = fromStream . fmap (\(SDF a) -> a) -- | Reads a signal from a string. Like with the @read@ function from -- @Prelude@, you must specify the tipe of the signal. 
-- -- >>> readSignal "{1,2,3,4,5}" :: Signal Int -- {1,2,3,4,5} readSignal :: Read a => String -> Signal a readSignal = read ---------------------------------------------------------------------- scen11 (c,p,f) = ctxt11 c p f scen12 (c,p,f) = ctxt12 c p f scen13 (c,p,f) = ctxt13 c p f scen14 (c,p,f) = ctxt14 c p f scen21 (c,p,f) = ctxt21 c p f scen22 (c,p,f) = ctxt22 c p f scen23 (c,p,f) = ctxt23 c p f scen24 (c,p,f) = ctxt24 c p f scen31 (c,p,f) = ctxt31 c p f scen32 (c,p,f) = ctxt32 c p f scen33 (c,p,f) = ctxt33 c p f scen34 (c,p,f) = ctxt34 c p f scen41 (c,p,f) = ctxt41 c p f scen42 (c,p,f) = ctxt42 c p f scen43 (c,p,f) = ctxt43 c p f scen44 (c,p,f) = ctxt44 c p f scen51 (c,p,f) = ctxt51 c p f scen52 (c,p,f) = ctxt52 c p f scen53 (c,p,f) = ctxt53 c p f scen54 (c,p,f) = ctxt54 c p f scen61 (c,p,f) = ctxt61 c p f scen62 (c,p,f) = ctxt62 c p f scen63 (c,p,f) = ctxt63 c p f scen64 (c,p,f) = ctxt64 c p f scen71 (c,p,f) = ctxt71 c p f scen72 (c,p,f) = ctxt72 c p f scen73 (c,p,f) = ctxt73 c p f scen74 (c,p,f) = ctxt74 c p f scen81 (c,p,f) = ctxt81 c p f scen82 (c,p,f) = ctxt82 c p f scen83 (c,p,f) = ctxt83 c p f scen84 (c,p,f) = ctxt84 c p f
forsyde/forsyde-atom
src/ForSyDe/Atom/MoC/SDF/Core.hs
bsd-3-clause
4,963
0
14
1,304
1,768
960
808
88
1
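A minimal usage sketch for the signal helpers above, assuming forsyde-atom is installed, that this Core module is importable (in the packaged library it is normally reached through ForSyDe.Atom.MoC.SDF), and that Stream provides the Show and Read instances the doctest above relies on:

module SDFExample where

import ForSyDe.Atom.MoC.SDF.Core

main :: IO ()
main = do
  -- Mirrors the doctest above: parse a signal from its textual form.
  print (readSignal "{1,2,3,4,5}" :: Signal Int)
  -- Round-trip a plain list through 'signal' and 'fromSignal'.
  print (fromSignal (signal [10, 20, 30 :: Int]))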
module Signal.Wavelet.Repa2Bench where import Data.Array.Repa import Signal.Wavelet.Repa2 import Signal.Wavelet.Repa.Common (forceS, forceP) {-# INLINE benchDwtS #-} benchDwtS :: (Array U DIM1 Double, Array U DIM1 Double) -> Array U DIM1 Double benchDwtS = uncurry dwtS {-# INLINE benchDwtP #-} benchDwtP :: (Array U DIM1 Double, Array U DIM1 Double) -> Array U DIM1 Double benchDwtP = uncurry dwtP {-# INLINE benchIdwtS #-} benchIdwtS :: (Array U DIM1 Double, Array U DIM1 Double) -> Array U DIM1 Double benchIdwtS = uncurry idwtS {-# INLINE benchIdwtP #-} benchIdwtP :: (Array U DIM1 Double, Array U DIM1 Double) -> Array U DIM1 Double benchIdwtP = uncurry idwtP {-# INLINE benchLatticeS #-} benchLatticeS :: ((Double, Double), Array U DIM1 Double) -> Array U DIM1 Double benchLatticeS = forceS . (uncurry lattice) {-# INLINE benchLatticeP #-} benchLatticeP :: ((Double, Double), Array U DIM1 Double) -> Array U DIM1 Double benchLatticeP = forceP . (uncurry lattice) {-# INLINE benchTrimLatticeS #-} benchTrimLatticeS :: ((Double, Double), Array U DIM1 Double) -> Array U DIM1 Double benchTrimLatticeS = forceS . trim . (uncurry lattice) {-# INLINE benchTrimLatticeP #-} benchTrimLatticeP :: ((Double, Double), Array U DIM1 Double) -> Array U DIM1 Double benchTrimLatticeP = forceP . trim . (uncurry lattice) {-# INLINE benchExtendFrontS #-} benchExtendFrontS :: (Int, Array U DIM1 Double) -> Array U DIM1 Double benchExtendFrontS = forceS . (uncurry extendFront) {-# INLINE benchExtendFrontP #-} benchExtendFrontP :: (Int, Array U DIM1 Double) -> Array U DIM1 Double benchExtendFrontP = forceP . (uncurry extendFront) {-# INLINE benchExtendEndS #-} benchExtendEndS :: (Int, Array U DIM1 Double) -> Array U DIM1 Double benchExtendEndS = forceS . (uncurry extendEnd) {-# INLINE benchExtendEndP #-} benchExtendEndP :: (Int, Array U DIM1 Double) -> Array U DIM1 Double benchExtendEndP = forceP . (uncurry extendEnd) dataExtend :: ([Double], [Double]) -> (Int, Array U DIM1 Double) dataExtend (ls, sig) = (length ls, fromListUnboxed (Z :. sigSize) sig) where sigSize = length sig
jstolarek/lattice-structure-hs
bench/Signal/Wavelet/Repa2Bench.hs
bsd-3-clause
2,186
0
8
421
685
371
314
48
1
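A sketch of how wrappers such as benchLatticeS and benchExtendFrontS are typically driven from a criterion harness; the lattice parameters, extension length, and eight-sample signal below are made up, and the project's actual benchmark driver may be wired differently.

module Main where

import Criterion.Main (bench, defaultMain, whnf)
import Data.Array.Repa (Array, DIM1, U, Z(..), (:.)(..), fromListUnboxed)
import Signal.Wavelet.Repa2Bench (benchExtendFrontS, benchLatticeS)

main :: IO ()
main = defaultMain
  [ bench "latticeS"     $ whnf benchLatticeS     ((0.5, 0.8), sig)
  , bench "extendFrontS" $ whnf benchExtendFrontS (4, sig)
  ]
  where
    sig :: Array U DIM1 Double
    sig = fromListUnboxed (Z :. 8) [1 .. 8]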
{-# LANGUAGE Trustworthy #-} {-# LANGUAGE CPP, NoImplicitPrelude, ScopedTypeVariables, MagicHash #-} ----------------------------------------------------------------------------- -- | -- Module : Data.List -- Copyright : (c) The University of Glasgow 2001 -- License : BSD-style (see the file libraries/base/LICENSE) -- -- Maintainer : [email protected] -- Stability : stable -- Portability : portable -- -- Operations on lists. -- ----------------------------------------------------------------------------- module Data.OldList ( -- * Basic functions (++) , head , last , tail , init , uncons , null , length -- * List transformations , map , reverse , intersperse , intercalate , transpose , subsequences , permutations -- * Reducing lists (folds) , foldl , foldl' , foldl1 , foldl1' , foldr , foldr1 -- ** Special folds , concat , concatMap , and , or , any , all , sum , product , maximum , minimum -- * Building lists -- ** Scans , scanl , scanl1 , scanr , scanr1 -- ** Accumulating maps , mapAccumL , mapAccumR -- ** Infinite lists , iterate , repeat , replicate , cycle -- ** Unfolding , unfoldr -- * Sublists -- ** Extracting sublists , take , drop , splitAt , takeWhile , dropWhile , dropWhileEnd , span , break , stripPrefix , group , inits , tails -- ** Predicates , isPrefixOf , isSuffixOf , isInfixOf -- * Searching lists -- ** Searching by equality , elem , notElem , lookup -- ** Searching with a predicate , find , filter , partition -- * Indexing lists -- | These functions treat a list @xs@ as a indexed collection, -- with indices ranging from 0 to @'length' xs - 1@. , (!!) , elemIndex , elemIndices , findIndex , findIndices -- * Zipping and unzipping lists , zip , zip3 , zip4, zip5, zip6, zip7 , zipWith , zipWith3 , zipWith4, zipWith5, zipWith6, zipWith7 , unzip , unzip3 , unzip4, unzip5, unzip6, unzip7 -- * Special lists -- ** Functions on strings , lines , words , unlines , unwords -- ** \"Set\" operations , nub , delete , (\\) , union , intersect -- ** Ordered lists , sort , sortOn , insert -- * Generalized functions -- ** The \"@By@\" operations -- | By convention, overloaded functions have a non-overloaded -- counterpart whose name is suffixed with \`@By@\'. -- -- It is often convenient to use these functions together with -- 'Data.Function.on', for instance @'sortBy' ('compare' -- \`on\` 'fst')@. -- *** User-supplied equality (replacing an @Eq@ context) -- | The predicate is assumed to define an equivalence. , nubBy , deleteBy , deleteFirstsBy , unionBy , intersectBy , groupBy -- *** User-supplied comparison (replacing an @Ord@ context) -- | The function is assumed to define a total ordering. , sortBy , insertBy , maximumBy , minimumBy -- ** The \"@generic@\" operations -- | The prefix \`@generic@\' indicates an overloaded function that -- is a generalized version of a "Prelude" function. , genericLength , genericTake , genericDrop , genericSplitAt , genericIndex , genericReplicate ) where import Data.Maybe import Data.Char ( isSpace ) import Data.Ord ( comparing ) import Data.Tuple ( fst, snd ) import GHC.Num import GHC.Real import GHC.List import GHC.Base infix 5 \\ -- comment to fool cpp: https://www.haskell.org/ghc/docs/latest/html/users_guide/options-phases.html#cpp-string-gaps -- ----------------------------------------------------------------------------- -- List functions -- | The 'dropWhileEnd' function drops the largest suffix of a list -- in which the given predicate holds for all elements. 
For example: -- -- > dropWhileEnd isSpace "foo\n" == "foo" -- > dropWhileEnd isSpace "foo bar" == "foo bar" -- > dropWhileEnd isSpace ("foo\n" ++ undefined) == "foo" ++ undefined -- -- /Since: 4.5.0.0/ dropWhileEnd :: (a -> Bool) -> [a] -> [a] dropWhileEnd p = foldr (\x xs -> if p x && null xs then [] else x : xs) [] -- | The 'stripPrefix' function drops the given prefix from a list. -- It returns 'Nothing' if the list did not start with the prefix -- given, or 'Just' the list after the prefix, if it does. -- -- > stripPrefix "foo" "foobar" == Just "bar" -- > stripPrefix "foo" "foo" == Just "" -- > stripPrefix "foo" "barfoo" == Nothing -- > stripPrefix "foo" "barfoobaz" == Nothing stripPrefix :: Eq a => [a] -> [a] -> Maybe [a] stripPrefix [] ys = Just ys stripPrefix (x:xs) (y:ys) | x == y = stripPrefix xs ys stripPrefix _ _ = Nothing -- | The 'elemIndex' function returns the index of the first element -- in the given list which is equal (by '==') to the query element, -- or 'Nothing' if there is no such element. elemIndex :: Eq a => a -> [a] -> Maybe Int elemIndex x = findIndex (x==) -- | The 'elemIndices' function extends 'elemIndex', by returning the -- indices of all elements equal to the query element, in ascending order. elemIndices :: Eq a => a -> [a] -> [Int] elemIndices x = findIndices (x==) -- | The 'find' function takes a predicate and a list and returns the -- first element in the list matching the predicate, or 'Nothing' if -- there is no such element. find :: (a -> Bool) -> [a] -> Maybe a find p = listToMaybe . filter p -- | The 'findIndex' function takes a predicate and a list and returns -- the index of the first element in the list satisfying the predicate, -- or 'Nothing' if there is no such element. findIndex :: (a -> Bool) -> [a] -> Maybe Int findIndex p = listToMaybe . findIndices p -- | The 'findIndices' function extends 'findIndex', by returning the -- indices of all elements satisfying the predicate, in ascending order. findIndices :: (a -> Bool) -> [a] -> [Int] #ifdef USE_REPORT_PRELUDE findIndices p xs = [ i | (x,i) <- zip xs [0..], p x] #else -- Efficient definition findIndices p ls = loop 0# ls where loop _ [] = [] loop n (x:xs) | p x = I# n : loop (n +# 1#) xs | otherwise = loop (n +# 1#) xs #endif /* USE_REPORT_PRELUDE */ -- | The 'isPrefixOf' function takes two lists and returns 'True' -- iff the first list is a prefix of the second. isPrefixOf :: (Eq a) => [a] -> [a] -> Bool isPrefixOf [] _ = True isPrefixOf _ [] = False isPrefixOf (x:xs) (y:ys)= x == y && isPrefixOf xs ys -- | The 'isSuffixOf' function takes two lists and returns 'True' -- iff the first list is a suffix of the second. -- Both lists must be finite. isSuffixOf :: (Eq a) => [a] -> [a] -> Bool isSuffixOf x y = reverse x `isPrefixOf` reverse y -- | The 'isInfixOf' function takes two lists and returns 'True' -- iff the first list is contained, wholly and intact, -- anywhere within the second. -- -- Example: -- -- >isInfixOf "Haskell" "I really like Haskell." == True -- >isInfixOf "Ial" "I really like Haskell." == False isInfixOf :: (Eq a) => [a] -> [a] -> Bool isInfixOf needle haystack = any (isPrefixOf needle) (tails haystack) -- | /O(n^2)/. The 'nub' function removes duplicate elements from a list. -- In particular, it keeps only the first occurrence of each element. -- (The name 'nub' means \`essence\'.) -- It is a special case of 'nubBy', which allows the programmer to supply -- their own equality test. 
nub :: (Eq a) => [a] -> [a] #ifdef USE_REPORT_PRELUDE nub = nubBy (==) #else -- stolen from HBC nub l = nub' l [] -- ' where nub' [] _ = [] -- ' nub' (x:xs) ls -- ' | x `elem` ls = nub' xs ls -- ' | otherwise = x : nub' xs (x:ls) -- ' #endif -- | The 'nubBy' function behaves just like 'nub', except it uses a -- user-supplied equality predicate instead of the overloaded '==' -- function. nubBy :: (a -> a -> Bool) -> [a] -> [a] #ifdef USE_REPORT_PRELUDE nubBy eq [] = [] nubBy eq (x:xs) = x : nubBy eq (filter (\ y -> not (eq x y)) xs) #else nubBy eq l = nubBy' l [] where nubBy' [] _ = [] nubBy' (y:ys) xs | elem_by eq y xs = nubBy' ys xs | otherwise = y : nubBy' ys (y:xs) -- Not exported: -- Note that we keep the call to `eq` with arguments in the -- same order as in the reference implementation -- 'xs' is the list of things we've seen so far, -- 'y' is the potential new element elem_by :: (a -> a -> Bool) -> a -> [a] -> Bool elem_by _ _ [] = False elem_by eq y (x:xs) = y `eq` x || elem_by eq y xs #endif -- | 'delete' @x@ removes the first occurrence of @x@ from its list argument. -- For example, -- -- > delete 'a' "banana" == "bnana" -- -- It is a special case of 'deleteBy', which allows the programmer to -- supply their own equality test. delete :: (Eq a) => a -> [a] -> [a] delete = deleteBy (==) -- | The 'deleteBy' function behaves like 'delete', but takes a -- user-supplied equality predicate. deleteBy :: (a -> a -> Bool) -> a -> [a] -> [a] deleteBy _ _ [] = [] deleteBy eq x (y:ys) = if x `eq` y then ys else y : deleteBy eq x ys -- | The '\\' function is list difference (non-associative). -- In the result of @xs@ '\\' @ys@, the first occurrence of each element of -- @ys@ in turn (if any) has been removed from @xs@. Thus -- -- > (xs ++ ys) \\ xs == ys. -- -- It is a special case of 'deleteFirstsBy', which allows the programmer -- to supply their own equality test. (\\) :: (Eq a) => [a] -> [a] -> [a] (\\) = foldl (flip delete) -- | The 'union' function returns the list union of the two lists. -- For example, -- -- > "dog" `union` "cow" == "dogcw" -- -- Duplicates, and elements of the first list, are removed from the -- the second list, but if the first list contains duplicates, so will -- the result. -- It is a special case of 'unionBy', which allows the programmer to supply -- their own equality test. union :: (Eq a) => [a] -> [a] -> [a] union = unionBy (==) -- | The 'unionBy' function is the non-overloaded version of 'union'. unionBy :: (a -> a -> Bool) -> [a] -> [a] -> [a] unionBy eq xs ys = xs ++ foldl (flip (deleteBy eq)) (nubBy eq ys) xs -- | The 'intersect' function takes the list intersection of two lists. -- For example, -- -- > [1,2,3,4] `intersect` [2,4,6,8] == [2,4] -- -- If the first list contains duplicates, so will the result. -- -- > [1,2,2,3,4] `intersect` [6,4,4,2] == [2,2,4] -- -- It is a special case of 'intersectBy', which allows the programmer to -- supply their own equality test. If the element is found in both the first -- and the second list, the element from the first list will be used. intersect :: (Eq a) => [a] -> [a] -> [a] intersect = intersectBy (==) -- | The 'intersectBy' function is the non-overloaded version of 'intersect'. intersectBy :: (a -> a -> Bool) -> [a] -> [a] -> [a] intersectBy _ [] _ = [] intersectBy _ _ [] = [] intersectBy eq xs ys = [x | x <- xs, any (eq x) ys] -- | The 'intersperse' function takes an element and a list and -- \`intersperses\' that element between the elements of the list. 
-- For example, -- -- > intersperse ',' "abcde" == "a,b,c,d,e" intersperse :: a -> [a] -> [a] intersperse _ [] = [] intersperse sep (x:xs) = x : prependToAll sep xs -- Not exported: -- We want to make every element in the 'intersperse'd list available -- as soon as possible to avoid space leaks. Experiments suggested that -- a separate top-level helper is more efficient than a local worker. prependToAll :: a -> [a] -> [a] prependToAll _ [] = [] prependToAll sep (x:xs) = sep : x : prependToAll sep xs -- | 'intercalate' @xs xss@ is equivalent to @('concat' ('intersperse' xs xss))@. -- It inserts the list @xs@ in between the lists in @xss@ and concatenates the -- result. intercalate :: [a] -> [[a]] -> [a] intercalate xs xss = concat (intersperse xs xss) -- | The 'transpose' function transposes the rows and columns of its argument. -- For example, -- -- > transpose [[1,2,3],[4,5,6]] == [[1,4],[2,5],[3,6]] transpose :: [[a]] -> [[a]] transpose [] = [] transpose ([] : xss) = transpose xss transpose ((x:xs) : xss) = (x : [h | (h:_) <- xss]) : transpose (xs : [ t | (_:t) <- xss]) -- | The 'partition' function takes a predicate a list and returns -- the pair of lists of elements which do and do not satisfy the -- predicate, respectively; i.e., -- -- > partition p xs == (filter p xs, filter (not . p) xs) partition :: (a -> Bool) -> [a] -> ([a],[a]) {-# INLINE partition #-} partition p xs = foldr (select p) ([],[]) xs select :: (a -> Bool) -> a -> ([a], [a]) -> ([a], [a]) select p x ~(ts,fs) | p x = (x:ts,fs) | otherwise = (ts, x:fs) -- | The 'mapAccumL' function behaves like a combination of 'map' and -- 'foldl'; it applies a function to each element of a list, passing -- an accumulating parameter from left to right, and returning a final -- value of this accumulator together with the new list. mapAccumL :: (acc -> x -> (acc, y)) -- Function of elt of input list -- and accumulator, returning new -- accumulator and elt of result list -> acc -- Initial accumulator -> [x] -- Input list -> (acc, [y]) -- Final accumulator and result list mapAccumL _ s [] = (s, []) mapAccumL f s (x:xs) = (s'',y:ys) where (s', y ) = f s x (s'',ys) = mapAccumL f s' xs -- | The 'mapAccumR' function behaves like a combination of 'map' and -- 'foldr'; it applies a function to each element of a list, passing -- an accumulating parameter from right to left, and returning a final -- value of this accumulator together with the new list. mapAccumR :: (acc -> x -> (acc, y)) -- Function of elt of input list -- and accumulator, returning new -- accumulator and elt of result list -> acc -- Initial accumulator -> [x] -- Input list -> (acc, [y]) -- Final accumulator and result list mapAccumR _ s [] = (s, []) mapAccumR f s (x:xs) = (s'', y:ys) where (s'',y ) = f s' x (s', ys) = mapAccumR f s xs -- | The 'insert' function takes an element and a list and inserts the -- element into the list at the first position where it is less -- than or equal to the next element. In particular, if the list -- is sorted before the call, the result will also be sorted. -- It is a special case of 'insertBy', which allows the programmer to -- supply their own comparison function. insert :: Ord a => a -> [a] -> [a] insert e ls = insertBy (compare) e ls -- | The non-overloaded version of 'insert'. 
insertBy :: (a -> a -> Ordering) -> a -> [a] -> [a] insertBy _ x [] = [x] insertBy cmp x ys@(y:ys') = case cmp x y of GT -> y : insertBy cmp x ys' _ -> x : ys -- | 'maximum' returns the maximum value from a list, -- which must be non-empty, finite, and of an ordered type. -- It is a special case of 'Data.List.maximumBy', which allows the -- programmer to supply their own comparison function. maximum :: (Ord a) => [a] -> a {-# INLINE [1] maximum #-} maximum [] = errorEmptyList "maximum" maximum xs = foldl1 max xs {-# RULES "maximumInt" maximum = (strictMaximum :: [Int] -> Int); "maximumInteger" maximum = (strictMaximum :: [Integer] -> Integer) #-} -- We can't make the overloaded version of maximum strict without -- changing its semantics (max might not be strict), but we can for -- the version specialised to 'Int'. strictMaximum :: (Ord a) => [a] -> a strictMaximum [] = errorEmptyList "maximum" strictMaximum xs = foldl1' max xs -- | 'minimum' returns the minimum value from a list, -- which must be non-empty, finite, and of an ordered type. -- It is a special case of 'Data.List.minimumBy', which allows the -- programmer to supply their own comparison function. minimum :: (Ord a) => [a] -> a {-# INLINE [1] minimum #-} minimum [] = errorEmptyList "minimum" minimum xs = foldl1 min xs {-# RULES "minimumInt" minimum = (strictMinimum :: [Int] -> Int); "minimumInteger" minimum = (strictMinimum :: [Integer] -> Integer) #-} strictMinimum :: (Ord a) => [a] -> a strictMinimum [] = errorEmptyList "minimum" strictMinimum xs = foldl1' min xs -- | The 'maximumBy' function takes a comparison function and a list -- and returns the greatest element of the list by the comparison function. -- The list must be finite and non-empty. maximumBy :: (a -> a -> Ordering) -> [a] -> a maximumBy _ [] = error "List.maximumBy: empty list" maximumBy cmp xs = foldl1 maxBy xs where maxBy x y = case cmp x y of GT -> x _ -> y -- | The 'minimumBy' function takes a comparison function and a list -- and returns the least element of the list by the comparison function. -- The list must be finite and non-empty. minimumBy :: (a -> a -> Ordering) -> [a] -> a minimumBy _ [] = error "List.minimumBy: empty list" minimumBy cmp xs = foldl1 minBy xs where minBy x y = case cmp x y of GT -> y _ -> x -- | The 'genericLength' function is an overloaded version of 'length'. In -- particular, instead of returning an 'Int', it returns any type which is -- an instance of 'Num'. It is, however, less efficient than 'length'. genericLength :: (Num i) => [a] -> i {-# NOINLINE [1] genericLength #-} genericLength [] = 0 genericLength (_:l) = 1 + genericLength l {-# RULES "genericLengthInt" genericLength = (strictGenericLength :: [a] -> Int); "genericLengthInteger" genericLength = (strictGenericLength :: [a] -> Integer); #-} strictGenericLength :: (Num i) => [b] -> i strictGenericLength l = gl l 0 where gl [] a = a gl (_:xs) a = let a' = a + 1 in a' `seq` gl xs a' -- | The 'genericTake' function is an overloaded version of 'take', which -- accepts any 'Integral' value as the number of elements to take. genericTake :: (Integral i) => i -> [a] -> [a] genericTake n _ | n <= 0 = [] genericTake _ [] = [] genericTake n (x:xs) = x : genericTake (n-1) xs -- | The 'genericDrop' function is an overloaded version of 'drop', which -- accepts any 'Integral' value as the number of elements to drop. 
genericDrop :: (Integral i) => i -> [a] -> [a] genericDrop n xs | n <= 0 = xs genericDrop _ [] = [] genericDrop n (_:xs) = genericDrop (n-1) xs -- | The 'genericSplitAt' function is an overloaded version of 'splitAt', which -- accepts any 'Integral' value as the position at which to split. genericSplitAt :: (Integral i) => i -> [a] -> ([a], [a]) genericSplitAt n xs | n <= 0 = ([],xs) genericSplitAt _ [] = ([],[]) genericSplitAt n (x:xs) = (x:xs',xs'') where (xs',xs'') = genericSplitAt (n-1) xs -- | The 'genericIndex' function is an overloaded version of '!!', which -- accepts any 'Integral' value as the index. genericIndex :: (Integral i) => [a] -> i -> a genericIndex (x:_) 0 = x genericIndex (_:xs) n | n > 0 = genericIndex xs (n-1) | otherwise = error "List.genericIndex: negative argument." genericIndex _ _ = error "List.genericIndex: index too large." -- | The 'genericReplicate' function is an overloaded version of 'replicate', -- which accepts any 'Integral' value as the number of repetitions to make. genericReplicate :: (Integral i) => i -> a -> [a] genericReplicate n x = genericTake n (repeat x) -- | The 'zip4' function takes four lists and returns a list of -- quadruples, analogous to 'zip'. zip4 :: [a] -> [b] -> [c] -> [d] -> [(a,b,c,d)] zip4 = zipWith4 (,,,) -- | The 'zip5' function takes five lists and returns a list of -- five-tuples, analogous to 'zip'. zip5 :: [a] -> [b] -> [c] -> [d] -> [e] -> [(a,b,c,d,e)] zip5 = zipWith5 (,,,,) -- | The 'zip6' function takes six lists and returns a list of six-tuples, -- analogous to 'zip'. zip6 :: [a] -> [b] -> [c] -> [d] -> [e] -> [f] -> [(a,b,c,d,e,f)] zip6 = zipWith6 (,,,,,) -- | The 'zip7' function takes seven lists and returns a list of -- seven-tuples, analogous to 'zip'. zip7 :: [a] -> [b] -> [c] -> [d] -> [e] -> [f] -> [g] -> [(a,b,c,d,e,f,g)] zip7 = zipWith7 (,,,,,,) -- | The 'zipWith4' function takes a function which combines four -- elements, as well as four lists and returns a list of their point-wise -- combination, analogous to 'zipWith'. zipWith4 :: (a->b->c->d->e) -> [a]->[b]->[c]->[d]->[e] zipWith4 z (a:as) (b:bs) (c:cs) (d:ds) = z a b c d : zipWith4 z as bs cs ds zipWith4 _ _ _ _ _ = [] -- | The 'zipWith5' function takes a function which combines five -- elements, as well as five lists and returns a list of their point-wise -- combination, analogous to 'zipWith'. zipWith5 :: (a->b->c->d->e->f) -> [a]->[b]->[c]->[d]->[e]->[f] zipWith5 z (a:as) (b:bs) (c:cs) (d:ds) (e:es) = z a b c d e : zipWith5 z as bs cs ds es zipWith5 _ _ _ _ _ _ = [] -- | The 'zipWith6' function takes a function which combines six -- elements, as well as six lists and returns a list of their point-wise -- combination, analogous to 'zipWith'. zipWith6 :: (a->b->c->d->e->f->g) -> [a]->[b]->[c]->[d]->[e]->[f]->[g] zipWith6 z (a:as) (b:bs) (c:cs) (d:ds) (e:es) (f:fs) = z a b c d e f : zipWith6 z as bs cs ds es fs zipWith6 _ _ _ _ _ _ _ = [] -- | The 'zipWith7' function takes a function which combines seven -- elements, as well as seven lists and returns a list of their point-wise -- combination, analogous to 'zipWith'. zipWith7 :: (a->b->c->d->e->f->g->h) -> [a]->[b]->[c]->[d]->[e]->[f]->[g]->[h] zipWith7 z (a:as) (b:bs) (c:cs) (d:ds) (e:es) (f:fs) (g:gs) = z a b c d e f g : zipWith7 z as bs cs ds es fs gs zipWith7 _ _ _ _ _ _ _ _ = [] -- | The 'unzip4' function takes a list of quadruples and returns four -- lists, analogous to 'unzip'. 
unzip4 :: [(a,b,c,d)] -> ([a],[b],[c],[d]) unzip4 = foldr (\(a,b,c,d) ~(as,bs,cs,ds) -> (a:as,b:bs,c:cs,d:ds)) ([],[],[],[]) -- | The 'unzip5' function takes a list of five-tuples and returns five -- lists, analogous to 'unzip'. unzip5 :: [(a,b,c,d,e)] -> ([a],[b],[c],[d],[e]) unzip5 = foldr (\(a,b,c,d,e) ~(as,bs,cs,ds,es) -> (a:as,b:bs,c:cs,d:ds,e:es)) ([],[],[],[],[]) -- | The 'unzip6' function takes a list of six-tuples and returns six -- lists, analogous to 'unzip'. unzip6 :: [(a,b,c,d,e,f)] -> ([a],[b],[c],[d],[e],[f]) unzip6 = foldr (\(a,b,c,d,e,f) ~(as,bs,cs,ds,es,fs) -> (a:as,b:bs,c:cs,d:ds,e:es,f:fs)) ([],[],[],[],[],[]) -- | The 'unzip7' function takes a list of seven-tuples and returns -- seven lists, analogous to 'unzip'. unzip7 :: [(a,b,c,d,e,f,g)] -> ([a],[b],[c],[d],[e],[f],[g]) unzip7 = foldr (\(a,b,c,d,e,f,g) ~(as,bs,cs,ds,es,fs,gs) -> (a:as,b:bs,c:cs,d:ds,e:es,f:fs,g:gs)) ([],[],[],[],[],[],[]) -- | The 'deleteFirstsBy' function takes a predicate and two lists and -- returns the first list with the first occurrence of each element of -- the second list removed. deleteFirstsBy :: (a -> a -> Bool) -> [a] -> [a] -> [a] deleteFirstsBy eq = foldl (flip (deleteBy eq)) -- | The 'group' function takes a list and returns a list of lists such -- that the concatenation of the result is equal to the argument. Moreover, -- each sublist in the result contains only equal elements. For example, -- -- > group "Mississippi" = ["M","i","ss","i","ss","i","pp","i"] -- -- It is a special case of 'groupBy', which allows the programmer to supply -- their own equality test. group :: Eq a => [a] -> [[a]] group = groupBy (==) -- | The 'groupBy' function is the non-overloaded version of 'group'. groupBy :: (a -> a -> Bool) -> [a] -> [[a]] groupBy _ [] = [] groupBy eq (x:xs) = (x:ys) : groupBy eq zs where (ys,zs) = span (eq x) xs -- | The 'inits' function returns all initial segments of the argument, -- shortest first. For example, -- -- > inits "abc" == ["","a","ab","abc"] -- -- Note that 'inits' has the following strictness property: -- @inits _|_ = [] : _|_@ inits :: [a] -> [[a]] inits xs = [] : case xs of [] -> [] x : xs' -> map (x :) (inits xs') -- | The 'tails' function returns all final segments of the argument, -- longest first. For example, -- -- > tails "abc" == ["abc", "bc", "c",""] -- -- Note that 'tails' has the following strictness property: -- @tails _|_ = _|_ : _|_@ tails :: [a] -> [[a]] tails xs = xs : case xs of [] -> [] _ : xs' -> tails xs' -- | The 'subsequences' function returns the list of all subsequences of the argument. -- -- > subsequences "abc" == ["","a","b","ab","c","ac","bc","abc"] subsequences :: [a] -> [[a]] subsequences xs = [] : nonEmptySubsequences xs -- | The 'nonEmptySubsequences' function returns the list of all subsequences of the argument, -- except for the empty list. -- -- > nonEmptySubsequences "abc" == ["a","b","ab","c","ac","bc","abc"] nonEmptySubsequences :: [a] -> [[a]] nonEmptySubsequences [] = [] nonEmptySubsequences (x:xs) = [x] : foldr f [] (nonEmptySubsequences xs) where f ys r = ys : (x : ys) : r -- | The 'permutations' function returns the list of all permutations of the argument. 
-- -- > permutations "abc" == ["abc","bac","cba","bca","cab","acb"] permutations :: [a] -> [[a]] permutations xs0 = xs0 : perms xs0 [] where perms [] _ = [] perms (t:ts) is = foldr interleave (perms ts (t:is)) (permutations is) where interleave xs r = let (_,zs) = interleave' id xs r in zs interleave' _ [] r = (ts, r) interleave' f (y:ys) r = let (us,zs) = interleave' (f . (y:)) ys r in (y:us, f (t:y:us) : zs) ------------------------------------------------------------------------------ -- Quick Sort algorithm taken from HBC's QSort library. -- | The 'sort' function implements a stable sorting algorithm. -- It is a special case of 'sortBy', which allows the programmer to supply -- their own comparison function. sort :: (Ord a) => [a] -> [a] -- | The 'sortBy' function is the non-overloaded version of 'sort'. sortBy :: (a -> a -> Ordering) -> [a] -> [a] #ifdef USE_REPORT_PRELUDE sort = sortBy compare sortBy cmp = foldr (insertBy cmp) [] #else {- GHC's mergesort replaced by a better implementation, 24/12/2009. This code originally contributed to the nhc12 compiler by Thomas Nordin in 2002. Rumoured to have been based on code by Lennart Augustsson, e.g. http://www.mail-archive.com/[email protected]/msg01822.html and possibly to bear similarities to a 1982 paper by Richard O'Keefe: "A smooth applicative merge sort". Benchmarks show it to be often 2x the speed of the previous implementation. Fixes ticket http://ghc.haskell.org/trac/ghc/ticket/2143 -} sort = sortBy compare sortBy cmp = mergeAll . sequences where sequences (a:b:xs) | a `cmp` b == GT = descending b [a] xs | otherwise = ascending b (a:) xs sequences xs = [xs] descending a as (b:bs) | a `cmp` b == GT = descending b (a:as) bs descending a as bs = (a:as): sequences bs ascending a as (b:bs) | a `cmp` b /= GT = ascending b (\ys -> as (a:ys)) bs ascending a as bs = as [a]: sequences bs mergeAll [x] = x mergeAll xs = mergeAll (mergePairs xs) mergePairs (a:b:xs) = merge a b: mergePairs xs mergePairs xs = xs merge as@(a:as') bs@(b:bs') | a `cmp` b == GT = b:merge as bs' | otherwise = a:merge as' bs merge [] bs = bs merge as [] = as {- sortBy cmp l = mergesort cmp l sort l = mergesort compare l Quicksort replaced by mergesort, 14/5/2002. From: Ian Lynagh <[email protected]> I am curious as to why the List.sort implementation in GHC is a quicksort algorithm rather than an algorithm that guarantees n log n time in the worst case? I have attached a mergesort implementation along with a few scripts to time it's performance, the results of which are shown below (* means it didn't finish successfully - in all cases this was due to a stack overflow). If I heap profile the random_list case with only 10000 then I see random_list peaks at using about 2.5M of memory, whereas in the same program using List.sort it uses only 100k. 
Input style Input length Sort data Sort alg User time stdin 10000 random_list sort 2.82 stdin 10000 random_list mergesort 2.96 stdin 10000 sorted sort 31.37 stdin 10000 sorted mergesort 1.90 stdin 10000 revsorted sort 31.21 stdin 10000 revsorted mergesort 1.88 stdin 100000 random_list sort * stdin 100000 random_list mergesort * stdin 100000 sorted sort * stdin 100000 sorted mergesort * stdin 100000 revsorted sort * stdin 100000 revsorted mergesort * func 10000 random_list sort 0.31 func 10000 random_list mergesort 0.91 func 10000 sorted sort 19.09 func 10000 sorted mergesort 0.15 func 10000 revsorted sort 19.17 func 10000 revsorted mergesort 0.16 func 100000 random_list sort 3.85 func 100000 random_list mergesort * func 100000 sorted sort 5831.47 func 100000 sorted mergesort 2.23 func 100000 revsorted sort 5872.34 func 100000 revsorted mergesort 2.24 mergesort :: (a -> a -> Ordering) -> [a] -> [a] mergesort cmp = mergesort' cmp . map wrap mergesort' :: (a -> a -> Ordering) -> [[a]] -> [a] mergesort' _ [] = [] mergesort' _ [xs] = xs mergesort' cmp xss = mergesort' cmp (merge_pairs cmp xss) merge_pairs :: (a -> a -> Ordering) -> [[a]] -> [[a]] merge_pairs _ [] = [] merge_pairs _ [xs] = [xs] merge_pairs cmp (xs:ys:xss) = merge cmp xs ys : merge_pairs cmp xss merge :: (a -> a -> Ordering) -> [a] -> [a] -> [a] merge _ [] ys = ys merge _ xs [] = xs merge cmp (x:xs) (y:ys) = case x `cmp` y of GT -> y : merge cmp (x:xs) ys _ -> x : merge cmp xs (y:ys) wrap :: a -> [a] wrap x = [x] OLDER: qsort version -- qsort is stable and does not concatenate. qsort :: (a -> a -> Ordering) -> [a] -> [a] -> [a] qsort _ [] r = r qsort _ [x] r = x:r qsort cmp (x:xs) r = qpart cmp x xs [] [] r -- qpart partitions and sorts the sublists qpart :: (a -> a -> Ordering) -> a -> [a] -> [a] -> [a] -> [a] -> [a] qpart cmp x [] rlt rge r = -- rlt and rge are in reverse order and must be sorted with an -- anti-stable sorting rqsort cmp rlt (x:rqsort cmp rge r) qpart cmp x (y:ys) rlt rge r = case cmp x y of GT -> qpart cmp x ys (y:rlt) rge r _ -> qpart cmp x ys rlt (y:rge) r -- rqsort is as qsort but anti-stable, i.e. reverses equal elements rqsort :: (a -> a -> Ordering) -> [a] -> [a] -> [a] rqsort _ [] r = r rqsort _ [x] r = x:r rqsort cmp (x:xs) r = rqpart cmp x xs [] [] r rqpart :: (a -> a -> Ordering) -> a -> [a] -> [a] -> [a] -> [a] -> [a] rqpart cmp x [] rle rgt r = qsort cmp rle (x:qsort cmp rgt r) rqpart cmp x (y:ys) rle rgt r = case cmp y x of GT -> rqpart cmp x ys rle (y:rgt) r _ -> rqpart cmp x ys (y:rle) rgt r -} #endif /* USE_REPORT_PRELUDE */ -- | Sort a list by comparing the results of a key function applied to each -- element. @sortOn f@ is equivalent to @sortBy . comparing f@, but has the -- performance advantage of only evaluating @f@ once for each element in the -- input list. This is called the decorate-sort-undecorate paradigm, or -- Schwartzian transform. -- -- /Since: 4.8.0.0/ sortOn :: Ord b => (a -> b) -> [a] -> [a] sortOn f = map snd . sortBy (comparing fst) . map (\x -> let y = f x in y `seq` (y, x)) -- | The 'unfoldr' function is a \`dual\' to 'foldr': while 'foldr' -- reduces a list to a summary value, 'unfoldr' builds a list from -- a seed value. The function takes the element and returns 'Nothing' -- if it is done producing the list or returns 'Just' @(a,b)@, in which -- case, @a@ is a prepended to the list and @b@ is used as the next -- element in a recursive call. 
For example, -- -- > iterate f == unfoldr (\x -> Just (x, f x)) -- -- In some cases, 'unfoldr' can undo a 'foldr' operation: -- -- > unfoldr f' (foldr f z xs) == xs -- -- if the following holds: -- -- > f' (f x y) = Just (x,y) -- > f' z = Nothing -- -- A simple use of unfoldr: -- -- > unfoldr (\b -> if b == 0 then Nothing else Just (b, b-1)) 10 -- > [10,9,8,7,6,5,4,3,2,1] -- -- Note [INLINE unfoldr] -- We treat unfoldr a little differently from some other forms for list fusion -- for two reasons: -- -- 1. We don't want to use a rule to rewrite a basic form to a fusible -- form because this would inline before constant floating. As Simon Peyton- -- Jones and others have pointed out, this could reduce sharing in some cases -- where sharing is beneficial. Thus we simply INLINE it, which is, for -- example, how enumFromTo::Int becomes eftInt. Unfortunately, we don't seem -- to get enough of an inlining discount to get a version of eftInt based on -- unfoldr to inline as readily as the usual one. We know that all the Maybe -- nonsense will go away, but the compiler does not. -- -- 2. The benefit of inlining unfoldr is likely to be huge in many common cases, -- even apart from list fusion. In particular, inlining unfoldr often -- allows GHC to erase all the Maybes. This appears to be critical if unfoldr -- is to be used in high-performance code. A small increase in code size -- in the relatively rare cases when this does not happen looks like a very -- small price to pay. -- -- Doing a back-and-forth dance doesn't seem to accomplish anything if the -- final form has to be inlined in any case. unfoldr :: (b -> Maybe (a, b)) -> b -> [a] {-# INLINE unfoldr #-} -- See Note [INLINE unfoldr] unfoldr f b0 = build (\c n -> let go b = case f b of Just (a, new_b) -> a `c` go new_b Nothing -> n in go b0) -- ----------------------------------------------------------------------------- -- | A strict version of 'foldl'. foldl' :: forall a b . (b -> a -> b) -> b -> [a] -> b foldl' k z0 xs = foldr (\(v::a) (fn::b->b) (z::b) -> z `seq` fn (k z v)) (id :: b -> b) xs z0 -- Implementing foldl' via foldr is only a good idea if the compiler can optimize -- the resulting code (eta-expand the recursive "go"), so this needs -fcall-arity! -- Also see #7994 -- | 'foldl1' is a variant of 'foldl' that has no starting value argument, -- and thus must be applied to non-empty lists. foldl1 :: (a -> a -> a) -> [a] -> a foldl1 f (x:xs) = foldl f x xs foldl1 _ [] = errorEmptyList "foldl1" -- | A strict version of 'foldl1' foldl1' :: (a -> a -> a) -> [a] -> a foldl1' f (x:xs) = foldl' f x xs foldl1' _ [] = errorEmptyList "foldl1'" -- ----------------------------------------------------------------------------- -- List sum and product -- | The 'sum' function computes the sum of a finite list of numbers. sum :: (Num a) => [a] -> a -- | The 'product' function computes the product of a finite list of numbers. product :: (Num a) => [a] -> a {-# INLINE sum #-} sum = foldl (+) 0 {-# INLINE product #-} product = foldl (*) 1 -- ----------------------------------------------------------------------------- -- Functions on strings -- | 'lines' breaks a string up into a list of strings at newline -- characters. The resulting strings do not contain newlines. lines :: String -> [String] lines "" = [] -- Somehow GHC doesn't detect the selector thunks in the below code, -- so s' keeps a reference to the first line via the pair and we have -- a space leak (cf. #4334). -- So we need to make GHC see the selector thunks with a trick. 
lines s = cons (case break (== '\n') s of (l, s') -> (l, case s' of [] -> [] _:s'' -> lines s'')) where cons ~(h, t) = h : t -- | 'unlines' is an inverse operation to 'lines'. -- It joins lines, after appending a terminating newline to each. unlines :: [String] -> String #ifdef USE_REPORT_PRELUDE unlines = concatMap (++ "\n") #else -- HBC version (stolen) -- here's a more efficient version unlines [] = [] unlines (l:ls) = l ++ '\n' : unlines ls #endif -- | 'words' breaks a string up into a list of words, which were delimited -- by white space. words :: String -> [String] words s = case dropWhile {-partain:Char.-}isSpace s of "" -> [] s' -> w : words s'' where (w, s'') = break {-partain:Char.-}isSpace s' -- | 'unwords' is an inverse operation to 'words'. -- It joins words with separating spaces. unwords :: [String] -> String #ifdef USE_REPORT_PRELUDE unwords [] = "" unwords ws = foldr1 (\w s -> w ++ ' ':s) ws #else -- HBC version (stolen) -- here's a more efficient version unwords [] = "" unwords [w] = w unwords (w:ws) = w ++ ' ' : unwords ws #endif
spacekitteh/smcghc
libraries/base/Data/OldList.hs
bsd-3-clause
40,237
0
16
12,088
7,229
4,173
3,056
426
8
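A minimal usage sketch for the sortOn and unfoldr functions defined in the Data.OldList record above. It imports the versions re-exported from Data.List rather than the internal module, and the example values are made up.

module SortDemo where

import Data.List (sortOn, unfoldr)

-- Decorate-sort-undecorate: the key function (here negate) is evaluated
-- once per element, so the key is not recomputed during comparisons.
descending :: [Int] -> [Int]
descending = sortOn negate

-- unfoldr builds a list from a seed; this is the countdown example from
-- the documentation above.
countdown :: Int -> [Int]
countdown = unfoldr (\b -> if b == 0 then Nothing else Just (b, b - 1))

main :: IO ()
main = do
  print (descending [3, 1, 2])  -- [3,2,1]
  print (countdown 10)          -- [10,9,8,7,6,5,4,3,2,1]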
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveDataTypeable #-}

module Site where

import qualified Data.Text as T
import Data.Typeable
import Data.Data
import Data.ConfigFile
import Data.Either.Utils

import Config

data Site = Site { title :: T.Text
                 , style :: T.Text
                 , author :: T.Text
                 , tagline :: T.Text
                 , url :: T.Text
                 } deriving (Data, Typeable, Show)

defaultSite :: Site
defaultSite = Site { title = "Default Blog"
                   , style = "/style.css"
                   , author = "Your Name"
                   , tagline = "Description of this blog"
                   , url = "http://example.com"
                   }

readSite :: IO Site
readSite = do
    t <- getSetting "site" "title"
    s <- getSetting "site" "style"
    a <- getSetting "site" "author"
    g <- getSetting "site" "tagline"
    u <- getSetting "site" "url"
    return $ Site { title = T.pack t
                  , style = T.pack s
                  , author = T.pack a
                  , tagline = T.pack g
                  , url = T.pack u
                  }
kaashif/muon
src/Site.hs
bsd-3-clause
1,123
0
11
443
276
154
122
32
1
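A standalone sketch showing how a caller might override only some fields of the defaultSite record above using record-update syntax. The Site type is copied here (without the Data/Typeable deriving) so the example compiles on its own; mySite is a hypothetical value.

{-# LANGUAGE OverloadedStrings #-}
module SiteDemo where

import qualified Data.Text as T

-- A trimmed-down copy of the Site record from the module above, kept
-- only so this example is self-contained.
data Site = Site
  { title   :: T.Text
  , style   :: T.Text
  , author  :: T.Text
  , tagline :: T.Text
  , url     :: T.Text
  } deriving Show

defaultSite :: Site
defaultSite = Site
  { title   = "Default Blog"
  , style   = "/style.css"
  , author  = "Your Name"
  , tagline = "Description of this blog"
  , url     = "http://example.com"
  }

-- Record-update syntax overrides only the fields the caller cares about,
-- falling back to the defaults for everything else.
mySite :: Site
mySite = defaultSite { title = "My Blog", url = "https://blog.example.org" }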
module Graphics.UI.SDL.Audio ( -- * Audio Device Management, Playing and Recording audioInit, audioQuit, buildAudioCVT, closeAudio, closeAudioDevice, convertAudio, freeWAV, getAudioDeviceName, getAudioDeviceStatus, getAudioDriver, getAudioStatus, getCurrentAudioDriver, getNumAudioDevices, getNumAudioDrivers, loadWAV, loadWAV_RW, lockAudio, lockAudioDevice, mixAudio, mixAudioFormat, openAudio, openAudioDevice, pauseAudio, pauseAudioDevice, unlockAudio, unlockAudioDevice ) where import Data.Word import Foreign.C.String import Foreign.C.Types import Foreign.Ptr import Graphics.UI.SDL.Enum import Graphics.UI.SDL.Filesystem import Graphics.UI.SDL.Types foreign import ccall "SDL.h SDL_AudioInit" audioInit :: CString -> IO CInt foreign import ccall "SDL.h SDL_AudioQuit" audioQuit :: IO () foreign import ccall "SDL.h SDL_BuildAudioCVT" buildAudioCVT :: Ptr AudioCVT -> AudioFormat -> Word8 -> CInt -> AudioFormat -> Word8 -> CInt -> IO CInt foreign import ccall "SDL.h SDL_CloseAudio" closeAudio :: IO () foreign import ccall "SDL.h SDL_CloseAudioDevice" closeAudioDevice :: AudioDeviceID -> IO () foreign import ccall "SDL.h SDL_ConvertAudio" convertAudio :: Ptr AudioCVT -> IO CInt foreign import ccall "SDL.h SDL_FreeWAV" freeWAV :: Ptr Word8 -> IO () foreign import ccall "SDL.h SDL_GetAudioDeviceName" getAudioDeviceName :: CInt -> CInt -> IO CString foreign import ccall "SDL.h SDL_GetAudioDeviceStatus" getAudioDeviceStatus :: AudioDeviceID -> IO AudioStatus foreign import ccall "SDL.h SDL_GetAudioDriver" getAudioDriver :: CInt -> IO CString foreign import ccall "SDL.h SDL_GetAudioStatus" getAudioStatus :: IO AudioStatus foreign import ccall "SDL.h SDL_GetCurrentAudioDriver" getCurrentAudioDriver :: IO CString foreign import ccall "SDL.h SDL_GetNumAudioDevices" getNumAudioDevices :: CInt -> IO CInt foreign import ccall "SDL.h SDL_GetNumAudioDrivers" getNumAudioDrivers :: IO CInt foreign import ccall "SDL.h SDL_LoadWAV_RW" loadWAV_RW :: Ptr RWops -> CInt -> Ptr AudioSpec -> Ptr (Ptr Word8) -> Ptr Word32 -> IO (Ptr AudioSpec) foreign import ccall "SDL.h SDL_LockAudio" lockAudio :: IO () foreign import ccall "SDL.h SDL_LockAudioDevice" lockAudioDevice :: AudioDeviceID -> IO () foreign import ccall "SDL.h SDL_MixAudio" mixAudio :: Ptr Word8 -> Ptr Word8 -> Word32 -> CInt -> IO () foreign import ccall "SDL.h SDL_MixAudioFormat" mixAudioFormat :: Ptr Word8 -> Ptr Word8 -> AudioFormat -> Word32 -> CInt -> IO () foreign import ccall "SDL.h SDL_OpenAudio" openAudio :: Ptr AudioSpec -> Ptr AudioSpec -> IO CInt foreign import ccall "SDL.h SDL_OpenAudioDevice" openAudioDevice :: CString -> CInt -> Ptr AudioSpec -> Ptr AudioSpec -> CInt -> IO AudioDeviceID foreign import ccall "SDL.h SDL_PauseAudio" pauseAudio :: CInt -> IO () foreign import ccall "SDL.h SDL_PauseAudioDevice" pauseAudioDevice :: AudioDeviceID -> CInt -> IO () foreign import ccall "SDL.h SDL_UnlockAudio" unlockAudio :: IO () foreign import ccall "SDL.h SDL_UnlockAudioDevice" unlockAudioDevice :: AudioDeviceID -> IO () loadWAV :: CString -> Ptr AudioSpec -> Ptr (Ptr Word8) -> Ptr Word32 -> IO (Ptr AudioSpec) loadWAV file spec audio_buf audio_len = do rw <- withCString "rb" $ rwFromFile file loadWAV_RW rw 1 spec audio_buf audio_len
ekmett/sdl2
Graphics/UI/SDL/Audio.hs
bsd-3-clause
3,262
52
13
449
898
473
425
63
1
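A sketch of driving the loadWAV wrapper above with the usual out-parameter pattern: allocate space for the spec, the buffer pointer and the length, call the binding, then peek the results. It assumes AudioSpec (and a Storable instance for it) is exported by Graphics.UI.SDL.Types, as the imports above suggest; error handling and the matching freeWAV call are left out.

module LoadWAVDemo where

import Data.Word (Word32, Word8)
import Foreign.C.String (withCString)
import Foreign.Marshal.Alloc (alloca)
import Foreign.Ptr (Ptr)
import Foreign.Storable (peek)

import Graphics.UI.SDL.Audio (loadWAV)
import Graphics.UI.SDL.Types (AudioSpec)

-- Load a WAV file, returning the decoded spec, the sample buffer and its
-- length in bytes. SDL signals failure by returning a null pointer, which
-- a real caller should check before peeking; the buffer must eventually
-- be released with freeWAV.
loadWAVFile :: FilePath -> IO (AudioSpec, Ptr Word8, Word32)
loadWAVFile path =
  withCString path $ \cpath ->
    alloca $ \specPtr ->
      alloca $ \bufPtr ->
        alloca $ \lenPtr -> do
          _ <- loadWAV cpath specPtr bufPtr lenPtr
          spec <- peek specPtr
          buf  <- peek bufPtr
          len  <- peek lenPtr
          return (spec, buf, len)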
-- | Diff- and merge-related things. module Guide.Diff ( -- * Diffing Diff(..), DiffChunk(..), diff, -- * Merging merge, -- * Tokenizing tokenize, ) where import Imports import Guide.Diff.Merge (merge) import Guide.Diff.Tokenize (tokenize) import Guide.Utils (makeClassWithLenses) import qualified Data.Patch as PV -- | Result of a diff. data Diff = Diff { diffContextAbove :: [Text], -- ^ Context (unchanged parts) -- above the differing part diffContextBelow :: [Text], -- ^ Context below the differing part diffLeft :: [DiffChunk], -- ^ Will contain only 'Deleted' and 'Plain' diffRight :: [DiffChunk] -- ^ Will contain only 'Added' and 'Plain' } deriving (Show) data DiffChunk = Deleted Text -- ^ Something was deleted (from the left side) | Added Text -- ^ Something was added (to the right side) | Plain Text -- ^ This part should be rendered as not modified deriving (Eq, Show) makeClassWithLenses ''Diff diff :: Text -- ^ Original text -> Text -- ^ Edited text -> Diff diff (tokenize -> orig) (tokenize -> edit) = trimDiff (diffL (PV.hunks diffBA (toVector edit'))) (diffR (PV.hunks diffAB (toVector orig'))) & _diffContextAbove %~ (prefix <>) & _diffContextBelow %~ (<> suffix) where -- we find common parts in advance because diffs are O(mn) and removing -- big unchanged parts in advance helps us (prefix, (orig', edit'), suffix) = commonParts orig edit -- then we compute orig→edit and edit→orig diffs diffAB = PV.diff (toVector orig') (toVector edit') diffBA = PV.inverse diffAB -- | Create a diff for the right (edited) part. We only want to highlight -- parts which were inserted or replaced. diffR :: PV.Hunks Text -> [DiffChunk] diffR = removeExtraAdded . concatMap hunkToChunk where hunkToChunk (v, PV.Inserted) = [Added (mconcat (toList v))] hunkToChunk (v, PV.Replaced) = [Added (mconcat (toList v))] hunkToChunk (v, PV.Unchanged) = map Plain (toList v) -- it's useful to report deleted things as well because then we can mark -- them with tiny rectangles like “insert here” hunkToChunk (_, PV.Deleted) = [Added ""] -- however, we don't need them if there's already an addition marked there removeExtraAdded (Added "" : Added x : xs) = removeExtraAdded (Added x : xs) removeExtraAdded (Added x : Added "" : xs) = removeExtraAdded (Added x : xs) removeExtraAdded (x : xs) = x : removeExtraAdded xs removeExtraAdded [] = [] -- | Create a diff for the left (original) part. We only want to highlight -- parts which were deleted or replaced. -- -- This function should receive a diff that goes in reverse (i.e. from edited -- text to original text) diffL :: PV.Hunks Text -> [DiffChunk] diffL = removeExtraDeleted . concatMap hunkToChunk where -- Since the diff is edit→orig, this code might make not much sense at -- first. When something was “inserted” to original text when going -- edit→orig, it actually means that it was deleted from the original -- text when going orig→edit, and thus we want to render it as deleted. 
hunkToChunk (v, PV.Inserted) = [Deleted (mconcat (toList v))] hunkToChunk (v, PV.Replaced) = [Deleted (mconcat (toList v))] hunkToChunk (v, PV.Unchanged) = map Plain (toList v) hunkToChunk (_, PV.Deleted) = [Deleted ""] removeExtraDeleted (Deleted "" : Deleted x : xs) = removeExtraDeleted (Deleted x : xs) removeExtraDeleted (Deleted x : Deleted "" : xs) = removeExtraDeleted (Deleted x : xs) removeExtraDeleted (x : xs) = x : removeExtraDeleted xs removeExtraDeleted [] = [] -- | In a bunch of chunks, find only the part that was changed trimDiff :: [DiffChunk] -- ^ The diff after 'diffL' -> [DiffChunk] -- ^ The diff after 'diffR' -> Diff trimDiff a b = Diff { diffContextAbove = map getPlain prefix, diffContextBelow = map getPlain suffix, diffLeft = a', diffRight = b' } where (prefix, (a', b'), suffix) = commonParts a b -- since chunks in 'a' contain Deleted and Plain, and chunks in 'b' -- contain Added and Plain, the only equal parts will be Plain getPlain (Plain x) = x getPlain x = error ("trimDiff: impossible: " ++ show x) ---------------------------------------------------------------------------- -- Utils ---------------------------------------------------------------------------- -- | Find longest common prefix commonPrefix :: Eq a => [a] -> [a] -> ([a], ([a], [a])) commonPrefix = go [] where go p [] bs = (reverse p, ([], bs)) go p as [] = (reverse p, (as, [])) go p (a:as) (b:bs) | a == b = go (a:p) as bs | otherwise = (reverse p, (a:as, b:bs)) -- | Find longest common suffix commonSuffix :: Eq a => [a] -> [a] -> (([a], [a]), [a]) commonSuffix a b = ((reverse neqA, reverse neqB), reverse eq) where (eq, (neqA, neqB)) = commonPrefix (reverse a) (reverse b) -- | Find longest common prefix and suffix commonParts :: Eq a => [a] -> [a] -> ([a], ([a], [a]), [a]) commonParts a b = (prefix, (a'', b''), suffix) where (prefix, (a', b')) = commonPrefix a b ((a'', b''), suffix) = commonSuffix a' b'
aelve/guide
back/src/Guide/Diff.hs
bsd-3-clause
5,333
0
15
1,260
1,468
817
651
-1
-1
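A small standalone illustration of the commonPrefix/commonSuffix/commonParts helpers from Guide.Diff above. The definitions are copied verbatim so the example runs without the rest of the package; the token lists stand in for the output of Guide.Diff.Tokenize.tokenize.

module CommonPartsDemo where

-- Copied from the module above: longest common prefix of two lists,
-- returning the prefix and the two leftover tails.
commonPrefix :: Eq a => [a] -> [a] -> ([a], ([a], [a]))
commonPrefix = go []
  where
    go p [] bs = (reverse p, ([], bs))
    go p as [] = (reverse p, (as, []))
    go p (a:as) (b:bs)
      | a == b    = go (a:p) as bs
      | otherwise = (reverse p, (a:as, b:bs))

-- Longest common suffix, implemented by reversing both lists.
commonSuffix :: Eq a => [a] -> [a] -> (([a], [a]), [a])
commonSuffix a b = ((reverse neqA, reverse neqB), reverse eq)
  where (eq, (neqA, neqB)) = commonPrefix (reverse a) (reverse b)

-- Common prefix and suffix at once, leaving only the differing middle.
commonParts :: Eq a => [a] -> [a] -> ([a], ([a], [a]), [a])
commonParts a b = (prefix, (a'', b''), suffix)
  where
    (prefix, (a', b'))   = commonPrefix a b
    ((a'', b''), suffix) = commonSuffix a' b'

main :: IO ()
main =
  print (commonParts ["the", "quick", "brown", "fox"]
                     ["the", "slow", "brown", "fox"])
  -- (["the"],(["quick"],["slow"]),["brown","fox"])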
module Three where

import Data.List (nub)
import Data.List.Split (chunksOf)

type Coord = (Int, Int)

numDistinctLocations :: [Coord] -> Int
numDistinctLocations = length . nub

locationsVisited :: String -> [Coord]
locationsVisited = scanl followArrow (0,0)

followArrow :: Coord -> Char -> Coord
followArrow (x, y) arrow = newLocation
  where
    newLocation = case arrow of
      '^' -> (x, y - 1)
      'v' -> (x, y + 1)
      '>' -> (x + 1, y)
      '<' -> (x - 1, y)
      _   -> error "bad arrow"

locationsVisitedPart2 :: String -> [Coord]
locationsVisitedPart2 arrows = locationsVisited santa ++ locationsVisited robot
  where
    successiveArrows = chunksOf 2 arrows
    santa = map (!! 1) successiveArrows
    robot = map head successiveArrows

three :: IO (Int, Int)
three = do
  text <- readFile "input/3.txt"
  return ( numDistinctLocations $ locationsVisited text
         , numDistinctLocations $ locationsVisitedPart2 text
         )
purcell/adventofcodeteam
app/Three.hs
bsd-3-clause
996
0
11
261
320
175
145
27
5
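A small sketch of the part-2 splitting used in Three.hs above: chunksOf 2 pairs up the moves, then the heads of the pairs go to one walker and the second elements to the other. The example input string is made up, and splitMoves is a hypothetical helper.

module ThreeDemo where

import Data.List.Split (chunksOf)

-- Split an arrow string into the moves for the two walkers, the same way
-- locationsVisitedPart2 does. Note that map (!! 1) assumes an even-length
-- input; an odd trailing move would leave a singleton chunk and crash.
splitMoves :: String -> (String, String)
splitMoves arrows = (map head pairs, map (!! 1) pairs)
  where pairs = chunksOf 2 arrows

main :: IO ()
main = print (splitMoves "^v^v^v")  -- ("^^^","vvv")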
{-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE ExplicitForAll #-} {-# LANGUAGE LambdaCase #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE ScopedTypeVariables #-} module API.Bitbucket where import ClassyPrelude import Control.Exception (SomeException) import Control.Lens import Data.Aeson import Data.Aeson.Types import qualified Data.ByteString.Char8 as BS import Data.Maybe (fromMaybe) import Data.Time import Network.HTTP.Types.Status (ok200) import Network.Wreq import Util bitbucketBaseUrl :: String bitbucketBaseUrl = "https://api.bitbucket.org/" data SimpleLink = SimpleLink { href :: Text } deriving (Eq, Ord, Show, Generic) instance ToJSON SimpleLink where toJSON = genericToJSON jsonOptions instance FromJSON SimpleLink where parseJSON = genericParseJSON jsonOptions data Issue = Issue { id :: Integer , title :: Text , reporter :: User , assignee :: Maybe User , content :: MarkupContent , createdOn :: ZonedTime , updatedOn :: ZonedTime , state :: Text , kind :: Text , priority :: Text , version :: Maybe Text , component :: Maybe Component , milestone :: Maybe Text , watches :: Integer , votes :: Integer , repository :: Repository , links :: IssueLinks , type_ :: Text } deriving (Show, Generic) instance ToJSON Issue where toJSON = genericToJSON jsonOptions instance FromJSON Issue where parseJSON = genericParseJSON jsonOptions data Component = Component { cname :: Text , clinks :: SelfOnlyLink } deriving (Eq, Ord, Show, Generic) instance ToJSON Component where toJSON = genericToJSON $ withDropPrefix "c" jsonOptions instance FromJSON Component where parseJSON = genericParseJSON $ withDropPrefix "c" jsonOptions data IssueLinks = IssueLinks { self :: SimpleLink , comments :: SimpleLink , watch :: SimpleLink , attachments :: SimpleLink , html :: SimpleLink } deriving (Eq, Ord, Show, Generic) instance ToJSON IssueLinks where toJSON = genericToJSON jsonOptions instance FromJSON IssueLinks where parseJSON = genericParseJSON jsonOptions data Repository = Repository { name :: Text , fullName :: Text } deriving (Eq, Ord, Show, Generic) instance ToJSON Repository where toJSON = genericToJSON jsonOptions instance FromJSON Repository where parseJSON = genericParseJSON jsonOptions data MarkupContent = MarkupContent { icraw :: Text , icmarkup :: Text , ichtml :: Text } deriving (Eq, Ord, Show, Generic) instance ToJSON MarkupContent where toJSON = genericToJSON $ withDropPrefix "ic" jsonOptions instance FromJSON MarkupContent where parseJSON = genericParseJSON $ withDropPrefix "ic" jsonOptions data User = User { username :: Text , displayName :: Text } deriving (Eq, Ord, Show, Generic) instance ToJSON User where toJSON = genericToJSON jsonOptions instance FromJSON User where parseJSON = genericParseJSON jsonOptions data PagedRequest a = PagedRequest { values :: [a] , size :: Integer , page :: Integer , pagelen :: Integer , next :: Maybe Text } deriving (Show, Generic) instance ToJSON a => ToJSON (PagedRequest a) where toJSON = genericToJSON jsonOptions instance FromJSON a => FromJSON (PagedRequest a) where parseJSON = genericParseJSON jsonOptions data CommentShortInfo = CommentShortInfo { commInfoId :: Integer , commInfoLinks :: SelfOnlyLink } deriving (Eq, Ord, Show, Generic) instance ToJSON CommentShortInfo where toJSON = genericToJSON $ withDropPrefix "commeInfo" jsonOptions instance FromJSON CommentShortInfo where parseJSON = genericParseJSON $ withDropPrefix "commInfo" jsonOptions data Comment = Comment { commentLinks :: CommentLinks , commentContent :: MarkupContent , commentCreatedOn :: ZonedTime 
, commentUser :: User , commentUpdatedOn :: Maybe ZonedTime , commentIssue :: IssueShortInfo } deriving (Show, Generic) instance ToJSON Comment where toJSON = genericToJSON $ withDropPrefix "comment" jsonOptions instance FromJSON Comment where parseJSON = genericParseJSON $ withDropPrefix "comment" jsonOptions data CommentLinks = CommentLinks { commLinkSelf :: SimpleLink , commLinkHtml :: SimpleLink } deriving (Eq, Ord, Show, Generic) instance ToJSON CommentLinks where toJSON = genericToJSON $ withDropPrefix "commLink" jsonOptions instance FromJSON CommentLinks where parseJSON = genericParseJSON $ withDropPrefix "commLink" jsonOptions data IssueShortInfo = IssueShortInfo { isiLinks :: SelfOnlyLink , isiTitle :: Text , isiId :: Integer , isiRepository :: RepositoryShortInfo } deriving (Show, Generic) instance ToJSON IssueShortInfo where toJSON = genericToJSON $ withDropPrefix "isi" jsonOptions instance FromJSON IssueShortInfo where parseJSON = genericParseJSON $ withDropPrefix "isi" jsonOptions data SelfOnlyLink = SelfOnlyLink { solSelf :: SimpleLink } deriving (Eq, Ord, Show, Generic) instance ToJSON SelfOnlyLink where toJSON = genericToJSON $ withDropPrefix "sol" jsonOptions instance FromJSON SelfOnlyLink where parseJSON = genericParseJSON $ withDropPrefix "sol" jsonOptions data RepositoryShortInfo = RepositoryShortInfo { rsiLinks :: RepositoryShortInfoLinks , rsiType :: Text , rsiName :: Text , rsiFullName :: Text , rsiUUID :: Text } deriving (Eq, Ord, Show, Generic) instance ToJSON RepositoryShortInfo where toJSON = genericToJSON $ withDropPrefix "rsi" jsonOptions instance FromJSON RepositoryShortInfo where parseJSON = genericParseJSON $ withDropPrefix "rsi" jsonOptions data RepositoryShortInfoLinks = RepositoryShortInfoLinks { rsilSelf :: SimpleLink , rsilHtml :: SimpleLink , rsilAvatar :: SimpleLink } deriving (Eq, Ord, Show, Generic) instance ToJSON RepositoryShortInfoLinks where toJSON = genericToJSON $ withDropPrefix "rsil" jsonOptions instance FromJSON RepositoryShortInfoLinks where parseJSON = genericParseJSON $ withDropPrefix "rsil" jsonOptions type ReqUrl a = TypeTag a String type APIVersion a = TypeTag a String class FromJSON result => BitbucketRequestable reqData result where getUrl :: reqData -> ReqUrl result apiVersion :: reqData -> APIVersion result instance BitbucketRequestable Repository Issue where getUrl Repository {fullName = n} = toTag $ unpack n <> "/issues" apiVersion _ = toTag "2.0" bitbucketGETPaged :: forall reqData result. BitbucketRequestable reqData result => reqData -> IO (Either String (PagedRequest result)) bitbucketGETPaged reqData = flip fmap (try $ get url) $ \case Left err -> Left $ show (err :: SomeException) Right r -> if r ^. responseStatus == ok200 then eitherDecode $ r ^. responseBody else Left $ BS.unpack $ r ^. responseStatus . statusMessage where url = bitbucketBaseUrl <> unTTag (apiVersion reqData :: APIVersion result) <> "/" <> unTTag (getUrl reqData :: ReqUrl result)
JustusAdam/bitbucket-github-migrate
src/API/Bitbucket.hs
bsd-3-clause
7,461
0
14
1,826
1,791
984
807
-1
-1
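The withDropPrefix and jsonOptions helpers used throughout API.Bitbucket above come from the project's Util module, which is not shown. This standalone sketch reproduces the general pattern with plain aeson options: strip a record-field prefix such as "sol" before serialising. The exact casing rules of the real helper may differ, and the field type is simplified to String.

{-# LANGUAGE DeriveGeneric #-}
module DropPrefixDemo where

import Data.Aeson
import Data.Char (toLower)
import GHC.Generics (Generic)

-- Strip a record-field prefix and lower-case the first remaining letter,
-- so solSelf serialises as "self".
dropPrefix :: String -> String -> String
dropPrefix p s = lowerFirst (drop (length p) s)
  where
    lowerFirst (c:cs) = toLower c : cs
    lowerFirst []     = []

data SelfOnlyLink = SelfOnlyLink { solSelf :: String }
  deriving (Show, Generic)

instance ToJSON SelfOnlyLink where
  toJSON = genericToJSON defaultOptions { fieldLabelModifier = dropPrefix "sol" }

instance FromJSON SelfOnlyLink where
  parseJSON = genericParseJSON defaultOptions { fieldLabelModifier = dropPrefix "sol" }

main :: IO ()
main = print (encode (SelfOnlyLink { solSelf = "https://example.com" }))
-- "{\"self\":\"https://example.com\"}"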
{-|
The main box module.
-}
module Acme.Box
    ( box
    ) where

-- | The main box function, actually empty!
box :: IO ()
box = do
    return ()
    -- inspected by #42
drwebb/acme-box
src/Acme/Box.hs
bsd-3-clause
167
0
8
46
37
21
16
5
1
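A trivial, hypothetical caller for the Acme.Box module above; running box simply performs the empty action.

module Main where

import Acme.Box (box)

main :: IO ()
main = box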
{-# LANGUAGE CPP, RecordWildCards, NamedFieldPuns, RankNTypes #-} -- | Planning how to build everything in a project. -- module Distribution.Client.ProjectPlanning ( -- * elaborated install plan types ElaboratedInstallPlan, ElaboratedConfiguredPackage(..), ElaboratedPlanPackage, ElaboratedSharedConfig(..), ElaboratedReadyPackage, BuildStyle(..), CabalFileText, --TODO: [code cleanup] these types should live with execution, not with -- plan definition. Need to better separate InstallPlan definition. GenericBuildResult(..), BuildResult, BuildSuccess(..), BuildFailure(..), DocsResult(..), TestsResult(..), -- * Producing the elaborated install plan rebuildInstallPlan, -- * Build targets PackageTarget(..), ComponentTarget(..), SubComponentTarget(..), showComponentTarget, -- * Selecting a plan subset pruneInstallPlanToTargets, -- * Utils required for building pkgHasEphemeralBuildTargets, pkgBuildTargetWholeComponents, -- * Setup.hs CLI flags for building setupHsScriptOptions, setupHsConfigureFlags, setupHsBuildFlags, setupHsBuildArgs, setupHsReplFlags, setupHsReplArgs, setupHsCopyFlags, setupHsRegisterFlags, setupHsHaddockFlags, packageHashInputs, -- TODO: [code cleanup] utils that should live in some shared place? createPackageDBIfMissing ) where import Distribution.Client.ProjectPlanning.Types import Distribution.Client.PackageHash import Distribution.Client.RebuildMonad import Distribution.Client.ProjectConfig import Distribution.Client.ProjectPlanOutput import Distribution.Client.Types hiding ( BuildResult, BuildSuccess(..), BuildFailure(..) , DocsResult(..), TestsResult(..) ) import qualified Distribution.Client.InstallPlan as InstallPlan import Distribution.Client.Dependency import Distribution.Client.Dependency.Types import qualified Distribution.Client.IndexUtils as IndexUtils import Distribution.Client.Targets (userToPackageConstraint) import Distribution.Client.DistDirLayout import Distribution.Client.SetupWrapper import Distribution.Client.JobControl import Distribution.Client.FetchUtils import qualified Hackage.Security.Client as Sec import Distribution.Client.Setup hiding (packageName, cabalVersion) import Distribution.Utils.NubList import qualified Distribution.Solver.Types.ComponentDeps as CD import Distribution.Solver.Types.ComponentDeps (ComponentDeps) import Distribution.Solver.Types.ConstraintSource import Distribution.Solver.Types.LabeledPackageConstraint import Distribution.Solver.Types.OptionalStanza import Distribution.Solver.Types.PackageFixedDeps import Distribution.Solver.Types.PkgConfigDb import Distribution.Solver.Types.Settings import Distribution.Solver.Types.SolverId import Distribution.Solver.Types.SolverPackage import Distribution.Solver.Types.SourcePackage import Distribution.Package hiding (InstalledPackageId, installedPackageId) import Distribution.System import qualified Distribution.PackageDescription as Cabal import qualified Distribution.PackageDescription as PD import qualified Distribution.PackageDescription.Configuration as PD import Distribution.Simple.PackageIndex (InstalledPackageIndex) import qualified Distribution.Simple.PackageIndex as PackageIndex import Distribution.Simple.Compiler hiding (Flag) import qualified Distribution.Simple.GHC as GHC --TODO: [code cleanup] eliminate import qualified Distribution.Simple.GHCJS as GHCJS --TODO: [code cleanup] eliminate import Distribution.Simple.Program import Distribution.Simple.Program.Db import Distribution.Simple.Program.Find import qualified Distribution.Simple.Setup as Cabal import 
Distribution.Simple.Setup (Flag, toFlag, flagToMaybe, flagToList, fromFlagOrDefault) import qualified Distribution.Simple.Configure as Cabal import qualified Distribution.Simple.LocalBuildInfo as Cabal import Distribution.Simple.LocalBuildInfo (ComponentName(..)) import qualified Distribution.Simple.Register as Cabal import qualified Distribution.Simple.InstallDirs as InstallDirs import qualified Distribution.Simple.BuildTarget as Cabal import Distribution.Simple.Utils hiding (matchFileGlob) import Distribution.Version import Distribution.Verbosity import Distribution.Text import Data.Map (Map) import qualified Data.Map as Map import Data.Set (Set) import qualified Data.Set as Set import qualified Data.Graph as Graph import qualified Data.Tree as Tree #if !MIN_VERSION_base(4,8,0) import Control.Applicative #endif import Control.Monad import Control.Monad.State as State import Control.Exception import Data.List import Data.Maybe import Data.Either import Data.Monoid import Data.Function import System.FilePath import System.Directory (doesDirectoryExist) ------------------------------------------------------------------------------ -- * Elaborated install plan ------------------------------------------------------------------------------ -- "Elaborated" -- worked out with great care and nicety of detail; -- executed with great minuteness: elaborate preparations; -- elaborate care. -- -- So here's the idea: -- -- Rather than a miscellaneous collection of 'ConfigFlags', 'InstallFlags' etc -- all passed in as separate args and which are then further selected, -- transformed etc during the execution of the build. Instead we construct -- an elaborated install plan that includes everything we will need, and then -- during the execution of the plan we do as little transformation of this -- info as possible. -- -- So we're trying to split the work into two phases: construction of the -- elaborated install plan (which as far as possible should be pure) and -- then simple execution of that plan without any smarts, just doing what the -- plan says to do. -- -- So that means we need a representation of this fully elaborated install -- plan. The representation consists of two parts: -- -- * A 'ElaboratedInstallPlan'. This is a 'GenericInstallPlan' with a -- representation of source packages that includes a lot more detail about -- that package's individual configuration -- -- * A 'ElaboratedSharedConfig'. Some package configuration is the same for -- every package in a plan. Rather than duplicate that info every entry in -- the 'GenericInstallPlan' we keep that separately. -- -- The division between the shared and per-package config is /not set in stone -- for all time/. For example if we wanted to generalise the install plan to -- describe a situation where we want to build some packages with GHC and some -- with GHCJS then the platform and compiler would no longer be shared between -- all packages but would have to be per-package (probably with some sanity -- condition on the graph structure). -- -- Refer to ProjectPlanning.Types for details of these important types: -- type ElaboratedInstallPlan = ... -- type ElaboratedPlanPackage = ... -- data ElaboratedSharedConfig = ... -- data ElaboratedConfiguredPackage = ... -- data BuildStyle = -- | Check that an 'ElaboratedConfiguredPackage' actually makes -- sense under some 'ElaboratedSharedConfig'. 
sanityCheckElaboratedConfiguredPackage :: ElaboratedSharedConfig -> ElaboratedConfiguredPackage -> a -> a sanityCheckElaboratedConfiguredPackage sharedConfig pkg@ElaboratedConfiguredPackage{..} ret = -- we should only have enabled stanzas that actually can be built -- (according to the solver) assert (pkgStanzasEnabled `Set.isSubsetOf` pkgStanzasAvailable) -- the stanzas that the user explicitly requested should be -- enabled (by the previous test, they are also available) . assert (Map.keysSet (Map.filter id pkgStanzasRequested) `Set.isSubsetOf` pkgStanzasEnabled) -- the stanzas explicitly disabled should not be available . assert (Set.null (Map.keysSet (Map.filter not pkgStanzasRequested) `Set.intersection` pkgStanzasAvailable)) -- either a package is being built inplace, or the -- 'installedPackageId' we assigned is consistent with -- the 'hashedInstalledPackageId' we would compute from -- the elaborated configured package . assert (pkgBuildStyle == BuildInplaceOnly || installedPackageId pkg == hashedInstalledPackageId (packageHashInputs sharedConfig pkg)) -- either a package is built inplace, or we are not attempting to -- build any test suites or benchmarks (we never build these -- for remote packages!) . assert (pkgBuildStyle == BuildInplaceOnly || Set.null pkgStanzasAvailable) $ ret ------------------------------------------------------------------------------ -- * Deciding what to do: making an 'ElaboratedInstallPlan' ------------------------------------------------------------------------------ -- | Return an up-to-date elaborated install plan and associated config. -- -- Two variants of the install plan are returned: with and without packages -- from the store. That is, the \"improved\" plan where source packages are -- replaced by pre-existing installed packages from the store (when their ids -- match), and also the original elaborated plan which uses primarily source -- packages. -- The improved plan is what we use for building, but the original elaborated -- plan is useful for reporting and configuration. For example the @freeze@ -- command needs the source package info to know about flag choices and -- dependencies of executables and setup scripts. 
-- rebuildInstallPlan :: Verbosity -> FilePath -> DistDirLayout -> CabalDirLayout -> ProjectConfig -> IO ( ElaboratedInstallPlan -- with store packages , ElaboratedInstallPlan -- with source packages , ElaboratedSharedConfig , ProjectConfig ) -- ^ @(improvedPlan, elaboratedPlan, _, _)@ rebuildInstallPlan verbosity projectRootDir distDirLayout@DistDirLayout { distDirectory, distProjectCacheFile, distProjectCacheDirectory } cabalDirLayout@CabalDirLayout { cabalPackageCacheDirectory, cabalStoreDirectory, cabalStorePackageDB } cliConfig = runRebuild projectRootDir $ do progsearchpath <- liftIO $ getSystemSearchPath let cliConfigPersistent = cliConfig { projectConfigBuildOnly = mempty } -- The overall improved plan is cached rerunIfChanged verbosity fileMonitorImprovedPlan -- react to changes in command line args and the path (cliConfigPersistent, progsearchpath) $ do -- And so is the elaborated plan that the improved plan based on (elaboratedPlan, elaboratedShared, projectConfig) <- rerunIfChanged verbosity fileMonitorElaboratedPlan (cliConfigPersistent, progsearchpath) $ do (projectConfig, projectConfigTransient) <- phaseReadProjectConfig localPackages <- phaseReadLocalPackages projectConfig compilerEtc <- phaseConfigureCompiler projectConfig _ <- phaseConfigurePrograms projectConfig compilerEtc solverPlan <- phaseRunSolver projectConfigTransient compilerEtc localPackages (elaboratedPlan, elaboratedShared) <- phaseElaboratePlan projectConfigTransient compilerEtc solverPlan localPackages return (elaboratedPlan, elaboratedShared, projectConfig) -- The improved plan changes each time we install something, whereas -- the underlying elaborated plan only changes when input config -- changes, so it's worth caching them separately. improvedPlan <- phaseImprovePlan elaboratedPlan elaboratedShared phaseMaintainPlanOutputs improvedPlan elaboratedPlan elaboratedShared return (improvedPlan, elaboratedPlan, elaboratedShared, projectConfig) where fileMonitorCompiler = newFileMonitorInCacheDir "compiler" fileMonitorSolverPlan = newFileMonitorInCacheDir "solver-plan" fileMonitorSourceHashes = newFileMonitorInCacheDir "source-hashes" fileMonitorElaboratedPlan = newFileMonitorInCacheDir "elaborated-plan" fileMonitorImprovedPlan = newFileMonitorInCacheDir "improved-plan" newFileMonitorInCacheDir :: Eq a => FilePath -> FileMonitor a b newFileMonitorInCacheDir = newFileMonitor . distProjectCacheFile -- Read the cabal.project (or implicit config) and combine it with -- arguments from the command line -- phaseReadProjectConfig :: Rebuild (ProjectConfig, ProjectConfig) phaseReadProjectConfig = do liftIO $ do info verbosity "Project settings changed, reconfiguring..." 
createDirectoryIfMissingVerbose verbosity False distDirectory createDirectoryIfMissingVerbose verbosity False distProjectCacheDirectory projectConfig <- readProjectConfig verbosity projectRootDir -- The project config comming from the command line includes "build only" -- flags that we don't cache persistently (because like all "build only" -- flags they do not affect the value of the outcome) but that we do -- sometimes using during planning (in particular the http transport) let projectConfigTransient = projectConfig <> cliConfig projectConfigPersistent = projectConfig <> cliConfig { projectConfigBuildOnly = mempty } liftIO $ writeProjectConfigFile (distProjectCacheFile "config") projectConfigPersistent return (projectConfigPersistent, projectConfigTransient) -- Look for all the cabal packages in the project -- some of which may be local src dirs, tarballs etc -- phaseReadLocalPackages :: ProjectConfig -> Rebuild [UnresolvedSourcePackage] phaseReadLocalPackages projectConfig = do localCabalFiles <- findProjectPackages projectRootDir projectConfig mapM (readSourcePackage verbosity) localCabalFiles -- Configure the compiler we're using. -- -- This is moderately expensive and doesn't change that often so we cache -- it independently. -- phaseConfigureCompiler :: ProjectConfig -> Rebuild (Compiler, Platform, ProgramDb) phaseConfigureCompiler ProjectConfig { projectConfigShared = ProjectConfigShared { projectConfigHcFlavor, projectConfigHcPath, projectConfigHcPkg }, projectConfigLocalPackages = PackageConfig { packageConfigProgramPaths, packageConfigProgramArgs, packageConfigProgramPathExtra } } = do progsearchpath <- liftIO $ getSystemSearchPath rerunIfChanged verbosity fileMonitorCompiler (hcFlavor, hcPath, hcPkg, progsearchpath, packageConfigProgramPaths, packageConfigProgramArgs, packageConfigProgramPathExtra) $ do liftIO $ info verbosity "Compiler settings changed, reconfiguring..." result@(_, _, progdb') <- liftIO $ Cabal.configCompilerEx hcFlavor hcPath hcPkg progdb verbosity -- Note that we added the user-supplied program locations and args -- for /all/ programs, not just those for the compiler prog and -- compiler-related utils. In principle we don't know which programs -- the compiler will configure (and it does vary between compilers). -- We do know however that the compiler will only configure the -- programs it cares about, and those are the ones we monitor here. monitorFiles (programsMonitorFiles progdb') return result where hcFlavor = flagToMaybe projectConfigHcFlavor hcPath = flagToMaybe projectConfigHcPath hcPkg = flagToMaybe projectConfigHcPkg progdb = userSpecifyPaths (Map.toList (getMapLast packageConfigProgramPaths)) . userSpecifyArgss (Map.toList (getMapMappend packageConfigProgramArgs)) . modifyProgramSearchPath (++ [ ProgramSearchPathDir dir | dir <- fromNubList packageConfigProgramPathExtra ]) $ defaultProgramDb -- Configuring other programs. -- -- Having configred the compiler, now we configure all the remaining -- programs. This is to check we can find them, and to monitor them for -- changes. -- -- TODO: [required eventually] we don't actually do this yet. -- -- We rely on the fact that the previous phase added the program config for -- all local packages, but that all the programs configured so far are the -- compiler program or related util programs. 
-- phaseConfigurePrograms :: ProjectConfig -> (Compiler, Platform, ProgramDb) -> Rebuild () phaseConfigurePrograms projectConfig (_, _, compilerprogdb) = do -- Users are allowed to specify program locations independently for -- each package (e.g. to use a particular version of a pre-processor -- for some packages). However they cannot do this for the compiler -- itself as that's just not going to work. So we check for this. liftIO $ checkBadPerPackageCompilerPaths (configuredPrograms compilerprogdb) (getMapMappend (projectConfigSpecificPackage projectConfig)) --TODO: [required eventually] find/configure other programs that the -- user specifies. --TODO: [required eventually] find/configure all build-tools -- but note that some of them may be built as part of the plan. -- Run the solver to get the initial install plan. -- This is expensive so we cache it independently. -- phaseRunSolver :: ProjectConfig -> (Compiler, Platform, ProgramDb) -> [UnresolvedSourcePackage] -> Rebuild SolverInstallPlan phaseRunSolver projectConfig@ProjectConfig { projectConfigShared, projectConfigBuildOnly } (compiler, platform, progdb) localPackages = rerunIfChanged verbosity fileMonitorSolverPlan (solverSettings, cabalPackageCacheDirectory, localPackages, localPackagesEnabledStanzas, compiler, platform, programsDbSignature progdb) $ do installedPkgIndex <- getInstalledPackages verbosity compiler progdb platform corePackageDbs sourcePkgDb <- getSourcePackages verbosity withRepoCtx pkgConfigDB <- getPkgConfigDb verbosity progdb --TODO: [code cleanup] it'd be better if the Compiler contained the -- ConfiguredPrograms that it needs, rather than relying on the progdb -- since we don't need to depend on all the programs here, just the -- ones relevant for the compiler. liftIO $ do solver <- chooseSolver verbosity (solverSettingSolver solverSettings) (compilerInfo compiler) notice verbosity "Resolving dependencies..." foldProgress logMsg die return $ planPackages compiler platform solver solverSettings installedPkgIndex sourcePkgDb pkgConfigDB localPackages localPackagesEnabledStanzas where corePackageDbs = [GlobalPackageDB] withRepoCtx = projectConfigWithSolverRepoContext verbosity cabalPackageCacheDirectory projectConfigShared projectConfigBuildOnly solverSettings = resolveSolverSettings projectConfig logMsg message rest = debugNoWrap verbosity message >> rest localPackagesEnabledStanzas = Map.fromList [ (pkgname, stanzas) | pkg <- localPackages , let pkgname = packageName pkg testsEnabled = lookupLocalPackageConfig packageConfigTests projectConfig pkgname benchmarksEnabled = lookupLocalPackageConfig packageConfigBenchmarks projectConfig pkgname stanzas = Map.fromList $ [ (TestStanzas, enabled) | enabled <- flagToList testsEnabled ] ++ [ (BenchStanzas , enabled) | enabled <- flagToList benchmarksEnabled ] ] -- Elaborate the solver's install plan to get a fully detailed plan. This -- version of the plan has the final nix-style hashed ids. -- phaseElaboratePlan :: ProjectConfig -> (Compiler, Platform, ProgramDb) -> SolverInstallPlan -> [SourcePackage loc] -> Rebuild ( ElaboratedInstallPlan , ElaboratedSharedConfig ) phaseElaboratePlan ProjectConfig { projectConfigShared, projectConfigLocalPackages, projectConfigSpecificPackage, projectConfigBuildOnly } (compiler, platform, progdb) solverPlan localPackages = do liftIO $ debug verbosity "Elaborating the install plan..." 
sourcePackageHashes <- rerunIfChanged verbosity fileMonitorSourceHashes (packageLocationsSignature solverPlan) $ getPackageSourceHashes verbosity withRepoCtx solverPlan defaultInstallDirs <- liftIO $ userInstallDirTemplates compiler return $ elaborateInstallPlan platform compiler progdb distDirLayout cabalDirLayout solverPlan localPackages sourcePackageHashes defaultInstallDirs projectConfigShared projectConfigLocalPackages (getMapMappend projectConfigSpecificPackage) where withRepoCtx = projectConfigWithSolverRepoContext verbosity cabalPackageCacheDirectory projectConfigShared projectConfigBuildOnly -- Update the files we maintain that reflect our current build environment. -- In particular we maintain a JSON representation of the elaborated -- install plan. -- -- TODO: [required eventually] maintain the ghc environment file reflecting -- the libs available. This will need to be after plan improvement phase. -- phaseMaintainPlanOutputs :: ElaboratedInstallPlan -> ElaboratedInstallPlan -> ElaboratedSharedConfig -> Rebuild () phaseMaintainPlanOutputs _improvedPlan elaboratedPlan elaboratedShared = do liftIO $ debug verbosity "Updating plan.json" liftIO $ writePlanExternalRepresentation distDirLayout elaboratedPlan elaboratedShared -- Improve the elaborated install plan. The elaborated plan consists -- mostly of source packages (with full nix-style hashed ids). Where -- corresponding installed packages already exist in the store, replace -- them in the plan. -- -- Note that we do monitor the store's package db here, so we will redo -- this improvement phase when the db changes -- including as a result of -- executing a plan and installing things. -- phaseImprovePlan :: ElaboratedInstallPlan -> ElaboratedSharedConfig -> Rebuild ElaboratedInstallPlan phaseImprovePlan elaboratedPlan elaboratedShared = do liftIO $ debug verbosity "Improving the install plan..." recreateDirectory verbosity True storeDirectory storePkgIndex <- getPackageDBContents verbosity compiler progdb platform storePackageDb let improvedPlan = improveInstallPlanWithPreExistingPackages storePkgIndex elaboratedPlan return improvedPlan where storeDirectory = cabalStoreDirectory (compilerId compiler) storePackageDb = cabalStorePackageDB (compilerId compiler) ElaboratedSharedConfig { pkgConfigPlatform = platform, pkgConfigCompiler = compiler, pkgConfigCompilerProgs = progdb } = elaboratedShared programsMonitorFiles :: ProgramDb -> [MonitorFilePath] programsMonitorFiles progdb = [ monitor | prog <- configuredPrograms progdb , monitor <- monitorFileSearchPath (programMonitorFiles prog) (programPath prog) ] -- | Select the bits of a 'ProgramDb' to monitor for value changes. -- Use 'programsMonitorFiles' for the files to monitor. -- programsDbSignature :: ProgramDb -> [ConfiguredProgram] programsDbSignature progdb = [ prog { programMonitorFiles = [] , programOverrideEnv = filter ((/="PATH") . fst) (programOverrideEnv prog) } | prog <- configuredPrograms progdb ] getInstalledPackages :: Verbosity -> Compiler -> ProgramDb -> Platform -> PackageDBStack -> Rebuild InstalledPackageIndex getInstalledPackages verbosity compiler progdb platform packagedbs = do monitorFiles . 
map monitorFileOrDirectory =<< liftIO (IndexUtils.getInstalledPackagesMonitorFiles verbosity compiler packagedbs progdb platform) liftIO $ IndexUtils.getInstalledPackages verbosity compiler packagedbs progdb getPackageDBContents :: Verbosity -> Compiler -> ProgramDb -> Platform -> PackageDB -> Rebuild InstalledPackageIndex getPackageDBContents verbosity compiler progdb platform packagedb = do monitorFiles . map monitorFileOrDirectory =<< liftIO (IndexUtils.getInstalledPackagesMonitorFiles verbosity compiler [packagedb] progdb platform) liftIO $ do createPackageDBIfMissing verbosity compiler progdb packagedb Cabal.getPackageDBContents verbosity compiler packagedb progdb getSourcePackages :: Verbosity -> (forall a. (RepoContext -> IO a) -> IO a) -> Rebuild SourcePackageDb getSourcePackages verbosity withRepoCtx = do (sourcePkgDb, repos) <- liftIO $ withRepoCtx $ \repoctx -> do sourcePkgDb <- IndexUtils.getSourcePackages verbosity repoctx return (sourcePkgDb, repoContextRepos repoctx) monitorFiles . map monitorFile . IndexUtils.getSourcePackagesMonitorFiles $ repos return sourcePkgDb -- | Create a package DB if it does not currently exist. Note that this action -- is /not/ safe to run concurrently. -- createPackageDBIfMissing :: Verbosity -> Compiler -> ProgramDb -> PackageDB -> IO () createPackageDBIfMissing verbosity compiler progdb (SpecificPackageDB dbPath) = do exists <- liftIO $ Cabal.doesPackageDBExist dbPath unless exists $ do createDirectoryIfMissingVerbose verbosity False (takeDirectory dbPath) Cabal.createPackageDB verbosity compiler progdb False dbPath createPackageDBIfMissing _ _ _ _ = return () getPkgConfigDb :: Verbosity -> ProgramDb -> Rebuild PkgConfigDb getPkgConfigDb verbosity progdb = do dirs <- liftIO $ getPkgConfigDbDirs verbosity progdb -- Just monitor the dirs so we'll notice new .pc files. -- Alternatively we could monitor all the .pc files too. forM_ dirs $ \dir -> do dirExists <- liftIO $ doesDirectoryExist dir -- TODO: turn this into a utility function monitorFiles [if dirExists then monitorDirectory dir else monitorNonExistentDirectory dir] liftIO $ readPkgConfigDb verbosity progdb recreateDirectory :: Verbosity -> Bool -> FilePath -> Rebuild () recreateDirectory verbosity createParents dir = do liftIO $ createDirectoryIfMissingVerbose verbosity createParents dir monitorFiles [monitorDirectoryExistence dir] -- | Select the config values to monitor for changes package source hashes. packageLocationsSignature :: SolverInstallPlan -> [(PackageId, PackageLocation (Maybe FilePath))] packageLocationsSignature solverPlan = [ (packageId pkg, packageSource pkg) | InstallPlan.Configured (SolverPackage { solverPkgSource = pkg}) <- InstallPlan.toList solverPlan ] -- | Get the 'HashValue' for all the source packages where we use hashes, -- and download any packages required to do so. -- -- Note that we don't get hashes for local unpacked packages. -- getPackageSourceHashes :: Verbosity -> (forall a. (RepoContext -> IO a) -> IO a) -> SolverInstallPlan -> Rebuild (Map PackageId PackageSourceHash) getPackageSourceHashes verbosity withRepoCtx solverPlan = do -- Determine if and where to get the package's source hash from. -- let allPkgLocations :: [(PackageId, PackageLocation (Maybe FilePath))] allPkgLocations = [ (packageId pkg, packageSource pkg) | InstallPlan.Configured (SolverPackage { solverPkgSource = pkg}) <- InstallPlan.toList solverPlan ] -- Tarballs that were local in the first place. -- We'll hash these tarball files directly. 
localTarballPkgs :: [(PackageId, FilePath)] localTarballPkgs = [ (pkgid, tarball) | (pkgid, LocalTarballPackage tarball) <- allPkgLocations ] -- Tarballs from remote URLs. We must have downloaded these already -- (since we extracted the .cabal file earlier) --TODO: [required eventually] finish remote tarball functionality -- allRemoteTarballPkgs = -- [ (pkgid, ) -- | (pkgid, RemoteTarballPackage ) <- allPkgLocations ] -- Tarballs from repositories, either where the repository provides -- hashes as part of the repo metadata, or where we will have to -- download and hash the tarball. repoTarballPkgsWithMetadata :: [(PackageId, Repo)] repoTarballPkgsWithoutMetadata :: [(PackageId, Repo)] (repoTarballPkgsWithMetadata, repoTarballPkgsWithoutMetadata) = partitionEithers [ case repo of RepoSecure{} -> Left (pkgid, repo) _ -> Right (pkgid, repo) | (pkgid, RepoTarballPackage repo _ _) <- allPkgLocations ] -- For tarballs from repos that do not have hashes available we now have -- to check if the packages were downloaded already. -- (repoTarballPkgsToDownload, repoTarballPkgsDownloaded) <- fmap partitionEithers $ liftIO $ sequence [ do mtarball <- checkRepoTarballFetched repo pkgid case mtarball of Nothing -> return (Left (pkgid, repo)) Just tarball -> return (Right (pkgid, tarball)) | (pkgid, repo) <- repoTarballPkgsWithoutMetadata ] (hashesFromRepoMetadata, repoTarballPkgsNewlyDownloaded) <- -- Avoid having to initialise the repository (ie 'withRepoCtx') if we -- don't have to. (The main cost is configuring the http client.) if null repoTarballPkgsToDownload && null repoTarballPkgsWithMetadata then return (Map.empty, []) else liftIO $ withRepoCtx $ \repoctx -> do -- For tarballs from repos that do have hashes available as part of the -- repo metadata we now load up the index for each repo and retrieve -- the hashes for the packages -- hashesFromRepoMetadata <- Sec.uncheckClientErrors $ --TODO: [code cleanup] wrap in our own exceptions fmap (Map.fromList . concat) $ sequence -- Reading the repo index is expensive so we group the packages by repo [ repoContextWithSecureRepo repoctx repo $ \secureRepo -> Sec.withIndex secureRepo $ \repoIndex -> sequence [ do hash <- Sec.trusted <$> -- strip off Trusted tag Sec.indexLookupHash repoIndex pkgid -- Note that hackage-security currently uses SHA256 -- but this API could in principle give us some other -- choice in future. return (pkgid, hashFromTUF hash) | pkgid <- pkgids ] | (repo, pkgids) <- map (\grp@((_,repo):_) -> (repo, map fst grp)) . groupBy ((==) `on` (remoteRepoName . repoRemote . snd)) . sortBy (compare `on` (remoteRepoName . repoRemote . snd)) $ repoTarballPkgsWithMetadata ] -- For tarballs from repos that do not have hashes available, download -- the ones we previously determined we need. -- repoTarballPkgsNewlyDownloaded <- sequence [ do tarball <- fetchRepoTarball verbosity repoctx repo pkgid return (pkgid, tarball) | (pkgid, repo) <- repoTarballPkgsToDownload ] return (hashesFromRepoMetadata, repoTarballPkgsNewlyDownloaded) -- Hash tarball files for packages where we have to do that. This includes -- tarballs that were local in the first place, plus tarballs from repos, -- either previously cached or freshly downloaded. 
-- let allTarballFilePkgs :: [(PackageId, FilePath)] allTarballFilePkgs = localTarballPkgs ++ repoTarballPkgsDownloaded ++ repoTarballPkgsNewlyDownloaded hashesFromTarballFiles <- liftIO $ fmap Map.fromList $ sequence [ do srchash <- readFileHashValue tarball return (pkgid, srchash) | (pkgid, tarball) <- allTarballFilePkgs ] monitorFiles [ monitorFile tarball | (_pkgid, tarball) <- allTarballFilePkgs ] -- Return the combination return $! hashesFromRepoMetadata <> hashesFromTarballFiles -- ------------------------------------------------------------ -- * Installation planning -- ------------------------------------------------------------ planPackages :: Compiler -> Platform -> Solver -> SolverSettings -> InstalledPackageIndex -> SourcePackageDb -> PkgConfigDb -> [UnresolvedSourcePackage] -> Map PackageName (Map OptionalStanza Bool) -> Progress String String SolverInstallPlan planPackages comp platform solver SolverSettings{..} installedPkgIndex sourcePkgDb pkgConfigDB localPackages pkgStanzasEnable = resolveDependencies platform (compilerInfo comp) pkgConfigDB solver resolverParams where --TODO: [nice to have] disable multiple instances restriction in the solver, but then -- make sure we can cope with that in the output. resolverParams = setMaxBackjumps solverSettingMaxBackjumps --TODO: [required eventually] should only be configurable for custom installs -- . setIndependentGoals solverSettingIndependentGoals . setReorderGoals solverSettingReorderGoals . setCountConflicts solverSettingCountConflicts --TODO: [required eventually] should only be configurable for custom installs -- . setAvoidReinstalls solverSettingAvoidReinstalls --TODO: [required eventually] should only be configurable for custom installs -- . setShadowPkgs solverSettingShadowPkgs . setStrongFlags solverSettingStrongFlags --TODO: [required eventually] decide if we need to prefer installed for -- global packages, or prefer latest even for global packages. Perhaps -- should be configurable but with a different name than "upgrade-dependencies". . setPreferenceDefault PreferLatestForSelected {-(if solverSettingUpgradeDeps then PreferAllLatest else PreferLatestForSelected)-} . removeUpperBounds solverSettingAllowNewer . addDefaultSetupDependencies (defaultSetupDeps comp platform . PD.packageDescription . packageDescription) . addPreferences -- preferences from the config file or command line [ PackageVersionPreference name ver | Dependency name ver <- solverSettingPreferences ] . addConstraints -- version constraints from the config file or command line [ LabeledPackageConstraint (userToPackageConstraint pc) src | (pc, src) <- solverSettingConstraints ] . addPreferences -- enable stanza preference where the user did not specify [ PackageStanzasPreference pkgname stanzas | pkg <- localPackages , let pkgname = packageName pkg stanzaM = Map.findWithDefault Map.empty pkgname pkgStanzasEnable stanzas = [ stanza | stanza <- [minBound..maxBound] , Map.lookup stanza stanzaM == Nothing ] , not (null stanzas) ] . addConstraints -- enable stanza constraints where the user asked to enable [ LabeledPackageConstraint (PackageConstraintStanzas pkgname stanzas) ConstraintSourceConfigFlagOrTarget | pkg <- localPackages , let pkgname = packageName pkg stanzaM = Map.findWithDefault Map.empty pkgname pkgStanzasEnable stanzas = [ stanza | stanza <- [minBound..maxBound] , Map.lookup stanza stanzaM == Just True ] , not (null stanzas) ] . 
addConstraints --TODO: [nice to have] should have checked at some point that the -- package in question actually has these flags. [ LabeledPackageConstraint (PackageConstraintFlags pkgname flags) ConstraintSourceConfigFlagOrTarget | (pkgname, flags) <- Map.toList solverSettingFlagAssignments ] . addConstraints --TODO: [nice to have] we have user-supplied flags for unspecified -- local packages (as well as specific per-package flags). For the -- former we just apply all these flags to all local targets which -- is silly. We should check if the flags are appropriate. [ LabeledPackageConstraint (PackageConstraintFlags pkgname flags) ConstraintSourceConfigFlagOrTarget | let flags = solverSettingFlagAssignment , not (null flags) , pkg <- localPackages , let pkgname = packageName pkg ] $ stdResolverParams stdResolverParams = -- Note: we don't use the standardInstallPolicy here, since that uses -- its own addDefaultSetupDependencies that is not appropriate for us. basicInstallPolicy installedPkgIndex sourcePkgDb (map SpecificSourcePackage localPackages) ------------------------------------------------------------------------------ -- * Install plan post-processing ------------------------------------------------------------------------------ -- This phase goes from the InstallPlan we get from the solver and has to -- make an elaborated install plan. -- -- We go in two steps: -- -- 1. elaborate all the source packages that the solver has chosen. -- 2. swap source packages for pre-existing installed packages wherever -- possible. -- -- We do it in this order, elaborating and then replacing, because the easiest -- way to calculate the installed package ids used for the replacement step is -- from the elaborated configuration for each package. ------------------------------------------------------------------------------ -- * Install plan elaboration ------------------------------------------------------------------------------ -- | Produce an elaborated install plan using the policy for local builds with -- a nix-style shared store. 
-- -- In theory should be able to make an elaborated install plan with a policy -- matching that of the classic @cabal install --user@ or @--global@ -- elaborateInstallPlan :: Platform -> Compiler -> ProgramDb -> DistDirLayout -> CabalDirLayout -> SolverInstallPlan -> [SourcePackage loc] -> Map PackageId PackageSourceHash -> InstallDirs.InstallDirTemplates -> ProjectConfigShared -> PackageConfig -> Map PackageName PackageConfig -> (ElaboratedInstallPlan, ElaboratedSharedConfig) elaborateInstallPlan platform compiler compilerprogdb DistDirLayout{..} cabalDirLayout@CabalDirLayout{cabalStorePackageDB} solverPlan localPackages sourcePackageHashes defaultInstallDirs _sharedPackageConfig localPackagesConfig perPackageConfig = (elaboratedInstallPlan, elaboratedSharedConfig) where elaboratedSharedConfig = ElaboratedSharedConfig { pkgConfigPlatform = platform, pkgConfigCompiler = compiler, pkgConfigCompilerProgs = compilerprogdb } elaboratedInstallPlan = flip InstallPlan.mapPreservingGraph solverPlan $ \mapDep planpkg -> case planpkg of InstallPlan.PreExisting pkg -> InstallPlan.PreExisting pkg InstallPlan.Configured pkg -> InstallPlan.Configured (elaborateSolverPackage mapDep pkg) _ -> error "elaborateInstallPlan: unexpected package state" elaborateSolverPackage :: (UnitId -> UnitId) -> SolverPackage UnresolvedPkgLoc -> ElaboratedConfiguredPackage elaborateSolverPackage mapDep pkg@(SolverPackage (SourcePackage pkgid gdesc srcloc descOverride) flags stanzas deps0) = elaboratedPackage where -- Knot tying: the final elaboratedPackage includes the -- pkgInstalledId, which is calculated by hashing many -- of the other fields of the elaboratedPackage. -- elaboratedPackage = ElaboratedConfiguredPackage {..} deps = fmap (map elaborateSolverId) deps0 elaborateSolverId sid = ConfiguredId { confSrcId = packageId sid, -- Update the 'UnitId' to the final nix-style hashed ID confInstId = mapDep (installedPackageId sid) } pkgInstalledId | shouldBuildInplaceOnly pkg = mkUnitId (display pkgid ++ "-inplace") | otherwise = assert (isJust pkgSourceHash) $ hashedInstalledPackageId (packageHashInputs elaboratedSharedConfig elaboratedPackage) -- recursive use of elaboratedPackage | otherwise = error $ "elaborateInstallPlan: non-inplace package " ++ " is missing a source hash: " ++ display pkgid -- All the other fields of the ElaboratedConfiguredPackage -- pkgSourceId = pkgid pkgDescription = let Right (desc, _) = PD.finalizePackageDescription flags (const True) platform (compilerInfo compiler) [] gdesc in desc pkgFlagAssignment = flags pkgFlagDefaults = [ (Cabal.flagName flag, Cabal.flagDefault flag) | flag <- PD.genPackageFlags gdesc ] pkgDependencies = deps pkgStanzasAvailable = Set.fromList stanzas pkgStanzasRequested = -- NB: even if a package stanza is requested, if the package -- doesn't actually have any of that stanza we omit it from -- the request, to ensure that we don't decide that this -- package needs to be rebuilt. (It needs to be done here, -- because the ElaboratedConfiguredPackage is where we test -- whether or not there have been changes.) Map.fromList $ [ (TestStanzas, v) | v <- maybeToList tests , _ <- PD.testSuites pkgDescription ] ++ [ (BenchStanzas, v) | v <- maybeToList benchmarks , _ <- PD.benchmarks pkgDescription ] where tests, benchmarks :: Maybe Bool tests = perPkgOptionMaybe pkgid packageConfigTests benchmarks = perPkgOptionMaybe pkgid packageConfigBenchmarks -- This is a placeholder which will get updated by 'pruneInstallPlanPass1' -- and 'pruneInstallPlanPass2'. 
We can't populate it here -- because whether or not tests/benchmarks should be enabled -- is heuristically calculated based on whether or not the -- dependencies of the test suite have already been installed, -- but this function doesn't know what is installed (since -- we haven't improved the plan yet), so we do it in another pass. -- Check the comments of those functions for more details. pkgStanzasEnabled = Set.empty pkgBuildTargets = [] pkgReplTarget = Nothing pkgBuildHaddocks = False pkgSourceLocation = srcloc pkgSourceHash = Map.lookup pkgid sourcePackageHashes pkgLocalToProject = isLocalToProject pkg pkgBuildStyle = if shouldBuildInplaceOnly pkg then BuildInplaceOnly else BuildAndInstall pkgBuildPackageDBStack = buildAndRegisterDbs pkgRegisterPackageDBStack = buildAndRegisterDbs pkgRequiresRegistration = PD.hasPublicLib pkgDescription pkgSetupScriptStyle = packageSetupScriptStyle pkgDescription pkgSetupScriptCliVersion = packageSetupScriptSpecVersion pkgSetupScriptStyle pkgDescription deps pkgSetupPackageDBStack = buildAndRegisterDbs buildAndRegisterDbs | shouldBuildInplaceOnly pkg = inplacePackageDbs | otherwise = storePackageDbs pkgDescriptionOverride = descOverride pkgVanillaLib = perPkgOptionFlag pkgid True packageConfigVanillaLib --TODO: [required feature]: also needs to be handled recursively pkgSharedLib = pkgid `Set.member` pkgsUseSharedLibrary pkgDynExe = perPkgOptionFlag pkgid False packageConfigDynExe pkgGHCiLib = perPkgOptionFlag pkgid False packageConfigGHCiLib --TODO: [required feature] needs to default to enabled on windows still pkgProfExe = perPkgOptionFlag pkgid False packageConfigProf pkgProfLib = pkgid `Set.member` pkgsUseProfilingLibrary (pkgProfExeDetail, pkgProfLibDetail) = perPkgOptionLibExeFlag pkgid ProfDetailDefault packageConfigProfDetail packageConfigProfLibDetail pkgCoverage = perPkgOptionFlag pkgid False packageConfigCoverage pkgOptimization = perPkgOptionFlag pkgid NormalOptimisation packageConfigOptimization pkgSplitObjs = perPkgOptionFlag pkgid False packageConfigSplitObjs pkgStripLibs = perPkgOptionFlag pkgid False packageConfigStripLibs pkgStripExes = perPkgOptionFlag pkgid False packageConfigStripExes pkgDebugInfo = perPkgOptionFlag pkgid NoDebugInfo packageConfigDebugInfo -- Combine the configured compiler prog settings with the user-supplied -- config. For the compiler progs any user-supplied config was taken -- into account earlier when configuring the compiler so its ok that -- our configured settings for the compiler override the user-supplied -- config here. 
pkgProgramPaths = Map.fromList [ (programId prog, programPath prog) | prog <- configuredPrograms compilerprogdb ] <> perPkgOptionMapLast pkgid packageConfigProgramPaths pkgProgramArgs = Map.fromList [ (programId prog, args) | prog <- configuredPrograms compilerprogdb , let args = programOverrideArgs prog , not (null args) ] <> perPkgOptionMapMappend pkgid packageConfigProgramArgs pkgProgramPathExtra = perPkgOptionNubList pkgid packageConfigProgramPathExtra pkgConfigureScriptArgs = perPkgOptionList pkgid packageConfigConfigureArgs pkgExtraLibDirs = perPkgOptionList pkgid packageConfigExtraLibDirs pkgExtraFrameworkDirs = perPkgOptionList pkgid packageConfigExtraFrameworkDirs pkgExtraIncludeDirs = perPkgOptionList pkgid packageConfigExtraIncludeDirs pkgProgPrefix = perPkgOptionMaybe pkgid packageConfigProgPrefix pkgProgSuffix = perPkgOptionMaybe pkgid packageConfigProgSuffix pkgInstallDirs | shouldBuildInplaceOnly pkg -- use the ordinary default install dirs = (InstallDirs.absoluteInstallDirs pkgid (installedUnitId pkg) (compilerInfo compiler) InstallDirs.NoCopyDest platform defaultInstallDirs) { InstallDirs.libsubdir = "", -- absoluteInstallDirs sets these as InstallDirs.datasubdir = "" -- 'undefined' but we have to use } -- them as "Setup.hs configure" args | otherwise -- use special simplified install dirs = storePackageInstallDirs cabalDirLayout (compilerId compiler) pkgInstalledId pkgHaddockHoogle = perPkgOptionFlag pkgid False packageConfigHaddockHoogle pkgHaddockHtml = perPkgOptionFlag pkgid False packageConfigHaddockHtml pkgHaddockHtmlLocation = perPkgOptionMaybe pkgid packageConfigHaddockHtmlLocation pkgHaddockExecutables = perPkgOptionFlag pkgid False packageConfigHaddockExecutables pkgHaddockTestSuites = perPkgOptionFlag pkgid False packageConfigHaddockTestSuites pkgHaddockBenchmarks = perPkgOptionFlag pkgid False packageConfigHaddockBenchmarks pkgHaddockInternal = perPkgOptionFlag pkgid False packageConfigHaddockInternal pkgHaddockCss = perPkgOptionMaybe pkgid packageConfigHaddockCss pkgHaddockHscolour = perPkgOptionFlag pkgid False packageConfigHaddockHscolour pkgHaddockHscolourCss = perPkgOptionMaybe pkgid packageConfigHaddockHscolourCss pkgHaddockContents = perPkgOptionMaybe pkgid packageConfigHaddockContents perPkgOptionFlag :: PackageId -> a -> (PackageConfig -> Flag a) -> a perPkgOptionMaybe :: PackageId -> (PackageConfig -> Flag a) -> Maybe a perPkgOptionList :: PackageId -> (PackageConfig -> [a]) -> [a] perPkgOptionFlag pkgid def f = fromFlagOrDefault def (lookupPerPkgOption pkgid f) perPkgOptionMaybe pkgid f = flagToMaybe (lookupPerPkgOption pkgid f) perPkgOptionList pkgid f = lookupPerPkgOption pkgid f perPkgOptionNubList pkgid f = fromNubList (lookupPerPkgOption pkgid f) perPkgOptionMapLast pkgid f = getMapLast (lookupPerPkgOption pkgid f) perPkgOptionMapMappend pkgid f = getMapMappend (lookupPerPkgOption pkgid f) perPkgOptionLibExeFlag pkgid def fboth flib = (exe, lib) where exe = fromFlagOrDefault def bothflag lib = fromFlagOrDefault def (bothflag <> libflag) bothflag = lookupPerPkgOption pkgid fboth libflag = lookupPerPkgOption pkgid flib lookupPerPkgOption :: (Package pkg, Monoid m) => pkg -> (PackageConfig -> m) -> m lookupPerPkgOption pkg f -- the project config specifies values that apply to packages local to -- but by default non-local packages get all default config values -- the project, and can specify per-package values for any package, | isLocalToProject pkg = local <> perpkg | otherwise = perpkg where local = f localPackagesConfig perpkg = maybe mempty f 
(Map.lookup (packageName pkg) perPackageConfig) inplacePackageDbs = storePackageDbs ++ [ distPackageDB (compilerId compiler) ] storePackageDbs = [ GlobalPackageDB , cabalStorePackageDB (compilerId compiler) ] -- For this local build policy, every package that lives in a local source -- dir (as opposed to a tarball), or depends on such a package, will be -- built inplace into a shared dist dir. Tarball packages that depend on -- source dir packages will also get unpacked locally. shouldBuildInplaceOnly :: HasUnitId pkg => pkg -> Bool shouldBuildInplaceOnly pkg = Set.member (installedPackageId pkg) pkgsToBuildInplaceOnly pkgsToBuildInplaceOnly :: Set InstalledPackageId pkgsToBuildInplaceOnly = Set.fromList $ map installedPackageId $ InstallPlan.reverseDependencyClosure solverPlan [ installedPackageId (PlannedId (packageId pkg)) | pkg <- localPackages ] isLocalToProject :: Package pkg => pkg -> Bool isLocalToProject pkg = Set.member (packageId pkg) pkgsLocalToProject pkgsLocalToProject :: Set PackageId pkgsLocalToProject = Set.fromList [ packageId pkg | pkg <- localPackages ] pkgsUseSharedLibrary :: Set PackageId pkgsUseSharedLibrary = packagesWithDownwardClosedProperty needsSharedLib where needsSharedLib pkg = fromMaybe compilerShouldUseSharedLibByDefault (liftM2 (||) pkgSharedLib pkgDynExe) where pkgid = packageId pkg pkgSharedLib = perPkgOptionMaybe pkgid packageConfigSharedLib pkgDynExe = perPkgOptionMaybe pkgid packageConfigDynExe --TODO: [code cleanup] move this into the Cabal lib. It's currently open -- coded in Distribution.Simple.Configure, but should be made a proper -- function of the Compiler or CompilerInfo. compilerShouldUseSharedLibByDefault = case compilerFlavor compiler of GHC -> GHC.isDynamic compiler GHCJS -> GHCJS.isDynamic compiler _ -> False pkgsUseProfilingLibrary :: Set PackageId pkgsUseProfilingLibrary = packagesWithDownwardClosedProperty needsProfilingLib where needsProfilingLib pkg = fromFlagOrDefault False (profBothFlag <> profLibFlag) where pkgid = packageId pkg profBothFlag = lookupPerPkgOption pkgid packageConfigProf profLibFlag = lookupPerPkgOption pkgid packageConfigProfLib --TODO: [code cleanup] unused: the old deprecated packageConfigProfExe packagesWithDownwardClosedProperty property = Set.fromList $ map packageId $ InstallPlan.dependencyClosure solverPlan [ installedPackageId pkg | pkg <- InstallPlan.toList solverPlan , property pkg ] -- just the packages that satisfy the propety --TODO: [nice to have] this does not check the config consistency, -- e.g. a package explicitly turning off profiling, but something -- depending on it that needs profiling. This really needs a separate -- package config validation/resolution pass. --TODO: [nice to have] config consistency checking: -- * profiling libs & exes, exe needs lib, recursive -- * shared libs & exes, exe needs lib, recursive -- * vanilla libs & exes, exe needs lib, recursive -- * ghci or shared lib needed by TH, recursive, ghc version dependent --------------------------- -- Build targets -- -- Refer to ProjectPlanning.Types for details of these important types: -- data PackageTarget = ... -- data ComponentTarget = ... -- data SubComponentTarget = ... --TODO: this needs to report some user target/config errors elaboratePackageTargets :: ElaboratedConfiguredPackage -> [PackageTarget] -> ([ComponentTarget], Maybe ComponentTarget, Bool) elaboratePackageTargets ElaboratedConfiguredPackage{..} targets = let buildTargets = nubComponentTargets . map compatSubComponentTargets . 
concatMap elaborateBuildTarget $ targets --TODO: instead of listToMaybe we should be reporting an error here replTargets = listToMaybe . nubComponentTargets . map compatSubComponentTargets . concatMap elaborateReplTarget $ targets buildHaddocks = HaddockDefaultComponents `elem` targets in (buildTargets, replTargets, buildHaddocks) where --TODO: need to report an error here if defaultComponents is empty elaborateBuildTarget BuildDefaultComponents = pkgDefaultComponents elaborateBuildTarget (BuildSpecificComponent t) = [t] elaborateBuildTarget _ = [] --TODO: need to report an error here if defaultComponents is empty elaborateReplTarget ReplDefaultComponent = take 1 pkgDefaultComponents elaborateReplTarget (ReplSpecificComponent t) = [t] elaborateReplTarget _ = [] pkgDefaultComponents = [ ComponentTarget cname WholeComponent | c <- Cabal.pkgComponents pkgDescription , PD.buildable (Cabal.componentBuildInfo c) , let cname = Cabal.componentName c , enabledOptionalStanza cname ] where enabledOptionalStanza cname = case componentOptionalStanza cname of Nothing -> True Just stanza -> Map.lookup stanza pkgStanzasRequested == Just True -- Not all Cabal Setup.hs versions support sub-component targets, so switch -- them over to the whole component compatSubComponentTargets :: ComponentTarget -> ComponentTarget compatSubComponentTargets target@(ComponentTarget cname _subtarget) | not setupHsSupportsSubComponentTargets = ComponentTarget cname WholeComponent | otherwise = target -- Actually the reality is that no current version of Cabal's Setup.hs -- build command actually support building specific files or modules. setupHsSupportsSubComponentTargets = False -- TODO: when that changes, adjust this test, e.g. -- | pkgSetupScriptCliVersion >= Version [x,y] [] nubComponentTargets :: [ComponentTarget] -> [ComponentTarget] nubComponentTargets = concatMap (wholeComponentOverrides . map snd) . groupBy ((==) `on` fst) . sortBy (compare `on` fst) . map (\t@(ComponentTarget cname _) -> (cname, t)) -- If we're building the whole component then that the only target all we -- need, otherwise we can have several targets within the component. wholeComponentOverrides :: [ComponentTarget] -> [ComponentTarget] wholeComponentOverrides ts = case [ t | t@(ComponentTarget _ WholeComponent) <- ts ] of (t:_) -> [t] [] -> ts pkgHasEphemeralBuildTargets :: ElaboratedConfiguredPackage -> Bool pkgHasEphemeralBuildTargets pkg = isJust (pkgReplTarget pkg) || (not . null) [ () | ComponentTarget _ subtarget <- pkgBuildTargets pkg , subtarget /= WholeComponent ] -- | The components that we'll build all of, meaning that after they're built -- we can skip building them again (unlike with building just some modules or -- other files within a component). -- pkgBuildTargetWholeComponents :: ElaboratedConfiguredPackage -> Set ComponentName pkgBuildTargetWholeComponents pkg = Set.fromList [ cname | ComponentTarget cname WholeComponent <- pkgBuildTargets pkg ] ------------------------------------------------------------------------------ -- * Install plan pruning ------------------------------------------------------------------------------ -- | Given a set of package targets (and optionally component targets within -- those packages), take the subset of the install plan needed to build those -- targets. Also, update the package config to specify which optional stanzas -- to enable, and which targets within each package to build. 
-- pruneInstallPlanToTargets :: Map InstalledPackageId [PackageTarget] -> ElaboratedInstallPlan -> ElaboratedInstallPlan pruneInstallPlanToTargets perPkgTargetsMap = either (\_ -> assert False undefined) id . InstallPlan.new (IndependentGoals False) . PackageIndex.fromList -- We have to do this in two passes . pruneInstallPlanPass2 . pruneInstallPlanPass1 perPkgTargetsMap . InstallPlan.toList -- | The first pass does three things: -- -- * Set the build targets based on the user targets (but not rev deps yet). -- * A first go at determining which optional stanzas (testsuites, benchmarks) -- are needed. We have a second go in the next pass. -- * Take the dependency closure using pruned dependencies. We prune deps that -- are used only by unneeded optional stanzas. These pruned deps are only -- used for the dependency closure and are not persisted in this pass. -- pruneInstallPlanPass1 :: Map InstalledPackageId [PackageTarget] -> [ElaboratedPlanPackage] -> [ElaboratedPlanPackage] pruneInstallPlanPass1 perPkgTargetsMap pkgs = map fst $ dependencyClosure (installedPackageId . fst) -- the pkg id snd -- the pruned deps [ (pkg', pruneOptionalDependencies pkg') | pkg <- pkgs , let pkg' = mapConfiguredPackage (pruneOptionalStanzas . setBuildTargets) pkg ] (Map.keys perPkgTargetsMap) where -- Elaborate and set the targets we'll build for this package. This is just -- based on the targets from the user, not targets implied by reverse -- dependencies. Those comes in the second pass once we know the rev deps. -- setBuildTargets pkg = pkg { pkgBuildTargets = buildTargets, pkgReplTarget = replTarget, pkgBuildHaddocks = buildHaddocks } where (buildTargets, replTarget, buildHaddocks) = elaboratePackageTargets pkg targets targets = fromMaybe [] $ Map.lookup (installedPackageId pkg) perPkgTargetsMap -- Decide whether or not to enable testsuites and benchmarks -- -- The testsuite and benchmark targets are somewhat special in that we need -- to configure the packages with them enabled, and we need to do that even -- if we only want to build one of several testsuites. -- -- There are two cases in which we will enable the testsuites (or -- benchmarks): if one of the targets is a testsuite, or if all of the -- testsuite dependencies are already cached in the store. The rationale -- for the latter is to minimise how often we have to reconfigure due to -- the particular targets we choose to build. Otherwise choosing to build -- a testsuite target, and then later choosing to build an exe target -- would involve unnecessarily reconfiguring the package with testsuites -- disabled. Technically this introduces a little bit of stateful -- behaviour to make this "sticky", but it should be benign. -- pruneOptionalStanzas pkg = pkg { pkgStanzasEnabled = stanzas } where stanzas :: Set OptionalStanza stanzas = optionalStanzasRequiredByTargets pkg <> optionalStanzasRequestedByDefault pkg <> optionalStanzasWithDepsAvailable availablePkgs pkg -- Calculate package dependencies but cut out those needed only by -- optional stanzas that we've determined we will not enable. -- These pruned deps are not persisted in this pass since they're based on -- the optional stanzas and we'll make further tweaks to the optional -- stanzas in the next pass. -- pruneOptionalDependencies :: ElaboratedPlanPackage -> [InstalledPackageId] pruneOptionalDependencies (InstallPlan.Configured pkg) = (CD.flatDeps . 
CD.filterDeps keepNeeded) (depends pkg) where keepNeeded (CD.ComponentTest _) _ = TestStanzas `Set.member` stanzas keepNeeded (CD.ComponentBench _) _ = BenchStanzas `Set.member` stanzas keepNeeded _ _ = True stanzas = pkgStanzasEnabled pkg pruneOptionalDependencies pkg = CD.flatDeps (depends pkg) optionalStanzasRequiredByTargets :: ElaboratedConfiguredPackage -> Set OptionalStanza optionalStanzasRequiredByTargets pkg = Set.fromList [ stanza | ComponentTarget cname _ <- pkgBuildTargets pkg ++ maybeToList (pkgReplTarget pkg) , stanza <- maybeToList (componentOptionalStanza cname) ] optionalStanzasRequestedByDefault :: ElaboratedConfiguredPackage -> Set OptionalStanza optionalStanzasRequestedByDefault = Map.keysSet . Map.filter (id :: Bool -> Bool) . pkgStanzasRequested availablePkgs = Set.fromList [ installedPackageId pkg | InstallPlan.PreExisting pkg <- pkgs ] -- | Given a set of already installed packages @availablePkgs@, -- determine the set of available optional stanzas from @pkg@ -- which have all of their dependencies already installed. This is used -- to implement "sticky" testsuites, where once we have installed -- all of the deps needed for the test suite, we go ahead and -- enable it always. optionalStanzasWithDepsAvailable :: Set InstalledPackageId -> ElaboratedConfiguredPackage -> Set OptionalStanza optionalStanzasWithDepsAvailable availablePkgs pkg = Set.fromList [ stanza | stanza <- Set.toList (pkgStanzasAvailable pkg) , let deps :: [InstalledPackageId] deps = map installedPackageId $ CD.select (optionalStanzaDeps stanza) (pkgDependencies pkg) , all (`Set.member` availablePkgs) deps ] where optionalStanzaDeps TestStanzas (CD.ComponentTest _) = True optionalStanzaDeps BenchStanzas (CD.ComponentBench _) = True optionalStanzaDeps _ _ = False -- The second pass does three things: -- -- * A second go at deciding which optional stanzas to enable. -- * Prune the dependencies based on the final choice of optional stanzas. -- * Extend the targets within each package to build, now we know the reverse -- dependencies, ie we know which libs are needed as deps by other packages. -- -- Achieving sticky behaviour with enabling\/disabling optional stanzas is -- tricky. The first approximation was handled by the first pass above, but -- it's not quite enough. That pass will enable stanzas if all of the deps -- of the optional stanza are already installed /in the store/. That's important -- but it does not account for dependencies that get built inplace as part of -- the project. We cannot take those inplace build deps into account in the -- pruning pass however because we don't yet know which ones we're going to -- build. Once we do know, we can have another go and enable stanzas that have -- all their deps available. Now we can consider all packages in the pruned -- plan to be available, including ones we already decided to build from -- source. -- -- Deciding which targets to build depends on knowing which packages have -- reverse dependencies (ie are needed). This requires the result of first -- pass, which is another reason we have to split it into two passes. -- -- Note that just because we might enable testsuites or benchmarks (in the -- first or second pass) doesn't mean that we build all (or even any) of them. -- That depends on which targets we picked in the first pass. 
-- pruneInstallPlanPass2 :: [ElaboratedPlanPackage] -> [ElaboratedPlanPackage] pruneInstallPlanPass2 pkgs = map (mapConfiguredPackage setStanzasDepsAndTargets) pkgs where setStanzasDepsAndTargets pkg = pkg { pkgStanzasEnabled = stanzas, pkgDependencies = CD.filterDeps keepNeeded (pkgDependencies pkg), pkgBuildTargets = pkgBuildTargets pkg ++ targetsRequiredForRevDeps } where stanzas :: Set OptionalStanza stanzas = pkgStanzasEnabled pkg <> optionalStanzasWithDepsAvailable availablePkgs pkg keepNeeded (CD.ComponentTest _) _ = TestStanzas `Set.member` stanzas keepNeeded (CD.ComponentBench _) _ = BenchStanzas `Set.member` stanzas keepNeeded _ _ = True targetsRequiredForRevDeps = [ ComponentTarget (Cabal.defaultLibName (pkgSourceId pkg)) WholeComponent -- if anything needs this pkg, build the library component | installedPackageId pkg `Set.member` hasReverseLibDeps ] --TODO: also need to track build-tool rev-deps for exes availablePkgs :: Set InstalledPackageId availablePkgs = Set.fromList (map installedPackageId pkgs) hasReverseLibDeps :: Set InstalledPackageId hasReverseLibDeps = Set.fromList [ depid | pkg <- pkgs , depid <- CD.flatDeps (depends pkg) ] mapConfiguredPackage :: (ElaboratedConfiguredPackage -> ElaboratedConfiguredPackage) -> ElaboratedPlanPackage -> ElaboratedPlanPackage mapConfiguredPackage f (InstallPlan.Configured pkg) = InstallPlan.Configured (f pkg) mapConfiguredPackage _ pkg = pkg componentOptionalStanza :: Cabal.ComponentName -> Maybe OptionalStanza componentOptionalStanza (Cabal.CTestName _) = Just TestStanzas componentOptionalStanza (Cabal.CBenchName _) = Just BenchStanzas componentOptionalStanza _ = Nothing dependencyClosure :: (pkg -> InstalledPackageId) -> (pkg -> [InstalledPackageId]) -> [pkg] -> [InstalledPackageId] -> [pkg] dependencyClosure pkgid deps allpkgs = map vertexToPkg . concatMap Tree.flatten . Graph.dfs graph . map pkgidToVertex where (graph, vertexToPkg, pkgidToVertex) = dependencyGraph pkgid deps allpkgs dependencyGraph :: (pkg -> InstalledPackageId) -> (pkg -> [InstalledPackageId]) -> [pkg] -> (Graph.Graph, Graph.Vertex -> pkg, InstalledPackageId -> Graph.Vertex) dependencyGraph pkgid deps pkgs = (graph, vertexToPkg', pkgidToVertex') where (graph, vertexToPkg, pkgidToVertex) = Graph.graphFromEdges [ ( pkg, pkgid pkg, deps pkg ) | pkg <- pkgs ] vertexToPkg' = (\(pkg,_,_) -> pkg) . vertexToPkg pkgidToVertex' = fromMaybe (error "dependencyGraph: lookup failure") . pkgidToVertex --------------------------- -- Setup.hs script policy -- -- Handling for Setup.hs scripts is a bit tricky, part of it lives in the -- solver phase, and part in the elaboration phase. We keep the helper -- functions for both phases together here so at least you can see all of it -- in one place. -- -- There are four major cases for Setup.hs handling: -- -- 1. @build-type@ Custom with a @custom-setup@ section -- 2. @build-type@ Custom without a @custom-setup@ section -- 3. @build-type@ not Custom with @cabal-version > $our-cabal-version@ -- 4. @build-type@ not Custom with @cabal-version <= $our-cabal-version@ -- -- It's also worth noting that packages specifying @cabal-version: >= 1.23@ -- or later that have @build-type@ Custom will always have a @custom-setup@ -- section. Therefore in case 2, the specified @cabal-version@ will always be -- less than 1.23. -- -- In cases 1 and 2 we obviously have to build an external Setup.hs script, -- while in case 4 we can use the internal library API. 
In case 3 we also have -- to build an external Setup.hs script because the package needs a later -- Cabal lib version than we can support internally. -- -- data SetupScriptStyle = ... -- see ProjectPlanning.Types -- | Work out the 'SetupScriptStyle' given the package description. -- packageSetupScriptStyle :: PD.PackageDescription -> SetupScriptStyle packageSetupScriptStyle pkg | buildType == PD.Custom , Just setupbi <- PD.setupBuildInfo pkg -- does have a custom-setup stanza , not (PD.defaultSetupDepends setupbi) -- but not one we added internally = SetupCustomExplicitDeps | buildType == PD.Custom , Just setupbi <- PD.setupBuildInfo pkg -- we get this case post-solver as , PD.defaultSetupDepends setupbi -- the solver fills in the deps = SetupCustomImplicitDeps | buildType == PD.Custom , Nothing <- PD.setupBuildInfo pkg -- we get this case pre-solver = SetupCustomImplicitDeps | PD.specVersion pkg > cabalVersion -- one cabal-install is built against = SetupNonCustomExternalLib | otherwise = SetupNonCustomInternalLib where buildType = fromMaybe PD.Custom (PD.buildType pkg) -- | Part of our Setup.hs handling policy is implemented by getting the solver -- to work out setup dependencies for packages. The solver already handles -- packages that explicitly specify setup dependencies, but we can also tell -- the solver to treat other packages as if they had setup dependencies. -- That's what this function does, it gets called by the solver for all -- packages that don't already have setup dependencies. -- -- The dependencies we want to add is different for each 'SetupScriptStyle'. -- -- Note that adding default deps means these deps are actually /added/ to the -- packages that we get out of the solver in the 'SolverInstallPlan'. Making -- implicit setup deps explicit is a problem in the post-solver stages because -- we still need to distinguish the case of explicit and implict setup deps. -- See 'rememberImplicitSetupDeps'. -- defaultSetupDeps :: Compiler -> Platform -> PD.PackageDescription -> Maybe [Dependency] defaultSetupDeps compiler platform pkg = case packageSetupScriptStyle pkg of -- For packages with build type custom that do not specify explicit -- setup dependencies, we add a dependency on Cabal and a number -- of other packages. SetupCustomImplicitDeps -> Just $ [ Dependency depPkgname anyVersion | depPkgname <- legacyCustomSetupPkgs compiler platform ] ++ [ Dependency cabalPkgname cabalConstraint | packageName pkg /= cabalPkgname ] where -- The Cabal dep is slightly special: -- * We omit the dep for the Cabal lib itself, since it bootstraps. -- * We constrain it to be >= 1.18 < 2 -- cabalConstraint = orLaterVersion cabalCompatMinVer `intersectVersionRanges` orLaterVersion (PD.specVersion pkg) `intersectVersionRanges` earlierVersion cabalCompatMaxVer -- The idea here is that at some point we will make significant -- breaking changes to the Cabal API that Setup.hs scripts use. -- So for old custom Setup scripts that do not specify explicit -- constraints, we constrain them to use a compatible Cabal version. -- The exact version where we'll make this API break has not yet been -- decided, so for the meantime we guess at 2.x. cabalCompatMaxVer = Version [2] [] -- In principle we can talk to any old Cabal version, and we need to -- be able to do that for custom Setup scripts that require older -- Cabal lib versions. However in practice we have currently have -- problems with Cabal-1.16. (1.16 does not know about build targets) -- If this is fixed we can relax this constraint. 
cabalCompatMinVer = Version [1,18] [] -- For other build types (like Simple) if we still need to compile an -- external Setup.hs, it'll be one of the simple ones that only depends -- on Cabal and base. SetupNonCustomExternalLib -> Just [ Dependency cabalPkgname cabalConstraint , Dependency basePkgname anyVersion ] where cabalConstraint = orLaterVersion (PD.specVersion pkg) -- The internal setup wrapper method has no deps at all. SetupNonCustomInternalLib -> Just [] SetupCustomExplicitDeps -> error $ "defaultSetupDeps: called for a package with explicit " ++ "setup deps: " ++ display (packageId pkg) -- | Work out which version of the Cabal spec we will be using to talk to the -- Setup.hs interface for this package. -- -- This depends somewhat on the 'SetupScriptStyle' but most cases are a result -- of what the solver picked for us, based on the explicit setup deps or the -- ones added implicitly by 'defaultSetupDeps'. -- packageSetupScriptSpecVersion :: Package pkg => SetupScriptStyle -> PD.PackageDescription -> ComponentDeps [pkg] -> Version -- We're going to be using the internal Cabal library, so the spec version of -- that is simply the version of the Cabal library that cabal-install has been -- built with. packageSetupScriptSpecVersion SetupNonCustomInternalLib _ _ = cabalVersion -- If we happen to be building the Cabal lib itself then because that -- bootstraps itself then we use the version of the lib we're building. packageSetupScriptSpecVersion SetupCustomImplicitDeps pkg _ | packageName pkg == cabalPkgname = packageVersion pkg -- In all other cases we have a look at what version of the Cabal lib the -- solver picked. Or if it didn't depend on Cabal at all (which is very rare) -- then we look at the .cabal file to see what spec version it declares. packageSetupScriptSpecVersion _ pkg deps = case find ((cabalPkgname ==) . packageName) (CD.setupDeps deps) of Just dep -> packageVersion dep Nothing -> PD.specVersion pkg cabalPkgname, basePkgname :: PackageName cabalPkgname = PackageName "Cabal" basePkgname = PackageName "base" legacyCustomSetupPkgs :: Compiler -> Platform -> [PackageName] legacyCustomSetupPkgs compiler (Platform _ os) = map PackageName $ [ "array", "base", "binary", "bytestring", "containers" , "deepseq", "directory", "filepath", "old-time", "pretty" , "process", "time", "transformers" ] ++ [ "Win32" | os == Windows ] ++ [ "unix" | os /= Windows ] ++ [ "ghc-prim" | isGHC ] ++ [ "template-haskell" | isGHC ] where isGHC = compilerCompatFlavor GHC compiler -- The other aspects of our Setup.hs policy lives here where we decide on -- the 'SetupScriptOptions'. -- -- Our current policy for the 'SetupCustomImplicitDeps' case is that we -- try to make the implicit deps cover everything, and we don't allow the -- compiler to pick up other deps. This may or may not be sustainable, and -- we might have to allow the deps to be non-exclusive, but that itself would -- be tricky since we would have to allow the Setup access to all the packages -- in the store and local dbs. 
setupHsScriptOptions :: ElaboratedReadyPackage -> ElaboratedSharedConfig -> FilePath -> FilePath -> Bool -> Lock -> SetupScriptOptions setupHsScriptOptions (ReadyPackage ElaboratedConfiguredPackage{..}) ElaboratedSharedConfig{..} srcdir builddir isParallelBuild cacheLock = SetupScriptOptions { useCabalVersion = thisVersion pkgSetupScriptCliVersion, useCabalSpecVersion = Just pkgSetupScriptCliVersion, useCompiler = Just pkgConfigCompiler, usePlatform = Just pkgConfigPlatform, usePackageDB = pkgSetupPackageDBStack, usePackageIndex = Nothing, useDependencies = [ (uid, srcid) | ConfiguredId srcid uid <- CD.setupDeps pkgDependencies ], useDependenciesExclusive = True, useVersionMacros = pkgSetupScriptStyle == SetupCustomExplicitDeps, useProgramConfig = pkgConfigCompilerProgs, useDistPref = builddir, useLoggingHandle = Nothing, -- this gets set later useWorkingDir = Just srcdir, useWin32CleanHack = False, --TODO: [required eventually] forceExternalSetupMethod = isParallelBuild, setupCacheLock = Just cacheLock } -- | To be used for the input for elaborateInstallPlan. -- -- TODO: [code cleanup] make InstallDirs.defaultInstallDirs pure. -- userInstallDirTemplates :: Compiler -> IO InstallDirs.InstallDirTemplates userInstallDirTemplates compiler = do InstallDirs.defaultInstallDirs (compilerFlavor compiler) True -- user install False -- unused storePackageInstallDirs :: CabalDirLayout -> CompilerId -> InstalledPackageId -> InstallDirs.InstallDirs FilePath storePackageInstallDirs CabalDirLayout{cabalStorePackageDirectory} compid ipkgid = InstallDirs.InstallDirs {..} where prefix = cabalStorePackageDirectory compid ipkgid bindir = prefix </> "bin" libdir = prefix </> "lib" libsubdir = "" dynlibdir = libdir libexecdir = prefix </> "libexec" includedir = libdir </> "include" datadir = prefix </> "share" datasubdir = "" docdir = datadir </> "doc" mandir = datadir </> "man" htmldir = docdir </> "html" haddockdir = htmldir sysconfdir = prefix </> "etc" --TODO: [code cleanup] perhaps reorder this code -- based on the ElaboratedInstallPlan + ElaboratedSharedConfig, -- make the various Setup.hs {configure,build,copy} flags setupHsConfigureFlags :: ElaboratedReadyPackage -> ElaboratedSharedConfig -> Verbosity -> FilePath -> Cabal.ConfigFlags setupHsConfigureFlags (ReadyPackage pkg@ElaboratedConfiguredPackage{..}) sharedConfig@ElaboratedSharedConfig{..} verbosity builddir = sanityCheckElaboratedConfiguredPackage sharedConfig pkg (Cabal.ConfigFlags {..}) where configDistPref = toFlag builddir configVerbosity = toFlag verbosity configIPID = toFlag (display (installedUnitId pkg)) configProgramPaths = Map.toList pkgProgramPaths configProgramArgs = Map.toList pkgProgramArgs configProgramPathExtra = toNubList pkgProgramPathExtra configHcFlavor = toFlag (compilerFlavor pkgConfigCompiler) configHcPath = mempty -- we use configProgramPaths instead configHcPkg = mempty -- we use configProgramPaths instead configVanillaLib = toFlag pkgVanillaLib configSharedLib = toFlag pkgSharedLib configDynExe = toFlag pkgDynExe configGHCiLib = toFlag pkgGHCiLib configProfExe = mempty configProfLib = toFlag pkgProfLib configProf = toFlag pkgProfExe -- configProfDetail is for exe+lib, but overridden by configProfLibDetail -- so we specify both so we can specify independently configProfDetail = toFlag pkgProfExeDetail configProfLibDetail = toFlag pkgProfLibDetail configCoverage = toFlag pkgCoverage configLibCoverage = mempty configOptimization = toFlag pkgOptimization configSplitObjs = toFlag pkgSplitObjs configStripExes = toFlag 
pkgStripExes configStripLibs = toFlag pkgStripLibs configDebugInfo = toFlag pkgDebugInfo configAllowNewer = mempty -- we use configExactConfiguration True configConfigurationsFlags = pkgFlagAssignment configConfigureArgs = pkgConfigureScriptArgs configExtraLibDirs = pkgExtraLibDirs configExtraFrameworkDirs = pkgExtraFrameworkDirs configExtraIncludeDirs = pkgExtraIncludeDirs configProgPrefix = maybe mempty toFlag pkgProgPrefix configProgSuffix = maybe mempty toFlag pkgProgSuffix configInstallDirs = fmap (toFlag . InstallDirs.toPathTemplate) pkgInstallDirs -- we only use configDependencies, unless we're talking to an old Cabal -- in which case we use configConstraints configDependencies = [ (packageName srcid, uid) | ConfiguredId srcid uid <- CD.nonSetupDeps pkgDependencies ] configConstraints = [ thisPackageVersion srcid | ConfiguredId srcid _uid <- CD.nonSetupDeps pkgDependencies ] -- explicitly clear, then our package db stack -- TODO: [required eventually] have to do this differently for older Cabal versions configPackageDBs = Nothing : map Just pkgBuildPackageDBStack configTests = toFlag (TestStanzas `Set.member` pkgStanzasEnabled) configBenchmarks = toFlag (BenchStanzas `Set.member` pkgStanzasEnabled) configExactConfiguration = toFlag True configFlagError = mempty --TODO: [research required] appears not to be implemented configRelocatable = mempty --TODO: [research required] ??? configScratchDir = mempty -- never use configUserInstall = mempty -- don't rely on defaults configPrograms_ = mempty -- never use, shouldn't exist setupHsBuildFlags :: ElaboratedConfiguredPackage -> ElaboratedSharedConfig -> Verbosity -> FilePath -> Cabal.BuildFlags setupHsBuildFlags ElaboratedConfiguredPackage{..} _ verbosity builddir = Cabal.BuildFlags { buildProgramPaths = mempty, --unused, set at configure time buildProgramArgs = mempty, --unused, set at configure time buildVerbosity = toFlag verbosity, buildDistPref = toFlag builddir, buildAssumeDepsUpToDate = toFlag False, buildNumJobs = mempty, --TODO: [nice to have] sometimes want to use toFlag (Just numBuildJobs), buildArgs = mempty -- unused, passed via args not flags } setupHsBuildArgs :: ElaboratedConfiguredPackage -> [String] setupHsBuildArgs pkg = map (showComponentTarget pkg) (pkgBuildTargets pkg) showComponentTarget :: ElaboratedConfiguredPackage -> ComponentTarget -> String showComponentTarget _pkg = showBuildTarget . 
toBuildTarget where showBuildTarget t = Cabal.showBuildTarget (qlBuildTarget t) t qlBuildTarget Cabal.BuildTargetComponent{} = Cabal.QL2 qlBuildTarget _ = Cabal.QL3 toBuildTarget :: ComponentTarget -> Cabal.BuildTarget toBuildTarget (ComponentTarget cname subtarget) = case subtarget of WholeComponent -> Cabal.BuildTargetComponent cname ModuleTarget mname -> Cabal.BuildTargetModule cname mname FileTarget fname -> Cabal.BuildTargetFile cname fname setupHsReplFlags :: ElaboratedConfiguredPackage -> ElaboratedSharedConfig -> Verbosity -> FilePath -> Cabal.ReplFlags setupHsReplFlags ElaboratedConfiguredPackage{..} _ verbosity builddir = Cabal.ReplFlags { replProgramPaths = mempty, --unused, set at configure time replProgramArgs = mempty, --unused, set at configure time replVerbosity = toFlag verbosity, replDistPref = toFlag builddir, replReload = mempty --only used as callback from repl } setupHsReplArgs :: ElaboratedConfiguredPackage -> [String] setupHsReplArgs pkg = maybe [] (\t -> [showComponentTarget pkg t]) (pkgReplTarget pkg) --TODO: should be able to give multiple modules in one component setupHsCopyFlags :: ElaboratedConfiguredPackage -> ElaboratedSharedConfig -> Verbosity -> FilePath -> Cabal.CopyFlags setupHsCopyFlags _ _ verbosity builddir = Cabal.CopyFlags { --TODO: [nice to have] we currently just rely on Setup.hs copy to always do the right -- thing, but perhaps we ought really to copy into an image dir and do -- some sanity checks and move into the final location ourselves copyArgs = [], -- TODO: could use this to only copy what we enabled copyDest = toFlag InstallDirs.NoCopyDest, copyDistPref = toFlag builddir, copyAssumeDepsUpToDate = toFlag False, copyVerbosity = toFlag verbosity } setupHsRegisterFlags :: ElaboratedConfiguredPackage -> ElaboratedSharedConfig -> Verbosity -> FilePath -> FilePath -> Cabal.RegisterFlags setupHsRegisterFlags ElaboratedConfiguredPackage {pkgBuildStyle} _ verbosity builddir pkgConfFile = Cabal.RegisterFlags { regPackageDB = mempty, -- misfeature regGenScript = mempty, -- never use regGenPkgConf = toFlag (Just pkgConfFile), regInPlace = case pkgBuildStyle of BuildInplaceOnly -> toFlag True _ -> toFlag False, regPrintId = mempty, -- never use regDistPref = toFlag builddir, regVerbosity = toFlag verbosity, -- Currently not used, because this is per-package. 
regAssumeDepsUpToDate = toFlag False, regArgs = [] } setupHsHaddockFlags :: ElaboratedConfiguredPackage -> ElaboratedSharedConfig -> Verbosity -> FilePath -> Cabal.HaddockFlags setupHsHaddockFlags ElaboratedConfiguredPackage{..} _ verbosity builddir = Cabal.HaddockFlags { haddockProgramPaths = mempty, --unused, set at configure time haddockProgramArgs = mempty, --unused, set at configure time haddockHoogle = toFlag pkgHaddockHoogle, haddockHtml = toFlag pkgHaddockHtml, haddockHtmlLocation = maybe mempty toFlag pkgHaddockHtmlLocation, haddockForHackage = mempty, --TODO: new flag haddockExecutables = toFlag pkgHaddockExecutables, haddockTestSuites = toFlag pkgHaddockTestSuites, haddockBenchmarks = toFlag pkgHaddockBenchmarks, haddockInternal = toFlag pkgHaddockInternal, haddockCss = maybe mempty toFlag pkgHaddockCss, haddockHscolour = toFlag pkgHaddockHscolour, haddockHscolourCss = maybe mempty toFlag pkgHaddockHscolourCss, haddockContents = maybe mempty toFlag pkgHaddockContents, haddockDistPref = toFlag builddir, haddockKeepTempFiles = mempty, --TODO: from build settings haddockVerbosity = toFlag verbosity } {- setupHsTestFlags :: ElaboratedConfiguredPackage -> ElaboratedSharedConfig -> Verbosity -> FilePath -> Cabal.TestFlags setupHsTestFlags _ _ verbosity builddir = Cabal.TestFlags { } -} ------------------------------------------------------------------------------ -- * Sharing installed packages ------------------------------------------------------------------------------ -- -- Nix style store management for tarball packages -- -- So here's our strategy: -- -- We use a per-user nix-style hashed store, but /only/ for tarball packages. -- So that includes packages from hackage repos (and other http and local -- tarballs). For packages in local directories we do not register them into -- the shared store by default, we just build them locally inplace. -- -- The reason we do it like this is that it's easy to make stable hashes for -- tarball packages, and these packages benefit most from sharing. By contrast -- unpacked dir packages are harder to hash and they tend to change more -- frequently so there's less benefit to sharing them. -- -- When using the nix store approach we have to run the solver *without* -- looking at the packages installed in the store, just at the source packages -- (plus core\/global installed packages). Then we do a post-processing pass -- to replace configured packages in the plan with pre-existing ones, where -- possible. Where possible of course means where the nix-style package hash -- equals one that's already in the store. -- -- One extra wrinkle is that unless we know package tarball hashes upfront, we -- will have to download the tarballs to find their hashes. So we have two -- options: delay replacing source with pre-existing installed packages until -- the point during the execution of the install plan where we have the -- tarball, or try to do as much up-front as possible and then check again -- during plan execution. The former isn't great because we would end up -- telling users we're going to re-install loads of packages when in fact we -- would just share them. It'd be better to give as accurate a prediction as -- we can. The latter is better for users, but we do still have to check -- during plan execution because it's important that we don't replace existing -- installed packages even if they have the same package hash, because we -- don't guarantee ABI stability. 
-- TODO: [required eventually] for safety of concurrent installs, we must make sure we register but -- not replace installed packages with ghc-pkg. packageHashInputs :: ElaboratedSharedConfig -> ElaboratedConfiguredPackage -> PackageHashInputs packageHashInputs pkgshared pkg@ElaboratedConfiguredPackage{ pkgSourceId, pkgSourceHash = Just srchash, pkgDependencies } = PackageHashInputs { pkgHashPkgId = pkgSourceId, pkgHashSourceHash = srchash, pkgHashDirectDeps = Set.fromList [ installedPackageId dep | dep <- CD.select relevantDeps pkgDependencies ], pkgHashOtherConfig = packageHashConfigInputs pkgshared pkg } where -- Obviously the main deps are relevant relevantDeps (CD.ComponentLib _) = True relevantDeps (CD.ComponentExe _) = True -- Setup deps can affect the Setup.hs behaviour and thus what is built relevantDeps CD.ComponentSetup = True -- However testsuites and benchmarks do not get installed and should not -- affect the result, so we do not include them. relevantDeps (CD.ComponentTest _) = False relevantDeps (CD.ComponentBench _) = False packageHashInputs _ pkg = error $ "packageHashInputs: only for packages with source hashes. " ++ display (packageId pkg) packageHashConfigInputs :: ElaboratedSharedConfig -> ElaboratedConfiguredPackage -> PackageHashConfigInputs packageHashConfigInputs ElaboratedSharedConfig{..} ElaboratedConfiguredPackage{..} = PackageHashConfigInputs { pkgHashCompilerId = compilerId pkgConfigCompiler, pkgHashPlatform = pkgConfigPlatform, pkgHashFlagAssignment = pkgFlagAssignment, pkgHashConfigureScriptArgs = pkgConfigureScriptArgs, pkgHashVanillaLib = pkgVanillaLib, pkgHashSharedLib = pkgSharedLib, pkgHashDynExe = pkgDynExe, pkgHashGHCiLib = pkgGHCiLib, pkgHashProfLib = pkgProfLib, pkgHashProfExe = pkgProfExe, pkgHashProfLibDetail = pkgProfLibDetail, pkgHashProfExeDetail = pkgProfExeDetail, pkgHashCoverage = pkgCoverage, pkgHashOptimization = pkgOptimization, pkgHashSplitObjs = pkgSplitObjs, pkgHashStripLibs = pkgStripLibs, pkgHashStripExes = pkgStripExes, pkgHashDebugInfo = pkgDebugInfo, pkgHashExtraLibDirs = pkgExtraLibDirs, pkgHashExtraFrameworkDirs = pkgExtraFrameworkDirs, pkgHashExtraIncludeDirs = pkgExtraIncludeDirs, pkgHashProgPrefix = pkgProgPrefix, pkgHashProgSuffix = pkgProgSuffix } -- | Given the 'InstalledPackageIndex' for a nix-style package store, and an -- 'ElaboratedInstallPlan', replace configured source packages by pre-existing -- installed packages whenever they exist. -- improveInstallPlanWithPreExistingPackages :: InstalledPackageIndex -> ElaboratedInstallPlan -> ElaboratedInstallPlan improveInstallPlanWithPreExistingPackages installedPkgIndex installPlan = replaceWithPreExisting installPlan [ ipkg | InstallPlan.Configured pkg <- InstallPlan.reverseTopologicalOrder installPlan , ipkg <- maybeToList (canPackageBeImproved pkg) ] where --TODO: sanity checks: -- * the installed package must have the expected deps etc -- * the installed package must not be broken, valid dep closure --TODO: decide what to do if we encounter broken installed packages, -- since overwriting is never safe. canPackageBeImproved pkg = PackageIndex.lookupUnitId installedPkgIndex (installedPackageId pkg) replaceWithPreExisting = foldl' (\plan ipkg -> InstallPlan.preexisting (installedPackageId ipkg) ipkg plan)
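-- An illustrative, self-contained sketch (not part of ProjectPlanning.hs) of
-- the Data.Graph technique that the 'dependencyGraph'/'dependencyClosure'
-- helpers above wrap. The 'Pkg' type and 'closure' function are made up for
-- illustration only; the real code keys on 'InstalledPackageId' instead.

import qualified Data.Graph as Graph
import qualified Data.Tree  as Tree
import Data.Maybe (mapMaybe)

data Pkg = Pkg { pkgKey :: String, pkgDeps :: [String] }
  deriving Show

-- All packages reachable from the given root keys by following dependencies.
closure :: [Pkg] -> [String] -> [Pkg]
closure pkgs roots =
    map vertexToPkg
  . concatMap Tree.flatten
  . Graph.dfs graph
  $ mapMaybe keyToVertex roots
  where
    (graph, fromVertex, keyToVertex) =
      Graph.graphFromEdges [ (p, pkgKey p, pkgDeps p) | p <- pkgs ]
    vertexToPkg = (\(p,_,_) -> p) . fromVertex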
thomie/cabal
cabal-install/Distribution/Client/ProjectPlanning.hs
bsd-3-clause
101,157
0
28
29,052
12,880
7,037
5,843
1,358
7
import System.Environment (getArgs)
import Data.List (elemIndices)

readMore :: String -> String
readMore s | length s < 56 = s
           | otherwise     = take (last e) s ++ "... <Read More>"
  where e = 40 : [x | x <- elemIndices ' ' s, x < 40]

main :: IO ()
main = do [inpFile] <- getArgs
          input <- readFile inpFile
          putStr . unlines . map readMore $ lines input
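-- An illustrative check of readMore's behaviour (checkReadMore is not part of
-- the original challenge file). Short lines pass through unchanged; long lines
-- are cut at the last space before column 40, or at column 40 itself when the
-- first 40 characters contain no space.
checkReadMore :: Bool
checkReadMore =
     readMore "short line" == "short line"
  && readMore (replicate 80 'x') == replicate 40 'x' ++ "... <Read More>"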
nikai3d/ce-challenges
easy/read_more.hs
bsd-3-clause
388
0
11
111
166
81
85
11
1
module Mark where

data Mark = None | One | Two | Three deriving (Show,Eq,Ord)

instance Num Mark where
  (+) None x = x
  (+) x None = x
  (+) One One = Two
  (+) Three _ = Three
  (+) _ Three = Three
  (+) One Two = Three
  (+) Two One = Three
  (+) Two Two = Three

  fromInteger 0 = None
  fromInteger 1 = One
  fromInteger 2 = Two
  fromInteger 3 = Three

instance Read Mark where
  readsPrec _ value =
    tryParse [("0", None),("1", One), ("2", Two), ("3", Three)]
    where tryParse [] = []
          tryParse ((attempt, result):xs) =
            if take (length attempt) value == attempt
               then [(result, drop (length attempt) value)]
               else tryParse xs
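-- An illustrative use of the instances above (exampleTotal is not part of the
-- original module). Marks are read back from their digit form and combined
-- with the capped (+) defined above, so Two plus One tops out at Three.
exampleTotal :: Mark
exampleTotal = read "2" + read "1"   -- == Three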
Raynes/Hricket
src/Mark.hs
bsd-3-clause
755
0
13
272
309
174
135
24
0
{-# LANGUAGE PatternSynonyms #-}
{-# OPTIONS_HADDOCK not-home #-}

-- | Description: How we encode GraphQL responses
module GraphQL.Internal.Output
  ( Response(..)
  , Errors
  , Error(..)
  , GraphQLError(..)
  , singleError
  ) where

import Protolude hiding (Location, Map)

import Data.Aeson (ToJSON(..))
import Data.List.NonEmpty (NonEmpty(..))
import GraphQL.Value
  ( Object
  , objectFromList
  , Value
  , pattern ValueObject
  , pattern ValueNull
  , NameError(..)
  , ToValue(..)
  )
import GraphQL.Internal.Name (Name)

-- | GraphQL response.
--
-- A GraphQL response must:
--
--   * be a map
--   * have a "data" key iff the operation executed
--   * have an "errors" key iff the operation encountered errors
--   * not include "data" if operation failed before execution (e.g. syntax errors,
--     validation errors, missing info)
--   * not have keys other than "data", "errors", and "extensions"
--
-- Other interesting things:
--
--   * Doesn't have to be JSON, but does have to have maps, strings, lists,
--     and null
--   * Can also support bool, int, enum, and float
--   * Value of "extensions" must be a map
--
-- "data" must be null if an error was encountered during execution that
-- prevented a valid response.
--
-- "errors"
--
--   * must be a non-empty list
--   * each error is a map with "message", optionally "locations" key
--     with list of locations
--   * locations are maps with 1-indexed "line" and "column" keys.
data Response
  = Success Object
  | PreExecutionFailure Errors
  | ExecutionFailure Errors
  | PartialSuccess Object Errors
  deriving (Eq, Ord, Show)

-- | Construct an object from a list of names and values.
--
-- Panic if there are duplicate names.
unsafeMakeObject :: HasCallStack => [(Name, Value)] -> Value
unsafeMakeObject fields =
  case objectFromList fields of
    Nothing -> panic $ "Object has duplicate keys: " <> show fields
    Just object -> ValueObject object

instance ToValue Response where
  toValue (Success x) = unsafeMakeObject [("data", toValue x)]
  toValue (PreExecutionFailure e) = unsafeMakeObject [("errors", toValue e)]
  toValue (ExecutionFailure e) = unsafeMakeObject [("data", ValueNull)
                                                  ,("errors", toValue e)]
  toValue (PartialSuccess x e) = unsafeMakeObject [("data", toValue x)
                                                  ,("errors", toValue e)
                                                  ]

instance ToJSON Response where
  toJSON = toJSON . toValue

type Errors = NonEmpty Error

data Error = Error Text [Location] deriving (Eq, Ord, Show)

instance ToValue Error where
  toValue (Error message []) = unsafeMakeObject [("message", toValue message)]
  toValue (Error message locations) = unsafeMakeObject [("message", toValue message)
                                                       ,("locations", toValue locations)
                                                       ]

-- | Make a list of errors containing a single error.
singleError :: GraphQLError e => e -> Errors
singleError e = toError e :| []

data Location = Location Line Column deriving (Eq, Ord, Show)
type Line = Int32 -- XXX: 1-indexed natural number
type Column = Int32 -- XXX: 1-indexed natural number

instance ToValue Location where
  toValue (Location line column) = unsafeMakeObject [("line" , toValue line)
                                                    ,("column", toValue column)
                                                    ]

-- | An error that arises while processing a GraphQL query.
class GraphQLError e where
  -- | Represent an error as human-readable text, primarily intended for
  -- developers of GraphQL clients, and secondarily for developers of GraphQL
  -- servers.
  formatError :: e -> Text

  -- | Represent an error as human-readable text, together with reference to a
  -- series of locations within a GraphQL query document. Default
  -- implementation calls 'formatError' and provides no locations.
  toError :: e -> Error
  toError e = Error (formatError e) []

-- Defined here to avoid circular dependency.
instance GraphQLError NameError where
  formatError (NameError name) = "Not a valid GraphQL name: " <> show name
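-- An illustrative sketch (not part of the original module; it assumes
-- OverloadedStrings, as used elsewhere in the package, and aeson's encode).
-- A pre-execution failure carries only an "errors" key, matching the response
-- rules documented above.
exampleFailure :: Response
exampleFailure =
  PreExecutionFailure (Error "Document has no operations" [] :| [])

-- encode (toJSON exampleFailure) should render, modulo key order, as:
--   {"errors":[{"message":"Document has no operations"}]}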
jml/graphql-api
src/GraphQL/Internal/Output.hs
bsd-3-clause
4,204
0
9
1,078
765
440
325
-1
-1
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TypeOperators #-}

module Zero.Bittrex.Handlers (
    getMarketSummary
  ) where

import Control.Monad.IO.Class (liftIO)
import Data.Text (Text)
import Network.HTTP.Client (newManager)
import Network.HTTP.Client.TLS (tlsManagerSettings)
import Servant (Handler(..))
import Servant.Client

import Zero.Bittrex.API
import Zero.Bittrex.Internal

------------------------------------------------------------------------------

marketSummary :: Maybe Text -> ClientM (Message [MarketSummary])
marketSummary = client bittrexAPI

------------------------------------------------------------------------------

getMarketSummary :: Maybe Text -> Handler (Message [MarketSummary])
getMarketSummary market = do
  manager <- liftIO $ newManager tlsManagerSettings
  res <- liftIO $ runClientM (marketSummary market)
                             (ClientEnv manager (BaseUrl Https "bittrex.com" 443 ""))
  case res of
    Left err -> error $ show err
    Right ms -> return ms
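-- A hedged sketch (not from this repository) of how a handler like
-- getMarketSummary could be exposed over HTTP with servant-server and warp,
-- reusing the DataKinds/TypeOperators pragmas enabled above. The SummaryAPI
-- type, the "summary" path and the port are invented for illustration, and it
-- assumes Message/MarketSummary have ToJSON instances; the real routing
-- presumably lives elsewhere in the zero server.

import Data.Proxy (Proxy(..))
import Network.Wai (Application)
import Network.Wai.Handler.Warp (run)
import Servant

type SummaryAPI =
  "summary" :> QueryParam "market" Text :> Get '[JSON] (Message [MarketSummary])

summaryApp :: Application
summaryApp = serve (Proxy :: Proxy SummaryAPI) getMarketSummary

main :: IO ()
main = run 8080 summaryApp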
et4te/zero
server/src/Zero/Bittrex/Handlers.hs
bsd-3-clause
1,101
0
13
223
246
134
112
25
2
{-# LANGUAGE GADTs #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE ViewPatterns #-} {-# LANGUAGE FlexibleContexts, FlexibleInstances, TypeSynonymInstances #-} {-# LANGUAGE OverlappingInstances, UndecidableInstances #-} module Narradar.Processor.InfinitaryProblem where import Control.Applicative import Data.Foldable (Foldable) import Data.Traversable (Traversable) import qualified Data.Set as Set import Narradar.Framework import Narradar.Framework.Ppr import Narradar.Constraints.VariableCondition import Narradar.Types.ArgumentFiltering (AF_, ApplyAF, PolyHeuristic, Heuristic, MkHeu, mkHeu, isSoundAF) import qualified Narradar.Types.ArgumentFiltering as AF import Narradar.Types as Narradar import Narradar.Types.Problem.Infinitary as Infinitary import Narradar.Types.Problem.NarrowingGoal import Narradar.Utils import Lattice import Prelude hiding (pi) data InfinitaryToRewriting heu = InfinitaryToRewriting (MkHeu heu) Bool data NarrowingGoalToInfinitary heu = NarrowingGoalToInfinitary (MkHeu heu) Bool infinitaryToRewriting heu = apply(InfinitaryToRewriting heu False) narrowingGoalToInfinitary heu = apply(NarrowingGoalToInfinitary heu False) -- | This is the infinitary constructor rewriting AF processor described in -- "Termination of Logic Programs ..." (Schneider-Kamp et al) instance (t ~ TermF id ,v ~ Var ,trs ~ NTRS id ,HasSignature (NProblem typ id), id ~ SignatureId (NProblem typ id) ,PolyHeuristic heu id, Lattice (AF_ id), Ord id, Pretty id ,MkDPProblem typ (NTRS id), Traversable (Problem typ) ,ApplyAF (NProblem typ id) ,Info info (InfinitaryToRewritingProof id) ,ICap t v (typ, trs) ,IUsableRules t v typ trs ) => Processor info (InfinitaryToRewriting heu) (NProblem (Infinitary id typ) id) (NProblem typ id) where applySearch (InfinitaryToRewriting mk usable) p | null orProblems = [dontKnow (InfinitaryToRewritingFail :: InfinitaryToRewritingProof id) p] | otherwise = orProblems where orProblems = do let heu = mkHeu mk p base_p = getFramework (Infinitary.baseProblem p) let p' = if usable then iUsableRules p (rhs <$> rules (getP p)) else p af' <- Set.toList $ invariantEV heu p' (Infinitary.pi p') return $ singleP (InfinitaryToRewritingProof af') p (AF.apply af' . mkDerivedDPProblem base_p $ p') -- ------------- -- Proofs -- ------------- data InfinitaryToRewritingProof id where InfinitaryToRewritingProof :: AF_ id -> InfinitaryToRewritingProof id InfinitaryToRewritingFail :: InfinitaryToRewritingProof id instance Pretty id => Pretty (InfinitaryToRewritingProof id) where pPrint InfinitaryToRewritingFail = text "Failed to find an argument filtering that satisfies" <> text "the variable condition." pPrint (InfinitaryToRewritingProof af) = text "(SGST07) Termination of the following rewriting DP problem" <+> text "implies termination of the infinitary rewriting problem." $$ text "The argument filtering used is:" $$ pPrint af data NarrowingGoalToInfinitaryProof = NarrowingGoalToInfinitaryProof deriving (Eq, Ord, Show) instance Pretty NarrowingGoalToInfinitaryProof where pPrint _ = text "Termination of this infinitary rewriting problem" $$ text "implies termination of the original problem"
pepeiborra/narradar
src/Narradar/Processor/InfinitaryProblem.hs
bsd-3-clause
3,649
0
19
821
793
420
373
64
1
-- 
-- 
-- -----------------
-- Exercise 8.19.
-- -----------------
-- 
-- 
-- 

module E'8'19 where

copy :: IO ()
copy = do line <- getLine

          let whileCopy = do if (line == "")
                                then (return () )
                                else ( do putStrLn line
                                          line <- getLine
                                          whileCopy )

          whileCopy

-- In the 'outermost' do-block a local definition "line" is introduced,
-- that holds a user-defined string.
-- Then a function named "whileCopy" is defined, which has another
-- 'inner' do-block. It contains an "if-function" which uses the ex-
-- pression "line" to compare it with the empty string "(line == "")".
-- The name "line" is bound to the definition in the 'outer' do-block.
-- That means, if the user-input is not the empty string "", this con-
-- dition is never "True". In the else-case is yet another do-block.
-- In its sequence it prints out the "line" which is bound to the
-- 'outermost' do-block. After that it introduces its own name "line".
-- Because of single-assignment this name is only valid in the
-- 'innermost' do-block. The call of "whileCopy" at the end of the
-- definition of "whileCopy" makes it a recursive definition, but:
-- at this point we are already stuck in an endless recursion.
-- If the user didn't abort with the empty string, he is trapped
-- in the else-branch. Without care/knowledge introducing names in do-
-- blocks can lead to confusion and errors.
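-- A possible corrected version of the loop (copyFixed is not part of the
-- original exercise file): a fresh line is read on every iteration instead of
-- closing over the first one, so an empty input line really does end the loop.
copyFixed :: IO ()
copyFixed = do line <- getLine
               if line == ""
                 then return ()
                 else do putStrLn line
                         copyFixed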
pascal-knodel/haskell-craft
_/links/E'8'19.hs
mit
1,570
0
16
458
109
67
42
11
2
{-# LANGUAGE OverloadedStrings #-} {- Copyright (C) 2006-2010 Puneeth Chaganti <[email protected]> This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -} {- | Module : Text.Pandoc.Writers.Org Copyright : Copyright (C) 2010 Puneeth Chaganti License : GNU GPL, version 2 or above Maintainer : Puneeth Chaganti <[email protected]> Stability : alpha Portability : portable Conversion of 'Pandoc' documents to Emacs Org-Mode. Org-Mode: <http://orgmode.org> -} module Text.Pandoc.Writers.Org ( writeOrg) where import Text.Pandoc.Definition import Text.Pandoc.Shared import Text.Pandoc.Pretty import Text.Pandoc.Templates (renderTemplate) import Data.List ( intersect, intersperse, transpose ) import Control.Monad.State import Control.Applicative ( (<$>) ) data WriterState = WriterState { stNotes :: [[Block]] , stLinks :: Bool , stImages :: Bool , stHasMath :: Bool , stOptions :: WriterOptions } -- | Convert Pandoc to Org. writeOrg :: WriterOptions -> Pandoc -> String writeOrg opts document = let st = WriterState { stNotes = [], stLinks = False, stImages = False, stHasMath = False, stOptions = opts } in evalState (pandocToOrg document) st -- | Return Org representation of document. pandocToOrg :: Pandoc -> State WriterState String pandocToOrg (Pandoc (Meta tit auth dat) blocks) = do opts <- liftM stOptions get title <- titleToOrg tit authors <- mapM inlineListToOrg auth date <- inlineListToOrg dat body <- blockListToOrg blocks notes <- liftM (reverse . stNotes) get >>= notesToOrg -- note that the notes may contain refs, so we do them first hasMath <- liftM stHasMath get let colwidth = if writerWrapText opts then Just $ writerColumns opts else Nothing let main = render colwidth $ foldl ($+$) empty $ [body, notes] let context = writerVariables opts ++ [ ("body", main) , ("title", render Nothing title) , ("date", render Nothing date) ] ++ [ ("math", "yes") | hasMath ] ++ [ ("author", render Nothing a) | a <- authors ] if writerStandalone opts then return $ renderTemplate context $ writerTemplate opts else return main -- | Return Org representation of notes. notesToOrg :: [[Block]] -> State WriterState Doc notesToOrg notes = mapM (\(num, note) -> noteToOrg num note) (zip [1..] notes) >>= return . vsep -- | Return Org representation of a note. noteToOrg :: Int -> [Block] -> State WriterState Doc noteToOrg num note = do contents <- blockListToOrg note let marker = "[" ++ show num ++ "] " return $ hang (length marker) (text marker) contents -- | Escape special characters for Org. escapeString :: String -> String escapeString = escapeStringUsing (backslashEscapes "^_") titleToOrg :: [Inline] -> State WriterState Doc titleToOrg [] = return empty titleToOrg lst = do contents <- inlineListToOrg lst return $ "#+TITLE: " <> contents -- | Convert Pandoc block element to Org. 
blockToOrg :: Block -- ^ Block element -> State WriterState Doc blockToOrg Null = return empty blockToOrg (Plain inlines) = inlineListToOrg inlines blockToOrg (Para [Image txt (src,tit)]) = do capt <- inlineListToOrg txt img <- inlineToOrg (Image txt (src,tit)) return $ "#+CAPTION: " <> capt <> blankline <> img blockToOrg (Para inlines) = do contents <- inlineListToOrg inlines return $ contents <> blankline blockToOrg (RawBlock "html" str) = return $ blankline $$ "#+BEGIN_HTML" $$ nest 2 (text str) $$ "#+END_HTML" $$ blankline blockToOrg (RawBlock f str) | f == "org" || f == "latex" || f == "tex" = return $ text str blockToOrg (RawBlock _ _) = return empty blockToOrg HorizontalRule = return $ blankline $$ "--------------" $$ blankline blockToOrg (Header level inlines) = do contents <- inlineListToOrg inlines let headerStr = text $ if level > 999 then " " else replicate level '*' return $ headerStr <> " " <> contents <> blankline blockToOrg (CodeBlock (_,classes,_) str) = do opts <- stOptions <$> get let tabstop = writerTabStop opts let at = classes `intersect` ["asymptote", "C", "clojure", "css", "ditaa", "dot", "emacs-lisp", "gnuplot", "haskell", "js", "latex", "ledger", "lisp", "matlab", "mscgen", "ocaml", "octave", "oz", "perl", "plantuml", "python", "R", "ruby", "sass", "scheme", "screen", "sh", "sql", "sqlite"] let (beg, end) = if null at then ("#+BEGIN_EXAMPLE", "#+END_EXAMPLE") else ("#+BEGIN_SRC" ++ head at, "#+END_SRC") return $ text beg $$ nest tabstop (text str) $$ text end $$ blankline blockToOrg (BlockQuote blocks) = do contents <- blockListToOrg blocks return $ blankline $$ "#+BEGIN_QUOTE" $$ nest 2 contents $$ "#+END_QUOTE" $$ blankline blockToOrg (Table caption' _ _ headers rows) = do caption'' <- inlineListToOrg caption' let caption = if null caption' then empty else ("#+CAPTION: " <> caption'') headers' <- mapM blockListToOrg headers rawRows <- mapM (mapM blockListToOrg) rows let numChars = maximum . map offset -- FIXME: width is not being used. let widthsInChars = map ((+2) . numChars) $ transpose (headers' : rawRows) -- FIXME: Org doesn't allow blocks with height more than 1. let hpipeBlocks blocks = hcat [beg, middle, end] where h = maximum (map height blocks) sep' = lblock 3 $ vcat (map text $ replicate h " | ") beg = lblock 2 $ vcat (map text $ replicate h "| ") end = lblock 2 $ vcat (map text $ replicate h " |") middle = hcat $ intersperse sep' blocks let makeRow = hpipeBlocks . 
zipWith lblock widthsInChars let head' = makeRow headers' rows' <- mapM (\row -> do cols <- mapM blockListToOrg row return $ makeRow cols) rows let border ch = char '|' <> char ch <> (hcat $ intersperse (char ch <> char '+' <> char ch) $ map (\l -> text $ replicate l ch) widthsInChars) <> char ch <> char '|' let body = vcat rows' let head'' = if all null headers then empty else head' $$ border '-' return $ head'' $$ body $$ caption $$ blankline blockToOrg (BulletList items) = do contents <- mapM bulletListItemToOrg items -- ensure that sublists have preceding blank line return $ blankline $+$ vcat contents $$ blankline blockToOrg (OrderedList (start, _, delim) items) = do let delim' = case delim of TwoParens -> OneParen x -> x let markers = take (length items) $ orderedListMarkers (start, Decimal, delim') let maxMarkerLength = maximum $ map length markers let markers' = map (\m -> let s = maxMarkerLength - length m in m ++ replicate s ' ') markers contents <- mapM (\(item, num) -> orderedListItemToOrg item num) $ zip markers' items -- ensure that sublists have preceding blank line return $ blankline $$ vcat contents $$ blankline blockToOrg (DefinitionList items) = do contents <- mapM definitionListItemToOrg items return $ vcat contents $$ blankline -- | Convert bullet list item (list of blocks) to Org. bulletListItemToOrg :: [Block] -> State WriterState Doc bulletListItemToOrg items = do contents <- blockListToOrg items return $ hang 3 "- " (contents <> cr) -- | Convert ordered list item (a list of blocks) to Org. orderedListItemToOrg :: String -- ^ marker for list item -> [Block] -- ^ list item (list of blocks) -> State WriterState Doc orderedListItemToOrg marker items = do contents <- blockListToOrg items return $ hang (length marker + 1) (text marker <> space) (contents <> cr) -- | Convert defintion list item (label, list of blocks) to Org. definitionListItemToOrg :: ([Inline], [[Block]]) -> State WriterState Doc definitionListItemToOrg (label, defs) = do label' <- inlineListToOrg label contents <- liftM vcat $ mapM blockListToOrg defs return $ hang 3 "- " $ label' <> " :: " <> (contents <> cr) -- | Convert list of Pandoc block elements to Org. blockListToOrg :: [Block] -- ^ List of block elements -> State WriterState Doc blockListToOrg blocks = mapM blockToOrg blocks >>= return . vcat -- | Convert list of Pandoc inline elements to Org. inlineListToOrg :: [Inline] -> State WriterState Doc inlineListToOrg lst = mapM inlineToOrg lst >>= return . hcat -- | Convert Pandoc inline element to Org. inlineToOrg :: Inline -> State WriterState Doc inlineToOrg (Emph lst) = do contents <- inlineListToOrg lst return $ "/" <> contents <> "/" inlineToOrg (Strong lst) = do contents <- inlineListToOrg lst return $ "*" <> contents <> "*" inlineToOrg (Strikeout lst) = do contents <- inlineListToOrg lst return $ "+" <> contents <> "+" inlineToOrg (Superscript lst) = do contents <- inlineListToOrg lst return $ "^{" <> contents <> "}" inlineToOrg (Subscript lst) = do contents <- inlineListToOrg lst return $ "_{" <> contents <> "}" inlineToOrg (SmallCaps lst) = inlineListToOrg lst inlineToOrg (Quoted SingleQuote lst) = do contents <- inlineListToOrg lst return $ "'" <> contents <> "'" inlineToOrg (Quoted DoubleQuote lst) = do contents <- inlineListToOrg lst return $ "\"" <> contents <> "\"" inlineToOrg (Cite _ lst) = inlineListToOrg lst inlineToOrg EmDash = return "---" inlineToOrg EnDash = return "--" inlineToOrg Apostrophe = return "'" inlineToOrg Ellipses = return "..." 
inlineToOrg (Code _ str) = return $ "=" <> text str <> "=" inlineToOrg (Str str) = return $ text $ escapeString str inlineToOrg (Math t str) = do modify $ \st -> st{ stHasMath = True } return $ if t == InlineMath then "$" <> text str <> "$" else "$$" <> text str <> "$$" inlineToOrg (RawInline f str) | f == "tex" || f == "latex" = return $ text str inlineToOrg (RawInline _ _) = return empty inlineToOrg (LineBreak) = return cr -- there's no line break in Org inlineToOrg Space = return space inlineToOrg (Link txt (src, _)) = do case txt of [Code _ x] | x == src -> -- autolink do modify $ \s -> s{ stLinks = True } return $ "[[" <> text x <> "]]" _ -> do contents <- inlineListToOrg txt modify $ \s -> s{ stLinks = True } return $ "[[" <> text src <> "][" <> contents <> "]]" inlineToOrg (Image _ (source', _)) = do let source = unescapeURI source' modify $ \s -> s{ stImages = True } return $ "[[" <> text source <> "]]" inlineToOrg (Note contents) = do -- add to notes in state notes <- get >>= (return . stNotes) modify $ \st -> st { stNotes = contents:notes } let ref = show $ (length notes) + 1 return $ " [" <> text ref <> "]"
Lythimus/lptv
sites/all/modules/jgm-pandoc-8be6cc2/src/Text/Pandoc/Writers/Org.hs
gpl-2.0
11,782
0
20
3,027
3,470
1,731
1,739
220
6
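-- A minimal usage sketch for the Org writer record above: build a tiny
-- 'Pandoc' value by hand and render it with 'writeOrg'. It assumes the same
-- pandoc vintage, where 'defaultWriterOptions' is exported by
-- Text.Pandoc.Shared and 'Header' takes only a level and inlines; the
-- document contents are invented for the example.
import Text.Pandoc.Definition
import Text.Pandoc.Shared (defaultWriterOptions)
import Text.Pandoc.Writers.Org (writeOrg)

main :: IO ()
main = putStr $ writeOrg defaultWriterOptions doc
  where
    doc = Pandoc (Meta [Str "Example"] [] [])
                 [ Header 1 [Str "Hello"]
                 , Para [Str "Some", Space, Str "text."]
                 ]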
module HyLoRes.Formula.TypeLevel where data Top data Prop data Nom data Neg f data Disj data Conj data Diam f data Box f data At f data Down f data Opaque class IsSubformula f_a a | f_a -> a instance IsSubformula (Neg f) f instance IsSubformula (Diam f) f instance IsSubformula (Box f) f instance IsSubformula (At f) f instance IsSubformula (Down f) f class (IsSubformula f_a a, IsSubformula f_b b) => Replace a b f_a f_b | f_a a b -> f_b, a b f_b -> f_a, f_a a f_b -> b, f_a b f_b -> a instance Replace a b (Neg a) (Neg b) instance Replace a b (Diam a) (Diam b) instance Replace a b (Box a) (Box b) instance Replace a b (At a) (At b) instance Replace a b (Down a) (Down b) data Spec a = AtTop (a (At Top)) | AtProp (a (At Prop)) | AtNom (a (At Nom)) -- | AtNegTop (a (At (Neg Top))) | AtNegProp (a (At (Neg Prop))) | AtNegNom (a (At (Neg Nom))) -- | AtConj (a (At Conj)) | AtDisj (a (At Disj)) -- | AtDiamNom (a (At (Diam Nom))) | AtDiamF (a (At (Diam Opaque))) | AtBoxF (a (At (Box Opaque))) -- | AtDownF (a (At (Down Opaque))) class Specializable a where specialize:: a (At t) -> Spec a instance Show (Spec a) where show AtTop{} = "AtTop" show AtProp{} = "AtProp" show AtNom{} = "AtNom" -- show AtNegTop{} = "AtNegTop" show AtNegProp{} = "AtNegProp" show AtNegNom{} = "AtNegNom" -- show AtConj{} = "AtConj" show AtDisj{} = "AtDisj" -- show AtDiamNom{} = "AtDiamNom" show AtDiamF{} = "AtDiamF" show AtBoxF{} = "AtBoxF" -- show AtDownF{} = "AtDownF"
nevrenato/HyLoRes_Source
src/HyLoRes/Formula/TypeLevel.hs
gpl-2.0
1,872
0
12
700
768
408
360
-1
-1
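-- Sketch of how a client of HyLoRes.Formula.TypeLevel above can do case
-- analysis on an @-prefixed formula through 'specialize'. The helper name is
-- an assumption; 'f' stands for any 'Specializable' formula representation,
-- and the 'Show (Spec a)' instance above does the printing.
describeAt :: Specializable f => f (At t) -> String
describeAt = show . specialize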
module Lamdu.Paths ( getDataFileName , getDataFileNameMaybe , getLamduDir , readDataFile ) where import Control.Monad.Except (runExceptT, throwError) import qualified Paths_Lamdu import qualified System.Directory as Directory import System.Environment.Executable (splitExecutablePath) import System.FilePath ((</>)) import qualified System.Info as SysInfo import Lamdu.Prelude -- | Data-dir as in the .cabal file dataDir :: FilePath dataDir = "data" searchPaths :: FilePath -> [IO FilePath] searchPaths path = [ Directory.getCurrentDirectory <&> (</> dataDir </> path) , splitExecutablePath <&> fst <&> (</> relDataDir </> path) , Paths_Lamdu.getDataFileName path ] where relDataDir | SysInfo.os == "darwin" = "../Resources" | otherwise = dataDir getDataFileNameMaybe :: FilePath -> IO (Maybe FilePath) getDataFileNameMaybe fileName = traverse_ (\mkSearchPath -> do path <- lift mkSearchPath exists <- Directory.doesFileExist path & lift when exists $ throwError path -- Early exit, not an error ) (searchPaths fileName) & runExceptT <&> either Just (\() -> Nothing) getDataFileName :: FilePath -> IO FilePath getDataFileName fileName = getDataFileNameMaybe fileName >>= maybe (fail ("Cannot find data file " ++ show fileName)) pure getLamduDir :: IO FilePath getLamduDir = Directory.getHomeDirectory <&> (</> ".lamdu") readDataFile :: FilePath -> IO String readDataFile path = getDataFileName path >>= readFile
lamdu/lamdu
src/Lamdu/Paths.hs
gpl-3.0
1,620
0
15
392
394
213
181
41
1
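-- Hypothetical caller of Lamdu.Paths above: look up a bundled data file and
-- fall back to a default when it is missing. The file name and fallback
-- value are assumptions made for this sketch.
import qualified Lamdu.Paths as Paths

loadConfigText :: IO String
loadConfigText =
    Paths.getDataFileNameMaybe "config.json"
    >>= maybe (pure "{}") readFile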
module B1.Graphics.Rendering.OpenGL.Utils ( color3 , color4 , normal3 , scale3 , texCoord2 , vector3 , vertex2 , vertex3 ) where import Graphics.Rendering.OpenGL color3 :: GLfloat -> GLfloat -> GLfloat -> Color3 GLfloat color3 = Color3 color4 :: GLfloat -> GLfloat -> GLfloat -> GLfloat -> Color4 GLfloat color4 = Color4 normal3 :: GLfloat -> GLfloat -> GLfloat -> Normal3 GLfloat normal3 = Normal3 scale3 :: GLfloat -> GLfloat -> GLfloat -> IO () scale3 = scale texCoord2 :: GLfloat -> GLfloat -> TexCoord2 GLfloat texCoord2 = TexCoord2 vector3 :: GLfloat -> GLfloat -> GLfloat -> Vector3 GLfloat vector3 = Vector3 vertex2 :: GLfloat -> GLfloat -> Vertex2 GLfloat vertex2 = Vertex2 vertex3 :: GLfloat -> GLfloat -> GLfloat -> Vertex3 GLfloat vertex3 = Vertex3
madjestic/b1
src/B1/Graphics/Rendering/OpenGL/Utils.hs
bsd-3-clause
789
0
9
152
239
131
108
26
1
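-- Usage sketch for the monomorphic helpers above: emit one coloured triangle
-- through the fixed-function pipeline. Assumes a GL context is already
-- current; window and context setup are omitted.
import Graphics.Rendering.OpenGL
import B1.Graphics.Rendering.OpenGL.Utils

drawTriangle :: IO ()
drawTriangle = renderPrimitive Triangles $ do
  color (color3 1 0 0) >> vertex (vertex2   0.0    0.5)
  color (color3 0 1 0) >> vertex (vertex2 (-0.5) (-0.5))
  color (color3 0 0 1) >> vertex (vertex2   0.5  (-0.5))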
{-# LANGUAGE TemplateHaskell #-} -------------------------------------------------------------------------------- -- | -- Module : Data.Comp.Derive.Ordering -- Copyright : (c) 2010-2011 Patrick Bahr -- License : BSD3 -- Maintainer : Patrick Bahr <[email protected]> -- Stability : experimental -- Portability : non-portable (GHC Extensions) -- -- Automatically derive instances of @OrdF@. -- -------------------------------------------------------------------------------- module Data.Comp.Derive.Ordering ( OrdF(..), makeOrdF ) where import Data.Comp.Derive.Equality import Data.Comp.Derive.Utils import Data.List import Data.Maybe import Language.Haskell.TH hiding (Cxt) {-| Signature ordering. An instance @OrdF f@ gives rise to an instance @Ord (Term f)@. -} class EqF f => OrdF f where compareF :: Ord a => f a -> f a -> Ordering compList :: [Ordering] -> Ordering compList = fromMaybe EQ . find (/= EQ) {-| Derive an instance of 'OrdF' for a type constructor of any first-order kind taking at least one argument. -} makeOrdF :: Name -> Q [Dec] makeOrdF fname = do TyConI (DataD _cxt name args constrs _deriving) <- abstractNewtypeQ $ reify fname let argNames = map (VarT . tyVarBndrName) (init args) complType = foldl AppT (ConT name) argNames preCond = map (mkClassP ''Ord . (: [])) argNames classType = AppT (ConT ''OrdF) complType eqAlgDecl <- funD 'compareF (compareFClauses constrs) return [InstanceD preCond classType [eqAlgDecl]] where compareFClauses [] = [] compareFClauses constrs = let constrs' = map abstractConType constrs `zip` [1..] constPairs = [(x,y)| x<-constrs', y <- constrs'] in map genClause constPairs genClause ((c,n),(d,m)) | n == m = genEqClause c | n < m = genLtClause c d | otherwise = genGtClause c d genEqClause (constr, n) = do varNs <- newNames n "x" varNs' <- newNames n "y" let pat = ConP constr $ map VarP varNs pat' = ConP constr $ map VarP varNs' vars = map VarE varNs vars' = map VarE varNs' mkEq x y = let (x',y') = (return x,return y) in [| compare $x' $y'|] eqs = listE $ zipWith mkEq vars vars' body <- [|compList $eqs|] return $ Clause [pat, pat'] (NormalB body) [] genLtClause (c, _) (d, _) = clause [recP c [], recP d []] (normalB [| LT |]) [] genGtClause (c, _) (d, _) = clause [recP c [], recP d []] (normalB [| GT |]) []
spacekitteh/compdata
src/Data/Comp/Derive/Ordering.hs
bsd-3-clause
2,704
0
17
795
793
419
374
46
2
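-- Sketch of deriving 'OrdF' for a hypothetical signature functor. 'Sig' is
-- invented for this example, and 'makeEqF' is assumed to be the analogous
-- deriver exported by Data.Comp.Derive.Equality (EqF is OrdF's superclass,
-- so both instances are needed).
{-# LANGUAGE TemplateHaskell #-}
module SigExample where

import Data.Comp.Derive.Equality (makeEqF)
import Data.Comp.Derive.Ordering (makeOrdF)

data Sig a = Lit Int | Add a a

$(makeEqF  ''Sig)
$(makeOrdF ''Sig)

-- Constructors compare in declaration order, so:
-- compareF (Lit 1) (Add () ()) == LT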
module Dotnet.System.Xml.XmlAttributeCollection ( module Dotnet.System.Xml.XmlAttributeCollection , module Dotnet.System.Xml.XmlAttributeCollectionTy ) where import Dotnet import qualified Dotnet.System.Xml.XmlAttributeCollectionTy import qualified Dotnet.System.Xml.XmlNamedNodeMap import qualified Dotnet.System.Xml.XmlAttributeTy import qualified Dotnet.System.Xml.XmlNodeTy import qualified Dotnet.System.Array data XmlAttributeCollection_ a type XmlAttributeCollection a = Dotnet.System.Xml.XmlNamedNodeMap.XmlNamedNodeMap (XmlAttributeCollection_ a) foreign import dotnet "method Dotnet.System.Xml.XmlAttributeCollection.RemoveAll" removeAll :: XmlAttributeCollection obj -> IO (()) foreign import dotnet "method Dotnet.System.Xml.XmlAttributeCollection.RemoveAt" removeAt :: Int -> XmlAttributeCollection obj -> IO (Dotnet.System.Xml.XmlAttributeTy.XmlAttribute a1) foreign import dotnet "method Dotnet.System.Xml.XmlAttributeCollection.Remove" remove :: Dotnet.System.Xml.XmlAttributeTy.XmlAttribute a0 -> XmlAttributeCollection obj -> IO (Dotnet.System.Xml.XmlAttributeTy.XmlAttribute a1) foreign import dotnet "method Dotnet.System.Xml.XmlAttributeCollection.InsertAfter" insertAfter :: Dotnet.System.Xml.XmlAttributeTy.XmlAttribute a0 -> Dotnet.System.Xml.XmlAttributeTy.XmlAttribute a1 -> XmlAttributeCollection obj -> IO (Dotnet.System.Xml.XmlAttributeTy.XmlAttribute a2) foreign import dotnet "method Dotnet.System.Xml.XmlAttributeCollection.InsertBefore" insertBefore :: Dotnet.System.Xml.XmlAttributeTy.XmlAttribute a0 -> Dotnet.System.Xml.XmlAttributeTy.XmlAttribute a1 -> XmlAttributeCollection obj -> IO (Dotnet.System.Xml.XmlAttributeTy.XmlAttribute a2) foreign import dotnet "method Dotnet.System.Xml.XmlAttributeCollection.Append" append :: Dotnet.System.Xml.XmlAttributeTy.XmlAttribute a0 -> XmlAttributeCollection obj -> IO (Dotnet.System.Xml.XmlAttributeTy.XmlAttribute a1) foreign import dotnet "method Dotnet.System.Xml.XmlAttributeCollection.Prepend" prepend :: Dotnet.System.Xml.XmlAttributeTy.XmlAttribute a0 -> XmlAttributeCollection obj -> IO (Dotnet.System.Xml.XmlAttributeTy.XmlAttribute a1) foreign import dotnet "method Dotnet.System.Xml.XmlAttributeCollection.get_ItemOf" get_ItemOf :: String -> String -> XmlAttributeCollection obj -> IO (Dotnet.System.Xml.XmlAttributeTy.XmlAttribute a2) foreign import dotnet "method Dotnet.System.Xml.XmlAttributeCollection.get_ItemOf" get_ItemOf_1 :: String -> XmlAttributeCollection obj -> IO (Dotnet.System.Xml.XmlAttributeTy.XmlAttribute a1) foreign import dotnet "method Dotnet.System.Xml.XmlAttributeCollection.get_ItemOf" get_ItemOf_2 :: Int -> XmlAttributeCollection obj -> IO (Dotnet.System.Xml.XmlAttributeTy.XmlAttribute a1) foreign import dotnet "method Dotnet.System.Xml.XmlAttributeCollection.SetNamedItem" setNamedItem :: Dotnet.System.Xml.XmlNodeTy.XmlNode a0 -> XmlAttributeCollection obj -> IO (Dotnet.System.Xml.XmlNodeTy.XmlNode a1) foreign import dotnet "method Dotnet.System.Xml.XmlAttributeCollection.CopyTo" copyTo :: Dotnet.System.Array.Array (Dotnet.System.Xml.XmlAttributeTy.XmlAttribute a0) -> Int -> XmlAttributeCollection obj -> IO (())
FranklinChen/Hugs
dotnet/lib/Dotnet/System/Xml/XmlAttributeCollection.hs
bsd-3-clause
3,211
6
12
301
625
358
267
-1
-1
{-# LANGUAGE TemplateHaskell, DataKinds, PolyKinds , TypeInType, TypeApplications, TypeFamilies #-} module T12045TH1 where import Data.Kind import Language.Haskell.TH hiding (Type) $([d| type family F (a :: k) :: Type where F @Type Int = Bool F @(Type->Type) Maybe = Char |]) $([d| data family D (a :: k) |]) $([d| data instance D @Type a = DBool |]) $([d| data instance D @(Type -> Type) b = DChar |])
sdiehl/ghc
testsuite/tests/th/T12045TH1.hs
bsd-3-clause
465
0
6
130
66
43
23
-1
-1
{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RankNTypes #-} module Web.Offset.Init where import Control.Concurrent.MVar import Control.Monad.State import qualified Data.Map as Map import Data.Text (Text) import qualified Database.Redis as R import Web.Larceny import qualified Data.IntSet as IntSet import Web.Offset.Cache import Web.Offset.HTTP import Web.Offset.Internal import Web.Offset.Splices import Web.Offset.Types initWordpress :: WordpressConfig s -> R.Connection -> StateT s IO Text -> WPLens b s -> IO (Wordpress b, Substitutions s) initWordpress wpconf redis getURI wpLens = do let rrunRedis = R.runRedis redis let logf = wpLogInt $ wpConfLogger wpconf let wpReq = case wpConfRequester wpconf of Left (u,p) -> wreqRequester logf u p Right r -> r active <- newMVar Map.empty let wpInt = WordpressInt{ wpRequest = wpRequestInt wpReq (wpConfEndpoint wpconf) , wpCacheSet = wpCacheSetInt rrunRedis (wpConfCacheBehavior wpconf) , wpCacheGet = wpCacheGetInt rrunRedis (wpConfCacheBehavior wpconf) , startReqMutex = startReqMutexInt active , stopReqMutex = stopReqMutexInt active , runRedis = rrunRedis } let wp = Wordpress{ wpExpireAggregates = wpExpireAggregatesInt rrunRedis , wpExpirePost = wpExpirePostInt rrunRedis , cachingGet = cachingGetInt wpInt , cachingGetRetry = cachingGetRetryInt wpInt , cachingGetError = cachingGetErrorInt wpInt , cacheInternals = wpInt , wpLogger = logf } let extraFields = wpConfExtraFields wpconf return (wp, wordpressSubs wp extraFields getURI wpLens Nothing)
dbp/snaplet-wordpress
src/Web/Offset/Init.hs
bsd-3-clause
2,046
0
14
735
435
234
201
42
2
{-# LANGUAGE GADTSyntax #-} {-# LANGUAGE KindSignatures #-} {-# LANGUAGE MagicHash #-} {-# LANGUAGE UnliftedNewtypes #-} {-# LANGUAGE RankNTypes #-} {-# LANGUAGE PolyKinds #-} {-# LANGUAGE UnboxedTuples #-} {-# LANGUAGE UnboxedSums #-} {-# LANGUAGE TypeInType #-} {-# LANGUAGE TypeFamilies #-} import GHC.Int (Int(I#)) import GHC.Word (Word(W#)) import GHC.Exts (Int#,Word#,(+#)) import GHC.Types import Data.Coerce (coerce) main :: IO () main = do print (I# (coerce (Foo 5#))) newtype Foo = Foo Int#
sdiehl/ghc
testsuite/tests/typecheck/should_run/UnliftedNewtypesCoerceRun.hs
bsd-3-clause
506
0
13
76
119
73
46
19
1
{-# LANGUAGE StandaloneKindSignatures #-} module SAKS_Fail002 where import Data.Kind (Type) data D type D :: Type type D :: Type type D :: Type
sdiehl/ghc
testsuite/tests/saks/should_fail/saks_fail002.hs
bsd-3-clause
148
0
5
28
36
24
12
-1
-1
{-# LANGUAGE GADTs #-} {-# LANGUAGE TypeOperators #-} {-# OPTIONS_GHC -Wpartial-fields #-} module T16411 where import Data.Type.Equality data T1 z where MkT1a :: { rec1 :: () } -> T1 Int MkT1b :: (z ~ Bool) => T1 z data T2 z where MkT2a :: { rec2 :: () } -> T2 Int MkT2b :: (z ~~ Bool) => T2 z
sdiehl/ghc
testsuite/tests/typecheck/should_compile/T16411.hs
bsd-3-clause
305
0
8
72
99
60
39
-1
-1
module Let1 where import Control.Parallel.Strategies (rpar, runEval) fib n | n <= 1 = 1 | otherwise = let n1 = fib (n - 1) n2 = fib (n - 2) n1_2 = runEval (do n1_2 <- rpar n1 return n1_2) in (n1_2 + n2) + 1
RefactoringTools/HaRe
old/testing/evalMonad/Let1AST.hs
bsd-3-clause
372
0
15
212
119
60
59
13
1
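-- For comparison with the Let1 refactoring output above: a hypothetical
-- variant that sparks both recursive calls before combining them, using the
-- same Control.Parallel.Strategies API. Both sparks are forced by the final
-- addition.
import Control.Parallel.Strategies (rpar, runEval)

fibPar :: Int -> Int
fibPar n
  | n <= 1    = 1
  | otherwise = runEval $ do
      n1 <- rpar (fibPar (n - 1))
      n2 <- rpar (fibPar (n - 2))
      return (n1 + n2 + 1)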
{-# LANGUAGE PatternGuards #-} module Main (main) where import Network.HTTP hiding (password) import Network.Browser import Network.URI (URI(..), parseRelativeReference, relativeTo) import Distribution.Client import Distribution.Client.Cron (cron, rethrowSignalsAsExceptions, Signal(..), ReceivedSignal(..)) import Distribution.Package import Distribution.Text import Distribution.Verbosity import Distribution.Simple.Utils hiding (intercalate) import Distribution.Version (Version(..)) import Data.List import Data.Maybe import Data.IORef import Data.Time import Control.Exception import Control.Monad import Control.Monad.Trans import qualified Data.ByteString.Lazy as BS import qualified Data.Set as S import qualified Codec.Compression.GZip as GZip import qualified Codec.Archive.Tar as Tar import System.Environment import System.Exit(exitFailure, ExitCode(..)) import System.FilePath import System.Directory import System.Console.GetOpt import System.Process import System.IO import System.IO.Error import Paths_hackage_server (version) import Data.Aeson (eitherDecode) data Mode = Help [String] | Init URI [URI] | Stats | Build [PackageId] data BuildOpts = BuildOpts { bo_verbosity :: Verbosity, bo_runTime :: Maybe NominalDiffTime, bo_stateDir :: FilePath, bo_continuous :: Maybe Int, bo_keepGoing :: Bool, bo_dryRun :: Bool, bo_prune :: Bool, bo_username :: Maybe String, bo_password :: Maybe String } data BuildConfig = BuildConfig { bc_srcURI :: URI, bc_auxURIs :: [URI], bc_username :: String, bc_password :: String } srcName :: URI -> String srcName uri = fromMaybe (show uri) (uriHostName uri) installDirectory :: BuildOpts -> FilePath installDirectory bo = bo_stateDir bo </> "tmp-install" resultsDirectory :: BuildOpts -> FilePath resultsDirectory bo = bo_stateDir bo </> "results" main :: IO () main = topHandler $ do rethrowSignalsAsExceptions [SIGABRT, SIGINT, SIGQUIT, SIGTERM] hSetBuffering stdout LineBuffering args <- getArgs (mode, opts) <- validateOpts args case mode of Help strs -> do let usageHeader = intercalate "\n" [ "Usage: hackage-build init URL [auxiliary URLs] [options]", " hackage-build build [packages] [options]", " hackage-build stats", "Options:"] mapM_ putStrLn $ strs putStrLn $ usageInfo usageHeader buildFlagDescrs unless (null strs) exitFailure Init uri auxUris -> initialise opts uri auxUris Stats -> do stateDir <- canonicalizePath $ bo_stateDir opts let opts' = opts { bo_stateDir = stateDir } stats opts' Build pkgs -> do stateDir <- canonicalizePath $ bo_stateDir opts let opts' = opts { bo_stateDir = stateDir } case bo_continuous opts' of Nothing -> buildOnce opts' pkgs Just interval -> do cron (bo_verbosity opts') interval (const (buildOnce opts' pkgs)) () --------------------------------- -- Initialisation & config file -- initialise :: BuildOpts -> URI -> [URI] -> IO () initialise opts uri auxUris = do username <- readMissingOpt "Enter hackage username" (bo_username opts) password <- readMissingOpt "Enter hackage password" (bo_password opts) let config = BuildConfig { bc_srcURI = uri, bc_auxURIs = auxUris, bc_username = username, bc_password = password } createDirectoryIfMissing False $ bo_stateDir opts createDirectoryIfMissing False $ resultsDirectory opts writeConfig opts config writeCabalConfig opts config where readMissingOpt prompt = maybe (putStrLn prompt >> getLine) return writeConfig :: BuildOpts -> BuildConfig -> IO () writeConfig opts BuildConfig { bc_srcURI = uri, bc_auxURIs = auxUris, bc_username = username, bc_password = password } = -- [Note: Show/Read URI] -- 
Ideally we'd just be showing a BuildConfig, but URI doesn't -- have Show/Read, so that doesn't work. So instead, we write -- out a tuple containing the uri as a string, and parse it -- each time we read it. let confStr = show (show uri, map show auxUris, username, password) in writeFile (configFile opts) confStr readConfig :: BuildOpts -> IO BuildConfig readConfig opts = do xs <- readFile $ configFile opts case reads xs of [((uriStr, auxUriStrs, username, password), _)] -> case mapM validateHackageURI (uriStr : auxUriStrs) of -- Shouldn't happen: We check that this -- returns Right when we create the -- config file. See [Note: Show/Read URI]. Left theError -> die theError Right (uri : auxUris) -> return $ BuildConfig { bc_srcURI = uri, bc_auxURIs = auxUris, bc_username = username, bc_password = password } Right _ -> error "The impossible happened" _ -> die "Can't parse config file (maybe re-run \"hackage-build init\")" configFile :: BuildOpts -> FilePath configFile opts = bo_stateDir opts </> "hackage-build-config" writeCabalConfig :: BuildOpts -> BuildConfig -> IO () writeCabalConfig opts config = do let tarballsDir = bo_stateDir opts </> "cached-tarballs" writeFile (bo_stateDir opts </> "cabal-config") . unlines $ [ "remote-repo: " ++ srcName uri ++ ":" ++ show uri | uri <- bc_srcURI config : bc_auxURIs config ] ++ [ "remote-repo-cache: " ++ tarballsDir ] createDirectoryIfMissing False tarballsDir ---------------------- -- Displaying status -- data StatResult = AllVersionsBuiltOk | AllVersionsAttempted | NoneBuilt | SomeBuiltOk | SomeFailed deriving Eq stats :: BuildOpts -> IO () stats opts = do config <- readConfig opts let verbosity = bo_verbosity opts notice verbosity "Initialising" (didFail, _, _) <- mkPackageFailed opts pkgIdsHaveDocs <- getDocumentationStats verbosity config didFail infoStats verbosity (Just statsFile) pkgIdsHaveDocs where statsFile = bo_stateDir opts </> "stats" infoStats :: Verbosity -> Maybe FilePath -> [DocInfo] -> IO () infoStats verbosity mDetailedStats pkgIdsHaveDocs = do nfo $ "There are " ++ show (length byPackage) ++ " packages with a total of " ++ show (length pkgIdsHaveDocs) ++ " package versions" nfo $ "So far we have built or attempted to built " ++ show (length (filter ((/= DocsNotBuilt) . docInfoHasDocs) pkgIdsHaveDocs)) ++ " packages; only " ++ show (length (filter ((== DocsNotBuilt) . docInfoHasDocs) pkgIdsHaveDocs)) ++ " left!" nfo "Considering the most recent version only:" nfo . printTable . indent $ [ [show (length mostRecentBuilt) , "built succesfully"] , [show (length mostRecentFailed) , "failed to build"] , [show (length mostRecentNotBuilt), "not yet built"] ] nfo "Considering all versions:" nfo . printTable . 
indent $ [ [count AllVersionsBuiltOk, "all versions built successfully"] , [count AllVersionsAttempted, "attempted to build all versions, but some failed"] , [count SomeBuiltOk, "not all versions built yet, but those that did were ok"] , [count SomeFailed, "not all versions built yet, and some failures"] , [count NoneBuilt, "no versions built yet"] ] case mDetailedStats of Nothing -> return () Just statsFile -> do writeFile statsFile $ printTable (["Package", "Version", "Has docs?"] : formattedStats) notice verbosity $ "Detailed statistics written to " ++ statsFile where -- | We avoid 'info' here because it re-wraps the text nfo :: String -> IO () nfo str = when (verbosity >= verbose) $ putStrLn str byPackage :: [[DocInfo]] byPackage = map (sortBy (flip (comparing docInfoPackageVersion))) $ groupBy (equating docInfoPackageName) $ sortBy (comparing docInfoPackageName) pkgIdsHaveDocs mostRecentBuilt, mostRecentFailed, mostRecentNotBuilt :: [[DocInfo]] mostRecentBuilt = filter ((== HasDocs) . docInfoHasDocs . head) byPackage mostRecentFailed = filter ((== DocsFailed) . docInfoHasDocs . head) byPackage mostRecentNotBuilt = filter ((== DocsNotBuilt) . docInfoHasDocs . head) byPackage categorise :: [DocInfo] -> StatResult categorise ps | all (== HasDocs) hd = AllVersionsBuiltOk | all (/= DocsNotBuilt) hd = AllVersionsAttempted | all (== DocsNotBuilt) hd = NoneBuilt | all (/= DocsFailed) hd = SomeBuiltOk | otherwise = SomeFailed where hd = map docInfoHasDocs ps categorised :: [StatResult] categorised = map categorise byPackage count :: StatResult -> String count c = show (length (filter (c ==) categorised)) formatPkg :: [DocInfo] -> [[String]] formatPkg = map $ \docInfo -> [ display (docInfoPackageName docInfo) , display (docInfoPackageVersion docInfo) , show (docInfoHasDocs docInfo) ] formattedStats :: [[String]] formattedStats = concatMap formatPkg byPackage indent :: [[String]] -> [[String]] indent = map (" " :) -- | Formats a 2D table so that everything is nicely aligned -- -- NOTE: Expects the same number of columns in every row! printTable :: [[String]] -> String printTable xss = intercalate "\n" . map (intercalate " ") . map padCols $ xss where colWidths :: [[Int]] colWidths = map (map length) $ xss maxColWidths :: [Int] maxColWidths = foldr1 (\xs ys -> map (uncurry max) (zip xs ys)) colWidths padCols :: [String] -> [String] padCols cols = map (uncurry padTo) (zip maxColWidths cols) padTo :: Int -> String -> String padTo len str = str ++ replicate (len - length str) ' ' data HasDocs = HasDocs | DocsNotBuilt | DocsFailed deriving (Eq, Show) data DocInfo = DocInfo { docInfoPackage :: PackageIdentifier , docInfoHasDocs :: HasDocs , docInfoIsCandidate :: Bool } docInfoPackageName :: DocInfo -> PackageName docInfoPackageName = pkgName . docInfoPackage docInfoPackageVersion :: DocInfo -> Version docInfoPackageVersion = pkgVersion . 
docInfoPackage docInfoBaseURI :: BuildConfig -> DocInfo -> URI docInfoBaseURI config docInfo = if not (docInfoIsCandidate docInfo) then bc_srcURI config <//> "package" </> display (docInfoPackage docInfo) else bc_srcURI config <//> "package" </> display (docInfoPackage docInfo) </> "candidate" docInfoDocsURI :: BuildConfig -> DocInfo -> URI docInfoDocsURI config docInfo = docInfoBaseURI config docInfo <//> "docs" docInfoTarGzURI :: BuildConfig -> DocInfo -> URI docInfoTarGzURI config docInfo = docInfoBaseURI config docInfo <//> display (docInfoPackage docInfo) <.> "tar.gz" docInfoReports :: BuildConfig -> DocInfo -> URI docInfoReports config docInfo = docInfoBaseURI config docInfo <//> "reports/" getDocumentationStats :: Verbosity -> BuildConfig -> (PackageId -> IO Bool) -> IO [DocInfo] getDocumentationStats verbosity config didFail = do notice verbosity "Downloading documentation index" httpSession verbosity "hackage-build" version $ do mPackages <- liftM eitherDecode `liftM` requestGET' packagesUri mCandidates <- liftM eitherDecode `liftM` requestGET' candidatesUri case (mPackages, mCandidates) of -- Download failure (Nothing, _) -> fail $ "Could not download " ++ show packagesUri (_, Nothing) -> fail $ "Could not download " ++ show candidatesUri -- Decoding failure (Just (Left e), _) -> fail $ "Could not decode " ++ show packagesUri ++ ": " ++ e (_, Just (Left e)) -> fail $ "Could not decode " ++ show candidatesUri ++ ": " ++ e -- Success (Just (Right packages), Just (Right candidates)) -> do packages' <- liftIO $ mapM checkFailed packages candidates' <- liftIO $ mapM checkFailed candidates return $ map (setIsCandidate False) packages' ++ map (setIsCandidate True) candidates' where packagesUri = bc_srcURI config <//> "packages" </> "docs.json" candidatesUri = bc_srcURI config <//> "packages" </> "candidates" </> "docs.json" checkFailed :: (String, Bool) -> IO (PackageIdentifier, HasDocs) checkFailed (pkgId, docsBuilt) = do let pkgId' = fromJust (simpleParse pkgId) if docsBuilt then return (pkgId', HasDocs) else do failed <- didFail pkgId' if failed then return (pkgId', DocsFailed) else return (pkgId', DocsNotBuilt) setIsCandidate :: Bool -> (PackageIdentifier, HasDocs) -> DocInfo setIsCandidate isCandidate (pId, hasDocs) = DocInfo { docInfoPackage = pId , docInfoHasDocs = hasDocs , docInfoIsCandidate = isCandidate } ---------------------- -- Building packages -- buildOnce :: BuildOpts -> [PackageId] -> IO () buildOnce opts pkgs = keepGoing $ do config <- readConfig opts notice verbosity "Initialising" (has_failed, mark_as_failed, persist_failed) <- mkPackageFailed opts flip finally persist_failed $ do updatePackageIndex pkgIdsHaveDocs <- getDocumentationStats verbosity config has_failed infoStats verbosity Nothing pkgIdsHaveDocs -- First build all of the latest versions of each package -- Then go back and build all the older versions -- NOTE: assumes all these lists are non-empty let latestFirst :: [[DocInfo]] -> [DocInfo] latestFirst ids = map head ids ++ concatMap tail ids -- Find those files *not* marked as having documentation in our cache let toBuild :: [DocInfo] toBuild = filter shouldBuild . latestFirst . map (sortBy (flip (comparing docInfoPackageVersion))) . groupBy (equating docInfoPackageName) . sortBy (comparing docInfoPackageName) $ pkgIdsHaveDocs notice verbosity $ show (length toBuild) ++ " package(s) to build" -- Try to build each of them, uploading the documentation and -- build reports along the way. 
We mark each package as having -- documentation in the cache even if the build fails because -- we don't want to keep continually trying to build a failing -- package! startTime <- getCurrentTime let go :: [DocInfo] -> IO () go [] = return () go (docInfo : toBuild') = do (mTgz, mRpt, logfile) <- buildPackage verbosity opts config docInfo case mTgz of Nothing -> mark_as_failed (docInfoPackage docInfo) Just _ -> return () case mRpt of Just _ | bo_dryRun opts -> return () Just report -> uploadResults verbosity config docInfo mTgz report logfile _ -> return () -- We don't check the runtime until we've actually tried -- to build a doc, so as to ensure we make progress. outOfTime <- case bo_runTime opts of Nothing -> return False Just d -> do currentTime <- getCurrentTime return $ (currentTime `diffUTCTime` startTime) > d if outOfTime then return () else go toBuild' go toBuild where shouldBuild :: DocInfo -> Bool shouldBuild docInfo = case docInfoHasDocs docInfo of DocsNotBuilt -> null pkgs || any (isSelectedPackage pkgid) pkgs _ -> False where pkgid = docInfoPackage docInfo -- do versionless matching if no version was given isSelectedPackage pkgid pkgid'@(PackageIdentifier _ (Version [] _)) = packageName pkgid == packageName pkgid' isSelectedPackage pkgid pkgid' = pkgid == pkgid' keepGoing :: IO () -> IO () keepGoing act | bo_keepGoing opts = Control.Exception.catch act showExceptionAsWarning | otherwise = act showExceptionAsWarning :: SomeException -> IO () showExceptionAsWarning e -- except for signals telling us to really stop | Just (ReceivedSignal {}) <- fromException e = throwIO e | Just UserInterrupt <- fromException e = throwIO e | otherwise = do warn verbosity (show e) notice verbosity "Abandoning this build attempt." verbosity = bo_verbosity opts updatePackageIndex = do update_ec <- cabal opts "update" [] Nothing unless (update_ec == ExitSuccess) $ die "Could not 'cabal update' from specified server" -- Builds a little memoised function that can tell us whether a -- particular package failed to build its documentation mkPackageFailed :: BuildOpts -> IO (PackageId -> IO Bool, PackageId -> IO (), IO ()) mkPackageFailed opts = do init_failed <- readFailedCache (bo_stateDir opts) cache_var <- newIORef init_failed let mark_as_failed pkg_id = atomicModifyIORef cache_var $ \already_failed -> (S.insert pkg_id already_failed, ()) has_failed pkg_id = liftM (pkg_id `S.member`) $ readIORef cache_var persist = readIORef cache_var >>= writeFailedCache (bo_stateDir opts) return (has_failed, mark_as_failed, persist) where readFailedCache :: FilePath -> IO (S.Set PackageId) readFailedCache cache_dir = do pkgstrs <- handleDoesNotExist (return []) $ liftM lines $ readFile (cache_dir </> "failed") case validatePackageIds pkgstrs of Left theError -> die theError Right pkgs -> return (S.fromList pkgs) writeFailedCache :: FilePath -> S.Set PackageId -> IO () writeFailedCache cache_dir pkgs = writeFile (cache_dir </> "failed") $ unlines $ map display $ S.toList pkgs -- | Build documentation and return @(Just tgz)@ for the built tgz file -- on success, or @Nothing@ otherwise. 
buildPackage :: Verbosity -> BuildOpts -> BuildConfig -> DocInfo -> IO (Maybe FilePath, Maybe FilePath, FilePath) buildPackage verbosity opts config docInfo = do let pkgid = docInfoPackage docInfo notice verbosity ("Building " ++ display pkgid) handleDoesNotExist (return ()) $ removeDirectoryRecursive $ installDirectory opts createDirectory $ installDirectory opts -- Create the local package db let packageDb = installDirectory opts </> "packages.db" -- TODO: use Distribution.Simple.Program.HcPkg ph <- runProcess "ghc-pkg" ["init", packageDb] Nothing Nothing Nothing Nothing Nothing init_ec <- waitForProcess ph unless (init_ec == ExitSuccess) $ die $ "Could not initialise the package db " ++ packageDb -- The documentation is installed within the stateDir because we -- set a prefix while installing let doc_root = installDirectory opts </> "haddocks" doc_dir_tmpl = doc_root </> "$pkgid-docs" doc_dir_pkg = doc_root </> display pkgid ++ "-docs" -- doc_dir_html = doc_dir </> "html" -- deps_doc_dir = doc_dir </> "deps" -- temp_doc_dir = doc_dir </> display (docInfoPackage docInfo) ++ "-docs" pkg_url = "/package" </> "$pkg-$version" pkg_flags = ["--enable-documentation", "--htmldir=" ++ doc_dir_tmpl, -- We only care about docs, so we want to build as -- quickly as possible, and hence turn -- optimisation off. Also explicitly pass -O0 as a -- GHC option, in case it overrides a .cabal -- setting or anything "--disable-optimization", "--ghc-option", "-O0", "--disable-library-for-ghci", -- We don't want packages installed in the user -- package.conf to affect things. In particular, -- we don't want doc building to fail because -- "packages are likely to be broken by the reinstalls" "--package-db=clear", "--package-db=global", "--package-db=" ++ packageDb, -- Always build the package, even when it's been built -- before. This lets us regenerate documentation when -- dependencies are updated. "--reinstall", -- We know where this documentation will -- eventually be hosted, bake that in. -- The wiki claims we shouldn't include the -- version in the hyperlinks so we don't have -- to rehaddock some package when the dependent -- packages get updated. However, this is NOT -- what the Hackage v1 did, so ignore that: "--haddock-html-location=" ++ pkg_url </> "docs", -- Link "Contents" to the package page: "--haddock-contents-location=" ++ pkg_url, -- Link to colourised source code: "--haddock-hyperlink-source", "--prefix=" ++ installDirectory opts, "--build-summary=" ++ installDirectory opts </> "reports" </> "$pkgid.report", "--report-planning-failure", -- We want both html documentation and hoogle database generated "--haddock-html", "--haddock-hoogle", -- For candidates we need to use the full URL, because -- otherwise cabal-install will not find the package. -- For regular packages however we need to use just the -- package name, otherwise cabal-install will not -- generate a report if docInfoIsCandidate docInfo then show (docInfoTarGzURI config docInfo) else display pkgid ] -- The installDirectory is purely temporary, while the resultsDirectory is -- more persistent. We will grab various outputs from the tmp dir and stash -- them for safe keeping (for later upload or manual inspection) in the -- results dir. 
let resultDir = resultsDirectory opts resultLogFile = resultDir </> display pkgid <.> "log" resultReportFile = resultDir </> display pkgid <.> "report" resultDocsTarball = resultDir </> (display pkgid ++ "-docs") <.> "tar.gz" buildLogHnd <- openFile resultLogFile WriteMode -- We ignore the result of calling @cabal install@ because -- @cabal install@ succeeds even if the documentation fails to build. void $ cabal opts "install" pkg_flags (Just buildLogHnd) -- Grab the report for the package we want. Stash it for safe keeping. report <- handleDoesNotExist (return Nothing) $ do renameFile (installDirectory opts </> "reports" </> display pkgid <.> "report") resultReportFile appendFile resultReportFile "\ndoc-builder: True" return (Just resultReportFile) docs_generated <- fmap and $ sequence [ doesDirectoryExist doc_dir_pkg, doesFileExist (doc_dir_pkg </> "doc-index.html"), doesFileExist (doc_dir_pkg </> display (docInfoPackageName docInfo) <.> "haddock")] docs <- if docs_generated then do when (bo_prune opts) (pruneHaddockFiles doc_dir_pkg) BS.writeFile resultDocsTarball =<< tarGzDirectory doc_dir_pkg return (Just resultDocsTarball) else return Nothing notice verbosity $ unlines [ "Build results for " ++ display pkgid ++ ":" , fromMaybe "no report" report , fromMaybe "no docs" docs , resultLogFile ] return (docs, report, resultLogFile) cabal :: BuildOpts -> String -> [String] -> Maybe Handle -> IO ExitCode cabal opts cmd args moutput = do let verbosity = bo_verbosity opts cabalConfigFile = bo_stateDir opts </> "cabal-config" verbosityArgs = if verbosity == silent then ["-v0"] else [] all_args = ("--config-file=" ++ cabalConfigFile) : cmd : verbosityArgs ++ args info verbosity $ unwords ("cabal":all_args) ph <- runProcess "cabal" all_args Nothing Nothing Nothing moutput moutput waitForProcess ph pruneHaddockFiles :: FilePath -> IO () pruneHaddockFiles dir = do -- Hackage doesn't support the haddock frames view, so remove it -- both visually (no frames link) and save space too. files <- getDirectoryContents dir sequence_ [ removeFile (dir </> file) | file <- files , unwantedFile file ] hackJsUtils where unwantedFile file | "frames.html" == file = True | "mini_" `isPrefixOf` file = True -- The .haddock file is haddock-version specific -- so it is not useful to make available for download | ".haddock" <- takeExtension file = True | otherwise = False -- The "Frames" link is added by the JS, just comment it out. hackJsUtils = do content <- readFile (dir </> "haddock-util.js") _ <- evaluate (length content) writeFile (dir </> "haddock-util.js") (munge content) where munge = unlines . map removeAddMenuItem . lines removeAddMenuItem l | (sp, l') <- span (==' ') l , "addMenuItem" `isPrefixOf` l' = sp ++ "//" ++ l' removeAddMenuItem l = l tarGzDirectory :: FilePath -> IO BS.ByteString tarGzDirectory dir = do res <- liftM (GZip.compress . Tar.write) $ Tar.pack containing_dir [nested_dir] -- This seq is extremely important! Tar.pack is lazy, scanning -- directories as entries are demanded. -- This interacts very badly with the renameDirectory stuff with -- which tarGzDirectory gets wrapped. 
BS.length res `seq` return res where (containing_dir, nested_dir) = splitFileName dir uploadResults :: Verbosity -> BuildConfig -> DocInfo -> Maybe FilePath -> FilePath -> FilePath -> IO () uploadResults verbosity config docInfo mdocsTarballFile buildReportFile buildLogFile = httpSession verbosity "hackage-build" version $ do -- Make sure we authenticate to Hackage setAuthorityGen (provideAuthInfo (bc_srcURI config) (Just (bc_username config, bc_password config))) case mdocsTarballFile of Nothing -> return () Just docsTarballFile -> putDocsTarball config docInfo docsTarballFile buildId <- postBuildReport config docInfo buildReportFile putBuildLog buildId buildLogFile putDocsTarball :: BuildConfig -> DocInfo -> FilePath -> HttpSession () putDocsTarball config docInfo docsTarballFile = requestPUTFile (docInfoDocsURI config docInfo) "application/x-tar" (Just "gzip") docsTarballFile type BuildReportId = URI postBuildReport :: BuildConfig -> DocInfo -> FilePath -> HttpSession BuildReportId postBuildReport config docInfo reportFile = do let uri = docInfoReports config docInfo body <- liftIO $ BS.readFile reportFile setAllowRedirects False (_, response) <- request Request { rqURI = uri, rqMethod = POST, rqHeaders = [Header HdrContentType ("text/plain"), Header HdrContentLength (show (BS.length body)), Header HdrAccept ("text/plain")], rqBody = body } case rspCode response of --TODO: fix server to not do give 303, 201 is more appropriate (3,0,3) | [Just buildId] <- [ do rel <- parseRelativeReference location return $ relativeTo rel uri | Header HdrLocation location <- rspHeaders response ] -> return buildId _ -> do checkStatus uri response fail "Unexpected response from server." putBuildLog :: BuildReportId -> FilePath -> HttpSession () putBuildLog reportId buildLogFile = do body <- liftIO $ BS.readFile buildLogFile let uri = reportId <//> "log" setAllowRedirects False (_, response) <- request Request { rqURI = uri, rqMethod = PUT, rqHeaders = [Header HdrContentType ("text/plain"), Header HdrContentLength (show (BS.length body)), Header HdrAccept ("text/plain")], rqBody = body } case rspCode response of --TODO: fix server to not to give 303, 201 is more appropriate (3,0,3) -> return () _ -> checkStatus uri response ------------------------- -- Command line handling ------------------------- data BuildFlags = BuildFlags { flagCacheDir :: Maybe FilePath, flagVerbosity :: Verbosity, flagRunTime :: Maybe NominalDiffTime, flagHelp :: Bool, flagForce :: Bool, flagContinuous :: Bool, flagKeepGoing :: Bool, flagDryRun :: Bool, flagInterval :: Maybe String, flagPrune :: Bool, flagUsername :: Maybe String, flagPassword :: Maybe String } emptyBuildFlags :: BuildFlags emptyBuildFlags = BuildFlags { flagCacheDir = Nothing , flagVerbosity = normal , flagRunTime = Nothing , flagHelp = False , flagForce = False , flagContinuous = False , flagKeepGoing = False , flagDryRun = False , flagInterval = Nothing , flagPrune = False , flagUsername = Nothing , flagPassword = Nothing } buildFlagDescrs :: [OptDescr (BuildFlags -> BuildFlags)] buildFlagDescrs = [ Option ['h'] ["help"] (NoArg (\opts -> opts { flagHelp = True })) "Show this help text" , Option ['s'] [] (NoArg (\opts -> opts { flagVerbosity = silent })) "Silent mode" , Option ['v'] [] (NoArg (\opts -> opts { flagVerbosity = moreVerbose (flagVerbosity opts) })) "Verbose mode (can be listed multiple times e.g. 
-vv)" , Option [] ["run-time"] (ReqArg (\mins opts -> case reads mins of [(mins', "")] -> opts { flagRunTime = Just (fromInteger mins' * 60) } _ -> error "Can't parse minutes") "MINS") "Limit the running time of the build client" , Option [] ["cache-dir"] (ReqArg (\dir opts -> opts { flagCacheDir = Just dir }) "DIR") "Where to put files during building" , Option [] ["continuous"] (NoArg (\opts -> opts { flagContinuous = True })) "Build continuously rather than just once" , Option [] ["keep-going"] (NoArg (\opts -> opts { flagKeepGoing = True })) "Keep going after errors" , Option [] ["dry-run"] (NoArg (\opts -> opts { flagDryRun = True })) "Don't record results or upload" , Option [] ["interval"] (ReqArg (\int opts -> opts { flagInterval = Just int }) "MIN") "Set the building interval in minutes (default 30)" , Option [] ["prune-haddock-files"] (NoArg (\opts -> opts { flagPrune = True })) "Remove unnecessary haddock files (frames, .haddock file)" , Option [] ["init-username"] (ReqArg (\uname opts -> opts { flagUsername = Just uname }) "USERNAME") "The Hackage user to run the build as (used with init)" , Option [] ["init-password"] (ReqArg (\passwd opts -> opts { flagPassword = Just passwd }) "PASSWORD") "The password of the Hackage user to run the build as (used with init)" ] validateOpts :: [String] -> IO (Mode, BuildOpts) validateOpts args = do let (flags0, args', errs) = getOpt Permute buildFlagDescrs args flags = accum flags0 emptyBuildFlags stateDir = fromMaybe "build-cache" (flagCacheDir flags) opts = BuildOpts { bo_verbosity = flagVerbosity flags, bo_runTime = flagRunTime flags, bo_stateDir = stateDir, bo_continuous = case (flagContinuous flags, flagInterval flags) of (True, Just i) -> Just (read i) (True, Nothing) -> Just 30 -- default interval (False, _) -> Nothing, bo_keepGoing = flagKeepGoing flags, bo_dryRun = flagDryRun flags, bo_prune = flagPrune flags, bo_username = flagUsername flags, bo_password = flagPassword flags } mode = case args' of _ | flagHelp flags -> Help [] | not (null errs) -> Help errs "init" : uriStr : auxUriStrs -> -- We don't actually want the URI at this point -- (see [Note: Show/Read URI]) case mapM validateHackageURI (uriStr : auxUriStrs) of Left theError -> Help [theError] Right (uri:auxUris) -> Init uri auxUris Right _ -> error "impossible" ["stats"] -> Stats "stats" : _ -> Help ["stats takes no arguments"] "build" : pkgstrs -> case validatePackageIds pkgstrs of Left theError -> Help [theError] Right pkgs -> Build pkgs cmd : _ -> Help ["Unrecognised command: " ++ show cmd] [] -> Help [] -- Ensure we store the absolute state_dir, because we might -- change the CWD later and we don't want the stateDir to be -- invalidated by such a change -- -- We have to ensure the directory exists before we do -- canonicalizePath, or otherwise we get an exception if it -- does not yet exist return (mode, opts) where accum flags = foldr (flip (.)) id flags {------------------------------------------------------------------------------ Auxiliary ------------------------------------------------------------------------------} handleDoesNotExist :: IO a -> IO a -> IO a handleDoesNotExist handler act = handleJust (\e -> if isDoesNotExistError e then Just () else Nothing) (\() -> handler) act
ocharles/hackage-server
BuildClient.hs
bsd-3-clause
35,871
0
25
11,270
7,999
4,128
3,871
646
12
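-- Small usage sketch for the 'handleDoesNotExist' helper defined at the end
-- of BuildClient.hs above, written as if it were added inside that file (the
-- wrapper name is an assumption): read a cache file, treating a missing file
-- the same as an empty one.
readCacheOrEmpty :: FilePath -> IO [String]
readCacheOrEmpty path =
  handleDoesNotExist (return []) (fmap lines (readFile path))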
{-# OPTIONS_JHC -fno-prelude -fffi #-} ----------------------------------------------------------------------------- -- | -- Module : Foreign.C.String -- Copyright : (c) The FFI task force 2001 -- License : BSD-style (see the file libraries/base/LICENSE) -- -- Maintainer : [email protected] -- Stability : provisional -- Portability : portable -- -- Utilities for primitive marshalling of C strings. -- -- The marshalling converts each Haskell character, representing a Unicode -- code point, to one or more bytes in a manner that, by default, is -- determined by the current locale. As a consequence, no guarantees -- can be made about the relative length of a Haskell string and its -- corresponding C string, and therefore all the marshalling routines -- include memory allocation. The translation between Unicode and the -- encoding of the current locale may be lossy. -- ----------------------------------------------------------------------------- module Foreign.C.String ( -- representation of strings in C -- * C strings CString, -- = Ptr CChar CStringLen, -- = (Ptr CChar, Int) -- ** Using a locale-dependent encoding -- | Currently these functions are identical to their @CAString@ counterparts; -- eventually they will use an encoding determined by the current locale. -- conversion of C strings into Haskell strings -- peekCString, -- :: CString -> IO String peekCStringLen, -- :: CStringLen -> IO String -- conversion of Haskell strings into C strings -- newCString, -- :: String -> IO CString newCStringLen, -- :: String -> IO CStringLen -- conversion of Haskell strings into C strings using temporary storage -- withCString, -- :: String -> (CString -> IO a) -> IO a withCStringLen, -- :: String -> (CStringLen -> IO a) -> IO a charIsRepresentable, -- :: Char -> IO Bool -- ** Using 8-bit characters -- | These variants of the above functions are for use with C libraries -- that are ignorant of Unicode. These functions should be used with -- care, as a loss of information can occur. castCharToCChar, -- :: Char -> CChar castCCharToChar, -- :: CChar -> Char peekCAString, -- :: CString -> IO String peekCAStringLen, -- :: CStringLen -> IO String newCAString, -- :: String -> IO CString newCAStringLen, -- :: String -> IO CStringLen withCAString, -- :: String -> (CString -> IO a) -> IO a withCAStringLen, -- :: String -> (CStringLen -> IO a) -> IO a -- * C wide strings -- | These variants of the above functions are for use with C libraries -- that encode Unicode using the C @wchar_t@ type in a system-dependent -- way. The only encodings supported are -- -- * UTF-32 (the C compiler defines @__STDC_ISO_10646__@), or -- -- * UTF-16 (as used on Windows systems). CWString, -- = Ptr CWchar CWStringLen, -- = (Ptr CWchar, Int) peekCWString, -- :: CWString -> IO String peekCWStringLen, -- :: CWStringLen -> IO String newCWString, -- :: String -> IO CWString newCWStringLen, -- :: String -> IO CWStringLen withCWString, -- :: String -> (CWString -> IO a) -> IO a withCWStringLen, -- :: String -> (CWStringLen -> IO a) -> IO a ) where import Jhc.Basics import Jhc.Monad import Jhc.Order import Jhc.List import Jhc.Num import Foreign.Marshal.Array import Foreign.C.Types import Foreign.Ptr import Foreign.Storable ----------------------------------------------------------------------------- -- Strings -- representation of strings in C -- ------------------------------ -- | A C string is a reference to an array of C characters terminated by NUL. 
type CString = Ptr CChar -- | A string with explicit length information in bytes instead of a -- terminating NUL (allowing NUL characters in the middle of the string). type CStringLen = (Ptr CChar, Int) -- exported functions -- ------------------ -- -- * the following routines apply the default conversion when converting the -- C-land character encoding into the Haskell-land character encoding -- | Marshal a NUL terminated C string into a Haskell string. -- peekCString :: CString -> IO String peekCString = peekCAString -- | Marshal a C string with explicit length into a Haskell string. -- peekCStringLen :: CStringLen -> IO String peekCStringLen = peekCAStringLen -- | Marshal a Haskell string into a NUL terminated C string. -- -- * the Haskell string may /not/ contain any NUL characters -- -- * new storage is allocated for the C string and must be -- explicitly freed using 'Foreign.Marshal.Alloc.free' or -- 'Foreign.Marshal.Alloc.finalizerFree'. -- newCString :: String -> IO CString newCString = newCAString -- | Marshal a Haskell string into a C string (ie, character array) with -- explicit length information. -- -- * new storage is allocated for the C string and must be -- explicitly freed using 'Foreign.Marshal.Alloc.free' or -- 'Foreign.Marshal.Alloc.finalizerFree'. -- newCStringLen :: String -> IO CStringLen newCStringLen = newCAStringLen -- | Marshal a Haskell string into a NUL terminated C string using temporary -- storage. -- -- * the Haskell string may /not/ contain any NUL characters -- -- * the memory is freed when the subcomputation terminates (either -- normally or via an exception), so the pointer to the temporary -- storage must /not/ be used after this. -- withCString :: String -> (CString -> IO a) -> IO a withCString = withCAString -- | Marshal a Haskell string into a NUL terminated C string using temporary -- storage. -- -- * the Haskell string may /not/ contain any NUL characters -- -- * the memory is freed when the subcomputation terminates (either -- normally or via an exception), so the pointer to the temporary -- storage must /not/ be used after this. -- withCStringLen :: String -> (CStringLen -> IO a) -> IO a withCStringLen = withCAStringLen -- | Determines whether a character can be accurately encoded in a 'CString'. -- Unrepresentable characters are converted to @\'?\'@. -- -- Currently only Latin-1 characters are representable. charIsRepresentable :: Char -> IO Bool charIsRepresentable c = return (ord c < 256) -- single byte characters -- ---------------------- -- -- ** NOTE: These routines don't handle conversions! ** -- | Convert a C byte, representing a Latin-1 character, to the corresponding -- Haskell character. --castCCharToChar :: CChar -> Char --castCCharToChar ch = chr (fromIntegral (fromIntegral ch :: Word8)) -- | Convert a Haskell character to a C character. -- This function is only safe on the first 256 characters. --castCharToCChar :: Char -> CChar --castCharToCChar ch = fromIntegral (ord ch) foreign import primitive "U2U" castCCharToChar :: CChar -> Char foreign import primitive "U2U" castCharToCChar :: Char -> CChar -- | Marshal a NUL terminated C string into a Haskell string. 
-- peekCAString :: CString -> IO String -- #ifndef __GLASGOW_HASKELL__ --peekCAString cp = do -- cs <- peekArray0 nUL cp -- return (cCharsToChars cs) -- #else peekCAString cp = do l <- lengthArray0 nUL cp if l <= 0 then return "" else loop "" (l-1) where loop s i = do xval <- peekElemOff cp i let val = castCCharToChar xval val `seq` if i <= 0 then return (val:s) else loop (val:s) (i-1) -- #endif -- | Marshal a C string with explicit length into a Haskell string. -- peekCAStringLen :: CStringLen -> IO String -- #ifndef __GLASGOW_HASKELL__ -- peekCAStringLen (cp, len) = do -- cs <- peekArray len cp -- return (cCharsToChars cs) -- #else peekCAStringLen (cp, len) | len <= 0 = return "" -- being (too?) nice. | otherwise = loop [] (len-1) where loop acc i = do xval <- peekElemOff cp i let val = castCCharToChar xval -- blow away the coercion ASAP. if (val `seq` (i == 0)) then return (val:acc) else loop (val:acc) (i-1) -- #endif -- | Marshal a Haskell string into a NUL terminated C string. -- -- * the Haskell string may /not/ contain any NUL characters -- -- * new storage is allocated for the C string and must be -- explicitly freed using 'Foreign.Marshal.Alloc.free' or -- 'Foreign.Marshal.Alloc.finalizerFree'. -- newCAString :: String -> IO CString -- #ifndef __GLASGOW_HASKELL__ -- newCAString = newArray0 nUL . charsToCChars -- #else newCAString str = do ptr <- mallocArray0 (length str) let go [] n = pokeElemOff ptr n nUL go (c:cs) n = do pokeElemOff ptr n (castCharToCChar c); go cs (n+1) go str 0 return ptr -- #endif -- | Marshal a Haskell string into a C string (ie, character array) with -- explicit length information. -- -- * new storage is allocated for the C string and must be -- explicitly freed using 'Foreign.Marshal.Alloc.free' or -- 'Foreign.Marshal.Alloc.finalizerFree'. -- newCAStringLen :: String -> IO CStringLen -- #ifndef __GLASGOW_HASKELL__ -- newCAStringLen str = do -- a <- newArray (charsToCChars str) -- return (pairLength str a) -- #else newCAStringLen str = do ptr <- mallocArray0 len let go [] n = n `seq` return () -- make it strict in n go (c:cs) n = do pokeElemOff ptr n (castCharToCChar c); go cs (n+1) go str 0 return (ptr, len) where len = length str -- #endif -- | Marshal a Haskell string into a NUL terminated C string using temporary -- storage. -- -- * the Haskell string may /not/ contain any NUL characters -- -- * the memory is freed when the subcomputation terminates (either -- normally or via an exception), so the pointer to the temporary -- storage must /not/ be used after this. -- withCAString :: String -> (CString -> IO a) -> IO a -- #ifndef __GLASGOW_HASKELL__ -- withCAString = withArray0 nUL . charsToCChars -- #else withCAString str f = allocaArray0 (length str) $ \ptr -> let go [] n = pokeElemOff ptr n nUL go (c:cs) n = do pokeElemOff ptr n (castCharToCChar c); go cs (n+1) in do go str 0 f ptr -- #endif -- | Marshal a Haskell string into a NUL terminated C string using temporary -- storage. -- -- * the Haskell string may /not/ contain any NUL characters -- -- * the memory is freed when the subcomputation terminates (either -- normally or via an exception), so the pointer to the temporary -- storage must /not/ be used after this. -- -- withCAStringLen :: String -> (CStringLen -> IO a) -> IO a -- #ifndef __GLASGOW_HASKELL__ -- withCAStringLen str act = withArray (charsToCChars str) $ act . 
pairLength str -- #else withCAStringLen str f = allocaArray len $ \ptr -> let go [] n = n `seq` return () -- make it strict in n go (c:cs) n = do pokeElemOff ptr n (castCharToCChar c); go cs (n+1) in do go str 0 f (ptr,len) where len = length str -- #endif -- auxiliary definitions -- ---------------------- -- C's end of string character -- nUL :: CChar nUL = 0 -- pair a C string with the length of the given Haskell string -- pairLength :: String -> a -> (a, Int) pairLength = flip (,) . length -- #ifndef __GLASGOW_HASKELL__ -- cast [CChar] to [Char] -- cCharsToChars :: [CChar] -> [Char] cCharsToChars xs = map castCCharToChar xs -- cast [Char] to [CChar] -- charsToCChars :: [Char] -> [CChar] charsToCChars xs = map castCharToCChar xs -- #endif ----------------------------------------------------------------------------- -- Wide strings -- representation of wide strings in C -- ----------------------------------- -- | A C wide string is a reference to an array of C wide characters -- terminated by NUL. type CWString = Ptr CWchar -- | A wide character string with explicit length information in bytes -- instead of a terminating NUL (allowing NUL characters in the middle -- of the string). type CWStringLen = (Ptr CWchar, Int) -- | Marshal a NUL terminated C wide string into a Haskell string. -- peekCWString :: CWString -> IO String peekCWString cp = do cs <- peekArray0 wNUL cp return (cWcharsToChars cs) -- | Marshal a C wide string with explicit length into a Haskell string. -- peekCWStringLen :: CWStringLen -> IO String peekCWStringLen (cp, len) = do cs <- peekArray len cp return (cWcharsToChars cs) -- | Marshal a Haskell string into a NUL terminated C wide string. -- -- * the Haskell string may /not/ contain any NUL characters -- -- * new storage is allocated for the C wide string and must -- be explicitly freed using 'Foreign.Marshal.Alloc.free' or -- 'Foreign.Marshal.Alloc.finalizerFree'. -- newCWString :: String -> IO CWString newCWString = newArray0 wNUL . charsToCWchars -- | Marshal a Haskell string into a C wide string (ie, wide character array) -- with explicit length information. -- -- * new storage is allocated for the C wide string and must -- be explicitly freed using 'Foreign.Marshal.Alloc.free' or -- 'Foreign.Marshal.Alloc.finalizerFree'. -- newCWStringLen :: String -> IO CWStringLen newCWStringLen str = do a <- newArray (charsToCWchars str) return (pairLength str a) -- | Marshal a Haskell string into a NUL terminated C wide string using -- temporary storage. -- -- * the Haskell string may /not/ contain any NUL characters -- -- * the memory is freed when the subcomputation terminates (either -- normally or via an exception), so the pointer to the temporary -- storage must /not/ be used after this. -- withCWString :: String -> (CWString -> IO a) -> IO a withCWString = withArray0 wNUL . charsToCWchars -- | Marshal a Haskell string into a NUL terminated C wide string using -- temporary storage. -- -- * the Haskell string may /not/ contain any NUL characters -- -- * the memory is freed when the subcomputation terminates (either -- normally or via an exception), so the pointer to the temporary -- storage must /not/ be used after this. -- withCWStringLen :: String -> (CWStringLen -> IO a) -> IO a withCWStringLen str act = withArray (charsToCWchars str) $ act . 
pairLength str -- auxiliary definitions -- ---------------------- wNUL :: CWchar wNUL = 0 cWcharsToChars :: [CWchar] -> [Char] charsToCWchars :: [Char] -> [CWchar] cWcharsToChars xs = map castCWcharToChar xs charsToCWchars xs = map castCharToCWchar xs -- These conversions only make sense if __STDC_ISO_10646__ is defined -- (meaning that wchar_t is ISO 10646, aka Unicode) --castCWcharToChar :: CWchar -> Char --castCWcharToChar ch = chr (fromIntegral ch ) --castCharToCWchar :: Char -> CWchar --castCharToCWchar ch = fromIntegral (ord ch) foreign import primitive "U2U" castCWcharToChar :: CWchar -> Char foreign import primitive "U2U" castCharToCWchar :: Char -> CWchar
m-alvarez/jhc
lib/jhc/Foreign/C/String.hs
mit
14,685
113
13
3,005
1,825
1,106
719
-1
-1
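-- A minimal round-trip sketch (separate from the jhc module above), written
-- against the standard Foreign.C.String API from base; the "strlen" foreign
-- import is only illustrative of calling into C with a marshalled string.
{-# LANGUAGE ForeignFunctionInterface #-}
module Main where

import Foreign.C.String (CString, peekCAString, withCAString)
import Foreign.C.Types  (CSize)

foreign import ccall unsafe "string.h strlen"
  c_strlen :: CString -> IO CSize

main :: IO ()
main =
  -- Temporary storage: the CString is freed when the callback returns, so
  -- neither the pointer nor anything derived from it may escape the bracket.
  withCAString "hello" $ \cstr -> do
    n <- c_strlen cstr        -- 5: the NUL terminator is not counted
    s <- peekCAString cstr    -- marshal the array back into a Haskell String
    print (n, s)              -- (5,"hello")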
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_HADDOCK show-extensions #-}

-- |
-- Module      :  Yi.Keymap.Vim.Ex.Commands.Undo
-- License     :  GPL-2
-- Maintainer  :  [email protected]
-- Stability   :  experimental
-- Portability :  portable

module Yi.Keymap.Vim.Ex.Commands.Undo (parse) where

import Yi.Buffer.Adjusted               (redoB, undoB)
import Yi.Keymap                        (Action (BufferA))
import Yi.Keymap.Vim.Common             (EventString (Ev))
import Yi.Keymap.Vim.Ex.Commands.Common (pureExCommand)
import Yi.Keymap.Vim.Ex.Types           (ExCommand (cmdAction, cmdComplete, cmdShow))

parse :: EventString -> Maybe ExCommand
parse (Ev s) | s `elem` ["u", "undo"] = Just pureExCommand
    { cmdAction   = BufferA undoB
    , cmdShow     = "undo"
    , cmdComplete = return ["undo"]
    }
parse (Ev s) | s `elem` ["redo"] = Just pureExCommand
    { cmdAction   = BufferA redoB
    , cmdShow     = "redo"
    , cmdComplete = return ["redo"]
    }
parse _ = Nothing
siddhanathan/yi
yi-keymap-vim/src/Yi/Keymap/Vim/Ex/Commands/Undo.hs
gpl-2.0
1,176
0
10
413
247
152
95
20
1
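-- A small check sketch (assuming the yi-keymap-vim modules above are
-- importable): ":u", ":undo" and ":redo" are recognised by `parse`; anything
-- else falls through to Nothing so other Ex command parsers get a chance.
{-# LANGUAGE OverloadedStrings #-}
module UndoParseSketch where

import Data.Maybe (isJust)
import Yi.Keymap.Vim.Common (EventString (Ev))
import qualified Yi.Keymap.Vim.Ex.Commands.Undo as Undo

recognised :: [Bool]
recognised = map (isJust . Undo.parse . Ev) ["u", "undo", "redo", "qall"]
-- ==> [True, True, True, False]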
{-@ LIQUID "--maxparams=3" @-} {-# OPTIONS_GHC -cpp -fglasgow-exts #-} -- | -- Module : Data.ByteString.Lazy.Internal -- License : BSD-style -- Maintainer : [email protected], [email protected] -- Stability : experimental -- Portability : portable -- -- A module containing semi-public 'ByteString' internals. This exposes -- the 'ByteString' representation and low level construction functions. -- Modules which extend the 'ByteString' system will need to use this module -- while ideally most users will be able to make do with the public interface -- modules. -- module Data.ByteString.Lazy.Internal ( liquidCanary, -- * The lazy @ByteString@ type and representation ByteString(..), -- instances: Eq, Ord, Show, Read, Data, Typeable chunk, foldrChunks, foldlChunks, -- * Data type invariant and abstraction function invariant, checkInvariant, -- * Chunk allocation sizes defaultChunkSize, smallChunkSize, chunkOverhead ) where import qualified Data.ByteString.Internal as S -- LIQUID import Language.Haskell.Liquid.Prelude (liquidError) -- import qualified Data.ByteString.Internal -- import Foreign.ForeignPtr (ForeignPtr) -- import Data.Word (Word, Word8, Word16, Word32, Word64) -- import Foreign.Ptr (Ptr) -- import qualified Foreign.C.String import Foreign.Storable (sizeOf) #if defined(__GLASGOW_HASKELL__) import Data.Generics (Data(..), Typeable(..)) #endif -- | A space-efficient representation of a Word8 vector, supporting many -- efficient operations. A 'ByteString' contains 8-bit characters only. -- -- Instances of Eq, Ord, Read, Show, Data, Typeable -- data ByteString = Empty | Chunk {-# UNPACK #-} !S.ByteString ByteString deriving (Show) -- LIQUID deriving (Show, Read -- LIQUID #if defined(__GLASGOW_HASKELL__) -- LIQUID ,Data, Typeable -- LIQUID #endif -- LIQUID ) {-@ data ByteString [lbLength] = Empty | Chunk (b :: ByteStringNE) (cs :: ByteString) @-} {-@ measure lbLength :: ByteString -> Int lbLength (Empty) = 0 lbLength (Chunk b bs) = (bLength b) + (lbLength bs) @-} {-@ measure lbLengths :: [ByteString] -> Int lbLengths ([]) = 0 lbLengths (x:xs) = (lbLength x) + (lbLengths xs) @-} {-@ invariant {v:ByteString | (lbLength v) >= 0} @-} {-@ invariant {v:[ByteString] | (lbLengths v) >= 0} @-} {-@ type LByteStringSplit B = {v:[ByteString] | ((lbLengths v) + (len v) - 1) = (lbLength B) } @-} {-@ type LByteStringPair B = (ByteString, ByteString)<{\x1 x2 -> (lbLength x1) + (lbLength x2) = (lbLength B)}> @-} {-@ predicate LBValid B N = ((N >= 0) && (N < (lbLength B))) @-} {-@ type LByteStringN N = {v:ByteString | (lbLength v) = N} @-} {-@ type LByteStringNE = {v:ByteString | (lbLength v) > 0} @-} {-@ type LByteStringSZ B = {v:ByteString | (lbLength v) = (lbLength B)} @-} {-@ type LByteStringLE B = {v:ByteString | (lbLength v) <= (lbLength B)} @-} ------------------------------------------------------------------------ {- liquidCanary :: x:Int -> {v: Int | v > x} @-} liquidCanary :: Int -> Int liquidCanary x = x - 1 -- | The data type invariant: -- Every ByteString is either 'Empty' or consists of non-null 'S.ByteString's. -- All functions must preserve this, and the QC properties must check this. -- -- LIQUID RENAME: rename `invariant` to `invt` to avoid name clash! {-@ invt :: ByteString -> {v: Bool | (Prop v)} @-} invt :: ByteString -> Bool invt Empty = True invt (Chunk (S.PS _ _ len) cs) = len > 0 && invt cs invariant = invt -- | In a form that checks the invariant lazily. 
{-@ checkInvariant :: ByteString -> ByteString @-} checkInvariant :: ByteString -> ByteString checkInvariant Empty = Empty checkInvariant (Chunk c@(S.PS _ _ len) cs) | len > 0 = Chunk c (checkInvariant cs) | otherwise = liquidError $ "Data.ByteString.Lazy: invariant violation:" ++ show (Chunk c cs) ------------------------------------------------------------------------ -- | Smart constructor for 'Chunk'. Guarantees the data type invariant. {-@ chunk :: b:S.ByteString -> bs:ByteString -> {v:ByteString | (lbLength v) = ((bLength b) + (lbLength bs))} @-} chunk :: S.ByteString -> ByteString -> ByteString chunk c@(S.PS _ _ len) cs | len == 0 = cs | otherwise = Chunk c cs {-# INLINE chunk #-} -- | Consume the chunks of a lazy ByteString with a natural right fold. {-@ foldrChunks :: forall <p :: ByteString -> a -> Prop>. (bs:ByteString -> b:ByteStringNE -> a<p bs> -> a<p (Chunk b bs)>) -> a<p Empty> -> b:ByteString -> a<p b> @-} --LIQUID GHOST added parameter to `f` for abstract refinement foldrChunks :: (ByteString -> S.ByteString -> a -> a) -> a -> ByteString -> a foldrChunks f z = go where go Empty = z go (Chunk c cs) = f cs c (go cs) {-# INLINE foldrChunks #-} -- | Consume the chunks of a lazy ByteString with a strict, tail-recursive, -- accumulating left fold. {-@ foldlChunks :: (a -> ByteStringNE -> a) -> a -> ByteString -> a @-} foldlChunks :: (a -> S.ByteString -> a) -> a -> ByteString -> a foldlChunks f z = go z where go a _ | a `seq` False = undefined go a Empty = a go a (Chunk c cs) = go (f a c) cs {-# INLINE foldlChunks #-} ------------------------------------------------------------------------ -- The representation uses lists of packed chunks. When we have to convert from -- a lazy list to the chunked representation, then by default we use this -- chunk size. Some functions give you more control over the chunk size. -- -- Measurements here: -- http://www.cse.unsw.edu.au/~dons/tmp/chunksize_v_cache.png -- -- indicate that a value around 0.5 to 1 x your L2 cache is best. -- The following value assumes people have something greater than 128k, -- and need to share the cache with other programs. -- | Currently set to 32k, less the memory management overhead {-@ defaultChunkSize :: {v:Nat | v = 32752} @-} defaultChunkSize :: Int defaultChunkSize = {-LIUQID MULTIPLY 32 * k -} 32768 - chunkOverhead where k = 1024 -- | Currently set to 4k, less the memory management overhead {-@ smallChunkSize :: {v:Nat | v = 4080} @-} smallChunkSize :: Int smallChunkSize = {-LIQUID MULTIPLY 4 * k -} 4096 - chunkOverhead where k = 1024 -- | The memory management overhead. Currently this is tuned for GHC only. {-@ chunkOverhead :: {v:Nat | v = 16} @-} chunkOverhead :: Int chunkOverhead = 2 * sizeOf (undefined :: Int)
mightymoose/liquidhaskell
benchmarks/bytestring-0.9.2.1/Data/ByteString/Lazy/Internal.hs
bsd-3-clause
6,807
0
11
1,623
710
422
288
52
3
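-- A minimal sketch of the chunked representation (written against the
-- ordinary bytestring package rather than the benchmark copy above): `chunk`
-- drops empty strict chunks so the data-type invariant holds, and the chunk
-- folds walk the spine one strict chunk at a time.
module ChunkSketch where

import qualified Data.ByteString as S
import qualified Data.ByteString.Char8 as S8
import Data.ByteString.Lazy.Internal (ByteString (..), chunk, foldlChunks)

demo :: ByteString
demo = chunk (S8.pack "hello ") (chunk S.empty (chunk (S8.pack "world") Empty))
-- the empty chunk disappears: Chunk "hello " (Chunk "world" Empty)

totalLen :: Int
totalLen = foldlChunks (\acc c -> acc + S.length c) 0 demo
-- ==> 11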
{-# LANGUAGE CPP #-}

-- | Compatibility layer for "Control.Monad.Fail"
module Distribution.Compat.MonadFail ( MonadFail(fail) ) where

#if __GLASGOW_HASKELL__ >= 800
-- provided by base-4.9.0.0 and later
import Control.Monad.Fail (MonadFail(fail))
#else
-- the following code corresponds to
-- http://hackage.haskell.org/package/fail-4.9.0.0
import qualified Prelude as P
import Distribution.Compat.Prelude hiding (fail)

import Text.ParserCombinators.ReadP
import Text.ParserCombinators.ReadPrec

class Monad m => MonadFail m where
    fail :: String -> m a

-- instances provided by base-4.9
instance MonadFail Maybe where
    fail _ = Nothing

instance MonadFail [] where
    fail _ = []

instance MonadFail P.IO where
    fail = P.fail

instance MonadFail ReadPrec where
    fail = P.fail -- = P (\_ -> fail s)

instance MonadFail ReadP where
    fail = P.fail
#endif
sopvop/cabal
Cabal/Distribution/Compat/MonadFail.hs
bsd-3-clause
873
0
6
147
37
27
10
21
0
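-- A tiny sketch of why the shim exists (assuming the module above is
-- importable from this Cabal tree; it may only be an internal module): with
-- the compatibility class in scope, `fail` picks the monad-appropriate
-- behaviour on both old and new GHCs.
module MonadFailSketch where

import Prelude hiding (fail)
import Distribution.Compat.MonadFail (MonadFail (fail))

halve :: MonadFail m => Int -> m Int
halve n
  | even n    = return (n `div` 2)
  | otherwise = fail "odd input"

-- halve 10 :: Maybe Int  ==> Just 5
-- halve  7 :: Maybe Int  ==> Nothing
-- halve  7 :: [Int]      ==> []
-- halve  7 :: IO Int     ==> throws a user error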
module Main where

import Control.Concurrent

main = do
  m <- newEmptyMVar
  sync <- newEmptyMVar
  let f = readMVar m
  t1 <- forkIO (f >> error "FAILURE")
  t2 <- forkIO (f >> putMVar sync ())
  killThread t1
  putMVar m (0 :: Int)
  readMVar sync
forked-upstream-packages-for-ghcjs/ghc
testsuite/tests/concurrent/should_run/readMVar2.hs
bsd-3-clause
267
0
12
77
110
51
59
11
1
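-- A reading of the test above plus a stand-alone sketch of the behaviour it
-- relies on: both forked threads block in `readMVar m`; killing t1 must not
-- disturb t2, so once main fills `m`, t2 still wakes up and signals `sync`
-- (the `error "FAILURE"` could only fire if the killed thread ran on). The
-- sketch below only shows that readMVar blocks until the MVar is filled and
-- leaves the value in place.
module Main where

import Control.Concurrent

main :: IO ()
main = do
  box  <- newEmptyMVar
  done <- newEmptyMVar
  _ <- forkIO $ do
    v <- readMVar box          -- blocks until the MVar is filled ...
    putMVar done v             -- ... and does not empty it
  putMVar box (42 :: Int)
  readMVar done >>= print      -- 42
  readMVar box  >>= print      -- still 42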
module CommandLine where import Control.Monad import Data.List import Expense import System.Console.GetOpt import System.Environment import System.Exit import System.IO data Options = Options { input :: IO String , output :: String -> IO () , logF :: LogLevel -> String -> IO () , cmds :: [Expenses -> IO Expenses] } data LogLevel = LogInfo | LogWarn | LogError deriving (Eq, Ord) defaultOptions :: Options defaultOptions = Options { input = getContents , output = putStr , logF = outF LogWarn , cmds = [] } updateCmds :: Options -> (Expenses -> IO Expenses) -> Options updateCmds opts cmd = opts { cmds = cmds opts ++ [cmd] } options :: [OptDescr (Options -> IO Options)] options = [ Option "v" ["verbose"] (NoArg readVerbose ) "Also output status" , Option "q" ["quiet"] (NoArg readQuiet ) "Only output errors" , Option "o" ["output"] (ReqArg readOutput "FILE") "Output file [stdout]" , Option "V" ["version"] (NoArg readVersion ) "Show version info" , Option "h" ["help"] (NoArg readHelp ) "Show help" , Option "d" ["display"] (NoArg readDisplay ) "Display all expenses" , Option "p" ["person"] (ReqArg readPerson "NAME") "Filter by person name" , Option "t" ["tag"] (ReqArg readTag "TAG" ) "Filter by tag" , Option "s" ["shop"] (ReqArg readShop "SHOP") "Filter by shop name" , Option "T" ["total"] (NoArg readTotal ) "Show the total amount" ] where readVerbose opts = return opts { logF = outF LogInfo } readQuiet opts = return opts { logF = outF LogError } readDisplay opts = return $ updateCmds opts displayExpenses readPerson arg opts = return $ updateCmds opts $ selectPerson arg readTag arg opts = return $ updateCmds opts $ selectTag arg readShop arg opts = return $ updateCmds opts $ selectShop arg readTotal opts = return $ updateCmds opts displayTotal readOutput arg opts = return $ if arg == "-" then opts { output = output defaultOptions } else opts { output = writeFile arg } readVersion _ = do progName >>= \pname -> putStr $ versionInfo pname exitSuccess readHelp _ = do pname <- progName putStrLn $ versionInfo pname putStr $ usageInfo (usageHeader pname) options exitSuccess outF :: LogLevel -> LogLevel -> String -> IO () outF minLevel level msg = unless (minLevel > level) $ hPutStrLn stderr $ (case level of LogInfo -> "[LOG] " LogWarn -> "[WRN] " LogError -> "[ERR] ") ++ msg readFiles :: [FilePath] -> IO String readFiles = foldM (\a v -> readFile v >>= (\d -> return $ a ++ d)) [] parseCmdLine :: [String] -> IO Options parseCmdLine args = case getOpt Permute options args of ([], [], []) -> return defaultOptions (os, [], []) -> foldOpts os (os, ns, []) -> foldOpts os >>= (\o -> return o { input = readFiles ns }) (_, _, es) -> do hPutStrLn stderr $ if length es > 1 then "Errors: " else "Error: " mapM_ (hPutStr stderr . (" " ++)) es hPutStrLn stderr "" pname <- progName hPutStr stderr $ usageInfo (usageHeader pname) options exitFailure where foldOpts = foldl (>>=) (return defaultOptions) progName :: IO String progName = do pname <- getProgName return $ if "pet" `isInfixOf` pname then pname else "pet" versionInfo :: String -> String versionInfo name = unlines [ name ++ " - Personal Expense Tracker - version 0.2" , "Copyright (c) 2012-2013 - Fred Morcos <[email protected]>" , "https://github.com/fredmorcos/pet.git" ] usageHeader :: String -> String usageHeader name = "Usage: " ++ name ++ " [ARGUMENTS] [FILES]"
fredmorcos/attic
projects/pet/archive/pet_haskell_args/CommandLine.hs
isc
4,249
0
14
1,467
1,232
640
592
80
5
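-- A stripped-down sketch of the option-handling pattern used above (names
-- here are illustrative, not part of pet): every OptDescr carries an
-- `opts -> IO opts` updater, and the parser folds the updaters over the
-- defaults with (>>=), exactly as parseCmdLine's foldOpts does.
module Main where

import System.Console.GetOpt
import System.Environment (getArgs)

data Opts = Opts { verbose :: Bool, outFile :: Maybe FilePath }
  deriving Show

defaults :: Opts
defaults = Opts { verbose = False, outFile = Nothing }

descrs :: [OptDescr (Opts -> IO Opts)]
descrs =
  [ Option "v" ["verbose"] (NoArg  (\o -> return o { verbose = True }))            "chatty output"
  , Option "o" ["output"]  (ReqArg (\f o -> return o { outFile = Just f }) "FILE") "output file"
  ]

main :: IO ()
main = do
  args <- getArgs
  case getOpt Permute descrs args of
    (updates, _files, []) -> foldl (>>=) (return defaults) updates >>= print
    (_, _, errs)          -> mapM_ putStr errs >> putStr (usageInfo "usage:" descrs)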
{-# LANGUAGE FlexibleInstances #-}

module Club.Json
    (
    ) where

import Club.Model
import Data.Aeson.Compat

instance ToJSON Club
matsumonkie/tennis
api/src/Club/Json.hs
mit
129
0
5
19
28
17
11
5
0
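-- A sketch of what the empty instance above relies on: aeson falls back to a
-- Generic-based default, so as long as Club (from Club.Model) has a Generic
-- instance, no hand-written toJSON is needed. The stand-alone equivalent
-- below uses made-up fields (clubName, courts) purely for illustration.
{-# LANGUAGE DeriveGeneric #-}
module ClubJsonSketch where

import Data.Aeson (ToJSON, encode)
import GHC.Generics (Generic)

data Club = Club { clubName :: String, courts :: Int }
  deriving (Show, Generic)

instance ToJSON Club   -- generic default, mirroring Club.Json above

-- encode (Club "Roland Garros" 3)
--   ==> something like {"clubName":"Roland Garros","courts":3}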
{-# LANGUAGE OverloadedStrings #-}

module Config where

import Control.Monad.Except
import Control.Monad.Logger
import Control.Monad.Reader
import Control.Monad.Trans.Maybe
import qualified Data.ByteString.Char8 as BS
import Data.Monoid ((<>))
import Network.Wai
import Network.Wai.Middleware.RequestLogger
import Servant
import System.Environment (lookupEnv)

import Database.Persist.Postgresql

data Config = Config
    { getPool :: ConnectionPool
    , getEnv  :: Environment
    }

type AppM = ReaderT Config (ExceptT ServantErr IO)

data Environment
    = Development
    | Test
    | Production
    deriving (Eq, Show, Read)

defaultConfig :: Config
defaultConfig = Config
    { getPool = undefined
    , getEnv  = Development
    }

setLogger :: Environment -> Middleware
setLogger Test        = id
setLogger Development = logStdoutDev
setLogger Production  = logStdout

makePool :: Environment -> IO ConnectionPool
makePool Test =
    runNoLoggingT $ createPostgresqlPool (connStr Test) (envPool Test)
makePool Development =
    runStdoutLoggingT $ createPostgresqlPool (connStr Development) (envPool Development)
makePool Production = do
    pool <- runMaybeT $ do
        let keys = fmap BS.pack
                [ "host="
                , "port="
                , "user="
                , "password="
                , "dbname="
                ]
            envs =
                [ "PGHOST"
                , "PGPORT"
                , "PGUSER"
                , "PGPASS"
                , "PGDATABASE"
                ]
        prodStr <- mconcat . zipWith (<>) keys . fmap BS.pack
                   <$> traverse (MaybeT . lookupEnv) envs
        runStdoutLoggingT $ createPostgresqlPool prodStr (envPool Production)
    case pool of
        Nothing -> error "Database Configuration not present in environment."
        Just a  -> return a

envPool :: Environment -> Int
envPool Test        = 1
envPool Development = 1
envPool Production  = 8

connStr :: Environment -> ConnectionString
connStr _ = "host=localhost dbname=quicklift user=test password=test port=5432"
parsonsmatt/QuickLift
src/Config.hs
mit
2,244
0
15
740
486
267
219
60
2
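-- A usage sketch (assuming the Config module above is importable and a
-- PostgreSQL server is reachable at the hard-coded connStr): build the pool
-- for the chosen environment, assemble a Config, and run a trivial AppM
-- action against it. No web server is started here.
module Main where

import Control.Monad.Except (runExceptT)
import Control.Monad.Reader (asks, runReaderT)
import Config

main :: IO ()
main = do
  let env = Development
  pool <- makePool env                          -- per-environment pool/logging
  let cfg    = Config { getPool = pool, getEnv = env }
      action = asks getEnv :: AppM Environment  -- the simplest possible AppM
  r <- runExceptT (runReaderT action cfg)
  print r                                       -- Right Development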
{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE NamedFieldPuns #-} {-# LANGUAGE ConstraintKinds #-} {-# LANGUAGE LambdaCase #-} {-# LANGUAGE NoMonomorphismRestriction #-} {-# LANGUAGE FlexibleContexts #-} module Text.Atomos.Atom.Import where import Control.Applicative ((<*), (*>), (<$>), (<$)) import qualified Control.Applicative as A import Text.Atomos.Types import Text.HTML.TagSoup import Text.Parsec import Data.List import qualified Data.List.NonEmpty as N import Data.Time import Data.Char import qualified Data.ByteString.Base64 as Base64 import qualified Data.Text.Encoding as T import qualified Data.ByteString as S import qualified Data.ByteString.Char8 as S8 type TagParser a = Parsec [Tag S.ByteString] ParseMode a raw :: (Tag S.ByteString -> Maybe a) -> TagParser a raw f = tokenPrim show next test where test t = f t next pos _ _ = incSourceColumn pos 1 rawBool :: (Tag S.ByteString -> Bool) -> TagParser (Tag S.ByteString) rawBool f = raw $ \t -> if f t then Just t else Nothing rawBool_ :: (Tag S.ByteString -> Bool) -> TagParser () rawBool_ f = raw $ \t -> if f t then Just () else Nothing tagOpen :: TagParser (Tag S.ByteString) tagOpen = rawBool isTagOpen tagOpenName :: S.ByteString -> TagParser [Attribute S.ByteString] tagOpenName n = raw $ \tag -> case tag of TagOpen t a | t == n -> Just a _ -> Nothing tagOpenName_ :: S.ByteString -> TagParser () tagOpenName_ n = rawBool_ $ isTagOpenName n tagText_ :: TagParser () tagText_ = () <$ tagText tagText :: TagParser S.ByteString tagText = raw $ \tag -> case tag of TagText t -> Just t _ -> Nothing skipText :: TagParser () skipText = optional tagText tagCloseName :: S.ByteString -> TagParser () tagCloseName n = rawBool_ (isTagCloseName n) <* many tagText anyTag_ :: TagParser () anyTag_ = rawBool_ (const True) unknownTag :: TagParser () unknownTag = do TagOpen o _ <- tagOpen loop o where loop o = tagCloseName o <|> (anyTag_ >> loop o) inTagText :: S.ByteString -> TagParser S.ByteString inTagText t = tagOpenName_ t *> tagText <* tagCloseName t newtype IV a = IV { getIV :: Maybe a } newIV :: IV a newIV = IV Nothing setIV :: String -> IV a -> a -> TagParser (IV a) setIV _ (IV Nothing) a = return . IV $ Just a setIV f iv _ = getState >>= \case Strict -> fail $ "Attempt to set to an IV twice: " ++ f Fuzzy{} -> return iv tagTextSetIV :: S.ByteString -> IV S.ByteString -> TagParser (IV S.ByteString) tagTextSetIV n iv = inTagText n >>= \v' -> setIV (S8.unpack n) iv v' person :: S.ByteString -> TagParser Person person t = tagOpenName_ t >> skipText >> loop newIV newIV newIV where loop n u e = choice [ tagTextSetIV "name" n >>= \n' -> loop n' u e , tagTextSetIV "uri" u >>= \u' -> loop n u' e , tagTextSetIV "email" e >>= \e' -> loop n u e' , do tagCloseName t name <- case getIV n of Nothing -> getState >>= \case Strict -> fail "person without name." Fuzzy{defaultPerson} -> return $ personName defaultPerson Just nm -> return $ T.decodeUtf8 nm return $ Person name (getIV u) (getIV e) , unknownTag >> loop n u e ] author :: TagParser Person author = person "author" category :: TagParser Category category = do attrs <- tagOpenName "category" optional tagText_ tagCloseName "category" term <- case lookup "term" attrs of Nothing -> getState >>= \case Strict -> fail "category without term." 
Fuzzy{defaultTerm} -> return defaultTerm Just c -> return c return $ Category term (lookup "scheme" attrs) (T.decodeUtf8 <$> lookup "label" attrs) contributor :: TagParser Person contributor = person "contributor" generator :: TagParser Generator generator = do attrs <- tagOpenName "generator" gtxt <- tagText tagCloseName "generator" return $ Generator (lookup "uri" attrs) (lookup "version" attrs) gtxt icon :: TagParser URI icon = inTagText "icon" ident :: TagParser URI ident = inTagText "id" link :: TagParser Link link = do attrs <- tagOpenName "link" optional tagText_ tagCloseName "link" href <- case lookup "href" attrs of Nothing -> getState >>= \case Strict -> fail "link without href." Fuzzy{defaultHref} -> return defaultHref Just h -> return h return $ Link (maybe "alternate" id $ lookup "rel" attrs) $ Link_ href (lookup "type" attrs) (lookup "hreflang" attrs) (T.decodeUtf8 <$> lookup "title" attrs) (lookup "length" attrs) logo :: TagParser URI logo = inTagText "logo" published :: TagParser ZonedTime published = inTagText "published" >>= \d -> getState >>= \case Strict -> parseRfc3339Date d Fuzzy{defaultTime} -> parseRfc3339Date d <|> return defaultTime textElement :: S.ByteString -> TagParser TextElement textElement t = do attrs <- tagOpenName t let typ = maybe "text" id $ lookup "type" attrs case typ of "text" -> PlainText <$> plain "html" -> HtmlText <$> plain "xhtml" -> xml e -> getState >>= \case Strict -> fail $ "unknown text element type: " ++ show e Fuzzy{} -> case (isXmlMimeType e, e == "text/html") of (True, _) -> xml (False, True) -> HtmlText <$> plain (False, False) -> HtmlText <$> plain where plain = T.decodeUtf8 <$> tagText <* tagCloseName t xml = do ts <- many $ rawBool (not . isTagCloseName t) tagCloseName t return $ XHtmlText ts rights :: TagParser TextElement rights = textElement "rights" subtitle :: TagParser TextElement subtitle = textElement "subtitle" summary :: TagParser TextElement summary = textElement "summary" parseRfc3339Date :: (Monad m, A.Alternative m) => S.ByteString -> m ZonedTime parseRfc3339Date s = p "%FT%TZ" A.<|> p "%FT%T%z" A.<|> p "%FT%T%Q%z" A.<|> p "%FT%T%QZ" A.<|> fail "Date format incorrect." where p f = parseTimeM False defaultTimeLocale f (S8.unpack s) title :: TagParser TextElement title = textElement "title" updated :: TagParser ZonedTime updated = inTagText "updated" >>= \d -> getState >>= \case Strict -> parseRfc3339Date d Fuzzy{defaultTime} -> parseRfc3339Date d <|> return defaultTime content :: TagParser (Maybe TextElement -> Maybe Content) content = do attrs <- tagOpenName "content" let typ = maybe "text" id $ lookup "type" attrs case lookup "src" attrs of Just src -> do tagCloseName "content" return $ fmap (ContentOutOfLine . OutOfLine (Just typ) src) Nothing -> do r <- case S8.map toLower typ of t | isPlain t -> do b <- tagText return $ \s -> Just . ContentInlinePlain $ InlinePlain (Just typ) (T.decodeUtf8 b) s | isXml t -> do ts <- many $ rawBool (not . isTagCloseName "content") return $ \s -> Just . ContentInlineXml $ InlineXml (Just typ) ts s | otherwise -> Base64.decode <$> tagText >>= \case Right b -> return $ fmap (ContentInlineBase64 . 
InlineBase64 (Just typ) b) Left e -> getState >>= \case Strict -> fail e Fuzzy{base64DecodeFailed} -> return base64DecodeFailed tagCloseName "content" return r where isPlain s = s == "text" || s == "html" || isPlainMimeType s isXml s = s == "xhtml" || isXmlMimeType s isPlainMimeType :: S.ByteString -> Bool isPlainMimeType s = "text/" `S8.isPrefixOf` s && s /= "text/xml" isXmlMimeType :: S.ByteString -> Bool isXmlMimeType s = "+xml" `S8.isSuffixOf` s || "/xml" `S8.isSuffixOf` s data EntryState = EntryState { esAuthor :: [Person] -> [Person] , esCategory :: [Category] -> [Category] , esContent :: IV (Maybe TextElement -> Maybe Content) , esSummary :: IV TextElement , esContributor :: [Person] -> [Person] , esId :: IV URI , esLinks :: [Link] -> [Link] , esPublished :: IV ZonedTime , esRights :: IV TextElement , esSource :: IV Feed , esTitle :: IV TextElement , esUpdated :: IV ZonedTime } completeId :: Maybe URI -> TagParser URI completeId (Just i) = return i completeId Nothing = getState >>= \mode -> case mode of Strict -> fail "id element required" Fuzzy{} -> let idnt = defaultInitialId mode in idnt <$ modifyState (\s -> s { defaultInitialId = updateId mode idnt } ) entry :: TagParser (Entry []) entry = tagOpenName "entry" >> skipText >> loop (EntryState id id newIV newIV id newIV id newIV newIV newIV newIV newIV) where loop es = choice [ author >>= \a -> loop es { esAuthor = (esAuthor es . (a:)) } , category >>= \c -> loop es { esCategory = (esCategory es . (c:)) } , contributor >>= \c -> loop es { esContributor = (esContributor es . (c:)) } , link >>= \l -> loop es { esLinks = (esLinks es . (l:)) } , content >>= setIV "content" (esContent es) >>= \c -> loop es { esContent = c } , summary >>= setIV "summary" (esSummary es) >>= \s -> loop es { esSummary = s } , ident >>= setIV "id" (esId es) >>= \i -> loop es { esId = i } , published >>= setIV "published" (esPublished es) >>= \p -> loop es { esPublished = p } , rights >>= setIV "rights" (esRights es) >>= \r -> loop es { esRights = r } , source >>= setIV "source" (esSource es) >>= \s -> loop es { esSource = s } , title >>= setIV "title" (esTitle es) >>= \t -> loop es { esTitle = t } , updated >>= setIV "updated" (esUpdated es) >>= \u -> loop es { esUpdated = u } , tagCloseName "entry" >> result es , unknownTag >> loop es ] result es = do mode <- getState let fuz f = case mode of Strict -> Nothing Fuzzy{} -> Just $ f mode (contf, links) <- case getIV (esContent es) of Nothing -> case partition (("alternate" ==) . linkRel) (esLinks es []) of ([], ls) -> case mode of Strict -> fail "alternate link element required" Fuzzy{defaultAlternate} -> return (Just . Right . NoContent (N.fromList [defaultAlternate]), ls) (as, ls) -> return (Just . Right . NoContent (N.fromList $ map linkAttrs as), ls) Just cf -> return (fmap Left . 
cf, esLinks es []) cnts <- maybe (fail "summary element required") return $ contf $ (getIV $ esSummary es) A.<|> fuz defaultSummary idnt <- completeId $ getIV (esId es) titl <- maybe (fail "title element required") return $ getIV (esTitle es) A.<|> fuz defaultTitle upd <- maybe (fail "updated element required") return $ getIV (esUpdated es) A.<|> fuz defaultTime return $ Entry (esAuthor es []) (esCategory es []) cnts (esContributor es []) idnt links (getIV $ esPublished es) (getIV $ esRights es) (getIV $ esSource es) titl upd data FeedState = FeedState { fsCategories :: [Category] -> [Category] , fsContributors :: [Person] -> [Person] , fsGenerator :: IV Generator , fsIcon :: IV URI , fsId :: IV URI , fsLinks :: [Link] -> [Link] , fsLogo :: IV URI , fsRights :: IV TextElement , fsSubtitle :: IV TextElement , fsTitle :: IV TextElement , fsUpdated :: IV ZonedTime , fsAuthors :: [Person] -> [Person] , fsEntries :: [Entry []] -> [Entry []] } feedLike :: S.ByteString -> TagParser Feed feedLike n = tagOpenName_ n >> skipText >> loop (FeedState id id newIV newIV newIV id newIV newIV newIV newIV newIV id id) where loop fs = choice [ category >>= \c -> loop fs { fsCategories = (fsCategories fs . (c:)) } , contributor >>= \c -> loop fs { fsContributors = (fsContributors fs . (c:)) } , link >>= \l -> loop fs { fsLinks = (fsLinks fs . (l:)) } , author >>= \a -> loop fs { fsAuthors = (fsAuthors fs . (a:)) } , entry >>= \e -> loop fs { fsEntries = (fsEntries fs . (e:)) } , generator >>= setIV "generator" (fsGenerator fs) >>= \g -> loop fs { fsGenerator = g } , icon >>= setIV "icon" (fsIcon fs) >>= \i -> loop fs { fsIcon = i } , ident >>= setIV "id" (fsId fs) >>= \i -> loop fs { fsId = i } , logo >>= setIV "logo" (fsLogo fs) >>= \l -> loop fs { fsLogo = l } , rights >>= setIV "rights" (fsRights fs) >>= \r -> loop fs { fsRights = r } , subtitle >>= setIV "subtitle" (fsSubtitle fs) >>= \s -> loop fs { fsSubtitle = s } , title >>= setIV "title" (fsTitle fs) >>= \t -> loop fs { fsTitle = t } , updated >>= setIV "updated" (fsUpdated fs) >>= \u -> loop fs { fsUpdated = u } , tagCloseName n >> result fs , unknownTag >> loop fs ] result fs = do mode <- getState let fuz f = case mode of Strict -> Nothing Fuzzy{} -> Just $ f mode aes <- case N.nonEmpty $ fsAuthors fs [] of Nothing -> case sequence $ map requireEntryAuthor (fsEntries fs []) of Nothing -> case mode of Strict -> fail "all entry require author element." Fuzzy{} -> return . Right $ map (requireEntryAuthorDef (defaultPerson mode)) (fsEntries fs []) Just es -> return (Right es) Just as -> return $ Left (as, fsEntries fs []) (self, other) <- case partition (("self" ==) . 
linkRel) (fsLinks fs []) of (s:_, o) -> return (Just $ linkAttrs s, o) ([], o) -> return (Nothing, o) idnt <- completeId $ getIV (fsId fs) titl <- maybe (fail "title element required.") return $ getIV (fsTitle fs) A.<|> fuz defaultTitle upd <- maybe (fail "updated element required.") return $ getIV (fsUpdated fs) A.<|> fuz defaultTime return $ Feed (fsCategories fs []) (fsContributors fs []) (getIV $ fsGenerator fs) (getIV $ fsIcon fs) idnt self other (getIV $ fsLogo fs) (getIV $ fsRights fs) (getIV $ fsSubtitle fs) titl upd aes feed, source :: TagParser Feed feed = feedLike "feed" source = feedLike "source" requireEntryAuthor :: Entry [] -> Maybe (Entry N.NonEmpty) requireEntryAuthor e = (\a -> e { entryAuthors = a } ) <$> N.nonEmpty (entryAuthors e) requireEntryAuthorDef :: Person -> Entry [] -> Entry N.NonEmpty requireEntryAuthorDef p e = case N.nonEmpty (entryAuthors e) of Nothing -> e { entryAuthors = p N.:| [] } Just p' -> e { entryAuthors = p' } atomP :: TagParser Feed atomP = many (rawBool (not . isTagOpenName "feed")) >> feed data ParseMode = Strict | Fuzzy { defaultPerson :: Person , defaultTerm :: S8.ByteString , defaultHref :: URI , defaultTime :: ZonedTime , base64DecodeFailed :: Maybe TextElement -> Maybe Content , defaultAlternate :: Link_ , defaultSummary :: TextElement , defaultTitle :: TextElement , defaultInitialId :: URI , updateId :: URI -> URI } defaultFuzzy :: ParseMode defaultFuzzy = Fuzzy { defaultPerson = Person "unknown" Nothing Nothing , defaultTerm = "unknown" , defaultHref = "http://example.com" , defaultTime = ZonedTime (LocalTime (ModifiedJulianDay 0) (TimeOfDay 0 0 0)) utc , base64DecodeFailed = \s -> Just . ContentInlinePlain $ InlinePlain Nothing "base64 decode failed" s , defaultAlternate = Link_ "http://example.com" Nothing Nothing Nothing Nothing , defaultSummary = PlainText "no summary" , defaultTitle = PlainText "no title" , defaultInitialId = "no-id-0" , updateId = \i -> let i' = S8.pack . show . succ . maybe 0 fst $ S8.readInt (S.drop 6 i) in "no-id-" `S.append` i' } parseAtom :: ParseMode -> SourceName -> S8.ByteString -> Either ParseError Feed parseAtom m s f = runParser atomP m s (parseTags f)
philopon/atomos
Text/Atomos/Atom/Import.hs
mit
16,795
0
27
5,173
5,837
2,963
2,874
338
7
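-- A small driver sketch (assuming the atomos modules above; "feed.xml" is an
-- illustrative file name): parse an Atom document strictly first, then fall
-- back to the fuzzy mode, which substitutes defaults for missing elements
-- instead of failing.
module Main where

import qualified Data.ByteString.Char8 as S8
import Text.Atomos.Atom.Import (ParseMode (Strict), defaultFuzzy, parseAtom)

main :: IO ()
main = do
  raw <- S8.readFile "feed.xml"
  case parseAtom Strict "feed.xml" raw of
    Right _  -> putStrLn "strict parse succeeded"
    Left err -> do
      putStrLn ("strict parse failed: " ++ show err)
      case parseAtom defaultFuzzy "feed.xml" raw of
        Right _   -> putStrLn "fuzzy parse succeeded"
        Left err' -> putStrLn ("fuzzy parse failed too: " ++ show err')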
module CFDI.Types.ProductOrService where import CFDI.Chainable import CFDI.Types.Type import Control.Error.Safe (justErr) import Data.Set (fromList, member, unions) import Text.Read (readMaybe) newtype ProductOrService = ProductOrService Int deriving (Eq, Show) instance Chainable ProductOrService where chain (ProductOrService 1010101) = "01010101" chain (ProductOrService c) = chain c instance Type ProductOrService where parseExpr c = justErr NotInCatalog maybeProductPorService where maybeProductPorService = ProductOrService <$> (readMaybe c >>= isValid) isValid x | x `member` validCodes = Just x | otherwise = Nothing validCodes = unions . map fromList $ [ [ 1010101..1010101 ] , [10101500..10101502] , [10101504..10101517] , [10101600..10101605] , [10101700..10101705] , [10101800..10101809] , [10101900..10101904] , [10102000..10102002] , [10102100..10102100] , [10111300..10111307] , [10121500..10121507] , [10121600..10121604] , [10121700..10121703] , [10121800..10121806] , [10121900..10121901] , [10122000..10122003] , [10122100..10122104] , [10131500..10131500] , [10131506..10131508] , [10131600..10131604] , [10131700..10131702] , [10141500..10141505] , [10141600..10141611] , [10151500..10151539] , [10151600..10151613] , [10151700..10151706] , [10151800..10151817] , [10151900..10151907] , [10152000..10152007] , [10152100..10152104] , [10152200..10152202] , [10152300..10152300] , [10152400..10152400] , [10161500..10161530] , [10161600..10161600] , [10161602..10161602] , [10161604..10161606] , [10161800..10161805] , [10161900..10161903] , [10161905..10161908] , [10171500..10171506] , [10171600..10171611] , [10171700..10171702] , [10171800..10171802] , [10191500..10191500] , [10191506..10191511] , [10191700..10191701] , [10191703..10191707] , [10201500..10201523] , [10201600..10201610] , [10201700..10201746] , [10201800..10201813] , [10201900..10201962] , [10202000..10202037] , [10202100..10202186] , [10202200..10202241] , [10202300..10202393] , [10202400..10202480] , [10202500..10202510] , [10202600..10202632] , [10202700..10202782] , [10202800..10202888] , [10211500..10211519] , [10211600..10211619] , [10211700..10211734] , [10211800..10211805] , [10211900..10211910] , [10212000..10212011] , [10212100..10212104] , [10212200..10212219] , [10212300..10212305] , [10212400..10212405] , [10212500..10212505] , [10212600..10212653] , [10212800..10212813] , [10212900..10212906] , [10213000..10213010] , [10213100..10213113] , [10213200..10213208] , [10213300..10213305] , [10213400..10213405] , [10213500..10213510] , [10213600..10213614] , [10213700..10213709] , [10213800..10213808] , [10213900..10213929] , [10214000..10214006] , [10214100..10214114] , [10214200..10214206] , [10214300..10214305] , [10214400..10214406] , [10214500..10214507] , [10214600..10214619] , [10214700..10214712] , [10214800..10214829] , [10214900..10214919] , [10215000..10215007] , [10215100..10215106] , [10215200..10215206] , [10215300..10215306] , [10215400..10215484] , [10215500..10215506] , [10215600..10215613] , [10215700..10215705] , [10215800..10215808] , [10216000..10216005] , [10216100..10216105] , [10216200..10216218] , [10216300..10216302] , [10216306..10216306] , [10216311..10216311] , [10216315..10216315] , [10216322..10216322] , [10216325..10216326] , [10216332..10216332] , [10216400..10216413] , [10216500..10216507] , [10216600..10216604] , [10216700..10216709] , [10216800..10216808] , [10216900..10216910] , [10217000..10217006] , [10217100..10217108] , [10217200..10217208] , [10217300..10217355] , 
[10217400..10217424] , [10217500..10217510] , [10217600..10217606] , [10217700..10217708] , [10217800..10217827] , [10217900..10217972] , [10218000..10218027] , [10218100..10218122] , [10218200..10218220] , [10218300..10218350] , [10221500..10221502] , [10221600..10221602] , [10221700..10221704] , [10221800..10221802] , [10221900..10221902] , [10222000..10222003] , [10222100..10222102] , [10222200..10222203] , [10222300..10222302] , [10222400..10222402] , [10222500..10222502] , [10222600..10222603] , [10222700..10222703] , [10222800..10222803] , [10222900..10222902] , [10223000..10223003] , [10223100..10223103] , [10223200..10223202] , [10223300..10223302] , [10223400..10223403] , [10223500..10223504] , [10223600..10223602] , [10223700..10223702] , [10223800..10223804] , [10223900..10223902] , [10224000..10224002] , [10224100..10224102] , [10224200..10224202] , [10224300..10224303] , [10224400..10224402] , [10224500..10224502] , [10224600..10224602] , [10224700..10224703] , [10224800..10224804] , [10224900..10224902] , [10225000..10225004] , [10225100..10225104] , [10225200..10225202] , [10225300..10225303] , [10225400..10225404] , [10225500..10225503] , [10225600..10225602] , [10225700..10225701] , [10225800..10225804] , [10225900..10225904] , [10226000..10226082] , [10226084..10226086] , [10226100..10226102] , [10226200..10226204] , [10226300..10226303] , [10226400..10226402] , [10226500..10226502] , [10226600..10226603] , [10226700..10226704] , [10231500..10231513] , [10231600..10231647] , [10231700..10231714] , [10231800..10231812] , [10231900..10231926] , [10232000..10232010] , [10232012..10232067] , [10232100..10232132] , [10241500..10241520] , [10241600..10241614] , [10251500..10251505] , [10251600..10251605] , [10251700..10251702] , [10251800..10251806] , [10251900..10251905] , [10252000..10252065] , [10252067..10252067] , [10252100..10252112] , [10252200..10252213] , [10252300..10252302] , [10252400..10252406] , [10301500..10301523] , [10301600..10301610] , [10301700..10301746] , [10301800..10301813] , [10301900..10301962] , [10302000..10302037] , [10302100..10302186] , [10302200..10302241] , [10302300..10302393] , [10302400..10302480] , [10302500..10302510] , [10302600..10302632] , [10302700..10302782] , [10302800..10302888] , [10311500..10311519] , [10311600..10311619] , [10311700..10311734] , [10311800..10311805] , [10311900..10311910] , [10312000..10312011] , [10312100..10312104] , [10312200..10312219] , [10312300..10312305] , [10312400..10312405] , [10312500..10312505] , [10312600..10312653] , [10312800..10312813] , [10312900..10312906] , [10313000..10313010] , [10313100..10313113] , [10313200..10313208] , [10313300..10313305] , [10313400..10313405] , [10313500..10313510] , [10313600..10313614] , [10313700..10313709] , [10313800..10313808] , [10313900..10313929] , [10314000..10314006] , [10314100..10314114] , [10314200..10314206] , [10314300..10314305] , [10314400..10314406] , [10314500..10314507] , [10314600..10314619] , [10314700..10314712] , [10314800..10314829] , [10314900..10314919] , [10315000..10315007] , [10315100..10315106] , [10315200..10315206] , [10315300..10315306] , [10315400..10315484] , [10315500..10315506] , [10315600..10315613] , [10315700..10315705] , [10315800..10315808] , [10316000..10316005] , [10316100..10316105] , [10316200..10316218] , [10316300..10316302] , [10316306..10316306] , [10316311..10316311] , [10316315..10316315] , [10316322..10316322] , [10316325..10316326] , [10316332..10316332] , [10316400..10316413] , [10316500..10316507] , 
[10316600..10316604] , [10316700..10316709] , [10316800..10316808] , [10316900..10316910] , [10317000..10317006] , [10317100..10317108] , [10317200..10317208] , [10317300..10317355] , [10317400..10317424] , [10317500..10317510] , [10317600..10317606] , [10317700..10317708] , [10317800..10317827] , [10317900..10317972] , [10318000..10318027] , [10318100..10318122] , [10318200..10318220] , [10318300..10318350] , [10321500..10321502] , [10321600..10321602] , [10321700..10321704] , [10321800..10321802] , [10321900..10321902] , [10322000..10322003] , [10322100..10322102] , [10322200..10322203] , [10322300..10322302] , [10322400..10322402] , [10322500..10322502] , [10322600..10322603] , [10322700..10322703] , [10322800..10322803] , [10322900..10322902] , [10323000..10323003] , [10323100..10323103] , [10323200..10323202] , [10323300..10323302] , [10323400..10323403] , [10323500..10323504] , [10323600..10323602] , [10323700..10323702] , [10323800..10323804] , [10323900..10323902] , [10324000..10324002] , [10324100..10324102] , [10324200..10324202] , [10324300..10324303] , [10324400..10324402] , [10324500..10324502] , [10324600..10324602] , [10324700..10324703] , [10324800..10324804] , [10324900..10324902] , [10325000..10325004] , [10325100..10325104] , [10325200..10325202] , [10325300..10325303] , [10325400..10325404] , [10325500..10325503] , [10325600..10325602] , [10325700..10325701] , [10325800..10325804] , [10325900..10325904] , [10326000..10326082] , [10326084..10326086] , [10326100..10326102] , [10326200..10326204] , [10326300..10326303] , [10326400..10326402] , [10326500..10326502] , [10326600..10326603] , [10326700..10326704] , [10331500..10331513] , [10331600..10331647] , [10331700..10331714] , [10331800..10331812] , [10331900..10331926] , [10332000..10332010] , [10332012..10332067] , [10332100..10332132] , [10341500..10341501] , [10341600..10341601] , [10341700..10341701] , [10341800..10341801] , [10341900..10341901] , [10342000..10342001] , [10342100..10342101] , [10342200..10342201] , [10351500..10351520] , [10351600..10351614] , [10361500..10361505] , [10361600..10361605] , [10361700..10361702] , [10361800..10361806] , [10361900..10361905] , [10362000..10362065] , [10362067..10362067] , [10362100..10362112] , [10362200..10362213] , [10362300..10362302] , [10362400..10362406] , [10401500..10401523] , [10401600..10401610] , [10401700..10401746] , [10401800..10401813] , [10401900..10401962] , [10402000..10402037] , [10402100..10402186] , [10402200..10402241] , [10402300..10402393] , [10402400..10402480] , [10402500..10402510] , [10402600..10402632] , [10402700..10402782] , [10402800..10402888] , [10411500..10411519] , [10411600..10411619] , [10411700..10411734] , [10411800..10411805] , [10411900..10411910] , [10412000..10412011] , [10412100..10412104] , [10412200..10412219] , [10412300..10412305] , [10412400..10412405] , [10412500..10412505] , [10412600..10412653] , [10412800..10412813] , [10412900..10412906] , [10413000..10413010] , [10413100..10413113] , [10413200..10413208] , [10413300..10413305] , [10413400..10413405] , [10413500..10413510] , [10413600..10413614] , [10413700..10413709] , [10413800..10413808] , [10413900..10413929] , [10414000..10414006] , [10414100..10414114] , [10414200..10414206] , [10414300..10414305] , [10414400..10414406] , [10414500..10414507] , [10414600..10414619] , [10414700..10414712] , [10414800..10414829] , [10414900..10414919] , [10415000..10415007] , [10415100..10415106] , [10415200..10415206] , [10415300..10415306] , [10415400..10415484] , 
[10415500..10415506] , [10415600..10415613] , [10415700..10415705] , [10415800..10415808] , [10416000..10416005] , [10416100..10416105] , [10416200..10416218] , [10416300..10416302] , [10416306..10416306] , [10416311..10416311] , [10416315..10416315] , [10416322..10416322] , [10416325..10416326] , [10416332..10416332] , [10416400..10416413] , [10416500..10416507] , [10416600..10416604] , [10416700..10416709] , [10416800..10416808] , [10416900..10416910] , [10417000..10417006] , [10417100..10417108] , [10417200..10417208] , [10417300..10417355] , [10417400..10417424] , [10417500..10417510] , [10417600..10417606] , [10417700..10417708] , [10417800..10417827] , [10417900..10417972] , [10418000..10418027] , [10418100..10418122] , [10418200..10418220] , [10418300..10418350] , [10421500..10421502] , [10421600..10421602] , [10421700..10421704] , [10421800..10421802] , [10421900..10421902] , [10422000..10422003] , [10422100..10422102] , [10422200..10422203] , [10422300..10422302] , [10422400..10422402] , [10422500..10422502] , [10422600..10422603] , [10422700..10422703] , [10422800..10422803] , [10422900..10422902] , [10423000..10423003] , [10423100..10423103] , [10423200..10423202] , [10423300..10423302] , [10423400..10423403] , [10423500..10423504] , [10423600..10423602] , [10423700..10423702] , [10423800..10423804] , [10423900..10423902] , [10424000..10424002] , [10424100..10424102] , [10424200..10424202] , [10424300..10424303] , [10424400..10424402] , [10424500..10424502] , [10424600..10424602] , [10424700..10424703] , [10424800..10424804] , [10424900..10424902] , [10425000..10425004] , [10425100..10425104] , [10425200..10425202] , [10425300..10425303] , [10425400..10425404] , [10425500..10425503] , [10425600..10425602] , [10425700..10425701] , [10425800..10425804] , [10425900..10425904] , [10426000..10426082] , [10426084..10426086] , [10426100..10426102] , [10426200..10426204] , [10426300..10426303] , [10426400..10426402] , [10426500..10426502] , [10426600..10426603] , [10426700..10426704] , [10431500..10431513] , [10431600..10431647] , [10431700..10431714] , [10431800..10431812] , [10431900..10431926] , [10432000..10432010] , [10432012..10432067] , [10432100..10432132] , [10441500..10441520] , [10441600..10441614] , [10451500..10451505] , [10451600..10451605] , [10451700..10451702] , [10451800..10451806] , [10451900..10451905] , [10452000..10452065] , [10452067..10452067] , [10452100..10452112] , [10452200..10452213] , [10452300..10452302] , [10452400..10452406] , [10501500..10501503] , [10501600..10501604] , [10501700..10501703] , [10501800..10501813] , [10501900..10501908] , [10502000..10502017] , [10502100..10502125] , [10502200..10502202] , [10502300..10502305] , [10502400..10502402] , [10502500..10502502] , [10502600..10502601] , [10502700..10502703] , [10502800..10502805] , [10502900..10502959] , [11101500..11101534] , [11101600..11101625] , [11101700..11101702] , [11101704..11101706] , [11101708..11101714] , [11101716..11101719] , [11101800..11101803] , [11101900..11101908] , [11111500..11111505] , [11111600..11111613] , [11111700..11111701] , [11111800..11111812] , [11121500..11121500] , [11121502..11121503] , [11121600..11121600] , [11121603..11121608] , [11121610..11121610] , [11121612..11121612] , [11121614..11121620] , [11121700..11121703] , [11121705..11121710] , [11121800..11121810] , [11121900..11121901] , [11122000..11122006] , [11131500..11131508] , [11131600..11131600] , [11131602..11131605] , [11131607..11131608] , [11141600..11141610] , [11141700..11141702] , 
[11151500..11151519] , [11151600..11151612] , [11151700..11151706] , [11151708..11151716] , [11161500..11161504] , [11161600..11161604] , [11161700..11161705] , [11161800..11161807] , [11162000..11162003] , [11162100..11162102] , [11162104..11162105] , [11162107..11162133] , [11162200..11162202] , [11162300..11162311] , [11162400..11162402] , [11171500..11171501] , [11171600..11171604] , [11171700..11171702] , [11171800..11171801] , [11171900..11171901] , [11172000..11172003] , [11172100..11172101] , [11172200..11172201] , [11172300..11172304] , [11181500..11181508] , [11191500..11191505] , [11191600..11191611] , [11191700..11191702] , [11191800..11191803] , [12131500..12131509] , [12131600..12131605] , [12131700..12131709] , [12131800..12131806] , [12141500..12141506] , [12141600..12141617] , [12141700..12141760] , [12141800..12141806] , [12141900..12141916] , [12142000..12142006] , [12142100..12142110] , [12142200..12142208] , [12161500..12161507] , [12161600..12161606] , [12161700..12161706] , [12161800..12161809] , [12161900..12161913] , [12162000..12162000] , [12162002..12162006] , [12162100..12162101] , [12162200..12162212] , [12162300..12162305] , [12162400..12162402] , [12162500..12162503] , [12162600..12162602] , [12162700..12162702] , [12162800..12162802] , [12162900..12162903] , [12163000..12163001] , [12163100..12163102] , [12163200..12163201] , [12163300..12163301] , [12163400..12163401] , [12163500..12163501] , [12163600..12163602] , [12163700..12163701] , [12163800..12163802] , [12163900..12163902] , [12164000..12164001] , [12164100..12164102] , [12164200..12164201] , [12164300..12164303] , [12164400..12164401] , [12164500..12164509] , [12164600..12164606] , [12164700..12164701] , [12164800..12164803] , [12164900..12164905] , [12165000..12165001] , [12165100..12165104] , [12171500..12171511] , [12171600..12171600] , [12171602..12171621] , [12171700..12171703] , [12181500..12181504] , [12181600..12181602] , [12191500..12191504] , [12191600..12191602] , [12352000..12352003] , [12352005..12352005] , [12352100..12352108] , [12352111..12352121] , [12352123..12352133] , [12352135..12352137] , [12352200..12352212] , [12352300..12352321] , [12352400..12352402] , [12352500..12352503] , [12352600..12352600] , [13101500..13101505] , [13101600..13101608] , [13101700..13101725] , [13101900..13101900] , [13101902..13101906] , [13102000..13102003] , [13102005..13102006] , [13102008..13102008] , [13102010..13102014] , [13102016..13102022] , [13102024..13102032] , [13111000..13111082] , [13111100..13111103] , [13111200..13111220] , [13111300..13111308] , [14101500..14101501] , [14111500..14111516] , [14111518..14111520] , [14111523..14111543] , [14111600..14111601] , [14111604..14111611] , [14111613..14111618] , [14111700..14111706] , [14111800..14111828] , [14121500..14121507] , [14121600..14121605] , [14121700..14121703] , [14121800..14121812] , [14121900..14121905] , [14122100..14122107] , [14122200..14122202] , [15101500..15101500] , [15101502..15101513] , [15101600..15101614] , [15101700..15101702] , [15101800..15101801] , [15111500..15111511] , [15111700..15111702] , [15121500..15121505] , [15121508..15121530] , [15121800..15121807] , [15121900..15121905] , [15131500..15131500] , [15131502..15131506] , [15131600..15131601] , [20101500..20101506] , [20101600..20101603] , [20101617..20101621] , [20101700..20101716] , [20101800..20101805] , [20101810..20101810] , [20101900..20101903] , [20102000..20102008] , [20102100..20102106] , [20102200..20102203] , [20102300..20102307] , 
[20111500..20111500] , [20111504..20111505] , [20111600..20111604] , [20111606..20111623] , [20111700..20111715] , [20121000..20121016] , [20121100..20121116] , [20121118..20121130] , [20121200..20121213] , [20121300..20121326] , [20121400..20121425] , [20121427..20121451] , [20121500..20121511] , [20121513..20121524] , [20121600..20121613] , [20121700..20121728] , [20121800..20121813] , [20121900..20121923] , [20122000..20122006] , [20122100..20122115] , [20122200..20122216] , [20122300..20122336] , [20122338..20122354] , [20122356..20122373] , [20122400..20122410] , [20122500..20122516] , [20122518..20122518] , [20122600..20122623] , [20122700..20122710] , [20122800..20122804] , [20122806..20122849] , [20122851..20122851] , [20122900..20122903] , [20123000..20123004] , [20123100..20123102] , [20123200..20123203] , [20123300..20123304] , [20131000..20131010] , [20131100..20131106] , [20131200..20131202] , [20131300..20131308] , [20141000..20141008] , [20141011..20141018] , [20141100..20141101] , [20141200..20141201] , [20141300..20141303] , [20141400..20141401] , [20141500..20141502] , [20141600..20141601] , [20141700..20141705] , [20141800..20141801] , [20141900..20141901] , [20142000..20142001] , [20142100..20142101] , [20142200..20142201] , [20142300..20142301] , [20142400..20142407] , [20142500..20142501] , [20142600..20142601] , [20142700..20142710] , [20142800..20142801] , [20142900..20142905] , [20143000..20143005] , [20143300..20143303] , [21101500..21101509] , [21101511..21101514] , [21101516..21101523] , [21101600..21101612] , [21101700..21101712] , [21101800..21101809] , [21101900..21101915] , [21102000..21102008] , [21102100..21102102] , [21102200..21102207] , [21102300..21102306] , [21102400..21102404] , [21111500..21111504] , [21111506..21111508] , [21111600..21111602] , [22101500..22101502] , [22101504..22101505] , [22101507..22101509] , [22101511..22101511] , [22101513..22101514] , [22101516..22101516] , [22101518..22101538] , [22101600..22101600] , [22101602..22101631] , [22101700..22101724] , [22101800..22101804] , [22101900..22101908] , [22102000..22102002] , [23101500..23101522] , [23101525..23101525] , [23101528..23101538] , [23111500..23111507] , [23111600..23111606] , [23121500..23121520] , [23121600..23121616] , [23131500..23131515] , [23131600..23131604] , [23131700..23131704] , [23141600..23141608] , [23141700..23141704] , [23151500..23151504] , [23151506..23151520] , [23151600..23151604] , [23151606..23151610] , [23151700..23151705] , [23151800..23151814] , [23151816..23151819] , [23151821..23151825] , [23151900..23151911] , [23152000..23152002] , [23152100..23152113] , [23152200..23152206] , [23152900..23152910] , [23153000..23153039] , [23153100..23153103] , [23153129..23153145] , [23153200..23153207] , [23153400..23153421] , [23153500..23153508] , [23153600..23153608] , [23153700..23153702] , [23153800..23153802] , [23161500..23161503] , [23161506..23161507] , [23161510..23161510] , [23161514..23161514] , [23161516..23161517] , [23161600..23161603] , [23161605..23161608] , [23161700..23161702] , [23181500..23181502] , [23181504..23181518] , [23181600..23181606] , [23181700..23181705] , [23181800..23181806] , [23191000..23191006] , [23191100..23191102] , [23191200..23191202] , [23201000..23201008] , [23201100..23201102] , [23201200..23201204] , [23211000..23211003] , [23211100..23211106] , [23221000..23221002] , [23221100..23221102] , [23221200..23221201] , [23231000..23231002] , [23231100..23231102] , [23231200..23231202] , [23231300..23231302] , 
[23231400..23231402] , [23231500..23231502] , [23231600..23231602] , [23231700..23231701] , [23231800..23231801] , [23231900..23231903] , [23232000..23232001] , [23232100..23232101] , [23232200..23232201] , [23241400..23241411] , [23241500..23241511] , [23241600..23241603] , [23241605..23241606] , [23241608..23241647] , [23241700..23241703] , [23241800..23241809] , [23241900..23241906] , [23242100..23242120] , [23242200..23242208] , [23242300..23242308] , [23242400..23242403] , [23242500..23242511] , [23242600..23242615] , [23242700..23242702] , [23251500..23251509] , [23251600..23251603] , [23251700..23251714] , [23251800..23251814] , [23251900..23251901] , [23261500..23261507] , [23271400..23271424] , [23271500..23271502] , [23271600..23271607] , [23271700..23271718] , [23271800..23271821] , [23281500..23281504] , [23281600..23281603] , [23281700..23281704] , [23281800..23281802] , [23281900..23281905] , [23291500..23291504] , [23291600..23291601] , [23291700..23291704] , [23291800..23291804] , [23291900..23291902] , [23301500..23301501] , [24101500..24101517] , [24101600..24101606] , [24101608..24101662] , [24101700..24101719] , [24101721..24101750] , [24101800..24101809] , [24101900..24101908] , [24102000..24102002] , [24102004..24102011] , [24102100..24102109] , [24102200..24102204] , [24102208..24102210] , [24102300..24102302] , [24111500..24111503] , [24111505..24111514] , [24111800..24111818] , [24112000..24112000] , [24112003..24112007] , [24112100..24112102] , [24112108..24112112] , [24112200..24112200] , [24112204..24112209] , [24112400..24112404] , [24112406..24112409] , [24112411..24112415] , [24112500..24112505] , [24112600..24112602] , [24112700..24112702] , [24112800..24112805] , [24112900..24112902] , [24113000..24113003] , [24121500..24121500] , [24121502..24121504] , [24121506..24121513] , [24121800..24121808] , [24122000..24122006] , [24131500..24131514] , [24131600..24131610] , [24131900..24131903] , [24141500..24141502] , [24141504..24141504] , [24141506..24141508] , [24141510..24141520] , [24141600..24141608] , [24141700..24141710] , [25101500..25101509] , [25101600..25101602] , [25101604..25101604] , [25101609..25101613] , [25101700..25101703] , [25101800..25101804] , [25101900..25101938] , [25102000..25102003] , [25102100..25102106] , [25111500..25111535] , [25111600..25111603] , [25111700..25111726] , [25111800..25111808] , [25111900..25111940] , [25121500..25121504] , [25121600..25121605] , [25121700..25121717] , [25131500..25131509] , [25131600..25131605] , [25131700..25131709] , [25131800..25131802] , [25131900..25131900] , [25131902..25131906] , [25132000..25132005] , [25151500..25151502] , [25151700..25151709] , [25161500..25161510] , [25171500..25171500] , [25171502..25171508] , [25171600..25171600] , [25171602..25171603] , [25171700..25171700] , [25171702..25171725] , [25171900..25171903] , [25171905..25171906] , [25172000..25172005] , [25172007..25172007] , [25172009..25172014] , [25172100..25172101] , [25172104..25172106] , [25172108..25172131] , [25172200..25172201] , [25172203..25172205] , [25172300..25172301] , [25172303..25172305] , [25172400..25172400] , [25172404..25172411] , [25172500..25172500] , [25172502..25172512] , [25172600..25172611] , [25172700..25172700] , [25172702..25172711] , [25172800..25172800] , [25172802..25172803] , [25172900..25172901] , [25172903..25172908] , [25173000..25173001] , [25173003..25173008] , [25173100..25173100] , [25173107..25173108] , [25173300..25173300] , [25173303..25173304] , [25173700..25173708] , 
[25173800..25173813] , [25173815..25173821] , [25173900..25173903] , [25174000..25174006] , [25174100..25174107] , [25174200..25174217] , [25174400..25174421] , [25174600..25174605] , [25174700..25174705] , [25174800..25174810] , [25174900..25174903] , [25175000..25175003] , [25181600..25181612] , [25181700..25181719] , [25191500..25191525] , [25191600..25191605] , [25191700..25191743] , [25191800..25191838] , [25201500..25201522] , [25201600..25201606] , [25201700..25201710] , [25201800..25201802] , [25201900..25201904] , [25202000..25202004] , [25202100..25202105] , [25202200..25202207] , [25202300..25202302] , [25202400..25202406] , [25202500..25202510] , [25202600..25202607] , [25202700..25202702] , [26101100..26101103] , [26101105..26101117] , [26101200..26101212] , [26101300..26101304] , [26101306..26101306] , [26101309..26101313] , [26101400..26101406] , [26101408..26101415] , [26101500..26101515] , [26101700..26101713] , [26101715..26101721] , [26101723..26101738] , [26101740..26101743] , [26101747..26101751] , [26101754..26101787] , [26101900..26101900] , [26101903..26101905] , [26111500..26111500] , [26111503..26111506] , [26111508..26111510] , [26111512..26111525] , [26111527..26111549] , [26111600..26111613] , [26111700..26111729] , [26111800..26111816] , [26111900..26111905] , [26111907..26111917] , [26112000..26112004] , [26112100..26112105] , [26121500..26121501] , [26121505..26121505] , [26121507..26121510] , [26121514..26121515] , [26121517..26121517] , [26121519..26121524] , [26121532..26121534] , [26121536..26121536] , [26121538..26121548] , [26121600..26121604] , [26121606..26121624] , [26121628..26121666] , [26121700..26121704] , [26121706..26121711] , [26121800..26121800] , [26121802..26121852] , [26131500..26131510] , [26131600..26131618] , [26131700..26131702] , [26131800..26131804] , [26131807..26131808] , [26131811..26131814] , [26141600..26141603] , [26141700..26141704] , [26141800..26141809] , [26141900..26141902] , [26141904..26141911] , [26142000..26142007] , [26142100..26142101] , [26142106..26142106] , [26142108..26142108] , [26142117..26142117] , [26142200..26142202] , [26142300..26142300] , [26142302..26142304] , [26142306..26142308] , [26142310..26142312] , [26142400..26142408] , [27111500..27111527] , [27111529..27111562] , [27111600..27111605] , [27111607..27111623] , [27111700..27111718] , [27111720..27111767] , [27111800..27111804] , [27111806..27111807] , [27111809..27111829] , [27111900..27111901] , [27111903..27111946] , [27111948..27111958] , [27112000..27112047] , [27112100..27112117] , [27112119..27112162] , [27112200..27112203] , [27112205..27112213] , [27112215..27112232] , [27112300..27112313] , [27112400..27112410] , [27112500..27112508] , [27112600..27112604] , [27112700..27112750] , [27112800..27112800] , [27112802..27112815] , [27112818..27112847] , [27112900..27112916] , [27113000..27113005] , [27113100..27113105] , [27113200..27113204] , [27113300..27113301] , [27121500..27121504] , [27121600..27121606] , [27121700..27121707] , [27121800..27121812] , [27131500..27131502] , [27131504..27131528] , [27131600..27131601] , [27131603..27131605] , [27131608..27131610] , [27131613..27131615] , [27131700..27131709] , [27141000..27141001] , [27141100..27141101] , [30101500..30101517] , [30101700..30101720] , [30101800..30101818] , [30102000..30102016] , [30102200..30102218] , [30102220..30102227] , [30102300..30102316] , [30102400..30102417] , [30102800..30102804] , [30102900..30102901] , [30102903..30102907] , [30103100..30103104] , 
[30103200..30103208] , [30103500..30103515] , [30103600..30103623] , [30103700..30103701] , [30103800..30103804] , [30103900..30103901] , [30111500..30111509] , [30111600..30111605] , [30111607..30111607] , [30111700..30111701] , [30111800..30111802] , [30111900..30111903] , [30121500..30121501] , [30121503..30121504] , [30121600..30121605] , [30121700..30121719] , [30121800..30121803] , [30121900..30121901] , [30131500..30131500] , [30131502..30131517] , [30131600..30131600] , [30131602..30131610] , [30131700..30131700] , [30131702..30131706] , [30141500..30141501] , [30141503..30141503] , [30141505..30141505] , [30141508..30141508] , [30141510..30141516] , [30141600..30141601] , [30141603..30141605] , [30141700..30141703] , [30151500..30151503] , [30151505..30151505] , [30151507..30151515] , [30151600..30151603] , [30151605..30151605] , [30151607..30151610] , [30151700..30151704] , [30151800..30151803] , [30151805..30151808] , [30151900..30151902] , [30152000..30152003] , [30152100..30152101] , [30161500..30161505] , [30161508..30161511] , [30161600..30161604] , [30161700..30161703] , [30161705..30161715] , [30161717..30161721] , [30161800..30161801] , [30161803..30161810] , [30161900..30161906] , [30161908..30161908] , [30162000..30162005] , [30162100..30162104] , [30162200..30162204] , [30162300..30162311] , [30162400..30162404] , [30171500..30171526] , [30171600..30171600] , [30171604..30171615] , [30171700..30171701] , [30171703..30171712] , [30171800..30171804] , [30171900..30171908] , [30172000..30172002] , [30172100..30172113] , [30181500..30181508] , [30181511..30181511] , [30181514..30181517] , [30181600..30181614] , [30181700..30181701] , [30181800..30181812] , [30191500..30191502] , [30191505..30191510] , [30191600..30191617] , [30191700..30191702] , [30191800..30191803] , [30241500..30241515] , [30241600..30241604] , [30241700..30241702] , [30251500..30251505] , [30261500..30261505] , [30261600..30261603] , [30261700..30261704] , [30261800..30261802] , [30261900..30261904] , [30262000..30262002] , [30262100..30262104] , [30262200..30262202] , [30262300..30262302] , [30262400..30262405] , [30262500..30262506] , [30262600..30262606] , [30262700..30262704] , [30262800..30262802] , [30262900..30262904] , [30263000..30263002] , [30263100..30263102] , [30263200..30263203] , [30263300..30263303] , [30263400..30263402] , [30263500..30263504] , [30263600..30263608] , [30263700..30263710] , [30263800..30263802] , [30263900..30263901] , [30264000..30264024] , [30264100..30264110] , [30264200..30264204] , [30264300..30264305] , [30264400..30264413] , [30264500..30264506] , [30264600..30264606] , [30264700..30264705] , [30264800..30264806] , [30264900..30264912] , [30265000..30265012] , [30265100..30265106] , [30265200..30265202] , [30265300..30265304] , [30265400..30265404] , [30265500..30265506] , [30265600..30265602] , [30265700..30265704] , [30265800..30265803] , [30265900..30265902] , [30266000..30266002] , [30266100..30266103] , [30266200..30266216] , [30266300..30266301] , [30266400..30266408] , [30266410..30266410] , [30266500..30266504] , [30266600..30266602] , [30266700..30266702] , [31101600..31101604] , [31101606..31101619] , [31101700..31101716] , [31101800..31101804] , [31101806..31101817] , [31101900..31101912] , [31101914..31101914] , [31102000..31102016] , [31102100..31102116] , [31102200..31102216] , [31102300..31102316] , [31102400..31102416] , [31102500..31102501] , [31102600..31102603] , [31102700..31102701] , [31102800..31102801] , [31102900..31102901] , 
[31111500..31111517] , [31111600..31111617] , [31111700..31111717] , [31121000..31121019] , [31121100..31121125] , [31121200..31121227] , [31121300..31121321] , [31121400..31121419] , [31121500..31121521] , [31121600..31121618] , [31121700..31121719] , [31121800..31121819] , [31121900..31121919] , [31122000..31122002] , [31122100..31122102] , [31122200..31122202] , [31132000..31132002] , [31132100..31132112] , [31132200..31132208] , [31132300..31132308] , [31132400..31132412] , [31132500..31132512] , [31132600..31132612] , [31132700..31132712] , [31132800..31132812] , [31132900..31132912] , [31133000..31133012] , [31133100..31133112] , [31133200..31133212] , [31133300..31133312] , [31133400..31133412] , [31133500..31133512] , [31133600..31133612] , [31133700..31133711] , [31141500..31141503] , [31141600..31141603] , [31141700..31141702] , [31141800..31141802] , [31141900..31141901] , [31142000..31142007] , [31142100..31142107] , [31142200..31142207] , [31142300..31142307] , [31142400..31142407] , [31142500..31142507] , [31142600..31142607] , [31142700..31142707] , [31142800..31142804] , [31142900..31142904] , [31143000..31143002] , [31143100..31143103] , [31143200..31143205] , [31151500..31151521] , [31151600..31151601] , [31151603..31151619] , [31151700..31151700] , [31151702..31151710] , [31151800..31151800] , [31151803..31151806] , [31151900..31151906] , [31152000..31152002] , [31152100..31152112] , [31152200..31152209] , [31152300..31152307] , [31161500..31161514] , [31161516..31161534] , [31161600..31161614] , [31161616..31161640] , [31161700..31161714] , [31161716..31161741] , [31161800..31161838] , [31161900..31161912] , [31162000..31162014] , [31162100..31162109] , [31162200..31162216] , [31162300..31162301] , [31162303..31162314] , [31162400..31162407] , [31162409..31162421] , [31162500..31162508] , [31162600..31162614] , [31162700..31162704] , [31162800..31162819] , [31162900..31162920] , [31163000..31163032] , [31163100..31163103] , [31163200..31163205] , [31163207..31163231] , [31163300..31163304] , [31163400..31163401] , [31171500..31171513] , [31171515..31171516] , [31171518..31171520] , [31171522..31171564] , [31171600..31171600] , [31171603..31171610] , [31171700..31171700] , [31171704..31171704] , [31171706..31171716] , [31171800..31171806] , [31171900..31171905] , [31181700..31181705] , [31181800..31181802] , [31191500..31191502] , [31191504..31191521] , [31191600..31191603] , [31201500..31201537] , [31201600..31201637] , [31211500..31211522] , [31211600..31211607] , [31211700..31211709] , [31211800..31211803] , [31211900..31211906] , [31211908..31211910] , [31211912..31211918] , [31221600..31221603] , [31231100..31231120] , [31231200..31231219] , [31231400..31231405] , [31241500..31241502] , [31241600..31241610] , [31241700..31241706] , [31241800..31241815] , [31241900..31241908] , [31242000..31242003] , [31242100..31242101] , [31242103..31242107] , [31242200..31242208] , [31251500..31251514] , [31251600..31251601] , [31261500..31261505] , [31261600..31261603] , [31261700..31261704] , [31271600..31271602] , [31281500..31281500] , [31281502..31281538] , [31281700..31281701] , [31281800..31281819] , [31281900..31281919] , [31282000..31282019] , [31282100..31282119] , [31282200..31282219] , [31282300..31282319] , [31282400..31282419] , [31291100..31291120] , [31291200..31291220] , [31291300..31291320] , [31291400..31291420] , [31301100..31301119] , [31301200..31301219] , [31301300..31301319] , [31301400..31301419] , [31301500..31301519] , [31311100..31311106] , 
[31311109..31311113] , [31311200..31311206] , [31311209..31311213] , [31311300..31311306] , [31311309..31311313] , [31311400..31311406] , [31311409..31311413] , [31311500..31311506] , [31311509..31311513] , [31311600..31311606] , [31311609..31311613] , [31311700..31311706] , [31311709..31311713] , [31321100..31321106] , [31321109..31321113] , [31321200..31321206] , [31321209..31321213] , [31321300..31321306] , [31321309..31321313] , [31321400..31321406] , [31321409..31321413] , [31321500..31321506] , [31321509..31321513] , [31321600..31321606] , [31321609..31321613] , [31321700..31321706] , [31321709..31321713] , [31331100..31331106] , [31331109..31331113] , [31331200..31331206] , [31331209..31331213] , [31331300..31331306] , [31331309..31331313] , [31331400..31331406] , [31331409..31331413] , [31331500..31331506] , [31331509..31331513] , [31331600..31331606] , [31331609..31331613] , [31331700..31331706] , [31331709..31331713] , [31341100..31341106] , [31341109..31341113] , [31341200..31341206] , [31341209..31341213] , [31341300..31341306] , [31341309..31341313] , [31341400..31341406] , [31341409..31341413] , [31341500..31341506] , [31341509..31341513] , [31341600..31341606] , [31341609..31341613] , [31341700..31341706] , [31341709..31341713] , [31351100..31351106] , [31351109..31351113] , [31351200..31351206] , [31351209..31351213] , [31351300..31351306] , [31351309..31351313] , [31351400..31351406] , [31351409..31351413] , [31351500..31351506] , [31351509..31351513] , [31351600..31351606] , [31351609..31351613] , [31351700..31351706] , [31351709..31351713] , [31361100..31361106] , [31361109..31361113] , [31361200..31361206] , [31361209..31361213] , [31361300..31361306] , [31361309..31361313] , [31361400..31361406] , [31361409..31361413] , [31361500..31361506] , [31361509..31361513] , [31361600..31361606] , [31361609..31361613] , [31361700..31361706] , [31361709..31361713] , [31371000..31371003] , [31371100..31371108] , [31371200..31371212] , [31371300..31371302] , [31371400..31371401] , [31381100..31381155] , [31381200..31381255] , [31381300..31381355] , [31381400..31381455] , [31381500..31381555] , [31391500..31391506] , [31391600..31391606] , [31391700..31391706] , [31401500..31401507] , [31401600..31401610] , [31401700..31401708] , [31401800..31401807] , [31401900..31401904] , [31411500..31411506] , [31411600..31411605] , [31411700..31411705] , [31411800..31411805] , [31411900..31411905] , [31421500..31421522] , [32101500..32101500] , [32101502..32101510] , [32101512..32101548] , [32101600..32101609] , [32101611..32101672] , [32111500..32111515] , [32111600..32111604] , [32111607..32111616] , [32111700..32111709] , [32121500..32121515] , [32121600..32121600] , [32121602..32121603] , [32121607..32121607] , [32121609..32121618] , [32121700..32121712] , [32121800..32121804] , [32121900..32121902] , [32131000..32131003] , [32131005..32131023] , [32141000..32141022] , [32141100..32141110] , [32151500..32151504] , [32151600..32151603] , [32151700..32151707] , [32151800..32151805] , [32151900..32151910] , [32152000..32152003] , [39101600..39101603] , [39101605..39101605] , [39101608..39101609] , [39101612..39101627] , [39101800..39101806] , [39101900..39101907] , [39111500..39111501] , [39111503..39111510] , [39111512..39111512] , [39111515..39111515] , [39111520..39111522] , [39111524..39111525] , [39111527..39111541] , [39111600..39111600] , [39111603..39111603] , [39111605..39111606] , [39111608..39111616] , [39111700..39111700] , [39111703..39111703] , [39111705..39111714] , 
[39111800..39111803] , [39111806..39111806] , [39111808..39111830] , [39111900..39111900] , [39111903..39111909] , [39112000..39112013] , [39112100..39112102] , [39112200..39112202] , [39112300..39112309] , [39112400..39112403] , [39112500..39112508] , [39112600..39112604] , [39121000..39121004] , [39121006..39121047] , [39121100..39121117] , [39121300..39121337] , [39121400..39121400] , [39121402..39121410] , [39121412..39121416] , [39121419..39121421] , [39121423..39121428] , [39121431..39121438] , [39121440..39121467] , [39121500..39121500] , [39121521..39121524] , [39121527..39121529] , [39121534..39121534] , [39121544..39121545] , [39121551..39121552] , [39121555..39121555] , [39121561..39121561] , [39121565..39121565] , [39121568..39121570] , [39121600..39121607] , [39121609..39121648] , [39121700..39121710] , [39121717..39121731] , [39121800..39121803] , [39121900..39121910] , [39122000..39122003] , [39122100..39122120] , [39122200..39122251] , [39122300..39122337] , [39131500..39131509] , [39131600..39131608] , [39131700..39131720] , [40101500..40101512] , [40101600..40101610] , [40101700..40101722] , [40101800..40101802] , [40101805..40101850] , [40101900..40101903] , [40102000..40102007] , [40102100..40102107] , [40141600..40141600] , [40141602..40141613] , [40141615..40141659] , [40141700..40141700] , [40141719..40141720] , [40141725..40141727] , [40141731..40141732] , [40141734..40141747] , [40141749..40141751] , [40141755..40141770] , [40141900..40141924] , [40142000..40142022] , [40142200..40142208] , [40142500..40142515] , [40151500..40151534] , [40151546..40151580] , [40151600..40151609] , [40151611..40151616] , [40151700..40151701] , [40151712..40151739] , [40151800..40151804] , [40161500..40161509] , [40161511..40161522] , [40161524..40161533] , [40161600..40161608] , [40161700..40161705] , [40161800..40161809] , [40171500..40171527] , [40171600..40171626] , [40171700..40171711] , [40171800..40171803] , [40171900..40171905] , [40172000..40172008] , [40172100..40172103] , [40172200..40172206] , [40172300..40172313] , [40172400..40172413] , [40172500..40172522] , [40172600..40172612] , [40172700..40172712] , [40172800..40172812] , [40172900..40172911] , [40173000..40173009] , [40173100..40173104] , [40173200..40173203] , [40173300..40173307] , [40173400..40173409] , [40173500..40173513] , [40173600..40173613] , [40173700..40173711] , [40173800..40173808] , [40173900..40173910] , [40174000..40174007] , [40174100..40174108] , [40174200..40174203] , [40174300..40174310] , [40174400..40174403] , [40174500..40174506] , [40174600..40174612] , [40174700..40174711] , [40174800..40174808] , [40174900..40174912] , [40175000..40175003] , [40175100..40175103] , [40175200..40175212] , [40175300..40175309] , [40181500..40181506] , [40181600..40181606] , [40181700..40181706] , [40181800..40181806] , [40181900..40181906] , [40182000..40182006] , [40182100..40182106] , [40182200..40182206] , [40182300..40182306] , [40182400..40182406] , [40182500..40182506] , [40182600..40182606] , [40182700..40182707] , [40182800..40182806] , [40182900..40182906] , [40183000..40183010] , [40183100..40183112] , [41101500..41101500] , [41101502..41101505] , [41101515..41101516] , [41101518..41101518] , [41101700..41101703] , [41101705..41101709] , [41101800..41101813] , [41101900..41101903] , [41102400..41102407] , [41102410..41102410] , [41102412..41102412] , [41102421..41102429] , [41102500..41102513] , [41102600..41102609] , [41102700..41102706] , [41102900..41102905] , [41102909..41102927] , 
[41103000..41103001] , [41103003..41103008] , [41103010..41103015] , [41103017..41103017] , [41103019..41103027] , [41103200..41103203] , [41103205..41103212] , [41103300..41103303] , [41103305..41103327] , [41103400..41103401] , [41103403..41103403] , [41103406..41103421] , [41103500..41103502] , [41103504..41103504] , [41103506..41103516] , [41103700..41103717] , [41103800..41103817] , [41103900..41103914] , [41104000..41104022] , [41104100..41104112] , [41104114..41104129] , [41104200..41104213] , [41104300..41104308] , [41104400..41104427] , [41104500..41104513] , [41104600..41104613] , [41104700..41104704] , [41104800..41104821] , [41104900..41104930] , [41105000..41105003] , [41105100..41105109] , [41105200..41105209] , [41105300..41105305] , [41105307..41105341] , [41105500..41105521] , [41105600..41105601] , [41105700..41105701] , [41105800..41105804] , [41105900..41105909] , [41106000..41106006] , [41106100..41106104] , [41106200..41106223] , [41106300..41106314] , [41106400..41106403] , [41106500..41106516] , [41106600..41106622] , [41106700..41106708] , [41111500..41111513] , [41111515..41111526] , [41111600..41111607] , [41111613..41111651] , [41111700..41111731] , [41111733..41111749] , [41111800..41111820] , [41111900..41111924] , [41111926..41111978] , [41112100..41112101] , [41112103..41112114] , [41112200..41112207] , [41112209..41112217] , [41112219..41112240] , [41112300..41112307] , [41112400..41112423] , [41112500..41112506] , [41112508..41112514] , [41112516..41112521] , [41112600..41112602] , [41112700..41112702] , [41112704..41112704] , [41112800..41112809] , [41112900..41112907] , [41113000..41113010] , [41113023..41113027] , [41113029..41113031] , [41113033..41113057] , [41113100..41113124] , [41113300..41113302] , [41113304..41113306] , [41113308..41113316] , [41113318..41113339] , [41113400..41113407] , [41113600..41113608] , [41113611..41113647] , [41113649..41113689] , [41113700..41113740] , [41113800..41113831] , [41113900..41113910] , [41114000..41114001] , [41114100..41114100] , [41114102..41114108] , [41114200..41114221] , [41114300..41114303] , [41114400..41114427] , [41114500..41114530] , [41114600..41114650] , [41114700..41114723] , [41114800..41114803] , [41115100..41115102] , [41115200..41115203] , [41115300..41115339] , [41115400..41115409] , [41115411..41115416] , [41115500..41115515] , [41115600..41115604] , [41115606..41115615] , [41115700..41115723] , [41115800..41115840] , [41116000..41116018] , [41116100..41116113] , [41116116..41116157] , [41116200..41116203] , [41116205..41116212] , [41116300..41116304] , [41116400..41116401] , [41116500..41116502] , [41121500..41121511] , [41121513..41121517] , [41121600..41121609] , [41121700..41121713] , [41121800..41121819] , [41122000..41122004] , [41122100..41122110] , [41122200..41122203] , [41122300..41122301] , [41122400..41122415] , [41122500..41122503] , [41122600..41122607] , [41122700..41122704] , [41122800..41122811] , [41123000..41123005] , [41123100..41123105] , [41123200..41123202] , [41123300..41123300] , [41123302..41123306] , [41123400..41123403] , [42121500..42121515] , [42121600..42121608] , [42121700..42121702] , [42121800..42121804] , [42131500..42131512] , [42131600..42131613] , [42131700..42131702] , [42131704..42131708] , [42132100..42132108] , [42132200..42132205] , [42141500..42141504] , [42141600..42141603] , [42141605..42141608] , [42141700..42141705] , [42141800..42141817] , [42141900..42141905] , [42142000..42142007] , [42142100..42142114] , [42142119..42142121] , 
[42142200..42142204] , [42142300..42142303] , [42142400..42142404] , [42142406..42142407] , [42142500..42142507] , [42142509..42142538] , [42142600..42142620] , [42142700..42142721] , [42142800..42142802] , [42142900..42142914] , [42143100..42143108] , [42143200..42143204] , [42143300..42143303] , [42143400..42143401] , [42143500..42143514] , [42143600..42143609] , [42143700..42143709] , [42143800..42143803] , [42143900..42143903] , [42144000..42144005] , [42144100..42144103] , [42144200..42144203] , [42144300..42144302] , [42151500..42151507] , [42151600..42151648] , [42151650..42151683] , [42151700..42151705] , [42151800..42151816] , [42151900..42151912] , [42152000..42152014] , [42152100..42152115] , [42152200..42152224] , [42152300..42152307] , [42152400..42152447] , [42152449..42152466] , [42152468..42152470] , [42152500..42152514] , [42152516..42152521] , [42152600..42152608] , [42152700..42152718] , [42152800..42152810] , [42161500..42161510] , [42161600..42161635] , [42161700..42161704] , [42161800..42161804] , [42161900..42161901] , [42171500..42171502] , [42171600..42171614] , [42171700..42171704] , [42171800..42171806] , [42171900..42171920] , [42172000..42172018] , [42172100..42172105] , [42172200..42172201] , [42181500..42181516] , [42181518..42181538] , [42181600..42181612] , [42181700..42181708] , [42181710..42181722] , [42181800..42181805] , [42181900..42181908] , [42181910..42181912] , [42182000..42182020] , [42182100..42182108] , [42182200..42182212] , [42182300..42182308] , [42182310..42182316] , [42182400..42182422] , [42182500..42182502] , [42182600..42182604] , [42182700..42182707] , [42182800..42182808] , [42182900..42182904] , [42183000..42183024] , [42183026..42183067] , [42183100..42183101] , [42183200..42183201] , [42183300..42183301] , [42191500..42191502] , [42191600..42191613] , [42191700..42191711] , [42191800..42191814] , [42191900..42191909] , [42192000..42192003] , [42192100..42192104] , [42192106..42192107] , [42192200..42192214] , [42192300..42192305] , [42192400..42192406] , [42192500..42192502] , [42192600..42192606] , [42201500..42201505] , [42201507..42201513] , [42201600..42201605] , [42201607..42201611] , [42201700..42201719] , [42201800..42201841] , [42201843..42201852] , [42201900..42201908] , [42202000..42202006] , [42202100..42202106] , [42202200..42202205] , [42202300..42202303] , [42202400..42202401] , [42202500..42202501] , [42202600..42202602] , [42202700..42202704] , [42202900..42202901] , [42203000..42203001] , [42203100..42203101] , [42203200..42203202] , [42203300..42203303] , [42203400..42203430] , [42203500..42203506] , [42203600..42203606] , [42203700..42203710] , [42203800..42203804] , [42203900..42203903] , [42204000..42204009] , [42204100..42204101] , [42211500..42211509] , [42211600..42211608] , [42211610..42211620] , [42211700..42211712] , [42211800..42211813] , [42211900..42211918] , [42212000..42212007] , [42212100..42212113] , [42212200..42212204] , [42212300..42212304] , [42221500..42221509] , [42221512..42221518] , [42221600..42221619] , [42221700..42221707] , [42221800..42221803] , [42221900..42221906] , [42222000..42222009] , [42222100..42222104] , [42222200..42222202] , [42222300..42222309] , [42231500..42231510] , [42231600..42231606] , [42231608..42231609] , [42231700..42231705] , [42231800..42231808] , [42231900..42231904] , [42232000..42232003] , [42241500..42241507] , [42241509..42241517] , [42241600..42241604] , [42241606..42241607] , [42241700..42241709] , [42241800..42241809] , [42241811..42241811] , 
[42241900..42241902] , [42242000..42242004] , [42242100..42242109] , [42242300..42242302] , [42251500..42251506] , [42251600..42251625] , [42251700..42251706] , [42251800..42251805] , [42261500..42261516] , [42261600..42261602] , [42261604..42261613] , [42261700..42261707] , [42261800..42261810] , [42261900..42261904] , [42262000..42262008] , [42262100..42262105] , [42271500..42271506] , [42271600..42271622] , [42271700..42271722] , [42271800..42271803] , [42271900..42271916] , [42272000..42272009] , [42272011..42272011] , [42272016..42272017] , [42272100..42272102] , [42272200..42272225] , [42272300..42272307] , [42272500..42272503] , [42272505..42272510] , [42281500..42281519] , [42281521..42281532] , [42281600..42281600] , [42281603..42281606] , [42281700..42281713] , [42281800..42281811] , [42281900..42281909] , [42281912..42281916] , [42291500..42291502] , [42291600..42291617] , [42291619..42291625] , [42291627..42291627] , [42291700..42291705] , [42291707..42291710] , [42291800..42291805] , [42291900..42291902] , [42292000..42292001] , [42292100..42292103] , [42292200..42292203] , [42292300..42292307] , [42292400..42292403] , [42292500..42292505] , [42292600..42292603] , [42292700..42292704] , [42292800..42292803] , [42292900..42292904] , [42292907..42292908] , [42293000..42293006] , [42293100..42293139] , [42293200..42293201] , [42293300..42293304] , [42293400..42293401] , [42293403..42293408] , [42293500..42293509] , [42293600..42293603] , [42293700..42293703] , [42293800..42293804] , [42293900..42293902] , [42294000..42294003] , [42294100..42294103] , [42294200..42294220] , [42294300..42294306] , [42294400..42294402] , [42294500..42294531] , [42294600..42294607] , [42294700..42294724] , [42294800..42294808] , [42294900..42294931] , [42294933..42294958] , [42295000..42295016] , [42295100..42295109] , [42295111..42295112] , [42295114..42295116] , [42295118..42295148] , [42295200..42295207] , [42295300..42295308] , [42295400..42295402] , [42295405..42295411] , [42295413..42295428] , [42295431..42295431] , [42295433..42295433] , [42295435..42295437] , [42295439..42295441] , [42295445..42295446] , [42295448..42295448] , [42295450..42295471] , [42295500..42295500] , [42295502..42295503] , [42295505..42295506] , [42295508..42295526] , [42295600..42295603] , [42295800..42295803] , [42295900..42295906] , [42296000..42296008] , [42296100..42296107] , [42296200..42296212] , [42301500..42301508] , [42311500..42311506] , [42311508..42311508] , [42311510..42311515] , [42311517..42311524] , [42311527..42311528] , [42311531..42311531] , [42311537..42311543] , [42311600..42311605] , [42311700..42311700] , [42311702..42311705] , [42311707..42311708] , [42311900..42311903] , [42312000..42312012] , [42312014..42312014] , [42312100..42312113] , [42312115..42312115] , [42312200..42312208] , [42312300..42312307] , [42312309..42312313] , [42312400..42312403] , [42312500..42312504] , [42312600..42312602] , [42312700..42312701] , [42321500..42321510] , [42321512..42321516] , [42321600..42321622] , [42321700..42321723] , [42321800..42321806] , [42321808..42321813] , [42321900..42321900] , [42321902..42321912] , [42322000..42322006] , [42322100..42322106] , [42322200..42322206] , [43191500..43191505] , [43191507..43191516] , [43191600..43191612] , [43191614..43191616] , [43191618..43191619] , [43191621..43191634] , [43201400..43201407] , [43201409..43201417] , [43201500..43201503] , [43201507..43201509] , [43201513..43201513] , [43201522..43201522] , [43201531..43201531] , [43201533..43201535] , 
[43201537..43201547] , [43201549..43201550] , [43201552..43201560] , [43201600..43201605] , [43201608..43201612] , [43201614..43201619] , [43201800..43201803] , [43201806..43201823] , [43201825..43201835] , [43201900..43201900] , [43201902..43201904] , [43202000..43202010] , [43202100..43202108] , [43202200..43202202] , [43202204..43202222] , [43211500..43211518] , [43211600..43211619] , [43211700..43211702] , [43211704..43211715] , [43211717..43211732] , [43211800..43211807] , [43211900..43211914] , [43212000..43212002] , [43212100..43212117] , [43212200..43212201] , [43221500..43221510] , [43221513..43221530] , [43221600..43221604] , [43221700..43221733] , [43221800..43221811] , [43222500..43222504] , [43222600..43222600] , [43222602..43222602] , [43222604..43222612] , [43222615..43222615] , [43222619..43222644] , [43222700..43222704] , [43222800..43222803] , [43222805..43222806] , [43222811..43222811] , [43222813..43222820] , [43222822..43222827] , [43222900..43222903] , [43223000..43223001] , [43223100..43223113] , [43223200..43223212] , [43223300..43223303] , [43223305..43223343] , [43231500..43231501] , [43231503..43231503] , [43231505..43231515] , [43231600..43231605] , [43232000..43232005] , [43232100..43232108] , [43232110..43232112] , [43232200..43232205] , [43232300..43232307] , [43232309..43232313] , [43232400..43232409] , [43232500..43232508] , [43232600..43232614] , [43232700..43232705] , [43232800..43232805] , [43232900..43232916] , [43233000..43233002] , [43233004..43233005] , [43233200..43233201] , [43233203..43233205] , [43233400..43233407] , [43233410..43233411] , [43233413..43233421] , [43233500..43233512] , [43233600..43233603] , [43233700..43233701] , [44101500..44101501] , [44101503..44101510] , [44101600..44101607] , [44101700..44101716] , [44101718..44101730] , [44101800..44101800] , [44101802..44101810] , [44101900..44101903] , [44102000..44102004] , [44102100..44102109] , [44102200..44102203] , [44102300..44102307] , [44102400..44102400] , [44102402..44102409] , [44102411..44102414] , [44102500..44102503] , [44102600..44102600] , [44102602..44102610] , [44102800..44102806] , [44102900..44102913] , [44103000..44103005] , [44103100..44103101] , [44103103..44103114] , [44103116..44103126] , [44103200..44103206] , [44103500..44103500] , [44103502..44103508] , [44103600..44103601] , [44111500..44111503] , [44111506..44111507] , [44111509..44111522] , [44111600..44111601] , [44111603..44111618] , [44111800..44111810] , [44111812..44111818] , [44111900..44111914] , [44112000..44112002] , [44112004..44112008] , [44121500..44121501] , [44121503..44121513] , [44121600..44121600] , [44121604..44121605] , [44121611..44121615] , [44121617..44121628] , [44121630..44121636] , [44121700..44121721] , [44121800..44121802] , [44121804..44121809] , [44121900..44121900] , [44121902..44121902] , [44121904..44121908] , [44122000..44122003] , [44122005..44122005] , [44122008..44122034] , [44122100..44122101] , [44122103..44122107] , [44122109..44122122] , [45101500..45101518] , [45101600..45101600] , [45101602..45101604] , [45101606..45101612] , [45101700..45101709] , [45101800..45101808] , [45101900..45101905] , [45102000..45102005] , [45111500..45111504] , [45111600..45111610] , [45111612..45111620] , [45111700..45111724] , [45111800..45111829] , [45111900..45111902] , [45112000..45112004] , [45121500..45121506] , [45121510..45121523] , [45121600..45121630] , [45121700..45121720] , [45121800..45121810] , [45131500..45131503] , [45131505..45131510] , [45131600..45131601] , 
[45131604..45131604] , [45131700..45131701] , [45141500..45141504] , [45141600..45141612] , [46101500..46101506] , [46101600..46101601] , [46101700..46101703] , [46101800..46101802] , [46111700..46111705] , [46151500..46151507] , [46151600..46151602] , [46151604..46151609] , [46151700..46151700] , [46151702..46151716] , [46151800..46151801] , [46151900..46151901] , [46161500..46161531] , [46161600..46161605] , [46161700..46161708] , [46161710..46161715] , [46171500..46171524] , [46171600..46171600] , [46171602..46171613] , [46171615..46171640] , [46181500..46181509] , [46181512..46181512] , [46181514..46181514] , [46181516..46181518] , [46181520..46181520] , [46181522..46181522] , [46181526..46181552] , [46181600..46181613] , [46181700..46181711] , [46181800..46181806] , [46181808..46181811] , [46181900..46181904] , [46182000..46182007] , [46182100..46182108] , [46182200..46182213] , [46182300..46182315] , [46182400..46182406] , [46182500..46182507] , [46191500..46191511] , [46191600..46191621] , [46201000..46201002] , [46201100..46201102] , [47101500..47101514] , [47101516..47101519] , [47101521..47101574] , [47101600..47101615] , [47111500..47111503] , [47111505..47111509] , [47111600..47111603] , [47111700..47111701] , [47121500..47121502] , [47121600..47121600] , [47121602..47121613] , [47121700..47121709] , [47121800..47121815] , [47121900..47121903] , [47131500..47131503] , [47131600..47131605] , [47131608..47131619] , [47131700..47131707] , [47131709..47131711] , [47131800..47131835] , [47131900..47131910] , [47132100..47132102] , [48101500..48101546] , [48101600..48101619] , [48101700..48101716] , [48101800..48101820] , [48101900..48101920] , [48102000..48102010] , [48102100..48102109] , [48111000..48111002] , [48111100..48111109] , [48111200..48111202] , [48111300..48111306] , [48111400..48111406] , [48121100..48121102] , [48121200..48121202] , [48121300..48121302] , [48131500..48131505] , [49101600..49101609] , [49101611..49101614] , [49101700..49101702] , [49101704..49101709] , [49121500..49121500] , [49121502..49121513] , [49121600..49121603] , [49131500..49131506] , [49131600..49131607] , [49141500..49141509] , [49141600..49141600] , [49141602..49141607] , [49151500..49151506] , [49151600..49151603] , [49161500..49161527] , [49161600..49161621] , [49161700..49161711] , [49171500..49171509] , [49171600..49171604] , [49181500..49181515] , [49181600..49181614] , [49201500..49201504] , [49201512..49201520] , [49201600..49201612] , [49211600..49211609] , [49211700..49211703] , [49211800..49211833] , [49221500..49221533] , [49241500..49241511] , [49241600..49241604] , [49241700..49241712] , [49241800..49241801] , [50101700..50101700] , [50101716..50101717] , [50111500..50111500] , [50111513..50111562] , [50112000..50112000] , [50112004..50112053] , [50121500..50121500] , [50121537..50121539] , [50121600..50121600] , [50121611..50121613] , [50121700..50121700] , [50121705..50121707] , [50121800..50121800] , [50121802..50121804] , [50121900..50121903] , [50131600..50131600] , [50131609..50131609] , [50131612..50131635] , [50131700..50131706] , [50131800..50131803] , [50151500..50151500] , [50151513..50151515] , [50151600..50151600] , [50151604..50151605] , [50161500..50161500] , [50161509..50161512] , [50161800..50161800] , [50161813..50161815] , [50161900..50161900] , [50171500..50171500] , [50171548..50171548] , [50171550..50171554] , [50171700..50171700] , [50171707..50171708] , [50171800..50171800] , [50171830..50171834] , [50171900..50171902] , [50171904..50171904] , 
[50172000..50172006] , [50181700..50181700] , [50181708..50181709] , [50181900..50181909] , [50182000..50182005] , [50191500..50191500] , [50191505..50191507] , [50192100..50192100] , [50192109..50192113] , [50192300..50192304] , [50192400..50192406] , [50192500..50192504] , [50192600..50192603] , [50192700..50192703] , [50192800..50192803] , [50192900..50192902] , [50193000..50193002] , [50193100..50193108] , [50193200..50193203] , [50201700..50201700] , [50201706..50201715] , [50202200..50202210] , [50202300..50202311] , [50202400..50202401] , [50202403..50202416] , [50202418..50202429] , [50202500..50202513] , [50202600..50202602] , [50202700..50202706] , [50202800..50202806] , [50203000..50203003] , [50211502..50211506] , [50211600..50211600] , [50211607..50211612] , [50221000..50221002] , [50221100..50221102] , [50221200..50221202] , [50221300..50221304] , [50301500..50301597] , [50301600..50301655] , [50301700..50301718] , [50301800..50301814] , [50301900..50301903] , [50302000..50302015] , [50302100..50302104] , [50302200..50302215] , [50302300..50302304] , [50302400..50302409] , [50302500..50302525] , [50302527..50302533] , [50302600..50302607] , [50302700..50302710] , [50302800..50302806] , [50302900..50302916] , [50303000..50303002] , [50303100..50303111] , [50303200..50303207] , [50303300..50303309] , [50303400..50303506] , [50303600..50303665] , [50303700..50303704] , [50303800..50303805] , [50303900..50303906] , [50304000..50304006] , [50304100..50304108] , [50304200..50304208] , [50304300..50304313] , [50304400..50304446] , [50304500..50304525] , [50304600..50304661] , [50304700..50304702] , [50304800..50304801] , [50304900..50304957] , [50305000..50305044] , [50305100..50305106] , [50305200..50305210] , [50305300..50305371] , [50305400..50305460] , [50305500..50305512] , [50305600..50305610] , [50305700..50305723] , [50305800..50305878] , [50305900..50305908] , [50306000..50306008] , [50306100..50306103] , [50306200..50306229] , [50306300..50306310] , [50306400..50306404] , [50306500..50306502] , [50306600..50306604] , [50306700..50306703] , [50306800..50306803] , [50306900..50306906] , [50307000..50307046] , [50307100..50307104] , [50307200..50307230] , [50307500..50307503] , [50311500..50311596] , [50311600..50311655] , [50311700..50311715] , [50311800..50311814] , [50311900..50311903] , [50312000..50312015] , [50312100..50312104] , [50312200..50312215] , [50312300..50312304] , [50312400..50312409] , [50312500..50312525] , [50312527..50312532] , [50312600..50312607] , [50312700..50312710] , [50312800..50312806] , [50312900..50312916] , [50313000..50313002] , [50313100..50313111] , [50313200..50313207] , [50313300..50313309] , [50313400..50313496] , [50313500..50313506] , [50313600..50313665] , [50313700..50313704] , [50313800..50313805] , [50313900..50313906] , [50314000..50314006] , [50314100..50314108] , [50314200..50314208] , [50314300..50314313] , [50314400..50314446] , [50314500..50314525] , [50314600..50314661] , [50314700..50314702] , [50314800..50314801] , [50314900..50314957] , [50315000..50315044] , [50315100..50315106] , [50315200..50315210] , [50315300..50315371] , [50315400..50315460] , [50315500..50315512] , [50315600..50315610] , [50315700..50315723] , [50315800..50315877] , [50315900..50315908] , [50316000..50316008] , [50316100..50316103] , [50316200..50316229] , [50316300..50316310] , [50316400..50316404] , [50316500..50316502] , [50316600..50316604] , [50316700..50316703] , [50316800..50316803] , [50316900..50316906] , [50317000..50317037] , 
[50317100..50317104] , [50317200..50317230] , [50321500..50321596] , [50321600..50321655] , [50321700..50321715] , [50321800..50321814] , [50321900..50321903] , [50322000..50322015] , [50322100..50322104] , [50322200..50322215] , [50322300..50322304] , [50322400..50322409] , [50322500..50322525] , [50322527..50322532] , [50322600..50322607] , [50322700..50322710] , [50322800..50322806] , [50322900..50322916] , [50323000..50323002] , [50323100..50323111] , [50323200..50323207] , [50323300..50323309] , [50323400..50323496] , [50323500..50323506] , [50323600..50323665] , [50323700..50323704] , [50323800..50323805] , [50323900..50323906] , [50324000..50324006] , [50324100..50324108] , [50324200..50324208] , [50324300..50324313] , [50324400..50324446] , [50324500..50324525] , [50324600..50324661] , [50324700..50324702] , [50324800..50324801] , [50324900..50324957] , [50325000..50325044] , [50325100..50325106] , [50325200..50325210] , [50325300..50325371] , [50325400..50325460] , [50325500..50325512] , [50325600..50325610] , [50325700..50325723] , [50325800..50325877] , [50325900..50325908] , [50326000..50326008] , [50326100..50326103] , [50326200..50326229] , [50326300..50326310] , [50326400..50326404] , [50326500..50326502] , [50326600..50326604] , [50326700..50326703] , [50326800..50326803] , [50326900..50326906] , [50327000..50327037] , [50327100..50327104] , [50327200..50327230] , [50331500..50331596] , [50331600..50331655] , [50331700..50331715] , [50331800..50331814] , [50331900..50331903] , [50332000..50332015] , [50332100..50332104] , [50332200..50332215] , [50332300..50332304] , [50332400..50332409] , [50332500..50332525] , [50332527..50332532] , [50332600..50332607] , [50332700..50332710] , [50332800..50332806] , [50332900..50332916] , [50333000..50333002] , [50333100..50333111] , [50333200..50333207] , [50333300..50333309] , [50333400..50333496] , [50333500..50333506] , [50333600..50333665] , [50333700..50333704] , [50333800..50333805] , [50333900..50333906] , [50334000..50334006] , [50334100..50334108] , [50334200..50334208] , [50334300..50334313] , [50334400..50334446] , [50334500..50334525] , [50334600..50334661] , [50334700..50334702] , [50334800..50334801] , [50334900..50334957] , [50335000..50335044] , [50335100..50335106] , [50335200..50335210] , [50335300..50335371] , [50335400..50335460] , [50335500..50335512] , [50335600..50335610] , [50335700..50335723] , [50335800..50335877] , [50335900..50335908] , [50336000..50336008] , [50336100..50336103] , [50336200..50336229] , [50336300..50336310] , [50336400..50336404] , [50336500..50336502] , [50336600..50336604] , [50336700..50336703] , [50336800..50336803] , [50336900..50336906] , [50337000..50337037] , [50337100..50337104] , [50337200..50337230] , [50341500..50341596] , [50341600..50341655] , [50341700..50341715] , [50341800..50341814] , [50341900..50341903] , [50342000..50342015] , [50342100..50342104] , [50342200..50342215] , [50342300..50342304] , [50342400..50342409] , [50342500..50342525] , [50342527..50342532] , [50342600..50342607] , [50342700..50342710] , [50342800..50342806] , [50342900..50342916] , [50343000..50343002] , [50343100..50343111] , [50343200..50343207] , [50343300..50343309] , [50343400..50343496] , [50343500..50343506] , [50343600..50343665] , [50343700..50343704] , [50343800..50343805] , [50343900..50343906] , [50344000..50344006] , [50344100..50344108] , [50344200..50344208] , [50344300..50344313] , [50344400..50344446] , [50344500..50344525] , [50344600..50344661] , [50344700..50344702] , 
[50344800..50344801] , [50344900..50344957] , [50345000..50345044] , [50345100..50345106] , [50345200..50345210] , [50345300..50345371] , [50345400..50345460] , [50345500..50345512] , [50345600..50345610] , [50345700..50345723] , [50345800..50345877] , [50345900..50345908] , [50346000..50346008] , [50346100..50346103] , [50346200..50346229] , [50346300..50346310] , [50346400..50346404] , [50346500..50346502] , [50346600..50346604] , [50346700..50346703] , [50346800..50346803] , [50346900..50346906] , [50347000..50347037] , [50347100..50347104] , [50347200..50347230] , [50351500..50351596] , [50351600..50351655] , [50351700..50351715] , [50351800..50351814] , [50351900..50351903] , [50352000..50352015] , [50352100..50352104] , [50352200..50352215] , [50352300..50352304] , [50352400..50352409] , [50352500..50352525] , [50352527..50352532] , [50352600..50352607] , [50352700..50352710] , [50352800..50352806] , [50352900..50352916] , [50353000..50353002] , [50353100..50353111] , [50353200..50353207] , [50353300..50353309] , [50353400..50353496] , [50353500..50353506] , [50353600..50353665] , [50353700..50353704] , [50353800..50353805] , [50353900..50353906] , [50354000..50354006] , [50354100..50354108] , [50354200..50354208] , [50354300..50354313] , [50354400..50354446] , [50354500..50354525] , [50354600..50354661] , [50354700..50354702] , [50354800..50354801] , [50354900..50354957] , [50355000..50355044] , [50355100..50355106] , [50355200..50355210] , [50355300..50355371] , [50355400..50355460] , [50355500..50355512] , [50355600..50355610] , [50355700..50355723] , [50355800..50355877] , [50355900..50355908] , [50356000..50356008] , [50356100..50356103] , [50356200..50356229] , [50356300..50356310] , [50356400..50356404] , [50356500..50356502] , [50356600..50356604] , [50356700..50356703] , [50356800..50356803] , [50356900..50356906] , [50357000..50357037] , [50357100..50357104] , [50357200..50357230] , [50361500..50361596] , [50361600..50361655] , [50361700..50361715] , [50361800..50361814] , [50361900..50361903] , [50362000..50362015] , [50362100..50362104] , [50362200..50362215] , [50362300..50362304] , [50362400..50362409] , [50362500..50362525] , [50362527..50362532] , [50362600..50362607] , [50362700..50362710] , [50362800..50362806] , [50362900..50362916] , [50363000..50363002] , [50363100..50363111] , [50363200..50363207] , [50363300..50363309] , [50363400..50363496] , [50363500..50363506] , [50363600..50363665] , [50363700..50363704] , [50363800..50363805] , [50363900..50363906] , [50364000..50364006] , [50364100..50364108] , [50364200..50364208] , [50364300..50364313] , [50364400..50364446] , [50364500..50364525] , [50364600..50364661] , [50364700..50364702] , [50364800..50364801] , [50364900..50364957] , [50365000..50365044] , [50365100..50365106] , [50365200..50365210] , [50365300..50365371] , [50365400..50365460] , [50365500..50365512] , [50365600..50365610] , [50365700..50365723] , [50365800..50365877] , [50365900..50365908] , [50366000..50366008] , [50366100..50366103] , [50366200..50366229] , [50366300..50366310] , [50366400..50366404] , [50366500..50366502] , [50366600..50366604] , [50366700..50366703] , [50366800..50366803] , [50366900..50366906] , [50367000..50367037] , [50367100..50367104] , [50367200..50367230] , [50371500..50371596] , [50371600..50371655] , [50371700..50371715] , [50371800..50371814] , [50371900..50371903] , [50372000..50372015] , [50372100..50372104] , [50372200..50372215] , [50372300..50372304] , [50372400..50372409] , [50372500..50372525] , 
[50372527..50372532] , [50372600..50372607] , [50372700..50372710] , [50372800..50372806] , [50372900..50372916] , [50373000..50373002] , [50373100..50373111] , [50373200..50373207] , [50373300..50373309] , [50373400..50373496] , [50373500..50373506] , [50373600..50373665] , [50373700..50373704] , [50373800..50373805] , [50373900..50373906] , [50374000..50374006] , [50374100..50374108] , [50374200..50374208] , [50374300..50374313] , [50374400..50374446] , [50374500..50374525] , [50374600..50374661] , [50374700..50374702] , [50374800..50374801] , [50374900..50374957] , [50375000..50375044] , [50375100..50375106] , [50375200..50375210] , [50375300..50375371] , [50375400..50375460] , [50375500..50375512] , [50375600..50375610] , [50375700..50375723] , [50375800..50375877] , [50375900..50375908] , [50376000..50376008] , [50376100..50376103] , [50376200..50376229] , [50376300..50376310] , [50376400..50376404] , [50376500..50376502] , [50376600..50376604] , [50376700..50376703] , [50376800..50376803] , [50376900..50376906] , [50377000..50377037] , [50377100..50377104] , [50377200..50377230] , [50381500..50381503] , [50381600..50381602] , [50381700..50381702] , [50381800..50381802] , [50401500..50401513] , [50401600..50401605] , [50401700..50401782] , [50401800..50401853] , [50401900..50401922] , [50402000..50402004] , [50402100..50402107] , [50402200..50402211] , [50402300..50402305] , [50402400..50402402] , [50402500..50402507] , [50402600..50402612] , [50402700..50402715] , [50402800..50402808] , [50402900..50402911] , [50403000..50403016] , [50403100..50403101] , [50403200..50403269] , [50403300..50403304] , [50403400..50403423] , [50403500..50403520] , [50403600..50403614] , [50403700..50403707] , [50403800..50403811] , [50403900..50403911] , [50404000..50404004] , [50404006..50404006] , [50404100..50404127] , [50404200..50404202] , [50404300..50404306] , [50404400..50404406] , [50404500..50404508] , [50404600..50404623] , [50404700..50404706] , [50404800..50404823] , [50404900..50404906] , [50405100..50405104] , [50405200..50405207] , [50405300..50405323] , [50405400..50405406] , [50405500..50405506] , [50405600..50405640] , [50405700..50405711] , [50405800..50405804] , [50405900..50405916] , [50406000..50406011] , [50406100..50406104] , [50406200..50406213] , [50406224..50406225] , [50406300..50406329] , [50406400..50406421] , [50406500..50406527] , [50406600..50406608] , [50406700..50406723] , [50406800..50406810] , [50407000..50407068] , [50407070..50407071] , [50407100..50407110] , [50407200..50407205] , [50411500..50411513] , [50411600..50411605] , [50411700..50411782] , [50411800..50411852] , [50411900..50411922] , [50412000..50412004] , [50412100..50412107] , [50412200..50412211] , [50412300..50412304] , [50412400..50412402] , [50412500..50412506] , [50412600..50412612] , [50412700..50412715] , [50412800..50412807] , [50412900..50412911] , [50413000..50413016] , [50413100..50413101] , [50413200..50413204] , [50413300..50413323] , [50413400..50413420] , [50413500..50413514] , [50413600..50413607] , [50413700..50413711] , [50413800..50413811] , [50413900..50413904] , [50413906..50413906] , [50414000..50414027] , [50414100..50414102] , [50414200..50414206] , [50414300..50414306] , [50414400..50414408] , [50414500..50414523] , [50414600..50414606] , [50414700..50414721] , [50414800..50414806] , [50415000..50415004] , [50415100..50415107] , [50415200..50415221] , [50415300..50415306] , [50415400..50415406] , [50415500..50415537] , [50415600..50415608] , [50415700..50415704] , 
[50415800..50415816] , [50415900..50415911] , [50416000..50416004] , [50416100..50416115] , [50416200..50416229] , [50416300..50416320] , [50416400..50416427] , [50416500..50416508] , [50416600..50416620] , [50416700..50416710] , [50416800..50416869] , [50417000..50417069] , [50417100..50417110] , [50421500..50421513] , [50421600..50421605] , [50421700..50421782] , [50421800..50421852] , [50421900..50421922] , [50422000..50422004] , [50422100..50422107] , [50422200..50422211] , [50422300..50422304] , [50422400..50422402] , [50422500..50422506] , [50422600..50422612] , [50422700..50422715] , [50422800..50422808] , [50422900..50422911] , [50423000..50423016] , [50423100..50423101] , [50423200..50423269] , [50423300..50423304] , [50423400..50423423] , [50423500..50423520] , [50423600..50423614] , [50423700..50423707] , [50423800..50423811] , [50423900..50423911] , [50424000..50424005] , [50424100..50424127] , [50424200..50424202] , [50424300..50424306] , [50424400..50424406] , [50424500..50424508] , [50424600..50424623] , [50424700..50424706] , [50424800..50424821] , [50424900..50424906] , [50425000..50425004] , [50425100..50425107] , [50425200..50425221] , [50425300..50425306] , [50425400..50425406] , [50425500..50425538] , [50425600..50425609] , [50425700..50425704] , [50425800..50425816] , [50425900..50425911] , [50426000..50426004] , [50426100..50426116] , [50426200..50426229] , [50426300..50426320] , [50426400..50426427] , [50426500..50426508] , [50426600..50426620] , [50426700..50426710] , [50426800..50426868] , [50426900..50426910] , [50431500..50431513] , [50431600..50431605] , [50431700..50431782] , [50431800..50431852] , [50431900..50431922] , [50432000..50432004] , [50432100..50432107] , [50432200..50432211] , [50432300..50432304] , [50432400..50432402] , [50432500..50432506] , [50432600..50432612] , [50432700..50432715] , [50432800..50432808] , [50432900..50432911] , [50433000..50433016] , [50433100..50433101] , [50433200..50433269] , [50433300..50433304] , [50433400..50433423] , [50433500..50433520] , [50433600..50433614] , [50433700..50433707] , [50433800..50433811] , [50433900..50433911] , [50434000..50434005] , [50434100..50434127] , [50434200..50434202] , [50434300..50434306] , [50434400..50434406] , [50434500..50434508] , [50434600..50434623] , [50434700..50434706] , [50434800..50434821] , [50434900..50434906] , [50435000..50435004] , [50435100..50435107] , [50435200..50435221] , [50435300..50435306] , [50435400..50435406] , [50435500..50435537] , [50435600..50435608] , [50435700..50435704] , [50435800..50435816] , [50435900..50435911] , [50436000..50436004] , [50436100..50436116] , [50436200..50436229] , [50436300..50436320] , [50436400..50436427] , [50436500..50436508] , [50436600..50436620] , [50436700..50436710] , [50436800..50436868] , [50436900..50436910] , [50441500..50441513] , [50441600..50441605] , [50441700..50441782] , [50441800..50441852] , [50441900..50441922] , [50442000..50442004] , [50442100..50442107] , [50442200..50442211] , [50442300..50442304] , [50442400..50442402] , [50442500..50442506] , [50442600..50442612] , [50442700..50442715] , [50442800..50442808] , [50442900..50442911] , [50443000..50443016] , [50443100..50443101] , [50443200..50443269] , [50443300..50443304] , [50443400..50443423] , [50443500..50443520] , [50443600..50443614] , [50443700..50443707] , [50443800..50443811] , [50443900..50443911] , [50444000..50444005] , [50444100..50444127] , [50444200..50444202] , [50444300..50444306] , [50444400..50444406] , [50444500..50444508] , 
[50444600..50444623] , [50444700..50444706] , [50444800..50444821] , [50444900..50444906] , [50445000..50445004] , [50445100..50445107] , [50445200..50445221] , [50445300..50445306] , [50445400..50445406] , [50445500..50445537] , [50445600..50445611] , [50445700..50445704] , [50445800..50445816] , [50445900..50445911] , [50446000..50446004] , [50446100..50446116] , [50446200..50446229] , [50446300..50446321] , [50446400..50446427] , [50446500..50446508] , [50446600..50446620] , [50446700..50446710] , [50446800..50446868] , [50446900..50446910] , [50451500..50451513] , [50451600..50451605] , [50451700..50451782] , [50451800..50451852] , [50451900..50451922] , [50452000..50452004] , [50452100..50452107] , [50452200..50452211] , [50452300..50452304] , [50452400..50452402] , [50452500..50452506] , [50452600..50452612] , [50452700..50452715] , [50452800..50452808] , [50452900..50452911] , [50453000..50453016] , [50453100..50453101] , [50453200..50453269] , [50453300..50453304] , [50453400..50453423] , [50453500..50453520] , [50453600..50453614] , [50453700..50453707] , [50453800..50453811] , [50453900..50453911] , [50454000..50454005] , [50454100..50454127] , [50454200..50454202] , [50454300..50454306] , [50454400..50454406] , [50454500..50454508] , [50454600..50454623] , [50454700..50454706] , [50454800..50454821] , [50454900..50454906] , [50455000..50455004] , [50455100..50455107] , [50455200..50455221] , [50455300..50455306] , [50455400..50455406] , [50455500..50455537] , [50455600..50455608] , [50455700..50455704] , [50455800..50455816] , [50455900..50455911] , [50456000..50456004] , [50456100..50456116] , [50456200..50456229] , [50456300..50456320] , [50456400..50456427] , [50456500..50456508] , [50456600..50456620] , [50456700..50456710] , [50456800..50456868] , [50456900..50456910] , [50461500..50461513] , [50461600..50461605] , [50461700..50461782] , [50461800..50461852] , [50461900..50461922] , [50462000..50462004] , [50462100..50462107] , [50462200..50462211] , [50462300..50462304] , [50462400..50462402] , [50462500..50462506] , [50462600..50462612] , [50462700..50462715] , [50462800..50462808] , [50462900..50462911] , [50463000..50463016] , [50463100..50463101] , [50463200..50463269] , [50463300..50463304] , [50463400..50463423] , [50463500..50463520] , [50463600..50463614] , [50463700..50463707] , [50463800..50463811] , [50463900..50463911] , [50464000..50464005] , [50464100..50464127] , [50464200..50464202] , [50464300..50464306] , [50464400..50464406] , [50464500..50464508] , [50464600..50464623] , [50464700..50464706] , [50464800..50464821] , [50464900..50464906] , [50465000..50465004] , [50465100..50465107] , [50465200..50465221] , [50465300..50465306] , [50465400..50465406] , [50465500..50465537] , [50465600..50465608] , [50465700..50465704] , [50465800..50465816] , [50465900..50465911] , [50466000..50466004] , [50466100..50466116] , [50466200..50466229] , [50466300..50466320] , [50466400..50466427] , [50466500..50466508] , [50466600..50466620] , [50466700..50466710] , [50466800..50466868] , [50466900..50466910] , [50467000..50467007] , [50471500..50471513] , [50471600..50471605] , [50471700..50471782] , [50471800..50471852] , [50471900..50471922] , [50472000..50472004] , [50472100..50472107] , [50472200..50472211] , [50472300..50472304] , [50472400..50472402] , [50472500..50472506] , [50472600..50472612] , [50472700..50472715] , [50472800..50472808] , [50472900..50472911] , [50473000..50473016] , [50473100..50473101] , [50473200..50473269] , [50473300..50473304] , 
[50473400..50473423] , [50473500..50473520] , [50473600..50473614] , [50473700..50473707] , [50473800..50473811] , [50473900..50473911] , [50474000..50474005] , [50474100..50474127] , [50474200..50474202] , [50474300..50474306] , [50474400..50474406] , [50474500..50474508] , [50474600..50474623] , [50474700..50474706] , [50474800..50474821] , [50474900..50474906] , [50475000..50475004] , [50475100..50475107] , [50475200..50475221] , [50475300..50475306] , [50475400..50475406] , [50475500..50475537] , [50475600..50475608] , [50475700..50475704] , [50475800..50475816] , [50475900..50475911] , [50476000..50476004] , [50476100..50476116] , [50476200..50476229] , [50476300..50476320] , [50476400..50476427] , [50476500..50476508] , [50476600..50476620] , [50476700..50476710] , [50476800..50476868] , [50476900..50476910] , [51101500..51101500] , [51101503..51101504] , [51101507..51101516] , [51101518..51101519] , [51101521..51101528] , [51101530..51101604] , [51101606..51101607] , [51101610..51101614] , [51101616..51101620] , [51101624..51101625] , [51101629..51101632] , [51101700..51101721] , [51101800..51101821] , [51101824..51101832] , [51101834..51101837] , [51101900..51101915] , [51102000..51102010] , [51102100..51102102] , [51102200..51102209] , [51102211..51102214] , [51102300..51102302] , [51102304..51102346] , [51102400..51102400] , [51102402..51102403] , [51102500..51102503] , [51102505..51102507] , [51102600..51102601] , [51102700..51102702] , [51102705..51102715] , [51102717..51102730] , [51111500..51111522] , [51111600..51111606] , [51111609..51111618] , [51111700..51111723] , [51111800..51111828] , [51111900..51111902] , [51111904..51111907] , [51121500..51121504] , [51121506..51121507] , [51121509..51121523] , [51121600..51121604] , [51121607..51121611] , [51121614..51121617] , [51121700..51121711] , [51121713..51121718] , [51121721..51121722] , [51121724..51121735] , [51121737..51121775] , [51121777..51121780] , [51121800..51121823] , [51121900..51121910] , [51122100..51122105] , [51122107..51122113] , [51122200..51122201] , [51122300..51122301] , [51131500..51131518] , [51131600..51131617] , [51131700..51131716] , [51131800..51131811] , [51131900..51131901] , [51131903..51131910] , [51132000..51132001] , [51141500..51141539] , [51141541..51141543] , [51141600..51141640] , [51141700..51141732] , [51141800..51141823] , [51141900..51141900] , [51141903..51141904] , [51141907..51141908] , [51141910..51141922] , [51142000..51142006] , [51142009..51142018] , [51142100..51142114] , [51142116..51142155] , [51142200..51142203] , [51142205..51142235] , [51142237..51142237] , [51142300..51142306] , [51142400..51142415] , [51142500..51142514] , [51142600..51142619] , [51142700..51142702] , [51142800..51142801] , [51142900..51142925] , [51142927..51142948] , [51151500..51151518] , [51151600..51151616] , [51151700..51151728] , [51151730..51151750] , [51151800..51151805] , [51151810..51151825] , [51151900..51151908] , [51151910..51151920] , [51152000..51152012] , [51161500..51161508] , [51161510..51161511] , [51161513..51161525] , [51161600..51161603] , [51161605..51161640] , [51161646..51161655] , [51161700..51161710] , [51161800..51161803] , [51161805..51161806] , [51161808..51161815] , [51161817..51161820] , [51161900..51161901] , [51161903..51161903] , [51171500..51171505] , [51171507..51171511] , [51171513..51171513] , [51171600..51171624] , [51171626..51171633] , [51171700..51171704] , [51171706..51171712] , [51171800..51171809] , [51171811..51171823] , [51171900..51171926] , 
[51172000..51172004] , [51172100..51172103] , [51172105..51172111] , [51181500..51181506] , [51181508..51181511] , [51181513..51181517] , [51181519..51181528] , [51181600..51181609] , [51181700..51181756] , [51181800..51181808] , [51181810..51181836] , [51181900..51181906] , [51181908..51181908] , [51181911..51181914] , [51182000..51182014] , [51182100..51182102] , [51182200..51182204] , [51182300..51182304] , [51182400..51182401] , [51182403..51182413] , [51182415..51182424] , [51191500..51191523] , [51191600..51191606] , [51191700..51191706] , [51191800..51191805] , [51191900..51191911] , [51201500..51201519] , [51201600..51201629] , [51201631..51201636] , [51201638..51201639] , [51201646..51201649] , [51201700..51201705] , [51201800..51201811] , [51201900..51201901] , [51211500..51211505] , [51211600..51211613] , [51211615..51211625] , [51211900..51211901] , [51212000..51212018] , [51212020..51212036] , [51212100..51212101] , [51212200..51212204] , [51212300..51212311] , [51212400..51212404] , [51212500..51212505] , [51241000..51241002] , [51241100..51241121] , [51241200..51241234] , [51241300..51241305] , [51251000..51251002] , [52101500..52101516] , [52121500..52121514] , [52121600..52121608] , [52121700..52121705] , [52131500..52131501] , [52131503..52131503] , [52131600..52131604] , [52131700..52131705] , [52141500..52141554] , [52141600..52141612] , [52141700..52141701] , [52141703..52141708] , [52141800..52141807] , [52151500..52151507] , [52151600..52151663] , [52151700..52151709] , [52151800..52151813] , [52151900..52151909] , [52152000..52152018] , [52152100..52152104] , [52152200..52152204] , [52152300..52152301] , [52161500..52161500] , [52161502..52161502] , [52161505..52161505] , [52161507..52161518] , [52161520..52161527] , [52161529..52161529] , [52161531..52161560] , [52161600..52161611] , [52171000..52171002] , [53101500..53101505] , [53101600..53101605] , [53101700..53101705] , [53101800..53101805] , [53101900..53101905] , [53102000..53102003] , [53102100..53102105] , [53102200..53102205] , [53102300..53102310] , [53102400..53102404] , [53102500..53102520] , [53102600..53102606] , [53102700..53102716] , [53102800..53102805] , [53102900..53102904] , [53103000..53103001] , [53103100..53103101] , [53103200..53103201] , [53111500..53111505] , [53111600..53111605] , [53111700..53111705] , [53111800..53111805] , [53111900..53111905] , [53112000..53112005] , [53112100..53112105] , [53121500..53121503] , [53121600..53121603] , [53121605..53121608] , [53121700..53121702] , [53121704..53121706] , [53121800..53121804] , [53131500..53131510] , [53131600..53131647] , [53131700..53131702] , [53131800..53131802] , [53141500..53141508] , [53141600..53141630] , [54101500..54101513] , [54101600..54101605] , [54101700..54101706] , [54111500..54111505] , [54111600..54111606] , [54111700..54111709] , [54121500..54121504] , [54121600..54121603] , [54121700..54121702] , [54121800..54121802] , [55101500..55101507] , [55101509..55101510] , [55101513..55101532] , [55111500..55111515] , [55111600..55111601] , [55121500..55121507] , [55121600..55121602] , [55121604..55121622] , [55121700..55121708] , [55121710..55121710] , [55121712..55121721] , [55121725..55121726] , [55121728..55121735] , [55121800..55121804] , [55121806..55121808] , [55121900..55121908] , [56101500..56101510] , [56101513..56101516] , [56101518..56101533] , [56101535..56101545] , [56101600..56101610] , [56101700..56101708] , [56101710..56101719] , [56101800..56101800] , [56101803..56101813] , [56101900..56101907] , 
[56111500..56111514] , [56111600..56111606] , [56111700..56111707] , [56111800..56111806] , [56111900..56111907] , [56112000..56112005] , [56112100..56112111] , [56112200..56112206] , [56112300..56112304] , [56121000..56121012] , [56121014..56121015] , [56121100..56121102] , [56121200..56121201] , [56121300..56121304] , [56121400..56121403] , [56121500..56121510] , [56121600..56121611] , [56121700..56121704] , [56121800..56121805] , [56121900..56121903] , [56122000..56122004] , [56131500..56131504] , [56131600..56131607] , [56131700..56131703] , [56141500..56141504] , [56141600..56141604] , [60101000..60101010] , [60101100..60101104] , [60101200..60101205] , [60101300..60101302] , [60101304..60101331] , [60101400..60101405] , [60101600..60101610] , [60101700..60101732] , [60101800..60101811] , [60101900..60101911] , [60102000..60102007] , [60102100..60102106] , [60102200..60102206] , [60102300..60102312] , [60102400..60102414] , [60102500..60102513] , [60102600..60102614] , [60102700..60102715] , [60102717..60102718] , [60102800..60102807] , [60102900..60102917] , [60103000..60103010] , [60103012..60103013] , [60103100..60103112] , [60103200..60103204] , [60103300..60103303] , [60103400..60103410] , [60103500..60103504] , [60103600..60103606] , [60103700..60103706] , [60103800..60103809] , [60103900..60103900] , [60103903..60103909] , [60103911..60103911] , [60103915..60103916] , [60103918..60103934] , [60103936..60103936] , [60104000..60104008] , [60104100..60104107] , [60104200..60104204] , [60104300..60104309] , [60104400..60104416] , [60104500..60104509] , [60104511..60104512] , [60104600..60104602] , [60104604..60104619] , [60104700..60104724] , [60104726..60104726] , [60104800..60104827] , [60104900..60104930] , [60105000..60105006] , [60105100..60105104] , [60105200..60105203] , [60105300..60105309] , [60105400..60105429] , [60105500..60105505] , [60105600..60105626] , [60105700..60105705] , [60105800..60105811] , [60105900..60105919] , [60106000..60106004] , [60106100..60106109] , [60106200..60106215] , [60106300..60106302] , [60106400..60106402] , [60106500..60106506] , [60106600..60106612] , [60111000..60111005] , [60111100..60111109] , [60111200..60111208] , [60111300..60111306] , [60111400..60111405] , [60111407..60111411] , [60121000..60121015] , [60121100..60121121] , [60121123..60121153] , [60121200..60121239] , [60121241..60121253] , [60121300..60121306] , [60121400..60121415] , [60121500..60121526] , [60121531..60121540] , [60121600..60121606] , [60121700..60121718] , [60121800..60121814] , [60121900..60121912] , [60122000..60122000] , [60122002..60122009] , [60122100..60122103] , [60122200..60122204] , [60122300..60122302] , [60122400..60122402] , [60122500..60122504] , [60122506..60122509] , [60122600..60122604] , [60122700..60122704] , [60122800..60122801] , [60122900..60122909] , [60123000..60123002] , [60123100..60123103] , [60123200..60123204] , [60123300..60123303] , [60123400..60123403] , [60123500..60123502] , [60123600..60123606] , [60123700..60123703] , [60123800..60123802] , [60123900..60123901] , [60124000..60124002] , [60124100..60124102] , [60124200..60124201] , [60124300..60124325] , [60124400..60124404] , [60124406..60124412] , [60124500..60124515] , [60131000..60131007] , [60131100..60131116] , [60131200..60131237] , [60131300..60131329] , [60131400..60131460] , [60131500..60131520] , [60131600..60131601] , [60131700..60131703] , [60131800..60131803] , [60141000..60141026] , [60141100..60141118] , [60141200..60141205] , [60141300..60141300] , 
[60141302..60141307] , [60141400..60141405] , [64122100..64122100] , [70101500..70101510] , [70101600..70101607] , [70101700..70101704] , [70101800..70101806] , [70101900..70101905] , [70111500..70111508] , [70111600..70111603] , [70111700..70111713] , [70121500..70121505] , [70121600..70121608] , [70121610..70121610] , [70121700..70121705] , [70121800..70121803] , [70121900..70121903] , [70122000..70122010] , [70131500..70131506] , [70131600..70131605] , [70131700..70131708] , [70141500..70141520] , [70141600..70141607] , [70141700..70141710] , [70141800..70141804] , [70141900..70141904] , [70142000..70142011] , [70151500..70151510] , [70151600..70151606] , [70151700..70151707] , [70151800..70151807] , [70151900..70151907] , [70151909..70151910] , [70161500..70161501] , [70161600..70161601] , [70161700..70161704] , [70171500..70171506] , [70171600..70171607] , [70171700..70171709] , [70171800..70171803] , [71101500..71101502] , [71101600..71101609] , [71101700..71101710] , [71112000..71112015] , [71112017..71112031] , [71112100..71112103] , [71112105..71112117] , [71112119..71112123] , [71112200..71112200] , [71112202..71112206] , [71112300..71112303] , [71112322..71112330] , [71121000..71121002] , [71121008..71121012] , [71121016..71121018] , [71121024..71121025] , [71121100..71121123] , [71121200..71121211] , [71121300..71121305] , [71121307..71121307] , [71121309..71121310] , [71121400..71121408] , [71121500..71121516] , [71121600..71121608] , [71121610..71121641] , [71121700..71121706] , [71121800..71121800] , [71121805..71121812] , [71121900..71121905] , [71122000..71122006] , [71122100..71122105] , [71122107..71122116] , [71122200..71122207] , [71122300..71122311] , [71122400..71122400] , [71122407..71122410] , [71122500..71122506] , [71122600..71122602] , [71122605..71122606] , [71122608..71122608] , [71122610..71122610] , [71122614..71122616] , [71122700..71122711] , [71122800..71122808] , [71122810..71122810] , [71122900..71122905] , [71123000..71123007] , [71131000..71131016] , [71131018..71131019] , [71131100..71131111] , [71131200..71131201] , [71131300..71131313] , [71131400..71131420] , [71141000..71141008] , [71141100..71141104] , [71141200..71141202] , [71151000..71151005] , [71151007..71151007] , [71151100..71151106] , [71151200..71151203] , [71151300..71151311] , [71151315..71151327] , [71151400..71151406] , [71161000..71161008] , [71161100..71161107] , [71161109..71161111] , [71161200..71161206] , [71161300..71161308] , [71161400..71161400] , [71161402..71161403] , [71161405..71161405] , [71161407..71161411] , [71161413..71161414] , [71161500..71161500] , [71161503..71161503] , [71161600..71161606] , [72101500..72101501] , [72101504..72101518] , [72102100..72102106] , [72102900..72102900] , [72102902..72102903] , [72102905..72102905] , [72103100..72103104] , [72103300..72103302] , [72103304..72103304] , [72111000..72111008] , [72111100..72111111] , [72121000..72121008] , [72121100..72121105] , [72121200..72121203] , [72121300..72121302] , [72121400..72121410] , [72121500..72121517] , [72141000..72141004] , [72141100..72141126] , [72141200..72141216] , [72141300..72141303] , [72141400..72141402] , [72141500..72141500] , [72141502..72141505] , [72141507..72141511] , [72141600..72141605] , [72141700..72141702] , [72151000..72151006] , [72151100..72151103] , [72151200..72151207] , [72151300..72151308] , [72151400..72151402] , [72151500..72151511] , [72151514..72151515] , [72151600..72151609] , [72151700..72151704] , [72151800..72151803] , [72151900..72151901] , 
[72151903..72151911] , [72152000..72152005] , [72152100..72152104] , [72152200..72152204] , [72152300..72152303] , [72152400..72152405] , [72152500..72152509] , [72152600..72152607] , [72152700..72152711] , [72152800..72152802] , [72152900..72152909] , [72153000..72153002] , [72153100..72153105] , [72153200..72153209] , [72153300..72153303] , [72153400..72153402] , [72153500..72153507] , [72153600..72153613] , [72153700..72153702] , [72153900..72153902] , [72154000..72154066] , [72154100..72154110] , [72154200..72154201] , [72154300..72154302] , [72154400..72154402] , [72154500..72154503] , [73101500..73101505] , [73101600..73101614] , [73101700..73101703] , [73101800..73101802] , [73101900..73101903] , [73111500..73111507] , [73111600..73111604] , [73121500..73121509] , [73121600..73121603] , [73121606..73121608] , [73121610..73121614] , [73121800..73121807] , [73131500..73131508] , [73131600..73131608] , [73131700..73131703] , [73131800..73131804] , [73131900..73131900] , [73131902..73131906] , [73141500..73141508] , [73141600..73141602] , [73141700..73141715] , [73151500..73151506] , [73151600..73151608] , [73151700..73151705] , [73151800..73151805] , [73151900..73151907] , [73152000..73152004] , [73152100..73152109] , [73152112..73152112] , [73161500..73161519] , [73161600..73161607] , [73171500..73171508] , [73171510..73171513] , [73171600..73171606] , [73181000..73181023] , [73181100..73181125] , [73181200..73181206] , [73181300..73181314] , [73181900..73181908] , [76101500..76101503] , [76101600..76101606] , [76111500..76111501] , [76111503..76111506] , [76111600..76111605] , [76111800..76111801] , [76121500..76121503] , [76121600..76121604] , [76121700..76121702] , [76121900..76121904] , [76122000..76122004] , [76122100..76122103] , [76122200..76122203] , [76122300..76122315] , [76122400..76122408] , [76131500..76131502] , [76131600..76131602] , [76131700..76131702] , [77101500..77101505] , [77101600..77101605] , [77101700..77101707] , [77101800..77101806] , [77101900..77101910] , [77102000..77102004] , [77111500..77111508] , [77111600..77111603] , [77121500..77121509] , [77121600..77121610] , [77121700..77121709] , [77131500..77131503] , [77131600..77131604] , [77131700..77131702] , [78101500..78101503] , [78101600..78101606] , [78101700..78101706] , [78101800..78101807] , [78101900..78101905] , [78102000..78102002] , [78102100..78102103] , [78102200..78102206] , [78111500..78111503] , [78111600..78111603] , [78111700..78111705] , [78111802..78111804] , [78111807..78111812] , [78111900..78111901] , [78121500..78121502] , [78121600..78121604] , [78131500..78131502] , [78131600..78131603] , [78131700..78131702] , [78131800..78131806] , [78141500..78141504] , [78141600..78141603] , [78141700..78141704] , [78141800..78141806] , [78141900..78141902] , [78181500..78181509] , [78181600..78181601] , [78181700..78181703] , [78181800..78181802] , [78181900..78181901] , [80101500..80101511] , [80101600..80101605] , [80101700..80101708] , [80111500..80111510] , [80111600..80111625] , [80111700..80111716] , [80121500..80121503] , [80121600..80121611] , [80121700..80121707] , [80121800..80121804] , [80121900..80121903] , [80131500..80131506] , [80131600..80131605] , [80131700..80131703] , [80131800..80131803] , [80141500..80141514] , [80141600..80141607] , [80141609..80141630] , [80141700..80141706] , [80141800..80141803] , [80141900..80141903] , [80151500..80151505] , [80151600..80151605] , [80161500..80161508] , [80161600..80161603] , [80161700..80161703] , [80161800..80161801] , 
[81101500..81101503] , [81101505..81101516] , [81101600..81101605] , [81101700..81101703] , [81101706..81101707] , [81101710..81101711] , [81101713..81101713] , [81101800..81101802] , [81101900..81101900] , [81101902..81101902] , [81102000..81102001] , [81102100..81102101] , [81102200..81102203] , [81102300..81102301] , [81102400..81102402] , [81102500..81102501] , [81102600..81102601] , [81102700..81102702] , [81111500..81111510] , [81111600..81111613] , [81111700..81111708] , [81111800..81111806] , [81111808..81111812] , [81111814..81111814] , [81111818..81111820] , [81111900..81111902] , [81112000..81112010] , [81112100..81112107] , [81112200..81112222] , [81112300..81112309] , [81112400..81112401] , [81112500..81112502] , [81121500..81121504] , [81121600..81121607] , [81131500..81131505] , [81141500..81141506] , [81141600..81141606] , [81141700..81141704] , [81141800..81141807] , [81141900..81141902] , [81151500..81151503] , [81151600..81151604] , [81151700..81151705] , [81151800..81151806] , [81151900..81151904] , [81161500..81161503] , [81161600..81161601] , [81161700..81161712] , [81161800..81161801] , [82101500..82101508] , [82101600..82101605] , [82101700..82101702] , [82101800..82101802] , [82101900..82101905] , [82111500..82111503] , [82111600..82111604] , [82111700..82111705] , [82111800..82111804] , [82111900..82111904] , [82112000..82112066] , [82121500..82121512] , [82121600..82121603] , [82121700..82121702] , [82121800..82121802] , [82121900..82121908] , [82131500..82131505] , [82131600..82131604] , [82141500..82141507] , [82141600..82141602] , [82151500..82151508] , [82151600..82151604] , [82151700..82151706] , [83101500..83101510] , [83101600..83101605] , [83101800..83101808] , [83101900..83101903] , [83111500..83111508] , [83111510..83111511] , [83111600..83111605] , [83111700..83111703] , [83111800..83111804] , [83111900..83111905] , [83112200..83112206] , [83112300..83112304] , [83112400..83112406] , [83112500..83112506] , [83112600..83112606] , [83121500..83121504] , [83121600..83121606] , [83121700..83121704] , [84101500..84101503] , [84101600..84101604] , [84101700..84101705] , [84111500..84111508] , [84111600..84111603] , [84111700..84111703] , [84111800..84111802] , [84121500..84121504] , [84121600..84121607] , [84121700..84121705] , [84121800..84121806] , [84121900..84121903] , [84122000..84122001] , [84131500..84131517] , [84131601..84131610] , [84131700..84131702] , [84131801..84131802] , [84141500..84141503] , [84141600..84141602] , [84141700..84141702] , [85101501..85101509] , [85101601..85101605] , [85101701..85101707] , [85111500..85111514] , [85111600..85111617] , [85111700..85111704] , [85121501..85121504] , [85121600..85121614] , [85121700..85121706] , [85121800..85121810] , [85121900..85121902] , [85122000..85122005] , [85122100..85122109] , [85122200..85122201] , [85131500..85131505] , [85131600..85131600] , [85131602..85131604] , [85131700..85131713] , [85141500..85141504] , [85141600..85141603] , [85141700..85141702] , [85151500..85151508] , [85151600..85151601] , [85151603..85151605] , [85151607..85151607] , [85151700..85151700] , [85161500..85161505] , [85171500..85171501] , [85171600..85171601] , [86101500..86101509] , [86101600..86101610] , [86101700..86101716] , [86101800..86101808] , [86101810..86101810] , [86111500..86111505] , [86111600..86111604] , [86111700..86111702] , [86111800..86111802] , [86121500..86121504] , [86121600..86121602] , [86121700..86121702] , [86121800..86121800] , [86121802..86121804] , [86131500..86131504] , 
[86131600..86131603] , [86131700..86131703] , [86131800..86131800] , [86131900..86131904] , [86132000..86132001] , [86141500..86141503] , [86141600..86141603] , [86141700..86141704] , [90101500..90101504] , [90101600..90101604] , [90101700..90101701] , [90101800..90101802] , [90111500..90111504] , [90111600..90111604] , [90111700..90111703] , [90111800..90111803] , [90121500..90121503] , [90121600..90121603] , [90121700..90121702] , [90121800..90121801] , [90131500..90131504] , [90131600..90131603] , [90141500..90141503] , [90141600..90141603] , [90141700..90141703] , [90151500..90151503] , [90151600..90151603] , [90151700..90151703] , [90151800..90151803] , [90151900..90151903] , [90152000..90152002] , [90152100..90152101] , [91101500..91101505] , [91101600..91101605] , [91101700..91101702] , [91101800..91101803] , [91101900..91101904] , [91111500..91111504] , [91111600..91111603] , [91111700..91111701] , [91111703..91111703] , [91111800..91111801] , [91111803..91111803] , [91111900..91111904] , [92101500..92101501] , [92101503..92101504] , [92101600..92101604] , [92101700..92101700] , [92101800..92101800] , [92101804..92101804] , [92101900..92101904] , [92111704..92111704] , [92111800..92111800] , [92111805..92111806] , [92112100..92112100] , [92121500..92121500] , [92121502..92121504] , [92121600..92121604] , [92121700..92121704] , [92121800..92121803] , [92121900..92121901] , [93101500..93101501] , [93101606..93101606] , [93111500..93111507] , [93111604..93111604] , [93121500..93121500] , [93121503..93121503] , [93121600..93121600] , [93121608..93121608] , [93121700..93121700] , [93131500..93131507] , [93131600..93131613] , [93131700..93131705] , [93131800..93131803] , [93141500..93141500] , [93141504..93141505] , [93141510..93141511] , [93141514..93141514] , [93141600..93141602] , [93141605..93141606] , [93141608..93141610] , [93141700..93141714] , [93141800..93141804] , [93141806..93141806] , [93141808..93141808] , [93141810..93141814] , [93141900..93141906] , [93141909..93141910] , [93142000..93142009] , [93142100..93142104] , [93151500..93151502] , [93151512..93151512] , [93151516..93151516] , [93151600..93151600] , [93151608..93151609] , [93151611..93151611] , [93161500..93161504] , [93161600..93161609] , [93161700..93161700] , [93161702..93161702] , [93161800..93161800] , [93161803..93161804] , [93161806..93161807] , [93171500..93171504] , [93171600..93171600] , [93171700..93171702] , [93171800..93171803] , [94101500..94101505] , [94101600..94101610] , [94101700..94101705] , [94101800..94101801] , [94111700..94111704] , [94111800..94111804] , [94111900..94111903] , [94112000..94112005] , [94121500..94121514] , [94121600..94121607] , [94121700..94121704] , [94121800..94121805] , [94131500..94131504] , [94131600..94131608] , [94131700..94131704] , [94131800..94131800] , [94131805..94131805] , [94131900..94131903] , [94132000..94132004] , [95101500..95101510] , [95101600..95101603] , [95101700..95101708] , [95101800..95101800] , [95101807..95101807] , [95101900..95101903] , [95111500..95111504] , [95111600..95111600] , [95111609..95111610] , [95111615..95111615] , [95121500..95121517] , [95121600..95121624] , [95121640..95121644] , [95121646..95121646] , [95121700..95121712] , [95121800..95121802] , [95121804..95121806] , [95121808..95121808] , [95121900..95121911] , [95121913..95121913] , [95122000..95122008] , [95122100..95122106] , [95122300..95122308] , [95122400..95122403] , [95122500..95122503] , [95122600..95122600] , [95122700..95122700] , [95131500..95131503] , 
[95131600..95131606] , [95131700..95131702] , [95141500..95141502] , [95141600..95141606] , [95141700..95141711] , [95141800..95141803] , [95141900..95141904] ]

  render (ProductOrService 1010101) = "01010101"
  render (ProductOrService x) = show x
yusent/cfdis
src/CFDI/Types/ProductOrService.hs
mit
152,570
0
11
55,358
37,018
23,114
13,904
4,618
0
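The parse logic for ProductOrService sits earlier in the file and is not reproduced here; the catalog itself is just the list of inclusive code ranges shown above. As an illustration only, the sketch below shows one hedged way such a range table could be consulted without expanding every [lo..hi] enumeration into a full list of codes. The names `inRanges` and `sampleRanges`, and the two sample pairs, are hypothetical and invented for this example; they are not part of the library.

-- Hedged sketch: membership test against inclusive (lo, hi) ranges like the
-- catalog entries above, without materialising each enumeration.
-- `sampleRanges` is a tiny illustrative subset, not the real catalog.
inRanges :: Int -> [(Int, Int)] -> Bool
inRanges x = any (\(lo, hi) -> lo <= x && x <= hi)

sampleRanges :: [(Int, Int)]
sampleRanges = [(50473400, 50473423), (95141900, 95141904)]

main :: IO ()
main = do
  print (inRanges 50473410 sampleRanges) -- True: inside the first range
  print (inRanges 50473499 sampleRanges) -- False: falls in a gap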
module CFDI.Types.ZipCode where

import CFDI.Chainable
import CFDI.Types.Type
import Control.Error.Safe (justErr)
import Text.Read          (readMaybe)

newtype ZipCode = ZipCode Int deriving (Eq, Show)

instance Chainable ZipCode where
  chain (ZipCode c) = chain c

instance Type ZipCode where
  parseExpr c = justErr NotInCatalog maybeZipCode
    where
      maybeZipCode = ZipCode <$> (readMaybe c >>= isValid)
      isValid x
        | x > 0 && x < 1000      = Nothing
        | x > 16999 && x < 20000 = Nothing
        | x == 20640             = Nothing
        | x > 28469 && x < 28500 = Nothing
        | x > 52799 && x < 52900 = Nothing
        | x > 54499 && x < 54539 = Nothing
        | x > 54539 && x < 54570 = Nothing
        | x > 68929 && x < 68940 = Nothing
        | x > 90750 && x < 90754 = Nothing
        | x > 90754 && x < 90760 = Nothing
        | x > 91310 && x < 91315 = Nothing
        | x > 93769 && x < 93780 = Nothing
        | x > 95049 && x < 95670 = Nothing
        | x > 99999              = Nothing
        | otherwise              = Just x

  render (ZipCode x) = replicate (5 - length xStr) '0' ++ xStr
    where
      xStr = show x
yusent/cfdis
src/CFDI/Types/ZipCode.hs
mit
1,156
0
13
402
463
229
234
29
0
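The only formatting rule in the ZipCode instance above is the zero padding in `render`. A minimal sketch of that padding in isolation, assuming nothing beyond the Prelude: `padTo5` is a hypothetical helper name, and the CFDI type classes are deliberately left out.

-- Hedged sketch: left-pad an Int to five digits, mirroring the
-- `replicate (5 - length xStr) '0' ++ xStr` expression in render above.
padTo5 :: Int -> String
padTo5 x = replicate (5 - length xStr) '0' ++ xStr
  where
    xStr = show x

main :: IO ()
main = do
  putStrLn (padTo5 1000)  -- "01000"
  putStrLn (padTo5 97300) -- "97300"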
import System.IO
import Data.List
import System.Directory

main = do
  -- Read the current task list and open a temporary file for the new one.
  handle <- openFile "todo.txt" ReadMode
  (tempName, tempHandle) <- openTempFile "." "temp"
  contents <- hGetContents handle
  let tasks = lines contents
      numberedTasks = zipWith (\n line -> show n ++ " - " ++ line) [0..] tasks
  putStrLn "These are your todo tasks:"
  putStrLn $ unlines numberedTasks
  putStrLn "Which one do you wish to delete?"
  numberString <- getLine
  -- Remove the chosen task and write what is left to the temporary file.
  let number = read numberString
      newTasks = delete (tasks !! number) tasks
  hPutStr tempHandle $ unlines newTasks
  hClose handle
  hClose tempHandle
  -- Swap the temporary file in place of the old todo list.
  removeFile "todo.txt"
  renameFile tempName "todo.txt"
RAFIRAF/HASKELL
IO/todo.hs
mit
664
0
15
132
206
93
113
20
1
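The todo program above relies on the partial functions `read` and `(!!)`, so a non-numeric or out-of-range answer crashes it at runtime. The sketch below is a hedged, illustrative variant of just that step; `deleteAt` is a hypothetical helper and this is not the original program.

-- Hedged sketch: validate the task number before deleting, using readMaybe
-- and an explicit bounds check instead of the partial read and (!!).
import Text.Read (readMaybe)

deleteAt :: Int -> [a] -> Maybe [a]
deleteAt n xs
  | n < 0 || n >= length xs = Nothing
  | otherwise               = Just (take n xs ++ drop (n + 1) xs)

main :: IO ()
main = do
  let tasks  = ["Feed the cat", "Water the plants"]
      -- "1" stands in for the user's answer; readMaybe rejects garbage input.
      result = readMaybe "1" >>= \n -> deleteAt n tasks
  case result of
    Just rest -> putStr (unlines rest)          -- remaining tasks
    Nothing   -> putStrLn "Invalid task number" -- bad or out-of-range input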